author     Ed Heyl <edheyl@google.com>    2012-11-12 15:58:48 -0800
committer  Ed Heyl <edheyl@google.com>    2012-11-12 15:58:48 -0800
commit     bb9fa8d7ccd01c5b73cd5776909ddecb3536dbfe (patch)
tree       d45283ca27adaf20a0e03e4d5c2418eace5c4f9e
parent     f39254e8779c9ed990ed8a728632f7956c27de7e (diff)
parent     0db9de9c050c698df5c160249e4060efa96e10a7 (diff)
download   omap4-aah-bb9fa8d7ccd01c5b73cd5776909ddecb3536dbfe.tar.gz
fixed merge conflict with f39254e8779c9ed990ed8a728632f7956c27de7e
Change-Id: I6a4a579e763e7f88a85546b11f9a216604c2c15f
-rw-r--r--  camera/ANativeWindowDisplayAdapter.cpp | 550
-rwxr-xr-x [-rw-r--r--]  camera/Android.mk | 277
-rw-r--r--  camera/AppCallbackNotifier.cpp | 490
-rw-r--r--  camera/BaseCameraAdapter.cpp | 943
-rw-r--r--  camera/BufferSourceAdapter.cpp | 1001
-rw-r--r--  camera/CameraHal.cpp | 2981
-rw-r--r--  camera/CameraHalCommon.cpp | 102
-rw-r--r--  camera/CameraHalUtilClasses.cpp | 25
-rw-r--r--  camera/CameraHal_Module.cpp | 303
-rw-r--r--  camera/CameraParameters.cpp | 138
-rw-r--r--  camera/CameraProperties.cpp | 44
-rwxr-xr-x  camera/Decoder_libjpeg.cpp | 281
-rw-r--r--  camera/Encoder_libjpeg.cpp | 109
-rw-r--r--  camera/MemoryManager.cpp | 168
-rw-r--r--  camera/NV12_resize.cpp (renamed from camera/NV12_resize.c) | 221
-rw-r--r--  camera/OMXCameraAdapter/OMX3A.cpp | 764
-rw-r--r--  camera/OMXCameraAdapter/OMXAlgo.cpp | 585
-rw-r--r-- [-rwxr-xr-x]  camera/OMXCameraAdapter/OMXCameraAdapter.cpp | 2462
-rw-r--r--  camera/OMXCameraAdapter/OMXCapabilities.cpp | 1882
-rw-r--r--  camera/OMXCameraAdapter/OMXCapture.cpp | 1525
-rw-r--r--  camera/OMXCameraAdapter/OMXDCC.cpp | 223
-rw-r--r--  camera/OMXCameraAdapter/OMXDccDataSave.cpp | 361
-rw-r--r--  camera/OMXCameraAdapter/OMXDefaults.cpp | 40
-rw-r--r--  camera/OMXCameraAdapter/OMXExif.cpp | 55
-rw-r--r--  camera/OMXCameraAdapter/OMXFD.cpp | 296
-rw-r--r--  camera/OMXCameraAdapter/OMXFocus.cpp | 221
-rw-r--r--  camera/OMXCameraAdapter/OMXMetadata.cpp | 181
-rw-r--r--  camera/OMXCameraAdapter/OMXReprocess.cpp | 382
-rw-r--r--  camera/OMXCameraAdapter/OMXZoom.cpp | 37
-rw-r--r--  camera/SensorListener.cpp | 51
-rw-r--r--  camera/TICameraParameters.cpp | 140
-rw-r--r--  camera/V4LCameraAdapter/V4LCameraAdapter.cpp | 1183
-rw-r--r--  camera/V4LCameraAdapter/V4LCapabilities.cpp | 367
-rw-r--r--  camera/inc/ANativeWindowDisplayAdapter.h | 47
-rw-r--r--  camera/inc/BaseCameraAdapter.h | 122
-rw-r--r--  camera/inc/BufferSourceAdapter.h | 226
-rw-r--r--  camera/inc/CameraHal.h | 648
-rw-r--r--  camera/inc/CameraProperties.h | 106
-rw-r--r--  camera/inc/Common.h | 65
-rwxr-xr-x  camera/inc/Decoder_libjpeg.h | 57
-rw-r--r--  camera/inc/Encoder_libjpeg.h | 43
-rw-r--r--  camera/inc/General3A_Settings.h | 141
-rw-r--r--  camera/inc/NV12_resize.h | 161
-rw-r--r--  camera/inc/OMXCameraAdapter/OMXCameraAdapter.h | 568
-rw-r--r--  camera/inc/OMXCameraAdapter/OMXDCC.h | 44
-rw-r--r--  camera/inc/OMXCameraAdapter/OMXSceneModeTables.h | 506
-rw-r--r--  camera/inc/SensorListener.h | 26
-rw-r--r--  camera/inc/TICameraParameters.h | 137
-rw-r--r--  camera/inc/V4LCameraAdapter/V4LCameraAdapter.h | 130
-rw-r--r--  domx/Android.mk | 35
-rw-r--r--  domx/domx/Android.mk | 9
-rw-r--r--  domx/domx/Makefile | 10
-rwxr-xr-x  domx/domx/omx_proxy_common.h | 78
-rw-r--r--  domx/domx/omx_proxy_common/src/omx_proxy_common.c | 1103
-rwxr-xr-x  domx/domx/omx_rpc/inc/omx_rpc_internal.h | 2
-rwxr-xr-x  domx/domx/omx_rpc/inc/omx_rpc_stub.h | 2
-rwxr-xr-x  domx/domx/omx_rpc/inc/omx_rpc_utils.h | 5
-rwxr-xr-x  domx/domx/omx_rpc/src/omx_rpc.c | 14
-rwxr-xr-x  domx/domx/omx_rpc/src/omx_rpc_config.c | 131
-rwxr-xr-x  domx/domx/omx_rpc/src/omx_rpc_platform.c | 84
-rwxr-xr-x  domx/domx/omx_rpc/src/omx_rpc_stub.c | 109
-rw-r--r--  domx/domx/profiling/inc/profile.h | 87
-rw-r--r--  domx/domx/profiling/src/profile.c | 299
-rw-r--r--  domx/mm_osal/Android.mk | 3
-rwxr-xr-x  domx/mm_osal/inc/timm_osal_trace.h | 47
-rwxr-xr-x  domx/mm_osal/src/timm_osal_pipes.c | 41
-rwxr-xr-x  domx/mm_osal/src/timm_osal_trace.c | 46
-rw-r--r--  domx/omx_core/Android.mk | 3
-rwxr-xr-x  domx/omx_core/inc/OMX_Audio.h | 613
-rwxr-xr-x  domx/omx_core/inc/OMX_Component.h | 213
-rwxr-xr-x  domx/omx_core/inc/OMX_ContentPipe.h | 105
-rwxr-xr-x  domx/omx_core/inc/OMX_Core.h | 619
-rwxr-xr-x  domx/omx_core/inc/OMX_IVCommon.h | 430
-rwxr-xr-x  domx/omx_core/inc/OMX_Image.h | 195
-rwxr-xr-x  domx/omx_core/inc/OMX_Index.h | 58
-rwxr-xr-x  domx/omx_core/inc/OMX_Other.h | 17
-rwxr-xr-x  domx/omx_core/inc/OMX_TI_Common.h | 122
-rwxr-xr-x  domx/omx_core/inc/OMX_TI_IVCommon.h | 1457
-rwxr-xr-x  domx/omx_core/inc/OMX_TI_Image.h | 7
-rwxr-xr-x  domx/omx_core/inc/OMX_TI_Index.h | 326
-rwxr-xr-x  domx/omx_core/inc/OMX_TI_Video.h | 1875
-rwxr-xr-x  domx/omx_core/inc/OMX_Types.h | 119
-rwxr-xr-x  domx/omx_core/inc/OMX_Video.h | 626
-rwxr-xr-x  domx/omx_core/src/OMX_Core.c | 93
-rwxr-xr-x  domx/omx_core/src/OMX_Core_Wrapper.c | 28
-rw-r--r--  domx/omx_proxy_component/Android.mk | 29
-rwxr-xr-x  domx/omx_proxy_component/omx_camera/src/omx_proxy_camera.c | 352
-rw-r--r--  domx/omx_proxy_component/omx_h264_enc/src/omx_proxy_h264enc.c | 305
-rw-r--r--  domx/omx_proxy_component/omx_mpeg4_enc/src/omx_proxy_mpeg4enc.c | 185
-rwxr-xr-x  domx/omx_proxy_component/omx_video_dec/src/omx_proxy_videodec.c | 120
-rw-r--r--  domx/omx_proxy_component/omx_video_dec/src/omx_proxy_videodec_secure.c | 24
-rwxr-xr-x  domx/omx_proxy_component/omx_video_dec/src/omx_proxy_videodec_utils.c | 318
-rw-r--r--  hwc/Android.mk | 10
-rw-r--r--  hwc/hal_public.h | 21
-rw-r--r--  hwc/hwc.c | 1104
-rw-r--r--  hwc/rgz_2d.c | 1675
-rw-r--r--  hwc/rgz_2d.h | 290
-rw-r--r--  ion/Android.mk | 20
-rw-r--r--  ion/ion.c | 72
-rw-r--r--  ion/ion_test.c | 323
-rw-r--r--  ion/ion_test_2.c | 481
-rw-r--r--  kernel-headers-ti/linux/bltsville.h | 520
-rw-r--r--  kernel-headers-ti/linux/bvblend.h | 210
-rw-r--r--  kernel-headers-ti/linux/bvbuffdesc.h | 56
-rw-r--r--  kernel-headers-ti/linux/bvcache.h | 41
-rw-r--r--  kernel-headers-ti/linux/bventry.h | 39
-rw-r--r--  kernel-headers-ti/linux/bverror.h | 331
-rw-r--r--  kernel-headers-ti/linux/bvfilter.h | 34
-rw-r--r--  kernel-headers-ti/linux/bvinternal.h | 29
-rw-r--r--  kernel-headers-ti/linux/bvsurfgeom.h | 34
-rw-r--r--  kernel-headers-ti/linux/ion.h | 76
-rw-r--r--  kernel-headers-ti/linux/ocd.h | 624
-rw-r--r--  kernel-headers-ti/linux/omap_ion.h | 61
-rw-r--r--  kernel-headers-ti/linux/rpmsg_omx.h | 60
-rw-r--r--  kernel-headers-ti/video/dsscomp.h | 414
-rw-r--r--  kernel-headers-ti/video/omap_hwc.h | 48
-rw-r--r--  libtiutils/Android.mk | 28
-rw-r--r--  libtiutils/DebugUtils.cpp | 96
-rw-r--r--  libtiutils/DebugUtils.h | 385
-rw-r--r--  libtiutils/ErrorUtils.cpp | 9
-rw-r--r--  libtiutils/ErrorUtils.h | 8
-rw-r--r--  libtiutils/MessageQueue.cpp | 8
-rw-r--r--  libtiutils/MessageQueue.h | 33
-rw-r--r--  libtiutils/Semaphore.cpp | 8
-rw-r--r--  libtiutils/Semaphore.h | 8
-rw-r--r--  libtiutils/Status.h | 67
-rw-r--r--  libtiutils/UtilsCommon.h | 99
-rw-r--r--  security/smc_pa_ctrl/Android.mk | 14
-rw-r--r--  security/smc_pa_ctrl/s_version.h | 11
-rw-r--r--  security/tee_client_api/Android.mk | 14
-rw-r--r--  security/tee_client_api/s_version.h | 10
-rw-r--r--  security/tee_client_api/schannel6_protocol.h | 20
-rw-r--r--  security/tee_client_api/tee_client_api_linux_driver.c | 12
-rw-r--r--  security/tf_crypto_sst/Android.mk | 9
-rw-r--r--  security/tf_crypto_sst/pkcs11_object.c | 235
-rw-r--r--  security/tf_crypto_sst/sst_stub.c | 3
-rw-r--r--  security/tf_daemon/Android.mk | 16
-rw-r--r--  security/tf_daemon/delegation_client.c | 194
-rw-r--r--  security/tf_daemon/lib_uuid.h | 155
-rw-r--r--  security/tf_daemon/s_version.h | 10
-rw-r--r--  security/tf_daemon/service_delegation_protocol.h | 22
-rw-r--r--  security/tf_daemon/smc_properties.c | 4
-rw-r--r--  security/tf_daemon/smc_properties_parser.c | 61
-rw-r--r--  security/tf_sdk/include/OEMCrypto.h | 388
-rw-r--r--  security/tf_sdk/include/common_secure_driver_protocol.h | 35
-rw-r--r--  security/tf_sdk/include/s_type.h | 2
-rw-r--r--  security/tf_sdk/include/wvdrm_protocol.h | 131
-rw-r--r--  system-core-headers-ti/ion/ion.h | 50
-rw-r--r--  test/CameraHal/Android.mk | 67
-rw-r--r--  test/CameraHal/camera_test.h | 429
-rw-r--r--  test/CameraHal/camera_test_bufferqueue.h | 156
-rw-r--r--  test/CameraHal/camera_test_menu.cpp | 3135
-rw-r--r--  test/CameraHal/camera_test_script.cpp | 1009
-rw-r--r--  test/CameraHal/camera_test_surfacetexture.cpp | 909
-rw-r--r--  test/CameraHal/camera_test_surfacetexture.h | 236
-rw-r--r--  test/CameraHal/surfacetexture_test.cpp | 253
156 files changed, 36862 insertions, 9809 deletions
diff --git a/camera/ANativeWindowDisplayAdapter.cpp b/camera/ANativeWindowDisplayAdapter.cpp
index e4a70ae..fce9c50 100644
--- a/camera/ANativeWindowDisplayAdapter.cpp
+++ b/camera/ANativeWindowDisplayAdapter.cpp
@@ -14,18 +14,14 @@
* limitations under the License.
*/
-
-
-
-#define LOG_TAG "CameraHAL"
-
#include "ANativeWindowDisplayAdapter.h"
#include <OMX_IVCommon.h>
#include <ui/GraphicBuffer.h>
#include <ui/GraphicBufferMapper.h>
#include <hal_public.h>
-namespace android {
+namespace Ti {
+namespace Camera {
///Constant declarations
///@todo Check the time units
@@ -41,100 +37,35 @@ OMX_COLOR_FORMATTYPE toOMXPixFormat(const char* parameters_format)
if ( parameters_format != NULL )
{
- if (strcmp(parameters_format, (const char *) CameraParameters::PIXEL_FORMAT_YUV422I) == 0)
+ if (strcmp(parameters_format, android::CameraParameters::PIXEL_FORMAT_YUV422I) == 0)
{
CAMHAL_LOGDA("CbYCrY format selected");
pixFormat = OMX_COLOR_FormatCbYCrY;
}
- else if(strcmp(parameters_format, (const char *) CameraParameters::PIXEL_FORMAT_YUV420SP) == 0)
+ else if(strcmp(parameters_format, android::CameraParameters::PIXEL_FORMAT_YUV420SP) == 0)
{
CAMHAL_LOGDA("YUV420SP format selected");
pixFormat = OMX_COLOR_FormatYUV420SemiPlanar;
}
- else if(strcmp(parameters_format, (const char *) CameraParameters::PIXEL_FORMAT_RGB565) == 0)
+ else if(strcmp(parameters_format, android::CameraParameters::PIXEL_FORMAT_RGB565) == 0)
{
CAMHAL_LOGDA("RGB565 format selected");
pixFormat = OMX_COLOR_Format16bitRGB565;
}
else
{
- CAMHAL_LOGDA("Invalid format, CbYCrY format selected as default");
- pixFormat = OMX_COLOR_FormatCbYCrY;
+ CAMHAL_LOGDA("Invalid format, NV12 format selected as default");
+ pixFormat = OMX_COLOR_FormatYUV420SemiPlanar;
}
}
else {
- CAMHAL_LOGEA("Preview format is NULL, defaulting to CbYCrY");
- pixFormat = OMX_COLOR_FormatCbYCrY;
- }
-
- return pixFormat;
-}
-
-const char* getPixFormatConstant(const char* parameters_format)
-{
- const char* pixFormat;
-
- if ( parameters_format != NULL )
- {
- if (strcmp(parameters_format, (const char *) CameraParameters::PIXEL_FORMAT_YUV422I) == 0)
- {
- CAMHAL_LOGVA("CbYCrY format selected");
- pixFormat = (const char *) CameraParameters::PIXEL_FORMAT_YUV422I;
- }
- else if(strcmp(parameters_format, (const char *) CameraParameters::PIXEL_FORMAT_YUV420SP) == 0 ||
- strcmp(parameters_format, (const char *) CameraParameters::PIXEL_FORMAT_YUV420P) == 0)
- {
- // TODO(XXX): We are treating YV12 the same as YUV420SP
- CAMHAL_LOGVA("YUV420SP format selected");
- pixFormat = (const char *) CameraParameters::PIXEL_FORMAT_YUV420SP;
- }
- else if(strcmp(parameters_format, (const char *) CameraParameters::PIXEL_FORMAT_RGB565) == 0)
- {
- CAMHAL_LOGVA("RGB565 format selected");
- pixFormat = (const char *) CameraParameters::PIXEL_FORMAT_RGB565;
- }
- else
- {
- CAMHAL_LOGEA("Invalid format, CbYCrY format selected as default");
- pixFormat = (const char *) CameraParameters::PIXEL_FORMAT_YUV422I;
- }
- }
- else
- {
- CAMHAL_LOGEA("Preview format is NULL, defaulting to CbYCrY");
- pixFormat = (const char *) CameraParameters::PIXEL_FORMAT_YUV422I;
+ CAMHAL_LOGEA("Preview format is NULL, defaulting to NV12");
+ pixFormat = OMX_COLOR_FormatYUV420SemiPlanar;
}
return pixFormat;
}
-const size_t getBufSize(const char* parameters_format, int width, int height)
-{
- int buf_size;
-
- if ( parameters_format != NULL ) {
- if (strcmp(parameters_format,
- (const char *) CameraParameters::PIXEL_FORMAT_YUV422I) == 0) {
- buf_size = width * height * 2;
- }
- else if((strcmp(parameters_format, CameraParameters::PIXEL_FORMAT_YUV420SP) == 0) ||
- (strcmp(parameters_format, CameraParameters::PIXEL_FORMAT_YUV420P) == 0)) {
- buf_size = width * height * 3 / 2;
- }
- else if(strcmp(parameters_format,
- (const char *) CameraParameters::PIXEL_FORMAT_RGB565) == 0) {
- buf_size = width * height * 2;
- } else {
- CAMHAL_LOGEA("Invalid format");
- buf_size = 0;
- }
- } else {
- CAMHAL_LOGEA("Preview format is NULL");
- buf_size = 0;
- }
-
- return buf_size;
-}
/*--------------------ANativeWindowDisplayAdapter Class STARTS here-----------------------------*/
@@ -162,8 +93,7 @@ ANativeWindowDisplayAdapter::ANativeWindowDisplayAdapter():mDisplayThread(NULL),
#endif
mPixelFormat = NULL;
- mBufferHandleMap = NULL;
- mGrallocHandleMap = NULL;
+ mBuffers = NULL;
mOffsetsMap = NULL;
mFrameProvider = NULL;
mANativeWindow = NULL;
@@ -188,8 +118,8 @@ ANativeWindowDisplayAdapter::ANativeWindowDisplayAdapter():mDisplayThread(NULL),
ANativeWindowDisplayAdapter::~ANativeWindowDisplayAdapter()
{
- Semaphore sem;
- TIUTILS::Message msg;
+ Utils::Semaphore sem;
+ Utils::Message msg;
LOG_FUNCTION_NAME;
@@ -245,7 +175,7 @@ status_t ANativeWindowDisplayAdapter::initialize()
}
///Start the display thread
- status_t ret = mDisplayThread->run("DisplayThread", PRIORITY_URGENT_DISPLAY);
+ status_t ret = mDisplayThread->run("DisplayThread", android::PRIORITY_URGENT_DISPLAY);
if ( ret != NO_ERROR )
{
CAMHAL_LOGEA("Couldn't run display thread");
@@ -316,10 +246,9 @@ int ANativeWindowDisplayAdapter::setErrorHandler(ErrorNotifier *errorNotifier)
LOG_FUNCTION_NAME;
- if ( NULL == errorNotifier )
- {
+ if ( NULL == errorNotifier ) {
CAMHAL_LOGEA("Invalid Error Notifier reference");
- ret = -EINVAL;
+ ret = BAD_VALUE;
}
if ( NO_ERROR == ret )
@@ -342,7 +271,7 @@ status_t ANativeWindowDisplayAdapter::setSnapshotTimeRef(struct timeval *refTime
if ( NULL != refTime )
{
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
memcpy(&mStartCapture, refTime, sizeof(struct timeval));
}
@@ -354,10 +283,10 @@ status_t ANativeWindowDisplayAdapter::setSnapshotTimeRef(struct timeval *refTime
#endif
-int ANativeWindowDisplayAdapter::enableDisplay(int width, int height, struct timeval *refTime, S3DParameters *s3dParams)
+int ANativeWindowDisplayAdapter::enableDisplay(int width, int height, struct timeval *refTime)
{
- Semaphore sem;
- TIUTILS::Message msg;
+ Utils::Semaphore sem;
+ Utils::Message msg;
LOG_FUNCTION_NAME;
@@ -369,17 +298,11 @@ int ANativeWindowDisplayAdapter::enableDisplay(int width, int height, struct tim
return NO_ERROR;
}
-#if 0 //TODO: s3d is not part of bringup...will reenable
- if (s3dParams)
- mOverlay->set_s3d_params(s3dParams->mode, s3dParams->framePacking,
- s3dParams->order, s3dParams->subSampling);
-#endif
-
#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
if ( NULL != refTime )
{
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
memcpy(&mStandbyToShot, refTime, sizeof(struct timeval));
mMeasureStandby = true;
}
@@ -401,6 +324,7 @@ int ANativeWindowDisplayAdapter::enableDisplay(int width, int height, struct tim
// Register with the frame provider for frames
mFrameProvider->enableFrameNotification(CameraFrame::PREVIEW_FRAME_SYNC);
+ mFrameProvider->enableFrameNotification(CameraFrame::SNAPSHOT_FRAME);
mDisplayEnabled = true;
mPreviewWidth = width;
@@ -416,7 +340,7 @@ int ANativeWindowDisplayAdapter::enableDisplay(int width, int height, struct tim
int ANativeWindowDisplayAdapter::disableDisplay(bool cancel_buffer)
{
status_t ret = NO_ERROR;
- GraphicBufferMapper &mapper = GraphicBufferMapper::get();
+ android::GraphicBufferMapper &mapper = android::GraphicBufferMapper::get();
LOG_FUNCTION_NAME;
@@ -429,15 +353,16 @@ int ANativeWindowDisplayAdapter::disableDisplay(bool cancel_buffer)
// Unregister with the frame provider here
mFrameProvider->disableFrameNotification(CameraFrame::PREVIEW_FRAME_SYNC);
+ mFrameProvider->disableFrameNotification(CameraFrame::SNAPSHOT_FRAME);
mFrameProvider->removeFramePointers();
if ( NULL != mDisplayThread.get() )
{
//Send STOP_DISPLAY COMMAND to display thread. Display thread will stop and dequeue all messages
// and then wait for message
- Semaphore sem;
+ Utils::Semaphore sem;
sem.Create();
- TIUTILS::Message msg;
+ Utils::Message msg;
msg.command = DisplayThread::DISPLAY_STOP;
// Send the semaphore to signal once the command is completed
@@ -452,11 +377,14 @@ int ANativeWindowDisplayAdapter::disableDisplay(bool cancel_buffer)
}
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
{
///Reset the display enabled flag
mDisplayEnabled = false;
+ // Reset pause flag since display is being disabled
+ mPaused = false;
+
///Reset the offset values
mXOff = -1;
mYOff = -1;
@@ -493,7 +421,7 @@ status_t ANativeWindowDisplayAdapter::pauseDisplay(bool pause)
LOG_FUNCTION_NAME;
{
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
mPaused = pause;
}
@@ -520,18 +448,20 @@ void ANativeWindowDisplayAdapter::destroy()
}
// Implementation of inherited interfaces
-void* ANativeWindowDisplayAdapter::allocateBuffer(int width, int height, const char* format, int &bytes, int numBufs)
+CameraBuffer* ANativeWindowDisplayAdapter::allocateBufferList(int width, int height, const char* format, int &bytes, int numBufs)
{
LOG_FUNCTION_NAME;
status_t err;
int i = -1;
const int lnumBufs = numBufs;
- mBufferHandleMap = new buffer_handle_t*[lnumBufs];
- mGrallocHandleMap = new IMG_native_handle_t*[lnumBufs];
int undequeued = 0;
- GraphicBufferMapper &mapper = GraphicBufferMapper::get();
- Rect bounds;
+ android::GraphicBufferMapper &mapper = android::GraphicBufferMapper::get();
+ android::Rect bounds;
+ mBuffers = new CameraBuffer [lnumBufs];
+ memset (mBuffers, 0, sizeof(CameraBuffer) * lnumBufs);
+
+ mFramesType.clear();
if ( NULL == mANativeWindow ) {
return NULL;
@@ -539,10 +469,10 @@ void* ANativeWindowDisplayAdapter::allocateBuffer(int width, int height, const c
// Set gralloc usage bits for window.
err = mANativeWindow->set_usage(mANativeWindow, CAMHAL_GRALLOC_USAGE);
- if (err != 0) {
- ALOGE("native_window_set_usage failed: %s (%d)", strerror(-err), -err);
+ if ( NO_ERROR != err ) {
+ CAMHAL_LOGE("Surface::setUsage failed: %s (%d)", strerror(-err), -err);
- if ( ENODEV == err ) {
+ if ( NO_INIT == err ) {
CAMHAL_LOGEA("Preview surface abandoned!");
mANativeWindow = NULL;
}
@@ -553,10 +483,10 @@ void* ANativeWindowDisplayAdapter::allocateBuffer(int width, int height, const c
CAMHAL_LOGDB("Number of buffers set to ANativeWindow %d", numBufs);
///Set the number of buffers needed for camera preview
err = mANativeWindow->set_buffer_count(mANativeWindow, numBufs);
- if (err != 0) {
- ALOGE("native_window_set_buffer_count failed: %s (%d)", strerror(-err), -err);
+ if ( NO_ERROR != err ) {
+ CAMHAL_LOGE("Surface::setBufferCount failed: %s (%d)", strerror(-err), -err);
- if ( ENODEV == err ) {
+ if ( NO_INIT == err ) {
CAMHAL_LOGEA("Preview surface abandoned!");
mANativeWindow = NULL;
}
@@ -574,10 +504,10 @@ void* ANativeWindowDisplayAdapter::allocateBuffer(int width, int height, const c
height,
/*toOMXPixFormat(format)*/HAL_PIXEL_FORMAT_TI_NV12); // Gralloc only supports NV12 alloc!
- if (err != 0) {
- ALOGE("native_window_set_buffers_geometry failed: %s (%d)", strerror(-err), -err);
+ if ( NO_ERROR != err ) {
+ CAMHAL_LOGE("native_window_set_buffers_geometry failed: %s (%d)", strerror(-err), -err);
- if ( ENODEV == err ) {
+ if ( NO_INIT == err ) {
CAMHAL_LOGEA("Preview surface abandoned!");
mANativeWindow = NULL;
}
@@ -588,7 +518,7 @@ void* ANativeWindowDisplayAdapter::allocateBuffer(int width, int height, const c
///We just return the buffers from ANativeWindow, if the width and height are same, else (vstab, vnf case)
///re-allocate buffers using ANativeWindow and then get them
///@todo - Re-allocate buffers for vnf and vstab using the width, height, format, numBufs etc
- if ( mBufferHandleMap == NULL )
+ if ( mBuffers == NULL )
{
CAMHAL_LOGEA("Couldn't create array for ANativeWindow buffers");
LOG_FUNCTION_NAME_EXIT;
@@ -596,20 +526,20 @@ void* ANativeWindowDisplayAdapter::allocateBuffer(int width, int height, const c
}
mANativeWindow->get_min_undequeued_buffer_count(mANativeWindow, &undequeued);
+ mPixelFormat = CameraHal::getPixelFormatConstant(format);
for ( i=0; i < mBufferCount; i++ )
{
- IMG_native_handle_t** hndl2hndl;
- IMG_native_handle_t* handle;
+ buffer_handle_t *handle;
int stride; // dummy variable to get stride
// TODO(XXX): Do we need to keep stride information in camera hal?
- err = mANativeWindow->dequeue_buffer(mANativeWindow, (buffer_handle_t**) &hndl2hndl, &stride);
+ err = mANativeWindow->dequeue_buffer(mANativeWindow, &handle, &stride);
- if (err != 0) {
- CAMHAL_LOGEB("dequeueBuffer failed: %s (%d)", strerror(-err), -err);
+ if ( NO_ERROR != err ) {
+ CAMHAL_LOGE("Surface::dequeueBuffer failed: %s (%d)", strerror(-err), -err);
- if ( ENODEV == err ) {
+ if ( NO_INIT == err ) {
CAMHAL_LOGEA("Preview surface abandoned!");
mANativeWindow = NULL;
}
@@ -617,13 +547,19 @@ void* ANativeWindowDisplayAdapter::allocateBuffer(int width, int height, const c
goto fail;
}
- handle = *hndl2hndl;
+ CAMHAL_LOGDB("got handle %p", handle);
+ mBuffers[i].opaque = (void *)handle;
+ mBuffers[i].type = CAMERA_BUFFER_ANW;
+ mBuffers[i].format = mPixelFormat;
+ mFramesWithCameraAdapterMap.add(handle, i);
- mBufferHandleMap[i] = (buffer_handle_t*) hndl2hndl;
- mGrallocHandleMap[i] = handle;
- mFramesWithCameraAdapterMap.add((int) mGrallocHandleMap[i], i);
+ // Tag remaining preview buffers as preview frames
+ if ( i >= ( mBufferCount - undequeued ) ) {
+ mFramesType.add( (int) mBuffers[i].opaque,
+ CameraFrame::PREVIEW_FRAME_SYNC);
+ }
- bytes = getBufSize(format, width, height);
+ bytes = CameraHal::calculateBufferSize(format, width, height);
}
@@ -636,60 +572,63 @@ void* ANativeWindowDisplayAdapter::allocateBuffer(int width, int height, const c
for( i = 0; i < mBufferCount-undequeued; i++ )
{
void *y_uv[2];
+ buffer_handle_t *handle = (buffer_handle_t *) mBuffers[i].opaque;
- mANativeWindow->lock_buffer(mANativeWindow, mBufferHandleMap[i]);
+ mANativeWindow->lock_buffer(mANativeWindow, handle);
- mapper.lock((buffer_handle_t) mGrallocHandleMap[i], CAMHAL_GRALLOC_USAGE, bounds, y_uv);
- mFrameProvider->addFramePointers(mGrallocHandleMap[i] , y_uv);
+ mapper.lock(*handle, CAMHAL_GRALLOC_USAGE, bounds, y_uv);
+ mBuffers[i].mapped = y_uv[0];
+ mFrameProvider->addFramePointers(&mBuffers[i], y_uv);
}
// return the rest of the buffers back to ANativeWindow
for(i = (mBufferCount-undequeued); i >= 0 && i < mBufferCount; i++)
{
- err = mANativeWindow->cancel_buffer(mANativeWindow, mBufferHandleMap[i]);
- if (err != 0) {
- CAMHAL_LOGEB("cancel_buffer failed: %s (%d)", strerror(-err), -err);
+ buffer_handle_t *handle = (buffer_handle_t *) mBuffers[i].opaque;
+ err = mANativeWindow->cancel_buffer(mANativeWindow, handle);
+ if ( NO_ERROR != err ) {
+ CAMHAL_LOGE("Surface::cancelBuffer failed: %s (%d)", strerror(-err), -err);
- if ( ENODEV == err ) {
+ if ( NO_INIT == err ) {
CAMHAL_LOGEA("Preview surface abandoned!");
mANativeWindow = NULL;
}
goto fail;
}
- mFramesWithCameraAdapterMap.removeItem((int) mGrallocHandleMap[i]);
+ mFramesWithCameraAdapterMap.removeItem((buffer_handle_t *) mBuffers[i].opaque);
//LOCK UNLOCK TO GET YUV POINTERS
void *y_uv[2];
- mapper.lock((buffer_handle_t) mGrallocHandleMap[i], CAMHAL_GRALLOC_USAGE, bounds, y_uv);
- mFrameProvider->addFramePointers(mGrallocHandleMap[i] , y_uv);
- mapper.unlock((buffer_handle_t) mGrallocHandleMap[i]);
+ mapper.lock(*(buffer_handle_t *) mBuffers[i].opaque, CAMHAL_GRALLOC_USAGE, bounds, y_uv);
+ mBuffers[i].mapped = y_uv[0];
+ mFrameProvider->addFramePointers(&mBuffers[i], y_uv);
+ mapper.unlock(*(buffer_handle_t *) mBuffers[i].opaque);
}
mFirstInit = true;
- mPixelFormat = getPixFormatConstant(format);
mFrameWidth = width;
mFrameHeight = height;
- return mGrallocHandleMap;
+ return mBuffers;
fail:
// need to cancel buffers if any were dequeued
for (int start = 0; start < i && i > 0; start++) {
- int err = mANativeWindow->cancel_buffer(mANativeWindow, mBufferHandleMap[start]);
- if (err != 0) {
- CAMHAL_LOGEB("cancelBuffer failed w/ error 0x%08x", err);
+ status_t err = mANativeWindow->cancel_buffer(mANativeWindow,
+ (buffer_handle_t *) mBuffers[start].opaque);
+ if ( NO_ERROR != err ) {
+ CAMHAL_LOGE("Surface::cancelBuffer failed w/ error 0x%08x", err);
break;
}
- mFramesWithCameraAdapterMap.removeItem((int) mGrallocHandleMap[start]);
+ mFramesWithCameraAdapterMap.removeItem((buffer_handle_t *) mBuffers[start].opaque);
}
- freeBuffer(mGrallocHandleMap);
+ freeBufferList(mBuffers);
CAMHAL_LOGEA("Error occurred, performing cleanup");
- if ( NULL != mErrorNotifier.get() )
- {
- mErrorNotifier->errorNotify(-ENOMEM);
+ if ( NULL != mErrorNotifier.get() ) {
+ mErrorNotifier->errorNotify(NO_MEMORY);
}
LOG_FUNCTION_NAME_EXIT;
@@ -697,6 +636,13 @@ void* ANativeWindowDisplayAdapter::allocateBuffer(int width, int height, const c
}
+CameraBuffer* ANativeWindowDisplayAdapter::getBufferList(int *numBufs) {
+ LOG_FUNCTION_NAME;
+ if (numBufs) *numBufs = -1;
+
+ return NULL;
+}
+
uint32_t * ANativeWindowDisplayAdapter::getOffsets()
{
const int lnumBufs = mBufferCount;
@@ -711,7 +657,7 @@ uint32_t * ANativeWindowDisplayAdapter::getOffsets()
goto fail;
}
- if( mBufferHandleMap == NULL)
+ if( mBuffers == NULL)
{
CAMHAL_LOGEA("Buffers not allocated yet!!");
goto fail;
@@ -722,7 +668,6 @@ uint32_t * ANativeWindowDisplayAdapter::getOffsets()
mOffsetsMap = new uint32_t[lnumBufs];
for(int i = 0; i < mBufferCount; i++)
{
- IMG_native_handle_t* handle = (IMG_native_handle_t*) *(mBufferHandleMap[i]);
mOffsetsMap[i] = 0;
}
}
@@ -739,9 +684,8 @@ uint32_t * ANativeWindowDisplayAdapter::getOffsets()
mOffsetsMap = NULL;
}
- if ( NULL != mErrorNotifier.get() )
- {
- mErrorNotifier->errorNotify(-ENOSYS);
+ if ( NULL != mErrorNotifier.get() ) {
+ mErrorNotifier->errorNotify(INVALID_OPERATION);
}
LOG_FUNCTION_NAME_EXIT;
@@ -749,34 +693,48 @@ uint32_t * ANativeWindowDisplayAdapter::getOffsets()
return NULL;
}
-int ANativeWindowDisplayAdapter::maxQueueableBuffers(unsigned int& queueable)
-{
+status_t ANativeWindowDisplayAdapter::minUndequeueableBuffers(int& undequeueable) {
LOG_FUNCTION_NAME;
- int ret = NO_ERROR;
- int undequeued = 0;
-
- if(mBufferCount == 0)
- {
- ret = -ENOSYS;
- goto end;
- }
+ status_t ret = NO_ERROR;
- if(!mANativeWindow)
- {
- ret = -ENOSYS;
+ if(!mANativeWindow) {
+ ret = INVALID_OPERATION;
goto end;
}
- ret = mANativeWindow->get_min_undequeued_buffer_count(mANativeWindow, &undequeued);
+ ret = mANativeWindow->get_min_undequeued_buffer_count(mANativeWindow, &undequeueable);
if ( NO_ERROR != ret ) {
CAMHAL_LOGEB("get_min_undequeued_buffer_count failed: %s (%d)", strerror(-ret), -ret);
- if ( ENODEV == ret ) {
+ if ( NO_INIT == ret ) {
CAMHAL_LOGEA("Preview surface abandoned!");
mANativeWindow = NULL;
}
- return -ret;
+ return ret;
+ }
+
+ end:
+ return ret;
+ LOG_FUNCTION_NAME_EXIT;
+
+}
+
+status_t ANativeWindowDisplayAdapter::maxQueueableBuffers(unsigned int& queueable)
+{
+ LOG_FUNCTION_NAME;
+ status_t ret = NO_ERROR;
+ int undequeued = 0;
+
+ if(mBufferCount == 0)
+ {
+ ret = INVALID_OPERATION;
+ goto end;
+ }
+
+ ret = minUndequeueableBuffers(undequeued);
+ if (ret != NO_ERROR) {
+ goto end;
}
queueable = mBufferCount - undequeued;
@@ -792,10 +750,12 @@ int ANativeWindowDisplayAdapter::getFd()
if(mFD == -1)
{
- IMG_native_handle_t* handle = (IMG_native_handle_t*) *(mBufferHandleMap[0]);
+ buffer_handle_t *handle = (buffer_handle_t *)mBuffers[0].opaque;
+ IMG_native_handle_t *img = (IMG_native_handle_t *)handle;
// TODO: should we dup the fd? not really necessary and another thing for ANativeWindow
// to manage and close...
- mFD = dup(handle->fd[0]);
+
+ mFD = dup(img->fd[0]);
}
LOG_FUNCTION_NAME_EXIT;
@@ -808,29 +768,36 @@ status_t ANativeWindowDisplayAdapter::returnBuffersToWindow()
{
status_t ret = NO_ERROR;
- GraphicBufferMapper &mapper = GraphicBufferMapper::get();
+ android::GraphicBufferMapper &mapper = android::GraphicBufferMapper::get();
//Give the buffers back to display here - sort of free it
if (mANativeWindow)
for(unsigned int i = 0; i < mFramesWithCameraAdapterMap.size(); i++) {
int value = mFramesWithCameraAdapterMap.valueAt(i);
+ buffer_handle_t *handle = (buffer_handle_t *) mBuffers[value].opaque;
+
+ // if buffer index is out of bounds skip
+ if ((value < 0) || (value >= mBufferCount)) {
+ CAMHAL_LOGEA("Potential out bounds access to handle...skipping");
+ continue;
+ }
// unlock buffer before giving it up
- mapper.unlock((buffer_handle_t) mGrallocHandleMap[value]);
+ mapper.unlock(*handle);
- ret = mANativeWindow->cancel_buffer(mANativeWindow, mBufferHandleMap[value]);
- if ( ENODEV == ret ) {
+ ret = mANativeWindow->cancel_buffer(mANativeWindow, handle);
+ if ( NO_INIT == ret ) {
CAMHAL_LOGEA("Preview surface abandoned!");
mANativeWindow = NULL;
- return -ret;
+ return ret;
} else if ( NO_ERROR != ret ) {
- CAMHAL_LOGEB("cancel_buffer() failed: %s (%d)",
+ CAMHAL_LOGE("Surface::cancelBuffer() failed: %s (%d)",
strerror(-ret),
-ret);
- return -ret;
+ return ret;
}
}
else
- ALOGE("mANativeWindow is NULL");
+ CAMHAL_LOGE("mANativeWindow is NULL");
///Clear the frames with camera adapter map
mFramesWithCameraAdapterMap.clear();
@@ -839,36 +806,35 @@ status_t ANativeWindowDisplayAdapter::returnBuffersToWindow()
}
-int ANativeWindowDisplayAdapter::freeBuffer(void* buf)
+int ANativeWindowDisplayAdapter::freeBufferList(CameraBuffer * buflist)
{
LOG_FUNCTION_NAME;
- int *buffers = (int *) buf;
status_t ret = NO_ERROR;
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
- if((int *)mGrallocHandleMap != buffers)
+ if(mBuffers != buflist)
{
CAMHAL_LOGEA("CameraHal passed wrong set of buffers to free!!!");
- if (mGrallocHandleMap != NULL)
- delete []mGrallocHandleMap;
- mGrallocHandleMap = NULL;
+ if (mBuffers != NULL)
+ delete []mBuffers;
+ mBuffers = NULL;
}
-
+ /* FIXME this will probably want the list that was just deleted */
returnBuffersToWindow();
- if ( NULL != buf )
+ if ( NULL != buflist )
{
- delete [] buffers;
- mGrallocHandleMap = NULL;
+ delete [] buflist;
+ mBuffers = NULL;
}
- if( mBufferHandleMap != NULL)
+ if( mBuffers != NULL)
{
- delete [] mBufferHandleMap;
- mBufferHandleMap = NULL;
+ delete [] mBuffers;
+ mBuffers = NULL;
}
if ( NULL != mOffsetsMap )
@@ -883,6 +849,8 @@ int ANativeWindowDisplayAdapter::freeBuffer(void* buf)
mFD = -1;
}
+ mFramesType.clear();
+
return NO_ERROR;
}
@@ -892,11 +860,6 @@ bool ANativeWindowDisplayAdapter::supportsExternalBuffering()
return false;
}
-int ANativeWindowDisplayAdapter::useBuffers(void *bufArr, int num)
-{
- return NO_ERROR;
-}
-
void ANativeWindowDisplayAdapter::displayThread()
{
bool shouldLive = true;
@@ -907,7 +870,7 @@ void ANativeWindowDisplayAdapter::displayThread()
while(shouldLive)
{
- ret = TIUTILS::MessageQueue::waitForMsg(&mDisplayThread->msgQ()
+ ret = Utils::MessageQueue::waitForMsg(&mDisplayThread->msgQ()
, &mDisplayQ
, NULL
, ANativeWindowDisplayAdapter::DISPLAY_TIMEOUT);
@@ -929,7 +892,7 @@ void ANativeWindowDisplayAdapter::displayThread()
}
else
{
- TIUTILS::Message msg;
+ Utils::Message msg;
///Get the dummy msg from the displayQ
if(mDisplayQ.get(&msg)!=NO_ERROR)
{
@@ -960,7 +923,7 @@ void ANativeWindowDisplayAdapter::displayThread()
bool ANativeWindowDisplayAdapter::processHalMsg()
{
- TIUTILS::Message msg;
+ Utils::Message msg;
LOG_FUNCTION_NAME;
@@ -987,6 +950,12 @@ bool ANativeWindowDisplayAdapter::processHalMsg()
CAMHAL_LOGDA("Display thread received DISPLAY_STOP command from Camera HAL");
mDisplayState = ANativeWindowDisplayAdapter::DISPLAY_STOPPED;
+ // flush frame message queue
+ while ( !mDisplayQ.isEmpty() ) {
+ Utils::Message message;
+ mDisplayQ.get(&message);
+ }
+
break;
case DisplayThread::DISPLAY_EXIT:
@@ -1013,7 +982,7 @@ bool ANativeWindowDisplayAdapter::processHalMsg()
{
CAMHAL_LOGDA("+Signalling display semaphore");
- Semaphore &sem = *((Semaphore*)msg.arg1);
+ Utils::Semaphore &sem = *((Utils::Semaphore*)msg.arg1);
sem.Signal();
@@ -1031,7 +1000,7 @@ status_t ANativeWindowDisplayAdapter::PostFrame(ANativeWindowDisplayAdapter::Dis
status_t ret = NO_ERROR;
uint32_t actualFramesWithDisplay = 0;
android_native_buffer_t *buffer = NULL;
- GraphicBufferMapper &mapper = GraphicBufferMapper::get();
+ android::GraphicBufferMapper &mapper = android::GraphicBufferMapper::get();
int i;
///@todo Do cropping based on the stabilized frame coordinates
@@ -1039,114 +1008,101 @@ status_t ANativeWindowDisplayAdapter::PostFrame(ANativeWindowDisplayAdapter::Dis
///display or rendering rate whichever is lower
///Queue the buffer to overlay
- if (!mGrallocHandleMap || !dispFrame.mBuffer) {
+ if ( NULL == mANativeWindow ) {
+ return NO_INIT;
+ }
+
+ if (!mBuffers || !dispFrame.mBuffer) {
CAMHAL_LOGEA("NULL sent to PostFrame");
- return -EINVAL;
+ return BAD_VALUE;
}
for ( i = 0; i < mBufferCount; i++ )
+ {
+ if ( dispFrame.mBuffer == &mBuffers[i] )
{
- if ( ((int) dispFrame.mBuffer ) == (int)mGrallocHandleMap[i] )
- {
break;
}
}
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+ if ( mMeasureStandby ) {
+ CameraHal::PPM("Standby to first shot: Sensor Change completed - ", &mStandbyToShot);
+ mMeasureStandby = false;
+ } else if (CameraFrame::CameraFrame::SNAPSHOT_FRAME == dispFrame.mType) {
+ CameraHal::PPM("Shot to snapshot: ", &mStartCapture);
+ mShotToShot = true;
+ } else if ( mShotToShot ) {
+ CameraHal::PPM("Shot to shot: ", &mStartCapture);
+ mShotToShot = false;
+ }
+
+#endif
+
+ android::AutoMutex lock(mLock);
+
+ mFramesType.add( (int)mBuffers[i].opaque, dispFrame.mType);
+
if ( mDisplayState == ANativeWindowDisplayAdapter::DISPLAY_STARTED &&
(!mPaused || CameraFrame::CameraFrame::SNAPSHOT_FRAME == dispFrame.mType) &&
!mSuspend)
{
- Mutex::Autolock lock(mLock);
- uint32_t xOff = (dispFrame.mOffset% PAGE_SIZE);
- uint32_t yOff = (dispFrame.mOffset / PAGE_SIZE);
+ uint32_t xOff, yOff;
+
+ CameraHal::getXYFromOffset(&xOff, &yOff, dispFrame.mOffset, PAGE_SIZE, mPixelFormat);
// Set crop only if current x and y offsets do not match with frame offsets
- if((mXOff!=xOff) || (mYOff!=yOff))
- {
- CAMHAL_LOGDB("Offset %d xOff = %d, yOff = %d", dispFrame.mOffset, xOff, yOff);
- uint8_t bytesPerPixel;
- ///Calculate bytes per pixel based on the pixel format
- if(strcmp(mPixelFormat, (const char *) CameraParameters::PIXEL_FORMAT_YUV422I) == 0)
- {
- bytesPerPixel = 2;
- }
- else if(strcmp(mPixelFormat, (const char *) CameraParameters::PIXEL_FORMAT_RGB565) == 0)
- {
- bytesPerPixel = 2;
- }
- else if(strcmp(mPixelFormat, (const char *) CameraParameters::PIXEL_FORMAT_YUV420SP) == 0)
- {
- bytesPerPixel = 1;
- }
- else
- {
- bytesPerPixel = 1;
- }
+ if ((mXOff != xOff) || (mYOff != yOff)) {
+ CAMHAL_LOGDB("offset = %u left = %d top = %d right = %d bottom = %d",
+ dispFrame.mOffset, xOff, yOff ,
+ xOff + mPreviewWidth, yOff + mPreviewHeight);
- CAMHAL_LOGVB(" crop.left = %d crop.top = %d crop.right = %d crop.bottom = %d",
- xOff/bytesPerPixel, yOff , (xOff/bytesPerPixel)+mPreviewWidth, yOff+mPreviewHeight);
// We'll ignore any errors here, if the surface is
// already invalid, we'll know soon enough.
- mANativeWindow->set_crop(mANativeWindow, xOff/bytesPerPixel, yOff,
- (xOff/bytesPerPixel)+mPreviewWidth, yOff+mPreviewHeight);
+ mANativeWindow->set_crop(mANativeWindow, xOff, yOff,
+ xOff + mPreviewWidth, yOff + mPreviewHeight);
- ///Update the current x and y offsets
+ // Update the current x and y offsets
mXOff = xOff;
mYOff = yOff;
}
- // unlock buffer before sending to display
- mapper.unlock((buffer_handle_t) mGrallocHandleMap[i]);
- ret = mANativeWindow->enqueue_buffer(mANativeWindow, mBufferHandleMap[i]);
- if (ret != 0) {
- ALOGE("Surface::queueBuffer returned error %d", ret);
+ {
+ buffer_handle_t *handle = (buffer_handle_t *) mBuffers[i].opaque;
+ // unlock buffer before sending to display
+ mapper.unlock(*handle);
+ ret = mANativeWindow->enqueue_buffer(mANativeWindow, handle);
+ }
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGE("Surface::queueBuffer returned error %d", ret);
}
- mFramesWithCameraAdapterMap.removeItem((int) dispFrame.mBuffer);
+ mFramesWithCameraAdapterMap.removeItem((buffer_handle_t *) dispFrame.mBuffer->opaque);
// HWComposer has not minimum buffer requirement. We should be able to dequeue
// the buffer immediately
- TIUTILS::Message msg;
+ Utils::Message msg;
mDisplayQ.put(&msg);
-
-#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
-
- if ( mMeasureStandby )
- {
- CameraHal::PPM("Standby to first shot: Sensor Change completed - ", &mStandbyToShot);
- mMeasureStandby = false;
- }
- else if (CameraFrame::CameraFrame::SNAPSHOT_FRAME == dispFrame.mType)
- {
- CameraHal::PPM("Shot to snapshot: ", &mStartCapture);
- mShotToShot = true;
- }
- else if ( mShotToShot )
- {
- CameraHal::PPM("Shot to shot: ", &mStartCapture);
- mShotToShot = false;
- }
-#endif
-
}
else
{
- Mutex::Autolock lock(mLock);
+ buffer_handle_t *handle = (buffer_handle_t *) mBuffers[i].opaque;
// unlock buffer before giving it up
- mapper.unlock((buffer_handle_t) mGrallocHandleMap[i]);
+ mapper.unlock(*handle);
// cancel buffer and dequeue another one
- ret = mANativeWindow->cancel_buffer(mANativeWindow, mBufferHandleMap[i]);
- if (ret != 0) {
- ALOGE("Surface::queueBuffer returned error %d", ret);
+ ret = mANativeWindow->cancel_buffer(mANativeWindow, handle);
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGE("Surface::cancelBuffer returned error %d", ret);
}
- mFramesWithCameraAdapterMap.removeItem((int) dispFrame.mBuffer);
+ mFramesWithCameraAdapterMap.removeItem((buffer_handle_t *) dispFrame.mBuffer->opaque);
- TIUTILS::Message msg;
+ Utils::Message msg;
mDisplayQ.put(&msg);
ret = NO_ERROR;
}
@@ -1158,11 +1114,14 @@ status_t ANativeWindowDisplayAdapter::PostFrame(ANativeWindowDisplayAdapter::Dis
bool ANativeWindowDisplayAdapter::handleFrameReturn()
{
status_t err;
- buffer_handle_t* buf;
+ buffer_handle_t *buf;
int i = 0;
+ unsigned int k;
int stride; // dummy variable to get stride
- GraphicBufferMapper &mapper = GraphicBufferMapper::get();
- Rect bounds;
+ android::GraphicBufferMapper &mapper = android::GraphicBufferMapper::get();
+ android::Rect bounds;
+ CameraFrame::FrameType frameType = CameraFrame::PREVIEW_FRAME_SYNC;
+
void *y_uv[2];
// TODO(XXX): Do we need to keep stride information in camera hal?
@@ -1173,9 +1132,9 @@ bool ANativeWindowDisplayAdapter::handleFrameReturn()
err = mANativeWindow->dequeue_buffer(mANativeWindow, &buf, &stride);
if (err != 0) {
- CAMHAL_LOGEB("dequeueBuffer failed: %s (%d)", strerror(-err), -err);
+ CAMHAL_LOGE("Surface::dequeueBuffer failed: %s (%d)", strerror(-err), -err);
- if ( ENODEV == err ) {
+ if ( NO_INIT == err ) {
CAMHAL_LOGEA("Preview surface abandoned!");
mANativeWindow = NULL;
}
@@ -1184,10 +1143,10 @@ bool ANativeWindowDisplayAdapter::handleFrameReturn()
}
err = mANativeWindow->lock_buffer(mANativeWindow, buf);
- if (err != 0) {
- CAMHAL_LOGEB("lockbuffer failed: %s (%d)", strerror(-err), -err);
+ if ( NO_ERROR != err ) {
+ CAMHAL_LOGE("Surface::lockBuffer failed: %s (%d)", strerror(-err), -err);
- if ( ENODEV == err ) {
+ if ( NO_INIT == err ) {
CAMHAL_LOGEA("Preview surface abandoned!");
mANativeWindow = NULL;
}
@@ -1197,9 +1156,12 @@ bool ANativeWindowDisplayAdapter::handleFrameReturn()
for(i = 0; i < mBufferCount; i++)
{
- if (mBufferHandleMap[i] == buf)
+ if (mBuffers[i].opaque == buf)
break;
}
+ if (i == mBufferCount) {
+ CAMHAL_LOGEB("Failed to find handle %p", buf);
+ }
// lock buffer before sending to FrameProvider for filling
bounds.left = 0;
@@ -1208,7 +1170,7 @@ bool ANativeWindowDisplayAdapter::handleFrameReturn()
bounds.bottom = mFrameHeight;
int lock_try_count = 0;
- while (mapper.lock((buffer_handle_t) mGrallocHandleMap[i], CAMHAL_GRALLOC_USAGE, bounds, y_uv) < 0){
+ while (mapper.lock(*(buffer_handle_t *) mBuffers[i].opaque, CAMHAL_GRALLOC_USAGE, bounds, y_uv) < 0){
if (++lock_try_count > LOCK_BUFFER_TRIES){
if ( NULL != mErrorNotifier.get() ){
mErrorNotifier->errorNotify(CAMERA_ERROR_UNKNOWN);
@@ -1219,10 +1181,27 @@ bool ANativeWindowDisplayAdapter::handleFrameReturn()
usleep(15000);
}
- mFramesWithCameraAdapterMap.add((int) mGrallocHandleMap[i], i);
+ {
+ android::AutoMutex lock(mLock);
+ mFramesWithCameraAdapterMap.add((buffer_handle_t *) mBuffers[i].opaque, i);
+
+ for( k = 0; k < mFramesType.size() ; k++) {
+ if(mFramesType.keyAt(k) == (int)mBuffers[i].opaque)
+ break;
+ }
+
+ if ( k == mFramesType.size() ) {
+ CAMHAL_LOGE("Frame type for preview buffer 0%x not found!!", mBuffers[i].opaque);
+ return false;
+ }
+
+ frameType = (CameraFrame::FrameType) mFramesType.valueAt(k);
+ mFramesType.removeItem((int) mBuffers[i].opaque);
+ }
CAMHAL_LOGVB("handleFrameReturn: found graphic buffer %d of %d", i, mBufferCount-1);
- mFrameProvider->returnFrame( (void*)mGrallocHandleMap[i], CameraFrame::PREVIEW_FRAME_SYNC);
+ mFrameProvider->returnFrame(&mBuffers[i], frameType);
+
return true;
}
@@ -1251,6 +1230,7 @@ void ANativeWindowDisplayAdapter::frameCallbackRelay(CameraFrame* caFrame)
void ANativeWindowDisplayAdapter::frameCallback(CameraFrame* caFrame)
{
///Call queueBuffer of overlay in the context of the callback thread
+
DisplayFrame df;
df.mBuffer = caFrame->mBuffer;
df.mType = (CameraFrame::FrameType) caFrame->mFrameType;
@@ -1265,5 +1245,5 @@ void ANativeWindowDisplayAdapter::frameCallback(CameraFrame* caFrame)
/*--------------------ANativeWindowDisplayAdapter Class ENDS here-----------------------------*/
-};
-
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/Android.mk b/camera/Android.mk
index f1d7cb4..90f95c1 100644..100755
--- a/camera/Android.mk
+++ b/camera/Android.mk
@@ -1,89 +1,153 @@
-ifeq ($(TARGET_BOARD_PLATFORM),omap4)
+ifeq ($(findstring omap, $(TARGET_BOARD_PLATFORM)),omap)
+ifeq ($(TI_OMAP4_CAMERAHAL_VARIANT),)
LOCAL_PATH:= $(call my-dir)
-OMAP4_CAMERA_HAL_USES:= OMX
-# OMAP4_CAMERA_HAL_USES:= USB
-
-OMAP4_CAMERA_HAL_SRC := \
- CameraHal_Module.cpp \
- CameraHal.cpp \
- CameraHalUtilClasses.cpp \
- AppCallbackNotifier.cpp \
- ANativeWindowDisplayAdapter.cpp \
- CameraProperties.cpp \
- MemoryManager.cpp \
- Encoder_libjpeg.cpp \
- SensorListener.cpp \
- NV12_resize.c
-
-OMAP4_CAMERA_COMMON_SRC:= \
- CameraParameters.cpp \
- TICameraParameters.cpp \
- CameraHalCommon.cpp
-
-OMAP4_CAMERA_OMX_SRC:= \
- BaseCameraAdapter.cpp \
- OMXCameraAdapter/OMX3A.cpp \
- OMXCameraAdapter/OMXAlgo.cpp \
- OMXCameraAdapter/OMXCameraAdapter.cpp \
- OMXCameraAdapter/OMXCapabilities.cpp \
- OMXCameraAdapter/OMXCapture.cpp \
- OMXCameraAdapter/OMXDefaults.cpp \
- OMXCameraAdapter/OMXExif.cpp \
- OMXCameraAdapter/OMXFD.cpp \
- OMXCameraAdapter/OMXFocus.cpp \
- OMXCameraAdapter/OMXZoom.cpp \
-
-OMAP4_CAMERA_USB_SRC:= \
- BaseCameraAdapter.cpp \
- V4LCameraAdapter/V4LCameraAdapter.cpp
-
-#
-# OMX Camera HAL
-#
+# A@H customization: only select USB camera support
+OMAP4_CAMERA_HAL_USES := USB
-ifeq ($(OMAP4_CAMERA_HAL_USES),OMX)
+# A@H customization: simulate the macros defined by TI's android-api.mk
+CAMERAHAL_CFLAGS += -DANDROID_API_JB_OR_LATER -DANDROID_API_ICS_OR_LATER
-include $(CLEAR_VARS)
+ifdef TI_CAMERAHAL_DEBUG_ENABLED
+ # Enable CameraHAL debug logs
+ CAMERAHAL_CFLAGS += -DCAMERAHAL_DEBUG
+endif
-LOCAL_SRC_FILES:= \
- $(OMAP4_CAMERA_HAL_SRC) \
- $(OMAP4_CAMERA_OMX_SRC) \
- $(OMAP4_CAMERA_COMMON_SRC)
+ifdef TI_CAMERAHAL_VERBOSE_DEBUG_ENABLED
+ # Enable CameraHAL verbose debug logs
+ CAMERAHAL_CFLAGS += -DCAMERAHAL_DEBUG_VERBOSE
+endif
-LOCAL_C_INCLUDES += \
- $(LOCAL_PATH)/inc/ \
- $(LOCAL_PATH)/../hwc \
- $(LOCAL_PATH)/../include \
- $(LOCAL_PATH)/inc/OMXCameraAdapter \
- $(LOCAL_PATH)/../libtiutils \
+ifdef TI_CAMERAHAL_DEBUG_FUNCTION_NAMES
+ # Enable CameraHAL function enter/exit logging
+ CAMERAHAL_CFLAGS += -DTI_UTILS_FUNCTION_LOGGER_ENABLE
+endif
+
+ifdef TI_CAMERAHAL_DEBUG_TIMESTAMPS
+ # Enable timestamp logging
+ CAMERAHAL_CFLAGS += -DTI_UTILS_DEBUG_USE_TIMESTAMPS
+endif
+
+ifndef TI_CAMERAHAL_DONT_USE_RAW_IMAGE_SAVING
+ # Enabled saving RAW images to file
+ CAMERAHAL_CFLAGS += -DCAMERAHAL_USE_RAW_IMAGE_SAVING
+endif
+
+ifdef TI_CAMERAHAL_PROFILING
+ # Enable OMX Camera component profiling
+ CAMERAHAL_CFLAGS += -DCAMERAHAL_OMX_PROFILING
+endif
+
+ifeq ($(findstring omap5, $(TARGET_BOARD_PLATFORM)),omap5)
+ CAMERAHAL_CFLAGS += -DCAMERAHAL_OMAP5_CAPTURE_MODES
+endif
+
+CAMERAHAL_CFLAGS += -DENHANCED_DOMX
+
+CAMERAHAL_CFLAGS += -DLOG_TAG=\"CameraHal\"
+
+TI_CAMERAHAL_COMMON_INCLUDES := \
hardware/ti/omap4xxx/tiler \
- hardware/ti/omap4xxx/ion \
- hardware/ti/omap4xxx/domx/omx_core/inc \
- hardware/ti/omap4xxx/domx/mm_osal/inc \
- frameworks/base/include/media/stagefright \
- frameworks/native/include/media/hardware \
- frameworks/native/include/media/openmax \
+ hardware/ti/omap4xxx/hwc \
external/jpeg \
- external/jhead
+ external/jhead \
+ $(LOCAL_PATH)/../libtiutils \
+ $(LOCAL_PATH)/inc \
+ $(HARDWARE_TI_OMAP4_BASE)/domx/mm_osal/inc \
+ $(HARDWARE_TI_OMAP4_BASE)/domx/omx_core/inc \
+ $(HARDWARE_TI_OMAP4_BASE)/kernel-headers-ti \
+ $(HARDWARE_TI_OMAP4_BASE)/system-core-headers-ti \
-LOCAL_SHARED_LIBRARIES:= \
+TI_CAMERAHAL_COMMON_INCLUDES += \
+ frameworks/native/include/media/hardware \
+ system/core/include
+
+TI_CAMERAHAL_COMMON_SRC := \
+ CameraHal_Module.cpp \
+ CameraHal.cpp \
+ CameraHalUtilClasses.cpp \
+ AppCallbackNotifier.cpp \
+ ANativeWindowDisplayAdapter.cpp \
+ BufferSourceAdapter.cpp \
+ CameraProperties.cpp \
+ BaseCameraAdapter.cpp \
+ MemoryManager.cpp \
+ Encoder_libjpeg.cpp \
+ Decoder_libjpeg.cpp \
+ SensorListener.cpp \
+ NV12_resize.cpp \
+ CameraParameters.cpp \
+ TICameraParameters.cpp \
+ CameraHalCommon.cpp
+
+TI_CAMERAHAL_OMX_SRC := \
+ OMXCameraAdapter/OMX3A.cpp \
+ OMXCameraAdapter/OMXAlgo.cpp \
+ OMXCameraAdapter/OMXCameraAdapter.cpp \
+ OMXCameraAdapter/OMXCapabilities.cpp \
+ OMXCameraAdapter/OMXCapture.cpp \
+ OMXCameraAdapter/OMXReprocess.cpp \
+ OMXCameraAdapter/OMXDefaults.cpp \
+ OMXCameraAdapter/OMXExif.cpp \
+ OMXCameraAdapter/OMXFD.cpp \
+ OMXCameraAdapter/OMXFocus.cpp \
+ OMXCameraAdapter/OMXMetadata.cpp \
+ OMXCameraAdapter/OMXZoom.cpp \
+ OMXCameraAdapter/OMXDccDataSave.cpp \
+ OMXCameraAdapter/OMXDCC.cpp
+
+TI_CAMERAHAL_USB_SRC := \
+ V4LCameraAdapter/V4LCameraAdapter.cpp \
+ V4LCameraAdapter/V4LCapabilities.cpp
+
+TI_CAMERAHAL_COMMON_SHARED_LIBRARIES := \
libui \
libbinder \
libutils \
libcutils \
libtiutils \
- libmm_osal \
- libOMX_Core \
libcamera_client \
libgui \
- libdomx \
libion_ti \
libjpeg \
libexif
-LOCAL_CFLAGS := -fno-short-enums -DCOPY_IMAGE_BUFFER
+ifdef OMAP_ENHANCEMENT_CPCAM
+TI_CAMERAHAL_COMMON_STATIC_LIBRARIES += \
+ libcpcamcamera_client
+endif
+
+
+# ====================
+# OMX Camera Adapter
+# --------------------
+
+ifeq ($(OMAP4_CAMERA_HAL_USES),OMX)
+
+include $(CLEAR_VARS)
+
+CAMERAHAL_CFLAGS += -DOMX_CAMERA_ADAPTER
+
+LOCAL_SRC_FILES:= \
+ $(TI_CAMERAHAL_COMMON_SRC) \
+ $(TI_CAMERAHAL_OMX_SRC)
+
+LOCAL_C_INCLUDES += \
+ $(TI_CAMERAHAL_COMMON_INCLUDES) \
+ $(HARDWARE_TI_OMAP4_BASE)/domx/omx_core/inc \
+ $(HARDWARE_TI_OMAP4_BASE)/domx/mm_osal/inc \
+ $(LOCAL_PATH)/inc/OMXCameraAdapter
+
+LOCAL_SHARED_LIBRARIES:= \
+ $(TI_CAMERAHAL_COMMON_SHARED_LIBRARIES) \
+ libmm_osal \
+ libOMX_Core \
+ libdomx
+
+LOCAL_STATIC_LIBRARIES := $(TI_CAMERAHAL_COMMON_STATIC_LIBRARIES)
+
+LOCAL_CFLAGS := -fno-short-enums -DCOPY_IMAGE_BUFFER $(CAMERAHAL_CFLAGS)
LOCAL_MODULE_PATH := $(TARGET_OUT_SHARED_LIBRARIES)/hw
LOCAL_MODULE:= camera.$(TARGET_BOARD_PLATFORM)
@@ -94,45 +158,78 @@ include $(BUILD_HEAPTRACKED_SHARED_LIBRARY)
else
ifeq ($(OMAP4_CAMERA_HAL_USES),USB)
-#
-# USB Camera Adapter
-#
+
+# ====================
+# USB Camera Adapter
+# --------------------
include $(CLEAR_VARS)
+CAMERAHAL_CFLAGS += -DV4L_CAMERA_ADAPTER
+
LOCAL_SRC_FILES:= \
- $(OMAP4_CAMERA_HAL_SRC) \
- $(OMAP4_CAMERA_USB_SRC) \
- $(OMAP4_CAMERA_COMMON_SRC)
+ $(TI_CAMERAHAL_COMMON_SRC) \
+ $(TI_CAMERAHAL_USB_SRC)
LOCAL_C_INCLUDES += \
- $(LOCAL_PATH)/inc/ \
- $(LOCAL_PATH)/../hwc \
- $(LOCAL_PATH)/../include \
- $(LOCAL_PATH)/inc/V4LCameraAdapter \
- $(LOCAL_PATH)/../libtiutils \
- hardware/ti/omap4xxx/tiler \
- hardware/ti/omap4xxx/ion \
- frameworks/base/include/ui \
- frameworks/base/include/utils \
- frameworks/base/include/media/stagefright/openmax
+ $(TI_CAMERAHAL_COMMON_INCLUDES) \
+ $(LOCAL_PATH)/inc/V4LCameraAdapter
LOCAL_SHARED_LIBRARIES:= \
- libui \
- libbinder \
- libutils \
- libcutils \
- libtiutils \
- libcamera_client \
- libion_ti \
+ $(TI_CAMERAHAL_COMMON_SHARED_LIBRARIES)
+
+LOCAL_STATIC_LIBRARIES := $(TI_CAMERAHAL_COMMON_STATIC_LIBRARIES)
+
+LOCAL_CFLAGS := -fno-short-enums -DCOPY_IMAGE_BUFFER $(CAMERAHAL_CFLAGS)
+
+LOCAL_MODULE_PATH := $(TARGET_OUT_SHARED_LIBRARIES)/hw
+LOCAL_MODULE:= camera.$(TARGET_BOARD_PLATFORM)
+LOCAL_MODULE_TAGS:= optional
+
+include $(BUILD_HEAPTRACKED_SHARED_LIBRARY)
-LOCAL_CFLAGS := -fno-short-enums -DCOPY_IMAGE_BUFFER
+else
+ifeq ($(OMAP4_CAMERA_HAL_USES),ALL)
+
+
+# =====================
+# ALL Camera Adapters
+# ---------------------
+
+include $(CLEAR_VARS)
+
+CAMERAHAL_CFLAGS += -DOMX_CAMERA_ADAPTER -DV4L_CAMERA_ADAPTER
+
+LOCAL_SRC_FILES:= \
+ $(TI_CAMERAHAL_COMMON_SRC) \
+ $(TI_CAMERAHAL_OMX_SRC) \
+ $(TI_CAMERAHAL_USB_SRC)
+
+LOCAL_C_INCLUDES += \
+ $(TI_CAMERAHAL_COMMON_INCLUDES) \
+ $(HARDWARE_TI_OMAP4_BASE)/domx/omx_core/inc \
+ $(HARDWARE_TI_OMAP4_BASE)/domx/mm_osal/inc \
+ $(LOCAL_PATH)/inc/OMXCameraAdapter \
+ $(LOCAL_PATH)/inc/V4LCameraAdapter
+
+LOCAL_SHARED_LIBRARIES:= \
+ $(TI_CAMERAHAL_COMMON_SHARED_LIBRARIES) \
+ libmm_osal \
+ libOMX_Core \
+ libdomx
+
+LOCAL_STATIC_LIBRARIES := $(TI_CAMERAHAL_COMMON_STATIC_LIBRARIES)
+
+LOCAL_CFLAGS := -fno-short-enums -DCOPY_IMAGE_BUFFER $(CAMERAHAL_CFLAGS)
LOCAL_MODULE_PATH := $(TARGET_OUT_SHARED_LIBRARIES)/hw
LOCAL_MODULE:= camera.$(TARGET_BOARD_PLATFORM)
LOCAL_MODULE_TAGS:= optional
include $(BUILD_HEAPTRACKED_SHARED_LIBRARY)
+
+endif
+endif
+endif
endif
-endif
endif
diff --git a/camera/AppCallbackNotifier.cpp b/camera/AppCallbackNotifier.cpp
index c6dfffa..e7f2b19 100644
--- a/camera/AppCallbackNotifier.cpp
+++ b/camera/AppCallbackNotifier.cpp
@@ -14,12 +14,6 @@
* limitations under the License.
*/
-
-
-
-#define LOG_TAG "CameraHAL"
-
-
#include "CameraHal.h"
#include "VideoMetadata.h"
#include "Encoder_libjpeg.h"
@@ -27,11 +21,13 @@
#include <ui/GraphicBuffer.h>
#include <ui/GraphicBufferMapper.h>
#include "NV12_resize.h"
+#include "TICameraParameters.h"
-namespace android {
+namespace Ti {
+namespace Camera {
const int AppCallbackNotifier::NOTIFIER_TIMEOUT = -1;
-KeyedVector<void*, sp<Encoder_libjpeg> > gEncoderQueue;
+android::KeyedVector<void*, android::sp<Encoder_libjpeg> > gEncoderQueue;
void AppCallbackNotifierEncoderCallback(void* main_jpeg,
void* thumb_jpeg,
@@ -39,11 +35,12 @@ void AppCallbackNotifierEncoderCallback(void* main_jpeg,
void* cookie1,
void* cookie2,
void* cookie3,
+ void* cookie4,
bool canceled)
{
if (cookie1 && !canceled) {
AppCallbackNotifier* cb = (AppCallbackNotifier*) cookie1;
- cb->EncoderDoneCb(main_jpeg, thumb_jpeg, type, cookie2, cookie3);
+ cb->EncoderDoneCb(main_jpeg, thumb_jpeg, type, cookie2, cookie3, cookie4);
}
if (main_jpeg) {
@@ -60,20 +57,21 @@ void AppCallbackNotifierEncoderCallback(void* main_jpeg,
/*--------------------NotificationHandler Class STARTS here-----------------------------*/
-void AppCallbackNotifier::EncoderDoneCb(void* main_jpeg, void* thumb_jpeg, CameraFrame::FrameType type, void* cookie1, void* cookie2)
+void AppCallbackNotifier::EncoderDoneCb(void* main_jpeg, void* thumb_jpeg, CameraFrame::FrameType type, void* cookie1, void* cookie2, void *cookie3)
{
camera_memory_t* encoded_mem = NULL;
Encoder_libjpeg::params *main_param = NULL, *thumb_param = NULL;
size_t jpeg_size;
uint8_t* src = NULL;
- sp<Encoder_libjpeg> encoder = NULL;
+ CameraBuffer *camera_buffer;
+ android::sp<Encoder_libjpeg> encoder = NULL;
LOG_FUNCTION_NAME;
camera_memory_t* picture = NULL;
{
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
if (!main_jpeg) {
goto exit;
@@ -82,6 +80,7 @@ void AppCallbackNotifier::EncoderDoneCb(void* main_jpeg, void* thumb_jpeg, Camer
encoded_mem = (camera_memory_t*) cookie1;
main_param = (Encoder_libjpeg::params *) main_jpeg;
jpeg_size = main_param->jpeg_size;
+ camera_buffer = (CameraBuffer *)cookie3;
src = main_param->src;
if(encoded_mem && encoded_mem->data && (jpeg_size > 0)) {
@@ -126,11 +125,13 @@ void AppCallbackNotifier::EncoderDoneCb(void* main_jpeg, void* thumb_jpeg, Camer
if(picture && (mNotifierState==AppCallbackNotifier::NOTIFIER_STARTED) &&
(mCameraHal->msgTypeEnabled(CAMERA_MSG_COMPRESSED_IMAGE)))
{
- Mutex::Autolock lock(mBurstLock);
-#if 0 //TODO: enable burst mode later
+ android::AutoMutex lock(mBurstLock);
+
+#ifdef OMAP_ENHANCEMENT_BURST_CAPTURE
if ( mBurst )
{
- `(CAMERA_MSG_BURST_IMAGE, JPEGPictureMemBase, mCallbackCookie);
+ mDataCb(CAMERA_MSG_COMPRESSED_BURST_IMAGE, picture, 0, NULL, mCallbackCookie);
+
}
else
#endif
@@ -157,7 +158,7 @@ void AppCallbackNotifier::EncoderDoneCb(void* main_jpeg, void* thumb_jpeg, Camer
gEncoderQueue.removeItem(src);
encoder.clear();
}
- mFrameProvider->returnFrame(src, type);
+ mFrameProvider->returnFrame(camera_buffer, type);
}
LOG_FUNCTION_NAME_EXIT;
@@ -172,8 +173,12 @@ status_t AppCallbackNotifier::initialize()
{
LOG_FUNCTION_NAME;
+ mPreviewMemory = 0;
+
mMeasurementEnabled = false;
+ mNotifierState = NOTIFIER_STOPPED;
+
///Create the app notifier thread
mNotificationThread = new NotificationThread(this);
if(!mNotificationThread.get())
@@ -183,7 +188,7 @@ status_t AppCallbackNotifier::initialize()
}
///Start the display thread
- status_t ret = mNotificationThread->run("NotificationThread", PRIORITY_URGENT_DISPLAY);
+ status_t ret = mNotificationThread->run("NotificationThread", android::PRIORITY_URGENT_DISPLAY);
if(ret!=NO_ERROR)
{
CAMHAL_LOGEA("Couldn't run NotificationThread");
@@ -194,6 +199,9 @@ status_t AppCallbackNotifier::initialize()
mUseMetaDataBufferMode = true;
mRawAvailable = false;
+ mRecording = false;
+ mPreviewing = false;
+
LOG_FUNCTION_NAME_EXIT;
return ret;
@@ -206,7 +214,7 @@ void AppCallbackNotifier::setCallbacks(CameraHal* cameraHal,
camera_request_memory get_memory,
void *user)
{
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
LOG_FUNCTION_NAME;
@@ -222,7 +230,7 @@ void AppCallbackNotifier::setCallbacks(CameraHal* cameraHal,
void AppCallbackNotifier::setMeasurements(bool enable)
{
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
LOG_FUNCTION_NAME;
@@ -245,7 +253,9 @@ void AppCallbackNotifier::errorNotify(int error)
CAMHAL_LOGEB("AppCallbackNotifier received error %d", error);
// If it is a fatal error abort here!
- if((error == CAMERA_ERROR_FATAL) || (error == CAMERA_ERROR_HARD)) {
+ // If TILER is Out of memory we notify Mediaserver so that Memory is cleared and we can restart usecase
+ if((error == CAMERA_ERROR_FATAL) || (error == CAMERA_ERROR_HARD) || (error == -ENOMEM))
+ {
//We kill media server if we encounter these errors as there is
//no point continuing and apps also don't handle errors other
//than media server death always.
@@ -272,7 +282,7 @@ bool AppCallbackNotifier::notificationThread()
LOG_FUNCTION_NAME;
//CAMHAL_LOGDA("Notification Thread waiting for message");
- ret = TIUTILS::MessageQueue::waitForMsg(&mNotificationThread->msgQ(),
+ ret = Utils::MessageQueue::waitForMsg(&mNotificationThread->msgQ(),
&mEventQ,
&mFrameQ,
AppCallbackNotifier::NOTIFIER_TIMEOUT);
@@ -308,21 +318,21 @@ bool AppCallbackNotifier::notificationThread()
void AppCallbackNotifier::notifyEvent()
{
///Receive and send the event notifications to app
- TIUTILS::Message msg;
+ Utils::Message msg;
LOG_FUNCTION_NAME;
{
- Mutex::Autolock lock(mLock);
- if(!mEventQ.isEmpty()) {
- mEventQ.get(&msg);
- } else {
+ android::AutoMutex lock(mLock);
+ if ( !mEventQ.hasMsg() ) {
return;
+ } else {
+ mEventQ.get(&msg);
}
}
bool ret = true;
CameraHalEvent *evt = NULL;
CameraHalEvent::FocusEventData *focusEvtData;
CameraHalEvent::ZoomEventData *zoomEvtData;
- CameraHalEvent::FaceEventData faceEvtData;
+ CameraHalEvent::MetaEventData metaEvtData;
if(mNotifierState != AppCallbackNotifier::NOTIFIER_STARTED)
{
@@ -357,37 +367,39 @@ void AppCallbackNotifier::notifyEvent()
case CameraHalEvent::EVENT_FOCUS_LOCKED:
case CameraHalEvent::EVENT_FOCUS_ERROR:
- focusEvtData = &evt->mEventData->focusEvent;
- if ( ( focusEvtData->focusStatus == CameraHalEvent::FOCUS_STATUS_SUCCESS ) &&
- ( NULL != mCameraHal ) &&
- ( NULL != mNotifyCb ) &&
- ( mCameraHal->msgTypeEnabled(CAMERA_MSG_FOCUS) ) )
- {
- mCameraHal->disableMsgType(CAMERA_MSG_FOCUS);
- mNotifyCb(CAMERA_MSG_FOCUS, true, 0, mCallbackCookie);
- }
- else if ( ( focusEvtData->focusStatus == CameraHalEvent::FOCUS_STATUS_FAIL ) &&
- ( NULL != mCameraHal ) &&
- ( NULL != mNotifyCb ) &&
- ( mCameraHal->msgTypeEnabled(CAMERA_MSG_FOCUS) ) )
- {
- mCameraHal->disableMsgType(CAMERA_MSG_FOCUS);
- mNotifyCb(CAMERA_MSG_FOCUS, false, 0, mCallbackCookie);
- }
- else if ( ( focusEvtData->focusStatus == CameraHalEvent::FOCUS_STATUS_PENDING ) &&
- ( NULL != mCameraHal ) &&
- ( NULL != mNotifyCb ) &&
- ( mCameraHal->msgTypeEnabled(CAMERA_MSG_FOCUS_MOVE) ) )
- {
- mNotifyCb(CAMERA_MSG_FOCUS_MOVE, true, 0, mCallbackCookie);
- }
- else if ( ( focusEvtData->focusStatus == CameraHalEvent::FOCUS_STATUS_DONE ) &&
- ( NULL != mCameraHal ) &&
- ( NULL != mNotifyCb ) &&
- ( mCameraHal->msgTypeEnabled(CAMERA_MSG_FOCUS_MOVE) ) )
- {
- mNotifyCb(CAMERA_MSG_FOCUS_MOVE, false, 0, mCallbackCookie);
+ if ( mCameraHal && mNotifyCb ) {
+ focusEvtData = &evt->mEventData->focusEvent;
+
+ switch ( focusEvtData->focusStatus ) {
+ case CameraHalEvent::FOCUS_STATUS_SUCCESS:
+ if ( mCameraHal->msgTypeEnabled(CAMERA_MSG_FOCUS) ) {
+ mCameraHal->disableMsgType(CAMERA_MSG_FOCUS);
+ mNotifyCb(CAMERA_MSG_FOCUS, true, 0, mCallbackCookie);
+ }
+ break;
+
+ case CameraHalEvent::FOCUS_STATUS_FAIL:
+ if ( mCameraHal->msgTypeEnabled(CAMERA_MSG_FOCUS) ) {
+ mCameraHal->disableMsgType(CAMERA_MSG_FOCUS);
+ mNotifyCb(CAMERA_MSG_FOCUS, false, 0, mCallbackCookie);
+ }
+ break;
+
+#ifdef ANDROID_API_JB_OR_LATER
+ case CameraHalEvent::FOCUS_STATUS_PENDING:
+ if ( mCameraHal->msgTypeEnabled(CAMERA_MSG_FOCUS_MOVE) ) {
+ mNotifyCb(CAMERA_MSG_FOCUS_MOVE, true, 0, mCallbackCookie);
+ }
+ break;
+
+ case CameraHalEvent::FOCUS_STATUS_DONE:
+ if ( mCameraHal->msgTypeEnabled(CAMERA_MSG_FOCUS_MOVE) ) {
+ mNotifyCb(CAMERA_MSG_FOCUS_MOVE, false, 0, mCallbackCookie);
+ }
+ break;
+#endif
}
+ }
break;
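Note: the rewrite above replaces the repeated null checks of the old if/else chain with a single guard plus a switch on focusStatus, and the FOCUS_MOVE cases are only compiled on Jelly Bean and later. A compact sketch of the same dispatch with hypothetical stand-in types:

    #include <cstdio>

    enum FocusStatus { FOCUS_SUCCESS, FOCUS_FAIL, FOCUS_PENDING, FOCUS_DONE };

    struct FocusNotifier {
        bool focusMsgEnabled;      // app asked for CAMERA_MSG_FOCUS
        bool focusMoveMsgEnabled;  // app asked for CAMERA_MSG_FOCUS_MOVE

        void notify(const char *msg, bool arg) { std::printf("%s %d\n", msg, arg); }

        void onFocusEvent(FocusStatus status) {
            switch (status) {
            case FOCUS_SUCCESS:
            case FOCUS_FAIL:
                // the real code also disables CAMERA_MSG_FOCUS before notifying
                if (focusMsgEnabled)
                    notify("CAMERA_MSG_FOCUS", status == FOCUS_SUCCESS);
                break;
            case FOCUS_PENDING:    // focus started moving (JB and later only)
            case FOCUS_DONE:       // focus stopped moving
                if (focusMoveMsgEnabled)
                    notify("CAMERA_MSG_FOCUS_MOVE", status == FOCUS_PENDING);
                break;
            }
        }
    };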
@@ -404,9 +416,9 @@ void AppCallbackNotifier::notifyEvent()
break;
- case CameraHalEvent::EVENT_FACE:
+ case CameraHalEvent::EVENT_METADATA:
- faceEvtData = evt->mEventData->faceEvent;
+ metaEvtData = evt->mEventData->metadataEvent;
if ( ( NULL != mCameraHal ) &&
( NULL != mNotifyCb) &&
@@ -418,10 +430,10 @@ void AppCallbackNotifier::notifyEvent()
mDataCb(CAMERA_MSG_PREVIEW_METADATA,
tmpBuffer,
0,
- faceEvtData->getFaceResult(),
+ metaEvtData->getMetadataResult(),
mCallbackCookie);
- faceEvtData.clear();
+ metaEvtData.clear();
if ( NULL != tmpBuffer ) {
tmpBuffer->release(tmpBuffer);
@@ -452,11 +464,11 @@ void AppCallbackNotifier::notifyEvent()
static void alignYV12(int width,
int height,
- int &yStride,
- int &uvStride,
- int &ySize,
- int &uvSize,
- int &size)
+ size_t &yStride,
+ size_t &uvStride,
+ size_t &ySize,
+ size_t &uvSize,
+ size_t &size)
{
yStride = ( width + 0xF ) & ~0xF;
uvStride = ( yStride / 2 + 0xF ) & ~0xF;
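Note: alignYV12() now takes size_t references, but the math is unchanged: luma and chroma strides are padded up to 16 bytes. A standalone version follows, with the rest of the body filled in under the assumption that it uses the standard Android YV12 layout (ySize = yStride*height, uvSize = uvStride*height/2, size = ySize + 2*uvSize); for a 176x144 (QCIF) preview this gives yStride=176, uvStride=96, size=39168:

    #include <cstddef>
    #include <cstdio>

    // Standalone sketch of the alignment math; the remainder of the body is an
    // assumption based on the standard YV12 layout, not shown in the hunk above.
    static void alignYV12(int width, int height,
                          size_t &yStride, size_t &uvStride,
                          size_t &ySize, size_t &uvSize, size_t &size) {
        yStride  = (width + 0xF) & ~0xF;           // luma rows padded to 16 bytes
        uvStride = (yStride / 2 + 0xF) & ~0xF;     // chroma rows padded to 16 bytes
        ySize    = yStride * height;
        uvSize   = uvStride * height / 2;          // one chroma plane
        size     = ySize + 2 * uvSize;             // Y + V + U
    }

    int main() {
        size_t ys, uvs, ysz, uvsz, total;
        alignYV12(176, 144, ys, uvs, ysz, uvsz, total);
        // Prints: yStride=176 uvStride=96 size=39168
        std::printf("yStride=%zu uvStride=%zu size=%zu\n", ys, uvs, total);
        return 0;
    }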
@@ -483,10 +495,10 @@ static void copy2Dto1D(void *dst,
unsigned int *y_uv = (unsigned int *)src;
CAMHAL_LOGVB("copy2Dto1D() y= %p ; uv=%p.",y_uv[0], y_uv[1]);
- CAMHAL_LOGVB("pixelFormat= %s; offset=%d", pixelFormat,offset);
+ CAMHAL_LOGVB("pixelFormat = %s; offset=%d",pixelFormat,offset);
if (pixelFormat!=NULL) {
- if (strcmp(pixelFormat, CameraParameters::PIXEL_FORMAT_YUV422I) == 0) {
+ if (strcmp(pixelFormat, android::CameraParameters::PIXEL_FORMAT_YUV422I) == 0) {
bytesPerPixel = 2;
bufferSrc = ( unsigned char * ) y_uv[0] + offset;
uint32_t xOff = offset % stride;
@@ -531,8 +543,8 @@ static void copy2Dto1D(void *dst,
}
return;
- } else if (strcmp(pixelFormat, CameraParameters::PIXEL_FORMAT_YUV420SP) == 0 ||
- strcmp(pixelFormat, CameraParameters::PIXEL_FORMAT_YUV420P) == 0) {
+ } else if (strcmp(pixelFormat, android::CameraParameters::PIXEL_FORMAT_YUV420SP) == 0 ||
+ strcmp(pixelFormat, android::CameraParameters::PIXEL_FORMAT_YUV420P) == 0) {
bytesPerPixel = 1;
bufferDst = ( unsigned char * ) dst;
bufferDstEnd = ( unsigned char * ) dst + width*height*bytesPerPixel;
@@ -557,7 +569,7 @@ static void copy2Dto1D(void *dst,
bufferSrc_UV = ( uint16_t * ) ((uint8_t*)y_uv[1] + (stride/2)*yOff + xOff);
- if (strcmp(pixelFormat, CameraParameters::PIXEL_FORMAT_YUV420SP) == 0) {
+ if (strcmp(pixelFormat, android::CameraParameters::PIXEL_FORMAT_YUV420SP) == 0) {
uint16_t *bufferDst_UV;
// Step 2: UV plane: convert NV12 to NV21 by swapping U & V
@@ -605,7 +617,7 @@ static void copy2Dto1D(void *dst,
: "cc", "memory", "q0", "q1"
);
}
- } else if (strcmp(pixelFormat, CameraParameters::PIXEL_FORMAT_YUV420P) == 0) {
+ } else if (strcmp(pixelFormat, android::CameraParameters::PIXEL_FORMAT_YUV420P) == 0) {
uint16_t *bufferDst_U;
uint16_t *bufferDst_V;
@@ -614,7 +626,7 @@ static void copy2Dto1D(void *dst,
// camera adapter to support YV12. Need to address for
// USBCamera
- int yStride, uvStride, ySize, uvSize, size;
+ size_t yStride, uvStride, ySize, uvSize, size;
alignYV12(width, height, yStride, uvStride, ySize, uvSize, size);
bufferDst_V = (uint16_t *) (((uint8_t*)dst) + ySize);
@@ -671,7 +683,7 @@ static void copy2Dto1D(void *dst,
}
return ;
- } else if(strcmp(pixelFormat, CameraParameters::PIXEL_FORMAT_RGB565) == 0) {
+ } else if(strcmp(pixelFormat, android::CameraParameters::PIXEL_FORMAT_RGB565) == 0) {
bytesPerPixel = 2;
}
}
@@ -687,26 +699,77 @@ static void copy2Dto1D(void *dst,
}
}
+static void copyCroppedNV12(CameraFrame* frame, unsigned char *dst)
+{
+ unsigned int stride, width, height;
+ uint32_t offset, uvoffset;
+ size_t size;
+
+ CAMHAL_ASSERT(frame && dst);
+
+ offset = frame->mOffset;
+ stride = frame->mAlignment;
+ width = frame->mWidth;
+ height = frame->mHeight;
+ size = frame->mLength;
+ unsigned const char *src = (unsigned char *) frame->mBuffer->mapped;
+
+ // offset to beginning of uv plane
+ uvoffset = (offset + size) * 2 / 3;
+ // offset to beginning of valid region of uv plane
+ uvoffset += (offset - (offset % stride)) / 2 + (offset % stride);
+
+ // start of valid luma region
+ unsigned const char *luma = src + offset;
+ // start of valid chroma region
+ unsigned const char *chroma = src + uvoffset;
+
+ // copy luma and chroma line x line
+ for (unsigned int i = 0; i < height; i++) {
+ memcpy(dst, luma, width);
+ luma += stride;
+ dst += width;
+ }
+ for (unsigned int i = 0; i < height / 2; i++) {
+ memcpy(dst, chroma, width);
+ chroma += stride;
+ dst += width;
+ }
+}
+
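Note: the UV-offset arithmetic in copyCroppedNV12() above deserves a gloss. The code implicitly assumes mOffset + mLength spans the whole padded NV12 allocation, so (offset + size) * 2 / 3 lands on the start of the interleaved UV plane (luma is two thirds of an NV12 buffer); the second line then re-applies the crop inside that plane, halving the row component of the luma offset (the UV plane has half the rows) while keeping the column component. A standalone check of that arithmetic with hypothetical numbers:

    #include <cassert>
    #include <cstddef>
    #include <cstdint>

    int main() {
        // Hypothetical padded NV12 allocation: 4096-byte stride, 1088 padded rows,
        // with the valid region starting at row 4, column 32.
        const uint32_t stride = 4096, paddedRows = 1088;
        const uint32_t offset = 4 * stride + 32;                        // frame->mOffset
        const size_t   size   = (size_t)stride * paddedRows * 3 / 2 - offset;  // frame->mLength

        uint32_t uvoffset = (offset + size) * 2 / 3;                    // start of UV plane
        uvoffset += (offset - (offset % stride)) / 2 + (offset % stride);

        // Expect: full luma plane, plus half the cropped rows, plus the column.
        assert(uvoffset == stride * paddedRows + 2 * stride + 32);
        return 0;
    }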
void AppCallbackNotifier::copyAndSendPictureFrame(CameraFrame* frame, int32_t msgType)
{
camera_memory_t* picture = NULL;
void *dest = NULL, *src = NULL;
// scope for lock
- {
- Mutex::Autolock lock(mLock);
+ if (mCameraHal->msgTypeEnabled(msgType)) {
+ android::AutoMutex lock(mLock);
if(mNotifierState != AppCallbackNotifier::NOTIFIER_STARTED) {
goto exit;
}
- picture = mRequestMemory(-1, frame->mLength, 1, NULL);
+ if (frame->mBuffer->format &&
+ (strcmp(frame->mBuffer->format, android::CameraParameters::PIXEL_FORMAT_YUV420SP) == 0) &&
+ (frame->mAlignment != frame->mWidth) &&
+ ( msgType == CAMERA_MSG_RAW_IMAGE )) {
+ size_t size;
- if (NULL != picture) {
- dest = picture->data;
- if (NULL != dest) {
- src = (void *) ((unsigned int) frame->mBuffer + frame->mOffset);
- memcpy(dest, src, frame->mLength);
+ size = CameraHal::calculateBufferSize(frame->mBuffer->format, frame->mWidth, frame->mHeight);
+ picture = mRequestMemory(-1, size, 1, NULL);
+ if (picture && picture->data) {
+ copyCroppedNV12(frame, (unsigned char*) picture->data);
+ }
+ } else {
+ picture = mRequestMemory(-1, frame->mLength, 1, NULL);
+
+ if (NULL != picture) {
+ dest = picture->data;
+ if (NULL != dest) {
+ src = (void *) ((unsigned int) frame->mBuffer->mapped + frame->mOffset);
+ memcpy(dest, src, frame->mLength);
+ }
}
}
}
@@ -726,11 +789,11 @@ void AppCallbackNotifier::copyAndSendPictureFrame(CameraFrame* frame, int32_t ms
void AppCallbackNotifier::copyAndSendPreviewFrame(CameraFrame* frame, int32_t msgType)
{
camera_memory_t* picture = NULL;
- void* dest = NULL;
+ CameraBuffer * dest = NULL;
// scope for lock
{
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
if(mNotifierState != AppCallbackNotifier::NOTIFIER_STARTED) {
goto exit;
@@ -741,27 +804,27 @@ void AppCallbackNotifier::copyAndSendPreviewFrame(CameraFrame* frame, int32_t ms
goto exit;
}
-
- dest = (void*) mPreviewBufs[mPreviewBufCount];
+ dest = &mPreviewBuffers[mPreviewBufCount];
CAMHAL_LOGVB("%d:copy2Dto1D(%p, %p, %d, %d, %d, %d, %d,%s)",
__LINE__,
- buf,
+ dest,
frame->mBuffer,
- frame->mWidth,
- frame->mHeight,
- frame->mAlignment,
+ mPreviewWidth,
+ mPreviewHeight,
+ mPreviewStride,
2,
frame->mLength,
mPreviewPixelFormat);
- if ( NULL != dest ) {
+ /* FIXME map dest */
+ if ( NULL != dest && dest->mapped != NULL ) {
// data sync frames don't need conversion
if (CameraFrame::FRAME_DATA_SYNC == frame->mFrameType) {
if ( (mPreviewMemory->size / MAX_BUFFERS) >= frame->mLength ) {
- memcpy(dest, (void*) frame->mBuffer, frame->mLength);
+ memcpy(dest->mapped, (void*) frame->mBuffer->mapped, frame->mLength);
} else {
- memset(dest, 0, (mPreviewMemory->size / MAX_BUFFERS));
+ memset(dest->mapped, 0, (mPreviewMemory->size / MAX_BUFFERS));
}
} else {
if ((NULL == frame->mYuv[0]) || (NULL == frame->mYuv[1])){
@@ -769,11 +832,11 @@ void AppCallbackNotifier::copyAndSendPreviewFrame(CameraFrame* frame, int32_t ms
goto exit;
}
else{
- copy2Dto1D(dest,
+ copy2Dto1D(dest->mapped,
frame->mYuv,
- frame->mWidth,
- frame->mHeight,
- frame->mAlignment,
+ mPreviewWidth,
+ mPreviewHeight,
+ mPreviewStride,
frame->mOffset,
2,
frame->mLength,
@@ -788,8 +851,10 @@ void AppCallbackNotifier::copyAndSendPreviewFrame(CameraFrame* frame, int32_t ms
if((mNotifierState == AppCallbackNotifier::NOTIFIER_STARTED) &&
mCameraHal->msgTypeEnabled(msgType) &&
- (dest != NULL)) {
- mDataCb(msgType, mPreviewMemory, mPreviewBufCount, NULL, mCallbackCookie);
+ (dest != NULL) && (dest->mapped != NULL)) {
+ android::AutoMutex locker(mLock);
+ if ( mPreviewMemory )
+ mDataCb(msgType, mPreviewMemory, mPreviewBufCount, NULL, mCallbackCookie);
}
// increment for next buffer
@@ -833,17 +898,17 @@ status_t AppCallbackNotifier::dummyRaw()
void AppCallbackNotifier::notifyFrame()
{
///Receive and send the frame notifications to app
- TIUTILS::Message msg;
+ Utils::Message msg;
CameraFrame *frame;
- MemoryHeapBase *heap;
- MemoryBase *buffer = NULL;
- sp<MemoryBase> memBase;
+ android::MemoryHeapBase *heap;
+ android::MemoryBase *buffer = NULL;
+ android::sp<android::MemoryBase> memBase;
void *buf = NULL;
LOG_FUNCTION_NAME;
{
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
if(!mFrameQ.isEmpty()) {
mFrameQ.get(&msg);
} else {
@@ -900,23 +965,24 @@ void AppCallbackNotifier::notifyFrame()
unsigned int current_snapshot = 0;
Encoder_libjpeg::params *main_jpeg = NULL, *tn_jpeg = NULL;
void* exif_data = NULL;
+ const char *previewFormat = NULL;
camera_memory_t* raw_picture = mRequestMemory(-1, frame->mLength, 1, NULL);
if(raw_picture) {
buf = raw_picture->data;
}
- CameraParameters parameters;
+ android::CameraParameters parameters;
char *params = mCameraHal->getParameters();
- const String8 strParams(params);
+ const android::String8 strParams(params);
parameters.unflatten(strParams);
- encode_quality = parameters.getInt(CameraParameters::KEY_JPEG_QUALITY);
+ encode_quality = parameters.getInt(android::CameraParameters::KEY_JPEG_QUALITY);
if (encode_quality < 0 || encode_quality > 100) {
encode_quality = 100;
}
- tn_quality = parameters.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY);
+ tn_quality = parameters.getInt(android::CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY);
if (tn_quality < 0 || tn_quality > 100) {
tn_quality = 100;
}
@@ -936,7 +1002,7 @@ void AppCallbackNotifier::notifyFrame()
CAMHAL_LOGDB("Video snapshot offset = %d", frame->mOffset);
if (main_jpeg) {
- main_jpeg->src = (uint8_t*) frame->mBuffer;
+ main_jpeg->src = (uint8_t *)frame->mBuffer->mapped;
main_jpeg->src_size = frame->mLength;
main_jpeg->dst = (uint8_t*) buf;
main_jpeg->dst_size = frame->mLength;
@@ -947,13 +1013,19 @@ void AppCallbackNotifier::notifyFrame()
main_jpeg->out_height = frame->mHeight;
main_jpeg->right_crop = rightCrop;
main_jpeg->start_offset = frame->mOffset;
- main_jpeg->format = CameraParameters::PIXEL_FORMAT_YUV422I;
+ if ( CameraFrame::FORMAT_YUV422I_UYVY & frame->mQuirks) {
+ main_jpeg->format = TICameraParameters::PIXEL_FORMAT_YUV422I_UYVY;
+ }
+ else { //if ( CameraFrame::FORMAT_YUV422I_YUYV & frame->mQuirks)
+ main_jpeg->format = android::CameraParameters::PIXEL_FORMAT_YUV422I;
+ }
}
- tn_width = parameters.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH);
- tn_height = parameters.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT);
+ tn_width = parameters.getInt(android::CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH);
+ tn_height = parameters.getInt(android::CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT);
+ previewFormat = parameters.getPreviewFormat();
- if ((tn_width > 0) && (tn_height > 0)) {
+ if ((tn_width > 0) && (tn_height > 0) && ( NULL != previewFormat )) {
tn_jpeg = (Encoder_libjpeg::params*)
malloc(sizeof(Encoder_libjpeg::params));
// if malloc fails just keep going and encode main jpeg
@@ -966,10 +1038,12 @@ void AppCallbackNotifier::notifyFrame()
int width, height;
parameters.getPreviewSize(&width,&height);
current_snapshot = (mPreviewBufCount + MAX_BUFFERS - 1) % MAX_BUFFERS;
- tn_jpeg->src = (uint8_t*) mPreviewBufs[current_snapshot];
+ tn_jpeg->src = (uint8_t *)mPreviewBuffers[current_snapshot].mapped;
tn_jpeg->src_size = mPreviewMemory->size / MAX_BUFFERS;
- tn_jpeg->dst = (uint8_t*) malloc(tn_jpeg->src_size);
- tn_jpeg->dst_size = tn_jpeg->src_size;
+ tn_jpeg->dst_size = CameraHal::calculateBufferSize(previewFormat,
+ tn_width,
+ tn_height);
+ tn_jpeg->dst = (uint8_t*) malloc(tn_jpeg->dst_size);
tn_jpeg->quality = tn_quality;
tn_jpeg->in_width = width;
tn_jpeg->in_height = height;
@@ -977,18 +1051,18 @@ void AppCallbackNotifier::notifyFrame()
tn_jpeg->out_height = tn_height;
tn_jpeg->right_crop = 0;
tn_jpeg->start_offset = 0;
- tn_jpeg->format = CameraParameters::PIXEL_FORMAT_YUV420SP;;
+ tn_jpeg->format = android::CameraParameters::PIXEL_FORMAT_YUV420SP;
}
- sp<Encoder_libjpeg> encoder = new Encoder_libjpeg(main_jpeg,
+ android::sp<Encoder_libjpeg> encoder = new Encoder_libjpeg(main_jpeg,
tn_jpeg,
AppCallbackNotifierEncoderCallback,
(CameraFrame::FrameType)frame->mFrameType,
this,
raw_picture,
- exif_data);
+ exif_data, frame->mBuffer);
+ gEncoderQueue.add(frame->mBuffer->mapped, encoder);
encoder->run();
- gEncoderQueue.add(frame->mBuffer, encoder);
encoder.clear();
if (params != NULL)
{
@@ -1004,7 +1078,7 @@ void AppCallbackNotifier::notifyFrame()
// who registers a raw callback should receive one
// as well. This is not always the case with
// CameraAdapters though.
- if (!mRawAvailable) {
+ if (!mCameraHal->msgTypeEnabled(CAMERA_MSG_RAW_IMAGE)) {
dummyRaw();
} else {
mRawAvailable = false;
@@ -1012,11 +1086,11 @@ void AppCallbackNotifier::notifyFrame()
#ifdef COPY_IMAGE_BUFFER
{
- Mutex::Autolock lock(mBurstLock);
-#if 0 //TODO: enable burst mode later
+ android::AutoMutex lock(mBurstLock);
+#ifdef OMAP_ENHANCEMENT_BURST_CAPTURE
if ( mBurst )
{
- `(CAMERA_MSG_BURST_IMAGE, JPEGPictureMemBase, mCallbackCookie);
+ copyAndSendPictureFrame(frame, CAMERA_MSG_COMPRESSED_BURST_IMAGE);
}
else
#endif
@@ -1033,13 +1107,13 @@ void AppCallbackNotifier::notifyFrame()
( NULL != mDataCb) &&
( mCameraHal->msgTypeEnabled(CAMERA_MSG_VIDEO_FRAME) ) )
{
- mRecordingLock.lock();
+ android::AutoMutex locker(mRecordingLock);
if(mRecording)
{
if(mUseMetaDataBufferMode)
{
camera_memory_t *videoMedatadaBufferMemory =
- (camera_memory_t *) mVideoMetadataBufferMemoryMap.valueFor((uint32_t) frame->mBuffer);
+ mVideoMetadataBufferMemoryMap.valueFor(frame->mBuffer->opaque);
video_metadata_t *videoMetadataBuffer = (video_metadata_t *) videoMedatadaBufferMemory->data;
if( (NULL == videoMedatadaBufferMemory) || (NULL == videoMetadataBuffer) || (NULL == frame->mBuffer) )
@@ -1050,9 +1124,9 @@ void AppCallbackNotifier::notifyFrame()
if ( mUseVideoBuffers )
{
- int vBuf = mVideoMap.valueFor((uint32_t) frame->mBuffer);
- GraphicBufferMapper &mapper = GraphicBufferMapper::get();
- Rect bounds;
+ CameraBuffer *vBuf = mVideoMap.valueFor(frame->mBuffer->opaque);
+ android::GraphicBufferMapper &mapper = android::GraphicBufferMapper::get();
+ android::Rect bounds;
bounds.left = 0;
bounds.top = 0;
bounds.right = mVideoWidth;
@@ -1060,6 +1134,7 @@ void AppCallbackNotifier::notifyFrame()
void *y_uv[2];
mapper.lock((buffer_handle_t)vBuf, CAMHAL_GRALLOC_USAGE, bounds, y_uv);
+ y_uv[1] = y_uv[0] + mVideoHeight*4096;
structConvImage input = {frame->mWidth,
frame->mHeight,
@@ -1078,20 +1153,21 @@ void AppCallbackNotifier::notifyFrame()
0};
VT_resizeFrame_Video_opt2_lp(&input, &output, NULL, 0);
- mapper.unlock((buffer_handle_t)vBuf);
- videoMetadataBuffer->metadataBufferType = (int) kMetadataBufferTypeCameraSource;
- videoMetadataBuffer->handle = (void *)vBuf;
+ mapper.unlock((buffer_handle_t)vBuf->opaque);
+ videoMetadataBuffer->metadataBufferType = (int) android::kMetadataBufferTypeCameraSource;
+ /* FIXME remove cast */
+ videoMetadataBuffer->handle = (void *)vBuf->opaque;
videoMetadataBuffer->offset = 0;
}
else
{
- videoMetadataBuffer->metadataBufferType = (int) kMetadataBufferTypeCameraSource;
- videoMetadataBuffer->handle = frame->mBuffer;
+ videoMetadataBuffer->metadataBufferType = (int) android::kMetadataBufferTypeCameraSource;
+ videoMetadataBuffer->handle = camera_buffer_get_omx_ptr(frame->mBuffer);
videoMetadataBuffer->offset = frame->mOffset;
}
CAMHAL_LOGVB("mDataCbTimestamp : frame->mBuffer=0x%x, videoMetadataBuffer=0x%x, videoMedatadaBufferMemory=0x%x",
- frame->mBuffer, videoMetadataBuffer, videoMedatadaBufferMemory);
+ frame->mBuffer->opaque, videoMetadataBuffer, videoMedatadaBufferMemory);
mDataCbTimestamp(frame->mTimestamp, CAMERA_MSG_VIDEO_FRAME,
videoMedatadaBufferMemory, 0, mCallbackCookie);
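Note: in metadata-buffer mode the encoder never receives pixel data; it gets a small descriptor whose handle now comes from camera_buffer_get_omx_ptr() (or from the gralloc handle in the mUseVideoBuffers path above). A sketch of that packing with hypothetical stand-in definitions; the real video_metadata_t layout and buffer-type constant live in the OMX/stagefright headers:

    #include <cstdint>

    // Hypothetical stand-ins for the real definitions.
    enum { kMetadataBufferTypeCameraSource = 0 };

    struct video_metadata_t {
        int      metadataBufferType;  // kMetadataBufferTypeCameraSource here
        void    *handle;              // platform buffer handle (OMX or gralloc)
        uint32_t offset;              // byte offset of the frame inside the buffer
    };

    // Mirrors the branch above: a descriptor is filled in, nothing is copied.
    static void packVideoMetadata(video_metadata_t &meta,
                                  void *bufferHandle, uint32_t frameOffset) {
        meta.metadataBufferType = kMetadataBufferTypeCameraSource;
        meta.handle = bufferHandle;
        meta.offset = frameOffset;
    }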
@@ -1099,20 +1175,18 @@ void AppCallbackNotifier::notifyFrame()
else
{
//TODO: Need to revisit this, should ideally be mapping the TILER buffer using mRequestMemory
- camera_memory_t* fakebuf = mRequestMemory(-1, 4, 1, NULL);
+ camera_memory_t* fakebuf = mRequestMemory(-1, sizeof(buffer_handle_t), 1, NULL);
if( (NULL == fakebuf) || ( NULL == fakebuf->data) || ( NULL == frame->mBuffer))
{
CAMHAL_LOGEA("Error! One of the video buffers is NULL");
break;
}
- fakebuf->data = frame->mBuffer;
+ *reinterpret_cast<buffer_handle_t*>(fakebuf->data) = reinterpret_cast<buffer_handle_t>(frame->mBuffer->mapped);
mDataCbTimestamp(frame->mTimestamp, CAMERA_MSG_VIDEO_FRAME, fakebuf, 0, mCallbackCookie);
fakebuf->release(fakebuf);
}
}
- mRecordingLock.unlock();
-
}
else if(( CameraFrame::SNAPSHOT_FRAME == frame->mFrameType ) &&
( NULL != mCameraHal ) &&
@@ -1178,7 +1252,7 @@ void AppCallbackNotifier::frameCallbackRelay(CameraFrame* caFrame)
void AppCallbackNotifier::frameCallback(CameraFrame* caFrame)
{
///Post the event to the event queue of AppCallbackNotifier
- TIUTILS::Message msg;
+ Utils::Message msg;
CameraFrame *frame;
LOG_FUNCTION_NAME;
@@ -1205,10 +1279,12 @@ void AppCallbackNotifier::frameCallback(CameraFrame* caFrame)
void AppCallbackNotifier::flushAndReturnFrames()
{
- TIUTILS::Message msg;
+ LOG_FUNCTION_NAME;
+
+ Utils::Message msg;
CameraFrame *frame;
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
while (!mFrameQ.isEmpty()) {
mFrameQ.get(&msg);
frame = (CameraFrame*) msg.arg1;
@@ -1233,7 +1309,7 @@ void AppCallbackNotifier::eventCallback(CameraHalEvent* chEvt)
{
///Post the event to the event queue of AppCallbackNotifier
- TIUTILS::Message msg;
+ Utils::Message msg;
CameraHalEvent *event;
@@ -1248,7 +1324,7 @@ void AppCallbackNotifier::eventCallback(CameraHalEvent* chEvt)
msg.command = AppCallbackNotifier::NOTIFIER_CMD_PROCESS_EVENT;
msg.arg1 = event;
{
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
mEventQ.put(&msg);
}
}
@@ -1267,7 +1343,7 @@ void AppCallbackNotifier::flushEventQueue()
{
{
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
mEventQ.clear();
}
}
@@ -1276,7 +1352,7 @@ void AppCallbackNotifier::flushEventQueue()
bool AppCallbackNotifier::processMessage()
{
///Retrieve the command from the command queue and process it
- TIUTILS::Message msg;
+ Utils::Message msg;
LOG_FUNCTION_NAME;
@@ -1289,7 +1365,7 @@ bool AppCallbackNotifier::processMessage()
{
case NotificationThread::NOTIFIER_EXIT:
{
- CAMHAL_LOGDA("Received NOTIFIER_EXIT command from Camera HAL");
+ CAMHAL_LOGD("Received NOTIFIER_EXIT command from Camera HAL");
mNotifierState = AppCallbackNotifier::NOTIFIER_EXITED;
ret = false;
break;
@@ -1327,7 +1403,7 @@ AppCallbackNotifier::~AppCallbackNotifier()
mEventProvider->disableEventNotification(CameraHalEvent::ALL_EVENTS);
}
- TIUTILS::Message msg = {0,0,0,0,0,0};
+ Utils::Message msg = {0,0,0,0,0,0};
msg.command = NotificationThread::NOTIFIER_EXIT;
///Post the message to display thread
@@ -1373,11 +1449,11 @@ void AppCallbackNotifier::releaseSharedVideoBuffers()
camera_memory_t* videoMedatadaBufferMemory;
for (unsigned int i = 0; i < mVideoMetadataBufferMemoryMap.size(); i++)
{
- videoMedatadaBufferMemory = (camera_memory_t*) mVideoMetadataBufferMemoryMap.valueAt(i);
+ videoMedatadaBufferMemory = mVideoMetadataBufferMemoryMap.valueAt(i);
if(NULL != videoMedatadaBufferMemory)
{
videoMedatadaBufferMemory->release(videoMedatadaBufferMemory);
- CAMHAL_LOGDB("Released videoMedatadaBufferMemory=0x%x", videoMedatadaBufferMemory);
+ CAMHAL_LOGDB("Released videoMedatadaBufferMemory=%p", videoMedatadaBufferMemory);
}
}
@@ -1434,16 +1510,14 @@ void AppCallbackNotifier::setFrameProvider(FrameNotifier *frameNotifier)
LOG_FUNCTION_NAME_EXIT;
}
-status_t AppCallbackNotifier::startPreviewCallbacks(CameraParameters &params, void *buffers, uint32_t *offsets, int fd, size_t length, size_t count)
+status_t AppCallbackNotifier::startPreviewCallbacks(android::CameraParameters &params, CameraBuffer *buffers, uint32_t *offsets, int fd, size_t length, size_t count)
{
- sp<MemoryHeapBase> heap;
- sp<MemoryBase> buffer;
unsigned int *bufArr;
int size = 0;
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
if ( NULL == mFrameProvider )
{
@@ -1461,30 +1535,12 @@ status_t AppCallbackNotifier::startPreviewCallbacks(CameraParameters &params, vo
///Get preview size
params.getPreviewSize(&w, &h);
- //Get the preview pixel format
- mPreviewPixelFormat = params.getPreviewFormat();
-
- if(strcmp(mPreviewPixelFormat, (const char *) CameraParameters::PIXEL_FORMAT_YUV422I) == 0)
- {
- size = w*h*2;
- mPreviewPixelFormat = CameraParameters::PIXEL_FORMAT_YUV422I;
- }
- else if(strcmp(mPreviewPixelFormat, (const char *) CameraParameters::PIXEL_FORMAT_YUV420SP) == 0 )
- {
- size = (w*h*3)/2;
- mPreviewPixelFormat = CameraParameters::PIXEL_FORMAT_YUV420SP;
- }
- else if(strcmp(mPreviewPixelFormat, (const char *) CameraParameters::PIXEL_FORMAT_RGB565) == 0)
- {
- size = w*h*2;
- mPreviewPixelFormat = CameraParameters::PIXEL_FORMAT_RGB565;
- }
- else if(strcmp(mPreviewPixelFormat, (const char *) CameraParameters::PIXEL_FORMAT_YUV420P) == 0)
- {
- int yStride, uvStride, ySize, uvSize;
- alignYV12(w, h, yStride, uvStride, ySize, uvSize, size);
- mPreviewPixelFormat = CameraParameters::PIXEL_FORMAT_YUV420P;
- }
+ // save preview pixel format, size and stride
+ mPreviewWidth = w;
+ mPreviewHeight = h;
+ mPreviewStride = 4096;
+ mPreviewPixelFormat = CameraHal::getPixelFormatConstant(params.getPreviewFormat());
+ size = CameraHal::calculateBufferSize(mPreviewPixelFormat, w, h);
mPreviewMemory = mRequestMemory(-1, size, AppCallbackNotifier::MAX_BUFFERS, NULL);
if (!mPreviewMemory) {
@@ -1492,18 +1548,24 @@ status_t AppCallbackNotifier::startPreviewCallbacks(CameraParameters &params, vo
}
for (int i=0; i < AppCallbackNotifier::MAX_BUFFERS; i++) {
- mPreviewBufs[i] = (unsigned char*) mPreviewMemory->data + (i*size);
+ mPreviewBuffers[i].type = CAMERA_BUFFER_MEMORY;
+ mPreviewBuffers[i].opaque = (unsigned char*) mPreviewMemory->data + (i*size);
+ mPreviewBuffers[i].mapped = mPreviewBuffers[i].opaque;
}
if ( mCameraHal->msgTypeEnabled(CAMERA_MSG_PREVIEW_FRAME ) ) {
mFrameProvider->enableFrameNotification(CameraFrame::PREVIEW_FRAME_SYNC);
}
+ if ( mCameraHal->msgTypeEnabled(CAMERA_MSG_POSTVIEW_FRAME) ) {
+ mFrameProvider->enableFrameNotification(CameraFrame::SNAPSHOT_FRAME);
+ }
+
mPreviewBufCount = 0;
mPreviewing = true;
- LOG_FUNCTION_NAME;
+ LOG_FUNCTION_NAME_EXIT;
return NO_ERROR;
}
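Note: the per-format size switch that used to live in startPreviewCallbacks() has moved into CameraHal::calculateBufferSize(), and the preview stride is now pinned to 4096 bytes (presumably the ducati/TILER 2D pitch). A hedged sketch of what such a size helper computes, reconstructed from the removed branches above and using the standard android::CameraParameters format strings; the real implementation may differ:

    #include <cstddef>
    #include <cstring>

    static size_t previewBufferSize(const char *fmt, int w, int h) {
        if (!std::strcmp(fmt, "yuv422i-yuyv") || !std::strcmp(fmt, "rgb565"))
            return (size_t)w * h * 2;                   // 2 bytes per pixel
        if (!std::strcmp(fmt, "yuv420sp"))
            return (size_t)w * h * 3 / 2;               // NV21: Y plane + interleaved VU
        if (!std::strcmp(fmt, "yuv420p")) {             // YV12 with 16-byte aligned strides
            size_t yStride  = (w + 0xF) & ~0xF;
            size_t uvStride = (yStride / 2 + 0xF) & ~0xF;
            return yStride * h + 2 * (uvStride * h / 2);
        }
        return 0;                                       // unrecognized format
    }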
@@ -1512,7 +1574,7 @@ void AppCallbackNotifier::setBurst(bool burst)
{
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(mBurstLock);
+ android::AutoMutex lock(mBurstLock);
mBurst = burst;
@@ -1545,9 +1607,6 @@ void AppCallbackNotifier::setVideoRes(int width, int height)
status_t AppCallbackNotifier::stopPreviewCallbacks()
{
- sp<MemoryHeapBase> heap;
- sp<MemoryBase> buffer;
-
LOG_FUNCTION_NAME;
if ( NULL == mFrameProvider )
@@ -1562,10 +1621,12 @@ status_t AppCallbackNotifier::stopPreviewCallbacks()
}
mFrameProvider->disableFrameNotification(CameraFrame::PREVIEW_FRAME_SYNC);
+ mFrameProvider->disableFrameNotification(CameraFrame::SNAPSHOT_FRAME);
{
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
mPreviewMemory->release(mPreviewMemory);
+ mPreviewMemory = 0;
}
mPreviewing = false;
@@ -1590,7 +1651,7 @@ status_t AppCallbackNotifier::startRecording()
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(mRecordingLock);
+ android::AutoMutex lock(mRecordingLock);
if ( NULL == mFrameProvider )
{
@@ -1616,14 +1677,13 @@ status_t AppCallbackNotifier::startRecording()
}
//Allocate metadata buffers for video recording
-status_t AppCallbackNotifier::initSharedVideoBuffers(void *buffers, uint32_t *offsets, int fd, size_t length, size_t count, void *vidBufs)
+status_t AppCallbackNotifier::initSharedVideoBuffers(CameraBuffer *buffers, uint32_t *offsets, int fd, size_t length, size_t count, CameraBuffer *vidBufs)
{
status_t ret = NO_ERROR;
LOG_FUNCTION_NAME;
if(mUseMetaDataBufferMode)
{
- uint32_t *bufArr = NULL;
camera_memory_t* videoMedatadaBufferMemory = NULL;
if(NULL == buffers)
@@ -1631,7 +1691,6 @@ status_t AppCallbackNotifier::initSharedVideoBuffers(void *buffers, uint32_t *of
CAMHAL_LOGEA("Error! Video buffers are NULL");
return BAD_VALUE;
}
- bufArr = (uint32_t *) buffers;
for (uint32_t i = 0; i < count; i++)
{
@@ -1642,16 +1701,18 @@ status_t AppCallbackNotifier::initSharedVideoBuffers(void *buffers, uint32_t *of
return NO_MEMORY;
}
- mVideoMetadataBufferMemoryMap.add(bufArr[i], (uint32_t)(videoMedatadaBufferMemory));
- mVideoMetadataBufferReverseMap.add((uint32_t)(videoMedatadaBufferMemory->data), bufArr[i]);
- CAMHAL_LOGDB("bufArr[%d]=0x%x, videoMedatadaBufferMemory=0x%x, videoMedatadaBufferMemory->data=0x%x",
- i, bufArr[i], videoMedatadaBufferMemory, videoMedatadaBufferMemory->data);
+ // FIXME remove cast
+ mVideoMetadataBufferMemoryMap.add((void *)buffers[i].opaque, videoMedatadaBufferMemory);
+ mVideoMetadataBufferReverseMap.add(videoMedatadaBufferMemory->data, &buffers[i]);
+ CAMHAL_LOGDB("buffers[%d]=%p, videoMedatadaBufferMemory=%p, videoMedatadaBufferMemory->data=%p",
+ i, &buffers[i], videoMedatadaBufferMemory, videoMedatadaBufferMemory->data);
if (vidBufs != NULL)
{
- uint32_t *vBufArr = (uint32_t *) vidBufs;
- mVideoMap.add(bufArr[i], vBufArr[i]);
- CAMHAL_LOGVB("bufArr[%d]=0x%x, vBuffArr[%d]=0x%x", i, bufArr[i], i, vBufArr[i]);
+ //ASSERT(buffers[i].type == CAMERA_BUFFER_GRALLOC);
+ // FIXME remove cast
+ mVideoMap.add((void *)buffers[i].opaque, &vidBufs[i]);
+ CAMHAL_LOGVB("buffers[%d]=%p, vBuffArr[%d]=%p", i, &buffers[i], i, &vidBufs[i]);
}
}
}
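Note: initSharedVideoBuffers() above keeps two lookups in sync: the forward map keys on the camera buffer's opaque handle and yields the camera_memory_t carrying the encoder-visible descriptor, while the reverse map keys on that descriptor's data pointer so releaseRecordingFrame() (later in this file) can recover the originating CameraBuffer. A minimal sketch of the pairing with hypothetical stand-in types, using std::map in place of the HAL's KeyedVector:

    #include <cassert>
    #include <map>

    struct CameraBuffer    { void *opaque; };
    struct camera_memory_t { void *data;   };

    int main() {
        int bufferStorage = 0, descriptorStorage = 0;
        CameraBuffer    buf  = { &bufferStorage };      // one video buffer
        camera_memory_t meta = { &descriptorStorage };  // its metadata descriptor

        std::map<void *, camera_memory_t *> forward;  // buffer handle -> descriptor
        std::map<void *, CameraBuffer *>    reverse;  // descriptor data -> buffer

        // initSharedVideoBuffers(): remember the pairing both ways
        forward[buf.opaque] = &meta;
        reverse[meta.data]  = &buf;

        // releaseRecordingFrame(mem): the encoder returns the descriptor's data
        // pointer and the reverse map recovers the CameraBuffer to release.
        assert(reverse[meta.data] == &buf);
        assert(forward[buf.opaque] == &meta);
        return 0;
    }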
@@ -1668,7 +1729,7 @@ status_t AppCallbackNotifier::stopRecording()
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(mRecordingLock);
+ android::AutoMutex lock(mRecordingLock);
if ( NULL == mFrameProvider )
{
@@ -1699,7 +1760,7 @@ status_t AppCallbackNotifier::stopRecording()
status_t AppCallbackNotifier::releaseRecordingFrame(const void* mem)
{
status_t ret = NO_ERROR;
- void *frame = NULL;
+ CameraBuffer *frame = NULL;
LOG_FUNCTION_NAME;
if ( NULL == mFrameProvider )
@@ -1722,13 +1783,15 @@ status_t AppCallbackNotifier::releaseRecordingFrame(const void* mem)
if(mUseMetaDataBufferMode)
{
video_metadata_t *videoMetadataBuffer = (video_metadata_t *) mem ;
- frame = (void*) mVideoMetadataBufferReverseMap.valueFor((uint32_t) videoMetadataBuffer);
+ /* FIXME remove cast */
+ frame = mVideoMetadataBufferReverseMap.valueFor(videoMetadataBuffer);
CAMHAL_LOGVB("Releasing frame with videoMetadataBuffer=0x%x, videoMetadataBuffer->handle=0x%x & frame handle=0x%x\n",
videoMetadataBuffer, videoMetadataBuffer->handle, frame);
}
else
{
- frame = (void*)(*((uint32_t *)mem));
+ /* FIXME this won't work */
+ frame = (CameraBuffer *)(void*)(*((uint32_t *)mem));
}
if ( NO_ERROR == ret )
@@ -1743,19 +1806,35 @@ status_t AppCallbackNotifier::releaseRecordingFrame(const void* mem)
status_t AppCallbackNotifier::enableMsgType(int32_t msgType)
{
- if( msgType & (CAMERA_MSG_POSTVIEW_FRAME | CAMERA_MSG_PREVIEW_FRAME) ) {
+ if( msgType & CAMERA_MSG_PREVIEW_FRAME ) {
mFrameProvider->enableFrameNotification(CameraFrame::PREVIEW_FRAME_SYNC);
}
+ if( msgType & CAMERA_MSG_POSTVIEW_FRAME ) {
+ mFrameProvider->enableFrameNotification(CameraFrame::SNAPSHOT_FRAME);
+ }
+
+ if(msgType & CAMERA_MSG_RAW_IMAGE) {
+ mFrameProvider->enableFrameNotification(CameraFrame::RAW_FRAME);
+ }
+
return NO_ERROR;
}
status_t AppCallbackNotifier::disableMsgType(int32_t msgType)
{
- if( msgType & (CAMERA_MSG_PREVIEW_FRAME | CAMERA_MSG_POSTVIEW_FRAME) ) {
+ if( msgType & CAMERA_MSG_PREVIEW_FRAME ) {
mFrameProvider->disableFrameNotification(CameraFrame::PREVIEW_FRAME_SYNC);
}
+ if( msgType & CAMERA_MSG_POSTVIEW_FRAME ) {
+ mFrameProvider->disableFrameNotification(CameraFrame::SNAPSHOT_FRAME);
+ }
+
+ if(msgType & CAMERA_MSG_RAW_IMAGE) {
+ mFrameProvider->disableFrameNotification(CameraFrame::RAW_FRAME);
+ }
+
return NO_ERROR;
}
@@ -1812,14 +1891,14 @@ status_t AppCallbackNotifier::stop()
return ALREADY_EXISTS;
}
{
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
mNotifierState = AppCallbackNotifier::NOTIFIER_STOPPED;
CAMHAL_LOGDA(" --> AppCallbackNotifier NOTIFIER_STOPPED \n");
}
while(!gEncoderQueue.isEmpty()) {
- sp<Encoder_libjpeg> encoder = gEncoderQueue.valueAt(0);
+ android::sp<Encoder_libjpeg> encoder = gEncoderQueue.valueAt(0);
camera_memory_t* encoded_mem = NULL;
ExifElementsTable* exif = NULL;
@@ -1848,4 +1927,5 @@ status_t AppCallbackNotifier::stop()
-};
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/BaseCameraAdapter.cpp b/camera/BaseCameraAdapter.cpp
index bb7a5b8..5301c6d 100644
--- a/camera/BaseCameraAdapter.cpp
+++ b/camera/BaseCameraAdapter.cpp
@@ -14,13 +14,50 @@
* limitations under the License.
*/
-
-
-#define LOG_TAG "CameraHAL"
-
#include "BaseCameraAdapter.h"
-namespace android {
+const int EVENT_MASK = 0xffff;
+
+namespace Ti {
+namespace Camera {
+
+const LUT cameraCommandsUserToHAL[] = {
+ { "CAMERA_START_PREVIEW", CameraAdapter::CAMERA_START_PREVIEW },
+ { "CAMERA_STOP_PREVIEW", CameraAdapter::CAMERA_STOP_PREVIEW },
+ { "CAMERA_START_VIDEO", CameraAdapter::CAMERA_START_VIDEO },
+ { "CAMERA_STOP_VIDEO", CameraAdapter::CAMERA_STOP_VIDEO },
+ { "CAMERA_START_IMAGE_CAPTURE", CameraAdapter::CAMERA_START_IMAGE_CAPTURE },
+ { "CAMERA_STOP_IMAGE_CAPTURE", CameraAdapter::CAMERA_STOP_IMAGE_CAPTURE },
+ { "CAMERA_PERFORM_AUTOFOCUS", CameraAdapter::CAMERA_PERFORM_AUTOFOCUS },
+ { "CAMERA_CANCEL_AUTOFOCUS", CameraAdapter::CAMERA_CANCEL_AUTOFOCUS },
+ { "CAMERA_PREVIEW_FLUSH_BUFFERS", CameraAdapter::CAMERA_PREVIEW_FLUSH_BUFFERS },
+ { "CAMERA_START_SMOOTH_ZOOM", CameraAdapter::CAMERA_START_SMOOTH_ZOOM },
+ { "CAMERA_STOP_SMOOTH_ZOOM", CameraAdapter::CAMERA_STOP_SMOOTH_ZOOM },
+ { "CAMERA_USE_BUFFERS_PREVIEW", CameraAdapter::CAMERA_USE_BUFFERS_PREVIEW },
+ { "CAMERA_SET_TIMEOUT", CameraAdapter::CAMERA_SET_TIMEOUT },
+ { "CAMERA_CANCEL_TIMEOUT", CameraAdapter::CAMERA_CANCEL_TIMEOUT },
+ { "CAMERA_START_BRACKET_CAPTURE", CameraAdapter::CAMERA_START_BRACKET_CAPTURE },
+ { "CAMERA_STOP_BRACKET_CAPTURE", CameraAdapter::CAMERA_STOP_BRACKET_CAPTURE },
+ { "CAMERA_QUERY_RESOLUTION_PREVIEW", CameraAdapter::CAMERA_QUERY_RESOLUTION_PREVIEW },
+ { "CAMERA_QUERY_BUFFER_SIZE_IMAGE_CAPTURE", CameraAdapter::CAMERA_QUERY_BUFFER_SIZE_IMAGE_CAPTURE },
+ { "CAMERA_QUERY_BUFFER_SIZE_PREVIEW_DATA", CameraAdapter::CAMERA_QUERY_BUFFER_SIZE_PREVIEW_DATA },
+ { "CAMERA_USE_BUFFERS_IMAGE_CAPTURE", CameraAdapter::CAMERA_USE_BUFFERS_IMAGE_CAPTURE },
+ { "CAMERA_USE_BUFFERS_PREVIEW_DATA", CameraAdapter::CAMERA_USE_BUFFERS_PREVIEW_DATA },
+ { "CAMERA_TIMEOUT_EXPIRED", CameraAdapter::CAMERA_TIMEOUT_EXPIRED },
+ { "CAMERA_START_FD", CameraAdapter::CAMERA_START_FD },
+ { "CAMERA_STOP_FD", CameraAdapter::CAMERA_STOP_FD },
+ { "CAMERA_SWITCH_TO_EXECUTING", CameraAdapter::CAMERA_SWITCH_TO_EXECUTING },
+ { "CAMERA_USE_BUFFERS_VIDEO_CAPTURE", CameraAdapter::CAMERA_USE_BUFFERS_VIDEO_CAPTURE },
+#ifdef OMAP_ENHANCEMENT_CPCAM
+ { "CAMERA_USE_BUFFERS_REPROCESS", CameraAdapter::CAMERA_USE_BUFFERS_REPROCESS },
+ { "CAMERA_START_REPROCESS", CameraAdapter::CAMERA_START_REPROCESS },
+#endif
+};
+
+const LUTtypeHAL CamCommandsLUT = {
+ sizeof(cameraCommandsUserToHAL)/sizeof(cameraCommandsUserToHAL[0]),
+ cameraCommandsUserToHAL
+};
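Note: the table above only pairs each CameraAdapter command enum with a printable name; getLUTvalue_translateHAL() further down in this diff walks it so setState() can log state transitions by name instead of raw hex. A minimal self-contained sketch of the pattern; the LUT/LUTtypeHAL layouts and the enum values used here are assumptions, since their declarations are not part of this diff:

    #include <cstdio>

    struct LUT        { const char *userDefinition; int halDefinition; };
    struct LUTtypeHAL { int size; const LUT *Table; };

    static const LUT kCommands[] = {
        { "CAMERA_START_PREVIEW", 1 },
        { "CAMERA_STOP_PREVIEW",  2 },
    };
    static const LUTtypeHAL kCommandsLUT = { 2, kCommands };

    // Same linear scan as getLUTvalue_translateHAL() later in this diff.
    static const char *commandName(int value, const LUTtypeHAL &lut) {
        for (int i = 0; i < lut.size; ++i)
            if (lut.Table[i].halDefinition == value)
                return lut.Table[i].userDefinition;
        return "UNKNOWN";
    }

    int main() {
        std::printf("%s\n", commandName(2, kCommandsLUT));  // CAMERA_STOP_PREVIEW
        return 0;
    }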
/*--------------------Camera Adapter Class STARTS here-----------------------------*/
@@ -51,6 +88,8 @@ BaseCameraAdapter::BaseCameraAdapter()
mAdapterState = INTIALIZED_STATE;
+ mSharedAllocator = NULL;
+
#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
mStartFocus.tv_sec = 0;
mStartFocus.tv_usec = 0;
@@ -64,16 +103,18 @@ BaseCameraAdapter::~BaseCameraAdapter()
{
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(mSubscriberLock);
+ android::AutoMutex lock(mSubscriberLock);
mFrameSubscribers.clear();
mImageSubscribers.clear();
mRawSubscribers.clear();
mVideoSubscribers.clear();
+ mVideoInSubscribers.clear();
mFocusSubscribers.clear();
mShutterSubscribers.clear();
mZoomSubscribers.clear();
- mFaceSubscribers.clear();
+ mSnapshotSubscribers.clear();
+ mMetadataSubscribers.clear();
LOG_FUNCTION_NAME_EXIT;
}
@@ -130,40 +171,59 @@ status_t BaseCameraAdapter::setErrorHandler(ErrorNotifier *errorNotifier)
void BaseCameraAdapter::enableMsgType(int32_t msgs, frame_callback callback, event_callback eventCb, void* cookie)
{
- Mutex::Autolock lock(mSubscriberLock);
+ android::AutoMutex lock(mSubscriberLock);
LOG_FUNCTION_NAME;
- if ( CameraFrame::PREVIEW_FRAME_SYNC == msgs )
- {
- mFrameSubscribers.add((int) cookie, callback);
- }
- else if ( CameraFrame::FRAME_DATA_SYNC == msgs )
- {
- mFrameDataSubscribers.add((int) cookie, callback);
- }
- else if ( CameraFrame::IMAGE_FRAME == msgs)
- {
- mImageSubscribers.add((int) cookie, callback);
- }
- else if ( CameraFrame::RAW_FRAME == msgs)
- {
- mRawSubscribers.add((int) cookie, callback);
- }
- else if ( CameraFrame::VIDEO_FRAME_SYNC == msgs)
- {
- mVideoSubscribers.add((int) cookie, callback);
- }
- else if ( CameraHalEvent::ALL_EVENTS == msgs)
+ int32_t frameMsg = ((msgs >> MessageNotifier::FRAME_BIT_FIELD_POSITION) & EVENT_MASK);
+ int32_t eventMsg = ((msgs >> MessageNotifier::EVENT_BIT_FIELD_POSITION) & EVENT_MASK);
+
+ if ( frameMsg != 0 )
{
- mFocusSubscribers.add((int) cookie, eventCb);
- mShutterSubscribers.add((int) cookie, eventCb);
- mZoomSubscribers.add((int) cookie, eventCb);
- mFaceSubscribers.add((int) cookie, eventCb);
+ CAMHAL_LOGVB("Frame message type id=0x%x subscription request", frameMsg);
+ switch ( frameMsg )
+ {
+ case CameraFrame::PREVIEW_FRAME_SYNC:
+ mFrameSubscribers.add((int) cookie, callback);
+ break;
+ case CameraFrame::FRAME_DATA_SYNC:
+ mFrameDataSubscribers.add((int) cookie, callback);
+ break;
+ case CameraFrame::SNAPSHOT_FRAME:
+ mSnapshotSubscribers.add((int) cookie, callback);
+ break;
+ case CameraFrame::IMAGE_FRAME:
+ mImageSubscribers.add((int) cookie, callback);
+ break;
+ case CameraFrame::RAW_FRAME:
+ mRawSubscribers.add((int) cookie, callback);
+ break;
+ case CameraFrame::VIDEO_FRAME_SYNC:
+ mVideoSubscribers.add((int) cookie, callback);
+ break;
+ case CameraFrame::REPROCESS_INPUT_FRAME:
+ mVideoInSubscribers.add((int) cookie, callback);
+ break;
+ default:
+ CAMHAL_LOGEA("Frame message type id=0x%x subscription not supported yet!", frameMsg);
+ break;
+ }
}
- else
+
+ if ( eventMsg != 0)
{
- CAMHAL_LOGEA("Message type subscription no supported yet!");
+ CAMHAL_LOGVB("Event message type id=0x%x subscription request", eventMsg);
+ if ( CameraHalEvent::ALL_EVENTS == eventMsg )
+ {
+ mFocusSubscribers.add((int) cookie, eventCb);
+ mShutterSubscribers.add((int) cookie, eventCb);
+ mZoomSubscribers.add((int) cookie, eventCb);
+ mMetadataSubscribers.add((int) cookie, eventCb);
+ }
+ else
+ {
+ CAMHAL_LOGEA("Event message type id=0x%x subscription not supported yet!", eventMsg);
+ }
}
LOG_FUNCTION_NAME_EXIT;
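Note: enableMsgType()/disableMsgType() now decode one 32-bit msgs word into independent frame and event masks by shifting and masking with EVENT_MASK (0xffff). A standalone round-trip of that packing; the bit positions used below are assumptions, the real ones come from MessageNotifier in CameraHal.h:

    #include <cassert>
    #include <cstdint>

    static const int FRAME_BIT_FIELD_POSITION = 0;    // assumed
    static const int EVENT_BIT_FIELD_POSITION = 16;   // assumed
    static const int32_t EVENT_MASK = 0xffff;

    int main() {
        const int32_t PREVIEW_FRAME_SYNC = 0x1;   // stand-in frame type
        const int32_t ALL_EVENTS         = 0x7;   // stand-in event mask

        int32_t msgs = (PREVIEW_FRAME_SYNC << FRAME_BIT_FIELD_POSITION) |
                       (ALL_EVENTS << EVENT_BIT_FIELD_POSITION);

        int32_t frameMsg = (msgs >> FRAME_BIT_FIELD_POSITION) & EVENT_MASK;
        int32_t eventMsg = (msgs >> EVENT_BIT_FIELD_POSITION) & EVENT_MASK;

        assert(frameMsg == PREVIEW_FRAME_SYNC);
        assert(eventMsg == ALL_EVENTS);
        return 0;
    }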
@@ -171,59 +231,78 @@ void BaseCameraAdapter::enableMsgType(int32_t msgs, frame_callback callback, eve
void BaseCameraAdapter::disableMsgType(int32_t msgs, void* cookie)
{
- Mutex::Autolock lock(mSubscriberLock);
+ android::AutoMutex lock(mSubscriberLock);
LOG_FUNCTION_NAME;
- if ( CameraFrame::PREVIEW_FRAME_SYNC == msgs )
- {
- mFrameSubscribers.removeItem((int) cookie);
- }
- else if ( CameraFrame::FRAME_DATA_SYNC == msgs )
- {
- mFrameDataSubscribers.removeItem((int) cookie);
- }
- else if ( CameraFrame::IMAGE_FRAME == msgs)
- {
- mImageSubscribers.removeItem((int) cookie);
- }
- else if ( CameraFrame::RAW_FRAME == msgs)
- {
- mRawSubscribers.removeItem((int) cookie);
- }
- else if ( CameraFrame::VIDEO_FRAME_SYNC == msgs)
- {
- mVideoSubscribers.removeItem((int) cookie);
- }
- else if ( CameraFrame::ALL_FRAMES == msgs )
- {
- mFrameSubscribers.removeItem((int) cookie);
- mFrameDataSubscribers.removeItem((int) cookie);
- mImageSubscribers.removeItem((int) cookie);
- mRawSubscribers.removeItem((int) cookie);
- mVideoSubscribers.removeItem((int) cookie);
- }
- else if ( CameraHalEvent::ALL_EVENTS == msgs)
+ int32_t frameMsg = ((msgs >> MessageNotifier::FRAME_BIT_FIELD_POSITION) & EVENT_MASK);
+ int32_t eventMsg = ((msgs >> MessageNotifier::EVENT_BIT_FIELD_POSITION) & EVENT_MASK);
+
+ if ( frameMsg != 0 )
{
- //Subscribe only for focus
- //TODO: Process case by case
- mFocusSubscribers.removeItem((int) cookie);
- mShutterSubscribers.removeItem((int) cookie);
- mZoomSubscribers.removeItem((int) cookie);
- mFaceSubscribers.removeItem((int) cookie);
+ CAMHAL_LOGVB("Frame message type id=0x%x remove subscription request", frameMsg);
+ switch ( frameMsg )
+ {
+ case CameraFrame::PREVIEW_FRAME_SYNC:
+ mFrameSubscribers.removeItem((int) cookie);
+ break;
+ case CameraFrame::FRAME_DATA_SYNC:
+ mFrameDataSubscribers.removeItem((int) cookie);
+ break;
+ case CameraFrame::SNAPSHOT_FRAME:
+ mSnapshotSubscribers.removeItem((int) cookie);
+ break;
+ case CameraFrame::IMAGE_FRAME:
+ mImageSubscribers.removeItem((int) cookie);
+ break;
+ case CameraFrame::RAW_FRAME:
+ mRawSubscribers.removeItem((int) cookie);
+ break;
+ case CameraFrame::VIDEO_FRAME_SYNC:
+ mVideoSubscribers.removeItem((int) cookie);
+ break;
+ case CameraFrame::REPROCESS_INPUT_FRAME:
+ mVideoInSubscribers.removeItem((int) cookie);
+ break;
+ case CameraFrame::ALL_FRAMES:
+ mFrameSubscribers.removeItem((int) cookie);
+ mFrameDataSubscribers.removeItem((int) cookie);
+ mSnapshotSubscribers.removeItem((int) cookie);
+ mImageSubscribers.removeItem((int) cookie);
+ mRawSubscribers.removeItem((int) cookie);
+ mVideoSubscribers.removeItem((int) cookie);
+ mVideoInSubscribers.removeItem((int) cookie);
+ break;
+ default:
+ CAMHAL_LOGEA("Frame message type id=0x%x subscription remove not supported yet!", frameMsg);
+ break;
+ }
}
- else
+
+ if ( eventMsg != 0 )
{
- CAMHAL_LOGEB("Message type 0x%x subscription no supported yet!", msgs);
+ CAMHAL_LOGVB("Event message type id=0x%x remove subscription request", eventMsg);
+ if ( CameraHalEvent::ALL_EVENTS == eventMsg)
+ {
+ //TODO: Process case by case
+ mFocusSubscribers.removeItem((int) cookie);
+ mShutterSubscribers.removeItem((int) cookie);
+ mZoomSubscribers.removeItem((int) cookie);
+ mMetadataSubscribers.removeItem((int) cookie);
+ }
+ else
+ {
+ CAMHAL_LOGEA("Event message type id=0x%x subscription remove not supported yet!", eventMsg);
+ }
}
LOG_FUNCTION_NAME_EXIT;
}
-void BaseCameraAdapter::addFramePointers(void *frameBuf, void *buf)
+void BaseCameraAdapter::addFramePointers(CameraBuffer *frameBuf, void *buf)
{
unsigned int *pBuf = (unsigned int *)buf;
- Mutex::Autolock lock(mSubscriberLock);
+ android::AutoMutex lock(mSubscriberLock);
if ((frameBuf != NULL) && ( pBuf != NULL) )
{
@@ -239,7 +318,7 @@ void BaseCameraAdapter::addFramePointers(void *frameBuf, void *buf)
void BaseCameraAdapter::removeFramePointers()
{
- Mutex::Autolock lock(mSubscriberLock);
+ android::AutoMutex lock(mSubscriberLock);
int size = mFrameQueue.size();
CAMHAL_LOGVB("Removing %d Frames = ", size);
@@ -252,7 +331,7 @@ void BaseCameraAdapter::removeFramePointers()
mFrameQueue.clear();
}
-void BaseCameraAdapter::returnFrame(void* frameBuf, CameraFrame::FrameType frameType)
+void BaseCameraAdapter::returnFrame(CameraBuffer * frameBuf, CameraFrame::FrameType frameType)
{
status_t res = NO_ERROR;
size_t subscriberCount = 0;
@@ -266,7 +345,7 @@ void BaseCameraAdapter::returnFrame(void* frameBuf, CameraFrame::FrameType frame
if ( NO_ERROR == res)
{
- Mutex::Autolock lock(mReturnFrameLock);
+ android::AutoMutex lock(mReturnFrameLock);
refCount = getFrameRefCount(frameBuf, frameType);
@@ -310,13 +389,15 @@ void BaseCameraAdapter::returnFrame(void* frameBuf, CameraFrame::FrameType frame
//check if someone is holding this buffer
if ( 0 == refCount )
{
-#ifdef DEBUG_LOG
- if(mBuffersWithDucati.indexOfKey((int)frameBuf)>=0)
+#ifdef CAMERAHAL_DEBUG
+ if((mBuffersWithDucati.indexOfKey((int)camera_buffer_get_omx_ptr(frameBuf)) >= 0) &&
+ ((CameraFrame::PREVIEW_FRAME_SYNC == frameType) ||
+ (CameraFrame::SNAPSHOT_FRAME == frameType)))
{
- ALOGE("Buffer already with Ducati!! 0x%x", frameBuf);
- for(int i=0;i<mBuffersWithDucati.size();i++) ALOGE("0x%x", mBuffersWithDucati.keyAt(i));
+ CAMHAL_LOGE("Buffer already with Ducati!! 0x%x", frameBuf);
+ for(int i=0;i<mBuffersWithDucati.size();i++) CAMHAL_LOGE("0x%x", mBuffersWithDucati.keyAt(i));
}
- mBuffersWithDucati.add((int)frameBuf,1);
+ mBuffersWithDucati.add((int)camera_buffer_get_omx_ptr(frameBuf),1);
#endif
res = fillThisBuffer(frameBuf, frameType);
}
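Note: returnFrame() above, together with the CAMERA_USE_BUFFERS_* cases below, implements a simple per-buffer reference count: queueable buffers typically start at 0 and buffers still held by the provider at 1, each delivery to a subscriber adds one, each return subtracts one, and only when the count reaches zero is the buffer re-queued through fillThisBuffer(). A minimal sketch of that lifecycle using a plain std::map instead of the HAL's KeyedVector:

    #include <cassert>
    #include <map>

    struct Buffer {};  // stand-in for CameraBuffer

    class RefCountedPool {
    public:
        void addQueueable(Buffer *b) { mRefs[b] = 0; }  // ready to be delivered
        void addHeld(Buffer *b)      { mRefs[b] = 1; }  // provider still holds it
        void onSendToSubscribers(Buffer *b, int subscribers) { mRefs[b] += subscribers; }

        // Returns true when the caller should hand the buffer back to the source,
        // i.e. the point where the real code calls fillThisBuffer().
        bool onReturn(Buffer *b) {
            int &r = mRefs[b];
            if (r > 0) --r;
            return r == 0;
        }
    private:
        std::map<Buffer *, int> mRefs;
    };

    int main() {
        Buffer b;
        RefCountedPool pool;
        pool.addQueueable(&b);
        pool.onSendToSubscribers(&b, 2);   // e.g. preview + video subscriber
        assert(!pool.onReturn(&b));        // one holder remains
        assert(pool.onReturn(&b));         // last holder returned it: re-queue
        return 0;
    }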
@@ -324,8 +405,7 @@ void BaseCameraAdapter::returnFrame(void* frameBuf, CameraFrame::FrameType frame
}
-status_t BaseCameraAdapter::sendCommand(CameraCommands operation, int value1, int value2, int value3)
-{
+status_t BaseCameraAdapter::sendCommand(CameraCommands operation, int value1, int value2, int value3, int value4) {
status_t ret = NO_ERROR;
struct timeval *refTimestamp;
BuffersDescriptor *desc = NULL;
@@ -351,19 +431,20 @@ status_t BaseCameraAdapter::sendCommand(CameraCommands operation, int value1, in
if ( ret == NO_ERROR )
{
- Mutex::Autolock lock(mPreviewBufferLock);
- mPreviewBuffers = (int *) desc->mBuffers;
+ android::AutoMutex lock(mPreviewBufferLock);
+ mPreviewBuffers = desc->mBuffers;
mPreviewBuffersLength = desc->mLength;
mPreviewBuffersAvailable.clear();
+ mSnapshotBuffersAvailable.clear();
for ( uint32_t i = 0 ; i < desc->mMaxQueueable ; i++ )
{
- mPreviewBuffersAvailable.add(mPreviewBuffers[i], 0);
+ mPreviewBuffersAvailable.add(&mPreviewBuffers[i], 0);
}
// initial ref count for undeqeueued buffers is 1 since buffer provider
// is still holding on to it
for ( uint32_t i = desc->mMaxQueueable ; i < desc->mCount ; i++ )
{
- mPreviewBuffersAvailable.add(mPreviewBuffers[i], 1);
+ mPreviewBuffersAvailable.add(&mPreviewBuffers[i], 1);
}
}
@@ -404,19 +485,19 @@ status_t BaseCameraAdapter::sendCommand(CameraCommands operation, int value1, in
if ( ret == NO_ERROR )
{
- Mutex::Autolock lock(mPreviewDataBufferLock);
- mPreviewDataBuffers = (int *) desc->mBuffers;
+ android::AutoMutex lock(mPreviewDataBufferLock);
+ mPreviewDataBuffers = desc->mBuffers;
mPreviewDataBuffersLength = desc->mLength;
mPreviewDataBuffersAvailable.clear();
for ( uint32_t i = 0 ; i < desc->mMaxQueueable ; i++ )
{
- mPreviewDataBuffersAvailable.add(mPreviewDataBuffers[i], 0);
+ mPreviewDataBuffersAvailable.add(&mPreviewDataBuffers[i], 0);
}
// initial ref count for undeqeueued buffers is 1 since buffer provider
// is still holding on to it
for ( uint32_t i = desc->mMaxQueueable ; i < desc->mCount ; i++ )
{
- mPreviewDataBuffersAvailable.add(mPreviewDataBuffers[i], 1);
+ mPreviewDataBuffersAvailable.add(&mPreviewDataBuffers[i], 1);
}
}
@@ -457,20 +538,9 @@ status_t BaseCameraAdapter::sendCommand(CameraCommands operation, int value1, in
if ( ret == NO_ERROR )
{
- Mutex::Autolock lock(mCaptureBufferLock);
- mCaptureBuffers = (int *) desc->mBuffers;
+ android::AutoMutex lock(mCaptureBufferLock);
+ mCaptureBuffers = desc->mBuffers;
mCaptureBuffersLength = desc->mLength;
- mCaptureBuffersAvailable.clear();
- for ( uint32_t i = 0 ; i < desc->mMaxQueueable ; i++ )
- {
- mCaptureBuffersAvailable.add(mCaptureBuffers[i], 0);
- }
- // initial ref count for undeqeueued buffers is 1 since buffer provider
- // is still holding on to it
- for ( uint32_t i = desc->mMaxQueueable ; i < desc->mCount ; i++ )
- {
- mCaptureBuffersAvailable.add(mCaptureBuffers[i], 1);
- }
}
if ( NULL != desc )
@@ -493,6 +563,48 @@ status_t BaseCameraAdapter::sendCommand(CameraCommands operation, int value1, in
break;
+#ifdef OMAP_ENHANCEMENT_CPCAM
+ case CameraAdapter::CAMERA_USE_BUFFERS_REPROCESS:
+ CAMHAL_LOGDA("Use buffers for reprocessing");
+ desc = (BuffersDescriptor *) value1;
+
+ if (NULL == desc) {
+ CAMHAL_LOGEA("Invalid capture buffers!");
+ return -EINVAL;
+ }
+
+ if (ret == NO_ERROR) {
+ ret = setState(operation);
+ }
+
+ if (ret == NO_ERROR) {
+ android::AutoMutex lock(mVideoInBufferLock);
+ mVideoInBuffers = desc->mBuffers;
+ mVideoInBuffersAvailable.clear();
+ for (uint32_t i = 0 ; i < desc->mMaxQueueable ; i++) {
+ mVideoInBuffersAvailable.add(&mVideoInBuffers[i], 0);
+ }
+ // initial ref count for undequeued buffers is 1 since buffer provider
+ // is still holding on to it
+ for ( uint32_t i = desc->mMaxQueueable ; i < desc->mCount ; i++ ) {
+ mVideoInBuffersAvailable.add(&mVideoInBuffers[i], 1);
+ }
+ ret = useBuffers(CameraAdapter::CAMERA_REPROCESS,
+ desc->mBuffers,
+ desc->mCount,
+ desc->mLength,
+ desc->mMaxQueueable);
+ }
+
+ if ( ret == NO_ERROR ) {
+ ret = commitState();
+ } else {
+ ret |= rollbackState();
+ }
+
+ break;
+#endif
+
case CameraAdapter::CAMERA_START_SMOOTH_ZOOM:
{
@@ -657,32 +769,6 @@ status_t BaseCameraAdapter::sendCommand(CameraCommands operation, int value1, in
}
- case CameraAdapter::CAMERA_PREVIEW_FLUSH_BUFFERS:
- {
-
- if ( ret == NO_ERROR )
- {
- ret = setState(operation);
- }
-
- if ( ret == NO_ERROR )
- {
- ret = flushBuffers();
- }
-
- if ( ret == NO_ERROR )
- {
- ret = commitState();
- }
- else
- {
- ret |= rollbackState();
- }
-
- break;
-
- }
-
case CameraAdapter::CAMERA_START_IMAGE_CAPTURE:
{
@@ -908,7 +994,7 @@ status_t BaseCameraAdapter::sendCommand(CameraCommands operation, int value1, in
if ( NULL != frame )
{
- ret = getPictureBufferSize(frame->mLength, value2);
+ ret = getPictureBufferSize(*frame, value2);
}
else
{
@@ -971,9 +1057,68 @@ status_t BaseCameraAdapter::sendCommand(CameraCommands operation, int value1, in
break;
- case CameraAdapter::CAMERA_SWITCH_TO_EXECUTING:
- ret = switchToExecuting();
- break;
+ case CameraAdapter::CAMERA_USE_BUFFERS_VIDEO_CAPTURE:
+
+ CAMHAL_LOGDA("Use buffers for video (RAW + JPEG) capture");
+ desc = ( BuffersDescriptor * ) value1;
+
+ if ( NULL == desc ) {
+ CAMHAL_LOGEA("Invalid capture buffers!");
+ return -EINVAL;
+ }
+
+ if ( ret == NO_ERROR ) {
+ ret = setState(operation);
+ }
+
+ if ( ret == NO_ERROR ) {
+ android::AutoMutex lock(mVideoBufferLock);
+ mVideoBuffers = desc->mBuffers;
+ mVideoBuffersLength = desc->mLength;
+ mVideoBuffersAvailable.clear();
+ for ( uint32_t i = 0 ; i < desc->mMaxQueueable ; i++ ) {
+ mVideoBuffersAvailable.add(&mVideoBuffers[i], 1);
+ }
+ // initial ref count for undequeued buffers is 1 since buffer provider
+ // is still holding on to it
+ for ( uint32_t i = desc->mMaxQueueable ; i < desc->mCount ; i++ ) {
+ mVideoBuffersAvailable.add(&mVideoBuffers[i], 1);
+ }
+ }
+
+ if ( NULL != desc ) {
+ ret = useBuffers(CameraAdapter::CAMERA_VIDEO,
+ desc->mBuffers,
+ desc->mCount,
+ desc->mLength,
+ desc->mMaxQueueable);
+ }
+
+ if ( ret == NO_ERROR ) {
+ ret = commitState();
+ } else {
+ ret |= rollbackState();
+ }
+
+ break;
+
+ case CameraAdapter::CAMERA_SWITCH_TO_EXECUTING:
+ ret = switchToExecuting();
+ break;
+
+#ifdef OMAP_ENHANCEMENT_VTC
+ case CameraAdapter::CAMERA_SETUP_TUNNEL:
+ ret = setupTunnel(value1, value2, value3, value4);
+ break;
+
+ case CameraAdapter::CAMERA_DESTROY_TUNNEL:
+ ret = destroyTunnel();
+ break;
+#endif
+
+ case CameraAdapter::CAMERA_PREVIEW_INITIALIZATION:
+ ret = cameraPreviewInitialization();
+ break;
default:
CAMHAL_LOGEB("Command 0x%x unsupported!", operation);
@@ -998,9 +1143,9 @@ status_t BaseCameraAdapter::notifyFocusSubscribers(CameraHalEvent::FocusStatus s
}
#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
- if (status == CameraHalEvent::FOCUS_STATUS_PENDING) {
+ if (status == CameraHalEvent::FOCUS_STATUS_PENDING) {
gettimeofday(&mStartFocus, NULL);
- } else {
+ } else {
//dump the AF latency
CameraHal::PPM("Focus finished in: ", &mStartFocus);
}
@@ -1054,14 +1199,14 @@ status_t BaseCameraAdapter::notifyShutterSubscribers()
shutterEvent.mCookie = ( void * ) mShutterSubscribers.keyAt(i);
eventCb = ( event_callback ) mShutterSubscribers.valueAt(i);
- CAMHAL_LOGDA("Sending shutter callback");
+ CAMHAL_LOGD("Sending shutter callback");
eventCb ( &shutterEvent );
}
shutterEvent.mEventData.clear();
- LOG_FUNCTION_NAME;
+ LOG_FUNCTION_NAME_EXIT;
return ret;
}
@@ -1102,35 +1247,35 @@ status_t BaseCameraAdapter::notifyZoomSubscribers(int zoomIdx, bool targetReache
return ret;
}
-status_t BaseCameraAdapter::notifyFaceSubscribers(sp<CameraFDResult> &faces)
+status_t BaseCameraAdapter::notifyMetadataSubscribers(android::sp<CameraMetadataResult> &meta)
{
event_callback eventCb;
- CameraHalEvent faceEvent;
+ CameraHalEvent metaEvent;
status_t ret = NO_ERROR;
LOG_FUNCTION_NAME;
- if ( mFaceSubscribers.size() == 0 ) {
- CAMHAL_LOGDA("No face detection subscribers!");
+ if ( mMetadataSubscribers.size() == 0 ) {
+ CAMHAL_LOGDA("No preview metadata subscribers!");
return NO_INIT;
}
- faceEvent.mEventData = new CameraHalEvent::CameraHalEventData();
- if ( NULL == faceEvent.mEventData.get() ) {
+ metaEvent.mEventData = new CameraHalEvent::CameraHalEventData();
+ if ( NULL == metaEvent.mEventData.get() ) {
return -ENOMEM;
}
- faceEvent.mEventType = CameraHalEvent::EVENT_FACE;
- faceEvent.mEventData->faceEvent = faces;
+ metaEvent.mEventType = CameraHalEvent::EVENT_METADATA;
+ metaEvent.mEventData->metadataEvent = meta;
- for (unsigned int i = 0 ; i < mFaceSubscribers.size(); i++ ) {
- faceEvent.mCookie = (void *) mFaceSubscribers.keyAt(i);
- eventCb = (event_callback) mFaceSubscribers.valueAt(i);
+ for (unsigned int i = 0 ; i < mMetadataSubscribers.size(); i++ ) {
+ metaEvent.mCookie = (void *) mMetadataSubscribers.keyAt(i);
+ eventCb = (event_callback) mMetadataSubscribers.valueAt(i);
- eventCb ( &faceEvent );
+ eventCb ( &metaEvent );
}
- faceEvent.mEventData.clear();
+ metaEvent.mEventData.clear();
LOG_FUNCTION_NAME_EXIT;
@@ -1172,7 +1317,7 @@ status_t BaseCameraAdapter::sendFrameToSubscribers(CameraFrame *frame)
break;
case CameraFrame::SNAPSHOT_FRAME:
{
- ret = __sendFrameToSubscribers(frame, &mFrameSubscribers, CameraFrame::SNAPSHOT_FRAME);
+ ret = __sendFrameToSubscribers(frame, &mSnapshotSubscribers, CameraFrame::SNAPSHOT_FRAME);
}
break;
case CameraFrame::VIDEO_FRAME_SYNC:
@@ -1185,6 +1330,11 @@ status_t BaseCameraAdapter::sendFrameToSubscribers(CameraFrame *frame)
ret = __sendFrameToSubscribers(frame, &mFrameDataSubscribers, CameraFrame::FRAME_DATA_SYNC);
}
break;
+ case CameraFrame::REPROCESS_INPUT_FRAME:
+ {
+ ret = __sendFrameToSubscribers(frame, &mVideoInSubscribers, CameraFrame::REPROCESS_INPUT_FRAME);
+ }
+ break;
default:
CAMHAL_LOGEB("FRAMETYPE NOT SUPPORTED 0x%x", mask);
break;
@@ -1202,7 +1352,7 @@ status_t BaseCameraAdapter::sendFrameToSubscribers(CameraFrame *frame)
}
status_t BaseCameraAdapter::__sendFrameToSubscribers(CameraFrame* frame,
- KeyedVector<int, frame_callback> *subscribers,
+ android::KeyedVector<int, frame_callback> *subscribers,
CameraFrame::FrameType frameType)
{
size_t refCount = 0;
@@ -1217,7 +1367,7 @@ status_t BaseCameraAdapter::__sendFrameToSubscribers(CameraFrame* frame,
if (mFrameQueue.size() > 0){
CameraFrame *lframe = (CameraFrame *)mFrameQueue.valueFor(frame->mBuffer);
frame->mYuv[0] = lframe->mYuv[0];
- frame->mYuv[1] = lframe->mYuv[1];
+ frame->mYuv[1] = frame->mYuv[0] + (frame->mLength + frame->mOffset)*2/3;
}
else{
CAMHAL_LOGDA("Empty Frame Queue");
@@ -1262,7 +1412,7 @@ status_t BaseCameraAdapter::__sendFrameToSubscribers(CameraFrame* frame,
return ret;
}
-int BaseCameraAdapter::setInitFrameRefCount(void* buf, unsigned int mask)
+int BaseCameraAdapter::setInitFrameRefCount(CameraBuffer * buf, unsigned int mask)
{
int ret = NO_ERROR;
unsigned int lmask;
@@ -1295,7 +1445,7 @@ int BaseCameraAdapter::setInitFrameRefCount(void* buf, unsigned int mask)
break;
case CameraFrame::SNAPSHOT_FRAME:
{
- setFrameRefCount(buf, CameraFrame::SNAPSHOT_FRAME, mFrameSubscribers.size());
+ setFrameRefCount(buf, CameraFrame::SNAPSHOT_FRAME, mSnapshotSubscribers.size());
}
break;
case CameraFrame::VIDEO_FRAME_SYNC:
@@ -1308,6 +1458,11 @@ int BaseCameraAdapter::setInitFrameRefCount(void* buf, unsigned int mask)
setFrameRefCount(buf, CameraFrame::FRAME_DATA_SYNC, mFrameDataSubscribers.size());
}
break;
+ case CameraFrame::REPROCESS_INPUT_FRAME:
+ {
+ setFrameRefCount(buf,CameraFrame::REPROCESS_INPUT_FRAME, mVideoInSubscribers.size());
+ }
+ break;
default:
CAMHAL_LOGEB("FRAMETYPE NOT SUPPORTED 0x%x", lmask);
break;
@@ -1319,7 +1474,7 @@ int BaseCameraAdapter::setInitFrameRefCount(void* buf, unsigned int mask)
return ret;
}
-int BaseCameraAdapter::getFrameRefCount(void* frameBuf, CameraFrame::FrameType frameType)
+int BaseCameraAdapter::getFrameRefCount(CameraBuffer * frameBuf, CameraFrame::FrameType frameType)
{
int res = -1;
@@ -1330,29 +1485,39 @@ int BaseCameraAdapter::getFrameRefCount(void* frameBuf, CameraFrame::FrameType f
case CameraFrame::IMAGE_FRAME:
case CameraFrame::RAW_FRAME:
{
- Mutex::Autolock lock(mCaptureBufferLock);
- res = mCaptureBuffersAvailable.valueFor( ( unsigned int ) frameBuf );
+ android::AutoMutex lock(mCaptureBufferLock);
+ res = mCaptureBuffersAvailable.valueFor(frameBuf );
}
break;
- case CameraFrame::PREVIEW_FRAME_SYNC:
case CameraFrame::SNAPSHOT_FRAME:
{
- Mutex::Autolock lock(mPreviewBufferLock);
- res = mPreviewBuffersAvailable.valueFor( ( unsigned int ) frameBuf );
+ android::AutoMutex lock(mSnapshotBufferLock);
+ res = mSnapshotBuffersAvailable.valueFor( ( unsigned int ) frameBuf );
+ }
+ break;
+ case CameraFrame::PREVIEW_FRAME_SYNC:
+ {
+ android::AutoMutex lock(mPreviewBufferLock);
+ res = mPreviewBuffersAvailable.valueFor(frameBuf );
}
break;
case CameraFrame::FRAME_DATA_SYNC:
{
- Mutex::Autolock lock(mPreviewDataBufferLock);
- res = mPreviewDataBuffersAvailable.valueFor( ( unsigned int ) frameBuf );
+ android::AutoMutex lock(mPreviewDataBufferLock);
+ res = mPreviewDataBuffersAvailable.valueFor(frameBuf );
}
break;
case CameraFrame::VIDEO_FRAME_SYNC:
{
- Mutex::Autolock lock(mVideoBufferLock);
- res = mVideoBuffersAvailable.valueFor( ( unsigned int ) frameBuf );
+ android::AutoMutex lock(mVideoBufferLock);
+ res = mVideoBuffersAvailable.valueFor(frameBuf );
}
break;
+ case CameraFrame::REPROCESS_INPUT_FRAME: {
+ android::AutoMutex lock(mVideoInBufferLock);
+ res = mVideoInBuffersAvailable.valueFor(frameBuf );
+ }
+ break;
default:
break;
};
@@ -1362,7 +1527,7 @@ int BaseCameraAdapter::getFrameRefCount(void* frameBuf, CameraFrame::FrameType f
return res;
}
-void BaseCameraAdapter::setFrameRefCount(void* frameBuf, CameraFrame::FrameType frameType, int refCount)
+void BaseCameraAdapter::setFrameRefCount(CameraBuffer * frameBuf, CameraFrame::FrameType frameType, int refCount)
{
LOG_FUNCTION_NAME;
@@ -1372,29 +1537,39 @@ void BaseCameraAdapter::setFrameRefCount(void* frameBuf, CameraFrame::FrameType
case CameraFrame::IMAGE_FRAME:
case CameraFrame::RAW_FRAME:
{
- Mutex::Autolock lock(mCaptureBufferLock);
- mCaptureBuffersAvailable.replaceValueFor( ( unsigned int ) frameBuf, refCount);
+ android::AutoMutex lock(mCaptureBufferLock);
+ mCaptureBuffersAvailable.replaceValueFor(frameBuf, refCount);
}
break;
- case CameraFrame::PREVIEW_FRAME_SYNC:
case CameraFrame::SNAPSHOT_FRAME:
{
- Mutex::Autolock lock(mPreviewBufferLock);
- mPreviewBuffersAvailable.replaceValueFor( ( unsigned int ) frameBuf, refCount);
+ android::AutoMutex lock(mSnapshotBufferLock);
+ mSnapshotBuffersAvailable.replaceValueFor( ( unsigned int ) frameBuf, refCount);
+ }
+ break;
+ case CameraFrame::PREVIEW_FRAME_SYNC:
+ {
+ android::AutoMutex lock(mPreviewBufferLock);
+ mPreviewBuffersAvailable.replaceValueFor(frameBuf, refCount);
}
break;
case CameraFrame::FRAME_DATA_SYNC:
{
- Mutex::Autolock lock(mPreviewDataBufferLock);
- mPreviewDataBuffersAvailable.replaceValueFor( ( unsigned int ) frameBuf, refCount);
+ android::AutoMutex lock(mPreviewDataBufferLock);
+ mPreviewDataBuffersAvailable.replaceValueFor(frameBuf, refCount);
}
break;
case CameraFrame::VIDEO_FRAME_SYNC:
{
- Mutex::Autolock lock(mVideoBufferLock);
- mVideoBuffersAvailable.replaceValueFor( ( unsigned int ) frameBuf, refCount);
+ android::AutoMutex lock(mVideoBufferLock);
+ mVideoBuffersAvailable.replaceValueFor(frameBuf, refCount);
}
break;
+ case CameraFrame::REPROCESS_INPUT_FRAME: {
+ android::AutoMutex lock(mVideoInBufferLock);
+ mVideoInBuffersAvailable.replaceValueFor(frameBuf, refCount);
+ }
+ break;
default:
break;
};
@@ -1409,7 +1584,7 @@ status_t BaseCameraAdapter::startVideoCapture()
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(mVideoBufferLock);
+ android::AutoMutex lock(mVideoBufferLock);
//If the capture is already ongoing, return from here.
if ( mRecording )
@@ -1449,7 +1624,7 @@ status_t BaseCameraAdapter::stopVideoCapture()
{
for ( unsigned int i = 0 ; i < mVideoBuffersAvailable.size() ; i++ )
{
- void *frameBuf = ( void * ) mVideoBuffersAvailable.keyAt(i);
+ CameraBuffer *frameBuf = mVideoBuffersAvailable.keyAt(i);
if( getFrameRefCount(frameBuf, CameraFrame::VIDEO_FRAME_SYNC) > 0)
{
returnFrame(frameBuf, CameraFrame::VIDEO_FRAME_SYNC);
@@ -1580,7 +1755,7 @@ status_t BaseCameraAdapter::stopPreview()
return ret;
}
-status_t BaseCameraAdapter::useBuffers(CameraMode mode, void* bufArr, int num, size_t length, unsigned int queueable)
+status_t BaseCameraAdapter::useBuffers(CameraMode mode, CameraBuffer* bufArr, int num, size_t length, unsigned int queueable)
{
status_t ret = NO_ERROR;
@@ -1591,7 +1766,7 @@ status_t BaseCameraAdapter::useBuffers(CameraMode mode, void* bufArr, int num, s
return ret;
}
-status_t BaseCameraAdapter::fillThisBuffer(void* frameBuf, CameraFrame::FrameType frameType)
+status_t BaseCameraAdapter::fillThisBuffer(CameraBuffer * frameBuf, CameraFrame::FrameType frameType)
{
status_t ret = NO_ERROR;
@@ -1624,7 +1799,7 @@ status_t BaseCameraAdapter::getFrameDataSize(size_t &dataFrameSize, size_t buffe
return ret;
}
-status_t BaseCameraAdapter::getPictureBufferSize(size_t &length, size_t bufferCount)
+status_t BaseCameraAdapter::getPictureBufferSize(CameraFrame &frame, size_t bufferCount)
{
status_t ret = NO_ERROR;
@@ -1665,12 +1840,44 @@ status_t BaseCameraAdapter::switchToExecuting()
return ret;
}
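+// Translate a HAL command/state value into its human-readable name using the supplied LUT; returns NULL when the value is not in the table.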
+const char* BaseCameraAdapter::getLUTvalue_translateHAL(int Value, LUTtypeHAL LUT) {
+ int LUTsize = LUT.size;
+ for(int i = 0; i < LUTsize; i++)
+ if( LUT.Table[i].halDefinition == Value )
+ return LUT.Table[i].userDefinition;
+
+ return NULL;
+}
+
+status_t BaseCameraAdapter::setupTunnel(uint32_t SliceHeight, uint32_t EncoderHandle, uint32_t width, uint32_t height) {
+ status_t ret = NO_ERROR;
+ LOG_FUNCTION_NAME;
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+}
+
+status_t BaseCameraAdapter::destroyTunnel() {
+ status_t ret = NO_ERROR;
+ LOG_FUNCTION_NAME;
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+}
+
+status_t BaseCameraAdapter::cameraPreviewInitialization() {
+ status_t ret = NO_ERROR;
+ LOG_FUNCTION_NAME;
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+}
+
status_t BaseCameraAdapter::setState(CameraCommands operation)
{
status_t ret = NO_ERROR;
LOG_FUNCTION_NAME;
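+ // Resolve the incoming command to a readable name once, for the state-transition logs below.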
+ const char *printState = getLUTvalue_translateHAL(operation, CamCommandsLUT);
+
mLock.lock();
switch ( mAdapterState )
@@ -1682,29 +1889,29 @@ status_t BaseCameraAdapter::setState(CameraCommands operation)
{
case CAMERA_USE_BUFFERS_PREVIEW:
- CAMHAL_LOGDB("Adapter state switch INTIALIZED_STATE->LOADED_PREVIEW_STATE event = 0x%x",
- operation);
+ CAMHAL_LOGDB("Adapter state switch INTIALIZED_STATE->LOADED_PREVIEW_STATE event = %s",
+ printState);
mNextState = LOADED_PREVIEW_STATE;
break;
//These events don't change the current state
case CAMERA_QUERY_RESOLUTION_PREVIEW:
- case CAMERA_QUERY_BUFFER_SIZE_IMAGE_CAPTURE:
case CAMERA_QUERY_BUFFER_SIZE_PREVIEW_DATA:
- CAMHAL_LOGDB("Adapter state switch INTIALIZED_STATE->INTIALIZED_STATE event = 0x%x",
- operation);
+ CAMHAL_LOGDB("Adapter state switch INTIALIZED_STATE->INTIALIZED_STATE event = %s",
+ printState);
mNextState = INTIALIZED_STATE;
break;
-
- case CAMERA_CANCEL_AUTOFOCUS:
case CAMERA_STOP_BRACKET_CAPTURE:
case CAMERA_STOP_IMAGE_CAPTURE:
ret = INVALID_OPERATION;
break;
+ case CAMERA_CANCEL_AUTOFOCUS:
+ ret = INVALID_OPERATION;
+ break;
default:
- CAMHAL_LOGEB("Adapter state switch INTIALIZED_STATE Invalid Op! event = 0x%x",
- operation);
+ CAMHAL_LOGEB("Adapter state switch INTIALIZED_STATE Invalid Op! event = %s",
+ printState);
ret = INVALID_OPERATION;
break;
@@ -1718,8 +1925,8 @@ status_t BaseCameraAdapter::setState(CameraCommands operation)
{
case CAMERA_START_PREVIEW:
- CAMHAL_LOGDB("Adapter state switch LOADED_PREVIEW_STATE->PREVIEW_STATE event = 0x%x",
- operation);
+ CAMHAL_LOGDB("Adapter state switch LOADED_PREVIEW_STATE->PREVIEW_STATE event = %s",
+ printState);
mNextState = PREVIEW_STATE;
break;
@@ -1733,14 +1940,14 @@ status_t BaseCameraAdapter::setState(CameraCommands operation)
case CAMERA_QUERY_BUFFER_SIZE_IMAGE_CAPTURE:
case CAMERA_QUERY_BUFFER_SIZE_PREVIEW_DATA:
case CAMERA_USE_BUFFERS_PREVIEW_DATA:
- CAMHAL_LOGDB("Adapter state switch LOADED_PREVIEW_STATE->LOADED_PREVIEW_STATE event = 0x%x",
- operation);
+ CAMHAL_LOGDB("Adapter state switch LOADED_PREVIEW_STATE->LOADED_PREVIEW_STATE event = %s",
+ printState);
mNextState = LOADED_PREVIEW_STATE;
break;
default:
- CAMHAL_LOGDB("Adapter state switch LOADED_PREVIEW Invalid Op! event = 0x%x",
- operation);
+ CAMHAL_LOGDB("Adapter state switch LOADED_PREVIEW Invalid Op! event = %s",
+ printState);
ret = INVALID_OPERATION;
break;
@@ -1754,46 +1961,61 @@ status_t BaseCameraAdapter::setState(CameraCommands operation)
{
case CAMERA_STOP_PREVIEW:
- CAMHAL_LOGDB("Adapter state switch PREVIEW_STATE->INTIALIZED_STATE event = 0x%x",
- operation);
+ CAMHAL_LOGDB("Adapter state switch PREVIEW_STATE->INTIALIZED_STATE event = %s",
+ printState);
mNextState = INTIALIZED_STATE;
break;
case CAMERA_PERFORM_AUTOFOCUS:
- CAMHAL_LOGDB("Adapter state switch PREVIEW_STATE->AF_STATE event = 0x%x",
- operation);
+ CAMHAL_LOGDB("Adapter state switch PREVIEW_STATE->AF_STATE event = %s",
+ printState);
mNextState = AF_STATE;
break;
case CAMERA_START_SMOOTH_ZOOM:
- CAMHAL_LOGDB("Adapter state switch PREVIEW_STATE->ZOOM_STATE event = 0x%x",
- operation);
+ CAMHAL_LOGDB("Adapter state switch PREVIEW_STATE->ZOOM_STATE event = %s",
+ printState);
mNextState = ZOOM_STATE;
break;
case CAMERA_USE_BUFFERS_IMAGE_CAPTURE:
- CAMHAL_LOGDB("Adapter state switch PREVIEW_STATE->LOADED_CAPTURE_STATE event = 0x%x",
- operation);
+ CAMHAL_LOGDB("Adapter state switch PREVIEW_STATE->LOADED_CAPTURE_STATE event = %s",
+ printState);
mNextState = LOADED_CAPTURE_STATE;
break;
+#ifdef OMAP_ENHANCEMENT_CPCAM
+ case CAMERA_USE_BUFFERS_REPROCESS:
+ CAMHAL_LOGDB("Adapter state switch PREVIEW_STATE->LOADED_REPROCESS_STATE event = %s",
+ printState);
+ mNextState = LOADED_REPROCESS_STATE;
+ break;
+#endif
+
case CAMERA_START_VIDEO:
- CAMHAL_LOGDB("Adapter state switch PREVIEW_STATE->VIDEO_STATE event = 0x%x",
- operation);
+ CAMHAL_LOGDB("Adapter state switch PREVIEW_STATE->VIDEO_STATE event = %s",
+ printState);
mNextState = VIDEO_STATE;
break;
case CAMERA_CANCEL_AUTOFOCUS:
case CAMERA_QUERY_BUFFER_SIZE_IMAGE_CAPTURE:
case CAMERA_STOP_SMOOTH_ZOOM:
- CAMHAL_LOGDB("Adapter state switch PREVIEW_ACTIVE->PREVIEW_ACTIVE event = 0x%x",
- operation);
+ CAMHAL_LOGDB("Adapter state switch PREVIEW_ACTIVE->PREVIEW_ACTIVE event = %s",
+ printState);
mNextState = PREVIEW_STATE;
break;
+ case CAMERA_STOP_IMAGE_CAPTURE:
+ case CAMERA_STOP_BRACKET_CAPTURE:
+ CAMHAL_LOGDB("Adapter state switch PREVIEW_ACTIVE->PREVIEW_ACTIVE event = %s",
+ printState);
+ ret = INVALID_OPERATION;
+ break;
+
default:
- CAMHAL_LOGEB("Adapter state switch PREVIEW_ACTIVE Invalid Op! event = 0x%x",
- operation);
+ CAMHAL_LOGEB("Adapter state switch PREVIEW_ACTIVE Invalid Op! event = %s",
+ printState);
ret = INVALID_OPERATION;
break;
@@ -1801,26 +2023,72 @@ status_t BaseCameraAdapter::setState(CameraCommands operation)
break;
+#ifdef OMAP_ENHANCEMENT_CPCAM
+ case LOADED_REPROCESS_STATE:
+ switch (operation) {
+ case CAMERA_USE_BUFFERS_IMAGE_CAPTURE:
+ CAMHAL_LOGDB("Adapter state switch LOADED_REPROCESS_STATE->LOADED_REPROCESS_CAPTURE_STATE event = %s",
+ printState);
+ mNextState = LOADED_REPROCESS_CAPTURE_STATE;
+ break;
+ case CAMERA_QUERY_BUFFER_SIZE_IMAGE_CAPTURE:
+ CAMHAL_LOGDB("Adapter state switch LOADED_REPROCESS_STATE->LOADED_REPROCESS_STATE event = %s",
+ printState);
+ mNextState = LOADED_REPROCESS_STATE;
+ break;
+ default:
+ CAMHAL_LOGEB("Adapter state switch LOADED_REPROCESS_STATE Invalid Op! event = %s",
+ printState);
+ ret = INVALID_OPERATION;
+ break;
+ }
+
+ break;
+
+ case LOADED_REPROCESS_CAPTURE_STATE:
+ switch (operation) {
+ case CAMERA_START_IMAGE_CAPTURE:
+ CAMHAL_LOGDB("Adapter state switch LOADED_REPROCESS_CAPTURE_STATE->REPROCESS_STATE event = %s",
+ printState);
+ mNextState = REPROCESS_STATE;
+ break;
+ default:
+ CAMHAL_LOGEB("Adapter state switch LOADED_REPROCESS_CAPTURE_STATE Invalid Op! event = %s",
+ printState);
+ ret = INVALID_OPERATION;
+ break;
+ }
+ break;
+#endif
+
case LOADED_CAPTURE_STATE:
switch ( operation )
{
case CAMERA_START_IMAGE_CAPTURE:
- CAMHAL_LOGDB("Adapter state switch LOADED_CAPTURE_STATE->CAPTURE_STATE event = 0x%x",
- operation);
+ CAMHAL_LOGDB("Adapter state switch LOADED_CAPTURE_STATE->CAPTURE_STATE event = %s",
+ printState);
mNextState = CAPTURE_STATE;
break;
case CAMERA_START_BRACKET_CAPTURE:
- CAMHAL_LOGDB("Adapter state switch LOADED_CAPTURE_STATE->BRACKETING_STATE event = 0x%x",
- operation);
+ CAMHAL_LOGDB("Adapter state switch LOADED_CAPTURE_STATE->BRACKETING_STATE event = %s",
+ printState);
mNextState = BRACKETING_STATE;
break;
+ case CAMERA_USE_BUFFERS_VIDEO_CAPTURE:
+ //Handle this state for the raw capture path.
+ //Just need to keep the same state.
+ //The next CAMERA_START_IMAGE_CAPTURE command will assign mNextState.
+ CAMHAL_LOGDB("Adapter state switch LOADED_CAPTURE_STATE->LOADED_CAPTURE_STATE event = %s",
+ printState);
+ break;
+
default:
- CAMHAL_LOGEB("Adapter state switch LOADED_CAPTURE_STATE Invalid Op! event = 0x%x",
- operation);
+ CAMHAL_LOGEB("Adapter state switch LOADED_CAPTURE_STATE Invalid Op! event = %s",
+ printState);
ret = INVALID_OPERATION;
break;
@@ -1834,14 +2102,29 @@ status_t BaseCameraAdapter::setState(CameraCommands operation)
{
case CAMERA_STOP_IMAGE_CAPTURE:
case CAMERA_STOP_BRACKET_CAPTURE:
- CAMHAL_LOGDB("Adapter state switch CAPTURE_STATE->PREVIEW_STATE event = 0x%x",
- operation);
+ CAMHAL_LOGDB("Adapter state switch CAPTURE_STATE->PREVIEW_STATE event = %s",
+ printState);
mNextState = PREVIEW_STATE;
break;
+ case CAMERA_QUERY_BUFFER_SIZE_IMAGE_CAPTURE:
+ case CAMERA_START_IMAGE_CAPTURE:
+ CAMHAL_LOGDB("Adapter state switch CAPTURE_STATE->CAPTURE_STATE event = %s",
+ printState);
+ mNextState = CAPTURE_STATE;
+ break;
+
+#ifdef OMAP_ENHANCEMENT_CPCAM
+ case CAMERA_USE_BUFFERS_REPROCESS:
+ CAMHAL_LOGDB("Adapter state switch CAPTURE_STATE->->LOADED_REPROCESS_STATE event = %s",
+ printState);
+ mNextState = LOADED_REPROCESS_STATE;
+ break;
+#endif
+
default:
- CAMHAL_LOGEB("Adapter state switch CAPTURE_STATE Invalid Op! event = 0x%x",
- operation);
+ CAMHAL_LOGEB("Adapter state switch CAPTURE_STATE Invalid Op! event = %s",
+ printState);
ret = INVALID_OPERATION;
break;
@@ -1856,20 +2139,20 @@ status_t BaseCameraAdapter::setState(CameraCommands operation)
case CAMERA_STOP_IMAGE_CAPTURE:
case CAMERA_STOP_BRACKET_CAPTURE:
- CAMHAL_LOGDB("Adapter state switch BRACKETING_STATE->PREVIEW_STATE event = 0x%x",
- operation);
+ CAMHAL_LOGDB("Adapter state switch BRACKETING_STATE->PREVIEW_STATE event = %s",
+ printState);
mNextState = PREVIEW_STATE;
break;
case CAMERA_START_IMAGE_CAPTURE:
- CAMHAL_LOGDB("Adapter state switch BRACKETING_STATE->CAPTURE_STATE event = 0x%x",
- operation);
+ CAMHAL_LOGDB("Adapter state switch BRACKETING_STATE->CAPTURE_STATE event = %s",
+ printState);
mNextState = CAPTURE_STATE;
break;
default:
- CAMHAL_LOGEB("Adapter state switch BRACKETING_STATE Invalid Op! event = 0x%x",
- operation);
+ CAMHAL_LOGEB("Adapter state switch BRACKETING_STATE Invalid Op! event = %s",
+ printState);
ret = INVALID_OPERATION;
break;
@@ -1883,20 +2166,20 @@ status_t BaseCameraAdapter::setState(CameraCommands operation)
{
case CAMERA_CANCEL_AUTOFOCUS:
- CAMHAL_LOGDB("Adapter state switch AF_STATE->PREVIEW_STATE event = 0x%x",
- operation);
+ CAMHAL_LOGDB("Adapter state switch AF_STATE->PREVIEW_STATE event = %s",
+ printState);
mNextState = PREVIEW_STATE;
break;
case CAMERA_START_SMOOTH_ZOOM:
- CAMHAL_LOGDB("Adapter state switch AF_STATE->AF_ZOOM_STATE event = 0x%x",
- operation);
+ CAMHAL_LOGDB("Adapter state switch AF_STATE->AF_ZOOM_STATE event = %s",
+ printState);
mNextState = AF_ZOOM_STATE;
break;
default:
- CAMHAL_LOGEB("Adapter state switch AF_STATE Invalid Op! event = 0x%x",
- operation);
+ CAMHAL_LOGEB("Adapter state switch AF_STATE Invalid Op! event = %s",
+ printState);
ret = INVALID_OPERATION;
break;
@@ -1910,32 +2193,32 @@ status_t BaseCameraAdapter::setState(CameraCommands operation)
{
case CAMERA_CANCEL_AUTOFOCUS:
- CAMHAL_LOGDB("Adapter state switch AF_STATE->PREVIEW_STATE event = 0x%x",
- operation);
+ CAMHAL_LOGDB("Adapter state switch AF_STATE->PREVIEW_STATE event = %s",
+ printState);
mNextState = ZOOM_STATE;
break;
case CAMERA_STOP_SMOOTH_ZOOM:
- CAMHAL_LOGDB("Adapter state switch ZOOM_STATE->PREVIEW_STATE event = 0x%x",
- operation);
+ CAMHAL_LOGDB("Adapter state switch ZOOM_STATE->PREVIEW_STATE event = %s",
+ printState);
mNextState = PREVIEW_STATE;
break;
case CAMERA_PERFORM_AUTOFOCUS:
- CAMHAL_LOGDB("Adapter state switch ZOOM_STATE->AF_ZOOM_STATE event = 0x%x",
- operation);
+ CAMHAL_LOGDB("Adapter state switch ZOOM_STATE->AF_ZOOM_STATE event = %s",
+ printState);
mNextState = AF_ZOOM_STATE;
break;
case CAMERA_START_VIDEO:
- CAMHAL_LOGDB("Adapter state switch ZOOM_STATE->VIDEO_ZOOM_STATE event = 0x%x",
- operation);
+ CAMHAL_LOGDB("Adapter state switch ZOOM_STATE->VIDEO_ZOOM_STATE event = %s",
+ printState);
mNextState = VIDEO_ZOOM_STATE;
break;
default:
- CAMHAL_LOGEB("Adapter state switch ZOOM_STATE Invalid Op! event = 0x%x",
- operation);
+ CAMHAL_LOGEB("Adapter state switch ZOOM_STATE Invalid Op! event = %s",
+ printState);
ret = INVALID_OPERATION;
break;
@@ -1949,38 +2232,38 @@ status_t BaseCameraAdapter::setState(CameraCommands operation)
{
case CAMERA_STOP_VIDEO:
- CAMHAL_LOGDB("Adapter state switch VIDEO_STATE->PREVIEW_STATE event = 0x%x",
- operation);
+ CAMHAL_LOGDB("Adapter state switch VIDEO_STATE->PREVIEW_STATE event = %s",
+ printState);
mNextState = PREVIEW_STATE;
break;
case CAMERA_PERFORM_AUTOFOCUS:
- CAMHAL_LOGDB("Adapter state switch VIDEO_STATE->VIDEO_AF_STATE event = 0x%x",
- operation);
+ CAMHAL_LOGDB("Adapter state switch VIDEO_STATE->VIDEO_AF_STATE event = %s",
+ printState);
mNextState = VIDEO_AF_STATE;
break;
case CAMERA_START_SMOOTH_ZOOM:
- CAMHAL_LOGDB("Adapter state switch VIDEO_STATE->VIDEO_ZOOM_STATE event = 0x%x",
- operation);
+ CAMHAL_LOGDB("Adapter state switch VIDEO_STATE->VIDEO_ZOOM_STATE event = %s",
+ printState);
mNextState = VIDEO_ZOOM_STATE;
break;
case CAMERA_USE_BUFFERS_IMAGE_CAPTURE:
- CAMHAL_LOGDB("Adapter state switch VIDEO_STATE->VIDEO_LOADED_CAPTURE_STATE event = 0x%x",
- operation);
+ CAMHAL_LOGDB("Adapter state switch VIDEO_STATE->VIDEO_LOADED_CAPTURE_STATE event = %s",
+ printState);
mNextState = VIDEO_LOADED_CAPTURE_STATE;
break;
case CAMERA_QUERY_BUFFER_SIZE_IMAGE_CAPTURE:
- CAMHAL_LOGDB("Adapter state switch VIDEO_STATE->VIDEO_STATE event = 0x%x",
- operation);
+ CAMHAL_LOGDB("Adapter state switch VIDEO_STATE->VIDEO_STATE event = %s",
+ printState);
mNextState = VIDEO_STATE;
break;
default:
- CAMHAL_LOGEB("Adapter state switch VIDEO_STATE Invalid Op! event = 0x%x",
- operation);
+ CAMHAL_LOGEB("Adapter state switch VIDEO_STATE Invalid Op! event = %s",
+ printState);
ret = INVALID_OPERATION;
break;
@@ -1994,14 +2277,14 @@ status_t BaseCameraAdapter::setState(CameraCommands operation)
{
case CAMERA_CANCEL_AUTOFOCUS:
- CAMHAL_LOGDB("Adapter state switch VIDEO_AF_STATE->VIDEO_STATE event = 0x%x",
- operation);
+ CAMHAL_LOGDB("Adapter state switch VIDEO_AF_STATE->VIDEO_STATE event = %s",
+ printState);
mNextState = VIDEO_STATE;
break;
default:
- CAMHAL_LOGEB("Adapter state switch VIDEO_AF_STATE Invalid Op! event = 0x%x",
- operation);
+ CAMHAL_LOGEB("Adapter state switch VIDEO_AF_STATE Invalid Op! event = %s",
+ printState);
ret = INVALID_OPERATION;
break;
@@ -2015,14 +2298,14 @@ status_t BaseCameraAdapter::setState(CameraCommands operation)
{
case CAMERA_START_IMAGE_CAPTURE:
- CAMHAL_LOGDB("Adapter state switch LOADED_CAPTURE_STATE->CAPTURE_STATE event = 0x%x",
- operation);
+ CAMHAL_LOGDB("Adapter state switch LOADED_CAPTURE_STATE->CAPTURE_STATE event = %s",
+ printState);
mNextState = VIDEO_CAPTURE_STATE;
break;
default:
- CAMHAL_LOGEB("Adapter state switch LOADED_CAPTURE_STATE Invalid Op! event = 0x%x",
- operation);
+ CAMHAL_LOGEB("Adapter state switch LOADED_CAPTURE_STATE Invalid Op! event = %s",
+ printState);
ret = INVALID_OPERATION;
break;
@@ -2035,14 +2318,14 @@ status_t BaseCameraAdapter::setState(CameraCommands operation)
switch ( operation )
{
case CAMERA_STOP_IMAGE_CAPTURE:
- CAMHAL_LOGDB("Adapter state switch CAPTURE_STATE->PREVIEW_STATE event = 0x%x",
- operation);
+ CAMHAL_LOGDB("Adapter state switch CAPTURE_STATE->PREVIEW_STATE event = %s",
+ printState);
mNextState = VIDEO_STATE;
break;
default:
- CAMHAL_LOGEB("Adapter state switch CAPTURE_STATE Invalid Op! event = 0x%x",
- operation);
+ CAMHAL_LOGEB("Adapter state switch CAPTURE_STATE Invalid Op! event = %s",
+ printState);
ret = INVALID_OPERATION;
break;
@@ -2056,20 +2339,20 @@ status_t BaseCameraAdapter::setState(CameraCommands operation)
{
case CAMERA_STOP_SMOOTH_ZOOM:
- CAMHAL_LOGDB("Adapter state switch AF_ZOOM_STATE->AF_STATE event = 0x%x",
- operation);
+ CAMHAL_LOGDB("Adapter state switch AF_ZOOM_STATE->AF_STATE event = %s",
+ printState);
mNextState = AF_STATE;
break;
case CAMERA_CANCEL_AUTOFOCUS:
- CAMHAL_LOGDB("Adapter state switch AF_ZOOM_STATE->ZOOM_STATE event = 0x%x",
- operation);
+ CAMHAL_LOGDB("Adapter state switch AF_ZOOM_STATE->ZOOM_STATE event = %s",
+ printState);
mNextState = ZOOM_STATE;
break;
default:
- CAMHAL_LOGEB("Adapter state switch AF_ZOOM_STATE Invalid Op! event = 0x%x",
- operation);
+ CAMHAL_LOGEB("Adapter state switch AF_ZOOM_STATE Invalid Op! event = %s",
+ printState);
ret = INVALID_OPERATION;
break;
@@ -2083,20 +2366,20 @@ status_t BaseCameraAdapter::setState(CameraCommands operation)
{
case CAMERA_STOP_SMOOTH_ZOOM:
- CAMHAL_LOGDB("Adapter state switch VIDEO_ZOOM_STATE->VIDEO_STATE event = 0x%x",
- operation);
+ CAMHAL_LOGDB("Adapter state switch VIDEO_ZOOM_STATE->VIDEO_STATE event = %s",
+ printState);
mNextState = VIDEO_STATE;
break;
case CAMERA_STOP_VIDEO:
- CAMHAL_LOGDB("Adapter state switch VIDEO_ZOOM_STATE->ZOOM_STATE event = 0x%x",
- operation);
+ CAMHAL_LOGDB("Adapter state switch VIDEO_ZOOM_STATE->ZOOM_STATE event = %s",
+ printState);
mNextState = ZOOM_STATE;
break;
default:
- CAMHAL_LOGEB("Adapter state switch VIDEO_ZOOM_STATE Invalid Op! event = 0x%x",
- operation);
+ CAMHAL_LOGEB("Adapter state switch VIDEO_ZOOM_STATE Invalid Op! event = %s",
+ printState);
ret = INVALID_OPERATION;
break;
@@ -2110,20 +2393,57 @@ status_t BaseCameraAdapter::setState(CameraCommands operation)
{
case CAMERA_STOP_SMOOTH_ZOOM:
- CAMHAL_LOGDB("Adapter state switch BRACKETING_ZOOM_STATE->BRACKETING_STATE event = 0x%x",
- operation);
+ CAMHAL_LOGDB("Adapter state switch BRACKETING_ZOOM_STATE->BRACKETING_STATE event = %s",
+ printState);
mNextState = BRACKETING_STATE;
break;
default:
- CAMHAL_LOGEB("Adapter state switch BRACKETING_ZOOM_STATE Invalid Op! event = 0x%x",
- operation);
+ CAMHAL_LOGEB("Adapter state switch BRACKETING_ZOOM_STATE Invalid Op! event = %s",
+ printState);
+ ret = INVALID_OPERATION;
+ break;
+
+ }
+
+ break;
+
+#ifdef OMAP_ENHANCEMENT_CPCAM
+ case REPROCESS_STATE:
+ switch (operation) {
+ case CAMERA_STOP_IMAGE_CAPTURE:
+ CAMHAL_LOGDB("Adapter state switch REPROCESS_STATE->PREVIEW_STATE event = %s",
+ printState);
+ mNextState = PREVIEW_STATE;
+ break;
+ case CAMERA_QUERY_BUFFER_SIZE_IMAGE_CAPTURE:
+ case CAMERA_START_IMAGE_CAPTURE:
+ CAMHAL_LOGDB("Adapter state switch REPROCESS_STATE->REPROCESS_STATE event = %s",
+ printState);
+ mNextState = REPROCESS_STATE;
+ break;
+ case CAMERA_USE_BUFFERS_REPROCESS:
+ CAMHAL_LOGDB("Adapter state switch REPROCESS_STATE->REPROCESS_STATE event = %s",
+ printState);
+ mNextState = LOADED_REPROCESS_STATE;
+ break;
+
+ case CAMERA_USE_BUFFERS_IMAGE_CAPTURE:
+ CAMHAL_LOGDB("Adapter state switch REPROCESS_STATE->LOADED_CAPTURE_STATE event = %s",
+ printState);
+ mNextState = LOADED_CAPTURE_STATE;
+ break;
+ default:
+ CAMHAL_LOGEB("Adapter state switch REPROCESS_STATE Invalid Op! event = %s",
+ printState);
ret = INVALID_OPERATION;
break;
}
break;
+#endif
+
default:
CAMHAL_LOGEA("Invalid Adapter state!");
@@ -2167,6 +2487,9 @@ status_t BaseCameraAdapter::rollbackToPreviousState()
break;
case CAPTURE_STATE:
+#ifdef OMAP_ENHANCEMENT_CPCAM
+ case REPROCESS_STATE:
+#endif
ret = sendCommand(CAMERA_STOP_IMAGE_CAPTURE);
break;
@@ -2257,7 +2580,7 @@ CameraAdapter::AdapterState BaseCameraAdapter::getState()
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
LOG_FUNCTION_NAME_EXIT;
@@ -2270,7 +2593,7 @@ CameraAdapter::AdapterState BaseCameraAdapter::getNextState()
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
LOG_FUNCTION_NAME_EXIT;
@@ -2313,11 +2636,57 @@ void BaseCameraAdapter::onOrientationEvent(uint32_t orientation, uint32_t tilt)
LOG_FUNCTION_NAME;
LOG_FUNCTION_NAME_EXIT;
}
+
//-----------------------------------------------------------------------------
+extern "C" status_t OMXCameraAdapter_Capabilities(
+ CameraProperties::Properties * const properties_array,
+ const int starting_camera, const int max_camera, int & supportedCameras);
+extern "C" status_t V4LCameraAdapter_Capabilities(
+ CameraProperties::Properties * const properties_array,
+ const int starting_camera, const int max_camera, int & supportedCameras);
+extern "C" status_t CameraAdapter_Capabilities(
+ CameraProperties::Properties * const properties_array,
+ const int starting_camera, const int max_camera, int & supportedCameras)
+{
-};
+ status_t ret = NO_ERROR;
+ status_t err = NO_ERROR;
+ int num_cameras_supported = 0;
+
+ LOG_FUNCTION_NAME;
+
+ supportedCameras = 0;
+#ifdef OMX_CAMERA_ADAPTER
+ //Query OMX cameras
+ err = OMXCameraAdapter_Capabilities( properties_array, starting_camera,
+ max_camera, supportedCameras);
+ if(err != NO_ERROR) {
+ CAMHAL_LOGEA("error while getting OMXCameraAdapter capabilities");
+ ret = UNKNOWN_ERROR;
+ }
+#endif
+#ifdef V4L_CAMERA_ADAPTER
+ //Query V4L cameras
+ err = V4LCameraAdapter_Capabilities( properties_array, (const int) supportedCameras,
+ max_camera, num_cameras_supported);
+ if(err != NO_ERROR) {
+ CAMHAL_LOGEA("error while getting V4LCameraAdapter capabilities");
+ ret = UNKNOWN_ERROR;
+ }
+#endif
+
+ supportedCameras += num_cameras_supported;
+ CAMHAL_LOGEB("supportedCameras= %d\n", supportedCameras);
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+}
+
+//-----------------------------------------------------------------------------
+
+} // namespace Camera
+} // namespace Ti
/*--------------------Camera Adapter Class ENDS here-----------------------------*/
diff --git a/camera/BufferSourceAdapter.cpp b/camera/BufferSourceAdapter.cpp
new file mode 100644
index 0000000..97a7809
--- /dev/null
+++ b/camera/BufferSourceAdapter.cpp
@@ -0,0 +1,1001 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifdef OMAP_ENHANCEMENT_CPCAM
+
+#include "BufferSourceAdapter.h"
+#include <ui/GraphicBuffer.h>
+#include <ui/GraphicBufferMapper.h>
+#include <hal_public.h>
+
+namespace Ti {
+namespace Camera {
+
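+// Map a CameraParameters pixel-format string onto the matching TI gralloc/ANativeWindow format; NULL or unrecognized strings fall back to NV12.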
+static int getANWFormat(const char* parameters_format)
+{
+ int format = HAL_PIXEL_FORMAT_TI_NV12;
+
+ if (parameters_format != NULL) {
+ if (strcmp(parameters_format, android::CameraParameters::PIXEL_FORMAT_YUV422I) == 0) {
+ CAMHAL_LOGDA("CbYCrY format selected");
+ format = HAL_PIXEL_FORMAT_TI_UYVY;
+ } else if (strcmp(parameters_format, android::CameraParameters::PIXEL_FORMAT_YUV420SP) == 0) {
+ CAMHAL_LOGDA("YUV420SP format selected");
+ format = HAL_PIXEL_FORMAT_TI_NV12;
+ } else if (strcmp(parameters_format, android::CameraParameters::PIXEL_FORMAT_RGB565) == 0) {
+ CAMHAL_LOGDA("RGB565 format selected");
+ // TODO(XXX): not defined yet
+ format = -1;
+ } else if (strcmp(parameters_format, android::CameraParameters::PIXEL_FORMAT_BAYER_RGGB) == 0) {
+ format = HAL_PIXEL_FORMAT_TI_Y16;
+ } else {
+ CAMHAL_LOGDA("Invalid format, NV12 format selected as default");
+ format = HAL_PIXEL_FORMAT_TI_NV12;
+ }
+ }
+
+ return format;
+}
+
+static int getUsageFromANW(int format)
+{
+ int usage = GRALLOC_USAGE_SW_READ_RARELY |
+ GRALLOC_USAGE_SW_WRITE_NEVER;
+
+ switch (format) {
+ case HAL_PIXEL_FORMAT_TI_NV12:
+ case HAL_PIXEL_FORMAT_TI_Y16:
+ // This usage flag indicates to gralloc we want the
+ // buffers to come from system heap
+ usage |= GRALLOC_USAGE_PRIVATE_0;
+ break;
+ default:
+ // No special flags needed
+ break;
+ }
+ return usage;
+}
+
+static const char* getFormatFromANW(int format)
+{
+ switch (format) {
+ case HAL_PIXEL_FORMAT_TI_NV12:
+ // Assuming NV12 1D is RAW or Image frame
+ return android::CameraParameters::PIXEL_FORMAT_YUV420SP;
+ case HAL_PIXEL_FORMAT_TI_Y16:
+ return android::CameraParameters::PIXEL_FORMAT_BAYER_RGGB;
+ case HAL_PIXEL_FORMAT_TI_UYVY:
+ return android::CameraParameters::PIXEL_FORMAT_YUV422I;
+ default:
+ break;
+ }
+ return android::CameraParameters::PIXEL_FORMAT_YUV420SP;
+}
+
+static CameraFrame::FrameType formatToOutputFrameType(const char* format) {
+ switch (getANWFormat(format)) {
+ case HAL_PIXEL_FORMAT_TI_NV12:
+ case HAL_PIXEL_FORMAT_TI_Y16:
+ case HAL_PIXEL_FORMAT_TI_UYVY:
+ // Assuming NV12 1D is RAW or Image frame
+ return CameraFrame::RAW_FRAME;
+ default:
+ break;
+ }
+ return CameraFrame::RAW_FRAME;
+}
+
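+// Derive the buffer height from the total allocation size and row stride:
+// NV12 carries 12 bits per pixel (size = stride * height * 3/2), while Y16 and
+// UYVY carry 16 bits per pixel (size = stride * height * 2).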
+static int getHeightFromFormat(const char* format, int stride, int size) {
+ CAMHAL_ASSERT((NULL != format) && (0 <= stride) && (0 <= size));
+ switch (getANWFormat(format)) {
+ case HAL_PIXEL_FORMAT_TI_NV12:
+ return (size / (3 * stride)) * 2;
+ case HAL_PIXEL_FORMAT_TI_Y16:
+ case HAL_PIXEL_FORMAT_TI_UYVY:
+ return (size / stride) / 2;
+ default:
+ break;
+ }
+ return 0;
+}
+
+/*--------------------BufferSourceAdapter Class STARTS here-----------------------------*/
+
+
+///Constant definitions
+// TODO(XXX): Temporarily increase number of buffers we can allocate from ANW
+// until faux-NPA mode is implemented
+const int BufferSourceAdapter::NO_BUFFERS_IMAGE_CAPTURE_SYSTEM_HEAP = 15;
+
+/**
+ * Display Adapter class STARTS here..
+ */
+BufferSourceAdapter::BufferSourceAdapter() : mBufferCount(0)
+{
+ LOG_FUNCTION_NAME;
+
+ mPixelFormat = NULL;
+ mBuffers = NULL;
+ mFrameProvider = NULL;
+ mBufferSource = NULL;
+
+ mFrameWidth = 0;
+ mFrameHeight = 0;
+ mPreviewWidth = 0;
+ mPreviewHeight = 0;
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+BufferSourceAdapter::~BufferSourceAdapter()
+{
+ LOG_FUNCTION_NAME;
+
+ freeBufferList(mBuffers);
+
+ android::AutoMutex lock(mLock);
+
+ destroy();
+
+ if (mFrameProvider) {
+ // Unregister with the frame provider
+ mFrameProvider->disableFrameNotification(CameraFrame::ALL_FRAMES);
+ delete mFrameProvider;
+ mFrameProvider = NULL;
+ }
+
+ if (mQueueFrame.get()) {
+ mQueueFrame->requestExit();
+ mQueueFrame.clear();
+ }
+
+ if (mReturnFrame.get()) {
+ mReturnFrame->requestExit();
+ mReturnFrame.clear();
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+status_t BufferSourceAdapter::initialize()
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ mReturnFrame.clear();
+ mReturnFrame = new ReturnFrame(this);
+ mReturnFrame->run();
+
+ mQueueFrame.clear();
+ mQueueFrame = new QueueFrame(this);
+ mQueueFrame->run();
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+int BufferSourceAdapter::setPreviewWindow(preview_stream_ops_t *source)
+{
+ LOG_FUNCTION_NAME;
+
+ if (!source) {
+ CAMHAL_LOGEA("NULL window object passed to DisplayAdapter");
+ LOG_FUNCTION_NAME_EXIT;
+ return BAD_VALUE;
+ }
+
+ if (mBufferSource) {
+ char id1[OP_STR_SIZE], id2[OP_STR_SIZE];
+ status_t ret;
+
+ ret = extendedOps()->get_id(mBufferSource, id1, sizeof(id1));
+ if (ret != 0) {
+ CAMHAL_LOGE("Surface::getId returned error %d", ret);
+ return ret;
+ }
+
+ ret = extendedOps()->get_id(source, id2, sizeof(id2));
+ if (ret != 0) {
+ CAMHAL_LOGE("Surface::getId returned error %d", ret);
+ return ret;
+ }
+ if ((0 >= strlen(id1)) || (0 >= strlen(id2))) {
+ CAMHAL_LOGE("Cannot set ST without name: id1:\"%s\" id2:\"%s\"",
+ id1, id2);
+ return NOT_ENOUGH_DATA;
+ }
+ if (0 == strcmp(id1, id2)) {
+ return ALREADY_EXISTS;
+ }
+
+ // client has to unset mBufferSource before being able to set a new one
+ return BAD_VALUE;
+ }
+
+ // Move to new source obj
+ mBufferSource = source;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return NO_ERROR;
+}
+
+bool BufferSourceAdapter::match(const char * str) {
+ char id1[OP_STR_SIZE];
+ status_t ret;
+
+ ret = extendedOps()->get_id(mBufferSource, id1, sizeof(id1));
+
+ if (ret != 0) {
+ CAMHAL_LOGE("Surface::getId returned error %d", ret);
+ }
+
+ return strcmp(id1, str) == 0;
+}
+
+int BufferSourceAdapter::setFrameProvider(FrameNotifier *frameProvider)
+{
+ LOG_FUNCTION_NAME;
+
+ if ( !frameProvider ) {
+ CAMHAL_LOGEA("NULL passed for frame provider");
+ LOG_FUNCTION_NAME_EXIT;
+ return BAD_VALUE;
+ }
+
+ if ( NULL != mFrameProvider ) {
+ delete mFrameProvider;
+ }
+
+ mFrameProvider = new FrameProvider(frameProvider, this, frameCallback);
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return NO_ERROR;
+}
+
+int BufferSourceAdapter::setErrorHandler(ErrorNotifier *errorNotifier)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ if ( NULL == errorNotifier ) {
+ CAMHAL_LOGEA("Invalid Error Notifier reference");
+ return -EINVAL;
+ }
+
+ mErrorNotifier = errorNotifier;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+int BufferSourceAdapter::enableDisplay(int width, int height,
+ struct timeval *refTime)
+{
+ LOG_FUNCTION_NAME;
+ CameraFrame::FrameType frameType;
+
+ if (mFrameProvider == NULL) {
+ // no-op: frame provider not set yet
+ return NO_ERROR;
+ }
+
+ if (mBufferSourceDirection == BUFFER_SOURCE_TAP_IN) {
+ // only supporting one type of input frame
+ frameType = CameraFrame::REPROCESS_INPUT_FRAME;
+ } else {
+ frameType = formatToOutputFrameType(mPixelFormat);
+ }
+
+ mFrameProvider->enableFrameNotification(frameType);
+ LOG_FUNCTION_NAME_EXIT;
+
+ return NO_ERROR;
+}
+
+int BufferSourceAdapter::disableDisplay(bool cancel_buffer)
+{
+ LOG_FUNCTION_NAME;
+
+ if (mFrameProvider) mFrameProvider->disableFrameNotification(CameraFrame::ALL_FRAMES);
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return NO_ERROR;
+}
+
+status_t BufferSourceAdapter::pauseDisplay(bool pause)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ // no-op for BufferSourceAdapter
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+
+void BufferSourceAdapter::destroy()
+{
+ LOG_FUNCTION_NAME;
+
+ mBufferCount = 0;
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
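+// Tap-out path: configure the buffer source (usage, count, geometry), dequeue and map all buffers, then cancel the minimum-undequeued set back to the window.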
+CameraBuffer* BufferSourceAdapter::allocateBufferList(int width, int dummyHeight, const char* format,
+ int &bytes, int numBufs)
+{
+ LOG_FUNCTION_NAME;
+ status_t err;
+ int i = -1;
+ const int lnumBufs = numBufs;
+ int undequeued = 0;
+ android::GraphicBufferMapper &mapper = android::GraphicBufferMapper::get();
+
+ mBuffers = new CameraBuffer [lnumBufs];
+ memset (mBuffers, 0, sizeof(CameraBuffer) * lnumBufs);
+
+ if ( NULL == mBufferSource ) {
+ return NULL;
+ }
+
+ int pixFormat = getANWFormat(format);
+ int usage = getUsageFromANW(pixFormat);
+ mPixelFormat = CameraHal::getPixelFormatConstant(format);
+
+ // Set gralloc usage bits for window.
+ err = mBufferSource->set_usage(mBufferSource, usage);
+ if (err != 0) {
+ CAMHAL_LOGE("native_window_set_usage failed: %s (%d)", strerror(-err), -err);
+
+ if ( ENODEV == err ) {
+ CAMHAL_LOGEA("Preview surface abandoned!");
+ mBufferSource = NULL;
+ }
+
+ return NULL;
+ }
+
+ CAMHAL_LOGDB("Number of buffers set to BufferSourceAdapter %d", numBufs);
+ // Set the number of buffers needed for this buffer source
+ err = mBufferSource->set_buffer_count(mBufferSource, numBufs);
+ if (err != 0) {
+ CAMHAL_LOGE("native_window_set_buffer_count failed: %s (%d)", strerror(-err), -err);
+
+ if ( ENODEV == err ) {
+ CAMHAL_LOGEA("Preview surface abandoned!");
+ mBufferSource = NULL;
+ }
+
+ return NULL;
+ }
+
+ CAMHAL_LOGDB("Configuring %d buffers for ANativeWindow", numBufs);
+ mBufferCount = numBufs;
+
+ // re-calculate height depending on stride and size
+ int height = getHeightFromFormat(format, width, bytes);
+
+ // Set window geometry
+ err = mBufferSource->set_buffers_geometry(mBufferSource,
+ width, height,
+ pixFormat);
+
+ if (err != 0) {
+ CAMHAL_LOGE("native_window_set_buffers_geometry failed: %s (%d)", strerror(-err), -err);
+ if ( ENODEV == err ) {
+ CAMHAL_LOGEA("Preview surface abandoned!");
+ mBufferSource = NULL;
+ }
+ return NULL;
+ }
+
+ if ( mBuffers == NULL ) {
+ CAMHAL_LOGEA("Couldn't create array for ANativeWindow buffers");
+ LOG_FUNCTION_NAME_EXIT;
+ return NULL;
+ }
+
+ mBufferSource->get_min_undequeued_buffer_count(mBufferSource, &undequeued);
+
+ for (i = 0; i < mBufferCount; i++ ) {
+ buffer_handle_t *handle;
+ int stride; // dummy variable to get stride
+ // TODO(XXX): Do we need to keep stride information in camera hal?
+
+ err = mBufferSource->dequeue_buffer(mBufferSource, &handle, &stride);
+
+ if (err != 0) {
+ CAMHAL_LOGEB("dequeueBuffer failed: %s (%d)", strerror(-err), -err);
+ if ( ENODEV == err ) {
+ CAMHAL_LOGEA("Preview surface abandoned!");
+ mBufferSource = NULL;
+ }
+ goto fail;
+ }
+
+ CAMHAL_LOGDB("got handle %p", handle);
+ mBuffers[i].opaque = (void *)handle;
+ mBuffers[i].type = CAMERA_BUFFER_ANW;
+ mBuffers[i].format = mPixelFormat;
+ mFramesWithCameraAdapterMap.add(handle, i);
+
+ bytes = CameraHal::calculateBufferSize(format, width, height);
+ }
+
+ for( i = 0; i < mBufferCount-undequeued; i++ ) {
+ void *y_uv[2];
+ android::Rect bounds(width, height);
+
+ buffer_handle_t *handle = (buffer_handle_t *) mBuffers[i].opaque;
+ mBufferSource->lock_buffer(mBufferSource, handle);
+ mapper.lock(*handle, CAMHAL_GRALLOC_USAGE, bounds, y_uv);
+ mBuffers[i].mapped = y_uv[0];
+ }
+
+ // return the rest of the buffers back to ANativeWindow
+ for(i = (mBufferCount-undequeued); i >= 0 && i < mBufferCount; i++) {
+ buffer_handle_t *handle = (buffer_handle_t *) mBuffers[i].opaque;
+ void *y_uv[2];
+ android::Rect bounds(width, height);
+
+ mapper.lock(*handle, CAMHAL_GRALLOC_USAGE, bounds, y_uv);
+ mBuffers[i].mapped = y_uv[0];
+ mapper.unlock(*handle);
+
+ err = mBufferSource->cancel_buffer(mBufferSource, handle);
+ if (err != 0) {
+ CAMHAL_LOGEB("cancel_buffer failed: %s (%d)", strerror(-err), -err);
+ if ( ENODEV == err ) {
+ CAMHAL_LOGEA("Preview surface abandoned!");
+ mBufferSource = NULL;
+ }
+ goto fail;
+ }
+ mFramesWithCameraAdapterMap.removeItem((buffer_handle_t *) mBuffers[i].opaque);
+ }
+
+ mFrameWidth = width;
+ mFrameHeight = height;
+ mBufferSourceDirection = BUFFER_SOURCE_TAP_OUT;
+
+ return mBuffers;
+
+ fail:
+ // need to cancel buffers if any were dequeued
+ for (int start = 0; start < i && i > 0; start++) {
+ int err = mBufferSource->cancel_buffer(mBufferSource,
+ (buffer_handle_t *) mBuffers[start].opaque);
+ if (err != 0) {
+ CAMHAL_LOGEB("cancelBuffer failed w/ error 0x%08x", err);
+ break;
+ }
+ mFramesWithCameraAdapterMap.removeItem((buffer_handle_t *) mBuffers[start].opaque);
+ }
+
+ freeBufferList(mBuffers);
+
+ CAMHAL_LOGEA("Error occurred, performing cleanup");
+
+ if (NULL != mErrorNotifier.get()) {
+ mErrorNotifier->errorNotify(-ENOMEM);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+ return NULL;
+
+}
+
+CameraBuffer *BufferSourceAdapter::getBuffers(bool reset) {
+ int undequeued = 0;
+ status_t err;
+ android::Mutex::Autolock lock(mLock);
+
+ if (!mBufferSource || !mBuffers) {
+ CAMHAL_LOGE("Adapter is not set up properly: "
+ "mBufferSource:%p mBuffers:%p",
+ mBufferSource, mBuffers);
+ goto fail;
+ }
+
+ // CameraHal is indicating to us that the state of the buffer list
+ // might have changed. We may need to check it and hand back a new
+ // list, depending on the state of our surface.
+ if (reset) {
+ const int lnumBufs = mBufferCount;
+ android::GraphicBufferMapper &mapper = android::GraphicBufferMapper::get();
+ android::Rect bounds(mFrameWidth, mFrameHeight);
+ void *y_uv[2];
+ CameraBuffer * newBuffers = NULL;
+ unsigned int index = 0;
+ android::KeyedVector<void*, int> missingIndices;
+
+ newBuffers = new CameraBuffer [lnumBufs];
+ memset (newBuffers, 0, sizeof(CameraBuffer) * lnumBufs);
+
+ // Use this vector to figure out missing indices
+ for (int i = 0; i < mBufferCount; i++) {
+ missingIndices.add(mBuffers[i].opaque, i);
+ }
+
+ // assign buffers that we have already dequeued
+ for (index = 0; index < mFramesWithCameraAdapterMap.size(); index++) {
+ int value = mFramesWithCameraAdapterMap.valueAt(index);
+ newBuffers[index].opaque = mBuffers[value].opaque;
+ newBuffers[index].type = mBuffers[value].type;
+ newBuffers[index].format = mBuffers[value].format;
+ newBuffers[index].mapped = mBuffers[value].mapped;
+ mFramesWithCameraAdapterMap.replaceValueAt(index, index);
+ missingIndices.removeItem(newBuffers[index].opaque);
+ }
+
+ mBufferSource->get_min_undequeued_buffer_count(mBufferSource, &undequeued);
+
+ // dequeue the rest of the buffers
+ for ( ; index < (unsigned int)(mBufferCount-undequeued); index++) {
+ buffer_handle_t *handle;
+ int stride; // dummy variable to get stride
+
+ err = mBufferSource->dequeue_buffer(mBufferSource, &handle, &stride);
+ if (err != 0) {
+ CAMHAL_LOGEB("dequeueBuffer failed: %s (%d)", strerror(-err), -err);
+ if ( ENODEV == err ) {
+ CAMHAL_LOGEA("Preview surface abandoned!");
+ mBufferSource = NULL;
+ }
+ goto fail;
+ }
+ newBuffers[index].opaque = (void *)handle;
+ newBuffers[index].type = CAMERA_BUFFER_ANW;
+ newBuffers[index].format = mPixelFormat;
+ mFramesWithCameraAdapterMap.add(handle, index);
+
+ mBufferSource->lock_buffer(mBufferSource, handle);
+ mapper.lock(*handle, CAMHAL_GRALLOC_USAGE, bounds, y_uv);
+ newBuffers[index].mapped = y_uv[0];
+ CAMHAL_LOGDB("got handle %p", handle);
+
+ missingIndices.removeItem(newBuffers[index].opaque);
+ }
+
+ // now figure out which buffers are not dequeued, i.e. which are
+ // still in mBuffers but not yet in newBuffers
+ if ((mBufferCount - index) != missingIndices.size()) {
+ CAMHAL_LOGD("Hmm, something's gone awry. We are missing a different number"
+ " of buffers than we can fill");
+ }
+ for (unsigned int i = 0; i < missingIndices.size(); i++) {
+ int j = missingIndices.valueAt(i);
+
+ CAMHAL_LOGD("Filling at %d", j);
+ newBuffers[index].opaque = mBuffers[j].opaque;
+ newBuffers[index].type = mBuffers[j].type;
+ newBuffers[index].format = mBuffers[j].format;
+ newBuffers[index].mapped = mBuffers[j].mapped;
+ }
+
+ delete [] mBuffers;
+ mBuffers = newBuffers;
+ }
+
+ return mBuffers;
+
+ fail:
+ return NULL;
+}
+
+unsigned int BufferSourceAdapter::getSize() {
+ android::Mutex::Autolock lock(mLock);
+ return CameraHal::calculateBufferSize(mPixelFormat, mFrameWidth, mFrameHeight);
+}
+
+int BufferSourceAdapter::getBufferCount() {
+ int count = -1;
+
+ android::Mutex::Autolock lock(mLock);
+ if (mBufferSource) extendedOps()->get_buffer_count(mBufferSource, &count);
+ return count;
+}
+
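+// Tap-in path: fetch the single input buffer via update_and_get_buffer(), query its geometry, format and crop, and lock it for CPU access.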
+CameraBuffer* BufferSourceAdapter::getBufferList(int *num) {
+ LOG_FUNCTION_NAME;
+ status_t err;
+ const int lnumBufs = 1;
+ int formatSource;
+ android::GraphicBufferMapper &mapper = android::GraphicBufferMapper::get();
+ buffer_handle_t *handle;
+
+ // TODO(XXX): Only supporting one input buffer at a time right now
+ *num = 1;
+ mBuffers = new CameraBuffer [lnumBufs];
+ memset (mBuffers, 0, sizeof(CameraBuffer) * lnumBufs);
+
+ if ( NULL == mBufferSource ) {
+ return NULL;
+ }
+
+ err = extendedOps()->update_and_get_buffer(mBufferSource, &handle, &mBuffers[0].stride);
+ if (err != 0) {
+ CAMHAL_LOGEB("update and get buffer failed: %s (%d)", strerror(-err), -err);
+ if ( ENODEV == err ) {
+ CAMHAL_LOGEA("Preview surface abandoned!");
+ mBufferSource = NULL;
+ }
+ goto fail;
+ }
+
+ CAMHAL_LOGD("got handle %p", handle);
+ mBuffers[0].opaque = (void *)handle;
+ mBuffers[0].type = CAMERA_BUFFER_ANW;
+ mFramesWithCameraAdapterMap.add(handle, 0);
+
+ err = extendedOps()->get_buffer_dimension(mBufferSource, &mBuffers[0].width, &mBuffers[0].height);
+ err = extendedOps()->get_buffer_format(mBufferSource, &formatSource);
+
+ int t, l, r, b, w, h;
+ err = extendedOps()->get_crop(mBufferSource, &l, &t, &r, &b);
+ err = extendedOps()->get_current_size(mBufferSource, &w, &h);
+
+ // lock buffer
+ {
+ void *y_uv[2];
+ android::Rect bounds(mBuffers[0].width, mBuffers[0].height);
+ mapper.lock(*handle, CAMHAL_GRALLOC_USAGE, bounds, y_uv);
+ mBuffers[0].mapped = y_uv[0];
+ }
+
+ mFrameWidth = mBuffers[0].width;
+ mFrameHeight = mBuffers[0].height;
+ mPixelFormat = getFormatFromANW(formatSource);
+
+ mBuffers[0].format = mPixelFormat;
+ mBuffers[0].actual_size = CameraHal::calculateBufferSize(mPixelFormat, w, h);
+ mBuffers[0].offset = t * w + l * CameraHal::getBPP(mPixelFormat);
+ mBufferSourceDirection = BUFFER_SOURCE_TAP_IN;
+
+ return mBuffers;
+
+ fail:
+ // need to cancel buffers if any were dequeued
+ freeBufferList(mBuffers);
+
+ if (NULL != mErrorNotifier.get()) {
+ mErrorNotifier->errorNotify(-ENOMEM);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+ return NULL;
+}
+
+uint32_t * BufferSourceAdapter::getOffsets()
+{
+ LOG_FUNCTION_NAME;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return NULL;
+}
+
+int BufferSourceAdapter::minUndequeueableBuffers(int& undequeueable) {
+ LOG_FUNCTION_NAME;
+ int ret = NO_ERROR;
+
+ if(!mBufferSource)
+ {
+ ret = INVALID_OPERATION;
+ goto end;
+ }
+
+ ret = mBufferSource->get_min_undequeued_buffer_count(mBufferSource, &undequeueable);
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGEB("get_min_undequeued_buffer_count failed: %s (%d)", strerror(-ret), -ret);
+ if ( ENODEV == ret ) {
+ CAMHAL_LOGEA("Preview surface abandoned!");
+ mBufferSource = NULL;
+ }
+ return -ret;
+ }
+
+ end:
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+
+}
+
+int BufferSourceAdapter::maxQueueableBuffers(unsigned int& queueable)
+{
+ LOG_FUNCTION_NAME;
+ int ret = NO_ERROR;
+ int undequeued = 0;
+
+ if(mBufferCount == 0) {
+ ret = INVALID_OPERATION;
+ goto end;
+ }
+
+ ret = minUndequeueableBuffers(undequeued);
+ if (ret != NO_ERROR) {
+ goto end;
+ }
+
+ queueable = mBufferCount - undequeued;
+
+ end:
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+}
+
+int BufferSourceAdapter::getFd()
+{
+ LOG_FUNCTION_NAME;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return -1;
+
+}
+
+status_t BufferSourceAdapter::returnBuffersToWindow()
+{
+ status_t ret = NO_ERROR;
+ android::GraphicBufferMapper &mapper = android::GraphicBufferMapper::get();
+
+ //Give the buffers back to display here - sort of free it
+ if (mBufferSource) {
+ for(unsigned int i = 0; i < mFramesWithCameraAdapterMap.size(); i++) {
+ int value = mFramesWithCameraAdapterMap.valueAt(i);
+
+ // skip out-of-bounds indices before indexing mBuffers
+ if ((value < 0) || (value >= mBufferCount)) {
+ CAMHAL_LOGEA("Potential out-of-bounds access to handle... skipping");
+ continue;
+ }
+
+ buffer_handle_t *handle = (buffer_handle_t *) mBuffers[value].opaque;
+
+ // unlock buffer before giving it up
+ mapper.unlock(*handle);
+
+ ret = mBufferSource->cancel_buffer(mBufferSource, handle);
+ if ( ENODEV == ret ) {
+ CAMHAL_LOGEA("Preview surface abandoned!");
+ mBufferSource = NULL;
+ return -ret;
+ } else if ( NO_ERROR != ret ) {
+ CAMHAL_LOGEB("cancel_buffer() failed: %s (%d)",
+ strerror(-ret),
+ -ret);
+ return -ret;
+ }
+ }
+ } else {
+ CAMHAL_LOGE("mBufferSource is NULL");
+ }
+
+ ///Clear the frames with camera adapter map
+ mFramesWithCameraAdapterMap.clear();
+
+ return ret;
+
+}
+
+int BufferSourceAdapter::freeBufferList(CameraBuffer * buflist)
+{
+ LOG_FUNCTION_NAME;
+
+ status_t ret = NO_ERROR;
+
+ if ( mBuffers != buflist ) {
+ return BAD_VALUE;
+ }
+
+ android::AutoMutex lock(mLock);
+
+ if (mBufferSourceDirection == BUFFER_SOURCE_TAP_OUT) returnBuffersToWindow();
+
+ if( mBuffers != NULL)
+ {
+ delete [] mBuffers;
+ mBuffers = NULL;
+ }
+
+ return NO_ERROR;
+}
+
+
+bool BufferSourceAdapter::supportsExternalBuffering()
+{
+ return false;
+}
+
+void BufferSourceAdapter::addFrame(CameraFrame* frame)
+{
+ if (mQueueFrame.get()) {
+ mQueueFrame->addFrame(frame);
+ }
+}
+
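+// Deliver a completed frame to the consumer: locate its buffer, apply crop and optional metadata, unlock and enqueue it; reprocess input frames are only unlocked.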
+void BufferSourceAdapter::handleFrameCallback(CameraFrame* frame)
+{
+ status_t ret = NO_ERROR;
+ buffer_handle_t *handle = NULL;
+ int i;
+ uint32_t x, y;
+ android::GraphicBufferMapper &mapper = android::GraphicBufferMapper::get();
+
+ android::AutoMutex lock(mLock);
+
+ if (!mBuffers || !frame->mBuffer) {
+ CAMHAL_LOGEA("Adapter sent BufferSourceAdapter a NULL frame?");
+ return;
+ }
+
+ for ( i = 0; i < mBufferCount; i++ ) {
+ if (frame->mBuffer == &mBuffers[i]) {
+ break;
+ }
+ }
+
+ if (i >= mBufferCount) {
+ CAMHAL_LOGD("Can't find frame in buffer list");
+ if (frame->mFrameType != CameraFrame::REPROCESS_INPUT_FRAME) {
+ mFrameProvider->returnFrame(frame->mBuffer,
+ static_cast<CameraFrame::FrameType>(frame->mFrameType));
+ }
+ return;
+ }
+
+ handle = (buffer_handle_t *) mBuffers[i].opaque;
+
+ // Handle input buffers
+ // TODO(XXX): Move handling of input buffers out of here if
+ // it becomes more complex
+ if (frame->mFrameType == CameraFrame::REPROCESS_INPUT_FRAME) {
+ CAMHAL_LOGD("Unlock %p (buffer #%d)", handle, i);
+ mapper.unlock(*handle);
+ return;
+ }
+
+ CameraHal::getXYFromOffset(&x, &y, frame->mOffset, frame->mAlignment, mPixelFormat);
+ CAMHAL_LOGVB("offset = %u left = %d top = %d right = %d bottom = %d",
+ frame->mOffset, x, y, x + frame->mWidth, y + frame->mHeight);
+ ret = mBufferSource->set_crop(mBufferSource, x, y, x + frame->mWidth, y + frame->mHeight);
+ if (NO_ERROR != ret) {
+ CAMHAL_LOGE("mBufferSource->set_crop returned error %d", ret);
+ goto fail;
+ }
+
+ if ( NULL != frame->mMetaData.get() ) {
+ camera_memory_t *extMeta = frame->mMetaData->getExtendedMetadata();
+ if ( NULL != extMeta ) {
+ camera_metadata_t *metaData = static_cast<camera_metadata_t *> (extMeta->data);
+ metaData->timestamp = frame->mTimestamp;
+ ret = extendedOps()->set_metadata(mBufferSource, extMeta);
+ if (ret != 0) {
+ CAMHAL_LOGE("Surface::set_metadata returned error %d", ret);
+ goto fail;
+ }
+ }
+ }
+
+ // unlock buffer before enqueueing
+ mapper.unlock(*handle);
+
+ ret = mBufferSource->enqueue_buffer(mBufferSource, handle);
+ if (ret != 0) {
+ CAMHAL_LOGE("Surface::queueBuffer returned error %d", ret);
+ goto fail;
+ }
+
+ mFramesWithCameraAdapterMap.removeItem((buffer_handle_t *) frame->mBuffer->opaque);
+
+ return;
+
+fail:
+ mFramesWithCameraAdapterMap.clear();
+ mBufferSource = NULL;
+ mReturnFrame->requestExit();
+ mQueueFrame->requestExit();
+}
+
+
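+// Reclaim a buffer released by the consumer: dequeue and lock it, then return it to the frame provider for reuse.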
+bool BufferSourceAdapter::handleFrameReturn()
+{
+ status_t err;
+ buffer_handle_t *buf;
+ int i = 0;
+ int stride; // dummy variable to get stride
+ CameraFrame::FrameType type;
+ android::GraphicBufferMapper &mapper = android::GraphicBufferMapper::get();
+ void *y_uv[2];
+ android::Rect bounds(mFrameWidth, mFrameHeight);
+
+ android::AutoMutex lock(mLock);
+
+ if ( (NULL == mBufferSource) || (NULL == mBuffers) ) {
+ return false;
+ }
+
+ err = mBufferSource->dequeue_buffer(mBufferSource, &buf, &stride);
+ if (err != 0) {
+ CAMHAL_LOGEB("dequeueBuffer failed: %s (%d)", strerror(-err), -err);
+
+ if ( ENODEV == err ) {
+ CAMHAL_LOGEA("Preview surface abandoned!");
+ mBufferSource = NULL;
+ }
+
+ return false;
+ }
+
+ err = mBufferSource->lock_buffer(mBufferSource, buf);
+ if (err != 0) {
+ CAMHAL_LOGEB("lockbuffer failed: %s (%d)", strerror(-err), -err);
+
+ if ( ENODEV == err ) {
+ CAMHAL_LOGEA("Preview surface abandoned!");
+ mBufferSource = NULL;
+ }
+
+ return false;
+ }
+
+ mapper.lock(*buf, CAMHAL_GRALLOC_USAGE, bounds, y_uv);
+
+ for(i = 0; i < mBufferCount; i++) {
+ if (mBuffers[i].opaque == buf)
+ break;
+ }
+
+ if (i >= mBufferCount) {
+ CAMHAL_LOGEB("Failed to find handle %p", buf);
+ }
+
+ mFramesWithCameraAdapterMap.add((buffer_handle_t *) mBuffers[i].opaque, i);
+
+ CAMHAL_LOGVB("handleFrameReturn: found graphic buffer %d of %d", i, mBufferCount - 1);
+
+ mFrameProvider->returnFrame(&mBuffers[i], formatToOutputFrameType(mPixelFormat));
+ return true;
+}
+
+void BufferSourceAdapter::frameCallback(CameraFrame* caFrame)
+{
+ if ((NULL != caFrame) && (NULL != caFrame->mCookie)) {
+ BufferSourceAdapter *da = (BufferSourceAdapter*) caFrame->mCookie;
+ da->addFrame(caFrame);
+ } else {
+ CAMHAL_LOGEB("Invalid Cookie in Camera Frame = %p, Cookie = %p",
+ caFrame, caFrame ? caFrame->mCookie : NULL);
+ }
+}
+
+/*--------------------BufferSourceAdapter Class ENDS here-----------------------------*/
+
+} // namespace Camera
+} // namespace Ti
+
+#endif
diff --git a/camera/CameraHal.cpp b/camera/CameraHal.cpp
index db73443..7754d51 100644
--- a/camera/CameraHal.cpp
+++ b/camera/CameraHal.cpp
@@ -21,10 +21,9 @@
*
*/
-#define LOG_TAG "CameraHAL"
-
#include "CameraHal.h"
#include "ANativeWindowDisplayAdapter.h"
+#include "BufferSourceAdapter.h"
#include "TICameraParameters.h"
#include "CameraProperties.h"
#include <cutils/properties.h>
@@ -32,9 +31,11 @@
#include <poll.h>
#include <math.h>
-namespace android {
+namespace Ti {
+namespace Camera {
-extern "C" CameraAdapter* CameraAdapter_Factory(size_t);
+extern "C" CameraAdapter* OMXCameraAdapter_Factory(size_t);
+extern "C" CameraAdapter* V4LCameraAdapter_Factory(size_t);
/*****************************************************************************/
@@ -43,13 +44,96 @@ extern "C" CameraAdapter* CameraAdapter_Factory(size_t);
//// Currently, they are hard-coded
const int CameraHal::NO_BUFFERS_PREVIEW = MAX_CAMERA_BUFFERS;
-const int CameraHal::NO_BUFFERS_IMAGE_CAPTURE = 2;
+const int CameraHal::NO_BUFFERS_IMAGE_CAPTURE = 5;
+const int CameraHal::SW_SCALING_FPS_LIMIT = 15;
+
+const uint32_t MessageNotifier::EVENT_BIT_FIELD_POSITION = 16;
-const uint32_t MessageNotifier::EVENT_BIT_FIELD_POSITION = 0;
const uint32_t MessageNotifier::FRAME_BIT_FIELD_POSITION = 0;
+// TODO(XXX): Temporarily increase number of buffers we can allocate from ANW
+// until faux-NPA mode is implemented
+const int CameraHal::NO_BUFFERS_IMAGE_CAPTURE_SYSTEM_HEAP = 15;
+
+#ifdef CAMERAHAL_USE_RAW_IMAGE_SAVING
+// HACK: Default path to the directory where RAW images coming from the video port will be saved.
+// If the directory does not exist, saving is skipped and the video port frame is ignored.
+// The directory name is deliberately odd so that RAW image saving is enabled only when
+// the directory has been created explicitly by the user.
+extern const char * const kRawImagesOutputDirPath = "/data/misc/camera/RaW_PiCtUrEs";
+extern const char * const kYuvImagesOutputDirPath = "/data/misc/camera/YuV_PiCtUrEs";
+#endif
+
/******************************************************************************/
+
+#ifdef OMAP_ENHANCEMENT_CPCAM
+static int dummy_update_and_get_buffer(preview_stream_ops_t*, buffer_handle_t**, int*) {
+ return INVALID_OPERATION;
+}
+
+static int dummy_get_buffer_dimension(preview_stream_ops_t*, int*, int*) {
+ return INVALID_OPERATION;
+}
+
+static int dummy_get_buffer_format(preview_stream_ops_t*, int*) {
+ return INVALID_OPERATION;
+}
+
+static int dummy_set_metadata(preview_stream_ops_t*, const camera_memory_t*) {
+ return INVALID_OPERATION;
+}
+
+static int dummy_get_id(preview_stream_ops_t*, char *data, unsigned int dataSize) {
+ return INVALID_OPERATION;
+}
+
+static int dummy_get_buffer_count(preview_stream_ops_t*, int *count) {
+ return INVALID_OPERATION;
+}
+
+static int dummy_get_crop(preview_stream_ops_t*,
+ int *, int *, int *, int *) {
+ return INVALID_OPERATION;
+}
+
+static int dummy_get_current_size(preview_stream_ops_t*,
+ int *, int *) {
+ return INVALID_OPERATION;
+}
+#endif
+
+#ifdef OMAP_ENHANCEMENT
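+// Fallback ops table: every entry fails with INVALID_OPERATION, so DisplayAdapter never dereferences a NULL extended-ops pointer.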
+static preview_stream_extended_ops_t dummyPreviewStreamExtendedOps = {
+#ifdef OMAP_ENHANCEMENT_CPCAM
+ dummy_update_and_get_buffer,
+ dummy_get_buffer_dimension,
+ dummy_get_buffer_format,
+ dummy_set_metadata,
+ dummy_get_id,
+ dummy_get_buffer_count,
+ dummy_get_crop,
+ dummy_get_current_size,
+#endif
+};
+#endif
+
+
+DisplayAdapter::DisplayAdapter()
+{
+#ifdef OMAP_ENHANCEMENT
+ mExtendedOps = &dummyPreviewStreamExtendedOps;
+#endif
+}
+
+#ifdef OMAP_ENHANCEMENT
+void DisplayAdapter::setExtendedOps(preview_stream_extended_ops_t * extendedOps) {
+ mExtendedOps = extendedOps ? extendedOps : &dummyPreviewStreamExtendedOps;
+}
+#endif
+
+
+
#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
struct timeval CameraHal::mStartPreview;
@@ -67,6 +151,7 @@ static void orientation_cb(uint32_t orientation, uint32_t tilt, void* cookie) {
}
}
+
/*-------------Camera Hal Interface Method definitions STARTS here--------------------*/
/**
@@ -110,6 +195,10 @@ void CameraHal::setCallbacks(camera_notify_callback notify_cb,
user);
}
+ if ( NULL != mCameraAdapter ) {
+ mCameraAdapter->setSharedAllocator(get_memory);
+ }
+
LOG_FUNCTION_NAME_EXIT;
}
@@ -131,10 +220,13 @@ void CameraHal::enableMsgType(int32_t msgType)
// ignoring enable focus message from camera service
// we will enable internally in autoFocus call
- msgType &= ~(CAMERA_MSG_FOCUS | CAMERA_MSG_FOCUS_MOVE);
+ msgType &= ~CAMERA_MSG_FOCUS;
+#ifdef ANDROID_API_JB_OR_LATER
+ msgType &= ~CAMERA_MSG_FOCUS_MOVE;
+#endif
{
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
mMsgEnabled |= msgType;
}
@@ -173,7 +265,7 @@ void CameraHal::disableMsgType(int32_t msgType)
LOG_FUNCTION_NAME;
{
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
mMsgEnabled &= ~msgType;
}
@@ -201,10 +293,18 @@ void CameraHal::disableMsgType(int32_t msgType)
*/
int CameraHal::msgTypeEnabled(int32_t msgType)
{
+ int32_t msgEnabled = 0;
+
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
+
+ msgEnabled = mMsgEnabled;
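+ // Hide preview frame/metadata callbacks while preview is neither running nor initialized.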
+ if (!previewEnabled() && !mPreviewInitializationDone) {
+ msgEnabled &= ~(CAMERA_MSG_PREVIEW_FRAME | CAMERA_MSG_PREVIEW_METADATA);
+ }
+
LOG_FUNCTION_NAME_EXIT;
- return (mMsgEnabled & msgType);
+ return (msgEnabled & msgType);
}
/**
@@ -218,11 +318,11 @@ int CameraHal::msgTypeEnabled(int32_t msgType)
int CameraHal::setParameters(const char* parameters)
{
- LOG_FUNCTION_NAME;
+ LOG_FUNCTION_NAME;
- CameraParameters params;
+ android::CameraParameters params;
- String8 str_params(parameters);
+ android::String8 str_params(parameters);
params.unflatten(str_params);
LOG_FUNCTION_NAME_EXIT;
@@ -238,204 +338,202 @@ int CameraHal::setParameters(const char* parameters)
@todo Define error codes
*/
-int CameraHal::setParameters(const CameraParameters& params)
+int CameraHal::setParameters(const android::CameraParameters& params)
{
- LOG_FUNCTION_NAME;
+ LOG_FUNCTION_NAME;
int w, h;
- int w_orig, h_orig;
- int framerate,minframerate;
+ int framerate;
int maxFPS, minFPS;
const char *valstr = NULL;
int varint = 0;
status_t ret = NO_ERROR;
- CameraParameters oldParams = mParameters;
// Needed for KEY_RECORDING_HINT
bool restartPreviewRequired = false;
bool updateRequired = false;
- bool videoMode = false;
+ android::CameraParameters oldParams = mParameters;
+
+#ifdef V4L_CAMERA_ADAPTER
+ if (strcmp (V4L_CAMERA_NAME_USB, mCameraProperties->get(CameraProperties::CAMERA_NAME)) == 0 ) {
+ updateRequired = true;
+ }
+#endif
{
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
///Ensure that preview is not enabled when the below parameters are changed.
if(!previewEnabled())
{
-
- CAMHAL_LOGDB("PreviewFormat %s", params.getPreviewFormat());
-
if ((valstr = params.getPreviewFormat()) != NULL) {
if ( isParameterValid(valstr, mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_FORMATS))) {
mParameters.setPreviewFormat(valstr);
+ CAMHAL_LOGDB("PreviewFormat set %s", valstr);
} else {
- CAMHAL_LOGEB("Invalid preview format.Supported: %s", mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_FORMATS));
+ CAMHAL_LOGEB("Invalid preview format: %s. Supported: %s", valstr,
+ mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_FORMATS));
return BAD_VALUE;
}
}
- varint = params.getInt(TICameraParameters::KEY_VNF);
- valstr = params.get(TICameraParameters::KEY_VNF);
- if ( valstr != NULL ) {
- if ( ( varint == 0 ) || ( varint == 1 ) ) {
- CAMHAL_LOGDB("VNF set %s", valstr);
- mParameters.set(TICameraParameters::KEY_VNF, varint);
- } else {
+ if ((valstr = params.get(TICameraParameters::KEY_VNF)) != NULL) {
+ if (strcmp(mCameraProperties->get(CameraProperties::VNF_SUPPORTED),
+ android::CameraParameters::TRUE) == 0) {
+ CAMHAL_LOGDB("VNF %s", valstr);
+ mParameters.set(TICameraParameters::KEY_VNF, valstr);
+ } else if (strcmp(valstr, android::CameraParameters::TRUE) == 0) {
CAMHAL_LOGEB("ERROR: Invalid VNF: %s", valstr);
return BAD_VALUE;
+ } else {
+ mParameters.set(TICameraParameters::KEY_VNF,
+ android::CameraParameters::FALSE);
}
}
- if ((valstr = params.get(CameraParameters::KEY_VIDEO_STABILIZATION)) != NULL) {
+ if ((valstr = params.get(android::CameraParameters::KEY_VIDEO_STABILIZATION)) != NULL) {
// make sure we support vstab...if we don't and application is trying to set
// vstab then return an error
if (strcmp(mCameraProperties->get(CameraProperties::VSTAB_SUPPORTED),
- CameraParameters::TRUE) == 0) {
- CAMHAL_LOGDB("VSTAB %s",valstr);
- mParameters.set(CameraParameters::KEY_VIDEO_STABILIZATION, valstr);
- } else if (strcmp(valstr, CameraParameters::TRUE) == 0) {
+ android::CameraParameters::TRUE) == 0) {
+ CAMHAL_LOGDB("VSTAB %s", valstr);
+ mParameters.set(android::CameraParameters::KEY_VIDEO_STABILIZATION, valstr);
+ } else if (strcmp(valstr, android::CameraParameters::TRUE) == 0) {
CAMHAL_LOGEB("ERROR: Invalid VSTAB: %s", valstr);
return BAD_VALUE;
} else {
- mParameters.set(CameraParameters::KEY_VIDEO_STABILIZATION,
- CameraParameters::FALSE);
+ mParameters.set(android::CameraParameters::KEY_VIDEO_STABILIZATION,
+ android::CameraParameters::FALSE);
}
}
+ if( (valstr = params.get(TICameraParameters::KEY_CAP_MODE)) != NULL) {
+ if (strcmp(TICameraParameters::VIDEO_MODE, valstr)) {
+ mCapModeBackup = valstr;
+ }
- if( (valstr = params.get(TICameraParameters::KEY_CAP_MODE)) != NULL)
- {
- CAMHAL_LOGDB("Capture mode set %s", valstr);
- mParameters.set(TICameraParameters::KEY_CAP_MODE, valstr);
- }
+ CAMHAL_LOGDB("Capture mode set %s", valstr);
- if ((valstr = params.get(TICameraParameters::KEY_IPP)) != NULL) {
- if (isParameterValid(valstr,mCameraProperties->get(CameraProperties::SUPPORTED_IPP_MODES))) {
- CAMHAL_LOGDB("IPP mode set %s", valstr);
- mParameters.set(TICameraParameters::KEY_IPP, valstr);
+ const char *currentMode = mParameters.get(TICameraParameters::KEY_CAP_MODE);
+ if ( NULL != currentMode ) {
+ if ( strcmp(currentMode, valstr) != 0 ) {
+ updateRequired = true;
+ }
+ } else {
+ updateRequired = true;
+ }
+
+ mParameters.set(TICameraParameters::KEY_CAP_MODE, valstr);
+ } else if (!mCapModeBackup.isEmpty()) {
+ // Restore previous capture mode after stopPreview()
+ mParameters.set(TICameraParameters::KEY_CAP_MODE,
+ mCapModeBackup.string());
+ updateRequired = true;
+ }
+
+#ifdef OMAP_ENHANCEMENT_VTC
+ if ((valstr = params.get(TICameraParameters::KEY_VTC_HINT)) != NULL ) {
+ mParameters.set(TICameraParameters::KEY_VTC_HINT, valstr);
+ if (strcmp(valstr, android::CameraParameters::TRUE) == 0) {
+ mVTCUseCase = true;
} else {
- CAMHAL_LOGEB("ERROR: Invalid IPP mode: %s", valstr);
- return BAD_VALUE;
+ mVTCUseCase = false;
}
+ CAMHAL_LOGDB("VTC Hint = %d", mVTCUseCase);
}
-#ifdef OMAP_ENHANCEMENT
-
- if((valstr = params.get(TICameraParameters::KEY_S3D2D_PREVIEW)) != NULL)
- {
- CAMHAL_LOGDB("Stereo 3D->2D Preview mode is %s", params.get(TICameraParameters::KEY_S3D2D_PREVIEW));
- mParameters.set(TICameraParameters::KEY_S3D2D_PREVIEW, valstr);
+ if (mVTCUseCase) {
+ if ((valstr = params.get(TICameraParameters::KEY_VIDEO_ENCODER_HANDLE)) != NULL ) {
+ mParameters.set(TICameraParameters::KEY_VIDEO_ENCODER_HANDLE, valstr);
}
- if((valstr = params.get(TICameraParameters::KEY_AUTOCONVERGENCE)) != NULL)
- {
- CAMHAL_LOGDB("AutoConvergence mode is %s", params.get(TICameraParameters::KEY_AUTOCONVERGENCE));
- mParameters.set(TICameraParameters::KEY_AUTOCONVERGENCE, valstr);
+ if ((valstr = params.get(TICameraParameters::KEY_VIDEO_ENCODER_SLICE_HEIGHT)) != NULL ) {
+ mParameters.set(TICameraParameters::KEY_VIDEO_ENCODER_SLICE_HEIGHT, valstr);
}
-#endif
-
}
+#endif
+ }
- params.getPreviewSize(&w, &h);
- if (w == -1 && h == -1) {
- CAMHAL_LOGEA("Unable to get preview size");
- return BAD_VALUE;
- }
-
- int oldWidth, oldHeight;
- mParameters.getPreviewSize(&oldWidth, &oldHeight);
-
-#ifdef OMAP_ENHANCEMENT
-
- int orientation =0;
- if((valstr = params.get(TICameraParameters::KEY_SENSOR_ORIENTATION)) != NULL)
- {
- CAMHAL_LOGDB("Sensor Orientation is set to %s", params.get(TICameraParameters::KEY_SENSOR_ORIENTATION));
- mParameters.set(TICameraParameters::KEY_SENSOR_ORIENTATION, valstr);
- orientation = params.getInt(TICameraParameters::KEY_SENSOR_ORIENTATION);
+ if ((valstr = params.get(TICameraParameters::KEY_IPP)) != NULL) {
+ if (isParameterValid(valstr,mCameraProperties->get(CameraProperties::SUPPORTED_IPP_MODES))) {
+ if ((mParameters.get(TICameraParameters::KEY_IPP) == NULL) ||
+ (strcmp(valstr, mParameters.get(TICameraParameters::KEY_IPP)))) {
+ CAMHAL_LOGDB("IPP mode set %s", params.get(TICameraParameters::KEY_IPP));
+ mParameters.set(TICameraParameters::KEY_IPP, valstr);
+ restartPreviewRequired = true;
}
-
- if(orientation ==90 || orientation ==270)
- {
- if ( !isResolutionValid(h,w, mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_SIZES)))
- {
- CAMHAL_LOGEB("Invalid preview resolution %d x %d", w, h);
- return BAD_VALUE;
- }
- else
- {
- mParameters.setPreviewSize(w, h);
- mVideoWidth = w;
- mVideoHeight = h;
- }
- }
- else
- {
- if ( !isResolutionValid(w, h, mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_SIZES)))
- {
- CAMHAL_LOGEB("Invalid preview resolution %d x %d", w, h);
+ } else {
+ CAMHAL_LOGEB("ERROR: Invalid IPP mode: %s", valstr);
return BAD_VALUE;
- }
- else
+ }
+ }
+
+ if ( (valstr = params.get(TICameraParameters::KEY_S3D_PRV_FRAME_LAYOUT)) != NULL )
+ {
+ if (strcmp(valstr, mParameters.get(TICameraParameters::KEY_S3D_PRV_FRAME_LAYOUT)))
{
- mParameters.setPreviewSize(w, h);
+ CAMHAL_LOGDB("Stereo 3D preview image layout is %s", valstr);
+ mParameters.set(TICameraParameters::KEY_S3D_PRV_FRAME_LAYOUT, valstr);
+ restartPreviewRequired = true;
}
- }
-
+ }
-#else
+#ifdef OMAP_ENHANCEMENT
+ int orientation =0;
+ if((valstr = params.get(TICameraParameters::KEY_SENSOR_ORIENTATION)) != NULL)
+ {
+ doesSetParameterNeedUpdate(valstr,
+ mParameters.get(TICameraParameters::KEY_SENSOR_ORIENTATION),
+ updateRequired);
- if ( !isResolutionValid(w, h, mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_SIZES))) {
- CAMHAL_LOGEB("Invalid preview resolution %d x %d", w, h);
- return BAD_VALUE;
- } else {
- mParameters.setPreviewSize(w, h);
- }
+ orientation = params.getInt(TICameraParameters::KEY_SENSOR_ORIENTATION);
+ if ( orientation < 0 || orientation >= 360 || (orientation%90) != 0 ) {
+ CAMHAL_LOGE("Invalid sensor orientation: %s. Value must be one of: [0, 90, 180, 270]", valstr);
+ return BAD_VALUE;
+ }
+ CAMHAL_LOGD("Sensor Orientation is set to %d", orientation);
+ mParameters.set(TICameraParameters::KEY_SENSOR_ORIENTATION, valstr);
+ }
#endif
- if ( ( oldWidth != w ) || ( oldHeight != h ) ) {
- restartPreviewRequired |= true;
+ params.getPreviewSize(&w, &h);
+ if (w == -1 && h == -1) {
+ CAMHAL_LOGEA("Unable to get preview size");
+ return BAD_VALUE;
}
- CAMHAL_LOGDB("PreviewResolution by App %d x %d", w, h);
+ mVideoWidth = w;
+ mVideoHeight = h;
// Handle RECORDING_HINT to Set/Reset Video Mode Parameters
- valstr = params.get(CameraParameters::KEY_RECORDING_HINT);
+ valstr = params.get(android::CameraParameters::KEY_RECORDING_HINT);
if(valstr != NULL)
{
- if(strcmp(valstr, CameraParameters::TRUE) == 0)
+ CAMHAL_LOGDB("Recording Hint is set to %s", valstr);
+ if(strcmp(valstr, android::CameraParameters::TRUE) == 0)
{
- CAMHAL_LOGDB("Recording Hint is set to %s", valstr);
- mParameters.set(CameraParameters::KEY_RECORDING_HINT, valstr);
- videoMode = true;
- int w, h;
-
- params.getPreviewSize(&w, &h);
- CAMHAL_LOGVB("%s Preview Width=%d Height=%d\n", __FUNCTION__, w, h);
- //HACK FOR MMS
- mVideoWidth = w;
- mVideoHeight = h;
- CAMHAL_LOGVB("%s Video Width=%d Height=%d\n", __FUNCTION__, mVideoWidth, mVideoHeight);
-
- setPreferredPreviewRes(w, h);
- mParameters.getPreviewSize(&w, &h);
- CAMHAL_LOGVB("%s Preview Width=%d Height=%d\n", __FUNCTION__, w, h);
- //Avoid restarting preview for MMS HACK
- if ((w != mVideoWidth) && (h != mVideoHeight))
- {
- restartPreviewRequired = false;
- }
+ CAMHAL_LOGVB("Video Resolution: %d x %d", mVideoWidth, mVideoHeight);
+#ifdef OMAP_ENHANCEMENT_VTC
+ if (!mVTCUseCase)
+#endif
+ {
+ int maxFPS, minFPS;
+ params.getPreviewFpsRange(&minFPS, &maxFPS);
+ maxFPS /= CameraHal::VFR_SCALE;
+ if ( ( maxFPS <= SW_SCALING_FPS_LIMIT ) ) {
+ getPreferredPreviewRes(&w, &h);
+ }
+ }
+ mParameters.set(android::CameraParameters::KEY_RECORDING_HINT, valstr);
restartPreviewRequired |= setVideoModeParameters(params);
}
- else if(strcmp(valstr, CameraParameters::FALSE) == 0)
+ else if(strcmp(valstr, android::CameraParameters::FALSE) == 0)
{
- CAMHAL_LOGDB("Recording Hint is set to %s", valstr);
- mParameters.set(CameraParameters::KEY_RECORDING_HINT, valstr);
+ mParameters.set(android::CameraParameters::KEY_RECORDING_HINT, valstr);
restartPreviewRequired |= resetVideoModeParameters();
- params.getPreviewSize(&mVideoWidth, &mVideoHeight);
}
else
{
@@ -450,52 +548,95 @@ int CameraHal::setParameters(const CameraParameters& params)
// ImageCapture activity does not set KEY_RECORDING_HINT to FALSE (i.e. simply NULL),
// then Video Mode parameters may remain present in ImageCapture activity as well.
CAMHAL_LOGDA("Recording Hint is set to NULL");
- mParameters.set(CameraParameters::KEY_RECORDING_HINT, "");
+ mParameters.set(android::CameraParameters::KEY_RECORDING_HINT, "");
restartPreviewRequired |= resetVideoModeParameters();
- params.getPreviewSize(&mVideoWidth, &mVideoHeight);
}
- if ((valstr = params.get(CameraParameters::KEY_FOCUS_MODE)) != NULL) {
+ if ( (!isResolutionValid(w, h, mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_SIZES)))
+ && (!isResolutionValid(w, h, mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_SUBSAMPLED_SIZES)))
+ && (!isResolutionValid(w, h, mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_SIDEBYSIDE_SIZES)))
+ && (!isResolutionValid(w, h, mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_TOPBOTTOM_SIZES))) ) {
+ CAMHAL_LOGEB("Invalid preview resolution %d x %d", w, h);
+ return BAD_VALUE;
+ }
+
+ int oldWidth, oldHeight;
+ mParameters.getPreviewSize(&oldWidth, &oldHeight);
+ if ( ( oldWidth != w ) || ( oldHeight != h ) )
+ {
+ mParameters.setPreviewSize(w, h);
+ restartPreviewRequired = true;
+ }
+
+ CAMHAL_LOGDB("Preview Resolution: %d x %d", w, h);
+
+ if ((valstr = params.get(android::CameraParameters::KEY_FOCUS_MODE)) != NULL) {
if (isParameterValid(valstr, mCameraProperties->get(CameraProperties::SUPPORTED_FOCUS_MODES))) {
CAMHAL_LOGDB("Focus mode set %s", valstr);
// we need to make a decision on the capture mode based on whether CAF picture or
// video is chosen, so the behavior of each is consistent for the application
- if(strcmp(valstr, CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE) == 0){
+ if(strcmp(valstr, android::CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE) == 0){
restartPreviewRequired |= resetVideoModeParameters();
- } else if (strcmp(valstr, CameraParameters::FOCUS_MODE_CONTINUOUS_VIDEO) == 0){
+ } else if (strcmp(valstr, android::CameraParameters::FOCUS_MODE_CONTINUOUS_VIDEO) == 0){
restartPreviewRequired |= setVideoModeParameters(params);
}
- mParameters.set(CameraParameters::KEY_FOCUS_MODE, valstr);
+ mParameters.set(android::CameraParameters::KEY_FOCUS_MODE, valstr);
} else {
CAMHAL_LOGEB("ERROR: Invalid FOCUS mode = %s", valstr);
return BAD_VALUE;
}
}
- ///Below parameters can be changed when the preview is running
- if ( (valstr = params.getPictureFormat()) != NULL ) {
- if (isParameterValid(valstr, mCameraProperties->get(CameraProperties::SUPPORTED_PICTURE_FORMATS))) {
- mParameters.setPictureFormat(valstr);
- } else {
- CAMHAL_LOGEB("ERROR: Invalid picture format: %s",valstr);
- return BAD_VALUE;
- }
+ mRawCapture = false;
+
+#ifdef CAMERAHAL_USE_RAW_IMAGE_SAVING
+ valstr = params.get(TICameraParameters::KEY_CAP_MODE);
+ if ( (!valstr || strcmp(valstr, TICameraParameters::HIGH_QUALITY_MODE) == 0) &&
+ access(kRawImagesOutputDirPath, F_OK) != -1 ) {
+ mRawCapture = true;
}
+#endif
+
+ if ( (valstr = params.get(TICameraParameters::KEY_S3D_CAP_FRAME_LAYOUT)) != NULL )
+ {
+ CAMHAL_LOGDB("Stereo 3D capture image layout is %s", valstr);
+ mParameters.set(TICameraParameters::KEY_S3D_CAP_FRAME_LAYOUT, valstr);
+ }
params.getPictureSize(&w, &h);
- if ( isResolutionValid(w, h, mCameraProperties->get(CameraProperties::SUPPORTED_PICTURE_SIZES))) {
+ if ( (isResolutionValid(w, h, mCameraProperties->get(CameraProperties::SUPPORTED_PICTURE_SIZES)))
+ || (isResolutionValid(w, h, mCameraProperties->get(CameraProperties::SUPPORTED_PICTURE_SUBSAMPLED_SIZES)))
+ || (isResolutionValid(w, h, mCameraProperties->get(CameraProperties::SUPPORTED_PICTURE_TOPBOTTOM_SIZES)))
+ || (isResolutionValid(w, h, mCameraProperties->get(CameraProperties::SUPPORTED_PICTURE_SIDEBYSIDE_SIZES))) ) {
mParameters.setPictureSize(w, h);
} else {
- CAMHAL_LOGEB("ERROR: Invalid picture resolution %dx%d", w, h);
+ CAMHAL_LOGEB("ERROR: Invalid picture resolution %d x %d", w, h);
return BAD_VALUE;
}
CAMHAL_LOGDB("Picture Size by App %d x %d", w, h);
-#ifdef OMAP_ENHANCEMENT
+ if ( (valstr = params.getPictureFormat()) != NULL ) {
+ if (isParameterValid(valstr,mCameraProperties->get(CameraProperties::SUPPORTED_PICTURE_FORMATS))) {
+ if ((strcmp(valstr, android::CameraParameters::PIXEL_FORMAT_BAYER_RGGB) == 0) &&
+ mCameraProperties->get(CameraProperties::MAX_PICTURE_WIDTH) &&
+ mCameraProperties->get(CameraProperties::MAX_PICTURE_HEIGHT)) {
+ unsigned int width = 0, height = 0;
+ // Set picture size to full frame for raw bayer capture
+ width = atoi(mCameraProperties->get(CameraProperties::MAX_PICTURE_WIDTH));
+ height = atoi(mCameraProperties->get(CameraProperties::MAX_PICTURE_HEIGHT));
+ mParameters.setPictureSize(width,height);
+ }
+ mParameters.setPictureFormat(valstr);
+ } else {
+ CAMHAL_LOGEB("ERROR: Invalid picture format: %s",valstr);
+ ret = BAD_VALUE;
+ }
+ }
+#ifdef OMAP_ENHANCEMENT_BURST_CAPTURE
if ((valstr = params.get(TICameraParameters::KEY_BURST)) != NULL) {
if (params.getInt(TICameraParameters::KEY_BURST) >=0) {
CAMHAL_LOGDB("Burst set %s", valstr);
@@ -505,145 +646,181 @@ int CameraHal::setParameters(const CameraParameters& params)
return BAD_VALUE;
}
}
-
#endif
- framerate = params.getPreviewFrameRate();
- valstr = params.get(CameraParameters::KEY_PREVIEW_FPS_RANGE);
- CAMHAL_LOGDB("FRAMERATE %d", framerate);
-
- CAMHAL_LOGVB("Passed FRR: %s, Supported FRR %s", valstr
- , mCameraProperties->get(CameraProperties::FRAMERATE_RANGE_SUPPORTED));
- CAMHAL_LOGVB("Passed FR: %d, Supported FR %s", framerate
- , mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_FRAME_RATES));
-
-
- //Perform parameter validation
- if(!isParameterValid(valstr
- , mCameraProperties->get(CameraProperties::FRAMERATE_RANGE_SUPPORTED))
- || !isParameterValid(framerate,
- mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_FRAME_RATES)))
- {
- CAMHAL_LOGEA("Invalid frame rate range or frame rate");
- return BAD_VALUE;
- }
-
// Variable framerate ranges have higher priority over
// deprecated constant FPS. "KEY_PREVIEW_FPS_RANGE" should
// be cleared by the client in order for constant FPS to get
// applied.
- if ( strcmp(valstr, mCameraProperties->get(CameraProperties::FRAMERATE_RANGE)) != 0)
- {
+ // If Port FPS needs to be used for configuring, then FPS RANGE should not be set by the APP.
+ valstr = params.get(android::CameraParameters::KEY_PREVIEW_FPS_RANGE);
+ if (valstr != NULL && strlen(valstr)) {
+ int curMaxFPS = 0;
+ int curMinFPS = 0;
+
// APP wants to set FPS range
- //Set framerate = MAXFPS
+ // Set framerate = MAXFPS
CAMHAL_LOGDA("APP IS CHANGING FRAME RATE RANGE");
- params.getPreviewFpsRange(&minFPS, &maxFPS);
- if ( ( 0 > minFPS ) || ( 0 > maxFPS ) )
- {
- CAMHAL_LOGEA("ERROR: FPS Range is negative!");
- return BAD_VALUE;
- }
+ mParameters.getPreviewFpsRange(&curMinFPS, &curMaxFPS);
+ CAMHAL_LOGDB("## current minFPS = %d; maxFPS=%d",curMinFPS, curMaxFPS);
- framerate = maxFPS /CameraHal::VFR_SCALE;
-
- }
- else
- {
- if ( framerate != atoi(mCameraProperties->get(CameraProperties::PREVIEW_FRAME_RATE)) )
- {
-
- selectFPSRange(framerate, &minFPS, &maxFPS);
- CAMHAL_LOGDB("Select FPS Range %d %d", minFPS, maxFPS);
- }
- else
- {
- if (videoMode) {
- valstr = mCameraProperties->get(CameraProperties::FRAMERATE_RANGE_VIDEO);
- CameraParameters temp;
- temp.set(CameraParameters::KEY_PREVIEW_FPS_RANGE, valstr);
- temp.getPreviewFpsRange(&minFPS, &maxFPS);
- }
- else {
- valstr = mCameraProperties->get(CameraProperties::FRAMERATE_RANGE_IMAGE);
- CameraParameters temp;
- temp.set(CameraParameters::KEY_PREVIEW_FPS_RANGE, valstr);
- temp.getPreviewFpsRange(&minFPS, &maxFPS);
- }
-
- framerate = maxFPS / CameraHal::VFR_SCALE;
+ params.getPreviewFpsRange(&minFPS, &maxFPS);
+ CAMHAL_LOGDB("## requested minFPS = %d; maxFPS=%d",minFPS, maxFPS);
+ // Validate VFR
+ if (!isFpsRangeValid(minFPS, maxFPS, params.get(android::CameraParameters::KEY_SUPPORTED_PREVIEW_FPS_RANGE)) &&
+ !isFpsRangeValid(minFPS, maxFPS, params.get(TICameraParameters::KEY_FRAMERATE_RANGES_EXT_SUPPORTED))) {
+ CAMHAL_LOGEA("Invalid FPS Range");
+ return BAD_VALUE;
+ } else {
+ framerate = maxFPS / CameraHal::VFR_SCALE;
+ mParameters.setPreviewFrameRate(framerate);
+ CAMHAL_LOGDB("SET FRAMERATE %d", framerate);
+ mParameters.set(android::CameraParameters::KEY_PREVIEW_FPS_RANGE, valstr);
+ CAMHAL_LOGDB("FPS Range = %s", valstr);
+ if ( curMaxFPS == (FRAME_RATE_HIGH_HD * CameraHal::VFR_SCALE) &&
+ maxFPS < (FRAME_RATE_HIGH_HD * CameraHal::VFR_SCALE) ) {
+ restartPreviewRequired = true;
}
+ }
+ } else {
+ framerate = params.getPreviewFrameRate();
+ if (!isParameterValid(framerate, params.get(android::CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES)) &&
+ !isParameterValid(framerate, params.get(TICameraParameters::KEY_FRAMERATES_EXT_SUPPORTED))) {
+ CAMHAL_LOGEA("Invalid frame rate");
+ return BAD_VALUE;
+ }
+ char tmpBuffer[MAX_PROP_VALUE_LENGTH];
- }
-
- CAMHAL_LOGDB("FPS Range = %s", valstr);
- CAMHAL_LOGDB("DEFAULT FPS Range = %s", mCameraProperties->get(CameraProperties::FRAMERATE_RANGE));
-
- minFPS /= CameraHal::VFR_SCALE;
- maxFPS /= CameraHal::VFR_SCALE;
-
- if ( ( 0 == minFPS ) || ( 0 == maxFPS ) )
- {
- CAMHAL_LOGEA("ERROR: FPS Range is invalid!");
- return BAD_VALUE;
- }
-
- if ( maxFPS < minFPS )
- {
- CAMHAL_LOGEA("ERROR: Max FPS is smaller than Min FPS!");
- return BAD_VALUE;
- }
- CAMHAL_LOGDB("SET FRAMERATE %d", framerate);
- mParameters.setPreviewFrameRate(framerate);
- mParameters.set(CameraParameters::KEY_PREVIEW_FPS_RANGE, params.get(CameraParameters::KEY_PREVIEW_FPS_RANGE));
-
- CAMHAL_LOGDB("FPS Range [%d, %d]", minFPS, maxFPS);
- mParameters.set(TICameraParameters::KEY_MINFRAMERATE, minFPS);
- mParameters.set(TICameraParameters::KEY_MAXFRAMERATE, maxFPS);
+ sprintf(tmpBuffer, "%d,%d", framerate * CameraHal::VFR_SCALE, framerate * CameraHal::VFR_SCALE);
+ mParameters.setPreviewFrameRate(framerate);
+ CAMHAL_LOGDB("SET FRAMERATE %d", framerate);
+ mParameters.set(android::CameraParameters::KEY_PREVIEW_FPS_RANGE, tmpBuffer);
+ CAMHAL_LOGDB("FPS Range = %s", tmpBuffer);
+ }
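The two branches above convert between the deprecated constant preview frame rate and the fps-range key using CameraHal::VFR_SCALE: an application-supplied range yields framerate = maxFPS / VFR_SCALE, while a bare frame rate is expanded into a fixed "min,max" range string. A standalone sketch of that conversion, assuming the usual Android scale factor of 1000 (the constant's value is an assumption here, not read from this file):

    #include <cstdio>
    #include <string>

    static const int VFR_SCALE = 1000;  // assumed; Android fps-range keys are expressed in fps * 1000

    // App-supplied range -> constant preview frame rate, as in the if-branch above.
    int frameRateFromRange(int /*minFps*/, int maxFps) {
        return maxFps / VFR_SCALE;
    }

    // Constant frame rate -> synthetic fixed range string, as in the else-branch above.
    std::string rangeFromFrameRate(int fps) {
        char buf[32];
        std::snprintf(buf, sizeof(buf), "%d,%d", fps * VFR_SCALE, fps * VFR_SCALE);
        return std::string(buf);
    }

    int main() {
        std::printf("%d\n", frameRateFromRange(15000, 30000));  // 30
        std::printf("%s\n", rangeFromFrameRate(24).c_str());    // 24000,24000
        return 0;
    }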
- if( ( valstr = params.get(TICameraParameters::KEY_GBCE) ) != NULL )
- {
- CAMHAL_LOGDB("GBCE Value = %s", valstr);
- mParameters.set(TICameraParameters::KEY_GBCE, valstr);
+ if ((valstr = params.get(TICameraParameters::KEY_GBCE)) != NULL) {
+ if (strcmp(mCameraProperties->get(CameraProperties::SUPPORTED_GBCE),
+ android::CameraParameters::TRUE) == 0) {
+ CAMHAL_LOGDB("GBCE %s", valstr);
+ mParameters.set(TICameraParameters::KEY_GBCE, valstr);
+ } else if (strcmp(valstr, android::CameraParameters::TRUE) == 0) {
+ CAMHAL_LOGEB("ERROR: Invalid GBCE: %s", valstr);
+ return BAD_VALUE;
+ } else {
+ mParameters.set(TICameraParameters::KEY_GBCE, android::CameraParameters::FALSE);
}
+ } else {
+ mParameters.set(TICameraParameters::KEY_GBCE, android::CameraParameters::FALSE);
+ }
- if( ( valstr = params.get(TICameraParameters::KEY_GLBCE) ) != NULL )
- {
- CAMHAL_LOGDB("GLBCE Value = %s", valstr);
- mParameters.set(TICameraParameters::KEY_GLBCE, valstr);
+ if ((valstr = params.get(TICameraParameters::KEY_GLBCE)) != NULL) {
+ if (strcmp(mCameraProperties->get(CameraProperties::SUPPORTED_GLBCE),
+ android::CameraParameters::TRUE) == 0) {
+ CAMHAL_LOGDB("GLBCE %s", valstr);
+ mParameters.set(TICameraParameters::KEY_GLBCE, valstr);
+ } else if (strcmp(valstr, android::CameraParameters::TRUE) == 0) {
+ CAMHAL_LOGEB("ERROR: Invalid GLBCE: %s", valstr);
+ return BAD_VALUE;
+ } else {
+ mParameters.set(TICameraParameters::KEY_GLBCE, android::CameraParameters::FALSE);
}
+ } else {
+ mParameters.set(TICameraParameters::KEY_GLBCE, android::CameraParameters::FALSE);
+ }
-#ifdef OMAP_ENHANCEMENT
-
+#ifdef OMAP_ENHANCEMENT_S3D
///Update the current parameter set
- if( (valstr = params.get(TICameraParameters::KEY_AUTOCONVERGENCE)) != NULL)
- {
- CAMHAL_LOGDB("AutoConvergence Mode is set = %s", params.get(TICameraParameters::KEY_AUTOCONVERGENCE));
- mParameters.set(TICameraParameters::KEY_AUTOCONVERGENCE, valstr);
+ if ( (valstr = params.get(TICameraParameters::KEY_AUTOCONVERGENCE_MODE)) != NULL ) {
+ CAMHAL_LOGDB("AutoConvergence mode set = %s", valstr);
+ mParameters.set(TICameraParameters::KEY_AUTOCONVERGENCE_MODE, valstr);
+ }
+
+ if ( (valstr = params.get(TICameraParameters::KEY_MANUAL_CONVERGENCE)) != NULL ) {
+ int manualConvergence = (int)strtol(valstr, 0, 0);
+
+ if ( ( manualConvergence < strtol(mCameraProperties->get(CameraProperties::SUPPORTED_MANUAL_CONVERGENCE_MIN), 0, 0) ) ||
+ ( manualConvergence > strtol(mCameraProperties->get(CameraProperties::SUPPORTED_MANUAL_CONVERGENCE_MAX), 0, 0) ) ) {
+ CAMHAL_LOGEB("ERROR: Invalid Manual Convergence = %d", manualConvergence);
+ return BAD_VALUE;
+ } else {
+ CAMHAL_LOGDB("ManualConvergence Value = %d", manualConvergence);
+ mParameters.set(TICameraParameters::KEY_MANUAL_CONVERGENCE, valstr);
}
+ }
- if( (valstr = params.get(TICameraParameters::KEY_MANUALCONVERGENCE_VALUES)) !=NULL )
- {
- CAMHAL_LOGDB("ManualConvergence Value = %s", params.get(TICameraParameters::KEY_MANUALCONVERGENCE_VALUES));
- mParameters.set(TICameraParameters::KEY_MANUALCONVERGENCE_VALUES, valstr);
+ if((valstr = params.get(TICameraParameters::KEY_MECHANICAL_MISALIGNMENT_CORRECTION)) != NULL) {
+ if ( strcmp(mCameraProperties->get(CameraProperties::MECHANICAL_MISALIGNMENT_CORRECTION_SUPPORTED),
+ android::CameraParameters::TRUE) == 0 ) {
+            CAMHAL_LOGDB("Mechanical Misalignment Correction is %s", valstr);
+ mParameters.set(TICameraParameters::KEY_MECHANICAL_MISALIGNMENT_CORRECTION, valstr);
+ } else {
+ mParameters.remove(TICameraParameters::KEY_MECHANICAL_MISALIGNMENT_CORRECTION);
}
+ }
if ((valstr = params.get(TICameraParameters::KEY_EXPOSURE_MODE)) != NULL) {
if (isParameterValid(valstr, mCameraProperties->get(CameraProperties::SUPPORTED_EXPOSURE_MODES))) {
- CAMHAL_LOGDB("Exposure set = %s", valstr);
+ CAMHAL_LOGDB("Exposure mode set = %s", valstr);
mParameters.set(TICameraParameters::KEY_EXPOSURE_MODE, valstr);
+ if (!strcmp(valstr, TICameraParameters::EXPOSURE_MODE_MANUAL)) {
+ int manualVal;
+ if ((valstr = params.get(TICameraParameters::KEY_MANUAL_EXPOSURE)) != NULL) {
+ manualVal = params.getInt(TICameraParameters::KEY_MANUAL_EXPOSURE);
+ if (manualVal < mParameters.getInt(TICameraParameters::KEY_SUPPORTED_MANUAL_EXPOSURE_MIN) ||
+ manualVal > mParameters.getInt(TICameraParameters::KEY_SUPPORTED_MANUAL_EXPOSURE_MAX)) {
+ CAMHAL_LOGEB("ERROR: Manual Exposure = %s is out of range - "
+ "setting minimum supported value", valstr);
+ valstr = mParameters.get(TICameraParameters::KEY_SUPPORTED_MANUAL_EXPOSURE_MIN);
+ }
+ CAMHAL_LOGDB("Manual Exposure = %s", valstr);
+ mParameters.set(TICameraParameters::KEY_MANUAL_EXPOSURE, valstr);
+ }
+ if ((valstr = params.get(TICameraParameters::KEY_MANUAL_EXPOSURE_RIGHT)) != NULL) {
+ manualVal = params.getInt(TICameraParameters::KEY_MANUAL_EXPOSURE_RIGHT);
+ if (manualVal < mParameters.getInt(TICameraParameters::KEY_SUPPORTED_MANUAL_EXPOSURE_MIN) ||
+ manualVal > mParameters.getInt(TICameraParameters::KEY_SUPPORTED_MANUAL_EXPOSURE_MAX)) {
+ CAMHAL_LOGEB("ERROR: Manual Exposure right = %s is out of range - "
+ "setting minimum supported value", valstr);
+ valstr = mParameters.get(TICameraParameters::KEY_SUPPORTED_MANUAL_EXPOSURE_MIN);
+ }
+ CAMHAL_LOGDB("Manual Exposure right = %s", valstr);
+ mParameters.set(TICameraParameters::KEY_MANUAL_EXPOSURE_RIGHT, valstr);
+ }
+ if ((valstr = params.get(TICameraParameters::KEY_MANUAL_GAIN_ISO)) != NULL) {
+ manualVal = params.getInt(TICameraParameters::KEY_MANUAL_GAIN_ISO);
+ if (manualVal < mParameters.getInt(TICameraParameters::KEY_SUPPORTED_MANUAL_GAIN_ISO_MIN) ||
+ manualVal > mParameters.getInt(TICameraParameters::KEY_SUPPORTED_MANUAL_GAIN_ISO_MAX)) {
+ CAMHAL_LOGEB("ERROR: Manual Gain = %s is out of range - "
+ "setting minimum supported value", valstr);
+ valstr = mParameters.get(TICameraParameters::KEY_SUPPORTED_MANUAL_GAIN_ISO_MIN);
+ }
+ CAMHAL_LOGDB("Manual Gain = %s", valstr);
+ mParameters.set(TICameraParameters::KEY_MANUAL_GAIN_ISO, valstr);
+ }
+ if ((valstr = params.get(TICameraParameters::KEY_MANUAL_GAIN_ISO_RIGHT)) != NULL) {
+ manualVal = params.getInt(TICameraParameters::KEY_MANUAL_GAIN_ISO_RIGHT);
+ if (manualVal < mParameters.getInt(TICameraParameters::KEY_SUPPORTED_MANUAL_GAIN_ISO_MIN) ||
+ manualVal > mParameters.getInt(TICameraParameters::KEY_SUPPORTED_MANUAL_GAIN_ISO_MAX)) {
+ CAMHAL_LOGEB("ERROR: Manual Gain right = %s is out of range - "
+ "setting minimum supported value", valstr);
+ valstr = mParameters.get(TICameraParameters::KEY_SUPPORTED_MANUAL_GAIN_ISO_MIN);
+ }
+ CAMHAL_LOGDB("Manual Gain right = %s", valstr);
+ mParameters.set(TICameraParameters::KEY_MANUAL_GAIN_ISO_RIGHT, valstr);
+ }
+ }
} else {
- CAMHAL_LOGEB("ERROR: Invalid Exposure = %s", valstr);
+ CAMHAL_LOGEB("ERROR: Invalid Exposure mode = %s", valstr);
return BAD_VALUE;
}
}
-
#endif
- if ((valstr = params.get(CameraParameters::KEY_WHITE_BALANCE)) != NULL) {
+ if ((valstr = params.get(android::CameraParameters::KEY_WHITE_BALANCE)) != NULL) {
if ( isParameterValid(valstr, mCameraProperties->get(CameraProperties::SUPPORTED_WHITE_BALANCE))) {
CAMHAL_LOGDB("White balance set %s", valstr);
- mParameters.set(CameraParameters::KEY_WHITE_BALANCE, valstr);
+ mParameters.set(android::CameraParameters::KEY_WHITE_BALANCE, valstr);
} else {
CAMHAL_LOGEB("ERROR: Invalid white balance = %s", valstr);
return BAD_VALUE;
@@ -651,7 +828,6 @@ int CameraHal::setParameters(const CameraParameters& params)
}
#ifdef OMAP_ENHANCEMENT
-
if ((valstr = params.get(TICameraParameters::KEY_CONTRAST)) != NULL) {
if (params.getInt(TICameraParameters::KEY_CONTRAST) >= 0 ) {
CAMHAL_LOGDB("Contrast set %s", valstr);
@@ -691,13 +867,12 @@ int CameraHal::setParameters(const CameraParameters& params)
return BAD_VALUE;
}
}
-
#endif
- if ((valstr = params.get(CameraParameters::KEY_ANTIBANDING)) != NULL) {
+ if ((valstr = params.get(android::CameraParameters::KEY_ANTIBANDING)) != NULL) {
if (isParameterValid(valstr, mCameraProperties->get(CameraProperties::SUPPORTED_ANTIBANDING))) {
CAMHAL_LOGDB("Antibanding set %s", valstr);
- mParameters.set(CameraParameters::KEY_ANTIBANDING, valstr);
+ mParameters.set(android::CameraParameters::KEY_ANTIBANDING, valstr);
} else {
CAMHAL_LOGEB("ERROR: Invalid Antibanding = %s", valstr);
return BAD_VALUE;
@@ -705,7 +880,6 @@ int CameraHal::setParameters(const CameraParameters& params)
}
#ifdef OMAP_ENHANCEMENT
-
if ((valstr = params.get(TICameraParameters::KEY_ISO)) != NULL) {
if (isParameterValid(valstr, mCameraProperties->get(CameraProperties::SUPPORTED_ISO_VALUES))) {
CAMHAL_LOGDB("ISO set %s", valstr);
@@ -715,27 +889,25 @@ int CameraHal::setParameters(const CameraParameters& params)
return BAD_VALUE;
}
}
-
#endif
- if( (valstr = params.get(CameraParameters::KEY_FOCUS_AREAS)) != NULL )
+ if( (valstr = params.get(android::CameraParameters::KEY_FOCUS_AREAS)) != NULL )
{
- CAMHAL_LOGDB("Focus areas position set %s",valstr);
- mParameters.set(CameraParameters::KEY_FOCUS_AREAS, valstr);
+ CAMHAL_LOGDB("Focus areas position set %s", params.get(android::CameraParameters::KEY_FOCUS_AREAS));
+ mParameters.set(android::CameraParameters::KEY_FOCUS_AREAS, valstr);
}
#ifdef OMAP_ENHANCEMENT
-
if( (valstr = params.get(TICameraParameters::KEY_MEASUREMENT_ENABLE)) != NULL )
{
- CAMHAL_LOGDB("Measurements set to %s", params.get(TICameraParameters::KEY_MEASUREMENT_ENABLE));
+ CAMHAL_LOGDB("Measurements set to %s", valstr);
mParameters.set(TICameraParameters::KEY_MEASUREMENT_ENABLE, valstr);
- if (strcmp(valstr, (const char *) TICameraParameters::MEASUREMENT_ENABLE) == 0)
+ if (strcmp(valstr, android::CameraParameters::TRUE) == 0)
{
mMeasurementEnabled = true;
}
- else if (strcmp(valstr, (const char *) TICameraParameters::MEASUREMENT_DISABLE) == 0)
+ else if (strcmp(valstr, android::CameraParameters::FALSE) == 0)
{
mMeasurementEnabled = false;
}
@@ -745,113 +917,107 @@ int CameraHal::setParameters(const CameraParameters& params)
}
}
-
#endif
- if( (valstr = params.get(CameraParameters::KEY_EXPOSURE_COMPENSATION)) != NULL)
+ if( (valstr = params.get(android::CameraParameters::KEY_EXPOSURE_COMPENSATION)) != NULL)
{
- CAMHAL_LOGDB("Exposure compensation set %s", valstr);
- mParameters.set(CameraParameters::KEY_EXPOSURE_COMPENSATION, valstr);
+ CAMHAL_LOGDB("Exposure compensation set %s", params.get(android::CameraParameters::KEY_EXPOSURE_COMPENSATION));
+ mParameters.set(android::CameraParameters::KEY_EXPOSURE_COMPENSATION, valstr);
}
- if ((valstr = params.get(CameraParameters::KEY_SCENE_MODE)) != NULL) {
+ if ((valstr = params.get(android::CameraParameters::KEY_SCENE_MODE)) != NULL) {
if (isParameterValid(valstr, mCameraProperties->get(CameraProperties::SUPPORTED_SCENE_MODES))) {
CAMHAL_LOGDB("Scene mode set %s", valstr);
doesSetParameterNeedUpdate(valstr,
- mParameters.get(CameraParameters::KEY_SCENE_MODE),
+ mParameters.get(android::CameraParameters::KEY_SCENE_MODE),
updateRequired);
- mParameters.set(CameraParameters::KEY_SCENE_MODE, valstr);
+ mParameters.set(android::CameraParameters::KEY_SCENE_MODE, valstr);
} else {
CAMHAL_LOGEB("ERROR: Invalid Scene mode = %s", valstr);
return BAD_VALUE;
}
}
- if ((valstr = params.get(CameraParameters::KEY_FLASH_MODE)) != NULL) {
+ if ((valstr = params.get(android::CameraParameters::KEY_FLASH_MODE)) != NULL) {
if (isParameterValid(valstr, mCameraProperties->get(CameraProperties::SUPPORTED_FLASH_MODES))) {
CAMHAL_LOGDB("Flash mode set %s", valstr);
- mParameters.set(CameraParameters::KEY_FLASH_MODE, valstr);
+ mParameters.set(android::CameraParameters::KEY_FLASH_MODE, valstr);
} else {
CAMHAL_LOGEB("ERROR: Invalid Flash mode = %s", valstr);
return BAD_VALUE;
}
}
- if ((valstr = params.get(CameraParameters::KEY_EFFECT)) != NULL) {
+ if ((valstr = params.get(android::CameraParameters::KEY_EFFECT)) != NULL) {
if (isParameterValid(valstr, mCameraProperties->get(CameraProperties::SUPPORTED_EFFECTS))) {
CAMHAL_LOGDB("Effect set %s", valstr);
- mParameters.set(CameraParameters::KEY_EFFECT, valstr);
+ mParameters.set(android::CameraParameters::KEY_EFFECT, valstr);
} else {
CAMHAL_LOGEB("ERROR: Invalid Effect = %s", valstr);
return BAD_VALUE;
}
}
- varint = params.getInt(CameraParameters::KEY_ROTATION);
- if( varint >=0 )
- {
+ varint = params.getInt(android::CameraParameters::KEY_ROTATION);
+ if ( varint >= 0 ) {
CAMHAL_LOGDB("Rotation set %d", varint);
- mParameters.set(CameraParameters::KEY_ROTATION, varint);
- }
+ mParameters.set(android::CameraParameters::KEY_ROTATION, varint);
+ }
- varint = params.getInt(CameraParameters::KEY_JPEG_QUALITY);
- if( varint >= 0 )
- {
+ varint = params.getInt(android::CameraParameters::KEY_JPEG_QUALITY);
+ if ( varint >= 0 ) {
CAMHAL_LOGDB("Jpeg quality set %d", varint);
- mParameters.set(CameraParameters::KEY_JPEG_QUALITY, varint);
- }
+ mParameters.set(android::CameraParameters::KEY_JPEG_QUALITY, varint);
+ }
- varint = params.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH);
- if( varint >=0 )
- {
+ varint = params.getInt(android::CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH);
+ if ( varint >= 0 ) {
CAMHAL_LOGDB("Thumbnail width set %d", varint);
- mParameters.set(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH, varint);
- }
+ mParameters.set(android::CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH, varint);
+ }
- varint = params.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT);
- if( varint >=0 )
- {
+ varint = params.getInt(android::CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT);
+ if ( varint >= 0 ) {
CAMHAL_LOGDB("Thumbnail width set %d", varint);
- mParameters.set(CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT, varint);
- }
+ mParameters.set(android::CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT, varint);
+ }
- varint = params.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY);
- if( varint >=0 )
- {
+ varint = params.getInt(android::CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY);
+ if ( varint >= 0 ) {
CAMHAL_LOGDB("Thumbnail quality set %d", varint);
- mParameters.set(CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY, varint);
- }
+ mParameters.set(android::CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY, varint);
+ }
- if( (valstr = params.get(CameraParameters::KEY_GPS_LATITUDE)) != NULL )
+ if( (valstr = params.get(android::CameraParameters::KEY_GPS_LATITUDE)) != NULL )
{
- CAMHAL_LOGDB("GPS latitude set %s", valstr);
- mParameters.set(CameraParameters::KEY_GPS_LATITUDE, valstr);
+ CAMHAL_LOGDB("GPS latitude set %s", params.get(android::CameraParameters::KEY_GPS_LATITUDE));
+ mParameters.set(android::CameraParameters::KEY_GPS_LATITUDE, valstr);
}else{
- mParameters.remove(CameraParameters::KEY_GPS_LATITUDE);
+ mParameters.remove(android::CameraParameters::KEY_GPS_LATITUDE);
}
- if( (valstr = params.get(CameraParameters::KEY_GPS_LONGITUDE)) != NULL )
+ if( (valstr = params.get(android::CameraParameters::KEY_GPS_LONGITUDE)) != NULL )
{
- CAMHAL_LOGDB("GPS longitude set %s", valstr);
- mParameters.set(CameraParameters::KEY_GPS_LONGITUDE, valstr);
+ CAMHAL_LOGDB("GPS longitude set %s", params.get(android::CameraParameters::KEY_GPS_LONGITUDE));
+ mParameters.set(android::CameraParameters::KEY_GPS_LONGITUDE, valstr);
}else{
- mParameters.remove(CameraParameters::KEY_GPS_LONGITUDE);
+ mParameters.remove(android::CameraParameters::KEY_GPS_LONGITUDE);
}
- if( (valstr = params.get(CameraParameters::KEY_GPS_ALTITUDE)) != NULL )
+ if( (valstr = params.get(android::CameraParameters::KEY_GPS_ALTITUDE)) != NULL )
{
- CAMHAL_LOGDB("GPS altitude set %s", valstr);
- mParameters.set(CameraParameters::KEY_GPS_ALTITUDE, valstr);
+ CAMHAL_LOGDB("GPS altitude set %s", params.get(android::CameraParameters::KEY_GPS_ALTITUDE));
+ mParameters.set(android::CameraParameters::KEY_GPS_ALTITUDE, valstr);
}else{
- mParameters.remove(CameraParameters::KEY_GPS_ALTITUDE);
+ mParameters.remove(android::CameraParameters::KEY_GPS_ALTITUDE);
}
- if( (valstr = params.get(CameraParameters::KEY_GPS_TIMESTAMP)) != NULL )
+ if( (valstr = params.get(android::CameraParameters::KEY_GPS_TIMESTAMP)) != NULL )
{
- CAMHAL_LOGDB("GPS timestamp set %s", valstr);
- mParameters.set(CameraParameters::KEY_GPS_TIMESTAMP, valstr);
+ CAMHAL_LOGDB("GPS timestamp set %s", params.get(android::CameraParameters::KEY_GPS_TIMESTAMP));
+ mParameters.set(android::CameraParameters::KEY_GPS_TIMESTAMP, valstr);
}else{
- mParameters.remove(CameraParameters::KEY_GPS_TIMESTAMP);
+ mParameters.remove(android::CameraParameters::KEY_GPS_TIMESTAMP);
}
if( (valstr = params.get(TICameraParameters::KEY_GPS_DATESTAMP)) != NULL )
@@ -862,12 +1028,12 @@ int CameraHal::setParameters(const CameraParameters& params)
mParameters.remove(TICameraParameters::KEY_GPS_DATESTAMP);
}
- if( (valstr = params.get(CameraParameters::KEY_GPS_PROCESSING_METHOD)) != NULL )
+ if( (valstr = params.get(android::CameraParameters::KEY_GPS_PROCESSING_METHOD)) != NULL )
{
- CAMHAL_LOGDB("GPS processing method set %s", valstr);
- mParameters.set(CameraParameters::KEY_GPS_PROCESSING_METHOD, valstr);
+ CAMHAL_LOGDB("GPS processing method set %s", params.get(android::CameraParameters::KEY_GPS_PROCESSING_METHOD));
+ mParameters.set(android::CameraParameters::KEY_GPS_PROCESSING_METHOD, valstr);
}else{
- mParameters.remove(CameraParameters::KEY_GPS_PROCESSING_METHOD);
+ mParameters.remove(android::CameraParameters::KEY_GPS_PROCESSING_METHOD);
}
if( (valstr = params.get(TICameraParameters::KEY_GPS_MAPDATUM )) != NULL )
@@ -899,67 +1065,122 @@ int CameraHal::setParameters(const CameraParameters& params)
}
#ifdef OMAP_ENHANCEMENT
-
if( (valstr = params.get(TICameraParameters::KEY_EXP_BRACKETING_RANGE)) != NULL )
{
CAMHAL_LOGDB("Exposure Bracketing set %s", params.get(TICameraParameters::KEY_EXP_BRACKETING_RANGE));
mParameters.set(TICameraParameters::KEY_EXP_BRACKETING_RANGE, valstr);
+ mParameters.remove(TICameraParameters::KEY_EXP_GAIN_BRACKETING_RANGE);
}
- else
+ else if ((valstr = params.get(TICameraParameters::KEY_EXP_GAIN_BRACKETING_RANGE)) != NULL) {
+ CAMHAL_LOGDB("ABS Exposure+Gain Bracketing set %s", params.get(TICameraParameters::KEY_EXP_GAIN_BRACKETING_RANGE));
+ mParameters.set(TICameraParameters::KEY_EXP_GAIN_BRACKETING_RANGE, valstr);
+ mParameters.remove(TICameraParameters::KEY_EXP_BRACKETING_RANGE);
+ } else
{
mParameters.remove(TICameraParameters::KEY_EXP_BRACKETING_RANGE);
}
+ if( (valstr = params.get(TICameraParameters::KEY_ZOOM_BRACKETING_RANGE)) != NULL ) {
+ CAMHAL_LOGDB("Zoom Bracketing range %s", valstr);
+ mParameters.set(TICameraParameters::KEY_ZOOM_BRACKETING_RANGE, valstr);
+ } else {
+ mParameters.remove(TICameraParameters::KEY_ZOOM_BRACKETING_RANGE);
+ }
#endif
- valstr = params.get(CameraParameters::KEY_ZOOM);
- varint = params.getInt(CameraParameters::KEY_ZOOM);
- if ( valstr != NULL ) {
- if ( ( varint >= 0 ) && ( varint <= mMaxZoomSupported ) ) {
- CAMHAL_LOGDB("Zoom set %s", valstr);
+ if ((valstr = params.get(android::CameraParameters::KEY_ZOOM)) != NULL ) {
+ varint = atoi(valstr);
+ if ( varint >= 0 && varint <= mMaxZoomSupported ) {
+ CAMHAL_LOGDB("Zoom set %d", varint);
doesSetParameterNeedUpdate(valstr,
- mParameters.get(CameraParameters::KEY_ZOOM),
+ mParameters.get(android::CameraParameters::KEY_ZOOM),
updateRequired);
- mParameters.set(CameraParameters::KEY_ZOOM, valstr);
+ mParameters.set(android::CameraParameters::KEY_ZOOM, valstr);
} else {
CAMHAL_LOGEB("ERROR: Invalid Zoom: %s", valstr);
return BAD_VALUE;
}
}
- if( (valstr = params.get(CameraParameters::KEY_AUTO_EXPOSURE_LOCK)) != NULL )
+ if( (valstr = params.get(android::CameraParameters::KEY_AUTO_EXPOSURE_LOCK)) != NULL )
{
CAMHAL_LOGDB("Auto Exposure Lock set %s", valstr);
doesSetParameterNeedUpdate(valstr,
- mParameters.get(CameraParameters::KEY_AUTO_EXPOSURE_LOCK),
+ mParameters.get(android::CameraParameters::KEY_AUTO_EXPOSURE_LOCK),
updateRequired);
- mParameters.set(CameraParameters::KEY_AUTO_EXPOSURE_LOCK, valstr);
+ mParameters.set(android::CameraParameters::KEY_AUTO_EXPOSURE_LOCK, valstr);
}
- if( (valstr = params.get(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK)) != NULL )
+ if( (valstr = params.get(android::CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK)) != NULL )
{
CAMHAL_LOGDB("Auto WhiteBalance Lock set %s", valstr);
doesSetParameterNeedUpdate(valstr,
- mParameters.get(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK),
+ mParameters.get(android::CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK),
updateRequired);
- mParameters.set(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK, valstr);
+ mParameters.set(android::CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK, valstr);
}
- if( (valstr = params.get(CameraParameters::KEY_METERING_AREAS)) != NULL )
+ if( (valstr = params.get(android::CameraParameters::KEY_METERING_AREAS)) != NULL )
{
- CAMHAL_LOGDB("Metering areas position set %s", valstr);
- mParameters.set(CameraParameters::KEY_METERING_AREAS, valstr);
+ CAMHAL_LOGDB("Metering areas position set %s", params.get(android::CameraParameters::KEY_METERING_AREAS));
+ mParameters.set(android::CameraParameters::KEY_METERING_AREAS, valstr);
}
- // Only send parameters to adapter if preview is already
- // enabled or doesSetParameterNeedUpdate says so. Initial setParameters to camera adapter,
- // will be called in startPreview()
- // TODO(XXX): Need to identify other parameters that need update from camera adapter
- if ( (NULL != mCameraAdapter) && (mPreviewEnabled || updateRequired) ) {
- ret |= mCameraAdapter->setParameters(mParameters);
+ if( (valstr = params.get(TICameraParameters::RAW_WIDTH)) != NULL ) {
+ CAMHAL_LOGDB("Raw image width set %s", params.get(TICameraParameters::RAW_WIDTH));
+ mParameters.set(TICameraParameters::RAW_WIDTH, valstr);
}
-#ifdef OMAP_ENHANCEMENT
+ if( (valstr = params.get(TICameraParameters::RAW_HEIGHT)) != NULL ) {
+ CAMHAL_LOGDB("Raw image height set %s", params.get(TICameraParameters::RAW_HEIGHT));
+ mParameters.set(TICameraParameters::RAW_HEIGHT, valstr);
+ }
+
+ //TI extensions for enable/disable algos
+ if( (valstr = params.get(TICameraParameters::KEY_ALGO_EXTERNAL_GAMMA)) != NULL )
+ {
+ CAMHAL_LOGDB("External Gamma set %s", valstr);
+ mParameters.set(TICameraParameters::KEY_ALGO_EXTERNAL_GAMMA, valstr);
+ }
+
+ if( (valstr = params.get(TICameraParameters::KEY_ALGO_NSF1)) != NULL )
+ {
+ CAMHAL_LOGDB("NSF1 set %s", valstr);
+ mParameters.set(TICameraParameters::KEY_ALGO_NSF1, valstr);
+ }
+ if( (valstr = params.get(TICameraParameters::KEY_ALGO_NSF2)) != NULL )
+ {
+ CAMHAL_LOGDB("NSF2 set %s", valstr);
+ mParameters.set(TICameraParameters::KEY_ALGO_NSF2, valstr);
+ }
+
+ if( (valstr = params.get(TICameraParameters::KEY_ALGO_SHARPENING)) != NULL )
+ {
+ CAMHAL_LOGDB("Sharpening set %s", valstr);
+ mParameters.set(TICameraParameters::KEY_ALGO_SHARPENING, valstr);
+ }
+
+ if( (valstr = params.get(TICameraParameters::KEY_ALGO_THREELINCOLORMAP)) != NULL )
+ {
+ CAMHAL_LOGDB("Color Conversion set %s", valstr);
+ mParameters.set(TICameraParameters::KEY_ALGO_THREELINCOLORMAP, valstr);
+ }
+
+ if( (valstr = params.get(TICameraParameters::KEY_ALGO_GIC)) != NULL )
+ {
+        CAMHAL_LOGDB("Green Imbalance Correction set %s", valstr);
+ mParameters.set(TICameraParameters::KEY_ALGO_GIC, valstr);
+ }
+
+ if( (valstr = params.get(TICameraParameters::KEY_GAMMA_TABLE)) != NULL )
+ {
+ CAMHAL_LOGDB("Manual gamma table set %s", valstr);
+ mParameters.set(TICameraParameters::KEY_GAMMA_TABLE, valstr);
+ }
+
+ android::CameraParameters adapterParams = mParameters;
+
+#ifdef OMAP_ENHANCEMENT
if( NULL != params.get(TICameraParameters::KEY_TEMP_BRACKETING_RANGE_POS) )
{
int posBracketRange = params.getInt(TICameraParameters::KEY_TEMP_BRACKETING_RANGE_POS);
@@ -982,44 +1203,60 @@ int CameraHal::setParameters(const CameraParameters& params)
CAMHAL_LOGDB("Negative bracketing range %d", mBracketRangeNegative);
if( ( (valstr = params.get(TICameraParameters::KEY_TEMP_BRACKETING)) != NULL) &&
- ( strcmp(valstr, TICameraParameters::BRACKET_ENABLE) == 0 ))
- {
- if ( !mBracketingEnabled )
- {
+ ( strcmp(valstr, android::CameraParameters::TRUE) == 0 )) {
+ if ( !mBracketingEnabled ) {
CAMHAL_LOGDA("Enabling bracketing");
mBracketingEnabled = true;
-
- //Wait for AF events to enable bracketing
- if ( NULL != mCameraAdapter )
- {
- setEventProvider( CameraHalEvent::ALL_EVENTS, mCameraAdapter );
- }
- }
- else
- {
+ } else {
CAMHAL_LOGDA("Bracketing already enabled");
- }
}
- else if ( ( (valstr = params.get(TICameraParameters::KEY_TEMP_BRACKETING)) != NULL ) &&
- ( strcmp(valstr, TICameraParameters::BRACKET_DISABLE) == 0 ))
- {
+ adapterParams.set(TICameraParameters::KEY_TEMP_BRACKETING, valstr);
+ mParameters.set(TICameraParameters::KEY_TEMP_BRACKETING, valstr);
+ } else if ( ( (valstr = params.get(TICameraParameters::KEY_TEMP_BRACKETING)) != NULL ) &&
+ ( strcmp(valstr, android::CameraParameters::FALSE) == 0 )) {
CAMHAL_LOGDA("Disabling bracketing");
+ adapterParams.set(TICameraParameters::KEY_TEMP_BRACKETING, valstr);
+ mParameters.set(TICameraParameters::KEY_TEMP_BRACKETING, valstr);
mBracketingEnabled = false;
- stopImageBracketing();
+ if ( mBracketingRunning ) {
+ stopImageBracketing();
+ }
- //Remove AF events subscription
- if ( NULL != mEventProvider )
- {
- mEventProvider->disableEventNotification( CameraHalEvent::ALL_EVENTS );
- delete mEventProvider;
- mEventProvider = NULL;
- }
+ } else {
+ adapterParams.remove(TICameraParameters::KEY_TEMP_BRACKETING);
+ mParameters.remove(TICameraParameters::KEY_TEMP_BRACKETING);
+ }
+#endif
- }
+#ifdef OMAP_ENHANCEMENT_VTC
+ if (mVTCUseCase && !mTunnelSetup && (mCameraAdapter != NULL) &&
+ ((mParameters.get(TICameraParameters::KEY_VIDEO_ENCODER_HANDLE)) != NULL )&&
+ ((mParameters.get(TICameraParameters::KEY_VIDEO_ENCODER_SLICE_HEIGHT)) != NULL )) {
+
+ uint32_t sliceHeight = mParameters.getInt(TICameraParameters::KEY_VIDEO_ENCODER_SLICE_HEIGHT);
+ uint32_t encoderHandle = mParameters.getInt(TICameraParameters::KEY_VIDEO_ENCODER_HANDLE);
+ int w, h;
+ mParameters.getPreviewSize(&w, &h);
+ status_t done = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_SETUP_TUNNEL, sliceHeight, encoderHandle, w, h);
+ if (done == NO_ERROR) mTunnelSetup = true;
+ ret |= done;
+ }
+#endif
+ // Only send parameters to adapter if preview is already
+    // enabled or doesSetParameterNeedUpdate says so. The initial setParameters call to the
+    // camera adapter will be made in startPreview().
+ // TODO(XXX): Need to identify other parameters that need update from camera adapter
+ if ( (NULL != mCameraAdapter) &&
+ (mPreviewEnabled || updateRequired) &&
+ (!(mPreviewEnabled && restartPreviewRequired)) ) {
+ ret |= mCameraAdapter->setParameters(adapterParams);
+ }
+
+#ifdef OMAP_ENHANCEMENT
if( ( (valstr = params.get(TICameraParameters::KEY_SHUTTER_ENABLE)) != NULL ) &&
- ( strcmp(valstr, TICameraParameters::SHUTTER_ENABLE) == 0 ))
+ ( strcmp(valstr, android::CameraParameters::TRUE) == 0 ))
{
CAMHAL_LOGDA("Enabling shutter sound");
@@ -1028,7 +1265,7 @@ int CameraHal::setParameters(const CameraParameters& params)
mParameters.set(TICameraParameters::KEY_SHUTTER_ENABLE, valstr);
}
else if ( ( (valstr = params.get(TICameraParameters::KEY_SHUTTER_ENABLE)) != NULL ) &&
- ( strcmp(valstr, TICameraParameters::SHUTTER_DISABLE) == 0 ))
+ ( strcmp(valstr, android::CameraParameters::FALSE) == 0 ))
{
CAMHAL_LOGDA("Disabling shutter sound");
@@ -1036,9 +1273,7 @@ int CameraHal::setParameters(const CameraParameters& params)
mMsgEnabled &= ~CAMERA_MSG_SHUTTER;
mParameters.set(TICameraParameters::KEY_SHUTTER_ENABLE, valstr);
}
-
#endif
-
}
//On fail restore old parameters
@@ -1053,8 +1288,21 @@ int CameraHal::setParameters(const CameraParameters& params)
ret = restartPreview();
} else if (restartPreviewRequired && !previewEnabled() &&
mDisplayPaused && !mRecordingEnabled) {
- CAMHAL_LOGDA("Stopping Preview");
- forceStopPreview();
+ CAMHAL_LOGDA("Restarting preview in paused mode");
+ ret = restartPreview();
+
+ // TODO(XXX): If there is some delay between the restartPreview call and the code
+ // below, then the user could see some preview frames and callbacks. Let's find
+ // a better place to put this later...
+ if (ret == NO_ERROR) {
+ mDisplayPaused = true;
+ mPreviewEnabled = false;
+ ret = mDisplayAdapter->pauseDisplay(mDisplayPaused);
+ }
+ }
+
+ if ( !mBracketingRunning && mBracketingEnabled ) {
+ startImageBracketing();
}
if (ret != NO_ERROR)
@@ -1082,45 +1330,35 @@ status_t CameraHal::allocPreviewBufs(int width, int height, const char* previewF
return NO_MEMORY;
}
- if(!mPreviewBufs)
+ if(!mPreviewBuffers)
{
- ///@todo Pluralise the name of this method to allocateBuffers
mPreviewLength = 0;
- mPreviewBufs = (int32_t *) mDisplayAdapter->allocateBuffer(width, height,
+ mPreviewBuffers = mDisplayAdapter->allocateBufferList(width, height,
previewFormat,
mPreviewLength,
buffercount);
-
- if (NULL == mPreviewBufs ) {
+ if (NULL == mPreviewBuffers ) {
CAMHAL_LOGEA("Couldn't allocate preview buffers");
return NO_MEMORY;
- }
+ }
mPreviewOffsets = (uint32_t *) mDisplayAdapter->getOffsets();
if ( NULL == mPreviewOffsets ) {
CAMHAL_LOGEA("Buffer mapping failed");
return BAD_VALUE;
- }
-
- mPreviewFd = mDisplayAdapter->getFd();
- if ( -1 == mPreviewFd ) {
- CAMHAL_LOGEA("Invalid handle");
- return BAD_VALUE;
- }
+ }
mBufProvider = (BufferProvider*) mDisplayAdapter.get();
ret = mDisplayAdapter->maxQueueableBuffers(max_queueable);
if (ret != NO_ERROR) {
return ret;
- }
-
+ }
}
LOG_FUNCTION_NAME_EXIT;
return ret;
-
}
status_t CameraHal::freePreviewBufs()
@@ -1128,12 +1366,11 @@ status_t CameraHal::freePreviewBufs()
status_t ret = NO_ERROR;
LOG_FUNCTION_NAME;
- CAMHAL_LOGDB("mPreviewBufs = 0x%x", (unsigned int)mPreviewBufs);
- if(mPreviewBufs)
+ CAMHAL_LOGDB("mPreviewBuffers = %p", mPreviewBuffers);
+ if(mPreviewBuffers)
{
- ///@todo Pluralise the name of this method to freeBuffers
- ret = mBufProvider->freeBuffer(mPreviewBufs);
- mPreviewBufs = NULL;
+ ret = mBufProvider->freeBufferList(mPreviewBuffers);
+ mPreviewBuffers = NULL;
LOG_FUNCTION_NAME_EXIT;
return ret;
}
@@ -1153,7 +1390,7 @@ status_t CameraHal::allocPreviewDataBufs(size_t size, size_t bufferCount)
if ( NO_ERROR == ret )
{
- if( NULL != mPreviewDataBufs )
+ if( NULL != mPreviewDataBuffers )
{
ret = freePreviewDataBufs();
}
@@ -1162,10 +1399,10 @@ status_t CameraHal::allocPreviewDataBufs(size_t size, size_t bufferCount)
if ( NO_ERROR == ret )
{
bytes = ((bytes+4095)/4096)*4096;
- mPreviewDataBufs = (int32_t *)mMemoryManager->allocateBuffer(0, 0, NULL, bytes, bufferCount);
+ mPreviewDataBuffers = mMemoryManager->allocateBufferList(0, 0, NULL, bytes, bufferCount);
CAMHAL_LOGDB("Size of Preview data buffer = %d", bytes);
- if( NULL == mPreviewDataBufs )
+ if( NULL == mPreviewDataBuffers )
{
CAMHAL_LOGEA("Couldn't allocate image buffers using memory manager");
ret = -NO_MEMORY;
@@ -1189,7 +1426,7 @@ status_t CameraHal::allocPreviewDataBufs(size_t size, size_t bufferCount)
mPreviewDataOffsets = NULL;
}
- LOG_FUNCTION_NAME;
+ LOG_FUNCTION_NAME_EXIT;
return ret;
}
@@ -1203,12 +1440,11 @@ status_t CameraHal::freePreviewDataBufs()
if ( NO_ERROR == ret )
{
- if( NULL != mPreviewDataBufs )
+ if( NULL != mPreviewDataBuffers )
{
- ///@todo Pluralise the name of this method to freeBuffers
- ret = mMemoryManager->freeBuffer(mPreviewDataBufs);
- mPreviewDataBufs = NULL;
+ ret = mMemoryManager->freeBufferList(mPreviewDataBuffers);
+ mPreviewDataBuffers = NULL;
}
}
@@ -1218,51 +1454,44 @@ status_t CameraHal::freePreviewDataBufs()
return ret;
}
-status_t CameraHal::allocImageBufs(unsigned int width, unsigned int height, size_t size, const char* previewFormat, unsigned int bufferCount)
+status_t CameraHal::allocImageBufs(unsigned int width, unsigned int height, size_t size,
+ const char* previewFormat, unsigned int bufferCount)
{
status_t ret = NO_ERROR;
- int bytes;
+ int bytes = size;
LOG_FUNCTION_NAME;
- bytes = size;
-
// allocate image buffers only if not already allocated
- if(NULL != mImageBufs) {
+ if(NULL != mImageBuffers) {
return NO_ERROR;
}
- if ( NO_ERROR == ret )
- {
+ if ( NO_ERROR == ret ) {
bytes = ((bytes+4095)/4096)*4096;
- mImageBufs = (int32_t *)mMemoryManager->allocateBuffer(0, 0, previewFormat, bytes, bufferCount);
-
+ mImageBuffers = mMemoryManager->allocateBufferList(0, 0, previewFormat, bytes, bufferCount);
CAMHAL_LOGDB("Size of Image cap buffer = %d", bytes);
- if( NULL == mImageBufs )
- {
+ if( NULL == mImageBuffers ) {
CAMHAL_LOGEA("Couldn't allocate image buffers using memory manager");
ret = -NO_MEMORY;
- }
- else
- {
+ } else {
bytes = size;
- }
}
+ }
- if ( NO_ERROR == ret )
- {
+ if ( NO_ERROR == ret ) {
mImageFd = mMemoryManager->getFd();
mImageLength = bytes;
mImageOffsets = mMemoryManager->getOffsets();
- }
- else
- {
+ mImageCount = bufferCount;
+ } else {
mImageFd = -1;
mImageLength = 0;
mImageOffsets = NULL;
- }
+ mImageCount = 0;
+ }
- LOG_FUNCTION_NAME;
+ LOG_FUNCTION_NAME_EXIT;
return ret;
}
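Both allocPreviewDataBufs() and allocImageBufs() above round the requested size up to a whole number of 4096-byte pages before asking the memory manager for the buffer list. A small worked sketch of that rounding (standalone arithmetic, not HAL code):

    #include <cassert>
    #include <cstddef>

    // Round a byte count up to the next multiple of the 4 KiB page size,
    // exactly as done before allocateBufferList() in the functions above.
    static size_t alignToPage(size_t bytes) {
        return ((bytes + 4095) / 4096) * 4096;
    }

    int main() {
        assert(alignToPage(4096)    == 4096);     // already aligned: unchanged
        assert(alignToPage(4097)    == 8192);     // one byte over: next page
        assert(alignToPage(1000000) == 1003520);  // 245 pages of 4096 bytes
        return 0;
    }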
@@ -1272,36 +1501,38 @@ status_t CameraHal::allocVideoBufs(uint32_t width, uint32_t height, uint32_t buf
status_t ret = NO_ERROR;
LOG_FUNCTION_NAME;
- if( NULL != mVideoBufs ){
- ret = freeVideoBufs(mVideoBufs);
- mVideoBufs = NULL;
+ if( NULL != mVideoBuffers ){
+ ret = freeVideoBufs(mVideoBuffers);
+ mVideoBuffers = NULL;
}
if ( NO_ERROR == ret ){
int32_t stride;
- buffer_handle_t *bufsArr = new buffer_handle_t [bufferCount];
+ CameraBuffer *buffers = new CameraBuffer [bufferCount];
+
+ memset (buffers, 0, sizeof(CameraBuffer) * bufferCount);
- if (bufsArr != NULL){
- for (int i = 0; i< bufferCount; i++){
- GraphicBufferAllocator &GrallocAlloc = GraphicBufferAllocator::get();
- buffer_handle_t buf;
- ret = GrallocAlloc.alloc(width, height, HAL_PIXEL_FORMAT_NV12, CAMHAL_GRALLOC_USAGE, &buf, &stride);
+ if (buffers != NULL){
+ for (unsigned int i = 0; i< bufferCount; i++){
+ android::GraphicBufferAllocator &GrallocAlloc = android::GraphicBufferAllocator::get();
+ buffer_handle_t handle;
+ ret = GrallocAlloc.alloc(width, height, HAL_PIXEL_FORMAT_NV12, CAMHAL_GRALLOC_USAGE, &handle, &stride);
if (ret != NO_ERROR){
CAMHAL_LOGEA("Couldn't allocate video buffers using Gralloc");
ret = -NO_MEMORY;
- for (int j=0; j< i; j++){
- buf = (buffer_handle_t)bufsArr[j];
- CAMHAL_LOGEB("Freeing Gralloc Buffer 0x%x", buf);
- GrallocAlloc.free(buf);
+ for (unsigned int j=0; j< i; j++){
+                        CAMHAL_LOGEB("Freeing Gralloc Buffer %p", buffers[j].opaque);
+                        GrallocAlloc.free((buffer_handle_t)buffers[j].opaque);
}
- delete [] bufsArr;
+ delete [] buffers;
goto exit;
}
- bufsArr[i] = buf;
- CAMHAL_LOGVB("*** Gralloc Handle =0x%x ***", buf);
+ buffers[i].type = CAMERA_BUFFER_GRALLOC;
+ buffers[i].opaque = (void *)handle;
+ CAMHAL_LOGVB("*** Gralloc Handle =0x%x ***", handle);
}
- mVideoBufs = (int32_t *)bufsArr;
+ mVideoBuffers = buffers;
}
else{
CAMHAL_LOGEA("Couldn't allocate video buffers ");
@@ -1310,11 +1541,53 @@ status_t CameraHal::allocVideoBufs(uint32_t width, uint32_t height, uint32_t buf
}
exit:
- LOG_FUNCTION_NAME;
+ LOG_FUNCTION_NAME_EXIT;
return ret;
}
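allocVideoBufs() above obtains the gralloc handles one at a time; if any allocation fails, it frees the handles already obtained before deleting the array and bailing out. The same all-or-nothing pattern, sketched with a stand-in allocator so the example is self-contained (tryAlloc/release are placeholders, not gralloc calls):

    #include <cstdio>
    #include <vector>

    // Placeholder allocator: pretend every allocation after the third one fails.
    static bool tryAlloc(int index, int &handle) {
        if (index >= 3) return false;
        handle = 100 + index;
        return true;
    }

    static void release(int handle) { std::printf("releasing %d\n", handle); }

    // Allocate `count` handles; on failure, free only the entries that were
    // successfully allocated (note the cleanup loop runs over j < i).
    static bool allocAll(int count, std::vector<int> &handles) {
        handles.assign(count, -1);
        for (int i = 0; i < count; ++i) {
            if (!tryAlloc(i, handles[i])) {
                for (int j = 0; j < i; ++j) {
                    release(handles[j]);
                }
                handles.clear();
                return false;
            }
        }
        return true;
    }

    int main() {
        std::vector<int> handles;
        if (!allocAll(5, handles)) {
            std::printf("allocation failed, cleaned up\n");
        }
        return 0;
    }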
+status_t CameraHal::allocRawBufs(int width, int height, const char* previewFormat, int bufferCount)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME
+
+
+ ///@todo Enhance this method allocImageBufs() to take in a flag for burst capture
+ ///Always allocate the buffers for image capture using MemoryManager
+ if (NO_ERROR == ret) {
+ if(( NULL != mVideoBuffers )) {
+ // Re-use the buffer for raw capture.
+ return ret;
+ }
+ }
+
+ if ( NO_ERROR == ret ) {
+ mVideoLength = 0;
+ mVideoLength = (((width * height * 2) + 4095)/4096)*4096;
+ mVideoBuffers = mMemoryManager->allocateBufferList(width, height, previewFormat,
+ mVideoLength, bufferCount);
+
+ CAMHAL_LOGDB("Size of Video cap buffer (used for RAW capture) %d", mVideoLength);
+ if( NULL == mVideoBuffers ) {
+ CAMHAL_LOGEA("Couldn't allocate Video buffers using memory manager");
+ ret = -NO_MEMORY;
+ }
+ }
+
+ if ( NO_ERROR == ret ) {
+ mVideoFd = mMemoryManager->getFd();
+ mVideoOffsets = mMemoryManager->getOffsets();
+ } else {
+ mVideoFd = -1;
+ mVideoOffsets = NULL;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
void endImageCapture( void *userData)
{
LOG_FUNCTION_NAME;
@@ -1344,11 +1617,18 @@ status_t CameraHal::signalEndImageCapture()
{
status_t ret = NO_ERROR;
int w,h;
- CameraParameters adapterParams = mParameters;
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
LOG_FUNCTION_NAME;
+ if (mBufferSourceAdapter_Out.get()) {
+ mBufferSourceAdapter_Out->disableDisplay();
+ }
+
+ if (mBufferSourceAdapter_In.get()) {
+ mBufferSourceAdapter_In->disableDisplay();
+ }
+
if ( mBracketingRunning ) {
stopImageBracketing();
} else {
@@ -1366,50 +1646,42 @@ status_t CameraHal::freeImageBufs()
LOG_FUNCTION_NAME;
- if ( NO_ERROR == ret )
- {
-
- if( NULL != mImageBufs )
- {
-
- ///@todo Pluralise the name of this method to freeBuffers
- ret = mMemoryManager->freeBuffer(mImageBufs);
- mImageBufs = NULL;
+ if (NULL == mImageBuffers) {
+ return -EINVAL;
+ }
- }
- else
- {
- ret = -EINVAL;
- }
+ if (mBufferSourceAdapter_Out.get()) {
+ mBufferSourceAdapter_Out = 0;
+ } else {
+ ret = mMemoryManager->freeBufferList(mImageBuffers);
+ }
- }
+ mImageBuffers = NULL;
LOG_FUNCTION_NAME_EXIT;
return ret;
}
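In the rewritten freeImageBufs() above, who releases the storage depends on where it came from: buffers supplied through the output BufferSourceAdapter are released by dropping that adapter reference, while buffers obtained from the MemoryManager are returned through freeBufferList(). A condensed sketch of that ownership split with stand-in types (all names below are illustrative only):

    #include <cerrno>
    #include <cstdio>
    #include <memory>

    struct BufferList { /* opaque buffer storage */ };

    struct StubMemoryManager {
        int freeBufferList(BufferList *list) {
            std::printf("memory manager frees %p\n", static_cast<void *>(list));
            return 0;
        }
    };

    // Mirrors the logic above: an external buffer source owns its buffers, so
    // dropping the reference is enough; otherwise the memory manager frees them.
    int freeImageBuffers(BufferList *&buffers,
                         std::shared_ptr<int> &bufferSourceOut,
                         StubMemoryManager &memMgr) {
        if (buffers == nullptr) {
            return -EINVAL;
        }
        int ret = 0;
        if (bufferSourceOut) {
            bufferSourceOut.reset();   // adapter-owned: just drop the reference
        } else {
            ret = memMgr.freeBufferList(buffers);
        }
        buffers = nullptr;
        return ret;
    }

    int main() {
        BufferList list;
        BufferList *buffers = &list;
        std::shared_ptr<int> noAdapter;   // no external buffer source in this run
        StubMemoryManager memMgr;
        return freeImageBuffers(buffers, noAdapter, memMgr);
    }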
-status_t CameraHal::freeVideoBufs(void *bufs)
+status_t CameraHal::freeVideoBufs(CameraBuffer *bufs)
{
status_t ret = NO_ERROR;
LOG_FUNCTION_NAME;
- buffer_handle_t *pBuf = (buffer_handle_t*)bufs;
int count = atoi(mCameraProperties->get(CameraProperties::REQUIRED_PREVIEW_BUFS));
- if(pBuf == NULL)
+ if(bufs == NULL)
{
CAMHAL_LOGEA("NULL pointer passed to freeVideoBuffer");
LOG_FUNCTION_NAME_EXIT;
return BAD_VALUE;
}
- GraphicBufferAllocator &GrallocAlloc = GraphicBufferAllocator::get();
+ android::GraphicBufferAllocator &GrallocAlloc = android::GraphicBufferAllocator::get();
for(int i = 0; i < count; i++){
- buffer_handle_t ptr = *pBuf++;
- CAMHAL_LOGVB("Free Video Gralloc Handle 0x%x", ptr);
- GrallocAlloc.free(ptr);
+ CAMHAL_LOGVB("Free Video Gralloc Handle 0x%x", bufs[i].opaque);
+ GrallocAlloc.free((buffer_handle_t)bufs[i].opaque);
}
LOG_FUNCTION_NAME_EXIT;
@@ -1417,6 +1689,27 @@ status_t CameraHal::freeVideoBufs(void *bufs)
return ret;
}
+status_t CameraHal::freeRawBufs()
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME
+
+ if ( NO_ERROR == ret ) {
+ if( NULL != mVideoBuffers ) {
+ ///@todo Pluralise the name of this method to freeBuffers
+ ret = mMemoryManager->freeBufferList(mVideoBuffers);
+ mVideoBuffers = NULL;
+ } else {
+ ret = -EINVAL;
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT
+
+ return ret;
+}
+
/**
@brief Start preview mode.
@@ -1425,13 +1718,117 @@ status_t CameraHal::freeVideoBufs(void *bufs)
@todo Update function header with the different errors that are possible
*/
-status_t CameraHal::startPreview()
+status_t CameraHal::startPreview() {
+ LOG_FUNCTION_NAME;
+
+ // When tunneling is enabled during VTC, startPreview happens in 2 steps:
+ // When the application sends the command CAMERA_CMD_PREVIEW_INITIALIZATION,
+ // cameraPreviewInitialization() is called, which in turn causes the CameraAdapter
+ // to move from loaded to idle state. And when the application calls startPreview,
+ // the CameraAdapter moves from idle to executing state.
+ //
+ // If the application calls startPreview() without sending the command
+ // CAMERA_CMD_PREVIEW_INITIALIZATION, then the function cameraPreviewInitialization()
+ // AND startPreview() are executed. In other words, if the application calls
+ // startPreview() without sending the command CAMERA_CMD_PREVIEW_INITIALIZATION,
+ // then the CameraAdapter moves from loaded to idle to executing state in one shot.
+ status_t ret = cameraPreviewInitialization();
+
+ // The flag mPreviewInitializationDone is set to true at the end of the function
+ // cameraPreviewInitialization(). Therefore, if everything goes alright, then the
+ // flag will be set. Sometimes, the function cameraPreviewInitialization() may
+ // return prematurely if all the resources are not available for starting preview.
+ // For example, if the preview window is not set, then it would return NO_ERROR.
+ // Under such circumstances, one should return from startPreview as well and should
+ // not continue execution. That is why we check the flag and not the return value.
+ if (!mPreviewInitializationDone) return ret;
+
+ // Once startPreview is called, there is no need to continue to remember whether
+ // the function cameraPreviewInitialization() was called earlier or not. And so
+ // the flag mPreviewInitializationDone is reset here. Plus, this preserves the
+ // current behavior of startPreview under the circumstances where the application
+ // calls startPreview twice or more.
+ mPreviewInitializationDone = false;
+
+ ///Enable the display adapter if present, actual overlay enable happens when we post the buffer
+ if(mDisplayAdapter.get() != NULL) {
+ CAMHAL_LOGDA("Enabling display");
+ int width, height;
+ mParameters.getPreviewSize(&width, &height);
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+ ret = mDisplayAdapter->enableDisplay(width, height, &mStartPreview);
+#else
+ ret = mDisplayAdapter->enableDisplay(width, height, NULL);
+#endif
+
+ if ( ret != NO_ERROR ) {
+ CAMHAL_LOGEA("Couldn't enable display");
+
+ // FIXME: At this stage mStateSwitchLock is locked and unlock is supposed to be called
+ // only from mCameraAdapter->sendCommand(CameraAdapter::CAMERA_START_PREVIEW)
+ // below. But this will never happen because of goto error. Thus at next
+ // startPreview() call CameraHAL will be deadlocked.
+ // Need to revisit mStateSwitch lock, for now just abort the process.
+ CAMHAL_ASSERT_X(false,
+ "At this stage mCameraAdapter->mStateSwitchLock is still locked, "
+ "deadlock is guaranteed");
+
+ goto error;
+ }
+
+ }
+
+ ///Send START_PREVIEW command to adapter
+ CAMHAL_LOGDA("Starting CameraAdapter preview mode");
+
+ ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_START_PREVIEW);
+
+ if(ret!=NO_ERROR) {
+ CAMHAL_LOGEA("Couldn't start preview w/ CameraAdapter");
+ goto error;
+ }
+ CAMHAL_LOGDA("Started preview");
+
+ mPreviewEnabled = true;
+ mPreviewStartInProgress = false;
+ return ret;
+
+ error:
+
+ CAMHAL_LOGEA("Performing cleanup after error");
+
+ //Do all the cleanup
+ freePreviewBufs();
+ mCameraAdapter->sendCommand(CameraAdapter::CAMERA_STOP_PREVIEW);
+ if(mDisplayAdapter.get() != NULL) {
+ mDisplayAdapter->disableDisplay(false);
+ }
+ mAppCallbackNotifier->stop();
+ mPreviewStartInProgress = false;
+ mPreviewEnabled = false;
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
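An illustrative sketch of the two client call sequences described in the comment at the top of startPreview(). The android::Camera handle and the availability of CAMERA_CMD_PREVIEW_INITIALIZATION in the client build are assumptions, not part of this change; the command itself is the one handled in sendCommand() further below.

    // VTC/tunneling path: initialization and start are split into two calls.
    void startPreviewTunneled(const android::sp<android::Camera> &camera)
    {
        camera->sendCommand(CAMERA_CMD_PREVIEW_INITIALIZATION, 0, 0); // adapter: loaded -> idle
        camera->startPreview();                                       // adapter: idle -> executing
    }

    // Regular path: a single startPreview() runs cameraPreviewInitialization()
    // and the adapter start in one shot (loaded -> idle -> executing).
    void startPreviewDirect(const android::sp<android::Camera> &camera)
    {
        camera->startPreview();
    }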
+
+////////////
+/**
+ @brief Set preview mode related initialization
+ -> Camera Adapter set params
+ -> Allocate buffers
+ -> Set use buffers for preview
+ @param none
+ @return NO_ERROR
+ @todo Update function header with the different errors that are possible
+
+ */
+status_t CameraHal::cameraPreviewInitialization()
{
status_t ret = NO_ERROR;
CameraAdapter::BuffersDescriptor desc;
CameraFrame frame;
- const char *valstr = NULL;
unsigned int required_buffer_count;
unsigned int max_queueble_buffers;
@@ -1441,6 +1838,10 @@ status_t CameraHal::startPreview()
LOG_FUNCTION_NAME;
+ if (mPreviewInitializationDone) {
+ return NO_ERROR;
+ }
+
if ( mPreviewEnabled ){
CAMHAL_LOGDA("Preview already running");
LOG_FUNCTION_NAME_EXIT;
@@ -1461,15 +1862,11 @@ status_t CameraHal::startPreview()
///Update the current preview width and height
mPreviewWidth = frame.mWidth;
mPreviewHeight = frame.mHeight;
- //Update the padded width and height - required for VNF and VSTAB
- mParameters.set(TICameraParameters::KEY_PADDED_WIDTH, mPreviewWidth);
- mParameters.set(TICameraParameters::KEY_PADDED_HEIGHT, mPreviewHeight);
-
}
///If we don't have the preview callback enabled and display adapter,
if(!mSetPreviewWindowCalled || (mDisplayAdapter.get() == NULL)){
- CAMHAL_LOGDA("Preview not started. Preview in progress flag set");
+ CAMHAL_LOGD("Preview not started. Preview in progress flag set");
mPreviewStartInProgress = true;
ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_SWITCH_TO_EXECUTING);
if ( NO_ERROR != ret ){
@@ -1499,10 +1896,11 @@ status_t CameraHal::startPreview()
{
mAppCallbackNotifier->enableMsgType (CAMERA_MSG_PREVIEW_FRAME);
}
+
+ signalEndImageCapture();
return ret;
}
-
required_buffer_count = atoi(mCameraProperties->get(CameraProperties::REQUIRED_PREVIEW_BUFS));
///Allocate the preview buffers
@@ -1534,7 +1932,7 @@ status_t CameraHal::startPreview()
if ( NO_ERROR == ret )
{
- desc.mBuffers = mPreviewDataBufs;
+ desc.mBuffers = mPreviewDataBuffers;
desc.mOffsets = mPreviewDataOffsets;
desc.mFd = mPreviewDataFd;
desc.mLength = mPreviewDataLength;
@@ -1548,7 +1946,7 @@ status_t CameraHal::startPreview()
}
///Pass the buffers to Camera Adapter
- desc.mBuffers = mPreviewBufs;
+ desc.mBuffers = mPreviewBuffers;
desc.mOffsets = mPreviewOffsets;
desc.mFd = mPreviewFd;
desc.mLength = mPreviewLength;
@@ -1565,8 +1963,6 @@ status_t CameraHal::startPreview()
return ret;
}
- mAppCallbackNotifier->startPreviewCallbacks(mParameters, mPreviewBufs, mPreviewOffsets, mPreviewFd, mPreviewLength, required_buffer_count);
-
///Start the callback notifier
ret = mAppCallbackNotifier->start();
@@ -1586,72 +1982,10 @@ status_t CameraHal::startPreview()
goto error;
}
- ///Enable the display adapter if present, actual overlay enable happens when we post the buffer
- if(mDisplayAdapter.get() != NULL)
- {
- CAMHAL_LOGDA("Enabling display");
- bool isS3d = false;
- DisplayAdapter::S3DParameters s3dParams;
- int width, height;
- mParameters.getPreviewSize(&width, &height);
-#if 0 //TODO: s3d is not part of bringup...will reenable
- if ( (valstr = mParameters.get(TICameraParameters::KEY_S3D_SUPPORTED)) != NULL) {
- isS3d = (strcmp(valstr, "true") == 0);
- }
- if ( (valstr = mParameters.get(TICameraParameters::KEY_S3D2D_PREVIEW)) != NULL) {
- if (strcmp(valstr, "off") == 0)
- {
- CAMHAL_LOGEA("STEREO 3D->2D PREVIEW MODE IS OFF");
- //TODO: obtain the frame packing configuration from camera or user settings
- //once side by side configuration is supported
- s3dParams.mode = OVERLAY_S3D_MODE_ON;
- s3dParams.framePacking = OVERLAY_S3D_FORMAT_OVERUNDER;
- s3dParams.order = OVERLAY_S3D_ORDER_LF;
- s3dParams.subSampling = OVERLAY_S3D_SS_NONE;
- }
- else
- {
- CAMHAL_LOGEA("STEREO 3D->2D PREVIEW MODE IS ON");
- s3dParams.mode = OVERLAY_S3D_MODE_OFF;
- s3dParams.framePacking = OVERLAY_S3D_FORMAT_OVERUNDER;
- s3dParams.order = OVERLAY_S3D_ORDER_LF;
- s3dParams.subSampling = OVERLAY_S3D_SS_NONE;
- }
- }
-#endif //if 0
-
-#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
-
- ret = mDisplayAdapter->enableDisplay(width, height, &mStartPreview, isS3d ? &s3dParams : NULL);
-
-#else
-
- ret = mDisplayAdapter->enableDisplay(width, height, NULL, isS3d ? &s3dParams : NULL);
-
-#endif
+ if (ret == NO_ERROR) mPreviewInitializationDone = true;
- if ( ret != NO_ERROR )
- {
- CAMHAL_LOGEA("Couldn't enable display");
- goto error;
- }
+ mAppCallbackNotifier->startPreviewCallbacks(mParameters, mPreviewBuffers, mPreviewOffsets, mPreviewFd, mPreviewLength, required_buffer_count);
- }
-
- ///Send START_PREVIEW command to adapter
- CAMHAL_LOGDA("Starting CameraAdapter preview mode");
-
- ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_START_PREVIEW);
-
- if(ret!=NO_ERROR)
- {
- CAMHAL_LOGEA("Couldn't start preview w/ CameraAdapter");
- goto error;
- }
- CAMHAL_LOGDA("Started preview");
-
- mPreviewEnabled = true;
- mPreviewStartInProgress = false;
return ret;
error:
@@ -1698,19 +2032,22 @@ status_t CameraHal::setPreviewWindow(struct preview_stream_ops *window)
if(mDisplayAdapter.get() != NULL)
{
///NULL window passed, destroy the display adapter if present
- CAMHAL_LOGDA("NULL window passed, destroying display adapter");
+ CAMHAL_LOGD("NULL window passed, destroying display adapter");
mDisplayAdapter.clear();
///@remarks If there was a window previously existing, we usually expect another valid window to be passed by the client
///@remarks so, we will wait until it passes a valid window to begin the preview again
mSetPreviewWindowCalled = false;
}
- CAMHAL_LOGDA("NULL ANativeWindow passed to setPreviewWindow");
+ CAMHAL_LOGD("NULL ANativeWindow passed to setPreviewWindow");
return NO_ERROR;
}else if(mDisplayAdapter.get() == NULL)
{
// Need to create the display adapter since it has not been created
// Create display adapter
mDisplayAdapter = new ANativeWindowDisplayAdapter();
+#ifdef OMAP_ENHANCEMENT
+ mDisplayAdapter->setExtendedOps(mExtendedPreviewStreamOps);
+#endif
ret = NO_ERROR;
if(!mDisplayAdapter.get() || ((ret=mDisplayAdapter->initialize())!=NO_ERROR))
{
@@ -1768,6 +2105,387 @@ status_t CameraHal::setPreviewWindow(struct preview_stream_ops *window)
}
+#ifdef OMAP_ENHANCEMENT_CPCAM
+void CameraHal::setExtendedPreviewStreamOps(preview_stream_extended_ops_t *ops)
+{
+ mExtendedPreviewStreamOps = ops;
+}
+
+/**
+ @brief Sets Tapout Surfaces.
+
+ Buffers provided to CameraHal via this object for tap-out
+ functionality.
+
+ @param[in] tapout The ANativeWindow-backed tap-out surface provided by the client
+ @return NO_ERROR If the ANativeWindow object passes validation criteria
+ @todo Define validation criteria for ANativeWindow object. Define error codes for scenarios
+
+ */
+status_t CameraHal::setTapoutLocked(struct preview_stream_ops *tapout)
+{
+ status_t ret = NO_ERROR;
+ int index = -1;
+
+ LOG_FUNCTION_NAME;
+
+ if (!tapout) {
+ CAMHAL_LOGD("Missing argument");
+ LOG_FUNCTION_NAME_EXIT;
+ return NO_ERROR;
+ }
+
+ // Set tapout point
+ // 1. Check name of tap-out
+ // 2. If not already set, then create a new one
+ // 3. Allocate buffers. If user is re-setting the surface, free buffers first and re-allocate
+ // in case dimensions have changed
+
+ for (unsigned int i = 0; i < mOutAdapters.size(); i++) {
+ android::sp<DisplayAdapter> out;
+ out = mOutAdapters.itemAt(i);
+ ret = out->setPreviewWindow(tapout);
+ if (ret == ALREADY_EXISTS) {
+ CAMHAL_LOGD("Tap Out already set at index = %d", i);
+ index = i;
+ ret = NO_ERROR;
+ }
+ }
+
+ if (index < 0) {
+ android::sp<DisplayAdapter> out = new BufferSourceAdapter();
+
+ ret = out->initialize();
+ if (ret != NO_ERROR) {
+ out.clear();
+ CAMHAL_LOGEA("DisplayAdapter initialize failed");
+ goto exit;
+ }
+
+ // BufferSourceAdapter will be handler of the extended OPS
+ out->setExtendedOps(mExtendedPreviewStreamOps);
+
+ // CameraAdapter will be the frame provider for BufferSourceAdapter
+ out->setFrameProvider(mCameraAdapter);
+
+ // BufferSourceAdapter will use ErrorHandler to send errors back to
+ // the application
+ out->setErrorHandler(mAppCallbackNotifier.get());
+
+ // Update the display adapter with the new window that is passed from CameraService
+ ret = out->setPreviewWindow(tapout);
+ if(ret != NO_ERROR) {
+ CAMHAL_LOGEB("DisplayAdapter setPreviewWindow returned error %d", ret);
+ goto exit;
+ }
+
+ if (NULL != mCameraAdapter) {
+ unsigned int bufferCount, max_queueable;
+ CameraFrame frame;
+
+ bufferCount = out->getBufferCount();
+ if (bufferCount < 1) bufferCount = NO_BUFFERS_IMAGE_CAPTURE_SYSTEM_HEAP;
+
+ ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_QUERY_BUFFER_SIZE_IMAGE_CAPTURE,
+ ( int ) &frame,
+ bufferCount);
+ if (NO_ERROR != ret) {
+ CAMHAL_LOGEB("CAMERA_QUERY_BUFFER_SIZE_IMAGE_CAPTURE returned error 0x%x", ret);
+ }
+ if (NO_ERROR == ret) {
+ CameraBuffer *bufs = NULL;
+ unsigned int stride;
+ unsigned int height = frame.mHeight;
+ int size = frame.mLength;
+
+ stride = frame.mAlignment / getBPP(mParameters.getPictureFormat());
+ bufs = out->allocateBufferList(stride,
+ height,
+ mParameters.getPictureFormat(),
+ size,
+ bufferCount);
+ if (bufs == NULL){
+ CAMHAL_LOGEB("error allocating buffer list");
+ goto exit;
+ }
+ }
+ }
+ mOutAdapters.add(out);
+ }
+
+exit:
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
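A hedged sketch of the stride arithmetic used when sizing the tap-out buffers above. It assumes getBPP() (already used in this file) returns bytes per pixel for the picture format, so dividing the adapter's row length in bytes by it yields the width, in pixels, to request from the buffer source.

    static unsigned int strideInPixels(unsigned int rowLengthBytes, const char *pictureFormat)
    {
        const unsigned int bpp = getBPP(pictureFormat);   // e.g. 2 for a 16-bit packed format
        return (bpp > 0) ? (rowLengthBytes / bpp) : rowLengthBytes;
    }

    // e.g. a 4096-byte row of a 2-byte-per-pixel format -> stride of 2048 pixels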
+
+/**
+ @brief Releases Tapout Surfaces.
+
+ @param[in] tapout The tap-out surface previously set with setTapoutLocked()
+ @return NO_ERROR If the ANativeWindow object passes validation criteria
+ @todo Define validation criteria for ANativeWindow object. Define error codes for scenarios
+
+ */
+status_t CameraHal::releaseTapoutLocked(struct preview_stream_ops *tapout)
+{
+ status_t ret = NO_ERROR;
+ char id[OP_STR_SIZE];
+
+ LOG_FUNCTION_NAME;
+
+ if (!tapout) {
+ CAMHAL_LOGD("Missing argument");
+ LOG_FUNCTION_NAME_EXIT;
+ return NO_ERROR;
+ }
+
+ // Get the name of tapout
+ ret = mExtendedPreviewStreamOps->get_id(tapout, id, sizeof(id));
+ if (NO_ERROR != ret) {
+ CAMHAL_LOGEB("get_id OPS returned error %d", ret);
+ return ret;
+ }
+
+ // 1. Check name of tap-out
+ // 2. If exist, then free buffers and then remove it
+ if (mBufferSourceAdapter_Out.get() && mBufferSourceAdapter_Out->match(id)) {
+ CAMHAL_LOGD("REMOVE tap out %p previously set as current", tapout);
+ mBufferSourceAdapter_Out.clear();
+ }
+ for (unsigned int i = 0; i < mOutAdapters.size(); i++) {
+ android::sp<DisplayAdapter> out;
+ out = mOutAdapters.itemAt(i);
+ if (out->match(id)) {
+ CAMHAL_LOGD("REMOVE tap out %p \"%s\" at position %d", tapout, id, i);
+ mOutAdapters.removeAt(i);
+ break;
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+/**
+ @brief Sets Tapin Surfaces.
+
+ Buffers provided to CameraHal via this object for tap-in
+ functionality.
+
+ @param[in] tapin The ANativeWindow-backed tap-in surface provided by the client
+ @return NO_ERROR If the ANativeWindow object passes validation criteria
+ @todo Define validation criteria for ANativeWindow object. Define error codes for scenarios
+
+ */
+status_t CameraHal::setTapinLocked(struct preview_stream_ops *tapin)
+{
+ status_t ret = NO_ERROR;
+ int index = -1;
+
+ LOG_FUNCTION_NAME;
+
+ if (!tapin) {
+ CAMHAL_LOGD("Missing argument");
+ LOG_FUNCTION_NAME_EXIT;
+ return NO_ERROR;
+ }
+
+ // Set tapin point
+ // 1. Check name of tap-in
+ // 2. If not already set, then create a new one
+ // 3. Allocate buffers. If user is re-setting the surface, free buffers first and re-allocate
+ // in case dimensions have changed
+ for (unsigned int i = 0; i < mInAdapters.size(); i++) {
+ android::sp<DisplayAdapter> in;
+ in = mInAdapters.itemAt(i);
+ ret = in->setPreviewWindow(tapin);
+ if (ret == ALREADY_EXISTS) {
+ CAMHAL_LOGD("Tap In already set at index = %d", i);
+ index = i;
+ ret = NO_ERROR;
+ }
+ }
+
+ if (index < 0) {
+ android::sp<DisplayAdapter> in = new BufferSourceAdapter();
+
+ ret = in->initialize();
+ if (ret != NO_ERROR) {
+ in.clear();
+ CAMHAL_LOGEA("DisplayAdapter initialize failed");
+ goto exit;
+ }
+
+ // BufferSourceAdapter will be handler of the extended OPS
+ in->setExtendedOps(mExtendedPreviewStreamOps);
+
+ // CameraAdapter will be the frame provider for BufferSourceAdapter
+ in->setFrameProvider(mCameraAdapter);
+
+ // BufferSourceAdapter will use ErrorHandler to send errors back to
+ // the application
+ in->setErrorHandler(mAppCallbackNotifier.get());
+
+ // Update the display adapter with the new window that is passed from CameraService
+ ret = in->setPreviewWindow(tapin);
+ if(ret != NO_ERROR) {
+ CAMHAL_LOGEB("DisplayAdapter setPreviewWindow returned error %d", ret);
+ goto exit;
+ }
+
+ mInAdapters.add(in);
+ }
+
+exit:
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+
+/**
+ @brief Releases Tapin Surfaces.
+
+ @param[in] tapin The tap-in surface previously set with setTapinLocked()
+ @return NO_ERROR If the ANativeWindow object passes validation criteria
+ @todo Define validation criteria for ANativeWindow object. Define error codes for scenarios
+
+ */
+status_t CameraHal::releaseTapinLocked(struct preview_stream_ops *tapin)
+{
+ status_t ret = NO_ERROR;
+ char id[OP_STR_SIZE];
+
+ LOG_FUNCTION_NAME;
+
+ if (!tapin) {
+ CAMHAL_LOGD("Missing argument");
+ LOG_FUNCTION_NAME_EXIT;
+ return NO_ERROR;
+ }
+
+ // Get the name of tapin
+ ret = mExtendedPreviewStreamOps->get_id(tapin, id, sizeof(id));
+ if (NO_ERROR != ret) {
+ CAMHAL_LOGEB("get_id OPS returned error %d", ret);
+ return ret;
+ }
+
+ // 1. Check name of tap-in
+ // 2. If exist, then free buffers and then remove it
+ if (mBufferSourceAdapter_In.get() && mBufferSourceAdapter_In->match(id)) {
+ CAMHAL_LOGD("REMOVE tap in %p previously set as current", tapin);
+ mBufferSourceAdapter_In.clear();
+ }
+ for (unsigned int i = 0; i < mInAdapters.size(); i++) {
+ android::sp<DisplayAdapter> in;
+ in = mInAdapters.itemAt(i);
+ if (in->match(id)) {
+ CAMHAL_LOGD("REMOVE tap in %p \"%s\" at position %d", tapin, id, i);
+ mInAdapters.removeAt(i);
+ break;
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+
+/**
+ @brief Sets ANativeWindow object.
+
+ Buffers provided to CameraHal via this object for tap-in/tap-out
+ functionality.
+
+ TODO(XXX): this is just going to use preview_stream_ops for now, but we
+ most likely need to extend it when we want more functionality
+
+ @param[in] tapin The tap-in surface (preview_stream_ops) provided by the client
+ @param[in] tapout The tap-out surface (preview_stream_ops) provided by the client
+ @return NO_ERROR If the ANativeWindow object passes validation criteria
+ @todo Define validation criteria for ANativeWindow object. Define error codes for scenarios
+
+ */
+status_t CameraHal::setBufferSource(struct preview_stream_ops *tapin, struct preview_stream_ops *tapout)
+{
+ status_t ret = NO_ERROR;
+ int index = -1;
+
+ LOG_FUNCTION_NAME;
+
+ android::AutoMutex lock(mLock);
+
+ CAMHAL_LOGD ("setBufferSource(%p, %p)", tapin, tapout);
+
+ ret = setTapoutLocked(tapout);
+ if (ret != NO_ERROR) {
+ CAMHAL_LOGE("setTapoutLocked returned error 0x%x", ret);
+ goto exit;
+ }
+
+ ret = setTapinLocked(tapin);
+ if (ret != NO_ERROR) {
+ CAMHAL_LOGE("setTapinLocked returned error 0x%x", ret);
+ goto exit;
+ }
+
+exit:
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+
+/**
+ @brief Releases ANativeWindow object.
+
+ Release buffers previously set with setBufferSource()
+
+ TODO(XXX): this is just going to use preview_stream_ops for now, but we
+ most likely need to extend it when we want more functionality
+
+ @param[in] tapin The tap-in surface to release
+ @param[in] tapout The tap-out surface to release
+ @return NO_ERROR If the ANativeWindow object passes validation criteria
+ @todo Define validation criteria for ANativeWindow object. Define error codes for scenarios
+
+ */
+status_t CameraHal::releaseBufferSource(struct preview_stream_ops *tapin, struct preview_stream_ops *tapout)
+{
+ status_t ret = NO_ERROR;
+ int index = -1;
+
+ LOG_FUNCTION_NAME;
+
+ android::AutoMutex lock(mLock);
+ CAMHAL_LOGD ("releaseBufferSource(%p, %p)", tapin, tapout);
+ if (tapout) {
+ ret |= releaseTapoutLocked(tapout);
+ if (ret != NO_ERROR) {
+ CAMHAL_LOGE("Error %d while releasing tap out", ret);
+ }
+ }
+
+ if (tapin) {
+ ret |= releaseTapinLocked(tapin);
+ if (ret != NO_ERROR) {
+ CAMHAL_LOGE("Error %d while releasing tap in", ret);
+ }
+ }
+
+exit:
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+#endif
+
+
/**
@brief Stop a previously started preview.
@@ -1785,7 +2503,7 @@ void CameraHal::stopPreview()
return;
}
- bool imageCaptureRunning = (mCameraAdapter->getState() == CameraAdapter::CAPTURE_STATE) &&
+ bool imageCaptureRunning = (mCameraAdapter->getState() & CameraAdapter::CAPTURE_STATE) &&
(mCameraAdapter->getNextState() != CameraAdapter::PREVIEW_STATE);
if(mDisplayPaused && !imageCaptureRunning)
{
@@ -1857,23 +2575,27 @@ status_t CameraHal::startRecording( )
// set internal recording hint in case camera adapter needs to make some
// decisions....(will only be sent to camera adapter if camera restart is required)
- mParameters.set(TICameraParameters::KEY_RECORDING_HINT, CameraParameters::TRUE);
+ mParameters.set(TICameraParameters::KEY_RECORDING_HINT, android::CameraParameters::TRUE);
// if application starts recording in continuous focus picture mode...
// then we need to force default capture mode (as opposed to video mode)
- if ( ((valstr = mParameters.get(CameraParameters::KEY_FOCUS_MODE)) != NULL) &&
- (strcmp(valstr, CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE) == 0) ){
+ if ( ((valstr = mParameters.get(android::CameraParameters::KEY_FOCUS_MODE)) != NULL) &&
+ (strcmp(valstr, android::CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE) == 0) ){
restartPreviewRequired = resetVideoModeParameters();
}
// only need to check recording hint if preview restart is not already needed
- valstr = mParameters.get(CameraParameters::KEY_RECORDING_HINT);
+ valstr = mParameters.get(android::CameraParameters::KEY_RECORDING_HINT);
if ( !restartPreviewRequired &&
- (!valstr || (valstr && (strcmp(valstr, CameraParameters::TRUE) != 0))) ) {
+ (!valstr || (valstr && (strcmp(valstr, android::CameraParameters::TRUE) != 0))) ) {
restartPreviewRequired = setVideoModeParameters(mParameters);
}
if (restartPreviewRequired) {
+ {
+ android::AutoMutex lock(mLock);
+ mCapModeBackup = mParameters.get(TICameraParameters::KEY_CAP_MODE);
+ }
ret = restartPreview();
}
@@ -1895,13 +2617,13 @@ status_t CameraHal::startRecording( )
mAppCallbackNotifier->useVideoBuffers(true);
mAppCallbackNotifier->setVideoRes(mVideoWidth, mVideoHeight);
- ret = mAppCallbackNotifier->initSharedVideoBuffers(mPreviewBufs, mPreviewOffsets, mPreviewFd, mPreviewLength, count, mVideoBufs);
+ ret = mAppCallbackNotifier->initSharedVideoBuffers(mPreviewBuffers, mPreviewOffsets, mPreviewFd, mPreviewLength, count, mVideoBuffers);
}
else
{
mAppCallbackNotifier->useVideoBuffers(false);
mAppCallbackNotifier->setVideoRes(mPreviewWidth, mPreviewHeight);
- ret = mAppCallbackNotifier->initSharedVideoBuffers(mPreviewBufs, mPreviewOffsets, mPreviewFd, mPreviewLength, count, NULL);
+ ret = mAppCallbackNotifier->initSharedVideoBuffers(mPreviewBuffers, mPreviewOffsets, mPreviewFd, mPreviewLength, count, NULL);
}
}
@@ -1938,72 +2660,74 @@ status_t CameraHal::startRecording( )
@todo Modify the policies for enabling VSTAB & VNF usecase based later.
*/
-bool CameraHal::setVideoModeParameters(const CameraParameters& params)
+bool CameraHal::setVideoModeParameters(const android::CameraParameters& params)
{
const char *valstr = NULL;
const char *valstrRemote = NULL;
bool restartPreviewRequired = false;
- status_t ret = NO_ERROR;
LOG_FUNCTION_NAME;
// Set CAPTURE_MODE to VIDEO_MODE, if not set already and Restart Preview
valstr = mParameters.get(TICameraParameters::KEY_CAP_MODE);
if ( (valstr == NULL) ||
- ( (valstr != NULL) && (strcmp(valstr, (const char *) TICameraParameters::VIDEO_MODE) != 0) ) )
- {
+ ( (valstr != NULL) && ( (strcmp(valstr, (const char *) TICameraParameters::VIDEO_MODE) != 0) &&
+ (strcmp(valstr, (const char *) TICameraParameters::VIDEO_MODE_HQ ) != 0) ) ) ) {
CAMHAL_LOGDA("Set CAPTURE_MODE to VIDEO_MODE");
mParameters.set(TICameraParameters::KEY_CAP_MODE, (const char *) TICameraParameters::VIDEO_MODE);
restartPreviewRequired = true;
- }
+ }
- // Check if CAPTURE_MODE is VIDEO_MODE, since VSTAB & VNF work only in VIDEO_MODE.
- valstr = mParameters.get(TICameraParameters::KEY_CAP_MODE);
- if (strcmp(valstr, (const char *) TICameraParameters::VIDEO_MODE) == 0) {
- valstrRemote = params.get(CameraParameters::KEY_VIDEO_STABILIZATION);
- // set VSTAB. restart is required if vstab value has changed
- if ( valstrRemote != NULL) {
- // make sure we support vstab
- if (strcmp(mCameraProperties->get(CameraProperties::VSTAB_SUPPORTED),
- CameraParameters::TRUE) == 0) {
- valstr = mParameters.get(CameraParameters::KEY_VIDEO_STABILIZATION);
- // vstab value has changed
- if ((valstr != NULL) &&
- strcmp(valstr, valstrRemote) != 0) {
- restartPreviewRequired = true;
- }
- mParameters.set(CameraParameters::KEY_VIDEO_STABILIZATION, valstrRemote);
+ // set VSTAB. restart is required if vstab value has changed
+ if ( (valstrRemote = params.get(android::CameraParameters::KEY_VIDEO_STABILIZATION)) != NULL ) {
+ // make sure we support vstab
+ if (strcmp(mCameraProperties->get(CameraProperties::VSTAB_SUPPORTED),
+ android::CameraParameters::TRUE) == 0) {
+ valstr = mParameters.get(android::CameraParameters::KEY_VIDEO_STABILIZATION);
+ // vstab value has changed
+ if ((valstr != NULL) &&
+ strcmp(valstr, valstrRemote) != 0) {
+ restartPreviewRequired = true;
}
- } else if (mParameters.get(CameraParameters::KEY_VIDEO_STABILIZATION)) {
- // vstab was configured but now unset
- restartPreviewRequired = true;
- mParameters.remove(CameraParameters::KEY_VIDEO_STABILIZATION);
+ mParameters.set(android::CameraParameters::KEY_VIDEO_STABILIZATION,
+ valstrRemote);
}
+ } else if (mParameters.get(android::CameraParameters::KEY_VIDEO_STABILIZATION)) {
+ // vstab was configured but now unset
+ restartPreviewRequired = true;
+ mParameters.remove(android::CameraParameters::KEY_VIDEO_STABILIZATION);
+ }
- // Set VNF
- valstrRemote = params.get(TICameraParameters::KEY_VNF);
- if ( valstrRemote == NULL) {
- CAMHAL_LOGDA("Enable VNF");
- mParameters.set(TICameraParameters::KEY_VNF, "1");
+ // Set VNF
+ if ((valstrRemote = params.get(TICameraParameters::KEY_VNF)) == NULL) {
+ CAMHAL_LOGDA("Enable VNF");
+ mParameters.set(TICameraParameters::KEY_VNF, android::CameraParameters::TRUE);
+ restartPreviewRequired = true;
+ } else {
+ valstr = mParameters.get(TICameraParameters::KEY_VNF);
+ if (valstr && strcmp(valstr, valstrRemote) != 0) {
restartPreviewRequired = true;
- } else {
- valstr = mParameters.get(TICameraParameters::KEY_VNF);
- if (valstr && strcmp(valstr, valstrRemote) != 0) {
- restartPreviewRequired = true;
- }
- mParameters.set(TICameraParameters::KEY_VNF, valstrRemote);
}
+ mParameters.set(TICameraParameters::KEY_VNF, valstrRemote);
+ }
+#if !defined(OMAP_ENHANCEMENT) && !defined(ENHANCED_DOMX)
// For VSTAB alone for 1080p resolution, padded width goes > 2048, which cannot be rendered by GPU.
// In such case, there is support in Ducati for combination of VSTAB & VNF requiring padded width < 2048.
// So we are forcefully enabling VNF, if VSTAB is enabled for 1080p resolution.
- valstr = mParameters.get(CameraParameters::KEY_VIDEO_STABILIZATION);
- if (valstr && (strcmp(valstr, CameraParameters::TRUE) == 0) && (mPreviewWidth == 1920)) {
+ int w, h;
+ params.getPreviewSize(&w, &h);
+ valstr = mParameters.get(android::CameraParameters::KEY_VIDEO_STABILIZATION);
+ if (valstr && (strcmp(valstr, android::CameraParameters::TRUE) == 0) && (w == 1920)) {
CAMHAL_LOGDA("Force Enable VNF for 1080p");
- mParameters.set(TICameraParameters::KEY_VNF, "1");
- restartPreviewRequired = true;
+ const char *valKeyVnf = mParameters.get(TICameraParameters::KEY_VNF);
+ if(!valKeyVnf || (strcmp(valKeyVnf, android::CameraParameters::TRUE) != 0)) {
+ mParameters.set(TICameraParameters::KEY_VNF, android::CameraParameters::TRUE);
+ restartPreviewRequired = true;
+ }
}
- }
+#endif
+
LOG_FUNCTION_NAME_EXIT;
return restartPreviewRequired;
@@ -2055,31 +2779,22 @@ bool CameraHal::resetVideoModeParameters()
*/
status_t CameraHal::restartPreview()
{
- const char *valstr = NULL;
- char tmpvalstr[30];
status_t ret = NO_ERROR;
LOG_FUNCTION_NAME;
// Retain CAPTURE_MODE before calling stopPreview(), since it is reset in stopPreview().
- tmpvalstr[0] = 0;
- valstr = mParameters.get(TICameraParameters::KEY_CAP_MODE);
- if(valstr != NULL)
- {
- if(sizeof(tmpvalstr) < (strlen(valstr)+1))
- {
- return -EINVAL;
- }
-
- strncpy(tmpvalstr, valstr, sizeof(tmpvalstr));
- tmpvalstr[sizeof(tmpvalstr)-1] = 0;
- }
forceStopPreview();
{
- Mutex::Autolock lock(mLock);
- mParameters.set(TICameraParameters::KEY_CAP_MODE, tmpvalstr);
+ android::AutoMutex lock(mLock);
+ if (!mCapModeBackup.isEmpty()) {
+ mParameters.set(TICameraParameters::KEY_CAP_MODE, mCapModeBackup.string());
+ mCapModeBackup = "";
+ } else {
+ mParameters.set(TICameraParameters::KEY_CAP_MODE, "");
+ }
mCameraAdapter->setParameters(mParameters);
}
@@ -2103,7 +2818,7 @@ void CameraHal::stopRecording()
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
if (!mRecordingEnabled )
{
@@ -2122,12 +2837,12 @@ void CameraHal::stopRecording()
mRecordingEnabled = false;
if ( mAppCallbackNotifier->getUesVideoBuffers() ){
- freeVideoBufs(mVideoBufs);
- if (mVideoBufs){
- CAMHAL_LOGVB(" FREEING mVideoBufs 0x%x", mVideoBufs);
- delete [] mVideoBufs;
+ freeVideoBufs(mVideoBuffers);
+ if (mVideoBuffers){
+ CAMHAL_LOGVB(" FREEING mVideoBuffers %p", mVideoBuffers);
+ delete [] mVideoBuffers;
}
- mVideoBufs = NULL;
+ mVideoBuffers = NULL;
}
// reset internal recording hint in case camera adapter needs to make some
@@ -2203,7 +2918,7 @@ status_t CameraHal::autoFocus()
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
mMsgEnabled |= CAMERA_MSG_FOCUS;
@@ -2260,13 +2975,13 @@ status_t CameraHal::cancelAutoFocus()
{
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(mLock);
- CameraParameters adapterParams = mParameters;
+ android::AutoMutex lock(mLock);
+ android::CameraParameters adapterParams = mParameters;
mMsgEnabled &= ~CAMERA_MSG_FOCUS;
if( NULL != mCameraAdapter )
{
- adapterParams.set(TICameraParameters::KEY_AUTO_FOCUS_LOCK, CameraParameters::FALSE);
+ adapterParams.set(TICameraParameters::KEY_AUTO_FOCUS_LOCK, android::CameraParameters::FALSE);
mCameraAdapter->setParameters(adapterParams);
mCameraAdapter->sendCommand(CameraAdapter::CAMERA_CANCEL_AUTOFOCUS);
mAppCallbackNotifier->flushEventQueue();
@@ -2315,34 +3030,17 @@ void CameraHal::eventCallback(CameraHalEvent* event)
{
LOG_FUNCTION_NAME;
- if ( NULL != event )
- {
- switch( event->mEventType )
- {
- case CameraHalEvent::EVENT_FOCUS_LOCKED:
- case CameraHalEvent::EVENT_FOCUS_ERROR:
- {
- if ( mBracketingEnabled )
- {
- startImageBracketing();
- }
- break;
- }
- default:
- {
- break;
- }
- };
- }
-
LOG_FUNCTION_NAME_EXIT;
}
status_t CameraHal::startImageBracketing()
{
- status_t ret = NO_ERROR;
- CameraFrame frame;
- CameraAdapter::BuffersDescriptor desc;
+ status_t ret = NO_ERROR;
+ CameraFrame frame;
+ CameraAdapter::BuffersDescriptor desc;
+ unsigned int max_queueable = 0;
+
+
#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
@@ -2407,7 +3105,7 @@ status_t CameraHal::startImageBracketing()
if ( (NO_ERROR == ret) && ( NULL != mCameraAdapter ) )
{
- desc.mBuffers = mImageBufs;
+ desc.mBuffers = mImageBuffers;
desc.mOffsets = mImageOffsets;
desc.mFd = mImageFd;
desc.mLength = mImageLength;
@@ -2465,20 +3163,46 @@ status_t CameraHal::stopImageBracketing()
@todo Define error codes if unable to switch to image capture
*/
-status_t CameraHal::takePicture( )
+status_t CameraHal::takePicture(const char *params)
{
+ android::AutoMutex lock(mLock);
+ return __takePicture(params);
+}
+
+/**
+ @brief Internal function for getting a captured image.
+ shared by takePicture and reprocess.
+ @param none
+ @return NO_ERROR If able to switch to image capture
+ @todo Define error codes if unable to switch to image capture
+
+ */
+status_t CameraHal::__takePicture(const char *params, struct timeval *captureStart)
+{
+ // cancel AF state if needed (before any operation and mutex lock)
+ if (mCameraAdapter->getState() == CameraAdapter::AF_STATE) {
+ cancelAutoFocus();
+ }
+
status_t ret = NO_ERROR;
CameraFrame frame;
CameraAdapter::BuffersDescriptor desc;
- int burst;
+ int burst = -1;
const char *valstr = NULL;
unsigned int bufferCount = 1;
-
- Mutex::Autolock lock(mLock);
+ unsigned int max_queueable = 0;
+ unsigned int rawBufferCount = 1;
+ bool isCPCamMode = false;
+ android::sp<DisplayAdapter> outAdapter = 0;
+ bool reuseTapout = false;
#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
- gettimeofday(&mStartCapture, NULL);
+ if ( NULL == captureStart ) {
+ gettimeofday(&mStartCapture, NULL);
+ } else {
+ memcpy(&mStartCapture, captureStart, sizeof(struct timeval));
+ }
#endif
@@ -2491,53 +3215,166 @@ status_t CameraHal::takePicture( )
return NO_INIT;
}
+ valstr = mParameters.get(TICameraParameters::KEY_CAP_MODE);
+
+ isCPCamMode = valstr && !strcmp(valstr, TICameraParameters::CP_CAM_MODE);
+
// return error if we are already capturing
- if ( (mCameraAdapter->getState() == CameraAdapter::CAPTURE_STATE &&
+ // however, we can queue a capture when in cpcam mode
+ if ( ((mCameraAdapter->getState() == CameraAdapter::CAPTURE_STATE &&
mCameraAdapter->getNextState() != CameraAdapter::PREVIEW_STATE) ||
(mCameraAdapter->getState() == CameraAdapter::VIDEO_CAPTURE_STATE &&
- mCameraAdapter->getNextState() != CameraAdapter::VIDEO_STATE) ) {
+ mCameraAdapter->getNextState() != CameraAdapter::VIDEO_STATE)) &&
+ !isCPCamMode) {
CAMHAL_LOGEA("Already capturing an image...");
return NO_INIT;
}
// we only support video snapshot if we are in video mode (recording hint is set)
- valstr = mParameters.get(TICameraParameters::KEY_CAP_MODE);
if ( (mCameraAdapter->getState() == CameraAdapter::VIDEO_STATE) &&
- (valstr && strcmp(valstr, TICameraParameters::VIDEO_MODE)) ) {
+ (valstr && ( strcmp(valstr, TICameraParameters::VIDEO_MODE) &&
+ strcmp(valstr, TICameraParameters::VIDEO_MODE_HQ ) ) ) ) {
CAMHAL_LOGEA("Trying to capture while recording without recording hint set...");
return INVALID_OPERATION;
}
- if ( !mBracketingRunning )
- {
+#ifdef OMAP_ENHANCEMENT_CPCAM
+ // check if camera application is using shots parameters
+ // api. parameters set here override anything set using setParameters
+ // TODO(XXX): Just going to use legacy TI parameters for now. Need
+ // add new APIs in CameraHal to utilize android::ShotParameters later, so
+ // we don't have to parse through the whole set of parameters
+ // in camera adapter
+ if (strlen(params) > 0) {
+ android::ShotParameters shotParams;
+ const char *valStr;
+ const char *valExpComp, *valExpGain;
+ int valNum;
+
+ android::String8 shotParams8(params);
+
+ shotParams.unflatten(shotParams8);
+ mParameters.remove(TICameraParameters::KEY_EXP_GAIN_BRACKETING_RANGE);
+ mParameters.remove(TICameraParameters::KEY_EXP_BRACKETING_RANGE);
+
+ valExpGain = shotParams.get(android::ShotParameters::KEY_EXP_GAIN_PAIRS);
+ valExpComp = shotParams.get(android::ShotParameters::KEY_EXP_COMPENSATION);
+ if (NULL != valExpComp) {
+ mParameters.set(TICameraParameters::KEY_EXP_BRACKETING_RANGE, valExpComp);
+ } else if (NULL != valExpGain) {
+ mParameters.set(TICameraParameters::KEY_EXP_GAIN_BRACKETING_RANGE, valExpGain);
+ }
- if ( NO_ERROR == ret )
- {
- burst = mParameters.getInt(TICameraParameters::KEY_BURST);
+ valNum = shotParams.getInt(android::ShotParameters::KEY_BURST);
+ if (valNum >= 0) {
+ mParameters.set(TICameraParameters::KEY_BURST, valNum);
+ burst = valNum;
+ }
+
+ valStr = shotParams.get(android::ShotParameters::KEY_FLUSH_CONFIG);
+ if (valStr!= NULL) {
+ if ( 0 == strcmp(valStr, android::ShotParameters::TRUE) ) {
+ mParameters.set(TICameraParameters::KEY_FLUSH_SHOT_CONFIG_QUEUE,
+ android::CameraParameters::TRUE);
+ } else if ( 0 == strcmp(valStr, android::ShotParameters::FALSE) ) {
+ mParameters.set(TICameraParameters::KEY_FLUSH_SHOT_CONFIG_QUEUE,
+ android::CameraParameters::FALSE);
+ }
+ }
+
+ valStr = shotParams.get(android::ShotParameters::KEY_CURRENT_TAP_OUT);
+ if (valStr != NULL) {
+ int index = -1;
+ for (unsigned int i = 0; i < mOutAdapters.size(); i++) {
+ if(mOutAdapters.itemAt(i)->match(valStr)) {
+ index = i;
+ break;
+ }
+ }
+ if (index < 0) {
+ CAMHAL_LOGE("Invalid tap out surface passed to camerahal");
+ return BAD_VALUE;
}
+ CAMHAL_LOGD("Found matching out adapter at %d", index);
+ outAdapter = mOutAdapters.itemAt(index);
+ if ( outAdapter == mBufferSourceAdapter_Out ) {
+ reuseTapout = true;
+ }
+ }
+
+ mCameraAdapter->setParameters(mParameters);
+ } else
+#endif
+ {
+ // TODO(XXX): Should probably reset burst and bracketing params
+ // when we remove legacy TI parameters implementation
+ }
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+ CameraHal::PPM("Takepicture parameters set: ", &mStartCapture);
+
+#endif
+
+ // if we are already in the middle of a capture and using the same
+ // tapout ST...then we just need setParameters and start image
+ // capture to queue more shots
+ if (((mCameraAdapter->getState() & CameraAdapter::CAPTURE_STATE) ==
+ CameraAdapter::CAPTURE_STATE) &&
+ (mCameraAdapter->getNextState() != CameraAdapter::PREVIEW_STATE) &&
+ (reuseTapout) ) {
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+ //pass capture timestamp along with the camera adapter command
+ ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_START_IMAGE_CAPTURE,
+ (int) &mStartCapture);
+#else
+ ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_START_IMAGE_CAPTURE);
+#endif
+ return ret;
+ }
+
+ if ( !mBracketingRunning )
+ {
+ // if application didn't set burst through android::ShotParameters
+ // then query from TICameraParameters
+ if ((burst == -1) && (NO_ERROR == ret)) {
+ burst = mParameters.getInt(TICameraParameters::KEY_BURST);
+ }
//Allocate all buffers only in burst capture case
- if ( burst > 1 )
- {
- bufferCount = CameraHal::NO_BUFFERS_IMAGE_CAPTURE;
- if ( NULL != mAppCallbackNotifier.get() )
- {
+ if ( burst > 0 ) {
+ // For CPCam mode...allocate for worst case burst
+ bufferCount = isCPCamMode || (burst > CameraHal::NO_BUFFERS_IMAGE_CAPTURE) ?
+ CameraHal::NO_BUFFERS_IMAGE_CAPTURE : burst;
+
+ if (outAdapter.get()) {
+ if ( reuseTapout ) {
+ bufferCount = mImageCount;
+ } else {
+ bufferCount = outAdapter->getBufferCount();
+ if (bufferCount < 1) {
+ bufferCount = NO_BUFFERS_IMAGE_CAPTURE_SYSTEM_HEAP;
+ }
+ }
+ }
+
+ if ( NULL != mAppCallbackNotifier.get() ) {
mAppCallbackNotifier->setBurst(true);
- }
}
- else
- {
- if ( NULL != mAppCallbackNotifier.get() )
- {
+ } else if ( mBracketingEnabled ) {
+ bufferCount = mBracketRangeNegative + 1;
+ if ( NULL != mAppCallbackNotifier.get() ) {
+ mAppCallbackNotifier->setBurst(false);
+ }
+ } else {
+ if ( NULL != mAppCallbackNotifier.get() ) {
mAppCallbackNotifier->setBurst(false);
- }
}
+ }
// pause preview during normal image capture
// do not pause preview if recording (video state)
- if (NO_ERROR == ret &&
- NULL != mDisplayAdapter.get() &&
- burst < 1) {
+ if ( (NO_ERROR == ret) && (NULL != mDisplayAdapter.get()) ) {
if (mCameraAdapter->getState() != CameraAdapter::VIDEO_STATE) {
mDisplayPaused = true;
mPreviewEnabled = false;
@@ -2575,51 +3412,117 @@ status_t CameraHal::takePicture( )
}
}
- if ( NO_ERROR == ret )
- {
- mParameters.getPictureSize(( int * ) &frame.mWidth,
- ( int * ) &frame.mHeight);
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
- ret = allocImageBufs(frame.mWidth,
- frame.mHeight,
- frame.mLength,
- mParameters.getPictureFormat(),
- bufferCount);
- if ( NO_ERROR != ret )
- {
- CAMHAL_LOGEB("allocImageBufs returned error 0x%x", ret);
+ CameraHal::PPM("Takepicture buffer size queried: ", &mStartCapture);
+
+#endif
+
+ if (outAdapter.get()) {
+ // Avoid locking the tapout again when reusing it
+ if (!reuseTapout) {
+ // Need to reset buffers if we are switching adapters since we don't know
+ // the state of the new buffer list
+ ret = outAdapter->maxQueueableBuffers(max_queueable);
+ if (NO_ERROR != ret) {
+ CAMHAL_LOGE("Couldn't get max queuable");
+ return ret;
+ }
+ mImageBuffers = outAdapter->getBuffers(true);
+ mImageOffsets = outAdapter->getOffsets();
+ mImageFd = outAdapter->getFd();
+ mImageLength = outAdapter->getSize();
+ mImageCount = bufferCount;
+ mBufferSourceAdapter_Out = outAdapter;
+ }
+ } else {
+ mBufferSourceAdapter_Out.clear();
+ // allocImageBufs will only allocate new buffers if mImageBuffers is NULL
+ if ( NO_ERROR == ret ) {
+ max_queueable = bufferCount;
+ ret = allocImageBufs(frame.mAlignment / getBPP(mParameters.getPictureFormat()),
+ frame.mHeight,
+ frame.mLength,
+ mParameters.getPictureFormat(),
+ bufferCount);
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGEB("allocImageBufs returned error 0x%x", ret);
}
}
+ }
- if ( (NO_ERROR == ret) && ( NULL != mCameraAdapter ) )
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+ CameraHal::PPM("Takepicture buffers allocated: ", &mStartCapture);
+ memcpy(&mImageBuffers->ppmStamp, &mStartCapture, sizeof(struct timeval));
+
+#endif
+
+ if ( (NO_ERROR == ret) && ( NULL != mCameraAdapter ) )
{
- desc.mBuffers = mImageBufs;
+ desc.mBuffers = mImageBuffers;
desc.mOffsets = mImageOffsets;
desc.mFd = mImageFd;
desc.mLength = mImageLength;
desc.mCount = ( size_t ) bufferCount;
- desc.mMaxQueueable = ( size_t ) bufferCount;
+ desc.mMaxQueueable = ( size_t ) max_queueable;
ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_USE_BUFFERS_IMAGE_CAPTURE,
( int ) &desc);
}
+ if (mRawCapture) {
+ if ( NO_ERROR == ret ) {
+ CAMHAL_LOGDB("Raw capture buffers setup - %s", mParameters.getPictureFormat());
+ ret = allocRawBufs(mParameters.getInt(TICameraParameters::RAW_WIDTH),
+ mParameters.getInt(TICameraParameters::RAW_HEIGHT),
+ android::CameraParameters::PIXEL_FORMAT_BAYER_RGGB,
+ rawBufferCount);
+
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGEB("allocRawBufs (for RAW capture) returned error 0x%x", ret);
+ }
+ }
+
+ if ((NO_ERROR == ret) && ( NULL != mCameraAdapter )) {
+ desc.mBuffers = mVideoBuffers;
+ desc.mOffsets = mVideoOffsets;
+ desc.mFd = mVideoFd;
+ desc.mLength = mVideoLength;
+ desc.mCount = ( size_t ) rawBufferCount;
+ desc.mMaxQueueable = ( size_t ) rawBufferCount;
+
+ ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_USE_BUFFERS_VIDEO_CAPTURE,
+ ( int ) &desc);
+ }
}
+ }
- if ( ( NO_ERROR == ret ) && ( NULL != mCameraAdapter ) )
- {
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+ CameraHal::PPM("Takepicture buffers registered: ", &mStartCapture);
+
+#endif
+
+ if ((ret == NO_ERROR) && mBufferSourceAdapter_Out.get()) {
+ mBufferSourceAdapter_Out->enableDisplay(0, 0, NULL);
+ }
+
+ if ((NO_ERROR == ret) && (NULL != mCameraAdapter)) {
#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
//pass capture timestamp along with the camera adapter command
ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_START_IMAGE_CAPTURE, (int) &mStartCapture);
+ CameraHal::PPM("Takepicture capture started: ", &mStartCapture);
+
#else
ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_START_IMAGE_CAPTURE);
#endif
- }
+ }
return ret;
}
@@ -2637,11 +3540,9 @@ status_t CameraHal::takePicture( )
status_t CameraHal::cancelPicture( )
{
LOG_FUNCTION_NAME;
+ status_t ret = NO_ERROR;
- Mutex::Autolock lock(mLock);
-
- mCameraAdapter->sendCommand(CameraAdapter::CAMERA_STOP_IMAGE_CAPTURE);
-
+ ret = signalEndImageCapture();
return NO_ERROR;
}
@@ -2654,7 +3555,7 @@ status_t CameraHal::cancelPicture( )
*/
char* CameraHal::getParameters()
{
- String8 params_str8;
+ android::String8 params_str8;
char* params_string;
const char * valstr = NULL;
@@ -2665,16 +3566,38 @@ char* CameraHal::getParameters()
mCameraAdapter->getParameters(mParameters);
}
- CameraParameters mParams = mParameters;
+ if ( (valstr = mParameters.get(TICameraParameters::KEY_S3D_CAP_FRAME_LAYOUT)) != NULL ) {
+ if (!strcmp(TICameraParameters::S3D_TB_FULL, valstr)) {
+ mParameters.set(android::CameraParameters::KEY_SUPPORTED_PICTURE_SIZES, mParameters.get(TICameraParameters::KEY_SUPPORTED_PICTURE_TOPBOTTOM_SIZES));
+ } else if (!strcmp(TICameraParameters::S3D_SS_FULL, valstr)) {
+ mParameters.set(android::CameraParameters::KEY_SUPPORTED_PICTURE_SIZES, mParameters.get(TICameraParameters::KEY_SUPPORTED_PICTURE_SIDEBYSIDE_SIZES));
+ } else if ((!strcmp(TICameraParameters::S3D_TB_SUBSAMPLED, valstr))
+ || (!strcmp(TICameraParameters::S3D_SS_SUBSAMPLED, valstr))) {
+ mParameters.set(android::CameraParameters::KEY_SUPPORTED_PICTURE_SIZES, mParameters.get(TICameraParameters::KEY_SUPPORTED_PICTURE_SUBSAMPLED_SIZES));
+ }
+ }
+
+ if ( (valstr = mParameters.get(TICameraParameters::KEY_S3D_PRV_FRAME_LAYOUT)) != NULL ) {
+ if (!strcmp(TICameraParameters::S3D_TB_FULL, valstr)) {
+ mParameters.set(android::CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES, mParameters.get(TICameraParameters::KEY_SUPPORTED_PREVIEW_TOPBOTTOM_SIZES));
+ } else if (!strcmp(TICameraParameters::S3D_SS_FULL, valstr)) {
+ mParameters.set(android::CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES, mParameters.get(TICameraParameters::KEY_SUPPORTED_PREVIEW_SIDEBYSIDE_SIZES));
+ } else if ((!strcmp(TICameraParameters::S3D_TB_SUBSAMPLED, valstr))
+ || (!strcmp(TICameraParameters::S3D_SS_SUBSAMPLED, valstr))) {
+ mParameters.set(android::CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES, mParameters.get(TICameraParameters::KEY_SUPPORTED_PREVIEW_SUBSAMPLED_SIZES));
+ }
+ }
+
+ android::CameraParameters mParams = mParameters;
// Handle RECORDING_HINT to Set/Reset Video Mode Parameters
- valstr = mParameters.get(CameraParameters::KEY_RECORDING_HINT);
+ valstr = mParameters.get(android::CameraParameters::KEY_RECORDING_HINT);
if(valstr != NULL)
{
- if(strcmp(valstr, CameraParameters::TRUE) == 0)
+ if(strcmp(valstr, android::CameraParameters::TRUE) == 0)
{
//HACK FOR MMS MODE
- resetPreviewRes(&mParams, mVideoWidth, mVideoHeight);
+ resetPreviewRes(&mParams);
}
}
@@ -2695,6 +3618,134 @@ char* CameraHal::getParameters()
return params_string;
}
+
+#ifdef OMAP_ENHANCEMENT_CPCAM
+/**
+ @brief Starts reprocessing operation.
+ */
+status_t CameraHal::reprocess(const char *params)
+{
+ status_t ret = NO_ERROR;
+ int bufferCount = 0;
+ CameraAdapter::BuffersDescriptor desc;
+ CameraBuffer *reprocBuffers = NULL;
+ android::ShotParameters shotParams;
+ const char *valStr = NULL;
+ struct timeval startReprocess;
+
+ android::AutoMutex lock(mLock);
+
+ LOG_FUNCTION_NAME;
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+ gettimeofday(&startReprocess, NULL);
+
+#endif
+
+ // 0. Get tap in surface
+ if (strlen(params) > 0) {
+ android::String8 shotParams8(params);
+ shotParams.unflatten(shotParams8);
+ }
+
+ valStr = shotParams.get(android::ShotParameters::KEY_CURRENT_TAP_IN);
+ if (valStr != NULL) {
+ int index = -1;
+ for (unsigned int i = 0; i < mInAdapters.size(); i++) {
+ if(mInAdapters.itemAt(i)->match(valStr)) {
+ index = i;
+ break;
+ }
+ }
+ if (index < 0) {
+ CAMHAL_LOGE("Invalid tap in surface passed to camerahal");
+ return BAD_VALUE;
+ }
+ CAMHAL_LOGD("Found matching in adapter at %d", index);
+ mBufferSourceAdapter_In = mInAdapters.itemAt(index);
+ } else {
+ CAMHAL_LOGE("No tap in surface sent with shot config!");
+ return BAD_VALUE;
+ }
+
+ // 1. Get buffers
+ if (mBufferSourceAdapter_In.get()) {
+ reprocBuffers = mBufferSourceAdapter_In->getBufferList(&bufferCount);
+ }
+
+ if (!reprocBuffers) {
+ CAMHAL_LOGE("Error: couldn't get input buffers for reprocess()");
+ goto exit;
+ }
+
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+ CameraHal::PPM("Got reprocess buffers: ", &startReprocess);
+
+#endif
+
+ // 2. Get buffer information and parse parameters
+ {
+ shotParams.setBurst(bufferCount);
+ }
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+ memcpy(&reprocBuffers->ppmStamp, &startReprocess, sizeof(struct timeval));
+
+#endif
+
+ // 3. Give buffer to camera adapter
+ desc.mBuffers = reprocBuffers;
+ desc.mOffsets = 0;
+ desc.mFd = 0;
+ desc.mLength = 0;
+ desc.mCount = (size_t) bufferCount;
+ desc.mMaxQueueable = (size_t) bufferCount;
+
+ ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_USE_BUFFERS_REPROCESS, (int) &desc);
+ if (ret != NO_ERROR) {
+ CAMHAL_LOGE("Error calling camera use buffers");
+ goto exit;
+ }
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+ CameraHal::PPM("Reprocess buffers registered: ", &startReprocess);
+
+#endif
+
+ // 4. Start reprocessing
+ ret = mBufferSourceAdapter_In->enableDisplay(0, 0, NULL);
+ if (ret != NO_ERROR) {
+ CAMHAL_LOGE("Error enabling tap in point");
+ goto exit;
+ }
+
+ // 5. Start capturing
+ ret = __takePicture(shotParams.flatten().string(), &startReprocess);
+
+exit:
+ return ret;
+}
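A hedged usage sketch of the CPCam capture/reprocess round trip built on the code above. The surface ids, the hal pointer, and ShotParameters::set() are illustrative assumptions; setBurst(), flatten(), KEY_CURRENT_TAP_OUT/KEY_CURRENT_TAP_IN, takePicture() and reprocess() are the entry points shown in this change, and the tap surfaces must already have been registered through setBufferSource().

    android::ShotParameters captureShot;
    captureShot.setBurst(1);
    captureShot.set(android::ShotParameters::KEY_CURRENT_TAP_OUT, "tapout0"); // id from get_id()
    hal->takePicture(captureShot.flatten().string());      // capture into the tap-out buffers

    android::ShotParameters reprocessShot;
    reprocessShot.set(android::ShotParameters::KEY_CURRENT_TAP_IN, "tapin0"); // id from get_id()
    hal->reprocess(reprocessShot.flatten().string());      // feed the buffers back for reprocessing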
+
+/**
+ @brief Cancels current reprocessing operation
+
+ */
+status_t CameraHal::cancel_reprocess( )
+{
+ LOG_FUNCTION_NAME;
+ status_t ret = NO_ERROR;
+
+ ret = signalEndImageCapture();
+ return NO_ERROR;
+}
+#endif
+
+
void CameraHal::putParameters(char *parms)
{
free(parms);
@@ -2714,7 +3765,6 @@ status_t CameraHal::sendCommand(int32_t cmd, int32_t arg1, int32_t arg2)
LOG_FUNCTION_NAME;
-
if ( ( NO_ERROR == ret ) && ( NULL == mCameraAdapter ) )
{
CAMHAL_LOGEA("No CameraAdapter instance");
@@ -2724,24 +3774,31 @@ status_t CameraHal::sendCommand(int32_t cmd, int32_t arg1, int32_t arg2)
///////////////////////////////////////////////////////
// Following commands do NOT need preview to be started
///////////////////////////////////////////////////////
- switch(cmd) {
- case CAMERA_CMD_ENABLE_FOCUS_MOVE_MSG:
- bool enable = static_cast<bool>(arg1);
- Mutex::Autolock lock(mLock);
- if (enable) {
- mMsgEnabled |= CAMERA_MSG_FOCUS_MOVE;
- } else {
- mMsgEnabled &= ~CAMERA_MSG_FOCUS_MOVE;
- }
- return NO_ERROR;
- break;
+
+ switch ( cmd ) {
+#ifdef ANDROID_API_JB_OR_LATER
+ case CAMERA_CMD_ENABLE_FOCUS_MOVE_MSG:
+ {
+ const bool enable = static_cast<bool>(arg1);
+ android::AutoMutex lock(mLock);
+ if ( enable ) {
+ mMsgEnabled |= CAMERA_MSG_FOCUS_MOVE;
+ } else {
+ mMsgEnabled &= ~CAMERA_MSG_FOCUS_MOVE;
+ }
+ }
+ return OK;
+#endif
}
- if ( ( NO_ERROR == ret ) && ( !previewEnabled() ))
- {
+ if ( ret == OK && !previewEnabled()
+#ifdef OMAP_ENHANCEMENT_VTC
+ && (cmd != CAMERA_CMD_PREVIEW_INITIALIZATION)
+#endif
+ ) {
CAMHAL_LOGEA("Preview is not running");
ret = -EINVAL;
- }
+ }
///////////////////////////////////////////////////////
// Following commands NEED preview to be started
@@ -2759,6 +3816,7 @@ status_t CameraHal::sendCommand(int32_t cmd, int32_t arg1, int32_t arg2)
case CAMERA_CMD_STOP_SMOOTH_ZOOM:
ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_STOP_SMOOTH_ZOOM);
+ break;
case CAMERA_CMD_START_FACE_DETECTION:
@@ -2772,6 +3830,31 @@ status_t CameraHal::sendCommand(int32_t cmd, int32_t arg1, int32_t arg2)
break;
+#ifdef OMAP_ENHANCEMENT_VTC
+ case CAMERA_CMD_PREVIEW_DEINITIALIZATION:
+ if(mDisplayAdapter.get() != NULL) {
+ ///Stop the buffer display first
+ mDisplayAdapter->disableDisplay();
+ }
+
+ if(mAppCallbackNotifier.get() != NULL) {
+ //Stop the callback sending
+ mAppCallbackNotifier->stop();
+ mAppCallbackNotifier->flushAndReturnFrames();
+ mAppCallbackNotifier->stopPreviewCallbacks();
+ }
+
+ ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_DESTROY_TUNNEL);
+ mTunnelSetup = false;
+
+ break;
+
+ case CAMERA_CMD_PREVIEW_INITIALIZATION:
+ ret = cameraPreviewInitialization();
+
+ break;
+#endif
+
default:
break;
};
@@ -2837,11 +3920,11 @@ CameraHal::CameraHal(int cameraId)
///Initialize all the member variables to their defaults
mPreviewEnabled = false;
- mPreviewBufs = NULL;
- mImageBufs = NULL;
+ mPreviewBuffers = NULL;
+ mImageBuffers = NULL;
mBufProvider = NULL;
mPreviewStartInProgress = false;
- mVideoBufs = NULL;
+ mVideoBuffers = NULL;
mVideoBufProvider = NULL;
mRecordingEnabled = false;
mDisplayPaused = false;
@@ -2858,13 +3941,14 @@ CameraHal::CameraHal(int cameraId)
mMaxZoomSupported = 0;
mShutterEnabled = true;
mMeasurementEnabled = false;
- mPreviewDataBufs = NULL;
+ mPreviewDataBuffers = NULL;
mCameraProperties = NULL;
mCurrentTime = 0;
mFalsePreview = 0;
mImageOffsets = NULL;
mImageLength = 0;
mImageFd = 0;
+ mImageCount = 0;
mVideoOffsets = NULL;
mVideoFd = 0;
mVideoLength = 0;
@@ -2883,6 +3967,19 @@ CameraHal::CameraHal(int cameraId)
mSensorListener = NULL;
mVideoWidth = 0;
mVideoHeight = 0;
+#ifdef OMAP_ENHANCEMENT_VTC
+ mVTCUseCase = false;
+ mTunnelSetup = false;
+#endif
+ mPreviewInitializationDone = false;
+
+#ifdef OMAP_ENHANCEMENT_CPCAM
+ mExtendedPreviewStreamOps = 0;
+#endif
+
+ // These values depend on the sensor characteristics
+
+ mRawCapture = false;
#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
@@ -2932,6 +4029,7 @@ CameraHal::~CameraHal()
}
freeImageBufs();
+ freeRawBufs();
/// Free the memory manager
mMemoryManager.clear();
@@ -2956,6 +4054,7 @@ status_t CameraHal::initialize(CameraProperties::Properties* properties)
LOG_FUNCTION_NAME;
int sensor_index = 0;
+ const char* sensor_name = NULL;
///Initialize the event mask used for registering an event provider for AppCallbackNotifier
///Currently, registering all events as to be coming from CameraAdapter
@@ -2978,9 +4077,22 @@ status_t CameraHal::initialize(CameraProperties::Properties* properties)
sensor_index = atoi(mCameraProperties->get(CameraProperties::CAMERA_SENSOR_INDEX));
}
- CAMHAL_LOGDB("Sensor index %d", sensor_index);
+ if (strcmp(CameraProperties::DEFAULT_VALUE, mCameraProperties->get(CameraProperties::CAMERA_NAME)) != 0 ) {
+ sensor_name = mCameraProperties->get(CameraProperties::CAMERA_NAME);
+ }
+ CAMHAL_LOGDB("Sensor index= %d; Sensor name= %s", sensor_index, sensor_name);
+
+ if (strcmp(sensor_name, V4L_CAMERA_NAME_USB) == 0) {
+#ifdef V4L_CAMERA_ADAPTER
+ mCameraAdapter = V4LCameraAdapter_Factory(sensor_index);
+#endif
+ }
+ else {
+#ifdef OMX_CAMERA_ADAPTER
+ mCameraAdapter = OMXCameraAdapter_Factory(sensor_index);
+#endif
+ }
- mCameraAdapter = CameraAdapter_Factory(sensor_index);
if ( ( NULL == mCameraAdapter ) || (mCameraAdapter->initialize(properties)!=NO_ERROR))
{
CAMHAL_LOGEA("Unable to create or initialize CameraAdapter");
@@ -3078,39 +4190,64 @@ status_t CameraHal::initialize(CameraProperties::Properties* properties)
bool CameraHal::isResolutionValid(unsigned int width, unsigned int height, const char *supportedResolutions)
{
- bool ret = true;
+ bool ret = false;
status_t status = NO_ERROR;
- char tmpBuffer[PARAM_BUFFER + 1];
+ char tmpBuffer[MAX_PROP_VALUE_LENGTH];
char *pos = NULL;
LOG_FUNCTION_NAME;
- if ( NULL == supportedResolutions )
- {
+ if (NULL == supportedResolutions) {
CAMHAL_LOGEA("Invalid supported resolutions string");
- ret = false;
goto exit;
- }
+ }
- status = snprintf(tmpBuffer, PARAM_BUFFER, "%dx%d", width, height);
- if ( 0 > status )
- {
+ status = snprintf(tmpBuffer, MAX_PROP_VALUE_LENGTH - 1, "%dx%d", width, height);
+ if (0 > status) {
CAMHAL_LOGEA("Error encountered while generating validation string");
- ret = false;
goto exit;
- }
+ }
- pos = strstr(supportedResolutions, tmpBuffer);
- if ( NULL == pos )
- {
- ret = false;
- }
- else
- {
- ret = true;
- }
+ ret = isParameterValid(tmpBuffer, supportedResolutions);
exit:
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+bool CameraHal::isFpsRangeValid(int fpsMin, int fpsMax, const char *supportedFpsRanges)
+{
+ bool ret = false;
+ char supported[MAX_PROP_VALUE_LENGTH];
+ char *pos;
+ int suppFpsRangeArray[2];
+ int i = 0;
+
+ LOG_FUNCTION_NAME;
+
+ if ( NULL == supportedFpsRanges ) {
+ CAMHAL_LOGEA("Invalid supported FPS ranges string");
+ return false;
+ }
+
+ if (fpsMin <= 0 || fpsMax <= 0 || fpsMin > fpsMax) {
+ return false;
+ }
+
+ strncpy(supported, supportedFpsRanges, MAX_PROP_VALUE_LENGTH - 1);
+ pos = strtok(supported, " (,)");
+ while (pos != NULL) {
+ suppFpsRangeArray[i] = atoi(pos);
+ if (i++) {
+ if (fpsMin >= suppFpsRangeArray[0] && fpsMax <= suppFpsRangeArray[1]) {
+ ret = true;
+ break;
+ }
+ i = 0;
+ }
+ pos = strtok(NULL, " (,)");
+ }
LOG_FUNCTION_NAME_EXIT;
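Usage sketch for the new isFpsRangeValid() helper, as it would be called from within CameraHal. The ranges string is an assumed example in the standard "(min1,max1),(min2,max2)" form, with values in fps*1000.

    const char *ranges = "(7500,30000),(30000,30000)";
    isFpsRangeValid(15000, 30000, ranges);  // true:  fits inside (7500,30000)
    isFpsRangeValid(15000, 60000, ranges);  // false: no supported range covers 60000
    isFpsRangeValid(30000, 15000, ranges);  // false: min > max is rejected before parsing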
@@ -3119,37 +4256,34 @@ exit:
bool CameraHal::isParameterValid(const char *param, const char *supportedParams)
{
- bool ret = true;
- char *pos = NULL;
+ bool ret = false;
+ char *pos;
+ char supported[MAX_PROP_VALUE_LENGTH];
LOG_FUNCTION_NAME;
- if ( NULL == supportedParams )
- {
+ if (NULL == supportedParams) {
CAMHAL_LOGEA("Invalid supported parameters string");
- ret = false;
goto exit;
- }
+ }
- if ( NULL == param )
- {
+ if (NULL == param) {
CAMHAL_LOGEA("Invalid parameter string");
- ret = false;
goto exit;
- }
+ }
- pos = strstr(supportedParams, param);
- if ( NULL == pos )
- {
- ret = false;
- }
- else
- {
- ret = true;
+ strncpy(supported, supportedParams, MAX_PROP_VALUE_LENGTH - 1);
+
+ pos = strtok(supported, ",");
+ while (pos != NULL) {
+ if (!strcmp(pos, param)) {
+ ret = true;
+ break;
}
+ pos = strtok(NULL, ",");
+ }
exit:
-
LOG_FUNCTION_NAME_EXIT;
return ret;
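A short sketch of why the switch from strstr() to token-wise comparison matters: with the old code, a value that is merely a substring of a supported entry was wrongly accepted. The parameter strings below are illustrative.

    const char *supportedRates = "15,24,30";
    isParameterValid("15", supportedRates);  // true with both the old and the new check
    isParameterValid("5", supportedRates);   // old strstr() check: true (substring of "15");
                                             // new token-wise check: false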
@@ -3157,40 +4291,26 @@ exit:
bool CameraHal::isParameterValid(int param, const char *supportedParams)
{
- bool ret = true;
- char *pos = NULL;
+ bool ret = false;
status_t status;
- char tmpBuffer[PARAM_BUFFER + 1];
+ char tmpBuffer[MAX_PROP_VALUE_LENGTH];
LOG_FUNCTION_NAME;
- if ( NULL == supportedParams )
- {
+ if (NULL == supportedParams) {
CAMHAL_LOGEA("Invalid supported parameters string");
- ret = false;
goto exit;
- }
+ }
- status = snprintf(tmpBuffer, PARAM_BUFFER, "%d", param);
- if ( 0 > status )
- {
+ status = snprintf(tmpBuffer, MAX_PROP_VALUE_LENGTH - 1, "%d", param);
+ if (0 > status) {
CAMHAL_LOGEA("Error encountered while generating validation string");
- ret = false;
goto exit;
- }
+ }
- pos = strstr(supportedParams, tmpBuffer);
- if ( NULL == pos )
- {
- ret = false;
- }
- else
- {
- ret = true;
- }
+ ret = isParameterValid(tmpBuffer, supportedParams);
exit:
-
LOG_FUNCTION_NAME_EXIT;
return ret;
@@ -3214,7 +4334,6 @@ status_t CameraHal::parseResolution(const char *resStr, int &width, int &height)
status_t ret = NO_ERROR;
char *ctx, *pWidth, *pHeight;
const char *sep = "x";
- char *tmp = NULL;
LOG_FUNCTION_NAME;
@@ -3225,11 +4344,10 @@ status_t CameraHal::parseResolution(const char *resStr, int &width, int &height)
//This fixes "Invalid input resolution"
char *resStr_copy = (char *)malloc(strlen(resStr) + 1);
- if ( NULL!=resStr_copy ) {
- if ( NO_ERROR == ret )
+ if ( NULL != resStr_copy )
{
strcpy(resStr_copy, resStr);
- pWidth = strtok_r( (char *) resStr_copy, sep, &ctx);
+ pWidth = strtok_r(resStr_copy, sep, &ctx);
if ( NULL != pWidth )
{
@@ -3257,9 +4375,9 @@ status_t CameraHal::parseResolution(const char *resStr, int &width, int &height)
}
}
- free(resStr_copy);
- resStr_copy = NULL;
- }
+ free(resStr_copy);
+ resStr_copy = NULL;
+
LOG_FUNCTION_NAME_EXIT;
return ret;
@@ -3267,51 +4385,65 @@ status_t CameraHal::parseResolution(const char *resStr, int &width, int &height)
void CameraHal::insertSupportedParams()
{
- char tmpBuffer[PARAM_BUFFER + 1];
-
LOG_FUNCTION_NAME;
- CameraParameters &p = mParameters;
+ android::CameraParameters &p = mParameters;
///Set the name of the camera
p.set(TICameraParameters::KEY_CAMERA_NAME, mCameraProperties->get(CameraProperties::CAMERA_NAME));
mMaxZoomSupported = atoi(mCameraProperties->get(CameraProperties::SUPPORTED_ZOOM_STAGES));
- p.set(CameraParameters::KEY_SUPPORTED_PICTURE_SIZES, mCameraProperties->get(CameraProperties::SUPPORTED_PICTURE_SIZES));
- p.set(CameraParameters::KEY_SUPPORTED_PICTURE_FORMATS, mCameraProperties->get(CameraProperties::SUPPORTED_PICTURE_FORMATS));
- p.set(CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES, mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_SIZES));
- p.set(CameraParameters::KEY_SUPPORTED_PREVIEW_FORMATS, mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_FORMATS));
- p.set(CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES, mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_FRAME_RATES));
- p.set(CameraParameters::KEY_SUPPORTED_JPEG_THUMBNAIL_SIZES, mCameraProperties->get(CameraProperties::SUPPORTED_THUMBNAIL_SIZES));
- p.set(CameraParameters::KEY_SUPPORTED_WHITE_BALANCE, mCameraProperties->get(CameraProperties::SUPPORTED_WHITE_BALANCE));
- p.set(CameraParameters::KEY_SUPPORTED_EFFECTS, mCameraProperties->get(CameraProperties::SUPPORTED_EFFECTS));
- p.set(CameraParameters::KEY_SUPPORTED_SCENE_MODES, mCameraProperties->get(CameraProperties::SUPPORTED_SCENE_MODES));
- p.set(CameraParameters::KEY_SUPPORTED_FLASH_MODES, mCameraProperties->get(CameraProperties::SUPPORTED_FLASH_MODES));
- p.set(CameraParameters::KEY_SUPPORTED_FOCUS_MODES, mCameraProperties->get(CameraProperties::SUPPORTED_FOCUS_MODES));
- p.set(CameraParameters::KEY_SUPPORTED_ANTIBANDING, mCameraProperties->get(CameraProperties::SUPPORTED_ANTIBANDING));
- p.set(CameraParameters::KEY_MAX_EXPOSURE_COMPENSATION, mCameraProperties->get(CameraProperties::SUPPORTED_EV_MAX));
- p.set(CameraParameters::KEY_MIN_EXPOSURE_COMPENSATION, mCameraProperties->get(CameraProperties::SUPPORTED_EV_MIN));
- p.set(CameraParameters::KEY_EXPOSURE_COMPENSATION_STEP, mCameraProperties->get(CameraProperties::SUPPORTED_EV_STEP));
- p.set(CameraParameters::KEY_SUPPORTED_SCENE_MODES, mCameraProperties->get(CameraProperties::SUPPORTED_SCENE_MODES));
+ p.set(android::CameraParameters::KEY_SUPPORTED_PICTURE_SIZES, mCameraProperties->get(CameraProperties::SUPPORTED_PICTURE_SIZES));
+ p.set(android::CameraParameters::KEY_SUPPORTED_PICTURE_FORMATS, mCameraProperties->get(CameraProperties::SUPPORTED_PICTURE_FORMATS));
+ p.set(android::CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES, mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_SIZES));
+ p.set(android::CameraParameters::KEY_SUPPORTED_PREVIEW_FORMATS, mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_FORMATS));
+ p.set(TICameraParameters::KEY_SUPPORTED_PICTURE_SUBSAMPLED_SIZES, mCameraProperties->get(CameraProperties::SUPPORTED_PICTURE_SUBSAMPLED_SIZES));
+ p.set(TICameraParameters::KEY_SUPPORTED_PICTURE_SIDEBYSIDE_SIZES, mCameraProperties->get(CameraProperties::SUPPORTED_PICTURE_SIDEBYSIDE_SIZES));
+ p.set(TICameraParameters::KEY_SUPPORTED_PICTURE_TOPBOTTOM_SIZES, mCameraProperties->get(CameraProperties::SUPPORTED_PICTURE_TOPBOTTOM_SIZES));
+ p.set(TICameraParameters::KEY_SUPPORTED_PREVIEW_SUBSAMPLED_SIZES, mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_SUBSAMPLED_SIZES));
+ p.set(TICameraParameters::KEY_SUPPORTED_PREVIEW_SIDEBYSIDE_SIZES, mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_SIDEBYSIDE_SIZES));
+ p.set(TICameraParameters::KEY_SUPPORTED_PREVIEW_TOPBOTTOM_SIZES, mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_TOPBOTTOM_SIZES));
+ p.set(android::CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES, mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_FRAME_RATES));
+ p.set(TICameraParameters::KEY_FRAMERATES_EXT_SUPPORTED, mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_FRAME_RATES_EXT));
+ p.set(android::CameraParameters::KEY_SUPPORTED_PREVIEW_FPS_RANGE, mCameraProperties->get(CameraProperties::FRAMERATE_RANGE_SUPPORTED));
+ p.set(TICameraParameters::KEY_FRAMERATE_RANGES_EXT_SUPPORTED, mCameraProperties->get(CameraProperties::FRAMERATE_RANGE_EXT_SUPPORTED));
+ p.set(android::CameraParameters::KEY_SUPPORTED_JPEG_THUMBNAIL_SIZES, mCameraProperties->get(CameraProperties::SUPPORTED_THUMBNAIL_SIZES));
+ p.set(android::CameraParameters::KEY_SUPPORTED_WHITE_BALANCE, mCameraProperties->get(CameraProperties::SUPPORTED_WHITE_BALANCE));
+ p.set(android::CameraParameters::KEY_SUPPORTED_EFFECTS, mCameraProperties->get(CameraProperties::SUPPORTED_EFFECTS));
+ p.set(android::CameraParameters::KEY_SUPPORTED_SCENE_MODES, mCameraProperties->get(CameraProperties::SUPPORTED_SCENE_MODES));
+ p.set(android::CameraParameters::KEY_SUPPORTED_FLASH_MODES, mCameraProperties->get(CameraProperties::SUPPORTED_FLASH_MODES));
+ p.set(android::CameraParameters::KEY_SUPPORTED_FOCUS_MODES, mCameraProperties->get(CameraProperties::SUPPORTED_FOCUS_MODES));
+ p.set(android::CameraParameters::KEY_SUPPORTED_ANTIBANDING, mCameraProperties->get(CameraProperties::SUPPORTED_ANTIBANDING));
+ p.set(android::CameraParameters::KEY_MAX_EXPOSURE_COMPENSATION, mCameraProperties->get(CameraProperties::SUPPORTED_EV_MAX));
+ p.set(android::CameraParameters::KEY_MIN_EXPOSURE_COMPENSATION, mCameraProperties->get(CameraProperties::SUPPORTED_EV_MIN));
+ p.set(android::CameraParameters::KEY_EXPOSURE_COMPENSATION_STEP, mCameraProperties->get(CameraProperties::SUPPORTED_EV_STEP));
p.set(TICameraParameters::KEY_SUPPORTED_EXPOSURE, mCameraProperties->get(CameraProperties::SUPPORTED_EXPOSURE_MODES));
+ p.set(TICameraParameters::KEY_SUPPORTED_MANUAL_EXPOSURE_MIN, mCameraProperties->get(CameraProperties::SUPPORTED_MANUAL_EXPOSURE_MIN));
+ p.set(TICameraParameters::KEY_SUPPORTED_MANUAL_EXPOSURE_MAX, mCameraProperties->get(CameraProperties::SUPPORTED_MANUAL_EXPOSURE_MAX));
+ p.set(TICameraParameters::KEY_SUPPORTED_MANUAL_EXPOSURE_STEP, mCameraProperties->get(CameraProperties::SUPPORTED_MANUAL_EXPOSURE_STEP));
+ p.set(TICameraParameters::KEY_SUPPORTED_MANUAL_GAIN_ISO_MIN, mCameraProperties->get(CameraProperties::SUPPORTED_MANUAL_GAIN_ISO_MIN));
+ p.set(TICameraParameters::KEY_SUPPORTED_MANUAL_GAIN_ISO_MAX, mCameraProperties->get(CameraProperties::SUPPORTED_MANUAL_GAIN_ISO_MAX));
+ p.set(TICameraParameters::KEY_SUPPORTED_MANUAL_GAIN_ISO_STEP, mCameraProperties->get(CameraProperties::SUPPORTED_MANUAL_GAIN_ISO_STEP));
p.set(TICameraParameters::KEY_SUPPORTED_ISO_VALUES, mCameraProperties->get(CameraProperties::SUPPORTED_ISO_VALUES));
- p.set(CameraParameters::KEY_ZOOM_RATIOS, mCameraProperties->get(CameraProperties::SUPPORTED_ZOOM_RATIOS));
- p.set(CameraParameters::KEY_MAX_ZOOM, mCameraProperties->get(CameraProperties::SUPPORTED_ZOOM_STAGES));
- p.set(CameraParameters::KEY_ZOOM_SUPPORTED, mCameraProperties->get(CameraProperties::ZOOM_SUPPORTED));
- p.set(CameraParameters::KEY_SMOOTH_ZOOM_SUPPORTED, mCameraProperties->get(CameraProperties::SMOOTH_ZOOM_SUPPORTED));
+ p.set(android::CameraParameters::KEY_ZOOM_RATIOS, mCameraProperties->get(CameraProperties::SUPPORTED_ZOOM_RATIOS));
+ p.set(android::CameraParameters::KEY_MAX_ZOOM, mCameraProperties->get(CameraProperties::SUPPORTED_ZOOM_STAGES));
+ p.set(android::CameraParameters::KEY_ZOOM_SUPPORTED, mCameraProperties->get(CameraProperties::ZOOM_SUPPORTED));
+ p.set(android::CameraParameters::KEY_SMOOTH_ZOOM_SUPPORTED, mCameraProperties->get(CameraProperties::SMOOTH_ZOOM_SUPPORTED));
p.set(TICameraParameters::KEY_SUPPORTED_IPP, mCameraProperties->get(CameraProperties::SUPPORTED_IPP_MODES));
- p.set(TICameraParameters::KEY_S3D_SUPPORTED,mCameraProperties->get(CameraProperties::S3D_SUPPORTED));
- p.set(TICameraParameters::KEY_S3D2D_PREVIEW_MODE,mCameraProperties->get(CameraProperties::S3D2D_PREVIEW_MODES));
- p.set(TICameraParameters::KEY_AUTOCONVERGENCE_MODE, mCameraProperties->get(CameraProperties::AUTOCONVERGENCE_MODE));
- p.set(TICameraParameters::KEY_MANUALCONVERGENCE_VALUES, mCameraProperties->get(CameraProperties::MANUALCONVERGENCE_VALUES));
- p.set(CameraParameters::KEY_VIDEO_STABILIZATION_SUPPORTED, mCameraProperties->get(CameraProperties::VSTAB_SUPPORTED));
- p.set(CameraParameters::KEY_SUPPORTED_PREVIEW_FPS_RANGE, mCameraProperties->get(CameraProperties::FRAMERATE_RANGE_SUPPORTED));
- p.set(TICameraParameters::KEY_SENSOR_ORIENTATION, mCameraProperties->get(CameraProperties::SENSOR_ORIENTATION));
- p.set(TICameraParameters::KEY_SENSOR_ORIENTATION_VALUES, mCameraProperties->get(CameraProperties::SENSOR_ORIENTATION_VALUES));
- p.set(CameraParameters::KEY_AUTO_EXPOSURE_LOCK_SUPPORTED, mCameraProperties->get(CameraProperties::AUTO_EXPOSURE_LOCK_SUPPORTED));
- p.set(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK_SUPPORTED, mCameraProperties->get(CameraProperties::AUTO_WHITEBALANCE_LOCK_SUPPORTED));
- p.set(CameraParameters::KEY_VIDEO_SNAPSHOT_SUPPORTED, mCameraProperties->get(CameraProperties::VIDEO_SNAPSHOT_SUPPORTED));
+ p.set(TICameraParameters::KEY_S3D_PRV_FRAME_LAYOUT_VALUES, mCameraProperties->get(CameraProperties::S3D_PRV_FRAME_LAYOUT_VALUES));
+ p.set(TICameraParameters::KEY_S3D_CAP_FRAME_LAYOUT_VALUES, mCameraProperties->get(CameraProperties::S3D_CAP_FRAME_LAYOUT_VALUES));
+ p.set(TICameraParameters::KEY_AUTOCONVERGENCE_MODE_VALUES, mCameraProperties->get(CameraProperties::AUTOCONVERGENCE_MODE_VALUES));
+ p.set(TICameraParameters::KEY_SUPPORTED_MANUAL_CONVERGENCE_MIN, mCameraProperties->get(CameraProperties::SUPPORTED_MANUAL_CONVERGENCE_MIN));
+ p.set(TICameraParameters::KEY_SUPPORTED_MANUAL_CONVERGENCE_MAX, mCameraProperties->get(CameraProperties::SUPPORTED_MANUAL_CONVERGENCE_MAX));
+ p.set(TICameraParameters::KEY_SUPPORTED_MANUAL_CONVERGENCE_STEP, mCameraProperties->get(CameraProperties::SUPPORTED_MANUAL_CONVERGENCE_STEP));
+ p.set(android::CameraParameters::KEY_VIDEO_STABILIZATION_SUPPORTED, mCameraProperties->get(CameraProperties::VSTAB_SUPPORTED));
+ p.set(TICameraParameters::KEY_VNF_SUPPORTED, mCameraProperties->get(CameraProperties::VNF_SUPPORTED));
+ p.set(android::CameraParameters::KEY_AUTO_EXPOSURE_LOCK_SUPPORTED, mCameraProperties->get(CameraProperties::AUTO_EXPOSURE_LOCK_SUPPORTED));
+ p.set(android::CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK_SUPPORTED, mCameraProperties->get(CameraProperties::AUTO_WHITEBALANCE_LOCK_SUPPORTED));
+ p.set(android::CameraParameters::KEY_VIDEO_SNAPSHOT_SUPPORTED, mCameraProperties->get(CameraProperties::VIDEO_SNAPSHOT_SUPPORTED));
+ p.set(TICameraParameters::KEY_MECHANICAL_MISALIGNMENT_CORRECTION_SUPPORTED, mCameraProperties->get(CameraProperties::MECHANICAL_MISALIGNMENT_CORRECTION_SUPPORTED));
+ p.set(TICameraParameters::KEY_CAP_MODE_VALUES, mCameraProperties->get(CameraProperties::CAP_MODE_VALUES));
LOG_FUNCTION_NAME_EXIT;
@@ -3322,13 +4454,16 @@ void CameraHal::initDefaultParameters()
//Purpose of this function is to initialize the default current and supported parameters for the currently
//selected camera.
- CameraParameters &p = mParameters;
+ android::CameraParameters &p = mParameters;
int currentRevision, adapterRevision;
status_t ret = NO_ERROR;
int width, height;
+ const char *valstr;
LOG_FUNCTION_NAME;
+ insertSupportedParams();
+
ret = parseResolution(mCameraProperties->get(CameraProperties::PREVIEW_SIZE), width, height);
if ( NO_ERROR == ret )
@@ -3355,62 +4490,78 @@ void CameraHal::initDefaultParameters()
if ( NO_ERROR == ret )
{
- p.set(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH, width);
- p.set(CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT, height);
+ p.set(android::CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH, width);
+ p.set(android::CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT, height);
}
else
{
- p.set(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH, MIN_WIDTH);
- p.set(CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT, MIN_HEIGHT);
+ p.set(android::CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH, MIN_WIDTH);
+ p.set(android::CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT, MIN_HEIGHT);
}
- insertSupportedParams();
-
//Insert default values
p.setPreviewFrameRate(atoi(mCameraProperties->get(CameraProperties::PREVIEW_FRAME_RATE)));
+ p.set(android::CameraParameters::KEY_PREVIEW_FPS_RANGE, mCameraProperties->get(CameraProperties::FRAMERATE_RANGE));
p.setPreviewFormat(mCameraProperties->get(CameraProperties::PREVIEW_FORMAT));
p.setPictureFormat(mCameraProperties->get(CameraProperties::PICTURE_FORMAT));
- p.set(CameraParameters::KEY_JPEG_QUALITY, mCameraProperties->get(CameraProperties::JPEG_QUALITY));
- p.set(CameraParameters::KEY_WHITE_BALANCE, mCameraProperties->get(CameraProperties::WHITEBALANCE));
- p.set(CameraParameters::KEY_EFFECT, mCameraProperties->get(CameraProperties::EFFECT));
- p.set(CameraParameters::KEY_ANTIBANDING, mCameraProperties->get(CameraProperties::ANTIBANDING));
- p.set(CameraParameters::KEY_FLASH_MODE, mCameraProperties->get(CameraProperties::FLASH_MODE));
- p.set(CameraParameters::KEY_FOCUS_MODE, mCameraProperties->get(CameraProperties::FOCUS_MODE));
- p.set(CameraParameters::KEY_EXPOSURE_COMPENSATION, mCameraProperties->get(CameraProperties::EV_COMPENSATION));
- p.set(CameraParameters::KEY_SCENE_MODE, mCameraProperties->get(CameraProperties::SCENE_MODE));
- p.set(CameraParameters::KEY_FLASH_MODE, mCameraProperties->get(CameraProperties::FLASH_MODE));
- p.set(CameraParameters::KEY_ZOOM, mCameraProperties->get(CameraProperties::ZOOM));
+ p.set(android::CameraParameters::KEY_JPEG_QUALITY, mCameraProperties->get(CameraProperties::JPEG_QUALITY));
+ p.set(android::CameraParameters::KEY_WHITE_BALANCE, mCameraProperties->get(CameraProperties::WHITEBALANCE));
+ p.set(android::CameraParameters::KEY_EFFECT, mCameraProperties->get(CameraProperties::EFFECT));
+ p.set(android::CameraParameters::KEY_ANTIBANDING, mCameraProperties->get(CameraProperties::ANTIBANDING));
+ p.set(android::CameraParameters::KEY_FLASH_MODE, mCameraProperties->get(CameraProperties::FLASH_MODE));
+ p.set(android::CameraParameters::KEY_FOCUS_MODE, mCameraProperties->get(CameraProperties::FOCUS_MODE));
+ p.set(android::CameraParameters::KEY_EXPOSURE_COMPENSATION, mCameraProperties->get(CameraProperties::EV_COMPENSATION));
+ p.set(android::CameraParameters::KEY_SCENE_MODE, mCameraProperties->get(CameraProperties::SCENE_MODE));
+ p.set(android::CameraParameters::KEY_ZOOM, mCameraProperties->get(CameraProperties::ZOOM));
p.set(TICameraParameters::KEY_CONTRAST, mCameraProperties->get(CameraProperties::CONTRAST));
p.set(TICameraParameters::KEY_SATURATION, mCameraProperties->get(CameraProperties::SATURATION));
p.set(TICameraParameters::KEY_BRIGHTNESS, mCameraProperties->get(CameraProperties::BRIGHTNESS));
p.set(TICameraParameters::KEY_SHARPNESS, mCameraProperties->get(CameraProperties::SHARPNESS));
p.set(TICameraParameters::KEY_EXPOSURE_MODE, mCameraProperties->get(CameraProperties::EXPOSURE_MODE));
+ p.set(TICameraParameters::KEY_MANUAL_EXPOSURE, mCameraProperties->get(CameraProperties::SUPPORTED_MANUAL_EXPOSURE_MIN));
+ p.set(TICameraParameters::KEY_MANUAL_EXPOSURE_RIGHT, mCameraProperties->get(CameraProperties::SUPPORTED_MANUAL_EXPOSURE_MIN));
+ p.set(TICameraParameters::KEY_MANUAL_GAIN_ISO, mCameraProperties->get(CameraProperties::SUPPORTED_MANUAL_GAIN_ISO_MIN));
+ p.set(TICameraParameters::KEY_MANUAL_GAIN_ISO_RIGHT, mCameraProperties->get(CameraProperties::SUPPORTED_MANUAL_GAIN_ISO_MIN));
p.set(TICameraParameters::KEY_ISO, mCameraProperties->get(CameraProperties::ISO_MODE));
p.set(TICameraParameters::KEY_IPP, mCameraProperties->get(CameraProperties::IPP));
p.set(TICameraParameters::KEY_GBCE, mCameraProperties->get(CameraProperties::GBCE));
- p.set(TICameraParameters::KEY_S3D2D_PREVIEW, mCameraProperties->get(CameraProperties::S3D2D_PREVIEW));
- p.set(TICameraParameters::KEY_AUTOCONVERGENCE, mCameraProperties->get(CameraProperties::AUTOCONVERGENCE));
- p.set(TICameraParameters::KEY_MANUALCONVERGENCE_VALUES, mCameraProperties->get(CameraProperties::MANUALCONVERGENCE_VALUES));
- p.set(CameraParameters::KEY_VIDEO_STABILIZATION, mCameraProperties->get(CameraProperties::VSTAB));
- p.set(CameraParameters::KEY_FOCAL_LENGTH, mCameraProperties->get(CameraProperties::FOCAL_LENGTH));
- p.set(CameraParameters::KEY_HORIZONTAL_VIEW_ANGLE, mCameraProperties->get(CameraProperties::HOR_ANGLE));
- p.set(CameraParameters::KEY_VERTICAL_VIEW_ANGLE, mCameraProperties->get(CameraProperties::VER_ANGLE));
- p.set(CameraParameters::KEY_PREVIEW_FPS_RANGE,mCameraProperties->get(CameraProperties::FRAMERATE_RANGE));
+ p.set(TICameraParameters::KEY_GBCE_SUPPORTED, mCameraProperties->get(CameraProperties::SUPPORTED_GBCE));
+ p.set(TICameraParameters::KEY_GLBCE, mCameraProperties->get(CameraProperties::GLBCE));
+ p.set(TICameraParameters::KEY_GLBCE_SUPPORTED, mCameraProperties->get(CameraProperties::SUPPORTED_GLBCE));
+ p.set(TICameraParameters::KEY_S3D_PRV_FRAME_LAYOUT, mCameraProperties->get(CameraProperties::S3D_PRV_FRAME_LAYOUT));
+ p.set(TICameraParameters::KEY_S3D_CAP_FRAME_LAYOUT, mCameraProperties->get(CameraProperties::S3D_CAP_FRAME_LAYOUT));
+ p.set(TICameraParameters::KEY_AUTOCONVERGENCE_MODE, mCameraProperties->get(CameraProperties::AUTOCONVERGENCE_MODE));
+ p.set(TICameraParameters::KEY_MANUAL_CONVERGENCE, mCameraProperties->get(CameraProperties::MANUAL_CONVERGENCE));
+ p.set(android::CameraParameters::KEY_VIDEO_STABILIZATION, mCameraProperties->get(CameraProperties::VSTAB));
+ p.set(TICameraParameters::KEY_VNF, mCameraProperties->get(CameraProperties::VNF));
+ p.set(android::CameraParameters::KEY_FOCAL_LENGTH, mCameraProperties->get(CameraProperties::FOCAL_LENGTH));
+ p.set(android::CameraParameters::KEY_HORIZONTAL_VIEW_ANGLE, mCameraProperties->get(CameraProperties::HOR_ANGLE));
+ p.set(android::CameraParameters::KEY_VERTICAL_VIEW_ANGLE, mCameraProperties->get(CameraProperties::VER_ANGLE));
p.set(TICameraParameters::KEY_SENSOR_ORIENTATION, mCameraProperties->get(CameraProperties::SENSOR_ORIENTATION));
- p.set(TICameraParameters::KEY_SENSOR_ORIENTATION_VALUES, mCameraProperties->get(CameraProperties::SENSOR_ORIENTATION_VALUES));
p.set(TICameraParameters::KEY_EXIF_MAKE, mCameraProperties->get(CameraProperties::EXIF_MAKE));
p.set(TICameraParameters::KEY_EXIF_MODEL, mCameraProperties->get(CameraProperties::EXIF_MODEL));
- p.set(CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY, mCameraProperties->get(CameraProperties::JPEG_THUMBNAIL_QUALITY));
- p.set(CameraParameters::KEY_VIDEO_FRAME_FORMAT, "OMX_TI_COLOR_FormatYUV420PackedSemiPlanar");
- p.set(CameraParameters::KEY_MAX_NUM_DETECTED_FACES_HW, mCameraProperties->get(CameraProperties::MAX_FD_HW_FACES));
- p.set(CameraParameters::KEY_MAX_NUM_DETECTED_FACES_SW, mCameraProperties->get(CameraProperties::MAX_FD_SW_FACES));
-
+ p.set(android::CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY, mCameraProperties->get(CameraProperties::JPEG_THUMBNAIL_QUALITY));
+ p.set(android::CameraParameters::KEY_VIDEO_FRAME_FORMAT, "OMX_TI_COLOR_FormatYUV420PackedSemiPlanar");
+ p.set(android::CameraParameters::KEY_MAX_NUM_DETECTED_FACES_HW, mCameraProperties->get(CameraProperties::MAX_FD_HW_FACES));
+ p.set(android::CameraParameters::KEY_MAX_NUM_DETECTED_FACES_SW, mCameraProperties->get(CameraProperties::MAX_FD_SW_FACES));
+ p.set(TICameraParameters::KEY_MECHANICAL_MISALIGNMENT_CORRECTION, mCameraProperties->get(CameraProperties::MECHANICAL_MISALIGNMENT_CORRECTION));
// Only one area a.k.a Touch AF for now.
// TODO: Add support for multiple focus areas.
- p.set(CameraParameters::KEY_MAX_NUM_FOCUS_AREAS, mCameraProperties->get(CameraProperties::MAX_FOCUS_AREAS));
- p.set(CameraParameters::KEY_AUTO_EXPOSURE_LOCK, mCameraProperties->get(CameraProperties::AUTO_EXPOSURE_LOCK));
- p.set(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK, mCameraProperties->get(CameraProperties::AUTO_WHITEBALANCE_LOCK));
- p.set(CameraParameters::KEY_MAX_NUM_METERING_AREAS, mCameraProperties->get(CameraProperties::MAX_NUM_METERING_AREAS));
+ p.set(android::CameraParameters::KEY_MAX_NUM_FOCUS_AREAS, mCameraProperties->get(CameraProperties::MAX_FOCUS_AREAS));
+ p.set(android::CameraParameters::KEY_AUTO_EXPOSURE_LOCK, mCameraProperties->get(CameraProperties::AUTO_EXPOSURE_LOCK));
+ p.set(android::CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK, mCameraProperties->get(CameraProperties::AUTO_WHITEBALANCE_LOCK));
+ p.set(android::CameraParameters::KEY_MAX_NUM_METERING_AREAS, mCameraProperties->get(CameraProperties::MAX_NUM_METERING_AREAS));
+ p.set(TICameraParameters::RAW_WIDTH, mCameraProperties->get(CameraProperties::RAW_WIDTH));
+ p.set(TICameraParameters::RAW_HEIGHT,mCameraProperties->get(CameraProperties::RAW_HEIGHT));
+
+ // TI extensions for enable/disable algos
+    // Hardcoded for now
+ p.set(TICameraParameters::KEY_ALGO_EXTERNAL_GAMMA, android::CameraParameters::FALSE);
+ p.set(TICameraParameters::KEY_ALGO_NSF1, android::CameraParameters::TRUE);
+ p.set(TICameraParameters::KEY_ALGO_NSF2, android::CameraParameters::TRUE);
+ p.set(TICameraParameters::KEY_ALGO_SHARPENING, android::CameraParameters::TRUE);
+ p.set(TICameraParameters::KEY_ALGO_THREELINCOLORMAP, android::CameraParameters::TRUE);
+ p.set(TICameraParameters::KEY_ALGO_GIC, android::CameraParameters::TRUE);
LOG_FUNCTION_NAME_EXIT;
}
@@ -3426,7 +4577,9 @@ void CameraHal::forceStopPreview()
LOG_FUNCTION_NAME;
// stop bracketing if it is running
- stopImageBracketing();
+ if ( mBracketingRunning ) {
+ stopImageBracketing();
+ }
if(mDisplayAdapter.get() != NULL) {
///Stop the buffer display first
@@ -3490,6 +4643,11 @@ void CameraHal::deinitialize()
mSensorListener = NULL;
}
+ mBufferSourceAdapter_Out.clear();
+ mBufferSourceAdapter_In.clear();
+ mOutAdapters.clear();
+ mInAdapters.clear();
+
LOG_FUNCTION_NAME_EXIT;
}
@@ -3503,66 +4661,51 @@ status_t CameraHal::storeMetaDataInBuffers(bool enable)
LOG_FUNCTION_NAME_EXIT;
}
-void CameraHal::selectFPSRange(int framerate, int *min_fps, int *max_fps)
+void CameraHal::getPreferredPreviewRes(int *width, int *height)
{
- char * ptr;
- char supported[MAX_PROP_VALUE_LENGTH];
- int fpsrangeArray[2];
- int i = 0;
-
- LOG_FUNCTION_NAME;
- size_t size = strlen(mCameraProperties->get(CameraProperties::FRAMERATE_RANGE_SUPPORTED))+1;
- strncpy(supported, mCameraProperties->get(CameraProperties::FRAMERATE_RANGE_SUPPORTED), size);
-
- ptr = strtok (supported," (,)");
+ LOG_FUNCTION_NAME;
- while (ptr != NULL)
- {
- fpsrangeArray[i]= atoi(ptr)/CameraHal::VFR_SCALE;
- if (i == 1)
- {
- if (framerate == fpsrangeArray[i])
- {
- CAMHAL_LOGDB("SETTING FPS RANGE min = %d max = %d \n", fpsrangeArray[0], fpsrangeArray[1]);
- *min_fps = fpsrangeArray[0]*CameraHal::VFR_SCALE;
- *max_fps = fpsrangeArray[1]*CameraHal::VFR_SCALE;
- break;
- }
- }
- ptr = strtok (NULL, " (,)");
- i++;
- i%=2;
+    // We request a higher resolution from Ducati so the preview looks better, then downscale the frame before the callback.
+    // TODO: Move this to configuration constants and a boolean flag controlling whether to apply this optimization.
+    // Also consider letting the application configure the desired display resolution.
+ if ( ( *width == 320 ) && ( *height == 240 ) ) {
+ *width = 640;
+ *height = 480;
+ } else if ( ( *width == 176 ) && ( *height == 144 ) ) {
+ *width = 704;
+ *height = 576;
}
- LOG_FUNCTION_NAME_EXIT;
-
+ LOG_FUNCTION_NAME_EXIT;
}
-void CameraHal::setPreferredPreviewRes(int width, int height)
+void CameraHal::resetPreviewRes(android::CameraParameters *params)
{
LOG_FUNCTION_NAME;
- if ( (width == 320) && (height == 240)){
- mParameters.setPreviewSize(640,480);
- }
- if ( (width == 176) && (height == 144)){
- mParameters.setPreviewSize(704,576);
+ if ( (mVideoWidth <= 320) && (mVideoHeight <= 240)){
+ params->setPreviewSize(mVideoWidth, mVideoHeight);
}
LOG_FUNCTION_NAME_EXIT;
}
-void CameraHal::resetPreviewRes(CameraParameters *mParams, int width, int height)
+void *
+camera_buffer_get_omx_ptr (CameraBuffer *buffer)
{
- LOG_FUNCTION_NAME;
-
- if ( (width <= 320) && (height <= 240)){
- mParams->setPreviewSize(mVideoWidth, mVideoHeight);
- }
-
- LOG_FUNCTION_NAME_EXIT;
+ CAMHAL_LOGV("buffer_type %d opaque %p", buffer->type, buffer->opaque);
+
+ if (buffer->type == CAMERA_BUFFER_ANW) {
+ buffer_handle_t *handle = (buffer_handle_t *)buffer->opaque;
+ CAMHAL_LOGV("anw %08x", *handle);
+ return (void *)*handle;
+ } else if (buffer->type == CAMERA_BUFFER_ION) {
+ return (void *)buffer->fd;
+ } else {
+ CAMHAL_LOGV("other %08x", buffer->opaque);
+ return (void *)buffer->opaque;
+ }
}
-};
-
-
+} // namespace Camera
+} // namespace Ti
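
For reference, a minimal standalone sketch (illustrative only, not part of the patch) of the range check performed by the new isFpsRangeValid(), assuming the supported string uses the "(min1,max1),(min2,max2)" layout shown in the framerate-range properties; the helper name and the sample ranges below are hypothetical:

// Hypothetical standalone sketch mirroring the strtok() loop in isFpsRangeValid().
#include <cstdio>
#include <cstdlib>
#include <cstring>

static bool fpsRangeValid(int fpsMin, int fpsMax, const char *supported)
{
    if (supported == NULL || fpsMin <= 0 || fpsMax <= 0 || fpsMin > fpsMax) {
        return false;
    }

    char buf[256];
    strncpy(buf, supported, sizeof(buf) - 1);
    buf[sizeof(buf) - 1] = '\0';

    int range[2];
    int i = 0;
    for (char *tok = strtok(buf, " (,)"); tok != NULL; tok = strtok(NULL, " (,)")) {
        range[i] = atoi(tok);
        if (i++) {                 // a complete (min,max) pair has been read
            if (fpsMin >= range[0] && fpsMax <= range[1]) {
                return true;       // requested range fits inside an advertised one
            }
            i = 0;
        }
    }
    return false;
}

int main()
{
    printf("%d\n", fpsRangeValid(24000, 24000, "(7500,30000),(24000,24000)"));  // prints 1
    printf("%d\n", fpsRangeValid(5000, 60000, "(7500,30000)"));                 // prints 0
    return 0;
}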
diff --git a/camera/CameraHalCommon.cpp b/camera/CameraHalCommon.cpp
index 7e81a09..ff460f9 100644
--- a/camera/CameraHalCommon.cpp
+++ b/camera/CameraHalCommon.cpp
@@ -16,7 +16,8 @@
#include "CameraHal.h"
-namespace android {
+namespace Ti {
+namespace Camera {
const char CameraHal::PARAMS_DELIMITER []= ",";
@@ -46,7 +47,7 @@ void CameraHal::PPM(const char* str){
ppm.tv_sec = ppm.tv_sec * 1000000;
ppm.tv_sec = ppm.tv_sec + ppm.tv_usec - ppm_start.tv_usec;
- ALOGD("PPM: %s :%ld.%ld ms", str, ( ppm.tv_sec /1000 ), ( ppm.tv_sec % 1000 ));
+ CAMHAL_LOGI("PPM: %s :%ld.%ld ms", str, ( ppm.tv_sec /1000 ), ( ppm.tv_sec % 1000 ));
}
#elif PPM_INSTRUMENTATION_ABS
@@ -76,7 +77,7 @@ void CameraHal::PPM(const char* str){
absolute *= 1000;
absolute += ppm.tv_usec /1000;
- ALOGD("PPM: %s :%llu.%llu ms : %llu ms", str, ( elapsed /1000 ), ( elapsed % 1000 ), absolute);
+ CAMHAL_LOGI("PPM: %s :%llu.%llu ms : %llu ms", str, ( elapsed /1000 ), ( elapsed % 1000 ), absolute);
}
#endif
@@ -109,13 +110,104 @@ void CameraHal::PPM(const char* str, struct timeval* ppm_first, ...){
ppm.tv_sec = ppm.tv_sec * 1000000;
ppm.tv_sec = ppm.tv_sec + ppm.tv_usec - ppm_first->tv_usec;
- ALOGD("PPM: %s :%ld.%ld ms : %llu ms", temp_str, ( ppm.tv_sec /1000 ), ( ppm.tv_sec % 1000 ), absolute);
+ CAMHAL_LOGI("PPM: %s :%ld.%ld ms : %llu ms", temp_str, ( ppm.tv_sec /1000 ), ( ppm.tv_sec % 1000 ), absolute);
va_end(args);
}
#endif
-};
+/** Common utility function definitions used all over the HAL */
+unsigned int CameraHal::getBPP(const char* format) {
+ unsigned int bytesPerPixel;
+
+ // Calculate bytes per pixel based on the pixel format
+ if (strcmp(format, android::CameraParameters::PIXEL_FORMAT_YUV422I) == 0) {
+ bytesPerPixel = 2;
+ } else if (strcmp(format, android::CameraParameters::PIXEL_FORMAT_RGB565) == 0 ||
+ strcmp(format, android::CameraParameters::PIXEL_FORMAT_BAYER_RGGB) == 0) {
+ bytesPerPixel = 2;
+ } else if (strcmp(format, android::CameraParameters::PIXEL_FORMAT_YUV420SP) == 0) {
+ bytesPerPixel = 1;
+ } else {
+ bytesPerPixel = 1;
+ }
+
+ return bytesPerPixel;
+}
+
+void CameraHal::getXYFromOffset(unsigned int *x, unsigned int *y,
+ unsigned int offset, unsigned int stride,
+ const char* format)
+{
+ CAMHAL_ASSERT( x && y && format && (0U < stride) );
+
+ *x = (offset % stride) / getBPP(format);
+ *y = (offset / stride);
+}
+
+const char* CameraHal::getPixelFormatConstant(const char* parametersFormat)
+{
+ const char *pixelFormat = NULL;
+
+ if ( NULL != parametersFormat ) {
+ if ( 0 == strcmp(parametersFormat, (const char *) android::CameraParameters::PIXEL_FORMAT_YUV422I) ) {
+ CAMHAL_LOGVA("CbYCrY format selected");
+ pixelFormat = (const char *) android::CameraParameters::PIXEL_FORMAT_YUV422I;
+ } else if ( (0 == strcmp(parametersFormat, (const char *) android::CameraParameters::PIXEL_FORMAT_YUV420SP)) ||
+ (0 == strcmp(parametersFormat, (const char *) android::CameraParameters::PIXEL_FORMAT_YUV420P)) ) {
+ // TODO(XXX): We are treating YV12 the same as YUV420SP
+ CAMHAL_LOGVA("YUV420SP format selected");
+ pixelFormat = (const char *) android::CameraParameters::PIXEL_FORMAT_YUV420SP;
+ } else if ( 0 == strcmp(parametersFormat, (const char *) android::CameraParameters::PIXEL_FORMAT_RGB565) ) {
+ CAMHAL_LOGVA("RGB565 format selected");
+ pixelFormat = (const char *) android::CameraParameters::PIXEL_FORMAT_RGB565;
+ } else if ( 0 == strcmp(parametersFormat, (const char *) android::CameraParameters::PIXEL_FORMAT_BAYER_RGGB) ) {
+ CAMHAL_LOGVA("BAYER format selected");
+ pixelFormat = (const char *) android::CameraParameters::PIXEL_FORMAT_BAYER_RGGB;
+ } else if ( 0 == strcmp(parametersFormat, android::CameraParameters::PIXEL_FORMAT_JPEG) ) {
+ CAMHAL_LOGVA("JPEG format selected");
+ pixelFormat = (const char *) android::CameraParameters::PIXEL_FORMAT_JPEG;
+ } else {
+ CAMHAL_LOGEA("Invalid format, NV12 format selected as default");
+ pixelFormat = (const char *) android::CameraParameters::PIXEL_FORMAT_YUV420SP;
+ }
+ } else {
+ CAMHAL_LOGEA("Preview format is NULL, defaulting to NV12");
+ pixelFormat = (const char *) android::CameraParameters::PIXEL_FORMAT_YUV420SP;
+ }
+
+ return pixelFormat;
+}
+
+size_t CameraHal::calculateBufferSize(const char* parametersFormat, int width, int height)
+{
+ int bufferSize = -1;
+
+ if ( NULL != parametersFormat ) {
+ if ( 0 == strcmp(parametersFormat, (const char *) android::CameraParameters::PIXEL_FORMAT_YUV422I) ) {
+ bufferSize = width * height * 2;
+ } else if ( (0 == strcmp(parametersFormat, android::CameraParameters::PIXEL_FORMAT_YUV420SP)) ||
+ (0 == strcmp(parametersFormat, android::CameraParameters::PIXEL_FORMAT_YUV420P)) ) {
+ bufferSize = width * height * 3 / 2;
+ } else if ( 0 == strcmp(parametersFormat, (const char *) android::CameraParameters::PIXEL_FORMAT_RGB565) ) {
+ bufferSize = width * height * 2;
+ } else if ( 0 == strcmp(parametersFormat, (const char *) android::CameraParameters::PIXEL_FORMAT_BAYER_RGGB) ) {
+ bufferSize = width * height * 2;
+ } else {
+ CAMHAL_LOGEA("Invalid format");
+ bufferSize = 0;
+ }
+ } else {
+ CAMHAL_LOGEA("Preview format is NULL");
+ bufferSize = 0;
+ }
+
+ return bufferSize;
+}
+
+
+} // namespace Camera
+} // namespace Ti
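
For reference, the buffer sizes produced by the new calculateBufferSize() follow directly from the per-format arithmetic above; a standalone sketch (illustrative only, not part of the patch) using a hypothetical 1920x1080 frame:

// Hypothetical standalone sketch of the per-format size arithmetic in calculateBufferSize().
#include <cstdio>

int main()
{
    const int w = 1920, h = 1080;
    printf("yuv422i: %d bytes\n", w * h * 2);      // 4147200 (2 bytes per pixel)
    printf("nv12   : %d bytes\n", w * h * 3 / 2);  // 3110400 (12 bits per pixel)
    printf("rgb565 : %d bytes\n", w * h * 2);      // 4147200 (2 bytes per pixel)
    return 0;
}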
diff --git a/camera/CameraHalUtilClasses.cpp b/camera/CameraHalUtilClasses.cpp
index 073c2b8..53c9a55 100644
--- a/camera/CameraHalUtilClasses.cpp
+++ b/camera/CameraHalUtilClasses.cpp
@@ -21,12 +21,10 @@
*
*/
-#define LOG_TAG "CameraHAL"
-
-
#include "CameraHal.h"
-namespace android {
+namespace Ti {
+namespace Camera {
/*--------------------FrameProvider Class STARTS here-----------------------------*/
@@ -59,7 +57,7 @@ int FrameProvider::disableFrameNotification(int32_t frameTypes)
return ret;
}
-int FrameProvider::returnFrame(void *frameBuf, CameraFrame::FrameType frameType)
+int FrameProvider::returnFrame(CameraBuffer *frameBuf, CameraFrame::FrameType frameType)
{
status_t ret = NO_ERROR;
@@ -68,7 +66,7 @@ int FrameProvider::returnFrame(void *frameBuf, CameraFrame::FrameType frameType)
return ret;
}
-void FrameProvider::addFramePointers(void *frameBuf, void *buf)
+void FrameProvider::addFramePointers(CameraBuffer *frameBuf, void *buf)
{
mFrameNotifier->addFramePointers(frameBuf, buf);
return;
@@ -105,7 +103,7 @@ int EventProvider::disableEventNotification(int32_t frameTypes)
LOG_FUNCTION_NAME;
status_t ret = NO_ERROR;
- mEventNotifier->disableMsgType(frameTypes<<MessageNotifier::FRAME_BIT_FIELD_POSITION
+ mEventNotifier->disableMsgType(frameTypes<<MessageNotifier::EVENT_BIT_FIELD_POSITION
, mCookie
);
@@ -199,7 +197,7 @@ status_t CameraArea::checkArea(ssize_t top,
status_t CameraArea::parseAreas(const char *area,
size_t areaLength,
- Vector< sp<CameraArea> > &areas)
+ android::Vector<android::sp<CameraArea> > &areas)
{
status_t ret = NO_ERROR;
char *ctx;
@@ -211,7 +209,7 @@ status_t CameraArea::parseAreas(const char *area,
const char sep = ',';
ssize_t top, left, bottom, right, weight;
char *tmpBuffer = NULL;
- sp<CameraArea> currentArea;
+ android::sp<CameraArea> currentArea;
LOG_FUNCTION_NAME
@@ -334,8 +332,8 @@ status_t CameraArea::parseAreas(const char *area,
return ret;
}
-bool CameraArea::areAreasDifferent(Vector< sp<CameraArea> > &area1,
- Vector< sp<CameraArea> > &area2) {
+bool CameraArea::areAreasDifferent(android::Vector< android::sp<CameraArea> > &area1,
+ android::Vector< android::sp<CameraArea> > &area2) {
if (area1.size() != area2.size()) {
return true;
}
@@ -350,7 +348,7 @@ bool CameraArea::areAreasDifferent(Vector< sp<CameraArea> > &area1,
return false;
}
-bool CameraArea::compare(const sp<CameraArea> &area) {
+bool CameraArea::compare(const android::sp<CameraArea> &area) {
return ((mTop == area->mTop) && (mLeft == area->mLeft) &&
(mBottom == area->mBottom) && (mRight == area->mRight) &&
(mWeight == area->mWeight));
@@ -359,4 +357,5 @@ bool CameraArea::compare(const sp<CameraArea> &area) {
/*--------------------CameraArea Class ENDS here-----------------------------*/
-};
+} // namespace Camera
+} // namespace Ti
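
The CameraArea::parseAreas() signature changes above operate on Android-style area strings; a minimal standalone sketch (illustrative only, not part of the patch) of the assumed "(left,top,right,bottom,weight)" layout, with a hypothetical sample area:

// Hypothetical standalone sketch: parses one Android-style focus/metering area string.
#include <cstdio>

int main()
{
    const char *area = "(-200,-200,200,200,1)";   // coordinates in the [-1000,1000] space
    int left, top, right, bottom, weight;
    if (sscanf(area, "(%d,%d,%d,%d,%d)", &left, &top, &right, &bottom, &weight) == 5) {
        printf("left=%d top=%d right=%d bottom=%d weight=%d\n",
               left, top, right, bottom, weight);
    }
    return 0;
}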
diff --git a/camera/CameraHal_Module.cpp b/camera/CameraHal_Module.cpp
index cb4e804..a9277a2 100644
--- a/camera/CameraHal_Module.cpp
+++ b/camera/CameraHal_Module.cpp
@@ -21,8 +21,6 @@
*
*/
-#define LOG_TAG "CameraHAL"
-
#include <utils/threads.h>
#include "CameraHal.h"
@@ -30,8 +28,18 @@
#include "TICameraParameters.h"
-static android::CameraProperties gCameraProperties;
-static android::CameraHal* gCameraHals[MAX_CAMERAS_SUPPORTED];
+#ifdef CAMERAHAL_DEBUG_VERBOSE
+# define CAMHAL_LOG_MODULE_FUNCTION_NAME LOG_FUNCTION_NAME
+#else
+# define CAMHAL_LOG_MODULE_FUNCTION_NAME
+#endif
+
+
+namespace Ti {
+namespace Camera {
+
+static CameraProperties gCameraProperties;
+static CameraHal* gCameraHals[MAX_CAMERAS_SUPPORTED];
static unsigned int gCamerasOpen = 0;
static android::Mutex gCameraHalDeviceLock;
@@ -45,6 +53,10 @@ static struct hw_module_methods_t camera_module_methods = {
open: camera_device_open
};
+} // namespace Camera
+} // namespace Ti
+
+
camera_module_t HAL_MODULE_INFO_SYM = {
common: {
tag: HARDWARE_MODULE_TAG,
@@ -53,14 +65,18 @@ camera_module_t HAL_MODULE_INFO_SYM = {
id: CAMERA_HARDWARE_MODULE_ID,
name: "TI OMAP CameraHal Module",
author: "TI",
- methods: &camera_module_methods,
+ methods: &Ti::Camera::camera_module_methods,
dso: NULL, /* remove compilation warnings */
reserved: {0}, /* remove compilation warnings */
},
- get_number_of_cameras: camera_get_number_of_cameras,
- get_camera_info: camera_get_camera_info,
+ get_number_of_cameras: Ti::Camera::camera_get_number_of_cameras,
+ get_camera_info: Ti::Camera::camera_get_camera_info,
};
+
+namespace Ti {
+namespace Camera {
+
typedef struct ti_camera_device {
camera_device_t base;
/* TI specific "private" data can go here (base.priv) */
@@ -75,11 +91,11 @@ typedef struct ti_camera_device {
int camera_set_preview_window(struct camera_device * device,
struct preview_stream_ops *window)
{
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
+
int rv = -EINVAL;
ti_camera_device_t* ti_dev = NULL;
- ALOGV("%s", __FUNCTION__);
-
if(!device)
return rv;
@@ -90,6 +106,61 @@ int camera_set_preview_window(struct camera_device * device,
return rv;
}
+#ifdef OMAP_ENHANCEMENT_CPCAM
+int camera_set_extended_preview_ops(struct camera_device * device,
+ preview_stream_extended_ops_t * extendedOps)
+{
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
+
+ if (!device) {
+ return BAD_VALUE;
+ }
+
+ ti_camera_device_t * const tiDevice = reinterpret_cast<ti_camera_device_t*>(device);
+ gCameraHals[tiDevice->cameraid]->setExtendedPreviewStreamOps(extendedOps);
+
+ return OK;
+}
+
+int camera_set_buffer_source(struct camera_device * device,
+ struct preview_stream_ops *tapin,
+ struct preview_stream_ops *tapout)
+{
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
+
+ int rv = -EINVAL;
+ ti_camera_device_t* ti_dev = NULL;
+
+ if(!device)
+ return rv;
+
+ ti_dev = (ti_camera_device_t*) device;
+
+ rv = gCameraHals[ti_dev->cameraid]->setBufferSource(tapin, tapout);
+
+ return rv;
+}
+
+int camera_release_buffer_source(struct camera_device * device,
+ struct preview_stream_ops *tapin,
+ struct preview_stream_ops *tapout)
+{
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
+
+ int rv = -EINVAL;
+ ti_camera_device_t* ti_dev = NULL;
+
+ if(!device)
+ return rv;
+
+ ti_dev = (ti_camera_device_t*) device;
+
+ rv = gCameraHals[ti_dev->cameraid]->releaseBufferSource(tapin, tapout);
+
+ return rv;
+}
+#endif
+
void camera_set_callbacks(struct camera_device * device,
camera_notify_callback notify_cb,
camera_data_callback data_cb,
@@ -97,9 +168,9 @@ void camera_set_callbacks(struct camera_device * device,
camera_request_memory get_memory,
void *user)
{
- ti_camera_device_t* ti_dev = NULL;
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
- ALOGV("%s", __FUNCTION__);
+ ti_camera_device_t* ti_dev = NULL;
if(!device)
return;
@@ -111,9 +182,9 @@ void camera_set_callbacks(struct camera_device * device,
void camera_enable_msg_type(struct camera_device * device, int32_t msg_type)
{
- ti_camera_device_t* ti_dev = NULL;
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
- ALOGV("%s", __FUNCTION__);
+ ti_camera_device_t* ti_dev = NULL;
if(!device)
return;
@@ -125,9 +196,9 @@ void camera_enable_msg_type(struct camera_device * device, int32_t msg_type)
void camera_disable_msg_type(struct camera_device * device, int32_t msg_type)
{
- ti_camera_device_t* ti_dev = NULL;
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
- ALOGV("%s", __FUNCTION__);
+ ti_camera_device_t* ti_dev = NULL;
if(!device)
return;
@@ -139,9 +210,9 @@ void camera_disable_msg_type(struct camera_device * device, int32_t msg_type)
int camera_msg_type_enabled(struct camera_device * device, int32_t msg_type)
{
- ti_camera_device_t* ti_dev = NULL;
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
- ALOGV("%s", __FUNCTION__);
+ ti_camera_device_t* ti_dev = NULL;
if(!device)
return 0;
@@ -153,11 +224,11 @@ int camera_msg_type_enabled(struct camera_device * device, int32_t msg_type)
int camera_start_preview(struct camera_device * device)
{
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
+
int rv = -EINVAL;
ti_camera_device_t* ti_dev = NULL;
- ALOGV("%s", __FUNCTION__);
-
if(!device)
return rv;
@@ -170,9 +241,9 @@ int camera_start_preview(struct camera_device * device)
void camera_stop_preview(struct camera_device * device)
{
- ti_camera_device_t* ti_dev = NULL;
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
- ALOGV("%s", __FUNCTION__);
+ ti_camera_device_t* ti_dev = NULL;
if(!device)
return;
@@ -184,11 +255,11 @@ void camera_stop_preview(struct camera_device * device)
int camera_preview_enabled(struct camera_device * device)
{
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
+
int rv = -EINVAL;
ti_camera_device_t* ti_dev = NULL;
- ALOGV("%s", __FUNCTION__);
-
if(!device)
return rv;
@@ -200,11 +271,11 @@ int camera_preview_enabled(struct camera_device * device)
int camera_store_meta_data_in_buffers(struct camera_device * device, int enable)
{
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
+
int rv = -EINVAL;
ti_camera_device_t* ti_dev = NULL;
- ALOGV("%s", __FUNCTION__);
-
if(!device)
return rv;
@@ -218,11 +289,11 @@ int camera_store_meta_data_in_buffers(struct camera_device * device, int enable)
int camera_start_recording(struct camera_device * device)
{
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
+
int rv = -EINVAL;
ti_camera_device_t* ti_dev = NULL;
- ALOGV("%s", __FUNCTION__);
-
if(!device)
return rv;
@@ -234,9 +305,9 @@ int camera_start_recording(struct camera_device * device)
void camera_stop_recording(struct camera_device * device)
{
- ti_camera_device_t* ti_dev = NULL;
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
- ALOGV("%s", __FUNCTION__);
+ ti_camera_device_t* ti_dev = NULL;
if(!device)
return;
@@ -248,11 +319,11 @@ void camera_stop_recording(struct camera_device * device)
int camera_recording_enabled(struct camera_device * device)
{
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
+
int rv = -EINVAL;
ti_camera_device_t* ti_dev = NULL;
- ALOGV("%s", __FUNCTION__);
-
if(!device)
return rv;
@@ -265,9 +336,9 @@ int camera_recording_enabled(struct camera_device * device)
void camera_release_recording_frame(struct camera_device * device,
const void *opaque)
{
- ti_camera_device_t* ti_dev = NULL;
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
- ALOGV("%s", __FUNCTION__);
+ ti_camera_device_t* ti_dev = NULL;
if(!device)
return;
@@ -279,11 +350,11 @@ void camera_release_recording_frame(struct camera_device * device,
int camera_auto_focus(struct camera_device * device)
{
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
+
int rv = -EINVAL;
ti_camera_device_t* ti_dev = NULL;
- ALOGV("%s", __FUNCTION__);
-
if(!device)
return rv;
@@ -295,11 +366,11 @@ int camera_auto_focus(struct camera_device * device)
int camera_cancel_auto_focus(struct camera_device * device)
{
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
+
int rv = -EINVAL;
ti_camera_device_t* ti_dev = NULL;
- ALOGV("%s", __FUNCTION__);
-
if(!device)
return rv;
@@ -311,27 +382,45 @@ int camera_cancel_auto_focus(struct camera_device * device)
int camera_take_picture(struct camera_device * device)
{
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
+
int rv = -EINVAL;
ti_camera_device_t* ti_dev = NULL;
- ALOGV("%s", __FUNCTION__);
+ if(!device)
+ return rv;
+
+ ti_dev = (ti_camera_device_t*) device;
+
+ rv = gCameraHals[ti_dev->cameraid]->takePicture(0);
+ return rv;
+}
+
+#ifdef OMAP_ENHANCEMENT_CPCAM
+int camera_take_picture_with_parameters(struct camera_device * device, const char *params)
+{
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
+
+ int rv = -EINVAL;
+ ti_camera_device_t* ti_dev = NULL;
if(!device)
return rv;
ti_dev = (ti_camera_device_t*) device;
- rv = gCameraHals[ti_dev->cameraid]->takePicture();
+ rv = gCameraHals[ti_dev->cameraid]->takePicture(params);
return rv;
}
+#endif
int camera_cancel_picture(struct camera_device * device)
{
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
+
int rv = -EINVAL;
ti_camera_device_t* ti_dev = NULL;
- ALOGV("%s", __FUNCTION__);
-
if(!device)
return rv;
@@ -341,12 +430,46 @@ int camera_cancel_picture(struct camera_device * device)
return rv;
}
-int camera_set_parameters(struct camera_device * device, const char *params)
+#ifdef OMAP_ENHANCEMENT_CPCAM
+int camera_reprocess(struct camera_device * device, const char *params)
+{
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
+
+ int rv = -EINVAL;
+ ti_camera_device_t* ti_dev = NULL;
+
+ if(!device)
+ return rv;
+
+ ti_dev = (ti_camera_device_t*) device;
+
+ rv = gCameraHals[ti_dev->cameraid]->reprocess(params);
+ return rv;
+}
+
+int camera_cancel_reprocess(struct camera_device * device)
{
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
+
int rv = -EINVAL;
ti_camera_device_t* ti_dev = NULL;
- ALOGV("%s", __FUNCTION__);
+ if(!device)
+ return rv;
+
+ ti_dev = (ti_camera_device_t*) device;
+
+ rv = gCameraHals[ti_dev->cameraid]->cancel_reprocess();
+ return rv;
+}
+#endif
+
+int camera_set_parameters(struct camera_device * device, const char *params)
+{
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
+
+ int rv = -EINVAL;
+ ti_camera_device_t* ti_dev = NULL;
if(!device)
return rv;
@@ -359,11 +482,11 @@ int camera_set_parameters(struct camera_device * device, const char *params)
char* camera_get_parameters(struct camera_device * device)
{
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
+
char* param = NULL;
ti_camera_device_t* ti_dev = NULL;
- ALOGV("%s", __FUNCTION__);
-
if(!device)
return NULL;
@@ -376,9 +499,9 @@ char* camera_get_parameters(struct camera_device * device)
static void camera_put_parameters(struct camera_device *device, char *parms)
{
- ti_camera_device_t* ti_dev = NULL;
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
- ALOGV("%s", __FUNCTION__);
+ ti_camera_device_t* ti_dev = NULL;
if(!device)
return;
@@ -391,25 +514,43 @@ static void camera_put_parameters(struct camera_device *device, char *parms)
int camera_send_command(struct camera_device * device,
int32_t cmd, int32_t arg1, int32_t arg2)
{
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
+
int rv = -EINVAL;
ti_camera_device_t* ti_dev = NULL;
- ALOGV("%s", __FUNCTION__);
-
if(!device)
return rv;
ti_dev = (ti_camera_device_t*) device;
+#ifdef OMAP_ENHANCEMENT
+ if ( cmd == CAMERA_CMD_SETUP_EXTENDED_OPERATIONS ) {
+ camera_device_extended_ops_t * const ops = static_cast<camera_device_extended_ops_t*>(
+ camera_cmd_send_command_args_to_pointer(arg1, arg2));
+
+#ifdef OMAP_ENHANCEMENT_CPCAM
+ ops->set_extended_preview_ops = camera_set_extended_preview_ops;
+ ops->set_buffer_source = camera_set_buffer_source;
+ ops->release_buffer_source = camera_release_buffer_source;
+ ops->take_picture_with_parameters = camera_take_picture_with_parameters;
+ ops->reprocess = camera_reprocess;
+ ops->cancel_reprocess = camera_cancel_reprocess;
+#endif
+
+ return OK;
+ }
+#endif
+
rv = gCameraHals[ti_dev->cameraid]->sendCommand(cmd, arg1, arg2);
return rv;
}
void camera_release(struct camera_device * device)
{
- ti_camera_device_t* ti_dev = NULL;
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
- ALOGV("%s", __FUNCTION__);
+ ti_camera_device_t* ti_dev = NULL;
if(!device)
return;
@@ -421,6 +562,8 @@ void camera_release(struct camera_device * device)
int camera_dump(struct camera_device * device, int fd)
{
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
+
int rv = -EINVAL;
ti_camera_device_t* ti_dev = NULL;
@@ -437,12 +580,12 @@ extern "C" void heaptracker_free_leaked_memory(void);
int camera_device_close(hw_device_t* device)
{
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
+
int ret = 0;
ti_camera_device_t* ti_dev = NULL;
- ALOGV("%s", __FUNCTION__);
-
- android::Mutex::Autolock lock(gCameraHalDeviceLock);
+ android::AutoMutex lock(gCameraHalDeviceLock);
if (!device) {
ret = -EINVAL;
@@ -488,10 +631,10 @@ int camera_device_open(const hw_module_t* module, const char* name,
int cameraid;
ti_camera_device_t* camera_device = NULL;
camera_device_ops_t* camera_ops = NULL;
- android::CameraHal* camera = NULL;
- android::CameraProperties::Properties* properties = NULL;
+ CameraHal* camera = NULL;
+ CameraProperties::Properties* properties = NULL;
- android::Mutex::Autolock lock(gCameraHalDeviceLock);
+ android::AutoMutex lock(gCameraHalDeviceLock);
CAMHAL_LOGI("camera_device open");
@@ -501,7 +644,7 @@ int camera_device_open(const hw_module_t* module, const char* name,
if(cameraid > num_cameras)
{
- ALOGE("camera service provided cameraid out of bounds, "
+ CAMHAL_LOGE("camera service provided cameraid out of bounds, "
"cameraid = %d, num supported = %d",
cameraid, num_cameras);
rv = -EINVAL;
@@ -510,7 +653,7 @@ int camera_device_open(const hw_module_t* module, const char* name,
if(gCamerasOpen >= MAX_SIMUL_CAMERAS_SUPPORTED)
{
- ALOGE("maximum number of cameras already open");
+ CAMHAL_LOGE("maximum number of cameras already open");
rv = -ENOMEM;
goto fail;
}
@@ -518,7 +661,7 @@ int camera_device_open(const hw_module_t* module, const char* name,
camera_device = (ti_camera_device_t*)malloc(sizeof(*camera_device));
if(!camera_device)
{
- ALOGE("camera_device allocation fail");
+ CAMHAL_LOGE("camera_device allocation fail");
rv = -ENOMEM;
goto fail;
}
@@ -526,7 +669,7 @@ int camera_device_open(const hw_module_t* module, const char* name,
camera_ops = (camera_device_ops_t*)malloc(sizeof(*camera_ops));
if(!camera_ops)
{
- ALOGE("camera_ops allocation fail");
+ CAMHAL_LOGE("camera_ops allocation fail");
rv = -ENOMEM;
goto fail;
}
@@ -572,23 +715,23 @@ int camera_device_open(const hw_module_t* module, const char* name,
if(gCameraProperties.getProperties(cameraid, &properties) < 0)
{
- ALOGE("Couldn't get camera properties");
+ CAMHAL_LOGE("Couldn't get camera properties");
rv = -ENOMEM;
goto fail;
}
- camera = new android::CameraHal(cameraid);
+ camera = new CameraHal(cameraid);
if(!camera)
{
- ALOGE("Couldn't create instance of CameraHal class");
+ CAMHAL_LOGE("Couldn't create instance of CameraHal class");
rv = -ENOMEM;
goto fail;
}
- if(properties && (camera->initialize(properties) != android::NO_ERROR))
+ if(properties && (camera->initialize(properties) != NO_ERROR))
{
- ALOGE("Couldn't initialize camera instance");
+ CAMHAL_LOGE("Couldn't initialize camera instance");
rv = -ENODEV;
goto fail;
}
@@ -620,19 +763,15 @@ int camera_get_number_of_cameras(void)
{
int num_cameras = MAX_CAMERAS_SUPPORTED;
- // TODO(XXX): Ducati is not loaded yet when camera service gets here
- // Lets revisit this later to see if we can somehow get this working
-#if 0
    // this is going to be the first call from the camera service
// initialize camera properties here...
- if(gCameraProperties.initialize() != android::NO_ERROR)
+ if(gCameraProperties.initialize() != NO_ERROR)
{
CAMHAL_LOGEA("Unable to create or initialize CameraProperties");
return NULL;
}
num_cameras = gCameraProperties.camerasSupported();
-#endif
return num_cameras;
}
@@ -643,40 +782,41 @@ int camera_get_camera_info(int camera_id, struct camera_info *info)
int face_value = CAMERA_FACING_BACK;
int orientation = 0;
const char *valstr = NULL;
- android::CameraProperties::Properties* properties = NULL;
+ CameraProperties::Properties* properties = NULL;
    // this is going to be the first call from the camera service
// initialize camera properties here...
- if(gCameraProperties.initialize() != android::NO_ERROR)
+ if(gCameraProperties.initialize() != NO_ERROR)
{
CAMHAL_LOGEA("Unable to create or initialize CameraProperties");
- return NULL;
+ rv = -EINVAL;
+ goto end;
}
//Get camera properties for camera index
if(gCameraProperties.getProperties(camera_id, &properties) < 0)
{
- ALOGE("Couldn't get camera properties");
+ CAMHAL_LOGE("Couldn't get camera properties");
rv = -EINVAL;
goto end;
}
if(properties)
{
- valstr = properties->get(android::CameraProperties::FACING_INDEX);
+ valstr = properties->get(CameraProperties::FACING_INDEX);
if(valstr != NULL)
{
- if (strcmp(valstr, (const char *) android::TICameraParameters::FACING_FRONT) == 0)
+ if (strcmp(valstr, TICameraParameters::FACING_FRONT) == 0)
{
face_value = CAMERA_FACING_FRONT;
}
- else if (strcmp(valstr, (const char *) android::TICameraParameters::FACING_BACK) == 0)
+ else if (strcmp(valstr, TICameraParameters::FACING_BACK) == 0)
{
face_value = CAMERA_FACING_BACK;
}
}
- valstr = properties->get(android::CameraProperties::ORIENTATION_INDEX);
+ valstr = properties->get(CameraProperties::ORIENTATION_INDEX);
if(valstr != NULL)
{
orientation = atoi(valstr);
@@ -695,6 +835,5 @@ end:
}
-
-
-
+} // namespace Camera
+} // namespace Ti
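
The module wrappers above all follow the same shape: validate the device pointer, cast to ti_camera_device_t, and dispatch through gCameraHals[cameraid]. A standalone sketch (illustrative only, not part of the patch) of that pattern with stand-in types; StubHal and stub_camera_device are hypothetical names:

// Hypothetical standalone sketch of the wrapper dispatch pattern used in CameraHal_Module.cpp.
#include <cerrno>
#include <cstdio>

struct StubHal {
    int startPreview() { return 0; }              // stand-in for CameraHal::startPreview()
};

struct stub_camera_device {
    int cameraid;                                 // index into the per-camera HAL table
};

static StubHal gStubHals[2];

static int stub_start_preview(stub_camera_device *device)
{
    if (device == NULL) {
        return -EINVAL;                           // same failure value as the wrappers above
    }
    return gStubHals[device->cameraid].startPreview();
}

int main()
{
    stub_camera_device dev = { 0 };
    printf("rv=%d\n", stub_start_preview(&dev));  // prints rv=0
    return 0;
}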
diff --git a/camera/CameraParameters.cpp b/camera/CameraParameters.cpp
index 527b7c2..82b1da4 100644
--- a/camera/CameraParameters.cpp
+++ b/camera/CameraParameters.cpp
@@ -21,27 +21,40 @@
*
*/
-#include "CameraHal.h"
#include "CameraProperties.h"
-namespace android {
+namespace Ti {
+namespace Camera {
const char CameraProperties::INVALID[]="prop-invalid-key";
const char CameraProperties::CAMERA_NAME[]="prop-camera-name";
const char CameraProperties::CAMERA_SENSOR_INDEX[]="prop-sensor-index";
+const char CameraProperties::CAMERA_SENSOR_ID[] = "prop-sensor-id";
const char CameraProperties::ORIENTATION_INDEX[]="prop-orientation";
const char CameraProperties::FACING_INDEX[]="prop-facing";
-const char CameraProperties::S3D_SUPPORTED[]="prop-s3d-supported";
const char CameraProperties::SUPPORTED_PREVIEW_SIZES[] = "prop-preview-size-values";
+const char CameraProperties::SUPPORTED_PREVIEW_SUBSAMPLED_SIZES[] = "prop-preview-subsampled-size-values";
+const char CameraProperties::SUPPORTED_PREVIEW_TOPBOTTOM_SIZES[] = "prop-preview-topbottom-size-values";
+const char CameraProperties::SUPPORTED_PREVIEW_SIDEBYSIDE_SIZES[] = "prop-preview-sidebyside-size-values";
const char CameraProperties::SUPPORTED_PREVIEW_FORMATS[] = "prop-preview-format-values";
const char CameraProperties::SUPPORTED_PREVIEW_FRAME_RATES[] = "prop-preview-frame-rate-values";
+const char CameraProperties::SUPPORTED_PREVIEW_FRAME_RATES_EXT[] = "prop-preview-frame-rate-ext-values";
const char CameraProperties::SUPPORTED_PICTURE_SIZES[] = "prop-picture-size-values";
+const char CameraProperties::SUPPORTED_PICTURE_SUBSAMPLED_SIZES[] = "prop-picture-subsampled-size-values";
+const char CameraProperties::SUPPORTED_PICTURE_TOPBOTTOM_SIZES[] = "prop-picture-topbottom-size-values";
+const char CameraProperties::SUPPORTED_PICTURE_SIDEBYSIDE_SIZES[] = "prop-picture-sidebyside-size-values";
const char CameraProperties::SUPPORTED_PICTURE_FORMATS[] = "prop-picture-format-values";
const char CameraProperties::SUPPORTED_THUMBNAIL_SIZES[] = "prop-jpeg-thumbnail-size-values";
const char CameraProperties::SUPPORTED_WHITE_BALANCE[] = "prop-whitebalance-values";
const char CameraProperties::SUPPORTED_EFFECTS[] = "prop-effect-values";
const char CameraProperties::SUPPORTED_ANTIBANDING[] = "prop-antibanding-values";
const char CameraProperties::SUPPORTED_EXPOSURE_MODES[] = "prop-exposure-mode-values";
+const char CameraProperties::SUPPORTED_MANUAL_EXPOSURE_MIN[] = "prop-manual-exposure-min";
+const char CameraProperties::SUPPORTED_MANUAL_EXPOSURE_MAX[] = "prop-manual-exposure-max";
+const char CameraProperties::SUPPORTED_MANUAL_EXPOSURE_STEP[] = "prop-manual-exposure-step";
+const char CameraProperties::SUPPORTED_MANUAL_GAIN_ISO_MIN[] = "prop-manual-gain-iso-min";
+const char CameraProperties::SUPPORTED_MANUAL_GAIN_ISO_MAX[] = "prop-manual-gain-iso-max";
+const char CameraProperties::SUPPORTED_MANUAL_GAIN_ISO_STEP[] = "prop-manual-gain-iso-step";
const char CameraProperties::SUPPORTED_EV_MAX[] = "prop-ev-compensation-max";
const char CameraProperties::SUPPORTED_EV_MIN[] = "prop-ev-compensation-min";
const char CameraProperties::SUPPORTED_EV_STEP[] = "prop-ev-compensation-step";
@@ -79,21 +92,30 @@ const char CameraProperties::SATURATION[] = "prop-saturation-default";
const char CameraProperties::SHARPNESS[] = "prop-sharpness-default";
const char CameraProperties::IPP[] = "prop-ipp-default";
const char CameraProperties::GBCE[] = "prop-gbce-default";
-const char CameraProperties::S3D2D_PREVIEW[] = "prop-s3d2d-preview";
-const char CameraProperties::S3D2D_PREVIEW_MODES[] = "prop-s3d2d-preview-values";
-const char CameraProperties::AUTOCONVERGENCE[] = "prop-auto-convergence";
+const char CameraProperties::SUPPORTED_GBCE[] = "prop-gbce-supported";
+const char CameraProperties::GLBCE[] = "prop-glbce-default";
+const char CameraProperties::SUPPORTED_GLBCE[] = "prop-glbce-supported";
+const char CameraProperties::S3D_PRV_FRAME_LAYOUT[] = "prop-s3d-prv-frame-layout";
+const char CameraProperties::S3D_PRV_FRAME_LAYOUT_VALUES[] = "prop-s3d-prv-frame-layout-values";
+const char CameraProperties::S3D_CAP_FRAME_LAYOUT[] = "prop-s3d-cap-frame-layout";
+const char CameraProperties::S3D_CAP_FRAME_LAYOUT_VALUES[] = "prop-s3d-cap-frame-layout-values";
const char CameraProperties::AUTOCONVERGENCE_MODE[] = "prop-auto-convergence-mode";
-const char CameraProperties::MANUALCONVERGENCE_VALUES[] = "prop-manual-convergence-values";
+const char CameraProperties::AUTOCONVERGENCE_MODE_VALUES[] = "prop-auto-convergence-mode-values";
+const char CameraProperties::MANUAL_CONVERGENCE[] = "prop-manual-convergence";
+const char CameraProperties::SUPPORTED_MANUAL_CONVERGENCE_MIN[] = "prop-supported-manual-convergence-min";
+const char CameraProperties::SUPPORTED_MANUAL_CONVERGENCE_MAX[] = "prop-supported-manual-convergence-max";
+const char CameraProperties::SUPPORTED_MANUAL_CONVERGENCE_STEP[] = "prop-supported-manual-convergence-step";
const char CameraProperties::VSTAB[] = "prop-vstab-default";
const char CameraProperties::VSTAB_SUPPORTED[] = "prop-vstab-supported";
+const char CameraProperties::VNF[] = "prop-vnf-default";
+const char CameraProperties::VNF_SUPPORTED[] = "prop-vnf-supported";
const char CameraProperties::REVISION[] = "prop-revision";
const char CameraProperties::FOCAL_LENGTH[] = "prop-focal-length";
const char CameraProperties::HOR_ANGLE[] = "prop-horizontal-angle";
const char CameraProperties::VER_ANGLE[] = "prop-vertical-angle";
const char CameraProperties::FRAMERATE_RANGE[] = "prop-framerate-range-default";
-const char CameraProperties::FRAMERATE_RANGE_IMAGE[] = "prop-framerate-range-image-default";
-const char CameraProperties::FRAMERATE_RANGE_VIDEO[]="prop-framerate-range-video-default";
const char CameraProperties::FRAMERATE_RANGE_SUPPORTED[]="prop-framerate-range-values";
+const char CameraProperties::FRAMERATE_RANGE_EXT_SUPPORTED[]="prop-framerate-range-ext-values";
const char CameraProperties::SENSOR_ORIENTATION[]= "sensor-orientation";
const char CameraProperties::SENSOR_ORIENTATION_VALUES[]= "sensor-orientation-values";
const char CameraProperties::EXIF_MAKE[] = "prop-exif-make";
@@ -111,8 +133,13 @@ const char CameraProperties::METERING_AREAS[] = "prop-metering-areas";
const char CameraProperties::VIDEO_SNAPSHOT_SUPPORTED[] = "prop-video-snapshot-supported";
const char CameraProperties::VIDEO_SIZE[] = "video-size";
const char CameraProperties::SUPPORTED_VIDEO_SIZES[] = "video-size-values";
-const char CameraProperties::PREFERRED_PREVIEW_SIZE_FOR_VIDEO[] = "preferred-preview-size-for-video";
-
+const char CameraProperties::MECHANICAL_MISALIGNMENT_CORRECTION_SUPPORTED[] = "prop-mechanical-misalignment-correction-supported";
+const char CameraProperties::MECHANICAL_MISALIGNMENT_CORRECTION[] = "prop-mechanical-misalignment-correction";
+const char CameraProperties::CAP_MODE_VALUES[] = "prop-mode-values";
+const char CameraProperties::RAW_WIDTH[] = "prop-raw-width-values";
+const char CameraProperties::RAW_HEIGHT[] = "prop-raw-height-values";
+const char CameraProperties::MAX_PICTURE_WIDTH[] = "prop-max-picture-width";
+const char CameraProperties::MAX_PICTURE_HEIGHT[] = "prop-max-picture-height";
const char CameraProperties::DEFAULT_VALUE[] = "";
@@ -124,7 +151,7 @@ int CameraProperties::getProperties(int cameraIndex, CameraProperties::Propertie
{
LOG_FUNCTION_NAME;
- if((unsigned int)cameraIndex >= mCamerasSupported)
+ if(cameraIndex >= mCamerasSupported)
{
LOG_FUNCTION_NAME_EXIT;
return -EINVAL;
@@ -136,57 +163,78 @@ int CameraProperties::getProperties(int cameraIndex, CameraProperties::Propertie
return 0;
}
-ssize_t CameraProperties::Properties::set(const char *prop, const char *value)
-{
- if(!prop)
- return -EINVAL;
- if(!value)
- value = DEFAULT_VALUE;
+void CameraProperties::Properties::set(const char * const prop, const char * const value) {
+ CAMHAL_ASSERT(prop);
- return mProperties->replaceValueFor(String8(prop), String8(value));
+ if ( !value ) {
+ mProperties[mCurrentMode].removeItem(android::String8(prop));
+ } else {
+ mProperties[mCurrentMode].replaceValueFor(android::String8(prop), android::String8(value));
+ }
}
-ssize_t CameraProperties::Properties::set(const char *prop, int value)
-{
+void CameraProperties::Properties::set(const char * const prop, const int value) {
char s_val[30];
-
sprintf(s_val, "%d", value);
+ set(prop, s_val);
+}
- return set(prop, s_val);
+const char* CameraProperties::Properties::get(const char * prop) const {
+ return mProperties[mCurrentMode].valueFor(android::String8(prop)).string();
}
-const char* CameraProperties::Properties::get(const char * prop)
-{
- String8 value = mProperties->valueFor(String8(prop));
- return value.string();
+int CameraProperties::Properties::getInt(const char * prop) const {
+ android::String8 value = mProperties[mCurrentMode].valueFor(android::String8(prop));
+ if (value.isEmpty()) {
+ return -1;
+ }
+ return strtol(value, 0, 0);
}
-void CameraProperties::Properties::dump()
-{
- for (size_t i = 0; i < mProperties->size(); i++)
- {
- CAMHAL_LOGDB("%s = %s\n",
- mProperties->keyAt(i).string(),
- mProperties->valueAt(i).string());
+void CameraProperties::Properties::setSensorIndex(int idx) {
+ OperatingMode originalMode = getMode();
+ for ( int i = 0 ; i < MODE_MAX ; i++ ) {
+ setMode(static_cast<OperatingMode>(i));
+ set(CAMERA_SENSOR_INDEX, idx);
}
+ setMode(originalMode);
}
-const char* CameraProperties::Properties::keyAt(unsigned int index)
-{
- if(index < mProperties->size())
- {
- return mProperties->keyAt(index).string();
+void CameraProperties::Properties::setMode(OperatingMode mode) {
+ CAMHAL_ASSERT(mode >= 0 && mode < MODE_MAX);
+ mCurrentMode = mode;
+}
+
+OperatingMode CameraProperties::Properties::getMode() const {
+ return mCurrentMode;
+}
+
+void CameraProperties::Properties::dump() {
+ CAMHAL_LOGD("================================");
+ CAMHAL_LOGD("Dumping properties for camera: %d", getInt("prop-sensor-index"));
+
+ for (size_t i = 0; i < mProperties[mCurrentMode].size(); i++) {
+ CAMHAL_LOGD("%s = %s",
+ mProperties[mCurrentMode].keyAt(i).string(),
+ mProperties[mCurrentMode].valueAt(i).string());
+ }
+
+ CAMHAL_LOGD("--------------------------------");
+}
+
+const char* CameraProperties::Properties::keyAt(const unsigned int index) const {
+ if (index < mProperties[mCurrentMode].size()) {
+ return mProperties[mCurrentMode].keyAt(index).string();
}
return NULL;
}
-const char* CameraProperties::Properties::valueAt(unsigned int index)
-{
- if(index < mProperties->size())
- {
- return mProperties->valueAt(index).string();
+const char* CameraProperties::Properties::valueAt(const unsigned int index) const {
+ if (index < mProperties[mCurrentMode].size()) {
+ return mProperties[mCurrentMode].valueAt(index).string();
}
return NULL;
}
-};
+} // namespace Camera
+} // namespace Ti
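The Properties container above is now keyed by operating mode, so the same property name can hold a different value per mode, and setMode() selects which table later get()/set() calls touch. A minimal usage sketch in that spirit, assuming the OperatingMode enum and the key constants from camera/inc/CameraProperties.h in this tree; the helper name and the logged values are illustrative only:

    static void logFrameRateRange(Ti::Camera::CameraProperties::Properties *props,
                                  Ti::Camera::OperatingMode mode)
    {
        props->setMode(mode);                     // pick the per-mode key/value table
        const char *range = props->get(Ti::Camera::CameraProperties::FRAMERATE_RANGE);
        const int sensor  = props->getInt("prop-sensor-index");
        CAMHAL_LOGD("sensor %d: default fps range for this mode is %s", sensor, range);
    }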
diff --git a/camera/CameraProperties.cpp b/camera/CameraProperties.cpp
index 5d3ff20..93bc953 100644
--- a/camera/CameraProperties.cpp
+++ b/camera/CameraProperties.cpp
@@ -21,16 +21,13 @@
*
*/
-//#include "CameraHal.h"
-#include <utils/threads.h>
-
-#include "DebugUtils.h"
#include "CameraProperties.h"
#define CAMERA_ROOT "CameraRoot"
#define CAMERA_INSTANCE "CameraInstance"
-namespace android {
+namespace Ti {
+namespace Camera {
// lower entries have higher priority
static const char* g_camera_adapters[] = {
@@ -71,23 +68,24 @@ status_t CameraProperties::initialize()
status_t ret;
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
if(mInitialized)
return NO_ERROR;
ret = loadProperties();
- mInitialized = 1;
+ if (ret == NO_ERROR) {
+ mInitialized = 1;
+ }
LOG_FUNCTION_NAME_EXIT;
return ret;
}
-extern "C" int CameraAdapter_Capabilities(CameraProperties::Properties* properties_array,
- const unsigned int starting_camera,
- const unsigned int max_camera);
+extern "C" status_t CameraAdapter_Capabilities(CameraProperties::Properties* properties_array,
+ int starting_camera, int max_camera, int & supported_cameras);
///Loads all the Camera related properties
status_t CameraProperties::loadProperties()
@@ -96,25 +94,32 @@ status_t CameraProperties::loadProperties()
status_t ret = NO_ERROR;
+ //Must be re-initialized here, since loadProperties() could potentially be called more than once.
+ mCamerasSupported = 0;
+
// adapter updates capabilities and we update camera count
- mCamerasSupported = CameraAdapter_Capabilities(mCameraProps, mCamerasSupported, MAX_CAMERAS_SUPPORTED);
+ const status_t err = CameraAdapter_Capabilities(mCameraProps, mCamerasSupported,
+ MAX_CAMERAS_SUPPORTED, mCamerasSupported);
- if((int)mCamerasSupported < 0) {
- ALOGE("error while getting capabilities");
+ if(err != NO_ERROR) {
+ CAMHAL_LOGE("error while getting capabilities");
+ ret = UNKNOWN_ERROR;
+ } else if (mCamerasSupported == 0) {
+ CAMHAL_LOGE("camera busy. properties not loaded. num_cameras = %d", mCamerasSupported);
ret = UNKNOWN_ERROR;
} else if (mCamerasSupported > MAX_CAMERAS_SUPPORTED) {
- ALOGE("returned too many adapaters");
+        CAMHAL_LOGE("returned too many adapters");
ret = UNKNOWN_ERROR;
} else {
- ALOGE("num_cameras = %d", mCamerasSupported);
+ CAMHAL_LOGI("num_cameras = %d", mCamerasSupported);
- for (unsigned int i = 0; i < mCamerasSupported; i++) {
- mCameraProps[i].set(CAMERA_SENSOR_INDEX, i);
+ for (int i = 0; i < mCamerasSupported; i++) {
+ mCameraProps[i].setSensorIndex(i);
mCameraProps[i].dump();
}
}
- ALOGV("mCamerasSupported = %d", mCamerasSupported);
+ CAMHAL_LOGV("mCamerasSupported = %d", mCamerasSupported);
LOG_FUNCTION_NAME_EXIT;
return ret;
}
@@ -126,4 +131,5 @@ int CameraProperties::camerasSupported()
return mCamerasSupported;
}
-};
+} // namespace Camera
+} // namespace Ti
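The capabilities entry point now returns a status and reports the camera count through an out-parameter instead of overloading a single return value. A hedged sketch of the shape an adapter-side implementation takes under that contract (probing is elided; everything except the declared signature is illustrative):

    extern "C" status_t CameraAdapter_Capabilities(
            CameraProperties::Properties* properties_array,
            int starting_camera, int max_camera, int & supported_cameras)
    {
        if (properties_array == NULL || starting_camera > max_camera) {
            return BAD_VALUE;
        }

        int detected = 0;
        // ... populate properties_array[starting_camera + n] for each probed
        //     sensor and count it in 'detected' ...

        supported_cameras = starting_camera + detected;
        return NO_ERROR;
    }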
diff --git a/camera/Decoder_libjpeg.cpp b/camera/Decoder_libjpeg.cpp
new file mode 100755
index 0000000..e1f2c7c
--- /dev/null
+++ b/camera/Decoder_libjpeg.cpp
@@ -0,0 +1,281 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "Decoder_libjpeg.h"
+
+extern "C" {
+ #include "jpeglib.h"
+ #include "jerror.h"
+}
+
+#define NUM_COMPONENTS_IN_YUV 3
+
+namespace Ti {
+namespace Camera {
+
+/* JPEG DHT Segment omitted from MJPEG data */
+static unsigned char jpeg_odml_dht[0x1a6] = {
+ 0xff, 0xd8, /* Start of Image */
+ 0xff, 0xc4, 0x01, 0xa2, /* Define Huffman Table */
+
+ 0x00, 0x00, 0x01, 0x05, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b,
+
+ 0x01, 0x00, 0x03, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b,
+
+ 0x10, 0x00, 0x02, 0x01, 0x03, 0x03, 0x02, 0x04, 0x03, 0x05, 0x05, 0x04, 0x04, 0x00, 0x00, 0x01, 0x7d,
+ 0x01, 0x02, 0x03, 0x00, 0x04, 0x11, 0x05, 0x12, 0x21, 0x31, 0x41, 0x06, 0x13, 0x51, 0x61, 0x07,
+ 0x22, 0x71, 0x14, 0x32, 0x81, 0x91, 0xa1, 0x08, 0x23, 0x42, 0xb1, 0xc1, 0x15, 0x52, 0xd1, 0xf0,
+ 0x24, 0x33, 0x62, 0x72, 0x82, 0x09, 0x0a, 0x16, 0x17, 0x18, 0x19, 0x1a, 0x25, 0x26, 0x27, 0x28,
+ 0x29, 0x2a, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39, 0x3a, 0x43, 0x44, 0x45, 0x46, 0x47, 0x48, 0x49,
+ 0x4a, 0x53, 0x54, 0x55, 0x56, 0x57, 0x58, 0x59, 0x5a, 0x63, 0x64, 0x65, 0x66, 0x67, 0x68, 0x69,
+ 0x6a, 0x73, 0x74, 0x75, 0x76, 0x77, 0x78, 0x79, 0x7a, 0x83, 0x84, 0x85, 0x86, 0x87, 0x88, 0x89,
+ 0x8a, 0x92, 0x93, 0x94, 0x95, 0x96, 0x97, 0x98, 0x99, 0x9a, 0xa2, 0xa3, 0xa4, 0xa5, 0xa6, 0xa7,
+ 0xa8, 0xa9, 0xaa, 0xb2, 0xb3, 0xb4, 0xb5, 0xb6, 0xb7, 0xb8, 0xb9, 0xba, 0xc2, 0xc3, 0xc4, 0xc5,
+ 0xc6, 0xc7, 0xc8, 0xc9, 0xca, 0xd2, 0xd3, 0xd4, 0xd5, 0xd6, 0xd7, 0xd8, 0xd9, 0xda, 0xe1, 0xe2,
+ 0xe3, 0xe4, 0xe5, 0xe6, 0xe7, 0xe8, 0xe9, 0xea, 0xf1, 0xf2, 0xf3, 0xf4, 0xf5, 0xf6, 0xf7, 0xf8,
+ 0xf9, 0xfa,
+
+ 0x11, 0x00, 0x02, 0x01, 0x02, 0x04, 0x04, 0x03, 0x04, 0x07, 0x05, 0x04, 0x04, 0x00, 0x01, 0x02, 0x77,
+ 0x00, 0x01, 0x02, 0x03, 0x11, 0x04, 0x05, 0x21, 0x31, 0x06, 0x12, 0x41, 0x51, 0x07, 0x61, 0x71,
+ 0x13, 0x22, 0x32, 0x81, 0x08, 0x14, 0x42, 0x91, 0xa1, 0xb1, 0xc1, 0x09, 0x23, 0x33, 0x52, 0xf0,
+ 0x15, 0x62, 0x72, 0xd1, 0x0a, 0x16, 0x24, 0x34, 0xe1, 0x25, 0xf1, 0x17, 0x18, 0x19, 0x1a, 0x26,
+ 0x27, 0x28, 0x29, 0x2a, 0x35, 0x36, 0x37, 0x38, 0x39, 0x3a, 0x43, 0x44, 0x45, 0x46, 0x47, 0x48,
+ 0x49, 0x4a, 0x53, 0x54, 0x55, 0x56, 0x57, 0x58, 0x59, 0x5a, 0x63, 0x64, 0x65, 0x66, 0x67, 0x68,
+ 0x69, 0x6a, 0x73, 0x74, 0x75, 0x76, 0x77, 0x78, 0x79, 0x7a, 0x82, 0x83, 0x84, 0x85, 0x86, 0x87,
+ 0x88, 0x89, 0x8a, 0x92, 0x93, 0x94, 0x95, 0x96, 0x97, 0x98, 0x99, 0x9a, 0xa2, 0xa3, 0xa4, 0xa5,
+ 0xa6, 0xa7, 0xa8, 0xa9, 0xaa, 0xb2, 0xb3, 0xb4, 0xb5, 0xb6, 0xb7, 0xb8, 0xb9, 0xba, 0xc2, 0xc3,
+ 0xc4, 0xc5, 0xc6, 0xc7, 0xc8, 0xc9, 0xca, 0xd2, 0xd3, 0xd4, 0xd5, 0xd6, 0xd7, 0xd8, 0xd9, 0xda,
+ 0xe2, 0xe3, 0xe4, 0xe5, 0xe6, 0xe7, 0xe8, 0xe9, 0xea, 0xf2, 0xf3, 0xf4, 0xf5, 0xf6, 0xf7, 0xf8,
+ 0xf9, 0xfa
+};
+
+struct libjpeg_source_mgr : jpeg_source_mgr {
+ libjpeg_source_mgr(unsigned char *buffer_ptr, int len);
+ ~libjpeg_source_mgr();
+
+ unsigned char *mBufferPtr;
+ int mFilledLen;
+};
+
+static void libjpeg_init_source(j_decompress_ptr cinfo) {
+ libjpeg_source_mgr* src = (libjpeg_source_mgr*)cinfo->src;
+ src->next_input_byte = (const JOCTET*)src->mBufferPtr;
+ src->bytes_in_buffer = 0;
+ src->current_offset = 0;
+}
+
+static boolean libjpeg_seek_input_data(j_decompress_ptr cinfo, long byte_offset) {
+ libjpeg_source_mgr* src = (libjpeg_source_mgr*)cinfo->src;
+ src->current_offset = byte_offset;
+ src->next_input_byte = (const JOCTET*)src->mBufferPtr + byte_offset;
+ src->bytes_in_buffer = 0;
+ return TRUE;
+}
+
+static boolean libjpeg_fill_input_buffer(j_decompress_ptr cinfo) {
+ libjpeg_source_mgr* src = (libjpeg_source_mgr*)cinfo->src;
+ src->current_offset += src->mFilledLen;
+ src->next_input_byte = src->mBufferPtr;
+ src->bytes_in_buffer = src->mFilledLen;
+ return TRUE;
+}
+
+static void libjpeg_skip_input_data(j_decompress_ptr cinfo, long num_bytes) {
+ libjpeg_source_mgr* src = (libjpeg_source_mgr*)cinfo->src;
+
+ if (num_bytes > (long)src->bytes_in_buffer) {
+ CAMHAL_LOGEA("\n\n\n libjpeg_skip_input_data - num_bytes > (long)src->bytes_in_buffer \n\n\n");
+ } else {
+ src->next_input_byte += num_bytes;
+ src->bytes_in_buffer -= num_bytes;
+ }
+}
+
+static boolean libjpeg_resync_to_restart(j_decompress_ptr cinfo, int desired) {
+ libjpeg_source_mgr* src = (libjpeg_source_mgr*)cinfo->src;
+ src->next_input_byte = (const JOCTET*)src->mBufferPtr;
+ src->bytes_in_buffer = 0;
+ return TRUE;
+}
+
+static void libjpeg_term_source(j_decompress_ptr /*cinfo*/) {}
+
+libjpeg_source_mgr::libjpeg_source_mgr(unsigned char *buffer_ptr, int len) : mBufferPtr(buffer_ptr), mFilledLen(len) {
+ init_source = libjpeg_init_source;
+ fill_input_buffer = libjpeg_fill_input_buffer;
+ skip_input_data = libjpeg_skip_input_data;
+ resync_to_restart = libjpeg_resync_to_restart;
+ term_source = libjpeg_term_source;
+ seek_input_data = libjpeg_seek_input_data;
+}
+
+libjpeg_source_mgr::~libjpeg_source_mgr() {}
+
+Decoder_libjpeg::Decoder_libjpeg()
+{
+ mWidth = 0;
+ mHeight = 0;
+ Y_Plane = NULL;
+ U_Plane = NULL;
+ V_Plane = NULL;
+ UV_Plane = NULL;
+}
+
+Decoder_libjpeg::~Decoder_libjpeg()
+{
+ release();
+}
+
+void Decoder_libjpeg::release()
+{
+ if (Y_Plane) {
+ free(Y_Plane);
+ Y_Plane = NULL;
+ }
+ if (U_Plane) {
+ free(U_Plane);
+ U_Plane = NULL;
+ }
+ if (V_Plane) {
+ free(V_Plane);
+ V_Plane = NULL;
+ }
+ if (UV_Plane) {
+ free(UV_Plane);
+ UV_Plane = NULL;
+ }
+}
+
+int Decoder_libjpeg::readDHTSize()
+{
+ return sizeof(jpeg_odml_dht);
+}
+
+int Decoder_libjpeg::appendDHT(unsigned char *jpeg_src, int filled_len, unsigned char *jpeg_with_dht_buffer, int buff_size)
+{
+ /* Appending DHT to JPEG */
+
+ int len = filled_len + sizeof(jpeg_odml_dht) - 2; // final length of jpeg data
+ if (len > buff_size) {
+ CAMHAL_LOGEA("\n\n\n Buffer size too small. filled_len=%d, buff_size=%d, sizeof(jpeg_odml_dht)=%d\n\n\n", filled_len, buff_size, sizeof(jpeg_odml_dht));
+ return 0;
+ }
+
+ memcpy(jpeg_with_dht_buffer, jpeg_odml_dht, sizeof(jpeg_odml_dht));
+ memcpy((jpeg_with_dht_buffer + sizeof(jpeg_odml_dht)), jpeg_src + 2, (filled_len - 2));
+ return len;
+}
+
+
+bool Decoder_libjpeg::decode(unsigned char *jpeg_src, int filled_len, unsigned char *nv12_buffer, int stride)
+{
+ struct jpeg_decompress_struct cinfo;
+ struct jpeg_error_mgr jerr;
+ struct libjpeg_source_mgr s_mgr(jpeg_src, filled_len);
+
+ if (filled_len == 0)
+ return false;
+
+ cinfo.err = jpeg_std_error(&jerr);
+ jpeg_create_decompress(&cinfo);
+
+ cinfo.src = &s_mgr;
+ int status = jpeg_read_header(&cinfo, true);
+ if (status != JPEG_HEADER_OK) {
+ CAMHAL_LOGEA("jpeg header corrupted");
+ return false;
+ }
+
+ cinfo.out_color_space = JCS_YCbCr;
+ cinfo.raw_data_out = true;
+ status = jpeg_start_decompress(&cinfo);
+ if (!status){
+ CAMHAL_LOGEA("jpeg_start_decompress failed");
+ return false;
+ }
+
+ if (mWidth == 0){
+ mWidth = cinfo.output_width;
+ mHeight = cinfo.output_height;
+ CAMHAL_LOGEA("w x h = %d x %d. stride=%d", cinfo.output_width, cinfo.output_height, stride);
+ }
+ else if ((cinfo.output_width > mWidth) || (cinfo.output_height > mHeight)) {
+ CAMHAL_LOGEA(" Free the existing buffers so that they are reallocated for new w x h. Old WxH = %dx%d. New WxH = %dx%d",
+ mWidth, mHeight, cinfo.output_width, cinfo.output_height);
+ release();
+ }
+
+ unsigned int decoded_uv_buffer_size = cinfo.output_width * cinfo.output_height / 2;
+ if (Y_Plane == NULL)Y_Plane = (unsigned char **)malloc(cinfo.output_height * sizeof(unsigned char *));
+ if (U_Plane == NULL)U_Plane = (unsigned char **)malloc(cinfo.output_height * sizeof(unsigned char *));
+ if (V_Plane == NULL)V_Plane = (unsigned char **)malloc(cinfo.output_height * sizeof(unsigned char *));
+ if (UV_Plane == NULL) UV_Plane = (unsigned char *)malloc(decoded_uv_buffer_size);
+
+ unsigned char **YUV_Planes[NUM_COMPONENTS_IN_YUV];
+ YUV_Planes[0] = Y_Plane;
+ YUV_Planes[1] = U_Plane;
+ YUV_Planes[2] = V_Plane;
+
+ unsigned char *row = &nv12_buffer[0];
+
+ // Y Component
+ for (unsigned int j = 0; j < cinfo.output_height; j++, row += stride)
+ YUV_Planes[0][j] = row;
+
+ row = &UV_Plane[0];
+
+ // U Component
+ for (unsigned int j = 0; j < cinfo.output_height; j+=2, row += cinfo.output_width / 2){
+ YUV_Planes[1][j+0] = row;
+ YUV_Planes[1][j+1] = row;
+ }
+
+ // V Component
+ for (unsigned int j = 0; j < cinfo.output_height; j+=2, row += cinfo.output_width / 2){
+ YUV_Planes[2][j+0] = row;
+ YUV_Planes[2][j+1] = row;
+ }
+
+    // Decode 8 rows per pass; the planar Cb/Cr output is interleaved into NV12 below
+ for (unsigned int i = 0; i < cinfo.output_height; i += 8) {
+ jpeg_read_raw_data(&cinfo, YUV_Planes, 8);
+ YUV_Planes[0] += 8;
+ YUV_Planes[1] += 8;
+ YUV_Planes[2] += 8;
+ }
+
+ unsigned char *uv_ptr = nv12_buffer + (stride * cinfo.output_height);
+ unsigned char *u_ptr = UV_Plane;
+ unsigned char *v_ptr = UV_Plane + (decoded_uv_buffer_size / 2);
+ for(unsigned int i = 0; i < cinfo.output_height / 2; i++){
+ for(unsigned int j = 0; j < cinfo.output_width; j+=2){
+ *(uv_ptr + j) = *u_ptr; u_ptr++;
+ *(uv_ptr + j + 1) = *v_ptr; v_ptr++;
+ }
+ uv_ptr = uv_ptr + stride;
+ }
+
+ jpeg_finish_decompress(&cinfo);
+ jpeg_destroy_decompress(&cinfo);
+
+ return true;
+}
+
+} // namespace Camera
+} // namespace Ti
+
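A brief usage sketch for the decoder added above: MJPEG frames that ship without Huffman tables are first rewritten with appendDHT(), then decoded into an NV12 destination. Buffer names and sizing are illustrative; callers are expected to size the scratch buffer from readDHTSize() plus the frame length:

    static bool decodeMjpegToNV12(unsigned char *frame, int frameLen,
                                  unsigned char *scratch, int scratchSize,
                                  unsigned char *nv12, int stride)
    {
        Ti::Camera::Decoder_libjpeg decoder;

        // Prepends the standard DHT segment; returns 0 when 'scratch' is too small.
        const int lenWithDht = decoder.appendDHT(frame, frameLen, scratch, scratchSize);
        if (lenWithDht == 0) {
            return false;
        }

        // Writes luma rows straight into 'nv12' with the given row stride and
        // interleaves Cb/Cr after the luma plane.
        return decoder.decode(scratch, lenWithDht, nv12, stride);
    }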
diff --git a/camera/Encoder_libjpeg.cpp b/camera/Encoder_libjpeg.cpp
index c7da115..e11e3bf 100644
--- a/camera/Encoder_libjpeg.cpp
+++ b/camera/Encoder_libjpeg.cpp
@@ -23,11 +23,9 @@
*
*/
-#define LOG_TAG "CameraHAL"
-
-#include "CameraHal.h"
#include "Encoder_libjpeg.h"
#include "NV12_resize.h"
+#include "TICameraParameters.h"
#include <stdlib.h>
#include <unistd.h>
@@ -46,7 +44,9 @@ extern "C" {
#define ARRAY_SIZE(array) (sizeof((array)) / sizeof((array)[0]))
#define MIN(x,y) ((x < y) ? x : y)
-namespace android {
+namespace Ti {
+namespace Camera {
+
struct integer_string_pair {
unsigned int integer;
const char* string;
@@ -151,7 +151,7 @@ static void uyvy_to_yuv(uint8_t* dst, uint32_t* src, int width) {
" blt 5f \n\t"
"0: @ 16 pixel swap \n\t"
" vld2.8 {q0, q1} , [%[src]]! @ q0 = uv q1 = y \n\t"
- " vuzp.8 q0, q2 @ d1 = u d5 = v \n\t"
+ " vuzp.8 q0, q2 @ d0 = u d4 = v \n\t"
" vmov d1, d0 @ q0 = u0u1u2..u0u1u2... \n\t"
" vmov d5, d4 @ q2 = v0v1v2..v0v1v2... \n\t"
" vzip.8 d0, d1 @ q0 = u0u0u1u1u2u2... \n\t"
@@ -173,6 +173,61 @@ static void uyvy_to_yuv(uint8_t* dst, uint32_t* src, int width) {
}
}
+static void yuyv_to_yuv(uint8_t* dst, uint32_t* src, int width) {
+ if (!dst || !src) {
+ return;
+ }
+
+ if (width % 2) {
+ return; // not supporting odd widths
+ }
+
+    // currently, the NEON routine only supports widths that are a multiple of 16
+ if (width % 16) {
+ while ((width-=2) >= 0) {
+ uint8_t y0 = (src[0] >> 0) & 0xFF;
+ uint8_t u0 = (src[0] >> 8) & 0xFF;
+ uint8_t y1 = (src[0] >> 16) & 0xFF;
+ uint8_t v0 = (src[0] >> 24) & 0xFF;
+ dst[0] = y0;
+ dst[1] = u0;
+ dst[2] = v0;
+ dst[3] = y1;
+ dst[4] = u0;
+ dst[5] = v0;
+ dst += 6;
+ src++;
+ }
+ } else {
+ int n = width;
+ asm volatile (
+ " pld [%[src], %[src_stride], lsl #2] \n\t"
+ " cmp %[n], #16 \n\t"
+ " blt 5f \n\t"
+ "0: @ 16 pixel swap \n\t"
+ " vld2.8 {q0, q1} , [%[src]]! @ q0 = yyyy.. q1 = uvuv.. \n\t"
+ " vuzp.8 q1, q2 @ d2 = u d4 = v \n\t"
+ " vmov d3, d2 @ q1 = u0u1u2..u0u1u2... \n\t"
+ " vmov d5, d4 @ q2 = v0v1v2..v0v1v2... \n\t"
+ " vzip.8 d2, d3 @ q1 = u0u0u1u1u2u2... \n\t"
+ " vzip.8 d4, d5 @ q2 = v0v0v1v1v2v2... \n\t"
+ " @ now q0 = y q1 = u q2 = v \n\t"
+ " vst3.8 {d0,d2,d4},[%[dst]]! \n\t"
+ " vst3.8 {d1,d3,d5},[%[dst]]! \n\t"
+ " sub %[n], %[n], #16 \n\t"
+ " cmp %[n], #16 \n\t"
+ " bge 0b \n\t"
+ "5: @ end \n\t"
+#ifdef NEEDS_ARM_ERRATA_754319_754320
+ " vmov s0,s0 @ add noop for errata item \n\t"
+#endif
+ : [dst] "+r" (dst), [src] "+r" (src), [n] "+r" (n)
+ : [src_stride] "r" (width)
+ : "cc", "memory", "q0", "q1", "q2"
+ );
+ }
+}
+
static void resize_nv12(Encoder_libjpeg::params* params, uint8_t* dst_buffer) {
structConvImage o_img_ptr, i_img_ptr;
@@ -187,6 +242,7 @@ static void resize_nv12(Encoder_libjpeg::params* params, uint8_t* dst_buffer) {
i_img_ptr.eFormat = IC_FORMAT_YCbCr420_lp;
i_img_ptr.imgPtr = (uint8_t*) params->src;
i_img_ptr.clrPtr = i_img_ptr.imgPtr + (i_img_ptr.uWidth * i_img_ptr.uHeight);
+ i_img_ptr.uOffset = 0;
    //output
o_img_ptr.uWidth = params->out_width;
@@ -195,6 +251,7 @@ static void resize_nv12(Encoder_libjpeg::params* params, uint8_t* dst_buffer) {
o_img_ptr.eFormat = IC_FORMAT_YCbCr420_lp;
o_img_ptr.imgPtr = dst_buffer;
o_img_ptr.clrPtr = o_img_ptr.imgPtr + (o_img_ptr.uWidth * o_img_ptr.uHeight);
+ o_img_ptr.uOffset = 0;
VT_resizeFrame_Video_opt2_lp(&i_img_ptr, &o_img_ptr, NULL, 0);
}
@@ -267,7 +324,11 @@ void ExifElementsTable::insertExifToJpeg(unsigned char* jpeg, size_t jpeg_size)
ResetJpgfile();
if (ReadJpegSectionsFromBuffer(jpeg, jpeg_size, read_mode)) {
jpeg_opened = true;
+#ifdef ANDROID_API_JB_OR_LATER
create_EXIF(table, exif_tag_count, gps_tag_count, has_datetime_tag);
+#else
+ create_EXIF(table, exif_tag_count, gps_tag_count);
+#endif
}
}
@@ -275,7 +336,7 @@ status_t ExifElementsTable::insertExifThumbnailImage(const char* thumb, int len)
status_t ret = NO_ERROR;
if ((len > 0) && jpeg_opened) {
- ret = ReplaceThumbnailFromBuffer(thumb, len);
+ ret = ReplaceThumbnailFromBuffer(thumb, len) ? NO_ERROR : UNKNOWN_ERROR;
CAMHAL_LOGDB("insertExifThumbnailImage. ReplaceThumbnail(). ret=%d", ret);
}
@@ -334,7 +395,14 @@ status_t ExifElementsTable::insertElement(const char* tag, const char* value) {
exif_tag_count++;
if (strcmp(tag, TAG_DATETIME) == 0) {
+#ifdef ANDROID_API_JB_OR_LATER
has_datetime_tag = true;
+#else
+        // jhead isn't accepting the datetime tag; work around it here
+ ImageInfo.numDateTimeTags = 1;
+ memcpy(ImageInfo.DateTime, value,
+ MIN(ARRAY_SIZE(ImageInfo.DateTime), value_length + 1));
+#endif
}
}
@@ -386,20 +454,21 @@ size_t Encoder_libjpeg::encode(params* input) {
goto exit;
}
- if (strcmp(input->format, CameraParameters::PIXEL_FORMAT_YUV420SP) == 0) {
+ if (strcmp(input->format, android::CameraParameters::PIXEL_FORMAT_YUV420SP) == 0) {
bpp = 1;
if ((in_width != out_width) || (in_height != out_height)) {
resize_src = (uint8_t*) malloc(input->dst_size);
resize_nv12(input, resize_src);
if (resize_src) src = resize_src;
}
- } else if ((in_width != out_width) || (in_height != out_height)) {
- CAMHAL_LOGEB("Encoder: resizing is not supported for this format: %s", input->format);
- goto exit;
- } else if (strcmp(input->format, CameraParameters::PIXEL_FORMAT_YUV422I)) {
+ } else if (strcmp(input->format, android::CameraParameters::PIXEL_FORMAT_YUV422I) &&
+ strcmp(input->format, TICameraParameters::PIXEL_FORMAT_YUV422I_UYVY)) {
// we currently only support yuv422i and yuv420sp
CAMHAL_LOGEB("Encoder: format not supported: %s", input->format);
goto exit;
+ } else if ((in_width != out_width) || (in_height != out_height)) {
+ CAMHAL_LOGEB("Encoder: resizing is not supported for this format: %s", input->format);
+ goto exit;
}
cinfo.err = jpeg_std_error(&jerr);
@@ -411,9 +480,10 @@ size_t Encoder_libjpeg::encode(params* input) {
"height:%d \n\t"
"dest %p \n\t"
"dest size:%d \n\t"
- "mSrc %p",
+ "mSrc %p \n\t"
+ "format: %s",
out_width, out_height, input->dst,
- input->dst_size, src);
+ input->dst_size, src, input->format);
cinfo.dest = &dest_mgr;
cinfo.image_width = out_width - right_crop;
@@ -428,7 +498,7 @@ size_t Encoder_libjpeg::encode(params* input) {
jpeg_start_compress(&cinfo, TRUE);
- row_tmp = (uint8_t*)malloc(out_width * 3);
+ row_tmp = (uint8_t*)malloc((out_width - right_crop) * 3);
row_src = src + start_offset;
row_uv = src + out_width * out_height * bpp;
@@ -436,10 +506,12 @@ size_t Encoder_libjpeg::encode(params* input) {
JSAMPROW row[1]; /* pointer to JSAMPLE row[s] */
// convert input yuv format to yuv444
- if (strcmp(input->format, CameraParameters::PIXEL_FORMAT_YUV420SP) == 0) {
+ if (strcmp(input->format, android::CameraParameters::PIXEL_FORMAT_YUV420SP) == 0) {
nv21_to_yuv(row_tmp, row_src, row_uv, out_width - right_crop);
- } else {
+ } else if (strcmp(input->format, TICameraParameters::PIXEL_FORMAT_YUV422I_UYVY) == 0) {
uyvy_to_yuv(row_tmp, (uint32_t*)row_src, out_width - right_crop);
+ } else if (strcmp(input->format, android::CameraParameters::PIXEL_FORMAT_YUV422I) == 0) {
+ yuyv_to_yuv(row_tmp, (uint32_t*)row_src, out_width - right_crop);
}
row[0] = row_tmp;
@@ -447,7 +519,7 @@ size_t Encoder_libjpeg::encode(params* input) {
row_src = row_src + out_width*bpp;
// move uv row if input format needs it
- if (strcmp(input->format, CameraParameters::PIXEL_FORMAT_YUV420SP) == 0) {
+ if (strcmp(input->format, android::CameraParameters::PIXEL_FORMAT_YUV420SP) == 0) {
if (!(cinfo.next_scanline % 2))
row_uv = row_uv + out_width * bpp;
}
@@ -467,4 +539,5 @@ size_t Encoder_libjpeg::encode(params* input) {
return dest_mgr.jpegsize;
}
-} // namespace android
+} // namespace Camera
+} // namespace Ti
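The new yuyv_to_yuv() differs from the existing uyvy_to_yuv() only in where luma and chroma sit inside each 4-byte macro-pixel, which is what the shift pattern in the scalar fallback above encodes for little-endian loads. A small reference for the two unpackings (the helper names are ours, not part of the HAL):

    // Each 32-bit word, loaded little-endian, carries two horizontal pixels.
    //   YUYV memory order: Y0 U Y1 V
    //   UYVY memory order: U Y0 V Y1
    static inline void unpackYUYV(uint32_t w, uint8_t &y0, uint8_t &u,
                                  uint8_t &y1, uint8_t &v)
    {
        y0 = (w >>  0) & 0xFF;   u = (w >>  8) & 0xFF;
        y1 = (w >> 16) & 0xFF;   v = (w >> 24) & 0xFF;
    }

    static inline void unpackUYVY(uint32_t w, uint8_t &y0, uint8_t &u,
                                  uint8_t &y1, uint8_t &v)
    {
        u  = (w >>  0) & 0xFF;  y0 = (w >>  8) & 0xFF;
        v  = (w >> 16) & 0xFF;  y1 = (w >> 24) & 0xFF;
    }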
diff --git a/camera/MemoryManager.cpp b/camera/MemoryManager.cpp
index 8631bbd..8d40b76 100644
--- a/camera/MemoryManager.cpp
+++ b/camera/MemoryManager.cpp
@@ -14,25 +14,19 @@
* limitations under the License.
*/
-
-
-#define LOG_TAG "CameraHAL"
-
-
#include "CameraHal.h"
#include "TICameraParameters.h"
extern "C" {
-#include <ion.h>
-
//#include <timm_osal_interfaces.h>
//#include <timm_osal_trace.h>
};
-namespace android {
+namespace Ti {
+namespace Camera {
///@todo Move these constants to a common header file, preferably in tiler.h
#define STRIDE_8BIT (4 * 1024)
@@ -43,92 +37,113 @@ namespace android {
///Utility Macro Declarations
/*--------------------MemoryManager Class STARTS here-----------------------------*/
-void* MemoryManager::allocateBuffer(int width, int height, const char* format, int &bytes, int numBufs)
-{
- LOG_FUNCTION_NAME;
+MemoryManager::MemoryManager() {
+ mIonFd = -1;
+}
- if(mIonFd < 0)
- {
+MemoryManager::~MemoryManager() {
+ if ( mIonFd >= 0 ) {
+ ion_close(mIonFd);
+ mIonFd = -1;
+ }
+}
+
+status_t MemoryManager::initialize() {
+ if ( mIonFd == -1 ) {
mIonFd = ion_open();
- if(mIonFd < 0)
- {
- CAMHAL_LOGEA("ion_open failed!!!");
- return NULL;
- }
+ if ( mIonFd < 0 ) {
+ CAMHAL_LOGE("ion_open() failed, error: %d", mIonFd);
+ mIonFd = -1;
+ return NO_INIT;
}
+ }
+
+ return OK;
+}
+
+CameraBuffer* MemoryManager::allocateBufferList(int width, int height, const char* format, int &size, int numBufs)
+{
+ LOG_FUNCTION_NAME;
+
+ CAMHAL_ASSERT(mIonFd != -1);
///We allocate numBufs+1 because the last entry will be marked NULL to indicate end of array, which is used when freeing
///the buffers
const uint numArrayEntriesC = (uint)(numBufs+1);
///Allocate a buffer array
- uint32_t *bufsArr = new uint32_t [numArrayEntriesC];
- if(!bufsArr)
- {
- CAMHAL_LOGEB("Allocation failed when creating buffers array of %d uint32_t elements", numArrayEntriesC);
+ CameraBuffer *buffers = new CameraBuffer [numArrayEntriesC];
+ if(!buffers) {
+ CAMHAL_LOGEB("Allocation failed when creating buffers array of %d CameraBuffer elements", numArrayEntriesC);
goto error;
- }
+ }
///Initialize the array with zeros - this will help us while freeing the array in case of error
 ///If a value of an array element is NULL, it means we didn't allocate it
- memset(bufsArr, 0, sizeof(*bufsArr) * numArrayEntriesC);
+ memset(buffers, 0, sizeof(CameraBuffer) * numArrayEntriesC);
//2D Allocations are not supported currently
- if(bytes != 0)
- {
+ if(size != 0) {
struct ion_handle *handle;
int mmap_fd;
+ size_t stride;
///1D buffers
- for (int i = 0; i < numBufs; i++)
- {
- int ret = ion_alloc(mIonFd, bytes, 0, 1 << ION_HEAP_TYPE_CARVEOUT, &handle);
- if(ret < 0)
- {
- CAMHAL_LOGEB("ion_alloc resulted in error %d", ret);
+ for (int i = 0; i < numBufs; i++) {
+ unsigned char *data;
+ int ret = ion_alloc(mIonFd, size, 0, 1 << ION_HEAP_TYPE_CARVEOUT,
+ &handle);
+ if((ret < 0) || ((int)handle == -ENOMEM)) {
+ ret = ion_alloc_tiler(mIonFd, (size_t)size, 1, TILER_PIXEL_FMT_PAGE,
+ OMAP_ION_HEAP_TILER_MASK, &handle, &stride);
+ }
+
+ if((ret < 0) || ((int)handle == -ENOMEM)) {
+ CAMHAL_LOGEB("FAILED to allocate ion buffer of size=%d. ret=%d(0x%x)", size, ret, ret);
goto error;
- }
+ }
- CAMHAL_LOGDB("Before mapping, handle = %x, nSize = %d", handle, bytes);
- if ((ret = ion_map(mIonFd, handle, bytes, PROT_READ | PROT_WRITE, MAP_SHARED, 0,
- (unsigned char**)&bufsArr[i], &mmap_fd)) < 0)
- {
+ CAMHAL_LOGDB("Before mapping, handle = %p, nSize = %d", handle, size);
+ if ((ret = ion_map(mIonFd, handle, size, PROT_READ | PROT_WRITE, MAP_SHARED, 0,
+ &data, &mmap_fd)) < 0) {
CAMHAL_LOGEB("Userspace mapping of ION buffers returned error %d", ret);
ion_free(mIonFd, handle);
goto error;
- }
-
- mIonHandleMap.add(bufsArr[i], (unsigned int)handle);
- mIonFdMap.add(bufsArr[i], (unsigned int) mmap_fd);
- mIonBufLength.add(bufsArr[i], (unsigned int) bytes);
}
+ buffers[i].type = CAMERA_BUFFER_ION;
+ buffers[i].opaque = data;
+ buffers[i].mapped = data;
+ buffers[i].ion_handle = handle;
+ buffers[i].ion_fd = mIonFd;
+ buffers[i].fd = mmap_fd;
+ buffers[i].size = size;
+ buffers[i].format = CameraHal::getPixelFormatConstant(format);
+
}
- else // If bytes is not zero, then it is a 2-D tiler buffer request
- {
- }
+ }
- LOG_FUNCTION_NAME_EXIT;
+ LOG_FUNCTION_NAME_EXIT;
- return (void*)bufsArr;
+ return buffers;
error:
- ALOGE("Freeing buffers already allocated after error occurred");
- if(bufsArr)
- freeBuffer(bufsArr);
+
+ CAMHAL_LOGE("Freeing buffers already allocated after error occurred");
+ if(buffers)
+ freeBufferList(buffers);
if ( NULL != mErrorNotifier.get() )
- {
mErrorNotifier->errorNotify(-ENOMEM);
- }
+ LOG_FUNCTION_NAME_EXIT;
- if (mIonFd >= 0)
- {
- ion_close(mIonFd);
- mIonFd = -1;
- }
+ return NULL;
+}
+
+CameraBuffer* MemoryManager::getBufferList(int *numBufs) {
+ LOG_FUNCTION_NAME;
+ if (numBufs) *numBufs = -1;
- LOG_FUNCTION_NAME_EXIT;
return NULL;
}
@@ -152,50 +167,38 @@ int MemoryManager::getFd()
return -1;
}
-int MemoryManager::freeBuffer(void* buf)
+int MemoryManager::freeBufferList(CameraBuffer *buffers)
{
status_t ret = NO_ERROR;
LOG_FUNCTION_NAME;
- uint32_t *bufEntry = (uint32_t*)buf;
+ int i;
- if(!bufEntry)
+ if(!buffers)
{
CAMHAL_LOGEA("NULL pointer passed to freebuffer");
LOG_FUNCTION_NAME_EXIT;
return BAD_VALUE;
}
- while(*bufEntry)
+ i = 0;
+ while(buffers[i].type == CAMERA_BUFFER_ION)
{
- unsigned int ptr = (unsigned int) *bufEntry++;
- if(mIonBufLength.valueFor(ptr))
+ if(buffers[i].size)
{
- munmap((void *)ptr, mIonBufLength.valueFor(ptr));
- close(mIonFdMap.valueFor(ptr));
- ion_free(mIonFd, (ion_handle*)mIonHandleMap.valueFor(ptr));
- mIonHandleMap.removeItem(ptr);
- mIonBufLength.removeItem(ptr);
- mIonFdMap.removeItem(ptr);
+ munmap(buffers[i].opaque, buffers[i].size);
+ close(buffers[i].fd);
+ ion_free(mIonFd, buffers[i].ion_handle);
}
else
{
CAMHAL_LOGEA("Not a valid Memory Manager buffer");
}
+ i++;
}
- ///@todo Check if this way of deleting array is correct, else use malloc/free
- uint32_t * bufArr = (uint32_t*)buf;
- delete [] bufArr;
+ delete [] buffers;
- if(mIonBufLength.size() == 0)
- {
- if(mIonFd >= 0)
- {
- ion_close(mIonFd);
- mIonFd = -1;
- }
- }
LOG_FUNCTION_NAME_EXIT;
return ret;
}
@@ -222,7 +225,8 @@ status_t MemoryManager::setErrorHandler(ErrorNotifier *errorNotifier)
return ret;
}
-};
+} // namespace Camera
+} // namespace Ti
/*--------------------MemoryManager Class ENDS here-----------------------------*/
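Putting the reworked interface together: initialize() must succeed before any allocation, allocateBufferList() returns numBufs mapped ION buffers plus one zeroed terminator entry, and freeBufferList() walks the array until that terminator. A hedged sketch of the flow (direct construction and the argument values are illustrative):

    static void exerciseBufferList(const char *format, int numBufs, int bytes)
    {
        Ti::Camera::MemoryManager mm;
        if (mm.initialize() != OK) {              // ion_open() failed
            return;
        }

        int size = bytes;                         // 1-D request: 'size' drives ion_alloc()
        Ti::Camera::CameraBuffer *bufs = mm.allocateBufferList(0, 0, format, size, numBufs);
        if (bufs == NULL) {
            return;
        }

        // bufs[0..numBufs-1] are mapped (see .mapped / .fd); bufs[numBufs] is the
        // zeroed entry that freeBufferList() stops on.
        mm.freeBufferList(bufs);
    }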
diff --git a/camera/NV12_resize.c b/camera/NV12_resize.cpp
index 7f92fb2..971ee38 100644
--- a/camera/NV12_resize.c
+++ b/camera/NV12_resize.cpp
@@ -1,12 +1,27 @@
-#include "NV12_resize.h"
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
-//#define LOG_NDEBUG 0
-#define LOG_NIDEBUG 0
-#define LOG_NDDEBUG 0
+#include "NV12_resize.h"
+#ifdef LOG_TAG
+#undef LOG_TAG
+#endif
#define LOG_TAG "NV12_resize"
+
#define STRIDE 4096
-#include <utils/Log.h>
/*==========================================================================
* Function Name : VT_resizeFrame_Video_opt2_lp
@@ -23,95 +38,82 @@
* faster version.
============================================================================*/
mmBool
-VT_resizeFrame_Video_opt2_lp
-(
- structConvImage* i_img_ptr, /* Points to the input image */
- structConvImage* o_img_ptr, /* Points to the output image */
- IC_rect_type* cropout, /* how much to resize to in final image */
- mmUint16 dummy /* Transparent pixel value */
- )
-{
- ALOGV("VT_resizeFrame_Video_opt2_lp+");
-
- mmUint16 row,col;
- mmUint32 resizeFactorX;
- mmUint32 resizeFactorY;
-
-
- mmUint16 x, y;
-
- mmUchar* ptr8;
- mmUchar *ptr8Cb, *ptr8Cr;
-
-
- mmUint16 xf, yf;
- mmUchar* inImgPtrY;
- mmUchar* inImgPtrU;
- mmUchar* inImgPtrV;
- mmUint32 cox, coy, codx, cody;
- mmUint16 idx,idy, idxC;
-
- if(i_img_ptr->uWidth == o_img_ptr->uWidth)
- {
- if(i_img_ptr->uHeight == o_img_ptr->uHeight)
- {
- ALOGV("************************f(i_img_ptr->uHeight == o_img_ptr->uHeight) are same *********************\n");
- ALOGV("************************(i_img_ptr->width == %d" , i_img_ptr->uWidth );
- ALOGV("************************(i_img_ptr->uHeight == %d" , i_img_ptr->uHeight );
- ALOGV("************************(o_img_ptr->width == %d" ,o_img_ptr->uWidth );
- ALOGV("************************(o_img_ptr->uHeight == %d" , o_img_ptr->uHeight );
- }
- }
-
- if (!i_img_ptr || !i_img_ptr->imgPtr ||
- !o_img_ptr || !o_img_ptr->imgPtr)
- {
- ALOGE("Image Point NULL");
- ALOGV("VT_resizeFrame_Video_opt2_lp-");
- return FALSE;
- }
-
- inImgPtrY = (mmUchar *) i_img_ptr->imgPtr + i_img_ptr->uOffset;
- inImgPtrU = (mmUchar *) i_img_ptr->clrPtr + i_img_ptr->uOffset/2;
- inImgPtrV = (mmUchar*)inImgPtrU + 1;
-
- if (cropout == NULL)
- {
- cox = 0;
- coy = 0;
- codx = o_img_ptr->uWidth;
- cody = o_img_ptr->uHeight;
- }
- else
- {
- cox = cropout->x;
- coy = cropout->y;
- codx = cropout->uWidth;
- cody = cropout->uHeight;
- }
- idx = i_img_ptr->uWidth;
- idy = i_img_ptr->uHeight;
-
- /* make sure valid input size */
- if (idx < 1 || idy < 1 || i_img_ptr->uStride < 1)
- {
- ALOGE("idx or idy less then 1 idx = %d idy = %d stride = %d", idx, idy, i_img_ptr->uStride);
- ALOGV("VT_resizeFrame_Video_opt2_lp-");
- return FALSE;
- }
-
- resizeFactorX = ((idx-1)<<9) / codx;
- resizeFactorY = ((idy-1)<<9) / cody;
-
- if(i_img_ptr->eFormat == IC_FORMAT_YCbCr420_lp &&
- o_img_ptr->eFormat == IC_FORMAT_YCbCr420_lp)
- {
- ptr8 = (mmUchar*)o_img_ptr->imgPtr + cox + coy*o_img_ptr->uWidth;
+VT_resizeFrame_Video_opt2_lp(
+ structConvImage* i_img_ptr, /* Points to the input image */
+ structConvImage* o_img_ptr, /* Points to the output image */
+ IC_rect_type* cropout, /* how much to resize to in final image */
+ mmUint16 dummy /* Transparent pixel value */
+ ) {
+ LOG_FUNCTION_NAME;
+
+ mmUint16 row,col;
+ mmUint32 resizeFactorX;
+ mmUint32 resizeFactorY;
+
+ mmUint16 x, y;
+
+ mmUchar* ptr8;
+ mmUchar *ptr8Cb, *ptr8Cr;
+
+ mmUint16 xf, yf;
+ mmUchar* inImgPtrY;
+ mmUchar* inImgPtrU;
+ mmUchar* inImgPtrV;
+ mmUint32 cox, coy, codx, cody;
+ mmUint16 idx,idy, idxC;
+
+  if ( !i_img_ptr || !i_img_ptr->imgPtr || !o_img_ptr || !o_img_ptr->imgPtr ) {
+    CAMHAL_LOGE("Image pointer is NULL");
+    return false;
+  }
+
+  if ( i_img_ptr->uWidth == o_img_ptr->uWidth ) {
+    if ( i_img_ptr->uHeight == o_img_ptr->uHeight ) {
+      CAMHAL_LOGV("************************ input and output sizes are the same *********************\n");
+      CAMHAL_LOGV("************************(i_img_ptr->uWidth == %d" , i_img_ptr->uWidth );
+      CAMHAL_LOGV("************************(i_img_ptr->uHeight == %d" , i_img_ptr->uHeight );
+      CAMHAL_LOGV("************************(o_img_ptr->uWidth == %d" ,o_img_ptr->uWidth );
+      CAMHAL_LOGV("************************(o_img_ptr->uHeight == %d" , o_img_ptr->uHeight );
+    }
+  }
+
+ inImgPtrY = (mmUchar *) i_img_ptr->imgPtr + i_img_ptr->uOffset;
+ inImgPtrU = (mmUchar *) i_img_ptr->clrPtr + i_img_ptr->uOffset/2;
+ inImgPtrV = (mmUchar*)inImgPtrU + 1;
+
+ if ( !cropout ) {
+ cox = 0;
+ coy = 0;
+ codx = o_img_ptr->uWidth;
+ cody = o_img_ptr->uHeight;
+ } else {
+ cox = cropout->x;
+ coy = cropout->y;
+ codx = cropout->uWidth;
+ cody = cropout->uHeight;
+ }
+ idx = i_img_ptr->uWidth;
+ idy = i_img_ptr->uHeight;
+
+ /* make sure valid input size */
+ if ( idx < 1 || idy < 1 || i_img_ptr->uStride < 1 ) {
+    CAMHAL_LOGE("idx or idy less than 1: idx = %d idy = %d stride = %d", idx, idy, i_img_ptr->uStride);
+ return false;
+ }
+
+ resizeFactorX = ((idx-1)<<9) / codx;
+ resizeFactorY = ((idy-1)<<9) / cody;
+ if( i_img_ptr->eFormat != IC_FORMAT_YCbCr420_lp ||
+ o_img_ptr->eFormat != IC_FORMAT_YCbCr420_lp ) {
+ CAMHAL_LOGE("eFormat not supported");
+ return false;
+ }
+
+ ptr8 = (mmUchar*)o_img_ptr->imgPtr + cox + coy*o_img_ptr->uWidth;
////////////////////////////for Y//////////////////////////
- for (row=0; row < cody; row++)
- {
+ for ( row = 0; row < cody; row++ ) {
mmUchar *pu8Yrow1 = NULL;
mmUchar *pu8Yrow2 = NULL;
y = (mmUint16) ((mmUint32) (row*resizeFactorY) >> 9);
@@ -119,8 +121,7 @@ VT_resizeFrame_Video_opt2_lp
pu8Yrow1 = inImgPtrY + (y) * i_img_ptr->uStride;
pu8Yrow2 = pu8Yrow1 + i_img_ptr->uStride;
- for (col=0; col < codx; col++)
- {
+ for ( col = 0; col < codx; col++ ) {
mmUchar in11, in12, in21, in22;
mmUchar *pu8ptr1 = NULL;
mmUchar *pu8ptr2 = NULL;
@@ -128,12 +129,9 @@ VT_resizeFrame_Video_opt2_lp
mmUint16 accum_1;
//mmUint32 accum_W;
-
-
x = (mmUint16) ((mmUint32) (col*resizeFactorX) >> 9);
xf = (mmUchar) ((mmUint32) ((col*resizeFactorX) >> 6) & 0x7);
-
//accum_W = 0;
accum_1 = 0;
@@ -175,7 +173,6 @@ VT_resizeFrame_Video_opt2_lp
accum_1 = (accum_1>>6);
*ptr8 = (mmUchar)accum_1 ;
-
ptr8++;
}
ptr8 = ptr8 + (o_img_ptr->uStride - codx);
@@ -189,8 +186,7 @@ VT_resizeFrame_Video_opt2_lp
ptr8Cr = (mmUchar*)(ptr8Cb+1);
idxC = (idx>>1);
- for (row=0; row < (((cody)>>1)); row++)
- {
+ for ( row = 0; row < (((cody)>>1)); row++ ) {
mmUchar *pu8Cbr1 = NULL;
mmUchar *pu8Cbr2 = NULL;
mmUchar *pu8Crr1 = NULL;
@@ -204,8 +200,7 @@ VT_resizeFrame_Video_opt2_lp
pu8Crr1 = inImgPtrV + (y) * i_img_ptr->uStride;
pu8Crr2 = pu8Crr1 + i_img_ptr->uStride;
- for (col=0; col < (((codx)>>1)); col++)
- {
+ for ( col = 0; col < (((codx)>>1)); col++ ) {
mmUchar in11, in12, in21, in22;
mmUchar *pu8Cbc1 = NULL;
mmUchar *pu8Cbc2 = NULL;
@@ -216,21 +211,17 @@ VT_resizeFrame_Video_opt2_lp
mmUint16 accum_1Cb, accum_1Cr;
//mmUint32 accum_WCb, accum_WCr;
-
x = (mmUint16) ((mmUint32) (col*resizeFactorX) >> 9);
xf = (mmUchar) ((mmUint32) ((col*resizeFactorX) >> 6) & 0x7);
-
//accum_WCb = accum_WCr = 0;
accum_1Cb = accum_1Cr = 0;
pu8Cbc1 = pu8Cbr1 + (x*2);
pu8Cbc2 = pu8Cbr2 + (x*2);
- pu8Crc1 = pu8Crr1 + (x*2);
+ pu8Crc1 = pu8Crr1 + (x*2);
pu8Crc2 = pu8Crr2 + (x*2);
-
-
/* A pixel */
w = bWeights[xf][yf][0];
@@ -238,7 +229,7 @@ VT_resizeFrame_Video_opt2_lp
accum_1Cb = (w * in11);
// accum_WCb += (w);
- in11 = *(pu8Crc1);
+ in11 = *(pu8Crc1);
accum_1Cr = (w * in11);
//accum_WCr += (w);
@@ -260,7 +251,7 @@ VT_resizeFrame_Video_opt2_lp
accum_1Cb += (w * in21);
//accum_WCb += (w);
- in21 = *(pu8Crc2);
+ in21 = *(pu8Crc2);
accum_1Cr += (w * in21);
//accum_WCr += (w);
@@ -280,7 +271,6 @@ VT_resizeFrame_Video_opt2_lp
accum_1Cb = (accum_1Cb>>6);
*ptr8Cb = (mmUchar)accum_1Cb ;
-
accum_1Cr = (accum_1Cr >> 6);
*ptr8Cr = (mmUchar)accum_1Cr ;
@@ -294,14 +284,7 @@ VT_resizeFrame_Video_opt2_lp
ptr8Cr = ptr8Cr + (o_img_ptr->uStride-codx);
}
///////////////////For Cb- Cr////////////////////////////////////////
- }
- else
- {
- ALOGE("eFormat not supported");
- ALOGV("VT_resizeFrame_Video_opt2_lp-");
- return FALSE;
- }
- ALOGV("success");
- ALOGV("VT_resizeFrame_Video_opt2_lp-");
- return TRUE;
+
+ CAMHAL_LOGV("success");
+ return true;
}
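For reference, the setup that resize_nv12() in Encoder_libjpeg.cpp uses to drive this routine looks roughly like the sketch below: both images are NV12 (luma plane followed by the interleaved Cb/Cr plane) and, as a simplification here, the stride is assumed equal to the width:

    static bool resizeNV12(unsigned char *src, int srcW, int srcH,
                           unsigned char *dst, int dstW, int dstH)
    {
        structConvImage in, out;
        memset(&in, 0, sizeof(in));
        memset(&out, 0, sizeof(out));

        in.uWidth  = srcW;   in.uHeight = srcH;   in.uStride = srcW;
        in.eFormat = IC_FORMAT_YCbCr420_lp;
        in.imgPtr  = src;
        in.clrPtr  = src + srcW * srcH;            // Cb/Cr plane follows the luma plane
        in.uOffset = 0;

        out.uWidth  = dstW;  out.uHeight = dstH;  out.uStride = dstW;
        out.eFormat = IC_FORMAT_YCbCr420_lp;
        out.imgPtr  = dst;
        out.clrPtr  = dst + dstW * dstH;
        out.uOffset = 0;

        // A NULL crop rectangle scales to the full output size.
        return VT_resizeFrame_Video_opt2_lp(&in, &out, NULL, 0) != 0;
    }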
diff --git a/camera/OMXCameraAdapter/OMX3A.cpp b/camera/OMXCameraAdapter/OMX3A.cpp
index 4cb4607..9baad08 100644
--- a/camera/OMXCameraAdapter/OMX3A.cpp
+++ b/camera/OMXCameraAdapter/OMX3A.cpp
@@ -21,24 +21,19 @@
*
*/
-#undef LOG_TAG
-
-#define LOG_TAG "CameraHAL"
-
#include "CameraHal.h"
#include "OMXCameraAdapter.h"
#include "ErrorUtils.h"
#include <cutils/properties.h>
-#undef TRUE
-#undef FALSE
-#define TRUE "true"
-#define FALSE "false"
-
#define METERING_AREAS_RANGE 0xFF
-namespace android {
+static const char PARAM_SEP[] = ",";
+
+namespace Ti {
+namespace Camera {
+
const SceneModesEntry* OMXCameraAdapter::getSceneModeEntry(const char* name,
OMX_SCENEMODETYPE scene) {
const SceneModesEntry* cameraLUT = NULL;
@@ -69,7 +64,7 @@ const SceneModesEntry* OMXCameraAdapter::getSceneModeEntry(const char* name,
return entry;
}
-status_t OMXCameraAdapter::setParameters3A(const CameraParameters &params,
+status_t OMXCameraAdapter::setParameters3A(const android::CameraParameters &params,
BaseCameraAdapter::AdapterState state)
{
status_t ret = NO_ERROR;
@@ -81,9 +76,9 @@ status_t OMXCameraAdapter::setParameters3A(const CameraParameters &params,
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(m3ASettingsUpdateLock);
+ android::AutoMutex lock(m3ASettingsUpdateLock);
- str = params.get(CameraParameters::KEY_SCENE_MODE);
+ str = params.get(android::CameraParameters::KEY_SCENE_MODE);
mode = getLUTvalue_HALtoOMX( str, SceneLUT);
if ( mFirstTimeInit || ((str != NULL) && ( mParameters3A.SceneMode != mode )) ) {
if ( 0 <= mode ) {
@@ -113,22 +108,52 @@ status_t OMXCameraAdapter::setParameters3A(const CameraParameters &params,
}
#ifdef OMAP_ENHANCEMENT
-
- str = params.get(TICameraParameters::KEY_EXPOSURE_MODE);
- mode = getLUTvalue_HALtoOMX( str, ExpLUT);
- if ( ( str != NULL ) && ( mParameters3A.Exposure != mode ))
- {
- mParameters3A.Exposure = mode;
- CAMHAL_LOGDB("Exposure mode %d", mode);
- if ( 0 <= mParameters3A.Exposure )
- {
- mPending3Asettings |= SetExpMode;
+ if ( (str = params.get(TICameraParameters::KEY_EXPOSURE_MODE)) != NULL ) {
+ mode = getLUTvalue_HALtoOMX(str, ExpLUT);
+ if ( mParameters3A.Exposure != mode ) {
+ // If either the new or the old exposure mode is manual set also
+ // the SetManualExposure flag to call setManualExposureVal where
+ // the auto gain and exposure flags are configured
+ if ( mParameters3A.Exposure == OMX_ExposureControlOff ||
+ mode == OMX_ExposureControlOff ) {
+ mPending3Asettings |= SetManualExposure;
+ }
+ mParameters3A.Exposure = mode;
+ CAMHAL_LOGDB("Exposure mode %d", mode);
+ if ( 0 <= mParameters3A.Exposure ) {
+ mPending3Asettings |= SetExpMode;
}
}
-
+ if ( mode == OMX_ExposureControlOff ) {
+ mode = params.getInt(TICameraParameters::KEY_MANUAL_EXPOSURE);
+ if ( mParameters3A.ManualExposure != mode ) {
+ mParameters3A.ManualExposure = mode;
+ CAMHAL_LOGDB("Manual Exposure = %d", mode);
+ mPending3Asettings |= SetManualExposure;
+ }
+ mode = params.getInt(TICameraParameters::KEY_MANUAL_EXPOSURE_RIGHT);
+ if ( mParameters3A.ManualExposureRight != mode ) {
+ mParameters3A.ManualExposureRight = mode;
+ CAMHAL_LOGDB("Manual Exposure right = %d", mode);
+ mPending3Asettings |= SetManualExposure;
+ }
+ mode = params.getInt(TICameraParameters::KEY_MANUAL_GAIN_ISO);
+ if ( mParameters3A.ManualGain != mode ) {
+ mParameters3A.ManualGain = mode;
+ CAMHAL_LOGDB("Manual Gain = %d", mode);
+ mPending3Asettings |= SetManualExposure;
+ }
+ mode = params.getInt(TICameraParameters::KEY_MANUAL_GAIN_ISO_RIGHT);
+ if ( mParameters3A.ManualGainRight != mode ) {
+ mParameters3A.ManualGainRight = mode;
+ CAMHAL_LOGDB("Manual Gain right = %d", mode);
+ mPending3Asettings |= SetManualExposure;
+ }
+ }
+ }
#endif
- str = params.get(CameraParameters::KEY_WHITE_BALANCE);
+ str = params.get(android::CameraParameters::KEY_WHITE_BALANCE);
mode = getLUTvalue_HALtoOMX( str, WBalLUT);
if (mFirstTimeInit || ((str != NULL) && (mode != mParameters3A.WhiteBallance)))
{
@@ -141,7 +166,6 @@ status_t OMXCameraAdapter::setParameters3A(const CameraParameters &params,
}
#ifdef OMAP_ENHANCEMENT
-
varint = params.getInt(TICameraParameters::KEY_CONTRAST);
if ( 0 <= varint )
{
@@ -189,10 +213,9 @@ status_t OMXCameraAdapter::setParameters3A(const CameraParameters &params,
mPending3Asettings |= SetBrightness;
}
}
-
#endif
- str = params.get(CameraParameters::KEY_ANTIBANDING);
+ str = params.get(android::CameraParameters::KEY_ANTIBANDING);
mode = getLUTvalue_HALtoOMX(str,FlickerLUT);
if ( mFirstTimeInit || ( ( str != NULL ) && ( mParameters3A.Flicker != mode ) ))
{
@@ -205,7 +228,6 @@ status_t OMXCameraAdapter::setParameters3A(const CameraParameters &params,
}
#ifdef OMAP_ENHANCEMENT
-
str = params.get(TICameraParameters::KEY_ISO);
mode = getLUTvalue_HALtoOMX(str, IsoLUT);
CAMHAL_LOGVB("ISO mode arrived in HAL : %s", str);
@@ -218,10 +240,9 @@ status_t OMXCameraAdapter::setParameters3A(const CameraParameters &params,
mPending3Asettings |= SetISO;
}
}
-
#endif
- str = params.get(CameraParameters::KEY_FOCUS_MODE);
+ str = params.get(android::CameraParameters::KEY_FOCUS_MODE);
mode = getLUTvalue_HALtoOMX(str, FocusLUT);
if ( (mFirstTimeInit || ((str != NULL) && (mParameters3A.Focus != mode))))
{
@@ -237,19 +258,15 @@ status_t OMXCameraAdapter::setParameters3A(const CameraParameters &params,
CAMHAL_LOGDB("Focus %x", mParameters3A.Focus);
}
- str = params.get(CameraParameters::KEY_EXPOSURE_COMPENSATION);
- varint = params.getInt(CameraParameters::KEY_EXPOSURE_COMPENSATION);
- if ( mFirstTimeInit ||
- (( str != NULL ) &&
- (mParameters3A.EVCompensation != varint )))
- {
+ str = params.get(android::CameraParameters::KEY_EXPOSURE_COMPENSATION);
+ varint = params.getInt(android::CameraParameters::KEY_EXPOSURE_COMPENSATION);
+ if ( mFirstTimeInit || (str && (mParameters3A.EVCompensation != varint))) {
CAMHAL_LOGDB("Setting EV Compensation to %d", varint);
-
mParameters3A.EVCompensation = varint;
mPending3Asettings |= SetEVCompensation;
}
- str = params.get(CameraParameters::KEY_FLASH_MODE);
+ str = params.get(android::CameraParameters::KEY_FLASH_MODE);
mode = getLUTvalue_HALtoOMX( str, FlashLUT);
if ( mFirstTimeInit || (( str != NULL ) && ( mParameters3A.FlashMode != mode )) )
{
@@ -260,14 +277,14 @@ status_t OMXCameraAdapter::setParameters3A(const CameraParameters &params,
}
else
{
- mParameters3A.FlashMode = OMX_Manual;
+ mParameters3A.FlashMode = OMX_IMAGE_FlashControlAuto;
}
}
CAMHAL_LOGVB("Flash Setting %s", str);
CAMHAL_LOGVB("FlashMode %d", mParameters3A.FlashMode);
- str = params.get(CameraParameters::KEY_EFFECT);
+ str = params.get(android::CameraParameters::KEY_EFFECT);
mode = getLUTvalue_HALtoOMX( str, EffLUT);
if ( mFirstTimeInit || (( str != NULL ) && ( mParameters3A.Effect != mode )) )
{
@@ -279,13 +296,13 @@ status_t OMXCameraAdapter::setParameters3A(const CameraParameters &params,
}
}
- str = params.get(CameraParameters::KEY_AUTO_EXPOSURE_LOCK_SUPPORTED);
- if ( (str != NULL) && (!strcmp(str, "true")) )
+ str = params.get(android::CameraParameters::KEY_AUTO_EXPOSURE_LOCK_SUPPORTED);
+ if ( (str != NULL) && (!strcmp(str, android::CameraParameters::TRUE)) )
{
OMX_BOOL lock = OMX_FALSE;
mUserSetExpLock = OMX_FALSE;
- str = params.get(CameraParameters::KEY_AUTO_EXPOSURE_LOCK);
- if (str && ((strcmp(str, "true")) == 0))
+ str = params.get(android::CameraParameters::KEY_AUTO_EXPOSURE_LOCK);
+ if (str && ((strcmp(str, android::CameraParameters::TRUE)) == 0))
{
CAMHAL_LOGVA("Locking Exposure");
lock = OMX_TRUE;
@@ -304,13 +321,13 @@ status_t OMXCameraAdapter::setParameters3A(const CameraParameters &params,
}
}
- str = params.get(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK_SUPPORTED);
- if ( (str != NULL) && (!strcmp(str, "true")) )
+ str = params.get(android::CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK_SUPPORTED);
+ if ( (str != NULL) && (!strcmp(str, android::CameraParameters::TRUE)) )
{
OMX_BOOL lock = OMX_FALSE;
mUserSetWbLock = OMX_FALSE;
- str = params.get(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK);
- if (str && ((strcmp(str, "true")) == 0))
+ str = params.get(android::CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK);
+ if (str && ((strcmp(str, android::CameraParameters::TRUE)) == 0))
{
CAMHAL_LOGVA("Locking WhiteBalance");
lock = OMX_TRUE;
@@ -329,24 +346,24 @@ status_t OMXCameraAdapter::setParameters3A(const CameraParameters &params,
}
str = params.get(TICameraParameters::KEY_AUTO_FOCUS_LOCK);
- if (str && (strcmp(str, TRUE) == 0) && (mParameters3A.FocusLock != OMX_TRUE)) {
+ if (str && (strcmp(str, android::CameraParameters::TRUE) == 0) && (mParameters3A.FocusLock != OMX_TRUE)) {
CAMHAL_LOGVA("Locking Focus");
mParameters3A.FocusLock = OMX_TRUE;
setFocusLock(mParameters3A);
- } else if (str && (strcmp(str, FALSE) == 0) && (mParameters3A.FocusLock != OMX_FALSE)) {
+ } else if (str && (strcmp(str, android::CameraParameters::FALSE) == 0) && (mParameters3A.FocusLock != OMX_FALSE)) {
CAMHAL_LOGVA("UnLocking Focus");
mParameters3A.FocusLock = OMX_FALSE;
setFocusLock(mParameters3A);
}
- str = params.get(CameraParameters::KEY_METERING_AREAS);
+ str = params.get(android::CameraParameters::KEY_METERING_AREAS);
if ( (str != NULL) ) {
size_t MAX_METERING_AREAS;
- Vector< sp<CameraArea> > tempAreas;
+ android::Vector<android::sp<CameraArea> > tempAreas;
- MAX_METERING_AREAS = atoi(params.get(CameraParameters::KEY_MAX_NUM_METERING_AREAS));
+ MAX_METERING_AREAS = atoi(params.get(android::CameraParameters::KEY_MAX_NUM_METERING_AREAS));
- Mutex::Autolock lock(mMeteringAreasLock);
+ android::AutoMutex lock(mMeteringAreasLock);
ret = CameraArea::parseAreas(str, ( strlen(str) + 1 ), tempAreas);
@@ -359,7 +376,7 @@ status_t OMXCameraAdapter::setParameters3A(const CameraParameters &params,
if ( MAX_METERING_AREAS >= mMeteringAreas.size() ) {
CAMHAL_LOGDB("Setting Metering Areas %s",
- params.get(CameraParameters::KEY_METERING_AREAS));
+ params.get(android::CameraParameters::KEY_METERING_AREAS));
mPending3Asettings |= SetMeteringAreas;
} else {
@@ -370,11 +387,129 @@ status_t OMXCameraAdapter::setParameters3A(const CameraParameters &params,
}
}
+// TI extensions for enable/disable algos
+ declareParameter3ABool(params, TICameraParameters::KEY_ALGO_EXTERNAL_GAMMA,
+ mParameters3A.AlgoExternalGamma, SetAlgoExternalGamma, "External Gamma");
+ declareParameter3ABool(params, TICameraParameters::KEY_ALGO_NSF1,
+ mParameters3A.AlgoNSF1, SetAlgoNSF1, "NSF1");
+ declareParameter3ABool(params, TICameraParameters::KEY_ALGO_NSF2,
+ mParameters3A.AlgoNSF2, SetAlgoNSF2, "NSF2");
+ declareParameter3ABool(params, TICameraParameters::KEY_ALGO_SHARPENING,
+ mParameters3A.AlgoSharpening, SetAlgoSharpening, "Sharpening");
+ declareParameter3ABool(params, TICameraParameters::KEY_ALGO_THREELINCOLORMAP,
+ mParameters3A.AlgoThreeLinColorMap, SetAlgoThreeLinColorMap, "ThreeLinColorMap");
+ declareParameter3ABool(params, TICameraParameters::KEY_ALGO_GIC, mParameters3A.AlgoGIC, SetAlgoGIC, "GIC");
+
+ // Gamma table
+ str = params.get(TICameraParameters::KEY_GAMMA_TABLE);
+ updateGammaTable(str);
+
LOG_FUNCTION_NAME_EXIT;
return ret;
}
+void OMXCameraAdapter::updateGammaTable(const char* gamma)
+{
+ unsigned int plane = 0;
+ unsigned int i = 0;
+ bool gamma_changed = false;
+ const char *a = gamma;
+ OMX_TI_GAMMATABLE_ELEM_TYPE *elem[3] = { mParameters3A.mGammaTable.pR,
+ mParameters3A.mGammaTable.pG,
+ mParameters3A.mGammaTable.pB};
+
+ if (!gamma) return;
+
+ mPending3Asettings &= ~SetGammaTable;
+ memset(&mParameters3A.mGammaTable, 0, sizeof(mParameters3A.mGammaTable));
+ for (plane = 0; plane < 3; plane++) {
+ a = strchr(a, '(');
+ if (NULL != a) {
+ a++;
+ for (i = 0; i < OMX_TI_GAMMATABLE_SIZE; i++) {
+ char *b;
+ int newVal;
+ newVal = strtod(a, &b);
+ if (newVal != elem[plane][i].nOffset) {
+ elem[plane][i].nOffset = newVal;
+ gamma_changed = true;
+ }
+ a = strpbrk(b, ",:)");
+ if ((NULL != a) && (':' == *a)) {
+ a++;
+ } else if ((NULL != a) && (',' == *a)){
+ a++;
+ break;
+ } else if ((NULL != a) && (')' == *a)){
+ a++;
+ break;
+ } else {
+ CAMHAL_LOGE("Error while parsing values");
+ gamma_changed = false;
+ break;
+ }
+ newVal = strtod(a, &b);
+ if (newVal != elem[plane][i].nSlope) {
+ elem[plane][i].nSlope = newVal;
+ gamma_changed = true;
+ }
+ a = strpbrk(b, ",:)");
+ if ((NULL != a) && (',' == *a)) {
+ a++;
+ } else if ((NULL != a) && (':' == *a)){
+ a++;
+ break;
+ } else if ((NULL != a) && (')' == *a)){
+ a++;
+ break;
+ } else {
+ CAMHAL_LOGE("Error while parsing values");
+ gamma_changed = false;
+ break;
+ }
+ }
+ if ((OMX_TI_GAMMATABLE_SIZE - 1) != i) {
+ CAMHAL_LOGE("Error while parsing values (incorrect count %u)", i);
+ gamma_changed = false;
+ break;
+ }
+ } else {
+ CAMHAL_LOGE("Error while parsing planes (%u)", plane);
+ gamma_changed = false;
+ break;
+ }
+ }
+
+ if (gamma_changed) {
+ mPending3Asettings |= SetGammaTable;
+ }
+}
+
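The value parsed above is three parenthesized planes (R, G, B), each carrying OMX_TI_GAMMATABLE_SIZE colon-separated offset:slope pairs. A hedged sketch that builds a flat table in exactly that shape (the 0/256 numbers are placeholders, not recommended tuning):

    static android::String8 buildFlatGammaTableString()
    {
        android::String8 s;
        for (int plane = 0; plane < 3; plane++) {
            s.append("(");
            for (int i = 0; i < OMX_TI_GAMMATABLE_SIZE; i++) {
                s.appendFormat(i ? ",%d:%d" : "%d:%d", 0, 256);
            }
            s.append(")");
        }
        // The result would be set as TICameraParameters::KEY_GAMMA_TABLE and ends
        // up in updateGammaTable() above.
        return s;
    }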
+void OMXCameraAdapter::declareParameter3ABool(const android::CameraParameters &params, const char *key,
+ OMX_BOOL &current_setting, E3ASettingsFlags pending,
+ const char *msg)
+{
+ OMX_BOOL val = OMX_TRUE;
+ const char *str = params.get(key);
+
+ if (str && ((strcmp(str, android::CameraParameters::FALSE)) == 0))
+ {
+ CAMHAL_LOGVB("Disabling %s", msg);
+ val = OMX_FALSE;
+ }
+ else
+ {
+ CAMHAL_LOGVB("Enabling %s", msg);
+ }
+ if (current_setting != val)
+ {
+ current_setting = val;
+ CAMHAL_LOGDB("%s %s", msg, current_setting ? "enabled" : "disabled");
+ mPending3Asettings |= pending;
+ }
+}
+
int OMXCameraAdapter::getLUTvalue_HALtoOMX(const char * HalValue, LUTtype LUT)
{
int LUTsize = LUT.size;
@@ -396,30 +531,23 @@ const char* OMXCameraAdapter::getLUTvalue_OMXtoHAL(int OMXValue, LUTtype LUT)
return NULL;
}
-status_t OMXCameraAdapter::init3AParams(Gen3A_settings &Gen3A)
+int OMXCameraAdapter::getMultipleLUTvalue_OMXtoHAL(int OMXValue, LUTtype LUT, char * supported)
{
- LOG_FUNCTION_NAME;
-
- Gen3A.Effect = getLUTvalue_HALtoOMX(OMXCameraAdapter::DEFAULT_EFFECT, EffLUT);
- Gen3A.FlashMode = getLUTvalue_HALtoOMX(OMXCameraAdapter::DEFAULT_FLASH_MODE, FlashLUT);
- Gen3A.SceneMode = getLUTvalue_HALtoOMX(OMXCameraAdapter::DEFAULT_SCENE_MODE, SceneLUT);
- Gen3A.EVCompensation = atoi(OMXCameraAdapter::DEFAULT_EV_COMPENSATION);
- Gen3A.Focus = getLUTvalue_HALtoOMX(OMXCameraAdapter::DEFAULT_FOCUS_MODE, FocusLUT);
- Gen3A.ISO = getLUTvalue_HALtoOMX(OMXCameraAdapter::DEFAULT_ISO_MODE, IsoLUT);
- Gen3A.Flicker = getLUTvalue_HALtoOMX(OMXCameraAdapter::DEFAULT_ANTIBANDING, FlickerLUT);
- Gen3A.Brightness = atoi(OMXCameraAdapter::DEFAULT_BRIGHTNESS);
- Gen3A.Saturation = atoi(OMXCameraAdapter::DEFAULT_SATURATION) - SATURATION_OFFSET;
- Gen3A.Sharpness = atoi(OMXCameraAdapter::DEFAULT_SHARPNESS) - SHARPNESS_OFFSET;
- Gen3A.Contrast = atoi(OMXCameraAdapter::DEFAULT_CONTRAST) - CONTRAST_OFFSET;
- Gen3A.WhiteBallance = getLUTvalue_HALtoOMX(OMXCameraAdapter::DEFAULT_WB, WBalLUT);
- Gen3A.Exposure = getLUTvalue_HALtoOMX(OMXCameraAdapter::DEFAULT_EXPOSURE_MODE, ExpLUT);
- Gen3A.ExposureLock = OMX_FALSE;
- Gen3A.FocusLock = OMX_FALSE;
- Gen3A.WhiteBalanceLock = OMX_FALSE;
-
- LOG_FUNCTION_NAME_EXIT;
+ int num = 0;
+ int remaining_size;
+ int LUTsize = LUT.size;
+ for(int i = 0; i < LUTsize; i++)
+ if( LUT.Table[i].omxDefinition == OMXValue )
+ {
+ num++;
+ if (supported[0] != '\0') {
+ strncat(supported, PARAM_SEP, 1);
+ }
+ remaining_size = ((((int)MAX_PROP_VALUE_LENGTH - 1 - (int)strlen(supported)) < 0) ? 0 : (MAX_PROP_VALUE_LENGTH - 1 - strlen(supported)));
+ strncat(supported, LUT.Table[i].userDefinition, remaining_size);
+ }
- return NO_ERROR;
+ return num;
}
status_t OMXCameraAdapter::setExposureMode(Gen3A_settings& Gen3A)
@@ -453,7 +581,7 @@ status_t OMXCameraAdapter::setExposureMode(Gen3A_settings& Gen3A)
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
static bool isFlashDisabled() {
@@ -470,14 +598,80 @@ static bool isFlashDisabled() {
char value[PROPERTY_VALUE_MAX];
if (property_get("camera.flash_off", value, NULL) &&
- (!strcasecmp(value, "true") || !strcasecmp(value, "1"))) {
- ALOGW("flash is disabled for testing purpose");
+ (!strcasecmp(value, android::CameraParameters::TRUE) || !strcasecmp(value, "1"))) {
+ CAMHAL_LOGW("flash is disabled for testing purpose");
return true;
}
return false;
}
+status_t OMXCameraAdapter::setManualExposureVal(Gen3A_settings& Gen3A) {
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_CONFIG_EXPOSUREVALUETYPE expVal;
+ OMX_TI_CONFIG_EXPOSUREVALUERIGHTTYPE expValRight;
+
+ LOG_FUNCTION_NAME;
+
+ if ( OMX_StateInvalid == mComponentState ) {
+ CAMHAL_LOGEA("OMX component is in invalid state");
+ return NO_INIT;
+ }
+
+ OMX_INIT_STRUCT_PTR (&expVal, OMX_CONFIG_EXPOSUREVALUETYPE);
+ OMX_INIT_STRUCT_PTR (&expValRight, OMX_TI_CONFIG_EXPOSUREVALUERIGHTTYPE);
+ expVal.nPortIndex = OMX_ALL;
+ expValRight.nPortIndex = OMX_ALL;
+
+ eError = OMX_GetConfig(mCameraAdapterParameters.mHandleComp,
+ OMX_IndexConfigCommonExposureValue,
+ &expVal);
+ if ( OMX_ErrorNone == eError ) {
+ eError = OMX_GetConfig(mCameraAdapterParameters.mHandleComp,
+ (OMX_INDEXTYPE) OMX_TI_IndexConfigRightExposureValue,
+ &expValRight);
+ }
+ if ( OMX_ErrorNone != eError ) {
+ CAMHAL_LOGEB("OMX_GetConfig error 0x%x (manual exposure values)", eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
+ }
+
+ if ( Gen3A.Exposure != OMX_ExposureControlOff ) {
+ expVal.bAutoShutterSpeed = OMX_TRUE;
+ expVal.bAutoSensitivity = OMX_TRUE;
+ } else {
+ expVal.bAutoShutterSpeed = OMX_FALSE;
+ expVal.nShutterSpeedMsec = Gen3A.ManualExposure;
+ expValRight.nShutterSpeedMsec = Gen3A.ManualExposureRight;
+ if ( Gen3A.ManualGain <= 0 || Gen3A.ManualGainRight <= 0 ) {
+ expVal.bAutoSensitivity = OMX_TRUE;
+ } else {
+ expVal.bAutoSensitivity = OMX_FALSE;
+ expVal.nSensitivity = Gen3A.ManualGain;
+ expValRight.nSensitivity = Gen3A.ManualGainRight;
+ }
+ }
+
+ eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+ OMX_IndexConfigCommonExposureValue,
+ &expVal);
+ if ( OMX_ErrorNone == eError ) {
+ eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+ (OMX_INDEXTYPE) OMX_TI_IndexConfigRightExposureValue,
+ &expValRight);
+ }
+
+ if ( OMX_ErrorNone != eError ) {
+ CAMHAL_LOGEB("Error 0x%x while configuring manual exposure values", eError);
+ } else {
+ CAMHAL_LOGDA("Camera manual exposure values configured successfully");
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return Utils::ErrorUtils::omxToAndroidError(eError);
+}
+
status_t OMXCameraAdapter::setFlashMode(Gen3A_settings& Gen3A)
{
status_t ret = NO_ERROR;
@@ -544,7 +738,7 @@ status_t OMXCameraAdapter::setFlashMode(Gen3A_settings& Gen3A)
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::getFlashMode(Gen3A_settings& Gen3A)
@@ -576,7 +770,7 @@ status_t OMXCameraAdapter::getFlashMode(Gen3A_settings& Gen3A)
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::setFocusMode(Gen3A_settings& Gen3A)
@@ -685,7 +879,7 @@ status_t OMXCameraAdapter::setFocusMode(Gen3A_settings& Gen3A)
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::getFocusMode(Gen3A_settings& Gen3A)
@@ -716,7 +910,7 @@ status_t OMXCameraAdapter::getFocusMode(Gen3A_settings& Gen3A)
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::setScene(Gen3A_settings& Gen3A)
@@ -760,7 +954,7 @@ status_t OMXCameraAdapter::setScene(Gen3A_settings& Gen3A)
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::setEVCompensation(Gen3A_settings& Gen3A)
@@ -804,7 +998,7 @@ status_t OMXCameraAdapter::setEVCompensation(Gen3A_settings& Gen3A)
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::getEVCompensation(Gen3A_settings& Gen3A)
@@ -835,7 +1029,7 @@ status_t OMXCameraAdapter::getEVCompensation(Gen3A_settings& Gen3A)
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::setWBMode(Gen3A_settings& Gen3A)
@@ -855,21 +1049,9 @@ status_t OMXCameraAdapter::setWBMode(Gen3A_settings& Gen3A)
wb.nPortIndex = mCameraAdapterParameters.mPrevPortIndex;
wb.eWhiteBalControl = ( OMX_WHITEBALCONTROLTYPE ) Gen3A.WhiteBallance;
- if ( WB_FACE_PRIORITY == Gen3A.WhiteBallance )
- {
- //Disable Region priority and enable Face priority
- setAlgoPriority(REGION_PRIORITY, WHITE_BALANCE_ALGO, false);
- setAlgoPriority(FACE_PRIORITY, WHITE_BALANCE_ALGO, true);
-
- //Then set the mode to auto
- wb.eWhiteBalControl = OMX_WhiteBalControlAuto;
- }
- else
- {
- //Disable Face and Region priority
- setAlgoPriority(FACE_PRIORITY, WHITE_BALANCE_ALGO, false);
- setAlgoPriority(REGION_PRIORITY, WHITE_BALANCE_ALGO, false);
- }
+ // disable face and region priorities
+ setAlgoPriority(FACE_PRIORITY, WHITE_BALANCE_ALGO, false);
+ setAlgoPriority(REGION_PRIORITY, WHITE_BALANCE_ALGO, false);
eError = OMX_SetConfig( mCameraAdapterParameters.mHandleComp,
OMX_IndexConfigCommonWhiteBalance,
@@ -956,7 +1138,7 @@ status_t OMXCameraAdapter::setFlicker(Gen3A_settings& Gen3A)
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::setBrightness(Gen3A_settings& Gen3A)
@@ -993,7 +1175,7 @@ status_t OMXCameraAdapter::setBrightness(Gen3A_settings& Gen3A)
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::setContrast(Gen3A_settings& Gen3A)
@@ -1076,7 +1258,7 @@ status_t OMXCameraAdapter::setSharpness(Gen3A_settings& Gen3A)
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::getSharpness(Gen3A_settings& Gen3A)
@@ -1107,7 +1289,7 @@ status_t OMXCameraAdapter::getSharpness(Gen3A_settings& Gen3A)
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::setSaturation(Gen3A_settings& Gen3A)
@@ -1144,7 +1326,7 @@ status_t OMXCameraAdapter::setSaturation(Gen3A_settings& Gen3A)
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::getSaturation(Gen3A_settings& Gen3A)
@@ -1175,13 +1357,14 @@ status_t OMXCameraAdapter::getSaturation(Gen3A_settings& Gen3A)
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::setISO(Gen3A_settings& Gen3A)
{
OMX_ERRORTYPE eError = OMX_ErrorNone;
OMX_CONFIG_EXPOSUREVALUETYPE expValues;
+ OMX_TI_CONFIG_EXPOSUREVALUERIGHTTYPE expValRight;
LOG_FUNCTION_NAME;
@@ -1191,41 +1374,60 @@ status_t OMXCameraAdapter::setISO(Gen3A_settings& Gen3A)
return NO_INIT;
}
+ // In manual exposure mode, gain is applied from setManualExposureVal()
+ if ( Gen3A.Exposure == OMX_ExposureControlOff ) {
+ return NO_ERROR;
+ }
+
OMX_INIT_STRUCT_PTR (&expValues, OMX_CONFIG_EXPOSUREVALUETYPE);
+ OMX_INIT_STRUCT_PTR (&expValRight, OMX_TI_CONFIG_EXPOSUREVALUERIGHTTYPE);
expValues.nPortIndex = mCameraAdapterParameters.mPrevPortIndex;
+ expValRight.nPortIndex = mCameraAdapterParameters.mPrevPortIndex;
- OMX_GetConfig( mCameraAdapterParameters.mHandleComp,
- OMX_IndexConfigCommonExposureValue,
- &expValues);
+ eError = OMX_GetConfig( mCameraAdapterParameters.mHandleComp,
+ OMX_IndexConfigCommonExposureValue,
+ &expValues);
- if( 0 == Gen3A.ISO )
- {
+ if ( OMX_ErrorNone == eError ) {
+ eError = OMX_GetConfig(mCameraAdapterParameters.mHandleComp,
+ (OMX_INDEXTYPE) OMX_TI_IndexConfigRightExposureValue,
+ &expValRight);
+ }
+
+ if ( OMX_ErrorNone != eError ) {
+ CAMHAL_LOGEB("OMX_GetConfig error 0x%x (manual exposure values)", eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
+ }
+
+ if( 0 == Gen3A.ISO ) {
expValues.bAutoSensitivity = OMX_TRUE;
- }
- else
- {
+ } else {
expValues.bAutoSensitivity = OMX_FALSE;
expValues.nSensitivity = Gen3A.ISO;
- }
+ expValRight.nSensitivity = expValues.nSensitivity;
+ }
eError = OMX_SetConfig( mCameraAdapterParameters.mHandleComp,
- OMX_IndexConfigCommonExposureValue,
- &expValues);
- if ( OMX_ErrorNone != eError )
- {
+ OMX_IndexConfigCommonExposureValue,
+ &expValues);
+
+ if ( OMX_ErrorNone == eError ) {
+ eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+ (OMX_INDEXTYPE) OMX_TI_IndexConfigRightExposureValue,
+ &expValRight);
+ }
+ if ( OMX_ErrorNone != eError ) {
CAMHAL_LOGEB("Error while configuring ISO 0x%x error = 0x%x",
( unsigned int ) expValues.nSensitivity,
eError);
- }
- else
- {
+ } else {
CAMHAL_LOGDB("ISO 0x%x configured successfully",
( unsigned int ) expValues.nSensitivity);
- }
+ }
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::getISO(Gen3A_settings& Gen3A)
@@ -1256,7 +1458,7 @@ status_t OMXCameraAdapter::getISO(Gen3A_settings& Gen3A)
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::setEffect(Gen3A_settings& Gen3A)
@@ -1293,7 +1495,7 @@ status_t OMXCameraAdapter::setEffect(Gen3A_settings& Gen3A)
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::setWhiteBalanceLock(Gen3A_settings& Gen3A)
@@ -1325,7 +1527,7 @@ status_t OMXCameraAdapter::setWhiteBalanceLock(Gen3A_settings& Gen3A)
}
LOG_FUNCTION_NAME_EXIT
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::setExposureLock(Gen3A_settings& Gen3A)
@@ -1357,7 +1559,7 @@ status_t OMXCameraAdapter::setExposureLock(Gen3A_settings& Gen3A)
}
LOG_FUNCTION_NAME_EXIT
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::setFocusLock(Gen3A_settings& Gen3A)
@@ -1388,7 +1590,7 @@ status_t OMXCameraAdapter::setFocusLock(Gen3A_settings& Gen3A)
LOG_FUNCTION_NAME_EXIT
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::set3ALock(OMX_BOOL toggleExp, OMX_BOOL toggleWb, OMX_BOOL toggleFocus)
@@ -1422,7 +1624,6 @@ status_t OMXCameraAdapter::set3ALock(OMX_BOOL toggleExp, OMX_BOOL toggleWb, OMX_
}
else
{
- const char *lock_state_exp = toggleExp ? TRUE : FALSE;
CAMHAL_LOGDA("Exposure Lock GetConfig successfull");
/* Apply locks only when not applied already */
@@ -1431,7 +1632,6 @@ status_t OMXCameraAdapter::set3ALock(OMX_BOOL toggleExp, OMX_BOOL toggleWb, OMX_
setExposureLock(mParameters3A);
}
- mParams.set(CameraParameters::KEY_AUTO_EXPOSURE_LOCK, lock_state_exp);
}
OMX_INIT_STRUCT_PTR (&lock, OMX_IMAGE_CONFIG_LOCKTYPE);
@@ -1469,7 +1669,6 @@ status_t OMXCameraAdapter::set3ALock(OMX_BOOL toggleExp, OMX_BOOL toggleWb, OMX_
}
else
{
- const char *lock_state_wb = toggleWb ? TRUE : FALSE;
CAMHAL_LOGDA("WhiteBalance Lock GetConfig successfull");
/* Apply locks only when not applied already */
@@ -1478,10 +1677,9 @@ status_t OMXCameraAdapter::set3ALock(OMX_BOOL toggleExp, OMX_BOOL toggleWb, OMX_
setWhiteBalanceLock(mParameters3A);
}
- mParams.set(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK, lock_state_wb);
}
EXIT:
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::setMeteringAreas(Gen3A_settings& Gen3A)
@@ -1489,14 +1687,14 @@ status_t OMXCameraAdapter::setMeteringAreas(Gen3A_settings& Gen3A)
status_t ret = NO_ERROR;
OMX_ERRORTYPE eError = OMX_ErrorNone;
- OMX_ALGOAREASTYPE **meteringAreas;
+ CameraBuffer *bufferlist;
+ OMX_ALGOAREASTYPE *meteringAreas;
OMX_TI_CONFIG_SHAREDBUFFER sharedBuffer;
- MemoryManager memMgr;
int areasSize = 0;
LOG_FUNCTION_NAME
- Mutex::Autolock lock(mMeteringAreasLock);
+ android::AutoMutex lock(mMeteringAreasLock);
if ( OMX_StateInvalid == mComponentState )
{
@@ -1505,7 +1703,8 @@ status_t OMXCameraAdapter::setMeteringAreas(Gen3A_settings& Gen3A)
}
areasSize = ((sizeof(OMX_ALGOAREASTYPE)+4095)/4096)*4096;
- meteringAreas = (OMX_ALGOAREASTYPE**) memMgr.allocateBuffer(0, 0, NULL, areasSize, 1);
+ bufferlist = mMemMgr.allocateBufferList(0, 0, NULL, areasSize, 1);
+ meteringAreas = (OMX_ALGOAREASTYPE *)bufferlist[0].opaque;
OMXCameraPortParameters * mPreviewData = NULL;
mPreviewData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex];
@@ -1516,37 +1715,47 @@ status_t OMXCameraAdapter::setMeteringAreas(Gen3A_settings& Gen3A)
return -ENOMEM;
}
- OMX_INIT_STRUCT_PTR (meteringAreas[0], OMX_ALGOAREASTYPE);
+ OMX_INIT_STRUCT_PTR (meteringAreas, OMX_ALGOAREASTYPE);
- meteringAreas[0]->nPortIndex = OMX_ALL;
- meteringAreas[0]->nNumAreas = mMeteringAreas.size();
- meteringAreas[0]->nAlgoAreaPurpose = OMX_AlgoAreaExposure;
+ meteringAreas->nPortIndex = OMX_ALL;
+ meteringAreas->nNumAreas = mMeteringAreas.size();
+ meteringAreas->nAlgoAreaPurpose = OMX_AlgoAreaExposure;
for ( unsigned int n = 0; n < mMeteringAreas.size(); n++)
{
+ int widthDivisor = 1;
+ int heightDivisor = 1;
+
+ if (mPreviewData->mFrameLayoutType == OMX_TI_StereoFrameLayoutTopBottom) {
+ heightDivisor = 2;
+ }
+ if (mPreviewData->mFrameLayoutType == OMX_TI_StereoFrameLayoutLeftRight) {
+ widthDivisor = 2;
+ }
+
// transform the coordinates to 3A-type coordinates
- mMeteringAreas.itemAt(n)->transfrom((size_t)mPreviewData->mWidth,
- (size_t)mPreviewData->mHeight,
- (size_t&)meteringAreas[0]->tAlgoAreas[n].nTop,
- (size_t&)meteringAreas[0]->tAlgoAreas[n].nLeft,
- (size_t&)meteringAreas[0]->tAlgoAreas[n].nWidth,
- (size_t&)meteringAreas[0]->tAlgoAreas[n].nHeight);
-
- meteringAreas[0]->tAlgoAreas[n].nLeft =
- ( meteringAreas[0]->tAlgoAreas[n].nLeft * METERING_AREAS_RANGE ) / mPreviewData->mWidth;
- meteringAreas[0]->tAlgoAreas[n].nTop =
- ( meteringAreas[0]->tAlgoAreas[n].nTop* METERING_AREAS_RANGE ) / mPreviewData->mHeight;
- meteringAreas[0]->tAlgoAreas[n].nWidth =
- ( meteringAreas[0]->tAlgoAreas[n].nWidth * METERING_AREAS_RANGE ) / mPreviewData->mWidth;
- meteringAreas[0]->tAlgoAreas[n].nHeight =
- ( meteringAreas[0]->tAlgoAreas[n].nHeight * METERING_AREAS_RANGE ) / mPreviewData->mHeight;
-
- meteringAreas[0]->tAlgoAreas[n].nPriority = mMeteringAreas.itemAt(n)->getWeight();
+ mMeteringAreas.itemAt(n)->transfrom((size_t)mPreviewData->mWidth/widthDivisor,
+ (size_t)mPreviewData->mHeight/heightDivisor,
+ (size_t&)meteringAreas->tAlgoAreas[n].nTop,
+ (size_t&)meteringAreas->tAlgoAreas[n].nLeft,
+ (size_t&)meteringAreas->tAlgoAreas[n].nWidth,
+ (size_t&)meteringAreas->tAlgoAreas[n].nHeight);
+
+ meteringAreas->tAlgoAreas[n].nLeft =
+ ( meteringAreas->tAlgoAreas[n].nLeft * METERING_AREAS_RANGE ) / mPreviewData->mWidth;
+ meteringAreas->tAlgoAreas[n].nTop =
+ ( meteringAreas->tAlgoAreas[n].nTop* METERING_AREAS_RANGE ) / mPreviewData->mHeight;
+ meteringAreas->tAlgoAreas[n].nWidth =
+ ( meteringAreas->tAlgoAreas[n].nWidth * METERING_AREAS_RANGE ) / mPreviewData->mWidth;
+ meteringAreas->tAlgoAreas[n].nHeight =
+ ( meteringAreas->tAlgoAreas[n].nHeight * METERING_AREAS_RANGE ) / mPreviewData->mHeight;
+
+ meteringAreas->tAlgoAreas[n].nPriority = mMeteringAreas.itemAt(n)->getWeight();
CAMHAL_LOGDB("Metering area %d : top = %d left = %d width = %d height = %d prio = %d",
- n, (int)meteringAreas[0]->tAlgoAreas[n].nTop, (int)meteringAreas[0]->tAlgoAreas[n].nLeft,
- (int)meteringAreas[0]->tAlgoAreas[n].nWidth, (int)meteringAreas[0]->tAlgoAreas[n].nHeight,
- (int)meteringAreas[0]->tAlgoAreas[n].nPriority);
+ n, (int)meteringAreas->tAlgoAreas[n].nTop, (int)meteringAreas->tAlgoAreas[n].nLeft,
+ (int)meteringAreas->tAlgoAreas[n].nWidth, (int)meteringAreas->tAlgoAreas[n].nHeight,
+ (int)meteringAreas->tAlgoAreas[n].nPriority);
}
@@ -1554,7 +1763,7 @@ status_t OMXCameraAdapter::setMeteringAreas(Gen3A_settings& Gen3A)
sharedBuffer.nPortIndex = OMX_ALL;
sharedBuffer.nSharedBuffSize = areasSize;
- sharedBuffer.pSharedBuff = (OMX_U8 *) meteringAreas[0];
+ sharedBuffer.pSharedBuff = (OMX_U8 *)camera_buffer_get_omx_ptr (&bufferlist[0]);
if ( NULL == sharedBuffer.pSharedBuff )
{
@@ -1577,15 +1786,178 @@ status_t OMXCameraAdapter::setMeteringAreas(Gen3A_settings& Gen3A)
}
EXIT:
- if (NULL != meteringAreas)
+ if (NULL != bufferlist)
{
- memMgr.freeBuffer((void*) meteringAreas);
- meteringAreas = NULL;
+ mMemMgr.freeBufferList(bufferlist);
}
return ret;
}
+//TI extensions for enable/disable algos
+status_t OMXCameraAdapter::setParameter3ABoolInvert(const OMX_INDEXTYPE omx_idx,
+ const OMX_BOOL data, const char *msg)
+{
+ OMX_BOOL inv_data;
+
+ if (OMX_TRUE == data)
+ {
+ inv_data = OMX_FALSE;
+ }
+ else if (OMX_FALSE == data)
+ {
+ inv_data = OMX_TRUE;
+ }
+ else
+ {
+ return BAD_VALUE;
+ }
+ return setParameter3ABool(omx_idx, inv_data, msg);
+}
+
+status_t OMXCameraAdapter::setParameter3ABool(const OMX_INDEXTYPE omx_idx,
+ const OMX_BOOL data, const char *msg)
+{
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_CONFIG_BOOLEANTYPE cfgdata;
+
+ LOG_FUNCTION_NAME
+
+ if ( OMX_StateInvalid == mComponentState )
+ {
+ CAMHAL_LOGEA("OMX component is in invalid state");
+ return NO_INIT;
+ }
+
+ OMX_INIT_STRUCT_PTR (&cfgdata, OMX_CONFIG_BOOLEANTYPE);
+ cfgdata.bEnabled = data;
+ eError = OMX_SetConfig( mCameraAdapterParameters.mHandleComp,
+ omx_idx,
+ &cfgdata);
+ if ( OMX_ErrorNone != eError )
+ {
+ CAMHAL_LOGEB("Error while configuring %s error = 0x%x", msg, eError);
+ }
+ else
+ {
+ CAMHAL_LOGDB("%s configured successfully %d ", msg, cfgdata.bEnabled);
+ }
+
+ LOG_FUNCTION_NAME_EXIT
+
+ return Utils::ErrorUtils::omxToAndroidError(eError);
+}
+
+status_t OMXCameraAdapter::setAlgoExternalGamma(Gen3A_settings& Gen3A)
+{
+ return setParameter3ABool((OMX_INDEXTYPE) OMX_TI_IndexConfigExternalGamma, Gen3A.AlgoExternalGamma, "External Gamma");
+}
+
+status_t OMXCameraAdapter::setAlgoNSF1(Gen3A_settings& Gen3A)
+{
+ return setParameter3ABoolInvert((OMX_INDEXTYPE) OMX_TI_IndexConfigDisableNSF1, Gen3A.AlgoNSF1, "NSF1");
+}
+
+status_t OMXCameraAdapter::setAlgoNSF2(Gen3A_settings& Gen3A)
+{
+ return setParameter3ABoolInvert((OMX_INDEXTYPE) OMX_TI_IndexConfigDisableNSF2, Gen3A.AlgoNSF2, "NSF2");
+}
+
+status_t OMXCameraAdapter::setAlgoSharpening(Gen3A_settings& Gen3A)
+{
+ return setParameter3ABoolInvert((OMX_INDEXTYPE) OMX_TI_IndexConfigDisableSharpening, Gen3A.AlgoSharpening, "Sharpening");
+}
+
+status_t OMXCameraAdapter::setAlgoThreeLinColorMap(Gen3A_settings& Gen3A)
+{
+ return setParameter3ABoolInvert((OMX_INDEXTYPE) OMX_TI_IndexConfigDisableThreeLinColorMap, Gen3A.AlgoThreeLinColorMap, "Color Conversion");
+}
+
+status_t OMXCameraAdapter::setAlgoGIC(Gen3A_settings& Gen3A)
+{
+ return setParameter3ABoolInvert((OMX_INDEXTYPE) OMX_TI_IndexConfigDisableGIC, Gen3A.AlgoGIC, "Green Imbalance Correction");
+}
+
+status_t OMXCameraAdapter::setGammaTable(Gen3A_settings& Gen3A)
+{
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ CameraBuffer *bufferlist = NULL;
+ OMX_TI_CONFIG_GAMMATABLE_TYPE *gammaTable = NULL;
+ OMX_TI_CONFIG_SHAREDBUFFER sharedBuffer;
+ int tblSize = 0;
+
+ LOG_FUNCTION_NAME;
+
+ if ( OMX_StateInvalid == mComponentState ) {
+ CAMHAL_LOGEA("OMX component is in invalid state");
+ ret = NO_INIT;
+ goto EXIT;
+ }
+
+ tblSize = ((sizeof(OMX_TI_CONFIG_GAMMATABLE_TYPE)+4095)/4096)*4096;
+ bufferlist = mMemMgr.allocateBufferList(0, 0, NULL, tblSize, 1);
+ if (NULL == bufferlist) {
+ CAMHAL_LOGEB("Error allocating buffer for gamma table");
+ ret = NO_MEMORY;
+ goto EXIT;
+ }
+ gammaTable = (OMX_TI_CONFIG_GAMMATABLE_TYPE *)bufferlist[0].mapped;
+ if (NULL == gammaTable) {
+ CAMHAL_LOGEB("Error allocating buffer for gamma table (wrong data pointer)");
+ ret = NO_MEMORY;
+ goto EXIT;
+ }
+
+ memcpy(gammaTable, &mParameters3A.mGammaTable, sizeof(OMX_TI_CONFIG_GAMMATABLE_TYPE));
+
+#ifdef CAMERAHAL_DEBUG
+ {
+ android::String8 DmpR;
+ android::String8 DmpG;
+ android::String8 DmpB;
+ for (unsigned int i=0; i<OMX_TI_GAMMATABLE_SIZE;i++) {
+ DmpR.appendFormat(" %d:%d;", (int)gammaTable->pR[i].nOffset, (int)gammaTable->pR[i].nSlope);
+ DmpG.appendFormat(" %d:%d;", (int)gammaTable->pG[i].nOffset, (int)gammaTable->pG[i].nSlope);
+ DmpB.appendFormat(" %d:%d;", (int)gammaTable->pB[i].nOffset, (int)gammaTable->pB[i].nSlope);
+ }
+ CAMHAL_LOGE("Gamma table R:%s", DmpR.string());
+ CAMHAL_LOGE("Gamma table G:%s", DmpG.string());
+ CAMHAL_LOGE("Gamma table B:%s", DmpB.string());
+ }
+#endif
+
+ OMX_INIT_STRUCT_PTR (&sharedBuffer, OMX_TI_CONFIG_SHAREDBUFFER);
+ sharedBuffer.nPortIndex = OMX_ALL;
+ sharedBuffer.nSharedBuffSize = sizeof(OMX_TI_CONFIG_GAMMATABLE_TYPE);
+ sharedBuffer.pSharedBuff = (OMX_U8 *)camera_buffer_get_omx_ptr (&bufferlist[0]);
+ if ( NULL == sharedBuffer.pSharedBuff ) {
+ CAMHAL_LOGEA("No resources to allocate OMX shared buffer");
+ ret = NO_MEMORY;
+ goto EXIT;
+ }
+
+ eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+ (OMX_INDEXTYPE) OMX_TI_IndexConfigGammaTable, &sharedBuffer);
+ if ( OMX_ErrorNone != eError ) {
+ CAMHAL_LOGEB("Error while setting Gamma Table configuration 0x%x", eError);
+ ret = BAD_VALUE;
+ goto EXIT;
+ } else {
+ CAMHAL_LOGDA("Gamma Table SetConfig successfull.");
+ }
+
+EXIT:
+
+ if (NULL != bufferlist) {
+ mMemMgr.freeBufferList(bufferlist);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
status_t OMXCameraAdapter::apply3Asettings( Gen3A_settings& Gen3A )
{
status_t ret = NO_ERROR;
@@ -1594,7 +1966,7 @@ status_t OMXCameraAdapter::apply3Asettings( Gen3A_settings& Gen3A )
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(m3ASettingsUpdateLock);
+ android::AutoMutex lock(m3ASettingsUpdateLock);
/*
* Scenes have a priority during the process
@@ -1692,6 +2064,11 @@ status_t OMXCameraAdapter::apply3Asettings( Gen3A_settings& Gen3A )
break;
}
+ case SetManualExposure: {
+ ret |= setManualExposureVal(Gen3A);
+ break;
+ }
+
case SetFlash:
{
ret |= setFlashMode(Gen3A);
@@ -1714,6 +2091,50 @@ status_t OMXCameraAdapter::apply3Asettings( Gen3A_settings& Gen3A )
ret |= setMeteringAreas(Gen3A);
}
break;
+
+ //TI extensions for enable/disable algos
+ case SetAlgoExternalGamma:
+ {
+ ret |= setAlgoExternalGamma(Gen3A);
+ }
+ break;
+
+ case SetAlgoNSF1:
+ {
+ ret |= setAlgoNSF1(Gen3A);
+ }
+ break;
+
+ case SetAlgoNSF2:
+ {
+ ret |= setAlgoNSF2(Gen3A);
+ }
+ break;
+
+ case SetAlgoSharpening:
+ {
+ ret |= setAlgoSharpening(Gen3A);
+ }
+ break;
+
+ case SetAlgoThreeLinColorMap:
+ {
+ ret |= setAlgoThreeLinColorMap(Gen3A);
+ }
+ break;
+
+ case SetAlgoGIC:
+ {
+ ret |= setAlgoGIC(Gen3A);
+ }
+ break;
+
+ case SetGammaTable:
+ {
+ ret |= setGammaTable(Gen3A);
+ }
+ break;
+
default:
CAMHAL_LOGEB("this setting (0x%x) is still not supported in CameraAdapter ",
currSett);
@@ -1728,4 +2149,5 @@ status_t OMXCameraAdapter::apply3Asettings( Gen3A_settings& Gen3A )
return ret;
}
-};
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/OMXCameraAdapter/OMXAlgo.cpp b/camera/OMXCameraAdapter/OMXAlgo.cpp
index 12b9058..6855d56 100644
--- a/camera/OMXCameraAdapter/OMXAlgo.cpp
+++ b/camera/OMXCameraAdapter/OMXAlgo.cpp
@@ -21,70 +21,85 @@
*
*/
-#undef LOG_TAG
-
-#define LOG_TAG "CameraHAL"
-
#include "CameraHal.h"
#include "OMXCameraAdapter.h"
#include "ErrorUtils.h"
#undef TRUE
-namespace android {
+namespace Ti {
+namespace Camera {
-status_t OMXCameraAdapter::setParametersAlgo(const CameraParameters &params,
+status_t OMXCameraAdapter::setParametersAlgo(const android::CameraParameters &params,
BaseCameraAdapter::AdapterState state)
{
status_t ret = NO_ERROR;
const char *valstr = NULL;
+ const char *valManualStr = NULL;
const char *oldstr = NULL;
+ OMXCameraPortParameters *cap;
+ BrightnessMode gbce = BRIGHTNESS_OFF;
+ BrightnessMode glbce = BRIGHTNESS_OFF;
LOG_FUNCTION_NAME;
CaptureMode capMode;
CAMHAL_LOGDB("Capture mode %s", params.get(TICameraParameters::KEY_CAP_MODE));
- if ( (valstr = params.get(TICameraParameters::KEY_CAP_MODE)) != NULL )
- {
- if (strcmp(valstr, (const char *) TICameraParameters::HIGH_PERFORMANCE_MODE) == 0)
- {
+ if ( (valstr = params.get(TICameraParameters::KEY_CAP_MODE)) != NULL ) {
+ if (strcmp(valstr, (const char *) TICameraParameters::HIGH_PERFORMANCE_MODE) == 0) {
capMode = OMXCameraAdapter::HIGH_SPEED;
- }
- else if (strcmp(valstr, (const char *) TICameraParameters::HIGH_QUALITY_MODE) == 0)
- {
+ mCapabilitiesOpMode = MODE_HIGH_SPEED;
+ } else if (strcmp(valstr, (const char *) TICameraParameters::EXPOSURE_BRACKETING) == 0) {
+ capMode = OMXCameraAdapter::HIGH_SPEED;
+ mCapabilitiesOpMode = MODE_HIGH_SPEED;
+ } else if (strcmp(valstr, (const char *) TICameraParameters::ZOOM_BRACKETING) == 0) {
+ capMode = OMXCameraAdapter::HIGH_SPEED;
+ mCapabilitiesOpMode = MODE_HIGH_SPEED;
+ } else if (strcmp(valstr, (const char *) TICameraParameters::HIGH_QUALITY_MODE) == 0) {
capMode = OMXCameraAdapter::HIGH_QUALITY;
- }
- else if (strcmp(valstr, (const char *) TICameraParameters::HIGH_QUALITY_ZSL_MODE) == 0)
- {
+ mCapabilitiesOpMode = MODE_HIGH_QUALITY;
+ } else if (strcmp(valstr, (const char *) TICameraParameters::HIGH_QUALITY_ZSL_MODE) == 0) {
capMode = OMXCameraAdapter::HIGH_QUALITY_ZSL;
- }
- else if (strcmp(valstr, (const char *) TICameraParameters::VIDEO_MODE) == 0)
- {
+ mCapabilitiesOpMode = MODE_ZEROSHUTTERLAG;
+ } else if (strcmp(valstr, (const char *) TICameraParameters::VIDEO_MODE) == 0) {
capMode = OMXCameraAdapter::VIDEO_MODE;
- }
- else
- {
+ mCapabilitiesOpMode = MODE_VIDEO;
+ } else if (strcmp(valstr, (const char *) TICameraParameters::VIDEO_MODE_HQ) == 0) {
+ capMode = OMXCameraAdapter::VIDEO_MODE_HQ;
+ mCapabilitiesOpMode = MODE_VIDEO_HIGH_QUALITY;
+ } else if (strcmp(valstr, (const char *) TICameraParameters::CP_CAM_MODE) == 0) {
+ capMode = OMXCameraAdapter::CP_CAM;
+ mCapabilitiesOpMode = MODE_CPCAM;
+ } else if (strcmp(valstr, (const char *) TICameraParameters::TEMP_BRACKETING) == 0) {
+ capMode = OMXCameraAdapter::HIGH_SPEED;
+ mCapabilitiesOpMode = MODE_HIGH_SPEED;
+ } else {
capMode = OMXCameraAdapter::HIGH_QUALITY;
- }
+ mCapabilitiesOpMode = MODE_HIGH_QUALITY;
}
- else
- {
- capMode = OMXCameraAdapter::HIGH_QUALITY_ZSL;
- }
+ } else {
+ capMode = OMXCameraAdapter::HIGH_QUALITY;
+ mCapabilitiesOpMode = MODE_HIGH_QUALITY;
+ }
- if ( mCapMode != capMode )
- {
+ if ( mSensorIndex == 2 ) {
+ mCapabilitiesOpMode = MODE_STEREO;
+ }
+
+ if ( mCapMode != capMode ) {
mCapMode = capMode;
mOMXStateSwitch = true;
- }
+ mPendingPreviewSettings |= SetCapMode;
+ }
CAMHAL_LOGDB("Capture Mode set %d", mCapMode);
/// Configure IPP, LDCNSF, GBCE and GLBCE only in HQ mode
IPPMode ipp;
if((mCapMode == OMXCameraAdapter::HIGH_QUALITY) || (mCapMode == OMXCameraAdapter::HIGH_QUALITY_ZSL)
- || (mCapMode == OMXCameraAdapter::VIDEO_MODE) )
+ || (mCapMode == OMXCameraAdapter::VIDEO_MODE)
+ || (mCapMode == OMXCameraAdapter::CP_CAM))
{
if ( (valstr = params.get(TICameraParameters::KEY_IPP)) != NULL )
{
@@ -116,95 +131,57 @@ status_t OMXCameraAdapter::setParametersAlgo(const CameraParameters &params,
CAMHAL_LOGVB("IPP Mode set %d", ipp);
- if (((valstr = params.get(TICameraParameters::KEY_GBCE)) != NULL) )
- {
- // Configure GBCE only if the setting has changed since last time
- oldstr = mParams.get(TICameraParameters::KEY_GBCE);
- bool cmpRes = true;
- if ( NULL != oldstr )
- {
- cmpRes = strcmp(valstr, oldstr) != 0;
- }
- else
- {
- cmpRes = true;
- }
-
+ if (((valstr = params.get(TICameraParameters::KEY_GBCE)) != NULL) ) {
+ if (strcmp(valstr, android::CameraParameters::TRUE ) == 0) {
+ gbce = BRIGHTNESS_ON;
+ } else {
+ gbce = BRIGHTNESS_OFF;
+ }
- if( cmpRes )
- {
- if (strcmp(valstr, ( const char * ) TICameraParameters::GBCE_ENABLE ) == 0)
- {
- setGBCE(OMXCameraAdapter::BRIGHTNESS_ON);
- }
- else if (strcmp(valstr, ( const char * ) TICameraParameters::GBCE_DISABLE ) == 0)
- {
- setGBCE(OMXCameraAdapter::BRIGHTNESS_OFF);
- }
- else
- {
- setGBCE(OMXCameraAdapter::BRIGHTNESS_OFF);
- }
- }
+ if ( gbce != mGBCE ) {
+ mGBCE = gbce;
+ setGBCE(mGBCE);
}
- else if(mParams.get(TICameraParameters::KEY_GBCE) || mFirstTimeInit)
- {
+
+ } else if(mFirstTimeInit) {
//Disable GBCE by default
setGBCE(OMXCameraAdapter::BRIGHTNESS_OFF);
- }
-
- if ( ( valstr = params.get(TICameraParameters::KEY_GLBCE) ) != NULL )
- {
- // Configure GLBCE only if the setting has changed since last time
+ }
- oldstr = mParams.get(TICameraParameters::KEY_GLBCE);
- bool cmpRes = true;
- if ( NULL != oldstr )
- {
- cmpRes = strcmp(valstr, oldstr) != 0;
- }
- else
- {
- cmpRes = true;
- }
+ if ( ( valstr = params.get(TICameraParameters::KEY_GLBCE) ) != NULL ) {
+ if (strcmp(valstr, android::CameraParameters::TRUE) == 0) {
+ glbce = BRIGHTNESS_ON;
+ } else {
+ glbce = BRIGHTNESS_OFF;
+ }
- if( cmpRes )
- {
- if (strcmp(valstr, ( const char * ) TICameraParameters::GLBCE_ENABLE ) == 0)
- {
- setGLBCE(OMXCameraAdapter::BRIGHTNESS_ON);
- }
- else if (strcmp(valstr, ( const char * ) TICameraParameters::GLBCE_DISABLE ) == 0)
- {
- setGLBCE(OMXCameraAdapter::BRIGHTNESS_OFF);
- }
- else
- {
- setGLBCE(OMXCameraAdapter::BRIGHTNESS_OFF);
- }
- }
+ if ( glbce != mGLBCE ) {
+ mGLBCE = glbce;
+ setGLBCE(mGLBCE);
}
- else if(mParams.get(TICameraParameters::KEY_GLBCE) || mFirstTimeInit)
- {
+
+ } else if(mFirstTimeInit) {
//Disable GLBCE by default
setGLBCE(OMXCameraAdapter::BRIGHTNESS_OFF);
- }
}
- else
- {
+
+ } else {
ipp = OMXCameraAdapter::IPP_NONE;
- }
+ }
if ( mIPP != ipp )
{
mIPP = ipp;
mOMXStateSwitch = true;
+ mPendingPreviewSettings |= SetLDC;
+ mPendingPreviewSettings |= SetNSF;
}
///Set VNF Configuration
bool vnfEnabled = false;
- if ( params.getInt(TICameraParameters::KEY_VNF) > 0 )
+ valstr = params.get(TICameraParameters::KEY_VNF);
+ if (valstr && strcmp(valstr, android::CameraParameters::TRUE) == 0)
{
CAMHAL_LOGDA("VNF Enabled");
vnfEnabled = true;
@@ -219,12 +196,13 @@ status_t OMXCameraAdapter::setParametersAlgo(const CameraParameters &params,
{
mVnfEnabled = vnfEnabled;
mOMXStateSwitch = true;
+ mPendingPreviewSettings |= SetVNF;
}
///Set VSTAB Configuration
bool vstabEnabled = false;
- valstr = params.get(CameraParameters::KEY_VIDEO_STABILIZATION);
- if (valstr && strcmp(valstr, CameraParameters::TRUE) == 0) {
+ valstr = params.get(android::CameraParameters::KEY_VIDEO_STABILIZATION);
+ if (valstr && strcmp(valstr, android::CameraParameters::TRUE) == 0) {
CAMHAL_LOGDA("VSTAB Enabled");
vstabEnabled = true;
}
@@ -238,6 +216,7 @@ status_t OMXCameraAdapter::setParametersAlgo(const CameraParameters &params,
{
mVstabEnabled = vstabEnabled;
mOMXStateSwitch = true;
+ mPendingPreviewSettings |= SetVSTAB;
}
//A work-around for a failing call to OMX flush buffers
@@ -250,35 +229,30 @@ status_t OMXCameraAdapter::setParametersAlgo(const CameraParameters &params,
#ifdef OMAP_ENHANCEMENT
//Set Auto Convergence Mode
- valstr = params.get((const char *) TICameraParameters::KEY_AUTOCONVERGENCE);
- if ( valstr != NULL )
- {
- // Set ManualConvergence default value
- OMX_S32 manualconvergence = -30;
- if ( strcmp (valstr, (const char *) TICameraParameters::AUTOCONVERGENCE_MODE_DISABLE) == 0 )
- {
- setAutoConvergence(OMX_TI_AutoConvergenceModeDisable, manualconvergence);
+ valstr = params.get((const char *) TICameraParameters::KEY_AUTOCONVERGENCE_MODE);
+ valManualStr = params.get(TICameraParameters::KEY_MANUAL_CONVERGENCE);
+
+ cap = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex];
+
+ if (cap->mFrameLayoutType != OMX_TI_StereoFrameLayout2D) {
+ if ((valstr != NULL) || (valManualStr != NULL)) {
+ setAutoConvergence(valstr, valManualStr, params);
+ if (valstr != NULL) {
+ CAMHAL_LOGDB("AutoConvergenceMode %s", valstr);
+ }
+ if (valManualStr != NULL) {
+ CAMHAL_LOGDB("Manual Convergence %s", valManualStr);
}
- else if ( strcmp (valstr, (const char *) TICameraParameters::AUTOCONVERGENCE_MODE_FRAME) == 0 )
- {
- setAutoConvergence(OMX_TI_AutoConvergenceModeFrame, manualconvergence);
- }
- else if ( strcmp (valstr, (const char *) TICameraParameters::AUTOCONVERGENCE_MODE_CENTER) == 0 )
- {
- setAutoConvergence(OMX_TI_AutoConvergenceModeCenter, manualconvergence);
- }
- else if ( strcmp (valstr, (const char *) TICameraParameters::AUTOCONVERGENCE_MODE_FFT) == 0 )
- {
- setAutoConvergence(OMX_TI_AutoConvergenceModeFocusFaceTouch, manualconvergence);
- }
- else if ( strcmp (valstr, (const char *) TICameraParameters::AUTOCONVERGENCE_MODE_MANUAL) == 0 )
- {
- manualconvergence = (OMX_S32)params.getInt(TICameraParameters::KEY_MANUALCONVERGENCE_VALUES);
- setAutoConvergence(OMX_TI_AutoConvergenceModeManual, manualconvergence);
- }
- CAMHAL_LOGVB("AutoConvergenceMode %s, value = %d", valstr, (int) manualconvergence);
}
+ //Set Mechanical Misalignment Correction
+ valstr = params.get(TICameraParameters::KEY_MECHANICAL_MISALIGNMENT_CORRECTION);
+ if ( valstr != NULL ) {
+ setMechanicalMisalignmentCorrection(strcmp(valstr, android::CameraParameters::TRUE) == 0);
+ CAMHAL_LOGDB("Mechanical Misalignment Correction %s", valstr);
+ }
+ }
+
#endif
LOG_FUNCTION_NAME_EXIT;
@@ -286,67 +260,123 @@ status_t OMXCameraAdapter::setParametersAlgo(const CameraParameters &params,
return ret;
}
-// Get AutoConvergence
-status_t OMXCameraAdapter::getAutoConvergence(OMX_TI_AUTOCONVERGENCEMODETYPE *pACMode,
- OMX_S32 *pManualConverence)
+// Set AutoConvergence
+status_t OMXCameraAdapter::setAutoConvergence(const char *pValstr, const char *pValManualstr, const android::CameraParameters &params)
{
status_t ret = NO_ERROR;
OMX_ERRORTYPE eError = OMX_ErrorNone;
OMX_TI_CONFIG_CONVERGENCETYPE ACParams;
-
- ACParams.nSize = sizeof(OMX_TI_CONFIG_CONVERGENCETYPE);
- ACParams.nVersion = mLocalVersionParam;
- ACParams.nPortIndex = OMX_ALL;
+ const char *str = NULL;
+ android::Vector<android::sp<CameraArea> > tempAreas;
+ int mode;
+ int changed = 0;
LOG_FUNCTION_NAME;
- eError = OMX_GetConfig(mCameraAdapterParameters.mHandleComp,
- (OMX_INDEXTYPE)OMX_TI_IndexConfigAutoConvergence,
- &ACParams);
- if ( eError != OMX_ErrorNone )
- {
- CAMHAL_LOGEB("Error while getting AutoConvergence 0x%x", eError);
- ret = -EINVAL;
+ if ( pValManualstr != NULL ) {
+ OMX_S32 manualConvergence = (OMX_S32)strtol(pValManualstr ,0 ,0);
+
+ if (mManualConv != manualConvergence) {
+ mManualConv = manualConvergence;
+ changed = 1;
}
- else
- {
- *pManualConverence = ACParams.nManualConverence;
- *pACMode = ACParams.eACMode;
- CAMHAL_LOGDA("AutoConvergence got successfully");
+ }
+
+ if ( pValstr != NULL ) {
+ mode = getLUTvalue_HALtoOMX(pValstr, mAutoConvergenceLUT);
+
+ if ( NAME_NOT_FOUND == mode ) {
+ CAMHAL_LOGEB("Wrong convergence mode: %s", pValstr);
+ LOG_FUNCTION_NAME_EXIT;
+ return mode;
}
- LOG_FUNCTION_NAME_EXIT;
+ if ( mAutoConv != static_cast<OMX_TI_AUTOCONVERGENCEMODETYPE> (mode) ) {
+ mAutoConv = static_cast<OMX_TI_AUTOCONVERGENCEMODETYPE> (mode);
+ changed = 1;
+ }
+ }
- return ret;
-}
+ if ( OMX_TI_AutoConvergenceModeFocusFaceTouch == mAutoConv ) {
+ android::AutoMutex lock(mTouchAreasLock);
-// Set AutoConvergence
-status_t OMXCameraAdapter::setAutoConvergence(OMX_TI_AUTOCONVERGENCEMODETYPE pACMode,
- OMX_S32 pManualConverence)
-{
- status_t ret = NO_ERROR;
- OMX_ERRORTYPE eError = OMX_ErrorNone;
- OMX_TI_CONFIG_CONVERGENCETYPE ACParams;
+ str = params.get(android::CameraParameters::KEY_METERING_AREAS);
- LOG_FUNCTION_NAME;
+ if ( NULL != str ) {
+ ret = CameraArea::parseAreas(str, ( strlen(str) + 1 ), tempAreas);
+ } else {
+ CAMHAL_LOGEB("Touch areas not received in %s",
+ android::CameraParameters::KEY_METERING_AREAS);
+ LOG_FUNCTION_NAME_EXIT;
+ return BAD_VALUE;
+ }
+
+ if ( CameraArea::areAreasDifferent(mTouchAreas, tempAreas) ) {
+ mTouchAreas.clear();
+ mTouchAreas = tempAreas;
+ changed = 1;
+ }
+ }
+
+ if (!changed) {
+ LOG_FUNCTION_NAME_EXIT;
+ return NO_ERROR;
+ }
- ACParams.nSize = sizeof(OMX_TI_CONFIG_CONVERGENCETYPE);
+ OMXCameraPortParameters * mPreviewData;
+ mPreviewData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex];
+
+ ACParams.nSize = (OMX_U32)sizeof(OMX_TI_CONFIG_CONVERGENCETYPE);
ACParams.nVersion = mLocalVersionParam;
- ACParams.nPortIndex = OMX_ALL;
- ACParams.nManualConverence = pManualConverence;
- ACParams.eACMode = pACMode;
+ ACParams.nPortIndex = mCameraAdapterParameters.mPrevPortIndex;
+
+ OMX_GetConfig(mCameraAdapterParameters.mHandleComp,
+ (OMX_INDEXTYPE)OMX_TI_IndexConfigAutoConvergence,
+ &ACParams);
+
+ ACParams.eACMode = mAutoConv;
+ ACParams.nManualConverence = mManualConv;
+
+ if (1 == mTouchAreas.size()) {
+ int widthDivisor = 1;
+ int heightDivisor = 1;
+
+ if (mPreviewData->mFrameLayoutType == OMX_TI_StereoFrameLayoutTopBottom) {
+ heightDivisor = 2;
+ }
+ if (mPreviewData->mFrameLayoutType == OMX_TI_StereoFrameLayoutLeftRight) {
+ widthDivisor = 2;
+ }
+
+ // transform the coordinates to 3A-type coordinates
+ mTouchAreas.itemAt(0)->transfrom((size_t)mPreviewData->mWidth/widthDivisor,
+ (size_t)mPreviewData->mHeight/heightDivisor,
+ (size_t&) ACParams.nACProcWinStartY,
+ (size_t&) ACParams.nACProcWinStartX,
+ (size_t&) ACParams.nACProcWinWidth,
+ (size_t&) ACParams.nACProcWinHeight);
+ }
+
+ CAMHAL_LOGDB("nSize %d", (int)ACParams.nSize);
+ CAMHAL_LOGDB("nPortIndex %d", (int)ACParams.nPortIndex);
+ CAMHAL_LOGDB("nManualConverence %d", (int)ACParams.nManualConverence);
+ CAMHAL_LOGDB("eACMode %d", (int)ACParams.eACMode);
+ CAMHAL_LOGDB("nACProcWinStartX %d", (int)ACParams.nACProcWinStartX);
+ CAMHAL_LOGDB("nACProcWinStartY %d", (int)ACParams.nACProcWinStartY);
+ CAMHAL_LOGDB("nACProcWinWidth %d", (int)ACParams.nACProcWinWidth);
+ CAMHAL_LOGDB("nACProcWinHeight %d", (int)ACParams.nACProcWinHeight);
+ CAMHAL_LOGDB("bACStatus %d", (int)ACParams.bACStatus);
+
eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
(OMX_INDEXTYPE)OMX_TI_IndexConfigAutoConvergence,
&ACParams);
- if ( eError != OMX_ErrorNone )
- {
+
+ if ( eError != OMX_ErrorNone ) {
CAMHAL_LOGEB("Error while setting AutoConvergence 0x%x", eError);
- ret = -EINVAL;
- }
- else
- {
+ ret = BAD_VALUE;
+ } else {
CAMHAL_LOGDA("AutoConvergence applied successfully");
- }
+ }
LOG_FUNCTION_NAME_EXIT;
@@ -603,78 +633,60 @@ status_t OMXCameraAdapter::setCaptureMode(OMXCameraAdapter::CaptureMode mode)
status_t ret = NO_ERROR;
OMX_ERRORTYPE eError = OMX_ErrorNone;
OMX_CONFIG_CAMOPERATINGMODETYPE camMode;
- OMX_TI_PARAM_ZSLHISTORYLENTYPE zslHistoryLen;
OMX_CONFIG_BOOLEANTYPE bCAC;
+ OMX_TI_CONFIG_SINGLEPREVIEWMODETYPE singlePrevMode;
LOG_FUNCTION_NAME;
- //ZSL have 4 buffers history by default
- OMX_INIT_STRUCT_PTR (&zslHistoryLen, OMX_TI_PARAM_ZSLHISTORYLENTYPE);
- zslHistoryLen.nHistoryLen = 4;
-
//CAC is disabled by default
OMX_INIT_STRUCT_PTR (&bCAC, OMX_CONFIG_BOOLEANTYPE);
+ OMX_INIT_STRUCT_PTR (&singlePrevMode, OMX_TI_CONFIG_SINGLEPREVIEWMODETYPE);
bCAC.bEnabled = OMX_FALSE;
if ( NO_ERROR == ret )
{
OMX_INIT_STRUCT_PTR (&camMode, OMX_CONFIG_CAMOPERATINGMODETYPE);
- if ( mSensorIndex == OMX_TI_StereoSensor )
- {
- CAMHAL_LOGDA("Camera mode: STEREO");
- camMode.eCamOperatingMode = OMX_CaptureStereoImageCapture;
+ if ( mSensorIndex == OMX_TI_StereoSensor ) {
+ if ( OMXCameraAdapter::VIDEO_MODE == mode ) {
+ CAMHAL_LOGDA("Camera mode: STEREO VIDEO");
+ camMode.eCamOperatingMode = OMX_TI_StereoVideo;
+ } else {
+ CAMHAL_LOGDA("Camera mode: STEREO");
+ camMode.eCamOperatingMode = OMX_CaptureStereoImageCapture;
}
- else if ( OMXCameraAdapter::HIGH_SPEED == mode )
- {
+ } else if ( OMXCameraAdapter::HIGH_SPEED == mode ) {
CAMHAL_LOGDA("Camera mode: HIGH SPEED");
camMode.eCamOperatingMode = OMX_CaptureImageHighSpeedTemporalBracketing;
- }
- else if( OMXCameraAdapter::HIGH_QUALITY == mode )
- {
+ } else if ( OMXCameraAdapter::CP_CAM == mode ) {
+ CAMHAL_LOGDA("Camera mode: CP CAM");
+ camMode.eCamOperatingMode = OMX_TI_CPCam;
+ // TODO(XXX): Hardcode for now until we implement re-proc pipe
+ singlePrevMode.eMode = OMX_TI_SinglePreviewMode_ImageCaptureHighSpeed;
+ } else if( OMXCameraAdapter::HIGH_QUALITY == mode ) {
CAMHAL_LOGDA("Camera mode: HIGH QUALITY");
camMode.eCamOperatingMode = OMX_CaptureImageProfileBase;
- }
- else if( OMXCameraAdapter::HIGH_QUALITY_ZSL== mode )
- {
+ } else if( OMXCameraAdapter::HIGH_QUALITY_ZSL== mode ) {
const char* valstr = NULL;
CAMHAL_LOGDA("Camera mode: HIGH QUALITY_ZSL");
camMode.eCamOperatingMode = OMX_TI_CaptureImageProfileZeroShutterLag;
+#ifdef CAMERAHAL_TUNA
if ( !mIternalRecordingHint ) {
zslHistoryLen.nHistoryLen = 5;
}
+#endif
- }
- else if( OMXCameraAdapter::VIDEO_MODE == mode )
- {
+ } else if( OMXCameraAdapter::VIDEO_MODE == mode ) {
CAMHAL_LOGDA("Camera mode: VIDEO MODE");
camMode.eCamOperatingMode = OMX_CaptureVideo;
- }
- else
- {
+ } else if( OMXCameraAdapter::VIDEO_MODE_HQ == mode ) {
+ CAMHAL_LOGDA("Camera mode: VIDEO MODE HQ");
+ camMode.eCamOperatingMode = OMX_CaptureHighQualityVideo;
+ } else {
CAMHAL_LOGEA("Camera mode: INVALID mode passed!");
return BAD_VALUE;
- }
-
- if( NO_ERROR == ret )
- {
- eError = OMX_SetParameter(mCameraAdapterParameters.mHandleComp,
- ( OMX_INDEXTYPE ) OMX_TI_IndexParamZslHistoryLen,
- &zslHistoryLen);
- if ( OMX_ErrorNone != eError )
- {
- CAMHAL_LOGEB("Error while configuring ZSL History len 0x%x", eError);
- // Don't return status for now
- // as high history values might lead
- // to errors on some platforms.
- // ret = ErrorUtils::omxToAndroidError(eError);
- }
- else
- {
- CAMHAL_LOGDA("ZSL History len configured successfully");
- }
- }
+ }
if( NO_ERROR == ret )
{
@@ -684,7 +696,7 @@ status_t OMXCameraAdapter::setCaptureMode(OMXCameraAdapter::CaptureMode mode)
if ( OMX_ErrorNone != eError )
{
CAMHAL_LOGEB("Error while configuring camera mode 0x%x", eError);
- ret = ErrorUtils::omxToAndroidError(eError);
+ ret = Utils::ErrorUtils::omxToAndroidError(eError);
}
else
{
@@ -692,6 +704,20 @@ status_t OMXCameraAdapter::setCaptureMode(OMXCameraAdapter::CaptureMode mode)
}
}
+ if((NO_ERROR == ret) && (OMXCameraAdapter::CP_CAM == mode)) {
+ //Configure Single Preview Mode
+ eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+ ( OMX_INDEXTYPE ) OMX_TI_IndexConfigSinglePreviewMode,
+ &singlePrevMode);
+ if ( OMX_ErrorNone != eError ) {
+ CAMHAL_LOGEB("Error while configuring single preview mode 0x%x", eError);
+ ret = Utils::ErrorUtils::omxToAndroidError(eError);
+ } else {
+ CAMHAL_LOGDA("single preview mode configured successfully");
+ }
+ }
+
+
if( NO_ERROR == ret )
{
//Configure CAC
@@ -701,7 +727,7 @@ status_t OMXCameraAdapter::setCaptureMode(OMXCameraAdapter::CaptureMode mode)
if ( OMX_ErrorNone != eError )
{
CAMHAL_LOGEB("Error while configuring CAC 0x%x", eError);
- ret = ErrorUtils::omxToAndroidError(eError);
+ ret = Utils::ErrorUtils::omxToAndroidError(eError);
}
else
{
@@ -1009,7 +1035,7 @@ status_t OMXCameraAdapter::setAlgoPriority(AlgoPriority priority,
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::setPictureRotation(unsigned int degree)
@@ -1055,76 +1081,83 @@ status_t OMXCameraAdapter::setSensorOrientation(unsigned int degree)
OMXCameraPortParameters *mPreviewData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex];
LOG_FUNCTION_NAME;
- if ( OMX_StateInvalid == mComponentState )
- {
+ if ( OMX_StateInvalid == mComponentState ) {
CAMHAL_LOGEA("OMX component is in invalid state");
ret = -1;
- }
+ }
/* Set Temproary Port resolution.
- * For resolution with height > 1008,resolution cannot be set without configuring orientation.
+ * For resolution with height >= 720,
+ * resolution cannot be set without configuring orientation.
* So we first set a temp resolution. We have used VGA
*/
- tmpHeight = mPreviewData->mHeight;
- tmpWidth = mPreviewData->mWidth;
- mPreviewData->mWidth = 640;
- mPreviewData->mHeight = 480;
- ret = setFormat(OMX_CAMERA_PORT_VIDEO_OUT_PREVIEW, *mPreviewData);
- if ( ret != NO_ERROR )
- {
- CAMHAL_LOGEB("setFormat() failed %d", ret);
+ if ( mPreviewData->mHeight >= 720 ) {
+ tmpHeight = mPreviewData->mHeight;
+ tmpWidth = mPreviewData->mWidth;
+ mPreviewData->mWidth = 640;
+ mPreviewData->mHeight = 480;
+
+ ret = setFormat(OMX_CAMERA_PORT_VIDEO_OUT_PREVIEW, *mPreviewData);
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGEB("Error while configuring format 0x%x", ret);
+ return ret;
+ }
+
+ mPreviewData->mWidth = tmpWidth;
+ mPreviewData->mHeight = tmpHeight;
+ mPreviewPortInitialized = true;
+ }
+ else if (!mPreviewPortInitialized) {
+ ret = setFormat(OMX_CAMERA_PORT_VIDEO_OUT_PREVIEW, *mPreviewData);
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGEB("Error while configuring format 0x%x", ret);
+ return ret;
}
+ mPreviewPortInitialized = true;
+ }
/* Now set Required Orientation*/
- if ( NO_ERROR == ret )
- {
+ if ( NO_ERROR == ret ) {
OMX_INIT_STRUCT(sensorOrientation, OMX_CONFIG_ROTATIONTYPE);
sensorOrientation.nPortIndex = mCameraAdapterParameters.mPrevPortIndex;
- eError = OMX_GetConfig(mCameraAdapterParameters.mHandleComp,
- OMX_IndexConfigCommonRotate,
- &sensorOrientation);
- if ( OMX_ErrorNone != eError )
- {
- CAMHAL_LOGEB("Error while Reading Sensor Orientation : 0x%x", eError);
- }
- CAMHAL_LOGVB(" Currently Sensor Orientation is set to : %d",
- ( unsigned int ) sensorOrientation.nRotation);
- sensorOrientation.nPortIndex = mCameraAdapterParameters.mPrevPortIndex;
sensorOrientation.nRotation = degree;
eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
OMX_IndexConfigCommonRotate,
&sensorOrientation);
- if ( OMX_ErrorNone != eError )
- {
+ if ( OMX_ErrorNone != eError ) {
CAMHAL_LOGEB("Error while configuring rotation 0x%x", eError);
- }
- CAMHAL_LOGVA(" Read the Parameters that are set");
- eError = OMX_GetConfig(mCameraAdapterParameters.mHandleComp,
- OMX_IndexConfigCommonRotate,
- &sensorOrientation);
- if ( OMX_ErrorNone != eError )
- {
- CAMHAL_LOGEB("Error while Reading Sensor Orientation : 0x%x", eError);
- }
+ }
CAMHAL_LOGVB(" Currently Sensor Orientation is set to : %d",
( unsigned int ) sensorOrientation.nRotation);
CAMHAL_LOGVB(" Sensor Configured for Port : %d",
( unsigned int ) sensorOrientation.nPortIndex);
- }
+ }
/* Now set the required resolution as requested */
+ if ( NO_ERROR == ret ) {
+ bool portConfigured = false;
+ ret = setSensorQuirks(degree,
+ mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex],
+ portConfigured);
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGEB("Error while configuring setSensorQuirks 0x%x", ret);
+ return ret;
+ }
- mPreviewData->mWidth = tmpWidth;
- mPreviewData->mHeight = tmpHeight;
- if ( NO_ERROR == ret )
- {
- ret = setFormat (mCameraAdapterParameters.mPrevPortIndex,
- mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex]);
- if ( NO_ERROR != ret )
- {
- CAMHAL_LOGEB("setFormat() failed %d", ret);
+ if ( !portConfigured ) {
+ ret = setFormat (mCameraAdapterParameters.mPrevPortIndex,
+ mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex]);
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGEB("Error while configuring format 0x%x", ret);
+ return ret;
}
+
+ // Another WA: Setting the port definition will reset the VFR
+ // configuration.
+ setVFramerate(mPreviewData->mMinFrameRate,
+ mPreviewData->mMaxFrameRate);
}
+ }
LOG_FUNCTION_NAME_EXIT;
@@ -1146,9 +1179,7 @@ status_t OMXCameraAdapter::setVFramerate(OMX_U32 minFrameRate, OMX_U32 maxFrameR
ret = -EINVAL;
}
- // The port framerate should never be smaller
- // than max framerate.
- if ( mPreviewData->mFrameRate < maxFrameRate ) {
+ if ( !mSetFormatDone ) {
return NO_INIT;
}
@@ -1177,4 +1208,32 @@ status_t OMXCameraAdapter::setVFramerate(OMX_U32 minFrameRate, OMX_U32 maxFrameR
return ret;
}
-};
+status_t OMXCameraAdapter::setMechanicalMisalignmentCorrection(const bool enable)
+{
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_TI_CONFIG_MM mm;
+
+ LOG_FUNCTION_NAME;
+
+ mm.nVersion = mLocalVersionParam;
+ mm.nSize = sizeof(OMX_TI_CONFIG_MM);
+ mm.nPortIndex = mCameraAdapterParameters.mPrevPortIndex;
+ mm.bMM = enable ? OMX_TRUE : OMX_FALSE;
+
+ eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+ (OMX_INDEXTYPE)OMX_TI_IndexConfigMechanicalMisalignment,
+ &mm);
+
+ if(OMX_ErrorNone != eError) {
+ CAMHAL_LOGEB("Error while enabling mechanical misalignment correction. error = 0x%x", eError);
+ ret = -1;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/OMXCameraAdapter/OMXCameraAdapter.cpp b/camera/OMXCameraAdapter/OMXCameraAdapter.cpp
index 392de93..9f6dd37 100755..100644
--- a/camera/OMXCameraAdapter/OMXCameraAdapter.cpp
+++ b/camera/OMXCameraAdapter/OMXCameraAdapter.cpp
@@ -23,6 +23,7 @@
#include "CameraHal.h"
#include "OMXCameraAdapter.h"
+#include "OMXDCC.h"
#include "ErrorUtils.h"
#include "TICameraParameters.h"
#include <signal.h>
@@ -33,21 +34,21 @@
static int mDebugFps = 0;
static int mDebugFcs = 0;
-#undef TRUE
-#undef FALSE
-
#define HERE(Msg) {CAMHAL_LOGEB("--===line %d, %s===--\n", __LINE__, Msg);}
-namespace android {
+namespace Ti {
+namespace Camera {
+
+#ifdef CAMERAHAL_OMX_PROFILING
-#undef LOG_TAG
-///Maintain a separate tag for OMXCameraAdapter logs to isolate issues OMX specific
-#define LOG_TAG "CameraHAL"
+const char OMXCameraAdapter::DEFAULT_PROFILE_PATH[] = "/data/dbg/profile_data.bin";
+
+#endif
//frames skipped before recalculating the framerate
#define FPS_PERIOD 30
-Mutex gAdapterLock;
+android::Mutex gAdapterLock;
/*--------------------Camera Adapter Class STARTS here-----------------------------*/
status_t OMXCameraAdapter::initialize(CameraProperties::Properties* caps)
@@ -55,16 +56,24 @@ status_t OMXCameraAdapter::initialize(CameraProperties::Properties* caps)
LOG_FUNCTION_NAME;
char value[PROPERTY_VALUE_MAX];
+ const char *mountOrientationString = NULL;
+
property_get("debug.camera.showfps", value, "0");
mDebugFps = atoi(value);
property_get("debug.camera.framecounts", value, "0");
mDebugFcs = atoi(value);
+#ifdef CAMERAHAL_OMX_PROFILING
+
+ property_get("debug.camera.profile", value, "0");
+ mDebugProfile = atoi(value);
+
+#endif
+
TIMM_OSAL_ERRORTYPE osalError = OMX_ErrorNone;
OMX_ERRORTYPE eError = OMX_ErrorNone;
status_t ret = NO_ERROR;
-
mLocalVersionParam.s.nVersionMajor = 0x1;
mLocalVersionParam.s.nVersionMinor = 0x1;
mLocalVersionParam.s.nRevision = 0x0 ;
@@ -72,6 +81,14 @@ status_t OMXCameraAdapter::initialize(CameraProperties::Properties* caps)
mPending3Asettings = 0;//E3AsettingsAll;
mPendingCaptureSettings = 0;
+ mPendingPreviewSettings = 0;
+ mPendingReprocessSettings = 0;
+
+ ret = mMemMgr.initialize();
+ if ( ret != OK ) {
+ CAMHAL_LOGE("MemoryManager initialization failed, error: %d", ret);
+ return ret;
+ }
if ( 0 != mInitSem.Count() )
{
@@ -86,17 +103,24 @@ status_t OMXCameraAdapter::initialize(CameraProperties::Properties* caps)
mCameraAdapterParameters.mImagePortIndex = OMX_CAMERA_PORT_IMAGE_OUT_IMAGE;
mCameraAdapterParameters.mMeasurementPortIndex = OMX_CAMERA_PORT_VIDEO_OUT_MEASUREMENT;
//currently not supported use preview port instead
- mCameraAdapterParameters.mVideoPortIndex = OMX_CAMERA_PORT_VIDEO_OUT_PREVIEW;
+ mCameraAdapterParameters.mVideoPortIndex = OMX_CAMERA_PORT_VIDEO_OUT_VIDEO;
+ mCameraAdapterParameters.mVideoInPortIndex = OMX_CAMERA_PORT_VIDEO_IN_VIDEO;
eError = OMX_Init();
if (eError != OMX_ErrorNone) {
CAMHAL_LOGEB("OMX_Init() failed, error: 0x%x", eError);
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
mOmxInitialized = true;
+ // Initialize the callback handles
+ OMX_CALLBACKTYPE callbacks;
+ callbacks.EventHandler = Camera::OMXCameraAdapterEventHandler;
+ callbacks.EmptyBufferDone = Camera::OMXCameraAdapterEmptyBufferDone;
+ callbacks.FillBufferDone = Camera::OMXCameraAdapterFillBufferDone;
+
///Get the handle to the OMX Component
- eError = OMXCameraAdapter::OMXCameraGetHandle(&mCameraAdapterParameters.mHandleComp, (OMX_PTR)this);
+ eError = OMXCameraAdapter::OMXCameraGetHandle(&mCameraAdapterParameters.mHandleComp, this, callbacks);
if(eError != OMX_ErrorNone) {
CAMHAL_LOGEB("OMX_GetHandle -0x%x", eError);
}
@@ -105,6 +129,8 @@ status_t OMXCameraAdapter::initialize(CameraProperties::Properties* caps)
mComponentState = OMX_StateLoaded;
CAMHAL_LOGVB("OMX_GetHandle -0x%x sensor_index = %lu", eError, mSensorIndex);
+ initDccFileDataSave(&mCameraAdapterParameters.mHandleComp, mCameraAdapterParameters.mPrevPortIndex);
+
eError = OMX_SendCommand(mCameraAdapterParameters.mHandleComp,
OMX_CommandPortDisable,
OMX_ALL,
@@ -162,26 +188,39 @@ status_t OMXCameraAdapter::initialize(CameraProperties::Properties* caps)
CAMHAL_LOGDB("Sensor %d selected successfully", mSensorIndex);
}
+#ifdef CAMERAHAL_DEBUG
+
printComponentVersion(mCameraAdapterParameters.mHandleComp);
+#endif
+
mBracketingEnabled = false;
+ mZoomBracketingEnabled = false;
mBracketingBuffersQueuedCount = 0;
mBracketingRange = 1;
mLastBracetingBufferIdx = 0;
+ mBracketingBuffersQueued = NULL;
mOMXStateSwitch = false;
+ mBracketingSet = false;
+#ifdef CAMERAHAL_USE_RAW_IMAGE_SAVING
+ mRawCapture = false;
+ mYuvCapture = false;
+#endif
mCaptureSignalled = false;
mCaptureConfigured = false;
+ mReprocConfigured = false;
mRecording = false;
mWaitingForSnapshot = false;
- mSnapshotCount = 0;
+ mPictureFormatFromClient = NULL;
- mCapMode = HIGH_QUALITY;
+ mCapabilitiesOpMode = MODE_MAX;
+ mCapMode = INITIAL_MODE;
mIPP = IPP_NULL;
mVstabEnabled = false;
mVnfEnabled = false;
mBurstFrames = 1;
- mCapturedFrames = 0;
+ mFlushShotConfigQueue = false;
mPictureQuality = 100;
mCurrentZoomIdx = 0;
mTargetZoomIdx = 0;
@@ -190,13 +229,24 @@ status_t OMXCameraAdapter::initialize(CameraProperties::Properties* caps)
mZoomInc = 1;
mZoomParameterIdx = 0;
mExposureBracketingValidEntries = 0;
+ mZoomBracketingValidEntries = 0;
mSensorOverclock = false;
+ mAutoConv = OMX_TI_AutoConvergenceModeMax;
+ mManualConv = 0;
+
+#ifdef CAMERAHAL_TUNA
mIternalRecordingHint = false;
+#endif
mDeviceOrientation = 0;
+ mFaceOrientation = 0;
mCapabilities = caps;
mZoomUpdating = false;
mZoomUpdate = false;
+ mGBCE = BRIGHTNESS_OFF;
+ mGLBCE = BRIGHTNESS_OFF;
+ mParameters3A.ExposureLock = OMX_FALSE;
+ mParameters3A.WhiteBalanceLock = OMX_FALSE;
mEXIFData.mGPSData.mAltitudeValid = false;
mEXIFData.mGPSData.mDatestampValid = false;
@@ -209,6 +259,29 @@ status_t OMXCameraAdapter::initialize(CameraProperties::Properties* caps)
mEXIFData.mModelValid = false;
mEXIFData.mMakeValid = false;
+ mCapturedFrames = 0;
+ mBurstFramesAccum = 0;
+ mBurstFramesQueued = 0;
+
+ //update the mDeviceOrientation with the sensor mount orientation.
+ //So that the face detect will work before onOrientationEvent()
+ //get triggered.
+ CAMHAL_ASSERT(mCapabilities);
+ mountOrientationString = mCapabilities->get(CameraProperties::ORIENTATION_INDEX);
+ CAMHAL_ASSERT(mountOrientationString);
+ mDeviceOrientation = atoi(mountOrientationString);
+ mFaceOrientation = atoi(mountOrientationString);
+
+ if (mSensorIndex != 2) {
+ mCapabilities->setMode(MODE_HIGH_SPEED);
+ }
+
+ if (mCapabilities->get(CameraProperties::SUPPORTED_ZOOM_STAGES) != NULL) {
+ mMaxZoomSupported = mCapabilities->getInt(CameraProperties::SUPPORTED_ZOOM_STAGES) + 1;
+ } else {
+ mMaxZoomSupported = 1;
+ }
+
// initialize command handling thread
if(mCommandHandler.get() == NULL)
mCommandHandler = new CommandHandler(this);
@@ -219,14 +292,13 @@ status_t OMXCameraAdapter::initialize(CameraProperties::Properties* caps)
return NO_MEMORY;
}
- ret = mCommandHandler->run("CallbackThread", PRIORITY_URGENT_DISPLAY);
+ ret = mCommandHandler->run("CallbackThread", android::PRIORITY_URGENT_DISPLAY);
if ( ret != NO_ERROR )
{
if( ret == INVALID_OPERATION){
CAMHAL_LOGDA("command handler thread already runnning!!");
ret = NO_ERROR;
- } else
- {
+ } else {
CAMHAL_LOGEA("Couldn't run command handlerthread");
return ret;
}
@@ -242,31 +314,18 @@ status_t OMXCameraAdapter::initialize(CameraProperties::Properties* caps)
return NO_MEMORY;
}
- ret = mOMXCallbackHandler->run("OMXCallbackThread", PRIORITY_URGENT_DISPLAY);
+ ret = mOMXCallbackHandler->run("OMXCallbackThread", android::PRIORITY_URGENT_DISPLAY);
if ( ret != NO_ERROR )
{
if( ret == INVALID_OPERATION){
CAMHAL_LOGDA("omx callback handler thread already runnning!!");
ret = NO_ERROR;
- }else
- {
+ } else {
CAMHAL_LOGEA("Couldn't run omx callback handler thread");
return ret;
}
}
- //Remove any unhandled events
- if (!mEventSignalQ.isEmpty()) {
- for (unsigned int i = 0 ;i < mEventSignalQ.size(); i++ ) {
- TIUTILS::Message *msg = mEventSignalQ.itemAt(i);
- //remove from queue and free msg
- if ( NULL != msg ) {
- free(msg);
- }
- }
- mEventSignalQ.clear();
- }
-
OMX_INIT_STRUCT_PTR (&mRegionPriority, OMX_TI_CONFIG_3A_REGION_PRIORITY);
OMX_INIT_STRUCT_PTR (&mFacePriority, OMX_TI_CONFIG_3A_FACE_PRIORITY);
mRegionPriority.nPortIndex = OMX_ALL;
@@ -276,31 +335,65 @@ status_t OMXCameraAdapter::initialize(CameraProperties::Properties* caps)
//and will not conditionally apply based on current values.
mFirstTimeInit = true;
+ //Flag to avoid calling setVFramerate() before OMX_SetParameter(OMX_IndexParamPortDefinition)
+ //Ducati will return an error otherwise.
+ mSetFormatDone = false;
+
memset(mExposureBracketingValues, 0, EXP_BRACKET_RANGE*sizeof(int));
+ memset(mZoomBracketingValues, 0, ZOOM_BRACKET_RANGE*sizeof(int));
mMeasurementEnabled = false;
mFaceDetectionRunning = false;
mFaceDetectionPaused = false;
mFDSwitchAlgoPriority = false;
+ metadataLastAnalogGain = -1;
+ metadataLastExposureTime = -1;
+
memset(&mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex], 0, sizeof(OMXCameraPortParameters));
memset(&mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex], 0, sizeof(OMXCameraPortParameters));
-
- //Initialize 3A defaults
- ret = init3AParams(mParameters3A);
- if ( NO_ERROR != ret ) {
- CAMHAL_LOGEA("Couldn't init 3A params!");
- goto EXIT;
- }
+ memset(&mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mVideoPortIndex], 0, sizeof(OMXCameraPortParameters));
+ memset(&mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mVideoInPortIndex], 0, sizeof(OMXCameraPortParameters));
+
+ // initialize 3A defaults
+ mParameters3A.Effect = getLUTvalue_HALtoOMX(OMXCameraAdapter::DEFAULT_EFFECT, EffLUT);
+ mParameters3A.FlashMode = getLUTvalue_HALtoOMX(OMXCameraAdapter::DEFAULT_FLASH_MODE, FlashLUT);
+ mParameters3A.SceneMode = getLUTvalue_HALtoOMX(OMXCameraAdapter::DEFAULT_SCENE_MODE, SceneLUT);
+ mParameters3A.EVCompensation = atoi(OMXCameraAdapter::DEFAULT_EV_COMPENSATION);
+ mParameters3A.Focus = getLUTvalue_HALtoOMX(OMXCameraAdapter::DEFAULT_FOCUS_MODE, FocusLUT);
+ mParameters3A.ISO = getLUTvalue_HALtoOMX(OMXCameraAdapter::DEFAULT_ISO_MODE, IsoLUT);
+ mParameters3A.Flicker = getLUTvalue_HALtoOMX(OMXCameraAdapter::DEFAULT_ANTIBANDING, FlickerLUT);
+ mParameters3A.Brightness = atoi(OMXCameraAdapter::DEFAULT_BRIGHTNESS);
+ mParameters3A.Saturation = atoi(OMXCameraAdapter::DEFAULT_SATURATION) - SATURATION_OFFSET;
+ mParameters3A.Sharpness = atoi(OMXCameraAdapter::DEFAULT_SHARPNESS) - SHARPNESS_OFFSET;
+ mParameters3A.Contrast = atoi(OMXCameraAdapter::DEFAULT_CONTRAST) - CONTRAST_OFFSET;
+ mParameters3A.WhiteBallance = getLUTvalue_HALtoOMX(OMXCameraAdapter::DEFAULT_WB, WBalLUT);
+ mParameters3A.Exposure = getLUTvalue_HALtoOMX(OMXCameraAdapter::DEFAULT_EXPOSURE_MODE, ExpLUT);
+ mParameters3A.ExposureLock = OMX_FALSE;
+ mParameters3A.FocusLock = OMX_FALSE;
+ mParameters3A.WhiteBalanceLock = OMX_FALSE;
+
+ mParameters3A.ManualExposure = 0;
+ mParameters3A.ManualExposureRight = 0;
+ mParameters3A.ManualGain = 0;
+ mParameters3A.ManualGainRight = 0;
+
+ mParameters3A.AlgoExternalGamma = OMX_FALSE;
+ mParameters3A.AlgoNSF1 = OMX_TRUE;
+ mParameters3A.AlgoNSF2 = OMX_TRUE;
+ mParameters3A.AlgoSharpening = OMX_TRUE;
+ mParameters3A.AlgoThreeLinColorMap = OMX_TRUE;
+ mParameters3A.AlgoGIC = OMX_TRUE;
+ memset(&mParameters3A.mGammaTable, 0, sizeof(mParameters3A.mGammaTable));
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
EXIT:
CAMHAL_LOGDB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError);
performCleanupAfterError();
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
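
The 3A defaults above are resolved through getLUTvalue_HALtoOMX() and per-feature lookup tables (EffLUT, FlashLUT, SceneLUT, and so on). A rough, self-contained sketch of how such a HAL-string-to-OMX-enum table lookup typically behaves; LutEntry and lookupOmxValue are illustrative names, not the adapter's real types:

    #include <cstring>

    // Illustrative stand-in for the adapter's LUT machinery; not the real types.
    struct LutEntry {
        const char *halValue;   // string the app passes via CameraParameters
        int         omxValue;   // matching OMX enum value
    };

    // Return the OMX value for a HAL string, falling back to a caller-supplied
    // default when the string is missing or unknown.
    static int lookupOmxValue(const char *halValue,
                              const LutEntry *table, int tableSize,
                              int defaultValue)
    {
        for (int i = 0; i < tableSize; i++) {
            if (halValue && strcmp(table[i].halValue, halValue) == 0) {
                return table[i].omxValue;
            }
        }
        return defaultValue;
    }
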
void OMXCameraAdapter::performCleanupAfterError()
@@ -324,9 +417,15 @@ OMXCameraAdapter::OMXCameraPortParameters *OMXCameraAdapter::getPortParams(Camer
switch ( frameType )
{
case CameraFrame::IMAGE_FRAME:
- case CameraFrame::RAW_FRAME:
ret = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex];
break;
+ case CameraFrame::RAW_FRAME:
+ if (mRawCapture) {
+ ret = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mVideoPortIndex];
+ } else {
+ ret = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex];
+ }
+ break;
case CameraFrame::PREVIEW_FRAME_SYNC:
case CameraFrame::SNAPSHOT_FRAME:
case CameraFrame::VIDEO_FRAME_SYNC:
@@ -342,13 +441,16 @@ OMXCameraAdapter::OMXCameraPortParameters *OMXCameraAdapter::getPortParams(Camer
return ret;
}
-status_t OMXCameraAdapter::fillThisBuffer(void* frameBuf, CameraFrame::FrameType frameType)
+status_t OMXCameraAdapter::fillThisBuffer(CameraBuffer * frameBuf, CameraFrame::FrameType frameType)
{
+ LOG_FUNCTION_NAME;
+
status_t ret = NO_ERROR;
OMXCameraPortParameters *port = NULL;
OMX_ERRORTYPE eError = OMX_ErrorNone;
BaseCameraAdapter::AdapterState state;
BaseCameraAdapter::getState(state);
+ bool isCaptureFrame = false;
if ( ( PREVIEW_ACTIVE & state ) != PREVIEW_ACTIVE )
{
@@ -360,16 +462,8 @@ status_t OMXCameraAdapter::fillThisBuffer(void* frameBuf, CameraFrame::FrameType
return -EINVAL;
}
- if ( (NO_ERROR == ret) &&
- ((CameraFrame::IMAGE_FRAME == frameType) || (CameraFrame::RAW_FRAME == frameType)) &&
- (1 > mCapturedFrames) &&
- (!mBracketingEnabled)) {
- // Signal end of image capture
- if ( NULL != mEndImageCaptureCallback) {
- mEndImageCaptureCallback(mEndCaptureData);
- }
- return NO_ERROR;
- }
+ isCaptureFrame = (CameraFrame::IMAGE_FRAME == frameType) ||
+ (CameraFrame::RAW_FRAME == frameType);
if ( NO_ERROR == ret )
{
@@ -381,25 +475,36 @@ status_t OMXCameraAdapter::fillThisBuffer(void* frameBuf, CameraFrame::FrameType
}
}
- if ( NO_ERROR == ret )
- {
-
- for ( int i = 0 ; i < port->mNumBufs ; i++)
- {
- if ( port->mBufferHeader[i]->pBuffer == frameBuf )
- {
+ if ( NO_ERROR == ret ) {
+ for ( int i = 0 ; i < port->mNumBufs ; i++) {
+ if ((CameraBuffer *) port->mBufferHeader[i]->pAppPrivate == frameBuf) {
+ if ( isCaptureFrame && !mBracketingEnabled ) {
+ android::AutoMutex lock(mBurstLock);
+ if ((1 > mCapturedFrames) && !mBracketingEnabled && (mCapMode != CP_CAM)) {
+ // Signal end of image capture
+ if ( NULL != mEndImageCaptureCallback) {
+ mEndImageCaptureCallback(mEndCaptureData);
+ }
+ port->mStatus[i] = OMXCameraPortParameters::IDLE;
+ return NO_ERROR;
+ } else if (mBurstFramesQueued >= mBurstFramesAccum) {
+ port->mStatus[i] = OMXCameraPortParameters::IDLE;
+ return NO_ERROR;
+ }
+ mBurstFramesQueued++;
+ }
+ port->mStatus[i] = OMXCameraPortParameters::FILL;
eError = OMX_FillThisBuffer(mCameraAdapterParameters.mHandleComp, port->mBufferHeader[i]);
if ( eError != OMX_ErrorNone )
- {
+ {
CAMHAL_LOGEB("OMX_FillThisBuffer 0x%x", eError);
goto EXIT;
- }
+ }
mFramesWithDucati++;
break;
- }
- }
-
- }
+ }
+ }
+ }
LOG_FUNCTION_NAME_EXIT;
return ret;
@@ -410,86 +515,104 @@ EXIT:
//Since fillthisbuffer is called asynchronously, make sure to signal error to the app
mErrorNotifier->errorNotify(CAMERA_ERROR_HARD);
LOG_FUNCTION_NAME_EXIT;
- return (ret | ErrorUtils::omxToAndroidError(eError));
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
+}
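
The capture branch above gates buffer re-queueing on the burst counters: a returned image or RAW buffer is handed back to the component only while more burst frames are still expected; otherwise it is parked as IDLE, and once no captured frames remain the end-of-capture callback fires. A simplified, self-contained condensation of that decision (the enum and function below are illustrative, not part of the HAL):

    // Illustrative condensation of the capture-buffer re-queue decision above.
    enum CaptureBufferAction { SIGNAL_END_OF_CAPTURE, PARK_BUFFER, REQUEUE_BUFFER };

    static CaptureBufferAction decideCaptureBuffer(int capturedFramesLeft,
                                                   int burstQueued,
                                                   int burstAccumulated,
                                                   bool bracketing,
                                                   bool cpCamMode)
    {
        if (capturedFramesLeft < 1 && !bracketing && !cpCamMode) {
            return SIGNAL_END_OF_CAPTURE;   // nothing more expected from this shot
        }
        if (burstQueued >= burstAccumulated) {
            return PARK_BUFFER;             // burst budget is already in flight
        }
        return REQUEUE_BUFFER;              // hand back via OMX_FillThisBuffer
    }
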
+
+void OMXCameraAdapter::setParamS3D(OMX_U32 port, const char *valstr)
+{
+ OMXCameraPortParameters *cap;
+
+ LOG_FUNCTION_NAME;
+
+ cap = &mCameraAdapterParameters.mCameraPortParams[port];
+ if (valstr != NULL)
+ {
+ if (strcmp(valstr, TICameraParameters::S3D_TB_FULL) == 0)
+ {
+ cap->mFrameLayoutType = OMX_TI_StereoFrameLayoutTopBottom;
+ }
+ else if (strcmp(valstr, TICameraParameters::S3D_SS_FULL) == 0)
+ {
+ cap->mFrameLayoutType = OMX_TI_StereoFrameLayoutLeftRight;
+ }
+ else if (strcmp(valstr, TICameraParameters::S3D_TB_SUBSAMPLED) == 0)
+ {
+ cap->mFrameLayoutType = OMX_TI_StereoFrameLayoutTopBottomSubsample;
+ }
+ else if (strcmp(valstr, TICameraParameters::S3D_SS_SUBSAMPLED) == 0)
+ {
+ cap->mFrameLayoutType = OMX_TI_StereoFrameLayoutLeftRightSubsample;
+ }
+ else
+ {
+ cap->mFrameLayoutType = OMX_TI_StereoFrameLayout2D;
+ }
+ }
+ else
+ {
+ cap->mFrameLayoutType = OMX_TI_StereoFrameLayout2D;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
}
-status_t OMXCameraAdapter::setParameters(const CameraParameters &params)
+status_t OMXCameraAdapter::setParameters(const android::CameraParameters &params)
{
LOG_FUNCTION_NAME;
- const char * str = NULL;
int mode = 0;
status_t ret = NO_ERROR;
bool updateImagePortParams = false;
int minFramerate, maxFramerate, frameRate;
const char *valstr = NULL;
- const char *oldstr = NULL;
int w, h;
OMX_COLOR_FORMATTYPE pixFormat;
BaseCameraAdapter::AdapterState state;
BaseCameraAdapter::getState(state);
///@todo Include more camera parameters
- if ( (valstr = params.getPreviewFormat()) != NULL )
- {
- if(strcmp(valstr, (const char *) CameraParameters::PIXEL_FORMAT_YUV420SP) == 0 ||
- strcmp(valstr, (const char *) CameraParameters::PIXEL_FORMAT_YUV420P) == 0 ||
- strcmp(valstr, (const char *) CameraParameters::PIXEL_FORMAT_YUV422I) == 0)
- {
+ if ( (valstr = params.getPreviewFormat()) != NULL ) {
+ if(strcmp(valstr, android::CameraParameters::PIXEL_FORMAT_YUV420SP) == 0 ||
+ strcmp(valstr, android::CameraParameters::PIXEL_FORMAT_YUV420P) == 0 ||
+ strcmp(valstr, android::CameraParameters::PIXEL_FORMAT_YUV422I) == 0) {
CAMHAL_LOGDA("YUV420SP format selected");
- pixFormat = OMX_COLOR_FormatYUV420SemiPlanar;
- }
- else if(strcmp(valstr, (const char *) CameraParameters::PIXEL_FORMAT_RGB565) == 0)
- {
+ pixFormat = OMX_COLOR_FormatYUV420PackedSemiPlanar;
+ } else if(strcmp(valstr, android::CameraParameters::PIXEL_FORMAT_RGB565) == 0) {
CAMHAL_LOGDA("RGB565 format selected");
pixFormat = OMX_COLOR_Format16bitRGB565;
- }
- else
- {
+ } else {
CAMHAL_LOGDA("Invalid format, CbYCrY format selected as default");
pixFormat = OMX_COLOR_FormatCbYCrY;
- }
}
- else
- {
+ } else {
CAMHAL_LOGEA("Preview format is NULL, defaulting to CbYCrY");
pixFormat = OMX_COLOR_FormatCbYCrY;
- }
+ }
OMXCameraPortParameters *cap;
cap = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex];
params.getPreviewSize(&w, &h);
frameRate = params.getPreviewFrameRate();
- minFramerate = params.getInt(TICameraParameters::KEY_MINFRAMERATE);
- maxFramerate = params.getInt(TICameraParameters::KEY_MAXFRAMERATE);
- if ( ( 0 < minFramerate ) &&
- ( 0 < maxFramerate ) )
- {
- if ( minFramerate > maxFramerate )
- {
- CAMHAL_LOGEA(" Min FPS set higher than MAX. So setting MIN and MAX to the higher value");
- maxFramerate = minFramerate;
- }
+ params.getPreviewFpsRange(&minFramerate, &maxFramerate);
+ minFramerate /= CameraHal::VFR_SCALE;
+ maxFramerate /= CameraHal::VFR_SCALE;
+ if ( ( 0 < minFramerate ) && ( 0 < maxFramerate ) ) {
+ if ( minFramerate > maxFramerate ) {
+ CAMHAL_LOGEA(" Min FPS set higher than MAX. So setting MIN and MAX to the higher value");
+ maxFramerate = minFramerate;
+ }
- if ( 0 >= frameRate )
- {
+ if ( 0 >= frameRate ) {
frameRate = maxFramerate;
- }
+ }
- if( ( cap->mMinFrameRate != minFramerate ) ||
- ( cap->mMaxFrameRate != maxFramerate ) )
- {
+ if ( ( cap->mMinFrameRate != (OMX_U32) minFramerate ) ||
+ ( cap->mMaxFrameRate != (OMX_U32) maxFramerate ) ) {
cap->mMinFrameRate = minFramerate;
cap->mMaxFrameRate = maxFramerate;
setVFramerate(cap->mMinFrameRate, cap->mMaxFrameRate);
- }
}
-
- // TODO(XXX): Limiting 1080p to (24,24) or (15,15) for now. Need to remove later.
- if ((w >= 1920) && (h >= 1080)) {
- cap->mMaxFrameRate = cap->mMinFrameRate;
- setVFramerate(cap->mMinFrameRate, cap->mMaxFrameRate);
}
if ( 0 < frameRate )
@@ -537,22 +660,23 @@ status_t OMXCameraAdapter::setParameters(const CameraParameters &params)
mOMXStateSwitch = true;
}
+#ifdef CAMERAHAL_TUNA
valstr = params.get(TICameraParameters::KEY_RECORDING_HINT);
- if (!valstr || (valstr && (strcmp(valstr, CameraParameters::FALSE)))) {
+ if (!valstr || (valstr && (strcmp(valstr, android::CameraParameters::FALSE)))) {
mIternalRecordingHint = false;
} else {
mIternalRecordingHint = true;
}
+#endif
#ifdef OMAP_ENHANCEMENT
-
if ( (valstr = params.get(TICameraParameters::KEY_MEASUREMENT_ENABLE)) != NULL )
{
- if (strcmp(valstr, (const char *) TICameraParameters::MEASUREMENT_ENABLE) == 0)
+ if (strcmp(valstr, android::CameraParameters::TRUE) == 0)
{
mMeasurementEnabled = true;
}
- else if (strcmp(valstr, (const char *) TICameraParameters::MEASUREMENT_DISABLE) == 0)
+ else if (strcmp(valstr, android::CameraParameters::FALSE) == 0)
{
mMeasurementEnabled = false;
}
@@ -566,7 +690,11 @@ status_t OMXCameraAdapter::setParameters(const CameraParameters &params)
//Disable measurement data by default
mMeasurementEnabled = false;
}
+#endif
+#ifdef OMAP_ENHANCEMENT_S3D
+ setParamS3D(mCameraAdapterParameters.mPrevPortIndex,
+ params.get(TICameraParameters::KEY_S3D_PRV_FRAME_LAYOUT));
#endif
ret |= setParametersCapture(params, state);
@@ -586,6 +714,10 @@ status_t OMXCameraAdapter::setParameters(const CameraParameters &params)
mParams = params;
mFirstTimeInit = false;
+ if ( MODE_MAX != mCapabilitiesOpMode ) {
+ mCapabilities->setMode(mCapabilitiesOpMode);
+ }
+
LOG_FUNCTION_NAME_EXIT;
return ret;
}
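
setParameters() now derives the variable frame rate from the standard preview fps range key instead of the TI-specific min/max framerate keys; the range values arrive scaled by CameraHal::VFR_SCALE. Assuming VFR_SCALE is 1000 (the usual Android fps-range scaling; not confirmed by this patch), a range of "10000,30000" becomes 10..30 fps. A small illustrative helper for that conversion and the clamping policy used above:

    // Assumes a 1000x scale factor, mirroring CameraHal::VFR_SCALE; illustrative only.
    static const int kVfrScale = 1000;

    static void fpsRangeToFps(int minScaled, int maxScaled, int &minFps, int &maxFps)
    {
        minFps = minScaled / kVfrScale;
        maxFps = maxScaled / kVfrScale;
        if (minFps > maxFps) {
            // Same policy as above: clamp both ends to the higher value.
            maxFps = minFps;
        }
    }

    // Example: fpsRangeToFps(10000, 30000, lo, hi) yields lo == 10, hi == 30.
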
@@ -601,7 +733,7 @@ void saveFile(unsigned char *buff, int width, int height, int format) {
sprintf(fn, "/preview%03d.yuv", counter);
fd = open(fn, O_CREAT | O_WRONLY | O_SYNC | O_TRUNC, 0777);
if(fd < 0) {
- ALOGE("Unable to open file %s: %s", fn, strerror(fd));
+ CAMHAL_LOGE("Unable to open file %s: %s", fn, strerror(fd));
return;
}
@@ -630,7 +762,36 @@ void saveFile(unsigned char *buff, int width, int height, int format) {
LOG_FUNCTION_NAME_EXIT;
}
-void OMXCameraAdapter::getParameters(CameraParameters& params)
+
+#ifdef CAMERAHAL_USE_RAW_IMAGE_SAVING
+static status_t saveBufferToFile(const void *buf, int size, const char *filename)
+{
+ if (size < 0) {
+ CAMHAL_LOGE("Wrong buffer size: %d", size);
+ return BAD_VALUE;
+ }
+
+ const int fd = open(filename, O_CREAT | O_WRONLY | O_SYNC | O_TRUNC, 0644);
+ if (fd < 0) {
+ CAMHAL_LOGE("ERROR: %s, Unable to save raw file", strerror(fd));
+ return BAD_VALUE;
+ }
+
+ if (write(fd, buf, size) != (signed)size) {
+ CAMHAL_LOGE("ERROR: Unable to write to raw file: %s ", strerror(errno));
+ close(fd);
+ return NO_MEMORY;
+ }
+
+ CAMHAL_LOGD("buffer=%p, size=%d stored at %s", buf, size, filename);
+
+ close(fd);
+ return OK;
+}
+#endif
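
saveBufferToFile() is only compiled in when CAMERAHAL_USE_RAW_IMAGE_SAVING is defined. A hedged usage sketch follows; the wrapper name, the buffer arguments and the dump path are all made up for illustration and are not part of this patch:

    #ifdef CAMERAHAL_USE_RAW_IMAGE_SAVING
    // Hypothetical call site: dump a filled RAW buffer for offline inspection.
    static void dumpRawFrame(const void *rawBuf, int rawSize)
    {
        if (saveBufferToFile(rawBuf, rawSize, "/data/misc/camera/raw_dump.bin") != OK) {
            CAMHAL_LOGE("RAW frame dump failed");
        }
    }
    #endif
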
+
+
+void OMXCameraAdapter::getParameters(android::CameraParameters& params)
{
status_t ret = NO_ERROR;
OMX_CONFIG_EXPOSUREVALUETYPE exp;
@@ -643,9 +804,7 @@ void OMXCameraAdapter::getParameters(CameraParameters& params)
if( mParameters3A.SceneMode != OMX_Manual ) {
const char *valstr_supported = NULL;
- // if preview is not started...we still need to feedback the proper params
- // look up the settings in the LUT
- if (((state & PREVIEW_ACTIVE) == 0) && mCapabilities) {
+ if (mCapabilities) {
const SceneModesEntry* entry = NULL;
entry = getSceneModeEntry(mCapabilities->get(CameraProperties::CAMERA_NAME),
(OMX_SCENEMODETYPE) mParameters3A.SceneMode);
@@ -657,40 +816,40 @@ void OMXCameraAdapter::getParameters(CameraParameters& params)
}
valstr = getLUTvalue_OMXtoHAL(mParameters3A.WhiteBallance, WBalLUT);
- valstr_supported = mParams.get(CameraParameters::KEY_SUPPORTED_WHITE_BALANCE);
+ valstr_supported = mParams.get(android::CameraParameters::KEY_SUPPORTED_WHITE_BALANCE);
if (valstr && valstr_supported && strstr(valstr_supported, valstr))
- params.set(CameraParameters::KEY_WHITE_BALANCE , valstr);
+ params.set(android::CameraParameters::KEY_WHITE_BALANCE , valstr);
valstr = getLUTvalue_OMXtoHAL(mParameters3A.FlashMode, FlashLUT);
- valstr_supported = mParams.get(CameraParameters::KEY_SUPPORTED_FLASH_MODES);
+ valstr_supported = mParams.get(android::CameraParameters::KEY_SUPPORTED_FLASH_MODES);
if (valstr && valstr_supported && strstr(valstr_supported, valstr))
- params.set(CameraParameters::KEY_FLASH_MODE, valstr);
+ params.set(android::CameraParameters::KEY_FLASH_MODE, valstr);
if ((mParameters3A.Focus == OMX_IMAGE_FocusControlAuto) &&
- (mCapMode != OMXCameraAdapter::VIDEO_MODE)) {
- valstr = CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE;
+ ( (mCapMode != OMXCameraAdapter::VIDEO_MODE) &&
+ (mCapMode != OMXCameraAdapter::VIDEO_MODE_HQ) ) ) {
+ valstr = android::CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE;
} else {
valstr = getLUTvalue_OMXtoHAL(mParameters3A.Focus, FocusLUT);
}
- valstr_supported = mParams.get(CameraParameters::KEY_SUPPORTED_FOCUS_MODES);
+ valstr_supported = mParams.get(android::CameraParameters::KEY_SUPPORTED_FOCUS_MODES);
if (valstr && valstr_supported && strstr(valstr_supported, valstr))
- params.set(CameraParameters::KEY_FOCUS_MODE, valstr);
+ params.set(android::CameraParameters::KEY_FOCUS_MODE, valstr);
}
//Query focus distances only when focus is running
if ( ( AF_ACTIVE & state ) ||
- ( NULL == mParameters.get(CameraParameters::KEY_FOCUS_DISTANCES) ) )
+ ( NULL == mParameters.get(android::CameraParameters::KEY_FOCUS_DISTANCES) ) )
{
updateFocusDistances(params);
}
else
{
- params.set(CameraParameters::KEY_FOCUS_DISTANCES,
- mParameters.get(CameraParameters::KEY_FOCUS_DISTANCES));
+ params.set(android::CameraParameters::KEY_FOCUS_DISTANCES,
+ mParameters.get(android::CameraParameters::KEY_FOCUS_DISTANCES));
}
#ifdef OMAP_ENHANCEMENT
-
OMX_INIT_STRUCT_PTR (&exp, OMX_CONFIG_EXPOSUREVALUETYPE);
exp.nPortIndex = OMX_ALL;
@@ -705,11 +864,10 @@ void OMXCameraAdapter::getParameters(CameraParameters& params)
{
CAMHAL_LOGEB("OMX error 0x%x, while retrieving current ISO value", eError);
}
-
#endif
{
- Mutex::Autolock lock(mZoomLock);
+ android::AutoMutex lock(mZoomLock);
//Immediate zoom should not be avaialable while smooth zoom is running
if ( ZOOM_ACTIVE & state )
{
@@ -717,7 +875,7 @@ void OMXCameraAdapter::getParameters(CameraParameters& params)
{
mZoomParameterIdx += mZoomInc;
}
- params.set( CameraParameters::KEY_ZOOM, mZoomParameterIdx);
+ params.set(android::CameraParameters::KEY_ZOOM, mZoomParameterIdx);
if ( ( mCurrentZoomIdx == mTargetZoomIdx ) &&
( mZoomParameterIdx == mCurrentZoomIdx ) )
{
@@ -744,36 +902,238 @@ void OMXCameraAdapter::getParameters(CameraParameters& params)
}
else
{
- params.set( CameraParameters::KEY_ZOOM, mCurrentZoomIdx);
+ params.set(android::CameraParameters::KEY_ZOOM, mCurrentZoomIdx);
}
}
//Populate current lock status
- if ( mParameters3A.ExposureLock ) {
- params.set(CameraParameters::KEY_AUTO_EXPOSURE_LOCK,
- CameraParameters::TRUE);
+ if ( mUserSetExpLock || mParameters3A.ExposureLock ) {
+ params.set(android::CameraParameters::KEY_AUTO_EXPOSURE_LOCK,
+ android::CameraParameters::TRUE);
} else {
- params.set(CameraParameters::KEY_AUTO_EXPOSURE_LOCK,
- CameraParameters::FALSE);
+ params.set(android::CameraParameters::KEY_AUTO_EXPOSURE_LOCK,
+ android::CameraParameters::FALSE);
}
- if ( mParameters3A.WhiteBalanceLock ) {
- params.set(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK,
- CameraParameters::TRUE);
+ if ( mUserSetWbLock || mParameters3A.WhiteBalanceLock ) {
+ params.set(android::CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK,
+ android::CameraParameters::TRUE);
} else {
- params.set(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK,
- CameraParameters::FALSE);
+ params.set(android::CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK,
+ android::CameraParameters::FALSE);
}
+ // Update Picture size capabilities dynamically
+ params.set(android::CameraParameters::KEY_SUPPORTED_PICTURE_SIZES,
+ mCapabilities->get(CameraProperties::SUPPORTED_PICTURE_SIZES));
+
+ // Update framerate capabilities dynamically
+ params.set(android::CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES,
+ mCapabilities->get(CameraProperties::SUPPORTED_PREVIEW_FRAME_RATES));
+
+ params.set(TICameraParameters::KEY_FRAMERATES_EXT_SUPPORTED,
+ mCapabilities->get(CameraProperties::SUPPORTED_PREVIEW_FRAME_RATES_EXT));
+
+ params.set(android::CameraParameters::KEY_SUPPORTED_PREVIEW_FPS_RANGE,
+ mCapabilities->get(CameraProperties::FRAMERATE_RANGE_SUPPORTED));
+
+ params.set(TICameraParameters::KEY_FRAMERATE_RANGES_EXT_SUPPORTED,
+ mCapabilities->get(CameraProperties::FRAMERATE_RANGE_EXT_SUPPORTED));
+
LOG_FUNCTION_NAME_EXIT;
}
-status_t OMXCameraAdapter::setFormat(OMX_U32 port, OMXCameraPortParameters &portParams)
+status_t OMXCameraAdapter::setupTunnel(uint32_t SliceHeight, uint32_t EncoderHandle, uint32_t width, uint32_t height) {
+ LOG_FUNCTION_NAME;
+
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_HANDLETYPE *encoderHandle = (OMX_HANDLETYPE *)EncoderHandle;
+
+ CAMHAL_LOGDB("\n %s: SliceHeight:%d, EncoderHandle:%d width:%d height:%d \n", __FUNCTION__, SliceHeight, EncoderHandle, width, height);
+
+ if (SliceHeight == 0){
+ CAMHAL_LOGEA("\n\n #### Encoder Slice Height Not received, Dont Setup Tunnel $$$$\n\n");
+ return BAD_VALUE;
+ }
+
+ if (encoderHandle == NULL) {
+ CAMHAL_LOGEA("Encoder Handle not set \n\n");
+ return BAD_VALUE;
+ }
+
+ if ( 0 != mInitSem.Count() ) {
+ CAMHAL_LOGEB("Error mInitSem semaphore count %d", mInitSem.Count());
+ LOG_FUNCTION_NAME_EXIT;
+ return NO_INIT;
+ }
+
+ // Register for port enable event
+ ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp,
+ OMX_EventCmdComplete,
+ OMX_CommandPortEnable,
+ mCameraAdapterParameters.mVideoPortIndex,
+ mInitSem);
+ if(ret != NO_ERROR) {
+ CAMHAL_LOGEB("Error in registering for event %d", ret);
+ return UNKNOWN_ERROR;
+ }
+
+ // Enable VIDEO Port
+ eError = OMX_SendCommand(mCameraAdapterParameters.mHandleComp,
+ OMX_CommandPortEnable,
+ mCameraAdapterParameters.mVideoPortIndex,
+ NULL);
+ if(eError != OMX_ErrorNone) {
+ CAMHAL_LOGEB("OMX_SendCommand(OMX_CommandPortEnable) -0x%x", eError);
+ return BAD_VALUE;
+ }
+
+ // Wait for the port enable event to occur
+ ret = mInitSem.WaitTimeout(OMX_CMD_TIMEOUT);
+ if ( NO_ERROR == ret ) {
+ CAMHAL_LOGDA("-Port enable event arrived");
+ } else {
+ ret |= RemoveEvent(mCameraAdapterParameters.mHandleComp,
+ OMX_EventCmdComplete,
+ OMX_CommandPortEnable,
+ mCameraAdapterParameters.mVideoPortIndex,
+ NULL);
+ CAMHAL_LOGEA("Timeout for enabling preview port expired!");
+ return UNKNOWN_ERROR;
+ }
+
+ //Set the Video Port Params
+ OMX_PARAM_PORTDEFINITIONTYPE portCheck;
+ OMX_INIT_STRUCT_PTR (&portCheck, OMX_PARAM_PORTDEFINITIONTYPE);
+ portCheck.nPortIndex = OMX_CAMERA_PORT_VIDEO_OUT_VIDEO;
+ eError = OMX_GetParameter(mCameraAdapterParameters.mHandleComp,
+ OMX_IndexParamPortDefinition, &portCheck);
+ if (eError!=OMX_ErrorNone) {
+ CAMHAL_LOGEB("OMX_GetParameter OMX_IndexParamPortDefinition Error - %x", eError);
+ }
+
+ portCheck.format.video.nFrameWidth = width;
+ portCheck.format.video.nFrameHeight = height;
+ portCheck.format.video.eColorFormat = OMX_COLOR_FormatYUV420PackedSemiPlanar;
+ eError = OMX_SetParameter(mCameraAdapterParameters.mHandleComp,
+ OMX_IndexParamPortDefinition, &portCheck);
+ if (eError!=OMX_ErrorNone) {
+ CAMHAL_LOGEB("OMX_SetParameter OMX_IndexParamPortDefinition Error- %x", eError);
+ }
+
+ //Slice Configuration
+ OMX_TI_PARAM_VTCSLICE VTCSlice;
+ OMX_INIT_STRUCT_PTR(&VTCSlice, OMX_TI_PARAM_VTCSLICE);
+ eError = OMX_GetParameter(mCameraAdapterParameters.mHandleComp, (OMX_INDEXTYPE)OMX_TI_IndexParamVtcSlice, &VTCSlice);
+ if (eError!=OMX_ErrorNone) {
+ CAMHAL_LOGEB("OMX_GetParameter OMX_TI_IndexParamVtcSlice Error - %x", eError);
+ }
+
+ VTCSlice.nSliceHeight = SliceHeight;
+ eError = OMX_SetParameter(mCameraAdapterParameters.mHandleComp, (OMX_INDEXTYPE)OMX_TI_IndexParamVtcSlice, &VTCSlice);
+ if (OMX_ErrorNone != eError ) {
+ CAMHAL_LOGEB("OMX_SetParameter on OMX_TI_IndexParamVtcSlice returned error: 0x%x", eError);
+ return BAD_VALUE;
+ }
+
+ eError = OMX_SetupTunnel(mCameraAdapterParameters.mHandleComp,
+ mCameraAdapterParameters.mVideoPortIndex, encoderHandle, 0);
+ if (OMX_ErrorNone != eError ) {
+ CAMHAL_LOGEB("OMX_SetupTunnel returned error: 0x%x", eError);
+ return BAD_VALUE;
+ }
+
+ return NO_ERROR;
+}
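
setupTunnel() wires the camera's video output port straight into a video encoder's input port with OMX_SetupTunnel(), after enabling the port and pushing the encoder's slice height through OMX_TI_IndexParamVtcSlice. A rough caller-side sketch, under the assumption that the encoder's OMX handle and slice height are obtained elsewhere (the wrapper below is illustrative, not a real CameraHal API):

    #include <stdint.h>

    // Illustrative caller-side wrapper around OMXCameraAdapter::setupTunnel().
    // setupTunnel() expects the encoder handle squeezed into a uint32_t together
    // with the negotiated slice height and the video resolution.
    template <typename Adapter>
    int connectEncoderTunnel(Adapter &adapter, void *encoderHandle,
                             uint32_t sliceHeight, uint32_t width, uint32_t height)
    {
        if (!encoderHandle || sliceHeight == 0) {
            return -1;  // setupTunnel() also rejects these inputs with BAD_VALUE
        }
        return adapter.setupTunnel(sliceHeight,
                                   (uint32_t)(uintptr_t)encoderHandle,
                                   width, height);
    }
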
+
+status_t OMXCameraAdapter::setSensorQuirks(int orientation,
+ OMXCameraPortParameters &portParams,
+ bool &portConfigured)
{
- size_t bufferCount;
+ status_t overclockStatus = NO_ERROR;
+ int sensorID = -1;
+ size_t overclockWidth;
+ size_t overclockHeight;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_PARAM_PORTDEFINITIONTYPE portCheck;
LOG_FUNCTION_NAME;
+ portConfigured = false;
+ OMX_INIT_STRUCT_PTR (&portCheck, OMX_PARAM_PORTDEFINITIONTYPE);
+
+ portCheck.nPortIndex = mCameraAdapterParameters.mPrevPortIndex;
+
+ eError = OMX_GetParameter (mCameraAdapterParameters.mHandleComp,
+ OMX_IndexParamPortDefinition,
+ &portCheck);
+
+ if ( eError != OMX_ErrorNone ) {
+ CAMHAL_LOGEB("OMX_GetParameter - %x", eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
+ }
+
+ if ( ( orientation == 90 ) || ( orientation == 270 ) ) {
+ overclockWidth = 1080;
+ overclockHeight = 1920;
+ } else {
+ overclockWidth = 1920;
+ overclockHeight = 1080;
+ }
+
+ sensorID = mCapabilities->getInt(CameraProperties::CAMERA_SENSOR_ID);
+ if( ( ( sensorID == SENSORID_IMX060 ) &&
+ ( portParams.mWidth >= overclockWidth ) &&
+ ( portParams.mHeight >= overclockHeight ) &&
+ ( portParams.mFrameRate >= FRAME_RATE_FULL_HD ) ) ||
+ (( sensorID == SENSORID_OV14825) &&
+ ( portParams.mFrameRate >= FRAME_RATE_HIGH_HD ))||
+ ( ( sensorID == SENSORID_OV5640 ) &&
+ ( portParams.mWidth >= overclockWidth ) &&
+ ( portParams.mHeight >= overclockHeight ) ) ) {
+ overclockStatus = setSensorOverclock(true);
+ } else {
+
+ //WA: If the next port resolution doesn't require
+ // sensor overclocking, but the previous resolution
+ // needed it, then we have to first set the new port
+ // resolution and only then disable sensor overclocking.
+ if( ( ( sensorID == SENSORID_IMX060 ) &&
+ ( portCheck.format.video.nFrameWidth >= overclockWidth ) &&
+ ( portCheck.format.video.nFrameHeight >= overclockHeight ) &&
+ ( ( portCheck.format.video.xFramerate >> 16 ) >= FRAME_RATE_FULL_HD ) ) ||
+ (( sensorID == SENSORID_OV14825) &&
+ (( portCheck.format.video.xFramerate >> 16) >= FRAME_RATE_HIGH_HD ))||
+ ( ( sensorID == SENSORID_OV5640 ) &&
+ ( portCheck.format.video.nFrameWidth >= overclockWidth ) &&
+ ( portCheck.format.video.nFrameHeight >= overclockHeight ) ) ) {
+ status_t ret = setFormat(mCameraAdapterParameters.mPrevPortIndex,
+ portParams);
+ if ( NO_ERROR != ret ) {
+ return ret;
+ }
+
+ // Another WA: Setting the port definition will reset the VFR
+ // configuration.
+ setVFramerate(portParams.mMinFrameRate, portParams.mMaxFrameRate);
+
+ portConfigured = true;
+ }
+
+ overclockStatus = setSensorOverclock(false);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return overclockStatus;
+}
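
setSensorQuirks() decides per sensor whether the requested resolution and frame rate need sensor overclocking, and works around the ordering constraint where a previously overclocked port must be reprogrammed before overclocking is switched off. A condensed, self-contained sketch of just the decision predicate (the helper name and parameter list are illustrative; the sensor IDs and thresholds mirror the constants used above):

    // Illustrative condensation of the overclock decision in setSensorQuirks().
    static bool needsSensorOverclock(int sensorId,
                                     unsigned width, unsigned height, unsigned fps,
                                     unsigned overclockWidth, unsigned overclockHeight,
                                     unsigned fullHdFps, unsigned highHdFps,
                                     int idImx060, int idOv14825, int idOv5640)
    {
        const bool bigFrame = (width >= overclockWidth) && (height >= overclockHeight);

        if (sensorId == idImx060)  return bigFrame && (fps >= fullHdFps);
        if (sensorId == idOv14825) return fps >= highHdFps;
        if (sensorId == idOv5640)  return bigFrame;
        return false;
    }
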
+status_t OMXCameraAdapter::setFormat(OMX_U32 port, OMXCameraPortParameters &portParams)
+{
+ LOG_FUNCTION_NAME;
+
+ status_t ret = NO_ERROR;
+ size_t bufferCount;
OMX_ERRORTYPE eError = OMX_ErrorNone;
OMX_PARAM_PORTDEFINITIONTYPE portCheck;
@@ -783,152 +1143,153 @@ status_t OMXCameraAdapter::setFormat(OMX_U32 port, OMXCameraPortParameters &port
eError = OMX_GetParameter (mCameraAdapterParameters.mHandleComp,
OMX_IndexParamPortDefinition, &portCheck);
- if(eError!=OMX_ErrorNone)
- {
+ if (eError!=OMX_ErrorNone) {
CAMHAL_LOGEB("OMX_GetParameter - %x", eError);
- }
+ }
GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
- if ( OMX_CAMERA_PORT_VIDEO_OUT_PREVIEW == port )
- {
+ if (OMX_CAMERA_PORT_VIDEO_OUT_PREVIEW == port) {
portCheck.format.video.nFrameWidth = portParams.mWidth;
portCheck.format.video.nFrameHeight = portParams.mHeight;
portCheck.format.video.eColorFormat = portParams.mColorFormat;
portCheck.format.video.nStride = portParams.mStride;
- if( ( portCheck.format.video.nFrameWidth >= 1920 ) &&
- ( portCheck.format.video.nFrameHeight >= 1080 ) &&
- ( portParams.mFrameRate >= FRAME_RATE_FULL_HD ) )
- {
- setSensorOverclock(true);
- }
- else
- {
- setSensorOverclock(false);
- }
portCheck.format.video.xFramerate = portParams.mFrameRate<<16;
portCheck.nBufferSize = portParams.mStride * portParams.mHeight;
portCheck.nBufferCountActual = portParams.mNumBufs;
mFocusThreshold = FOCUS_THRESHOLD * portParams.mFrameRate;
- }
- else if ( OMX_CAMERA_PORT_IMAGE_OUT_IMAGE == port )
- {
+ // Used for RAW capture
+ } else if (OMX_CAMERA_PORT_VIDEO_OUT_VIDEO == port) {
+ portCheck.format.video.nFrameWidth = portParams.mWidth;
+ portCheck.format.video.nFrameHeight = portParams.mHeight;
+ portCheck.format.video.eColorFormat = OMX_COLOR_FormatRawBayer10bit; // portParams.mColorFormat;
+ portCheck.nBufferCountActual = 1; // portParams.mNumBufs;
+ } else if (OMX_CAMERA_PORT_IMAGE_OUT_IMAGE == port) {
portCheck.format.image.nFrameWidth = portParams.mWidth;
portCheck.format.image.nFrameHeight = portParams.mHeight;
- if ( OMX_COLOR_FormatUnused == portParams.mColorFormat && mCodingMode == CodingNone )
- {
- portCheck.format.image.eColorFormat = OMX_COLOR_FormatCbYCrY;
- portCheck.format.image.eCompressionFormat = OMX_IMAGE_CodingJPEG;
- }
- else if ( OMX_COLOR_FormatUnused == portParams.mColorFormat && mCodingMode == CodingJPS )
- {
- portCheck.format.image.eColorFormat = OMX_COLOR_FormatCbYCrY;
- portCheck.format.image.eCompressionFormat = (OMX_IMAGE_CODINGTYPE) OMX_TI_IMAGE_CodingJPS;
- }
- else if ( OMX_COLOR_FormatUnused == portParams.mColorFormat && mCodingMode == CodingMPO )
- {
- portCheck.format.image.eColorFormat = OMX_COLOR_FormatCbYCrY;
- portCheck.format.image.eCompressionFormat = (OMX_IMAGE_CODINGTYPE) OMX_TI_IMAGE_CodingMPO;
+ if (OMX_COLOR_FormatUnused == portParams.mColorFormat) {
+ portCheck.format.image.eColorFormat = OMX_COLOR_FormatCbYCrY;
+ if (mCodingMode == CodingJPEG) {
+ portCheck.format.image.eCompressionFormat = OMX_IMAGE_CodingJPEG;
+ } else if (mCodingMode == CodingJPS) {
+ portCheck.format.image.eCompressionFormat = (OMX_IMAGE_CODINGTYPE) OMX_TI_IMAGE_CodingJPS;
+ } else if (mCodingMode == CodingMPO) {
+ portCheck.format.image.eCompressionFormat = (OMX_IMAGE_CODINGTYPE) OMX_TI_IMAGE_CodingMPO;
+ } else {
+ portCheck.format.image.eCompressionFormat = OMX_IMAGE_CodingUnused;
}
- else if ( OMX_COLOR_FormatUnused == portParams.mColorFormat && mCodingMode == CodingRAWJPEG )
- {
- //TODO: OMX_IMAGE_CodingJPEG should be changed to OMX_IMAGE_CodingRAWJPEG when
- // RAW format is supported
- portCheck.format.image.eColorFormat = OMX_COLOR_FormatCbYCrY;
- portCheck.format.image.eCompressionFormat = OMX_IMAGE_CodingJPEG;
- }
- else if ( OMX_COLOR_FormatUnused == portParams.mColorFormat && mCodingMode == CodingRAWMPO )
- {
- //TODO: OMX_IMAGE_CodingJPEG should be changed to OMX_IMAGE_CodingRAWMPO when
- // RAW format is supported
- portCheck.format.image.eColorFormat = OMX_COLOR_FormatCbYCrY;
- portCheck.format.image.eCompressionFormat = OMX_IMAGE_CodingJPEG;
- }
- else
- {
- portCheck.format.image.eColorFormat = portParams.mColorFormat;
+ } else {
+ portCheck.format.image.eColorFormat = portParams.mColorFormat;
portCheck.format.image.eCompressionFormat = OMX_IMAGE_CodingUnused;
- }
+ }
+#ifdef CAMERAHAL_USE_RAW_IMAGE_SAVING
+ // RAW + YUV Capture
+ if (mYuvCapture) {
+ portCheck.format.image.eColorFormat = OMX_COLOR_FormatCbYCrY;
+ portCheck.format.image.eCompressionFormat = OMX_IMAGE_CodingUnused;
+ }
+#endif
//Stride for 1D tiler buffer is zero
portCheck.format.image.nStride = 0;
- portCheck.nBufferSize = portParams.mStride * portParams.mWidth * portParams.mHeight;
portCheck.nBufferCountActual = portParams.mNumBufs;
- }
- else
- {
- CAMHAL_LOGEB("Unsupported port index 0x%x", (unsigned int)port);
+ } else if (OMX_CAMERA_PORT_VIDEO_IN_VIDEO == port) {
+ portCheck.format.video.nFrameWidth = portParams.mWidth;
+ portCheck.format.video.nStride = portParams.mStride;
+ portCheck.format.video.nFrameHeight = portParams.mHeight;
+ portCheck.format.video.eColorFormat = portParams.mColorFormat;
+ portCheck.format.video.xFramerate = 30 << 16;
+ portCheck.nBufferCountActual = portParams.mNumBufs;
+ } else {
+ CAMHAL_LOGEB("Unsupported port index (%lu)", port);
+ }
+
+ if (( mSensorIndex == OMX_TI_StereoSensor ) && (OMX_CAMERA_PORT_VIDEO_OUT_VIDEO != port)) {
+ ret = setS3DFrameLayout(port);
+ if ( NO_ERROR != ret )
+ {
+ CAMHAL_LOGEA("Error configuring stereo 3D frame layout");
+ return ret;
+ }
}
eError = OMX_SetParameter(mCameraAdapterParameters.mHandleComp,
- OMX_IndexParamPortDefinition, &portCheck);
- if(eError!=OMX_ErrorNone)
- {
+ OMX_IndexParamPortDefinition, &portCheck);
+ if (eError!=OMX_ErrorNone) {
CAMHAL_LOGEB("OMX_SetParameter - %x", eError);
- }
+ }
GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
/* check if parameters are set correctly by calling GetParameter() */
eError = OMX_GetParameter(mCameraAdapterParameters.mHandleComp,
- OMX_IndexParamPortDefinition, &portCheck);
- if(eError!=OMX_ErrorNone)
- {
+ OMX_IndexParamPortDefinition, &portCheck);
+ if (eError!=OMX_ErrorNone) {
CAMHAL_LOGEB("OMX_GetParameter - %x", eError);
- }
+ }
GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
portParams.mBufSize = portCheck.nBufferSize;
portParams.mStride = portCheck.format.image.nStride;
- if ( OMX_CAMERA_PORT_IMAGE_OUT_IMAGE == port )
- {
+ if (OMX_CAMERA_PORT_IMAGE_OUT_IMAGE == port) {
CAMHAL_LOGDB("\n *** IMG Width = %ld", portCheck.format.image.nFrameWidth);
- CAMHAL_LOGDB("\n ***IMG Height = %ld", portCheck.format.image.nFrameHeight);
-
- CAMHAL_LOGDB("\n ***IMG IMG FMT = %x", portCheck.format.image.eColorFormat);
- CAMHAL_LOGDB("\n ***IMG portCheck.nBufferSize = %ld\n",portCheck.nBufferSize);
- CAMHAL_LOGDB("\n ***IMG portCheck.nBufferCountMin = %ld\n",
- portCheck.nBufferCountMin);
- CAMHAL_LOGDB("\n ***IMG portCheck.nBufferCountActual = %ld\n",
- portCheck.nBufferCountActual);
- CAMHAL_LOGDB("\n ***IMG portCheck.format.image.nStride = %ld\n",
- portCheck.format.image.nStride);
- }
- else
- {
+ CAMHAL_LOGDB("\n *** IMG Height = %ld", portCheck.format.image.nFrameHeight);
+
+ CAMHAL_LOGDB("\n *** IMG IMG FMT = %x", portCheck.format.image.eColorFormat);
+ CAMHAL_LOGDB("\n *** IMG portCheck.nBufferSize = %ld\n",portCheck.nBufferSize);
+ CAMHAL_LOGDB("\n *** IMG portCheck.nBufferCountMin = %ld\n",
+ portCheck.nBufferCountMin);
+ CAMHAL_LOGDB("\n *** IMG portCheck.nBufferCountActual = %ld\n",
+ portCheck.nBufferCountActual);
+ CAMHAL_LOGDB("\n *** IMG portCheck.format.image.nStride = %ld\n",
+ portCheck.format.image.nStride);
+ } else if (OMX_CAMERA_PORT_VIDEO_OUT_PREVIEW == port) {
CAMHAL_LOGDB("\n *** PRV Width = %ld", portCheck.format.video.nFrameWidth);
- CAMHAL_LOGDB("\n ***PRV Height = %ld", portCheck.format.video.nFrameHeight);
-
- CAMHAL_LOGDB("\n ***PRV IMG FMT = %x", portCheck.format.video.eColorFormat);
- CAMHAL_LOGDB("\n ***PRV portCheck.nBufferSize = %ld\n",portCheck.nBufferSize);
- CAMHAL_LOGDB("\n ***PRV portCheck.nBufferCountMin = %ld\n",
- portCheck.nBufferCountMin);
- CAMHAL_LOGDB("\n ***PRV portCheck.nBufferCountActual = %ld\n",
- portCheck.nBufferCountActual);
+ CAMHAL_LOGDB("\n *** PRV Height = %ld", portCheck.format.video.nFrameHeight);
+
+ CAMHAL_LOGDB("\n *** PRV IMG FMT = %x", portCheck.format.video.eColorFormat);
+ CAMHAL_LOGDB("\n *** PRV portCheck.nBufferSize = %ld\n",portCheck.nBufferSize);
+ CAMHAL_LOGDB("\n *** PRV portCheck.nBufferCountMin = %ld\n",
+ portCheck.nBufferCountMin);
+ CAMHAL_LOGDB("\n *** PRV portCheck.nBufferCountActual = %ld\n",
+ portCheck.nBufferCountActual);
CAMHAL_LOGDB("\n ***PRV portCheck.format.video.nStride = %ld\n",
- portCheck.format.video.nStride);
- }
+ portCheck.format.video.nStride);
+ } else {
+ CAMHAL_LOGDB("\n *** VID Width = %ld", portCheck.format.video.nFrameWidth);
+ CAMHAL_LOGDB("\n *** VID Height = %ld", portCheck.format.video.nFrameHeight);
+
+ CAMHAL_LOGDB("\n *** VID IMG FMT = %x", portCheck.format.video.eColorFormat);
+ CAMHAL_LOGDB("\n *** VID portCheck.nBufferSize = %ld\n",portCheck.nBufferSize);
+ CAMHAL_LOGDB("\n *** VID portCheck.nBufferCountMin = %ld\n",
+ portCheck.nBufferCountMin);
+ CAMHAL_LOGDB("\n *** VID portCheck.nBufferCountActual = %ld\n",
+ portCheck.nBufferCountActual);
+ CAMHAL_LOGDB("\n *** VID portCheck.format.video.nStride = %ld\n",
+ portCheck.format.video.nStride);
+ }
+
+ mSetFormatDone = true;
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
EXIT:
- CAMHAL_LOGEB("Exiting function %s because of eError=%x", __FUNCTION__, eError);
+ CAMHAL_LOGEB("Exiting function %s because of eError = 0x%x", __FUNCTION__, eError);
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
-status_t OMXCameraAdapter::flushBuffers()
+status_t OMXCameraAdapter::flushBuffers(OMX_U32 nPort)
{
+ LOG_FUNCTION_NAME;
+
status_t ret = NO_ERROR;
OMX_ERRORTYPE eError = OMX_ErrorNone;
- TIMM_OSAL_ERRORTYPE err;
- TIMM_OSAL_U32 uRequestedEvents = OMXCameraAdapter::CAMERA_PORT_FLUSH;
- TIMM_OSAL_U32 pRetrievedEvents;
if ( 0 != mFlushSem.Count() )
{
@@ -937,10 +1298,8 @@ status_t OMXCameraAdapter::flushBuffers()
return NO_INIT;
}
- LOG_FUNCTION_NAME;
-
OMXCameraPortParameters * mPreviewData = NULL;
- mPreviewData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex];
+ mPreviewData = &mCameraAdapterParameters.mCameraPortParams[nPort];
///Register for the FLUSH event
///This method just inserts a message in Event Q, which is checked in the callback
@@ -948,7 +1307,7 @@ status_t OMXCameraAdapter::flushBuffers()
ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp,
OMX_EventCmdComplete,
OMX_CommandFlush,
- OMX_CAMERA_PORT_VIDEO_OUT_PREVIEW,
+ nPort,
mFlushSem);
if(ret!=NO_ERROR)
{
@@ -959,7 +1318,7 @@ status_t OMXCameraAdapter::flushBuffers()
///Send FLUSH command to preview port
eError = OMX_SendCommand (mCameraAdapterParameters.mHandleComp,
OMX_CommandFlush,
- mCameraAdapterParameters.mPrevPortIndex,
+ nPort,
NULL);
if(eError!=OMX_ErrorNone)
@@ -989,25 +1348,27 @@ status_t OMXCameraAdapter::flushBuffers()
ret |= RemoveEvent(mCameraAdapterParameters.mHandleComp,
OMX_EventCmdComplete,
OMX_CommandFlush,
- OMX_CAMERA_PORT_VIDEO_OUT_PREVIEW,
+ nPort,
NULL);
CAMHAL_LOGDA("Flush event timeout expired");
goto EXIT;
}
+ mOMXCallbackHandler->flush();
+
LOG_FUNCTION_NAME_EXIT;
- return (ret | ErrorUtils::omxToAndroidError(eError));
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
EXIT:
CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError);
performCleanupAfterError();
LOG_FUNCTION_NAME_EXIT;
- return (ret | ErrorUtils::omxToAndroidError(eError));
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
}
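
flushBuffers() follows the same command/acknowledge pattern used throughout this adapter: register interest in the OMX completion event, issue the command, block on a semaphore with a timeout, and unregister the stale event on timeout. A generic, self-contained sketch of that shape with stand-in types (none of these names are the adapter's real helpers):

    #include <functional>

    // Stand-in semaphore for illustration; the HAL uses its own Semaphore class.
    struct StubSemaphore {
        bool WaitTimeout(int /*timeoutUs*/) { return true; }  // pretend the event arrived
    };

    // Generic shape of "send an OMX command and wait for its completion event".
    static bool sendCommandAndWait(const std::function<bool()> &registerForEvent,
                                   const std::function<bool()> &sendCommand,
                                   const std::function<void()> &removeEvent,
                                   StubSemaphore &sem, int timeoutUs)
    {
        // Register first so a fast completion cannot be missed.
        if (!registerForEvent() || !sendCommand()) {
            return false;
        }
        if (!sem.WaitTimeout(timeoutUs)) {
            removeEvent();  // drop the registration that will never be signalled
            return false;
        }
        return true;
    }
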
///API to give the buffers to Adapter
-status_t OMXCameraAdapter::useBuffers(CameraMode mode, void* bufArr, int num, size_t length, unsigned int queueable)
+status_t OMXCameraAdapter::useBuffers(CameraMode mode, CameraBuffer * bufArr, int num, size_t length, unsigned int queueable)
{
OMX_ERRORTYPE eError = OMX_ErrorNone;
status_t ret = NO_ERROR;
@@ -1023,15 +1384,15 @@ status_t OMXCameraAdapter::useBuffers(CameraMode mode, void* bufArr, int num, si
break;
case CAMERA_IMAGE_CAPTURE:
- mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex].mNumBufs = num;
mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex].mMaxQueueable = queueable;
ret = UseBuffersCapture(bufArr, num);
+ mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex].mNumBufs = num;
break;
case CAMERA_VIDEO:
- mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex].mNumBufs = num;
- mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex].mMaxQueueable = queueable;
- ret = UseBuffersPreview(bufArr, num);
+ mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mVideoPortIndex].mNumBufs = num;
+ mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mVideoPortIndex].mMaxQueueable = queueable;
+ ret = UseBuffersRawCapture(bufArr, num);
break;
case CAMERA_MEASUREMENT:
@@ -1040,6 +1401,11 @@ status_t OMXCameraAdapter::useBuffers(CameraMode mode, void* bufArr, int num, si
ret = UseBuffersPreviewData(bufArr, num);
break;
+ case CAMERA_REPROCESS:
+ mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mVideoInPortIndex].mNumBufs = num;
+ mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mVideoInPortIndex].mMaxQueueable = queueable;
+ ret = UseBuffersReprocess(bufArr, num);
+ break;
}
LOG_FUNCTION_NAME_EXIT;
@@ -1047,13 +1413,12 @@ status_t OMXCameraAdapter::useBuffers(CameraMode mode, void* bufArr, int num, si
return ret;
}
-status_t OMXCameraAdapter::UseBuffersPreviewData(void* bufArr, int num)
+status_t OMXCameraAdapter::UseBuffersPreviewData(CameraBuffer * bufArr, int num)
{
status_t ret = NO_ERROR;
OMX_ERRORTYPE eError = OMX_ErrorNone;
OMXCameraPortParameters * measurementData = NULL;
- uint32_t *buffers;
- Mutex::Autolock lock( mPreviewDataBufferLock);
+ android::AutoMutex lock(mPreviewDataBufferLock);
LOG_FUNCTION_NAME;
@@ -1080,7 +1445,6 @@ status_t OMXCameraAdapter::UseBuffersPreviewData(void* bufArr, int num)
{
measurementData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mMeasurementPortIndex];
measurementData->mNumBufs = num ;
- buffers= (uint32_t*) bufArr;
}
if ( NO_ERROR == ret )
@@ -1158,13 +1522,13 @@ EXIT:
CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError);
performCleanupAfterError();
LOG_FUNCTION_NAME_EXIT;
- return (ret | ErrorUtils::omxToAndroidError(eError));
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
}
status_t OMXCameraAdapter::switchToExecuting()
{
status_t ret = NO_ERROR;
- TIUTILS::Message msg;
+ Utils::Message msg;
LOG_FUNCTION_NAME;
@@ -1173,7 +1537,7 @@ status_t OMXCameraAdapter::switchToExecuting()
msg.arg1 = mErrorNotifier;
ret = mCommandHandler->put(&msg);
- LOG_FUNCTION_NAME;
+ LOG_FUNCTION_NAME_EXIT;
return ret;
}
@@ -1275,23 +1639,21 @@ status_t OMXCameraAdapter::doSwitchToExecuting()
performCleanupAfterError();
mStateSwitchLock.unlock();
LOG_FUNCTION_NAME_EXIT;
- return (ret | ErrorUtils::omxToAndroidError(eError));
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
}
-status_t OMXCameraAdapter::switchToLoaded()
-{
+status_t OMXCameraAdapter::switchToIdle() {
status_t ret = NO_ERROR;
OMX_ERRORTYPE eError = OMX_ErrorNone;
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(mStateSwitchLock);
+ android::AutoMutex lock(mIdleStateSwitchLock);
- if ( mComponentState == OMX_StateLoaded || mComponentState == OMX_StateInvalid)
- {
- CAMHAL_LOGDA("Already in OMX_Loaded state or OMX_StateInvalid state");
+ if ( mComponentState == OMX_StateIdle || mComponentState == OMX_StateLoaded || mComponentState == OMX_StateInvalid) {
+ CAMHAL_LOGDA("Already in OMX_StateIdle, OMX_Loaded state or OMX_StateInvalid state");
return NO_ERROR;
- }
+ }
if ( 0 != mSwitchToLoadedSem.Count() )
{
@@ -1353,6 +1715,107 @@ status_t OMXCameraAdapter::switchToLoaded()
goto EXIT;
}
+ mComponentState = OMX_StateIdle;
+
+ return NO_ERROR;
+
+EXIT:
+ CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError);
+ performCleanupAfterError();
+ LOG_FUNCTION_NAME_EXIT;
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
+}
+
+
+
+status_t OMXCameraAdapter::prevPortEnable() {
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+
+ LOG_FUNCTION_NAME;
+
+ ///Register for Preview port ENABLE event
+ ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp,
+ OMX_EventCmdComplete,
+ OMX_CommandPortEnable,
+ mCameraAdapterParameters.mPrevPortIndex,
+ mSwitchToLoadedSem);
+
+ if ( NO_ERROR != ret )
+ {
+ CAMHAL_LOGEB("Error in registering for event %d", ret);
+ goto EXIT;
+ }
+
+ ///Enable Preview Port
+ eError = OMX_SendCommand(mCameraAdapterParameters.mHandleComp,
+ OMX_CommandPortEnable,
+ mCameraAdapterParameters.mPrevPortIndex,
+ NULL);
+
+
+ CAMHAL_LOGDB("OMX_SendCommand(OMX_CommandStateSet) 0x%x", eError);
+ GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
+
+ CAMHAL_LOGDA("Enabling Preview port");
+ ///Wait for state to switch to idle
+ ret = mSwitchToLoadedSem.WaitTimeout(OMX_CMD_TIMEOUT);
+
+ //If something bad happened while we wait
+ if (mComponentState == OMX_StateInvalid)
+ {
+ CAMHAL_LOGEA("Invalid State after Enabling Preview port Exitting!!!");
+ goto EXIT;
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ CAMHAL_LOGDA("Preview port enabled!");
+ }
+ else
+ {
+ ret |= RemoveEvent(mCameraAdapterParameters.mHandleComp,
+ OMX_EventCmdComplete,
+ OMX_CommandPortEnable,
+ mCameraAdapterParameters.mPrevPortIndex,
+ NULL);
+ CAMHAL_LOGEA("Preview enable timedout");
+
+ goto EXIT;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
+
+EXIT:
+ CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError);
+ performCleanupAfterError();
+ LOG_FUNCTION_NAME_EXIT;
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
+}
+
+status_t OMXCameraAdapter::switchToLoaded(bool bPortEnableRequired) {
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+
+ LOG_FUNCTION_NAME;
+
+ android::AutoMutex lock(mStateSwitchLock);
+ if ( mComponentState == OMX_StateLoaded || mComponentState == OMX_StateInvalid) {
+ CAMHAL_LOGDA("Already in OMX_Loaded state or OMX_StateInvalid state");
+ return NO_ERROR;
+ }
+
+ if ( mComponentState != OMX_StateIdle) {
+ ret = switchToIdle();
+ if (ret != NO_ERROR) return ret;
+ }
+
+ if ( 0 != mSwitchToLoadedSem.Count() ) {
+ CAMHAL_LOGEB("Error mSwitchToLoadedSem semaphore count %d", mSwitchToLoadedSem.Count());
+ goto EXIT;
+ }
+
///Register for LOADED state transition.
///This method just inserts a message in Event Q, which is checked in the callback
///The sempahore passed is signalled by the callback
@@ -1379,93 +1842,91 @@ status_t OMXCameraAdapter::switchToLoaded()
}
GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
- CAMHAL_LOGDA("Switching IDLE->LOADED state");
- ret = mSwitchToLoadedSem.WaitTimeout(OMX_CMD_TIMEOUT);
-
- //If somethiing bad happened while we wait
- if (mComponentState == OMX_StateInvalid)
- {
- CAMHAL_LOGEA("Invalid State after IDLE->LOADED Exitting!!!");
- goto EXIT;
- }
+ if ( !bPortEnableRequired ) {
+ OMXCameraPortParameters *mCaptureData , *mPreviewData, *measurementData;
+ mCaptureData = mPreviewData = measurementData = NULL;
- if ( NO_ERROR == ret )
- {
- CAMHAL_LOGDA("IDLE->LOADED state changed");
- }
- else
- {
- ret |= RemoveEvent(mCameraAdapterParameters.mHandleComp,
- OMX_EventCmdComplete,
- OMX_CommandStateSet,
- OMX_StateLoaded,
- NULL);
- CAMHAL_LOGEA("Timeout expired on IDLE->LOADED state change");
- goto EXIT;
- }
-
- mComponentState = OMX_StateLoaded;
+ mPreviewData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex];
+ mCaptureData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex];
+ measurementData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mMeasurementPortIndex];
- ///Register for Preview port ENABLE event
- ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp,
- OMX_EventCmdComplete,
- OMX_CommandPortEnable,
- mCameraAdapterParameters.mPrevPortIndex,
- mSwitchToLoadedSem);
+ ///Free the OMX Buffers
+ for ( int i = 0 ; i < mPreviewData->mNumBufs ; i++ ) {
+ eError = OMX_FreeBuffer(mCameraAdapterParameters.mHandleComp,
+ mCameraAdapterParameters.mPrevPortIndex,
+ mPreviewData->mBufferHeader[i]);
- if ( NO_ERROR != ret )
- {
- CAMHAL_LOGEB("Error in registering for event %d", ret);
- goto EXIT;
+ if(eError!=OMX_ErrorNone) {
+ CAMHAL_LOGEB("OMX_FreeBuffer - %x", eError);
+ }
+ GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
}
- ///Enable Preview Port
- eError = OMX_SendCommand(mCameraAdapterParameters.mHandleComp,
- OMX_CommandPortEnable,
- mCameraAdapterParameters.mPrevPortIndex,
- NULL);
+ if ( mMeasurementEnabled ) {
+ for ( int i = 0 ; i < measurementData->mNumBufs ; i++ ) {
+ eError = OMX_FreeBuffer(mCameraAdapterParameters.mHandleComp,
+ mCameraAdapterParameters.mMeasurementPortIndex,
+ measurementData->mBufferHeader[i]);
+ if(eError!=OMX_ErrorNone) {
+ CAMHAL_LOGEB("OMX_FreeBuffer - %x", eError);
+ }
+ GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
+ }
- CAMHAL_LOGDB("OMX_SendCommand(OMX_CommandStateSet) 0x%x", eError);
- GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
+ {
+ android::AutoMutex lock(mPreviewDataBufferLock);
+ mPreviewDataBuffersAvailable.clear();
+ }
- CAMHAL_LOGDA("Enabling Preview port");
- ///Wait for state to switch to idle
+ }
+ }
+
+ CAMHAL_LOGDA("Switching IDLE->LOADED state");
ret = mSwitchToLoadedSem.WaitTimeout(OMX_CMD_TIMEOUT);
//If somethiing bad happened while we wait
if (mComponentState == OMX_StateInvalid)
{
- CAMHAL_LOGEA("Invalid State after Enabling Preview port Exitting!!!");
+ CAMHAL_LOGEA("Invalid State after IDLE->LOADED Exitting!!!");
goto EXIT;
}
if ( NO_ERROR == ret )
{
- CAMHAL_LOGDA("Preview port enabled!");
+ CAMHAL_LOGDA("IDLE->LOADED state changed");
}
else
{
ret |= RemoveEvent(mCameraAdapterParameters.mHandleComp,
OMX_EventCmdComplete,
- OMX_CommandPortEnable,
- mCameraAdapterParameters.mPrevPortIndex,
+ OMX_CommandStateSet,
+ OMX_StateLoaded,
NULL);
- CAMHAL_LOGEA("Preview enable timedout");
-
+ CAMHAL_LOGEA("Timeout expired on IDLE->LOADED state change");
goto EXIT;
}
- return (ret | ErrorUtils::omxToAndroidError(eError));
+ mComponentState = OMX_StateLoaded;
+ if (bPortEnableRequired == true) {
+ prevPortEnable();
+ }
+
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
EXIT:
CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError);
+ {
+ android::AutoMutex lock(mPreviewBufferLock);
+ ///Clear all the available preview buffers
+ mPreviewBuffersAvailable.clear();
+ }
performCleanupAfterError();
LOG_FUNCTION_NAME_EXIT;
- return (ret | ErrorUtils::omxToAndroidError(eError));
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
}
-status_t OMXCameraAdapter::UseBuffersPreview(void* bufArr, int num)
+status_t OMXCameraAdapter::UseBuffersPreview(CameraBuffer * bufArr, int num)
{
status_t ret = NO_ERROR;
OMX_ERRORTYPE eError = OMX_ErrorNone;
@@ -1485,7 +1946,6 @@ status_t OMXCameraAdapter::UseBuffersPreview(void* bufArr, int num)
mPreviewData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex];
measurementData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mMeasurementPortIndex];
mPreviewData->mNumBufs = num ;
- uint32_t *buffers = (uint32_t*)bufArr;
if ( 0 != mUsePreviewSem.Count() )
{
@@ -1503,70 +1963,53 @@ status_t OMXCameraAdapter::UseBuffersPreview(void* bufArr, int num)
mStateSwitchLock.lock();
- if ( mComponentState == OMX_StateLoaded )
- {
+ if ( mComponentState == OMX_StateLoaded ) {
- ret = setLDC(mIPP);
- if ( NO_ERROR != ret )
- {
- CAMHAL_LOGEB("setLDC() failed %d", ret);
- LOG_FUNCTION_NAME_EXIT;
- return ret;
+ if (mPendingPreviewSettings & SetLDC) {
+ mPendingPreviewSettings &= ~SetLDC;
+ ret = setLDC(mIPP);
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGEB("setLDC() failed %d", ret);
}
+ }
- ret = setNSF(mIPP);
- if ( NO_ERROR != ret )
- {
- CAMHAL_LOGEB("setNSF() failed %d", ret);
- LOG_FUNCTION_NAME_EXIT;
- return ret;
+ if (mPendingPreviewSettings & SetNSF) {
+ mPendingPreviewSettings &= ~SetNSF;
+ ret = setNSF(mIPP);
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGEB("setNSF() failed %d", ret);
}
+ }
- ret = setCaptureMode(mCapMode);
- if ( NO_ERROR != ret )
- {
- CAMHAL_LOGEB("setCaptureMode() failed %d", ret);
- LOG_FUNCTION_NAME_EXIT;
- return ret;
+ if (mPendingPreviewSettings & SetCapMode) {
+ mPendingPreviewSettings &= ~SetCapMode;
+ ret = setCaptureMode(mCapMode);
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGEB("setCaptureMode() failed %d", ret);
}
+ }
- CAMHAL_LOGDB("Camera Mode = %d", mCapMode);
-
- if( mCapMode == OMXCameraAdapter::VIDEO_MODE )
- {
- ///Enable/Disable Video Noise Filter
- ret = enableVideoNoiseFilter(mVnfEnabled);
- if ( NO_ERROR != ret)
- {
- CAMHAL_LOGEB("Error configuring VNF %x", ret);
- return ret;
- }
+ if( (mCapMode == OMXCameraAdapter::VIDEO_MODE) ||
+ (mCapMode == OMXCameraAdapter::VIDEO_MODE_HQ) ) {
- ///Enable/Disable Video Stabilization
- ret = enableVideoStabilization(mVstabEnabled);
- if ( NO_ERROR != ret)
- {
- CAMHAL_LOGEB("Error configuring VSTAB %x", ret);
- return ret;
+ if (mPendingPreviewSettings & SetVNF) {
+ mPendingPreviewSettings &= ~SetVNF;
+ ret = enableVideoNoiseFilter(mVnfEnabled);
+ if ( NO_ERROR != ret){
+ CAMHAL_LOGEB("Error configuring VNF %x", ret);
}
}
- else
- {
- ret = enableVideoNoiseFilter(false);
- if ( NO_ERROR != ret)
- {
- CAMHAL_LOGEB("Error configuring VNF %x", ret);
- return ret;
- }
- ///Enable/Disable Video Stabilization
- ret = enableVideoStabilization(false);
- if ( NO_ERROR != ret)
- {
- CAMHAL_LOGEB("Error configuring VSTAB %x", ret);
- return ret;
+
+ if (mPendingPreviewSettings & SetVSTAB) {
+ mPendingPreviewSettings &= ~SetVSTAB;
+ ret = enableVideoStabilization(mVstabEnabled);
+ if ( NO_ERROR != ret) {
+ CAMHAL_LOGEB("Error configuring VSTAB %x", ret);
}
}
+
}
+ }
ret = setSensorOrientation(mSensorOrientation);
if ( NO_ERROR != ret )
@@ -1575,14 +2018,6 @@ status_t OMXCameraAdapter::UseBuffersPreview(void* bufArr, int num)
mSensorOrientation = 0;
}
- ret = setVFramerate(mPreviewData->mMinFrameRate, mPreviewData->mMaxFrameRate);
- if ( ret != NO_ERROR )
- {
- CAMHAL_LOGEB("VFR configuration failed 0x%x", ret);
- LOG_FUNCTION_NAME_EXIT;
- return ret;
- }
-
if ( mComponentState == OMX_StateLoaded )
{
///Register for IDLE state switch event
@@ -1650,21 +2085,22 @@ status_t OMXCameraAdapter::UseBuffersPreview(void* bufArr, int num)
OMX_BUFFERHEADERTYPE *pBufferHdr;
for(int index=0;index<num;index++) {
+ OMX_U8 *ptr;
- CAMHAL_LOGDB("OMX_UseBuffer(0x%x)", buffers[index]);
+ ptr = (OMX_U8 *)camera_buffer_get_omx_ptr (&bufArr[index]);
eError = OMX_UseBuffer( mCameraAdapterParameters.mHandleComp,
&pBufferHdr,
mCameraAdapterParameters.mPrevPortIndex,
0,
mPreviewData->mBufSize,
- (OMX_U8*)buffers[index]);
+ ptr);
if(eError!=OMX_ErrorNone)
{
CAMHAL_LOGEB("OMX_UseBuffer-0x%x", eError);
}
GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
- //pBufferHdr->pAppPrivate = (OMX_PTR)pBufferHdr;
+ pBufferHdr->pAppPrivate = (OMX_PTR)&bufArr[index];
pBufferHdr->nSize = sizeof(OMX_BUFFERHEADERTYPE);
pBufferHdr->nVersion.s.nVersionMajor = 1 ;
pBufferHdr->nVersion.s.nVersionMinor = 1 ;
@@ -1679,15 +2115,19 @@ status_t OMXCameraAdapter::UseBuffersPreview(void* bufArr, int num)
for( int i = 0; i < num; i++ )
{
OMX_BUFFERHEADERTYPE *pBufHdr;
+ OMX_U8 *ptr;
+
+ ptr = (OMX_U8 *)camera_buffer_get_omx_ptr (&mPreviewDataBuffers[i]);
eError = OMX_UseBuffer( mCameraAdapterParameters.mHandleComp,
&pBufHdr,
mCameraAdapterParameters.mMeasurementPortIndex,
0,
measurementData->mBufSize,
- (OMX_U8*)(mPreviewDataBuffers[i]));
+ ptr);
if ( eError == OMX_ErrorNone )
{
+ pBufHdr->pAppPrivate = (OMX_PTR *)&mPreviewDataBuffers[i];
pBufHdr->nSize = sizeof(OMX_BUFFERHEADERTYPE);
pBufHdr->nVersion.s.nVersionMajor = 1 ;
pBufHdr->nVersion.s.nVersionMinor = 1 ;
@@ -1744,7 +2184,7 @@ status_t OMXCameraAdapter::UseBuffersPreview(void* bufArr, int num)
LOG_FUNCTION_NAME_EXIT;
- return (ret | ErrorUtils::omxToAndroidError(eError));
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
///If there is any failure, we reach here.
///Here, we do any resource freeing and convert from OMX error code to Camera Hal error code
@@ -1757,7 +2197,7 @@ EXIT:
LOG_FUNCTION_NAME_EXIT;
- return (ret | ErrorUtils::omxToAndroidError(eError));
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
}
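
A notable change in UseBuffersPreview() (and in the other buffer paths) is that each OMX buffer header now carries its owning CameraBuffer in pAppPrivate, and lookups such as fillThisBuffer() match against that instead of the raw pBuffer pointer; camera_buffer_get_omx_ptr() supplies whatever address the OMX component should actually dereference. A minimal sketch of the association and the reverse lookup (stand-in structs, not the real OMX or CameraHal types):

    // Illustrative stand-ins for OMX_BUFFERHEADERTYPE and CameraBuffer.
    struct StubCameraBuffer { void *mapped; };
    struct StubOmxHeader    { void *pBuffer; void *pAppPrivate; };

    // Attach the HAL-side buffer to the header when the header is created...
    static void attachBuffer(StubOmxHeader &hdr, StubCameraBuffer &buf, void *omxPtr)
    {
        hdr.pBuffer     = omxPtr;   // what the OMX component dereferences
        hdr.pAppPrivate = &buf;     // how the HAL finds its own buffer again
    }

    // ...and recover it later when the component hands the header back.
    static StubCameraBuffer *ownerOf(const StubOmxHeader &hdr)
    {
        return static_cast<StubCameraBuffer *>(hdr.pAppPrivate);
    }
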
status_t OMXCameraAdapter::startPreview()
@@ -1776,6 +2216,12 @@ status_t OMXCameraAdapter::startPreview()
goto EXIT;
}
+ // Enable all preview mode extra data.
+ if ( OMX_ErrorNone == eError) {
+ ret |= setExtraData(true, mCameraAdapterParameters.mPrevPortIndex, OMX_AncillaryData);
+ ret |= setExtraData(true, OMX_ALL, OMX_TI_VectShotInfo);
+ }
+
mPreviewData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex];
measurementData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mMeasurementPortIndex];
@@ -1838,11 +2284,11 @@ status_t OMXCameraAdapter::startPreview()
mStateSwitchLock.unlock();
- apply3Asettings(mParameters3A);
//Queue all the buffers on preview port
for(int index=0;index< mPreviewData->mMaxQueueable;index++)
{
CAMHAL_LOGDB("Queuing buffer on Preview port - 0x%x", (uint32_t)mPreviewData->mBufferHeader[index]->pBuffer);
+ mPreviewData->mStatus[index] = OMXCameraPortParameters::FILL;
eError = OMX_FillThisBuffer(mCameraAdapterParameters.mHandleComp,
(OMX_BUFFERHEADERTYPE*)mPreviewData->mBufferHeader[index]);
if(eError!=OMX_ErrorNone)
@@ -1850,8 +2296,8 @@ status_t OMXCameraAdapter::startPreview()
CAMHAL_LOGEB("OMX_FillThisBuffer-0x%x", eError);
}
mFramesWithDucati++;
-#ifdef DEGUG_LOG
- mBuffersWithDucati.add((uint32_t)mPreviewData->mBufferHeader[index]->pBuffer,1);
+#ifdef CAMERAHAL_DEBUG
+ mBuffersWithDucati.add((int)mPreviewData->mBufferHeader[index]->pBuffer,1);
#endif
GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
}
@@ -1862,6 +2308,7 @@ status_t OMXCameraAdapter::startPreview()
for(int index=0;index< mPreviewData->mNumBufs;index++)
{
CAMHAL_LOGDB("Queuing buffer on Measurement port - 0x%x", (uint32_t) measurementData->mBufferHeader[index]->pBuffer);
+ measurementData->mStatus[index] = OMXCameraPortParameters::FILL;
eError = OMX_FillThisBuffer(mCameraAdapterParameters.mHandleComp,
(OMX_BUFFERHEADERTYPE*) measurementData->mBufferHeader[index]);
if(eError!=OMX_ErrorNone)
@@ -1873,22 +2320,7 @@ status_t OMXCameraAdapter::startPreview()
}
- // Enable Ancillary data. The nDCCStatus field is used to signify
- // whether the preview frame is a snapshot
- if ( OMX_ErrorNone == eError)
- {
- ret = setExtraData(true, OMX_ALL, OMX_AncillaryData);
- }
-
-
- if ( mPending3Asettings )
- apply3Asettings(mParameters3A);
-
- // enable focus callbacks just once here
- // fixes an issue with slow callback registration in Ducati
- if ( NO_ERROR == ret ) {
- ret = setFocusCallback(true);
- }
+ setFocusCallback(true);
//reset frame rate estimates
mFPS = 0.0f;
@@ -1904,10 +2336,11 @@ status_t OMXCameraAdapter::startPreview()
mLastFrameCount = 0;
mIter = 1;
mLastFPSTime = systemTime();
+ mTunnelDestroyed = false;
LOG_FUNCTION_NAME_EXIT;
- return (ret | ErrorUtils::omxToAndroidError(eError));
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
EXIT:
@@ -1916,11 +2349,11 @@ status_t OMXCameraAdapter::startPreview()
mStateSwitchLock.unlock();
LOG_FUNCTION_NAME_EXIT;
- return (ret | ErrorUtils::omxToAndroidError(eError));
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
}
-status_t OMXCameraAdapter::stopPreview()
+status_t OMXCameraAdapter::destroyTunnel()
{
LOG_FUNCTION_NAME;
@@ -1934,13 +2367,13 @@ status_t OMXCameraAdapter::stopPreview()
mCaptureData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex];
measurementData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mMeasurementPortIndex];
- if (mAdapterState == LOADED_PREVIEW_STATE) {
- // Something happened in CameraHal between UseBuffers and startPreview
- // this means that state switch is still locked..so we need to unlock else
- // deadlock will occur on the next start preview
- mStateSwitchLock.unlock();
- return NO_ERROR;
- }
+ if (mAdapterState == LOADED_PREVIEW_STATE) {
+        // Something happened in CameraHal between UseBuffers and startPreview;
+        // this means the state switch is still locked, so we need to unlock it
+        // or a deadlock will occur on the next start preview
+ mStateSwitchLock.unlock();
+ return ALREADY_EXISTS;
+ }
if ( mComponentState != OMX_StateExecuting )
{
@@ -1950,7 +2383,7 @@ status_t OMXCameraAdapter::stopPreview()
}
{
- Mutex::Autolock lock(mFrameCountMutex);
+ android::AutoMutex lock(mFrameCountMutex);
// we should wait for the first frame to come before trying to stopPreview...if not
// we might put OMXCamera in a bad state (IDLE->LOADED timeout). Seeing this a lot
// after a capture
@@ -1969,11 +2402,9 @@ status_t OMXCameraAdapter::stopPreview()
mFirstFrameCondition.broadcast();
}
- ret = cancelAutoFocus();
- if(ret!=NO_ERROR)
{
- CAMHAL_LOGEB("Error canceling autofocus %d", ret);
- // Error, but we probably still want to continue to stop preview
+ android::AutoMutex lock(mDoAFMutex);
+ mDoAFCond.broadcast();
}
OMX_CONFIG_FOCUSASSISTTYPE focusAssist;
@@ -2016,110 +2447,63 @@ status_t OMXCameraAdapter::stopPreview()
goto EXIT;
}
- ///Register for Preview port Disable event
- ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp,
- OMX_EventCmdComplete,
- OMX_CommandPortDisable,
- mCameraAdapterParameters.mPrevPortIndex,
- mStopPreviewSem);
-
- ///Disable Preview Port
- eError = OMX_SendCommand(mCameraAdapterParameters.mHandleComp,
- OMX_CommandPortDisable,
- mCameraAdapterParameters.mPrevPortIndex,
- NULL);
-
- ///Free the OMX Buffers
- for ( int i = 0 ; i < mPreviewData->mNumBufs ; i++ )
- {
- eError = OMX_FreeBuffer(mCameraAdapterParameters.mHandleComp,
- mCameraAdapterParameters.mPrevPortIndex,
- mPreviewData->mBufferHeader[i]);
+ switchToIdle();
- if(eError!=OMX_ErrorNone)
- {
- CAMHAL_LOGEB("OMX_FreeBuffer - %x", eError);
- }
- GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
- }
-
- if ( mMeasurementEnabled )
- {
-
- for ( int i = 0 ; i < measurementData->mNumBufs ; i++ )
- {
- eError = OMX_FreeBuffer(mCameraAdapterParameters.mHandleComp,
- mCameraAdapterParameters.mMeasurementPortIndex,
- measurementData->mBufferHeader[i]);
- if(eError!=OMX_ErrorNone)
- {
- CAMHAL_LOGEB("OMX_FreeBuffer - %x", eError);
- }
- GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
- }
+ mTunnelDestroyed = true;
+ LOG_FUNCTION_NAME_EXIT;
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
- {
- Mutex::Autolock lock(mPreviewDataBufferLock);
- mPreviewDataBuffersAvailable.clear();
- }
+EXIT:
+ CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError);
+ {
+ android::AutoMutex lock(mPreviewBufferLock);
+ ///Clear all the available preview buffers
+ mPreviewBuffersAvailable.clear();
+ }
+ performCleanupAfterError();
+ LOG_FUNCTION_NAME_EXIT;
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
- }
+}
- CAMHAL_LOGDA("Disabling preview port");
- ret = mStopPreviewSem.WaitTimeout(OMX_CMD_TIMEOUT);
+status_t OMXCameraAdapter::stopPreview() {
+ LOG_FUNCTION_NAME;
- //If somethiing bad happened while we wait
- if (mComponentState == OMX_StateInvalid)
- {
- CAMHAL_LOGEA("Invalid State after Disabling preview port Exitting!!!");
- goto EXIT;
- }
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ status_t ret = NO_ERROR;
- if ( NO_ERROR == ret )
- {
- CAMHAL_LOGDA("Preview port disabled");
+ if (mTunnelDestroyed == false){
+ ret = destroyTunnel();
+ if (ret == ALREADY_EXISTS) {
+            // Special case: stopPreview() was called while still in LOADED_PREVIEW_STATE
+ return NO_ERROR;
}
- else
- {
- ret |= RemoveEvent(mCameraAdapterParameters.mHandleComp,
- OMX_EventCmdComplete,
- OMX_CommandPortDisable,
- mCameraAdapterParameters.mPrevPortIndex,
- NULL);
- CAMHAL_LOGEA("Timeout expired on preview port disable");
- goto EXIT;
+ if (ret != NO_ERROR) {
+ CAMHAL_LOGEB(" destroyTunnel returned error ");
+ return ret;
}
+ }
- {
- Mutex::Autolock lock(mPreviewBufferLock);
+ mTunnelDestroyed = false;
+
+ {
+ android::AutoMutex lock(mPreviewBufferLock);
///Clear all the available preview buffers
mPreviewBuffersAvailable.clear();
- }
+ }
switchToLoaded();
-
mFirstTimeInit = true;
mPendingCaptureSettings = 0;
+ mPendingReprocessSettings = 0;
mFramesWithDucati = 0;
mFramesWithDisplay = 0;
mFramesWithEncoder = 0;
LOG_FUNCTION_NAME_EXIT;
- return (ret | ErrorUtils::omxToAndroidError(eError));
-
-EXIT:
- CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError);
- {
- Mutex::Autolock lock(mPreviewBufferLock);
- ///Clear all the available preview buffers
- mPreviewBuffersAvailable.clear();
- }
- performCleanupAfterError();
- LOG_FUNCTION_NAME_EXIT;
- return (ret | ErrorUtils::omxToAndroidError(eError));
-
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
}
status_t OMXCameraAdapter::setSensorOverclock(bool enable)
@@ -2154,7 +2538,6 @@ status_t OMXCameraAdapter::setSensorOverclock(bool enable)
if ( OMX_ErrorNone != eError )
{
CAMHAL_LOGEB("Error while setting Sensor overclock 0x%x", eError);
- ret = BAD_VALUE;
}
else
{
@@ -2164,7 +2547,7 @@ status_t OMXCameraAdapter::setSensorOverclock(bool enable)
LOG_FUNCTION_NAME_EXIT;
- return ret;
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::printComponentVersion(OMX_HANDLETYPE handle)
@@ -2259,15 +2642,70 @@ status_t OMXCameraAdapter::printComponentVersion(OMX_HANDLETYPE handle)
return ret;
}
+status_t OMXCameraAdapter::setS3DFrameLayout(OMX_U32 port) const
+{
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_TI_FRAMELAYOUTTYPE frameLayout;
+ const OMXCameraPortParameters *cap =
+ &mCameraAdapterParameters.mCameraPortParams[port];
+
+ LOG_FUNCTION_NAME;
+
+ OMX_INIT_STRUCT_PTR (&frameLayout, OMX_TI_FRAMELAYOUTTYPE);
+ frameLayout.nPortIndex = port;
+ eError = OMX_GetParameter(mCameraAdapterParameters.mHandleComp,
+ (OMX_INDEXTYPE)OMX_TI_IndexParamStereoFrmLayout, &frameLayout);
+ if (eError != OMX_ErrorNone)
+ {
+ CAMHAL_LOGEB("Error while getting S3D frame layout: 0x%x", eError);
+ return -EINVAL;
+ }
+
+ if (cap->mFrameLayoutType == OMX_TI_StereoFrameLayoutTopBottomSubsample)
+ {
+ frameLayout.eFrameLayout = OMX_TI_StereoFrameLayoutTopBottom;
+ frameLayout.nSubsampleRatio = 2;
+ }
+ else if (cap->mFrameLayoutType ==
+ OMX_TI_StereoFrameLayoutLeftRightSubsample)
+ {
+ frameLayout.eFrameLayout = OMX_TI_StereoFrameLayoutLeftRight;
+ frameLayout.nSubsampleRatio = 2;
+ }
+ else
+ {
+ frameLayout.eFrameLayout = cap->mFrameLayoutType;
+ frameLayout.nSubsampleRatio = 1;
+ }
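+
+    // The left shift by 7 below appears to express nSubsampleRatio as a fixed-point
+    // value in which 128 stands for a ratio of 1.0 and 256 for 2.0; this is an
+    // assumption inferred from the shift itself, not from TI documentation.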
+ frameLayout.nSubsampleRatio = frameLayout.nSubsampleRatio << 7;
+
+ eError = OMX_SetParameter(mCameraAdapterParameters.mHandleComp,
+ (OMX_INDEXTYPE)OMX_TI_IndexParamStereoFrmLayout, &frameLayout);
+ if (eError != OMX_ErrorNone)
+ {
+ CAMHAL_LOGEB("Error while setting S3D frame layout: 0x%x", eError);
+ return -EINVAL;
+ }
+ else
+ {
+ CAMHAL_LOGDB("S3D frame layout %d applied successfully on port %lu",
+ frameLayout.eFrameLayout, port);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return NO_ERROR;
+}
+
status_t OMXCameraAdapter::autoFocus()
{
status_t ret = NO_ERROR;
- TIUTILS::Message msg;
+ Utils::Message msg;
LOG_FUNCTION_NAME;
{
- Mutex::Autolock lock(mFrameCountMutex);
+ android::AutoMutex lock(mFrameCountMutex);
if (mFrameCount < 1) {
// first frame may take some time to come...so wait for an adequate amount of time
// which 2 * OMX_CAPTURE_TIMEOUT * 1000 will cover.
@@ -2285,7 +2723,7 @@ status_t OMXCameraAdapter::autoFocus()
EXIT:
- LOG_FUNCTION_NAME;
+ LOG_FUNCTION_NAME_EXIT;
return ret;
}
@@ -2293,12 +2731,12 @@ status_t OMXCameraAdapter::autoFocus()
status_t OMXCameraAdapter::takePicture()
{
status_t ret = NO_ERROR;
- TIUTILS::Message msg;
+ Utils::Message msg;
LOG_FUNCTION_NAME;
- {
- Mutex::Autolock lock(mFrameCountMutex);
+ if (mNextState != REPROCESS_STATE) {
+ android::AutoMutex lock(mFrameCountMutex);
if (mFrameCount < 1) {
// first frame may take some time to come...so wait for an adequate amount of time
// which 2 * OMX_CAPTURE_TIMEOUT * 1000 will cover.
@@ -2310,8 +2748,16 @@ status_t OMXCameraAdapter::takePicture()
}
}
- msg.command = CommandHandler::CAMERA_START_IMAGE_CAPTURE;
+ // TODO(XXX): re-using take picture to kick off reprocessing pipe
+ // Need to rethink this approach during reimplementation
+ if (mNextState == REPROCESS_STATE) {
+ msg.command = CommandHandler::CAMERA_START_REPROCESS;
+ } else {
+ msg.command = CommandHandler::CAMERA_START_IMAGE_CAPTURE;
+ }
+
msg.arg1 = mErrorNotifier;
+ msg.arg2 = cacheCaptureParameters();
ret = mCommandHandler->put(&msg);
EXIT:
@@ -2345,7 +2791,7 @@ status_t OMXCameraAdapter::getFrameSize(size_t &width, size_t &height)
if ( mOMXStateSwitch )
{
- ret = switchToLoaded();
+ ret = switchToLoaded(true);
if ( NO_ERROR != ret )
{
CAMHAL_LOGEB("switchToLoaded() failed 0x%x", ret);
@@ -2358,78 +2804,55 @@ status_t OMXCameraAdapter::getFrameSize(size_t &width, size_t &height)
if ( OMX_StateLoaded == mComponentState )
{
- ret = setLDC(mIPP);
- if ( NO_ERROR != ret )
- {
- CAMHAL_LOGEB("setLDC() failed %d", ret);
- LOG_FUNCTION_NAME_EXIT;
- goto exit;
+ if (mPendingPreviewSettings & SetLDC) {
+ mPendingPreviewSettings &= ~SetLDC;
+ ret = setLDC(mIPP);
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGEB("setLDC() failed %d", ret);
+ LOG_FUNCTION_NAME_EXIT;
+ goto exit;
}
+ }
- ret = setNSF(mIPP);
- if ( NO_ERROR != ret )
- {
- CAMHAL_LOGEB("setNSF() failed %d", ret);
- LOG_FUNCTION_NAME_EXIT;
- goto exit;
+ if (mPendingPreviewSettings & SetNSF) {
+ mPendingPreviewSettings &= ~SetNSF;
+ ret = setNSF(mIPP);
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGEB("setNSF() failed %d", ret);
+ LOG_FUNCTION_NAME_EXIT;
+ goto exit;
}
+ }
- ret = setCaptureMode(mCapMode);
- if ( NO_ERROR != ret )
- {
- CAMHAL_LOGEB("setCaptureMode() failed %d", ret);
+ if (mPendingPreviewSettings & SetCapMode) {
+ mPendingPreviewSettings &= ~SetCapMode;
+ ret = setCaptureMode(mCapMode);
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGEB("setCaptureMode() failed %d", ret);
}
+ }
- if(mCapMode == OMXCameraAdapter::VIDEO_MODE)
- {
- if ( NO_ERROR == ret )
- {
- ///Enable/Disable Video Noise Filter
- ret = enableVideoNoiseFilter(mVnfEnabled);
- }
+ if((mCapMode == OMXCameraAdapter::VIDEO_MODE) ||
+ (mCapMode == OMXCameraAdapter::VIDEO_MODE_HQ) ) {
- if ( NO_ERROR != ret)
- {
- CAMHAL_LOGEB("Error configuring VNF %x", ret);
+ if (mPendingPreviewSettings & SetVNF) {
+ mPendingPreviewSettings &= ~SetVNF;
+ ret = enableVideoNoiseFilter(mVnfEnabled);
+ if ( NO_ERROR != ret){
+ CAMHAL_LOGEB("Error configuring VNF %x", ret);
}
+ }
- if ( NO_ERROR == ret )
- {
- ///Enable/Disable Video Stabilization
+ if (mPendingPreviewSettings & SetVSTAB) {
+ mPendingPreviewSettings &= ~SetVSTAB;
ret = enableVideoStabilization(mVstabEnabled);
- }
-
- if ( NO_ERROR != ret)
- {
- CAMHAL_LOGEB("Error configuring VSTAB %x", ret);
- }
- }
- else
- {
- if ( NO_ERROR == ret )
- {
- ///Enable/Disable Video Noise Filter
- ret = enableVideoNoiseFilter(false);
- }
-
- if ( NO_ERROR != ret)
- {
- CAMHAL_LOGEB("Error configuring VNF %x", ret);
- }
-
- if ( NO_ERROR == ret )
- {
- ///Enable/Disable Video Stabilization
- ret = enableVideoStabilization(false);
- }
-
- if ( NO_ERROR != ret)
- {
- CAMHAL_LOGEB("Error configuring VSTAB %x", ret);
+ if ( NO_ERROR != ret) {
+ CAMHAL_LOGEB("Error configuring VSTAB %x", ret);
}
}
}
+ }
ret = setSensorOrientation(mSensorOrientation);
if ( NO_ERROR != ret )
@@ -2522,9 +2945,6 @@ void OMXCameraAdapter::onOrientationEvent(uint32_t orientation, uint32_t tilt)
LOG_FUNCTION_NAME;
static const unsigned int DEGREES_TILT_IGNORE = 45;
- int device_orientation = 0;
- int mount_orientation = 0;
- const char *facing_direction = NULL;
// if tilt angle is greater than DEGREES_TILT_IGNORE
// we are going to ignore the orientation returned from
@@ -2534,32 +2954,36 @@ void OMXCameraAdapter::onOrientationEvent(uint32_t orientation, uint32_t tilt)
return;
}
+ int mountOrientation = 0;
+ bool isFront = false;
if (mCapabilities) {
- if (mCapabilities->get(CameraProperties::ORIENTATION_INDEX)) {
- mount_orientation = atoi(mCapabilities->get(CameraProperties::ORIENTATION_INDEX));
+ const char * const mountOrientationString =
+ mCapabilities->get(CameraProperties::ORIENTATION_INDEX);
+ if (mountOrientationString) {
+ mountOrientation = atoi(mountOrientationString);
}
- facing_direction = mCapabilities->get(CameraProperties::FACING_INDEX);
- }
- // calculate device orientation relative to the sensor orientation
- // front camera display is mirrored...needs to be accounted for when orientation
- // is 90 or 270...since this will result in a flip on orientation otherwise
- if (facing_direction && !strcmp(facing_direction, TICameraParameters::FACING_FRONT) &&
- (orientation == 90 || orientation == 270)) {
- device_orientation = (orientation - mount_orientation + 360) % 360;
- } else { // back-facing camera
- device_orientation = (orientation + mount_orientation) % 360;
+ const char * const facingString = mCapabilities->get(CameraProperties::FACING_INDEX);
+ if (facingString) {
+ isFront = strcmp(facingString, TICameraParameters::FACING_FRONT) == 0;
+ }
}
- if (device_orientation != mDeviceOrientation) {
- mDeviceOrientation = device_orientation;
+    // direction is a constant sign that depends on the facing: the rotation direction
+    // relative to the device is +1 (clockwise) for the back sensor and -1
+    // (counter-clockwise) for the front sensor
+ const int direction = isFront ? -1 : 1;
- mFaceDetectionLock.lock();
- if (mFaceDetectionRunning) {
- // restart face detection with new rotation
- setFaceDetection(true, mDeviceOrientation);
- }
- mFaceDetectionLock.unlock();
+ int rotation = mountOrientation + direction*orientation;
+
+ // crop the calculated value to [0..360) range
+ while ( rotation < 0 ) rotation += 360;
+ rotation %= 360;
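+
+    // Worked example (illustrative): with a front sensor mounted at 270 degrees
+    // (mountOrientation = 270, direction = -1) and the device held at 90 degrees,
+    // rotation = 270 + (-1)*90 = 180, which is already within [0..360).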
+
+ if (rotation != mDeviceOrientation) {
+ mDeviceOrientation = rotation;
+
+ // restart face detection with new rotation
+ setFaceDetectionOrientation(mDeviceOrientation);
}
CAMHAL_LOGVB("orientation = %d tilt = %d device_orientation = %d", orientation, tilt, mDeviceOrientation);
@@ -2650,10 +3074,10 @@ OMX_ERRORTYPE OMXCameraAdapter::OMXCameraAdapterEventHandler(OMX_IN OMX_HANDLETY
{
CAMHAL_LOGEB("***Removing %d EVENTS***** \n", mEventSignalQ.size());
//remove from queue and free msg
- TIUTILS::Message *msg = mEventSignalQ.itemAt(i);
+ Utils::Message *msg = mEventSignalQ.itemAt(i);
if ( NULL != msg )
{
- Semaphore *sem = (Semaphore*) msg->arg3;
+ Utils::Semaphore *sem = (Utils::Semaphore*) msg->arg3;
if ( sem )
{
sem->Signal();
@@ -2712,8 +3136,8 @@ OMX_ERRORTYPE OMXCameraAdapter::SignalEvent(OMX_IN OMX_HANDLETYPE hComponent,
OMX_IN OMX_U32 nData2,
OMX_IN OMX_PTR pEventData)
{
- Mutex::Autolock lock(mEventLock);
- TIUTILS::Message *msg;
+ android::AutoMutex lock(mEventLock);
+ Utils::Message *msg;
bool eventSignalled = false;
LOG_FUNCTION_NAME;
@@ -2732,7 +3156,7 @@ OMX_ERRORTYPE OMXCameraAdapter::SignalEvent(OMX_IN OMX_HANDLETYPE hComponent,
&& ( !msg->arg2 || ( OMX_U32 ) msg->arg2 == nData2 )
&& msg->arg3)
{
- Semaphore *sem = (Semaphore*) msg->arg3;
+ Utils::Semaphore *sem = (Utils::Semaphore*) msg->arg3;
CAMHAL_LOGDA("Event matched, signalling sem");
mEventSignalQ.removeAt(i);
//Signal the semaphore provided
@@ -2754,7 +3178,7 @@ OMX_ERRORTYPE OMXCameraAdapter::SignalEvent(OMX_IN OMX_HANDLETYPE hComponent,
// Handling for focus callback
if ((nData2 == OMX_IndexConfigCommonFocusStatus) &&
(eEvent == (OMX_EVENTTYPE) OMX_EventIndexSettingChanged)) {
- TIUTILS::Message msg;
+ Utils::Message msg;
msg.command = OMXCallbackHandler::CAMERA_FOCUS_STATUS;
msg.arg1 = NULL;
msg.arg2 = NULL;
@@ -2773,8 +3197,8 @@ OMX_ERRORTYPE OMXCameraAdapter::RemoveEvent(OMX_IN OMX_HANDLETYPE hComponent,
OMX_IN OMX_U32 nData2,
OMX_IN OMX_PTR pEventData)
{
- Mutex::Autolock lock(mEventLock);
- TIUTILS::Message *msg;
+ android::AutoMutex lock(mEventLock);
+ Utils::Message *msg;
LOG_FUNCTION_NAME;
if ( !mEventSignalQ.isEmpty() )
@@ -2791,7 +3215,7 @@ OMX_ERRORTYPE OMXCameraAdapter::RemoveEvent(OMX_IN OMX_HANDLETYPE hComponent,
&& ( !msg->arg2 || ( OMX_U32 ) msg->arg2 == nData2 )
&& msg->arg3)
{
- Semaphore *sem = (Semaphore*) msg->arg3;
+ Utils::Semaphore *sem = (Utils::Semaphore*) msg->arg3;
CAMHAL_LOGDA("Event matched, signalling sem");
mEventSignalQ.removeAt(i);
free(msg);
@@ -2814,14 +3238,14 @@ status_t OMXCameraAdapter::RegisterForEvent(OMX_IN OMX_HANDLETYPE hComponent,
OMX_IN OMX_EVENTTYPE eEvent,
OMX_IN OMX_U32 nData1,
OMX_IN OMX_U32 nData2,
- OMX_IN Semaphore &semaphore)
+ OMX_IN Utils::Semaphore &semaphore)
{
status_t ret = NO_ERROR;
ssize_t res;
- Mutex::Autolock lock(mEventLock);
+ android::AutoMutex lock(mEventLock);
LOG_FUNCTION_NAME;
- TIUTILS::Message * msg = ( struct TIUTILS::Message * ) malloc(sizeof(struct TIUTILS::Message));
+ Utils::Message * msg = ( struct Utils::Message * ) malloc(sizeof(struct Utils::Message));
if ( NULL != msg )
{
msg->command = ( unsigned int ) eEvent;
@@ -2869,11 +3293,36 @@ OMX_ERRORTYPE OMXCameraAdapter::OMXCameraAdapterEmptyBufferDone(OMX_IN OMX_HANDL
OMX_IN OMX_BUFFERHEADERTYPE* pBuffHeader)
{
- LOG_FUNCTION_NAME;
+ LOG_FUNCTION_NAME;
+ status_t stat = NO_ERROR;
+ status_t res1, res2;
+ OMXCameraPortParameters *pPortParam;
+ CameraFrame::FrameType typeOfFrame = CameraFrame::ALL_FRAMES;
+ unsigned int refCount = 0;
+ unsigned int mask = 0xFFFF;
+ CameraFrame cameraFrame;
+ OMX_TI_PLATFORMPRIVATE *platformPrivate;
- LOG_FUNCTION_NAME_EXIT;
+ res1 = res2 = NO_ERROR;
+
+ if (!pBuffHeader || !pBuffHeader->pBuffer) {
+ CAMHAL_LOGE("NULL Buffer from OMX");
+ return OMX_ErrorNone;
+ }
+
+ pPortParam = &(mCameraAdapterParameters.mCameraPortParams[pBuffHeader->nInputPortIndex]);
+ platformPrivate = (OMX_TI_PLATFORMPRIVATE*) pBuffHeader->pPlatformPrivate;
+
+ if (pBuffHeader->nInputPortIndex == OMX_CAMERA_PORT_VIDEO_IN_VIDEO) {
+ typeOfFrame = CameraFrame::REPROCESS_INPUT_FRAME;
+ mask = (unsigned int)CameraFrame::REPROCESS_INPUT_FRAME;
- return OMX_ErrorNone;
+ stat = sendCallBacks(cameraFrame, pBuffHeader, mask, pPortParam);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return OMX_ErrorNone;
}
static void debugShowFPS()
@@ -2889,7 +3338,7 @@ static void debugShowFPS()
mFps = ((mFrameCount - mLastFrameCount) * float(s2ns(1))) / diff;
mLastFpsTime = now;
mLastFrameCount = mFrameCount;
- ALOGD("Camera %d Frames, %f FPS", mFrameCount, mFps);
+ CAMHAL_LOGI("Camera %d Frames, %f FPS", mFrameCount, mFps);
}
// XXX: mFPS has the value we want
}
@@ -2901,7 +3350,7 @@ OMX_ERRORTYPE OMXCameraAdapterFillBufferDone(OMX_IN OMX_HANDLETYPE hComponent,
OMX_IN OMX_PTR pAppData,
OMX_IN OMX_BUFFERHEADERTYPE* pBuffHeader)
{
- TIUTILS::Message msg;
+ Utils::Message msg;
OMX_ERRORTYPE eError = OMX_ErrorNone;
if (UNLIKELY(mDebugFps)) {
@@ -2920,6 +3369,47 @@ OMX_ERRORTYPE OMXCameraAdapterFillBufferDone(OMX_IN OMX_HANDLETYPE hComponent,
return eError;
}
+#ifdef CAMERAHAL_OMX_PROFILING
+
+status_t OMXCameraAdapter::storeProfilingData(OMX_BUFFERHEADERTYPE* pBuffHeader) {
+ OMX_TI_PLATFORMPRIVATE *platformPrivate = NULL;
+ OMX_OTHER_EXTRADATATYPE *extraData = NULL;
+ FILE *fd = NULL;
+
+ LOG_FUNCTION_NAME
+
+ if ( UNLIKELY( mDebugProfile ) ) {
+
+ platformPrivate = static_cast<OMX_TI_PLATFORMPRIVATE *> (pBuffHeader->pPlatformPrivate);
+ extraData = getExtradata(platformPrivate->pMetaDataBuffer,
+ static_cast<OMX_EXTRADATATYPE> (OMX_TI_ProfilerData));
+
+ if ( NULL != extraData ) {
+ if( extraData->eType == static_cast<OMX_EXTRADATATYPE> (OMX_TI_ProfilerData) ) {
+
+ fd = fopen(DEFAULT_PROFILE_PATH, "ab");
+ if ( NULL != fd ) {
+ fwrite(extraData->data, 1, extraData->nDataSize, fd);
+ fclose(fd);
+ } else {
+ return -errno;
+ }
+
+ } else {
+ return NOT_ENOUGH_DATA;
+ }
+ } else {
+ return NOT_ENOUGH_DATA;
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT
+
+ return NO_ERROR;
+}
+
+#endif
+
/*========================================================*/
/* @ fn SampleTest_FillBufferDone :: Application callback*/
/*========================================================*/
@@ -2936,22 +3426,39 @@ OMX_ERRORTYPE OMXCameraAdapter::OMXCameraAdapterFillBufferDone(OMX_IN OMX_HANDLE
BaseCameraAdapter::AdapterState state, nextState;
BaseCameraAdapter::getState(state);
BaseCameraAdapter::getNextState(nextState);
- sp<CameraFDResult> fdResult = NULL;
+ android::sp<CameraMetadataResult> metadataResult = NULL;
unsigned int mask = 0xFFFF;
CameraFrame cameraFrame;
- OMX_TI_PLATFORMPRIVATE *platformPrivate;
OMX_OTHER_EXTRADATATYPE *extraData;
OMX_TI_ANCILLARYDATATYPE *ancillaryData = NULL;
bool snapshotFrame = false;
+ if ( NULL == pBuffHeader ) {
+ return OMX_ErrorBadParameter;
+ }
+
+#ifdef CAMERAHAL_OMX_PROFILING
+
+ storeProfilingData(pBuffHeader);
+
+#endif
+
res1 = res2 = NO_ERROR;
- pPortParam = &(mCameraAdapterParameters.mCameraPortParams[pBuffHeader->nOutputPortIndex]);
if ( !pBuffHeader || !pBuffHeader->pBuffer ) {
CAMHAL_LOGEA("NULL Buffer from OMX");
return OMX_ErrorNone;
}
+ pPortParam = &(mCameraAdapterParameters.mCameraPortParams[pBuffHeader->nOutputPortIndex]);
+
+ // Find buffer and mark it as filled
+ for (int i = 0; i < pPortParam->mNumBufs; i++) {
+ if (pPortParam->mBufferHeader[i] == pBuffHeader) {
+ pPortParam->mStatus[i] = OMXCameraPortParameters::DONE;
+ }
+ }
+
if (pBuffHeader->nOutputPortIndex == OMX_CAMERA_PORT_VIDEO_OUT_PREVIEW)
{
@@ -2960,49 +3467,29 @@ OMX_ERRORTYPE OMXCameraAdapter::OMXCameraAdapterFillBufferDone(OMX_IN OMX_HANDLE
return OMX_ErrorNone;
}
- if ( mWaitingForSnapshot )
- {
- platformPrivate = (OMX_TI_PLATFORMPRIVATE*) pBuffHeader->pPlatformPrivate;
- extraData = getExtradata((OMX_OTHER_EXTRADATATYPE*) platformPrivate->pMetaDataBuffer,
- (OMX_EXTRADATATYPE) OMX_AncillaryData);
+ if ( mWaitingForSnapshot ) {
+ extraData = getExtradata(pBuffHeader->pPlatformPrivate,
+ (OMX_EXTRADATATYPE) OMX_AncillaryData);
- if ( NULL != extraData )
- {
+ if ( NULL != extraData ) {
ancillaryData = (OMX_TI_ANCILLARYDATATYPE*) extraData->data;
- snapshotFrame = ancillaryData->nDCCStatus;
- mPending3Asettings |= SetFocus;
- }
- }
-
- recalculateFPS();
- {
- Mutex::Autolock lock(mFaceDetectionLock);
- if ( mFaceDetectionRunning && !mFaceDetectionPaused ) {
- detectFaces(pBuffHeader, fdResult, pPortParam->mWidth, pPortParam->mHeight);
- if ( NULL != fdResult.get() ) {
- notifyFaceSubscribers(fdResult);
- fdResult.clear();
- }
- if ( mFDSwitchAlgoPriority ) {
-
- //Disable region priority and enable face priority for AF
- setAlgoPriority(REGION_PRIORITY, FOCUS_ALGO, false);
- setAlgoPriority(FACE_PRIORITY, FOCUS_ALGO , true);
-
- //Disable Region priority and enable Face priority
- setAlgoPriority(REGION_PRIORITY, EXPOSURE_ALGO, false);
- setAlgoPriority(FACE_PRIORITY, EXPOSURE_ALGO, true);
- mFDSwitchAlgoPriority = false;
+ if ((OMX_2D_Snap == ancillaryData->eCameraView)
+ || (OMX_3D_Left_Snap == ancillaryData->eCameraView)
+ || (OMX_3D_Right_Snap == ancillaryData->eCameraView)) {
+ snapshotFrame = OMX_TRUE;
+ } else {
+ snapshotFrame = OMX_FALSE;
}
+ mPending3Asettings |= SetFocus;
}
- }
+ }
///Prepare the frames to be sent - initialize CameraFrame object and reference count
// TODO(XXX): ancillary data for snapshot frame is not being sent for video snapshot
// if we are waiting for a snapshot and in video mode...go ahead and send
// this frame as a snapshot
if( mWaitingForSnapshot && (mCapturedFrames > 0) &&
- (snapshotFrame || (mCapMode == VIDEO_MODE)))
+ (snapshotFrame || (mCapMode == VIDEO_MODE) || (mCapMode == VIDEO_MODE_HQ ) ))
{
typeOfFrame = CameraFrame::SNAPSHOT_FRAME;
mask = (unsigned int)CameraFrame::SNAPSHOT_FRAME;
@@ -3010,7 +3497,7 @@ OMX_ERRORTYPE OMXCameraAdapter::OMXCameraAdapterFillBufferDone(OMX_IN OMX_HANDLE
// video snapshot gets ancillary data and wb info from last snapshot frame
mCaptureAncillaryData = ancillaryData;
mWhiteBalanceData = NULL;
- extraData = getExtradata((OMX_OTHER_EXTRADATATYPE*) platformPrivate->pMetaDataBuffer,
+ extraData = getExtradata(pBuffHeader->pPlatformPrivate,
(OMX_EXTRADATATYPE) OMX_WhiteBalance);
if ( NULL != extraData )
{
@@ -3029,29 +3516,29 @@ OMX_ERRORTYPE OMXCameraAdapter::OMXCameraAdapterFillBufferDone(OMX_IN OMX_HANDLE
mFramesWithEncoder++;
}
- //ALOGV("FBD pBuffer = 0x%x", pBuffHeader->pBuffer);
+ //CAMHAL_LOGV("FBD pBuffer = 0x%x", pBuffHeader->pBuffer);
if( mWaitingForSnapshot )
- {
- mSnapshotCount++;
-
- if ( (mSnapshotCount == 1) &&
- ((HIGH_SPEED == mCapMode) || (VIDEO_MODE == mCapMode)) )
- {
- notifyShutterSubscribers();
- }
- }
+ {
+ if ( !mBracketingEnabled &&
+ ((HIGH_SPEED == mCapMode) ||
+ (VIDEO_MODE == mCapMode) ||
+ (VIDEO_MODE_HQ == mCapMode)) )
+ {
+ notifyShutterSubscribers();
+ }
+ }
stat = sendCallBacks(cameraFrame, pBuffHeader, mask, pPortParam);
mFramesWithDisplay++;
mFramesWithDucati--;
-#ifdef DEBUG_LOG
- if(mBuffersWithDucati.indexOfKey((int)pBuffHeader->pBuffer)<0)
+#ifdef CAMERAHAL_DEBUG
+ if(mBuffersWithDucati.indexOfKey((uint32_t)pBuffHeader->pBuffer)<0)
{
- ALOGE("Buffer was never with Ducati!! 0x%x", pBuffHeader->pBuffer);
- for(int i=0;i<mBuffersWithDucati.size();i++) ALOGE("0x%x", mBuffersWithDucati.keyAt(i));
+ CAMHAL_LOGE("Buffer was never with Ducati!! %p", pBuffHeader->pBuffer);
+ for(unsigned int i=0;i<mBuffersWithDucati.size();i++) CAMHAL_LOGE("0x%x", mBuffersWithDucati.keyAt(i));
}
mBuffersWithDucati.removeItem((int)pBuffHeader->pBuffer);
#endif
@@ -3059,6 +3546,31 @@ OMX_ERRORTYPE OMXCameraAdapter::OMXCameraAdapterFillBufferDone(OMX_IN OMX_HANDLE
if(mDebugFcs)
CAMHAL_LOGEB("C[%d] D[%d] E[%d]", mFramesWithDucati, mFramesWithDisplay, mFramesWithEncoder);
+ recalculateFPS();
+
+ createPreviewMetadata(pBuffHeader, metadataResult, pPortParam->mWidth, pPortParam->mHeight);
+ if ( NULL != metadataResult.get() ) {
+ notifyMetadataSubscribers(metadataResult);
+ metadataResult.clear();
+ }
+
+ {
+ android::AutoMutex lock(mFaceDetectionLock);
+ if ( mFDSwitchAlgoPriority ) {
+
+ //Disable region priority and enable face priority for AF
+ setAlgoPriority(REGION_PRIORITY, FOCUS_ALGO, false);
+ setAlgoPriority(FACE_PRIORITY, FOCUS_ALGO , true);
+
+ //Disable Region priority and enable Face priority
+ setAlgoPriority(REGION_PRIORITY, EXPOSURE_ALGO, false);
+ setAlgoPriority(FACE_PRIORITY, EXPOSURE_ALGO, true);
+ mFDSwitchAlgoPriority = false;
+ }
+ }
+
+ sniffDccFileDataSave(pBuffHeader);
+
stat |= advanceZoom();
// On the fly update to 3A settings not working
@@ -3066,10 +3578,9 @@ OMX_ERRORTYPE OMXCameraAdapter::OMXCameraAdapterFillBufferDone(OMX_IN OMX_HANDLE
// or in the middle of transitioning to it
if( mPending3Asettings &&
( (nextState & CAPTURE_ACTIVE) == 0 ) &&
- ( (state & CAPTURE_ACTIVE) == 0 ) )
- {
+ ( (state & CAPTURE_ACTIVE) == 0 ) ) {
apply3Asettings(mParameters3A);
- }
+ }
}
else if( pBuffHeader->nOutputPortIndex == OMX_CAMERA_PORT_VIDEO_OUT_MEASUREMENT )
@@ -3080,11 +3591,11 @@ OMX_ERRORTYPE OMXCameraAdapter::OMXCameraAdapterFillBufferDone(OMX_IN OMX_HANDLE
stat = sendCallBacks(cameraFrame, pBuffHeader, mask, pPortParam);
}
else if( pBuffHeader->nOutputPortIndex == OMX_CAMERA_PORT_IMAGE_OUT_IMAGE )
- {
+ {
OMX_COLOR_FORMATTYPE pixFormat;
const char *valstr = NULL;
- pixFormat = mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex].mColorFormat;
+ pixFormat = pPortParam->mColorFormat;
if ( OMX_COLOR_FormatUnused == pixFormat )
{
@@ -3092,13 +3603,15 @@ OMX_ERRORTYPE OMXCameraAdapter::OMXCameraAdapterFillBufferDone(OMX_IN OMX_HANDLE
mask = (unsigned int) CameraFrame::IMAGE_FRAME;
} else if ( pixFormat == OMX_COLOR_FormatCbYCrY &&
((mPictureFormatFromClient &&
- !strcmp(mPictureFormatFromClient, CameraParameters::PIXEL_FORMAT_JPEG)) ||
- !mPictureFormatFromClient) ) {
+ !strcmp(mPictureFormatFromClient,
+ android::CameraParameters::PIXEL_FORMAT_JPEG)) ||
+ !mPictureFormatFromClient) ) {
// signals to callbacks that this needs to be converted to jpeg
// before returning to framework
typeOfFrame = CameraFrame::IMAGE_FRAME;
mask = (unsigned int) CameraFrame::IMAGE_FRAME;
cameraFrame.mQuirks |= CameraFrame::ENCODE_RAW_YUV422I_TO_JPEG;
+ cameraFrame.mQuirks |= CameraFrame::FORMAT_YUV422I_UYVY;
// populate exif data and pass to subscribers via quirk
// subscriber is in charge of freeing exif data
@@ -3106,12 +3619,10 @@ OMX_ERRORTYPE OMXCameraAdapter::OMXCameraAdapterFillBufferDone(OMX_IN OMX_HANDLE
setupEXIF_libjpeg(exif, mCaptureAncillaryData, mWhiteBalanceData);
cameraFrame.mQuirks |= CameraFrame::HAS_EXIF_DATA;
cameraFrame.mCookie2 = (void*) exif;
- }
- else
- {
+ } else {
typeOfFrame = CameraFrame::RAW_FRAME;
mask = (unsigned int) CameraFrame::RAW_FRAME;
- }
+ }
pPortParam->mImageType = typeOfFrame;
@@ -3127,7 +3638,7 @@ OMX_ERRORTYPE OMXCameraAdapter::OMXCameraAdapterFillBufferDone(OMX_IN OMX_HANDLE
}
{
- Mutex::Autolock lock(mBracketingLock);
+ android::AutoMutex lock(mBracketingLock);
if ( mBracketingEnabled )
{
doBracketing(pBuffHeader, typeOfFrame);
@@ -3135,28 +3646,131 @@ OMX_ERRORTYPE OMXCameraAdapter::OMXCameraAdapterFillBufferDone(OMX_IN OMX_HANDLE
}
}
+ if (mZoomBracketingEnabled) {
+ doZoom(mZoomBracketingValues[mCurrentZoomBracketing]);
+ CAMHAL_LOGDB("Current Zoom Bracketing: %d", mZoomBracketingValues[mCurrentZoomBracketing]);
+ mCurrentZoomBracketing++;
+ if (mCurrentZoomBracketing == ARRAY_SIZE(mZoomBracketingValues)) {
+ mZoomBracketingEnabled = false;
+ }
+ }
+
if ( 1 > mCapturedFrames )
{
goto EXIT;
}
+#ifdef OMAP_ENHANCEMENT_CPCAM
+ if ( NULL != mSharedAllocator ) {
+ cameraFrame.mMetaData = new CameraMetadataResult(getMetaData(pBuffHeader->pPlatformPrivate, mSharedAllocator));
+ }
+#endif
+
CAMHAL_LOGDB("Captured Frames: %d", mCapturedFrames);
mCapturedFrames--;
+#ifdef CAMERAHAL_USE_RAW_IMAGE_SAVING
+ if (mYuvCapture) {
+ struct timeval timeStampUsec;
+ gettimeofday(&timeStampUsec, NULL);
+
+ time_t saveTime;
+ time(&saveTime);
+ const struct tm * const timeStamp = gmtime(&saveTime);
+
+ char filename[256];
+ snprintf(filename,256, "%s/yuv_%d_%d_%d_%lu.yuv",
+ kYuvImagesOutputDirPath,
+ timeStamp->tm_hour,
+ timeStamp->tm_min,
+ timeStamp->tm_sec,
+ timeStampUsec.tv_usec);
+
+ const status_t saveBufferStatus = saveBufferToFile(((CameraBuffer*)pBuffHeader->pAppPrivate)->mapped,
+ pBuffHeader->nFilledLen, filename);
+
+ if (saveBufferStatus != OK) {
+ CAMHAL_LOGE("ERROR: %d, while saving yuv!", saveBufferStatus);
+ } else {
+ CAMHAL_LOGD("yuv_%d_%d_%d_%lu.yuv successfully saved in %s",
+ timeStamp->tm_hour,
+ timeStamp->tm_min,
+ timeStamp->tm_sec,
+ timeStampUsec.tv_usec,
+ kYuvImagesOutputDirPath);
+ }
+ }
+#endif
+
stat = sendCallBacks(cameraFrame, pBuffHeader, mask, pPortParam);
+#ifdef OMAP_ENHANCEMENT_CPCAM
+ if ( NULL != cameraFrame.mMetaData.get() ) {
+ cameraFrame.mMetaData.clear();
+ }
+#endif
}
- else
- {
- CAMHAL_LOGEA("Frame received for non-(preview/capture/measure) port. This is yet to be supported");
- goto EXIT;
+ else if (pBuffHeader->nOutputPortIndex == OMX_CAMERA_PORT_VIDEO_OUT_VIDEO) {
+ typeOfFrame = CameraFrame::RAW_FRAME;
+ pPortParam->mImageType = typeOfFrame;
+ {
+ android::AutoMutex lock(mLock);
+ if( ( CAPTURE_ACTIVE & state ) != CAPTURE_ACTIVE ) {
+ goto EXIT;
+ }
+ }
+
+ CAMHAL_LOGD("RAW buffer done on video port, length = %d", pBuffHeader->nFilledLen);
+
+ mask = (unsigned int) CameraFrame::RAW_FRAME;
+
+#ifdef CAMERAHAL_USE_RAW_IMAGE_SAVING
+ if ( mRawCapture ) {
+ struct timeval timeStampUsec;
+ gettimeofday(&timeStampUsec, NULL);
+
+ time_t saveTime;
+ time(&saveTime);
+ const struct tm * const timeStamp = gmtime(&saveTime);
+
+ char filename[256];
+ snprintf(filename,256, "%s/raw_%d_%d_%d_%lu.raw",
+ kRawImagesOutputDirPath,
+ timeStamp->tm_hour,
+ timeStamp->tm_min,
+ timeStamp->tm_sec,
+ timeStampUsec.tv_usec);
+
+ const status_t saveBufferStatus = saveBufferToFile( ((CameraBuffer*)pBuffHeader->pAppPrivate)->mapped,
+ pBuffHeader->nFilledLen, filename);
+
+ if (saveBufferStatus != OK) {
+ CAMHAL_LOGE("ERROR: %d , while saving raw!", saveBufferStatus);
+ } else {
+ CAMHAL_LOGD("raw_%d_%d_%d_%lu.raw successfully saved in %s",
+ timeStamp->tm_hour,
+ timeStamp->tm_min,
+ timeStamp->tm_sec,
+ timeStampUsec.tv_usec,
+ kRawImagesOutputDirPath);
+ stat = sendCallBacks(cameraFrame, pBuffHeader, mask, pPortParam);
+ }
+ }
+#endif
+ } else {
+ CAMHAL_LOGEA("Frame received for non-(preview/capture/measure) port. This is yet to be supported");
+ goto EXIT;
}
if ( NO_ERROR != stat )
{
+ CameraBuffer *camera_buffer;
+
+ camera_buffer = (CameraBuffer *)pBuffHeader->pAppPrivate;
+
CAMHAL_LOGDB("sendFrameToSubscribers error: %d", stat);
- returnFrame(pBuffHeader->pBuffer, typeOfFrame);
+ returnFrame(camera_buffer, typeOfFrame);
}
return eError;
@@ -3181,7 +3795,7 @@ status_t OMXCameraAdapter::recalculateFPS()
float currentFPS;
{
- Mutex::Autolock lock(mFrameCountMutex);
+ android::AutoMutex lock(mFrameCountMutex);
mFrameCount++;
if (mFrameCount == 1) {
mFirstFrameCondition.broadcast();
@@ -3213,23 +3827,6 @@ status_t OMXCameraAdapter::recalculateFPS()
return NO_ERROR;
}
-status_t OMXCameraAdapter::sendFrame(CameraFrame &frame)
-{
- status_t ret = NO_ERROR;
-
- LOG_FUNCTION_NAME;
-
-
- if ( NO_ERROR == ret )
- {
- ret = sendFrameToSubscribers(&frame);
- }
-
- LOG_FUNCTION_NAME_EXIT;
-
- return ret;
-}
-
status_t OMXCameraAdapter::sendCallBacks(CameraFrame frame, OMX_IN OMX_BUFFERHEADERTYPE *pBuffHeader, unsigned int mask, OMXCameraPortParameters *port)
{
status_t ret = NO_ERROR;
@@ -3248,11 +3845,11 @@ status_t OMXCameraAdapter::sendCallBacks(CameraFrame frame, OMX_IN OMX_BUFFERHEA
return -EINVAL;
}
- Mutex::Autolock lock(mSubscriberLock);
+ android::AutoMutex lock(mSubscriberLock);
//frame.mFrameType = typeOfFrame;
frame.mFrameMask = mask;
- frame.mBuffer = pBuffHeader->pBuffer;
+ frame.mBuffer = (CameraBuffer *)pBuffHeader->pAppPrivate;
frame.mLength = pBuffHeader->nFilledLen;
frame.mAlignment = port->mStride;
frame.mOffset = pBuffHeader->nOffset;
@@ -3284,60 +3881,9 @@ status_t OMXCameraAdapter::sendCallBacks(CameraFrame frame, OMX_IN OMX_BUFFERHEA
return ret;
}
-status_t OMXCameraAdapter::initCameraFrame( CameraFrame &frame,
- OMX_IN OMX_BUFFERHEADERTYPE *pBuffHeader,
- int typeOfFrame,
- OMXCameraPortParameters *port)
-{
- status_t ret = NO_ERROR;
-
- LOG_FUNCTION_NAME;
-
- if ( NULL == port)
- {
- CAMHAL_LOGEA("Invalid portParam");
- return -EINVAL;
- }
-
- if ( NULL == pBuffHeader )
- {
- CAMHAL_LOGEA("Invalid Buffer header");
- return -EINVAL;
- }
-
- frame.mFrameType = typeOfFrame;
- frame.mBuffer = pBuffHeader->pBuffer;
- frame.mLength = pBuffHeader->nFilledLen;
- frame.mAlignment = port->mStride;
- frame.mOffset = pBuffHeader->nOffset;
- frame.mWidth = port->mWidth;
- frame.mHeight = port->mHeight;
-
- // Timestamp in pBuffHeader->nTimeStamp is derived on DUCATI side, which is
- // is not same time value as derived using systemTime. It would be ideal to use
- // exactly same time source across Android and Ducati, which is limited by
- // system now. So, workaround for now is to find the time offset between the two
- // time sources and compensate the difference, along with the latency involved
- // in camera buffer reaching CameraHal. Also, Do timeset offset calculation only
- // when recording is in progress, when nTimestamp will be populated by Camera
- if ( onlyOnce && mRecording )
- {
- mTimeSourceDelta = (pBuffHeader->nTimeStamp * 1000) - systemTime(SYSTEM_TIME_MONOTONIC);
- mTimeSourceDelta += kCameraBufferLatencyNs;
- onlyOnce = false;
- }
-
- // Calculating the new video timestamp based on offset from ducati source.
- frame.mTimestamp = (pBuffHeader->nTimeStamp * 1000) - mTimeSourceDelta;
-
- LOG_FUNCTION_NAME_EXIT;
-
- return ret;
-}
-
bool OMXCameraAdapter::CommandHandler::Handler()
{
- TIUTILS::Message msg;
+ Utils::Message msg;
volatile int forever = 1;
status_t stat;
ErrorNotifier *errorNotify = NULL;
@@ -3348,16 +3894,19 @@ bool OMXCameraAdapter::CommandHandler::Handler()
{
stat = NO_ERROR;
CAMHAL_LOGDA("Handler: waiting for messsage...");
- TIUTILS::MessageQueue::waitForMsg(&mCommandMsgQ, NULL, NULL, -1);
+ Utils::MessageQueue::waitForMsg(&mCommandMsgQ, NULL, NULL, -1);
{
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
mCommandMsgQ.get(&msg);
}
CAMHAL_LOGDB("msg.command = %d", msg.command);
switch ( msg.command ) {
case CommandHandler::CAMERA_START_IMAGE_CAPTURE:
{
- stat = mCameraAdapter->startImageCapture();
+ OMXCameraAdapter::CachedCaptureParameters* cap_params =
+ static_cast<OMXCameraAdapter::CachedCaptureParameters*>(msg.arg2);
+ stat = mCameraAdapter->startImageCapture(false, cap_params);
+ delete cap_params;
break;
}
case CommandHandler::CAMERA_PERFORM_AUTOFOCUS:
@@ -3373,8 +3922,17 @@ bool OMXCameraAdapter::CommandHandler::Handler()
}
case CommandHandler::CAMERA_SWITCH_TO_EXECUTING:
{
- stat = mCameraAdapter->doSwitchToExecuting();
- break;
+ stat = mCameraAdapter->doSwitchToExecuting();
+ break;
+ }
+ case CommandHandler::CAMERA_START_REPROCESS:
+ {
+ OMXCameraAdapter::CachedCaptureParameters* cap_params =
+ static_cast<OMXCameraAdapter::CachedCaptureParameters*>(msg.arg2);
+ stat = mCameraAdapter->startReprocess();
+ stat = mCameraAdapter->startImageCapture(false, cap_params);
+ delete cap_params;
+ break;
}
}
@@ -3387,17 +3945,18 @@ bool OMXCameraAdapter::CommandHandler::Handler()
bool OMXCameraAdapter::OMXCallbackHandler::Handler()
{
- TIUTILS::Message msg;
+ Utils::Message msg;
volatile int forever = 1;
status_t ret = NO_ERROR;
LOG_FUNCTION_NAME;
while(forever){
- TIUTILS::MessageQueue::waitForMsg(&mCommandMsgQ, NULL, NULL, -1);
+ Utils::MessageQueue::waitForMsg(&mCommandMsgQ, NULL, NULL, -1);
{
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
mCommandMsgQ.get(&msg);
+ mIsProcessed = false;
}
switch ( msg.command ) {
@@ -3419,12 +3978,43 @@ bool OMXCameraAdapter::OMXCallbackHandler::Handler()
break;
}
}
+
+ {
+ android::AutoMutex locker(mLock);
+ CAMHAL_UNUSED(locker);
+
+ mIsProcessed = mCommandMsgQ.isEmpty();
+ if ( mIsProcessed )
+ mCondition.signal();
+ }
+ }
+
+ // force the condition to wake
+ {
+ android::AutoMutex locker(mLock);
+ CAMHAL_UNUSED(locker);
+
+ mIsProcessed = true;
+ mCondition.signal();
}
LOG_FUNCTION_NAME_EXIT;
return false;
}
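+// flush() lets a caller block until the callback handler has drained its message
+// queue: Handler() sets mIsProcessed and signals mCondition once mCommandMsgQ is
+// empty, at which point the wait below returns.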
+void OMXCameraAdapter::OMXCallbackHandler::flush()
+{
+ LOG_FUNCTION_NAME;
+
+ android::AutoMutex locker(mLock);
+ CAMHAL_UNUSED(locker);
+
+ if ( mIsProcessed )
+ return;
+
+ mCondition.wait(mLock);
+}
+
status_t OMXCameraAdapter::setExtraData(bool enable, OMX_U32 nPortIndex, OMX_EXT_EXTRADATATYPE eType) {
status_t ret = NO_ERROR;
OMX_ERRORTYPE eError = OMX_ErrorNone;
@@ -3442,7 +4032,9 @@ status_t OMXCameraAdapter::setExtraData(bool enable, OMX_U32 nPortIndex, OMX_EXT
extraDataControl.nPortIndex = nPortIndex;
extraDataControl.eExtraDataType = eType;
+#ifdef CAMERAHAL_TUNA
extraDataControl.eCameraView = OMX_2D;
+#endif
if (enable) {
extraDataControl.bEnable = OMX_TRUE;
@@ -3456,25 +4048,74 @@ status_t OMXCameraAdapter::setExtraData(bool enable, OMX_U32 nPortIndex, OMX_EXT
LOG_FUNCTION_NAME_EXIT;
- return (ret | ErrorUtils::omxToAndroidError(eError));
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
}
-
-OMX_OTHER_EXTRADATATYPE *OMXCameraAdapter::getExtradata(OMX_OTHER_EXTRADATATYPE *extraData, OMX_EXTRADATATYPE type)
+OMX_OTHER_EXTRADATATYPE *OMXCameraAdapter::getExtradata(const OMX_PTR ptrPrivate, OMX_EXTRADATATYPE type) const
{
- if ( NULL != extraData )
- {
- while ( extraData->nDataSize != 0 )
- {
- if ( type == extraData->eType )
- {
- return extraData;
- }
- extraData = (OMX_OTHER_EXTRADATATYPE*) ((char*)extraData + extraData->nSize);
- }
- }
- // Required extradata type wasn't found
- return NULL;
+ if ( NULL != ptrPrivate ) {
+ const OMX_TI_PLATFORMPRIVATE *platformPrivate = (const OMX_TI_PLATFORMPRIVATE *) ptrPrivate;
+
+ CAMHAL_LOGVB("Size = %d, sizeof = %d, pAuxBuf = 0x%x, pAuxBufSize= %d, pMetaDataBufer = 0x%x, nMetaDataSize = %d",
+ platformPrivate->nSize,
+ sizeof(OMX_TI_PLATFORMPRIVATE),
+ platformPrivate->pAuxBuf1,
+ platformPrivate->pAuxBufSize1,
+ platformPrivate->pMetaDataBuffer,
+ platformPrivate->nMetaDataSize);
+ if ( sizeof(OMX_TI_PLATFORMPRIVATE) == platformPrivate->nSize ) {
+ if ( 0 < platformPrivate->nMetaDataSize ) {
+ OMX_U32 remainingSize = platformPrivate->nMetaDataSize;
+ OMX_OTHER_EXTRADATATYPE *extraData = (OMX_OTHER_EXTRADATATYPE *) platformPrivate->pMetaDataBuffer;
+ if ( NULL != extraData ) {
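+                // Extradata nodes are chained by size: each OMX_OTHER_EXTRADATATYPE
+                // header carries its total length in nSize, so the walk advances by
+                // nSize until the terminating node (zero eType/nDataSize) or until
+                // remainingSize would be exceeded, returning the first matching eType.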
+ while ( extraData->eType && extraData->nDataSize && extraData->data &&
+ (remainingSize >= extraData->nSize)) {
+ if ( type == extraData->eType ) {
+ return extraData;
+ }
+ remainingSize -= extraData->nSize;
+ extraData = (OMX_OTHER_EXTRADATATYPE*) ((char*)extraData + extraData->nSize);
+ }
+ } else {
+ CAMHAL_LOGEB("OMX_TI_PLATFORMPRIVATE pMetaDataBuffer is NULL");
+ }
+ } else {
+ CAMHAL_LOGEB("OMX_TI_PLATFORMPRIVATE nMetaDataSize is size is %d",
+ ( unsigned int ) platformPrivate->nMetaDataSize);
+ }
+ } else {
+ CAMHAL_LOGEB("OMX_TI_PLATFORMPRIVATE size mismatch: expected = %d, received = %d",
+ ( unsigned int ) sizeof(OMX_TI_PLATFORMPRIVATE),
+ ( unsigned int ) platformPrivate->nSize);
+ }
+ } else {
+ CAMHAL_LOGEA("Invalid OMX_TI_PLATFORMPRIVATE");
+ }
+
+ // Required extradata type wasn't found
+ return NULL;
+}
+
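+// Takes a snapshot of the capture-related settings at the time a capture request is
+// queued; the copy travels with the command message (msg.arg2), presumably so that
+// parameter changes made afterwards do not affect a capture that is still pending.
+// The command handler deletes the copy once the capture has been started.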
+OMXCameraAdapter::CachedCaptureParameters* OMXCameraAdapter::cacheCaptureParameters() {
+ CachedCaptureParameters* params = new CachedCaptureParameters();
+
+ params->mPendingCaptureSettings = mPendingCaptureSettings;
+ params->mPictureRotation = mPictureRotation;
+ memcpy(params->mExposureBracketingValues,
+ mExposureBracketingValues,
+ sizeof(mExposureBracketingValues));
+ memcpy(params->mExposureGainBracketingValues,
+ mExposureGainBracketingValues,
+ sizeof(mExposureGainBracketingValues));
+ memcpy(params->mExposureGainBracketingModes,
+ mExposureGainBracketingModes,
+ sizeof(mExposureGainBracketingModes));
+ params->mExposureBracketingValidEntries = mExposureBracketingValidEntries;
+ params->mExposureBracketMode = mExposureBracketMode;
+ params->mBurstFrames = mBurstFrames;
+ params->mFlushShotConfigQueue = mFlushShotConfigQueue;
+
+ return params;
}
OMXCameraAdapter::OMXCameraAdapter(size_t sensor_index)
@@ -3488,16 +4129,19 @@ OMXCameraAdapter::OMXCameraAdapter(size_t sensor_index)
// Initial values
mTimeSourceDelta = 0;
onlyOnce = true;
+ mDccData.pData = NULL;
mInitSem.Create(0);
mFlushSem.Create(0);
mUsePreviewDataSem.Create(0);
mUsePreviewSem.Create(0);
mUseCaptureSem.Create(0);
+ mUseReprocessSem.Create(0);
mStartPreviewSem.Create(0);
mStopPreviewSem.Create(0);
mStartCaptureSem.Create(0);
mStopCaptureSem.Create(0);
+ mStopReprocSem.Create(0);
mSwitchToLoadedSem.Create(0);
mCaptureSem.Create(0);
@@ -3512,6 +4156,14 @@ OMXCameraAdapter::OMXCameraAdapter(size_t sensor_index)
mFramesWithDisplay = 0;
mFramesWithEncoder = 0;
+#ifdef CAMERAHAL_OMX_PROFILING
+
+ mDebugProfile = 0;
+
+#endif
+
+ mPreviewPortInitialized = false;
+
LOG_FUNCTION_NAME_EXIT;
}
@@ -3519,12 +4171,15 @@ OMXCameraAdapter::~OMXCameraAdapter()
{
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(gAdapterLock);
+ android::AutoMutex lock(gAdapterLock);
if ( mOmxInitialized ) {
// return to OMX Loaded state
switchToLoaded();
+ saveDccFileDataSave();
+
+ closeDccFileDataSave();
// deinit the OMX
if ( mComponentState == OMX_StateLoaded || mComponentState == OMX_StateInvalid ) {
// free the handle for the Camera component
@@ -3543,11 +4198,11 @@ OMXCameraAdapter::~OMXCameraAdapter()
{
for (unsigned int i = 0 ; i < mEventSignalQ.size() ; i++ )
{
- TIUTILS::Message *msg = mEventSignalQ.itemAt(i);
+ Utils::Message *msg = mEventSignalQ.itemAt(i);
//remove from queue and free msg
if ( NULL != msg )
{
- Semaphore *sem = (Semaphore*) msg->arg3;
+ Utils::Semaphore *sem = (Utils::Semaphore*) msg->arg3;
sem->Signal();
free(msg);
@@ -3559,7 +4214,7 @@ OMXCameraAdapter::~OMXCameraAdapter()
//Exit and free ref to command handling thread
if ( NULL != mCommandHandler.get() )
{
- TIUTILS::Message msg;
+ Utils::Message msg;
msg.command = CommandHandler::COMMAND_EXIT;
msg.arg1 = mErrorNotifier;
mCommandHandler->clearCommandQ();
@@ -3571,7 +4226,7 @@ OMXCameraAdapter::~OMXCameraAdapter()
//Exit and free ref to callback handling thread
if ( NULL != mOMXCallbackHandler.get() )
{
- TIUTILS::Message msg;
+ Utils::Message msg;
msg.command = OMXCallbackHandler::COMMAND_EXIT;
//Clear all messages pending first
mOMXCallbackHandler->clearCommandQ();
@@ -3583,10 +4238,10 @@ OMXCameraAdapter::~OMXCameraAdapter()
LOG_FUNCTION_NAME_EXIT;
}
-extern "C" CameraAdapter* CameraAdapter_Factory(size_t sensor_index)
+extern "C" CameraAdapter* OMXCameraAdapter_Factory(size_t sensor_index)
{
CameraAdapter *adapter = NULL;
- Mutex::Autolock lock(gAdapterLock);
+ android::AutoMutex lock(gAdapterLock);
LOG_FUNCTION_NAME;
@@ -3594,7 +4249,7 @@ extern "C" CameraAdapter* CameraAdapter_Factory(size_t sensor_index)
if ( adapter ) {
CAMHAL_LOGDB("New OMX Camera adapter instance created for sensor %d",sensor_index);
} else {
- CAMHAL_LOGEA("Camera adapter create failed!");
+ CAMHAL_LOGEA("OMX Camera adapter create failed for sensor index = %d!",sensor_index);
}
LOG_FUNCTION_NAME_EXIT;
@@ -3602,7 +4257,8 @@ extern "C" CameraAdapter* CameraAdapter_Factory(size_t sensor_index)
return adapter;
}
-OMX_ERRORTYPE OMXCameraAdapter::OMXCameraGetHandle(OMX_HANDLETYPE *handle, OMX_PTR pAppData )
+OMX_ERRORTYPE OMXCameraAdapter::OMXCameraGetHandle(OMX_HANDLETYPE *handle, OMX_PTR pAppData,
+ const OMX_CALLBACKTYPE & callbacks)
{
OMX_ERRORTYPE eError = OMX_ErrorUndefined;
@@ -3613,12 +4269,7 @@ OMX_ERRORTYPE OMXCameraAdapter::OMXCameraGetHandle(OMX_HANDLETYPE *handle, OMX_P
}
// setup key parameters to send to Ducati during init
- OMX_CALLBACKTYPE oCallbacks;
-
- // initialize the callback handles
- oCallbacks.EventHandler = android::OMXCameraAdapterEventHandler;
- oCallbacks.EmptyBufferDone = android::OMXCameraAdapterEmptyBufferDone;
- oCallbacks.FillBufferDone = android::OMXCameraAdapterFillBufferDone;
+ OMX_CALLBACKTYPE oCallbacks = callbacks;
// get handle
eError = OMX_GetHandle(handle, (OMX_STRING)"OMX.TI.DUCATI1.VIDEO.CAMERA", pAppData, &oCallbacks);
@@ -3633,80 +4284,231 @@ OMX_ERRORTYPE OMXCameraAdapter::OMXCameraGetHandle(OMX_HANDLETYPE *handle, OMX_P
return eError;
}
-extern "C" int CameraAdapter_Capabilities(CameraProperties::Properties* properties_array,
- const unsigned int starting_camera,
- const unsigned int max_camera) {
- int num_cameras_supported = 0;
- CameraProperties::Properties* properties = NULL;
- OMX_ERRORTYPE eError = OMX_ErrorNone;
- OMX_HANDLETYPE handle = NULL;
- OMX_TI_CAPTYPE caps;
+class CapabilitiesHandler
+{
+public:
+ CapabilitiesHandler()
+ {
+ mComponent = 0;
+ }
+
+ const OMX_HANDLETYPE & component() const
+ {
+ return mComponent;
+ }
+
+ OMX_HANDLETYPE & componentRef()
+ {
+ return mComponent;
+ }
+
+ status_t fetchCapabiltiesForMode(OMX_CAMOPERATINGMODETYPE mode,
+ int sensorId,
+ CameraProperties::Properties * properties)
+ {
+ OMX_CONFIG_CAMOPERATINGMODETYPE camMode;
+
+ OMX_INIT_STRUCT_PTR (&camMode, OMX_CONFIG_CAMOPERATINGMODETYPE);
+ camMode.eCamOperatingMode = mode;
+
+ OMX_ERRORTYPE eError = OMX_SetParameter(component(),
+ ( OMX_INDEXTYPE ) OMX_IndexCameraOperatingMode,
+ &camMode);
+
+ if ( OMX_ErrorNone != eError ) {
+ CAMHAL_LOGE("Error while configuring camera mode in CameraAdapter_Capabilities 0x%x", eError);
+ return BAD_VALUE;
+ }
+
+ // get and fill capabilities
+ OMXCameraAdapter::getCaps(sensorId, properties, component());
+
+ return NO_ERROR;
+ }
+
+ status_t fetchCapabilitiesForSensor(int sensorId,
+ CameraProperties::Properties * properties)
+ {
+ // sensor select
+ OMX_CONFIG_SENSORSELECTTYPE sensorSelect;
+ OMX_INIT_STRUCT_PTR (&sensorSelect, OMX_CONFIG_SENSORSELECTTYPE);
+ sensorSelect.eSensor = (OMX_SENSORSELECT)sensorId;
+
+ CAMHAL_LOGD("Selecting sensor %d...", sensorId);
+ const OMX_ERRORTYPE sensorSelectError = OMX_SetConfig(component(),
+ (OMX_INDEXTYPE)OMX_TI_IndexConfigSensorSelect, &sensorSelect);
+ CAMHAL_LOGD("Selecting sensor %d... DONE", sensorId);
+
+ if ( sensorSelectError != OMX_ErrorNone ) {
+ CAMHAL_LOGD("Max supported sensor number reached: %d", sensorId);
+ return BAD_VALUE;
+ }
+
+ status_t err = NO_ERROR;
+ if ( sensorId == 2 ) {
+ CAMHAL_LOGD("Camera mode: STEREO");
+ properties->setMode(MODE_STEREO);
+ err = fetchCapabiltiesForMode(OMX_CaptureStereoImageCapture,
+ sensorId,
+ properties);
+ } else {
+ CAMHAL_LOGD("Camera MONO");
+
+ CAMHAL_LOGD("Camera mode: HQ ");
+ properties->setMode(MODE_HIGH_QUALITY);
+ err = fetchCapabiltiesForMode(OMX_CaptureImageProfileBase,
+ sensorId,
+ properties);
+ if ( NO_ERROR != err ) {
+ return err;
+ }
+
+ CAMHAL_LOGD("Camera mode: VIDEO ");
+ properties->setMode(MODE_VIDEO);
+ err = fetchCapabiltiesForMode(OMX_CaptureVideo,
+ sensorId,
+ properties);
+ if ( NO_ERROR != err ) {
+ return err;
+ }
+
+ CAMHAL_LOGD("Camera mode: ZSL ");
+ properties->setMode(MODE_ZEROSHUTTERLAG);
+ err = fetchCapabiltiesForMode(OMX_TI_CaptureImageProfileZeroShutterLag,
+ sensorId,
+ properties);
+ if ( NO_ERROR != err ) {
+ return err;
+ }
+
+ CAMHAL_LOGD("Camera mode: HS ");
+ properties->setMode(MODE_HIGH_SPEED);
+ err = fetchCapabiltiesForMode(OMX_CaptureImageHighSpeedTemporalBracketing,
+ sensorId,
+ properties);
+ if ( NO_ERROR != err ) {
+ return err;
+ }
+
+ CAMHAL_LOGD("Camera mode: CPCAM ");
+ properties->setMode(MODE_CPCAM);
+ err = fetchCapabiltiesForMode(OMX_TI_CPCam,
+ sensorId,
+ properties);
+ if ( NO_ERROR != err ) {
+ return err;
+ }
+
+#ifdef CAMERAHAL_OMAP5_CAPTURE_MODES
+
+ CAMHAL_LOGD("Camera mode: VIDEO HQ ");
+ properties->setMode(MODE_VIDEO_HIGH_QUALITY);
+ err = fetchCapabiltiesForMode(OMX_CaptureHighQualityVideo,
+ sensorId,
+ properties);
+ if ( NO_ERROR != err ) {
+ return err;
+ }
+
+#endif
+
+ }
+
+ return err;
+ }
+
+private:
+ OMX_HANDLETYPE mComponent;
+ OMX_STATETYPE mState;
+};
+
+extern "C" status_t OMXCameraAdapter_Capabilities(
+ CameraProperties::Properties * const properties_array,
+ const int starting_camera, const int max_camera, int & supportedCameras)
+{
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(gAdapterLock);
+ supportedCameras = 0;
+
+ int num_cameras_supported = 0;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+
+ android::AutoMutex lock(gAdapterLock);
if (!properties_array) {
CAMHAL_LOGEB("invalid param: properties = 0x%p", properties_array);
LOG_FUNCTION_NAME_EXIT;
- return -EINVAL;
+ return BAD_VALUE;
}
eError = OMX_Init();
if (eError != OMX_ErrorNone) {
CAMHAL_LOGEB("Error OMX_Init -0x%x", eError);
- return eError;
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
- eError = OMXCameraAdapter::OMXCameraGetHandle(&handle);
+ CapabilitiesHandler handler;
+ OMX_CALLBACKTYPE callbacks;
+ callbacks.EventHandler = 0;
+ callbacks.EmptyBufferDone = 0;
+ callbacks.FillBufferDone = 0;
+
+ eError = OMXCameraAdapter::OMXCameraGetHandle(&handler.componentRef(), &handler, callbacks);
if (eError != OMX_ErrorNone) {
CAMHAL_LOGEB("OMX_GetHandle -0x%x", eError);
goto EXIT;
}
+ DCCHandler dcc_handler;
+ dcc_handler.loadDCC(handler.componentRef());
+
// Continue selecting sensor and then querying OMX Camera for it's capabilities
// When sensor select returns an error, we know to break and stop
while (eError == OMX_ErrorNone &&
(starting_camera + num_cameras_supported) < max_camera) {
- // sensor select
- OMX_CONFIG_SENSORSELECTTYPE sensorSelect;
- OMX_INIT_STRUCT_PTR (&sensorSelect, OMX_CONFIG_SENSORSELECTTYPE);
- sensorSelect.eSensor = (OMX_SENSORSELECT) num_cameras_supported;
- eError = OMX_SetConfig(handle, ( OMX_INDEXTYPE ) OMX_TI_IndexConfigSensorSelect, &sensorSelect);
-
- if ( OMX_ErrorNone != eError ) {
- break;
- }
- // get and fill capabilities
- properties = properties_array + starting_camera + num_cameras_supported;
- OMXCameraAdapter::getCaps(properties, handle);
+ const int sensorId = num_cameras_supported;
+ CameraProperties::Properties * properties = properties_array + starting_camera + sensorId;
+ const status_t err = handler.fetchCapabilitiesForSensor(sensorId, properties);
- // need to fill facing information
- // assume that only sensor 0 is back facing
- if (num_cameras_supported == 0) {
- properties->set(CameraProperties::FACING_INDEX, TICameraParameters::FACING_BACK);
- } else {
- properties->set(CameraProperties::FACING_INDEX, TICameraParameters::FACING_FRONT);
- }
+ if ( err != NO_ERROR )
+ break;
num_cameras_supported++;
+ CAMHAL_LOGEB("Number of OMX Cameras detected = %d \n",num_cameras_supported);
}
+ // clean up
+ if(handler.component()) {
+ CAMHAL_LOGD("Freeing the component...");
+ OMX_FreeHandle(handler.component());
+ CAMHAL_LOGD("Freeing the component... DONE");
+ handler.componentRef() = NULL;
+ }
+
EXIT:
- // clean up
- if(handle) {
- OMX_FreeHandle(handle);
- handle=NULL;
- }
+ CAMHAL_LOGD("Deinit...");
OMX_Deinit();
+ CAMHAL_LOGD("Deinit... DONE");
+
+ if ( eError != OMX_ErrorNone )
+ {
+ CAMHAL_LOGE("Error: 0x%x", eError);
+ LOG_FUNCTION_NAME_EXIT;
+ return Utils::ErrorUtils::omxToAndroidError(eError);
+ }
+
+ supportedCameras = num_cameras_supported;
LOG_FUNCTION_NAME_EXIT;
- return num_cameras_supported;
+ return NO_ERROR;
}
-};
+} // namespace Camera
+} // namespace Ti
/*--------------------Camera Adapter Class ENDS here-----------------------------*/
+
diff --git a/camera/OMXCameraAdapter/OMXCapabilities.cpp b/camera/OMXCameraAdapter/OMXCapabilities.cpp
index e1323ee..1807945 100644
--- a/camera/OMXCameraAdapter/OMXCapabilities.cpp
+++ b/camera/OMXCameraAdapter/OMXCapabilities.cpp
@@ -26,46 +26,93 @@
#include "ErrorUtils.h"
#include "TICameraParameters.h"
-namespace android {
-
-#undef LOG_TAG
-
-// Maintain a separate tag for OMXCameraAdapter logs to isolate issues OMX specific
-#define LOG_TAG "CameraHAL"
+namespace Ti {
+namespace Camera {
/************************************
* global constants and variables
*************************************/
#define ARRAY_SIZE(array) (sizeof((array)) / sizeof((array)[0]))
-#define FPS_MIN 5
-#define FPS_STEP 5
-#define FPS_RANGE_STEP 5
static const char PARAM_SEP[] = ",";
-static const int PARAM_SEP_CHAR = ',';
static const uint32_t VFR_OFFSET = 8;
-static const char VFR_BACKET_START[] = "(";
-static const char VFR_BRACKET_END[] = ")";
-static const char FRAMERATE_COUNT = 10;
+static const char FPS_STR_MAX_LEN = 10;
+
+static const unsigned int MANUAL_EXPOSURE_STEP = 1;
+static const unsigned int MANUAL_GAIN_ISO_MIN = 100;
+static const unsigned int MANUAL_GAIN_ISO_STEP = 100;
+
+const int OMXCameraAdapter::SENSORID_IMX060 = 300;
+const int OMXCameraAdapter::SENSORID_OV5650 = 301;
+const int OMXCameraAdapter::SENSORID_OV5640 = 302;
+const int OMXCameraAdapter::SENSORID_OV14825 = 304;
+const int OMXCameraAdapter::SENSORID_S5K4E1GA = 305;
+const int OMXCameraAdapter::SENSORID_S5K6A1GX03 = 306;
+const int OMXCameraAdapter::SENSORID_OV8830 = 310;
+const int OMXCameraAdapter::SENSORID_OV2722 = 311;
+
+
+const int OMXCameraAdapter::FPS_MIN = 5;
+const int OMXCameraAdapter::FPS_MAX = 30;
+const int OMXCameraAdapter::FPS_MAX_EXTENDED = 60;
+
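+// Ducati appears to report variable frame rates with VFR_OFFSET (8) fractional bits,
+// so the right shift recovers the integer fps; multiplying by CameraHal::VFR_SCALE
+// (presumably 1000, matching Android's fps*1000 range convention) yields the value
+// the framework expects.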
+inline static int androidFromDucatiFrameRate(OMX_U32 frameRate) {
+ return (frameRate >> VFR_OFFSET) * CameraHal::VFR_SCALE;
+}
/**** look up tables to translate OMX Caps to Parameter ****/
const CapResolution OMXCameraAdapter::mImageCapRes [] = {
+ { 4416, 3312, "4416x3312" },
{ 4032, 3024, "4032x3024" },
{ 4000, 3000, "4000x3000" },
{ 3648, 2736, "3648x2736" },
{ 3264, 2448, "3264x2448" },
+ { 2608, 1960, "2608x1960" },
{ 2592, 1944, "2592x1944" },
{ 2592, 1728, "2592x1728" },
{ 2592, 1458, "2592x1458" },
+ { 2304, 1296, "2304x1296" },
+ { 2240, 1344, "2240x1344" },
+ { 2160, 1440, "2160x1440" },
+ { 2112, 1728, "2112x1728" },
{ 2048, 1536, "2048x1536" },
+ { 2016, 1512, "2016x1512" },
+ { 2000, 1600, "2000x1600" },
{ 1600, 1200, "1600x1200" },
{ 1280, 1024, "1280x1024" },
- { 1152, 864, "1152x864" },
- { 1280, 960, "1280x960" },
- { 640, 480, "640x480" },
- { 320, 240, "320x240" },
+ { 1152, 864, "1152x864" },
+ { 1280, 960, "1280x960" },
+ { 1024, 768, "1024x768" },
+ { 640, 480, "640x480" },
+ { 320, 240, "320x240" },
+};
+
+const CapResolution OMXCameraAdapter::mImageCapResSS [] = {
+ { 4032*2, 3024, "8064x3024" },
+ { 3648*2, 2736, "7296x2736" },
+ { 3264*2, 2448, "6528x2448" },
+ { 2592*2, 1944, "5184x1944" },
+ { 2048*2, 1536, "4096x1536" },
+ { 1600*2, 1200, "3200x1200" },
+ { 1280*2, 960, "2560x960" },
+ { 1024*2, 768, "2048x768" },
+ { 640*2, 480, "1280x480" },
+ { 320*2, 240, "640x240" },
+};
+
+const CapResolution OMXCameraAdapter::mImageCapResTB [] = {
+ { 4032, 3024*2, "4032x6048" },
+ { 3648, 2736*2, "3648x5472" },
+ { 3264, 2448*2, "3264x4896" },
+ { 2592, 1944*2, "2592x3888" },
+ { 2048, 1536*2, "2048x3072" },
+ { 1600, 1200*2, "1600x2400" },
+ { 1280, 960*2, "1280x1920" },
+ { 1024, 768*2, "1024x1536" },
+ { 640, 480*2, "640x960" },
+ { 320, 240*2, "320x480" },
};
const CapResolution OMXCameraAdapter::mPreviewRes [] = {
@@ -81,9 +128,57 @@ const CapResolution OMXCameraAdapter::mPreviewRes [] = {
{ 352, 288, "352x288" },
{ 240, 160, "240x160" },
{ 176, 144, "176x144" },
+ { 160, 120, "160x120" },
{ 128, 96, "128x96" },
};
+const CapResolution OMXCameraAdapter::mPreviewPortraitRes [] = {
+ //Portrait resolutions
+ { 1088, 1920, "1088x1920" },
+ { 720, 1280, "720x1280" },
+ { 480, 800, "480x800" },
+ { 576, 720, "576x720" },
+ { 576, 768, "576x768" },
+ { 480, 720, "480x720" },
+ { 480, 640, "480x640" },
+ { 288, 352, "288x352" },
+ { 240, 320, "240x320" },
+ { 160, 240, "160x240" },
+ { 144, 176, "144x176" },
+ { 120, 160, "120x160"},
+ { 96, 128, "96x128" }
+};
+
+const CapResolution OMXCameraAdapter::mPreviewResSS [] = {
+ { 1920*2, 1080, "3840x1080" },
+ { 1280*2, 720, "2560x720" },
+ { 800*2, 480, "1600x480" },
+ { 720*2, 576, "1440x576" },
+ { 720*2, 480, "1440x480" },
+ { 768*2, 576, "1536x576" },
+ { 640*2, 480, "1280x480" },
+ { 320*2, 240, "640x240" },
+ { 352*2, 288, "704x288" },
+ { 240*2, 160, "480x160" },
+ { 176*2, 144, "352x144" },
+ { 128*2, 96, "256x96" }
+};
+
+const CapResolution OMXCameraAdapter::mPreviewResTB [] = {
+ { 1920, 1080*2, "1920x2160" },
+ { 1280, 720*2, "1280x1440" },
+ { 800, 480*2, "800x960" },
+ { 720, 576*2, "720x1152" },
+ { 720, 480*2, "720x960" },
+ { 768, 576*2, "768x1152" },
+ { 640, 480*2, "640x960" },
+ { 320, 240*2, "320x480" },
+ { 352, 288*2, "352x576" },
+ { 240, 160*2, "240x320" },
+ { 176, 144*2, "176x288" },
+ { 128, 96*2, "128x192" },
+};
+
const CapResolution OMXCameraAdapter::mThumbRes [] = {
{ 640, 480, "640x480" },
{ 160, 120, "160x120" },
@@ -96,16 +191,40 @@ const CapResolution OMXCameraAdapter::mThumbRes [] = {
};
const CapPixelformat OMXCameraAdapter::mPixelformats [] = {
- { OMX_COLOR_FormatCbYCrY, CameraParameters::PIXEL_FORMAT_YUV422I },
- { OMX_COLOR_FormatYUV420SemiPlanar, CameraParameters::PIXEL_FORMAT_YUV420SP },
- { OMX_COLOR_Format16bitRGB565, CameraParameters::PIXEL_FORMAT_RGB565 },
- { OMX_COLOR_FormatRawBayer10bit, TICameraParameters::PIXEL_FORMAT_RAW },
- { OMX_COLOR_FormatYUV420SemiPlanar, CameraParameters::PIXEL_FORMAT_YUV420P },
+ { OMX_COLOR_FormatCbYCrY, android::CameraParameters::PIXEL_FORMAT_YUV422I },
+ { OMX_COLOR_FormatYUV420SemiPlanar, android::CameraParameters::PIXEL_FORMAT_YUV420SP },
+ { OMX_COLOR_Format16bitRGB565, android::CameraParameters::PIXEL_FORMAT_RGB565 },
+ { OMX_COLOR_FormatYUV420SemiPlanar, android::CameraParameters::PIXEL_FORMAT_YUV420P },
+ { OMX_COLOR_FormatUnused, TICameraParameters::PIXEL_FORMAT_UNUSED },
+ { OMX_COLOR_FormatRawBayer10bit, android::CameraParameters::PIXEL_FORMAT_BAYER_RGGB },
+};
+
+const userToOMX_LUT OMXCameraAdapter::mFrameLayout [] = {
+ { TICameraParameters::S3D_NONE, OMX_TI_StereoFrameLayout2D },
+ { TICameraParameters::S3D_TB_FULL, OMX_TI_StereoFrameLayoutTopBottom },
+ { TICameraParameters::S3D_SS_FULL, OMX_TI_StereoFrameLayoutLeftRight },
+ { TICameraParameters::S3D_TB_SUBSAMPLED, OMX_TI_StereoFrameLayoutTopBottomSubsample },
+ { TICameraParameters::S3D_SS_SUBSAMPLED, OMX_TI_StereoFrameLayoutLeftRightSubsample },
+};
+
+const LUTtype OMXCameraAdapter::mLayoutLUT = {
+ ARRAY_SIZE(mFrameLayout),
+ mFrameLayout
+};
+
+const CapCodingFormat OMXCameraAdapter::mImageCodingFormat [] = {
+ { OMX_IMAGE_CodingJPEG, android::CameraParameters::PIXEL_FORMAT_JPEG },
+ { (OMX_IMAGE_CODINGTYPE)OMX_TI_IMAGE_CodingJPS, TICameraParameters::PIXEL_FORMAT_JPS },
+ { (OMX_IMAGE_CODINGTYPE)OMX_TI_IMAGE_CodingMPO, TICameraParameters::PIXEL_FORMAT_MPO },
};
const CapFramerate OMXCameraAdapter::mFramerates [] = {
+ { 60, "60" },
{ 30, "30" },
+ { 24, "24" },
+ { 20, "20" },
{ 15, "15" },
+ { 10, "10" },
};
const CapZoom OMXCameraAdapter::mZoomStages [] = {
@@ -185,37 +304,45 @@ const CapISO OMXCameraAdapter::mISOStages [] = {
// mapped values have to match with new_sensor_MSP.h
const CapU32 OMXCameraAdapter::mSensorNames [] = {
- { 300, "IMX060" },
- { 301, "OV5650" },
- { 305, "S5K4E1GA"},
- { 306, "S5K6A1GX03" }
+ { SENSORID_IMX060, "IMX060" },
+ { SENSORID_OV5650, "OV5650" },
+ { SENSORID_OV5640, "OV5640" },
+ { SENSORID_OV14825, "OV14825"},
+ { SENSORID_S5K4E1GA, "S5K4E1GA"},
+ { SENSORID_S5K6A1GX03, "S5K6A1GX03" },
+ { SENSORID_OV8830, "OV8830" },
+ { SENSORID_OV2722, "OV2722" }
// TODO(XXX): need to account for S3D camera later
};
-// values for supported variable framerates sorted in ascending order
-// CapU32Pair = (max fps, min fps, string representation)
-const CapU32Pair OMXCameraAdapter::mVarFramerates [] = {
- { 15, 15, "(15000,15000)"},
- { 30, 15, "(15000,30000)" },
- { 30, 24, "(24000,30000)" },
-// TODO(XXX): Removing 30,30 range to limit 1080p at 24fps. Will put back soon.
-#if 0
- { 30, 30, "(30000,30000)" },
-#endif
+const userToOMX_LUT OMXCameraAdapter::mAutoConvergence [] = {
+ { TICameraParameters::AUTOCONVERGENCE_MODE_DISABLE, OMX_TI_AutoConvergenceModeDisable },
+ { TICameraParameters::AUTOCONVERGENCE_MODE_FRAME, OMX_TI_AutoConvergenceModeFrame },
+ { TICameraParameters::AUTOCONVERGENCE_MODE_CENTER, OMX_TI_AutoConvergenceModeCenter },
+ { TICameraParameters::AUTOCONVERGENCE_MODE_TOUCH, OMX_TI_AutoConvergenceModeFocusFaceTouch },
+ { TICameraParameters::AUTOCONVERGENCE_MODE_MANUAL, OMX_TI_AutoConvergenceModeManual }
};
-/************************************
- * static helper functions
- *************************************/
-// utility function to remove last seperator
-void remove_last_sep(char* buffer) {
- char* last_sep = NULL;
- last_sep = strrchr(buffer, PARAM_SEP_CHAR);
- if (last_sep != NULL) {
- last_sep[0] = '\0';
- }
-}
+const LUTtype OMXCameraAdapter::mAutoConvergenceLUT = {
+ ARRAY_SIZE(mAutoConvergence),
+ mAutoConvergence
+};
+const userToOMX_LUT OMXCameraAdapter::mBracketingModes [] = {
+ { TICameraParameters::TEMP_BRACKETING , OMX_BracketTemporal },
+ { TICameraParameters::EXPOSURE_BRACKETING , OMX_BracketExposureRelativeInEV }
+};
+
+const LUTtype OMXCameraAdapter::mBracketingModesLUT = {
+ ARRAY_SIZE(mBracketingModes),
+ mBracketingModes
+};
+
+// values for supported camera facing direction
+const CapU32 OMXCameraAdapter::mFacing [] = {
+ { OMX_TI_SENFACING_BACK , TICameraParameters::FACING_BACK },
+ { OMX_TI_SENFACING_FRONT, TICameraParameters::FACING_FRONT},
+};
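The userToOMX_LUT/LUTtype tables above pair a HAL-visible string with an OMX enum, and getLUTvalue_OMXtoHAL() walks such a table later in this file. A minimal sketch of that lookup, with struct and field names assumed purely for illustration (the real definitions live in the HAL headers):

    struct userToOMX_LUT_sketch { const char *userDefinition; int omxDefinition; };
    struct LUTtype_sketch { int size; const userToOMX_LUT_sketch *table; };

    static const char *lookupOmxToUser(int omxValue, const LUTtype_sketch &lut) {
        for (int i = 0; i < lut.size; i++) {
            if (lut.table[i].omxDefinition == omxValue) {
                return lut.table[i].userDefinition;   // e.g. the string for a frame layout
            }
        }
        return NULL;                                  // callers skip unsupported values
    }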
/*****************************************
* internal static function declarations
@@ -223,24 +350,28 @@ void remove_last_sep(char* buffer) {
/**** Utility functions to help translate OMX Caps to Parameter ****/
-status_t OMXCameraAdapter::encodePixelformatCap(OMX_COLOR_FORMATTYPE format,
- const CapPixelformat *cap,
- size_t capCount,
- char * buffer,
- size_t bufferSize) {
+status_t OMXCameraAdapter::encodeImageCodingFormatCap(OMX_IMAGE_CODINGTYPE format,
+ const CapCodingFormat *cap,
+ size_t capCount,
+ char * buffer) {
+
status_t ret = NO_ERROR;
LOG_FUNCTION_NAME;
if ( ( NULL == buffer ) || ( NULL == cap ) ) {
CAMHAL_LOGEA("Invalid input arguments");
- return -EINVAL;
+ ret = -EINVAL;
}
- for ( unsigned int i = 0; i < capCount; i++ ) {
- if ( format == cap[i].pixelformat ) {
- strncat(buffer, cap[i].param, bufferSize - 1);
- strncat(buffer, PARAM_SEP, bufferSize - 1);
+ if ( NO_ERROR == ret ) {
+ for ( unsigned int i = 0 ; i < capCount ; i++ ) {
+ if ( format == cap[i].imageCodingFormat ) {
+ if (buffer[0] != '\0') {
+ strncat(buffer, PARAM_SEP, ((((int)MAX_PROP_VALUE_LENGTH - 1 - (int)strlen(buffer)) < 0) ? 0 : (MAX_PROP_VALUE_LENGTH - 1 - strlen(buffer))));
+ }
+ strncat(buffer, cap[i].param, ((((int)MAX_PROP_VALUE_LENGTH - 1 - (int)strlen(buffer)) < 0) ? 0 : (MAX_PROP_VALUE_LENGTH - 1 - strlen(buffer))));
+ }
}
}
@@ -249,16 +380,13 @@ status_t OMXCameraAdapter::encodePixelformatCap(OMX_COLOR_FORMATTYPE format,
return ret;
}
-status_t OMXCameraAdapter::encodeFramerateCap(OMX_U32 framerateMax,
- OMX_U32 framerateMin,
- const CapFramerate *cap,
- size_t capCount,
- char * buffer,
- size_t bufferSize) {
+status_t OMXCameraAdapter::encodePixelformatCap(OMX_COLOR_FORMATTYPE format,
+ const CapPixelformat *cap,
+ size_t capCount,
+ char * buffer,
+ size_t bufferSize)
+{
status_t ret = NO_ERROR;
- bool minInserted = false;
- bool maxInserted = false;
- char tmpBuffer[FRAMERATE_COUNT];
LOG_FUNCTION_NAME;
@@ -267,113 +395,86 @@ status_t OMXCameraAdapter::encodeFramerateCap(OMX_U32 framerateMax,
return -EINVAL;
}
- for ( unsigned int i = 0; i < capCount; i++ ) {
- if ( (framerateMax >= cap[i].num) && (framerateMin <= cap[i].num) ) {
- strncat(buffer, cap[i].param, bufferSize - 1);
- strncat(buffer, PARAM_SEP, bufferSize - 1);
- if ( cap[i].num == framerateMin ) {
- minInserted = true;
+ for ( unsigned int i = 0 ; i < capCount ; i++ )
+ {
+ if ( format == cap[i].pixelformat )
+ {
+ if (buffer[0] != '\0') {
+ strncat(buffer, PARAM_SEP, bufferSize - 1);
+ }
+ strncat(buffer, cap[i].param, bufferSize - 1);
}
}
- if ( cap[i].num == framerateMax ) {
- maxInserted = true;
- }
- }
-
- if ( !maxInserted ) {
- memset(tmpBuffer, 0, FRAMERATE_COUNT);
- snprintf(tmpBuffer, FRAMERATE_COUNT - 1, "%u,", ( unsigned int ) framerateMax);
- strncat(buffer, tmpBuffer, bufferSize - 1);
- strncat(buffer, PARAM_SEP, bufferSize - 1);
- }
-
- if ( !minInserted ) {
- memset(tmpBuffer, 0, FRAMERATE_COUNT);
- snprintf(tmpBuffer, FRAMERATE_COUNT - 1, "%u,", ( unsigned int ) framerateMin);
- strncat(buffer, tmpBuffer, bufferSize - 1);
- strncat(buffer, PARAM_SEP, bufferSize - 1);
- }
-
- remove_last_sep(buffer);
LOG_FUNCTION_NAME_EXIT;
return ret;
}
-status_t OMXCameraAdapter::encodeVFramerateCap(OMX_TI_CAPTYPE &caps,
- const CapU32Pair *cap,
- size_t capCount,
- char *buffer,
- char *defaultRange,
- size_t bufferSize) {
- status_t ret = NO_ERROR;
- uint32_t minVFR, maxVFR;
- int default_index = -1;
-
+void OMXCameraAdapter::encodeFrameRates(const int minFrameRate, const int maxFrameRate,
+ const OMX_TI_CAPTYPE & caps, const CapFramerate * const fixedFrameRates,
+ const int frameRateCount, android::Vector<FpsRange> & fpsRanges) {
LOG_FUNCTION_NAME;
- if ( (NULL == buffer) || (NULL == cap) ) {
- CAMHAL_LOGEA("Invalid input arguments");
- return -EINVAL;
+ if ( minFrameRate == maxFrameRate ) {
+ // single fixed frame rate supported
+ fpsRanges.add(FpsRange(minFrameRate, maxFrameRate));
+ return;
}
- if(caps.ulPrvVarFPSModesCount < 1) {
- return NO_ERROR;
- }
+ // insert min and max frame rates
+ fpsRanges.add(FpsRange(minFrameRate, minFrameRate));
+ fpsRanges.add(FpsRange(maxFrameRate, maxFrameRate));
- // Assumption: last range in tPrvVarFPSModes will be for S30FPSHD mode
- minVFR = caps.tPrvVarFPSModes[caps.ulPrvVarFPSModesCount-1].nVarFPSMin >> VFR_OFFSET;
- maxVFR = caps.tPrvVarFPSModes[caps.ulPrvVarFPSModesCount-1].nVarFPSMax >> VFR_OFFSET;
+ // insert variable frame rates
+ for ( int i = 0; i < static_cast<int>(caps.ulPrvVarFPSModesCount); ++i ) {
+ const FpsRange fpsRange = FpsRange(
+ max(androidFromDucatiFrameRate(caps.tPrvVarFPSModes[i].nVarFPSMin), minFrameRate),
+ min(androidFromDucatiFrameRate(caps.tPrvVarFPSModes[i].nVarFPSMax), maxFrameRate));
- if (minVFR < FPS_MIN) {
- minVFR = FPS_MIN;
+ if ( fpsRange.isFixed() ) {
+ // this range is either min or max fixed frame rate, already added above
+ continue;
+ }
+
+ fpsRanges.add(fpsRange);
}
- for (unsigned int i = 0; i < capCount; i++) {
- // add cap[i] if it is in range and maxVFR != minVFR
- if ((maxVFR >= cap[i].num1) && (minVFR <= cap[i].num2)) {
- if (buffer[0] != '\0') {
- strncat(buffer, PARAM_SEP, bufferSize - 1);
- }
- strncat(buffer, cap[i].param, bufferSize - 1);
+ // insert fixed frame rates
+ for ( int i = 0; i < frameRateCount; ++i ) {
+ const int fixedFrameRate = fixedFrameRates[i].num * CameraHal::VFR_SCALE;
- // choose the max variable framerate as default
- if (cap[i].num1 != cap[i].num2) {
- default_index = i;
- }
+ if ( fixedFrameRate < minFrameRate || fixedFrameRate > maxFrameRate ) {
+ // not supported by hardware
+ continue;
}
- }
- // if we haven't found any caps in the list to populate
- // just use the min and max
- if (buffer[0] == '\0') {
- snprintf(buffer, bufferSize - 1,
- "(%u,%u)",
- minVFR * CameraHal::VFR_SCALE,
- maxVFR * CameraHal::VFR_SCALE);
+ const FpsRange fpsRange = FpsRange(fixedFrameRate, fixedFrameRate);
+ fpsRanges.add(fpsRange);
}
- if (default_index != -1) {
- snprintf(defaultRange, (MAX_PROP_VALUE_LENGTH - 1), "%lu,%lu",
- cap[default_index].num2 * CameraHal::VFR_SCALE,
- cap[default_index].num1 * CameraHal::VFR_SCALE);
- } else {
- snprintf(defaultRange, (MAX_PROP_VALUE_LENGTH - 1), "%u,%u",
- minVFR * CameraHal::VFR_SCALE, maxVFR * CameraHal::VFR_SCALE);
- }
+ // sort first by max, then by min, according to Android API requirements
+ fpsRanges.sort(FpsRange::compare);
- LOG_FUNCTION_NAME_EXIT;
-
- return ret;
+ // remove duplicated frame rates
+ for ( int i = 0; i < static_cast<int>(fpsRanges.size()) - 1; ) {
+ const FpsRange & current = fpsRanges.itemAt(i);
+ const FpsRange & next = fpsRanges.itemAt(i + 1);
+ if ( current == next ) {
+ fpsRanges.removeAt(i + 1);
+ } else {
+ i++;
+ }
+ }
}
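The function above merges fixed and variable frame-rate ranges, clamps them to the hardware limits, sorts by (max, min) as the Android API expects, and removes duplicates. A stand-alone sketch of the same shape using std::vector instead of android::Vector; everything here is illustrative, not the adapter's code, and values are in fps scaled like FpsRange:

    #include <algorithm>
    #include <utility>
    #include <vector>

    typedef std::pair<int, int> Range;   // (min, max)

    static bool byMaxThenMin(const Range &a, const Range &b) {
        if (a.second != b.second) return a.second < b.second;
        return a.first < b.first;
    }

    static void buildRanges(int minFps, int maxFps,
                            const std::vector<Range> &variableModes,
                            const std::vector<int> &fixedRates,
                            std::vector<Range> &out) {
        out.push_back(Range(minFps, minFps));                 // hardware endpoints as fixed rates
        out.push_back(Range(maxFps, maxFps));
        for (size_t i = 0; i < variableModes.size(); i++) {   // clamp variable modes
            Range r(std::max(variableModes[i].first, minFps),
                    std::min(variableModes[i].second, maxFps));
            if (r.first != r.second) out.push_back(r);        // fixed ones were added above
        }
        for (size_t i = 0; i < fixedRates.size(); i++) {      // table of advertised fixed rates
            if (fixedRates[i] >= minFps && fixedRates[i] <= maxFps)
                out.push_back(Range(fixedRates[i], fixedRates[i]));
        }
        std::sort(out.begin(), out.end(), byMaxThenMin);                 // Android ordering requirement
        out.erase(std::unique(out.begin(), out.end()), out.end());      // drop duplicates
    }

For example, with hardware limits [7500, 30000], one variable mode (7500, 30000) and fixed table entries of 15 and 24 fps, this yields (7500,7500), (15000,15000), (24000,24000), (7500,30000), (30000,30000); the last entry is the range that insertFramerates() later picks as the default.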
size_t OMXCameraAdapter::encodeZoomCap(OMX_S32 maxZoom,
const CapZoom *cap,
size_t capCount,
char * buffer,
- size_t bufferSize) {
+ size_t bufferSize)
+{
status_t res = NO_ERROR;
size_t ret = 0;
@@ -387,12 +488,13 @@ size_t OMXCameraAdapter::encodeZoomCap(OMX_S32 maxZoom,
for ( unsigned int i = 0; i < capCount; i++ ) {
if ( cap[i].num <= maxZoom ) {
+ if (buffer[0] != '\0') {
+ strncat(buffer, PARAM_SEP, bufferSize - 1);
+ }
strncat(buffer, cap[i].param, bufferSize - 1);
- strncat(buffer, PARAM_SEP, bufferSize - 1);
ret++;
}
}
- remove_last_sep(buffer);
LOG_FUNCTION_NAME_EXIT;
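Across these encode*Cap() hunks the trailing-separator-plus-remove_last_sep() pattern is replaced by prepending the separator only when the buffer already holds something. The idiom in isolation, as a sketch; note the bound passed to strncat here is the remaining space, which is the safer way to size it:

    #include <cstring>

    static void buildList(char *supported, size_t size, const char **items, size_t count) {
        supported[0] = '\0';
        for (size_t i = 0; i < count; i++) {
            if (supported[0] != '\0') {
                strncat(supported, ",", size - strlen(supported) - 1);   // separator first...
            }
            strncat(supported, items[i], size - strlen(supported) - 1);  // ...then the item
        }
        // e.g. {"640x480", "320x240"} -> "640x480,320x240", no trailing separator to trim
    }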
@@ -403,7 +505,8 @@ status_t OMXCameraAdapter::encodeISOCap(OMX_U32 maxISO,
const CapISO *cap,
size_t capCount,
char * buffer,
- size_t bufferSize) {
+ size_t bufferSize)
+{
status_t ret = NO_ERROR;
LOG_FUNCTION_NAME;
@@ -415,11 +518,12 @@ status_t OMXCameraAdapter::encodeISOCap(OMX_U32 maxISO,
for ( unsigned int i = 0; i < capCount; i++ ) {
if ( cap[i].num <= maxISO) {
+ if (buffer[0] != '\0') {
+ strncat(buffer, PARAM_SEP, bufferSize - 1);
+ }
strncat(buffer, cap[i].param, bufferSize - 1);
- strncat(buffer, PARAM_SEP, bufferSize - 1);
}
}
- remove_last_sep(buffer);
LOG_FUNCTION_NAME_EXIT;
@@ -430,7 +534,8 @@ status_t OMXCameraAdapter::encodeSizeCap(OMX_TI_CAPRESTYPE &res,
const CapResolution *cap,
size_t capCount,
char * buffer,
- size_t bufferSize) {
+ size_t bufferSize)
+{
status_t ret = NO_ERROR;
LOG_FUNCTION_NAME;
@@ -445,8 +550,10 @@ status_t OMXCameraAdapter::encodeSizeCap(OMX_TI_CAPRESTYPE &res,
(cap[i].height <= res.nHeightMax) &&
(cap[i].width >= res.nWidthMin) &&
(cap[i].height >= res.nHeightMin) ) {
+ if (buffer[0] != '\0') {
+ strncat(buffer, PARAM_SEP, bufferSize - 1);
+ }
strncat(buffer, cap[i].param, bufferSize -1);
- strncat(buffer, PARAM_SEP, bufferSize - 1);
}
}
@@ -455,59 +562,278 @@ status_t OMXCameraAdapter::encodeSizeCap(OMX_TI_CAPRESTYPE &res,
return ret;
}
-status_t OMXCameraAdapter::insertImageSizes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+status_t OMXCameraAdapter::encodeSizeCap3D(OMX_TI_CAPRESTYPE &res,
+ const CapResolution *cap,
+ size_t capCount,
+ char * buffer,
+ size_t bufferSize)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ if ( (NULL == buffer) || (NULL == cap) ) {
+ CAMHAL_LOGEA("Invalid input arguments");
+ return -EINVAL;
+ }
+
+ for ( unsigned int i = 0 ; i < capCount ; i++ ) {
+ if ( (cap[i].width <= res.nWidthMax) &&
+ (cap[i].height <= res.nHeightMax) &&
+ (cap[i].width >= res.nWidthMin) &&
+ (cap[i].height >= res.nHeightMin) &&
+ (cap[i].width * cap[i].height <= res.nMaxResInPixels)) {
+ if (buffer[0] != '\0') {
+ strncat(buffer, PARAM_SEP, bufferSize - 1);
+ }
+ strncat(buffer, cap[i].param, bufferSize -1);
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::insertImageSizes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
status_t ret = NO_ERROR;
char supported[MAX_PROP_VALUE_LENGTH];
+ int s3d_detected = 0;
+ int s3d_ss_detected = 0;
+ int s3d_tb_detected = 0;
LOG_FUNCTION_NAME;
+ for ( unsigned int i = 0 ; i < caps.ulCapFrameLayoutCount; i++ ) {
+ if (caps.eCapFrameLayout[i] == OMX_TI_StereoFrameLayoutTopBottom)
+ {
+ s3d_tb_detected = 1;
+ }
+ else if (caps.eCapFrameLayout[i] == OMX_TI_StereoFrameLayoutLeftRight)
+ {
+ s3d_ss_detected = 1;
+ }
+ else if ( (caps.eCapFrameLayout[i] == OMX_TI_StereoFrameLayoutTopBottomSubsample)
+ || (caps.eCapFrameLayout[i] == OMX_TI_StereoFrameLayoutLeftRightSubsample) )
+ {
+ s3d_detected = 1;
+ }
+ }
+
memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
- ret = encodeSizeCap(caps.tImageResRange,
+ // Check if we are in 2d mode
+ if (!s3d_ss_detected && !s3d_tb_detected && !s3d_detected)
+ {
+ ret = encodeSizeCap(caps.tImageResRange,
mImageCapRes,
ARRAY_SIZE(mImageCapRes),
supported,
MAX_PROP_VALUE_LENGTH);
- if ( NO_ERROR != ret ) {
- CAMHAL_LOGEB("Error inserting supported picture sizes 0x%x", ret);
- } else {
- remove_last_sep(supported);
- params->set(CameraProperties::SUPPORTED_PICTURE_SIZES, supported);
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGEB("Error inserting supported picture sizes 0x%x", ret);
+ } else {
+ params->set(CameraProperties::SUPPORTED_PICTURE_SIZES, supported);
+ }
+ params->set(CameraProperties::MAX_PICTURE_WIDTH, caps.tImageResRange.nWidthMax);
+ params->set(CameraProperties::MAX_PICTURE_HEIGHT, caps.tImageResRange.nHeightMax);
}
+ else // 3d mode
+ {
+ if (s3d_tb_detected)
+ {
+ ret = encodeSizeCap3D(caps.tImageResRange,
+ mImageCapResTB,
+ ARRAY_SIZE(mImageCapResTB),
+ supported,
+ MAX_PROP_VALUE_LENGTH);
+
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGEB("Error inserting supported picture sizes 0x%x", ret);
+ } else {
+ params->set(CameraProperties::SUPPORTED_PICTURE_TOPBOTTOM_SIZES, supported);
+ }
+ }
+ else
+ {
+ params->set(CameraProperties::SUPPORTED_PICTURE_TOPBOTTOM_SIZES, supported);
+ }
- LOG_FUNCTION_NAME;
+ memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
+
+ if (s3d_ss_detected)
+ {
+ ret = encodeSizeCap3D(caps.tImageResRange,
+ mImageCapResSS,
+ ARRAY_SIZE(mImageCapResSS),
+ supported,
+ MAX_PROP_VALUE_LENGTH);
+
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGEB("Error inserting supported picture sizes 0x%x", ret);
+ } else {
+ params->set(CameraProperties::SUPPORTED_PICTURE_SIDEBYSIDE_SIZES, supported);
+ }
+ }
+ else
+ {
+ params->set(CameraProperties::SUPPORTED_PICTURE_SIDEBYSIDE_SIZES, supported);
+ }
+
+ memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
+
+ if (s3d_detected)
+ {
+ ret = encodeSizeCap3D(caps.tImageResRange,
+ mImageCapRes,
+ ARRAY_SIZE(mImageCapRes),
+ supported,
+ MAX_PROP_VALUE_LENGTH);
+
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGEB("Error inserting supported picture sizes 0x%x", ret);
+ } else {
+ params->set(CameraProperties::SUPPORTED_PICTURE_SUBSAMPLED_SIZES, supported);
+ }
+ } else {
+ params->set(CameraProperties::SUPPORTED_PICTURE_SUBSAMPLED_SIZES, supported);
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
return ret;
}
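Readability note: the top-bottom / side-by-side / subsampled scan at the start of this function is repeated almost verbatim for preview sizes below, so it factors naturally into a small helper. A hedged sketch; the helper name and bitmask encoding are illustrative only, while the field and enum names come from the hunk above:

    enum { LAYOUT_TB = 1 << 0, LAYOUT_SS = 1 << 1, LAYOUT_SUBSAMPLED = 1 << 2 };

    static int detectCaptureLayouts(const OMX_TI_CAPTYPE &caps) {
        int mask = 0;
        for (unsigned int i = 0; i < caps.ulCapFrameLayoutCount; i++) {
            if (caps.eCapFrameLayout[i] == OMX_TI_StereoFrameLayoutTopBottom) {
                mask |= LAYOUT_TB;
            } else if (caps.eCapFrameLayout[i] == OMX_TI_StereoFrameLayoutLeftRight) {
                mask |= LAYOUT_SS;
            } else if (caps.eCapFrameLayout[i] == OMX_TI_StereoFrameLayoutTopBottomSubsample ||
                       caps.eCapFrameLayout[i] == OMX_TI_StereoFrameLayoutLeftRightSubsample) {
                mask |= LAYOUT_SUBSAMPLED;
            }
        }
        return mask;   // 0 means a plain 2D sensor
    }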
-status_t OMXCameraAdapter::insertPreviewSizes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+status_t OMXCameraAdapter::insertPreviewSizes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
status_t ret = NO_ERROR;
char supported[MAX_PROP_VALUE_LENGTH];
+ int s3d_detected = 0;
+ int s3d_ss_detected = 0;
+ int s3d_tb_detected = 0;
LOG_FUNCTION_NAME;
+ for ( unsigned int i = 0 ; i < caps.ulPrvFrameLayoutCount; i++ ) {
+ if (caps.ePrvFrameLayout[i] == OMX_TI_StereoFrameLayoutTopBottom)
+ {
+ s3d_tb_detected = 1;
+ }
+ else if (caps.ePrvFrameLayout[i] == OMX_TI_StereoFrameLayoutLeftRight)
+ {
+ s3d_ss_detected = 1;
+ }
+ else if ( (caps.ePrvFrameLayout[i] == OMX_TI_StereoFrameLayoutTopBottomSubsample)
+ || (caps.ePrvFrameLayout[i] == OMX_TI_StereoFrameLayoutLeftRightSubsample) )
+ {
+ s3d_detected = 1;
+ }
+ }
+
memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
- ret = encodeSizeCap(caps.tPreviewResRange,
+ // Check if we are in 2d mode
+ if (!s3d_ss_detected && !s3d_tb_detected && !s3d_detected)
+ {
+ ret = encodeSizeCap(caps.tPreviewResRange,
mPreviewRes,
ARRAY_SIZE(mPreviewRes),
supported,
MAX_PROP_VALUE_LENGTH);
- if ( NO_ERROR != ret ) {
- CAMHAL_LOGEB("Error inserting supported preview sizes 0x%x", ret);
- } else {
- remove_last_sep(supported);
- params->set(CameraProperties::SUPPORTED_PREVIEW_SIZES, supported);
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGEB("Error inserting supported Landscape preview sizes 0x%x", ret);
+ return ret;
+ }
+
+    /* Insert Portrait Resolutions by verifying Portrait Capability Support */
+ ret = encodeSizeCap(caps.tRotatedPreviewResRange,
+ mPreviewPortraitRes,
+ ARRAY_SIZE(mPreviewPortraitRes),
+ supported,
+ MAX_PROP_VALUE_LENGTH);
+
+ if ( NO_ERROR != ret ) {
+            CAMHAL_LOGEB("Error inserting supported Portrait preview sizes 0x%x", ret);
+ } else {
+ params->set(CameraProperties::SUPPORTED_PREVIEW_SIZES, supported);
+ }
}
+ else // 3d mode
+ {
+ if (s3d_tb_detected)
+ {
+ ret = encodeSizeCap3D(caps.tPreviewResRange,
+ mPreviewResTB,
+ ARRAY_SIZE(mPreviewResTB),
+ supported,
+ MAX_PROP_VALUE_LENGTH);
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGEB("Error inserting supported 3D TB preview sizes 0x%x", ret);
+ return ret;
+ } else {
+ params->set(CameraProperties::SUPPORTED_PREVIEW_TOPBOTTOM_SIZES, supported);
+ }
+ }
+ else
+ {
+ params->set(CameraProperties::SUPPORTED_PREVIEW_TOPBOTTOM_SIZES, supported);
+ }
- LOG_FUNCTION_NAME;
+ memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
+
+ if (s3d_ss_detected)
+ {
+ ret = encodeSizeCap3D(caps.tPreviewResRange,
+ mPreviewResSS,
+ ARRAY_SIZE(mPreviewResSS),
+ supported,
+ MAX_PROP_VALUE_LENGTH);
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGEB("Error inserting supported 3D SS preview sizes 0x%x", ret);
+ return ret;
+ } else {
+ params->set(CameraProperties::SUPPORTED_PREVIEW_SIDEBYSIDE_SIZES, supported);
+ }
+ }
+ else
+ {
+ params->set(CameraProperties::SUPPORTED_PREVIEW_SIDEBYSIDE_SIZES, supported);
+ }
+
+ memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
+
+ if (s3d_detected)
+ {
+ ret = encodeSizeCap3D(caps.tPreviewResRange,
+ mPreviewRes,
+ ARRAY_SIZE(mPreviewRes),
+ supported,
+ MAX_PROP_VALUE_LENGTH);
+
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGEB("Error inserting supported preview sizes 0x%x", ret);
+ return ret;
+ } else {
+ params->set(CameraProperties::SUPPORTED_PREVIEW_SUBSAMPLED_SIZES, supported);
+ }
+ }
+ else
+ {
+ params->set(CameraProperties::SUPPORTED_PREVIEW_SUBSAMPLED_SIZES, supported);
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
return ret;
}
-status_t OMXCameraAdapter::insertVideoSizes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+status_t OMXCameraAdapter::insertVideoSizes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
status_t ret = NO_ERROR;
char supported[MAX_PROP_VALUE_LENGTH];
@@ -524,16 +850,16 @@ status_t OMXCameraAdapter::insertVideoSizes(CameraProperties::Properties* params
if ( NO_ERROR != ret ) {
CAMHAL_LOGEB("Error inserting supported video sizes 0x%x", ret);
} else {
- remove_last_sep(supported);
params->set(CameraProperties::SUPPORTED_VIDEO_SIZES, supported);
}
- LOG_FUNCTION_NAME;
+ LOG_FUNCTION_NAME_EXIT;
return ret;
}
-status_t OMXCameraAdapter::insertThumbSizes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+status_t OMXCameraAdapter::insertThumbSizes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
status_t ret = NO_ERROR;
char supported[MAX_PROP_VALUE_LENGTH];
@@ -551,11 +877,14 @@ status_t OMXCameraAdapter::insertThumbSizes(CameraProperties::Properties* params
CAMHAL_LOGEB("Error inserting supported thumbnail sizes 0x%x", ret);
} else {
//CTS Requirement: 0x0 should always be supported
+ if (supported[0] != '\0') {
+ strncat(supported, PARAM_SEP, 1);
+ }
strncat(supported, "0x0", MAX_PROP_NAME_LENGTH);
params->set(CameraProperties::SUPPORTED_THUMBNAIL_SIZES, supported);
}
- LOG_FUNCTION_NAME;
+ LOG_FUNCTION_NAME_EXIT;
return ret;
}
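For context on the CTS requirement handled above: applications disable JPEG thumbnails by requesting a 0x0 thumbnail, so "0x0" must always appear in the supported list. Typical application-side usage with the stock android::CameraParameters keys, as a sketch:

    #include <camera/CameraParameters.h>

    static void disableThumbnail(android::CameraParameters &params) {
        // Requesting a 0x0 thumbnail is how apps turn JPEG thumbnails off.
        params.set(android::CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH, 0);
        params.set(android::CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT, 0);
    }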
@@ -580,25 +909,26 @@ status_t OMXCameraAdapter::insertZoomStages(CameraProperties::Properties* params
params->set(CameraProperties::SUPPORTED_ZOOM_STAGES, zoomStageCount - 1); //As per CTS requirement
if ( 0 == zoomStageCount ) {
- params->set(CameraProperties::ZOOM_SUPPORTED, TICameraParameters::ZOOM_UNSUPPORTED);
- params->set(CameraProperties::SMOOTH_ZOOM_SUPPORTED, TICameraParameters::ZOOM_UNSUPPORTED);
+ params->set(CameraProperties::ZOOM_SUPPORTED, android::CameraParameters::FALSE);
+ params->set(CameraProperties::SMOOTH_ZOOM_SUPPORTED, android::CameraParameters::FALSE);
} else {
- params->set(CameraProperties::ZOOM_SUPPORTED, TICameraParameters::ZOOM_SUPPORTED);
- params->set(CameraProperties::SMOOTH_ZOOM_SUPPORTED, TICameraParameters::ZOOM_SUPPORTED);
+ params->set(CameraProperties::ZOOM_SUPPORTED, android::CameraParameters::TRUE);
+ params->set(CameraProperties::SMOOTH_ZOOM_SUPPORTED, android::CameraParameters::TRUE);
}
- LOG_FUNCTION_NAME;
+ LOG_FUNCTION_NAME_EXIT;
return ret;
}
-status_t OMXCameraAdapter::insertImageFormats(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+status_t OMXCameraAdapter::insertImageFormats(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
status_t ret = NO_ERROR;
char supported[MAX_PROP_VALUE_LENGTH];
LOG_FUNCTION_NAME;
- memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
+ memset(supported, '\0', sizeof(supported));
for ( int i = 0 ; i < caps.ulImageFormatCount ; i++ ) {
ret = encodePixelformatCap(caps.eImageFormats[i],
@@ -606,6 +936,19 @@ status_t OMXCameraAdapter::insertImageFormats(CameraProperties::Properties* para
ARRAY_SIZE(mPixelformats),
supported,
MAX_PROP_VALUE_LENGTH);
+
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGEB("Error inserting supported picture formats 0x%x", ret);
+ break;
+ }
+ }
+
+ for (int i = 0; i < caps.ulImageCodingFormatCount ; i++) {
+ ret = encodeImageCodingFormatCap(caps.eImageCodingFormat[i],
+ mImageCodingFormat,
+ ARRAY_SIZE(mImageCodingFormat),
+ supported);
+
if ( NO_ERROR != ret ) {
CAMHAL_LOGEB("Error inserting supported picture formats 0x%x", ret);
break;
@@ -613,17 +956,16 @@ status_t OMXCameraAdapter::insertImageFormats(CameraProperties::Properties* para
}
if ( NO_ERROR == ret ) {
- //jpeg is not supported in OMX capabilies yet
- strncat(supported, CameraParameters::PIXEL_FORMAT_JPEG, MAX_PROP_VALUE_LENGTH - 1);
params->set(CameraProperties::SUPPORTED_PICTURE_FORMATS, supported);
}
- LOG_FUNCTION_NAME;
+ LOG_FUNCTION_NAME_EXIT;
return ret;
}
-status_t OMXCameraAdapter::insertPreviewFormats(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+status_t OMXCameraAdapter::insertPreviewFormats(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
status_t ret = NO_ERROR;
char supported[MAX_PROP_VALUE_LENGTH];
@@ -646,74 +988,132 @@ status_t OMXCameraAdapter::insertPreviewFormats(CameraProperties::Properties* pa
if ( NO_ERROR == ret ) {
// need to advertise we support YV12 format
// We will program preview port with NV21 when we see application set YV12
- strncat(supported, CameraParameters::PIXEL_FORMAT_YUV420P, MAX_PROP_VALUE_LENGTH - 1);
+ if (supported[0] != '\0') {
+ strncat(supported, PARAM_SEP, 1);
+ }
+ strncat(supported, android::CameraParameters::PIXEL_FORMAT_YUV420P, MAX_PROP_VALUE_LENGTH - 1);
params->set(CameraProperties::SUPPORTED_PREVIEW_FORMATS, supported);
}
- LOG_FUNCTION_NAME;
+ LOG_FUNCTION_NAME_EXIT;
return ret;
}
-status_t OMXCameraAdapter::insertFramerates(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
- status_t ret = NO_ERROR;
- char supported[MAX_PROP_VALUE_LENGTH];
+status_t OMXCameraAdapter::insertFramerates(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
+ // collect supported normal frame rates
+ {
+ android::Vector<FpsRange> fpsRanges;
+
+ const int minFrameRate = max<int>(FPS_MIN * CameraHal::VFR_SCALE,
+ androidFromDucatiFrameRate(caps.xFramerateMin));
+ const int maxFrameRate = min<int>(FPS_MAX * CameraHal::VFR_SCALE,
+ androidFromDucatiFrameRate(caps.xFramerateMax));
+
+ if ( minFrameRate > maxFrameRate ) {
+ CAMHAL_LOGE("Invalid frame rate range: [%d .. %d]", caps.xFramerateMin, caps.xFramerateMax);
+ return BAD_VALUE;
+ }
- LOG_FUNCTION_NAME;
+ encodeFrameRates(minFrameRate, maxFrameRate, caps, mFramerates, ARRAY_SIZE(mFramerates), fpsRanges);
- memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
+ // populate variable frame rates
+ char supported[MAX_PROP_VALUE_LENGTH];
+ char defaultRange[MAX_PROP_VALUE_LENGTH];
- ret = encodeFramerateCap(caps.xFramerateMax >> VFR_OFFSET,
- caps.xFramerateMin >> VFR_OFFSET,
- mFramerates,
- ARRAY_SIZE(mFramerates),
- supported,
- MAX_PROP_VALUE_LENGTH);
+ memset(supported, 0, sizeof(supported));
+ memset(defaultRange, 0, sizeof(defaultRange));
- if ( NO_ERROR != ret ) {
- CAMHAL_LOGEB("Error inserting supported preview framerates 0x%x", ret);
- } else {
+ for ( int i = 0; i < static_cast<int>(fpsRanges.size()); ++i ) {
+ const FpsRange & fpsRange = fpsRanges.itemAt(i);
+ if ( supported[0] ) strncat(supported, PARAM_SEP, 1);
+ char tmp[MAX_PROP_VALUE_LENGTH];
+ snprintf(tmp, sizeof(tmp) - 1, "(%d,%d)", fpsRange.min(), fpsRange.max());
+ strcat(supported, tmp);
+ }
+
+ const FpsRange & defaultFpsRange = fpsRanges.itemAt(fpsRanges.size() - 1);
+ snprintf(defaultRange, sizeof(defaultRange) - 1, "%d,%d", defaultFpsRange.min(), defaultFpsRange.max());
+
+ CAMHAL_LOGD("Supported framerate ranges: %s", supported);
+ CAMHAL_LOGD("Default framerate range: [%s]", defaultRange);
+
+ params->set(CameraProperties::FRAMERATE_RANGE_SUPPORTED, supported);
+ params->set(CameraProperties::FRAMERATE_RANGE, defaultRange);
+
+ // populate fixed frame rates
+ memset(supported, 0, sizeof(supported));
+ memset(defaultRange, 0, sizeof(defaultRange));
+
+ for ( int i = 0; i < static_cast<int>(fpsRanges.size()); ++i ) {
+ const FpsRange & fpsRange = fpsRanges.itemAt(i);
+ if ( fpsRange.isFixed() && (fpsRange.min()%CameraHal::VFR_SCALE) == 0 ) {
+ if ( supported[0] ) strncat(supported, PARAM_SEP, 1);
+ char tmp[MAX_PROP_VALUE_LENGTH];
+ snprintf(tmp, sizeof(tmp) - 1, "%d", fpsRange.min()/CameraHal::VFR_SCALE);
+ strcat(supported, tmp);
+ }
+ }
+
+ CAMHAL_LOGD("Supported preview framerates: %s", supported);
params->set(CameraProperties::SUPPORTED_PREVIEW_FRAME_RATES, supported);
+
+ // insert default frame rate only if it is fixed
+ if ( defaultFpsRange.isFixed() && (defaultFpsRange.min()%CameraHal::VFR_SCALE) == 0 ) {
+ snprintf(defaultRange, sizeof(defaultRange) - 1, "%d", defaultFpsRange.min()/CameraHal::VFR_SCALE);
+ params->set(CameraProperties::PREVIEW_FRAME_RATE, defaultRange);
+ }
}
- LOG_FUNCTION_NAME;
+ // collect supported extended frame rates
+ {
+ android::Vector<FpsRange> fpsRanges;
- return ret;
-}
+ const int minFrameRate = max<int>(FPS_MIN * CameraHal::VFR_SCALE,
+ androidFromDucatiFrameRate(caps.xFramerateMin));
+ const int maxFrameRate = min<int>(FPS_MAX_EXTENDED * CameraHal::VFR_SCALE,
+ androidFromDucatiFrameRate(caps.xFramerateMax));
-status_t OMXCameraAdapter::insertVFramerates(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
- status_t ret = NO_ERROR;
- char supported[MAX_PROP_VALUE_LENGTH];
- char defaultRange[MAX_PROP_VALUE_LENGTH];
+ encodeFrameRates(minFrameRate, maxFrameRate, caps, mFramerates, ARRAY_SIZE(mFramerates), fpsRanges);
- LOG_FUNCTION_NAME;
+ // populate variable frame rates
+ char supported[MAX_PROP_VALUE_LENGTH];
+ memset(supported, 0, sizeof(supported) - 1);
- memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
+ for ( int i = 0; i < static_cast<int>(fpsRanges.size()); ++i ) {
+ const FpsRange & fpsRange = fpsRanges.itemAt(i);
+ if ( supported[0] ) strncat(supported, PARAM_SEP, 1);
+ char tmp[MAX_PROP_VALUE_LENGTH];
+ snprintf(tmp, sizeof(tmp) - 1, "(%d,%d)", fpsRange.min(), fpsRange.max());
+ strcat(supported, tmp);
+ }
- ret = encodeVFramerateCap(caps,
- mVarFramerates,
- ARRAY_SIZE(mVarFramerates),
- supported,
- defaultRange,
- MAX_PROP_VALUE_LENGTH);
+ CAMHAL_LOGD("Supported framerate ranges extended: %s", supported);
+ params->set(CameraProperties::FRAMERATE_RANGE_EXT_SUPPORTED, supported);
- if ( NO_ERROR != ret ) {
- CAMHAL_LOGEB("Error inserting supported preview framerate ranges 0x%x", ret);
- } else {
- params->set(CameraProperties::FRAMERATE_RANGE_SUPPORTED, supported);
- CAMHAL_LOGDB("framerate ranges %s", supported);
- params->set(CameraProperties::FRAMERATE_RANGE, DEFAULT_FRAMERATE_RANGE_IMAGE);
- params->set(CameraProperties::FRAMERATE_RANGE_VIDEO, DEFAULT_FRAMERATE_RANGE_VIDEO);
- params->set(CameraProperties::FRAMERATE_RANGE_IMAGE, DEFAULT_FRAMERATE_RANGE_IMAGE);
- CAMHAL_LOGDB("Default framerate range: [%s]", DEFAULT_FRAMERATE_RANGE_IMAGE);
- }
+ // populate fixed frame rates
+ memset(supported, 0, sizeof(supported) - 1);
- LOG_FUNCTION_NAME;
+ for ( int i = 0; i < static_cast<int>(fpsRanges.size()); ++i ) {
+ const FpsRange & fpsRange = fpsRanges.itemAt(i);
+ if ( fpsRange.isFixed() && (fpsRange.min()%CameraHal::VFR_SCALE) == 0 ) {
+ if ( supported[0] ) strncat(supported, PARAM_SEP, 1);
+ char tmp[MAX_PROP_VALUE_LENGTH];
+ snprintf(tmp, sizeof(tmp) - 1, "%d", fpsRange.min()/CameraHal::VFR_SCALE);
+ strcat(supported, tmp);
+ }
+ }
- return ret;
+ CAMHAL_LOGD("Supported extended preview framerates: %s", supported);
+ params->set(CameraProperties::SUPPORTED_PREVIEW_FRAME_RATES_EXT, supported);
+ }
+
+ return OK;
}
-status_t OMXCameraAdapter::insertEVs(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+status_t OMXCameraAdapter::insertEVs(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
status_t ret = NO_ERROR;
char supported[MAX_PROP_VALUE_LENGTH];
@@ -727,12 +1127,13 @@ status_t OMXCameraAdapter::insertEVs(CameraProperties::Properties* params, OMX_T
snprintf(supported, MAX_PROP_VALUE_LENGTH, "%d", ( int ) ( caps.xEVCompensationMax * 10 ));
params->set(CameraProperties::SUPPORTED_EV_MAX, supported);
- LOG_FUNCTION_NAME;
+ LOG_FUNCTION_NAME_EXIT;
return ret;
}
-status_t OMXCameraAdapter::insertISOModes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+status_t OMXCameraAdapter::insertISOModes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
status_t ret = NO_ERROR;
char supported[MAX_PROP_VALUE_LENGTH];
@@ -751,12 +1152,13 @@ status_t OMXCameraAdapter::insertISOModes(CameraProperties::Properties* params,
params->set(CameraProperties::SUPPORTED_ISO_VALUES, supported);
}
- LOG_FUNCTION_NAME;
+ LOG_FUNCTION_NAME_EXIT;
return ret;
}
-status_t OMXCameraAdapter::insertIPPModes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+status_t OMXCameraAdapter::insertIPPModes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
status_t ret = NO_ERROR;
char supported[MAX_PROP_VALUE_LENGTH];
@@ -766,32 +1168,31 @@ status_t OMXCameraAdapter::insertIPPModes(CameraProperties::Properties* params,
//Off is always supported
strncat(supported, TICameraParameters::IPP_NONE, MAX_PROP_NAME_LENGTH);
- strncat(supported, PARAM_SEP, 1);
if ( caps.bLensDistortionCorrectionSupported ) {
- strncat(supported, TICameraParameters::IPP_LDC, MAX_PROP_NAME_LENGTH);
strncat(supported, PARAM_SEP, 1);
+ strncat(supported, TICameraParameters::IPP_LDC, MAX_PROP_NAME_LENGTH);
}
if ( caps.bISONoiseFilterSupported ) {
- strncat(supported, TICameraParameters::IPP_NSF, MAX_PROP_NAME_LENGTH);
strncat(supported, PARAM_SEP, 1);
+ strncat(supported, TICameraParameters::IPP_NSF, MAX_PROP_NAME_LENGTH);
}
if ( caps.bISONoiseFilterSupported && caps.bLensDistortionCorrectionSupported ) {
- strncat(supported, TICameraParameters::IPP_LDCNSF, MAX_PROP_NAME_LENGTH);
strncat(supported, PARAM_SEP, 1);
+ strncat(supported, TICameraParameters::IPP_LDCNSF, MAX_PROP_NAME_LENGTH);
}
- remove_last_sep(supported);
params->set(CameraProperties::SUPPORTED_IPP_MODES, supported);
- LOG_FUNCTION_NAME;
+ LOG_FUNCTION_NAME_EXIT;
return ret;
}
-status_t OMXCameraAdapter::insertWBModes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+status_t OMXCameraAdapter::insertWBModes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
status_t ret = NO_ERROR;
char supported[MAX_PROP_VALUE_LENGTH];
const char *p;
@@ -803,19 +1204,22 @@ status_t OMXCameraAdapter::insertWBModes(CameraProperties::Properties* params, O
for ( unsigned int i = 0 ; i < caps.ulWhiteBalanceCount ; i++ ) {
p = getLUTvalue_OMXtoHAL(caps.eWhiteBalanceModes[i], WBalLUT);
if ( NULL != p ) {
+ if (supported[0] != '\0') {
+ strncat(supported, PARAM_SEP, 1);
+ }
strncat(supported, p, MAX_PROP_NAME_LENGTH);
- strncat(supported, PARAM_SEP, 1);
}
}
params->set(CameraProperties::SUPPORTED_WHITE_BALANCE, supported);
- LOG_FUNCTION_NAME;
+ LOG_FUNCTION_NAME_EXIT;
return ret;
}
-status_t OMXCameraAdapter::insertEffects(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+status_t OMXCameraAdapter::insertEffects(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
status_t ret = NO_ERROR;
char supported[MAX_PROP_VALUE_LENGTH];
const char *p;
@@ -827,19 +1231,22 @@ status_t OMXCameraAdapter::insertEffects(CameraProperties::Properties* params, O
for ( unsigned int i = 0 ; i < caps.ulColorEffectCount; i++ ) {
p = getLUTvalue_OMXtoHAL(caps.eColorEffects[i], EffLUT);
if ( NULL != p ) {
+ if (supported[0] != '\0') {
+ strncat(supported, PARAM_SEP, 1);
+ }
strncat(supported, p, MAX_PROP_NAME_LENGTH);
- strncat(supported, PARAM_SEP, 1);
}
}
- remove_last_sep(supported);
+
params->set(CameraProperties::SUPPORTED_EFFECTS, supported);
- LOG_FUNCTION_NAME;
+ LOG_FUNCTION_NAME_EXIT;
return ret;
}
-status_t OMXCameraAdapter::insertExpModes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+status_t OMXCameraAdapter::insertExpModes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
status_t ret = NO_ERROR;
char supported[MAX_PROP_VALUE_LENGTH];
const char *p;
@@ -851,19 +1258,73 @@ status_t OMXCameraAdapter::insertExpModes(CameraProperties::Properties* params,
for ( unsigned int i = 0 ; i < caps.ulExposureModeCount; i++ ) {
p = getLUTvalue_OMXtoHAL(caps.eExposureModes[i], ExpLUT);
if ( NULL != p ) {
+ if (supported[0] != '\0') {
+ strncat(supported, PARAM_SEP, 1);
+ }
strncat(supported, p, MAX_PROP_NAME_LENGTH);
- strncat(supported, PARAM_SEP, 1);
}
}
params->set(CameraProperties::SUPPORTED_EXPOSURE_MODES, supported);
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::insertManualExpRanges(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+ status_t ret = NO_ERROR;
+ char supported[MAX_PROP_VALUE_LENGTH];
+
LOG_FUNCTION_NAME;
+ if (caps.nManualExpMin > caps.nManualExpMax) {
+ snprintf(supported, MAX_PROP_VALUE_LENGTH, "%d", (int) 0);
+ params->set(CameraProperties::SUPPORTED_MANUAL_EXPOSURE_MIN, supported);
+
+ snprintf(supported, MAX_PROP_VALUE_LENGTH, "%d", (int) 0);
+ params->set(CameraProperties::SUPPORTED_MANUAL_EXPOSURE_MAX, supported);
+
+ snprintf(supported, MAX_PROP_VALUE_LENGTH, "%d", (int) 0);
+ params->set(CameraProperties::SUPPORTED_MANUAL_EXPOSURE_STEP, supported);
+ } else {
+ snprintf(supported, MAX_PROP_VALUE_LENGTH, "%d", (int) caps.nManualExpMin);
+ params->set(CameraProperties::SUPPORTED_MANUAL_EXPOSURE_MIN, supported);
+
+ snprintf(supported, MAX_PROP_VALUE_LENGTH, "%d", (int) caps.nManualExpMax);
+ params->set(CameraProperties::SUPPORTED_MANUAL_EXPOSURE_MAX, supported);
+
+ snprintf(supported, MAX_PROP_VALUE_LENGTH, "%d", (int) MANUAL_EXPOSURE_STEP);
+ params->set(CameraProperties::SUPPORTED_MANUAL_EXPOSURE_STEP, supported);
+ }
+
+ if (MANUAL_GAIN_ISO_MIN > caps.nSensitivityMax) {
+ snprintf(supported, MAX_PROP_VALUE_LENGTH, "%d", (int) 0);
+ params->set(CameraProperties::SUPPORTED_MANUAL_GAIN_ISO_MIN, supported);
+
+ snprintf(supported, MAX_PROP_VALUE_LENGTH, "%d", (int) 0);
+ params->set(CameraProperties::SUPPORTED_MANUAL_GAIN_ISO_MAX, supported);
+
+ snprintf(supported, MAX_PROP_VALUE_LENGTH, "%d", (int) 0);
+        params->set(CameraProperties::SUPPORTED_MANUAL_GAIN_ISO_STEP, supported);
+    } else {
+ snprintf(supported, MAX_PROP_VALUE_LENGTH, "%d", (int) MANUAL_GAIN_ISO_MIN);
+ params->set(CameraProperties::SUPPORTED_MANUAL_GAIN_ISO_MIN, supported);
+
+ snprintf(supported, MAX_PROP_VALUE_LENGTH, "%d", (int) caps.nSensitivityMax);
+ params->set(CameraProperties::SUPPORTED_MANUAL_GAIN_ISO_MAX, supported);
+
+ snprintf(supported, MAX_PROP_VALUE_LENGTH, "%d", (int) MANUAL_GAIN_ISO_STEP);
+ params->set(CameraProperties::SUPPORTED_MANUAL_GAIN_ISO_STEP, supported);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
return ret;
}
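When the reported capability range is inverted (min greater than max), the block above publishes 0/0/0, which downstream code can treat as "manual exposure not supported". A hypothetical client-side check; the helper name and the atoi-based parsing are assumptions, not part of this HAL:

    #include <stdlib.h>

    static bool manualExposureSupported(CameraProperties::Properties *props) {
        const int expMin = atoi(props->get(CameraProperties::SUPPORTED_MANUAL_EXPOSURE_MIN));
        const int expMax = atoi(props->get(CameraProperties::SUPPORTED_MANUAL_EXPOSURE_MAX));
        return expMax > expMin;   // the 0/0/0 triple published above collapses to false
    }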
-status_t OMXCameraAdapter::insertFlashModes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+status_t OMXCameraAdapter::insertFlashModes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
status_t ret = NO_ERROR;
char supported[MAX_PROP_VALUE_LENGTH];
const char *p;
@@ -875,20 +1336,26 @@ status_t OMXCameraAdapter::insertFlashModes(CameraProperties::Properties* params
for ( unsigned int i = 0 ; i < caps.ulFlashCount; i++ ) {
p = getLUTvalue_OMXtoHAL(caps.eFlashModes[i], FlashLUT);
if ( NULL != p ) {
+ if (supported[0] != '\0') {
+ strncat(supported, PARAM_SEP, 1);
+ }
strncat(supported, p, MAX_PROP_NAME_LENGTH);
- strncat(supported, PARAM_SEP, 1);
}
}
- remove_last_sep(supported);
+ if ( strlen(supported) == 0 ) {
+ strncpy(supported, DEFAULT_FLASH_MODE, MAX_PROP_NAME_LENGTH);
+ }
+
params->set(CameraProperties::SUPPORTED_FLASH_MODES, supported);
- LOG_FUNCTION_NAME;
+ LOG_FUNCTION_NAME_EXIT;
return ret;
}
-status_t OMXCameraAdapter::insertSceneModes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+status_t OMXCameraAdapter::insertSceneModes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
status_t ret = NO_ERROR;
char supported[MAX_PROP_VALUE_LENGTH];
const char *p;
@@ -900,34 +1367,31 @@ status_t OMXCameraAdapter::insertSceneModes(CameraProperties::Properties* params
for ( unsigned int i = 0 ; i < caps.ulSceneCount; i++ ) {
p = getLUTvalue_OMXtoHAL(caps.eSceneModes[i], SceneLUT);
if ( NULL != p ) {
+ if (supported[0] != '\0') {
+ strncat(supported, PARAM_SEP, 1);
+ }
strncat(supported, p, MAX_PROP_NAME_LENGTH);
- strncat(supported, PARAM_SEP, 1);
}
}
- remove_last_sep(supported);
params->set(CameraProperties::SUPPORTED_SCENE_MODES, supported);
- LOG_FUNCTION_NAME;
+ LOG_FUNCTION_NAME_EXIT;
return ret;
}
-status_t OMXCameraAdapter::insertFocusModes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+status_t OMXCameraAdapter::insertFocusModes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
status_t ret = NO_ERROR;
char supported[MAX_PROP_VALUE_LENGTH];
- const char *p;
LOG_FUNCTION_NAME;
memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
for ( unsigned int i = 0 ; i < caps.ulFocusModeCount; i++ ) {
- p = getLUTvalue_OMXtoHAL(caps.eFocusModes[i], FocusLUT);
- if ( NULL != p ) {
- strncat(supported, p, MAX_PROP_NAME_LENGTH);
- strncat(supported, PARAM_SEP, 1);
- }
+ getMultipleLUTvalue_OMXtoHAL(caps.eFocusModes[i], FocusLUT, supported);
}
// Check if focus is supported by camera
@@ -935,21 +1399,21 @@ status_t OMXCameraAdapter::insertFocusModes(CameraProperties::Properties* params
caps.eFocusModes[0] == OMX_IMAGE_FocusControlOff) {
// Focus is not supported by camera
// Advertise this to app as infinity focus mode
- strncat(supported, CameraParameters::FOCUS_MODE_INFINITY, MAX_PROP_NAME_LENGTH);
- } else {
- // Focus is supported but these modes are not supported by the
- // capability feature. Apply manually
- strncat(supported, CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE, MAX_PROP_NAME_LENGTH);
+ if (supported[0] != '\0') {
+ strncat(supported, PARAM_SEP, 1);
+ }
+ strncat(supported, android::CameraParameters::FOCUS_MODE_INFINITY, MAX_PROP_NAME_LENGTH);
}
params->set(CameraProperties::SUPPORTED_FOCUS_MODES, supported);
- LOG_FUNCTION_NAME;
+ LOG_FUNCTION_NAME_EXIT;
return ret;
}
-status_t OMXCameraAdapter::insertFlickerModes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+status_t OMXCameraAdapter::insertFlickerModes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
status_t ret = NO_ERROR;
char supported[MAX_PROP_VALUE_LENGTH];
const char *p;
@@ -961,19 +1425,22 @@ status_t OMXCameraAdapter::insertFlickerModes(CameraProperties::Properties* para
for ( unsigned int i = 0 ; i < caps.ulFlickerCount; i++ ) {
p = getLUTvalue_OMXtoHAL(caps.eFlicker[i], FlickerLUT);
if ( NULL != p ) {
+ if (supported[0] != '\0') {
+ strncat(supported, PARAM_SEP, 1);
+ }
strncat(supported, p, MAX_PROP_NAME_LENGTH);
- strncat(supported, PARAM_SEP, 1);
}
}
- remove_last_sep(supported);
+
params->set(CameraProperties::SUPPORTED_ANTIBANDING, supported);
- LOG_FUNCTION_NAME;
+ LOG_FUNCTION_NAME_EXIT;
return ret;
}
-status_t OMXCameraAdapter::insertAreas(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+status_t OMXCameraAdapter::insertAreas(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
status_t ret = NO_ERROR;
char supported[MAX_PROP_VALUE_LENGTH];
const char *p;
@@ -991,31 +1458,437 @@ status_t OMXCameraAdapter::insertAreas(CameraProperties::Properties* params, OMX
params->set(CameraProperties::MAX_NUM_METERING_AREAS, supported);
CAMHAL_LOGDB("Maximum supported exposure areas %s", supported);
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::insertVNFSupported(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+ status_t ret = NO_ERROR;
+
LOG_FUNCTION_NAME;
+ if ( OMX_TRUE == caps.bVideoNoiseFilterSupported ) {
+ params->set(CameraProperties::VNF_SUPPORTED, android::CameraParameters::TRUE);
+ } else {
+ params->set(CameraProperties::VNF_SUPPORTED, android::CameraParameters::FALSE);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
return ret;
}
-status_t OMXCameraAdapter::insertLocks(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+status_t OMXCameraAdapter::insertVSTABSupported(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
status_t ret = NO_ERROR;
- LOG_FUNCTION_NAME
+ LOG_FUNCTION_NAME;
+
+ if ( OMX_TRUE == caps.bVideoStabilizationSupported ) {
+ params->set(CameraProperties::VSTAB_SUPPORTED, android::CameraParameters::TRUE);
+ } else {
+ params->set(CameraProperties::VSTAB_SUPPORTED, android::CameraParameters::FALSE);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
- params->set(CameraProperties::AUTO_EXPOSURE_LOCK_SUPPORTED, DEFAULT_LOCK_SUPPORTED);
- params->set(CameraProperties::AUTO_WHITEBALANCE_LOCK_SUPPORTED, DEFAULT_LOCK_SUPPORTED);
+ return ret;
+}
+
+status_t OMXCameraAdapter::insertLocks(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
+ status_t ret = NO_ERROR;
LOG_FUNCTION_NAME
+ if ( caps.bAELockSupported ) {
+ params->set(CameraProperties::AUTO_EXPOSURE_LOCK_SUPPORTED, android::CameraParameters::TRUE);
+ } else {
+ params->set(CameraProperties::AUTO_EXPOSURE_LOCK_SUPPORTED, android::CameraParameters::FALSE);
+ }
+
+ if ( caps.bAWBLockSupported ) {
+ params->set(CameraProperties::AUTO_WHITEBALANCE_LOCK_SUPPORTED, android::CameraParameters::TRUE);
+ } else {
+ params->set(CameraProperties::AUTO_WHITEBALANCE_LOCK_SUPPORTED, android::CameraParameters::FALSE);
+ }
+
+ LOG_FUNCTION_NAME_EXIT
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::insertSenMount(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
+ status_t ret = NO_ERROR;
+ char supported[MAX_PROP_VALUE_LENGTH];
+ const char *p;
+ unsigned int i = 0;
+
+ LOG_FUNCTION_NAME;
+
+ memset(supported, '\0', sizeof(supported));
+
+ // 1) Look up and assign sensor name
+ for (i = 0; i < ARRAY_SIZE(mSensorNames); i++) {
+ if(mSensorNames[i].num == caps.tSenMounting.nSenId) {
+ // sensor found
+ break;
+ }
+ }
+ if ( i == ARRAY_SIZE(mSensorNames) ) {
+ p = "UNKNOWN_SENSOR";
+ } else {
+ p = mSensorNames[i].param;
+ }
+ strncat(supported, p, REMAINING_BYTES(supported));
+ params->set(CameraProperties::CAMERA_NAME, supported);
+ params->set(CameraProperties::CAMERA_SENSOR_ID, caps.tSenMounting.nSenId);
+
+ // 2) Assign mounting rotation
+ params->set(CameraProperties::ORIENTATION_INDEX, caps.tSenMounting.nRotation);
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::insertRaw(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+
+ status_t ret = NO_ERROR;
+ char supported[MAX_PROP_VALUE_LENGTH];
+ unsigned int i = 0;
+
+ LOG_FUNCTION_NAME;
+
+ memset(supported, '\0', sizeof(supported));
+ sprintf(supported,"%d",int(caps.uSenNativeResWidth));
+ params->set(CameraProperties::RAW_WIDTH, supported);
+
+ memset(supported, '\0', sizeof(supported));
+ if (caps.bMechanicalMisalignmentSupported) {
+ sprintf(supported,"%d",int(caps.uSenNativeResHeight) * 2);
+ } else {
+ sprintf(supported,"%d",int(caps.uSenNativeResHeight));
+ }
+ params->set(CameraProperties::RAW_HEIGHT, supported);
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::insertFacing(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
+ status_t ret = NO_ERROR;
+ char supported[MAX_PROP_VALUE_LENGTH];
+ const char *p;
+ unsigned int i = 0;
+
+ LOG_FUNCTION_NAME;
+
+ memset(supported, '\0', sizeof(supported));
+
+ for (i = 0; i < ARRAY_SIZE(mFacing); i++) {
+ if((OMX_TI_SENFACING_TYPE)mFacing[i].num == caps.tSenMounting.eFacing) {
+ break;
+ }
+ }
+ if ( i == ARRAY_SIZE(mFacing) ) {
+ p = "UNKNOWN_FACING";
+ } else {
+ p = mFacing[i].param;
+ }
+ strncat(supported, p, REMAINING_BYTES(supported));
+ params->set(CameraProperties::FACING_INDEX, supported);
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::insertFocalLength(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
+ status_t ret = NO_ERROR;
+ char supported[MAX_PROP_VALUE_LENGTH];
+
+ LOG_FUNCTION_NAME;
+
+ memset(supported, '\0', sizeof(supported));
+
+ sprintf(supported, "%d", caps.nFocalLength / 100);
+ strncat(supported, ".", REMAINING_BYTES(supported));
+ sprintf(supported+(strlen(supported)*sizeof(char)), "%d", caps.nFocalLength % 100);
+
+ params->set(CameraProperties::FOCAL_LENGTH, supported);
+
+ LOG_FUNCTION_NAME_EXIT
+
+ return ret;
+}
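One hedged side note on the fractional formatting above: printing the remainder with a plain "%d" drops a leading zero, so (assuming nFocalLength is in hundredths of a millimetre) a value of 305 would be rendered as "3.5" instead of "3.05". A single zero-padded snprintf over the same buffer avoids that; this is a sketch of an alternative, not the committed code:

    snprintf(supported, sizeof(supported), "%d.%02d",
             (int) (caps.nFocalLength / 100), (int) (caps.nFocalLength % 100));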
+
+status_t OMXCameraAdapter::insertAutoConvergenceModes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
+ status_t ret = NO_ERROR;
+ char supported[MAX_PROP_VALUE_LENGTH];
+ const char *p;
+ unsigned int i = 0;
+
+ LOG_FUNCTION_NAME;
+
+ memset(supported, '\0', sizeof(supported));
+
+ for ( unsigned int i = 0 ; i < caps.ulAutoConvModesCount; i++ ) {
+ p = getLUTvalue_OMXtoHAL(caps.eAutoConvModes[i], mAutoConvergenceLUT);
+ if ( NULL != p ) {
+ if (supported[0] != '\0') {
+ strncat(supported, PARAM_SEP, REMAINING_BYTES(supported));
+ }
+ strncat(supported, p, REMAINING_BYTES(supported));
+ }
+ }
+ params->set(CameraProperties::AUTOCONVERGENCE_MODE_VALUES, supported);
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::insertManualConvergenceRange(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
+ status_t ret = NO_ERROR;
+ char supported[MAX_PROP_VALUE_LENGTH];
+
+ LOG_FUNCTION_NAME;
+
+ snprintf(supported, MAX_PROP_VALUE_LENGTH, "%d", ( int ) ( caps.nManualConvMin ));
+ params->set(CameraProperties::SUPPORTED_MANUAL_CONVERGENCE_MIN, supported);
+
+ snprintf(supported, MAX_PROP_VALUE_LENGTH, "%d", ( int ) ( caps.nManualConvMax ));
+ params->set(CameraProperties::SUPPORTED_MANUAL_CONVERGENCE_MAX, supported);
+
+ snprintf(supported, MAX_PROP_VALUE_LENGTH, "%d", ( int ) ( caps.nManualConvMax != caps.nManualConvMin ));
+ params->set(CameraProperties::SUPPORTED_MANUAL_CONVERGENCE_STEP, supported);
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::insertMechanicalMisalignmentCorrection(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
+ LOG_FUNCTION_NAME;
+
+ params->set(CameraProperties::MECHANICAL_MISALIGNMENT_CORRECTION_SUPPORTED,
+ caps.bMechanicalMisalignmentSupported == OMX_TRUE ?
+ android::CameraParameters::TRUE : android::CameraParameters::FALSE);
+
+ return OK;
+}
+
+status_t OMXCameraAdapter::insertCaptureModes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
+ status_t ret = NO_ERROR;
+ char supported[MAX_PROP_VALUE_LENGTH];
+ const char *p;
+
+ LOG_FUNCTION_NAME;
+
+ memset(supported, '\0', sizeof(supported));
+
+ // 3D mode detect: Misalignment is present only in 3d mode
+ if (caps.bMechanicalMisalignmentSupported)
+ {
+ strncat(supported, TICameraParameters::HIGH_QUALITY_MODE, REMAINING_BYTES(supported));
+ strncat(supported, PARAM_SEP, REMAINING_BYTES(supported));
+ strncat(supported, TICameraParameters::VIDEO_MODE, REMAINING_BYTES(supported));
+ }
+    else // 2D mode: mechanical misalignment is only reported for 3D sensors
+ {
+ strncat(supported, TICameraParameters::HIGH_QUALITY_MODE, REMAINING_BYTES(supported));
+ strncat(supported, PARAM_SEP, REMAINING_BYTES(supported));
+ strncat(supported, TICameraParameters::VIDEO_MODE, REMAINING_BYTES(supported));
+ strncat(supported, PARAM_SEP, REMAINING_BYTES(supported));
+ strncat(supported, TICameraParameters::HIGH_PERFORMANCE_MODE, REMAINING_BYTES(supported));
+ strncat(supported, PARAM_SEP, REMAINING_BYTES(supported));
+ strncat(supported, TICameraParameters::HIGH_QUALITY_ZSL_MODE, REMAINING_BYTES(supported));
+#ifdef OMAP_ENHANCEMENT_CPCAM
+ strncat(supported, PARAM_SEP, REMAINING_BYTES(supported));
+ strncat(supported, TICameraParameters::CP_CAM_MODE, REMAINING_BYTES(supported));
+#endif
+#ifdef CAMERAHAL_OMAP5_CAPTURE_MODES
+ strncat(supported, PARAM_SEP, REMAINING_BYTES(supported));
+ strncat(supported, TICameraParameters::VIDEO_MODE_HQ, REMAINING_BYTES(supported));
+#endif
+ strncat(supported, PARAM_SEP, REMAINING_BYTES(supported));
+ strncat(supported, TICameraParameters::ZOOM_BRACKETING, REMAINING_BYTES(supported));
+ }
+
+ for ( unsigned int i = 0 ; i < caps.ulBracketingModesCount; i++ ) {
+ p = getLUTvalue_OMXtoHAL(caps.eBracketingModes[i], mBracketingModesLUT);
+ if ( NULL != p ) {
+ if (supported[0] != '\0') {
+ strncat(supported, PARAM_SEP, REMAINING_BYTES(supported));
+ }
+ strncat(supported, p, REMAINING_BYTES(supported));
+ }
+ }
+
+ params->set(CameraProperties::CAP_MODE_VALUES, supported);
+
+ LOG_FUNCTION_NAME_EXIT;
+
return ret;
}
-status_t OMXCameraAdapter::insertDefaults(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+status_t OMXCameraAdapter::insertLayout(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
status_t ret = NO_ERROR;
char supported[MAX_PROP_VALUE_LENGTH];
const char *p;
+ unsigned int i = 0;
LOG_FUNCTION_NAME;
+ memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
+ for ( unsigned int i = 0 ; i < caps.ulPrvFrameLayoutCount; i++ ) {
+ p = getLUTvalue_OMXtoHAL(caps.ePrvFrameLayout[i], mLayoutLUT);
+ if ( NULL != p ) {
+ if (supported[0] != '\0') {
+ strncat(supported, PARAM_SEP, 1);
+ }
+ strncat(supported, p, MAX_PROP_NAME_LENGTH);
+ }
+ }
+ params->set(CameraProperties::S3D_PRV_FRAME_LAYOUT_VALUES, supported);
+
+ memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
+ for ( unsigned int i = 0 ; i < caps.ulCapFrameLayoutCount; i++ ) {
+ p = getLUTvalue_OMXtoHAL(caps.eCapFrameLayout[i], mLayoutLUT);
+ if ( NULL != p ) {
+ if (supported[0] != '\0') {
+ strncat(supported, PARAM_SEP, 1);
+ }
+ strncat(supported, p, MAX_PROP_NAME_LENGTH);
+ }
+ }
+ params->set(CameraProperties::S3D_CAP_FRAME_LAYOUT_VALUES, supported);
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::insertVideoSnapshotSupported(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ if (caps.bStillCapDuringVideoSupported)
+ {
+ params->set(CameraProperties::VIDEO_SNAPSHOT_SUPPORTED, android::CameraParameters::TRUE);
+ }
+ else
+ {
+ params->set(CameraProperties::VIDEO_SNAPSHOT_SUPPORTED, android::CameraParameters::FALSE);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::insertGBCESupported(CameraProperties::Properties* params,
+ const OMX_TI_CAPTYPE &caps)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ if (caps.bGbceSupported) {
+ params->set(CameraProperties::SUPPORTED_GBCE,
+ android::CameraParameters::TRUE);
+ } else {
+ params->set(CameraProperties::SUPPORTED_GBCE,
+ android::CameraParameters::FALSE);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::insertGLBCESupported(CameraProperties::Properties* params,
+ const OMX_TI_CAPTYPE &caps)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ if (caps.bGlbceSupported) {
+ params->set(CameraProperties::SUPPORTED_GLBCE,
+ android::CameraParameters::TRUE);
+ } else {
+ params->set(CameraProperties::SUPPORTED_GLBCE,
+ android::CameraParameters::FALSE);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::insertDefaults(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
+ status_t ret = NO_ERROR;
+ char *pos, *str, *def;
+ char temp[MAX_PROP_VALUE_LENGTH];
+
+ LOG_FUNCTION_NAME;
+
+ /* If the default is supported, set it; otherwise set the first supported value */
+ if (strstr(params->get(CameraProperties::S3D_PRV_FRAME_LAYOUT_VALUES), DEFAULT_S3D_PREVIEW_LAYOUT)) {
+ strncpy(temp, DEFAULT_S3D_PREVIEW_LAYOUT, MAX_PROP_VALUE_LENGTH - 1);
+ } else {
+ strncpy(temp, params->get(CameraProperties::S3D_PRV_FRAME_LAYOUT_VALUES),
+ MAX_PROP_VALUE_LENGTH - 1);
+ if ((pos = strstr(temp, PARAM_SEP))) {
+ *pos = '\0';
+ }
+ }
+ params->set(CameraProperties::S3D_PRV_FRAME_LAYOUT, temp);
+
+ if (!strcmp(TICameraParameters::S3D_TB_FULL, temp)) {
+ params->set(CameraProperties::SUPPORTED_PREVIEW_SIZES, params->get(CameraProperties::SUPPORTED_PREVIEW_TOPBOTTOM_SIZES));
+ } else if (!strcmp(TICameraParameters::S3D_SS_FULL, temp)) {
+ params->set(CameraProperties::SUPPORTED_PREVIEW_SIZES, params->get(CameraProperties::SUPPORTED_PREVIEW_SIDEBYSIDE_SIZES));
+ } else if ((!strcmp(TICameraParameters::S3D_TB_SUBSAMPLED, temp))
+ || (!strcmp(TICameraParameters::S3D_SS_SUBSAMPLED, temp))) {
+ params->set(CameraProperties::SUPPORTED_PREVIEW_SIZES, params->get(CameraProperties::SUPPORTED_PREVIEW_SUBSAMPLED_SIZES));
+ }
+
+ /* If the default is supported, set it; otherwise set the first supported value */
+ if (strstr(params->get(CameraProperties::S3D_CAP_FRAME_LAYOUT_VALUES), DEFAULT_S3D_PICTURE_LAYOUT)) {
+ strncpy(temp, DEFAULT_S3D_PICTURE_LAYOUT, MAX_PROP_VALUE_LENGTH - 1);
+ } else {
+ strncpy(temp, params->get(CameraProperties::S3D_CAP_FRAME_LAYOUT_VALUES),
+ MAX_PROP_VALUE_LENGTH - 1);
+ if ((pos = strstr(temp, PARAM_SEP))) {
+ *pos = '\0';
+ }
+ }
+ params->set(CameraProperties::S3D_CAP_FRAME_LAYOUT, temp);
+
+ if (!strcmp(TICameraParameters::S3D_TB_FULL, temp)) {
+ params->set(CameraProperties::SUPPORTED_PICTURE_SIZES, params->get(CameraProperties::SUPPORTED_PICTURE_TOPBOTTOM_SIZES));
+ } else if (!strcmp(TICameraParameters::S3D_SS_FULL, temp)) {
+ params->set(CameraProperties::SUPPORTED_PICTURE_SIZES, params->get(CameraProperties::SUPPORTED_PICTURE_SIDEBYSIDE_SIZES));
+ } else if ((!strcmp(TICameraParameters::S3D_TB_SUBSAMPLED, temp))
+ || (!strcmp(TICameraParameters::S3D_SS_SUBSAMPLED, temp))) {
+ params->set(CameraProperties::SUPPORTED_PICTURE_SIZES, params->get(CameraProperties::SUPPORTED_PICTURE_SUBSAMPLED_SIZES));
+ }
+
params->set(CameraProperties::ANTIBANDING, DEFAULT_ANTIBANDING);
params->set(CameraProperties::BRIGHTNESS, DEFAULT_BRIGHTNESS);
params->set(CameraProperties::CONTRAST, DEFAULT_CONTRAST);
@@ -1024,7 +1897,7 @@ status_t OMXCameraAdapter::insertDefaults(CameraProperties::Properties* params,
params->set(CameraProperties::SUPPORTED_EV_STEP, DEFAULT_EV_STEP);
params->set(CameraProperties::EXPOSURE_MODE, DEFAULT_EXPOSURE_MODE);
params->set(CameraProperties::FLASH_MODE, DEFAULT_FLASH_MODE);
- char *pos = strstr(params->get(CameraProperties::SUPPORTED_FOCUS_MODES), DEFAULT_FOCUS_MODE_PREFERRED);
+ pos = strstr(params->get(CameraProperties::SUPPORTED_FOCUS_MODES), DEFAULT_FOCUS_MODE_PREFERRED);
if ( NULL != pos )
{
params->set(CameraProperties::FOCUS_MODE, DEFAULT_FOCUS_MODE_PREFERRED);
@@ -1034,81 +1907,99 @@ status_t OMXCameraAdapter::insertDefaults(CameraProperties::Properties* params,
params->set(CameraProperties::FOCUS_MODE, DEFAULT_FOCUS_MODE);
}
params->set(CameraProperties::IPP, DEFAULT_IPP);
- params->set(CameraProperties::GBCE, DEFAULT_GBCE);
+ params->set(CameraProperties::GBCE, android::CameraParameters::FALSE);
+ params->set(CameraProperties::GLBCE, android::CameraParameters::FALSE);
params->set(CameraProperties::ISO_MODE, DEFAULT_ISO_MODE);
params->set(CameraProperties::JPEG_QUALITY, DEFAULT_JPEG_QUALITY);
params->set(CameraProperties::JPEG_THUMBNAIL_QUALITY, DEFAULT_THUMBNAIL_QUALITY);
params->set(CameraProperties::JPEG_THUMBNAIL_SIZE, DEFAULT_THUMBNAIL_SIZE);
params->set(CameraProperties::PICTURE_FORMAT, DEFAULT_PICTURE_FORMAT);
- params->set(CameraProperties::PICTURE_SIZE, DEFAULT_PICTURE_SIZE);
+
+ if (!strcmp(params->get(CameraProperties::S3D_CAP_FRAME_LAYOUT),
+ TICameraParameters::S3D_TB_FULL)) {
+ params->set(CameraProperties::PICTURE_SIZE, DEFAULT_PICTURE_TB_SIZE);
+ } else if (!strcmp(params->get(CameraProperties::S3D_CAP_FRAME_LAYOUT),
+ TICameraParameters::S3D_SS_FULL)) {
+ params->set(CameraProperties::PICTURE_SIZE, DEFAULT_PICTURE_SS_SIZE);
+ } else {
+ params->set(CameraProperties::PICTURE_SIZE, DEFAULT_PICTURE_SIZE);
+ }
+
+ if (!strcmp(params->get(CameraProperties::S3D_PRV_FRAME_LAYOUT),
+ TICameraParameters::S3D_TB_FULL)) {
+ params->set(CameraProperties::PREVIEW_SIZE, DEFAULT_PREVIEW_TB_SIZE);
+ } else if (!strcmp(params->get(CameraProperties::S3D_PRV_FRAME_LAYOUT),
+ TICameraParameters::S3D_SS_FULL)) {
+ params->set(CameraProperties::PREVIEW_SIZE, DEFAULT_PREVIEW_SS_SIZE);
+ } else {
+ params->set(CameraProperties::PREVIEW_SIZE, DEFAULT_PREVIEW_SIZE);
+ }
+
params->set(CameraProperties::PREVIEW_FORMAT, DEFAULT_PREVIEW_FORMAT);
- params->set(CameraProperties::PREVIEW_FRAME_RATE, DEFAULT_FRAMERATE);
- params->set(CameraProperties::PREVIEW_SIZE, DEFAULT_PREVIEW_SIZE);
+
+ /* Set default value if supported, otherwise set max supported value */
+ strncpy(temp, params->get(CameraProperties::SUPPORTED_PREVIEW_FRAME_RATES),
+ MAX_PROP_VALUE_LENGTH - 1);
+ def = str = temp;
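+ // Walk the comma-separated list: stop at DEFAULT_FRAMERATE if it is present,
+ // otherwise remember the numerically largest entry seen so far.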
+ while (1) {
+ if ((pos = strstr(str, PARAM_SEP))) {
+ *pos = '\0';
+ }
+ if (!strcmp(str, DEFAULT_FRAMERATE)) {
+ def = str;
+ break;
+ }
+ if (atoi(str) > atoi(def)) {
+ def = str;
+ }
+ if (pos == NULL) {
+ break;
+ }
+ str = pos + strlen(PARAM_SEP);
+ }
+ params->set(CameraProperties::PREVIEW_FRAME_RATE, def);
+
params->set(CameraProperties::REQUIRED_PREVIEW_BUFS, DEFAULT_NUM_PREV_BUFS);
params->set(CameraProperties::REQUIRED_IMAGE_BUFS, DEFAULT_NUM_PIC_BUFS);
params->set(CameraProperties::SATURATION, DEFAULT_SATURATION);
params->set(CameraProperties::SCENE_MODE, DEFAULT_SCENE_MODE);
params->set(CameraProperties::SHARPNESS, DEFAULT_SHARPNESS);
params->set(CameraProperties::VSTAB, DEFAULT_VSTAB);
- params->set(CameraProperties::VSTAB_SUPPORTED, DEFAULT_VSTAB_SUPPORTED);
+ params->set(CameraProperties::VNF, DEFAULT_VNF);
params->set(CameraProperties::WHITEBALANCE, DEFAULT_WB);
params->set(CameraProperties::ZOOM, DEFAULT_ZOOM);
params->set(CameraProperties::MAX_FD_HW_FACES, DEFAULT_MAX_FD_HW_FACES);
params->set(CameraProperties::MAX_FD_SW_FACES, DEFAULT_MAX_FD_SW_FACES);
params->set(CameraProperties::AUTO_EXPOSURE_LOCK, DEFAULT_AE_LOCK);
params->set(CameraProperties::AUTO_WHITEBALANCE_LOCK, DEFAULT_AWB_LOCK);
- if(caps.tSenMounting.nSenId == 305) {
- params->set(CameraProperties::FOCAL_LENGTH, DEFAULT_FOCAL_LENGTH_PRIMARY);
- } else {
- params->set(CameraProperties::FOCAL_LENGTH, DEFAULT_FOCAL_LENGTH_SECONDARY);
- }
params->set(CameraProperties::HOR_ANGLE, DEFAULT_HOR_ANGLE);
params->set(CameraProperties::VER_ANGLE, DEFAULT_VER_ANGLE);
- params->set(CameraProperties::VIDEO_SNAPSHOT_SUPPORTED, DEFAULT_VIDEO_SNAPSHOT_SUPPORTED);
params->set(CameraProperties::VIDEO_SIZE, DEFAULT_VIDEO_SIZE);
- params->set(CameraProperties::PREFERRED_PREVIEW_SIZE_FOR_VIDEO, DEFAULT_PREFERRED_PREVIEW_SIZE_FOR_VIDEO);
+ params->set(CameraProperties::SENSOR_ORIENTATION, DEFAULT_SENSOR_ORIENTATION);
+ params->set(CameraProperties::AUTOCONVERGENCE_MODE, DEFAULT_AUTOCONVERGENCE_MODE);
+ params->set(CameraProperties::MANUAL_CONVERGENCE, DEFAULT_MANUAL_CONVERGENCE);
+ params->set(CameraProperties::MECHANICAL_MISALIGNMENT_CORRECTION, DEFAULT_MECHANICAL_MISALIGNMENT_CORRECTION_MODE);
+
+ char property[PROPERTY_VALUE_MAX];
+ property_get("ro.product.manufacturer",
+ property,
+ DEFAULT_EXIF_MAKE);
+ property[0] = toupper(property[0]);
+ params->set(CameraProperties::EXIF_MAKE, property);
+ property_get("ro.product.model",
+ property,
+ DEFAULT_EXIF_MODEL);
+ property[0] = toupper(property[0]);
+ params->set(CameraProperties::EXIF_MODEL, property);
- LOG_FUNCTION_NAME;
-
- return ret;
-}
-
-status_t OMXCameraAdapter::insertSenMount(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
- status_t ret = NO_ERROR;
- char supported[MAX_PROP_VALUE_LENGTH];
- const char *p;
- unsigned int i = 0;
-
- LOG_FUNCTION_NAME;
-
- memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
-
- // 1) Look up and assign sensor name
- for (i = 0; i < ARRAY_SIZE(mSensorNames); i++) {
- if(mSensorNames[i].num == caps.tSenMounting.nSenId) {
- // sensor found
- break;
- }
- }
- if ( i == ARRAY_SIZE(mSensorNames) ) {
- p = "UNKNOWN_SENSOR";
- } else {
- p = mSensorNames[i].param;
- }
- strncat(supported, p, MAX_PROP_NAME_LENGTH);
- params->set(CameraProperties::CAMERA_NAME, supported);
-
- // 2) Assign mounting rotation
- params->set(CameraProperties::ORIENTATION_INDEX, caps.tSenMounting.nRotation);
-
- LOG_FUNCTION_NAME;
+ LOG_FUNCTION_NAME_EXIT;
return ret;
}
-status_t OMXCameraAdapter::insertCapabilities(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+status_t OMXCameraAdapter::insertCapabilities(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
status_t ret = NO_ERROR;
- char supported[MAX_PROP_VALUE_LENGTH];
LOG_FUNCTION_NAME;
@@ -1141,10 +2032,6 @@ status_t OMXCameraAdapter::insertCapabilities(CameraProperties::Properties* para
}
if ( NO_ERROR == ret ) {
- ret = insertVFramerates(params, caps);
- }
-
- if ( NO_ERROR == ret ) {
ret = insertEVs(params, caps);
}
@@ -1169,6 +2056,10 @@ status_t OMXCameraAdapter::insertCapabilities(CameraProperties::Properties* para
}
if ( NO_ERROR == ret ) {
+ ret = insertManualExpRanges(params, caps);
+ }
+
+ if ( NO_ERROR == ret ) {
ret = insertFlashModes(params, caps);
}
@@ -1191,46 +2082,340 @@ status_t OMXCameraAdapter::insertCapabilities(CameraProperties::Properties* para
if ( NO_ERROR == ret ) {
ret = insertLocks(params, caps);
}
+
if ( NO_ERROR == ret) {
ret = insertAreas(params, caps);
+ }
+
+ if ( NO_ERROR == ret) {
+ ret = insertFacing(params, caps);
+ }
+ if ( NO_ERROR == ret) {
+ ret = insertFocalLength(params, caps);
+ }
+
+ if ( NO_ERROR == ret) {
+ ret = insertAutoConvergenceModes(params, caps);
+ }
+
+ if ( NO_ERROR == ret) {
+ ret = insertManualConvergenceRange(params, caps);
+ }
+
+ if ( NO_ERROR == ret) {
+ ret = insertMechanicalMisalignmentCorrection(params, caps);
+ }
+
+ if ( NO_ERROR == ret) {
+ ret = insertRaw(params, caps);
+ }
+
+ if ( NO_ERROR == ret) {
+ ret = insertCaptureModes(params, caps);
+ }
+
+ if ( NO_ERROR == ret) {
+ ret = insertLayout(params, caps);
+ }
+
+ if ( NO_ERROR == ret) {
+ ret = insertVideoSnapshotSupported(params, caps);
+ }
+
+ if ( NO_ERROR == ret ) {
+ ret = insertVSTABSupported(params, caps);
+ }
+
+ if ( NO_ERROR == ret) {
+ ret = insertVNFSupported(params, caps);
}
//NOTE: Ensure that we always call insertDefaults after inserting the supported capabilities
//as there are checks inside insertDefaults to make sure a certain default is supported
// or not
if ( NO_ERROR == ret ) {
- ret = insertVideoSizes(params, caps);
+ ret = insertVideoSizes(params, caps);
}
- if ( NO_ERROR == ret ) {
- ret = insertDefaults(params, caps);
+ if ( NO_ERROR == ret) {
+ ret = insertGBCESupported(params, caps);
}
+ if ( NO_ERROR == ret) {
+ ret = insertGLBCESupported(params, caps);
+ }
+ if ( NO_ERROR == ret ) {
+ ret = insertDefaults(params, caps);
+ }
LOG_FUNCTION_NAME_EXIT;
return ret;
}
+
+bool OMXCameraAdapter::_checkOmxTiCap(const OMX_TI_CAPTYPE & caps)
+{
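+ // Sanity-check every reported count against the capacity of its matching
+ // array before the capability arrays below are iterated.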
+#define CAMHAL_CHECK_OMX_TI_CAP(countVar, arrayVar) \
+ do { \
+ const int count = static_cast<int>(caps.countVar); \
+ const int maxSize = CAMHAL_SIZE_OF_ARRAY(caps.arrayVar); \
+ if ( count < 0 || count > maxSize ) \
+ { \
+ CAMHAL_LOGE("OMX_TI_CAPTYPE verification failed"); \
+ CAMHAL_LOGE(" variable: OMX_TI_CAPTYPE::" #countVar \
+ ", value: %d, max allowed: %d", \
+ count, maxSize); \
+ return false; \
+ } \
+ } while (0)
+
+ CAMHAL_CHECK_OMX_TI_CAP(ulPreviewFormatCount, ePreviewFormats);
+ CAMHAL_CHECK_OMX_TI_CAP(ulImageFormatCount, eImageFormats);
+ CAMHAL_CHECK_OMX_TI_CAP(ulWhiteBalanceCount, eWhiteBalanceModes);
+ CAMHAL_CHECK_OMX_TI_CAP(ulColorEffectCount, eColorEffects);
+ CAMHAL_CHECK_OMX_TI_CAP(ulFlickerCount, eFlicker);
+ CAMHAL_CHECK_OMX_TI_CAP(ulExposureModeCount, eExposureModes);
+ CAMHAL_CHECK_OMX_TI_CAP(ulFocusModeCount, eFocusModes);
+ CAMHAL_CHECK_OMX_TI_CAP(ulSceneCount, eSceneModes);
+ CAMHAL_CHECK_OMX_TI_CAP(ulFlashCount, eFlashModes);
+ CAMHAL_CHECK_OMX_TI_CAP(ulPrvVarFPSModesCount, tPrvVarFPSModes);
+ CAMHAL_CHECK_OMX_TI_CAP(ulCapVarFPSModesCount, tCapVarFPSModes);
+ CAMHAL_CHECK_OMX_TI_CAP(ulAutoConvModesCount, eAutoConvModes);
+ CAMHAL_CHECK_OMX_TI_CAP(ulBracketingModesCount, eBracketingModes);
+ CAMHAL_CHECK_OMX_TI_CAP(ulImageCodingFormatCount, eImageCodingFormat);
+ CAMHAL_CHECK_OMX_TI_CAP(ulPrvFrameLayoutCount, ePrvFrameLayout);
+ CAMHAL_CHECK_OMX_TI_CAP(ulCapFrameLayoutCount, eCapFrameLayout);
+
+#undef CAMHAL_CHECK_OMX_TI_CAP
+
+ return true;
+}
+
+
+bool OMXCameraAdapter::_dumpOmxTiCap(const int sensorId, const OMX_TI_CAPTYPE & caps)
+{
+ if ( !_checkOmxTiCap(caps) )
+ {
+ CAMHAL_LOGE("OMX_TI_CAPTYPE structure is invalid");
+ return false;
+ }
+
+ CAMHAL_LOGD("===================================================");
+ CAMHAL_LOGD("---- Dumping OMX capabilities for sensor id: %d ----", sensorId);
+
+ CAMHAL_LOGD("");
+ CAMHAL_LOGD("ulPreviewFormatCount = %d", int(caps.ulPreviewFormatCount));
+ for ( int i = 0; i < int(caps.ulPreviewFormatCount); ++i )
+ CAMHAL_LOGD(" ePreviewFormats[%2d] = %d", i, int(caps.ePreviewFormats[i]));
+
+ CAMHAL_LOGD("");
+ CAMHAL_LOGD("ulImageFormatCount = %d", int(caps.ulImageFormatCount));
+ for ( int i = 0; i < int(caps.ulImageFormatCount); ++i )
+ CAMHAL_LOGD(" eImageFormats[%2d] = %d", i, int(caps.eImageFormats[i]));
+
+ CAMHAL_LOGD("");
+ CAMHAL_LOGD("tPreviewResRange.nWidthMin = %d", int(caps.tPreviewResRange.nWidthMin));
+ CAMHAL_LOGD("tPreviewResRange.nHeightMin = %d", int(caps.tPreviewResRange.nHeightMin));
+ CAMHAL_LOGD("tPreviewResRange.nWidthMax = %d", int(caps.tPreviewResRange.nWidthMax));
+ CAMHAL_LOGD("tPreviewResRange.nHeightMax = %d", int(caps.tPreviewResRange.nHeightMax));
+ CAMHAL_LOGD("tPreviewResRange.nMaxResInPixels = %d", int(caps.tPreviewResRange.nMaxResInPixels));
+
+ CAMHAL_LOGD("");
+ CAMHAL_LOGD("tRotatedPreviewResRange.nWidthMin = %d", int(caps.tRotatedPreviewResRange.nWidthMin));
+ CAMHAL_LOGD("tRotatedPreviewResRange.nHeightMin = %d", int(caps.tRotatedPreviewResRange.nHeightMin));
+ CAMHAL_LOGD("tRotatedPreviewResRange.nWidthMax = %d", int(caps.tRotatedPreviewResRange.nWidthMax));
+ CAMHAL_LOGD("tRotatedPreviewResRange.nHeightMax = %d", int(caps.tRotatedPreviewResRange.nHeightMax));
+ CAMHAL_LOGD("tRotatedPreviewResRange.nMaxResInPixels = %d", int(caps.tRotatedPreviewResRange.nMaxResInPixels));
+
+ CAMHAL_LOGD("");
+ CAMHAL_LOGD("tImageResRange.nWidthMin = %d", int(caps.tImageResRange.nWidthMin));
+ CAMHAL_LOGD("tImageResRange.nHeightMin = %d", int(caps.tImageResRange.nHeightMin));
+ CAMHAL_LOGD("tImageResRange.nWidthMax = %d", int(caps.tImageResRange.nWidthMax));
+ CAMHAL_LOGD("tImageResRange.nHeightMax = %d", int(caps.tImageResRange.nHeightMax));
+ CAMHAL_LOGD("tImageResRange.nMaxResInPixels = %d", int(caps.tImageResRange.nMaxResInPixels));
+
+ CAMHAL_LOGD("");
+ CAMHAL_LOGD("tThumbResRange.nWidthMin = %d", int(caps.tThumbResRange.nWidthMin));
+ CAMHAL_LOGD("tThumbResRange.nHeightMin = %d", int(caps.tThumbResRange.nHeightMin));
+ CAMHAL_LOGD("tThumbResRange.nWidthMax = %d", int(caps.tThumbResRange.nWidthMax));
+ CAMHAL_LOGD("tThumbResRange.nHeightMax = %d", int(caps.tThumbResRange.nHeightMax));
+ CAMHAL_LOGD("tThumbResRange.nMaxResInPixels = %d", int(caps.tThumbResRange.nMaxResInPixels));
+
+ CAMHAL_LOGD("");
+ CAMHAL_LOGD("ulWhiteBalanceCount = %d", int(caps.ulWhiteBalanceCount));
+ for ( int i = 0; i < int(caps.ulWhiteBalanceCount); ++i )
+ CAMHAL_LOGD(" eWhiteBalanceModes[%2d] = 0x%08x", i, int(caps.eWhiteBalanceModes[i]));
+
+ CAMHAL_LOGD("");
+ CAMHAL_LOGD("ulColorEffectCount = %d", int(caps.ulColorEffectCount));
+ for ( int i = 0; i < int(caps.ulColorEffectCount); ++i )
+ CAMHAL_LOGD(" eColorEffects[%2d] = 0x%08x", i, int(caps.eColorEffects[i]));
+
+ CAMHAL_LOGD("");
+ CAMHAL_LOGD("xMaxWidthZoom = %d", int(caps.xMaxWidthZoom));
+ CAMHAL_LOGD("xMaxHeightZoom = %d", int(caps.xMaxHeightZoom));
+
+ CAMHAL_LOGD("");
+ CAMHAL_LOGD("ulFlickerCount = %d", int(caps.ulFlickerCount));
+ for ( int i = 0; i < int(caps.ulFlickerCount); ++i )
+ CAMHAL_LOGD(" eFlicker[%2d] = %d", i, int(caps.eFlicker[i]));
+
+ CAMHAL_LOGD("");
+ CAMHAL_LOGD("ulExposureModeCount = %d", int(caps.ulExposureModeCount));
+ for ( int i = 0; i < int(caps.ulExposureModeCount); ++i )
+ CAMHAL_LOGD(" eExposureModes[%2d] = 0x%08x", i, int(caps.eExposureModes[i]));
+
+ CAMHAL_LOGD("");
+ CAMHAL_LOGD("bLensDistortionCorrectionSupported = %d", int(caps.bLensDistortionCorrectionSupported));
+ CAMHAL_LOGD("bISONoiseFilterSupported = %d", int(caps.bISONoiseFilterSupported));
+ CAMHAL_LOGD("xEVCompensationMin = %d", int(caps.xEVCompensationMin));
+ CAMHAL_LOGD("xEVCompensationMax = %d", int(caps.xEVCompensationMax));
+ CAMHAL_LOGD("nSensitivityMax = %d", int(caps.nSensitivityMax));
+
+ CAMHAL_LOGD("");
+ CAMHAL_LOGD("ulFocusModeCount = %d", int(caps.ulFocusModeCount));
+ for ( int i = 0; i < int(caps.ulFocusModeCount); ++i )
+ CAMHAL_LOGD(" eFocusModes[%2d] = 0x%08x", i, int(caps.eFocusModes[i]));
+
+ CAMHAL_LOGD("");
+ CAMHAL_LOGD("ulSceneCount = %d", int(caps.ulSceneCount));
+ for ( int i = 0; i < int(caps.ulSceneCount); ++i )
+ CAMHAL_LOGD(" eSceneModes[%2d] = %d", i, int(caps.eSceneModes[i]));
+
+ CAMHAL_LOGD("");
+ CAMHAL_LOGD("ulFlashCount = %d", int(caps.ulFlashCount));
+ for ( int i = 0; i < int(caps.ulFlashCount); ++i )
+ CAMHAL_LOGD(" eFlashModes[%2d] = %d", i, int(caps.eFlashModes[i]));
+
+ CAMHAL_LOGD("xFramerateMin = %d", int(caps.xFramerateMin));
+ CAMHAL_LOGD("xFramerateMax = %d", int(caps.xFramerateMax));
+ CAMHAL_LOGD("bContrastSupported = %d", int(caps.bContrastSupported));
+ CAMHAL_LOGD("bSaturationSupported = %d", int(caps.bSaturationSupported));
+ CAMHAL_LOGD("bBrightnessSupported = %d", int(caps.bBrightnessSupported));
+ CAMHAL_LOGD("bProcessingLevelSupported = %d", int(caps.bProcessingLevelSupported));
+ CAMHAL_LOGD("bQFactorSupported = %d", int(caps.bQFactorSupported));
+
+ CAMHAL_LOGD("");
+ CAMHAL_LOGD("ulPrvVarFPSModesCount = %d", int(caps.ulPrvVarFPSModesCount));
+ for ( int i = 0; i < int(caps.ulPrvVarFPSModesCount); ++i )
+ {
+ CAMHAL_LOGD(" tPrvVarFPSModes[%d].nVarFPSMin = %d", i, int(caps.tPrvVarFPSModes[i].nVarFPSMin));
+ CAMHAL_LOGD(" tPrvVarFPSModes[%d].nVarFPSMax = %d", i, int(caps.tPrvVarFPSModes[i].nVarFPSMax));
+ }
+
+ CAMHAL_LOGD("");
+ CAMHAL_LOGD("ulCapVarFPSModesCount = %d", int(caps.ulCapVarFPSModesCount));
+ for ( int i = 0; i < int(caps.ulCapVarFPSModesCount); ++i )
+ {
+ CAMHAL_LOGD(" tCapVarFPSModes[%d].nVarFPSMin = %d", i, int(caps.tCapVarFPSModes[i].nVarFPSMin));
+ CAMHAL_LOGD(" tCapVarFPSModes[%d].nVarFPSMax = %d", i, int(caps.tCapVarFPSModes[i].nVarFPSMax));
+ }
+
+ CAMHAL_LOGD("");
+ CAMHAL_LOGD("tSenMounting.nSenId = %d", int(caps.tSenMounting.nSenId));
+ CAMHAL_LOGD("tSenMounting.nRotation = %d", int(caps.tSenMounting.nRotation));
+ CAMHAL_LOGD("tSenMounting.bMirror = %d", int(caps.tSenMounting.bMirror));
+ CAMHAL_LOGD("tSenMounting.bFlip = %d", int(caps.tSenMounting.bFlip));
+ CAMHAL_LOGD("tSenMounting.eFacing = %d", int(caps.tSenMounting.eFacing));
+
+ CAMHAL_LOGD("");
+ CAMHAL_LOGD("ulAutoConvModesCount = %d", int(caps.ulAutoConvModesCount));
+ for ( int i = 0; i < int(caps.ulAutoConvModesCount); ++i )
+ CAMHAL_LOGD(" eAutoConvModes[%2d] = %d", i, int(caps.eAutoConvModes[i]));
+
+ CAMHAL_LOGD("");
+ CAMHAL_LOGD("ulBracketingModesCount = %d", int(caps.ulBracketingModesCount));
+ for ( int i = 0; i < int(caps.ulBracketingModesCount); ++i )
+ CAMHAL_LOGD(" eBracketingModes[%2d] = %d", i, int(caps.eBracketingModes[i]));
+
+ CAMHAL_LOGD("");
+ CAMHAL_LOGD("bGbceSupported = %d", int(caps.bGbceSupported));
+ CAMHAL_LOGD("bRawJpegSupported = %d", int(caps.bRawJpegSupported));
+
+ CAMHAL_LOGD("");
+ CAMHAL_LOGD("ulImageCodingFormatCount = %d", int(caps.ulImageCodingFormatCount));
+ for ( int i = 0; i < int(caps.ulImageCodingFormatCount); ++i )
+ CAMHAL_LOGD(" eImageCodingFormat[%2d] = %d", i, int(caps.eImageCodingFormat[i]));
+
+ CAMHAL_LOGD("");
+ CAMHAL_LOGD("uSenNativeResWidth = %d", int(caps.uSenNativeResWidth));
+ CAMHAL_LOGD("uSenNativeResHeight = %d", int(caps.uSenNativeResHeight));
+ CAMHAL_LOGD("ulAlgoAreasFocusCount = %d", int(caps.ulAlgoAreasFocusCount));
+ CAMHAL_LOGD("ulAlgoAreasExposureCount = %d", int(caps.ulAlgoAreasExposureCount));
+ CAMHAL_LOGD("bAELockSupported = %d", int(caps.bAELockSupported));
+ CAMHAL_LOGD("bAWBLockSupported = %d", int(caps.bAWBLockSupported));
+ CAMHAL_LOGD("bAFLockSupported = %d", int(caps.bAFLockSupported));
+ CAMHAL_LOGD("nFocalLength = %d", int(caps.nFocalLength));
+
+ CAMHAL_LOGD("");
+ CAMHAL_LOGD("ulPrvFrameLayoutCount = %d", int(caps.ulPrvFrameLayoutCount));
+ for ( int i = 0; i < int(caps.ulPrvFrameLayoutCount); ++i )
+ CAMHAL_LOGD(" ePrvFrameLayout[%2d] = %d", i, int(caps.ePrvFrameLayout[i]));
+
+ CAMHAL_LOGD("");
+ CAMHAL_LOGD("ulCapFrameLayoutCount = %d", int(caps.ulCapFrameLayoutCount));
+ for ( int i = 0; i < int(caps.ulCapFrameLayoutCount); ++i )
+ CAMHAL_LOGD(" eCapFrameLayout[%2d] = %d", i, int(caps.eCapFrameLayout[i]));
+
+ CAMHAL_LOGD("");
+ CAMHAL_LOGD("bVideoNoiseFilterSupported = %d", int(caps.bVideoNoiseFilterSupported ));
+ CAMHAL_LOGD("bVideoStabilizationSupported = %d", int(caps.bVideoStabilizationSupported ));
+ CAMHAL_LOGD("bStillCapDuringVideoSupported = %d", int(caps.bStillCapDuringVideoSupported ));
+ CAMHAL_LOGD("bMechanicalMisalignmentSupported = %d", int(caps.bMechanicalMisalignmentSupported));
+ CAMHAL_LOGD("bFacePrioritySupported = %d", int(caps.bFacePrioritySupported ));
+ CAMHAL_LOGD("bRegionPrioritySupported = %d", int(caps.bRegionPrioritySupported ));
+ CAMHAL_LOGD("bGlbceSupported = %d", int(caps.bGlbceSupported));
+
+ CAMHAL_LOGD("");
+ CAMHAL_LOGD("nManualConvMin = %d", int(caps.nManualConvMin ));
+ CAMHAL_LOGD("nManualConvMax = %d", int(caps.nManualConvMax ));
+ CAMHAL_LOGD("nManualExpMin = %d", int(caps.nManualExpMin ));
+ CAMHAL_LOGD("nManualExpMax = %d", int(caps.nManualExpMax ));
+ CAMHAL_LOGD("nBrightnessMin = %d", int(caps.nBrightnessMin ));
+ CAMHAL_LOGD("nBrightnessMax = %d", int(caps.nBrightnessMax ));
+ CAMHAL_LOGD("nContrastMin = %d", int(caps.nContrastMin ));
+ CAMHAL_LOGD("nContrastMax = %d", int(caps.nContrastMax ));
+ CAMHAL_LOGD("nSharpnessMin = %d", int(caps.nSharpnessMin ));
+ CAMHAL_LOGD("nSharpnessMax = %d", int(caps.nSharpnessMax ));
+ CAMHAL_LOGD("nSaturationMin = %d", int(caps.nSaturationMin ));
+ CAMHAL_LOGD("nSaturationMax = %d", int(caps.nSaturationMax ));
+
+ CAMHAL_LOGD("");
+ CAMHAL_LOGD("------------------- end of dump -------------------");
+ CAMHAL_LOGD("===================================================");
+
+ return true;
+}
+
/*****************************************
* public exposed function declarations
*****************************************/
-status_t OMXCameraAdapter::getCaps(CameraProperties::Properties* params, OMX_HANDLETYPE handle) {
+status_t OMXCameraAdapter::getCaps(const int sensorId, CameraProperties::Properties* params, OMX_HANDLETYPE handle)
+{
status_t ret = NO_ERROR;
int caps_size = 0;
OMX_ERRORTYPE eError = OMX_ErrorNone;
- OMX_TI_CAPTYPE** caps = NULL;;
+ CameraBuffer *bufferlist;
+ OMX_TI_CAPTYPE* caps;
OMX_TI_CONFIG_SHAREDBUFFER sharedBuffer;
MemoryManager memMgr;
LOG_FUNCTION_NAME;
+ ret = memMgr.initialize();
+ if ( ret != OK ) {
+ CAMHAL_LOGE("MemoryManager initialization failed, error: %d", ret);
+ return ret;
+ }
+
// allocate tiler (or ion) buffer for caps (size is always a multiple of 4K)
caps_size = ((sizeof(OMX_TI_CAPTYPE)+4095)/4096)*4096;
- caps = (OMX_TI_CAPTYPE**) memMgr.allocateBuffer(0, 0, NULL, caps_size, 1);
+ bufferlist = memMgr.allocateBufferList(0, 0, NULL, caps_size, 1);
+ caps = (OMX_TI_CAPTYPE*) bufferlist[0].opaque;
if (!caps) {
CAMHAL_LOGEB("Error allocating buffer for caps %d", eError);
@@ -1239,13 +2424,13 @@ status_t OMXCameraAdapter::getCaps(CameraProperties::Properties* params, OMX_HAN
}
// initialize structures to be passed to OMX Camera
- OMX_INIT_STRUCT_PTR (caps[0], OMX_TI_CAPTYPE);
- caps[0]->nPortIndex = OMX_ALL;
+ OMX_INIT_STRUCT_PTR (caps, OMX_TI_CAPTYPE);
+ caps->nPortIndex = OMX_ALL;
OMX_INIT_STRUCT_PTR (&sharedBuffer, OMX_TI_CONFIG_SHAREDBUFFER);
sharedBuffer.nPortIndex = OMX_ALL;
sharedBuffer.nSharedBuffSize = caps_size;
- sharedBuffer.pSharedBuff = (OMX_U8 *) caps[0];
+ sharedBuffer.pSharedBuff = (OMX_U8 *) camera_buffer_get_omx_ptr (&bufferlist[0]);
// Get capabilities from OMX Camera
eError = OMX_GetConfig(handle, (OMX_INDEXTYPE) OMX_TI_IndexConfigCamCapabilities, &sharedBuffer);
@@ -1257,23 +2442,26 @@ status_t OMXCameraAdapter::getCaps(CameraProperties::Properties* params, OMX_HAN
CAMHAL_LOGDA("OMX capability query success");
}
+#ifdef CAMERAHAL_DEBUG
+ _dumpOmxTiCap(sensorId, *caps);
+#endif
+
// Translate and insert Ducati capabilities to CameraProperties
if ( NO_ERROR == ret ) {
- ret = insertCapabilities(params, *caps[0]);
+ ret = insertCapabilities(params, *caps);
}
- CAMHAL_LOGDB("sen mount id=%u", (unsigned int)caps[0]->tSenMounting.nSenId);
-
+ CAMHAL_LOGDB("sen mount id=%u", (unsigned int)caps->tSenMounting.nSenId);
+ CAMHAL_LOGDB("facing id=%u", (unsigned int)caps->tSenMounting.eFacing);
EXIT:
- if (caps) {
- memMgr.freeBuffer((void*) caps);
- caps = NULL;
+ if (bufferlist) {
+ memMgr.freeBufferList(bufferlist);
}
LOG_FUNCTION_NAME_EXIT;
return ret;
}
-};
-
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/OMXCameraAdapter/OMXCapture.cpp b/camera/OMXCameraAdapter/OMXCapture.cpp
index 6ad5f88..ecc84f2 100644
--- a/camera/OMXCameraAdapter/OMXCapture.cpp
+++ b/camera/OMXCameraAdapter/OMXCapture.cpp
@@ -21,32 +21,39 @@
*
*/
-#undef LOG_TAG
-
-#define LOG_TAG "CameraHAL"
-
#include "CameraHal.h"
#include "OMXCameraAdapter.h"
#include "ErrorUtils.h"
-namespace android {
+namespace Ti {
+namespace Camera {
-status_t OMXCameraAdapter::setParametersCapture(const CameraParameters &params,
+status_t OMXCameraAdapter::setParametersCapture(const android::CameraParameters &params,
BaseCameraAdapter::AdapterState state)
{
status_t ret = NO_ERROR;
const char *str = NULL;
int w, h;
OMX_COLOR_FORMATTYPE pixFormat;
+ CodingMode codingMode = mCodingMode;
const char *valstr = NULL;
int varint = 0;
+ OMX_TI_STEREOFRAMELAYOUTTYPE capFrmLayout;
+ bool inCaptureState = false;
LOG_FUNCTION_NAME;
OMXCameraPortParameters *cap;
cap = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex];
+ capFrmLayout = cap->mFrameLayoutType;
+ setParamS3D(mCameraAdapterParameters.mImagePortIndex,
+ params.get(TICameraParameters::KEY_S3D_CAP_FRAME_LAYOUT));
+ if (capFrmLayout != cap->mFrameLayoutType) {
+ mPendingCaptureSettings |= SetFormat;
+ }
+
params.getPictureSize(&w, &h);
if ( ( w != ( int ) cap->mWidth ) ||
@@ -64,84 +71,168 @@ status_t OMXCameraAdapter::setParametersCapture(const CameraParameters &params,
CAMHAL_LOGVB("Image: cap.mHeight = %d", (int)cap->mHeight);
if ((valstr = params.getPictureFormat()) != NULL) {
- if (strcmp(valstr, (const char *) CameraParameters::PIXEL_FORMAT_YUV422I) == 0) {
+ if (strcmp(valstr, android::CameraParameters::PIXEL_FORMAT_YUV422I) == 0) {
CAMHAL_LOGDA("CbYCrY format selected");
pixFormat = OMX_COLOR_FormatCbYCrY;
- mPictureFormatFromClient = CameraParameters::PIXEL_FORMAT_YUV422I;
- } else if(strcmp(valstr, (const char *) CameraParameters::PIXEL_FORMAT_YUV420SP) == 0) {
+ mPictureFormatFromClient = android::CameraParameters::PIXEL_FORMAT_YUV422I;
+ } else if(strcmp(valstr, android::CameraParameters::PIXEL_FORMAT_YUV420SP) == 0) {
CAMHAL_LOGDA("YUV420SP format selected");
pixFormat = OMX_COLOR_FormatYUV420SemiPlanar;
- mPictureFormatFromClient = CameraParameters::PIXEL_FORMAT_YUV420SP;
- } else if(strcmp(valstr, (const char *) CameraParameters::PIXEL_FORMAT_RGB565) == 0) {
+ mPictureFormatFromClient = android::CameraParameters::PIXEL_FORMAT_YUV420SP;
+ } else if(strcmp(valstr, android::CameraParameters::PIXEL_FORMAT_RGB565) == 0) {
CAMHAL_LOGDA("RGB565 format selected");
pixFormat = OMX_COLOR_Format16bitRGB565;
- mPictureFormatFromClient = CameraParameters::PIXEL_FORMAT_RGB565;
- } else if (strcmp(valstr, (const char *) CameraParameters::PIXEL_FORMAT_JPEG) == 0) {
+ mPictureFormatFromClient = android::CameraParameters::PIXEL_FORMAT_RGB565;
+ } else if (strcmp(valstr, android::CameraParameters::PIXEL_FORMAT_JPEG) == 0) {
CAMHAL_LOGDA("JPEG format selected");
pixFormat = OMX_COLOR_FormatUnused;
- mCodingMode = CodingNone;
- mPictureFormatFromClient = CameraParameters::PIXEL_FORMAT_JPEG;
- } else if (strcmp(valstr, (const char *) TICameraParameters::PIXEL_FORMAT_JPS) == 0) {
+ codingMode = CodingJPEG;
+ mPictureFormatFromClient = android::CameraParameters::PIXEL_FORMAT_JPEG;
+ } else if (strcmp(valstr, TICameraParameters::PIXEL_FORMAT_JPS) == 0) {
CAMHAL_LOGDA("JPS format selected");
pixFormat = OMX_COLOR_FormatUnused;
- mCodingMode = CodingJPS;
+ codingMode = CodingJPS;
mPictureFormatFromClient = TICameraParameters::PIXEL_FORMAT_JPS;
- } else if (strcmp(valstr, (const char *) TICameraParameters::PIXEL_FORMAT_MPO) == 0) {
+ } else if (strcmp(valstr, TICameraParameters::PIXEL_FORMAT_MPO) == 0) {
CAMHAL_LOGDA("MPO format selected");
pixFormat = OMX_COLOR_FormatUnused;
- mCodingMode = CodingMPO;
+ codingMode = CodingMPO;
mPictureFormatFromClient = TICameraParameters::PIXEL_FORMAT_MPO;
- } else if (strcmp(valstr, (const char *) TICameraParameters::PIXEL_FORMAT_RAW) == 0) {
+ } else if (strcmp(valstr, android::CameraParameters::PIXEL_FORMAT_BAYER_RGGB) == 0) {
CAMHAL_LOGDA("RAW Picture format selected");
pixFormat = OMX_COLOR_FormatRawBayer10bit;
- mPictureFormatFromClient = TICameraParameters::PIXEL_FORMAT_RAW;
+ mPictureFormatFromClient = android::CameraParameters::PIXEL_FORMAT_BAYER_RGGB;
} else {
CAMHAL_LOGEA("Invalid format, JPEG format selected as default");
pixFormat = OMX_COLOR_FormatUnused;
+ codingMode = CodingJPEG;
mPictureFormatFromClient = NULL;
}
} else {
CAMHAL_LOGEA("Picture format is NULL, defaulting to JPEG");
pixFormat = OMX_COLOR_FormatUnused;
+ codingMode = CodingJPEG;
mPictureFormatFromClient = NULL;
}
+#ifdef CAMERAHAL_USE_RAW_IMAGE_SAVING
+ mRawCapture = false;
+ mYuvCapture = false;
+
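+ // Raw capture is enabled only when the capture mode is (or defaults to) high
+ // quality and the raw output directory exists; YUV dumping also needs its directory.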
+ valstr = params.get(TICameraParameters::KEY_CAP_MODE);
+ if ( (!valstr || strcmp(valstr, TICameraParameters::HIGH_QUALITY_MODE) == 0) &&
+ access(kRawImagesOutputDirPath, F_OK) != -1 ) {
+ mRawCapture = true;
+ }
+
+ if (mRawCapture && (access(kYuvImagesOutputDirPath, F_OK) != -1)) {
+ pixFormat = OMX_COLOR_FormatCbYCrY;
+ mYuvCapture = true;
+ }
+#endif
// JPEG capture is not supported in video mode by OMX Camera
// Set capture format to yuv422i...jpeg encode will
// be done on A9
valstr = params.get(TICameraParameters::KEY_CAP_MODE);
- if ( (valstr && !strcmp(valstr, (const char *) TICameraParameters::VIDEO_MODE)) &&
- (pixFormat == OMX_COLOR_FormatUnused) ) {
+ if ( (valstr && ( strcmp(valstr, (const char *) TICameraParameters::VIDEO_MODE) == 0 ||
+ strcmp(valstr, (const char *) TICameraParameters::VIDEO_MODE_HQ) == 0 ) ) &&
+ (pixFormat == OMX_COLOR_FormatUnused) ) {
CAMHAL_LOGDA("Capturing in video mode...selecting yuv422i");
pixFormat = OMX_COLOR_FormatCbYCrY;
}
- if ( pixFormat != cap->mColorFormat )
- {
+ if (pixFormat != cap->mColorFormat || codingMode != mCodingMode) {
mPendingCaptureSettings |= SetFormat;
cap->mColorFormat = pixFormat;
- }
+ mCodingMode = codingMode;
+ }
#ifdef OMAP_ENHANCEMENT
+ str = params.get(TICameraParameters::KEY_TEMP_BRACKETING);
+ if ( ( str != NULL ) &&
+ ( strcmp(str, android::CameraParameters::TRUE) == 0 ) ) {
- str = params.get(TICameraParameters::KEY_EXP_BRACKETING_RANGE);
- if ( NULL != str ) {
- parseExpRange(str, mExposureBracketingValues, EXP_BRACKET_RANGE, mExposureBracketingValidEntries);
+ if ( !mBracketingSet ) {
+ mPendingCaptureSettings |= SetBurstExpBracket;
+ }
+
+ mBracketingSet = true;
} else {
+
+ if ( mBracketingSet ) {
+ mPendingCaptureSettings |= SetBurstExpBracket;
+ }
+
+ mBracketingSet = false;
+ }
+
+ if ( (str = params.get(TICameraParameters::KEY_EXP_BRACKETING_RANGE)) != NULL ) {
+ parseExpRange(str, mExposureBracketingValues, NULL,
+ mExposureGainBracketingModes,
+ EXP_BRACKET_RANGE, mExposureBracketingValidEntries);
+ if (mCapMode == OMXCameraAdapter::CP_CAM) {
+ mExposureBracketMode = OMX_BracketVectorShot;
+ } else {
+ mExposureBracketMode = OMX_BracketExposureRelativeInEV;
+ }
+ mPendingCaptureSettings |= SetBurstExpBracket;
+ } else if ( (str = params.get(TICameraParameters::KEY_EXP_GAIN_BRACKETING_RANGE)) != NULL) {
+ parseExpRange(str, mExposureBracketingValues, mExposureGainBracketingValues,
+ mExposureGainBracketingModes,
+ EXP_BRACKET_RANGE, mExposureBracketingValidEntries);
+ if (mCapMode == OMXCameraAdapter::CP_CAM) {
+ mExposureBracketMode = OMX_BracketVectorShot;
+ } else {
+ mExposureBracketMode = OMX_BracketExposureGainAbsolute;
+ }
+ mPendingCaptureSettings |= SetBurstExpBracket;
+ } else {
+ // always set queued shot config in CPCAM mode
+ if (mCapMode == OMXCameraAdapter::CP_CAM) {
+ mExposureBracketMode = OMX_BracketVectorShot;
+ mPendingCaptureSettings |= SetBurstExpBracket;
+ }
// if bracketing was previously set...we set again before capturing to clear
- if (mExposureBracketingValidEntries) mPendingCaptureSettings |= SetExpBracket;
- mExposureBracketingValidEntries = 0;
+ if (mExposureBracketingValidEntries) {
+ mPendingCaptureSettings |= SetBurstExpBracket;
+ mExposureBracketingValidEntries = 0;
+ }
}
+ str = params.get(TICameraParameters::KEY_ZOOM_BRACKETING_RANGE);
+ if ( NULL != str ) {
+ parseExpRange(str, mZoomBracketingValues, NULL, NULL,
+ ZOOM_BRACKET_RANGE, mZoomBracketingValidEntries);
+ mCurrentZoomBracketing = 0;
+ mZoomBracketingEnabled = true;
+ } else {
+ if (mZoomBracketingValidEntries) {
+ mZoomBracketingValidEntries = 0;
+ }
+ mZoomBracketingEnabled = false;
+ }
#endif
- varint = params.getInt(CameraParameters::KEY_ROTATION);
- if ( varint != -1 )
+ // Flush config queue
+ // If TRUE: Flush queue and abort processing before enqueuing
+ valstr = params.get(TICameraParameters::KEY_FLUSH_SHOT_CONFIG_QUEUE);
+ if ( NULL != valstr ) {
+ if ( 0 == strcmp(valstr, android::CameraParameters::TRUE) ) {
+ mFlushShotConfigQueue = true;
+ } else if ( 0 == strcmp(valstr, android::CameraParameters::FALSE) ) {
+ mFlushShotConfigQueue = false;
+ } else {
+ CAMHAL_LOGE("Missing flush shot config parameter. Will use current (%s)",
+ mFlushShotConfigQueue ? "true" : "false");
+ }
+ }
+
+ if ( params.getInt(android::CameraParameters::KEY_ROTATION) != -1 )
{
- if ( ( unsigned int ) varint != mPictureRotation) {
+ if (params.getInt(android::CameraParameters::KEY_ROTATION) != (int) mPictureRotation) {
mPendingCaptureSettings |= SetRotation;
}
- mPictureRotation = varint;
+ mPictureRotation = params.getInt(android::CameraParameters::KEY_ROTATION);
}
else
{
@@ -152,110 +243,101 @@ status_t OMXCameraAdapter::setParametersCapture(const CameraParameters &params,
CAMHAL_LOGVB("Picture Rotation set %d", mPictureRotation);
#ifdef OMAP_ENHANCEMENT
-
// Read Sensor Orientation and set it based on operating mode
-
- varint = params.getInt(TICameraParameters::KEY_SENSOR_ORIENTATION);
- if (( varint != -1 ) && (mCapMode == OMXCameraAdapter::VIDEO_MODE))
+ varint = params.getInt(TICameraParameters::KEY_SENSOR_ORIENTATION);
+ if ( varint != -1 )
{
- mSensorOrientation = varint;
- if (mSensorOrientation == 270 ||mSensorOrientation==90)
- {
- CAMHAL_LOGEA(" Orientation is 270/90. So setting counter rotation to Ducati");
- mSensorOrientation +=180;
- mSensorOrientation%=360;
- }
- }
- else
+ mSensorOrientation = varint;
+ if (mSensorOrientation == 270 ||mSensorOrientation==90)
+ {
+ CAMHAL_LOGEA(" Orientation is 270/90. So setting counter rotation to Ducati");
+ mSensorOrientation +=180;
+ mSensorOrientation%=360;
+ }
+ }
+ else
{
- mSensorOrientation = 0;
+ mSensorOrientation = 0;
}
- CAMHAL_LOGVB("Sensor Orientation set : %d", mSensorOrientation);
+ CAMHAL_LOGVB("Sensor Orientation set : %d", mSensorOrientation);
+#endif
+#ifdef OMAP_ENHANCEMENT_BURST_CAPTURE
varint = params.getInt(TICameraParameters::KEY_BURST);
if ( varint >= 1 )
{
- if (varint != mBurstFrames) {
- mPendingCaptureSettings |= SetExpBracket;
+ if (varint != (int) mBurstFrames) {
+ mPendingCaptureSettings |= SetBurstExpBracket;
}
mBurstFrames = varint;
}
else
{
- if (mBurstFrames != 1) mPendingCaptureSettings |= SetExpBracket;
+ if (mBurstFrames != 1) mPendingCaptureSettings |= SetBurstExpBracket;
mBurstFrames = 1;
}
CAMHAL_LOGVB("Burst Frames set %d", mBurstFrames);
-
#endif
- varint = params.getInt(CameraParameters::KEY_JPEG_QUALITY);
- if ( ( varint >= MIN_JPEG_QUALITY ) &&
- ( varint <= MAX_JPEG_QUALITY ) )
- {
- if ( ( unsigned int ) varint != mPictureQuality) {
+ varint = params.getInt(android::CameraParameters::KEY_JPEG_QUALITY);
+ if ( varint >= MIN_JPEG_QUALITY && varint <= MAX_JPEG_QUALITY ) {
+ if (varint != mPictureQuality) {
mPendingCaptureSettings |= SetQuality;
+ mPictureQuality = varint;
}
- mPictureQuality = varint;
- }
- else
- {
- if (mPictureQuality != MAX_JPEG_QUALITY) mPendingCaptureSettings |= SetQuality;
- mPictureQuality = MAX_JPEG_QUALITY;
+ } else {
+ if (mPictureQuality != MAX_JPEG_QUALITY) {
+ mPendingCaptureSettings |= SetQuality;
+ mPictureQuality = MAX_JPEG_QUALITY;
}
+ }
CAMHAL_LOGVB("Picture Quality set %d", mPictureQuality);
- varint = params.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH);
- if ( varint >= 0 )
- {
- if ( ( unsigned int ) varint != mThumbWidth) {
+ varint = params.getInt(android::CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH);
+ if ( varint >= 0 ) {
+ if (varint != mThumbWidth) {
mPendingCaptureSettings |= SetThumb;
+ mThumbWidth = varint;
}
- mThumbWidth = varint;
- }
- else
- {
- if (mThumbWidth != DEFAULT_THUMB_WIDTH) mPendingCaptureSettings |= SetThumb;
- mThumbWidth = DEFAULT_THUMB_WIDTH;
+ } else {
+ if (mThumbWidth != DEFAULT_THUMB_WIDTH) {
+ mPendingCaptureSettings |= SetThumb;
+ mThumbWidth = DEFAULT_THUMB_WIDTH;
}
-
+ }
CAMHAL_LOGVB("Picture Thumb width set %d", mThumbWidth);
- varint = params.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT);
- if ( varint >= 0 )
- {
- if ( ( unsigned int ) varint != mThumbHeight) {
+ varint = params.getInt(android::CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT);
+ if ( varint >= 0 ) {
+ if (varint != mThumbHeight) {
mPendingCaptureSettings |= SetThumb;
+ mThumbHeight = varint;
}
- mThumbHeight = varint;
- }
- else
- {
- if (mThumbHeight != DEFAULT_THUMB_HEIGHT) mPendingCaptureSettings |= SetThumb;
- mThumbHeight = DEFAULT_THUMB_HEIGHT;
+ } else {
+ if (mThumbHeight != DEFAULT_THUMB_HEIGHT) {
+ mPendingCaptureSettings |= SetThumb;
+ mThumbHeight = DEFAULT_THUMB_HEIGHT;
}
-
+ }
CAMHAL_LOGVB("Picture Thumb height set %d", mThumbHeight);
- varint = params.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY);
- if ( ( varint >= MIN_JPEG_QUALITY ) &&
- ( varint <= MAX_JPEG_QUALITY ) )
- {
- if ( ( unsigned int ) varint != mThumbQuality) {
+ varint = params.getInt(android::CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY);
+ if ( varint >= MIN_JPEG_QUALITY && varint <= MAX_JPEG_QUALITY ) {
+ if (varint != mThumbQuality) {
mPendingCaptureSettings |= SetThumb;
+ mThumbQuality = varint;
}
- mThumbQuality = varint;
- }
- else
- {
- if (mThumbQuality != MAX_JPEG_QUALITY) mPendingCaptureSettings |= SetThumb;
- mThumbQuality = MAX_JPEG_QUALITY;
+ } else {
+ if (mThumbQuality != MAX_JPEG_QUALITY) {
+ mPendingCaptureSettings |= SetThumb;
+ mThumbQuality = MAX_JPEG_QUALITY;
}
+ }
CAMHAL_LOGDB("Thumbnail Quality set %d", mThumbQuality);
@@ -263,19 +345,16 @@ status_t OMXCameraAdapter::setParametersCapture(const CameraParameters &params,
mPendingCaptureSettings = ECapturesettingsAll;
}
- if (mPendingCaptureSettings) {
- disableImagePort();
- if ( NULL != mReleaseImageBuffersCallback ) {
- mReleaseImageBuffersCallback(mReleaseData);
- }
- }
+ cap = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mVideoPortIndex];
+ cap->mWidth = params.getInt(TICameraParameters::RAW_WIDTH);
+ cap->mHeight = params.getInt(TICameraParameters::RAW_HEIGHT);
LOG_FUNCTION_NAME_EXIT;
return ret;
}
-status_t OMXCameraAdapter::getPictureBufferSize(size_t &length, size_t bufferCount)
+status_t OMXCameraAdapter::getPictureBufferSize(CameraFrame &frame, size_t bufferCount)
{
status_t ret = NO_ERROR;
OMXCameraPortParameters *imgCaptureData = NULL;
@@ -287,95 +366,207 @@ status_t OMXCameraAdapter::getPictureBufferSize(size_t &length, size_t bufferCou
{
imgCaptureData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex];
- imgCaptureData->mNumBufs = bufferCount;
- // check if image port is already configured...
- // if it already configured then we don't have to query again
- if (!mCaptureConfigured) {
+ // If any settings have changed that need to be set with SetParam,
+ // we will need to disable the port to set them
+ if ((mPendingCaptureSettings & ECaptureParamSettings)) {
+ disableImagePort();
+ if ( NULL != mReleaseImageBuffersCallback ) {
+ mReleaseImageBuffersCallback(mReleaseData);
+ }
+ }
+
+ if (mPendingCaptureSettings & SetFormat) {
ret = setFormat(OMX_CAMERA_PORT_IMAGE_OUT_IMAGE, *imgCaptureData);
}
if ( ret == NO_ERROR )
{
- length = imgCaptureData->mBufSize;
+ frame.mLength = imgCaptureData->mBufSize;
+ frame.mWidth = imgCaptureData->mWidth;
+ frame.mHeight = imgCaptureData->mHeight;
+ frame.mAlignment = imgCaptureData->mStride;
+ CAMHAL_LOGDB("getPictureBufferSize: width:%u height:%u alignment:%u length:%u",
+ frame.mWidth, frame.mHeight, frame.mAlignment, frame.mLength);
}
else
{
CAMHAL_LOGEB("setFormat() failed 0x%x", ret);
- length = 0;
}
}
- CAMHAL_LOGDB("getPictureBufferSize %d", length);
-
LOG_FUNCTION_NAME_EXIT;
return ret;
}
+int OMXCameraAdapter::getBracketingValueMode(const char *a, const char *b) const
+{
+ BracketingValueMode bvm = BracketingValueAbsolute;
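+ // A value that carries an explicit '+' or '-' sign between a and b
+ // (e.g. "+0", "-10") is treated as a relative bracketing value.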
+
+ if ( (NULL != b) &&
+ (NULL != a) &&
+ (a < b) &&
+ ( (NULL != memchr(a, '+', b - a)) ||
+ (NULL != memchr(a, '-', b - a)) ) ) {
+ bvm = BracketingValueRelative;
+ }
+ return bvm;
+}
+
status_t OMXCameraAdapter::parseExpRange(const char *rangeStr,
- int * expRange,
+ int *expRange,
+ int *gainRange,
+ int *expGainModes,
size_t count,
size_t &validEntries)
{
status_t ret = NO_ERROR;
- char *ctx, *expVal;
- char *tmp = NULL;
+ char *end = NULL;
+ const char *startPtr = NULL;
size_t i = 0;
LOG_FUNCTION_NAME;
- if ( NULL == rangeStr )
- {
+ if ( NULL == rangeStr ){
return -EINVAL;
- }
+ }
- if ( NULL == expRange )
- {
+ if ( NULL == expRange ){
return -EINVAL;
- }
-
- if ( NO_ERROR == ret )
- {
- tmp = ( char * ) malloc( strlen(rangeStr) + 1 );
+ }
- if ( NULL == tmp )
- {
- CAMHAL_LOGEA("No resources for temporary buffer");
- return -1;
+ if ( NO_ERROR == ret ) {
+ startPtr = rangeStr;
+ do {
+ // Relative Exposure example: "-30,-10, 0, 10, 30"
+ // Absolute Gain ex. (exposure,gain) pairs: "(100,300),(200,300),(400,300),(800,300),(1600,300)"
+ // Relative Gain ex. (exposure,gain) pairs: "(-30,+0),(-10, +0),(+0,+0),(+10,+0),(+30,+0)"
+ // Forced relative Exposure example: "-30F,-10F, 0F, 10F, 30F"
+ // Forced absolute Gain ex. (exposure,gain) pairs: "(100,300)F,(200,300)F,(400,300)F,(800,300)F,(1600,300)F"
+ // Forced relative Gain ex. (exposure,gain) pairs: "(-30,+0)F,(-10, +0)F,(+0,+0)F,(+10,+0)F,(+30,+0)F"
+
+ // skip '(' and ','
+ while ((*startPtr == '(') || (*startPtr == ',')) startPtr++;
+
+ expRange[i] = (int)strtol(startPtr, &end, 10);
+
+ if (expGainModes) {
+ // if gainRange is given rangeStr should be (exposure, gain) pair
+ if (gainRange) {
+ int bvm_exp = getBracketingValueMode(startPtr, end);
+ startPtr = end + 1; // for the ','
+ gainRange[i] = (int)strtol(startPtr, &end, 10);
+
+ if (BracketingValueAbsolute == bvm_exp) {
+ expGainModes[i] = getBracketingValueMode(startPtr, end);
+ } else {
+ expGainModes[i] = bvm_exp;
+ }
+ } else {
+ expGainModes[i] = BracketingValueCompensation;
+ }
+ }
+ startPtr = end;
+
+ // skip ')'
+ while (*startPtr == ')') startPtr++;
+
+ // Check for "forced" key
+ if (expGainModes) {
+ while ((*startPtr == 'F') || (*startPtr == 'f')) {
+ if ( BracketingValueAbsolute == expGainModes[i] ) {
+ expGainModes[i] = BracketingValueAbsoluteForced;
+ } else if ( BracketingValueRelative == expGainModes[i] ) {
+ expGainModes[i] = BracketingValueRelativeForced;
+ } else if ( BracketingValueCompensation == expGainModes[i] ) {
+ expGainModes[i] = BracketingValueCompensationForced;
+ } else {
+ CAMHAL_LOGE("Unexpected old mode 0x%x", expGainModes[i]);
+ }
+ startPtr++;
+ }
}
- memset(tmp, '\0', strlen(rangeStr) + 1);
-
- }
-
- if ( NO_ERROR == ret )
- {
- strncpy(tmp, rangeStr, strlen(rangeStr) );
- expVal = strtok_r( (char *) tmp, CameraHal::PARAMS_DELIMITER, &ctx);
- i = 0;
- while ( ( NULL != expVal ) && ( i < count ) )
- {
- expRange[i] = atoi(expVal);
- expVal = strtok_r(NULL, CameraHal::PARAMS_DELIMITER, &ctx);
i++;
- }
+
+ } while ((startPtr[0] != '\0') && (i < count));
validEntries = i;
- }
+ }
- if ( NULL != tmp )
- {
- free(tmp);
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::doExposureBracketing(int *evValues,
+ int *evValues2,
+ int *evModes2,
+ size_t evCount,
+ size_t frameCount,
+ bool flush,
+ OMX_BRACKETMODETYPE bracketMode)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ if ( OMX_StateInvalid == mComponentState ) {
+ CAMHAL_LOGEA("OMX component is in invalid state");
+ ret = -EINVAL;
+ }
+
+ if ( NULL == evValues ) {
+ CAMHAL_LOGEA("Exposure compensation values pointer is invalid");
+ ret = -EINVAL;
+ }
+
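+ // Vector shot bracketing is programmed through the shot config queue;
+ // all other bracket modes use the classic exposure bracketing config.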
+ if ( NO_ERROR == ret ) {
+ if (bracketMode == OMX_BracketVectorShot) {
+ ret = setVectorShot(evValues, evValues2, evModes2, evCount, frameCount, flush, bracketMode);
+ } else {
+ ret = setExposureBracketing(evValues, evValues2, evCount, frameCount, bracketMode);
}
+ }
LOG_FUNCTION_NAME_EXIT;
return ret;
}
-status_t OMXCameraAdapter::setExposureBracketing(int *evValues,
- size_t evCount,
- size_t frameCount)
+status_t OMXCameraAdapter::setVectorStop(bool toPreview)
+{
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_TI_CONFIG_VECTSHOTSTOPMETHODTYPE vecShotStop;
+
+
+ LOG_FUNCTION_NAME;
+
+ OMX_INIT_STRUCT_PTR(&vecShotStop, OMX_TI_CONFIG_VECTSHOTSTOPMETHODTYPE);
+
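+ // Select what the component does once the queued shot configs are exhausted:
+ // either return to preview or stay in capture waiting for further configs.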
+ vecShotStop.nPortIndex = mCameraAdapterParameters.mImagePortIndex;
+ if (toPreview) {
+ vecShotStop.eStopMethod = OMX_TI_VECTSHOTSTOPMETHOD_GOTO_PREVIEW;
+ } else {
+ vecShotStop.eStopMethod = OMX_TI_VECTSHOTSTOPMETHOD_WAIT_IN_CAPTURE;
+ }
+
+ eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+ (OMX_INDEXTYPE) OMX_TI_IndexConfigVectShotStopMethod,
+ &vecShotStop);
+ if (OMX_ErrorNone != eError) {
+ CAMHAL_LOGEB("Error while configuring bracket shot 0x%x", eError);
+ } else {
+ CAMHAL_LOGDA("Bracket shot configured successfully");
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
+}
+
+status_t OMXCameraAdapter::initVectorShot()
{
status_t ret = NO_ERROR;
OMX_ERRORTYPE eError = OMX_ErrorNone;
@@ -384,17 +575,193 @@ status_t OMXCameraAdapter::setExposureBracketing(int *evValues,
LOG_FUNCTION_NAME;
- if ( OMX_StateInvalid == mComponentState )
- {
- CAMHAL_LOGEA("OMX component is in invalid state");
- ret = -EINVAL;
+ if (NO_ERROR == ret) {
+ OMX_INIT_STRUCT_PTR (&expCapMode, OMX_CONFIG_CAPTUREMODETYPE);
+ expCapMode.nPortIndex = mCameraAdapterParameters.mImagePortIndex;
+
+ expCapMode.bFrameLimited = OMX_FALSE;
+
+ eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+ OMX_IndexConfigCaptureMode,
+ &expCapMode);
+ if (OMX_ErrorNone != eError) {
+ CAMHAL_LOGEB("Error while configuring capture mode 0x%x", eError);
+ goto exit;
+ } else {
+ CAMHAL_LOGDA("Camera capture mode configured successfully");
}
+ }
- if ( NULL == evValues )
- {
- CAMHAL_LOGEA("Exposure compensation values pointer is invalid");
- ret = -EINVAL;
+ if (NO_ERROR == ret) {
+ OMX_INIT_STRUCT_PTR (&extExpCapMode, OMX_CONFIG_EXTCAPTUREMODETYPE);
+ extExpCapMode.nPortIndex = mCameraAdapterParameters.mImagePortIndex;
+
+ extExpCapMode.bEnableBracketing = OMX_TRUE;
+ extExpCapMode.tBracketConfigType.eBracketMode = OMX_BracketVectorShot;
+
+ eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+ ( OMX_INDEXTYPE ) OMX_IndexConfigExtCaptureMode,
+ &extExpCapMode);
+ if ( OMX_ErrorNone != eError ) {
+ CAMHAL_LOGEB("Error while configuring extended capture mode 0x%x", eError);
+ goto exit;
+ } else {
+ CAMHAL_LOGDA("Extended camera capture mode configured successfully");
+ }
+ }
+
+
+ if (NO_ERROR == ret) {
+ // set vector stop method to stop in capture
+ ret = setVectorStop(false);
+ }
+
+ exit:
+ LOG_FUNCTION_NAME_EXIT;
+
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
+}
+
+status_t OMXCameraAdapter::setVectorShot(int *evValues,
+ int *evValues2,
+ int *evModes2,
+ size_t evCount,
+ size_t frameCount,
+ bool flush,
+ OMX_BRACKETMODETYPE bracketMode)
+{
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_TI_CONFIG_ENQUEUESHOTCONFIGS enqueueShotConfigs;
+ OMX_TI_CONFIG_QUERYAVAILABLESHOTS queryAvailableShots;
+ bool doFlush = flush;
+
+ LOG_FUNCTION_NAME;
+
+ OMX_INIT_STRUCT_PTR(&enqueueShotConfigs, OMX_TI_CONFIG_ENQUEUESHOTCONFIGS);
+ OMX_INIT_STRUCT_PTR(&queryAvailableShots, OMX_TI_CONFIG_QUERYAVAILABLESHOTS);
+
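+ // Query how many shot configs the component can still accept, so an
+ // oversized queue request can be rejected up front.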
+ queryAvailableShots.nPortIndex = mCameraAdapterParameters.mImagePortIndex;
+ eError = OMX_GetConfig(mCameraAdapterParameters.mHandleComp,
+ (OMX_INDEXTYPE) OMX_TI_IndexConfigQueryAvailableShots,
+ &queryAvailableShots);
+ if (OMX_ErrorNone != eError) {
+ CAMHAL_LOGE("Error getting available shots 0x%x", eError);
+ goto exit;
+ } else {
+ CAMHAL_LOGD("AVAILABLE SHOTS: %d", queryAvailableShots.nAvailableShots);
+ if (queryAvailableShots.nAvailableShots < evCount) {
+ // TODO(XXX): Need to implement some logic to handle this error
+ CAMHAL_LOGE("Not enough available shots to fulfill this queue request");
+ ret = -ENOSPC;
+ goto exit;
+ }
+ }
+
+ for ( unsigned int confID = 0; confID < evCount; ) {
+ unsigned int i;
+ for ( i = 0 ; (i < ARRAY_SIZE(enqueueShotConfigs.nShotConfig)) && (confID < evCount); i++, confID++ ) {
+ CAMHAL_LOGD("%2u: (%7d,%4d) mode: %d", confID, evValues[confID], evValues2[confID], evModes2[confID]);
+ enqueueShotConfigs.nShotConfig[i].nConfigId = confID;
+ enqueueShotConfigs.nShotConfig[i].nFrames = 1;
+ if ( (BracketingValueCompensation == evModes2[confID]) ||
+ (BracketingValueCompensationForced == evModes2[confID]) ) {
+ // EV compensation
+ enqueueShotConfigs.nShotConfig[i].nEC = evValues[confID];
+ enqueueShotConfigs.nShotConfig[i].nExp = 0;
+ enqueueShotConfigs.nShotConfig[i].nGain = 0;
+ } else {
+ // exposure,gain pair
+ enqueueShotConfigs.nShotConfig[i].nEC = 0;
+ enqueueShotConfigs.nShotConfig[i].nExp = evValues[confID];
+ enqueueShotConfigs.nShotConfig[i].nGain = evValues2[confID];
+ }
+ enqueueShotConfigs.nShotConfig[i].eExpGainApplyMethod = OMX_TI_EXPGAINAPPLYMETHOD_ABSOLUTE;
+ switch (evModes2[confID]) {
+ case BracketingValueAbsolute: // (exp,gain) pairs directly program sensor values
+ default :
+ enqueueShotConfigs.nShotConfig[i].eExpGainApplyMethod = OMX_TI_EXPGAINAPPLYMETHOD_ABSOLUTE;
+ break;
+ case BracketingValueRelative: // (exp,gain) pairs relative to AE settings and constraints
+ case BracketingValueCompensation: // EV compensation relative to AE settings and constraints
+ enqueueShotConfigs.nShotConfig[i].eExpGainApplyMethod = OMX_TI_EXPGAINAPPLYMETHOD_RELATIVE;
+ break;
+ case BracketingValueAbsoluteForced: // (exp,gain) pairs directly program sensor values
+ // are forced over constraints due to flicker, etc.
+ enqueueShotConfigs.nShotConfig[i].eExpGainApplyMethod = OMX_TI_EXPGAINAPPLYMETHOD_FORCE_ABSOLUTE;
+ break;
+ case BracketingValueRelativeForced: // (exp, gain) pairs relative to AE settings and constraints
+ case BracketingValueCompensationForced: // EV compensation relative to AE settings and constraints
+ // are forced over constraints due to flicker, etc.
+ enqueueShotConfigs.nShotConfig[i].eExpGainApplyMethod = OMX_TI_EXPGAINAPPLYMETHOD_FORCE_RELATIVE;
+ break;
+ }
+ enqueueShotConfigs.nShotConfig[i].bNoSnapshot = OMX_FALSE; // TODO: Make this configurable
+ }
+
+ // Repeat the last exposure/gain config to cover the remaining frames
+ if ((confID == evCount) && (evCount > 0) && (frameCount > evCount) && (0 != i)) {
+ enqueueShotConfigs.nShotConfig[i-1].nFrames = frameCount - evCount;
+ }
+
+ enqueueShotConfigs.nPortIndex = mCameraAdapterParameters.mImagePortIndex;
+ enqueueShotConfigs.bFlushQueue = doFlush ? OMX_TRUE : OMX_FALSE;
+ enqueueShotConfigs.nNumConfigs = i;
+ eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+ ( OMX_INDEXTYPE ) OMX_TI_IndexConfigEnqueueShotConfigs,
+ &enqueueShotConfigs);
+ if ( OMX_ErrorNone != eError ) {
+ CAMHAL_LOGEB("Error while configuring enqueue shot 0x%x", eError);
+ goto exit;
+ } else {
+ CAMHAL_LOGDA("Enqueue shot configured successfully");
+ }
+ // Flush only first time
+ doFlush = false;
+ }
+
+ // Handle the plain burst capture (no bracketing) case
+ if (0 == evCount) {
+ CAMHAL_LOGE("Handle burst capture (no any bracketing) case");
+ enqueueShotConfigs.nShotConfig[0].nConfigId = 0;
+ enqueueShotConfigs.nShotConfig[0].nFrames = frameCount;
+ enqueueShotConfigs.nShotConfig[0].nEC = 0;
+ enqueueShotConfigs.nShotConfig[0].nExp = 0;
+ enqueueShotConfigs.nShotConfig[0].nGain = 0;
+ enqueueShotConfigs.nShotConfig[0].eExpGainApplyMethod = OMX_TI_EXPGAINAPPLYMETHOD_RELATIVE;
+ enqueueShotConfigs.nShotConfig[0].bNoSnapshot = OMX_FALSE; // TODO: Make this configurable
+ enqueueShotConfigs.nNumConfigs = 1;
+ enqueueShotConfigs.nPortIndex = mCameraAdapterParameters.mImagePortIndex;
+ enqueueShotConfigs.bFlushQueue = doFlush ? OMX_TRUE : OMX_FALSE;
+ eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+ ( OMX_INDEXTYPE ) OMX_TI_IndexConfigEnqueueShotConfigs,
+ &enqueueShotConfigs);
+ if ( OMX_ErrorNone != eError ) {
+ CAMHAL_LOGEB("Error while configuring enqueue shot 0x%x", eError);
+ goto exit;
+ } else {
+ CAMHAL_LOGDA("Enqueue shot configured successfully");
}
+ }
+
+ exit:
+ LOG_FUNCTION_NAME_EXIT;
+
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
+}
+
+status_t OMXCameraAdapter::setExposureBracketing(int *evValues,
+ int *evValues2,
+ size_t evCount,
+ size_t frameCount,
+ OMX_BRACKETMODETYPE bracketMode)
+{
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_CONFIG_CAPTUREMODETYPE expCapMode;
+ OMX_CONFIG_EXTCAPTUREMODETYPE extExpCapMode;
+
+ LOG_FUNCTION_NAME;
if ( NO_ERROR == ret )
{
@@ -439,13 +806,19 @@ status_t OMXCameraAdapter::setExposureBracketing(int *evValues,
else
{
extExpCapMode.bEnableBracketing = OMX_TRUE;
- extExpCapMode.tBracketConfigType.eBracketMode = OMX_BracketExposureRelativeInEV;
+ extExpCapMode.tBracketConfigType.eBracketMode = bracketMode;
extExpCapMode.tBracketConfigType.nNbrBracketingValues = evCount - 1;
}
for ( unsigned int i = 0 ; i < evCount ; i++ )
{
- extExpCapMode.tBracketConfigType.nBracketValues[i] = ( evValues[i] * ( 1 << Q16_OFFSET ) ) / 10;
+ if (bracketMode == OMX_BracketExposureGainAbsolute) {
+ extExpCapMode.tBracketConfigType.nBracketValues[i] = evValues[i];
+ extExpCapMode.tBracketConfigType.nBracketValues2[i] = evValues2[i];
+ } else {
+ // assuming OMX_BracketExposureRelativeInEV
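+ // Relative EV values are given in tenths of an EV; convert to Q16 fixed point.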
+ extExpCapMode.tBracketConfigType.nBracketValues[i] = ( evValues[i] * ( 1 << Q16_OFFSET ) ) / 10;
+ }
}
eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
@@ -538,7 +911,8 @@ status_t OMXCameraAdapter::doBracketing(OMX_BUFFERHEADERTYPE *pBuffHeader,
if ( NO_ERROR == ret )
{
- currentBufferIdx = ( unsigned int ) pBuffHeader->pAppPrivate;
+ CameraBuffer *buffer = (CameraBuffer *)pBuffHeader->pAppPrivate;
+ currentBufferIdx = buffer->index;
if ( currentBufferIdx >= imgCaptureData->mNumBufs)
{
@@ -558,8 +932,8 @@ status_t OMXCameraAdapter::doBracketing(OMX_BUFFERHEADERTYPE *pBuffHeader,
mBracketingBuffersQueued[nextBufferIdx] = true;
mBracketingBuffersQueuedCount++;
mLastBracetingBufferIdx = nextBufferIdx;
- setFrameRefCount(imgCaptureData->mBufferHeader[nextBufferIdx]->pBuffer, typeOfFrame, 1);
- returnFrame(imgCaptureData->mBufferHeader[nextBufferIdx]->pBuffer, typeOfFrame);
+ setFrameRefCount((CameraBuffer *)imgCaptureData->mBufferHeader[nextBufferIdx]->pAppPrivate, typeOfFrame, 1);
+ returnFrame((CameraBuffer *)imgCaptureData->mBufferHeader[nextBufferIdx]->pAppPrivate, typeOfFrame);
}
}
@@ -568,7 +942,7 @@ status_t OMXCameraAdapter::doBracketing(OMX_BUFFERHEADERTYPE *pBuffHeader,
return ret;
}
-status_t OMXCameraAdapter::sendBracketFrames()
+status_t OMXCameraAdapter::sendBracketFrames(size_t &framesSent)
{
status_t ret = NO_ERROR;
int currentBufferIdx;
@@ -577,6 +951,7 @@ status_t OMXCameraAdapter::sendBracketFrames()
LOG_FUNCTION_NAME;
imgCaptureData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex];
+ framesSent = 0;
if ( OMX_StateExecuting != mComponentState )
{
@@ -599,6 +974,7 @@ status_t OMXCameraAdapter::sendBracketFrames()
imgCaptureData->mBufferHeader[currentBufferIdx],
imgCaptureData->mImageType,
imgCaptureData);
+ framesSent++;
}
} while ( currentBufferIdx != mLastBracetingBufferIdx );
@@ -625,7 +1001,7 @@ status_t OMXCameraAdapter::startBracketing(int range)
}
{
- Mutex::Autolock lock(mBracketingLock);
+ android::AutoMutex lock(mBracketingLock);
if ( mBracketingEnabled )
{
@@ -644,7 +1020,7 @@ status_t OMXCameraAdapter::startBracketing(int range)
if ( NO_ERROR == ret )
{
- Mutex::Autolock lock(mBracketingLock);
+ android::AutoMutex lock(mBracketingLock);
mBracketingRange = range;
mBracketingBuffersQueued = new bool[imgCaptureData->mNumBufs];
@@ -657,6 +1033,7 @@ status_t OMXCameraAdapter::startBracketing(int range)
if ( NO_ERROR == ret )
{
mBracketingBuffersQueuedCount = imgCaptureData->mNumBufs;
+ mBurstFramesAccum = imgCaptureData->mNumBufs;
mLastBracetingBufferIdx = mBracketingBuffersQueuedCount - 1;
for ( int i = 0 ; i < imgCaptureData->mNumBufs ; i++ )
@@ -669,10 +1046,11 @@ status_t OMXCameraAdapter::startBracketing(int range)
if ( NO_ERROR == ret )
{
-
- ret = startImageCapture();
+ CachedCaptureParameters* cap_params = cacheCaptureParameters();
+ ret = startImageCapture(true, cap_params);
+ delete cap_params;
{
- Mutex::Autolock lock(mBracketingLock);
+ android::AutoMutex lock(mBracketingLock);
if ( NO_ERROR == ret )
{
@@ -696,15 +1074,15 @@ status_t OMXCameraAdapter::stopBracketing()
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(mBracketingLock);
+ ret = stopImageCapture();
+
+ android::AutoMutex lock(mBracketingLock);
if ( NULL != mBracketingBuffersQueued )
{
delete [] mBracketingBuffersQueued;
}
- ret = stopImageCapture();
-
mBracketingBuffersQueued = NULL;
mBracketingEnabled = false;
mBracketingBuffersQueuedCount = 0;
@@ -715,15 +1093,18 @@ status_t OMXCameraAdapter::stopBracketing()
return ret;
}
-status_t OMXCameraAdapter::startImageCapture()
+status_t OMXCameraAdapter::startImageCapture(bool bracketing, CachedCaptureParameters* capParams)
{
status_t ret = NO_ERROR;
OMX_ERRORTYPE eError = OMX_ErrorNone;
OMXCameraPortParameters * capData = NULL;
OMX_CONFIG_BOOLEANTYPE bOMX;
+ size_t bracketingSent = 0;
LOG_FUNCTION_NAME;
+ android::AutoMutex lock(mImageCaptureLock);
+
if(!mCaptureConfigured)
{
///Image capture was cancelled before we could start
@@ -736,9 +1117,16 @@ status_t OMXCameraAdapter::startImageCapture()
return NO_INIT;
}
- if ((getNextState() & (CAPTURE_ACTIVE|BRACKETING_ACTIVE)) == 0) {
- CAMHAL_LOGDA("trying starting capture when already canceled");
- return NO_ERROR;
+ if ( !bracketing ) {
+ if ((getNextState() & (CAPTURE_ACTIVE|BRACKETING_ACTIVE)) == 0) {
+            CAMHAL_LOGDA("trying to start capture when it was already canceled");
+ return NO_ERROR;
+ }
+ }
+
+ if (!capParams) {
+ CAMHAL_LOGE("Invalid cached parameters sent!");
+ return BAD_VALUE;
}
// Camera framework doesn't expect face callbacks once capture is triggered
@@ -746,13 +1134,25 @@ status_t OMXCameraAdapter::startImageCapture()
//During bracketing image capture is already active
{
- Mutex::Autolock lock(mBracketingLock);
+ android::AutoMutex lock(mBracketingLock);
if ( mBracketingEnabled )
{
//Stop bracketing, activate normal burst for the remaining images
mBracketingEnabled = false;
- mCapturedFrames = mBracketingRange;
- ret = sendBracketFrames();
+ ret = sendBracketFrames(bracketingSent);
+
+ // Check if we accumulated enough buffers
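+            // If fewer frames than (mBracketingRange - 1) were flushed from the
+            // bracketing queue, add the shortfall to the expected capture count so
+            // the remaining frames are grabbed as part of the burst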
+ if ( bracketingSent < ( mBracketingRange - 1 ) )
+ {
+ mCapturedFrames = mBracketingRange + ( ( mBracketingRange - 1 ) - bracketingSent );
+ }
+ else
+ {
+ mCapturedFrames = mBracketingRange;
+ }
+ mBurstFramesQueued = 0;
+ mBurstFramesAccum = mCapturedFrames;
+
if(ret != NO_ERROR)
goto EXIT;
else
@@ -761,25 +1161,49 @@ status_t OMXCameraAdapter::startImageCapture()
}
if ( NO_ERROR == ret ) {
- if (mPendingCaptureSettings & SetRotation) {
+ if (capParams->mPendingCaptureSettings & SetRotation) {
mPendingCaptureSettings &= ~SetRotation;
ret = setPictureRotation(mPictureRotation);
if ( NO_ERROR != ret ) {
CAMHAL_LOGEB("Error configuring image rotation %x", ret);
}
}
- }
- // need to enable wb data for video snapshot to fill in exif data
- if ((ret == NO_ERROR) && (mCapMode == VIDEO_MODE)) {
- // video snapshot uses wb data from snapshot frame
- ret = setExtraData(true, mCameraAdapterParameters.mPrevPortIndex, OMX_WhiteBalance);
+ if (capParams->mPendingCaptureSettings & SetBurstExpBracket) {
+ mPendingCaptureSettings &= ~SetBurstExpBracket;
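+            // When bracketing is already programmed (mBracketingSet), re-send the
+            // shot configuration with zero valid entries and zero burst frames;
+            // otherwise program the full bracketing table and burst frame count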
+ if ( mBracketingSet ) {
+ ret = doExposureBracketing(capParams->mExposureBracketingValues,
+ capParams->mExposureGainBracketingValues,
+ capParams->mExposureGainBracketingModes,
+ 0,
+ 0,
+ capParams->mFlushShotConfigQueue,
+ capParams->mExposureBracketMode);
+ } else {
+ ret = doExposureBracketing(capParams->mExposureBracketingValues,
+ capParams->mExposureGainBracketingValues,
+ capParams->mExposureGainBracketingModes,
+ capParams->mExposureBracketingValidEntries,
+ capParams->mBurstFrames,
+ capParams->mFlushShotConfigQueue,
+ capParams->mExposureBracketMode);
+ }
+
+ if ( ret != NO_ERROR ) {
+                CAMHAL_LOGEB("doExposureBracketing() failed %d", ret);
+ goto EXIT;
+ }
+ }
}
- //OMX shutter callback events are only available in hq mode
- if ( (HIGH_QUALITY == mCapMode) || (HIGH_QUALITY_ZSL== mCapMode))
- {
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+ CameraHal::PPM("startImageCapture bracketing configs done: ", &mStartCapture);
+#endif
+
+ capData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex];
+ //OMX shutter callback events are only available in hq mode
+ if ( (HIGH_QUALITY == mCapMode) || (HIGH_QUALITY_ZSL== mCapMode)) {
if ( NO_ERROR == ret )
{
ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp,
@@ -794,27 +1218,79 @@ status_t OMXCameraAdapter::startImageCapture()
ret = setShutterCallback(true);
}
+ }
+
+ if (mPending3Asettings) {
+ apply3Asettings(mParameters3A);
+ }
+
+ if (ret == NO_ERROR) {
+ int index = 0;
+ int queued = 0;
+ android::AutoMutex lock(mBurstLock);
+
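+        // A queue flush restarts the shot bookkeeping: only buffers already marked
+        // FILL count as queued. Without a flush the new burst is added on top of
+        // the outstanding counts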
+ if (capParams->mFlushShotConfigQueue) {
+ // reset shot queue
+ mCapturedFrames = mBurstFrames;
+ mBurstFramesAccum = mBurstFrames;
+ mBurstFramesQueued = 0;
+ for ( int index = 0 ; index < capData->mNumBufs ; index++ ) {
+ if (OMXCameraPortParameters::FILL == capData->mStatus[index]) {
+ mBurstFramesQueued++;
+ }
+ }
+ } else {
+ mCapturedFrames += mBurstFrames;
+ mBurstFramesAccum += mBurstFrames;
+ }
+ CAMHAL_LOGD("mBurstFramesQueued = %d mBurstFramesAccum = %d index = %d "
+ "capData->mNumBufs = %d queued = %d capData->mMaxQueueable = %d",
+ mBurstFramesQueued,mBurstFramesAccum,index,
+ capData->mNumBufs,queued,capData->mMaxQueueable);
+        CAMHAL_LOGD("can queue more burst frames: %d", (mBurstFramesQueued < mBurstFramesAccum)
+                    && (index < capData->mNumBufs)
+                    && (queued < capData->mMaxQueueable));
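+        // Queue idle capture buffers until the accumulated burst target is met,
+        // every buffer slot has been visited, or the port's max queueable limit is hit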
+ while ((mBurstFramesQueued < mBurstFramesAccum) &&
+ (index < capData->mNumBufs) &&
+ (queued < capData->mMaxQueueable)) {
+ if (capData->mStatus[index] == OMXCameraPortParameters::IDLE) {
+ CAMHAL_LOGDB("Queuing buffer on Capture port - %p",
+ capData->mBufferHeader[index]->pBuffer);
+ capData->mStatus[index] = OMXCameraPortParameters::FILL;
+ eError = OMX_FillThisBuffer(mCameraAdapterParameters.mHandleComp,
+ (OMX_BUFFERHEADERTYPE*)capData->mBufferHeader[index]);
+ GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
+ mBurstFramesQueued++;
+ queued++;
+ } else if (OMXCameraPortParameters::FILL == capData->mStatus[index]) {
+ CAMHAL_LOGE("Not queueing index = %d", index);
+ queued++;
+ }
+ index++;
}
- if ( NO_ERROR == ret ) {
- capData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex];
+#ifdef CAMERAHAL_USE_RAW_IMAGE_SAVING
+ if (mRawCapture) {
+ capData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mVideoPortIndex];
- ///Queue all the buffers on capture port
- for ( int index = 0 ; index < capData->mNumBufs ; index++ ) {
- CAMHAL_LOGDB("Queuing buffer on Capture port - 0x%x",
- ( unsigned int ) capData->mBufferHeader[index]->pBuffer);
- eError = OMX_FillThisBuffer(mCameraAdapterParameters.mHandleComp,
+ ///Queue all the buffers on capture port
+ for ( int index = 0 ; index < capData->mNumBufs ; index++ ) {
+ CAMHAL_LOGDB("Queuing buffer on Video port (for RAW capture) - 0x%x", ( unsigned int ) capData->mBufferHeader[index]->pBuffer);
+ capData->mStatus[index] = OMXCameraPortParameters::FILL;
+ eError = OMX_FillThisBuffer(mCameraAdapterParameters.mHandleComp,
(OMX_BUFFERHEADERTYPE*)capData->mBufferHeader[index]);
- GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
+ GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
+ }
}
+#endif
mWaitingForSnapshot = true;
mCaptureSignalled = false;
// Capturing command is not needed when capturing in video mode
// Only need to queue buffers on image ports
- if (mCapMode != VIDEO_MODE) {
+ if ( ( mCapMode != VIDEO_MODE ) && ( mCapMode != VIDEO_MODE_HQ ) ) {
OMX_INIT_STRUCT_PTR (&bOMX, OMX_CONFIG_BOOLEANTYPE);
bOMX.bEnabled = OMX_TRUE;
@@ -829,10 +1305,14 @@ status_t OMXCameraAdapter::startImageCapture()
}
}
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+ CameraHal::PPM("startImageCapture image buffers queued and capture enabled: ", &mStartCapture);
+#endif
+
//OMX shutter callback events are only available in hq mode
+
if ( (HIGH_QUALITY == mCapMode) || (HIGH_QUALITY_ZSL== mCapMode))
{
-
if ( NO_ERROR == ret )
{
ret = mStartCaptureSem.WaitTimeout(OMX_CAPTURE_TIMEOUT);
@@ -863,16 +1343,19 @@ status_t OMXCameraAdapter::startImageCapture()
}
- return (ret | ErrorUtils::omxToAndroidError(eError));
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+ CameraHal::PPM("startImageCapture shutter event received: ", &mStartCapture);
+#endif
+
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
EXIT:
CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError);
- setExtraData(false, mCameraAdapterParameters.mPrevPortIndex, OMX_WhiteBalance);
mWaitingForSnapshot = false;
mCaptureSignalled = false;
performCleanupAfterError();
LOG_FUNCTION_NAME_EXIT;
- return (ret | ErrorUtils::omxToAndroidError(eError));
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
}
status_t OMXCameraAdapter::stopImageCapture()
@@ -884,6 +1367,8 @@ status_t OMXCameraAdapter::stopImageCapture()
LOG_FUNCTION_NAME;
+ android::AutoMutex lock(mImageCaptureLock);
+
if (!mCaptureConfigured) {
//Capture is not ongoing, return from here
return NO_ERROR;
@@ -894,9 +1379,13 @@ status_t OMXCameraAdapter::stopImageCapture()
goto EXIT;
}
+ // TODO(XXX): Reprocessing is currently piggy-backing capture commands
+ if (mAdapterState == REPROCESS_STATE) {
+ ret = stopReprocess();
+ }
+
//Disable the callback first
mWaitingForSnapshot = false;
- mSnapshotCount = 0;
// OMX shutter callback events are only available in hq mode
if ((HIGH_QUALITY == mCapMode) || (HIGH_QUALITY_ZSL== mCapMode)) {
@@ -916,11 +1405,24 @@ status_t OMXCameraAdapter::stopImageCapture()
}
mStartCaptureSem.Create(0);
}
- }
+ } else if (CP_CAM == mCapMode) {
+ // Reset shot config queue
+ OMX_TI_CONFIG_ENQUEUESHOTCONFIGS resetShotConfigs;
+ OMX_INIT_STRUCT_PTR(&resetShotConfigs, OMX_TI_CONFIG_ENQUEUESHOTCONFIGS);
- // After capture, face detection should be disabled
- // and application needs to restart face detection
- stopFaceDetection();
+ resetShotConfigs.nPortIndex = mCameraAdapterParameters.mImagePortIndex;
+ resetShotConfigs.bFlushQueue = OMX_TRUE;
+ resetShotConfigs.nNumConfigs = 0;
+ eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+ ( OMX_INDEXTYPE ) OMX_TI_IndexConfigEnqueueShotConfigs,
+ &resetShotConfigs);
+ if ( OMX_ErrorNone != eError ) {
+            CAMHAL_LOGEB("Error while resetting shot config 0x%x", eError);
+ goto EXIT;
+ } else {
+ CAMHAL_LOGDA("Shot config reset successfully");
+ }
+ }
//Wait here for the capture to be done, in worst case timeout and proceed with cleanup
mCaptureSem.WaitTimeout(OMX_CAPTURE_TIMEOUT);
@@ -934,7 +1436,7 @@ status_t OMXCameraAdapter::stopImageCapture()
// Disable image capture
// Capturing command is not needed when capturing in video mode
- if (mCapMode != VIDEO_MODE) {
+ if ( ( mCapMode != VIDEO_MODE ) && ( mCapMode != VIDEO_MODE_HQ ) ) {
OMX_INIT_STRUCT_PTR (&bOMX, OMX_CONFIG_BOOLEANTYPE);
bOMX.bEnabled = OMX_FALSE;
imgCaptureData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex];
@@ -948,23 +1450,37 @@ status_t OMXCameraAdapter::stopImageCapture()
}
}
- // had to enable wb data for video snapshot to fill in exif data
- // now that we are done...disable
- if ((ret == NO_ERROR) && (mCapMode == VIDEO_MODE)) {
- ret = setExtraData(false, mCameraAdapterParameters.mPrevPortIndex, OMX_WhiteBalance);
- }
-
CAMHAL_LOGDB("Capture set - 0x%x", eError);
mCaptureSignalled = true; //set this to true if we exited because of timeout
{
- Mutex::Autolock lock(mFrameCountMutex);
+ android::AutoMutex lock(mFrameCountMutex);
mFrameCount = 0;
mFirstFrameCondition.broadcast();
}
- return (ret | ErrorUtils::omxToAndroidError(eError));
+ // Stop is always signalled externally in CPCAM mode
+ // We need to make sure we really stop
+ if ((mCapMode == CP_CAM)) {
+ disableReprocess();
+ disableImagePort();
+ if ( NULL != mReleaseImageBuffersCallback ) {
+ mReleaseImageBuffersCallback(mReleaseData);
+ }
+ }
+
+    // The change from commit c78fa2a (CameraHAL: Always reset focus mode after capture)
+    // is applied here as an optimization for continuous capture, so focus settings
+    // don't have to be reapplied after each capture.
+    // Workaround: when doing many consecutive shots, CAF wasn't getting restarted.
+ mPending3Asettings |= SetFocus;
+
+ mCapturedFrames = 0;
+ mBurstFramesAccum = 0;
+ mBurstFramesQueued = 0;
+
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
EXIT:
CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError);
@@ -974,20 +1490,21 @@ EXIT:
}
{
- Mutex::Autolock lock(mFrameCountMutex);
+ android::AutoMutex lock(mFrameCountMutex);
mFrameCount = 0;
mFirstFrameCondition.broadcast();
}
performCleanupAfterError();
LOG_FUNCTION_NAME_EXIT;
- return (ret | ErrorUtils::omxToAndroidError(eError));
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
}
status_t OMXCameraAdapter::disableImagePort(){
status_t ret = NO_ERROR;
OMX_ERRORTYPE eError = OMX_ErrorNone;
OMXCameraPortParameters *imgCaptureData = NULL;
+ OMXCameraPortParameters *imgRawCaptureData = NULL;
if (!mCaptureConfigured) {
return NO_ERROR;
@@ -995,6 +1512,7 @@ status_t OMXCameraAdapter::disableImagePort(){
mCaptureConfigured = false;
imgCaptureData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex];
+ imgRawCaptureData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mVideoPortIndex]; // for RAW capture
///Register for Image port Disable event
ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp,
@@ -1044,19 +1562,164 @@ status_t OMXCameraAdapter::disableImagePort(){
goto EXIT;
}
- EXIT:
- return (ret | ErrorUtils::omxToAndroidError(eError));
+ deinitInternalBuffers(mCameraAdapterParameters.mImagePortIndex);
+
+ // since port settings are not persistent after port is disabled...
+ mPendingCaptureSettings |= SetFormat;
+
+#ifdef CAMERAHAL_USE_RAW_IMAGE_SAVING
+
+ if (mRawCapture) {
+ ///Register for Video port Disable event
+ ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp,
+ OMX_EventCmdComplete,
+ OMX_CommandPortDisable,
+ mCameraAdapterParameters.mVideoPortIndex,
+ mStopCaptureSem);
+ ///Disable RawCapture Port
+ eError = OMX_SendCommand(mCameraAdapterParameters.mHandleComp,
+ OMX_CommandPortDisable,
+ mCameraAdapterParameters.mVideoPortIndex,
+ NULL);
+
+ GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
+
+ ///Free all the buffers on RawCapture port
+ if (imgRawCaptureData) {
+            CAMHAL_LOGDB("Freeing buffers on Video port (RAW capture) - %d", imgRawCaptureData->mNumBufs);
+            for ( int index = 0 ; index < imgRawCaptureData->mNumBufs ; index++) {
+                CAMHAL_LOGDB("Freeing buffer on Video port (RAW capture) - 0x%x", ( unsigned int ) imgRawCaptureData->mBufferHeader[index]->pBuffer);
+ eError = OMX_FreeBuffer(mCameraAdapterParameters.mHandleComp,
+ mCameraAdapterParameters.mVideoPortIndex,
+ (OMX_BUFFERHEADERTYPE*)imgRawCaptureData->mBufferHeader[index]);
+
+ GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
+ }
+ }
+ CAMHAL_LOGDA("Waiting for Video port disable");
+        //Wait for the video port disable event
+ mStopCaptureSem.WaitTimeout(OMX_CMD_TIMEOUT);
+ CAMHAL_LOGDA("Video Port disabled");
+ }
+#endif
+
+EXIT:
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
+}
+
+status_t OMXCameraAdapter::initInternalBuffers(OMX_U32 portIndex)
+{
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ int index = 0;
+ OMX_TI_PARAM_USEBUFFERDESCRIPTOR bufferdesc;
+
+ /* Indicate to Ducati that we're planning to use dynamically-mapped buffers */
+ OMX_INIT_STRUCT_PTR (&bufferdesc, OMX_TI_PARAM_USEBUFFERDESCRIPTOR);
+ bufferdesc.nPortIndex = portIndex;
+ bufferdesc.bEnabled = OMX_FALSE;
+ bufferdesc.eBufferType = OMX_TI_BufferTypePhysicalPageList;
+
+ eError = OMX_SetParameter(mCameraAdapterParameters.mHandleComp,
+ (OMX_INDEXTYPE) OMX_TI_IndexUseBufferDescriptor,
+ &bufferdesc);
+ if (eError!=OMX_ErrorNone) {
+ CAMHAL_LOGEB("OMX_SetParameter - %x", eError);
+ return -EINVAL;
+ }
+
+ CAMHAL_LOGDA("Initializing internal buffers");
+ do {
+ OMX_TI_PARAM_COMPONENTBUFALLOCTYPE bufferalloc;
+ OMX_TI_PARAM_COMPONENTBUFALLOCTYPE bufferallocset;
+ OMX_INIT_STRUCT_PTR (&bufferalloc, OMX_TI_PARAM_COMPONENTBUFALLOCTYPE);
+ bufferalloc.nPortIndex = portIndex;
+ bufferalloc.nIndex = index;
+
+ eError = OMX_GetParameter (mCameraAdapterParameters.mHandleComp,
+ (OMX_INDEXTYPE)OMX_TI_IndexParamComponentBufferAllocation,
+ &bufferalloc);
+ if (eError == OMX_ErrorNoMore) {
+ return NO_ERROR;
+ }
+ if (eError != OMX_ErrorNone) {
+ CAMHAL_LOGE("GetParameter failed error = 0x%x", eError);
+ break;
+ }
+
+ CAMHAL_LOGDB("Requesting buftype %d of size %dx%d",
+ (int)bufferalloc.eBufType, (int)bufferalloc.nAllocWidth,
+ (int)bufferalloc.nAllocLines);
+
+ bufferalloc.eBufType = OMX_TI_BufferTypeHardwareReserved1D;
+
+ OMX_INIT_STRUCT_PTR (&bufferallocset, OMX_TI_PARAM_COMPONENTBUFALLOCTYPE);
+ bufferallocset.nPortIndex = portIndex;
+ bufferallocset.nIndex = index;
+ bufferallocset.eBufType = OMX_TI_BufferTypeHardwareReserved1D;
+ bufferallocset.nAllocWidth = bufferalloc.nAllocWidth;
+ bufferallocset.nAllocLines = bufferalloc.nAllocLines;
+
+ eError = OMX_SetParameter (mCameraAdapterParameters.mHandleComp,
+ (OMX_INDEXTYPE)OMX_TI_IndexParamComponentBufferAllocation,
+ &bufferallocset);
+ if (eError != OMX_ErrorNone) {
+ CAMHAL_LOGE("SetParameter failed, error=%08x", eError);
+ if (eError == OMX_ErrorNoMore) return NO_ERROR;
+ break;
+ }
+
+ index++;
+
+ /* 1 is an arbitrary limit */
+ } while (index < 1);
+
+ CAMHAL_LOGV("Ducati requested too many (>1) internal buffers");
+
+ return -EINVAL;
}
+status_t OMXCameraAdapter::deinitInternalBuffers(OMX_U32 portIndex)
+{
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_TI_PARAM_USEBUFFERDESCRIPTOR bufferdesc;
+
+ OMX_INIT_STRUCT_PTR (&bufferdesc, OMX_TI_PARAM_USEBUFFERDESCRIPTOR);
+ bufferdesc.nPortIndex = portIndex;
+ bufferdesc.bEnabled = OMX_FALSE;
+ bufferdesc.eBufferType = OMX_TI_BufferTypeDefault;
+
+ eError = OMX_SetParameter(mCameraAdapterParameters.mHandleComp,
+ (OMX_INDEXTYPE) OMX_TI_IndexUseBufferDescriptor,
+ &bufferdesc);
+ if (eError!=OMX_ErrorNone) {
+ CAMHAL_LOGEB("OMX_SetParameter - %x", eError);
+ return -EINVAL;
+ }
+
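+    // Shrink the component-side allocation back to a minimal 1x1 default buffer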
+ OMX_TI_PARAM_COMPONENTBUFALLOCTYPE bufferalloc;
+ OMX_INIT_STRUCT_PTR (&bufferalloc, OMX_TI_PARAM_COMPONENTBUFALLOCTYPE);
+ bufferalloc.nPortIndex = portIndex;
+ bufferalloc.eBufType = OMX_TI_BufferTypeDefault;
+ bufferalloc.nAllocWidth = 1;
+ bufferalloc.nAllocLines = 1;
+ eError = OMX_SetParameter(mCameraAdapterParameters.mHandleComp,
+ (OMX_INDEXTYPE) OMX_TI_IndexParamComponentBufferAllocation,
+ &bufferalloc);
+ if (eError!=OMX_ErrorNone) {
+ CAMHAL_LOGEB("OMX_SetParameter - %x", eError);
+ return -EINVAL;
+ }
+
+ return Utils::ErrorUtils::omxToAndroidError(eError);
+}
-status_t OMXCameraAdapter::UseBuffersCapture(void* bufArr, int num)
+status_t OMXCameraAdapter::UseBuffersCapture(CameraBuffer * bufArr, int num)
{
LOG_FUNCTION_NAME;
status_t ret = NO_ERROR;
OMX_ERRORTYPE eError = OMX_ErrorNone;
OMXCameraPortParameters * imgCaptureData = NULL;
- uint32_t *buffers = (uint32_t*)bufArr;
OMXCameraPortParameters cap;
imgCaptureData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex];
@@ -1067,135 +1730,178 @@ status_t OMXCameraAdapter::UseBuffersCapture(void* bufArr, int num)
return BAD_VALUE;
}
- // capture is already configured...we can skip this step
- if (mCaptureConfigured) {
+ CAMHAL_ASSERT(num > 0);
- if ( NO_ERROR == ret )
- {
- ret = setupEXIF();
- if ( NO_ERROR != ret )
- {
- CAMHAL_LOGEB("Error configuring EXIF Buffer %x", ret);
- }
+    // If any pending setting requires an OMX_SetParameter call (including a
+    // change of buffer type), the port has to be disabled before the new
+    // settings can be applied
+ if ((mPendingCaptureSettings & ECaptureParamSettings) ||
+ bufArr[0].type != imgCaptureData->mBufferType ||
+ imgCaptureData->mNumBufs != num) {
+ if (mCaptureConfigured) {
+ disableImagePort();
+ if ( NULL != mReleaseImageBuffersCallback ) {
+ mReleaseImageBuffersCallback(mReleaseData);
}
+ }
- mCapturedFrames = mBurstFrames;
- return NO_ERROR;
- }
-
- imgCaptureData->mNumBufs = num;
-
- //TODO: Support more pixelformats
+ imgCaptureData->mBufferType = bufArr[0].type;
+ imgCaptureData->mNumBufs = num;
- CAMHAL_LOGDB("Params Width = %d", (int)imgCaptureData->mWidth);
- CAMHAL_LOGDB("Params Height = %d", (int)imgCaptureData->mWidth);
+ CAMHAL_LOGDB("Params Width = %d", (int)imgCaptureData->mWidth);
+ CAMHAL_LOGDB("Params Height = %d", (int)imgCaptureData->mHeight);
- if (mPendingCaptureSettings & SetFormat) {
- mPendingCaptureSettings &= ~SetFormat;
- ret = setFormat(OMX_CAMERA_PORT_IMAGE_OUT_IMAGE, *imgCaptureData);
- if ( ret != NO_ERROR ) {
- CAMHAL_LOGEB("setFormat() failed %d", ret);
- LOG_FUNCTION_NAME_EXIT;
- return ret;
+ if (mPendingCaptureSettings & SetFormat) {
+ mPendingCaptureSettings &= ~SetFormat;
+ ret = setFormat(OMX_CAMERA_PORT_IMAGE_OUT_IMAGE, *imgCaptureData);
+ if ( ret != NO_ERROR ) {
+ CAMHAL_LOGEB("setFormat() failed %d", ret);
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+ }
}
- }
- if (mPendingCaptureSettings & SetThumb) {
- mPendingCaptureSettings &= ~SetThumb;
- ret = setThumbnailParams(mThumbWidth, mThumbHeight, mThumbQuality);
- if ( NO_ERROR != ret) {
- CAMHAL_LOGEB("Error configuring thumbnail size %x", ret);
- return ret;
+ if (mPendingCaptureSettings & SetThumb) {
+ mPendingCaptureSettings &= ~SetThumb;
+ ret = setThumbnailParams(mThumbWidth, mThumbHeight, mThumbQuality);
+ if ( NO_ERROR != ret) {
+ CAMHAL_LOGEB("Error configuring thumbnail size %x", ret);
+ return ret;
+ }
}
- }
- if (mPendingCaptureSettings & SetExpBracket) {
- mPendingCaptureSettings &= ~SetExpBracket;
- ret = setExposureBracketing( mExposureBracketingValues,
- mExposureBracketingValidEntries, mBurstFrames);
- if ( ret != NO_ERROR ) {
- CAMHAL_LOGEB("setExposureBracketing() failed %d", ret);
- goto EXIT;
+ if (mPendingCaptureSettings & SetQuality) {
+ mPendingCaptureSettings &= ~SetQuality;
+ ret = setImageQuality(mPictureQuality);
+ if ( NO_ERROR != ret) {
+ CAMHAL_LOGEB("Error configuring image quality %x", ret);
+ goto EXIT;
+ }
}
- }
- if (mPendingCaptureSettings & SetQuality) {
- mPendingCaptureSettings &= ~SetQuality;
- ret = setImageQuality(mPictureQuality);
- if ( NO_ERROR != ret) {
- CAMHAL_LOGEB("Error configuring image quality %x", ret);
- goto EXIT;
- }
- }
+ // Configure DOMX to use either gralloc handles or vptrs
+ {
+ OMX_TI_PARAMUSENATIVEBUFFER domxUseGrallocHandles;
+ OMX_INIT_STRUCT_PTR (&domxUseGrallocHandles, OMX_TI_PARAMUSENATIVEBUFFER);
+
+ domxUseGrallocHandles.nPortIndex = mCameraAdapterParameters.mImagePortIndex;
+ if (bufArr[0].type == CAMERA_BUFFER_ANW) {
+ CAMHAL_LOGD ("Using ANW Buffers");
+ initInternalBuffers(mCameraAdapterParameters.mImagePortIndex);
+ domxUseGrallocHandles.bEnable = OMX_TRUE;
+ } else {
+ CAMHAL_LOGD ("Using ION Buffers");
+ domxUseGrallocHandles.bEnable = OMX_FALSE;
+ }
- ///Register for Image port ENABLE event
- ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp,
- OMX_EventCmdComplete,
- OMX_CommandPortEnable,
- mCameraAdapterParameters.mImagePortIndex,
- mUseCaptureSem);
+ eError = OMX_SetParameter(mCameraAdapterParameters.mHandleComp,
+ (OMX_INDEXTYPE)OMX_TI_IndexUseNativeBuffers, &domxUseGrallocHandles);
+ if (eError!=OMX_ErrorNone) {
+ CAMHAL_LOGEB("OMX_SetParameter - %x", eError);
+ }
+ GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
+ }
- ///Enable Capture Port
- eError = OMX_SendCommand(mCameraAdapterParameters.mHandleComp,
- OMX_CommandPortEnable,
- mCameraAdapterParameters.mImagePortIndex,
- NULL);
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
- CAMHAL_LOGDB("OMX_UseBuffer = 0x%x", eError);
- GOTO_EXIT_IF(( eError != OMX_ErrorNone ), eError);
+ CameraHal::PPM("Takepicture image port configuration: ", &bufArr->ppmStamp);
- for ( int index = 0 ; index < imgCaptureData->mNumBufs ; index++ )
- {
- OMX_BUFFERHEADERTYPE *pBufferHdr;
- CAMHAL_LOGDB("OMX_UseBuffer Capture address: 0x%x, size = %d",
- (unsigned int)buffers[index],
- (int)imgCaptureData->mBufSize);
+#endif
- eError = OMX_UseBuffer(mCameraAdapterParameters.mHandleComp,
- &pBufferHdr,
+ // Register for Image port ENABLE event
+ ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp,
+ OMX_EventCmdComplete,
+ OMX_CommandPortEnable,
mCameraAdapterParameters.mImagePortIndex,
- 0,
- mCaptureBuffersLength,
- (OMX_U8*)buffers[index]);
+ mUseCaptureSem);
+
+ // Enable Capture Port
+ eError = OMX_SendCommand(mCameraAdapterParameters.mHandleComp,
+ OMX_CommandPortEnable,
+ mCameraAdapterParameters.mImagePortIndex,
+ NULL);
CAMHAL_LOGDB("OMX_UseBuffer = 0x%x", eError);
GOTO_EXIT_IF(( eError != OMX_ErrorNone ), eError);
- pBufferHdr->pAppPrivate = (OMX_PTR) index;
- pBufferHdr->nSize = sizeof(OMX_BUFFERHEADERTYPE);
- pBufferHdr->nVersion.s.nVersionMajor = 1 ;
- pBufferHdr->nVersion.s.nVersionMinor = 1 ;
- pBufferHdr->nVersion.s.nRevision = 0;
- pBufferHdr->nVersion.s.nStep = 0;
- imgCaptureData->mBufferHeader[index] = pBufferHdr;
- }
+ for (int index = 0 ; index < imgCaptureData->mNumBufs ; index++) {
+ OMX_BUFFERHEADERTYPE *pBufferHdr;
+ CAMHAL_LOGDB("OMX_UseBuffer Capture address: 0x%x, size = %d",
+ (unsigned int)bufArr[index].opaque,
+ (int)imgCaptureData->mBufSize);
+
+ eError = OMX_UseBuffer(mCameraAdapterParameters.mHandleComp,
+ &pBufferHdr,
+ mCameraAdapterParameters.mImagePortIndex,
+ 0,
+ imgCaptureData->mBufSize,
+ (OMX_U8*)camera_buffer_get_omx_ptr(&bufArr[index]));
+
+ CAMHAL_LOGDB("OMX_UseBuffer = 0x%x", eError);
+ GOTO_EXIT_IF(( eError != OMX_ErrorNone ), eError);
+
+ pBufferHdr->pAppPrivate = (OMX_PTR) &bufArr[index];
+ bufArr[index].index = index;
+ pBufferHdr->nSize = sizeof(OMX_BUFFERHEADERTYPE);
+ pBufferHdr->nVersion.s.nVersionMajor = 1 ;
+ pBufferHdr->nVersion.s.nVersionMinor = 1 ;
+ pBufferHdr->nVersion.s.nRevision = 0;
+ pBufferHdr->nVersion.s.nStep = 0;
+ imgCaptureData->mBufferHeader[index] = pBufferHdr;
+ imgCaptureData->mStatus[index] = OMXCameraPortParameters::IDLE;
+ }
+
+ // Wait for the image port enable event
+ CAMHAL_LOGDA("Waiting for port enable");
+ ret = mUseCaptureSem.WaitTimeout(OMX_CMD_TIMEOUT);
+
+        // If something bad happened while we wait
+        if (mComponentState == OMX_StateInvalid) {
+            CAMHAL_LOGEA("Invalid state after enabling image port, exiting!!!");
+ goto EXIT;
+ }
- //Wait for the image port enable event
- CAMHAL_LOGDA("Waiting for port enable");
- ret = mUseCaptureSem.WaitTimeout(OMX_CMD_TIMEOUT);
+ if (ret != NO_ERROR) {
+ ret |= RemoveEvent(mCameraAdapterParameters.mHandleComp,
+ OMX_EventCmdComplete,
+ OMX_CommandPortEnable,
+ mCameraAdapterParameters.mImagePortIndex,
+ NULL);
+ CAMHAL_LOGDA("Timeout expired on port enable");
+ goto EXIT;
+ }
+ CAMHAL_LOGDA("Port enabled");
- //If somethiing bad happened while we wait
- if (mComponentState == OMX_StateInvalid)
- {
- CAMHAL_LOGEA("Invalid State after Enable Image Port Exitting!!!");
- goto EXIT;
- }
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
- if ( ret == NO_ERROR )
- {
- CAMHAL_LOGDA("Port enabled");
+ CameraHal::PPM("Takepicture image port enabled and buffers registered: ", &bufArr->ppmStamp);
+
+#endif
+
+ if (mNextState != LOADED_REPROCESS_CAPTURE_STATE) {
+ // Enable WB and vector shot extra data for metadata
+ setExtraData(true, mCameraAdapterParameters.mImagePortIndex, OMX_WhiteBalance);
+ setExtraData(true, mCameraAdapterParameters.mImagePortIndex, OMX_TI_LSCTable);
}
- else
- {
- ret |= RemoveEvent(mCameraAdapterParameters.mHandleComp,
- OMX_EventCmdComplete,
- OMX_CommandPortEnable,
- mCameraAdapterParameters.mImagePortIndex,
- NULL);
- CAMHAL_LOGDA("Timeout expired on port enable");
- goto EXIT;
+
+ // CPCam mode only supports vector shot
+ // Regular capture is not supported
+ if ( (mCapMode == CP_CAM) && (mNextState != LOADED_REPROCESS_CAPTURE_STATE) ) {
+ initVectorShot();
}
+ mCaptureBuffersAvailable.clear();
+ for (unsigned int i = 0; i < imgCaptureData->mMaxQueueable; i++ ) {
+ mCaptureBuffersAvailable.add(&mCaptureBuffers[i], 0);
+ }
+
+        // initial ref count for undequeued buffers is 1 since buffer provider
+ // is still holding on to it
+ for (unsigned int i = imgCaptureData->mMaxQueueable; i < imgCaptureData->mNumBufs; i++ ) {
+ mCaptureBuffersAvailable.add(&mCaptureBuffers[i], 1);
+ }
+ }
+
if ( NO_ERROR == ret )
{
ret = setupEXIF();
@@ -1205,21 +1911,184 @@ status_t OMXCameraAdapter::UseBuffersCapture(void* bufArr, int num)
}
}
- mCapturedFrames = mBurstFrames;
+    // Choose the proper single preview mode for CPCam capture (reprocess or high speed)
+ if (( NO_ERROR == ret) && (OMXCameraAdapter::CP_CAM == mCapMode)) {
+ OMX_TI_CONFIG_SINGLEPREVIEWMODETYPE singlePrevMode;
+ OMX_INIT_STRUCT_PTR (&singlePrevMode, OMX_TI_CONFIG_SINGLEPREVIEWMODETYPE);
+ if (mNextState == LOADED_CAPTURE_STATE) {
+ singlePrevMode.eMode = OMX_TI_SinglePreviewMode_ImageCaptureHighSpeed;
+ } else if (mNextState == LOADED_REPROCESS_CAPTURE_STATE) {
+ singlePrevMode.eMode = OMX_TI_SinglePreviewMode_Reprocess;
+ } else {
+ CAMHAL_LOGE("Wrong state trying to start a capture in CPCAM mode?");
+ singlePrevMode.eMode = OMX_TI_SinglePreviewMode_ImageCaptureHighSpeed;
+ }
+ eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+ (OMX_INDEXTYPE) OMX_TI_IndexConfigSinglePreviewMode,
+ &singlePrevMode);
+ if ( OMX_ErrorNone != eError ) {
+ CAMHAL_LOGEB("Error while configuring single preview mode 0x%x", eError);
+ ret = Utils::ErrorUtils::omxToAndroidError(eError);
+ } else {
+ CAMHAL_LOGDA("single preview mode configured successfully");
+ }
+ }
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+ CameraHal::PPM("Takepicture extra configs on image port done: ", &bufArr->ppmStamp);
+
+#endif
+
mCaptureConfigured = true;
- return (ret | ErrorUtils::omxToAndroidError(eError));
+#ifdef CAMERAHAL_USE_RAW_IMAGE_SAVING
+ if (mRawCapture) {
+ mCaptureConfigured = false;
+ }
+#endif
+
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
EXIT:
CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError);
+ setExtraData(false, mCameraAdapterParameters.mImagePortIndex, OMX_WhiteBalance);
+ // TODO: WA: if domx client disables VectShotInfo metadata on the image port, this causes
+ // VectShotInfo to be disabled internally on preview port also. Remove setting in OMXCapture
+ // setExtraData(false, mCameraAdapterParameters.mImagePortIndex, OMX_TI_VectShotInfo);
+ setExtraData(false, mCameraAdapterParameters.mImagePortIndex, OMX_TI_LSCTable);
//Release image buffers
if ( NULL != mReleaseImageBuffersCallback ) {
mReleaseImageBuffersCallback(mReleaseData);
}
performCleanupAfterError();
LOG_FUNCTION_NAME_EXIT;
- return (ret | ErrorUtils::omxToAndroidError(eError));
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
+
+}
+status_t OMXCameraAdapter::UseBuffersRawCapture(CameraBuffer *bufArr, int num)
+{
+ LOG_FUNCTION_NAME
+    status_t ret = NO_ERROR;
+    OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMXCameraPortParameters * imgRawCaptureData = NULL;
+ Utils::Semaphore camSem;
+ OMXCameraPortParameters cap;
+
+ imgRawCaptureData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mVideoPortIndex];
+
+ if (mCaptureConfigured) {
+ return NO_ERROR;
+ }
+
+ camSem.Create();
+
+ // mWaitingForSnapshot is true only when we're in the process of capturing
+ if (mWaitingForSnapshot) {
+ ///Register for Video port Disable event
+ ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp,
+ (OMX_EVENTTYPE) OMX_EventCmdComplete,
+ OMX_CommandPortDisable,
+ mCameraAdapterParameters.mVideoPortIndex,
+ camSem);
+
+ ///Disable Capture Port
+ eError = OMX_SendCommand(mCameraAdapterParameters.mHandleComp,
+ OMX_CommandPortDisable,
+ mCameraAdapterParameters.mVideoPortIndex,
+ NULL);
+
+ CAMHAL_LOGDA("Waiting for port disable");
+        //Wait for the video port disable event
+ camSem.Wait();
+ CAMHAL_LOGDA("Port disabled");
+ }
+
+ imgRawCaptureData->mNumBufs = num;
+
+ CAMHAL_LOGDB("RAW Max sensor width = %d", (int)imgRawCaptureData->mWidth);
+ CAMHAL_LOGDB("RAW Max sensor height = %d", (int)imgRawCaptureData->mHeight);
+
+ ret = setFormat(OMX_CAMERA_PORT_VIDEO_OUT_VIDEO, *imgRawCaptureData);
+
+ if (ret != NO_ERROR) {
+ CAMHAL_LOGEB("setFormat() failed %d", ret);
+ LOG_FUNCTION_NAME_EXIT
+ return ret;
+ }
+
+ ///Register for Video port ENABLE event
+ ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp,
+ (OMX_EVENTTYPE) OMX_EventCmdComplete,
+ OMX_CommandPortEnable,
+ mCameraAdapterParameters.mVideoPortIndex,
+ camSem);
+
+ ///Enable Video Capture Port
+ eError = OMX_SendCommand(mCameraAdapterParameters.mHandleComp,
+ OMX_CommandPortEnable,
+ mCameraAdapterParameters.mVideoPortIndex,
+ NULL);
+
+ mCaptureBuffersLength = (int)imgRawCaptureData->mBufSize;
+ for ( int index = 0 ; index < imgRawCaptureData->mNumBufs ; index++ ) {
+ OMX_BUFFERHEADERTYPE *pBufferHdr;
+ CAMHAL_LOGDB("OMX_UseBuffer rawCapture address: 0x%x, size = %d ",
+ (unsigned int)bufArr[index].opaque,
+ (int)imgRawCaptureData->mBufSize );
+ eError = OMX_UseBuffer( mCameraAdapterParameters.mHandleComp,
+ &pBufferHdr,
+ mCameraAdapterParameters.mVideoPortIndex,
+ 0,
+ mCaptureBuffersLength,
+ (OMX_U8*)camera_buffer_get_omx_ptr(&bufArr[index]));
+ if (eError != OMX_ErrorNone) {
+ CAMHAL_LOGEB("OMX_UseBuffer = 0x%x", eError);
+ }
+
+ GOTO_EXIT_IF(( eError != OMX_ErrorNone ), eError);
+
+ pBufferHdr->pAppPrivate = (OMX_PTR) &bufArr[index];
+ bufArr[index].index = index;
+ pBufferHdr->nSize = sizeof(OMX_BUFFERHEADERTYPE);
+ pBufferHdr->nVersion.s.nVersionMajor = 1 ;
+ pBufferHdr->nVersion.s.nVersionMinor = 1 ;
+ pBufferHdr->nVersion.s.nRevision = 0;
+ pBufferHdr->nVersion.s.nStep = 0;
+ imgRawCaptureData->mBufferHeader[index] = pBufferHdr;
+
+ }
+
+    //Wait for the video port enable event
+ CAMHAL_LOGDA("Waiting for port enable");
+ camSem.Wait();
+ CAMHAL_LOGDA("Port enabled");
+
+ if (NO_ERROR == ret) {
+ ret = setupEXIF();
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGEB("Error configuring EXIF Buffer %x", ret);
+ }
+ }
+
+ mCapturedFrames = mBurstFrames;
+ mBurstFramesQueued = 0;
+ mCaptureConfigured = true;
+
+ EXIT:
+
+ if (eError != OMX_ErrorNone) {
+ if ( NULL != mErrorNotifier )
+ {
+ mErrorNotifier->errorNotify(eError);
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT
+
+ return ret;
}
-};
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/OMXCameraAdapter/OMXDCC.cpp b/camera/OMXCameraAdapter/OMXDCC.cpp
new file mode 100644
index 0000000..914a53e
--- /dev/null
+++ b/camera/OMXCameraAdapter/OMXDCC.cpp
@@ -0,0 +1,223 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+* @file OMXDCC.cpp
+*
+* This file contains functionality for loading the DCC binaries.
+*
+*/
+
+#include "CameraHal.h"
+#include "OMXCameraAdapter.h"
+#include "ErrorUtils.h"
+#include "OMXDCC.h"
+#include <utils/String8.h>
+#include <utils/Vector.h>
+
+namespace Ti {
+namespace Camera {
+
+android::String8 DCCHandler::DCCPath("/data/misc/camera/");
+bool DCCHandler::mDCCLoaded = false;
+
+status_t DCCHandler::loadDCC(OMX_HANDLETYPE hComponent)
+{
+ OMX_ERRORTYPE dccError = OMX_ErrorNone;
+
+ if (!mDCCLoaded) {
+ dccError = initDCC(hComponent);
+ if (dccError != OMX_ErrorNone) {
+ CAMHAL_LOGE(" Error in DCC Init");
+ }
+
+ mDCCLoaded = true;
+ }
+
+ return Utils::ErrorUtils::omxToAndroidError(dccError);
+}
+
+OMX_ERRORTYPE DCCHandler::initDCC(OMX_HANDLETYPE hComponent)
+{
+ OMX_TI_PARAM_DCCURIINFO param;
+ OMX_PTR ptempbuf;
+ OMX_U16 nIndex = 0;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ int ret;
+ OMX_S32 status = 0;
+ android::Vector<android::String8 *> dccDirs;
+ OMX_U16 i;
+ MemoryManager memMgr;
+ CameraBuffer *dccBuffer = NULL;
+ int dccbuf_size = 0;
+ OMX_INIT_STRUCT_PTR(&param, OMX_TI_PARAM_DCCURIINFO);
+
+    // Read the DCC URI info
+ for (nIndex = 0; eError != OMX_ErrorNoMore; nIndex++) {
+ param.nIndex = nIndex;
+ eError = OMX_GetParameter(hComponent,
+ ( OMX_INDEXTYPE )OMX_TI_IndexParamDccUriInfo,
+ &param);
+
+ if (eError == OMX_ErrorNone) {
+ CAMHAL_LOGD("DCC URI's %s ", param.sDCCURI);
+ android::String8 *dccDir = new android::String8();
+ if ( NULL != dccDir ) {
+ dccDir->clear();
+ dccDir->append(DCCPath);
+ dccDir->append((const char *) param.sDCCURI);
+ dccDir->append("/");
+ dccDirs.add(dccDir);
+ } else {
+ CAMHAL_LOGE("DCC URI not allocated");
+ eError = OMX_ErrorInsufficientResources;
+ goto EXIT;
+ }
+ }
+ }
+
+    // OMX_ErrorNoMore only marks the end of the list, so reset the error back to OMX_ErrorNone
+ if (eError == OMX_ErrorNoMore) {
+ eError = OMX_ErrorNone;
+ }
+
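+    // First pass with a NULL buffer only measures the combined size of all DCC files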
+ dccbuf_size = readDCCdir(NULL, dccDirs);
+ if(dccbuf_size <= 0) {
+ CAMHAL_LOGE("No DCC files found, switching back to default DCC");
+ eError = OMX_ErrorInsufficientResources;
+ goto EXIT;
+ }
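+    // Round the total size up to the next 4 KiB boundary before allocating the buffer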
+ dccbuf_size = ((dccbuf_size + 4095 )/4096)*4096;
+
+ if ( memMgr.initialize() != NO_ERROR ) {
+ CAMHAL_LOGE("DCC memory manager initialization failed!!!");
+ eError = OMX_ErrorInsufficientResources;
+ goto EXIT;
+ }
+
+ dccBuffer = memMgr.allocateBufferList(0, 0, NULL, dccbuf_size, 1);
+ if ( NULL == dccBuffer ) {
+ CAMHAL_LOGE("DCC buffer allocation failed!!!");
+ eError = OMX_ErrorInsufficientResources;
+ goto EXIT;
+ }
+
+ dccbuf_size = readDCCdir(dccBuffer[0].mapped, dccDirs);
+ CAMHAL_ASSERT_X(dccbuf_size > 0,"ERROR in copy DCC files into buffer");
+
+ eError = sendDCCBufPtr(hComponent, dccBuffer);
+
+EXIT:
+
+    // free all of the queued DCC directory strings
+    for (i = 0; i < dccDirs.size(); i++) {
+        delete dccDirs.itemAt(i);
+    }
+    dccDirs.clear();
+
+ if ( NULL != dccBuffer ) {
+ memMgr.freeBufferList(dccBuffer);
+ }
+
+ return eError;
+}
+
+OMX_ERRORTYPE DCCHandler::sendDCCBufPtr(OMX_HANDLETYPE hComponent,
+ CameraBuffer *dccBuffer)
+{
+ OMX_TI_CONFIG_SHAREDBUFFER uribufparam;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_INIT_STRUCT_PTR(&uribufparam, OMX_TI_CONFIG_SHAREDBUFFER);
+
+ CAMHAL_ASSERT_X(dccBuffer != NULL,"ERROR invalid DCC buffer");
+
+ uribufparam.nPortIndex = OMX_ALL;
+ uribufparam.nSharedBuffSize = dccBuffer->size;
+ uribufparam.pSharedBuff = (OMX_U8 *) camera_buffer_get_omx_ptr(dccBuffer);
+
+ eError = OMX_SetParameter(hComponent,
+ ( OMX_INDEXTYPE )OMX_TI_IndexParamDccUriBuffer,
+ &uribufparam);
+ if (eError != OMX_ErrorNone) {
+ CAMHAL_LOGEB(" Error in SetParam for DCC Uri Buffer 0x%x", eError);
+ }
+
+ return eError;
+}
+
+size_t DCCHandler::readDCCdir(OMX_PTR buffer,
+ const android::Vector<android::String8 *> &dirPaths)
+{
+ FILE *pFile;
+ OMX_S32 lSize;
+ OMX_S32 dcc_buf_size = 0;
+ size_t result;
+ OMX_STRING filename;
+ android::String8 temp;
+ const char *dotdot = "..";
+ DIR *d;
+ struct dirent *dir;
+ OMX_U16 i = 0;
+ status_t stat = NO_ERROR;
+ size_t ret = 0;
+
+ for (i = 0; i < dirPaths.size(); i++) {
+ d = opendir(dirPaths.itemAt(i)->string());
+ if (d) {
+ // read each filename
+ while ((dir = readdir(d)) != NULL) {
+ filename = dir->d_name;
+ temp.clear();
+ temp.append(dirPaths.itemAt(i)->string());
+ temp.append(filename);
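+                // compare only the first character: skips ".", ".." and hidden entries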
+ if ((*filename != *dotdot)) {
+ pFile = fopen(temp.string(), "rb");
+ if (pFile == NULL) {
+ stat = -errno;
+ } else {
+ fseek(pFile, 0, SEEK_END);
+ lSize = ftell(pFile);
+ rewind(pFile);
+                    // if buffer is not NULL, copy each DCC profile into it;
+                    // otherwise only accumulate the size of the DCC directory.
+ if (buffer) {
+ // copy file into the buffer:
+ result = fread(buffer, 1, lSize, pFile);
+ if (result != (size_t) lSize) {
+ stat = INVALID_OPERATION;
+ }
+ buffer = buffer + lSize;
+ }
+                    // accumulate the total size of the DCC files available in the FS
+                    dcc_buf_size = dcc_buf_size + lSize;
+                    // done with this file
+ fclose(pFile);
+ }
+ }
+ }
+ closedir(d);
+ }
+ }
+
+ if (stat == NO_ERROR) {
+ ret = dcc_buf_size;
+ }
+
+ return ret;
+}
+
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/OMXCameraAdapter/OMXDccDataSave.cpp b/camera/OMXCameraAdapter/OMXDccDataSave.cpp
new file mode 100644
index 0000000..7547743
--- /dev/null
+++ b/camera/OMXCameraAdapter/OMXDccDataSave.cpp
@@ -0,0 +1,361 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+* @file OMXDccDataSave.cpp
+*
+* This file contains functionality for handling DCC data save
+*
+*/
+
+#include "CameraHal.h"
+#include "OMXCameraAdapter.h"
+
+
+namespace Ti {
+namespace Camera {
+
+status_t OMXCameraAdapter::initDccFileDataSave(OMX_HANDLETYPE* omxHandle, int portIndex)
+{
+ OMX_CONFIG_EXTRADATATYPE extraDataControl;
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+
+ LOG_FUNCTION_NAME;
+
+ OMX_INIT_STRUCT_PTR (&extraDataControl, OMX_CONFIG_EXTRADATATYPE);
+ extraDataControl.nPortIndex = portIndex;
+ extraDataControl.eExtraDataType = OMX_TI_DccData;
+ extraDataControl.bEnable = OMX_TRUE;
+
+ eError = OMX_SetConfig(*omxHandle,
+ ( OMX_INDEXTYPE ) OMX_IndexConfigOtherExtraDataControl,
+ &extraDataControl);
+
+ if ( OMX_ErrorNone != eError )
+ {
+ CAMHAL_LOGEB("Error while configuring dcc data overwrite extra data 0x%x",
+ eError);
+
+ ret = NO_INIT;
+ }
+
+ if (mDccData.pData) {
+ free(mDccData.pData);
+ mDccData.pData = NULL;
+ }
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::sniffDccFileDataSave(OMX_BUFFERHEADERTYPE* pBuffHeader)
+{
+ OMX_OTHER_EXTRADATATYPE *extraData;
+ OMX_TI_DCCDATATYPE* dccData;
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ android::AutoMutex lock(mDccDataLock);
+
+ if ( NULL == pBuffHeader ) {
+ CAMHAL_LOGEA("Invalid Buffer header");
+ LOG_FUNCTION_NAME_EXIT;
+ return -EINVAL;
+ }
+
+ extraData = getExtradata(pBuffHeader->pPlatformPrivate,
+ (OMX_EXTRADATATYPE)OMX_TI_DccData);
+
+ if ( NULL != extraData ) {
+ CAMHAL_LOGVB("Size = %d, sizeof = %d, eType = 0x%x, nDataSize= %d, nPortIndex = 0x%x, nVersion = 0x%x",
+ extraData->nSize,
+ sizeof(OMX_OTHER_EXTRADATATYPE),
+ extraData->eType,
+ extraData->nDataSize,
+ extraData->nPortIndex,
+ extraData->nVersion);
+ } else {
+ CAMHAL_LOGVA("Invalid OMX_TI_DCCDATATYPE");
+ LOG_FUNCTION_NAME_EXIT;
+ return NO_ERROR;
+ }
+
+ dccData = ( OMX_TI_DCCDATATYPE * ) extraData->data;
+
+ if (NULL == dccData) {
+ CAMHAL_LOGVA("OMX_TI_DCCDATATYPE is not found in extra data");
+ LOG_FUNCTION_NAME_EXIT;
+ return NO_ERROR;
+ }
+
+ if (mDccData.pData) {
+ free(mDccData.pData);
+ }
+
+ memcpy(&mDccData, dccData, sizeof(mDccData));
+
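+    // Payload size = reported extradata size minus the offset of pData inside
+    // OMX_TI_DCCDATATYPE (computed here as a hand-rolled offsetof())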
+ int dccDataSize = (int)dccData->nSize - (int)(&(((OMX_TI_DCCDATATYPE*)0)->pData));
+
+ mDccData.pData = (OMX_PTR)malloc(dccDataSize);
+
+ if (NULL == mDccData.pData) {
+ CAMHAL_LOGVA("not enough memory for DCC data");
+ LOG_FUNCTION_NAME_EXIT;
+ return NO_ERROR;
+ }
+
+ memcpy(mDccData.pData, &(dccData->pData), dccDataSize);
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+// Recursively searches the given directory contents for the correct DCC file.
+// The directory must already be opened; its stream pointer and path are passed
+// as arguments. Because this function is called recursively, the path parameter
+// is reused to limit stack usage, so it MUST be a char array with enough room
+// (260 bytes should suffice) and must end with "/".
+// The directory must also be closed by the caller.
+// If the correct camera DCC file is found (based on the OMX measurement data),
+// its file stream pointer is returned; NULL is returned otherwise.
+FILE * OMXCameraAdapter::parseDCCsubDir(DIR *pDir, char *path)
+{
+ FILE *pFile;
+ DIR *pSubDir;
+ struct dirent *dirEntry;
+ int initialPathLength = strlen(path);
+
+ LOG_FUNCTION_NAME;
+
+ /* check each directory entry */
+ while ((dirEntry = readdir(pDir)) != NULL)
+ {
+ if (dirEntry->d_name[0] == '.')
+ continue;
+
+ strcat(path, dirEntry->d_name);
+ // dirEntry might be sub directory -> check it
+ pSubDir = opendir(path);
+ if (pSubDir) {
+ // dirEntry is sub directory -> parse it
+ strcat(path, "/");
+ pFile = parseDCCsubDir(pSubDir, path);
+ closedir(pSubDir);
+ if (pFile) {
+ // the correct DCC file found!
+ LOG_FUNCTION_NAME_EXIT;
+ return pFile;
+ }
+ } else {
+ // dirEntry is file -> open it
+ pFile = fopen(path, "rb");
+ if (pFile) {
+ // now check if this is the correct DCC file for that camera
+ OMX_U32 dccFileIDword;
+ OMX_U32 *dccFileDesc = (OMX_U32 *) &mDccData.nCameraModuleId;
+ int i;
+
+ // DCC file ID is 3 4-byte words
+ for (i = 0; i < 3; i++) {
+ if (fread(&dccFileIDword, sizeof(OMX_U32), 1, pFile) != 1) {
+ // file too short
+ break;
+ }
+ if (dccFileIDword != dccFileDesc[i]) {
+ // DCC file ID word i does not match
+ break;
+ }
+ }
+
+ fclose(pFile);
+ if (i == 3) {
+ // the correct DCC file found!
+ CAMHAL_LOGDB("DCC file to be updated: %s", path);
+ // reopen it for modification
+ pFile = fopen(path, "rb+");
+ if (!pFile)
+ CAMHAL_LOGEB("ERROR: DCC file %s failed to open for modification", path);
+ LOG_FUNCTION_NAME_EXIT;
+ return pFile;
+ }
+ } else {
+ CAMHAL_LOGEB("ERROR: Failed to open file %s for reading", path);
+ }
+ }
+ // restore original path
+ path[initialPathLength] = '\0';
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ // DCC file not found in this directory tree
+ return NULL;
+}
+
+// Finds the DCC file corresponding to the current camera based on the
+// OMX measurement data, opens it and returns the file stream pointer
+// (NULL on error or if file not found).
+// The folder string dccFolderPath must end with "/"
+FILE * OMXCameraAdapter::fopenCameraDCC(const char *dccFolderPath)
+{
+ FILE *pFile;
+ DIR *pDir;
+ char dccPath[260];
+
+ LOG_FUNCTION_NAME;
+
+ strcpy(dccPath, dccFolderPath);
+
+ pDir = opendir(dccPath);
+ if (!pDir) {
+ CAMHAL_LOGEB("ERROR: Opening DCC directory %s failed", dccPath);
+ LOG_FUNCTION_NAME_EXIT;
+ return NULL;
+ }
+
+ pFile = parseDCCsubDir(pDir, dccPath);
+ closedir(pDir);
+ if (pFile) {
+ CAMHAL_LOGDB("DCC file %s opened for modification", dccPath);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return pFile;
+}
+
+// Positions the DCC file stream pointer to the correct offset within the
+// correct use case based on the OMX measurement data. Returns 0 on success.
+status_t OMXCameraAdapter::fseekDCCuseCasePos(FILE *pFile)
+{
+ OMX_U32 dccNumUseCases = 0;
+ OMX_U32 dccUseCaseData[3];
+ OMX_U32 i;
+
+ LOG_FUNCTION_NAME;
+
+ // position the file pointer to the DCC use cases section
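+    // (the use-case table is assumed to start right after a fixed 80-byte DCC header)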
+ if (fseek(pFile, 80, SEEK_SET)) {
+ CAMHAL_LOGEA("ERROR: Unexpected end of DCC file");
+ LOG_FUNCTION_NAME_EXIT;
+ return -EINVAL;
+ }
+
+ if (fread(&dccNumUseCases, sizeof(OMX_U32), 1, pFile) != 1 ||
+ dccNumUseCases == 0) {
+        CAMHAL_LOGEA("ERROR: DCC file is truncated or contains 0 use cases");
+ LOG_FUNCTION_NAME_EXIT;
+ return -EINVAL;
+ }
+
+ for (i = 0; i < dccNumUseCases; i++) {
+ if (fread(dccUseCaseData, sizeof(OMX_U32), 3, pFile) != 3) {
+ CAMHAL_LOGEA("ERROR: Unexpected end of DCC file");
+ LOG_FUNCTION_NAME_EXIT;
+ return -EINVAL;
+ }
+
+ if (dccUseCaseData[0] == mDccData.nUseCaseId) {
+ // DCC use case match!
+ break;
+ }
+ }
+
+ if (i == dccNumUseCases) {
+ CAMHAL_LOGEB("ERROR: Use case ID %lu not found in DCC file", mDccData.nUseCaseId);
+ LOG_FUNCTION_NAME_EXIT;
+ return -EINVAL;
+ }
+
+ // dccUseCaseData[1] is the offset to the beginning of the actual use case
+ // from the beginning of the file
+ // mDccData.nOffset is the offset within the actual use case (from the
+ // beginning of the use case to the data to be modified)
+
+ if (fseek(pFile,dccUseCaseData[1] + mDccData.nOffset, SEEK_SET ))
+ {
+ CAMHAL_LOGEA("ERROR: Error setting the correct offset");
+ LOG_FUNCTION_NAME_EXIT;
+ return -EINVAL;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return NO_ERROR;
+}
+
+status_t OMXCameraAdapter::saveDccFileDataSave()
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ android::AutoMutex lock(mDccDataLock);
+
+ if (mDccData.pData)
+ {
+ FILE *fd = fopenCameraDCC(DCC_PATH);
+
+ if (fd)
+ {
+ if (!fseekDCCuseCasePos(fd))
+ {
+ int dccDataSize = (int)mDccData.nSize - (int)(&(((OMX_TI_DCCDATATYPE*)0)->pData));
+
+ if (fwrite(mDccData.pData, dccDataSize, 1, fd) != 1)
+ {
+ CAMHAL_LOGEA("ERROR: Writing to DCC file failed");
+ }
+ else
+ {
+ CAMHAL_LOGDA("DCC file successfully updated");
+ }
+ }
+ fclose(fd);
+ }
+ else
+ {
+ CAMHAL_LOGEA("ERROR: Correct DCC file not found or failed to open for modification");
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::closeDccFileDataSave()
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ android::AutoMutex lock(mDccDataLock);
+
+ if (mDccData.pData) {
+ free(mDccData.pData);
+ mDccData.pData = NULL;
+ }
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/OMXCameraAdapter/OMXDefaults.cpp b/camera/OMXCameraAdapter/OMXDefaults.cpp
index aff38d1..2928573 100644
--- a/camera/OMXCameraAdapter/OMXDefaults.cpp
+++ b/camera/OMXCameraAdapter/OMXDefaults.cpp
@@ -24,10 +24,8 @@
#include "CameraHal.h"
#include "OMXCameraAdapter.h"
-namespace android {
-
-#undef LOG_TAG
-#define LOG_TAG "CameraHAL"
+namespace Ti {
+namespace Camera {
#define __STRINGIFY(s) __STRING(s)
@@ -42,42 +40,44 @@ const char OMXCameraAdapter::DEFAULT_EXPOSURE_MODE[] = "auto";
const char OMXCameraAdapter::DEFAULT_FLASH_MODE[] = "off";
const char OMXCameraAdapter::DEFAULT_FOCUS_MODE_PREFERRED[] = "auto";
const char OMXCameraAdapter::DEFAULT_FOCUS_MODE[] = "infinity";
-const char OMXCameraAdapter::DEFAULT_FRAMERATE_RANGE_IMAGE[] = "15000,30000";
-const char OMXCameraAdapter::DEFAULT_FRAMERATE_RANGE_VIDEO[]="24000,30000";
const char OMXCameraAdapter::DEFAULT_IPP[] = "ldc-nsf";
-const char OMXCameraAdapter::DEFAULT_GBCE[] = "disable";
const char OMXCameraAdapter::DEFAULT_ISO_MODE[] = "auto";
const char OMXCameraAdapter::DEFAULT_JPEG_QUALITY[] = "95";
const char OMXCameraAdapter::DEFAULT_THUMBNAIL_QUALITY[] = "60";
const char OMXCameraAdapter::DEFAULT_THUMBNAIL_SIZE[] = "160x120";
const char OMXCameraAdapter::DEFAULT_PICTURE_FORMAT[] = "jpeg";
+const char OMXCameraAdapter::DEFAULT_S3D_PICTURE_LAYOUT[] = "tb-full";
const char OMXCameraAdapter::DEFAULT_PICTURE_SIZE[] = "320x240";
+const char OMXCameraAdapter::DEFAULT_PICTURE_SS_SIZE[] = "640x240";
+const char OMXCameraAdapter::DEFAULT_PICTURE_TB_SIZE[] = "320x480";
const char OMXCameraAdapter::DEFAULT_PREVIEW_FORMAT[] = "yuv420sp";
const char OMXCameraAdapter::DEFAULT_FRAMERATE[] = "30";
+const char OMXCameraAdapter::DEFAULT_S3D_PREVIEW_LAYOUT[] = "tb-subsampled";
const char OMXCameraAdapter::DEFAULT_PREVIEW_SIZE[] = "640x480";
+const char OMXCameraAdapter::DEFAULT_PREVIEW_SS_SIZE[] = "1280x480";
+const char OMXCameraAdapter::DEFAULT_PREVIEW_TB_SIZE[] = "640x960";
const char OMXCameraAdapter::DEFAULT_NUM_PREV_BUFS[] = "6";
const char OMXCameraAdapter::DEFAULT_NUM_PIC_BUFS[] = "1";
-const char OMXCameraAdapter::DEFAULT_MAX_FOCUS_AREAS[] = "1";
const char OMXCameraAdapter::DEFAULT_SATURATION[] = "100";
const char OMXCameraAdapter::DEFAULT_SCENE_MODE[] = "auto";
const char OMXCameraAdapter::DEFAULT_SHARPNESS[] = "100";
-const char OMXCameraAdapter::DEFAULT_VSTAB[] = "false";
-const char OMXCameraAdapter::DEFAULT_VSTAB_SUPPORTED[] = "true";
+const char * OMXCameraAdapter::DEFAULT_VSTAB = android::CameraParameters::FALSE;
+const char * OMXCameraAdapter::DEFAULT_VNF = android::CameraParameters::FALSE;
const char OMXCameraAdapter::DEFAULT_WB[] = "auto";
const char OMXCameraAdapter::DEFAULT_ZOOM[] = "0";
const char OMXCameraAdapter::DEFAULT_MAX_FD_HW_FACES[] = __STRINGIFY(MAX_NUM_FACES_SUPPORTED);
const char OMXCameraAdapter::DEFAULT_MAX_FD_SW_FACES[] = "0";
-const char OMXCameraAdapter::DEFAULT_FOCAL_LENGTH_PRIMARY[] = "3.43";
-const char OMXCameraAdapter::DEFAULT_FOCAL_LENGTH_SECONDARY[] = "1.95";
const char OMXCameraAdapter::DEFAULT_HOR_ANGLE[] = "54.8";
const char OMXCameraAdapter::DEFAULT_VER_ANGLE[] = "42.5";
-const char OMXCameraAdapter::DEFAULT_AE_LOCK[] = "false";
-const char OMXCameraAdapter::DEFAULT_AWB_LOCK[] = "false";
-const char OMXCameraAdapter::DEFAULT_MAX_NUM_METERING_AREAS[] = "0";
-const char OMXCameraAdapter::DEFAULT_LOCK_SUPPORTED[] = "true";
-const char OMXCameraAdapter::DEFAULT_LOCK_UNSUPPORTED[] = "false";
-const char OMXCameraAdapter::DEFAULT_VIDEO_SNAPSHOT_SUPPORTED[] = "true";
+const char * OMXCameraAdapter::DEFAULT_AE_LOCK = android::CameraParameters::FALSE;
+const char * OMXCameraAdapter::DEFAULT_AWB_LOCK = android::CameraParameters::FALSE;
const char OMXCameraAdapter::DEFAULT_VIDEO_SIZE[] = "1920x1080";
-const char OMXCameraAdapter::DEFAULT_PREFERRED_PREVIEW_SIZE_FOR_VIDEO[] = "1920x1080";
-};
+const char OMXCameraAdapter::DEFAULT_SENSOR_ORIENTATION[] = "0";
+const char OMXCameraAdapter::DEFAULT_AUTOCONVERGENCE_MODE[] = "frame";
+const char OMXCameraAdapter::DEFAULT_MANUAL_CONVERGENCE[] = "0";
+const char * OMXCameraAdapter::DEFAULT_MECHANICAL_MISALIGNMENT_CORRECTION_MODE = android::CameraParameters::TRUE;
+const char OMXCameraAdapter::DEFAULT_EXIF_MAKE[] = "default_make";
+const char OMXCameraAdapter::DEFAULT_EXIF_MODEL[] = "default_model";
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/OMXCameraAdapter/OMXExif.cpp b/camera/OMXCameraAdapter/OMXExif.cpp
index 76d94bd..b4fde5a 100644
--- a/camera/OMXCameraAdapter/OMXExif.cpp
+++ b/camera/OMXCameraAdapter/OMXExif.cpp
@@ -21,17 +21,14 @@
*
*/
-#undef LOG_TAG
-
-#define LOG_TAG "CameraHAL"
-
#include "CameraHal.h"
#include "OMXCameraAdapter.h"
#include <math.h>
-namespace android {
+namespace Ti {
+namespace Camera {
-status_t OMXCameraAdapter::setParametersEXIF(const CameraParameters &params,
+status_t OMXCameraAdapter::setParametersEXIF(const android::CameraParameters &params,
BaseCameraAdapter::AdapterState state)
{
status_t ret = NO_ERROR;
@@ -40,7 +37,7 @@ status_t OMXCameraAdapter::setParametersEXIF(const CameraParameters &params,
LOG_FUNCTION_NAME;
- if( ( valstr = params.get(CameraParameters::KEY_GPS_LATITUDE) ) != NULL )
+ if( ( valstr = params.get(android::CameraParameters::KEY_GPS_LATITUDE) ) != NULL )
{
gpsPos = strtod(valstr, NULL);
@@ -72,7 +69,7 @@ status_t OMXCameraAdapter::setParametersEXIF(const CameraParameters &params,
mEXIFData.mGPSData.mLatValid = false;
}
- if( ( valstr = params.get(CameraParameters::KEY_GPS_LONGITUDE) ) != NULL )
+ if( ( valstr = params.get(android::CameraParameters::KEY_GPS_LONGITUDE) ) != NULL )
{
gpsPos = strtod(valstr, NULL);
@@ -104,7 +101,7 @@ status_t OMXCameraAdapter::setParametersEXIF(const CameraParameters &params,
mEXIFData.mGPSData.mLongValid = false;
}
- if( ( valstr = params.get(CameraParameters::KEY_GPS_ALTITUDE) ) != NULL )
+ if( ( valstr = params.get(android::CameraParameters::KEY_GPS_ALTITUDE) ) != NULL )
{
gpsPos = strtod(valstr, NULL);
mEXIFData.mGPSData.mAltitude = floor(fabs(gpsPos));
@@ -120,7 +117,7 @@ status_t OMXCameraAdapter::setParametersEXIF(const CameraParameters &params,
mEXIFData.mGPSData.mAltitudeValid= false;
}
- if( (valstr = params.get(CameraParameters::KEY_GPS_TIMESTAMP)) != NULL )
+ if( (valstr = params.get(android::CameraParameters::KEY_GPS_TIMESTAMP)) != NULL )
{
long gpsTimestamp = strtol(valstr, NULL, 10);
struct tm *timeinfo = gmtime( ( time_t * ) & (gpsTimestamp) );
@@ -141,7 +138,7 @@ status_t OMXCameraAdapter::setParametersEXIF(const CameraParameters &params,
mEXIFData.mGPSData.mTimeStampValid = false;
}
- if( ( valstr = params.get(CameraParameters::KEY_GPS_TIMESTAMP) ) != NULL )
+ if( ( valstr = params.get(android::CameraParameters::KEY_GPS_TIMESTAMP) ) != NULL )
{
long gpsDatestamp = strtol(valstr, NULL, 10);
struct tm *timeinfo = gmtime( ( time_t * ) & (gpsDatestamp) );
@@ -160,7 +157,7 @@ status_t OMXCameraAdapter::setParametersEXIF(const CameraParameters &params,
mEXIFData.mGPSData.mDatestampValid = false;
}
- if( ( valstr = params.get(CameraParameters::KEY_GPS_PROCESSING_METHOD) ) != NULL )
+ if( ( valstr = params.get(android::CameraParameters::KEY_GPS_PROCESSING_METHOD) ) != NULL )
{
strncpy(mEXIFData.mGPSData.mProcMethod, valstr, GPS_PROCESSING_SIZE-1);
mEXIFData.mGPSData.mProcMethodValid = true;
@@ -213,7 +210,7 @@ status_t OMXCameraAdapter::setParametersEXIF(const CameraParameters &params,
}
- if( ( valstr = params.get(CameraParameters::KEY_FOCAL_LENGTH) ) != NULL ) {
+ if( ( valstr = params.get(android::CameraParameters::KEY_FOCAL_LENGTH) ) != NULL ) {
CAMHAL_LOGVB("EXIF Focal length: %s", valstr);
ExifElementsTable::stringToRational(valstr,
&mEXIFData.mFocalNum,
@@ -235,12 +232,12 @@ status_t OMXCameraAdapter::setupEXIF()
OMX_ERRORTYPE eError = OMX_ErrorNone;
OMX_TI_CONFIG_SHAREDBUFFER sharedBuffer;
OMX_TI_CONFIG_EXIF_TAGS *exifTags;
+ unsigned char *startPtr = NULL;
unsigned char *sharedPtr = NULL;
struct timeval sTv;
struct tm *pTime;
OMXCameraPortParameters * capData = NULL;
- MemoryManager memMgr;
- OMX_U8** memmgr_buf_array = NULL;
+ CameraBuffer *memmgr_buf_array;
int buf_size = 0;
LOG_FUNCTION_NAME;
@@ -272,22 +269,23 @@ status_t OMXCameraAdapter::setupEXIF()
buf_size = ((buf_size+4095)/4096)*4096;
sharedBuffer.nSharedBuffSize = buf_size;
- memmgr_buf_array = (OMX_U8 **)memMgr.allocateBuffer(0, 0, NULL, buf_size, 1);
- sharedBuffer.pSharedBuff = ( OMX_U8 * ) memmgr_buf_array[0];
+ memmgr_buf_array = mMemMgr.allocateBufferList(0, 0, NULL, buf_size, 1);
+ sharedBuffer.pSharedBuff = (OMX_U8*)camera_buffer_get_omx_ptr(&memmgr_buf_array[0]);
+ startPtr = ( OMX_U8 * ) memmgr_buf_array[0].opaque;
- if ( NULL == sharedBuffer.pSharedBuff )
+ if ( NULL == startPtr)
{
CAMHAL_LOGEA("No resources to allocate OMX shared buffer");
ret = -1;
}
//Extra data begins right after the EXIF configuration structure.
- sharedPtr = sharedBuffer.pSharedBuff + sizeof(OMX_TI_CONFIG_EXIF_TAGS);
+ sharedPtr = startPtr + sizeof(OMX_TI_CONFIG_EXIF_TAGS);
}
if ( NO_ERROR == ret )
{
- exifTags = ( OMX_TI_CONFIG_EXIF_TAGS * ) sharedBuffer.pSharedBuff;
+ exifTags = ( OMX_TI_CONFIG_EXIF_TAGS * ) startPtr;
OMX_INIT_STRUCT_PTR (exifTags, OMX_TI_CONFIG_EXIF_TAGS);
exifTags->nPortIndex = mCameraAdapterParameters.mImagePortIndex;
@@ -310,7 +308,7 @@ status_t OMXCameraAdapter::setupEXIF()
mEXIFData.mModel,
EXIF_MODEL_SIZE - 1);
- exifTags->pModelBuff = ( OMX_S8 * ) ( sharedPtr - sharedBuffer.pSharedBuff );
+ exifTags->pModelBuff = ( OMX_S8 * ) ( sharedPtr - startPtr );
exifTags->ulModelBuffSizeBytes = strlen((char*)sharedPtr) + 1;
sharedPtr += EXIF_MODEL_SIZE;
exifTags->eStatusModel = OMX_TI_TagUpdated;
@@ -323,7 +321,7 @@ status_t OMXCameraAdapter::setupEXIF()
mEXIFData.mMake,
EXIF_MAKE_SIZE - 1);
- exifTags->pMakeBuff = ( OMX_S8 * ) ( sharedPtr - sharedBuffer.pSharedBuff );
+ exifTags->pMakeBuff = ( OMX_S8 * ) ( sharedPtr - startPtr );
exifTags->ulMakeBuffSizeBytes = strlen((char*)sharedPtr) + 1;
sharedPtr += EXIF_MAKE_SIZE;
exifTags->eStatusMake = OMX_TI_TagUpdated;
@@ -357,7 +355,7 @@ status_t OMXCameraAdapter::setupEXIF()
pTime->tm_sec );
}
- exifTags->pDateTimeBuff = ( OMX_S8 * ) ( sharedPtr - sharedBuffer.pSharedBuff );
+ exifTags->pDateTimeBuff = ( OMX_S8 * ) ( sharedPtr - startPtr );
sharedPtr += EXIF_DATE_TIME_SIZE;
exifTags->ulDateTimeBuffSizeBytes = EXIF_DATE_TIME_SIZE;
exifTags->eStatusDateTime = OMX_TI_TagUpdated;
@@ -435,7 +433,7 @@ status_t OMXCameraAdapter::setupEXIF()
{
memcpy(sharedPtr, mEXIFData.mGPSData.mMapDatum, GPS_MAPDATUM_SIZE);
- exifTags->pGpsMapDatumBuff = ( OMX_S8 * ) ( sharedPtr - sharedBuffer.pSharedBuff );
+ exifTags->pGpsMapDatumBuff = ( OMX_S8 * ) ( sharedPtr - startPtr );
exifTags->ulGpsMapDatumBuffSizeBytes = GPS_MAPDATUM_SIZE;
exifTags->eStatusGpsMapDatum = OMX_TI_TagUpdated;
sharedPtr += GPS_MAPDATUM_SIZE;
@@ -444,7 +442,7 @@ status_t OMXCameraAdapter::setupEXIF()
if ( ( OMX_TI_TagReadWrite == exifTags->eStatusGpsProcessingMethod ) &&
( mEXIFData.mGPSData.mProcMethodValid ) )
{
- exifTags->pGpsProcessingMethodBuff = ( OMX_S8 * ) ( sharedPtr - sharedBuffer.pSharedBuff );
+ exifTags->pGpsProcessingMethodBuff = ( OMX_S8 * ) ( sharedPtr - startPtr );
memcpy(sharedPtr, ExifAsciiPrefix, sizeof(ExifAsciiPrefix));
sharedPtr += sizeof(ExifAsciiPrefix);
@@ -500,7 +498,7 @@ status_t OMXCameraAdapter::setupEXIF()
if ( NULL != memmgr_buf_array )
{
- memMgr.freeBuffer(memmgr_buf_array);
+ mMemMgr.freeBufferList(memmgr_buf_array);
}
LOG_FUNCTION_NAME_EXIT;
@@ -809,7 +807,7 @@ status_t OMXCameraAdapter::convertGPSCoord(double coord,
if ( coord == 0 ) {
- ALOGE("Invalid GPS coordinate");
+ CAMHAL_LOGE("Invalid GPS coordinate");
return -EINVAL;
}
@@ -836,4 +834,5 @@ status_t OMXCameraAdapter::convertGPSCoord(double coord,
return NO_ERROR;
}
-};
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/OMXCameraAdapter/OMXFD.cpp b/camera/OMXCameraAdapter/OMXFD.cpp
index 26bbd87..1a482b2 100644
--- a/camera/OMXCameraAdapter/OMXFD.cpp
+++ b/camera/OMXCameraAdapter/OMXFD.cpp
@@ -21,25 +21,15 @@
*
*/
-#undef LOG_TAG
-
-#define LOG_TAG "CameraHAL"
-
#include "CameraHal.h"
#include "OMXCameraAdapter.h"
-#define FACE_DETECTION_THRESHOLD 80
-
-// constants used for face smooth filtering
-static const int HorizontalFilterThreshold = 40;
-static const int VerticalFilterThreshold = 40;
-static const int HorizontalFaceSizeThreshold = 30;
-static const int VerticalFaceSizeThreshold = 30;
+namespace Ti {
+namespace Camera {
+const uint32_t OMXCameraAdapter::FACE_DETECTION_THRESHOLD = 80;
-namespace android {
-
-status_t OMXCameraAdapter::setParametersFD(const CameraParameters &params,
+status_t OMXCameraAdapter::setParametersFD(const android::CameraParameters &params,
BaseCameraAdapter::AdapterState state)
{
status_t ret = NO_ERROR;
@@ -55,9 +45,9 @@ status_t OMXCameraAdapter::startFaceDetection()
{
status_t ret = NO_ERROR;
- Mutex::Autolock lock(mFaceDetectionLock);
+ android::AutoMutex lock(mFaceDetectionLock);
- ret = setFaceDetection(true, mDeviceOrientation);
+ ret = setFaceDetection(true, mFaceOrientation);
if (ret != NO_ERROR) {
goto out;
}
@@ -82,17 +72,21 @@ status_t OMXCameraAdapter::stopFaceDetection()
BaseCameraAdapter::AdapterState state;
BaseCameraAdapter::getState(state);
- Mutex::Autolock lock(mFaceDetectionLock);
+ android::AutoMutex lock(mFaceDetectionLock);
- ret = setFaceDetection(false, mDeviceOrientation);
+ ret = setFaceDetection(false, mFaceOrientation);
if (ret != NO_ERROR) {
goto out;
}
- // Reset 3A settings
- ret = setParameters3A(mParams, state);
- if (ret != NO_ERROR) {
- goto out;
+ if ( mFaceDetectionRunning ) {
+ //Enable region priority and disable face priority for AF
+ setAlgoPriority(REGION_PRIORITY, FOCUS_ALGO, true);
+ setAlgoPriority(FACE_PRIORITY, FOCUS_ALGO , false);
+
+ //Enable Region priority and disable Face priority
+ setAlgoPriority(REGION_PRIORITY, EXPOSURE_ALGO, true);
+ setAlgoPriority(FACE_PRIORITY, EXPOSURE_ALGO, false);
}
if (mPending3Asettings) {
@@ -106,7 +100,7 @@ status_t OMXCameraAdapter::stopFaceDetection()
void OMXCameraAdapter::pauseFaceDetection(bool pause)
{
- Mutex::Autolock lock(mFaceDetectionLock);
+ android::AutoMutex lock(mFaceDetectionLock);
// pausing will only take affect if fd is already running
if (mFaceDetectionRunning) {
mFaceDetectionPaused = pause;
@@ -114,6 +108,22 @@ void OMXCameraAdapter::pauseFaceDetection(bool pause)
}
}
+status_t OMXCameraAdapter::setFaceDetectionOrientation(OMX_U32 orientation)
+{
+ status_t ret = NO_ERROR;
+
+ android::AutoMutex lock(mFaceDetectionLock);
+
+ mFaceOrientation = orientation;
+
+ if (mFaceDetectionRunning) {
+ // restart face detection with new rotation
+ setFaceDetection(true, orientation);
+ }
+
+ return ret;
+}
+
status_t OMXCameraAdapter::setFaceDetection(bool enable, OMX_U32 orientation)
{
status_t ret = NO_ERROR;
@@ -162,7 +172,9 @@ status_t OMXCameraAdapter::setFaceDetection(bool enable, OMX_U32 orientation)
if ( NO_ERROR == ret )
{
- ret = setExtraData(enable, mCameraAdapterParameters.mPrevPortIndex, OMX_FaceDetection);
+ // TODO(XXX): Should enable/disable FD extra data separately
+ // on each port.
+ ret = setExtraData(enable, OMX_ALL, OMX_FaceDetection);
if ( NO_ERROR != ret )
{
@@ -185,18 +197,15 @@ status_t OMXCameraAdapter::setFaceDetection(bool enable, OMX_U32 orientation)
return ret;
}
-status_t OMXCameraAdapter::detectFaces(OMX_BUFFERHEADERTYPE* pBuffHeader,
- sp<CameraFDResult> &result,
- size_t previewWidth,
- size_t previewHeight)
+status_t OMXCameraAdapter::createPreviewMetadata(OMX_BUFFERHEADERTYPE* pBuffHeader,
+ android::sp<CameraMetadataResult> &result,
+ size_t previewWidth,
+ size_t previewHeight)
{
status_t ret = NO_ERROR;
- OMX_ERRORTYPE eError = OMX_ErrorNone;
- OMX_TI_FACERESULT *faceResult;
- OMX_OTHER_EXTRADATATYPE *extraData;
- OMX_FACEDETECTIONTYPE *faceData;
- OMX_TI_PLATFORMPRIVATE *platformPrivate;
- camera_frame_metadata_t *faces;
+ status_t faceRet = NO_ERROR;
+ status_t metaRet = NO_ERROR;
+ OMX_FACEDETECTIONTYPE *faceData = NULL;
LOG_FUNCTION_NAME;
@@ -210,78 +219,78 @@ status_t OMXCameraAdapter::detectFaces(OMX_BUFFERHEADERTYPE* pBuffHeader,
return-EINVAL;
}
- platformPrivate = (OMX_TI_PLATFORMPRIVATE *) (pBuffHeader->pPlatformPrivate);
- if ( NULL != platformPrivate ) {
- if ( sizeof(OMX_TI_PLATFORMPRIVATE) == platformPrivate->nSize ) {
- CAMHAL_LOGVB("Size = %d, sizeof = %d, pAuxBuf = 0x%x, pAuxBufSize= %d, pMetaDataBufer = 0x%x, nMetaDataSize = %d",
- platformPrivate->nSize,
- sizeof(OMX_TI_PLATFORMPRIVATE),
- platformPrivate->pAuxBuf1,
- platformPrivate->pAuxBufSize1,
- platformPrivate->pMetaDataBuffer,
- platformPrivate->nMetaDataSize);
+ if ( mFaceDetectionRunning && !mFaceDetectionPaused ) {
+ OMX_OTHER_EXTRADATATYPE *extraData;
+
+ extraData = getExtradata(pBuffHeader->pPlatformPrivate,
+ (OMX_EXTRADATATYPE)OMX_FaceDetection);
+
+ if ( NULL != extraData ) {
+ CAMHAL_LOGVB("Size = %d, sizeof = %d, eType = 0x%x, nDataSize= %d, nPortIndex = 0x%x, nVersion = 0x%x",
+ extraData->nSize,
+ sizeof(OMX_OTHER_EXTRADATATYPE),
+ extraData->eType,
+ extraData->nDataSize,
+ extraData->nPortIndex,
+ extraData->nVersion);
} else {
- CAMHAL_LOGDB("OMX_TI_PLATFORMPRIVATE size mismatch: expected = %d, received = %d",
- ( unsigned int ) sizeof(OMX_TI_PLATFORMPRIVATE),
- ( unsigned int ) platformPrivate->nSize);
+ CAMHAL_LOGD("FD extra data not found!");
return -EINVAL;
}
- } else {
- CAMHAL_LOGDA("Invalid OMX_TI_PLATFORMPRIVATE");
- return-EINVAL;
- }
-
- if ( 0 >= platformPrivate->nMetaDataSize ) {
- CAMHAL_LOGDB("OMX_TI_PLATFORMPRIVATE nMetaDataSize is size is %d",
- ( unsigned int ) platformPrivate->nMetaDataSize);
- return -EINVAL;
+ faceData = ( OMX_FACEDETECTIONTYPE * ) extraData->data;
+ if ( NULL != faceData ) {
+ if ( sizeof(OMX_FACEDETECTIONTYPE) == faceData->nSize ) {
+ CAMHAL_LOGVB("Faces detected %d",
+ faceData->ulFaceCount,
+ faceData->nSize,
+ sizeof(OMX_FACEDETECTIONTYPE),
+ faceData->eCameraView,
+ faceData->nPortIndex,
+ faceData->nVersion);
+ } else {
+ CAMHAL_LOGEB("OMX_FACEDETECTIONTYPE size mismatch: expected = %d, received = %d",
+ ( unsigned int ) sizeof(OMX_FACEDETECTIONTYPE),
+ ( unsigned int ) faceData->nSize);
+ return -EINVAL;
+ }
+ } else {
+ CAMHAL_LOGEA("Invalid OMX_FACEDETECTIONTYPE");
+ return -EINVAL;
+ }
}
- extraData = getExtradata((OMX_OTHER_EXTRADATATYPE *) (platformPrivate->pMetaDataBuffer),
- (OMX_EXTRADATATYPE)OMX_FaceDetection);
-
- if ( NULL != extraData ) {
- CAMHAL_LOGVB("Size = %d, sizeof = %d, eType = 0x%x, nDataSize= %d, nPortIndex = 0x%x, nVersion = 0x%x",
- extraData->nSize,
- sizeof(OMX_OTHER_EXTRADATATYPE),
- extraData->eType,
- extraData->nDataSize,
- extraData->nPortIndex,
- extraData->nVersion);
- } else {
- CAMHAL_LOGDA("Invalid OMX_OTHER_EXTRADATATYPE");
- return -EINVAL;
+ result = new (std::nothrow) CameraMetadataResult;
+ if(NULL == result.get()) {
+ ret = NO_MEMORY;
+ return ret;
}
- faceData = ( OMX_FACEDETECTIONTYPE * ) extraData->data;
- if ( NULL != faceData ) {
- if ( sizeof(OMX_FACEDETECTIONTYPE) == faceData->nSize ) {
- CAMHAL_LOGVB("Faces detected %d",
- faceData->ulFaceCount,
- faceData->nSize,
- sizeof(OMX_FACEDETECTIONTYPE),
- faceData->eCameraView,
- faceData->nPortIndex,
- faceData->nVersion);
- } else {
- CAMHAL_LOGDB("OMX_FACEDETECTIONTYPE size mismatch: expected = %d, received = %d",
- ( unsigned int ) sizeof(OMX_FACEDETECTIONTYPE),
- ( unsigned int ) faceData->nSize);
- return -EINVAL;
+ //Encode face coordinates
+ faceRet = encodeFaceCoordinates(faceData, result->getMetadataResult()
+ , previewWidth, previewHeight);
+ if ((NO_ERROR == faceRet) || (NOT_ENOUGH_DATA == faceRet)) {
+ // Ignore harmless errors (no error and no update) and go ahead and encode
+ // the preview meta data
+ metaRet = encodePreviewMetadata(result->getMetadataResult()
+ , pBuffHeader->pPlatformPrivate);
+ if ( (NO_ERROR != metaRet) && (NOT_ENOUGH_DATA != metaRet) ) {
+ // Some 'real' error occurred during preview metadata encoding, clear metadata
+ // result and return correct error code
+ result.clear();
+ ret = metaRet;
}
} else {
- CAMHAL_LOGEA("Invalid OMX_FACEDETECTIONTYPE");
- return -EINVAL;
+ //Some real error occurred during face encoding, clear metadata result
+ // and return correct error code
+ result.clear();
+ ret = faceRet;
}
- ret = encodeFaceCoordinates(faceData, &faces, previewWidth, previewHeight);
-
- if ( NO_ERROR == ret ) {
- result = new CameraFDResult(faces);
- } else {
+ if((NOT_ENOUGH_DATA == faceRet) && (NOT_ENOUGH_DATA == metaRet)) {
+ //No point sending the callback if nothing is changed
result.clear();
- result = NULL;
+ ret = faceRet;
}
LOG_FUNCTION_NAME_EXIT;
@@ -290,40 +299,38 @@ status_t OMXCameraAdapter::detectFaces(OMX_BUFFERHEADERTYPE* pBuffHeader,
}
status_t OMXCameraAdapter::encodeFaceCoordinates(const OMX_FACEDETECTIONTYPE *faceData,
- camera_frame_metadata_t **pFaces,
+ camera_frame_metadata_t *metadataResult,
size_t previewWidth,
size_t previewHeight)
{
status_t ret = NO_ERROR;
camera_face_t *faces;
- camera_frame_metadata_t *faceResult;
size_t hRange, vRange;
double tmp;
+ bool faceArrayChanged = false;
LOG_FUNCTION_NAME;
- if ( NULL == faceData ) {
- CAMHAL_LOGEA("Invalid OMX_FACEDETECTIONTYPE parameter");
- return EINVAL;
- }
-
- LOG_FUNCTION_NAME
+ hRange = CameraMetadataResult::RIGHT - CameraMetadataResult::LEFT;
+ vRange = CameraMetadataResult::BOTTOM - CameraMetadataResult::TOP;
- hRange = CameraFDResult::RIGHT - CameraFDResult::LEFT;
- vRange = CameraFDResult::BOTTOM - CameraFDResult::TOP;
+ android::AutoMutex lock(mFaceDetectionLock);
- faceResult = ( camera_frame_metadata_t * ) malloc(sizeof(camera_frame_metadata_t));
- if ( NULL == faceResult ) {
- return -ENOMEM;
+ // Avoid memory leak if called twice on same CameraMetadataResult
+ if ( (0 < metadataResult->number_of_faces) && (NULL != metadataResult->faces) ) {
+ free(metadataResult->faces);
+ metadataResult->number_of_faces = 0;
+ metadataResult->faces = NULL;
}
- if ( 0 < faceData->ulFaceCount ) {
+ if ( (NULL != faceData) && (0 < faceData->ulFaceCount) ) {
int orient_mult;
int trans_left, trans_top, trans_right, trans_bot;
faces = ( camera_face_t * ) malloc(sizeof(camera_face_t)*faceData->ulFaceCount);
if ( NULL == faces ) {
- return -ENOMEM;
+ ret = NO_MEMORY;
+ goto out;
}
/**
@@ -359,7 +366,7 @@ status_t OMXCameraAdapter::encodeFaceCoordinates(const OMX_FACEDETECTIONTYPE *fa
/ * (r, b)
*/
- if (mDeviceOrientation == 180) {
+ if (mFaceOrientation == 180) {
orient_mult = -1;
trans_left = 2; // right is now left
trans_top = 3; // bottom is now top
@@ -386,7 +393,7 @@ status_t OMXCameraAdapter::encodeFaceCoordinates(const OMX_FACEDETECTIONTYPE *fa
if(faceData->tFacePosition[j].nScore <= FACE_DETECTION_THRESHOLD)
continue;
- if (mDeviceOrientation == 180) {
+ if (mFaceOrientation == 180) {
// from sensor pov, the left pos is the right corner of the face in pov of frame
nLeft = faceData->tFacePosition[j].nLeft + faceData->tFacePosition[j].nWidth;
nTop = faceData->tFacePosition[j].nTop + faceData->tFacePosition[j].nHeight;
@@ -417,20 +424,21 @@ status_t OMXCameraAdapter::encodeFaceCoordinates(const OMX_FACEDETECTIONTYPE *fa
faces[i].score = faceData->tFacePosition[j].nScore;
faces[i].id = 0;
- faces[i].left_eye[0] = CameraFDResult::INVALID_DATA;
- faces[i].left_eye[1] = CameraFDResult::INVALID_DATA;
- faces[i].right_eye[0] = CameraFDResult::INVALID_DATA;
- faces[i].right_eye[1] = CameraFDResult::INVALID_DATA;
- faces[i].mouth[0] = CameraFDResult::INVALID_DATA;
- faces[i].mouth[1] = CameraFDResult::INVALID_DATA;
+ faces[i].left_eye[0] = CameraMetadataResult::INVALID_DATA;
+ faces[i].left_eye[1] = CameraMetadataResult::INVALID_DATA;
+ faces[i].right_eye[0] = CameraMetadataResult::INVALID_DATA;
+ faces[i].right_eye[1] = CameraMetadataResult::INVALID_DATA;
+ faces[i].mouth[0] = CameraMetadataResult::INVALID_DATA;
+ faces[i].mouth[1] = CameraMetadataResult::INVALID_DATA;
i++;
}
- faceResult->number_of_faces = i;
- faceResult->faces = faces;
+ metadataResult->number_of_faces = i;
+ metadataResult->faces = faces;
- for (int i = 0; i < faceResult->number_of_faces; i++)
+ for (int i = 0; i < metadataResult->number_of_faces; i++)
{
+ bool faceChanged = true;
int centerX = (faces[i].rect[trans_left] + faces[i].rect[trans_right] ) / 2;
int centerY = (faces[i].rect[trans_top] + faces[i].rect[trans_bot] ) / 2;
@@ -448,43 +456,49 @@ status_t OMXCameraAdapter::encodeFaceCoordinates(const OMX_FACEDETECTIONTYPE *fa
int tempSizeY = (faceDetectionLastOutput[j].rect[trans_bot] -
faceDetectionLastOutput[j].rect[trans_top] ) ;
- if ( (abs(tempCenterX - centerX) < HorizontalFilterThreshold) &&
- (abs(tempCenterY - centerY) < VerticalFilterThreshold) )
- {
- // Found Face. It did not move too far.
- // Now check size of rectangle compare to last output
- if ( (abs (tempSizeX -sizeX) < HorizontalFaceSizeThreshold) &&
- (abs (tempSizeY -sizeY) < VerticalFaceSizeThreshold) )
- {
- // Rectangle is almost same as last time
- // Output exactly what was done for this face last time.
- faces[i] = faceDetectionLastOutput[j];
- }
- else
- {
- // TODO(XXX): Rectangle size changed but position is same.
- // Possibly we can apply just positional correctness.
+ if ( ( tempCenterX == centerX) &&
+ ( tempCenterY == centerY) ) {
+ // Found Face.
+ // Now check size of rectangle
+ // compare to last output.
+ if ( ( tempSizeX == sizeX ) &&
+ ( tempSizeY == sizeY ) ) {
+ faceChanged = false;
}
}
}
+ // Send face detection data after some face coordinate changes
+ if (faceChanged) {
+ faceArrayChanged = true;
+ }
}
// Save this output for next iteration
- for (int i = 0; i < faceResult->number_of_faces; i++)
+ for (int i = 0; i < metadataResult->number_of_faces; i++)
{
faceDetectionLastOutput[i] = faces[i];
}
- faceDetectionNumFacesLastOutput = faceResult->number_of_faces;
} else {
- faceResult->number_of_faces = 0;
- faceResult->faces = NULL;
+ metadataResult->number_of_faces = 0;
+ metadataResult->faces = NULL;
}
- *pFaces = faceResult;
+ // Send face detection data after face count changes
+ if (faceDetectionNumFacesLastOutput != metadataResult->number_of_faces) {
+ faceArrayChanged = true;
+ }
+ faceDetectionNumFacesLastOutput = metadataResult->number_of_faces;
+
+ if ( !faceArrayChanged ) {
+ ret = NOT_ENOUGH_DATA;
+ }
LOG_FUNCTION_NAME_EXIT;
+out:
+
return ret;
}
-};
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/OMXCameraAdapter/OMXFocus.cpp b/camera/OMXCameraAdapter/OMXFocus.cpp
index be1dfc5..386fff3 100644
--- a/camera/OMXCameraAdapter/OMXFocus.cpp
+++ b/camera/OMXCameraAdapter/OMXFocus.cpp
@@ -22,10 +22,6 @@
*
*/
-#undef LOG_TAG
-
-#define LOG_TAG "CameraHAL"
-
#include "CameraHal.h"
#include "OMXCameraAdapter.h"
#include "ErrorUtils.h"
@@ -34,23 +30,26 @@
#define AF_IMAGE_CALLBACK_TIMEOUT 5000000 //5 seconds timeout
#define AF_VIDEO_CALLBACK_TIMEOUT 2800000 //2.8 seconds timeout
-namespace android {
+namespace Ti {
+namespace Camera {
-status_t OMXCameraAdapter::setParametersFocus(const CameraParameters &params,
+const nsecs_t OMXCameraAdapter::CANCEL_AF_TIMEOUT = seconds_to_nanoseconds(1);
+
+status_t OMXCameraAdapter::setParametersFocus(const android::CameraParameters &params,
BaseCameraAdapter::AdapterState state)
{
status_t ret = NO_ERROR;
const char *str = NULL;
- Vector< sp<CameraArea> > tempAreas;
+ android::Vector<android::sp<CameraArea> > tempAreas;
size_t MAX_FOCUS_AREAS;
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(mFocusAreasLock);
+ android::AutoMutex lock(mFocusAreasLock);
- str = params.get(CameraParameters::KEY_FOCUS_AREAS);
+ str = params.get(android::CameraParameters::KEY_FOCUS_AREAS);
- MAX_FOCUS_AREAS = atoi(params.get(CameraParameters::KEY_MAX_NUM_FOCUS_AREAS));
+ MAX_FOCUS_AREAS = atoi(params.get(android::CameraParameters::KEY_MAX_NUM_FOCUS_AREAS));
if ( NULL != str ) {
ret = CameraArea::parseAreas(str, ( strlen(str) + 1 ), tempAreas);
@@ -72,7 +71,7 @@ status_t OMXCameraAdapter::setParametersFocus(const CameraParameters &params,
}
}
- LOG_FUNCTION_NAME;
+ LOG_FUNCTION_NAME_EXIT;
return ret;
}
@@ -84,6 +83,7 @@ status_t OMXCameraAdapter::doAutoFocus()
OMX_IMAGE_CONFIG_FOCUSCONTROLTYPE focusControl;
OMX_PARAM_FOCUSSTATUSTYPE focusStatus;
OMX_CONFIG_BOOLEANTYPE bOMX;
+ CameraAdapter::AdapterState state;
nsecs_t timeout = 0;
LOG_FUNCTION_NAME;
@@ -102,12 +102,19 @@ status_t OMXCameraAdapter::doAutoFocus()
return NO_ERROR;
}
-
if( ((AF_ACTIVE & getState()) != AF_ACTIVE) && ((AF_ACTIVE & getNextState()) != AF_ACTIVE) ) {
CAMHAL_LOGDA("Auto focus got canceled before doAutoFocus could be called");
return NO_ERROR;
}
+ // AF when fixed focus modes are set should be a no-op.
+ if ( ( mParameters3A.Focus == OMX_IMAGE_FocusControlOff ) ||
+ ( mParameters3A.Focus == OMX_IMAGE_FocusControlAutoInfinity ) ||
+ ( mParameters3A.Focus == OMX_IMAGE_FocusControlHyperfocal ) ) {
+ returnFocusStatus(true);
+ return NO_ERROR;
+ }
+
OMX_INIT_STRUCT_PTR (&focusStatus, OMX_PARAM_FOCUSSTATUSTYPE);
// If the app calls autoFocus, the camera will stop sending face callbacks.
@@ -152,8 +159,7 @@ status_t OMXCameraAdapter::doAutoFocus()
( focusStatus.eFocusStatus == OMX_FocusStatusRequest ||
focusStatus.eFocusStatus == OMX_FocusStatusUnableToReach ||
focusStatus.eFocusStatus == OMX_FocusStatusLost ) ) ||
- (mParameters3A.Focus != (OMX_IMAGE_FOCUSCONTROLTYPE)OMX_IMAGE_FocusControlAuto) )
- {
+ (mParameters3A.Focus != (OMX_IMAGE_FOCUSCONTROLTYPE)OMX_IMAGE_FocusControlAuto) ) {
OMX_INIT_STRUCT_PTR (&bOMX, OMX_CONFIG_BOOLEANTYPE);
bOMX.bEnabled = OMX_TRUE;
@@ -161,6 +167,12 @@ status_t OMXCameraAdapter::doAutoFocus()
eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
(OMX_INDEXTYPE)OMX_TI_IndexConfigAutofocusEnable,
&bOMX);
+ if ( OMX_ErrorNone != eError ) {
+ return Utils::ErrorUtils::omxToAndroidError(eError);
+ }
+
+ {
+ android::AutoMutex lock(mDoAFMutex);
// force AF, Ducati will take care of whether CAF
// or AF will be performed, depending on light conditions
@@ -170,29 +182,31 @@ status_t OMXCameraAdapter::doAutoFocus()
focusControl.eFocusControl = OMX_IMAGE_FocusControlAutoLock;
}
- if ( focusControl.eFocusControl != OMX_IMAGE_FocusControlAuto )
- {
eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
OMX_IndexConfigFocusControl,
&focusControl);
+
+ if ( OMX_ErrorNone != eError ) {
+ CAMHAL_LOGEB("Error while starting focus 0x%x", eError);
+ return INVALID_OPERATION;
+ } else {
+ CAMHAL_LOGDA("Autofocus started successfully");
}
- if ( OMX_ErrorNone != eError ) {
- CAMHAL_LOGEB("Error while starting focus 0x%x", eError);
- return INVALID_OPERATION;
- } else {
- CAMHAL_LOGDA("Autofocus started successfully");
- }
+ // No need to wait if preview is about to stop
+ getNextState(state);
+ if ( ( PREVIEW_ACTIVE & state ) != PREVIEW_ACTIVE ) {
+ return NO_ERROR;
+ }
+
+ // configure focus timeout based on capture mode
+ timeout = (mCapMode == VIDEO_MODE) || (mCapMode == VIDEO_MODE_HQ) ?
+ ( ( nsecs_t ) AF_VIDEO_CALLBACK_TIMEOUT * 1000 ) :
+ ( ( nsecs_t ) AF_IMAGE_CALLBACK_TIMEOUT * 1000 );
- // configure focus timeout based on capture mode
- timeout = (mCapMode == VIDEO_MODE) ?
- ( ( nsecs_t ) AF_VIDEO_CALLBACK_TIMEOUT * 1000 ) :
- ( ( nsecs_t ) AF_IMAGE_CALLBACK_TIMEOUT * 1000 );
- {
- Mutex::Autolock lock(mDoAFMutex);
ret = mDoAFCond.waitRelative(mDoAFMutex, timeout);
- }
+ }
//If somethiing bad happened while we wait
if (mComponentState == OMX_StateInvalid) {
@@ -204,6 +218,7 @@ status_t OMXCameraAdapter::doAutoFocus()
CAMHAL_LOGEA("Autofocus callback timeout expired");
ret = returnFocusStatus(true);
} else {
+ CAMHAL_LOGDA("Autofocus callback received");
ret = returnFocusStatus(false);
}
} else { // Focus mode in continuous
@@ -225,18 +240,16 @@ status_t OMXCameraAdapter::stopAutoFocus()
LOG_FUNCTION_NAME;
- if ( OMX_StateInvalid == mComponentState )
- {
+ if ( OMX_StateInvalid == mComponentState ) {
CAMHAL_LOGEA("OMX component in Invalid state");
returnFocusStatus(false);
return -EINVAL;
- }
+ }
- if ( OMX_StateExecuting != mComponentState )
- {
+ if ( OMX_StateExecuting != mComponentState ) {
CAMHAL_LOGEA("OMX component not in executing state");
return NO_ERROR;
- }
+ }
if ( mParameters3A.Focus == OMX_IMAGE_FocusControlAutoInfinity ) {
// No need to stop focus if we are in infinity mode. Nothing to stop.
@@ -249,19 +262,20 @@ status_t OMXCameraAdapter::stopAutoFocus()
eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
OMX_IndexConfigFocusControl,
&focusControl);
- if ( OMX_ErrorNone != eError )
- {
+ if ( OMX_ErrorNone != eError ) {
CAMHAL_LOGEB("Error while stopping focus 0x%x", eError);
- return ErrorUtils::omxToAndroidError(eError);
- } else {
+ return Utils::ErrorUtils::omxToAndroidError(eError);
+ }
+#ifdef CAMERAHAL_TUNA
+ else {
// This is a WA. Usually the OMX Camera component should
// generate AF status change OMX event fairly quickly
// ( after one preview frame ) and this notification should
// actually come from 'handleFocusCallback()'.
- Mutex::Autolock lock(mDoAFMutex);
+ android::AutoMutex lock(mDoAFMutex);
mDoAFCond.broadcast();
}
-
+#endif
LOG_FUNCTION_NAME_EXIT;
@@ -292,7 +306,7 @@ status_t OMXCameraAdapter::getFocusMode(OMX_IMAGE_CONFIG_FOCUSCONTROLTYPE &focus
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::cancelAutoFocus()
@@ -308,17 +322,30 @@ status_t OMXCameraAdapter::cancelAutoFocus()
return ret;
}
- //Stop the AF only for modes other than CAF or Inifinity
+ //Stop the AF only for modes other than CAF, Infinity or Off
if ( ( focusMode.eFocusControl != OMX_IMAGE_FocusControlAuto ) &&
( focusMode.eFocusControl != ( OMX_IMAGE_FOCUSCONTROLTYPE )
- OMX_IMAGE_FocusControlAutoInfinity ) ) {
+ OMX_IMAGE_FocusControlAutoInfinity ) &&
+ ( focusMode.eFocusControl != OMX_IMAGE_FocusControlOff ) ) {
+ android::AutoMutex lock(mCancelAFMutex);
stopAutoFocus();
+ ret = mCancelAFCond.waitRelative(mCancelAFMutex, CANCEL_AF_TIMEOUT);
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGE("Cancel AF timeout!");
+ }
} else if (focusMode.eFocusControl == OMX_IMAGE_FocusControlAuto) {
// This re-enabling of CAF doesn't seem to
// be needed any more.
// re-apply CAF after unlocking and canceling
// mPending3Asettings |= SetFocus;
}
+
+ {
+ // Signal to 'doAutoFocus()'
+ android::AutoMutex lock(mDoAFMutex);
+ mDoAFCond.broadcast();
+ }
+
// If the apps call #cancelAutoFocus()}, the face callbacks will also resume.
pauseFaceDetection(false);
@@ -345,7 +372,7 @@ status_t OMXCameraAdapter::setFocusCallback(bool enabled)
if ( OMX_StateExecuting != mComponentState )
{
CAMHAL_LOGEA("OMX component not in executing state");
- ret = NO_ERROR;
+ return NO_ERROR;
}
if ( NO_ERROR == ret )
@@ -450,9 +477,9 @@ status_t OMXCameraAdapter::returnFocusStatus(bool timeoutReached)
} else {
CAMHAL_LOGDA("Focus locked. Applied focus locks successfully");
}
+
stopAutoFocus();
}
-
//Query current focus distance after AF is complete
updateFocusDistances(mParameters);
}
@@ -508,6 +535,7 @@ status_t OMXCameraAdapter::checkFocus(OMX_PARAM_FOCUSSTATUSTYPE *eFocusStatus)
if ( NO_ERROR == ret )
{
OMX_INIT_STRUCT_PTR (eFocusStatus, OMX_PARAM_FOCUSSTATUSTYPE);
+
eError = OMX_GetConfig(mCameraAdapterParameters.mHandleComp,
OMX_IndexConfigCommonFocusStatus,
eFocusStatus);
@@ -528,7 +556,7 @@ status_t OMXCameraAdapter::checkFocus(OMX_PARAM_FOCUSSTATUSTYPE *eFocusStatus)
return ret;
}
-status_t OMXCameraAdapter::updateFocusDistances(CameraParameters &params)
+status_t OMXCameraAdapter::updateFocusDistances(android::CameraParameters &params)
{
OMX_U32 focusNear, focusOptimal, focusFar;
status_t ret = NO_ERROR;
@@ -614,7 +642,7 @@ status_t OMXCameraAdapter::encodeFocusDistance(OMX_U32 dist, char *buffer, size_
{
if ( 0 == dist )
{
- strncpy(buffer, CameraParameters::FOCUS_DISTANCE_INFINITY, ( length - 1 ));
+ strncpy(buffer, android::CameraParameters::FOCUS_DISTANCE_INFINITY, ( length - 1 ));
}
else
{
@@ -632,7 +660,7 @@ status_t OMXCameraAdapter::encodeFocusDistance(OMX_U32 dist, char *buffer, size_
status_t OMXCameraAdapter::addFocusDistances(OMX_U32 &near,
OMX_U32 &optimal,
OMX_U32 &far,
- CameraParameters& params)
+ android::CameraParameters& params)
{
status_t ret = NO_ERROR;
@@ -671,7 +699,7 @@ status_t OMXCameraAdapter::addFocusDistances(OMX_U32 &near,
mFocusDistOptimal,
mFocusDistFar);
- params.set(CameraParameters::KEY_FOCUS_DISTANCES, mFocusDistBuffer);
+ params.set(android::CameraParameters::KEY_FOCUS_DISTANCES, mFocusDistBuffer);
}
LOG_FUNCTION_NAME_EXIT;
@@ -684,9 +712,9 @@ status_t OMXCameraAdapter::setTouchFocus()
status_t ret = NO_ERROR;
OMX_ERRORTYPE eError = OMX_ErrorNone;
- OMX_ALGOAREASTYPE **focusAreas;
+ OMX_ALGOAREASTYPE *focusAreas;
OMX_TI_CONFIG_SHAREDBUFFER sharedBuffer;
- MemoryManager memMgr;
+ CameraBuffer *bufferlist;
int areasSize = 0;
LOG_FUNCTION_NAME;
@@ -701,7 +729,8 @@ status_t OMXCameraAdapter::setTouchFocus()
{
areasSize = ((sizeof(OMX_ALGOAREASTYPE)+4095)/4096)*4096;
- focusAreas = (OMX_ALGOAREASTYPE**) memMgr.allocateBuffer(0, 0, NULL, areasSize, 1);
+ bufferlist = mMemMgr.allocateBufferList(0, 0, NULL, areasSize, 1);
+ focusAreas = (OMX_ALGOAREASTYPE*) bufferlist[0].opaque;
OMXCameraPortParameters * mPreviewData = NULL;
mPreviewData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex];
@@ -712,51 +741,60 @@ status_t OMXCameraAdapter::setTouchFocus()
return -ENOMEM;
}
- OMX_INIT_STRUCT_PTR (focusAreas[0], OMX_ALGOAREASTYPE);
+ OMX_INIT_STRUCT_PTR (focusAreas, OMX_ALGOAREASTYPE);
- focusAreas[0]->nPortIndex = OMX_ALL;
- focusAreas[0]->nNumAreas = mFocusAreas.size();
- focusAreas[0]->nAlgoAreaPurpose = OMX_AlgoAreaFocus;
+ focusAreas->nPortIndex = OMX_ALL;
+ focusAreas->nNumAreas = mFocusAreas.size();
+ focusAreas->nAlgoAreaPurpose = OMX_AlgoAreaFocus;
// If the area is the special case of (0, 0, 0, 0, 0), then
// the algorithm needs nNumAreas to be set to 0,
// in order to automatically choose the best fitting areas.
if ( mFocusAreas.itemAt(0)->isZeroArea() )
{
- focusAreas[0]->nNumAreas = 0;
+ focusAreas->nNumAreas = 0;
+ }
+
+ for ( unsigned int n = 0; n < mFocusAreas.size(); n++) {
+ int widthDivisor = 1;
+ int heightDivisor = 1;
+
+ if (mPreviewData->mFrameLayoutType == OMX_TI_StereoFrameLayoutTopBottom) {
+ heightDivisor = 2;
+ }
+ if (mPreviewData->mFrameLayoutType == OMX_TI_StereoFrameLayoutLeftRight) {
+ widthDivisor = 2;
}
- for ( unsigned int n = 0; n < mFocusAreas.size(); n++)
- {
// transform the coordinates to 3A-type coordinates
- mFocusAreas.itemAt(n)->transfrom((size_t)mPreviewData->mWidth,
- (size_t)mPreviewData->mHeight,
- (size_t&)focusAreas[0]->tAlgoAreas[n].nTop,
- (size_t&)focusAreas[0]->tAlgoAreas[n].nLeft,
- (size_t&)focusAreas[0]->tAlgoAreas[n].nWidth,
- (size_t&)focusAreas[0]->tAlgoAreas[n].nHeight);
-
- focusAreas[0]->tAlgoAreas[n].nLeft =
- ( focusAreas[0]->tAlgoAreas[n].nLeft * TOUCH_FOCUS_RANGE ) / mPreviewData->mWidth;
- focusAreas[0]->tAlgoAreas[n].nTop =
- ( focusAreas[0]->tAlgoAreas[n].nTop* TOUCH_FOCUS_RANGE ) / mPreviewData->mHeight;
- focusAreas[0]->tAlgoAreas[n].nWidth =
- ( focusAreas[0]->tAlgoAreas[n].nWidth * TOUCH_FOCUS_RANGE ) / mPreviewData->mWidth;
- focusAreas[0]->tAlgoAreas[n].nHeight =
- ( focusAreas[0]->tAlgoAreas[n].nHeight * TOUCH_FOCUS_RANGE ) / mPreviewData->mHeight;
- focusAreas[0]->tAlgoAreas[n].nPriority = mFocusAreas.itemAt(n)->getWeight();
+ mFocusAreas.itemAt(n)->transfrom((size_t)mPreviewData->mWidth/widthDivisor,
+ (size_t)mPreviewData->mHeight/heightDivisor,
+ (size_t&)focusAreas->tAlgoAreas[n].nTop,
+ (size_t&)focusAreas->tAlgoAreas[n].nLeft,
+ (size_t&)focusAreas->tAlgoAreas[n].nWidth,
+ (size_t&)focusAreas->tAlgoAreas[n].nHeight);
+
+ focusAreas->tAlgoAreas[n].nLeft =
+ ( focusAreas->tAlgoAreas[n].nLeft * TOUCH_FOCUS_RANGE ) / mPreviewData->mWidth;
+ focusAreas->tAlgoAreas[n].nTop =
+ ( focusAreas->tAlgoAreas[n].nTop* TOUCH_FOCUS_RANGE ) / mPreviewData->mHeight;
+ focusAreas->tAlgoAreas[n].nWidth =
+ ( focusAreas->tAlgoAreas[n].nWidth * TOUCH_FOCUS_RANGE ) / mPreviewData->mWidth;
+ focusAreas->tAlgoAreas[n].nHeight =
+ ( focusAreas->tAlgoAreas[n].nHeight * TOUCH_FOCUS_RANGE ) / mPreviewData->mHeight;
+ focusAreas->tAlgoAreas[n].nPriority = mFocusAreas.itemAt(n)->getWeight();
CAMHAL_LOGDB("Focus area %d : top = %d left = %d width = %d height = %d prio = %d",
- n, (int)focusAreas[0]->tAlgoAreas[n].nTop, (int)focusAreas[0]->tAlgoAreas[n].nLeft,
- (int)focusAreas[0]->tAlgoAreas[n].nWidth, (int)focusAreas[0]->tAlgoAreas[n].nHeight,
- (int)focusAreas[0]->tAlgoAreas[n].nPriority);
- }
+ n, (int)focusAreas->tAlgoAreas[n].nTop, (int)focusAreas->tAlgoAreas[n].nLeft,
+ (int)focusAreas->tAlgoAreas[n].nWidth, (int)focusAreas->tAlgoAreas[n].nHeight,
+ (int)focusAreas->tAlgoAreas[n].nPriority);
+ }
OMX_INIT_STRUCT_PTR (&sharedBuffer, OMX_TI_CONFIG_SHAREDBUFFER);
sharedBuffer.nPortIndex = OMX_ALL;
sharedBuffer.nSharedBuffSize = areasSize;
- sharedBuffer.pSharedBuff = (OMX_U8 *) focusAreas[0];
+ sharedBuffer.pSharedBuff = (OMX_U8 *) camera_buffer_get_omx_ptr (&bufferlist[0]);
if ( NULL == sharedBuffer.pSharedBuff )
{
@@ -775,10 +813,9 @@ status_t OMXCameraAdapter::setTouchFocus()
}
EXIT:
- if (NULL != focusAreas)
+ if (NULL != bufferlist)
{
- memMgr.freeBuffer((void*) focusAreas);
- focusAreas = NULL;
+ mMemMgr.freeBufferList (bufferlist);
}
}
@@ -802,17 +839,22 @@ void OMXCameraAdapter::handleFocusCallback() {
CAMHAL_LOGEA("Focus status check failed!");
// signal and unblock doAutoFocus
if (AF_ACTIVE & nextState) {
- Mutex::Autolock lock(mDoAFMutex);
+ android::AutoMutex lock(mDoAFMutex);
mDoAFCond.broadcast();
}
return;
}
- if ( ( eFocusStatus.eFocusStatus != OMX_FocusStatusRequest ) &&
- ( eFocusStatus.eFocusStatus != OMX_FocusStatusOff ) ) {
+ if ( eFocusStatus.eFocusStatus == OMX_FocusStatusOff ) {
+ android::AutoMutex lock(mCancelAFMutex);
+ mCancelAFCond.signal();
+ return;
+ }
+
+ if (eFocusStatus.eFocusStatus != OMX_FocusStatusRequest) {
// signal doAutoFocus when a end of scan message comes
// ignore start of scan
- Mutex::Autolock lock(mDoAFMutex);
+ android::AutoMutex lock(mDoAFMutex);
mDoAFCond.broadcast();
}
@@ -837,4 +879,5 @@ void OMXCameraAdapter::handleFocusCallback() {
notifyFocusSubscribers(focusStatus);
}
-};
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/OMXCameraAdapter/OMXMetadata.cpp b/camera/OMXCameraAdapter/OMXMetadata.cpp
new file mode 100644
index 0000000..af8c49c
--- /dev/null
+++ b/camera/OMXCameraAdapter/OMXMetadata.cpp
@@ -0,0 +1,181 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+* @file OMXMetadata.cpp
+*
+* This file contains functionality for handling camera metadata.
+*
+*/
+
+#undef LOG_TAG
+
+#define LOG_TAG "OMXMetaData"
+
+#include "OMXCameraAdapter.h"
+#include <camera/CameraMetadata.h>
+
+namespace Ti {
+namespace Camera {
+
+#ifdef OMAP_ENHANCEMENT_CPCAM
+camera_memory_t * OMXCameraAdapter::getMetaData(const OMX_PTR plat_pvt,
+ camera_request_memory allocator) const
+{
+ camera_memory_t * ret = NULL;
+
+ OMX_OTHER_EXTRADATATYPE *extraData;
+ OMX_FACEDETECTIONTYPE *faceData = NULL;
+ OMX_TI_WHITEBALANCERESULTTYPE * WBdata = NULL;
+ OMX_TI_VECTSHOTINFOTYPE *shotInfo = NULL;
+ OMX_TI_LSCTABLETYPE *lscTbl = NULL;
+ camera_metadata_t *metaData;
+ size_t offset = 0;
+
+ size_t metaDataSize = sizeof(camera_metadata_t);
+
+ extraData = getExtradata(plat_pvt, (OMX_EXTRADATATYPE) OMX_FaceDetection);
+ if ( NULL != extraData ) {
+ faceData = ( OMX_FACEDETECTIONTYPE * ) extraData->data;
+ metaDataSize += faceData->ulFaceCount * sizeof(camera_metadata_face_t);
+ }
+
+ extraData = getExtradata(plat_pvt, (OMX_EXTRADATATYPE) OMX_WhiteBalance);
+ if ( NULL != extraData ) {
+ WBdata = ( OMX_TI_WHITEBALANCERESULTTYPE * ) extraData->data;
+ }
+
+ extraData = getExtradata(plat_pvt, (OMX_EXTRADATATYPE) OMX_TI_VectShotInfo);
+ if ( NULL != extraData ) {
+ shotInfo = ( OMX_TI_VECTSHOTINFOTYPE * ) extraData->data;
+ }
+
+ extraData = getExtradata(plat_pvt, (OMX_EXTRADATATYPE) OMX_TI_LSCTable);
+ if ( NULL != extraData ) {
+ lscTbl = ( OMX_TI_LSCTABLETYPE * ) extraData->data;
+ metaDataSize += OMX_TI_LSC_GAIN_TABLE_SIZE;
+ }
+
+ ret = allocator(-1, metaDataSize, 1, NULL);
+ if ( NULL == ret ) {
+ return NULL;
+ } else {
+ metaData = static_cast<camera_metadata_t *> (ret->data);
+ offset += sizeof(camera_metadata_t);
+ }
+
+ if ( NULL != faceData ) {
+ metaData->number_of_faces = 0;
+ int idx = 0;
+ metaData->faces_offset = offset;
+ struct camera_metadata_face *faces = reinterpret_cast<struct camera_metadata_face *> (static_cast<char*>(ret->data) + offset);
+ for ( int j = 0; j < faceData->ulFaceCount ; j++ ) {
+ if(faceData->tFacePosition[j].nScore <= FACE_DETECTION_THRESHOLD) {
+ continue;
+ }
+ idx = metaData->number_of_faces;
+ metaData->number_of_faces++;
+ // TODO: Rework and re-use encodeFaceCoordinates()
+ faces[idx].left = faceData->tFacePosition[j].nLeft;
+ faces[idx].top = faceData->tFacePosition[j].nTop;
+ faces[idx].bottom = faceData->tFacePosition[j].nWidth;
+ faces[idx].right = faceData->tFacePosition[j].nHeight;
+ }
+ offset += sizeof(camera_metadata_face_t) * metaData->number_of_faces;
+ }
+
+ if ( NULL != WBdata ) {
+ metaData->awb_temp = WBdata->nColorTemperature;
+ metaData->gain_b = WBdata->nGainB;
+ metaData->gain_gb = WBdata->nGainGB;
+ metaData->gain_gr = WBdata->nGainGR;
+ metaData->gain_r = WBdata->nGainR;
+ metaData->offset_b = WBdata->nOffsetB;
+ metaData->offset_gb = WBdata->nOffsetGB;
+ metaData->offset_gr = WBdata->nOffsetGR;
+ metaData->offset_r = WBdata->nOffsetR;
+ }
+
+ if ( NULL != lscTbl ) {
+ metaData->lsc_table_applied = lscTbl->bApplied;
+ metaData->lsc_table_size = OMX_TI_LSC_GAIN_TABLE_SIZE;
+ metaData->lsc_table_offset = offset;
+ uint8_t *lsc_table = reinterpret_cast<uint8_t *> (static_cast<char*>(ret->data) + offset);
+ memcpy(lsc_table, lscTbl->pGainTable, OMX_TI_LSC_GAIN_TABLE_SIZE);
+ offset += metaData->lsc_table_size;
+ }
+
+ if ( NULL != shotInfo ) {
+ metaData->frame_number = shotInfo->nFrameNum;
+ metaData->shot_number = shotInfo->nConfigId;
+ metaData->analog_gain = shotInfo->nAGain;
+ metaData->analog_gain_req = shotInfo->nReqGain;
+ metaData->analog_gain_min = shotInfo->nGainMin;
+ metaData->analog_gain_max = shotInfo->nGainMax;
+ metaData->analog_gain_error = shotInfo->nSenAGainErr;
+ metaData->analog_gain_dev = shotInfo->nDevAGain;
+ metaData->exposure_time = shotInfo->nExpTime;
+ metaData->exposure_time_req = shotInfo->nReqExpTime;
+ metaData->exposure_time_min = shotInfo->nExpMin;
+ metaData->exposure_time_max = shotInfo->nExpMax;
+ metaData->exposure_time_dev = shotInfo->nDevExpTime;
+ metaData->exposure_time_error = shotInfo->nSenExpTimeErr;
+ metaData->exposure_compensation_req = shotInfo->nReqEC;
+ metaData->exposure_dev = shotInfo->nDevEV;
+ }
+
+ return ret;
+}
+#endif
+
+status_t OMXCameraAdapter::encodePreviewMetadata(camera_frame_metadata_t *meta, const OMX_PTR plat_pvt)
+{
+ status_t ret = NO_ERROR;
+#ifdef OMAP_ENHANCEMENT_CPCAM
+ OMX_OTHER_EXTRADATATYPE *extraData = NULL;
+
+ extraData = getExtradata(plat_pvt, (OMX_EXTRADATATYPE) OMX_TI_VectShotInfo);
+
+ if ( (NULL != extraData) && (NULL != extraData->data) ) {
+ OMX_TI_VECTSHOTINFOTYPE *shotInfo;
+ shotInfo = (OMX_TI_VECTSHOTINFOTYPE*) extraData->data;
+
+ meta->analog_gain = shotInfo->nAGain;
+ meta->exposure_time = shotInfo->nExpTime;
+ } else {
+ meta->analog_gain = -1;
+ meta->exposure_time = -1;
+ }
+
+ // Send metadata event only if any value has changed
+ if ((metadataLastAnalogGain == meta->analog_gain) &&
+ (metadataLastExposureTime == meta->exposure_time)) {
+ ret = NOT_ENOUGH_DATA;
+ } else {
+ metadataLastAnalogGain = meta->analog_gain;
+ metadataLastExposureTime = meta->exposure_time;
+ }
+#else
+ // no-op in non enhancement mode
+ CAMHAL_UNUSED(meta);
+ CAMHAL_UNUSED(plat_pvt);
+#endif
+
+ return ret;
+}
+
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/OMXCameraAdapter/OMXReprocess.cpp b/camera/OMXCameraAdapter/OMXReprocess.cpp
new file mode 100644
index 0000000..6fdbe7b
--- /dev/null
+++ b/camera/OMXCameraAdapter/OMXReprocess.cpp
@@ -0,0 +1,382 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+* @file OMXReprocess.cpp
+*
+* This file contains functionality for handling reprocessing operations.
+*
+*/
+
+#include "CameraHal.h"
+#include "OMXCameraAdapter.h"
+#include "ErrorUtils.h"
+
+
+namespace Ti {
+namespace Camera {
+
+status_t OMXCameraAdapter::setParametersReprocess(const android::CameraParameters &params,
+ CameraBuffer* buffers,
+ BaseCameraAdapter::AdapterState state)
+{
+ status_t ret = NO_ERROR;
+ int w, h, s;
+ OMX_COLOR_FORMATTYPE pixFormat;
+ OMXCameraPortParameters *portData;
+ const char* valstr;
+
+ LOG_FUNCTION_NAME;
+
+ if (!buffers) {
+ CAMHAL_LOGE("invalid buffer array");
+ return BAD_VALUE;
+ }
+
+ portData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mVideoInPortIndex];
+
+ w = buffers[0].width;
+ h = buffers[0].height;
+ s = buffers[0].stride;
+
+ valstr = buffers[0].format;
+ if (valstr != NULL) {
+ if(strcmp(valstr, android::CameraParameters::PIXEL_FORMAT_YUV420SP) == 0) {
+ CAMHAL_LOGDA("YUV420SP format selected");
+ pixFormat = OMX_COLOR_FormatYUV420SemiPlanar;
+ } else if (strcmp(valstr, android::CameraParameters::PIXEL_FORMAT_BAYER_RGGB) == 0) {
+ CAMHAL_LOGDA("RAW Picture format selected");
+ pixFormat = OMX_COLOR_FormatRawBayer10bit;
+ } else if (strcmp(valstr, android::CameraParameters::PIXEL_FORMAT_YUV422I) == 0) {
+ CAMHAL_LOGDA("YUV422i Picture format selected");
+ pixFormat = OMX_COLOR_FormatCbYCrY;
+ } else {
+ CAMHAL_LOGDA("Format not supported, selecting YUV420SP by default");
+ pixFormat = OMX_COLOR_FormatYUV420SemiPlanar;
+ }
+ } else {
+ CAMHAL_LOGDA("Format not supported, selecting YUV420SP by default");
+ pixFormat = OMX_COLOR_FormatYUV420SemiPlanar;
+ }
+
+ if ( (w != (int)portData->mWidth) || (h != (int)portData->mHeight) ||
+ (s != (int) portData->mStride) || (pixFormat != portData->mColorFormat)) {
+ portData->mWidth = w;
+ portData->mHeight = h;
+
+ if ( ( OMX_COLOR_FormatRawBayer10bit == pixFormat ) ||
+ ( OMX_COLOR_FormatCbYCrY == pixFormat ) ) {
+ portData->mStride = w * 2;
+ } else {
+ portData->mStride = s;
+ }
+
+ portData->mColorFormat = pixFormat;
+
+ mPendingReprocessSettings |= SetFormat;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::startReprocess()
+{
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMXCameraPortParameters * portData = NULL;
+
+ LOG_FUNCTION_NAME;
+ CAMHAL_LOGD ("mReprocConfigured = %d", mReprocConfigured);
+ if (!mReprocConfigured) {
+ return NO_ERROR;
+ }
+
+ portData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mVideoInPortIndex];
+
+ CAMHAL_LOGD ("mReprocConfigured = %d", mBurstFramesQueued);
+ if (NO_ERROR == ret) {
+ android::AutoMutex lock(mBurstLock);
+
+ for ( int index = 0 ; index < portData->mMaxQueueable ; index++ ) {
+ CAMHAL_LOGDB("Queuing buffer on video input port - %p, offset: %d, length: %d",
+ portData->mBufferHeader[index]->pBuffer,
+ portData->mBufferHeader[index]->nOffset,
+ portData->mBufferHeader[index]->nFilledLen);
+ portData->mStatus[index] = OMXCameraPortParameters::FILL;
+ eError = OMX_EmptyThisBuffer(mCameraAdapterParameters.mHandleComp,
+ (OMX_BUFFERHEADERTYPE*)portData->mBufferHeader[index]);
+ GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
+ }
+ }
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+ CameraHal::PPM("startReprocess buffers queued on video port: ", &mStartCapture);
+#endif
+
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
+
+EXIT:
+ CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError);
+ performCleanupAfterError();
+ LOG_FUNCTION_NAME_EXIT;
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
+}
+
+status_t OMXCameraAdapter::stopReprocess()
+{
+ LOG_FUNCTION_NAME;
+
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMXCameraPortParameters *portData = NULL;
+
+ if (!mReprocConfigured) {
+ return NO_ERROR;
+ }
+
+ portData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mVideoInPortIndex];
+
+ // Disable port - send command and then free all buffers
+ ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp,
+ OMX_EventCmdComplete,
+ OMX_CommandPortDisable,
+ mCameraAdapterParameters.mVideoInPortIndex,
+ mStopReprocSem);
+ eError = OMX_SendCommand(mCameraAdapterParameters.mHandleComp,
+ OMX_CommandPortDisable,
+ mCameraAdapterParameters.mVideoInPortIndex,
+ NULL);
+ if (portData) {
+ CAMHAL_LOGDB("Freeing buffers on reproc port - num: %d", portData->mNumBufs);
+ for (int index = 0 ; index < portData->mNumBufs ; index++) {
+ CAMHAL_LOGDB("Freeing buffer on reproc port - 0x%x",
+ ( unsigned int ) portData->mBufferHeader[index]->pBuffer);
+ eError = OMX_FreeBuffer(mCameraAdapterParameters.mHandleComp,
+ mCameraAdapterParameters.mVideoInPortIndex,
+ (OMX_BUFFERHEADERTYPE*)portData->mBufferHeader[index]);
+ GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
+ }
+ }
+ CAMHAL_LOGDA("Waiting for port disable");
+ ret = mStopReprocSem.WaitTimeout(OMX_CMD_TIMEOUT);
+ if (mComponentState == OMX_StateInvalid) {
+ CAMHAL_LOGEA("Invalid State after Disable Image Port Exitting!!!");
+ goto EXIT;
+ }
+ if (NO_ERROR == ret) {
+ CAMHAL_LOGDA("Port disabled");
+ } else {
+ ret |= RemoveEvent(mCameraAdapterParameters.mHandleComp,
+ OMX_EventCmdComplete,
+ OMX_CommandPortDisable,
+ mCameraAdapterParameters.mVideoInPortIndex,
+ NULL);
+ CAMHAL_LOGDA("Timeout expired on port disable");
+ goto EXIT;
+ }
+
+ deinitInternalBuffers(mCameraAdapterParameters.mVideoInPortIndex);
+
+ mReprocConfigured = false;
+
+EXIT:
+ CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError);
+ LOG_FUNCTION_NAME_EXIT;
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
+}
+
+status_t OMXCameraAdapter::disableReprocess(){
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+
+ // no-op..for now
+
+EXIT:
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
+}
+
+status_t OMXCameraAdapter::UseBuffersReprocess(CameraBuffer *bufArr, int num)
+{
+ LOG_FUNCTION_NAME;
+
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMXCameraPortParameters *portData = NULL;
+
+ portData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mVideoInPortIndex];
+
+ if ( 0 != mUseReprocessSem.Count() ) {
+ CAMHAL_LOGEB("Error mUseReprocessSem semaphore count %d", mUseReprocessSem.Count());
+ return BAD_VALUE;
+ }
+
+ CAMHAL_ASSERT(num > 0);
+
+ if (mAdapterState == REPROCESS_STATE) {
+ stopReprocess();
+ } else if (mAdapterState == CAPTURE_STATE) {
+ stopImageCapture();
+ stopReprocess();
+ }
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+ CameraHal::PPM("Reprocess stopping image capture and disabling image port: ", &bufArr->ppmStamp);
+
+#endif
+
+ portData->mNumBufs = num;
+
+ // Configure
+ ret = setParametersReprocess(mParams, bufArr, mAdapterState);
+
+ if (mReprocConfigured) {
+ if (mPendingReprocessSettings & ECaptureParamSettings) {
+ stopReprocess();
+ } else {
+ // Tap-in port has already been configured.
+ return NO_ERROR;
+ }
+ }
+
+ if (mPendingReprocessSettings & SetFormat) {
+ mPendingReprocessSettings &= ~SetFormat;
+ ret = setFormat(OMX_CAMERA_PORT_VIDEO_IN_VIDEO, *portData);
+ if ( ret != NO_ERROR ) {
+ CAMHAL_LOGEB("setFormat() failed %d", ret);
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+ }
+ }
+
+ // Configure DOMX to use either gralloc handles or vptrs
+ OMX_TI_PARAMUSENATIVEBUFFER domxUseGrallocHandles;
+ OMX_INIT_STRUCT_PTR (&domxUseGrallocHandles, OMX_TI_PARAMUSENATIVEBUFFER);
+
+ domxUseGrallocHandles.nPortIndex = mCameraAdapterParameters.mVideoInPortIndex;
+ if (bufArr[0].type == CAMERA_BUFFER_ANW) {
+ CAMHAL_LOGD("Using ANW");
+ domxUseGrallocHandles.bEnable = OMX_TRUE;
+
+ // Need to allocate tiler reservation and state that we are going to be using
+ // pagelist buffers. Assuming this happens when buffers are from ANW
+ initInternalBuffers(mCameraAdapterParameters.mVideoInPortIndex);
+ } else {
+ CAMHAL_LOGD("Using ION");
+ domxUseGrallocHandles.bEnable = OMX_FALSE;
+ }
+ eError = OMX_SetParameter(mCameraAdapterParameters.mHandleComp,
+ (OMX_INDEXTYPE)OMX_TI_IndexUseNativeBuffers, &domxUseGrallocHandles);
+ if (eError!=OMX_ErrorNone) {
+ CAMHAL_LOGEB("OMX_SetParameter - %x", eError);
+ }
+ GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+ CameraHal::PPM("Reprocess configuration done: ", &bufArr->ppmStamp);
+
+#endif
+
+ // Enable Port
+ ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp,
+ OMX_EventCmdComplete,
+ OMX_CommandPortEnable,
+ mCameraAdapterParameters.mVideoInPortIndex,
+ mUseReprocessSem);
+ eError = OMX_SendCommand(mCameraAdapterParameters.mHandleComp,
+ OMX_CommandPortEnable,
+ mCameraAdapterParameters.mVideoInPortIndex,
+ NULL);
+ GOTO_EXIT_IF(( eError != OMX_ErrorNone ), eError);
+
+ for (int index = 0 ; index < portData->mNumBufs ; index++)
+ {
+ OMX_BUFFERHEADERTYPE *pBufferHdr;
+ CAMHAL_LOGDB("OMX_UseBuffer Capture address: 0x%x, size = %d",
+ (unsigned int)bufArr[index].opaque,
+ (int)portData->mBufSize);
+
+ eError = OMX_UseBuffer(mCameraAdapterParameters.mHandleComp,
+ &pBufferHdr,
+ mCameraAdapterParameters.mVideoInPortIndex,
+ 0,
+ portData->mBufSize,
+ (OMX_U8*)camera_buffer_get_omx_ptr(&bufArr[index]));
+
+ CAMHAL_LOGDB("OMX_UseBuffer = 0x%x", eError);
+ GOTO_EXIT_IF(( eError != OMX_ErrorNone ), eError);
+
+ pBufferHdr->pAppPrivate = (OMX_PTR) &bufArr[index];
+ bufArr[index].index = index;
+ pBufferHdr->nSize = sizeof(OMX_BUFFERHEADERTYPE);
+ pBufferHdr->nVersion.s.nVersionMajor = 1 ;
+ pBufferHdr->nVersion.s.nVersionMinor = 1 ;
+ pBufferHdr->nVersion.s.nRevision = 0;
+ pBufferHdr->nVersion.s.nStep = 0;
+ pBufferHdr->nOffset = bufArr[index].offset;
+ pBufferHdr->nFilledLen = bufArr[index].actual_size;
+ portData->mBufferHeader[index] = pBufferHdr;
+ }
+
+ // Wait for port enable event
+ CAMHAL_LOGDA("Waiting for port enable");
+ ret = mUseReprocessSem.WaitTimeout(OMX_CMD_TIMEOUT);
+
+ // Error out if something bad happened while we wait
+ if (mComponentState == OMX_StateInvalid) {
+ CAMHAL_LOGEA("Invalid State while trying to enable port for reprocessing");
+ goto EXIT;
+ }
+
+ if (ret == NO_ERROR) {
+ CAMHAL_LOGDA("Port enabled");
+ } else {
+ ret |= RemoveEvent(mCameraAdapterParameters.mHandleComp,
+ OMX_EventCmdComplete,
+ OMX_CommandPortEnable,
+ mCameraAdapterParameters.mVideoInPortIndex,
+ NULL);
+ CAMHAL_LOGDA("Timeout expired on port enable");
+ goto EXIT;
+ }
+
+ mReprocConfigured = true;
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+ CameraHal::PPM("Reprocess video port enabled and buffers registered: ", &bufArr->ppmStamp);
+
+#endif
+
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
+
+EXIT:
+ CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError);
+ // Release image buffers
+ if ( NULL != mReleaseImageBuffersCallback ) {
+ mReleaseImageBuffersCallback(mReleaseData);
+ }
+ performCleanupAfterError();
+ LOG_FUNCTION_NAME_EXIT;
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
+
+}
+
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/OMXCameraAdapter/OMXZoom.cpp b/camera/OMXCameraAdapter/OMXZoom.cpp
index eec7691..e39a3b0 100644
--- a/camera/OMXCameraAdapter/OMXZoom.cpp
+++ b/camera/OMXCameraAdapter/OMXZoom.cpp
@@ -21,14 +21,11 @@
*
*/
-#undef LOG_TAG
-
-#define LOG_TAG "CameraHAL"
-
#include "CameraHal.h"
#include "OMXCameraAdapter.h"
-namespace android {
+namespace Ti {
+namespace Camera {
const int32_t OMXCameraAdapter::ZOOM_STEPS [ZOOM_STAGES] = {
65536, 68157, 70124, 72745,
@@ -49,20 +46,19 @@ const int32_t OMXCameraAdapter::ZOOM_STEPS [ZOOM_STAGES] = {
524288 };
-status_t OMXCameraAdapter::setParametersZoom(const CameraParameters &params,
+status_t OMXCameraAdapter::setParametersZoom(const android::CameraParameters &params,
BaseCameraAdapter::AdapterState state)
{
status_t ret = NO_ERROR;
- Mutex::Autolock lock(mZoomLock);
+ android::AutoMutex lock(mZoomLock);
LOG_FUNCTION_NAME;
//Immediate zoom should not be avaialable while smooth zoom is running
if ( ( ZOOM_ACTIVE & state ) != ZOOM_ACTIVE )
{
- int zoom = params.getInt(CameraParameters::KEY_ZOOM);
- if( ( zoom >= 0 ) && ( zoom < ZOOM_STAGES ) )
- {
+ int zoom = params.getInt(android::CameraParameters::KEY_ZOOM);
+ if (( zoom >= 0 ) && ( zoom < mMaxZoomSupported )) {
mTargetZoomIdx = zoom;
//Immediate zoom should be applied instantly ( CTS requirement )
@@ -97,8 +93,7 @@ status_t OMXCameraAdapter::doZoom(int index)
ret = -1;
}
- if ( ( 0 > index) || ( ( ZOOM_STAGES - 1 ) < index ) )
- {
+ if (( 0 > index) || ((mMaxZoomSupported - 1 ) < index )) {
CAMHAL_LOGEB("Zoom index %d out of range", index);
ret = -EINVAL;
}
@@ -139,7 +134,7 @@ status_t OMXCameraAdapter::advanceZoom()
{
status_t ret = NO_ERROR;
AdapterState state;
- Mutex::Autolock lock(mZoomLock);
+ android::AutoMutex lock(mZoomLock);
BaseCameraAdapter::getState(state);
@@ -241,23 +236,20 @@ status_t OMXCameraAdapter::startSmoothZoom(int targetIdx)
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(mZoomLock);
+ android::AutoMutex lock(mZoomLock);
CAMHAL_LOGDB("Start smooth zoom target = %d, mCurrentIdx = %d",
targetIdx,
mCurrentZoomIdx);
- if ( ( targetIdx >= 0 ) && ( targetIdx < ZOOM_STAGES ) )
- {
+ if (( targetIdx >= 0 ) && ( targetIdx < mMaxZoomSupported )) {
mTargetZoomIdx = targetIdx;
mZoomParameterIdx = mCurrentZoomIdx;
mReturnZoomStatus = false;
- }
- else
- {
+ } else {
CAMHAL_LOGEB("Smooth value out of range %d!", targetIdx);
ret = -EINVAL;
- }
+ }
LOG_FUNCTION_NAME_EXIT;
@@ -267,7 +259,7 @@ status_t OMXCameraAdapter::startSmoothZoom(int targetIdx)
status_t OMXCameraAdapter::stopSmoothZoom()
{
status_t ret = NO_ERROR;
- Mutex::Autolock lock(mZoomLock);
+ android::AutoMutex lock(mZoomLock);
LOG_FUNCTION_NAME;
@@ -293,4 +285,5 @@ status_t OMXCameraAdapter::stopSmoothZoom()
return ret;
}
-};
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/SensorListener.cpp b/camera/SensorListener.cpp
index bb6f577..e53fa83 100644
--- a/camera/SensorListener.cpp
+++ b/camera/SensorListener.cpp
@@ -21,16 +21,14 @@
*
*/
-#define LOG_TAG "CameraHAL"
-
#include "SensorListener.h"
-#include "CameraHal.h"
#include <stdint.h>
#include <math.h>
#include <sys/types.h>
-namespace android {
+namespace Ti {
+namespace Camera {
/*** static declarations ***/
static const float RADIANS_2_DEG = (float) (180 / M_PI);
@@ -46,7 +44,7 @@ static int sensor_events_listener(int fd, int events, void* data)
ASensorEvent sen_events[8];
while ((num_sensors = listener->mSensorEventQueue->read(sen_events, 8)) > 0) {
for (int i = 0; i < num_sensors; i++) {
- if (sen_events[i].type == Sensor::TYPE_ACCELEROMETER) {
+ if (sen_events[i].type == android::Sensor::TYPE_ACCELEROMETER) {
float x = sen_events[i].vector.azimuth;
float y = sen_events[i].vector.pitch;
float z = sen_events[i].vector.roll;
@@ -79,7 +77,7 @@ static int sensor_events_listener(int fd, int events, void* data)
}
listener->handleOrientation(orient, tilt);
CAMHAL_LOGVB(" tilt = %d orientation = %d", tilt, orient);
- } else if (sen_events[i].type == Sensor::TYPE_GYROSCOPE) {
+ } else if (sen_events[i].type == android::Sensor::TYPE_GYROSCOPE) {
CAMHAL_LOGVA("GYROSCOPE EVENT");
}
}
@@ -132,11 +130,11 @@ SensorListener::~SensorListener() {
status_t SensorListener::initialize() {
status_t ret = NO_ERROR;
- SensorManager& mgr(SensorManager::getInstance());
+ android::SensorManager& mgr(android::SensorManager::getInstance());
LOG_FUNCTION_NAME;
- sp<Looper> mLooper;
+ android::sp<android::Looper> mLooper;
mSensorEventQueue = mgr.createEventQueue();
if (mSensorEventQueue == NULL) {
@@ -145,7 +143,7 @@ status_t SensorListener::initialize() {
goto out;
}
- mLooper = new Looper(false);
+ mLooper = new android::Looper(false);
mLooper->addFd(mSensorEventQueue->getFd(), 0, ALOOPER_EVENT_INPUT, sensor_events_listener, this);
if (mSensorLooperThread.get() == NULL)
@@ -157,7 +155,7 @@ status_t SensorListener::initialize() {
goto out;
}
- ret = mSensorLooperThread->run("sensor looper thread", PRIORITY_URGENT_DISPLAY);
+ ret = mSensorLooperThread->run("sensor looper thread", android::PRIORITY_URGENT_DISPLAY);
if (ret == INVALID_OPERATION){
CAMHAL_LOGDA("thread already running ?!?");
} else if (ret != NO_ERROR) {
@@ -184,7 +182,7 @@ void SensorListener::setCallbacks(orientation_callback_t orientation_cb, void *c
void SensorListener::handleOrientation(uint32_t orientation, uint32_t tilt) {
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(&mLock);
+ android::AutoMutex lock(&mLock);
if (mOrientationCb && (sensorsEnabled & SENSOR_ORIENTATION)) {
mOrientationCb(orientation, tilt, mCbCookie);
@@ -194,34 +192,38 @@ void SensorListener::handleOrientation(uint32_t orientation, uint32_t tilt) {
}
void SensorListener::enableSensor(sensor_type_t type) {
- Sensor const* sensor;
- SensorManager& mgr(SensorManager::getInstance());
+ android::Sensor const* sensor;
+ android::SensorManager& mgr(android::SensorManager::getInstance());
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(&mLock);
+ android::AutoMutex lock(&mLock);
if ((type & SENSOR_ORIENTATION) && !(sensorsEnabled & SENSOR_ORIENTATION)) {
- sensor = mgr.getDefaultSensor(Sensor::TYPE_ACCELEROMETER);
- CAMHAL_LOGDB("orientation = %p (%s)", sensor, sensor->getName().string());
- mSensorEventQueue->enableSensor(sensor);
- mSensorEventQueue->setEventRate(sensor, ms2ns(100));
- sensorsEnabled |= SENSOR_ORIENTATION;
+ sensor = mgr.getDefaultSensor(android::Sensor::TYPE_ACCELEROMETER);
+ if(sensor) {
+ CAMHAL_LOGDB("orientation = %p (%s)", sensor, sensor->getName().string());
+ mSensorEventQueue->enableSensor(sensor);
+ mSensorEventQueue->setEventRate(sensor, ms2ns(100));
+ sensorsEnabled |= SENSOR_ORIENTATION;
+ } else {
+ CAMHAL_LOGDB("not enabling absent orientation sensor");
+ }
}
LOG_FUNCTION_NAME_EXIT;
}
void SensorListener::disableSensor(sensor_type_t type) {
- Sensor const* sensor;
- SensorManager& mgr(SensorManager::getInstance());
+ android::Sensor const* sensor;
+ android::SensorManager& mgr(android::SensorManager::getInstance());
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(&mLock);
+ android::AutoMutex lock(&mLock);
if ((type & SENSOR_ORIENTATION) && (sensorsEnabled & SENSOR_ORIENTATION)) {
- sensor = mgr.getDefaultSensor(Sensor::TYPE_ACCELEROMETER);
+ sensor = mgr.getDefaultSensor(android::Sensor::TYPE_ACCELEROMETER);
CAMHAL_LOGDB("orientation = %p (%s)", sensor, sensor->getName().string());
mSensorEventQueue->disableSensor(sensor);
sensorsEnabled &= ~SENSOR_ORIENTATION;
@@ -230,4 +232,5 @@ void SensorListener::disableSensor(sensor_type_t type) {
LOG_FUNCTION_NAME_EXIT;
}
-} // namespace android
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/TICameraParameters.cpp b/camera/TICameraParameters.cpp
index 221cff4..ae8cd81 100644
--- a/camera/TICameraParameters.cpp
+++ b/camera/TICameraParameters.cpp
@@ -14,24 +14,27 @@
* limitations under the License.
*/
-
-
-
-#define LOG_TAG "CameraHAL"
#include <utils/Log.h>
#include <string.h>
#include <stdlib.h>
#include <TICameraParameters.h>
-#include "CameraHal.h"
-namespace android {
+#define TI_KEY_ALGO_PREFIX "ti-algo-"
+
+namespace Ti {
+namespace Camera {
//TI extensions to camera mode
const char TICameraParameters::HIGH_PERFORMANCE_MODE[] = "high-performance";
const char TICameraParameters::HIGH_QUALITY_MODE[] = "high-quality";
const char TICameraParameters::HIGH_QUALITY_ZSL_MODE[] = "high-quality-zsl";
+const char TICameraParameters::CP_CAM_MODE[] = "cp-cam";
const char TICameraParameters::VIDEO_MODE[] = "video-mode";
+const char TICameraParameters::VIDEO_MODE_HQ[] = "video-mode-hq";
+const char TICameraParameters::EXPOSURE_BRACKETING[] = "exposure-bracketing";
+const char TICameraParameters::ZOOM_BRACKETING[] = "zoom-bracketing";
+const char TICameraParameters::TEMP_BRACKETING[] = "temporal-bracketing";
// TI extensions to standard android Parameters
const char TICameraParameters::KEY_SUPPORTED_CAMERAS[] = "camera-indexes";
@@ -40,61 +43,73 @@ const char TICameraParameters::KEY_SHUTTER_ENABLE[] = "shutter-enable";
const char TICameraParameters::KEY_CAMERA_NAME[] = "camera-name";
const char TICameraParameters::KEY_BURST[] = "burst-capture";
const char TICameraParameters::KEY_CAP_MODE[] = "mode";
+const char TICameraParameters::KEY_CAP_MODE_VALUES[] = "mode-values";
const char TICameraParameters::KEY_VNF[] = "vnf";
+const char TICameraParameters::KEY_VNF_SUPPORTED[] = "vnf-supported";
const char TICameraParameters::KEY_SATURATION[] = "saturation";
const char TICameraParameters::KEY_BRIGHTNESS[] = "brightness";
-const char TICameraParameters::KEY_EXPOSURE_MODE[] = "exposure";
const char TICameraParameters::KEY_SUPPORTED_EXPOSURE[] = "exposure-mode-values";
+const char TICameraParameters::KEY_EXPOSURE_MODE[] = "exposure";
+const char TICameraParameters::KEY_SUPPORTED_MANUAL_EXPOSURE_MIN[] = "supported-manual-exposure-min";
+const char TICameraParameters::KEY_SUPPORTED_MANUAL_EXPOSURE_MAX[] = "supported-manual-exposure-max";
+const char TICameraParameters::KEY_SUPPORTED_MANUAL_EXPOSURE_STEP[] = "supported-manual-exposure-step";
+const char TICameraParameters::KEY_SUPPORTED_MANUAL_GAIN_ISO_MIN[] = "supported-manual-gain-iso-min";
+const char TICameraParameters::KEY_SUPPORTED_MANUAL_GAIN_ISO_MAX[] = "supported-manual-gain-iso-max";
+const char TICameraParameters::KEY_SUPPORTED_MANUAL_GAIN_ISO_STEP[] = "supported-manual-gain-iso-step";
+const char TICameraParameters::KEY_MANUAL_EXPOSURE[] = "manual-exposure";
+const char TICameraParameters::KEY_MANUAL_EXPOSURE_RIGHT[] = "manual-exposure-right";
+const char TICameraParameters::KEY_MANUAL_GAIN_ISO[] = "manual-gain-iso";
+const char TICameraParameters::KEY_MANUAL_GAIN_ISO_RIGHT[] = "manual-gain-iso-right";
const char TICameraParameters::KEY_CONTRAST[] = "contrast";
const char TICameraParameters::KEY_SHARPNESS[] = "sharpness";
const char TICameraParameters::KEY_ISO[] = "iso";
const char TICameraParameters::KEY_SUPPORTED_ISO_VALUES[] = "iso-mode-values";
const char TICameraParameters::KEY_SUPPORTED_IPP[] = "ipp-values";
const char TICameraParameters::KEY_IPP[] = "ipp";
-const char TICameraParameters::KEY_MAN_EXPOSURE[] = "manual-exposure";
const char TICameraParameters::KEY_METERING_MODE[] = "meter-mode";
-const char TICameraParameters::KEY_PADDED_WIDTH[] = "padded-width";
-const char TICameraParameters::KEY_PADDED_HEIGHT[] = "padded-height";
const char TICameraParameters::KEY_EXP_BRACKETING_RANGE[] = "exp-bracketing-range";
+const char TICameraParameters::KEY_EXP_GAIN_BRACKETING_RANGE[] = "exp-gain-bracketing-range";
+const char TICameraParameters::KEY_ZOOM_BRACKETING_RANGE[] = "zoom-bracketing-range";
const char TICameraParameters::KEY_TEMP_BRACKETING[] = "temporal-bracketing";
const char TICameraParameters::KEY_TEMP_BRACKETING_RANGE_POS[] = "temporal-bracketing-range-positive";
const char TICameraParameters::KEY_TEMP_BRACKETING_RANGE_NEG[] = "temporal-bracketing-range-negative";
-const char TICameraParameters::KEY_S3D_SUPPORTED[] = "s3d-supported";
+const char TICameraParameters::KEY_FLUSH_SHOT_CONFIG_QUEUE[] = "flush-shot-config-queue";
const char TICameraParameters::KEY_MEASUREMENT_ENABLE[] = "measurement";
const char TICameraParameters::KEY_GBCE[] = "gbce";
+const char TICameraParameters::KEY_GBCE_SUPPORTED[] = "gbce-supported";
const char TICameraParameters::KEY_GLBCE[] = "glbce";
+const char TICameraParameters::KEY_GLBCE_SUPPORTED[] = "glbce-supported";
const char TICameraParameters::KEY_CURRENT_ISO[] = "current-iso";
const char TICameraParameters::KEY_SENSOR_ORIENTATION[] = "sensor-orientation";
-const char TICameraParameters::KEY_SENSOR_ORIENTATION_VALUES[] = "sensor-orientation-values";
-const char TICameraParameters::KEY_MINFRAMERATE[] = "min-framerate";
-const char TICameraParameters::KEY_MAXFRAMERATE[] = "max-framerate";
const char TICameraParameters::KEY_RECORDING_HINT[] = "internal-recording-hint";
const char TICameraParameters::KEY_AUTO_FOCUS_LOCK[] = "auto-focus-lock";
-
-//TI extensions for enabling/disabling GLBCE
-const char TICameraParameters::GLBCE_ENABLE[] = "enable";
-const char TICameraParameters::GLBCE_DISABLE[] = "disable";
-
-//TI extensions for enabling/disabling GBCE
-const char TICameraParameters::GBCE_ENABLE[] = "enable";
-const char TICameraParameters::GBCE_DISABLE[] = "disable";
-
-//TI extensions for enabling/disabling measurement
-const char TICameraParameters::MEASUREMENT_ENABLE[] = "enable";
-const char TICameraParameters::MEASUREMENT_DISABLE[] = "disable";
-
-//TI extensions for zoom
-const char TICameraParameters::ZOOM_SUPPORTED[] = "true";
-const char TICameraParameters::ZOOM_UNSUPPORTED[] = "false";
-
-// TI extensions for 2D Preview in Stereo Mode
-const char TICameraParameters::KEY_S3D2D_PREVIEW[] = "s3d2d-preview";
-const char TICameraParameters::KEY_S3D2D_PREVIEW_MODE[] = "s3d2d-preview-values";
+const char TICameraParameters::KEY_FRAMERATE_RANGES_EXT_SUPPORTED[] = "preview-fps-range-ext-values";
+const char TICameraParameters::KEY_FRAMERATES_EXT_SUPPORTED[] = "preview-fps-ext-values";
+
+const char TICameraParameters::RAW_WIDTH[] = "raw-width";
+const char TICameraParameters::RAW_HEIGHT[] = "raw-height";
+
+// TI extensions for Stereo Mode
+const char TICameraParameters::KEY_S3D_PRV_FRAME_LAYOUT[] = "s3d-prv-frame-layout";
+const char TICameraParameters::KEY_S3D_PRV_FRAME_LAYOUT_VALUES[] = "s3d-prv-frame-layout-values";
+const char TICameraParameters::KEY_S3D_CAP_FRAME_LAYOUT[] = "s3d-cap-frame-layout";
+const char TICameraParameters::KEY_S3D_CAP_FRAME_LAYOUT_VALUES[] = "s3d-cap-frame-layout-values";
+
+//TI extensions for 3D resolutions
+const char TICameraParameters::KEY_SUPPORTED_PICTURE_SUBSAMPLED_SIZES[] = "supported-picture-subsampled-size-values";
+const char TICameraParameters::KEY_SUPPORTED_PICTURE_TOPBOTTOM_SIZES[] = "supported-picture-topbottom-size-values";
+const char TICameraParameters::KEY_SUPPORTED_PICTURE_SIDEBYSIDE_SIZES[] = "supported-picture-sidebyside-size-values";
+const char TICameraParameters::KEY_SUPPORTED_PREVIEW_SUBSAMPLED_SIZES[] = "supported-preview-subsampled-size-values";
+const char TICameraParameters::KEY_SUPPORTED_PREVIEW_TOPBOTTOM_SIZES[] = "supported-preview-topbottom-size-values";
+const char TICameraParameters::KEY_SUPPORTED_PREVIEW_SIDEBYSIDE_SIZES[] = "supported-preview-sidebyside-size-values";
//TI extensions for SAC/SMC
-const char TICameraParameters::KEY_AUTOCONVERGENCE[] = "auto-convergence";
const char TICameraParameters::KEY_AUTOCONVERGENCE_MODE[] = "auto-convergence-mode";
-const char TICameraParameters::KEY_MANUALCONVERGENCE_VALUES[] = "manual-convergence-values";
+const char TICameraParameters::KEY_AUTOCONVERGENCE_MODE_VALUES[] = "auto-convergence-mode-values";
+const char TICameraParameters::KEY_MANUAL_CONVERGENCE[] = "manual-convergence";
+const char TICameraParameters::KEY_SUPPORTED_MANUAL_CONVERGENCE_MIN[] = "supported-manual-convergence-min";
+const char TICameraParameters::KEY_SUPPORTED_MANUAL_CONVERGENCE_MAX[] = "supported-manual-convergence-max";
+const char TICameraParameters::KEY_SUPPORTED_MANUAL_CONVERGENCE_STEP[] = "supported-manual-convergence-step";
//TI extensions for setting EXIF tags
const char TICameraParameters::KEY_EXIF_MODEL[] = "exif-model";
@@ -105,13 +120,10 @@ const char TICameraParameters::KEY_GPS_MAPDATUM[] = "gps-mapdatum";
const char TICameraParameters::KEY_GPS_VERSION[] = "gps-version";
const char TICameraParameters::KEY_GPS_DATESTAMP[] = "gps-datestamp";
-//TI extensions for enabling/disabling shutter sound
-const char TICameraParameters::SHUTTER_ENABLE[] = "true";
-const char TICameraParameters::SHUTTER_DISABLE[] = "false";
-
-//TI extensions for Temporal Bracketing
-const char TICameraParameters::BRACKET_ENABLE[] = "enable";
-const char TICameraParameters::BRACKET_DISABLE[] = "disable";
+// TI extensions for slice mode implementation for VTC
+const char TICameraParameters::KEY_VTC_HINT[] = "internal-vtc-hint";
+const char TICameraParameters::KEY_VIDEO_ENCODER_HANDLE[] = "encoder_handle";
+const char TICameraParameters::KEY_VIDEO_ENCODER_SLICE_HEIGHT[] = "encoder_slice_height";
//TI extensions to Image post-processing
const char TICameraParameters::IPP_LDCNSF[] = "ldc-nsf";
@@ -120,14 +132,12 @@ const char TICameraParameters::IPP_NSF[] = "nsf";
const char TICameraParameters::IPP_NONE[] = "off";
// TI extensions to standard android pixel formats
-const char TICameraParameters::PIXEL_FORMAT_RAW[] = "raw";
+const char TICameraParameters::PIXEL_FORMAT_UNUSED[] = "unused";
const char TICameraParameters::PIXEL_FORMAT_JPS[] = "jps";
const char TICameraParameters::PIXEL_FORMAT_MPO[] = "mpo";
-const char TICameraParameters::PIXEL_FORMAT_RAW_JPEG[] = "raw+jpeg";
-const char TICameraParameters::PIXEL_FORMAT_RAW_MPO[] = "raw+mpo";
+const char TICameraParameters::PIXEL_FORMAT_YUV422I_UYVY[] = "yuv422i-uyvy";
// TI extensions to standard android scene mode settings
-const char TICameraParameters::SCENE_MODE_SPORT[] = "sport";
const char TICameraParameters::SCENE_MODE_CLOSEUP[] = "closeup";
const char TICameraParameters::SCENE_MODE_AQUA[] = "aqua";
const char TICameraParameters::SCENE_MODE_SNOWBEACH[] = "snow-beach";
@@ -149,6 +159,7 @@ const char TICameraParameters::WHITE_BALANCE_FACE[] = "face-priority";
const char TICameraParameters::FOCUS_MODE_PORTRAIT[] = "portrait";
const char TICameraParameters::FOCUS_MODE_EXTENDED[] = "extended";
const char TICameraParameters::FOCUS_MODE_FACE[] = "face-priority";
+const char TICameraParameters::FOCUS_MODE_OFF[] = "off";
// TI extensions to add values for effect settings.
const char TICameraParameters::EFFECT_NATURAL[] = "natural";
@@ -157,7 +168,7 @@ const char TICameraParameters::EFFECT_COLOR_SWAP[] = "color-swap";
const char TICameraParameters::EFFECT_BLACKWHITE[] = "blackwhite";
// TI extensions to add exposure preset modes
-const char TICameraParameters::EXPOSURE_MODE_OFF[] = "off";
+const char TICameraParameters::EXPOSURE_MODE_MANUAL[] = "manual";
const char TICameraParameters::EXPOSURE_MODE_AUTO[] = "auto";
const char TICameraParameters::EXPOSURE_MODE_NIGHT[] = "night";
const char TICameraParameters::EXPOSURE_MODE_BACKLIGHT[] = "backlighting";
@@ -179,12 +190,19 @@ const char TICameraParameters::ISO_MODE_1000[] = "1000";
const char TICameraParameters::ISO_MODE_1200[] = "1200";
const char TICameraParameters::ISO_MODE_1600[] = "1600";
+//TI extensions for stereo frame layouts
+const char TICameraParameters::S3D_NONE[] = "none";
+const char TICameraParameters::S3D_TB_FULL[] = "tb-full";
+const char TICameraParameters::S3D_SS_FULL[] = "ss-full";
+const char TICameraParameters::S3D_TB_SUBSAMPLED[] = "tb-subsampled";
+const char TICameraParameters::S3D_SS_SUBSAMPLED[] = "ss-subsampled";
+
// TI extensions to add auto convergence values
-const char TICameraParameters::AUTOCONVERGENCE_MODE_DISABLE[] = "mode-disable";
-const char TICameraParameters::AUTOCONVERGENCE_MODE_FRAME[] = "mode-frame";
-const char TICameraParameters::AUTOCONVERGENCE_MODE_CENTER[] = "mode-center";
-const char TICameraParameters::AUTOCONVERGENCE_MODE_FFT[] = "mode-fft";
-const char TICameraParameters::AUTOCONVERGENCE_MODE_MANUAL[] = "mode-manual";
+const char TICameraParameters::AUTOCONVERGENCE_MODE_DISABLE[] = "disable";
+const char TICameraParameters::AUTOCONVERGENCE_MODE_FRAME[] = "frame";
+const char TICameraParameters::AUTOCONVERGENCE_MODE_CENTER[] = "center";
+const char TICameraParameters::AUTOCONVERGENCE_MODE_TOUCH[] = "touch";
+const char TICameraParameters::AUTOCONVERGENCE_MODE_MANUAL[] = "manual";
//TI values for camera direction
const char TICameraParameters::FACING_FRONT[]="front";
@@ -198,5 +216,19 @@ const char TICameraParameters::ORIENTATION_SENSOR_NONE[] = "0";
const char TICameraParameters::ORIENTATION_SENSOR_90[] = "90";
const char TICameraParameters::ORIENTATION_SENSOR_180[] = "180";
const char TICameraParameters::ORIENTATION_SENSOR_270[] = "270";
-};
+const char TICameraParameters::KEY_MECHANICAL_MISALIGNMENT_CORRECTION_SUPPORTED[] = "mechanical-misalignment-correction-supported";
+const char TICameraParameters::KEY_MECHANICAL_MISALIGNMENT_CORRECTION[] = "mechanical-misalignment-correction";
+
+//TI extensions for enable/disable algos
+const char TICameraParameters::KEY_ALGO_EXTERNAL_GAMMA[] = TI_KEY_ALGO_PREFIX "external-gamma";
+const char TICameraParameters::KEY_ALGO_NSF1[] = TI_KEY_ALGO_PREFIX "nsf1";
+const char TICameraParameters::KEY_ALGO_NSF2[] = TI_KEY_ALGO_PREFIX "nsf2";
+const char TICameraParameters::KEY_ALGO_SHARPENING[] = TI_KEY_ALGO_PREFIX "sharpening";
+const char TICameraParameters::KEY_ALGO_THREELINCOLORMAP[] = TI_KEY_ALGO_PREFIX "threelinecolormap";
+const char TICameraParameters::KEY_ALGO_GIC[] = TI_KEY_ALGO_PREFIX "gic";
+
+const char TICameraParameters::KEY_GAMMA_TABLE[] = "gamma-table";
+
+} // namespace Camera
+} // namespace Ti
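
Side note on the TI_KEY_ALGO_PREFIX keys added above: the prefix macro relies on the C/C++ rule that adjacent string literals are concatenated at compile time, so TI_KEY_ALGO_PREFIX "nsf1" ends up as the single key string "ti-algo-nsf1". A minimal, standalone sketch of the idiom (illustrative only, not part of the HAL sources):

#include <cstdio>
#include <cstring>

#define TI_KEY_ALGO_PREFIX "ti-algo-"

// Adjacent literals merge into one array at compile time: "ti-algo-nsf1".
static const char KEY_ALGO_NSF1[] = TI_KEY_ALGO_PREFIX "nsf1";

int main() {
    std::printf("%s (%zu chars)\n", KEY_ALGO_NSF1, std::strlen(KEY_ALGO_NSF1)); // ti-algo-nsf1 (12 chars)
    return 0;
}
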
diff --git a/camera/V4LCameraAdapter/V4LCameraAdapter.cpp b/camera/V4LCameraAdapter/V4LCameraAdapter.cpp
index c365023..91ecfe1 100644
--- a/camera/V4LCameraAdapter/V4LCameraAdapter.cpp
+++ b/camera/V4LCameraAdapter/V4LCameraAdapter.cpp
@@ -25,6 +25,7 @@
#include "V4LCameraAdapter.h"
#include "CameraHal.h"
#include "TICameraParameters.h"
+#include "DebugUtils.h"
#include <signal.h>
#include <stdio.h>
#include <stdlib.h>
@@ -37,6 +38,8 @@
#include <sys/select.h>
#include <linux/videodev.h>
+#include <ui/GraphicBuffer.h>
+#include <ui/GraphicBufferMapper.h>
#include <cutils/properties.h>
#define UNLIKELY( exp ) (__builtin_expect( (exp) != 0, false ))
@@ -44,28 +47,250 @@ static int mDebugFps = 0;
#define Q16_OFFSET 16
-#define HERE(Msg) {CAMHAL_LOGEB("--===line %d, %s===--\n", __LINE__, Msg);}
+#define HERE(Msg) {CAMHAL_LOGEB("--=== %s===--\n", Msg);}
-namespace android {
-
-#undef LOG_TAG
-///Maintain a separate tag for V4LCameraAdapter logs to isolate issues OMX specific
-#define LOG_TAG "CameraHAL"
+namespace Ti {
+namespace Camera {
//frames skipped before recalculating the framerate
#define FPS_PERIOD 30
-Mutex gAdapterLock;
-const char *device = DEVICE;
+//define this macro to save the first few raw frames when starting the preview.
+//#define SAVE_RAW_FRAMES 1
+//#define DUMP_CAPTURE_FRAME 1
+//#define PPM_PER_FRAME_CONVERSION 1
+
+//Prototypes
+static void convertYUV422i_yuyvTouyvy(uint8_t *src, uint8_t *dest, size_t size );
+static void convertYUV422ToNV12Tiler(unsigned char *src, unsigned char *dest, int width, int height );
+static void convertYUV422ToNV12(unsigned char *src, unsigned char *dest, int width, int height );
+
+android::Mutex gV4LAdapterLock;
+char device[15];
/*--------------------Camera Adapter Class STARTS here-----------------------------*/
-status_t V4LCameraAdapter::initialize(CameraProperties::Properties* caps)
+/*--------------------V4L wrapper functions -------------------------------*/
+status_t V4LCameraAdapter::v4lIoctl (int fd, int req, void* argp) {
+ status_t ret = NO_ERROR;
+ errno = 0;
+
+ do {
+ ret = ioctl (fd, req, argp);
+ }while (-1 == ret && EINTR == errno);
+
+ return ret;
+}
+
+status_t V4LCameraAdapter::v4lInitMmap(int& count) {
+ status_t ret = NO_ERROR;
+
+ //First allocate adapter internal buffers at V4L level for USB Cam
+ //These are the buffers from which we will copy the data into overlay buffers
+ /* Check if camera can handle NB_BUFFER buffers */
+ mVideoInfo->rb.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ mVideoInfo->rb.memory = V4L2_MEMORY_MMAP;
+ mVideoInfo->rb.count = count;
+
+ ret = v4lIoctl(mCameraHandle, VIDIOC_REQBUFS, &mVideoInfo->rb);
+ if (ret < 0) {
+ CAMHAL_LOGEB("VIDIOC_REQBUFS failed: %s", strerror(errno));
+ return ret;
+ }
+
+ count = mVideoInfo->rb.count;
+ for (int i = 0; i < count; i++) {
+
+ memset (&mVideoInfo->buf, 0, sizeof (struct v4l2_buffer));
+
+ mVideoInfo->buf.index = i;
+ mVideoInfo->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ mVideoInfo->buf.memory = V4L2_MEMORY_MMAP;
+
+ ret = v4lIoctl (mCameraHandle, VIDIOC_QUERYBUF, &mVideoInfo->buf);
+ if (ret < 0) {
+ CAMHAL_LOGEB("Unable to query buffer (%s)", strerror(errno));
+ return ret;
+ }
+
+ mVideoInfo->mem[i] = mmap (NULL,
+ mVideoInfo->buf.length,
+ PROT_READ | PROT_WRITE,
+ MAP_SHARED,
+ mCameraHandle,
+ mVideoInfo->buf.m.offset);
+
+ CAMHAL_LOGVB(" mVideoInfo->mem[%d]=%p ; mVideoInfo->buf.length = %d", i, mVideoInfo->mem[i], mVideoInfo->buf.length);
+ if (mVideoInfo->mem[i] == MAP_FAILED) {
+ CAMHAL_LOGEB("Unable to map buffer [%d]. (%s)", i, strerror(errno));
+ return -1;
+ }
+ }
+ return ret;
+}
+
+status_t V4LCameraAdapter::v4lInitUsrPtr(int& count) {
+ status_t ret = NO_ERROR;
+
+ mVideoInfo->rb.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ mVideoInfo->rb.memory = V4L2_MEMORY_USERPTR;
+ mVideoInfo->rb.count = count;
+
+ ret = v4lIoctl(mCameraHandle, VIDIOC_REQBUFS, &mVideoInfo->rb);
+ if (ret < 0) {
+ CAMHAL_LOGEB("VIDIOC_REQBUFS failed for USERPTR: %s", strerror(errno));
+ return ret;
+ }
+
+ count = mVideoInfo->rb.count;
+ return ret;
+}
+
+status_t V4LCameraAdapter::v4lStartStreaming () {
+ status_t ret = NO_ERROR;
+ enum v4l2_buf_type bufType;
+
+ if (!mVideoInfo->isStreaming) {
+ bufType = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+
+ ret = v4lIoctl (mCameraHandle, VIDIOC_STREAMON, &bufType);
+ if (ret < 0) {
+ CAMHAL_LOGEB("StartStreaming: Unable to start capture: %s", strerror(errno));
+ return ret;
+ }
+ mVideoInfo->isStreaming = true;
+ }
+ return ret;
+}
+
+status_t V4LCameraAdapter::v4lStopStreaming (int nBufferCount) {
+ status_t ret = NO_ERROR;
+ enum v4l2_buf_type bufType;
+
+ if (mVideoInfo->isStreaming) {
+ bufType = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+
+ ret = v4lIoctl (mCameraHandle, VIDIOC_STREAMOFF, &bufType);
+ if (ret < 0) {
+ CAMHAL_LOGEB("StopStreaming: Unable to stop capture: %s", strerror(errno));
+ goto EXIT;
+ }
+ mVideoInfo->isStreaming = false;
+
+ /* Unmap buffers */
+ mVideoInfo->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ mVideoInfo->buf.memory = V4L2_MEMORY_MMAP;
+ for (int i = 0; i < nBufferCount; i++) {
+ if (munmap(mVideoInfo->mem[i], mVideoInfo->buf.length) < 0) {
+ CAMHAL_LOGEA("munmap() failed");
+ }
+ }
+
+ //free the memory allocated during REQBUFS, by setting the count=0
+ mVideoInfo->rb.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ mVideoInfo->rb.memory = V4L2_MEMORY_MMAP;
+ mVideoInfo->rb.count = 0;
+
+ ret = v4lIoctl(mCameraHandle, VIDIOC_REQBUFS, &mVideoInfo->rb);
+ if (ret < 0) {
+ CAMHAL_LOGEB("VIDIOC_REQBUFS failed: %s", strerror(errno));
+ goto EXIT;
+ }
+ }
+EXIT:
+ return ret;
+}
+
+status_t V4LCameraAdapter::v4lSetFormat (int width, int height, uint32_t pix_format) {
+ status_t ret = NO_ERROR;
+
+ mVideoInfo->format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ ret = v4lIoctl(mCameraHandle, VIDIOC_G_FMT, &mVideoInfo->format);
+ if (ret < 0) {
+ CAMHAL_LOGEB("VIDIOC_G_FMT Failed: %s", strerror(errno));
+ }
+
+ mVideoInfo->width = width;
+ mVideoInfo->height = height;
+ mVideoInfo->framesizeIn = (width * height << 1);
+ mVideoInfo->formatIn = DEFAULT_PIXEL_FORMAT;
+
+ mVideoInfo->format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ mVideoInfo->format.fmt.pix.width = width;
+ mVideoInfo->format.fmt.pix.height = height;
+ mVideoInfo->format.fmt.pix.pixelformat = pix_format;
+
+ ret = v4lIoctl(mCameraHandle, VIDIOC_S_FMT, &mVideoInfo->format);
+ if (ret < 0) {
+ CAMHAL_LOGEB("VIDIOC_S_FMT Failed: %s", strerror(errno));
+ return ret;
+ }
+ v4lIoctl(mCameraHandle, VIDIOC_G_FMT, &mVideoInfo->format);
+ CAMHAL_LOGDB("VIDIOC_G_FMT : WxH = %dx%d", mVideoInfo->format.fmt.pix.width, mVideoInfo->format.fmt.pix.height);
+ return ret;
+}
+
+status_t V4LCameraAdapter::restartPreview ()
{
- LOG_FUNCTION_NAME;
+ status_t ret = NO_ERROR;
+ int width = 0;
+ int height = 0;
+ struct v4l2_streamparm streamParams;
+
+ //configure for preview size and pixel format.
+ mParams.getPreviewSize(&width, &height);
+
+ ret = v4lSetFormat (width, height, DEFAULT_PIXEL_FORMAT);
+ if (ret < 0) {
+ CAMHAL_LOGEB("v4lSetFormat Failed: %s", strerror(errno));
+ goto EXIT;
+ }
+
+ ret = v4lInitMmap(mPreviewBufferCount);
+ if (ret < 0) {
+ CAMHAL_LOGEB("v4lInitMmap Failed: %s", strerror(errno));
+ goto EXIT;
+ }
+
+ //set frame rate
+ streamParams.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ streamParams.parm.capture.capability = V4L2_CAP_TIMEPERFRAME;
+ streamParams.parm.capture.capturemode = V4L2_MODE_HIGHQUALITY;
+ streamParams.parm.capture.timeperframe.denominator = FPS_PERIOD;
+ streamParams.parm.capture.timeperframe.numerator= 1;
+ ret = v4lIoctl(mCameraHandle, VIDIOC_S_PARM, &streamParams);
+ if (ret < 0) {
+ CAMHAL_LOGEB("VIDIOC_S_PARM Failed: %s", strerror(errno));
+ goto EXIT;
+ }
+
+ for (int i = 0; i < mPreviewBufferCountQueueable; i++) {
+ mVideoInfo->buf.index = i;
+ mVideoInfo->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ mVideoInfo->buf.memory = V4L2_MEMORY_MMAP;
+
+ ret = v4lIoctl(mCameraHandle, VIDIOC_QBUF, &mVideoInfo->buf);
+ if (ret < 0) {
+ CAMHAL_LOGEA("VIDIOC_QBUF Failed");
+ goto EXIT;
+ }
+ nQueued++;
+ }
+
+ ret = v4lStartStreaming();
+ CAMHAL_LOGDA("Ready for preview....");
+EXIT:
+ return ret;
+}
+
+/*--------------------Camera Adapter Functions-----------------------------*/
+status_t V4LCameraAdapter::initialize(CameraProperties::Properties* caps)
+{
char value[PROPERTY_VALUE_MAX];
+
+ LOG_FUNCTION_NAME;
property_get("debug.camera.showfps", value, "0");
mDebugFps = atoi(value);
@@ -73,115 +298,129 @@ status_t V4LCameraAdapter::initialize(CameraProperties::Properties* caps)
// Allocate memory for video info structure
mVideoInfo = (struct VideoInfo *) calloc (1, sizeof (struct VideoInfo));
- if(!mVideoInfo)
- {
- return NO_MEMORY;
- }
+ if(!mVideoInfo) {
+ ret = NO_MEMORY;
+ goto EXIT;
+ }
- if ((mCameraHandle = open(device, O_RDWR)) == -1)
- {
+ if ((mCameraHandle = open(device, O_RDWR | O_NONBLOCK) ) == -1) {
CAMHAL_LOGEB("Error while opening handle to V4L2 Camera: %s", strerror(errno));
- return -EINVAL;
- }
+ ret = BAD_VALUE;
+ goto EXIT;
+ }
- ret = ioctl (mCameraHandle, VIDIOC_QUERYCAP, &mVideoInfo->cap);
- if (ret < 0)
- {
+ ret = v4lIoctl (mCameraHandle, VIDIOC_QUERYCAP, &mVideoInfo->cap);
+ if (ret < 0) {
CAMHAL_LOGEA("Error when querying the capabilities of the V4L Camera");
- return -EINVAL;
- }
+ ret = BAD_VALUE;
+ goto EXIT;
+ }
- if ((mVideoInfo->cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) == 0)
- {
+ if ((mVideoInfo->cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) == 0) {
CAMHAL_LOGEA("Error while adapter initialization: video capture not supported.");
- return -EINVAL;
- }
+ ret = BAD_VALUE;
+ goto EXIT;
+ }
- if (!(mVideoInfo->cap.capabilities & V4L2_CAP_STREAMING))
- {
+ if (!(mVideoInfo->cap.capabilities & V4L2_CAP_STREAMING)) {
CAMHAL_LOGEA("Error while adapter initialization: Capture device does not support streaming i/o");
- return -EINVAL;
- }
+ ret = BAD_VALUE;
+ goto EXIT;
+ }
// Initialize flags
mPreviewing = false;
mVideoInfo->isStreaming = false;
mRecording = false;
-
+ mCapturing = false;
+EXIT:
LOG_FUNCTION_NAME_EXIT;
-
return ret;
}
-status_t V4LCameraAdapter::fillThisBuffer(void* frameBuf, CameraFrame::FrameType frameType)
+status_t V4LCameraAdapter::fillThisBuffer(CameraBuffer *frameBuf, CameraFrame::FrameType frameType)
{
-
status_t ret = NO_ERROR;
+ int idx = 0;
+ LOG_FUNCTION_NAME;
- if ( !mVideoInfo->isStreaming )
- {
- return NO_ERROR;
+ if ( frameType == CameraFrame::IMAGE_FRAME) { //(1 > mCapturedFrames)
+ // Signal end of image capture
+ if ( NULL != mEndImageCaptureCallback) {
+ CAMHAL_LOGDB("===========Signal End Image Capture==========");
+ mEndImageCaptureCallback(mEndCaptureData);
}
+ goto EXIT;
+ }
+ if ( !mVideoInfo->isStreaming ) {
+ goto EXIT;
+ }
- int i = mPreviewBufs.valueFor(( unsigned int )frameBuf);
- if(i<0)
- {
- return BAD_VALUE;
- }
+ idx = mPreviewBufs.valueFor(frameBuf);
+ if(idx < 0) {
+ CAMHAL_LOGEB("Wrong index = %d",idx);
+ goto EXIT;
+ }
- mVideoInfo->buf.index = i;
+ mVideoInfo->buf.index = idx;
mVideoInfo->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
mVideoInfo->buf.memory = V4L2_MEMORY_MMAP;
- ret = ioctl(mCameraHandle, VIDIOC_QBUF, &mVideoInfo->buf);
+ ret = v4lIoctl(mCameraHandle, VIDIOC_QBUF, &mVideoInfo->buf);
if (ret < 0) {
- CAMHAL_LOGEA("Init: VIDIOC_QBUF Failed");
- return -1;
+ CAMHAL_LOGEA("VIDIOC_QBUF Failed");
+ goto EXIT;
}
-
nQueued++;
-
+EXIT:
+ LOG_FUNCTION_NAME_EXIT;
return ret;
}
-status_t V4LCameraAdapter::setParameters(const CameraParameters &params)
+status_t V4LCameraAdapter::setParameters(const android::CameraParameters &params)
{
- LOG_FUNCTION_NAME;
-
status_t ret = NO_ERROR;
-
int width, height;
+ struct v4l2_streamparm streamParams;
- params.getPreviewSize(&width, &height);
-
- CAMHAL_LOGDB("Width * Height %d x %d format 0x%x", width, height, DEFAULT_PIXEL_FORMAT);
-
- mVideoInfo->width = width;
- mVideoInfo->height = height;
- mVideoInfo->framesizeIn = (width * height << 1);
- mVideoInfo->formatIn = DEFAULT_PIXEL_FORMAT;
+ LOG_FUNCTION_NAME;
- mVideoInfo->format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
- mVideoInfo->format.fmt.pix.width = width;
- mVideoInfo->format.fmt.pix.height = height;
- mVideoInfo->format.fmt.pix.pixelformat = DEFAULT_PIXEL_FORMAT;
+ if(!mPreviewing && !mCapturing) {
+ params.getPreviewSize(&width, &height);
+ CAMHAL_LOGDB("Width * Height %d x %d format 0x%x", width, height, DEFAULT_PIXEL_FORMAT);
- ret = ioctl(mCameraHandle, VIDIOC_S_FMT, &mVideoInfo->format);
- if (ret < 0) {
- CAMHAL_LOGEB("Open: VIDIOC_S_FMT Failed: %s", strerror(errno));
- return ret;
+ ret = v4lSetFormat( width, height, DEFAULT_PIXEL_FORMAT);
+ if (ret < 0) {
+ CAMHAL_LOGEB(" VIDIOC_S_FMT Failed: %s", strerror(errno));
+ goto EXIT;
+ }
+ //set frame rate
+    // For now it is fixed at 30 FPS
+ streamParams.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ streamParams.parm.capture.capability = V4L2_CAP_TIMEPERFRAME;
+ streamParams.parm.capture.capturemode = V4L2_MODE_HIGHQUALITY;
+ streamParams.parm.capture.timeperframe.denominator = FPS_PERIOD;
+ streamParams.parm.capture.timeperframe.numerator= 1;
+ ret = v4lIoctl(mCameraHandle, VIDIOC_S_PARM, &streamParams);
+ if (ret < 0) {
+ CAMHAL_LOGEB(" VIDIOC_S_PARM Failed: %s", strerror(errno));
+ goto EXIT;
+ }
+ int actualFps = streamParams.parm.capture.timeperframe.denominator / streamParams.parm.capture.timeperframe.numerator;
+ CAMHAL_LOGDB("Actual FPS set is : %d.", actualFps);
}
    // Update the current parameter set
mParams = params;
+EXIT:
LOG_FUNCTION_NAME_EXIT;
return ret;
}
-void V4LCameraAdapter::getParameters(CameraParameters& params)
+void V4LCameraAdapter::getParameters(android::CameraParameters& params)
{
LOG_FUNCTION_NAME;
@@ -193,27 +432,37 @@ void V4LCameraAdapter::getParameters(CameraParameters& params)
///API to give the buffers to Adapter
-status_t V4LCameraAdapter::useBuffers(CameraMode mode, void* bufArr, int num, size_t length, unsigned int queueable)
+status_t V4LCameraAdapter::useBuffers(CameraMode mode, CameraBuffer *bufArr, int num, size_t length, unsigned int queueable)
{
status_t ret = NO_ERROR;
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
switch(mode)
{
case CAMERA_PREVIEW:
+ mPreviewBufferCountQueueable = queueable;
ret = UseBuffersPreview(bufArr, num);
break;
- //@todo Insert Image capture case here
+ case CAMERA_IMAGE_CAPTURE:
+ mCaptureBufferCountQueueable = queueable;
+ ret = UseBuffersCapture(bufArr, num);
+ break;
case CAMERA_VIDEO:
//@warn Video capture is not fully supported yet
+ mPreviewBufferCountQueueable = queueable;
ret = UseBuffersPreview(bufArr, num);
break;
+ case CAMERA_MEASUREMENT:
+ break;
+
+ default:
+ break;
}
LOG_FUNCTION_NAME_EXIT;
@@ -221,172 +470,325 @@ status_t V4LCameraAdapter::useBuffers(CameraMode mode, void* bufArr, int num, si
return ret;
}
-status_t V4LCameraAdapter::UseBuffersPreview(void* bufArr, int num)
-{
+status_t V4LCameraAdapter::UseBuffersCapture(CameraBuffer *bufArr, int num) {
int ret = NO_ERROR;
- if(NULL == bufArr)
- {
- return BAD_VALUE;
- }
+ LOG_FUNCTION_NAME;
+ if(NULL == bufArr) {
+ ret = BAD_VALUE;
+ goto EXIT;
+ }
- //First allocate adapter internal buffers at V4L level for USB Cam
- //These are the buffers from which we will copy the data into overlay buffers
- /* Check if camera can handle NB_BUFFER buffers */
- mVideoInfo->rb.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
- mVideoInfo->rb.memory = V4L2_MEMORY_MMAP;
- mVideoInfo->rb.count = num;
+ for (int i = 0; i < num; i++) {
+ //Associate each Camera internal buffer with the one from Overlay
+ mCaptureBufs.add(&bufArr[i], i);
+ CAMHAL_LOGDB("capture- buff [%d] = 0x%x ",i, mCaptureBufs.keyAt(i));
+ }
- ret = ioctl(mCameraHandle, VIDIOC_REQBUFS, &mVideoInfo->rb);
- if (ret < 0) {
- CAMHAL_LOGEB("VIDIOC_REQBUFS failed: %s", strerror(errno));
- return ret;
+ mCaptureBuffersAvailable.clear();
+ for (int i = 0; i < mCaptureBufferCountQueueable; i++ ) {
+ mCaptureBuffersAvailable.add(&mCaptureBuffers[i], 0);
}
- for (int i = 0; i < num; i++) {
+    // initial ref count for undequeued buffers is 1 since buffer provider
+ // is still holding on to it
+ for (int i = mCaptureBufferCountQueueable; i < num; i++ ) {
+ mCaptureBuffersAvailable.add(&mCaptureBuffers[i], 1);
+ }
- memset (&mVideoInfo->buf, 0, sizeof (struct v4l2_buffer));
+    // Update the capture buffer count
+ mCaptureBufferCount = num;
+EXIT:
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
- mVideoInfo->buf.index = i;
- mVideoInfo->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
- mVideoInfo->buf.memory = V4L2_MEMORY_MMAP;
+}
- ret = ioctl (mCameraHandle, VIDIOC_QUERYBUF, &mVideoInfo->buf);
- if (ret < 0) {
- CAMHAL_LOGEB("Unable to query buffer (%s)", strerror(errno));
- return ret;
- }
+status_t V4LCameraAdapter::UseBuffersPreview(CameraBuffer *bufArr, int num)
+{
+ int ret = NO_ERROR;
+ LOG_FUNCTION_NAME;
- mVideoInfo->mem[i] = mmap (0,
- mVideoInfo->buf.length,
- PROT_READ | PROT_WRITE,
- MAP_SHARED,
- mCameraHandle,
- mVideoInfo->buf.m.offset);
+ if(NULL == bufArr) {
+ ret = BAD_VALUE;
+ goto EXIT;
+ }
- if (mVideoInfo->mem[i] == MAP_FAILED) {
- CAMHAL_LOGEB("Unable to map buffer (%s)", strerror(errno));
- return -1;
+ ret = v4lInitMmap(num);
+ if (ret == NO_ERROR) {
+ for (int i = 0; i < num; i++) {
+ //Associate each Camera internal buffer with the one from Overlay
+ mPreviewBufs.add(&bufArr[i], i);
+ CAMHAL_LOGDB("Preview- buff [%d] = 0x%x ",i, mPreviewBufs.keyAt(i));
}
- uint32_t *ptr = (uint32_t*) bufArr;
+ // Update the preview buffer count
+ mPreviewBufferCount = num;
+ }
+EXIT:
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+}
- //Associate each Camera internal buffer with the one from Overlay
- mPreviewBufs.add((int)ptr[i], i);
+status_t V4LCameraAdapter::takePicture() {
+ status_t ret = NO_ERROR;
+ int width = 0;
+ int height = 0;
+ size_t yuv422i_buff_size = 0;
+ int index = 0;
+ char *fp = NULL;
+ CameraBuffer *buffer = NULL;
+ CameraFrame frame;
+
+ LOG_FUNCTION_NAME;
+
+ android::AutoMutex lock(mCaptureBufsLock);
+ if(mCapturing) {
+ CAMHAL_LOGEA("Already Capture in Progress...");
+ ret = BAD_VALUE;
+ goto EXIT;
}
- // Update the preview buffer count
- mPreviewBufferCount = num;
+ mCapturing = true;
+ mPreviewing = false;
- return ret;
-}
+ // Stop preview streaming
+ ret = v4lStopStreaming(mPreviewBufferCount);
+ if (ret < 0 ) {
+ CAMHAL_LOGEB("v4lStopStreaming Failed: %s", strerror(errno));
+ goto EXIT;
+ }
-status_t V4LCameraAdapter::startPreview()
-{
- status_t ret = NO_ERROR;
+ //configure for capture image size and pixel format.
+ mParams.getPictureSize(&width, &height);
+ CAMHAL_LOGDB("Image Capture Size WxH = %dx%d",width,height);
+ yuv422i_buff_size = width * height * 2;
- Mutex::Autolock lock(mPreviewBufsLock);
+ ret = v4lSetFormat (width, height, DEFAULT_PIXEL_FORMAT);
+ if (ret < 0) {
+ CAMHAL_LOGEB("v4lSetFormat Failed: %s", strerror(errno));
+ goto EXIT;
+ }
- if(mPreviewing)
- {
- return BAD_VALUE;
+ ret = v4lInitMmap(mCaptureBufferCount);
+ if (ret < 0) {
+ CAMHAL_LOGEB("v4lInitMmap Failed: %s", strerror(errno));
+ goto EXIT;
}
- for (int i = 0; i < mPreviewBufferCount; i++) {
+ for (int i = 0; i < mCaptureBufferCountQueueable; i++) {
mVideoInfo->buf.index = i;
mVideoInfo->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
mVideoInfo->buf.memory = V4L2_MEMORY_MMAP;
- ret = ioctl(mCameraHandle, VIDIOC_QBUF, &mVideoInfo->buf);
+ ret = v4lIoctl(mCameraHandle, VIDIOC_QBUF, &mVideoInfo->buf);
if (ret < 0) {
CAMHAL_LOGEA("VIDIOC_QBUF Failed");
- return -EINVAL;
+ ret = BAD_VALUE;
+ goto EXIT;
}
-
nQueued++;
- }
+ }
- enum v4l2_buf_type bufType;
- if (!mVideoInfo->isStreaming) {
- bufType = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ ret = v4lStartStreaming();
+ if (ret < 0) {
+ CAMHAL_LOGEB("v4lStartStreaming Failed: %s", strerror(errno));
+ goto EXIT;
+ }
- ret = ioctl (mCameraHandle, VIDIOC_STREAMON, &bufType);
- if (ret < 0) {
- CAMHAL_LOGEB("StartStreaming: Unable to start capture: %s", strerror(errno));
- return ret;
- }
+ CAMHAL_LOGDA("Streaming started for Image Capture");
+
+ //get the frame and send to encode as JPG
+ fp = this->GetFrame(index);
+ if(!fp) {
+ CAMHAL_LOGEA("!!! Captured frame is NULL !!!!");
+ ret = BAD_VALUE;
+ goto EXIT;
+ }
- mVideoInfo->isStreaming = true;
- }
+ CAMHAL_LOGDA("::Capture Frame received from V4L::");
+ buffer = mCaptureBufs.keyAt(index);
+ CAMHAL_LOGVB("## captureBuf[%d] = 0x%x, yuv422i_buff_size=%d", index, buffer->opaque, yuv422i_buff_size);
- // Create and start preview thread for receiving buffers from V4L Camera
- mPreviewThread = new PreviewThread(this);
+ //copy the yuv422i data to the image buffer.
+ memcpy(buffer->opaque, fp, yuv422i_buff_size);
- CAMHAL_LOGDA("Created preview thread");
+#ifdef DUMP_CAPTURE_FRAME
+ //dump the YUV422 buffer in to a file
+ //a folder should have been created at /data/misc/camera/raw/
+ {
+ int fd =-1;
+ fd = open("/data/misc/camera/raw/captured_yuv422i_dump.yuv", O_CREAT | O_WRONLY | O_SYNC | O_TRUNC, 0777);
+ if(fd < 0) {
+            CAMHAL_LOGEB("Unable to open file: %s", strerror(errno));
+ }
+ else {
+ write(fd, fp, yuv422i_buff_size );
+ close(fd);
+ CAMHAL_LOGDB("::Captured Frame dumped at /data/misc/camera/raw/captured_yuv422i_dump.yuv::");
+ }
+ }
+#endif
+
+ CAMHAL_LOGDA("::sending capture frame to encoder::");
+ frame.mFrameType = CameraFrame::IMAGE_FRAME;
+ frame.mBuffer = buffer;
+ frame.mLength = yuv422i_buff_size;
+ frame.mWidth = width;
+ frame.mHeight = height;
+ frame.mAlignment = width*2;
+ frame.mOffset = 0;
+ frame.mTimestamp = systemTime(SYSTEM_TIME_MONOTONIC);
+ frame.mFrameMask = (unsigned int)CameraFrame::IMAGE_FRAME;
+ frame.mQuirks |= CameraFrame::ENCODE_RAW_YUV422I_TO_JPEG;
+ frame.mQuirks |= CameraFrame::FORMAT_YUV422I_YUYV;
+
+ ret = setInitFrameRefCount(frame.mBuffer, frame.mFrameMask);
+ if (ret != NO_ERROR) {
+ CAMHAL_LOGDB("Error in setInitFrameRefCount %d", ret);
+ } else {
+ ret = sendFrameToSubscribers(&frame);
+ }
+ // Stop streaming after image capture
+ ret = v4lStopStreaming(mCaptureBufferCount);
+ if (ret < 0 ) {
+ CAMHAL_LOGEB("v4lStopStreaming Failed: %s", strerror(errno));
+ goto EXIT;
+ }
- //Update the flag to indicate we are previewing
- mPreviewing = true;
+ ret = restartPreview();
+EXIT:
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+}
- return ret;
+status_t V4LCameraAdapter::stopImageCapture()
+{
+ status_t ret = NO_ERROR;
+ LOG_FUNCTION_NAME;
+ //Release image buffers
+ if ( NULL != mReleaseImageBuffersCallback ) {
+ mReleaseImageBuffersCallback(mReleaseData);
+ }
+ mCaptureBufs.clear();
+
+ mCapturing = false;
+ mPreviewing = true;
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
}
-status_t V4LCameraAdapter::stopPreview()
+status_t V4LCameraAdapter::autoFocus()
{
- enum v4l2_buf_type bufType;
- int ret = NO_ERROR;
+ status_t ret = NO_ERROR;
+ LOG_FUNCTION_NAME;
- Mutex::Autolock lock(mPreviewBufsLock);
+ //autoFocus is not implemented. Just return.
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+}
- if(!mPreviewing)
- {
- return NO_INIT;
- }
+status_t V4LCameraAdapter::startPreview()
+{
+ status_t ret = NO_ERROR;
- if (mVideoInfo->isStreaming) {
- bufType = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ LOG_FUNCTION_NAME;
+ android::AutoMutex lock(mPreviewBufsLock);
+
+ if(mPreviewing) {
+ ret = BAD_VALUE;
+ goto EXIT;
+ }
+
+ for (int i = 0; i < mPreviewBufferCountQueueable; i++) {
+
+ mVideoInfo->buf.index = i;
+ mVideoInfo->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ mVideoInfo->buf.memory = V4L2_MEMORY_MMAP;
- ret = ioctl (mCameraHandle, VIDIOC_STREAMOFF, &bufType);
+ ret = v4lIoctl(mCameraHandle, VIDIOC_QBUF, &mVideoInfo->buf);
if (ret < 0) {
- CAMHAL_LOGEB("StopStreaming: Unable to stop capture: %s", strerror(errno));
- return ret;
+ CAMHAL_LOGEA("VIDIOC_QBUF Failed");
+ goto EXIT;
}
+ nQueued++;
+ }
- mVideoInfo->isStreaming = false;
+ ret = v4lStartStreaming();
+
+ // Create and start preview thread for receiving buffers from V4L Camera
+ if(!mCapturing) {
+ mPreviewThread = new PreviewThread(this);
+ CAMHAL_LOGDA("Created preview thread");
}
- mVideoInfo->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
- mVideoInfo->buf.memory = V4L2_MEMORY_MMAP;
+ //Update the flag to indicate we are previewing
+ mPreviewing = true;
+ mCapturing = false;
+
+EXIT:
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+}
+
+status_t V4LCameraAdapter::stopPreview()
+{
+ enum v4l2_buf_type bufType;
+ int ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+ android::AutoMutex lock(mStopPreviewLock);
+
+ if(!mPreviewing) {
+ return NO_INIT;
+ }
+ mPreviewing = false;
+
+ ret = v4lStopStreaming(mPreviewBufferCount);
+ if (ret < 0) {
+ CAMHAL_LOGEB("StopStreaming: FAILED: %s", strerror(errno));
+ }
nQueued = 0;
nDequeued = 0;
-
- /* Unmap buffers */
- for (int i = 0; i < mPreviewBufferCount; i++)
- if (munmap(mVideoInfo->mem[i], mVideoInfo->buf.length) < 0)
- CAMHAL_LOGEA("Unmap failed");
+ mFramesWithEncoder = 0;
mPreviewBufs.clear();
mPreviewThread->requestExitAndWait();
mPreviewThread.clear();
+ LOG_FUNCTION_NAME_EXIT;
return ret;
-
}
char * V4LCameraAdapter::GetFrame(int &index)
{
- int ret;
+ int ret = NO_ERROR;
+ LOG_FUNCTION_NAME;
mVideoInfo->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
mVideoInfo->buf.memory = V4L2_MEMORY_MMAP;
/* DQ */
- ret = ioctl(mCameraHandle, VIDIOC_DQBUF, &mVideoInfo->buf);
+ // Some V4L drivers, notably uvc, protect each incoming call with
+    // a driver-wide mutex. If we used poll() or a blocking VIDIOC_DQBUF ioctl
+    // here, we could sometimes deadlock against the VIDIOC_QBUF ioctl.
+ while(true) {
+ if(!mVideoInfo->isStreaming) {
+ return NULL;
+ }
+
+ ret = v4lIoctl(mCameraHandle, VIDIOC_DQBUF, &mVideoInfo->buf);
+ if((ret == 0) || (errno != EAGAIN)) {
+ break;
+ }
+ }
+
if (ret < 0) {
CAMHAL_LOGEA("GetFrame: VIDIOC_DQBUF Failed");
return NULL;
@@ -395,6 +797,7 @@ char * V4LCameraAdapter::GetFrame(int &index)
index = mVideoInfo->buf.index;
+ LOG_FUNCTION_NAME_EXIT;
return (char *)mVideoInfo->mem[mVideoInfo->buf.index];
}
@@ -403,6 +806,7 @@ char * V4LCameraAdapter::GetFrame(int &index)
status_t V4LCameraAdapter::getFrameSize(size_t &width, size_t &height)
{
status_t ret = NO_ERROR;
+ LOG_FUNCTION_NAME;
// Just return the current preview size, nothing more to do here.
mParams.getPreviewSize(( int * ) &width,
@@ -419,9 +823,23 @@ status_t V4LCameraAdapter::getFrameDataSize(size_t &dataFrameSize, size_t buffer
return NO_ERROR;
}
-status_t V4LCameraAdapter::getPictureBufferSize(size_t &length, size_t bufferCount)
+status_t V4LCameraAdapter::getPictureBufferSize(CameraFrame &frame, size_t bufferCount)
{
- // We don't support image capture yet, safely return from here without messing up
+ int width = 0;
+ int height = 0;
+ int bytesPerPixel = 2; // for YUV422i; default pixel format
+
+ LOG_FUNCTION_NAME;
+
+ mParams.getPictureSize( &width, &height );
+ frame.mLength = width * height * bytesPerPixel;
+ frame.mWidth = width;
+ frame.mHeight = height;
+ frame.mAlignment = width * bytesPerPixel;
+
+ CAMHAL_LOGDB("Picture size: W x H = %u x %u (size=%u bytes, alignment=%u bytes)",
+ frame.mWidth, frame.mHeight, frame.mLength, frame.mAlignment);
+ LOG_FUNCTION_NAME_EXIT;
return NO_ERROR;
}
@@ -431,16 +849,17 @@ static void debugShowFPS()
static int mLastFrameCount = 0;
static nsecs_t mLastFpsTime = 0;
static float mFps = 0;
- mFrameCount++;
- if (!(mFrameCount & 0x1F)) {
- nsecs_t now = systemTime();
- nsecs_t diff = now - mLastFpsTime;
- mFps = ((mFrameCount - mLastFrameCount) * float(s2ns(1))) / diff;
- mLastFpsTime = now;
- mLastFrameCount = mFrameCount;
- ALOGD("Camera %d Frames, %f FPS", mFrameCount, mFps);
+ if(mDebugFps) {
+ mFrameCount++;
+ if (!(mFrameCount & 0x1F)) {
+ nsecs_t now = systemTime();
+ nsecs_t diff = now - mLastFpsTime;
+ mFps = ((mFrameCount - mLastFrameCount) * float(s2ns(1))) / diff;
+ mLastFpsTime = now;
+ mLastFrameCount = mFrameCount;
+ CAMHAL_LOGI("Camera %d Frames, %f FPS", mFrameCount, mFps);
+ }
}
- // XXX: mFPS has the value we want
}
status_t V4LCameraAdapter::recalculateFPS()
@@ -487,6 +906,7 @@ V4LCameraAdapter::V4LCameraAdapter(size_t sensor_index)
LOG_FUNCTION_NAME;
// Nothing useful to do in the constructor
+ mFramesWithEncoder = 0;
LOG_FUNCTION_NAME_EXIT;
}
@@ -507,6 +927,202 @@ V4LCameraAdapter::~V4LCameraAdapter()
LOG_FUNCTION_NAME_EXIT;
}
+static void convertYUV422i_yuyvTouyvy(uint8_t *src, uint8_t *dest, size_t size ) {
+ //convert YUV422I yuyv to uyvy format.
+ uint32_t *bf = (uint32_t*)src;
+ uint32_t *dst = (uint32_t*)dest;
+
+ LOG_FUNCTION_NAME;
+
+ if (!src || !dest) {
+ return;
+ }
+
+ for(size_t i = 0; i < size; i = i+4)
+ {
+ dst[0] = ((bf[0] & 0x00FF00FF) << 8) | ((bf[0] & 0xFF00FF00) >> 8);
+ bf++;
+ dst++;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+static void convertYUV422ToNV12Tiler(unsigned char *src, unsigned char *dest, int width, int height ) {
+ //convert YUV422I to YUV420 NV12 format and copies directly to preview buffers (Tiler memory).
+ int stride = 4096;
+ unsigned char *bf = src;
+ unsigned char *dst_y = dest;
+ unsigned char *dst_uv = dest + ( height * stride);
+#ifdef PPM_PER_FRAME_CONVERSION
+ static int frameCount = 0;
+ static nsecs_t ppm_diff = 0;
+ nsecs_t ppm_start = systemTime();
+#endif
+
+ LOG_FUNCTION_NAME;
+
+ if (width % 16 ) {
+ for(int i = 0; i < height; i++) {
+ for(int j = 0; j < width; j++) {
+ *dst_y = *bf;
+ dst_y++;
+ bf = bf + 2;
+ }
+ dst_y += (stride - width);
+ }
+
+ bf = src;
+ bf++; //UV sample
+ for(int i = 0; i < height/2; i++) {
+ for(int j=0; j<width; j++) {
+ *dst_uv = *bf;
+ dst_uv++;
+ bf = bf + 2;
+ }
+ bf = bf + width*2;
+ dst_uv = dst_uv + (stride - width);
+ }
+ } else {
+ //neon conversion
+ for(int i = 0; i < height; i++) {
+ int n = width;
+ int skip = i & 0x1; // skip uv elements for the odd rows
+ asm volatile (
+ " pld [%[src], %[src_stride], lsl #2] \n\t"
+ " cmp %[n], #16 \n\t"
+ " blt 5f \n\t"
+ "0: @ 16 pixel copy \n\t"
+ " vld2.8 {q0, q1} , [%[src]]! @ q0 = yyyy.. q1 = uvuv.. \n\t"
+ " @ now q0 = y q1 = uv \n\t"
+ " vst1.32 {d0,d1}, [%[dst_y]]! \n\t"
+ " cmp %[skip], #0 \n\t"
+ " bne 1f \n\t"
+ " vst1.32 {d2,d3},[%[dst_uv]]! \n\t"
+ "1: @ skip odd rows for UV \n\t"
+ " sub %[n], %[n], #16 \n\t"
+ " cmp %[n], #16 \n\t"
+ " bge 0b \n\t"
+ "5: @ end \n\t"
+#ifdef NEEDS_ARM_ERRATA_754319_754320
+ " vmov s0,s0 @ add noop for errata item \n\t"
+#endif
+ : [dst_y] "+r" (dst_y), [dst_uv] "+r" (dst_uv), [src] "+r" (src), [n] "+r" (n)
+ : [src_stride] "r" (width), [skip] "r" (skip)
+ : "cc", "memory", "q0", "q1", "q2", "d0", "d1", "d2", "d3"
+ );
+ dst_y = dst_y + (stride - width);
+ if (skip == 0) {
+ dst_uv = dst_uv + (stride - width);
+ }
+ } //end of for()
+ }
+
+#ifdef PPM_PER_FRAME_CONVERSION
+ ppm_diff += (systemTime() - ppm_start);
+ frameCount++;
+
+ if (frameCount >= 30) {
+ ppm_diff = ppm_diff / frameCount;
+ LOGD("PPM: YUV422i to NV12 Conversion(%d x %d): %llu us ( %llu ms )", width, height,
+ ns2us(ppm_diff), ns2ms(ppm_diff) );
+ ppm_diff = 0;
+ frameCount = 0;
+ }
+#endif
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+static void convertYUV422ToNV12(unsigned char *src, unsigned char *dest, int width, int height ) {
+ //convert YUV422I to YUV420 NV12 format.
+ unsigned char *bf = src;
+ unsigned char *dst_y = dest;
+ unsigned char *dst_uv = dest + (width * height);
+
+ LOG_FUNCTION_NAME;
+
+ if (width % 16 ) {
+ for(int i = 0; i < height; i++) {
+ for(int j = 0; j < width; j++) {
+ *dst_y = *bf;
+ dst_y++;
+ bf = bf + 2;
+ }
+ }
+
+ bf = src;
+ bf++; //UV sample
+ for(int i = 0; i < height/2; i++) {
+ for(int j=0; j<width; j++) {
+ *dst_uv = *bf;
+ dst_uv++;
+ bf = bf + 2;
+ }
+ bf = bf + width*2;
+ }
+ } else {
+ //neon conversion
+ for(int i = 0; i < height; i++) {
+ int n = width;
+ int skip = i & 0x1; // skip uv elements for the odd rows
+ asm volatile (
+ " pld [%[src], %[src_stride], lsl #2] \n\t"
+ " cmp %[n], #16 \n\t"
+ " blt 5f \n\t"
+ "0: @ 16 pixel copy \n\t"
+ " vld2.8 {q0, q1} , [%[src]]! @ q0 = yyyy.. q1 = uvuv.. \n\t"
+ " @ now q0 = y q1 = uv \n\t"
+ " vst1.32 {d0,d1}, [%[dst_y]]! \n\t"
+ " cmp %[skip], #0 \n\t"
+ " bne 1f \n\t"
+ " vst1.32 {d2,d3},[%[dst_uv]]! \n\t"
+ "1: @ skip odd rows for UV \n\t"
+ " sub %[n], %[n], #16 \n\t"
+ " cmp %[n], #16 \n\t"
+ " bge 0b \n\t"
+ "5: @ end \n\t"
+#ifdef NEEDS_ARM_ERRATA_754319_754320
+ " vmov s0,s0 @ add noop for errata item \n\t"
+#endif
+ : [dst_y] "+r" (dst_y), [dst_uv] "+r" (dst_uv), [src] "+r" (src), [n] "+r" (n)
+ : [src_stride] "r" (width), [skip] "r" (skip)
+ : "cc", "memory", "q0", "q1", "q2", "d0", "d1", "d2", "d3"
+ );
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+#ifdef SAVE_RAW_FRAMES
+void saveFile(unsigned char* buff, int buff_size) {
+ static int counter = 1;
+ int fd = -1;
+ char fn[256];
+
+ LOG_FUNCTION_NAME;
+ if (counter > 3) {
+ return;
+ }
+ //dump nv12 buffer
+ counter++;
+ sprintf(fn, "/data/misc/camera/raw/nv12_dump_%03d.yuv", counter);
+ CAMHAL_LOGEB("Dumping nv12 frame to a file : %s.", fn);
+
+ fd = open(fn, O_CREAT | O_WRONLY | O_SYNC | O_TRUNC, 0777);
+ if(fd < 0) {
+        CAMHAL_LOGE("Unable to open file %s: %s", fn, strerror(errno));
+ return;
+ }
+
+ write(fd, buff, buff_size );
+ close(fd);
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+#endif
+
/* Preview Thread */
// ---------------------------------------------------------------------------
@@ -515,63 +1131,115 @@ int V4LCameraAdapter::previewThread()
status_t ret = NO_ERROR;
int width, height;
CameraFrame frame;
+ void *y_uv[2];
+ int index = 0;
+ int stride = 4096;
+ char *fp = NULL;
- if (mPreviewing)
- {
- int index = 0;
- char *fp = this->GetFrame(index);
- if(!fp)
- {
- return BAD_VALUE;
- }
+ mParams.getPreviewSize(&width, &height);
- uint8_t* ptr = (uint8_t*) mPreviewBufs.keyAt(index);
+ if (mPreviewing) {
- int width, height;
- uint16_t* dest = (uint16_t*)ptr;
- uint16_t* src = (uint16_t*) fp;
- mParams.getPreviewSize(&width, &height);
- for(int i=0;i<height;i++)
- {
- for(int j=0;j<width;j++)
- {
- //*dest = *src;
- //convert from YUYV to UYVY supported in Camera service
- *dest = (((*src & 0xFF000000)>>24)<<16)|(((*src & 0x00FF0000)>>16)<<24) |
- (((*src & 0xFF00)>>8)<<0)|(((*src & 0x00FF)>>0)<<8);
- src++;
- dest++;
- }
- dest += 4096/2-width;
- }
+ fp = this->GetFrame(index);
+ if(!fp) {
+ ret = BAD_VALUE;
+ goto EXIT;
+ }
+ CameraBuffer *buffer = mPreviewBufs.keyAt(index);
+ CameraFrame *lframe = (CameraFrame *)mFrameQueue.valueFor(buffer);
+ if (!lframe) {
+ ret = BAD_VALUE;
+ goto EXIT;
+ }
+
+ debugShowFPS();
+
+ if ( mFrameSubscribers.size() == 0 ) {
+ ret = BAD_VALUE;
+ goto EXIT;
+ }
+ y_uv[0] = (void*) lframe->mYuv[0];
+ //y_uv[1] = (void*) lframe->mYuv[1];
+ //y_uv[1] = (void*) (lframe->mYuv[0] + height*stride);
+ convertYUV422ToNV12Tiler ( (unsigned char*)fp, (unsigned char*)y_uv[0], width, height);
+ CAMHAL_LOGVB("##...index= %d.;camera buffer= 0x%x; y= 0x%x; UV= 0x%x.",index, buffer, y_uv[0], y_uv[1] );
+
+#ifdef SAVE_RAW_FRAMES
+ unsigned char* nv12_buff = (unsigned char*) malloc(width*height*3/2);
+ //Convert yuv422i to yuv420sp(NV12) & dump the frame to a file
+ convertYUV422ToNV12 ( (unsigned char*)fp, nv12_buff, width, height);
+ saveFile( nv12_buff, ((width*height)*3/2) );
+ free (nv12_buff);
+#endif
- mParams.getPreviewSize(&width, &height);
frame.mFrameType = CameraFrame::PREVIEW_FRAME_SYNC;
- frame.mBuffer = ptr;
- frame.mLength = width*height*2;
- frame.mAlignment = width*2;
+ frame.mBuffer = buffer;
+ frame.mLength = width*height*3/2;
+ frame.mAlignment = stride;
frame.mOffset = 0;
- frame.mTimestamp = systemTime(SYSTEM_TIME_MONOTONIC);;
+ frame.mTimestamp = systemTime(SYSTEM_TIME_MONOTONIC);
+ frame.mFrameMask = (unsigned int)CameraFrame::PREVIEW_FRAME_SYNC;
- ret = sendFrameToSubscribers(&frame);
+ if (mRecording)
+ {
+ frame.mFrameMask |= (unsigned int)CameraFrame::VIDEO_FRAME_SYNC;
+ mFramesWithEncoder++;
+ }
+ ret = setInitFrameRefCount(frame.mBuffer, frame.mFrameMask);
+ if (ret != NO_ERROR) {
+ CAMHAL_LOGDB("Error in setInitFrameRefCount %d", ret);
+ } else {
+ ret = sendFrameToSubscribers(&frame);
}
+ }
+EXIT:
return ret;
}
-extern "C" CameraAdapter* CameraAdapter_Factory()
+//scan for video devices
+void detectVideoDevice(char** video_device_list, int& num_device) {
+ char dir_path[20];
+ char* filename;
+ char** dev_list = video_device_list;
+ DIR *d;
+ struct dirent *dir;
+ int index = 0;
+
+ strcpy(dir_path, DEVICE_PATH);
+ d = opendir(dir_path);
+ if(d) {
+ //read each entry in the /dev/ and find if there is videox entry.
+ while ((dir = readdir(d)) != NULL) {
+ filename = dir->d_name;
+ if (strncmp(filename, DEVICE_NAME, 5) == 0) {
+ strcpy(dev_list[index],DEVICE_PATH);
+ strncat(dev_list[index],filename,sizeof(DEVICE_NAME));
+ index++;
+ }
+ } //end of while()
+ closedir(d);
+ num_device = index;
+
+ for(int i=0; i<index; i++){
+ CAMHAL_LOGDB("Video device list::dev_list[%d]= %s",i,dev_list[i]);
+ }
+ }
+}
+
+extern "C" CameraAdapter* V4LCameraAdapter_Factory(size_t sensor_index)
{
CameraAdapter *adapter = NULL;
- Mutex::Autolock lock(gAdapterLock);
+ android::AutoMutex lock(gV4LAdapterLock);
LOG_FUNCTION_NAME;
adapter = new V4LCameraAdapter(sensor_index);
if ( adapter ) {
- CAMHAL_LOGDB("New OMX Camera adapter instance created for sensor %d",sensor_index);
+ CAMHAL_LOGDB("New V4L Camera adapter instance created for sensor %d",sensor_index);
} else {
- CAMHAL_LOGEA("Camera adapter create failed!");
+ CAMHAL_LOGEA("V4L Camera adapter create failed for sensor index = %d!",sensor_index);
}
LOG_FUNCTION_NAME_EXIT;
@@ -579,32 +1247,91 @@ extern "C" CameraAdapter* CameraAdapter_Factory()
return adapter;
}
-extern "C" int CameraAdapter_Capabilities(CameraProperties::Properties* properties_array,
- const unsigned int starting_camera,
- const unsigned int max_camera) {
+extern "C" status_t V4LCameraAdapter_Capabilities(
+ CameraProperties::Properties * const properties_array,
+ const int starting_camera, const int max_camera, int & supportedCameras)
+{
+ status_t ret = NO_ERROR;
+ struct v4l2_capability cap;
+ int tempHandle = NULL;
int num_cameras_supported = 0;
+ char device_list[5][15];
+ char* video_device_list[5];
+ int num_v4l_devices = 0;
+ int sensorId = 0;
CameraProperties::Properties* properties = NULL;
LOG_FUNCTION_NAME;
- if(!properties_array)
- {
- return -EINVAL;
+ supportedCameras = 0;
+ memset((void*)&cap, 0, sizeof(v4l2_capability));
+
+ if (!properties_array) {
+ CAMHAL_LOGEB("invalid param: properties = 0x%p", properties_array);
+ LOG_FUNCTION_NAME_EXIT;
+ return BAD_VALUE;
}
- // TODO: Need to tell camera properties what other cameras we can support
- if (starting_camera + num_cameras_supported < max_camera) {
- num_cameras_supported++;
- properties = properties_array + starting_camera;
- properties->set(CameraProperties::CAMERA_NAME, "USBCamera");
+ for (int i = 0; i < 5; i++) {
+ video_device_list[i] = device_list[i];
}
+ //look for the connected video devices
+ detectVideoDevice(video_device_list, num_v4l_devices);
- LOG_FUNCTION_NAME_EXIT;
+ for (int i = 0; i < num_v4l_devices; i++) {
+ if ( (starting_camera + num_cameras_supported) < max_camera) {
+ sensorId = starting_camera + num_cameras_supported;
+
+ CAMHAL_LOGDB("Opening device[%d] = %s..",i, video_device_list[i]);
+ if ((tempHandle = open(video_device_list[i], O_RDWR)) == -1) {
+ CAMHAL_LOGEB("Error while opening handle to V4L2 Camera(%s): %s",video_device_list[i], strerror(errno));
+ continue;
+ }
+
+ ret = ioctl (tempHandle, VIDIOC_QUERYCAP, &cap);
+ if (ret < 0) {
+ CAMHAL_LOGEA("Error when querying the capabilities of the V4L Camera");
+ close(tempHandle);
+ continue;
+ }
+
+ //check for video capture devices
+ if ((cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) == 0) {
+ CAMHAL_LOGEA("Error while adapter initialization: video capture not supported.");
+ close(tempHandle);
+ continue;
+ }
+
+ strcpy(device, video_device_list[i]);
+ properties = properties_array + starting_camera + num_cameras_supported;
+
+ //fetch capabilities for this camera
+ ret = V4LCameraAdapter::getCaps( sensorId, properties, tempHandle );
+ if (ret < 0) {
+ CAMHAL_LOGEA("Error while getting capabilities.");
+ close(tempHandle);
+ continue;
+ }
+
+ num_cameras_supported++;
+
+ }
+ //For now, exit this loop once a valid video capture device is found.
+ //TODO: enumerate all V4L capture devices and their capabilities.
+ break;
+ }//end of for() loop
- return num_cameras_supported;
+ supportedCameras = num_cameras_supported;
+ CAMHAL_LOGDB("Number of V4L cameras detected =%d", num_cameras_supported);
+
+EXIT:
+ LOG_FUNCTION_NAME_EXIT;
+ close(tempHandle);
+ return NO_ERROR;
}
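// Illustrative caller sketch (hypothetical names -- the enumeration code lives outside this
// file): the entry point above is expected to be invoked during camera enumeration with the
// index of the first free properties slot, roughly like:
//
//   int detected = 0;
//   status_t err = V4LCameraAdapter_Capabilities(properties_array, camerasFound,
//                                                maxCameras, detected);
//   if (err == NO_ERROR) camerasFound += detected;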
-};
+} // namespace Camera
+} // namespace Ti
/*--------------------Camera Adapter Class ENDS here-----------------------------*/
diff --git a/camera/V4LCameraAdapter/V4LCapabilities.cpp b/camera/V4LCameraAdapter/V4LCapabilities.cpp
new file mode 100644
index 0000000..575f943
--- /dev/null
+++ b/camera/V4LCameraAdapter/V4LCapabilities.cpp
@@ -0,0 +1,367 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+* @file V4LCapabilities.cpp
+*
+* This file implements the V4L Capabilities feature.
+*
+*/
+
+#include "CameraHal.h"
+#include "V4LCameraAdapter.h"
+#include "ErrorUtils.h"
+#include "TICameraParameters.h"
+
+namespace Ti {
+namespace Camera {
+
+/************************************
+ * global constants and variables
+ *************************************/
+
+#define ARRAY_SIZE(array) (sizeof((array)) / sizeof((array)[0]))
+#define MAX_RES_STRING_LENGTH 10
+#define DEFAULT_WIDTH 640
+#define DEFAULT_HEIGHT 480
+
+static const char PARAM_SEP[] = ",";
+
+//Camera defaults
+const char V4LCameraAdapter::DEFAULT_PICTURE_FORMAT[] = "jpeg";
+const char V4LCameraAdapter::DEFAULT_PICTURE_SIZE[] = "640x480";
+const char V4LCameraAdapter::DEFAULT_PREVIEW_FORMAT[] = "yuv422i-yuyv";
+const char V4LCameraAdapter::DEFAULT_PREVIEW_SIZE[] = "640x480";
+const char V4LCameraAdapter::DEFAULT_NUM_PREV_BUFS[] = "6";
+const char V4LCameraAdapter::DEFAULT_FRAMERATE[] = "30";
+const char V4LCameraAdapter::DEFAULT_FOCUS_MODE[] = "infinity";
+const char * V4LCameraAdapter::DEFAULT_VSTAB = android::CameraParameters::FALSE;
+const char * V4LCameraAdapter::DEFAULT_VNF = android::CameraParameters::FALSE;
+
+
+const CapPixelformat V4LCameraAdapter::mPixelformats [] = {
+ { V4L2_PIX_FMT_YUYV, android::CameraParameters::PIXEL_FORMAT_YUV422I },
+ { V4L2_PIX_FMT_JPEG, android::CameraParameters::PIXEL_FORMAT_JPEG },
+};
+
+/*****************************************
+ * internal static function declarations
+ *****************************************/
+
+/**** Utility functions to help translate V4L Caps to Parameter ****/
+
+status_t V4LCameraAdapter::insertDefaults(CameraProperties::Properties* params, V4L_TI_CAPTYPE &caps)
+{
+ status_t ret = NO_ERROR;
+ LOG_FUNCTION_NAME;
+
+ params->set(CameraProperties::PREVIEW_FORMAT, DEFAULT_PREVIEW_FORMAT);
+
+ params->set(CameraProperties::PICTURE_FORMAT, DEFAULT_PICTURE_FORMAT);
+ params->set(CameraProperties::PICTURE_SIZE, DEFAULT_PICTURE_SIZE);
+ params->set(CameraProperties::PREVIEW_SIZE, DEFAULT_PREVIEW_SIZE);
+ params->set(CameraProperties::PREVIEW_FRAME_RATE, DEFAULT_FRAMERATE);
+ params->set(CameraProperties::REQUIRED_PREVIEW_BUFS, DEFAULT_NUM_PREV_BUFS);
+ params->set(CameraProperties::FOCUS_MODE, DEFAULT_FOCUS_MODE);
+
+ params->set(CameraProperties::CAMERA_NAME, "USBCAMERA");
+ params->set(CameraProperties::JPEG_THUMBNAIL_SIZE, "320x240");
+ params->set(CameraProperties::JPEG_QUALITY, "90");
+ params->set(CameraProperties::JPEG_THUMBNAIL_QUALITY, "50");
+ params->set(CameraProperties::FRAMERATE_RANGE_SUPPORTED, "(30000,30000)");
+ params->set(CameraProperties::FRAMERATE_RANGE, "30000,30000");
+ params->set(CameraProperties::S3D_PRV_FRAME_LAYOUT, "none");
+ params->set(CameraProperties::SUPPORTED_EXPOSURE_MODES, "auto");
+ params->set(CameraProperties::SUPPORTED_ISO_VALUES, "auto");
+ params->set(CameraProperties::SUPPORTED_ANTIBANDING, "auto");
+ params->set(CameraProperties::SUPPORTED_EFFECTS, "none");
+ params->set(CameraProperties::SUPPORTED_IPP_MODES, "ldc-nsf");
+ params->set(CameraProperties::FACING_INDEX, TICameraParameters::FACING_FRONT);
+ params->set(CameraProperties::ORIENTATION_INDEX, 0);
+ params->set(CameraProperties::SENSOR_ORIENTATION, "0");
+ params->set(CameraProperties::VSTAB, DEFAULT_VSTAB);
+ params->set(CameraProperties::VNF, DEFAULT_VNF);
+
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t V4LCameraAdapter::insertPreviewFormats(CameraProperties::Properties* params, V4L_TI_CAPTYPE &caps) {
+
+ char supported[MAX_PROP_VALUE_LENGTH];
+
+ memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
+ for (int i = 0; i < caps.ulPreviewFormatCount; i++) {
+ for (unsigned int j = 0; j < ARRAY_SIZE(mPixelformats); j++) {
+ if(caps.ePreviewFormats[i] == mPixelformats[j].pixelformat ) {
+ strncat (supported, mPixelformats[j].param, MAX_PROP_VALUE_LENGTH-1 );
+ strncat (supported, PARAM_SEP, 1 );
+ }
+ }
+ }
+ strncat(supported, android::CameraParameters::PIXEL_FORMAT_YUV420P, MAX_PROP_VALUE_LENGTH - 1);
+ params->set(CameraProperties::SUPPORTED_PREVIEW_FORMATS, supported);
+ return NO_ERROR;
+}
+
+status_t V4LCameraAdapter::insertPreviewSizes(CameraProperties::Properties* params, V4L_TI_CAPTYPE &caps) {
+
+ char supported[MAX_PROP_VALUE_LENGTH];
+
+ memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
+ for (int i = 0; i < caps.ulPreviewResCount; i++) {
+ if (supported[0] != '\0') {
+ strncat(supported, PARAM_SEP, 1);
+ }
+ strncat (supported, caps.tPreviewRes[i].param, MAX_PROP_VALUE_LENGTH-1 );
+ }
+
+ params->set(CameraProperties::SUPPORTED_PREVIEW_SIZES, supported);
+ params->set(CameraProperties::SUPPORTED_PREVIEW_SUBSAMPLED_SIZES, supported);
+ return NO_ERROR;
+}
+
+status_t V4LCameraAdapter::insertImageSizes(CameraProperties::Properties* params, V4L_TI_CAPTYPE &caps) {
+
+ char supported[MAX_PROP_VALUE_LENGTH];
+
+ memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
+ for (int i = 0; i < caps.ulCaptureResCount; i++) {
+ if (supported[0] != '\0') {
+ strncat(supported, PARAM_SEP, 1);
+ }
+ strncat (supported, caps.tCaptureRes[i].param, MAX_PROP_VALUE_LENGTH-1 );
+ }
+ params->set(CameraProperties::SUPPORTED_PICTURE_SIZES, supported);
+ return NO_ERROR;
+}
+
+status_t V4LCameraAdapter::insertFrameRates(CameraProperties::Properties* params, V4L_TI_CAPTYPE &caps) {
+
+ char supported[MAX_PROP_VALUE_LENGTH];
+ char temp[10];
+
+ memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
+ for (int i = 0; i < caps.ulFrameRateCount; i++) {
+ snprintf (temp, 10, "%d", caps.ulFrameRates[i] );
+ if (supported[0] != '\0') {
+ strncat(supported, PARAM_SEP, 1);
+ }
+ strncat (supported, temp, MAX_PROP_VALUE_LENGTH-1 );
+ }
+
+ params->set(CameraProperties::SUPPORTED_PREVIEW_FRAME_RATES, supported);
+ return NO_ERROR;
+}
+
+status_t V4LCameraAdapter::insertCapabilities(CameraProperties::Properties* params, V4L_TI_CAPTYPE &caps)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ if ( NO_ERROR == ret ) {
+ ret = insertPreviewFormats(params, caps);
+ }
+
+ if ( NO_ERROR == ret ) {
+ ret = insertImageSizes(params, caps);
+ }
+
+ if ( NO_ERROR == ret ) {
+ ret = insertPreviewSizes(params, caps);
+ }
+
+ if ( NO_ERROR == ret ) {
+ ret = insertFrameRates(params, caps);
+ }
+
+ //Insert Supported Focus modes.
+ params->set(CameraProperties::SUPPORTED_FOCUS_MODES, "infinity");
+
+ params->set(CameraProperties::SUPPORTED_PICTURE_FORMATS, "jpeg");
+
+ if ( NO_ERROR == ret ) {
+ ret = insertDefaults(params, caps);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t V4LCameraAdapter::sortAscend(V4L_TI_CAPTYPE &caps, uint16_t count) {
+ size_t tempRes;
+ size_t w, h, tmpW,tmpH;
+ for (int i=0; i<count; i++) {
+ w = caps.tPreviewRes[i].width;
+ h = caps.tPreviewRes[i].height;
+ tempRes = w*h;
+ for (int j=i+1; j<count; j++) {
+ tmpW = caps.tPreviewRes[j].width;
+ tmpH = caps.tPreviewRes[j].height;
+
+ if (tempRes > (tmpW * tmpH) ) {
+ caps.tPreviewRes[j].width = w;
+ caps.tPreviewRes[j].height = h;
+ w = tmpW;
+ h = tmpH;
+ }
+ }
+ caps.tPreviewRes[i].width = w;
+ caps.tPreviewRes[i].height = h;
+
+ }
+ return NO_ERROR;
+}
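// For example (illustrative values, not from this patch): given preview resolutions
// {1280x720, 320x240, 640x480}, sortAscend() reorders the width/height pairs by pixel
// count into {320x240, 640x480, 1280x720}.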
+
+/*****************************************
+ * public exposed function declarations
+ *****************************************/
+
+status_t V4LCameraAdapter::getCaps(const int sensorId, CameraProperties::Properties* params,
+ V4L_HANDLETYPE handle) {
+ status_t status = NO_ERROR;
+ V4L_TI_CAPTYPE caps;
+ int i = 0;
+ int j = 0;
+ struct v4l2_fmtdesc fmtDesc;
+ struct v4l2_frmsizeenum frmSizeEnum;
+ struct v4l2_frmivalenum frmIvalEnum;
+
+ //get supported pixel formats
+ for ( i = 0; status == NO_ERROR; i++) {
+ fmtDesc.index = i;
+ fmtDesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ status = ioctl (handle, VIDIOC_ENUM_FMT, &fmtDesc);
+ if (status == NO_ERROR) {
+ CAMHAL_LOGDB("fmtDesc[%d].description::pixelformat::flags== (%s::%d::%d)",i, fmtDesc.description,fmtDesc.pixelformat,fmtDesc.flags);
+ caps.ePreviewFormats[i] = fmtDesc.pixelformat;
+ }
+ }
+ caps.ulPreviewFormatCount = i;
+
+ //get preview sizes & capture image sizes
+ status = NO_ERROR;
+ for ( i = 0; status == NO_ERROR; i++) {
+ frmSizeEnum.index = i;
+ //Check for frame sizes for the default pixel format
+ //TODO: Check for frame sizes for all supported pixel formats
+ frmSizeEnum.pixel_format = V4L2_PIX_FMT_YUYV;
+ status = ioctl (handle, VIDIOC_ENUM_FRAMESIZES, &frmSizeEnum);
+ if (status == NO_ERROR) {
+ int width;
+ int height;
+
+ if(frmSizeEnum.type != V4L2_FRMSIZE_TYPE_DISCRETE) {
+ CAMHAL_LOGDB("\nfrmSizeEnum.type = %d", frmSizeEnum.type);
+ CAMHAL_LOGDB("\nmin_width x height = %d x %d ",frmSizeEnum.stepwise.min_width, frmSizeEnum.stepwise.min_height);
+ CAMHAL_LOGDB("\nmax_width x height = %d x %d ",frmSizeEnum.stepwise.max_width, frmSizeEnum.stepwise.max_height);
+ CAMHAL_LOGDB("\nstep width x height = %d x %d ",frmSizeEnum.stepwise.step_width,frmSizeEnum.stepwise.step_height);
+ //TODO: validate populating the sizes when type = V4L2_FRMSIZE_TYPE_STEPWISE
+ width = frmSizeEnum.stepwise.max_width;
+ height = frmSizeEnum.stepwise.max_height;
+ }
+ else {
+ CAMHAL_LOGDB("frmSizeEnum.index[%d].width x height == (%d x %d)", i, frmSizeEnum.discrete.width, frmSizeEnum.discrete.height);
+ width = frmSizeEnum.discrete.width;
+ height = frmSizeEnum.discrete.height;
+ }
+
+ caps.tCaptureRes[i].width = width;
+ caps.tCaptureRes[i].height = height;
+ caps.tPreviewRes[i].width = width;
+ caps.tPreviewRes[i].height = height;
+
+ snprintf(caps.tPreviewRes[i].param, MAX_RES_STRING_LENGTH,"%dx%d",caps.tPreviewRes[i].width,caps.tPreviewRes[i].height);
+ snprintf(caps.tCaptureRes[i].param, MAX_RES_STRING_LENGTH,"%dx%d",caps.tCaptureRes[i].width,caps.tCaptureRes[i].height);
+ }
+ else {
+ caps.ulCaptureResCount = i;
+ caps.ulPreviewResCount = i;
+ }
+ }
+
+ //sort the preview sizes in ascending order
+ sortAscend(caps, caps.ulPreviewResCount);
+
+ //get supported frame rates
+ bool fps30 = false;
+ for ( j=caps.ulPreviewResCount-1; j >= 0; j--) {
+ CAMHAL_LOGDB(" W x H = %d x %d", caps.tPreviewRes[j].width, caps.tPreviewRes[j].height);
+ status = NO_ERROR;
+ for ( i = 0; status == NO_ERROR; i++) {
+ frmIvalEnum.index = i;
+ //Check for supported frame rates for the default pixel format.
+ frmIvalEnum.pixel_format = V4L2_PIX_FMT_YUYV;
+ frmIvalEnum.width = caps.tPreviewRes[j].width;
+ frmIvalEnum.height = caps.tPreviewRes[j].height;
+
+ status = ioctl (handle, VIDIOC_ENUM_FRAMEINTERVALS, &frmIvalEnum);
+ if (status == NO_ERROR) {
+ if(frmIvalEnum.type != V4L2_FRMIVAL_TYPE_DISCRETE) {
+ CAMHAL_LOGDB("frmIvalEnum[%d].type = %d)", i, frmIvalEnum.type);
+ CAMHAL_LOGDB("frmIvalEnum[%d].stepwise.min = %d/%d)", i, frmIvalEnum.stepwise.min.denominator, frmIvalEnum.stepwise.min.numerator);
+ CAMHAL_LOGDB("frmIvalEnum[%d].stepwise.max = %d/%d)", i, frmIvalEnum.stepwise.max.denominator, frmIvalEnum.stepwise.max.numerator);
+ CAMHAL_LOGDB("frmIvalEnum[%d].stepwise.step = %d/%d)", i, frmIvalEnum.stepwise.step.denominator, frmIvalEnum.stepwise.step.numerator);
+ caps.ulFrameRates[i] = (frmIvalEnum.stepwise.max.denominator/frmIvalEnum.stepwise.max.numerator);
+ }
+ else {
+ CAMHAL_LOGDB("frmIvalEnum[%d].frame rate= %d)",i, (frmIvalEnum.discrete.denominator/frmIvalEnum.discrete.numerator));
+ caps.ulFrameRates[i] = (frmIvalEnum.discrete.denominator/frmIvalEnum.discrete.numerator);
+ }
+
+ if (caps.ulFrameRates[i] == 30) {
+ fps30 = true;
+ }
+ }
+ else if (i == 0) {
+ // Frame rate reporting is not guaranteed by every V4L2 driver; assume 30 fps as a default.
+ caps.ulFrameRates[i] = 30;
+ fps30 = true;
+ caps.ulFrameRateCount = 1;
+ } else {
+ CAMHAL_LOGE("caps.ulFrameRateCount = %d",i);
+ caps.ulFrameRateCount = i;
+ }
+ }
+ if(fps30) {
+ break;
+ }
+ }
+
+ if(frmIvalEnum.type != V4L2_FRMIVAL_TYPE_DISCRETE) {
+ //TODO: populate the frame rates when type = V4L2_FRMIVAL_TYPE_STEPWISE;
+ }
+
+ //The disabled block below would restrict the preview list to the highest resolution that supports 30fps.
+/* // For video preview the application chooses the resolution from mediaprofiles.xml,
+ // so all supported preview resolutions must be populated for video mode.
+ caps.tPreviewRes[0].width = caps.tPreviewRes[j].width;
+ caps.tPreviewRes[0].height = caps.tPreviewRes[j].height;
+ snprintf(caps.tPreviewRes[0].param, MAX_RES_STRING_LENGTH,"%dx%d",caps.tPreviewRes[j].width,caps.tPreviewRes[j].height);
+ caps.ulPreviewResCount = 1;
+*/
+ insertCapabilities (params, caps);
+ return NO_ERROR;
+}
+
+
+
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/inc/ANativeWindowDisplayAdapter.h b/camera/inc/ANativeWindowDisplayAdapter.h
index 9cdf45a..560e98d 100644
--- a/camera/inc/ANativeWindowDisplayAdapter.h
+++ b/camera/inc/ANativeWindowDisplayAdapter.h
@@ -20,10 +20,8 @@
#include <ui/GraphicBufferMapper.h>
#include <hal_public.h>
-//temporarily define format here
-#define HAL_PIXEL_FORMAT_TI_NV12 0x100
-
-namespace android {
+namespace Ti {
+namespace Camera {
/**
* Display handler class - This class basically handles the buffer posting to display
@@ -35,7 +33,7 @@ public:
typedef struct
{
- void *mBuffer;
+ CameraBuffer *mBuffer;
void *mUser;
int mOffset;
int mWidth;
@@ -65,7 +63,7 @@ public:
virtual int setPreviewWindow(struct preview_stream_ops *window);
virtual int setFrameProvider(FrameNotifier *frameProvider);
virtual int setErrorHandler(ErrorNotifier *errorNotifier);
- virtual int enableDisplay(int width, int height, struct timeval *refTime = NULL, S3DParameters *s3dParams = NULL);
+ virtual int enableDisplay(int width, int height, struct timeval *refTime = NULL);
virtual int disableDisplay(bool cancel_buffer = true);
virtual status_t pauseDisplay(bool pause);
@@ -76,16 +74,17 @@ public:
#endif
- virtual int useBuffers(void* bufArr, int num);
virtual bool supportsExternalBuffering();
//Implementation of inherited interfaces
- virtual void* allocateBuffer(int width, int height, const char* format, int &bytes, int numBufs);
+ virtual CameraBuffer * allocateBufferList(int width, int height, const char* format, int &bytes, int numBufs);
+ virtual CameraBuffer *getBufferList(int *numBufs);
virtual uint32_t * getOffsets() ;
virtual int getFd() ;
- virtual int freeBuffer(void* buf);
+ virtual int freeBufferList(CameraBuffer * buflist);
- virtual int maxQueueableBuffers(unsigned int& queueable);
+ virtual status_t maxQueueableBuffers(unsigned int& queueable);
+ virtual status_t minUndequeueableBuffers(int& unqueueable);
///Class specific functions
static void frameCallbackRelay(CameraFrame* caFrame);
@@ -105,17 +104,17 @@ public:
static const int DISPLAY_TIMEOUT;
static const int FAILED_DQS_TO_SUSPEND;
- class DisplayThread : public Thread
+ class DisplayThread : public android::Thread
{
ANativeWindowDisplayAdapter* mDisplayAdapter;
- TIUTILS::MessageQueue mDisplayThreadQ;
+ Utils::MessageQueue mDisplayThreadQ;
public:
DisplayThread(ANativeWindowDisplayAdapter* da)
: Thread(false), mDisplayAdapter(da) { }
///Returns a reference to the display message Q for display adapter to post messages
- TIUTILS::MessageQueue& msgQ()
+ Utils::MessageQueue& msgQ()
{
return mDisplayThreadQ;
}
@@ -147,20 +146,22 @@ private:
int mFailedDQs;
bool mPaused; //Pause state
preview_stream_ops_t* mANativeWindow;
- sp<DisplayThread> mDisplayThread;
+ android::sp<DisplayThread> mDisplayThread;
FrameProvider *mFrameProvider; ///Pointer to the frame provider interface
- TIUTILS::MessageQueue mDisplayQ;
+ Utils::MessageQueue mDisplayQ;
unsigned int mDisplayState;
///@todo Have a common class for these members
- mutable Mutex mLock;
+ mutable android::Mutex mLock;
bool mDisplayEnabled;
int mBufferCount;
- buffer_handle_t** mBufferHandleMap;
- IMG_native_handle_t** mGrallocHandleMap;
- uint32_t* mOffsetsMap;
+ CameraBuffer *mBuffers;
+ //buffer_handle_t** mBufferHandleMap; // -> frames[i].BufferHandle
+ //IMG_native_handle_t** mGrallocHandleMap; // -> frames[i].GrallocHandle
+ uint32_t* mOffsetsMap; // -> frames[i].Offset
int mFD;
- KeyedVector<int, int> mFramesWithCameraAdapterMap;
- sp<ErrorNotifier> mErrorNotifier;
+ android::KeyedVector<buffer_handle_t *, int> mFramesWithCameraAdapterMap;
+ android::KeyedVector<int, int> mFramesType;
+ android::sp<ErrorNotifier> mErrorNotifier;
uint32_t mFrameWidth;
uint32_t mFrameHeight;
@@ -184,5 +185,5 @@ private:
};
-};
-
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/inc/BaseCameraAdapter.h b/camera/inc/BaseCameraAdapter.h
index bc38e00..b7966b0 100644
--- a/camera/inc/BaseCameraAdapter.h
+++ b/camera/inc/BaseCameraAdapter.h
@@ -21,7 +21,18 @@
#include "CameraHal.h"
-namespace android {
+namespace Ti {
+namespace Camera {
+
+struct LUT {
+ const char * userDefinition;
+ int halDefinition;
+};
+
+struct LUTtypeHAL{
+ int size;
+ const LUT *Table;
+};
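// Illustrative sketch (hypothetical table, not from this patch): adapters describe a
// user-string <-> HAL-enum mapping with these structs, for example:
//
//   static const LUT WhiteBalanceLUT[] = {
//       { "auto",     OMX_WhiteBalControlAuto     },
//       { "daylight", OMX_WhiteBalControlSunLight },
//   };
//   static const LUTtypeHAL WhiteBalanceLUTtype =
//       { sizeof(WhiteBalanceLUT)/sizeof(WhiteBalanceLUT[0]), WhiteBalanceLUT };
//
// getLUTvalue_translateHAL() (declared below) walks such a table and returns the
// userDefinition string matching a given halDefinition value.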
class BaseCameraAdapter : public CameraAdapter
{
@@ -39,16 +50,16 @@ public:
//Message/Frame notification APIs
virtual void enableMsgType(int32_t msgs, frame_callback callback=NULL, event_callback eventCb=NULL, void* cookie=NULL);
virtual void disableMsgType(int32_t msgs, void* cookie);
- virtual void returnFrame(void * frameBuf, CameraFrame::FrameType frameType);
- virtual void addFramePointers(void *frameBuf, void *y_uv);
+ virtual void returnFrame(CameraBuffer * frameBuf, CameraFrame::FrameType frameType);
+ virtual void addFramePointers(CameraBuffer *frameBuf, void *y_uv);
virtual void removeFramePointers();
//APIs to configure Camera adapter and get the current parameter set
- virtual status_t setParameters(const CameraParameters& params) = 0;
- virtual void getParameters(CameraParameters& params) = 0;
+ virtual status_t setParameters(const android::CameraParameters& params) = 0;
+ virtual void getParameters(android::CameraParameters& params) = 0;
//API to send a command to the camera
- virtual status_t sendCommand(CameraCommands operation, int value1 = 0, int value2 = 0, int value3 = 0 );
+ virtual status_t sendCommand(CameraCommands operation, int value1 = 0, int value2 = 0, int value3 = 0, int value4 = 0 );
virtual status_t registerImageReleaseCallback(release_image_buffers_callback callback, void *user_data);
@@ -59,6 +70,8 @@ public:
//Retrieves the next Adapter state
virtual AdapterState getNextState();
+ virtual status_t setSharedAllocator(camera_request_memory shmem_alloc) { mSharedAllocator = shmem_alloc; return NO_ERROR; };
+
// Rolls the state machine back to INTIALIZED_STATE from the current state
virtual status_t rollbackToInitializedState();
@@ -115,10 +128,10 @@ protected:
virtual status_t stopSmoothZoom();
//Should be implemented by deriving classes in order to stop smooth zoom
- virtual status_t useBuffers(CameraMode mode, void* bufArr, int num, size_t length, unsigned int queueable);
+ virtual status_t useBuffers(CameraMode mode, CameraBuffer* bufArr, int num, size_t length, unsigned int queueable);
//Should be implemented by deriving classes in order queue a released buffer in CameraAdapter
- virtual status_t fillThisBuffer(void* frameBuf, CameraFrame::FrameType frameType);
+ virtual status_t fillThisBuffer(CameraBuffer* frameBuf, CameraFrame::FrameType frameType);
//API to get the frame size required to be allocated. This size is used to override the size passed
//by camera service when VSTAB/VNF is turned ON for example
@@ -128,7 +141,7 @@ protected:
virtual status_t getFrameDataSize(size_t &dataFrameSize, size_t bufferCount);
//API to get required picture buffers size with the current configuration in CameraParameters
- virtual status_t getPictureBufferSize(size_t &length, size_t bufferCount);
+ virtual status_t getPictureBufferSize(CameraFrame &frame, size_t bufferCount);
// Should be implemented by deriving classes in order to start face detection
// ( if supported )
@@ -140,6 +153,12 @@ protected:
virtual status_t switchToExecuting();
+ virtual status_t setupTunnel(uint32_t SliceHeight, uint32_t EncoderHandle, uint32_t width, uint32_t height);
+
+ virtual status_t destroyTunnel();
+
+ virtual status_t cameraPreviewInitialization();
+
// Receive orientation events from CameraHal
virtual void onOrientationEvent(uint32_t orientation, uint32_t tilt);
@@ -148,7 +167,7 @@ protected:
status_t notifyFocusSubscribers(CameraHalEvent::FocusStatus status);
status_t notifyShutterSubscribers();
status_t notifyZoomSubscribers(int zoomIdx, bool targetReached);
- status_t notifyFaceSubscribers(sp<CameraFDResult> &faces);
+ status_t notifyMetadataSubscribers(android::sp<CameraMetadataResult> &meta);
//Send the frame to subscribers
status_t sendFrameToSubscribers(CameraFrame *frame);
@@ -157,14 +176,15 @@ protected:
status_t resetFrameRefCount(CameraFrame &frame);
//A couple of helper functions
- void setFrameRefCount(void* frameBuf, CameraFrame::FrameType frameType, int refCount);
- int getFrameRefCount(void* frameBuf, CameraFrame::FrameType frameType);
- int setInitFrameRefCount(void* buf, unsigned int mask);
+ void setFrameRefCount(CameraBuffer* frameBuf, CameraFrame::FrameType frameType, int refCount);
+ int getFrameRefCount(CameraBuffer* frameBuf, CameraFrame::FrameType frameType);
+ int setInitFrameRefCount(CameraBuffer* buf, unsigned int mask);
+ static const char* getLUTvalue_translateHAL(int Value, LUTtypeHAL LUT);
// private member functions
private:
status_t __sendFrameToSubscribers(CameraFrame* frame,
- KeyedVector<int, frame_callback> *subscribers,
+ android::KeyedVector<int, frame_callback> *subscribers,
CameraFrame::FrameType frameType);
status_t rollbackToPreviousState();
@@ -198,55 +218,66 @@ protected:
#endif
- mutable Mutex mReturnFrameLock;
+ mutable android::Mutex mReturnFrameLock;
//Lock protecting the Adapter state
- mutable Mutex mLock;
+ mutable android::Mutex mLock;
AdapterState mAdapterState;
AdapterState mNextState;
//Different frame subscribers get stored using these
- KeyedVector<int, frame_callback> mFrameSubscribers;
- KeyedVector<int, frame_callback> mFrameDataSubscribers;
- KeyedVector<int, frame_callback> mVideoSubscribers;
- KeyedVector<int, frame_callback> mImageSubscribers;
- KeyedVector<int, frame_callback> mRawSubscribers;
- KeyedVector<int, event_callback> mFocusSubscribers;
- KeyedVector<int, event_callback> mZoomSubscribers;
- KeyedVector<int, event_callback> mShutterSubscribers;
- KeyedVector<int, event_callback> mFaceSubscribers;
+ android::KeyedVector<int, frame_callback> mFrameSubscribers;
+ android::KeyedVector<int, frame_callback> mSnapshotSubscribers;
+ android::KeyedVector<int, frame_callback> mFrameDataSubscribers;
+ android::KeyedVector<int, frame_callback> mVideoSubscribers;
+ android::KeyedVector<int, frame_callback> mVideoInSubscribers;
+ android::KeyedVector<int, frame_callback> mImageSubscribers;
+ android::KeyedVector<int, frame_callback> mRawSubscribers;
+ android::KeyedVector<int, event_callback> mFocusSubscribers;
+ android::KeyedVector<int, event_callback> mZoomSubscribers;
+ android::KeyedVector<int, event_callback> mShutterSubscribers;
+ android::KeyedVector<int, event_callback> mMetadataSubscribers;
//Preview buffer management data
- int *mPreviewBuffers;
+ CameraBuffer *mPreviewBuffers;
int mPreviewBufferCount;
size_t mPreviewBuffersLength;
- KeyedVector<int, int> mPreviewBuffersAvailable;
- mutable Mutex mPreviewBufferLock;
+ android::KeyedVector<CameraBuffer *, int> mPreviewBuffersAvailable;
+ mutable android::Mutex mPreviewBufferLock;
+
+ //Snapshot buffer management data
+ android::KeyedVector<int, int> mSnapshotBuffersAvailable;
+ mutable android::Mutex mSnapshotBufferLock;
//Video buffer management data
- int *mVideoBuffers;
- KeyedVector<int, int> mVideoBuffersAvailable;
+ CameraBuffer *mVideoBuffers;
+ android::KeyedVector<CameraBuffer *, int> mVideoBuffersAvailable;
int mVideoBuffersCount;
size_t mVideoBuffersLength;
- mutable Mutex mVideoBufferLock;
+ mutable android::Mutex mVideoBufferLock;
//Image buffer management data
- int *mCaptureBuffers;
- KeyedVector<int, bool> mCaptureBuffersAvailable;
+ CameraBuffer *mCaptureBuffers;
+ android::KeyedVector<CameraBuffer *, int> mCaptureBuffersAvailable;
int mCaptureBuffersCount;
size_t mCaptureBuffersLength;
- mutable Mutex mCaptureBufferLock;
+ mutable android::Mutex mCaptureBufferLock;
//Metadata buffermanagement
- int *mPreviewDataBuffers;
- KeyedVector<int, bool> mPreviewDataBuffersAvailable;
+ CameraBuffer *mPreviewDataBuffers;
+ android::KeyedVector<CameraBuffer *, int> mPreviewDataBuffersAvailable;
int mPreviewDataBuffersCount;
size_t mPreviewDataBuffersLength;
- mutable Mutex mPreviewDataBufferLock;
+ mutable android::Mutex mPreviewDataBufferLock;
- TIUTILS::MessageQueue mFrameQ;
- TIUTILS::MessageQueue mAdapterQ;
- mutable Mutex mSubscriberLock;
+ //Video input buffer management data (used for reproc pipe)
+ CameraBuffer *mVideoInBuffers;
+ android::KeyedVector<CameraBuffer *, int> mVideoInBuffersAvailable;
+ mutable android::Mutex mVideoInBufferLock;
+
+ Utils::MessageQueue mFrameQ;
+ Utils::MessageQueue mAdapterQ;
+ mutable android::Mutex mSubscriberLock;
ErrorNotifier *mErrorNotifier;
release_image_buffers_callback mReleaseImageBuffersCallback;
end_image_capture_callback mEndImageCaptureCallback;
@@ -254,18 +285,21 @@ protected:
void *mEndCaptureData;
bool mRecording;
+ camera_request_memory mSharedAllocator;
+
uint32_t mFramesWithDucati;
uint32_t mFramesWithDisplay;
uint32_t mFramesWithEncoder;
-#ifdef DEBUG_LOG
- KeyedVector<int, bool> mBuffersWithDucati;
+#ifdef CAMERAHAL_DEBUG
+ android::KeyedVector<int, bool> mBuffersWithDucati;
#endif
- KeyedVector<void *, CameraFrame *> mFrameQueue;
+ android::KeyedVector<void *, CameraFrame *> mFrameQueue;
};
-};
+} // namespace Camera
+} // namespace Ti
#endif //BASE_CAMERA_ADAPTER_H
diff --git a/camera/inc/BufferSourceAdapter.h b/camera/inc/BufferSourceAdapter.h
new file mode 100644
index 0000000..8d1fa7c
--- /dev/null
+++ b/camera/inc/BufferSourceAdapter.h
@@ -0,0 +1,226 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef BUFFER_SOURCE_ADAPTER_H
+#define BUFFER_SOURCE_ADAPTER_H
+
+#ifdef OMAP_ENHANCEMENT_CPCAM
+
+#include "CameraHal.h"
+#include <ui/GraphicBufferMapper.h>
+#include <hal_public.h>
+
+namespace Ti {
+namespace Camera {
+
+/**
+ * Handles enqueueing/dequeuing buffers to tap-in/tap-out points
+ * TODO(XXX): this class implements DisplayAdapter for now
+ * but this will most likely change once tap-in/tap-out points
+ * are better defined
+ */
+
+class BufferSourceAdapter : public DisplayAdapter
+{
+// private types
+private:
+ ///Constant declarations
+ static const int NO_BUFFERS_IMAGE_CAPTURE_SYSTEM_HEAP;
+
+
+ // helper class to return frames in a different thread context
+ class ReturnFrame : public android::Thread {
+ public:
+ ReturnFrame(BufferSourceAdapter* __this) : mBufferSourceAdapter(__this) {
+ android::AutoMutex lock(mReturnFrameMutex);
+ mDestroying = false;
+ mFrameCount = 0;
+ }
+
+ ~ReturnFrame() {
+ android::AutoMutex lock(mReturnFrameMutex);
+ }
+
+ void signal() {
+ android::AutoMutex lock(mReturnFrameMutex);
+ mFrameCount++;
+ mReturnFrameCondition.signal();
+ }
+
+ virtual void requestExit() {
+ Thread::requestExit();
+
+ android::AutoMutex lock(mReturnFrameMutex);
+ mDestroying = true;
+ mReturnFrameCondition.signal();
+ }
+
+ virtual bool threadLoop() {
+ android::AutoMutex lock(mReturnFrameMutex);
+ if ( 0 >= mFrameCount ) {
+ mReturnFrameCondition.wait(mReturnFrameMutex);
+ }
+ if (!mDestroying) {
+ mBufferSourceAdapter->handleFrameReturn();
+ mFrameCount--;
+ }
+ return true;
+ }
+
+ private:
+ BufferSourceAdapter* mBufferSourceAdapter;
+ android::Condition mReturnFrameCondition;
+ android::Mutex mReturnFrameMutex;
+ int mFrameCount;
+ bool mDestroying;
+ };
+
+ // helper class to queue frames in a different thread context
+ class QueueFrame : public android::Thread {
+ public:
+ QueueFrame(BufferSourceAdapter* __this) : mBufferSourceAdapter(__this) {
+ mDestroying = false;
+ }
+
+ ~QueueFrame() {
+ }
+
+ void addFrame(CameraFrame *frame) {
+ android::AutoMutex lock(mFramesMutex);
+ mFrames.add(new CameraFrame(*frame));
+ mFramesCondition.signal();
+ }
+
+ virtual void requestExit() {
+ Thread::requestExit();
+
+ mDestroying = true;
+
+ android::AutoMutex lock(mFramesMutex);
+ while (!mFrames.empty()) {
+ CameraFrame *frame = mFrames.itemAt(0);
+ mFrames.removeAt(0);
+ frame->mMetaData.clear();
+ delete frame;
+ }
+ mFramesCondition.signal();
+ }
+
+ virtual bool threadLoop() {
+ CameraFrame *frame = NULL;
+ {
+ android::AutoMutex lock(mFramesMutex);
+ while (mFrames.empty() && !mDestroying) mFramesCondition.wait(mFramesMutex);
+ if (!mDestroying) {
+ frame = mFrames.itemAt(0);
+ mFrames.removeAt(0);
+ }
+ }
+
+ if (frame) {
+ mBufferSourceAdapter->handleFrameCallback(frame);
+ frame->mMetaData.clear();
+
+ // signal return frame thread that it can dequeue a buffer now
+ mBufferSourceAdapter->mReturnFrame->signal();
+
+ delete frame;
+ }
+
+ return true;
+ }
+
+ private:
+ BufferSourceAdapter* mBufferSourceAdapter;
+ android::Vector<CameraFrame *> mFrames;
+ android::Condition mFramesCondition;
+ android::Mutex mFramesMutex;
+ bool mDestroying;
+ };
+
+ enum {
+ BUFFER_SOURCE_TAP_IN,
+ BUFFER_SOURCE_TAP_OUT
+ };
+
+// public member functions
+public:
+ BufferSourceAdapter();
+ virtual ~BufferSourceAdapter();
+
+ virtual status_t initialize();
+ virtual int setPreviewWindow(struct preview_stream_ops *source);
+ virtual int setFrameProvider(FrameNotifier *frameProvider);
+ virtual int setErrorHandler(ErrorNotifier *errorNotifier);
+ virtual int enableDisplay(int width, int height, struct timeval *refTime = NULL);
+ virtual int disableDisplay(bool cancel_buffer = true);
+ virtual status_t pauseDisplay(bool pause);
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+ // Not implemented in this class
+ virtual status_t setSnapshotTimeRef(struct timeval *refTime = NULL) { return NO_ERROR; }
+#endif
+ virtual bool supportsExternalBuffering();
+ virtual CameraBuffer * allocateBufferList(int width, int dummyHeight, const char* format, int &bytes, int numBufs);
+ virtual CameraBuffer *getBufferList(int *numBufs);
+ virtual uint32_t * getOffsets() ;
+ virtual int getFd() ;
+ virtual int freeBufferList(CameraBuffer * buflist);
+ virtual int maxQueueableBuffers(unsigned int& queueable);
+ virtual int minUndequeueableBuffers(int& unqueueable);
+ virtual bool match(const char * str);
+
+ virtual CameraBuffer * getBuffers(bool reset = false);
+ virtual unsigned int getSize();
+ virtual int getBufferCount();
+
+ static void frameCallback(CameraFrame* caFrame);
+ void addFrame(CameraFrame* caFrame);
+ void handleFrameCallback(CameraFrame* caFrame);
+ bool handleFrameReturn();
+
+private:
+ void destroy();
+ status_t returnBuffersToWindow();
+
+private:
+ preview_stream_ops_t* mBufferSource;
+ FrameProvider *mFrameProvider; // Pointer to the frame provider interface
+
+ mutable android::Mutex mLock;
+ int mBufferCount;
+ CameraBuffer *mBuffers;
+
+ android::KeyedVector<buffer_handle_t *, int> mFramesWithCameraAdapterMap;
+ android::sp<ErrorNotifier> mErrorNotifier;
+ android::sp<ReturnFrame> mReturnFrame;
+ android::sp<QueueFrame> mQueueFrame;
+
+ uint32_t mFrameWidth;
+ uint32_t mFrameHeight;
+ uint32_t mPreviewWidth;
+ uint32_t mPreviewHeight;
+
+ int mBufferSourceDirection;
+
+ const char *mPixelFormat;
+};
+
+} // namespace Camera
+} // namespace Ti
+
+#endif
+
+#endif
diff --git a/camera/inc/CameraHal.h b/camera/inc/CameraHal.h
index 8b8392a..29825c3 100644
--- a/camera/inc/CameraHal.h
+++ b/camera/inc/CameraHal.h
@@ -29,22 +29,38 @@
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <sys/stat.h>
+
+#include <hardware/camera.h>
#include <utils/Log.h>
#include <utils/threads.h>
-#include <linux/videodev2.h>
-#include "binder/MemoryBase.h"
-#include "binder/MemoryHeapBase.h"
#include <utils/threads.h>
+#include <binder/MemoryBase.h>
+#include <binder/MemoryHeapBase.h>
#include <camera/CameraParameters.h>
-#include <hardware/camera.h>
+#ifdef OMAP_ENHANCEMENT_CPCAM
+#include <camera/CameraMetadata.h>
+#include <camera/ShotParameters.h>
+#endif
+#include <ui/GraphicBufferAllocator.h>
+#include <ui/GraphicBuffer.h>
+
+/* For IMG_native_handle_t */
+#include <ui/GraphicBufferMapper.h>
+#include <hal_public.h>
+
+#include <ion/ion.h>
+
+#include "Common.h"
#include "MessageQueue.h"
#include "Semaphore.h"
#include "CameraProperties.h"
-#include "DebugUtils.h"
#include "SensorListener.h"
-#include <ui/GraphicBufferAllocator.h>
-#include <ui/GraphicBuffer.h>
+//temporarily define format here
+#define HAL_PIXEL_FORMAT_TI_NV12 0x100
+#define HAL_PIXEL_FORMAT_TI_Y8 0x103
+#define HAL_PIXEL_FORMAT_TI_Y16 0x104
+#define HAL_PIXEL_FORMAT_TI_UYVY 0x105
#define MIN_WIDTH 640
#define MIN_HEIGHT 480
@@ -66,6 +82,8 @@
#define SHARPNESS_OFFSET 100
#define CONTRAST_OFFSET 100
+#define FRAME_RATE_HIGH_HD 60
+
#define CAMHAL_GRALLOC_USAGE GRALLOC_USAGE_HW_TEXTURE | \
GRALLOC_USAGE_HW_RENDER | \
GRALLOC_USAGE_SW_READ_RARELY | \
@@ -77,56 +95,94 @@
#define LOCK_BUFFER_TRIES 5
#define HAL_PIXEL_FORMAT_NV12 0x100
-#define CAMHAL_LOGI ALOGI
+#define OP_STR_SIZE 100
-//Uncomment to enable more verbose/debug logs
-//#define DEBUG_LOG
+#define NONNEG_ASSIGN(x,y) \
+ if(x > -1) \
+ y = x
-///Camera HAL Logging Functions
-#ifndef DEBUG_LOG
+#define CAMHAL_SIZE_OF_ARRAY(x) static_cast<int>(sizeof(x)/sizeof(x[0]))
-#define CAMHAL_LOGDA(str)
-#define CAMHAL_LOGDB(str, ...)
-#define CAMHAL_LOGVA(str)
-#define CAMHAL_LOGVB(str, ...)
+namespace Ti {
+namespace Camera {
-#define CAMHAL_LOGEA ALOGE
-#define CAMHAL_LOGEB ALOGE
+#ifdef CAMERAHAL_USE_RAW_IMAGE_SAVING
+extern const char * const kRawImagesOutputDirPath;
+extern const char * const kYuvImagesOutputDirPath;
+#endif
+#define V4L_CAMERA_NAME_USB "USBCAMERA"
+#define OMX_CAMERA_NAME_OV "OV5640"
+#define OMX_CAMERA_NAME_SONY "IMX060"
-#undef LOG_FUNCTION_NAME
-#undef LOG_FUNCTION_NAME_EXIT
-#define LOG_FUNCTION_NAME
-#define LOG_FUNCTION_NAME_EXIT
-#else
+///Forward declarations
+class CameraHal;
+class CameraFrame;
+class CameraHalEvent;
+class DisplayFrame;
-#define CAMHAL_LOGDA DBGUTILS_LOGDA
-#define CAMHAL_LOGDB DBGUTILS_LOGDB
-#define CAMHAL_LOGVA DBGUTILS_LOGVA
-#define CAMHAL_LOGVB DBGUTILS_LOGVB
+class FpsRange {
+public:
+ static int compare(const FpsRange * left, const FpsRange * right);
-#define CAMHAL_LOGEA DBGUTILS_LOGEA
-#define CAMHAL_LOGEB DBGUTILS_LOGEB
+ FpsRange(int min, int max);
+ FpsRange();
-#endif
+ bool operator==(const FpsRange & fpsRange) const;
+ bool isNull() const;
+ bool isFixed() const;
+ int min() const;
+ int max() const;
-#define NONNEG_ASSIGN(x,y) \
- if(x > -1) \
- y = x
+private:
+ int mMin;
+ int mMax;
+};
-namespace android {
-#define PARAM_BUFFER 6000
+inline int FpsRange::compare(const FpsRange * const left, const FpsRange * const right) {
+ if ( left->max() < right->max() ) {
+ return -1;
+ }
-///Forward declarations
-class CameraHal;
-class CameraFrame;
-class CameraHalEvent;
-class DisplayFrame;
+ if ( left->max() > right->max() ) {
+ return 1;
+ }
+
+ if ( left->min() < right->min() ) {
+ return -1;
+ }
+
+ if ( left->min() > right->min() ) {
+ return 1;
+ }
+
+ return 0;
+}
+
+inline FpsRange::FpsRange(const int min, const int max) : mMin(min), mMax(max) {}
+
+inline FpsRange::FpsRange() : mMin(-1), mMax(-1) {}
-class CameraArea : public RefBase
+inline bool FpsRange::operator==(const FpsRange & fpsRange) const {
+ return mMin == fpsRange.mMin && mMax == fpsRange.mMax;
+}
+
+inline bool FpsRange::isNull() const {
+ return mMin == -1 || mMax == -1;
+}
+
+inline bool FpsRange::isFixed() const {
+ return mMin == mMax;
+}
+
+inline int FpsRange::min() const { return mMin; }
+
+inline int FpsRange::max() const { return mMax; }
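// Illustrative usage sketch (assumed caller, not part of this patch): compare() matches the
// comparator signature expected by android::Vector<T>::sort(), so supported ranges can be
// ordered by max fps first, then min fps:
//
//   android::Vector<FpsRange> ranges;
//   ranges.add(FpsRange(7500, 30000));
//   ranges.add(FpsRange(30000, 30000));
//   ranges.sort(FpsRange::compare);   // the fixed (30000,30000) range sorts last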
+
+class CameraArea : public android::RefBase
{
public:
@@ -163,11 +219,11 @@ public:
return mWeight;
}
- bool compare(const sp<CameraArea> &area);
+ bool compare(const android::sp<CameraArea> &area);
static status_t parseAreas(const char *area,
size_t areaLength,
- Vector< sp<CameraArea> > &areas);
+ android::Vector< android::sp<CameraArea> > &areas);
static status_t checkArea(ssize_t top,
ssize_t left,
@@ -175,7 +231,7 @@ public:
ssize_t right,
ssize_t weight);
- static bool areAreasDifferent(Vector< sp<CameraArea> > &, Vector< sp<CameraArea> > &);
+ static bool areAreasDifferent(android::Vector< android::sp<CameraArea> > &, android::Vector< android::sp<CameraArea> > &);
protected:
static const ssize_t TOP = -1000;
@@ -192,28 +248,50 @@ protected:
size_t mWeight;
};
-class CameraFDResult : public RefBase
+class CameraMetadataResult : public android::RefBase
{
public:
- CameraFDResult() : mFaceData(NULL) {};
- CameraFDResult(camera_frame_metadata_t *faces) : mFaceData(faces) {};
+#ifdef OMAP_ENHANCEMENT_CPCAM
+ CameraMetadataResult(camera_memory_t * extMeta) : mExtendedMetadata(extMeta) {
+ mMetadata.faces = NULL;
+ mMetadata.number_of_faces = 0;
+#ifdef OMAP_ENHANCEMENT
+ mMetadata.analog_gain = 0;
+ mMetadata.exposure_time = 0;
+#endif
+ };
+#endif
- virtual ~CameraFDResult() {
- if ( ( NULL != mFaceData ) && ( NULL != mFaceData->faces ) ) {
- free(mFaceData->faces);
- free(mFaceData);
- mFaceData=NULL;
- }
+ CameraMetadataResult() {
+ mMetadata.faces = NULL;
+ mMetadata.number_of_faces = 0;
+#ifdef OMAP_ENHANCEMENT_CPCAM
+ mMetadata.analog_gain = 0;
+ mMetadata.exposure_time = 0;
+#endif
+
+#ifdef OMAP_ENHANCEMENT_CPCAM
+ mExtendedMetadata = NULL;
+#endif
+ }
- if(( NULL != mFaceData ))
- {
- free(mFaceData);
- mFaceData = NULL;
- }
+ virtual ~CameraMetadataResult() {
+ if ( NULL != mMetadata.faces ) {
+ free(mMetadata.faces);
+ }
+#ifdef OMAP_ENHANCEMENT_CPCAM
+ if ( NULL != mExtendedMetadata ) {
+ mExtendedMetadata->release(mExtendedMetadata);
+ }
+#endif
}
- camera_frame_metadata_t *getFaceResult() { return mFaceData; };
+ camera_frame_metadata_t *getMetadataResult() { return &mMetadata; };
+
+#ifdef OMAP_ENHANCEMENT_CPCAM
+ camera_memory_t *getExtendedMetadata() { return mExtendedMetadata; };
+#endif
static const ssize_t TOP = -1000;
static const ssize_t LEFT = -1000;
@@ -223,9 +301,68 @@ public:
private:
- camera_frame_metadata_t *mFaceData;
+ camera_frame_metadata_t mMetadata;
+#ifdef OMAP_ENHANCEMENT_CPCAM
+ camera_memory_t *mExtendedMetadata;
+#endif
};
+typedef enum {
+ CAMERA_BUFFER_NONE = 0,
+ CAMERA_BUFFER_GRALLOC,
+ CAMERA_BUFFER_ANW,
+ CAMERA_BUFFER_MEMORY,
+ CAMERA_BUFFER_ION
+} CameraBufferType;
+
+typedef struct _CameraBuffer {
+ CameraBufferType type;
+ /* opaque is the generic drop-in replacement for the pointers
+ * that were used previously */
+ void *opaque;
+
+ /* opaque has different meanings depending on the buffer type:
+ * GRALLOC - gralloc_handle_t
+ * ANW - a pointer to the buffer_handle_t (which corresponds to
+ * the ANativeWindowBuffer *)
+ * MEMORY - address of allocated memory
+ * ION - address of mapped ion allocation
+ *
+ * FIXME opaque should be split into several fields:
+ * - handle/pointer we got from the allocator
+ * - handle/value we pass to OMX
+ * - pointer to mapped memory (if the buffer is mapped)
+ */
+
+ /* mapped holds ptr to mapped memory in userspace */
+ void *mapped;
+
+ /* These are specific to ION buffers */
+ struct ion_handle * ion_handle;
+ int ion_fd;
+ int fd;
+ size_t size;
+ int index;
+
+ /* These describe the camera buffer */
+ int width;
+ int stride;
+ int height;
+ const char *format;
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+ struct timeval ppmStamp;
+
+#endif
+
+ /* These are for buffers which include borders */
+ int offset; // where valid data starts
+ int actual_size; // size of the entire buffer with borders
+} CameraBuffer;
+
+void * camera_buffer_get_omx_ptr (CameraBuffer *buffer);
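// Illustrative sketch only (the actual helper is implemented elsewhere in this patch):
// judging from the field descriptions above, the OMX-facing pointer is derived from the
// buffer type roughly along these lines:
//
//   void *camera_buffer_get_omx_ptr(CameraBuffer *buffer) {
//       if (buffer->type == CAMERA_BUFFER_ANW) {
//           // opaque points at the buffer_handle_t; OMX needs its value
//           return (void *) *((buffer_handle_t *) buffer->opaque);
//       }
//       // for the other buffer types opaque already holds a usable address
//       return buffer->opaque;
//   }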
+
class CameraFrame
{
public:
@@ -242,6 +379,7 @@ class CameraFrame
FRAME_DATA= 0x80,
RAW_FRAME = 0x100,
SNAPSHOT_FRAME = 0x200,
+ REPROCESS_INPUT_FRAME = 0x400,
ALL_FRAMES = 0xFFFF ///Maximum of 16 frame types supported
};
@@ -249,6 +387,8 @@ class CameraFrame
{
ENCODE_RAW_YUV422I_TO_JPEG = 0x1 << 0,
HAS_EXIF_DATA = 0x1 << 1,
+ FORMAT_YUV422I_YUYV = 0x1 << 2,
+ FORMAT_YUV422I_UYVY = 0x1 << 3,
};
//default contrustor
@@ -265,35 +405,19 @@ class CameraFrame
mFd(0),
mLength(0),
mFrameMask(0),
- mQuirks(0) {
-
- mYuv[0] = NULL;
- mYuv[1] = NULL;
- }
+ mQuirks(0)
+ {
+ mYuv[0] = 0;
+ mYuv[1] = 0;
- //copy constructor
- CameraFrame(const CameraFrame &frame) :
- mCookie(frame.mCookie),
- mCookie2(frame.mCookie2),
- mBuffer(frame.mBuffer),
- mFrameType(frame.mFrameType),
- mTimestamp(frame.mTimestamp),
- mWidth(frame.mWidth),
- mHeight(frame.mHeight),
- mOffset(frame.mOffset),
- mAlignment(frame.mAlignment),
- mFd(frame.mFd),
- mLength(frame.mLength),
- mFrameMask(frame.mFrameMask),
- mQuirks(frame.mQuirks) {
-
- mYuv[0] = frame.mYuv[0];
- mYuv[1] = frame.mYuv[1];
+#ifdef OMAP_ENHANCEMENT_CPCAM
+ mMetaData = 0;
+#endif
}
void *mCookie;
void *mCookie2;
- void *mBuffer;
+ CameraBuffer *mBuffer;
int mFrameType;
nsecs_t mTimestamp;
unsigned int mWidth, mHeight;
@@ -304,6 +428,9 @@ class CameraFrame
unsigned mFrameMask;
unsigned int mQuirks;
unsigned int mYuv[2];
+#ifdef OMAP_ENHANCEMENT_CPCAM
+ android::sp<CameraMetadataResult> mMetaData;
+#endif
///@todo add other member vars like stride etc
};
@@ -326,7 +453,7 @@ public:
EVENT_FOCUS_ERROR = 0x2,
EVENT_ZOOM_INDEX_REACHED = 0x4,
EVENT_SHUTTER = 0x8,
- EVENT_FACE = 0x10,
+ EVENT_METADATA = 0x10,
///@remarks Future enum related to display, like frame displayed event, could be added here
ALL_EVENTS = 0xFFFF ///Maximum of 16 event types supported
};
@@ -366,16 +493,16 @@ public:
size_t score;
} FaceData;
- typedef sp<CameraFDResult> FaceEventData;
+ typedef android::sp<CameraMetadataResult> MetaEventData;
- class CameraHalEventData : public RefBase{
+ class CameraHalEventData : public android::RefBase{
public:
CameraHalEvent::FocusEventData focusEvent;
CameraHalEvent::ZoomEventData zoomEvent;
CameraHalEvent::ShutterEventData shutterEvent;
- CameraHalEvent::FaceEventData faceEvent;
+ CameraHalEvent::MetaEventData metadataEvent;
};
//default contrustor
@@ -391,7 +518,7 @@ public:
void* mCookie;
CameraHalEventType mEventType;
- sp<CameraHalEventData> mEventData;
+ android::sp<CameraHalEventData> mEventData;
};
@@ -423,7 +550,7 @@ public:
virtual ~MessageNotifier() {};
};
-class ErrorNotifier : public virtual RefBase
+class ErrorNotifier : public virtual android::RefBase
{
public:
virtual void errorNotify(int error) = 0;
@@ -439,8 +566,8 @@ public:
class FrameNotifier : public MessageNotifier
{
public:
- virtual void returnFrame(void* frameBuf, CameraFrame::FrameType frameType) = 0;
- virtual void addFramePointers(void *frameBuf, void *buf) = 0;
+ virtual void returnFrame(CameraBuffer* frameBuf, CameraFrame::FrameType frameType) = 0;
+ virtual void addFramePointers(CameraBuffer *frameBuf, void *buf) = 0;
virtual void removeFramePointers() = 0;
virtual ~FrameNotifier() {};
@@ -460,8 +587,8 @@ public:
int enableFrameNotification(int32_t frameTypes);
int disableFrameNotification(int32_t frameTypes);
- int returnFrame(void *frameBuf, CameraFrame::FrameType frameType);
- void addFramePointers(void *frameBuf, void *buf);
+ int returnFrame(CameraBuffer *frameBuf, CameraFrame::FrameType frameType);
+ void addFramePointers(CameraBuffer *frameBuf, void *buf);
void removeFramePointers();
};
@@ -489,13 +616,21 @@ public:
class BufferProvider
{
public:
- virtual void* allocateBuffer(int width, int height, const char* format, int &bytes, int numBufs) = 0;
+ virtual CameraBuffer * allocateBufferList(int width, int height, const char* format, int &bytes, int numBufs) = 0;
+
+ // Gets a buffer list from the BufferProvider when the buffers come from an external,
+ // pre-allocated source. Call this only for an input source into CameraHal; if the
+ // buffers are not pre-allocated, this function returns NULL and sets numBufs to -1.
+ virtual CameraBuffer *getBufferList(int *numBufs) = 0;
//additional methods used for memory mapping
virtual uint32_t * getOffsets() = 0;
virtual int getFd() = 0;
+ virtual CameraBuffer * getBuffers(bool reset = false) { return NULL; }
+ virtual unsigned int getSize() {return 0; }
+ virtual int getBufferCount() {return -1; }
- virtual int freeBuffer(void* buf) = 0;
+ virtual int freeBufferList(CameraBuffer * buf) = 0;
virtual ~BufferProvider() {}
};
@@ -503,7 +638,7 @@ public:
/**
* Class for handling data and notify callbacks to application
*/
-class AppCallbackNotifier: public ErrorNotifier , public virtual RefBase
+class AppCallbackNotifier: public ErrorNotifier , public virtual android::RefBase
{
public:
@@ -545,7 +680,7 @@ public:
//All sub-components of Camera HAL call this whenever any error happens
virtual void errorNotify(int error);
- status_t startPreviewCallbacks(CameraParameters &params, void *buffers, uint32_t *offsets, int fd, size_t length, size_t count);
+ status_t startPreviewCallbacks(android::CameraParameters &params, CameraBuffer *buffers, uint32_t *offsets, int fd, size_t length, size_t count);
status_t stopPreviewCallbacks();
status_t enableMsgType(int32_t msgType);
@@ -577,12 +712,12 @@ public:
//Notifications from CameraHal for video recording case
status_t startRecording();
status_t stopRecording();
- status_t initSharedVideoBuffers(void *buffers, uint32_t *offsets, int fd, size_t length, size_t count, void *vidBufs);
+ status_t initSharedVideoBuffers(CameraBuffer *buffers, uint32_t *offsets, int fd, size_t length, size_t count, CameraBuffer *vidBufs);
status_t releaseRecordingFrame(const void *opaque);
- status_t useMetaDataBufferMode(bool enable);
+ status_t useMetaDataBufferMode(bool enable);
- void EncoderDoneCb(void*, void*, CameraFrame::FrameType type, void* cookie1, void* cookie2);
+ void EncoderDoneCb(void*, void*, CameraFrame::FrameType type, void* cookie1, void* cookie2, void *cookie3);
void useVideoBuffers(bool useVideoBuffers);
@@ -592,9 +727,9 @@ public:
void flushEventQueue();
//Internal class definitions
- class NotificationThread : public Thread {
+ class NotificationThread : public android::Thread {
AppCallbackNotifier* mAppCallbackNotifier;
- TIUTILS::MessageQueue mNotificationThreadQ;
+ Utils::MessageQueue mNotificationThreadQ;
public:
enum NotificationThreadCommands
{
@@ -609,7 +744,7 @@ public:
return mAppCallbackNotifier->notificationThread();
}
- TIUTILS::MessageQueue &msgQ() { return mNotificationThreadQ;}
+ Utils::MessageQueue &msgQ() { return mNotificationThreadQ;}
};
//Friend declarations
@@ -623,10 +758,12 @@ private:
status_t dummyRaw();
void copyAndSendPictureFrame(CameraFrame* frame, int32_t msgType);
void copyAndSendPreviewFrame(CameraFrame* frame, int32_t msgType);
+ size_t calculateBufferSize(size_t width, size_t height, const char *pixelFormat);
+ const char* getContstantForPixelFormat(const char *pixelFormat);
private:
- mutable Mutex mLock;
- mutable Mutex mBurstLock;
+ mutable android::Mutex mLock;
+ mutable android::Mutex mBurstLock;
CameraHal* mCameraHal;
camera_notify_callback mNotifyCb;
camera_data_callback mDataCb;
@@ -636,34 +773,37 @@ private:
//Keeps Video MemoryHeaps and Buffers within
//these objects
- KeyedVector<unsigned int, unsigned int> mVideoHeaps;
- KeyedVector<unsigned int, unsigned int> mVideoBuffers;
- KeyedVector<unsigned int, unsigned int> mVideoMap;
+ android::KeyedVector<unsigned int, unsigned int> mVideoHeaps;
+ android::KeyedVector<unsigned int, unsigned int> mVideoBuffers;
+ android::KeyedVector<void *, CameraBuffer *> mVideoMap;
//Keeps list of Gralloc handles and associated Video Metadata Buffers
- KeyedVector<uint32_t, uint32_t> mVideoMetadataBufferMemoryMap;
- KeyedVector<uint32_t, uint32_t> mVideoMetadataBufferReverseMap;
+ android::KeyedVector<void *, camera_memory_t *> mVideoMetadataBufferMemoryMap;
+ android::KeyedVector<void *, CameraBuffer *> mVideoMetadataBufferReverseMap;
bool mBufferReleased;
- sp< NotificationThread> mNotificationThread;
+ android::sp< NotificationThread> mNotificationThread;
EventProvider *mEventProvider;
FrameProvider *mFrameProvider;
- TIUTILS::MessageQueue mEventQ;
- TIUTILS::MessageQueue mFrameQ;
+ Utils::MessageQueue mEventQ;
+ Utils::MessageQueue mFrameQ;
NotifierState mNotifierState;
bool mPreviewing;
camera_memory_t* mPreviewMemory;
- unsigned char* mPreviewBufs[MAX_BUFFERS];
+ CameraBuffer mPreviewBuffers[MAX_BUFFERS];
int mPreviewBufCount;
+ int mPreviewWidth;
+ int mPreviewHeight;
+ int mPreviewStride;
const char *mPreviewPixelFormat;
- KeyedVector<unsigned int, sp<MemoryHeapBase> > mSharedPreviewHeaps;
- KeyedVector<unsigned int, sp<MemoryBase> > mSharedPreviewBuffers;
+ android::KeyedVector<unsigned int, android::sp<android::MemoryHeapBase> > mSharedPreviewHeaps;
+ android::KeyedVector<unsigned int, android::sp<android::MemoryBase> > mSharedPreviewBuffers;
//Burst mode active
bool mBurst;
- mutable Mutex mRecordingLock;
+ mutable android::Mutex mRecordingLock;
bool mRecording;
bool mMeasurementEnabled;
@@ -681,27 +821,24 @@ private:
/**
* Class used for allocating memory for JPEG bit stream buffers, output buffers of camera in no overlay case
*/
-class MemoryManager : public BufferProvider, public virtual RefBase
+class MemoryManager : public BufferProvider, public virtual android::RefBase
{
public:
- MemoryManager():mIonFd(-1){ }
+ MemoryManager();
+ ~MemoryManager();
- ///Initializes the memory manager creates any resources required
- status_t initialize() { return NO_ERROR; }
+ status_t initialize();
int setErrorHandler(ErrorNotifier *errorNotifier);
- virtual void* allocateBuffer(int width, int height, const char* format, int &bytes, int numBufs);
+ virtual CameraBuffer * allocateBufferList(int width, int height, const char* format, int &bytes, int numBufs);
+ virtual CameraBuffer *getBufferList(int *numBufs);
virtual uint32_t * getOffsets();
virtual int getFd() ;
- virtual int freeBuffer(void* buf);
+ virtual int freeBufferList(CameraBuffer * buflist);
private:
-
- sp<ErrorNotifier> mErrorNotifier;
+ android::sp<ErrorNotifier> mErrorNotifier;
int mIonFd;
- KeyedVector<unsigned int, unsigned int> mIonHandleMap;
- KeyedVector<unsigned int, unsigned int> mIonFdMap;
- KeyedVector<unsigned int, unsigned int> mIonBufLength;
};
@@ -712,24 +849,26 @@ private:
* Concrete classes derive from this class and provide implementations based on the specific camera h/w interface
*/
-class CameraAdapter: public FrameNotifier, public virtual RefBase
+class CameraAdapter: public FrameNotifier, public virtual android::RefBase
{
protected:
enum AdapterActiveStates {
- INTIALIZED_ACTIVE = 1 << 0,
- LOADED_PREVIEW_ACTIVE = 1 << 1,
- PREVIEW_ACTIVE = 1 << 2,
- LOADED_CAPTURE_ACTIVE = 1 << 3,
- CAPTURE_ACTIVE = 1 << 4,
- BRACKETING_ACTIVE = 1 << 5,
- AF_ACTIVE = 1 << 6,
- ZOOM_ACTIVE = 1 << 7,
- VIDEO_ACTIVE = 1 << 8,
+ INTIALIZED_ACTIVE = 1 << 0,
+ LOADED_PREVIEW_ACTIVE = 1 << 1,
+ PREVIEW_ACTIVE = 1 << 2,
+ LOADED_CAPTURE_ACTIVE = 1 << 3,
+ CAPTURE_ACTIVE = 1 << 4,
+ BRACKETING_ACTIVE = 1 << 5,
+ AF_ACTIVE = 1 << 6,
+ ZOOM_ACTIVE = 1 << 7,
+ VIDEO_ACTIVE = 1 << 8,
+ LOADED_REPROCESS_ACTIVE = 1 << 9,
+ REPROCESS_ACTIVE = 1 << 10,
};
public:
typedef struct
{
- void *mBuffers;
+ CameraBuffer *mBuffers;
uint32_t *mOffsets;
int mFd;
size_t mLength;
@@ -764,6 +903,16 @@ public:
CAMERA_START_FD = 22,
CAMERA_STOP_FD = 23,
CAMERA_SWITCH_TO_EXECUTING = 24,
+ CAMERA_USE_BUFFERS_VIDEO_CAPTURE = 25,
+#ifdef OMAP_ENHANCEMENT_CPCAM
+ CAMERA_USE_BUFFERS_REPROCESS = 26,
+ CAMERA_START_REPROCESS = 27,
+#endif
+#ifdef OMAP_ENHANCEMENT_VTC
+ CAMERA_SETUP_TUNNEL = 28,
+ CAMERA_DESTROY_TUNNEL = 29,
+#endif
+ CAMERA_PREVIEW_INITIALIZATION = 30,
};
enum CameraMode
@@ -771,27 +920,32 @@ public:
CAMERA_PREVIEW,
CAMERA_IMAGE_CAPTURE,
CAMERA_VIDEO,
- CAMERA_MEASUREMENT
+ CAMERA_MEASUREMENT,
+ CAMERA_REPROCESS,
};
enum AdapterState {
- INTIALIZED_STATE = INTIALIZED_ACTIVE,
- LOADED_PREVIEW_STATE = LOADED_PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
- PREVIEW_STATE = PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
- LOADED_CAPTURE_STATE = LOADED_CAPTURE_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
- CAPTURE_STATE = CAPTURE_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
- BRACKETING_STATE = BRACKETING_ACTIVE | CAPTURE_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE ,
- AF_STATE = AF_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
- ZOOM_STATE = ZOOM_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
- VIDEO_STATE = VIDEO_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
- VIDEO_AF_STATE = VIDEO_ACTIVE | AF_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
- VIDEO_ZOOM_STATE = VIDEO_ACTIVE | ZOOM_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
- VIDEO_LOADED_CAPTURE_STATE = VIDEO_ACTIVE | LOADED_CAPTURE_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
- VIDEO_CAPTURE_STATE = VIDEO_ACTIVE | CAPTURE_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
- AF_ZOOM_STATE = AF_ACTIVE | ZOOM_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
- BRACKETING_ZOOM_STATE = BRACKETING_ACTIVE | ZOOM_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
+ INTIALIZED_STATE = INTIALIZED_ACTIVE,
+ LOADED_PREVIEW_STATE = LOADED_PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
+ PREVIEW_STATE = PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
+ LOADED_CAPTURE_STATE = LOADED_CAPTURE_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
+ CAPTURE_STATE = CAPTURE_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
+ BRACKETING_STATE = BRACKETING_ACTIVE | CAPTURE_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE ,
+ AF_STATE = AF_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
+ ZOOM_STATE = ZOOM_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
+ VIDEO_STATE = VIDEO_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
+ VIDEO_AF_STATE = VIDEO_ACTIVE | AF_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
+ VIDEO_ZOOM_STATE = VIDEO_ACTIVE | ZOOM_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
+ VIDEO_LOADED_CAPTURE_STATE = VIDEO_ACTIVE | LOADED_CAPTURE_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
+ VIDEO_CAPTURE_STATE = VIDEO_ACTIVE | CAPTURE_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
+ AF_ZOOM_STATE = AF_ACTIVE | ZOOM_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
+ BRACKETING_ZOOM_STATE = BRACKETING_ACTIVE | ZOOM_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
+ LOADED_REPROCESS_STATE = LOADED_REPROCESS_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
+ LOADED_REPROCESS_CAPTURE_STATE = LOADED_REPROCESS_ACTIVE | LOADED_CAPTURE_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
+ REPROCESS_STATE = REPROCESS_ACTIVE | CAPTURE_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
};
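Each composite AdapterState above is just the OR of the AdapterActiveStates flags it contains, so state queries reduce to a bitwise AND. A minimal sketch (helper names are illustrative, not the HAL's):

    // Illustrative helpers; e.g. VIDEO_CAPTURE_STATE == VIDEO_ACTIVE | CAPTURE_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE
    inline bool previewRunning(int state) { return (state & PREVIEW_ACTIVE) != 0; }
    inline bool capturing(int state)      { return (state & CAPTURE_ACTIVE) != 0; }
    // previewRunning(VIDEO_CAPTURE_STATE) == true, capturing(LOADED_PREVIEW_STATE) == false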
+
public:
    ///Initializes the camera adapter, creates any resources required
@@ -805,19 +959,13 @@ public:
event_callback eventCb = NULL,
void *cookie = NULL) = 0;
virtual void disableMsgType(int32_t msgs, void* cookie) = 0;
- virtual void returnFrame(void* frameBuf, CameraFrame::FrameType frameType) = 0;
- virtual void addFramePointers(void *frameBuf, void *buf) = 0;
+ virtual void returnFrame(CameraBuffer* frameBuf, CameraFrame::FrameType frameType) = 0;
+ virtual void addFramePointers(CameraBuffer *frameBuf, void *buf) = 0;
virtual void removeFramePointers() = 0;
//APIs to configure Camera adapter and get the current parameter set
- virtual int setParameters(const CameraParameters& params) = 0;
- virtual void getParameters(CameraParameters& params) = 0;
-
- //API to flush the buffers from Camera
- status_t flushBuffers()
- {
- return sendCommand(CameraAdapter::CAMERA_PREVIEW_FLUSH_BUFFERS);
- }
+ virtual int setParameters(const android::CameraParameters& params) = 0;
+ virtual void getParameters(android::CameraParameters& params) = 0;
//Registers callback for returning image buffers back to CameraHAL
virtual int registerImageReleaseCallback(release_image_buffers_callback callback, void *user_data) = 0;
@@ -826,7 +974,7 @@ public:
virtual int registerEndCaptureCallback(end_image_capture_callback callback, void *user_data) = 0;
//API to send a command to the camera
- virtual status_t sendCommand(CameraCommands operation, int value1=0, int value2=0, int value3=0) = 0;
+ virtual status_t sendCommand(CameraCommands operation, int value1=0, int value2=0, int value3=0, int value4=0) = 0;
virtual ~CameraAdapter() {};
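sendCommand() gains a fourth integer argument here; commands that carry more data, such as the new VTC tunnel setup (see setupTunnel() in OMXCameraAdapter.h below, which takes slice height, encoder handle, width and height), can pass everything in one call. A hedged sketch of such a dispatch (the argument order is an assumption mirroring setupTunnel):

    #ifdef OMAP_ENHANCEMENT_VTC
    // Assumed call shape; the adapter is expected to forward the four values to setupTunnel().
    mCameraAdapter->sendCommand(CameraAdapter::CAMERA_SETUP_TUNNEL,
                                sliceHeight, encoderHandle, width, height);
    #endif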
@@ -847,6 +995,8 @@ public:
// Retrieves the next Adapter state - for internal use (not locked)
virtual status_t getNextState(AdapterState &state) = 0;
+ virtual status_t setSharedAllocator(camera_request_memory shmem_alloc) = 0;
+
protected:
//The first two methods will try to switch the adapter state.
//Every call to setState() should be followed by a corresponding
@@ -857,16 +1007,18 @@ protected:
virtual status_t rollbackState() = 0;
};
-class DisplayAdapter : public BufferProvider, public virtual RefBase
+class DisplayAdapter : public BufferProvider, public virtual android::RefBase
{
public:
- typedef struct S3DParameters_t
- {
- int mode;
- int framePacking;
- int order;
- int subSampling;
- } S3DParameters;
+ DisplayAdapter();
+
+#ifdef OMAP_ENHANCEMENT
+ preview_stream_extended_ops_t * extendedOps() const {
+ return mExtendedOps;
+ }
+
+ void setExtendedOps(preview_stream_extended_ops_t * extendedOps);
+#endif
///Initializes the display adapter creates any resources required
virtual int initialize() = 0;
@@ -874,7 +1026,7 @@ public:
virtual int setPreviewWindow(struct preview_stream_ops *window) = 0;
virtual int setFrameProvider(FrameNotifier *frameProvider) = 0;
virtual int setErrorHandler(ErrorNotifier *errorNotifier) = 0;
- virtual int enableDisplay(int width, int height, struct timeval *refTime = NULL, S3DParameters *s3dParams = NULL) = 0;
+ virtual int enableDisplay(int width, int height, struct timeval *refTime = NULL) = 0;
virtual int disableDisplay(bool cancel_buffer = true) = 0;
//Used for Snapshot review temp. pause
virtual int pauseDisplay(bool pause) = 0;
@@ -884,13 +1036,23 @@ public:
virtual int setSnapshotTimeRef(struct timeval *refTime = NULL) = 0;
#endif
- virtual int useBuffers(void *bufArr, int num) = 0;
virtual bool supportsExternalBuffering() = 0;
// Get max queueable buffers display supports
// This function should only be called after
- // allocateBuffer
- virtual int maxQueueableBuffers(unsigned int& queueable) = 0;
+ // allocateBufferList
+ virtual status_t maxQueueableBuffers(unsigned int& queueable) = 0;
+
+ // Get min buffers display needs at any given time
+ virtual status_t minUndequeueableBuffers(int& unqueueable) = 0;
+
+ // Given a vector of DisplayAdapters find the one corresponding to str
+ virtual bool match(const char * str) { return false; }
+
+private:
+#ifdef OMAP_ENHANCEMENT
+ preview_stream_extended_ops_t * mExtendedOps;
+#endif
};
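match() lets CameraHal find a specific tap-in/tap-out adapter inside the mInAdapters/mOutAdapters vectors declared further down. A minimal lookup sketch (the loop itself is illustrative, not the HAL's code):

    // Illustrative lookup; `name` identifies the wanted buffer source.
    android::sp<DisplayAdapter> found;
    for (unsigned int i = 0; i < mOutAdapters.size(); i++) {
        if (mOutAdapters.itemAt(i)->match(name)) {
            found = mOutAdapters.itemAt(i);
            break;
        }
    }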
static void releaseImageBuffers(void *userData);
@@ -912,6 +1074,7 @@ public:
///Constants
static const int NO_BUFFERS_PREVIEW;
static const int NO_BUFFERS_IMAGE_CAPTURE;
+ static const int NO_BUFFERS_IMAGE_CAPTURE_SYSTEM_HEAP;
static const uint32_t VFR_SCALE = 1000;
@@ -959,10 +1122,30 @@ public:
int startPreview();
/**
+ * Set preview mode related initialization.
+ * Only used when slice based processing is enabled.
+ */
+ int cameraPreviewInitialization();
+
+ /**
* Only used if overlays are used for camera preview.
*/
int setPreviewWindow(struct preview_stream_ops *window);
+#ifdef OMAP_ENHANCEMENT_CPCAM
+ void setExtendedPreviewStreamOps(preview_stream_extended_ops_t *ops);
+
+ /**
+ * Set a tap-in or tap-out point.
+ */
+ int setBufferSource(struct preview_stream_ops *tapin, struct preview_stream_ops *tapout);
+#endif
+
+ /**
+ * Release a tap-in or tap-out point.
+ */
+ int releaseBufferSource(struct preview_stream_ops *tapin, struct preview_stream_ops *tapout);
+
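Under OMAP_ENHANCEMENT_CPCAM the tap-out/tap-in surfaces and reprocess() form the capture-reprocessing path: a capture lands in the tap-out, and a tap-in feeds a buffer back for reprocessing. A hedged ordering sketch (surface setup and parameter strings live outside this header and are assumed):

    #ifdef OMAP_ENHANCEMENT_CPCAM
    // Assumed call order; `hal`, `tapin`, `tapout` and `shotParams` are hypothetical.
    hal->setBufferSource(tapin, tapout);     // register the tap points
    hal->takePicture(shotParams);            // capture is delivered to the tap-out
    hal->reprocess(shotParams);              // reprocess the buffer queued to the tap-in
    hal->releaseBufferSource(tapin, tapout);
    #endif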
/**
* Stop a previously started preview.
*/
@@ -1013,7 +1196,7 @@ public:
/**
* Take a picture.
*/
- int takePicture();
+ int takePicture(const char* params);
/**
* Cancel a picture that was started with takePicture. Calling this
@@ -1023,7 +1206,7 @@ public:
/** Set the camera parameters. */
int setParameters(const char* params);
- int setParameters(const CameraParameters& params);
+ int setParameters(const android::CameraParameters& params);
/** Return the camera parameters. */
char* getParameters();
@@ -1045,8 +1228,19 @@ public:
*/
int dump(int fd) const;
+#ifdef OMAP_ENHANCEMENT_CPCAM
+ /**
+ * start a reprocessing operation.
+ */
+ int reprocess(const char* params);
+
+ /**
+ * cancels current reprocessing operation
+ */
+ int cancel_reprocess();
+#endif
- status_t storeMetaDataInBuffers(bool enable);
+ status_t storeMetaDataInBuffers(bool enable);
//@}
@@ -1090,6 +1284,13 @@ public:
void eventCallback(CameraHalEvent* event);
void setEventProvider(int32_t eventMask, MessageNotifier * eventProvider);
+ static const char* getPixelFormatConstant(const char* parameters_format);
+ static size_t calculateBufferSize(const char* parameters_format, int width, int height);
+ static void getXYFromOffset(unsigned int *x, unsigned int *y,
+ unsigned int offset, unsigned int stride,
+ const char* format);
+ static unsigned int getBPP(const char* format);
+
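calculateBufferSize() and getBPP() gather the per-format size math into one place. For the common cases this is the usual bits-per-pixel calculation; a sketch of the idea (not the HAL's own table):

    // Sketch only; the HAL's format table may cover more cases.
    size_t exampleBufferSize(const char *fmt, int w, int h) {
        if (strcmp(fmt, android::CameraParameters::PIXEL_FORMAT_YUV420SP) == 0)
            return w * h * 3 / 2;   // NV12/NV21: 12 bits per pixel
        if (strcmp(fmt, android::CameraParameters::PIXEL_FORMAT_YUV422I) == 0 ||
            strcmp(fmt, android::CameraParameters::PIXEL_FORMAT_RGB565) == 0)
            return w * h * 2;       // 16 bits per pixel
        return w * h * 2;           // fallback assumption
    }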
/*--------------------Internal Member functions - Private---------------------------------*/
private:
@@ -1097,7 +1298,7 @@ private:
//@{
/** Set the camera parameters specific to Video Recording. */
- bool setVideoModeParameters(const CameraParameters&);
+ bool setVideoModeParameters(const android::CameraParameters&);
/** Reset the camera parameters specific to Video Recording. */
bool resetVideoModeParameters();
@@ -1122,18 +1323,29 @@ private:
status_t allocVideoBufs(uint32_t width, uint32_t height, uint32_t bufferCount);
/** Allocate image capture buffers */
- status_t allocImageBufs(unsigned int width, unsigned int height, size_t length, const char* previewFormat, unsigned int bufferCount);
+ status_t allocImageBufs(unsigned int width, unsigned int height, size_t length,
+ const char* previewFormat, unsigned int bufferCount);
+
+ /** Allocate Raw buffers */
+ status_t allocRawBufs(int width, int height, const char* previewFormat, int bufferCount);
/** Free preview buffers */
status_t freePreviewBufs();
/** Free video bufs */
- status_t freeVideoBufs(void *bufs);
+ status_t freeVideoBufs(CameraBuffer *bufs);
+
+ /** Free RAW bufs */
+ status_t freeRawBufs();
//Check if a given resolution is supported by the current camera
//instance
bool isResolutionValid(unsigned int width, unsigned int height, const char *supportedResolutions);
+ //Check if a given variable frame rate range is supported by the current camera
+ //instance
+ bool isFpsRangeValid(int fpsMin, int fpsMax, const char *supportedFpsRanges);
+
//Check if a given parameter is supported by the current camera
// instance
bool isParameterValid(const char *param, const char *supportedParams);
@@ -1153,14 +1365,17 @@ private:
void forceStopPreview();
- void selectFPSRange(int framerate, int *min_fps, int *max_fps);
-
- void setPreferredPreviewRes(int width, int height);
- void resetPreviewRes(CameraParameters *mParams, int width, int height);
+ void getPreferredPreviewRes(int *width, int *height);
+ void resetPreviewRes(android::CameraParameters *params);
+ // Internal __takePicture function - used in public takePicture() and reprocess()
+ int __takePicture(const char* params, struct timeval *captureStart = NULL);
//@}
-
+ status_t setTapoutLocked(struct preview_stream_ops *out);
+ status_t releaseTapoutLocked(struct preview_stream_ops *out);
+ status_t setTapinLocked(struct preview_stream_ops *in);
+ status_t releaseTapinLocked(struct preview_stream_ops *in);
/*----------Member variables - Public ---------------------*/
public:
int32_t mMsgEnabled;
@@ -1178,21 +1393,37 @@ public:
static const char PARAMS_DELIMITER[];
CameraAdapter *mCameraAdapter;
- sp<AppCallbackNotifier> mAppCallbackNotifier;
- sp<DisplayAdapter> mDisplayAdapter;
- sp<MemoryManager> mMemoryManager;
+ android::sp<AppCallbackNotifier> mAppCallbackNotifier;
+ android::sp<DisplayAdapter> mDisplayAdapter;
+ android::sp<MemoryManager> mMemoryManager;
+
+ android::Vector< android::sp<DisplayAdapter> > mOutAdapters;
+ android::Vector< android::sp<DisplayAdapter> > mInAdapters;
+
+ // TODO(XXX): Even though we support user setting multiple BufferSourceAdapters now
+ // only one tap in surface and one tap out surface is supported at a time.
+ android::sp<DisplayAdapter> mBufferSourceAdapter_In;
+ android::sp<DisplayAdapter> mBufferSourceAdapter_Out;
- sp<IMemoryHeap> mPictureHeap;
+#ifdef OMAP_ENHANCEMENT
+ preview_stream_extended_ops_t * mExtendedPreviewStreamOps;
+#endif
+
+ android::sp<android::IMemoryHeap> mPictureHeap;
int* mGrallocHandles;
bool mFpsRangeChangedByApp;
-
+ int mRawWidth;
+ int mRawHeight;
+ bool mRawCapture;
///static member vars
+ static const int SW_SCALING_FPS_LIMIT;
+
#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
//Timestamp from the CameraHal constructor
@@ -1211,34 +1442,41 @@ private:
bool mDynamicPreviewSwitch;
//keeps paused state of display
bool mDisplayPaused;
+
+#ifdef OMAP_ENHANCEMENT_VTC
+ bool mTunnelSetup;
+ bool mVTCUseCase;
+#endif
+
//Index of current camera adapter
int mCameraIndex;
- mutable Mutex mLock;
+ mutable android::Mutex mLock;
- sp<SensorListener> mSensorListener;
+ android::sp<SensorListener> mSensorListener;
void* mCameraAdapterHandle;
- CameraParameters mParameters;
+ android::CameraParameters mParameters;
bool mPreviewRunning;
bool mPreviewStateOld;
bool mRecordingEnabled;
EventProvider *mEventProvider;
- int32_t *mPreviewDataBufs;
+ CameraBuffer *mPreviewDataBuffers;
uint32_t *mPreviewDataOffsets;
int mPreviewDataFd;
int mPreviewDataLength;
- int32_t *mImageBufs;
+ CameraBuffer *mImageBuffers;
uint32_t *mImageOffsets;
int mImageFd;
int mImageLength;
- int32_t *mPreviewBufs;
+ unsigned int mImageCount;
+ CameraBuffer *mPreviewBuffers;
uint32_t *mPreviewOffsets;
int mPreviewLength;
int mPreviewFd;
- int32_t *mVideoBufs;
+ CameraBuffer *mVideoBuffers;
uint32_t *mVideoOffsets;
int mVideoFd;
int mVideoLength;
@@ -1254,6 +1492,7 @@ private:
CameraProperties::Properties* mCameraProperties;
bool mPreviewStartInProgress;
+ bool mPreviewInitializationDone;
bool mSetPreviewWindowCalled;
@@ -1264,9 +1503,10 @@ private:
int mVideoWidth;
int mVideoHeight;
+ android::String8 mCapModeBackup;
};
-
-}; // namespace android
+} // namespace Camera
+} // namespace Ti
#endif
diff --git a/camera/inc/CameraProperties.h b/camera/inc/CameraProperties.h
index 6f05877..6d92341 100644
--- a/camera/inc/CameraProperties.h
+++ b/camera/inc/CameraProperties.h
@@ -30,15 +30,28 @@
#include <ctype.h>
#include "cutils/properties.h"
-namespace android {
+#include "Common.h"
-#define MAX_CAMERAS_SUPPORTED 2
+namespace Ti {
+namespace Camera {
+
+#define MAX_CAMERAS_SUPPORTED 3
#define MAX_SIMUL_CAMERAS_SUPPORTED 1
#define MAX_PROP_NAME_LENGTH 50
#define MAX_PROP_VALUE_LENGTH 2048
-#define EXIF_MAKE_DEFAULT "default_make"
-#define EXIF_MODEL_DEFAULT "default_model"
+#define REMAINING_BYTES(buff) ((((int)sizeof(buff) - 1 - (int)strlen(buff)) < 0) ? 0 : (sizeof(buff) - 1 - strlen(buff)))
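REMAINING_BYTES clamps to zero so that appending into a fixed-size char buffer can never be handed a negative length that would wrap around as size_t. Worked example:

    char buf[8] = "abcdef";                     // strlen(buf) == 6
    // REMAINING_BYTES(buf) == 8 - 1 - 6 == 1   -> room for one more character plus the terminator
    strncat(buf, "xyz", REMAINING_BYTES(buf));  // buf becomes "abcdefx"
    // now strlen(buf) == 7 and REMAINING_BYTES(buf) == 0, so further appends are no-ops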
+
+enum OperatingMode {
+ MODE_HIGH_SPEED = 0,
+ MODE_HIGH_QUALITY,
+ MODE_ZEROSHUTTERLAG,
+ MODE_VIDEO,
+ MODE_STEREO,
+ MODE_CPCAM,
+ MODE_VIDEO_HIGH_QUALITY,
+ MODE_MAX
+};
// Class that handles the Camera Properties
class CameraProperties
@@ -47,19 +60,32 @@ public:
static const char INVALID[];
static const char CAMERA_NAME[];
static const char CAMERA_SENSOR_INDEX[];
+ static const char CAMERA_SENSOR_ID[];
static const char ORIENTATION_INDEX[];
static const char FACING_INDEX[];
- static const char S3D_SUPPORTED[];
static const char SUPPORTED_PREVIEW_SIZES[];
+ static const char SUPPORTED_PREVIEW_SUBSAMPLED_SIZES[];
+ static const char SUPPORTED_PREVIEW_TOPBOTTOM_SIZES[];
+ static const char SUPPORTED_PREVIEW_SIDEBYSIDE_SIZES[];
static const char SUPPORTED_PREVIEW_FORMATS[];
static const char SUPPORTED_PREVIEW_FRAME_RATES[];
+ static const char SUPPORTED_PREVIEW_FRAME_RATES_EXT[];
static const char SUPPORTED_PICTURE_SIZES[];
+ static const char SUPPORTED_PICTURE_SUBSAMPLED_SIZES[];
+ static const char SUPPORTED_PICTURE_TOPBOTTOM_SIZES[];
+ static const char SUPPORTED_PICTURE_SIDEBYSIDE_SIZES[];
static const char SUPPORTED_PICTURE_FORMATS[];
static const char SUPPORTED_THUMBNAIL_SIZES[];
static const char SUPPORTED_WHITE_BALANCE[];
static const char SUPPORTED_EFFECTS[];
static const char SUPPORTED_ANTIBANDING[];
static const char SUPPORTED_EXPOSURE_MODES[];
+ static const char SUPPORTED_MANUAL_EXPOSURE_MIN[];
+ static const char SUPPORTED_MANUAL_EXPOSURE_MAX[];
+ static const char SUPPORTED_MANUAL_EXPOSURE_STEP[];
+ static const char SUPPORTED_MANUAL_GAIN_ISO_MIN[];
+ static const char SUPPORTED_MANUAL_GAIN_ISO_MAX[];
+ static const char SUPPORTED_MANUAL_GAIN_ISO_STEP[];
static const char SUPPORTED_EV_MIN[];
static const char SUPPORTED_EV_MAX[];
static const char SUPPORTED_EV_STEP[];
@@ -97,9 +123,15 @@ public:
static const char CONTRAST[];
static const char IPP[];
static const char GBCE[];
- static const char AUTOCONVERGENCE[];
+ static const char SUPPORTED_GBCE[];
+ static const char GLBCE[];
+ static const char SUPPORTED_GLBCE[];
static const char AUTOCONVERGENCE_MODE[];
- static const char MANUALCONVERGENCE_VALUES[];
+ static const char AUTOCONVERGENCE_MODE_VALUES[];
+ static const char MANUAL_CONVERGENCE[];
+ static const char SUPPORTED_MANUAL_CONVERGENCE_MIN[];
+ static const char SUPPORTED_MANUAL_CONVERGENCE_MAX[];
+ static const char SUPPORTED_MANUAL_CONVERGENCE_STEP[];
static const char SENSOR_ORIENTATION[];
static const char SENSOR_ORIENTATION_VALUES[];
static const char REVISION[];
@@ -113,16 +145,22 @@ public:
static const char MAX_FD_HW_FACES[];
static const char MAX_FD_SW_FACES[];
+ static const char MAX_PICTURE_WIDTH[];
+ static const char MAX_PICTURE_HEIGHT[];
+
static const char PARAMS_DELIMITER [];
- static const char S3D2D_PREVIEW[];
- static const char S3D2D_PREVIEW_MODES[];
+ static const char S3D_PRV_FRAME_LAYOUT[];
+ static const char S3D_PRV_FRAME_LAYOUT_VALUES[];
+ static const char S3D_CAP_FRAME_LAYOUT[];
+ static const char S3D_CAP_FRAME_LAYOUT_VALUES[];
static const char VSTAB[];
static const char VSTAB_SUPPORTED[];
+ static const char VNF[];
+ static const char VNF_SUPPORTED[];
static const char FRAMERATE_RANGE[];
- static const char FRAMERATE_RANGE_IMAGE[];
- static const char FRAMERATE_RANGE_VIDEO[];
static const char FRAMERATE_RANGE_SUPPORTED[];
+ static const char FRAMERATE_RANGE_EXT_SUPPORTED[];
static const char DEFAULT_VALUE[];
@@ -138,7 +176,14 @@ public:
static const char VIDEO_SIZE[];
static const char SUPPORTED_VIDEO_SIZES[];
- static const char PREFERRED_PREVIEW_SIZE_FOR_VIDEO[];
+
+ static const char MECHANICAL_MISALIGNMENT_CORRECTION_SUPPORTED[];
+ static const char MECHANICAL_MISALIGNMENT_CORRECTION[];
+
+ static const char RAW_WIDTH[];
+ static const char RAW_HEIGHT[];
+
+ static const char CAP_MODE_VALUES[];
CameraProperties();
~CameraProperties();
@@ -147,32 +192,31 @@ public:
class Properties
{
public:
+
Properties()
{
- mProperties = new DefaultKeyedVector<String8, String8>(String8(DEFAULT_VALUE));
- char property[PROPERTY_VALUE_MAX];
- property_get("ro.product.manufacturer", property, EXIF_MAKE_DEFAULT);
- property[0] = toupper(property[0]);
- set(EXIF_MAKE, property);
- property_get("ro.product.model", property, EXIF_MODEL_DEFAULT);
- property[0] = toupper(property[0]);
- set(EXIF_MODEL, property);
}
+
~Properties()
{
- delete mProperties;
}
- ssize_t set(const char *prop, const char *value);
- ssize_t set(const char *prop, int value);
- const char* get(const char * prop);
+
+ void set(const char *prop, const char *value);
+ void set(const char *prop, int value);
+ const char* get(const char * prop) const;
+ int getInt(const char * prop) const;
+ void setSensorIndex(int idx);
+ void setMode(OperatingMode mode);
+ OperatingMode getMode() const;
void dump();
protected:
- const char* keyAt(unsigned int);
- const char* valueAt(unsigned int);
+ const char* keyAt(const unsigned int) const;
+ const char* valueAt(const unsigned int) const;
private:
- DefaultKeyedVector<String8, String8>* mProperties;
+ OperatingMode mCurrentMode;
+ android::DefaultKeyedVector<android::String8, android::String8> mProperties[MODE_MAX];
};
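Properties now keeps one keyed vector per OperatingMode rather than a single heap-allocated map, so the same key can hold different values per mode and the EXIF make/model bootstrapping moves out of the constructor. A usage sketch (whether unset keys fall back to another mode is an assumption):

    // Sketch: per-mode lookups on a hypothetical Properties instance `props`.
    props.setMode(MODE_HIGH_QUALITY);
    props.set(CameraProperties::FRAMERATE_RANGE, "15000,30000");
    props.setMode(MODE_HIGH_SPEED);
    props.set(CameraProperties::FRAMERATE_RANGE, "30000,60000");
    props.setMode(MODE_HIGH_QUALITY);
    const char *range = props.get(CameraProperties::FRAMERATE_RANGE);   // "15000,30000"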
@@ -184,15 +228,15 @@ public:
private:
- uint32_t mCamerasSupported;
+ int mCamerasSupported;
int mInitialized;
- mutable Mutex mLock;
+ mutable android::Mutex mLock;
Properties mCameraProps[MAX_CAMERAS_SUPPORTED];
};
-};
+} // namespace Camera
+} // namespace Ti
#endif //CAMERA_PROPERTIES_H
-
diff --git a/camera/inc/Common.h b/camera/inc/Common.h
new file mode 100644
index 0000000..b369e65
--- /dev/null
+++ b/camera/inc/Common.h
@@ -0,0 +1,65 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef CAMERAHAL_COMMON_H
+#define CAMERAHAL_COMMON_H
+
+#include "UtilsCommon.h"
+#include "DebugUtils.h"
+#include "Status.h"
+
+
+
+
+// logging functions
+#ifdef CAMERAHAL_DEBUG
+# define CAMHAL_LOGD DBGUTILS_LOGD
+# define CAMHAL_LOGDA DBGUTILS_LOGDA
+# define CAMHAL_LOGDB DBGUTILS_LOGDB
+# ifdef CAMERAHAL_DEBUG_VERBOSE
+# define CAMHAL_LOGV DBGUTILS_LOGV
+# define CAMHAL_LOGVA DBGUTILS_LOGVA
+# define CAMHAL_LOGVB DBGUTILS_LOGVB
+# else
+# define CAMHAL_LOGV(...)
+# define CAMHAL_LOGVA(str)
+# define CAMHAL_LOGVB(str, ...)
+# endif
+#else
+# define CAMHAL_LOGD(...)
+# define CAMHAL_LOGDA(str)
+# define CAMHAL_LOGDB(str, ...)
+# define CAMHAL_LOGV(...)
+# define CAMHAL_LOGVA(str)
+# define CAMHAL_LOGVB(str, ...)
+#endif
+
+#define CAMHAL_LOGI DBGUTILS_LOGI
+#define CAMHAL_LOGW DBGUTILS_LOGW
+#define CAMHAL_LOGE DBGUTILS_LOGE
+#define CAMHAL_LOGEA DBGUTILS_LOGEA
+#define CAMHAL_LOGEB DBGUTILS_LOGEB
+#define CAMHAL_LOGF DBGUTILS_LOGF
+
+#define CAMHAL_ASSERT DBGUTILS_ASSERT
+#define CAMHAL_ASSERT_X DBGUTILS_ASSERT_X
+
+#define CAMHAL_UNUSED(x) (void)x
+
+
+
+
+#endif // CAMERAHAL_COMMON_H
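The CAMHAL_LOG* wrappers compile away entirely unless CAMERAHAL_DEBUG (and, for the verbose variants, CAMERAHAL_DEBUG_VERBOSE) is defined, while the info/warning/error macros always stay live. For example:

    CAMHAL_LOGDB("allocating %d preview buffers", count);    // no-op unless CAMERAHAL_DEBUG is set
    CAMHAL_LOGVB("queued buffer %d", index);                  // additionally needs CAMERAHAL_DEBUG_VERBOSE
    CAMHAL_LOGEA("failed to allocate preview buffers");       // always compiled in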
diff --git a/camera/inc/Decoder_libjpeg.h b/camera/inc/Decoder_libjpeg.h
new file mode 100755
index 0000000..8d5b649
--- /dev/null
+++ b/camera/inc/Decoder_libjpeg.h
@@ -0,0 +1,57 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_CAMERA_HARDWARE_DECODER_LIBJPEG_H
+#define ANDROID_CAMERA_HARDWARE_DECODER_LIBJPEG_H
+
+#include "CameraHal.h"
+
+extern "C" {
+#include "jhead.h"
+
+#undef TRUE
+#undef FALSE
+
+}
+
+
+namespace Ti {
+namespace Camera {
+
+class Decoder_libjpeg
+{
+
+public:
+ Decoder_libjpeg();
+ ~Decoder_libjpeg();
+ int readDHTSize();
+ int appendDHT(unsigned char *jpeg_src, int filled_len, unsigned char *jpeg_with_dht_buffer, int buff_size);
+ bool decode(unsigned char *jpeg_src, int filled_len, unsigned char *nv12_buffer, int stride);
+
+private:
+ void release();
+ unsigned char **Y_Plane;
+ unsigned char **U_Plane;
+ unsigned char **V_Plane;
+ unsigned char *UV_Plane;
+ unsigned int mWidth, mHeight;
+};
+
+} // namespace Camera
+} // namespace Ti
+
+#endif
+
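appendDHT() exists because MJPEG-style frames from the sensor commonly omit the Huffman tables libjpeg expects, so a default DHT segment is spliced into the bitstream before decoding into NV12. A hedged usage sketch (the meaning of the int return values is an assumption):

    // Sketch only; appendDHT() is assumed to return the resulting bitstream length.
    Decoder_libjpeg decoder;
    int paddedSize = jpegSize + decoder.readDHTSize();
    unsigned char *padded = new unsigned char[paddedSize];
    int filledLen = decoder.appendDHT(jpegSrc, jpegSize, padded, paddedSize);
    decoder.decode(padded, filledLen, nv12Out, stride);
    delete [] padded;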
diff --git a/camera/inc/Encoder_libjpeg.h b/camera/inc/Encoder_libjpeg.h
index fb9a894..72feb08 100644
--- a/camera/inc/Encoder_libjpeg.h
+++ b/camera/inc/Encoder_libjpeg.h
@@ -29,11 +29,19 @@
extern "C" {
#include "jhead.h"
+
+#undef TRUE
+#undef FALSE
+
}
-#define CANCEL_TIMEOUT 3000000 // 3 seconds
+#include "CameraHal.h"
+
+#define CANCEL_TIMEOUT 5000000 // 5 seconds
+
+namespace Ti {
+namespace Camera {
-namespace android {
/**
* libjpeg encoder class - uses libjpeg to encode yuv
*/
@@ -45,6 +53,7 @@ typedef void (*encoder_libjpeg_callback_t) (void* main_jpeg,
void* cookie1,
void* cookie2,
void* cookie3,
+ void* cookie4,
bool canceled);
// these have to match strings defined in external/jhead/exif.c
@@ -86,7 +95,12 @@ class ExifElementsTable {
public:
ExifElementsTable() :
gps_tag_count(0), exif_tag_count(0), position(0),
- jpeg_opened(false), has_datetime_tag(false) { }
+ jpeg_opened(false)
+ {
+#ifdef ANDROID_API_JB_OR_LATER
+ has_datetime_tag = false;
+#endif
+ }
~ExifElementsTable();
status_t insertElement(const char* tag, const char* value);
@@ -102,10 +116,12 @@ class ExifElementsTable {
unsigned int exif_tag_count;
unsigned int position;
bool jpeg_opened;
+#ifdef ANDROID_API_JB_OR_LATER
bool has_datetime_tag;
+#endif
};
-class Encoder_libjpeg : public Thread {
+class Encoder_libjpeg : public android::Thread {
/* public member types and variables */
public:
struct params {
@@ -131,9 +147,9 @@ class Encoder_libjpeg : public Thread {
CameraFrame::FrameType type,
void* cookie1,
void* cookie2,
- void* cookie3)
- : Thread(false), mMainInput(main_jpeg), mThumbnailInput(tn_jpeg), mCb(cb),
- mCancelEncoding(false), mCookie1(cookie1), mCookie2(cookie2), mCookie3(cookie3),
+ void* cookie3, void *cookie4)
+ : android::Thread(false), mMainInput(main_jpeg), mThumbnailInput(tn_jpeg), mCb(cb),
+ mCancelEncoding(false), mCookie1(cookie1), mCookie2(cookie2), mCookie3(cookie3), mCookie4(cookie4),
mType(type), mThumb(NULL) {
this->incStrong(this);
mCancelSem.Create(0);
@@ -145,10 +161,9 @@ class Encoder_libjpeg : public Thread {
virtual bool threadLoop() {
size_t size = 0;
- sp<Encoder_libjpeg> tn = NULL;
if (mThumbnailInput) {
// start thread to encode thumbnail
- mThumb = new Encoder_libjpeg(mThumbnailInput, NULL, NULL, mType, NULL, NULL, NULL);
+ mThumb = new Encoder_libjpeg(mThumbnailInput, NULL, NULL, mType, NULL, NULL, NULL, NULL);
mThumb->run();
}
@@ -167,7 +182,7 @@ class Encoder_libjpeg : public Thread {
}
if(mCb) {
- mCb(mMainInput, mThumbnailInput, mType, mCookie1, mCookie2, mCookie3, mCancelEncoding);
+ mCb(mMainInput, mThumbnailInput, mType, mCookie1, mCookie2, mCookie3, mCookie4, mCancelEncoding);
}
// encoder thread runs, self-destructs, and then exits
@@ -197,13 +212,15 @@ class Encoder_libjpeg : public Thread {
void* mCookie1;
void* mCookie2;
void* mCookie3;
+ void* mCookie4;
CameraFrame::FrameType mType;
- sp<Encoder_libjpeg> mThumb;
- Semaphore mCancelSem;
+ android::sp<Encoder_libjpeg> mThumb;
+ Utils::Semaphore mCancelSem;
size_t encode(params*);
};
-}
+} // namespace Camera
+} // namespace Ti
#endif
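Encoder_libjpeg runs on its own android::Thread, optionally spawning a second instance for the thumbnail, and reports completion through encoder_libjpeg_callback_t, which now carries the extra cookie4. A hedged construction sketch (the params filling and cookie values are hypothetical):

    // Sketch: start an encode and receive the result in the callback.
    android::sp<Encoder_libjpeg> enc =
        new Encoder_libjpeg(mainJpeg, NULL /* no thumbnail */, jpegDoneCb,
                            CameraFrame::IMAGE_FRAME, cookie1, cookie2, NULL, cookie4);
    enc->run();
    // on completion: jpegDoneCb(main, thumb, type, cookie1, cookie2, cookie3, cookie4, canceled)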
diff --git a/camera/inc/General3A_Settings.h b/camera/inc/General3A_Settings.h
index dab1f8e..c1e017c 100644
--- a/camera/inc/General3A_Settings.h
+++ b/camera/inc/General3A_Settings.h
@@ -29,12 +29,8 @@
#ifndef GENERAL_3A_SETTINGS_H
#define GENERAL_3A_SETTINGS_H
-#define FOCUS_FACE_PRIORITY OMX_IMAGE_FocusControlMax -1
-#define FOCUS_REGION_PRIORITY OMX_IMAGE_FocusControlMax -2
-#define WB_FACE_PRIORITY OMX_WhiteBalControlMax -1
-#define EXPOSURE_FACE_PRIORITY OMX_ExposureControlMax - 1
-
-namespace android {
+namespace Ti {
+namespace Camera {
struct userToOMX_LUT{
const char * userDefinition;
@@ -58,15 +54,15 @@ const userToOMX_LUT isoUserToOMX[] = {
};
const userToOMX_LUT effects_UserToOMX [] = {
- { CameraParameters::EFFECT_NONE, OMX_ImageFilterNone },
- { CameraParameters::EFFECT_NEGATIVE, OMX_ImageFilterNegative },
- { CameraParameters::EFFECT_SOLARIZE, OMX_ImageFilterSolarize },
- { CameraParameters::EFFECT_SEPIA, OMX_ImageFilterSepia },
- { CameraParameters::EFFECT_MONO, OMX_ImageFilterGrayScale },
- { CameraParameters::EFFECT_BLACKBOARD, OMX_TI_ImageFilterBlackBoard },
- { CameraParameters::EFFECT_WHITEBOARD, OMX_TI_ImageFilterWhiteBoard },
- { CameraParameters::EFFECT_AQUA, OMX_TI_ImageFilterAqua },
- { CameraParameters::EFFECT_POSTERIZE, OMX_TI_ImageFilterPosterize },
+ { android::CameraParameters::EFFECT_NONE, OMX_ImageFilterNone },
+ { android::CameraParameters::EFFECT_NEGATIVE, OMX_ImageFilterNegative },
+ { android::CameraParameters::EFFECT_SOLARIZE, OMX_ImageFilterSolarize },
+ { android::CameraParameters::EFFECT_SEPIA, OMX_ImageFilterSepia },
+ { android::CameraParameters::EFFECT_MONO, OMX_ImageFilterGrayScale },
+ { android::CameraParameters::EFFECT_BLACKBOARD, OMX_TI_ImageFilterBlackBoard },
+ { android::CameraParameters::EFFECT_WHITEBOARD, OMX_TI_ImageFilterWhiteBoard },
+ { android::CameraParameters::EFFECT_AQUA, OMX_TI_ImageFilterAqua },
+ { android::CameraParameters::EFFECT_POSTERIZE, OMX_TI_ImageFilterPosterize },
#ifdef OMAP_ENHANCEMENT
{ TICameraParameters::EFFECT_NATURAL, OMX_ImageFilterNatural },
{ TICameraParameters::EFFECT_VIVID, OMX_ImageFilterVivid },
@@ -76,27 +72,24 @@ const userToOMX_LUT effects_UserToOMX [] = {
};
const userToOMX_LUT scene_UserToOMX [] = {
- { CameraParameters::SCENE_MODE_AUTO, OMX_Manual },
- { CameraParameters::SCENE_MODE_ACTION, OMX_TI_Action },
- { CameraParameters::SCENE_MODE_NIGHT, OMX_TI_Night },
- { CameraParameters::SCENE_MODE_PARTY, OMX_TI_Party },
- { CameraParameters::SCENE_MODE_SUNSET, OMX_TI_Sunset },
-/*********** TODO: These scene modes are not verified. ************
- ***************** Have to verify and reeable later. **************
- { CameraParameters::SCENE_MODE_THEATRE, OMX_TI_Theatre },
- { CameraParameters::SCENE_MODE_LANDSCAPE, OMX_Landscape },
- { CameraParameters::SCENE_MODE_NIGHT_PORTRAIT, OMX_NightPortrait },
- { CameraParameters::SCENE_MODE_FIREWORKS, OMX_Fireworks },
- { CameraParameters::SCENE_MODE_BEACH, OMX_TI_Beach },
- { CameraParameters::SCENE_MODE_CANDLELIGHT, OMX_TI_Candlelight },
- { CameraParameters::SCENE_MODE_PORTRAIT, OMX_TI_Portrait },
- { CameraParameters::SCENE_MODE_SNOW, OMX_TI_Snow },
- { CameraParameters::SCENE_MODE_STEADYPHOTO, OMX_TI_Steadyphoto },
-*********************************************************************/
+ { android::CameraParameters::SCENE_MODE_AUTO, OMX_Manual },
+ { android::CameraParameters::SCENE_MODE_LANDSCAPE, OMX_Landscape },
+ { android::CameraParameters::SCENE_MODE_NIGHT_PORTRAIT, OMX_NightPortrait },
+ { android::CameraParameters::SCENE_MODE_FIREWORKS, OMX_Fireworks },
+ { android::CameraParameters::SCENE_MODE_ACTION, OMX_TI_Action },
+ { android::CameraParameters::SCENE_MODE_BEACH, OMX_TI_Beach },
+ { android::CameraParameters::SCENE_MODE_CANDLELIGHT, OMX_TI_Candlelight },
+ { android::CameraParameters::SCENE_MODE_NIGHT, OMX_TI_Night },
+ { android::CameraParameters::SCENE_MODE_PARTY, OMX_TI_Party },
+ { android::CameraParameters::SCENE_MODE_PORTRAIT, OMX_TI_Portrait },
+ { android::CameraParameters::SCENE_MODE_SNOW, OMX_TI_Snow },
+ { android::CameraParameters::SCENE_MODE_STEADYPHOTO, OMX_TI_Steadyphoto },
+ { android::CameraParameters::SCENE_MODE_SUNSET, OMX_TI_Sunset },
+ { android::CameraParameters::SCENE_MODE_THEATRE, OMX_TI_Theatre },
+ { android::CameraParameters::SCENE_MODE_SPORTS, OMX_Sport },
#ifdef OMAP_ENHANCEMENT
{ TICameraParameters::SCENE_MODE_CLOSEUP, OMX_Closeup },
{ TICameraParameters::SCENE_MODE_AQUA, OMX_Underwater },
- { TICameraParameters::SCENE_MODE_SPORT, OMX_Sport },
{ TICameraParameters::SCENE_MODE_MOOD, OMX_Mood },
{ TICameraParameters::SCENE_MODE_NIGHT_INDOOR, OMX_NightIndoor },
{ TICameraParameters::SCENE_MODE_DOCUMENT, OMX_Document },
@@ -108,47 +101,45 @@ const userToOMX_LUT scene_UserToOMX [] = {
};
const userToOMX_LUT whiteBal_UserToOMX [] = {
- { CameraParameters::WHITE_BALANCE_AUTO, OMX_WhiteBalControlAuto },
- { CameraParameters::WHITE_BALANCE_DAYLIGHT, OMX_WhiteBalControlSunLight },
- { CameraParameters::WHITE_BALANCE_CLOUDY_DAYLIGHT, OMX_WhiteBalControlCloudy },
- { CameraParameters::WHITE_BALANCE_FLUORESCENT, OMX_WhiteBalControlFluorescent },
- { CameraParameters::WHITE_BALANCE_INCANDESCENT, OMX_WhiteBalControlIncandescent },
-/********************** THESE ARE CURRENT NOT TUNED PROPERLY *************************
- { CameraParameters::WHITE_BALANCE_SHADE, OMX_TI_WhiteBalControlShade },
- { CameraParameters::WHITE_BALANCE_TWILIGHT, OMX_TI_WhiteBalControlTwilight },
- { CameraParameters::WHITE_BALANCE_WARM_FLUORESCENT, OMX_TI_WhiteBalControlWarmFluorescent },
-**************************************************************************************/
+ { android::CameraParameters::WHITE_BALANCE_AUTO, OMX_WhiteBalControlAuto },
+ { android::CameraParameters::WHITE_BALANCE_DAYLIGHT, OMX_WhiteBalControlSunLight },
+ { android::CameraParameters::WHITE_BALANCE_CLOUDY_DAYLIGHT, OMX_WhiteBalControlCloudy },
+ { android::CameraParameters::WHITE_BALANCE_FLUORESCENT, OMX_WhiteBalControlFluorescent },
+ { android::CameraParameters::WHITE_BALANCE_INCANDESCENT, OMX_WhiteBalControlIncandescent },
+ { android::CameraParameters::WHITE_BALANCE_SHADE, OMX_TI_WhiteBalControlShade },
+ { android::CameraParameters::WHITE_BALANCE_TWILIGHT, OMX_TI_WhiteBalControlTwilight },
+ { android::CameraParameters::WHITE_BALANCE_WARM_FLUORESCENT, OMX_TI_WhiteBalControlWarmFluorescent },
#ifdef OMAP_ENHANCEMENT
{ TICameraParameters::WHITE_BALANCE_TUNGSTEN, OMX_WhiteBalControlTungsten },
{ TICameraParameters::WHITE_BALANCE_HORIZON, OMX_WhiteBalControlHorizon },
- { TICameraParameters::WHITE_BALANCE_FACE, WB_FACE_PRIORITY },
{ TICameraParameters::WHITE_BALANCE_SUNSET, OMX_TI_WhiteBalControlSunset }
#endif
};
const userToOMX_LUT antibanding_UserToOMX [] = {
- { CameraParameters::ANTIBANDING_OFF, OMX_FlickerCancelOff },
- { CameraParameters::ANTIBANDING_AUTO, OMX_FlickerCancelAuto },
- { CameraParameters::ANTIBANDING_50HZ, OMX_FlickerCancel50 },
- { CameraParameters::ANTIBANDING_60HZ, OMX_FlickerCancel60 }
+ { android::CameraParameters::ANTIBANDING_OFF, OMX_FlickerCancelOff },
+ { android::CameraParameters::ANTIBANDING_AUTO, OMX_FlickerCancelAuto },
+ { android::CameraParameters::ANTIBANDING_50HZ, OMX_FlickerCancel50 },
+ { android::CameraParameters::ANTIBANDING_60HZ, OMX_FlickerCancel60 }
};
const userToOMX_LUT focus_UserToOMX [] = {
- { CameraParameters::FOCUS_MODE_AUTO, OMX_IMAGE_FocusControlAutoLock },
- { CameraParameters::FOCUS_MODE_INFINITY, OMX_IMAGE_FocusControlAutoInfinity },
- { CameraParameters::FOCUS_MODE_INFINITY, OMX_IMAGE_FocusControlHyperfocal },
- { CameraParameters::FOCUS_MODE_MACRO, OMX_IMAGE_FocusControlAutoMacro },
- { CameraParameters::FOCUS_MODE_CONTINUOUS_VIDEO, OMX_IMAGE_FocusControlAuto },
- { CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE, OMX_IMAGE_FocusControlAuto },
+ { android::CameraParameters::FOCUS_MODE_AUTO, OMX_IMAGE_FocusControlAutoLock },
+ { android::CameraParameters::FOCUS_MODE_INFINITY, OMX_IMAGE_FocusControlAutoInfinity },
+ { android::CameraParameters::FOCUS_MODE_INFINITY, OMX_IMAGE_FocusControlHyperfocal },
+ { android::CameraParameters::FOCUS_MODE_MACRO, OMX_IMAGE_FocusControlAutoMacro },
+ { android::CameraParameters::FOCUS_MODE_CONTINUOUS_VIDEO, OMX_IMAGE_FocusControlAuto },
+ { android::CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE, OMX_IMAGE_FocusControlAuto },
#ifdef OMAP_ENHANCEMENT
- { TICameraParameters::FOCUS_MODE_FACE , FOCUS_FACE_PRIORITY },
+ { TICameraParameters::FOCUS_MODE_FACE , OMX_IMAGE_FocusControlContinousFacePriority },
{ TICameraParameters::FOCUS_MODE_PORTRAIT, OMX_IMAGE_FocusControlPortrait },
{ TICameraParameters::FOCUS_MODE_EXTENDED, OMX_IMAGE_FocusControlExtended },
#endif
+ { TICameraParameters::FOCUS_MODE_OFF , OMX_IMAGE_FocusControlOff }
};
const userToOMX_LUT exposure_UserToOMX [] = {
- { TICameraParameters::EXPOSURE_MODE_OFF, OMX_ExposureControlOff },
+ { TICameraParameters::EXPOSURE_MODE_MANUAL, OMX_ExposureControlOff },
{ TICameraParameters::EXPOSURE_MODE_AUTO, OMX_ExposureControlAuto },
{ TICameraParameters::EXPOSURE_MODE_NIGHT, OMX_ExposureControlNight },
{ TICameraParameters::EXPOSURE_MODE_BACKLIGHT, OMX_ExposureControlBackLight },
@@ -158,15 +149,14 @@ const userToOMX_LUT exposure_UserToOMX [] = {
{ TICameraParameters::EXPOSURE_MODE_BEACH, OMX_ExposureControlBeach },
{ TICameraParameters::EXPOSURE_MODE_APERTURE, OMX_ExposureControlLargeAperture },
{ TICameraParameters::EXPOSURE_MODE_SMALL_APERTURE, OMX_ExposureControlSmallApperture },
- { TICameraParameters::EXPOSURE_MODE_FACE, EXPOSURE_FACE_PRIORITY },
};
const userToOMX_LUT flash_UserToOMX [] = {
- { CameraParameters::FLASH_MODE_OFF ,OMX_IMAGE_FlashControlOff },
- { CameraParameters::FLASH_MODE_ON ,OMX_IMAGE_FlashControlOn },
- { CameraParameters::FLASH_MODE_AUTO ,OMX_IMAGE_FlashControlAuto },
- { CameraParameters::FLASH_MODE_TORCH ,OMX_IMAGE_FlashControlTorch },
- { CameraParameters::FLASH_MODE_RED_EYE ,OMX_IMAGE_FlashControlRedEyeReduction },
+ { android::CameraParameters::FLASH_MODE_OFF ,OMX_IMAGE_FlashControlOff },
+ { android::CameraParameters::FLASH_MODE_ON ,OMX_IMAGE_FlashControlOn },
+ { android::CameraParameters::FLASH_MODE_AUTO ,OMX_IMAGE_FlashControlAuto },
+ { android::CameraParameters::FLASH_MODE_TORCH ,OMX_IMAGE_FlashControlTorch },
+ { android::CameraParameters::FLASH_MODE_RED_EYE ,OMX_IMAGE_FlashControlRedEyeReduction },
#ifdef OMAP_ENHANCEMENT
{ TICameraParameters::FLASH_MODE_FILL_IN ,OMX_IMAGE_FlashControlFillin }
#endif
@@ -241,11 +231,25 @@ class Gen3A_settings{
int Sharpness;
int ISO;
int FlashMode;
+ int ManualExposure;
+ int ManualExposureRight;
+ int ManualGain;
+ int ManualGainRight;
unsigned int Brightness;
OMX_BOOL ExposureLock;
OMX_BOOL FocusLock;
OMX_BOOL WhiteBalanceLock;
+
+ OMX_BOOL AlgoExternalGamma;
+ OMX_BOOL AlgoNSF1;
+ OMX_BOOL AlgoNSF2;
+ OMX_BOOL AlgoSharpening;
+ OMX_BOOL AlgoThreeLinColorMap;
+ OMX_BOOL AlgoGIC;
+
+ OMX_TI_CONFIG_GAMMATABLE_TYPE mGammaTable;
+
};
/*
@@ -270,11 +274,22 @@ enum E3ASettingsFlags
SetExpLock = 1 << 16,
SetWBLock = 1 << 17,
SetMeteringAreas = 1 << 18,
+ SetManualExposure = 1 << 19,
+
+ SetAlgoExternalGamma = 1 << 20,
+ SetAlgoNSF1 = 1 << 21,
+ SetAlgoNSF2 = 1 << 22,
+ SetAlgoSharpening = 1 << 23,
+ SetAlgoThreeLinColorMap = 1 << 24,
+ SetAlgoGIC = 1 << 25,
+ SetGammaTable = 1 << 26,
+
E3aSettingMax,
E3AsettingsAll = ( ((E3aSettingMax -1 ) << 1) -1 ) /// all possible flags raised
};
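E3AsettingsAll relies on E3aSettingMax being the first enumerator after the highest 1 << n flag, so doubling it and subtracting one sets every lower bit. With SetGammaTable = 1 << 26 now the top flag:

    // E3aSettingMax == (1 << 26) + 1, therefore
    // ((E3aSettingMax - 1) << 1) - 1 == (1 << 27) - 1 == 0x07FFFFFF
    // i.e. bits 0..26 set -- every Set* flag above raised at once.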
-};
+} // namespace Camera
+} // namespace Ti
#endif //GENERAL_3A_SETTINGS_H
diff --git a/camera/inc/NV12_resize.h b/camera/inc/NV12_resize.h
index 927faf8..4b05a4f 100644
--- a/camera/inc/NV12_resize.h
+++ b/camera/inc/NV12_resize.h
@@ -1,66 +1,61 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
#ifndef NV12_RESIZE_H_
#define NV12_RESIZE_H_
-#ifdef __cplusplus
-extern "C" {
-#endif
-
-typedef unsigned char mmBool;
-typedef unsigned char mmUchar;
-typedef unsigned char mmUint8;
-typedef unsigned char mmByte;
-typedef unsigned short mmUint16;
-typedef unsigned int mmUint32;
-typedef unsigned long mmUint64;
-typedef signed char mmInt8;
-typedef char mmChar;
-typedef signed short mmInt16;
-typedef signed int mmInt32;
-typedef signed long mmLong;
-typedef signed int mmHandle;
-typedef float mmFloat;
-typedef double mmDouble;
-typedef int HObj;
-typedef HObj HFile;
-typedef int HDir;
-typedef void* mmMutexHandle;
-typedef struct _fstat
-{
- mmInt32 fileSize;
-}VE_FileAttribute;
-
-typedef struct
-{
- mmInt32 second;
- mmInt32 millisecond;
-}tsVE_Time;
-
-typedef struct
-{
- mmInt32 year;
- mmInt32 month;
- mmInt32 day;
- mmInt32 hour;
- mmInt32 minute;
- mmInt32 second;
+#include "Common.h"
+
+typedef unsigned char mmBool;
+typedef unsigned char mmUchar;
+typedef unsigned char mmUint8;
+typedef unsigned char mmByte;
+typedef unsigned short mmUint16;
+typedef unsigned int mmUint32;
+typedef unsigned long mmUint64;
+typedef signed char mmInt8;
+typedef char mmChar;
+typedef signed short mmInt16;
+typedef signed int mmInt32;
+typedef signed long mmLong;
+typedef signed int mmHandle;
+typedef float mmFloat;
+typedef double mmDouble;
+typedef int HObj;
+typedef HObj HFile;
+typedef int HDir;
+typedef void* mmMutexHandle;
+typedef struct _fstat {
+ mmInt32 fileSize;
+} VE_FileAttribute;
+
+typedef struct {
+ mmInt32 second;
+ mmInt32 millisecond;
+} tsVE_Time;
+
+typedef struct {
+ mmInt32 year;
+ mmInt32 month;
+ mmInt32 day;
+ mmInt32 hour;
+ mmInt32 minute;
+ mmInt32 second;
} TmDateTime;
-/*----------------------------------------------------------------------------
- Define : TRUE/FALSE for boolean operations
-----------------------------------------------------------------------------*/
-
-#ifndef TRUE
- #define TRUE 1
-#endif
-
-#ifndef FALSE
- #define FALSE 0
-#endif
-
-#ifndef NULL
- #define NULL 0
-#endif
-
const mmUint8 bWeights[8][8][4] = {
{{64, 0, 0, 0}, {56, 0, 0, 8}, {48, 0, 0,16}, {40, 0, 0,24},
{32, 0, 0,32}, {24, 0, 0,40}, {16, 0, 0,48}, { 8, 0, 0,56}},
@@ -87,8 +82,7 @@ const mmUint8 bWeights[8][8][4] = {
{ 4,28,28,4 }, { 3,21,35, 5}, { 2,14,42, 6}, { 1,7 ,49, 7}}
};
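Each bWeights entry is a set of four coefficients summing to 64, i.e. bilinear interpolation weights in 6-bit fixed point for the 8x8 grid of sub-pixel phases. A sketch of how such weights blend four neighbouring samples into one output sample (the neighbour ordering the resizer uses is an assumption):

    // Sketch: 6-bit fixed-point bilinear blend; the s0..s3 ordering is assumed.
    const mmUint8 *w = bWeights[yPhase][xPhase];
    mmUint8 out = (mmUint8)((w[0] * s0 + w[1] * s1 + w[2] * s2 + w[3] * s3) >> 6);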
-typedef enum
-{
+typedef enum {
IC_FORMAT_NONE,
IC_FORMAT_RGB565,
IC_FORMAT_RGB888,
@@ -96,26 +90,24 @@ typedef enum
IC_FORMAT_YCbCr,
IC_FORMAT_YCbCr420_FRAME_PK,
IC_FORMAT_MAX
-}enumImageFormat;
+} enumImageFormat;
/* This structure defines the format of an image */
-typedef struct
-{
- mmInt32 uWidth;
- mmInt32 uHeight;
- mmInt32 uStride;
- enumImageFormat eFormat;
- mmByte *imgPtr;
- mmByte *clrPtr;
- mmInt32 uOffset;
+typedef struct {
+ mmInt32 uWidth;
+ mmInt32 uHeight;
+ mmInt32 uStride;
+ enumImageFormat eFormat;
+ mmByte *imgPtr;
+ mmByte *clrPtr;
+ mmInt32 uOffset;
} structConvImage;
-typedef struct IC_crop_struct
-{
- mmUint32 x; /* x pos of rectangle */
- mmUint32 y; /* y pos of rectangle */
- mmUint32 uWidth; /* dx of rectangle */
- mmUint32 uHeight; /* dy of rectangle */
+typedef struct IC_crop_struct {
+ mmUint32 x; /* x pos of rectangle */
+ mmUint32 y; /* y pos of rectangle */
+ mmUint32 uWidth; /* dx of rectangle */
+ mmUint32 uHeight; /* dy of rectangle */
} IC_rect_type;
/*==========================================================================
@@ -133,16 +125,11 @@ typedef struct IC_crop_struct
* faster version.
============================================================================*/
mmBool
-VT_resizeFrame_Video_opt2_lp
-(
- structConvImage* i_img_ptr, /* Points to the input image */
- structConvImage* o_img_ptr, /* Points to the output image */
- IC_rect_type* cropout, /* how much to resize to in final image */
- mmUint16 dummy /* Transparent pixel value */
- );
-
-#ifdef __cplusplus
-}
-#endif
+VT_resizeFrame_Video_opt2_lp(
+ structConvImage* i_img_ptr, /* Points to the input image */
+ structConvImage* o_img_ptr, /* Points to the output image */
+ IC_rect_type* cropout, /* how much to resize to in final image */
+ mmUint16 dummy /* Transparent pixel value */
+ );
#endif //#define NV12_RESIZE_H_
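The reformatted prototype keeps the single entry point; a hedged sketch of filling the two image descriptors and calling it (the chosen format constant and every value below are assumptions):

    // Sketch only; field values and the eFormat choice are hypothetical.
    structConvImage in, out;
    memset(&in, 0, sizeof(in));  memset(&out, 0, sizeof(out));
    in.uWidth  = 1280; in.uHeight = 720; in.uStride  = 1280; in.imgPtr  = srcY;  in.clrPtr  = srcUV;
    out.uWidth =  640; out.uHeight = 360; out.uStride =  640; out.imgPtr = dstY;  out.clrPtr = dstUV;
    in.eFormat = out.eFormat = IC_FORMAT_YCbCr420_FRAME_PK;   // assumed NV12-style layout
    IC_rect_type crop = { 0, 0, 640, 360 };                    // resize into the whole output image
    VT_resizeFrame_Video_opt2_lp(&in, &out, &crop, 0);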
diff --git a/camera/inc/OMXCameraAdapter/OMXCameraAdapter.h b/camera/inc/OMXCameraAdapter/OMXCameraAdapter.h
index 59c5efc..b2da574 100644
--- a/camera/inc/OMXCameraAdapter/OMXCameraAdapter.h
+++ b/camera/inc/OMXCameraAdapter/OMXCameraAdapter.h
@@ -48,7 +48,8 @@ extern "C"
}
-namespace android {
+namespace Ti {
+namespace Camera {
#define Q16_OFFSET 16
@@ -60,6 +61,7 @@ namespace android {
#define MIN_JPEG_QUALITY 1
#define MAX_JPEG_QUALITY 100
#define EXP_BRACKET_RANGE 10
+#define ZOOM_BRACKET_RANGE 10
#define FOCUS_DIST_SIZE 100
#define FOCUS_DIST_BUFFER_SIZE 500
@@ -68,6 +70,8 @@ namespace android {
#define DEFAULT_THUMB_WIDTH 160
#define DEFAULT_THUMB_HEIGHT 120
#define FRAME_RATE_FULL_HD 27
+#define FRAME_RATE_HIGH_HD 60
+
#define ZOOM_STAGES 61
#define FACE_DETECTION_BUFFER_SIZE 0x1000
@@ -116,27 +120,27 @@ namespace android {
#define OMX_CAMERA_PORT_IMAGE_OUT_IMAGE (OMX_CAMERA_PORT_IMAGE_START + 0)
-#define OMX_INIT_STRUCT(_s_, _name_) \
- memset(&(_s_), 0x0, sizeof(_name_)); \
- (_s_).nSize = sizeof(_name_); \
- (_s_).nVersion.s.nVersionMajor = 0x1; \
- (_s_).nVersion.s.nVersionMinor = 0x1; \
- (_s_).nVersion.s.nRevision = 0x0; \
+#define OMX_INIT_STRUCT(_s_, _name_) \
+ memset(&(_s_), 0x0, sizeof(_name_)); \
+ (_s_).nSize = sizeof(_name_); \
+ (_s_).nVersion.s.nVersionMajor = 0x1; \
+ (_s_).nVersion.s.nVersionMinor = 0x1; \
+ (_s_).nVersion.s.nRevision = 0x0; \
(_s_).nVersion.s.nStep = 0x0
#define OMX_INIT_STRUCT_PTR(_s_, _name_) \
- memset((_s_), 0x0, sizeof(_name_)); \
- (_s_)->nSize = sizeof(_name_); \
- (_s_)->nVersion.s.nVersionMajor = 0x1; \
- (_s_)->nVersion.s.nVersionMinor = 0x1; \
- (_s_)->nVersion.s.nRevision = 0x0; \
+ memset((_s_), 0x0, sizeof(_name_)); \
+ (_s_)->nSize = sizeof(_name_); \
+ (_s_)->nVersion.s.nVersionMajor = 0x1; \
+ (_s_)->nVersion.s.nVersionMinor = 0x1; \
+ (_s_)->nVersion.s.nRevision = 0x0; \
(_s_)->nVersion.s.nStep = 0x0
-#define GOTO_EXIT_IF(_CONDITION,_ERROR) { \
- if ((_CONDITION)) { \
- eError = (_ERROR); \
- goto EXIT; \
- } \
+#define GOTO_EXIT_IF(_CONDITION,_ERROR) { \
+ if ((_CONDITION)) { \
+ eError = (_ERROR); \
+ goto EXIT; \
+ } \
}
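The re-aligned macros keep the standard OMX IL idiom: zero the config struct, stamp nSize and the IL version, then pass it to OMX_GetConfig/OMX_SetConfig. Typical use (the particular config struct and index are only an example):

    OMX_CONFIG_BRIGHTNESSTYPE brightness;
    OMX_INIT_STRUCT(brightness, OMX_CONFIG_BRIGHTNESSTYPE);
    brightness.nPortIndex  = ctx.mPrevPortIndex;   // `ctx` is a hypothetical instance of the component context below
    brightness.nBrightness = 50;
    OMX_ERRORTYPE eError = OMX_SetConfig(handle /* component handle, assumed in scope */,
                                         OMX_IndexConfigCommonBrightness, &brightness);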
const int64_t kCameraBufferLatencyNs = 250000000LL; // 250 ms
@@ -167,15 +171,16 @@ struct CapPixelformat {
const char *param;
};
-struct CapU32 {
- OMX_U32 num;
+struct CapCodingFormat {
+ OMX_IMAGE_CODINGTYPE imageCodingFormat;
const char *param;
};
-struct CapU32Pair {
- OMX_U32 num1, num2;
+struct CapU32 {
+ OMX_U32 num;
const char *param;
};
+
struct CapS32 {
OMX_S32 num;
const char *param;
@@ -185,7 +190,6 @@ typedef CapU32 CapFramerate;
typedef CapU32 CapISO;
typedef CapU32 CapSensorName;
typedef CapS32 CapZoom;
-typedef CapS32 CapEVComp;
/**
* Class which completely abstracts the camera hardware interaction from camera hal
@@ -205,19 +209,15 @@ public:
///Five second timeout
static const int CAMERA_ADAPTER_TIMEOUT = 5000*1000;
- enum OMXCameraEvents
- {
- CAMERA_PORT_ENABLE = 0x1,
- CAMERA_PORT_FLUSH = 0x2,
- CAMERA_PORT_DISABLE = 0x4,
- };
-
enum CaptureMode
{
+ INITIAL_MODE = -1,
HIGH_SPEED = 1,
- HIGH_QUALITY = 2,
- VIDEO_MODE = 3,
- HIGH_QUALITY_ZSL = 4,
+ HIGH_QUALITY,
+ VIDEO_MODE,
+ HIGH_QUALITY_ZSL,
+ CP_CAM,
+ VIDEO_MODE_HQ,
};
enum IPPMode
@@ -231,11 +231,9 @@ public:
enum CodingMode
{
- CodingNone = 0,
+ CodingJPEG = 0,
CodingJPS,
CodingMPO,
- CodingRAWJPEG,
- CodingRAWMPO,
};
enum Algorithm3A
@@ -261,11 +259,32 @@ public:
enum CaptureSettingsFlags {
SetFormat = 1 << 0,
SetThumb = 1 << 1,
- SetExpBracket = 1 << 2,
+ SetBurstExpBracket = 1 << 2,
SetQuality = 1 << 3,
SetRotation = 1 << 4,
ECaptureSettingMax,
- ECapturesettingsAll = ( ((ECaptureSettingMax -1 ) << 1) -1 ) /// all possible flags raised
+ ECapturesettingsAll = ( ((ECaptureSettingMax -1 ) << 1) -1 ), /// all possible flags raised
+ ECaptureParamSettings = SetFormat | SetThumb | SetQuality, // Settings set with SetParam
+ ECaptureConfigSettings = (ECapturesettingsAll & ~ECaptureParamSettings)
+ };
+
+ enum PreviewSettingsFlags {
+ SetLDC = 1 << 0,
+ SetNSF = 1 << 1,
+ SetCapMode = 1 << 2,
+ SetVNF = 1 << 3,
+ SetVSTAB = 1 << 4,
+ EPreviewSettingMax,
+ EPreviewSettingsAll = ( ((EPreviewSettingMax -1 ) << 1) -1 ) /// all possible flags raised
+ };
+
+ enum BracketingValueMode {
+ BracketingValueAbsolute,
+ BracketingValueRelative,
+ BracketingValueAbsoluteForced,
+ BracketingValueRelativeForced,
+ BracketingValueCompensation,
+ BracketingValueCompensationForced
};
class GPSData
@@ -309,8 +328,9 @@ public:
class OMXCameraPortParameters
{
public:
- OMX_U32 mHostBufaddr[MAX_NO_BUFFERS];
+ //CameraBuffer * mHostBufaddr[MAX_NO_BUFFERS];
OMX_BUFFERHEADERTYPE *mBufferHeader[MAX_NO_BUFFERS];
+ OMX_U8 mStatus[MAX_NO_BUFFERS];
OMX_U32 mWidth;
OMX_U32 mHeight;
OMX_U32 mStride;
@@ -328,9 +348,18 @@ public:
OMX_CONFIG_FRAMESTABTYPE mVidStabConfig;
OMX_U32 mCapFrame;
OMX_U32 mFrameRate;
- OMX_S32 mMinFrameRate;
- OMX_S32 mMaxFrameRate;
- CameraFrame::FrameType mImageType;
+ OMX_U32 mMinFrameRate;
+ OMX_U32 mMaxFrameRate;
+ CameraFrame::FrameType mImageType;
+ OMX_TI_STEREOFRAMELAYOUTTYPE mFrameLayoutType;
+ CameraBufferType mBufferType;
+
+ CameraBuffer * lookup_omx_buffer (OMX_BUFFERHEADERTYPE *pBufHeader);
+ enum {
+ IDLE = 0, // buffer is neither with HAL nor Ducati
+ FILL, // buffer is with Ducati
+ DONE, // buffer is filled and sent to HAL
+ };
};
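The new per-buffer mStatus array tracks where each OMX buffer currently lives: idle on neither side, held by the Ducati coprocessor, or filled and handed to the HAL. A sketch of the transition when queueing a buffer (the lookup helper and exact bookkeeping are assumptions):

    // Illustrative transition: IDLE -> FILL when the buffer is given to the component,
    // FILL -> DONE in FillBufferDone, DONE -> IDLE when the HAL returns the frame.
    int i = indexOfBuffer(port, pBufHeader);                      // hypothetical lookup helper
    port->mStatus[i] = OMXCameraAdapter::OMXCameraPortParameters::FILL;
    OMX_FillThisBuffer(hComp, pBufHeader);                        // Ducati now owns the buffer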
///Context of the OMX Camera component
@@ -344,9 +373,24 @@ public:
OMX_U32 mPrevPortIndex;
OMX_U32 mImagePortIndex;
OMX_U32 mMeasurementPortIndex;
+ OMX_U32 mVideoInPortIndex;
OMXCameraPortParameters mCameraPortParams[MAX_NO_PORTS];
};
+ class CachedCaptureParameters
+ {
+ public:
+ unsigned int mPendingCaptureSettings;
+ unsigned int mPictureRotation;
+ int mExposureBracketingValues[EXP_BRACKET_RANGE];
+ int mExposureGainBracketingValues[EXP_BRACKET_RANGE];
+ int mExposureGainBracketingModes[EXP_BRACKET_RANGE];
+ size_t mExposureBracketingValidEntries;
+ OMX_BRACKETMODETYPE mExposureBracketMode;
+ unsigned int mBurstFrames;
+ bool mFlushShotConfigQueue;
+ };
+
public:
OMXCameraAdapter(size_t sensor_index);
@@ -356,21 +400,22 @@ public:
virtual status_t initialize(CameraProperties::Properties*);
//APIs to configure Camera adapter and get the current parameter set
- virtual status_t setParameters(const CameraParameters& params);
- virtual void getParameters(CameraParameters& params);
+ virtual status_t setParameters(const android::CameraParameters& params);
+ virtual void getParameters(android::CameraParameters& params);
// API
- virtual status_t UseBuffersPreview(void* bufArr, int num);
+ status_t UseBuffersPreview(CameraBuffer *bufArr, int num);
- //API to flush the buffers for preview
- status_t flushBuffers();
+ //API to flush the buffers
+ status_t flushBuffers(OMX_U32 port = OMX_CAMERA_PORT_VIDEO_OUT_PREVIEW);
// API
virtual status_t setFormat(OMX_U32 port, OMXCameraPortParameters &cap);
// Function to get and populate caps from handle
- static status_t getCaps(CameraProperties::Properties* props, OMX_HANDLETYPE handle);
+ static status_t getCaps(int sensorId, CameraProperties::Properties* props, OMX_HANDLETYPE handle);
static const char* getLUTvalue_OMXtoHAL(int OMXValue, LUTtype LUT);
+ static int getMultipleLUTvalue_OMXtoHAL(int OMXValue, LUTtype LUT, char * supported);
static int getLUTvalue_HALtoOMX(const char * HalValue, LUTtype LUT);
OMX_ERRORTYPE OMXCameraAdapterEventHandler(OMX_IN OMX_HANDLETYPE hComponent,
@@ -385,7 +430,8 @@ public:
OMX_ERRORTYPE OMXCameraAdapterFillBufferDone(OMX_IN OMX_HANDLETYPE hComponent,
OMX_IN OMX_BUFFERHEADERTYPE* pBuffHeader);
- static OMX_ERRORTYPE OMXCameraGetHandle(OMX_HANDLETYPE *handle, OMX_PTR pAppData=NULL);
+ static OMX_ERRORTYPE OMXCameraGetHandle(OMX_HANDLETYPE *handle, OMX_PTR pAppData,
+ const OMX_CALLBACKTYPE & callbacks);
protected:
@@ -402,10 +448,10 @@ protected:
virtual status_t stopVideoCapture();
virtual status_t startPreview();
virtual status_t stopPreview();
- virtual status_t useBuffers(CameraMode mode, void* bufArr, int num, size_t length, unsigned int queueable);
- virtual status_t fillThisBuffer(void* frameBuf, CameraFrame::FrameType frameType);
+ virtual status_t useBuffers(CameraMode mode, CameraBuffer * bufArr, int num, size_t length, unsigned int queueable);
+ virtual status_t fillThisBuffer(CameraBuffer * frameBuf, CameraFrame::FrameType frameType);
virtual status_t getFrameSize(size_t &width, size_t &height);
- virtual status_t getPictureBufferSize(size_t &length, size_t bufferCount);
+ virtual status_t getPictureBufferSize(CameraFrame &frame, size_t bufferCount);
virtual status_t getFrameDataSize(size_t &dataFrameSize, size_t bufferCount);
virtual status_t startFaceDetection();
virtual status_t stopFaceDetection();
@@ -414,11 +460,17 @@ protected:
private:
+ // Caches and returns current set of parameters
+ CachedCaptureParameters* cacheCaptureParameters();
+
status_t doSwitchToExecuting();
void performCleanupAfterError();
- status_t switchToLoaded();
+ status_t switchToIdle();
+
+ status_t switchToLoaded(bool bPortEnableRequired = false);
+ status_t prevPortEnable();
OMXCameraPortParameters *getPortParams(CameraFrame::FrameType frameType);
@@ -437,15 +489,21 @@ private:
OMX_IN OMX_EVENTTYPE eEvent,
OMX_IN OMX_U32 nData1,
OMX_IN OMX_U32 nData2,
- OMX_IN Semaphore &semaphore);
+ OMX_IN Utils::Semaphore &semaphore);
status_t setPictureRotation(unsigned int degree);
status_t setSensorOrientation(unsigned int degree);
status_t setImageQuality(unsigned int quality);
status_t setThumbnailParams(unsigned int width, unsigned int height, unsigned int quality);
+ status_t setSensorQuirks(int orientation,
+ OMXCameraPortParameters &portParams,
+ bool &portConfigured);
+
+ status_t setupTunnel(uint32_t SliceHeight, uint32_t EncoderHandle, uint32_t width, uint32_t height);
+ status_t destroyTunnel();
//EXIF
- status_t setParametersEXIF(const CameraParameters &params,
+ status_t setParametersEXIF(const android::CameraParameters &params,
BaseCameraAdapter::AdapterState state);
status_t convertGPSCoord(double coord, int &deg, int &min, int &sec, int &secDivisor);
status_t setupEXIF();
@@ -462,12 +520,12 @@ private:
//Focus distances
- status_t setParametersFocus(const CameraParameters &params,
+ status_t setParametersFocus(const android::CameraParameters &params,
BaseCameraAdapter::AdapterState state);
status_t addFocusDistances(OMX_U32 &near,
OMX_U32 &optimal,
OMX_U32 &far,
- CameraParameters& params);
+ android::CameraParameters& params);
status_t encodeFocusDistance(OMX_U32 dist, char *buffer, size_t length);
status_t getFocusDistances(OMX_U32 &near,OMX_U32 &optimal, OMX_U32 &far);
@@ -476,14 +534,17 @@ private:
status_t enableVideoStabilization(bool enable);
//Digital zoom
- status_t setParametersZoom(const CameraParameters &params,
+ status_t setParametersZoom(const android::CameraParameters &params,
BaseCameraAdapter::AdapterState state);
status_t doZoom(int index);
status_t advanceZoom();
//3A related parameters
- status_t setParameters3A(const CameraParameters &params,
+ status_t setParameters3A(const android::CameraParameters &params,
BaseCameraAdapter::AdapterState state);
+ void declareParameter3ABool(const android::CameraParameters &params, const char *key,
+ OMX_BOOL &current_setting, E3ASettingsFlags pending,
+ const char *msg);
// scene modes
status_t setScene(Gen3A_settings& Gen3A);
@@ -501,6 +562,7 @@ private:
//Exposure Modes
status_t setExposureMode(Gen3A_settings& Gen3A);
+ status_t setManualExposureVal(Gen3A_settings& Gen3A);
status_t setEVCompensation(Gen3A_settings& Gen3A);
status_t setWBMode(Gen3A_settings& Gen3A);
status_t setFlicker(Gen3A_settings& Gen3A);
@@ -512,6 +574,22 @@ private:
status_t setEffect(Gen3A_settings& Gen3A);
status_t setMeteringAreas(Gen3A_settings& Gen3A);
+ //TI extensions for enable/disable algos
+ status_t setParameter3ABool(const OMX_INDEXTYPE omx_idx,
+ const OMX_BOOL data, const char *msg);
+ status_t setParameter3ABoolInvert(const OMX_INDEXTYPE omx_idx,
+ const OMX_BOOL data, const char *msg);
+ status_t setAlgoExternalGamma(Gen3A_settings& Gen3A);
+ status_t setAlgoNSF1(Gen3A_settings& Gen3A);
+ status_t setAlgoNSF2(Gen3A_settings& Gen3A);
+ status_t setAlgoSharpening(Gen3A_settings& Gen3A);
+ status_t setAlgoThreeLinColorMap(Gen3A_settings& Gen3A);
+ status_t setAlgoGIC(Gen3A_settings& Gen3A);
+
+ //Gamma table
+ void updateGammaTable(const char* gamma);
+ status_t setGammaTable(Gen3A_settings& Gen3A);
+
status_t getEVCompensation(Gen3A_settings& Gen3A);
status_t getWBMode(Gen3A_settings& Gen3A);
status_t getSharpness(Gen3A_settings& Gen3A);
@@ -524,10 +602,14 @@ private:
status_t setWhiteBalanceLock(Gen3A_settings& Gen3A);
status_t set3ALock(OMX_BOOL toggleExp, OMX_BOOL toggleWb, OMX_BOOL toggleFocus);
+ //Stereo 3D
+ void setParamS3D(OMX_U32 port, const char *valstr);
+ status_t setS3DFrameLayout(OMX_U32 port) const;
+
//API to set FrameRate using VFR interface
status_t setVFramerate(OMX_U32 minFrameRate,OMX_U32 maxFrameRate);
- status_t setParametersAlgo(const CameraParameters &params,
+ status_t setParametersAlgo(const android::CameraParameters &params,
BaseCameraAdapter::AdapterState state);
//Noise filtering
@@ -548,18 +630,21 @@ private:
status_t setTouchFocus();
//Face detection
- status_t setParametersFD(const CameraParameters &params,
+ status_t setParametersFD(const android::CameraParameters &params,
BaseCameraAdapter::AdapterState state);
- status_t updateFocusDistances(CameraParameters &params);
+ status_t updateFocusDistances(android::CameraParameters &params);
+ status_t setFaceDetectionOrientation(OMX_U32 orientation);
status_t setFaceDetection(bool enable, OMX_U32 orientation);
- status_t detectFaces(OMX_BUFFERHEADERTYPE* pBuffHeader,
- sp<CameraFDResult> &result,
+ status_t createPreviewMetadata(OMX_BUFFERHEADERTYPE* pBuffHeader,
+ android::sp<CameraMetadataResult> &result,
size_t previewWidth,
size_t previewHeight);
status_t encodeFaceCoordinates(const OMX_FACEDETECTIONTYPE *faceData,
- camera_frame_metadata_t **pFaces,
+ camera_frame_metadata_t *metadataResult,
size_t previewWidth,
size_t previewHeight);
+ status_t encodePreviewMetadata(camera_frame_metadata_t *meta, const OMX_PTR plat_pvt);
+
void pauseFaceDetection(bool pause);
//3A Algorithms priority configuration
@@ -569,17 +654,29 @@ private:
status_t setSensorOverclock(bool enable);
// Utility methods for OMX Capabilities
+ static bool _checkOmxTiCap(const OMX_TI_CAPTYPE & caps);
+ static bool _dumpOmxTiCap(int sensorId, const OMX_TI_CAPTYPE & caps);
+
static status_t insertCapabilities(CameraProperties::Properties*, OMX_TI_CAPTYPE&);
static status_t encodeSizeCap(OMX_TI_CAPRESTYPE&, const CapResolution *, size_t, char *, size_t);
static status_t encodeISOCap(OMX_U32, const CapISO*, size_t, char*, size_t);
static size_t encodeZoomCap(OMX_S32, const CapZoom*, size_t, char*, size_t);
- static status_t encodeFramerateCap(OMX_U32, OMX_U32, const CapFramerate*, size_t, char*, size_t);
- static status_t encodeVFramerateCap(OMX_TI_CAPTYPE&, const CapU32Pair*, size_t, char*, char*, size_t);
+ static void encodeFrameRates(int minFrameRate, int maxFrameRate, const OMX_TI_CAPTYPE & caps,
+ const CapFramerate * fixedFrameRates, int frameRateCount, android::Vector<FpsRange> & fpsRanges);
+ static status_t encodeImageCodingFormatCap(OMX_IMAGE_CODINGTYPE,
+ const CapCodingFormat *,
+ size_t,
+ char *);
static status_t encodePixelformatCap(OMX_COLOR_FORMATTYPE,
const CapPixelformat*,
size_t,
char*,
size_t);
+ static status_t encodeSizeCap3D(OMX_TI_CAPRESTYPE&,
+ const CapResolution*,
+ size_t ,
+ char * ,
+ size_t);
static status_t insertImageSizes(CameraProperties::Properties*, OMX_TI_CAPTYPE&);
static status_t insertPreviewSizes(CameraProperties::Properties*, OMX_TI_CAPTYPE&);
static status_t insertThumbSizes(CameraProperties::Properties*, OMX_TI_CAPTYPE&);
@@ -587,13 +684,13 @@ private:
static status_t insertImageFormats(CameraProperties::Properties*, OMX_TI_CAPTYPE&);
static status_t insertPreviewFormats(CameraProperties::Properties*, OMX_TI_CAPTYPE&);
static status_t insertFramerates(CameraProperties::Properties*, OMX_TI_CAPTYPE&);
- static status_t insertVFramerates(CameraProperties::Properties*, OMX_TI_CAPTYPE&);
static status_t insertEVs(CameraProperties::Properties*, OMX_TI_CAPTYPE&);
static status_t insertISOModes(CameraProperties::Properties*, OMX_TI_CAPTYPE&);
static status_t insertIPPModes(CameraProperties::Properties*, OMX_TI_CAPTYPE &);
static status_t insertWBModes(CameraProperties::Properties*, OMX_TI_CAPTYPE&);
static status_t insertEffects(CameraProperties::Properties*, OMX_TI_CAPTYPE&);
static status_t insertExpModes(CameraProperties::Properties*, OMX_TI_CAPTYPE&);
+ static status_t insertManualExpRanges(CameraProperties::Properties*, OMX_TI_CAPTYPE&);
static status_t insertSceneModes(CameraProperties::Properties*, OMX_TI_CAPTYPE &);
static status_t insertFocusModes(CameraProperties::Properties*, OMX_TI_CAPTYPE&);
static status_t insertFlickerModes(CameraProperties::Properties*, OMX_TI_CAPTYPE&);
@@ -602,21 +699,51 @@ private:
static status_t insertDefaults(CameraProperties::Properties*, OMX_TI_CAPTYPE&);
static status_t insertLocks(CameraProperties::Properties*, OMX_TI_CAPTYPE&);
static status_t insertAreas(CameraProperties::Properties*, OMX_TI_CAPTYPE&);
+ static status_t insertMechanicalMisalignmentCorrection(CameraProperties::Properties*, OMX_TI_CAPTYPE&);
+ static status_t insertCaptureModes(CameraProperties::Properties*, OMX_TI_CAPTYPE&);
static status_t insertVideoSizes(CameraProperties::Properties*, OMX_TI_CAPTYPE&);
-
- status_t setParametersCapture(const CameraParameters &params,
+ static status_t insertFacing(CameraProperties::Properties*, OMX_TI_CAPTYPE&);
+ static status_t insertFocalLength(CameraProperties::Properties*, OMX_TI_CAPTYPE&);
+ static status_t insertAutoConvergenceModes(CameraProperties::Properties*, OMX_TI_CAPTYPE&);
+ static status_t insertManualConvergenceRange(CameraProperties::Properties*, OMX_TI_CAPTYPE&);
+ static status_t insertLayout(CameraProperties::Properties*, OMX_TI_CAPTYPE&);
+ static status_t insertVideoSnapshotSupported(CameraProperties::Properties*, OMX_TI_CAPTYPE&);
+ static status_t insertVNFSupported(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps);
+ static status_t insertVSTABSupported(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps);
+ static status_t insertGBCESupported(CameraProperties::Properties* params,
+ const OMX_TI_CAPTYPE &caps);
+ static status_t insertGLBCESupported(CameraProperties::Properties* params,
+ const OMX_TI_CAPTYPE &caps);
+ static status_t insertRaw(CameraProperties::Properties*, OMX_TI_CAPTYPE&);
+
+ status_t setParametersCapture(const android::CameraParameters &params,
BaseCameraAdapter::AdapterState state);
//Exposure Bracketing
- status_t setExposureBracketing(int *evValues, size_t evCount, size_t frameCount);
- status_t parseExpRange(const char *rangeStr, int * expRange, size_t count, size_t &validEntries);
+ status_t initVectorShot();
+ status_t setVectorShot(int *evValues, int *evValues2, int *evModes2,
+ size_t evCount, size_t frameCount,
+ bool flush, OMX_BRACKETMODETYPE bracketMode);
+ status_t setVectorStop(bool toPreview = false);
+ status_t setExposureBracketing(int *evValues, int *evValues2,
+ size_t evCount, size_t frameCount,
+ OMX_BRACKETMODETYPE bracketMode);
+ status_t doExposureBracketing(int *evValues, int *evValues2,
+ int *evModes2,
+ size_t evCount, size_t frameCount,
+ bool flush,
+ OMX_BRACKETMODETYPE bracketMode);
+ int getBracketingValueMode(const char *a, const char *b) const;
+ status_t parseExpRange(const char *rangeStr, int *expRange, int *gainRange,
+ int *expGainModes,
+ size_t count, size_t &validEntries);
//Temporal Bracketing
status_t doBracketing(OMX_BUFFERHEADERTYPE *pBuffHeader, CameraFrame::FrameType typeOfFrame);
- status_t sendBracketFrames();
+ status_t sendBracketFrames(size_t &framesSent);
// Image Capture Service
- status_t startImageCapture();
+ status_t startImageCapture(bool bracketing, CachedCaptureParameters*);
status_t disableImagePort();
//Shutter callback notifications
@@ -624,34 +751,62 @@ private:
    //Sets either HQ or HS mode and the frame count
status_t setCaptureMode(OMXCameraAdapter::CaptureMode mode);
- status_t UseBuffersCapture(void* bufArr, int num);
- status_t UseBuffersPreviewData(void* bufArr, int num);
+ status_t UseBuffersCapture(CameraBuffer *bufArr, int num);
+ status_t UseBuffersPreviewData(CameraBuffer *bufArr, int num);
+ status_t UseBuffersRawCapture(CameraBuffer *bufArr, int num);
//Used for calculation of the average frame rate during preview
status_t recalculateFPS();
- //Helper method for initializing a CameFrame object
- status_t initCameraFrame(CameraFrame &frame, OMX_IN OMX_BUFFERHEADERTYPE *pBuffHeader, int typeOfFrame, OMXCameraPortParameters *port);
-
//Sends the incoming OMX buffer header to subscribers
- status_t sendFrame(CameraFrame &frame);
-
status_t sendCallBacks(CameraFrame frame, OMX_IN OMX_BUFFERHEADERTYPE *pBuffHeader, unsigned int mask, OMXCameraPortParameters *port);
status_t apply3Asettings( Gen3A_settings& Gen3A );
- status_t init3AParams(Gen3A_settings &Gen3A);
// AutoConvergence
- status_t setAutoConvergence(OMX_TI_AUTOCONVERGENCEMODETYPE pACMode, OMX_S32 pManualConverence);
- status_t getAutoConvergence(OMX_TI_AUTOCONVERGENCEMODETYPE *pACMode, OMX_S32 *pManualConverence);
+ status_t setAutoConvergence(const char *valstr, const char *pValManualstr, const android::CameraParameters &params);
status_t setExtraData(bool enable, OMX_U32, OMX_EXT_EXTRADATATYPE);
- OMX_OTHER_EXTRADATATYPE *getExtradata(OMX_OTHER_EXTRADATATYPE *extraData, OMX_EXTRADATATYPE type);
+ OMX_OTHER_EXTRADATATYPE *getExtradata(const OMX_PTR ptrPrivate, OMX_EXTRADATATYPE type) const;
+
+ // Meta data
+#ifdef OMAP_ENHANCEMENT_CPCAM
+ camera_memory_t * getMetaData(const OMX_PTR plat_pvt,
+ camera_request_memory allocator) const;
+#endif
+
+ // Mechanical Misalignment Correction
+ status_t setMechanicalMisalignmentCorrection(bool enable);
+
+ // DCC file data save
+ status_t initDccFileDataSave(OMX_HANDLETYPE* omxHandle, int portIndex);
+ status_t sniffDccFileDataSave(OMX_BUFFERHEADERTYPE* pBuffHeader);
+ status_t saveDccFileDataSave();
+ status_t closeDccFileDataSave();
+ status_t fseekDCCuseCasePos(FILE *pFile);
+ FILE * fopenCameraDCC(const char *dccFolderPath);
+ FILE * parseDCCsubDir(DIR *pDir, char *path);
+
+#ifdef CAMERAHAL_OMX_PROFILING
+ status_t storeProfilingData(OMX_BUFFERHEADERTYPE* pBuffHeader);
+#endif
+
+ // Internal buffers
+ status_t initInternalBuffers (OMX_U32);
+ status_t deinitInternalBuffers (OMX_U32);
+
+ // Reprocess Methods -- implementation in OMXReprocess.cpp
+ status_t setParametersReprocess(const android::CameraParameters &params, CameraBuffer* bufs,
+ BaseCameraAdapter::AdapterState state);
+ status_t startReprocess();
+ status_t disableReprocess();
+ status_t stopReprocess();
+ status_t UseBuffersReprocess(CameraBuffer *bufArr, int num);
- class CommandHandler : public Thread {
+ class CommandHandler : public android::Thread {
public:
CommandHandler(OMXCameraAdapter* ca)
- : Thread(false), mCameraAdapter(ca) { }
+ : android::Thread(false), mCameraAdapter(ca) { }
virtual bool threadLoop() {
bool ret;
@@ -659,38 +814,42 @@ private:
return ret;
}
- status_t put(TIUTILS::Message* msg){
- Mutex::Autolock lock(mLock);
+ status_t put(Utils::Message* msg){
+ android::AutoMutex lock(mLock);
return mCommandMsgQ.put(msg);
}
void clearCommandQ()
{
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
mCommandMsgQ.clear();
}
enum {
COMMAND_EXIT = -1,
CAMERA_START_IMAGE_CAPTURE = 0,
- CAMERA_PERFORM_AUTOFOCUS = 1,
+ CAMERA_PERFORM_AUTOFOCUS,
CAMERA_SWITCH_TO_EXECUTING,
+ CAMERA_START_REPROCESS
};
private:
bool Handler();
- TIUTILS::MessageQueue mCommandMsgQ;
+ Utils::MessageQueue mCommandMsgQ;
OMXCameraAdapter* mCameraAdapter;
- Mutex mLock;
+ android::Mutex mLock;
};
- sp<CommandHandler> mCommandHandler;
+ android::sp<CommandHandler> mCommandHandler;
public:
- class OMXCallbackHandler : public Thread {
+ class OMXCallbackHandler : public android::Thread {
public:
OMXCallbackHandler(OMXCameraAdapter* ca)
- : Thread(false), mCameraAdapter(ca) { }
+ : Thread(false), mCameraAdapter(ca)
+ {
+ mIsProcessed = true;
+ }
virtual bool threadLoop() {
bool ret;
@@ -698,31 +857,36 @@ public:
return ret;
}
- status_t put(TIUTILS::Message* msg){
- Mutex::Autolock lock(mLock);
+ status_t put(Utils::Message* msg){
+ android::AutoMutex lock(mLock);
+ mIsProcessed = false;
return mCommandMsgQ.put(msg);
}
void clearCommandQ()
{
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
mCommandMsgQ.clear();
}
+ void flush();
+
enum {
COMMAND_EXIT = -1,
CAMERA_FILL_BUFFER_DONE,
- CAMERA_FOCUS_STATUS,
+ CAMERA_FOCUS_STATUS
};
private:
bool Handler();
- TIUTILS::MessageQueue mCommandMsgQ;
+ Utils::MessageQueue mCommandMsgQ;
OMXCameraAdapter* mCameraAdapter;
- Mutex mLock;
+ android::Mutex mLock;
+ android::Condition mCondition;
+ bool mIsProcessed;
};
- sp<OMXCallbackHandler> mOMXCallbackHandler;
+ android::sp<OMXCallbackHandler> mOMXCallbackHandler;
private:
@@ -731,15 +895,38 @@ private:
//OMX Capabilities data
static const CapResolution mImageCapRes [];
+ static const CapResolution mImageCapResSS [];
+ static const CapResolution mImageCapResTB [];
static const CapResolution mPreviewRes [];
+ static const CapResolution mPreviewResSS [];
+ static const CapResolution mPreviewResTB [];
+ static const CapResolution mPreviewPortraitRes [];
static const CapResolution mThumbRes [];
static const CapPixelformat mPixelformats [];
+ static const userToOMX_LUT mFrameLayout [];
+ static const LUTtype mLayoutLUT;
+ static const CapCodingFormat mImageCodingFormat[];
static const CapFramerate mFramerates [];
static const CapU32 mSensorNames[] ;
static const CapZoom mZoomStages [];
- static const CapEVComp mEVCompRanges [];
static const CapISO mISOStages [];
- static const CapU32Pair mVarFramerates [];
+ static const int SENSORID_IMX060;
+ static const int SENSORID_OV5650;
+ static const int SENSORID_OV5640;
+ static const int SENSORID_OV14825;
+ static const int SENSORID_S5K4E1GA;
+ static const int SENSORID_S5K6A1GX03;
+ static const int SENSORID_OV8830;
+ static const int SENSORID_OV2722;
+ static const CapU32 mFacing [];
+ static const userToOMX_LUT mAutoConvergence [];
+ static const LUTtype mAutoConvergenceLUT;
+ static const userToOMX_LUT mBracketingModes[];
+ static const LUTtype mBracketingModesLUT;
+
+ static const int FPS_MIN;
+ static const int FPS_MAX;
+ static const int FPS_MAX_EXTENDED;
// OMX Camera defaults
static const char DEFAULT_ANTIBANDING[];
@@ -752,45 +939,54 @@ private:
static const char DEFAULT_FLASH_MODE[];
static const char DEFAULT_FOCUS_MODE_PREFERRED[];
static const char DEFAULT_FOCUS_MODE[];
- static const char DEFAULT_FRAMERATE_RANGE_IMAGE[];
- static const char DEFAULT_FRAMERATE_RANGE_VIDEO[];
static const char DEFAULT_IPP[];
- static const char DEFAULT_GBCE[];
static const char DEFAULT_ISO_MODE[];
static const char DEFAULT_JPEG_QUALITY[];
static const char DEFAULT_THUMBNAIL_QUALITY[];
static const char DEFAULT_THUMBNAIL_SIZE[];
static const char DEFAULT_PICTURE_FORMAT[];
+ static const char DEFAULT_S3D_PICTURE_LAYOUT[];
static const char DEFAULT_PICTURE_SIZE[];
+ static const char DEFAULT_PICTURE_SS_SIZE[];
+ static const char DEFAULT_PICTURE_TB_SIZE[];
static const char DEFAULT_PREVIEW_FORMAT[];
static const char DEFAULT_FRAMERATE[];
+ static const char DEFAULT_S3D_PREVIEW_LAYOUT[];
static const char DEFAULT_PREVIEW_SIZE[];
+ static const char DEFAULT_PREVIEW_SS_SIZE[];
+ static const char DEFAULT_PREVIEW_TB_SIZE[];
static const char DEFAULT_NUM_PREV_BUFS[];
static const char DEFAULT_NUM_PIC_BUFS[];
- static const char DEFAULT_MAX_FOCUS_AREAS[];
static const char DEFAULT_SATURATION[];
static const char DEFAULT_SCENE_MODE[];
static const char DEFAULT_SHARPNESS[];
- static const char DEFAULT_VSTAB[];
- static const char DEFAULT_VSTAB_SUPPORTED[];
+ static const char * DEFAULT_VSTAB;
+ static const char * DEFAULT_VNF;
static const char DEFAULT_WB[];
static const char DEFAULT_ZOOM[];
static const char DEFAULT_MAX_FD_HW_FACES[];
static const char DEFAULT_MAX_FD_SW_FACES[];
- static const char DEFAULT_AE_LOCK[];
- static const char DEFAULT_AWB_LOCK[];
- static const char DEFAULT_MAX_NUM_METERING_AREAS[];
- static const char DEFAULT_LOCK_SUPPORTED[];
- static const char DEFAULT_LOCK_UNSUPPORTED[];
- static const char DEFAULT_FOCAL_LENGTH_PRIMARY[];
- static const char DEFAULT_FOCAL_LENGTH_SECONDARY[];
+ static const char * DEFAULT_AE_LOCK;
+ static const char * DEFAULT_AWB_LOCK;
static const char DEFAULT_HOR_ANGLE[];
static const char DEFAULT_VER_ANGLE[];
- static const char DEFAULT_VIDEO_SNAPSHOT_SUPPORTED[];
static const char DEFAULT_VIDEO_SIZE[];
- static const char DEFAULT_PREFERRED_PREVIEW_SIZE_FOR_VIDEO[];
+ static const char DEFAULT_SENSOR_ORIENTATION[];
+ static const char DEFAULT_AUTOCONVERGENCE_MODE[];
+ static const char DEFAULT_MANUAL_CONVERGENCE[];
+ static const char * DEFAULT_MECHANICAL_MISALIGNMENT_CORRECTION_MODE;
+ static const char DEFAULT_EXIF_MODEL[];
+ static const char DEFAULT_EXIF_MAKE[];
+
static const size_t MAX_FOCUS_AREAS;
+#ifdef CAMERAHAL_OMX_PROFILING
+
+ static const char DEFAULT_PROFILE_PATH[];
+ int mDebugProfile;
+
+#endif
+
OMX_VERSIONTYPE mCompRevision;
//OMX Component UUID
@@ -803,31 +999,53 @@ private:
char mFocusDistBuffer[FOCUS_DIST_BUFFER_SIZE];
// Current Focus areas
- Vector< sp<CameraArea> > mFocusAreas;
- mutable Mutex mFocusAreasLock;
+ android::Vector<android::sp<CameraArea> > mFocusAreas;
+ mutable android::Mutex mFocusAreasLock;
+
+ // Current Touch convergence areas
+ android::Vector<android::sp<CameraArea> > mTouchAreas;
+ mutable android::Mutex mTouchAreasLock;
// Current Metering areas
- Vector< sp<CameraArea> > mMeteringAreas;
- mutable Mutex mMeteringAreasLock;
+ android::Vector<android::sp<CameraArea> > mMeteringAreas;
+ mutable android::Mutex mMeteringAreasLock;
+ OperatingMode mCapabilitiesOpMode;
CaptureMode mCapMode;
+ // TODO(XXX): Do we really need this lock? Let's
+ // try to merge temporal bracketing and burst
+ // capture later
+ mutable android::Mutex mBurstLock;
size_t mBurstFrames;
+ size_t mBurstFramesAccum;
+ size_t mBurstFramesQueued;
size_t mCapturedFrames;
+ bool mFlushShotConfigQueue;
bool mMeasurementEnabled;
//Exposure Bracketing
int mExposureBracketingValues[EXP_BRACKET_RANGE];
+ int mExposureGainBracketingValues[EXP_BRACKET_RANGE];
+ int mExposureGainBracketingModes[EXP_BRACKET_RANGE];
size_t mExposureBracketingValidEntries;
+ OMX_BRACKETMODETYPE mExposureBracketMode;
+
+ //Zoom Bracketing
+ int mZoomBracketingValues[ZOOM_BRACKET_RANGE];
+ size_t mZoomBracketingValidEntries;
- mutable Mutex mFaceDetectionLock;
+ static const uint32_t FACE_DETECTION_THRESHOLD;
+ mutable android::Mutex mFaceDetectionLock;
//Face detection status
bool mFaceDetectionRunning;
bool mFaceDetectionPaused;
bool mFDSwitchAlgoPriority;
- camera_face_t faceDetectionLastOutput [MAX_NUM_FACES_SUPPORTED];
+ camera_face_t faceDetectionLastOutput[MAX_NUM_FACES_SUPPORTED];
int faceDetectionNumFacesLastOutput;
+ int metadataLastAnalogGain;
+ int metadataLastExposureTime;
//Geo-tagging
EXIFData mEXIFData;
@@ -857,7 +1075,7 @@ private:
unsigned int mZoomParameterIdx;
//current zoom
- Mutex mZoomLock;
+ android::Mutex mZoomLock;
unsigned int mCurrentZoomIdx, mTargetZoomIdx, mPreviousZoomIndx;
bool mZoomUpdating, mZoomUpdate;
int mZoomInc;
@@ -868,63 +1086,79 @@ private:
OMX_VERSIONTYPE mLocalVersionParam;
unsigned int mPending3Asettings;
- Mutex m3ASettingsUpdateLock;
+ android::Mutex m3ASettingsUpdateLock;
Gen3A_settings mParameters3A;
const char *mPictureFormatFromClient;
+ BrightnessMode mGBCE;
+ BrightnessMode mGLBCE;
+
OMX_TI_CONFIG_3A_FACE_PRIORITY mFacePriority;
OMX_TI_CONFIG_3A_REGION_PRIORITY mRegionPriority;
- CameraParameters mParams;
+ android::CameraParameters mParams;
CameraProperties::Properties* mCapabilities;
unsigned int mPictureRotation;
bool mWaitingForSnapshot;
- int mSnapshotCount;
bool mCaptureConfigured;
unsigned int mPendingCaptureSettings;
+ unsigned int mPendingPreviewSettings;
+ unsigned int mPendingReprocessSettings;
OMX_TI_ANCILLARYDATATYPE* mCaptureAncillaryData;
OMX_TI_WHITEBALANCERESULTTYPE* mWhiteBalanceData;
+ bool mReprocConfigured;
//Temporal bracketing management data
- mutable Mutex mBracketingLock;
+ bool mBracketingSet;
+ mutable android::Mutex mBracketingLock;
bool *mBracketingBuffersQueued;
int mBracketingBuffersQueuedCount;
int mLastBracetingBufferIdx;
bool mBracketingEnabled;
- int mBracketingRange;
+ bool mZoomBracketingEnabled;
+ size_t mBracketingRange;
+ int mCurrentZoomBracketing;
+ android::CameraParameters mParameters;
+#ifdef CAMERAHAL_TUNA
bool mIternalRecordingHint;
+#endif
- CameraParameters mParameters;
bool mOmxInitialized;
OMXCameraAdapterComponentContext mCameraAdapterParameters;
bool mFirstTimeInit;
///Semaphores used internally
- Semaphore mInitSem;
- Semaphore mFlushSem;
- Semaphore mUsePreviewDataSem;
- Semaphore mUsePreviewSem;
- Semaphore mUseCaptureSem;
- Semaphore mStartPreviewSem;
- Semaphore mStopPreviewSem;
- Semaphore mStartCaptureSem;
- Semaphore mStopCaptureSem;
- Semaphore mSwitchToLoadedSem;
- Semaphore mSwitchToExecSem;
-
- mutable Mutex mStateSwitchLock;
-
- Vector<struct TIUTILS::Message *> mEventSignalQ;
- Mutex mEventLock;
+ Utils::Semaphore mInitSem;
+ Utils::Semaphore mFlushSem;
+ Utils::Semaphore mUsePreviewDataSem;
+ Utils::Semaphore mUsePreviewSem;
+ Utils::Semaphore mUseCaptureSem;
+ Utils::Semaphore mStartPreviewSem;
+ Utils::Semaphore mStopPreviewSem;
+ Utils::Semaphore mStartCaptureSem;
+ Utils::Semaphore mStopCaptureSem;
+ Utils::Semaphore mSwitchToLoadedSem;
+ Utils::Semaphore mSwitchToExecSem;
+ Utils::Semaphore mStopReprocSem;
+ Utils::Semaphore mUseReprocessSem;
+
+ mutable android::Mutex mStateSwitchLock;
+ mutable android::Mutex mIdleStateSwitchLock;
+
+ android::Vector<Utils::Message *> mEventSignalQ;
+ android::Mutex mEventLock;
OMX_STATETYPE mComponentState;
+ OMX_TI_AUTOCONVERGENCEMODETYPE mAutoConv;
+ OMX_S32 mManualConv;
bool mVnfEnabled;
bool mVstabEnabled;
int mSensorOrientation;
int mDeviceOrientation;
+ int mFaceOrientation;
bool mSensorOverclock;
//Indicates if we should leave
@@ -936,11 +1170,15 @@ private:
int mLastFrameCount;
unsigned int mIter;
nsecs_t mLastFPSTime;
- Mutex mFrameCountMutex;
- Condition mFirstFrameCondition;
+ android::Mutex mFrameCountMutex;
+ android::Condition mFirstFrameCondition;
- Mutex mDoAFMutex;
- Condition mDoAFCond;
+ static const nsecs_t CANCEL_AF_TIMEOUT;
+ android::Mutex mCancelAFMutex;
+ android::Condition mCancelAFCond;
+
+ android::Mutex mDoAFMutex;
+ android::Condition mDoAFCond;
size_t mSensorIndex;
CodingMode mCodingMode;
@@ -949,13 +1187,33 @@ private:
OMX_TICKS mTimeSourceDelta;
bool onlyOnce;
- Semaphore mCaptureSem;
+ Utils::Semaphore mCaptureSem;
bool mCaptureSignalled;
OMX_BOOL mUserSetExpLock;
OMX_BOOL mUserSetWbLock;
+#ifdef CAMERAHAL_USE_RAW_IMAGE_SAVING
+ bool mRawCapture;
+ bool mYuvCapture;
+#endif
+
+ bool mSetFormatDone;
+
+ OMX_TI_DCCDATATYPE mDccData;
+ android::Mutex mDccDataLock;
+
+ int mMaxZoomSupported;
+ android::Mutex mImageCaptureLock;
+
+ bool mTunnelDestroyed;
+ bool mPreviewPortInitialized;
+
+ // Used for allocations that need to be sent to Ducati
+ MemoryManager mMemMgr;
};
-}; //// namespace
-#endif //OMX_CAMERA_ADAPTER_H
+} // namespace Camera
+} // namespace Ti
+
+#endif //OMX_CAMERA_ADAPTER_H
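The CommandHandler and OMXCallbackHandler classes added above share one pattern: callers put() a Utils::Message into a mutex-protected MessageQueue and the thread's Handler() drains it. A minimal sketch of how a capture command might be queued is shown below; the Utils::Message field names (command, arg1) are assumed from the old TIUTILS API and are not confirmed by this diff.

    // Hypothetical sketch only -- the Utils::Message layout is assumed.
    Utils::Message msg;
    msg.command = CommandHandler::CAMERA_START_IMAGE_CAPTURE;
    msg.arg1 = this;                    // adapter instance consumed by Handler()
    mCommandHandler->put(&msg);         // queued under android::Mutex, drained in threadLoop()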
diff --git a/camera/inc/OMXCameraAdapter/OMXDCC.h b/camera/inc/OMXCameraAdapter/OMXDCC.h
new file mode 100644
index 0000000..c75a24d
--- /dev/null
+++ b/camera/inc/OMXCameraAdapter/OMXDCC.h
@@ -0,0 +1,44 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef OMX_DCC_H
+#define OMX_DCC_H
+
+namespace Ti {
+namespace Camera {
+
+class DCCHandler
+{
+public:
+
+ status_t loadDCC(OMX_HANDLETYPE hComponent);
+
+private:
+
+ OMX_ERRORTYPE initDCC(OMX_HANDLETYPE hComponent);
+ OMX_ERRORTYPE sendDCCBufPtr(OMX_HANDLETYPE hComponent, CameraBuffer *dccBuffer);
+ size_t readDCCdir(OMX_PTR buffer, const android::Vector<android::String8 *> &dirPaths);
+
+private:
+
+ static android::String8 DCCPath;
+ static bool mDCCLoaded;
+};
+
+} // namespace Camera
+} // namespace Ti
+
+#endif // OMX_DCC_H
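The new DCCHandler loads DCC (camera tuning) data into the OMX component once a handle is available. A hedged usage sketch follows; the member name mCameraAdapterParameters.mHandleComp and the CAMHAL_LOGEB logging macro are borrowed from the surrounding HAL and are assumptions here, not part of this header.

    Ti::Camera::DCCHandler dccHandler;
    status_t err = dccHandler.loadDCC(mCameraAdapterParameters.mHandleComp);
    if ( err != NO_ERROR ) {
        // Tuning data is optional: log the failure and keep going without it.
        CAMHAL_LOGEB("DCC load failed 0x%x", err);
    }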
diff --git a/camera/inc/OMXCameraAdapter/OMXSceneModeTables.h b/camera/inc/OMXCameraAdapter/OMXSceneModeTables.h
index 205a87b..e791727 100644
--- a/camera/inc/OMXCameraAdapter/OMXSceneModeTables.h
+++ b/camera/inc/OMXCameraAdapter/OMXSceneModeTables.h
@@ -32,6 +32,9 @@
#define ARRAY_SIZE(array) (sizeof((array)) / sizeof((array)[0]))
#endif
+namespace Ti {
+namespace Camera {
+
struct SceneModesEntry {
OMX_SCENEMODETYPE scene;
OMX_IMAGE_FLASHCONTROLTYPE flash;
@@ -239,9 +242,512 @@ static const SceneModesEntry S5K6A1GX03_SceneModesLUT [] = {
OMX_WhiteBalControlAuto },
};
+static const SceneModesEntry IMX060_SceneModesLUT [] = {
+ { OMX_Closeup,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlAutoMacro,
+ OMX_WhiteBalControlAuto },
+ { OMX_Landscape,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlSunLight },
+ { OMX_Underwater,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_Sport,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_SnowBeach,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_Mood,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_NightPortrait,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlPortrait,
+ OMX_WhiteBalControlAuto },
+ { OMX_NightIndoor,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_Fireworks,
+ OMX_IMAGE_FlashControlOn,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_Document,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlAutoMacro,
+ OMX_WhiteBalControlAuto },
+ { OMX_Barcode,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlAutoMacro,
+ OMX_WhiteBalControlAuto },
+ { OMX_SuperNight,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlAutoInfinity,
+ OMX_WhiteBalControlAuto },
+ { OMX_Cine,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_OldFilm,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_TI_Action,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_TI_Beach,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_TI_Candlelight,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlIncandescent },
+ { OMX_TI_Night,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_TI_Party,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlAutoLock,
+ OMX_WhiteBalControlAuto },
+ { OMX_TI_Portrait,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_TI_Snow,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_TI_Steadyphoto,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_TI_Sunset,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ ( OMX_WHITEBALCONTROLTYPE ) OMX_TI_WhiteBalControlSunset },
+ { OMX_TI_Theatre,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+};
+
+static const SceneModesEntry OV5640_SceneModesLUT [] = {
+ { OMX_Closeup,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_Landscape,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_Underwater,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlSunLight },
+ { OMX_Sport,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlAutoInfinity,
+ OMX_WhiteBalControlAuto },
+ { OMX_SnowBeach,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_Mood,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_NightPortrait,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_NightIndoor,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_Fireworks,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_Document,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_Barcode,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_SuperNight,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_Cine,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_OldFilm,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_TI_Action,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_TI_Beach,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_TI_Candlelight,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlIncandescent },
+ { OMX_TI_Night,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_TI_Party,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_TI_Portrait,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_TI_Snow,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_TI_Steadyphoto,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_TI_Sunset,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_TI_Theatre,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+};
+
+static const SceneModesEntry OV5650_SceneModesLUT [] = {
+ { OMX_Closeup,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_Landscape,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlSunLight },
+ { OMX_Underwater,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_Sport,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_SnowBeach,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_Mood,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_NightPortrait,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlPortrait,
+ OMX_WhiteBalControlAuto },
+ { OMX_NightIndoor,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_Fireworks,
+ OMX_IMAGE_FlashControlOn,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_Document,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_Barcode,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_SuperNight,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlAutoInfinity,
+ OMX_WhiteBalControlAuto },
+ { OMX_Cine,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_OldFilm,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_TI_Action,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_TI_Beach,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_TI_Candlelight,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlIncandescent },
+ { OMX_TI_Night,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_TI_Party,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlAutoLock,
+ OMX_WhiteBalControlAuto },
+ { OMX_TI_Portrait,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_TI_Snow,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_TI_Steadyphoto,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_TI_Sunset,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ ( OMX_WHITEBALCONTROLTYPE ) OMX_TI_WhiteBalControlSunset },
+ { OMX_TI_Theatre,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+};
+
+static const SceneModesEntry OV8830_SceneModesLUT [] = {
+ { OMX_Closeup,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_Landscape,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlSunLight },
+ { OMX_Underwater,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_Sport,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_SnowBeach,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_Mood,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_NightPortrait,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlPortrait,
+ OMX_WhiteBalControlAuto },
+ { OMX_NightIndoor,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_Fireworks,
+ OMX_IMAGE_FlashControlOn,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_Document,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_Barcode,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_SuperNight,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlAutoInfinity,
+ OMX_WhiteBalControlAuto },
+ { OMX_Cine,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_OldFilm,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_TI_Action,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_TI_Beach,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_TI_Candlelight,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlIncandescent },
+ { OMX_TI_Night,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_TI_Party,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlAutoLock,
+ OMX_WhiteBalControlAuto },
+ { OMX_TI_Portrait,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_TI_Snow,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_TI_Steadyphoto,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_TI_Sunset,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ ( OMX_WHITEBALCONTROLTYPE ) OMX_TI_WhiteBalControlSunset },
+ { OMX_TI_Theatre,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+};
+
+static const SceneModesEntry OV2722_SceneModesLUT [] = {
+ { OMX_Closeup,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_Landscape,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlSunLight },
+ { OMX_Underwater,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_Sport,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_SnowBeach,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_Mood,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_NightPortrait,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_NightIndoor,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_Fireworks,
+ OMX_IMAGE_FlashControlOn,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_Document,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_Barcode,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_SuperNight,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlAutoInfinity,
+ OMX_WhiteBalControlAuto },
+ { OMX_Cine,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_OldFilm,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_TI_Action,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_TI_Beach,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_TI_Candlelight,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlIncandescent },
+ { OMX_TI_Night,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_TI_Party,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_TI_Portrait,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_TI_Snow,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_TI_Steadyphoto,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_TI_Sunset,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ ( OMX_WHITEBALCONTROLTYPE ) OMX_TI_WhiteBalControlSunset },
+ { OMX_TI_Theatre,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+};
+
static const CameraToSensorModesLUTEntry CameraToSensorModesLUT [] = {
{ "S5K4E1GA", S5K4E1GA_SceneModesLUT, ARRAY_SIZE(S5K4E1GA_SceneModesLUT)},
{ "S5K6A1GX03", S5K6A1GX03_SceneModesLUT, ARRAY_SIZE(S5K6A1GX03_SceneModesLUT)},
+ { "IMX060", IMX060_SceneModesLUT, ARRAY_SIZE(IMX060_SceneModesLUT)},
+ { "OV5640", OV5640_SceneModesLUT, ARRAY_SIZE(OV5640_SceneModesLUT)},
+ { "OV5650", OV5650_SceneModesLUT, ARRAY_SIZE(OV5650_SceneModesLUT)},
+ { "OV8830", OV8830_SceneModesLUT, ARRAY_SIZE(OV8830_SceneModesLUT)},
+ { "OV2722", OV2722_SceneModesLUT, ARRAY_SIZE(OV2722_SceneModesLUT)}
};
+} // namespace Camera
+} // namespace Ti
+
#endif
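CameraToSensorModesLUT maps a sensor name to its per-sensor SceneModesEntry table. An illustrative lookup is sketched below; the entry field names (name, Table, size) are assumptions, since the CameraToSensorModesLUTEntry definition is outside this hunk, and strcmp() needs <string.h>.

    static const SceneModesEntry* findSceneEntry(const char* sensor, OMX_SCENEMODETYPE scene) {
        for (size_t i = 0; i < ARRAY_SIZE(CameraToSensorModesLUT); i++) {
            if (strcmp(CameraToSensorModesLUT[i].name, sensor) != 0) continue;
            for (size_t j = 0; j < CameraToSensorModesLUT[i].size; j++) {
                if (CameraToSensorModesLUT[i].Table[j].scene == scene) {
                    return &CameraToSensorModesLUT[i].Table[j];
                }
            }
        }
        return NULL; // caller falls back to the current 3A settings
    }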
diff --git a/camera/inc/SensorListener.h b/camera/inc/SensorListener.h
index 913eb95..44037b7 100644
--- a/camera/inc/SensorListener.h
+++ b/camera/inc/SensorListener.h
@@ -30,7 +30,10 @@
#include <gui/SensorEventQueue.h>
#include <utils/Looper.h>
-namespace android {
+#include "Common.h"
+
+namespace Ti {
+namespace Camera {
/**
 * SensorListener class - Registers with the sensor manager to get sensor events
@@ -38,11 +41,11 @@ namespace android {
typedef void (*orientation_callback_t) (uint32_t orientation, uint32_t tilt, void* cookie);
-class SensorLooperThread : public Thread {
+class SensorLooperThread : public android::Thread {
public:
- SensorLooperThread(Looper* looper)
+ SensorLooperThread(android::Looper* looper)
: Thread(false) {
- mLooper = sp<Looper>(looper);
+ mLooper = android::sp<android::Looper>(looper);
}
~SensorLooperThread() {
mLooper.clear();
@@ -58,11 +61,11 @@ class SensorLooperThread : public Thread {
mLooper->wake();
}
private:
- sp<Looper> mLooper;
+ android::sp<android::Looper> mLooper;
};
-class SensorListener : public RefBase
+class SensorListener : public android::RefBase
{
/* public - types */
public:
@@ -85,17 +88,18 @@ public:
void handleOrientation(uint32_t orientation, uint32_t tilt);
/* public - member variables */
public:
- sp<SensorEventQueue> mSensorEventQueue;
+ android::sp<android::SensorEventQueue> mSensorEventQueue;
/* private - member variables */
private:
int sensorsEnabled;
orientation_callback_t mOrientationCb;
void *mCbCookie;
- sp<Looper> mLooper;
- sp<SensorLooperThread> mSensorLooperThread;
- Mutex mLock;
+ android::sp<android::Looper> mLooper;
+ android::sp<SensorLooperThread> mSensorLooperThread;
+ android::Mutex mLock;
};
-}
+} // namespace Camera
+} // namespace Ti
#endif
diff --git a/camera/inc/TICameraParameters.h b/camera/inc/TICameraParameters.h
index 4701cae..c5a24e9 100644
--- a/camera/inc/TICameraParameters.h
+++ b/camera/inc/TICameraParameters.h
@@ -14,16 +14,14 @@
* limitations under the License.
*/
-
-
-
#ifndef TI_CAMERA_PARAMETERS_H
#define TI_CAMERA_PARAMETERS_H
#include <utils/KeyedVector.h>
#include <utils/String8.h>
-namespace android {
+namespace Ti {
+namespace Camera {
///TI Specific Camera Parameters
class TICameraParameters
@@ -36,35 +34,48 @@ static const char KEY_SUPPORTED_CAMERAS[];
// Select logical Camera index
static const char KEY_CAMERA[];
static const char KEY_CAMERA_NAME[];
-static const char KEY_S3D_SUPPORTED[];
static const char KEY_BURST[];
static const char KEY_CAP_MODE[];
+static const char KEY_CAP_MODE_VALUES[];
static const char KEY_VNF[];
+static const char KEY_VNF_SUPPORTED[];
static const char KEY_SATURATION[];
static const char KEY_BRIGHTNESS[];
-static const char KEY_EXPOSURE_MODE[];
static const char KEY_SUPPORTED_EXPOSURE[];
+static const char KEY_EXPOSURE_MODE[];
+static const char KEY_SUPPORTED_MANUAL_EXPOSURE_MIN[];
+static const char KEY_SUPPORTED_MANUAL_EXPOSURE_MAX[];
+static const char KEY_SUPPORTED_MANUAL_EXPOSURE_STEP[];
+static const char KEY_SUPPORTED_MANUAL_GAIN_ISO_MIN[];
+static const char KEY_SUPPORTED_MANUAL_GAIN_ISO_MAX[];
+static const char KEY_SUPPORTED_MANUAL_GAIN_ISO_STEP[];
+static const char KEY_MANUAL_EXPOSURE[];
+static const char KEY_MANUAL_EXPOSURE_RIGHT[];
+static const char KEY_MANUAL_GAIN_ISO[];
+static const char KEY_MANUAL_GAIN_ISO_RIGHT[];
static const char KEY_CONTRAST[];
static const char KEY_SHARPNESS[];
static const char KEY_ISO[];
static const char KEY_SUPPORTED_ISO_VALUES[];
static const char KEY_SUPPORTED_IPP[];
static const char KEY_IPP[];
-static const char KEY_MAN_EXPOSURE[];
static const char KEY_METERING_MODE[];
-static const char KEY_PADDED_WIDTH[];
-static const char KEY_PADDED_HEIGHT[];
static const char KEY_EXP_BRACKETING_RANGE[];
+static const char KEY_EXP_GAIN_BRACKETING_RANGE[];
+static const char KEY_ZOOM_BRACKETING_RANGE[];
static const char KEY_TEMP_BRACKETING[];
static const char KEY_TEMP_BRACKETING_RANGE_POS[];
static const char KEY_TEMP_BRACKETING_RANGE_NEG[];
+static const char KEY_FLUSH_SHOT_CONFIG_QUEUE[];
static const char KEY_SHUTTER_ENABLE[];
static const char KEY_MEASUREMENT_ENABLE[];
static const char KEY_INITIAL_VALUES[];
static const char KEY_GBCE[];
+static const char KEY_GBCE_SUPPORTED[];
static const char KEY_GLBCE[];
-static const char KEY_MINFRAMERATE[];
-static const char KEY_MAXFRAMERATE[];
+static const char KEY_GLBCE_SUPPORTED[];
+static const char KEY_FRAMERATE_RANGES_EXT_SUPPORTED[];
+static const char KEY_FRAMERATES_EXT_SUPPORTED[];
// TI recording hint to notify camera adapters of possible recording
static const char KEY_RECORDING_HINT[];
@@ -72,32 +83,18 @@ static const char KEY_AUTO_FOCUS_LOCK[];
static const char KEY_CURRENT_ISO[];
static const char KEY_SENSOR_ORIENTATION[];
-static const char KEY_SENSOR_ORIENTATION_VALUES[];
-
-//TI extensions for zoom
-static const char ZOOM_SUPPORTED[];
-static const char ZOOM_UNSUPPORTED[];
//TI extensions for camera capabilies
static const char INITIAL_VALUES_TRUE[];
static const char INITIAL_VALUES_FALSE[];
-//TI extensions for enabling/disabling measurements
-static const char MEASUREMENT_ENABLE[];
-static const char MEASUREMENT_DISABLE[];
-
// TI extensions to add values for ManualConvergence and AutoConvergence mode
-static const char KEY_AUTOCONVERGENCE[];
static const char KEY_AUTOCONVERGENCE_MODE[];
-static const char KEY_MANUALCONVERGENCE_VALUES[];
-
-//TI extensions for enabling/disabling GLBCE
-static const char GLBCE_ENABLE[];
-static const char GLBCE_DISABLE[];
-
-//TI extensions for enabling/disabling GBCE
-static const char GBCE_ENABLE[];
-static const char GBCE_DISABLE[];
+static const char KEY_AUTOCONVERGENCE_MODE_VALUES[];
+static const char KEY_MANUAL_CONVERGENCE[];
+static const char KEY_SUPPORTED_MANUAL_CONVERGENCE_MIN[];
+static const char KEY_SUPPORTED_MANUAL_CONVERGENCE_MAX[];
+static const char KEY_SUPPORTED_MANUAL_CONVERGENCE_STEP[];
// TI extensions to add Min frame rate Values
static const char VIDEO_MINFRAMERATE_5[];
@@ -109,16 +106,6 @@ static const char VIDEO_MINFRAMERATE_25[];
static const char VIDEO_MINFRAMERATE_30[];
static const char VIDEO_MINFRAMERATE_33[];
-// TI extensions for Manual Gain and Manual Exposure
-static const char KEY_MANUAL_EXPOSURE_LEFT[];
-static const char KEY_MANUAL_EXPOSURE_RIGHT[];
-static const char KEY_MANUAL_EXPOSURE_MODES[];
-static const char KEY_MANUAL_GAIN_EV_RIGHT[];
-static const char KEY_MANUAL_GAIN_EV_LEFT[];
-static const char KEY_MANUAL_GAIN_ISO_RIGHT[];
-static const char KEY_MANUAL_GAIN_ISO_LEFT[];
-static const char KEY_MANUAL_GAIN_MODES[];
-
//TI extensions for setting EXIF tags
static const char KEY_EXIF_MODEL[];
static const char KEY_EXIF_MAKE[];
@@ -128,13 +115,13 @@ static const char KEY_GPS_MAPDATUM[];
static const char KEY_GPS_VERSION[];
static const char KEY_GPS_DATESTAMP[];
-//TI extensions for enabling/disabling shutter sound
-static const char SHUTTER_ENABLE[];
-static const char SHUTTER_DISABLE[];
+// TI extensions for VTC
+static const char KEY_VTC_HINT[];
+static const char KEY_VIDEO_ENCODER_HANDLE[];
+static const char KEY_VIDEO_ENCODER_SLICE_HEIGHT[];
-//TI extensions for Temporal bracketing
-static const char BRACKET_ENABLE[];
-static const char BRACKET_DISABLE[];
+static const char RAW_WIDTH[];
+static const char RAW_HEIGHT[];
//TI extensions to Image post-processing
static const char IPP_LDCNSF[];
@@ -146,18 +133,20 @@ static const char IPP_NONE[];
static const char HIGH_PERFORMANCE_MODE[];
static const char HIGH_QUALITY_MODE[];
static const char HIGH_QUALITY_ZSL_MODE[];
+static const char CP_CAM_MODE[];
static const char VIDEO_MODE[];
-
+static const char VIDEO_MODE_HQ[];
+static const char EXPOSURE_BRACKETING[];
+static const char ZOOM_BRACKETING[];
+static const char TEMP_BRACKETING[];
// TI extensions to standard android pixel formats
-static const char PIXEL_FORMAT_RAW[];
+static const char PIXEL_FORMAT_UNUSED[];
static const char PIXEL_FORMAT_JPS[];
static const char PIXEL_FORMAT_MPO[];
-static const char PIXEL_FORMAT_RAW_JPEG[];
-static const char PIXEL_FORMAT_RAW_MPO[];
+static const char PIXEL_FORMAT_YUV422I_UYVY[];
// TI extensions to standard android scene mode settings
-static const char SCENE_MODE_SPORT[];
static const char SCENE_MODE_CLOSEUP[];
static const char SCENE_MODE_AQUA[];
static const char SCENE_MODE_SNOWBEACH[];
@@ -176,7 +165,7 @@ static const char WHITE_BALANCE_SUNSET[];
static const char WHITE_BALANCE_FACE[];
// TI extensions to add exposure preset modes to android api
-static const char EXPOSURE_MODE_OFF[];
+static const char EXPOSURE_MODE_MANUAL[];
static const char EXPOSURE_MODE_AUTO[];
static const char EXPOSURE_MODE_NIGHT[];
static const char EXPOSURE_MODE_BACKLIGHT[];
@@ -192,6 +181,7 @@ static const char EXPOSURE_MODE_FACE[];
static const char FOCUS_MODE_PORTRAIT[];
static const char FOCUS_MODE_EXTENDED[];
static const char FOCUS_MODE_FACE[];
+static const char FOCUS_MODE_OFF[];
// TI extensions to add iso values
static const char ISO_MODE_AUTO[];
@@ -209,17 +199,34 @@ static const char EFFECT_VIVID[];
static const char EFFECT_COLOR_SWAP[];
static const char EFFECT_BLACKWHITE[];
-static const char KEY_S3D2D_PREVIEW[];
-static const char KEY_S3D2D_PREVIEW_MODE[];
+//TI extensions for stereo frame layouts
+static const char KEY_S3D_PRV_FRAME_LAYOUT[];
+static const char KEY_S3D_PRV_FRAME_LAYOUT_VALUES[];
+static const char KEY_S3D_CAP_FRAME_LAYOUT[];
+static const char KEY_S3D_CAP_FRAME_LAYOUT_VALUES[];
+
+//TI extensions for stereo frame layouts
+static const char S3D_NONE[];
+static const char S3D_TB_FULL[];
+static const char S3D_SS_FULL[];
+static const char S3D_TB_SUBSAMPLED[];
+static const char S3D_SS_SUBSAMPLED[];
+
+//TI extensions for 3D resolutions
+static const char KEY_SUPPORTED_PICTURE_SUBSAMPLED_SIZES[];
+static const char KEY_SUPPORTED_PICTURE_TOPBOTTOM_SIZES[];
+static const char KEY_SUPPORTED_PICTURE_SIDEBYSIDE_SIZES[];
+static const char KEY_SUPPORTED_PREVIEW_SUBSAMPLED_SIZES[];
+static const char KEY_SUPPORTED_PREVIEW_TOPBOTTOM_SIZES[];
+static const char KEY_SUPPORTED_PREVIEW_SIDEBYSIDE_SIZES[];
// TI extensions to add values for AutoConvergence settings.
static const char AUTOCONVERGENCE_MODE_DISABLE[];
static const char AUTOCONVERGENCE_MODE_FRAME[];
static const char AUTOCONVERGENCE_MODE_CENTER[];
-static const char AUTOCONVERGENCE_MODE_FFT[];
+static const char AUTOCONVERGENCE_MODE_TOUCH[];
static const char AUTOCONVERGENCE_MODE_MANUAL[];
-
//TI extensions for flash mode settings
static const char FLASH_MODE_FILL_IN[];
@@ -234,9 +241,23 @@ static const char ORIENTATION_SENSOR_270[];
static const char FACING_FRONT[];
static const char FACING_BACK[];
-};
+static const char KEY_MECHANICAL_MISALIGNMENT_CORRECTION_SUPPORTED[];
+static const char KEY_MECHANICAL_MISALIGNMENT_CORRECTION[];
+
+//TI extensions for enable/disable algos
+static const char KEY_ALGO_EXTERNAL_GAMMA[];
+static const char KEY_ALGO_NSF1[];
+static const char KEY_ALGO_NSF2[];
+static const char KEY_ALGO_SHARPENING[];
+static const char KEY_ALGO_THREELINCOLORMAP[];
+static const char KEY_ALGO_GIC[];
+
+//Gamma table
+static const char KEY_GAMMA_TABLE[];
};
-#endif
+} // namespace Camera
+} // namespace Ti
+#endif
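The new keys above are plain android::CameraParameters extensions. A short, illustrative client-side sketch follows; the string values behind the keys are defined in TICameraParameters.cpp elsewhere in this change, so the exact values are not shown here.

    android::CameraParameters params;
    params.set(Ti::Camera::TICameraParameters::KEY_S3D_PRV_FRAME_LAYOUT,
               Ti::Camera::TICameraParameters::S3D_TB_FULL);      // request top-bottom preview layout
    params.set(Ti::Camera::TICameraParameters::KEY_ALGO_NSF1, "true");  // enable the NSF1 algo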
diff --git a/camera/inc/V4LCameraAdapter/V4LCameraAdapter.h b/camera/inc/V4LCameraAdapter/V4LCameraAdapter.h
index b9d3952..bc99a6c 100644
--- a/camera/inc/V4LCameraAdapter/V4LCameraAdapter.h
+++ b/camera/inc/V4LCameraAdapter/V4LCameraAdapter.h
@@ -19,16 +19,40 @@
#ifndef V4L_CAMERA_ADAPTER_H
#define V4L_CAMERA_ADAPTER_H
+#include <linux/videodev2.h>
+
#include "CameraHal.h"
#include "BaseCameraAdapter.h"
#include "DebugUtils.h"
-namespace android {
+namespace Ti {
+namespace Camera {
#define DEFAULT_PIXEL_FORMAT V4L2_PIX_FMT_YUYV
+
#define NB_BUFFER 10
-#define DEVICE "/dev/video4"
+#define DEVICE "/dev/videoxx"
+#define DEVICE_PATH "/dev/"
+#define DEVICE_NAME "videoxx"
+
+typedef int V4L_HANDLETYPE;
+
+struct CapPixelformat {
+ uint32_t pixelformat;
+ const char *param;
+};
+
+struct CapResolution {
+ size_t width, height;
+ char param[10];
+};
+struct CapU32 {
+ uint32_t num;
+ const char *param;
+};
+
+typedef CapU32 CapFramerate;
struct VideoInfo {
struct v4l2_capability cap;
@@ -36,6 +60,7 @@ struct VideoInfo {
struct v4l2_buffer buf;
struct v4l2_requestbuffers rb;
void *mem[NB_BUFFER];
+ void *CaptureBuffers[NB_BUFFER];
bool isStreaming;
int width;
int height;
@@ -43,6 +68,16 @@ struct VideoInfo {
int framesizeIn;
};
+typedef struct V4L_TI_CAPTYPE {
+ uint16_t ulPreviewFormatCount; // supported preview pixelformat count
+ uint32_t ePreviewFormats[32];
+ uint16_t ulPreviewResCount; // supported preview resolution sizes
+ CapResolution tPreviewRes[32];
+ uint16_t ulCaptureResCount; // supported capture resolution sizes
+ CapResolution tCaptureRes[32];
+ uint16_t ulFrameRateCount; // supported frame rate
+ uint16_t ulFrameRates[32];
+} V4L_TI_CAPTYPE;
/**
* Class which completely abstracts the camera hardware interaction from camera hal
@@ -64,32 +99,35 @@ public:
public:
- V4LCameraAdapter();
+ V4LCameraAdapter(size_t sensor_index);
~V4LCameraAdapter();
    ///Initializes the camera adapter and creates any resources required
- virtual status_t initialize(CameraProperties::Properties*, int sensor_index=0);
+ virtual status_t initialize(CameraProperties::Properties*);
//APIs to configure Camera adapter and get the current parameter set
- virtual status_t setParameters(const CameraParameters& params);
- virtual void getParameters(CameraParameters& params);
+ virtual status_t setParameters(const android::CameraParameters& params);
+ virtual void getParameters(android::CameraParameters& params);
// API
- virtual status_t UseBuffersPreview(void* bufArr, int num);
+ virtual status_t UseBuffersPreview(CameraBuffer *bufArr, int num);
+ virtual status_t UseBuffersCapture(CameraBuffer *bufArr, int num);
- //API to flush the buffers for preview
- status_t flushBuffers();
+ static status_t getCaps(const int sensorId, CameraProperties::Properties* params, V4L_HANDLETYPE handle);
protected:
//----------Parent class method implementation------------------------------------
virtual status_t startPreview();
virtual status_t stopPreview();
- virtual status_t useBuffers(CameraMode mode, void* bufArr, int num, size_t length, unsigned int queueable);
- virtual status_t fillThisBuffer(void* frameBuf, CameraFrame::FrameType frameType);
+ virtual status_t takePicture();
+ virtual status_t stopImageCapture();
+ virtual status_t autoFocus();
+ virtual status_t useBuffers(CameraMode mode, CameraBuffer *bufArr, int num, size_t length, unsigned int queueable);
+ virtual status_t fillThisBuffer(CameraBuffer *frameBuf, CameraFrame::FrameType frameType);
virtual status_t getFrameSize(size_t &width, size_t &height);
- virtual status_t getPictureBufferSize(size_t &length, size_t bufferCount);
+ virtual status_t getPictureBufferSize(CameraFrame &frame, size_t bufferCount);
virtual status_t getFrameDataSize(size_t &dataFrameSize, size_t bufferCount);
virtual void onOrientationEvent(uint32_t orientation, uint32_t tilt);
//-----------------------------------------------------------------------------
@@ -97,13 +135,13 @@ protected:
private:
- class PreviewThread : public Thread {
+ class PreviewThread : public android::Thread {
V4LCameraAdapter* mAdapter;
public:
PreviewThread(V4LCameraAdapter* hw) :
Thread(false), mAdapter(hw) { }
virtual void onFirstRef() {
- run("CameraPreviewThread", PRIORITY_URGENT_DISPLAY);
+ run("CameraPreviewThread", android::PRIORITY_URGENT_DISPLAY);
}
virtual bool threadLoop() {
mAdapter->previewThread();
@@ -122,15 +160,56 @@ private:
public:
private:
+ //capabilities data
+ static const CapPixelformat mPixelformats [];
+ static const CapResolution mPreviewRes [];
+ static const CapFramerate mFramerates [];
+ static const CapResolution mImageCapRes [];
+
+ //camera defaults
+ static const char DEFAULT_PREVIEW_FORMAT[];
+ static const char DEFAULT_PREVIEW_SIZE[];
+ static const char DEFAULT_FRAMERATE[];
+ static const char DEFAULT_NUM_PREV_BUFS[];
+
+ static const char DEFAULT_PICTURE_FORMAT[];
+ static const char DEFAULT_PICTURE_SIZE[];
+ static const char DEFAULT_FOCUS_MODE[];
+ static const char * DEFAULT_VSTAB;
+ static const char * DEFAULT_VNF;
+
+ static status_t insertDefaults(CameraProperties::Properties*, V4L_TI_CAPTYPE&);
+ static status_t insertCapabilities(CameraProperties::Properties*, V4L_TI_CAPTYPE&);
+ static status_t insertPreviewFormats(CameraProperties::Properties* , V4L_TI_CAPTYPE&);
+ static status_t insertPreviewSizes(CameraProperties::Properties* , V4L_TI_CAPTYPE&);
+ static status_t insertImageSizes(CameraProperties::Properties* , V4L_TI_CAPTYPE&);
+ static status_t insertFrameRates(CameraProperties::Properties* , V4L_TI_CAPTYPE&);
+ static status_t sortAscend(V4L_TI_CAPTYPE&, uint16_t ) ;
+
+ status_t v4lIoctl(int, int, void*);
+ status_t v4lInitMmap(int&);
+ status_t v4lInitUsrPtr(int&);
+ status_t v4lStartStreaming();
+ status_t v4lStopStreaming(int nBufferCount);
+ status_t v4lSetFormat(int, int, uint32_t);
+ status_t restartPreview();
+
+
int mPreviewBufferCount;
- KeyedVector<int, int> mPreviewBufs;
- mutable Mutex mPreviewBufsLock;
+ int mPreviewBufferCountQueueable;
+ int mCaptureBufferCount;
+ int mCaptureBufferCountQueueable;
+ android::KeyedVector<CameraBuffer *, int> mPreviewBufs;
+ android::KeyedVector<CameraBuffer *, int> mCaptureBufs;
+ mutable android::Mutex mPreviewBufsLock;
+ mutable android::Mutex mCaptureBufsLock;
+ mutable android::Mutex mStopPreviewLock;
- CameraParameters mParams;
+ android::CameraParameters mParams;
bool mPreviewing;
bool mCapturing;
- Mutex mLock;
+ android::Mutex mLock;
int mFrameCount;
int mLastFrameCount;
@@ -142,17 +221,18 @@ private:
int mSensorIndex;
- // protected by mLock
- sp<PreviewThread> mPreviewThread;
-
- struct VideoInfo *mVideoInfo;
- int mCameraHandle;
+ // protected by mLock
+ android::sp<PreviewThread> mPreviewThread;
+ struct VideoInfo *mVideoInfo;
+ int mCameraHandle;
int nQueued;
int nDequeued;
};
-}; //// namespace
-#endif //V4L_CAMERA_ADAPTER_H
+} // namespace Camera
+} // namespace Ti
+
+#endif //V4L_CAMERA_ADAPTER_H
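v4lIoctl() declared above is a thin wrapper around ioctl(). Its implementation lives in V4LCameraAdapter.cpp and is not part of this hunk; the conventional EINTR-retry shape is sketched below as an assumption (requires <sys/ioctl.h> and <errno.h>).

    status_t V4LCameraAdapter::v4lIoctl(int fd, int req, void* argp) {
        int ret;
        do {
            ret = ioctl(fd, req, argp);
        } while (ret == -1 && errno == EINTR);   // retry when interrupted by a signal
        return (ret == -1) ? -errno : NO_ERROR;  // actual error mapping may differ
    }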
diff --git a/domx/Android.mk b/domx/Android.mk
index 6571161..ae08d6b 100644
--- a/domx/Android.mk
+++ b/domx/Android.mk
@@ -1 +1,34 @@
-include $(all-subdir-makefiles)
+# DOMX is not used in the OMAP3 program
+ifneq ($(TARGET_BOARD_PLATFORM),omap3)
+
+ LOCAL_PATH:= $(call my-dir)
+ OMAP4_DEBUG_MEMLEAK:= false
+
+ ifeq ($(OMAP4_DEBUG_MEMLEAK),true)
+
+ OMAP4_DEBUG_CFLAGS:= -DHEAPTRACKER
+ OMAP4_DEBUG_LDFLAGS:= $(foreach f, $(strip malloc realloc calloc free), -Wl,--wrap=$(f))
+ OMAP4_DEBUG_SHARED_LIBRARIES:= liblog
+ BUILD_HEAPTRACKED_SHARED_LIBRARY:= hardware/ti/omap4xxx/heaptracked-shared-library.mk
+ BUILD_HEAPTRACKED_EXECUTABLE:= hardware/ti/omap4xxx/heaptracked-executable.mk
+
+ LOCAL_PATH:= $(call my-dir)
+ include $(CLEAR_VARS)
+ LOCAL_SRC_FILES:= heaptracker.c stacktrace.c mapinfo.c
+ LOCAL_MODULE:= libheaptracker
+ LOCAL_MODULE_TAGS:= optional
+ include $(BUILD_STATIC_LIBRARY)
+
+ include $(CLEAR_VARS)
+ LOCAL_SRC_FILES:= tm.c
+ LOCAL_MODULE:= tm
+ LOCAL_MODULE_TAGS:= test
+ include $(BUILD_HEAPTRACKED_EXECUTABLE)
+
+ else
+ BUILD_HEAPTRACKED_SHARED_LIBRARY:=$(BUILD_SHARED_LIBRARY)
+ BUILD_HEAPTRACKED_EXECUTABLE:= $(BUILD_EXECUTABLE)
+ endif
+
+ include $(call first-makefiles-under,$(LOCAL_PATH))
+endif # ifneq ($(TARGET_BOARD_PLATFORM),omap3)
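The OMAP4_DEBUG_MEMLEAK branch wraps the allocator entry points with GNU ld's --wrap option. For reference, a wrapped symbol pair typically looks like the sketch below; the real hooks live in heaptracker.c, which is not shown in this diff.

    extern "C" void* __real_malloc(size_t size);    // resolved to the original malloc
    extern "C" void* __wrap_malloc(size_t size) {   // --wrap=malloc redirects calls here
        void* ptr = __real_malloc(size);
        // record ptr, size and a stack trace for later leak reporting
        return ptr;
    }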
diff --git a/domx/domx/Android.mk b/domx/domx/Android.mk
index 3a47878..90ebc19 100644
--- a/domx/domx/Android.mk
+++ b/domx/domx/Android.mk
@@ -8,14 +8,18 @@ LOCAL_SRC_FILES:= \
omx_rpc/src/omx_rpc_stub.c \
omx_rpc/src/omx_rpc_config.c \
omx_rpc/src/omx_rpc_platform.c \
- omx_proxy_common/src/omx_proxy_common.c
+ omx_proxy_common/src/omx_proxy_common.c \
+ profiling/src/profile.c
LOCAL_C_INCLUDES += \
$(LOCAL_PATH)/omx_rpc/inc \
$(LOCAL_PATH)/../omx_core/inc \
$(LOCAL_PATH)/../mm_osal/inc \
+ $(LOCAL_PATH)/profiling/inc \
$(HARDWARE_TI_OMAP4_BASE)/hwc/ \
$(HARDWARE_TI_OMAP4_BASE)/ion/ \
+ $(HARDWARE_TI_OMAP4_BASE)/kernel-headers-ti \
+ $(HARDWARE_TI_OMAP4_BASE)/system-core-headers-ti \
system/core/include/cutils \
$(HARDWARE_TI_OMAP4_BASE)/../../libhardware/include
@@ -26,7 +30,8 @@ LOCAL_SHARED_LIBRARIES := \
libmm_osal \
libc \
liblog \
- libion_ti
+ libion_ti \
+ libcutils
LOCAL_MODULE:= libdomx
LOCAL_MODULE_TAGS:= optional
diff --git a/domx/domx/Makefile b/domx/domx/Makefile
index 4e3639e..cab0b80 100644
--- a/domx/domx/Makefile
+++ b/domx/domx/Makefile
@@ -71,11 +71,11 @@ SOURCES = \
omx_rpc/src/omx_rpc.c \
omx_rpc/src/omx_rpc_skel.c \
omx_rpc/src/omx_rpc_stub.c \
-omx_rpc/src/omx_rpc_config.c \
-omx_rpc/src/omx_rpc_platform.c \
-omx_proxy_common/src/omx_proxy_common.c
-
-
+omx_proxy_common/src/omx_proxy_common.c \
+profiling/profile.c
+# The files below are currently empty, so they are removed from the build
+# omx_rpc/src/omx_rpc_config.c \
+# omx_rpc/src/omx_rpc_platform.c \
# Search path for include files
diff --git a/domx/domx/omx_proxy_common.h b/domx/domx/omx_proxy_common.h
index 8eb21e5..7c7d6c8 100755
--- a/domx/domx/omx_proxy_common.h
+++ b/domx/domx/omx_proxy_common.h
@@ -80,7 +80,7 @@ extern "C"
#define OMX_VER_MAJOR 0x1
#define OMX_VER_MINOR 0x1
-#define MAX_NUM_PROXY_BUFFERS 32
+#define MAX_NUM_PROXY_BUFFERS 100
#define MAX_COMPONENT_NAME_LENGTH 128
#define PROXY_MAXNUMOFPORTS 8
@@ -94,7 +94,7 @@ extern "C"
#define PROXY_paramCheck(C, V, S) do {\
if (!(C)) { eError = V;\
if(S) DOMX_ERROR("failed check:" #C" - returning error: 0x%x - %s",V,S);\
- else DOMX_ERROR("failed check:" #C" - returning error: 0x%x",C, V); \
+ else DOMX_ERROR("failed check:" #C" - returning error: 0x%x",V); \
goto EXIT; }\
} while(0)
@@ -110,6 +110,38 @@ extern "C"
OMX_ErrorVersionMismatch, NULL); \
} while(0)
+#define PROXY_checkRpcError() do { \
+ if (eRPCError == RPC_OMX_ErrorNone) \
+ { \
+ DOMX_DEBUG("Corresponding RPC function executed successfully"); \
+ eError = eCompReturn; \
+ PROXY_assert((eError == OMX_ErrorNone) || (eError == OMX_ErrorNoMore), eError, "Error returned from OMX API in ducati"); \
+ } else \
+ { \
+ DOMX_ERROR("RPC function returned error 0x%x", eRPCError); \
+ switch (eRPCError) \
+ { \
+ case RPC_OMX_ErrorHardware: \
+ eError = OMX_ErrorHardware; \
+ break; \
+ case RPC_OMX_ErrorInsufficientResources: \
+ eError = OMX_ErrorInsufficientResources; \
+ break; \
+ case RPC_OMX_ErrorBadParameter: \
+ eError = OMX_ErrorBadParameter; \
+ break; \
+ case RPC_OMX_ErrorUnsupportedIndex: \
+ eError = OMX_ErrorUnsupportedIndex; \
+ break; \
+ case RPC_OMX_ErrorTimeout: \
+ eError = OMX_ErrorTimeout; \
+ break; \
+ default: \
+ eError = OMX_ErrorUndefined; \
+ } \
+ PROXY_assert((eError == OMX_ErrorNone), eError, "Error returned from OMX API in ducati"); \
+ } \
+} while(0)
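A hedged usage sketch of the PROXY_checkRpcError() macro added above, mirroring the call sites later in this patch (for example PROXY_GetState): the macro expects eRPCError, eCompReturn and eError to be in scope, and its embedded PROXY_assert() jumps to the local EXIT label on failure. The wrapper function name below is illustrative only.

/* Illustrative wrapper; PROXY_GetState in this patch follows the same shape. */
static OMX_ERRORTYPE example_GetState(PROXY_COMPONENT_PRIVATE *pCompPrv,
	OMX_STATETYPE *pState)
{
	OMX_ERRORTYPE eError = OMX_ErrorNone, eCompReturn = OMX_ErrorNone;
	RPC_OMX_ERRORTYPE eRPCError = RPC_OMX_ErrorNone;

	eRPCError = RPC_GetState(pCompPrv->hRemoteComp, pState, &eCompReturn);

	/* Maps eRPCError/eCompReturn into eError; PROXY_assert() inside the
	   macro jumps to EXIT when the mapped error is fatal. */
	PROXY_checkRpcError();

      EXIT:
	return eError;
}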
typedef OMX_ERRORTYPE(*PROXY_EMPTYBUFFER_DONE) (OMX_HANDLETYPE
hComponent, OMX_U32 remoteBufHdr, OMX_U32 nfilledLen,
@@ -134,6 +166,12 @@ extern "C"
* @param pBufHeader : This is a pointer to the A9 bufferheader.
*
* @param pBufHeaderRemote : This is pointer to Ducati side bufferheader.
+ *
+ * @param pRegisteredAufBux0
+ * @param pRegisteredAufBux1
+ * @param pRegisteredAufBux2 : These are pointers to buffers registered with the rpc driver.
+ *                             They are assigned when registering and used when
+ *                             unregistering the buffer
*/
/*===============================================================*/
typedef struct PROXY_BUFFER_INFO
@@ -145,6 +183,9 @@ extern "C"
#ifdef USE_ION
int mmap_fd;
int mmap_fd_metadata_buff;
+ OMX_PTR pRegisteredAufBux0;
+ OMX_PTR pRegisteredAufBux1;
+ OMX_PTR pRegisteredAufBux2;
#endif
} PROXY_BUFFER_INFO;
@@ -158,7 +199,8 @@ extern "C"
VirtualPointers, /*Used when buffer pointers come from the normal A9 virtual space */
GrallocPointers, /*Used when buffer pointers come from Gralloc allocations */
IONPointers, /*Used when buffer pointers come from ION allocations */
- EncoderMetadataPointers /*Used when buffer pointers come from Stagefright in camcorder usecase */
+ EncoderMetadataPointers, /*Used when buffer pointers come from Stagefright in camcorder usecase */
+ BufferDescriptorVirtual2D /*Virtual unpacked buffers passed via OMX_TI_BUFFERDESCRIPTOR_TYPE */
} PROXY_BUFFER_TYPE;
/*===============================================================*/
@@ -172,6 +214,33 @@ extern "C"
OMX_U32 IsBuffer2D; /*Used when buffer pointers come from Gralloc allocations */
} PROXY_PORT_TYPE;
+#ifdef ENABLE_RAW_BUFFERS_DUMP_UTILITY
+/*===============================================================*/
+/** DebugFrame_Dump : Structure holding the info about frames to dump
+ * @param fromFrame: From which frame to start dumping
+ * @param toFrame: till which frame to dump
+ * @param frame_width: Width of the frame
+ * @param frame_height: Height of the frame
+ * @param frame_xoffset: Horizontal offset of the frame within the buffer
+ * @param frame_yoffset: Vertical offset of the frame within the buffer
+ * @param stride: Stride of the Buffer
+ * @param runningFrame: running counter to track the frames
+ */
+/*===============================================================*/
+ typedef struct DebugFrame_Dump
+ {
+ OMX_S32 fromFrame;
+ OMX_S32 toFrame;
+ OMX_U32 frame_width;
+ OMX_U32 frame_height;
+ OMX_U32 frame_xoffset;
+ OMX_U32 frame_yoffset;
+ OMX_U32 stride;
+ OMX_S32 runningFrame;
+ OMX_U32 *y_uv[2];
+ }DebugFrame_Dump;
+#endif
+
/* ========================================================================== */
/**
* PROXY_COMPONENT_PRIVATE
@@ -207,6 +276,9 @@ extern "C"
OMX_BOOL bUseIon;
OMX_BOOL bMapIonBuffers;
#endif
+#ifdef ENABLE_RAW_BUFFERS_DUMP_UTILITY
+ DebugFrame_Dump debugframeInfo;
+#endif
int secure_misc_drv_fd;
} PROXY_COMPONENT_PRIVATE;
diff --git a/domx/domx/omx_proxy_common/src/omx_proxy_common.c b/domx/domx/omx_proxy_common/src/omx_proxy_common.c
index 3014bf1..e21f772 100644
--- a/domx/domx/omx_proxy_common/src/omx_proxy_common.c
+++ b/domx/domx/omx_proxy_common/src/omx_proxy_common.c
@@ -70,20 +70,19 @@
#include "omx_rpc_stub.h"
#include "omx_rpc_utils.h"
#include "OMX_TI_IVCommon.h"
+#include "profile.h"
#ifdef ALLOCATE_TILER_BUFFER_IN_PROXY
#ifdef USE_ION
#include <unistd.h>
-#include <ion.h>
+#include <ion/ion.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <sys/eventfd.h>
#include <fcntl.h>
-
-#else
-#include "memmgr.h"
-#include "tiler.h"
+#include <linux/rpmsg_omx.h>
+#include <errno.h>
#endif
#endif
@@ -96,23 +95,6 @@
#ifdef TILER_BUFF
#define PortFormatIsNotYUV 0
-#if 0
-static OMX_ERRORTYPE RPC_PrepareBuffer_Remote(PROXY_COMPONENT_PRIVATE *
- pCompPrv, OMX_COMPONENTTYPE * hRemoteComp, OMX_U32 nPortIndex,
- OMX_U32 nSizeBytes, OMX_BUFFERHEADERTYPE * pChironBuf,
- OMX_BUFFERHEADERTYPE * pDucBuf, OMX_PTR pBufToBeMapped);
-static OMX_ERRORTYPE RPC_PrepareBuffer_Chiron(PROXY_COMPONENT_PRIVATE *
- pCompPrv, OMX_COMPONENTTYPE * hRemoteComp, OMX_U32 nPortIndex,
- OMX_U32 nSizeBytes, OMX_BUFFERHEADERTYPE * pDucBuf,
- OMX_BUFFERHEADERTYPE * pChironBuf);
-static OMX_ERRORTYPE RPC_UnMapBuffer_Ducati(OMX_PTR pBuffer);
-static OMX_ERRORTYPE RPC_MapBuffer_Ducati(OMX_U8 * pBuf, OMX_U32 nBufLineSize,
- OMX_U32 nBufLines, OMX_U8 ** pMappedBuf, OMX_PTR pBufToBeMapped);
-
-static OMX_ERRORTYPE RPC_MapMetaData_Host(OMX_BUFFERHEADERTYPE * pBufHdr);
-static OMX_ERRORTYPE RPC_UnMapMetaData_Host(OMX_BUFFERHEADERTYPE * pBufHdr);
-#endif
-
static OMX_ERRORTYPE _RPC_IsProxyComponent(OMX_HANDLETYPE hComponent,
OMX_BOOL * bIsProxy);
OMX_ERRORTYPE RPC_UTIL_GetStride(OMX_COMPONENTTYPE * hRemoteComp,
@@ -147,53 +129,86 @@ char Core_Array[][MAX_CORENAME_LENGTH] =
* MACROS - LOCAL
******************************************************************/
-#define PROXY_checkRpcError() do { \
- if (eRPCError == RPC_OMX_ErrorNone) \
- { \
- DOMX_DEBUG("Corresponding RPC function executed successfully"); \
- eError = eCompReturn; \
- PROXY_assert((eError == OMX_ErrorNone) || (eError == OMX_ErrorNoMore), eError, "Error returned from OMX API in ducati"); \
- } else \
- { \
- DOMX_ERROR("RPC function returned error 0x%x", eRPCError); \
- switch (eRPCError) \
- { \
- case RPC_OMX_ErrorHardware: \
- eError = OMX_ErrorHardware; \
- break; \
- case RPC_OMX_ErrorInsufficientResources: \
- eError = OMX_ErrorInsufficientResources; \
- break; \
- case RPC_OMX_ErrorBadParameter: \
- eError = OMX_ErrorBadParameter; \
- break; \
- case RPC_OMX_ErrorUnsupportedIndex: \
- eError = OMX_ErrorUnsupportedIndex; \
- break; \
- case RPC_OMX_ErrorTimeout: \
- eError = OMX_ErrorTimeout; \
- break; \
- default: \
- eError = OMX_ErrorUndefined; \
- } \
- } \
-} while(0)
-
#ifdef USE_ION
+
RPC_OMX_ERRORTYPE RPC_RegisterBuffer(OMX_HANDLETYPE hRPCCtx, int fd,
- struct ion_handle **handle)
+ OMX_PTR *handle1, OMX_PTR *handle2,
+ PROXY_BUFFER_TYPE proxyBufferType)
{
+ RPC_OMX_ERRORTYPE eRPCError = RPC_OMX_ErrorNone;
+ int status;
+ RPC_OMX_CONTEXT *pRPCCtx = (RPC_OMX_CONTEXT *) hRPCCtx;
+
+ if ((fd < 0) || (handle1 == NULL) ||
+ ((proxyBufferType == GrallocPointers) && (handle2 == NULL))) {
+ eRPCError = RPC_OMX_ErrorBadParameter;
+ goto EXIT;
+ }
+
+ if (proxyBufferType != GrallocPointers) {
+ struct ion_fd_data ion_data;
+
+ ion_data.fd = fd;
+ ion_data.handle = NULL;
+ status = ioctl(pRPCCtx->fd_omx, OMX_IOCIONREGISTER, &ion_data);
+ if (status < 0) {
+ DOMX_ERROR("RegisterBuffer ioctl call failed");
+ eRPCError = RPC_OMX_ErrorInsufficientResources;
+ goto EXIT;
+ }
+ if (ion_data.handle)
+ *handle1 = ion_data.handle;
+ } else {
+#ifdef OMX_IOCPVRREGISTER
+ struct omx_pvr_data pvr_data;
+
+ pvr_data.fd = fd;
+ memset(pvr_data.handles, 0x0, sizeof(pvr_data.handles));
+ status = ioctl(pRPCCtx->fd_omx, OMX_IOCPVRREGISTER, &pvr_data);
+ if (status < 0) {
+ if (errno == ENOTTY) {
+ DOMX_DEBUG("OMX_IOCPVRREGISTER not supported with current kernel version");
+ } else {
+ DOMX_ERROR("RegisterBuffer ioctl call failed");
+ eRPCError = RPC_OMX_ErrorInsufficientResources;
+ }
+ goto EXIT;
+ }
+
+ if (pvr_data.handles[0])
+ *handle1 = pvr_data.handles[0];
+ if (pvr_data.handles[1])
+ *handle2 = pvr_data.handles[1];
+#endif
+ }
+
+
+ EXIT:
+ return eRPCError;
+}
+
+RPC_OMX_ERRORTYPE RPC_UnRegisterBuffer(OMX_HANDLETYPE hRPCCtx, OMX_PTR handle)
+{
+ RPC_OMX_ERRORTYPE eRPCError = RPC_OMX_ErrorNone;
int status;
struct ion_fd_data data;
RPC_OMX_CONTEXT *pRPCCtx = (RPC_OMX_CONTEXT *) hRPCCtx;
- data.fd = fd;
- status = ioctl(pRPCCtx->fd_omx, ION_IOC_IMPORT, &data);
- if (status < 0)
- return RPC_OMX_ErrorInsufficientResources;
- *handle = data.handle;
- return RPC_OMX_ErrorNone;
+ if (handle == NULL) {
+ eRPCError = RPC_OMX_ErrorBadParameter;
+ goto EXIT;
+ }
+
+ data.handle = handle;
+ status = ioctl(pRPCCtx->fd_omx, OMX_IOCIONUNREGISTER, &data);
+ if (status < 0) {
+ eRPCError = RPC_OMX_ErrorInsufficientResources;
+ goto EXIT;
+ }
+
+ EXIT:
+ return eRPCError;
}
static OMX_ERRORTYPE PROXY_AllocateBufferIonCarveout(PROXY_COMPONENT_PRIVATE *pCompPrv,
@@ -202,25 +217,25 @@ static OMX_ERRORTYPE PROXY_AllocateBufferIonCarveout(PROXY_COMPONENT_PRIVATE *pC
int fd;
int ret;
struct ion_handle *temp;
+ size_t stride;
ret = ion_alloc(pCompPrv->ion_fd, len, 0x1000, 1 << ION_HEAP_TYPE_CARVEOUT, &temp);
- DOMX_DEBUG("ION being USED for allocation!!!!! handle = %x, ret =%x",temp,ret);
+
+ if (ret || ((int)temp == -ENOMEM)) {
+ ret = ion_alloc_tiler(pCompPrv->ion_fd, len, 1, TILER_PIXEL_FMT_PAGE,
+ OMAP_ION_HEAP_TILER_MASK, &temp, &stride);
+ }
+
+ if (ret || ((int)temp == -ENOMEM)) {
+ DOMX_ERROR("FAILED to allocate buffer of size=%d. ret=0x%x",len, ret);
+ return OMX_ErrorInsufficientResources;
+ }
+
if (ret)
- return OMX_ErrorInsufficientResources;
- /*
- ret = ion_share(pCompPrv->ion_fd, temp, &fd);
- if (ret) {
- ion_free(pCompPrv->ion_fd, temp);
- return OMX_ErrorHardware;
- }
- RPC_RegisterBuffer(pCompPrv->hRemoteComp, fd, handle);
- close(fd);
- ion_free(pCompPrv->ion_fd, temp);
- */
+ return OMX_ErrorInsufficientResources;
*handle = temp;
return OMX_ErrorNone;
}
-
#endif
/* ===========================================================================*/
@@ -254,7 +269,7 @@ OMX_ERRORTYPE PROXY_EventHandler(OMX_HANDLETYPE hComponent,
switch (eEvent)
{
-#if 0
+#if 0 // This feature is currently not supported; kept under #if 0 so it can be enabled in the future
case OMX_TI_EventBufferRefCount:
DOMX_DEBUG("Received Ref Count Event");
/*nData1 will be pBufferHeader, nData2 will be present count. Need to find local
@@ -364,6 +379,8 @@ static OMX_ERRORTYPE PROXY_EmptyBufferDone(OMX_HANDLETYPE hComponent,
OMX_ErrorBadParameter,
"Received invalid-buffer header from OMX component");
+ KPI_OmxCompBufferEvent(KPI_BUFFER_EBD, hComponent, &(pCompPrv->tBufList[count]));
+
EXIT:
if (eError == OMX_ErrorNone)
{
@@ -439,6 +456,8 @@ OMX_ERRORTYPE PROXY_FillBufferDone(OMX_HANDLETYPE hComponent,
OMX_ErrorBadParameter,
"Received invalid-buffer header from OMX component");
+ KPI_OmxCompBufferEvent(KPI_BUFFER_FBD, hComponent, &(pCompPrv->tBufList[count]));
+
EXIT:
if (eError == OMX_ErrorNone)
{
@@ -542,6 +561,9 @@ OMX_ERRORTYPE PROXY_EmptyThisBuffer(OMX_HANDLETYPE hComponent,
bMapBuffer =
pCompPrv->proxyPortBuffers[pBufferHdr->nInputPortIndex].proxyBufferType ==
EncoderMetadataPointers;
+
+ KPI_OmxCompBufferEvent(KPI_BUFFER_ETB, hComponent, &(pCompPrv->tBufList[count]));
+
eRPCError =
RPC_EmptyThisBuffer(pCompPrv->hRemoteComp, pBufferHdr,
pCompPrv->tBufList[count].pBufHeaderRemote, &eCompReturn,bMapBuffer);
@@ -613,6 +635,8 @@ OMX_ERRORTYPE PROXY_FillThisBuffer(OMX_HANDLETYPE hComponent,
OMX_ErrorBadParameter,
"Could not find the remote header in buffer list");
+ KPI_OmxCompBufferEvent(KPI_BUFFER_FTB, hComponent, &(pCompPrv->tBufList[count]));
+
eRPCError = RPC_FillThisBuffer(pCompPrv->hRemoteComp, pBufferHdr,
pCompPrv->tBufList[count].pBufHeaderRemote, &eCompReturn);
@@ -646,9 +670,7 @@ OMX_ERRORTYPE PROXY_AllocateBuffer(OMX_IN OMX_HANDLETYPE hComponent,
OMX_BOOL bSlotFound = OMX_FALSE;
#ifdef USE_ION
struct ion_handle *handle = NULL;
-#else
- MemAllocBlock block;
- MemAllocBlock blocks[2];
+ OMX_PTR pIonMappedBuffer = NULL;
#endif
#ifdef ALLOCATE_TILER_BUFFER_IN_PROXY
@@ -666,17 +688,33 @@ OMX_ERRORTYPE PROXY_AllocateBuffer(OMX_IN OMX_HANDLETYPE hComponent,
PROXY_require(ppBufferHdr != NULL, OMX_ErrorBadParameter,
"Pointer to buffer header is NULL");
-#ifndef USE_ION
- memset(&block, 0, sizeof(MemAllocBlock));
- memset(blocks, 0, sizeof(MemAllocBlock)*2);
-#endif
-
pCompPrv = (PROXY_COMPONENT_PRIVATE *) hComp->pComponentPrivate;
DOMX_ENTER
("hComponent = %p, pCompPrv = %p, nPortIndex = %p, pAppPrivate = %p, nSizeBytes = %d",
hComponent, pCompPrv, nPortIndex, pAppPrivate, nSizeBytes);
+ /*Pick up 1st empty slot */
+	/*The same empty slot will be picked up by the subsequent
+	   UseBuffer call to fill in the corresponding buffer
+	   header in the list */
+
+ bSlotFound = OMX_FALSE;
+ for (i = 0; i < pCompPrv->nTotalBuffers; i++)
+ {
+ if (pCompPrv->tBufList[i].pBufHeader == NULL)
+ {
+ currentBuffer = i;
+ bSlotFound = OMX_TRUE;
+ break;
+ }
+ }
+
+ if (bSlotFound == OMX_FALSE)
+ {
+ currentBuffer = pCompPrv->nTotalBuffers;
+ }
+
/*To find whether buffer is 2D or 1D */
eError =
RPC_UTIL_GetStride(pCompPrv->hRemoteComp, nPortIndex, &nStride);
@@ -690,138 +728,64 @@ OMX_ERRORTYPE PROXY_AllocateBuffer(OMX_IN OMX_HANDLETYPE hComponent,
DOMX_ERROR ("Tiler 2d port buffers not implemented");
eError = OMX_ErrorNotImplemented;
goto EXIT;
-#else
- tParamRect.nSize = sizeof(OMX_CONFIG_RECTTYPE);
- tParamRect.nVersion.s.nVersionMajor = 1;
- tParamRect.nVersion.s.nVersionMinor = 1;
- tParamRect.nVersion.s.nRevision = 0;
- tParamRect.nVersion.s.nStep = 0;
- tParamRect.nPortIndex = nPortIndex;
-
- eError = PROXY_GetParameter(hComponent, (OMX_INDEXTYPE)OMX_TI_IndexParam2DBufferAllocDimension, &tParamRect);
- if(eError == OMX_ErrorNone)
- {
- blocks[0].fmt = PIXEL_FMT_8BIT;
- blocks[0].dim.area.width = tParamRect.nWidth;
- blocks[0].dim.area.height = tParamRect.nHeight;
- blocks[0].stride = 0;
-
- blocks[1].fmt = PIXEL_FMT_16BIT;
- blocks[1].dim.area.width = tParamRect.nWidth >> 1;
- blocks[1].dim.area.height = tParamRect.nHeight >> 1;
- blocks[1].stride = 0;
-
- }
- else if(eError == OMX_ErrorUnsupportedIndex)
- {
- DOMX_ERROR("Component does not support OMX_TI_IndexParam2DBufferAllocDimension, \
- reverting to OMX_PARAM_PORTDEFINITIONTYPE");
- tParamPortDef.nSize = sizeof(OMX_PARAM_PORTDEFINITIONTYPE);
- tParamPortDef.nVersion.s.nVersionMajor = 1;
- tParamPortDef.nVersion.s.nVersionMinor = 1;
- tParamPortDef.nVersion.s.nRevision = 0;
- tParamPortDef.nVersion.s.nStep = 0;
- tParamPortDef.nPortIndex = nPortIndex;
-
- eError = PROXY_GetParameter(hComponent, OMX_IndexParamPortDefinition, &tParamPortDef);
- if(eError != OMX_ErrorNone)
- {
- DOMX_ERROR("PROXY_GetParameter returns err %d (0x%x)", eError, eError);
- return eError;
- }
-
- blocks[0].fmt = PIXEL_FMT_8BIT;
- blocks[0].dim.area.width = tParamPortDef.format.video.nFrameWidth;
- blocks[0].dim.area.height = tParamPortDef.format.video.nFrameHeight;
- blocks[0].stride = 0;
-
- blocks[1].fmt = PIXEL_FMT_16BIT;
- blocks[1].dim.area.width = tParamPortDef.format.video.nFrameWidth >> 1;
- blocks[1].dim.area.height = tParamPortDef.format.video.nFrameHeight >> 1;
- blocks[1].stride = 0;
- }
- if(eError != OMX_ErrorNone)
- {
- DOMX_ERROR("PROXY_GetParameter returns err %d (0x%x)", eError, eError);
- return eError;
- }
-
- pMemptr = (OMX_U8*) MemMgr_Alloc(blocks, 2);
- PROXY_assert((pMemptr != NULL), OMX_ErrorInsufficientResources, "MemMgr_Alloc returns NULL, abort,");
-
- DOMX_DEBUG(" Y Buffer : Allocated Width:%d, Height:%d",blocks[0].dim.area.width, blocks[0].dim.area.height);
#endif
}
#ifdef USE_ION
else if (pCompPrv->bUseIon == OMX_TRUE)
{
eError = PROXY_AllocateBufferIonCarveout(pCompPrv, nSize, &handle);
- pMemptr = handle;
+ pCompPrv->tBufList[currentBuffer].pYBuffer = handle;
+ if (pCompPrv->bMapIonBuffers == OMX_TRUE)
+ {
+ DOMX_DEBUG("before mapping, handle = %x, nSize = %d",handle,nSize);
+ if (ion_map(pCompPrv->ion_fd, handle, nSize, PROT_READ | PROT_WRITE, MAP_SHARED, 0,
+ &pIonMappedBuffer, &(pCompPrv->tBufList[currentBuffer].mmap_fd)) < 0)
+ {
+ DOMX_ERROR("userspace mapping of ION buffers returned error");
+ eError = OMX_ErrorInsufficientResources;
+ goto EXIT;
+ }
+ } else {
+ if (ion_share(pCompPrv->ion_fd, handle, &(pCompPrv->tBufList[currentBuffer].mmap_fd)) < 0) {
+ DOMX_ERROR("ion_share failed !!! \n");
+ goto EXIT;
+ } else {
+ DOMX_DEBUG("ion_share success pMemptr: 0x%x \n", pCompPrv->tBufList[currentBuffer].mmap_fd);
+ }
+ }
+ pMemptr = pCompPrv->tBufList[currentBuffer].mmap_fd;
DOMX_DEBUG ("Ion handle recieved = %x",handle);
if (eError != OMX_ErrorNone)
- return eError;
+ goto EXIT;
}
-#else
- else //Allocate 1D buffer
- {
- block.fmt = PIXEL_FMT_PAGE;
- block.dim.len = nSize;
- block.stride = 0;
-
- pMemptr = (OMX_U8*) MemMgr_Alloc(&block, 1);
- PROXY_assert((pMemptr != NULL), OMX_ErrorInsufficientResources,"MemMgr_Alloc returns NULL, abort,");
- }
#endif
- /*Pick up 1st empty slot */
- /*The same empty spot will be picked up by the subsequent
- Use buffer call to fill in the corresponding buffer
- Buffer header in the list */
-
- for (i = 0; i < pCompPrv->nTotalBuffers; i++)
- {
- if (pCompPrv->tBufList[i].pBufHeader == NULL)
- {
- currentBuffer = i;
- bSlotFound = OMX_TRUE;
- break;
- }
- }
-
- if (!bSlotFound)
- {
- currentBuffer = pCompPrv->nTotalBuffers;
- }
/*No need to increment Allocated buffers here.
It will be done in the subsequent use buffer call below*/
-
eError = PROXY_UseBuffer(hComponent, ppBufferHdr, nPortIndex, pAppPrivate, nSize, pMemptr);
if(eError != OMX_ErrorNone) {
DOMX_ERROR("PROXY_UseBuffer in PROXY_AllocateBuffer failed with error %d (0x%08x)", eError, eError);
#ifdef USE_ION
- ion_free(pCompPrv->ion_fd, pMemptr);
-#else
- MemMgr_Free(pMemptr);
+ ion_free(pCompPrv->ion_fd,
+ (struct ion_handle *)pCompPrv->tBufList[currentBuffer].pYBuffer);
+ close(pCompPrv->tBufList[currentBuffer].mmap_fd);
#endif
goto EXIT;
}
else {
+#ifndef USE_ION
pCompPrv->tBufList[currentBuffer].pYBuffer = pMemptr;
+#endif
}
#ifdef USE_ION
if (pCompPrv->bUseIon == OMX_TRUE && pCompPrv->bMapIonBuffers == OMX_TRUE)
{
DOMX_DEBUG("before mapping, handle = %x, nSize = %d",handle,nSize);
- if (ion_map(pCompPrv->ion_fd, handle, nSize, PROT_READ | PROT_WRITE, MAP_SHARED, 0,
- &((*ppBufferHdr)->pBuffer),
- &(pCompPrv->tBufList[currentBuffer].mmap_fd)) < 0)
- {
- DOMX_ERROR("userspace mapping of ION buffers returned error");
- return OMX_ErrorInsufficientResources;
- }
- //ion_free(pCompPrv->ion_fd, handleToMap);
+ (*ppBufferHdr)->pBuffer = pIonMappedBuffer;
+ } else {
+ (*ppBufferHdr)->pBuffer = (OMX_U8 *)handle;
}
#endif
@@ -953,6 +917,7 @@ static OMX_ERRORTYPE PROXY_UseBuffer(OMX_IN OMX_HANDLETYPE hComponent,
OMX_COMPONENTTYPE *hComp = (OMX_COMPONENTTYPE *) hComponent;
OMX_BOOL bSlotFound = OMX_FALSE;
OMX_PTR pAuxBuf0 = pBuffer;
+ OMX_PTR pMappedMetaDataBuffer = NULL;
OMX_TI_PARAM_METADATABUFFERINFO tMetaDataBuffer;
OMX_U32 nBufferHeight = 0;
OMX_CONFIG_RECTTYPE tParamRect;
@@ -960,19 +925,14 @@ static OMX_ERRORTYPE PROXY_UseBuffer(OMX_IN OMX_HANDLETYPE hComponent,
#ifdef USE_ION
OMX_PTR pMetadataBuffer = NULL;
-#else
- MemAllocBlock block;
#endif
PROXY_require((hComp->pComponentPrivate != NULL),
OMX_ErrorBadParameter, NULL);
+ PROXY_require(pBuffer != NULL, OMX_ErrorBadParameter, "Pointer to buffer is NULL");
PROXY_require(ppBufferHdr != NULL, OMX_ErrorBadParameter,
"Pointer to buffer header is NULL");
-#ifndef USE_ION
- memset(&block, 0, sizeof(MemAllocBlock));
-#endif
-
pCompPrv = (PROXY_COMPONENT_PRIVATE *) hComp->pComponentPrivate;
DOMX_ENTER
@@ -1050,56 +1010,19 @@ static OMX_ERRORTYPE PROXY_UseBuffer(OMX_IN OMX_HANDLETYPE hComponent,
((OMX_TI_PLATFORMPRIVATE *) pBufferHeader->pPlatformPrivate)->
pAuxBuf1 = (OMX_U8 *)(((IMG_native_handle_t*)pBuffer)->fd[1]);
}
-#if 0
- else
- {
- tParamRect.nSize = sizeof(OMX_CONFIG_RECTTYPE);
- tParamRect.nVersion.s.nVersionMajor = 1;
- tParamRect.nVersion.s.nVersionMinor = 1;
- tParamRect.nVersion.s.nRevision = 0;
- tParamRect.nVersion.s.nStep = 0;
- tParamRect.nPortIndex = nPortIndex;
-
- eError = PROXY_GetParameter(hComponent, (OMX_INDEXTYPE)OMX_TI_IndexParam2DBufferAllocDimension, &tParamRect);
- if(eError == OMX_ErrorNone)
- {
- nBufferHeight = tParamRect.nHeight;
- }
- else if(eError == OMX_ErrorUnsupportedIndex)
- {
- DOMX_ERROR("Component does not support OMX_TI_IndexParam2DBufferAllocDimension, \
- reverting to OMX_PARAM_PORTDEFINITIONTYPE");
- tParamPortDef.nSize = sizeof(OMX_PARAM_PORTDEFINITIONTYPE);
- tParamPortDef.nVersion.s.nVersionMajor = 1;
- tParamPortDef.nVersion.s.nVersionMinor = 1;
- tParamPortDef.nVersion.s.nRevision = 0;
- tParamPortDef.nVersion.s.nStep = 0;
- tParamPortDef.nPortIndex = nPortIndex;
-
- eError = PROXY_GetParameter(hComponent, OMX_IndexParamPortDefinition, &tParamPortDef);
- if(eError != OMX_ErrorNone)
- {
- DOMX_ERROR("PROXY_GetParameter returns err %d (0x%x)", eError, eError);
- return eError;
- }
- nBufferHeight = tParamPortDef.format.video.nFrameHeight;
- }
- if(eError != OMX_ErrorNone)
- {
- DOMX_ERROR("PROXY_GetParameter returns err %d (0x%x)", eError, eError);
- return eError;
- }
-
- ((OMX_TI_PLATFORMPRIVATE *) pBufferHeader->pPlatformPrivate)->
- pAuxBuf1 = (OMX_U8*) ((OMX_U32)pBuffer + (LINUX_PAGE_SIZE*nBufferHeight));
- }
-#endif
if(pCompPrv->proxyPortBuffers[nPortIndex].proxyBufferType == EncoderMetadataPointers)
{
((OMX_TI_PLATFORMPRIVATE *) pBufferHeader->pPlatformPrivate)->
pAuxBuf1 = NULL;
}
+ if(pCompPrv->proxyPortBuffers[nPortIndex].proxyBufferType == BufferDescriptorVirtual2D)
+ {
+ pAuxBuf0 = (OMX_U8 *)(((OMX_TI_BUFFERDESCRIPTOR_TYPE*)pBuffer)->pBuf[0]);
+
+ ((OMX_TI_PLATFORMPRIVATE *) pBufferHeader->pPlatformPrivate)->
+ pAuxBuf1 = (OMX_U8 *)(((OMX_TI_BUFFERDESCRIPTOR_TYPE*)pBuffer)->pBuf[1]);
+ }
}
/*Initializing Structure */
@@ -1118,26 +1041,59 @@ static OMX_ERRORTYPE PROXY_UseBuffer(OMX_IN OMX_HANDLETYPE hComponent,
if(tMetaDataBuffer.bIsMetaDataEnabledOnPort)
{
#ifdef USE_ION
- ((OMX_TI_PLATFORMPRIVATE *)pBufferHeader->pPlatformPrivate)->nMetaDataSize =
+ ((OMX_TI_PLATFORMPRIVATE *)pBufferHeader->pPlatformPrivate)->nMetaDataSize =
(tMetaDataBuffer.nMetaDataSize + LINUX_PAGE_SIZE - 1) & ~(LINUX_PAGE_SIZE -1);
eError = PROXY_AllocateBufferIonCarveout(pCompPrv, ((OMX_TI_PLATFORMPRIVATE *)pBufferHeader->pPlatformPrivate)->nMetaDataSize,
- &(((OMX_TI_PLATFORMPRIVATE *)pBufferHeader->pPlatformPrivate)->pMetaDataBuffer));
+ (struct ion_handle **)(&(((OMX_TI_PLATFORMPRIVATE *)pBufferHeader->pPlatformPrivate)->pMetaDataBuffer)));
pCompPrv->tBufList[currentBuffer].pMetaDataBuffer = ((OMX_TI_PLATFORMPRIVATE *)pBufferHeader->
pPlatformPrivate)->pMetaDataBuffer;
DOMX_DEBUG("Metadata buffer ion handle = %d",((OMX_TI_PLATFORMPRIVATE *)pBufferHeader->pPlatformPrivate)->pMetaDataBuffer);
-#else
- block.fmt = PIXEL_FMT_PAGE;
- block.dim.len = tMetaDataBuffer.nMetaDataSize;
- ((OMX_TI_PLATFORMPRIVATE *)pBufferHeader->
- pPlatformPrivate)->pMetaDataBuffer = MemMgr_Alloc(&block, 1);
- PROXY_assert(((OMX_TI_PLATFORMPRIVATE *)pBufferHeader->
- pPlatformPrivate)->pMetaDataBuffer != NULL,OMX_ErrorInsufficientResources,
- "MemMngr alloc call for allocating metadata buffers failed");
- ((OMX_TI_PLATFORMPRIVATE *)pBufferHeader->pPlatformPrivate)->nMetaDataSize = tMetaDataBuffer.nMetaDataSize;
- DOMX_DEBUG("Metadata buffer = %d",((OMX_TI_PLATFORMPRIVATE *)pBufferHeader->pPlatformPrivate)->pMetaDataBuffer);
+ if (pCompPrv->bMapIonBuffers == OMX_TRUE) {
+ if (ion_map(pCompPrv->ion_fd, pPlatformPrivate->pMetaDataBuffer,
+ pPlatformPrivate->nMetaDataSize,
+ PROT_READ | PROT_WRITE, MAP_SHARED, 0,
+ &pMetadataBuffer,
+ &(pCompPrv->tBufList[currentBuffer].mmap_fd_metadata_buff)) < 0)
+ {
+ DOMX_ERROR("userspace mapping of ION metadata buffers returned error");
+ eError = OMX_ErrorInsufficientResources;
+ goto EXIT;
+ }
+ }
#endif
}
+#ifdef USE_ION
+ {
+ // Need to register buffers when using ion and rpmsg
+ eRPCError = RPC_RegisterBuffer(pCompPrv->hRemoteComp, pAuxBuf0,
+ &pCompPrv->tBufList[currentBuffer].pRegisteredAufBux0,
+ &pCompPrv->tBufList[currentBuffer].pRegisteredAufBux1,
+ pCompPrv->proxyPortBuffers[nPortIndex].proxyBufferType);
+ PROXY_checkRpcError();
+ if (pCompPrv->tBufList[currentBuffer].pRegisteredAufBux0)
+ pAuxBuf0 = pCompPrv->tBufList[currentBuffer].pRegisteredAufBux0;
+ if (pCompPrv->tBufList[currentBuffer].pRegisteredAufBux1)
+ pPlatformPrivate->pAuxBuf1 = pCompPrv->tBufList[currentBuffer].pRegisteredAufBux1;
+
+ if (pPlatformPrivate->pMetaDataBuffer != NULL)
+ {
+ int fd = -1;
+ if (pCompPrv->bMapIonBuffers == OMX_TRUE) {
+ fd = pCompPrv->tBufList[currentBuffer].mmap_fd_metadata_buff;
+ } else {
+ ion_share (pCompPrv->ion_fd, pPlatformPrivate->pMetaDataBuffer, &fd);
+ pCompPrv->tBufList[currentBuffer].mmap_fd_metadata_buff = fd;
+ }
+ eRPCError = RPC_RegisterBuffer(pCompPrv->hRemoteComp, fd,
+ &pCompPrv->tBufList[currentBuffer].pRegisteredAufBux2, NULL, IONPointers);
+ PROXY_checkRpcError();
+ if (pCompPrv->tBufList[currentBuffer].pRegisteredAufBux2)
+ pPlatformPrivate->pMetaDataBuffer = pCompPrv->tBufList[currentBuffer].pRegisteredAufBux2;
+ }
+ }
+#endif
+
eRPCError = RPC_UseBuffer(pCompPrv->hRemoteComp, &pBufferHeader, nPortIndex,
pAppPrivate, nSizeBytes, pAuxBuf0, &pBufHeaderRemote, &eCompReturn);
@@ -1151,15 +1107,6 @@ static OMX_ERRORTYPE PROXY_UseBuffer(OMX_IN OMX_HANDLETYPE hComponent,
#ifdef USE_ION
if (pCompPrv->bUseIon == OMX_TRUE && pCompPrv->bMapIonBuffers == OMX_TRUE && tMetaDataBuffer.bIsMetaDataEnabledOnPort)
{
- DOMX_DEBUG("Metadata buffer ion handle given to ion map = %d",
- ((OMX_TI_PLATFORMPRIVATE *)pBufferHeader->pPlatformPrivate)->pMetaDataBuffer);
- if (ion_map(pCompPrv->ion_fd, ((OMX_TI_PLATFORMPRIVATE *)pBufferHeader->pPlatformPrivate)->pMetaDataBuffer,
- ((OMX_TI_PLATFORMPRIVATE *)pBufferHeader->pPlatformPrivate)->nMetaDataSize, PROT_READ | PROT_WRITE, MAP_SHARED, 0,
- &pMetadataBuffer,&(pCompPrv->tBufList[currentBuffer].mmap_fd_metadata_buff)) < 0)
- {
- DOMX_ERROR("userspace mapping of ION metadata buffers returned error");
- return OMX_ErrorInsufficientResources;
- }
((OMX_TI_PLATFORMPRIVATE *)pBufferHeader->pPlatformPrivate)->pMetaDataBuffer = pMetadataBuffer;
//ion_free(pCompPrv->ion_fd, handleToMap);
memset(((OMX_TI_PLATFORMPRIVATE *)pBufferHeader->pPlatformPrivate)->pMetaDataBuffer,
@@ -1213,6 +1160,8 @@ OMX_ERRORTYPE PROXY_FreeBuffer(OMX_IN OMX_HANDLETYPE hComponent,
OMX_U32 count = 0, nStride = 0;
OMX_U32 pBuffer = 0;
OMX_PTR pMetaDataBuffer = NULL;
+ OMX_PTR pAuxBuf0 = NULL;
+ OMX_TI_PLATFORMPRIVATE * pPlatformPrivate = NULL;
PROXY_require(pBufferHdr != NULL, OMX_ErrorBadParameter, NULL);
PROXY_require(hComp->pComponentPrivate != NULL, OMX_ErrorBadParameter,
@@ -1238,18 +1187,47 @@ OMX_ERRORTYPE PROXY_FreeBuffer(OMX_IN OMX_HANDLETYPE hComponent,
OMX_ErrorBadParameter,
"Could not find the mapped address in component private buffer list");
- pBuffer = pBufferHdr->pBuffer;
+ pBuffer = (OMX_U32)pBufferHdr->pBuffer;
+ pAuxBuf0 = (OMX_PTR) pBuffer;
+
+#ifdef ENABLE_GRALLOC_BUFFERS
+ if (pCompPrv->proxyPortBuffers[nPortIndex].proxyBufferType == GrallocPointers)
+ {
+ //Extracting buffer pointer from the gralloc buffer
+ pAuxBuf0 = (OMX_U8 *)(((IMG_native_handle_t*)pBuffer)->fd[0]);
+ }
+#endif
+
+ /*To find whether buffer is 2D or 1D */
+ /*Not having asserts from this point since even if error occurs during
+ unmapping/freeing, still trying to clean up as much as possible */
+ eError =
+ RPC_UTIL_GetStride(pCompPrv->hRemoteComp, nPortIndex, &nStride);
+ if (eError == OMX_ErrorNone && nStride == LINUX_PAGE_SIZE)
+ {
+ if (pCompPrv->proxyPortBuffers[nPortIndex].proxyBufferType == BufferDescriptorVirtual2D)
+ {
+ pAuxBuf0 = (OMX_U8 *)(((OMX_TI_BUFFERDESCRIPTOR_TYPE*)pBuffer)->pBuf[0]);
+ }
+ }
+
/*Not having asserts from this point since even if error occurs during
unmapping/freeing, still trying to clean up as much as possible */
+ if (pCompPrv->tBufList[count].pRegisteredAufBux0 != NULL)
+ pAuxBuf0 = pCompPrv->tBufList[count].pRegisteredAufBux0;
+
eRPCError =
RPC_FreeBuffer(pCompPrv->hRemoteComp, nPortIndex,
- pCompPrv->tBufList[count].pBufHeaderRemote, pBuffer,
+ pCompPrv->tBufList[count].pBufHeaderRemote, (OMX_U32) pAuxBuf0,
&eCompReturn);
if (eRPCError != RPC_OMX_ErrorNone)
eTmpRPCError = eRPCError;
+ pPlatformPrivate = (OMX_TI_PLATFORMPRIVATE *)(pCompPrv->tBufList[count].pBufHeader)->
+ pPlatformPrivate;
+
if (pCompPrv->tBufList[count].pBufHeader)
{
#ifdef ALLOCATE_TILER_BUFFER_IN_PROXY
@@ -1258,7 +1236,7 @@ OMX_ERRORTYPE PROXY_FreeBuffer(OMX_IN OMX_HANDLETYPE hComponent,
{
if (pCompPrv->bUseIon == OMX_TRUE)
{
- if(pCompPrv->bMapIonBuffers == OMX_TRUE)
+ if(pCompPrv->bMapIonBuffers == OMX_TRUE && pBufferHdr->pBuffer)
{
munmap(pBufferHdr->pBuffer, pBufferHdr->nAllocLen);
close(pCompPrv->tBufList[count].mmap_fd);
@@ -1267,16 +1245,9 @@ OMX_ERRORTYPE PROXY_FreeBuffer(OMX_IN OMX_HANDLETYPE hComponent,
pCompPrv->tBufList[count].pYBuffer = NULL;
}
}
-#else
- if(pCompPrv->tBufList[count].pYBuffer)
- {
- MemMgr_Free(pCompPrv->tBufList[count].pYBuffer);
- pCompPrv->tBufList[count].pYBuffer = NULL;
- }
#endif
#endif
- pMetaDataBuffer = ((OMX_TI_PLATFORMPRIVATE *)(pCompPrv->tBufList[count].pBufHeader)->
- pPlatformPrivate)->pMetaDataBuffer;
+ pMetaDataBuffer = pPlatformPrivate->pMetaDataBuffer;
if (pMetaDataBuffer)
{
#ifdef USE_ION
@@ -1284,18 +1255,45 @@ OMX_ERRORTYPE PROXY_FreeBuffer(OMX_IN OMX_HANDLETYPE hComponent,
{
if(pCompPrv->bMapIonBuffers == OMX_TRUE)
{
- munmap(pMetaDataBuffer, ((OMX_TI_PLATFORMPRIVATE *)(pCompPrv->tBufList[count].pBufHeader)->
- pPlatformPrivate)->nMetaDataSize);
- close(pCompPrv->tBufList[count].mmap_fd_metadata_buff);
+ munmap(pMetaDataBuffer, pPlatformPrivate->nMetaDataSize);
}
+ close(pCompPrv->tBufList[count].mmap_fd_metadata_buff);
ion_free(pCompPrv->ion_fd, pCompPrv->tBufList[count].pMetaDataBuffer);
- ((OMX_TI_PLATFORMPRIVATE *)(pCompPrv->tBufList[count].pBufHeader)->
- pPlatformPrivate)->pMetaDataBuffer = NULL;
+ pPlatformPrivate->pMetaDataBuffer = NULL;
}
-#else
- MemMgr_Free(pMetaDataBuffer);
#endif
}
+
+#ifdef USE_ION
+ {
+ // Need to unregister buffers when using ion and rpmsg
+ if (pCompPrv->tBufList[count].pRegisteredAufBux0 != NULL)
+ {
+ eTmpRPCError = RPC_UnRegisterBuffer(pCompPrv->hRemoteComp,
+ pCompPrv->tBufList[count].pRegisteredAufBux0);
+ if (eTmpRPCError != RPC_OMX_ErrorNone) {
+ eRPCError = eTmpRPCError;
+ }
+ }
+
+ if (pCompPrv->tBufList[count].pRegisteredAufBux1 != NULL)
+ {
+ eTmpRPCError = RPC_UnRegisterBuffer(pCompPrv->hRemoteComp,
+ pCompPrv->tBufList[count].pRegisteredAufBux1);
+ if (eTmpRPCError != RPC_OMX_ErrorNone) {
+ eRPCError = eTmpRPCError;
+ }
+ }
+ if (pCompPrv->tBufList[count].pRegisteredAufBux2 != NULL)
+ {
+ eTmpRPCError = RPC_UnRegisterBuffer(pCompPrv->hRemoteComp,
+ pCompPrv->tBufList[count].pRegisteredAufBux2);
+ if (eTmpRPCError != RPC_OMX_ErrorNone) {
+ eRPCError = eTmpRPCError;
+ }
+ }
+ }
+#endif
if (pCompPrv->tBufList[count].pBufHeader->pPlatformPrivate)
{
TIMM_OSAL_Free(pCompPrv->tBufList[count].pBufHeader->
@@ -1327,15 +1325,20 @@ OMX_ERRORTYPE PROXY_FreeBuffer(OMX_IN OMX_HANDLETYPE hComponent,
/* ===========================================================================*/
OMX_ERRORTYPE __PROXY_SetParameter(OMX_IN OMX_HANDLETYPE hComponent,
OMX_IN OMX_INDEXTYPE nParamIndex, OMX_IN OMX_PTR pParamStruct,
- OMX_PTR pLocBufNeedMap)
+ OMX_PTR pLocBufNeedMap, OMX_U32 nNumOfLocalBuf)
{
OMX_ERRORTYPE eError = OMX_ErrorNone, eCompReturn = OMX_ErrorNone;
RPC_OMX_ERRORTYPE eRPCError = RPC_OMX_ErrorNone;
PROXY_COMPONENT_PRIVATE *pCompPrv = NULL;
OMX_COMPONENTTYPE *hComp = (OMX_COMPONENTTYPE *) hComponent;
+ OMX_TI_PARAM_USEBUFFERDESCRIPTOR *ptBufDescParam = NULL;
#ifdef ENABLE_GRALLOC_BUFFERS
OMX_TI_PARAMUSENATIVEBUFFER *pParamNativeBuffer = NULL;
#endif
+#ifdef USE_ION
+ OMX_PTR *pAuxBuf = pLocBufNeedMap;
+ OMX_PTR pRegistered = NULL;
+#endif
PROXY_require((pParamStruct != NULL), OMX_ErrorBadParameter, NULL);
PROXY_require((hComp->pComponentPrivate != NULL),
@@ -1346,9 +1349,10 @@ OMX_ERRORTYPE __PROXY_SetParameter(OMX_IN OMX_HANDLETYPE hComponent,
DOMX_ENTER
("hComponent = %p, pCompPrv = %p, nParamIndex = %d, pParamStruct = %p",
hComponent, pCompPrv, nParamIndex, pParamStruct);
-#ifdef ENABLE_GRALLOC_BUFFERS
+
switch(nParamIndex)
{
+#ifdef ENABLE_GRALLOC_BUFFERS
case OMX_TI_IndexUseNativeBuffers:
{
//Add check version.
@@ -1357,19 +1361,62 @@ OMX_ERRORTYPE __PROXY_SetParameter(OMX_IN OMX_HANDLETYPE hComponent,
{
pCompPrv->proxyPortBuffers[pParamNativeBuffer->nPortIndex].proxyBufferType = GrallocPointers;
pCompPrv->proxyPortBuffers[pParamNativeBuffer->nPortIndex].IsBuffer2D = OMX_TRUE;
+ } else
+ {
+ /* Reset to defaults */
+ pCompPrv->proxyPortBuffers[pParamNativeBuffer->nPortIndex].proxyBufferType = VirtualPointers;
+ pCompPrv->proxyPortBuffers[pParamNativeBuffer->nPortIndex].IsBuffer2D = OMX_FALSE;
}
+
break;
}
+#endif
+ case OMX_TI_IndexUseBufferDescriptor:
+ ptBufDescParam = (OMX_TI_PARAM_USEBUFFERDESCRIPTOR *) pParamStruct;
+ if(ptBufDescParam->bEnabled == OMX_TRUE)
+ {
+ if(ptBufDescParam->eBufferType == OMX_TI_BufferTypeVirtual2D)
+ {
+ pCompPrv->proxyPortBuffers[ptBufDescParam->nPortIndex].proxyBufferType = BufferDescriptorVirtual2D;
+ pCompPrv->proxyPortBuffers[ptBufDescParam->nPortIndex].IsBuffer2D = OMX_TRUE;
+ }
+ }
+ else if(ptBufDescParam->bEnabled == OMX_FALSE)
+ {
+ /* Reset to defaults*/
+ pCompPrv->proxyPortBuffers[ptBufDescParam->nPortIndex].proxyBufferType = VirtualPointers;
+ pCompPrv->proxyPortBuffers[ptBufDescParam->nPortIndex].IsBuffer2D = OMX_FALSE;
+ }
+ eRPCError =
+ RPC_SetParameter(pCompPrv->hRemoteComp, nParamIndex, pParamStruct,
+ pLocBufNeedMap, nNumOfLocalBuf, &eCompReturn);
+ break;
default:
+ {
+#ifdef USE_ION
+ if (pAuxBuf != NULL) {
+ int fd = *((int*)pAuxBuf);
+ if (fd > -1) {
+ eRPCError = RPC_RegisterBuffer(pCompPrv->hRemoteComp, *((int*)pAuxBuf),
+ &pRegistered, NULL, IONPointers);
+ PROXY_checkRpcError();
+ if (pRegistered)
+ *pAuxBuf = pRegistered;
+ }
+ }
+#endif
eRPCError =
RPC_SetParameter(pCompPrv->hRemoteComp, nParamIndex, pParamStruct,
- pLocBufNeedMap, &eCompReturn);
- }
-#else
- eRPCError =
- RPC_SetParameter(pCompPrv->hRemoteComp, nParamIndex, pParamStruct,
- pLocBufNeedMap, &eCompReturn);
+ pLocBufNeedMap, nNumOfLocalBuf, &eCompReturn);
+#ifdef USE_ION
+ PROXY_checkRpcError();
+ if (pRegistered != NULL) {
+ eRPCError = RPC_UnRegisterBuffer(pCompPrv->hRemoteComp, pRegistered);
+ PROXY_checkRpcError();
+ }
#endif
+ }
+ }
PROXY_checkRpcError();
@@ -1391,7 +1438,7 @@ OMX_ERRORTYPE __PROXY_SetParameter(OMX_IN OMX_HANDLETYPE hComponent,
OMX_ERRORTYPE PROXY_SetParameter(OMX_IN OMX_HANDLETYPE hComponent,
OMX_IN OMX_INDEXTYPE nParamIndex, OMX_IN OMX_PTR pParamStruct)
{
- return __PROXY_SetParameter(hComponent, nParamIndex, pParamStruct, NULL);
+ return __PROXY_SetParameter(hComponent, nParamIndex, pParamStruct, NULL, 0);
}
@@ -1413,6 +1460,11 @@ OMX_ERRORTYPE __PROXY_GetParameter(OMX_IN OMX_HANDLETYPE hComponent,
RPC_OMX_ERRORTYPE eRPCError = RPC_OMX_ErrorNone;
PROXY_COMPONENT_PRIVATE *pCompPrv = NULL;
OMX_COMPONENTTYPE *hComp = (OMX_COMPONENTTYPE *) hComponent;
+ OMX_TI_PARAM_USEBUFFERDESCRIPTOR *ptBufDescParam = NULL;
+#ifdef USE_ION
+ OMX_PTR *pAuxBuf = pLocBufNeedMap;
+ OMX_PTR pRegistered = NULL;
+#endif
PROXY_require((pParamStruct != NULL), OMX_ErrorBadParameter, NULL);
PROXY_assert((hComp->pComponentPrivate != NULL),
@@ -1424,9 +1476,47 @@ OMX_ERRORTYPE __PROXY_GetParameter(OMX_IN OMX_HANDLETYPE hComponent,
("hComponent = %p, pCompPrv = %p, nParamIndex = %d, pParamStruct = %p",
hComponent, pCompPrv, nParamIndex, pParamStruct);
- eRPCError =
- RPC_GetParameter(pCompPrv->hRemoteComp, nParamIndex, pParamStruct,
+ switch(nParamIndex)
+ {
+ case OMX_TI_IndexUseBufferDescriptor:
+ ptBufDescParam = (OMX_TI_PARAM_USEBUFFERDESCRIPTOR *) pParamStruct;
+ if(pCompPrv->proxyPortBuffers[ptBufDescParam->nPortIndex].proxyBufferType == BufferDescriptorVirtual2D)
+ {
+ ptBufDescParam->bEnabled = OMX_TRUE;
+ ptBufDescParam->eBufferType = OMX_TI_BufferTypeVirtual2D;
+ }
+ else
+ {
+ ptBufDescParam->bEnabled = OMX_FALSE;
+ ptBufDescParam->eBufferType = OMX_TI_BufferTypeMax;
+ }
+ break;
+
+ default:
+ {
+#ifdef USE_ION
+ if (pAuxBuf != NULL) {
+ int fd = *((int*)pAuxBuf);
+ if (fd > -1) {
+ eRPCError = RPC_RegisterBuffer(pCompPrv->hRemoteComp, *((int*)pAuxBuf),
+ &pRegistered, NULL, IONPointers);
+ PROXY_checkRpcError();
+ if (pRegistered)
+ *pAuxBuf = pRegistered;
+ }
+ }
+#endif
+ eRPCError = RPC_GetParameter(pCompPrv->hRemoteComp, nParamIndex, pParamStruct,
pLocBufNeedMap, &eCompReturn);
+#ifdef USE_ION
+ PROXY_checkRpcError();
+ if (pRegistered != NULL) {
+ eRPCError = RPC_UnRegisterBuffer(pCompPrv->hRemoteComp, pRegistered);
+ PROXY_checkRpcError();
+ }
+#endif
+ }
+ }
PROXY_checkRpcError();
@@ -1464,6 +1554,10 @@ OMX_ERRORTYPE PROXY_GetParameter(OMX_IN OMX_HANDLETYPE hComponent,
OMX_ERRORTYPE __PROXY_GetConfig(OMX_HANDLETYPE hComponent,
OMX_INDEXTYPE nConfigIndex, OMX_PTR pConfigStruct, OMX_PTR pLocBufNeedMap)
{
+#ifdef USE_ION
+ OMX_PTR *pAuxBuf = pLocBufNeedMap;
+ OMX_PTR pRegistered = NULL;
+#endif
OMX_ERRORTYPE eError = OMX_ErrorNone, eCompReturn = OMX_ErrorNone;
RPC_OMX_ERRORTYPE eRPCError = RPC_OMX_ErrorNone;
@@ -1481,9 +1575,29 @@ OMX_ERRORTYPE __PROXY_GetConfig(OMX_HANDLETYPE hComponent,
hComponent, pCompPrv, nConfigIndex,
pConfigStruct);
+#ifdef USE_ION
+ if (pAuxBuf != NULL) {
+ int fd = *((int*)pAuxBuf);
+ if (fd > -1) {
+ eRPCError = RPC_RegisterBuffer(pCompPrv->hRemoteComp, *((int*)pAuxBuf),
+ &pRegistered, NULL, IONPointers);
+ PROXY_checkRpcError();
+ if (pRegistered)
+ *pAuxBuf = pRegistered;
+ }
+ }
+#endif
+
eRPCError =
RPC_GetConfig(pCompPrv->hRemoteComp, nConfigIndex, pConfigStruct,
pLocBufNeedMap, &eCompReturn);
+#ifdef USE_ION
+ PROXY_checkRpcError();
+ if (pRegistered != NULL) {
+ eRPCError = RPC_UnRegisterBuffer(pCompPrv->hRemoteComp, pRegistered);
+ PROXY_checkRpcError();
+ }
+#endif
PROXY_checkRpcError();
@@ -1522,6 +1636,10 @@ OMX_ERRORTYPE __PROXY_SetConfig(OMX_IN OMX_HANDLETYPE hComponent,
OMX_IN OMX_INDEXTYPE nConfigIndex, OMX_IN OMX_PTR pConfigStruct,
OMX_PTR pLocBufNeedMap)
{
+#ifdef USE_ION
+ OMX_PTR *pAuxBuf = pLocBufNeedMap;
+ OMX_PTR pRegistered = NULL;
+#endif
OMX_ERRORTYPE eError = OMX_ErrorNone, eCompReturn = OMX_ErrorNone;
RPC_OMX_ERRORTYPE eRPCError = RPC_OMX_ErrorNone;
@@ -1540,10 +1658,31 @@ OMX_ERRORTYPE __PROXY_SetConfig(OMX_IN OMX_HANDLETYPE hComponent,
hComponent, pCompPrv, nConfigIndex,
pConfigStruct);
+#ifdef USE_ION
+ if (pAuxBuf != NULL) {
+ int fd = *((int*)pAuxBuf);
+ if (fd > -1) {
+ eRPCError = RPC_RegisterBuffer(pCompPrv->hRemoteComp, *((int*)pAuxBuf),
+ &pRegistered, NULL, IONPointers);
+ PROXY_checkRpcError();
+ if (pRegistered)
+ *pAuxBuf = pRegistered;
+ }
+ }
+#endif
+
eRPCError =
RPC_SetConfig(pCompPrv->hRemoteComp, nConfigIndex, pConfigStruct,
pLocBufNeedMap, &eCompReturn);
+#ifdef USE_ION
+ PROXY_checkRpcError();
+ if (pRegistered != NULL) {
+ eRPCError = RPC_UnRegisterBuffer(pCompPrv->hRemoteComp, pRegistered);
+ PROXY_checkRpcError();
+ }
+#endif
+
PROXY_checkRpcError();
EXIT:
@@ -1596,11 +1735,17 @@ static OMX_ERRORTYPE PROXY_GetState(OMX_IN OMX_HANDLETYPE hComponent,
eRPCError = RPC_GetState(pCompPrv->hRemoteComp, pState, &eCompReturn);
- DOMX_DEBUG("Returned from RPC_GetState, state: ", *pState);
+	DOMX_DEBUG("Returned from RPC_GetState, state = 0x%x", *pState);
PROXY_checkRpcError();
EXIT:
+ if (eError == OMX_ErrorHardware)
+ {
+ *pState = OMX_StateInvalid;
+ eError = OMX_ErrorNone;
+ DOMX_DEBUG("Invalid state returned from RPC_GetState, state due to ducati in faulty state");
+ }
DOMX_EXIT("eError: %d", eError);
return eError;
}
@@ -1867,7 +2012,33 @@ static OMX_ERRORTYPE PROXY_ComponentTunnelRequest(OMX_IN OMX_HANDLETYPE
DOMX_ENTER("hComponent = %p", hComponent);
DOMX_DEBUG(" EMPTY IMPLEMENTATION ");
+ PROXY_COMPONENT_PRIVATE *pOutCompPrv = NULL;
+ PROXY_COMPONENT_PRIVATE *pInCompPrv = NULL;
+ OMX_COMPONENTTYPE *hOutComp = hComponent;
+ OMX_COMPONENTTYPE *hInComp = hTunneledComp;
+ OMX_ERRORTYPE eCompReturn = OMX_ErrorNone;
+ RPC_OMX_ERRORTYPE eRPCError = RPC_OMX_ErrorNone;
+ PROXY_assert((hOutComp->pComponentPrivate != NULL),
+ OMX_ErrorBadParameter, NULL);
+ PROXY_assert((hInComp->pComponentPrivate != NULL),
+ OMX_ErrorBadParameter, NULL);
+
+ //TBD
+ //PROXY_assert(nPort != 1, OMX_ErrorBadParameter, NULL);
+ //PROXY_assert(nTunnelPort != 0, OMX_ErrorBadParameter, NULL);
+ pOutCompPrv = (PROXY_COMPONENT_PRIVATE *) hOutComp->pComponentPrivate;
+ pInCompPrv = (PROXY_COMPONENT_PRIVATE *) hInComp->pComponentPrivate;
+ DOMX_ENTER("hOutComp=%p, pOutCompPrv=%p, hInComp=%p, pInCompPrv=%p, nOutPort=%d, nInPort=%d \n",
+ hOutComp, pOutCompPrv, hInComp, pInCompPrv, nPort, nTunneledPort);
+
+ DOMX_INFO("PROXY_ComponentTunnelRequest:: hOutComp=%p, pOutCompPrv=%p, hInComp=%p, pInCompPrv=%p, nOutPort=%d, nInPort=%d \n ",
+ hOutComp, pOutCompPrv, hInComp, pInCompPrv, nPort, nTunneledPort);
+ eRPCError = RPC_ComponentTunnelRequest(pOutCompPrv->hRemoteComp, nPort,
+ pInCompPrv->hRemoteComp, nTunneledPort, pTunnelSetup, &eCompReturn);
+ DOMX_INFO("\nafter: RPC_ComponentTunnelRequest = 0x%x\n ", eRPCError);
+ PROXY_checkRpcError();
+EXIT:
DOMX_EXIT("eError: %d", eError);
return eError;
}
@@ -1976,8 +2147,6 @@ OMX_ERRORTYPE PROXY_ComponentDeInit(OMX_HANDLETYPE hComponent)
pCompPrv->tBufList[count].pYBuffer = NULL;
}
}
-#else
- MemMgr_Free(pCompPrv->tBufList[count].pYBuffer);
#endif
}
#endif
@@ -1992,14 +2161,12 @@ OMX_ERRORTYPE PROXY_ComponentDeInit(OMX_HANDLETYPE hComponent)
{
munmap(pMetaDataBuffer, ((OMX_TI_PLATFORMPRIVATE *)(pCompPrv->tBufList[count].pBufHeader)->
pPlatformPrivate)->nMetaDataSize);
- close(pCompPrv->tBufList[count].mmap_fd_metadata_buff);
}
+ close(pCompPrv->tBufList[count].mmap_fd_metadata_buff);
ion_free(pCompPrv->ion_fd, pMetaDataBuffer);
((OMX_TI_PLATFORMPRIVATE *)(pCompPrv->tBufList[count].pBufHeader)->
pPlatformPrivate)->pMetaDataBuffer = NULL;
}
-#else
- MemMgr_Free(pMetaDataBuffer);
#endif
}
if (pCompPrv->tBufList[count].pBufHeader->pPlatformPrivate)
@@ -2013,6 +2180,8 @@ OMX_ERRORTYPE PROXY_ComponentDeInit(OMX_HANDLETYPE hComponent)
}
}
+ KPI_OmxCompDeinit(hComponent);
+
eRPCError = RPC_FreeHandle(pCompPrv->hRemoteComp, &eCompReturn);
if (eRPCError != RPC_OMX_ErrorNone)
eTmpRPCError = eRPCError;
@@ -2061,6 +2230,8 @@ OMX_ERRORTYPE OMX_ProxyCommonInit(OMX_HANDLETYPE hComponent)
DOMX_ENTER("hComponent = %p", hComponent);
+ TIMM_OSAL_UpdateTraceLevel();
+
PROXY_require((hComp->pComponentPrivate != NULL),
OMX_ErrorBadParameter, NULL);
@@ -2123,6 +2294,8 @@ OMX_ERRORTYPE OMX_ProxyCommonInit(OMX_HANDLETYPE hComponent)
}
#endif
+ KPI_OmxCompInit(hComponent);
+
EXIT:
if (eError != OMX_ErrorNone)
RPC_InstanceDeInit(hRemoteComp);
@@ -2148,7 +2321,7 @@ OMX_ERRORTYPE RPC_UTIL_GetStride(OMX_COMPONENTTYPE * hRemoteComp,
{
OMX_ERRORTYPE eError = OMX_ErrorNone, eCompReturn = OMX_ErrorNone;
RPC_OMX_ERRORTYPE eRPCError = RPC_OMX_ErrorNone;
- OMX_PARAM_PORTDEFINITIONTYPE sPortDef = { 0 };
+ OMX_PARAM_PORTDEFINITIONTYPE sPortDef;
/*Initializing Structure */
sPortDef.nSize = sizeof(OMX_PARAM_PORTDEFINITIONTYPE);
@@ -2324,350 +2497,6 @@ OMX_ERRORTYPE RPC_UTIL_GetNumLines(OMX_COMPONENTTYPE * hRemoteComp,
}
-
-#if 0
-
-OMX_ERRORTYPE RPC_PrepareBuffer_Chiron(PROXY_COMPONENT_PRIVATE * pCompPrv,
- OMX_COMPONENTTYPE * hRemoteComp, OMX_U32 nPortIndex, OMX_U32 nSizeBytes,
- OMX_BUFFERHEADERTYPE * pDucBuf, OMX_BUFFERHEADERTYPE * pChironBuf)
-{
- OMX_ERRORTYPE eError = OMX_ErrorNone;
- OMX_U32 nNumOfLines = 1;
- OMX_U8 *pBuffer;
-
- DSPtr dsptr[2];
- bytes_t lengths[2];
- OMX_U32 i = 0;
- OMX_U32 numBlocks = 0;
-
- pBuffer = pDucBuf->pBuffer;
-
- DOMX_ENTER("");
-
- if (((OMX_TI_PLATFORMPRIVATE *) pDucBuf->pPlatformPrivate)->
- pAuxBuf1 == NULL)
- {
- DOMX_DEBUG("One component buffer");
-
- if (!(pCompPrv->nNumOfLines[nPortIndex]))
- {
- pCompPrv->nNumOfLines[nPortIndex] = 1;
- }
-
- dsptr[0] = (OMX_U32) pBuffer;
- numBlocks = 1;
- lengths[0] =
- LINUX_PAGE_SIZE * ((nSizeBytes + (LINUX_PAGE_SIZE -
- 1)) / LINUX_PAGE_SIZE);
- } else
- {
- DOMX_DEBUG("Two component buffers");
- dsptr[0] = (OMX_U32) pBuffer;
- dsptr[1] =
- (OMX_U32) (((OMX_TI_PLATFORMPRIVATE *)
- pDucBuf->pPlatformPrivate)->pAuxBuf1);
-
- if (!(pCompPrv->nNumOfLines[nPortIndex]))
- {
- eError =
- RPC_UTIL_GetNumLines(hRemoteComp, nPortIndex,
- &nNumOfLines);
- PROXY_assert((eError == OMX_ErrorNone),
- OMX_ErrorUndefined,
- "ERROR WHILE GETTING FRAME HEIGHT");
-
- pCompPrv->nNumOfLines[nPortIndex] = nNumOfLines;
- } else
- {
- nNumOfLines = pCompPrv->nNumOfLines[nPortIndex];
- }
-
- lengths[0] = nNumOfLines * LINUX_PAGE_SIZE;
- lengths[1] = nNumOfLines / 2 * LINUX_PAGE_SIZE;
- numBlocks = 2;
- }
-
- //Map back to chiron
- DOMX_DEBUG("NumBlocks = %d", numBlocks);
- for (i = 0; i < numBlocks; i++)
- {
- DOMX_DEBUG("dsptr[%d] = %p", i, dsptr[i]);
- DOMX_DEBUG("length[%d] = %d", i, lengths[i]);
- }
-
- pDucBuf->pBuffer =
- tiler_assisted_phase1_D2CReMap(numBlocks, dsptr, lengths);
- PROXY_assert((pDucBuf->pBuffer != NULL), OMX_ErrorUndefined,
- "Mapping to Chiron failed");
-
- EXIT:
- DOMX_EXIT("eError: %d", eError);
- return eError;
-}
-
-
-//Takes chiron buffer buffer header and updates with ducati buffer ptr and UV ptr
-OMX_ERRORTYPE RPC_PrepareBuffer_Remote(PROXY_COMPONENT_PRIVATE * pCompPrv,
- OMX_COMPONENTTYPE * hRemoteComp, OMX_U32 nPortIndex,
- OMX_U32 nSizeBytes, OMX_BUFFERHEADERTYPE * pChironBuf,
- OMX_BUFFERHEADERTYPE * pDucBuf, OMX_PTR pBufToBeMapped)
-{
- OMX_ERRORTYPE eError = OMX_ErrorNone;
- OMX_U32 nNumOfLines = 1;
- OMX_U8 *pBuffer;
-
- DOMX_ENTER("");
-
- pBuffer = pChironBuf->pBuffer;
-
- if (!MemMgr_Is2DBlock(pBuffer))
- {
-
- if (!(pCompPrv->nNumOfLines[nPortIndex]))
- {
- pCompPrv->nNumOfLines[nPortIndex] = 1;
- }
-
- pChironBuf->pBuffer = NULL;
- eError =
- RPC_MapBuffer_Ducati(pBuffer, nSizeBytes, nNumOfLines,
- &(pChironBuf->pBuffer), pBufToBeMapped);
- PROXY_assert(eError == OMX_ErrorNone, eError, "Map failed");
- } else
- {
- if (!(pCompPrv->nNumOfLines[nPortIndex]))
- {
- eError =
- RPC_UTIL_GetNumLines(hRemoteComp, nPortIndex,
- &nNumOfLines);
- PROXY_assert((eError == OMX_ErrorNone), eError,
- "ERROR WHILE GETTING FRAME HEIGHT");
-
- pCompPrv->nNumOfLines[nPortIndex] = nNumOfLines;
- } else
- {
- nNumOfLines = pCompPrv->nNumOfLines[nPortIndex];
- }
-
- pChironBuf->pBuffer = NULL;
- ((OMX_TI_PLATFORMPRIVATE *) (pChironBuf->pPlatformPrivate))->
- pAuxBuf1 = NULL;
-
- eError =
- RPC_MapBuffer_Ducati(pBuffer, LINUX_PAGE_SIZE,
- nNumOfLines, &(pChironBuf->pBuffer), pBufToBeMapped);
- PROXY_assert(eError == OMX_ErrorNone, eError, "Map failed");
- eError =
- RPC_MapBuffer_Ducati((OMX_U8 *) ((OMX_U32) pBuffer +
- nNumOfLines * LINUX_PAGE_SIZE), LINUX_PAGE_SIZE,
- nNumOfLines / 2,
- (OMX_U8 **) (&((OMX_TI_PLATFORMPRIVATE
- *) (pChironBuf->pPlatformPrivate))->pAuxBuf1),
- pBufToBeMapped);
- PROXY_assert(eError == OMX_ErrorNone, eError, "Map failed");
- *(OMX_U32 *) pBufToBeMapped = (OMX_U32) pBuffer;
- }
-
- EXIT:
- DOMX_EXIT("eError: %d", eError);
- return eError;
-}
-
-
-/* ===========================================================================*/
-/**
- * @name RPC_MapBuffer_Ducati()
- * @brief
- * @param void
- * @return OMX_ErrorNone = Successful
- * @sa TBD
- *
- */
-/* ===========================================================================*/
-OMX_ERRORTYPE RPC_MapBuffer_Ducati(OMX_U8 * pBuf, OMX_U32 nBufLineSize,
- OMX_U32 nBufLines, OMX_U8 ** pMappedBuf, OMX_PTR pBufToBeMapped)
-{
- ProcMgr_MapType mapType;
- SyslinkMemUtils_MpuAddrToMap MpuAddr_list_1D = { 0 };
- MemAllocBlock block = { 0 };
- OMX_S32 status;
- OMX_U32 nDiff = 0;
- OMX_ERRORTYPE eError = OMX_ErrorNone;
-
- DOMX_ENTER("");
-
- *(OMX_U32 *) pBufToBeMapped = (OMX_U32) pBuf;
-
- if (!MemMgr_IsMapped(pBuf) && (nBufLines == 1))
- {
- DOMX_DEBUG
- ("Buffer is not mapped: Mapping as 1D buffer now..");
- block.fmt = PIXEL_FMT_PAGE;
- block.ptr = (OMX_PTR) (((OMX_U32) pBuf / LINUX_PAGE_SIZE) *
- LINUX_PAGE_SIZE);
- block.dim.len = (OMX_U32) ((((OMX_U32) pBuf + nBufLineSize +
- LINUX_PAGE_SIZE - 1) / LINUX_PAGE_SIZE) *
- LINUX_PAGE_SIZE) - (OMX_U32) block.ptr;
- block.stride = 0;
- nDiff = (OMX_U32) pBuf - (OMX_U32) block.ptr;
-
- (*(OMX_U32 *) (pBufToBeMapped)) =
- (OMX_U32) (MemMgr_Map(&block, 1));
- PROXY_assert(*(OMX_U32 *) pBufToBeMapped != 0,
- OMX_ErrorInsufficientResources,
- "Map to TILER space failed");
- //*pMappedBuf = MemMgr_Map(&block, 1);
- }
-
- if (MemMgr_IsMapped((OMX_PTR) (*(OMX_U32 *) pBufToBeMapped)))
- {
- //If Tiler 1D buffer, get corresponding ducati address and send out buffer to ducati
- //For 2D buffers, in phase1, retrive the ducati address (SSPtrs) for Y and UV buffers
- //and send out buffer to ducati
- mapType = ProcMgr_MapType_Tiler;
- MpuAddr_list_1D.mpuAddr =
- (*(OMX_U32 *) pBufToBeMapped) + nDiff;
- MpuAddr_list_1D.size = nBufLineSize * nBufLines;
-
- status =
- SysLinkMemUtils_map(&MpuAddr_list_1D, 1,
- (UInt32 *) pMappedBuf, mapType, PROC_APPM3);
- PROXY_assert(status >= 0, OMX_ErrorInsufficientResources,
- "Syslink map failed");
- }
-
- EXIT:
- DOMX_EXIT("eError: %d", eError);
- return eError;
-}
-
-
-
-/* ===========================================================================*/
-/**
- * @name RPC_UnMapBuffer_Ducati()
- * @brief
- * @param
- * @return
- * @sa
- *
- */
-/* ===========================================================================*/
-OMX_ERRORTYPE RPC_UnMapBuffer_Ducati(OMX_PTR pBuffer)
-{
- OMX_U32 status = 0;
- OMX_ERRORTYPE eError = OMX_ErrorNone;
-
- DOMX_ENTER("");
-
- status = MemMgr_UnMap(pBuffer);
- PROXY_assert(status == 0, OMX_ErrorUndefined,
- "MemMgr_UnMap returned an error");
-
- EXIT:
- DOMX_EXIT("eError: %d", eError);
- return eError;
-}
-
-/* ===========================================================================*/
-/**
- * @name RPC_MapMetaData_Host()
- * @brief This utility maps metadata buffer in OMX buffer header to Chiron
- * virtual address space (metadata buffer is TILER 1D buffer in Ducati Virtual
- * space). It overrides the metadata buffer with Chiron address in the same
- * field. Metadata buffer size represents max size (alloc size) that needs to
- * be mapped
- * @param void
- * @return OMX_ErrorNone = Successful
- * @sa TBD
- *
- */
-/* ===========================================================================*/
-OMX_ERRORTYPE RPC_MapMetaData_Host(OMX_BUFFERHEADERTYPE * pBufHdr)
-{
- OMX_PTR pMappedMetaDataBuffer = NULL;
- OMX_U32 nMetaDataSize = 0;
- OMX_ERRORTYPE eError = OMX_ErrorNone;
-
- DSPtr dsptr[2];
- bytes_t lengths[2];
- OMX_U32 numBlocks = 0;
-
- DOMX_ENTER("");
-
- if ((pBufHdr->pPlatformPrivate != NULL) &&
- (((OMX_TI_PLATFORMPRIVATE *) pBufHdr->pPlatformPrivate)->
- pMetaDataBuffer != NULL))
- {
-
- pMappedMetaDataBuffer = NULL;
-
- nMetaDataSize =
- ((OMX_TI_PLATFORMPRIVATE *) pBufHdr->pPlatformPrivate)->
- nMetaDataSize;
- PROXY_assert((nMetaDataSize != 0), OMX_ErrorBadParameter,
- "Received ZERO metadata size from Ducati, cannot map");
-
- dsptr[0] =
- (OMX_U32) ((OMX_TI_PLATFORMPRIVATE *)
- pBufHdr->pPlatformPrivate)->pMetaDataBuffer;
- numBlocks = 1;
- lengths[0] =
- LINUX_PAGE_SIZE * ((nMetaDataSize + (LINUX_PAGE_SIZE -
- 1)) / LINUX_PAGE_SIZE);
-
- pMappedMetaDataBuffer =
- tiler_assisted_phase1_D2CReMap(numBlocks, dsptr, lengths);
-
- PROXY_assert((pMappedMetaDataBuffer != NULL),
- OMX_ErrorInsufficientResources,
- "Mapping metadata to Chiron space failed");
-
- ((OMX_TI_PLATFORMPRIVATE *) pBufHdr->pPlatformPrivate)->
- pMetaDataBuffer = pMappedMetaDataBuffer;
- }
-
- EXIT:
- DOMX_EXIT("eError: %d", eError);
- return eError;
-}
-
-/* ===========================================================================*/
-/**
- * @name RPC_UnMapMetaData_Host()
- * @brief This utility unmaps the previously mapped metadata on host from remote
- * components
- * @param void
- * @return OMX_ErrorNone = Successful
- * @sa TBD
- *
- */
-/* ===========================================================================*/
-OMX_ERRORTYPE RPC_UnMapMetaData_Host(OMX_BUFFERHEADERTYPE * pBufHdr)
-{
- OMX_ERRORTYPE eError = OMX_ErrorNone;
- OMX_S32 nReturn = 0;
-
- DOMX_ENTER("");
-
- if ((pBufHdr->pPlatformPrivate != NULL) &&
- (((OMX_TI_PLATFORMPRIVATE *) pBufHdr->pPlatformPrivate)->
- pMetaDataBuffer != NULL))
- {
-
- nReturn =
- tiler_assisted_phase1_DeMap((((OMX_TI_PLATFORMPRIVATE *)
- pBufHdr->pPlatformPrivate)->pMetaDataBuffer));
- PROXY_assert((nReturn == 0), OMX_ErrorUndefined,
- "Metadata unmap failed");
- }
- EXIT:
- DOMX_EXIT("eError: %d", eError);
- return eError;
-}
-
-#endif
-
/* ===========================================================================*/
/**
* @name _RPC_IsProxyComponent()
diff --git a/domx/domx/omx_rpc/inc/omx_rpc_internal.h b/domx/domx/omx_rpc/inc/omx_rpc_internal.h
index f4ad7d8..b33bf2d 100755
--- a/domx/domx/omx_rpc/inc/omx_rpc_internal.h
+++ b/domx/domx/omx_rpc/inc/omx_rpc_internal.h
@@ -84,7 +84,7 @@ extern "C"
/*This defines the maximum number of remote functions that can be registered*/
#define RPC_OMX_MAX_FUNCTION_LIST 21
/*Packet size for each message*/
-#define RPC_PACKET_SIZE 0xF0
+#define RPC_PACKET_SIZE 0x12C
diff --git a/domx/domx/omx_rpc/inc/omx_rpc_stub.h b/domx/domx/omx_rpc/inc/omx_rpc_stub.h
index 5e3837f..fd06506 100755
--- a/domx/domx/omx_rpc/inc/omx_rpc_stub.h
+++ b/domx/domx/omx_rpc/inc/omx_rpc_stub.h
@@ -76,7 +76,7 @@ extern "C"
RPC_OMX_ERRORTYPE RPC_SetParameter(OMX_HANDLETYPE hRPCCtx,
OMX_INDEXTYPE nParamIndex, OMX_PTR pCompParam,
- OMX_PTR pLocBufNeedMap, OMX_ERRORTYPE * nCmdStatus);
+ OMX_PTR pLocBufNeedMap, OMX_U32 nNumOfLocalBuf, OMX_ERRORTYPE * nCmdStatus);
RPC_OMX_ERRORTYPE RPC_GetParameter(OMX_HANDLETYPE hRPCCtx,
OMX_INDEXTYPE nParamIndex, OMX_PTR pCompParam,
diff --git a/domx/domx/omx_rpc/inc/omx_rpc_utils.h b/domx/domx/omx_rpc/inc/omx_rpc_utils.h
index 9d1c871..d8334c9 100755
--- a/domx/domx/omx_rpc/inc/omx_rpc_utils.h
+++ b/domx/domx/omx_rpc/inc/omx_rpc_utils.h
@@ -66,6 +66,7 @@ extern "C"
#define DOMX_ERROR(fmt,...) TIMM_OSAL_Error(fmt, ##__VA_ARGS__)
#define DOMX_WARN(fmt,...) TIMM_OSAL_Warning(fmt, ##__VA_ARGS__)
+#define DOMX_PROF(fmt,...) TIMM_OSAL_Profiling(fmt, ##__VA_ARGS__)
#define DOMX_INFO(fmt,...) TIMM_OSAL_Info(fmt, ##__VA_ARGS__)
#define DOMX_DEBUG(fmt,...) TIMM_OSAL_Debug(fmt, ##__VA_ARGS__)
#define DOMX_ENTER(fmt,...) TIMM_OSAL_Entering(fmt, ##__VA_ARGS__)
@@ -82,7 +83,7 @@ extern "C"
#define RPC_paramCheck(C, V, S) do { \
if (!(C)) { eRPCError = V;\
if(S) DOMX_ERROR("failed check:" #C" - returning error: 0x%x - %s",V,S);\
- else DOMX_ERROR("failed check: %s - returning error: 0x%x",C, V); \
+ else DOMX_ERROR("failed check:" #C" - returning error: 0x%x",V); \
goto EXIT; } \
} while(0)
@@ -99,7 +100,7 @@ extern "C"
* MACROS - COMMON MARSHALLING UTILITIES
******************************************************************/
#define RPC_SETFIELDVALUE(MSGBODY, POS, VALUE, TYPE) do { \
- *((TYPE *) ((OMX_U32)MSGBODY+POS)) = VALUE; \
+ *((TYPE *) ((OMX_U32)MSGBODY+POS)) = (TYPE)VALUE; \
POS += sizeof(TYPE); \
} while(0)
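An illustrative sketch of how the RPC_SETFIELDVALUE marshalling macro above is used: fields are packed back-to-back into a message buffer while the position variable advances by sizeof(TYPE) after each write. The variable names and values below are examples only; RPC_PACKET_SIZE is taken from omx_rpc_internal.h.

OMX_U32 nPos = 0;
OMX_U32 nPortIndex = 1;           /* example value */
OMX_U32 nSizeBytes = 4096;        /* example value */
OMX_U8 pMsgBody[RPC_PACKET_SIZE];

RPC_SETFIELDVALUE(pMsgBody, nPos, nPortIndex, OMX_U32);  /* writes 4 bytes, nPos becomes 4 */
RPC_SETFIELDVALUE(pMsgBody, nPos, nSizeBytes, OMX_U32);  /* the next field lands right after */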
diff --git a/domx/domx/omx_rpc/src/omx_rpc.c b/domx/domx/omx_rpc/src/omx_rpc.c
index d93134c..0743491 100755
--- a/domx/domx/omx_rpc/src/omx_rpc.c
+++ b/domx/domx/omx_rpc/src/omx_rpc.c
@@ -72,7 +72,7 @@
#define RPC_MSGPIPE_SIZE (4)
#define RPC_MSG_SIZE_FOR_PIPE (sizeof(OMX_PTR))
-
+#define MAX_ATTEMPTS 15
#define RPC_getPacket(nPacketSize, pPacket) do { \
pPacket = TIMM_OSAL_Malloc(nPacketSize, TIMM_OSAL_TRUE, 0, TIMMOSAL_MEM_SEGMENT_INT); \
@@ -84,7 +84,7 @@
if(pPacket != NULL) TIMM_OSAL_Free(pPacket); \
} while(0)
-
+OMX_U8 pBufferError[RPC_PACKET_SIZE];
void *RPC_CallbackThread(void *data);
@@ -129,7 +129,7 @@ RPC_OMX_ERRORTYPE RPC_InstanceInit(OMX_STRING cComponentName,
while (1)
{
pRPCCtx->fd_omx = open("/dev/rpmsg-omx1", O_RDWR);
- if(pRPCCtx->fd_omx >= 0 || errno != ENOENT || nAttempts == 15)
+ if(pRPCCtx->fd_omx >= 0 || errno != ENOENT || nAttempts == MAX_ATTEMPTS)
break;
DOMX_DEBUG("errno from open= %d, REATTEMPTING OPEN!!!!",errno);
nAttempts++;
@@ -291,6 +291,7 @@ void *RPC_CallbackThread(void *data)
TIMM_OSAL_ERRORTYPE eError = TIMM_OSAL_ERR_NONE;
OMX_COMPONENTTYPE *hComp = NULL;
PROXY_COMPONENT_PRIVATE *pCompPrv = NULL;
+ OMX_PTR pBuff = pBufferError;
maxfd =
(pRPCCtx->fd_killcb >
@@ -321,6 +322,13 @@ void *RPC_CallbackThread(void *data)
{
if(errno == ENXIO)
{
+ for(nFxnIdx = 0; nFxnIdx < RPC_OMX_MAX_FUNCTION_LIST; nFxnIdx++)
+ {
+ ((struct omx_packet *) pBufferError)->result = OMX_ErrorHardware;
+						eError = TIMM_OSAL_WriteToPipe(pRPCCtx->pMsgPipe[nFxnIdx], &pBuff, RPC_MSG_SIZE_FOR_PIPE, TIMM_OSAL_SUSPEND);
+ if(eError != TIMM_OSAL_ERR_NONE)
+ DOMX_ERROR("Write to pipe failed");
+ }
/*Indicate fatal error and exit*/
RPC_assert(0, RPC_OMX_ErrorHardware,
"Remote processor fatal error");
diff --git a/domx/domx/omx_rpc/src/omx_rpc_config.c b/domx/domx/omx_rpc/src/omx_rpc_config.c
index 07e46ed..8b13789 100755
--- a/domx/domx/omx_rpc/src/omx_rpc_config.c
+++ b/domx/domx/omx_rpc/src/omx_rpc_config.c
@@ -1,132 +1 @@
-#if 0
-/*
- * Copyright (c) 2010, Texas Instruments Incorporated
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions
- * are met:
- *
- * * Redistributions of source code must retain the above copyright
- * notice, this list of conditions and the following disclaimer.
- *
- * * Redistributions in binary form must reproduce the above copyright
- * notice, this list of conditions and the following disclaimer in the
- * documentation and/or other materials provided with the distribution.
- *
- * * Neither the name of Texas Instruments Incorporated nor the names of
- * its contributors may be used to endorse or promote products derived
- * from this software without specific prior written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
- * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
- * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
- * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
- * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
- * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
- * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
- * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
- * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
- * EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-/**
- * @file omx_rpc_config.c
- * This file contains methods that provides the functionality for
- * the OpenMAX1.1 DOMX Framework RPC.
- *
- * @path \WTSD_DucatiMMSW\framework\domx\omx_rpc\src
- *
- * @rev 1.0
- */
-
-/*==============================================================
- *! Revision History
- *! ============================
- *! 29-Mar-2010 Abhishek Ranka : Revamped DOMX implementation
- *!
- *! 19-August-2009 B Ravi Kiran ravi.kiran@ti.com: Initial Version
- *================================================================*/
- /******************************************************************
- * INCLUDE FILES
- ******************************************************************/
- /* ----- system and platform files ---------------------------- */
-#include <stdlib.h>
-#include <string.h>
-#include <stdio.h>
-#include <Std.h>
-
-#include <OMX_Types.h>
-#include <timm_osal_interfaces.h>
-#include <timm_osal_trace.h>
-
-#include <MultiProc.h>
-#include <RcmClient.h>
-#include <RcmServer.h>
-
-/*-------program files ----------------------------------------*/
-#include "omx_rpc.h"
-#include "omx_rpc_stub.h"
-#include "omx_rpc_skel.h"
-#include "omx_rpc_internal.h"
-#include "omx_rpc_utils.h"
-
-extern Int32 RPC_MemFree(UInt32 * dataSize, UInt32 * data);
-extern Int32 RPC_MemAlloc(UInt32 * dataSize, UInt32 * data);
-
-/* contains configurations or structures to be passed to omx_rpc layer */
-char rpcFxns[][MAX_FUNCTION_NAME_LENGTH] = {
- "RPC_SKEL_SetParameter",
- "RPC_SKEL_GetParameter",
- "RPC_SKEL_GetHandle",
- "RPC_SKEL_UseBuffer",
-
- "RPC_SKEL_FreeHandle",
-
- "RPC_SKEL_SetConfig",
- "RPC_SKEL_GetConfig",
- "RPC_SKEL_GetState",
- "RPC_SKEL_SendCommand",
- "RPC_SKEL_GetComponentVersion",
- "RPC_SKEL_GetExtensionIndex",
- "RPC_SKEL_FillThisBuffer",
- "RPC_SKEL_FillBufferDone",
- "RPC_SKEL_FreeBuffer",
-
- "RPC_SKEL_EmptyThisBuffer",
- "RPC_SKEL_EmptyBufferDone",
- "RPC_SKEL_EventHandler",
- "RPC_SKEL_AllocateBuffer",
- "RPC_SKEL_ComponentTunnelRequest",
-
- "MemMgr_Alloc",
- "MemMgr_Free"
-};
-
-rpcSkelArr rpcSkelFxns[] = {
- {RPC_SKEL_SetParameter},
- {RPC_SKEL_GetParameter},
- {RPC_SKEL_GetHandle},
- {RPC_SKEL_UseBuffer},
- {RPC_SKEL_FreeHandle},
- {RPC_SKEL_SetConfig},
- {RPC_SKEL_GetConfig},
- {RPC_SKEL_GetState},
- {RPC_SKEL_SendCommand},
- {RPC_SKEL_GetComponentVersion},
- {RPC_SKEL_GetExtensionIndex},
- {RPC_SKEL_FillThisBuffer},
- {RPC_SKEL_FillBufferDone},
- {RPC_SKEL_FreeBuffer},
- {RPC_SKEL_EmptyThisBuffer},
- {RPC_SKEL_EmptyBufferDone},
- {RPC_SKEL_EventHandler},
- {RPC_SKEL_AllocateBuffer},
- {RPC_SKEL_ComponentTunnelRequest},
- {RPC_MemAlloc},
- {RPC_MemFree}
-};
-
-#endif
diff --git a/domx/domx/omx_rpc/src/omx_rpc_platform.c b/domx/domx/omx_rpc/src/omx_rpc_platform.c
index 65d4cf5..8b13789 100755
--- a/domx/domx/omx_rpc/src/omx_rpc_platform.c
+++ b/domx/domx/omx_rpc/src/omx_rpc_platform.c
@@ -1,85 +1 @@
-#if 0
-/*
- * Copyright (c) 2010, Texas Instruments Incorporated
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions
- * are met:
- *
- * * Redistributions of source code must retain the above copyright
- * notice, this list of conditions and the following disclaimer.
- *
- * * Redistributions in binary form must reproduce the above copyright
- * notice, this list of conditions and the following disclaimer in the
- * documentation and/or other materials provided with the distribution.
- *
- * * Neither the name of Texas Instruments Incorporated nor the names of
- * its contributors may be used to endorse or promote products derived
- * from this software without specific prior written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
- * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
- * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
- * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
- * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
- * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
- * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
- * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
- * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
- * EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-/**
- * @file omx_rpc_platform.c
- * This file contains methods that provides the functionality for
- * the OpenMAX1.1 DOMX Framework RPC.
- *
- * @path \WTSD_DucatiMMSW\framework\domx\omx_rpc\src
- *
- * @rev 1.0
- */
-
-/*==============================================================
- *! Revision History
- *! ============================
- *! 29-Mar-2010 Abhishek Ranka : Revamped DOMX implementation
- *!
- *! 19-August-2009 B Ravi Kiran ravi.kiran@ti.com: Initial Version
- *================================================================*/
- /******************************************************************
- * INCLUDE FILES
- ******************************************************************/
- /* ----- system and platform files ---------------------------- */
-#include <stdlib.h>
-#include <string.h>
-#include <stdio.h>
-#include <Std.h>
-
-#include <OMX_Types.h>
-#include <timm_osal_interfaces.h>
-#include <timm_osal_trace.h>
-
-#include <MultiProc.h>
-#include <RcmClient.h>
-#include <RcmServer.h>
-
-/*-------program files ----------------------------------------*/
-#include "omx_rpc.h"
-#include "omx_rpc_stub.h"
-#include "omx_rpc_skel.h"
-#include "omx_rpc_internal.h"
-#include "omx_rpc_utils.h"
-
-/*This list needs to be a comprehensive list of all possible communicating RCM servers avalilable across the whole system (core 0 core 1, tesla, chiron)*/
-char Core_Array[][MAX_CORENAME_LENGTH] =
- { "TESLA", "DUCATI1", "DUCATI0", "CHIRON" };
-
-char rcmservertable[][MAX_SERVER_NAME_LENGTH] =
- { "RSrv_Tesla", "RSrv_Ducati1", "RSrv_Ducati0", "RSrv_Chiron" };
-
-OMX_U32 heapIdArray[MAX_NUMBER_OF_HEAPS] = { 0, 1, 0, 1 };
-
-#endif
diff --git a/domx/domx/omx_rpc/src/omx_rpc_stub.c b/domx/domx/omx_rpc/src/omx_rpc_stub.c
index dd622df..0c28ba1 100755
--- a/domx/domx/omx_rpc/src/omx_rpc_stub.c
+++ b/domx/domx/omx_rpc/src/omx_rpc_stub.c
@@ -52,6 +52,7 @@
/******************************************************************
* INCLUDE FILES
******************************************************************/
+#include <errno.h>
#include <string.h>
#include <stdio.h>
#include <unistd.h>
@@ -99,8 +100,13 @@
status = write(hCtx->fd_omx, pPacket, nPacketSize); \
RPC_freePacket(pPacket); \
pPacket = NULL; \
- if(status < 0 ) DOMX_ERROR("DOMX Write failed 0x%x %d",status,status); \
- RPC_assert(status >= 0, RPC_OMX_ErrorUndefined, "Write failed"); \
+ if(status < 0 && errno == ENXIO) { \
+ RPC_assert(0, RPC_OMX_ErrorHardware, "Write failed - Ducati in faulty state"); \
+ } \
+ if(status != (signed)nPacketSize) { \
+ DOMX_ERROR("Write failed returning status = 0x%x",status); \
+ RPC_assert(0, RPC_OMX_ErrorUndefined, "Write failed"); \
+ } \
eError = TIMM_OSAL_ReadFromPipe(hCtx->pMsgPipe[nFxnIdx], &pRetPacket, \
RPC_MSG_SIZE_FOR_PIPE, (TIMM_OSAL_U32 *)(&nSize), TIMM_OSAL_SUSPEND); \
RPC_assert(eError == TIMM_OSAL_ERR_NONE, eError, \
@@ -224,7 +230,7 @@ RPC_OMX_ERRORTYPE RPC_GetHandle(OMX_HANDLETYPE hRPCCtx,
EXIT:
if (pPacket)
RPC_freePacket(pPacket);
- if (pRetPacket)
+ if (pRetPacket && *eCompReturn != OMX_ErrorHardware)
RPC_freePacket(pRetPacket);
DOMX_EXIT("");
@@ -277,7 +283,7 @@ RPC_OMX_ERRORTYPE RPC_FreeHandle(OMX_HANDLETYPE hRPCCtx,
EXIT:
if (pPacket)
RPC_freePacket(pPacket);
- if (pRetPacket)
+ if (pRetPacket && *eCompReturn != OMX_ErrorHardware)
RPC_freePacket(pRetPacket);
DOMX_EXIT("");
@@ -300,7 +306,7 @@ RPC_OMX_ERRORTYPE RPC_FreeHandle(OMX_HANDLETYPE hRPCCtx,
/* ===========================================================================*/
RPC_OMX_ERRORTYPE RPC_SetParameter(OMX_HANDLETYPE hRPCCtx,
OMX_INDEXTYPE nParamIndex, OMX_PTR pCompParam,
- OMX_PTR pLocBufNeedMap, OMX_ERRORTYPE * eCompReturn)
+ OMX_PTR pLocBufNeedMap, OMX_U32 nNumOfLocalBuf, OMX_ERRORTYPE * eCompReturn)
{
RPC_OMX_ERRORTYPE eRPCError = RPC_OMX_ErrorNone;
@@ -320,8 +326,14 @@ RPC_OMX_ERRORTYPE RPC_SetParameter(OMX_HANDLETYPE hRPCCtx,
RPC_initPacket(pPacket, pOmxPacket, pData, nFxnIdx, nPacketSize);
if (pLocBufNeedMap != NULL && (pLocBufNeedMap - pCompParam) >= 0 ) {
- RPC_SETFIELDVALUE(pData, nPos, RPC_OMX_MAP_INFO_ONE_BUF,
- RPC_OMX_MAP_INFO_TYPE);
+ if (nNumOfLocalBuf == 1) {
+ RPC_SETFIELDVALUE(pData, nPos, RPC_OMX_MAP_INFO_ONE_BUF,
+ RPC_OMX_MAP_INFO_TYPE);
+ }
+ else if (nNumOfLocalBuf == 2) {
+ RPC_SETFIELDVALUE(pData, nPos, RPC_OMX_MAP_INFO_TWO_BUF,
+ RPC_OMX_MAP_INFO_TYPE);
+ }
nOffset = (pLocBufNeedMap - pCompParam) +
sizeof(RPC_OMX_MAP_INFO_TYPE) + sizeof(OMX_U32) +
sizeof(OMX_HANDLETYPE) + sizeof(OMX_INDEXTYPE);
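
A hedged sketch of what the pLocBufNeedMap/pCompParam pointer arithmetic computes, with a hypothetical parameter struct standing in for a real OMX type:

#include <stdio.h>
#include <stdint.h>

/* hypothetical parameter struct; real callers pass an OMX parameter whose
 * buffer-pointer field needs address translation on the remote side */
typedef struct {
    uint32_t nSize;
    uint32_t nPortIndex;
    void    *pBuffer;        /* field that needs mapping */
} HYPOTHETICAL_PARAMTYPE;

int main(void)
{
    HYPOTHETICAL_PARAMTYPE sParam = { sizeof(sParam), 0, (void *)0x1000 };
    unsigned char *pCompParam = (unsigned char *)&sParam;
    unsigned char *pLocBufNeedMap = (unsigned char *)&sParam.pBuffer;

    /* same subtraction RPC_SetParameter() performs; the stub then adds the
     * sizes of the fixed header fields to get the offset inside the packet */
    printf("buffer pointer field is %ld bytes into the param struct\n",
           (long)(pLocBufNeedMap - pCompParam));
    return 0;
}

The new nNumOfLocalBuf argument then only selects ONE_BUF versus TWO_BUF map info, depending on whether one or two such contiguous pointer fields need translation.
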
@@ -346,7 +358,7 @@ RPC_OMX_ERRORTYPE RPC_SetParameter(OMX_HANDLETYPE hRPCCtx,
EXIT:
if (pPacket)
RPC_freePacket(pPacket);
- if (pRetPacket)
+ if (pRetPacket && *eCompReturn != OMX_ErrorHardware)
RPC_freePacket(pRetPacket);
DOMX_EXIT("");
@@ -425,7 +437,8 @@ RPC_OMX_ERRORTYPE RPC_GetParameter(OMX_HANDLETYPE hRPCCtx,
EXIT:
if (pPacket)
RPC_freePacket(pPacket);
- if (pRetPacket)
+	// In case of OMX_ErrorHardware the return packet is the static error buffer owned by omx_rpc.c and must not be freed here
+ if (pRetPacket && *eCompReturn != OMX_ErrorHardware)
RPC_freePacket(pRetPacket);
DOMX_EXIT("");
@@ -493,7 +506,7 @@ RPC_OMX_ERRORTYPE RPC_SetConfig(OMX_HANDLETYPE hRPCCtx,
EXIT:
if (pPacket)
RPC_freePacket(pPacket);
- if (pRetPacket)
+ if (pRetPacket && *eCompReturn != OMX_ErrorHardware)
RPC_freePacket(pRetPacket);
DOMX_EXIT("");
@@ -570,7 +583,7 @@ RPC_OMX_ERRORTYPE RPC_GetConfig(OMX_HANDLETYPE hRPCCtx,
EXIT:
if (pPacket)
RPC_freePacket(pPacket);
- if (pRetPacket)
+ if (pRetPacket && *eCompReturn != OMX_ErrorHardware)
RPC_freePacket(pRetPacket);
DOMX_EXIT("");
@@ -642,7 +655,7 @@ RPC_OMX_ERRORTYPE RPC_SendCommand(OMX_HANDLETYPE hRPCCtx,
EXIT:
if (pPacket)
RPC_freePacket(pPacket);
- if (pRetPacket)
+ if (pRetPacket && *eCompReturn != OMX_ErrorHardware)
RPC_freePacket(pRetPacket);
DOMX_EXIT("");
@@ -704,7 +717,7 @@ RPC_OMX_ERRORTYPE RPC_GetState(OMX_HANDLETYPE hRPCCtx, OMX_STATETYPE * pState,
EXIT:
if (pPacket)
RPC_freePacket(pPacket);
- if (pRetPacket)
+ if (pRetPacket && *eCompReturn != OMX_ErrorHardware)
RPC_freePacket(pRetPacket);
DOMX_EXIT("");
@@ -775,13 +788,13 @@ RPC_OMX_ERRORTYPE RPC_GetComponentVersion(OMX_HANDLETYPE hRPCCtx,
OMX_VERSIONTYPE);
RPC_GETFIELDCOPYTYPE(pRetData, nPos, pSpecVersion,
OMX_VERSIONTYPE);
- //RPC_GETFIELDCOPYTYPE(pRetData, nPos, pComponentUUID, OMX_UUIDTYPE);
+ memcpy(pComponentUUID,(OMX_UUIDTYPE *)( (OMX_U32)pRetData + nPos), sizeof(OMX_UUIDTYPE));
}
EXIT:
if (pPacket)
RPC_freePacket(pPacket);
- if (pRetPacket)
+ if (pRetPacket && *eCompReturn != OMX_ErrorHardware)
RPC_freePacket(pRetPacket);
return eRPCError;
@@ -846,7 +859,7 @@ RPC_OMX_ERRORTYPE RPC_GetExtensionIndex(OMX_HANDLETYPE hRPCCtx,
EXIT:
if (pPacket)
RPC_freePacket(pPacket);
- if (pRetPacket)
+ if (pRetPacket && *eCompReturn != OMX_ErrorHardware)
RPC_freePacket(pRetPacket);
return eRPCError;
@@ -975,7 +988,7 @@ RPC_OMX_ERRORTYPE RPC_AllocateBuffer(OMX_HANDLETYPE hRPCCtx,
EXIT:
if (pPacket)
RPC_freePacket(pPacket);
- if (pRetPacket)
+ if (pRetPacket && *eCompReturn != OMX_ErrorHardware)
RPC_freePacket(pRetPacket);
DOMX_EXIT("");
@@ -1138,7 +1151,7 @@ RPC_OMX_ERRORTYPE RPC_UseBuffer(OMX_HANDLETYPE hRPCCtx,
EXIT:
if (pPacket)
RPC_freePacket(pPacket);
- if (pRetPacket)
+ if (pRetPacket && *eCompReturn != OMX_ErrorHardware)
RPC_freePacket(pRetPacket);
DOMX_EXIT("");
@@ -1177,8 +1190,11 @@ RPC_OMX_ERRORTYPE RPC_FreeBuffer(OMX_HANDLETYPE hRPCCtx,
RPC_getPacket(nPacketSize, pPacket);
RPC_initPacket(pPacket, pOmxPacket, pData, nFxnIdx, nPacketSize);
+ /*Offset is the location of the buffer pointer from the start of the data packet */
+ nOffset = sizeof(RPC_OMX_MAP_INFO_TYPE) + sizeof(OMX_U32) +
+ sizeof(OMX_HANDLETYPE) + sizeof(OMX_U32) + sizeof(OMX_U32);
/*No buffer mapping required */
- RPC_SETFIELDVALUE(pData, nPos, RPC_OMX_MAP_INFO_NONE,
+ RPC_SETFIELDVALUE(pData, nPos, RPC_OMX_MAP_INFO_ONE_BUF,
RPC_OMX_MAP_INFO_TYPE);
RPC_SETFIELDVALUE(pData, nPos, nOffset, OMX_U32);
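
A worked version of the offset arithmetic above, using stand-in typedefs; the real sizes come from the OMX headers on the 32-bit target, so the 20-byte figure only holds under that assumption:

#include <stdio.h>
#include <stdint.h>

/* stand-ins for the sketch only */
typedef uint32_t OMX_U32;
typedef void    *OMX_HANDLETYPE;
typedef uint32_t RPC_OMX_MAP_INFO_TYPE;

int main(void)
{
    /* five fixed-size fields are marshalled ahead of the buffer pointer */
    size_t nOffset = sizeof(RPC_OMX_MAP_INFO_TYPE) + sizeof(OMX_U32) +
        sizeof(OMX_HANDLETYPE) + sizeof(OMX_U32) + sizeof(OMX_U32);

    printf("buffer pointer lives %u bytes into the data packet\n",
           (unsigned)nOffset);
    return 0;
}
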
@@ -1197,7 +1213,7 @@ RPC_OMX_ERRORTYPE RPC_FreeBuffer(OMX_HANDLETYPE hRPCCtx,
EXIT:
if (pPacket)
RPC_freePacket(pPacket);
- if (pRetPacket)
+ if (pRetPacket && *eCompReturn != OMX_ErrorHardware)
RPC_freePacket(pRetPacket);
DOMX_EXIT("");
@@ -1305,7 +1321,7 @@ RPC_OMX_ERRORTYPE RPC_EmptyThisBuffer(OMX_HANDLETYPE hRPCCtx,
EXIT:
if (pPacket)
RPC_freePacket(pPacket);
- if (pRetPacket)
+ if (pRetPacket && *eCompReturn != OMX_ErrorHardware)
RPC_freePacket(pRetPacket);
DOMX_EXIT("");
@@ -1384,7 +1400,7 @@ RPC_OMX_ERRORTYPE RPC_FillThisBuffer(OMX_HANDLETYPE hRPCCtx,
EXIT:
if (pPacket)
RPC_freePacket(pPacket);
- if (pRetPacket)
+ if (pRetPacket && *eCompReturn != OMX_ErrorHardware)
RPC_freePacket(pRetPacket);
DOMX_EXIT("");
@@ -1422,9 +1438,52 @@ OMX_ERRORTYPE RPC_FillBufferDone(OMX_HANDLETYPE hRPCCtx, OMX_PTR pAppData,
}
RPC_OMX_ERRORTYPE RPC_ComponentTunnelRequest(OMX_HANDLETYPE hRPCCtx,
- OMX_IN OMX_U32 nPort, OMX_HANDLETYPE hTunneledhRemoteHandle,
+ OMX_IN OMX_U32 nPort, OMX_HANDLETYPE hTunneledRemoteHandle,
OMX_U32 nTunneledPort, OMX_INOUT OMX_TUNNELSETUPTYPE * pTunnelSetup,
- OMX_ERRORTYPE * nCmdStatus)
+ OMX_ERRORTYPE * eCompReturn)
{
- return RPC_OMX_ErrorNone;
+ RPC_OMX_ERRORTYPE eRPCError = RPC_OMX_ErrorNone;
+ TIMM_OSAL_ERRORTYPE eError = TIMM_OSAL_ERR_NONE;
+ OMX_U32 nPacketSize = RPC_PACKET_SIZE;
+ RPC_OMX_CONTEXT *hCtx = hRPCCtx;
+ OMX_HANDLETYPE hComp = hCtx->hRemoteHandle;
+ RPC_OMX_CONTEXT *hTunneledCtx = hTunneledRemoteHandle;
+ OMX_HANDLETYPE hTunneledComp = hTunneledCtx->hRemoteHandle;
+ RPC_OMX_FXN_IDX_TYPE nFxnIdx;
+ struct omx_packet *pOmxPacket = NULL;
+ OMX_U32 nPos = 0, nSize = 0, nOffset = 0;
+ OMX_S32 status = 0;
+#ifdef RPC_SYNC_MODE
+ TIMM_OSAL_PTR pPacket = NULL, pRetPacket = NULL, pData = NULL;
+#endif
+
+	DOMX_ENTER("");
+
+ nFxnIdx = RPC_OMX_FXN_IDX_COMP_TUNNEL_REQUEST;
+ RPC_getPacket(nPacketSize, pPacket);
+ RPC_initPacket(pPacket, pOmxPacket, pData, nFxnIdx, nPacketSize);
+
+ /*Pack the values into a packet*/
+ //Marshalled:[>ParentComp|>ParentPort|>TunnelComp|>TunneledPort>TunnelSetup]
+ RPC_SETFIELDVALUE(pData, nPos, RPC_OMX_MAP_INFO_NONE, RPC_OMX_MAP_INFO_TYPE);
+ RPC_SETFIELDVALUE(pData, nPos, hComp, OMX_HANDLETYPE);
+ RPC_SETFIELDVALUE(pData, nPos, nPort, OMX_U32);
+ RPC_SETFIELDVALUE(pData, nPos, hTunneledComp, OMX_HANDLETYPE);
+ RPC_SETFIELDVALUE(pData, nPos, nTunneledPort, OMX_U32);
+	DOMX_DEBUG("Sending ComponentTunnelRequest packet");
+ RPC_sendPacket_sync(hCtx, pPacket, nPacketSize, nFxnIdx, pRetPacket,
+ nSize);
+
+	DOMX_DEBUG("After RPC_sendPacket_sync: *eCompReturn : 0x%x", ((struct omx_packet *) pRetPacket)->result);
+ *eCompReturn = (OMX_ERRORTYPE) (((struct omx_packet *) pRetPacket)->result);
+
+ EXIT:
+ if (pPacket)
+ RPC_freePacket(pPacket);
+	if (pRetPacket && *eCompReturn != OMX_ErrorHardware)
+ RPC_freePacket(pRetPacket);
+
+ DOMX_EXIT("");
+ return eRPCError;
+
}
diff --git a/domx/domx/profiling/inc/profile.h b/domx/domx/profiling/inc/profile.h
new file mode 100644
index 0000000..352a91c
--- /dev/null
+++ b/domx/domx/profiling/inc/profile.h
@@ -0,0 +1,87 @@
+/*
+ * Copyright (c) 2010, Texas Instruments Incorporated
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * * Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * * Neither the name of Texas Instruments Incorporated nor the names of
+ * its contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
+ * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
+ * EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+/*
+* @file profile.h
+* The header file defines the trace events definitions.
+* @path platform\hardware\ti\domx\domx\profiling\inc\
+*
+*/
+/* -------------------------------------------------------------------------- */
+/* =========================================================================
+ *!
+ *! Revision History
+ *! ===================================
+ *! 1.0: Created the first draft version
+ * ========================================================================= */
+
+#ifndef _PROFILE_H_
+#define _PROFILE_H_
+
+#ifdef __cplusplus
+extern "C"
+{
+#endif /* #ifdef __cplusplus */
+
+#include <OMX_Types.h>
+#include <OMX_Component.h>
+
+#include "omx_rpc_utils.h"
+#include "omx_proxy_common.h"
+
+enum KPI_BUFFER_EVENT {
+ KPI_BUFFER_ETB = 1,
+ KPI_BUFFER_FTB = 2,
+ KPI_BUFFER_EBD = 3,
+ KPI_BUFFER_FBD = 4
+};
+
+/**
+ * OMX monitoring component init. Registers component
+ */
+void KPI_OmxCompInit(OMX_HANDLETYPE hComponent);
+
+/**
+ * OMX monitoring component deinit. Unregisters component
+ */
+void KPI_OmxCompDeinit(OMX_HANDLETYPE hComponent);
+
+/**
+ * OMX monitoring buffer event trace. Traces FTB/ETB/FBD/EBD events
+ */
+void KPI_OmxCompBufferEvent(enum KPI_BUFFER_EVENT event, OMX_HANDLETYPE hComponent, PROXY_BUFFER_INFO* pBuffer);
+
+#ifdef __cplusplus
+}
+#endif /* #ifdef __cplusplus */
+
+#endif /* #ifndef _PROFILE_H_ */
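
A hedged sketch of the call sites these hooks are intended for (stubbed locally so it compiles alone; the exact proxy-layer placement is an assumption, not something this diff shows):

#include <stdio.h>

/* local stand-ins for the real OMX/proxy types and entry points */
typedef void *OMX_HANDLETYPE;
typedef struct { void *pBufHeader; void *pBufHeaderRemote; } PROXY_BUFFER_INFO;
enum KPI_BUFFER_EVENT { KPI_BUFFER_ETB = 1, KPI_BUFFER_FTB, KPI_BUFFER_EBD, KPI_BUFFER_FBD };

static void KPI_OmxCompInit(OMX_HANDLETYPE h) { printf("init %p\n", h); }
static void KPI_OmxCompDeinit(OMX_HANDLETYPE h) { printf("deinit %p\n", h); }
static void KPI_OmxCompBufferEvent(enum KPI_BUFFER_EVENT e, OMX_HANDLETYPE h,
                                   PROXY_BUFFER_INFO *b)
{
    printf("event %d comp %p hdr %p\n", (int)e, h, b->pBufHeaderRemote);
}

int main(void)
{
    OMX_HANDLETYPE hComp = (OMX_HANDLETYPE)0x1234;        /* hypothetical handle */
    PROXY_BUFFER_INFO tBuf = { 0, 0 };

    KPI_OmxCompInit(hComp);                               /* at component creation */
    KPI_OmxCompBufferEvent(KPI_BUFFER_ETB, hComp, &tBuf); /* on EmptyThisBuffer */
    KPI_OmxCompBufferEvent(KPI_BUFFER_EBD, hComp, &tBuf); /* on EmptyBufferDone */
    KPI_OmxCompDeinit(hComp);                             /* at component teardown */
    return 0;
}
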
diff --git a/domx/domx/profiling/src/profile.c b/domx/domx/profiling/src/profile.c
new file mode 100644
index 0000000..bc050b7
--- /dev/null
+++ b/domx/domx/profiling/src/profile.c
@@ -0,0 +1,299 @@
+/*
+ * Copyright (c) 2010, Texas Instruments Incorporated
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * * Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * * Neither the name of Texas Instruments Incorporated nor the names of
+ * its contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
+ * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
+ * EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+/**
+ * @file profile.c
+ * This file contains methods to profile DOMX
+ *
+ * @path ...\hardware\ti\domx\domx\profiling\src
+ *
+ * @rev 1.0
+ */
+
+/******************************************************************
+ * INCLUDE FILES
+ ******************************************************************/
+/* ----- system and platform files ----------------------------*/
+#include <stdlib.h>
+#include <string.h>
+#include <time.h>
+
+#ifdef _Android
+#include <cutils/properties.h>
+#endif
+
+#include <OMX_Types.h>
+#include <OMX_Component.h>
+
+/*-------program files ----------------------------------------*/
+#include "omx_rpc_utils.h"
+#include "omx_proxy_common.h"
+#include "profile.h"
+
+
+/******************************************************************
+ * DEFINES - CONSTANTS
+ ******************************************************************/
+/* Events that can be dynamically enabled */
+enum KPI_STATUS {
+ KPI_BUFFER_EVENTS = 1
+};
+
+/* OMX buffer events per component */
+typedef struct {
+ OMX_HANDLETYPE hComponent;
+ OMX_U32 count_ftb;
+ OMX_U32 count_fbd;
+ OMX_U32 count_etb;
+ OMX_U32 count_ebd;
+ char name[50];
+} kpi_omx_component;
+
+/* we trace up to MAX_OMX_COMP components */
+#define MAX_OMX_COMP 8
+
+
+/***************************************************************
+ * kpi_omx_monitor
+ * -------------------------------------------------------------
+ * Contains up to 8 components data
+ *
+ ***************************************************************/
+kpi_omx_component kpi_omx_monitor[MAX_OMX_COMP]; /* we trace up to MAX_OMX_COMP components */
+OMX_U32 kpi_omx_monitor_cnt = 0; /* no component yet */
+unsigned int kpi_status = 0;
+
+
+/* ===========================================================================*/
+/**
+ * @name KPI_GetTime()
+ * @brief Compute time since boot to timestamp events
+ * @param void
+ * @return OMX_U64 = time since boot in us
+ * @sa TBD
+ *
+ */
+/* ===========================================================================*/
+OMX_U64 KPI_GetTime(void)
+{
+ struct timespec tp;
+
+ clock_gettime(CLOCK_MONOTONIC, &tp);
+	/* promote before multiplying so the microsecond value does not overflow 32-bit time_t arithmetic */
+	return ((OMX_U64)tp.tv_sec * 1000000 + tp.tv_nsec / 1000);
+}
+
+/* ===========================================================================*/
+/**
+ * @name KPI_OmxCompKpiUpdateStatus()
+ * @brief Update dynamic activation of traces
+ * @param void
+ * @return void
+ * @sa TBD
+ *
+ */
+/* ===========================================================================*/
+void KPI_OmxCompKpiUpdateStatus(void)
+{
+ char *val = getenv("DEBUG_DOMX_KPI_STATUS");
+
+ if (val)
+ {
+ kpi_status = strtol(val, NULL, 0);
+ }
+#ifdef _Android
+ else
+ {
+ char value[PROPERTY_VALUE_MAX];
+ int val;
+
+ property_get("debug.domx.kpi_status", value, "0");
+ val = atoi(value);
+ if (val >= 0)
+ kpi_status = val;
+ }
+#endif
+}
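
The gating is a plain bitmask, so enabling buffer-event traces means setting bit 0 of DEBUG_DOMX_KPI_STATUS (or, on Android, debug.domx.kpi_status); a minimal sketch of the check:

#include <stdio.h>
#include <stdlib.h>

enum KPI_STATUS { KPI_BUFFER_EVENTS = 1 };

int main(void)
{
    unsigned int kpi_status = 0;
    char *val = getenv("DEBUG_DOMX_KPI_STATUS");   /* e.g. export DEBUG_DOMX_KPI_STATUS=1 */

    if (val)
        kpi_status = strtol(val, NULL, 0);

    printf("buffer-event tracing %s\n",
           (kpi_status & KPI_BUFFER_EVENTS) ? "enabled" : "disabled");
    return 0;
}
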
+
+/* ===========================================================================*/
+/**
+ * @name KPI_OmxCompInit()
+ * @brief Prepare monitoring structure for new component starting
+ * @param void
+ * @return void
+ * @sa TBD
+ *
+ */
+/* ===========================================================================*/
+void KPI_OmxCompInit(OMX_HANDLETYPE hComponent)
+{
+ OMX_VERSIONTYPE nVersionComp;
+ OMX_VERSIONTYPE nVersionSpec;
+ OMX_UUIDTYPE compUUID;
+ char compName[OMX_MAX_STRINGNAME_SIZE];
+ char* p;
+ OMX_U32 omx_cnt;
+ struct timespec tp;
+
+ /* Check if some profiling events have been enabled/disabled */
+ KPI_OmxCompKpiUpdateStatus();
+
+ if ( !(kpi_status & KPI_BUFFER_EVENTS) )
+ return;
+
+ /* First init: clear kpi_omx_monitor components */
+ if( kpi_omx_monitor_cnt == 0) {
+ for (omx_cnt = 0; omx_cnt < MAX_OMX_COMP; omx_cnt++) {
+ /*clear handler registry */
+ kpi_omx_monitor[omx_cnt].hComponent = 0;
+ }
+ }
+
+ /* find an empty monitoring structure */
+ for( omx_cnt = 0; omx_cnt < MAX_OMX_COMP; omx_cnt++ ) {
+ if( kpi_omx_monitor[omx_cnt].hComponent == 0 ) break;
+ }
+
+	/* too many components started, do not monitor */
+ if( omx_cnt >= MAX_OMX_COMP) return;
+
+ /* current comp num and update */
+ kpi_omx_monitor_cnt++;
+
+ /* register the component handle */
+ kpi_omx_monitor[omx_cnt].hComponent = hComponent;
+
+ /* reset event counts */
+ kpi_omx_monitor[omx_cnt].count_ftb = 0;
+ kpi_omx_monitor[omx_cnt].count_fbd = 0;
+ kpi_omx_monitor[omx_cnt].count_etb = 0;
+ kpi_omx_monitor[omx_cnt].count_ebd = 0;
+
+ /* register the component name */
+ ((OMX_COMPONENTTYPE*) hComponent)->GetComponentVersion(hComponent, compName, &nVersionComp, &nVersionSpec, &compUUID);
+
+ /* get the end of the string compName... */
+ p = compName + strlen( compName ) - 1;
+ while( (*p != '.' ) && (p != compName) ) p--;
+ strncpy(kpi_omx_monitor[omx_cnt].name, p + 1, 6);
+
+ /* trace component init */
+ DOMX_PROF("<KPI> OMX %-6s Init %-8lld", kpi_omx_monitor[omx_cnt].name, KPI_GetTime());
+
+ return;
+}
+
+/* ===========================================================================*/
+/**
+ * @name KPI_OmxCompDeinit()
+ * @brief Reset monitoring structure for component stopping
+ * @param void
+ * @return void
+ * @sa TBD
+ *
+ */
+/* ===========================================================================*/
+void KPI_OmxCompDeinit( OMX_HANDLETYPE hComponent)
+{
+ OMX_U32 omx_cnt;
+
+ if ( !(kpi_status & KPI_BUFFER_EVENTS) )
+ return;
+
+ if( kpi_omx_monitor_cnt == 0) return;
+
+ /* identify the component from the registry */
+ for( omx_cnt = 0; omx_cnt < MAX_OMX_COMP; omx_cnt++ ) {
+ if( kpi_omx_monitor[omx_cnt].hComponent == hComponent ) break;
+	}
+
+	/* component not registered, nothing to unregister or trace */
+	if( omx_cnt >= MAX_OMX_COMP ) return;
+
+	/* trace component deinit */
+	DOMX_PROF( "<KPI> OMX %-6s Deinit %-8lld", kpi_omx_monitor[omx_cnt].name, KPI_GetTime());
+
+ /* unregister the component */
+ kpi_omx_monitor[omx_cnt].hComponent = 0;
+
+ kpi_omx_monitor_cnt--;
+
+ return;
+}
+
+/* ===========================================================================*/
+/**
+ * @name KPI_OmxCompBufferEvent()
+ * @brief Trace FTB/ETB/FBD/EBD events
+ * @param void
+ * @return void
+ * @sa TBD
+ *
+ */
+/* ===========================================================================*/
+void KPI_OmxCompBufferEvent(enum KPI_BUFFER_EVENT event, OMX_HANDLETYPE hComponent, PROXY_BUFFER_INFO* pBuffer)
+{
+ OMX_U32 omx_cnt;
+
+ if ( !(kpi_status & KPI_BUFFER_EVENTS) )
+ return;
+
+ if (kpi_omx_monitor_cnt == 0) return;
+
+ /* identify the component from the registry */
+ for (omx_cnt = 0; omx_cnt < MAX_OMX_COMP; omx_cnt++) {
+ if( kpi_omx_monitor[omx_cnt].hComponent == hComponent ) break;
+ }
+
+ /* Update counts and trace the event */
+ if( omx_cnt < MAX_OMX_COMP ) {
+ /* trace the event, we trace remote address to correlate to Ducati trace */
+ switch(event) {
+ case KPI_BUFFER_ETB:
+ DOMX_PROF("ETB %-6s %-4u %-8lld x%-8x", kpi_omx_monitor[omx_cnt].name, \
+ (unsigned int)++kpi_omx_monitor[omx_cnt].count_etb, KPI_GetTime(), (unsigned int)pBuffer->pBufHeaderRemote);
+ break;
+ case KPI_BUFFER_FTB:
+ DOMX_PROF("FTB %-6s %-4u %-8lld x%-8x", kpi_omx_monitor[omx_cnt].name, \
+ (unsigned int)++kpi_omx_monitor[omx_cnt].count_ftb, KPI_GetTime(), (unsigned int)pBuffer->pBufHeaderRemote);
+ break;
+ case KPI_BUFFER_EBD:
+ DOMX_PROF("EBD %-6s %-4u %-8lld x%-8x", kpi_omx_monitor[omx_cnt].name, \
+ (unsigned int)++kpi_omx_monitor[omx_cnt].count_ebd, KPI_GetTime(), (unsigned int)pBuffer->pBufHeaderRemote);
+ break;
+ /* we add timestamp metadata because this is a unique identifier of buffer among all SW layers */
+ case KPI_BUFFER_FBD:
+ DOMX_PROF("FBD %-6s %-4u %-8lld x%-8x %lld", kpi_omx_monitor[omx_cnt].name, \
+ (unsigned int)++kpi_omx_monitor[omx_cnt].count_fbd, KPI_GetTime(), (unsigned int)pBuffer->pBufHeaderRemote, pBuffer->pBufHeader->nTimeStamp);
+ break;
+
+ }
+ }
+
+ return;
+}
diff --git a/domx/mm_osal/Android.mk b/domx/mm_osal/Android.mk
index a4862ec..f41f131 100644
--- a/domx/mm_osal/Android.mk
+++ b/domx/mm_osal/Android.mk
@@ -18,7 +18,8 @@ LOCAL_C_INCLUDES += \
LOCAL_SHARED_LIBRARIES := \
libdl \
liblog \
- libc
+ libc \
+ libcutils
LOCAL_CFLAGS += -DOMAP_2430 -DOMX_DEBUG -D_Android -D_POSIX_VERSION_1_
LOCAL_CFLAGS += -DTIMM_OSAL_DEBUG_TRACE_DETAIL=1 # quiet
diff --git a/domx/mm_osal/inc/timm_osal_trace.h b/domx/mm_osal/inc/timm_osal_trace.h
index 3c4b8d7..0f02f14 100755
--- a/domx/mm_osal/inc/timm_osal_trace.h
+++ b/domx/mm_osal/inc/timm_osal_trace.h
@@ -74,6 +74,17 @@ extern "C"
TIMM_OSAL_TRACEGRP_SIMCOPALGOS = (1 << 8)
} TIMM_OSAL_TRACEGRP;
+ typedef enum TIMM_OSAL_TRACE_LEVEL_TYPE
+ {
+ TIMM_OSAL_TRACE_LEVEL_ERROR = 1,
+ TIMM_OSAL_TRACE_LEVEL_WARNING = 2,
+ TIMM_OSAL_TRACE_LEVEL_PROFILING = 3,
+ TIMM_OSAL_TRACE_LEVEL_INFO = 4,
+ TIMM_OSAL_TRACE_LEVEL_DEBUG = 5,
+ TIMM_OSAL_TRACE_LEVEL_ENTERING = 6,
+ TIMM_OSAL_TRACE_LEVEL_EXITING = TIMM_OSAL_TRACE_LEVEL_ENTERING
+ } TIMM_OSAL_TRACE_LEVEL;
+
/**
* The OSAL debug trace level can be set at runtime by defining the environment
@@ -82,9 +93,10 @@ extern "C"
* Level 0 - No trace
* Level 1 - Error [Errors]
* Level 2 - Warning [Warnings that are useful to know about]
-* Level 3 - Info [General information]
-* Level 4 - Debug [most-commonly used statement for us developers]
-* Level 5 - Trace ["ENTERING <function>" and "EXITING <function>" statements]
+* Level 3 - Profiling [performance analysis trace that must not impact use case perf]
+* Level 4 - Info [General information]
+* Level 5 - Debug [most-commonly used statement for us developers]
+* Level 6 - Trace ["ENTERING <function>" and "EXITING <function>" statements]
*
* Example: if TIMM_OSAL_DEBUG_TRACE_LEVEL=3, then level 1,2 and 3 traces messages
* are enabled.
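
Put differently, a statement is printed when its level is at or below the configured threshold; a minimal sketch of that rule (the real comparison lives in the trace implementation, not shown in this hunk):

#include <stdio.h>
#include <stdlib.h>

static int trace_level = 1;   /* DEFAULT_TRACE_LEVEL: errors only */

static int trace_enabled(int msg_level)
{
    return msg_level <= trace_level;
}

int main(void)
{
    char *val = getenv("TIMM_OSAL_DEBUG_TRACE_LEVEL");   /* e.g. export TIMM_OSAL_DEBUG_TRACE_LEVEL=3 */

    if (val)
        trace_level = strtol(val, NULL, 0);

    printf("errors     %s\n", trace_enabled(1) ? "on" : "off");
    printf("profiling  %s\n", trace_enabled(3) ? "on" : "off");
    printf("enter/exit %s\n", trace_enabled(6) ? "on" : "off");
    return 0;
}
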
@@ -103,6 +115,13 @@ extern "C"
const short tracegrp; /* TIMM_OSAL_TRACEGRP */
} __TIMM_OSAL_TRACE_LOCATION;
+
+/**
+ * Trace level update function. Updates trace level if env variable
+ * or Android property is set. Env variable has precedence over it
+ */
+ void TIMM_OSAL_UpdateTraceLevel(void);
+
/**
* Trace implementation function. Not part of public API. Default
* implementation uses printf(), but you can use LD_PRELOAD to plug in
@@ -133,6 +152,11 @@ extern "C"
#define TIMM_OSAL_Warning(fmt,...) TIMM_OSAL_WarningExt(TIMM_OSAL_TRACEGRP_SYSTEM, fmt, ##__VA_ARGS__)
/**
+* TIMM_OSAL_Profiling() -- performance analysis trace that must not impact use case perf
+*/
+#define TIMM_OSAL_Profiling(fmt,...) TIMM_OSAL_ProfilingExt(TIMM_OSAL_TRACEGRP_SYSTEM, fmt, ##__VA_ARGS__)
+
+/**
* TIMM_OSAL_Info() -- general information
*/
#define TIMM_OSAL_Info(fmt,...) TIMM_OSAL_InfoExt(TIMM_OSAL_TRACEGRP_SYSTEM, fmt, ##__VA_ARGS__)
@@ -156,29 +180,34 @@ extern "C"
/**
* TIMM_OSAL_ErrorExt() -- Fatal errors
*/
-#define TIMM_OSAL_ErrorExt(tracegrp, fmt, ...) __TIMM_OSAL_Trace(1, tracegrp, "ERROR: "fmt, ##__VA_ARGS__)
+#define TIMM_OSAL_ErrorExt(tracegrp, fmt, ...) __TIMM_OSAL_Trace(TIMM_OSAL_TRACE_LEVEL_ERROR, tracegrp, "ERROR: "fmt, ##__VA_ARGS__)
/**
* TIMM_OSAL_WarningExt() -- Warnings that are useful to know about
*/
-#define TIMM_OSAL_WarningExt(tracegrp, fmt, ...) __TIMM_OSAL_Trace(2, tracegrp, "WARNING: "fmt, ##__VA_ARGS__)
+#define TIMM_OSAL_WarningExt(tracegrp, fmt, ...) __TIMM_OSAL_Trace(TIMM_OSAL_TRACE_LEVEL_WARNING, tracegrp, "WARNING: "fmt, ##__VA_ARGS__)
+
+/**
+* TIMM_OSAL_ProfilingExt() -- performance analysis trace that must not impact use case perf
+*/
+#define TIMM_OSAL_ProfilingExt(tracegrp, fmt, ...) __TIMM_OSAL_Trace(TIMM_OSAL_TRACE_LEVEL_PROFILING, tracegrp, "PROFILING: "fmt, ##__VA_ARGS__)
/**
* TIMM_OSAL_InfoExt() -- general information
*/
-#define TIMM_OSAL_InfoExt(tracegrp, fmt, ...) __TIMM_OSAL_Trace(3, tracegrp, "INFO: "fmt, ##__VA_ARGS__)
+#define TIMM_OSAL_InfoExt(tracegrp, fmt, ...) __TIMM_OSAL_Trace(TIMM_OSAL_TRACE_LEVEL_INFO, tracegrp, "INFO: "fmt, ##__VA_ARGS__)
/**
* TIMM_OSAL_DebugExt() -- most-commonly used statement for us developers
*/
-#define TIMM_OSAL_DebugExt(tracegrp, fmt, ...) __TIMM_OSAL_Trace(4, tracegrp, "TRACE: "fmt, ##__VA_ARGS__)
+#define TIMM_OSAL_DebugExt(tracegrp, fmt, ...) __TIMM_OSAL_Trace(TIMM_OSAL_TRACE_LEVEL_DEBUG, tracegrp, "TRACE: "fmt, ##__VA_ARGS__)
/**
* TIMM_OSAL_EnteringExt() -- "ENTERING <function>" statements
* TIMM_OSAL_ExitingExt() -- "EXITING <function>" statements
*/
-#define TIMM_OSAL_EnteringExt(tracegrp, fmt, ...) __TIMM_OSAL_Trace(5, tracegrp, "ENTER: "fmt, ##__VA_ARGS__)
-#define TIMM_OSAL_ExitingExt(tracegrp, fmt, ...) __TIMM_OSAL_Trace(5, tracegrp, "EXIT: "fmt, ##__VA_ARGS__)
+#define TIMM_OSAL_EnteringExt(tracegrp, fmt, ...) __TIMM_OSAL_Trace(TIMM_OSAL_TRACE_LEVEL_ENTERING, tracegrp, "ENTER: "fmt, ##__VA_ARGS__)
+#define TIMM_OSAL_ExitingExt(tracegrp, fmt, ...) __TIMM_OSAL_Trace(TIMM_OSAL_TRACE_LEVEL_EXITING, tracegrp, "EXIT: "fmt, ##__VA_ARGS__)
#ifdef __cplusplus
diff --git a/domx/mm_osal/src/timm_osal_pipes.c b/domx/mm_osal/src/timm_osal_pipes.c
index 0354dd9..1c38354 100755
--- a/domx/mm_osal/src/timm_osal_pipes.c
+++ b/domx/mm_osal/src/timm_osal_pipes.c
@@ -322,8 +322,8 @@ TIMM_OSAL_ERRORTYPE TIMM_OSAL_ReadFromPipe(TIMM_OSAL_PTR pPipe,
bReturnStatus = TIMM_OSAL_ERR_PIPE_EMPTY;
goto EXIT;
}
- if ((timeout != TIMM_OSAL_NO_SUSPEND) &&
- (timeout != TIMM_OSAL_SUSPEND))
+	if ((timeout != TIMM_OSAL_NO_SUSPEND) &&
+ (timeout != (TIMM_OSAL_S32)TIMM_OSAL_SUSPEND))
{
TIMM_OSAL_Warning("Only infinite or no timeouts \
supported. Going to read with infinite timeout now");
@@ -361,22 +361,8 @@ TIMM_OSAL_ERRORTYPE TIMM_OSAL_ClearPipe(TIMM_OSAL_PTR pPipe)
{
TIMM_OSAL_ERRORTYPE bReturnStatus = TIMM_OSAL_ERR;
-#if 0
- TIMM_OSAL_ERRORTYPE bReturnStatus = TIMM_OSAL_ERR_NONE;
- STATUS status = NU_SUCCESS;
-
- TIMM_OSAL_PIPE *pHandle = (TIMM_OSAL_PIPE *) pPipe;
+ TIMM_OSAL_Warning("This function is currently not implemented");
- status = NU_Reset_Pipe(&(pHandle->pipe));
-
- if (NU_SUCCESS != status)
- {
- TIMM_OSAL_Error("NU_Reset_Pipe failed!!!");
- bReturnStatus =
- TIMM_OSAL_ERR_CREATE(TIMM_OSAL_ERR, TIMM_OSAL_COMP_PIPES,
- status);
- }
-#endif
return bReturnStatus;
}
@@ -395,19 +381,6 @@ TIMM_OSAL_ERRORTYPE TIMM_OSAL_IsPipeReady(TIMM_OSAL_PTR pPipe)
TIMM_OSAL_ERRORTYPE bReturnStatus = TIMM_OSAL_ERR;
TIMM_OSAL_PIPE *pHandle = (TIMM_OSAL_PIPE *) pPipe;
-#if 0
- TIMM_OSAL_PIPE *pHandle = (TIMM_OSAL_PIPE *) pPipe;
- PI_PCB *pipe = (PI_PCB *) & (pHandle->pipe);
-
- if (0 != pipe->pi_messages)
- {
- return TIMM_OSAL_ERR_NONE;
- } else
- {
- return TIMM_OSAL_ERR_NOT_READY;
- }
-#endif
-
if (pHandle->messageCount <= 0)
{
bReturnStatus = TIMM_OSAL_ERR_NOT_READY;
@@ -435,14 +408,6 @@ TIMM_OSAL_ERRORTYPE TIMM_OSAL_GetPipeReadyMessageCount(TIMM_OSAL_PTR pPipe,
{
TIMM_OSAL_ERRORTYPE bReturnStatus = TIMM_OSAL_ERR_NONE;
TIMM_OSAL_PIPE *pHandle = (TIMM_OSAL_PIPE *) pPipe;
-#if 0
-
- TIMM_OSAL_PIPE *pHandle = (TIMM_OSAL_PIPE *) pPipe;
- PI_PCB *pipe = (PI_PCB *) & (pHandle->pipe);
-
- *count = pipe->pi_messages;
-
-#endif
*count = pHandle->messageCount;
return bReturnStatus;
diff --git a/domx/mm_osal/src/timm_osal_trace.c b/domx/mm_osal/src/timm_osal_trace.c
index e9aae1b..498054f 100755
--- a/domx/mm_osal/src/timm_osal_trace.c
+++ b/domx/mm_osal/src/timm_osal_trace.c
@@ -59,6 +59,7 @@
#ifdef _Android
#define LOG_TAG "DOMX"
#include <utils/Log.h>
+#include <cutils/properties.h>
#endif
/**
@@ -73,7 +74,7 @@
#define TIMM_OSAL_DEBUG_TRACE_DETAIL 2
#endif
-#define DEFAULT_TRACE_LEVEL 1
+#define DEFAULT_TRACE_LEVEL TIMM_OSAL_TRACE_LEVEL_ERROR
static int trace_level = -1;
@@ -90,6 +91,34 @@ static const char *simplify_path(const char *file)
return file;
}
+void TIMM_OSAL_UpdateTraceLevel(void)
+{
+ char *val = getenv("TIMM_OSAL_DEBUG_TRACE_LEVEL");
+
+ if (val)
+ {
+ trace_level = strtol(val, NULL, 0);
+ }
+ else
+ {
+#ifdef _Android
+ char value[PROPERTY_VALUE_MAX];
+ int val;
+
+ property_get("debug.domx.trace_level", value, "0");
+ val = atoi(value);
+ if ( (!val) || (val < 0) )
+ {
+ trace_level = DEFAULT_TRACE_LEVEL;
+ }
+ else
+ trace_level = val;
+#else
+ trace_level = DEFAULT_TRACE_LEVEL;
+#endif
+ }
+}
+
void __TIMM_OSAL_TraceFunction(const __TIMM_OSAL_TRACE_LOCATION * loc,
const char *fmt, ...)
{
@@ -108,23 +137,38 @@ void __TIMM_OSAL_TraceFunction(const __TIMM_OSAL_TRACE_LOCATION * loc,
#ifdef _Android
+#if 0 // Original for reference
#if ( TIMM_OSAL_DEBUG_TRACE_DETAIL > 1 )
ALOGD("%s:%d\t%s()\t", simplify_path(loc->file), loc->line,
loc->function);
#endif
+#else // Prints function_name for ERROR, WARNING and ENTRY/EXIT
+ if ( (loc->level == TIMM_OSAL_TRACE_LEVEL_ERROR) || (loc->level == TIMM_OSAL_TRACE_LEVEL_WARNING) || (loc->level == TIMM_OSAL_TRACE_LEVEL_ENTERING) )
+ ALOGD("%s:%d\t%s()\t", simplify_path(loc->file), loc->line,
+ loc->function);
+#endif
+
char string[1000];
vsprintf(string, fmt, ap);
ALOGD("%s",string);
#else
+#if 0 // Original for reference
#if ( TIMM_OSAL_DEBUG_TRACE_DETAIL > 1 )
printf("%s:%d\t%s()\t", simplify_path(loc->file), loc->line,
loc->function);
#endif
+#else // Prints function_name for ERROR, WARNING and ENTRY/EXIT
+	if ( (loc->level == TIMM_OSAL_TRACE_LEVEL_ERROR) || (loc->level == TIMM_OSAL_TRACE_LEVEL_WARNING) || (loc->level == TIMM_OSAL_TRACE_LEVEL_ENTERING) )
+		printf("%s:%d\t%s()\t", simplify_path(loc->file), loc->line,
+		    loc->function);
+#endif
+
vprintf(fmt, ap);
#endif
+
va_end(ap);
}
}
diff --git a/domx/omx_core/Android.mk b/domx/omx_core/Android.mk
index 1c217b3..843ace9 100644
--- a/domx/omx_core/Android.mk
+++ b/domx/omx_core/Android.mk
@@ -12,8 +12,7 @@ LOCAL_C_INCLUDES += \
LOCAL_SHARED_LIBRARIES := \
libdl \
- liblog \
- libmm_osal
+ libmm_osal
LOCAL_CFLAGS += -DSTATIC_TABLE -D_Android -DCHECK_SECURE_STATE
LOCAL_MODULE:= libOMX_Core
diff --git a/domx/omx_core/inc/OMX_Audio.h b/domx/omx_core/inc/OMX_Audio.h
index 04f1a99..42caa9f 100755
--- a/domx/omx_core/inc/OMX_Audio.h
+++ b/domx/omx_core/inc/OMX_Audio.h
@@ -1,23 +1,40 @@
+/* ------------------------------------------------------------------
+ * Copyright (C) 1998-2009 PacketVideo
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
+ * express or implied.
+ * See the License for the specific language governing permissions
+ * and limitations under the License.
+ * -------------------------------------------------------------------
+ */
/*
- * Copyright (c) 2008 The Khronos Group Inc.
- *
+ * Copyright (c) 2008 The Khronos Group Inc.
+ *
* Permission is hereby granted, free of charge, to any person obtaining
* a copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sublicense, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject
- * to the following conditions:
+ * to the following conditions:
* The above copyright notice and this permission notice shall be included
- * in all copies or substantial portions of the Software.
- *
+ * in all copies or substantial portions of the Software.
+ *
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
* OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
- * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+ * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*
*/
@@ -36,7 +53,7 @@ extern "C" {
/* Each OMX header must include all required header files to allow the
* header to compile without errors. The includes below are required
- * for this header file to compile successfully
+ * for this header file to compile successfully
*/
#include <OMX_Core.h>
@@ -44,7 +61,7 @@ extern "C" {
/** @defgroup midi MIDI
* @ingroup audio
*/
-
+
/** @defgroup effects Audio effects
* @ingroup audio
*/
@@ -54,10 +71,10 @@ extern "C" {
* @{
*/
-/** Enumeration used to define the possible audio codings.
- * If "OMX_AUDIO_CodingUnused" is selected, the coding selection must
- * be done in a vendor specific way. Since this is for an audio
- * processing element this enum is relevant. However, for another
+/** Enumeration used to define the possible audio codings.
+ * If "OMX_AUDIO_CodingUnused" is selected, the coding selection must
+ * be done in a vendor specific way. Since this is for an audio
+ * processing element this enum is relevant. However, for another
* type of component other enums would be in this area.
*/
typedef enum OMX_AUDIO_CODINGTYPE {
@@ -89,14 +106,15 @@ typedef enum OMX_AUDIO_CODINGTYPE {
OMX_AUDIO_CodingWMA, /**< Any variant of WMA encoded data */
OMX_AUDIO_CodingRA, /**< Any variant of RA encoded data */
OMX_AUDIO_CodingMIDI, /**< Any variant of MIDI encoded data */
- OMX_AUDIO_CodingKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
+ OMX_AUDIO_CodingFLAC, /**< Any variant of FLAC encoded data */
+ OMX_AUDIO_CodingKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
OMX_AUDIO_CodingVendorStartUnused = 0x7F000000, /**< Reserved region for introducing Vendor Extensions */
OMX_AUDIO_CodingMax = 0x7FFFFFFF
} OMX_AUDIO_CODINGTYPE;
-/** The PortDefinition structure is used to define all of the parameters
- * necessary for the compliant component to setup an input or an output audio
+/** The PortDefinition structure is used to define all of the parameters
+ * necessary for the compliant component to setup an input or an output audio
* path. If additional information is needed to define the parameters of the
* port (such as frequency), additional structures must be sent such as the
* OMX_AUDIO_PARAM_PCMMODETYPE structure to supply the extra parameters for the port.
@@ -104,11 +122,11 @@ typedef enum OMX_AUDIO_CODINGTYPE {
typedef struct OMX_AUDIO_PORTDEFINITIONTYPE {
OMX_STRING cMIMEType; /**< MIME type of data for the port */
OMX_NATIVE_DEVICETYPE pNativeRender; /** < platform specific reference
- for an output device,
+ for an output device,
otherwise this field is 0 */
- OMX_BOOL bFlagErrorConcealment; /**< Turns on error concealment if it is
+ OMX_BOOL bFlagErrorConcealment; /**< Turns on error concealment if it is
supported by the OMX component */
- OMX_AUDIO_CODINGTYPE eEncoding; /**< Type of data expected for this
+ OMX_AUDIO_CODINGTYPE eEncoding; /**< Type of data expected for this
port (e.g. PCM, AMR, MP3, etc) */
} OMX_AUDIO_PORTDEFINITIONTYPE;
@@ -125,15 +143,15 @@ typedef struct OMX_AUDIO_PARAM_PORTFORMATTYPE {
} OMX_AUDIO_PARAM_PORTFORMATTYPE;
-/** PCM mode type */
-typedef enum OMX_AUDIO_PCMMODETYPE {
- OMX_AUDIO_PCMModeLinear = 0, /**< Linear PCM encoded data */
- OMX_AUDIO_PCMModeALaw, /**< A law PCM encoded data (G.711) */
- OMX_AUDIO_PCMModeMULaw, /**< Mu law PCM encoded data (G.711) */
- OMX_AUDIO_PCMModeKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
+/** PCM mode type */
+typedef enum OMX_AUDIO_PCMMODETYPE {
+ OMX_AUDIO_PCMModeLinear = 0, /**< Linear PCM encoded data */
+ OMX_AUDIO_PCMModeALaw, /**< A law PCM encoded data (G.711) */
+ OMX_AUDIO_PCMModeMULaw, /**< Mu law PCM encoded data (G.711) */
+ OMX_AUDIO_PCMModeKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
OMX_AUDIO_PCMModeVendorStartUnused = 0x7F000000, /**< Reserved region for introducing Vendor Extensions */
- OMX_AUDIO_PCMModeMax = 0x7FFFFFFF
-} OMX_AUDIO_PCMMODETYPE;
+ OMX_AUDIO_PCMModeMax = 0x7FFFFFFF
+} OMX_AUDIO_PCMMODETYPE;
typedef enum OMX_AUDIO_CHANNELTYPE {
@@ -147,45 +165,45 @@ typedef enum OMX_AUDIO_CHANNELTYPE {
OMX_AUDIO_ChannelCS = 0x7, /**< Back surround */
OMX_AUDIO_ChannelLR = 0x8, /**< Left rear. */
OMX_AUDIO_ChannelRR = 0x9, /**< Right rear. */
- OMX_AUDIO_ChannelKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
+ OMX_AUDIO_ChannelKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
OMX_AUDIO_ChannelVendorStartUnused = 0x7F000000, /**< Reserved region for introducing Vendor Extensions */
- OMX_AUDIO_ChannelMax = 0x7FFFFFFF
+ OMX_AUDIO_ChannelMax = 0x7FFFFFFF
} OMX_AUDIO_CHANNELTYPE;
#define OMX_AUDIO_MAXCHANNELS 16 /**< maximum number distinct audio channels that a buffer may contain */
#define OMX_MIN_PCMPAYLOAD_MSEC 5 /**< Minimum audio buffer payload size for uncompressed (PCM) audio */
-/** PCM format description */
-typedef struct OMX_AUDIO_PARAM_PCMMODETYPE {
- OMX_U32 nSize; /**< Size of this structure, in Bytes */
- OMX_VERSIONTYPE nVersion; /**< OMX specification version information */
- OMX_U32 nPortIndex; /**< port that this structure applies to */
- OMX_U32 nChannels; /**< Number of channels (e.g. 2 for stereo) */
- OMX_NUMERICALDATATYPE eNumData; /**< indicates PCM data as signed or unsigned */
- OMX_ENDIANTYPE eEndian; /**< indicates PCM data as little or big endian */
- OMX_BOOL bInterleaved; /**< True for normal interleaved data; false for
- non-interleaved data (e.g. block data) */
- OMX_U32 nBitPerSample; /**< Bit per sample */
- OMX_U32 nSamplingRate; /**< Sampling rate of the source data. Use 0 for
- variable or unknown sampling rate. */
- OMX_AUDIO_PCMMODETYPE ePCMMode; /**< PCM mode enumeration */
+/** PCM format description */
+typedef struct OMX_AUDIO_PARAM_PCMMODETYPE {
+ OMX_U32 nSize; /**< Size of this structure, in Bytes */
+ OMX_VERSIONTYPE nVersion; /**< OMX specification version information */
+ OMX_U32 nPortIndex; /**< port that this structure applies to */
+ OMX_U32 nChannels; /**< Number of channels (e.g. 2 for stereo) */
+ OMX_NUMERICALDATATYPE eNumData; /**< indicates PCM data as signed or unsigned */
+ OMX_ENDIANTYPE eEndian; /**< indicates PCM data as little or big endian */
+ OMX_BOOL bInterleaved; /**< True for normal interleaved data; false for
+ non-interleaved data (e.g. block data) */
+ OMX_U32 nBitPerSample; /**< Bit per sample */
+ OMX_U32 nSamplingRate; /**< Sampling rate of the source data. Use 0 for
+ variable or unknown sampling rate. */
+ OMX_AUDIO_PCMMODETYPE ePCMMode; /**< PCM mode enumeration */
OMX_AUDIO_CHANNELTYPE eChannelMapping[OMX_AUDIO_MAXCHANNELS]; /**< Slot i contains channel defined by eChannelMap[i] */
-} OMX_AUDIO_PARAM_PCMMODETYPE;
+} OMX_AUDIO_PARAM_PCMMODETYPE;
/** Audio channel mode. This is used by both AAC and MP3, although the names are more appropriate
- * for the MP3. For example, JointStereo for MP3 is CouplingChannels for AAC.
+ * for the MP3. For example, JointStereo for MP3 is CouplingChannels for AAC.
*/
typedef enum OMX_AUDIO_CHANNELMODETYPE {
- OMX_AUDIO_ChannelModeStereo = 0, /**< 2 channels, the bitrate allocation between those
+ OMX_AUDIO_ChannelModeStereo = 0, /**< 2 channels, the bitrate allocation between those
two channels changes accordingly to each channel information */
- OMX_AUDIO_ChannelModeJointStereo, /**< mode that takes advantage of what is common between
+ OMX_AUDIO_ChannelModeJointStereo, /**< mode that takes advantage of what is common between
2 channels for higher compression gain */
- OMX_AUDIO_ChannelModeDual, /**< 2 mono-channels, each channel is encoded with half
+ OMX_AUDIO_ChannelModeDual, /**< 2 mono-channels, each channel is encoded with half
the bitrate of the overall bitrate */
OMX_AUDIO_ChannelModeMono, /**< Mono channel mode */
- OMX_AUDIO_ChannelModeKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
+ OMX_AUDIO_ChannelModeKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
OMX_AUDIO_ChannelModeVendorStartUnused = 0x7F000000, /**< Reserved region for introducing Vendor Extensions */
OMX_AUDIO_ChannelModeMax = 0x7FFFFFFF
} OMX_AUDIO_CHANNELMODETYPE;
@@ -195,7 +213,7 @@ typedef enum OMX_AUDIO_MP3STREAMFORMATTYPE {
OMX_AUDIO_MP3StreamFormatMP1Layer3 = 0, /**< MP3 Audio MPEG 1 Layer 3 Stream format */
OMX_AUDIO_MP3StreamFormatMP2Layer3, /**< MP3 Audio MPEG 2 Layer 3 Stream format */
OMX_AUDIO_MP3StreamFormatMP2_5Layer3, /**< MP3 Audio MPEG2.5 Layer 3 Stream format */
- OMX_AUDIO_MP3StreamFormatKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
+ OMX_AUDIO_MP3StreamFormatKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
OMX_AUDIO_MP3StreamFormatVendorStartUnused = 0x7F000000, /**< Reserved region for introducing Vendor Extensions */
OMX_AUDIO_MP3StreamFormatMax = 0x7FFFFFFF
} OMX_AUDIO_MP3STREAMFORMATTYPE;
@@ -225,7 +243,7 @@ typedef enum OMX_AUDIO_AACSTREAMFORMATTYPE {
OMX_AUDIO_AACStreamFormatADIF, /**< AAC Audio Data Interchange Format */
OMX_AUDIO_AACStreamFormatMP4FF, /**< AAC inside MPEG-4/ISO File Format */
OMX_AUDIO_AACStreamFormatRAW, /**< AAC Raw Format */
- OMX_AUDIO_AACStreamFormatKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
+ OMX_AUDIO_AACStreamFormatKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
OMX_AUDIO_AACStreamFormatVendorStartUnused = 0x7F000000, /**< Reserved region for introducing Vendor Extensions */
OMX_AUDIO_AACStreamFormatMax = 0x7FFFFFFF
} OMX_AUDIO_AACSTREAMFORMATTYPE;
@@ -244,7 +262,8 @@ typedef enum OMX_AUDIO_AACPROFILETYPE{
OMX_AUDIO_AACObjectERLC = 17, /**< ER AAC Low Complexity object (Error Resilient AAC-LC) */
OMX_AUDIO_AACObjectLD = 23, /**< AAC Low Delay object (Error Resilient) */
OMX_AUDIO_AACObjectHE_PS = 29, /**< AAC High Efficiency with Parametric Stereo coding (HE-AAC v2, object type PS) */
- OMX_AUDIO_AACObjectKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
+ OMX_AUDIO_AACObjectELD = 39, /** AAC Enhanced Low Delay. NOTE: Pending Khronos standardization **/
+ OMX_AUDIO_AACObjectKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
OMX_AUDIO_AACObjectVendorStartUnused = 0x7F000000, /**< Reserved region for introducing Vendor Extensions */
OMX_AUDIO_AACObjectMax = 0x7FFFFFFF
} OMX_AUDIO_AACPROFILETYPE;
@@ -317,18 +336,31 @@ typedef struct OMX_AUDIO_PARAM_VORBISTYPE {
constraints to be enforced by the encoder. This mode can
be slower, and may also be lower quality. It is
primarily useful for streaming. */
- OMX_BOOL bDownmix; /**< Downmix input from stereo to mono (has no effect on
- non-stereo streams). Useful for lower-bitrate encoding. */
+ OMX_BOOL bDownmix; /**< Downmix input from stereo to mono (has no effect on
+ non-stereo streams). Useful for lower-bitrate encoding. */
} OMX_AUDIO_PARAM_VORBISTYPE;
+/** FLAC params */
+typedef struct OMX_AUDIO_PARAM_FLACTYPE {
+ OMX_U32 nSize; /**< size of the structure in bytes */
+ OMX_VERSIONTYPE nVersion; /**< OMX specification version information */
+ OMX_U32 nPortIndex; /**< port that this structure applies to */
+ OMX_U32 nChannels; /**< Number of channels */
+ OMX_U32 nSampleRate; /**< Sampling rate of the source data. Use 0 for
+ unknown sampling rate. */
+ OMX_U32 nCompressionLevel;/**< FLAC compression level, from 0 (fastest compression)
+                                  to 8 (highest compression) */
+} OMX_AUDIO_PARAM_FLACTYPE;
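
A hedged sketch of filling the new FLAC parameter block before handing it to OMX_SetParameter (stand-in typedefs so it compiles alone; the real layout and the index used to apply it come from OMX_Types.h/OMX_Audio.h and the core, not this sketch):

#include <stdio.h>
#include <string.h>
#include <stdint.h>

/* stand-ins for the sketch; the real definitions live in the OMX headers */
typedef uint32_t OMX_U32;
typedef union { OMX_U32 nVersion; } OMX_VERSIONTYPE;
typedef struct {
    OMX_U32 nSize;
    OMX_VERSIONTYPE nVersion;
    OMX_U32 nPortIndex;
    OMX_U32 nChannels;
    OMX_U32 nSampleRate;
    OMX_U32 nCompressionLevel;
} OMX_AUDIO_PARAM_FLACTYPE;

int main(void)
{
    OMX_AUDIO_PARAM_FLACTYPE tFlac;

    memset(&tFlac, 0, sizeof(tFlac));
    tFlac.nSize = sizeof(tFlac);       /* OMX params always carry their size */
    tFlac.nPortIndex = 0;              /* hypothetical port index */
    tFlac.nChannels = 2;
    tFlac.nSampleRate = 44100;
    tFlac.nCompressionLevel = 5;       /* 0 = fastest, 8 = highest compression */

    printf("FLAC: %u ch @ %u Hz, level %u\n", (unsigned)tFlac.nChannels,
           (unsigned)tFlac.nSampleRate, (unsigned)tFlac.nCompressionLevel);
    return 0;
}
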
+
+
/** WMA Version */
typedef enum OMX_AUDIO_WMAFORMATTYPE {
OMX_AUDIO_WMAFormatUnused = 0, /**< format unused or unknown */
OMX_AUDIO_WMAFormat7, /**< Windows Media Audio format 7 */
OMX_AUDIO_WMAFormat8, /**< Windows Media Audio format 8 */
OMX_AUDIO_WMAFormat9, /**< Windows Media Audio format 9 */
- OMX_AUDIO_WMAFormatKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
+ OMX_AUDIO_WMAFormatKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
OMX_AUDIO_WMAFormatVendorStartUnused = 0x7F000000, /**< Reserved region for introducing Vendor Extensions */
OMX_AUDIO_WMAFormatMax = 0x7FFFFFFF
} OMX_AUDIO_WMAFORMATTYPE;
@@ -340,7 +372,7 @@ typedef enum OMX_AUDIO_WMAPROFILETYPE {
OMX_AUDIO_WMAProfileL1, /**< Windows Media audio version 9 profile L1 */
OMX_AUDIO_WMAProfileL2, /**< Windows Media audio version 9 profile L2 */
OMX_AUDIO_WMAProfileL3, /**< Windows Media audio version 9 profile L3 */
- OMX_AUDIO_WMAProfileKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
+ OMX_AUDIO_WMAProfileKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
OMX_AUDIO_WMAProfileVendorStartUnused = 0x7F000000, /**< Reserved region for introducing Vendor Extensions */
OMX_AUDIO_WMAProfileMax = 0x7FFFFFFF
} OMX_AUDIO_WMAPROFILETYPE;
@@ -362,7 +394,7 @@ typedef struct OMX_AUDIO_PARAM_WMATYPE {
OMX_U32 nSuperBlockAlign; /**< WMA Type-specific data */
} OMX_AUDIO_PARAM_WMATYPE;
-/**
+/**
* RealAudio format
*/
typedef enum OMX_AUDIO_RAFORMATTYPE {
@@ -374,32 +406,32 @@ typedef enum OMX_AUDIO_RAFORMATTYPE {
OMX_AUDIO_RA10_LOSSLESS, /**< RealAudio Lossless */
OMX_AUDIO_RA10_MULTICHANNEL, /**< RealAudio Multichannel */
OMX_AUDIO_RA10_VOICE, /**< RealAudio Voice for bitrates below 15 kbps */
- OMX_AUDIO_RAFormatKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
+ OMX_AUDIO_RAFormatKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
OMX_AUDIO_RAFormatVendorStartUnused = 0x7F000000, /**< Reserved region for introducing Vendor Extensions */
OMX_VIDEO_RAFormatMax = 0x7FFFFFFF
} OMX_AUDIO_RAFORMATTYPE;
-/** RA (Real Audio) params */
-typedef struct OMX_AUDIO_PARAM_RATYPE {
- OMX_U32 nSize; /**< Size of this structure, in Bytes */
- OMX_VERSIONTYPE nVersion; /**< OMX specification version information */
- OMX_U32 nPortIndex; /**< Port that this structure applies to */
- OMX_U32 nChannels; /**< Number of channels */
- OMX_U32 nSamplingRate; /**< is the sampling rate of the source data */
- OMX_U32 nBitsPerFrame; /**< is the value for bits per frame */
- OMX_U32 nSamplePerFrame; /**< is the value for samples per frame */
- OMX_U32 nCouplingQuantBits; /**< is the number of coupling quantization bits in the stream */
- OMX_U32 nCouplingStartRegion; /**< is the coupling start region in the stream */
- OMX_U32 nNumRegions; /**< is the number of regions value */
+/** RA (Real Audio) params */
+typedef struct OMX_AUDIO_PARAM_RATYPE {
+ OMX_U32 nSize; /**< Size of this structure, in Bytes */
+ OMX_VERSIONTYPE nVersion; /**< OMX specification version information */
+ OMX_U32 nPortIndex; /**< Port that this structure applies to */
+ OMX_U32 nChannels; /**< Number of channels */
+ OMX_U32 nSamplingRate; /**< is the sampling rate of the source data */
+ OMX_U32 nBitsPerFrame; /**< is the value for bits per frame */
+ OMX_U32 nSamplePerFrame; /**< is the value for samples per frame */
+ OMX_U32 nCouplingQuantBits; /**< is the number of coupling quantization bits in the stream */
+ OMX_U32 nCouplingStartRegion; /**< is the coupling start region in the stream */
+ OMX_U32 nNumRegions; /**< is the number of regions value */
OMX_AUDIO_RAFORMATTYPE eFormat; /**< is the RealAudio audio format */
-} OMX_AUDIO_PARAM_RATYPE;
+} OMX_AUDIO_PARAM_RATYPE;
/** SBC Allocation Method Type */
typedef enum OMX_AUDIO_SBCALLOCMETHODTYPE {
OMX_AUDIO_SBCAllocMethodLoudness, /**< Loudness allocation method */
OMX_AUDIO_SBCAllocMethodSNR, /**< SNR allocation method */
- OMX_AUDIO_SBCAllocMethodKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
+ OMX_AUDIO_SBCAllocMethodKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
OMX_AUDIO_SBCAllocMethodVendorStartUnused = 0x7F000000, /**< Reserved region for introducing Vendor Extensions */
OMX_AUDIO_SBCAllocMethodMax = 0x7FFFFFFF
} OMX_AUDIO_SBCALLOCMETHODTYPE;
@@ -424,18 +456,18 @@ typedef struct OMX_AUDIO_PARAM_SBCTYPE {
} OMX_AUDIO_PARAM_SBCTYPE;
-/** ADPCM stream format parameters */
-typedef struct OMX_AUDIO_PARAM_ADPCMTYPE {
- OMX_U32 nSize; /**< size of the structure in bytes */
- OMX_VERSIONTYPE nVersion; /**< OMX specification version information */
- OMX_U32 nPortIndex; /**< port that this structure applies to */
- OMX_U32 nChannels; /**< Number of channels in the data stream (not
- necessarily the same as the number of channels
- to be rendered. */
- OMX_U32 nBitsPerSample; /**< Number of bits in each sample */
- OMX_U32 nSampleRate; /**< Sampling rate of the source data. Use 0 for
- variable or unknown sampling rate. */
-} OMX_AUDIO_PARAM_ADPCMTYPE;
+/** ADPCM stream format parameters */
+typedef struct OMX_AUDIO_PARAM_ADPCMTYPE {
+ OMX_U32 nSize; /**< size of the structure in bytes */
+ OMX_VERSIONTYPE nVersion; /**< OMX specification version information */
+ OMX_U32 nPortIndex; /**< port that this structure applies to */
+ OMX_U32 nChannels; /**< Number of channels in the data stream (not
+ necessarily the same as the number of channels
+ to be rendered. */
+ OMX_U32 nBitsPerSample; /**< Number of bits in each sample */
+ OMX_U32 nSampleRate; /**< Sampling rate of the source data. Use 0 for
+ variable or unknown sampling rate. */
+} OMX_AUDIO_PARAM_ADPCMTYPE;
/** G723 rate */
@@ -443,25 +475,25 @@ typedef enum OMX_AUDIO_G723RATE {
OMX_AUDIO_G723ModeUnused = 0, /**< AMRNB Mode unused / unknown */
OMX_AUDIO_G723ModeLow, /**< 5300 bps */
OMX_AUDIO_G723ModeHigh, /**< 6300 bps */
- OMX_AUDIO_G723ModeKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
+ OMX_AUDIO_G723ModeKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
OMX_AUDIO_G723ModeVendorStartUnused = 0x7F000000, /**< Reserved region for introducing Vendor Extensions */
OMX_AUDIO_G723ModeMax = 0x7FFFFFFF
} OMX_AUDIO_G723RATE;
/** G723 - Sample rate must be 8 KHz */
-typedef struct OMX_AUDIO_PARAM_G723TYPE {
- OMX_U32 nSize; /**< size of the structure in bytes */
- OMX_VERSIONTYPE nVersion; /**< OMX specification version information */
- OMX_U32 nPortIndex; /**< port that this structure applies to */
- OMX_U32 nChannels; /**< Number of channels in the data stream (not
- necessarily the same as the number of channels
- to be rendered. */
- OMX_BOOL bDTX; /**< Enable Discontinuous Transmisssion */
+typedef struct OMX_AUDIO_PARAM_G723TYPE {
+ OMX_U32 nSize; /**< size of the structure in bytes */
+ OMX_VERSIONTYPE nVersion; /**< OMX specification version information */
+ OMX_U32 nPortIndex; /**< port that this structure applies to */
+ OMX_U32 nChannels; /**< Number of channels in the data stream (not
+ necessarily the same as the number of channels
+ to be rendered. */
+ OMX_BOOL bDTX; /**< Enable Discontinuous Transmisssion */
OMX_AUDIO_G723RATE eBitRate; /**< todo: Should this be moved to a config? */
- OMX_BOOL bHiPassFilter; /**< Enable High Pass Filter */
- OMX_BOOL bPostFilter; /**< Enable Post Filter */
-} OMX_AUDIO_PARAM_G723TYPE;
+ OMX_BOOL bHiPassFilter; /**< Enable High Pass Filter */
+ OMX_BOOL bPostFilter; /**< Enable Post Filter */
+} OMX_AUDIO_PARAM_G723TYPE;
/** ITU G726 (ADPCM) rate */
@@ -471,22 +503,22 @@ typedef enum OMX_AUDIO_G726MODE {
OMX_AUDIO_G726Mode24, /**< 24 kbps */
OMX_AUDIO_G726Mode32, /**< 32 kbps, most common rate, also G721 */
OMX_AUDIO_G726Mode40, /**< 40 kbps */
- OMX_AUDIO_G726ModeKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
+ OMX_AUDIO_G726ModeKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
OMX_AUDIO_G726ModeVendorStartUnused = 0x7F000000, /**< Reserved region for introducing Vendor Extensions */
OMX_AUDIO_G726ModeMax = 0x7FFFFFFF
} OMX_AUDIO_G726MODE;
-/** G.726 stream format parameters - must be at 8KHz */
-typedef struct OMX_AUDIO_PARAM_G726TYPE {
- OMX_U32 nSize; /**< size of the structure in bytes */
- OMX_VERSIONTYPE nVersion; /**< OMX specification version information */
- OMX_U32 nPortIndex; /**< port that this structure applies to */
- OMX_U32 nChannels; /**< Number of channels in the data stream (not
- necessarily the same as the number of channels
- to be rendered. */
+/** G.726 stream format parameters - must be at 8KHz */
+typedef struct OMX_AUDIO_PARAM_G726TYPE {
+ OMX_U32 nSize; /**< size of the structure in bytes */
+ OMX_VERSIONTYPE nVersion; /**< OMX specification version information */
+ OMX_U32 nPortIndex; /**< port that this structure applies to */
+ OMX_U32 nChannels; /**< Number of channels in the data stream (not
+ necessarily the same as the number of channels
+ to be rendered. */
OMX_AUDIO_G726MODE eG726Mode;
-} OMX_AUDIO_PARAM_G726TYPE;
+} OMX_AUDIO_PARAM_G726TYPE;
/** G729 coder type */
@@ -495,7 +527,7 @@ typedef enum OMX_AUDIO_G729TYPE {
OMX_AUDIO_G729A, /**< ITU G.729 annex A encoded data */
OMX_AUDIO_G729B, /**< ITU G.729 with annex B encoded data */
OMX_AUDIO_G729AB, /**< ITU G.729 annexes A and B encoded data */
- OMX_AUDIO_G729KhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
+ OMX_AUDIO_G729KhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
OMX_AUDIO_G729VendorStartUnused = 0x7F000000, /**< Reserved region for introducing Vendor Extensions */
OMX_AUDIO_G729Max = 0x7FFFFFFF
} OMX_AUDIO_G729TYPE;
@@ -514,23 +546,23 @@ typedef struct OMX_AUDIO_PARAM_G729TYPE {
} OMX_AUDIO_PARAM_G729TYPE;
-/** AMR Frame format */
-typedef enum OMX_AUDIO_AMRFRAMEFORMATTYPE {
- OMX_AUDIO_AMRFrameFormatConformance = 0, /**< Frame Format is AMR Conformance
- (Standard) Format */
- OMX_AUDIO_AMRFrameFormatIF1, /**< Frame Format is AMR Interface
- Format 1 */
- OMX_AUDIO_AMRFrameFormatIF2, /**< Frame Format is AMR Interface
- Format 2*/
- OMX_AUDIO_AMRFrameFormatFSF, /**< Frame Format is AMR File Storage
- Format */
- OMX_AUDIO_AMRFrameFormatRTPPayload, /**< Frame Format is AMR Real-Time
- Transport Protocol Payload Format */
- OMX_AUDIO_AMRFrameFormatITU, /**< Frame Format is ITU Format (added at Motorola request) */
- OMX_AUDIO_AMRFrameFormatKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
+/** AMR Frame format */
+typedef enum OMX_AUDIO_AMRFRAMEFORMATTYPE {
+ OMX_AUDIO_AMRFrameFormatConformance = 0, /**< Frame Format is AMR Conformance
+ (Standard) Format */
+ OMX_AUDIO_AMRFrameFormatIF1, /**< Frame Format is AMR Interface
+ Format 1 */
+ OMX_AUDIO_AMRFrameFormatIF2, /**< Frame Format is AMR Interface
+ Format 2*/
+ OMX_AUDIO_AMRFrameFormatFSF, /**< Frame Format is AMR File Storage
+ Format */
+ OMX_AUDIO_AMRFrameFormatRTPPayload, /**< Frame Format is AMR Real-Time
+ Transport Protocol Payload Format */
+ OMX_AUDIO_AMRFrameFormatITU, /**< Frame Format is ITU Format (added at Motorola request) */
+ OMX_AUDIO_AMRFrameFormatKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
OMX_AUDIO_AMRFrameFormatVendorStartUnused = 0x7F000000, /**< Reserved region for introducing Vendor Extensions */
- OMX_AUDIO_AMRFrameFormatMax = 0x7FFFFFFF
-} OMX_AUDIO_AMRFRAMEFORMATTYPE;
+ OMX_AUDIO_AMRFrameFormatMax = 0x7FFFFFFF
+} OMX_AUDIO_AMRFRAMEFORMATTYPE;
/** AMR band mode */
@@ -538,7 +570,7 @@ typedef enum OMX_AUDIO_AMRBANDMODETYPE {
OMX_AUDIO_AMRBandModeUnused = 0, /**< AMRNB Mode unused / unknown */
OMX_AUDIO_AMRBandModeNB0, /**< AMRNB Mode 0 = 4750 bps */
OMX_AUDIO_AMRBandModeNB1, /**< AMRNB Mode 1 = 5150 bps */
- OMX_AUDIO_AMRBandModeNB2, /**< AMRNB Mode 2 = 5900 bps */
+ OMX_AUDIO_AMRBandModeNB2, /**< AMRNB Mode 2 = 5900 bps */
OMX_AUDIO_AMRBandModeNB3, /**< AMRNB Mode 3 = 6700 bps */
OMX_AUDIO_AMRBandModeNB4, /**< AMRNB Mode 4 = 7400 bps */
OMX_AUDIO_AMRBandModeNB5, /**< AMRNB Mode 5 = 7950 bps */
@@ -546,36 +578,36 @@ typedef enum OMX_AUDIO_AMRBANDMODETYPE {
OMX_AUDIO_AMRBandModeNB7, /**< AMRNB Mode 7 = 12200 bps */
OMX_AUDIO_AMRBandModeWB0, /**< AMRWB Mode 0 = 6600 bps */
OMX_AUDIO_AMRBandModeWB1, /**< AMRWB Mode 1 = 8850 bps */
- OMX_AUDIO_AMRBandModeWB2, /**< AMRWB Mode 2 = 12650 bps */
- OMX_AUDIO_AMRBandModeWB3, /**< AMRWB Mode 3 = 14250 bps */
+ OMX_AUDIO_AMRBandModeWB2, /**< AMRWB Mode 2 = 12650 bps */
+ OMX_AUDIO_AMRBandModeWB3, /**< AMRWB Mode 3 = 14250 bps */
OMX_AUDIO_AMRBandModeWB4, /**< AMRWB Mode 4 = 15850 bps */
OMX_AUDIO_AMRBandModeWB5, /**< AMRWB Mode 5 = 18250 bps */
OMX_AUDIO_AMRBandModeWB6, /**< AMRWB Mode 6 = 19850 bps */
OMX_AUDIO_AMRBandModeWB7, /**< AMRWB Mode 7 = 23050 bps */
- OMX_AUDIO_AMRBandModeWB8, /**< AMRWB Mode 8 = 23850 bps */
- OMX_AUDIO_AMRBandModeKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
+ OMX_AUDIO_AMRBandModeWB8, /**< AMRWB Mode 8 = 23850 bps */
+ OMX_AUDIO_AMRBandModeKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
OMX_AUDIO_AMRBandModeVendorStartUnused = 0x7F000000, /**< Reserved region for introducing Vendor Extensions */
OMX_AUDIO_AMRBandModeMax = 0x7FFFFFFF
} OMX_AUDIO_AMRBANDMODETYPE;
-
-
-/** AMR Discontinuous Transmission mode */
-typedef enum OMX_AUDIO_AMRDTXMODETYPE {
- OMX_AUDIO_AMRDTXModeOff = 0, /**< AMR Discontinuous Transmission Mode is disabled */
- OMX_AUDIO_AMRDTXModeOnVAD1, /**< AMR Discontinuous Transmission Mode using
- Voice Activity Detector 1 (VAD1) is enabled */
- OMX_AUDIO_AMRDTXModeOnVAD2, /**< AMR Discontinuous Transmission Mode using
- Voice Activity Detector 2 (VAD2) is enabled */
- OMX_AUDIO_AMRDTXModeOnAuto, /**< The codec will automatically select between
- Off, VAD1 or VAD2 modes */
+
+
+/** AMR Discontinuous Transmission mode */
+typedef enum OMX_AUDIO_AMRDTXMODETYPE {
+ OMX_AUDIO_AMRDTXModeOff = 0, /**< AMR Discontinuous Transmission Mode is disabled */
+ OMX_AUDIO_AMRDTXModeOnVAD1, /**< AMR Discontinuous Transmission Mode using
+ Voice Activity Detector 1 (VAD1) is enabled */
+ OMX_AUDIO_AMRDTXModeOnVAD2, /**< AMR Discontinuous Transmission Mode using
+ Voice Activity Detector 2 (VAD2) is enabled */
+ OMX_AUDIO_AMRDTXModeOnAuto, /**< The codec will automatically select between
+ Off, VAD1 or VAD2 modes */
OMX_AUDIO_AMRDTXasEFR, /**< DTX as EFR instead of AMR standard (3GPP 26.101, frame type =8,9,10) */
- OMX_AUDIO_AMRDTXModeKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
+ OMX_AUDIO_AMRDTXModeKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
OMX_AUDIO_AMRDTXModeVendorStartUnused = 0x7F000000, /**< Reserved region for introducing Vendor Extensions */
- OMX_AUDIO_AMRDTXModeMax = 0x7FFFFFFF
-} OMX_AUDIO_AMRDTXMODETYPE;
-
+ OMX_AUDIO_AMRDTXModeMax = 0x7FFFFFFF
+} OMX_AUDIO_AMRDTXMODETYPE;
+
/** AMR params */
typedef struct OMX_AUDIO_PARAM_AMRTYPE {
@@ -584,7 +616,7 @@ typedef struct OMX_AUDIO_PARAM_AMRTYPE {
OMX_U32 nPortIndex; /**< port that this structure applies to */
OMX_U32 nChannels; /**< Number of channels */
OMX_U32 nBitRate; /**< Bit rate read only field */
- OMX_AUDIO_AMRBANDMODETYPE eAMRBandMode; /**< AMR Band Mode enumeration */
+ OMX_AUDIO_AMRBANDMODETYPE eAMRBandMode; /**< AMR Band Mode enumeration */
OMX_AUDIO_AMRDTXMODETYPE eAMRDTXMode; /**< AMR DTX Mode enumeration */
OMX_AUDIO_AMRFRAMEFORMATTYPE eAMRFrameFormat; /**< AMR frame format enumeration */
} OMX_AUDIO_PARAM_AMRTYPE;
@@ -692,7 +724,7 @@ typedef enum OMX_AUDIO_CDMARATETYPE {
OMX_AUDIO_CDMARateQuarter, /**< CDMA encoded frame in quarter rate */
OMX_AUDIO_CDMARateEighth, /**< CDMA encoded frame in eighth rate (DTX)*/
OMX_AUDIO_CDMARateErasure, /**< CDMA erasure frame */
- OMX_AUDIO_CDMARateKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
+ OMX_AUDIO_CDMARateKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
OMX_AUDIO_CDMARateVendorStartUnused = 0x7F000000, /**< Reserved region for introducing Vendor Extensions */
OMX_AUDIO_CDMARateMax = 0x7FFFFFFF
} OMX_AUDIO_CDMARATETYPE;
@@ -764,7 +796,7 @@ typedef struct OMX_AUDIO_PARAM_SMVTYPE {
} OMX_AUDIO_PARAM_SMVTYPE;
-/** MIDI Format
+/** MIDI Format
* @ingroup midi
*/
typedef enum OMX_AUDIO_MIDIFORMATTYPE
@@ -777,33 +809,33 @@ typedef enum OMX_AUDIO_MIDIFORMATTYPE
OMX_AUDIO_MIDIFormatXMF0, /**< eXtensible Music Format type 0 */
OMX_AUDIO_MIDIFormatXMF1, /**< eXtensible Music Format type 1 */
OMX_AUDIO_MIDIFormatMobileXMF, /**< Mobile XMF (eXtensible Music Format type 2) */
- OMX_AUDIO_MIDIFormatKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
+ OMX_AUDIO_MIDIFormatKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
OMX_AUDIO_MIDIFormatVendorStartUnused = 0x7F000000, /**< Reserved region for introducing Vendor Extensions */
OMX_AUDIO_MIDIFormatMax = 0x7FFFFFFF
} OMX_AUDIO_MIDIFORMATTYPE;
-/** MIDI params
+/** MIDI params
* @ingroup midi
*/
typedef struct OMX_AUDIO_PARAM_MIDITYPE {
OMX_U32 nSize; /**< size of the structure in bytes */
OMX_VERSIONTYPE nVersion; /**< OMX specification version information */
OMX_U32 nPortIndex; /**< port that this structure applies to */
- OMX_U32 nFileSize; /**< size of the MIDI file in bytes, where the entire
- MIDI file passed in, otherwise if 0x0, the MIDI data
- is merged and streamed (instead of passed as an
+ OMX_U32 nFileSize; /**< size of the MIDI file in bytes, where the entire
+ MIDI file passed in, otherwise if 0x0, the MIDI data
+ is merged and streamed (instead of passed as an
entire MIDI file) */
- OMX_BU32 sMaxPolyphony; /**< Specifies the maximum simultaneous polyphonic
- voices. A value of zero indicates that the default
- polyphony of the device is used */
- OMX_BOOL bLoadDefaultSound; /**< Whether to load default sound
+ OMX_BU32 sMaxPolyphony; /**< Specifies the maximum simultaneous polyphonic
+ voices. A value of zero indicates that the default
+ polyphony of the device is used */
+ OMX_BOOL bLoadDefaultSound; /**< Whether to load default sound
bank at initialization */
- OMX_AUDIO_MIDIFORMATTYPE eMidiFormat; /**< Version of the MIDI file */
+ OMX_AUDIO_MIDIFORMATTYPE eMidiFormat; /**< Version of the MIDI file */
} OMX_AUDIO_PARAM_MIDITYPE;
-/** Type of the MIDI sound bank
+/** Type of the MIDI sound bank
* @ingroup midi
*/
typedef enum OMX_AUDIO_MIDISOUNDBANKTYPE {
@@ -812,13 +844,13 @@ typedef enum OMX_AUDIO_MIDISOUNDBANKTYPE {
OMX_AUDIO_MIDISoundBankDLS2, /**< DLS version 2 */
OMX_AUDIO_MIDISoundBankMobileDLSBase, /**< Mobile DLS, using the base functionality */
OMX_AUDIO_MIDISoundBankMobileDLSPlusOptions, /**< Mobile DLS, using the specification-defined optional feature set */
- OMX_AUDIO_MIDISoundBankKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
+ OMX_AUDIO_MIDISoundBankKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
OMX_AUDIO_MIDISoundBankVendorStartUnused = 0x7F000000, /**< Reserved region for introducing Vendor Extensions */
OMX_AUDIO_MIDISoundBankMax = 0x7FFFFFFF
} OMX_AUDIO_MIDISOUNDBANKTYPE;
-/** Bank Layout describes how bank MSB & LSB are used in the DLS instrument definitions sound bank
+/** Bank Layout describes how bank MSB & LSB are used in the DLS instrument definitions sound bank
* @ingroup midi
*/
typedef enum OMX_AUDIO_MIDISOUNDBANKLAYOUTTYPE {
@@ -826,13 +858,13 @@ typedef enum OMX_AUDIO_MIDISOUNDBANKLAYOUTTYPE {
OMX_AUDIO_MIDISoundBankLayoutGM, /**< GS layout (based on bank MSB 0x00) */
OMX_AUDIO_MIDISoundBankLayoutGM2, /**< General MIDI 2 layout (using MSB 0x78/0x79, LSB 0x00) */
OMX_AUDIO_MIDISoundBankLayoutUser, /**< Does not conform to any bank numbering standards */
- OMX_AUDIO_MIDISoundBankLayoutKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
+ OMX_AUDIO_MIDISoundBankLayoutKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
OMX_AUDIO_MIDISoundBankLayoutVendorStartUnused = 0x7F000000, /**< Reserved region for introducing Vendor Extensions */
OMX_AUDIO_MIDISoundBankLayoutMax = 0x7FFFFFFF
} OMX_AUDIO_MIDISOUNDBANKLAYOUTTYPE;
-/** MIDI params to load/unload user soundbank
+/** MIDI params to load/unload user soundbank
* @ingroup midi
*/
typedef struct OMX_AUDIO_PARAM_MIDILOADUSERSOUNDTYPE {
@@ -847,8 +879,8 @@ typedef struct OMX_AUDIO_PARAM_MIDILOADUSERSOUNDTYPE {
} OMX_AUDIO_PARAM_MIDILOADUSERSOUNDTYPE;
-/** Structure for Live MIDI events and MIP messages.
- * (MIP = Maximum Instantaneous Polyphony; part of the SP-MIDI standard.)
+/** Structure for Live MIDI events and MIP messages.
+ * (MIP = Maximum Instantaneous Polyphony; part of the SP-MIDI standard.)
* @ingroup midi
*/
typedef struct OMX_AUDIO_CONFIG_MIDIIMMEDIATEEVENTTYPE {
@@ -857,12 +889,12 @@ typedef struct OMX_AUDIO_CONFIG_MIDIIMMEDIATEEVENTTYPE {
OMX_U32 nPortIndex; /**< Port that this structure applies to */
OMX_U32 nMidiEventSize; /**< Size of immediate MIDI events or MIP message in bytes */
OMX_U8 nMidiEvents[1]; /**< MIDI event array to be rendered immediately, or an
- array for the MIP message buffer, where the size is
+ array for the MIP message buffer, where the size is
indicated by nMidiEventSize */
} OMX_AUDIO_CONFIG_MIDIIMMEDIATEEVENTTYPE;
-/** MIDI sound bank/ program pair in a given channel
+/** MIDI sound bank/ program pair in a given channel
* @ingroup midi
*/
typedef struct OMX_AUDIO_CONFIG_MIDISOUNDBANKPROGRAMTYPE {
@@ -872,29 +904,29 @@ typedef struct OMX_AUDIO_CONFIG_MIDISOUNDBANKPROGRAMTYPE {
OMX_U32 nChannel; /**< Valid channel values range from 1 to 16 */
OMX_U16 nIDProgram; /**< Valid program ID range is 1 to 128 */
OMX_U16 nIDSoundBank; /**< Sound bank ID */
- OMX_U32 nUserSoundBankIndex;/**< User soundbank index, easier to access soundbanks
+ OMX_U32 nUserSoundBankIndex;/**< User soundbank index, easier to access soundbanks
by index if multiple banks are present */
} OMX_AUDIO_CONFIG_MIDISOUNDBANKPROGRAMTYPE;
-/** MIDI control
+/** MIDI control
* @ingroup midi
*/
typedef struct OMX_AUDIO_CONFIG_MIDICONTROLTYPE {
OMX_U32 nSize; /**< size of the structure in bytes */
OMX_VERSIONTYPE nVersion; /**< OMX specification version information */
OMX_U32 nPortIndex; /**< port that this structure applies to */
- OMX_BS32 sPitchTransposition; /**< Pitch transposition in semitones, stored as Q22.10
+ OMX_BS32 sPitchTransposition; /**< Pitch transposition in semitones, stored as Q22.10
format based on JAVA MMAPI (JSR-135) requirement */
OMX_BU32 sPlayBackRate; /**< Relative playback rate, stored as Q14.17 fixed-point
number based on JSR-135 requirement */
- OMX_BU32 sTempo ; /**< Tempo in beats per minute (BPM), stored as Q22.10
+ OMX_BU32 sTempo ; /**< Tempo in beats per minute (BPM), stored as Q22.10
fixed-point number based on JSR-135 requirement */
- OMX_U32 nMaxPolyphony; /**< Specifies the maximum simultaneous polyphonic
- voices. A value of zero indicates that the default
+ OMX_U32 nMaxPolyphony; /**< Specifies the maximum simultaneous polyphonic
+ voices. A value of zero indicates that the default
polyphony of the device is used */
OMX_U32 nNumRepeat; /**< Number of times to repeat playback */
- OMX_U32 nStopTime; /**< Time in milliseconds to indicate when playback
+ OMX_U32 nStopTime; /**< Time in milliseconds to indicate when playback
will stop automatically. Set to zero if not used */
OMX_U16 nChannelMuteMask; /**< 16 bit mask for channel mute status */
OMX_U16 nChannelSoloMask; /**< 16 bit mask for channel solo status */
@@ -906,22 +938,22 @@ typedef struct OMX_AUDIO_CONFIG_MIDICONTROLTYPE {
} OMX_AUDIO_CONFIG_MIDICONTROLTYPE;
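The Q22.10 and Q14.17 fields in the structure above are ordinary fixed-point encodings (10 and 17 fractional bits respectively). A short sketch of the conversion; the concrete values and the idea of writing them into the ranged fields are illustrative only:

    /* Q22.10: value = real * 2^10;  Q14.17: value = real * 2^17. */
    OMX_S32 pitchQ22_10 = (OMX_S32)(  2.0 * (1 << 10));  /* +2 semitones -> 2048   */
    OMX_U32 rateQ14_17  = (OMX_U32)(  1.5 * (1 << 17));  /* 1.5x playback -> 196608 */
    OMX_U32 tempoQ22_10 = (OMX_U32)(120.0 * (1 << 10));  /* 120 BPM -> 122880       */
    /* These raw values would go into sPitchTransposition, sPlayBackRate and sTempo. */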
-/** MIDI Playback States
+/** MIDI Playback States
* @ingroup midi
*/
typedef enum OMX_AUDIO_MIDIPLAYBACKSTATETYPE {
- OMX_AUDIO_MIDIPlayBackStateUnknown = 0, /**< Unknown state or state does not map to
+ OMX_AUDIO_MIDIPlayBackStateUnknown = 0, /**< Unknown state or state does not map to
other defined states */
- OMX_AUDIO_MIDIPlayBackStateClosedEngaged, /**< No MIDI resource is currently open.
- The MIDI engine is currently processing
+ OMX_AUDIO_MIDIPlayBackStateClosedEngaged, /**< No MIDI resource is currently open.
+ The MIDI engine is currently processing
MIDI events. */
- OMX_AUDIO_MIDIPlayBackStateParsing, /**< A MIDI resource is open and is being
- primed. The MIDI engine is currently
+ OMX_AUDIO_MIDIPlayBackStateParsing, /**< A MIDI resource is open and is being
+ primed. The MIDI engine is currently
processing MIDI events. */
- OMX_AUDIO_MIDIPlayBackStateOpenEngaged, /**< A MIDI resource is open and primed but
+ OMX_AUDIO_MIDIPlayBackStateOpenEngaged, /**< A MIDI resource is open and primed but
not playing. The MIDI engine is currently
processing MIDI events. The transition to
- this state is only possible from the
+ this state is only possible from the
OMX_AUDIO_MIDIPlayBackStatePlaying state,
when the 'playback head' reaches the end
of media data or the playback stops due
@@ -936,95 +968,95 @@ typedef enum OMX_AUDIO_MIDIPLAYBACKSTATETYPE {
no audible MIDI content during playback
currently. The situation may change if
resources are freed later.*/
- OMX_AUDIO_MIDIPlayBackStateKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
+ OMX_AUDIO_MIDIPlayBackStateKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
OMX_AUDIO_MIDIPlayBackStateVendorStartUnused = 0x7F000000, /**< Reserved region for introducing Vendor Extensions */
OMX_AUDIO_MIDIPlayBackStateMax = 0x7FFFFFFF
} OMX_AUDIO_MIDIPLAYBACKSTATETYPE;
-/** MIDI status
+/** MIDI status
* @ingroup midi
*/
typedef struct OMX_AUDIO_CONFIG_MIDISTATUSTYPE {
OMX_U32 nSize; /**< size of the structure in bytes */
OMX_VERSIONTYPE nVersion; /**< OMX specification version information */
OMX_U32 nPortIndex; /**< port that this structure applies to */
- OMX_U16 nNumTracks; /**< Number of MIDI tracks in the file, read only field.
- NOTE: May not return a meaningful value until the entire
+ OMX_U16 nNumTracks; /**< Number of MIDI tracks in the file, read only field.
+ NOTE: May not return a meaningful value until the entire
file is parsed and buffered. */
- OMX_U32 nDuration; /**< The length of the currently open MIDI resource
- in milliseconds. NOTE: May not return a meaningful value
- until the entire file is parsed and buffered. */
- OMX_U32 nPosition; /**< Current Position of the MIDI resource being played
+ OMX_U32 nDuration; /**< The length of the currently open MIDI resource
+ in milliseconds. NOTE: May not return a meaningful value
+ until the entire file is parsed and buffered. */
+ OMX_U32 nPosition; /**< Current Position of the MIDI resource being played
in milliseconds */
- OMX_BOOL bVibra; /**< Does Vibra track exist? NOTE: May not return a meaningful
+ OMX_BOOL bVibra; /**< Does Vibra track exist? NOTE: May not return a meaningful
value until the entire file is parsed and buffered. */
- OMX_U32 nNumMetaEvents; /**< Total number of MIDI Meta Events in the currently
- open MIDI resource. NOTE: May not return a meaningful value
+ OMX_U32 nNumMetaEvents; /**< Total number of MIDI Meta Events in the currently
+ open MIDI resource. NOTE: May not return a meaningful value
until the entire file is parsed and buffered. */
- OMX_U32 nNumActiveVoices; /**< Number of active voices in the currently playing
- MIDI resource. NOTE: May not return a meaningful value until
+ OMX_U32 nNumActiveVoices; /**< Number of active voices in the currently playing
+ MIDI resource. NOTE: May not return a meaningful value until
the entire file is parsed and buffered. */
OMX_AUDIO_MIDIPLAYBACKSTATETYPE eMIDIPlayBackState; /**< MIDI playback state enumeration, read only field */
} OMX_AUDIO_CONFIG_MIDISTATUSTYPE;
/** MIDI Meta Event structure one per Meta Event.
- * MIDI Meta Events are like audio metadata, except that they are interspersed
- * with the MIDI content throughout the file and are not localized in the header.
- * As such, it is necessary to retrieve information about these Meta Events from
- * the engine, as it encounters these Meta Events within the MIDI content.
- * For example, SMF files can have up to 14 types of MIDI Meta Events (copyright,
- * author, default tempo, etc.) scattered throughout the file.
+ * MIDI Meta Events are like audio metadata, except that they are interspersed
+ * with the MIDI content throughout the file and are not localized in the header.
+ * As such, it is necessary to retrieve information about these Meta Events from
+ * the engine, as it encounters these Meta Events within the MIDI content.
+ * For example, SMF files can have up to 14 types of MIDI Meta Events (copyright,
+ * author, default tempo, etc.) scattered throughout the file.
* @ingroup midi
*/
-typedef struct OMX_AUDIO_CONFIG_MIDIMETAEVENTTYPE{
- OMX_U32 nSize; /**< size of the structure in bytes */
- OMX_VERSIONTYPE nVersion; /**< OMX specification version information */
- OMX_U32 nPortIndex; /**< port that this structure applies to */
- OMX_U32 nIndex; /**< Index of Meta Event */
- OMX_U8 nMetaEventType; /**< Meta Event Type, 7bits (i.e. 0 - 127) */
- OMX_U32 nMetaEventSize; /**< size of the Meta Event in bytes */
+typedef struct OMX_AUDIO_CONFIG_MIDIMETAEVENTTYPE{
+ OMX_U32 nSize; /**< size of the structure in bytes */
+ OMX_VERSIONTYPE nVersion; /**< OMX specification version information */
+ OMX_U32 nPortIndex; /**< port that this structure applies to */
+ OMX_U32 nIndex; /**< Index of Meta Event */
+ OMX_U8 nMetaEventType; /**< Meta Event Type, 7bits (i.e. 0 - 127) */
+ OMX_U32 nMetaEventSize; /**< size of the Meta Event in bytes */
OMX_U32 nTrack; /**< track number for the meta event */
OMX_U32 nPosition; /**< Position of the meta-event in milliseconds */
-} OMX_AUDIO_CONFIG_MIDIMETAEVENTTYPE;
+} OMX_AUDIO_CONFIG_MIDIMETAEVENTTYPE;
-/** MIDI Meta Event Data structure - one per Meta Event.
+/** MIDI Meta Event Data structure - one per Meta Event.
* @ingroup midi
- */
-typedef struct OMX_AUDIO_CONFIG_MIDIMETAEVENTDATATYPE{
- OMX_U32 nSize; /**< size of the structure in bytes */
- OMX_VERSIONTYPE nVersion; /**< OMX specification version information */
- OMX_U32 nPortIndex; /**< port that this structure applies to */
- OMX_U32 nIndex; /**< Index of Meta Event */
- OMX_U32 nMetaEventSize; /**< size of the Meta Event in bytes */
- OMX_U8 nData[1]; /**< array of one or more bytes of meta data
- as indicated by the nMetaEventSize field */
-} OMX_AUDIO_CONFIG__MIDIMETAEVENTDATATYPE;
+ */
+typedef struct OMX_AUDIO_CONFIG_MIDIMETAEVENTDATATYPE{
+ OMX_U32 nSize; /**< size of the structure in bytes */
+ OMX_VERSIONTYPE nVersion; /**< OMX specification version information */
+ OMX_U32 nPortIndex; /**< port that this structure applies to */
+ OMX_U32 nIndex; /**< Index of Meta Event */
+ OMX_U32 nMetaEventSize; /**< size of the Meta Event in bytes */
+ OMX_U8 nData[1]; /**< array of one or more bytes of meta data
+ as indicated by the nMetaEventSize field */
+} OMX_AUDIO_CONFIG__MIDIMETAEVENTDATATYPE;
/** Audio Volume adjustment for a port */
typedef struct OMX_AUDIO_CONFIG_VOLUMETYPE {
OMX_U32 nSize; /**< size of the structure in bytes */
OMX_VERSIONTYPE nVersion; /**< OMX specification version information */
- OMX_U32 nPortIndex; /**< Port index indicating which port to
- set. Select the input port to set
- just that port's volume. Select the
- output port to adjust the master
+ OMX_U32 nPortIndex; /**< Port index indicating which port to
+ set. Select the input port to set
+ just that port's volume. Select the
+ output port to adjust the master
volume. */
- OMX_BOOL bLinear; /**< Is the volume to be set in linear (0.100)
+ OMX_BOOL bLinear; /**< Is the volume to be set in linear (0.100)
or logarithmic scale (mB) */
OMX_BS32 sVolume; /**< Volume linear setting in the 0..100 range, OR
Volume logarithmic setting for this port. The values
for volume are in mB (millibels = 1/100 dB) relative
- to a gain of 1 (e.g. the output is the same as the
- input level). Values are in mB from nMax
+ to a gain of 1 (e.g. the output is the same as the
+ input level). Values are in mB from nMax
(maximum volume) to nMin mB (typically negative).
Since the volume is "voltage"
and not a "power", it takes a setting of
-600 mB to decrease the volume by 1/2. If
- a component cannot accurately set the
+ a component cannot accurately set the
volume to the requested value, it must
set the volume to the closest value BELOW
the requested value. When getting the
@@ -1037,27 +1069,27 @@ typedef struct OMX_AUDIO_CONFIG_VOLUMETYPE {
typedef struct OMX_AUDIO_CONFIG_CHANNELVOLUMETYPE {
OMX_U32 nSize; /**< size of the structure in bytes */
OMX_VERSIONTYPE nVersion; /**< OMX specification version information */
- OMX_U32 nPortIndex; /**< Port index indicating which port to
- set. Select the input port to set
- just that port's volume. Select the
- output port to adjust the master
+ OMX_U32 nPortIndex; /**< Port index indicating which port to
+ set. Select the input port to set
+ just that port's volume. Select the
+ output port to adjust the master
volume. */
- OMX_U32 nChannel; /**< channel to select from 0 to N-1,
+ OMX_U32 nChannel; /**< channel to select from 0 to N-1,
using OMX_ALL to apply volume settings
to all channels */
- OMX_BOOL bLinear; /**< Is the volume to be set in linear (0.100) or
+ OMX_BOOL bLinear; /**< Is the volume to be set in linear (0.100) or
logarithmic scale (mB) */
OMX_BS32 sVolume; /**< Volume linear setting in the 0..100 range, OR
- Volume logarithmic setting for this port.
- The values for volume are in mB
+ Volume logarithmic setting for this port.
+ The values for volume are in mB
(millibels = 1/100 dB) relative to a gain
- of 1 (e.g. the output is the same as the
- input level). Values are in mB from nMax
- (maximum volume) to nMin mB (typically negative).
+ of 1 (e.g. the output is the same as the
+ input level). Values are in mB from nMax
+ (maximum volume) to nMin mB (typically negative).
Since the volume is "voltage"
and not a "power", it takes a setting of
-600 mB to decrease the volume by 1/2. If
- a component cannot accurately set the
+ a component cannot accurately set the
volume to the requested value, it must
set the volume to the closest value BELOW
the requested value. When getting the
@@ -1072,12 +1104,12 @@ typedef struct OMX_AUDIO_CONFIG_CHANNELVOLUMETYPE {
typedef struct OMX_AUDIO_CONFIG_BALANCETYPE {
OMX_U32 nSize; /**< size of the structure in bytes */
OMX_VERSIONTYPE nVersion; /**< OMX specification version information */
- OMX_U32 nPortIndex; /**< Port index indicating which port to
- set. Select the input port to set
- just that port's balance. Select the
- output port to adjust the master
+ OMX_U32 nPortIndex; /**< Port index indicating which port to
+ set. Select the input port to set
+ just that port's balance. Select the
+ output port to adjust the master
balance. */
- OMX_S32 nBalance; /**< balance setting for this port
+ OMX_S32 nBalance; /**< balance setting for this port
(-100 to 100, where -100 indicates
all left, and no right */
} OMX_AUDIO_CONFIG_BALANCETYPE;
@@ -1087,10 +1119,10 @@ typedef struct OMX_AUDIO_CONFIG_BALANCETYPE {
typedef struct OMX_AUDIO_CONFIG_MUTETYPE {
OMX_U32 nSize; /**< size of the structure in bytes */
OMX_VERSIONTYPE nVersion; /**< OMX specification version information */
- OMX_U32 nPortIndex; /**< Port index indicating which port to
- set. Select the input port to set
- just that port's mute. Select the
- output port to adjust the master
+ OMX_U32 nPortIndex; /**< Port index indicating which port to
+ set. Select the input port to set
+ just that port's mute. Select the
+ output port to adjust the master
mute. */
OMX_BOOL bMute; /**< Mute setting for this port */
} OMX_AUDIO_CONFIG_MUTETYPE;
@@ -1101,20 +1133,20 @@ typedef struct OMX_AUDIO_CONFIG_CHANNELMUTETYPE {
OMX_U32 nSize; /**< size of the structure in bytes */
OMX_VERSIONTYPE nVersion; /**< OMX specification version information */
OMX_U32 nPortIndex; /**< port that this structure applies to */
- OMX_U32 nChannel; /**< channel to select from 0 to N-1,
+ OMX_U32 nChannel; /**< channel to select from 0 to N-1,
using OMX_ALL to apply mute settings
to all channels */
OMX_BOOL bMute; /**< Mute setting for this channel */
OMX_BOOL bIsMIDI; /**< TRUE if nChannel refers to a MIDI channel,
- FALSE otherwise */
+ FALSE otherwise */
} OMX_AUDIO_CONFIG_CHANNELMUTETYPE;
-/** Enable / Disable for loudness control, which boosts bass and to a
+/** Enable / Disable for loudness control, which boosts bass and to a
* smaller extent high end frequencies to compensate for hearing
* ability at the extreme ends of the audio spectrum
- */
+ */
typedef struct OMX_AUDIO_CONFIG_LOUDNESSTYPE {
OMX_U32 nSize; /**< size of the structure in bytes */
OMX_VERSIONTYPE nVersion; /**< OMX specification version information */
@@ -1124,33 +1156,33 @@ typedef struct OMX_AUDIO_CONFIG_LOUDNESSTYPE {
/** Enable / Disable for bass, which controls low frequencies
- */
+ */
typedef struct OMX_AUDIO_CONFIG_BASSTYPE {
OMX_U32 nSize; /**< size of the structure in bytes */
OMX_VERSIONTYPE nVersion; /**< OMX specification version information */
OMX_U32 nPortIndex; /**< port that this structure applies to */
OMX_BOOL bEnable; /**< Enable/disable for bass control */
- OMX_S32 nBass; /**< bass setting for the port, as a
- continuous value from -100 to 100
+ OMX_S32 nBass; /**< bass setting for the port, as a
+ continuous value from -100 to 100
(0 means no change in bass level)*/
} OMX_AUDIO_CONFIG_BASSTYPE;
/** Enable / Disable for treble, which controls high frequencies tones
- */
+ */
typedef struct OMX_AUDIO_CONFIG_TREBLETYPE {
OMX_U32 nSize; /**< size of the structure in bytes */
OMX_VERSIONTYPE nVersion; /**< OMX specification version information */
OMX_U32 nPortIndex; /**< port that this structure applies to */
OMX_BOOL bEnable; /**< Enable/disable for treble control */
OMX_S32 nTreble; /**< treble setting for the port, as a
- continuous value from -100 to 100
+ continuous value from -100 to 100
(0 means no change in treble level) */
} OMX_AUDIO_CONFIG_TREBLETYPE;
-/** An equalizer is typically used for two reasons: to compensate for an
- * sub-optimal frequency response of a system to make it sound more natural
+/** An equalizer is typically used for two reasons: to compensate for an
+ * sub-optimal frequency response of a system to make it sound more natural
* or to create intentionally some unnatural coloring to the sound to create
* an effect.
* @ingroup effects
@@ -1160,33 +1192,33 @@ typedef struct OMX_AUDIO_CONFIG_EQUALIZERTYPE {
OMX_VERSIONTYPE nVersion; /**< OMX specification version information */
OMX_U32 nPortIndex; /**< port that this structure applies to */
OMX_BOOL bEnable; /**< Enable/disable for equalizer */
- OMX_BU32 sBandIndex; /**< Band number to be set. Upper Limit is
+ OMX_BU32 sBandIndex; /**< Band number to be set. Upper Limit is
N-1, where N is the number of bands, lower limit is 0 */
OMX_BU32 sCenterFreq; /**< Center frequecies in Hz. This is a
- read only element and is used to determine
- the lower, center and upper frequency of
+ read only element and is used to determine
+ the lower, center and upper frequency of
this band. */
OMX_BS32 sBandLevel; /**< band level in millibels */
} OMX_AUDIO_CONFIG_EQUALIZERTYPE;
-/** Stereo widening mode type
+/** Stereo widening mode type
* @ingroup effects
- */
+ */
typedef enum OMX_AUDIO_STEREOWIDENINGTYPE {
OMX_AUDIO_StereoWideningHeadphones, /**< Stereo widening for loudspeakers */
OMX_AUDIO_StereoWideningLoudspeakers, /**< Stereo widening for closely spaced loudspeakers */
- OMX_AUDIO_StereoWideningKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
+ OMX_AUDIO_StereoWideningKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
OMX_AUDIO_StereoWideningVendorStartUnused = 0x7F000000, /**< Reserved region for introducing Vendor Extensions */
OMX_AUDIO_StereoWideningMax = 0x7FFFFFFF
} OMX_AUDIO_STEREOWIDENINGTYPE;
/** Control for stereo widening, which is a special 2-channel
- * case of the audio virtualizer effect. For example, for 5.1-channel
- * output, it translates to virtual surround sound.
+ * case of the audio virtualizer effect. For example, for 5.1-channel
+ * output, it translates to virtual surround sound.
* @ingroup effects
- */
+ */
typedef struct OMX_AUDIO_CONFIG_STEREOWIDENINGTYPE {
OMX_U32 nSize; /**< size of the structure in bytes */
OMX_VERSIONTYPE nVersion; /**< OMX specification version information */
@@ -1199,10 +1231,10 @@ typedef struct OMX_AUDIO_CONFIG_STEREOWIDENINGTYPE {
/** The chorus effect (or ``choralizer'') is any signal processor which makes
- * one sound source (such as a voice) sound like many such sources singing
- * (or playing) in unison. Since performance in unison is never exact, chorus
- * effects simulate this by making independently modified copies of the input
- * signal. Modifications may include (1) delay, (2) frequency shift, and
+ * one sound source (such as a voice) sound like many such sources singing
+ * (or playing) in unison. Since performance in unison is never exact, chorus
+ * effects simulate this by making independently modified copies of the input
+ * signal. Modifications may include (1) delay, (2) frequency shift, and
* (3) amplitude modulation.
* @ingroup effects
*/
@@ -1213,16 +1245,16 @@ typedef struct OMX_AUDIO_CONFIG_CHORUSTYPE {
OMX_BOOL bEnable; /**< Enable/disable for chorus */
OMX_BU32 sDelay; /**< average delay in milliseconds */
OMX_BU32 sModulationRate; /**< rate of modulation in millihertz */
- OMX_U32 nModulationDepth; /**< depth of modulation as a percentage of
+ OMX_U32 nModulationDepth; /**< depth of modulation as a percentage of
delay (i.e. 0 to 100) */
OMX_BU32 nFeedback; /**< Feedback from chorus output to input in percentage */
} OMX_AUDIO_CONFIG_CHORUSTYPE;
-/** Reverberation is part of the reflected sound that follows the early
- * reflections. In a typical room, this consists of a dense succession of
- * echoes whose energy decays exponentially. The reverberation effect structure
- * as defined here includes both (early) reflections as well as (late) reverberations.
+/** Reverberation is part of the reflected sound that follows the early
+ * reflections. In a typical room, this consists of a dense succession of
+ * echoes whose energy decays exponentially. The reverberation effect structure
+ * as defined here includes both (early) reflections as well as (late) reverberations.
* @ingroup effects
*/
typedef struct OMX_AUDIO_CONFIG_REVERBERATIONTYPE {
@@ -1230,48 +1262,48 @@ typedef struct OMX_AUDIO_CONFIG_REVERBERATIONTYPE {
OMX_VERSIONTYPE nVersion; /**< OMX specification version information */
OMX_U32 nPortIndex; /**< port that this structure applies to */
OMX_BOOL bEnable; /**< Enable/disable for reverberation control */
- OMX_BS32 sRoomLevel; /**< Intensity level for the whole room effect
- (i.e. both early reflections and late
+ OMX_BS32 sRoomLevel; /**< Intensity level for the whole room effect
+ (i.e. both early reflections and late
reverberation) in millibels */
OMX_BS32 sRoomHighFreqLevel; /**< Attenuation at high frequencies
relative to the intensity at low
frequencies in millibels */
OMX_BS32 sReflectionsLevel; /**< Intensity level of early reflections
(relative to room value), in millibels */
- OMX_BU32 sReflectionsDelay; /**< Delay time of the first reflection relative
+ OMX_BU32 sReflectionsDelay; /**< Delay time of the first reflection relative
to the direct path, in milliseconds */
OMX_BS32 sReverbLevel; /**< Intensity level of late reverberation
relative to room level, in millibels */
- OMX_BU32 sReverbDelay; /**< Time delay from the first early reflection
- to the beginning of the late reverberation
+ OMX_BU32 sReverbDelay; /**< Time delay from the first early reflection
+ to the beginning of the late reverberation
section, in milliseconds */
OMX_BU32 sDecayTime; /**< Late reverberation decay time at low
frequencies, in milliseconds */
- OMX_BU32 nDecayHighFreqRatio; /**< Ratio of high frequency decay time relative
+ OMX_BU32 nDecayHighFreqRatio; /**< Ratio of high frequency decay time relative
to low frequency decay time in percent */
OMX_U32 nDensity; /**< Modal density in the late reverberation decay,
in percent (i.e. 0 - 100) */
OMX_U32 nDiffusion; /**< Echo density in the late reverberation decay,
in percent (i.e. 0 - 100) */
- OMX_BU32 sReferenceHighFreq; /**< Reference high frequency in Hertz. This is
- the frequency used as the reference for all
+ OMX_BU32 sReferenceHighFreq; /**< Reference high frequency in Hertz. This is
+ the frequency used as the reference for all
the high-frequency settings above */
} OMX_AUDIO_CONFIG_REVERBERATIONTYPE;
-/** Possible settings for the Echo Cancelation structure to use
+/** Possible settings for the Echo Cancelation structure to use
* @ingroup effects
*/
typedef enum OMX_AUDIO_ECHOCANTYPE {
OMX_AUDIO_EchoCanOff = 0, /**< Echo Cancellation is disabled */
- OMX_AUDIO_EchoCanNormal, /**< Echo Cancellation normal operation -
+ OMX_AUDIO_EchoCanNormal, /**< Echo Cancellation normal operation -
echo from plastics and face */
- OMX_AUDIO_EchoCanHFree, /**< Echo Cancellation optimized for
+ OMX_AUDIO_EchoCanHFree, /**< Echo Cancellation optimized for
Hands Free operation */
- OMX_AUDIO_EchoCanCarKit, /**< Echo Cancellation optimized for
+ OMX_AUDIO_EchoCanCarKit, /**< Echo Cancellation optimized for
Car Kit (longer echo) */
- OMX_AUDIO_EchoCanKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
+ OMX_AUDIO_EchoCanKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
OMX_AUDIO_EchoCanVendorStartUnused = 0x7F000000, /**< Reserved region for introducing Vendor Extensions */
OMX_AUDIO_EchoCanMax = 0x7FFFFFFF
} OMX_AUDIO_ECHOCANTYPE;
@@ -1280,7 +1312,7 @@ typedef enum OMX_AUDIO_ECHOCANTYPE {
/** Enable / Disable for echo cancelation, which removes undesired echo's
* from the audio
* @ingroup effects
- */
+ */
typedef struct OMX_AUDIO_CONFIG_ECHOCANCELATIONTYPE {
OMX_U32 nSize; /**< size of the structure in bytes */
OMX_VERSIONTYPE nVersion; /**< OMX specification version information */
@@ -1292,7 +1324,7 @@ typedef struct OMX_AUDIO_CONFIG_ECHOCANCELATIONTYPE {
/** Enable / Disable for noise reduction, which undesired noise from
* the audio
* @ingroup effects
- */
+ */
typedef struct OMX_AUDIO_CONFIG_NOISEREDUCTIONTYPE {
OMX_U32 nSize; /**< size of the structure in bytes */
OMX_VERSIONTYPE nVersion; /**< OMX specification version information */
@@ -1308,4 +1340,3 @@ typedef struct OMX_AUDIO_CONFIG_NOISEREDUCTIONTYPE {
#endif
/* File EOF */
-
diff --git a/domx/omx_core/inc/OMX_Component.h b/domx/omx_core/inc/OMX_Component.h
index d595640..c312aef 100755
--- a/domx/omx_core/inc/OMX_Component.h
+++ b/domx/omx_core/inc/OMX_Component.h
@@ -1,23 +1,40 @@
+/* ------------------------------------------------------------------
+ * Copyright (C) 1998-2009 PacketVideo
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
+ * express or implied.
+ * See the License for the specific language governing permissions
+ * and limitations under the License.
+ * -------------------------------------------------------------------
+ */
/*
- * Copyright (c) 2008 The Khronos Group Inc.
- *
+ * Copyright (c) 2008 The Khronos Group Inc.
+ *
* Permission is hereby granted, free of charge, to any person obtaining
* a copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sublicense, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject
- * to the following conditions:
+ * to the following conditions:
* The above copyright notice and this permission notice shall be included
- * in all copies or substantial portions of the Software.
- *
+ * in all copies or substantial portions of the Software.
+ *
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
* OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
- * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+ * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*
*/
@@ -38,7 +55,7 @@ extern "C" {
/* Each OMX header must include all required header files to allow the
* header to compile without errors. The includes below are required
- * for this header file to compile successfully
+ * for this header file to compile successfully
*/
#include <OMX_Audio.h>
@@ -47,12 +64,12 @@ extern "C" {
#include <OMX_Other.h>
/** @ingroup comp */
-typedef enum OMX_PORTDOMAINTYPE {
- OMX_PortDomainAudio,
- OMX_PortDomainVideo,
- OMX_PortDomainImage,
+typedef enum OMX_PORTDOMAINTYPE {
+ OMX_PortDomainAudio,
+ OMX_PortDomainVideo,
+ OMX_PortDomainImage,
OMX_PortDomainOther,
- OMX_PortDomainKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
+ OMX_PortDomainKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
OMX_PortDomainVendorStartUnused = 0x7F000000, /**< Reserved region for introducing Vendor Extensions */
OMX_PortDomainMax = 0x7ffffff
} OMX_PORTDOMAINTYPE;
@@ -71,7 +88,7 @@ typedef struct OMX_PARAM_PORTDEFINITIONTYPE {
When disabled a port is unpopulated. A disabled port
is not populated with buffers on a transition to IDLE. */
OMX_BOOL bPopulated; /**< Port is populated with all of its buffers as indicated by
- nBufferCountActual. A disabled port is always unpopulated.
+ nBufferCountActual. A disabled port is always unpopulated.
An enabled port is populated on a transition to OMX_StateIdle
and unpopulated on a transition to loaded. */
OMX_PORTDOMAINTYPE eDomain; /**< Domain of the port. Determines the contents of metadata below. */
@@ -86,26 +103,26 @@ typedef struct OMX_PARAM_PORTDEFINITIONTYPE {
} OMX_PARAM_PORTDEFINITIONTYPE;
/** @ingroup comp */
-typedef struct OMX_PARAM_U32TYPE {
- OMX_U32 nSize; /**< Size of this structure, in Bytes */
- OMX_VERSIONTYPE nVersion; /**< OMX specification version information */
- OMX_U32 nPortIndex; /**< port that this structure applies to */
+typedef struct OMX_PARAM_U32TYPE {
+ OMX_U32 nSize; /**< Size of this structure, in Bytes */
+ OMX_VERSIONTYPE nVersion; /**< OMX specification version information */
+ OMX_U32 nPortIndex; /**< port that this structure applies to */
OMX_U32 nU32; /**< U32 value */
} OMX_PARAM_U32TYPE;
/** @ingroup rpm */
typedef enum OMX_SUSPENSIONPOLICYTYPE {
OMX_SuspensionDisabled, /**< No suspension; v1.0 behavior */
- OMX_SuspensionEnabled, /**< Suspension allowed */
- OMX_SuspensionPolicyKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
+ OMX_SuspensionEnabled, /**< Suspension allowed */
+ OMX_SuspensionPolicyKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
OMX_SuspensionPolicyStartUnused = 0x7F000000, /**< Reserved region for introducing Vendor Extensions */
OMX_SuspensionPolicyMax = 0x7fffffff
} OMX_SUSPENSIONPOLICYTYPE;
/** @ingroup rpm */
typedef struct OMX_PARAM_SUSPENSIONPOLICYTYPE {
- OMX_U32 nSize;
- OMX_VERSIONTYPE nVersion;
+ OMX_U32 nSize;
+ OMX_VERSIONTYPE nVersion;
OMX_SUSPENSIONPOLICYTYPE ePolicy;
} OMX_PARAM_SUSPENSIONPOLICYTYPE;
@@ -113,22 +130,22 @@ typedef struct OMX_PARAM_SUSPENSIONPOLICYTYPE {
typedef enum OMX_SUSPENSIONTYPE {
OMX_NotSuspended, /**< component is not suspended */
OMX_Suspended, /**< component is suspended */
- OMX_SuspensionKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
+ OMX_SuspensionKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
OMX_SuspensionVendorStartUnused = 0x7F000000, /**< Reserved region for introducing Vendor Extensions */
OMX_SuspendMax = 0x7FFFFFFF
} OMX_SUSPENSIONTYPE;
/** @ingroup rpm */
typedef struct OMX_PARAM_SUSPENSIONTYPE {
- OMX_U32 nSize;
- OMX_VERSIONTYPE nVersion;
- OMX_SUSPENSIONTYPE eType;
+ OMX_U32 nSize;
+ OMX_VERSIONTYPE nVersion;
+ OMX_SUSPENSIONTYPE eType;
} OMX_PARAM_SUSPENSIONTYPE ;
typedef struct OMX_CONFIG_BOOLEANTYPE {
OMX_U32 nSize;
OMX_VERSIONTYPE nVersion;
- OMX_BOOL bEnabled;
+ OMX_BOOL bEnabled;
} OMX_CONFIG_BOOLEANTYPE;
/* Parameter specifying the content uri to use. */
@@ -154,9 +171,9 @@ typedef struct OMX_PARAM_CONTENTPIPETYPE
typedef struct OMX_RESOURCECONCEALMENTTYPE {
OMX_U32 nSize; /**< size of the structure in bytes */
OMX_VERSIONTYPE nVersion; /**< OMX specification version information */
- OMX_BOOL bResourceConcealmentForbidden; /**< disallow the use of resource concealment
- methods (like degrading algorithm quality to
- lower resource consumption or functional bypass)
+ OMX_BOOL bResourceConcealmentForbidden; /**< disallow the use of resource concealment
+ methods (like degrading algorithm quality to
+ lower resource consumption or functional bypass)
on a component as a resolution to resource conflicts. */
} OMX_RESOURCECONCEALMENTTYPE;
@@ -171,7 +188,7 @@ typedef enum OMX_METADATACHARSETTYPE {
OMX_MetadataCharsetJavaConformantUTF8,
OMX_MetadataCharsetUTF7,
OMX_MetadataCharsetImapUTF7,
- OMX_MetadataCharsetUTF16LE,
+ OMX_MetadataCharsetUTF16LE,
OMX_MetadataCharsetUTF16BE,
OMX_MetadataCharsetGB12345,
OMX_MetadataCharsetHZGB2312,
@@ -197,7 +214,7 @@ typedef enum OMX_METADATACHARSETTYPE {
OMX_MetadataCharsetISO2022JP1,
OMX_MetadataCharsetISOEUCJP,
OMX_MetadataCharsetSMS7Bit,
- OMX_MetadataCharsetKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
+ OMX_MetadataCharsetKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
OMX_MetadataCharsetVendorStartUnused = 0x7F000000, /**< Reserved region for introducing Vendor Extensions */
OMX_MetadataCharsetTypeMax= 0x7FFFFFFF
} OMX_METADATACHARSETTYPE;
@@ -209,7 +226,7 @@ typedef enum OMX_METADATASCOPETYPE
OMX_MetadataScopeTopLevel,
OMX_MetadataScopePortLevel,
OMX_MetadataScopeNodeLevel,
- OMX_MetadataScopeKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
+ OMX_MetadataScopeKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
OMX_MetadataScopeVendorStartUnused = 0x7F000000, /**< Reserved region for introducing Vendor Extensions */
OMX_MetadataScopeTypeMax = 0x7fffffff
} OMX_METADATASCOPETYPE;
@@ -220,7 +237,7 @@ typedef enum OMX_METADATASEARCHMODETYPE
OMX_MetadataSearchValueSizeByIndex,
OMX_MetadataSearchItemByIndex,
OMX_MetadataSearchNextItemByKey,
- OMX_MetadataSearchKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
+ OMX_MetadataSearchKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
OMX_MetadataSearchVendorStartUnused = 0x7F000000, /**< Reserved region for introducing Vendor Extensions */
OMX_MetadataSearchTypeMax = 0x7fffffff
} OMX_METADATASEARCHMODETYPE;
@@ -241,7 +258,7 @@ typedef struct OMX_CONFIG_METADATAITEMTYPE
OMX_VERSIONTYPE nVersion;
OMX_METADATASCOPETYPE eScopeMode;
OMX_U32 nScopeSpecifier;
- OMX_U32 nMetadataItemIndex;
+ OMX_U32 nMetadataItemIndex;
OMX_METADATASEARCHMODETYPE eSearchMode;
OMX_METADATACHARSETTYPE eKeyCharset;
OMX_U8 nKeySizeUsed;
@@ -270,30 +287,30 @@ typedef struct OMX_CONFIG_CONTAINERNODEIDTYPE
OMX_VERSIONTYPE nVersion;
OMX_BOOL bAllKeys;
OMX_U32 nParentNodeID;
- OMX_U32 nNodeIndex;
- OMX_U32 nNodeID;
+ OMX_U32 nNodeIndex;
+ OMX_U32 nNodeID;
OMX_STRING cNodeName;
OMX_BOOL bIsLeafType;
} OMX_CONFIG_CONTAINERNODEIDTYPE;
/** @ingroup metadata */
-typedef struct OMX_PARAM_METADATAFILTERTYPE
-{
- OMX_U32 nSize;
- OMX_VERSIONTYPE nVersion;
- OMX_BOOL bAllKeys; /* if true then this structure refers to all keys and
+typedef struct OMX_PARAM_METADATAFILTERTYPE
+{
+ OMX_U32 nSize;
+ OMX_VERSIONTYPE nVersion;
+ OMX_BOOL bAllKeys; /* if true then this structure refers to all keys and
* the three key fields below are ignored */
OMX_METADATACHARSETTYPE eKeyCharset;
- OMX_U32 nKeySizeUsed;
- OMX_U8 nKey [128];
+ OMX_U32 nKeySizeUsed;
+ OMX_U8 nKey [128];
OMX_U32 nLanguageCountrySizeUsed;
OMX_U8 nLanguageCountry[128];
- OMX_BOOL bEnabled; /* if true then key is part of filter (e.g.
+ OMX_BOOL bEnabled; /* if true then key is part of filter (e.g.
* retained for query later). If false then
* key is not part of filter */
-} OMX_PARAM_METADATAFILTERTYPE;
+} OMX_PARAM_METADATAFILTERTYPE;
-/** The OMX_HANDLETYPE structure defines the component handle. The component
+/** The OMX_HANDLETYPE structure defines the component handle. The component
* handle is used to access all of the component's public methods and also
* contains pointers to the component's private data area. The component
* handle is initialized by the OMX core (with help from the component)
@@ -301,7 +318,7 @@ typedef struct OMX_PARAM_METADATAFILTERTYPE
* successfully loaded, the application can safely access any of the
* component's public functions (although some may return an error because
* the state is inappropriate for the access).
- *
+ *
* @ingroup comp
*/
typedef struct OMX_COMPONENTTYPE
@@ -312,26 +329,26 @@ typedef struct OMX_COMPONENTTYPE
function will fill in this value. */
OMX_U32 nSize;
- /** nVersion is the version of the OMX specification that the structure
- is built against. It is the responsibility of the creator of this
- structure to initialize this value and every user of this structure
- should verify that it knows how to use the exact version of
+ /** nVersion is the version of the OMX specification that the structure
+ is built against. It is the responsibility of the creator of this
+ structure to initialize this value and every user of this structure
+ should verify that it knows how to use the exact version of
this structure found herein. */
OMX_VERSIONTYPE nVersion;
- /** pComponentPrivate is a pointer to the component private data area.
- This member is allocated and initialized by the component when the
- component is first loaded. The application should not access this
+ /** pComponentPrivate is a pointer to the component private data area.
+ This member is allocated and initialized by the component when the
+ component is first loaded. The application should not access this
data area. */
OMX_PTR pComponentPrivate;
- /** pApplicationPrivate is a pointer that is a parameter to the
- OMX_GetHandle method, and contains an application private value
- provided by the IL client. This application private data is
+ /** pApplicationPrivate is a pointer that is a parameter to the
+ OMX_GetHandle method, and contains an application private value
+ provided by the IL client. This application private data is
returned to the IL Client by OMX in all callbacks */
OMX_PTR pApplicationPrivate;
- /** refer to OMX_GetComponentVersion in OMX_core.h or the OMX IL
+ /** refer to OMX_GetComponentVersion in OMX_core.h or the OMX IL
specification for details on the GetComponentVersion method.
*/
OMX_ERRORTYPE (*GetComponentVersion)(
@@ -341,7 +358,7 @@ typedef struct OMX_COMPONENTTYPE
OMX_OUT OMX_VERSIONTYPE* pSpecVersion,
OMX_OUT OMX_UUIDTYPE* pComponentUUID);
- /** refer to OMX_SendCommand in OMX_core.h or the OMX IL
+ /** refer to OMX_SendCommand in OMX_core.h or the OMX IL
specification for details on the SendCommand method.
*/
OMX_ERRORTYPE (*SendCommand)(
@@ -350,43 +367,43 @@ typedef struct OMX_COMPONENTTYPE
OMX_IN OMX_U32 nParam1,
OMX_IN OMX_PTR pCmdData);
- /** refer to OMX_GetParameter in OMX_core.h or the OMX IL
+ /** refer to OMX_GetParameter in OMX_core.h or the OMX IL
specification for details on the GetParameter method.
*/
OMX_ERRORTYPE (*GetParameter)(
- OMX_IN OMX_HANDLETYPE hComponent,
- OMX_IN OMX_INDEXTYPE nParamIndex,
+ OMX_IN OMX_HANDLETYPE hComponent,
+ OMX_IN OMX_INDEXTYPE nParamIndex,
OMX_INOUT OMX_PTR pComponentParameterStructure);
- /** refer to OMX_SetParameter in OMX_core.h or the OMX IL
+ /** refer to OMX_SetParameter in OMX_core.h or the OMX IL
specification for details on the SetParameter method.
*/
OMX_ERRORTYPE (*SetParameter)(
- OMX_IN OMX_HANDLETYPE hComponent,
+ OMX_IN OMX_HANDLETYPE hComponent,
OMX_IN OMX_INDEXTYPE nIndex,
OMX_IN OMX_PTR pComponentParameterStructure);
- /** refer to OMX_GetConfig in OMX_core.h or the OMX IL
+ /** refer to OMX_GetConfig in OMX_core.h or the OMX IL
specification for details on the GetConfig method.
*/
OMX_ERRORTYPE (*GetConfig)(
OMX_IN OMX_HANDLETYPE hComponent,
- OMX_IN OMX_INDEXTYPE nIndex,
+ OMX_IN OMX_INDEXTYPE nIndex,
OMX_INOUT OMX_PTR pComponentConfigStructure);
- /** refer to OMX_SetConfig in OMX_core.h or the OMX IL
+ /** refer to OMX_SetConfig in OMX_core.h or the OMX IL
specification for details on the SetConfig method.
*/
OMX_ERRORTYPE (*SetConfig)(
OMX_IN OMX_HANDLETYPE hComponent,
- OMX_IN OMX_INDEXTYPE nIndex,
+ OMX_IN OMX_INDEXTYPE nIndex,
OMX_IN OMX_PTR pComponentConfigStructure);
- /** refer to OMX_GetExtensionIndex in OMX_core.h or the OMX IL
+ /** refer to OMX_GetExtensionIndex in OMX_core.h or the OMX IL
specification for details on the GetExtensionIndex method.
*/
OMX_ERRORTYPE (*GetExtensionIndex)(
@@ -395,50 +412,50 @@ typedef struct OMX_COMPONENTTYPE
OMX_OUT OMX_INDEXTYPE* pIndexType);
- /** refer to OMX_GetState in OMX_core.h or the OMX IL
+ /** refer to OMX_GetState in OMX_core.h or the OMX IL
specification for details on the GetState method.
*/
OMX_ERRORTYPE (*GetState)(
OMX_IN OMX_HANDLETYPE hComponent,
OMX_OUT OMX_STATETYPE* pState);
-
+
/** The ComponentTunnelRequest method will interact with another OMX
component to determine if tunneling is possible and to setup the
- tunneling. The return codes for this method can be used to
+ tunneling. The return codes for this method can be used to
determine if tunneling is not possible, or if tunneling is not
- supported.
-
+ supported.
+
Base profile components (i.e. non-interop) do not support this
- method and should return OMX_ErrorNotImplemented
+ method and should return OMX_ErrorNotImplemented
- The interop profile component MUST support tunneling to another
- interop profile component with a compatible port parameters.
+ The interop profile component MUST support tunneling to another
+ interop profile component with compatible port parameters.
A component may also support proprietary communication.
-
- If proprietary communication is supported the negotiation of
- proprietary communication is done outside of OMX in a vendor
- specific way. It is only required that the proper result be
- returned and the details of how the setup is done is left
- to the component implementation.
-
+
+ If proprietary communication is supported the negotiation of
+ proprietary communication is done outside of OMX in a vendor
+ specific way. It is only required that the proper result be
+ returned and the details of how the setup is done is left
+ to the component implementation.
+
When this method is invoked when nPort is an output port, the
component will:
- 1. Populate the pTunnelSetup structure with the output port's
+ 1. Populate the pTunnelSetup structure with the output port's
requirements and constraints for the tunnel.
When this method is invoked when nPort is an input port, the
component will:
- 1. Query the necessary parameters from the output port to
+ 1. Query the necessary parameters from the output port to
determine if the ports are compatible for tunneling
2. If the ports are compatible, the component should store
the tunnel step provided by the output port
3. Determine which port (either input or output) is the buffer
supplier, and call OMX_SetParameter on the output port to
indicate this selection.
-
+
The component will return from this call within 5 msec.
-
+
@param [in] hComp
Handle of the component to be accessed. This is the component
handle returned by the call to the OMX_GetHandle method.
@@ -446,7 +463,7 @@ typedef struct OMX_COMPONENTTYPE
nPort is used to select the port on the component to be used
for tunneling.
@param [in] hTunneledComp
- Handle of the component to tunnel with. This is the component
+ Handle of the component to tunnel with. This is the component
handle returned by the call to the OMX_GetHandle method. When
this parameter is 0x0 the component should setup the port for
communication with the application / IL Client.
@@ -469,9 +486,9 @@ typedef struct OMX_COMPONENTTYPE
OMX_IN OMX_U32 nPort,
OMX_IN OMX_HANDLETYPE hTunneledComp,
OMX_IN OMX_U32 nTunneledPort,
- OMX_INOUT OMX_TUNNELSETUPTYPE* pTunnelSetup);
+ OMX_INOUT OMX_TUNNELSETUPTYPE* pTunnelSetup);
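An IL client normally does not call ComponentTunnelRequest directly; the core entry point OMX_SetupTunnel (declared in OMX_Core.h) issues the two calls described above, output port first and then input port. A minimal sketch, assuming two already-loaded handles and illustrative port numbers:

/* Sketch only: the handles and port numbers are placeholders supplied by the caller. */
static OMX_ERRORTYPE tunnel_demux_to_decoder(OMX_HANDLETYPE hDemux, OMX_HANDLETYPE hDecoder)
{
    /* OMX_SetupTunnel() drives ComponentTunnelRequest on both components; base-profile
       components may report OMX_ErrorNotImplemented or OMX_ErrorTunnelingUnsupported. */
    return OMX_SetupTunnel(hDemux, 1 /* output port */, hDecoder, 0 /* input port */);
}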
- /** refer to OMX_UseBuffer in OMX_core.h or the OMX IL
+ /** refer to OMX_UseBuffer in OMX_core.h or the OMX IL
specification for details on the UseBuffer method.
@ingroup buf
*/
@@ -483,7 +500,7 @@ typedef struct OMX_COMPONENTTYPE
OMX_IN OMX_U32 nSizeBytes,
OMX_IN OMX_U8* pBuffer);
- /** refer to OMX_AllocateBuffer in OMX_core.h or the OMX IL
+ /** refer to OMX_AllocateBuffer in OMX_core.h or the OMX IL
specification for details on the AllocateBuffer method.
@ingroup buf
*/
@@ -494,7 +511,7 @@ typedef struct OMX_COMPONENTTYPE
OMX_IN OMX_PTR pAppPrivate,
OMX_IN OMX_U32 nSizeBytes);
- /** refer to OMX_FreeBuffer in OMX_core.h or the OMX IL
+ /** refer to OMX_FreeBuffer in OMX_core.h or the OMX IL
specification for details on the FreeBuffer method.
@ingroup buf
*/
@@ -503,7 +520,7 @@ typedef struct OMX_COMPONENTTYPE
OMX_IN OMX_U32 nPortIndex,
OMX_IN OMX_BUFFERHEADERTYPE* pBuffer);
- /** refer to OMX_EmptyThisBuffer in OMX_core.h or the OMX IL
+ /** refer to OMX_EmptyThisBuffer in OMX_core.h or the OMX IL
specification for details on the EmptyThisBuffer method.
@ingroup buf
*/
@@ -511,7 +528,7 @@ typedef struct OMX_COMPONENTTYPE
OMX_IN OMX_HANDLETYPE hComponent,
OMX_IN OMX_BUFFERHEADERTYPE* pBuffer);
- /** refer to OMX_FillThisBuffer in OMX_core.h or the OMX IL
+ /** refer to OMX_FillThisBuffer in OMX_core.h or the OMX IL
specification for details on the FillThisBuffer method.
@ingroup buf
*/
@@ -526,10 +543,10 @@ typedef struct OMX_COMPONENTTYPE
Handle of the component to be accessed. This is the component
handle returned by the call to the GetHandle function.
@param [in] pCallbacks
- pointer to an OMX_CALLBACKTYPE structure used to provide the
+ pointer to an OMX_CALLBACKTYPE structure used to provide the
callback information to the component
@param [in] pAppData
- pointer to an application defined value. It is anticipated that
+ pointer to an application defined value. It is anticipated that
the application will pass a pointer to a data structure or a "this
pointer" in this area to allow the callback (in the application)
to determine the context of the call
@@ -539,7 +556,7 @@ typedef struct OMX_COMPONENTTYPE
*/
OMX_ERRORTYPE (*SetCallbacks)(
OMX_IN OMX_HANDLETYPE hComponent,
- OMX_IN OMX_CALLBACKTYPE* pCallbacks,
+ OMX_IN OMX_CALLBACKTYPE* pCallbacks,
OMX_IN OMX_PTR pAppData);
/** ComponentDeInit method is used to deinitialize the component
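In practice, the OMX_HANDLETYPE returned by OMX_GetHandle points at this OMX_COMPONENTTYPE table, and the core macros in OMX_Core.h simply dereference it. A minimal sketch of what a call such as OMX_GetState amounts to (hComp is a placeholder handle):

/* Illustrative only: mirrors the expansion of the OMX_GetState core macro. */
static OMX_BOOL component_is_loaded(OMX_HANDLETYPE hComp)
{
    OMX_STATETYPE state = OMX_StateInvalid;
    OMX_ERRORTYPE err = ((OMX_COMPONENTTYPE *) hComp)->GetState(hComp, &state);
    return (err == OMX_ErrorNone && state == OMX_StateLoaded) ? OMX_TRUE : OMX_FALSE;
}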
diff --git a/domx/omx_core/inc/OMX_ContentPipe.h b/domx/omx_core/inc/OMX_ContentPipe.h
index 5f6310c..6b1025e 100755
--- a/domx/omx_core/inc/OMX_ContentPipe.h
+++ b/domx/omx_core/inc/OMX_ContentPipe.h
@@ -1,23 +1,40 @@
+/* ------------------------------------------------------------------
+ * Copyright (C) 1998-2009 PacketVideo
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
+ * express or implied.
+ * See the License for the specific language governing permissions
+ * and limitations under the License.
+ * -------------------------------------------------------------------
+ */
/*
- * Copyright (c) 2008 The Khronos Group Inc.
- *
+ * Copyright (c) 2008 The Khronos Group Inc.
+ *
* Permission is hereby granted, free of charge, to any person obtaining
* a copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sublicense, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject
- * to the following conditions:
+ * to the following conditions:
* The above copyright notice and this permission notice shall be included
- * in all copies or substantial portions of the Software.
- *
+ * in all copies or substantial portions of the Software.
+ *
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
* OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
- * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+ * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*
*/
@@ -64,83 +81,83 @@
/** Map types from OMX standard types only here so interface is as generic as possible. */
typedef OMX_U32 CPresult;
-typedef char * CPstring;
+typedef char * CPstring;
typedef void * CPhandle;
typedef OMX_U32 CPuint;
-typedef OMX_S32 CPint;
-typedef char CPbyte;
+typedef OMX_S32 CPint;
+typedef char CPbyte;
typedef OMX_BOOL CPbool;
-/** enumeration of origin types used in the CP_PIPETYPE's Seek function
+/** enumeration of origin types used in the CP_PIPETYPE's Seek function
* @ingroup cp
*/
typedef enum CP_ORIGINTYPE {
- CP_OriginBegin,
- CP_OriginCur,
- CP_OriginEnd,
- CP_OriginKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
+ CP_OriginBegin,
+ CP_OriginCur,
+ CP_OriginEnd,
+ CP_OriginKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
CP_OriginVendorStartUnused = 0x7F000000, /**< Reserved region for introducing Vendor Extensions */
CP_OriginMax = 0X7FFFFFFF
} CP_ORIGINTYPE;
-/** enumeration of contact access types used in the CP_PIPETYPE's Open function
+/** enumeration of contact access types used in the CP_PIPETYPE's Open function
* @ingroup cp
*/
typedef enum CP_ACCESSTYPE {
- CP_AccessRead,
- CP_AccessWrite,
- CP_AccessReadWrite ,
- CP_AccessKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
+ CP_AccessRead,
+ CP_AccessWrite,
+ CP_AccessReadWrite ,
+ CP_AccessKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
CP_AccessVendorStartUnused = 0x7F000000, /**< Reserved region for introducing Vendor Extensions */
CP_AccessMax = 0X7FFFFFFF
} CP_ACCESSTYPE;
-/** enumeration of results returned by the CP_PIPETYPE's CheckAvailableBytes function
+/** enumeration of results returned by the CP_PIPETYPE's CheckAvailableBytes function
* @ingroup cp
*/
typedef enum CP_CHECKBYTESRESULTTYPE
{
- CP_CheckBytesOk, /**< There are at least the request number
+ CP_CheckBytesOk, /**< There are at least the requested number
of bytes available */
- CP_CheckBytesNotReady, /**< The pipe is still retrieving bytes
- and presently lacks sufficient bytes.
- Client will be called when they are
+ CP_CheckBytesNotReady, /**< The pipe is still retrieving bytes
+ and presently lacks sufficient bytes.
+ Client will be called when
sufficient bytes are available. */
- CP_CheckBytesInsufficientBytes , /**< The pipe has retrieved all bytes
- but those available are less than those
+ CP_CheckBytesInsufficientBytes , /**< The pipe has retrieved all bytes
+ but those available are less than those
requested */
CP_CheckBytesAtEndOfStream, /**< The pipe has reached the end of stream
and no more bytes are available. */
CP_CheckBytesOutOfBuffers, /**< All read/write buffers are currently in use. */
- CP_CheckBytesKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
+ CP_CheckBytesKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
CP_CheckBytesVendorStartUnused = 0x7F000000, /**< Reserved region for introducing Vendor Extensions */
CP_CheckBytesMax = 0X7FFFFFFF
} CP_CHECKBYTESRESULTTYPE;
-/** enumeration of content pipe events sent to the client callback.
+/** enumeration of content pipe events sent to the client callback.
* @ingroup cp
*/
typedef enum CP_EVENTTYPE{
CP_BytesAvailable, /** bytes requested in a CheckAvailableBytes call are now available*/
CP_Overflow, /** enumeration of content pipe events sent to the client callback*/
CP_PipeDisconnected , /** enumeration of content pipe events sent to the client callback*/
- CP_EventKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
+ CP_EventKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
CP_EventVendorStartUnused = 0x7F000000, /**< Reserved region for introducing Vendor Extensions */
CP_EventMax = 0X7FFFFFFF
} CP_EVENTTYPE;
-/** content pipe definition
+/** content pipe definition
* @ingroup cp
*/
typedef struct CP_PIPETYPE
{
- /** Open a content stream for reading or writing. */
+ /** Open a content stream for reading or writing. */
CPresult (*Open)( CPhandle* hContent, CPstring szURI, CP_ACCESSTYPE eAccess );
- /** Close a content stream. */
+ /** Close a content stream. */
CPresult (*Close)( CPhandle hContent );
- /** Create a content source and open it for writing. */
+ /** Create a content source and open it for writing. */
CPresult (*Create)( CPhandle *hContent, CPstring szURI );
/** Check that the specified number of bytes are available for reading or writing (depending on access type).*/
@@ -154,19 +171,19 @@ typedef struct CP_PIPETYPE
/** Retrieve data of the specified size from the content stream (advance content pointer by size of data).
Note: pipe client provides pointer. This function is appropriate for small high frequency reads. */
- CPresult (*Read)( CPhandle hContent, CPbyte *pData, CPuint nSize);
+ CPresult (*Read)( CPhandle hContent, CPbyte *pData, CPuint nSize);
- /** Retrieve a buffer allocated by the pipe that contains the requested number of bytes.
+ /** Retrieve a buffer allocated by the pipe that contains the requested number of bytes.
Buffer contains the next block of bytes, as specified by nSize, of the content. nSize also
- returns the size of the block actually read. Content pointer advances the by the returned size.
- Note: pipe provides pointer. This function is appropriate for large reads. The client must call
- ReleaseReadBuffer when done with buffer.
+ returns the size of the block actually read. Content pointer advances by the returned size.
+ Note: pipe provides pointer. This function is appropriate for large reads. The client must call
+ ReleaseReadBuffer when done with buffer.
In some cases the requested block may not reside in contiguous memory within the
- pipe implementation. For instance if the pipe leverages a circular buffer then the requested
- block may straddle the boundary of the circular buffer. By default a pipe implementation
+ pipe implementation. For instance if the pipe leverages a circular buffer then the requested
+ block may straddle the boundary of the circular buffer. By default a pipe implementation
performs a copy in this case to provide the block to the pipe client in one contiguous buffer.
- If, however, the client sets bForbidCopy, then the pipe returns only those bytes preceding the memory
+ If, however, the client sets bForbidCopy, then the pipe returns only those bytes preceding the memory
boundary. Here the client may retrieve the data in segments over successive calls. */
CPresult (*ReadBuffer)( CPhandle hContent, CPbyte **ppBuffer, CPuint *nSize, CPbool bForbidCopy);
@@ -175,14 +192,14 @@ typedef struct CP_PIPETYPE
/** Write data of the specified size to the content (advance content pointer by size of data).
Note: pipe client provides pointer. This function is appropriate for small high frequency writes. */
- CPresult (*Write)( CPhandle hContent, CPbyte *data, CPuint nSize);
+ CPresult (*Write)( CPhandle hContent, CPbyte *data, CPuint nSize);
- /** Retrieve a buffer allocated by the pipe used to write data to the content.
+ /** Retrieve a buffer allocated by the pipe used to write data to the content.
Client will fill buffer with output data. Note: pipe provides pointer. This function is appropriate
for large writes. The client must call WriteBuffer when it has finished filling the buffer with data.*/
CPresult (*GetWriteBuffer)( CPhandle hContent, CPbyte **ppBuffer, CPuint nSize);
- /** Deliver a buffer obtained via GetWriteBuffer to the pipe. Pipe will write the
+ /** Deliver a buffer obtained via GetWriteBuffer to the pipe. Pipe will write the
contents of the buffer to the content and advance the content pointer by the size of the buffer */
CPresult (*WriteBuffer)( CPhandle hContent, CPbyte *pBuffer, CPuint nFilledSize);
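Since CP_PIPETYPE is just a table of function pointers, a trivial local-file pipe can be sketched on top of stdio. This is not part of the TI or Khronos sources; only Open, Read and Close are shown, the error codes are placeholders, and a real pipe must also provide Create, SetPosition, CheckAvailableBytes and the remaining entry points.

#include <stdio.h>

static CPresult file_open(CPhandle *hContent, CPstring szURI, CP_ACCESSTYPE eAccess)
{
    FILE *f = fopen(szURI, eAccess == CP_AccessWrite ? "wb" : "rb");
    if (f == NULL)
        return 1;                               /* placeholder failure code */
    *hContent = f;
    return 0;
}

static CPresult file_read(CPhandle hContent, CPbyte *pData, CPuint nSize)
{
    return fread(pData, 1, nSize, (FILE *) hContent) == nSize ? 0 : 1;
}

static CPresult file_close(CPhandle hContent)
{
    fclose((FILE *) hContent);
    return 0;
}

/* C99 designated initializers; unlisted members stay NULL in this sketch. */
static CP_PIPETYPE sLocalFilePipe = {
    .Open  = file_open,
    .Close = file_close,
    .Read  = file_read,
};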
diff --git a/domx/omx_core/inc/OMX_Core.h b/domx/omx_core/inc/OMX_Core.h
index a076f2f..729dc36 100755
--- a/domx/omx_core/inc/OMX_Core.h
+++ b/domx/omx_core/inc/OMX_Core.h
@@ -1,23 +1,40 @@
+/* ------------------------------------------------------------------
+ * Copyright (C) 1998-2009 PacketVideo
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
+ * express or implied.
+ * See the License for the specific language governing permissions
+ * and limitations under the License.
+ * -------------------------------------------------------------------
+ */
/*
- * Copyright (c) 2008 The Khronos Group Inc.
- *
+ * Copyright (c) 2008 The Khronos Group Inc.
+ *
* Permission is hereby granted, free of charge, to any person obtaining
* a copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sublicense, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject
- * to the following conditions:
+ * to the following conditions:
* The above copyright notice and this permission notice shall be included
- * in all copies or substantial portions of the Software.
- *
+ * in all copies or substantial portions of the Software.
+ *
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
* OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
- * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+ * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*
*/
@@ -36,14 +53,14 @@ extern "C" {
/* Each OMX header shall include all required header files to allow the
* header to compile without errors. The includes below are required
- * for this header file to compile successfully
+ * for this header file to compile successfully
*/
#include <OMX_Index.h>
/** The OMX_COMMANDTYPE enumeration is used to specify the action in the
- * OMX_SendCommand macro.
+ * OMX_SendCommand macro.
* @ingroup core
*/
typedef enum OMX_COMMANDTYPE
@@ -53,7 +70,7 @@ typedef enum OMX_COMMANDTYPE
OMX_CommandPortDisable, /**< Disable a port on a component. */
OMX_CommandPortEnable, /**< Enable a port on a component. */
OMX_CommandMarkBuffer, /**< Mark a component/buffer for observation */
- OMX_CommandKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
+ OMX_CommandKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
OMX_CommandVendorStartUnused = 0x7F000000, /**< Reserved region for introducing Vendor Extensions */
OMX_CommandMax = 0X7FFFFFFF
} OMX_COMMANDTYPE;
@@ -76,28 +93,28 @@ typedef enum OMX_COMMANDTYPE
* specified configuration and will transition to the idle state if the
* allocation is successful. If the component cannot successfully
* transition to the idle state for any reason, the state of the component
- * shall be fully rolled back to the Loaded state (e.g. all allocated
+ * shall be fully rolled back to the Loaded state (e.g. all allocated
* resources shall be released). When the component receives the command
* to go to the Executing state, it shall begin processing buffers by
* sending all input buffers it holds to the application. While
* the component is in the Idle state, the application may also send the
* Pause command. If the component receives the pause command while in the
- * Idle state, the component shall send all input buffers it holds to the
+ * Idle state, the component shall send all input buffers it holds to the
* application, but shall not begin processing buffers. This will allow the
* application to prefill buffers.
- *
+ *
* @ingroup comp
*/
typedef enum OMX_STATETYPE
{
- OMX_StateInvalid, /**< component has detected that it's internal data
+ OMX_StateInvalid, /**< component has detected that it's internal data
structures are corrupted to the point that
it cannot determine it's state properly */
OMX_StateLoaded, /**< component has been loaded but has not completed
initialization. The OMX_SetParameter macro
- and the OMX_GetParameter macro are the only
- valid macros allowed to be sent to the
+ and the OMX_GetParameter macro are the only
+ valid macros allowed to be sent to the
component in this state. */
OMX_StateIdle, /**< component initialization has been completed
successfully and the component is ready to
@@ -105,17 +122,17 @@ typedef enum OMX_STATETYPE
OMX_StateExecuting, /**< component has accepted the start command and
is processing data (if data is available) */
OMX_StatePause, /**< component has received pause command */
- OMX_StateWaitForResources, /**< component is waiting for resources, either after
+ OMX_StateWaitForResources, /**< component is waiting for resources, either after
preemption or before it gets the resources requested.
See specification for complete details. */
- OMX_StateKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
+ OMX_StateKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
OMX_StateVendorStartUnused = 0x7F000000, /**< Reserved region for introducing Vendor Extensions */
OMX_StateMax = 0X7FFFFFFF
} OMX_STATETYPE;
-/** The OMX_ERRORTYPE enumeration defines the standard OMX Errors. These
- * errors should cover most of the common failure cases. However,
- * vendors are free to add additional error messages of their own as
+/** The OMX_ERRORTYPE enumeration defines the standard OMX Errors. These
+ * errors should cover most of the common failure cases. However,
+ * vendors are free to add additional error messages of their own as
* long as they follow these rules:
* 1. Vendor error messages shall be in the range of 0x90000000 to
* 0x9000FFFF.
@@ -186,25 +203,25 @@ typedef enum OMX_ERRORTYPE
/** This error occurs when trying to transition into the state you are already in */
OMX_ErrorSameState = (OMX_S32) 0x80001012,
- /** Resources allocated to an executing or paused component have been
+ /** Resources allocated to an executing or paused component have been
preempted, causing the component to return to the idle state */
- OMX_ErrorResourcesPreempted = (OMX_S32) 0x80001013,
+ OMX_ErrorResourcesPreempted = (OMX_S32) 0x80001013,
- /** A non-supplier port sends this error to the IL client (via the EventHandler callback)
+ /** A non-supplier port sends this error to the IL client (via the EventHandler callback)
during the allocation of buffers (on a transition from the LOADED to the IDLE state or
- on a port restart) when it deems that it has waited an unusually long time for the supplier
+ on a port restart) when it deems that it has waited an unusually long time for the supplier
to send it an allocated buffer via a UseBuffer call. */
OMX_ErrorPortUnresponsiveDuringAllocation = (OMX_S32) 0x80001014,
- /** A non-supplier port sends this error to the IL client (via the EventHandler callback)
- during the deallocation of buffers (on a transition from the IDLE to LOADED state or
- on a port stop) when it deems that it has waited an unusually long time for the supplier
+ /** A non-supplier port sends this error to the IL client (via the EventHandler callback)
+ during the deallocation of buffers (on a transition from the IDLE to LOADED state or
+ on a port stop) when it deems that it has waited an unusually long time for the supplier
to request the deallocation of a buffer header via a FreeBuffer call. */
OMX_ErrorPortUnresponsiveDuringDeallocation = (OMX_S32) 0x80001015,
- /** A supplier port sends this error to the IL client (via the EventHandler callback)
- during the stopping of a port (either on a transition from the IDLE to LOADED
- state or a port stop) when it deems that it has waited an unusually long time for
+ /** A supplier port sends this error to the IL client (via the EventHandler callback)
+ during the stopping of a port (either on a transition from the IDLE to LOADED
+ state or a port stop) when it deems that it has waited an unusually long time for
the non-supplier to return a buffer via an EmptyThisBuffer or FillThisBuffer call. */
OMX_ErrorPortUnresponsiveDuringStop = (OMX_S32) 0x80001016,
@@ -212,7 +229,7 @@ typedef enum OMX_ERRORTYPE
OMX_ErrorIncorrectStateTransition = (OMX_S32) 0x80001017,
/* Attempting a command that is not allowed during the present state. */
- OMX_ErrorIncorrectStateOperation = (OMX_S32) 0x80001018,
+ OMX_ErrorIncorrectStateOperation = (OMX_S32) 0x80001018,
/** The values encapsulated in the parameter or config structure are not supported. */
OMX_ErrorUnsupportedSetting = (OMX_S32) 0x80001019,
@@ -232,12 +249,12 @@ typedef enum OMX_ERRORTYPE
/** Component suspended due to an inability to acquire dynamic resources */
OMX_ErrorDynamicResourcesUnavailable = (OMX_S32) 0x8000101E,
- /** When the macroblock error reporting is enabled the component returns new error
+ /** When the macroblock error reporting is enabled the component returns new error
for every frame that has errors */
OMX_ErrorMbErrorsInFrame = (OMX_S32) 0x8000101F,
/** A component reports this error when it cannot parse or determine the format of an input stream. */
- OMX_ErrorFormatNotDetected = (OMX_S32) 0x80001020,
+ OMX_ErrorFormatNotDetected = (OMX_S32) 0x80001020,
/** The content open operation failed. */
OMX_ErrorContentPipeOpenFailed = (OMX_S32) 0x80001021,
@@ -251,7 +268,7 @@ typedef enum OMX_ERRORTYPE
/** Tunneling is unsupported by the component*/
OMX_ErrorTunnelingUnsupported = (OMX_S32) 0x80001024,
- OMX_ErrorKhronosExtensions = (OMX_S32)0x8F000000, /**< Reserved region for introducing Khronos Standard Extensions */
+ OMX_ErrorKhronosExtensions = (OMX_S32)0x8F000000, /**< Reserved region for introducing Khronos Standard Extensions */
OMX_ErrorVendorStartUnused = (OMX_S32)0x90000000, /**< Reserved region for introducing Vendor Extensions */
OMX_ErrorMax = 0x7FFFFFFF
} OMX_ERRORTYPE;
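Given the numbering rules above, a client can cheaply tell a standard error from a vendor extension. A small illustrative helper (not part of the header):

/* Vendor-defined errors occupy 0x90000000..0x9000FFFF per the rules above. */
static int omx_error_is_vendor(OMX_ERRORTYPE e)
{
    OMX_U32 v = (OMX_U32) e;
    return v >= 0x90000000u && v <= 0x9000FFFFu;
}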
@@ -287,69 +304,69 @@ typedef struct OMX_PARAM_COMPONENTROLETYPE {
OMX_U8 cRole[OMX_MAX_STRINGNAME_SIZE]; /**< name of standard component which defines component role */
} OMX_PARAM_COMPONENTROLETYPE;
-/** End of Stream Buffer Flag:
+/** End of Stream Buffer Flag:
*
- * A component sets EOS when it has no more data to emit on a particular
- * output port. Thus an output port shall set EOS on the last buffer it
- * emits. A component's determination of when an output port should
+ * A component sets EOS when it has no more data to emit on a particular
+ * output port. Thus an output port shall set EOS on the last buffer it
+ * emits. A component's determination of when an output port should
 * cease sending data is implementation specific.
* @ingroup buf
*/
-#define OMX_BUFFERFLAG_EOS 0x00000001
+#define OMX_BUFFERFLAG_EOS 0x00000001
-/** Start Time Buffer Flag:
+/** Start Time Buffer Flag:
*
* The source of a stream (e.g. a demux component) sets the STARTTIME
* flag on the buffer that contains the starting timestamp for the
* stream. The starting timestamp corresponds to the first data that
* should be displayed at startup or after a seek.
* The first timestamp of the stream is not necessarily the start time.
- * For instance, in the case of a seek to a particular video frame,
- * the target frame may be an interframe. Thus the first buffer of
+ * For instance, in the case of a seek to a particular video frame,
+ * the target frame may be an interframe. Thus the first buffer of
* the stream will be the intra-frame preceding the target frame and
* the starttime will occur with the target frame (with any other
 * frames required to reconstruct the target intervening).
*
- * The STARTTIME flag is directly associated with the buffer's
- * timestamp ' thus its association to buffer data and its
+ * The STARTTIME flag is directly associated with the buffer's
+ * timestamp ' thus its association to buffer data and its
* propagation is identical to the timestamp's.
*
- * When a Sync Component client receives a buffer with the
- * STARTTIME flag it shall perform a SetConfig on its sync port
+ * When a Sync Component client receives a buffer with the
+ * STARTTIME flag it shall perform a SetConfig on its sync port
* using OMX_ConfigTimeClientStartTime and passing the buffer's
* timestamp.
- *
+ *
* @ingroup buf
*/
#define OMX_BUFFERFLAG_STARTTIME 0x00000002
-
-/** Decode Only Buffer Flag:
+
+/** Decode Only Buffer Flag:
*
* The source of a stream (e.g. a demux component) sets the DECODEONLY
flag on any buffer that shall be decoded but should not be
- * displayed. This flag is used, for instance, when a source seeks to
- * a target interframe that requires the decode of frames preceding the
- * target to facilitate the target's reconstruction. In this case the
- * source would emit the frames preceding the target downstream
+ * displayed. This flag is used, for instance, when a source seeks to
+ * a target interframe that requires the decode of frames preceding the
+ * target to facilitate the target's reconstruction. In this case the
+ * source would emit the frames preceding the target downstream
* but mark them as decode only.
*
- * The DECODEONLY is associated with buffer data and propagated in a
+ * The DECODEONLY is associated with buffer data and propagated in a
* manner identical to the buffer timestamp.
*
- * A component that renders data should ignore all buffers with
+ * A component that renders data should ignore all buffers with
* the DECODEONLY flag set.
- *
+ *
* @ingroup buf
*/
#define OMX_BUFFERFLAG_DECODEONLY 0x00000004
-/* Data Corrupt Flag: This flag is set when the IL client believes the data in the associated buffer is corrupt
+/* Data Corrupt Flag: This flag is set when the IL client believes the data in the associated buffer is corrupt
* @ingroup buf
*/
@@ -357,29 +374,29 @@ typedef struct OMX_PARAM_COMPONENTROLETYPE {
/* End of Frame: The buffer contains exactly one end of frame and no data
* occurs after the end of frame. This flag is an optional hint. The absence
- * of this flag does not imply the absence of an end of frame within the buffer.
+ * of this flag does not imply the absence of an end of frame within the buffer.
* @ingroup buf
*/
#define OMX_BUFFERFLAG_ENDOFFRAME 0x00000010
-/* Sync Frame Flag: This flag is set when the buffer content contains a coded sync frame '
- * a frame that has no dependency on any other frame information
+/* Sync Frame Flag: This flag is set when the buffer content contains a coded sync frame '
+ * a frame that has no dependency on any other frame information
* @ingroup buf
*/
#define OMX_BUFFERFLAG_SYNCFRAME 0x00000020
/* Extra data present flag: there is extra data appended to the data stream
- * residing in the buffer
- * @ingroup buf
+ * residing in the buffer
+ * @ingroup buf
*/
#define OMX_BUFFERFLAG_EXTRADATA 0x00000040
-/** Codec Config Buffer Flag:
+/** Codec Config Buffer Flag:
* OMX_BUFFERFLAG_CODECCONFIG is an optional flag that is set by an
* output port when all bytes in the buffer form part or all of a set of
* codec specific configuration data. Examples include SPS/PPS nal units
* for OMX_VIDEO_CodingAVC or AudioSpecificConfig data for
-* OMX_AUDIO_CodingAAC. Any component that for a given stream sets
+* OMX_AUDIO_CodingAAC. Any component that for a given stream sets
* OMX_BUFFERFLAG_CODECCONFIG shall not mix codec configuration bytes
* with frame data in the same buffer, and shall send all buffers
* containing codec configuration bytes before any buffers containing
@@ -399,50 +416,50 @@ typedef struct OMX_BUFFERHEADERTYPE
{
OMX_U32 nSize; /**< size of the structure in bytes */
OMX_VERSIONTYPE nVersion; /**< OMX specification version information */
- OMX_U8* pBuffer; /**< Pointer to actual block of memory
+ OMX_U8* pBuffer; /**< Pointer to actual block of memory
that is acting as the buffer */
OMX_U32 nAllocLen; /**< size of the buffer allocated, in bytes */
- OMX_U32 nFilledLen; /**< number of bytes currently in the
+ OMX_U32 nFilledLen; /**< number of bytes currently in the
buffer */
OMX_U32 nOffset; /**< start offset of valid data in bytes from
the start of the buffer */
OMX_PTR pAppPrivate; /**< pointer to any data the application
wants to associate with this buffer */
OMX_PTR pPlatformPrivate; /**< pointer to any data the platform
- wants to associate with this buffer */
+ wants to associate with this buffer */
OMX_PTR pInputPortPrivate; /**< pointer to any data the input port
wants to associate with this buffer */
OMX_PTR pOutputPortPrivate; /**< pointer to any data the output port
wants to associate with this buffer */
- OMX_HANDLETYPE hMarkTargetComponent; /**< The component that will generate a
+ OMX_HANDLETYPE hMarkTargetComponent; /**< The component that will generate a
mark event upon processing this buffer. */
- OMX_PTR pMarkData; /**< Application specific data associated with
- the mark sent on a mark event to disambiguate
+ OMX_PTR pMarkData; /**< Application specific data associated with
+ the mark sent on a mark event to disambiguate
this mark from others. */
OMX_U32 nTickCount; /**< Optional entry that the component and
application can update with a tick count
when they access the component. This
value should be in microseconds. Since
this is a value relative to an arbitrary
- starting point, this value cannot be used
+ starting point, this value cannot be used
to determine absolute time. This is an
optional entry and not all components
will update it.*/
- OMX_TICKS nTimeStamp; /**< Timestamp corresponding to the sample
- starting at the first logical sample
- boundary in the buffer. Timestamps of
+ OMX_TICKS nTimeStamp; /**< Timestamp corresponding to the sample
+ starting at the first logical sample
+ boundary in the buffer. Timestamps of
successive samples within the buffer may
- be inferred by adding the duration of the
+ be inferred by adding the duration of the
preceding buffer to the timestamp
of the preceding buffer.*/
OMX_U32 nFlags; /**< buffer specific flags */
- OMX_U32 nOutputPortIndex; /**< The index of the output port (if any) using
+ OMX_U32 nOutputPortIndex; /**< The index of the output port (if any) using
this buffer */
OMX_U32 nInputPortIndex; /**< The index of the input port (if any) using
this buffer */
} OMX_BUFFERHEADERTYPE;
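A typical consumer of a returned buffer header reads the payload through nOffset and nFilledLen and honours the flag bits defined above; for example, a renderer skips DECODEONLY buffers and treats EOS as end of stream. A sketch (handle_filled_buffer and its caller are hypothetical):

static void handle_filled_buffer(OMX_BUFFERHEADERTYPE *pHdr)
{
    OMX_U8 *data = pHdr->pBuffer + pHdr->nOffset;   /* start of valid payload */
    OMX_U32 len  = pHdr->nFilledLen;                /* bytes of valid payload */

    if (!(pHdr->nFlags & OMX_BUFFERFLAG_DECODEONLY)) {
        /* render/consume 'len' bytes starting at 'data' */
    }
    if (pHdr->nFlags & OMX_BUFFERFLAG_EOS) {
        /* last buffer of the stream; no further data will be emitted on this port */
    }
    (void) data;
    (void) len;
}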
-/** The OMX_EXTRADATATYPE enumeration is used to define the
+/** The OMX_EXTRADATATYPE enumeration is used to define the
* possible extra data payload types.
* NB: this enum is binary backwards compatible with the previous
* OMX_EXTRADATA_QUANT define. This should be replaced with
@@ -450,9 +467,9 @@ typedef struct OMX_BUFFERHEADERTYPE
*/
typedef enum OMX_EXTRADATATYPE
{
- OMX_ExtraDataNone = 0, /**< Indicates that no more extra data sections follow */
+ OMX_ExtraDataNone = 0, /**< Indicates that no more extra data sections follow */
OMX_ExtraDataQuantization, /**< The data payload contains quantization data */
- OMX_ExtraDataKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
+ OMX_ExtraDataKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
OMX_ExtraDataVendorStartUnused = 0x7F000000, /**< Reserved region for introducing Vendor Extensions */
OMX_ExtraDataMax = 0x7FFFFFFF
} OMX_EXTRADATATYPE;
@@ -460,7 +477,7 @@ typedef enum OMX_EXTRADATATYPE
typedef struct OMX_OTHER_EXTRADATATYPE {
OMX_U32 nSize;
- OMX_VERSIONTYPE nVersion;
+ OMX_VERSIONTYPE nVersion;
OMX_U32 nPortIndex;
OMX_EXTRADATATYPE eType; /* Extra Data type */
OMX_U32 nDataSize; /* Size of the supporting data to follow */
@@ -473,7 +490,7 @@ typedef struct OMX_PORT_PARAM_TYPE {
OMX_VERSIONTYPE nVersion; /**< OMX specification version information */
OMX_U32 nPorts; /**< The number of ports for this component */
OMX_U32 nStartPortNumber; /** first port number for this type of port */
-} OMX_PORT_PARAM_TYPE;
+} OMX_PORT_PARAM_TYPE;
/** @ingroup comp */
typedef enum OMX_EVENTTYPE
@@ -482,14 +499,14 @@ typedef enum OMX_EVENTTYPE
OMX_EventError, /**< component has detected an error condition */
OMX_EventMark, /**< component has detected a buffer mark */
OMX_EventPortSettingsChanged, /**< component is reported a port settings change */
- OMX_EventBufferFlag, /**< component has detected an EOS */
+ OMX_EventBufferFlag, /**< component has detected an EOS */
OMX_EventResourcesAcquired, /**< component has been granted resources and is
automatically starting the state change from
OMX_StateWaitForResources to OMX_StateIdle. */
OMX_EventComponentResumed, /**< Component resumed due to reacquisition of resources */
OMX_EventDynamicResourcesAvailable, /**< Component has acquired previously unavailable dynamic resources */
OMX_EventPortFormatDetected, /**< Component has detected a supported format. */
- OMX_EventKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
+ OMX_EventKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
OMX_EventVendorStartUnused = 0x7F000000, /**< Reserved region for introducing Vendor Extensions */
OMX_EventMax = 0x7FFFFFFF
} OMX_EVENTTYPE;
@@ -500,7 +517,7 @@ typedef struct OMX_CALLBACKTYPE
event of interest occurs. Events are defined in the OMX_EVENTTYPE
enumeration. Please see that enumeration for details of what will
be returned for each type of event. Callbacks should not return
- an error to the component, so if an error occurs, the application
+ an error to the component, so if an error occurs, the application
shall handle it internally. This is a blocking call.
The application should return from this call within 5 msec to avoid
@@ -510,14 +527,14 @@ typedef struct OMX_CALLBACKTYPE
handle of the component to access. This is the component
handle returned by the call to the GetHandle function.
@param pAppData
- pointer to an application defined value that was provided in the
+ pointer to an application defined value that was provided in the
pAppData parameter to the OMX_GetHandle method for the component.
- This application defined value is provided so that the application
+ This application defined value is provided so that the application
can have a component specific context when receiving the callback.
@param eEvent
Event that the component wants to notify the application about.
@param nData1
- nData will be the OMX_ERRORTYPE for an error event and will be
+ nData will be the OMX_ERRORTYPE for an error event and will be
an OMX_COMMANDTYPE for a command complete event and OMX_INDEXTYPE for an OMX_PortSettingsChanged event.
@param nData2
nData2 will hold further information related to the event. Can be OMX_STATETYPE for
@@ -536,21 +553,21 @@ typedef struct OMX_CALLBACKTYPE
OMX_IN OMX_PTR pEventData);
/** The EmptyBufferDone method is used to return emptied buffers from an
- input port back to the application for reuse. This is a blocking call
+ input port back to the application for reuse. This is a blocking call
so the application should not attempt to refill the buffers during this
call, but should queue them and refill them in another thread. There
is no error return, so the application shall handle any errors generated
- internally.
-
+ internally.
+
The application should return from this call within 5 msec.
-
+
@param hComponent
handle of the component to access. This is the component
handle returned by the call to the GetHandle function.
@param pAppData
- pointer to an application defined value that was provided in the
+ pointer to an application defined value that was provided in the
pAppData parameter to the OMX_GetHandle method for the component.
- This application defined value is provided so that the application
+ This application defined value is provided so that the application
can have a component specific context when receiving the callback.
@param pBuffer
pointer to an OMX_BUFFERHEADERTYPE structure allocated with UseBuffer
@@ -563,23 +580,23 @@ typedef struct OMX_CALLBACKTYPE
OMX_IN OMX_BUFFERHEADERTYPE* pBuffer);
/** The FillBufferDone method is used to return filled buffers from an
- output port back to the application for emptying and then reuse.
- This is a blocking call so the application should not attempt to
- empty the buffers during this call, but should queue the buffers
- and empty them in another thread. There is no error return, so
- the application shall handle any errors generated internally. The
+ output port back to the application for emptying and then reuse.
+ This is a blocking call so the application should not attempt to
+ empty the buffers during this call, but should queue the buffers
+ and empty them in another thread. There is no error return, so
+ the application shall handle any errors generated internally. The
application shall also update the buffer header to indicate the
- number of bytes placed into the buffer.
+ number of bytes placed into the buffer.
The application should return from this call within 5 msec.
-
+
@param hComponent
handle of the component to access. This is the component
handle returned by the call to the GetHandle function.
@param pAppData
- pointer to an application defined value that was provided in the
+ pointer to an application defined value that was provided in the
pAppData parameter to the OMX_GetHandle method for the component.
- This application defined value is provided so that the application
+ This application defined value is provided so that the application
can have a component specific context when receiving the callback.
@param pBuffer
pointer to an OMX_BUFFERHEADERTYPE structure allocated with UseBuffer
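The three callbacks are collected in an OMX_CALLBACKTYPE and handed to the component as the last argument of OMX_GetHandle (or via the SetCallbacks entry point). A sketch with stub bodies; as noted above, each callback should return within about 5 msec, so real clients queue buffers for processing on another thread:

/* Sketch: stub callbacks wired into an OMX_CALLBACKTYPE. */
static OMX_ERRORTYPE on_event(OMX_HANDLETYPE hComp, OMX_PTR pAppData,
                              OMX_EVENTTYPE eEvent, OMX_U32 nData1,
                              OMX_U32 nData2, OMX_PTR pEventData)
{
    return OMX_ErrorNone;
}

static OMX_ERRORTYPE on_empty_done(OMX_HANDLETYPE hComp, OMX_PTR pAppData,
                                   OMX_BUFFERHEADERTYPE *pBuffer)
{
    /* queue pBuffer for refilling in another thread */
    return OMX_ErrorNone;
}

static OMX_ERRORTYPE on_fill_done(OMX_HANDLETYPE hComp, OMX_PTR pAppData,
                                  OMX_BUFFERHEADERTYPE *pBuffer)
{
    /* queue pBuffer for consumption in another thread */
    return OMX_ErrorNone;
}

static OMX_CALLBACKTYPE sCallbacks = { on_event, on_empty_done, on_fill_done };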
@@ -603,13 +620,13 @@ typedef enum OMX_BUFFERSUPPLIERTYPE
or don't care */
OMX_BufferSupplyInput, /**< input port supplies the buffers */
OMX_BufferSupplyOutput, /**< output port supplies the buffers */
- OMX_BufferSupplyKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
+ OMX_BufferSupplyKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
OMX_BufferSupplyVendorStartUnused = 0x7F000000, /**< Reserved region for introducing Vendor Extensions */
OMX_BufferSupplyMax = 0x7FFFFFFF
} OMX_BUFFERSUPPLIERTYPE;
-/** buffer supplier parameter
+/** buffer supplier parameter
* @ingroup tun
*/
typedef struct OMX_PARAM_BUFFERSUPPLIERTYPE {
@@ -620,61 +637,61 @@ typedef struct OMX_PARAM_BUFFERSUPPLIERTYPE {
} OMX_PARAM_BUFFERSUPPLIERTYPE;
-/**< indicates that buffers received by an input port of a tunnel
- may not modify the data in the buffers
+/**< indicates that buffers received by an input port of a tunnel
+ may not modify the data in the buffers
@ingroup tun
*/
-#define OMX_PORTTUNNELFLAG_READONLY 0x00000001
+#define OMX_PORTTUNNELFLAG_READONLY 0x00000001
/** The OMX_TUNNELSETUPTYPE structure is used to pass data from an output
port to an input port as part the two ComponentTunnelRequest calls
- resulting from a OMX_SetupTunnel call from the IL Client.
+ resulting from a OMX_SetupTunnel call from the IL Client.
@ingroup tun
- */
+ */
typedef struct OMX_TUNNELSETUPTYPE
{
OMX_U32 nTunnelFlags; /**< bit flags for tunneling */
OMX_BUFFERSUPPLIERTYPE eSupplier; /**< supplier preference */
-} OMX_TUNNELSETUPTYPE;
+} OMX_TUNNELSETUPTYPE;
/* OMX Component headers is included to enable the core to use
- macros for functions into the component for OMX release 1.0.
+ macros for functions into the component for OMX release 1.0.
Developers should not access any structures or data from within
the component header directly */
/* TO BE REMOVED - #include <OMX_Component.h> */
-/** GetComponentVersion will return information about the component.
+/** GetComponentVersion will return information about the component.
This is a blocking call. This macro will go directly from the
application to the component (via a core macro). The
component will return from this call within 5 msec.
@param [in] hComponent
handle of component to execute the command
@param [out] pComponentName
- pointer to an empty string of length 128 bytes. The component
- will write its name into this string. The name will be
- terminated by a single zero byte. The name of a component will
- be 127 bytes or less to leave room for the trailing zero byte.
+ pointer to an empty string of length 128 bytes. The component
+ will write its name into this string. The name will be
+ terminated by a single zero byte. The name of a component will
+ be 127 bytes or less to leave room for the trailing zero byte.
An example of a valid component name is "OMX.ABC.ChannelMixer\0".
@param [out] pComponentVersion
- pointer to an OMX Version structure that the component will fill
- in. The component will fill in a value that indicates the
- component version. NOTE: the component version is NOT the same
- as the OMX Specification version (found in all structures). The
- component version is defined by the vendor of the component and
+ pointer to an OMX Version structure that the component will fill
+ in. The component will fill in a value that indicates the
+ component version. NOTE: the component version is NOT the same
+ as the OMX Specification version (found in all structures). The
+ component version is defined by the vendor of the component and
its value is entirely up to the component vendor.
@param [out] pSpecVersion
- pointer to an OMX Version structure that the component will fill
- in. The SpecVersion is the version of the specification that the
- component was built against. Please note that this value may or
- may not match the structure's version. For example, if the
- component was built against the 2.0 specification, but the
- application (which creates the structure is built against the
+ pointer to an OMX Version structure that the component will fill
+ in. The SpecVersion is the version of the specification that the
+ component was built against. Please note that this value may or
+ may not match the structure's version. For example, if the
+ component was built against the 2.0 specification, but the
+ application (which creates the structure) is built against the
1.0 specification the versions would be different.
@param [out] pComponentUUID
- pointer to the UUID of the component which will be filled in by
- the component. The UUID is a unique identifier that is set at
- RUN time for the component and is unique to each instantion of
+ pointer to the UUID of the component which will be filled in by
+ the component. The UUID is a unique identifier that is set at
+ RUN time for the component and is unique to each instantiation of
the component.
@return OMX_ERRORTYPE
If the command successfully executes, the return code will be
@@ -697,46 +714,46 @@ typedef struct OMX_TUNNELSETUPTYPE
/** Send a command to the component. This call is a non-blocking call.
The component should check the parameters and then queue the command
- to the component thread to be executed. The component thread shall
- send the EventHandler() callback at the conclusion of the command.
+ to the component thread to be executed. The component thread shall
+ send the EventHandler() callback at the conclusion of the command.
This macro will go directly from the application to the component (via
a core macro). The component will return from this call within 5 msec.
-
+
When the command is "OMX_CommandStateSet" the component will queue a
state transition to the new state identified in nParam.
-
+
When the command is "OMX_CommandFlush", to flush a port's buffer queues,
- the command will force the component to return all buffers NOT CURRENTLY
- BEING PROCESSED to the application, in the order in which the buffers
+ the command will force the component to return all buffers NOT CURRENTLY
+ BEING PROCESSED to the application, in the order in which the buffers
were received.
-
- When the command is "OMX_CommandPortDisable" or
+
+ When the command is "OMX_CommandPortDisable" or
"OMX_CommandPortEnable", the component's port (given by the value of
- nParam) will be stopped or restarted.
-
+ nParam) will be stopped or restarted.
+
When the command "OMX_CommandMarkBuffer" is used to mark a buffer, the
pCmdData will point to a OMX_MARKTYPE structure containing the component
handle of the component to examine the buffer chain for the mark. nParam1
contains the index of the port on which the buffer mark is applied.
- Specification text for more details.
-
+ See the specification text for more details.
+
@param [in] hComponent
handle of component to execute the command
@param [in] Cmd
Command for the component to execute
@param [in] nParam
- Parameter for the command to be executed. When Cmd has the value
- OMX_CommandStateSet, value is a member of OMX_STATETYPE. When Cmd has
- the value OMX_CommandFlush, value of nParam indicates which port(s)
- to flush. -1 is used to flush all ports a single port index will
+ Parameter for the command to be executed. When Cmd has the value
+ OMX_CommandStateSet, value is a member of OMX_STATETYPE. When Cmd has
+ the value OMX_CommandFlush, value of nParam indicates which port(s)
+ to flush. -1 is used to flush all ports; a single port index will
only flush that port. When Cmd has the value "OMX_CommandPortDisable"
- or "OMX_CommandPortEnable", the component's port is given by
+ or "OMX_CommandPortEnable", the component's port is given by
the value of nParam. When Cmd has the value "OMX_CommandMarkBuffer"
the component's port is given by the value of nParam.
@param [in] pCmdData
Parameter pointing to the OMX_MARKTYPE structure when Cmd has the value
- "OMX_CommandMarkBuffer".
+ "OMX_CommandMarkBuffer".
@return OMX_ERRORTYPE
If the command successfully executes, the return code will be
OMX_ErrorNone. Otherwise the appropriate OMX error will be returned.
@@ -754,21 +771,21 @@ typedef struct OMX_TUNNELSETUPTYPE
pCmdData) /* Macro End */
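Typical uses of this macro, combining the state-machine description earlier in this header (Loaded to Idle to Executing) with a whole-component flush. hComp is a placeholder handle; each command completes asynchronously through the EventHandler callback, so a real client waits for completion between steps:

/* Sketch: drives the Loaded -> Idle -> Executing ramp and then flushes every port. */
static void start_and_flush(OMX_HANDLETYPE hComp)
{
    OMX_SendCommand(hComp, OMX_CommandStateSet, OMX_StateIdle, NULL);
    /* ... supply all port buffers and wait for the Idle transition ... */
    OMX_SendCommand(hComp, OMX_CommandStateSet, OMX_StateExecuting, NULL);

    OMX_SendCommand(hComp, OMX_CommandFlush, (OMX_U32) -1 /* all ports */, NULL);
}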
-/** The OMX_GetParameter macro will get one of the current parameter
- settings from the component. This macro cannot only be invoked when
+/** The OMX_GetParameter macro will get one of the current parameter
+ settings from the component. This macro cannot be invoked when
the component is in the OMX_StateInvalid state. The nParamIndex
parameter is used to indicate which structure is being requested from
- the component. The application shall allocate the correct structure
- and shall fill in the structure size and version information before
+ the component. The application shall allocate the correct structure
+ and shall fill in the structure size and version information before
invoking this macro. When the parameter applies to a port, the
caller shall fill in the appropriate nPortIndex value indicating the
- port on which the parameter applies. If the component has not had
- any settings changed, then the component should return a set of
- valid DEFAULT parameters for the component. This is a blocking
- call.
-
+ port on which the parameter applies. If the component has not had
+ any settings changed, then the component should return a set of
+ valid DEFAULT parameters for the component. This is a blocking
+ call.
+
The component should return from this call within 20 msec.
-
+
@param [in] hComponent
Handle of the component to be accessed. This is the component
handle returned by the call to the OMX_GetHandle function.
@@ -776,7 +793,7 @@ typedef struct OMX_TUNNELSETUPTYPE
Index of the structure to be filled. This value is from the
OMX_INDEXTYPE enumeration.
@param [in,out] pComponentParameterStructure
- Pointer to application allocated structure to be filled by the
+ Pointer to application allocated structure to be filled by the
component.
@return OMX_ERRORTYPE
If the command successfully executes, the return code will be
@@ -797,17 +814,17 @@ typedef struct OMX_TUNNELSETUPTYPE
structure to a component. Each structure shall be sent one at a time,
in a separate invocation of the macro. This macro can only be
invoked when the component is in the OMX_StateLoaded state, or the
- port is disabled (when the parameter applies to a port). The
+ port is disabled (when the parameter applies to a port). The
nParamIndex parameter is used to indicate which structure is being
- passed to the component. The application shall allocate the
- correct structure and shall fill in the structure size and version
+ passed to the component. The application shall allocate the
+ correct structure and shall fill in the structure size and version
information (as well as the actual data) before invoking this macro.
The application is free to dispose of this structure after the call
- as the component is required to copy any data it shall retain. This
- is a blocking call.
-
+ as the component is required to copy any data it shall retain. This
+ is a blocking call.
+
The component should return from this call within 20 msec.
-
+
@param [in] hComponent
Handle of the component to be accessed. This is the component
handle returned by the call to the OMX_GetHandle function.
@@ -832,18 +849,18 @@ typedef struct OMX_TUNNELSETUPTYPE
pComponentParameterStructure) /* Macro End */
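
A minimal IL-client sketch of the get/modify/set pattern described above: the caller allocates the structure, fills nSize, nVersion and nPortIndex, reads the current settings, changes only what it needs, and sends the whole structure back. The helper name is illustrative and the handle is assumed to come from an earlier OMX_GetHandle call.

    #include <OMX_Core.h>
    #include <OMX_Component.h>

    static OMX_ERRORTYPE set_buffer_count(OMX_HANDLETYPE hComp,
                                          OMX_U32 nPortIndex,
                                          OMX_U32 nBufferCount)
    {
        OMX_PARAM_PORTDEFINITIONTYPE def;
        OMX_ERRORTYPE err;

        /* Caller-allocated structure with size/version/port filled in. */
        def.nSize = sizeof(def);
        def.nVersion.s.nVersionMajor = 1;
        def.nVersion.s.nVersionMinor = 1;
        def.nVersion.s.nRevision = 2;
        def.nVersion.s.nStep = 0;
        def.nPortIndex = nPortIndex;

        err = OMX_GetParameter(hComp, OMX_IndexParamPortDefinition, &def);
        if (err != OMX_ErrorNone)
            return err;

        /* Modify only the field of interest and write everything back. */
        def.nBufferCountActual = nBufferCount;
        return OMX_SetParameter(hComp, OMX_IndexParamPortDefinition, &def);
    }
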
-/** The OMX_GetConfig macro will get one of the configuration structures
- from a component. This macro can be invoked anytime after the
- component has been loaded. The nParamIndex call parameter is used to
- indicate which structure is being requested from the component. The
- application shall allocate the correct structure and shall fill in the
- structure size and version information before invoking this macro.
- If the component has not had this configuration parameter sent before,
- then the component should return a set of valid DEFAULT values for the
- component. This is a blocking call.
-
+/** The OMX_GetConfig macro will get one of the configuration structures
+ from a component. This macro can be invoked anytime after the
+ component has been loaded. The nParamIndex call parameter is used to
+ indicate which structure is being requested from the component. The
+ application shall allocate the correct structure and shall fill in the
+ structure size and version information before invoking this macro.
+ If the component has not had this configuration parameter sent before,
+ then the component should return a set of valid DEFAULT values for the
+ component. This is a blocking call.
+
The component should return from this call within 5 msec.
-
+
@param [in] hComponent
Handle of the component to be accessed. This is the component
handle returned by the call to the OMX_GetHandle function.
@@ -851,13 +868,13 @@ typedef struct OMX_TUNNELSETUPTYPE
Index of the structure to be filled. This value is from the
OMX_INDEXTYPE enumeration.
@param [in,out] pComponentConfigStructure
- pointer to application allocated structure to be filled by the
+ pointer to application allocated structure to be filled by the
component.
@return OMX_ERRORTYPE
If the command successfully executes, the return code will be
OMX_ErrorNone. Otherwise the appropriate OMX error will be returned.
@ingroup comp
-*/
+*/
#define OMX_GetConfig( \
hComponent, \
nConfigIndex, \
@@ -868,18 +885,18 @@ typedef struct OMX_TUNNELSETUPTYPE
pComponentConfigStructure) /* Macro End */
-/** The OMX_SetConfig macro will send one of the configuration
+/** The OMX_SetConfig macro will send one of the configuration
structures to a component. Each structure shall be sent one at a time,
- each in a separate invocation of the macro. This macro can be invoked
- anytime after the component has been loaded. The application shall
- allocate the correct structure and shall fill in the structure size
- and version information (as well as the actual data) before invoking
- this macro. The application is free to dispose of this structure after
- the call as the component is required to copy any data it shall retain.
- This is a blocking call.
-
+ each in a separate invocation of the macro. This macro can be invoked
+ anytime after the component has been loaded. The application shall
+ allocate the correct structure and shall fill in the structure size
+ and version information (as well as the actual data) before invoking
+ this macro. The application is free to dispose of this structure after
+ the call as the component is required to copy any data it shall retain.
+ This is a blocking call.
+
The component should return from this call within 5 msec.
-
+
@param [in] hComponent
Handle of the component to be accessed. This is the component
handle returned by the call to the OMX_GetHandle function.
@@ -904,22 +921,22 @@ typedef struct OMX_TUNNELSETUPTYPE
pComponentConfigStructure) /* Macro End */
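
The same read-modify-write pattern applies to configs, which may be changed at run time. A small sketch using the rotation config from OMX_IVCommon.h (further down in this change); the helper name is illustrative.

    #include <OMX_Core.h>
    #include <OMX_IVCommon.h>

    static OMX_ERRORTYPE rotate_port(OMX_HANDLETYPE hComp, OMX_U32 nPortIndex,
                                     OMX_S32 nDegrees)
    {
        OMX_CONFIG_ROTATIONTYPE rot;
        OMX_ERRORTYPE err;

        rot.nSize = sizeof(rot);
        rot.nVersion.s.nVersionMajor = 1;
        rot.nVersion.s.nVersionMinor = 1;
        rot.nVersion.s.nRevision = 2;
        rot.nVersion.s.nStep = 0;
        rot.nPortIndex = nPortIndex;

        /* Read the current value first, then change only the rotation. */
        err = OMX_GetConfig(hComp, OMX_IndexConfigCommonRotate, &rot);
        if (err != OMX_ErrorNone)
            return err;

        rot.nRotation = nDegrees;
        return OMX_SetConfig(hComp, OMX_IndexConfigCommonRotate, &rot);
    }
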
-/** The OMX_GetExtensionIndex macro will invoke a component to translate
- a vendor specific configuration or parameter string into an OMX
- structure index. There is no requirement for the vendor to support
- this command for the indexes already found in the OMX_INDEXTYPE
- enumeration (this is done to save space in small components). The
+/** The OMX_GetExtensionIndex macro will invoke a component to translate
+ a vendor specific configuration or parameter string into an OMX
+ structure index. There is no requirement for the vendor to support
+ this command for the indexes already found in the OMX_INDEXTYPE
+ enumeration (this is done to save space in small components). The
component shall support all vendor supplied extension indexes not found
- in the master OMX_INDEXTYPE enumeration. This is a blocking call.
-
+ in the master OMX_INDEXTYPE enumeration. This is a blocking call.
+
The component should return from this call within 5 msec.
-
+
@param [in] hComponent
Handle of the component to be accessed. This is the component
handle returned by the call to the GetHandle function.
@param [in] cParameterName
OMX_STRING that shall be less than 128 characters long including
- the trailing null byte. This is the string that will get
+ the trailing null byte. This is the string that will get
translated by the component into a configuration index.
@param [out] pIndexType
a pointer to a OMX_INDEXTYPE to receive the index value.
@@ -938,18 +955,18 @@ typedef struct OMX_TUNNELSETUPTYPE
pIndexType) /* Macro End */
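
A sketch of how a client uses the translated index; the extension string below is a placeholder, real names are defined by the vendor.

    #include <OMX_Core.h>

    static OMX_ERRORTYPE lookup_vendor_index(OMX_HANDLETYPE hComp,
                                             OMX_INDEXTYPE *pIndex)
    {
        /* Translate the vendor string into an index that can then be passed
         * to OMX_GetConfig/OMX_SetConfig or OMX_Get/SetParameter. */
        return OMX_GetExtensionIndex(hComp,
                                     (OMX_STRING)"OMX.vendor.index.example",
                                     pIndex);
    }
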
-/** The OMX_GetState macro will invoke the component to get the current
+/** The OMX_GetState macro will invoke the component to get the current
state of the component and place the state value into the location
- pointed to by pState.
-
+ pointed to by pState.
+
The component should return from this call within 5 msec.
-
+
@param [in] hComponent
Handle of the component to be accessed. This is the component
handle returned by the call to the OMX_GetHandle function.
@param [out] pState
pointer to the location to receive the state. The value returned
- is one of the OMX_STATETYPE members
+ is one of the OMX_STATETYPE members
@return OMX_ERRORTYPE
If the command successfully executes, the return code will be
OMX_ErrorNone. Otherwise the appropriate OMX error will be returned.
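
A small sketch of polling the component state, for instance while waiting for a transition to complete (a real client would normally wait on the OMX_EventCmdComplete callback instead):

    #include <OMX_Core.h>

    static OMX_BOOL is_executing(OMX_HANDLETYPE hComp)
    {
        OMX_STATETYPE eState = OMX_StateInvalid;

        if (OMX_GetState(hComp, &eState) != OMX_ErrorNone)
            return OMX_FALSE;

        return (eState == OMX_StateExecuting) ? OMX_TRUE : OMX_FALSE;
    }
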
@@ -964,17 +981,17 @@ typedef struct OMX_TUNNELSETUPTYPE
/** The OMX_UseBuffer macro will request that the component use
- a buffer (and allocate its own buffer header) already allocated
- by another component, or by the IL Client. This is a blocking
+ a buffer (and allocate its own buffer header) already allocated
+ by another component, or by the IL Client. This is a blocking
call.
-
+
The component should return from this call within 20 msec.
-
+
@param [in] hComponent
Handle of the component to be accessed. This is the component
handle returned by the call to the OMX_GetHandle function.
@param [out] ppBuffer
- pointer to an OMX_BUFFERHEADERTYPE structure used to receive the
+ pointer to an OMX_BUFFERHEADERTYPE structure used to receive the
pointer to the buffer header
@return OMX_ERRORTYPE
If the command successfully executes, the return code will be
@@ -998,25 +1015,25 @@ typedef struct OMX_TUNNELSETUPTYPE
pBuffer)
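
Sketch of handing a client-owned buffer to a port; pData and nSize are assumed to satisfy the port definition, and the helper name is illustrative.

    #include <OMX_Core.h>

    static OMX_ERRORTYPE give_client_buffer(OMX_HANDLETYPE hComp,
                                            OMX_U32 nPortIndex,
                                            OMX_U32 nSize, OMX_U8 *pData,
                                            OMX_BUFFERHEADERTYPE **ppHdr)
    {
        /* The component wraps the client-owned memory in a buffer header
         * that it allocates itself and returns through ppHdr. */
        return OMX_UseBuffer(hComp, ppHdr, nPortIndex,
                             NULL /* pAppPrivate */, nSize, pData);
    }
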
-/** The OMX_AllocateBuffer macro will request that the component allocate
- a new buffer and buffer header. The component will allocate the
- buffer and the buffer header and return a pointer to the buffer
+/** The OMX_AllocateBuffer macro will request that the component allocate
+ a new buffer and buffer header. The component will allocate the
+ buffer and the buffer header and return a pointer to the buffer
header. This is a blocking call.
-
+
The component should return from this call within 5 msec.
-
+
@param [in] hComponent
Handle of the component to be accessed. This is the component
handle returned by the call to the OMX_GetHandle function.
@param [out] ppBuffer
- pointer to an OMX_BUFFERHEADERTYPE structure used to receive
+ pointer to an OMX_BUFFERHEADERTYPE structure used to receive
the pointer to the buffer header
@param [in] nPortIndex
nPortIndex is used to select the port on the component the buffer will
be used with. The port can be found by using the nPortIndex
value as an index into the Port Definition array of the component.
@param [in] pAppPrivate
- pAppPrivate is used to initialize the pAppPrivate member of the
+ pAppPrivate is used to initialize the pAppPrivate member of the
buffer header structure.
@param [in] nSizeBytes
size of the buffer to allocate. Used when bAllocateNew is true.
@@ -1024,7 +1041,7 @@ typedef struct OMX_TUNNELSETUPTYPE
If the command successfully executes, the return code will be
OMX_ErrorNone. Otherwise the appropriate OMX error will be returned.
@ingroup comp buf
- */
+ */
#define OMX_AllocateBuffer( \
hComponent, \
ppBuffer, \
@@ -1040,13 +1057,13 @@ typedef struct OMX_TUNNELSETUPTYPE
/** The OMX_FreeBuffer macro will release a buffer header from the component
- which was allocated using either OMX_AllocateBuffer or OMX_UseBuffer. If
- the component allocated the buffer (see the OMX_UseBuffer macro) then
- the component shall free the buffer and buffer header. This is a
- blocking call.
-
+ which was allocated using either OMX_AllocateBuffer or OMX_UseBuffer. If
+ the component allocated the buffer (see the OMX_AllocateBuffer macro) then
+ the component shall free the buffer and buffer header. This is a
+ blocking call.
+
The component should return from this call within 20 msec.
-
+
@param [in] hComponent
Handle of the component to be accessed. This is the component
handle returned by the call to the OMX_GetHandle function.
@@ -1071,17 +1088,17 @@ typedef struct OMX_TUNNELSETUPTYPE
pBuffer) /* Macro End */
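
Sketch of the allocate/free pairing described by the two macros above; the helper name is illustrative.

    #include <OMX_Core.h>

    static OMX_ERRORTYPE allocate_then_free(OMX_HANDLETYPE hComp,
                                            OMX_U32 nPortIndex, OMX_U32 nSize)
    {
        OMX_BUFFERHEADERTYPE *pHdr = NULL;
        OMX_ERRORTYPE err;

        /* The component allocates both the data buffer and its header. */
        err = OMX_AllocateBuffer(hComp, &pHdr, nPortIndex,
                                 NULL /* pAppPrivate */, nSize);
        if (err != OMX_ErrorNone)
            return err;

        /* ... exchange the buffer while the port is active ... */

        /* Releasing the header also frees the component-allocated data. */
        return OMX_FreeBuffer(hComp, nPortIndex, pHdr);
    }
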
-/** The OMX_EmptyThisBuffer macro will send a buffer full of data to an
+/** The OMX_EmptyThisBuffer macro will send a buffer full of data to an
input port of a component. The buffer will be emptied by the component
and returned to the application via the EmptyBufferDone call back.
This is a non-blocking call in that the component will record the buffer
- and return immediately and then empty the buffer, later, at the proper
- time. As expected, this macro may be invoked only while the component
+ and return immediately and then empty the buffer, later, at the proper
+ time. As expected, this macro may be invoked only while the component
is in the OMX_StateExecuting. If nPortIndex does not specify an input
- port, the component shall return an error.
-
+ port, the component shall return an error.
+
The component should return from this call within 5 msec.
-
+
@param [in] hComponent
Handle of the component to be accessed. This is the component
handle returned by the call to the OMX_GetHandle function.
@@ -1101,17 +1118,17 @@ typedef struct OMX_TUNNELSETUPTYPE
pBuffer) /* Macro End */
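
Sketch of queueing input data; OMX_FillThisBuffer (described next) is the symmetric call for output ports. The header is assumed to be owned by the client, e.g. after an EmptyBufferDone callback.

    #include <OMX_Core.h>

    static OMX_ERRORTYPE queue_input(OMX_HANDLETYPE hComp,
                                     OMX_BUFFERHEADERTYPE *pHdr,
                                     OMX_U32 nBytes)
    {
        /* Non-blocking: the component records the buffer and returns; the
         * data is consumed later and the header is handed back through the
         * EmptyBufferDone callback. */
        pHdr->nFilledLen = nBytes;
        pHdr->nOffset = 0;
        return OMX_EmptyThisBuffer(hComp, pHdr);
    }
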
-/** The OMX_FillThisBuffer macro will send an empty buffer to an
+/** The OMX_FillThisBuffer macro will send an empty buffer to an
output port of a component. The buffer will be filled by the component
and returned to the application via the FillBufferDone call back.
This is a non-blocking call in that the component will record the buffer
- and return immediately and then fill the buffer, later, at the proper
- time. As expected, this macro may be invoked only while the component
+ and return immediately and then fill the buffer, later, at the proper
+ time. As expected, this macro may be invoked only while the component
is in the OMX_StateExecuting state. If nPortIndex does not specify an output
- port, the component shall return an error.
-
+ port, the component shall return an error.
+
The component should return from this call within 5 msec.
-
+
@param [in] hComponent
Handle of the component to be accessed. This is the component
handle returned by the call to the OMX_GetHandle function.
@@ -1135,14 +1152,14 @@ typedef struct OMX_TUNNELSETUPTYPE
/** The OMX_UseEGLImage macro will request that the component use
an EGLImage provided by EGL (and allocate its own buffer header)
This is a blocking call.
-
+
The component should return from this call within 20 msec.
-
+
@param [in] hComponent
Handle of the component to be accessed. This is the component
handle returned by the call to the OMX_GetHandle function.
@param [out] ppBuffer
- pointer to an OMX_BUFFERHEADERTYPE structure used to receive the
+ pointer to an OMX_BUFFERHEADERTYPE structure used to receive the
pointer to the buffer header. Note that the memory location used
for this buffer is NOT visible to the IL Client.
@param [in] nPortIndex
@@ -1150,13 +1167,13 @@ typedef struct OMX_TUNNELSETUPTYPE
be used with. The port can be found by using the nPortIndex
value as an index into the Port Definition array of the component.
@param [in] pAppPrivate
- pAppPrivate is used to initialize the pAppPrivate member of the
+ pAppPrivate is used to initialize the pAppPrivate member of the
buffer header structure.
@param [in] eglImage
eglImage contains the handle of the EGLImage to use as a buffer on the
- specified port. The component is expected to validate properties of
+ specified port. The component is expected to validate properties of
the EGLImage against the configuration of the port to ensure the component
- can use the EGLImage as a buffer.
+ can use the EGLImage as a buffer.
@return OMX_ERRORTYPE
If the command successfully executes, the return code will be
OMX_ErrorNone. Otherwise the appropriate OMX error will be returned.
@@ -1177,8 +1194,8 @@ typedef struct OMX_TUNNELSETUPTYPE
/** The OMX_Init method is used to initialize the OMX core. It shall be the
first call made into OMX and it should only be executed one time without
- an interviening OMX_Deinit call.
-
+ an intervening OMX_Deinit call.
+
The core should return from this call within 20 msec.
@return OMX_ERRORTYPE
@@ -1189,13 +1206,13 @@ typedef struct OMX_TUNNELSETUPTYPE
OMX_API OMX_ERRORTYPE OMX_APIENTRY OMX_Init(void);
-/** The OMX_Deinit method is used to deinitialize the OMX core. It shall be
- the last call made into OMX. In the event that the core determines that
- thare are components loaded when this call is made, the core may return
+/** The OMX_Deinit method is used to deinitialize the OMX core. It shall be
+ the last call made into OMX. In the event that the core determines that
+ there are components loaded when this call is made, the core may return
with an error rather than try to unload the components.
-
+
The core should return from this call within 20 msec.
-
+
@return OMX_ERRORTYPE
If the command successfully executes, the return code will be
OMX_ErrorNone. Otherwise the appropriate OMX error will be returned.
@@ -1212,23 +1229,23 @@ OMX_API OMX_ERRORTYPE OMX_APIENTRY OMX_Deinit(void);
installation of new components, it is only required to detect newly
installed components when the first call to enumerate component names
is made (i.e. when nIndex is 0x0).
-
+
The core should return from this call in 20 msec.
-
+
@param [out] cComponentName
pointer to a null terminated string with the component name. The
names of the components are strings less than 127 bytes in length
- plus the trailing null for a maximum size of 128 bytes. An example
- of a valid component name is "OMX.TI.AUDIO.DSP.MIXER\0". Names are
- assigned by the vendor, but shall start with "OMX." and then have
+ plus the trailing null for a maximum size of 128 bytes. An example
+ of a valid component name is "OMX.TI.AUDIO.DSP.MIXER\0". Names are
+ assigned by the vendor, but shall start with "OMX." and then have
the Vendor designation next.
@param [in] nNameLength
- number of characters in the cComponentName string. With all
- component name strings restricted to less than 128 characters
+ number of characters in the cComponentName string. With all
+ component name strings restricted to less than 128 characters
(including the trailing null) it is recommended that the caller
provide an input string for the cComponentName of 128 characters.
@param [in] nIndex
- number containing the enumeration index for the component.
+ number containing the enumeration index for the component.
Multiple calls to OMX_ComponentNameEnum with increasing values
of nIndex will enumerate through the component names in the
system until OMX_ErrorNoMore is returned. The value of nIndex
@@ -1236,7 +1253,7 @@ OMX_API OMX_ERRORTYPE OMX_APIENTRY OMX_Deinit(void);
in the system.
@return OMX_ERRORTYPE
If the command successfully executes, the return code will be
- OMX_ErrorNone. When the value of nIndex exceeds the number of
+ OMX_ErrorNone. When the value of nIndex exceeds the number of
components in the system minus 1, OMX_ErrorNoMore will be
returned. Otherwise the appropriate OMX error will be returned.
@ingroup core
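
The enumeration loop implied above, bracketed by OMX_Init/OMX_Deinit; a minimal sketch that instantiates nothing.

    #include <stdio.h>
    #include <OMX_Core.h>

    int main(void)
    {
        char cName[128];   /* names are at most 128 bytes including the null */
        OMX_U32 nIndex = 0;

        if (OMX_Init() != OMX_ErrorNone)
            return 1;

        /* Walk the component list until the core reports OMX_ErrorNoMore. */
        while (OMX_ComponentNameEnum(cName, sizeof(cName), nIndex) ==
               OMX_ErrorNone) {
            printf("component %lu: %s\n", (unsigned long)nIndex, cName);
            nIndex++;
        }

        OMX_Deinit();
        return 0;
    }
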
@@ -1249,18 +1266,18 @@ OMX_API OMX_ERRORTYPE OMX_APIENTRY OMX_ComponentNameEnum(
/** The OMX_GetHandle method will locate the component specified by the
component name given, load that component into memory and then invoke
- the component's methods to create an instance of the component.
-
+ the component's methods to create an instance of the component.
+
The core should return from this call within 20 msec.
-
+
@param [out] pHandle
pointer to an OMX_HANDLETYPE pointer to be filled in by this method.
@param [in] cComponentName
pointer to a null terminated string with the component name. The
names of the components are strings less than 127 bytes in length
- plus the trailing null for a maximum size of 128 bytes. An example
- of a valid component name is "OMX.TI.AUDIO.DSP.MIXER\0". Names are
- assigned by the vendor, but shall start with "OMX." and then have
+ plus the trailing null for a maximum size of 128 bytes. An example
+ of a valid component name is "OMX.TI.AUDIO.DSP.MIXER\0". Names are
+ assigned by the vendor, but shall start with "OMX." and then have
the Vendor designation next.
@param [in] pAppData
pointer to an application defined value that will be returned
@@ -1268,24 +1285,24 @@ OMX_API OMX_ERRORTYPE OMX_APIENTRY OMX_ComponentNameEnum(
of the callback.
@param [in] pCallBacks
pointer to a OMX_CALLBACKTYPE structure that will be passed to the
- component to initialize it with.
+ component to initialize it with.
@return OMX_ERRORTYPE
If the command successfully executes, the return code will be
OMX_ErrorNone. Otherwise the appropriate OMX error will be returned.
@ingroup core
*/
OMX_API OMX_ERRORTYPE OMX_APIENTRY OMX_GetHandle(
- OMX_OUT OMX_HANDLETYPE* pHandle,
+ OMX_OUT OMX_HANDLETYPE* pHandle,
OMX_IN OMX_STRING cComponentName,
OMX_IN OMX_PTR pAppData,
OMX_IN OMX_CALLBACKTYPE* pCallBacks);
-/** The OMX_FreeHandle method will free a handle allocated by the OMX_GetHandle
+/** The OMX_FreeHandle method will free a handle allocated by the OMX_GetHandle
method. If the component reference count goes to zero, the component will
- be unloaded from memory.
-
- The core should return from this call within 20 msec when the component is
+ be unloaded from memory.
+
+ The core should return from this call within 20 msec when the component is
in the OMX_StateLoaded state.
@param [in] hComponent
@@ -1304,34 +1321,34 @@ OMX_API OMX_ERRORTYPE OMX_APIENTRY OMX_FreeHandle(
/** The OMX_SetupTunnel method will handle the necessary calls to the components
to set up the specified tunnel between the two components. NOTE: This is
an actual method (not a #define macro). This method will make calls into
- the component ComponentTunnelRequest method to do the actual tunnel
- connection.
+ the component ComponentTunnelRequest method to do the actual tunnel
+ connection.
- The ComponentTunnelRequest method on both components will be called.
- This method shall not be called unless the component is in the
+ The ComponentTunnelRequest method on both components will be called.
+ This method shall not be called unless the component is in the
OMX_StateLoaded state except when the ports used for the tunnel are
disabled. In this case, the component may be in the OMX_StateExecuting,
- OMX_StatePause, or OMX_StateIdle states.
+ OMX_StatePause, or OMX_StateIdle states.
The core should return from this call within 20 msec.
-
+
@param [in] hOutput
Handle of the component to be accessed. Also this is the handle
of the component whose port, specified in the nPortOutput parameter
will be used as the source for the tunnel. This is the component handle
- returned by the call to the OMX_GetHandle function. There is a
+ returned by the call to the OMX_GetHandle function. There is a
requirement that hOutput be the source for the data when
tunneling (i.e. nPortOutput is an output port). If 0x0, the component
specified in hInput will have its port specified in nPortInput
setup for communication with the application / IL client.
@param [in] nPortOutput
nPortOutput is used to select the source port on component to be
- used in the tunnel.
+ used in the tunnel.
@param [in] hInput
This is the component to setup the tunnel with. This is the handle
of the component whose port, specified in the nPortInput parameter
will be used as the destination for the tunnel. This is the component handle
- returned by the call to the OMX_GetHandle function. There is a
+ returned by the call to the OMX_GetHandle function. There is a
requirement that hInput be the destination for the data when
tunneling (i.e. nPortInput is an input port). If 0x0, the component
specified in hOutput will have its port specified in nPortOutput
@@ -1342,9 +1359,9 @@ OMX_API OMX_ERRORTYPE OMX_APIENTRY OMX_FreeHandle(
@return OMX_ERRORTYPE
If the command successfully executes, the return code will be
OMX_ErrorNone. Otherwise the appropriate OMX error will be returned.
- When OMX_ErrorNotImplemented is returned, one or both components is
+ When OMX_ErrorNotImplemented is returned, one or both components is
a non-interop component and does not support tunneling.
-
+
On failure, the ports of both components are setup for communication
with the application / IL Client.
@ingroup core tun
@@ -1354,50 +1371,50 @@ OMX_API OMX_ERRORTYPE OMX_APIENTRY OMX_SetupTunnel(
OMX_IN OMX_U32 nPortOutput,
OMX_IN OMX_HANDLETYPE hInput,
OMX_IN OMX_U32 nPortInput);
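
Sketch of tunnelling one component's output port into another's input port; both components are assumed to be in OMX_StateLoaded as required above, and the helper name is illustrative.

    #include <OMX_Core.h>

    static OMX_ERRORTYPE tunnel(OMX_HANDLETYPE hSource, OMX_U32 nOutPort,
                                OMX_HANDLETYPE hSink, OMX_U32 nInPort)
    {
        OMX_ERRORTYPE err = OMX_SetupTunnel(hSource, nOutPort, hSink, nInPort);

        if (err == OMX_ErrorNotImplemented) {
            /* At least one side is a non-interop component; the client must
             * move buffers between the two ports itself. */
        }
        return err;
    }
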
-
+
/** @ingroup cp */
OMX_API OMX_ERRORTYPE OMX_GetContentPipe(
OMX_OUT OMX_HANDLETYPE *hPipe,
OMX_IN OMX_STRING szURI);
/** The OMX_GetComponentsOfRole method will return the number of components that support the given
- role and (if the compNames field is non-NULL) the names of those components. The call will fail if
+ role and (if the compNames field is non-NULL) the names of those components. The call will fail if
an insufficiently sized array of names is supplied. To ensure the array is sufficiently sized the
client should:
* first call this function with the compNames field NULL to determine the number of component names
- * second call this function with the compNames field pointing to an array of names allocated
+ * second call this function with the compNames field pointing to an array of names allocated
according to the number returned by the first call.
The core should return from this call within 5 msec.
-
+
@param [in] role
- This is generic standard component name consisting only of component class
+ This is generic standard component name consisting only of component class
name and the type within that class (e.g. 'audio_decoder.aac').
@param [inout] pNumComps
- This is used both as input and output.
-
+ This is used both as input and output.
+
If compNames is NULL, the input is ignored and the output specifies how many components support
the given role.
-
- If compNames is not NULL, on input it bounds the size of the input structure and
+
+ If compNames is not NULL, on input it bounds the size of the input structure and
on output, it specifies the number of components string names listed within the compNames parameter.
@param [inout] compNames
- If NULL this field is ignored. If non-NULL this points to an array of 128-byte strings which accepts
- a list of the names of all physical components that implement the specified standard component name.
+ If NULL this field is ignored. If non-NULL this points to an array of 128-byte strings which accepts
+ a list of the names of all physical components that implement the specified standard component name.
Each name is NULL terminated. numComps indicates the number of names.
@ingroup core
*/
-OMX_API OMX_ERRORTYPE OMX_GetComponentsOfRole (
+OMX_API OMX_ERRORTYPE OMX_GetComponentsOfRole (
OMX_IN OMX_STRING role,
OMX_INOUT OMX_U32 *pNumComps,
OMX_INOUT OMX_U8 **compNames);
/** The OMX_GetRolesOfComponent method will return the number of roles supported by the given
- component and (if the roles field is non-NULL) the names of those roles. The call will fail if
+ component and (if the roles field is non-NULL) the names of those roles. The call will fail if
an insufficiently sized array of names is supplied. To ensure the array is sufficiently sized the
client should:
* first call this function with the roles field NULL to determine the number of role names
- * second call this function with the roles field pointing to an array of names allocated
+ * second call this function with the roles field pointing to an array of names allocated
according to the number returned by the first call.
The core should return from this call within 5 msec.
@@ -1405,20 +1422,20 @@ OMX_API OMX_ERRORTYPE OMX_GetComponentsOfRole (
@param [in] compName
This is the name of the component being queried about.
@param [inout] pNumRoles
- This is used both as input and output.
-
+ This is used both as input and output.
+
If roles is NULL, the input is ignored and the output specifies how many roles the component supports.
-
- If compNames is not NULL, on input it bounds the size of the input structure and
+
+ If roles is not NULL, on input it bounds the size of the input structure and
on output, it specifies the number of roles string names listed within the roles parameter.
@param [out] roles
- If NULL this field is ignored. If non-NULL this points to an array of 128-byte strings
- which accepts a list of the names of all standard components roles implemented on the
+ If NULL this field is ignored. If non-NULL this points to an array of 128-byte strings
+ which accepts a list of the names of all standard components roles implemented on the
specified component name. numComps indicates the number of names.
@ingroup core
*/
-OMX_API OMX_ERRORTYPE OMX_GetRolesOfComponent (
- OMX_IN OMX_STRING compName,
+OMX_API OMX_ERRORTYPE OMX_GetRolesOfComponent (
+ OMX_IN OMX_STRING compName,
OMX_INOUT OMX_U32 *pNumRoles,
OMX_OUT OMX_U8 **roles);
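
The two-call pattern described for both queries, sketched here for OMX_GetComponentsOfRole; allocation failures are not handled and the helper name is illustrative.

    #include <stdlib.h>
    #include <OMX_Core.h>

    static void list_components_of_role(OMX_STRING role)
    {
        OMX_U32 nNum = 0, nAlloc, i;
        OMX_U8 **names;

        /* First call: compNames is NULL, only the count is returned. */
        if (OMX_GetComponentsOfRole(role, &nNum, NULL) != OMX_ErrorNone || !nNum)
            return;

        /* Second call: pass an array of 128-byte strings sized from above. */
        nAlloc = nNum;
        names = calloc(nAlloc, sizeof(*names));
        for (i = 0; i < nAlloc; i++)
            names[i] = malloc(128);

        if (OMX_GetComponentsOfRole(role, &nNum, names) == OMX_ErrorNone) {
            /* names[0..nNum-1] now hold null-terminated component names. */
        }

        for (i = 0; i < nAlloc; i++)
            free(names[i]);
        free(names);
    }
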
diff --git a/domx/omx_core/inc/OMX_IVCommon.h b/domx/omx_core/inc/OMX_IVCommon.h
index 4c4995c..ca21e87 100755
--- a/domx/omx_core/inc/OMX_IVCommon.h
+++ b/domx/omx_core/inc/OMX_IVCommon.h
@@ -1,27 +1,44 @@
+/* ------------------------------------------------------------------
+ * Copyright (C) 1998-2009 PacketVideo
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
+ * express or implied.
+ * See the License for the specific language governing permissions
+ * and limitations under the License.
+ * -------------------------------------------------------------------
+ */
/**
- * Copyright (c) 2008 The Khronos Group Inc.
- *
+ * Copyright (c) 2008 The Khronos Group Inc.
+ *
* Permission is hereby granted, free of charge, to any person obtaining
* a copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sublicense, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject
- * to the following conditions:
+ * to the following conditions:
* The above copyright notice and this permission notice shall be included
- * in all copies or substantial portions of the Software.
- *
+ * in all copies or substantial portions of the Software.
+ *
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
* OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
- * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+ * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*
*/
-/**
+/**
* @file OMX_IVCommon.h - OpenMax IL version 1.1.2
* The structures needed by Video and Image components to exchange
* parameters and configuration data with the components.
@@ -36,7 +53,7 @@ extern "C" {
/**
* Each OMX header must include all required header files to allow the header
* to compile without errors. The includes below are required for this header
- * file to compile successfully
+ * file to compile successfully
*/
#include <OMX_Core.h>
@@ -47,8 +64,8 @@ extern "C" {
*/
-/**
- * Enumeration defining possible uncompressed image/video formats.
+/**
+ * Enumeration defining possible uncompressed image/video formats.
*
* ENUMS:
* Unused : Placeholder value when format is N/A
@@ -96,7 +113,7 @@ typedef enum OMX_COLOR_FORMATTYPE {
OMX_COLOR_Format16bitBGR565,
OMX_COLOR_Format18bitRGB666,
OMX_COLOR_Format18bitARGB1665,
- OMX_COLOR_Format19bitARGB1666,
+ OMX_COLOR_Format19bitARGB1666,
OMX_COLOR_Format24bitRGB888,
OMX_COLOR_Format24bitBGR888,
OMX_COLOR_Format24bitARGB1887,
@@ -119,53 +136,62 @@ typedef enum OMX_COLOR_FORMATTYPE {
OMX_COLOR_FormatRawBayer8bit,
OMX_COLOR_FormatRawBayer10bit,
OMX_COLOR_FormatRawBayer8bitcompressed,
- OMX_COLOR_FormatL2,
- OMX_COLOR_FormatL4,
- OMX_COLOR_FormatL8,
- OMX_COLOR_FormatL16,
- OMX_COLOR_FormatL24,
+ OMX_COLOR_FormatL2,
+ OMX_COLOR_FormatL4,
+ OMX_COLOR_FormatL8,
+ OMX_COLOR_FormatL16,
+ OMX_COLOR_FormatL24,
OMX_COLOR_FormatL32,
OMX_COLOR_FormatYUV420PackedSemiPlanar,
OMX_COLOR_FormatYUV422PackedSemiPlanar,
OMX_COLOR_Format18BitBGR666,
OMX_COLOR_Format24BitARGB6666,
OMX_COLOR_Format24BitABGR6666,
- OMX_COLOR_FormatKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
+ OMX_COLOR_FormatKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
OMX_COLOR_FormatVendorStartUnused = 0x7F000000, /**< Reserved region for introducing Vendor Extensions */
+ /**<Reserved android opaque colorformat. Tells the encoder that
+ * the actual colorformat will be relayed by the
+ * Gralloc Buffers.
+ * FIXME: In the process of reserving some enum values for
+ * Android-specific OMX IL colorformats. Change this enum to
+ * an acceptable range once that is done.
+ * */
+ OMX_COLOR_FormatAndroidOpaque = 0x7F000789,
+ OMX_QCOM_COLOR_FormatYVU420SemiPlanar = 0x7FA30C00,
OMX_COLOR_FormatMax = 0x7FFFFFFF
} OMX_COLOR_FORMATTYPE;
-/**
+/**
* Defines the matrix for conversion from RGB to YUV or vice versa.
- * iColorMatrix should be initialized with the fixed point values
+ * iColorMatrix should be initialized with the fixed point values
* used in converting between formats.
*/
typedef struct OMX_CONFIG_COLORCONVERSIONTYPE {
OMX_U32 nSize; /**< Size of the structure in bytes */
- OMX_VERSIONTYPE nVersion; /**< OMX specification version info */
+ OMX_VERSIONTYPE nVersion; /**< OMX specification version info */
OMX_U32 nPortIndex; /**< Port that this struct applies to */
OMX_S32 xColorMatrix[3][3]; /**< Stored in signed Q16 format */
OMX_S32 xColorOffset[4]; /**< Stored in signed Q16 format */
}OMX_CONFIG_COLORCONVERSIONTYPE;
-/**
- * Structure defining percent to scale each frame dimension. For example:
+/**
+ * Structure defining percent to scale each frame dimension. For example:
* To make the width 50% larger, use fWidth = 1.5 and to make the width
* 1/2 the original size, use fWidth = 0.5
*/
typedef struct OMX_CONFIG_SCALEFACTORTYPE {
OMX_U32 nSize; /**< Size of the structure in bytes */
- OMX_VERSIONTYPE nVersion; /**< OMX specification version info */
+ OMX_VERSIONTYPE nVersion; /**< OMX specification version info */
OMX_U32 nPortIndex; /**< Port that this struct applies to */
OMX_S32 xWidth; /**< Fixed point value stored as Q16 */
OMX_S32 xHeight; /**< Fixed point value stored as Q16 */
}OMX_CONFIG_SCALEFACTORTYPE;
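
The scale factors are Q16 fixed point: the low 16 bits hold the fraction, so 1.0 is 0x10000 (65536). A sketch of the example in the comment above, set through OMX_IndexConfigCommonScale; the helper name is illustrative.

    #include <OMX_Core.h>
    #include <OMX_IVCommon.h>

    static OMX_ERRORTYPE scale_port(OMX_HANDLETYPE hComp, OMX_U32 nPortIndex)
    {
        OMX_CONFIG_SCALEFACTORTYPE scale;

        scale.nSize = sizeof(scale);
        scale.nVersion.s.nVersionMajor = 1;
        scale.nVersion.s.nVersionMinor = 1;
        scale.nVersion.s.nRevision = 2;
        scale.nVersion.s.nStep = 0;
        scale.nPortIndex = nPortIndex;

        /* Q16: the low 16 bits are the fraction, so 1.0 == 0x10000. */
        scale.xWidth  = (OMX_S32)(1.5 * 65536);  /* width 50% larger */
        scale.xHeight = (OMX_S32)(0.5 * 65536);  /* height halved    */

        return OMX_SetConfig(hComp, OMX_IndexConfigCommonScale, &scale);
    }
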
-/**
- * Enumeration of possible image filter types
+/**
+ * Enumeration of possible image filter types
*/
typedef enum OMX_IMAGEFILTERTYPE {
OMX_ImageFilterNone,
@@ -176,23 +202,23 @@ typedef enum OMX_IMAGEFILTERTYPE {
OMX_ImageFilterOilPaint,
OMX_ImageFilterHatch,
OMX_ImageFilterGpen,
- OMX_ImageFilterAntialias,
- OMX_ImageFilterDeRing,
+ OMX_ImageFilterAntialias,
+ OMX_ImageFilterDeRing,
OMX_ImageFilterSolarize,
- OMX_ImageFilterKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
+ OMX_ImageFilterKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
OMX_ImageFilterVendorStartUnused = 0x7F000000, /**< Reserved region for introducing Vendor Extensions */
OMX_ImageFilterMax = 0x7FFFFFFF
} OMX_IMAGEFILTERTYPE;
-/**
- * Image filter configuration
+/**
+ * Image filter configuration
*
* STRUCT MEMBERS:
- * nSize : Size of the structure in bytes
+ * nSize : Size of the structure in bytes
* nVersion : OMX specification version information
- * nPortIndex : Port that this structure applies to
- * eImageFilter : Image filter type enumeration
+ * nPortIndex : Port that this structure applies to
+ * eImageFilter : Image filter type enumeration
*/
typedef struct OMX_CONFIG_IMAGEFILTERTYPE {
OMX_U32 nSize;
@@ -202,22 +228,22 @@ typedef struct OMX_CONFIG_IMAGEFILTERTYPE {
} OMX_CONFIG_IMAGEFILTERTYPE;
-/**
- * Customized U and V for color enhancement
+/**
+ * Customized U and V for color enhancement
*
* STRUCT MEMBERS:
* nSize : Size of the structure in bytes
- * nVersion : OMX specification version information
+ * nVersion : OMX specification version information
* nPortIndex : Port that this structure applies to
* bColorEnhancement : Enable/disable color enhancement
- * nCustomizedU : Practical values: 16-240, range: 0-255, value set for
+ * nCustomizedU : Practical values: 16-240, range: 0-255, value set for
* U component
- * nCustomizedV : Practical values: 16-240, range: 0-255, value set for
+ * nCustomizedV : Practical values: 16-240, range: 0-255, value set for
* V component
*/
typedef struct OMX_CONFIG_COLORENHANCEMENTTYPE {
OMX_U32 nSize;
- OMX_VERSIONTYPE nVersion;
+ OMX_VERSIONTYPE nVersion;
OMX_U32 nPortIndex;
OMX_BOOL bColorEnhancement;
OMX_U8 nCustomizedU;
@@ -225,12 +251,12 @@ typedef struct OMX_CONFIG_COLORENHANCEMENTTYPE {
} OMX_CONFIG_COLORENHANCEMENTTYPE;
-/**
- * Define color key and color key mask
+/**
+ * Define color key and color key mask
*
* STRUCT MEMBERS:
* nSize : Size of the structure in bytes
- * nVersion : OMX specification version information
+ * nVersion : OMX specification version information
* nPortIndex : Port that this structure applies to
* nARGBColor : 32bit Alpha, Red, Green, Blue Color
* nARGBMask : 32bit Mask for Alpha, Red, Green, Blue channels
@@ -244,12 +270,12 @@ typedef struct OMX_CONFIG_COLORKEYTYPE {
} OMX_CONFIG_COLORKEYTYPE;
-/**
- * List of color blend types for pre/post processing
+/**
+ * List of color blend types for pre/post processing
*
* ENUMS:
* None : No color blending present
- * AlphaConstant : Function is (alpha_constant * src) +
+ * AlphaConstant : Function is (alpha_constant * src) +
* (1 - alpha_constant) * dst)
* AlphaPerPixel : Function is (alpha * src) + (1 - alpha) * dst)
* Alternate : Function is alternating pixels from src and dst
@@ -265,21 +291,21 @@ typedef enum OMX_COLORBLENDTYPE {
OMX_ColorBlendAnd,
OMX_ColorBlendOr,
OMX_ColorBlendInvert,
- OMX_ColorBlendKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
+ OMX_ColorBlendKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
OMX_ColorBlendVendorStartUnused = 0x7F000000, /**< Reserved region for introducing Vendor Extensions */
OMX_ColorBlendMax = 0x7FFFFFFF
} OMX_COLORBLENDTYPE;
-/**
- * Color blend configuration
+/**
+ * Color blend configuration
*
* STRUCT MEMBERS:
- * nSize : Size of the structure in bytes
- * nVersion : OMX specification version information
- * nPortIndex : Port that this structure applies to
+ * nSize : Size of the structure in bytes
+ * nVersion : OMX specification version information
+ * nPortIndex : Port that this structure applies to
* nRGBAlphaConstant : Constant global alpha values when global alpha is used
- * eColorBlend : Color blend type enumeration
+ * eColorBlend : Color blend type enumeration
*/
typedef struct OMX_CONFIG_COLORBLENDTYPE {
OMX_U32 nSize;
@@ -290,15 +316,15 @@ typedef struct OMX_CONFIG_COLORBLENDTYPE {
} OMX_CONFIG_COLORBLENDTYPE;
-/**
+/**
* Hold frame dimension
*
* STRUCT MEMBERS:
- * nSize : Size of the structure in bytes
+ * nSize : Size of the structure in bytes
* nVersion : OMX specification version information
- * nPortIndex : Port that this structure applies to
- * nWidth : Frame width in pixels
- * nHeight : Frame height in pixels
+ * nPortIndex : Port that this structure applies to
+ * nWidth : Frame width in pixels
+ * nHeight : Frame height in pixels
*/
typedef struct OMX_FRAMESIZETYPE {
OMX_U32 nSize;
@@ -310,69 +336,69 @@ typedef struct OMX_FRAMESIZETYPE {
/**
- * Rotation configuration
+ * Rotation configuration
*
* STRUCT MEMBERS:
- * nSize : Size of the structure in bytes
+ * nSize : Size of the structure in bytes
* nVersion : OMX specification version information
* nPortIndex : Port that this structure applies to
- * nRotation : +/- integer rotation value
+ * nRotation : +/- integer rotation value
*/
typedef struct OMX_CONFIG_ROTATIONTYPE {
OMX_U32 nSize;
OMX_VERSIONTYPE nVersion;
OMX_U32 nPortIndex;
- OMX_S32 nRotation;
+ OMX_S32 nRotation;
} OMX_CONFIG_ROTATIONTYPE;
-/**
- * Possible mirroring directions for pre/post processing
+/**
+ * Possible mirroring directions for pre/post processing
*
* ENUMS:
- * None : No mirroring
- * Vertical : Vertical mirroring, flip on X axis
- * Horizontal : Horizontal mirroring, flip on Y axis
+ * None : No mirroring
+ * Vertical : Vertical mirroring, flip on X axis
+ * Horizontal : Horizontal mirroring, flip on Y axis
* Both : Both vertical and horizontal mirroring
*/
typedef enum OMX_MIRRORTYPE {
OMX_MirrorNone = 0,
OMX_MirrorVertical,
OMX_MirrorHorizontal,
- OMX_MirrorBoth,
- OMX_MirrorKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
+ OMX_MirrorBoth,
+ OMX_MirrorKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
OMX_MirrorVendorStartUnused = 0x7F000000, /**< Reserved region for introducing Vendor Extensions */
- OMX_MirrorMax = 0x7FFFFFFF
+ OMX_MirrorMax = 0x7FFFFFFF
} OMX_MIRRORTYPE;
-/**
- * Mirroring configuration
+/**
+ * Mirroring configuration
*
* STRUCT MEMBERS:
- * nSize : Size of the structure in bytes
+ * nSize : Size of the structure in bytes
* nVersion : OMX specification version information
- * nPortIndex : Port that this structure applies to
- * eMirror : Mirror type enumeration
+ * nPortIndex : Port that this structure applies to
+ * eMirror : Mirror type enumeration
*/
typedef struct OMX_CONFIG_MIRRORTYPE {
OMX_U32 nSize;
- OMX_VERSIONTYPE nVersion;
+ OMX_VERSIONTYPE nVersion;
OMX_U32 nPortIndex;
OMX_MIRRORTYPE eMirror;
} OMX_CONFIG_MIRRORTYPE;
-/**
- * Position information only
+/**
+ * Position information only
*
* STRUCT MEMBERS:
- * nSize : Size of the structure in bytes
+ * nSize : Size of the structure in bytes
* nVersion : OMX specification version information
* nPortIndex : Port that this structure applies to
- * nX : X coordinate for the point
- * nY : Y coordinate for the point
- */
+ * nX : X coordinate for the point
+ * nY : Y coordinate for the point
+ */
typedef struct OMX_CONFIG_POINTTYPE {
OMX_U32 nSize;
OMX_VERSIONTYPE nVersion;
@@ -382,37 +408,37 @@ typedef struct OMX_CONFIG_POINTTYPE {
} OMX_CONFIG_POINTTYPE;
-/**
- * Frame size plus position
+/**
+ * Frame size plus position
*
* STRUCT MEMBERS:
- * nSize : Size of the structure in bytes
- * nVersion : OMX specification version information
- * nPortIndex : Port that this structure applies to
+ * nSize : Size of the structure in bytes
+ * nVersion : OMX specification version information
+ * nPortIndex : Port that this structure applies to
* nLeft : X Coordinate of the top left corner of the rectangle
* nTop : Y Coordinate of the top left corner of the rectangle
- * nWidth : Width of the rectangle
- * nHeight : Height of the rectangle
+ * nWidth : Width of the rectangle
+ * nHeight : Height of the rectangle
*/
typedef struct OMX_CONFIG_RECTTYPE {
OMX_U32 nSize;
- OMX_VERSIONTYPE nVersion;
- OMX_U32 nPortIndex;
- OMX_S32 nLeft;
+ OMX_VERSIONTYPE nVersion;
+ OMX_U32 nPortIndex;
+ OMX_S32 nLeft;
OMX_S32 nTop;
OMX_U32 nWidth;
OMX_U32 nHeight;
} OMX_CONFIG_RECTTYPE;
-/**
- * Deblocking state; it is required to be set up before starting the codec
+/**
+ * Deblocking state; it is required to be set up before starting the codec
*
* STRUCT MEMBERS:
- * nSize : Size of the structure in bytes
- * nVersion : OMX specification version information
+ * nSize : Size of the structure in bytes
+ * nVersion : OMX specification version information
* nPortIndex : Port that this structure applies to
- * bDeblocking : Enable/disable deblocking mode
+ * bDeblocking : Enable/disable deblocking mode
*/
typedef struct OMX_PARAM_DEBLOCKINGTYPE {
OMX_U32 nSize;
@@ -422,13 +448,13 @@ typedef struct OMX_PARAM_DEBLOCKINGTYPE {
} OMX_PARAM_DEBLOCKINGTYPE;
-/**
- * Stabilization state
+/**
+ * Stabilization state
*
* STRUCT MEMBERS:
- * nSize : Size of the structure in bytes
- * nVersion : OMX specification version information
- * nPortIndex : Port that this structure applies to
+ * nSize : Size of the structure in bytes
+ * nVersion : OMX specification version information
+ * nPortIndex : Port that this structure applies to
* bStab : Enable/disable frame stabilization state
*/
typedef struct OMX_CONFIG_FRAMESTABTYPE {
@@ -439,8 +465,8 @@ typedef struct OMX_CONFIG_FRAMESTABTYPE {
} OMX_CONFIG_FRAMESTABTYPE;
-/**
- * White Balance control type
+/**
+ * White Balance control type
*
* STRUCT MEMBERS:
* SunLight : Referenced in JSR-234
@@ -457,20 +483,20 @@ typedef enum OMX_WHITEBALCONTROLTYPE {
OMX_WhiteBalControlIncandescent,
OMX_WhiteBalControlFlash,
OMX_WhiteBalControlHorizon,
- OMX_WhiteBalControlKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
+ OMX_WhiteBalControlKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
OMX_WhiteBalControlVendorStartUnused = 0x7F000000, /**< Reserved region for introducing Vendor Extensions */
OMX_WhiteBalControlMax = 0x7FFFFFFF
} OMX_WHITEBALCONTROLTYPE;
-/**
- * White Balance control configuration
+/**
+ * White Balance control configuration
*
* STRUCT MEMBERS:
- * nSize : Size of the structure in bytes
+ * nSize : Size of the structure in bytes
* nVersion : OMX specification version information
- * nPortIndex : Port that this structure applies to
- * eWhiteBalControl : White balance enumeration
+ * nPortIndex : Port that this structure applies to
+ * eWhiteBalControl : White balance enumeration
*/
typedef struct OMX_CONFIG_WHITEBALCONTROLTYPE {
OMX_U32 nSize;
@@ -480,8 +506,8 @@ typedef struct OMX_CONFIG_WHITEBALCONTROLTYPE {
} OMX_CONFIG_WHITEBALCONTROLTYPE;
-/**
- * Exposure control type
+/**
+ * Exposure control type
*/
typedef enum OMX_EXPOSURECONTROLTYPE {
OMX_ExposureControlOff = 0,
@@ -494,20 +520,20 @@ typedef enum OMX_EXPOSURECONTROLTYPE {
OMX_ExposureControlBeach,
OMX_ExposureControlLargeAperture,
OMX_ExposureControlSmallApperture,
- OMX_ExposureControlKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
+ OMX_ExposureControlKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
OMX_ExposureControlVendorStartUnused = 0x7F000000, /**< Reserved region for introducing Vendor Extensions */
OMX_ExposureControlMax = 0x7FFFFFFF
} OMX_EXPOSURECONTROLTYPE;
-/**
- * White Balance control configuration
+/**
+ * White Balance control configuration
*
* STRUCT MEMBERS:
- * nSize : Size of the structure in bytes
+ * nSize : Size of the structure in bytes
* nVersion : OMX specification version information
- * nPortIndex : Port that this structure applies to
- * eExposureControl : Exposure control enumeration
+ * nPortIndex : Port that this structure applies to
+ * eExposureControl : Exposure control enumeration
*/
typedef struct OMX_CONFIG_EXPOSURECONTROLTYPE {
OMX_U32 nSize;
@@ -517,16 +543,16 @@ typedef struct OMX_CONFIG_EXPOSURECONTROLTYPE {
} OMX_CONFIG_EXPOSURECONTROLTYPE;
-/**
- * Defines sensor supported mode.
+/**
+ * Defines sensor supported mode.
*
* STRUCT MEMBERS:
- * nSize : Size of the structure in bytes
+ * nSize : Size of the structure in bytes
* nVersion : OMX specification version information
- * nPortIndex : Port that this structure applies to
- * nFrameRate : Single shot mode is indicated by a 0
+ * nPortIndex : Port that this structure applies to
+ * nFrameRate : Single shot mode is indicated by a 0
* bOneShot : Enable for single shot, disable for streaming
- * sFrameSize : Framesize
+ * sFrameSize : Framesize
*/
typedef struct OMX_PARAM_SENSORMODETYPE {
OMX_U32 nSize;
@@ -538,13 +564,13 @@ typedef struct OMX_PARAM_SENSORMODETYPE {
} OMX_PARAM_SENSORMODETYPE;
-/**
- * Defines contrast level
+/**
+ * Defines contrast level
*
* STRUCT MEMBERS:
- * nSize : Size of the structure in bytes
- * nVersion : OMX specification version information
- * nPortIndex : Port that this structure applies to
+ * nSize : Size of the structure in bytes
+ * nVersion : OMX specification version information
+ * nPortIndex : Port that this structure applies to
* nContrast : Values allowed for contrast -100 to 100, zero means no change
*/
typedef struct OMX_CONFIG_CONTRASTTYPE {
@@ -555,14 +581,14 @@ typedef struct OMX_CONFIG_CONTRASTTYPE {
} OMX_CONFIG_CONTRASTTYPE;
-/**
- * Defines brightness level
+/**
+ * Defines brightness level
*
* STRUCT MEMBERS:
- * nSize : Size of the structure in bytes
- * nVersion : OMX specification version information
- * nPortIndex : Port that this structure applies to
- * nBrightness : 0-100%
+ * nSize : Size of the structure in bytes
+ * nVersion : OMX specification version information
+ * nPortIndex : Port that this structure applies to
+ * nBrightness : 0-100%
*/
typedef struct OMX_CONFIG_BRIGHTNESSTYPE {
OMX_U32 nSize;
@@ -572,16 +598,16 @@ typedef struct OMX_CONFIG_BRIGHTNESSTYPE {
} OMX_CONFIG_BRIGHTNESSTYPE;
-/**
- * Defines backlight level configuration for a video sink, e.g. LCD panel
+/**
+ * Defines backlight level configuration for a video sink, e.g. LCD panel
*
* STRUCT MEMBERS:
* nSize : Size of the structure in bytes
- * nVersion : OMX specification version information
+ * nVersion : OMX specification version information
* nPortIndex : Port that this structure applies to
* nBacklight : Values allowed for backlight 0-100%
- * nTimeout : Number of milliseconds before backlight automatically turns
- * off. A value of 0x0 disables backight timeout
+ * nTimeout : Number of milliseconds before backlight automatically turns
+ * off. A value of 0x0 disables backlight timeout
*/
typedef struct OMX_CONFIG_BACKLIGHTTYPE {
OMX_U32 nSize;
@@ -592,12 +618,12 @@ typedef struct OMX_CONFIG_BACKLIGHTTYPE {
} OMX_CONFIG_BACKLIGHTTYPE;
-/**
- * Defines setting for Gamma
+/**
+ * Defines setting for Gamma
*
* STRUCT MEMBERS:
* nSize : Size of the structure in bytes
- * nVersion : OMX specification version information
+ * nVersion : OMX specification version information
* nPortIndex : Port that this structure applies to
* nGamma : Values allowed for gamma -100 to 100, zero means no change
*/
@@ -609,14 +635,14 @@ typedef struct OMX_CONFIG_GAMMATYPE {
} OMX_CONFIG_GAMMATYPE;
-/**
- * Define for setting saturation
- *
+/**
+ * Define for setting saturation
+ *
* STRUCT MEMBERS:
* nSize : Size of the structure in bytes
* nVersion : OMX specification version information
* nPortIndex : Port that this structure applies to
- * nSaturation : Values allowed for saturation -100 to 100, zero means
+ * nSaturation : Values allowed for saturation -100 to 100, zero means
* no change
*/
typedef struct OMX_CONFIG_SATURATIONTYPE {
@@ -627,14 +653,14 @@ typedef struct OMX_CONFIG_SATURATIONTYPE {
} OMX_CONFIG_SATURATIONTYPE;
-/**
- * Define for setting Lightness
+/**
+ * Define for setting Lightness
*
* STRUCT MEMBERS:
* nSize : Size of the structure in bytes
* nVersion : OMX specification version information
* nPortIndex : Port that this structure applies to
- * nLightness : Values allowed for lightness -100 to 100, zero means no
+ * nLightness : Values allowed for lightness -100 to 100, zero means no
* change
*/
typedef struct OMX_CONFIG_LIGHTNESSTYPE {
@@ -645,17 +671,17 @@ typedef struct OMX_CONFIG_LIGHTNESSTYPE {
} OMX_CONFIG_LIGHTNESSTYPE;
-/**
- * Plane blend configuration
+/**
+ * Plane blend configuration
*
* STRUCT MEMBERS:
- * nSize : Size of the structure in bytes
+ * nSize : Size of the structure in bytes
* nVersion : OMX specification version information
* nPortIndex : Index of input port associated with the plane.
- * nDepth : Depth of the plane in relation to the screen. Higher
- * numbered depths are "behind" lower number depths.
+ * nDepth : Depth of the plane in relation to the screen. Higher
+ * numbered depths are "behind" lower number depths.
* This number defaults to the Port Index number.
- * nAlpha : Transparency blending component for the entire plane.
+ * nAlpha : Transparency blending component for the entire plane.
* See blending modes for more detail.
*/
typedef struct OMX_CONFIG_PLANEBLENDTYPE {
@@ -667,17 +693,17 @@ typedef struct OMX_CONFIG_PLANEBLENDTYPE {
} OMX_CONFIG_PLANEBLENDTYPE;
-/**
+/**
* Define interlace type
*
* STRUCT MEMBERS:
- * nSize : Size of the structure in bytes
- * nVersion : OMX specification version information
+ * nSize : Size of the structure in bytes
+ * nVersion : OMX specification version information
* nPortIndex : Port that this structure applies to
- * bEnable : Enable control variable for this functionality
+ * bEnable : Enable control variable for this functionality
* (see below)
- * nInterleavePortIndex : Index of input or output port associated with
- * the interleaved plane.
+ * nInterleavePortIndex : Index of input or output port associated with
+ * the interleaved plane.
* pPlanarPortIndexes[4] : Index of input or output planar ports.
*/
typedef struct OMX_PARAM_INTERLEAVETYPE {
@@ -689,8 +715,8 @@ typedef struct OMX_PARAM_INTERLEAVETYPE {
} OMX_PARAM_INTERLEAVETYPE;
-/**
- * Defines the picture effect used for an input picture
+/**
+ * Defines the picture effect used for an input picture
*/
typedef enum OMX_TRANSITIONEFFECTTYPE {
OMX_EffectNone,
@@ -700,18 +726,18 @@ typedef enum OMX_TRANSITIONEFFECTTYPE {
OMX_EffectDissolve,
OMX_EffectWipe,
OMX_EffectUnspecifiedMixOfTwoScenes,
- OMX_EffectKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
+ OMX_EffectKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
OMX_EffectVendorStartUnused = 0x7F000000, /**< Reserved region for introducing Vendor Extensions */
OMX_EffectMax = 0x7FFFFFFF
} OMX_TRANSITIONEFFECTTYPE;
-/**
- * Structure used to configure current transition effect
+/**
+ * Structure used to configure current transition effect
*
* STRUCT MEMBERS:
* nSize : Size of the structure in bytes
- * nVersion : OMX specification version information
+ * nVersion : OMX specification version information
* nPortIndex : Port that this structure applies to
* eEffect : Effect to enable
*/
@@ -723,43 +749,43 @@ typedef struct OMX_CONFIG_TRANSITIONEFFECTTYPE {
} OMX_CONFIG_TRANSITIONEFFECTTYPE;
-/**
- * Defines possible data unit types for encoded video data. The data unit
+/**
+ * Defines possible data unit types for encoded video data. The data unit
* types are used both for encoded video input for playback as well as
- * encoded video output from recording.
+ * encoded video output from recording.
*/
typedef enum OMX_DATAUNITTYPE {
OMX_DataUnitCodedPicture,
OMX_DataUnitVideoSegment,
OMX_DataUnitSeveralSegments,
OMX_DataUnitArbitraryStreamSection,
- OMX_DataUnitKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
+ OMX_DataUnitKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
OMX_DataUnitVendorStartUnused = 0x7F000000, /**< Reserved region for introducing Vendor Extensions */
OMX_DataUnitMax = 0x7FFFFFFF
} OMX_DATAUNITTYPE;
-/**
- * Defines possible encapsulation types for coded video data unit. The
- * encapsulation information is used both for encoded video input for
- * playback as well as encoded video output from recording.
+/**
+ * Defines possible encapsulation types for coded video data unit. The
+ * encapsulation information is used both for encoded video input for
+ * playback as well as encoded video output from recording.
*/
typedef enum OMX_DATAUNITENCAPSULATIONTYPE {
OMX_DataEncapsulationElementaryStream,
OMX_DataEncapsulationGenericPayload,
OMX_DataEncapsulationRtpPayload,
- OMX_DataEncapsulationKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
+ OMX_DataEncapsulationKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
OMX_DataEncapsulationVendorStartUnused = 0x7F000000, /**< Reserved region for introducing Vendor Extensions */
OMX_DataEncapsulationMax = 0x7FFFFFFF
} OMX_DATAUNITENCAPSULATIONTYPE;
-/**
- * Structure used to configure the type of being decoded/encoded
+/**
+ * Structure used to configure the type of data being decoded/encoded
*/
typedef struct OMX_PARAM_DATAUNITTYPE {
OMX_U32 nSize; /**< Size of the structure in bytes */
- OMX_VERSIONTYPE nVersion; /**< OMX specification version information */
+ OMX_VERSIONTYPE nVersion; /**< OMX specification version information */
OMX_U32 nPortIndex; /**< Port that this structure applies to */
OMX_DATAUNITTYPE eUnitType;
OMX_DATAUNITENCAPSULATIONTYPE eEncapsulationType;
@@ -767,25 +793,25 @@ typedef struct OMX_PARAM_DATAUNITTYPE {
/**
- * Defines dither types
+ * Defines dither types
*/
typedef enum OMX_DITHERTYPE {
OMX_DitherNone,
OMX_DitherOrdered,
OMX_DitherErrorDiffusion,
OMX_DitherOther,
- OMX_DitherKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
+ OMX_DitherKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
OMX_DitherVendorStartUnused = 0x7F000000, /**< Reserved region for introducing Vendor Extensions */
OMX_DitherMax = 0x7FFFFFFF
} OMX_DITHERTYPE;
-/**
- * Structure used to configure current type of dithering
+/**
+ * Structure used to configure current type of dithering
*/
typedef struct OMX_CONFIG_DITHERTYPE {
OMX_U32 nSize; /**< Size of the structure in bytes */
- OMX_VERSIONTYPE nVersion; /**< OMX specification version information */
+ OMX_VERSIONTYPE nVersion; /**< OMX specification version information */
OMX_U32 nPortIndex; /**< Port that this structure applies to */
OMX_DITHERTYPE eDither; /**< Type of dithering to use */
} OMX_CONFIG_DITHERTYPE;
@@ -794,28 +820,28 @@ typedef struct OMX_CONFIG_CAPTUREMODETYPE {
OMX_U32 nSize;
OMX_VERSIONTYPE nVersion;
OMX_U32 nPortIndex; /**< Port that this structure applies to */
- OMX_BOOL bContinuous; /**< If true then ignore frame rate and emit capture
+ OMX_BOOL bContinuous; /**< If true then ignore frame rate and emit capture
* data as fast as possible (otherwise obey port's frame rate). */
- OMX_BOOL bFrameLimited; /**< If true then terminate capture after the port emits the
- * specified number of frames (otherwise the port does not
- * terminate the capture until instructed to do so by the client).
- * Even if set, the client may manually terminate the capture prior
+ OMX_BOOL bFrameLimited; /**< If true then terminate capture after the port emits the
+ * specified number of frames (otherwise the port does not
+ * terminate the capture until instructed to do so by the client).
+ * Even if set, the client may manually terminate the capture prior
* to reaching the limit. */
OMX_U32 nFrameLimit; /**< Limit on number of frames emitted during a capture (only
* valid if bFrameLimited is set). */
} OMX_CONFIG_CAPTUREMODETYPE;
typedef enum OMX_METERINGTYPE {
-
+
OMX_MeteringModeAverage, /**< Center-weighted average metering. */
OMX_MeteringModeSpot, /**< Spot (partial) metering. */
OMX_MeteringModeMatrix, /**< Matrix or evaluative metering. */
-
- OMX_MeteringKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
+
+ OMX_MeteringKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
OMX_MeteringVendorStartUnused = 0x7F000000, /**< Reserved region for introducing Vendor Extensions */
OMX_EVModeMax = 0x7fffffff
} OMX_METERINGTYPE;
-
+
typedef struct OMX_CONFIG_EXPOSUREVALUETYPE {
OMX_U32 nSize;
OMX_VERSIONTYPE nVersion;
@@ -824,14 +850,14 @@ typedef struct OMX_CONFIG_EXPOSUREVALUETYPE {
OMX_S32 xEVCompensation; /**< Fixed point value stored as Q16 */
OMX_U32 nApertureFNumber; /**< e.g. nApertureFNumber = 2 implies "f/2" - Q16 format */
OMX_BOOL bAutoAperture; /**< Whether aperture number is defined automatically */
- OMX_U32 nShutterSpeedMsec; /**< Shutterspeed in milliseconds */
- OMX_BOOL bAutoShutterSpeed; /**< Whether shutter speed is defined automatically */
+    OMX_U32 nShutterSpeedMsec;          /**< Shutter speed in milliseconds */
+ OMX_BOOL bAutoShutterSpeed; /**< Whether shutter speed is defined automatically */
OMX_U32 nSensitivity; /**< e.g. nSensitivity = 100 implies "ISO 100" */
OMX_BOOL bAutoSensitivity; /**< Whether sensitivity is defined automatically */
} OMX_CONFIG_EXPOSUREVALUETYPE;
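A minimal client-side sketch of this structure follows; it fixes ISO 100 with +1 EV of compensation via OMX_IndexConfigCommonExposureValue and reuses the illustrative INIT_OMX_STRUCT helper from the capture-mode sketch above. The port index is an assumption.

static OMX_ERRORTYPE set_manual_iso(OMX_HANDLETYPE hCamera, OMX_U32 nPort)
{
    OMX_CONFIG_EXPOSUREVALUETYPE exp;

    INIT_OMX_STRUCT(exp);
    exp.nPortIndex        = nPort;
    exp.eMetering         = OMX_MeteringModeMatrix;
    exp.xEVCompensation   = 1 << 16;   /* Q16 fixed point: +1.0 EV */
    exp.bAutoAperture     = OMX_TRUE;
    exp.bAutoShutterSpeed = OMX_TRUE;
    exp.nSensitivity      = 100;       /* "ISO 100"                */
    exp.bAutoSensitivity  = OMX_FALSE;
    return OMX_SetConfig(hCamera, OMX_IndexConfigCommonExposureValue, &exp);
}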
-/**
- * Focus region configuration
+/**
+ * Focus region configuration
*
* STRUCT MEMBERS:
* nSize : Size of the structure in bytes
@@ -862,8 +888,8 @@ typedef struct OMX_CONFIG_FOCUSREGIONTYPE {
OMX_BOOL bBottomRight;
} OMX_CONFIG_FOCUSREGIONTYPE;
-/**
- * Focus Status type
+/**
+ * Focus Status type
*/
typedef enum OMX_FOCUSSTATUSTYPE {
OMX_FocusStatusOff = 0,
@@ -871,13 +897,13 @@ typedef enum OMX_FOCUSSTATUSTYPE {
OMX_FocusStatusReached,
OMX_FocusStatusUnableToReach,
OMX_FocusStatusLost,
- OMX_FocusStatusKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
+ OMX_FocusStatusKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
OMX_FocusStatusVendorStartUnused = 0x7F000000, /**< Reserved region for introducing Vendor Extensions */
OMX_FocusStatusMax = 0x7FFFFFFF
} OMX_FOCUSSTATUSTYPE;
-/**
- * Focus status configuration
+/**
+ * Focus status configuration
*
* STRUCT MEMBERS:
* nSize : Size of the structure in bytes
diff --git a/domx/omx_core/inc/OMX_Image.h b/domx/omx_core/inc/OMX_Image.h
index a6d4666..23a0209 100755
--- a/domx/omx_core/inc/OMX_Image.h
+++ b/domx/omx_core/inc/OMX_Image.h
@@ -1,28 +1,45 @@
+/* ------------------------------------------------------------------
+ * Copyright (C) 1998-2009 PacketVideo
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
+ * express or implied.
+ * See the License for the specific language governing permissions
+ * and limitations under the License.
+ * -------------------------------------------------------------------
+ */
/**
- * Copyright (c) 2008 The Khronos Group Inc.
- *
+ * Copyright (c) 2008 The Khronos Group Inc.
+ *
* Permission is hereby granted, free of charge, to any person obtaining
* a copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sublicense, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject
- * to the following conditions:
+ * to the following conditions:
* The above copyright notice and this permission notice shall be included
- * in all copies or substantial portions of the Software.
- *
+ * in all copies or substantial portions of the Software.
+ *
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
* OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
- * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+ * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
-/**
+/**
* @file OMX_Image.h - OpenMax IL version 1.1.2
- * The structures needed by Image components to exchange parameters and
+ * The structures needed by Image components to exchange parameters and
* configuration data with the components.
*/
#ifndef OMX_Image_h
@@ -34,9 +51,9 @@ extern "C" {
/**
- * Each OMX header must include all required header files to allow the
- * header to compile without errors. The includes below are required
- * for this header file to compile successfully
+ * Each OMX header must include all required header files to allow the
+ * header to compile without errors. The includes below are required
+ * for this header file to compile successfully
*/
#include <OMX_IVCommon.h>
@@ -47,8 +64,8 @@ extern "C" {
* @{
*/
-/**
- * Enumeration used to define the possible image compression coding.
+/**
+ * Enumeration used to define the possible image compression coding.
*/
typedef enum OMX_IMAGE_CODINGTYPE {
OMX_IMAGE_CodingUnused, /**< Value when format is N/A */
@@ -61,59 +78,59 @@ typedef enum OMX_IMAGE_CODINGTYPE {
OMX_IMAGE_CodingPNG, /**< PNG image format */
OMX_IMAGE_CodingLZW, /**< LZW image format */
OMX_IMAGE_CodingBMP, /**< Windows Bitmap format */
- OMX_IMAGE_CodingKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
+ OMX_IMAGE_CodingKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
OMX_IMAGE_CodingVendorStartUnused = 0x7F000000, /**< Reserved region for introducing Vendor Extensions */
OMX_IMAGE_CodingMax = 0x7FFFFFFF
} OMX_IMAGE_CODINGTYPE;
/**
- * Data structure used to define an image path. The number of image paths
- * for input and output will vary by type of the image component.
- *
+ * Data structure used to define an image path. The number of image paths
+ * for input and output will vary by type of the image component.
+ *
* Input (aka Source) : Zero Inputs, one Output,
* Splitter : One Input, 2 or more Outputs,
* Processing Element : One Input, one output,
* Mixer : 2 or more inputs, one output,
* Output (aka Sink) : One Input, zero outputs.
- *
- * The PortDefinition structure is used to define all of the parameters
- * necessary for the compliant component to setup an input or an output
- * image path. If additional vendor specific data is required, it should
- * be transmitted to the component using the CustomCommand function.
- * Compliant components will prepopulate this structure with optimal
+ *
+ * The PortDefinition structure is used to define all of the parameters
+ * necessary for the compliant component to setup an input or an output
+ * image path. If additional vendor specific data is required, it should
+ * be transmitted to the component using the CustomCommand function.
+ * Compliant components will prepopulate this structure with optimal
* values during the OMX_GetParameter() command.
*
* STRUCT MEMBERS:
* cMIMEType : MIME type of data for the port
- * pNativeRender : Platform specific reference for a display if a
+ * pNativeRender : Platform specific reference for a display if a
 *                        sink, otherwise this field is 0
- * nFrameWidth : Width of frame to be used on port if
- * uncompressed format is used. Use 0 for
+ * nFrameWidth : Width of frame to be used on port if
+ * uncompressed format is used. Use 0 for
* unknown, don't care or variable
- * nFrameHeight : Height of frame to be used on port if
- * uncompressed format is used. Use 0 for
+ * nFrameHeight : Height of frame to be used on port if
+ * uncompressed format is used. Use 0 for
* unknown, don't care or variable
- * nStride : Number of bytes per span of an image (i.e.
+ * nStride : Number of bytes per span of an image (i.e.
* indicates the number of bytes to get from
- * span N to span N+1, where negative stride
+ * span N to span N+1, where negative stride
* indicates the image is bottom up
* nSliceHeight : Height used when encoding in slices
- * bFlagErrorConcealment : Turns on error concealment if it is supported by
+ * bFlagErrorConcealment : Turns on error concealment if it is supported by
* the OMX component
- * eCompressionFormat : Compression format used in this instance of
- * the component. When OMX_IMAGE_CodingUnused is
+ * eCompressionFormat : Compression format used in this instance of
+ * the component. When OMX_IMAGE_CodingUnused is
* specified, eColorFormat is valid
* eColorFormat : Decompressed format used by this component
- * pNativeWindow : Platform specific reference for a window object if a
- * display sink , otherwise this field is 0x0.
+ * pNativeWindow : Platform specific reference for a window object if a
+ *                        display sink, otherwise this field is 0x0.
*/
typedef struct OMX_IMAGE_PORTDEFINITIONTYPE {
OMX_STRING cMIMEType;
OMX_NATIVE_DEVICETYPE pNativeRender;
- OMX_U32 nFrameWidth;
+ OMX_U32 nFrameWidth;
OMX_U32 nFrameHeight;
- OMX_S32 nStride;
+ OMX_S32 nStride;
OMX_U32 nSliceHeight;
OMX_BOOL bFlagErrorConcealment;
OMX_IMAGE_CODINGTYPE eCompressionFormat;
@@ -122,18 +139,18 @@ typedef struct OMX_IMAGE_PORTDEFINITIONTYPE {
} OMX_IMAGE_PORTDEFINITIONTYPE;
-/**
- * Port format parameter. This structure is used to enumerate the various
+/**
+ * Port format parameter. This structure is used to enumerate the various
* data input/output format supported by the port.
- *
+ *
* STRUCT MEMBERS:
* nSize : Size of the structure in bytes
* nVersion : OMX specification version information
* nPortIndex : Indicates which port to set
- * nIndex : Indicates the enumeration index for the format from
+ * nIndex : Indicates the enumeration index for the format from
* 0x0 to N-1
- * eCompressionFormat : Compression format used in this instance of the
- * component. When OMX_IMAGE_CodingUnused is specified,
+ * eCompressionFormat : Compression format used in this instance of the
+ * component. When OMX_IMAGE_CodingUnused is specified,
* eColorFormat is valid
* eColorFormat : Decompressed format used by this component
*/
@@ -147,8 +164,8 @@ typedef struct OMX_IMAGE_PARAM_PORTFORMATTYPE {
} OMX_IMAGE_PARAM_PORTFORMATTYPE;
-/**
- * Flash control type
+/**
+ * Flash control type
*
* ENUMS
* Torch : Flash forced constantly on
@@ -160,14 +177,14 @@ typedef enum OMX_IMAGE_FLASHCONTROLTYPE {
OMX_IMAGE_FlashControlRedEyeReduction,
OMX_IMAGE_FlashControlFillin,
OMX_IMAGE_FlashControlTorch,
- OMX_IMAGE_FlashControlKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
+ OMX_IMAGE_FlashControlKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
OMX_IMAGE_FlashControlVendorStartUnused = 0x7F000000, /**< Reserved region for introducing Vendor Extensions */
OMX_IMAGE_FlashControlMax = 0x7FFFFFFF
} OMX_IMAGE_FLASHCONTROLTYPE;
-/**
- * Flash control configuration
+/**
+ * Flash control configuration
*
* STRUCT MEMBERS:
* nSize : Size of the structure in bytes
@@ -183,29 +200,29 @@ typedef struct OMX_IMAGE_PARAM_FLASHCONTROLTYPE {
} OMX_IMAGE_PARAM_FLASHCONTROLTYPE;
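For illustration only: a client would typically select one of the flash modes above with OMX_SetParameter and the standard OMX_IndexParamFlashControl index. The helper macro and port index are assumptions carried over from the earlier sketches.

static OMX_ERRORTYPE set_flash_redeye(OMX_HANDLETYPE hCamera, OMX_U32 nPort)
{
    OMX_IMAGE_PARAM_FLASHCONTROLTYPE flash;

    INIT_OMX_STRUCT(flash);
    flash.nPortIndex    = nPort;
    flash.eFlashControl = OMX_IMAGE_FlashControlRedEyeReduction;
    return OMX_SetParameter(hCamera, OMX_IndexParamFlashControl, &flash);
}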
-/**
- * Focus control type
+/**
+ * Focus control type
*/
typedef enum OMX_IMAGE_FOCUSCONTROLTYPE {
OMX_IMAGE_FocusControlOn = 0,
OMX_IMAGE_FocusControlOff,
OMX_IMAGE_FocusControlAuto,
OMX_IMAGE_FocusControlAutoLock,
- OMX_IMAGE_FocusControlKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
+ OMX_IMAGE_FocusControlKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
OMX_IMAGE_FocusControlVendorStartUnused = 0x7F000000, /**< Reserved region for introducing Vendor Extensions */
OMX_IMAGE_FocusControlMax = 0x7FFFFFFF
} OMX_IMAGE_FOCUSCONTROLTYPE;
-
-/**
- * Focus control configuration
+
+/**
+ * Focus control configuration
*
* STRUCT MEMBERS:
* nSize : Size of the structure in bytes
* nVersion : OMX specification version information
* nPortIndex : Port that this structure applies to
* eFocusControl : Focus control
- * nFocusSteps : Focus can take on values from 0 mm to infinity.
+ * nFocusSteps : Focus can take on values from 0 mm to infinity.
* Interest is only in number of steps over this range.
* nFocusStepIndex : Current focus step index
*/
@@ -219,30 +236,30 @@ typedef struct OMX_IMAGE_CONFIG_FOCUSCONTROLTYPE {
} OMX_IMAGE_CONFIG_FOCUSCONTROLTYPE;
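A hedged sketch of enabling autofocus through this structure, again assuming the earlier INIT_OMX_STRUCT helper; OMX_IndexConfigFocusControl is the standard index that references it.

static OMX_ERRORTYPE enable_autofocus(OMX_HANDLETYPE hCamera, OMX_U32 nPort)
{
    OMX_IMAGE_CONFIG_FOCUSCONTROLTYPE focus;

    INIT_OMX_STRUCT(focus);
    focus.nPortIndex    = nPort;
    focus.eFocusControl = OMX_IMAGE_FocusControlAuto;
    return OMX_SetConfig(hCamera, OMX_IndexConfigFocusControl, &focus);
}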
-/**
+/**
* Q Factor for JPEG compression, which controls the tradeoff between image
 * quality and size. Q Factor provides a simpler means of controlling
* JPEG compression quality, without directly programming Quantization
- * tables for chroma and luma
+ * tables for chroma and luma
*
* STRUCT MEMBERS:
- * nSize : Size of the structure in bytes
- * nVersion : OMX specification version information
- * nPortIndex : Port that this structure applies to
- * nQFactor : JPEG Q factor value in the range of 1-100. A factor of 1
- * produces the smallest, worst quality images, and a factor
- * of 100 produces the largest, best quality images. A
- * typical default is 75 for small good quality images
+ * nSize : Size of the structure in bytes
+ * nVersion : OMX specification version information
+ * nPortIndex : Port that this structure applies to
+ * nQFactor : JPEG Q factor value in the range of 1-100. A factor of 1
+ * produces the smallest, worst quality images, and a factor
+ * of 100 produces the largest, best quality images. A
+ * typical default is 75 for small good quality images
*/
typedef struct OMX_IMAGE_PARAM_QFACTORTYPE {
- OMX_U32 nSize;
- OMX_VERSIONTYPE nVersion;
- OMX_U32 nPortIndex;
- OMX_U32 nQFactor;
+ OMX_U32 nSize;
+ OMX_VERSIONTYPE nVersion;
+ OMX_U32 nPortIndex;
+ OMX_U32 nQFactor;
} OMX_IMAGE_PARAM_QFACTORTYPE;
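By way of example, the quality/size trade-off described above is usually applied on the JPEG encoder's output port with OMX_IndexParamQFactor; the sketch below assumes the INIT_OMX_STRUCT helper introduced earlier and an illustrative port index.

static OMX_ERRORTYPE set_jpeg_quality(OMX_HANDLETYPE hEncoder,
                                      OMX_U32 nOutPort, OMX_U32 nQuality)
{
    OMX_IMAGE_PARAM_QFACTORTYPE q;

    INIT_OMX_STRUCT(q);
    q.nPortIndex = nOutPort;
    q.nQFactor   = nQuality;   /* 1..100; 75 is the typical default noted above */
    return OMX_SetParameter(hEncoder, OMX_IndexParamQFactor, &q);
}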
-/**
- * Quantization table type
+/**
+ * Quantization table type
*/
typedef enum OMX_IMAGE_QUANTIZATIONTABLETYPE {
@@ -250,27 +267,27 @@ typedef enum OMX_IMAGE_QUANTIZATIONTABLETYPE {
OMX_IMAGE_QuantizationTableChroma,
OMX_IMAGE_QuantizationTableChromaCb,
OMX_IMAGE_QuantizationTableChromaCr,
- OMX_IMAGE_QuantizationTableKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
+ OMX_IMAGE_QuantizationTableKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
OMX_IMAGE_QuantizationTableVendorStartUnused = 0x7F000000, /**< Reserved region for introducing Vendor Extensions */
OMX_IMAGE_QuantizationTableMax = 0x7FFFFFFF
} OMX_IMAGE_QUANTIZATIONTABLETYPE;
-/**
+/**
* JPEG quantization tables are used to determine DCT compression for
- * YUV data, as an alternative to specifying Q factor, providing exact
- * control of compression
+ * YUV data, as an alternative to specifying Q factor, providing exact
+ * control of compression
*
* STRUCT MEMBERS:
* nSize : Size of the structure in bytes
- * nVersion : OMX specification version information
+ * nVersion : OMX specification version information
* nPortIndex : Port that this structure applies to
* eQuantizationTable : Quantization table type
- * nQuantizationMatrix[64] : JPEG quantization table of coefficients stored
- * in increasing columns then by rows of data (i.e.
- * row 1, ... row 8). Quantization values are in
+ * nQuantizationMatrix[64] : JPEG quantization table of coefficients stored
+ * in increasing columns then by rows of data (i.e.
+ * row 1, ... row 8). Quantization values are in
* the range 0-255 and stored in linear order
- * (i.e. the component will zig-zag the
- * quantization table data if required internally)
+ * (i.e. the component will zig-zag the
+ * quantization table data if required internally)
*/
typedef struct OMX_IMAGE_PARAM_QUANTIZATIONTABLETYPE {
OMX_U32 nSize;
@@ -281,9 +298,9 @@ typedef struct OMX_IMAGE_PARAM_QUANTIZATIONTABLETYPE {
} OMX_IMAGE_PARAM_QUANTIZATIONTABLETYPE;
-/**
- * Huffman table type, the same Huffman table is applied for chroma and
- * luma component
+/**
+ * Huffman table type, the same Huffman table is applied for chroma and
+ * luma component
*/
typedef enum OMX_IMAGE_HUFFMANTABLETYPE {
OMX_IMAGE_HuffmanTableAC = 0,
@@ -292,23 +309,23 @@ typedef enum OMX_IMAGE_HUFFMANTABLETYPE {
OMX_IMAGE_HuffmanTableACChroma,
OMX_IMAGE_HuffmanTableDCLuma,
OMX_IMAGE_HuffmanTableDCChroma,
- OMX_IMAGE_HuffmanTableKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
+ OMX_IMAGE_HuffmanTableKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
OMX_IMAGE_HuffmanTableVendorStartUnused = 0x7F000000, /**< Reserved region for introducing Vendor Extensions */
OMX_IMAGE_HuffmanTableMax = 0x7FFFFFFF
} OMX_IMAGE_HUFFMANTABLETYPE;
-/**
- * JPEG Huffman table
+/**
+ * JPEG Huffman table
*
* STRUCT MEMBERS:
* nSize : Size of the structure in bytes
* nVersion : OMX specification version information
* nPortIndex : Port that this structure applies to
* eHuffmanTable : Huffman table type
- * nNumberOfHuffmanCodeOfLength[16] : 0-16, number of Huffman codes of each
+ * nNumberOfHuffmanCodeOfLength[16] : 0-16, number of Huffman codes of each
* possible length
- * nHuffmanTable[256] : 0-255, the size used for AC and DC
- * HuffmanTable are 16 and 162
+ * nHuffmanTable[256] : 0-255, the size used for AC and DC
+ * HuffmanTable are 16 and 162
*/
typedef struct OMX_IMAGE_PARAM_HUFFMANTTABLETYPE {
OMX_U32 nSize;
diff --git a/domx/omx_core/inc/OMX_Index.h b/domx/omx_core/inc/OMX_Index.h
index 44d4ea7..bfb8739 100755
--- a/domx/omx_core/inc/OMX_Index.h
+++ b/domx/omx_core/inc/OMX_Index.h
@@ -1,23 +1,40 @@
+/* ------------------------------------------------------------------
+ * Copyright (C) 1998-2009 PacketVideo
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
+ * express or implied.
+ * See the License for the specific language governing permissions
+ * and limitations under the License.
+ * -------------------------------------------------------------------
+ */
/*
- * Copyright (c) 2008 The Khronos Group Inc.
- *
+ * Copyright (c) 2008 The Khronos Group Inc.
+ *
* Permission is hereby granted, free of charge, to any person obtaining
* a copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sublicense, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject
- * to the following conditions:
+ * to the following conditions:
* The above copyright notice and this permission notice shall be included
- * in all copies or substantial portions of the Software.
- *
+ * in all copies or substantial portions of the Software.
+ *
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
* OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
- * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+ * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*
*/
@@ -37,22 +54,22 @@ extern "C" {
/* Each OMX header must include all required header files to allow the
* header to compile without errors. The includes below are required
- * for this header file to compile successfully
+ * for this header file to compile successfully
*/
#include <OMX_Types.h>
/** The OMX_INDEXTYPE enumeration is used to select a structure when either
- * getting or setting parameters and/or configuration data. Each entry in
- * this enumeration maps to an OMX specified structure. When the
+ * getting or setting parameters and/or configuration data. Each entry in
+ * this enumeration maps to an OMX specified structure. When the
* OMX_GetParameter, OMX_SetParameter, OMX_GetConfig or OMX_SetConfig methods
* are used, the second parameter will always be an entry from this enumeration
* and the third entry will be the structure shown in the comments for the entry.
- * For example, if the application is initializing a cropping function, the
- * OMX_SetConfig command would have OMX_IndexConfigCommonInputCrop as the second parameter
- * and would send a pointer to an initialized OMX_RECTTYPE structure as the
+ * For example, if the application is initializing a cropping function, the
+ * OMX_SetConfig command would have OMX_IndexConfigCommonInputCrop as the second parameter
+ * and would send a pointer to an initialized OMX_RECTTYPE structure as the
* third parameter.
- *
+ *
* The enumeration entries named with the OMX_Config prefix are sent using
* the OMX_SetConfig command and the enumeration entries named with the
* OMX_PARAM_ prefix are sent using the OMX_SetParameter command.
@@ -69,11 +86,11 @@ typedef enum OMX_INDEXTYPE {
OMX_IndexParamActiveStream, /**< reference: OMX_PARAM_U32TYPE */
OMX_IndexParamSuspensionPolicy, /**< reference: OMX_PARAM_SUSPENSIONPOLICYTYPE */
OMX_IndexParamComponentSuspended, /**< reference: OMX_PARAM_SUSPENSIONTYPE */
- OMX_IndexConfigCapturing, /**< reference: OMX_CONFIG_BOOLEANTYPE */
- OMX_IndexConfigCaptureMode, /**< reference: OMX_CONFIG_CAPTUREMODETYPE */
- OMX_IndexAutoPauseAfterCapture, /**< reference: OMX_CONFIG_BOOLEANTYPE */
+ OMX_IndexConfigCapturing, /**< reference: OMX_CONFIG_BOOLEANTYPE */
+ OMX_IndexConfigCaptureMode, /**< reference: OMX_CONFIG_CAPTUREMODETYPE */
+ OMX_IndexAutoPauseAfterCapture, /**< reference: OMX_CONFIG_BOOLEANTYPE */
OMX_IndexParamContentURI, /**< reference: OMX_PARAM_CONTENTURITYPE */
- OMX_IndexParamCustomContentPipe, /**< reference: OMX_PARAM_CONTENTPIPETYPE */
+ OMX_IndexParamCustomContentPipe, /**< reference: OMX_PARAM_CONTENTPIPETYPE */
OMX_IndexParamDisableResourceConcealment, /**< reference: OMX_RESOURCECONCEALMENTTYPE */
OMX_IndexConfigMetadataItemCount, /**< reference: OMX_CONFIG_METADATAITEMCOUNTTYPE */
OMX_IndexConfigContainerNodeCount, /**< reference: OMX_CONFIG_CONTAINERNODECOUNTTYPE */
@@ -86,7 +103,7 @@ typedef enum OMX_INDEXTYPE {
OMX_IndexPortStartUnused = 0x02000000,
OMX_IndexParamPortDefinition, /**< reference: OMX_PARAM_PORTDEFINITIONTYPE */
- OMX_IndexParamCompBufferSupplier, /**< reference: OMX_PARAM_BUFFERSUPPLIERTYPE */
+ OMX_IndexParamCompBufferSupplier, /**< reference: OMX_PARAM_BUFFERSUPPLIERTYPE */
OMX_IndexReservedStartUnused = 0x03000000,
/* Audio parameters and configurations */
@@ -118,6 +135,7 @@ typedef enum OMX_INDEXTYPE {
OMX_IndexParamAudioEvrc, /**< reference: OMX_AUDIO_PARAM_EVRCTYPE */
OMX_IndexParamAudioSmv, /**< reference: OMX_AUDIO_PARAM_SMVTYPE */
OMX_IndexParamAudioVorbis, /**< reference: OMX_AUDIO_PARAM_VORBISTYPE */
+ OMX_IndexParamAudioFlac, /**< reference: OMX_AUDIO_PARAM_FLACTYPE */
OMX_IndexConfigAudioMidiImmediateEvent, /**< reference: OMX_AUDIO_CONFIG_MIDIIMMEDIATEEVENTTYPE */
OMX_IndexConfigAudioMidiControl, /**< reference: OMX_AUDIO_CONFIG_MIDICONTROLTYPE */
@@ -238,10 +256,10 @@ typedef enum OMX_INDEXTYPE {
OMX_IndexConfigTimeSeekMode, /**< reference: OMX_TIME_CONFIG_SEEKMODETYPE */
- OMX_IndexKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
+ OMX_IndexKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
/* Vendor specific area */
OMX_IndexVendorStartUnused = 0x7F000000,
- /* Vendor specific structures should be in the range of 0x7F000000
+ /* Vendor specific structures should be in the range of 0x7F000000
to 0x7FFFFFFE. This range is not broken out by vendor, so
private indexes are not guaranteed unique and therefore should
only be sent to the appropriate component. */
diff --git a/domx/omx_core/inc/OMX_Other.h b/domx/omx_core/inc/OMX_Other.h
index 2cce9f8..a9b0c4e 100755
--- a/domx/omx_core/inc/OMX_Other.h
+++ b/domx/omx_core/inc/OMX_Other.h
@@ -1,3 +1,20 @@
+/* ------------------------------------------------------------------
+ * Copyright (C) 1998-2009 PacketVideo
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
+ * express or implied.
+ * See the License for the specific language governing permissions
+ * and limitations under the License.
+ * -------------------------------------------------------------------
+ */
/*
* Copyright (c) 2008 The Khronos Group Inc.
*
diff --git a/domx/omx_core/inc/OMX_TI_Common.h b/domx/omx_core/inc/OMX_TI_Common.h
index 457adb6..b39163f 100755
--- a/domx/omx_core/inc/OMX_TI_Common.h
+++ b/domx/omx_core/inc/OMX_TI_Common.h
@@ -208,6 +208,128 @@ typedef struct OMX_TI_PARAM_METADATABUFFERINFO {
OMX_U32 nMetaDataSize;
} OMX_TI_PARAM_METADATABUFFERINFO;
+/*===============================================================*/
+/** OMX_TI_BUFFERTYPE : This enumeration defines the type of
+ * buffer that is exchanged with the OMX
+ * component port
+ *
+ * OMX_TI_BufferTypeDefault : Default buffer type accessed via a
+ * single virtual address
+ * OMX_TI_BufferTypeVirtual2D : Multiple virtual buffers describing a
+ * 2D buffer
+ * OMX_TI_BufferTypePlatform1D : Platform specific 1D buffer handle
+ * OMX_TI_BufferTypePlatform2D : Platform specific buffer handles
+ * describing a 2D buffer
+ * OMX_TI_BufferTypePhysicalPageList : List of a given number of physical pages
+ *        OMX_TI_BufferTypeHardwareReserved1D : Hardware reserved space only that can
+ *                                          accommodate a 1D buffer by mapping memory
+ *                                          to it
+ */
+/*===============================================================*/
+typedef enum OMX_TI_BUFFERTYPE {
+ OMX_TI_BufferTypeDefault = 0,
+ OMX_TI_BufferTypeVirtual2D,
+ OMX_TI_BufferTypePlatform1D,
+ OMX_TI_BufferTypePlatform2D,
+ OMX_TI_BufferTypePhysicalPageList,
+ OMX_TI_BufferTypeHardwareReserved1D,
+ OMX_TI_BufferTypeMax = 0x7FFFFFFF
+} OMX_TI_BUFFERTYPE;
+
+/*===============================================================*/
+/** OMX_TI_BUFFERDESCRIPTOR_TYPE : This buffer descriptor structure is used
+ * to convey additional buffer information
+ * when OMX_TI_IndexUseBufferDescriptor is
+ * enabled and it is passed via pBuffer
+ * in OMX_BUFFERHEADERTYPE
+ *
+ * @ param nSize : Size of the structure.
+ * @ param eBufType : Specifies type of buffer
+ * @ param nNumOfBuf : Number of component buffers of eBufType
+ * @ param pBuf : Array of buffers of type eBufType
+ */
+/*===============================================================*/
+typedef struct OMX_TI_BUFFERDESCRIPTOR_TYPE {
+ OMX_U32 nSize;
+ OMX_TI_BUFFERTYPE eBufType;
+ OMX_U32 nNumOfBuf;
+ OMX_PTR pBuf[3];
+} OMX_TI_BUFFERDESCRIPTOR_TYPE;
+
+/*===============================================================*/
+/** OMX_TI_PARAM_USEBUFFERDESCRIPTOR : This parameter is used to enable/disable
+ * buffer descriptor mode. When enabled,
+ * the pBuffer in OMX buffer header points
+ * to a buffer descriptor structure
+ * OMX_TI_BUFFERDESCRIPTOR_TYPE instead of
+ * the buffer directly.
+ *
+ * @ param nSize : Size of the structure.
+ * @ param nVersion : Version.
+ * @ param nPortIndex : Port index on which the parameter will
+ * be applied.
+ * @ param bEnabled : Whether buffer descriptor mode is
+ * enabled or not. Set to FALSE (disabled)
+ * by default.
+ */
+/*===============================================================*/
+typedef struct OMX_TI_PARAM_USEBUFFERDESCRIPTOR {
+ OMX_U32 nSize;
+ OMX_VERSIONTYPE nVersion;
+ OMX_U32 nPortIndex;
+ OMX_BOOL bEnabled;
+ OMX_TI_BUFFERTYPE eBufferType;
+} OMX_TI_PARAM_USEBUFFERDESCRIPTOR;
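A hedged sketch of how a client might enable this mode and then read the descriptor back from a buffer header. OMX_TI_IndexUseBufferDescriptor is taken from the comment above and is assumed to be declared in the TI index header; the INIT_OMX_STRUCT helper is the illustrative one from earlier.

static OMX_ERRORTYPE enable_2d_buffers(OMX_HANDLETYPE hComp, OMX_U32 nPort)
{
    OMX_TI_PARAM_USEBUFFERDESCRIPTOR desc;

    INIT_OMX_STRUCT(desc);
    desc.nPortIndex  = nPort;
    desc.bEnabled    = OMX_TRUE;
    desc.eBufferType = OMX_TI_BufferTypeVirtual2D;
    return OMX_SetParameter(hComp,
                            (OMX_INDEXTYPE)OMX_TI_IndexUseBufferDescriptor,
                            &desc);
}

/* Once enabled, pBuffer in every buffer header on that port carries a
 * descriptor rather than the pixel data itself. */
static void inspect_descriptor(const OMX_BUFFERHEADERTYPE *pHdr)
{
    const OMX_TI_BUFFERDESCRIPTOR_TYPE *d =
            (const OMX_TI_BUFFERDESCRIPTOR_TYPE *)pHdr->pBuffer;
    /* For OMX_TI_BufferTypeVirtual2D, d->nNumOfBuf entries of d->pBuf[]
     * (e.g. the Y and UV planes) are expected to be valid. */
    (void)d;
}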
+
+
+/*===============================================================*/
+/** OMX_TI_PARAM_COMPONENTBUFALLOCTYPE :This parameter is used to query/set
+ * internal buffers used by OMX component
+ * after allocation by the user of OMX
+ * component during regular OMX buffer
+ * allocation/free life cycle
+ *
+ * @ param nSize : Size of the structure.
+ * @ param nVersion : Version.
+ * @ param nPortIndex : Port index on which the parameter will
+ * be applied.
+ * @ param nIndex : Present buffer number whose requirement
+ * is queried and then set
+ * @ param eBufType : Present nIndex'ed buffer type
+ * @ param pBuf : Buffer communication
+ * @ param nAllocWidth : Size of buffer (Width in case of 2D)
+ * @ param nAllocLines : Size of buffer (1 in case of 1D)
+ * @ param nOffset : Offset from which buffer communicated is
+ * valid
+ */
+/*===============================================================*/
+typedef struct OMX_TI_PARAM_COMPONENTBUFALLOCTYPE {
+ OMX_U32 nSize;
+ OMX_VERSIONTYPE nVersion;
+ OMX_U32 nPortIndex;
+ OMX_U32 nIndex;
+ OMX_TI_BUFFERTYPE eBufType;
+ OMX_PTR pBuf[3];
+ OMX_U32 nAllocWidth;
+ OMX_U32 nAllocLines;
+ OMX_U32 nOffset;
+} OMX_TI_PARAM_COMPONENTBUFALLOCTYPE;
+
+/*===============================================================*/
+/** OMX_TI_COMPONENT_HANDLE : This parameter is used to retrieve
+ * the component handle by the client.
+ *
+ * @ param nSize : Size of the structure.
+ * @ param nVersion : Version.
+ * @ param pHandle : Component Handle
+ */
+/*===============================================================*/
+typedef struct OMX_TI_COMPONENT_HANDLE {
+ OMX_U32 nSize;
+ OMX_VERSIONTYPE nVersion;
+ OMX_HANDLETYPE pHandle;
+} OMX_TI_COMPONENT_HANDLE;
+
/*******************************************************************
* PRIVATE DECLARATIONS: defined here, used only here
*******************************************************************/
diff --git a/domx/omx_core/inc/OMX_TI_IVCommon.h b/domx/omx_core/inc/OMX_TI_IVCommon.h
index cfe228a..c2b5bb8 100755
--- a/domx/omx_core/inc/OMX_TI_IVCommon.h
+++ b/domx/omx_core/inc/OMX_TI_IVCommon.h
@@ -53,6 +53,7 @@ extern "C" {
#include <OMX_IVCommon.h>
#include <OMX_Image.h>
+#define DCC_PATH "/data/misc/camera/"
#define MAX_URI_LENGTH (OMX_MAX_STRINGNAME_SIZE)
#define MAX_ALGOAREAS (35)
@@ -67,7 +68,8 @@ extern "C" {
/* ======================================================================= */
typedef enum OMX_JPEG_COMPRESSEDMODETYPE {
OMX_JPEG_ModeChunk = 0,
- OMX_JPEG_ModeNonChunk
+ OMX_JPEG_ModeNonChunk,
+ OMX_JPEG_CompressedmodeMax = 0x7fffffff
}OMX_JPEG_COMPRESSEDMODETYPE ;
@@ -85,7 +87,8 @@ typedef enum OMX_JPEG_UNCOMPRESSEDMODETYPE {
OMX_JPEG_UncompressedModeFrame = 0,
OMX_JPEG_UncompressedModeSlice,
OMX_JPEG_UncompressedModeStitch,
- OMX_JPEG_UncompressedModeBurst
+ OMX_JPEG_UncompressedModeBurst,
+ OMX_JPEG_UncompressedModeMax = 0x7fffffff
}OMX_JPEG_UNCOMPRESSEDMODETYPE;
@@ -360,7 +363,8 @@ typedef struct OMX_CONFIG_SCALEQUALITYTYPE {
typedef enum OMX_SMOOTHZOOMMODE{
OMX_Off=0, /**< default OFF */
OMX_Increase,
- OMX_Decrease
+ OMX_Decrease,
+ OMX_SmoothZoomModeMax = 0x7fffffff
}OMX_SMOOTHZOOMMODE;
@@ -406,7 +410,8 @@ typedef enum OMX_EXTIMAGEFILTERTYPE {
OMX_TI_ImageFilterWhiteBoard,
OMX_TI_ImageFilterBlackBoard,
OMX_TI_ImageFilterAqua,
- OMX_TI_ImageFilterPosterize
+ OMX_TI_ImageFilterPosterize,
+ OMX_ImageFilterTypeMax = 0x7fffffff
} OMX_EXTIMAGEFILTERTYPE;
@@ -457,6 +462,8 @@ typedef enum OMX_BRACKETMODETYPE {
OMX_BracketFlashPower,
OMX_BracketAperture,
OMX_BracketTemporal,
+ OMX_BracketExposureGainAbsolute,
+ OMX_BracketVectorShot,
OMX_BrackerTypeKhronosExtensions = 0x6f000000,
OMX_BrackerTypeVendorStartUnused = 0x7f000000,
OMX_BracketTypeMax = 0x7FFFFFFF
@@ -467,8 +474,9 @@ typedef struct OMX_CONFIG_BRACKETINGTYPE {
OMX_VERSIONTYPE nVersion;
OMX_U32 nPortIndex;
OMX_BRACKETMODETYPE eBracketMode;
- OMX_U32 nNbrBracketingValues;
- OMX_S32 nBracketValues[10]; /**< 10 can be assumed */
+ OMX_U32 nNbrBracketingValues;
+ OMX_S32 nBracketValues[10]; /**< 10 can be assumed */
+ OMX_S32 nBracketValues2[10]; /**< 10 can be assumed */
} OMX_CONFIG_BRACKETINGTYPE;
@@ -508,7 +516,7 @@ typedef enum OMX_CAMOPERATINGMODETYPE {
OMX_CaptureImageProfileOpticalCorr1,
OMX_CaptureImageProfileOpticalCorr2,
OMX_CaptureImageProfileExtended1,
- OMX_CaptureStereoImageCapture,
+ OMX_CaptureStereoImageCapture,
OMX_CaptureImageMemoryInput,
OMX_CaptureVideo,
OMX_CaptureHighSpeedVideo,
@@ -516,8 +524,18 @@ typedef enum OMX_CAMOPERATINGMODETYPE {
OMX_TI_CaptureDummy,
OMX_TI_CaptureGestureRecognition,
OMX_TI_CaptureImageProfileZeroShutterLag,
- OMX_CamOperatingModeMax = 0x7fffffff
+ OMX_TI_SinglePreview,
+ OMX_TI_StereoGestureRecognition,
+ OMX_TI_CPCam,
+ OMX_TI_StereoVideo,
+ OMX_CaptureHighQualityVideo,
+ // Put new entries here so OMX_CamOperatingModeMax always points to
+ // the last one
+ OMX_TI_CamOperatingModeCount,
+ OMX_CamOperatingModeMax = OMX_TI_CamOperatingModeCount - 1,
+ OMX_CamOperatingMode = 0x7fffffff
} OMX_CAMOPERATINGMODETYPE;
+
/**
* Capture mode setting: applicable to multi shot capture also including bracketing.
*
@@ -668,7 +686,11 @@ typedef enum OMX_IMAGE_EXTFOCUSCONTROLTYPE {
OMX_IMAGE_FocusControlPortrait, /**< from Xena */
OMX_IMAGE_FocusControlExtended, /**< from Xena */
OMX_IMAGE_FocusControlContinousNormal, /**< from Xena */
- OMX_IMAGE_FocusControlContinousExtended /**< from Xena */
+ OMX_IMAGE_FocusControlContinousExtended, /**< from Xena */
+ OMX_IMAGE_FocusControlContinousFacePriority,
+ OMX_IMAGE_FocusControlContinousRegionPriority,
+ OMX_IMAGE_FocusControlContinousPicture,
+ OMX_IMAGE_FocusControlTypeMax = 0x7fffffff
} OMX_IMAGE_EXTFOCUSCONTROLTYPE;
@@ -834,7 +856,8 @@ typedef enum OMX_EXTWHITEBALCONTROLTYPE {
OMX_TI_WhiteBalControlSunset,
OMX_TI_WhiteBalControlShade,
OMX_TI_WhiteBalControlTwilight,
- OMX_TI_WhiteBalControlWarmFluorescent
+ OMX_TI_WhiteBalControlWarmFluorescent,
+ OMX_TI_WhiteBalControlMax = 0x7fffffff
} OMX_EXTWHITEBALCONTROLTYPE;
/**
@@ -934,7 +957,8 @@ OMX_PROCESSINGTYPE eProc;
typedef enum OMX_HISTTYPE{
OMX_HistControlLuminance = 0, /**< Luminance histogram is calculated (Y)*/
OMX_HistControlColorComponents, /**< A histogram per color component (R, G, B) is calculated*/
- OMX_HistControlChrominanceComponents /**< A histogram per chrominance component (Cb, Cr) is calculated.*/
+ OMX_HistControlChrominanceComponents, /**< A histogram per chrominance component (Cb, Cr) is calculated.*/
+ OMX_HistControl_32BIT_PATCH = 0x7FFFFFFF
}OMX_HISTTYPE;
/**
@@ -971,7 +995,7 @@ typedef struct OMX_CONFIG_HISTOGRAMTYPE {
} OMX_CONFIG_HISTOGRAMTYPE;
/**
- * Enums for HIST component type.
+ * OMX_HISTCOMPONENTTYPE Enumerated Value
*/
typedef enum OMX_HISTCOMPONENTTYPE{
OMX_HISTCOMP_Y = 0, /**< Luminance histogram (Y) */
@@ -980,41 +1004,141 @@ typedef enum OMX_HISTCOMPONENTTYPE{
OMX_HISTCOMP_G, /**< Green histogram component (G)*/
OMX_HISTCOMP_B, /**< Blue histogram component (B)*/
OMX_HISTCOMP_Cb, /**< Chroma blue histogram component (Cb)*/
- OMX_HISTCOMP_Cr /**< Chroma red histogram component (Cr) */
+ OMX_HISTCOMP_Cr, /**< Chroma red histogram component (Cr) */
+ OMX_HISTCOMP_32BIT_PATCH = 0x7FFFFFFF
}OMX_HISTCOMPONENTTYPE;
- /**
+/**
* The OMX_TI_CAMERAVIEWTYPE enumeration is used to identify the
- * particular camera view that the rest of the data in the structure is
- * associated with.
- */
-typedef enum OMX_TI_CAMERAVIEWTYPE
-{
- OMX_2D, /**< Camera view in 2D sensor configuration */
- OMX_Left, /**< Left camera view in stereo sensor configuration */
- OMX_Right, /**< Right camera view in stereo sensor configuration */
+ * particular camera view and frame type that the rest of
+ * the data in the structure is associated with.
+ */
+typedef enum OMX_TI_CAMERAVIEWTYPE {
+ OMX_2D_Prv, /**< Camera view in 2D for preview */
+ OMX_2D_Snap, /**< Camera view in 2D for snapshot */
+ OMX_2D_Cap, /**< Camera view in 2D for capture */
+ OMX_3D_Left_Prv, /**< Left camera view in 3D for preview */
+ OMX_3D_Left_Snap, /**< Left camera view in 3D for snapshot */
+ OMX_3D_Left_Cap, /**< Left camera view in 3D for capture */
+ OMX_3D_Right_Prv, /**< Right camera view in 3D for preview */
+ OMX_3D_Right_Snap, /**< Right camera view in 3D for snapshot */
+ OMX_3D_Right_Cap, /**< Right camera view in 3D for capture */
OMX_TI_CAMERAVIEWTYPE_32BIT_PATCH = 0x7FFFFFFF
} OMX_TI_CAMERAVIEWTYPE;
+
+#define OMX_OTHER_EXTRADATATYPE_SIZE ((OMX_U32)(((OMX_OTHER_EXTRADATATYPE *)0x0)->data)) /**< Size of OMX_OTHER_EXTRADATATYPE
+ without Data[1] and without padding */
+
/**
- * nSize is the size of the structure including the length of data field containing
- * the histogram data.
- * nBins is the number of bins in the histogram.
- * eComponentType specifies the type of the histogram bins according to enum.
- * It can be selected to generate multiple component types, then the extradata struct
- * is repeated for each component type.
- * data[1] first byte of the histogram data
+ * The extra data having DCC data is described with the following structure.
+ * This data contains single flags and values
+ * (not arrays) that have general usage for camera applications.
*/
-typedef struct OMX_HISTOGRAMTYPE {
- OMX_U32 nSize;
- OMX_VERSIONTYPE nVersion;
- OMX_U32 nPortIndex;
+typedef struct OMX_TI_DCCDATATYPE {
+ OMX_U32 nSize;
+ OMX_VERSIONTYPE nVersion;
+ OMX_U32 nPortIndex;
OMX_TI_CAMERAVIEWTYPE eCameraView;
- OMX_U32 nBins;
- OMX_HISTCOMPONENTTYPE eComponentType;
- OMX_U8 data[1];
-} OMX_HISTOGRAMTYPE;
+ OMX_U32 nCameraModuleId;
+ OMX_U32 nDccDescriptorId;
+ OMX_U32 nAlgorithmVendorId;
+ OMX_U32 nUseCaseId;
+ OMX_U32 nOffset;
+ OMX_PTR pData;
+} OMX_TI_DCCDATATYPE;
+/**
+ * The extra data type to feed the camera re-processing function
+ */
+typedef struct OMX_TI_CAMREPROCMETATYPE {
+ OMX_U32 nExpTime;
+ OMX_U32 nGain;
+} OMX_TI_CAMREPROCMETATYPE;
+
+/**
+ * The extra data vector shot feedback info
+ * nConfigId    : Same id that comes with
+ * OMX_TI_CONFIG_ENQUEUESHOTCONFIGS::nShotConfig[x].nConfigId
+ * for particular shot config.
+ * nFrameNum    : Frame number in vector shot repeat sequence.
+ * Starts from 1 for every shot config.
+ *
+ * nExpMin : The exposure time lower limit,[us]
+ * nExpMax : The exposure time upper limit,[us]
+ * nGainMin     : The analog gain lower limit,[0.01EV]
+ * nGainMax     : The analog gain upper limit,[0.01EV]
+ *
+ * nReqEC : Requested total exposure compensation
+ * nReqExpTime : Requested exposure time
+ * nReqGain : Requested gain
+ *
+ * nExpTime : Exposure time of this frame.
+ * nAGain : Analog gain of this frame.
+ *
+ * nSenExpTimeErr : Exposure time error in us.
+ * If the requested exposure time is ExpReq
+ * and the one produced by the sensor is nExpTime then:
+ * nExpTimeErr = nExpTime - ExpReq.
+ * nSenAGainErr: Analog gain error as multiplier (in Q8 format).
+ *
+ * nDevEV : The total exposure deviation,[us]
+ * nDevExpTime : The exposure time deviation after flicker reduction,[us]
+ * nDevAGain    : The analog gain deviation after flicker reduction,[0.01EV]
+ */
+typedef struct OMX_TI_VECTSHOTINFOTYPE {
+ OMX_U32 nConfigId;
+ OMX_U32 nFrameNum;
+ OMX_U32 nExpMin;
+ OMX_U32 nExpMax;
+ OMX_U32 nGainMin;
+ OMX_U32 nGainMax;
+ OMX_S32 nReqEC;
+ OMX_S32 nReqExpTime;
+ OMX_S32 nReqGain;
+ OMX_U32 nExpTime;
+ OMX_U32 nAGain;
+ OMX_S32 nSenExpTimeErr;
+ OMX_U32 nSenAGainErr;
+ OMX_S32 nDevEV;
+ OMX_S32 nDevExpTime;
+ OMX_S32 nDevAGain;
+} OMX_TI_VECTSHOTINFOTYPE;
+
+/*
+ * LSC gain table size
+ */
+#define OMX_TI_LSC_GAIN_TABLE_SIZE (80 * 1024)
+
+/**
+ * Possible LSC table gain formats
+ */
+typedef enum OMX_TI_LSC_GAIN_FORMAT_TYPE {
+ OMX_TI_LSC_GAIN_FORMAT_0Q8,
+ OMX_TI_LSC_GAIN_FORMAT_0Q8_PLUS_1,
+ OMX_TI_LSC_GAIN_FORMAT_1Q7,
+ OMX_TI_LSC_GAIN_FORMAT_1Q7_PLUS_1,
+ OMX_TI_LSC_GAIN_FORMAT_2Q6,
+ OMX_TI_LSC_GAIN_FORMAT_2Q6_PLUS_1,
+ OMX_TI_LSC_GAIN_FORMAT_3Q5,
+ OMX_TI_LSC_GAIN_FORMAT_3Q5_PLUS_1,
+ OMX_TI_LSC_GAIN_FORMAT = 0x7FFFFFFF
+} OMX_TI_LSC_GAIN_FORMAT_TYPE;
+
+/**
+ * The extra data for LSC table
+ * bApplied : If true the table is applied to the frame.
+ * eGainFormat : Paxel format
+ * nWidth : LSC table width in paxels
+ * nHeight : LSC table height in paxels
+ * pGainTable : LSC gain table
+ */
+typedef struct OMX_TI_LSCTABLETYPE {
+ OMX_BOOL bApplied;
+ OMX_TI_LSC_GAIN_FORMAT_TYPE eGainFormat;
+ OMX_U32 nWidth;
+ OMX_U32 nHeight;
+ OMX_U8 pGainTable[OMX_TI_LSC_GAIN_TABLE_SIZE];
+} OMX_TI_LSCTABLETYPE;
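These payloads arrive as OMX_OTHER_EXTRADATATYPE blocks appended after nFilledLen in a returned buffer. The sketch below walks that list to locate the vector-shot feedback block; the 4-byte-aligned start address and the OMX_ExtraDataNone terminator follow the usual IL convention and are assumptions about the component's behaviour, not something this patch guarantees.

#include <stdint.h>
#include <stddef.h>

static const OMX_TI_VECTSHOTINFOTYPE *
find_shot_info(const OMX_BUFFERHEADERTYPE *pHdr)
{
    uintptr_t addr = (uintptr_t)(pHdr->pBuffer + pHdr->nOffset + pHdr->nFilledLen);
    const OMX_OTHER_EXTRADATATYPE *ed;

    addr = (addr + 3U) & ~(uintptr_t)3U;   /* extra data starts 4-byte aligned */
    for (ed = (const OMX_OTHER_EXTRADATATYPE *)addr;
         ed->eType != OMX_ExtraDataNone;
         ed = (const OMX_OTHER_EXTRADATATYPE *)((const OMX_U8 *)ed + ed->nSize)) {
        if (ed->eType == (OMX_EXTRADATATYPE)OMX_TI_VectShotInfo)
            return (const OMX_TI_VECTSHOTINFOTYPE *)ed->data;
    }
    return NULL;
}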
-#define OMX_OTHER_EXTRADATATYPE_SIZE ( (OMX_U32)(((OMX_OTHER_EXTRADATATYPE*)0x0)->data) ) /**< Size of OMX_OTHER_EXTRADATATYPE**/
/**
* The extra data having ancillary data is described with the following structure.
* This data contains single flags and values
@@ -1060,6 +1184,11 @@ typedef struct OMX_TI_ANCILLARYDATATYPE {
OMX_U8 nDCCStatus;
} OMX_TI_ANCILLARYDATATYPE;
+/**
+ * White Balance Results data
+ * The extra data having white balance results data is
+ * described with the following structure.
+ */
typedef struct OMX_TI_WHITEBALANCERESULTTYPE {
OMX_U32 nSize; /**< Size */
OMX_VERSIONTYPE nVersion; /**< Version */
@@ -1087,15 +1216,17 @@ typedef struct OMX_TI_UNSATURATEDREGIONSTYPE {
OMX_U32 nPortIndex; /**< Port Index */
OMX_U16 nPaxelsX; /**< The number of paxels in the horizontal direction */
OMX_U16 nPaxelsY; /**< The number of paxels in the vertical direction */
- OMX_U16 data[1]; /**< the first value of an array of values that represent */
+ OMX_U16 data[1]; /**< the first value of an array of values that represent
+ the percentage of unsaturated pixels within the associated paxel */
} OMX_TI_UNSATURATEDREGIONSTYPE;
/**
* OMX_BARCODETYPE
*/
-typedef enum OMX_BARCODETYPE{
+typedef enum OMX_BARCODETYPE {
OMX_BARCODE1D = 0, /**< 1D barcode */
OMX_BARCODE2D, /**< 2D barcode */
+ OMX_BarcodeMax = 0x7fffffff
}OMX_BARCODETYPE;
/**
 * Barcode detection data
@@ -1111,12 +1242,13 @@ typedef struct OMX_BARCODEDETECTIONTYPE {
OMX_VERSIONTYPE nVersion;
OMX_U32 nPortIndex;
OMX_TI_CAMERAVIEWTYPE eCameraView;
- OMX_S32 nLeft;
- OMX_S32 nTop;
- OMX_U32 nWidth;
- OMX_U32 nHeight;
- OMX_S32 nOrientation;
- OMX_BARCODETYPE eBarcodetype;
+ OMX_S32 nLeft; /**< The leftmost coordinate of the detected area rectangle */
+ OMX_S32 nTop; /**< Topmost coordinate of the detected area rectangle */
+ OMX_U32 nWidth; /**< The width of the detected area rectangle in pixels */
+ OMX_U32 nHeight; /**< The height of the detected area rectangle in pixels */
+ OMX_S32 nOrientation; /**< The orientation of the axis of the detected object.
+ This refers to the angle between the vertical axis of barcode and the horizontal axis */
+ OMX_BARCODETYPE eBarcodetype; /**< An enumeration specifying the barcode type, as listed in the given table */
} OMX_BARCODEDETECTIONTYPE;
/**
@@ -1131,10 +1263,10 @@ typedef struct OMX_FRONTOBJDETECTIONTYPE {
OMX_VERSIONTYPE nVersion;
OMX_U32 nPortIndex;
OMX_TI_CAMERAVIEWTYPE eCameraView;
- OMX_S32 nLeft;
- OMX_S32 nTop;
- OMX_U32 nWidth;
- OMX_U32 nHeight;
+ OMX_S32 nLeft; /**< The leftmost coordinate of the detected area rectangle */
+ OMX_S32 nTop; /**< The topmost coordinate of the detected area rectangle */
+ OMX_U32 nWidth; /**< The width of the detected area rectangle in pixels */
+ OMX_U32 nHeight; /**< The height of the detected area rectangle in pixels */
} OMX_FRONTOBJDETECTIONTYPE;
/**
@@ -1147,8 +1279,9 @@ typedef struct OMX_DISTANCEESTIMATIONTYPE {
OMX_VERSIONTYPE nVersion;
OMX_U32 nPortIndex;
OMX_TI_CAMERAVIEWTYPE eCameraView;
- OMX_U32 nDistance;
- OMX_U32 nLargestDiscrepancy;
+ OMX_U32 nDistance; /**< Estimated distance to the object in millimeters */
+ OMX_U32 nLargestDiscrepancy; /**< the estimated largest discrepancy of the distance to the object in millimeters.
+ When equal to MAX_INT the discrepancy is unknown */
} OMX_DISTANCEESTIMATIONTYPE;
/**
@@ -1162,8 +1295,10 @@ typedef struct OMX_MOTIONESTIMATIONTYPE {
OMX_VERSIONTYPE nVersion;
OMX_U32 nPortIndex;
OMX_TI_CAMERAVIEWTYPE eCameraView;
- OMX_S32 nPanX;
- OMX_S32 nPanY;
+ OMX_S32 nPanX; /**< The detected translation in horizontal direction.
+ The value is represented as pixels in Q16-format */
+ OMX_S32 nPanY; /**< The detected translation in vertical direction.
+ The value is represented as pixels in Q16-format */
} OMX_MOTIONESTIMATIONTYPE;
@@ -1181,11 +1316,12 @@ typedef struct OMX_FOCUSREGIONTYPE {
OMX_VERSIONTYPE nVersion;
OMX_U32 nPortIndex;
OMX_TI_CAMERAVIEWTYPE eCameraView;
- OMX_U32 nRefPortIndex;
- OMX_S32 nLeft;
- OMX_S32 nTop;
- OMX_U32 nWidth;
- OMX_U32 nHeight;
+ OMX_U32 nRefPortIndex; /**< The port the image frame size is defined on.
+ This image frame size is used as reference for the focus region rectangle */
+ OMX_S32 nLeft; /**< The leftmost coordinate of the focus region rectangle */
+ OMX_S32 nTop; /**< The topmost coordinate of the focus region rectangle */
+ OMX_U32 nWidth; /**< The width of the focus region rectangle in pixels */
+ OMX_U32 nHeight; /**< The height of the focus region rectangle in pixels */
} OMX_FOCUSREGIONTYPE;
/**
@@ -1194,7 +1330,8 @@ typedef struct OMX_FOCUSREGIONTYPE {
*/
typedef enum OMX_ISOSETTINGTYPE{
OMX_Auto = 0, /**< */
- OMX_IsoManual /**< */
+ OMX_IsoManual, /**< */
+ OMX_IsoSettingMax = 0x7fffffff
}OMX_ISOSETTINGTYPE;
/**
@@ -1237,6 +1374,38 @@ typedef struct OMX_CONFIG_SENSORTYPE {
} OMX_CONFIG_SENSORTYPE;
/**
+* Sensor Detect
+*/
+typedef struct OMX_TI_PARAM_SENSORDETECT {
+ OMX_U32 nSize;
+ OMX_VERSIONTYPE nVersion;
+ OMX_U32 nPortIndex;
+ OMX_BOOL bSensorDetect;
+} OMX_TI_PARAM_SENSORDETECT;
+
+/**
+ * OMX_BAYERCOMPRESSION
+ *
+ */
+typedef enum OMX_BAYERCOMPRESSION {
+ OMX_BAYER_UNPACKED,
+ OMX_BAYER_PACKED10,
+ OMX_BAYER_ALAW,
+ OMX_BAYER_DPCM,
+ OMX_BAYER_MAX = 0x7FFFFFFF
+} OMX_BAYERCOMPRESSION;
+
+/**
+* Sensor Detect
+*/
+typedef struct OMX_TI_PARAM_BAYERCOMPRESSION {
+ OMX_U32 nSize;
+ OMX_VERSIONTYPE nVersion;
+ OMX_U32 nPortIndex;
+ OMX_BAYERCOMPRESSION eBayerCompression;
+} OMX_TI_PARAM_BAYERCOMPRESSION;
+
+/**
* Sensor custom data type
*/
typedef struct OMX_CONFIG_SENSORCUSTOMDATATYPE {
@@ -1256,7 +1425,8 @@ typedef enum OMX_OBJDETECTQUALITY{
OMX_Default, /**< The default detection, should be used when no control of the detection quality is given.*/
OMX_BetterDetection, /**< A detection that levels correct detection with speed*/
OMX_BestDtection, /**< A detection that prioritizes correct detection*/
- OMX_AUTODETECTION /**< Automatically decide which object detection quality is best.*/
+ OMX_AUTODETECTION, /**< Automatically decide which object detection quality is best.*/
+ OMX_ObjDetectQualityMax = 0x7fffffff
}OMX_OBJDETECTQUALITY;
/**
@@ -1299,7 +1469,8 @@ typedef struct OMX_CONFIG_OBJDETECTIONTYPE {
*/
typedef enum OMX_DISTTYPE{
OMX_DistanceControlFocus = 0, /**< focus objects distance type*/
- OMX_DISTANCECONTROL_RECT /**< Evaluated distance to the object found in the rectangelar area indicated as input region. */
+    OMX_DISTANCECONTROL_RECT,     /**< Evaluated distance to the object found in the rectangular area indicated as input region. */
+ OMX_DistTypeMax = 0x7fffffff
}OMX_DISTTYPE;
@@ -1345,12 +1516,25 @@ typedef struct OMX_CONFIG_DISTANCETYPE {
*
*/
typedef struct OMX_FACEATTRIBUTE {
- OMX_U32 nARGBEyeColor;
- OMX_U32 nARGBSkinColor;
- OMX_U32 nARGBHairColor;
- OMX_U32 nSmileScore;
- OMX_U32 nBlinkScore;
- OMX_U32 xIdentity[4];
+    OMX_U32 nARGBEyeColor;     /**< Indicates a 32-bit eye color of the person,
+ where bits 0-7 are blue, bits 15-8 are green, bits 24-16 are red,
+ and bits 31-24 are for alpha. */
+    OMX_U32 nARGBSkinColor;    /**< Indicates a 32-bit skin color of the person,
+ where bits 0-7 are blue, bits 15-8 are green, bits 24-16 are red,
+ and bits 31-24 are for alpha */
+    OMX_U32 nARGBHairColor;    /**< Indicates a 32-bit hair color of the person,
+ where bits 0-7 are blue, bits 15-8 are green, bits 24-16 are red,
+ and bits 31-24 are for alpha */
+ OMX_U32 nSmileScore; /**< Smile detection score between 0 and 100, where 0 means not detecting,
+ 1 means least certain and 100 means most certain a smile is detected */
+ OMX_U32 nBlinkScore; /**< Eye-blink detection score between 0 and 100, where 0 means not detecting,
+ 1 means least certain and 100 means most certain an eye-blink is detected */
+ OMX_U32 xIdentity[4]; /**< represents the identity of the face. With identity equal to zero this is not supported.
+ This can be used by a face recognition application.
+                                     The component shall not reuse an identity value unless it refers to the same face.
+                                     Can be used to track detected faces as they move between frames.
+ Specific usage of this field is implementation dependent.
+ It can be some kind of ID */
} OMX_FACEATTRIBUTE;
/**
@@ -1376,16 +1560,22 @@ typedef struct OMX_TI_FACERESULT {
OMX_VERSIONTYPE nVersion;
OMX_U32 nPortIndex;
OMX_TI_CAMERAVIEWTYPE eCameraView;
- OMX_U32 nScore;
- OMX_S32 nLeft;
- OMX_S32 nTop;
-OMX_U32 nWidth;
-OMX_U32 nHeight;
+ OMX_U32 nScore; /**< Detection score between 0 and 100, where 0 means unknown score,
+ 1 means least certain and 100 means most certain the detection is correct */
+ OMX_S32 nLeft; /**< The leftmost coordinate of the detected area rectangle */
+ OMX_S32 nTop; /**< The topmost coordinate of the detected area rectangle */
+ OMX_U32 nWidth; /**< The width of the detected area rectangle in pixels */
+ OMX_U32 nHeight; /**< The height of the detected area rectangle in pixels */
+ // The orientation of the axis of the detected object.
+ // Here roll angle is defined as the angle between the vertical axis of face and the horizontal axis.
+ // All angles can have the value of -180 to 180 degree in Q16 format.
+    // Some face detection algorithms may not be able to fill in the angles; this is denoted by the use of the MAX_INT value.
OMX_S32 nOrientationRoll;
OMX_S32 nOrientationYaw;
OMX_S32 nOrientationPitch;
-OMX_U32 nPriority;
-OMX_FACEATTRIBUTE nFaceAttr;
+ //
+ OMX_U32 nPriority; /**< Represents priority of each object when there are multiple objects detected */
+ OMX_FACEATTRIBUTE nFaceAttr; /**< Describe the attributes of the detected face object with the following structure */
} OMX_TI_FACERESULT;
@@ -1400,7 +1590,7 @@ typedef struct OMX_FACEDETECTIONTYPE {
OMX_VERSIONTYPE nVersion;
OMX_U32 nPortIndex;
OMX_TI_CAMERAVIEWTYPE eCameraView;
- OMX_U16 ulFaceCount;
+ OMX_U16 ulFaceCount; // faces detected
OMX_TI_FACERESULT tFacePosition[35];// 35 is max faces supported by FDIF
} OMX_FACEDETECTIONTYPE;
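As a usage illustration only, the face results could be consumed as below once the face-detection extra data block has been located in a returned buffer; printf and the 35-entry bound mirror the FDIF limit noted in the comment above.

#include <stdio.h>

static void dump_faces(const OMX_FACEDETECTIONTYPE *fd)
{
    OMX_U16 i;

    for (i = 0; i < fd->ulFaceCount && i < 35; i++) {
        const OMX_TI_FACERESULT *f = &fd->tFacePosition[i];
        printf("face %u: score %lu, rect (%ld,%ld) %lux%lu\n",
               (unsigned)i, (unsigned long)f->nScore,
               (long)f->nLeft, (long)f->nTop,
               (unsigned long)f->nWidth, (unsigned long)f->nHeight);
    }
}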
@@ -1426,25 +1616,24 @@ typedef struct OMX_TI_MTISTYPE {
* The OMX_EXTRADATATYPE enumeration is used to define the
* possible extra data payload types.
*/
-typedef enum OMX_EXT_EXTRADATATYPE
-{
- OMX_ExifAttributes = 0x7F000001, /**< Reserved region for introducing Vendor Extensions */
+typedef enum OMX_EXT_EXTRADATATYPE {
+ OMX_ExifAttributes = 0x7F000001, /**< 0x7F000001 Reserved region for introducing Vendor Extensions */
OMX_AncillaryData, /**< 0x7F000002 ancillary data */
OMX_WhiteBalance, /**< 0x7F000003 white balance resultant data */
OMX_UnsaturatedRegions, /**< 0x7F000004 unsaturated regions data */
- OMX_FaceDetection, /**< face detect data */
- OMX_BarcodeDetection, /**< bar-code detct data */
- OMX_FrontObjectDetection, /**< Front object detection data */
- OMX_MotionEstimation, /**< motion Estimation data */
- OMX_TI_MTISType, /**< 0x7F000009 MTIS motion Estimation data */
- OMX_DistanceEstimation, /**< disctance estimation */
- OMX_Histogram, /**< histogram */
- OMX_FocusRegion, /**< focus region data */
+ OMX_FaceDetection, /**< 0x7F000005 face detect data */
+    OMX_BarcodeDetection,           /**< 0x7F000006 bar-code detect data */
+ OMX_FrontObjectDetection, /**< 0x7F000007 Front object detection data */
+ OMX_MotionEstimation, /**< 0x7F000008 motion Estimation data */
+ OMX_MTISType, /**< 0x7F000009 MTIS motion Estimation data */
+    OMX_DistanceEstimation,         /**< 0x7F00000A distance estimation */
+ OMX_Histogram, /**< 0x7F00000B histogram */
+ OMX_FocusRegion, /**< 0x7F00000C focus region data */
OMX_ExtraDataPanAndScan, /**< 0x7F00000D pan and scan data */
- OMX_RawFormat, /**< custom RAW data format */
- OMX_SensorType, /**< vendor & model of the sensor being used */
- OMX_SensorCustomDataLength, /**< vendor specific custom data length */
- OMX_SensorCustomData, /**< vendor specific data */
+ OMX_RawFormat, /**< 0x7F00000E custom RAW data format */
+ OMX_SensorType, /**< 0x7F00000F vendor & model of the sensor being used */
+ OMX_SensorCustomDataLength, /**< 0x7F000010 vendor specific custom data length */
+ OMX_SensorCustomData, /**< 0x7F000011 vendor specific data */
OMX_TI_FrameLayout, /**< 0x7F000012 vendor specific data */
OMX_TI_SEIinfo2004Frame1, /**< 0x7F000013 Used for 2004 SEI message to be provided by video decoders */
OMX_TI_SEIinfo2004Frame2, /**< 0x7F000014 Used for 2004 SEI message to be provided by video decoders */
@@ -1453,8 +1642,24 @@ typedef enum OMX_EXT_EXTRADATATYPE
OMX_TI_RangeMappingInfo, /**< 0x7F000017 Used for Range mapping info provided by Video Decoders */
OMX_TI_RescalingInfo, /**< 0x7F000018 Used for width/height rescaling info provided by Video Decoders */
OMX_TI_WhiteBalanceOverWrite, /**< 0x7F000019 Used for manual AWB settings */
+ OMX_TI_CPCamData, /**< 0x7F00001A Used for cp cam data */
+ OMX_TI_H264ESliceDataInfo, /**< 0x7F00001B */
+ OMX_TI_DccData, /**< 0x7F00001C Used for dcc data overwrite in the file system */
+ OMX_TI_ProfilerData, /**< 0x7F00001D Used for profiling data */
+ OMX_TI_VectShotInfo, /**< 0x7F00001E Used for vector shot feedback notification */
+ OMX_TI_CamReProcMeta, /**< 0x7F00001F Used for meta data input to camera re-proc function */
+ OMX_TI_LSCTable, /**< 0x7F000020 Lens shading table for corresponding frame */
+    OMX_TI_CodecExtenderErrorFrame1,    /**< 0x7F000021 Used for Codec Extended Error to be provided by video decoders */
+    OMX_TI_CodecExtenderErrorFrame2,    /**< 0x7F000022 Used for Codec Extended Error to be provided by video decoders */
+    OMX_TI_MBInfoFrame1,                /**< 0x7F000023 Used for MBError message to be provided by video decoders */
+    OMX_TI_MBInfoFrame2,                /**< 0x7F000024 Used for MBError message to be provided by video decoders */
+ OMX_TI_SEIInfoFrame1, /**< 0x7F000025 Used for SEI message to be provided by video decoders*/
+ OMX_TI_SEIInfoFrame2, /**< 0x7F000026 Used for SEI message to be provided by video decoders*/
+ OMX_TI_VUIInfoFrame1, /**< 0x7F000027 Used for VUI message to be provided by video decoders */
+ OMX_TI_VUIInfoFrame2, /**< 0x7F000028 Used for VUI message to be provided by video decoders */
OMX_TI_ExtraData_Count,
OMX_TI_ExtraData_Max = OMX_TI_ExtraData_Count - 1,
+ OMX_TI_ExtraData_32Bit_Patch = 0x7fffffff
} OMX_EXT_EXTRADATATYPE;
@@ -1472,11 +1677,12 @@ typedef enum OMX_EXT_EXTRADATATYPE
*
*/
typedef struct OMX_CONFIG_EXTRADATATYPE {
- OMX_U32 nSize;
+ OMX_U32 nSize; /**< The size of the structure including data bytes
+ and any padding necessary to ensure 32bit alignment
+ of the next OMX_OTHER_EXTRADATATYPE structure */
OMX_VERSIONTYPE nVersion;
- OMX_U32 nPortIndex;
- OMX_EXT_EXTRADATATYPE eExtraDataType;
- OMX_TI_CAMERAVIEWTYPE eCameraView;
+ OMX_U32 nPortIndex; /**< The read-only value containing the index of the port */
+ OMX_EXT_EXTRADATATYPE eExtraDataType; /**< Identifies the extra data payload type */
OMX_BOOL bEnable;
} OMX_CONFIG_EXTRADATATYPE;
@@ -1487,7 +1693,8 @@ typedef struct OMX_CONFIG_EXTRADATATYPE {
typedef enum OMX_JPEGHEADERTYPE{
OMX_NoHeader = 0,
OMX_JFIF,
- OMX_EXIF
+ OMX_EXIF,
+ OMX_JpegHeaderTypeMax = 0x7fffffff
}OMX_JPEGHEADERTYPE;
/**
* Re-start marker configuration
@@ -1553,7 +1760,8 @@ typedef struct OMX_IMAGE_JPEGMAXSIZE {
typedef enum OMX_IMAGESTAMPOPERATION{
OMX_NewImageStamp = 0,
- OMX_Continuation
+ OMX_Continuation,
+ OMX_ImageStapOperationMax = 0x7fffffff
}OMX_IMAGESTAMPOPERATION;
@@ -1759,22 +1967,6 @@ typedef struct OMX_TI_PARAM_DCCURIINFO {
} OMX_TI_PARAM_DCCURIINFO;
/**
- * Structure used to configure DCC buffer
- *
- * STRUCT MEMBERS:
- * nSize : Size of the structure in bytes
- * nVersion : OMX specification version information
- * nDCCURIBuffSize : Size of the pDCCURIBuff in bytes
- * pDCCURIBuff : Pointer to a buffer
- */
-typedef struct OMX_TI_PARAM_DCCURIBUFFER {
- OMX_U32 nSize;
- OMX_VERSIONTYPE nVersion;
- OMX_U32 nDCCURIBuffSize;
- OMX_U8 *pDCCURIBuff;
-} OMX_TI_PARAM_DCCURIBUFFER;
-
-/**
* Manual White Balance color temperature
* STRUCT MEMBERS:
* nSize : Size of the structure in bytes
@@ -1794,10 +1986,10 @@ typedef struct OMX_TI_CONFIG_WHITEBALANCECOLORTEMPTYPE {
*/
typedef enum OMX_TI_CONFIG_FOCUSSPOTMODETYPE {
OMX_FocusSpotDefault = 0, /** Makes CommonFocusRegion to be used. */
- OMX_FocusSpotSinglecenter,
- OMX_FocusSpotMultiNormal,
- OMX_FocusSpotMultiAverage,
- OMX_FocusSpotMultiCenter,
+ OMX_FocusSpotSinglecenter, /** Only central part of the image is used for focus. */
+ OMX_FocusSpotMultiNormal, /** Middle part of the image is used with 100% weight, upper and lower parts are used with 50%. */
+ OMX_FocusSpotMultiAverage, /** All the image is used with 100% weight. */
+ OMX_FocusSpotMultiCenter, /** Central part of the image is used with 100% weight, the rest is used with 50%. */
OMX_FocusSpotExtensions = 0x6F000000, /** Reserved region for introducing Khronos Standard Extensions */
OMX_FocusSpotModeStartUnused = 0x7F000000, /** Reserved region for introducing Vendor Extensions */
OMX_FocusSpotModeMax = 0x7FFFFFFF
@@ -1823,7 +2015,9 @@ typedef struct OMX_TI_CONFIG_FOCUSSPOTWEIGHTINGTYPE {
* Enumeration of possible Exposure control types for OMX_EXPOSURECONTROLTYPE
*/
typedef enum OMX_TI_EXTEXPOSURECONTROLTYPE {
- OMX_TI_ExposureControlVeryLong = OMX_ExposureControlVendorStartUnused + 1
+ OMX_TI_ExposureControlVeryLong = OMX_ExposureControlVendorStartUnused + 1,
+ OMX_TI_ExposureControlFacePriority,
+ OMX_TI_ExposureControlMax = 0x7fffffff
} OMX_TI_EXTEXPOSURECONTROLTYPE;
/**
@@ -1919,6 +2113,8 @@ typedef enum OMX_TI_STEREOFRAMELAYOUTTYPE {
OMX_TI_StereoFrameLayout2D,
OMX_TI_StereoFrameLayoutTopBottom,
OMX_TI_StereoFrameLayoutLeftRight,
+ OMX_TI_StereoFrameLayoutTopBottomSubsample,
+ OMX_TI_StereoFrameLayoutLeftRightSubsample,
OMX_TI_StereoFrameLayoutMax = 0x7FFFFFFF
} OMX_TI_STEREOFRAMELAYOUTTYPE;
@@ -1942,14 +2138,11 @@ typedef struct OMX_TI_FRAMELAYOUTTYPE {
* extended color format types.
*/
typedef enum OMX_TI_COLOR_FORMATTYPE {
- OMX_TI_COLOR_FormatYUV420PackedSemiPlanarInterlaced =
- (OMX_COLOR_FORMATTYPE) OMX_COLOR_FormatVendorStartUnused + 1,
OMX_TI_COLOR_FormatRawBayer10bitStereo =
OMX_COLOR_FormatVendorStartUnused + 2, /**< 10 bit raw for stereo */
OMX_TI_COLOR_FormatYUV420PackedSemiPlanar =
(OMX_COLOR_FORMATTYPE) OMX_COLOR_FormatVendorStartUnused + 0x100, /* 0x100 is used since it is the corresponding HAL pixel format */
- OMX_COLOR_FormatAndroidOpaque =
- (OMX_COLOR_FORMATTYPE) OMX_COLOR_FormatVendorStartUnused + 0x789 /**< Platform specified opaque format set to unique value 0x789*/
+ OMX_TI_ColorFormatTypeMax = 0x7fffffff
} OMX_TI_COLOR_FORMATTYPE;
/**
@@ -2232,14 +2425,30 @@ typedef struct OMX_TI_CONFIG_EXIF_TAGS {
} OMX_TI_CONFIG_EXIF_TAGS;
/**
+ * The OMX_TI_SENFACING_TYPE enumeration is used to define the
+ * sensor facing.
+ */
+typedef enum OMX_TI_SENFACING_TYPE {
+ OMX_TI_SENFACING_FRONT,
+ OMX_TI_SENFACING_BACK,
+ OMX_TI_SENFACING_MAX = 0x7FFFFFFF
+}OMX_TI_SENFACING_TYPE;
+
+/**
* Structure used to configure current OMX_TI_SENMOUNT_TYPE
*
* @param nSenId
* @param nRotation
+ * @param bMirror
+ * @param bFlip
+ * @param eFacing
*/
typedef struct OMX_TI_SENMOUNT_TYPE {
OMX_U32 nSenId;
OMX_U32 nRotation;
+ OMX_BOOL bMirror;
+ OMX_BOOL bFlip;
+ OMX_TI_SENFACING_TYPE eFacing;
}OMX_TI_SENMOUNT_TYPE;
/**
@@ -2282,15 +2491,17 @@ typedef struct OMX_TI_CONFIG_SHAREDBUFFER {
* nHeightMin : Number of the smallest height supported
* nWidthMax : Number of the biggest width supported
* nHeightMax : Number of the biggest height supported
+ * nMaxResInPixels : Max resolution in pixels. Used for description of 3d resolutions.
*/
typedef struct OMX_TI_CAPRESTYPE {
- OMX_U32 nSize;
+ OMX_U32 nSize; //- OMX struct header not required as this struct won't be queried on its own?
OMX_VERSIONTYPE nVersion;
OMX_U32 nPortIndex;
OMX_U32 nWidthMin; // smallest width supported
OMX_U32 nHeightMin; // smallest height supported
OMX_U32 nWidthMax; // biggest width supported
OMX_U32 nHeightMax; // biggest height supported
+ OMX_U32 nMaxResInPixels;// max resolution in pixels
} OMX_TI_CAPRESTYPE;
/**
@@ -2305,6 +2516,7 @@ typedef struct OMX_TI_CAPRESTYPE {
* ulImageFormatCount : Number of the supported image pixelformat count
* eImageFormats : Array containing the supported image pixelformat count
* tPreviewResRange : Supported preview resolution range
+ * tRotatedPreviewResRange : Supported rotated preview resolution range
* tImageResRange : Supported image resolution range
* tThumbResRange : Supported thumbnail resolution range
* ulWhiteBalanceCount : Supported whitebalance mode count
@@ -2340,53 +2552,130 @@ typedef struct OMX_TI_CAPRESTYPE {
* ulCapVarFPSModesCount : Number of capture FPS modes
* tCapVarFPSModes : Capture FPS modes
* tSenMounting : Sensor mount information
+ * ulAutoConvModesCount : Supported auto convergence modes count
+ * eAutoConvModes : Array containing the auto convergence modes
+ * ulBracketingModesCount : Supported bracketing modes count
+ * eBracketingModes : Array containing the bracketing modes
+ * bGbceSupported : Flag showing if the Gbce is supported
+ * bRawJpegSupported : Flag showing if the Raw + Jpeg is supported
+ * ulImageCodingFormatCount : Supported image coding formats count
+ * eImageCodingFormat : Array containing the image coding formats
+ * uSenNativeResWidth : Sensor native resolution width
+ * uSenNativeResHeight : Sensor native resolution height
+ * ulAlgoAreasFocusCount : Supported number of AlgoAreas for focus areas
+ * ulAlgoAreasExposureCount : Supported number of AlgoAreas for exposure areas
+ * bAELockSupported : Flag showing if the AE Lock is supported
+ * bAWBLockSupported : Flag showing if the AWB Lock is supported
+ * bAFLockSupported : Flag showing if the Af Lock is supported
+ * nFocalLength : Focal length defined in terms of 0.01mm
+ * ulPrvFrameLayoutCount : supported frame layout count for preview
+ * ePrvFrameLayout : Array containing the frame layouts for preview
+ * ulCapFrameLayoutCount : supported frame layout count for capture
+ * eCapFrameLayout : Array containing the frame layouts for capture
+ * bVideoNoiseFilterSupported : Flag showing if the video noise filter is supported
+ * bVideoStabilizationSupported : Flag showing if the video stabilization is supported
+ * bStillCapDuringVideoSupported : Flag showing if the still capture is supported during video
+ * bMechanicalMisalignmentSupported : Flag showing if the mechanical misalignment is supported
+ * bFacePrioritySupported : Flag showing if the face priority is supported
+ * bRegionPrioritySupported : Flag showing if the region priority is supported
+ * bGlbceSupported : Flag showing if the GLBCE is supported
+ * nManualConvMin : Manual convergence min value
+ * nManualConvMax : Manual convergence max value
+ * nManualExpMin : Manual exposure time min value
+ * nManualExpMax : Manual exposure time max value
+ * nBrightnessMin : Brightness min value
+ * nBrightnessMax : Brightness max value
+ * nContrastMin : Contrast min value
+ * nContrastMax : Contrast max value
+ * nSharpnessMin : Sharpness min value
+ * nSharpnessMax : Sharpness max value
+ * nSaturationMin : Saturation min value
+ * nSaturationMax : Saturation max value
*/
typedef struct OMX_TI_CAPTYPE {
- OMX_U32 nSize;
- OMX_VERSIONTYPE nVersion;
- OMX_U32 nPortIndex;
- OMX_U16 ulPreviewFormatCount; // supported preview pixelformat count
- OMX_COLOR_FORMATTYPE ePreviewFormats[100];
- OMX_U16 ulImageFormatCount; // supported image pixelformat count
- OMX_COLOR_FORMATTYPE eImageFormats[100];
- OMX_TI_CAPRESTYPE tPreviewResRange; // supported preview resolution range
- OMX_TI_CAPRESTYPE tImageResRange; // supported image resolution range
- OMX_TI_CAPRESTYPE tThumbResRange; // supported thumbnail resolution range
- OMX_U16 ulWhiteBalanceCount; // supported whitebalance mode count
- OMX_WHITEBALCONTROLTYPE eWhiteBalanceModes[100];
- OMX_U16 ulColorEffectCount; // supported effects count
- OMX_IMAGEFILTERTYPE eColorEffects[100];
- OMX_S32 xMaxWidthZoom; // Fixed point value stored as Q16
- OMX_S32 xMaxHeightZoom; // Fixed point value stored as Q16
- OMX_U16 ulFlickerCount; // supported anti-flicker mode count
- OMX_COMMONFLICKERCANCELTYPE eFlicker[100];
- OMX_U16 ulExposureModeCount; // supported exposure mode count
- OMX_EXPOSURECONTROLTYPE eExposureModes[100];
- OMX_BOOL bLensDistortionCorrectionSupported;
- OMX_BOOL bISONoiseFilterSupported;
- OMX_S32 xEVCompensationMin; // Fixed point value stored as Q16
- OMX_S32 xEVCompensationMax; // Fixed point value stored as Q16
- OMX_U32 nSensitivityMax; // nSensitivityMax = 100 implies maximum supported equal to "ISO 100"
- OMX_U16 ulFocusModeCount; // supported focus mode count
- OMX_IMAGE_FOCUSCONTROLTYPE eFocusModes[100];
- OMX_U16 ulSceneCount; // supported scene count
- OMX_SCENEMODETYPE eSceneModes[100];
- OMX_U16 ulFlashCount; // supported flash modes count
- OMX_IMAGE_FLASHCONTROLTYPE eFlashModes[100];
- OMX_U32 xFramerateMin; // Fixed point value stored as Q16
- OMX_U32 xFramerateMax; // Fixed point value stored as Q16
- OMX_BOOL bContrastSupported;
- OMX_BOOL bSaturationSupported;
- OMX_BOOL bBrightnessSupported;
- OMX_BOOL bProcessingLevelSupported;
- OMX_BOOL bQFactorSupported;
- OMX_U16 ulPrvVarFPSModesCount; // supported variable FPS preview modes count
- OMX_TI_VARFPSTYPE tPrvVarFPSModes[10];
- OMX_U16 ulCapVarFPSModesCount; // supported variable FPS capture modes count
- OMX_TI_VARFPSTYPE tCapVarFPSModes[10];
- OMX_TI_SENMOUNT_TYPE tSenMounting;
- OMX_U16 ulAlgoAreasFocusCount; // supported number of AlgoAreas for focus areas
- OMX_U16 ulAlgoAreasExposureCount; // supported number of AlgoAreas for exposure areas
+ OMX_U32 nSize;
+ OMX_VERSIONTYPE nVersion;
+ OMX_U32 nPortIndex;
+ OMX_U16 ulPreviewFormatCount; // supported preview pixelformat count
+ OMX_COLOR_FORMATTYPE ePreviewFormats[32];
+ OMX_U16 ulImageFormatCount; // supported image pixelformat count
+ OMX_COLOR_FORMATTYPE eImageFormats[32];
+ OMX_TI_CAPRESTYPE tPreviewResRange; // supported preview resolution range
+ OMX_TI_CAPRESTYPE tRotatedPreviewResRange; // supported rotated preview resolution range
+ OMX_TI_CAPRESTYPE tImageResRange; // supported image resolution range
+ OMX_TI_CAPRESTYPE tThumbResRange; // supported thumbnail resolution range
+ OMX_U16 ulWhiteBalanceCount; // supported whitebalance mode count
+ OMX_WHITEBALCONTROLTYPE eWhiteBalanceModes[32];
+ OMX_U16 ulColorEffectCount; // supported effects count
+ OMX_IMAGEFILTERTYPE eColorEffects[32];
+ OMX_S32 xMaxWidthZoom; // Fixed point value stored as Q16
+ OMX_S32 xMaxHeightZoom; // Fixed point value stored as Q16
+ OMX_U16 ulFlickerCount; // supported anti-flicker mode count
+ OMX_COMMONFLICKERCANCELTYPE eFlicker[32];
+ OMX_U16 ulExposureModeCount; // supported exposure mode count
+ OMX_EXPOSURECONTROLTYPE eExposureModes[32];
+ OMX_BOOL bLensDistortionCorrectionSupported;
+ OMX_BOOL bISONoiseFilterSupported;
+ OMX_S32 xEVCompensationMin; // Fixed point value stored as Q16
+ OMX_S32 xEVCompensationMax; // Fixed point value stored as Q16
+ OMX_U32 nSensitivityMax; // nSensitivityMax = 100 implies maximum supported equal to "ISO 100"
+ OMX_U16 ulFocusModeCount; // supported focus mode count
+ OMX_IMAGE_FOCUSCONTROLTYPE eFocusModes[32];
+ OMX_U16 ulSceneCount; // supported scene count
+ OMX_SCENEMODETYPE eSceneModes[64];
+ OMX_U16 ulFlashCount; // supported flash modes count
+ OMX_IMAGE_FLASHCONTROLTYPE eFlashModes[32];
+ OMX_U32 xFramerateMin; // Fixed point value stored as Q16
+ OMX_U32 xFramerateMax; // Fixed point value stored as Q16
+ OMX_BOOL bContrastSupported;
+ OMX_BOOL bSaturationSupported;
+ OMX_BOOL bBrightnessSupported;
+ OMX_BOOL bProcessingLevelSupported;
+ OMX_BOOL bQFactorSupported;
+ OMX_U16 ulPrvVarFPSModesCount; // supported variable FPS preview modes count
+ OMX_TI_VARFPSTYPE tPrvVarFPSModes[10];
+ OMX_U16 ulCapVarFPSModesCount; // supported variable FPS capture modes count
+ OMX_TI_VARFPSTYPE tCapVarFPSModes[10];
+ OMX_TI_SENMOUNT_TYPE tSenMounting;
+ OMX_U16 ulAutoConvModesCount; // supported auto convergence modes count
+ OMX_TI_AUTOCONVERGENCEMODETYPE eAutoConvModes[32];
+ OMX_U16 ulBracketingModesCount; // supported bracketing modes count
+ OMX_BRACKETMODETYPE eBracketingModes[32];
+ OMX_BOOL bGbceSupported; // Flag showing if the Gbce is supported
+ OMX_BOOL bRawJpegSupported; // Flag showing if the Raw + Jpeg is supported
+ OMX_U16 ulImageCodingFormatCount;
+ OMX_IMAGE_CODINGTYPE eImageCodingFormat[32];
+ OMX_U16 uSenNativeResWidth;
+ OMX_U16 uSenNativeResHeight;
+ OMX_U16 ulAlgoAreasFocusCount;
+ OMX_U16 ulAlgoAreasExposureCount;
+ OMX_BOOL bAELockSupported;
+ OMX_BOOL bAWBLockSupported;
+ OMX_BOOL bAFLockSupported;
+ OMX_U16 nFocalLength;
+ OMX_U16 ulPrvFrameLayoutCount; // supported frame layout count for preview
+ OMX_TI_STEREOFRAMELAYOUTTYPE ePrvFrameLayout[16];
+ OMX_U16 ulCapFrameLayoutCount; // supported frame layout count for capture
+ OMX_TI_STEREOFRAMELAYOUTTYPE eCapFrameLayout[16];
+ OMX_BOOL bVideoNoiseFilterSupported;
+ OMX_BOOL bVideoStabilizationSupported;
+ OMX_BOOL bStillCapDuringVideoSupported;
+ OMX_BOOL bMechanicalMisalignmentSupported;
+ OMX_BOOL bFacePrioritySupported;
+ OMX_BOOL bRegionPrioritySupported;
+ OMX_BOOL bGlbceSupported;
+ OMX_S16 nManualConvMin;
+ OMX_S16 nManualConvMax;
+ OMX_U16 nManualExpMin;
+ OMX_U16 nManualExpMax;
+ OMX_S16 nBrightnessMin;
+ OMX_S16 nBrightnessMax;
+ OMX_S16 nContrastMin;
+ OMX_S16 nContrastMax;
+ OMX_S16 nSharpnessMin;
+ OMX_S16 nSharpnessMax;
+ OMX_S16 nSaturationMin;
+ OMX_S16 nSaturationMax;
} OMX_TI_CAPTYPE;
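As a usage illustration of the expanded capability block, the sketch below wraps a caller-owned OMX_TI_CAPTYPE in the OMX_TI_CONFIG_SHAREDBUFFER structure named in the hunk header above and queries it through OMX_TI_IndexConfigCamCapabilities, which is listed later in OMX_TI_Index.h. The shared-buffer field names (nSharedBuffSize, pSharedBuff) come from the unmodified part of this header and should be treated as assumptions here.

/* Sketch: query the camera capabilities into a caller-owned OMX_TI_CAPTYPE.
   Assumes <string.h>, <OMX_Core.h>, OMX_TI_IVCommon.h and OMX_TI_Index.h are included. */
static OMX_ERRORTYPE queryCamCaps(OMX_HANDLETYPE hCamera, OMX_TI_CAPTYPE *pCaps)
{
    OMX_TI_CONFIG_SHAREDBUFFER sharedBuffer;

    memset(pCaps, 0, sizeof(*pCaps));
    pCaps->nSize = sizeof(OMX_TI_CAPTYPE);
    pCaps->nVersion.s.nVersionMajor = 1;
    pCaps->nVersion.s.nVersionMinor = 1;

    memset(&sharedBuffer, 0, sizeof(sharedBuffer));
    sharedBuffer.nSize = sizeof(OMX_TI_CONFIG_SHAREDBUFFER);
    sharedBuffer.nVersion.s.nVersionMajor = 1;
    sharedBuffer.nVersion.s.nVersionMinor = 1;
    sharedBuffer.nSharedBuffSize = sizeof(OMX_TI_CAPTYPE);   /* assumed field names */
    sharedBuffer.pSharedBuff = (OMX_U8 *) pCaps;

    return OMX_GetConfig(hCamera,
                         (OMX_INDEXTYPE) OMX_TI_IndexConfigCamCapabilities,
                         &sharedBuffer);
}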
@@ -2467,25 +2756,6 @@ typedef struct OMX_TI_CONFIG_FOCUSDISTANCETYPE {
OMX_S32 nLensPosition;
} OMX_TI_CONFIG_FOCUSDISTANCETYPE;
-/*
-* STRUCT MEMBERS:
-* nSize : Size of the structure in bytes
-* nVersion : OMX specification version information
-* nPortIndex : Port that this structure applies to
-* pAAAskipBuff : Pointer to a buffer
-* AAAskipBuffId : Id of the send buffer
-* AAAskipBuffSize : Size of the sent buffer
-*/
-typedef struct OMX_TI_CONFIG_AAASKIPBUFFERTYPE {
- OMX_U32 nSize;
- OMX_VERSIONTYPE nVersion;
- OMX_U32 nPortIndex;
- OMX_PTR pAAAskipBuff;
- OMX_U32 AAAskipBuffId;
- OMX_U32 AAAskipBuffSize;
-} OMX_TI_CONFIG_AAASKIPBUFFERTYPE;
-
-
/**
* The OMX_TI_BRIGHTNESSCONTRASTCRTLTYPE enumeration is used to define the
* brightness and contrast mode types.
@@ -2532,6 +2802,459 @@ typedef struct OMX_TI_CONFIG_VARFRMRANGETYPE {
} OMX_TI_CONFIG_VARFRMRANGETYPE;
/**
+ * Single preview capture modes
+ */
+ typedef enum OMX_TI_SINGLEPREVIEWMODETYPE {
+ OMX_TI_SinglePreviewMode_PreviewOnly,
+ OMX_TI_SinglePreviewMode_Video,
+ OMX_TI_SinglePreviewMode_ImageCapture,
+ OMX_TI_SinglePreviewMode_ImageCaptureHighSpeed,
+ OMX_TI_SinglePreviewMode_Reprocess,
+ OMX_TI_SinglePreviewMode = 0x7FFFFFFF
+ } OMX_TI_SINGLEPREVIEWMODETYPE;
+
+/**
+ * Define configuration structure for
+ * single preview capture mode
+ *
+ * STRUCT MEMBERS:
+ * nSize : Size of the structure in bytes
+ * nVersion : OMX specification version information
+ * eMode : Select the sub-usecase mode (Video/HQ/HS)
+ */
+ typedef struct OMX_TI_CONFIG_SINGLEPREVIEWMODETYPE {
+ OMX_U32 nSize;
+ OMX_VERSIONTYPE nVersion;
+ OMX_TI_SINGLEPREVIEWMODETYPE eMode;
+ } OMX_TI_CONFIG_SINGLEPREVIEWMODETYPE;
+
+
+/**
+ * Configuration structure used to freeze AWB parameter modifications.
+ *
+ * @param nSize Size of the structure in bytes.
+ * @param nVersion OMX specification version information.
+ * @param nPortIndex Port index to which to apply.
+ * @param nTimeDelay Time for which the AWB parameters are to be frozen,
+ * measured in milliseconds.
+ */
+ typedef struct OMX_TI_CONFIG_FREEZE_AWB {
+ OMX_U32 nSize;
+ OMX_VERSIONTYPE nVersion;
+ OMX_U32 nPortIndex;
+ OMX_U32 nTimeDelay;
+ } OMX_TI_CONFIG_FREEZE_AWB;
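A small hedged sketch of applying this config: the structure is filled exactly as documented above, but the OMX_TI_INDEXTYPE entry that selects it is not visible in this excerpt, so the index name below is a placeholder.

/* Sketch: freeze AWB for 500 ms. OMX_TI_IndexConfigFreezeAWB is a placeholder
   name; the real enum entry is not shown in this excerpt. Assumes the same
   includes as the earlier sketch. */
static OMX_ERRORTYPE freezeAwb(OMX_HANDLETYPE hCamera, OMX_U32 nPortIndex)
{
    OMX_TI_CONFIG_FREEZE_AWB freeze;

    memset(&freeze, 0, sizeof(freeze));
    freeze.nSize = sizeof(OMX_TI_CONFIG_FREEZE_AWB);
    freeze.nVersion.s.nVersionMajor = 1;
    freeze.nVersion.s.nVersionMinor = 1;
    freeze.nPortIndex = nPortIndex;
    freeze.nTimeDelay = 500;   /* milliseconds */

    return OMX_SetConfig(hCamera,
                         (OMX_INDEXTYPE) OMX_TI_IndexConfigFreezeAWB,   /* hypothetical */
                         &freeze);
}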
+
+/**
+ * Configuration structure used to set
+ * minimum time between two sequential WB coefficients modifications.
+ *
+ * @param nSize Size of the structure in bytes.
+ * @param nVersion OMX specification version information.
+ * @param nPortIndex Port index to which to apply.
+ * @param nDelayTime The time in milliseconds.
+ */
+ typedef struct OMX_TI_CONFIG_AWB_DELAY {
+ OMX_U32 nSize;
+ OMX_VERSIONTYPE nVersion;
+ OMX_U32 nPortIndex;
+ OMX_U32 nDelayTime;
+ } OMX_TI_CONFIG_AWB_DELAY;
+
+/**
+ * Configuration structure used to set
+ * minimum time delay between
+ * two sequential AE parameters modifications
+ *
+ * @param nSize Size of the structure in bytes.
+ * @param nVersion OMX specification version information.
+ * @param nPortIndex Port index to which to apply.
+ * @param nDelayTime The time in milliseconds.
+ */
+ typedef struct OMX_TI_CONFIG_AE_DELAY {
+ OMX_U32 nSize;
+ OMX_VERSIONTYPE nVersion;
+ OMX_U32 nPortIndex;
+ OMX_U32 nDelayTime;
+ } OMX_TI_CONFIG_AE_DELAY;
+
+
+/**
+ * Configuration structure used to freeze AE modifications
+ * for a nTimeDelay milliseconds
+ *
+ * @param nSize Size of the structure in bytes.
+ * @param nVersion OMX specification version information.
+ * @param nPortIndex Port index to which to apply.
+ * @param nTimeDelay The time in milliseconds.
+ */
+ typedef struct OMX_TI_CONFIG_FREEZE_AE {
+ OMX_U32 nSize;
+ OMX_VERSIONTYPE nVersion;
+ OMX_U32 nPortIndex;
+ OMX_U32 nTimeDelay;
+ } OMX_TI_CONFIG_FREEZE_AE;
+
+/**
+ * Configuration structure used to set
+ * the AE gain threshold
+ *
+ * @param nSize Size of the structure in bytes.
+ * @param nVersion OMX specification version information.
+ * @param uMinTH Minimum value for AE gain.
+ * @param uMaxTH Maximum value for AE gain.
+ */
+ typedef struct OMX_TI_CONFIG_AE_THRESHOLD {
+ OMX_U32 nSize;
+ OMX_VERSIONTYPE nVersion;
+ OMX_U32 nPortIndex;
+ OMX_U32 uMinTH;
+ OMX_U32 uMaxTH;
+ } OMX_TI_CONFIG_AE_THRESHOLD;
+
+/**
+ * Enumeration describing the main gestures
+ */
+ typedef enum OMX_TI_GESTURES_TYPE {
+ OMX_TI_GESTURE_NO_GESTURE = 0x70000001,
+ OMX_TI_GESTURE_SWIPE_RIGHT,
+ OMX_TI_GESTURE_SWIPE_LEFT,
+ OMX_TI_GESTURE_FIST_RIGHT,
+ OMX_TI_GESTURE_FIST_LEFT,
+
+ OMX_TI_GESTURE_COUNT,
+ OMX_TI_GESTURE_MAX = 0x7FFFFFFF
+ } OMX_TI_GESTURES_TYPE;
+
+/**
+ * Enumeration describing the main gesture objects
+ */
+ typedef enum OMX_TI_OBJECT_TYPE {
+ OMX_TI_OBJECT_PALM,
+ OMX_TI_OBJECT_FIST,
+ OMX_TI_OBJECT_FACE,
+
+ OMX_TI_OBJECT_MAX = 0x7FFFFFFF
+ } OMX_TI_OBJECT_TYPE;
+
+/**
+ * Data structure carrying information about
+ * objects located at a certain area of frame buffer.
+ *
+ * @param nSize Size of the structure in bytes.
+ * @param nVersion OMX specification version information.
+ * @param nPortIndex Port index to which to apply.
+ * @param eType The object type.
+ * @param nTop The top coordinate.
+ * @param nLeft The left coordinate.
+ * @param nWidth The width of the object.
+ * @param nHeight The height of the object.
+ */
+ typedef struct OMX_CONFIG_OBJECT_RECT_TYPE {
+ OMX_U32 nSize;
+ OMX_VERSIONTYPE nVersion;
+ OMX_U32 nPortIndex;
+ OMX_TI_OBJECT_TYPE eType;
+ OMX_S32 nTop;
+ OMX_S32 nLeft;
+ OMX_U32 nWidth;
+ OMX_U32 nHeight;
+ } OMX_CONFIG_OBJECT_RECT_TYPE;
+
+/**
+ * Data structure carrying information about
+ * gestures detected at a certain frame.
+ *
+ * @param nSize Size of the structure in bytes.
+ * @param nVersion OMX specification version information.
+ * @param nPortIndex Port index to which to apply.
+ * @param nTimeStamp Frame id.
+ * @param eType Type of the gesture detected at that frame.
+ * @param nNumDetectedGestures Number of areas of the frame in which this gesture is detected.
+ * @param nGestureAreas The areas where this gesture is detected.
+ */
+ typedef struct OMX_TI_CONFIG_GESTURES_INFO {
+ OMX_U32 nSize;
+ OMX_VERSIONTYPE nVersion;
+ OMX_U32 nPortIndex;
+ OMX_TICKS nTimeStamp;
+ OMX_TI_GESTURES_TYPE eType;
+ OMX_U32 nNumDetectedGestures;
+ OMX_CONFIG_OBJECT_RECT_TYPE nGestureAreas[35];
+ } OMX_TI_CONFIG_GESTURES_INFO;
+
+/**
+* Define the frames queue len for ZSL
+*
+* STRUCT MEMBERS:
+* nSize: Size of the structure in bytes
+* nVersion: OMX specification version information
+* nHistoryLen: History len in number of frames
+*/
+ typedef struct OMX_TI_PARAM_ZSLHISTORYLENTYPE {
+ OMX_U32 nSize;
+ OMX_VERSIONTYPE nVersion;
+ OMX_U32 nHistoryLen;
+ } OMX_TI_PARAM_ZSLHISTORYLENTYPE;
+
+/**
+* Define the frame delay in ms for ZSL
+*
+* STRUCT MEMBERS:
+* nSize: Size of the structure in bytes
+* nVersion: OMX specification version information
+* nDelay: Capture frame delay in ms
+*/
+ typedef struct OMX_TI_CONFIG_ZSLDELAYTYPE {
+ OMX_U32 nSize;
+ OMX_VERSIONTYPE nVersion;
+ OMX_S32 nDelay;
+ } OMX_TI_CONFIG_ZSLDELAYTYPE;
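To make the ZSL parameters concrete, here is a short sketch that requests a five-frame history. The corresponding index entry is not shown in this excerpt, so the name used below is a placeholder and the value is purely illustrative.

/* Sketch: keep a 5-frame ZSL history. OMX_TI_IndexParamZslHistoryLen is a
   placeholder name; the actual enum entry is not part of this excerpt.
   Assumes the same includes as the earlier sketch. */
static OMX_ERRORTYPE setZslHistoryLen(OMX_HANDLETYPE hCamera, OMX_U32 nFrames)
{
    OMX_TI_PARAM_ZSLHISTORYLENTYPE history;

    memset(&history, 0, sizeof(history));
    history.nSize = sizeof(OMX_TI_PARAM_ZSLHISTORYLENTYPE);
    history.nVersion.s.nVersionMajor = 1;
    history.nVersion.s.nVersionMinor = 1;
    history.nHistoryLen = nFrames;   /* e.g. 5 */

    return OMX_SetParameter(hCamera,
                            (OMX_INDEXTYPE) OMX_TI_IndexParamZslHistoryLen,   /* hypothetical */
                            &history);
}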
+
+/**
+* AlgoAreas purpose
+* This type specifies the purpose of areas specified in OMX_ALGOAREASTYPE.
+*/
+ typedef enum OMX_ALGOAREAPURPOSE{
+ OMX_AlgoAreaFocus = 0, // Multi region focus
+ OMX_AlgoAreaExposure,
+ }OMX_ALGOAREAPURPOSE;
+
+ typedef struct OMX_ALGOAREA {
+ OMX_S32 nLeft; /**< The leftmost coordinate of the area rectangle */
+ OMX_S32 nTop; /**< The topmost coordinate of the area rectangle */
+ OMX_U32 nWidth; /**< The width of the area rectangle in pixels */
+ OMX_U32 nHeight; /**< The height of the area rectangle in pixels */
+ OMX_U32 nPriority; /**< Priority - ranges from 1 to 1000 */
+ }OMX_ALGOAREA;
+
+/**
+* Algorithm areas type
+* This type defines areas for Multi Region Focus,
+* or another algorithm region parameters,
+* such as Multi Region Auto Exposure.
+*
+* STRUCT MEMBERS:
+* nSize : Size of the structure in bytes
+* nVersion : OMX specification version information
+* nPortIndex : Port index
+* tAreaPosition : Area definition - coordinates and purpose - Multi Region Focus, Auto Exposure, etc.
+* nNumAreas : Number of areas defined in the array
+* nAlgoAreaPurpose : Algo area purpose - eg. Multi Region Focus is OMX_AlgoAreaFocus
+*/
+ typedef struct OMX_ALGOAREASTYPE {
+ OMX_U32 nSize;
+ OMX_VERSIONTYPE nVersion;
+ OMX_U32 nPortIndex;
+
+ OMX_U32 nNumAreas;
+ OMX_ALGOAREA tAlgoAreas[MAX_ALGOAREAS];
+ OMX_ALGOAREAPURPOSE nAlgoAreaPurpose;
+ } OMX_ALGOAREASTYPE;
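To make the area layout concrete, here is a sketch that fills the structure with a single centered focus rectangle. Neither the coordinate convention nor the index used to deliver the structure is defined in the lines shown here, so the -1000..1000 range and the delivery step are assumptions; only the fill is illustrated.

/* Sketch: describe one centered multi-region-focus rectangle. The -1000..1000
   coordinate range is assumed for illustration only. Assumes the same includes
   as the earlier sketch. */
static void fillCenterFocusArea(OMX_ALGOAREASTYPE *pAreas, OMX_U32 nPortIndex)
{
    memset(pAreas, 0, sizeof(*pAreas));
    pAreas->nSize = sizeof(OMX_ALGOAREASTYPE);
    pAreas->nVersion.s.nVersionMajor = 1;
    pAreas->nVersion.s.nVersionMinor = 1;
    pAreas->nPortIndex = nPortIndex;
    pAreas->nNumAreas = 1;
    pAreas->nAlgoAreaPurpose = OMX_AlgoAreaFocus;
    pAreas->tAlgoAreas[0].nLeft = -1000;   /* assumed normalized coordinates */
    pAreas->tAlgoAreas[0].nTop = -1000;
    pAreas->tAlgoAreas[0].nWidth = 2000;
    pAreas->tAlgoAreas[0].nHeight = 2000;
    pAreas->tAlgoAreas[0].nPriority = 1000; /* 1..1000 per the comment above */
}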
+
+/*==========================================================================*/
+/*!
+@brief OMX_TI_PARAM_ENHANCEDPORTRECONFIG : Support added for new port reconfig usage
+@param bUsePortReconfigForCrop Enables port reconfig for crop.
+@param bUsePortReconfigForPadding Enables port reconfig for padding
+*/
+/*==========================================================================*/
+
+typedef struct OMX_TI_PARAM_ENHANCEDPORTRECONFIG {
+ OMX_U32 nSize;
+ OMX_VERSIONTYPE nVersion;
+ OMX_U32 nPortIndex;
+ OMX_BOOL bUsePortReconfigForCrop;
+ OMX_BOOL bUsePortReconfigForPadding;
+} OMX_TI_PARAM_ENHANCEDPORTRECONFIG;
+
+
+typedef struct {
+ OMX_U16 nVPos; //!< AEWINSTART WINSV //AFPAXSTART PAXSV
+ OMX_U8 nVSize; //!< AEWWIN1 WINW //AFPAX1 PAXH
+ OMX_U16 nHPos; //!< AEWINSTART WINSH //AFPAXSTART PAXSH
+ OMX_U8 nHSize; //!< AEWWIN1 WINH //AFPAX1 PAXW
+ OMX_U8 nVCount; //!< AEWWIN1 WINVC //AFPAX2 PAXVC
+ OMX_U8 nVIncr; //!< AEWSUBWIN AEWINCV //AFPAX2 AFINCV
+ OMX_U8 nHCount; //!< AEWWIN1 WINHC //AFPAX2 PAXHC
+ OMX_U8 nHIncr; //!< AEWSUBWIN AEWINCH //AFPAX2 AFINCH
+}OMX_TI_H3aPAXELCFG;
+
+typedef struct {
+ /** Average value for red pixels in current paxel */
+ OMX_U16 red;
+ /** Average value for green pixels in current paxel */
+ OMX_U16 green;
+ /** Average value for blue pixels in current paxel */
+ OMX_U16 blue;
+ /** Flag indicating whether current paxel is valid 0:invalid, !0:valid */
+ OMX_U16 valid;
+} OMX_TI_H3AAEWBPAXELDATA;
+
+typedef struct OMX_TI_H3AAFDATA {
+ OMX_U32 nSize; /**< The size of the structure
+ including the length of data field containing the histogram data */
+ OMX_VERSIONTYPE nVersion;
+ OMX_U32 nPortIndex;
+ OMX_TI_CAMERAVIEWTYPE eCameraView;
+ OMX_U8 *data;
+} OMX_TI_H3AAFDATA;
+
+/**
+* Data structure carrying information about
+* VTC slice height.
+*
+* @param nSize Size of the structure in bytes.
+* @param nVersion OMX specification version information.
+* @param nSliceHeight Definition of slice height.
+* @param nInternalBuffers Number of internal buffers.
+* @param IonBufhdl ION buffer handles for the internal buffers.
+*/
+typedef struct OMX_TI_PARAM_VTCSLICE {
+ OMX_U32 nSize;
+ OMX_VERSIONTYPE nVersion;
+ OMX_U32 nSliceHeight;
+ OMX_U32 nInternalBuffers;
+ OMX_PTR IonBufhdl[2];
+} OMX_TI_PARAM_VTCSLICE;
+
+
+/**
+ * nSize is the size of the structure including the length of data field containing
+ * the histogram data.
+ * nBins is the number of bins in the histogram.
+ * eComponentType specifies the type of the histogram bins according to enum.
+ * It can be selected to generate multiple component types, then the extradata struct
+ * is repeated for each component type.
+ */
+typedef struct OMX_TI_HISTOGRAMTYPE {
+ OMX_U32 nSize; /**< The size of the structure
+ including the length of data field containing the histogram data */
+ OMX_VERSIONTYPE nVersion;
+ OMX_U32 nPortIndex;
+ OMX_TI_CAMERAVIEWTYPE eCameraView;
+ OMX_U32 nBins; /**< The number of bins in the histogram */
+ OMX_HISTCOMPONENTTYPE eComponentType; /**< Specifies the type of the histogram bins according to enum.
+ It can be selected to generate multiple component types,
+ then the extradata struct is repeated for each component type */
+ OMX_U8 *data;
+} OMX_TI_HISTOGRAMTYPE;
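A hedged sketch of locating this histogram payload in a returned buffer. It assumes the usual IL convention that 32-bit-aligned OMX_OTHER_EXTRADATATYPE blocks follow the filled payload, terminated by OMX_ExtraDataNone, and that a matching OMX_TI_Histogram entry exists in OMX_EXT_EXTRADATATYPE (that entry is outside the lines shown here).

/* Sketch: walk the extra-data section appended to a filled buffer and return
   the histogram payload, or NULL. Assumes <stddef.h>, <stdint.h>,
   <OMX_Core.h> and OMX_TI_IVCommon.h are included, and assumes the layout
   conventions described above. */
static OMX_TI_HISTOGRAMTYPE *findHistogramExtraData(OMX_BUFFERHEADERTYPE *pBufHdr)
{
    OMX_U8 *pStart = pBufHdr->pBuffer + pBufHdr->nOffset + pBufHdr->nFilledLen;
    OMX_OTHER_EXTRADATATYPE *pExtra =
        (OMX_OTHER_EXTRADATATYPE *) (((uintptr_t) pStart + 3) & ~(uintptr_t) 3);

    while (pExtra->eType != OMX_ExtraDataNone) {
        if ((OMX_EXT_EXTRADATATYPE) pExtra->eType == OMX_TI_Histogram)   /* assumed enum entry */
            return (OMX_TI_HISTOGRAMTYPE *) pExtra->data;
        pExtra = (OMX_OTHER_EXTRADATATYPE *) ((OMX_U8 *) pExtra + pExtra->nSize);
    }
    return NULL;
}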
+
+
+
+typedef struct OMX_TI_CPCAMDATA {
+ OMX_U32 nSize;
+ OMX_VERSIONTYPE nVersion;
+ OMX_U32 nPortIndex;
+ OMX_TI_CAMERAVIEWTYPE eCameraView;
+
+ /* Face Detect */
+ OMX_U16 ulFaceCount; // faces detected
+ OMX_TI_FACERESULT tFacePosition[35]; // 35 is max faces supported by FDIF
+
+
+ /**** Ancillary Data ******/
+ OMX_U32 nFrameNumber;
+ OMX_U16 nInputImageHeight;
+ OMX_U16 nInputImageWidth;
+ OMX_U16 nOutputImageHeight;
+ OMX_U16 nOutputImageWidth;
+ OMX_U16 nDigitalZoomFactor;
+ OMX_S16 nCropCenterColumn;
+ OMX_S16 nCropCenterRow;
+ OMX_U16 nOpticalZoomValue;
+ OMX_U8 nAFStatus;
+ OMX_U8 nAWBStatus;
+ OMX_U8 nAEStatus;
+ OMX_U32 nExposureTime;
+ OMX_U16 nEVCompensation;
+ OMX_U8 nDigitalGainValue;
+ OMX_U8 nAnalogGainValue;
+ OMX_U16 nCurrentISO;
+ OMX_U16 nReferenceISO;
+ OMX_U8 nApertureValue;
+ OMX_U8 nPixelRange;
+ OMX_U8 nCameraShake;
+ OMX_U8 nNumFacesDetected;
+
+ /* Not Yet Supported */
+ OMX_U16 nFocalDistance;
+ OMX_U16 nShotNumber;
+ OMX_U8 nFlashStatus;
+
+
+ /*** White Balance gains ****/
+ /**< White Balance Color Temperature in Kelvins */
+ OMX_U16 nColorTemperature;
+
+ /**< Bayer applied R color channel gain in (U13Q9) */
+ OMX_U16 nGainR;
+
+ /**< Bayer applied Gr color channel gain in (U13Q9) */
+ OMX_U16 nGainGR;
+
+ /**< Bayer applied Gb color channel gain in (U13Q9) */
+ OMX_U16 nGainGB;
+
+ /**< Bayer applied B color channel gain in (U13Q9) */
+ OMX_U16 nGainB;
+
+ /* BELOW ARE NOT SUPPORTED , Default set to 0 */
+ OMX_S16 nOffsetR; /**< Bayer applied R color channel offset */
+ OMX_S16 nOffsetGR; /**< Bayer applied Gr color channel offset */
+ OMX_S16 nOffsetGB; /**< Bayer applied Gb color channel offset */
+ OMX_S16 nOffsetB; /**< Bayer applied B color channel offset */
+
+
+ /* AEWB,AF,HIST data size */
+ OMX_U32 nAewbDataSize;
+ OMX_U32 nAfDataSize;
+ OMX_U32 nHistSize;
+
+
+
+ /*** H3A AF-AEW DATA ***/
+ OMX_TI_H3aPAXELCFG tAfPaxelWin;
+ OMX_TI_H3aPAXELCFG tAewbPaxelWin;
+ OMX_TI_H3AAEWBPAXELDATA *tpPaxel;
+ OMX_TI_H3AAFDATA tH3A_Af;
+ /* Histogram */
+ OMX_TI_HISTOGRAMTYPE Histogram;
+
+
+} OMX_TI_CPCAMDATA;
+
+/**
+* Start/Stop mechanical misalignment
+*
+* STRUCT MEMBERS:
+* nSize: Size of the structure in bytes
+* nVersion: OMX specification version information
+* nDelay: Capture frame delay in ms
+*/
+ typedef struct OMX_TI_CONFIG_MM {
+ OMX_U32 nSize;
+ OMX_VERSIONTYPE nVersion;
+ OMX_BOOL bMM;
+ } OMX_TI_CONFIG_MM;
+
+/**
+* Start/Stop Affine transformation for Mm/Ac
+*
+* STRUCT MEMBERS:
+* nSize: Size of the structure in bytes
+* nVersion: OMX specification version information
+* bAffine: Enable / Disable
+*/
+ typedef struct OMX_TI_PARAM_AFFINE {
+ OMX_U32 nSize;
+ OMX_VERSIONTYPE nVersion;
+ OMX_BOOL bAffine;
+ } OMX_TI_PARAM_AFFINE;
+
+/**
* A pointer to this struct is passed to the OMX_SetParameter when the extension
* index for the 'OMX.google.android.index.enableAndroidNativeBuffers' extension
* is given.
@@ -2561,91 +3284,329 @@ typedef struct OMX_TI_PARAMNATIVEBUFFERUSAGE {
OMX_U32 nUsage;
} OMX_TI_PARAMNATIVEBUFFERUSAGE;
-/*==========================================================================*/
-/*!
-@brief OMX_TI_PARAM_ENHANCEDPORTRECONFIG : Suport added to new port reconfig usage
-@param bUsePortReconfigForCrop Enables port reconfig for crop.
-@param bUsePortReconfigForPadding Enables port reconfig for padding
-*/
-/*==========================================================================*/
-
-typedef struct OMX_TI_PARAM_ENHANCEDPORTRECONFIG {
- OMX_U32 nSize;
- OMX_VERSIONTYPE nVersion;
- OMX_U32 nPortIndex;
- OMX_BOOL bUsePortReconfigForCrop;
- OMX_BOOL bUsePortReconfigForPadding;
-} OMX_TI_PARAM_ENHANCEDPORTRECONFIG;
+/**
+ * OMX_TI_ZSL_PRIORITY_TYPE Enumerated Value
+ */
+typedef enum OMX_TI_ZSL_PRIORITY_TYPE {
+ OMX_TI_ZSL_PRIORITY_TIME,
+ OMX_TI_ZSL_PRIORITY_FOCUS,
+ OMX_TI_ZSL_PRIORITY = 0x7FFFFFFF
+} OMX_TI_ZSL_PRIORITY_TYPE;
/**
-* Define the frames queue len for ZSL
+* Define the priority that will be used to select the ZSL frame
*
* STRUCT MEMBERS:
* nSize: Size of the structure in bytes
* nVersion: OMX specification version information
-* nHistoryLen: History len in number of frames
+* ePriority: Priority
*/
-typedef struct OMX_TI_PARAM_ZSLHISTORYLENTYPE {
- OMX_U32 nSize;
+typedef struct OMX_TI_CONFIG_ZSLFRAMESELECTPRIOTYPE {
+ OMX_U32 nSize;
+ OMX_VERSIONTYPE nVersion;
+ OMX_TI_ZSL_PRIORITY_TYPE ePriority;
+} OMX_TI_CONFIG_ZSLFRAMESELECTPRIOTYPE;
+
+/**
+* MIPI, ECC, and CRC counters
+* The MIPI counter counts the frames from the MIPI receiver (CSI_RX).
+* The TCMD application uses these counters
+* to validate the MIPI channel integrity (TX to RX).
+*
+* STRUCT MEMBERS:
+* nSize : Size of the structure in bytes
+* nVersion : OMX specification version information
+* nPortIndex : Port that this structure applies to
+* bResetMIPICounter : if OMX_SetConfig() is called with this field set to True,
+* the MIPI counter shall be reset to 0 by Ducati.
+* nMIPICounter : MIPI frame counter
+* nECCCounter : ECC counter
+* nCRCCounter : CRC counter
+*/
+typedef struct OMX_CONFIG_MIPICOUNTERS {
+ OMX_U32 nSize;
+ OMX_VERSIONTYPE nVersion;
+ OMX_U32 nPortIndex;
+ OMX_BOOL bResetMIPICounter;
+ OMX_U32 nMIPICounter;
+ OMX_U32 nECCCounter;
+ OMX_U32 nCRCCounter;
+ OMX_U32 nFifoOvfCounter;
+ OMX_U32 nOCPCounter;
+ OMX_U32 nEccCorrCounter;
+ OMX_U32 SoTErrCnt;
+ OMX_U32 SoTSyncErrCnt;
+ OMX_U32 ULPMCnt;
+ OMX_U32 ULPMExitCnt;
+ OMX_U32 ULPMEnterCnt;
+ OMX_U32 ControlErrCnt;
+ OMX_U32 ErrEscapeCnt;
+ OMX_U32 CSIRxTimeoutCnt;
+ OMX_U32 bStopStartCntrs;
+} OMX_CONFIG_MIPICOUNTERS;
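For illustration, a sketch that asks the receiver to clear its counters through this structure; the OMX_TI_INDEXTYPE entry for it is not part of this excerpt, so the index name below is a placeholder.

/* Sketch: reset the CSI2 receiver counters before a TCMD-style run.
   OMX_TI_IndexConfigMipiCounters is a placeholder name; the real enum entry
   is not shown here. Assumes the same includes as the earlier sketch. */
static OMX_ERRORTYPE resetMipiCounters(OMX_HANDLETYPE hCamera, OMX_U32 nPortIndex)
{
    OMX_CONFIG_MIPICOUNTERS counters;

    memset(&counters, 0, sizeof(counters));
    counters.nSize = sizeof(OMX_CONFIG_MIPICOUNTERS);
    counters.nVersion.s.nVersionMajor = 1;
    counters.nVersion.s.nVersionMinor = 1;
    counters.nPortIndex = nPortIndex;
    counters.bResetMIPICounter = OMX_TRUE;

    return OMX_SetConfig(hCamera,
                         (OMX_INDEXTYPE) OMX_TI_IndexConfigMipiCounters,   /* hypothetical */
                         &counters);
}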
+
+/**
+* CSI Timing Register
+*
+* STRUCT MEMBERS:
+* nSize : Size of the structure in bytes
+* nVersion : OMX specification version information
+* nPortIndex : Port that this structure applies to
+* nReadWrite : if OMX_SetConfig() is called with value True
+* for this parameter, the ISS_CAMERARX_CORE1_REG0 register will be
+* written with the supplied values below.
+* nThsSettle :
+* nThsTerm :
+* nHsClkCfg :
+*/
+typedef struct OMX_CONFIG_CSITIMINGRW {
+ OMX_U32 nSize;
OMX_VERSIONTYPE nVersion;
- OMX_U32 nHistoryLen;
-} OMX_TI_PARAM_ZSLHISTORYLENTYPE;
+ OMX_U32 nPortIndex;
+ OMX_U8 nReadWrite;
+ OMX_U8 nThsSettle;
+ OMX_U8 nThsTerm;
+ OMX_U8 nHsClkCfg;
+} OMX_CONFIG_CSITIMINGRW;
/**
-* Define the frame delay in ms for ZSL
+* CSI Complex IO Data
*
* STRUCT MEMBERS:
-* nSize: Size of the structure in bytes
-* nVersion: OMX specification version information
-* nDelay: Capture frame delay in ms
+* nSize : Size of the structure in bytes
+* nVersion : OMX specification version information
+* nPortIndex : Port that this structure applies to
+* nFrameCount : Received frames on the CSI2Rx
+* nLaneCount : Number of active lanes
+* nCSISpeed : CSI2Rx speed
*/
-typedef struct OMX_TI_CONFIG_ZSLDELAYTYPE {
- OMX_U32 nSize;
+typedef struct OMX_CONFIG_CSICMPXIO {
+ OMX_U32 nSize;
OMX_VERSIONTYPE nVersion;
- OMX_S32 nDelay;
-} OMX_TI_CONFIG_ZSLDELAYTYPE;
+ OMX_U32 nPortIndex;
+ OMX_U32 nFrameCount;
+ OMX_U32 nLaneCount;
+ OMX_U32 nCSISpeed;
+} OMX_CONFIG_CSICMPXIO;
/**
- * AlogAreas purpose
- * This type specifies the purpose of areas specified in OMX_ALGOAREASTYPE.
- * */
-typedef enum OMX_ALGOAREAPURPOSE{
- OMX_AlgoAreaFocus = 0, // Multi region focus
- OMX_AlgoAreaExposure,
-}OMX_ALGOAREAPURPOSE;
+ * Auto Focus Score
+ *
+ * STRUCT MEMBERS:
+ * nSize : Size of the structure in bytes
+ * nVersion : OMX specification version information
+ * nPortIndex : Port that this structure applies to
+ * nAutoFocusScore : Auto Focus Score
+ */
+typedef struct OMX_CONFIG_AUTOFOCUSSCORE {
+ OMX_U32 nSize;
+ OMX_VERSIONTYPE nVersion;
+ OMX_U32 nPortIndex;
+ OMX_U32 nAutoFocusScore;
+} OMX_CONFIG_AUTOFOCUSSCORE;
+
+/**
+ * Color Bar test pattern
+ *
+ * STRUCT MEMBERS:
+ * nSize : Size of the structure in bytes
+ * nVersion : OMX specification version information
+ * nPortIndex : Port that this structure applies to
+ * bEnableColorBars : Enable Color Bars test pattern
+ */
+typedef struct OMX_CONFIG_COLORBARS {
+ OMX_U32 nSize;
+ OMX_VERSIONTYPE nVersion;
+ OMX_U32 nPortIndex;
+ OMX_U32 bEnableColorBars;
+} OMX_CONFIG_COLORBARS;
+
+/**
+* Sensor OTP EEPROM data
+*
+* STRUCT MEMBERS:
+* nSize : Size of the structure in bytes
+* nVersion : OMX specification version information
+* nPortIndex : Port that this structure applies to
+* pData : pointer to the client's buffer
+* nDataSize : size of the EEPROM data in bytes
+* nClientDataSize : size of the client's buffer
+* SensorIndex : index of the eeprom buffer
+*/
+typedef struct OMX_CONFIG_OTPEEPROM {
+ OMX_U32 nSize;
+ OMX_VERSIONTYPE nVersion;
+ OMX_U32 nPortIndex;
+ OMX_PTR pData;
+ OMX_U32 nDataSize;
+ OMX_U32 nClientDataSize;
+ OMX_U8 SensorIndex;
+}OMX_CONFIG_OTPEEPROM;
-typedef struct OMX_ALGOAREA {
- OMX_S32 nLeft; /**< The leftmost coordinate of the area rectangle */
- OMX_S32 nTop; /**< The topmost coordinate of the area rectangle */
- OMX_U32 nWidth; /**< The width of the area rectangle in pixels */
- OMX_U32 nHeight; /**< The height of the area rectangle in pixels */
- OMX_U32 nPriority; /**< Priority - ranges from 1 to 1000 */
-}OMX_ALGOAREA;
+/**
+ * The OMX_ISP_TYPE enumeration is used to define the
+ * TI ISP & ST ISP types.
+ */
+typedef enum OMX_ISP_TYPE {
+ OMX_TIISP = 0,
+ OMX_STISP= 1,
+ OMX_ISPUnknown
+} OMX_ISP_TYPE;
/**
- * Algorythm areas type
- * This type defines areas for Multi Region Focus,
- * or another algorithm region parameters,
- * such as Multi Region Auto Exposure.
+* ISP Information
+*
+* STRUCT MEMBERS:
+* nSize : Size of the structure in bytes
+* nVersion : OMX specification version information
+* nPortIndex : Port that this structure applies to
+* eIspType : ISP Type (TI ISP/ ST ISP)
+* nHardwareVersion : Hardware version of ISP
+* nSoftwareVersion : Software version of ISP
+*/
+typedef struct OMX_CONFIG_ISPINFO {
+ OMX_U32 nSize;
+ OMX_VERSIONTYPE nVersion;
+ OMX_U32 nPortIndex;
+ OMX_ISP_TYPE eIspType;
+ OMX_U32 nHardwareVersion;
+ OMX_U32 nSoftwareVersion;
+ OMX_S8 cDucatiVersion[32];
+}OMX_CONFIG_ISPINFO;
+
+typedef enum OMX_TI_PORTTAPPOINTTYPE {
+ OMX_TI_PortTap_Bayer_SensorOutput,
+ OMX_TI_PortTap_Bayer_PostLsc,
+ OMX_TI_PortTap_Bayer_PreBayerToYUVConversion,
+ OMX_TI_PortTap_YUV_PostBayerToYUVConversion,
+ OMX_TI_PortTap_YUV_PreJPEGCompression,
+ OMX_TI_PortTap = 0x7FFFFFFF
+} OMX_TI_PORTTAPPOINTTYPE;
+
+/**
+ * Define configuration structure for
+ * tap in/out points for the selected port
*
* STRUCT MEMBERS:
- * nSize : Size of the structure in bytes
- * nVersion : OMX specification version information
- * nPortIndex : Port index
- * tAreaPosition : Area definition - coordinates and purpose - Multi Region Focus, Auto Exposure, etc.
- * nNumAreas : Number of areas defined in the array
- * nAlgoAreaPurpose : Algo area purpose - eg. Multi Region Focus is OMX_AlgoAreaFocus
- */
-typedef struct OMX_ALGOAREASTYPE {
- OMX_U32 nSize;
+ * nSize : Size of the structure in bytes
+ * nVersion : OMX specification version information
+ * nPortIndex : Port that this structure applies to
+ * eTapPoint : Select the tap in/out point for the port
+ */
+typedef struct OMX_TI_CONFIG_PORTTAPPOINTTYPE {
+ OMX_U32 nSize;
+ OMX_VERSIONTYPE nVersion;
+ OMX_U32 nPortIndex;
+ OMX_TI_PORTTAPPOINTTYPE eTapPoint;
+} OMX_TI_CONFIG_PORTTAPPOINTTYPE;
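A short sketch of selecting a tap point with this structure, here the post-LSC Bayer tap; the index name used is a placeholder, since the corresponding OMX_TI_INDEXTYPE entry is not in this excerpt.

/* Sketch: tap the port after lens-shading correction, e.g. to pull post-LSC
   Bayer data. OMX_TI_IndexConfigPortTapPoint is a placeholder name. Assumes
   the same includes as the earlier sketch. */
static OMX_ERRORTYPE tapPostLsc(OMX_HANDLETYPE hCamera, OMX_U32 nPortIndex)
{
    OMX_TI_CONFIG_PORTTAPPOINTTYPE tap;

    memset(&tap, 0, sizeof(tap));
    tap.nSize = sizeof(OMX_TI_CONFIG_PORTTAPPOINTTYPE);
    tap.nVersion.s.nVersionMajor = 1;
    tap.nVersion.s.nVersionMinor = 1;
    tap.nPortIndex = nPortIndex;
    tap.eTapPoint = OMX_TI_PortTap_Bayer_PostLsc;

    return OMX_SetConfig(hCamera,
                         (OMX_INDEXTYPE) OMX_TI_IndexConfigPortTapPoint,   /* hypothetical */
                         &tap);
}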
+
+/**
+ * Available methods to apply vect shot exposure and gain
+ */
+typedef enum OMX_TI_EXPGAINAPPLYMETHODTYPE {
+ OMX_TI_EXPGAINAPPLYMETHOD_ABSOLUTE,
+ OMX_TI_EXPGAINAPPLYMETHOD_RELATIVE,
+ OMX_TI_EXPGAINAPPLYMETHOD_FORCE_RELATIVE,
+ OMX_TI_EXPGAINAPPLYMETHOD_FORCE_ABSOLUTE,
+ OMX_TI_EXPGAINAPPLYMETHOD = 0x7FFFFFFF
+} OMX_TI_EXPGAINAPPLYMETHODTYPE;
+
+/**
+ * Define configuration structure for
+ * shot configuration for the selected port
+ *
+ * STRUCT MEMBERS:
+ * nSize : Size of the structure in bytes
+ * nVersion : OMX specification version information
+ * nPortIndex : Port that this structure applies to
+ * nConfigId : A unique config identification number that will be
+ * put in ancillary data for the corresponding output frame
+ * nFrames : Number of sequential frames that will use this
+ * configuration
+ * nEC : Total exposure compensation value
+ * nExp : Exposure value for this configuration slot
+ * nGain : Gain value for this configuration slot
+ * eExpGainApplyMethod : Selects the method which will be used to apply exposure and gain
+ * bNoSnapshot : Determines whether a snapshot image will be sent
+ * on the preview port for this shot config
+ */
+typedef struct OMX_TI_CONFIG_SHOTCONFIG {
+ OMX_U32 nConfigId;
+ OMX_U32 nFrames;
+ OMX_S32 nEC;
+ OMX_S32 nExp;
+ OMX_S32 nGain;
+ OMX_TI_EXPGAINAPPLYMETHODTYPE eExpGainApplyMethod;
+ OMX_BOOL bNoSnapshot;
+} OMX_TI_CONFIG_SHOTCONFIG;
+
+/**
+ * Define configuration structure for
+ * shot configuration vector for the selected port
+ *
+ * STRUCT MEMBERS:
+ * nSize : Size of the structure in bytes
+ * nVersion : OMX specification version information
+ * nPortIndex : Port that this structure applies to
+ * bFlushQueue : If TRUE: Flush queue and abort processing before enqueuing
+ * new shot configurations
+ * nNumConfigs : Number of valid configurations in the nShotConfig array
+ * nShotConfig : Array of shot configurations
+ * nSlotsAvilable : Return value with number of available slots in the queue
+ */
+typedef struct OMX_TI_CONFIG_ENQUEUESHOTCONFIGS {
+ OMX_U32 nSize;
+ OMX_VERSIONTYPE nVersion;
+ OMX_U32 nPortIndex;
+ OMX_BOOL bFlushQueue;
+ OMX_U32 nNumConfigs;
+ OMX_TI_CONFIG_SHOTCONFIG nShotConfig[5];
+} OMX_TI_CONFIG_ENQUEUESHOTCONFIGS;
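To show how the queue is meant to be used, here is a sketch that enqueues a two-shot bracket (metered exposure and roughly +1 EV) without flushing what is already queued. The index name is a placeholder, and the Q16 interpretation of nEC is an assumption used only for illustration.

/* Sketch: enqueue two relative shot configurations.
   OMX_TI_IndexConfigEnqueueShotConfigs is a placeholder name, and treating
   nEC as Q16 EV is an assumption. Assumes the same includes as the earlier sketch. */
static OMX_ERRORTYPE enqueueBracket(OMX_HANDLETYPE hCamera, OMX_U32 nCapturePort)
{
    OMX_TI_CONFIG_ENQUEUESHOTCONFIGS shots;

    memset(&shots, 0, sizeof(shots));
    shots.nSize = sizeof(OMX_TI_CONFIG_ENQUEUESHOTCONFIGS);
    shots.nVersion.s.nVersionMajor = 1;
    shots.nVersion.s.nVersionMinor = 1;
    shots.nPortIndex = nCapturePort;
    shots.bFlushQueue = OMX_FALSE;
    shots.nNumConfigs = 2;

    shots.nShotConfig[0].nConfigId = 1;
    shots.nShotConfig[0].nFrames = 1;
    shots.nShotConfig[0].nEC = 0;                    /* metered exposure */
    shots.nShotConfig[0].eExpGainApplyMethod = OMX_TI_EXPGAINAPPLYMETHOD_RELATIVE;
    shots.nShotConfig[0].bNoSnapshot = OMX_FALSE;

    shots.nShotConfig[1].nConfigId = 2;
    shots.nShotConfig[1].nFrames = 1;
    shots.nShotConfig[1].nEC = 1 << 16;              /* +1 EV, assuming Q16 */
    shots.nShotConfig[1].eExpGainApplyMethod = OMX_TI_EXPGAINAPPLYMETHOD_RELATIVE;
    shots.nShotConfig[1].bNoSnapshot = OMX_FALSE;

    return OMX_SetConfig(hCamera,
                         (OMX_INDEXTYPE) OMX_TI_IndexConfigEnqueueShotConfigs,   /* hypothetical */
                         &shots);
}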
+
+/**
+ * Define configuration structure to
+ * query available/free shots in shot queue.
+ * Will be supported only as GetConfig function.
+ *
+ * STRUCT MEMBERS:
+ * nSize : Size of the structure in bytes
+ * nVersion : OMX specification version information
+ * nPortIndex : Port that this structure applies to
+ * nAvailableShots : Number of available shots
+ */
+typedef struct OMX_TI_CONFIG_QUERYAVAILABLESHOTS {
+ OMX_U32 nSize;
OMX_VERSIONTYPE nVersion;
- OMX_U32 nPortIndex;
+ OMX_U32 nPortIndex;
+ OMX_U32 nAvailableShots;
+} OMX_TI_CONFIG_QUERYAVAILABLESHOTS;
- OMX_U32 nNumAreas;
- OMX_ALGOAREA tAlgoAreas[MAX_ALGOAREAS];
- OMX_ALGOAREAPURPOSE nAlgoAreaPurpose;
-} OMX_ALGOAREASTYPE;
+/**
+ * Available vector shot capture stop methods
+ */
+typedef enum OMX_TI_VECTSHOTSTOPMETHOD {
+ OMX_TI_VECTSHOTSTOPMETHOD_GOTO_PREVIEW,
+ OMX_TI_VECTSHOTSTOPMETHOD_WAIT_IN_CAPTURE,
+ OMX_TI_VECTSHOTSTOPMETHOD_MAX = 0x7FFFFFFF
+} OMX_TI_VECTSHOTSTOPMETHOD;
+/**
+ * Define configuration structure to
+ * specify the behavior of vector shot capture
+ * when the shot queue is empty
+ *
+ * STRUCT MEMBERS:
+ * nSize : Size of the structure in bytes
+ * nVersion : OMX specification version information
+ * nPortIndex : Port that this structure applies to
+ * eStopMethod : Select the stop method
+ */
+typedef struct OMX_TI_CONFIG_VECTSHOTSTOPMETHODTYPE {
+ OMX_U32 nSize;
+ OMX_VERSIONTYPE nVersion;
+ OMX_U32 nPortIndex;
+ OMX_TI_VECTSHOTSTOPMETHOD eStopMethod;
+} OMX_TI_CONFIG_VECTSHOTSTOPMETHODTYPE;
#ifdef __cplusplus
diff --git a/domx/omx_core/inc/OMX_TI_Image.h b/domx/omx_core/inc/OMX_TI_Image.h
index 4035218..88f7b26 100755
--- a/domx/omx_core/inc/OMX_TI_Image.h
+++ b/domx/omx_core/inc/OMX_TI_Image.h
@@ -53,10 +53,11 @@
* The OMX_TI_IMAGE_CODINGTYPE enumeration is used to define the
* extended image coding types.
*/
-typedef enum OMX_TI_IMAGE_CODINGTYPE
-{
+typedef enum OMX_TI_IMAGE_CODINGTYPE {
OMX_TI_IMAGE_CodingJPS = OMX_IMAGE_CodingVendorStartUnused + 1, /**< JPS format */
- OMX_TI_IMAGE_CodingMPO /**< MPO format */
+ OMX_TI_IMAGE_CodingMPO, /**< MPO format */
+ OMX_TI_IMAGE_CodingJPEG_SOC,
+ OMX_TI_IMAGE_CodingJPEG_SOC_SPOOFED
} OMX_TI_IMAGE_CODINGTYPE;
#endif /* OMX_TI_IMAGE_H */
diff --git a/domx/omx_core/inc/OMX_TI_Index.h b/domx/omx_core/inc/OMX_TI_Index.h
index 61b1e27..3a50ff1 100755
--- a/domx/omx_core/inc/OMX_TI_Index.h
+++ b/domx/omx_core/inc/OMX_TI_Index.h
@@ -57,6 +57,7 @@ extern "C" {
******************************************************************/
#include <OMX_Types.h>
+
/*******************************************************************
* EXTERNAL REFERENCE NOTE: only use if not found in header file
*******************************************************************/
@@ -73,160 +74,193 @@ typedef enum OMX_TI_INDEXTYPE {
OMX_IndexConfigAutoPauseAfterCapture = OMX_IndexAutoPauseAfterCapture,
/* Vendor specific area for storing indices */
+
+ /*Common Indices*/
OMX_TI_IndexConfigChannelName = ((OMX_INDEXTYPE)OMX_IndexVendorStartUnused + 1), /**< reference: OMX_CONFIG_CHANNELNAME */
- OMX_TI_IndexParamJPEGUncompressedMode, /**< reference: OMX_JPEG_PARAM_UNCOMPRESSEDMODETYPE */
- OMX_TI_IndexParamJPEGCompressedMode, /**< reference: OMX_JPEG_PARAM_COMPRESSEDMODETYPE */
- OMX_TI_IndexParamDecodeSubregion, /**< reference: OMX_IMAGE_PARAM_DECODE_SUBREGION */
+ OMX_TI_IndexParamJPEGUncompressedMode, /**< 0x7F000002 reference: OMX_JPEG_PARAM_UNCOMPRESSEDMODETYPE */
+ OMX_TI_IndexParamJPEGCompressedMode, /**< 0x7F000003 reference: OMX_JPEG_PARAM_COMPRESSEDMODETYPE */
+ OMX_TI_IndexParamDecodeSubregion, /**< 0x7F000004 reference: OMX_IMAGE_PARAM_DECODE_SUBREGION */
/* H264 Encoder Indices*/
- OMX_TI_IndexParamVideoDataSyncMode, //!< Refer to OMX_VIDEO_PARAM_DATASYNCMODETYPE structure
- OMX_TI_IndexParamVideoNALUsettings, //!< use OMX_VIDEO_PARAM_AVCNALUCONTROLTYPE to configure the type os NALU to send along with the Different Frame Types
- OMX_TI_IndexParamVideoMEBlockSize, //!< use OMX_VIDEO_PARAM_MEBLOCKSIZETYPE to specify the minimum block size used for motion estimation
- OMX_TI_IndexParamVideoIntraPredictionSettings, //!< use OMX_VIDEO_PARAM_INTRAPREDTYPE to configure the intra prediction modes used for different block sizes
- OMX_TI_IndexParamVideoEncoderPreset, //!< use OMX_VIDEO_PARAM_ENCODER_PRESETTYPE to select the encoding mode & rate control preset
- OMX_TI_IndexParamVideoFrameDataContentSettings, //!< use OMX_TI_VIDEO_PARAM_FRAMEDATACONTENTTYPE to configure the data content tpye
- OMX_TI_IndexParamVideoTransformBlockSize, //!< use OMX_VIDEO_PARAM_TRANSFORM_BLOCKSIZETYPE to specify the block size used for ttransformation
- OMX_TI_IndexParamVideoVUIsettings, //!use OMX_VIDEO_PARAM_VUIINFOTYPE
- OMX_TI_IndexParamVideoAdvancedFMO,
- OMX_TI_IndexConfigVideoPixelInfo, //!< Use OMX_VIDEO_CONFIG_PIXELINFOTYPE structure to know the pixel aspectratio & pixel range
- OMX_TI_IndexConfigVideoMESearchRange, //!< use OMX_VIDEO_CONFIG_MESEARCHRANGETYPE to specify the ME Search settings
- OMX_TI_IndexConfigVideoQPSettings, //!< use OMX_TI_VIDEO_CONFIG_QPSETTINGS to specify the ME Search settings
- OMX_TI_IndexConfigSliceSettings, //!<use OMX_VIDEO_CONFIG_SLICECODINGTYPE to specify the ME Search settings
- OMX_TI_IndexParamAVCInterlaceSettings, //!< use OMX_TI_VIDEO_PARAM_AVCINTERLACECODING to specify the ME Search settings
- OMX_TI_IndexParamStereoInfo2004Settings, //!< use OMX_TI_VIDEO_AVCENC_STEREOINFO2004 to specify the 2004 SEI for AVC Encoder
- OMX_TI_IndexParamStereoFramePacking2010Settings, //!< use OMX_TI_VIDEO_AVCENC_FRAMEPACKINGINFO2010 to specify 2010 SEI for AVC Encoder
-
+ OMX_TI_IndexParamVideoDataSyncMode, /**< 0x7F000005 Refer to OMX_VIDEO_PARAM_DATASYNCMODETYPE structure */
+ OMX_TI_IndexParamVideoNALUsettings, /**< 0x7F000006 use OMX_VIDEO_PARAM_AVCNALUCONTROLTYPE to configure the type of NALU to send along with the Different Frame Types */
+ OMX_TI_IndexParamVideoMEBlockSize, /**< 0x7F000007 use OMX_VIDEO_PARAM_MEBLOCKSIZETYPE to specify the minimum block size used for motion estimation */
+ OMX_TI_IndexParamVideoIntraPredictionSettings, /**< 0x7F000008 use OMX_VIDEO_PARAM_INTRAPREDTYPE to configure the intra prediction modes used for different block sizes */
+ OMX_TI_IndexParamVideoEncoderPreset, /**< 0x7F000009 use OMX_VIDEO_PARAM_ENCODER_PRESETTYPE to select the encoding mode & rate control preset */
+ OMX_TI_IndexParamVideoFrameDataContentSettings, /**< 0x7F00000A use OMX_TI_VIDEO_PARAM_FRAMEDATACONTENTTYPE to configure the data content type */
+ OMX_TI_IndexParamVideoTransformBlockSize, /**< 0x7F00000B use OMX_VIDEO_PARAM_TRANSFORM_BLOCKSIZETYPE to specify the block size used for transformation */
+ OMX_TI_IndexParamVideoVUIsettings, /**< 0x7F00000C use OMX_VIDEO_PARAM_VUIINFOTYPE */
+ OMX_TI_IndexParamVideoAdvancedFMO, /**< 0x7F00000D reference: TODO: */
+ OMX_TI_IndexConfigVideoPixelInfo, /**< 0x7F00000E Use OMX_VIDEO_CONFIG_PIXELINFOTYPE structure to know the pixel aspect ratio & pixel range */
+ OMX_TI_IndexConfigVideoMESearchRange, /**< 0x7F00000F use OMX_VIDEO_CONFIG_MESEARCHRANGETYPE to specify the ME Search settings */
+ OMX_TI_IndexConfigVideoQPSettings, /**< 0x7F000010 use OMX_TI_VIDEO_CONFIG_QPSETTINGS to specify the QP settings */
+ OMX_TI_IndexConfigSliceSettings, /**< 0x7F000011 use OMX_VIDEO_CONFIG_SLICECODINGTYPE to specify the slice coding settings */
+ OMX_TI_IndexParamAVCInterlaceSettings, /**< 0x7F000012 use OMX_TI_VIDEO_PARAM_AVCINTERLACECODING to specify the interlace settings for AVC encoder */
+ OMX_TI_IndexParamStereoInfo2004Settings, /**< 0x7F000013 use OMX_TI_VIDEO_AVCENC_STEREOINFO2004 to specify the 2004 SEI for AVC Encoder */
+ OMX_TI_IndexParamStereoFramePacking2010Settings, /**< 0x7F000014 use OMX_TI_VIDEO_AVCENC_FRAMEPACKINGINFO2010 to specify the 2010 SEI for AVC Encoder */
/* Camera Indices */
- OMX_TI_IndexConfigSensorSelect, /**< reference: OMX_CONFIG_SENSORSELECTTYPE */
- OMX_IndexConfigFlickerCancel, /**< reference: OMX_CONFIG_FLICKERCANCELTYPE */
- OMX_IndexConfigSensorCal, /**< reference: OMX_CONFIG_SENSORCALTYPE */
- OMX_IndexConfigISOSetting, /**< reference: OMX_CONFIG_ISOSETTINGTYPE */
- OMX_TI_IndexConfigSceneMode, /**< reference: OMX_CONFIG_SCENEMODETYPE */
-
- OMX_IndexConfigDigitalZoomSpeed, /**< reference: OMX_CONFIG_DIGITALZOOMSPEEDTYPE */
- OMX_IndexConfigDigitalZoomTarget, /**< reference: OMX_CONFIG_DIGITALZOOMTARGETTYPE */
-
- OMX_IndexConfigCommonScaleQuality, /**< reference: OMX_CONFIG_SCALEQUALITYTYPE */
-
- OMX_IndexConfigCommonDigitalZoomQuality, /**< reference: OMX_CONFIG_SCALEQUALITYTYPE */
-
- OMX_IndexConfigOpticalZoomSpeed, /**< reference: OMX_CONFIG_DIGITALZOOMSPEEDTYPE */
- OMX_IndexConfigOpticalZoomTarget, /**< reference: OMX_CONFIG_DIGITALZOOMTARGETTYPE */
-
- OMX_IndexConfigSmoothZoom, /**< reference: OMX_CONFIG_SMOOTHZOOMTYPE */
-
- OMX_IndexConfigBlemish, /**< reference: OMX_CONFIG_BLEMISHTYPE */
-
- OMX_IndexConfigExtCaptureMode, /**< reference: OMX_CONFIG_EXTCAPTUREMODETYPE */
- OMX_IndexConfigExtPrepareCapturing, /**< reference : OMX_CONFIG_BOOLEANTYPE */
- OMX_IndexConfigExtCapturing, /**< reference : OMX_CONFIG_EXTCAPTURING */
-
- OMX_IndexCameraOperatingMode, /**< OMX_CONFIG_CAMOPERATINGMODETYPE */
- OMX_IndexConfigDigitalFlash, /**< reference: OMX_CONFIG_BOOLEANTYPE */
- OMX_IndexConfigPrivacyIndicator, /**< reference: OMX_CONFIG_BOOLEANTYPE */
-
- OMX_IndexConfigTorchMode, /**< reference: OMX_CONFIG_TORCHMODETYPE */
-
- OMX_IndexConfigSlowSync, /**< reference: OMX_CONFIG_BOOLEANTYPE */
-
- OMX_IndexConfigExtFocusRegion, /**< reference : OMX_CONFIG_EXTFOCUSREGIONTYPE */
- OMX_IndexConfigFocusAssist, /**< reference: OMX_CONFIG_BOOLEANTYPE */
-
- OMX_IndexConfigImageFocusLock, /**< reference: OMX_IMAGE_CONFIG_LOCKTYPE */
- OMX_IndexConfigImageWhiteBalanceLock, /**< reference: OMX_IMAGE_CONFIG_LOCKTYPE */
- OMX_IndexConfigImageExposureLock, /**< reference: OMX_IMAGE_CONFIG_LOCKTYPE */
- OMX_IndexConfigImageAllLock, /**< reference: OMX_IMAGE_CONFIG_LOCKTYPE */
-
- OMX_IndexConfigImageDeNoiseLevel, /**< reference: OMX_IMAGE_CONFIG_PROCESSINGLEVELTYPE */
- OMX_IndexConfigSharpeningLevel, /**< reference: OMX_IMAGE_CONFIG_PROCESSINGLEVELTYPE */
- OMX_IndexConfigDeBlurringLevel, /**< reference: OMX_IMAGE_CONFIG_PROCESSINGLEVELTYPE */
- OMX_IndexConfigChromaCorrection, /**< reference: OMX_IMAGE_CONFIG_PROCESSINGLEVELTYPE */
- OMX_IndexConfigDeMosaicingLevel, /**< reference: OMX_IMAGE_CONFIG_PROCESSINGLEVELTYPE */
-
- OMX_IndexConfigCommonWhiteBalanceGain, /**< reference: OMX_CONFIG_WHITEBALGAINTYPE */
-
- OMX_IndexConfigCommonRGB2RGB, /**< reference: OMX_CONFIG_COLORCONVERSIONTYPE_II */
- OMX_IndexConfigCommonRGB2YUV, /**< reference: OMX_CONFIG_COLORCONVERSIONTYPE_II */
- OMX_IndexConfigCommonYUV2RGB, /**< reference : OMX_CONFIG_EXT_COLORCONVERSIONTYPE */
-
- OMX_IndexConfigCommonGammaTable, /**< reference: OMX_CONFIG_GAMMATABLETYPE */
-
- OMX_IndexConfigImageFaceDetection, /**< reference: OMX_CONFIG_OBJDETECTIONTYPE */
- OMX_IndexConfigImageBarcodeDetection, /**< reference: OMX_CONFIG_EXTRADATATYPE */
- OMX_IndexConfigImageSmileDetection, /**< reference: OMX_CONFIG_OBJDETECTIONTYPE */
- OMX_IndexConfigImageBlinkDetection, /**< reference: OMX_CONFIG_OBJDETECTIONTYPE */
- OMX_IndexConfigImageFrontObjectDetection, /**< reference: OMX_CONFIG_EXTRADATATYPE */
- OMX_IndexConfigHistogramMeasurement, /**< reference: OMX_CONFIG_HISTOGRAMTYPE */
- OMX_IndexConfigDistanceMeasurement, /**< reference: OMX_CONFIG_EXTRADATATYPE */
-
- OMX_IndexConfigSnapshotToPreview, /**< reference: OMX_CONFIG_BOOLEANTYPE */
-
- OMX_IndexConfigJpegHeaderType , /**< reference : OMX_CONFIG_JPEGHEEADERTYPE */
- OMX_IndexParamJpegMaxSize, /**< reference: OMX_IMAGE_JPEGMAXSIZE */
-
- OMX_IndexConfigRestartMarker, /**< reference: OMX_CONFIG_BOOLEANTYPE */
-
- OMX_IndexParamImageStampOverlay, /**< reference: OMX_PARAM_IMAGESTAMPOVERLAYTYPE */
- OMX_IndexParamThumbnail, /**< reference: OMX_PARAM_THUMBNAILTYPE */
- OMX_IndexConfigImageStabilization, /**< reference: OMX_CONFIG_BOOLEANTYPE */
- OMX_IndexConfigMotionTriggeredImageStabilisation, /**< reference : OMX_CONFIG_BOOLEANTYPE */
- OMX_IndexConfigRedEyeRemoval, /**< reference: OMX_CONFIG_REDEYEREMOVALTYPE */
- OMX_IndexParamHighISONoiseFiler, /**< reference: OMX_CONFIG_BOOLEANTYPE */
- OMX_IndexParamLensDistortionCorrection, /**< reference: OMX_CONFIG_BOOLEANTYPE */
- OMX_IndexParamLocalBrightnessAndContrast, /**< reference: OMX_CONFIG_BOOLEANTYPE */
- OMX_IndexConfigChromaticAberrationCorrection, /**< reference: OMX_CONFIG_BOOLEANTYPE */
- OMX_IndexParamVideoCaptureYUVRange, /**< reference: OMX_PARAM_VIDEOYUVRANGETYPE */
-
- OMX_IndexConfigFocusRegion, /**< reference: OMX_CONFIG_EXTFOCUSREGIONTYPE */
- OMX_IndexConfigImageMotionEstimation, /**< reference: OMX_CONFIG_OBJDETECTIONTYPE */
- OMX_IndexParamProcessingOrder, /**< reference: OMX_CONFIGPROCESSINGORDERTYPE */
- OMX_IndexParamFrameStabilisation, /**< reference: OMX_CONFIG_BOOLEANTYPE */
- OMX_IndexParamVideoNoiseFilter, /**< reference: OMX_PARAM_VIDEONOISEFILTERTYPE */
-
- OMX_IndexConfigOtherExtraDataControl, /**< reference: OMX_CONFIG_EXTRADATATYPE */
- OMX_TI_IndexParamBufferPreAnnouncement, /**< reference: OMX_TI_PARAM_BUFFERPREANNOUNCE */
- OMX_TI_IndexConfigBufferRefCountNotification, /**< reference: OMX_TI_CONFIG_BUFFERREFCOUNTNOTIFYTYPE */
- OMX_TI_IndexParam2DBufferAllocDimension, /**< reference: OMX_CONFIG_RECTTYPE */
- OMX_TI_IndexConfigWhiteBalanceManualColorTemp, /**< reference: OMX_TI_CONFIG_WHITEBALANCECOLORTEMPTPYPE */
- OMX_TI_IndexConfigFocusSpotWeighting, /**< reference: OMX_TI_CONFIG_FOCUSSPOTWEIGHTINGTYPE */
- OMX_TI_IndexParamSensorOverClockMode, /**< reference: OMX_CONFIG_BOOLEANTYPE */
- OMX_TI_IndexParamDccUriInfo, /**< reference: OMX_TI_PARAM_DCCURIINFO */
- OMX_TI_IndexParamDccUriBuffer, /**< reference: OMX_TI_PARAM_DCCURIBUFFER */
+ OMX_TI_IndexConfigSensorSelect, /**< 0x7F000015 reference: OMX_CONFIG_SENSORSELECTTYPE */
+ OMX_IndexConfigFlickerCancel, /**< 0x7F000016 reference: OMX_CONFIG_FLICKERCANCELTYPE */
+ OMX_IndexConfigSensorCal, /**< 0x7F000017 reference: OMX_CONFIG_SENSORCALTYPE */
+ OMX_IndexConfigISOSetting, /**< 0x7F000018 reference: OMX_CONFIG_ISOSETTINGTYPE */
+ OMX_TI_IndexConfigSceneMode, /**< 0x7F000019 reference: OMX_CONFIG_SCENEMODETYPE */
+ OMX_IndexConfigDigitalZoomSpeed, /**< 0x7F00001A reference: OMX_CONFIG_DIGITALZOOMSPEEDTYPE */
+ OMX_IndexConfigDigitalZoomTarget, /**< 0x7F00001B reference: OMX_CONFIG_DIGITALZOOMTARGETTYPE */
+ OMX_IndexConfigCommonScaleQuality, /**< 0x7F00001C reference: OMX_CONFIG_SCALEQUALITYTYPE */
+ OMX_IndexConfigCommonDigitalZoomQuality, /**< 0x7F00001D reference: OMX_CONFIG_SCALEQUALITYTYPE */
+ OMX_IndexConfigOpticalZoomSpeed, /**< 0x7F00001E reference: OMX_CONFIG_DIGITALZOOMSPEEDTYPE */
+ OMX_IndexConfigOpticalZoomTarget, /**< 0x7F00001F reference: OMX_CONFIG_DIGITALZOOMTARGETTYPE */
+ OMX_IndexConfigSmoothZoom, /**< 0x7F000020 reference: OMX_CONFIG_SMOOTHZOOMTYPE */
+ OMX_IndexConfigBlemish, /**< 0x7F000021 reference: OMX_CONFIG_BLEMISHTYPE */
+ OMX_IndexConfigExtCaptureMode, /**< 0x7F000022 reference: OMX_CONFIG_EXTCAPTUREMODETYPE */
+ OMX_IndexConfigExtPrepareCapturing, /**< 0x7F000023 reference: OMX_CONFIG_BOOLEANTYPE */
+ OMX_IndexConfigExtCapturing, /**< 0x7F000024 reference: OMX_CONFIG_EXTCAPTURING */
+
+ OMX_IndexCameraOperatingMode, /**< 0x7F000025 reference: OMX_CONFIG_CAMOPERATINGMODETYPE */
+ OMX_IndexParamCameraOperatingMode = OMX_IndexCameraOperatingMode, /**< 0x7F000025 reference: OMX_CONFIG_CAMOPERATINGMODETYPE */
+
+ OMX_IndexConfigDigitalFlash, /**< 0x7F000026 reference: OMX_CONFIG_BOOLEANTYPE */
+ OMX_IndexConfigPrivacyIndicator, /**< 0x7F000027 reference: OMX_CONFIG_BOOLEANTYPE */
+ OMX_IndexConfigTorchMode, /**< 0x7F000028 reference: OMX_CONFIG_TORCHMODETYPE */
+ OMX_IndexConfigSlowSync, /**< 0x7F000029 reference: OMX_CONFIG_BOOLEANTYPE */
+ OMX_IndexConfigExtFocusRegion, /**< 0x7F00002A reference: OMX_CONFIG_EXTFOCUSREGIONTYPE */
+ OMX_IndexConfigFocusAssist, /**< 0x7F00002B reference: OMX_CONFIG_BOOLEANTYPE */
+ OMX_IndexConfigImageFocusLock, /**< 0x7F00002C reference: OMX_IMAGE_CONFIG_LOCKTYPE */
+ OMX_IndexConfigImageWhiteBalanceLock, /**< 0x7F00002D reference: OMX_IMAGE_CONFIG_LOCKTYPE */
+ OMX_IndexConfigImageExposureLock, /**< 0x7F00002E reference: OMX_IMAGE_CONFIG_LOCKTYPE */
+ OMX_IndexConfigImageAllLock, /**< 0x7F00002F reference: OMX_IMAGE_CONFIG_LOCKTYPE */
+ OMX_IndexConfigImageDeNoiseLevel, /**< 0x7F000030 reference: OMX_IMAGE_CONFIG_PROCESSINGLEVELTYPE */
+ OMX_IndexConfigSharpeningLevel, /**< 0x7F000031 reference: OMX_IMAGE_CONFIG_PROCESSINGLEVELTYPE */
+ OMX_IndexConfigDeBlurringLevel, /**< 0x7F000032 reference: OMX_IMAGE_CONFIG_PROCESSINGLEVELTYPE */
+ OMX_IndexConfigChromaCorrection, /**< 0x7F000033 reference: OMX_IMAGE_CONFIG_PROCESSINGLEVELTYPE */
+ OMX_IndexConfigDeMosaicingLevel, /**< 0x7F000034 reference: OMX_IMAGE_CONFIG_PROCESSINGLEVELTYPE */
+ OMX_IndexConfigCommonWhiteBalanceGain, /**< 0x7F000035 reference: OMX_CONFIG_WHITEBALGAINTYPE */
+ OMX_IndexConfigCommonRGB2RGB, /**< 0x7F000036 reference: OMX_CONFIG_COLORCONVERSIONTYPE_II */
+ OMX_IndexConfigCommonRGB2YUV, /**< 0x7F000037 reference: OMX_CONFIG_COLORCONVERSIONTYPE_II */
+ OMX_IndexConfigCommonYUV2RGB, /**< 0x7F000038 reference: OMX_CONFIG_EXT_COLORCONVERSIONTYPE */
+ OMX_IndexConfigCommonGammaTable, /**< 0x7F000039 reference: OMX_CONFIG_GAMMATABLETYPE */
+ OMX_IndexConfigImageFaceDetection, /**< 0x7F00003A reference: OMX_CONFIG_OBJDETECTIONTYPE */
+ OMX_IndexConfigImageBarcodeDetection, /**< 0x7F00003B reference: OMX_CONFIG_OBJDETECTIONTYPE */
+ OMX_IndexConfigImageSmileDetection, /**< 0x7F00003C reference: OMX_CONFIG_OBJDETECTIONTYPE */
+ OMX_IndexConfigImageBlinkDetection, /**< 0x7F00003D reference: OMX_CONFIG_OBJDETECTIONTYPE */
+ OMX_IndexConfigImageFrontObjectDetection, /**< 0x7F00003E reference: OMX_CONFIG_OBJDETECTIONTYPE */
+ OMX_IndexConfigHistogramMeasurement, /**< 0x7F00003F reference: OMX_CONFIG_HISTOGRAMTYPE */
+ OMX_IndexConfigDistanceMeasurement, /**< 0x7F000040 reference: OMX_CONFIG_DISTANCETYPE */
+ OMX_IndexConfigSnapshotToPreview, /**< 0x7F000041 reference: OMX_CONFIG_BOOLEANTYPE */
+ OMX_IndexConfigJpegHeaderType, /**< 0x7F000042 reference: OMX_CONFIG_JPEGHEEADERTYPE */
+ OMX_IndexParamJpegMaxSize, /**< 0x7F000043 reference: OMX_IMAGE_JPEGMAXSIZE */
+ OMX_IndexConfigRestartMarker, /**< 0x7F000044 reference: OMX_CONFIG_RSTMARKER */
+ OMX_IndexParamImageStampOverlay, /**< 0x7F000045 reference: OMX_PARAM_IMAGESTAMPOVERLAYTYPE */
+ OMX_IndexParamThumbnail, /**< 0x7F000046 reference: OMX_PARAM_THUMBNAILTYPE */
+ OMX_IndexConfigImageStabilization, /**< 0x7F000047 reference: OMX_CONFIG_BOOLEANTYPE */
+ OMX_IndexConfigMotionTriggeredImageStabilisation, /**< 0x7F000048 reference: OMX_CONFIG_BOOLEANTYPE */
+ OMX_IndexConfigRedEyeRemoval, /**< 0x7F000049 reference: OMX_CONFIG_REDEYEREMOVALTYPE */
+ OMX_IndexParamHighISONoiseFiler, /**< 0x7F00004A reference: OMX_PARAM_ISONOISEFILTERTYPE */
+ OMX_IndexParamLensDistortionCorrection, /**< 0x7F00004B reference: OMX_CONFIG_BOOLEANTYPE */
+ OMX_IndexParamLocalBrightnessAndContrast, /**< 0x7F00004C reference: OMX_CONFIG_BOOLEANTYPE */
+ OMX_IndexConfigChromaticAberrationCorrection, /**< 0x7F00004D reference: OMX_CONFIG_BOOLEANTYPE */
+ OMX_IndexParamVideoCaptureYUVRange, /**< 0x7F00004E reference: OMX_PARAM_VIDEOYUVRANGETYPE */
+ OMX_IndexConfigFocusRegion, /**< 0x7F00004F reference: OMX_CONFIG_EXTFOCUSREGIONTYPE */
+ OMX_IndexConfigImageMotionEstimation, /**< 0x7F000050 reference: OMX_CONFIG_OBJDETECTIONTYPE */
+ OMX_IndexParamProcessingOrder, /**< 0x7F000051 reference: OMX_CONFIGPROCESSINGORDERTYPE */
+ OMX_IndexParamFrameStabilisation, /**< 0x7F000052 reference: OMX_CONFIG_BOOLEANTYPE */
+ OMX_IndexParamVideoNoiseFilter, /**< 0x7F000053 reference: OMX_PARAM_VIDEONOISEFILTERTYPE */
+ OMX_IndexConfigOtherExtraDataControl, /**< 0x7F000054 reference: OMX_CONFIG_EXTRADATATYPE */
+ OMX_TI_IndexParamBufferPreAnnouncement, /**< 0x7F000055 reference: OMX_TI_PARAM_BUFFERPREANNOUNCE */
+ OMX_TI_IndexConfigBufferRefCountNotification, /**< 0x7F000056 reference: OMX_TI_CONFIG_BUFFERREFCOUNTNOTIFYTYPE */
+ OMX_TI_IndexParam2DBufferAllocDimension, /**< 0x7F000057 reference: OMX_TI_PARAM_2DBUFERALLOCDIMENSION */
+ OMX_TI_IndexConfigWhiteBalanceManualColorTemp, /**< 0x7F000058 reference: OMX_CONFIG_WHITEBALANCECOLORTEMPTPYPE */
+ OMX_TI_IndexConfigFocusSpotWeighting, /**< 0x7F000059 reference: OMX_CONFIG_FOCUSSPOTWEIGHTINGTYPE */
+ OMX_TI_IndexParamSensorOverClockMode, /**< 0x7F00005A reference: OMX_CONFIG_BOOLEANTYPE */
+ OMX_TI_IndexParamDccUriInfo, /**< 0x7F00005B reference: OMX_PARAM_DCCURIINFO */
+ OMX_TI_IndexParamDccUriBuffer, /**< 0x7F00005C reference: OMX_PARAM_SHAREDBUFFER */
/* MPEG4 and H264 encoder specific Indices */
- OMX_TI_IndexParamVideoIntraRefresh, /**< reference: OMX_TI_VIDEO_PARAM_INTRAREFRESHTYPE */
-
- OMX_TI_IndexConfigShutterCallback, /**< reference: OMX_CONFIG_BOOLEANTYPE */
- OMX_TI_IndexParamVarFrameRate, /**< reference: OMX_PARAM_VARFARAMERATETYPE */
- OMX_TI_IndexConfigAutoConvergence, /**< reference: OMX_TI_CONFIG_CONVERGENCETYPE */
- OMX_TI_IndexConfigRightExposureValue, /**< reference: OMX_TI_CONFIG_EXPOSUREVALUERIGHTTYPE */
- OMX_TI_IndexConfigExifTags, /**< reference: OMX_TI_CONFIG_SHAREDBUFFER */
- OMX_TI_IndexParamVideoPayloadHeaderFlag, /**< reference: OMX_TI_PARAM_PAYLOADHEADERFLAG */
- OMX_TI_IndexParamVideoIvfMode, /**< reference: OMX_TI_PARAM_IVFFLAG */
- OMX_TI_IndexConfigCamCapabilities, /**< reference: OMX_TI_CONFIG_SHAREDBUFFER */
- OMX_TI_IndexConfigFacePriority3a, /**< reference: OMX_TI_CONFIG_3A_FACE_PRIORITY */
- OMX_TI_IndexConfigRegionPriority3a, /**< reference: OMX_TI_CONFIG_3A_REGION_PRIORITY */
- OMX_TI_IndexParamAutoConvergence, /**< reference: OMX_TI_PARAM_AUTOCONVERGENCETYPE */
- OMX_TI_IndexConfigAAAskipBuffer, /**< reference: OMX_TI_CONFIG_AAASKIPBUFFERTYPE */
- OMX_TI_IndexParamStereoFrmLayout, /**< reference: OMX_TI_FRAMELAYOUTTYPE */
- OMX_TI_IndexConfigLocalBrightnessContrastEnhance, /**< reference: OMX_TI_CONFIG_LOCAL_AND_GLOBAL_BRIGHTNESSCONTRASTTYPE */
- OMX_TI_IndexConfigGlobalBrightnessContrastEnhance, /**< reference: OMX_TI_CONFIG_LOCAL_AND_GLOBAL_BRIGHTNESSCONTRASTTYPE */
- OMX_TI_IndexConfigVarFrmRange, /**< reference: OMX_TI_CONFIG_VARFRMRANGETYPE */
- OMX_TI_IndexParamAVCHRDBufferSizeSetting, /**< reference: OMX_TI_VIDEO_PARAM_AVCHRDBUFFERSETTING */
- OMX_TI_IndexConfigAVCHRDBufferSizeSetting, /**< reference: OMX_TI_VIDEO_CONFIG_AVCHRDBUFFERSETTING */
- OMX_TI_IndexConfigFocusDistance, /**< reference: OMX_TI_CONFIG_FOCUSDISTANCETYPE */
- OMX_TI_IndexUseNativeBuffers, /**< reference: OMX_TI_ParamUseNativeBuffer */
- OMX_TI_IndexParamUseEnhancedPortReconfig, /**< reference: OMX_TI_IndexParamUseEnhancedPortReconfig */
- OMX_TI_IndexEncoderStoreMetadatInBuffers,
- OMX_TI_IndexParamZslHistoryLen, /**< reference: OMX_TI_PARAM_ZSLHISTORYLENTYPE */
- OMX_TI_IndexConfigZslDelay, /**< reference: OMX_TI_CONFIG_ZSLDELAYTYPE */
- OMX_TI_IndexParamMetaDataBufferInfo, /***< reference: OMX_TI_PARAM_METADATABUFFERINFO */
- OMX_TI_IndexConfigZslFrameSelectMethod, /**< reference: OMX_TI_CONFIG_ZSLFRAMESELECTMETHODTYPE */
- OMX_TI_IndexAndroidNativeBufferUsage, /**< reference: OMX_TI_IndexAndroidNativeBufferUsage */
- OMX_TI_IndexConfigAlgoAreas, /**< reference: OMX_PARAM_SHAREDBUFFER (pSharedBuff is OMX_ALGOAREASTYPE) */
- OMX_TI_IndexConfigAutofocusEnable /**< reference: OMX_CONFIG_BOOLEANTYPE */
+ OMX_TI_IndexParamVideoIntraRefresh, /**< 0x7F00005D reference: OMX_TI_VIDEO_PARAM_INTRAREFRESHTYPE */
+
+ /* camera indices continue */
+ OMX_TI_IndexConfigShutterCallback, /**< 0x7F00005E reference: OMX_CONFIG_BOOLEANTYPE */
+ OMX_TI_IndexParamVarFrameRate, /**< 0x7F00005F reference: OMX_PARAM_VARFARAMERATETYPE */
+ OMX_TI_IndexConfigAutoConvergence, /**< 0x7F000060 reference: OMX_TI_CONFIG_CONVERGENCETYPE */
+ OMX_TI_IndexConfigRightExposureValue, /**< 0x7F000061 reference: OMX_TI_CONFIG_EXPOSUREVALUERIGHTTYPE */
+ OMX_TI_IndexConfigExifTags, /**< 0x7F000062 reference: OMX_TI_CONFIG_SHAREDBUFFER */
+ OMX_TI_IndexParamVideoPayloadHeaderFlag, /**< 0x7F000063 reference: OMX_TI_PARAM_PAYLOADHEADERFLAG */
+ OMX_TI_IndexParamVideoIvfMode, /**< 0x7F000064 reference: OMX_TI_PARAM_IVFFLAG */
+ OMX_TI_IndexConfigCamCapabilities, /**< 0x7F000065 reference: OMX_TI_CONFIG_SHAREDBUFFER */
+ OMX_TI_IndexConfigFacePriority3a, /**< 0x7F000066 reference: OMX_TI_CONFIG_3A_FACE_PRIORITY */
+ OMX_TI_IndexConfigRegionPriority3a, /**< 0x7F000067 reference: OMX_TI_CONFIG_3A_REGION_PRIORITY */
+ OMX_TI_IndexParamAutoConvergence, /**< 0x7F000068 reference: OMX_TI_PARAM_AUTOCONVERGENCETYPE */
+ OMX_TI_IndexConfigAAAskipBuffer, /**< 0x7F000069 reference: OMX_TI_CONFIG_SHAREDBUFFER */
+ OMX_TI_IndexParamStereoFrmLayout, /**< 0x7F00006A reference: OMX_TI_FRAMELAYOUTTYPE */
+ OMX_TI_IndexConfigLocalBrightnessContrastEnhance, /**< 0x7F00006B reference: OMX_TI_CONFIG_LOCAL_AND_GLOBAL_BRIGHTNESSCONTRASTTYPE */
+ OMX_TI_IndexConfigGlobalBrightnessContrastEnhance, /**< 0x7F00006C reference: OMX_TI_CONFIG_LOCAL_AND_GLOBAL_BRIGHTNESSCONTRASTTYPE */
+ OMX_TI_IndexConfigVarFrmRange, /**< 0x7F00006D reference: OMX_TI_CONFIG_VARFRMRANGETYPE */
+
+ /*H264 Encoder specific Indices*/
+ OMX_TI_IndexParamAVCHRDBufferSizeSetting, /**< 0x7F00006E reference: OMX_TI_VIDEO_PARAM_AVCHRDBUFFERSETTING */
+ OMX_TI_IndexConfigAVCHRDBufferSizeSetting, /**< 0x7F00006F reference: OMX_TI_VIDEO_CONFIG_AVCHRDBUFFERSETTING */
+ OMX_TI_IndexConfigFocusDistance, /**< 0x7F000070 reference: OMX_TI_CONFIG_FOCUSDISTANCETYPE */
+ OMX_TI_IndexUseNativeBuffers, /**< 0x7F000071 reference: OMX_TI_ParamUseNativeBuffer(used only in proxy) */
+ OMX_TI_IndexConfigSinglePreviewMode, /**< 0x7F000072 reference: */
+ OMX_TI_IndexConfigFreezeAWB, /**< 0x7F000073 reference: */
+ OMX_TI_IndexConfigAWBMinDelayTime, /**< 0x7F000074 reference: */
+ OMX_TI_IndexConfigDetectedGesturesInfo, /**< 0x7F000075 reference: */
+ OMX_TI_IndexConfigAutoExpMinDelayTime, /**< 0x7F000076 reference: */
+ OMX_TI_IndexConfigFreezeAutoExp, /**< 0x7F000077 reference: */
+ OMX_TI_IndexConfigAutoExpThreshold, /**< 0x7F000078 reference: */
+ OMX_TI_IndexParamZslHistoryLen, /**< 0x7F000079 reference: OMX_TI_PARAM_ZSLHISTORYLENTYPE */
+ OMX_TI_IndexConfigZslDelay, /**< 0x7F00007A reference: OMX_TI_CONFIG_ZSLDELAYTYPE */
+ OMX_TI_IndexConfigMechanicalMisalignment, /**< 0x7F00007B reference: OMX_TI_CONFIG_MM */
+ OMX_TI_IndexParamAffineTransform, /**< 0x7F00007C reference: OMX_TI_CONFIG_AFFINE */
+ OMX_TI_IndexParamUseEnhancedPortReconfig, /**< 0x7F00007D reference: OMX_TI_IndexParamUseEnhancedPortReconfig */
+ OMX_TI_IndexEncoderStoreMetadatInBuffers, /**< 0x7F00007E reference: */
+ OMX_TI_IndexParamMetaDataBufferInfo, /**< 0x7F00007F reference: OMX_TI_PARAM_METADATABUFFERINFO */
+ OMX_TI_IndexConfigZslFrameSelectMethod, /**< 0x7F000080 reference: OMX_TI_CONFIG_ZSLFRAMESELECTMETHODTYPE */
+ OMX_TI_IndexAndroidNativeBufferUsage, /**< 0x7F000081 reference: OMX_TI_IndexAndroidNativeBufferUsage */
+ OMX_TI_IndexConfigAlgoAreas, /**< 0x7F000082 reference: OMX_PARAM_SHAREDBUFFER (pSharedBuff is OMX_ALGOAREASTYPE) */
+
+ OMX_TI_IndexParamSensorDetect, /**< 0x7F000083 reference: OMX_TI_PARAM_SENSORDETECT */
+ OMX_TI_IndexParamVideoSvc, /**< 0x7F000084 reference: OMX_TI_VIDEO_PARAM_SVCTYPE */
+ OMX_TI_IndexConfigVideoSvcLayerDetails, /**< 0x7F000085 reference: OMX_TI_VIDEO_CONFIG_SVCLAYERDETAILS */
+ OMX_TI_IndexConfigVideoSvcTargetLayer, /**< 0x7F000086 reference: OMX_TI_VIDEO_CONFIG_SVCTARGETLAYER */
+ OMX_TI_IndexConfigZslFremeSelectPrio, /**< 0x7F000087 reference: OMX_TI_CONFIG_ZSLFRAMESELECTPRIOTYPE */
+
+ OMX_TI_IndexUseBufferDescriptor, /**< 0x7F000088 reference: OMX_TI_PARAM_USEBUFFERDESCRIPTOR */
+ OMX_TI_IndexParamVtcSlice, /**< 0x7F000089 reference: OMX_TI_PARAM_VTCSLICE */
+
+ OMX_TI_IndexConfigAutofocusEnable, /**< 0x7F00008A reference: OMX_CONFIG_BOOLEANTYPE */
+ OMX_TI_IndexParamAVCEnableLTRMode, /**< 0x7F00008B reference: OMX_TI_VIDEO_PARAM_AVC_LTRP*/
+ OMX_TI_IndexConfigAVCEnableNextLTR, /**< 0x7F00008C reference: OMX_TI_VIDEO_CONFIG_AVC_LTRP*/
+ OMX_TI_IndexConfigAVCUpdateLTRInterval, /**< 0x7F00008D reference: OMX_TI_VIDEO_CONFIG_AVC_LTRP_INTERVAL*/
+ OMX_TI_IndexParamTimeStampInDecodeOrder, /**< 0x7F00008E reference: OMX_TI_PARAM_TIMESTAMP_IN_DECODE_ORDER */
+ OMX_TI_IndexParamVideoAutoFrameRateUpdate, /**< 0x7F00008F reference: OMX_TI_VIDEO_PARAM_AUTO_FRAMERATE_UPDATE */
+ OMX_TI_IndexParamBayerCompression, /**< 0x7F000090 reference: OMX_TI_PARAM_BAYERCOMPRESSION */
+ OMX_TI_IndexParamSkipGreyOutputFrames, /**< 0x7F000091 reference: OMX_TI_PARAM_SKIP_GREY_OUTPUT_FRAMES */
+ OMX_TI_IndexConfigMipiCounters, /**< 0x7F000092 reference: OMX_CONFIG_MIPICOUNTERS */
+ OMX_TI_IndexConfigCsiTimingRW, /**< 0x7F000093 reference: OMX_CONFIG_CSITIMINGRW */
+ OMX_TI_IndexConfigCSIcomplexIO, /**< 0x7F000094 reference: OMX_CONFIG_CSICMPXIO */
+ OMX_TI_IndexConfigAFScore, /**< 0x7F000095 reference: OMX_CONFIG_AUTOFOCUSSCORE */
+ OMX_TI_IndexConfigColorBars, /**< 0x7F000096 reference: OMX_CONFIG_COLORBARS */
+ OMX_TI_IndexConfigOTPEeprom, /**< 0x7F000097 reference: OMX_CONFIG_OTPEEPROM */
+ OMX_TI_IndexConfigISPInfo, /**< 0x7F000098 reference: OMX_CONFIG_ISPINFO */
+ OMX_TI_IndexConfigPicSizeControlInfo, /**< 0x7F000099 reference: OMX_TI_VIDEO_CONFIG_PICSIZECONTROLINFO */
+ OMX_TI_IndexConfigPortTapPoint, /**< 0x7F00009A reference: OMX_TI_CONFIG_PORTTAPPOINTTYPE */
+ OMX_TI_IndexConfigDisableNSF2, /**< 0x7F00009B reference: OMX_CONFIG_BOOLEANTYPE */
+ OMX_TI_IndexConfigDisableSharpening, /**< 0x7F00009C reference: OMX_CONFIG_BOOLEANTYPE */
+ OMX_TI_IndexConfigFixedGamma, /**< 0x7F00009D reference: OMX_CONFIG_BOOLEANTYPE */
+ OMX_TI_IndexConfigDisableThreeLinColorMap, /**< 0x7F00009E reference: OMX_CONFIG_BOOLEANTYPE */
+ OMX_TI_IndexParamComponentBufferAllocation, /**< 0x7F00009F reference: OMX_TI_PARAM_COMPONENTBUFALLOCTYPE */
+ OMX_TI_IndexConfigEnqueueShotConfigs, /**< 0x7F0000A0 reference: OMX_TI_CONFIG_ENQUEUESHOTCONFIGS */
+ OMX_TI_IndexConfigQueryAvailableShots, /**< 0x7F0000A1 reference: OMX_TI_CONFIG_QUERYAVAILABLESHOTS */
+ OMX_TI_IndexConfigDisableNSF1, /**< 0x7F0000A2 reference: OMX_CONFIG_BOOLEANTYPE */
+ OMX_TI_IndexConfigDisableGIC, /**< 0x7F0000A3 reference: OMX_CONFIG_BOOLEANTYPE */
+ OMX_TI_IndexConfigVectShotStopMethod, /**< 0x7F0000A4 reference: OMX_TI_CONFIG_VECTSHOTSTOPMETHODTYPE */
+ OMX_TI_IndexParamComponentExpectedSuspensionState, /**< 0x7F0000A5 reference: OMX_PARAM_SUSPENSIONTYPE */
+ OMX_TI_IndexComponentHandle, /**< 0x7F0000A6 reference: OMX_TI_COMPONENT_HANDLE */
+ OMX_TI_IndexParamVideoEnableMetadata, /**< 0x7F0000A7 reference: OMX_TI_PARAM_DECMETADATA */
+ OMX_TI_IndexConfigStreamInterlaceFormats = ((OMX_INDEXTYPE)OMX_IndexVendorStartUnused + 0x100) /**< 0x7F000100 reference: OMX_STREAMINTERLACEFORMATTYPE */
} OMX_TI_INDEXTYPE;
diff --git a/domx/omx_core/inc/OMX_TI_Video.h b/domx/omx_core/inc/OMX_TI_Video.h
index 55000c6..63187e0 100755
--- a/domx/omx_core/inc/OMX_TI_Video.h
+++ b/domx/omx_core/inc/OMX_TI_Video.h
@@ -44,16 +44,17 @@
*!
*! Revision History
*! =====================================================================
- *! 24-Dec-2008 Navneet navneet@ti.com Initial Version
- *! 14-Jul-2009 Radha Purnima radhapurnima@ti.com
- *! 25-Aug-2009 Radha Purnima radhapurnima@ti.com
+ *! 24-Dec-2008 Navneet navneet@ti.com Initial Version
+ *! 14-Jul-2009 Radha Purnima radhapurnima@ti.com
+ *! 25-Aug-2009 Radha Purnima radhapurnima@ti.com
+ *! 16-May-2009 Shivaraj Shetty shettyshivaraj@ti.com
* =========================================================================*/
#ifndef OMX_TI_VIDEO_H
#define OMX_TI_VIDEO_H
#define H264ENC_MAXNUMSLCGPS 2
-
+#define OMXH264E_MAX_SLICE_SUPPORTED 64
#include <OMX_Core.h>
/**
@@ -543,7 +544,9 @@ typedef enum OMX_TI_VIDEO_CODINGTYPE {
OMX_VIDEO_CodingVP6 =
(OMX_VIDEO_CODINGTYPE) OMX_VIDEO_CodingVendorStartUnused +1, /* VP6 */
OMX_VIDEO_CodingVP7, /* VP7 */
- OMX_TI_VIDEO_CodingSORENSONSPK /* Sorenson Spark */
+ OMX_TI_VIDEO_CodingSORENSONSPK, /* Sorenson Spark */
+ OMX_VIDEO_CodingSVC, /**< H.264/SVC */
+ OMX_VIDEO_CodingVP8 /* VP8 */
}OMX_TI_VIDEO_CODINGTYPE;
@@ -719,5 +722,1867 @@ typedef struct OMX_VIDEO_STOREMETADATAINBUFFERSPARAMS {
OMX_BOOL bStoreMetaData;
} OMX_VIDEO_STOREMETADATAINBUFFERSPARAMS;
+
+/**
+ * Interlaced Video Content format
+ *
+ * STRUCT MEMBERS:
+ * nSize : Size of the structure in bytes
+ * nVersion : OMX specification version information
+ * nPortIndex : Port that this structure applies to
+ * nFormat : bitmapped value identifying the interlaced formats supported by the component
+ * nTimeStamp : temporal timestamp information for the second field
+ */
+typedef struct OMX_TI_INTERLACEFORMATTYPE {
+ OMX_U32 nSize;
+ OMX_VERSIONTYPE nVersion;
+ OMX_U32 nPortIndex;
+ OMX_U32 nFormat;
+ OMX_TICKS nTimeStamp;
+} OMX_TI_INTERLACEFORMATTYPE;
+
+/**
+ * Interlace format types
+ */
+typedef enum OMX_TI_INTERLACETYPE {
+ OMX_InterlaceFrameProgressive= 0x00,
+ OMX_InterlaceInterleaveFrameTopFieldFirst= 0x01,
+ OMX_InterlaceInterleaveFrameBottomFieldFirst= 0x02,
+ OMX_InterlaceFrameTopFieldFirst= 0x04,
+ OMX_InterlaceFrameBottomFieldFirst= 0x08,
+ OMX_InterlaceInterleaveFieldTop= 0x10,
+ OMX_InterlaceInterleaveFieldBottom= 0x20,
+ OMX_InterlaceFmtMask= 0x7FFFFFFF
+} OMX_TI_INTERLACETYPE;
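+
+/*
+ * Illustrative note: nFormat in OMX_TI_INTERLACEFORMATTYPE is documented as a
+ * bitmapped value, so a component that supports both interleaved-frame field
+ * orders could, for example, report
+ *   nFormat = OMX_InterlaceInterleaveFrameTopFieldFirst |
+ *             OMX_InterlaceInterleaveFrameBottomFieldFirst;   // 0x01 | 0x02
+ */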
+
+/**
+ * To query if the stream contains interlaced or progressive content
+ *
+ * STRUCT MEMBERS:
+ * nSize : Size of the structure in bytes
+ * nVersion : OMX specification version information
+ * nPortIndex : Port that this structure applies to
+ * bInterlaceFormat : whether the stream contains interlace or progressive content
+ * OMX_TRUE indicates interlace and OMX_FALSE indicates progressive
+ * nInterlaceFormats : bitmapped value identifying the interlace formats detected within the stream
+ */
+typedef struct OMX_TI_STREAMINTERLACEFORMATTYPE {
+ OMX_U32 nSize;
+ OMX_VERSIONTYPE nVersion;
+ OMX_U32 nPortIndex;
+ OMX_BOOL bInterlaceFormat;
+ OMX_U32 nInterlaceFormats;
+} OMX_TI_STREAMINTERLACEFORMAT;
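+
+/*
+ * Illustrative usage sketch (hypothetical client code, not part of this API):
+ * the structure above is associated with OMX_TI_IndexConfigStreamInterlaceFormats,
+ * so a client would typically query it with the standard OMX_GetConfig() call:
+ *
+ *   OMX_TI_STREAMINTERLACEFORMAT fmt;
+ *   fmt.nSize = sizeof(fmt);
+ *   fmt.nVersion.s.nVersionMajor = 1;            // fill remaining version fields as required
+ *   fmt.nPortIndex = nOutPortIndex;              // assumed decoder output port index
+ *   OMX_GetConfig(hComp, (OMX_INDEXTYPE) OMX_TI_IndexConfigStreamInterlaceFormats, &fmt);
+ *   if (fmt.bInterlaceFormat == OMX_TRUE) {
+ *       // nInterlaceFormats holds the detected OMX_TI_INTERLACETYPE bits
+ *   }
+ */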
+
+/*
+@brief OMX_TI_VIDEO_CONFIG_PICSIZECONTROLINFO : Structure to provide the configuration to compute min and max picture size
+@param minPicSizeRatio : This ratio is used to compute minimum picture size in the following manner,
+minPicSize = averagePicSize >> minPicSizeRatio. Allowed values are 1 to 4. Setting this to 0 will enable encoder chosen ratio.
+@param maxPicSizeRatio : This ratio is used to compute maximum picture size in the following manner,
+maxPicSize = averagePicSize * maxPicSizeRatio. Allowed values are 2 to 30. Setting this to 0 or 1 will enable encoder chosen ratio.
+*/
+/* ============================================================================= */
+typedef struct OMX_TI_VIDEO_CONFIG_PICSIZECONTROLINFO {
+ OMX_U32 nSize;
+ OMX_VERSIONTYPE nVersion;
+ OMX_U32 nPortIndex;
+ OMX_U16 minPicSizeRatio;
+ OMX_U16 maxPicSizeRatio;
+} OMX_TI_VIDEO_CONFIG_PICSIZECONTROLINFO;
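+
+/*
+ * Illustrative arithmetic (example values only): with an average picture size
+ * of 50000 bytes, minPicSizeRatio = 2 and maxPicSizeRatio = 4 would give
+ *   minPicSize = 50000 >> 2 = 12500 bytes
+ *   maxPicSize = 50000 * 4  = 200000 bytes
+ * while 0 (or 1 for maxPicSizeRatio) leaves the ratio choice to the encoder.
+ */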
+
+
+/*!====================================================================!
+
+ Currently we support only the SVC baseline profile
+
+ * !====================================================================!*/
+ typedef enum OMX_TI_VIDEO_SVCPROFILETYPE {
+ OMX_VIDEO_SVCProfileBaseline = 0x01, /**< Baseline profile */
+ OMX_VIDEO_SVCProfileHigh = 0x02, /**< High profile */
+ OMX_VIDEO_SVCProfileHighIntra = 0x03, /**< High Intra profile */
+ OMX_VIDEO_SVCProfileMax = 0x7FFFFFFF
+ } OMX_TI_VIDEO_SVCPROFILETYPE;
+
+
+/*!====================================================================!
+
+ Currently we support only the SVC baseline profile up to level 4 for the SVC encoder.
+
+ * !====================================================================!*/
+ typedef enum OMX_TI_VIDEO_SVCLEVELTYPE {
+ OMX_VIDEO_SVCLevel1 = 0x01, /**< Level 1 */
+ OMX_VIDEO_SVCLevel1b = 0x02, /**< Level 1b */
+ OMX_VIDEO_SVCLevel11 = 0x04, /**< Level 1.1 */
+ OMX_VIDEO_SVCLevel12 = 0x08, /**< Level 1.2 */
+ OMX_VIDEO_SVCLevel13 = 0x10, /**< Level 1.3 */
+ OMX_VIDEO_SVCLevel2 = 0x20, /**< Level 2 */
+ OMX_VIDEO_SVCLevel21 = 0x40, /**< Level 2.1 */
+ OMX_VIDEO_SVCLevel22 = 0x80, /**< Level 2.2 */
+ OMX_VIDEO_SVCLevel3 = 0x100, /**< Level 3 */
+ OMX_VIDEO_SVCLevel31 = 0x200, /**< Level 3.1 */
+ OMX_VIDEO_SVCLevel32 = 0x400, /**< Level 3.2 */
+ OMX_VIDEO_SVCLevel4 = 0x800, /**< Level 4 */
+ OMX_VIDEO_SVCLevel41 = 0x1000, /**< Level 4.1 */
+ OMX_VIDEO_SVCLevel42 = 0x2000, /**< Level 4.2 */
+ OMX_VIDEO_SVCLevel5 = 0x4000, /**< Level 5 */
+ OMX_VIDEO_SVCLevel51 = 0x8000, /**< Level 5.1 */
+ OMX_VIDEO_SVCLevelMax = 0x7FFFFFFF
+ } OMX_TI_VIDEO_SVCLEVELTYPE;
+
+
+ typedef struct OMX_VIDEO_SVC_STD_PARAMS {
+ OMX_U32 nSliceHeaderSpacing;
+ OMX_U32 nPFrames;
+ OMX_U32 nBFrames;
+ OMX_BOOL bUseHadamard;
+ OMX_U32 nRefFrames;
+ OMX_U32 nRefIdx10ActiveMinus1;
+ OMX_U32 nRefIdx11ActiveMinus1;
+ OMX_BOOL bEnableUEP;
+ /* Not needed as per SVC encoder requirements
+ OMX_BOOL bEnableFMO;
+ OMX_BOOL bEnableASO;
+ OMX_BOOL bEnableRS;
+ */
+ OMX_VIDEO_AVCLOOPFILTERTYPE eLoopFilterMode;
+ OMX_U32 nAllowedPictureTypes;
+ OMX_BOOL bFrameMBsOnly;
+ OMX_BOOL bMBAFF;
+ OMX_BOOL bEntropyCodingCABAC;
+ OMX_BOOL bWeightedPPrediction;
+ OMX_U32 nWeightedBipredicitonMode;
+ OMX_BOOL bconstIpred;
+ OMX_BOOL bDirect8x8Inference;
+ OMX_BOOL bDirectSpatialTemporal;
+ OMX_U32 nCabacInitIdc;
+ } OMX_VIDEO_SVC_STD_PARAMS;
+
+
+ typedef struct OMX_VIDEO_SVC_RECTTYPE {
+ OMX_S32 nLeft;
+ OMX_S32 nTop;
+ OMX_U32 nWidth;
+ OMX_U32 nHeight;
+ } OMX_VIDEO_SVC_RECTTYPE;
+
+
+ typedef struct OMX_VIDEO_SVC_BITRATETYPE {
+ OMX_VIDEO_CONTROLRATETYPE eControlRate;
+ OMX_U32 nTargetBitrate;
+ } OMX_VIDEO_SVC_BITRATETYPE;
+
+
+ typedef struct OMX_VIDEO_SVC_MOTIONVECTORTYPE {
+ OMX_VIDEO_MOTIONVECTORTYPE eAccuracy;
+ OMX_BOOL bUnrestrictedMVs;
+ OMX_BOOL bFourMV;
+ OMX_S32 sXSearchRange;
+ OMX_S32 sYSearchRange;
+ } OMX_VIDEO_SVC_MOTIONVECTORTYPE;
+
+
+ typedef struct OMX_VIDEO_SVC_QUANTIZATIONTYPE {
+ OMX_U32 nQpI;
+ OMX_U32 nQpP;
+ OMX_U32 nQpB;
+ } OMX_VIDEO_SVC_QUANTIZATIONTYPE;
+
+
+ typedef struct OMX_VIDEO_SVC_INTRAREFRESHTYPE {
+ OMX_VIDEO_INTRAREFRESHTYPE eRefreshMode;
+ OMX_U32 nAirMBs;
+ OMX_U32 nAirRef;
+ OMX_U32 nCirMBs;
+ } OMX_VIDEO_SVC_INTRAREFRESHTYPE;
+
+
+ typedef struct OMX_VIDEO_SVC_VBSMCTYPE {
+ OMX_BOOL b16x16;
+ OMX_BOOL b16x8;
+ OMX_BOOL b8x16;
+ OMX_BOOL b8x8;
+ OMX_BOOL b8x4;
+ OMX_BOOL b4x8;
+ OMX_BOOL b4x4;
+ } OMX_VIDEO_SVC_VBSMCTYPE;
+
+
+ typedef struct OMX_VIDEO_SVC_NALUCONTROLTYPE {
+ OMX_U32 nStartofSequence;
+ OMX_U32 nEndofSequence;
+ OMX_U32 nIDR;
+ OMX_U32 nIntraPicture;
+ OMX_U32 nNonIntraPicture;
+ }OMX_VIDEO_SVC_NALUCONTROLTYPE;
+
+
+ typedef struct OMX_VIDEO_SVC_MEBLOCKSIZETYPE {
+ OMX_VIDEO_BLOCKSIZETYPE eMinBlockSizeP;
+ OMX_VIDEO_BLOCKSIZETYPE eMinBlockSizeB;
+ }OMX_VIDEO_SVC_MEBLOCKSIZETYPE;
+
+
+ typedef struct OMX_VIDEO_SVC_INTRAPREDTYPE {
+ OMX_U32 nLumaIntra4x4Enable;
+ OMX_U32 nLumaIntra8x8Enable;
+ OMX_U32 nLumaIntra16x16Enable;
+ OMX_U32 nChromaIntra8x8Enable;
+ OMX_VIDEO_CHROMACOMPONENTTYPE eChromaComponentEnable;
+ }OMX_VIDEO_SVC_INTRAPREDTYPE;
+
+
+ typedef struct OMX_VIDEO_SVC_ENCODER_PRESETTYPE {
+ OMX_VIDEO_ENCODING_MODE_PRESETTYPE eEncodingModePreset;
+ OMX_VIDEO_RATECONTROL_PRESETTYPE eRateControlPreset;
+ }OMX_VIDEO_SVC_ENCODER_PRESETTYPE;
+
+
+ typedef struct OMX_VIDEO_SVC_VUIINFOTYPE {
+ OMX_BOOL bAspectRatioPresent;
+ OMX_VIDEO_ASPECTRATIOTYPE ePixelAspectRatio;
+ OMX_BOOL bFullRange;
+ }OMX_VIDEO_SVC_VUIINFOTYPE;
+
+
+ typedef struct OMX_VIDEO_SVC_HRDBUFFERSETTING {
+ OMX_U32 nInitialBufferLevel;
+ OMX_U32 nHRDBufferSize;
+ OMX_U32 nTargetBitrate;
+ }OMX_VIDEO_SVC_HRDBUFFERSETTING;
+
+
+ typedef struct OMX_VIDEO_SVC_INTRAPERIOD {
+ OMX_U32 nIDRPeriod;
+ OMX_U32 nPFrames;
+ } OMX_VIDEO_SVC_INTRAPERIOD;
+
+
+ typedef struct OMX_VIDEO_SVC_PIXELINFOTYPE {
+ OMX_U32 nWidth;
+ OMX_U32 nHeight;
+ } OMX_VIDEO_SVC_PIXELINFOTYPE;
+
+
+ typedef struct OMX_VIDEO_SVC_MESEARCHRANGETYPE {
+ OMX_VIDEO_MOTIONVECTORTYPE eMVAccuracy;
+ OMX_U32 nHorSearchRangeP;
+ OMX_U32 nVerSearchRangeP;
+ OMX_U32 nHorSearchRangeB;
+ OMX_U32 nVerSearchRangeB;
+ }OMX_VIDEO_SVC_MESEARCHRANGETYPE;
+
+
+ typedef struct OMX_VIDEO_SVC_QPSETTINGSTYPE {
+ OMX_U32 nQpI;
+ OMX_U32 nQpMaxI;
+ OMX_U32 nQpMinI;
+ OMX_U32 nQpP;
+ OMX_U32 nQpMaxP;
+ OMX_U32 nQpMinP;
+ OMX_U32 nQpOffsetB;
+ OMX_U32 nQpMaxB;
+ OMX_U32 nQpMinB;
+ }OMX_VIDEO_SVC_QPSETTINGSTYPE;
+
+
+ typedef struct OMX_VIDEO_SVC_SLICECODINGTYPE {
+ OMX_VIDEO_AVCSLICEMODETYPE eSliceMode;
+ OMX_U32 nSlicesize;
+ }OMX_VIDEO_SVC_SLICECODINGTYPE;
+
+
+ typedef struct OMX_VIDEO_EXEC_SVC_HRDBUFFERSETTING {
+ OMX_U32 nHRDBufferSize;
+ OMX_U32 nEncodeBitrate;
+ }OMX_VIDEO_EXEC_SVC_HRDBUFFERSETTING;
+
+/**
+ * SVC params
+ *
+ * STRUCT MEMBERS:
+ * nSize : Size of the structure in bytes
+ * nVersion : OMX specification version information
+ * nPortIndex : Port that this structure applies to
+ * nSliceHeaderSpacing : Number of macroblocks between slice header, put
+ * zero if not used
+ * nPFrames : Number of P frames between each I frame
+ * nBFrames : Number of B frames between each I frame
+ * bUseHadamard : Enable/disable Hadamard transform
+ * nRefFrames : Max number of reference frames to use for inter
+ * motion search (1-16)
+ * nRefIdxTrailing : Pic param set ref frame index (index into ref
+ * frame buffer of trailing frames list), B frame
+ * support
+ * nRefIdxForward : Pic param set ref frame index (index into ref
+ * frame buffer of forward frames list), B frame
+ * support
+ * bEnableUEP : Enable/disable unequal error protection. This
+ * is only valid if data partitioning is enabled.
+ * bEnableFMO : Enable/disable flexible macroblock ordering
+ * bEnableASO : Enable/disable arbitrary slice ordering
+ * bEnableRS : Enable/disable sending of redundant slices
+ * eProfile : AVC profile(s) to use
+ * eLevel : AVC level(s) to use
+ * nAllowedPictureTypes : Specifies the picture types allowed in the
+ * bitstream
+ * bFrameMBsOnly : specifies that every coded picture of the
+ * coded video sequence is a coded frame
+ * containing only frame macroblocks
+ * bMBAFF : Enable/disable switching between frame and
+ * field macroblocks within a picture
+ * bEntropyCodingCABAC : Entropy decoding method to be applied for the
+ * syntax elements for which two descriptors appear
+ * in the syntax tables
+ * bWeightedPPrediction : Enable/disable weighted prediction shall not
+ * be applied to P and SP slices
+ * nWeightedBipredicitonMode : Default weighted prediction is applied to B
+ * slices
+ * bconstIpred : Enable/disable constrained intra prediction
+ * bDirect8x8Inference : Specifies the method used in the derivation
+ * process for luma motion vectors for B_Skip,
+ * B_Direct_16x16 and B_Direct_8x8 as specified
+ * in subclause 8.4.1.2 of the AVC spec
+ * bDirectSpatialTemporal : Flag indicating spatial or temporal direct
+ * mode used in B slice coding (related to
+ * bDirect8x8Inference) . Spatial direct mode is
+ * more common and should be the default.
+ * nCabacInitIdc : Index used to init CABAC contexts
+ * eLoopFilterMode : Enable/disable loop filter
+ */
+ typedef struct OMX_TI_VIDEO_PARAM_SVCTYPE {
+ OMX_U32 nSize;
+ OMX_VERSIONTYPE nVersion;
+ OMX_U32 nPortIndex;
+
+ OMX_U32 nActualFrameWidth;
+ OMX_U32 nActualFrameHeight;
+ OMX_S32 nStride;
+ OMX_U32 xFramerate;
+ OMX_COLOR_FORMATTYPE eColorFormat;
+ OMX_VIDEO_SVC_RECTTYPE sRecType;
+
+ OMX_VIDEO_SVC_STD_PARAMS sBasicParams;
+
+ OMX_U32 nRefFrames;
+ OMX_TI_VIDEO_SVCPROFILETYPE eProfile;
+ OMX_TI_VIDEO_SVCLEVELTYPE eLevel;
+
+ OMX_U32 xEncodeFramerate;
+ OMX_VIDEO_SVC_BITRATETYPE sBitRateParams;
+
+ OMX_VIDEO_SVC_MOTIONVECTORTYPE sMotionVectorParams;
+ OMX_VIDEO_SVC_QUANTIZATIONTYPE sQuantizationParams;
+ OMX_VIDEO_SVC_INTRAREFRESHTYPE sIntraRefreshParams;
+ OMX_VIDEO_SVC_VBSMCTYPE sVBSMCParams;
+
+ //OMX_NALUFORMATSTYPE eNaluFormat;
+ OMX_VIDEO_SVC_NALUCONTROLTYPE sNalUnitParams;
+
+ OMX_VIDEO_SVC_MEBLOCKSIZETYPE sMEBlockSizeParams;
+ OMX_VIDEO_SVC_INTRAPREDTYPE sIntraPredParams;
+ OMX_VIDEO_SVC_ENCODER_PRESETTYPE sEncPresetParams;
+ OMX_VIDEO_TRANSFORMBLOCKSIZETYPE eTransformBlocksize;
+ OMX_VIDEO_SVC_VUIINFOTYPE sVUIInfoParams;
+ OMX_VIDEO_SVC_HRDBUFFERSETTING sHRDBufferParams;
+
+ OMX_U32 nNumTemporalLayers;
+ OMX_S32 nDependencyID;
+ OMX_S32 nQualityID;
+ //OMX_VIDEO_SVC_ENCODE_MODE eModeOfEncode;
+
+ OMX_U32 nErrorConcealmentMode;
+ OMX_U32 nDeblockFilterMode;
+ } OMX_TI_VIDEO_PARAM_SVCTYPE;
+
+ typedef struct OMX_TI_VIDEO_CONFIG_SVCLAYERDETAILS {
+ OMX_U32 nSize;
+ OMX_VERSIONTYPE nVersion;
+ OMX_U32 nPortIndex;
+
+ OMX_U32 nNumLayers;
+ OMX_U32 nLayerId;
+ OMX_U8 nPriorityId;
+ OMX_U8 nDependencyId;
+ OMX_U8 nQualityId;
+ OMX_U8 nTemporalId;
+ OMX_U8 nBitrateInfoPresentFlag;
+ OMX_U8 nFramerateInfoPresentFlag;
+ OMX_U8 nFramesizeInfoPresentFlag;
+ OMX_U16 nAvgBitrate;
+ OMX_U16 nMaxBitrate;
+ OMX_U16 nAvgFramerate;
+ OMX_U32 nFrameWidth;
+ OMX_U32 nFrameHeight;
+
+ OMX_U32 nLayerIndex; /* Used to query for individual layer details */
+
+} OMX_TI_VIDEO_CONFIG_SVCLAYERDETAILS;
+
+typedef struct OMX_TI_VIDEO_CONFIG_SVCTARGETLAYER {
+ OMX_U32 nSize;
+ OMX_VERSIONTYPE nVersion;
+ OMX_U32 nPortIndex;
+
+ OMX_U32 nSvcTargetLayerDID;
+ OMX_U32 nSvcTargetLayerTID;
+ OMX_U32 nSvcTargetLayerQID;
+
+} OMX_TI_VIDEO_CONFIG_SVCTARGETLAYER;
+/* ========================================================================== */
+/*!
+@brief OMX_TI_VIDEO_SLICEDATAINFO : to configure the Slice Settings
+@param nNumofSlices number of valid fields to be read
+@param nSliceSizeConfigured variable that indicates the maximum slice size configured;
+ (n*nSliceSizeConfigured) gives the buffer offset
+ of the nth slice in the output buffer
+@param nSliceSize gives the size of each slice
+*/
+/* ==========================================================================*/
+typedef struct OMX_TI_VIDEO_SLICEDATAINFO {
+ OMX_U32 nNumofSlices;
+ OMX_U32 nSliceSizeConfigured;
+ OMX_U32 nSliceSize[OMXH264E_MAX_SLICE_SUPPORTED];
+} OMX_TI_VIDEO_SLICEDATAINFO;
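+
+/*
+ * Illustrative sketch: per the description above, the encoded data of the
+ * n-th slice starts at byte offset (n * nSliceSizeConfigured) in the output
+ * buffer and occupies nSliceSize[n] bytes. For example, for slice 2:
+ *   OMX_U8 *pSlice = pOutBuf + 2 * info.nSliceSizeConfigured;  // pOutBuf/info are hypothetical
+ *   OMX_U32 nLen   = info.nSliceSize[2];
+ */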
+/**
+* @brief Long-term reference picture (LTRP) mode selection for the H264 encoder
+ */
+
+typedef enum _OMX_VIDEO_AVCLTRMODE {
+ OMX_H264ENC_LTRP_NONE = 0,
+ /**< No long-term reference frame in the sequence
+ */
+ OMX_H264ENC_LTRP_REFERTOIDR = 1,
+ /**< Mark all the I frames as long-term reference frames and,
+ * based on the frame control IH264ENC_Control, refer to
+ * a long-term reference frame (I frame).
+ */
+ OMX_H264ENC_LTRP_REFERTOP_PROACTIVE =2,
+ /**< Two long-term frames are supported in this scheme, and
+ * long-term index marking and reference frame update are done based
+ * on the IH264ENC_Control values
+ */
+ OMX_H264ENC_LTRP_REFERTOP_REACTIVE = 3
+ /**< This is not supported in the current version of the encoder
+ */
+} OMX_VIDEO_AVCLTRMODE;
+
+
+/* ============================================================================= */
+/*
+@brief OMX_TI_VIDEO_PARAM_AVC_LTRP : Structure to configure the Long Term Reference Picture (LTRP) feature in the H264 encoder for the session.
+Enabling this parameter will instruct the encoder to keep its most recent I/IDR frame in its reference buffer list.
+This increases the DDR footprint by one frame buffer.
+@param eLTRMode : selects the Long Term Reference Picture mode; possible modes: 0, 1, 2
+@param nLTRInterval : LTR marking interval indicated to the codec, in terms of frame number
+*/
+/* ============================================================================= */
+typedef struct OMX_TI_VIDEO_PARAM_AVC_LTRP{
+ OMX_U32 nSize;
+ OMX_VERSIONTYPE nVersion;
+ OMX_U32 nPortIndex;
+ OMX_VIDEO_AVCLTRMODE eLTRMode;
+ OMX_U32 nLTRInterval;
+} OMX_TI_VIDEO_PARAM_AVC_LTRP;
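+
+/*
+ * Illustrative usage sketch (hypothetical client code): this parameter is
+ * listed against OMX_TI_IndexParamAVCEnableLTRMode, so enabling refer-to-IDR
+ * mode with an LTR interval of 30 frames could look like:
+ *
+ *   OMX_TI_VIDEO_PARAM_AVC_LTRP ltrp;
+ *   ltrp.nSize = sizeof(ltrp);
+ *   ltrp.nVersion.s.nVersionMajor = 1;           // fill remaining version fields as required
+ *   ltrp.nPortIndex = nEncOutPort;               // assumed encoder output port index
+ *   ltrp.eLTRMode = OMX_H264ENC_LTRP_REFERTOIDR;
+ *   ltrp.nLTRInterval = 30;
+ *   OMX_SetParameter(hComp, (OMX_INDEXTYPE) OMX_TI_IndexParamAVCEnableLTRMode, &ltrp);
+ */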
+
+/*
+@brief OMX_TI_VIDEO_CONFIG_AVC_LTRP : Structure to provide the configuration to acknowledge successful decode of previous LTR
+@param bEnableNextLTR : when set to TRUE, indicates that the LTR has been decoded successfully
+*/
+/* ============================================================================= */
+typedef struct OMX_TI_VIDEO_CONFIG_AVC_LTRP{
+ OMX_U32 nSize;
+ OMX_VERSIONTYPE nVersion;
+ OMX_U32 nPortIndex;
+ OMX_BOOL bEnableNextLTR;
+} OMX_TI_VIDEO_CONFIG_AVC_LTRP;
+
+/* ============================================================================= */
+/*
+@brief OMX_TI_VIDEO_CONFIG_AVC_LTRP_INTERVAL : Structure to enable the update of the LTRP
+interval at runtime
+@param nLTRInterval : LTR marking interval indicated to the codec, in terms of frame number
+*/
+/* ============================================================================= */
+typedef struct OMX_TI_VIDEO_CONFIG_AVC_LTRP_INTERVAL{
+ OMX_U32 nSize;
+ OMX_VERSIONTYPE nVersion;
+ OMX_U32 nPortIndex;
+ OMX_U32 nLTRInterval;
+} OMX_TI_VIDEO_CONFIG_AVC_LTRP_INTERVAL;
+
+/* ============================================================================= */
+/*
+@brief OMX_TI_PARAM_TIMESTAMP_IN_DECODE_ORDER : Structure to enable timestamps in decode order
+ at the input of decoders.
+*/
+/* ============================================================================= */
+typedef struct OMX_TI_PARAM_TIMESTAMP_IN_DECODE_ORDER{
+ OMX_U32 nSize;
+ OMX_VERSIONTYPE nVersion;
+ OMX_BOOL bEnabled;
+} OMX_TI_PARAM_TIMESTAMP_IN_DECODE_ORDER;
+
+/* ============================================================================= */
+/*
+@brief OMX_TI_VIDEO_PARAM_AUTO_FRAMERATE_UPDATE : Structure to enable dynamic update of frame rate
+*/
+/* ============================================================================= */
+typedef struct OMX_TI_VIDEO_PARAM_AUTO_FRAMERATE_UPDATE {
+ OMX_U32 nSize;
+ OMX_VERSIONTYPE nVersion;
+ OMX_U32 nPortIndex;
+ OMX_BOOL bEnableAutoVFRUpdate;
+ OMX_U32 nDiffThresholdtoUpdate;
+ OMX_U32 nMaxSessionFrameRate;
+} OMX_TI_VIDEO_PARAM_AUTO_FRAMERATE_UPDATE;
+
+/* ============================================================================= */
+/*
+@brief OMX_TI_PARAM_SKIP_GREY_OUTPUT_FRAMES : Structure to enable the feature to skip grey output
+ frames that do not have a proper reference.
+*/
+/* ============================================================================= */
+typedef struct OMX_TI_PARAM_SKIP_GREY_OUTPUT_FRAMES {
+ OMX_U32 nSize;
+ OMX_VERSIONTYPE nVersion;
+ OMX_BOOL bEnabled;
+} OMX_TI_PARAM_SKIP_GREY_OUTPUT_FRAMES;
+
+/* ============================================================================= */
+/*
+@brief OMX_TI_PARAM_DECMETADATA : Structure to enable different codec metadata
+ for video decoders.
+*/
+/* ============================================================================= */
+typedef struct OMX_TI_PARAM_DECMETADATA {
+ OMX_U32 nSize;
+ OMX_VERSIONTYPE nVersion;
+ OMX_U32 nPortIndex;
+ OMX_BOOL bEnableMBInfo;
+ OMX_BOOL bEnableTranscodeMode;
+ OMX_BOOL bEnableSEIInfo;
+ OMX_BOOL bEnableVUIInfo;
+} OMX_TI_PARAM_DECMETADATA;
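+
+/*
+ * Illustrative note (hypothetical usage): this parameter is listed against
+ * OMX_TI_IndexParamVideoEnableMetadata, so a decoder client that wants SEI and
+ * VUI data reported on its output port could set, for example,
+ *   meta.bEnableSEIInfo = OMX_TRUE;
+ *   meta.bEnableVUIInfo = OMX_TRUE;
+ * and pass the structure through OMX_SetParameter().
+ */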
+
+/**
+ ******************************************************************************
+ * @enum OMX_TI_VIDEO_MBERRSTATUS
+ * @brief This enum indicates if a MB was in error or not
+ *
+ ******************************************************************************
+*/
+typedef enum {
+ OMX_TI_VIDEO_MB_NOERROR = 0,
+ /**
+ * MB was non-erroneous
+ */
+ OMX_TI_VIDEO_MB_ERROR = 1
+ /**
+ * MB was erroneous
+ */
+} OMX_TI_VIDEO_MBERRSTATUS;
+
+
+/**
+ * Macro definitions required for SEI support: HRD sequence parameter set
+ */
+#define OMX_TI_VIDEO_H264VDEC_MAXCPBCNT 32
+
+/**
+ * Macro definitions required for SEI support: HRD sequence parameter set
+ */
+#define OMX_TI_VIDEO_H264VDEC_MAXUSERDATA_PAYLOAD 300
+
+/**
+ ******************************************************************************
+ * @struct OMX_TI_VIDEO_H264VDEC_HrdParams
+ *
+ * @brief This structure contains the HRD parameter elements.
+ *
+ * @param cpb_cnt_minus1 : Number of alternative CPB specifications in the
+ * bit-stream
+ * @param bit_rate_scale : Together with bit_rate_value[i], it specifies the
+ * maximum input bit-rate for the ith CPB.
+ * @param cpb_size_scale : Together with cpb_size_value[i], specifies the
+ * maximum CPB size for the ith CPB.
+ * @param bit_rate_value_minus1[OMX_TI_VIDEO_H264VDEC_MAXCPBCNT] :Maximum input bitrate
+ * for the ith CPB
+ * @param cpb_size_value_minus1[OMX_TI_VIDEO_H264VDEC_MAXCPBCNT] :Maximum CPB size for the
+ * ith CPB
+ * @param vbr_cbr_flag[OMX_TI_VIDEO_H264VDEC_MAXCPBCNT] :Specifies the ith CPB is operated
+ * in Constant Bit-rate mode or variable bit-rate mode
+ * @param initial_cpb_removal_delay_length_minus1 :Length in bits of
+ * initial_cpb_removal_length syntax element
+ * @param cpb_removal_delay_length_minus1 :Length in bits of
+ * cpb_removal_delay_length syntax element
+ * @param dpb_output_delay_length_minus1 :Length in bits of
+ * dpb_output_delay_length syntax element
+ * @param time_offset_length : Length in bits of time_offset syntax element
+ ******************************************************************************
+*/
+typedef struct OMX_TI_VIDEO_H264VDEC_HrdParams {
+ OMX_U32 cpb_cnt_minus1;
+ OMX_U8 bit_rate_scale;
+ OMX_U8 cpb_size_scale;
+ OMX_U32 bit_rate_value_minus1[OMX_TI_VIDEO_H264VDEC_MAXCPBCNT];
+ OMX_U32 cpb_size_value_minus1[OMX_TI_VIDEO_H264VDEC_MAXCPBCNT];
+ OMX_U8 vbr_cbr_flag[OMX_TI_VIDEO_H264VDEC_MAXCPBCNT];
+ OMX_U8 initial_cpb_removal_delay_length_minus1;
+ OMX_U8 cpb_removal_delay_length_minus1;
+ OMX_U8 dpb_output_delay_length_minus1;
+ OMX_U8 time_offset_length;
+} OMX_TI_VIDEO_H264VDEC_HrdParams;
+
+/**
+ ******************************************************************************
+ * @struct OMX_TI_VIDEO_H264VDEC_SVCVuiParams
+ *
+ * @brief This structure contains VUI message syntax elements for scalable
+ * video stream
+ *
+ * @param parsed_flag :1 - Indicates that in the current process call,
+ * contents of the structure is updated
+ * 0 - Indicates contents of the structure is not updated
+ *
+ * @param svc_vui_ext_num_entries_minus1:(svc_vui_ext_num_entries_minus1 + 1)
+ * specifies the number of information
+ * entries that are present in the SVC
+ * VUI parameters extension syntax
+ * structure
+ * @param svc_vui_ext_dependency_id:indicate the max value of DId for the
+ * i-th subset of coded video sequences
+ * @param svc_vui_ext_quality_id:indicate the max value of QId for the
+ * i-th subset of coded video sequences
+ * @param svc_vui_ext_temporal_id: indicate the max value of TId for the
+ * i-th subset of coded video sequences
+ * @param svc_vui_ext_timing_info_present_flag: Flag that tells whether
+ * svc_vui_ext_num_units_in_tick,
+ * svc_vui_ext_time_scale,
+ * svc_vui_ext_fixed_frame_rate_flag
+ * are present for current coded
+ * sequence or not.
+ * @param svc_vui_ext_num_units_in_tick: specifies the value of num_units_in_tick
+ * @param svc_vui_ext_time_scale: specifies the value of time_scale
+ * @param svc_vui_ext_fixed_frame_rate_flag: specifies the value of
+ * fixed_frame_rate_flag
+ * @param svc_vui_ext_nal_hrd_parameters_present_flag:specifies the
+ * value of nal_hrd_parameters_present_flag
+ * @param svc_vui_ext_vcl_hrd_parameters_present_flag: specifies the
+ * value of vcl_hrd_parameters_present_flag
+ * @param svc_vui_ext_low_delay_hrd_flag: specifies the value
+ * of low_delay_hrd_flag
+ * @param svc_vui_ext_pic_struct_present_flag: specifies the value
+ * of pic_struct_present_flag
+ *
+ ******************************************************************************
+*/
+
+typedef struct sOMX_TI_VIDEO_H264VDEC_SVCVuiParams {
+ OMX_U32 parsed_flag;
+ OMX_U16 svc_vui_ext_num_entries_minus1;
+ OMX_U16 svc_vui_ext_dependency_id;
+ OMX_U16 svc_vui_ext_quality_id;
+ OMX_U16 svc_vui_ext_temporal_id;
+ OMX_U16 svc_vui_ext_timing_info_present_flag;
+ OMX_U32 svc_vui_ext_num_units_in_tick;
+ OMX_U32 svc_vui_ext_time_scale;
+ OMX_U16 svc_vui_ext_fixed_frame_rate_flag;
+ OMX_U16 svc_vui_ext_nal_hrd_parameters_present_flag;
+ OMX_U16 svc_vui_ext_vcl_hrd_parameters_present_flag;
+ OMX_U16 svc_vui_ext_low_delay_hrd_flag;
+ OMX_U16 svc_vui_ext_pic_struct_present_flag;
+} OMX_TI_VIDEO_H264VDEC_SVCVuiParams;
+
+/**
+ ******************************************************************************
+ * @struct OMX_TI_VIDEO_H264VDEC_VuiParams
+ *
+ * @brief This structure contains the VUI Sequence Parameter elements.
+ *
+ * @param parsed_flag :1 - Indicates that in the current process call,
+ * contents of the structure is updated
+ * 0 - Indicates contents of the structure is not updated
+ * @param aspect_ratio_info_present_flag :Indicates whether aspect ratio idc
+ * is present or not.
+ * @param aspect_ratio_idc : Aspect ratio of Luma samples
+ * @param sar_width : Horizontal size of sample aspect ratio
+ * @param sar_height : Vertical size of sample aspect ratio
+ * @param overscan_info_present_flag : Cropped decoded pictures are suitable
+ * for display or not.
+ * @param overscan_appropriate_flag : Overscan_appropriate_flag
+ * @param video_signal_type_present_flag : Flag indicates whether
+ * video_format, video_full_range_flag and colour_description_present_
+ * flag are present or not
+ * @param video_format :Video format indexed by a table. For example,PAL/NTSC
+ * @param video_full_range_flag : Black level, luma and chroma ranges. It
+ * should be used for BT.601 compliance
+ * @param colour_description_present_flag:Indicates whether colour_primaries,
+ * transfer_characteristics and matrix_coefficients are present.
+ * @param colour_primaries :Chromaticity co-ordinates of source primaries
+ * @param transfer_characteristics :Opto-electronic transfer characteristics
+ * of the source picture
+ * @param matrix_coefficients :Matrix coefficients for deriving Luma and
+ * chroma data from RGB components.
+ * @param chroma_location_info_present_flag : Flag indicates whether
+ * chroma_sample_loc_type_top field and chroma_sample_loctype
+ * bottom_field are present.
+ * @param chroma_sample_loc_type_top_field : Location of chroma_sample top
+ * field
+ * @param chroma_sample_loc_type_bottom_field :Location of chroma_sample
+ * bottom field
+ * @param timing_info_present_flag :Indicates whether num_units_in_tick,
+ * time_scale, and fixed_frame_rate_flag are present.
+ * @param num_units_in_tick :Number of units of a clock that corresponds to 1
+ * increment of a clock tick counter
+ * @param time_scale :Indicates actual increase in time for 1 increment of a
+ * clock tick counter
+ * @param fixed_frame_rate_flag :Indicates how the temporal distance between
+ * HRD output times of any two output pictures is constrained
+ * @param nal_hrd_parameters_present_flag :Indicates whether
+ * nal_hrd_parameters are present
+ * @param nal_hrd_pars : NAL HRD Parameters
+ * @param vcl_hrd_parameters_present_flag :Indicates whether
+ * vcl_hrd_parameters are present
+ * @param vcl_hrd_pars : VCL HRD Parameters
+ * @param low_delay_hrd_flag :HRD operational mode as in Annex C of the
+ * standard
+ * @param pic_struct_present_flag :Indicates whether picture timing SEI
+ * messages are present
+ * @param bitstream_restriction_flag :Indicates if the bit-stream restriction
+ * parameters are present
+ * @param motion_vectors_over_pic_boundaries_flag :Specifies whether motion
+ * vectors can point to regions outside the picture boundaries
+ * @param max_bytes_per_pic_denom :Maximum number of bytes not exceeded by
+ * the sum of sizes of all VCL NAL units of a single coded picture
+ * @param max_bits_per_mb_denom :Maximum number of bits taken by any coded MB
+ * @param log2_max_mv_length_vertical :Maximum value of any motion vector's
+ * vertical component
+ * @param log2_max_mv_length_horizontal :Maximum value of any motion vector's
+ * horizontal component
+ * @param max_dec_frame_reordering :
+ * @param num_reorder_frames :Maximum number of frames that need to be
+ * re-ordered
+ * @param max_dec_frame_buffering :Size of HRD decoded buffer (DPB) in terms
+ * of frame buffers
+ * @param svcVuiParams : struct instance of vui parameters for svc
+ *
+ ******************************************************************************
+*/
+typedef struct OMX_TI_VIDEO_H264VDEC_VuiParams {
+ OMX_U32 parsed_flag;
+ OMX_U8 aspect_ratio_info_present_flag;
+ OMX_U32 aspect_ratio_idc;
+ OMX_U32 sar_width;
+ OMX_U32 sar_height;
+ OMX_U8 overscan_info_present_flag;
+ OMX_U8 overscan_appropriate_flag;
+ OMX_U8 video_signal_type_present_flag;
+ OMX_U8 video_format;
+ OMX_U8 video_full_range_flag;
+ OMX_U8 colour_description_present_flag;
+ OMX_U8 colour_primaries;
+ OMX_U8 transfer_characteristics;
+ OMX_U8 matrix_coefficients;
+ OMX_U8 chroma_location_info_present_flag;
+ OMX_U32 chroma_sample_loc_type_top_field;
+ OMX_U32 chroma_sample_loc_type_bottom_field;
+ OMX_U8 timing_info_present_flag;
+ OMX_U32 num_units_in_tick;
+ OMX_U32 time_scale;
+ OMX_U8 fixed_frame_rate_flag;
+ OMX_U8 nal_hrd_parameters_present_flag;
+ OMX_TI_VIDEO_H264VDEC_HrdParams nal_hrd_pars;
+ OMX_U8 vcl_hrd_parameters_present_flag;
+ OMX_TI_VIDEO_H264VDEC_HrdParams vcl_hrd_pars;
+ OMX_U8 low_delay_hrd_flag;
+ OMX_U8 pic_struct_present_flag;
+ OMX_U8 bitstream_restriction_flag;
+ OMX_U8 motion_vectors_over_pic_boundaries_flag;
+ OMX_U32 max_bytes_per_pic_denom;
+ OMX_U32 max_bits_per_mb_denom;
+ OMX_U32 log2_max_mv_length_vertical;
+ OMX_U32 log2_max_mv_length_horizontal;
+ OMX_U32 max_dec_frame_reordering;
+ OMX_U32 num_reorder_frames;
+ OMX_U32 max_dec_frame_buffering;
+ OMX_TI_VIDEO_H264VDEC_SVCVuiParams svcVuiParams;
+} OMX_TI_VIDEO_H264VDEC_VuiParams;
+
+/**
+ ******************************************************************************
+ * @struct OMX_TI_VIDEO_H264VDEC_SeiUserDataRegITUT
+ *
+ * @brief This structure contains the user data SEI msg elements
+ *
+ * @param parsed_flag :1 - Indicates that in the current process call,
+ * contents of the structure is updated
+ * 0 - Indicates contents of the structure is not updated
+ * @param num_payload_bytes :Specifies the size of the payload
+ * @param itu_t_t35_country_code : A byte having a value specified as a
+ * country code by ITU-T Recommendation T.35 Annex A
+ * @param itu_t_t35_country_code_extension_byte :A byte having a value
+ * specified as a country code by ITU-T Recommendation T.35 Annex B
+ * @param itu_t_t35_payload_byte[] : A byte containing data registered as
+ * specified by ITU-T Recommendation T.35.
+ * @param dataOverflowFlag: This indicates if payload data is more than the
+ * array size i.e., OMX_TI_VIDEO_H264VDEC_MAXUSERDATA_PAYLOAD.
+ ******************************************************************************
+*/
+typedef struct OMX_TI_VIDEO_H264VDEC_SeiUserDataRegITUT {
+ OMX_U32 parsed_flag;
+ OMX_U32 num_payload_bytes;
+ OMX_U8 itu_t_t35_country_code;
+ OMX_U8 itu_t_t35_country_code_extension_byte;
+ OMX_U8 itu_t_t35_payload_byte[OMX_TI_VIDEO_H264VDEC_MAXUSERDATA_PAYLOAD];
+ OMX_U8 dataOverflowFlag;
+} OMX_TI_VIDEO_H264VDEC_SeiUserDataRegITUT;
+
+/**
+ ******************************************************************************
+ * @struct OMX_TI_VIDEO_H264VDEC_SeiUserDataUnReg
+ *
+ * @brief This structure contains the user data SEI msg elements
+ *
+ * @param parsed_flag :1 - Indicates that in the current process call,
+ * contents of the structure is updated
+ * 0 - Indicates contents of the structure is not updated
+ * @param num_payload_bytes :Specifies the size of the payload
+ * @param uuid_iso_iec_11578 :Value specified as a UUID according to the
+ * procedures of ISO/IEC 11578:1996 Annex A.
+ * @param user_data_payload_byte :Byte containing data having syntax and
+ * semantics as specified by the UUID generator.
+ * @param dataOverflowFlag: This indicates if payload data is more than the
+ * array size i.e., OMX_TI_VIDEO_H264VDEC_MAXUSERDATA_PAYLOAD.
+ ******************************************************************************
+*/
+typedef struct OMX_TI_VIDEO_H264VDEC_SeiUserDataUnReg {
+ OMX_U32 parsed_flag;
+ OMX_U32 num_payload_bytes;
+ OMX_U8 uuid_iso_iec_11578[16];
+ OMX_U8 user_data_payload_byte[OMX_TI_VIDEO_H264VDEC_MAXUSERDATA_PAYLOAD];
+ OMX_U8 dataOverflowFlag;
+} OMX_TI_VIDEO_H264VDEC_SeiUserDataUnReg;
+
+
+/**
+ ******************************************************************************
+ * @struct OMX_TI_VIDEO_H264VDEC_SeiBufferingPeriod
+ *
+ * @brief This structure contains the buffering period SEI msg elements
+ *
+ * @param parsed_flag :1 - Indicates that in the current process call,
+ * contents of the structure is updated
+ * 0 - Indicates contents of the structure is not updated
+ * @param seq_parameter_set_id :Specifies the sequence parameter set that
+ * contains the sequence HRD attributes
+ * @param nal_cpb_removal_delay :Specifies the delay for the indexed NAL CPB
+ * between the time of arrival in the CPB of the first bit of the
+ * coded data associated with the access unit associated with the
+ * buffering period SEI message and the time of removal from the CPB
+ * of the coded data associated with the same access unit, for the
+ * first buffering period after HRD initialization.
+ * @param nal_cpb_removal_delay_offset :Used for the indexed NAL CPB in
+ * combination with the cpb_removal_delay to specify the initial
+ * delivery time of coded access units to the CPB
+ * @param vcl_cpb_removal_delay :Specifies the delay for the indexed VCL CPB
+ * between the time of arrival in the CPB of the first bit of the
+ * coded data associated with the access unit associated with the
+ * buffering period SEI message and the time of removal from the CPB
+ * of the coded data associated with the same access unit, for the
+ * first buffering period after HRD initialization.
+ * @param vcl_cpb_removal_delay_offset :Used for the indexed VCL CPB in
+ * combination with the cpb_removal_delay to specify the initial
+ * delivery time of coded access units to the CPB
+ ******************************************************************************
+*/
+typedef struct OMX_TI_VIDEO_H264VDEC_SeiBufferingPeriod {
+ OMX_U32 parsed_flag;
+ OMX_U32 seq_parameter_set_id;
+ OMX_U32 nal_cpb_removal_delay[OMX_TI_VIDEO_H264VDEC_MAXCPBCNT];
+ OMX_U32 nal_cpb_removal_delay_offset[OMX_TI_VIDEO_H264VDEC_MAXCPBCNT];
+ OMX_U32 vcl_cpb_removal_delay[OMX_TI_VIDEO_H264VDEC_MAXCPBCNT];
+ OMX_U32 vcl_cpb_removal_delay_offset[OMX_TI_VIDEO_H264VDEC_MAXCPBCNT];
+}OMX_TI_VIDEO_H264VDEC_SeiBufferingPeriod;
+/**
+ ******************************************************************************
+ * @struct OMX_TI_VIDEO_H264VDEC_SeiPanScanRect
+ *
+ * @brief This structure contains the pan scan rectangle SEI msg elements
+ *
+ * @param parsed_flag :1 - Indicates that in the current process call,
+ * contents of the structure is updated
+ * 0 - Indicates contents of the structure is not updated
+ * @param pan_scan_rect_id :Specifies an identifying number that may be used
+ * to identify the purpose of the pan-scan rectangle
+ * @param pan_scan_rect_cancel_flag :Equal to 1 indicates that the SEI
+ * message cancels the persistence of any previous pan-scan
+ * rectangle SEI message in output order.
+ * pan_scan_rect_cancel_flag equal to 0 indicates that
+ * pan-scan rectangle information follows.
+ * @param pan_scan_cnt_minus1 :Specifies the number of pan-scan rectangles
+ * that are present in the SEI message
+ * @param pan_scan_rect_left_offset :Specifies as signed integer quantities
+ * in units of one-sixteenth sample spacing relative to the luma
+ * sampling grid, the location of the pan-scan rectangle
+ * @param pan_scan_rect_right_offset :Specifies as signed integer quantities
+ * in units of one-sixteenth sample spacing relative to the luma
+ * sampling grid, the location of the pan-scan rectangle
+ * @param pan_scan_rect_top_offset : Top offset
+ * @param pan_scan_rect_bottom_offset : Bottom offset
+ * @param pan_scan_rect_repetition_period :Specifies the persistence of the
+ * pan-scan rectangle SEI message and may specify a picture order
+ * count interval within which another pan-scan rectangle SEI message
+ * with the same value of pan_scan_rect_id or the end of the coded
+ * video sequence shall be present in the bit-stream
+ ******************************************************************************
+*/
+typedef struct OMX_TI_VIDEO_H264VDEC_SeiPanScanRect {
+ OMX_U32 parsed_flag;
+ OMX_U32 pan_scan_rect_id;
+ OMX_U32 pan_scan_rect_cancel_flag;
+ OMX_U32 pan_scan_cnt_minus1;
+ OMX_S32 pan_scan_rect_left_offset[3];
+ OMX_S32 pan_scan_rect_right_offset[3];
+ OMX_S32 pan_scan_rect_top_offset[3];
+ OMX_S32 pan_scan_rect_bottom_offset[3];
+ OMX_U32 pan_scan_rect_repetition_period;
+} OMX_TI_VIDEO_H264VDEC_SeiPanScanRect;
+
+/**
+ ******************************************************************************
+ * @struct OMX_TI_VIDEO_H264VDEC_SeiProgRefineStart
+ *
+ * @brief This structure contains the progressive refinement start SEI msg
+ * elements
+ *
+ * @param parsed_flag :1 - Indicates that in the current process call,
+ * contents of the structure is updated
+ * 0 - Indicates contents of the structure is not updated
+ * @param progressive_refinement_id :Specifies an identification number for
+ * the progressive refinement operation.
+ * @param num_refinement_steps_minus1 :Specifies the number of reference
+ * frames in the tagged set of consecutive coded pictures
+ ******************************************************************************
+*/
+typedef struct OMX_TI_VIDEO_H264VDEC_SeiProgRefineStart {
+ OMX_U32 parsed_flag;
+ OMX_U32 progressive_refinement_id;
+ OMX_U32 num_refinement_steps_minus1;
+} OMX_TI_VIDEO_H264VDEC_SeiProgRefineStart;
+/**
+ ******************************************************************************
+ * @struct OMX_TI_VIDEO_H264VDEC_SeiProgRefineEnd
+ *
+ * @brief This structure contains the progressive refinement end SEI msg
+ * elements
+ *
+ * @param parsed_flag :1 - Indicates that in the current process call,
+ * contents of the structure is updated
+ * 0 - Indicates contents of the structure is not updated
+ * @param progressive_refinement_id :Specifies an identification number for
+ * the progressive refinement operation.
+ ******************************************************************************
+*/
+typedef struct OMX_TI_VIDEO_H264VDEC_SeiProgRefineEnd {
+ OMX_U32 parsed_flag;
+ OMX_U32 progressive_refinement_id;
+} OMX_TI_VIDEO_H264VDEC_SeiProgRefineEnd;
+/**
+ ******************************************************************************
+ * @struct OMX_TI_VIDEO_H264VDEC_SeiRecoveryPointInfo
+ *
+ * @brief This structure contains the Recovery Point Info SEI msg elements
+ *
+ * @param parsed_flag :1 - Indicates that in the current process call,
+ * contents of the structure is updated
+ * 0 - Indicates contents of the structure is not updated
+ * @param exact_match_flag :Indicates whether decoded pictures at and
+ * subsequent to the specified recovery point in output order derived
+ * by starting the decoding process at the access unit associated with
+ * the recovery point SEI message, will be an exact match to the
+ * pictures that would be produced by starting the decoding process
+ * at the location of a previous IDR access unit in the NAL unit stream.
+ * @param recovery_frame_cnt :Specifies the recovery point of output pictures
+ * in output order
+ * @param broken_link_flag :Indicates the presence or absence of a broken
+ * link in the NAL unit stream
+ * @param changing_slice_group_idc :Indicates whether decoded pictures are
+ * correct or approximately correct in content at and subsequent to
+ * the recovery point in output order when all macro-blocks of the
+ * primary coded pictures are decoded within the changing slice group
+ * period.
+ ******************************************************************************
+*/
+typedef struct OMX_TI_VIDEO_H264VDEC_SeiRecoveryPointInfo {
+ OMX_U32 parsed_flag;
+ OMX_U32 recovery_frame_cnt;
+ OMX_U32 exact_match_flag;
+ OMX_U32 broken_link_flag;
+ OMX_U32 changing_slice_group_idc;
+} OMX_TI_VIDEO_H264VDEC_SeiRecoveryPointInfo;
+
+/**
+ ******************************************************************************
+ * @struct OMX_TI_VIDEO_H264VDEC_SeiPictureTiming
+ *
+ * @brief This structure contains the picture timing SEI msg elements
+ *
+ * @param parsed_flag :1 - Indicates that in the current process call,
+ * contents of the structure is updated
+ * 0 - Indicates contents of the structure is not updated
+ * @param NumClockTs :
+ * @param cpb_removal_delay :Specifies how many clock ticks to wait after
+ * removal from the CPB of the access unit associated with the
+ * most recent buffering period SEI message before removing from
+ * the buffer the access unit data associated with the picture
+ * timing SEI message.
+ * @param dpb_output_delay : Used to compute the DPB output time of the
+ * picture.
+ * @param pic_struct : Indicates whether a picture should be displayed as
+ * a frame or field
+ * @param clock_time_stamp_flag[4]:1 - Indicates number of clock timestamp
+ * syntax elements present and follow immediately
+ * 0 - Indicates associated clock timestamp syntax
+ * elements not present
+ * @param ct_type[4] : Indicates the scan type(interlaced or progressive)
+ * of the source material
+ * @param nuit_field_based_flag[4] : Used to calculate the clockTimestamp
+ * @param counting_type[4] : Specifies the method of dropping values of
+ * n_frames
+ * @param full_timestamp_flag[4] : 1 - Specifies that the n_frames syntax
+ * element is followed by seconds_value,
+ * minutes_value, and hours_value.
+ * 0 - Specifies that the n_frames syntax
+ * element is followed by seconds_flag
+ * @param discontinuity_flag[4] : Indicates whether the difference between
+ * the current value of clockTimestamp and the value of
+ * clockTimestamp computed from the previous clockTimestamp in
+ * output order can be interpreted as the time difference between
+ * the times of origin or capture of the associated frames or
+ * fields.
+ * @param cnt_dropped_flag[4] : Specifies the skipping of one or more
+ * values of n_frames using the counting method
+ * @param n_frames[4] : Specifies the value of nFrames used to compute
+ * clockTimestamp.
+ * @param seconds_flag[4] : equal to 1 specifies that seconds_value and
+ * minutes_flag are present when
+ * full_timestamp_flag is equal to 0.
+ * @param minutes_flag[4] : equal to 1 specifies that minutes_value and
+ * hours_flag are present when full_timestamp_flag
+ * is equal to 0 and seconds_flag is equal to 1.
+ * @param hours_flag[4] : equal to 1 specifies that hours_value is
+ * present when full_timestamp_flag is equal to 0
+ * and seconds_flag is equal to 1 and minutes_flag
+ * is equal to 1.
+ * @param seconds_value[4] : Specifies the value of sS used to compute
+ * clockTimestamp.
+ * @param minutes_value[4] : Specifies the value of mM used to compute
+ * clockTimestamp.
+ * @param hours_value[4] : Specifies the value of tOffset used to compute
+ * clockTimestamp
+ * @param time_offset[4] : Specifies the value of tOffset used to compute
+ * clockTimestamp
+ ******************************************************************************
+*/
+typedef struct OMX_TI_VIDEO_H264VDEC_SeiPictureTiming {
+ OMX_U32 parsed_flag;
+ OMX_U32 NumClockTs;
+ OMX_U32 cpb_removal_delay;
+ OMX_U32 dpb_output_delay;
+ OMX_U32 pic_struct;
+ OMX_U32 clock_time_stamp_flag[4];
+ OMX_U32 ct_type[4];
+ OMX_U32 nuit_field_based_flag[4];
+ OMX_U32 counting_type[4];
+ OMX_U32 full_timestamp_flag[4];
+ OMX_U32 discontinuity_flag[4];
+ OMX_U32 cnt_dropped_flag[4];
+ OMX_U32 n_frames[4];
+ OMX_U32 seconds_flag[4];
+ OMX_U32 minutes_flag[4];
+ OMX_U32 hours_flag[4];
+ OMX_U32 seconds_value[4];
+ OMX_U32 minutes_value[4];
+ OMX_U32 hours_value[4];
+ OMX_S32 time_offset[4];
+}OMX_TI_VIDEO_H264VDEC_SeiPictureTiming;
+/**
+ ******************************************************************************
+ * @struct OMX_TI_VIDEO_H264VDEC_SeiFullFrameFreezeRep
+ *
+ * @brief This structure contains the full frame freeze repetition SEI msg
+ * elements
+ *
+ * @param parsed_flag :1 - Indicates that in the current process call,
+ * contents of the structure is updated
+ * 0 - Indicates contents of the structure is not updated
+ * @param full_frame_freeze_repetition_period :Specifies the persistence of
+ * the full-frame freeze SEI message
+ ******************************************************************************
+*/
+typedef struct OMX_TI_VIDEO_H264VDEC_SeiFullFrameFreezeRep {
+ OMX_U32 parsed_flag;
+ OMX_U32 full_frame_freeze_repetition_period;
+} OMX_TI_VIDEO_H264VDEC_SeiFullFrameFreezeRep;
+
+/**
+ ******************************************************************************
+ * @struct OMX_TI_VIDEO_H264VDEC_SeiFullFrameFreezeRel
+ *
+ * @brief This structure contains frame freeze release SEI msg elements
+ *
+ * @param parsed_flag :1 - Indicates that in the current process call,
+ *             contents of the structure are updated
+ *                     0 - Indicates contents of the structure are not updated
+ * @param payloadSize : Size of the frame_freeze_release payload
+ ******************************************************************************
+*/
+typedef struct OMX_TI_VIDEO_H264VDEC_SeiFullFrameFreezeRel {
+ OMX_U32 parsed_flag;
+ OMX_U32 payloadSize;
+} OMX_TI_VIDEO_H264VDEC_SeiFullFrameFreezeRel;
+
+/**
+ ******************************************************************************
+ * @struct OMX_TI_VIDEO_H264VDEC_SeiStereoVideoInfo
+ *
+ * @brief This structure contains stereo video information SEI msg elements
+ *
+ * @param parsed_flag :1 - Indicates that in the current process call,
+ *             contents of the structure are updated
+ *                     0 - Indicates contents of the structure are not updated
+ * @param field_views_flag : 1 - indicates that all pictures in the current
+ * coded video sequence are fields
+ * 0 - indicates that all pictures in the current
+ * coded video sequence are frames.
+ * @param top_field_is_left_view_flag :
+ * 1 - top field is a left view.
+ *                      0 - top field is a right view.
+ * @param current_frame_is_left_view_flag :
+ * 1 - current frame is left view.
+ * 0 - current frame is right view.
+ * @param next_frame_is_second_view_flag :
+ * 1 - current picture and a next picture in
+ * output order form a stereo video pair.
+ * 0 - current picture and a previous picture in
+ * output order form a stereo video pair.
+ * @param left_view_self_contained_flag :
+ * 1 - it will not use right view as a reference
+ * picture for inter prediction
+ * 0 - it may use right view as a reference
+ * picture for inter prediction.
+ * @param right_view_self_contained_flag :
+ * 1 - it will not use left view as a reference
+ * picture for inter prediction
+ * 0 - it may use left view as a reference
+ * picture for inter prediction.
+ ******************************************************************************
+*/
+typedef struct OMX_TI_VIDEO_H264VDEC_SeiStereoVideoInfo {
+ OMX_U32 parsed_flag;
+ OMX_U32 field_views_flag;
+ OMX_U32 top_field_is_left_view_flag;
+ OMX_U32 current_frame_is_left_view_flag;
+ OMX_U32 next_frame_is_second_view_flag;
+ OMX_U32 left_view_self_contained_flag;
+ OMX_U32 right_view_self_contained_flag;
+} OMX_TI_VIDEO_H264VDEC_SeiStereoVideoInfo;
+
+/**
+ ******************************************************************************
+ * @struct OMX_TI_VIDEO_H264VDEC_SeiFramePacking
+ *
+ * @brief This structure contains frame packing arrangement SEI msg elements
+ *
+ * @param frame_packing_arrangement_id :
+ * contains an identifying number that may be used to identify
+ * the usage of the frame packing arrangement SEI message.
+ * @param frame_packing_arrangement_cancel_flag :
+ *              1 - indicates that the frame packing arrangement SEI message
+ *                  cancels the persistence of any previous frame packing
+ *                  arrangement SEI message in output order.
+ *              0 - indicates that frame packing arrangement info follows
+ * @param frame_packing_arrangement_type :
+ * indicates the type of packing arrangement of the frames
+ * @param quincunx_sampling_flag :
+ * 1 - indicates that each color component plane of each
+ * constituent frame is quincunx sampled
+ * 0 - indicates that each color component plane of each
+ * constituent frame is not quincunx sampled
+ * @param content_interpretation_type :
+ * 1 - frame 0 being associated with the left view and frame 1
+ * being associated with the right view
+ * 2 - frame 0 being associated with the right view and frame 1
+ * being associated with the left view
+ * @param spatial_flipping_flag :
+ *              1 - spatial flipping is enabled for one of the constituent
+ *                  frames, if frame_packing_arrangement_type is 3 or 4.
+ *              0 - neither constituent frame is spatially flipped, if
+ *                  frame_packing_arrangement_type is 3 or 4.
+ * @param frame0_flipped_flag :
+ * 1 - frame 0 is spatially flipped
+ * 0 - frame 1 is spatially flipped
+ * @param field_views_flag :
+ * 1 - indicates that all pictures in the current coded video
+ * sequence are coded as complementary field pairs.
+ * 0 - indicates that all pictures in the current coded video
+ *                  sequence are coded as frames.
+ * @param current_frame_is_frame0_flag :
+ * 1 - indicates that the current decoded frame is constituent
+ * frame 0 and the next decoded frame in output order
+ * is constituent frame 1.
+ * 0 - indicates that the current decoded frame is constituent
+ * frame 1 and the next decoded frame in output order
+ * is constituent frame 0.
+ * @param frame0_self_contained_flag :
+ *              1 - indicates that constituent frame 0 does not depend on
+ *                  constituent frame 1 in the decoding process
+ *              0 - indicates that constituent frame 0 may depend on
+ *                  constituent frame 1 in the decoding process
+ * @param frame1_self_contained_flag :
+ *              1 - indicates that constituent frame 1 does not depend on
+ *                  constituent frame 0 in the decoding process
+ *              0 - indicates that constituent frame 1 may depend on
+ *                  constituent frame 0 in the decoding process
+ * @param frame0_grid_position_x :
+ * specifies the horizontal location of the upper left
+ * sample of constituent frame 0 in the units of one
+ * sixteenth of the luma samples
+ * @param frame0_grid_position_y :
+ * specifies the vertical location of the upper left
+ * sample of constituent frame 0 in the units of one
+ * sixteenth of the luma samples
+ * @param frame1_grid_position_x :
+ * specifies the horizontal location of the upper left
+ * sample of constituent frame 1 in the units of one
+ * sixteenth of the luma samples
+ * @param frame1_grid_position_y :
+ * specifies the vertical location of the upper left
+ * sample of constituent frame 1 in the units of one
+ * sixteenth of the luma samples
+ * @param frame_packing_arrangement_reserved_byte :
+ *              reserved for future use.
+ * @param frame_packing_arrangement_repetition_period :
+ * specifies the persistence of the frame packing arrangement
+ * SEI message and may specify a frame order count interval
+ * within which another frame packing arrangement SEI message
+ * with the same value of frame_packing_arrangement_id or the
+ * end of the coded video sequence shall be present in the
+ * bitstream.
+ * @param frame_packing_arrangement_extension_flag :
+ * 0 - indicates that no additional data follows within the
+ * frame packing arrangement SEI message.
+ *              1 - Reserved for future use.
+ ******************************************************************************
+*/
+typedef struct OMX_TI_VIDEO_H264VDEC_SeiFramePacking {
+ OMX_U32 parsed_flag;
+ OMX_U32 frame_packing_arrangement_id;
+ OMX_U32 frame_packing_arrangement_repetition_period;
+ OMX_U8 frame_packing_arrangement_cancel_flag;
+ OMX_U8 frame_packing_arrangement_type;
+ OMX_U8 quincunx_sampling_flag;
+ OMX_U8 content_interpretation_type;
+ OMX_U8 spatial_flipping_flag;
+ OMX_U8 frame0_flipped_flag;
+ OMX_U8 field_views_flag;
+ OMX_U8 current_frame_is_frame0_flag;
+ OMX_U8 frame0_self_contained_flag;
+ OMX_U8 frame1_self_contained_flag;
+ OMX_U8 frame0_grid_position_x;
+ OMX_U8 frame0_grid_position_y;
+ OMX_U8 frame1_grid_position_x;
+ OMX_U8 frame1_grid_position_y;
+ OMX_U8 frame_packing_arrangement_reserved_byte;
+ OMX_U8 frame_packing_arrangement_extension_flag;
+} OMX_TI_VIDEO_H264VDEC_SeiFramePacking;
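+
+/*
+ * Usage sketch (illustrative only): deriving a coarse stereo layout from a
+ * parsed frame packing arrangement SEI. The type values used below
+ * (3 = side-by-side, 4 = top-bottom, 5 = temporal interleaving) follow the
+ * H.264 specification; the enum is an illustrative helper, not part of this
+ * header.
+ *
+ *   typedef enum { LAYOUT_MONO, LAYOUT_SIDE_BY_SIDE, LAYOUT_TOP_BOTTOM,
+ *                  LAYOUT_FRAME_SEQUENTIAL, LAYOUT_UNKNOWN } stereo_layout_t;
+ *
+ *   stereo_layout_t get_stereo_layout(const OMX_TI_VIDEO_H264VDEC_SeiFramePacking *fp)
+ *   {
+ *       if (!fp->parsed_flag || fp->frame_packing_arrangement_cancel_flag)
+ *           return LAYOUT_MONO;
+ *       switch (fp->frame_packing_arrangement_type) {
+ *       case 3:  return LAYOUT_SIDE_BY_SIDE;
+ *       case 4:  return LAYOUT_TOP_BOTTOM;
+ *       case 5:  return LAYOUT_FRAME_SEQUENTIAL;
+ *       default: return LAYOUT_UNKNOWN;
+ *       }
+ *   }
+ */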
+
+
+/**
+ ******************************************************************************
+ * @struct OMX_TI_VIDEO_H264VDEC_SeiMessages
+ *
+ * @brief This structure contains all the supported SEI msg objects
+ *
+ * @param parsed_flag :1 - Indicates that in the current process call,
+ *             contents of the structure are updated
+ *             0 - Indicates contents of the structure are not updated
+ * @param full_frame_freeze : Full-frame freeze SEI message
+ * @param full_frame_freeze_release :Cancels the effect of any full-frame
+ * freeze SEI message sent with pictures that precede the current
+ * picture in the output order.
+ * @param prog_refine_start :Specifies the beginning of a set of consecutive
+ * coded pictures that is labeled as the current picture followed
+ * by a sequence of one or more pictures of refinement of the
+ * quality of the current picture, rather than as a representation
+ * of a continually moving scene.
+ * @param prog_refine_end : Specifies end of progressive refinement.
+ * @param user_data_registered :Message contains user data registered as
+ * specified by ITU-T Recommendation T.35
+ * @param user_data_unregistered :Message contains unregistered user data
+ * identified by a UUID
+ * @param buffering_period_info :Message specifies the buffering period
+ * @param pan_scan_rect :Message specifies the coordinates of a rectangle
+ * relative to the cropping rectangle of the sequence parameter set
+ * @param recovery_pt_info :The recovery point SEI message assists a decoder
+ * in determining when the decoding process will produce acceptable
+ * pictures for display after the decoder initiates random access or
+ * after the encoder indicates a broken link in the sequence.
+ * @param pic_timing :Specifies timing information regarding cpb delays, dpb
+ *              output delay, and so on.
+ * @param stereo_video_info :Stereo video information SEI message; describes
+ *              the pair of pictures forming stereo-view content.
+ ******************************************************************************
+*/
+typedef struct OMX_TI_VIDEO_H264VDEC_SeiMessages {
+ OMX_U32 parsed_flag;
+ OMX_TI_VIDEO_H264VDEC_SeiFullFrameFreezeRep full_frame_freeze;
+ OMX_TI_VIDEO_H264VDEC_SeiFullFrameFreezeRel full_frame_freeze_release;
+ OMX_TI_VIDEO_H264VDEC_SeiProgRefineStart prog_refine_start;
+ OMX_TI_VIDEO_H264VDEC_SeiProgRefineEnd prog_refine_end;
+ OMX_TI_VIDEO_H264VDEC_SeiUserDataRegITUT user_data_registered;
+ OMX_TI_VIDEO_H264VDEC_SeiUserDataUnReg user_data_unregistered;
+ OMX_TI_VIDEO_H264VDEC_SeiBufferingPeriod buffering_period_info;
+ OMX_TI_VIDEO_H264VDEC_SeiPanScanRect pan_scan_rect;
+ OMX_TI_VIDEO_H264VDEC_SeiRecoveryPointInfo recovery_pt_info;
+ OMX_TI_VIDEO_H264VDEC_SeiPictureTiming pic_timing;
+ OMX_TI_VIDEO_H264VDEC_SeiStereoVideoInfo stereo_video_info;
+ OMX_TI_VIDEO_H264VDEC_SeiFramePacking frame_packing;
+} OMX_TI_VIDEO_H264VDEC_SeiMessages;
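+
+/*
+ * Usage sketch (illustrative only): each sub-message carries its own
+ * parsed_flag, so an application should test the top-level flag and then the
+ * per-message flags before reading any field. How the decoder exposes this
+ * structure to the application (e.g. through vendor extra data on the output
+ * port) is not defined by this header and is assumed here.
+ *
+ *   void handle_sei(const OMX_TI_VIDEO_H264VDEC_SeiMessages *sei)
+ *   {
+ *       if (!sei->parsed_flag)
+ *           return;
+ *       if (sei->pic_timing.parsed_flag) {
+ *           // picture timing fields are valid for this frame
+ *       }
+ *       if (sei->frame_packing.parsed_flag) {
+ *           // stereo/frame packing arrangement fields are valid
+ *       }
+ *   }
+ */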
+
+
+/**
+ ******************************************************************************
+ * @struct OMX_TI_VIDEO_H264VDEC_ErrConcealStr
+ * @brief This structure holds the information required for implementing the
+ *        SVC error concealment (EC); it is updated by the H.264 decoder
+ *        while decoding the SVC Base/Target layers
+ *
+ * @param CurrMbInfoBufPointer :Base Address of the current decoded frame
+ * MB Info buffer
+ *
+ * @param CurrMbStatusBufPointer: Base Address of the current decoded frame
+ *                                MB status buffer pointer
+ *
+ * @param currFrameY : Base Address of the current decoded Luma
+ * frame buffer pointer (physical pointer)
+ *
+ * @param currFrameUV : Base Address of the current decoded Chroma
+ * frame buffer pointer (physical pointer)
+ *
+ * @param refConclY : Base Address of the ref decoded Luma
+ * frame buffer pointer (virtual pointer)
+ *
+ * @param refConclUV : Base Address of the ref decoded Chroma
+ * frame buffer pointer (virtual pointer)
+ *
+ * @param TilerBaseAddress : TBA value for the VDMA
+ *
+ * @param pSliceInfoFlags : Flag to enable slice info
+ *
+ * @param ref_width : Resultant Horizontal LUMA picture size
+ * after Pad size addition on both Left
+ * & Right sides. This gets used as
+ * stride during vDMA programming.
+ *                            In case of TILER, the stride is fixed,
+ *                            independent of picture width, and
+ * only changes with TILER mode.
+ *
+ * @param ref_width_c : Resultant Horizontal CHROMA picture size
+ * after Pad size addition on both Left &
+ * Right sides.
+ *
+ *
+ * @param ref_frame_height : In case of interlaced streams, the picture
+ *                           store is different, i.e., each field is stored
+ * by applying PAD on top & bottom lines.
+ * Hence the picture height will be Height
+ * plus four times the Pad size. This
+ * variable holds this resultant value.
+ *
+ * @param mb_width : Picture width in terms of Macroblocks
+ *
+ * @param mb_height : Picture height in terms of Macroblocks.
+ *
+ * @param image_width : Image width of the decoded frame
+ *
+ * @param image_height : Image height of the decoded frame
+ *
+ * @param frameType : Frame type of the current frame.
+ *
+ * @param picaff_frame : Flag to indicate whether current picture
+ * is of Frame type & referring to Field
+ * picture as reference.
+ *
+ * @param mb_aff_frame_flag : Flag to indicate whether the current
+ * decoding picture is MBAFF type.
+ *
+ * @param field_pic_flag : Flag to indicate whether the current
+ * decoding picture is field type.
+ *
+ * @param bottom_field_flag : This parameter equal to 1 specifies that
+ * the slice is part of a coded bottom field.
+ *                             bottom_field_flag equal to 0 specifies
+ * that the picture is a coded top field.
+ *
+ * @param nonPairedFieldPic : Flag to indicate Non paired field picture.
+ *
+ * @param prev_pic_bottom_field : Flag that indicates whether the previous
+ *                                picture was a bottom field or not
+ ******************************************************************************
+*/
+
+typedef struct OMX_TI_VIDEO_H264VDEC_ErrConcealStr {
+ OMX_S32 ErrConcealmentEnable;
+ OMX_S32 CurrMbInfoBufPointer;
+ OMX_S32 CurrMbStatusBufPointer;
+ OMX_S32 CurrMbInfoIresBufPointer;
+ OMX_S32 currFrameY;
+ OMX_S32 currFrameUV;
+ OMX_S32 refConclY;
+ OMX_S32 refConclUV;
+ OMX_U32 TilerBaseAddress;
+ OMX_U16 ref_width;
+ OMX_U16 ref_width_c;
+ OMX_U16 ref_frame_height;
+ OMX_U16 mb_width;
+ OMX_U16 mb_height;
+ OMX_U16 image_width;
+ OMX_U16 image_height;
+ OMX_U8 frameType;
+ OMX_U8 picaff_frame;
+ OMX_U8 mb_aff_frame_flag;
+ OMX_U8 field_pic_flag;
+ OMX_U8 bottom_field_flag;
+ OMX_U8 nonPairedFieldPic;
+ OMX_U8 prev_pic_bottom_field;
+}OMX_TI_VIDEO_H264VDEC_ErrConcealStr;
+
+/**
+ * Size of sliceinfo flags - We have two slice info flag arrays in SL2, one
+ * for ECD3 and the other for MC3. ECD3 flag is one bit per MB. Since Maximum
+ * supported number of MBs in a frame is 128 x 128 = 16384, we need 16384/8 =
+ * 2048 bytes for the slice info flag array for ECD3. But for the MC3 array,
+ * we always make the next bit also as 1 to enable loading into ping and pong
+ * memories of MCBUF. So we need an extra bit for the MC3 array, to avoid
+ * buffer overflow when the last MB is a new slice. To keep the next SL2 buffer
+ * in 16-byte aligned position (some buffers need it) we round the size to next
+ * multiple of 16, i.e., 2064.
+*/
+#define OMX_TI_VIDEO_SLICEINFO_FLAGSIZE 2064
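+
+/*
+ * Derivation of the value above, for reference: 16384 MBs / 8 bits per byte
+ * = 2048 bytes for ECD3, plus one extra bit for the MC3 ping/pong duplication
+ * = 2049 bytes, rounded up to the next multiple of 16 = 2064 bytes. A build
+ * could check this relation with a negative-array-size trick (illustrative
+ * only, not part of this header):
+ *
+ *   typedef char sliceinfo_flagsize_check
+ *       [(OMX_TI_VIDEO_SLICEINFO_FLAGSIZE ==
+ *         (((16384 / 8) + 1 + 15) / 16) * 16) ? 1 : -1];
+ */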
+
+/**
+ ******************************************************************************
+ * @struct OMX_TI_VIDEO_H264VDEC_ErrConcealLayerStr
+ * @brief This structure holds the information required for implementing the
+ *        SVC error concealment (EC); it is updated by the H.264 decoder
+ *        while decoding the SVC Base/Target layers
+ *
+ * @param sECStr : structure instance of OMX_TI_VIDEO_H264VDEC_ErrConcealStr
+ *
+ * @param pSliceInfoFlags : Array to store the sliceInfo flags
+ *
+ *
+ ******************************************************************************
+*/
+typedef struct OMX_TI_VIDEO_H264VDEC_ErrConcealLayerStr {
+ OMX_TI_VIDEO_H264VDEC_ErrConcealStr sECStr;
+ OMX_U8 pSliceInfoFlags[OMX_TI_VIDEO_SLICEINFO_FLAGSIZE];
+}OMX_TI_VIDEO_H264VDEC_ErrConcealLayerStr;
+
+
+/**
+ ******************************************************************************
+ * @struct OMX_TI_VIDEO_H264VDEC_CommonInfo
+ *
+ * @brief
+ *
+ ******************************************************************************
+*/
+typedef struct OMX_TI_VIDEO_H264VDEC_CommonInfo {
+ OMX_U32 codec_type : 8;
+ OMX_U32 fmt_type : 8;
+ OMX_U32 mb_ll_avail : 1;
+ OMX_U32 mb_ul_avail : 1;
+ OMX_U32 mb_uu_avail : 1;
+ OMX_U32 mb_ur_avail : 1;
+ OMX_U32 pic_bound_l : 1;
+ OMX_U32 pic_bound_u : 1;
+ OMX_U32 pic_bound_r : 1;
+ OMX_U32 pic_bound_b : 1;
+ OMX_U32 first_mb_flag : 1;
+ OMX_U32 error_flag : 1;
+ OMX_U32 zero : 6;
+ OMX_U32 zeroes : 16;
+ OMX_U32 mb_addr : 16;
+
+} OMX_TI_VIDEO_H264VDEC_CommonInfo;
+
+/**
+ ******************************************************************************
+ * @struct OMX_TI_VIDEO_H264VDEC_MotionVector
+ *
+ * @brief
+ *
+ ******************************************************************************
+*/
+typedef struct OMX_TI_VIDEO_H264VDEC_MotionVector {
+ OMX_S16 x;
+ OMX_S16 y;
+} OMX_TI_VIDEO_H264VDEC_MotionVector;
+
+/**
+ ******************************************************************************
+ * @struct OMX_TI_VIDEO_H264VDEC_CabacContext
+ *
+ * @brief
+ *
+ ******************************************************************************
+*/
+typedef struct OMX_TI_VIDEO_H264VDEC_CabacContext {
+ OMX_TI_VIDEO_H264VDEC_MotionVector mvd_l0[4];
+ OMX_TI_VIDEO_H264VDEC_MotionVector mvd_l1[4];
+
+} OMX_TI_VIDEO_H264VDEC_CabacContext;
+
+/**
+ ******************************************************************************
+ * @struct OMX_TI_VIDEO_H264VDEC_TotalCoefLuma
+ *
+ * @brief
+ *
+ ******************************************************************************
+*/
+typedef struct OMX_TI_VIDEO_H264VDEC_TotalCoefLuma {
+ OMX_U8 right[3];
+ OMX_U8 bottom_right;
+ OMX_U8 bottom[3];
+ OMX_U8 zero;
+} OMX_TI_VIDEO_H264VDEC_TotalCoefLuma;
+
+/**
+ ******************************************************************************
+ * @struct OMX_TI_VIDEO_H264VDEC_TotalCoefChroma
+ *
+ * @brief
+ *
+ ******************************************************************************
+*/
+typedef struct OMX_TI_VIDEO_H264VDEC_TotalCoefChroma {
+ OMX_U8 right_cb;
+ OMX_U8 bottom_right_cb;
+ OMX_U8 bottom_cb;
+ OMX_U8 zero;
+ OMX_U8 right_cr;
+ OMX_U8 bottom_right_cr;
+ OMX_U8 bottom_cr;
+ OMX_U8 zero1;
+} OMX_TI_VIDEO_H264VDEC_TotalCoefChroma;
+
+/**
+ ******************************************************************************
+ * @struct OMX_TI_VIDEO_H264VDEC_CavlcContext
+ *
+ * @brief
+ *
+ ******************************************************************************
+*/
+typedef struct OMX_TI_VIDEO_H264VDEC_CavlcContext {
+ unsigned long long zeroes[2];
+ OMX_TI_VIDEO_H264VDEC_TotalCoefLuma total_coef_luma;
+ OMX_TI_VIDEO_H264VDEC_TotalCoefChroma total_coef_chroma;
+
+} OMX_TI_VIDEO_H264VDEC_CavlcContext;
+
+/**
+ ******************************************************************************
+ * @struct OMX_TI_VIDEO_H264VDEC_IntraPredMode
+ *
+ * @brief
+ *
+ ******************************************************************************
+*/
+typedef struct OMX_TI_VIDEO_H264VDEC_IntraPredMode {
+ OMX_U32 ipred_mode0 : 4;
+ OMX_U32 ipred_mode1 : 4;
+ OMX_U32 ipred_mode2 : 4;
+ OMX_U32 ipred_mode3 : 4;
+ OMX_U32 ipred_mode4 : 4;
+ OMX_U32 ipred_mode5 : 4;
+ OMX_U32 ipred_mode6 : 4;
+ OMX_U32 ipred_mode7 : 4;
+ OMX_U32 ipred_mode8 : 4;
+ OMX_U32 ipred_mode9 : 4;
+ OMX_U32 ipred_mode10 : 4;
+ OMX_U32 ipred_mode11 : 4;
+ OMX_U32 ipred_mode12 : 4;
+ OMX_U32 ipred_mode13 : 4;
+ OMX_U32 ipred_mode14 : 4;
+ OMX_U32 ipred_mode15 : 4;
+
+} OMX_TI_VIDEO_H264VDEC_IntraPredMode;
+
+
+/**
+ ******************************************************************************
+ * @struct OMX_TI_VIDEO_H264VDEC_MbPredType
+ *
+ * @brief
+ *
+ ******************************************************************************
+*/
+typedef struct OMX_TI_VIDEO_H264VDEC_MbPredType {
+ OMX_U32 mbskip : 1;
+ OMX_U32 tr8x8 : 1;
+ OMX_U32 mb_field : 1;
+ OMX_U32 cond_mbskip : 1;
+ OMX_U32 c_ipred_mode : 2;
+ OMX_U32 zero : 1;
+ OMX_U32 end_of_slice : 1;
+ OMX_U32 mb_y_mod2 : 1;
+ OMX_U32 zero1 : 7;
+ OMX_U32 refidx_equal_flag_l0 : 1;
+ OMX_U32 refidx_equal_flag_l1 : 1;
+ OMX_U32 mv_equal_flag_l0 : 1;
+ OMX_U32 mv_equal_flag_l1 : 1;
+ OMX_U32 zeroes : 4;
+ OMX_U32 mb_type : 8;
+ OMX_U8 sub_mb_type[4];
+
+} OMX_TI_VIDEO_H264VDEC_MbPredType;
+
+/**
+ ******************************************************************************
+ * @struct OMX_TI_VIDEO_H264VDEC_QpCbp
+ *
+ * @brief
+ *
+ ******************************************************************************
+*/
+typedef struct OMX_TI_VIDEO_H264VDEC_QpCbp {
+ OMX_U32 cbp;
+ OMX_U8 qp_y;
+ OMX_U8 qp_cb;
+ OMX_U8 qp_cr;
+ OMX_U8 zero;
+} OMX_TI_VIDEO_H264VDEC_QpCbp;
+
+/**
+ ******************************************************************************
+ * @struct OMX_TI_VIDEO_H264VDEC_RefPicControl
+ *
+ * @brief
+ *
+ ******************************************************************************
+*/
+typedef struct OMX_TI_VIDEO_H264VDEC_RefPicControl {
+ OMX_U8 refidx[4];
+ OMX_U8 refpicid[4];
+
+} OMX_TI_VIDEO_H264VDEC_RefPicControl;
+
+/**
+ ******************************************************************************
+ * @struct OMX_TI_VIDEO_H264VDEC_MvBidirectional16
+ *
+ * @brief
+ *
+ ******************************************************************************
+*/
+typedef struct OMX_TI_VIDEO_H264VDEC_MvBidirectional16 {
+ OMX_TI_VIDEO_H264VDEC_MotionVector mv_forward[16];
+ OMX_TI_VIDEO_H264VDEC_MotionVector mv_backward[16];
+} OMX_TI_VIDEO_H264VDEC_MvBidirectional16;
+
+
+/**
+ ******************************************************************************
+ * @struct OMX_TI_VIDEO_H264VDEC_MvBidirectional4
+ *
+ * @brief
+ *
+ ******************************************************************************
+*/
+typedef struct OMX_TI_VIDEO_H264VDEC_MvBidirectional4 {
+ OMX_TI_VIDEO_H264VDEC_MotionVector mv_forward[4];
+ OMX_TI_VIDEO_H264VDEC_MotionVector mv_backward[4];
+
+} OMX_TI_VIDEO_H264VDEC_MvBidirectional4;
+
+/**
+ ******************************************************************************
+ * @struct OMX_TI_VIDEO_H264VDEC_MbInfo
+ *
+ * @brief This structure details the data format for MB information shared
+ *        with the application. It helps the application interpret all fields
+ *        the same way the codec uses the MB info internally. This structure
+ *        is 208 bytes in size.
+ *
+ * @param info : This element gives details about the MB placement in the
+ *               frame.
+ *
+ * @param cabac: This field holds the context data for a CABAC coded MB
+ *
+ * @param cavlc: This field holds the context data for a CAVLC coded MB
+ *
+ * @param ipred_mode: This field holds information of intra prediction modes
+ * at 4x4 level, for intra coded MB.
+ *
+ * @param mb_pred_type: This indicates prediction specific details for inter
+ * coded MB
+ *
+ * @param qp_cbp: This gives coded block pattern (CBP) & QP information for
+ *                both LUMA & CHROMA components of a macroblock.
+ *
+ * @param l0_ref_pic_control: Informs all details about reference indices
+ * at 8x8 block level in L0 direction
+ *
+ * @param l1_ref_pic_control: Informs all details about reference indices
+ * at 8x8 block level in L1 direction
+ *
+ * @param mv_forward: Lists all Motion vectors at 4x4 level in L0 direction
+ *
+ * @param bidirectional16: Lists all Motion vectors at 4x4 level in both
+ * directions
+ *
+ * @param bidirectional4: Lists all Motion vectors at 8x8 level in both
+ * directions
+ *
+ ******************************************************************************
+*/
+typedef struct OMX_TI_VIDEO_H264VDEC_MbInfo {
+ OMX_TI_VIDEO_H264VDEC_CommonInfo info;
+
+ union {
+ OMX_TI_VIDEO_H264VDEC_CabacContext cabac;
+ OMX_TI_VIDEO_H264VDEC_CavlcContext cavlc;
+ } OMX_TI_VIDEO_H264VDEC_context;
+
+ OMX_TI_VIDEO_H264VDEC_IntraPredMode ipred_mode;
+ OMX_TI_VIDEO_H264VDEC_MbPredType mb_pred_type;
+ OMX_TI_VIDEO_H264VDEC_QpCbp qp_cbp;
+ OMX_TI_VIDEO_H264VDEC_RefPicControl l0_ref_pic_control;
+ OMX_TI_VIDEO_H264VDEC_RefPicControl l1_ref_pic_control;
+
+ union {
+ OMX_TI_VIDEO_H264VDEC_MotionVector mv_forward[16];
+ OMX_TI_VIDEO_H264VDEC_MvBidirectional16 bidirectional16;
+ OMX_TI_VIDEO_H264VDEC_MvBidirectional4 bidirectional4;
+ } OMX_TI_VIDEO_H264VDEC_motion_vecs;
+
+} OMX_TI_VIDEO_H264VDEC_MbInfo;
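+
+/*
+ * Usage sketch (illustrative only): the comment above states that each entry
+ * is 208 bytes, so an MB info buffer produced by the decoder can be walked
+ * with that stride. The buffer pointer, the MB count and the way the buffer
+ * is handed to the application are assumptions here, not defined by this
+ * header.
+ *
+ *   void scan_mb_info(const OMX_U8 *buf, OMX_U32 num_mbs)
+ *   {
+ *       OMX_U32 i;
+ *       for (i = 0; i < num_mbs; i++) {
+ *           const OMX_TI_VIDEO_H264VDEC_MbInfo *mb =
+ *               (const OMX_TI_VIDEO_H264VDEC_MbInfo *)(buf + i * 208);
+ *           if (mb->info.error_flag) {
+ *               // MB at mb->info.mb_addr was concealed or is in error
+ *           }
+ *       }
+ *   }
+ */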
+
+
+
+/**
+********************************************************************************
+* @struct OMX_TI_VIDEO_VC1VDEC_MbInfo
+*
+* @brief MB information structure that is written out by the IVA-HD hardware.
+*
+* @note None:
+*
+********************************************************************************
+*/
+typedef struct OMX_TI_VIDEO_VC1VDEC_MbInfo {
+ /* MB address */
+ OMX_U8 mb_addr;
+ /* Error flag */
+ OMX_U8 error_flag;
+ /* First MB flag */
+ OMX_U8 first_mb_flag;
+ /* Bottom picture bound */
+ OMX_U8 pic_bound_b;
+ /* Upper picture bound */
+ OMX_U8 pic_bound_u;
+ /* Right picture bound */
+ OMX_U8 pic_bound_r;
+ /* Left picture bound */
+ OMX_U8 pic_bound_l;
+ /* Availability of upper right MB */
+ OMX_U8 mb_ur_avail;
+ /* Availability of upper MB */
+ OMX_U8 mb_uu_avail;
+ /* Availability of upper left MB */
+ OMX_U8 mb_ul_avail;
+ /* Availability of left MB */
+ OMX_U8 mb_ll_avail;
+ /* Macroblock header format type */
+ OMX_U8 fmt_type;
+ /* Codec type */
+ OMX_U8 codec_type;
+ /* Indicates DC values of each Y block in current MB */
+ OMX_U8 dc_coef_q_y[4];
+ /* Indicates DC values of Cr block in current MB */
+ OMX_U8 dc_coef_q_cr;
+ /* Indicates DC values of Cb block in current MB */
+ OMX_U8 dc_coef_q_cb;
+ /* Block type of cr block */
+ OMX_U8 block_type_cr;
+ /* Block type of cb block */
+ OMX_U8 block_type_cb;
+ /* Block types of luma */
+ OMX_U8 block_type_y[4];
+ /* In decoding, if the current macroblock is the last macroblock in a slice,*/
+ /* ECD sets 1 to this field during executing the macroblock. Otherwise, ECD */
+ /* sets 0 to this field */
+ OMX_U8 end_of_slice;
+ /* 1 : allow skipping current MB if CBP = 0 */
+ OMX_U8 cond_skip_flag;
+ /* Skipped / non skipped MB */
+ OMX_U8 skip;
+ /* 1 indicates that overlap filtering is in use for the macroblock. */
+ OMX_U8 overlap;
+ /* 1 indicates that AC prediction is in use for the macroblock */
+ OMX_U8 acpred;
+ /* Denotes inter-prediction direction for the macroblock in B-picture */
+ OMX_U8 b_picture_direction;
+ /* Denotes the number of motion vectors. */
+ OMX_U8 mv_mode;
+ /* 1 indicates that the field transform is in use for the macroblock. */
+ OMX_U8 fieldtx;
+ /* 1 indicates that field inter-prediction is in use */
+ OMX_U8 mv_type;
+ /* Equals the reference frame distance */
+ OMX_U8 refdist;
+ /* 1 indicates that macroblock quantizer-scale (MQUANT) overflows */
+ OMX_U8 mquant_overflow;
+ /* Equals the quantizer-scale for the macroblock */
+ OMX_U8 quant;
+ /* 1 indicates that 0.5 shall be added to PQUANT in calculation of */
+ /* quantizer-scale. This field is valid for decoding only. */
+ OMX_U8 halfqp;
+ /* Equals the DC coefficient step size which is derived from MQUANT in the */
+ /* bit-stream */
+ OMX_U8 dc_step_size;
+ /* Denotes the coded sub-block pattern for cr block */
+ OMX_U8 cbp_cr;
+ /* Denotes the coded sub-block pattern for cb block */
+ OMX_U8 cbp_cb;
+ /* Denotes the coded sub-block pattern for luma blocks */
+ OMX_U8 cbp_y[3];
+ /* Denotes the backward reference field picture */
+ OMX_U8 mv_bw_ref_y[4];
+ /* Denotes the forward reference field picture */
+ OMX_U8 mv_fw_ref_y[3];
+ /* Unclipped forward motion vector for luma */
+ OMX_U8 mv_fw_y[4][4];
+ /* Unclipped backward motion vector for luma */
+ OMX_U8 mv_bw_y[1][1];
+ /* Unclipped backward motion vector for chroma */
+ OMX_U8 mv_bw_c[2];
+ /* Unclipped forward motion vector for chroma */
+ OMX_U8 mv_fw_c[2];
+ /* Clipped forward motion vector for luma */
+ OMX_U8 cmv_fw_y[4][4];
+ /* Clipped backward motion vector for luma */
+ OMX_U8 cmv_bw_y[4][4];
+ /* Clipped forward motion vector for chroma */
+ OMX_U8 cmv_fw_c[4][4];
+ /* Clipped backward motion vector for chroma */
+ OMX_U8 cmv_bw_c[4][4];
+
+}OMX_TI_VIDEO_VC1VDEC_MbInfo;
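+
+/*
+ * Usage sketch (illustrative only): minimal per-MB inspection of the VC-1 MB
+ * info written out by the IVA-HD hardware. The entry stride is assumed to be
+ * sizeof(OMX_TI_VIDEO_VC1VDEC_MbInfo); how the buffer is exported to the
+ * application is not defined by this header.
+ *
+ *   void scan_vc1_mb_info(const OMX_TI_VIDEO_VC1VDEC_MbInfo *mb, OMX_U32 num_mbs)
+ *   {
+ *       OMX_U32 i;
+ *       for (i = 0; i < num_mbs; i++) {
+ *           if (mb[i].error_flag) {
+ *               // macroblock mb[i].mb_addr was flagged as erroneous
+ *           }
+ *       }
+ *   }
+ */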
+
#endif /* OMX_TI_VIDEO_H */
diff --git a/domx/omx_core/inc/OMX_Types.h b/domx/omx_core/inc/OMX_Types.h
index 31be916..2fda817 100755
--- a/domx/omx_core/inc/OMX_Types.h
+++ b/domx/omx_core/inc/OMX_Types.h
@@ -1,30 +1,47 @@
+/* ------------------------------------------------------------------
+ * Copyright (C) 1998-2009 PacketVideo
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
+ * express or implied.
+ * See the License for the specific language governing permissions
+ * and limitations under the License.
+ * -------------------------------------------------------------------
+ */
/*
- * Copyright (c) 2008 The Khronos Group Inc.
- *
+ * Copyright (c) 2008 The Khronos Group Inc.
+ *
* Permission is hereby granted, free of charge, to any person obtaining
* a copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sublicense, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject
- * to the following conditions:
+ * to the following conditions:
* The above copyright notice and this permission notice shall be included
- * in all copies or substantial portions of the Software.
- *
+ * in all copies or substantial portions of the Software.
+ *
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
* OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
- * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+ * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*
*/
/** OMX_Types.h - OpenMax IL version 1.1.2
- * The OMX_Types header file contains the primitive type definitions used by
+ * The OMX_Types header file contains the primitive type definitions used by
* the core, the application and the component. This file may need to be
- * modified to be used on systems that do not have "char" set to 8 bits,
+ * modified to be used on systems that do not have "char" set to 8 bits,
* "short" set to 16 bits and "long" set to 32 bits.
*/
@@ -38,12 +55,12 @@ extern "C" {
/** The OMX_API and OMX_APIENTRY are platform specific definitions used
* to declare OMX function prototypes. They are modified to meet the
* requirements for a particular platform */
-#ifdef __SYMBIAN32__
+#ifdef __SYMBIAN32__
# ifdef __OMX_EXPORTS
# define OMX_API __declspec(dllexport)
# else
# ifdef _WIN32
-# define OMX_API __declspec(dllexport)
+# define OMX_API __declspec(dllexport)
# else
# define OMX_API __declspec(dllimport)
# endif
@@ -65,18 +82,18 @@ extern "C" {
#endif
#ifndef OMX_APIENTRY
-#define OMX_APIENTRY
-#endif
+#define OMX_APIENTRY
+#endif
-/** OMX_IN is used to identify inputs to an OMX function. This designation
- will also be used in the case of a pointer that points to a parameter
+/** OMX_IN is used to identify inputs to an OMX function. This designation
+ will also be used in the case of a pointer that points to a parameter
that is used as an output. */
#ifndef OMX_IN
#define OMX_IN
#endif
-/** OMX_OUT is used to identify outputs from an OMX function. This
- designation will also be used in the case of a pointer that points
+/** OMX_OUT is used to identify outputs from an OMX function. This
+ designation will also be used in the case of a pointer that points
to a parameter that is used as an input. */
#ifndef OMX_OUT
#define OMX_OUT
@@ -84,8 +101,8 @@ extern "C" {
/** OMX_INOUT is used to identify parameters that may be either inputs or
- outputs from an OMX function at the same time. This designation will
- also be used in the case of a pointer that points to a parameter that
+ outputs from an OMX function at the same time. This designation will
+ also be used in the case of a pointer that points to a parameter that
is used both as an input and an output. */
#ifndef OMX_INOUT
#define OMX_INOUT
@@ -103,31 +120,31 @@ extern "C" {
/** @defgroup core OpenMAX IL core
* Functions and structure related to the OMX IL core
*/
-
+
/** @defgroup comp OpenMAX IL component
* Functions and structure related to the OMX IL component
*/
-
-/** @defgroup rpm Resource and Policy Management
+
+/** @defgroup rpm Resource and Policy Management
* Structures for resource and policy management of components
*/
/** @defgroup buf Buffer Management
* Buffer handling functions and structures
*/
-
+
/** @defgroup tun Tunneling
* @ingroup core comp
* Structures and functions to manage tunnels among component ports
*/
-
+
/** @defgroup cp Content Pipes
* @ingroup core
*/
-
+
/** @defgroup metadata Metadata handling
- *
- */
+ *
+ */
/** OMX_U8 is an 8 bit unsigned quantity that is byte aligned */
typedef unsigned char OMX_U8;
@@ -149,7 +166,7 @@ typedef signed long OMX_S32;
/* Users with compilers that cannot accept the "long long" designation should
- define the OMX_SKIP64BIT macro. It should be noted that this may cause
+ define the OMX_SKIP64BIT macro. It should be noted that this may cause
some components to fail to compile if the component was written to require
64 bit integral types. However, these components would NOT compile anyway
since the compiler does not support the way the component was written.
@@ -164,7 +181,7 @@ typedef signed long long OMX_S64;
#elif defined(WIN32)
-/** OMX_U64 is a 64 bit unsigned quantity that is 64 bit word aligned */
+/** OMX_U64 is a 64 bit unsigned quantity that is 64 bit word aligned */
typedef unsigned __int64 OMX_U64;
/** OMX_S64 is a 64 bit signed quantity that is 64 bit word aligned */
@@ -182,7 +199,7 @@ typedef signed long long OMX_S64;
#endif
-/** The OMX_BOOL type is intended to be used to represent a true or a false
+/** The OMX_BOOL type is intended to be used to represent a true or a false
value when passing parameters to and from the OMX core and components. The
OMX_BOOL is a 32 bit quantity and is aligned on a 32 bit word boundary.
*/
@@ -190,8 +207,8 @@ typedef enum OMX_BOOL {
OMX_FALSE = 0,
OMX_TRUE = !OMX_FALSE,
OMX_BOOL_MAX = 0x7FFFFFFF
-} OMX_BOOL;
-
+} OMX_BOOL;
+
/** The OMX_PTR type is intended to be used to pass pointers between the OMX
applications and the OMX Core and components. This is a 32 bit pointer and
is aligned on a 32 bit boundary.
@@ -199,14 +216,14 @@ typedef enum OMX_BOOL {
typedef void* OMX_PTR;
/** The OMX_STRING type is intended to be used to pass "C" type strings between
- the application and the core and component. The OMX_STRING type is a 32
- bit pointer to a zero terminated string. The pointer is word aligned and
- the string is byte aligned.
+ the application and the core and component. The OMX_STRING type is a 32
+ bit pointer to a zero terminated string. The pointer is word aligned and
+ the string is byte aligned.
*/
typedef char* OMX_STRING;
/** The OMX_BYTE type is intended to be used to pass arrays of bytes such as
- buffers between the application and the component and core. The OMX_BYTE
+ buffers between the application and the component and core. The OMX_BYTE
type is a 32 bit pointer to a zero terminated string. The pointer is word
aligned and the string is byte aligned.
*/
@@ -219,7 +236,7 @@ typedef unsigned char* OMX_BYTE;
typedef unsigned char OMX_UUIDTYPE[128];
/** The OMX_DIRTYPE enumeration is used to indicate if a port is an input or
- an output port. This enumeration is common across all component types.
+ an output port. This enumeration is common across all component types.
*/
typedef enum OMX_DIRTYPE
{
@@ -228,8 +245,8 @@ typedef enum OMX_DIRTYPE
OMX_DirMax = 0x7FFFFFFF
} OMX_DIRTYPE;
-/** The OMX_ENDIANTYPE enumeration is used to indicate the bit ordering
- for numerical data (i.e. big endian, or little endian).
+/** The OMX_ENDIANTYPE enumeration is used to indicate the bit ordering
+ for numerical data (i.e. big endian, or little endian).
*/
typedef enum OMX_ENDIANTYPE
{
@@ -239,7 +256,7 @@ typedef enum OMX_ENDIANTYPE
} OMX_ENDIANTYPE;
-/** The OMX_NUMERICALDATATYPE enumeration is used to indicate if data
+/** The OMX_NUMERICALDATATYPE enumeration is used to indicate if data
is signed or unsigned
*/
typedef enum OMX_NUMERICALDATATYPE
@@ -267,16 +284,16 @@ typedef struct OMX_BS32 {
/** Structure representing some time or duration in microseconds. This structure
- * must be interpreted as a signed 64 bit value. The quantity is signed to accommodate
- * negative deltas and preroll scenarios. The quantity is represented in microseconds
+ * must be interpreted as a signed 64 bit value. The quantity is signed to accommodate
+ * negative deltas and preroll scenarios. The quantity is represented in microseconds
* to accomodate high resolution timestamps (e.g. DVD presentation timestamps based
- * on a 90kHz clock) and to allow more accurate and synchronized delivery (e.g.
- * individual audio samples delivered at 192 kHz). The quantity is 64 bit to
+ * on a 90kHz clock) and to allow more accurate and synchronized delivery (e.g.
+ * individual audio samples delivered at 192 kHz). The quantity is 64 bit to
* accommodate a large dynamic range (signed 32 bit values would allow only for plus
* or minus 35 minutes).
*
- * Implementations with limited precision may convert the signed 64 bit value to
- * a signed 32 bit value internally but risk loss of precision.
+ * Implementations with limited precision may convert the signed 64 bit value to
+ * a signed 32 bit value internally but risk loss of precision.
*/
#ifndef OMX_SKIP64BIT
typedef OMX_S64 OMX_TICKS;
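+
+/*
+ * Illustrative example (not part of this header): converting a 90 kHz
+ * presentation timestamp into OMX_TICKS microseconds. The 90 kHz source
+ * clock is only an assumption for the example, echoing the DVD timestamp
+ * case mentioned in the comment above.
+ *
+ *   OMX_TICKS pts_90khz_to_ticks(OMX_S64 pts_90khz)
+ *   {
+ *       return (OMX_TICKS)(pts_90khz * 1000000 / 90000);
+ *   }
+ */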
@@ -296,17 +313,17 @@ typedef void* OMX_HANDLETYPE;
typedef struct OMX_MARKTYPE
{
- OMX_HANDLETYPE hMarkTargetComponent; /**< The component that will
- generate a mark event upon
+ OMX_HANDLETYPE hMarkTargetComponent; /**< The component that will
+ generate a mark event upon
processing the mark. */
- OMX_PTR pMarkData; /**< Application specific data associated with
- the mark sent on a mark event to disambiguate
+ OMX_PTR pMarkData; /**< Application specific data associated with
+ the mark sent on a mark event to disambiguate
this mark from others. */
} OMX_MARKTYPE;
/** OMX_NATIVE_DEVICETYPE is used to map a OMX video port to the
- * platform & operating specific object used to reference the display
+ * platform & operating specific object used to reference the display
* or can be used by a audio port for native audio rendering */
typedef void* OMX_NATIVE_DEVICETYPE;
@@ -317,7 +334,7 @@ typedef void* OMX_NATIVE_WINDOWTYPE;
/** The OMX_VERSIONTYPE union is used to specify the version for
a structure or component. For a component, the version is entirely
specified by the component vendor. Components doing the same function
- from different vendors may or may not have the same version. For
+ from different vendors may or may not have the same version. For
structures, the version shall be set by the entity that allocates the
structure. For structures specified in the OMX 1.1 specification, the
value of the version shall be set to 1.1.0.0 in all cases. Access to the
diff --git a/domx/omx_core/inc/OMX_Video.h b/domx/omx_core/inc/OMX_Video.h
index 163e450..c44a5fe 100755
--- a/domx/omx_core/inc/OMX_Video.h
+++ b/domx/omx_core/inc/OMX_Video.h
@@ -1,29 +1,46 @@
+/* ------------------------------------------------------------------
+ * Copyright (C) 1998-2009 PacketVideo
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
+ * express or implied.
+ * See the License for the specific language governing permissions
+ * and limitations under the License.
+ * -------------------------------------------------------------------
+ */
/**
- * Copyright (c) 2008 The Khronos Group Inc.
- *
+ * Copyright (c) 2008 The Khronos Group Inc.
+ *
* Permission is hereby granted, free of charge, to any person obtaining
* a copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sublicense, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject
- * to the following conditions:
+ * to the following conditions:
* The above copyright notice and this permission notice shall be included
- * in all copies or substantial portions of the Software.
- *
+ * in all copies or substantial portions of the Software.
+ *
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
* OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
- * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+ * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*
*/
-/**
+/**
* @file OMX_Video.h - OpenMax IL version 1.1.2
- * The structures is needed by Video components to exchange parameters
+ * The structures is needed by Video components to exchange parameters
* and configuration data with OMX components.
*/
#ifndef OMX_Video_h
@@ -43,19 +60,19 @@ extern "C" {
/**
* Each OMX header must include all required header files to allow the
* header to compile without errors. The includes below are required
- * for this header file to compile successfully
+ * for this header file to compile successfully
*/
#include <OMX_IVCommon.h>
/**
- * Enumeration used to define the possible video compression codings.
- * NOTE: This essentially refers to file extensions. If the coding is
- * being used to specify the ENCODE type, then additional work
- * must be done to configure the exact flavor of the compression
- * to be used. For decode cases where the user application can
- * not differentiate between MPEG-4 and H.264 bit streams, it is
+ * Enumeration used to define the possible video compression codings.
+ * NOTE: This essentially refers to file extensions. If the coding is
+ * being used to specify the ENCODE type, then additional work
+ * must be done to configure the exact flavor of the compression
+ * to be used. For decode cases where the user application can
+ * not differentiate between MPEG-4 and H.264 bit streams, it is
* up to the codec to handle this.
*/
typedef enum OMX_VIDEO_CODINGTYPE {
@@ -68,58 +85,59 @@ typedef enum OMX_VIDEO_CODINGTYPE {
OMX_VIDEO_CodingRV, /**< all versions of Real Video */
OMX_VIDEO_CodingAVC, /**< H.264/AVC */
OMX_VIDEO_CodingMJPEG, /**< Motion JPEG */
- OMX_VIDEO_CodingKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
+ OMX_VIDEO_CodingVPX, /**< Google VPX, formerly known as On2 VP8 */
+ OMX_VIDEO_CodingKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
OMX_VIDEO_CodingVendorStartUnused = 0x7F000000, /**< Reserved region for introducing Vendor Extensions */
OMX_VIDEO_CodingMax = 0x7FFFFFFF
} OMX_VIDEO_CODINGTYPE;
/**
- * Data structure used to define a video path. The number of Video paths for
- * input and output will vary by type of the Video component.
- *
+ * Data structure used to define a video path. The number of Video paths for
+ * input and output will vary by type of the Video component.
+ *
* Input (aka Source) : zero Inputs, one Output,
* Splitter : one Input, 2 or more Outputs,
* Processing Element : one Input, one output,
* Mixer : 2 or more inputs, one output,
* Output (aka Sink) : one Input, zero outputs.
- *
- * The PortDefinition structure is used to define all of the parameters
- * necessary for the compliant component to setup an input or an output video
- * path. If additional vendor specific data is required, it should be
- * transmitted to the component using the CustomCommand function. Compliant
- * components will prepopulate this structure with optimal values during the
+ *
+ * The PortDefinition structure is used to define all of the parameters
+ * necessary for the compliant component to setup an input or an output video
+ * path. If additional vendor specific data is required, it should be
+ * transmitted to the component using the CustomCommand function. Compliant
+ * components will prepopulate this structure with optimal values during the
* GetDefaultInitParams command.
*
* STRUCT MEMBERS:
* cMIMEType : MIME type of data for the port
- * pNativeRender : Platform specific reference for a display if a
+ * pNativeRender : Platform specific reference for a display if a
* sync, otherwise this field is 0
- * nFrameWidth : Width of frame to be used on channel if
+ * nFrameWidth : Width of frame to be used on channel if
* uncompressed format is used. Use 0 for unknown,
* don't care or variable
- * nFrameHeight : Height of frame to be used on channel if
+ * nFrameHeight : Height of frame to be used on channel if
* uncompressed format is used. Use 0 for unknown,
* don't care or variable
- * nStride : Number of bytes per span of an image
+ * nStride : Number of bytes per span of an image
* (i.e. indicates the number of bytes to get
* from span N to span N+1, where negative stride
* indicates the image is bottom up
* nSliceHeight : Height used when encoding in slices
- * nBitrate : Bit rate of frame to be used on channel if
- * compressed format is used. Use 0 for unknown,
+ * nBitrate : Bit rate of frame to be used on channel if
+ * compressed format is used. Use 0 for unknown,
* don't care or variable
- * xFramerate : Frame rate to be used on channel if uncompressed
- * format is used. Use 0 for unknown, don't care or
+ * xFramerate : Frame rate to be used on channel if uncompressed
+ * format is used. Use 0 for unknown, don't care or
* variable. Units are Q16 frames per second.
- * bFlagErrorConcealment : Turns on error concealment if it is supported by
+ * bFlagErrorConcealment : Turns on error concealment if it is supported by
* the OMX component
- * eCompressionFormat : Compression format used in this instance of the
- * component. When OMX_VIDEO_CodingUnused is
+ * eCompressionFormat : Compression format used in this instance of the
+ * component. When OMX_VIDEO_CodingUnused is
* specified, eColorFormat is used
* eColorFormat : Decompressed format used by this component
- * pNativeWindow : Platform specific reference for a window object if a
- * display sink , otherwise this field is 0x0.
+ * pNativeWindow : Platform specific reference for a window object if a
+ * display sink , otherwise this field is 0x0.
*/
typedef struct OMX_VIDEO_PORTDEFINITIONTYPE {
OMX_STRING cMIMEType;
@@ -136,19 +154,19 @@ typedef struct OMX_VIDEO_PORTDEFINITIONTYPE {
OMX_NATIVE_WINDOWTYPE pNativeWindow;
} OMX_VIDEO_PORTDEFINITIONTYPE;
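+
+/*
+ * Illustrative example (not part of the standard header): filling the video
+ * portion of a decoder input port definition. The values are placeholders;
+ * in practice this structure is read and written as the format.video member
+ * of OMX_PARAM_PORTDEFINITIONTYPE via OMX_IndexParamPortDefinition.
+ *
+ *   void setup_avc_input(OMX_VIDEO_PORTDEFINITIONTYPE *v)
+ *   {
+ *       v->cMIMEType             = (OMX_STRING)"video/avc";
+ *       v->nFrameWidth           = 1920;
+ *       v->nFrameHeight          = 1080;
+ *       v->nStride               = 0;           // not meaningful for compressed input
+ *       v->xFramerate            = 30 << 16;    // Q16 frames per second
+ *       v->eCompressionFormat    = OMX_VIDEO_CodingAVC;
+ *       v->eColorFormat          = OMX_COLOR_FormatUnused;
+ *       v->bFlagErrorConcealment = OMX_FALSE;
+ *   }
+ */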
-/**
- * Port format parameter. This structure is used to enumerate the various
+/**
+ * Port format parameter. This structure is used to enumerate the various
* data input/output format supported by the port.
- *
+ *
* STRUCT MEMBERS:
* nSize : Size of the structure in bytes
* nVersion : OMX specification version information
* nPortIndex : Indicates which port to set
- * nIndex : Indicates the enumeration index for the format from
+ * nIndex : Indicates the enumeration index for the format from
* 0x0 to N-1
- * eCompressionFormat : Compression format used in this instance of the
- * component. When OMX_VIDEO_CodingUnused is specified,
- * eColorFormat is used
+ * eCompressionFormat : Compression format used in this instance of the
+ * component. When OMX_VIDEO_CodingUnused is specified,
+ * eColorFormat is used
* eColorFormat : Decompressed format used by this component
* xFrameRate : Indicates the video frame rate in Q16 format
*/
@@ -157,14 +175,14 @@ typedef struct OMX_VIDEO_PARAM_PORTFORMATTYPE {
OMX_VERSIONTYPE nVersion;
OMX_U32 nPortIndex;
OMX_U32 nIndex;
- OMX_VIDEO_CODINGTYPE eCompressionFormat;
+ OMX_VIDEO_CODINGTYPE eCompressionFormat;
OMX_COLOR_FORMATTYPE eColorFormat;
OMX_U32 xFramerate;
} OMX_VIDEO_PARAM_PORTFORMATTYPE;
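+
+/*
+ * Illustrative example (not part of the standard header): this structure is
+ * normally used to enumerate the formats a port supports by stepping nIndex
+ * until the component stops returning OMX_ErrorNone. hComp and port_index
+ * are assumed to be a valid component handle and port; nVersion setup is
+ * left as a comment.
+ *
+ *   void list_port_formats(OMX_HANDLETYPE hComp, OMX_U32 port_index)
+ *   {
+ *       OMX_VIDEO_PARAM_PORTFORMATTYPE fmt;
+ *       OMX_U32 i = 0;
+ *       memset(&fmt, 0, sizeof(fmt));
+ *       fmt.nSize      = sizeof(fmt);
+ *       fmt.nPortIndex = port_index;
+ *       // fmt.nVersion should be set to the IL version in use (e.g. 1.1.2)
+ *       for (;;) {
+ *           fmt.nIndex = i++;
+ *           if (OMX_GetParameter(hComp, OMX_IndexParamVideoPortFormat, &fmt)
+ *                   != OMX_ErrorNone)
+ *               break;   // OMX_ErrorNoMore (or any error) ends the enumeration
+ *           // fmt.eCompressionFormat / fmt.eColorFormat describe this entry
+ *       }
+ *   }
+ */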
/**
- * This is a structure for configuring video compression quantization
+ * This is a structure for configuring video compression quantization
* parameter values. Codecs may support different QP values for different
* frame types.
*
@@ -174,10 +192,10 @@ typedef struct OMX_VIDEO_PARAM_PORTFORMATTYPE {
* nPortIndex : Port that this structure applies to
* nQpI : QP value to use for index frames
* nQpP : QP value to use for P frames
- * nQpB : QP values to use for bidirectional frames
+ * nQpB : QP values to use for bidirectional frames
*/
typedef struct OMX_VIDEO_PARAM_QUANTIZATIONTYPE {
- OMX_U32 nSize;
+ OMX_U32 nSize;
OMX_VERSIONTYPE nVersion;
OMX_U32 nPortIndex;
OMX_U32 nQpI;
@@ -186,32 +204,32 @@ typedef struct OMX_VIDEO_PARAM_QUANTIZATIONTYPE {
} OMX_VIDEO_PARAM_QUANTIZATIONTYPE;
-/**
- * Structure for configuration of video fast update parameters.
- *
+/**
+ * Structure for configuration of video fast update parameters.
+ *
* STRUCT MEMBERS:
* nSize : Size of the structure in bytes
- * nVersion : OMX specification version info
+ * nVersion : OMX specification version info
* nPortIndex : Port that this structure applies to
* bEnableVFU : Enable/Disable video fast update
* nFirstGOB : Specifies the number of the first macroblock row
* nFirstMB : specifies the first MB relative to the specified first GOB
- * nNumMBs : Specifies the number of MBs to be refreshed from nFirstGOB
+ * nNumMBs : Specifies the number of MBs to be refreshed from nFirstGOB
* and nFirstMB
*/
typedef struct OMX_VIDEO_PARAM_VIDEOFASTUPDATETYPE {
- OMX_U32 nSize;
- OMX_VERSIONTYPE nVersion;
- OMX_U32 nPortIndex;
- OMX_BOOL bEnableVFU;
- OMX_U32 nFirstGOB;
- OMX_U32 nFirstMB;
- OMX_U32 nNumMBs;
+ OMX_U32 nSize;
+ OMX_VERSIONTYPE nVersion;
+ OMX_U32 nPortIndex;
+ OMX_BOOL bEnableVFU;
+ OMX_U32 nFirstGOB;
+ OMX_U32 nFirstMB;
+ OMX_U32 nNumMBs;
} OMX_VIDEO_PARAM_VIDEOFASTUPDATETYPE;
-/**
- * Enumeration of possible bitrate control types
+/**
+ * Enumeration of possible bitrate control types
*/
typedef enum OMX_VIDEO_CONTROLRATETYPE {
OMX_Video_ControlRateDisable,
@@ -219,14 +237,14 @@ typedef enum OMX_VIDEO_CONTROLRATETYPE {
OMX_Video_ControlRateConstant,
OMX_Video_ControlRateVariableSkipFrames,
OMX_Video_ControlRateConstantSkipFrames,
- OMX_Video_ControlRateKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
+ OMX_Video_ControlRateKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
OMX_Video_ControlRateVendorStartUnused = 0x7F000000, /**< Reserved region for introducing Vendor Extensions */
OMX_Video_ControlRateMax = 0x7FFFFFFF
} OMX_VIDEO_CONTROLRATETYPE;
-/**
- * Structure for configuring bitrate mode of a codec.
+/**
+ * Structure for configuring bitrate mode of a codec.
*
* STRUCT MEMBERS:
* nSize : Size of the struct in bytes
@@ -236,23 +254,23 @@ typedef enum OMX_VIDEO_CONTROLRATETYPE {
* nTargetBitrate : Target bitrate to encode with
*/
typedef struct OMX_VIDEO_PARAM_BITRATETYPE {
- OMX_U32 nSize;
- OMX_VERSIONTYPE nVersion;
- OMX_U32 nPortIndex;
- OMX_VIDEO_CONTROLRATETYPE eControlRate;
- OMX_U32 nTargetBitrate;
+ OMX_U32 nSize;
+ OMX_VERSIONTYPE nVersion;
+ OMX_U32 nPortIndex;
+ OMX_VIDEO_CONTROLRATETYPE eControlRate;
+ OMX_U32 nTargetBitrate;
} OMX_VIDEO_PARAM_BITRATETYPE;
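+
+/*
+ * Illustrative example (not part of the standard header): requesting constant
+ * bit rate control on an encoder output port via OMX_IndexParamVideoBitrate.
+ * hComp and out_port are assumed; nVersion initialization is elided.
+ *
+ *   OMX_ERRORTYPE set_cbr(OMX_HANDLETYPE hComp, OMX_U32 out_port, OMX_U32 bps)
+ *   {
+ *       OMX_VIDEO_PARAM_BITRATETYPE br;
+ *       memset(&br, 0, sizeof(br));
+ *       br.nSize          = sizeof(br);
+ *       br.nPortIndex     = out_port;
+ *       br.eControlRate   = OMX_Video_ControlRateConstant;
+ *       br.nTargetBitrate = bps;
+ *       return OMX_SetParameter(hComp, OMX_IndexParamVideoBitrate, &br);
+ *   }
+ */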
-/**
- * Enumeration of possible motion vector (MV) types
+/**
+ * Enumeration of possible motion vector (MV) types
*/
typedef enum OMX_VIDEO_MOTIONVECTORTYPE {
OMX_Video_MotionVectorPixel,
OMX_Video_MotionVectorHalfPel,
OMX_Video_MotionVectorQuarterPel,
OMX_Video_MotionVectorEighthPel,
- OMX_Video_MotionVectorKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
+ OMX_Video_MotionVectorKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
OMX_Video_MotionVectorVendorStartUnused = 0x7F000000, /**< Reserved region for introducing Vendor Extensions */
OMX_Video_MotionVectorMax = 0x7FFFFFFF
} OMX_VIDEO_MOTIONVECTORTYPE;
@@ -261,7 +279,7 @@ typedef enum OMX_VIDEO_MOTIONVECTORTYPE {
/**
* Structure for configuring the number of motion vectors used as well
* as their accuracy.
- *
+ *
* STRUCT MEMBERS:
* nSize : Size of the struct in bytes
* nVersion : OMX spec version info
@@ -284,32 +302,32 @@ typedef struct OMX_VIDEO_PARAM_MOTIONVECTORTYPE {
} OMX_VIDEO_PARAM_MOTIONVECTORTYPE;
-/**
- * Enumeration of possible methods to use for Intra Refresh
+/**
+ * Enumeration of possible methods to use for Intra Refresh
*/
typedef enum OMX_VIDEO_INTRAREFRESHTYPE {
OMX_VIDEO_IntraRefreshCyclic,
OMX_VIDEO_IntraRefreshAdaptive,
OMX_VIDEO_IntraRefreshBoth,
- OMX_VIDEO_IntraRefreshKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
+ OMX_VIDEO_IntraRefreshKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
OMX_VIDEO_IntraRefreshVendorStartUnused = 0x7F000000, /**< Reserved region for introducing Vendor Extensions */
OMX_VIDEO_IntraRefreshMax = 0x7FFFFFFF
} OMX_VIDEO_INTRAREFRESHTYPE;
/**
- * Structure for configuring intra refresh mode
- *
+ * Structure for configuring intra refresh mode
+ *
* STRUCT MEMBERS:
* nSize : Size of the structure in bytes
* nVersion : OMX specification version information
* nPortIndex : Port that this structure applies to
* eRefreshMode : Cyclic, Adaptive, or Both
- * nAirMBs : Number of intra macroblocks to refresh in a frame when
+ * nAirMBs : Number of intra macroblocks to refresh in a frame when
* AIR is enabled
- * nAirRef : Number of times a motion marked macroblock has to be
+ * nAirRef : Number of times a motion marked macroblock has to be
* intra coded
- * nCirMBs : Number of consecutive macroblocks to be coded as "intra"
+ * nCirMBs : Number of consecutive macroblocks to be coded as "intra"
* when CIR is enabled
*/
typedef struct OMX_VIDEO_PARAM_INTRAREFRESHTYPE {
@@ -324,19 +342,19 @@ typedef struct OMX_VIDEO_PARAM_INTRAREFRESHTYPE {
/**
- * Structure for enabling various error correction methods for video
+ * Structure for enabling various error correction methods for video
* compression.
*
* STRUCT MEMBERS:
* nSize : Size of the structure in bytes
- * nVersion : OMX specification version information
- * nPortIndex : Port that this structure applies to
+ * nVersion : OMX specification version information
+ * nPortIndex : Port that this structure applies to
* bEnableHEC : Enable/disable header extension codes (HEC)
* bEnableResync : Enable/disable resynchronization markers
- * nResynchMarkerSpacing : Resynch markers interval (in bits) to be
- * applied in the stream
- * bEnableDataPartitioning : Enable/disable data partitioning
- * bEnableRVLC : Enable/disable reversible variable length
+ * nResynchMarkerSpacing : Resynch markers interval (in bits) to be
+ * applied in the stream
+ * bEnableDataPartitioning : Enable/disable data partitioning
+ * bEnableRVLC : Enable/disable reversible variable length
* coding
*/
typedef struct OMX_VIDEO_PARAM_ERRORCORRECTIONTYPE {
@@ -351,12 +369,12 @@ typedef struct OMX_VIDEO_PARAM_ERRORCORRECTIONTYPE {
} OMX_VIDEO_PARAM_ERRORCORRECTIONTYPE;
-/**
- * Configuration of variable block-size motion compensation (VBSMC)
- *
+/**
+ * Configuration of variable block-size motion compensation (VBSMC)
+ *
* STRUCT MEMBERS:
* nSize : Size of the structure in bytes
- * nVersion : OMX specification version information
+ * nVersion : OMX specification version information
* nPortIndex : Port that this structure applies to
* b16x16 : Enable inter block search 16x16
* b16x8 : Enable inter block search 16x8
@@ -367,11 +385,11 @@ typedef struct OMX_VIDEO_PARAM_ERRORCORRECTIONTYPE {
* b4x4 : Enable inter block search 4x4
*/
typedef struct OMX_VIDEO_PARAM_VBSMCTYPE {
- OMX_U32 nSize;
- OMX_VERSIONTYPE nVersion;
- OMX_U32 nPortIndex;
- OMX_BOOL b16x16;
- OMX_BOOL b16x8;
+ OMX_U32 nSize;
+ OMX_VERSIONTYPE nVersion;
+ OMX_U32 nPortIndex;
+ OMX_BOOL b16x16;
+ OMX_BOOL b16x8;
OMX_BOOL b8x16;
OMX_BOOL b8x8;
OMX_BOOL b8x4;
@@ -380,67 +398,67 @@ typedef struct OMX_VIDEO_PARAM_VBSMCTYPE {
} OMX_VIDEO_PARAM_VBSMCTYPE;
-/**
- * H.263 profile types, each profile indicates support for various
+/**
+ * H.263 profile types, each profile indicates support for various
* performance bounds and different annexes.
*
* ENUMS:
- * Baseline : Baseline Profile: H.263 (V1), no optional modes
- * H320 Coding : H.320 Coding Efficiency Backward Compatibility
+ * Baseline : Baseline Profile: H.263 (V1), no optional modes
+ * H320 Coding : H.320 Coding Efficiency Backward Compatibility
* Profile: H.263+ (V2), includes annexes I, J, L.4
* and T
- * BackwardCompatible : Backward Compatibility Profile: H.263 (V1),
- * includes annex F
- * ISWV2 : Interactive Streaming Wireless Profile: H.263+
- * (V2), includes annexes I, J, K and T
- * ISWV3 : Interactive Streaming Wireless Profile: H.263++
- * (V3), includes profile 3 and annexes V and W.6.3.8
- * HighCompression : Conversational High Compression Profile: H.263++
- * (V3), includes profiles 1 & 2 and annexes D and U
- * Internet : Conversational Internet Profile: H.263++ (V3),
- * includes profile 5 and annex K
- * Interlace : Conversational Interlace Profile: H.263++ (V3),
- * includes profile 5 and annex W.6.3.11
- * HighLatency : High Latency Profile: H.263++ (V3), includes
- * profile 6 and annexes O.1 and P.5
+ * BackwardCompatible : Backward Compatibility Profile: H.263 (V1),
+ * includes annex F
+ * ISWV2 : Interactive Streaming Wireless Profile: H.263+
+ * (V2), includes annexes I, J, K and T
+ * ISWV3 : Interactive Streaming Wireless Profile: H.263++
+ * (V3), includes profile 3 and annexes V and W.6.3.8
+ * HighCompression : Conversational High Compression Profile: H.263++
+ * (V3), includes profiles 1 & 2 and annexes D and U
+ * Internet : Conversational Internet Profile: H.263++ (V3),
+ * includes profile 5 and annex K
+ * Interlace : Conversational Interlace Profile: H.263++ (V3),
+ * includes profile 5 and annex W.6.3.11
+ * HighLatency : High Latency Profile: H.263++ (V3), includes
+ * profile 6 and annexes O.1 and P.5
*/
typedef enum OMX_VIDEO_H263PROFILETYPE {
- OMX_VIDEO_H263ProfileBaseline = 0x01,
- OMX_VIDEO_H263ProfileH320Coding = 0x02,
- OMX_VIDEO_H263ProfileBackwardCompatible = 0x04,
- OMX_VIDEO_H263ProfileISWV2 = 0x08,
- OMX_VIDEO_H263ProfileISWV3 = 0x10,
- OMX_VIDEO_H263ProfileHighCompression = 0x20,
- OMX_VIDEO_H263ProfileInternet = 0x40,
- OMX_VIDEO_H263ProfileInterlace = 0x80,
- OMX_VIDEO_H263ProfileHighLatency = 0x100,
- OMX_VIDEO_H263ProfileKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
+ OMX_VIDEO_H263ProfileBaseline = 0x01,
+ OMX_VIDEO_H263ProfileH320Coding = 0x02,
+ OMX_VIDEO_H263ProfileBackwardCompatible = 0x04,
+ OMX_VIDEO_H263ProfileISWV2 = 0x08,
+ OMX_VIDEO_H263ProfileISWV3 = 0x10,
+ OMX_VIDEO_H263ProfileHighCompression = 0x20,
+ OMX_VIDEO_H263ProfileInternet = 0x40,
+ OMX_VIDEO_H263ProfileInterlace = 0x80,
+ OMX_VIDEO_H263ProfileHighLatency = 0x100,
+ OMX_VIDEO_H263ProfileKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
OMX_VIDEO_H263ProfileVendorStartUnused = 0x7F000000, /**< Reserved region for introducing Vendor Extensions */
- OMX_VIDEO_H263ProfileMax = 0x7FFFFFFF
+ OMX_VIDEO_H263ProfileMax = 0x7FFFFFFF
} OMX_VIDEO_H263PROFILETYPE;
-/**
- * H.263 level types, each level indicates support for various frame sizes,
+/**
+ * H.263 level types, each level indicates support for various frame sizes,
* bit rates, decoder frame rates.
*/
typedef enum OMX_VIDEO_H263LEVELTYPE {
- OMX_VIDEO_H263Level10 = 0x01,
- OMX_VIDEO_H263Level20 = 0x02,
- OMX_VIDEO_H263Level30 = 0x04,
- OMX_VIDEO_H263Level40 = 0x08,
- OMX_VIDEO_H263Level45 = 0x10,
- OMX_VIDEO_H263Level50 = 0x20,
- OMX_VIDEO_H263Level60 = 0x40,
- OMX_VIDEO_H263Level70 = 0x80,
- OMX_VIDEO_H263LevelKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
+ OMX_VIDEO_H263Level10 = 0x01,
+ OMX_VIDEO_H263Level20 = 0x02,
+ OMX_VIDEO_H263Level30 = 0x04,
+ OMX_VIDEO_H263Level40 = 0x08,
+ OMX_VIDEO_H263Level45 = 0x10,
+ OMX_VIDEO_H263Level50 = 0x20,
+ OMX_VIDEO_H263Level60 = 0x40,
+ OMX_VIDEO_H263Level70 = 0x80,
+ OMX_VIDEO_H263LevelKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
OMX_VIDEO_H263LevelVendorStartUnused = 0x7F000000, /**< Reserved region for introducing Vendor Extensions */
- OMX_VIDEO_H263LevelMax = 0x7FFFFFFF
+ OMX_VIDEO_H263LevelMax = 0x7FFFFFFF
} OMX_VIDEO_H263LEVELTYPE;
-/**
- * Specifies the picture type. These values should be OR'd to signal all
+/**
+ * Specifies the picture type. These values should be OR'd to signal all
* picture types which are allowed.
*
* ENUMS:
@@ -458,36 +476,36 @@ typedef enum OMX_VIDEO_PICTURETYPE {
OMX_VIDEO_PictureTypeEI = 0x11,
OMX_VIDEO_PictureTypeEP = 0x12,
OMX_VIDEO_PictureTypeS = 0x14,
- OMX_VIDEO_PictureTypeKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
+ OMX_VIDEO_PictureTypeKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
OMX_VIDEO_PictureTypeVendorStartUnused = 0x7F000000, /**< Reserved region for introducing Vendor Extensions */
OMX_VIDEO_PictureTypeMax = 0x7FFFFFFF
} OMX_VIDEO_PICTURETYPE;
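Since these picture-type values are bit flags, a caller combines them with bitwise OR when filling the nAllowedPictureTypes members used by the codec parameter structures below; a small sketch, where tH263 is a hypothetical OMX_VIDEO_PARAM_H263TYPE being prepared for OMX_SetParameter:

    /* allow I, P and B pictures in the generated bitstream */
    tH263.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI |
                                 OMX_VIDEO_PictureTypeP |
                                 OMX_VIDEO_PictureTypeB;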
-/**
- * H.263 Params
+/**
+ * H.263 Params
*
* STRUCT MEMBERS:
* nSize : Size of the structure in bytes
- * nVersion : OMX specification version information
+ * nVersion : OMX specification version information
* nPortIndex : Port that this structure applies to
* nPFrames : Number of P frames between each I frame
* nBFrames : Number of B frames between each I frame
* eProfile : H.263 profile(s) to use
* eLevel : H.263 level(s) to use
- * bPLUSPTYPEAllowed : Indicating that it is allowed to use PLUSPTYPE
- * (specified in the 1998 version of H.263) to
- * indicate custom picture sizes or clock
- * frequencies
- * nAllowedPictureTypes : Specifies the picture types allowed in the
+ * bPLUSPTYPEAllowed : Indicating that it is allowed to use PLUSPTYPE
+ * (specified in the 1998 version of H.263) to
+ * indicate custom picture sizes or clock
+ * frequencies
+ * nAllowedPictureTypes : Specifies the picture types allowed in the
* bitstream
- * bForceRoundingTypeToZero : value of the RTYPE bit (bit 6 of MPPTYPE) is
- * not constrained. It is recommended to change
- * the value of the RTYPE bit for each reference
+ * bForceRoundingTypeToZero : value of the RTYPE bit (bit 6 of MPPTYPE) is
+ * not constrained. It is recommended to change
+ * the value of the RTYPE bit for each reference
* picture in error-free communication
- * nPictureHeaderRepetition : Specifies the frequency of picture header
+ * nPictureHeaderRepetition : Specifies the frequency of picture header
* repetition
- * nGOBHeaderInterval : Specifies the interval of non-empty GOB
+ * nGOBHeaderInterval : Specifies the interval of non-empty GOB
* headers in units of GOBs
*/
typedef struct OMX_VIDEO_PARAM_H263TYPE {
@@ -506,8 +524,8 @@ typedef struct OMX_VIDEO_PARAM_H263TYPE {
} OMX_VIDEO_PARAM_H263TYPE;
-/**
- * MPEG-2 profile types, each profile indicates support for various
+/**
+ * MPEG-2 profile types, each profile indicates support for various
* performance bounds and different annexes.
*/
typedef enum OMX_VIDEO_MPEG2PROFILETYPE {
@@ -517,29 +535,29 @@ typedef enum OMX_VIDEO_MPEG2PROFILETYPE {
OMX_VIDEO_MPEG2ProfileSNR, /**< SNR Profile */
OMX_VIDEO_MPEG2ProfileSpatial, /**< Spatial Profile */
OMX_VIDEO_MPEG2ProfileHigh, /**< High Profile */
- OMX_VIDEO_MPEG2ProfileKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
+ OMX_VIDEO_MPEG2ProfileKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
OMX_VIDEO_MPEG2ProfileVendorStartUnused = 0x7F000000, /**< Reserved region for introducing Vendor Extensions */
- OMX_VIDEO_MPEG2ProfileMax = 0x7FFFFFFF
+ OMX_VIDEO_MPEG2ProfileMax = 0x7FFFFFFF
} OMX_VIDEO_MPEG2PROFILETYPE;
-/**
- * MPEG-2 level types, each level indicates support for various frame
- * sizes, bit rates, decoder frame rates. No need
+/**
+ * MPEG-2 level types, each level indicates support for various frame
+ * sizes, bit rates, decoder frame rates. No need
*/
typedef enum OMX_VIDEO_MPEG2LEVELTYPE {
- OMX_VIDEO_MPEG2LevelLL = 0, /**< Low Level */
- OMX_VIDEO_MPEG2LevelML, /**< Main Level */
- OMX_VIDEO_MPEG2LevelH14, /**< High 1440 */
- OMX_VIDEO_MPEG2LevelHL, /**< High Level */
- OMX_VIDEO_MPEG2LevelKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
+ OMX_VIDEO_MPEG2LevelLL = 0, /**< Low Level */
+ OMX_VIDEO_MPEG2LevelML, /**< Main Level */
+ OMX_VIDEO_MPEG2LevelH14, /**< High 1440 */
+ OMX_VIDEO_MPEG2LevelHL, /**< High Level */
+ OMX_VIDEO_MPEG2LevelKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
OMX_VIDEO_MPEG2LevelVendorStartUnused = 0x7F000000, /**< Reserved region for introducing Vendor Extensions */
- OMX_VIDEO_MPEG2LevelMax = 0x7FFFFFFF
+ OMX_VIDEO_MPEG2LevelMax = 0x7FFFFFFF
} OMX_VIDEO_MPEG2LEVELTYPE;
-/**
- * MPEG-2 params
+/**
+ * MPEG-2 params
*
* STRUCT MEMBERS:
* nSize : Size of the structure in bytes
@@ -551,20 +569,20 @@ typedef enum OMX_VIDEO_MPEG2LEVELTYPE {
* eLevel : MPEG-2 levels(s) to use
*/
typedef struct OMX_VIDEO_PARAM_MPEG2TYPE {
- OMX_U32 nSize;
+ OMX_U32 nSize;
OMX_VERSIONTYPE nVersion;
- OMX_U32 nPortIndex;
- OMX_U32 nPFrames;
- OMX_U32 nBFrames;
+ OMX_U32 nPortIndex;
+ OMX_U32 nPFrames;
+ OMX_U32 nBFrames;
OMX_VIDEO_MPEG2PROFILETYPE eProfile;
- OMX_VIDEO_MPEG2LEVELTYPE eLevel;
+ OMX_VIDEO_MPEG2LEVELTYPE eLevel;
} OMX_VIDEO_PARAM_MPEG2TYPE;
-/**
- * MPEG-4 profile types, each profile indicates support for various
+/**
+ * MPEG-4 profile types, each profile indicates support for various
* performance bounds and different annexes.
- *
+ *
* ENUMS:
* - Simple Profile, Levels 1-3
* - Simple Scalable Profile, Levels 1-2
@@ -583,48 +601,48 @@ typedef struct OMX_VIDEO_PARAM_MPEG2TYPE {
* - Advanced Scalable Texture, Levels 2-3
*/
typedef enum OMX_VIDEO_MPEG4PROFILETYPE {
- OMX_VIDEO_MPEG4ProfileSimple = 0x01,
- OMX_VIDEO_MPEG4ProfileSimpleScalable = 0x02,
- OMX_VIDEO_MPEG4ProfileCore = 0x04,
- OMX_VIDEO_MPEG4ProfileMain = 0x08,
- OMX_VIDEO_MPEG4ProfileNbit = 0x10,
- OMX_VIDEO_MPEG4ProfileScalableTexture = 0x20,
- OMX_VIDEO_MPEG4ProfileSimpleFace = 0x40,
- OMX_VIDEO_MPEG4ProfileSimpleFBA = 0x80,
- OMX_VIDEO_MPEG4ProfileBasicAnimated = 0x100,
- OMX_VIDEO_MPEG4ProfileHybrid = 0x200,
- OMX_VIDEO_MPEG4ProfileAdvancedRealTime = 0x400,
- OMX_VIDEO_MPEG4ProfileCoreScalable = 0x800,
- OMX_VIDEO_MPEG4ProfileAdvancedCoding = 0x1000,
- OMX_VIDEO_MPEG4ProfileAdvancedCore = 0x2000,
+ OMX_VIDEO_MPEG4ProfileSimple = 0x01,
+ OMX_VIDEO_MPEG4ProfileSimpleScalable = 0x02,
+ OMX_VIDEO_MPEG4ProfileCore = 0x04,
+ OMX_VIDEO_MPEG4ProfileMain = 0x08,
+ OMX_VIDEO_MPEG4ProfileNbit = 0x10,
+ OMX_VIDEO_MPEG4ProfileScalableTexture = 0x20,
+ OMX_VIDEO_MPEG4ProfileSimpleFace = 0x40,
+ OMX_VIDEO_MPEG4ProfileSimpleFBA = 0x80,
+ OMX_VIDEO_MPEG4ProfileBasicAnimated = 0x100,
+ OMX_VIDEO_MPEG4ProfileHybrid = 0x200,
+ OMX_VIDEO_MPEG4ProfileAdvancedRealTime = 0x400,
+ OMX_VIDEO_MPEG4ProfileCoreScalable = 0x800,
+ OMX_VIDEO_MPEG4ProfileAdvancedCoding = 0x1000,
+ OMX_VIDEO_MPEG4ProfileAdvancedCore = 0x2000,
OMX_VIDEO_MPEG4ProfileAdvancedScalable = 0x4000,
OMX_VIDEO_MPEG4ProfileAdvancedSimple = 0x8000,
- OMX_VIDEO_MPEG4ProfileKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
+ OMX_VIDEO_MPEG4ProfileKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
OMX_VIDEO_MPEG4ProfileVendorStartUnused = 0x7F000000, /**< Reserved region for introducing Vendor Extensions */
- OMX_VIDEO_MPEG4ProfileMax = 0x7FFFFFFF
+ OMX_VIDEO_MPEG4ProfileMax = 0x7FFFFFFF
} OMX_VIDEO_MPEG4PROFILETYPE;
-/**
- * MPEG-4 level types, each level indicates support for various frame
- * sizes, bit rates, decoder frame rates. No need
+/**
+ * MPEG-4 level types, each level indicates support for various frame
+ * sizes, bit rates, decoder frame rates. No need
*/
typedef enum OMX_VIDEO_MPEG4LEVELTYPE {
- OMX_VIDEO_MPEG4Level0 = 0x01, /**< Level 0 */
- OMX_VIDEO_MPEG4Level0b = 0x02, /**< Level 0b */
- OMX_VIDEO_MPEG4Level1 = 0x04, /**< Level 1 */
- OMX_VIDEO_MPEG4Level2 = 0x08, /**< Level 2 */
- OMX_VIDEO_MPEG4Level3 = 0x10, /**< Level 3 */
- OMX_VIDEO_MPEG4Level4 = 0x20, /**< Level 4 */
- OMX_VIDEO_MPEG4Level4a = 0x40, /**< Level 4a */
- OMX_VIDEO_MPEG4Level5 = 0x80, /**< Level 5 */
- OMX_VIDEO_MPEG4LevelKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
+ OMX_VIDEO_MPEG4Level0 = 0x01, /**< Level 0 */
+ OMX_VIDEO_MPEG4Level0b = 0x02, /**< Level 0b */
+ OMX_VIDEO_MPEG4Level1 = 0x04, /**< Level 1 */
+ OMX_VIDEO_MPEG4Level2 = 0x08, /**< Level 2 */
+ OMX_VIDEO_MPEG4Level3 = 0x10, /**< Level 3 */
+ OMX_VIDEO_MPEG4Level4 = 0x20, /**< Level 4 */
+ OMX_VIDEO_MPEG4Level4a = 0x40, /**< Level 4a */
+ OMX_VIDEO_MPEG4Level5 = 0x80, /**< Level 5 */
+ OMX_VIDEO_MPEG4LevelKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
OMX_VIDEO_MPEG4LevelVendorStartUnused = 0x7F000000, /**< Reserved region for introducing Vendor Extensions */
- OMX_VIDEO_MPEG4LevelMax = 0x7FFFFFFF
+ OMX_VIDEO_MPEG4LevelMax = 0x7FFFFFFF
} OMX_VIDEO_MPEG4LEVELTYPE;
-/**
+/**
* MPEG-4 configuration. This structure handles configuration options
* which are specific to MPEG4 algorithms
*
@@ -632,24 +650,24 @@ typedef enum OMX_VIDEO_MPEG4LEVELTYPE {
* nSize : Size of the structure in bytes
* nVersion : OMX specification version information
* nPortIndex : Port that this structure applies to
- * nSliceHeaderSpacing : Number of macroblocks between slice header (H263+
+ * nSliceHeaderSpacing : Number of macroblocks between slice header (H263+
* Annex K). Put zero if not used
* bSVH : Enable Short Video Header mode
* bGov : Flag to enable GOV
- * nPFrames : Number of P frames between each I frame (also called
+ * nPFrames : Number of P frames between each I frame (also called
* GOV period)
* nBFrames : Number of B frames between each I frame
* nIDCVLCThreshold : Value of intra DC VLC threshold
* bACPred : Flag to use ac prediction
* nMaxPacketSize : Maximum size of packet in bytes.
- * nTimeIncRes : Used to pass VOP time increment resolution for MPEG4.
+ * nTimeIncRes : Used to pass VOP time increment resolution for MPEG4.
* Interpreted as described in MPEG4 standard.
* eProfile : MPEG-4 profile(s) to use.
* eLevel : MPEG-4 level(s) to use.
* nAllowedPictureTypes : Specifies the picture types allowed in the bitstream
* nHeaderExtension : Specifies the number of consecutive video packet
* headers within a VOP
- * bReversibleVLC : Specifies whether reversible variable length coding
+ * bReversibleVLC : Specifies whether reversible variable length coding
* is in use
*/
typedef struct OMX_VIDEO_PARAM_MPEG4TYPE {
@@ -673,22 +691,22 @@ typedef struct OMX_VIDEO_PARAM_MPEG4TYPE {
} OMX_VIDEO_PARAM_MPEG4TYPE;
-/**
- * WMV Versions
+/**
+ * WMV Versions
*/
typedef enum OMX_VIDEO_WMVFORMATTYPE {
OMX_VIDEO_WMVFormatUnused = 0x01, /**< Format unused or unknown */
OMX_VIDEO_WMVFormat7 = 0x02, /**< Windows Media Video format 7 */
OMX_VIDEO_WMVFormat8 = 0x04, /**< Windows Media Video format 8 */
OMX_VIDEO_WMVFormat9 = 0x08, /**< Windows Media Video format 9 */
- OMX_VIDEO_WMFFormatKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
+ OMX_VIDEO_WMFFormatKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
OMX_VIDEO_WMFFormatVendorStartUnused = 0x7F000000, /**< Reserved region for introducing Vendor Extensions */
OMX_VIDEO_WMVFormatMax = 0x7FFFFFFF
} OMX_VIDEO_WMVFORMATTYPE;
-/**
- * WMV Params
+/**
+ * WMV Params
*
* STRUCT MEMBERS:
* nSize : Size of the structure in bytes
@@ -697,33 +715,33 @@ typedef enum OMX_VIDEO_WMVFORMATTYPE {
* eFormat : Version of WMV stream / data
*/
typedef struct OMX_VIDEO_PARAM_WMVTYPE {
- OMX_U32 nSize;
+ OMX_U32 nSize;
OMX_VERSIONTYPE nVersion;
OMX_U32 nPortIndex;
OMX_VIDEO_WMVFORMATTYPE eFormat;
} OMX_VIDEO_PARAM_WMVTYPE;
-/**
- * Real Video Version
+/**
+ * Real Video Version
*/
typedef enum OMX_VIDEO_RVFORMATTYPE {
OMX_VIDEO_RVFormatUnused = 0, /**< Format unused or unknown */
OMX_VIDEO_RVFormat8, /**< Real Video format 8 */
OMX_VIDEO_RVFormat9, /**< Real Video format 9 */
OMX_VIDEO_RVFormatG2, /**< Real Video Format G2 */
- OMX_VIDEO_RVFormatKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
+ OMX_VIDEO_RVFormatKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
OMX_VIDEO_RVFormatVendorStartUnused = 0x7F000000, /**< Reserved region for introducing Vendor Extensions */
OMX_VIDEO_RVFormatMax = 0x7FFFFFFF
} OMX_VIDEO_RVFORMATTYPE;
-/**
- * Real Video Params
+/**
+ * Real Video Params
*
* STRUCT MEMBERS:
* nSize : Size of the structure in bytes
- * nVersion : OMX specification version information
+ * nVersion : OMX specification version information
* nPortIndex : Port that this structure applies to
* eFormat : Version of RV stream / data
* nBitsPerPixel : Bits per pixel coded in the frame
@@ -735,11 +753,11 @@ typedef enum OMX_VIDEO_RVFORMATTYPE {
* nMaxEncodeFrameSize: Max encoded frame size
* bEnablePostFilter : Turn on/off post filter
* bEnableTemporalInterpolation : Turn on/off temporal interpolation
- * bEnableLatencyMode : When enabled, the decoder does not display a decoded
- * frame until it has detected that no enhancement layer
- * frames or dependent B frames will be coming. This
- * detection usually occurs when a subsequent non-B
- * frame is encountered
+ * bEnableLatencyMode : When enabled, the decoder does not display a decoded
+ * frame until it has detected that no enhancement layer
+ * frames or dependent B frames will be coming. This
+ * detection usually occurs when a subsequent non-B
+ * frame is encountered
*/
typedef struct OMX_VIDEO_PARAM_RVTYPE {
OMX_U32 nSize;
@@ -759,8 +777,8 @@ typedef struct OMX_VIDEO_PARAM_RVTYPE {
} OMX_VIDEO_PARAM_RVTYPE;
-/**
- * AVC profile types, each profile indicates support for various
+/**
+ * AVC profile types, each profile indicates support for various
* performance bounds and different annexes.
*/
typedef enum OMX_VIDEO_AVCPROFILETYPE {
@@ -771,15 +789,15 @@ typedef enum OMX_VIDEO_AVCPROFILETYPE {
OMX_VIDEO_AVCProfileHigh10 = 0x10, /**< High 10 profile */
OMX_VIDEO_AVCProfileHigh422 = 0x20, /**< High 4:2:2 profile */
OMX_VIDEO_AVCProfileHigh444 = 0x40, /**< High 4:4:4 profile */
- OMX_VIDEO_AVCProfileKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
+ OMX_VIDEO_AVCProfileKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
OMX_VIDEO_AVCProfileVendorStartUnused = 0x7F000000, /**< Reserved region for introducing Vendor Extensions */
- OMX_VIDEO_AVCProfileMax = 0x7FFFFFFF
+ OMX_VIDEO_AVCProfileMax = 0x7FFFFFFF
} OMX_VIDEO_AVCPROFILETYPE;
-/**
- * AVC level types, each level indicates support for various frame sizes,
- * bit rates, decoder frame rates. No need
+/**
+ * AVC level types, each level indicates support for various frame sizes,
+ * bit rates, decoder frame rates. No need
*/
typedef enum OMX_VIDEO_AVCLEVELTYPE {
OMX_VIDEO_AVCLevel1 = 0x01, /**< Level 1 */
@@ -798,14 +816,14 @@ typedef enum OMX_VIDEO_AVCLEVELTYPE {
OMX_VIDEO_AVCLevel42 = 0x2000, /**< Level 4.2 */
OMX_VIDEO_AVCLevel5 = 0x4000, /**< Level 5 */
OMX_VIDEO_AVCLevel51 = 0x8000, /**< Level 5.1 */
- OMX_VIDEO_AVCLevelKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
+ OMX_VIDEO_AVCLevelKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
OMX_VIDEO_AVCLevelVendorStartUnused = 0x7F000000, /**< Reserved region for introducing Vendor Extensions */
- OMX_VIDEO_AVCLevelMax = 0x7FFFFFFF
+ OMX_VIDEO_AVCLevelMax = 0x7FFFFFFF
} OMX_VIDEO_AVCLEVELTYPE;
-/**
- * AVC loop filter modes
+/**
+ * AVC loop filter modes
*
* OMX_VIDEO_AVCLoopFilterEnable : Enable
* OMX_VIDEO_AVCLoopFilterDisable : Disable
@@ -815,20 +833,20 @@ typedef enum OMX_VIDEO_AVCLOOPFILTERTYPE {
OMX_VIDEO_AVCLoopFilterEnable = 0,
OMX_VIDEO_AVCLoopFilterDisable,
OMX_VIDEO_AVCLoopFilterDisableSliceBoundary,
- OMX_VIDEO_AVCLoopFilterKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
+ OMX_VIDEO_AVCLoopFilterKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
OMX_VIDEO_AVCLoopFilterVendorStartUnused = 0x7F000000, /**< Reserved region for introducing Vendor Extensions */
OMX_VIDEO_AVCLoopFilterMax = 0x7FFFFFFF
} OMX_VIDEO_AVCLOOPFILTERTYPE;
-/**
- * AVC params
+/**
+ * AVC params
*
* STRUCT MEMBERS:
* nSize : Size of the structure in bytes
* nVersion : OMX specification version information
* nPortIndex : Port that this structure applies to
- * nSliceHeaderSpacing : Number of macroblocks between slice header, put
+ * nSliceHeaderSpacing : Number of macroblocks between slice header, put
* zero if not used
* nPFrames : Number of P frames between each I frame
* nBFrames : Number of B frames between each I frame
@@ -841,85 +859,85 @@ typedef enum OMX_VIDEO_AVCLOOPFILTERTYPE {
* nRefIdxForward : Pic param set ref frame index (index into ref
* frame buffer of forward frames list), B frame
* support
- * bEnableUEP : Enable/disable unequal error protection. This
+ * bEnableUEP : Enable/disable unequal error protection. This
* is only valid if data partitioning is enabled.
* bEnableFMO : Enable/disable flexible macroblock ordering
* bEnableASO : Enable/disable arbitrary slice ordering
* bEnableRS : Enable/disable sending of redundant slices
* eProfile : AVC profile(s) to use
* eLevel : AVC level(s) to use
- * nAllowedPictureTypes : Specifies the picture types allowed in the
+ * nAllowedPictureTypes : Specifies the picture types allowed in the
* bitstream
- * bFrameMBsOnly : specifies that every coded picture of the
- * coded video sequence is a coded frame
+ * bFrameMBsOnly : specifies that every coded picture of the
+ * coded video sequence is a coded frame
* containing only frame macroblocks
- * bMBAFF : Enable/disable switching between frame and
+ * bMBAFF : Enable/disable switching between frame and
* field macroblocks within a picture
- * bEntropyCodingCABAC : Entropy decoding method to be applied for the
- * syntax elements for which two descriptors appear
+ * bEntropyCodingCABAC : Entropy decoding method to be applied for the
+ * syntax elements for which two descriptors appear
* in the syntax tables
- * bWeightedPPrediction : Enable/disable weighted prediction shall not
+ * bWeightedPPrediction : Enable/disable weighted prediction shall not
* be applied to P and SP slices
- * nWeightedBipredicitonMode : Default weighted prediction is applied to B
- * slices
+ * nWeightedBipredicitonMode : Default weighted prediction is applied to B
+ * slices
* bconstIpred : Enable/disable intra prediction
- * bDirect8x8Inference : Specifies the method used in the derivation
- * process for luma motion vectors for B_Skip,
- * B_Direct_16x16 and B_Direct_8x8 as specified
- * in subclause 8.4.1.2 of the AVC spec
+ * bDirect8x8Inference : Specifies the method used in the derivation
+ * process for luma motion vectors for B_Skip,
+ * B_Direct_16x16 and B_Direct_8x8 as specified
+ * in subclause 8.4.1.2 of the AVC spec
* bDirectSpatialTemporal : Flag indicating spatial or temporal direct
- * mode used in B slice coding (related to
- * bDirect8x8Inference) . Spatial direct mode is
+ * mode used in B slice coding (related to
+ * bDirect8x8Inference) . Spatial direct mode is
* more common and should be the default.
* nCabacInitIdx : Index used to init CABAC contexts
* eLoopFilterMode : Enable/disable loop filter
*/
typedef struct OMX_VIDEO_PARAM_AVCTYPE {
- OMX_U32 nSize;
- OMX_VERSIONTYPE nVersion;
- OMX_U32 nPortIndex;
- OMX_U32 nSliceHeaderSpacing;
- OMX_U32 nPFrames;
- OMX_U32 nBFrames;
+ OMX_U32 nSize;
+ OMX_VERSIONTYPE nVersion;
+ OMX_U32 nPortIndex;
+ OMX_U32 nSliceHeaderSpacing;
+ OMX_U32 nPFrames;
+ OMX_U32 nBFrames;
OMX_BOOL bUseHadamard;
- OMX_U32 nRefFrames;
+ OMX_U32 nRefFrames;
OMX_U32 nRefIdx10ActiveMinus1;
OMX_U32 nRefIdx11ActiveMinus1;
- OMX_BOOL bEnableUEP;
- OMX_BOOL bEnableFMO;
- OMX_BOOL bEnableASO;
- OMX_BOOL bEnableRS;
+ OMX_BOOL bEnableUEP;
+ OMX_BOOL bEnableFMO;
+ OMX_BOOL bEnableASO;
+ OMX_BOOL bEnableRS;
OMX_VIDEO_AVCPROFILETYPE eProfile;
- OMX_VIDEO_AVCLEVELTYPE eLevel;
- OMX_U32 nAllowedPictureTypes;
- OMX_BOOL bFrameMBsOnly;
- OMX_BOOL bMBAFF;
- OMX_BOOL bEntropyCodingCABAC;
- OMX_BOOL bWeightedPPrediction;
- OMX_U32 nWeightedBipredicitonMode;
+ OMX_VIDEO_AVCLEVELTYPE eLevel;
+ OMX_U32 nAllowedPictureTypes;
+ OMX_BOOL bFrameMBsOnly;
+ OMX_BOOL bMBAFF;
+ OMX_BOOL bEntropyCodingCABAC;
+ OMX_BOOL bWeightedPPrediction;
+ OMX_U32 nWeightedBipredicitonMode;
OMX_BOOL bconstIpred ;
- OMX_BOOL bDirect8x8Inference;
+ OMX_BOOL bDirect8x8Inference;
OMX_BOOL bDirectSpatialTemporal;
OMX_U32 nCabacInitIdc;
OMX_VIDEO_AVCLOOPFILTERTYPE eLoopFilterMode;
} OMX_VIDEO_PARAM_AVCTYPE;
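A hedged sketch (not from this patch) of the usual read-modify-write pattern for this structure, assuming a component handle hComp, an encoder output port 1, and the OMX IL and standard C headers included; the values shown are illustrative Baseline-style settings, not a recommendation:

    OMX_ERRORTYPE eError;
    OMX_VIDEO_PARAM_AVCTYPE tAvc;
    memset(&tAvc, 0, sizeof(tAvc));
    tAvc.nSize = sizeof(OMX_VIDEO_PARAM_AVCTYPE);
    tAvc.nVersion.s.nVersionMajor = 1;
    tAvc.nVersion.s.nVersionMinor = 1;
    tAvc.nPortIndex = 1;                                /* hypothetical encoder output port */
    eError = OMX_GetParameter(hComp, OMX_IndexParamVideoAvc, &tAvc);
    /* Baseline-style settings: no B frames, CAVLC entropy coding, loop filter on */
    tAvc.eProfile            = OMX_VIDEO_AVCProfileBaseline;
    tAvc.eLevel              = OMX_VIDEO_AVCLevel31;
    tAvc.nPFrames            = 29;                      /* roughly one I frame per 30 pictures */
    tAvc.nBFrames            = 0;
    tAvc.bEntropyCodingCABAC = OMX_FALSE;
    tAvc.eLoopFilterMode     = OMX_VIDEO_AVCLoopFilterEnable;
    eError = OMX_SetParameter(hComp, OMX_IndexParamVideoAvc, &tAvc);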
typedef struct OMX_VIDEO_PARAM_PROFILELEVELTYPE {
- OMX_U32 nSize;
- OMX_VERSIONTYPE nVersion;
- OMX_U32 nPortIndex;
- OMX_U32 eProfile; /**< type is OMX_VIDEO_AVCPROFILETYPE, OMX_VIDEO_H263PROFILETYPE,
+ OMX_U32 nSize;
+ OMX_VERSIONTYPE nVersion;
+ OMX_U32 nPortIndex;
+ OMX_U32 eProfile; /**< type is OMX_VIDEO_AVCPROFILETYPE, OMX_VIDEO_H263PROFILETYPE,
or OMX_VIDEO_MPEG4PROFILETYPE depending on context */
- OMX_U32 eLevel; /**< type is OMX_VIDEO_AVCLEVELTYPE, OMX_VIDEO_H263LEVELTYPE,
+ OMX_U32 eLevel; /**< type is OMX_VIDEO_AVCLEVELTYPE, OMX_VIDEO_H263LEVELTYPE,
or OMX_VIDEO_MPEG4PROFILETYPE depending on context */
OMX_U32 nProfileIndex; /**< Used to query for individual profile support information,
- This parameter is valid only for
+ This parameter is valid only for
OMX_IndexParamVideoProfileLevelQuerySupported index,
For all other indices this parameter is to be ignored. */
} OMX_VIDEO_PARAM_PROFILELEVELTYPE;
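Because nProfileIndex is only honoured for OMX_IndexParamVideoProfileLevelQuerySupported, the customary way to use this structure is to walk the index until the component runs out of entries; a minimal sketch assuming a component handle hComp and a hypothetical video port 1 (headers assumed included):

    OMX_VIDEO_PARAM_PROFILELEVELTYPE tQuery;
    OMX_ERRORTYPE err;
    OMX_U32 i = 0;
    memset(&tQuery, 0, sizeof(tQuery));
    tQuery.nSize = sizeof(OMX_VIDEO_PARAM_PROFILELEVELTYPE);
    tQuery.nVersion.s.nVersionMajor = 1;
    tQuery.nVersion.s.nVersionMinor = 1;
    tQuery.nPortIndex = 1;                              /* hypothetical video port */
    do {
        tQuery.nProfileIndex = i++;
        err = OMX_GetParameter(hComp,
                OMX_IndexParamVideoProfileLevelQuerySupported, &tQuery);
        /* on OMX_ErrorNone, tQuery.eProfile / tQuery.eLevel is one supported pair */
    } while (err == OMX_ErrorNone);                     /* OMX_ErrorNoMore ends the walk */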
-/**
- * Structure for dynamically configuring bitrate mode of a codec.
+/**
+ * Structure for dynamically configuring bitrate mode of a codec.
*
* STRUCT MEMBERS:
* nSize : Size of the struct in bytes
@@ -928,18 +946,18 @@ typedef struct OMX_VIDEO_PARAM_PROFILELEVELTYPE {
* nEncodeBitrate : Target average bitrate to be generated in bps
*/
typedef struct OMX_VIDEO_CONFIG_BITRATETYPE {
- OMX_U32 nSize;
- OMX_VERSIONTYPE nVersion;
- OMX_U32 nPortIndex;
- OMX_U32 nEncodeBitrate;
+ OMX_U32 nSize;
+ OMX_VERSIONTYPE nVersion;
+ OMX_U32 nPortIndex;
+ OMX_U32 nEncodeBitrate;
} OMX_VIDEO_CONFIG_BITRATETYPE;
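As this is a config rather than a param, it may be changed while the encoder is executing; a short illustrative sketch (not from this patch) of adjusting the target bitrate at runtime, assuming hComp and output port 1:

    OMX_ERRORTYPE eError;
    OMX_VIDEO_CONFIG_BITRATETYPE tBitrate;
    memset(&tBitrate, 0, sizeof(tBitrate));
    tBitrate.nSize = sizeof(OMX_VIDEO_CONFIG_BITRATETYPE);
    tBitrate.nVersion.s.nVersionMajor = 1;
    tBitrate.nVersion.s.nVersionMinor = 1;
    tBitrate.nPortIndex = 1;                            /* hypothetical encoder output port */
    tBitrate.nEncodeBitrate = 4000000;                  /* new 4 Mbps target average */
    eError = OMX_SetConfig(hComp, OMX_IndexConfigVideoBitrate, &tBitrate);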
-/**
+/**
* Defines Encoder Frame Rate setting
*
* STRUCT MEMBERS:
* nSize : Size of the structure in bytes
- * nVersion : OMX specification version information
+ * nVersion : OMX specification version information
* nPortIndex : Port that this structure applies to
* xEncodeFramerate : Encoding framerate represented in Q16 format
*/
@@ -979,8 +997,8 @@ typedef struct OMX_PARAM_MACROBLOCKSTYPE {
OMX_U32 nMacroblocks;
} OMX_PARAM_MACROBLOCKSTYPE;
-/**
- * AVC Slice Mode modes
+/**
+ * AVC Slice Mode modes
*
* OMX_VIDEO_SLICEMODE_AVCDefault : Normal frame encoding, one slice per frame
* OMX_VIDEO_SLICEMODE_AVCMBSlice : NAL mode, number of MBs per frame
@@ -990,13 +1008,13 @@ typedef enum OMX_VIDEO_AVCSLICEMODETYPE {
OMX_VIDEO_SLICEMODE_AVCDefault = 0,
OMX_VIDEO_SLICEMODE_AVCMBSlice,
OMX_VIDEO_SLICEMODE_AVCByteSlice,
- OMX_VIDEO_SLICEMODE_AVCKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
+ OMX_VIDEO_SLICEMODE_AVCKhronosExtensions = 0x6F000000, /**< Reserved region for introducing Khronos Standard Extensions */
OMX_VIDEO_SLICEMODE_AVCVendorStartUnused = 0x7F000000, /**< Reserved region for introducing Vendor Extensions */
OMX_VIDEO_SLICEMODE_AVCLevelMax = 0x7FFFFFFF
} OMX_VIDEO_AVCSLICEMODETYPE;
-/**
- * AVC FMO Slice Mode Params
+/**
+ * AVC FMO Slice Mode Params
*
* STRUCT MEMBERS:
* nSize : Size of the structure in bytes
@@ -1007,7 +1025,7 @@ typedef enum OMX_VIDEO_AVCSLICEMODETYPE {
* eSliceMode : Specifies the type of slice
*/
typedef struct OMX_VIDEO_PARAM_AVCSLICEFMO {
- OMX_U32 nSize;
+ OMX_U32 nSize;
OMX_VERSIONTYPE nVersion;
OMX_U32 nPortIndex;
OMX_U8 nNumSliceGroups;
@@ -1015,7 +1033,7 @@ typedef struct OMX_VIDEO_PARAM_AVCSLICEFMO {
OMX_VIDEO_AVCSLICEMODETYPE eSliceMode;
} OMX_VIDEO_PARAM_AVCSLICEFMO;
-/**
+/**
* AVC IDR Period Configs
*
* STRUCT MEMBERS:
@@ -1026,14 +1044,14 @@ typedef struct OMX_VIDEO_PARAM_AVCSLICEFMO {
* nPFrames : Specifies interval of coding Intra frames
*/
typedef struct OMX_VIDEO_CONFIG_AVCINTRAPERIOD {
- OMX_U32 nSize;
+ OMX_U32 nSize;
OMX_VERSIONTYPE nVersion;
OMX_U32 nPortIndex;
OMX_U32 nIDRPeriod;
OMX_U32 nPFrames;
} OMX_VIDEO_CONFIG_AVCINTRAPERIOD;
-/**
+/**
* AVC NAL Size Configs
*
* STRUCT MEMBERS:
@@ -1043,7 +1061,7 @@ typedef struct OMX_VIDEO_CONFIG_AVCINTRAPERIOD {
* nNaluBytes : Specifies the NAL unit size
*/
typedef struct OMX_VIDEO_CONFIG_NALSIZE {
- OMX_U32 nSize;
+ OMX_U32 nSize;
OMX_VERSIONTYPE nVersion;
OMX_U32 nPortIndex;
OMX_U32 nNaluBytes;
diff --git a/domx/omx_core/src/OMX_Core.c b/domx/omx_core/src/OMX_Core.c
index d585ac5..8d9b529 100755
--- a/domx/omx_core/src/OMX_Core.c
+++ b/domx/omx_core/src/OMX_Core.c
@@ -81,24 +81,31 @@ char compName[60][200];
char *tComponentName[MAXCOMP][MAX_ROLES] = {
/*video and image components */
- {"OMX.TI.DUCATI1.VIDEO.DECODER", "video_decoder.mpeg4",
+ {"OMX.TI.DUCATI1.VIDEO.DECODER",
+ "video_decoder.mpeg4",
"video_decoder.avc",
"video_decoder.h263",
"video_decoder.wmv",
- "video_decoder.vp6",
- "video_decoder.vp7", NULL},
- {"OMX.TI.DUCATI1.VIDEO.DECODER.secure", "video_decoder.mpeg4",
+ "video_decoder.mpeg2",
+ "video_decoder.svc",
+ "video_decoder.sorspk", NULL},
+ {"OMX.TI.DUCATI1.VIDEO.DECODER.secure",
+ "video_decoder.mpeg4",
"video_decoder.avc",
"video_decoder.h263", NULL},
- {"OMX.TI.DUCATI1.VIDEO.H264D", "video_decoder.avc", NULL},
- {"OMX.TI.DUCATI1.VIDEO.H264E", "video_encoder.avc", NULL},
- {"OMX.TI.DUCATI1.VIDEO.MPEG4D", "video_decoder.mpeg4", NULL},
- {"OMX.TI.DUCATI1.VIDEO.MPEG4E", "video_encoder.mpeg4",
- "video_encoder.h263",NULL},
- {"OMX.TI.DUCATI1.VIDEO.VP6D", "video_decoder.vp6", NULL},
- {"OMX.TI.DUCATI1.VIDEO.VP7D", "video_decoder.vp7", NULL},
- {"OMX.TI.DUCATI1.IMAGE.JPEGD", "jpeg_decoder.jpeg", NULL},
+ {"OMX.TI.DUCATI1.VIDEO.H264E",
+ "video_encoder.avc", NULL},
+ {"OMX.TI.DUCATI1.VIDEO.MPEG4E",
+ "video_encoder.mpeg4",
+ "video_encoder.h263",NULL},
{"OMX.TI.DUCATI1.VIDEO.CAMERA", "camera.omx", NULL},
+#ifdef USE_ITTIAM_AAC
+ {"OMX.ITTIAM.AAC.decode", "audio_decoder.aac", NULL},
+ {"OMX.ITTIAM.BSAC.decode", "audio_decoder.aac", NULL},
+#endif
+ {"OMX.ITTIAM.WMA.decode", "audio_decoder.wma", NULL},
+ {"OMX.ITTIAM.WMALSL.decode", "audio_decoder.wmalsl", NULL},
+ {"OMX.ITTIAM.WMAPRO.decode", "audio_decoder.wmapro", NULL},
/* terminate the table */
{NULL, NULL},
};
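The table above is what backs role queries in this core; for reference, a hedged sketch (not part of the patch) of how an IL client could list the roles of the reworked decoder entry through the usual two-pass OMX_GetRolesOfComponent call, assuming OMX_Init() has already succeeded and stdio/stdlib are included (frees omitted for brevity):

    OMX_U32 numRoles = 0, i;
    OMX_U8 **roles;
    OMX_GetRolesOfComponent("OMX.TI.DUCATI1.VIDEO.DECODER", &numRoles, NULL);   /* pass 1: count */
    roles = malloc(numRoles * sizeof(OMX_U8 *));
    for (i = 0; i < numRoles; i++)
        roles[i] = malloc(OMX_MAX_STRINGNAME_SIZE);
    OMX_GetRolesOfComponent("OMX.TI.DUCATI1.VIDEO.DECODER", &numRoles, roles);  /* pass 2: names */
    for (i = 0; i < numRoles; i++)
        printf("role %u: %s\n", (unsigned)i, (char *)roles[i]);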
@@ -215,7 +222,7 @@ OMX_ERRORTYPE OMX_GetHandle(OMX_HANDLETYPE * pHandle,
/* Locate the first empty slot for a component. If no slots
* are available, error out */
- for (i = 0; i < COUNTOF(pModules); i++)
+ for (i = 0; i < (int)COUNTOF(pModules); i++)
{
if (pModules[i] == NULL)
break;
@@ -246,36 +253,42 @@ OMX_ERRORTYPE OMX_GetHandle(OMX_HANDLETYPE * pHandle,
{
TIMM_OSAL_Error("Can't open misc driver device 0x%x\n", errno);
}
-
- ret = read(secure_misc_drv_fd, &mode, sizeof(mode));
- if (ret < 0)
- {
- TIMM_OSAL_Error("Can't read from the misc driver");
- }
- if(mode == enable && strstr(cComponentName,"secure") == NULL)
- {
- TIMM_OSAL_Error("non-secure component not supported in secure mode");
- eError = OMX_ErrorComponentNotFound;
- }
- ret = close(secure_misc_drv_fd);
- if (ret < 0)
- {
- TIMM_OSAL_Error("Can't close the misc driver");
- }
- //Dont allow non-secure usecases if we are in secure state.
- //Else some of the memory regions will be unexpected firewalled.
- //This provides a clean exit in case we are in secure mode.
- if(eError == OMX_ErrorComponentNotFound)
+ else
{
- goto EXIT;
+ ret = read(secure_misc_drv_fd, &mode, sizeof(mode));
+ if (ret != sizeof(mode))
+ {
+ TIMM_OSAL_Error("Can't read from the misc driver");
+ }
+ else
+ {
+ if(mode == enable && strstr(cComponentName,"secure") == NULL)
+ {
+ TIMM_OSAL_Error("non-secure component not supported in secure mode");
+ eError = OMX_ErrorComponentNotFound;
+ }
+ }
+ ret = close(secure_misc_drv_fd);
+ if (ret < 0)
+ {
+ TIMM_OSAL_Error("Can't close the misc driver");
+ }
}
-#endif
+ /* Don't allow non-secure usecases if we are in secure state.
+ * Else some of the memory regions will be unexpectedly firewalled.
+ * This provides a clean exit in case we are in secure mode. */
+ if (eError == OMX_ErrorComponentNotFound)
+ {
+ goto EXIT;
+ }
+#endif //CHECK_SECURE_STATE
+
//#if 0
pModules[i] = dlopen(buf, RTLD_LAZY | RTLD_GLOBAL);
if (pModules[i] == NULL)
{
- dlError = dlerror();
+ dlError = (char *)dlerror();
TIMM_OSAL_Error("Failed because %s", dlError);
eError = OMX_ErrorComponentNotFound;
goto EXIT;
@@ -369,7 +382,7 @@ OMX_ERRORTYPE OMX_FreeHandle(OMX_HANDLETYPE hComponent)
"OMX_FreeHandle called without calling OMX_Init first");
/* Locate the component handle in the array of handles */
- for (i = 0; i < COUNTOF(pModules); i++)
+ for (i = 0; i < (int)COUNTOF(pModules); i++)
{
if (pComponents[i] == hComponent)
break;
@@ -537,7 +550,7 @@ OMX_API OMX_ERRORTYPE OMX_APIENTRY OMX_ComponentNameEnum(OMX_OUT OMX_STRING
CORE_require(count > 0, OMX_ErrorUndefined,
"OMX_GetHandle called without calling OMX_Init first");
- if (nIndex >= tableCount)
+ if (nIndex >= (OMX_U32)tableCount)
{
eError = OMX_ErrorNoMore;
} else
@@ -581,7 +594,7 @@ OMX_API OMX_ERRORTYPE OMX_GetRolesOfComponent(OMX_IN OMX_STRING
CORE_require(count > 0, OMX_ErrorUndefined,
"OMX_GetHandle called without calling OMX_Init first");
- while (!bFound && i < tableCount)
+ while (!bFound && i < (OMX_U32)tableCount)
{
if (strcmp(cComponentName, componentTable[i].name) == 0)
{
@@ -642,7 +655,7 @@ OMX_API OMX_ERRORTYPE OMX_GetComponentsOfRole(OMX_IN OMX_STRING role,
CORE_assert(componentTable[i].pRoleArray[j] != NULL,
OMX_ErrorBadParameter, NULL);
- for (i = 0; i < tableCount; i++)
+ for (i = 0; i < (OMX_U32)tableCount; i++)
{
for (j = 0; j < componentTable[i].nRoles; j++)
{
diff --git a/domx/omx_core/src/OMX_Core_Wrapper.c b/domx/omx_core/src/OMX_Core_Wrapper.c
index 8d61dd4..6fbf3d7 100755
--- a/domx/omx_core/src/OMX_Core_Wrapper.c
+++ b/domx/omx_core/src/OMX_Core_Wrapper.c
@@ -38,17 +38,13 @@
#include "OMX_ComponentRegistry.h"
#include "OMX_Core_Wrapper.h"
-#include <utils/Log.h>
+#include "timm_osal_trace.h"
#undef LOG_TAG
#define LOG_TAG "OMX_CORE"
/** determine capabilities of a component before actually using it */
-#if 0
-#include "ti_omx_config_parser.h"
-#else
extern OMX_BOOL TIOMXConfigParser(OMX_PTR aInputParameters,
OMX_PTR aOutputParameters);
-#endif
#endif
@@ -58,12 +54,12 @@ extern OMX_BOOL TIOMXConfigParser(OMX_PTR aInputParameters,
OMX_BOOL TIOMXConfigParserRedirect(OMX_PTR aInputParameters,
OMX_PTR aOutputParameters)
{
- ALOGV("OMXConfigParserRedirect +\n");
+ TIMM_OSAL_Entering("OMXConfigParserRedirect +\n");
OMX_BOOL Status = OMX_FALSE;
Status = TIOMXConfigParser(aInputParameters, aOutputParameters);
- ALOGV("OMXConfigParserRedirect -\n");
+ TIMM_OSAL_Exiting("OMXConfigParserRedirect -\n");
return Status;
}
#endif
@@ -92,14 +88,14 @@ OMX_ERRORTYPE TIComponentTable_FillBufferDone(OMX_OUT OMX_HANDLETYPE
OMX_API OMX_ERRORTYPE TIOMX_Init(void)
{
- ALOGV("TIOMX_Init\n");
+ TIMM_OSAL_Entering("TIOMX_Init\n");
return OMX_Init();
}
OMX_API OMX_ERRORTYPE TIOMX_Deinit(void)
{
- ALOGV("TIOMX_Deinit\n");
+ TIMM_OSAL_Entering("TIOMX_Deinit\n");
return OMX_Deinit();
}
@@ -108,7 +104,7 @@ OMX_API OMX_ERRORTYPE TIOMX_ComponentNameEnum(OMX_OUT OMX_STRING
cComponentName, OMX_IN OMX_U32 nNameLength, OMX_IN OMX_U32 nIndex)
{
- ALOGV("TIOMX_ComponentNameEnum\n");
+ TIMM_OSAL_Entering("TIOMX_ComponentNameEnum\n");
return OMX_ComponentNameEnum(cComponentName, nNameLength, nIndex);
}
@@ -118,14 +114,14 @@ OMX_API OMX_ERRORTYPE TIOMX_GetHandle(OMX_OUT OMX_HANDLETYPE * pHandle,
OMX_IN OMX_PTR pAppData, OMX_IN OMX_CALLBACKTYPE * pCallBacks)
{
- ALOGV("TIOMX_GetHandle\n");
+ TIMM_OSAL_Entering("TIOMX_GetHandle\n");
return OMX_GetHandle(pHandle, cComponentName, pAppData, pCallBacks);
}
OMX_API OMX_ERRORTYPE TIOMX_FreeHandle(OMX_IN OMX_HANDLETYPE hComponent)
{
- ALOGV("TIOMX_FreeHandle\n");
+ TIMM_OSAL_Entering("TIOMX_FreeHandle\n");
return OMX_FreeHandle(hComponent);
}
@@ -134,7 +130,7 @@ OMX_API OMX_ERRORTYPE TIOMX_GetComponentsOfRole(OMX_IN OMX_STRING role,
OMX_INOUT OMX_U32 * pNumComps, OMX_INOUT OMX_U8 ** compNames)
{
- ALOGV("TIOMX_GetComponentsOfRole\n");
+ TIMM_OSAL_Entering("TIOMX_GetComponentsOfRole\n");
return OMX_GetComponentsOfRole(role, pNumComps, compNames);
}
@@ -143,7 +139,7 @@ OMX_API OMX_ERRORTYPE TIOMX_GetRolesOfComponent(OMX_IN OMX_STRING compName,
OMX_INOUT OMX_U32 * pNumRoles, OMX_OUT OMX_U8 ** roles)
{
- ALOGV("TIOMX_GetRolesOfComponent\n");
+ TIMM_OSAL_Entering("TIOMX_GetRolesOfComponent\n");
return OMX_GetRolesOfComponent(compName, pNumRoles, roles);
}
@@ -153,7 +149,7 @@ OMX_API OMX_ERRORTYPE TIOMX_SetupTunnel(OMX_IN OMX_HANDLETYPE hOutput,
OMX_IN OMX_HANDLETYPE hInput, OMX_IN OMX_U32 nPortInput)
{
- ALOGV("TIOMX_SetupTunnel\n");
+ TIMM_OSAL_Entering("TIOMX_SetupTunnel\n");
return OMX_SetupTunnel(hOutput, nPortOutput, hInput, nPortInput);
}
@@ -162,7 +158,7 @@ OMX_API OMX_ERRORTYPE TIOMX_GetContentPipe(OMX_OUT OMX_HANDLETYPE * hPipe,
OMX_IN OMX_STRING szURI)
{
- ALOGV("TIOMX_GetContentPipe\n");
+ TIMM_OSAL_Entering("TIOMX_GetContentPipe\n");
//return OMX_GetContentPipe(
// hPipe,
diff --git a/domx/omx_proxy_component/Android.mk b/domx/omx_proxy_component/Android.mk
index 7931962..e23613d 100644
--- a/domx/omx_proxy_component/Android.mk
+++ b/domx/omx_proxy_component/Android.mk
@@ -27,7 +27,15 @@ LOCAL_CFLAGS += -D_Android -DSET_STRIDE_PADDING_FROM_PROXY -DANDROID_QUIRK_CHANG
LOCAL_CFLAGS += -DANDROID_QUIRK_LOCK_BUFFER -DUSE_ION -DENABLE_GRALLOC_BUFFERS
LOCAL_MODULE_TAGS:= optional
-LOCAL_SRC_FILES:= omx_video_dec/src/omx_proxy_videodec.c
+LOCAL_SRC_FILES:= omx_video_dec/src/omx_proxy_videodec.c \
+ omx_video_dec/src/omx_proxy_videodec_utils.c
+
+# Uncomment the below 2 lines to enable the run time
+# dump of NV12 buffers from Decoder/Camera
+# based on setprop control
+#LOCAL_CFLAGS += -DENABLE_RAW_BUFFERS_DUMP_UTILITY
+#LOCAL_SHARED_LIBRARIES += libcutils
+
LOCAL_MODULE:= libOMX.TI.DUCATI1.VIDEO.DECODER
include $(BUILD_HEAPTRACKED_SHARED_LIBRARY)
@@ -67,6 +75,8 @@ include $(BUILD_HEAPTRACKED_SHARED_LIBRARY)
include $(CLEAR_VARS)
LOCAL_C_INCLUDES += \
+ $(HARDWARE_TI_OMAP4_BASE)/kernel-headers-ti \
+ $(HARDWARE_TI_OMAP4_BASE)/system-core-headers-ti \
$(LOCAL_PATH)/../omx_core/inc \
$(LOCAL_PATH)/../mm_osal/inc \
$(LOCAL_PATH)/../domx \
@@ -102,10 +112,9 @@ LOCAL_C_INCLUDES += \
$(LOCAL_PATH)/../domx \
$(LOCAL_PATH)/../domx/omx_rpc/inc \
system/core/include/cutils \
- $(LOCAL_PATH)/../../hwc \
+ $(HARDWARE_TI_OMAP4_BASE)/hwc \
$(HARDWARE_TI_OMAP4_BASE)/camera/inc \
- frameworks/base/include/media/stagefright \
- frameworks/native/include/media/hardware
+ frameworks/native/include/media/hardware \
LOCAL_SHARED_LIBRARIES := \
libmm_osal \
@@ -113,8 +122,8 @@ LOCAL_SHARED_LIBRARIES := \
libOMX_Core \
liblog \
libdomx \
- libhardware
-
+ libhardware \
+ libcutils
LOCAL_CFLAGS += -DLINUX -DTMS32060 -D_DB_TIOMAP -DSYSLINK_USE_SYSMGR -DSYSLINK_USE_LOADER
LOCAL_CFLAGS += -D_Android -DSET_STRIDE_PADDING_FROM_PROXY -DANDROID_QUIRK_CHANGE_PORT_VALUES
@@ -138,10 +147,9 @@ LOCAL_C_INCLUDES += \
$(LOCAL_PATH)/../domx \
$(LOCAL_PATH)/../domx/omx_rpc/inc \
system/core/include/cutils \
- $(LOCAL_PATH)/../../hwc \
+ $(HARDWARE_TI_OMAP4_BASE)/hwc \
$(HARDWARE_TI_OMAP4_BASE)/camera/inc \
- frameworks/base/include/media/stagefright \
- frameworks/native/include/media/hardware
+ frameworks/native/include/media/hardware \
LOCAL_SHARED_LIBRARIES := \
libmm_osal \
@@ -149,7 +157,8 @@ LOCAL_SHARED_LIBRARIES := \
libOMX_Core \
liblog \
libdomx \
- libhardware
+ libhardware \
+ libcutils
LOCAL_CFLAGS += -DLINUX -DTMS32060 -D_DB_TIOMAP -DSYSLINK_USE_SYSMGR -DSYSLINK_USE_LOADER
LOCAL_CFLAGS += -D_Android -DSET_STRIDE_PADDING_FROM_PROXY -DANDROID_QUIRK_CHANGE_PORT_VALUES
diff --git a/domx/omx_proxy_component/omx_camera/src/omx_proxy_camera.c b/domx/omx_proxy_component/omx_camera/src/omx_proxy_camera.c
index 729c190..f411d28 100755
--- a/domx/omx_proxy_component/omx_camera/src/omx_proxy_camera.c
+++ b/domx/omx_proxy_component/omx_camera/src/omx_proxy_camera.c
@@ -65,6 +65,7 @@
#include <pthread.h>
#include <sys/time.h>
#include <stdlib.h>
+#include <errno.h>
#include <timm_osal_interfaces.h>
#include <OMX_TI_IVCommon.h>
@@ -74,15 +75,12 @@
#ifdef USE_ION
#include <unistd.h>
-#include <ion.h>
+#include <ion/ion.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <sys/eventfd.h>
#include <fcntl.h>
-
-#else
-/* Tiler APIs */
-#include <memmgr.h>
+#include <errno.h>
#endif
#define COMPONENT_NAME "OMX.TI.DUCATI1.VIDEO.CAMERA"
@@ -92,11 +90,7 @@
#define LOG_TAG "CameraHAL"
#define DEFAULT_DCC 1
-#ifdef _Android
-#define DCC_PATH "/data/misc/camera/"
-#else
-#define DCC_PATH "/usr/share/omapcam/"
-#endif
+
#define LINUX_PAGE_SIZE (4 * 1024)
#define _PROXY_OMX_INIT_PARAM(param,type) do { \
@@ -106,6 +100,21 @@
(param)->nVersion.s.nVersionMinor = 1; \
} while(0)
+/* VTC specific changes */
+#define MAX_NUM_INTERNAL_BUFFERS 4
+#define MAX_VTC_WIDTH 1920
+#define MAX_VTC_HEIGHT 1080
+#define BORDER_WIDTH 32
+#define BORDER_HEIGHT 32
+#define MAX_VTC_WIDTH_WITH_VNF (MAX_VTC_WIDTH + BORDER_WIDTH)
+#define MAX_VTC_HEIGHT_WITH_VNF (MAX_VTC_HEIGHT + BORDER_HEIGHT)
+OMX_PTR gCamIonHdl[MAX_NUM_INTERNAL_BUFFERS][2];
+
+/* Tiler heap reservation specific */
+#define OMAP_ION_HEAP_TILER_ALLOCATION_MASK (1<<4)
+/* store handles for tracking and freeing */
+OMX_PTR gComponentBufferAllocation[PROXY_MAXNUMOFPORTS][MAX_NUM_INTERNAL_BUFFERS];
+
/* In case of multiple instances, making sure DCC is initialized only for
first instance */
static OMX_S16 numofInstance = 0;
@@ -122,9 +131,6 @@ OMX_PTR DCC_Buff = NULL;
OMX_PTR DCC_Buff_ptr = NULL;
int ion_fd;
int mmap_fd;
-
-#else
-MemAllocBlock *MemReqDescTiler;
#endif
OMX_S32 read_DCCdir(OMX_PTR, OMX_STRING *, OMX_U16);
@@ -137,9 +143,11 @@ OMX_ERRORTYPE __PROXY_SetConfig(OMX_HANDLETYPE, OMX_INDEXTYPE,
OMX_ERRORTYPE __PROXY_GetConfig(OMX_HANDLETYPE, OMX_INDEXTYPE,
OMX_PTR, OMX_PTR);
OMX_ERRORTYPE __PROXY_SetParameter(OMX_IN OMX_HANDLETYPE, OMX_INDEXTYPE,
- OMX_PTR, OMX_PTR);
+ OMX_PTR, OMX_PTR, OMX_U32);
OMX_ERRORTYPE __PROXY_GetParameter(OMX_IN OMX_HANDLETYPE, OMX_INDEXTYPE,
OMX_PTR, OMX_PTR);
+OMX_ERRORTYPE PROXY_SendCommand(OMX_HANDLETYPE, OMX_COMMANDTYPE,
+ OMX_U32,OMX_PTR);
OMX_ERRORTYPE CameraMaptoTilerDuc(OMX_TI_CONFIG_SHAREDBUFFER *, OMX_PTR *);
//COREID TARGET_CORE_ID = CORE_APPM3;
@@ -147,6 +155,11 @@ static OMX_ERRORTYPE ComponentPrivateDeInit(OMX_IN OMX_HANDLETYPE hComponent)
{
OMX_ERRORTYPE eError = OMX_ErrorNone;
TIMM_OSAL_ERRORTYPE eOsalError = TIMM_OSAL_ERR_NONE;
+ PROXY_COMPONENT_PRIVATE *pCompPrv;
+ OMX_COMPONENTTYPE *hComp = (OMX_COMPONENTTYPE *) hComponent;
+ OMX_U32 i, j;
+
+ pCompPrv = (PROXY_COMPONENT_PRIVATE *) hComp->pComponentPrivate;
if (dcc_flag)
{
@@ -163,6 +176,26 @@ static OMX_ERRORTYPE ComponentPrivateDeInit(OMX_IN OMX_HANDLETYPE hComponent)
PROXY_assert(eOsalError == TIMM_OSAL_ERR_NONE,
OMX_ErrorInsufficientResources, "Mutex release failed");
}
+ for(i=0; i < MAX_NUM_INTERNAL_BUFFERS; i++) {
+ if (gCamIonHdl[i][0] != NULL) {
+ ion_free(pCompPrv->ion_fd, gCamIonHdl[i][0]);
+ gCamIonHdl[i][0] = NULL;
+ }
+ if (gCamIonHdl[i][1] != NULL) {
+ ion_free(pCompPrv->ion_fd, gCamIonHdl[i][1]);
+ gCamIonHdl[i][1] = NULL;
+ }
+
+ }
+
+ for (i = 0; i < PROXY_MAXNUMOFPORTS; i++) {
+ for (j = 0; j < MAX_NUM_INTERNAL_BUFFERS; j++) {
+ if (gComponentBufferAllocation[i][j]) {
+ ion_free(pCompPrv->ion_fd, gComponentBufferAllocation[i][j]);
+ }
+ gComponentBufferAllocation[i][j] = NULL;
+ }
+ }
eError = PROXY_ComponentDeInit(hComponent);
@@ -170,6 +203,73 @@ static OMX_ERRORTYPE ComponentPrivateDeInit(OMX_IN OMX_HANDLETYPE hComponent)
return eError;
}
+static OMX_ERRORTYPE Camera_SendCommand(OMX_IN OMX_HANDLETYPE hComponent,
+ OMX_IN OMX_COMMANDTYPE eCmd,
+ OMX_IN OMX_U32 nParam, OMX_IN OMX_PTR pCmdData)
+
+{
+ OMX_ERRORTYPE eError = OMX_ErrorNone, eCompReturn;
+ RPC_OMX_ERRORTYPE eRPCError = RPC_OMX_ErrorNone;
+ PROXY_COMPONENT_PRIVATE *pCompPrv;
+ OMX_COMPONENTTYPE *hComp = (OMX_COMPONENTTYPE *) hComponent;
+ static OMX_BOOL dcc_loaded = OMX_FALSE;
+
+ OMX_ERRORTYPE dcc_eError = OMX_ErrorNone;
+ TIMM_OSAL_ERRORTYPE eOsalError = TIMM_OSAL_ERR_NONE;
+ OMX_U32 i;
+
+ pCompPrv = (PROXY_COMPONENT_PRIVATE *) hComp->pComponentPrivate;
+
+ if ((eCmd == OMX_CommandStateSet) &&
+ (nParam == (OMX_STATETYPE) OMX_StateIdle))
+ {
+ if (!dcc_loaded)
+ {
+ dcc_eError = DCC_Init(hComponent);
+ if (dcc_eError != OMX_ErrorNone)
+ {
+ DOMX_ERROR(" Error in DCC Init");
+ }
+ /* Configure Ducati to use DCC buffer from A9 side
+ *ONLY* if DCC_Init is successful. */
+ if (dcc_eError == OMX_ErrorNone)
+ {
+ dcc_eError = send_DCCBufPtr(hComponent);
+ if (dcc_eError != OMX_ErrorNone)
+ {
+ DOMX_ERROR(" Error in Sending DCC Buf ptr");
+ }
+ DCC_DeInit();
+ }
+ dcc_loaded = OMX_TRUE;
+ }
+ } else if (eCmd == OMX_CommandPortDisable) {
+ int i, j;
+ for (i = 0; i < MAX_NUM_INTERNAL_BUFFERS; i++) {
+ for (j = 0; j < PROXY_MAXNUMOFPORTS; j++) {
+ if (((j == nParam) || (nParam == OMX_ALL)) &&
+ gComponentBufferAllocation[i][j])
+ {
+ ion_free(pCompPrv->ion_fd, gComponentBufferAllocation[i][j]);
+ gComponentBufferAllocation[i][j] = NULL;
+ }
+ }
+ }
+
+ }
+
+
+ eError =
+ PROXY_SendCommand(hComponent,eCmd,nParam,pCmdData);
+
+
+EXIT:
+
+ DOMX_EXIT("eError: %d", eError);
+ return eError;
+
+}
+
/* ===========================================================================*/
/**
* @name CameraGetConfig()
@@ -292,12 +392,141 @@ static OMX_ERRORTYPE CameraSetConfig(OMX_IN OMX_HANDLETYPE
return eError;
}
+static OMX_ERRORTYPE CameraSetParam(OMX_IN OMX_HANDLETYPE
+ hComponent, OMX_IN OMX_INDEXTYPE nParamIndex,
+ OMX_INOUT OMX_PTR pComponentParameterStructure)
+{
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ struct ion_handle *handle;
+ OMX_U32 i =0;
+ OMX_S32 ret = 0;
+ PROXY_COMPONENT_PRIVATE *pCompPrv;
+ OMX_COMPONENTTYPE *hComp = (OMX_COMPONENTTYPE *)hComponent;
+ OMX_U32 stride_Y = 0, stride_UV = 0;
+ OMX_TI_PARAM_VTCSLICE *pVtcConfig;// = (OMX_TI_PARAM_VTCSLICE *)pComponentParameterStructure;
+ OMX_TI_PARAM_COMPONENTBUFALLOCTYPE *bufferalloc = NULL;
+ int size = 0;
+ int fd1 = -1, fd2 = -1;
+
+ pCompPrv = (PROXY_COMPONENT_PRIVATE *)hComp->pComponentPrivate;
+ //fprintf(stdout, "DOMX: CameraSetParam: called!!!\n");
+ switch (nParamIndex)
+ {
+ case OMX_TI_IndexParamVtcSlice:
+ pVtcConfig = (OMX_TI_PARAM_VTCSLICE *)pComponentParameterStructure;
+ fprintf(stdout, "DOMX: CameraSetParam: OMX_TI_IndexParamVtcSlice is called!!!\n");
+ DOMX_ERROR("CameraSetParam Called for Vtc Slice index\n");
+
+ //fprintf(stdout, "CameraSetParam Called for Vtc Slice height = %d\n", ((OMX_TI_PARAM_VTCSLICE *)pComponentParameterStructure)->nSliceHeight);
+ // MAX_NUM_INTERNAL_BUFFERS;
+
+ for(i=0; i < MAX_NUM_INTERNAL_BUFFERS; i++) {
+ pVtcConfig->nInternalBuffers = i;
+ ret = ion_alloc_tiler(pCompPrv->ion_fd, MAX_VTC_WIDTH_WITH_VNF, MAX_VTC_HEIGHT_WITH_VNF, TILER_PIXEL_FMT_8BIT, OMAP_ION_HEAP_TILER_MASK, &handle, (size_t *)&stride_Y);
+ if (ret < 0) {
+ DOMX_ERROR ("ION allocation failed - %s", strerror(errno));
+ goto EXIT;
+ }
+
+ ret = ion_share(pCompPrv->ion_fd, handle, &fd1);
+ if (ret < 0) {
+ DOMX_ERROR("ION share failed");
+ ion_free(pCompPrv->ion_fd, handle);
+ goto EXIT;
+ }
+
+ pVtcConfig->IonBufhdl[0] = (OMX_PTR)(fd1);
+
+ //fprintf(stdout, "DOMX: ION Buffer#%d: Y: 0x%x\n", i, pVtcConfig->IonBufhdl[0]);
+
+ ret = ion_alloc_tiler(pCompPrv->ion_fd, MAX_VTC_WIDTH_WITH_VNF/2, MAX_VTC_HEIGHT_WITH_VNF/2, TILER_PIXEL_FMT_16BIT, OMAP_ION_HEAP_TILER_MASK, &handle, (size_t *)&stride_UV);
+ if (ret < 0) {
+ DOMX_ERROR ("ION allocation failed - %s", strerror(errno));
+ goto EXIT;
+ }
+
+ ret = ion_share(pCompPrv->ion_fd, handle, &fd2);
+ if (ret < 0) {
+ DOMX_ERROR("ION share failed");
+ ion_free(pCompPrv->ion_fd, handle);
+ goto EXIT;
+ }
+
+ pVtcConfig->IonBufhdl[1] = (OMX_PTR)(fd2);
+ gCamIonHdl[i][0] = pVtcConfig->IonBufhdl[0];
+ gCamIonHdl[i][1] = pVtcConfig->IonBufhdl[1];
+ //fprintf(stdout, "DOMX: ION Buffer#%d: UV: 0x%x\n", i, pVtcConfig->IonBufhdl[1]);
+ eError = __PROXY_SetParameter(hComponent,
+ OMX_TI_IndexParamVtcSlice,
+ pVtcConfig,
+ pVtcConfig->IonBufhdl, 2);
+ close(fd1);
+ close(fd2);
+ }
+ goto EXIT;
+ case OMX_TI_IndexParamComponentBufferAllocation: {
+ OMX_U32 port = 0, index = 0;
+ int fd;
+ bufferalloc = (OMX_TI_PARAM_COMPONENTBUFALLOCTYPE *)
+ pComponentParameterStructure;
+
+ port = bufferalloc->nPortIndex;
+ index = bufferalloc->nIndex;
+
+ size = bufferalloc->nAllocWidth * bufferalloc->nAllocLines;
+ ret = ion_alloc_tiler (pCompPrv->ion_fd, size, 1,
+ TILER_PIXEL_FMT_PAGE,
+ OMAP_ION_HEAP_TILER_ALLOCATION_MASK,
+ &handle, &stride_Y);
+ if (ret < 0) {
+ DOMX_ERROR ("ION allocation failed - %s", strerror(errno));
+ goto EXIT;
+ }
+
+ ret = ion_share(pCompPrv->ion_fd, handle, &fd);
+ if (ret < 0) {
+ DOMX_ERROR("ION share failed");
+ ion_free(pCompPrv->ion_fd, handle);
+ goto EXIT;
+ }
+
+ bufferalloc->pBuf[0] = fd;
+ eError = __PROXY_SetParameter(hComponent,
+ OMX_TI_IndexParamComponentBufferAllocation,
+ bufferalloc, &bufferalloc->pBuf[0], 1);
+ if (eError != OMX_ErrorNone) {
+ ion_free(pCompPrv->ion_fd, handle);
+ } else {
+ if (gComponentBufferAllocation[port][index]) {
+ ion_free(pCompPrv->ion_fd, gComponentBufferAllocation[port][index]);
+ }
+ gComponentBufferAllocation[port][index] = handle;
+ }
+ close (fd);
+ }
+ goto EXIT;
+ break;
+ default:
+ break;
+ }
+ eError = __PROXY_SetParameter(hComponent,
+ nParamIndex,
+ pComponentParameterStructure,
+ NULL, 0);
+
+ if (eError != OMX_ErrorNone) {
+ DOMX_ERROR(" CameraSetParam: Error in SetParam 0x%x", eError);
+ }
+EXIT:
+ return eError;
+}
OMX_ERRORTYPE OMX_ComponentInit(OMX_HANDLETYPE hComponent)
{
OMX_ERRORTYPE eError = OMX_ErrorNone;
OMX_ERRORTYPE dcc_eError = OMX_ErrorNone;
OMX_COMPONENTTYPE *pHandle = NULL;
PROXY_COMPONENT_PRIVATE *pComponentPrivate;
+ OMX_U32 i = 0, j = 0;
pHandle = (OMX_COMPONENTTYPE *) hComponent;
TIMM_OSAL_ERRORTYPE eOsalError = TIMM_OSAL_ERR_NONE;
DOMX_ENTER("_____________________INSIDE CAMERA PROXY"
@@ -335,46 +564,23 @@ OMX_ERRORTYPE OMX_ComponentInit(OMX_HANDLETYPE hComponent)
TIMM_OSAL_Free(pComponentPrivate);
goto EXIT;
}
+ for(i=0; i < MAX_NUM_INTERNAL_BUFFERS; i++) {
+ gCamIonHdl[i][0] = NULL;
+ gCamIonHdl[i][1] = NULL;
+ }
+
+ for (i = 0; i < PROXY_MAXNUMOFPORTS; i++) {
+ for (j = 0; j < MAX_NUM_INTERNAL_BUFFERS; j++) {
+ gComponentBufferAllocation[i][j] = NULL;
+ }
+ }
pHandle->ComponentDeInit = ComponentPrivateDeInit;
pHandle->GetConfig = CameraGetConfig;
pHandle->SetConfig = CameraSetConfig;
- char *val = getenv("SET_DCC");
- dcc_flag = val ? strtol(val, NULL, 0) : DEFAULT_DCC;
- DOMX_DEBUG(" DCC: 0 - disabled 1 - enabled : val: %d", dcc_flag);
+ pHandle->SendCommand = Camera_SendCommand;
+ pHandle->SetParameter = CameraSetParam;
- if (dcc_flag)
- {
- eOsalError =
- TIMM_OSAL_MutexObtain(cam_mutex, TIMM_OSAL_SUSPEND);
- PROXY_assert(eOsalError == TIMM_OSAL_ERR_NONE,
- OMX_ErrorInsufficientResources, "Mutex lock failed");
-
- if (numofInstance == 0)
- {
- dcc_eError = DCC_Init(hComponent);
- if (dcc_eError != OMX_ErrorNone)
- {
- DOMX_DEBUG(" Error in DCC Init");
- }
-
- /* Configure Ducati to use DCC buffer from A9 side
- *ONLY* if DCC_Init is successful. */
- if (dcc_eError == OMX_ErrorNone)
- {
- dcc_eError = send_DCCBufPtr(hComponent);
- if (dcc_eError != OMX_ErrorNone)
- {
- DOMX_DEBUG(" Error in Sending DCC Buf ptr");
- }
- DCC_DeInit();
- }
- }
- numofInstance = numofInstance + 1;
- eOsalError = TIMM_OSAL_MutexRelease(cam_mutex);
- PROXY_assert(eOsalError == TIMM_OSAL_ERR_NONE,
- OMX_ErrorInsufficientResources, "Mutex release failed");
- }
EXIT:
return eError;
}
@@ -397,6 +603,7 @@ OMX_ERRORTYPE DCC_Init(OMX_HANDLETYPE hComponent)
OMX_ERRORTYPE eError = OMX_ErrorNone;
#ifdef USE_ION
int ret;
+ size_t stride;
#endif
OMX_S32 status = 0;
@@ -454,32 +661,27 @@ OMX_ERRORTYPE DCC_Init(OMX_HANDLETYPE hComponent)
return OMX_ErrorInsufficientResources;
}
dccbuf_size = (dccbuf_size + LINUX_PAGE_SIZE -1) & ~(LINUX_PAGE_SIZE - 1);
- ret = ion_alloc(ion_fd, dccbuf_size, 0x1000, 1 << ION_HEAP_TYPE_CARVEOUT, &DCC_Buff);
- if (ret)
- return OMX_ErrorInsufficientResources;
+ ret = ion_alloc(ion_fd, dccbuf_size, 0x1000, 1 << ION_HEAP_TYPE_CARVEOUT,
+ (struct ion_handle **)&DCC_Buff);
+
+ if (ret || ((int)DCC_Buff == -ENOMEM)) {
+ ret = ion_alloc_tiler(ion_fd, dccbuf_size, 1, TILER_PIXEL_FMT_PAGE,
+ OMAP_ION_HEAP_TILER_MASK, &DCC_Buff, &stride);
+ }
+
+ if (ret || ((int)DCC_Buff == -ENOMEM)) {
+ DOMX_ERROR("FAILED to allocate DCC buffer of size=%d. ret=0x%x",
+ dccbuf_size, ret);
+ return OMX_ErrorInsufficientResources;
+ }
if (ion_map(ion_fd, DCC_Buff, dccbuf_size, PROT_READ | PROT_WRITE, MAP_SHARED, 0,
- &DCC_Buff_ptr,&mmap_fd) < 0)
+ (unsigned char **)&DCC_Buff_ptr, &mmap_fd) < 0)
{
DOMX_ERROR("userspace mapping of ION buffers returned error");
return OMX_ErrorInsufficientResources;
}
ptempbuf = DCC_Buff_ptr;
-#else
- MemReqDescTiler =
- (MemAllocBlock *) TIMM_OSAL_Malloc((sizeof(MemAllocBlock) * 2),
- TIMM_OSAL_TRUE, 0, TIMMOSAL_MEM_SEGMENT_EXT);
- PROXY_assert(MemReqDescTiler != NULL,
- OMX_ErrorInsufficientResources, "Malloc failed");
-
- /* Allocate 1D Tiler buffer for 'N'DCC files */
- MemReqDescTiler[0].fmt = PIXEL_FMT_PAGE;
- MemReqDescTiler[0].dim.len = dccbuf_size;
- MemReqDescTiler[0].stride = 0;
- DCC_Buff = MemMgr_Alloc(MemReqDescTiler, 1);
- PROXY_assert(DCC_Buff != NULL,
- OMX_ErrorInsufficientResources, "ERROR Allocating 1D TILER BUF");
- ptempbuf = DCC_Buff;
#endif
dccbuf_size = read_DCCdir(ptempbuf, dcc_dir, nIndex);
@@ -518,7 +720,11 @@ OMX_ERRORTYPE send_DCCBufPtr(OMX_HANDLETYPE hComponent)
DOMX_ENTER("ENTER");
uribufparam.nSharedBuffSize = dccbuf_size;
- uribufparam.pSharedBuff = (OMX_U8 *) DCC_Buff;
+#ifdef USE_ION
+ uribufparam.pSharedBuff = (OMX_PTR) mmap_fd;
+#else
+ uribufparam.pSharedBuff = (OMX_PTR) DCC_Buff;
+#endif
DOMX_DEBUG("SYSLINK MAPPED ADDR: 0x%x sizeof buffer %d",
uribufparam.pSharedBuff, uribufparam.nSharedBuffSize);
@@ -526,7 +732,7 @@ OMX_ERRORTYPE send_DCCBufPtr(OMX_HANDLETYPE hComponent)
eError = __PROXY_SetParameter(hComponent,
OMX_TI_IndexParamDccUriBuffer,
&uribufparam,
- &(uribufparam.pSharedBuff));
+ &(uribufparam.pSharedBuff), 1);
if (eError != OMX_ErrorNone) {
DOMX_ERROR(" Error in SetParam for DCC Uri Buffer 0x%x", eError);
@@ -646,14 +852,8 @@ void DCC_DeInit()
ion_free(ion_fd, DCC_Buff);
ion_close(ion_fd);
DCC_Buff = NULL;
-#else
- MemMgr_Free(DCC_Buff);
#endif
}
-#ifndef USE_ION
- if (MemReqDescTiler)
- TIMM_OSAL_Free(MemReqDescTiler);
-#endif
DOMX_EXIT("EXIT");
}
diff --git a/domx/omx_proxy_component/omx_h264_enc/src/omx_proxy_h264enc.c b/domx/omx_proxy_component/omx_h264_enc/src/omx_proxy_h264enc.c
index ec998f6..a5de227 100644
--- a/domx/omx_proxy_component/omx_h264_enc/src/omx_proxy_h264enc.c
+++ b/domx/omx_proxy_component/omx_h264_enc/src/omx_proxy_h264enc.c
@@ -75,6 +75,9 @@
#include <VideoMetadata.h>
#endif
+#include <stdlib.h>
+#include <cutils/properties.h>
+
#define COMPONENT_NAME "OMX.TI.DUCATI1.VIDEO.H264E"
/* needs to be specific for every configuration wrapper */
@@ -91,6 +94,26 @@ OMX_ERRORTYPE LOCAL_PROXY_H264E_SetParameter(OMX_IN OMX_HANDLETYPE hComponent,
#endif
+
+#define OMX_INIT_STRUCT(_s_, _name_) \
+ memset(&(_s_), 0x0, sizeof(_name_)); \
+ (_s_).nSize = sizeof(_name_); \
+ (_s_).nVersion.s.nVersionMajor = 0x1; \
+ (_s_).nVersion.s.nVersionMinor = 0x1; \
+ (_s_).nVersion.s.nRevision = 0x0; \
+ (_s_).nVersion.s.nStep = 0x0
+
+
+/* Params needed for Dynamic Frame Rate Control*/
+#define FRAME_RATE_THRESHOLD 1 /* Change in Frame rate to configure the encoder */
+OMX_U32 nFrameRateThreshold = 0;/* Frame Rate threshold for every frame rate update */
+OMX_U32 nPortFrameRate = 0; /* Port FPS initially set to the Encoder */
+OMX_U32 nFrameCounter = 0; /* Number of input frames received since last framerate calculation */
+OMX_TICKS nVideoTime = 0; /* Video duration since last framerate calculation */
+OMX_TICKS nLastFrameRateUpdateTime = 0; /*Time stamp at last frame rate update */
+OMX_U16 nBFrames = 0; /* Number of B Frames in H264 Encoder */
+
+
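For context (illustrative, not part of the patch): the globals above feed OMX_ConfigureDynamicFrameRate, added further down in this file, which mixes two conventions worth spelling out: xEncodeFramerate on the port is Q16 fixed point, and the measured rate is the frame count divided by the timestamp span in microseconds. A tiny worked sketch with made-up numbers:

    OMX_U32 xRate = 30 << 16;                    /* 30 fps in Q16, i.e. 0x001E0000 */
    OMX_U32 nPortFps = xRate >> 16;              /* back to 30, as read via GetConfig */
    /* 60 frames whose timestamps span 2,000,000 us average out to 30 fps */
    OMX_U32 nMeasuredFps = (60 * 1000000) / 2000000;
    /* the proxy then clamps the measured rate to the port rate before updating the encoder */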
#ifdef ANDROID_CUSTOM_OPAQUECOLORFORMAT
/* Opaque color format requires below quirks to be enabled
* ENABLE_GRALLOC_BUFFER
@@ -111,7 +134,7 @@ int COLORCONVERT_PlatformOpaqueToNV12(void *hCC, void *pSrc[],
int nHeight, int nStride,
int nSrcBufType, int nDstBufType);
int COLORCONVERT_close(void *hCC,PROXY_COMPONENT_PRIVATE *pCompPrv);
-
+static int COLORCONVERT_AllocateBuffer(OMX_HANDLETYPE hComponent, OMX_U32 nStride);
static OMX_ERRORTYPE LOCAL_PROXY_H264E_AllocateBuffer(OMX_IN OMX_HANDLETYPE hComponent,
OMX_INOUT OMX_BUFFERHEADERTYPE ** ppBufferHdr, OMX_IN OMX_U32 nPortIndex,
OMX_IN OMX_PTR pAppPrivate, OMX_IN OMX_U32 nSizeBytes);
@@ -130,14 +153,123 @@ typedef struct _OMX_PROXY_H264E_PRIVATE
OMX_S32 nCurBufIndex;
alloc_device_t* mAllocDev;
}OMX_PROXY_H264E_PRIVATE;
+
+RPC_OMX_ERRORTYPE RPC_RegisterBuffer(OMX_HANDLETYPE hRPCCtx, int fd,
+ OMX_PTR *handle1, OMX_PTR *handle2,
+ PROXY_BUFFER_TYPE proxyBufferType);
+RPC_OMX_ERRORTYPE RPC_UnRegisterBuffer(OMX_HANDLETYPE hRPCCtx, OMX_PTR handle);
#endif
+
OMX_ERRORTYPE LOCAL_PROXY_H264E_GetExtensionIndex(OMX_IN OMX_HANDLETYPE hComponent,
OMX_IN OMX_STRING cParameterName, OMX_OUT OMX_INDEXTYPE * pIndexType);
OMX_ERRORTYPE LOCAL_PROXY_H264E_EmptyThisBuffer(OMX_HANDLETYPE hComponent,
OMX_BUFFERHEADERTYPE * pBufferHdr);
+static OMX_ERRORTYPE OMX_ConfigureDynamicFrameRate( OMX_HANDLETYPE hComponent,
+ OMX_BUFFERHEADERTYPE * pBufferHdr)
+{
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_U32 nTargetFrameRate = 0; /* Target Frame Rate to be provided to Encoder */
+ OMX_U32 nCurrentFrameRate = 0; /* Current Frame Rate currently set in Encoder */
+ OMX_CONFIG_FRAMERATETYPE tFrameRate;
+ OMX_COMPONENTTYPE *pHandle;
+ if (hComponent == NULL){
+ DOMX_ERROR("Component is invalid/ not present ");
+ return OMX_ErrorBadParameter;
+ }
+ pHandle = (OMX_COMPONENTTYPE *) hComponent;
+
+ /* Initialise the OMX structures */
+ OMX_INIT_STRUCT(tFrameRate,OMX_CONFIG_FRAMERATETYPE);
+
+ /* Initialise nLastFrameRateUpdateTime for the 1st frame */
+ if((!nFrameCounter) && (!nLastFrameRateUpdateTime)){
+ nLastFrameRateUpdateTime = pBufferHdr-> nTimeStamp;
+ }
+
+ /* Increment the Frame Counter and Calculate Frame Rate*/
+ nFrameCounter++;
+ nVideoTime = pBufferHdr->nTimeStamp - nLastFrameRateUpdateTime;
+
+ if(nVideoTime < 0) {
+ return OMX_ErrorBadParameter;
+ }
+
+ /*Get Port Frame Rate if not read yet*/
+ if(!nFrameRateThreshold) {
+ tFrameRate.nPortIndex = OMX_H264E_INPUT_PORT; /* As per ducati support-set for input port */
+
+ /* Read Current FrameRate */
+ eError = pHandle->GetConfig(hComponent,OMX_IndexConfigVideoFramerate,&tFrameRate);
+ if (eError != OMX_ErrorNone)
+ DOMX_ERROR ("pHandle->GetConfig OMX_IndexConfigVideoFramerate eError :0x%x \n",eError);
+ nFrameRateThreshold = tFrameRate.xEncodeFramerate >>16;
+ nPortFrameRate = nFrameRateThreshold;
+ DOMX_DEBUG(" Port Frame Rate is %d ", nPortFrameRate);
+ }
+ nCurrentFrameRate = nFrameRateThreshold;
+
+ /* If Number of frames is less than the Threshold
+ * Frame Rate update is not necessary
+ */
+ if(nFrameCounter < nFrameRateThreshold){
+ DOMX_EXIT(" Threshold not reached, no update necessary");
+ return OMX_ErrorNone;
+ }
+
+ /*Calculate the new target Frame Rate*/
+ if (nVideoTime != 0)
+ nTargetFrameRate = nFrameCounter * 1000000 / nVideoTime;
+
+ /* For 1080p record, max FPS supported by Codec for profile 4.1 is 30.
+ * When Dynamic Frame Rate is enabled, there might be scenario when FPS
+ * calculated is more than 30. Hence adding the check so that Dynamic Frame
+ * Rate set is never greater than the port FPS initially set.
+ */
+ if(nTargetFrameRate > nPortFrameRate){
+ DOMX_DEBUG("Frame Rate Calculated is more than initial port set Frame Rate");
+ nTargetFrameRate = nPortFrameRate;
+ }
+
+ /* Difference in Frame Rate is more than Threshold - Only then update Frame Rate*/
+ if((( (OMX_S32)nTargetFrameRate) -((OMX_S32) nCurrentFrameRate) >= FRAME_RATE_THRESHOLD) ||
+ (((OMX_S32) nCurrentFrameRate) - ( (OMX_S32)nTargetFrameRate) >= FRAME_RATE_THRESHOLD)) {
+
+ /* Now Send the new Frame Rate */
+ tFrameRate.nPortIndex = OMX_H264E_INPUT_PORT; /* As per ducati support-set for input port */
+ tFrameRate.xEncodeFramerate = (OMX_U32)(nTargetFrameRate * (1 << 16));
+ eError = pHandle->SetConfig(hComponent,OMX_IndexConfigVideoFramerate,&tFrameRate);
+ if(eError != OMX_ErrorNone){
+ DOMX_ERROR(" Error while configuring Dynamic Frame Rate,Error info = %d",eError);
+ return eError;
+ } else {
+ DOMX_DEBUG("Dynamic Frame Rate configuration successful \n");
+ }
+ nFrameRateThreshold = nTargetFrameRate; /*Update the threshold */
+ }
+
+ /* reset all params */
+ nFrameCounter = 0 ;
+ nVideoTime = 0;
+ nLastFrameRateUpdateTime = pBufferHdr->nTimeStamp;
+ return OMX_ErrorNone;
+}
+
+static OMX_ERRORTYPE ComponentPrivateEmptyThisBuffer(OMX_HANDLETYPE hComponent,
+ OMX_BUFFERHEADERTYPE * pBufferHdr)
+{
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+
+ eError = OMX_ConfigureDynamicFrameRate(hComponent, pBufferHdr);
+ if( eError != OMX_ErrorNone)
+ DOMX_ERROR(" Error while configuring FrameRate Dynamically.Error info = %d",eError);
+
+ DOMX_DEBUG("Redirection from ComponentPricateEmptyThisBuffer to PROXY_EmptyThisBuffer");
+ return LOCAL_PROXY_H264E_EmptyThisBuffer (hComponent,pBufferHdr);
+}
+
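The dynamic frame rate control above derives the effective input rate from buffer timestamps (in microseconds), clamps it to the port rate, and hands it to the encoder in Q16 fixed point via OMX_IndexConfigVideoFramerate. The following is only an illustrative, standalone sketch of that calculation; the helper name is hypothetical and not part of the patch:

    /* Sketch: Q16 encode frame rate from a frame count and elapsed microseconds,
     * clamped to the port FPS, mirroring the hunk above. */
    static OMX_U32 calc_framerate_q16(OMX_U32 nFrames, OMX_TICKS nElapsedUs,
                                      OMX_U32 nPortFps)
    {
        OMX_U32 nFps;
        if (nElapsedUs <= 0)
            return nPortFps << 16;             /* nothing measured yet */
        nFps = (OMX_U32)((nFrames * (OMX_TICKS)1000000) / nElapsedUs);
        if (nFps > nPortFps)                   /* never exceed the initial port FPS */
            nFps = nPortFps;
        return nFps << 16;                     /* xEncodeFramerate is Q16.16 */
    }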
OMX_ERRORTYPE OMX_ComponentInit(OMX_HANDLETYPE hComponent)
{
OMX_ERRORTYPE eError = OMX_ErrorNone;
@@ -149,6 +281,10 @@ OMX_ERRORTYPE OMX_ComponentInit(OMX_HANDLETYPE hComponent)
TIMM_OSAL_ERRORTYPE eOSALStatus = TIMM_OSAL_ERR_NONE;
OMX_PROXY_H264E_PRIVATE *pProxy = NULL;
#endif
+ char value[OMX_MAX_STRINGNAME_SIZE];
+ OMX_U32 mEnableVFR = 1; /* Flag used to enable/disable VFR for Encoder */
+ property_get("debug.vfr.enable", value, "1");
+ mEnableVFR = atoi(value);
DOMX_ENTER("");
@@ -222,6 +358,9 @@ OMX_ERRORTYPE OMX_ComponentInit(OMX_HANDLETYPE hComponent)
pHandle->AllocateBuffer = LOCAL_PROXY_H264E_AllocateBuffer;
#endif
+ if(mEnableVFR)
+ pHandle->EmptyThisBuffer = ComponentPrivateEmptyThisBuffer;
+
EXIT:
if (eError != OMX_ErrorNone)
{
@@ -346,6 +485,12 @@ OMX_ERRORTYPE LOCAL_PROXY_H264E_GetParameter(OMX_IN OMX_HANDLETYPE hComponent,
}
#endif
}
+ else if (nParamIndex == OMX_TI_IndexComponentHandle)
+ {
+ OMX_TI_COMPONENT_HANDLE * pCompHandle = pParamStruct;
+ pCompHandle->pHandle = hComponent;
+ eError = OMX_ErrorNone;
+ }
PROXY_assert((eError == OMX_ErrorNone) || (eError == OMX_ErrorNoMore),
eError," Error in Proxy GetParameter");
@@ -527,7 +672,7 @@ OMX_ERRORTYPE LOCAL_PROXY_H264E_GetExtensionIndex(OMX_IN OMX_HANDLETYPE hCompone
goto EXIT;
}
- PROXY_GetExtensionIndex(hComponent, cParameterName, pIndexType);
+ eError = PROXY_GetExtensionIndex(hComponent, cParameterName, pIndexType);
EXIT:
DOMX_EXIT("%s eError: %d",__FUNCTION__, eError);
@@ -551,7 +696,7 @@ OMX_ERRORTYPE LOCAL_PROXY_H264E_EmptyThisBuffer(OMX_HANDLETYPE hComponent,
OMX_ERRORTYPE eError = OMX_ErrorNone;
PROXY_COMPONENT_PRIVATE *pCompPrv;
OMX_COMPONENTTYPE *hComp = (OMX_COMPONENTTYPE *) hComponent;
- OMX_PTR pBufferOrig = pBufferHdr->pBuffer;
+ OMX_PTR pBufferOrig = NULL;
OMX_U32 nStride = 0, nNumLines = 0;
OMX_PARAM_PORTDEFINITIONTYPE tParamStruct;
OMX_U32 nFilledLen, nAllocLen;
@@ -560,6 +705,12 @@ OMX_ERRORTYPE LOCAL_PROXY_H264E_EmptyThisBuffer(OMX_HANDLETYPE hComponent,
TIMM_OSAL_ERRORTYPE eOSALStatus = TIMM_OSAL_ERR_NONE;
OMX_U32 nBufIndex = 0, nSize=0, nRet=0;
#endif
+#ifdef ENABLE_GRALLOC_BUFFER
+ OMX_PTR pAuxBuf0 = NULL, pAuxBuf1 = NULL;
+ RPC_OMX_ERRORTYPE eRPCError = RPC_OMX_ErrorNone;
+ OMX_ERRORTYPE eCompReturn = OMX_ErrorNone;
+ IMG_native_handle_t* pGrallocHandle=NULL;
+#endif
PROXY_require(pBufferHdr != NULL, OMX_ErrorBadParameter, NULL);
PROXY_require(hComp->pComponentPrivate != NULL, OMX_ErrorBadParameter,
@@ -593,7 +744,7 @@ OMX_ERRORTYPE LOCAL_PROXY_H264E_EmptyThisBuffer(OMX_HANDLETYPE hComponent,
__FUNCTION__,hComponent, pCompPrv, pBufferHdr->nFilledLen,
pBufferHdr->nOffset, pBufferHdr->nFlags);
- if( pCompPrv->proxyPortBuffers[OMX_H264E_INPUT_PORT].proxyBufferType == EncoderMetadataPointers && nFilledLen != 0 )
+ if( pCompPrv->proxyPortBuffers[OMX_H264E_INPUT_PORT].proxyBufferType == EncoderMetadataPointers )
{
OMX_U32 *pTempBuffer;
OMX_U32 nMetadataBufferType;
@@ -607,14 +758,22 @@ OMX_ERRORTYPE LOCAL_PROXY_H264E_EmptyThisBuffer(OMX_HANDLETYPE hComponent,
if(nMetadataBufferType == kMetadataBufferTypeCameraSource)
{
#ifdef ENABLE_GRALLOC_BUFFER
- IMG_native_handle_t* pGrallocHandle;
video_metadata_t* pVideoMetadataBuffer;
DOMX_DEBUG("MetadataBufferType is kMetadataBufferTypeCameraSource");
pVideoMetadataBuffer = (video_metadata_t*) ((OMX_U32 *)(pBufferHdr->pBuffer));
pGrallocHandle = (IMG_native_handle_t*) (pVideoMetadataBuffer->handle);
DOMX_DEBUG("Grallloc buffer recieved in metadata buffer 0x%x",pGrallocHandle );
-
+ if( pGrallocHandle->iFormat != HAL_PIXEL_FORMAT_TI_NV12 && pProxy->gralloc_handle[0] == NULL ) {
+ DOMX_DEBUG("Allocating NV12 buffers internally within DOMX actual count: %d", pCompPrv->nAllocatedBuffers);
+ pProxy->nCurBufIndex = 0;
+
+ while( (unsigned) pProxy->nCurBufIndex < pCompPrv->nAllocatedBuffers ) {
+ eError = COLORCONVERT_AllocateBuffer(hComponent, nStride);
+ PROXY_require(eError == OMX_ErrorNone, eError, "Error allocating buffers for color conversion");
+ pProxy->nCurBufIndex++;
+ }
+ }
pBufferHdr->pBuffer = (OMX_U8 *)(pGrallocHandle->fd[0]);
((OMX_TI_PLATFORMPRIVATE *) pBufferHdr->pPlatformPrivate)->
pAuxBuf1 = (OMX_PTR) pGrallocHandle->fd[1];
@@ -627,7 +786,6 @@ OMX_ERRORTYPE LOCAL_PROXY_H264E_EmptyThisBuffer(OMX_HANDLETYPE hComponent,
else if(nMetadataBufferType == kMetadataBufferTypeGrallocSource)
{
#ifdef ENABLE_GRALLOC_BUFFER
- IMG_native_handle_t* pGrallocHandle;
buffer_handle_t tBufHandle;
DOMX_DEBUG("MetadataBufferType is kMetadataBufferTypeGrallocSource");
@@ -635,6 +793,16 @@ OMX_ERRORTYPE LOCAL_PROXY_H264E_EmptyThisBuffer(OMX_HANDLETYPE hComponent,
tBufHandle = *((buffer_handle_t *)pTempBuffer);
pGrallocHandle = (IMG_native_handle_t*) tBufHandle;
DOMX_DEBUG("Grallloc buffer recieved in metadata buffer 0x%x",pGrallocHandle );
+ if( pGrallocHandle->iFormat != HAL_PIXEL_FORMAT_TI_NV12 && pProxy->gralloc_handle[0] == NULL ) {
+ DOMX_DEBUG("Allocating NV12 buffers internally within DOMX actual count: %d", pCompPrv->nAllocatedBuffers);
+ pProxy->nCurBufIndex = 0;
+
+ while( (unsigned) pProxy->nCurBufIndex < pCompPrv->nAllocatedBuffers ) {
+ eError = COLORCONVERT_AllocateBuffer(hComponent, nStride);
+ PROXY_require(eError == OMX_ErrorNone, eError, "Error allocating buffers for color conversion");
+ pProxy->nCurBufIndex++;
+ }
+ }
pBufferHdr->pBuffer = (OMX_U8 *)(pGrallocHandle->fd[0]);
((OMX_TI_PLATFORMPRIVATE *) pBufferHdr->pPlatformPrivate)->
@@ -642,7 +810,7 @@ OMX_ERRORTYPE LOCAL_PROXY_H264E_EmptyThisBuffer(OMX_HANDLETYPE hComponent,
DOMX_DEBUG("%s Gralloc=0x%x, Y-fd=%d, UV-fd=%d", __FUNCTION__, pGrallocHandle,
pGrallocHandle->fd[0], pGrallocHandle->fd[1]);
#ifdef ANDROID_CUSTOM_OPAQUECOLORFORMAT
- if (pProxy->bAndroidOpaqueFormat)
+ if (pProxy->bAndroidOpaqueFormat && pGrallocHandle->iFormat != HAL_PIXEL_FORMAT_TI_NV12)
{
/* Dequeue NV12 buffer for encoder */
eOSALStatus = TIMM_OSAL_ReadFromPipe(pProxy->hBufPipe, &nBufIndex,
@@ -650,35 +818,55 @@ OMX_ERRORTYPE LOCAL_PROXY_H264E_EmptyThisBuffer(OMX_HANDLETYPE hComponent,
TIMM_OSAL_SUSPEND);
PROXY_assert(eOSALStatus == TIMM_OSAL_ERR_NONE, OMX_ErrorBadParameter, NULL);
- /* Get NV12 data after colorconv*/
- nRet = COLORCONVERT_PlatformOpaqueToNV12(pProxy->hCC, (void **) &pGrallocHandle, (void **) &pProxy->gralloc_handle[nBufIndex],
+ if(nFilledLen != 0)
+ {
+ /* Get NV12 data after colorconv*/
+ nRet = COLORCONVERT_PlatformOpaqueToNV12(pProxy->hCC, (void **) &pGrallocHandle, (void **) &pProxy->gralloc_handle[nBufIndex],
pGrallocHandle->iWidth,
pGrallocHandle->iHeight,
4096, COLORCONVERT_BUFTYPE_GRALLOCOPAQUE,
COLORCONVERT_BUFTYPE_GRALLOCOPAQUE );
- if(nRet != 0)
- {
- eOSALStatus = TIMM_OSAL_WriteToPipe(pProxy->hBufPipe, (void *) &nBufIndex,
+
+ if(nRet != 0)
+ {
+ eOSALStatus = TIMM_OSAL_WriteToPipe(pProxy->hBufPipe, (void *) &nBufIndex,
sizeof(OMX_U32), TIMM_OSAL_SUSPEND);
- PROXY_assert(0, OMX_ErrorBadParameter, "Color conversion routine failed");
+ PROXY_assert(0, OMX_ErrorBadParameter, "Color conversion routine failed");
+ }
}
/* Update pBufferHdr with NV12 buffers for OMX component */
- pBufferHdr->pBuffer= pProxy->gralloc_handle[nBufIndex]->fd[0];
- ((OMX_TI_PLATFORMPRIVATE *) pBufferHdr->pPlatformPrivate)->pAuxBuf1 = pProxy->gralloc_handle[nBufIndex]->fd[1];
+ pBufferHdr->pBuffer= (OMX_U8 *)(pProxy->gralloc_handle[nBufIndex]->fd[0]);
+ ((OMX_TI_PLATFORMPRIVATE *) pBufferHdr->pPlatformPrivate)->pAuxBuf1 = (OMX_PTR)(pProxy->gralloc_handle[nBufIndex]->fd[1]);
}
#endif
#endif
}
else
{
- return OMX_ErrorBadParameter;
+ DOMX_ERROR("MetadataBufferType is unknow. Returning 'OMX_ErrorBadParameter'");
+ eError = OMX_ErrorBadParameter;
+ goto EXIT; // need to restore length fields in pBufferHdr
}
+#ifdef ENABLE_GRALLOC_BUFFER
+ eRPCError = RPC_RegisterBuffer(pCompPrv->hRemoteComp, pBufferHdr->pBuffer,
+ &pAuxBuf0, &pAuxBuf1,
+ GrallocPointers);
+ PROXY_checkRpcError();
+ if (pAuxBuf0)
+ pBufferHdr->pBuffer = pAuxBuf0;
+ if (pAuxBuf1)
+ ((OMX_TI_PLATFORMPRIVATE *) pBufferHdr->pPlatformPrivate)->pAuxBuf1 = pAuxBuf1;
+#endif
}
- PROXY_EmptyThisBuffer(hComponent, pBufferHdr);
+ eError = PROXY_EmptyThisBuffer(hComponent, pBufferHdr);
#ifdef ANDROID_CUSTOM_OPAQUECOLORFORMAT
- if (pProxy->bAndroidOpaqueFormat)
+ if (pProxy->bAndroidOpaqueFormat
+#ifdef ENABLE_GRALLOC_BUFFER
+&& pGrallocHandle != NULL && pGrallocHandle->iFormat != HAL_PIXEL_FORMAT_TI_NV12
+#endif
+)
{
/*Write buffer to end of pipe for re-circulation for future ETB()*/
eOSALStatus = TIMM_OSAL_WriteToPipe(pProxy->hBufPipe, (void *) &nBufIndex,
@@ -686,14 +874,19 @@ OMX_ERRORTYPE LOCAL_PROXY_H264E_EmptyThisBuffer(OMX_HANDLETYPE hComponent,
PROXY_assert(eOSALStatus == TIMM_OSAL_ERR_NONE, OMX_ErrorBadParameter, "Pipe write failed");
}
#endif
- if( pCompPrv->proxyPortBuffers[pBufferHdr->nInputPortIndex].proxyBufferType == EncoderMetadataPointers)
+
+EXIT:
+ if( pBufferHdr!=NULL && pCompPrv->proxyPortBuffers[pBufferHdr->nInputPortIndex].proxyBufferType == EncoderMetadataPointers)
{
pBufferHdr->pBuffer = pBufferOrig;
pBufferHdr->nFilledLen = nFilledLen;
pBufferHdr->nAllocLen = nAllocLen;
+#ifdef ENABLE_GRALLOC_BUFFER
+ RPC_UnRegisterBuffer(pCompPrv->hRemoteComp, pAuxBuf0);
+ RPC_UnRegisterBuffer(pCompPrv->hRemoteComp, pAuxBuf1);
+#endif
}
- EXIT:
- return eError;
+ return eError;
}
#ifdef ANDROID_CUSTOM_OPAQUECOLORFORMAT
@@ -704,7 +897,6 @@ static OMX_ERRORTYPE LOCAL_PROXY_H264E_AllocateBuffer(OMX_HANDLETYPE hComponent,
OMX_ERRORTYPE eError = OMX_ErrorNone;
PROXY_COMPONENT_PRIVATE *pCompPrv = NULL;
OMX_COMPONENTTYPE *hComp = (OMX_COMPONENTTYPE *) hComponent;
- OMX_CONFIG_RECTTYPE tParamRect;
OMX_PROXY_H264E_PRIVATE *pProxy = NULL;
TIMM_OSAL_ERRORTYPE eOSALStatus = TIMM_OSAL_ERR_NONE;
int err, nStride;
@@ -714,42 +906,10 @@ static OMX_ERRORTYPE LOCAL_PROXY_H264E_AllocateBuffer(OMX_HANDLETYPE hComponent,
pCompPrv = (PROXY_COMPONENT_PRIVATE *) hComp->pComponentPrivate;
pProxy = (OMX_PROXY_H264E_PRIVATE *) pCompPrv->pCompProxyPrv;
- if((nPortIndex == OMX_H264E_INPUT_PORT) &&
- (pProxy->bAndroidOpaqueFormat))
- {
-
- tParamRect.nSize = sizeof(OMX_CONFIG_RECTTYPE);
- tParamRect.nVersion.s.nVersionMajor = 1;
- tParamRect.nVersion.s.nVersionMinor = 1;
- tParamRect.nVersion.s.nRevision = 0;
- tParamRect.nVersion.s.nStep = 0;
- tParamRect.nPortIndex = nPortIndex;
-
- eError = PROXY_GetParameter(hComponent, (OMX_INDEXTYPE)OMX_TI_IndexParam2DBufferAllocDimension, &tParamRect);
- PROXY_assert(eError == OMX_ErrorNone, eError," Error in Proxy GetParameter");
-
- err = pProxy->mAllocDev->alloc(pProxy->mAllocDev,(int) tParamRect.nWidth,(int) tParamRect.nHeight,
- (int) HAL_PIXEL_FORMAT_TI_NV12,(int) GRALLOC_USAGE_HW_RENDER, &(pProxy->gralloc_handle[pProxy->nCurBufIndex]), &nStride);
- }
eError = PROXY_AllocateBuffer(hComponent, ppBufferHdr, nPortIndex,
pAppPrivate, nSizeBytes);
EXIT:
- if((nPortIndex == OMX_H264E_INPUT_PORT) &&
- (pProxy->bAndroidOpaqueFormat))
- {
- if(eError != OMX_ErrorNone)
- {
- err = pProxy->mAllocDev->free(pProxy->mAllocDev, pProxy->gralloc_handle[pProxy->nCurBufIndex]);
- }
- else
- {
- /*Populate buffer to pipe*/
- eOSALStatus = TIMM_OSAL_WriteToPipe(pProxy->hBufPipe, (void *) &pProxy->nCurBufIndex,
- sizeof(OMX_U32), TIMM_OSAL_SUSPEND);
- pProxy->nCurBufIndex++;
- }
- }
return eError;
}
@@ -768,7 +928,7 @@ static OMX_ERRORTYPE LOCAL_PROXY_H264E_FreeBuffer(OMX_IN OMX_HANDLETYPE hCompone
pProxy = (OMX_PROXY_H264E_PRIVATE *) pCompPrv->pCompProxyPrv;
if((nPortIndex == OMX_H264E_INPUT_PORT) &&
- (pProxy->bAndroidOpaqueFormat))
+ (pProxy->bAndroidOpaqueFormat) && (pProxy->gralloc_handle[0] != NULL))
{
pProxy->nCurBufIndex--;
PROXY_require(pProxy->nCurBufIndex >=0,
@@ -776,7 +936,7 @@ static OMX_ERRORTYPE LOCAL_PROXY_H264E_FreeBuffer(OMX_IN OMX_HANDLETYPE hCompone
if(pProxy->gralloc_handle[pProxy->nCurBufIndex])
{
- pProxy->mAllocDev->free(pProxy->mAllocDev, pProxy->gralloc_handle[pProxy->nCurBufIndex]);
+ pProxy->mAllocDev->free(pProxy->mAllocDev, (buffer_handle_t)(pProxy->gralloc_handle[pProxy->nCurBufIndex]));
pProxy->gralloc_handle[pProxy->nCurBufIndex] = NULL;
}
@@ -795,6 +955,35 @@ EXIT:
return eError;
}
+int COLORCONVERT_AllocateBuffer(OMX_HANDLETYPE hComponent, OMX_U32 nStride)
+{
+ OMX_CONFIG_RECTTYPE tParam;
+ TIMM_OSAL_ERRORTYPE eOSALStatus = TIMM_OSAL_ERR_NONE;
+ OMX_U32 err;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ PROXY_COMPONENT_PRIVATE *pCompPrv = NULL;
+ OMX_PROXY_H264E_PRIVATE *pProxy = NULL;
+ OMX_COMPONENTTYPE *hComp = (OMX_COMPONENTTYPE *) hComponent;
+
+ pCompPrv = (PROXY_COMPONENT_PRIVATE *) hComp->pComponentPrivate;
+ pProxy = (OMX_PROXY_H264E_PRIVATE *) pCompPrv->pCompProxyPrv;
+
+ tParam.nSize = sizeof(OMX_CONFIG_RECTTYPE);
+ tParam.nVersion.s.nVersionMajor = 1;
+ tParam.nVersion.s.nVersionMinor = 1;
+ tParam.nVersion.s.nRevision = 0;
+ tParam.nVersion.s.nStep = 0;
+ tParam.nPortIndex = OMX_H264E_INPUT_PORT;
+ eError = PROXY_GetParameter(hComponent, (OMX_INDEXTYPE)OMX_TI_IndexParam2DBufferAllocDimension, &tParam);
+ PROXY_assert(eError == OMX_ErrorNone, eError, " Error in Proxy GetParameter");
+ err = pProxy->mAllocDev->alloc(pProxy->mAllocDev, (int) tParam.nWidth, (int) tParam.nHeight, (int) HAL_PIXEL_FORMAT_TI_NV12, (int) GRALLOC_USAGE_HW_RENDER, (const struct native_handle_t * *)(&(pProxy->gralloc_handle[pProxy->nCurBufIndex])), (int *) &nStride);
+ PROXY_assert(!err, err, " Error in allocating Gralloc buffers");
+ eOSALStatus = TIMM_OSAL_WriteToPipe(pProxy->hBufPipe, (void *) &pProxy->nCurBufIndex, sizeof(OMX_U32), TIMM_OSAL_SUSPEND);
+ PROXY_assert(eOSALStatus == TIMM_OSAL_ERR_NONE, OMX_ErrorBadParameter, "Pipe write failed");
+
+EXIT:
+ return eError;
+}
OMX_ERRORTYPE LOCAL_PROXY_H264E_ComponentDeInit(OMX_HANDLETYPE hComponent)
{
OMX_ERRORTYPE eError = OMX_ErrorNone;
@@ -827,7 +1016,7 @@ OMX_ERRORTYPE LOCAL_PROXY_H264E_ComponentDeInit(OMX_HANDLETYPE hComponent)
{
if(pProxy->gralloc_handle[i])
{
- pProxy->mAllocDev->free(pProxy->mAllocDev, pProxy->gralloc_handle[i]);
+ pProxy->mAllocDev->free(pProxy->mAllocDev, (buffer_handle_t)(pProxy->gralloc_handle[i]));
pProxy->gralloc_handle[i] = NULL;
}
}
diff --git a/domx/omx_proxy_component/omx_mpeg4_enc/src/omx_proxy_mpeg4enc.c b/domx/omx_proxy_component/omx_mpeg4_enc/src/omx_proxy_mpeg4enc.c
index b6a6431..9767d89 100644
--- a/domx/omx_proxy_component/omx_mpeg4_enc/src/omx_proxy_mpeg4enc.c
+++ b/domx/omx_proxy_component/omx_mpeg4_enc/src/omx_proxy_mpeg4enc.c
@@ -75,6 +75,9 @@
#include <VideoMetadata.h>
#endif
+#include <stdlib.h>
+#include <cutils/properties.h>
+
#define COMPONENT_NAME "OMX.TI.DUCATI1.VIDEO.MPEG4E"
/* needs to be specific for every configuration wrapper */
@@ -94,6 +97,26 @@ OMX_ERRORTYPE LOCAL_PROXY_MPEG4E_SetParameter(OMX_IN OMX_HANDLETYPE hComponent,
#endif
+
+#define OMX_INIT_STRUCT(_s_, _name_) \
+ memset(&(_s_), 0x0, sizeof(_name_)); \
+ (_s_).nSize = sizeof(_name_); \
+ (_s_).nVersion.s.nVersionMajor = 0x1; \
+ (_s_).nVersion.s.nVersionMinor = 0x1; \
+ (_s_).nVersion.s.nRevision = 0x0; \
+ (_s_).nVersion.s.nStep = 0x0
+
+
+/* Params needed for Dynamic Frame Rate Control*/
+#define FRAME_RATE_THRESHOLD 1 /* Change in Frame rate to configure the encoder */
+OMX_U32 nFrameRateThreshold = 0;/* Frame Rate threshold for every frame rate update */
+OMX_U32 nPortFrameRate = 0; /* Port FPS initially set to the Encoder */
+OMX_U32 nFrameCounter = 0; /* Number of input frames received since last framerate calculation */
+OMX_TICKS nVideoTime = 0; /* Video duration since last framerate calculation */
+OMX_TICKS nLastFrameRateUpdateTime = 0; /*Time stamp at last frame rate update */
+OMX_U16 nBFrames = 0; /* Number of B Frames in the Encoder */
+
+
#ifdef ANDROID_CUSTOM_OPAQUECOLORFORMAT
#define OMX_MPEG4E_NUM_INTERNAL_BUF (8)
#define HAL_PIXEL_FORMAT_TI_NV12 (0x100)
@@ -129,6 +152,11 @@ typedef struct _OMX_PROXY_MPEG4E_PRIVATE
OMX_S32 nCurBufIndex;
alloc_device_t* mAllocDev;
}OMX_PROXY_MPEG4E_PRIVATE;
+
+RPC_OMX_ERRORTYPE RPC_RegisterBuffer(OMX_HANDLETYPE hRPCCtx, int fd,
+ OMX_PTR *handle1, OMX_PTR *handle2,
+ PROXY_BUFFER_TYPE proxyBufferType);
+RPC_OMX_ERRORTYPE RPC_UnRegisterBuffer(OMX_HANDLETYPE hRPCCtx, OMX_PTR handle);
#endif
OMX_ERRORTYPE LOCAL_PROXY_MPEG4E_GetExtensionIndex(OMX_IN OMX_HANDLETYPE hComponent,
@@ -137,6 +165,109 @@ OMX_ERRORTYPE LOCAL_PROXY_MPEG4E_GetExtensionIndex(OMX_IN OMX_HANDLETYPE hCompon
OMX_ERRORTYPE LOCAL_PROXY_MPEG4E_EmptyThisBuffer(OMX_HANDLETYPE hComponent,
OMX_BUFFERHEADERTYPE * pBufferHdr);
+static OMX_ERRORTYPE OMX_ConfigureDynamicFrameRate( OMX_HANDLETYPE hComponent,
+ OMX_BUFFERHEADERTYPE * pBufferHdr)
+{
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_U32 nTargetFrameRate = 0; /* Target Frame Rate to be provided to Encoder */
+ OMX_U32 nCurrentFrameRate = 0; /* Current Frame Rate currently set in Encoder */
+ OMX_CONFIG_FRAMERATETYPE tFrameRate;
+ OMX_COMPONENTTYPE *pHandle;
+ if (hComponent == NULL){
+ DOMX_ERROR("Component is invalid/ not present ");
+ return OMX_ErrorBadParameter;
+ }
+ pHandle = (OMX_COMPONENTTYPE *) hComponent;
+
+ /* Initialise the OMX structures */
+ OMX_INIT_STRUCT(tFrameRate,OMX_CONFIG_FRAMERATETYPE);
+
+ /* Initialise nLastFrameRateUpdateTime for the 1st frame */
+ if((!nFrameCounter) && (!nLastFrameRateUpdateTime)){
+ nLastFrameRateUpdateTime = pBufferHdr-> nTimeStamp;
+ }
+
+ /* Increment the Frame Counter and Calculate Frame Rate*/
+ nFrameCounter++;
+ nVideoTime = pBufferHdr->nTimeStamp - nLastFrameRateUpdateTime;
+
+ if(nVideoTime < 0) {
+ return OMX_ErrorBadParameter;
+ }
+
+ /*Get Port Frame Rate if not read yet*/
+ if(!nFrameRateThreshold) {
+ tFrameRate.nPortIndex = OMX_MPEG4E_INPUT_PORT; /* As per ducati support-set for input port */
+
+ /* Read Current FrameRate */
+ eError = pHandle->GetConfig(hComponent,OMX_IndexConfigVideoFramerate,&tFrameRate);
+ if (eError != OMX_ErrorNone)
+ DOMX_ERROR ("pHandle->GetConfig OMX_IndexConfigVideoFramerate eError :0x%x \n",eError);
+ nFrameRateThreshold = tFrameRate.xEncodeFramerate >>16;
+ nPortFrameRate = nFrameRateThreshold;
+ DOMX_DEBUG(" Port Frame Rate is %d ", nPortFrameRate);
+ }
+ nCurrentFrameRate = nFrameRateThreshold;
+
+ /* If Number of frames is less than the Threshold
+ * Frame Rate update is not necessary
+ */
+ if(nFrameCounter < nFrameRateThreshold){
+ DOMX_EXIT(" Threshold not reached, no update necessary");
+ return OMX_ErrorNone;
+ }
+
+ /*Calculate the new target Frame Rate*/
+ if (nVideoTime != 0)
+ nTargetFrameRate = nFrameCounter * 1000000 / nVideoTime;
+
+ /* For 1080p record, max FPS supported by Codec for profile 4.1 is 30.
+ * When Dynamic Frame Rate is enabled, there might be scenario when FPS
+ * calculated is more than 30. Hence adding the check so that Dynamic Frame
+ * Rate set is never greater than the port FPS initially set.
+ */
+ if(nTargetFrameRate > nPortFrameRate){
+ DOMX_DEBUG("Frame Rate Calculated is more than initial port set Frame Rate");
+ nTargetFrameRate = nPortFrameRate;
+ }
+
+ /* Difference in Frame Rate is more than Threshold - Only then update Frame Rate*/
+ if((( (OMX_S32)nTargetFrameRate) -((OMX_S32) nCurrentFrameRate) >= FRAME_RATE_THRESHOLD) ||
+ (((OMX_S32) nCurrentFrameRate) - ( (OMX_S32)nTargetFrameRate) >= FRAME_RATE_THRESHOLD)) {
+
+ /* Now Send the new Frame Rate */
+ tFrameRate.nPortIndex = OMX_MPEG4E_INPUT_PORT; /* As per ducati support-set for input port */
+ tFrameRate.xEncodeFramerate = (OMX_U32)(nTargetFrameRate * (1 << 16));
+ eError = pHandle->SetConfig(hComponent,OMX_IndexConfigVideoFramerate,&tFrameRate);
+ if(eError != OMX_ErrorNone){
+ DOMX_ERROR(" Error while configuring Dynamic Frame Rate,Error info = %d",eError);
+ return eError;
+ } else {
+ DOMX_DEBUG("Dynamic Frame Rate configuration successful \n");
+ }
+ nFrameRateThreshold = nTargetFrameRate; /*Update the threshold */
+ }
+
+ /* reset all params */
+ nFrameCounter = 0 ;
+ nVideoTime = 0;
+ nLastFrameRateUpdateTime = pBufferHdr->nTimeStamp;
+ return OMX_ErrorNone;
+}
+
+static OMX_ERRORTYPE ComponentPrivateEmptyThisBuffer(OMX_HANDLETYPE hComponent,
+ OMX_BUFFERHEADERTYPE * pBufferHdr)
+{
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+
+ eError = OMX_ConfigureDynamicFrameRate(hComponent, pBufferHdr);
+ if( eError != OMX_ErrorNone)
+ DOMX_ERROR(" Error while configuring FrameRate Dynamically.Error info = %d",eError);
+
+ DOMX_DEBUG("Redirection from ComponentPricateEmptyThisBuffer to PROXY_EmptyThisBuffer");
+ return LOCAL_PROXY_MPEG4E_EmptyThisBuffer (hComponent,pBufferHdr);
+}
+
OMX_ERRORTYPE OMX_ComponentInit(OMX_HANDLETYPE hComponent)
{
OMX_ERRORTYPE eError = OMX_ErrorNone;
@@ -148,6 +279,10 @@ OMX_ERRORTYPE OMX_ComponentInit(OMX_HANDLETYPE hComponent)
TIMM_OSAL_ERRORTYPE eOSALStatus = TIMM_OSAL_ERR_NONE;
OMX_PROXY_MPEG4E_PRIVATE *pProxy = NULL;
#endif
+ char value[OMX_MAX_STRINGNAME_SIZE];
+ OMX_U32 mEnableVFR = 1; /* Flag used to enable/disable VFR for Encoder */
+ property_get("debug.vfr.enable", value, "1");
+ mEnableVFR = atoi(value);
DOMX_ENTER("");
@@ -220,6 +355,9 @@ OMX_ERRORTYPE OMX_ComponentInit(OMX_HANDLETYPE hComponent)
pHandle->EmptyThisBuffer = LOCAL_PROXY_MPEG4E_EmptyThisBuffer;
pHandle->GetExtensionIndex = LOCAL_PROXY_MPEG4E_GetExtensionIndex;
+ if(mEnableVFR)
+ pHandle->EmptyThisBuffer = ComponentPrivateEmptyThisBuffer;
+
EXIT:
if (eError != OMX_ErrorNone)
{
@@ -510,7 +648,7 @@ OMX_ERRORTYPE LOCAL_PROXY_MPEG4E_GetExtensionIndex(OMX_IN OMX_HANDLETYPE hCompon
goto EXIT;
}
- PROXY_GetExtensionIndex(hComponent, cParameterName, pIndexType);
+ eError = PROXY_GetExtensionIndex(hComponent, cParameterName, pIndexType);
EXIT:
DOMX_EXIT("%s eError: %d",__FUNCTION__, eError);
@@ -534,7 +672,7 @@ OMX_ERRORTYPE LOCAL_PROXY_MPEG4E_EmptyThisBuffer(OMX_HANDLETYPE hComponent,
OMX_ERRORTYPE eError = OMX_ErrorNone;
PROXY_COMPONENT_PRIVATE *pCompPrv;
OMX_COMPONENTTYPE *hComp = (OMX_COMPONENTTYPE *) hComponent;
- OMX_PTR pBufferOrig = pBufferHdr->pBuffer;
+ OMX_PTR pBufferOrig = NULL;
OMX_U32 nStride = 0, nNumLines = 0;
OMX_PARAM_PORTDEFINITIONTYPE tParamStruct;
OMX_U32 nFilledLen, nAllocLen;
@@ -543,6 +681,11 @@ OMX_ERRORTYPE LOCAL_PROXY_MPEG4E_EmptyThisBuffer(OMX_HANDLETYPE hComponent,
TIMM_OSAL_ERRORTYPE eOSALStatus = TIMM_OSAL_ERR_NONE;
OMX_U32 nBufIndex = 0, nSize=0, nRet=0;
#endif
+#ifdef ENABLE_GRALLOC_BUFFER
+ OMX_PTR pAuxBuf0 = NULL, pAuxBuf1 = NULL;
+ RPC_OMX_ERRORTYPE eRPCError = RPC_OMX_ErrorNone;
+ OMX_ERRORTYPE eCompReturn = OMX_ErrorNone;
+#endif
PROXY_require(pBufferHdr != NULL, OMX_ErrorBadParameter, NULL);
PROXY_require(hComp->pComponentPrivate != NULL, OMX_ErrorBadParameter,
@@ -650,8 +793,8 @@ OMX_ERRORTYPE LOCAL_PROXY_MPEG4E_EmptyThisBuffer(OMX_HANDLETYPE hComponent,
DOMX_DEBUG(" --COLORCONVERT_PlatformOpaqueToNV12() ");
/* Update pBufferHdr with NV12 buffers for OMX component */
- pBufferHdr->pBuffer= pProxy->gralloc_handle[nBufIndex]->fd[0];
- ((OMX_TI_PLATFORMPRIVATE *) pBufferHdr->pPlatformPrivate)->pAuxBuf1 = pProxy->gralloc_handle[nBufIndex]->fd[1];
+ pBufferHdr->pBuffer= (OMX_U8 *)(pProxy->gralloc_handle[nBufIndex]->fd[0]);
+ ((OMX_TI_PLATFORMPRIVATE *) pBufferHdr->pPlatformPrivate)->pAuxBuf1 = (OMX_PTR)(pProxy->gralloc_handle[nBufIndex]->fd[1]);
}
#endif
#endif
@@ -660,9 +803,19 @@ OMX_ERRORTYPE LOCAL_PROXY_MPEG4E_EmptyThisBuffer(OMX_HANDLETYPE hComponent,
{
return OMX_ErrorBadParameter;
}
+#ifdef ENABLE_GRALLOC_BUFFER
+ eRPCError = RPC_RegisterBuffer(pCompPrv->hRemoteComp, pBufferHdr->pBuffer,
+ &pAuxBuf0, &pAuxBuf1,
+ GrallocPointers);
+ PROXY_checkRpcError();
+ if (pAuxBuf0)
+ pBufferHdr->pBuffer = pAuxBuf0;
+ if (pAuxBuf1)
+ ((OMX_TI_PLATFORMPRIVATE *) pBufferHdr->pPlatformPrivate)->pAuxBuf1 = pAuxBuf1;
+#endif
}
- PROXY_EmptyThisBuffer(hComponent, pBufferHdr);
+ eError = PROXY_EmptyThisBuffer(hComponent, pBufferHdr);
#ifdef ANDROID_CUSTOM_OPAQUECOLORFORMAT
if (pProxy->bAndroidOpaqueFormat)
{
@@ -672,13 +825,14 @@ OMX_ERRORTYPE LOCAL_PROXY_MPEG4E_EmptyThisBuffer(OMX_HANDLETYPE hComponent,
PROXY_assert(eOSALStatus == TIMM_OSAL_ERR_NONE, OMX_ErrorBadParameter, "Pipe write failed");
}
#endif
- if( pCompPrv->proxyPortBuffers[pBufferHdr->nInputPortIndex].proxyBufferType == EncoderMetadataPointers)
- {
+
+ if( pCompPrv->proxyPortBuffers[pBufferHdr->nInputPortIndex].proxyBufferType == EncoderMetadataPointers) {
pBufferHdr->pBuffer = pBufferOrig;
- pBufferHdr->nFilledLen = nFilledLen;
- pBufferHdr->nAllocLen = nAllocLen;
+#ifdef ENABLE_GRALLOC_BUFFER
+ RPC_UnRegisterBuffer(pCompPrv->hRemoteComp, pAuxBuf0);
+ RPC_UnRegisterBuffer(pCompPrv->hRemoteComp, pAuxBuf1);
+#endif
}
-
EXIT:
return eError;
}
@@ -715,8 +869,9 @@ static OMX_ERRORTYPE LOCAL_PROXY_MPEG4E_AllocateBuffer(OMX_HANDLETYPE hComponent
eError = PROXY_GetParameter(hComponent, (OMX_INDEXTYPE)OMX_TI_IndexParam2DBufferAllocDimension, &tParamRect);
PROXY_assert(eError == OMX_ErrorNone, eError," Error in Proxy GetParameter from 2d index in allocate buffer");
- err = pProxy->mAllocDev->alloc(pProxy->mAllocDev,(int) tParamRect.nWidth,(int) tParamRect.nHeight,
- (int) HAL_PIXEL_FORMAT_TI_NV12,(int) GRALLOC_USAGE_HW_RENDER, &(pProxy->gralloc_handle[pProxy->nCurBufIndex]), &nStride);
+ err = pProxy->mAllocDev->alloc(pProxy->mAllocDev,(int) tParamRect.nWidth,(int) tParamRect.nHeight,
+ (int) HAL_PIXEL_FORMAT_TI_NV12,(int) GRALLOC_USAGE_HW_RENDER,
+ (const struct native_handle_t **)(&(pProxy->gralloc_handle[pProxy->nCurBufIndex])), &nStride);
}
eError = PROXY_AllocateBuffer(hComponent, ppBufferHdr, nPortIndex,
@@ -727,7 +882,7 @@ EXIT:
{
if(eError != OMX_ErrorNone)
{
- err = pProxy->mAllocDev->free(pProxy->mAllocDev, pProxy->gralloc_handle[pProxy->nCurBufIndex]);
+ err = pProxy->mAllocDev->free(pProxy->mAllocDev, (buffer_handle_t)(pProxy->gralloc_handle[pProxy->nCurBufIndex]));
}
else
{
@@ -764,7 +919,7 @@ static OMX_ERRORTYPE LOCAL_PROXY_MPEG4E_FreeBuffer(OMX_IN OMX_HANDLETYPE hCompon
if(pProxy->gralloc_handle[pProxy->nCurBufIndex])
{
- pProxy->mAllocDev->free(pProxy->mAllocDev, pProxy->gralloc_handle[pProxy->nCurBufIndex]);
+ pProxy->mAllocDev->free(pProxy->mAllocDev, (buffer_handle_t)(pProxy->gralloc_handle[pProxy->nCurBufIndex]));
pProxy->gralloc_handle[pProxy->nCurBufIndex] = NULL;
}
@@ -815,7 +970,7 @@ OMX_ERRORTYPE LOCAL_PROXY_MPEG4E_ComponentDeInit(OMX_HANDLETYPE hComponent)
{
if(pProxy->gralloc_handle[i])
{
- pProxy->mAllocDev->free(pProxy->mAllocDev, pProxy->gralloc_handle[i]);
+ pProxy->mAllocDev->free(pProxy->mAllocDev, (buffer_handle_t)(pProxy->gralloc_handle[i]));
pProxy->gralloc_handle[i] = NULL;
}
}
diff --git a/domx/omx_proxy_component/omx_video_dec/src/omx_proxy_videodec.c b/domx/omx_proxy_component/omx_video_dec/src/omx_proxy_videodec.c
index 5c5a4b9..8ac4c9d 100755
--- a/domx/omx_proxy_component/omx_video_dec/src/omx_proxy_videodec.c
+++ b/domx/omx_proxy_component/omx_video_dec/src/omx_proxy_videodec.c
@@ -64,6 +64,13 @@
#include "OMX_TI_Video.h"
#include "OMX_TI_Index.h"
+#ifdef ENABLE_RAW_BUFFERS_DUMP_UTILITY
+#define LOG_TAG "OMXPROXYVIDEODEC"
+#include <fcntl.h>
+#include <cutils/properties.h>
+#include <utils/Log.h>
+#endif
+
#define COMPONENT_NAME "OMX.TI.DUCATI1.VIDEO.DECODER"
/* needs to be specific for every configuration wrapper */
@@ -116,6 +123,14 @@ OMX_ERRORTYPE PROXY_VIDDEC_FillBufferDone(OMX_HANDLETYPE hComponent,
OMX_PTR pMarkData);
#endif
+extern OMX_ERRORTYPE PrearrageEmptyThisBuffer(OMX_HANDLETYPE hComponent,
+ OMX_BUFFERHEADERTYPE * pBufferHdr);
+
+#ifdef ENABLE_RAW_BUFFERS_DUMP_UTILITY
+extern void DumpVideoFrame(DebugFrame_Dump *frameInfo);
+#endif
+
+OMX_ERRORTYPE OMX_ProxyViddecInit(OMX_HANDLETYPE hComponent);
OMX_ERRORTYPE OMX_ComponentInit(OMX_HANDLETYPE hComponent)
{
@@ -153,7 +168,32 @@ OMX_ERRORTYPE OMX_ComponentInit(OMX_HANDLETYPE hComponent)
eError = OMX_ProxyViddecInit(hComponent);
+#ifdef ENABLE_RAW_BUFFERS_DUMP_UTILITY
+ if (eError == OMX_ErrorNone)
+ {
+ char value[PROPERTY_VALUE_MAX];
+ property_get("debug.video.dumpframe", value, "0:0");
+ /* -ve value for fromFrame would disable this automatically */
+ pComponentPrivate->debugframeInfo.fromFrame = atoi(strtok(value, ":"));
+ pComponentPrivate->debugframeInfo.toFrame = atoi(strtok(NULL, ":"));
+ pComponentPrivate->debugframeInfo.runningFrame = pComponentPrivate->debugframeInfo.fromFrame;
+ }
+#endif
EXIT:
+ if (eError != OMX_ErrorNone)
+ {
+ DOMX_DEBUG("Error in Initializing Proxy");
+ if (pComponentPrivate->cCompName != NULL)
+ {
+ TIMM_OSAL_Free(pComponentPrivate->cCompName);
+ pComponentPrivate->cCompName = NULL;
+ }
+ if (pComponentPrivate != NULL)
+ {
+ TIMM_OSAL_Free(pComponentPrivate);
+ pComponentPrivate = NULL;
+ }
+ }
return eError;
}
@@ -186,7 +226,7 @@ OMX_ERRORTYPE OMX_ProxyViddecInit(OMX_HANDLETYPE hComponent)
eError = OMX_ProxyCommonInit(hComponent); // Calling Proxy Common Init()
PROXY_assert(eError == OMX_ErrorNone, eError, "Proxy common init returned error");
#ifdef ANDROID_QUIRK_CHANGE_PORT_VALUES
- pHandle->SetParameter = PROXY_VIDDEC_SetParameter;
+ pHandle->SetParameter = PROXY_VIDDEC_SetParameter;
pHandle->GetParameter = PROXY_VIDDEC_GetParameter;
#endif
pHandle->GetExtensionIndex = PROXY_VIDDEC_GetExtensionIndex;
@@ -231,22 +271,10 @@ OMX_ERRORTYPE OMX_ProxyViddecInit(OMX_HANDLETYPE hComponent)
PROXY_assert(eError == OMX_ErrorNone,
eError," Error in Proxy SetParameter for Enhanced port reconfig usage");
#endif
+ /* This is required to handle WMV/VC-1 content */
+ pHandle->EmptyThisBuffer = PrearrageEmptyThisBuffer;
EXIT:
- if (eError != OMX_ErrorNone)
- {
- DOMX_DEBUG("Error in Initializing Proxy");
- if (pComponentPrivate->cCompName != NULL)
- {
- TIMM_OSAL_Free(pComponentPrivate->cCompName);
- pComponentPrivate->cCompName = NULL;
- }
- if (pComponentPrivate != NULL)
- {
- TIMM_OSAL_Free(pComponentPrivate);
- pComponentPrivate = NULL;
- }
- }
return eError;
}
@@ -337,12 +365,15 @@ OMX_ERRORTYPE PROXY_VIDDEC_GetParameter(OMX_IN OMX_HANDLETYPE hComponent,
{
PROXY_CHK_VERSION(pParamStruct, OMX_TI_PARAMNATIVEBUFFERUSAGE);
pUsage->nUsage = GRALLOC_USAGE_HW_RENDER;
+#ifdef ENABLE_RAW_BUFFERS_DUMP_UTILITY
+ pUsage->nUsage |= GRALLOC_USAGE_SW_READ_RARELY;
+#endif
goto EXIT;
}
}
#endif
eError = PROXY_GetParameter(hComponent,nParamIndex, pParamStruct);
- PROXY_assert((eError == OMX_ErrorNone) || (eError == OMX_ErrorNoMore),
+ PROXY_assert(eError == OMX_ErrorNone,
eError," Error in Proxy GetParameter");
if( nParamIndex == OMX_IndexParamPortDefinition)
@@ -624,8 +655,8 @@ OMX_ERRORTYPE PROXY_VIDDEC_FillThisBuffer(OMX_HANDLETYPE hComponent, OMX_BUFFERH
pCompPrv = (PROXY_COMPONENT_PRIVATE *) hComp->pComponentPrivate;
- if(pCompPrv->proxyPortBuffers[OMX_VIDEODECODER_OUTPUT_PORT].proxyBufferType
- == GrallocPointers)
+ if(pCompPrv->proxyPortBuffers[OMX_VIDEODECODER_OUTPUT_PORT].proxyBufferType
+ == GrallocPointers)
{
/* Lock the Gralloc buffer till it gets rendered completely */
/* Extract the Gralloc handle from the Header and then call lock on that */
@@ -644,10 +675,29 @@ OMX_ERRORTYPE PROXY_VIDDEC_FillThisBuffer(OMX_HANDLETYPE hComponent, OMX_BUFFERH
PROXY_assert(eError == OMX_ErrorNone,
eError," Error in Proxy GetParameter for Port Def");
+#ifdef ENABLE_RAW_BUFFERS_DUMP_UTILITY
+ /* Get the Video frame crop window */
+ OMX_CONFIG_RECTTYPE rect;
+ rect.nSize = sizeof(rect);
+ rect.nVersion.s.nVersionMajor = OMX_VER_MAJOR;
+ rect.nVersion.s.nVersionMinor = OMX_VER_MINOR;
+ rect.nVersion.s.nRevision = 0x0;
+ rect.nVersion.s.nStep = 0x0;
+ rect.nPortIndex = OMX_VIDEODECODER_OUTPUT_PORT;
+
+ eError = PROXY_GetConfig(hComponent, OMX_IndexConfigCommonOutputCrop, &rect);
+
+ PROXY_assert(eError == OMX_ErrorNone,
+ eError," Error while getting output crop");
+ pCompPrv->debugframeInfo.frame_width = rect.nWidth;
+ pCompPrv->debugframeInfo.frame_height = rect.nHeight;
+ pCompPrv->debugframeInfo.frame_xoffset = rect.nLeft;
+ pCompPrv->debugframeInfo.frame_yoffset = rect.nTop;
+#endif
pCompPrv->grallocModule->lock((gralloc_module_t const *) pCompPrv->grallocModule,
(buffer_handle_t)grallocHandle, GRALLOC_USAGE_HW_RENDER,
0,0,sPortDef.format.video.nFrameWidth, sPortDef.format.video.nFrameHeight,NULL);
- }
+ }
eRPCError = PROXY_FillThisBuffer(hComponent, pBufferHdr);
@@ -689,7 +739,7 @@ OMX_ERRORTYPE PROXY_VIDDEC_FillBufferDone(OMX_HANDLETYPE hComponent,
if(pCompPrv->proxyPortBuffers[OMX_VIDEODECODER_OUTPUT_PORT].proxyBufferType
== GrallocPointers) {
- for (count = 0; count < pCompPrv->nTotalBuffers; ++count)
+ for (count = 0; count < pCompPrv->nTotalBuffers; ++count)
{
if (pCompPrv->tBufList[count].pBufHeaderRemote == remoteBufHdr)
{
@@ -702,7 +752,35 @@ OMX_ERRORTYPE PROXY_VIDDEC_FillBufferDone(OMX_HANDLETYPE hComponent,
OMX_ErrorBadParameter,
"Received invalid-buffer header from OMX component");
pCompPrv->grallocModule->unlock((gralloc_module_t const *) pCompPrv->grallocModule, (buffer_handle_t)grallocHandle);
- }
+
+#ifdef ENABLE_RAW_BUFFERS_DUMP_UTILITY
+ ALOGV("frm[%u] to[%u] run[%u]", pCompPrv->debugframeInfo.fromFrame, pCompPrv->debugframeInfo.toFrame, pCompPrv->debugframeInfo.runningFrame);
+ /* Fill Buffer Done succeeded, hence start dumping if requested */
+ OMX_BUFFERHEADERTYPE *pBufHdr = pCompPrv->tBufList[count].pBufHeader;
+ if ((pCompPrv->debugframeInfo.fromFrame == 0) && (pCompPrv->debugframeInfo.runningFrame <= pCompPrv->debugframeInfo.toFrame))
+ {
+ /* Lock the buffer for SW read usage and then access it */
+ pCompPrv->grallocModule->lock((gralloc_module_t const*) pCompPrv->grallocModule,
+ (buffer_handle_t)grallocHandle,
+ GRALLOC_USAGE_HW_RENDER | GRALLOC_USAGE_SW_READ_RARELY,
+ pCompPrv->debugframeInfo.frame_xoffset,
+ pCompPrv->debugframeInfo.frame_yoffset,
+ pCompPrv->debugframeInfo.frame_width,
+ pCompPrv->debugframeInfo.frame_height,
+ (void*)pCompPrv->debugframeInfo.y_uv);
+
+ DumpVideoFrame(&pCompPrv->debugframeInfo);
+
+ pCompPrv->grallocModule->unlock((gralloc_module_t const *) pCompPrv->grallocModule,
+ (buffer_handle_t)grallocHandle);
+ pCompPrv->debugframeInfo.runningFrame++;
+ }
+ else if (pCompPrv->debugframeInfo.fromFrame > 0)
+ {
+ pCompPrv->debugframeInfo.fromFrame--;
+ }
+#endif
+ }
eRPCError = PROXY_FillBufferDone(hComponent,remoteBufHdr, nfilledLen, nOffset, nFlags,
nTimeStamp, hMarkTargetComponent, pMarkData);
diff --git a/domx/omx_proxy_component/omx_video_dec/src/omx_proxy_videodec_secure.c b/domx/omx_proxy_component/omx_video_dec/src/omx_proxy_videodec_secure.c
index 667a8d8..473a7f0 100644
--- a/domx/omx_proxy_component/omx_video_dec/src/omx_proxy_videodec_secure.c
+++ b/domx/omx_proxy_component/omx_video_dec/src/omx_proxy_videodec_secure.c
@@ -65,7 +65,8 @@ OMX_ERRORTYPE OMX_ComponentInit(OMX_HANDLETYPE hComponent)
if (pComponentPrivate->secure_misc_drv_fd < 0)
{
DOMX_ERROR("Can't open rproc_user device 0x%x\n", errno);
- return OMX_ErrorInsufficientResources;
+ eError = OMX_ErrorInsufficientResources;
+ goto EXIT;
}
ret = write(pComponentPrivate->secure_misc_drv_fd, &enable, sizeof(enable));
@@ -99,12 +100,21 @@ OMX_ERRORTYPE OMX_ComponentInit(OMX_HANDLETYPE hComponent)
pComponentPrivate->bMapIonBuffers = OMX_FALSE;
#endif
EXIT:
- if(eError != OMX_ErrorNone)
- {
- TIMM_OSAL_Free(pHandle->pComponentPrivate);
- pHandle->pComponentPrivate = NULL;
- }
- return eError;
+ if (eError != OMX_ErrorNone)
+ {
+ DOMX_DEBUG("Error in Initializing Proxy");
+ if (pComponentPrivate->cCompName != NULL)
+ {
+ TIMM_OSAL_Free(pComponentPrivate->cCompName);
+ pComponentPrivate->cCompName = NULL;
+ }
+ if (pComponentPrivate != NULL)
+ {
+ TIMM_OSAL_Free(pComponentPrivate);
+ pComponentPrivate = NULL;
+ }
+ }
+ return eError;
}
OMX_ERRORTYPE PROXY_VIDDEC_Secure_ComponentDeInit(OMX_HANDLETYPE hComponent)
diff --git a/domx/omx_proxy_component/omx_video_dec/src/omx_proxy_videodec_utils.c b/domx/omx_proxy_component/omx_video_dec/src/omx_proxy_videodec_utils.c
new file mode 100755
index 0000000..53922fd
--- /dev/null
+++ b/domx/omx_proxy_component/omx_video_dec/src/omx_proxy_videodec_utils.c
@@ -0,0 +1,318 @@
+/*
+ * Copyright (c) 2010, Texas Instruments Incorporated
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * * Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * * Neither the name of Texas Instruments Incorporated nor the names of
+ * its contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
+ * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
+ * EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+/*==============================================================
+ *! Revision History
+ *! ============================
+ *! 21-Oct-2011 Rajesh vandanapu sarthav@ti.com: Initial Version
+ *================================================================*/
+
+/******************************************************************
+ * INCLUDE FILES
+ ******************************************************************/
+#include <stdio.h>
+#include <string.h>
+#include <assert.h>
+#include "omx_proxy_common.h"
+#include <timm_osal_interfaces.h>
+#include "OMX_TI_IVCommon.h"
+#include "OMX_TI_Video.h"
+#include "OMX_TI_Index.h"
+
+#ifdef ENABLE_RAW_BUFFERS_DUMP_UTILITY
+#define LOG_TAG "OMXPROXYVIDEODEC"
+#include <fcntl.h>
+#include <cutils/properties.h>
+#include <utils/Log.h>
+#include <stdlib.h>
+#include <errno.h>
+#endif
+
+#define COMPONENT_NAME "OMX.TI.DUCATI1.VIDEO.DECODER"
+/* needs to be specific for every configuration wrapper */
+
+/* DEFINITIONS for parsing the config information & sequence header for WMV*/
+#define VIDDEC_GetUnalignedDword( pb, dw ) \
+ (dw) = ((OMX_U32) *(pb + 3) << 24) + \
+ ((OMX_U32) *(pb + 2) << 16) + \
+ ((OMX_U16) *(pb + 1) << 8) + *pb;
+
+#define VIDDEC_GetUnalignedDwordEx( pb, dw ) VIDDEC_GetUnalignedDword( pb, dw ); (pb) += sizeof(OMX_U32);
+#define VIDDEC_LoadDWORD( dw, p ) VIDDEC_GetUnalignedDwordEx( p, dw )
+#define VIDDEC_MAKEFOURCC(ch0, ch1, ch2, ch3) \
+ ((OMX_U32)(OMX_U8)(ch0) | ((OMX_U32)(OMX_U8)(ch1) << 8) | \
+ ((OMX_U32)(OMX_U8)(ch2) << 16) | ((OMX_U32)(OMX_U8)(ch3) << 24 ))
+
+#define VIDDEC_FOURCC(ch0, ch1, ch2, ch3) VIDDEC_MAKEFOURCC(ch0, ch1, ch2, ch3)
+
+#define FOURCC_WMV3 VIDDEC_FOURCC('W','M','V','3')
+#define FOURCC_WMV2 VIDDEC_FOURCC('W','M','V','2')
+#define FOURCC_WMV1 VIDDEC_FOURCC('W','M','V','1')
+#define FOURCC_WVC1 VIDDEC_FOURCC('W','V','C','1')
+
+#define CSD_POSITION 51 /*Codec Specific Data position on the "stream properties object"(ASF spec)*/
+
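The VIDDEC_LoadDWORD/VIDDEC_FOURCC macros above read a 32-bit value byte by byte in little-endian order and advance the source pointer; that is how the width, height and compression tag are pulled out of the codec-config buffer further down. A short illustrative expansion, using example bytes rather than data from a real stream:

    /* Sketch: reading a little-endian FOURCC with the macros defined above. */
    OMX_U8 raw[4] = { 'W', 'M', 'V', '3' };   /* bytes as stored in the stream */
    OMX_U8 *pb = raw;
    OMX_U32 tag;
    VIDDEC_LoadDWORD(tag, pb);                /* pb advances by sizeof(OMX_U32) */
    /* tag == VIDDEC_FOURCC('W','M','V','3') == FOURCC_WMV3 */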
+typedef struct VIDDEC_WMV_RCV_struct {
+ OMX_U32 nNumFrames : 24;
+ OMX_U32 nFrameType : 8;
+ OMX_U32 nID : 32;
+ OMX_U32 nStructData : 32; //STRUCT_C
+ OMX_U32 nVertSize; //STRUCT_A-1
+ OMX_U32 nHorizSize; //STRUCT_A-2
+ OMX_U32 nID2 : 32;
+ OMX_U32 nSequenceHdr : 32; //STRUCT_B
+} VIDDEC_WMV_RCV_struct;
+
+typedef struct VIDDEC_WMV_VC1_struct {
+ OMX_U32 nNumFrames : 24;
+ OMX_U32 nFrameType : 8;
+ OMX_U32 nID : 32;
+ OMX_U32 nStructData : 32; //STRUCT_C
+ OMX_U32 nVertSize; //STRUCT_A-1
+ OMX_U32 nHorizSize; //STRUCT_A-2
+ OMX_U32 nID2 : 32;
+ OMX_U32 nSequenceHdr : 32; //STRUCT_B
+} VIDDEC_WMV_VC1_struct;
+
+
+OMX_ERRORTYPE PrearrageEmptyThisBuffer(OMX_HANDLETYPE hComponent,
+ OMX_BUFFERHEADERTYPE * pBufferHdr)
+{
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ PROXY_COMPONENT_PRIVATE *pCompPrv = NULL;
+ OMX_COMPONENTTYPE *hComp = (OMX_COMPONENTTYPE *) hComponent;
+ OMX_U8* pBuffer = NULL;
+ OMX_U8* pData = NULL;
+ OMX_U32 nValue = 0;
+ OMX_U32 nWidth = 0;
+ OMX_U32 nHeight = 0;
+ OMX_U32 nActualCompression = 0;
+ OMX_U8* pCSD = NULL;
+ OMX_U32 nSize_CSD = 0;
+ DOMX_ENTER("");
+
+ PROXY_assert(pBufferHdr != NULL, OMX_ErrorBadParameter, NULL);
+
+ if (pBufferHdr->nFlags & OMX_BUFFERFLAG_CODECCONFIG){
+ PROXY_assert(hComp->pComponentPrivate != NULL, OMX_ErrorBadParameter, NULL);
+
+ pCompPrv = (PROXY_COMPONENT_PRIVATE *) hComp->pComponentPrivate;
+ /* Get component role */
+ OMX_PARAM_COMPONENTROLETYPE compRole;
+ compRole.nSize = sizeof(OMX_PARAM_COMPONENTROLETYPE);
+ compRole.nVersion.s.nVersionMajor = 1;
+ compRole.nVersion.s.nVersionMinor = 1; //Ducati OMX version
+ compRole.nVersion.s.nRevision = 0;
+ compRole.nVersion.s.nStep = 0;
+
+ eError = PROXY_GetParameter(hComp, OMX_IndexParamStandardComponentRole, &compRole);
+ if(eError != OMX_ErrorNone){
+ DOMX_ERROR("Error getting OMX_IndexParamStandardComponentRole");
+ }
+
+ if(!strcmp((char *)(compRole.cRole), "video_decoder.wmv")){
+ pBuffer = pBufferHdr->pBuffer;
+
+ VIDDEC_WMV_RCV_struct sStructRCV;
+
+ DOMX_DEBUG("nFlags: %x", pBufferHdr->nFlags);
+
+ pData = pBufferHdr->pBuffer + 15; /*Position to Width & Height*/
+ VIDDEC_LoadDWORD(nValue, pData);
+ nWidth = nValue;
+ VIDDEC_LoadDWORD(nValue, pData);
+ nHeight = nValue;
+
+ pData += 4; /*Position to compression type*/
+ VIDDEC_LoadDWORD(nValue, pData);
+ nActualCompression = nValue;
+
+ /*Setting pCSD to proper position*/
+ pCSD = pBufferHdr->pBuffer;
+ pCSD += CSD_POSITION;
+ nSize_CSD = pBufferHdr->nFilledLen - CSD_POSITION;
+
+ if(nActualCompression == FOURCC_WMV3){
+
+ //From VC-1 spec: Table 265: Sequence Layer Data Structure
+ sStructRCV.nNumFrames = 0xFFFFFF; /*Infinite frame number*/
+ sStructRCV.nFrameType = 0xc5; /*0x85 is the value given by ASF to rcv converter*/
+ sStructRCV.nID = 0x04; /*WMV3*/
+ sStructRCV.nStructData = 0x018a3106; /*0x06318a01zero fill 0x018a3106*/
+ sStructRCV.nVertSize = nHeight;
+ sStructRCV.nHorizSize = nWidth;
+ sStructRCV.nID2 = 0x0c; /* Fix value */
+ sStructRCV.nSequenceHdr = 0x00002a9f; /* This value is not provided by parser, so giving a value from a video*/
+
+ DOMX_DEBUG("initial: nStructData: %x", sStructRCV.nStructData);
+ DOMX_DEBUG("pCSD = %x", (OMX_U32)*pCSD);
+
+ sStructRCV.nStructData = (OMX_U32)pCSD[0] << 0 |
+ pCSD[1] << 8 |
+ pCSD[2] << 16 |
+ pCSD[3] << 24;
+
+ DOMX_DEBUG("FINAL: nStructData: %x", sStructRCV.nStructData);
+
+ //Copy RCV structure to actual buffer
+ assert(pBufferHdr->nFilledLen < pBufferHdr->nAllocLen);
+ pBufferHdr->nFilledLen = sizeof(VIDDEC_WMV_RCV_struct);
+ TIMM_OSAL_Memcpy(pBufferHdr->pBuffer, (OMX_U8*)(&sStructRCV),
+ pBufferHdr->nFilledLen);
+
+ }
+ else if (nActualCompression == FOURCC_WVC1){
+ DOMX_DEBUG("VC-1 Advance Profile prearrange");
+ pBufferHdr->nOffset = pBufferHdr->nOffset+52;
+ pBufferHdr->nFilledLen= pBufferHdr->nFilledLen-52;
+ }
+ }
+ }
+
+ EXIT:
+ DOMX_EXIT("eError: %d", eError);
+
+ return PROXY_EmptyThisBuffer(hComponent, pBufferHdr);
+}
+
+#ifdef ENABLE_RAW_BUFFERS_DUMP_UTILITY
+/**
+* Usage#
+* By default this feature is kept disabled to avoid security leaks.
+*
+* (1) Uncomment the below 2 lines from Android.mk
+* #LOCAL_CFLAGS += -DENABLE_RAW_BUFFERS_DUMP_UTILITY
+* #LOCAL_SHARED_LIBRARIES += libcutils
+* And rebuild the omx proxy common component
+*
+* (2) Before starting playback, make sure that the "data" folder has r/w
+* permissions. For this, execute the below
+* mount -o rw,remount -t ext3 /dev/block/mmcblk0p1 /data/
+* chmod 777 /data/
+*
+* (3) Set the property for number of frames to dump
+* eg: setprop debug.video.dumpframe 10:20
+* would dump frames from 10 to 20.
+*
+* (4) Pull the frames to PC over adb
+* adb pull /data/frame_10.txt
+*
+* (5) Analyse the frames with PC tools.
+*/
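The "from:to" string set through debug.video.dumpframe is split with strtok() and atoi() in OMX_ComponentInit of omx_proxy_videodec.c (see the hunk earlier in this patch). Roughly, the parsing looks like the sketch below, shown only to make the expected property format explicit:

    char value[PROPERTY_VALUE_MAX];
    property_get("debug.video.dumpframe", value, "0:0");   /* default: dump nothing */
    /* "10:20" dumps frames 10 through 20; a negative 'from' disables dumping */
    int fromFrame = atoi(strtok(value, ":"));
    int toFrame   = atoi(strtok(NULL, ":"));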
+
+/*
+* Method to convert NV12 to YUV420p for PC analysis
+*/
+static void convertNV12ToYuv420(DebugFrame_Dump *frameInfo, void *dst)
+{
+ int stride = 4096; /* ARM Page size = 4k */
+ uint32_t ybuf_offset = frameInfo->frame_yoffset * stride + frameInfo->frame_xoffset;
+ uint8_t* p1y = (uint8_t*)frameInfo->y_uv[0] + ybuf_offset;
+ uint8_t* p2y = (uint8_t*) dst;
+ int i, j, j1;
+ int width = frameInfo->frame_width;
+ int height = frameInfo->frame_height;
+
+ LOGD("Coverting NV-12 to YUV420p Width[%d], Height[%d] and Stride[%d] offset[%d]",
+ width, height, stride, ybuf_offset);
+
+ /* copy y-buffer, almost bytewise copy, except for stride jumps.*/
+ for(i=0;i<height;i++)
+ {
+ /* copy whole row of Y pixels. source and destination will point to a new row each time.*/
+ memcpy(p2y+i*width, p1y+i*stride, width);
+ }
+
+ /** copy uv buffers
+ * rearrange from interleaved [uvuvuv] to planar [uuu][vvv], pixel by pixel
+ * calculate the offset for UV buffer
+ */
+ uint32_t UV_offset = frameInfo->frame_xoffset +
+ (frameInfo->frame_yoffset * stride)/2;
+
+ const uint8_t* p1uv = (uint8_t*)frameInfo->y_uv[1] + UV_offset;
+
+ uint8_t* p2u = ((uint8_t*) dst + (width * height));
+ uint8_t* p2v = ((uint8_t*) p2u + ((width/2) * (height/2)));
+ for(i=0;(i < height/2);i++)
+ {
+ for(j=0,j1=0;(j< width/2);j++,j1+=2)
+ {
+ p2u[j] = p1uv[j1];
+ p2v[j] = p1uv[j1+1];
+ }
+ p1uv+=stride;
+ p2u+=width/2;
+ p2v+=width/2;
+ }
+}
+
+void DumpVideoFrame(DebugFrame_Dump *frameInfo)
+{
+ /* First convert the frame to 420p and then write to SD Card */
+ OMX_U32 framesize = (frameInfo->frame_width *
+ frameInfo->frame_height * 3) / 2;
+ OMX_U8* localbuffer = malloc(framesize);
+ if (localbuffer == NULL)
+ {
+ LOGE("NO HEAP");
+ goto EXIT;
+ }
+ convertNV12ToYuv420(frameInfo, localbuffer);
+ int filedes = -1;
+ char framenumber[100];
+ sprintf(framenumber, "/data/frame_%ld.txt", frameInfo->runningFrame);
+ LOGD("file path %s",framenumber);
+ filedes = open(framenumber, O_CREAT | O_WRONLY | O_SYNC | O_TRUNC, 0777);
+ if(filedes < 0)
+ {
+ LOGE("\n!!!!!!!!!Error in file open!!!!!!!! [%d][%s]\n", filedes, strerror(errno));
+ goto EXIT;
+ }
+ int ret = write (filedes, (void*)localbuffer, framesize);
+ if (ret < (int)framesize)
+ {
+ LOGE("File Write Failed");
+ }
+EXIT:
+ if (localbuffer)
+ {
+ free(localbuffer);
+ localbuffer = NULL;
+ }
+ if (filedes > 0)
+ {
+ close(filedes);
+ }
+}
+
+#endif
diff --git a/hwc/Android.mk b/hwc/Android.mk
index c6a2eae..81ad059 100644
--- a/hwc/Android.mk
+++ b/hwc/Android.mk
@@ -7,15 +7,19 @@ include $(CLEAR_VARS)
LOCAL_PRELINK_MODULE := false
LOCAL_ARM_MODE := arm
LOCAL_MODULE_PATH := $(TARGET_OUT_SHARED_LIBRARIES)/../vendor/lib/hw
-LOCAL_SHARED_LIBRARIES := liblog libEGL libcutils libutils libhardware libhardware_legacy libz
-LOCAL_SRC_FILES := hwc.c
+LOCAL_SHARED_LIBRARIES := liblog libEGL libcutils libutils libhardware libhardware_legacy libz \
+ libion_ti
+LOCAL_SRC_FILES := hwc.c rgz_2d.c
LOCAL_STATIC_LIBRARIES := libpng
LOCAL_MODULE_TAGS := optional
LOCAL_MODULE := hwcomposer.omap4
LOCAL_CFLAGS := -DLOG_TAG=\"ti_hwc\"
-LOCAL_C_INCLUDES += external/libpng external/zlib
+LOCAL_C_INCLUDES += \
+ external/libpng external/zlib \
+ $(HARDWARE_TI_OMAP4_BASE)/kernel-headers-ti
+
# LOG_NDEBUG=0 means verbose logging enabled
# LOCAL_CFLAGS += -DLOG_NDEBUG=0
include $(BUILD_SHARED_LIBRARY)
diff --git a/hwc/hal_public.h b/hwc/hal_public.h
index 3d64c4c..a7dfb08 100644
--- a/hwc/hal_public.h
+++ b/hwc/hal_public.h
@@ -164,4 +164,25 @@ typedef struct IMG_buffer_format_public_t
}
IMG_buffer_format_public_t;
+/*
+ * These are vendor specific pixel formats, by (informal) convention IMGTec
+ * formats start from the top of the range, TI formats start from the bottom
+ */
+#define HAL_PIXEL_FORMAT_BGRX_8888 0x1FF
+#define HAL_PIXEL_FORMAT_TI_NV12 0x100
+#define HAL_PIXEL_FORMAT_TI_UNUSED 0x101 /* Free for use */
+#define HAL_PIXEL_FORMAT_TI_NV12_1D 0x102
+
+#ifndef GRALLOC_USAGE_SYSTEM_HEAP
+#define GRALLOC_USAGE_SYSTEM_HEAP GRALLOC_USAGE_PRIVATE_0
+#else
+#error GRALLOC_USAGE_SYSTEM_HEAP should only be defined by hal_public.h
+#endif
+
+#ifndef GRALLOC_USAGE_PHYS_CONTIG
+#define GRALLOC_USAGE_PHYS_CONTIG GRALLOC_USAGE_PRIVATE_1
+#else
+#error GRALLOC_USAGE_PHYS_CONTIG should only be defined by hal_public.h
+#endif
#endif /* HAL_PUBLIC_H */
+
diff --git a/hwc/hwc.c b/hwc/hwc.c
index 74012dd..7c0fad0 100644
--- a/hwc/hwc.c
+++ b/hwc/hwc.c
@@ -20,6 +20,7 @@
#include <stdarg.h>
#include <fcntl.h>
#include <poll.h>
+#include <sys/eventfd.h>
#include <sys/ioctl.h>
#include <linux/fb.h>
#include <linux/omapfb.h>
@@ -29,14 +30,18 @@
#include <cutils/properties.h>
#include <cutils/log.h>
#include <cutils/native_handle.h>
-#define HWC_REMOVE_DEPRECATED_VERSIONS 1
+#define HWC_REMOVE_DEPRECATED_VERSIONS 0
#include <hardware/hardware.h>
#include <hardware/hwcomposer.h>
#include <EGL/egl.h>
#include <hardware_legacy/uevent.h>
#include <png.h>
+#include <utils/Timers.h>
#include <system/graphics.h>
+#include <linux/bltsville.h>
+
+#define MAX_HWC_LAYERS 32
#define ASPECT_RATIO_TOLERANCE 0.02f
@@ -50,15 +55,18 @@
#define DIV_ROUND_UP(a, b) (((a) + (b) - 1) / (b))
#include <video/dsscomp.h>
+#include <video/omap_hwc.h>
#include "hal_public.h"
+#include "rgz_2d.h"
+
+#include <linux/ion.h>
+#include <linux/omap_ion.h>
+#include <ion/ion.h>
#define MAX_HW_OVERLAYS 4
#define NUM_NONSCALING_OVERLAYS 1
-#define HAL_PIXEL_FORMAT_BGRX_8888 0x1FF
-#define HAL_PIXEL_FORMAT_TI_NV12 0x100
-#define HAL_PIXEL_FORMAT_TI_NV12_PADDED 0x101
-#define MAX_TILER_SLOT (16 << 20)
+#define NUM_EXT_DISPLAY_BACK_BUFFERS 2
struct ext_transform_t {
__u8 rotation : 3; /* 90-degree clockwise rotations */
@@ -103,6 +111,17 @@ enum {
EXT_HFLIP = (1 << 2), /* flip l-r on output (after rotation) */
};
+enum bltpolicy {
+ BLTPOLICY_DISABLED = 0,
+ BLTPOLICY_DEFAULT = 1, /* Default blit policy */
+ BLTPOLICY_ALL, /* Test mode to attempt to blit all */
+};
+
+enum bltmode {
+ BLTMODE_PAINT = 0, /* Attempt to blit layer by layer */
+ BLTMODE_REGION = 1, /* Attempt to blit layers via regions */
+};
+
/* ARGB image */
struct omap4_hwc_img {
int width;
@@ -119,6 +138,24 @@ struct omap4_hwc_module {
};
typedef struct omap4_hwc_module omap4_hwc_module_t;
+struct counts {
+ unsigned int possible_overlay_layers;
+ unsigned int composited_layers;
+ unsigned int scaled_layers;
+ unsigned int RGB;
+ unsigned int BGR;
+ unsigned int NV12;
+ unsigned int dockable;
+ unsigned int protected;
+
+ unsigned int max_hw_overlays;
+ unsigned int max_scaling_overlays;
+ unsigned int mem;
+ unsigned int s3d;
+
+ unsigned int large_rgb32_layers;
+};
+
struct omap4_hwc_device {
/* static data */
hwc_composer_device_1_t base;
@@ -131,33 +168,57 @@ struct omap4_hwc_device {
int fb_fd; /* file descriptor for /dev/fb0 */
int dsscomp_fd; /* file descriptor for /dev/dsscomp */
int hdmi_fb_fd; /* file descriptor for /dev/fb1 */
- int pipe_fds[2]; /* pipe to event thread */
+ int wakeup_evt; /* eventfd used to wakeup event thread */
int img_mem_size; /* size of fb for hdmi */
void *img_mem_ptr; /* start of fb for hdmi */
int flags_rgb_order;
int flags_nv12_only;
+ float upscaled_nv12_limit;
+ int on_tv; /* using a tv */
int force_sgx;
omap4_hwc_ext_t ext; /* external mirroring data */
int idle;
- int ovls_blending;
- /* composition data */
- struct dsscomp_setup_dispc_data dsscomp_data;
+ float primary_m[2][3]; /* internal transformation matrix */
+ int primary_transform;
+ int primary_rotation;
+ hwc_rect_t primary_region;
+
buffer_handle_t *buffers;
int use_sgx;
int swap_rb;
- unsigned int post2_layers;
+ unsigned int post2_layers; /* Buffers used with DSS pipes*/
+ unsigned int post2_blit_buffers; /* Buffers used with blit */
int ext_ovls; /* # of overlays on external display for current composition */
int ext_ovls_wanted; /* # of overlays that should be on external display for current composition */
int last_ext_ovls; /* # of overlays on external/internal display for last composition */
int last_int_ovls;
+
+ enum bltmode blt_mode;
+ enum bltpolicy blt_policy;
+
+ int blit_flags;
+ int blit_num;
+ struct omap_hwc_data comp_data; /* This is a kernel data structure */
+ struct rgz_blt_entry blit_ops[RGZ_MAX_BLITS];
+ struct counts stats;
+ int ion_fd;
+ struct ion_handle *ion_handles[2];
+
+ /* fake vsync event state */
+ pthread_mutex_t vsync_lock;
+ int vsync_enabled;
+ uint64_t last_vsync_time;
+ int last_vsync_time_valid;
+ uint64_t fake_vsync_period;
};
typedef struct omap4_hwc_device omap4_hwc_device_t;
#define HAL_FMT(f) ((f) == HAL_PIXEL_FORMAT_TI_NV12 ? "NV12" : \
+ (f) == HAL_PIXEL_FORMAT_TI_NV12_1D ? "NV12" : \
(f) == HAL_PIXEL_FORMAT_YV12 ? "YV12" : \
(f) == HAL_PIXEL_FORMAT_BGRX_8888 ? "xRGB32" : \
(f) == HAL_PIXEL_FORMAT_RGBX_8888 ? "xBGR32" : \
@@ -171,6 +232,34 @@ typedef struct omap4_hwc_device omap4_hwc_device_t;
(f) == OMAP_DSS_COLOR_RGB16 ? "RGB565" : "??")
static int debug = 0;
+static int debugpost2 = 0;
+static int debugblt = 0;
+static rgz_t grgz;
+static struct bvsurfgeom gscrngeom;
+
+static void showfps(void)
+{
+ static int framecount = 0;
+ static int lastframecount = 0;
+ static nsecs_t lastfpstime = 0;
+ static float fps = 0;
+ char value[PROPERTY_VALUE_MAX];
+
+ property_get("debug.hwc.showfps", value, "0");
+ if (!atoi(value)) {
+ return;
+ }
+
+ framecount++;
+ if (!(framecount & 0x7)) {
+ nsecs_t now = systemTime(SYSTEM_TIME_MONOTONIC);
+ nsecs_t diff = now - lastfpstime;
+ fps = ((framecount - lastframecount) * (float)(s2ns(1))) / diff;
+ lastfpstime = now;
+ lastframecount = framecount;
+ ALOGI("%d Frames, %f FPS", framecount, fps);
+ }
+}
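/*
 * Illustrative arithmetic (not part of this patch): with the 8-frame window
 * above, if the last 8 frames took 133 ms, fps = 8 * 1e9 / 133e6 ~= 60.2,
 * which is what gets logged whenever debug.hwc.showfps is set non-zero.
 */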
static void dump_layer(hwc_layer_1_t const* l)
{
@@ -241,7 +330,7 @@ static void dump_printf(struct dump_buf *buf, const char *fmt, ...)
static void dump_set_info(omap4_hwc_device_t *hwc_dev, hwc_display_contents_1_t* list)
{
- struct dsscomp_setup_dispc_data *dsscomp = &hwc_dev->dsscomp_data;
+ struct dsscomp_setup_dispc_data *dsscomp = &hwc_dev->comp_data.dsscomp_data;
char logbuf[1024];
struct dump_buf log = {
.buf = logbuf,
@@ -255,7 +344,14 @@ static void dump_set_info(omap4_hwc_device_t *hwc_dev, hwc_display_contents_1_t*
dump_printf(&log, " ");
hwc_layer_1_t *layer = &list->hwLayers[i];
IMG_native_handle_t *handle = (IMG_native_handle_t *)layer->handle;
- dump_printf(&log, "%p:%s,", handle, layer->compositionType == HWC_OVERLAY ? "DSS" : "SGX");
+ if (hwc_dev->post2_blit_buffers) {
+ if ((i + 1) < hwc_dev->post2_layers)
+ dump_printf(&log, "%p:%s,", handle, "DSS");
+ else
+ dump_printf(&log, "%p:%s,", handle, "BV2D");
+ }
+ else
+ dump_printf(&log, "%p:%s,", handle, layer->compositionType == HWC_OVERLAY ? "DSS" : "SGX");
if ((layer->flags & HWC_SKIP_LAYER) || !handle) {
dump_printf(&log, "SKIP");
continue;
@@ -286,6 +382,13 @@ static void dump_set_info(omap4_hwc_device_t *hwc_dev, hwc_display_contents_1_t*
dump_printf(&log, " ");
dump_printf(&log, "%p", hwc_dev->buffers[i]);
}
+ if (hwc_dev->post2_blit_buffers) {
+ dump_printf(&log, "} B{");
+ for (i = hwc_dev->post2_layers;
+ i < hwc_dev->post2_blit_buffers + hwc_dev->post2_layers; i++) {
+ dump_printf(&log, "%p ", hwc_dev->buffers[i]);
+ }
+ }
dump_printf(&log, "}%s\n", hwc_dev->use_sgx ? " swap" : "");
ALOGD("%s", log.buf);
@@ -302,7 +405,7 @@ static int omap4_hwc_is_valid_format(int format)
case HAL_PIXEL_FORMAT_BGRA_8888:
case HAL_PIXEL_FORMAT_BGRX_8888:
case HAL_PIXEL_FORMAT_TI_NV12:
- case HAL_PIXEL_FORMAT_TI_NV12_PADDED:
+ case HAL_PIXEL_FORMAT_TI_NV12_1D:
return 1;
default:
@@ -330,6 +433,18 @@ static int is_protected(hwc_layer_1_t *layer)
#define is_BLENDED(layer) ((layer)->blending != HWC_BLENDING_NONE)
+static int is_RGB32(IMG_native_handle_t *handle)
+{
+ switch(handle->iFormat)
+ {
+ case HAL_PIXEL_FORMAT_BGRA_8888:
+ case HAL_PIXEL_FORMAT_BGRX_8888:
+ return 1;
+ default:
+ return 0;
+ }
+}
+
static int is_RGB(IMG_native_handle_t *handle)
{
switch(handle->iFormat)
@@ -342,6 +457,21 @@ static int is_RGB(IMG_native_handle_t *handle)
return 0;
}
}
+static int get_rgb_bpp(IMG_native_handle_t *handle)
+{
+ switch(handle->iFormat)
+ {
+ case HAL_PIXEL_FORMAT_BGRA_8888:
+ case HAL_PIXEL_FORMAT_BGRX_8888:
+ case HAL_PIXEL_FORMAT_RGBX_8888:
+ case HAL_PIXEL_FORMAT_RGBA_8888:
+ return 32;
+ case HAL_PIXEL_FORMAT_RGB_565:
+ return 16;
+ default:
+ return 0;
+ }
+}
static int is_BGR_format(int format)
{
@@ -364,13 +494,32 @@ static int is_NV12(IMG_native_handle_t *handle)
switch(handle->iFormat)
{
case HAL_PIXEL_FORMAT_TI_NV12:
- case HAL_PIXEL_FORMAT_TI_NV12_PADDED:
+ case HAL_PIXEL_FORMAT_TI_NV12_1D:
return 1;
default:
return 0;
}
}
+static int is_upscaled_NV12(omap4_hwc_device_t *hwc_dev, hwc_layer_1_t *layer)
+{
+ if (!layer)
+ return 0;
+
+ IMG_native_handle_t *handle = (IMG_native_handle_t *)layer->handle;
+ if (!is_NV12(handle))
+ return 0;
+
+ int w = WIDTH(layer->sourceCrop);
+ int h = HEIGHT(layer->sourceCrop);
+
+ if (layer->transform & HWC_TRANSFORM_ROT_90)
+ swap(w, h);
+
+ return (WIDTH(layer->displayFrame) >= w * hwc_dev->upscaled_nv12_limit ||
+ HEIGHT(layer->displayFrame) >= h * hwc_dev->upscaled_nv12_limit);
+}
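/*
 * Illustrative worked example (not part of this patch): with the default
 * upscaled_nv12_limit of 2.0, a 640x480 NV12 crop displayed in a 1400x1050
 * window counts as upscaled (1400 >= 640 * 2.0), so prepare() keeps such a
 * layer on a DSS overlay even when SGX composition is being forced.
 */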
+
static int dockable(hwc_layer_1_t *layer)
{
IMG_native_handle_t *handle = (IMG_native_handle_t *)layer->handle;
@@ -378,6 +527,46 @@ static int dockable(hwc_layer_1_t *layer)
return (handle->usage & GRALLOC_USAGE_EXTERNAL_DISP);
}
+/* test if layer appears to be RGB32 (4 Bpp) and > 1280x720 */
+static int is_large_rgb32_layer(const hwc_layer_1_t *layer)
+{
+ IMG_native_handle_t *handle = (IMG_native_handle_t *)layer->handle;
+
+ return is_RGB32(handle) &&
+ (((layer->sourceCrop.right - layer->sourceCrop.left) > 1280) ||
+ ((layer->sourceCrop.bottom - layer->sourceCrop.top) > 720));
+}
+
+static uint64_t vsync_clock_now()
+{
+ uint64_t now = 0;
+ struct timespec ts;
+
+ if (!clock_gettime(CLOCK_MONOTONIC, &ts))
+ now = ((uint64_t)ts.tv_sec) * 1000000000ull + ts.tv_nsec;
+
+ return now;
+}
+
+static void wakeup_hdmi_thread(omap4_hwc_device_t *hwc_dev)
+{
+ uint64_t tmp = 1;
+ write(hwc_dev->wakeup_evt, &tmp, sizeof(tmp));
+}
+
+static void fire_vsync_event(omap4_hwc_device_t *hwc_dev, uint64_t timestamp)
+{
+ pthread_mutex_lock(&hwc_dev->vsync_lock);
+
+ hwc_dev->last_vsync_time_valid = 1;
+ hwc_dev->last_vsync_time = timestamp;
+
+ pthread_mutex_unlock(&hwc_dev->vsync_lock);
+
+ if (hwc_dev->procs && hwc_dev->procs->vsync)
+ hwc_dev->procs->vsync(hwc_dev->procs, 0, timestamp);
+}
+
static unsigned int mem1d(IMG_native_handle_t *handle)
{
if (handle == NULL || is_NV12(handle))
@@ -419,7 +608,7 @@ omap4_hwc_setup_layer_base(struct dss2_ovl_cfg *oc, int index, int format, int b
break;
case HAL_PIXEL_FORMAT_TI_NV12:
- case HAL_PIXEL_FORMAT_TI_NV12_PADDED:
+ case HAL_PIXEL_FORMAT_TI_NV12_1D:
oc->color_mode = OMAP_DSS_COLOR_NV12;
bits_per_pixel = 8;
oc->cconv = ctbl_bt601_5;
@@ -661,10 +850,25 @@ crop_to_rect(struct dss2_ovl_cfg *cfg, struct hwc_rect vis_rect)
}
static void
+omap4_hwc_apply_transform(float transform[2][3],struct dss2_ovl_cfg *oc)
+{
+ float x, y, w, h;
+
+ /* display position */
+ x = transform[0][0] * oc->win.x + transform[0][1] * oc->win.y + transform[0][2];
+ y = transform[1][0] * oc->win.x + transform[1][1] * oc->win.y + transform[1][2];
+ w = transform[0][0] * oc->win.w + transform[0][1] * oc->win.h;
+ h = transform[1][0] * oc->win.w + transform[1][1] * oc->win.h;
+ oc->win.x = m_round(w > 0 ? x : x + w);
+ oc->win.y = m_round(h > 0 ? y : y + h);
+ oc->win.w = m_round(w > 0 ? w : -w);
+ oc->win.h = m_round(h > 0 ? h : -h);
+}
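/*
 * Illustrative sketch (not part of this patch): a self-contained version of
 * the 2x3 affine window mapping above, with one concrete case. The struct,
 * helper and values below are hypothetical; the real code operates on
 * struct dss2_ovl_cfg and uses the driver's m_round().
 */
#include <stdio.h>
#include <math.h>

struct win { float x, y, w, h; };

static void apply_transform(const float m[2][3], struct win *o)
{
    float x = m[0][0] * o->x + m[0][1] * o->y + m[0][2];
    float y = m[1][0] * o->x + m[1][1] * o->y + m[1][2];
    float w = m[0][0] * o->w + m[0][1] * o->h;
    float h = m[1][0] * o->w + m[1][1] * o->h;
    /* negative w/h means the window got flipped: re-anchor and take |.| */
    o->x = roundf(w > 0 ? x : x + w);
    o->y = roundf(h > 0 ? y : y + h);
    o->w = roundf(fabsf(w));
    o->h = roundf(fabsf(h));
}

int main(void)
{
    /* 90-degree rotation of 1280x720 content into a 720-wide portrait target */
    const float m[2][3] = { { 0, -1, 720 }, { 1, 0, 0 } };
    struct win o = { 100, 50, 320, 240 };

    apply_transform(m, &o);
    printf("win: %.0f,%.0f %.0fx%.0f\n", o.x, o.y, o.w, o.h); /* 430,100 240x320 */
    return 0;
}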
+
+static void
omap4_hwc_adjust_ext_layer(omap4_hwc_ext_t *ext, struct dss2_ovl_info *ovl)
{
struct dss2_ovl_cfg *oc = &ovl->cfg;
- float x, y, w, h;
/* crop to clone region if mirroring */
if (!ext->current.docking &&
@@ -673,15 +877,7 @@ omap4_hwc_adjust_ext_layer(omap4_hwc_ext_t *ext, struct dss2_ovl_info *ovl)
return;
}
- /* display position */
- x = ext->m[0][0] * oc->win.x + ext->m[0][1] * oc->win.y + ext->m[0][2];
- y = ext->m[1][0] * oc->win.x + ext->m[1][1] * oc->win.y + ext->m[1][2];
- w = ext->m[0][0] * oc->win.w + ext->m[0][1] * oc->win.h;
- h = ext->m[1][0] * oc->win.w + ext->m[1][1] * oc->win.h;
- oc->win.x = m_round(w > 0 ? x : x + w);
- oc->win.y = m_round(h > 0 ? y : y + h);
- oc->win.w = m_round(w > 0 ? w : -w);
- oc->win.h = m_round(h > 0 ? h : -h);
+ omap4_hwc_apply_transform(ext->m, oc);
/* combining transformations: F^a*R^b*F^i*R^j = F^(a+i)*R^(j+b*(-1)^i), because F*R = R^(-1)*F */
oc->rotation += (oc->mirror ? -1 : 1) * ext->current.rotation;
@@ -690,33 +886,28 @@ omap4_hwc_adjust_ext_layer(omap4_hwc_ext_t *ext, struct dss2_ovl_info *ovl)
oc->mirror = !oc->mirror;
}
-static struct dsscomp_dispc_limitations {
- __u8 max_xdecim_2d;
- __u8 max_ydecim_2d;
- __u8 max_xdecim_1d;
- __u8 max_ydecim_1d;
- __u32 fclk;
- __u8 max_downscale;
- __u8 min_width;
- __u16 integer_scale_ratio_limit;
- __u16 max_width;
- __u16 max_height;
-} limits = {
- .max_xdecim_1d = 16,
- .max_xdecim_2d = 16,
- .max_ydecim_1d = 16,
- .max_ydecim_2d = 2,
- .fclk = 170666666,
- .max_downscale = 4,
- .min_width = 2,
- .integer_scale_ratio_limit = 2048,
- .max_width = 2048,
- .max_height = 2048,
-};
+static struct dsscomp_platform_info limits;
+
+static void
+omap4_hwc_adjust_primary_display_layer(omap4_hwc_device_t *hwc_dev, struct dss2_ovl_info *ovl)
+{
+ struct dss2_ovl_cfg *oc = &ovl->cfg;
+
+ if (crop_to_rect(&ovl->cfg, hwc_dev->primary_region) != 0) {
+ ovl->cfg.enabled = 0;
+ return;
+ }
+
+ omap4_hwc_apply_transform(hwc_dev->primary_m, oc);
+
+    /* combining transformations: F^a*R^b*F^i*R^j = F^(a+i)*R^(j+b*(-1)^i), because F*R = R^(-1)*F */
+ oc->rotation += (oc->mirror ? -1 : 1) * hwc_dev->primary_rotation;
+ oc->rotation &= 3;
+}
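/*
 * Illustrative sketch (not part of this patch): a brute-force check of the
 * flip/rotation composition rule quoted above, modelling each transform as a
 * 2x2 integer matrix (R = 90-degree rotation, F = left-right flip). All names
 * here are local to this example.
 */
#include <stdio.h>

typedef struct { int m[2][2]; } mat2;

static mat2 mul(mat2 a, mat2 b)
{
    mat2 r;
    int i, j;
    for (i = 0; i < 2; i++)
        for (j = 0; j < 2; j++)
            r.m[i][j] = a.m[i][0] * b.m[0][j] + a.m[i][1] * b.m[1][j];
    return r;
}

static mat2 mat_pow(mat2 a, int n)
{
    mat2 r = { { { 1, 0 }, { 0, 1 } } };
    while (n-- > 0)
        r = mul(r, a);
    return r;
}

static int eq(mat2 a, mat2 b)
{
    return a.m[0][0] == b.m[0][0] && a.m[0][1] == b.m[0][1] &&
           a.m[1][0] == b.m[1][0] && a.m[1][1] == b.m[1][1];
}

int main(void)
{
    const mat2 R = { { { 0, -1 }, { 1, 0 } } };  /* rotate 90 degrees */
    const mat2 F = { { { -1, 0 }, { 0, 1 } } };  /* flip left-right   */
    int a, b, i, j, bad = 0;

    for (a = 0; a < 2; a++)
    for (b = 0; b < 4; b++)
    for (i = 0; i < 2; i++)
    for (j = 0; j < 4; j++) {
        mat2 lhs = mul(mul(mat_pow(F, a), mat_pow(R, b)),
                       mul(mat_pow(F, i), mat_pow(R, j)));
        /* F^a R^b F^i R^j = F^(a+i) R^(j + b*(-1)^i) */
        int rot = ((j + (i ? -b : b)) % 4 + 4) % 4;
        mat2 rhs = mul(mat_pow(F, (a + i) & 1), mat_pow(R, rot));
        bad += !eq(lhs, rhs);
    }
    printf("mismatches: %d\n", bad); /* expected: 0 */
    return 0;
}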
static int omap4_hwc_can_scale(__u32 src_w, __u32 src_h, __u32 dst_w, __u32 dst_h, int is_2d,
- struct dsscomp_display_info *dis, struct dsscomp_dispc_limitations *limits,
- __u32 pclk)
+ struct dsscomp_display_info *dis, struct dsscomp_platform_info *limits,
+ __u32 pclk, void *handle)
{
__u32 fclk = limits->fclk / 1000;
__u32 min_src_w = DIV_ROUND_UP(src_w, is_2d ? limits->max_xdecim_2d : limits->max_xdecim_1d);
@@ -739,12 +930,16 @@ static int omap4_hwc_can_scale(__u32 src_w, __u32 src_h, __u32 dst_w, __u32 dst_
/* for manual panels pclk is 0, and there are no pclk based scaling limits */
if (!pclk)
- return !(dst_w * limits->max_downscale < min_src_w);
+ return !(dst_w < src_w / limits->max_downscale / (is_2d ? limits->max_xdecim_2d : limits->max_xdecim_1d));
/* :HACK: limit horizontal downscale well below theoretical limit as we saw display artifacts */
if (dst_w * 4 < src_w)
return 0;
+ if (handle)
+ if (get_rgb_bpp(handle) == 32 && src_w > 1280 && dst_w * 3 < src_w)
+ return 0;
+
/* max horizontal downscale is 4, or the fclk/pixclk */
if (fclk > pclk * limits->max_downscale)
fclk = pclk * limits->max_downscale;
@@ -771,7 +966,7 @@ static int omap4_hwc_can_scale_layer(omap4_hwc_device_t *hwc_dev, hwc_layer_1_t
/* NOTE: layers should be able to be scaled externally since
framebuffer is able to be scaled on selected external resolution */
return omap4_hwc_can_scale(src_w, src_h, dst_w, dst_h, is_NV12(handle), &hwc_dev->fb_dis, &limits,
- hwc_dev->fb_dis.timings.pixel_clock);
+ hwc_dev->fb_dis.timings.pixel_clock, handle);
}
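/*
 * Illustrative sketch (not part of this patch): the horizontal downscale caps
 * visible in omap4_hwc_can_scale() above, isolated into a tiny predicate with
 * a few worked cases. It deliberately ignores the fclk/pclk and decimation
 * checks; the 4x cap, the 1280 source-width threshold and the 3x cap for
 * 32-bpp sources are taken from the hunk, everything else is hypothetical.
 */
#include <stdio.h>

static int dss_downscale_ok(unsigned src_w, unsigned dst_w, int bpp)
{
    if (dst_w * 4 < src_w)
        return 0;   /* generic horizontal downscale cap (the :HACK: above) */
    if (bpp == 32 && src_w > 1280 && dst_w * 3 < src_w)
        return 0;   /* tighter cap for wide 32-bpp sources */
    return 1;
}

int main(void)
{
    printf("%d\n", dss_downscale_ok(1920, 640, 32)); /* 1: exactly 3x      */
    printf("%d\n", dss_downscale_ok(1920, 600, 32)); /* 0: beyond 3x       */
    printf("%d\n", dss_downscale_ok(1920, 400, 16)); /* 0: beyond 4x       */
    printf("%d\n", dss_downscale_ok(1280, 426, 32)); /* 1: source not wide */
    return 0;
}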
static int omap4_hwc_is_valid_layer(omap4_hwc_device_t *hwc_dev,
@@ -789,7 +984,7 @@ static int omap4_hwc_is_valid_layer(omap4_hwc_device_t *hwc_dev,
if (!is_NV12(handle)) {
if (layer->transform)
return 0;
- if (mem1d(handle) > MAX_TILER_SLOT)
+ if (mem1d(handle) > limits.tiler1d_slot_size)
return 0;
}
@@ -837,10 +1032,13 @@ static __u32 add_scaling_score(__u32 score,
static int omap4_hwc_set_best_hdmi_mode(omap4_hwc_device_t *hwc_dev, __u32 xres, __u32 yres,
float xpy)
{
+ int dis_ix = hwc_dev->on_tv ? 0 : 1;
+ int forced_preferred_mode = 0;
+
struct _qdis {
struct dsscomp_display_info dis;
struct dsscomp_videomode modedb[32];
- } d = { .dis = { .ix = 1 } };
+ } d = { .dis = { .ix = dis_ix } };
omap4_hwc_ext_t *ext = &hwc_dev->ext;
d.dis.modedb_len = sizeof(d.modedb) / sizeof(*d.modedb);
@@ -864,6 +1062,31 @@ static int omap4_hwc_set_best_hdmi_mode(omap4_hwc_device_t *hwc_dev, __u32 xres,
ext->yres = 480;
}
+ /*
+ * copy the xres/yres from the preferred mode
+ */
+ __u32 preferred_mode_xres = 0;
+ __u32 preferred_mode_yres = 0;
+
+ char value[PROPERTY_VALUE_MAX];
+ if (property_get("persist.hwc.preferred_mode", value, "") <= 0 ||
+ sscanf(value, "%dx%d", &preferred_mode_xres,
+ &preferred_mode_yres) != 2) {
+ for (i = 0; i < d.dis.modedb_len; i++) {
+ if (d.modedb[i].flag & FB_FLAG_PREFERRED) {
+ preferred_mode_xres = d.modedb[i].xres;
+ preferred_mode_yres = d.modedb[i].yres;
+ ALOGD("preferred mode %d: xres %u yres %u\n",
+ i, d.modedb[i].xres, d.modedb[i].yres);
+ break;
+ }
+ }
+ } else {
+ ALOGD("forced preferred mode xres %u yres %u\n",
+ preferred_mode_xres, preferred_mode_yres);
+ forced_preferred_mode = 1;
+ }
+
__u32 ext_fb_xres, ext_fb_yres;
for (i = 0; i < d.dis.modedb_len; i++) {
__u32 score = 0;
@@ -872,6 +1095,13 @@ static int omap4_hwc_set_best_hdmi_mode(omap4_hwc_device_t *hwc_dev, __u32 xres,
__u32 ext_width = d.dis.width_in_mm;
__u32 ext_height = d.dis.height_in_mm;
+ /* reject it because the hw says it can't actually use this mode */
+ if ((d.modedb[i].flag & FB_FLAG_HW_CAPABLE) == 0)
+ continue;
+
+ if (d.modedb[i].vmode & FB_VMODE_INTERLACED)
+ mode_yres /= 2;
+
if (d.modedb[i].flag & FB_FLAG_RATIO_4_3) {
ext_width = 4;
ext_height = 3;
@@ -888,23 +1118,40 @@ static int omap4_hwc_set_best_hdmi_mode(omap4_hwc_device_t *hwc_dev, __u32 xres,
/* we need to ensure that even TILER2D buffers can be scaled */
if (!d.modedb[i].pixclock ||
- d.modedb[i].vmode ||
+ (d.modedb[i].vmode & ~FB_VMODE_INTERLACED) ||
!omap4_hwc_can_scale(xres, yres, ext_fb_xres, ext_fb_yres,
1, &d.dis, &limits,
- 1000000000 / d.modedb[i].pixclock))
+ 1000000000 / d.modedb[i].pixclock, NULL))
continue;
/* prefer CEA modes */
if (d.modedb[i].flag & (FB_FLAG_RATIO_4_3 | FB_FLAG_RATIO_16_9))
- score = 1;
+ score += 1;
+
+ /* prefer modes that match the preferred mode's resolution */
+ if (d.modedb[i].xres == preferred_mode_xres &&
+ d.modedb[i].yres == preferred_mode_yres) {
+ if (forced_preferred_mode)
+ score += 10;
+ else
+ score += 1;
+ }
+
+        /* prefer the mode the kernel has hinted is the correct one */
+ if (!forced_preferred_mode && (d.modedb[i].flag & FB_FLAG_PREFERRED))
+ score += 1;
/* prefer the same mode as we use for mirroring to avoid mode change */
- score = (score << 1) | (i == ~ext->mirror_mode && ext->avoid_mode_change);
+ score = (score << 1) | (i == ~ext->mirror_mode && ext->avoid_mode_change);
score = add_scaling_score(score, xres, yres, 60, ext_fb_xres, ext_fb_yres,
mode_xres, mode_yres, d.modedb[i].refresh ? : 1);
- ALOGD("#%d: %dx%d %dHz", i, mode_xres, mode_yres, d.modedb[i].refresh);
+ ALOGD("#%d: %dx%d %dHz flag 0x%x vmode 0x%x, score 0x%x",
+ i, mode_xres, mode_yres,
+ d.modedb[i].refresh, d.modedb[i].flag, d.modedb[i].vmode,
+ score);
+
if (debug)
ALOGD(" score=0x%x adj.res=%dx%d", score, ext_fb_xres, ext_fb_yres);
if (best_score < score) {
@@ -917,12 +1164,27 @@ static int omap4_hwc_set_best_hdmi_mode(omap4_hwc_device_t *hwc_dev, __u32 xres,
}
}
if (~best) {
- struct dsscomp_setup_display_data sdis = { .ix = 1, };
+ struct dsscomp_setup_display_data sdis = { .ix = dis_ix, };
sdis.mode = d.dis.modedb[best];
ALOGD("picking #%d", best);
/* only reconfigure on change */
- if (ext->last_mode != ~best)
+ if (ext->last_mode != ~best) {
+ /* set a property that apps that care (e.g. YouTube) can use
+ * to determine whether or not to stream lower resolution
+ * videos when the hdmi mode is < 1080p.
+ * otherwise, they'd give us 1080p and we'd just scale it
+ * down to the hdmi mode res. UI apps are always going
+ * to draw at 1080p and we'll scale down because the
+ * system can't support dynamic dpi changes.
+ */
+ char display[PROPERTY_VALUE_MAX];
+ snprintf(display, sizeof(display), "%dx%d",
+ d.modedb[best].xres, d.modedb[best].yres);
+ ALOGD("setting property sys.display-size to %s", display);
+ property_set("sys.display-size", display);
+
ioctl(hwc_dev->dsscomp_fd, DSSCIOC_SETUP_DISPLAY, &sdis);
+ }
ext->last_mode = ~best;
} else {
__u32 ext_width = d.dis.width_in_mm;
@@ -932,10 +1194,9 @@ static int omap4_hwc_set_best_hdmi_mode(omap4_hwc_device_t *hwc_dev, __u32 xres,
get_max_dimensions(xres, yres, xpy, d.dis.timings.x_res, d.dis.timings.y_res,
ext_width, ext_height, &ext_fb_xres, &ext_fb_yres);
if (!d.dis.timings.pixel_clock ||
- d.dis.mgr.interlaced ||
!omap4_hwc_can_scale(xres, yres, ext_fb_xres, ext_fb_yres,
1, &d.dis, &limits,
- d.dis.timings.pixel_clock)) {
+ d.dis.timings.pixel_clock, NULL)) {
ALOGW("DSS scaler cannot support HDMI cloning");
return -1;
}
@@ -948,21 +1209,6 @@ static int omap4_hwc_set_best_hdmi_mode(omap4_hwc_device_t *hwc_dev, __u32 xres,
return 0;
}
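/*
 * Illustrative sketch (not part of this patch): the sys.display-size property
 * set above is plain "WxH" text, so a native consumer could read it back as
 * below. Only the property name comes from the hunk; the function and its
 * callers are hypothetical.
 */
#include <stdio.h>
#include <cutils/properties.h>

static int get_hdmi_mode_size(unsigned *w, unsigned *h)
{
    char value[PROPERTY_VALUE_MAX];

    /* empty default: the property only exists once an HDMI mode was picked */
    if (property_get("sys.display-size", value, "") <= 0)
        return -1;
    if (sscanf(value, "%ux%u", w, h) != 2)
        return -1;
    return 0;
}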
-struct counts {
- unsigned int possible_overlay_layers;
- unsigned int composited_layers;
- unsigned int scaled_layers;
- unsigned int RGB;
- unsigned int BGR;
- unsigned int NV12;
- unsigned int dockable;
- unsigned int protected;
-
- unsigned int max_hw_overlays;
- unsigned int max_scaling_overlays;
- unsigned int mem;
-};
-
static void gather_layer_statistics(omap4_hwc_device_t *hwc_dev, struct counts *num, hwc_display_contents_1_t *list)
{
unsigned int i;
@@ -978,7 +1224,7 @@ static void gather_layer_statistics(omap4_hwc_device_t *hwc_dev, struct counts *
num->possible_overlay_layers++;
/* NV12 layers can only be rendered on scaling overlays */
- if (scaled(layer) || is_NV12(handle))
+ if (scaled(layer) || is_NV12(handle) || hwc_dev->primary_transform)
num->scaled_layers++;
if (is_BGR(handle))
@@ -995,8 +1241,12 @@ static void gather_layer_statistics(omap4_hwc_device_t *hwc_dev, struct counts *
num->protected++;
num->mem += mem1d(handle);
+
+ if (is_large_rgb32_layer(layer))
+ num->large_rgb32_layers++;
}
}
+ hwc_dev->stats = *num;
}
static void decide_supported_cloning(omap4_hwc_device_t *hwc_dev, struct counts *num)
@@ -1029,6 +1279,7 @@ static void decide_supported_cloning(omap4_hwc_device_t *hwc_dev, struct counts
* otherwise, manage just from half the pipelines. NOTE: there is
* no danger of having used too many overlays for external display here.
*/
+
num->max_hw_overlays >>= 1;
nonscaling_ovls >>= 1;
hwc_dev->ext_ovls = MAX_HW_OVERLAYS - num->max_hw_overlays;
@@ -1049,16 +1300,24 @@ static void decide_supported_cloning(omap4_hwc_device_t *hwc_dev, struct counts
/* if mirroring, we are limited by both internal and external overlays. However,
ext_ovls is always <= MAX_HW_OVERLAYS / 2 <= max_hw_overlays */
- if (hwc_dev->ext_ovls && ext->current.enabled && !ext->current.docking)
+ if (!num->protected && hwc_dev->ext_ovls && ext->current.enabled && !ext->current.docking)
num->max_hw_overlays = hwc_dev->ext_ovls;
- num->max_scaling_overlays = num->max_hw_overlays - nonscaling_ovls;
+    /* If the FB is not the same resolution as the LCD, don't use the GFX pipeline */
+ if (hwc_dev->primary_transform) {
+ num->max_hw_overlays -= NUM_NONSCALING_OVERLAYS;
+ num->max_scaling_overlays = num->max_hw_overlays;
+ } else
+ num->max_scaling_overlays = num->max_hw_overlays - nonscaling_ovls;
}
+/* how many large layers can be composited by the DSS */
+static const unsigned int MAX_DSS_LARGE_LAYERS = 2;
+
static int can_dss_render_all(omap4_hwc_device_t *hwc_dev, struct counts *num)
{
omap4_hwc_ext_t *ext = &hwc_dev->ext;
- int on_tv = ext->on_tv && ext->current.enabled;
+ int on_tv = hwc_dev->on_tv || (ext->on_tv && ext->current.enabled);
int tform = ext->current.enabled && (ext->current.rotation || ext->current.hflip);
return !hwc_dev->force_sgx &&
@@ -1069,11 +1328,14 @@ static int can_dss_render_all(omap4_hwc_device_t *hwc_dev, struct counts *num)
num->scaled_layers <= num->max_scaling_overlays &&
num->NV12 <= num->max_scaling_overlays &&
/* fits into TILER slot */
- num->mem <= MAX_TILER_SLOT &&
+ num->mem <= limits.tiler1d_slot_size &&
/* we cannot clone non-NV12 transformed layers */
- (!tform || num->NV12 == num->possible_overlay_layers) &&
+ (!tform || (num->NV12 == num->possible_overlay_layers) ||
+ (num->NV12 && ext->current.docking)) &&
/* HDMI cannot display BGR */
- (num->BGR == 0 || (num->RGB == 0 && !on_tv) || !hwc_dev->flags_rgb_order);
+ (num->BGR == 0 || (num->RGB == 0 && !on_tv) || !hwc_dev->flags_rgb_order) &&
+ /* current hardware can only handle a limited number of 'large' RGB32 layers */
+ num->large_rgb32_layers <= MAX_DSS_LARGE_LAYERS;
}
static inline int can_dss_render_layer(omap4_hwc_device_t *hwc_dev,
@@ -1082,8 +1344,9 @@ static inline int can_dss_render_layer(omap4_hwc_device_t *hwc_dev,
IMG_native_handle_t *handle = (IMG_native_handle_t *)layer->handle;
omap4_hwc_ext_t *ext = &hwc_dev->ext;
- int on_tv = ext->on_tv && ext->current.enabled;
- int tform = ext->current.enabled && (ext->current.rotation || ext->current.hflip);
+ int cloning = ext->current.enabled && (!ext->current.docking || (handle!=NULL ? dockable(layer) : 0));
+ int on_tv = hwc_dev->on_tv || (ext->on_tv && cloning);
+ int tform = cloning && (ext->current.rotation || ext->current.hflip);
return omap4_hwc_is_valid_layer(hwc_dev, layer, handle) &&
/* cannot rotate non-NV12 layers on external display */
@@ -1103,7 +1366,7 @@ static inline int display_area(struct dss2_ovl_info *o)
}
static int clone_layer(omap4_hwc_device_t *hwc_dev, int ix) {
- struct dsscomp_setup_dispc_data *dsscomp = &hwc_dev->dsscomp_data;
+ struct dsscomp_setup_dispc_data *dsscomp = &hwc_dev->comp_data.dsscomp_data;
omap4_hwc_ext_t *ext = &hwc_dev->ext;
int ext_ovl_ix = dsscomp->num_ovls - hwc_dev->post2_layers;
struct dss2_ovl_info *o = &dsscomp->ovls[dsscomp->num_ovls];
@@ -1118,8 +1381,21 @@ static int clone_layer(omap4_hwc_device_t *hwc_dev, int ix) {
/* reserve overlays at end for other display */
o->cfg.ix = MAX_HW_OVERLAYS - 1 - ext_ovl_ix;
o->cfg.mgr_ix = 1;
- o->addressing = OMAP_DSS_BUFADDR_OVL_IX;
- o->ba = ix;
+    /*
+     * The assumption here is that overlay0 is the one attached to the FB,
+     * so this clone_layer call is for FB cloning (provided use_sgx is true).
+     */
+    /* For external displays whose transform is the same as that of the
+     * primary display, ion_handles will be NULL, so the logic below does
+     * not execute.
+     */
+ if (ix == 0 && hwc_dev->ion_handles[sync_id%2] && hwc_dev->use_sgx) {
+ o->addressing = OMAP_DSS_BUFADDR_ION;
+ o->ba = (int)hwc_dev->ion_handles[sync_id%2];
+ } else {
+ o->addressing = OMAP_DSS_BUFADDR_OVL_IX;
+ o->ba = ix;
+ }
/* use distinct z values (to simplify z-order checking) */
o->cfg.zorder += hwc_dev->post2_layers;
@@ -1130,7 +1406,7 @@ static int clone_layer(omap4_hwc_device_t *hwc_dev, int ix) {
}
static int clone_external_layer(omap4_hwc_device_t *hwc_dev, int ix) {
- struct dsscomp_setup_dispc_data *dsscomp = &hwc_dev->dsscomp_data;
+ struct dsscomp_setup_dispc_data *dsscomp = &hwc_dev->comp_data.dsscomp_data;
omap4_hwc_ext_t *ext = &hwc_dev->ext;
/* mirror only 1 external layer */
@@ -1214,6 +1490,172 @@ static void check_sync_fds(size_t numDisplays, hwc_display_contents_1_t** displa
}
}
+static void blit_reset(omap4_hwc_device_t *hwc_dev, int flags)
+{
+ hwc_dev->blit_flags = 0;
+ hwc_dev->blit_num = 0;
+ hwc_dev->post2_blit_buffers = 0;
+ hwc_dev->comp_data.blit_data.rgz_items = 0;
+
+ /* We want to maintain the rgz dirty region data if there are no geometry changes */
+ if (flags & HWC_GEOMETRY_CHANGED)
+ rgz_release(&grgz);
+}
+
+static int blit_layers(omap4_hwc_device_t *hwc_dev, hwc_display_contents_1_t *list, int bufoff)
+{
+ /* Do not blit if this frame will be composed entirely by the GPU */
+ if (!list || hwc_dev->force_sgx)
+ goto err_out;
+
+ int rgz_in_op;
+ int rgz_out_op;
+
+ switch (hwc_dev->blt_mode) {
+ case BLTMODE_PAINT:
+ rgz_in_op = RGZ_IN_HWCCHK;
+ rgz_out_op = RGZ_OUT_BVCMD_PAINT;
+ break;
+ case BLTMODE_REGION:
+ default:
+ rgz_in_op = RGZ_IN_HWC;
+ rgz_out_op = RGZ_OUT_BVCMD_REGION;
+ break;
+ }
+
+ rgz_in_params_t in = {
+ .op = rgz_in_op,
+ .data = {
+ .hwc = {
+ .dstgeom = &gscrngeom,
+ .layers = list->hwLayers,
+ .layerno = list->numHwLayers
+ }
+ }
+ };
+
+    /*
+     * If not all of the layers marked for the FRAMEBUFFER can be blitted
+     * (e.g. SKIP layers), do not blit at all
+     */
+ if (rgz_in(&in, &grgz) != RGZ_ALL)
+ goto err_out;
+
+ unsigned int i, count = 0;
+ for (i = 0; i < list->numHwLayers; i++) {
+ if (list->hwLayers[i].compositionType != HWC_OVERLAY) {
+ count++;
+ }
+ }
+
+ rgz_out_params_t out = {
+ .op = rgz_out_op,
+ .data = {
+ .bvc = {
+ .dstgeom = &gscrngeom,
+ .noblend = 0,
+ }
+ }
+ };
+
+ if (rgz_out(&grgz, &out) != 0) {
+ ALOGE("Failed generating blits");
+ goto err_out;
+ }
+
+    /* This is a special situation where the regionizer decided no blits are
+     * needed for this frame but there are blit buffers to synchronize with.
+     * This can happen only if the regionizer is enabled; otherwise it is
+     * likely a bug
+     */
+ if (rgz_out_op != RGZ_OUT_BVCMD_REGION && out.data.bvc.out_blits == 0 && out.data.bvc.out_nhndls > 0) {
+ ALOGE("Regionizer invalid output blit_num %d, post2_blit_buffers %d", out.data.bvc.out_blits, out.data.bvc.out_nhndls);
+ goto err_out;
+ }
+
+ hwc_dev->blit_flags |= HWC_BLT_FLAG_USE_FB;
+ hwc_dev->blit_num = out.data.bvc.out_blits;
+ hwc_dev->post2_blit_buffers = out.data.bvc.out_nhndls;
+ for (i = 0; i < hwc_dev->post2_blit_buffers; i++) {
+ //ALOGI("blit buffers[%d] = %p", bufoff, out.data.bvc.out_hndls[i]);
+ hwc_dev->buffers[bufoff++] = out.data.bvc.out_hndls[i];
+ }
+
+ struct rgz_blt_entry *res_blit_ops = (struct rgz_blt_entry *) out.data.bvc.cmdp;
+ memcpy(hwc_dev->comp_data.blit_data.rgz_blts, res_blit_ops, sizeof(*res_blit_ops) * out.data.bvc.cmdlen);
+ ALOGI_IF(debugblt, "blt struct sz %d", sizeof(*res_blit_ops) * out.data.bvc.cmdlen);
+ ALOGE_IF(hwc_dev->blit_num != out.data.bvc.cmdlen,"blit_num != out.data.bvc.cmdlen, %d != %d", hwc_dev->blit_num, out.data.bvc.cmdlen);
+
+ /* all layers will be rendered without SGX help either via DSS or blitter */
+ for (i = 0; i < list->numHwLayers; i++) {
+ if (list->hwLayers[i].compositionType != HWC_OVERLAY) {
+ list->hwLayers[i].compositionType = HWC_OVERLAY;
+ //ALOGI("blitting layer %d", i);
+ list->hwLayers[i].hints &= ~HWC_HINT_TRIPLE_BUFFER;
+ }
+ list->hwLayers[i].hints &= ~HWC_HINT_CLEAR_FB;
+ }
+ return 1;
+
+err_out:
+ rgz_release(&grgz);
+ return 0;
+}
+
+void debug_post2(omap4_hwc_device_t *hwc_dev, int nbufs)
+{
+ if (!debugpost2)
+ return;
+ struct dsscomp_setup_dispc_data *dsscomp = &hwc_dev->comp_data.dsscomp_data;
+ int i;
+ for (i=0; i<nbufs; i++) {
+ ALOGI("buf[%d] hndl %p", i, hwc_dev->buffers[i]);
+ }
+ for (i=0; i < dsscomp->num_ovls; i++) {
+ ALOGI("ovl[%d] ba %d", i, dsscomp->ovls[i].ba);
+ }
+}
+
+static int free_tiler2d_buffers(omap4_hwc_device_t *hwc_dev)
+{
+ int i;
+
+ for (i = 0 ; i < NUM_EXT_DISPLAY_BACK_BUFFERS; i++) {
+ ion_free(hwc_dev->ion_fd, hwc_dev->ion_handles[i]);
+ hwc_dev->ion_handles[i] = NULL;
+ }
+ return 0;
+}
+
+static int allocate_tiler2d_buffers(omap4_hwc_device_t *hwc_dev)
+{
+ int ret, i;
+ size_t stride;
+
+ if (hwc_dev->ion_fd < 0) {
+ ALOGE("No ion fd, hence can't allocate tiler2d buffers");
+ return -1;
+ }
+
+ for (i = 0; i < NUM_EXT_DISPLAY_BACK_BUFFERS; i++) {
+ if (hwc_dev->ion_handles[i])
+ return 0;
+ }
+
+ for (i = 0 ; i < NUM_EXT_DISPLAY_BACK_BUFFERS; i++) {
+ ret = ion_alloc_tiler(hwc_dev->ion_fd, hwc_dev->fb_dev->base.width, hwc_dev->fb_dev->base.height,
+ TILER_PIXEL_FMT_32BIT, 0, &hwc_dev->ion_handles[i], &stride);
+ if (ret)
+ goto handle_error;
+
+ ALOGI("ion handle[%d][%p]", i, hwc_dev->ion_handles[i]);
+ }
+ return 0;
+
+handle_error:
+ free_tiler2d_buffers(hwc_dev);
+ return -1;
+}
+
static int omap4_hwc_prepare(struct hwc_composer_device_1 *dev, size_t numDisplays,
hwc_display_contents_1_t** displays)
{
@@ -1223,7 +1665,7 @@ static int omap4_hwc_prepare(struct hwc_composer_device_1 *dev, size_t numDispla
hwc_display_contents_1_t* list = displays[0]; // ignore displays beyond the first
omap4_hwc_device_t *hwc_dev = (omap4_hwc_device_t *)dev;
- struct dsscomp_setup_dispc_data *dsscomp = &hwc_dev->dsscomp_data;
+ struct dsscomp_setup_dispc_data *dsscomp = &hwc_dev->comp_data.dsscomp_data;
struct counts num = { .composited_layers = list ? list->numHwLayers : 0 };
unsigned int i, ix;
@@ -1235,10 +1677,6 @@ static int omap4_hwc_prepare(struct hwc_composer_device_1 *dev, size_t numDispla
decide_supported_cloning(hwc_dev, &num);
- /* Disable the forced SGX rendering if there is only one layer */
- if (hwc_dev->force_sgx && num.composited_layers <= 1)
- hwc_dev->force_sgx = 0;
-
/* phase 3 logic */
if (can_dss_render_all(hwc_dev, &num)) {
/* All layers can be handled by the DSS -- don't use SGX for composition */
@@ -1251,16 +1689,37 @@ static int omap4_hwc_prepare(struct hwc_composer_device_1 *dev, size_t numDispla
}
/* setup pipes */
- dsscomp->num_ovls = hwc_dev->use_sgx;
int z = 0;
int fb_z = -1;
int scaled_gfx = 0;
int ix_docking = -1;
+ unsigned int big_layers = 0;
+
+ int blit_all = 0;
+ blit_reset(hwc_dev, list ? list->flags : 0);
+
+    /* If the SGX is used or we are going to blit something, we need a
+     * framebuffer and a DSS pipe
+     */
+ int needs_fb = hwc_dev->use_sgx;
+
+ if (hwc_dev->blt_policy == BLTPOLICY_ALL) {
+ /* Check if we can blit everything */
+ blit_all = blit_layers(hwc_dev, list, 0);
+ if (blit_all) {
+ needs_fb = 1;
+ hwc_dev->use_sgx = 0;
+ }
+ }
+
+    /* If a framebuffer is needed, begin using VID1 for DSS overlay layers;
+     * we need GFX for the FB
+     */
+ dsscomp->num_ovls = needs_fb ? 1 /*VID1*/ : 0 /*GFX*/;
/* set up if DSS layers */
unsigned int mem_used = 0;
- hwc_dev->ovls_blending = 0;
- for (i = 0; list && i < list->numHwLayers; i++) {
+ for (i = 0; list && i < list->numHwLayers && !blit_all; i++) {
hwc_layer_1_t *layer = &list->hwLayers[i];
IMG_native_handle_t *handle = (IMG_native_handle_t *)layer->handle;
@@ -1269,21 +1728,27 @@ static int omap4_hwc_prepare(struct hwc_composer_device_1 *dev, size_t numDispla
(!hwc_dev->force_sgx ||
/* render protected and dockable layers via DSS */
is_protected(layer) ||
+ is_upscaled_NV12(hwc_dev, layer) ||
(hwc_dev->ext.current.docking && hwc_dev->ext.current.enabled && dockable(layer))) &&
- mem_used + mem1d(handle) < MAX_TILER_SLOT &&
+ mem_used + mem1d(handle) <= limits.tiler1d_slot_size &&
/* can't have a transparent overlay in the middle of the framebuffer stack */
- !(is_BLENDED(layer) && fb_z >= 0)) {
+ !(is_BLENDED(layer) && fb_z >= 0) &&
+ /* current hardware can only handle a limited number of 'large' RGB32 layers */
+ !(is_large_rgb32_layer(layer) && big_layers >= MAX_DSS_LARGE_LAYERS)) {
/* render via DSS overlay */
mem_used += mem1d(handle);
layer->compositionType = HWC_OVERLAY;
+            /*
+             * This hint is not used in vanilla ICS, but may be in JellyBean;
+             * it is useful to distinguish between blits and true overlays
+             */
+ layer->hints |= HWC_HINT_TRIPLE_BUFFER;
/* clear FB above all opaque layers if rendering via SGX */
if (hwc_dev->use_sgx && !is_BLENDED(layer))
layer->hints |= HWC_HINT_CLEAR_FB;
- /* see if any of the (non-backmost) overlays are doing blending */
- else if (is_BLENDED(layer) && i > 0)
- hwc_dev->ovls_blending = 1;
hwc_dev->buffers[dsscomp->num_ovls] = layer->handle;
@@ -1295,12 +1760,12 @@ static int omap4_hwc_prepare(struct hwc_composer_device_1 *dev, size_t numDispla
handle->iWidth,
handle->iHeight);
- dsscomp->ovls[dsscomp->num_ovls].cfg.ix = dsscomp->num_ovls;
+ dsscomp->ovls[dsscomp->num_ovls].cfg.ix = dsscomp->num_ovls + hwc_dev->primary_transform;
dsscomp->ovls[dsscomp->num_ovls].addressing = OMAP_DSS_BUFADDR_LAYER_IX;
dsscomp->ovls[dsscomp->num_ovls].ba = dsscomp->num_ovls;
/* ensure GFX layer is never scaled */
- if (dsscomp->num_ovls == 0) {
+ if ((dsscomp->num_ovls == 0) && (!hwc_dev->primary_transform)) {
scaled_gfx = scaled(layer) || is_NV12(handle);
} else if (scaled_gfx && !scaled(layer) && !is_NV12(handle)) {
/* swap GFX layer with this one */
@@ -1317,6 +1782,11 @@ static int omap4_hwc_prepare(struct hwc_composer_device_1 *dev, size_t numDispla
dsscomp->num_ovls++;
z++;
+
+ /* record whether or not this was a 'big' RGB32 layer */
+ if (is_large_rgb32_layer(layer)) {
+ big_layers++;
+ }
} else if (hwc_dev->use_sgx) {
if (fb_z < 0) {
/* NOTE: we are not handling transparent cutout for now */
@@ -1334,15 +1804,31 @@ static int omap4_hwc_prepare(struct hwc_composer_device_1 *dev, size_t numDispla
if (scaled_gfx)
dsscomp->ovls[0].cfg.ix = dsscomp->num_ovls;
- if (hwc_dev->use_sgx) {
+ if (hwc_dev->blt_policy == BLTPOLICY_DEFAULT) {
+ if (hwc_dev->use_sgx) {
+ if (blit_layers(hwc_dev, list, dsscomp->num_ovls == 1 ? 0 : dsscomp->num_ovls)) {
+ hwc_dev->use_sgx = 0;
+ }
+ }
+ }
+
+    /* If the SGX is not used and there is blit data, we need a framebuffer
+     * and a properly configured DSS pipe for it
+     */
+ if (needs_fb) {
/* assign a z-layer for fb */
if (fb_z < 0) {
- if (num.composited_layers)
+            if (hwc_dev->blt_policy == BLTPOLICY_DISABLED && num.composited_layers)
ALOGE("**** should have assigned z-layer for fb");
fb_z = z++;
}
-
- hwc_dev->buffers[0] = NULL;
+    /*
+     * This is needed because if we blit everything we would lose the
+     * handle of the first layer
+     */
+ if (hwc_dev->use_sgx) {
+ hwc_dev->buffers[0] = NULL;
+ }
omap4_hwc_setup_layer_base(&dsscomp->ovls[0].cfg, fb_z,
hwc_dev->fb_dev->base.format,
1, /* FB is always premultiplied */
@@ -1351,6 +1837,7 @@ static int omap4_hwc_prepare(struct hwc_composer_device_1 *dev, size_t numDispla
dsscomp->ovls[0].cfg.pre_mult_alpha = 1;
dsscomp->ovls[0].addressing = OMAP_DSS_BUFADDR_LAYER_IX;
dsscomp->ovls[0].ba = 0;
+ dsscomp->ovls[0].cfg.ix = hwc_dev->primary_transform;
}
/* mirror layers */
@@ -1387,6 +1874,14 @@ static int omap4_hwc_prepare(struct hwc_composer_device_1 *dev, size_t numDispla
}
}
}
+
+ /* Apply transform for primary display */
+ if (hwc_dev->primary_transform)
+ for (i = 0; i < dsscomp->num_ovls; i++) {
+ if(dsscomp->ovls[i].cfg.mgr_ix == 0)
+ omap4_hwc_adjust_primary_display_layer(hwc_dev, &dsscomp->ovls[i]);
+ }
+
ext->last = ext->current;
if (z != dsscomp->num_ovls || dsscomp->num_ovls > MAX_HW_OVERLAYS)
@@ -1416,6 +1911,14 @@ static int omap4_hwc_prepare(struct hwc_composer_device_1 *dev, size_t numDispla
hwc_dev->ext_ovls = dsscomp->num_ovls - hwc_dev->post2_layers;
}
+    /*
+     * While the display mode is being changed, drop compositions to the
+     * display
+     */
+ if (ext->last_mode == 0 && hwc_dev->on_tv) {
+ dsscomp->num_ovls = 0;
+ }
+
if (debug) {
ALOGD("prepare (%d) - %s (comp=%d, poss=%d/%d scaled, RGB=%d,BGR=%d,NV12=%d) (ext=%s%s%ddeg%s %dex/%dmx (last %dex,%din)\n",
dsscomp->sync_id,
@@ -1477,15 +1980,20 @@ static int omap4_hwc_set(struct hwc_composer_device_1 *dev,
sur = list->sur;
}
omap4_hwc_device_t *hwc_dev = (omap4_hwc_device_t *)dev;
- struct dsscomp_setup_dispc_data *dsscomp = &hwc_dev->dsscomp_data;
+ struct dsscomp_setup_dispc_data *dsscomp = &hwc_dev->comp_data.dsscomp_data;
int err = 0;
int invalidate;
pthread_mutex_lock(&hwc_dev->lock);
- omap4_hwc_reset_screen(hwc_dev);
+    /* disable resetting the screen on the first boot for devices
+     * with HDMI as the primary display.
+     */
+ if (!hwc_dev->on_tv)
+ omap4_hwc_reset_screen(hwc_dev);
- invalidate = hwc_dev->ext_ovls_wanted && !hwc_dev->ext_ovls;
+ invalidate = hwc_dev->ext_ovls_wanted && (hwc_dev->ext_ovls < hwc_dev->ext_ovls_wanted) &&
+ (hwc_dev->stats.protected || !hwc_dev->ext_ovls);
if (debug)
dump_set_info(hwc_dev, list);
@@ -1506,14 +2014,57 @@ static int omap4_hwc_set(struct hwc_composer_device_1 *dev,
//dump_dsscomp(dsscomp);
// signal the event thread that a post has happened
- write(hwc_dev->pipe_fds[1], "s", 1);
+ wakeup_hdmi_thread(hwc_dev);
if (hwc_dev->force_sgx > 0)
hwc_dev->force_sgx--;
+ hwc_dev->comp_data.blit_data.rgz_flags = hwc_dev->blit_flags;
+ hwc_dev->comp_data.blit_data.rgz_items = hwc_dev->blit_num;
+ int omaplfb_comp_data_sz = sizeof(hwc_dev->comp_data) +
+ (hwc_dev->comp_data.blit_data.rgz_items * sizeof(struct rgz_blt_entry));
+
+
+ unsigned int nbufs = hwc_dev->post2_layers;
+ if (hwc_dev->post2_blit_buffers) {
+        /*
+         * We don't want to pass a NULL entry to Post2, so we need to fix up
+         * the buffer handle array and overlay indexes to account for this
+         */
+ nbufs += hwc_dev->post2_blit_buffers - 1;
+
+ if (hwc_dev->post2_layers > 1) {
+ unsigned int i, j;
+ for (i = 0; i < nbufs; i++) {
+ hwc_dev->buffers[i] = hwc_dev->buffers[i+1];
+ }
+ for (i = 1, j= 1; j < hwc_dev->post2_layers; i++, j++) {
+ dsscomp->ovls[j].ba = i;
+ }
+ }
+ }
+ ALOGI_IF(debugblt && hwc_dev->blt_policy != BLTPOLICY_DISABLED,
+ "Post2, blits %d, ovl_buffers %d, blit_buffers %d sgx %d",
+ hwc_dev->blit_num, hwc_dev->post2_layers, hwc_dev->post2_blit_buffers,
+ hwc_dev->use_sgx);
+
+ debug_post2(hwc_dev, nbufs);
err = hwc_dev->fb_dev->Post2((framebuffer_device_t *)hwc_dev->fb_dev,
hwc_dev->buffers,
- hwc_dev->post2_layers,
- dsscomp, sizeof(*dsscomp));
+ nbufs,
+ dsscomp, omaplfb_comp_data_sz);
+ showfps();
+
+#if 0
+ if (!hwc_dev->use_sgx) {
+ __u32 crt = 0;
+ int err2 = ioctl(hwc_dev->fb_fd, FBIO_WAITFORVSYNC, &crt);
+ if (err2) {
+ ALOGE("failed to wait for vsync (%d)", errno);
+ err = err ? : -errno;
+ }
+ }
+#endif
}
hwc_dev->last_ext_ovls = hwc_dev->ext_ovls;
hwc_dev->last_int_ovls = hwc_dev->post2_layers;
@@ -1525,7 +2076,7 @@ static int omap4_hwc_set(struct hwc_composer_device_1 *dev,
err_out:
pthread_mutex_unlock(&hwc_dev->lock);
- if (invalidate)
+ if (invalidate && hwc_dev->procs && hwc_dev->procs->invalidate)
hwc_dev->procs->invalidate(hwc_dev->procs);
return err;
@@ -1534,7 +2085,7 @@ err_out:
static void omap4_hwc_dump(struct hwc_composer_device_1 *dev, char *buff, int buff_len)
{
omap4_hwc_device_t *hwc_dev = (omap4_hwc_device_t *)dev;
- struct dsscomp_setup_dispc_data *dsscomp = &hwc_dev->dsscomp_data;
+ struct dsscomp_setup_dispc_data *dsscomp = &hwc_dev->comp_data.dsscomp_data;
struct dump_buf log = {
.buf = buff,
.buf_len = buff_len,
@@ -1548,17 +2099,22 @@ static void omap4_hwc_dump(struct hwc_composer_device_1 *dev, char *buff, int bu
struct dss2_ovl_cfg *cfg = &dsscomp->ovls[i].cfg;
dump_printf(&log, " layer %d:\n", i);
- dump_printf(&log, " enabled: %s\n",
- cfg->enabled ? "true" : "false");
- dump_printf(&log, " buff: %p %dx%d stride: %d\n",
- hwc_dev->buffers[i], cfg->width, cfg->height, cfg->stride);
- dump_printf(&log, " src: (%d,%d) %dx%d\n",
- cfg->crop.x, cfg->crop.y, cfg->crop.w, cfg->crop.h);
- dump_printf(&log, " dst: (%d,%d) %dx%d\n",
- cfg->win.x, cfg->win.y, cfg->win.w, cfg->win.h);
- dump_printf(&log, " ix: %d\n", cfg->ix);
- dump_printf(&log, " zorder: %d\n\n", cfg->zorder);
- }
+ dump_printf(&log, " enabled:%s buff:%p %dx%d stride:%d\n",
+ cfg->enabled ? "true" : "false", hwc_dev->buffers[i],
+ cfg->width, cfg->height, cfg->stride);
+ dump_printf(&log, " src:(%d,%d) %dx%d dst:(%d,%d) %dx%d ix:%d zorder:%d\n",
+ cfg->crop.x, cfg->crop.y, cfg->crop.w, cfg->crop.h,
+ cfg->win.x, cfg->win.y, cfg->win.w, cfg->win.h,
+ cfg->ix, cfg->zorder);
+ }
+
+ if (hwc_dev->blt_policy != BLTPOLICY_DISABLED) {
+ dump_printf(&log, " bltpolicy: %s, bltmode: %s\n",
+ hwc_dev->blt_policy == BLTPOLICY_DEFAULT ? "default" :
+ hwc_dev->blt_policy == BLTPOLICY_ALL ? "all" : "unknown",
+ hwc_dev->blt_mode == BLTMODE_PAINT ? "paint" : "regionize");
+ }
+ dump_printf(&log, "\n");
}
static void free_png_image(omap4_hwc_device_t *hwc_dev, struct omap4_hwc_img *img)
@@ -1688,8 +2244,12 @@ static int omap4_hwc_device_close(hw_device_t* device)
close(hwc_dev->hdmi_fb_fd);
if (hwc_dev->fb_fd >= 0)
close(hwc_dev->fb_fd);
+ if (hwc_dev->ion_fd >= 0)
+ ion_close(hwc_dev->ion_fd);
+
/* pthread will get killed when parent process exits */
pthread_mutex_destroy(&hwc_dev->lock);
+ pthread_mutex_destroy(&hwc_dev->vsync_lock);
free(hwc_dev);
}
@@ -1722,11 +2282,67 @@ err_out:
return err;
}
+static void set_primary_display_transform_matrix(omap4_hwc_device_t *hwc_dev)
+{
+ /* create primary display translation matrix */
+ hwc_dev->fb_dis.ix = 0;/*Default display*/
+
+ int ret = ioctl(hwc_dev->dsscomp_fd, DSSCIOC_QUERY_DISPLAY, &hwc_dev->fb_dis);
+ if (ret)
+ ALOGE("failed to get display info (%d): %m", errno);
+
+ int lcd_w = hwc_dev->fb_dis.timings.x_res;
+ int lcd_h = hwc_dev->fb_dis.timings.y_res;
+ int orig_w = hwc_dev->fb_dev->base.width;
+ int orig_h = hwc_dev->fb_dev->base.height;
+ hwc_rect_t region = {.left = 0, .top = 0, .right = orig_w, .bottom = orig_h};
+ hwc_dev->primary_region = region;
+ hwc_dev->primary_rotation = ((lcd_w > lcd_h) ^ (orig_w > orig_h)) ? 1 : 0;
+ hwc_dev->primary_transform = ((lcd_w != orig_w)||(lcd_h != orig_h)) ? 1 : 0;
+
+ ALOGI("transforming FB (%dx%d) => (%dx%d) rot%d", orig_w, orig_h, lcd_w, lcd_h, hwc_dev->primary_rotation);
+
+ /* reorientation matrix is:
+ m = (center-from-target-center) * (scale-to-target) * (mirror) * (rotate) * (center-to-original-center) */
+
+ memcpy(hwc_dev->primary_m, m_unit, sizeof(m_unit));
+ m_translate(hwc_dev->primary_m, -(orig_w >> 1), -(orig_h >> 1));
+ m_rotate(hwc_dev->primary_m, hwc_dev->primary_rotation);
+ if (hwc_dev->primary_rotation & 1)
+ swap(orig_w, orig_h);
+ m_scale(hwc_dev->primary_m, orig_w, lcd_w, orig_h, lcd_h);
+ m_translate(hwc_dev->primary_m, lcd_w >> 1, lcd_h >> 1);
+}
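/*
 * Illustrative sketch (not part of this patch): the reorientation pipeline
 * above for one concrete case, a 1920x1080 framebuffer shown on a 1280x720
 * panel (no rotation, since both are landscape). The tiny helpers below are
 * hypothetical stand-ins for the driver's m_translate()/m_scale() utilities
 * and only cover what this example needs.
 */
#include <stdio.h>

static void xlate(float m[2][3], float dx, float dy)
{
    m[0][2] += dx;
    m[1][2] += dy;
}

static void scale(float m[2][3], float from_w, float to_w, float from_h, float to_h)
{
    int i;
    for (i = 0; i < 3; i++) {
        m[0][i] = m[0][i] * to_w / from_w;
        m[1][i] = m[1][i] * to_h / from_h;
    }
}

int main(void)
{
    float m[2][3] = { { 1, 0, 0 }, { 0, 1, 0 } };       /* unit matrix */

    xlate(m, -(1920 >> 1), -(1080 >> 1));   /* FB centre to the origin     */
    scale(m, 1920, 1280, 1080, 720);        /* scale FB size to panel size */
    xlate(m, 1280 >> 1, 720 >> 1);          /* origin to the panel centre  */

    /* FB corners land on the panel corners */
    printf("(0,0)       -> (%g,%g)\n", m[0][2], m[1][2]);
    printf("(1920,1080) -> (%g,%g)\n",
           m[0][0] * 1920 + m[0][2], m[1][1] * 1080 + m[1][2]);
    return 0;
}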
+
+
static void handle_hotplug(omap4_hwc_device_t *hwc_dev)
{
omap4_hwc_ext_t *ext = &hwc_dev->ext;
__u8 state = ext->hdmi_state;
+ /* Ignore external HDMI logic if the primary display is HDMI */
+ if (hwc_dev->on_tv) {
+ ALOGI("Primary display is HDMI - skip clone/dock logic");
+
+ if (state) {
+ __u32 xres = hwc_dev->fb_dev->base.width;
+ __u32 yres = hwc_dev->fb_dev->base.height;
+ if (omap4_hwc_set_best_hdmi_mode(hwc_dev, xres, yres, ext->lcd_xpy)) {
+ ALOGE("Failed to set HDMI mode");
+ }
+ set_primary_display_transform_matrix(hwc_dev);
+
+ ioctl(hwc_dev->fb_fd, FBIOBLANK, FB_BLANK_UNBLANK);
+
+ if (hwc_dev->procs && hwc_dev->procs->invalidate) {
+ hwc_dev->procs->invalidate(hwc_dev->procs);
+ }
+ } else
+ ext->last_mode = 0;
+
+ return;
+ }
+
pthread_mutex_lock(&hwc_dev->lock);
ext->dock.enabled = ext->mirror.enabled = 0;
if (state) {
@@ -1744,7 +2360,7 @@ static void handle_hotplug(omap4_hwc_device_t *hwc_dev)
ext->dock.rotation = atoi(value) & EXT_ROTATION;
ext->dock.hflip = (atoi(value) & EXT_HFLIP) > 0;
ext->dock.docking = 1;
- property_get("persist.hwc.mirroring.transform", value, hwc_dev->fb_dev->base.height > hwc_dev->fb_dev->base.width ? "3" : "0");
+ property_get("persist.hwc.mirroring.transform", value, hwc_dev->fb_dis.timings.y_res > hwc_dev->fb_dis.timings.x_res ? "3" : "0");
ext->mirror.rotation = atoi(value) & EXT_ROTATION;
ext->mirror.hflip = (atoi(value) & EXT_HFLIP) > 0;
ext->mirror.docking = 0;
@@ -1771,8 +2387,19 @@ static void handle_hotplug(omap4_hwc_device_t *hwc_dev)
} else
ext->mirror.enabled = 0;
}
+        /* Allocate backup buffers for FB rotation.
+         * This is required only if the FB transform is different from that
+         * of the external display and the FB is not in TILER2D space
+         */
+ if (ext->mirror.rotation && (limits.fbmem_type != DSSCOMP_FBMEM_TILER2D))
+ allocate_tiler2d_buffers(hwc_dev);
+
} else {
ext->last_mode = 0;
+ if (ext->mirror.rotation && (limits.fbmem_type != DSSCOMP_FBMEM_TILER2D)) {
+ /* free tiler 2D buffer on detach */
+ free_tiler2d_buffers(hwc_dev);
+ }
}
ALOGI("external display changed (state=%d, mirror={%s tform=%ddeg%s}, dock={%s tform=%ddeg%s%s}, tv=%d", state,
ext->mirror.enabled ? "enabled" : "disabled",
@@ -1789,8 +2416,8 @@ static void handle_hotplug(omap4_hwc_device_t *hwc_dev)
/* hwc_dev->procs is set right after the device is opened, but there is
* still a race condition where a hotplug event might occur after the open
* but before the procs are registered. */
- if (hwc_dev->procs)
- hwc_dev->procs->invalidate(hwc_dev->procs);
+ if (hwc_dev->procs && hwc_dev->procs->invalidate)
+ hwc_dev->procs->invalidate(hwc_dev->procs);
}
static void handle_uevents(omap4_hwc_device_t *hwc_dev, const char *buff, int len)
@@ -1826,13 +2453,31 @@ static void handle_uevents(omap4_hwc_device_t *hwc_dev, const char *buff, int le
}
if (vsync) {
- if (hwc_dev->procs)
- hwc_dev->procs->vsync(hwc_dev->procs, 0, timestamp);
+ fire_vsync_event(hwc_dev, timestamp);
} else {
- if (dock)
+ if (dock) {
hwc_dev->ext.force_dock = state == 1;
- else
- hwc_dev->ext.hdmi_state = state == 1;
+ } else {
+ /* If the primary display is HDMI, VSYNC is enabled, and HDMI's plug
+ * state has just gone from 1->0, then we need to be sure to start
+ * faking the VSYNC events.
+ */
+ if (hwc_dev->on_tv) {
+ int new_state, state_change;
+
+ pthread_mutex_lock(&hwc_dev->vsync_lock);
+
+ new_state = state == 1;
+ state_change = (new_state != hwc_dev->ext.hdmi_state);
+ hwc_dev->ext.hdmi_state = new_state;
+ if (state_change && !new_state)
+ wakeup_hdmi_thread(hwc_dev);
+
+ pthread_mutex_unlock(&hwc_dev->vsync_lock);
+ } else {
+ hwc_dev->ext.hdmi_state = state == 1;
+ }
+ }
handle_hotplug(hwc_dev);
}
}
@@ -1852,7 +2497,7 @@ static void *omap4_hwc_hdmi_thread(void *data)
fds[0].fd = uevent_get_fd();
fds[0].events = POLLIN;
- fds[1].fd = hwc_dev->pipe_fds[0];
+ fds[1].fd = hwc_dev->wakeup_evt;
fds[1].events = POLLIN;
timeout = hwc_dev->idle ? hwc_dev->idle : -1;
@@ -1860,13 +2505,61 @@ static void *omap4_hwc_hdmi_thread(void *data)
memset(uevent_desc, 0, sizeof(uevent_desc));
do {
- err = poll(fds, hwc_dev->idle ? 2 : 1, timeout);
+ uint64_t idle_wakeup = (uint64_t)(-1);
+ uint64_t vsync_wakeup = (uint64_t)(-1);
+ uint64_t now = vsync_clock_now();
+ uint64_t effective_wakeup;
+ int effective_timeout;
+
+ if (timeout >= 0)
+ idle_wakeup = now + (((uint64_t)timeout) * 1000000);
+
+ if (hwc_dev->on_tv) {
+ pthread_mutex_lock(&hwc_dev->vsync_lock);
+
+ if (!hwc_dev->ext.hdmi_state && hwc_dev->vsync_enabled) {
+ vsync_wakeup = hwc_dev->last_vsync_time_valid
+ ? hwc_dev->last_vsync_time
+ : now;
+
+ vsync_wakeup += hwc_dev->fake_vsync_period;
+
+ if (vsync_wakeup < now)
+ vsync_wakeup = now;
+ }
+
+ pthread_mutex_unlock(&hwc_dev->vsync_lock);
+ }
+
+ effective_wakeup = idle_wakeup < vsync_wakeup
+ ? idle_wakeup
+ : vsync_wakeup;
+ if (effective_wakeup == (uint64_t)(-1))
+ effective_timeout = -1;
+ else if (effective_wakeup <= now)
+ effective_timeout = 0;
+ else
+ effective_timeout = (int)((effective_wakeup - now + 999999) / 1000000);
+
+ if (effective_timeout)
+ err = poll(fds, 2, effective_timeout);
+ else
+ err = 0;
+
+ now = vsync_clock_now();
if (err == 0) {
- if (hwc_dev->idle) {
+ int fired = 0;
+
+ if (now >= vsync_wakeup) {
+ fire_vsync_event(hwc_dev, vsync_wakeup);
+ fired = 1;
+ }
+
+ if (hwc_dev->idle && (now >= idle_wakeup)) {
if (hwc_dev->procs) {
pthread_mutex_lock(&hwc_dev->lock);
- invalidate = !hwc_dev->force_sgx && hwc_dev->ovls_blending;
+ invalidate = hwc_dev->last_int_ovls > 1 && !hwc_dev->force_sgx;
if (invalidate) {
hwc_dev->force_sgx = 2;
}
@@ -1878,8 +2571,11 @@ static void *omap4_hwc_hdmi_thread(void *data)
}
}
- continue;
+ fired = 1;
}
+
+ if (fired)
+ continue;
}
if (err == -1) {
@@ -1888,9 +2584,11 @@ static void *omap4_hwc_hdmi_thread(void *data)
continue;
}
- if (hwc_dev->idle && fds[1].revents & POLLIN) {
- char c;
- read(hwc_dev->pipe_fds[0], &c, 1);
+ if (fds[1].revents & POLLIN) {
+ uint64_t tmp;
+
+ read(hwc_dev->wakeup_evt, &tmp, sizeof(tmp));
+
if (!hwc_dev->force_sgx)
timeout = hwc_dev->idle ? hwc_dev->idle : -1;
}
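/*
 * Illustrative sketch (not part of this patch): the poll-timeout arithmetic
 * used by the fake-vsync path above, with one concrete case. The helper and
 * its inputs are hypothetical; the driver takes them from vsync_clock_now()
 * and fake_vsync_period (one second / 60).
 */
#include <stdio.h>
#include <stdint.h>

static int deadline_to_poll_timeout(uint64_t now_ns, uint64_t wakeup_ns)
{
    if (wakeup_ns == (uint64_t)(-1))
        return -1;              /* no deadline: block in poll() indefinitely */
    if (wakeup_ns <= now_ns)
        return 0;               /* already due: poll() returns immediately   */
    return (int)((wakeup_ns - now_ns + 999999) / 1000000);  /* round up, ms  */
}

int main(void)
{
    const uint64_t period = 1000000000ull / 60;   /* ~16.67 ms             */
    uint64_t last_vsync = 0;
    uint64_t now = 5000000;                       /* 5 ms after last vsync */

    /* 16666666 - 5000000 = 11666666 ns, rounded up to 12 ms */
    printf("timeout = %d ms\n", deadline_to_poll_timeout(now, last_vsync + period));
    return 0;
}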
@@ -1945,7 +2643,31 @@ static int omap4_hwc_event_control(struct hwc_composer_device_1* dev,
int val = !!enabled;
int err;
- err = ioctl(hwc_dev->fb_fd, OMAPFB_ENABLEVSYNC, &val);
+    /* If the primary display is HDMI, then we need to be sure to fake a
+     * stream of vsync events if vsync is enabled but HDMI happens to be
+     * disconnected.
+     */
+ if (hwc_dev->on_tv) {
+ pthread_mutex_lock(&hwc_dev->vsync_lock);
+
+ if (!val)
+ hwc_dev->last_vsync_time_valid = 0;
+
+ /* If VSYNC is enabled, but HDMI is not actually plugged in, we need
+ * to fake it. Poke the work thread to make sure it is taking care
+ * of things.
+ */
+ if (!hwc_dev->ext.hdmi_state && !hwc_dev->vsync_enabled && val)
+ wakeup_hdmi_thread(hwc_dev);
+
+ hwc_dev->vsync_enabled = val;
+
+ err = ioctl(hwc_dev->fb_fd, OMAPFB_ENABLEVSYNC, &val);
+ pthread_mutex_unlock(&hwc_dev->vsync_lock);
+ } else {
+ err = ioctl(hwc_dev->fb_fd, OMAPFB_ENABLEVSYNC, &val);
+ }
+
if (err < 0)
return -errno;
@@ -2004,8 +2726,13 @@ static int omap4_hwc_device_open(const hw_module_t* module, const char* name,
hwc_dev->base.registerProcs = omap4_hwc_registerProcs;
hwc_dev->base.dump = omap4_hwc_dump;
hwc_dev->fb_dev = hwc_mod->fb_dev;
+ hwc_dev->wakeup_evt = -1;
*device = &hwc_dev->base.common;
+ hwc_dev->vsync_enabled = 0;
+ hwc_dev->last_vsync_time_valid = 0;
+ hwc_dev->fake_vsync_period = 1000000000ull/60;
+
hwc_dev->dsscomp_fd = open("/dev/dsscomp", O_RDWR);
if (hwc_dev->dsscomp_fd < 0) {
ALOGE("failed to open dsscomp (%d)", errno);
@@ -2013,9 +2740,9 @@ static int omap4_hwc_device_open(const hw_module_t* module, const char* name,
goto done;
}
- hwc_dev->hdmi_fb_fd = open("/dev/graphics/fb1", O_RDWR);
- if (hwc_dev->hdmi_fb_fd < 0) {
- ALOGE("failed to open hdmi fb (%d)", errno);
+ int ret = ioctl(hwc_dev->dsscomp_fd, DSSCIOC_QUERY_PLATFORM, &limits);
+ if (ret) {
+ ALOGE("failed to get platform limits (%d): %m", errno);
err = -errno;
goto done;
}
@@ -2042,23 +2769,54 @@ static int omap4_hwc_device_open(const hw_module_t* module, const char* name,
goto done;
}
- hwc_dev->buffers = malloc(sizeof(buffer_handle_t) * MAX_HW_OVERLAYS);
+ /* Allocate the maximum buffers that we can receive from HWC */
+ hwc_dev->buffers = malloc(sizeof(buffer_handle_t) * MAX_HWC_LAYERS);
if (!hwc_dev->buffers) {
err = -ENOMEM;
goto done;
}
- int ret = ioctl(hwc_dev->dsscomp_fd, DSSCIOC_QUERY_DISPLAY, &hwc_dev->fb_dis);
+ ret = ioctl(hwc_dev->dsscomp_fd, DSSCIOC_QUERY_DISPLAY, &hwc_dev->fb_dis);
if (ret) {
ALOGE("failed to get display info (%d): %m", errno);
err = -errno;
goto done;
}
- hwc_dev->ext.lcd_xpy = (float) hwc_dev->fb_dis.width_in_mm / hwc_dev->fb_dis.timings.x_res /
- hwc_dev->fb_dis.height_in_mm * hwc_dev->fb_dis.timings.y_res;
- if (pipe(hwc_dev->pipe_fds) == -1) {
- ALOGE("failed to event pipe (%d): %m", errno);
+ hwc_dev->ion_fd = ion_open();
+ if (hwc_dev->ion_fd < 0) {
+ ALOGE("failed to open ion driver (%d)", errno);
+ }
+
+ int i;
+ for (i = 0; i < NUM_EXT_DISPLAY_BACK_BUFFERS; i++) {
+ hwc_dev->ion_handles[i] = NULL;
+ }
+
+    /* use default values in case some of the requested display parameters are missing */
+ hwc_dev->ext.lcd_xpy = 1.0;
+ if (hwc_dev->fb_dis.timings.x_res && hwc_dev->fb_dis.height_in_mm) {
+ hwc_dev->ext.lcd_xpy = (float)
+ hwc_dev->fb_dis.width_in_mm / hwc_dev->fb_dis.timings.x_res /
+ hwc_dev->fb_dis.height_in_mm * hwc_dev->fb_dis.timings.y_res;
+ }
+
+ if (hwc_dev->fb_dis.channel == OMAP_DSS_CHANNEL_DIGIT) {
+ ALOGI("Primary display is HDMI");
+ hwc_dev->on_tv = 1;
+ }
+ else {
+ hwc_dev->hdmi_fb_fd = open("/dev/graphics/fb1", O_RDWR);
+ if (hwc_dev->hdmi_fb_fd < 0) {
+ ALOGE("failed to open hdmi fb (%d)", errno);
+ err = -errno;
+ goto done;
+ }
+ }
+ set_primary_display_transform_matrix(hwc_dev);
+
+ if ((hwc_dev->wakeup_evt = eventfd(0, EFD_NONBLOCK)) < 0) {
+ ALOGE("failed to eventfd (%d): %m", errno);
err = -errno;
goto done;
}
@@ -2068,6 +2826,13 @@ static int omap4_hwc_device_open(const hw_module_t* module, const char* name,
err = -errno;
goto done;
}
+
+ if (pthread_mutex_init(&hwc_dev->vsync_lock, NULL)) {
+ ALOGE("failed to create vsync mutex (%d): %m", errno);
+ err = -errno;
+ goto done;
+ }
+
if (pthread_create(&hwc_dev->hdmi_thread, NULL, omap4_hwc_hdmi_thread, hwc_dev))
{
ALOGE("failed to create HDMI listening thread (%d): %m", errno);
@@ -2121,6 +2886,32 @@ static int omap4_hwc_device_open(const hw_module_t* module, const char* name,
ALOGI("omap4_hwc_device_open(rgb_order=%d nv12_only=%d)",
hwc_dev->flags_rgb_order, hwc_dev->flags_nv12_only);
+ int gc2d_fd = open("/dev/gcioctl", O_RDWR);
+ if (gc2d_fd < 0) {
+ ALOGI("Unable to open gc-core device (%d), blits disabled", errno);
+ hwc_dev->blt_policy = BLTPOLICY_DISABLED;
+ } else {
+ property_get("persist.hwc.bltmode", value, "1");
+ hwc_dev->blt_mode = atoi(value);
+ property_get("persist.hwc.bltpolicy", value, "1");
+ hwc_dev->blt_policy = atoi(value);
+ ALOGI("blitter present, blits mode %d, blits policy %d", hwc_dev->blt_mode, hwc_dev->blt_policy);
+ close(gc2d_fd);
+
+ if (rgz_get_screengeometry(hwc_dev->fb_fd, &gscrngeom,
+ hwc_dev->fb_dev->base.format) != 0) {
+ err = -EINVAL;
+ goto done;
+ }
+ }
+
+ property_get("persist.hwc.upscaled_nv12_limit", value, "2.");
+ sscanf(value, "%f", &hwc_dev->upscaled_nv12_limit);
+ if (hwc_dev->upscaled_nv12_limit < 0. || hwc_dev->upscaled_nv12_limit > 2048.) {
+ ALOGW("Invalid upscaled_nv12_limit (%s), setting to 2.", value);
+ hwc_dev->upscaled_nv12_limit = 2.;
+ }
+
done:
if (err && hwc_dev) {
if (hwc_dev->dsscomp_fd >= 0)
@@ -2129,7 +2920,10 @@ done:
close(hwc_dev->hdmi_fb_fd);
if (hwc_dev->fb_fd >= 0)
close(hwc_dev->fb_fd);
+ if (hwc_dev->wakeup_evt >= 0)
+ close(hwc_dev->wakeup_evt);
pthread_mutex_destroy(&hwc_dev->lock);
+ pthread_mutex_destroy(&hwc_dev->vsync_lock);
free(hwc_dev->buffers);
free(hwc_dev);
}
diff --git a/hwc/rgz_2d.c b/hwc/rgz_2d.c
new file mode 100644
index 0000000..4f9e483
--- /dev/null
+++ b/hwc/rgz_2d.c
@@ -0,0 +1,1675 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include <stdio.h>
+#include <stdlib.h>
+#include <errno.h>
+#include <time.h>
+#include <assert.h>
+#include <strings.h>
+#include <dlfcn.h>
+
+#include <fcntl.h>
+#include <sys/mman.h>
+#include <linux/fb.h>
+#include <linux/bltsville.h>
+#include <video/dsscomp.h>
+#include <video/omap_hwc.h>
+
+#ifndef RGZ_TEST_INTEGRATION
+#include <cutils/log.h>
+#include <cutils/properties.h>
+#include <hardware/hwcomposer.h>
+#include "hal_public.h"
+#else
+#include "hwcomposer.h"
+#include "buffer_handle.h"
+#define ALIGN(x,a) (((x) + (a) - 1L) & ~((a) - 1L))
+#define HW_ALIGN 32
+#endif
+
+#include "rgz_2d.h"
+
+#ifdef RGZ_TEST_INTEGRATION
+extern void BVDump(const char* prefix, const char* tab, const struct bvbltparams* parms);
+#define BVDUMP(p,t,parms) BVDump(p, t, parms)
+#define HANDLE_TO_BUFFER(h) handle_to_buffer(h)
+#define HANDLE_TO_STRIDE(h) handle_to_stride(h)
+#else
+static int rgz_handle_to_stride(IMG_native_handle_t *h);
+#define BVDUMP(p,t,parms)
+#define HANDLE_TO_BUFFER(h) NULL
+/* Needs to be meaningful for TILER & GFX buffers and NV12 */
+#define HANDLE_TO_STRIDE(h) rgz_handle_to_stride(h)
+#endif
+#define DSTSTRIDE(dstgeom) dstgeom->virtstride
+
+/* Borrowed macros from hwc.c vvv - consider sharing later */
+#define min(a, b) ( { typeof(a) __a = (a), __b = (b); __a < __b ? __a : __b; } )
+#define max(a, b) ( { typeof(a) __a = (a), __b = (b); __a > __b ? __a : __b; } )
+#define swap(a, b) do { typeof(a) __a = (a); (a) = (b); (b) = __a; } while (0)
+
+#define WIDTH(rect) ((rect).right - (rect).left)
+#define HEIGHT(rect) ((rect).bottom - (rect).top)
+
+#define is_RGB(format) ((format) == HAL_PIXEL_FORMAT_BGRA_8888 || (format) == HAL_PIXEL_FORMAT_RGB_565 || (format) == HAL_PIXEL_FORMAT_BGRX_8888)
+#define is_BGR(format) ((format) == HAL_PIXEL_FORMAT_RGBX_8888 || (format) == HAL_PIXEL_FORMAT_RGBA_8888)
+#define is_NV12(format) ((format) == HAL_PIXEL_FORMAT_TI_NV12 || (format) == HAL_PIXEL_FORMAT_TI_NV12_PADDED)
+
+#define HAL_PIXEL_FORMAT_BGRX_8888 0x1FF
+#define HAL_PIXEL_FORMAT_TI_NV12 0x100
+#define HAL_PIXEL_FORMAT_TI_NV12_PADDED 0x101
+/* Borrowed macros from hwc.c ^^^ */
+#define is_OPAQUE(format) ((format) == HAL_PIXEL_FORMAT_RGB_565 || (format) == HAL_PIXEL_FORMAT_RGBX_8888 || (format) == HAL_PIXEL_FORMAT_BGRX_8888)
+
+/* OUTP is the means for grabbing diagnostic data */
+#ifndef RGZ_TEST_INTEGRATION
+#define OUTP ALOGI
+#define OUTE ALOGE
+#else
+#define OUTP(...) { printf(__VA_ARGS__); printf("\n"); fflush(stdout); }
+#define OUTE OUTP
+#define ALOGD_IF(debug, ...) { if (debug) OUTP(__VA_ARGS__); }
+#endif
+
+#define IS_BVCMD(params) (params->op == RGZ_OUT_BVCMD_REGION || params->op == RGZ_OUT_BVCMD_PAINT)
+
+/* Number of framebuffers to track */
+#define RGZ_NUM_FB 2
+
+struct rgz_blts {
+ struct rgz_blt_entry bvcmds[RGZ_MAX_BLITS];
+ int idx;
+};
+
+
+static int rgz_hwc_layer_blit(rgz_out_params_t *params, rgz_layer_t *rgz_layer);
+static void rgz_blts_init(struct rgz_blts *blts);
+static void rgz_blts_free(struct rgz_blts *blts);
+static struct rgz_blt_entry* rgz_blts_get(struct rgz_blts *blts, rgz_out_params_t *params);
+static int rgz_blts_bvdirect(rgz_t* rgz, struct rgz_blts *blts, rgz_out_params_t *params);
+static void rgz_get_src_rect(hwc_layer_1_t* layer, blit_rect_t *subregion_rect, blit_rect_t *res_rect);
+static int hal_to_ocd(int color);
+static int rgz_get_orientation(unsigned int transform);
+static int rgz_get_flip_flags(unsigned int transform, int use_src2_flags);
+static int rgz_hwc_scaled(hwc_layer_1_t *layer);
+
+int debug = 0;
+struct rgz_blts blts;
+/* Represents a screen sized background layer */
+static hwc_layer_1_t bg_layer;
+
+static void svgout_header(int htmlw, int htmlh, int coordw, int coordh)
+{
+ OUTP("<svg xmlns=\"http://www.w3.org/2000/svg\""
+ "width=\"%d\" height=\"%d\""
+ "viewBox=\"0 0 %d %d\">",
+ htmlw, htmlh, coordw, coordh);
+}
+
+static void svgout_footer(void)
+{
+ OUTP("</svg>");
+}
+
+static void svgout_rect(blit_rect_t *r, char *color, char *text)
+{
+ OUTP("<rect x=\"%d\" y=\"%d\" width=\"%d\" height=\"%d\" fill=\"%s\" "
+ "fill-opacity=\"%f\" stroke=\"black\" stroke-width=\"1\" />",
+ r->left, r->top, r->right - r->left, r->bottom - r->top, color, 1.0f);
+
+ if (!text)
+ return;
+
+ OUTP("<text x=\"%d\" y=\"%d\" style=\"font-size:30\" fill=\"black\">%s"
+ "</text>",
+ r->left, r->top + 40, text);
+}
+
+static int empty_rect(blit_rect_t *r)
+{
+ return !r->left && !r->top && !r->right && !r->bottom;
+}
+
+static int get_top_rect(blit_hregion_t *hregion, int subregion, blit_rect_t **routp)
+{
+ int l = hregion->nlayers - 1;
+ do {
+ *routp = &hregion->blitrects[l][subregion];
+ if (!empty_rect(*routp))
+ break;
+ }
+ while (--l >= 0);
+ return l;
+}
+
+/*
+ * The idea here is that we walk the layers from front to back and count the
+ * number of layers in the hregion until the first layer which doesn't require
+ * blending.
+ */
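+/*
+ * Illustrative example (comment added for clarity, layers are hypothetical):
+ * with three layers in a subregion ordered back to front as
+ * [opaque RGBX_8888, premultiplied BGRA_8888, premultiplied BGRA_8888],
+ * walking front to back counts the two blended layers and the opaque one,
+ * then stops, so ops == 3 and *bottom holds the index of the opaque layer.
+ */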
+static int get_layer_ops(blit_hregion_t *hregion, int subregion, int *bottom)
+{
+ int l = hregion->nlayers - 1;
+ int ops = 0;
+ *bottom = -1;
+ do {
+ if (!empty_rect(&hregion->blitrects[l][subregion])) {
+ ops++;
+ *bottom = l;
+ hwc_layer_1_t *layer = hregion->rgz_layers[l]->hwc_layer;
+ IMG_native_handle_t *h = (IMG_native_handle_t *)layer->handle;
+ if ((layer->blending != HWC_BLENDING_PREMULT) || is_OPAQUE(h->iFormat))
+ break;
+ }
+ }
+ while (--l >= 0);
+ return ops;
+}
+
+static int get_layer_ops_next(blit_hregion_t *hregion, int subregion, int l)
+{
+ while (++l < hregion->nlayers) {
+ if (!empty_rect(&hregion->blitrects[l][subregion]))
+ return l;
+ }
+ return -1;
+}
+
+static int svgout_intersects_display(blit_rect_t *a, int dispw, int disph)
+{
+ return ((a->bottom > 0) && (a->top < disph) &&
+ (a->right > 0) && (a->left < dispw));
+}
+
+static void svgout_hregion(blit_hregion_t *hregion, int dispw, int disph)
+{
+ char *colors[] = {"red", "orange", "yellow", "green", "blue", "indigo", "violet", NULL};
+ int b;
+ for (b = 0; b < hregion->nsubregions; b++) {
+ blit_rect_t *rect;
+ (void)get_top_rect(hregion, b, &rect);
+ /* Only generate SVG for subregions intersecting the displayed area */
+ if (!svgout_intersects_display(rect, dispw, disph))
+ continue;
+ svgout_rect(rect, colors[b % 7], NULL);
+ }
+}
+
+static void rgz_out_svg(rgz_t *rgz, rgz_out_params_t *params)
+{
+ if (!rgz || !(rgz->state & RGZ_REGION_DATA)) {
+ OUTE("rgz_out_svg invoked with bad state");
+ return;
+ }
+ blit_hregion_t *hregions = rgz->hregions;
+ svgout_header(params->data.svg.htmlw, params->data.svg.htmlh,
+ params->data.svg.dispw, params->data.svg.disph);
+ int i;
+ for (i = 0; i < rgz->nhregions; i++) {
+
+ OUTP("<!-- hregion %d (subcount %d)-->", i, hregions[i].nsubregions);
+ svgout_hregion(&hregions[i], params->data.svg.dispw,
+ params->data.svg.disph);
+ }
+ svgout_footer();
+}
+
+/* XXX duplicate of hwc.c version */
+static void dump_layer(hwc_layer_1_t const* l, int iserr)
+{
+#define FMT(f) ((f) == HAL_PIXEL_FORMAT_TI_NV12 ? "NV12" : \
+ (f) == HAL_PIXEL_FORMAT_BGRX_8888 ? "xRGB32" : \
+ (f) == HAL_PIXEL_FORMAT_RGBX_8888 ? "xBGR32" : \
+ (f) == HAL_PIXEL_FORMAT_BGRA_8888 ? "ARGB32" : \
+ (f) == HAL_PIXEL_FORMAT_RGBA_8888 ? "ABGR32" : \
+ (f) == HAL_PIXEL_FORMAT_RGB_565 ? "RGB565" : "??")
+
+ OUTE("%stype=%d, flags=%08x, handle=%p, tr=%02x, blend=%04x, {%d,%d,%d,%d}, {%d,%d,%d,%d}",
+ iserr ? ">> " : " ",
+ l->compositionType, l->flags, l->handle, l->transform, l->blending,
+ l->sourceCrop.left,
+ l->sourceCrop.top,
+ l->sourceCrop.right,
+ l->sourceCrop.bottom,
+ l->displayFrame.left,
+ l->displayFrame.top,
+ l->displayFrame.right,
+ l->displayFrame.bottom);
+ if (l->handle) {
+ IMG_native_handle_t *h = (IMG_native_handle_t *)l->handle;
+ OUTE("%s%d*%d(%s)",
+ iserr ? ">> " : " ",
+ h->iWidth, h->iHeight, FMT(h->iFormat));
+ OUTE("hndl %p", l->handle);
+ }
+}
+
+static void dump_all(rgz_layer_t *rgz_layers, unsigned int layerno, unsigned int errlayer)
+{
+ unsigned int i;
+ for (i = 0; i < layerno; i++) {
+ hwc_layer_1_t *l = rgz_layers[i].hwc_layer;
+ OUTE("Layer %d", i);
+ dump_layer(l, errlayer == i);
+ }
+}
+
+static int rgz_out_bvdirect_paint(rgz_t *rgz, rgz_out_params_t *params)
+{
+ int rv = 0;
+ unsigned int i;
+ (void)rgz;
+
+ rgz_blts_init(&blts);
+
+ /* Begin from index 1 to remove the background layer from the output */
+ for (i = 1; i < rgz->rgz_layerno; i++) {
+ rv = rgz_hwc_layer_blit(params, &rgz->rgz_layers[i]);
+ if (rv) {
+ OUTE("bvdirect_paint: error in layer %d: %d", i, rv);
+ dump_all(rgz->rgz_layers, rgz->rgz_layerno, i);
+ rgz_blts_free(&blts);
+ return rv;
+ }
+ }
+ rgz_blts_bvdirect(rgz, &blts, params);
+ rgz_blts_free(&blts);
+ return rv;
+}
+
+static void rgz_set_async(struct rgz_blt_entry *e, int async)
+{
+ e->bp.flags = async ? e->bp.flags | BVFLAG_ASYNC : e->bp.flags & ~BVFLAG_ASYNC;
+}
+
+static void rgz_get_screen_info(rgz_out_params_t *params, struct bvsurfgeom **screen_geom)
+{
+ *screen_geom = params->data.bvc.dstgeom;
+}
+
+static int rgz_is_blending_disabled(rgz_out_params_t *params)
+{
+ return params->data.bvc.noblend;
+}
+
+static void rgz_get_displayframe_rect(hwc_layer_1_t *layer, blit_rect_t *res_rect)
+{
+ res_rect->left = layer->displayFrame.left;
+ res_rect->top = layer->displayFrame.top;
+ res_rect->bottom = layer->displayFrame.bottom;
+ res_rect->right = layer->displayFrame.right;
+}
+
+static void rgz_set_dst_data(rgz_out_params_t *params, blit_rect_t *subregion_rect,
+ struct rgz_blt_entry* e)
+{
+ struct bvsurfgeom *screen_geom;
+ rgz_get_screen_info(params, &screen_geom);
+
+ /* omaplfb is in charge of assigning the correct dstdesc in the kernel */
+ e->dstgeom.structsize = sizeof(struct bvsurfgeom);
+ e->dstgeom.format = screen_geom->format;
+ e->dstgeom.width = screen_geom->width;
+ e->dstgeom.height = screen_geom->height;
+ e->dstgeom.orientation = screen_geom->orientation;
+ e->dstgeom.virtstride = DSTSTRIDE(screen_geom);
+
+ e->bp.dstrect.left = subregion_rect->left;
+ e->bp.dstrect.top = subregion_rect->top;
+ e->bp.dstrect.width = WIDTH(*subregion_rect);
+ e->bp.dstrect.height = HEIGHT(*subregion_rect);
+}
+
+static void rgz_set_src_data(rgz_out_params_t *params, rgz_layer_t *rgz_layer,
+ blit_rect_t *subregion_rect, struct rgz_blt_entry* e, int is_src2)
+{
+ hwc_layer_1_t *hwc_layer = rgz_layer->hwc_layer;
+ struct bvbuffdesc *srcdesc = is_src2 ? &e->src2desc : &e->src1desc;
+ struct bvsurfgeom *srcgeom = is_src2 ? &e->src2geom : &e->src1geom;
+ struct bvrect *srcrect = is_src2 ? &e->bp.src2rect : &e->bp.src1rect;
+ IMG_native_handle_t *handle = (IMG_native_handle_t *)hwc_layer->handle;
+
+ srcdesc->structsize = sizeof(struct bvbuffdesc);
+ srcdesc->length = handle->iHeight * HANDLE_TO_STRIDE(handle);
+ srcdesc->auxptr = (void*)rgz_layer->buffidx;
+ srcgeom->structsize = sizeof(struct bvsurfgeom);
+ srcgeom->format = hal_to_ocd(handle->iFormat);
+ srcgeom->width = handle->iWidth;
+ srcgeom->height = handle->iHeight;
+ srcgeom->orientation = rgz_get_orientation(hwc_layer->transform);
+ srcgeom->virtstride = HANDLE_TO_STRIDE(handle);
+ if (hwc_layer->transform & HAL_TRANSFORM_ROT_90)
+ swap(srcgeom->width, srcgeom->height);
+
+ /* Find out what portion of the src we want to use for the blit */
+ blit_rect_t res_rect;
+ rgz_get_src_rect(hwc_layer, subregion_rect, &res_rect);
+ srcrect->left = res_rect.left;
+ srcrect->top = res_rect.top;
+ srcrect->width = WIDTH(res_rect);
+ srcrect->height = HEIGHT(res_rect);
+}
+
+/*
+ * Set the clipping rectangle; if part of the subregion rectangle is outside
+ * the boundaries of the destination, only the out-of-bounds area is removed
+ */
+static void rgz_set_clip_rect(rgz_out_params_t *params, blit_rect_t *subregion_rect,
+ struct rgz_blt_entry* e)
+{
+ struct bvsurfgeom *screen_geom;
+ rgz_get_screen_info(params, &screen_geom);
+
+ blit_rect_t clip_rect;
+ clip_rect.left = max(0, subregion_rect->left);
+ clip_rect.top = max(0, subregion_rect->top);
+ clip_rect.bottom = min(screen_geom->height, subregion_rect->bottom);
+ clip_rect.right = min(screen_geom->width, subregion_rect->right);
+
+ e->bp.cliprect.left = clip_rect.left;
+ e->bp.cliprect.top = clip_rect.top;
+ e->bp.cliprect.width = WIDTH(clip_rect);
+ e->bp.cliprect.height = HEIGHT(clip_rect);
+}
+
+/*
+ * Configures the blit entry so that src2 is the same as the destination
+ */
+static void rgz_set_src2_is_dst(rgz_out_params_t *params, struct rgz_blt_entry* e)
+{
+ /* omaplfb is in charge of assigning the correct src2desc in the kernel */
+ e->src2geom = e->dstgeom;
+ e->src2desc.structsize = sizeof(struct bvbuffdesc);
+ e->src2desc.auxptr = (void*)HWC_BLT_DESC_FB_FN(0);
+ e->bp.src2rect = e->bp.dstrect;
+}
+
+/*
+ * Configure the scaling mode according to the layer format
+ */
+static void rgz_cfg_scale_mode(struct rgz_blt_entry* e, hwc_layer_1_t *layer)
+{
+ /*
+ * TODO: Revisit scaling mode assignment later; the output of the GPU and
+ * the GC320 seems to differ
+ */
+ IMG_native_handle_t *handle = (IMG_native_handle_t *)layer->handle;
+ e->bp.scalemode = is_NV12(handle->iFormat) ? BVSCALE_9x9_TAP : BVSCALE_BILINEAR;
+}
+
+/*
+ * Copies src1 into the framebuffer
+ */
+static struct rgz_blt_entry* rgz_hwc_subregion_copy(rgz_out_params_t *params,
+ blit_rect_t *subregion_rect, rgz_layer_t *rgz_src1)
+{
+ struct rgz_blt_entry* e = rgz_blts_get(&blts, params);
+ hwc_layer_1_t *hwc_src1 = rgz_src1->hwc_layer;
+ e->bp.structsize = sizeof(struct bvbltparams);
+ e->bp.op.rop = 0xCCCC; /* SRCCOPY */
+ e->bp.flags = BVFLAG_CLIP | BVFLAG_ROP;
+ e->bp.flags |= rgz_get_flip_flags(hwc_src1->transform, 0);
+ rgz_set_async(e, 1);
+
+ blit_rect_t tmp_rect;
+ if (rgz_hwc_scaled(hwc_src1)) {
+ rgz_get_displayframe_rect(hwc_src1, &tmp_rect);
+ rgz_cfg_scale_mode(e, hwc_src1);
+ } else
+ tmp_rect = *subregion_rect;
+
+ rgz_set_src_data(params, rgz_src1, &tmp_rect, e, 0);
+ rgz_set_dst_data(params, &tmp_rect, e);
+ rgz_set_clip_rect(params, subregion_rect, e);
+
+ if((e->src1geom.format == OCDFMT_BGR124) ||
+ (e->src1geom.format == OCDFMT_RGB124) ||
+ (e->src1geom.format == OCDFMT_RGB16))
+ e->dstgeom.format = OCDFMT_BGR124;
+
+ return e;
+}
+
+/*
+ * Blends two layers and writes the result to the framebuffer; src1 must be the
+ * topmost layer while src2 is the one behind it. If src2 is NULL, src1 is
+ * blended with the current contents of the framebuffer.
+ */
+static struct rgz_blt_entry* rgz_hwc_subregion_blend(rgz_out_params_t *params,
+ blit_rect_t *subregion_rect, rgz_layer_t *rgz_src1, rgz_layer_t *rgz_src2)
+{
+ struct rgz_blt_entry* e = rgz_blts_get(&blts, params);
+ hwc_layer_1_t *hwc_src1 = rgz_src1->hwc_layer;
+ e->bp.structsize = sizeof(struct bvbltparams);
+ e->bp.op.blend = BVBLEND_SRC1OVER;
+ e->bp.flags = BVFLAG_CLIP | BVFLAG_BLEND;
+ e->bp.flags |= rgz_get_flip_flags(hwc_src1->transform, 0);
+ rgz_set_async(e, 1);
+
+ blit_rect_t tmp_rect;
+ if (rgz_hwc_scaled(hwc_src1)) {
+ rgz_get_displayframe_rect(hwc_src1, &tmp_rect);
+ rgz_cfg_scale_mode(e, hwc_src1);
+ } else
+ tmp_rect = *subregion_rect;
+
+ rgz_set_src_data(params, rgz_src1, &tmp_rect, e, 0);
+ rgz_set_dst_data(params, &tmp_rect, e);
+ rgz_set_clip_rect(params, subregion_rect, e);
+
+ if (rgz_src2) {
+ /*
+ * NOTE: Due to an API limitation it's not possible to blend src1 and
+ * src2 if both have scaling, hence only src1 is used for now
+ */
+ hwc_layer_1_t *hwc_src2 = rgz_src2->hwc_layer;
+ if (rgz_hwc_scaled(hwc_src2))
+ OUTE("src2 layer %p has scaling, this is not supported", hwc_src2);
+ e->bp.flags |= rgz_get_flip_flags(hwc_src2->transform, 1);
+ rgz_set_src_data(params, rgz_src2, subregion_rect, e, 1);
+ } else
+ rgz_set_src2_is_dst(params, e);
+
+ return e;
+}
+
+/*
+ * Clear the destination buffer; if rect is NULL the whole screen is cleared.
+ * rect cannot be outside the boundaries of the screen
+ */
+static void rgz_out_clrdst(rgz_out_params_t *params, blit_rect_t *rect)
+{
+ struct rgz_blt_entry* e = rgz_blts_get(&blts, params);
+ e->bp.structsize = sizeof(struct bvbltparams);
+ e->bp.op.rop = 0xCCCC; /* SRCCOPY */
+ e->bp.flags = BVFLAG_CLIP | BVFLAG_ROP;
+ rgz_set_async(e, 1);
+
+ struct bvsurfgeom *screen_geom;
+ rgz_get_screen_info(params, &screen_geom);
+
+ e->src1desc.structsize = sizeof(struct bvbuffdesc);
+ e->src1desc.length = 4; /* 1 pixel, 32bpp */
+ /*
+ * With the HWC we don't bother having a buffer for the fill; we'll get the
+ * OMAPLFB to fix up the src1desc and stride if the auxiliary pointer is -1
+ */
+ e->src1desc.auxptr = (void*)-1;
+ e->src1geom.structsize = sizeof(struct bvsurfgeom);
+ e->src1geom.format = OCDFMT_RGBA24;
+ e->bp.src1rect.left = e->bp.src1rect.top = e->src1geom.orientation = 0;
+ e->src1geom.height = e->src1geom.width = e->bp.src1rect.height = e->bp.src1rect.width = 1;
+
+ blit_rect_t clear_rect;
+ if (rect) {
+ clear_rect.left = rect->left;
+ clear_rect.top = rect->top;
+ clear_rect.right = rect->right;
+ clear_rect.bottom = rect->bottom;
+ } else {
+ clear_rect.left = clear_rect.top = 0;
+ clear_rect.right = screen_geom->width;
+ clear_rect.bottom = screen_geom->height;
+ }
+
+ rgz_set_dst_data(params, &clear_rect, e);
+ rgz_set_clip_rect(params, &clear_rect, e);
+}
+
+static int rgz_out_bvcmd_paint(rgz_t *rgz, rgz_out_params_t *params)
+{
+ int rv = 0;
+ params->data.bvc.out_blits = 0;
+ params->data.bvc.out_nhndls = 0;
+ rgz_blts_init(&blts);
+ rgz_out_clrdst(params, NULL);
+
+ unsigned int i, j;
+
+ /* Begin from index 1 to remove the background layer from the output */
+ for (i = 1, j = 0; i < rgz->rgz_layerno; i++) {
+ rgz_layer_t *rgz_layer = &rgz->rgz_layers[i];
+ hwc_layer_1_t *l = rgz_layer->hwc_layer;
+
+ //OUTP("blitting meminfo %d", rgz->rgz_layers[i].buffidx);
+
+ /*
+ * See if we need to put transparent pixels where this layer
+ * is located on the screen
+ */
+ if (rgz_layer->buffidx == -1) {
+ struct bvsurfgeom *scrgeom = params->data.bvc.dstgeom;
+ blit_rect_t srcregion;
+ srcregion.left = max(0, l->displayFrame.left);
+ srcregion.top = max(0, l->displayFrame.top);
+ srcregion.bottom = min(scrgeom->height, l->displayFrame.bottom);
+ srcregion.right = min(scrgeom->width, l->displayFrame.right);
+ rgz_out_clrdst(params, &srcregion);
+ continue;
+ }
+
+ rv = rgz_hwc_layer_blit(params, rgz_layer);
+ if (rv) {
+ OUTE("bvcmd_paint: error in layer %d: %d", i, rv);
+ dump_all(rgz->rgz_layers, rgz->rgz_layerno, i);
+ rgz_blts_free(&blts);
+ return rv;
+ }
+ params->data.bvc.out_hndls[j++] = l->handle;
+ params->data.bvc.out_nhndls++;
+ }
+
+ /* Last blit is made sync to act like a fence for the previous async blits */
+ struct rgz_blt_entry* e = &blts.bvcmds[blts.idx-1];
+ rgz_set_async(e, 0);
+
+ /* FIXME: we want to be able to call rgz_blts_free and populate the actual
+ * composition data structure ourselves */
+ params->data.bvc.cmdp = blts.bvcmds;
+ params->data.bvc.cmdlen = blts.idx;
+
+ if (params->data.bvc.out_blits >= RGZ_MAX_BLITS) {
+ rv = -1;
+ // rgz_blts_free(&blts); // FIXME
+ }
+ return rv;
+}
+
+static float getscalew(hwc_layer_1_t *layer)
+{
+ int w = WIDTH(layer->sourceCrop);
+ int h = HEIGHT(layer->sourceCrop);
+
+ if (layer->transform & HWC_TRANSFORM_ROT_90)
+ swap(w, h);
+
+ return ((float)WIDTH(layer->displayFrame)) / (float)w;
+}
+
+static float getscaleh(hwc_layer_1_t *layer)
+{
+ int w = WIDTH(layer->sourceCrop);
+ int h = HEIGHT(layer->sourceCrop);
+
+ if (layer->transform & HWC_TRANSFORM_ROT_90)
+ swap(w, h);
+
+ return ((float)HEIGHT(layer->displayFrame)) / (float)h;
+}
+
+static int rgz_bswap(int *a, int *b)
+{
+ if (*a > *b) {
+ int tmp = *b;
+ *b = *a;
+ *a = tmp;
+ return 1;
+ }
+ return 0;
+}
+
+/*
+ * Simple bubble sort on an array
+ */
+static void rgz_bsort(int *a, int len)
+{
+ int i, s;
+
+ do {
+ s=0;
+ for (i=0; i+1<len; i++) {
+ if (rgz_bswap(&a[i], &a[i+1]))
+ s = 1;
+ }
+ } while (s);
+}
+
+/*
+ * Leave only unique numbers in a sorted array
+ */
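+/*
+ * Example (comment added for clarity): for a sorted input {0, 3, 3, 5} the
+ * unique values are compacted to the front so the first three entries read
+ * {0, 3, 5} and the function returns 3; callers only use the first 'unique'
+ * entries afterwards.
+ */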
+static int rgz_bunique(int *a, int len)
+{
+ int unique = 1;
+ int base = 0;
+ while (base + 1 < len) {
+ if (a[base] == a[base + 1]) {
+ int skip = 1;
+ while (base + skip < len && a[base] == a[base + skip])
+ skip++;
+ if (base + skip == len)
+ break;
+ int i;
+ for (i = 0; i < skip - 1; i++)
+ a[base + 1 + i] = a[base + skip];
+ }
+ unique++;
+ base++;
+ }
+ return unique;
+}
+
+static int rgz_hwc_layer_sortbyy(rgz_layer_t *ra, int rsz, int *out, int *width, int screen_height)
+{
+ int outsz = 0;
+ int i;
+ *width = 0;
+ for (i = 0; i < rsz; i++) {
+ hwc_layer_1_t *layer = ra[i].hwc_layer;
+ /* Maintain regions inside display boundaries */
+ int top = layer->displayFrame.top;
+ int bottom = layer->displayFrame.bottom;
+ out[outsz++] = max(0, top);
+ out[outsz++] = min(bottom, screen_height);
+ int right = layer->displayFrame.right;
+ *width = *width > right ? *width : right;
+ }
+ rgz_bsort(out, outsz);
+ return outsz;
+}
+
+static int rgz_hwc_intersects(blit_rect_t *a, hwc_rect_t *b)
+{
+ return ((a->bottom > b->top) && (a->top < b->bottom) &&
+ (a->right > b->left) && (a->left < b->right));
+}
+
+static void rgz_gen_blitregions(blit_hregion_t *hregion, int screen_width)
+{
+/*
+ * 1. Get the offsets (left/right positions) of each layer within the
+ * hregion. Assume that layers describe the bounds of the hregion.
+ * 2. We should then be able to generate an array of rects
+ * 3. Each layer will have a different z-order, for each z-order
+ * find the intersection. Some intersections will be empty.
+ */
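+/*
+ * Worked example (comment added for clarity, values are hypothetical): two
+ * layers spanning x = [0, 400) and x = [200, 600) on a 500 pixel wide screen
+ * yield the offsets {0, 200, 400, 500} after sorting and deduplication, i.e.
+ * three subregions: [0, 200), [200, 400) and [400, 500).
+ */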
+
+ int offsets[RGZ_SUBREGIONMAX];
+ int noffsets=0;
+ int l, r;
+ for (l = 0; l < hregion->nlayers; l++) {
+ hwc_layer_1_t *layer = hregion->rgz_layers[l]->hwc_layer;
+ /* Make sure the subregion is not outside the boundaries of the screen */
+ int left = layer->displayFrame.left;
+ int right = layer->displayFrame.right;
+ offsets[noffsets++] = max(0, left);
+ offsets[noffsets++] = min(right, screen_width);
+ }
+ rgz_bsort(offsets, noffsets);
+ noffsets = rgz_bunique(offsets, noffsets);
+ hregion->nsubregions = noffsets - 1;
+ bzero(hregion->blitrects, sizeof(hregion->blitrects));
+ for (r = 0; r + 1 < noffsets; r++) {
+ blit_rect_t subregion;
+ subregion.top = hregion->rect.top;
+ subregion.bottom = hregion->rect.bottom;
+ subregion.left = offsets[r];
+ subregion.right = offsets[r+1];
+
+ ALOGD_IF(debug, " sub l %d r %d",
+ subregion.left, subregion.right);
+ for (l = 0; l < hregion->nlayers; l++) {
+ hwc_layer_1_t *layer = hregion->rgz_layers[l]->hwc_layer;
+ if (rgz_hwc_intersects(&subregion, &layer->displayFrame)) {
+
+ hregion->blitrects[l][r] = subregion;
+
+ ALOGD_IF(debug, "hregion->blitrects[%d][%d] (%d %d %d %d)", l, r,
+ hregion->blitrects[l][r].left,
+ hregion->blitrects[l][r].top,
+ hregion->blitrects[l][r].right,
+ hregion->blitrects[l][r].bottom);
+ }
+ }
+ }
+}
+
+static int rgz_hwc_scaled(hwc_layer_1_t *layer)
+{
+ int w = WIDTH(layer->sourceCrop);
+ int h = HEIGHT(layer->sourceCrop);
+
+ if (layer->transform & HWC_TRANSFORM_ROT_90)
+ swap(w, h);
+
+ return WIDTH(layer->displayFrame) != w || HEIGHT(layer->displayFrame) != h;
+}
+
+static int rgz_in_valid_hwc_layer(hwc_layer_1_t *layer)
+{
+ IMG_native_handle_t *handle = (IMG_native_handle_t *)layer->handle;
+ if ((layer->flags & HWC_SKIP_LAYER) || !handle)
+ return 0;
+
+ if (is_NV12(handle->iFormat))
+ return handle->iFormat == HAL_PIXEL_FORMAT_TI_NV12;
+
+ /* FIXME: The following must be removed when GC supports vertical/horizontal
+ * buffer flips; please note that having both FLIP_H and FLIP_V means a 180
+ * degree rotation, which is indeed supported
+ */
+ if (layer->transform) {
+ int is_flipped = !!(layer->transform & HWC_TRANSFORM_FLIP_H) ^ !!(layer->transform & HWC_TRANSFORM_FLIP_V);
+ if (is_flipped) {
+ ALOGE("Layer %p is flipped %d", layer, layer->transform);
+ return 0;
+ }
+ }
+
+ switch(handle->iFormat) {
+ case HAL_PIXEL_FORMAT_BGRX_8888:
+ case HAL_PIXEL_FORMAT_RGBX_8888:
+ case HAL_PIXEL_FORMAT_RGB_565:
+ case HAL_PIXEL_FORMAT_RGBA_8888:
+ case HAL_PIXEL_FORMAT_BGRA_8888:
+ break;
+ default:
+ return 0;
+ }
+ return 1;
+}
+
+/* Reset dirty region data and state */
+static void rgz_delete_region_data(rgz_t *rgz){
+ if (!rgz)
+ return;
+ if (rgz->hregions)
+ free(rgz->hregions);
+ rgz->hregions = NULL;
+ rgz->nhregions = 0;
+ rgz->state &= ~RGZ_REGION_DATA;
+}
+
+static void rgz_handle_dirty_region(rgz_t *rgz, int reset_counters)
+{
+ unsigned int i;
+ for (i = 0; i < rgz->rgz_layerno; i++) {
+ rgz_layer_t *rgz_layer = &rgz->rgz_layers[i];
+ void *new_handle;
+
+ /*
+ * We don't care about the handle for the background layer or for layers
+ * with the clear fb hint, but we want to maintain layer state for dirty
+ * region handling.
+ */
+ if (i == 0 || rgz_layer->buffidx == -1)
+ new_handle = (void*)0x1;
+ else
+ new_handle = (void*)rgz_layer->hwc_layer->handle;
+
+ if (reset_counters || new_handle != rgz_layer->dirty_hndl) {
+ rgz_layer->dirty_count = RGZ_NUM_FB;
+ rgz_layer->dirty_hndl = new_handle;
+ } else
+ rgz_layer->dirty_count -= rgz_layer->dirty_count ? 1 : 0;
+
+ }
+}
+
+static int rgz_in_hwccheck(rgz_in_params_t *p, rgz_t *rgz)
+{
+ hwc_layer_1_t *layers = p->data.hwc.layers;
+ int layerno = p->data.hwc.layerno;
+
+ rgz->state &= ~RGZ_STATE_INIT;
+
+ if (!layers)
+ return -1;
+
+ /* For debugging */
+ //dump_all(layers, layerno, 0);
+
+ /*
+ * Store buffer index to be sent in the HWC Post2 list. Any overlay
+ * meminfos must come first
+ */
+ int l, memidx = 0;
+ for (l = 0; l < layerno; l++) {
+ /*
+ * Workaround: If an NV12 layer is present in the list, don't even try
+ * to blit. There is a performance degradation while playing video and
+ * using GC at the same time.
+ */
+ IMG_native_handle_t *handle = (IMG_native_handle_t *)layers[l].handle;
+ if (!(layers[l].flags & HWC_SKIP_LAYER) && handle && is_NV12(handle->iFormat))
+ return -1;
+
+ if (layers[l].compositionType == HWC_OVERLAY)
+ memidx++;
+ }
+
+ int possible_blit = 0, candidates = 0;
+
+ /*
+ * Insert the background layer at the beginning of the list and maintain
+ * state for dirty region handling
+ */
+ rgz_layer_t *rgz_layer = &rgz->rgz_layers[0];
+ rgz_layer->hwc_layer = &bg_layer;
+
+ for (l = 0; l < layerno; l++) {
+ if (layers[l].compositionType == HWC_FRAMEBUFFER) {
+ candidates++;
+ if (rgz_in_valid_hwc_layer(&layers[l]) &&
+ possible_blit < RGZ_INPUT_MAXLAYERS) {
+ rgz_layer_t *rgz_layer = &rgz->rgz_layers[possible_blit+1];
+ rgz_layer->hwc_layer = &layers[l];
+ rgz_layer->buffidx = memidx++;
+ possible_blit++;
+ }
+ continue;
+ }
+
+ if (layers[l].hints & HWC_HINT_CLEAR_FB) {
+ candidates++;
+ if (possible_blit < RGZ_INPUT_MAXLAYERS) {
+ /*
+ * Use only the layer rectangle as an input to regionize when the clear
+ * fb hint is present; mark this layer to identify it.
+ */
+ rgz_layer_t *rgz_layer = &rgz->rgz_layers[possible_blit+1];
+ rgz_layer->buffidx = -1;
+ rgz_layer->hwc_layer = &layers[l];
+ possible_blit++;
+ }
+ }
+ }
+
+ if (!possible_blit || possible_blit != candidates) {
+ return -1;
+ }
+
+ unsigned int blit_layers = possible_blit + 1; /* Account for background layer */
+ int reset_dirty_counters = rgz->rgz_layerno != blit_layers ? 1 : 0;
+ /*
+ * The number of layers we are going to blit differs from the previous frame;
+ * we can't trust the region data anymore, so calculate it again
+ */
+ if (reset_dirty_counters)
+ rgz_delete_region_data(rgz);
+
+ rgz->state |= RGZ_STATE_INIT;
+ rgz->rgz_layerno = blit_layers;
+
+ rgz_handle_dirty_region(rgz, reset_dirty_counters);
+
+ return RGZ_ALL;
+}
+
+static int rgz_in_hwc(rgz_in_params_t *p, rgz_t *rgz)
+{
+ int yentries[RGZ_SUBREGIONMAX];
+ int dispw; /* widest layer */
+ int screen_width = p->data.hwc.dstgeom->width;
+ int screen_height = p->data.hwc.dstgeom->height;
+
+ if (!(rgz->state & RGZ_STATE_INIT)) {
+ OUTE("rgz_process started with bad state");
+ return -1;
+ }
+
+ /* If there is already region data avoid parsing it again */
+ if (rgz->state & RGZ_REGION_DATA) {
+ return 0;
+ }
+
+ int layerno = rgz->rgz_layerno;
+
+ /* Find the horizontal regions */
+ rgz_layer_t *rgz_layers = rgz->rgz_layers;
+ int ylen = rgz_hwc_layer_sortbyy(rgz_layers, layerno, yentries, &dispw, screen_height);
+
+ ylen = rgz_bunique(yentries, ylen);
+
+ /* at this point we have an array of horizontal regions */
+ rgz->nhregions = ylen - 1;
+
+ blit_hregion_t *hregions = calloc(rgz->nhregions, sizeof(blit_hregion_t));
+ if (!hregions) {
+ OUTE("Unable to allocate memory for hregions");
+ return -1;
+ }
+ rgz->hregions = hregions;
+
+ ALOGD_IF(debug, "Allocated %d regions (sz = %d), layerno = %d", rgz->nhregions, rgz->nhregions * sizeof(blit_hregion_t), layerno);
+ int i, j;
+ for (i = 0; i < rgz->nhregions; i++) {
+ hregions[i].rect.top = yentries[i];
+ hregions[i].rect.bottom = yentries[i+1];
+ /* Avoid hregions outside the display boundaries */
+ hregions[i].rect.left = 0;
+ hregions[i].rect.right = dispw > screen_width ? screen_width : dispw;
+ hregions[i].nlayers = 0;
+ for (j = 0; j < layerno; j++) {
+ hwc_layer_1_t *layer = rgz_layers[j].hwc_layer;
+ if (rgz_hwc_intersects(&hregions[i].rect, &layer->displayFrame)) {
+ int l = hregions[i].nlayers++;
+ hregions[i].rgz_layers[l] = &rgz_layers[j];
+ }
+ }
+ }
+
+ /* Calculate blit regions */
+ for (i = 0; i < rgz->nhregions; i++) {
+ rgz_gen_blitregions(&hregions[i], screen_width);
+ ALOGD_IF(debug, "hregion %3d: nsubregions %d", i, hregions[i].nsubregions);
+ ALOGD_IF(debug, " : %d to %d: ",
+ hregions[i].rect.top, hregions[i].rect.bottom);
+ for (j = 0; j < hregions[i].nlayers; j++)
+ ALOGD_IF(debug, " %p ", hregions[i].rgz_layers[j]->hwc_layer);
+ }
+ rgz->state |= RGZ_REGION_DATA;
+ return 0;
+}
+
+/*
+ * generate a human readable description of the layer
+ *
+ * idx, flags, fmt, type, sleft, stop, sright, sbot, dleft, dtop, \
+ * dright, dbot, rot, flip, blending, scalew, scaleh, visrects
+ *
+ */
+static void rgz_print_layer(hwc_layer_1_t *l, int idx, int csv)
+{
+ char big_log[1024];
+ int e = sizeof(big_log);
+ char *end = big_log + e;
+ e -= snprintf(end - e, e, "<!-- LAYER-DAT: %d", idx);
+
+
+ e -= snprintf(end - e, e, "%s %p", csv ? "," : " hndl:",
+ l->handle ? l->handle : NULL);
+
+ e -= snprintf(end - e, e, "%s %s", csv ? "," : " flags:",
+ l->flags & HWC_SKIP_LAYER ? "skip" : "none");
+
+ IMG_native_handle_t *handle = (IMG_native_handle_t *)l->handle;
+ if (handle) {
+ e -= snprintf(end - e, e, "%s", csv ? ", " : " fmt: ");
+ switch(handle->iFormat) {
+ case HAL_PIXEL_FORMAT_BGRA_8888:
+ e -= snprintf(end - e, e, "bgra"); break;
+ case HAL_PIXEL_FORMAT_RGB_565:
+ e -= snprintf(end - e, e, "rgb565"); break;
+ case HAL_PIXEL_FORMAT_BGRX_8888:
+ e -= snprintf(end - e, e, "bgrx"); break;
+ case HAL_PIXEL_FORMAT_RGBX_8888:
+ e -= snprintf(end - e, e, "rgbx"); break;
+ case HAL_PIXEL_FORMAT_RGBA_8888:
+ e -= snprintf(end - e, e, "rgba"); break;
+ case HAL_PIXEL_FORMAT_TI_NV12:
+ case HAL_PIXEL_FORMAT_TI_NV12_PADDED:
+ e -= snprintf(end - e, e, "nv12"); break;
+ default:
+ e -= snprintf(end - e, e, "unknown");
+ }
+ e -= snprintf(end - e, e, "%s", csv ? ", " : " type: ");
+ if (handle->usage & GRALLOC_USAGE_HW_RENDER)
+ e -= snprintf(end - e, e, "hw");
+ else if (handle->usage & GRALLOC_USAGE_SW_READ_MASK ||
+ handle->usage & GRALLOC_USAGE_SW_WRITE_MASK)
+ e -= snprintf(end - e, e, "sw");
+ else
+ e -= snprintf(end - e, e, "unknown");
+ } else {
+ e -= snprintf(end - e, e, csv ? ", unknown" : " fmt: unknown");
+ e -= snprintf(end - e, e, csv ? ", na" : " type: na");
+ }
+ e -= snprintf(end - e, e, csv ? ", %d, %d, %d, %d" : " src: %d %d %d %d",
+ l->sourceCrop.left, l->sourceCrop.top, l->sourceCrop.right,
+ l->sourceCrop.bottom);
+ e -= snprintf(end - e, e, csv ? ", %d, %d, %d, %d" : " disp: %d %d %d %d",
+ l->displayFrame.left, l->displayFrame.top,
+ l->displayFrame.right, l->displayFrame.bottom);
+
+ e -= snprintf(end - e, e, "%s %s", csv ? "," : " rot:",
+ l->transform & HWC_TRANSFORM_ROT_90 ? "90" :
+ l->transform & HWC_TRANSFORM_ROT_180 ? "180" :
+ l->transform & HWC_TRANSFORM_ROT_270 ? "270" : "none");
+
+ char flip[5] = "";
+ strcat(flip, l->transform & HWC_TRANSFORM_FLIP_H ? "H" : "");
+ strcat(flip, l->transform & HWC_TRANSFORM_FLIP_V ? "V" : "");
+ if (!(l->transform & (HWC_TRANSFORM_FLIP_V|HWC_TRANSFORM_FLIP_H)))
+ strcpy(flip, "none");
+ e -= snprintf(end - e, e, "%s %s", csv ? "," : " flip:", flip);
+
+ e -= snprintf(end - e, e, "%s %s", csv ? "," : " blending:",
+ l->blending == HWC_BLENDING_NONE ? "none" :
+ l->blending == HWC_BLENDING_PREMULT ? "premult" :
+ l->blending == HWC_BLENDING_COVERAGE ? "coverage" : "invalid");
+
+ e -= snprintf(end - e, e, "%s %1.3f", csv ? "," : " scalew:", getscalew(l));
+ e -= snprintf(end - e, e, "%s %1.3f", csv ? "," : " scaleh:", getscaleh(l));
+
+ e -= snprintf(end - e, e, "%s %d", csv ? "," : " visrect:",
+ l->visibleRegionScreen.numRects);
+
+ if (!csv) {
+ e -= snprintf(end - e, e, " -->");
+ OUTP("%s", big_log);
+
+ size_t i = 0;
+ for (; i < l->visibleRegionScreen.numRects; i++) {
+ hwc_rect_t const *r = &l->visibleRegionScreen.rects[i];
+ OUTP("<!-- LAYER-VIS: %d: rect: %d %d %d %d -->",
+ i, r->left, r->top, r->right, r->bottom);
+ }
+ } else {
+ size_t i = 0;
+ for (; i < l->visibleRegionScreen.numRects; i++) {
+ hwc_rect_t const *r = &l->visibleRegionScreen.rects[i];
+ e -= snprintf(end - e, e, ", %d, %d, %d, %d",
+ r->left, r->top, r->right, r->bottom);
+ }
+ e -= snprintf(end - e, e, " -->");
+ OUTP("%s", big_log);
+ }
+}
+
+static void rgz_print_layers(hwc_display_contents_1_t* list, int csv)
+{
+ size_t i;
+ for (i = 0; i < list->numHwLayers; i++) {
+ hwc_layer_1_t *l = &list->hwLayers[i];
+ rgz_print_layer(l, i, csv);
+ }
+}
+
+static int hal_to_ocd(int color)
+{
+ switch(color) {
+ case HAL_PIXEL_FORMAT_BGRA_8888:
+ return OCDFMT_BGRA24;
+ case HAL_PIXEL_FORMAT_BGRX_8888:
+ return OCDFMT_BGR124;
+ case HAL_PIXEL_FORMAT_RGB_565:
+ return OCDFMT_RGB16;
+ case HAL_PIXEL_FORMAT_RGBA_8888:
+ return OCDFMT_RGBA24;
+ case HAL_PIXEL_FORMAT_RGBX_8888:
+ return OCDFMT_RGB124;
+ case HAL_PIXEL_FORMAT_TI_NV12:
+ return OCDFMT_NV12;
+ case HAL_PIXEL_FORMAT_YV12:
+ return OCDFMT_YV12;
+ default:
+ return OCDFMT_UNKNOWN;
+ }
+}
+
+/*
+ * The loadbltsville fn is only needed for testing; the bltsville shared
+ * libraries aren't planned to be used directly in production code here
+ */
+static BVFN_MAP bv_map;
+static BVFN_BLT bv_blt;
+static BVFN_UNMAP bv_unmap;
+#ifndef RGZ_TEST_INTEGRATION
+gralloc_module_t const *gralloc;
+#endif
+#define BLTSVILLELIB "libbltsville_cpu.so"
+
+#ifdef RGZ_TEST_INTEGRATION
+static int loadbltsville(void)
+{
+ void *hndl = dlopen(BLTSVILLELIB, RTLD_LOCAL | RTLD_LAZY);
+ if (!hndl) {
+ OUTE("Loading bltsville failed");
+ return -1;
+ }
+ bv_map = (BVFN_MAP)dlsym(hndl, "bv_map");
+ bv_blt = (BVFN_BLT)dlsym(hndl, "bv_blt");
+ bv_unmap = (BVFN_UNMAP)dlsym(hndl, "bv_unmap");
+ if(!bv_blt || !bv_map || !bv_unmap) {
+ OUTE("Missing bltsville fn %p %p %p", bv_map, bv_blt, bv_unmap);
+ return -1;
+ }
+ OUTP("Loaded %s", BLTSVILLELIB);
+
+#ifndef RGZ_TEST_INTEGRATION
+ hw_module_t const* module;
+ int err = hw_get_module(GRALLOC_HARDWARE_MODULE_ID, &module);
+ if (err != 0) {
+ OUTE("Loading gralloc failed");
+ return -1;
+ }
+ gralloc = (gralloc_module_t const *)module;
+#endif
+ return 0;
+}
+#else
+static int loadbltsville(void) {
+ return 0;
+}
+#endif
+
+#ifndef RGZ_TEST_INTEGRATION
+static int rgz_handle_to_stride(IMG_native_handle_t *h)
+{
+ int bpp = is_NV12(h->iFormat) ? 0 : (h->iFormat == HAL_PIXEL_FORMAT_RGB_565 ? 2 : 4);
+ int stride = ALIGN(h->iWidth, HW_ALIGN) * bpp;
+ return stride;
+}
+
+#endif
+
+extern void BVDump(const char* prefix, const char* tab, const struct bvbltparams* parms);
+
+static int rgz_get_orientation(unsigned int transform)
+{
+ int orientation = 0;
+ if ((transform & HWC_TRANSFORM_FLIP_H) && (transform & HWC_TRANSFORM_FLIP_V))
+ orientation += 180;
+ if (transform & HWC_TRANSFORM_ROT_90)
+ orientation += 90;
+
+ return orientation;
+}
+
+static int rgz_get_flip_flags(unsigned int transform, int use_src2_flags)
+{
+ /*
+ * If both the vertical and horizontal flip flags are set, it means a 180
+ * degree rotation (with no flip) is intended for the layer, so we return 0
+ * in that case.
+ */
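+ /*
+ * Example (comment added for clarity): FLIP_H alone yields
+ * BVFLAG_HORZ_FLIP_SRC1, FLIP_V alone yields BVFLAG_VERT_FLIP_SRC1 (or the
+ * SRC2 variants when use_src2_flags is set), and FLIP_H | FLIP_V yields 0
+ * since that combination means a 180 degree rotation.
+ */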
+ int flip_flags = 0;
+ if (transform & HWC_TRANSFORM_FLIP_H)
+ flip_flags |= (use_src2_flags ? BVFLAG_HORZ_FLIP_SRC2 : BVFLAG_HORZ_FLIP_SRC1);
+ if (transform & HWC_TRANSFORM_FLIP_V)
+ flip_flags = flip_flags ? 0 : flip_flags | (use_src2_flags ? BVFLAG_VERT_FLIP_SRC2 : BVFLAG_VERT_FLIP_SRC1);
+ return flip_flags;
+}
+
+static int rgz_hwc_layer_blit(rgz_out_params_t *params, rgz_layer_t *rgz_layer)
+{
+ static int loaded = 0;
+ if (!loaded)
+ loaded = loadbltsville() ? : 1; /* attempt load once */
+
+ hwc_layer_1_t* layer = rgz_layer->hwc_layer;
+ blit_rect_t srcregion;
+ rgz_get_displayframe_rect(layer, &srcregion);
+
+ int noblend = rgz_is_blending_disabled(params);
+ if (!noblend && layer->blending == HWC_BLENDING_PREMULT)
+ rgz_hwc_subregion_blend(params, &srcregion, rgz_layer, NULL);
+ else
+ rgz_hwc_subregion_copy(params, &srcregion, rgz_layer);
+
+ return 0;
+}
+
+/*
+ * Calculate the src rectangle on the basis of the layer display frame, source
+ * crop and subregion rectangles. Additionally, any rotation will be taken into
+ * account. The resulting rectangle is written to res_rect.
+ */
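+/*
+ * Example (comment added for clarity, numbers are hypothetical): for an
+ * unscaled, unrotated layer with displayFrame.left = 100 and
+ * sourceCrop.left = 10, a subregion starting at x = 120 gives
+ * delta_left = 20 and therefore res_rect->left = 10 + 20 = 30.
+ */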
+static void rgz_get_src_rect(hwc_layer_1_t* layer, blit_rect_t *subregion_rect, blit_rect_t *res_rect)
+{
+ IMG_native_handle_t *handle = (IMG_native_handle_t *)layer->handle;
+ int res_left = 0;
+ int res_top = 0;
+ int delta_left;
+ int delta_top;
+ int res_width;
+ int res_height;
+
+ /*
+ * If the layer is scaled we use the whole cropping rectangle from the
+ * source and just move the clipping rectangle for the region we want to
+ * blit; this is done to prevent any artifacts when blitting subregions of
+ * a scaled layer. If there is a transform, adjust the width and height
+ * accordingly to match the rotated buffer geometry.
+ */
+ if (rgz_hwc_scaled(layer)) {
+ delta_top = 0;
+ delta_left = 0;
+ res_width = WIDTH(layer->sourceCrop);
+ res_height = HEIGHT(layer->sourceCrop);
+ if (layer->transform & HAL_TRANSFORM_ROT_90)
+ swap(res_width , res_height);
+ } else {
+ delta_top = subregion_rect->top - layer->displayFrame.top;
+ delta_left = subregion_rect->left - layer->displayFrame.left;
+ res_width = WIDTH(*subregion_rect);
+ res_height = HEIGHT(*subregion_rect);
+ }
+
+ /*
+ * Calculate the top, left offset from the source cropping rectangle
+ * depending on the rotation
+ */
+ switch(layer->transform) {
+ case 0:
+ res_left = layer->sourceCrop.left + delta_left;
+ res_top = layer->sourceCrop.top + delta_top;
+ break;
+ case HAL_TRANSFORM_ROT_90:
+ res_left = handle->iHeight - layer->sourceCrop.bottom + delta_left;
+ res_top = layer->sourceCrop.left + delta_top;
+ break;
+ case HAL_TRANSFORM_ROT_180:
+ res_left = handle->iWidth - layer->sourceCrop.right + delta_left;
+ res_top = handle->iHeight - layer->sourceCrop.bottom + delta_top;
+ break;
+ case HAL_TRANSFORM_ROT_270:
+ res_left = layer->sourceCrop.top + delta_left;
+ res_top = handle->iWidth - layer->sourceCrop.right + delta_top;
+ break;
+ default:
+ OUTE("Invalid transform value %d", layer->transform);
+ }
+
+ /* Resulting rectangle has the subregion dimensions */
+ res_rect->left = res_left;
+ res_rect->top = res_top;
+ res_rect->right = res_left + res_width;
+ res_rect->bottom = res_top + res_height;
+}
+
+static void rgz_batch_entry(struct rgz_blt_entry* e, unsigned int flag, unsigned int set)
+{
+ e->bp.flags &= ~BVFLAG_BATCH_MASK;
+ e->bp.flags |= flag;
+ e->bp.batchflags |= set;
+}
+
+static int rgz_hwc_subregion_blit(blit_hregion_t *hregion, int sidx, rgz_out_params_t *params)
+{
+ static int loaded = 0;
+ if (!loaded)
+ loaded = loadbltsville() ? : 1; /* attempt load once */
+
+ int lix;
+ int ldepth = get_layer_ops(hregion, sidx, &lix);
+ if (ldepth == 0) {
+ /* This should be impossible: there are no layers in this region even
+ * though the background is covering the whole screen
+ */
+ OUTE("hregion %p subregion %d doesn't have any ops", hregion, sidx);
+ return -1;
+ }
+
+ /* Determine if this region is dirty */
+ int dirty = 0, dirtylix = lix;
+ while (dirtylix != -1) {
+ rgz_layer_t *rgz_layer = hregion->rgz_layers[dirtylix];
+ if (rgz_layer->dirty_count){
+ /* One of the layers is dirty, we need to generate blits for this subregion */
+ dirty = 1;
+ break;
+ }
+ dirtylix = get_layer_ops_next(hregion, sidx, dirtylix);
+ }
+
+ if (!dirty)
+ return 0;
+
+ /* Check if the bottom layer is the background */
+ if (hregion->rgz_layers[lix]->hwc_layer == &bg_layer) {
+ if (ldepth == 1) {
+ /* Background layer is the only operation, clear subregion */
+ rgz_out_clrdst(params, &hregion->blitrects[lix][sidx]);
+ return 0;
+ } else {
+ /* No need to generate blits with the background layer if there is
+ * another layer on top of it; discard it
+ */
+ ldepth--;
+ lix = get_layer_ops_next(hregion, sidx, lix);
+ }
+ }
+
+ /*
+ * See if the bottom-most layer needs to be ignored. If this layer is the
+ * only operation, we need to clear this subregion.
+ */
+ if (hregion->rgz_layers[lix]->buffidx == -1) {
+ ldepth--;
+ if (!ldepth) {
+ rgz_out_clrdst(params, &hregion->blitrects[lix][sidx]);
+ return 0;
+ }
+ lix = get_layer_ops_next(hregion, sidx, lix);
+ }
+
+ int noblend = rgz_is_blending_disabled(params);
+
+ if (!noblend && ldepth > 1) { /* BLEND */
+ blit_rect_t *rect = &hregion->blitrects[lix][sidx];
+ struct rgz_blt_entry* e;
+
+ int s2lix = lix;
+ lix = get_layer_ops_next(hregion, sidx, lix);
+
+ /*
+ * We save a read and a write from the FB if we blend the bottom
+ * two layers; we can only do this if neither layer is scaled
+ */
+ int first_batchflags = 0;
+ if (!rgz_hwc_scaled(hregion->rgz_layers[lix]->hwc_layer) &&
+ !rgz_hwc_scaled(hregion->rgz_layers[s2lix]->hwc_layer)) {
+ e = rgz_hwc_subregion_blend(params, rect, hregion->rgz_layers[lix],
+ hregion->rgz_layers[s2lix]);
+ first_batchflags |= BVBATCH_SRC2;
+ } else {
+ /* Return index to the first operation and make a copy of the first layer */
+ lix = s2lix;
+ e = rgz_hwc_subregion_copy(params, rect, hregion->rgz_layers[lix]);
+ first_batchflags |= BVBATCH_OP | BVBATCH_SRC2;
+ }
+ rgz_batch_entry(e, BVFLAG_BATCH_BEGIN, 0);
+
+ /* Rest of layers blended with FB */
+ int first = 1;
+ while((lix = get_layer_ops_next(hregion, sidx, lix)) != -1) {
+ int batchflags = 0;
+ e = rgz_hwc_subregion_blend(params, rect, hregion->rgz_layers[lix], NULL);
+ if (first) {
+ first = 0;
+ batchflags |= first_batchflags;
+ }
+ /*
+ * TODO: This will work when scaling is introduced; however, we need
+ * to think of a better way to optimize this.
+ */
+ batchflags |= BVBATCH_SRC1 | BVBATCH_SRC1RECT_ORIGIN| BVBATCH_SRC1RECT_SIZE |
+ BVBATCH_DSTRECT_ORIGIN | BVBATCH_DSTRECT_SIZE | BVBATCH_SRC2RECT_ORIGIN |
+ BVBATCH_SRC2RECT_SIZE | BVBATCH_SCALE;
+ rgz_batch_entry(e, BVFLAG_BATCH_CONTINUE, batchflags);
+ }
+
+ if (e->bp.flags & BVFLAG_BATCH_BEGIN)
+ rgz_batch_entry(e, 0, 0);
+ else
+ rgz_batch_entry(e, BVFLAG_BATCH_END, 0);
+
+ } else { /* COPY */
+ blit_rect_t *rect = &hregion->blitrects[lix][sidx];
+ if (noblend) /* get_layer_ops() doesn't understand this so get the top */
+ lix = get_top_rect(hregion, sidx, &rect);
+ rgz_hwc_subregion_copy(params, rect, hregion->rgz_layers[lix]);
+ }
+ return 0;
+}
+
+struct bvbuffdesc gscrndesc = {
+ .structsize = sizeof(struct bvbuffdesc), .length = 0,
+ .auxptr = MAP_FAILED
+};
+struct bvsurfgeom gscrngeom = {
+ .structsize = sizeof(struct bvsurfgeom), .format = OCDFMT_UNKNOWN
+};
+
+static void rgz_blts_init(struct rgz_blts *blts)
+{
+ bzero(blts, sizeof(*blts));
+}
+
+static void rgz_blts_free(struct rgz_blts *blts)
+{
+ /* TODO ??? maybe we should dynamically allocate this */
+ rgz_blts_init(blts);
+}
+
+static struct rgz_blt_entry* rgz_blts_get(struct rgz_blts *blts, rgz_out_params_t *params)
+{
+ struct rgz_blt_entry *ne;
+ if (blts->idx < RGZ_MAX_BLITS) {
+ ne = &blts->bvcmds[blts->idx++];
+ if (IS_BVCMD(params))
+ params->data.bvc.out_blits++;
+ } else {
+ OUTE("!!! BIG PROBLEM !!! run out of blit entries");
+ ne = &blts->bvcmds[blts->idx - 1]; /* Return last slot */
+ }
+ return ne;
+}
+
+static int rgz_blts_bvdirect(rgz_t *rgz, struct rgz_blts *blts, rgz_out_params_t *params)
+{
+ struct bvbatch *batch = NULL;
+ int rv = -1;
+ int idx = 0;
+
+ while (idx < blts->idx) {
+ struct rgz_blt_entry *e = &blts->bvcmds[idx];
+ if (e->bp.flags & BVFLAG_BATCH_MASK)
+ e->bp.batch = batch;
+ rv = bv_blt(&e->bp);
+ if (rv) {
+ OUTE("BV_BLT failed: %d", rv);
+ BVDUMP("bv_blt:", " ", &e->bp);
+ return -1;
+ }
+ if (e->bp.flags & BVFLAG_BATCH_BEGIN)
+ batch = e->bp.batch;
+ idx++;
+ }
+ return rv;
+}
+
+static int rgz_out_region(rgz_t *rgz, rgz_out_params_t *params)
+{
+ if (!(rgz->state & RGZ_REGION_DATA)) {
+ OUTE("rgz_out_region invoked with bad state");
+ return -1;
+ }
+
+ rgz_blts_init(&blts);
+ ALOGD_IF(debug, "rgz_out_region:");
+
+ if (IS_BVCMD(params))
+ params->data.bvc.out_blits = 0;
+
+ int i;
+ for (i = 0; i < rgz->nhregions; i++) {
+ blit_hregion_t *hregion = &rgz->hregions[i];
+ int s;
+ ALOGD_IF(debug, "h[%d] nsubregions = %d", i, hregion->nsubregions);
+ if (hregion->nlayers == 0) {
+ /* This should be impossible: there are no layers in this region even
+ * though the background is covering the whole screen
+ */
+ OUTE("hregion %p doesn't have any ops", hregion);
+ return -1;
+ }
+ for (s = 0; s < hregion->nsubregions; s++) {
+ ALOGD_IF(debug, "h[%d] -> [%d]", i, s);
+ if (rgz_hwc_subregion_blit(hregion, s, params))
+ return -1;
+ }
+ }
+
+ int rv = 0;
+
+ if (IS_BVCMD(params)) {
+ unsigned int j;
+ params->data.bvc.out_nhndls = 0;
+ /* Begin from index 1 to remove the background layer from the output */
+ for (j = 1, i = 0; j < rgz->rgz_layerno; j++) {
+ rgz_layer_t *rgz_layer = &rgz->rgz_layers[j];
+ /* We don't need the handles for layers marked as -1 */
+ if (rgz_layer->buffidx == -1)
+ continue;
+ hwc_layer_1_t *layer = rgz_layer->hwc_layer;
+ params->data.bvc.out_hndls[i++] = layer->handle;
+ params->data.bvc.out_nhndls++;
+ }
+
+ if (blts.idx > 0) {
+ /* Last blit is made sync to act like a fence for the previous async blits */
+ struct rgz_blt_entry* e = &blts.bvcmds[blts.idx-1];
+ rgz_set_async(e, 0);
+ }
+
+ /* FIXME: we want to be able to call rgz_blts_free and populate the actual
+ * composition data structure ourselves */
+ params->data.bvc.cmdp = blts.bvcmds;
+ params->data.bvc.cmdlen = blts.idx;
+ if (params->data.bvc.out_blits >= RGZ_MAX_BLITS)
+ rv = -1;
+ //rgz_blts_free(&blts);
+ } else {
+ rv = rgz_blts_bvdirect(rgz, &blts, params);
+ rgz_blts_free(&blts);
+ }
+
+ return rv;
+}
+
+void rgz_profile_hwc(hwc_display_contents_1_t* list, int dispw, int disph)
+{
+ if (!list) /* A NULL composition list can occur */
+ return;
+
+#ifndef RGZ_TEST_INTEGRATION
+ static char regiondump2[PROPERTY_VALUE_MAX] = "";
+ char regiondump[PROPERTY_VALUE_MAX];
+ property_get("debug.2dhwc.region", regiondump, "0");
+ int dumpregions = strncmp(regiondump, regiondump2, PROPERTY_VALUE_MAX);
+ if (dumpregions)
+ strncpy(regiondump2, regiondump, PROPERTY_VALUE_MAX);
+ else {
+ dumpregions = !strncmp(regiondump, "all", PROPERTY_VALUE_MAX) &&
+ (list->flags & HWC_GEOMETRY_CHANGED);
+ static int iteration = 0;
+ if (dumpregions)
+ sprintf(regiondump, "iteration %d", iteration++);
+ }
+
+ char dumplayerdata[PROPERTY_VALUE_MAX];
+ /* 0 - off, 1 - human readable, 2 - CSV */
+ property_get("debug.2dhwc.dumplayers", dumplayerdata, "0");
+ int dumplayers = atoi(dumplayerdata);
+#else
+ char regiondump[] = "";
+ int dumplayers = 1;
+ int dumpregions = 0;
+#endif
+ if (dumplayers && (list->flags & HWC_GEOMETRY_CHANGED)) {
+ OUTP("<!-- BEGUN-LAYER-DUMP: %d -->", list->numHwLayers);
+ rgz_print_layers(list, dumplayers == 1 ? 0 : 1);
+ OUTP("<!-- ENDED-LAYER-DUMP -->");
+ }
+
+ if(!dumpregions)
+ return;
+
+ rgz_t rgz;
+ rgz_in_params_t ip = { .data = { .hwc = {
+ .layers = list->hwLayers,
+ .layerno = list->numHwLayers } } };
+ ip.op = RGZ_IN_HWCCHK;
+ if (rgz_in(&ip, &rgz) == RGZ_ALL) {
+ ip.op = RGZ_IN_HWC;
+ if (rgz_in(&ip, &rgz) == RGZ_ALL) {
+ OUTP("<!-- BEGUN-SVG-DUMP: %s -->", regiondump);
+ OUTP("<b>%s</b>", regiondump);
+ rgz_out_params_t op = {
+ .op = RGZ_OUT_SVG,
+ .data = {
+ .svg = {
+ .dispw = dispw, .disph = disph,
+ .htmlw = 450, .htmlh = 800
+ }
+ },
+ };
+ rgz_out(&rgz, &op);
+ OUTP("<!-- ENDED-SVG-DUMP -->");
+ }
+ }
+ rgz_release(&rgz);
+}
+
+int rgz_get_screengeometry(int fd, struct bvsurfgeom *geom, int fmt)
+{
+ /* Populate Bltsville destination buffer information with framebuffer data */
+ struct fb_fix_screeninfo fb_fixinfo;
+ struct fb_var_screeninfo fb_varinfo;
+
+ ALOGI("Attempting to get framebuffer device info.");
+ if(ioctl(fd, FBIOGET_FSCREENINFO, &fb_fixinfo)) {
+ OUTE("Error getting fb_fixinfo");
+ return -EINVAL;
+ }
+
+ if(ioctl(fd, FBIOGET_VSCREENINFO, &fb_varinfo)) {
+ ALOGE("Error gettting fb_varinfo");
+ return -EINVAL;
+ }
+
+ bzero(&bg_layer, sizeof(bg_layer));
+ bg_layer.displayFrame.left = bg_layer.displayFrame.top = 0;
+ bg_layer.displayFrame.right = fb_varinfo.xres;
+ bg_layer.displayFrame.bottom = fb_varinfo.yres;
+
+ bzero(geom, sizeof(*geom));
+ geom->structsize = sizeof(*geom);
+ geom->width = fb_varinfo.xres;
+ geom->height = fb_varinfo.yres;
+ geom->virtstride = fb_fixinfo.line_length;
+ geom->format = hal_to_ocd(fmt);
+ /* Always set to 0, src buffers will contain rotation values as needed */
+ geom->orientation = 0;
+ return 0;
+}
+
+int rgz_in(rgz_in_params_t *p, rgz_t *rgz)
+{
+ int rv = -1;
+ switch (p->op) {
+ case RGZ_IN_HWC:
+ rv = rgz_in_hwccheck(p, rgz);
+ if (rv == RGZ_ALL)
+ rv = rgz_in_hwc(p, rgz) ? 0 : RGZ_ALL;
+ break;
+ case RGZ_IN_HWCCHK:
+ bzero(rgz, sizeof(rgz_t));
+ rv = rgz_in_hwccheck(p, rgz);
+ break;
+ default:
+ return -1;
+ }
+ return rv;
+}
+
+void rgz_release(rgz_t *rgz)
+{
+ if (!rgz)
+ return;
+ if (rgz->hregions)
+ free(rgz->hregions);
+ bzero(rgz, sizeof(*rgz));
+}
+
+int rgz_out(rgz_t *rgz, rgz_out_params_t *params)
+{
+ switch (params->op) {
+ case RGZ_OUT_SVG:
+ rgz_out_svg(rgz, params);
+ return 0;
+ case RGZ_OUT_BVDIRECT_PAINT:
+ return rgz_out_bvdirect_paint(rgz, params);
+ case RGZ_OUT_BVCMD_PAINT:
+ return rgz_out_bvcmd_paint(rgz, params);
+ case RGZ_OUT_BVDIRECT_REGION:
+ case RGZ_OUT_BVCMD_REGION:
+ return rgz_out_region(rgz, params);
+ default:
+ return -1;
+ }
+}
diff --git a/hwc/rgz_2d.h b/hwc/rgz_2d.h
new file mode 100644
index 0000000..cf751ee
--- /dev/null
+++ b/hwc/rgz_2d.h
@@ -0,0 +1,290 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#ifndef __RGZ_2D__
+#define __RGZ_2D__
+
+#include <linux/bltsville.h>
+
+/*
+ * Maximum number of layers used to generate subregion rectangles in a
+ * horizontal region.
+ */
+#define RGZ_MAXLAYERS 13
+
+/*
+ * Maximum number of layers the regionizer will accept as input. Account for an
+ * additional 'background layer' to generate empty subregion rectangles.
+ */
+#define RGZ_INPUT_MAXLAYERS (RGZ_MAXLAYERS - 1)
+
+/*
+ * Regionizer data
+ *
+ * This is an opaque structure passed in by the client
+ */
+struct rgz;
+typedef struct rgz rgz_t;
+
+/*
+ * With an open framebuffer file descriptor get the geometry of
+ * the device
+ */
+int rgz_get_screengeometry(int fd, struct bvsurfgeom *geom, int fmt);
+
+/*
+ * Regionizer input parameters
+ */
+struct rgz_in_hwc {
+ int flags;
+ int layerno;
+ hwc_layer_1_t *layers;
+ struct bvsurfgeom *dstgeom;
+};
+
+typedef struct rgz_in_params {
+ int op; /* See RGZ_IN_* */
+ union {
+ struct rgz_in_hwc hwc;
+ } data;
+} rgz_in_params_t;
+
+/*
+ * Validate whether the HWC layers can be rendered
+ *
+ * Arguments (rgz_in_params_t):
+ * op RGZ_IN_HWCCHK
+ * data.hwc.layers HWC layer array
+ * data.hwc.layerno HWC layer array size
+ *
+ * Returns:
+ * rv = RGZ_ALL, -1 failure
+ */
+#define RGZ_IN_HWCCHK 1
+
+/*
+ * Regionize the HWC layers
+ *
+ * This generates region data which can be used with the regionizer
+ * output function. This call will validate whether all or some of the
+ * layers can be rendered.
+ *
+ * The caller must use rgz_release when done with the region data
+ *
+ * Arguments (rgz_in_params_t):
+ * op RGZ_IN_HWC
+ * data.hwc.layers HWC layer array
+ * data.hwc.layerno HWC layer array size
+ *
+ * Returns:
+ * rv = RGZ_ALL, -1 failure
+ */
+#define RGZ_IN_HWC 2
+
+int rgz_in(rgz_in_params_t *param, rgz_t *rgz);
+
+/* This means all layers can be blitted */
+#define RGZ_ALL 1
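+
+/*
+ * Typical usage (sketch based on rgz_profile_hwc in rgz_2d.c, where list is a
+ * hwc_display_contents_1_t): validate the layer list with RGZ_IN_HWCCHK
+ * first, then regionize with RGZ_IN_HWC. data.hwc.dstgeom must describe the
+ * destination geometry for the RGZ_IN_HWC step.
+ *
+ *   rgz_t rgz;
+ *   rgz_in_params_t ip = { .data = { .hwc = {
+ *       .layers = list->hwLayers,
+ *       .layerno = list->numHwLayers } } };
+ *   ip.op = RGZ_IN_HWCCHK;
+ *   if (rgz_in(&ip, &rgz) == RGZ_ALL) {
+ *       ip.op = RGZ_IN_HWC;
+ *       if (rgz_in(&ip, &rgz) == RGZ_ALL) {
+ *           ... region data is ready for rgz_out() ...
+ *       }
+ *   }
+ *   rgz_release(&rgz);
+ */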
+
+/*
+ * Free regionizer resources
+ */
+void rgz_release(rgz_t *rgz);
+
+/*
+ * Regionizer output operations
+ */
+struct rgz_out_bvcmd {
+ void *cmdp;
+ int cmdlen;
+ struct bvsurfgeom *dstgeom;
+ int noblend;
+ buffer_handle_t out_hndls[RGZ_INPUT_MAXLAYERS]; /* OUTPUT */
+ int out_nhndls; /* OUTPUT */
+ int out_blits; /* OUTPUT */
+};
+
+struct rgz_out_svg {
+ int dispw;
+ int disph;
+ int htmlw;
+ int htmlh;
+};
+
+struct rgz_out_bvdirect {
+ struct bvbuffdesc *dstdesc;
+ struct bvsurfgeom *dstgeom;
+ int noblend;
+};
+
+typedef struct rgz_out_params {
+ int op; /* See RGZ_OUT_* */
+ union {
+ struct rgz_out_bvcmd bvc;
+ struct rgz_out_bvdirect bv;
+ struct rgz_out_svg svg;
+ } data;
+} rgz_out_params_t;
+
+/*
+ * Regionizer output commands
+ */
+
+/*
+ * Output SVG from regionizer
+ *
+ * rgz_out_params_t:
+ *
+ * op RGZ_OUT_SVG
+ * data.svg.dispw
+ * data.svg.disph Display width and height; these values will be the
+ * viewport dimensions, i.e. the logical coordinate space
+ * rather than the physical size
+ * data.svg.htmlw
+ * data.svg.htmlh HTML output dimensions
+ */
+#define RGZ_OUT_SVG 0
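+
+/*
+ * Example (sketch mirroring the call in rgz_profile_hwc):
+ *
+ *   rgz_out_params_t op = {
+ *       .op = RGZ_OUT_SVG,
+ *       .data = { .svg = { .dispw = dispw, .disph = disph,
+ *                          .htmlw = 450, .htmlh = 800 } },
+ *   };
+ *   rgz_out(&rgz, &op);
+ */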
+
+/*
+ * This command generates bltsville command data structures for HWC which will
+ * paint layer by layer
+ *
+ * rgz_out_params_t:
+ *
+ * op RGZ_OUT_BVCMD_PAINT
+ * data.bvc.cmdp Pointer to buffer with cmd data
+ * data.bvc.cmdlen length of cmdp
+ * data.bvc.dstgeom bltsville struct describing the destination geometry
+ * data.bvc.noblend Test option to disable blending
+ * data.bvc.out_hndls Array of buffer handles (OUTPUT)
+ * data.bvc.out_nhndls Number of buffer handles (OUTPUT)
+ * data.bvc.out_blits Number of blits (OUTPUT)
+ */
+#define RGZ_OUT_BVCMD_PAINT 1
+
+/*
+ * This command generates bltsville command data structures for HWC which will
+ * render via regions. This will involve a complete redraw of the screen.
+ *
+ * See RGZ_OUT_BVCMD_PAINT
+ */
+#define RGZ_OUT_BVCMD_REGION 2
+
+/*
+ * Perform actual blits painting each layer from back to front - this is a test
+ * command
+ *
+ * rgz_out_params_t:
+ *
+ * op RGZ_OUT_BVDIRECT_PAINT
+ * data.bv.dstdesc bltsville struct describing the destination buffer
+ * data.bv.dstgeom bltsville struct describing the destination geometry
+ * data.bv.list List of HWC layers to blit, only HWC_OVERLAY layers
+ * will be rendered
+ * data.bv.noblend Test option to disable blending
+ */
+#define RGZ_OUT_BVDIRECT_PAINT 3
+/*
+ * Perform actual blits where each blit is a subregion - this is a test mode
+ */
+#define RGZ_OUT_BVDIRECT_REGION 5
+
+int rgz_out(rgz_t *rgz, rgz_out_params_t* params);
+
+/*
+ * Produce instrumented logging of layer data
+ */
+void rgz_profile_hwc(hwc_display_contents_1_t* list, int dispw, int disph);
+
+/*
+ * ----------------------------------
+ * IMPLEMENTATION DETAILS FOLLOW HERE
+ * ----------------------------------
+ */
+
+/*
+ * Regionizer blit data structures
+ */
+typedef struct blit_rect {
+ int left, top, right, bottom;
+} blit_rect_t;
+
+/*
+ * An hregion is a horizontal area generated from the intersection of layers
+ * for a given composition.
+ *
+ * ----------------------------------------
+ * | layer 0 |
+ * | xxxxxxxxxxxxxxxxxx |
+ * | x layer 1 x |
+ * | x x |
+ * | x xxxxxxxxxxxxxxxxxxx
+ * | x x layer 2 x
+ * | x x x
+ * | xxxxxxxxxx x
+ * | x x
+ * | x x
+ * ---------------------xxxxxxxxxxxxxxxxxxx
+ *
+ * This can be broken up into a number of horizontal regions:
+ *
+ * ----------------------------------------
+ * | H1 l0 |
+ * |-----------xxxxxxxxxxxxxxxxxx---------|
+ * | H2 x x |
+ * | l0 x l01 x l0 |
+ * |-----------x--------xxxxxxxxxxxxxxxxxxx
+ * | H3 x x x x
+ * | l0 x l01 x l012 x l02 x
+ * |-----------xxxxxxxxxxxxxxxxxx---------x
+ * | H4 x x
+ * | l0 x l02 x
+ * ---------------------xxxxxxxxxxxxxxxxxxx
+ *
+ * Each hregion is just an array of rectangles. By accounting for the layers
+ * at different z-orders, an hregion becomes a multi-dimensional array, e.g. in
+ * the diagram above H4 has 2 sub-regions; layer 0 intersects with the first
+ * region and layers 0 and 2 intersect with the second region.
+ */
+#define RGZ_SUBREGIONMAX ((RGZ_MAXLAYERS << 1) - 1)
+#define RGZ_MAX_BLITS (RGZ_SUBREGIONMAX * RGZ_SUBREGIONMAX)
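+/*
+ * Note (comment added for clarity): with RGZ_MAXLAYERS = 13 the values above
+ * work out to RGZ_SUBREGIONMAX = 25 and RGZ_MAX_BLITS = 625.
+ */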
+
+typedef struct rgz_layer {
+ hwc_layer_1_t *hwc_layer;
+ int buffidx;
+ int dirty_count;
+ void* dirty_hndl;
+} rgz_layer_t;
+
+typedef struct blit_hregion {
+ blit_rect_t rect;
+ rgz_layer_t *rgz_layers[RGZ_MAXLAYERS];
+ int nlayers;
+ int nsubregions;
+ blit_rect_t blitrects[RGZ_MAXLAYERS][RGZ_SUBREGIONMAX]; /* z-order | rectangle */
+} blit_hregion_t;
+
+enum { RGZ_STATE_INIT = 1, RGZ_REGION_DATA = 2} ;
+
+struct rgz {
+ /* All fields here are opaque to the caller */
+ blit_hregion_t *hregions;
+ int nhregions;
+ int state;
+ unsigned int rgz_layerno;
+ rgz_layer_t rgz_layers[RGZ_MAXLAYERS];
+};
+
+#endif /* __RGZ_2D__ */
diff --git a/ion/Android.mk b/ion/Android.mk
index 58fc9f9..3f36032 100644
--- a/ion/Android.mk
+++ b/ion/Android.mk
@@ -4,10 +4,30 @@ ifeq ($(TARGET_BOARD_PLATFORM),omap4)
LOCAL_PATH:= $(call my-dir)
include $(CLEAR_VARS)
+LOCAL_C_INCLUDES += $(HARDWARE_TI_OMAP4_BASE)/kernel-headers-ti
+LOCAL_C_INCLUDES += $(HARDWARE_TI_OMAP4_BASE)/system-core-headers-ti
LOCAL_SRC_FILES := ion.c
LOCAL_MODULE := libion_ti
LOCAL_MODULE_TAGS := optional
LOCAL_SHARED_LIBRARIES := liblog
include $(BUILD_HEAPTRACKED_SHARED_LIBRARY)
+include $(CLEAR_VARS)
+LOCAL_C_INCLUDES += $(HARDWARE_TI_OMAP4_BASE)/kernel-headers-ti
+LOCAL_C_INCLUDES += $(HARDWARE_TI_OMAP4_BASE)/system-core-headers-ti
+LOCAL_SRC_FILES := ion.c ion_test.c
+LOCAL_MODULE := iontest_ti
+LOCAL_MODULE_TAGS := optional tests
+LOCAL_SHARED_LIBRARIES := liblog
+include $(BUILD_HEAPTRACKED_SHARED_LIBRARY)
+
+include $(CLEAR_VARS)
+LOCAL_C_INCLUDES += $(HARDWARE_TI_OMAP4_BASE)/kernel-headers-ti
+LOCAL_C_INCLUDES += $(HARDWARE_TI_OMAP4_BASE)/system-core-headers-ti
+LOCAL_SRC_FILES := ion.c ion_test_2.c
+LOCAL_MODULE := iontest2_ti
+LOCAL_MODULE_TAGS := optional tests
+LOCAL_SHARED_LIBRARIES := liblog
+include $(BUILD_HEAPTRACKED_SHARED_LIBRARY)
+
endif
diff --git a/ion/ion.c b/ion/ion.c
index 54579db..2ecf697 100644
--- a/ion/ion.c
+++ b/ion/ion.c
@@ -17,6 +17,9 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
+#define LOG_TAG "ion"
+
+#include <cutils/log.h>
#include <errno.h>
#include <fcntl.h>
#include <stdio.h>
@@ -24,12 +27,8 @@
#include <sys/mman.h>
#include <sys/types.h>
-#define LOG_TAG "ion"
-#include <cutils/log.h>
-
-#include "linux_ion.h"
-#include "omap_ion.h"
-#include "ion.h"
+#include <linux/ion.h>
+#include <ion/ion.h>
int ion_open()
{
@@ -55,8 +54,8 @@ static int ion_ioctl(int fd, int req, void *arg)
return ret;
}
-int ion_alloc(int fd, size_t len, size_t align, unsigned int flags,
- struct ion_handle **handle)
+int ion_alloc(int fd, size_t len, size_t align,
+ unsigned int flags, struct ion_handle **handle)
{
int ret;
struct ion_allocation_data data = {
@@ -81,6 +80,8 @@ int ion_alloc_tiler(int fd, size_t w, size_t h, int fmt, unsigned int flags,
.h = h,
.fmt = fmt,
.flags = flags,
+ .out_align = PAGE_SIZE,
+ .token = 0,
};
struct ion_custom_data custom_data = {
@@ -154,3 +155,58 @@ int ion_import(int fd, int share_fd, struct ion_handle **handle)
*handle = data.handle;
return ret;
}
+
+#if 0
+int ion_sync_fd(int fd, int handle_fd)
+{
+ struct ion_fd_data data = {
+ .fd = handle_fd,
+ };
+ return ion_ioctl(fd, ION_IOC_SYNC, &data);
+}
+#endif
+
+int ion_map_cacheable(int fd, struct ion_handle *handle, size_t length, int prot,
+ int flags, off_t offset, unsigned char **ptr, int *map_fd)
+{
+ struct ion_fd_data data = {
+ .handle = handle,
+ .cacheable = 1,
+ };
+ int ret = ion_ioctl(fd, ION_IOC_MAP, &data);
+ if (ret < 0)
+ return ret;
+ *map_fd = data.fd;
+ if (*map_fd < 0) {
+ ALOGE("map ioctl returned negative fd\n");
+ return -EINVAL;
+ }
+ *ptr = mmap(NULL, length, prot, flags, *map_fd, offset);
+ if (*ptr == MAP_FAILED) {
+ ALOGE("mmap failed: %s\n", strerror(errno));
+ return -errno;
+ }
+ return ret;
+}
+
+int ion_flush_cached(int fd, struct ion_handle *handle, size_t length,
+ unsigned char *ptr)
+{
+ struct ion_cached_user_buf_data data = {
+ .handle = handle,
+ .vaddr = (unsigned long)ptr,
+ .size = length,
+ };
+ return ion_ioctl(fd, ION_IOC_FLUSH_CACHED, &data);
+}
+
+int ion_inval_cached(int fd, struct ion_handle *handle, size_t length,
+ unsigned char *ptr)
+{
+ struct ion_cached_user_buf_data data = {
+ .handle = handle,
+ .vaddr = (unsigned long)ptr,
+ .size = length,
+ };
+ return ion_ioctl(fd, ION_IOC_INVAL_CACHED, &data);
+}
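
Note on the cache-aware helpers added above: ion_map_cacheable(), ion_flush_cached() and ion_inval_cached() are not exercised by the test programs added below. The following is a minimal usage sketch only, assuming these prototypes are exported through <ion/ion.h> alongside the existing helpers; the function name write_then_flush and the 0xA5 fill pattern are illustrative and not part of this change.

/* Hedged sketch: allocate a buffer, map it cacheable, write through the CPU
 * cache, then flush the dirty range before handing the buffer to a device. */
#include <string.h>
#include <sys/mman.h>
#include <unistd.h>

#include <ion/ion.h>

static int write_then_flush(size_t len)
{
	struct ion_handle *handle;
	unsigned char *ptr;
	int fd, map_fd, ret;

	fd = ion_open();
	if (fd < 0)
		return fd;

	ret = ion_alloc(fd, len, 0, 0, &handle);
	if (ret)
		goto out_close;

	ret = ion_map_cacheable(fd, handle, len, PROT_READ | PROT_WRITE,
				MAP_SHARED, 0, &ptr, &map_fd);
	if (ret)
		goto out_free;

	memset(ptr, 0xA5, len);				/* CPU writes land in the cache */
	ret = ion_flush_cached(fd, handle, len, ptr);	/* push them out for the device */

	munmap(ptr, len);
	close(map_fd);
out_free:
	ion_free(fd, handle);
out_close:
	ion_close(fd);
	return ret;
}
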
diff --git a/ion/ion_test.c b/ion/ion_test.c
new file mode 100644
index 0000000..945062f
--- /dev/null
+++ b/ion/ion_test.c
@@ -0,0 +1,323 @@
+#include <errno.h>
+#include <fcntl.h>
+#include <getopt.h>
+#include <string.h>
+#include <stdlib.h>
+#include <stdio.h>
+#include <sys/mman.h>
+#include <sys/ioctl.h>
+#include <sys/socket.h>
+#include <sys/stat.h>
+#include <sys/types.h>
+#include <unistd.h>
+
+#include <ion/ion.h>
+#include <linux/ion.h>
+#include <linux/omap_ion.h>
+
+size_t len = 1024*1024, align = 0;
+int prot = PROT_READ | PROT_WRITE;
+int map_flags = MAP_SHARED;
+int alloc_flags = 0;
+int heap_mask = 1;
+int test = -1;
+size_t width = 1024*1024, height = 1024*1024;
+int fmt = TILER_PIXEL_FMT_32BIT;
+int tiler_test = 0;
+size_t stride;
+
+int _ion_alloc_test(int *fd, struct ion_handle **handle)
+{
+ int ret;
+
+ *fd = ion_open();
+ if (*fd < 0)
+ return *fd;
+
+ if (tiler_test)
+ ret = ion_alloc_tiler(*fd, width, height, fmt, alloc_flags,
+ handle, &stride);
+ else
+ ret = ion_alloc(*fd, len, align, alloc_flags, handle);
+
+ if (ret)
+ printf("%s failed: %s\n", __func__, strerror(ret));
+ return ret;
+}
+
+void ion_alloc_test()
+{
+ int fd, ret;
+ struct ion_handle *handle;
+
+ if(_ion_alloc_test(&fd, &handle))
+ return;
+
+ ret = ion_free(fd, handle);
+ if (ret) {
+ printf("%s failed: %s %p\n", __func__, strerror(ret), handle);
+ return;
+ }
+ ion_close(fd);
+ printf("ion alloc test: passed\n");
+}
+
+void _ion_tiler_map_test(unsigned char *ptr)
+{
+ size_t row, col;
+
+ for (row = 0; row < height; row++)
+ for (col = 0; col < width; col++) {
+ int i = (row * stride) + col;
+ ptr[i] = (unsigned char)i;
+ }
+ for (row = 0; row < height; row++)
+ for (col = 0; col < width; col++) {
+ int i = (row * stride) + col;
+ if (ptr[i] != (unsigned char)i)
+ printf("%s failed wrote %d read %d from mapped "
+ "memory\n", __func__, i, ptr[i]);
+ }
+}
+
+
+void ion_map_test()
+{
+ int fd, map_fd, ret;
+ size_t i;
+ struct ion_handle *handle;
+ unsigned char *ptr;
+
+ if(_ion_alloc_test(&fd, &handle))
+ return;
+
+ if (tiler_test)
+ len = height * stride;
+ ret = ion_map(fd, handle, len, prot, map_flags, 0, &ptr, &map_fd);
+ if (ret)
+ return;
+
+ if (tiler_test)
+ _ion_tiler_map_test(ptr);
+ else {
+ for (i = 0; i < len; i++) {
+ ptr[i] = (unsigned char)i;
+ }
+ for (i = 0; i < len; i++)
+ if (ptr[i] != (unsigned char)i)
+ printf("%s failed wrote %d read %d from mapped "
+ "memory\n", __func__, i, ptr[i]);
+ }
+ /* clean up properly */
+ ret = ion_free(fd, handle);
+ ion_close(fd);
+ munmap(ptr, len);
+ close(map_fd);
+
+ _ion_alloc_test(&fd, &handle);
+ close(fd);
+
+#if 0
+ munmap(ptr, len);
+ close(map_fd);
+ ion_close(fd);
+
+ _ion_alloc_test(len, align, flags, &fd, &handle);
+ close(map_fd);
+ ret = ion_map(fd, handle, len, prot, flags, 0, &ptr, &map_fd);
+ /* don't clean up */
+#endif
+}
+
+void ion_share_test()
+
+{
+ struct ion_handle *handle;
+ int sd[2];
+ int num_fd = 1;
+ struct iovec count_vec = {
+ .iov_base = &num_fd,
+ .iov_len = sizeof num_fd,
+ };
+ char buf[CMSG_SPACE(sizeof(int))];
+ socketpair(AF_UNIX, SOCK_STREAM, 0, sd);
+ if (fork()) {
+ struct msghdr msg = {
+ .msg_control = buf,
+ .msg_controllen = sizeof buf,
+ .msg_iov = &count_vec,
+ .msg_iovlen = 1,
+ };
+
+ struct cmsghdr *cmsg;
+ int fd, share_fd, ret;
+ char *ptr;
+ /* parent */
+ if(_ion_alloc_test(&fd, &handle))
+ return;
+ ret = ion_share(fd, handle, &share_fd);
+ if (ret)
+ printf("share failed %s\n", strerror(errno));
+ ptr = mmap(NULL, len, prot, map_flags, share_fd, 0);
+ if (ptr == MAP_FAILED) {
+ return;
+ }
+ strcpy(ptr, "master");
+ cmsg = CMSG_FIRSTHDR(&msg);
+ cmsg->cmsg_level = SOL_SOCKET;
+ cmsg->cmsg_type = SCM_RIGHTS;
+ cmsg->cmsg_len = CMSG_LEN(sizeof(int));
+ *(int *)CMSG_DATA(cmsg) = share_fd;
+ /* send the fd */
+ printf("master? [%10s] should be [master]\n", ptr);
+ printf("master sending msg 1\n");
+ sendmsg(sd[0], &msg, 0);
+ if (recvmsg(sd[0], &msg, 0) < 0)
+ perror("master recv msg 2");
+ printf("master? [%10s] should be [child]\n", ptr);
+
+ /* send ping */
+ sendmsg(sd[0], &msg, 0);
+ printf("master->master? [%10s]\n", ptr);
+ if (recvmsg(sd[0], &msg, 0) < 0)
+ perror("master recv 1");
+ } else {
+ struct msghdr msg;
+ struct cmsghdr *cmsg;
+ char* ptr;
+ int fd, recv_fd;
+ char* child_buf[100];
+ /* child */
+ struct iovec count_vec = {
+ .iov_base = child_buf,
+ .iov_len = sizeof child_buf,
+ };
+
+ struct msghdr child_msg = {
+ .msg_control = buf,
+ .msg_controllen = sizeof buf,
+ .msg_iov = &count_vec,
+ .msg_iovlen = 1,
+ };
+
+ if (recvmsg(sd[1], &child_msg, 0) < 0)
+ perror("child recv msg 1");
+ cmsg = CMSG_FIRSTHDR(&child_msg);
+ if (cmsg == NULL) {
+ printf("no cmsg rcvd in child");
+ return;
+ }
+ recv_fd = *(int*)CMSG_DATA(cmsg);
+ if (recv_fd < 0) {
+ printf("could not get recv_fd from socket");
+ return;
+ }
+ printf("child %d\n", recv_fd);
+ fd = ion_open();
+ ptr = mmap(NULL, len, prot, map_flags, recv_fd, 0);
+ if (ptr == MAP_FAILED) {
+ return;
+ }
+ printf("child? [%10s] should be [master]\n", ptr);
+ strcpy(ptr, "child");
+ printf("child sending msg 2\n");
+ sendmsg(sd[1], &child_msg, 0);
+ }
+}
+
+int main(int argc, char* argv[]) {
+ int c;
+ enum tests {
+ ALLOC_TEST = 0, MAP_TEST, SHARE_TEST,
+ };
+
+ while (1) {
+ static struct option opts[] = {
+ {"alloc", no_argument, 0, 'a'},
+ {"alloc_flags", required_argument, 0, 'f'},
+ {"heap_mask", required_argument, 0, 'h'},
+ {"map", no_argument, 0, 'm'},
+ {"share", no_argument, 0, 's'},
+ {"len", required_argument, 0, 'l'},
+ {"align", required_argument, 0, 'g'},
+ {"map_flags", required_argument, 0, 'z'},
+ {"prot", required_argument, 0, 'p'},
+ {"alloc_tiler", no_argument, 0, 't'},
+ {"width", required_argument, 0, 'w'},
+ {"height", required_argument, 0, 'h'},
+ {"fmt", required_argument, 0, 'r'},
+ };
+ int i = 0;
+ c = getopt_long(argc, argv, "af:h:l:mr:st", opts, &i);
+ if (c == -1)
+ break;
+
+ switch (c) {
+ case 'l':
+ len = atol(optarg);
+ break;
+ case 'g':
+ align = atol(optarg);
+ break;
+ case 'z':
+ map_flags = 0;
+ map_flags |= strstr(optarg, "PROT_EXEC") ?
+ PROT_EXEC : 0;
+ map_flags |= strstr(optarg, "PROT_READ") ?
+ PROT_READ: 0;
+ map_flags |= strstr(optarg, "PROT_WRITE") ?
+ PROT_WRITE: 0;
+ map_flags |= strstr(optarg, "PROT_NONE") ?
+ PROT_NONE: 0;
+ break;
+ case 'p':
+ prot = 0;
+ prot |= strstr(optarg, "MAP_PRIVATE") ?
+ MAP_PRIVATE : 0;
+ prot |= strstr(optarg, "MAP_SHARED") ?
+					MAP_SHARED : 0;
+ break;
+ case 'f':
+ alloc_flags = atol(optarg);
+ break;
+ case 'a':
+ test = ALLOC_TEST;
+ break;
+ case 'm':
+ test = MAP_TEST;
+ break;
+ case 'r':
+ fmt = atol(optarg);
+ break;
+ case 's':
+ test = SHARE_TEST;
+ break;
+ case 'w':
+ width = atol(optarg);
+ break;
+ case 'h':
+ height = atol(optarg);
+ break;
+ case 't':
+ tiler_test = 1;
+ break;
+ }
+ }
+ printf("test %d, len %u, width %u, height %u fmt %u align %u, "
+ "map_flags %d, prot %d, alloc_flags %d\n", test, len, width,
+ height, fmt, align, map_flags, prot, alloc_flags);
+ switch (test) {
+ case ALLOC_TEST:
+ ion_alloc_test();
+ break;
+ case MAP_TEST:
+ ion_map_test();
+ break;
+ case SHARE_TEST:
+ ion_share_test();
+ break;
+ default:
+ printf("must specify a test (alloc, map, share)\n");
+ }
+ return 0;
+}
diff --git a/ion/ion_test_2.c b/ion/ion_test_2.c
new file mode 100644
index 0000000..9d7ae46
--- /dev/null
+++ b/ion/ion_test_2.c
@@ -0,0 +1,481 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/*
+ * Test cases for the ION memory allocator module
+ */
+
+#include <errno.h>
+#include <fcntl.h>
+#include <getopt.h>
+#include <string.h>
+#include <stdlib.h>
+#include <stdio.h>
+#include <sys/mman.h>
+#include <sys/ioctl.h>
+#include <sys/socket.h>
+#include <sys/stat.h>
+#include <sys/types.h>
+#include <unistd.h>
+
+#include <ion/ion.h>
+#include <linux/ion.h>
+#include <linux/omap_ion.h>
+
+size_t len = 1024*1024, align = 0;
+int prot = PROT_READ | PROT_WRITE;
+int map_flags = MAP_SHARED;
+int alloc_flags = 0;
+int test = -1;
+size_t width = 1024*1024, height = 1024*1024;
+int fmt = TILER_PIXEL_FMT_32BIT;
+int tiler_test = 0;
+size_t stride;
+
+int _ion_alloc_test(int fd, struct ion_handle **handle)
+{
+ int ret;
+
+ if (tiler_test)
+ ret = ion_alloc_tiler(fd, width, height, fmt, alloc_flags,
+ handle, &stride);
+ else
+ ret = ion_alloc(fd, len, align, alloc_flags, handle);
+
+ if (ret)
+ printf("%s() failed: %s\n", __func__, strerror(ret));
+ return ret;
+}
+
+int ion_alloc_test(int count)
+{
+ int fd, ret = 0, i, count_alloc;
+ struct ion_handle **handle;
+
+ fd = ion_open();
+ if (fd < 0) {
+ printf("%s(): FAILED to open ion device\n", __func__);
+ return -1;
+ }
+
+ handle = (struct ion_handle **)malloc(count * sizeof(struct ion_handle *));
+ if(handle == NULL) {
+ printf("%s() : FAILED to allocate memory for ion_handles\n", __func__);
+ return -ENOMEM;
+ }
+
+ /* Allocate ion_handles */
+ count_alloc = count;
+ for(i = 0; i < count; i++) {
+ ret = _ion_alloc_test(fd, &(handle[i]));
+ printf("%s(): Alloc handle[%d]=%p\n", __func__, i, handle[i]);
+ if(ret || ((int)handle[i] == -ENOMEM)) {
+ printf("%s(): Alloc handle[%d]=%p FAILED, err:%s\n",
+ __func__, i, handle[i], strerror(ret));
+ count_alloc = i;
+ goto err_alloc;
+ }
+ }
+
+ err_alloc:
+ /* Free ion_handles */
+ for (i = 0; i < count_alloc; i++) {
+ printf("%s(): Free handle[%d]=%p\n", __func__, i, handle[i]);
+ ret = ion_free(fd, handle[i]);
+ if (ret) {
+ printf("%s(): Free handle[%d]=%p FAILED, err:%s\n",
+ __func__, i, handle[i], strerror(ret));
+ }
+ }
+
+ ion_close(fd);
+ free(handle);
+ handle = NULL;
+
+ if(ret || (count_alloc != count)) {
+ printf("\nion alloc test: FAILED\n\n");
+ if(count_alloc != count)
+ ret = -ENOMEM;
+ }
+ else
+ printf("\nion alloc test: PASSED\n\n");
+
+ return ret;
+}
+
+void _ion_tiler_map_test(unsigned char *ptr)
+{
+ size_t row, col;
+
+ for (row = 0; row < height; row++)
+ for (col = 0; col < width; col++) {
+ int i = (row * stride) + col;
+ ptr[i] = (unsigned char)i;
+ }
+ for (row = 0; row < height; row++)
+ for (col = 0; col < width; col++) {
+ int i = (row * stride) + col;
+ if (ptr[i] != (unsigned char)i)
+ printf("%s(): FAILED, wrote %d read %d from mapped "
+ "memory\n", __func__, i, ptr[i]);
+ }
+}
+
+void _ion_map_test(unsigned char *ptr)
+{
+ size_t i;
+
+ for (i = 0; i < len; i++) {
+ ptr[i] = (unsigned char)i;
+ }
+ for (i = 0; i < len; i++) {
+ if (ptr[i] != (unsigned char)i)
+ printf("%s(): failed wrote %d read %d from mapped "
+ "memory\n", __func__, i, ptr[i]);
+ }
+}
+
+int ion_map_test(int count)
+{
+ int fd, ret = 0, i, count_alloc, count_map;
+ struct ion_handle **handle;
+ unsigned char **ptr;
+ int *map_fd;
+
+ fd = ion_open();
+ if (fd < 0) {
+ printf("%s(): FAILED to open ion device\n", __func__);
+ return -1;
+ }
+
+ handle = (struct ion_handle **)malloc(count * sizeof(struct ion_handle *));
+ if(handle == NULL) {
+ printf("%s(): FAILED to allocate memory for ion_handles\n", __func__);
+ return -ENOMEM;
+ }
+
+ count_alloc = count;
+ count_map = count;
+
+ /* Allocate ion_handles */
+ for(i = 0; i < count; i++) {
+ ret = _ion_alloc_test(fd, &(handle[i]));
+ printf("%s(): Alloc handle[%d]=%p\n", __func__, i, handle[i]);
+ if(ret || ((int)handle[i] == -ENOMEM)) {
+ printf("%s(): Alloc handle[%d]=%p FAILED, err:%s\n",
+ __func__, i, handle[i], strerror(ret));
+ count_alloc = i;
+ goto err_alloc;
+ }
+ }
+
+ /* Map ion_handles and validate */
+ if (tiler_test)
+ len = height * stride;
+
+ ptr = (unsigned char **)malloc(count * sizeof(unsigned char **));
+ map_fd = (int *)malloc(count * sizeof(int *));
+
+ for(i = 0; i < count; i++) {
+ /* Map ion_handle on userside */
+ ret = ion_map(fd, handle[i], len, prot, map_flags, 0, &(ptr[i]), &(map_fd[i]));
+ printf("%s(): Map handle[%d]=%p, map_fd=%d, ptr=%p\n",
+ __func__, i, handle[i], map_fd[i], ptr[i]);
+ if(ret) {
+ printf("%s Map handle[%d]=%p FAILED, err:%s\n",
+ __func__, i, handle[i], strerror(ret));
+ count_map = i;
+ goto err_map;
+ }
+
+ /* Validate mapping by writing the data and reading it back */
+ if (tiler_test)
+ _ion_tiler_map_test(ptr[i]);
+ else
+ _ion_map_test(ptr[i]);
+ }
+
+ /* clean up properly */
+ err_map:
+ for(i = 0; i < count_map; i++) {
+ /* Unmap ion_handles */
+ ret = munmap(ptr[i], len);
+ printf("%s(): Unmap handle[%d]=%p, map_fd=%d, ptr=%p\n",
+ __func__, i, handle[i], map_fd[i], ptr[i]);
+ if(ret) {
+ printf("%s(): Unmap handle[%d]=%p FAILED, err:%s\n",
+ __func__, i, handle[i], strerror(ret));
+ goto err_map;
+ }
+ /* Close fds */
+ close(map_fd[i]);
+ }
+ free(map_fd);
+ free(ptr);
+
+ err_alloc:
+ /* Free ion_handles */
+ for (i = 0; i < count_alloc; i++) {
+ printf("%s(): Free handle[%d]=%p\n", __func__, i, handle[i]);
+ ret = ion_free(fd, handle[i]);
+ if (ret) {
+ printf("%s(): Free handle[%d]=%p FAILED, err:%s\n",
+ __func__, i, handle[i], strerror(ret));
+ }
+ }
+
+ ion_close(fd);
+ free(handle);
+ handle = NULL;
+
+ if(ret || (count_alloc != count) || (count_map != count))
+ {
+ printf("\nion map test: FAILED\n\n");
+ if((count_alloc != count) || (count_map != count))
+ ret = -ENOMEM;
+ } else
+ printf("\nion map test: PASSED\n");
+
+ return ret;
+}
+
+/**
+ * Keep allocating buffers of the specified size and type until allocation fails,
+ * then free 10 buffers and allocate 10 buffers again.
+ */
+int ion_alloc_fail_alloc_test()
+{
+ int fd, ret = 0, i;
+ struct ion_handle **handle;
+ const int COUNT_ALLOC_MAX = 200;
+ const int COUNT_REALLOC_MAX = 10;
+ int count_alloc = COUNT_ALLOC_MAX, count_realloc = COUNT_ALLOC_MAX;
+
+ fd = ion_open();
+ if (fd < 0) {
+ printf("%s(): FAILED to open ion device\n", __func__);
+ return -1;
+ }
+
+ handle = (struct ion_handle **)malloc(COUNT_ALLOC_MAX * sizeof(struct ion_handle *));
+ if(handle == NULL) {
+ printf("%s(): FAILED to allocate memory for ion_handles\n", __func__);
+ return -ENOMEM;
+ }
+
+ /* Allocate ion_handles as much as possible */
+ for(i = 0; i < COUNT_ALLOC_MAX; i++) {
+ ret = _ion_alloc_test(fd, &(handle[i]));
+ printf("%s(): Alloc handle[%d]=%p\n", __func__, i, handle[i]);
+ if(ret || ((int)handle[i] == -ENOMEM)) {
+ printf("%s(): Alloc handle[%d]=%p FAILED, err:%s\n\n",
+ __func__, i, handle[i], strerror(ret));
+ count_alloc = i;
+ break;
+ }
+ }
+
+ /* Free COUNT_REALLOC_MAX ion_handles */
+ for (i = count_alloc-1; i > (count_alloc-1 - COUNT_REALLOC_MAX); i--) {
+ printf("%s(): Free handle[%d]=%p\n", __func__, i, handle[i]);
+ ret = ion_free(fd, handle[i]);
+ if (ret) {
+ printf("%s(): Free handle[%d]=%p FAILED, err:%s\n\n",
+ __func__, i, handle[i], strerror(ret));
+ }
+ }
+
+ /* Again allocate COUNT_REALLOC_MAX ion_handles to test
+ that we are still able to allocate */
+ for(i = (count_alloc - COUNT_REALLOC_MAX); i < count_alloc; i++) {
+ ret = _ion_alloc_test(fd, &(handle[i]));
+ printf("%s(): Alloc handle[%d]=%p\n", __func__, i, handle[i]);
+ if(ret || ((int)handle[i] == -ENOMEM)) {
+ printf("%s(): Alloc handle[%d]=%p FAILED, err:%s\n\n",
+ __func__, i, handle[i], strerror(ret));
+ count_realloc = i;
+ goto err_alloc;
+ }
+ }
+ count_realloc = i;
+
+ err_alloc:
+ /* Free all ion_handles */
+ for (i = 0; i < count_alloc; i++) {
+ printf("%s(): Free handle[%d]=%p\n", __func__, i, handle[i]);
+ ret = ion_free(fd, handle[i]);
+ if (ret) {
+ printf("%s(): Free handle[%d]=%p FAILED, err:%s\n",
+ __func__, i, handle[i], strerror(ret));
+ }
+ }
+
+ ion_close(fd);
+ free(handle);
+ handle = NULL;
+
+ printf("\ncount_alloc=%d, count_realloc=%d\n",count_alloc, count_realloc);
+
+ if(ret || (count_alloc != count_realloc)) {
+ printf("\nion alloc->fail->alloc test: FAILED\n\n");
+ if(count_alloc != COUNT_ALLOC_MAX)
+ ret = -ENOMEM;
+ }
+ else
+ printf("\nion alloc->fail->alloc test: PASSED\n\n");
+
+ return ret;
+}
+
+int custom_test(int test_number)
+{
+ switch(test_number) {
+ case 1 :
+ return ion_alloc_fail_alloc_test();
+ default :
+ printf("%s(): Invalid custom_test_number=%d\n", __func__, test_number);
+ return -EINVAL;
+ }
+}
+
+int main(int argc, char* argv[]) {
+ int c, ret;
+ unsigned int count = 1, iteration = 1, j, custom_test_num = 1;
+ enum tests {
+ ALLOC_TEST = 0, MAP_TEST, CUSTOM_TEST,
+ };
+
+ while (1) {
+ static struct option opts[] = {
+ {"alloc", no_argument, 0, 'a'},
+ {"alloc_flags", required_argument, 0, 'f'},
+ {"map", no_argument, 0, 'm'},
+ {"custom", required_argument, 0, 'c'},
+ {"len", required_argument, 0, 'l'},
+ {"align", required_argument, 0, 'g'},
+ {"map_flags", required_argument, 0, 'z'},
+ {"prot", required_argument, 0, 'p'},
+ {"alloc_tiler", no_argument, 0, 't'},
+ {"width", required_argument, 0, 'w'},
+ {"height", required_argument, 0, 'h'},
+ {"fmt", required_argument, 0, 'r'},
+ {"count", required_argument, 0, 'n'},
+ {"iteration", required_argument, 0, 'i'},
+ };
+ int i = 0;
+ c = getopt_long(argc, argv, "af:h:l:mr:stw:c:n:i:", opts, &i);
+ if (c == -1)
+ break;
+
+ switch (c) {
+ case 'l':
+ len = atol(optarg);
+ break;
+ case 'g':
+ align = atol(optarg);
+ break;
+ case 'z':
+ map_flags = 0;
+ map_flags |= strstr(optarg, "PROT_EXEC") ?
+ PROT_EXEC : 0;
+ map_flags |= strstr(optarg, "PROT_READ") ?
+ PROT_READ: 0;
+ map_flags |= strstr(optarg, "PROT_WRITE") ?
+ PROT_WRITE: 0;
+ map_flags |= strstr(optarg, "PROT_NONE") ?
+ PROT_NONE: 0;
+ break;
+ case 'p':
+ prot = 0;
+ prot |= strstr(optarg, "MAP_PRIVATE") ?
+ MAP_PRIVATE : 0;
+ prot |= strstr(optarg, "MAP_SHARED") ?
+					MAP_SHARED : 0;
+ break;
+ case 'f':
+ alloc_flags = atol(optarg);
+ break;
+ case 'a':
+ test = ALLOC_TEST;
+ break;
+ case 'm':
+ test = MAP_TEST;
+ break;
+ case 'c':
+ test = CUSTOM_TEST;
+ printf("KALP : Case 'c'\n");
+ custom_test_num = atol(optarg);
+ break;
+ case 'r':
+ fmt = atol(optarg);
+ break;
+ case 'w':
+ width = atol(optarg);
+ break;
+ case 'h':
+ height = atol(optarg);
+ break;
+ case 't':
+ tiler_test = 1;
+ break;
+ case 'n':
+ printf("KALP : Case 'n'\n");
+ count = atol(optarg);
+ break;
+ case 'i':
+ printf("KALP : Case 'i'\n");
+ iteration = atol(optarg);
+ break;
+ }
+ }
+ printf("test %d, len %u, width %u, height %u, fmt %u, align %u, count %d, "
+ "iteration %d, map_flags %d, prot %d, alloc_flags %d\n", test, len, width,
+ height, fmt, align, count, iteration, map_flags, prot, alloc_flags);
+
+ switch (test) {
+ case ALLOC_TEST:
+ for(j = 0; j < iteration; j++) {
+ ret = ion_alloc_test(count);
+ if(ret) {
+ printf("\nion alloc test: FAILED at iteration-%d\n", j+1);
+ break;
+ }
+ }
+ break;
+
+ case MAP_TEST:
+ for(j = 0; j < iteration; j++) {
+ ret = ion_map_test(count);
+ if(ret) {
+ printf("\nion map test: FAILED at iteration-%d\n", j+1);
+ break;
+ }
+ }
+ break;
+
+ case CUSTOM_TEST:
+ ret = custom_test(custom_test_num);
+ if(ret) {
+ printf("\nion custom test #%d: FAILED\n", custom_test_num);
+ }
+ break;
+
+ default:
+ printf("must specify a test (alloc, map, custom)\n");
+ }
+
+ return 0;
+}
diff --git a/kernel-headers-ti/linux/bltsville.h b/kernel-headers-ti/linux/bltsville.h
new file mode 100644
index 0000000..aba3df1
--- /dev/null
+++ b/kernel-headers-ti/linux/bltsville.h
@@ -0,0 +1,520 @@
+/****************************************************************************
+ ****************************************************************************
+ ***
+ *** This header was automatically generated from a Linux kernel header
+ *** of the same name, to make information necessary for userspace to
+ *** call into the kernel available to libc. It contains only constants,
+ *** structures, and macros generated from the original header, and thus,
+ *** contains no copyrightable information.
+ ***
+ *** To edit the content of this header, modify the corresponding
+ *** source file (e.g. under external/kernel-headers/original/) then
+ *** run bionic/libc/kernel/tools/update_all.py
+ ***
+ *** Any manual change here will be lost the next time this script will
+ *** be run. You've been warned!
+ ***
+ ****************************************************************************
+ ****************************************************************************/
+#ifndef BLTSVILLE_H
+#define BLTSVILLE_H
+#include "ocd.h"
+#include "bverror.h"
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#include "bvblend.h"
+#include "bvfilter.h"
+#include "bvbuffdesc.h"
+#include "bvcache.h"
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#include "bventry.h"
+#include "bvsurfgeom.h"
+struct bvrect {
+ int left;
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ int top;
+ unsigned int width;
+ unsigned int height;
+};
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define BVFLAG_OP_SHIFT 0
+#define BVFLAG_OP_MASK (0xF << BVFLAG_OP_SHIFT)
+#define BVFLAG_ROP (0x1 << BVFLAG_OP_SHIFT)
+#define BVFLAG_BLEND (0x2 << BVFLAG_OP_SHIFT)
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define BVFLAG_FILTER (0x4 << BVFLAG_OP_SHIFT)
+#define BVFLAG_KEY_SRC 0x00000010
+#define BVFLAG_KEY_DST 0x00000020
+#define BVFLAG_CLIP 0x00000040
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define BVFLAG_SRCMASK 0x00000080
+#define BVFLAG_ASYNC 0x00000100
+#define BVFLAG_TILE_SRC1 0x00000200
+#define BVFLAG_TILE_SRC2 0x00000400
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define BVFLAG_TILE_MASK 0x00000800
+#define BVFLAG_BATCH_SHIFT 12
+#define BVFLAG_BATCH_MASK (3 << BVFLAG_BATCH_SHIFT)
+#define BVFLAG_BATCH_NONE (0 << BVFLAG_BATCH_SHIFT)
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define BVFLAG_BATCH_BEGIN (1 << BVFLAG_BATCH_SHIFT)
+#define BVFLAG_BATCH_CONTINUE (2 << BVFLAG_BATCH_SHIFT)
+#define BVFLAG_BATCH_END (3 << BVFLAG_BATCH_SHIFT)
+#define BVFLAG_HORZ_FLIP_SRC1 0x00004000
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define BVFLAG_VERT_FLIP_SRC1 0x00008000
+#define BVFLAG_HORZ_FLIP_SRC2 0x00010000
+#define BVFLAG_VERT_FLIP_SRC2 0x00020000
+#define BVFLAG_HORZ_FLIP_MASK 0x00040000
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define BVFLAG_VERT_FLIP_MASK 0x00080000
+#define BVFLAG_SCALE_RETURN 0x00100000
+#define BVFLAG_DITHER_RETURN 0x00200000
+#define BVIMPL_ANY 0
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define BVIMPL_FIRST_HW (1 << 31)
+#define BVIMPL_FIRST_CPU (1 << 0)
+#define BVSCALEDEF_VENDOR_SHIFT 24
+#define BVSCALEDEF_VENDOR_MASK (0xFF << BVSCALEDEF_VENDOR_SHIFT)
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define BVSCALEDEF_VENDOR_ALL (0 << BVSCALEDEF_VENDOR_SHIFT)
+#define BVSCALEDEF_VENDOR_TI (1 << BVSCALEDEF_VENDOR_SHIFT)
+#define BVSCALEDEF_VENDOR_GENERIC (0xFF << BVSCALEDEF_VENDOR_SHIFT)
+#define BVSCALEDEF_CLASS_SHIFT 22
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define BVSCALEDEF_IMPLICIT (0 << BVSCALEDEF_CLASS_SHIFT)
+#define BVSCALEDEF_EXPLICIT (1 << BVSCALEDEF_CLASS_SHIFT)
+#define BVSCALEDEF_CLASS_MASK (3 << BVSCALEDEF_CLASS_SHIFT)
+#define BVSCALEDEF_QUALITY_SHIFT 16
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define BVSCALEDEF_FASTEST (0x00 << BVSCALEDEF_QUALITY_SHIFT)
+#define BVSCALEDEF_GOOD (0x15 << BVSCALEDEF_QUALITY_SHIFT)
+#define BVSCALEDEF_BETTER (0x2A << BVSCALEDEF_QUALITY_SHIFT)
+#define BVSCALEDEF_BEST (0x3F << BVSCALEDEF_QUALITY_SHIFT)
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define BVSCALEDEF_QUALITY_MASK (0x3F << BVSCALEDEF_QUALITY_SHIFT)
+#define BVSCALEDEF_TECHNIQUE_SHIFT 8
+#define BVSCALEDEF_DONT_CARE (0x0 << BVSCALEDEF_TECHNIQUE_SHIFT)
+#define BVSCALEDEF_NOT_NEAREST_NEIGHBOR (0x1 << BVSCALEDEF_TECHNIQUE_SHIFT)
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define BVSCALEDEF_POINT_SAMPLE (0x2 << BVSCALEDEF_TECHNIQUE_SHIFT)
+#define BVSCALEDEF_INTERPOLATED (0x3 << BVSCALEDEF_TECHNIQUE_SHIFT)
+#define BVSCALEDEF_TECHNIQUE_MASK (0xF << BVSCALEDEF_TECHNIQUE_SHIFT)
+#define BVSCALEDEF_TYPE_SHIFT 0
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define BVSCALEDEF_PHOTO (1 << BVSCALEDEF_TYPE_SHIFT)
+#define BVSCALEDEF_DRAWING (2 << BVSCALEDEF_TYPE_SHIFT)
+#define BVSCALEDEF_TYPE_MASK (3 << BVSCALEDEF_TYPE_SHIFT)
+#define BVSCALEDEF_HORZ_SHIFT 8
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define BVSCALEDEF_HORZ_MASK (0xFF << BVSCALEDEF_HORZ_SHIFT)
+#define BVSCALEDEF_VERT_SHIFT 0
+#define BVSCALEDEF_VERT_MASK (0xFF << BVSCALEDEF_VERT_SHIFT)
+#define BVSCALEDEF_NEAREST_NEIGHBOR 0x00
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define BVSCALEDEF_LINEAR 0x01
+#define BVSCALEDEF_CUBIC 0x02
+#define BVSCALEDEF_3_TAP 0x03
+#define BVSCALEDEF_5_TAP 0x05
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define BVSCALEDEF_7_TAP 0x07
+#define BVSCALEDEF_9_TAP 0x09
+enum bvscalemode {
+ BVSCALE_FASTEST = BVSCALEDEF_VENDOR_ALL |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVSCALEDEF_IMPLICIT |
+ BVSCALEDEF_FASTEST |
+ BVSCALEDEF_DONT_CARE,
+ BVSCALE_FASTEST_NOT_NEAREST_NEIGHBOR = BVSCALEDEF_VENDOR_ALL |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVSCALEDEF_IMPLICIT |
+ BVSCALEDEF_FASTEST |
+ BVSCALEDEF_NOT_NEAREST_NEIGHBOR,
+ BVSCALE_FASTEST_POINT_SAMPLE = BVSCALEDEF_VENDOR_ALL |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVSCALEDEF_IMPLICIT |
+ BVSCALEDEF_FASTEST |
+ BVSCALEDEF_POINT_SAMPLE,
+ BVSCALE_FASTEST_INTERPOLATED = BVSCALEDEF_VENDOR_ALL |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVSCALEDEF_IMPLICIT |
+ BVSCALEDEF_FASTEST |
+ BVSCALEDEF_INTERPOLATED,
+ BVSCALE_FASTEST_PHOTO = BVSCALEDEF_VENDOR_ALL |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVSCALEDEF_IMPLICIT |
+ BVSCALEDEF_FASTEST |
+ BVSCALEDEF_PHOTO,
+ BVSCALE_FASTEST_DRAWING = BVSCALEDEF_VENDOR_ALL |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVSCALEDEF_IMPLICIT |
+ BVSCALEDEF_FASTEST |
+ BVSCALEDEF_DRAWING,
+ BVSCALE_GOOD = BVSCALEDEF_VENDOR_ALL |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVSCALEDEF_IMPLICIT |
+ BVSCALEDEF_GOOD |
+ BVSCALEDEF_DONT_CARE,
+ BVSCALE_GOOD_POINT_SAMPLE = BVSCALEDEF_VENDOR_ALL |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVSCALEDEF_IMPLICIT |
+ BVSCALEDEF_GOOD |
+ BVSCALEDEF_POINT_SAMPLE,
+ BVSCALE_GOOD_INTERPOLATED = BVSCALEDEF_VENDOR_ALL |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVSCALEDEF_IMPLICIT |
+ BVSCALEDEF_GOOD |
+ BVSCALEDEF_INTERPOLATED,
+ BVSCALE_GOOD_PHOTO = BVSCALEDEF_VENDOR_ALL |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVSCALEDEF_IMPLICIT |
+ BVSCALEDEF_GOOD |
+ BVSCALEDEF_PHOTO,
+ BVSCALE_GOOD_DRAWING = BVSCALEDEF_VENDOR_ALL |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVSCALEDEF_IMPLICIT |
+ BVSCALEDEF_GOOD |
+ BVSCALEDEF_DRAWING,
+ BVSCALE_BETTER = BVSCALEDEF_VENDOR_ALL |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVSCALEDEF_IMPLICIT |
+ BVSCALEDEF_BETTER |
+ BVSCALEDEF_DONT_CARE,
+ BVSCALE_BETTER_POINT_SAMPLE = BVSCALEDEF_VENDOR_ALL |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVSCALEDEF_IMPLICIT |
+ BVSCALEDEF_BETTER |
+ BVSCALEDEF_POINT_SAMPLE,
+ BVSCALE_BETTER_INTERPOLATED = BVSCALEDEF_VENDOR_ALL |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVSCALEDEF_IMPLICIT |
+ BVSCALEDEF_BETTER |
+ BVSCALEDEF_INTERPOLATED,
+ BVSCALE_BETTER_PHOTO = BVSCALEDEF_VENDOR_ALL |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVSCALEDEF_IMPLICIT |
+ BVSCALEDEF_BETTER |
+ BVSCALEDEF_PHOTO,
+ BVSCALE_BETTER_DRAWING = BVSCALEDEF_VENDOR_ALL |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVSCALEDEF_IMPLICIT |
+ BVSCALEDEF_BETTER |
+ BVSCALEDEF_DRAWING,
+ BVSCALE_BEST = BVSCALEDEF_VENDOR_ALL |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVSCALEDEF_IMPLICIT |
+ BVSCALEDEF_BEST |
+ BVSCALEDEF_DONT_CARE,
+ BVSCALE_BEST_POINT_SAMPLE = BVSCALEDEF_VENDOR_ALL |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVSCALEDEF_IMPLICIT |
+ BVSCALEDEF_BEST |
+ BVSCALEDEF_POINT_SAMPLE,
+ BVSCALE_BEST_INTERPOLATED = BVSCALEDEF_VENDOR_ALL |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVSCALEDEF_IMPLICIT |
+ BVSCALEDEF_BEST |
+ BVSCALEDEF_INTERPOLATED,
+ BVSCALE_BEST_PHOTO = BVSCALEDEF_VENDOR_ALL |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVSCALEDEF_IMPLICIT |
+ BVSCALEDEF_BEST |
+ BVSCALEDEF_PHOTO,
+ BVSCALE_BEST_DRAWING = BVSCALEDEF_VENDOR_ALL |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVSCALEDEF_IMPLICIT |
+ BVSCALEDEF_BEST |
+ BVSCALEDEF_DRAWING,
+ BVSCALE_NEAREST_NEIGHBOR = BVSCALEDEF_VENDOR_GENERIC |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVSCALEDEF_EXPLICIT |
+ (BVSCALEDEF_NEAREST_NEIGHBOR << BVSCALEDEF_HORZ_SHIFT) |
+ (BVSCALEDEF_NEAREST_NEIGHBOR << BVSCALEDEF_VERT_SHIFT),
+ BVSCALE_BILINEAR = BVSCALEDEF_VENDOR_GENERIC |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVSCALEDEF_EXPLICIT |
+ (BVSCALEDEF_LINEAR << BVSCALEDEF_HORZ_SHIFT) |
+ (BVSCALEDEF_LINEAR << BVSCALEDEF_VERT_SHIFT),
+ BVSCALE_BICUBIC = BVSCALEDEF_VENDOR_GENERIC |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVSCALEDEF_EXPLICIT |
+ (BVSCALEDEF_CUBIC << BVSCALEDEF_HORZ_SHIFT) |
+ (BVSCALEDEF_CUBIC << BVSCALEDEF_VERT_SHIFT),
+ BVSCALE_3x3_TAP = BVSCALEDEF_VENDOR_GENERIC |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVSCALEDEF_EXPLICIT |
+ (BVSCALEDEF_3_TAP << BVSCALEDEF_HORZ_SHIFT) |
+ (BVSCALEDEF_3_TAP << BVSCALEDEF_VERT_SHIFT),
+ BVSCALE_5x5_TAP = BVSCALEDEF_VENDOR_GENERIC |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVSCALEDEF_EXPLICIT |
+ (BVSCALEDEF_5_TAP << BVSCALEDEF_HORZ_SHIFT) |
+ (BVSCALEDEF_5_TAP << BVSCALEDEF_VERT_SHIFT),
+ BVSCALE_7x7_TAP = BVSCALEDEF_VENDOR_GENERIC |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVSCALEDEF_EXPLICIT |
+ (BVSCALEDEF_7_TAP << BVSCALEDEF_HORZ_SHIFT) |
+ (BVSCALEDEF_7_TAP << BVSCALEDEF_VERT_SHIFT),
+ BVSCALE_9x9_TAP = BVSCALEDEF_VENDOR_GENERIC |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVSCALEDEF_EXPLICIT |
+ (BVSCALEDEF_9_TAP << BVSCALEDEF_HORZ_SHIFT) |
+ (BVSCALEDEF_9_TAP << BVSCALEDEF_VERT_SHIFT),
+#ifdef BVSCALE_EXTERNAL_INCLUDE
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#include BVSCALE_EXTERNAL_INCLUDE
+#endif
+};
+#define BVDITHERDEF_VENDOR_SHIFT 24
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define BVDITHERDEF_VENDOR_MASK (0xFF << BVDITHERDEF_VENDOR_SHIFT)
+#define BVDITHERDEF_VENDOR_ALL (0 << BVDITHERDEF_VENDOR_SHIFT)
+#define BVDITHERDEF_VENDOR_TI (1 << BVDITHERDEF_VENDOR_SHIFT)
+#define BVDITHERDEF_VENDOR_GENERIC (0xFF << BVDITHERDEF_VENDOR_SHIFT)
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define BVDITHERDEF_TYPE_SHIFT 16
+#define BVDITHERDEF_PHOTO (0x01 << BVDITHERDEF_TYPE_SHIFT)
+#define BVDITHERDEF_DRAWING (0x02 << BVDITHERDEF_TYPE_SHIFT)
+#define BVDITHERDEF_TECHNIQUE_SHIFT 8
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define BVDITHERDEF_DONT_CARE (0x00 << BVDITHERDEF_TECHNIQUE_SHIFT)
+#define BVDITHERDEF_RANDOM (0x01 << BVDITHERDEF_TECHNIQUE_SHIFT)
+#define BVDITHERDEF_ORDERED (0x02 << BVDITHERDEF_TECHNIQUE_SHIFT)
+#define BVDITHERDEF_DIFFUSED (0x04 << BVDITHERDEF_TECHNIQUE_SHIFT)
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define BVDITHERDEF_ON (0xFF << BVDITHERDEF_TECHNIQUE_SHIFT)
+#define BVDITHERDEF_QUALITY_SHIFT 0
+#define BVDITHERDEF_FASTEST (0x00 << BVDITHERDEF_QUALITY_SHIFT)
+#define BVDITHERDEF_GOOD (0x55 << BVDITHERDEF_QUALITY_SHIFT)
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define BVDITHERDEF_BETTER (0xAA << BVDITHERDEF_QUALITY_SHIFT)
+#define BVDITHERDEF_BEST (0xFF << BVDITHERDEF_QUALITY_SHIFT)
+enum bvdithermode {
+ BVDITHER_FASTEST = BVDITHERDEF_VENDOR_ALL |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVDITHERDEF_FASTEST |
+ BVDITHERDEF_DONT_CARE,
+ BVDITHER_FASTEST_ON = BVDITHERDEF_VENDOR_ALL |
+ BVDITHERDEF_FASTEST |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVDITHERDEF_ON,
+ BVDITHER_FASTEST_RANDOM = BVDITHERDEF_VENDOR_ALL |
+ BVDITHERDEF_FASTEST |
+ BVDITHERDEF_RANDOM,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVDITHER_FASTEST_ORDERED = BVDITHERDEF_VENDOR_ALL |
+ BVDITHERDEF_FASTEST |
+ BVDITHERDEF_ORDERED,
+ BVDITHER_FASTEST_DIFFUSED = BVDITHERDEF_VENDOR_ALL |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVDITHERDEF_FASTEST |
+ BVDITHERDEF_DIFFUSED,
+ BVDITHER_FASTEST_PHOTO = BVDITHERDEF_VENDOR_ALL |
+ BVDITHERDEF_FASTEST |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVDITHERDEF_PHOTO,
+ BVDITHER_FASTEST_DRAWING = BVDITHERDEF_VENDOR_ALL |
+ BVDITHERDEF_FASTEST |
+ BVDITHERDEF_DRAWING,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVDITHER_GOOD = BVDITHERDEF_VENDOR_ALL |
+ BVDITHERDEF_GOOD |
+ BVDITHERDEF_DONT_CARE,
+ BVDITHER_GOOD_ON = BVDITHERDEF_VENDOR_ALL |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVDITHERDEF_GOOD |
+ BVDITHERDEF_ON,
+ BVDITHER_GOOD_RANDOM = BVDITHERDEF_VENDOR_ALL |
+ BVDITHERDEF_GOOD |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVDITHERDEF_RANDOM,
+ BVDITHER_GOOD_ORDERED = BVDITHERDEF_VENDOR_ALL |
+ BVDITHERDEF_GOOD |
+ BVDITHERDEF_ORDERED,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVDITHER_GOOD_DIFFUSED = BVDITHERDEF_VENDOR_ALL |
+ BVDITHERDEF_GOOD |
+ BVDITHERDEF_DIFFUSED,
+ BVDITHER_GOOD_PHOTO = BVDITHERDEF_VENDOR_ALL |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVDITHERDEF_GOOD |
+ BVDITHERDEF_PHOTO,
+ BVDITHER_GOOD_DRAWING = BVDITHERDEF_VENDOR_ALL |
+ BVDITHERDEF_GOOD |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVDITHERDEF_DRAWING,
+ BVDITHER_BETTER = BVDITHERDEF_VENDOR_ALL |
+ BVDITHERDEF_BETTER |
+ BVDITHERDEF_DONT_CARE,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVDITHER_BETTER_ON = BVDITHERDEF_VENDOR_ALL |
+ BVDITHERDEF_BETTER |
+ BVDITHERDEF_ON,
+ BVDITHER_BETTER_RANDOM = BVDITHERDEF_VENDOR_ALL |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVDITHERDEF_BETTER |
+ BVDITHERDEF_RANDOM,
+ BVDITHER_BETTER_ORDERED = BVDITHERDEF_VENDOR_ALL |
+ BVDITHERDEF_BETTER |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVDITHERDEF_ORDERED,
+ BVDITHER_BETTER_DIFFUSED = BVDITHERDEF_VENDOR_ALL |
+ BVDITHERDEF_BETTER |
+ BVDITHERDEF_DIFFUSED,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVDITHER_BETTER_PHOTO = BVDITHERDEF_VENDOR_ALL |
+ BVDITHERDEF_BETTER |
+ BVDITHERDEF_PHOTO,
+ BVDITHER_BETTER_DRAWING = BVDITHERDEF_VENDOR_ALL |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVDITHERDEF_BETTER |
+ BVDITHERDEF_DRAWING,
+ BVDITHER_BEST = BVDITHERDEF_VENDOR_ALL |
+ BVDITHERDEF_BEST |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVDITHERDEF_DONT_CARE,
+ BVDITHER_BEST_ON = BVDITHERDEF_VENDOR_ALL |
+ BVDITHERDEF_BEST |
+ BVDITHERDEF_ON,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVDITHER_BEST_RANDOM = BVDITHERDEF_VENDOR_ALL |
+ BVDITHERDEF_BEST |
+ BVDITHERDEF_RANDOM,
+ BVDITHER_BEST_ORDERED = BVDITHERDEF_VENDOR_ALL |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVDITHERDEF_BEST |
+ BVDITHERDEF_ORDERED,
+ BVDITHER_BEST_DIFFUSED = BVDITHERDEF_VENDOR_ALL |
+ BVDITHERDEF_BEST |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVDITHERDEF_DIFFUSED,
+ BVDITHER_BEST_PHOTO = BVDITHERDEF_VENDOR_ALL |
+ BVDITHERDEF_BEST |
+ BVDITHERDEF_PHOTO,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVDITHER_BEST_DRAWING = BVDITHERDEF_VENDOR_ALL |
+ BVDITHERDEF_BEST |
+ BVDITHERDEF_DRAWING,
+ BVDITHER_NONE = BVDITHERDEF_VENDOR_GENERIC + 0,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVDITHER_ORDERED_2x2 = BVDITHERDEF_VENDOR_GENERIC + 4,
+ BVDITHER_ORDERED_4x4 = BVDITHERDEF_VENDOR_GENERIC + 16,
+ BVDITHER_ORDERED_2x2_4x4 = BVDITHERDEF_VENDOR_GENERIC + 4 + 16,
+#ifdef BVDITHER_EXTERNAL_INCLUDE
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#include BVDITHER_EXTERNAL_INCLUDE
+#endif
+};
+#define BVTILE_LEFT_SHIFT 0
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define BVTILE_TOP_SHIFT (BVTILE_LEFT_SHIFT + 2)
+#define BVTILE_RIGHT_SHIFT (BVTILE_TOP_SHIFT + 2)
+#define BVTILE_BOTTOM_SHIFT (BVTILE_RIGHT_SHIFT + 2)
+#define BVTILE_LEFT_REPEAT (0 << BVTILE_LEFT_SHIFT)
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define BVTILE_TOP_REPEAT (0 << BVTILE_TOP_SHIFT)
+#define BVTILE_RIGHT_REPEAT (0 << BVTILE_RIGHT_SHIFT)
+#define BVTILE_BOTTOM_REPEAT (0 << BVTILE_BOTTOM_SHIFT)
+#define BVTILE_LEFT_MIRROR (1 << BVTILE_LEFT_SHIFT)
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define BVTILE_TOP_MIRROR (1 << BVTILE_TOP_SHIFT)
+#define BVTILE_RIGHT_MIRROR (1 << BVTILE_RIGHT_SHIFT)
+#define BVTILE_BOTTOM_MIRROR (1 << BVTILE_BOTTOM_SHIFT)
+struct bvtileparams {
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ unsigned int structsize;
+ unsigned long flags;
+ void *virtaddr;
+ int dstleft;
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ int dsttop;
+ unsigned int srcwidth;
+ unsigned int srcheight;
+};
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define BVBATCH_OP 0x00000001
+#define BVBATCH_KEY 0x00000002
+#define BVBATCH_MISCFLAGS 0x00000004
+#define BVBATCH_ALPHA 0x00000008
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define BVBATCH_DITHER 0x00000010
+#define BVBATCH_SCALE 0x00000020
+#define BVBATCH_DST 0x00000100
+#define BVBATCH_SRC1 0x00000200
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define BVBATCH_SRC2 0x00000400
+#define BVBATCH_MASK 0x00000800
+#define BVBATCH_DSTRECT_ORIGIN 0x00001000
+#define BVBATCH_DSTRECT_SIZE 0x00002000
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define BVBATCH_SRC1RECT_ORIGIN 0x00004000
+#define BVBATCH_SRC1RECT_SIZE 0x00008000
+#define BVBATCH_SRC2RECT_ORIGIN 0x00010000
+#define BVBATCH_SRC2RECT_SIZE 0x00020000
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define BVBATCH_MASKRECT_ORIGIN 0x00040000
+#define BVBATCH_MASKRECT_SIZE 0x00080000
+#define BVBATCH_CLIPRECT_ORIGIN 0x00100000
+#define BVBATCH_CLIPRECT_SIZE 0x00200000
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define BVBATCH_CLIPRECT (BVBATCH_CLIPRECT_ORIGIN | BVBATCH_CLIPRECT_SIZE)
+#define BVBATCH_TILE_SRC1 0x00400000
+#define BVBATCH_TILE_SRC2 0x00800000
+#define BVBATCH_TILE_MASK 0x00100000
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define BVBATCH_ENDNOP 0x80000000
+struct bvcallbackerror {
+ unsigned int structsize;
+ enum bverror error;
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ char *errdesc;
+};
+struct bvbatch;
+union bvinbuff {
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ struct bvbuffdesc *desc;
+ struct bvtileparams *tileparams;
+};
+union bvop {
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ unsigned short rop;
+ enum bvblend blend;
+ struct bvfilter *filter;
+};
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+struct bvbltparams {
+ unsigned int structsize;
+ char *errdesc;
+ unsigned long implementation;
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ unsigned long flags;
+ union bvop op;
+ void *colorkey;
+ union bvalpha globalalpha;
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ enum bvscalemode scalemode;
+ enum bvdithermode dithermode;
+ struct bvbuffdesc *dstdesc;
+ struct bvsurfgeom *dstgeom;
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ struct bvrect dstrect;
+ union bvinbuff src1;
+ struct bvsurfgeom *src1geom;
+ struct bvrect src1rect;
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ union bvinbuff src2;
+ struct bvsurfgeom *src2geom;
+ struct bvrect src2rect;
+ union bvinbuff mask;
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ struct bvsurfgeom *maskgeom;
+ struct bvrect maskrect;
+ struct bvrect cliprect;
+ unsigned long batchflags;
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ struct bvbatch *batch;
+ void (*callbackfn)(struct bvcallbackerror *err,
+ unsigned long callbackdata);
+ unsigned long callbackdata;
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+};
+#endif
diff --git a/kernel-headers-ti/linux/bvblend.h b/kernel-headers-ti/linux/bvblend.h
new file mode 100644
index 0000000..4e3d6d1
--- /dev/null
+++ b/kernel-headers-ti/linux/bvblend.h
@@ -0,0 +1,210 @@
+/****************************************************************************
+ ****************************************************************************
+ ***
+ *** This header was automatically generated from a Linux kernel header
+ *** of the same name, to make information necessary for userspace to
+ *** call into the kernel available to libc. It contains only constants,
+ *** structures, and macros generated from the original header, and thus,
+ *** contains no copyrightable information.
+ ***
+ *** To edit the content of this header, modify the corresponding
+ *** source file (e.g. under external/kernel-headers/original/) then
+ *** run bionic/libc/kernel/tools/update_all.py
+ ***
+ *** Any manual change here will be lost the next time this script will
+ *** be run. You've been warned!
+ ***
+ ****************************************************************************
+ ****************************************************************************/
+#ifndef BVBLEND_H
+#define BVBLEND_H
+#define BVBLENDDEF_FORMAT_SHIFT 28
+#define BVBLENDDEF_FORMAT_MASK (0xF << BVBLENDDEF_FORMAT_SHIFT)
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define BVBLENDDEF_FORMAT_CLASSIC (0x0 << BVBLENDDEF_FORMAT_SHIFT)
+#define BVBLENDDEF_FORMAT_ESSENTIAL (0x1 << BVBLENDDEF_FORMAT_SHIFT)
+#define BVBLENDDEF_MODE_SHIFT 4
+#define BVBLENDDEF_INV_SHIFT 2
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define BVBLENDDEF_NORM_SHIFT 0
+#define BVBLENDDEF_ONLY_A (0 << BVBLENDDEF_MODE_SHIFT)
+#define BVBLENDDEF_MIN (1 << BVBLENDDEF_MODE_SHIFT)
+#define BVBLENDDEF_MAX (2 << BVBLENDDEF_MODE_SHIFT)
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define BVBLENDDEF_ONLY_C (3 << BVBLENDDEF_MODE_SHIFT)
+#define BVBLENDDEF_MODE_MASK (3 << BVBLENDDEF_MODE_SHIFT)
+#define BVBLENDDEF_NORM_C1 (0 << BVBLENDDEF_NORM_SHIFT)
+#define BVBLENDDEF_NORM_A1 (1 << BVBLENDDEF_NORM_SHIFT)
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define BVBLENDDEF_NORM_C2 (2 << BVBLENDDEF_NORM_SHIFT)
+#define BVBLENDDEF_NORM_A2 (3 << BVBLENDDEF_NORM_SHIFT)
+#define BVBLENDDEF_NORM_MASK (3 << BVBLENDDEF_NORM_SHIFT)
+#define BVBLENDDEF_INV_C1 (0 << BVBLENDDEF_INV_SHIFT)
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define BVBLENDDEF_INV_A1 (1 << BVBLENDDEF_INV_SHIFT)
+#define BVBLENDDEF_INV_C2 (2 << BVBLENDDEF_INV_SHIFT)
+#define BVBLENDDEF_INV_A2 (3 << BVBLENDDEF_INV_SHIFT)
+#define BVBLENDDEF_INV_MASK (3 << BVBLENDDEF_INV_SHIFT)
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define BVBLENDDEF_ONLY_A_NORM_xx BVBLENDDEF_NORM_C1
+#define BVBLENDDEF_ONLY_A_INV_xx BVBLENDDEF_INV_C1
+#define BVBLENDDEF_ONLY_C_NORM_xx BVBLENDDEF_NORM_A2
+#define BVBLENDDEF_ONLY_C_INV_xx BVBLENDDEF_INV_A2
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define BVBLENDDEF_ZERO (BVBLENDDEF_ONLY_A | BVBLENDDEF_ONLY_A_NORM_xx | BVBLENDDEF_ONLY_A_INV_xx)
+#define BVBLENDDEF_C1 (BVBLENDDEF_ONLY_C | BVBLENDDEF_NORM_C1 | BVBLENDDEF_ONLY_C_INV_xx)
+#define BVBLENDDEF_A1 (BVBLENDDEF_ONLY_A | BVBLENDDEF_NORM_A1 | BVBLENDDEF_ONLY_A_INV_xx)
+#define BVBLENDDEF_C2 (BVBLENDDEF_ONLY_C | BVBLENDDEF_NORM_C2 | BVBLENDDEF_ONLY_C_INV_xx)
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define BVBLENDDEF_A2 (BVBLENDDEF_ONLY_A | BVBLENDDEF_NORM_A2 | BVBLENDDEF_ONLY_A_INV_xx)
+#define BVBLENDDEF_ONE_MINUS_C1 (BVBLENDDEF_ONLY_C | BVBLENDDEF_ONLY_C_NORM_xx | BVBLENDDEF_INV_C1)
+#define BVBLENDDEF_ONE_MINUS_A1 (BVBLENDDEF_ONLY_A | BVBLENDDEF_ONLY_A_NORM_xx | BVBLENDDEF_INV_A1)
+#define BVBLENDDEF_ONE_MINUS_C2 (BVBLENDDEF_ONLY_C | BVBLENDDEF_ONLY_C_NORM_xx | BVBLENDDEF_INV_C2)
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define BVBLENDDEF_ONE_MINUS_A2 (BVBLENDDEF_ONLY_A | BVBLENDDEF_ONLY_A_NORM_xx | BVBLENDDEF_INV_A2)
+#define BVBLENDDEF_ONE (BVBLENDDEF_ONLY_C | BVBLENDDEF_ONLY_C_NORM_xx | BVBLENDDEF_ONLY_C_INV_xx)
+#define BVBLENDDEF_K_MASK (BVBLENDDEF_MODE_MASK | BVBLENDDEF_INV_MASK | BVBLENDDEF_NORM_MASK)
+#define BVBLENDDEF_K1_SHIFT 18
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define BVBLENDDEF_K2_SHIFT 12
+#define BVBLENDDEF_K3_SHIFT 6
+#define BVBLENDDEF_K4_SHIFT 0
+#define BVBLENDDEF_K1_MASK (BVBLENDDEF_K_MASK << BVBLENDDEF_K1_SHIFT)
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define BVBLENDDEF_K2_MASK (BVBLENDDEF_K_MASK << BVBLENDDEF_K2_SHIFT)
+#define BVBLENDDEF_K3_MASK (BVBLENDDEF_K_MASK << BVBLENDDEF_K3_SHIFT)
+#define BVBLENDDEF_K4_MASK (BVBLENDDEF_K_MASK << BVBLENDDEF_K4_SHIFT)
+#define BVBLENDDEF_CLASSIC_EQUATION_MASK 0x00FFFFFF
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define BVBLENDDEF_REMOTE 0x08000000
+#define BVBLENDDEF_GLOBAL_SHIFT 24
+#define BVBLENDDEF_GLOBAL_MASK (3 << BVBLENDDEF_GLOBAL_SHIFT)
+#define BVBLENDDEF_GLOBAL_NONE (0 << BVBLENDDEF_GLOBAL_SHIFT)
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define BVBLENDDEF_GLOBAL_UCHAR (1 << BVBLENDDEF_GLOBAL_SHIFT)
+#define BVBLENDDEF_GLOBAL_FLOAT (3 << BVBLENDDEF_GLOBAL_SHIFT)
+union bvalpha {
+ unsigned char size8;
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ float fp;
+};
+enum bvblend {
+ BVBLEND_CLEAR = BVBLENDDEF_FORMAT_CLASSIC |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ (BVBLENDDEF_ZERO << BVBLENDDEF_K1_SHIFT) |
+ (BVBLENDDEF_ZERO << BVBLENDDEF_K2_SHIFT) |
+ (BVBLENDDEF_ZERO << BVBLENDDEF_K3_SHIFT) |
+ (BVBLENDDEF_ZERO << BVBLENDDEF_K4_SHIFT),
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVBLEND_SRC1 = BVBLENDDEF_FORMAT_CLASSIC |
+ (BVBLENDDEF_ONE << BVBLENDDEF_K1_SHIFT) |
+ (BVBLENDDEF_ZERO << BVBLENDDEF_K2_SHIFT) |
+ (BVBLENDDEF_ONE << BVBLENDDEF_K3_SHIFT) |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ (BVBLENDDEF_ZERO << BVBLENDDEF_K4_SHIFT),
+ BVBLEND_SRC2 = BVBLENDDEF_FORMAT_CLASSIC |
+ (BVBLENDDEF_ZERO << BVBLENDDEF_K1_SHIFT) |
+ (BVBLENDDEF_ONE << BVBLENDDEF_K2_SHIFT) |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ (BVBLENDDEF_ZERO << BVBLENDDEF_K3_SHIFT) |
+ (BVBLENDDEF_ONE << BVBLENDDEF_K4_SHIFT),
+ BVBLEND_SRC1OVER = BVBLENDDEF_FORMAT_CLASSIC |
+ (BVBLENDDEF_ONE << BVBLENDDEF_K1_SHIFT) |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ (BVBLENDDEF_ONE_MINUS_A1 << BVBLENDDEF_K2_SHIFT) |
+ (BVBLENDDEF_ONE << BVBLENDDEF_K3_SHIFT) |
+ (BVBLENDDEF_ONE_MINUS_A1 << BVBLENDDEF_K4_SHIFT),
+ BVBLEND_SRC2OVER = BVBLENDDEF_FORMAT_CLASSIC |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ (BVBLENDDEF_ONE_MINUS_A2 << BVBLENDDEF_K1_SHIFT) |
+ (BVBLENDDEF_ONE << BVBLENDDEF_K2_SHIFT) |
+ (BVBLENDDEF_ONE_MINUS_A2 << BVBLENDDEF_K3_SHIFT) |
+ (BVBLENDDEF_ONE << BVBLENDDEF_K4_SHIFT),
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVBLEND_SRC1IN = BVBLENDDEF_FORMAT_CLASSIC |
+ (BVBLENDDEF_A2 << BVBLENDDEF_K1_SHIFT) |
+ (BVBLENDDEF_ZERO << BVBLENDDEF_K2_SHIFT) |
+ (BVBLENDDEF_A2 << BVBLENDDEF_K3_SHIFT) |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ (BVBLENDDEF_ZERO << BVBLENDDEF_K4_SHIFT),
+ BVBLEND_SRC2IN = BVBLENDDEF_FORMAT_CLASSIC |
+ (BVBLENDDEF_ZERO << BVBLENDDEF_K1_SHIFT) |
+ (BVBLENDDEF_A1 << BVBLENDDEF_K2_SHIFT) |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ (BVBLENDDEF_ZERO << BVBLENDDEF_K3_SHIFT) |
+ (BVBLENDDEF_A1 << BVBLENDDEF_K4_SHIFT),
+ BVBLEND_SRC1OUT = BVBLENDDEF_FORMAT_CLASSIC |
+ (BVBLENDDEF_ONE_MINUS_A2 << BVBLENDDEF_K1_SHIFT) |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ (BVBLENDDEF_ZERO << BVBLENDDEF_K2_SHIFT) |
+ (BVBLENDDEF_ONE_MINUS_A2 << BVBLENDDEF_K3_SHIFT) |
+ (BVBLENDDEF_ZERO << BVBLENDDEF_K4_SHIFT),
+ BVBLEND_SRC2OUT = BVBLENDDEF_FORMAT_CLASSIC |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ (BVBLENDDEF_ZERO << BVBLENDDEF_K1_SHIFT) |
+ (BVBLENDDEF_ONE_MINUS_A1 << BVBLENDDEF_K2_SHIFT) |
+ (BVBLENDDEF_ZERO << BVBLENDDEF_K3_SHIFT) |
+ (BVBLENDDEF_ONE_MINUS_A1 << BVBLENDDEF_K4_SHIFT),
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVBLEND_SRC1ATOP = BVBLENDDEF_FORMAT_CLASSIC |
+ (BVBLENDDEF_A2 << BVBLENDDEF_K1_SHIFT) |
+ (BVBLENDDEF_ONE_MINUS_A1 << BVBLENDDEF_K2_SHIFT) |
+ (BVBLENDDEF_A2 << BVBLENDDEF_K3_SHIFT) |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ (BVBLENDDEF_ONE_MINUS_A1 << BVBLENDDEF_K4_SHIFT),
+ BVBLEND_SRC2ATOP = BVBLENDDEF_FORMAT_CLASSIC |
+ (BVBLENDDEF_ONE_MINUS_A2 << BVBLENDDEF_K1_SHIFT) |
+ (BVBLENDDEF_A1 << BVBLENDDEF_K2_SHIFT) |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ (BVBLENDDEF_ONE_MINUS_A2 << BVBLENDDEF_K3_SHIFT) |
+ (BVBLENDDEF_A1 << BVBLENDDEF_K4_SHIFT),
+ BVBLEND_XOR = BVBLENDDEF_FORMAT_CLASSIC |
+ (BVBLENDDEF_ONE_MINUS_A2 << BVBLENDDEF_K1_SHIFT) |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ (BVBLENDDEF_ONE_MINUS_A1 << BVBLENDDEF_K2_SHIFT) |
+ (BVBLENDDEF_ONE_MINUS_A2 << BVBLENDDEF_K3_SHIFT) |
+ (BVBLENDDEF_ONE_MINUS_A1 << BVBLENDDEF_K4_SHIFT),
+ BVBLEND_PLUS = BVBLENDDEF_FORMAT_CLASSIC |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ (BVBLENDDEF_ONE << BVBLENDDEF_K1_SHIFT) |
+ (BVBLENDDEF_ONE << BVBLENDDEF_K2_SHIFT) |
+ (BVBLENDDEF_ONE << BVBLENDDEF_K3_SHIFT) |
+ (BVBLENDDEF_ONE << BVBLENDDEF_K4_SHIFT),
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVBLEND_NORMAL = BVBLENDDEF_FORMAT_ESSENTIAL + 0,
+ BVBLEND_LIGHTEN = BVBLENDDEF_FORMAT_ESSENTIAL + 1,
+ BVBLEND_DARKEN = BVBLENDDEF_FORMAT_ESSENTIAL + 2,
+ BVBLEND_MULTIPLY = BVBLENDDEF_FORMAT_ESSENTIAL + 3,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVBLEND_AVERAGE = BVBLENDDEF_FORMAT_ESSENTIAL + 4,
+ BVBLEND_ADD = BVBLENDDEF_FORMAT_ESSENTIAL + 5,
+ BVBLEND_LINEAR_DODGE = BVBLEND_ADD,
+ BVBLEND_SUBTRACT = BVBLENDDEF_FORMAT_ESSENTIAL + 6,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVBLEND_LINEAR_BURN = BVBLEND_SUBTRACT,
+ BVBLEND_DIFFERENCE = BVBLENDDEF_FORMAT_ESSENTIAL + 7,
+ BVBLEND_NEGATE = BVBLENDDEF_FORMAT_ESSENTIAL + 8,
+ BVBLEND_SCREEN = BVBLENDDEF_FORMAT_ESSENTIAL + 9,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVBLEND_EXCLUSION = BVBLENDDEF_FORMAT_ESSENTIAL + 10,
+ BVBLEND_OVERLAY = BVBLENDDEF_FORMAT_ESSENTIAL + 11,
+ BVBLEND_SOFT_LIGHT = BVBLENDDEF_FORMAT_ESSENTIAL + 12,
+ BVBLEND_HARD_LIGHT = BVBLENDDEF_FORMAT_ESSENTIAL + 13,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVBLEND_COLOR_DODGE = BVBLENDDEF_FORMAT_ESSENTIAL + 14,
+ BVBLEND_COLOR_BURN = BVBLENDDEF_FORMAT_ESSENTIAL + 15,
+ BVBLEND_LINEAR_LIGHT = BVBLENDDEF_FORMAT_ESSENTIAL + 16,
+ BVBLEND_VIVID_LIGHT = BVBLENDDEF_FORMAT_ESSENTIAL + 17,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVBLEND_PIN_LIGHT = BVBLENDDEF_FORMAT_ESSENTIAL + 18,
+ BVBLEND_HARD_MIX = BVBLENDDEF_FORMAT_ESSENTIAL + 19,
+ BVBLEND_REFLECT = BVBLENDDEF_FORMAT_ESSENTIAL + 20,
+ BVBLEND_GLOW = BVBLENDDEF_FORMAT_ESSENTIAL + 21,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVBLEND_PHOENIX = BVBLENDDEF_FORMAT_ESSENTIAL + 22,
+#ifdef BVBLEND_EXTERNAL_INCLUDE
+#include BVBLEND_EXTERNAL_INCLUDE
+#endif
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+};
+#endif
diff --git a/kernel-headers-ti/linux/bvbuffdesc.h b/kernel-headers-ti/linux/bvbuffdesc.h
new file mode 100644
index 0000000..397d937
--- /dev/null
+++ b/kernel-headers-ti/linux/bvbuffdesc.h
@@ -0,0 +1,56 @@
+/****************************************************************************
+ ****************************************************************************
+ ***
+ *** This header was automatically generated from a Linux kernel header
+ *** of the same name, to make information necessary for userspace to
+ *** call into the kernel available to libc. It contains only constants,
+ *** structures, and macros generated from the original header, and thus,
+ *** contains no copyrightable information.
+ ***
+ *** To edit the content of this header, modify the corresponding
+ *** source file (e.g. under external/kernel-headers/original/) then
+ *** run bionic/libc/kernel/tools/update_all.py
+ ***
+ *** Any manual change here will be lost the next time this script will
+ *** be run. You've been warned!
+ ***
+ ****************************************************************************
+ ****************************************************************************/
+#ifndef BVBUFFDESC_H
+#define BVBUFFDESC_H
+struct bvbuffmap;
+#define BVATDEF_VENDOR_SHIFT 24
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define BVATDEF_VENDOR_MASK (0xFF << BVATDEF_VENDOR_SHIFT)
+#define BVATDEF_VENDOR_ALL (0x00 << BVATDEF_VENDOR_SHIFT)
+#define BVATDEF_VENDOR_TI (0x01 << BVATDEF_VENDOR_SHIFT)
+enum bvauxtype {
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVAT_NONE = 0,
+ BVAT_PHYSDESC =
+ BVATDEF_VENDOR_ALL + 1,
+#ifdef BVAT_EXTERNAL_INCLUDE
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#include BVAT_EXTERNAL_INCLUDE
+#endif
+};
+struct bvphysdesc {
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ unsigned int structsize;
+ unsigned long pagesize;
+ unsigned long *pagearray;
+ unsigned int pagecount;
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ unsigned long pageoffset;
+};
+struct bvbuffdesc {
+ unsigned int structsize;
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ void *virtaddr;
+ unsigned long length;
+ struct bvbuffmap *map;
+ enum bvauxtype auxtype;
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ void *auxptr;
+};
+#endif
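
For context, a minimal sketch of how userspace typically fills in struct bvbuffdesc for a plain CPU-allocated buffer. The <linux/bvbuffdesc.h> include path and the absence of a bvphysdesc page list (BVAT_NONE) are assumptions of this sketch, not requirements of the header:

#include <string.h>
#include <linux/bvbuffdesc.h>

/* Describe a CPU-visible buffer; the implementation fills in 'map' when the
 * buffer is mapped through bv_map() (see bventry.h later in this change). */
static void describe_virtual_buffer(void *pixels, unsigned long bytes,
                                    struct bvbuffdesc *desc)
{
        memset(desc, 0, sizeof(*desc));
        desc->structsize = sizeof(*desc); /* lets the driver detect the ABI rev */
        desc->virtaddr = pixels;          /* user virtual address of the data */
        desc->length = bytes;             /* total size in bytes */
        desc->auxtype = BVAT_NONE;        /* no bvphysdesc page list attached */
        desc->auxptr = NULL;
}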
diff --git a/kernel-headers-ti/linux/bvcache.h b/kernel-headers-ti/linux/bvcache.h
new file mode 100644
index 0000000..c65d0aa
--- /dev/null
+++ b/kernel-headers-ti/linux/bvcache.h
@@ -0,0 +1,41 @@
+/****************************************************************************
+ ****************************************************************************
+ ***
+ *** This header was automatically generated from a Linux kernel header
+ *** of the same name, to make information necessary for userspace to
+ *** call into the kernel available to libc. It contains only constants,
+ *** structures, and macros generated from the original header, and thus,
+ *** contains no copyrightable information.
+ ***
+ *** To edit the content of this header, modify the corresponding
+ *** source file (e.g. under external/kernel-headers/original/) then
+ *** run bionic/libc/kernel/tools/update_all.py
+ ***
+ *** Any manual change here will be lost the next time this script will
+ *** be run. You've been warned!
+ ***
+ ****************************************************************************
+ ****************************************************************************/
+#ifndef BVCACHE_H_
+#define BVCACHE_H_
+struct bvbuffdesc;
+struct bvsurfgeom;
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+struct bvrect;
+enum bvcacheop {
+ BVCACHE_BIDIRECTIONAL = 0,
+ BVCACHE_CPU_TO_DEVICE = 1,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVCACHE_CPU_FROM_DEVICE = 2,
+ BVCACHE_RESERVED3 = 3,
+};
+struct bvcopparams {
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ unsigned int structsize;
+ struct bvbuffdesc *desc;
+ struct bvsurfgeom *geom;
+ struct bvrect *rect;
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ enum bvcacheop cacheop;
+};
+#endif
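
A hedged sketch of a CPU-to-device cache clean for one surface region using struct bvcopparams. The desc, geom and rect objects are assumed to be prepared elsewhere, and the call is dispatched through the bv_cache entry point declared in bventry.h below; error handling is intentionally omitted:

#include <linux/bverror.h>
#include <linux/bvcache.h>

/* Clean CPU caches over the region the CPU just wrote, before a device
 * (e.g. the blitter) reads it. */
static enum bverror clean_region_for_device(
        enum bverror (*bv_cache)(struct bvcopparams *),
        struct bvbuffdesc *desc, struct bvsurfgeom *geom, struct bvrect *rect)
{
        struct bvcopparams cop;

        cop.structsize = sizeof(cop);
        cop.desc = desc;                      /* buffer backing the surface */
        cop.geom = geom;                      /* format/stride of the surface */
        cop.rect = rect;                      /* sub-rectangle to operate on */
        cop.cacheop = BVCACHE_CPU_TO_DEVICE;  /* clean (write back) CPU caches */

        return bv_cache(&cop);
}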
diff --git a/kernel-headers-ti/linux/bventry.h b/kernel-headers-ti/linux/bventry.h
new file mode 100644
index 0000000..f7f2230
--- /dev/null
+++ b/kernel-headers-ti/linux/bventry.h
@@ -0,0 +1,39 @@
+/****************************************************************************
+ ****************************************************************************
+ ***
+ *** This header was automatically generated from a Linux kernel header
+ *** of the same name, to make information necessary for userspace to
+ *** call into the kernel available to libc. It contains only constants,
+ *** structures, and macros generated from the original header, and thus,
+ *** contains no copyrightable information.
+ ***
+ *** To edit the content of this header, modify the corresponding
+ *** source file (e.g. under external/kernel-headers/original/) then
+ *** run bionic/libc/kernel/tools/update_all.py
+ ***
+ *** Any manual change here will be lost the next time this script will
+ *** be run. You've been warned!
+ ***
+ ****************************************************************************
+ ****************************************************************************/
+#ifndef BVENTRY_H
+#define BVENTRY_H
+struct bvbuffdesc;
+struct bvbltparams;
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+struct bvcopparams;
+typedef enum bverror (*BVFN_MAP) (struct bvbuffdesc *buffdesc);
+typedef enum bverror (*BVFN_UNMAP) (struct bvbuffdesc *buffdesc);
+typedef enum bverror (*BVFN_BLT) (struct bvbltparams *bltparams);
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+typedef enum bverror (*BVFN_CACHE)(struct bvcopparams *copparams);
+struct bventry {
+ unsigned int structsize;
+ BVFN_MAP bv_map;
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVFN_UNMAP bv_unmap;
+ BVFN_BLT bv_blt;
+ BVFN_CACHE bv_cache;
+};
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#endif
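
As a usage sketch, the entry points above are normally driven in a map / blt / unmap sequence. How struct bventry itself is obtained (a vendor-specific lookup) and how struct bvbltparams is built are outside these headers and assumed here:

#include <linux/bventry.h>
#include <linux/bverror.h>

static enum bverror map_blt_unmap(const struct bventry *bv,
                                  struct bvbuffdesc *desc,
                                  struct bvbltparams *blt)
{
        enum bverror err;

        err = bv->bv_map(desc);     /* pin/translate the buffer for the device */
        if (err != BVERR_NONE)
                return err;

        err = bv->bv_blt(blt);      /* blit parameters prepared by the caller */

        (void)bv->bv_unmap(desc);   /* always release the mapping */
        return err;
}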
diff --git a/kernel-headers-ti/linux/bverror.h b/kernel-headers-ti/linux/bverror.h
new file mode 100644
index 0000000..63a3841
--- /dev/null
+++ b/kernel-headers-ti/linux/bverror.h
@@ -0,0 +1,331 @@
+/****************************************************************************
+ ****************************************************************************
+ ***
+ *** This header was automatically generated from a Linux kernel header
+ *** of the same name, to make information necessary for userspace to
+ *** call into the kernel available to libc. It contains only constants,
+ *** structures, and macros generated from the original header, and thus,
+ *** contains no copyrightable information.
+ ***
+ *** To edit the content of this header, modify the corresponding
+ *** source file (e.g. under external/kernel-headers/original/) then
+ *** run bionic/libc/kernel/tools/update_all.py
+ ***
+ *** Any manual change here will be lost the next time this script will
+ *** be run. You've been warned!
+ ***
+ ****************************************************************************
+ ****************************************************************************/
+#ifndef BVERROR_H
+#define BVERROR_H
+#define BVERRDEF_VENDOR_SHIFT 24
+#define BVERRDEF_VENDOR_MASK (0xFF << BVERRDEF_VENDOR_SHIFT)
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define BVERRDEF_VENDOR_ALL (0x00 << BVERRDEF_VENDOR_SHIFT)
+#define BVERRDEF_VENDOR_TI (0x01 << BVERRDEF_VENDOR_SHIFT)
+enum bverror {
+ BVERR_NONE = 0,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVERR_UNK =
+ BVERRDEF_VENDOR_ALL + 1,
+ BVERR_OOM =
+ BVERRDEF_VENDOR_ALL + 2,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVERR_RSRC =
+ BVERRDEF_VENDOR_ALL + 3,
+ BVERR_VIRTADDR =
+ BVERRDEF_VENDOR_ALL + 1000,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVERR_VIRTPTR =
+ BVERR_VIRTADDR,
+ BVERR_BUFFERDESC =
+ BVERRDEF_VENDOR_ALL + 10000,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVERR_BUFFERDESC_VERS =
+ BVERRDEF_VENDOR_ALL + 11000,
+ BVERR_BUFFERDESC_VIRTADDR =
+ BVERRDEF_VENDOR_ALL + 12000,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVERR_BUFFERDESC_LEN =
+ BVERRDEF_VENDOR_ALL + 13000,
+ BVERR_BUFFERDESC_ALIGNMENT =
+ BVERRDEF_VENDOR_ALL + 14000,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVERR_BLTPARAMS_VERS =
+ BVERRDEF_VENDOR_ALL + 20000,
+ BVERR_IMPLEMENTATION =
+ BVERRDEF_VENDOR_ALL + 21000,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVERR_FLAGS =
+ BVERRDEF_VENDOR_ALL + 22000,
+ BVERR_OP =
+ BVERRDEF_VENDOR_ALL + 22100,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVERR_KEY =
+ BVERRDEF_VENDOR_ALL + 22200,
+ BVERR_SRC1_TILE =
+ BVERRDEF_VENDOR_ALL + 22300,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVERR_SRC2_TILE =
+ BVERRDEF_VENDOR_ALL + 22310,
+ BVERR_MASK_TILE =
+ BVERRDEF_VENDOR_ALL + 22320,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVERR_FLIP =
+ BVERRDEF_VENDOR_ALL + 22400,
+ BVERR_ROP =
+ BVERRDEF_VENDOR_ALL + 23000,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVERR_BLEND =
+ BVERRDEF_VENDOR_ALL + 23100,
+ BVERR_GLOBAL_ALPHA =
+ BVERRDEF_VENDOR_ALL + 23110,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVERR_FILTER =
+ BVERRDEF_VENDOR_ALL + 23200,
+ BVERR_FILTER_PARAMS_VERS =
+ BVERRDEF_VENDOR_ALL + 23210,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVERR_FILTER_PARAMS =
+ BVERRDEF_VENDOR_ALL + 23220,
+ BVERR_SCALE_MODE =
+ BVERRDEF_VENDOR_ALL + 24000,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVERR_DITHER_MODE =
+ BVERRDEF_VENDOR_ALL + 25000,
+ BVERR_DSTDESC =
+ BVERRDEF_VENDOR_ALL + 26000,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVERR_DSTDESC_VERS =
+ BVERRDEF_VENDOR_ALL + 26100,
+ BVERR_DSTDESC_VIRTADDR =
+ BVERRDEF_VENDOR_ALL + 26200,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVERR_DSTDESC_LEN =
+ BVERRDEF_VENDOR_ALL + 26300,
+ BVERR_DST_ALIGNMENT =
+ BVERRDEF_VENDOR_ALL + 26400,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVERR_DSTGEOM =
+ BVERRDEF_VENDOR_ALL + 27000,
+ BVERR_DSTGEOM_VERS =
+ BVERRDEF_VENDOR_ALL + 27100,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVERR_DSTGEOM_FORMAT =
+ BVERRDEF_VENDOR_ALL + 27200,
+ BVERR_DSTGEOM_STRIDE =
+ BVERRDEF_VENDOR_ALL + 27300,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVERR_DSTGEOM_PALETTE =
+ BVERRDEF_VENDOR_ALL + 27400,
+ BVERR_DSTRECT =
+ BVERRDEF_VENDOR_ALL + 28000,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVERR_SRC1DESC =
+ BVERRDEF_VENDOR_ALL + 29000,
+ BVERR_SRC1DESC_VERS =
+ BVERRDEF_VENDOR_ALL + 29100,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVERR_SRC1DESC_VIRTADDR =
+ BVERRDEF_VENDOR_ALL + 29200,
+ BVERR_SRC1DESC_LEN =
+ BVERRDEF_VENDOR_ALL + 29300,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVERR_SRC1DESC_ALIGNMENT =
+ BVERRDEF_VENDOR_ALL + 29400,
+ BVERR_SRC1GEOM =
+ BVERRDEF_VENDOR_ALL + 30000,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVERR_SRC1GEOM_VERS =
+ BVERRDEF_VENDOR_ALL + 30100,
+ BVERR_SRC1GEOM_FORMAT =
+ BVERRDEF_VENDOR_ALL + 30200,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVERR_SRC1GEOM_STRIDE =
+ BVERRDEF_VENDOR_ALL + 30300,
+ BVERR_SRC1GEOM_PALETTE =
+ BVERRDEF_VENDOR_ALL + 30400,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVERR_SRC1RECT =
+ BVERRDEF_VENDOR_ALL + 31000,
+ BVERR_SRC1_HORZSCALE =
+ BVERRDEF_VENDOR_ALL + 31100,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVERR_SRC1_VERTSCALE =
+ BVERRDEF_VENDOR_ALL + 31200,
+ BVERR_SRC1_ROT =
+ BVERRDEF_VENDOR_ALL + 31300,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVERR_SRC1_TILEPARAMS =
+ BVERR_SRC1DESC,
+ BVERR_SRC1_TILE_VERS =
+ BVERRDEF_VENDOR_ALL + 32000,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVERR_SRC1_TILEPARAMS_VERS =
+ BVERR_SRC1_TILE_VERS,
+ BVERR_SRC1_TILE_FLAGS =
+ BVERRDEF_VENDOR_ALL + 32100,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVERR_SRC1_TILEPARAMS_FLAGS =
+ BVERR_SRC1_TILE_FLAGS,
+ BVERR_SRC1_TILE_VIRTADDR =
+ BVERR_SRC1DESC_VIRTADDR,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVERR_SRC1_TILEPARAMS_VIRTADDR =
+ BVERR_SRC1_TILE_VIRTADDR,
+ BVERR_SRC1_TILE_ORIGIN =
+ BVERRDEF_VENDOR_ALL + 32200,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVERR_SRC1_TILEPARAMS_ORIGIN =
+ BVERR_SRC1_TILE_ORIGIN,
+ BVERR_SRC1_TILE_SIZE =
+ BVERRDEF_VENDOR_ALL + 32300,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVERR_SRC1_TILEPARAMS_SIZE =
+ BVERR_SRC1_TILE_SIZE,
+ BVERR_SRC2DESC =
+ BVERRDEF_VENDOR_ALL + 33000,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVERR_SRC2DESC_VERS =
+ BVERRDEF_VENDOR_ALL + 33100,
+ BVERR_SRC2DESC_VIRTADDR =
+ BVERRDEF_VENDOR_ALL + 33200,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVERR_SRC2DESC_LEN =
+ BVERRDEF_VENDOR_ALL + 33300,
+ BVERR_SRC2DESC_ALIGNMENT =
+ BVERRDEF_VENDOR_ALL + 33400,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVERR_SRC2GEOM =
+ BVERRDEF_VENDOR_ALL + 34000,
+ BVERR_SRC2GEOM_VERS =
+ BVERRDEF_VENDOR_ALL + 34100,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVERR_SRC2GEOM_FORMAT =
+ BVERRDEF_VENDOR_ALL + 34200,
+ BVERR_SRC2GEOM_STRIDE =
+ BVERRDEF_VENDOR_ALL + 34300,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVERR_SRC2GEOM_PALETTE =
+ BVERRDEF_VENDOR_ALL + 34400,
+ BVERR_SRC2RECT =
+ BVERRDEF_VENDOR_ALL + 35000,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVERR_SRC2_HORZSCALE =
+ BVERRDEF_VENDOR_ALL + 35100,
+ BVERR_SRC2_VERTSCALE =
+ BVERRDEF_VENDOR_ALL + 35200,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVERR_SRC2_ROT =
+ BVERRDEF_VENDOR_ALL + 35300,
+ BVERR_SRC2_TILEPARAMS =
+ BVERR_SRC2DESC,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVERR_SRC2_TILE_VERS =
+ BVERRDEF_VENDOR_ALL + 36000,
+ BVERR_SRC2_TILEPARAMS_VERS =
+ BVERR_SRC2_TILE_VERS,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVERR_SRC2_TILE_FLAGS =
+ BVERRDEF_VENDOR_ALL + 36100,
+ BVERR_SRC2_TILEPARAMS_FLAGS =
+ BVERR_SRC2_TILE_FLAGS,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVERR_SRC2_TILE_VIRTADDR =
+ BVERR_SRC2DESC_VIRTADDR,
+ BVERR_SRC2_TILEPARAMS_VIRTADDR =
+ BVERR_SRC2_TILE_VIRTADDR,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVERR_SRC2_TILE_ORIGIN =
+ BVERRDEF_VENDOR_ALL + 36200,
+ BVERR_SRC2_TILEPARAMS_ORIGIN =
+ BVERR_SRC2_TILE_ORIGIN,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVERR_SRC2_TILE_SIZE =
+ BVERRDEF_VENDOR_ALL + 36300,
+ BVERR_SRC2_TILEPARAMS_SIZE =
+ BVERR_SRC2_TILE_SIZE,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVERR_MASKDESC =
+ BVERRDEF_VENDOR_ALL + 37000,
+ BVERR_MASKDESC_VERS =
+ BVERRDEF_VENDOR_ALL + 37100,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVERR_MASKDESC_VIRTADDR =
+ BVERRDEF_VENDOR_ALL + 37200,
+ BVERR_MASKDESC_LEN =
+ BVERRDEF_VENDOR_ALL + 37300,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVERR_MASKDESC_ALIGNMENT =
+ BVERRDEF_VENDOR_ALL + 37400,
+ BVERR_MASKGEOM =
+ BVERRDEF_VENDOR_ALL + 38000,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVERR_MASKGEOM_VERS =
+ BVERRDEF_VENDOR_ALL + 38100,
+ BVERR_MASKGEOM_FORMAT =
+ BVERRDEF_VENDOR_ALL + 38200,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVERR_MASKGEOM_STRIDE =
+ BVERRDEF_VENDOR_ALL + 38300,
+ BVERR_MASKGEOM_PALETTE =
+ BVERRDEF_VENDOR_ALL + 38400,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVERR_MASKRECT =
+ BVERRDEF_VENDOR_ALL + 39000,
+ BVERR_MASK_HORZSCALE =
+ BVERRDEF_VENDOR_ALL + 39100,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVERR_MASK_VERTSCALE =
+ BVERRDEF_VENDOR_ALL + 39200,
+ BVERR_MASK_ROT =
+ BVERRDEF_VENDOR_ALL + 39300,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVERR_MASK_TILEPARAMS =
+ BVERR_MASKDESC,
+ BVERR_MASK_TILE_VERS =
+ BVERRDEF_VENDOR_ALL + 40000,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVERR_MASK_TILEPARAMS_VERS =
+ BVERR_MASK_TILE_VERS,
+ BVERR_MASK_TILE_FLAGS =
+ BVERRDEF_VENDOR_ALL + 40100,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVERR_MASK_TILEPARAMS_FLAGS =
+ BVERR_MASK_TILE_FLAGS,
+ BVERR_MASK_TILE_VIRTADDR =
+ BVERR_MASKDESC_VIRTADDR,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVERR_MASK_TILEPARAMS_VIRTADDR =
+ BVERR_MASK_TILE_VIRTADDR,
+ BVERR_MASK_TILE_ORIGIN =
+ BVERRDEF_VENDOR_ALL + 40200,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVERR_MASK_TILEPARAMS_ORIGIN =
+ BVERR_MASK_TILE_ORIGIN,
+ BVERR_MASK_TILE_SIZE =
+ BVERRDEF_VENDOR_ALL + 40300,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVERR_MASK_TILEPARAMS_SIZE =
+ BVERR_MASK_TILE_SIZE,
+ BVERR_CLIP_RECT =
+ BVERRDEF_VENDOR_ALL + 41000,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVERR_BATCH_FLAGS =
+ BVERRDEF_VENDOR_ALL + 42000,
+ BVERR_BATCH =
+ BVERRDEF_VENDOR_ALL + 43000,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVERR_OP_FAILED =
+ BVERRDEF_VENDOR_ALL + 50000,
+ BVERR_OP_INCOMPLETE =
+ BVERRDEF_VENDOR_ALL + 50001,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVERR_MEMORY_ERROR =
+ BVERRDEF_VENDOR_ALL + 51000,
+#ifdef BVERR_EXTERNAL_INCLUDE
+#include BVERR_EXTERNAL_INCLUDE
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#endif
+};
+#endif
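
The vendor shift/mask above makes it possible to classify error codes; a small sketch:

#include <linux/bverror.h>

/* Nonzero when the code is in the TI-specific range rather than one of the
 * generic (VENDOR_ALL) codes defined in this header. */
static int bverr_is_vendor_ti(enum bverror err)
{
        return ((unsigned int)err & BVERRDEF_VENDOR_MASK) == BVERRDEF_VENDOR_TI;
}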
diff --git a/kernel-headers-ti/linux/bvfilter.h b/kernel-headers-ti/linux/bvfilter.h
new file mode 100644
index 0000000..237a7d4
--- /dev/null
+++ b/kernel-headers-ti/linux/bvfilter.h
@@ -0,0 +1,34 @@
+/****************************************************************************
+ ****************************************************************************
+ ***
+ *** This header was automatically generated from a Linux kernel header
+ *** of the same name, to make information necessary for userspace to
+ *** call into the kernel available to libc. It contains only constants,
+ *** structures, and macros generated from the original header, and thus,
+ *** contains no copyrightable information.
+ ***
+ *** To edit the content of this header, modify the corresponding
+ *** source file (e.g. under external/kernel-headers/original/) then
+ *** run bionic/libc/kernel/tools/update_all.py
+ ***
+ *** Any manual change here will be lost the next time this script will
+ *** be run. You've been warned!
+ ***
+ ****************************************************************************
+ ****************************************************************************/
+#ifndef BVFILTER_H
+#define BVFILTER_H
+enum bvfiltertype {
+ BVFILTER_DUMMY
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#ifdef BVFILTER_EXTERNAL_INCLUDE
+#include BVFILTER_EXTERNAL_INCLUDE
+#endif
+};
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+struct bvfilter {
+ enum bvfiltertype filter;
+ void *params;
+};
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#endif
diff --git a/kernel-headers-ti/linux/bvinternal.h b/kernel-headers-ti/linux/bvinternal.h
new file mode 100644
index 0000000..a3f04d4
--- /dev/null
+++ b/kernel-headers-ti/linux/bvinternal.h
@@ -0,0 +1,29 @@
+/****************************************************************************
+ ****************************************************************************
+ ***
+ *** This header was automatically generated from a Linux kernel header
+ *** of the same name, to make information necessary for userspace to
+ *** call into the kernel available to libc. It contains only constants,
+ *** structures, and macros generated from the original header, and thus,
+ *** contains no copyrightable information.
+ ***
+ *** To edit the content of this header, modify the corresponding
+ *** source file (e.g. under external/kernel-headers/original/) then
+ *** run bionic/libc/kernel/tools/update_all.py
+ ***
+ *** Any manual change here will be lost the next time this script will
+ *** be run. You've been warned!
+ ***
+ ****************************************************************************
+ ****************************************************************************/
+#ifndef BVINTERNAL_H
+#define BVINTERNAL_H
+struct bvbuffmap {
+ unsigned int structsize;
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ BVFN_UNMAP bv_unmap;
+ unsigned long handle;
+ struct bvbuffmap *nextmap;
+};
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#endif
diff --git a/kernel-headers-ti/linux/bvsurfgeom.h b/kernel-headers-ti/linux/bvsurfgeom.h
new file mode 100644
index 0000000..fde6e7a
--- /dev/null
+++ b/kernel-headers-ti/linux/bvsurfgeom.h
@@ -0,0 +1,34 @@
+/****************************************************************************
+ ****************************************************************************
+ ***
+ *** This header was automatically generated from a Linux kernel header
+ *** of the same name, to make information necessary for userspace to
+ *** call into the kernel available to libc. It contains only constants,
+ *** structures, and macros generated from the original header, and thus,
+ *** contains no copyrightable information.
+ ***
+ *** To edit the content of this header, modify the corresponding
+ *** source file (e.g. under external/kernel-headers/original/) then
+ *** run bionic/libc/kernel/tools/update_all.py
+ ***
+ *** Any manual change here will be lost the next time this script will
+ *** be run. You've been warned!
+ ***
+ ****************************************************************************
+ ****************************************************************************/
+#ifndef BVSURFGEOM_H
+#define BVSURFGEOM_H
+struct bvsurfgeom {
+ unsigned int structsize;
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ enum ocdformat format;
+ unsigned int width;
+ unsigned int height;
+ int orientation;
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ long virtstride;
+ enum ocdformat paletteformat;
+ void *palette;
+};
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#endif
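
A sketch of filling struct bvsurfgeom for a 1280x720 NV12 surface. OCDFMT_NV12 and OCDFMT_NONE come from ocd.h (added later in this change); the assumption that the luma stride equals the width is illustrative only:

#include <stddef.h>
#include <linux/ocd.h>
#include <linux/bvsurfgeom.h>

static void init_nv12_geometry(struct bvsurfgeom *geom)
{
        geom->structsize = sizeof(*geom);
        geom->format = OCDFMT_NV12;     /* 2-plane 4:2:0 YCbCr */
        geom->width = 1280;
        geom->height = 720;
        geom->orientation = 0;          /* no rotation (degrees) */
        geom->virtstride = 1280;        /* bytes per luma row (width here) */
        geom->paletteformat = OCDFMT_NONE;
        geom->palette = NULL;           /* only meaningful for LUT formats */
}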
diff --git a/kernel-headers-ti/linux/ion.h b/kernel-headers-ti/linux/ion.h
new file mode 100644
index 0000000..300853a
--- /dev/null
+++ b/kernel-headers-ti/linux/ion.h
@@ -0,0 +1,76 @@
+/****************************************************************************
+ ****************************************************************************
+ ***
+ *** This header was automatically generated from a Linux kernel header
+ *** of the same name, to make information necessary for userspace to
+ *** call into the kernel available to libc. It contains only constants,
+ *** structures, and macros generated from the original header, and thus,
+ *** contains no copyrightable information.
+ ***
+ *** To edit the content of this header, modify the corresponding
+ *** source file (e.g. under external/kernel-headers/original/) then
+ *** run bionic/libc/kernel/tools/update_all.py
+ ***
+ *** Any manual change here will be lost the next time this script will
+ *** be run. You've been warned!
+ ***
+ ****************************************************************************
+ ****************************************************************************/
+#ifndef _LINUX_ION_H
+#define _LINUX_ION_H
+#include <linux/types.h>
+struct ion_handle;
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+enum ion_heap_type {
+ ION_HEAP_TYPE_SYSTEM,
+ ION_HEAP_TYPE_SYSTEM_CONTIG,
+ ION_HEAP_TYPE_CARVEOUT,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ ION_HEAP_TYPE_CUSTOM,
+ ION_NUM_HEAPS,
+};
+#define ION_HEAP_SYSTEM_MASK (1 << ION_HEAP_TYPE_SYSTEM)
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define ION_HEAP_SYSTEM_CONTIG_MASK (1 << ION_HEAP_TYPE_SYSTEM_CONTIG)
+#define ION_HEAP_CARVEOUT_MASK (1 << ION_HEAP_TYPE_CARVEOUT)
+struct ion_allocation_data {
+ size_t len;
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ size_t align;
+ unsigned int flags;
+ struct ion_handle *handle;
+};
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+struct ion_fd_data {
+ struct ion_handle *handle;
+ int fd;
+ unsigned char cacheable;
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+};
+struct ion_handle_data {
+ struct ion_handle *handle;
+};
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+struct ion_custom_data {
+ unsigned int cmd;
+ unsigned long arg;
+};
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+struct ion_cached_user_buf_data {
+ struct ion_handle *handle;
+ unsigned long vaddr;
+ size_t size;
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+};
+#define ION_IOC_MAGIC 'I'
+#define ION_IOC_ALLOC _IOWR(ION_IOC_MAGIC, 0, struct ion_allocation_data)
+#define ION_IOC_FREE _IOWR(ION_IOC_MAGIC, 1, struct ion_handle_data)
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define ION_IOC_MAP _IOWR(ION_IOC_MAGIC, 2, struct ion_fd_data)
+#define ION_IOC_SHARE _IOWR(ION_IOC_MAGIC, 4, struct ion_fd_data)
+#define ION_IOC_IMPORT _IOWR(ION_IOC_MAGIC, 5, int)
+#define ION_IOC_CUSTOM _IOWR(ION_IOC_MAGIC, 6, struct ion_custom_data)
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define ION_IOC_FLUSH_CACHED _IOWR(ION_IOC_MAGIC, 7, struct ion_cached_user_buf_data)
+#define ION_IOC_INVAL_CACHED _IOWR(ION_IOC_MAGIC, 8, struct ion_cached_user_buf_data)
+#endif
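
For reference, a minimal sketch of the legacy ION flow these ioctls imply: allocate from a heap, then export the handle as a shareable fd. The /dev/ion node name and the carveout heap choice are assumptions, and handle/fd lifetime handling is abbreviated:

#include <fcntl.h>
#include <string.h>
#include <sys/ioctl.h>
#include <unistd.h>
#include <linux/ion.h>

/* Allocate 'len' bytes and export the buffer; returns the shared fd or -1. */
static int ion_alloc_shared(size_t len)
{
        struct ion_allocation_data alloc;
        struct ion_fd_data share;
        int fd = -1;
        int ion = open("/dev/ion", O_RDONLY);

        if (ion < 0)
                return -1;

        memset(&alloc, 0, sizeof(alloc));
        alloc.len = len;
        alloc.align = 4096;                    /* page alignment */
        alloc.flags = ION_HEAP_CARVEOUT_MASK;  /* heap mask in this ABI */
        if (ioctl(ion, ION_IOC_ALLOC, &alloc) == 0) {
                memset(&share, 0, sizeof(share));
                share.handle = alloc.handle;
                if (ioctl(ion, ION_IOC_SHARE, &share) == 0)
                        fd = share.fd;         /* fd holds its own buffer reference */
        }

        /* Closing the client releases alloc.handle; the shared fd (if any)
         * keeps the underlying buffer alive. */
        close(ion);
        return fd;
}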
diff --git a/kernel-headers-ti/linux/ocd.h b/kernel-headers-ti/linux/ocd.h
new file mode 100644
index 0000000..c6fe07b
--- /dev/null
+++ b/kernel-headers-ti/linux/ocd.h
@@ -0,0 +1,624 @@
+/****************************************************************************
+ ****************************************************************************
+ ***
+ *** This header was automatically generated from a Linux kernel header
+ *** of the same name, to make information necessary for userspace to
+ *** call into the kernel available to libc. It contains only constants,
+ *** structures, and macros generated from the original header, and thus,
+ *** contains no copyrightable information.
+ ***
+ *** To edit the content of this header, modify the corresponding
+ *** source file (e.g. under external/kernel-headers/original/) then
+ *** run bionic/libc/kernel/tools/update_all.py
+ ***
+ *** Any manual change here will be lost the next time this script will
+ *** be run. You've been warned!
+ ***
+ ****************************************************************************
+ ****************************************************************************/
+#ifndef OCD_H
+#define OCD_H
+#define OCDFMTDEF_VENDOR_SHIFT 24
+#define OCDFMTDEF_VENDOR_MASK (0xFF << OCDFMTDEF_VENDOR_SHIFT)
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define OCDFMTDEF_VENDOR_ALL (0x00 << OCDFMTDEF_VENDOR_SHIFT)
+#define OCDFMTDEF_VENDOR_TI (0x01 << OCDFMTDEF_VENDOR_SHIFT)
+#define OCDFMTDEF_CS_SHIFT 21
+#define OCDFMTDEF_CS_MASK (7 << OCDFMTDEF_CS_SHIFT)
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define OCDFMTDEF_CS_MONO (0 << OCDFMTDEF_CS_SHIFT)
+#define OCDFMTDEF_CS_LUT (1 << OCDFMTDEF_CS_SHIFT)
+#define OCDFMTDEF_CS_RGB (2 << OCDFMTDEF_CS_SHIFT)
+#define OCDFMTDEF_CS_YCbCr (3 << OCDFMTDEF_CS_SHIFT)
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define OCDFMTDEF_CS_ALPHA (4 << OCDFMTDEF_CS_SHIFT)
+#define OCDFMTDEF_STD_SHIFT 19
+#define OCDFMTDEF_STD_MASK (3 << OCDFMTDEF_STD_SHIFT)
+#define OCDFMTDEF_STD_ITUR_601_YCbCr (0 << OCDFMTDEF_STD_SHIFT)
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define OCDFMTDEF_STD_ITUR_709_YCbCr (1 << OCDFMTDEF_STD_SHIFT)
+#define OCDFMTDEF_FULLSCALE_YCbCr (3 << OCDFMTDEF_STD_SHIFT)
+#define OCDFMTDEF_ALPHA (1 << 18)
+#define OCDFMTDEF_NON_PREMULT (1 << 17)
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define OCDFMTDEF_FILL_EMPTY_0 (1 << 17)
+#define OCDFMTDEF_SUBSAMPLE_HORZ_ALIGNED (0 << 16)
+#define OCDFMTDEF_SUBSAMPLE_HORZ_CENTERED (1 << 16)
+#define OCDFMTDEF_ALPHA_COMPONENTS_SHIFT 16
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define OCDFMTDEF_ALPHA_COMPONENTS_MASK (3 << OCDFMTDEF_ALPHA_COMPONENTS_SHIFT)
+#define OCDFMTDEF_ALPHA_COMPONENTS_1 (0 << OCDFMTDEF_ALPHA_COMPONENTS_SHIFT)
+#define OCDFMTDEF_ALPHA_COMPONENTS_2 (1 << OCDFMTDEF_ALPHA_COMPONENTS_SHIFT)
+#define OCDFMTDEF_ALPHA_COMPONENTS_3 (2 << OCDFMTDEF_ALPHA_COMPONENTS_SHIFT)
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define OCDFMTDEF_ALPHA_COMPONENTS_4 (3 << OCDFMTDEF_ALPHA_COMPONENTS_SHIFT)
+#define OCDFMTDEF_SUBSAMPLE_SHIFT 14
+#define OCDFMTDEF_SUBSAMPLE_MASK (3 << OCDFMTDEF_SUBSAMPLE_SHIFT)
+#define OCDFMTDEF_SUBSAMPLE_NONE (0 << OCDFMTDEF_SUBSAMPLE_SHIFT)
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define OCDFMTDEF_SUBSAMPLE_422_YCbCr (1 << OCDFMTDEF_SUBSAMPLE_SHIFT)
+#define OCDFMTDEF_SUBSAMPLE_420_YCbCr (2 << OCDFMTDEF_SUBSAMPLE_SHIFT)
+#define OCDFMTDEF_SUBSAMPLE_411_YCbCr (3 << OCDFMTDEF_SUBSAMPLE_SHIFT)
+#define OCDFMTDEF_LAYOUT_SHIFT 11
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define OCDFMTDEF_LAYOUT_MASK (7 << OCDFMTDEF_LAYOUT_SHIFT)
+#define OCDFMTDEF_PACKED (0 << OCDFMTDEF_LAYOUT_SHIFT)
+#define OCDFMTDEF_DISTRIBUTED (1 << OCDFMTDEF_LAYOUT_SHIFT)
+#define OCDFMTDEF_2_PLANE_YCbCr (2 << OCDFMTDEF_LAYOUT_SHIFT)
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define OCDFMTDEF_3_PLANE_STACKED (3 << OCDFMTDEF_LAYOUT_SHIFT)
+#define OCDFMTDEF_3_PLANE_SIDE_BY_SIDE_YCbCr (7 << OCDFMTDEF_LAYOUT_SHIFT)
+#define OCDFMTDEF_REVERSED (1 << 10)
+#define OCDFMTDEF_LEFT_JUSTIFIED (1 << 9)
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define OCDFMTDEF_CONTAINER_SHIFT 6
+#define OCDFMTDEF_CONTAINER_MASK (7 << OCDFMTDEF_CONTAINER_SHIFT)
+#define OCDFMTDEF_CONTAINER_8BIT (0 << OCDFMTDEF_CONTAINER_SHIFT)
+#define OCDFMTDEF_CONTAINER_16BIT (1 << OCDFMTDEF_CONTAINER_SHIFT)
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define OCDFMTDEF_CONTAINER_24BIT (2 << OCDFMTDEF_CONTAINER_SHIFT)
+#define OCDFMTDEF_CONTAINER_32BIT (3 << OCDFMTDEF_CONTAINER_SHIFT)
+#define OCDFMTDEF_CONTAINER_48BIT (5 << OCDFMTDEF_CONTAINER_SHIFT)
+#define OCDFMTDEF_CONTAINER_64BIT (7 << OCDFMTDEF_CONTAINER_SHIFT)
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define OCDFMTDEF_COMPONENTSIZEMINUS1_SHIFT 0
+#define OCDFMTDEF_COMPONENTSIZEMINUS1_MASK (0x3F << OCDFMTDEF_COMPONENTSIZEMINUS1_SHIFT)
+enum ocdformat {
+ OCDFMT_UNKNOWN = -1,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMT_NONE = OCDFMT_UNKNOWN,
+ OCDFMT_ALPHA1 = OCDFMTDEF_VENDOR_ALL |
+ OCDFMTDEF_CS_ALPHA |
+ OCDFMTDEF_ALPHA_COMPONENTS_1 |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMTDEF_PACKED |
+ OCDFMTDEF_CONTAINER_8BIT |
+ (1 - 1),
+ OCDFMT_ALPHA2 = OCDFMTDEF_VENDOR_ALL |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMTDEF_CS_ALPHA |
+ OCDFMTDEF_ALPHA_COMPONENTS_1 |
+ OCDFMTDEF_PACKED |
+ OCDFMTDEF_CONTAINER_8BIT |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ (2 - 1),
+ OCDFMT_ALPHA4 = OCDFMTDEF_VENDOR_ALL |
+ OCDFMTDEF_CS_ALPHA |
+ OCDFMTDEF_ALPHA_COMPONENTS_1 |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMTDEF_PACKED |
+ OCDFMTDEF_CONTAINER_8BIT |
+ (4 - 1),
+ OCDFMT_ALPHA8 = OCDFMTDEF_VENDOR_ALL |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMTDEF_CS_ALPHA |
+ OCDFMTDEF_ALPHA_COMPONENTS_1 |
+ OCDFMTDEF_PACKED |
+ OCDFMTDEF_CONTAINER_8BIT |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ (8 - 1),
+ OCDFMT_ALPHA4x1 = OCDFMTDEF_VENDOR_ALL |
+ OCDFMTDEF_CS_ALPHA |
+ OCDFMTDEF_ALPHA_COMPONENTS_4 |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMTDEF_PACKED |
+ OCDFMTDEF_CONTAINER_8BIT |
+ (4 - 1),
+ OCDFMT_ALPHA3x8 = OCDFMTDEF_VENDOR_ALL |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMTDEF_CS_ALPHA |
+ OCDFMTDEF_ALPHA_COMPONENTS_3 |
+ OCDFMTDEF_PACKED |
+ OCDFMTDEF_CONTAINER_24BIT |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ (24 - 1),
+ OCDFMT_ALPHA4x8 = OCDFMTDEF_VENDOR_ALL |
+ OCDFMTDEF_CS_ALPHA |
+ OCDFMTDEF_ALPHA_COMPONENTS_4 |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMTDEF_PACKED |
+ OCDFMTDEF_CONTAINER_32BIT |
+ (32 - 1),
+ OCDFMT_MONO1 = OCDFMTDEF_VENDOR_ALL |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMTDEF_CS_MONO |
+ OCDFMTDEF_PACKED |
+ OCDFMTDEF_CONTAINER_8BIT |
+ (1 - 1),
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMT_MONO2 = OCDFMTDEF_VENDOR_ALL |
+ OCDFMTDEF_CS_MONO |
+ OCDFMTDEF_PACKED |
+ OCDFMTDEF_CONTAINER_8BIT |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ (2 - 1),
+ OCDFMT_MONO4 = OCDFMTDEF_VENDOR_ALL |
+ OCDFMTDEF_CS_MONO |
+ OCDFMTDEF_PACKED |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMTDEF_CONTAINER_8BIT |
+ (4 - 1),
+ OCDFMT_MONO8 = OCDFMTDEF_VENDOR_ALL |
+ OCDFMTDEF_CS_MONO |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMTDEF_PACKED |
+ OCDFMTDEF_CONTAINER_8BIT |
+ (8 - 1),
+ OCDFMT_LUT1 = OCDFMTDEF_VENDOR_ALL |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMTDEF_CS_LUT |
+ OCDFMTDEF_PACKED |
+ OCDFMTDEF_CONTAINER_8BIT |
+ (1 - 1),
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMT_LUT2 = OCDFMTDEF_VENDOR_ALL |
+ OCDFMTDEF_CS_LUT |
+ OCDFMTDEF_PACKED |
+ OCDFMTDEF_CONTAINER_8BIT |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ (2 - 1),
+ OCDFMT_LUT4 = OCDFMTDEF_VENDOR_ALL |
+ OCDFMTDEF_CS_LUT |
+ OCDFMTDEF_PACKED |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMTDEF_CONTAINER_8BIT |
+ (4 - 1),
+ OCDFMT_LUT8 = OCDFMTDEF_VENDOR_ALL |
+ OCDFMTDEF_CS_LUT |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMTDEF_PACKED |
+ OCDFMTDEF_CONTAINER_8BIT |
+ (8 - 1),
+ OCDFMT_RGB12 = OCDFMTDEF_VENDOR_ALL |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMTDEF_CS_RGB |
+ OCDFMTDEF_SUBSAMPLE_NONE |
+ OCDFMTDEF_PACKED |
+ OCDFMTDEF_CONTAINER_16BIT |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ (12 - 1),
+ OCDFMT_xRGB12 = OCDFMT_RGB12,
+ OCDFMT_1RGB12 = OCDFMT_xRGB12,
+ OCDFMT_0RGB12 = OCDFMT_xRGB12 |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMTDEF_FILL_EMPTY_0,
+ OCDFMT_BGR12 = OCDFMT_RGB12 |
+ OCDFMTDEF_REVERSED,
+ OCDFMT_xBGR12 = OCDFMT_BGR12,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMT_1BGR12 = OCDFMT_xBGR12,
+ OCDFMT_0BGR12 = OCDFMT_xBGR12 |
+ OCDFMTDEF_FILL_EMPTY_0,
+ OCDFMT_RGBx12 = OCDFMT_xRGB12 |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMTDEF_LEFT_JUSTIFIED,
+ OCDFMT_RGB112 = OCDFMT_RGBx12,
+ OCDFMT_RGB012 = OCDFMT_RGBx12 |
+ OCDFMTDEF_FILL_EMPTY_0,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMT_BGRx12 = OCDFMT_xRGB12 |
+ OCDFMTDEF_LEFT_JUSTIFIED |
+ OCDFMTDEF_REVERSED,
+ OCDFMT_BGR112 = OCDFMT_BGRx12,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMT_BGR012 = OCDFMT_BGRx12 |
+ OCDFMTDEF_FILL_EMPTY_0,
+ OCDFMT_RGB15 = OCDFMTDEF_VENDOR_ALL |
+ OCDFMTDEF_CS_RGB |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMTDEF_SUBSAMPLE_NONE |
+ OCDFMTDEF_PACKED |
+ OCDFMTDEF_CONTAINER_16BIT |
+ (15 - 1),
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMT_xRGB15 = OCDFMT_RGB15,
+ OCDFMT_1RGB15 = OCDFMT_xRGB15,
+ OCDFMT_0RGB15 = OCDFMT_xRGB15 |
+ OCDFMTDEF_FILL_EMPTY_0,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMT_BGR15 = OCDFMT_RGB15 |
+ OCDFMTDEF_REVERSED,
+ OCDFMT_xBGR15 = OCDFMT_BGR15,
+ OCDFMT_1BGR15 = OCDFMT_xBGR15,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMT_0BGR15 = OCDFMT_xBGR15 |
+ OCDFMTDEF_FILL_EMPTY_0,
+ OCDFMT_RGBx15 = OCDFMT_RGB15 |
+ OCDFMTDEF_LEFT_JUSTIFIED,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMT_RGB115 = OCDFMT_RGBx15,
+ OCDFMT_RGB015 = OCDFMT_RGBx15 |
+ OCDFMTDEF_FILL_EMPTY_0,
+ OCDFMT_BGRx15 = OCDFMT_RGB15 |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMTDEF_LEFT_JUSTIFIED |
+ OCDFMTDEF_REVERSED,
+ OCDFMT_BGR115 = OCDFMT_BGRx15,
+ OCDFMT_BGR015 = OCDFMT_BGRx15 |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMTDEF_FILL_EMPTY_0,
+ OCDFMT_RGB16 = OCDFMTDEF_VENDOR_ALL |
+ OCDFMTDEF_CS_RGB |
+ OCDFMTDEF_SUBSAMPLE_NONE |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMTDEF_PACKED |
+ OCDFMTDEF_CONTAINER_16BIT |
+ (16 - 1),
+ OCDFMT_BGR16 = OCDFMT_RGB16 |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMTDEF_REVERSED,
+ OCDFMT_RGB24 = OCDFMTDEF_VENDOR_ALL |
+ OCDFMTDEF_CS_RGB |
+ OCDFMTDEF_SUBSAMPLE_NONE |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMTDEF_PACKED |
+ OCDFMTDEF_CONTAINER_24BIT |
+ (24 - 1),
+ OCDFMT_BGR24 = OCDFMT_RGB24 |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMTDEF_REVERSED,
+ OCDFMT_xRGB16 = OCDFMTDEF_VENDOR_ALL |
+ OCDFMTDEF_CS_RGB |
+ OCDFMTDEF_SUBSAMPLE_NONE |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMTDEF_PACKED |
+ OCDFMTDEF_CONTAINER_24BIT |
+ (16 - 1),
+ OCDFMT_1RGB16 = OCDFMT_xRGB16,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMT_0RGB16 = OCDFMT_xRGB16 |
+ OCDFMTDEF_FILL_EMPTY_0,
+ OCDFMT_xBGR16 = OCDFMT_xRGB16 |
+ OCDFMTDEF_REVERSED,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMT_1BGR16 = OCDFMT_xBGR16,
+ OCDFMT_0BGR16 = OCDFMT_xBGR16 |
+ OCDFMTDEF_FILL_EMPTY_0,
+ OCDFMT_RGBx16 = OCDFMT_xRGB16 |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMTDEF_LEFT_JUSTIFIED,
+ OCDFMT_RGB116 = OCDFMT_RGBx16,
+ OCDFMT_RGB016 = OCDFMT_RGBx16 |
+ OCDFMTDEF_FILL_EMPTY_0,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMT_BGRx16 = OCDFMT_xRGB16 |
+ OCDFMTDEF_LEFT_JUSTIFIED |
+ OCDFMTDEF_REVERSED,
+ OCDFMT_BGR116 = OCDFMT_BGRx16,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMT_BGR016 = OCDFMT_BGRx16 |
+ OCDFMTDEF_FILL_EMPTY_0,
+ OCDFMT_xRGB24 = OCDFMTDEF_VENDOR_ALL |
+ OCDFMTDEF_CS_RGB |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMTDEF_SUBSAMPLE_NONE |
+ OCDFMTDEF_PACKED |
+ OCDFMTDEF_CONTAINER_32BIT |
+ (24 - 1),
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMT_1RGB24 = OCDFMT_xRGB24,
+ OCDFMT_0RGB24 = OCDFMT_xRGB24 |
+ OCDFMTDEF_FILL_EMPTY_0,
+ OCDFMT_xBGR24 = OCDFMT_xRGB24 |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMTDEF_REVERSED,
+ OCDFMT_1BGR24 = OCDFMT_xBGR24,
+ OCDFMT_0BGR24 = OCDFMT_xBGR24 |
+ OCDFMTDEF_FILL_EMPTY_0,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMT_RGBx24 = OCDFMT_xRGB24 |
+ OCDFMTDEF_LEFT_JUSTIFIED,
+ OCDFMT_RGB124 = OCDFMT_RGBx24,
+ OCDFMT_RGB024 = OCDFMT_RGBx24 |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMTDEF_FILL_EMPTY_0,
+ OCDFMT_BGRx24 = OCDFMT_xRGB24 |
+ OCDFMTDEF_LEFT_JUSTIFIED |
+ OCDFMTDEF_REVERSED,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMT_BGR124 = OCDFMT_BGRx24,
+ OCDFMT_BGR024 = OCDFMT_BGRx24 |
+ OCDFMTDEF_FILL_EMPTY_0,
+ OCDFMT_ARGB12 = OCDFMT_xRGB12 |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMTDEF_ALPHA,
+ OCDFMT_ABGR12 = OCDFMT_xBGR12 |
+ OCDFMTDEF_ALPHA,
+ OCDFMT_RGBA12 = OCDFMT_RGBx12 |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMTDEF_ALPHA,
+ OCDFMT_BGRA12 = OCDFMT_BGRx12 |
+ OCDFMTDEF_ALPHA,
+ OCDFMT_ARGB16 = OCDFMT_xRGB16 |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMTDEF_ALPHA,
+ OCDFMT_ABGR16 = OCDFMT_ARGB16 |
+ OCDFMTDEF_REVERSED,
+ OCDFMT_RGBA16 = OCDFMT_ARGB16 |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMTDEF_LEFT_JUSTIFIED,
+ OCDFMT_BGRA16 = OCDFMT_ARGB16 |
+ OCDFMTDEF_LEFT_JUSTIFIED |
+ OCDFMTDEF_REVERSED,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMT_ARGB24 = OCDFMT_xRGB24 |
+ OCDFMTDEF_ALPHA,
+ OCDFMT_ABGR24 = OCDFMT_xBGR24 |
+ OCDFMTDEF_ALPHA,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMT_RGBA24 = OCDFMT_RGBx24 |
+ OCDFMTDEF_ALPHA,
+ OCDFMT_BGRA24 = OCDFMT_BGRx24 |
+ OCDFMTDEF_ALPHA,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMT_nARGB12 = OCDFMT_ARGB12 |
+ OCDFMTDEF_NON_PREMULT,
+ OCDFMT_ARGB12_NON_PREMULT = OCDFMT_nARGB12,
+ OCDFMT_nABGR12 = OCDFMT_ABGR12 |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMTDEF_NON_PREMULT,
+ OCDFMT_ABGR12_NON_PREMULT = OCDFMT_nABGR12,
+ OCDFMT_nRGBA12 = OCDFMT_RGBA12 |
+ OCDFMTDEF_NON_PREMULT,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMT_RGBA12_NON_PREMULT = OCDFMT_nRGBA12,
+ OCDFMT_nBGRA12 = OCDFMT_BGRA12 |
+ OCDFMTDEF_NON_PREMULT,
+ OCDFMT_BGRA12_NON_PREMULT = OCDFMT_nBGRA12,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMT_ARGB15 = OCDFMTDEF_VENDOR_ALL |
+ OCDFMTDEF_CS_RGB |
+ OCDFMTDEF_ALPHA |
+ OCDFMTDEF_NON_PREMULT |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMTDEF_SUBSAMPLE_NONE |
+ OCDFMTDEF_PACKED |
+ OCDFMTDEF_CONTAINER_16BIT |
+ (15 - 1),
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMT_nARGB15 = OCDFMT_ARGB15,
+ OCDFMT_ARGB15_NON_PREMULT = OCDFMT_nARGB15,
+ OCDFMT_ABGR15 = OCDFMT_ARGB15 |
+ OCDFMTDEF_REVERSED,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMT_nABGR15 = OCDFMT_ABGR15,
+ OCDFMT_ABGR15_NON_PREMULT = OCDFMT_nABGR15,
+ OCDFMT_RGBA15 = OCDFMT_ARGB15 |
+ OCDFMTDEF_LEFT_JUSTIFIED,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMT_nRGBA15 = OCDFMT_RGBA15,
+ OCDFMT_RGBA15_NON_PREMULT = OCDFMT_nRGBA15,
+ OCDFMT_BGRA15 = OCDFMT_ARGB15 |
+ OCDFMTDEF_LEFT_JUSTIFIED |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMTDEF_REVERSED,
+ OCDFMT_nBGRA15 = OCDFMT_BGRA15,
+ OCDFMT_BGRA15_NON_PREMULT = OCDFMT_nBGRA15,
+ OCDFMT_nARGB16 = OCDFMT_ARGB16 |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMTDEF_NON_PREMULT,
+ OCDFMT_ARGB16_NON_PREMULT = OCDFMT_nARGB16,
+ OCDFMT_nABGR16 = OCDFMT_ABGR16 |
+ OCDFMTDEF_NON_PREMULT,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMT_ABGR16_NON_PREMULT = OCDFMT_nABGR16,
+ OCDFMT_nRGBA16 = OCDFMT_RGBA16 |
+ OCDFMTDEF_NON_PREMULT,
+ OCDFMT_RGBA16_NON_PREMULT = OCDFMT_nRGBA16,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMT_nBGRA16 = OCDFMT_BGRA16 |
+ OCDFMTDEF_NON_PREMULT,
+ OCDFMT_BGRA16_NON_PREMULT = OCDFMT_nBGRA16,
+ OCDFMT_nARGB24 = OCDFMT_ARGB24 |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMTDEF_NON_PREMULT,
+ OCDFMT_ARGB24_NON_PREMULT = OCDFMT_nARGB24,
+ OCDFMT_nABGR24 = OCDFMT_ABGR24 |
+ OCDFMTDEF_NON_PREMULT,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMT_ABGR24_NON_PREMULT = OCDFMT_nABGR24,
+ OCDFMT_nRGBA24 = OCDFMT_RGBA24 |
+ OCDFMTDEF_NON_PREMULT,
+ OCDFMT_RGBA24_NON_PREMULT = OCDFMT_nRGBA24,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMT_nBGRA24 = OCDFMT_BGRA24 |
+ OCDFMTDEF_NON_PREMULT,
+ OCDFMT_BGRA24_NON_PREMULT = OCDFMT_nBGRA24,
+ OCDFMT_UYVY = OCDFMTDEF_VENDOR_ALL |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMTDEF_CS_YCbCr |
+ OCDFMTDEF_SUBSAMPLE_422_YCbCr |
+ OCDFMTDEF_SUBSAMPLE_HORZ_ALIGNED |
+ OCDFMTDEF_PACKED |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMTDEF_CONTAINER_32BIT |
+ (16 - 1),
+ OCDFMT_UYVY_601 = OCDFMT_UYVY |
+ OCDFMTDEF_STD_ITUR_601_YCbCr,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMT_UYVY_709 = OCDFMT_UYVY |
+ OCDFMTDEF_STD_ITUR_709_YCbCr,
+ OCDFMT_Y422 = OCDFMT_UYVY,
+ OCDFMT_Y422_601 = OCDFMT_UYVY_601,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMT_Y422_709 = OCDFMT_UYVY_709,
+ OCDFMT_VYUY = OCDFMT_UYVY |
+ OCDFMTDEF_REVERSED,
+ OCDFMT_VYUY_601 = OCDFMT_VYUY |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMTDEF_STD_ITUR_601_YCbCr,
+ OCDFMT_VYUY_709 = OCDFMT_VYUY |
+ OCDFMTDEF_STD_ITUR_709_YCbCr,
+ OCDFMT_YUYV = OCDFMT_UYVY |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMTDEF_LEFT_JUSTIFIED,
+ OCDFMT_YUYV_601 = OCDFMT_YUYV |
+ OCDFMTDEF_STD_ITUR_601_YCbCr,
+ OCDFMT_YUYV_709 = OCDFMT_YUYV |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMTDEF_STD_ITUR_709_YCbCr,
+ OCDFMT_YUY2 = OCDFMT_YUYV,
+ OCDFMT_YUY2_601 = OCDFMT_YUYV_601,
+ OCDFMT_YUY2_709 = OCDFMT_YUYV_709,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMT_YVYU = OCDFMT_VYUY |
+ OCDFMTDEF_LEFT_JUSTIFIED,
+ OCDFMT_YVYU_601 = OCDFMT_YVYU |
+ OCDFMTDEF_STD_ITUR_601_YCbCr,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMT_YVYU_709 = OCDFMT_YVYU |
+ OCDFMTDEF_STD_ITUR_709_YCbCr,
+ OCDFMT_YV16 = OCDFMTDEF_VENDOR_ALL |
+ OCDFMTDEF_CS_YCbCr |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMTDEF_SUBSAMPLE_422_YCbCr |
+ OCDFMTDEF_SUBSAMPLE_HORZ_ALIGNED |
+ OCDFMTDEF_3_PLANE_STACKED |
+ OCDFMTDEF_CONTAINER_32BIT |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ (16 - 1),
+ OCDFMT_YV16_601 = OCDFMT_YV16 |
+ OCDFMTDEF_STD_ITUR_601_YCbCr,
+ OCDFMT_YV16_709 = OCDFMT_YV16 |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMTDEF_STD_ITUR_709_YCbCr,
+ OCDFMT_IYUV = OCDFMTDEF_VENDOR_ALL |
+ OCDFMTDEF_CS_YCbCr |
+ OCDFMTDEF_SUBSAMPLE_420_YCbCr |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMTDEF_SUBSAMPLE_HORZ_ALIGNED |
+ OCDFMTDEF_3_PLANE_STACKED |
+ OCDFMTDEF_CONTAINER_48BIT |
+ (12 - 1),
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMT_IYUV_601 = OCDFMT_IYUV |
+ OCDFMTDEF_STD_ITUR_601_YCbCr,
+ OCDFMT_IYUV_709 = OCDFMT_IYUV |
+ OCDFMTDEF_STD_ITUR_709_YCbCr,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMT_I420 = OCDFMT_IYUV,
+ OCDFMT_I420_601 = OCDFMT_IYUV_601,
+ OCDFMT_I420_709 = OCDFMT_IYUV_709,
+ OCDFMT_YV12 = OCDFMT_IYUV |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMTDEF_REVERSED,
+ OCDFMT_YV12_601 = OCDFMT_YV12 |
+ OCDFMTDEF_STD_ITUR_601_YCbCr,
+ OCDFMT_YV12_709 = OCDFMT_YV12 |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMTDEF_STD_ITUR_709_YCbCr,
+ OCDFMT_IMC3 = OCDFMTDEF_VENDOR_ALL |
+ OCDFMTDEF_CS_YCbCr |
+ OCDFMTDEF_SUBSAMPLE_420_YCbCr |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMTDEF_SUBSAMPLE_HORZ_ALIGNED |
+ OCDFMTDEF_3_PLANE_STACKED |
+ OCDFMTDEF_LEFT_JUSTIFIED |
+ OCDFMTDEF_CONTAINER_48BIT |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ (12 - 1),
+ OCDFMT_IMC3_601 = OCDFMT_IMC3 |
+ OCDFMTDEF_STD_ITUR_601_YCbCr,
+ OCDFMT_IMC3_709 = OCDFMT_IMC3 |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMTDEF_STD_ITUR_709_YCbCr,
+ OCDFMT_IMC1 = OCDFMT_IMC3 |
+ OCDFMTDEF_REVERSED,
+ OCDFMT_IMC1_601 = OCDFMT_IMC1 |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMTDEF_STD_ITUR_601_YCbCr,
+ OCDFMT_IMC1_709 = OCDFMT_IMC1 |
+ OCDFMTDEF_STD_ITUR_709_YCbCr,
+ OCDFMT_IMC4 = OCDFMTDEF_VENDOR_ALL |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMTDEF_CS_YCbCr |
+ OCDFMTDEF_STD_ITUR_601_YCbCr |
+ OCDFMTDEF_SUBSAMPLE_420_YCbCr |
+ OCDFMTDEF_SUBSAMPLE_HORZ_ALIGNED |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMTDEF_3_PLANE_SIDE_BY_SIDE_YCbCr |
+ OCDFMTDEF_CONTAINER_48BIT |
+ (12 - 1),
+ OCDFMT_IMC4_601 = OCDFMT_IMC4 |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMTDEF_STD_ITUR_601_YCbCr,
+ OCDFMT_IMC4_709 = OCDFMT_IMC4 |
+ OCDFMTDEF_STD_ITUR_709_YCbCr,
+ OCDFMT_IMC2 = OCDFMT_IMC4 |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMTDEF_REVERSED,
+ OCDFMT_IMC2_601 = OCDFMT_IMC2 |
+ OCDFMTDEF_STD_ITUR_601_YCbCr,
+ OCDFMT_IMC2_709 = OCDFMT_IMC2 |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMTDEF_STD_ITUR_709_YCbCr,
+ OCDFMT_NV16 = OCDFMTDEF_VENDOR_ALL |
+ OCDFMTDEF_CS_YCbCr |
+ OCDFMTDEF_SUBSAMPLE_422_YCbCr |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMTDEF_SUBSAMPLE_HORZ_ALIGNED |
+ OCDFMTDEF_2_PLANE_YCbCr |
+ OCDFMTDEF_CONTAINER_32BIT |
+ (16 - 1),
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMT_NV16_601 = OCDFMT_NV16 |
+ OCDFMTDEF_STD_ITUR_601_YCbCr,
+ OCDFMT_NV16_709 = OCDFMT_NV16 |
+ OCDFMTDEF_STD_ITUR_709_YCbCr,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMT_NV61 = OCDFMT_NV16 |
+ OCDFMTDEF_REVERSED,
+ OCDFMT_NV61_601 = OCDFMT_NV61 |
+ OCDFMTDEF_STD_ITUR_601_YCbCr,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMT_NV61_709 = OCDFMT_NV61 |
+ OCDFMTDEF_STD_ITUR_709_YCbCr,
+ OCDFMT_NV12 = OCDFMTDEF_VENDOR_ALL |
+ OCDFMTDEF_CS_YCbCr |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMTDEF_STD_ITUR_601_YCbCr |
+ OCDFMTDEF_SUBSAMPLE_420_YCbCr |
+ OCDFMTDEF_SUBSAMPLE_HORZ_ALIGNED |
+ OCDFMTDEF_2_PLANE_YCbCr |
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMTDEF_CONTAINER_48BIT |
+ (12 - 1),
+ OCDFMT_NV12_601 = OCDFMT_NV12 |
+ OCDFMTDEF_STD_ITUR_601_YCbCr,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMT_NV12_709 = OCDFMT_NV12 |
+ OCDFMTDEF_STD_ITUR_709_YCbCr,
+ OCDFMT_NV21 = OCDFMT_NV12 |
+ OCDFMTDEF_REVERSED,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OCDFMT_NV21_601 = OCDFMT_NV21 |
+ OCDFMTDEF_STD_ITUR_601_YCbCr,
+ OCDFMT_NV21_709 = OCDFMT_NV21 |
+ OCDFMTDEF_STD_ITUR_709_YCbCr,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#ifdef OCD_EXTERNAL_INCLUDE
+#include OCD_EXTERNAL_INCLUDE
+#endif
+};
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#endif
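
Since every ocdformat value is a bitwise composition of the OCDFMTDEF_* fields above, individual properties can be recovered with the masks; a small sketch:

#include <linux/ocd.h>

/* Effective bits per pixel: stored as (bits - 1) in the low field. */
static unsigned int ocd_bits_per_pixel(enum ocdformat fmt)
{
        return ((unsigned int)fmt & OCDFMTDEF_COMPONENTSIZEMINUS1_MASK) + 1;
}

/* Nonzero for YCbCr formats such as OCDFMT_NV12 or OCDFMT_UYVY. */
static int ocd_is_ycbcr(enum ocdformat fmt)
{
        return ((unsigned int)fmt & OCDFMTDEF_CS_MASK) == OCDFMTDEF_CS_YCbCr;
}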
diff --git a/kernel-headers-ti/linux/omap_ion.h b/kernel-headers-ti/linux/omap_ion.h
new file mode 100644
index 0000000..b8a6228
--- /dev/null
+++ b/kernel-headers-ti/linux/omap_ion.h
@@ -0,0 +1,61 @@
+/****************************************************************************
+ ****************************************************************************
+ ***
+ *** This header was automatically generated from a Linux kernel header
+ *** of the same name, to make information necessary for userspace to
+ *** call into the kernel available to libc. It contains only constants,
+ *** structures, and macros generated from the original header, and thus,
+ *** contains no copyrightable information.
+ ***
+ *** To edit the content of this header, modify the corresponding
+ *** source file (e.g. under external/kernel-headers/original/) then
+ *** run bionic/libc/kernel/tools/update_all.py
+ ***
+ *** Any manual change here will be lost the next time this script will
+ *** be run. You've been warned!
+ ***
+ ****************************************************************************
+ ****************************************************************************/
+#ifndef _LINUX_OMAP_ION_H
+#define _LINUX_OMAP_ION_H
+#include <linux/types.h>
+struct omap_ion_tiler_alloc_data {
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ size_t w;
+ size_t h;
+ int fmt;
+ unsigned int flags;
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ struct ion_handle *handle;
+ size_t stride;
+ size_t offset;
+ unsigned int out_align;
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ unsigned int token;
+};
+enum {
+ OMAP_ION_HEAP_TYPE_TILER = ION_HEAP_TYPE_CUSTOM + 1,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+};
+#define OMAP_ION_HEAP_TILER_MASK (1 << OMAP_ION_HEAP_TYPE_TILER)
+enum {
+ OMAP_ION_TILER_ALLOC,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+};
+enum {
+ TILER_PIXEL_FMT_MIN = 0,
+ TILER_PIXEL_FMT_8BIT = 0,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ TILER_PIXEL_FMT_16BIT = 1,
+ TILER_PIXEL_FMT_32BIT = 2,
+ TILER_PIXEL_FMT_PAGE = 3,
+ TILER_PIXEL_FMT_MAX = 3
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+};
+enum {
+ OMAP_ION_HEAP_LARGE_SURFACES,
+ OMAP_ION_HEAP_TILER,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OMAP_ION_HEAP_SECURE_INPUT,
+};
+#endif
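
A sketch of how these definitions combine with the generic ION custom ioctl (ion.h above) to request a TILER allocation. The ION client fd is assumed to come from opening the ION device as in the earlier sketch, and the 8-bit pixel format is just an example:

#include <string.h>
#include <sys/ioctl.h>
#include <linux/ion.h>
#include <linux/omap_ion.h>

/* Ask the OMAP ION heap for a w x h TILER-backed 2D buffer; on success the
 * driver fills in handle, stride and offset in 'data'. */
static int omap_tiler_alloc(int ion_fd, size_t w, size_t h,
                            struct omap_ion_tiler_alloc_data *data)
{
        struct ion_custom_data custom;

        memset(data, 0, sizeof(*data));
        data->w = w;
        data->h = h;
        data->fmt = TILER_PIXEL_FMT_8BIT;  /* one byte per pixel, e.g. NV12 luma */
        data->flags = 0;

        custom.cmd = OMAP_ION_TILER_ALLOC;
        custom.arg = (unsigned long)data;

        return ioctl(ion_fd, ION_IOC_CUSTOM, &custom);
}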
diff --git a/kernel-headers-ti/linux/rpmsg_omx.h b/kernel-headers-ti/linux/rpmsg_omx.h
new file mode 100644
index 0000000..e02c36e
--- /dev/null
+++ b/kernel-headers-ti/linux/rpmsg_omx.h
@@ -0,0 +1,60 @@
+/****************************************************************************
+ ****************************************************************************
+ ***
+ *** This header was automatically generated from a Linux kernel header
+ *** of the same name, to make information necessary for userspace to
+ *** call into the kernel available to libc. It contains only constants,
+ *** structures, and macros generated from the original header, and thus,
+ *** contains no copyrightable information.
+ ***
+ *** To edit the content of this header, modify the corresponding
+ *** source file (e.g. under external/kernel-headers/original/) then
+ *** run bionic/libc/kernel/tools/update_all.py
+ ***
+ *** Any manual change here will be lost the next time this script will
+ *** be run. You've been warned!
+ ***
+ ****************************************************************************
+ ****************************************************************************/
+#ifndef RPMSG_OMX_H
+#define RPMSG_OMX_H
+#include <linux/ioctl.h>
+
+/**
+ * struct omx_pvr_data - metadata passed to/from userspace for a pvr register
+ * @fd: a file descriptor representing a pvr handle
+ * @num_handles: field filled by driver. userspace uses this to determine
+ * number of handles associated with fd
+ * @handles: opaque pointers pointing to buffers
+ */
+struct omx_pvr_data {
+ int fd;
+ unsigned int num_handles;
+ void *handles[2];
+};
+
+#define OMX_IOC_MAGIC 'X'
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define OMX_IOCCONNECT _IOW(OMX_IOC_MAGIC, 1, char *)
+#define OMX_IOCIONREGISTER _IOWR(OMX_IOC_MAGIC, 2, struct ion_fd_data)
+#define OMX_IOCIONUNREGISTER _IOWR(OMX_IOC_MAGIC, 3, struct ion_fd_data)
+#define OMX_IOCPVRREGISTER _IOWR(OMX_IOC_MAGIC, 4, struct omx_pvr_data)
+
+#define OMX_IOC_MAXNR (4)
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+struct omx_conn_req {
+ char name[48];
+} __packed;
+struct omx_packet {
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ uint16_t desc;
+ uint16_t msg_id;
+ uint32_t flags;
+ uint32_t fxn_idx;
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ int32_t result;
+ uint32_t data_size;
+ uint32_t data[0];
+};
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#endif
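
A sketch of the connection step these ioctls imply: open an rpmsg-omx device node and bind the channel to a named remote OMX service. The /dev/rpmsg-omx1 node name is an assumption; after connecting, requests travel as struct omx_packet payloads over write()/read(), and ION buffers are registered with OMX_IOCIONREGISTER before being referenced remotely:

#include <fcntl.h>
#include <sys/ioctl.h>
#include <unistd.h>
#include <linux/rpmsg_omx.h>

/* Returns a connected channel fd, or -1 on failure. */
static int omx_connect(const char *service_name)
{
        int fd = open("/dev/rpmsg-omx1", O_RDWR);

        if (fd < 0)
                return -1;

        /* OMX_IOCCONNECT takes the service name; the driver forwards it to
         * the remote core as a struct omx_conn_req. */
        if (ioctl(fd, OMX_IOCCONNECT, service_name) < 0) {
                close(fd);
                return -1;
        }
        return fd;
}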
diff --git a/kernel-headers-ti/video/dsscomp.h b/kernel-headers-ti/video/dsscomp.h
new file mode 100644
index 0000000..8668c13
--- /dev/null
+++ b/kernel-headers-ti/video/dsscomp.h
@@ -0,0 +1,414 @@
+/****************************************************************************
+ ****************************************************************************
+ ***
+ *** This header was automatically generated from a Linux kernel header
+ *** of the same name, to make information necessary for userspace to
+ *** call into the kernel available to libc. It contains only constants,
+ *** structures, and macros generated from the original header, and thus,
+ *** contains no copyrightable information.
+ ***
+ *** To edit the content of this header, modify the corresponding
+ *** source file (e.g. under external/kernel-headers/original/) then
+ *** run bionic/libc/kernel/tools/update_all.py
+ ***
+ *** Any manual change here will be lost the next time this script will
+ *** be run. You've been warned!
+ ***
+ ****************************************************************************
+ ****************************************************************************/
+#ifndef _LINUX_DSSCOMP_H
+#define _LINUX_DSSCOMP_H
+enum omap_plane {
+ OMAP_DSS_GFX = 0,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OMAP_DSS_VIDEO1 = 1,
+ OMAP_DSS_VIDEO2 = 2,
+ OMAP_DSS_VIDEO3 = 3,
+ OMAP_DSS_WB = 4,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+};
+enum omap_channel {
+ OMAP_DSS_CHANNEL_LCD = 0,
+ OMAP_DSS_CHANNEL_DIGIT = 1,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OMAP_DSS_CHANNEL_LCD2 = 2,
+};
+enum omap_color_mode {
+ OMAP_DSS_COLOR_CLUT1 = 1 << 0,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OMAP_DSS_COLOR_CLUT2 = 1 << 1,
+ OMAP_DSS_COLOR_CLUT4 = 1 << 2,
+ OMAP_DSS_COLOR_CLUT8 = 1 << 3,
+ OMAP_DSS_COLOR_RGB12U = 1 << 4,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OMAP_DSS_COLOR_ARGB16 = 1 << 5,
+ OMAP_DSS_COLOR_RGB16 = 1 << 6,
+ OMAP_DSS_COLOR_RGB24U = 1 << 7,
+ OMAP_DSS_COLOR_RGB24P = 1 << 8,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OMAP_DSS_COLOR_YUV2 = 1 << 9,
+ OMAP_DSS_COLOR_UYVY = 1 << 10,
+ OMAP_DSS_COLOR_ARGB32 = 1 << 11,
+ OMAP_DSS_COLOR_RGBA32 = 1 << 12,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OMAP_DSS_COLOR_RGBX24 = 1 << 13,
+ OMAP_DSS_COLOR_RGBX32 = 1 << 13,
+ OMAP_DSS_COLOR_NV12 = 1 << 14,
+ OMAP_DSS_COLOR_RGBA16 = 1 << 15,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OMAP_DSS_COLOR_RGBX12 = 1 << 16,
+ OMAP_DSS_COLOR_RGBX16 = 1 << 16,
+ OMAP_DSS_COLOR_ARGB16_1555 = 1 << 17,
+ OMAP_DSS_COLOR_XRGB15 = 1 << 18,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OMAP_DSS_COLOR_XRGB16_1555 = 1 << 18,
+};
+enum omap_writeback_source {
+ OMAP_WB_LCD1 = 0,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OMAP_WB_TV = 1,
+ OMAP_WB_LCD2 = 2,
+ OMAP_WB_GFX = 3,
+ OMAP_WB_VID1 = 4,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OMAP_WB_VID2 = 5,
+ OMAP_WB_VID3 = 6
+};
+enum omap_writeback_mode {
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OMAP_WB_CAPTURE_MODE = 0x0,
+ OMAP_WB_MEM2MEM_MODE = 0x1,
+};
+enum omap_dss_trans_key_type {
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OMAP_DSS_COLOR_KEY_GFX_DST = 0,
+ OMAP_DSS_COLOR_KEY_VID_SRC = 1,
+};
+enum omap_dss_display_state {
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OMAP_DSS_DISPLAY_DISABLED = 0,
+ OMAP_DSS_DISPLAY_ACTIVE,
+ OMAP_DSS_DISPLAY_SUSPENDED,
+ OMAP_DSS_DISPLAY_TRANSITION,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+};
+struct omap_video_timings {
+ __u16 x_res;
+ __u16 y_res;
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ __u32 pixel_clock;
+ __u16 hsw;
+ __u16 hfp;
+ __u16 hbp;
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ __u16 vsw;
+ __u16 vfp;
+ __u16 vbp;
+};
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+struct omap_dss_cconv_coefs {
+ __s16 ry, rcr, rcb;
+ __s16 gy, gcr, gcb;
+ __s16 by, bcr, bcb;
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ __u16 full_range;
+} __attribute__ ((aligned(4)));
+struct omap_dss_cpr_coefs {
+ __s16 rr, rg, rb;
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ __s16 gr, gg, gb;
+ __s16 br, bg, bb;
+};
+struct dsscomp_videomode {
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ const char *name;
+ __u32 refresh;
+ __u32 xres;
+ __u32 yres;
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ __u32 pixclock;
+ __u32 left_margin;
+ __u32 right_margin;
+ __u32 upper_margin;
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ __u32 lower_margin;
+ __u32 hsync_len;
+ __u32 vsync_len;
+ __u32 sync;
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ __u32 vmode;
+ __u32 flag;
+};
+enum s3d_disp_type {
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ S3D_DISP_NONE = 0,
+ S3D_DISP_FRAME_SEQ,
+ S3D_DISP_ROW_IL,
+ S3D_DISP_COL_IL,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ S3D_DISP_PIX_IL,
+ S3D_DISP_CHECKB,
+ S3D_DISP_OVERUNDER,
+ S3D_DISP_SIDEBYSIDE,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+};
+enum s3d_disp_sub_sampling {
+ S3D_DISP_SUB_SAMPLE_NONE = 0,
+ S3D_DISP_SUB_SAMPLE_V,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ S3D_DISP_SUB_SAMPLE_H,
+};
+enum s3d_disp_order {
+ S3D_DISP_ORDER_L = 0,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ S3D_DISP_ORDER_R = 1,
+};
+enum s3d_disp_view {
+ S3D_DISP_VIEW_L = 0,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ S3D_DISP_VIEW_R,
+};
+struct s3d_disp_info {
+ enum s3d_disp_type type;
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ enum s3d_disp_sub_sampling sub_samp;
+ enum s3d_disp_order order;
+ unsigned int gap;
+};
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+enum omap_dss_ilace_mode {
+ OMAP_DSS_ILACE = (1 << 0),
+ OMAP_DSS_ILACE_SEQ = (1 << 1),
+ OMAP_DSS_ILACE_SWAP = (1 << 2),
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OMAP_DSS_ILACE_NONE = 0,
+ OMAP_DSS_ILACE_IL_TB = OMAP_DSS_ILACE,
+ OMAP_DSS_ILACE_IL_BT = OMAP_DSS_ILACE | OMAP_DSS_ILACE_SWAP,
+ OMAP_DSS_ILACE_SEQ_TB = OMAP_DSS_ILACE_IL_TB | OMAP_DSS_ILACE_SEQ,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OMAP_DSS_ILACE_SEQ_BT = OMAP_DSS_ILACE_IL_BT | OMAP_DSS_ILACE_SEQ,
+};
+struct dss2_vc1_range_map_info {
+ __u8 enable;
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ __u8 range_y;
+ __u8 range_uv;
+} __attribute__ ((aligned(4)));
+struct dss2_rect_t {
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ __s32 x;
+ __s32 y;
+ __u32 w;
+ __u32 h;
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+} __attribute__ ((aligned(4)));
+struct dss2_decim {
+ __u8 min_x;
+ __u8 max_x;
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ __u8 min_y;
+ __u8 max_y;
+} __attribute__ ((aligned(4)));
+struct dss2_ovl_cfg {
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ __u16 width;
+ __u16 height;
+ __u32 stride;
+ enum omap_color_mode color_mode;
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ __u8 pre_mult_alpha;
+ __u8 global_alpha;
+ __u8 rotation;
+ __u8 mirror;
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ enum omap_dss_ilace_mode ilace;
+ struct dss2_rect_t win;
+ struct dss2_rect_t crop;
+ struct dss2_decim decim;
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ struct omap_dss_cconv_coefs cconv;
+ struct dss2_vc1_range_map_info vc1;
+ __u8 wb_source;
+ enum omap_writeback_mode wb_mode;
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ __u8 ix;
+ __u8 zorder;
+ __u8 enabled;
+ __u8 zonly;
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ __u8 mgr_ix;
+} __attribute__ ((aligned(4)));
+enum omapdss_buffer_type {
+ OMAP_DSS_BUFTYPE_SDMA,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OMAP_DSS_BUFTYPE_TILER_8BIT,
+ OMAP_DSS_BUFTYPE_TILER_16BIT,
+ OMAP_DSS_BUFTYPE_TILER_32BIT,
+ OMAP_DSS_BUFTYPE_TILER_PAGE,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+};
+enum omapdss_buffer_addressing_type {
+ OMAP_DSS_BUFADDR_DIRECT,
+ OMAP_DSS_BUFADDR_BYTYPE,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OMAP_DSS_BUFADDR_ION,
+ OMAP_DSS_BUFADDR_GRALLOC,
+ OMAP_DSS_BUFADDR_OVL_IX,
+ OMAP_DSS_BUFADDR_LAYER_IX,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ OMAP_DSS_BUFADDR_FB,
+};
+struct dss2_ovl_info {
+ struct dss2_ovl_cfg cfg;
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ enum omapdss_buffer_addressing_type addressing;
+ union {
+ struct {
+ void *address;
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ void *uv_address;
+ };
+ struct {
+ enum omapdss_buffer_type ba_type;
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ enum omapdss_buffer_type uv_type;
+ };
+ struct {
+ __u32 ba;
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ __u32 uv;
+ };
+ };
+};
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+struct dss2_mgr_info {
+ __u32 ix;
+ __u32 default_color;
+ enum omap_dss_trans_key_type trans_key_type;
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ __u32 trans_key;
+ struct omap_dss_cpr_coefs cpr_coefs;
+ __u8 trans_enabled;
+ __u8 interlaced;
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ __u8 alpha_blending;
+ __u8 cpr_enabled;
+ __u8 swap_rb;
+} __attribute__ ((aligned(4)));
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+enum dsscomp_setup_mode {
+ DSSCOMP_SETUP_MODE_APPLY = (1 << 0),
+ DSSCOMP_SETUP_MODE_DISPLAY = (1 << 1),
+ DSSCOMP_SETUP_MODE_CAPTURE = (1 << 2),
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ DSSCOMP_SETUP_APPLY = DSSCOMP_SETUP_MODE_APPLY,
+ DSSCOMP_SETUP_DISPLAY =
+ DSSCOMP_SETUP_MODE_APPLY | DSSCOMP_SETUP_MODE_DISPLAY,
+ DSSCOMP_SETUP_CAPTURE =
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ DSSCOMP_SETUP_MODE_APPLY | DSSCOMP_SETUP_MODE_CAPTURE,
+ DSSCOMP_SETUP_DISPLAY_CAPTURE =
+ DSSCOMP_SETUP_DISPLAY | DSSCOMP_SETUP_CAPTURE,
+};
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+struct dsscomp_setup_mgr_data {
+ __u32 sync_id;
+ struct dss2_rect_t win;
+ enum dsscomp_setup_mode mode;
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ __u16 num_ovls;
+ __u16 get_sync_obj;
+ struct dss2_mgr_info mgr;
+ struct dss2_ovl_info ovls[0];
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+};
+struct dsscomp_check_ovl_data {
+ enum dsscomp_setup_mode mode;
+ struct dss2_mgr_info mgr;
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ struct dss2_ovl_info ovl;
+};
+struct dsscomp_setup_dispc_data {
+ __u32 sync_id;
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ enum dsscomp_setup_mode mode;
+ __u16 num_ovls;
+ __u16 num_mgrs;
+ __u16 get_sync_obj;
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ struct dss2_mgr_info mgrs[3];
+ struct dss2_ovl_info ovls[5];
+};
+struct dsscomp_wb_copy_data {
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ struct dss2_ovl_info ovl, wb;
+};
+struct dsscomp_display_info {
+ __u32 ix;
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ __u32 overlays_available;
+ __u32 overlays_owned;
+ enum omap_channel channel;
+ enum omap_dss_display_state state;
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ __u8 enabled;
+ struct omap_video_timings timings;
+ struct s3d_disp_info s3d_info;
+ struct dss2_mgr_info mgr;
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ __u16 width_in_mm;
+ __u16 height_in_mm;
+ __u32 modedb_len;
+ struct dsscomp_videomode modedb[];
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+};
+struct dsscomp_setup_display_data {
+ __u32 ix;
+ struct dsscomp_videomode mode;
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+};
+enum dsscomp_wait_phase {
+ DSSCOMP_WAIT_PROGRAMMED = 1,
+ DSSCOMP_WAIT_DISPLAYED,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ DSSCOMP_WAIT_RELEASED,
+};
+struct dsscomp_wait_data {
+ __u32 timeout_us;
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ enum dsscomp_wait_phase phase;
+};
+enum dsscomp_fbmem_type {
+ DSSCOMP_FBMEM_TILER2D = 0,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ DSSCOMP_FBMEM_VRAM = 1,
+};
+struct dsscomp_platform_info {
+ __u8 max_xdecim_2d;
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ __u8 max_ydecim_2d;
+ __u8 max_xdecim_1d;
+ __u8 max_ydecim_1d;
+ __u32 fclk;
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ __u8 min_width;
+ __u16 max_width;
+ __u16 max_height;
+ __u8 max_downscale;
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ __u16 integer_scale_ratio_limit;
+ __u32 tiler1d_slot_size;
+ enum dsscomp_fbmem_type fbmem_type;
+};
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define DSSCIOC_SETUP_MGR _IOW('O', 128, struct dsscomp_setup_mgr_data)
+#define DSSCIOC_CHECK_OVL _IOWR('O', 129, struct dsscomp_check_ovl_data)
+#define DSSCIOC_WB_COPY _IOW('O', 130, struct dsscomp_wb_copy_data)
+#define DSSCIOC_QUERY_DISPLAY _IOWR('O', 131, struct dsscomp_display_info)
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define DSSCIOC_WAIT _IOW('O', 132, struct dsscomp_wait_data)
+#define DSSCIOC_SETUP_DISPC _IOW('O', 133, struct dsscomp_setup_dispc_data)
+#define DSSCIOC_SETUP_DISPLAY _IOW('O', 134, struct dsscomp_setup_display_data)
+#define DSSCIOC_QUERY_PLATFORM _IOR('O', 135, struct dsscomp_platform_info)
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#endif
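As a hedged usage sketch (not taken from this commit), the platform-capability query above could be driven roughly as follows; the /dev/dsscomp node name is an assumption.

    #include <fcntl.h>
    #include <sys/ioctl.h>
    #include <unistd.h>
    #include <linux/types.h>
    #include <video/dsscomp.h>

    // Fill *info with the DSS composition limits reported by the kernel.
    static int query_dss_platform(struct dsscomp_platform_info *info)
    {
        int fd = open("/dev/dsscomp", O_RDWR);             // assumed device node
        if (fd < 0)
            return -1;

        int ret = ioctl(fd, DSSCIOC_QUERY_PLATFORM, info); // ioctl defined above
        close(fd);
        return ret;
    }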
diff --git a/kernel-headers-ti/video/omap_hwc.h b/kernel-headers-ti/video/omap_hwc.h
new file mode 100644
index 0000000..b461bfb
--- /dev/null
+++ b/kernel-headers-ti/video/omap_hwc.h
@@ -0,0 +1,48 @@
+/****************************************************************************
+ ****************************************************************************
+ ***
+ *** This header was automatically generated from a Linux kernel header
+ *** of the same name, to make information necessary for userspace to
+ *** call into the kernel available to libc. It contains only constants,
+ *** structures, and macros generated from the original header, and thus,
+ *** contains no copyrightable information.
+ ***
+ *** To edit the content of this header, modify the corresponding
+ *** source file (e.g. under external/kernel-headers/original/) then
+ *** run bionic/libc/kernel/tools/update_all.py
+ ***
+ *** Any manual change here will be lost the next time this script will
+ *** be run. You've been warned!
+ ***
+ ****************************************************************************
+ ****************************************************************************/
+#ifndef _LINUX_OMAP_HWC_H
+#define _LINUX_OMAP_HWC_H
+#define HWC_BLT_DESC_FLAG 0x80000000
+#define HWC_BLT_DESC_FB 0x40000000
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define HWC_BLT_DESC_FB_FN(ovlno) (HWC_BLT_DESC_FLAG | HWC_BLT_DESC_FB | (ovlno))
+#define HWC_BLT_FLAG_USE_FB (1 << 0)
+struct rgz_blt_entry {
+ struct bvbltparams bp;
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ struct bvsurfgeom dstgeom;
+ struct bvsurfgeom src1geom;
+ struct bvbuffdesc src1desc;
+ struct bvsurfgeom src2geom;
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ struct bvbuffdesc src2desc;
+};
+struct omap_hwc_blit_data {
+ __u16 rgz_flags;
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ __u16 rgz_items;
+ struct rgz_blt_entry rgz_blts[0];
+};
+struct omap_hwc_data {
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ struct dsscomp_setup_dispc_data dsscomp_data;
+ struct omap_hwc_blit_data blit_data;
+};
+#endif
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
diff --git a/libtiutils/Android.mk b/libtiutils/Android.mk
index e15eba9..ba6aab1 100644
--- a/libtiutils/Android.mk
+++ b/libtiutils/Android.mk
@@ -7,10 +7,11 @@ include $(CLEAR_VARS)
LOCAL_PRELINK_MODULE := false
LOCAL_SRC_FILES:= \
+ DebugUtils.cpp \
MessageQueue.cpp \
Semaphore.cpp \
ErrorUtils.cpp
-
+
LOCAL_SHARED_LIBRARIES:= \
libdl \
libui \
@@ -19,13 +20,26 @@ LOCAL_SHARED_LIBRARIES:= \
libcutils
LOCAL_C_INCLUDES += \
- bionic/libc/include \
- hardware/ti/omap4xxx/domx/omx_core/inc \
- hardware/ti/omap4xxx/domx/mm_osal/inc
-
-LOCAL_CFLAGS += -fno-short-enums
+ frameworks/native/include
+
+LOCAL_C_INCLUDES += \
+ bionic/libc/include
+
+LOCAL_C_INCLUDES += \
+ $(HARDWARE_TI_OMAP4_BASE)/domx/omx_core/inc \
+ $(HARDWARE_TI_OMAP4_BASE)/domx/mm_osal/inc
+
+LOCAL_CFLAGS += -fno-short-enums $(ANDROID_API_CFLAGS)
+
+ifdef TI_UTILS_MESSAGE_QUEUE_DEBUG_ENABLED
+ # Enable debug logs
+ LOCAL_CFLAGS += -DMSGQ_DEBUG
+endif
-# LOCAL_CFLAGS +=
+ifdef TI_UTILS_MESSAGE_QUEUE_DEBUG_FUNCTION_NAMES
+ # Enable function enter/exit logging
+ LOCAL_CFLAGS += -DTI_UTILS_FUNCTION_LOGGER_ENABLE
+endif
LOCAL_MODULE:= libtiutils
LOCAL_MODULE_TAGS:= optional
diff --git a/libtiutils/DebugUtils.cpp b/libtiutils/DebugUtils.cpp
new file mode 100644
index 0000000..60ad0c8
--- /dev/null
+++ b/libtiutils/DebugUtils.cpp
@@ -0,0 +1,96 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "DebugUtils.h"
+
+#include "utils/Debug.h"
+
+
+
+
+namespace Ti {
+
+
+
+
+// shared const buffer with spaces for indentation string
+extern const char sIndentStringBuffer[] =
+ " "
+ " ";
+template class android::CompileTimeAssert<sizeof(sIndentStringBuffer) - 1 == kIndentStringMaxLength>;
+
+
+
+
+static const int kDebugThreadInfoGrowSize = 16;
+
+
+
+
+Debug Debug::sInstance;
+
+
+
+
+Debug::Debug()
+{
+ grow();
+}
+
+
+void Debug::grow()
+{
+ android::AutoMutex locker(mMutex);
+ (void)locker;
+
+ const int size = kDebugThreadInfoGrowSize;
+
+ const int newSize = (mData.get() ? mData->threads.size() : 0) + size;
+
+ Data * const newData = new Data;
+ newData->threads.setCapacity(newSize);
+
+ // insert previous thread info pointers
+ if ( mData.get() )
+ newData->threads.insertVectorAt(mData->threads, 0);
+
+ // populate data with new thread infos
+ for ( int i = 0; i < size; ++i )
+ newData->threads.add(new ThreadInfo);
+
+ // replace old data with new one
+ mData = newData;
+}
+
+
+Debug::ThreadInfo * Debug::registerThread(Data * const data, const int32_t threadId)
+{
+ const int size = data->threads.size();
+ for ( int i = 0; i < size; ++i )
+ {
+ ThreadInfo * const threadInfo = data->threads.itemAt(i);
+ if ( android_atomic_acquire_cas(0, threadId, &threadInfo->threadId) == 0 )
+ return threadInfo;
+ }
+
+ // failed to find an empty slot for the thread
+ return 0;
+}
+
+
+
+
+} // namespace Ti
diff --git a/libtiutils/DebugUtils.h b/libtiutils/DebugUtils.h
index f421252..a05ba8d 100644
--- a/libtiutils/DebugUtils.h
+++ b/libtiutils/DebugUtils.h
@@ -14,23 +14,384 @@
* limitations under the License.
*/
-
-
#ifndef DEBUG_UTILS_H
#define DEBUG_UTILS_H
-///Defines for debug statements - Macro LOG_TAG needs to be defined in the respective files
-#define DBGUTILS_LOGVA(str) ALOGV("%s:%d %s - " str,__FILE__, __LINE__,__FUNCTION__);
-#define DBGUTILS_LOGVB(str,...) ALOGV("%s:%d %s - " str,__FILE__, __LINE__, __FUNCTION__, __VA_ARGS__);
-#define DBGUTILS_LOGDA(str) ALOGD("%s:%d %s - " str,__FILE__, __LINE__,__FUNCTION__);
-#define DBGUTILS_LOGDB(str, ...) ALOGD("%s:%d %s - " str,__FILE__, __LINE__, __FUNCTION__, __VA_ARGS__);
-#define DBGUTILS_LOGEA(str) ALOGE("%s:%d %s - " str,__FILE__, __LINE__, __FUNCTION__);
-#define DBGUTILS_LOGEB(str, ...) ALOGE("%s:%d %s - " str,__FILE__, __LINE__,__FUNCTION__, __VA_ARGS__);
-#define LOG_FUNCTION_NAME ALOGV("%d: %s() ENTER", __LINE__, __FUNCTION__);
-#define LOG_FUNCTION_NAME_EXIT ALOGV("%d: %s() EXIT", __LINE__, __FUNCTION__);
+#include <android/log.h>
+#include <utils/threads.h>
+#include <utils/Vector.h>
-#endif //DEBUG_UTILS_H
+namespace Ti {
+
+
+
+
+// use 2 space characters for call stack indent
+static const int kFunctionLoggerIndentSize = 2;
+
+
+
+
+template <int Size = kFunctionLoggerIndentSize>
+class IndentString
+{
+public:
+ IndentString(int length);
+
+ const char * string() const;
+
+private:
+ int calculateOffset(int length) const;
+
+private:
+ const int mOffset;
+};
+
+
+
+
+class Debug
+{
+public:
+ static Debug * instance();
+
+ int offsetForCurrentThread();
+ void log(int priority, const char * format, ...);
+
+private:
+ class ThreadInfo
+ {
+ public:
+ ThreadInfo() :
+ threadId(0), callOffset(0)
+ {}
+
+ volatile int32_t threadId;
+ int callOffset;
+ };
+
+ class Data : public android::RefBase
+ {
+ public:
+ android::Vector<ThreadInfo*> threads;
+ };
+
+private:
+ // called from FunctionLogger
+ void increaseOffsetForCurrentThread();
+ void decreaseOffsetForCurrentThread();
+
+private:
+ Debug();
+
+ void grow();
+ ThreadInfo * registerThread(Data * data, int32_t threadId);
+ ThreadInfo * findCurrentThreadInfo();
+ void addOffsetForCurrentThread(int offset);
+
+private:
+ static Debug sInstance;
+
+ mutable android::Mutex mMutex;
+ android::sp<Data> mData;
+
+ friend class FunctionLogger;
+};
+
+
+
+
+class FunctionLogger
+{
+public:
+ FunctionLogger(const char * file, int line, const char * function);
+ ~FunctionLogger();
+
+ void setExitLine(int line);
+
+private:
+ const char * const mFile;
+ const int mLine;
+ const char * const mFunction;
+ const void * const mThreadId;
+ int mExitLine;
+};
+
+
+
+
+#ifdef TI_UTILS_FUNCTION_LOGGER_ENABLE
+# define LOG_FUNCTION_NAME Ti::FunctionLogger __function_logger_instance(__FILE__, __LINE__, __FUNCTION__);
+# define LOG_FUNCTION_NAME_EXIT __function_logger_instance.setExitLine(__LINE__);
+#else
+# define LOG_FUNCTION_NAME int __function_logger_instance;
+# define LOG_FUNCTION_NAME_EXIT (void*)__function_logger_instance;
+#endif
+
+#ifdef TI_UTILS_DEBUG_USE_TIMESTAMPS
+ // wrap the timestamp every 1000 seconds so it fits into 6 characters
+# define TI_UTILS_DEBUG_TIMESTAMP_TOKEN "[%06d] "
+# define TI_UTILS_DEBUG_TIMESTAMP_VARIABLE static_cast<int>(nanoseconds_to_milliseconds(systemTime()) % 1000000),
+#else
+# define TI_UTILS_DEBUG_TIMESTAMP_TOKEN
+# define TI_UTILS_DEBUG_TIMESTAMP_VARIABLE
+#endif
+
+
+
+
+#define DBGUTILS_LOGV_FULL(priority, file, line, function, format, ...) \
+ do \
+ { \
+ Ti::Debug * const debug = Ti::Debug::instance(); \
+ debug->log(priority, format, \
+ TI_UTILS_DEBUG_TIMESTAMP_VARIABLE \
+ reinterpret_cast<int>(androidGetThreadId()), \
+ Ti::IndentString<>(debug->offsetForCurrentThread()).string(), \
+ file, line, function, __VA_ARGS__); \
+ } while (0)
+
+#define DBGUTILS_LOGV(...) DBGUTILS_LOGV_FULL(ANDROID_LOG_VERBOSE, __FILE__, __LINE__, __FUNCTION__, TI_UTILS_DEBUG_TIMESTAMP_TOKEN "(%x) %s %s:%d %s - " __VA_ARGS__, "")
+#define DBGUTILS_LOGD(...) DBGUTILS_LOGV_FULL(ANDROID_LOG_DEBUG, __FILE__, __LINE__, __FUNCTION__, TI_UTILS_DEBUG_TIMESTAMP_TOKEN "(%x) %s %s:%d %s - " __VA_ARGS__, "")
+#define DBGUTILS_LOGI(...) DBGUTILS_LOGV_FULL(ANDROID_LOG_INFO, __FILE__, __LINE__, __FUNCTION__, TI_UTILS_DEBUG_TIMESTAMP_TOKEN "(%x) %s %s:%d %s - " __VA_ARGS__, "")
+#define DBGUTILS_LOGW(...) DBGUTILS_LOGV_FULL(ANDROID_LOG_WARN, __FILE__, __LINE__, __FUNCTION__, TI_UTILS_DEBUG_TIMESTAMP_TOKEN "(%x) %s %s:%d %s - " __VA_ARGS__, "")
+#define DBGUTILS_LOGE(...) DBGUTILS_LOGV_FULL(ANDROID_LOG_ERROR, __FILE__, __LINE__, __FUNCTION__, TI_UTILS_DEBUG_TIMESTAMP_TOKEN "(%x) %s %s:%d %s - " __VA_ARGS__, "")
+#define DBGUTILS_LOGF(...) DBGUTILS_LOGV_FULL(ANDROID_LOG_FATAL, __FILE__, __LINE__, __FUNCTION__, TI_UTILS_DEBUG_TIMESTAMP_TOKEN "(%x) %s %s:%d %s - " __VA_ARGS__, "")
+
+#define DBGUTILS_LOGVA DBGUTILS_LOGV
+#define DBGUTILS_LOGVB DBGUTILS_LOGV
+
+#define DBGUTILS_LOGDA DBGUTILS_LOGD
+#define DBGUTILS_LOGDB DBGUTILS_LOGD
+
+#define DBGUTILS_LOGEA DBGUTILS_LOGE
+#define DBGUTILS_LOGEB DBGUTILS_LOGE
+
+// asserts
+#define _DBGUTILS_PLAIN_ASSERT(condition) \
+ do \
+ { \
+ if ( !(condition) ) \
+ { \
+ __android_log_print(ANDROID_LOG_FATAL, "Ti::Debug", \
+ "Condition failed: " #condition); \
+ __android_log_print(ANDROID_LOG_FATAL, "Ti::Debug", \
+ "Aborting process..."); \
+ abort(); \
+ } \
+ } while (0)
+
+#define _DBGUTILS_PLAIN_ASSERT_X(condition, ...) \
+ do \
+ { \
+ if ( !(condition) ) \
+ { \
+ __android_log_print(ANDROID_LOG_FATAL, "Ti::Debug", \
+ "Condition failed: " #condition ": " __VA_ARGS__); \
+ __android_log_print(ANDROID_LOG_FATAL, "Ti::Debug", \
+ "Aborting process..."); \
+ abort(); \
+ } \
+ } while (0)
+
+#define DBGUTILS_ASSERT(condition) \
+ do \
+ { \
+ if ( !(condition) ) \
+ { \
+ DBGUTILS_LOGF("Condition failed: " #condition); \
+ DBGUTILS_LOGF("Aborting process..."); \
+ abort(); \
+ } \
+ } while (0)
+#define DBGUTILS_ASSERT_X(condition, ...) \
+ do \
+ { \
+ if ( !(condition) ) \
+ { \
+ DBGUTILS_LOGF("Condition failed: " #condition ": " __VA_ARGS__); \
+ DBGUTILS_LOGF("Aborting process..."); \
+ abort(); \
+ } \
+ } while (0)
+
+
+
+
+static const int kIndentStringMaxLength = 128;
+
+template <int Size>
+inline int IndentString<Size>::calculateOffset(const int length) const
+{
+ const int offset = kIndentStringMaxLength - length*Size;
+ return offset < 0 ? 0 : offset;
+}
+
+template <int Size>
+inline IndentString<Size>::IndentString(const int length) :
+ mOffset(calculateOffset(length))
+{}
+
+template <int Size>
+inline const char * IndentString<Size>::string() const
+{
+ extern const char sIndentStringBuffer[];
+ return sIndentStringBuffer + mOffset;
+}
+
+
+
+
+inline Debug * Debug::instance()
+{ return &sInstance; }
+
+
+inline Debug::ThreadInfo * Debug::findCurrentThreadInfo()
+{
+ // retain reference to threads data
+ android::sp<Data> data = mData;
+
+ // iterate over threads to locate the current thread id;
+ // this is safe from race conditions because each thread
+ // is able to modify only its own ThreadInfo structure
+ const int32_t threadId = reinterpret_cast<int32_t>(androidGetThreadId());
+ const int size = int(data->threads.size());
+ for ( int i = 0; i < size; ++i )
+ {
+ ThreadInfo * const threadInfo = data->threads.itemAt(i);
+ if ( threadInfo->threadId == threadId )
+ return threadInfo;
+ }
+
+ // this thread has not been registered yet,
+ // try to find an empty thread info slot
+ while ( true )
+ {
+ ThreadInfo * const threadInfo = registerThread(data.get(), threadId);
+ if ( threadInfo )
+ return threadInfo;
+
+ // failed to register the thread because all slots are occupied;
+ // grow the data and try again
+ grow();
+
+ data = mData;
+ }
+
+ // should never reach here
+ _DBGUTILS_PLAIN_ASSERT(false);
+ return 0;
+}
+
+
+inline void Debug::addOffsetForCurrentThread(const int offset)
+{
+ if ( offset == 0 )
+ return;
+
+ ThreadInfo * const threadInfo = findCurrentThreadInfo();
+ _DBGUTILS_PLAIN_ASSERT(threadInfo);
+
+ threadInfo->callOffset += offset;
+
+ if ( threadInfo->callOffset == 0 )
+ {
+ // thread call stack has dropped to zero, unregister it
+ android_atomic_acquire_store(0, &threadInfo->threadId);
+ }
+}
+
+
+inline int Debug::offsetForCurrentThread()
+{
+#ifdef TI_UTILS_FUNCTION_LOGGER_ENABLE
+ ThreadInfo * const threadInfo = findCurrentThreadInfo();
+ _DBGUTILS_PLAIN_ASSERT(threadInfo);
+
+ return threadInfo->callOffset;
+#else
+ return 0;
+#endif
+}
+
+
+inline void Debug::increaseOffsetForCurrentThread()
+{
+#ifdef TI_UTILS_FUNCTION_LOGGER_ENABLE
+ addOffsetForCurrentThread(1);
+#endif
+}
+
+
+inline void Debug::decreaseOffsetForCurrentThread()
+{
+#ifdef TI_UTILS_FUNCTION_LOGGER_ENABLE
+ addOffsetForCurrentThread(-1);
+#endif
+}
+
+
+inline void Debug::log(const int priority, const char * const format, ...)
+{
+ va_list args;
+ va_start(args, format);
+ __android_log_vprint(priority, LOG_TAG, format, args);
+ va_end(args);
+}
+
+
+
+
+inline FunctionLogger::FunctionLogger(const char * const file, const int line, const char * const function) :
+ mFile(file), mLine(line), mFunction(function), mThreadId(androidGetThreadId()), mExitLine(-1)
+{
+ Debug * const debug = Debug::instance();
+ debug->increaseOffsetForCurrentThread();
+ android_printLog(ANDROID_LOG_DEBUG, LOG_TAG,
+ TI_UTILS_DEBUG_TIMESTAMP_TOKEN "(%x) %s+ %s:%d %s - ENTER",
+ TI_UTILS_DEBUG_TIMESTAMP_VARIABLE
+ (int)mThreadId, IndentString<>(debug->offsetForCurrentThread()).string(),
+ mFile, mLine, mFunction);
+}
+
+
+inline FunctionLogger::~FunctionLogger()
+{
+ Debug * const debug = Debug::instance();
+ android_printLog(ANDROID_LOG_DEBUG, LOG_TAG,
+ TI_UTILS_DEBUG_TIMESTAMP_TOKEN "(%x) %s- %s:%d %s - EXIT",
+ TI_UTILS_DEBUG_TIMESTAMP_VARIABLE
+ (int)mThreadId, IndentString<>(debug->offsetForCurrentThread()).string(),
+ mFile, mExitLine == -1 ? mLine : mExitLine, mFunction);
+ debug->decreaseOffsetForCurrentThread();
+}
+
+
+inline void FunctionLogger::setExitLine(const int line)
+{
+ if ( mExitLine != -1 )
+ {
+ Debug * const debug = Debug::instance();
+ android_printLog(ANDROID_LOG_DEBUG, LOG_TAG,
+ TI_UTILS_DEBUG_TIMESTAMP_TOKEN "(%x) %s %s:%d %s - Double function exit trace detected. Previous: %d",
+ TI_UTILS_DEBUG_TIMESTAMP_VARIABLE
+ (int)mThreadId, IndentString<>(debug->offsetForCurrentThread()).string(),
+ mFile, line, mFunction, mExitLine);
+ }
+
+ mExitLine = line;
+}
+
+
+
+
+} // namespace Ti
+
+
+
+
+#endif //DEBUG_UTILS_H
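A hedged usage sketch of the new tracing macros; the calling function below is hypothetical. When TI_UTILS_FUNCTION_LOGGER_ENABLE is defined, the FunctionLogger created by LOG_FUNCTION_NAME logs ENTER on construction and EXIT on destruction, indented per thread.

    #define LOG_TAG "CameraHAL_Example"
    #include "DebugUtils.h"

    static int doWork(int value)
    {
        LOG_FUNCTION_NAME;                    // logs "... ENTER" and indents this thread

        if ( value < 0 )
        {
            DBGUTILS_LOGE("bad value: %d", value);
            LOG_FUNCTION_NAME_EXIT;           // remembers this line for the EXIT trace
            return -1;
        }

        LOG_FUNCTION_NAME_EXIT;
        return 0;                             // ~FunctionLogger emits the EXIT trace here
    }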
diff --git a/libtiutils/ErrorUtils.cpp b/libtiutils/ErrorUtils.cpp
index df0e51c..e30fcfd 100644
--- a/libtiutils/ErrorUtils.cpp
+++ b/libtiutils/ErrorUtils.cpp
@@ -17,7 +17,8 @@
#include "ErrorUtils.h"
-namespace android {
+namespace Ti {
+namespace Utils {
/**
@brief Method to convert from POSIX to Android errors
@@ -135,7 +136,5 @@ status_t ErrorUtils::omxToAndroidError(OMX_ERRORTYPE error)
}
-};
-
-
-
+} // namespace Utils
+} // namespace Ti
diff --git a/libtiutils/ErrorUtils.h b/libtiutils/ErrorUtils.h
index 204ec97..c6c23a2 100644
--- a/libtiutils/ErrorUtils.h
+++ b/libtiutils/ErrorUtils.h
@@ -23,6 +23,8 @@
///Header file where all the OMX error codes are defined
#include "OMX_Core.h"
+#include "Status.h"
+
extern "C"
{
@@ -30,7 +32,8 @@ extern "C"
#include "timm_osal_error.h"
};
-namespace android {
+namespace Ti {
+namespace Utils {
///Generic class with static methods to convert any standard error type to Android error type
class ErrorUtils
@@ -47,6 +50,7 @@ public:
};
-};
+} // namespace Utils
+} // namespace Ti
#endif /// ERROR_UTILS_H
diff --git a/libtiutils/MessageQueue.cpp b/libtiutils/MessageQueue.cpp
index e3647d4..13b1d53 100644
--- a/libtiutils/MessageQueue.cpp
+++ b/libtiutils/MessageQueue.cpp
@@ -29,7 +29,8 @@
#include "MessageQueue.h"
-namespace TIUTILS {
+namespace Ti {
+namespace Utils {
/**
@brief Constructor for the message queue class
@@ -278,6 +279,8 @@ bool MessageQueue::isEmpty()
void MessageQueue::clear()
{
+ LOG_FUNCTION_NAME;
+
if(!this->fd_read)
{
MSGQ_LOGEA("read descriptor not initialized for message queue");
@@ -412,4 +415,5 @@ android::status_t MessageQueue::waitForMsg(MessageQueue *queue1, MessageQueue *q
return ret;
}
-};
+} // namespace Utils
+} // namespace Ti
diff --git a/libtiutils/MessageQueue.h b/libtiutils/MessageQueue.h
index 6d05201..68943b7 100644
--- a/libtiutils/MessageQueue.h
+++ b/libtiutils/MessageQueue.h
@@ -22,32 +22,19 @@
#include "DebugUtils.h"
#include <stdint.h>
-///Uncomment this macro to debug the message queue implementation
-//#define DEBUG_LOG
-
-///Camera HAL Logging Functions
-#ifndef DEBUG_LOG
-
-#define MSGQ_LOGDA(str)
-#define MSGQ_LOGDB(str, ...)
-
-#undef LOG_FUNCTION_NAME
-#undef LOG_FUNCTION_NAME_EXIT
-#define LOG_FUNCTION_NAME
-#define LOG_FUNCTION_NAME_EXIT
-
+#ifdef MSGQ_DEBUG
+# define MSGQ_LOGDA DBGUTILS_LOGDA
+# define MSGQ_LOGDB DBGUTILS_LOGDB
#else
-
-#define MSGQ_LOGDA DBGUTILS_LOGDA
-#define MSGQ_LOGDB DBGUTILS_LOGDB
-
+# define MSGQ_LOGDA(str)
+# define MSGQ_LOGDB(str, ...)
#endif
#define MSGQ_LOGEA DBGUTILS_LOGEA
#define MSGQ_LOGEB DBGUTILS_LOGEB
-
-namespace TIUTILS {
+namespace Ti {
+namespace Utils {
///Message type
struct Message
@@ -102,6 +89,10 @@ private:
bool mHasMsg;
};
-};
+} // namespace Utils
+} // namespace Ti
+
+
+
#endif
diff --git a/libtiutils/Semaphore.cpp b/libtiutils/Semaphore.cpp
index 37f3a89..512eee3 100644
--- a/libtiutils/Semaphore.cpp
+++ b/libtiutils/Semaphore.cpp
@@ -21,7 +21,8 @@
#include <utils/Log.h>
#include <time.h>
-namespace android {
+namespace Ti {
+namespace Utils {
/**
@brief Constructor for the semaphore class
@@ -227,6 +228,5 @@ status_t Semaphore::WaitTimeout(int timeoutMicroSecs)
}
-};
-
-
+} // namespace Utils
+} // namespace Ti
diff --git a/libtiutils/Semaphore.h b/libtiutils/Semaphore.h
index 6990848..8d64f3f 100644
--- a/libtiutils/Semaphore.h
+++ b/libtiutils/Semaphore.h
@@ -24,7 +24,10 @@
#include <string.h>
#include <unistd.h>
-namespace android {
+#include "Status.h"
+
+namespace Ti {
+namespace Utils {
class Semaphore
{
@@ -56,4 +59,5 @@ private:
};
-};
+} // namespace Utils
+} // namespace Ti
diff --git a/libtiutils/Status.h b/libtiutils/Status.h
new file mode 100644
index 0000000..ded2cec
--- /dev/null
+++ b/libtiutils/Status.h
@@ -0,0 +1,67 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef TI_UTILS_STATUS_H
+#define TI_UTILS_STATUS_H
+
+#include <utils/Errors.h>
+
+#include "UtilsCommon.h"
+
+
+
+
+namespace Ti {
+
+
+
+
+typedef int status_t;
+
+#define TI_CAMERA_DEFINE_STATUS_CODE(x) x = android::x,
+enum {
+ TI_CAMERA_DEFINE_STATUS_CODE(OK)
+ TI_CAMERA_DEFINE_STATUS_CODE(NO_ERROR)
+ TI_CAMERA_DEFINE_STATUS_CODE(UNKNOWN_ERROR)
+ TI_CAMERA_DEFINE_STATUS_CODE(NO_MEMORY)
+ TI_CAMERA_DEFINE_STATUS_CODE(INVALID_OPERATION)
+ TI_CAMERA_DEFINE_STATUS_CODE(BAD_VALUE)
+ TI_CAMERA_DEFINE_STATUS_CODE(BAD_TYPE)
+ TI_CAMERA_DEFINE_STATUS_CODE(NAME_NOT_FOUND)
+ TI_CAMERA_DEFINE_STATUS_CODE(PERMISSION_DENIED)
+ TI_CAMERA_DEFINE_STATUS_CODE(NO_INIT)
+ TI_CAMERA_DEFINE_STATUS_CODE(ALREADY_EXISTS)
+ TI_CAMERA_DEFINE_STATUS_CODE(DEAD_OBJECT)
+ TI_CAMERA_DEFINE_STATUS_CODE(FAILED_TRANSACTION)
+ TI_CAMERA_DEFINE_STATUS_CODE(JPARKS_BROKE_IT)
+ TI_CAMERA_DEFINE_STATUS_CODE(BAD_INDEX)
+ TI_CAMERA_DEFINE_STATUS_CODE(NOT_ENOUGH_DATA)
+ TI_CAMERA_DEFINE_STATUS_CODE(WOULD_BLOCK)
+ TI_CAMERA_DEFINE_STATUS_CODE(TIMED_OUT)
+ TI_CAMERA_DEFINE_STATUS_CODE(UNKNOWN_TRANSACTION)
+ TI_CAMERA_DEFINE_STATUS_CODE(FDS_NOT_ALLOWED)
+};
+#undef TI_CAMERA_DEFINE_STATUS_CODE
+
+
+
+
+} // namespace Ti
+
+
+
+
+#endif // TI_UTILS_STATUS_H
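A brief hedged illustration (hypothetical function): each Ti status code aliases its android:: counterpart, so values can be handed to existing android::status_t call sites without casts.

    #include "Status.h"

    static Ti::status_t checkFd(int fd)
    {
        // Ti::NO_ERROR and Ti::BAD_VALUE carry the same numeric values as android::
        return fd >= 0 ? Ti::NO_ERROR : Ti::BAD_VALUE;
    }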
diff --git a/libtiutils/UtilsCommon.h b/libtiutils/UtilsCommon.h
new file mode 100644
index 0000000..8aaeee7
--- /dev/null
+++ b/libtiutils/UtilsCommon.h
@@ -0,0 +1,99 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef TI_UTILS_COMMON_H
+#define TI_UTILS_COMMON_H
+
+#include <android/api-level.h>
+#include <android/log.h>
+
+
+
+namespace Ti {
+
+
+
+
+// default floating-point type
+typedef float real;
+
+
+
+
+template <typename T>
+int floor(T x);
+
+template <typename T>
+int round(T x);
+
+template <typename T>
+const T & min(const T & a, const T & b);
+
+template <typename T>
+const T & max(const T & a, const T & b);
+
+template <typename T>
+const T & bound(const T & min, const T & x, const T & max);
+
+template <typename T>
+T abs(const T & x);
+
+
+
+
+template <typename T>
+inline int floor(const T x) {
+ return static_cast<int>(x);
+}
+
+template <typename T>
+inline int round(const T x) {
+ if ( x >= 0 ) {
+ return floor(x + T(0.5));
+ } else {
+ return floor(x - floor(x - T(1)) + T(0.5)) + floor(x - T(1));
+ }
+}
+
+template <typename T>
+inline const T & min(const T & a, const T & b) {
+ return a < b ? a : b;
+}
+
+template <typename T>
+inline const T & max(const T & a, const T & b) {
+ return a < b ? b : a;
+}
+
+template <typename T>
+inline const T & bound(const T & min, const T & x, const T & max) {
+ return x < min ? min : x > max ? max : x;
+}
+
+template <typename T>
+inline T abs(const T & x) {
+ return x >= 0 ? x : -x;
+}
+
+
+
+
+} // namespace Ti
+
+
+
+
+#endif // TI_UTILS_COMMON_H
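A hedged usage sketch of the helpers above; the values are illustrative only.

    #include "UtilsCommon.h"

    static void exampleUsage()
    {
        const Ti::real zoom = 3.7f;

        const int step    = Ti::round(zoom);          // 4
        const int clamped = Ti::bound(0, step, 3);    // 3: kept inside [0, 3]
        const int biggest = Ti::max(step, clamped);   // 4

        (void)biggest;
    }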
diff --git a/security/smc_pa_ctrl/Android.mk b/security/smc_pa_ctrl/Android.mk
index fe0bdf4..3a0e945 100644
--- a/security/smc_pa_ctrl/Android.mk
+++ b/security/smc_pa_ctrl/Android.mk
@@ -1,4 +1,8 @@
-ifeq ($(TARGET_BOARD_PLATFORM),omap4)
+# Only applicable for OMAP4 and OMAP5 boards.
+# First eliminate OMAP3 and then ensure that this is not used
+# for customer boards
+ifneq ($(TARGET_BOARD_PLATFORM),omap3)
+ifeq ($(findstring omap, $(TARGET_BOARD_PLATFORM)),omap)
LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)
@@ -9,13 +13,14 @@ LOCAL_ARM_MODE := arm
LOCAL_SRC_FILES:= \
smc_pa_ctrl.c smc_pa_ctrl_linux.c
-LOCAL_CFLAGS += -DLINUX
-LOCAL_CFLAGS += -D__ANDROID32__
-
ifdef S_VERSION_BUILD
LOCAL_CFLAGS += -DS_VERSION_BUILD=$(S_VERSION_BUILD)
endif
+LOCAL_LDLIBS += -llog
+
+LOCAL_CFLAGS += -DLINUX
+LOCAL_CFLAGS += -DANDROID
LOCAL_CFLAGS += -I $(LOCAL_PATH)/../tf_sdk/include/
LOCAL_MODULE:= smc_pa_ctrl
@@ -23,3 +28,4 @@ LOCAL_MODULE_TAGS := optional
include $(BUILD_EXECUTABLE)
endif
+endif
diff --git a/security/smc_pa_ctrl/s_version.h b/security/smc_pa_ctrl/s_version.h
index 139c11f..cff19d7 100644
--- a/security/smc_pa_ctrl/s_version.h
+++ b/security/smc_pa_ctrl/s_version.h
@@ -27,7 +27,6 @@
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
-
#ifndef __S_VERSION_H__
#define __S_VERSION_H__
@@ -45,7 +44,7 @@
#if defined(WIN32)
#define S_VERSION_OS "W" /* "W" for Windows PC (XP, Vista…) */
#define S_VERSION_PLATFORM "X" /* "X" for ix86 PC simulators */
-#elif defined(__ANDROID32__)
+#elif defined(ANDROID)
#define S_VERSION_OS "A" /* "A" for Android */
#define S_VERSION_PLATFORM "G" /* "G" for 4430 */
#elif defined(LINUX)
@@ -59,15 +58,15 @@
/*
* This version number must be updated for each new release
*/
-#define S_VERSION_MAIN "01.04"
-#define S_VERSION_RESOURCE 1,4,0,S_VERSION_BUILD
+#define S_VERSION_MAIN "01.06"
+#define S_VERSION_RESOURCE 1,6,0,S_VERSION_BUILD
/*
* If this is a patch or engineering version use the following
* defines to set the version number. Else set these values to 0.
*/
-#define S_VERSION_PATCH 11
#define S_VERSION_ENG 0
+#define S_VERSION_PATCH 6
#ifdef S_VERSION_BUILD
/* TRICK: detect if S_VERSION is defined but empty */
@@ -106,8 +105,8 @@
S_VERSION_OS \
S_VERSION_PLATFORM \
S_VERSION_MAIN \
- _S_VERSION_PATCH \
_S_VERSION_ENG \
+ _S_VERSION_PATCH \
"." __STRINGIFY2(S_VERSION_BUILD) " " \
S_VERSION_VARIANT
diff --git a/security/tee_client_api/Android.mk b/security/tee_client_api/Android.mk
index bfd92f4..d148b64 100644
--- a/security/tee_client_api/Android.mk
+++ b/security/tee_client_api/Android.mk
@@ -1,4 +1,8 @@
-ifeq ($(TARGET_BOARD_PLATFORM),omap4)
+# Only applicable for OMAP4 and OMAP5 boards.
+# First eliminate OMAP3 and then ensure that this is not used
+# for customer boards
+ifneq ($(TARGET_BOARD_PLATFORM),omap3)
+ifeq ($(findstring omap, $(TARGET_BOARD_PLATFORM)),omap)
LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)
@@ -9,13 +13,14 @@ LOCAL_ARM_MODE := arm
LOCAL_SRC_FILES:= \
tee_client_api_linux_driver.c
-LOCAL_CFLAGS += -DLINUX
-LOCAL_CFLAGS += -D__ANDROID32__
-
ifdef S_VERSION_BUILD
LOCAL_CFLAGS += -DS_VERSION_BUILD=$(S_VERSION_BUILD)
endif
+LOCAL_LDLIBS += -llog
+
+LOCAL_CFLAGS += -DLINUX
+LOCAL_CFLAGS += -DANDROID
LOCAL_CFLAGS += -I $(LOCAL_PATH)/../tf_sdk/include/
LOCAL_MODULE:= libtee_client_api_driver
@@ -23,3 +28,4 @@ LOCAL_MODULE_TAGS := optional
include $(BUILD_STATIC_LIBRARY)
endif
+endif
diff --git a/security/tee_client_api/s_version.h b/security/tee_client_api/s_version.h
index d112ea0..cff19d7 100644
--- a/security/tee_client_api/s_version.h
+++ b/security/tee_client_api/s_version.h
@@ -44,7 +44,7 @@
#if defined(WIN32)
#define S_VERSION_OS "W" /* "W" for Windows PC (XP, Vista…) */
#define S_VERSION_PLATFORM "X" /* "X" for ix86 PC simulators */
-#elif defined(__ANDROID32__)
+#elif defined(ANDROID)
#define S_VERSION_OS "A" /* "A" for Android */
#define S_VERSION_PLATFORM "G" /* "G" for 4430 */
#elif defined(LINUX)
@@ -58,15 +58,15 @@
/*
* This version number must be updated for each new release
*/
-#define S_VERSION_MAIN "01.04"
-#define S_VERSION_RESOURCE 1,4,0,S_VERSION_BUILD
+#define S_VERSION_MAIN "01.06"
+#define S_VERSION_RESOURCE 1,6,0,S_VERSION_BUILD
/*
* If this is a patch or engineering version use the following
* defines to set the version number. Else set these values to 0.
*/
-#define S_VERSION_PATCH 11
#define S_VERSION_ENG 0
+#define S_VERSION_PATCH 6
#ifdef S_VERSION_BUILD
/* TRICK: detect if S_VERSION is defined but empty */
@@ -105,8 +105,8 @@
S_VERSION_OS \
S_VERSION_PLATFORM \
S_VERSION_MAIN \
- _S_VERSION_PATCH \
_S_VERSION_ENG \
+ _S_VERSION_PATCH \
"." __STRINGIFY2(S_VERSION_BUILD) " " \
S_VERSION_VARIANT
diff --git a/security/tee_client_api/schannel6_protocol.h b/security/tee_client_api/schannel6_protocol.h
index 66ed12c..81769be 100644
--- a/security/tee_client_api/schannel6_protocol.h
+++ b/security/tee_client_api/schannel6_protocol.h
@@ -34,15 +34,6 @@
#include "s_type.h"
/**
- * This header file defines some structures needed for the secure channel
- * protocol. See your Product Reference Manual for a specification of the
- * SChannel protocol.
- */
-// jroux to do : remove
-#undef SMC_PROTOCOL_VERSION
-#define SMC_PROTOCOL_VERSION 0x06000000
-
-/**
* Time representation.
*/
typedef uint64_t SCTIME;
@@ -106,6 +97,15 @@ typedef uint64_t SCTIME;
*/
#include "schannel6_logins.h"
+/*
+ * Limits and sizes
+ */
+
+/* Maximum number of L1 descriptors covered by a registered shared memory block.
+   Must be kept in sync with TF_MAX_COARSE_PAGES in tf_protocol.h
+ in the Linux kernel driver. */
+#define SCHANNEL6_MAX_DESCRIPTORS_PER_REGISTERED_SHARED_MEM 128
+
/**
* Command parameters.
*/
@@ -217,7 +217,7 @@ typedef struct
uint32_t nBlockID;
uint32_t nSharedMemSize;
uint32_t nSharedMemStartOffset;
- uint32_t nSharedMemDescriptors[8];
+ uint32_t nSharedMemDescriptors[SCHANNEL6_MAX_DESCRIPTORS_PER_REGISTERED_SHARED_MEM];
}SCHANNEL6_REGISTER_SHARED_MEMORY_COMMAND;
diff --git a/security/tee_client_api/tee_client_api_linux_driver.c b/security/tee_client_api/tee_client_api_linux_driver.c
index 08a8210..af5d36f 100644
--- a/security/tee_client_api/tee_client_api_linux_driver.c
+++ b/security/tee_client_api/tee_client_api_linux_driver.c
@@ -96,6 +96,13 @@ typedef struct
#define TRACE_WARNING(...)
#define TRACE_INFO(...)
#else
+#if defined ANDROID
+#define LOG_TAG "TEE"
+#include <android/log.h>
+#define TRACE_INFO(format, ...) __android_log_print(ANDROID_LOG_INFO, LOG_TAG, format, __VA_ARGS__)
+#define TRACE_ERROR(format, ...) __android_log_print(ANDROID_LOG_ERROR, LOG_TAG, format, __VA_ARGS__)
+#define TRACE_WARNING(format, ...) __android_log_print(ANDROID_LOG_WARN, LOG_TAG, format, __VA_ARGS__)
+#else
static void TRACE_ERROR(const char* format, ...)
{
va_list ap;
@@ -125,6 +132,7 @@ static void TRACE_INFO(const char* format, ...)
fprintf(stderr, "\n");
va_end(ap);
}
+#endif /* ANDROID */
#endif /* NDEBUG */
@@ -784,7 +792,7 @@ TEEC_Result TEEC_OpenSessionEx (
if (connectionData != NULL)
{
*(uint32_t*)sCommand.sOpenClientSession.sLoginData = *(uint32_t*)connectionData;
- sCommand.sHeader.nMessageSize += sizeof(uint32_t);
+ sCommand.sHeader.nMessageSize += 1;
}
}
sCommand.sOpenClientSession.nCancellationID = (uint32_t)operation; // used for TEEC_RequestCancellation
@@ -892,7 +900,7 @@ void TEEC_GetImplementationInfo(
TEEC_Context* context,
TEEC_ImplementationInfo* description)
{
- TRACE_INFO("TEEC_GetImplementationInfo");
+ TRACE_INFO("TEEC_GetImplementationInfo", NULL);
memset(description, 0, sizeof(TEEC_ImplementationInfo));
diff --git a/security/tf_crypto_sst/Android.mk b/security/tf_crypto_sst/Android.mk
index 6d3a9d8..21df372 100644
--- a/security/tf_crypto_sst/Android.mk
+++ b/security/tf_crypto_sst/Android.mk
@@ -15,18 +15,21 @@ LOCAL_SRC_FILES := \
pkcs11_object.c \
pkcs11_session.c
-LOCAL_CFLAGS += -DLINUX
-LOCAL_CFLAGS += -D__ANDROID32__
-
ifdef S_VERSION_BUILD
LOCAL_CFLAGS += -DS_VERSION_BUILD=$(S_VERSION_BUILD)
endif
+LOCAL_LDLIBS += -llog
+
+LOCAL_CFLAGS += -DLINUX
+LOCAL_CFLAGS += -DANDROID
LOCAL_CFLAGS += -I $(LOCAL_PATH)/../tf_sdk/include/
LOCAL_MODULE:= libtf_crypto_sst
LOCAL_STATIC_LIBRARIES := libtee_client_api_driver
LOCAL_MODULE_TAGS := optional
+LOCAL_SHARED_LIBRARIES:= liblog
+
include $(BUILD_SHARED_LIBRARY)
endif
diff --git a/security/tf_crypto_sst/pkcs11_object.c b/security/tf_crypto_sst/pkcs11_object.c
index 53caadc..7fcdb70 100644
--- a/security/tf_crypto_sst/pkcs11_object.c
+++ b/security/tf_crypto_sst/pkcs11_object.c
@@ -93,148 +93,117 @@ static CK_RV static_checkPreConditionsAndUpdateHandles(
return CKR_OK;
}
-/******************************************/
-/* The buffer must be freed by the caller */
-/******************************************/
-static CK_RV static_encodeTwoTemplates(
- uint8_t** ppBuffer,
- uint32_t * pBufferSize,
- const uint32_t nParamIndex,
- CK_ATTRIBUTE* pTemplate1,
- CK_ULONG ulCount1,
- CK_ATTRIBUTE* pTemplate2,
- CK_ULONG ulCount2)
+/* Add up the sizes of the items and values in an attribute template.
+ */
+static CK_RV static_analyzeTemplate(
+ uint32_t *const pDataOffset,
+ uint32_t *const pBufferSize,
+ const CK_ATTRIBUTE *const pTemplate,
+ CK_ULONG const ulCount)
{
- INPUT_TEMPLATE_ITEM sItem;
-
- uint32_t i;
- uint32_t nDataOffset = 0;
- uint32_t nBufferIndex = 0;
- uint32_t nBufferSize = 0;
- uint8_t* pBuffer = NULL;
- CK_RV nErrorCode = CKR_OK;
+ CK_ULONG i;
+ uint32_t nItemsSize;
+ uint32_t nValuesSize = 0;
- if (ulCount1 == 0)
- {
- /* Nothing to do */
- return CKR_OK;
- }
- if (pTemplate1 == NULL)
+ nItemsSize = sizeof(uint32_t); /* for the number of attributes */
+ if (ulCount == 0)
{
- /* Nothing to do */
+ /* There are zero attributes, so the buffer will only contain the size word. */
+ *pDataOffset += nItemsSize;
+ *pBufferSize += nItemsSize;
return CKR_OK;
}
+ nItemsSize += sizeof(INPUT_TEMPLATE_ITEM) * ulCount; /*for the attribute items*/
- /* First compute the total required buffer size that
- * will contain the full templates (for the template 1 AND 2)
- */
- nBufferSize = 4 + /* Nb Attributes */
- sizeof(INPUT_TEMPLATE_ITEM)*ulCount1; /* The attributes items */
- if (pTemplate2 != NULL)
- {
- nBufferSize += 4 + /* Nb Attributes */
- sizeof(INPUT_TEMPLATE_ITEM)*ulCount2; /* The attributes items */
- }
-
- /* First data (attribute values) on either template 1 or 2 will just be after the last item */
- nDataOffset = nBufferSize;
-
- for (i = 0; i < ulCount1; i++)
- {
- /* Each value will be aligned on 4 bytes.
- This computation includes the spare bytes. */
- nBufferSize += PKCS11_GET_SIZE_WITH_ALIGNMENT(pTemplate1[i].ulValueLen);
- }
- if (pTemplate2 != NULL)
+ /* Add up the attribute value sizes, taking the 4-byte alignment into account. */
+ for (i = 0; i < ulCount; i++)
{
- for (i = 0; i < ulCount2; i++)
+ if (*pBufferSize + nValuesSize > 0x40000000)
{
- /* Each value will be aligned on 4 bytes.
- This computation includes the spare bytes. */
- nBufferSize += PKCS11_GET_SIZE_WITH_ALIGNMENT(pTemplate2[i].ulValueLen);
+ /* Offsets above 0x40000000 aren't supported. */
+ return CKR_DEVICE_ERROR;
}
+ nValuesSize += PKCS11_GET_SIZE_WITH_ALIGNMENT(pTemplate[i].ulValueLen);
}
- pBuffer = (uint8_t*)malloc(nBufferSize);
- if (pBuffer == NULL)
- {
- /* Not enough memory */
- return CKR_DEVICE_MEMORY;
- }
-
- memset(pBuffer, 0, nBufferSize);
+ *pDataOffset += nItemsSize;
+ *pBufferSize += nItemsSize + nValuesSize;
+ return CKR_OK;
+}
- /*
- * First template
- */
- *(uint32_t*)(pBuffer + nBufferIndex) = ulCount1;
- nBufferIndex += 4;
- for (i = 0; i < ulCount1; i++)
+static void static_copyTemplate(
+ uint8_t *const pBuffer,
+ uint32_t const nParamIndex,
+ uint8_t **const ppAttributeCursor,
+ uint8_t **const ppDataCursor,
+ const CK_ATTRIBUTE *const pTemplate,
+ CK_ULONG const ulCount)
+{
+ INPUT_TEMPLATE_ITEM sItem;
+ CK_ULONG i;
+ *(uint32_t*)(*ppAttributeCursor) = ulCount;
+ *ppAttributeCursor += sizeof(uint32_t);
+ for (i = 0; i < ulCount; i++)
{
- sItem.attributeType = (uint32_t)pTemplate1[i].type;
+ sItem.attributeType = pTemplate[i].type;
/* dataOffset = 0 means NULL buffer */
- sItem.dataOffset = ((pTemplate1[i].pValue == NULL) ? 0 : nDataOffset);
+ sItem.dataOffset = ((pTemplate[i].pValue == NULL) ? 0 :
+ *ppDataCursor - pBuffer);
sItem.dataParamIndex = nParamIndex; /* The parameter where we store the data (0 to 3) */
- sItem.dataValueLen = (uint32_t)pTemplate1[i].ulValueLen;
+ sItem.dataValueLen = pTemplate[i].ulValueLen;
/* Copy the item */
- memcpy(pBuffer + nBufferIndex, &sItem, sizeof(INPUT_TEMPLATE_ITEM));
- nBufferIndex += sizeof(INPUT_TEMPLATE_ITEM);
- if (pTemplate1[i].pValue != NULL)
+ memcpy(*ppAttributeCursor, &sItem, sizeof(INPUT_TEMPLATE_ITEM));
+ *ppAttributeCursor += sizeof(INPUT_TEMPLATE_ITEM);
+ if (pTemplate[i].pValue != NULL)
{
/* Copy the data */
- memcpy(pBuffer + nDataOffset, (uint8_t*)pTemplate1[i].pValue, (uint32_t)pTemplate1[i].ulValueLen);
+ memcpy(*ppDataCursor, pTemplate[i].pValue, pTemplate[i].ulValueLen);
/* Next data will be stored just after the previous one but aligned on 4 bytes */
- nDataOffset += PKCS11_GET_SIZE_WITH_ALIGNMENT(pTemplate1[i].ulValueLen);
- if ((nDataOffset & 0xC0000000) != 0)
- {
- /* We whould never go in this case, that means the dataOffset will not be able to store the offset correctly */
- nErrorCode = CKR_DEVICE_ERROR;
- goto error;
- }
+ *ppDataCursor += PKCS11_GET_SIZE_WITH_ALIGNMENT(pTemplate[i].ulValueLen);
}
}
+}
- /*
- * Second template
- */
- if (pTemplate2 != NULL)
- {
- *(uint32_t*)(pBuffer + nBufferIndex) = ulCount2;
- nBufferIndex += 4;
- for (i = 0; i < ulCount2; i++)
- {
- sItem.attributeType = (uint32_t)pTemplate2[i].type;
- /* dataOffset = 0 means NULL buffer */
- sItem.dataOffset = ((pTemplate2[i].pValue == NULL) ? 0 : nDataOffset);
- sItem.dataParamIndex = nParamIndex; /* The parameter where we store the data (0..3) */
- sItem.dataValueLen = (uint32_t)pTemplate2[i].ulValueLen;
- /* Copy the item */
- memcpy(pBuffer + nBufferIndex, &sItem, sizeof(INPUT_TEMPLATE_ITEM));
- nBufferIndex += sizeof(INPUT_TEMPLATE_ITEM);
- if (pTemplate2[i].pValue != NULL)
- {
- /* Copy the data */
- memcpy(pBuffer + nDataOffset, (uint8_t*)pTemplate2[i].pValue, (uint32_t)pTemplate2[i].ulValueLen);
- /* Next data will be stored just after the previous one but aligned on 4 bytes */
- nDataOffset += PKCS11_GET_SIZE_WITH_ALIGNMENT(pTemplate2[i].ulValueLen);
- if ((nDataOffset & 0xC0000000) != 0)
- {
- /* We whould never go in this case, that means the dataOffset will not be able to store the offset correctly */
- nErrorCode = CKR_DEVICE_ERROR;
- goto error;
- }
- }
- }
- }
+/******************************************/
+/* The buffer must be freed by the caller */
+/******************************************/
+static CK_RV static_encodeTwoTemplates(
+ uint8_t** ppBuffer,
+ uint32_t * pBufferSize,
+ const uint32_t nParamIndex,
+ const CK_ATTRIBUTE* pTemplate1,
+ CK_ULONG ulCount1,
+ const CK_ATTRIBUTE* pTemplate2,
+ CK_ULONG ulCount2)
+{
+ uint8_t* pBuffer = NULL;
+ uint32_t nBufferSize = 0;
+ uint32_t nDataOffset = 0;
+ uint8_t *pAttributeCursor;
+ uint8_t *pDataCursor;
+ CK_RV nErrorCode;
+
+ nErrorCode = static_analyzeTemplate(&nDataOffset, &nBufferSize, pTemplate1, ulCount1);
+ if (nErrorCode != CKR_OK) return nErrorCode;
+ nErrorCode = static_analyzeTemplate(&nDataOffset, &nBufferSize, pTemplate2, ulCount2);
+ if (nErrorCode != CKR_OK) return nErrorCode;
+
+ pBuffer = malloc(nBufferSize);
+ if (pBuffer == NULL) return CKR_DEVICE_MEMORY;
+ memset(pBuffer, 0, nBufferSize);
+
+ pAttributeCursor = pBuffer;
+ pDataCursor = pBuffer + nDataOffset;
+ static_copyTemplate(pBuffer, nParamIndex,
+ &pAttributeCursor, &pDataCursor,
+ pTemplate1, ulCount1);
+ static_copyTemplate(pBuffer, nParamIndex,
+ &pAttributeCursor, &pDataCursor,
+ pTemplate2, ulCount2);
*ppBuffer = pBuffer;
*pBufferSize = nBufferSize;
-
return CKR_OK;
-
-error:
- free(pBuffer);
- return nErrorCode;
}
/******************************************/
@@ -247,7 +216,35 @@ static CK_RV static_encodeTemplate(
CK_ATTRIBUTE* pTemplate,
CK_ULONG ulCount)
{
- return static_encodeTwoTemplates(ppBuffer, pBufferSize, nParamIndex, pTemplate, ulCount, NULL, 0);
+ uint8_t* pBuffer = NULL;
+ uint32_t nBufferSize = 0;
+ uint32_t nDataOffset = 0;
+ uint8_t *pAttributeCursor;
+ uint8_t *pDataCursor;
+ CK_RV nErrorCode;
+
+ if (pTemplate == NULL || ulCount == 0)
+ {
+ *ppBuffer = NULL;
+ *pBufferSize = 0;
+ return CKR_OK;
+ }
+
+ nErrorCode = static_analyzeTemplate(&nDataOffset, &nBufferSize, pTemplate, ulCount);
+ if (nErrorCode != CKR_OK) return nErrorCode;
+
+ pBuffer = malloc(nBufferSize);
+ if (pBuffer == NULL) return CKR_DEVICE_MEMORY;
+
+ pAttributeCursor = pBuffer;
+ pDataCursor = pBuffer + nDataOffset;
+ static_copyTemplate(pBuffer, nParamIndex,
+ &pAttributeCursor, &pDataCursor,
+ pTemplate, ulCount);
+
+ *ppBuffer = pBuffer;
+ *pBufferSize = nBufferSize;
+ return CKR_OK;
}
/* ----------------------------------------------------------------------- */
@@ -1268,7 +1265,7 @@ CK_RV PKCS11_EXPORT C_GenerateKeyPair(
PPKCS11_PRIMARY_SESSION_CONTEXT pSession;
if ( (pMechanism == NULL) ||
- (pPublicKeyTemplate == NULL) || (pPrivateKeyTemplate == NULL) ||
+ (pPublicKeyTemplate == NULL) ||
(phPublicKey== NULL) || (phPrivateKey== NULL))
{
return CKR_ARGUMENTS_BAD;
@@ -1280,7 +1277,7 @@ CK_RV PKCS11_EXPORT C_GenerateKeyPair(
return nErrorCode;
}
- nErrorCode = static_encodeTwoTemplates(&pBuffer, &nBufferSize, 2, (CK_ATTRIBUTE*)pPublicKeyTemplate, ulPublicKeyAttributeCount, (CK_ATTRIBUTE*)pPrivateKeyTemplate, ulPrivateKeyAttributeCount);
+ nErrorCode = static_encodeTwoTemplates(&pBuffer, &nBufferSize, 2, pPublicKeyTemplate, ulPublicKeyAttributeCount, pPrivateKeyTemplate, ulPrivateKeyAttributeCount);
if (nErrorCode != CKR_OK)
{
return nErrorCode;
diff --git a/security/tf_crypto_sst/sst_stub.c b/security/tf_crypto_sst/sst_stub.c
index e8a78bc..e6e37b6 100644
--- a/security/tf_crypto_sst/sst_stub.c
+++ b/security/tf_crypto_sst/sst_stub.c
@@ -27,8 +27,7 @@
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
-
-#ifdef __ANDROID32__
+#ifdef ANDROID
#include <stddef.h>
#endif
diff --git a/security/tf_daemon/Android.mk b/security/tf_daemon/Android.mk
index eccba3d..a163d3c 100644
--- a/security/tf_daemon/Android.mk
+++ b/security/tf_daemon/Android.mk
@@ -1,4 +1,8 @@
-ifeq ($(TARGET_BOARD_PLATFORM),omap4)
+# Only applicable for OMAP4 and OMAP5 boards.
+# First eliminate OMAP3 and then ensure that this is not used
+# for customer boards
+ifneq ($(TARGET_BOARD_PLATFORM),omap3)
+ifeq ($(findstring omap, $(TARGET_BOARD_PLATFORM)),omap)
LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)
@@ -13,14 +17,15 @@ LOCAL_SRC_FILES := \
smc_properties_parser.c \
lib_manifest2.c
-LOCAL_CFLAGS += -DLINUX
-LOCAL_CFLAGS += -D__ANDROID32__
-LOCAL_CFLAGS += -DSUPPORT_DELEGATION_EXTENSION
-
ifdef S_VERSION_BUILD
LOCAL_CFLAGS += -DS_VERSION_BUILD=$(S_VERSION_BUILD)
endif
+LOCAL_LDLIBS += -llog
+
+LOCAL_CFLAGS += -DLINUX
+LOCAL_CFLAGS += -DANDROID
+LOCAL_CFLAGS += -DSUPPORT_DELEGATION_EXTENSION
LOCAL_CFLAGS += -I $(LOCAL_PATH)/../tf_sdk/include/
LOCAL_MODULE:= tf_daemon
@@ -29,3 +34,4 @@ LOCAL_MODULE_TAGS := optional
include $(BUILD_EXECUTABLE)
endif
+endif
diff --git a/security/tf_daemon/delegation_client.c b/security/tf_daemon/delegation_client.c
index 54ee112..a2bff0b 100644
--- a/security/tf_daemon/delegation_client.c
+++ b/security/tf_daemon/delegation_client.c
@@ -28,7 +28,7 @@
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
-#if defined(__ANDROID32__)
+#if defined(ANDROID)
#include <stddef.h>
#endif
#include <stdio.h>
@@ -43,12 +43,12 @@
#include <sys/types.h>
#include <fcntl.h>
-#if defined(LINUX) || defined(__ANDROID32__)
+#if defined(LINUX) || defined(ANDROID)
#include <unistd.h>
#include <sys/resource.h>
-#if defined(__ANDROID32__)
+#if defined(ANDROID)
/* fdatasync does not exist on Android */
#define fdatasync fsync
#else
@@ -58,14 +58,14 @@
* in some distributions
*/
int fdatasync(int fd);
-#endif /* __ANDROID32__ */
+#endif /* ANDROID */
#include <syslog.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <pthread.h>
#include <semaphore.h>
#define PATH_SEPARATOR '/'
-#endif /* LINUX || __ANDROID32__ */
+#endif /* LINUX || ANDROID */
#ifdef WIN32
#include <windows.h>
@@ -97,6 +97,10 @@ int fdatasync(int fd);
#include "delegation_client_extension.h"
#endif
+#ifdef TFSW_FDM_ANDROID
+#include <android/log.h>
+#endif
+
/*----------------------------------------------------------------------------
* Design notes
* ============
@@ -109,7 +113,7 @@ int fdatasync(int fd);
/*----------------------------------------------------------------------------
* Defines and structures
*----------------------------------------------------------------------------*/
-#define ECHANGE_BUFFER_INSTRUCTIONS_NB 100
+#define ECHANGE_BUFFER_INSTRUCTIONS_NB 1000
#define DEFAULT_WORKSPACE_SIZE (128*1024)
@@ -124,6 +128,21 @@ typedef struct
uint8_t sWorkspace[1/*g_nWorkspaceSize*/];
} DELEGATION_EXCHANGE_BUFFER;
+#ifdef SUPPORT_RPMB_PARTITION
+typedef struct
+{
+ uint8_t pDummy[196];
+ uint8_t pMAC[32];
+ uint8_t pData[256];
+ uint8_t pNonce[16];
+ uint32_t nMC;
+ uint16_t nAddr;
+ uint16_t nBlockCount;
+ uint16_t nResult;
+ uint16_t nReqOrResp;
+} DELEGATION_RPMB_MESSAGE;
+#endif
+
#define MD_VAR_NOT_USED(variable) do{(void)(variable);}while(0);
#define MD_INLINE __inline
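/*
 * Sanity-check sketch for DELEGATION_RPMB_MESSAGE above: the fields add up to
 * the 512-byte RPMB data frame defined by the eMMC specification
 * (196 + 32 + 256 + 16 + 4 + 4*2 = 512).  The compile-time check below
 * assumes the natural field alignment used by typical ARM/x86 ABIs, so that
 * no padding is inserted.
 */
#ifdef SUPPORT_RPMB_PARTITION
typedef char rpmb_message_size_check
        [(sizeof(DELEGATION_RPMB_MESSAGE) == 512) ? 1 : -1];
#endif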
@@ -141,7 +160,7 @@ typedef struct
in release builds whereas logs are visible to the customer.
-----------------------------------------------*/
-#if defined(LINUX) || (defined __ANDROID32__)
+#if defined(LINUX) || (defined ANDROID)
static bool bDetached = false;
@@ -155,9 +174,13 @@ static MD_INLINE void LogError(const char* format, ...)
}
else
{
+#ifdef TFSW_FDM_ANDROID
+ __android_log_vprint(ANDROID_LOG_ERROR , "TF Daemon", format, ap);
+#else
fprintf(stderr, "ERROR: ");
vfprintf(stderr, format, ap);
fprintf(stderr, "\n");
+#endif
}
va_end(ap);
}
@@ -172,9 +195,13 @@ static MD_INLINE void LogWarning(const char* format, ...)
}
else
{
+#ifdef TFSW_FDM_ANDROID
+ __android_log_vprint(ANDROID_LOG_WARN , "TF Daemon", format, ap);
+#else
fprintf(stderr, "WARNING: ");
vfprintf(stderr, format, ap);
fprintf(stderr, "\n");
+#endif
}
va_end(ap);
}
@@ -188,8 +215,12 @@ static MD_INLINE void LogInfo(const char* format, ...)
}
else
{
+#ifdef TFSW_FDM_ANDROID
+ __android_log_vprint(ANDROID_LOG_INFO , "TF Daemon", format, ap);
+#else
vfprintf(stderr, format, ap);
fprintf(stderr, "\n");
+#endif
}
va_end(ap);
}
@@ -205,9 +236,13 @@ static MD_INLINE void TRACE_ERROR(const char* format, ...)
}
else
{
+#ifdef TFSW_FDM_ANDROID
+ __android_log_vprint(ANDROID_LOG_ERROR , "TF Daemon", format, ap);
+#else
fprintf(stderr, "TRACE: ERROR: ");
vfprintf(stderr, format, ap);
fprintf(stderr, "\n");
+#endif
}
va_end(ap);
#else
@@ -226,9 +261,13 @@ static MD_INLINE void TRACE_WARNING(const char* format, ...)
}
else
{
+#ifdef TFSW_FDM_ANDROID
+ __android_log_vprint(ANDROID_LOG_WARN , "TF Daemon", format, ap);
+#else
fprintf(stderr, "TRACE: WARNING: ");
vfprintf(stderr, format, ap);
fprintf(stderr, "\n");
+#endif
}
va_end(ap);
#else
@@ -247,9 +286,13 @@ static MD_INLINE void TRACE_INFO(const char* format, ...)
}
else
{
+#ifdef TFSW_FDM_ANDROID
+ __android_log_vprint(ANDROID_LOG_INFO , "TF Daemon", format, ap);
+#else
fprintf(stderr, "TRACE: ");
vfprintf(stderr, format, ap);
fprintf(stderr, "\n");
+#endif
}
va_end(ap);
#else
@@ -289,7 +332,7 @@ static MD_INLINE void TRACE_INFO(const char* format, ...)
}
#else
-/* !defined(LINUX) || !defined(__ANDROID32__) */
+/* !defined(LINUX) || !defined(ANDROID) */
static MD_INLINE void LogError(const char* format, ...)
{
@@ -359,7 +402,7 @@ static MD_INLINE void TRACE_INFO(const char* format, ...)
MD_VAR_NOT_USED(format);
#endif /* NDEBUG */
}
-#endif /* defined(LINUX) || defined(__ANDROID32__) */
+#endif /* defined(LINUX) || defined(ANDROID) */
/*----------------------------------------------------------------------------
* Globals
@@ -483,7 +526,7 @@ int static_checkStorageDirAndAccessRights(char * directoryName)
if (result == 0)
{
/* Storage dir exists. Check access rights */
-#if defined(LINUX) || (defined __ANDROID32__)
+#if defined(LINUX) || (defined ANDROID)
if ((buf.st_mode & (S_IXUSR | S_IWUSR)) != (S_IXUSR | S_IWUSR))
{
LogError("storageDir '%s' does not have read-write access", directoryName);
@@ -529,7 +572,7 @@ static TEEC_Result partitionDestroy(uint32_t nPartitionID)
}
/* Try to erase the file */
-#if defined(LINUX) || (defined __ANDROID32__) || defined (__SYMBIAN32__)
+#if defined(LINUX) || (defined ANDROID) || defined (__SYMBIAN32__)
if (unlink(g_pPartitionNames[nPartitionID]) != 0)
#endif
#ifdef WIN32
@@ -686,6 +729,46 @@ static TEEC_Result partitionRead(uint32_t nPartitionID, uint32_t nSectorIndex, u
return S_SUCCESS;
}
+#ifdef SUPPORT_RPMB_PARTITION
+static TEEC_Result rpmbRead(DELEGATION_RPMB_INSTRUCTION *pInstruction)
+{
+ DELEGATION_RPMB_MESSAGE* pMessages;
+ uint32_t nNbMsg, nIndex;
+
+ nNbMsg = g_nSectorSize >> 8;
+ pMessages = (DELEGATION_RPMB_MESSAGE*)malloc(nNbMsg * sizeof(DELEGATION_RPMB_MESSAGE));
+ if (pMessages == NULL)
+ {
+ return S_ERROR_OUT_OF_MEMORY;
+ }
+ memset(pMessages,0,nNbMsg * sizeof(DELEGATION_RPMB_MESSAGE));
+
+ for (nIndex=0;nIndex<nNbMsg;nIndex++)
+ {
+ memcpy(pMessages[nIndex].pNonce , pInstruction->pNonce, 16);
+ pMessages[nIndex].nAddr = pInstruction->nAddr;
+ pMessages[nIndex].nBlockCount = pInstruction->nBlockCount;
+ pMessages[nIndex].nReqOrResp = 0x0004;
+ }
+ memcpy(pMessages[nNbMsg-1].pMAC,pInstruction->nMAC,32);
+
+ /* TODO: send to the RPMB driver */
+
+ memcpy(pInstruction->pNonce,pMessages[0].pNonce , 16);
+ pInstruction->nAddr = pMessages[0].nAddr;
+ pInstruction->nBlockCount = pMessages[0].nBlockCount;
+ for (nIndex=0;nIndex<nNbMsg;nIndex++)
+ {
+ memcpy(g_pWorkspaceBuffer + pInstruction->nWorkspaceOffset[nIndex],pMessages[nIndex].pData,256);
+ }
+ memcpy(pInstruction->nMAC, pMessages[nNbMsg-1].pMAC,32);
+ pInstruction->nResult=pMessages[nNbMsg-1].nResult;
+
+ free(pMessages);
+
+ return S_SUCCESS;
+}
+#endif
/**
* This function executes the WRITE instruction.
*
@@ -724,7 +807,42 @@ static TEEC_Result partitionWrite(uint32_t nPartitionID, uint32_t nSectorIndex,
return S_SUCCESS;
}
+#ifdef SUPPORT_RPMB_PARTITION
+static TEEC_Result rpmbWrite(DELEGATION_RPMB_INSTRUCTION *pInstruction)
+{
+ DELEGATION_RPMB_MESSAGE* pMessages;
+ uint32_t nNbMsg, nIndex;
+ nNbMsg = g_nSectorSize >> 8;
+ pMessages = (DELEGATION_RPMB_MESSAGE*)malloc(nNbMsg * sizeof(DELEGATION_RPMB_MESSAGE));
+ if (pMessages == NULL)
+ {
+ return S_ERROR_OUT_OF_MEMORY;
+ }
+ memset(pMessages,0,nNbMsg * sizeof(DELEGATION_RPMB_MESSAGE));
+
+ for (nIndex=0;nIndex<nNbMsg;nIndex++)
+ {
+ memcpy(pMessages[nIndex].pData,g_pWorkspaceBuffer + pInstruction->nWorkspaceOffset[nIndex],256);
+ pMessages[nIndex].nMC = pInstruction->nMC;
+ pMessages[nIndex].nAddr = pInstruction->nAddr;
+ pMessages[nIndex].nBlockCount = pInstruction->nBlockCount;
+ pMessages[nIndex].nReqOrResp = 0x0003;
+ }
+ memcpy(pMessages[nNbMsg-1].pMAC,pInstruction->nMAC,32);
+
+ /* TODO: send to the RPMB driver */
+
+ pInstruction->nAddr = pMessages[0].nAddr;
+ pInstruction->nMC = pMessages[0].nMC;
+ memcpy(pInstruction->nMAC, pMessages[nNbMsg-1].pMAC,32);
+ pInstruction->nResult=pMessages[nNbMsg-1].nResult;
+
+ free(pMessages);
+
+ return S_SUCCESS;
+}
+#endif
/**
* This function executes the SET_SIZE instruction.
*
@@ -774,7 +892,7 @@ static TEEC_Result partitionSetSize(uint32_t nPartitionID, uint32_t nNewSectorCo
{
int result = 0;
/* Truncate the partition file */
-#if defined(LINUX) || (defined __ANDROID32__)
+#if defined(LINUX) || (defined ANDROID)
result = ftruncate(fileno(pFile),nNewSectorCount * g_nSectorSize);
#endif
#if defined (__SYMBIAN32__)
@@ -819,7 +937,7 @@ static TEEC_Result partitionSync(uint32_t nPartitionID)
}
/* Then synchronize the file descriptor with the file-system */
-#if defined(LINUX) || (defined __ANDROID32__)
+#if defined(LINUX) || (defined ANDROID)
result=fdatasync(fileno(pFile));
#endif
#if defined (__SYMBIAN32__)
@@ -1008,6 +1126,13 @@ static int runSession(TEEC_Context* pContext, TEEC_Session* pSession, TEEC_Opera
{
case DELEGATION_INSTRUCTION_PARTITION_CREATE:
nError = partitionCreate(nPartitionID);
+#ifdef SUPPORT_RPMB_PARTITION
+ if (nPartitionID == RPMB_PARTITION_ID)
+ {
+ /* TODO: get the Write counter */
+ pInstruction->sAuthRW.nMC = 0;
+ }
+#endif
TRACE_INFO("INSTRUCTION: ID=0x%x pid=%d err=%d", (nInstructionID & 0x0F), nPartitionID, nError);
break;
case DELEGATION_INSTRUCTION_PARTITION_OPEN:
@@ -1019,9 +1144,33 @@ static int runSession(TEEC_Context* pContext, TEEC_Session* pSession, TEEC_Opera
{
g_pExchangeBuffer->sAdministrativeData.nPartitionOpenSizes[nPartitionID] = nPartitionSize;
}
+#ifdef SUPPORT_RPMB_PARTITION
+ if (nPartitionID == RPMB_PARTITION_ID)
+ {
+ /* TODO: get the Write counter */
+ pInstruction->sAuthRW.nMC = 0;
+ }
+#endif
break;
}
case DELEGATION_INSTRUCTION_PARTITION_READ:
+#ifdef SUPPORT_RPMB_PARTITION
+ if (nPartitionID == RPMB_PARTITION_ID)
+ {
+ if (nInstructionsIndex + sizeof(DELEGATION_RPMB_INSTRUCTION)-sizeof(uint32_t) <= nInstructionsBufferSize)
+ {
+ nInstructionsIndex+=sizeof(DELEGATION_RPMB_INSTRUCTION)-sizeof(uint32_t);
+ }
+ else
+ {
+ goto instruction_parse_end;
+ }
+ nError = rpmbRead(&pInstruction->sAuthRW);
+ TRACE_INFO("INSTRUCTION: ID=0x%x pid=%d err=%d", (nInstructionID & 0x0F), nPartitionID, nError);
+ break;
+ }
+ else
+#endif
{
/* Parse parameters */
uint32_t nSectorID;
@@ -1041,6 +1190,23 @@ static int runSession(TEEC_Context* pContext, TEEC_Session* pSession, TEEC_Opera
break;
}
case DELEGATION_INSTRUCTION_PARTITION_WRITE:
+#ifdef SUPPORT_RPMB_PARTITION
+ if (nPartitionID == RPMB_PARTITION_ID)
+ {
+ if (nInstructionsIndex + sizeof(DELEGATION_RPMB_INSTRUCTION)-sizeof(uint32_t) <= nInstructionsBufferSize)
+ {
+ nInstructionsIndex+=sizeof(DELEGATION_RPMB_INSTRUCTION)-sizeof(uint32_t);
+ }
+ else
+ {
+ goto instruction_parse_end;
+ }
+ nError = rpmbWrite(&pInstruction->sAuthRW);
+ TRACE_INFO("INSTRUCTION: ID=0x%x pid=%d err=%d", (nInstructionID & 0x0F), nPartitionID, nError);
+ break;
+ }
+ else
+#endif
{
/* Parse parameters */
uint32_t nSectorID;
@@ -1317,7 +1483,7 @@ int main(int argc, char* argv[])
* Detach the daemon from the console
*/
-#if defined(LINUX) || (defined __ANDROID32__)
+#if defined(LINUX) || (defined ANDROID)
{
/*
* Turns this application into a daemon => fork off parent process, setup logging, ...
diff --git a/security/tf_daemon/lib_uuid.h b/security/tf_daemon/lib_uuid.h
new file mode 100644
index 0000000..09bee2e
--- /dev/null
+++ b/security/tf_daemon/lib_uuid.h
@@ -0,0 +1,155 @@
+/**
+ * Copyright(c) 2012 Trusted Logic. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in
+ * the documentation and/or other materials provided with the
+ * distribution.
+ * * Neither the name Trusted Logic nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef __LIB_UUID_H__
+#define __LIB_UUID_H__
+
+
+#include "s_type.h"
+
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+#if 0
+} /* balance curly braces */
+#endif
+
+/**
+ * LIB_UUID is deprecated use S_UUID instead.
+ * @deprecated
+ */
+typedef S_UUID LIB_UUID;
+/**
+ * LIB_UUID_STRING_SIZE is deprecated use UUID_STRING_SIZE instead.
+ * @deprecated
+ */
+#define LIB_UUID_STRING_SIZE 36
+
+/**
+ * Defines the UUID string size in characters
+ *
+ * E.g. "f81d4fae-7dec-11d0-a765-00a0c91e6bf6"
+ **/
+#define UUID_STRING_SIZE 36
+
+/**
+ * Converts the string representation of a UUID to the binary representation as
+ * an S_UUID type. The binary UUID structure must be provided by the caller.
+ *
+ * @param pIdentifierString The UTF-8 representation of the identifier. This
+ * string does not need to be zero terminated. The decoder reads only
+ * the first {UUID_STRING_SIZE} bytes.
+ *
+ * @param pIdentifier The identifier structure receiving the binary value of
+ * the identifier.
+ *
+ * @return TRUE in case of success, FALSE if the string does not conform to the
+ * syntax of UUID as defined in RFC 4122
+ * (http://www.ietf.org/rfc/rfc4122.txt)
+ **/
+bool libUUIDFromString(
+ IN const uint8_t* pIdentifierString,
+ OUT S_UUID* pIdentifier);
+
+/**
+ * Converts the binary representation of a UUID to the string representation.
+ *
+ * @param pIdentifier The identifier structure with the binary value of the
+ * identifier.
+ *
+ * @param pIdentifierString The buffer receiving the UTF-8 representation of
+ * the identifier. This string is not zero terminated. The encoder
+ * writes only the first {UUID_STRING_SIZE} bytes.
+ *
+ **/
+void libUUIDToString(
+ IN const S_UUID* pIdentifier,
+ OUT uint8_t* pIdentifierString);
+
+/**
+ * Generates a UUID from the specified MD5 hash value, as specified in section
+ * 4.3, Algorithm for Creating a Name-Based UUID, of RFC 4122.
+ *
+ * This function assumes that the hash value is 128 bits long.
+ *
+ * @param pHashData A pointer to the first byte of the MD5 hash data. Only the
+ * first 16 bytes of this hash data will be used to generate the UUID.
+ *
+ * @param pIdentifier A pointer to the placeholder receiving the generated
+ * identifier.
+ **/
+void libUUIDFromMD5Hash(
+ IN const uint8_t* pHashData,
+ OUT S_UUID* pIdentifier);
+
+/**
+ * Generates a UUID from the specified SHA-1 hash value, as specified in
+ * section 4.3, Algorithm for Creating a Name-Based UUID, of RFC 4122.
+ *
+ * This function assumes that the hash value is 128 bits long.
+ *
+ * @param pHashData A pointer to the first byte of the SHA-1 hash data. Only the
+ * first 16 bytes of this hash data will be used to generate the UUID.
+ *
+ * @param pIdentifier A pointer to the placeholder receiving the generated
+ * identifier.
+ **/
+void libUUIDFromSHA1Hash(
+ IN const uint8_t* pHashData,
+ OUT S_UUID* pIdentifier);
+
+/**
+ * Checks if an identifier is the nil identifier as specified in RFC 4122.
+ *
+ * @param pIdentifier The identifier to check.
+ *
+ * @return TRUE if the identifier is the nil identifier, FALSE otherwise.
+ **/
+bool libUUIDIsNil(
+ IN const S_UUID* pIdentifier);
+
+/**
+ * Sets an identifier to the nil value as specified in RFC 4122.
+ *
+ * @param pIdentifier The identifier to set to nil.
+ **/
+void libUUIDSetToNil(
+ OUT S_UUID* pIdentifier);
+
+#if 0
+{ /* balance curly braces */
+#endif
+#ifdef __cplusplus
+} /* closes extern "C" */
+#endif
+
+
+#endif /* !defined(__LIB_UUID_H__) */
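/*
 * Minimal usage sketch for the API above (illustrative only).  Note that
 * libUUIDToString does not zero-terminate its output, so a terminator is
 * added by hand before printing.
 */
#include <stdio.h>
#include "lib_uuid.h"

static void example_uuid_roundtrip(void)
{
    const char *text = "f81d4fae-7dec-11d0-a765-00a0c91e6bf6";
    S_UUID uuid;
    uint8_t out[UUID_STRING_SIZE + 1];

    if (!libUUIDFromString((const uint8_t *)text, &uuid) || libUUIDIsNil(&uuid)) {
        printf("not a usable RFC 4122 UUID\n");
        return;
    }
    libUUIDToString(&uuid, out);
    out[UUID_STRING_SIZE] = '\0';
    printf("round-tripped UUID: %s\n", out);
}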
diff --git a/security/tf_daemon/s_version.h b/security/tf_daemon/s_version.h
index d112ea0..cff19d7 100644
--- a/security/tf_daemon/s_version.h
+++ b/security/tf_daemon/s_version.h
@@ -44,7 +44,7 @@
#if defined(WIN32)
#define S_VERSION_OS "W" /* "W" for Windows PC (XP, Vista…) */
#define S_VERSION_PLATFORM "X" /* "X" for ix86 PC simulators */
-#elif defined(__ANDROID32__)
+#elif defined(ANDROID)
#define S_VERSION_OS "A" /* "A" for Android */
#define S_VERSION_PLATFORM "G" /* "G" for 4430 */
#elif defined(LINUX)
@@ -58,15 +58,15 @@
/*
* This version number must be updated for each new release
*/
-#define S_VERSION_MAIN "01.04"
-#define S_VERSION_RESOURCE 1,4,0,S_VERSION_BUILD
+#define S_VERSION_MAIN "01.06"
+#define S_VERSION_RESOURCE 1,6,0,S_VERSION_BUILD
/*
* If this is a patch or engineering version use the following
* defines to set the version number. Else set these values to 0.
*/
-#define S_VERSION_PATCH 11
#define S_VERSION_ENG 0
+#define S_VERSION_PATCH 6
#ifdef S_VERSION_BUILD
/* TRICK: detect if S_VERSION is defined but empty */
@@ -105,8 +105,8 @@
S_VERSION_OS \
S_VERSION_PLATFORM \
S_VERSION_MAIN \
- _S_VERSION_PATCH \
_S_VERSION_ENG \
+ _S_VERSION_PATCH \
"." __STRINGIFY2(S_VERSION_BUILD) " " \
S_VERSION_VARIANT
diff --git a/security/tf_daemon/service_delegation_protocol.h b/security/tf_daemon/service_delegation_protocol.h
index 22b291d..bf831f8 100644
--- a/security/tf_daemon/service_delegation_protocol.h
+++ b/security/tf_daemon/service_delegation_protocol.h
@@ -70,6 +70,10 @@
#define DELEGATION_NOTIFY_TYPE_INFO 0x000000E3
#define DELEGATION_NOTIFY_TYPE_DEBUG 0x000000E4
+#ifdef SUPPORT_RPMB_PARTITION
+#define RPMB_PARTITION_ID 14
+#endif
+
typedef struct
{
uint32_t nInstructionID;
@@ -90,6 +94,21 @@ typedef struct
uint32_t nWorkspaceOffset;
} DELEGATION_RW_INSTRUCTION;
+#ifdef SUPPORT_RPMB_PARTITION
+typedef struct
+{
+ uint32_t nInstructionID;
+ uint8_t nMAC[32];
+ uint32_t nWorkspaceOffset[16];
+ uint8_t pNonce[16];
+ uint32_t nMC;
+ uint16_t nAddr;
+ uint16_t nBlockCount;
+ uint16_t nResult;
+ uint16_t nRequest;
+} DELEGATION_RPMB_INSTRUCTION;
+#endif
+
typedef struct
{
uint32_t nInstructionID;
@@ -102,6 +121,9 @@ typedef union
DELEGATION_NOTIFY_INSTRUCTION sNotify;
DELEGATION_RW_INSTRUCTION sReadWrite;
DELEGATION_SET_SIZE_INSTRUCTION sSetSize;
+#ifdef SUPPORT_RPMB_PARTITION
+ DELEGATION_RPMB_INSTRUCTION sAuthRW;
+#endif
} DELEGATION_INSTRUCTION;
typedef struct
diff --git a/security/tf_daemon/smc_properties.c b/security/tf_daemon/smc_properties.c
index ce1e7e3..48bfa8c 100644
--- a/security/tf_daemon/smc_properties.c
+++ b/security/tf_daemon/smc_properties.c
@@ -76,7 +76,7 @@ typedef enum
STATE_BINARY
} INTEGER_FORMAT;
-#if defined (LINUX) || defined(__ANDROID32__)
+#if defined (LINUX) || defined(ANDROID)
#define SEPARATOR_CHAR '/'
#elif defined (WIN32) || defined (__SYMBIAN32__) || defined (_WIN32_WCE)
@@ -151,7 +151,7 @@ static bool checkFilePath(char *pPath)
printf("Path %s doesn't point on a directory.\n", pDir);
return false;
}
-#if (!defined(__SYMBIAN32__)) && (!defined(_WIN32_WCE)) && (!defined(__ANDROID32__))
+#if (!defined(__SYMBIAN32__)) && (!defined(_WIN32_WCE)) && (!defined(ANDROID))
// TODO : under Symbian, Android and WM, check access right of a directory failed? I don't know why...
/* check read access */
if ((buf.st_mode & S_IREAD) != S_IREAD)
diff --git a/security/tf_daemon/smc_properties_parser.c b/security/tf_daemon/smc_properties_parser.c
index 1f97224..8d0d676 100644
--- a/security/tf_daemon/smc_properties_parser.c
+++ b/security/tf_daemon/smc_properties_parser.c
@@ -42,6 +42,7 @@
#include "smc_properties_parser.h"
#include "lib_manifest2.h"
+#include "lib_uuid.h"
#include "s_error.h"
/* ---------------------------------------------------------------------------------
@@ -57,7 +58,7 @@
#define GET_LAST_ERR errno
#endif
-#if defined (LINUX) || defined (__SYMBIAN32__) || defined (__ANDROID32__)
+#if defined (LINUX) || defined (__SYMBIAN32__) || defined (ANDROID)
#define STRICMP strcasecmp
#elif defined(_WIN32_WCE)
#define STRICMP _stricmp
@@ -241,7 +242,7 @@ static NODE* SMCPropListFindElement(LIST* pList,char* pName,bool bIsCaseSensitiv
static S_RESULT SMCPropYacc(uint8_t* pBuffer, uint32_t nBufferLength,
- CONF_FILE* pConfFile)
+ CONF_FILE* pConfFile, SERVICE_SECTION* pService)
{
S_RESULT nError=S_SUCCESS;
LIST *pPublicPropertyList=NULL;
@@ -264,6 +265,15 @@ static S_RESULT SMCPropYacc(uint8_t* pBuffer, uint32_t nBufferLength,
sParserContext.nManifestLength = nBufferLength;
sParserContext.nType = LIB_MANIFEST2_TYPE_SOURCE_WITH_SECTIONS;
+ if (pService!=NULL)
+ {
+ pPublicPropertyList=&pService->sPublicPropertyList;
+ pPrivatePropertyList=&pService->sPrivatePropertyList;
+ /* read inside a service compiled manifest */
+ sParserContext.nType = LIB_MANIFEST2_TYPE_COMPILED;
+ sprintf(serviceManifestName, "%s(manifest)", pService->sNode.pName);
+ sParserContext.pManifestName = serviceManifestName;
+ }
libManifest2InitContext(&sParserContext);
while (true)
@@ -382,6 +392,47 @@ static S_RESULT SMCPropYacc(uint8_t* pBuffer, uint32_t nBufferLength,
}
else
{
+ if (strcmp(pProperty->sNode.pName,CONFIG_SERVICE_ID_PROPERTY_NAME) == 0)
+ {
+ if (pService!=NULL)
+ {
+ pService->sNode.pName=malloc(nValueLength+1);
+ if (pService->sNode.pName==NULL)
+ {
+ nError=S_ERROR_OUT_OF_MEMORY;
+ goto error;
+ }
+#if defined (LINUX) || defined (__SYMBIAN32__) || defined(ANDROID)
+ {
+ // put each char of the value in uppercase
+ char* p=pProperty->pValue;
+ while(*p)
+ {
+ *p=toupper(*p);
+ p++;
+ }
+ }
+#else
+ _strupr(pProperty->pValue);
+#endif
+ memcpy(pService->sNode.pName,pProperty->pValue,nValueLength+1);
+
+ if (!libUUIDFromString((const uint8_t*)pProperty->pValue,&pService->sUUID))
+ {
+ nError=S_ERROR_WRONG_SIGNATURE;
+ goto error;
+ }
+ {
+ S_UUID sNullUUID;
+ memset(&sNullUUID,0,sizeof(S_UUID));
+ if (!memcmp(&pService->sUUID,&sNullUUID,sizeof(S_UUID)))
+ {
+ nError=S_ERROR_WRONG_SIGNATURE;
+ goto error;
+ }
+ }
+ }
+ }
if ((nValueLength > strlen(CONFIG_PROPERTY_NAME)) &&
(memcmp(pProperty->sNode.pName, CONFIG_PROPERTY_NAME, strlen(CONFIG_PROPERTY_NAME)) == 0))
{
@@ -411,10 +462,10 @@ error:
TRACE_ERROR("Configuration file: wrong service UUID: %s\n", pValueZ);
break;
case S_ERROR_OUT_OF_MEMORY:
- TRACE_ERROR("Out of memory\n");
+ TRACE_ERROR("Out of memory\n");
break;
case S_ERROR_ITEM_NOT_FOUND:
- TRACE_ERROR("Configuration file: service \"%s\" not found\n", pNameZ);
+ TRACE_ERROR("Configuration file: service \"%s\" not found\n", pNameZ);
break;
}
}
@@ -544,7 +595,7 @@ S_RESULT SMCPropParseConfigFile(char* pConfigFilename,CONF_FILE* pConfFile)
assert(0);
}
- nError=SMCPropYacc(pFile,nFileLength,pConfFile);
+ nError=SMCPropYacc(pFile,nFileLength,pConfFile,NULL);
if(pConfigFilename != NULL)
{
diff --git a/security/tf_sdk/include/OEMCrypto.h b/security/tf_sdk/include/OEMCrypto.h
new file mode 100644
index 0000000..daefdc8
--- /dev/null
+++ b/security/tf_sdk/include/OEMCrypto.h
@@ -0,0 +1,388 @@
+/*******************************************************************************
+ *
+ * Reference APIs needed to support Widevine's crypto algorithms.
+ *
+ ******************************************************************************/
+
+#ifndef _OEMCRYPTO_AES_H
+#define _OEMCRYPTO_AES_H
+
+typedef unsigned char OEMCrypto_UINT8;
+typedef char OEMCrypto_INT8;
+typedef unsigned int OEMCrypto_UINT32;
+typedef unsigned int OEMCrypto_SECURE_BUFFER;
+
+
+typedef enum OEMCryptoResult {
+ OEMCrypto_SUCCESS = 0,
+ OEMCrypto_ERROR_INIT_FAILED,
+ OEMCrypto_ERROR_TERMINATE_FAILED,
+ OEMCrypto_ERROR_ENTER_SECURE_PLAYBACK_FAILED,
+ OEMCrypto_ERROR_EXIT_SECURE_PLAYBACK_FAILED,
+ OEMCrypto_ERROR_SHORT_BUFFER,
+ OEMCrypto_ERROR_NO_DEVICE_KEY,
+ OEMCrypto_ERROR_NO_ASSET_KEY,
+ OEMCrypto_ERROR_KEYBOX_INVALID,
+ OEMCrypto_ERROR_NO_KEYDATA,
+ OEMCrypto_ERROR_NO_CW,
+ OEMCrypto_ERROR_DECRYPT_FAILED,
+ OEMCrypto_ERROR_WRITE_KEYBOX,
+ OEMCrypto_ERROR_WRAP_KEYBOX,
+ OEMCrypto_ERROR_BAD_MAGIC,
+ OEMCrypto_ERROR_BAD_CRC,
+ OEMCrypto_ERROR_NO_DEVICEID,
+ OEMCrypto_ERROR_RNG_FAILED,
+ OEMCrypto_ERROR_RNG_NOT_SUPPORTED,
+ OEMCrypto_ERROR_SETUP
+} OEMCryptoResult;
+
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#define OEMCrypto_Initialize _oec01
+#define OEMCrypto_Terminate _oec02
+#define OEMCrypto_SetEntitlementKey _oec03
+#define OEMCrypto_DeriveControlWord _oec04
+#define OEMCrypto_DecryptVideo _oec05
+#define OEMCrypto_DecryptAudio _oec06
+#define OEMCrypto_InstallKeybox _oec07
+#define OEMCrypto_GetKeyData _oec08
+#define OEMCrypto_IsKeyboxValid _oec09
+#define OEMCrypto_GetRandom _oec10
+#define OEMCrypto_GetDeviceID _oec11
+#define OEMCrypto_EnterSecurePlayback _oec12
+#define OEMCrypto_ExitSecurePlayback _oec13
+#define OEMCrypto_WrapKeybox _oec14
+
+/*
+ * OEMCrypto_Initialize
+ *
+ * Description:
+ * Initializes the crypto hardware
+ *
+ * Parameters:
+ * N/A
+ *
+ * Returns:
+ * OEMCrypto_SUCCESS success
+ * OEMCrypto_ERROR_INIT_FAILED failed to initialize crypto hardware
+ */
+OEMCryptoResult OEMCrypto_Initialize(void);
+
+
+/*
+ * OEMCrypto_Terminate
+ *
+ * Description:
+ * The API closes the crypto operation and releases all resources used.
+ *
+ * Parameters:
+ * N/A
+ *
+ * Returns:
+ * OEMCrypto_SUCCESS success
+ * OEMCrypto_ERROR_TERMINATE_FAILED failed to de-initialize crypto hardware
+ */
+OEMCryptoResult OEMCrypto_Terminate(void);
+
+/*
+ * OEMCrypto_EnterSecurePlayback
+ *
+ * Description:
+ * Configures the security processor for secure decryption. This may involve
+ * setting up firewall regions. It is called when the decrypt session for an
+ * asset is established.
+ *
+ * Parameters:
+ * N/A
+ *
+ * Returns:
+ * OEMCrypto_SUCCESS success
+ * OEMCrypto_ERROR_ENTER_SECURE_PLAYBACK_FAILED
+ */
+OEMCryptoResult OEMCrypto_EnterSecurePlayback(void);
+
+/*
+ * OEMCrypto_ExitSecurePlayback
+ *
+ * Description:
+ * Exit the secure playback mode. This may involve releasing the firewall regions. It is
+ * called when the decrypt session for an asset is closed.
+ *
+ * Parameters:
+ * N/A
+ *
+ * Returns:
+ * OEMCrypto_SUCCESS success
+ * OEMCrypto_ERROR_EXIT_SECURE_PLAYBACK_FAILED
+ */
+OEMCryptoResult OEMCrypto_ExitSecurePlayback(void);
+
+/*
+ * OEMCrypto_SetEntitlementKey
+ *
+ * Description:
+ * Decrypt the entitlement (EMM) key, also known as the asset key,
+ * using the encrypted device key (Device Key field) in the Widevine Keybox.
+ *
+ * As shown in Figure 1 on the next page, Step 1 uses an OEM root key to decrypt
+ * (AES-128-ECB) the Device Key in the Keybox; the result is "latched" in the hardware
+ * key ladder.
+ *
+ * Step 2 uses the "latched" clear device key to decrypt (AES-128-ECB) the
+ * entitlement key passed in as the *emmKey parameter and "latches" the clear
+ * entitlement key in hardware for the next operation.
+ *
+ * Parameters:
+ * emmKey (in) - pointer to the encrypted entitlement key
+ * emmKeyLength (in) - length of entitlement key in bytes
+ *
+ * Returns:
+ * OEMCrypto_SUCCESS success
+ * OEMCrypto_ERROR_NO_DEVICE_KEY failed to decrypt device key
+ * OEMCrypto_ERROR_NO_ASSET_KEY failed to decrypt asset key
+ * OEMCrypto_ERROR_KEYBOX_INVALID cannot decrypt and read from Keybox
+ */
+
+OEMCryptoResult OEMCrypto_SetEntitlementKey(const OEMCrypto_UINT8* emmKey,
+ const OEMCrypto_UINT32 emmKeyLength);
+
+/*
+ * OEMCrypto_DeriveControlWord
+ *
+ * Description:
+ * Using the active key ladder key from OEMCrypto_SetEntitlementKey(), decrypts
+ * (AES-128-CBC, iv=0) the 32-byte ECM referenced by the *ecm parameter; returns in
+ * *flags the first clear 4 bytes data. “Latched†the clear bytes [4..20] as the
+ * clear control word for subsequent payload decryption operation.
+ *
+ * Parameters:
+ * ecm (in) - points to encrypted ECM data
+ * length (in) - length of encrypted ECM data in bytes
+ * flags (out) - points to buffer to receive 4 byte clear flag value
+ *
+ * Returns:
+ * OEMCrypto_SUCCESS success
+ * OEMCrypto_ERROR_NO_CW cannot decrypt control word
+*/
+
+OEMCryptoResult OEMCrypto_DeriveControlWord(const OEMCrypto_UINT8* ecm,
+ const OEMCrypto_UINT32 length,
+ OEMCrypto_UINT32* flags);
+
+
+/*
+ * OEMCrypto_DecryptVideo
+ *
+ * Description:
+ *
+ * The API decrypts (AES-128-CBC) the video payload in the buffer referenced by
+ * the *input parameter into the secure buffer referenced by the output
+ * parameter, using the control word "latched" in the active hardware key
+ * ladder. If inputLength is not a multiple of the crypto block size (16 bytes),
+ * the API handles the residual bytes using CipherText Stealing (CTS).
+ *
+ * Parameters:
+ * iv (in/out) - If iv is NULL, then no decryption is required, i.e. the packets are
+ * already clear. Otherwise, iv references the AES initialization
+ * vector. Note that the updated IV after processing the final crypto
+ * block must be passed back out in *iv.
+ * input (in) - buffer containing the encrypted data
+ * inputLength (in) - number of bytes in the input payload, which may not be a multiple of 16 bytes
+ * output (in) - reference to the secure buffer which will receive the decrypted data
+ * outputLength (out) - number of bytes written into the secure buffer
+ *
+ * Returns:
+ * OEMCrypto_SUCCESS success
+ * OEMCrypto_ERROR_DECRYPT_FAILED failed decryption
+*/
+
+OEMCryptoResult
+OEMCrypto_DecryptVideo(const OEMCrypto_UINT8* iv,
+ const OEMCrypto_UINT8* input, const OEMCrypto_UINT32 inputLength,
+ OEMCrypto_UINT32 output_handle, OEMCrypto_UINT32 output_offset, OEMCrypto_UINT32 *outputLength);
+
+
+/*
+ * OEMCrypto_DecryptAudio
+ *
+ * Description:
+ * The API decrypts (AES-128-CBC) the audio payload in the buffer referenced by
+ * the *input parameter into the non-secure buffer referenced by the output
+ * parameter, using the control word "latched" in the active hardware key
+ * ladder. If inputLength is not a multiple of the crypto block size (16 bytes),
+ * the API handles the residual bytes using CipherText Stealing (CTS).
+ *
+ * OEMCrypto_DecryptAudio must make sure that it cannot be used to decrypt a video
+ * stream into non-firewalled buffers, by verifying that no video packets are
+ * processed.
+ *
+ * Parameters:
+ * iv (in/out) - If iv is NULL, then no decryption is required, i.e. the packets are
+ * already clear. Otherwise, iv references the AES initialization
+ * vector. Note that the updated IV after processing the final crypto
+ * block must be passed back out in *iv.
+ * input (in) - buffer containing the encrypted data
+ * inputLength (in) - number of bytes in the input payload, which may not be a multiple of 16 bytes
+ * output (in) - reference to the non-secure buffer which will receive the decrypted data
+ * outputLength (out) - number of bytes written into the non-secure buffer
+ *
+ * Returns:
+ * OEMCrypto_SUCCESS success
+ * OEMCrypto_ERROR_DECRYPT_FAILED failed decryption
+*/
+OEMCryptoResult
+OEMCrypto_DecryptAudio(const OEMCrypto_UINT8* iv,
+ const OEMCrypto_UINT8* input, const OEMCrypto_UINT32 inputLength,
+ OEMCrypto_UINT8 *output, OEMCrypto_UINT32 *outputLength);
+
+
+/*
+ * OEMCrypto_InstallKeybox
+ *
+ * Description:
+ * Unwrap and store the keybox to persistent memory. The device key must be stored
+ * securely. The device key will be decrypted and
+ * "latched" into the hardware key ladder by OEMCrypto_SetEntitlementKey.
+ *
+ * This function is used once to load the keybox onto the device at provisioning time.
+ *
+ * Parameters:
+ * keybox (in) - Pointer to clear keybox data. Must have been wrapped with OEMCrypto_WrapKeybox
+ * keyboxLength (in) - Length of the keybox data in bytes
+ *
+ * Returns:
+ * OEMCrypto_SUCCESS success
+ * OEMCrypto_ERROR_WRITE_KEYBOX failed to handle and store Keybox
+ */
+
+OEMCryptoResult OEMCrypto_InstallKeybox(OEMCrypto_UINT8 *keybox,
+ OEMCrypto_UINT32 keyBoxLength);
+
+
+/*
+ * OEMCrypto_IsKeyboxValid
+ *
+ * Description:
+ * Validate the Widevine Keybox stored on the device.
+ *
+ * The API performs two verification steps on the Keybox. It first verifies the MAGIC
+ * field contains a valid signature (i.e. 'k''b''o''x'). The API then computes the
+ * CRC using CRC-32-IEEE 802.3 standard and compares the checksum to the CRC stored
+ * in the Keybox. The CRC is computed over the entire Keybox excluding the 4 bytes
+ * CRC (i.e. Keybox[0..123]).
+ *
+ * Parameters:
+ * none
+ *
+ * Returns:
+ * OEMCrypto_SUCCESS
+ * OEMCrypto_ERROR_BAD_MAGIC
+ * OEMCrypto_ERROR_BAD_CRC
+ */
+
+OEMCryptoResult OEMCrypto_IsKeyboxValid(void);
+
+
+/*
+ * OEMCrypto_GetDeviceID
+ *
+ * Description:
+ * Retrieve the device's unique identifier from the Keybox.
+ *
+ * Parameters:
+ * deviceId (out) - pointer to the buffer that receives the Device ID
+ * idLength (in/out) - on input, size of the caller's device ID buffer.
+ * On output, the number of bytes written into the buffer.
+ *
+ * Returns:
+ * OEMCrypto_SUCCESS success
+ * OEMCrypto_ERROR_SHORT_BUFFER if the buffer is too small to return the device ID
+ * OEMCrypto_ERROR_NO_DEVICEID failed to return Device Id
+ */
+OEMCryptoResult OEMCrypto_GetDeviceID(OEMCrypto_UINT8* deviceID,
+ OEMCrypto_UINT32 *idLength);
+
+
+/*
+ * OEMCrypto_GetKeyData
+ *
+ * Description:
+ * Returns the Key Data field from the Keybox. The Key Data field does not need to be
+ * encrypted by an OEM root key, but may be if desired.
+ *
+ * If the Key Data field was encrypted with an OEM root key when the Keybox was stored
+ * on the device, then this function should decrypt it and return the clear Key Data.
+ * If the Key Data was not encrypted, then this function should just access and return
+ * the clear Key data.
+ *
+ * Parameters:
+ * keyData (out) - pointer to the buffer to hold the Key Data field from the Keybox
+ * dataLength (in/out) - on input, the allocated buffer size. On output, the number
+ * of bytes in KeyData.
+ *
+ * Returns:
+ * OEMCrypto_SUCCESS success
+ * OEMCrypto_ERROR_SHORT_BUFFER if the buffer is too small to return the KeyData
+ * OEMCrypto_ERROR_NO_KEYDATA failed to return KeyData
+ */
+OEMCryptoResult OEMCrypto_GetKeyData(OEMCrypto_UINT8* keyData,
+ OEMCrypto_UINT32 *keyDataLength);
+
+
+/*
+ * OEMCrypto_GetRandom
+ *
+ * Description:
+ * Returns a buffer filled with hardware-generated random bytes, if supported by the hardware.
+ *
+ * Parameters:
+ * randomData (out) - Points to the buffer that should receive the random data.
+ * dataLength (in) - Length of the random data buffer in bytes.
+ *
+ * Returns:
+ * OEMCrypto_SUCCESS success
+ * OEMCrypto_ERROR_RNG_FAILED failed to generate random number
+ * OEMCrypto_ERROR_RNG_NOT_SUPPORTED function not supported
+ */
+
+OEMCryptoResult OEMCrypto_GetRandom(OEMCrypto_UINT8* randomData,
+ OEMCrypto_UINT32 dataLength);
+
+/*
+ * OEMCrypto_WrapKeybox
+ *
+ * Description:
+ * Wrap the Keybox with a key derived from the device key.
+ *
+ * Parameters:
+ * keybox (in) - Pointer to keybox data.
+ * keyboxLength - Length of the Keybox data in bytes
+ * wrappedKeybox (out) - Pointer to wrapped keybox
+ * wrappedKeyboxLength (out) - Pointer to the length of the wrapped keybox in bytes
+ * transportKey (in) - An optional AES transport key. If provided, the parameter
+ * keybox is passed encrypted with this transport key with AES-CBC
+ * and a null IV
+ * transportKeyLength - number of bytes in the transportKey
+ *
+ * Returns:
+ * OEMCrypto_SUCCESS success
+ * OEMCrypto_ERROR_WRAP_KEYBOX failed to wrap Keybox
+ */
+
+OEMCryptoResult OEMCrypto_WrapKeybox(OEMCrypto_UINT8 *keybox,
+ OEMCrypto_UINT32 keyBoxLength,
+ OEMCrypto_UINT8 *wrappedKeybox,
+ OEMCrypto_UINT32 *wrappedKeyBoxLength,
+ OEMCrypto_UINT8 *transportKey,
+ OEMCrypto_UINT32 transportKeyLength);
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
+
+/***************************** End of File *****************************/
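/*
 * Illustrative sketch of the Keybox CRC check described for
 * OEMCrypto_IsKeyboxValid() above: CRC-32 (IEEE 802.3) over Keybox[0..123],
 * compared against the 4 CRC bytes that follow.  The 128-byte total size and
 * the big-endian storage of the CRC are assumptions, not taken from this
 * header.
 */
#include <stdint.h>

static uint32_t example_crc32_ieee(const uint8_t *data, unsigned int length)
{
    uint32_t crc = 0xFFFFFFFFu;
    unsigned int i;
    int bit;

    for (i = 0; i < length; i++) {
        crc ^= data[i];
        for (bit = 0; bit < 8; bit++)
            crc = (crc >> 1) ^ (0xEDB88320u & (0u - (crc & 1u)));
    }
    return crc ^ 0xFFFFFFFFu;
}

static int example_keybox_crc_ok(const uint8_t keybox[128])
{
    uint32_t computed = example_crc32_ieee(keybox, 124);
    uint32_t stored = ((uint32_t)keybox[124] << 24) |
                      ((uint32_t)keybox[125] << 16) |
                      ((uint32_t)keybox[126] << 8)  |
                       (uint32_t)keybox[127];
    return computed == stored;
}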
diff --git a/security/tf_sdk/include/common_secure_driver_protocol.h b/security/tf_sdk/include/common_secure_driver_protocol.h
new file mode 100644
index 0000000..3f46113
--- /dev/null
+++ b/security/tf_sdk/include/common_secure_driver_protocol.h
@@ -0,0 +1,35 @@
+/*
+ * Copyright (c) 2011 Trusted Logic S.A.
+ * All Rights Reserved.
+ *
+ * This software is the confidential and proprietary information of
+ * Trusted Logic S.A. ("Confidential Information"). You shall not
+ * disclose such Confidential Information and shall use it only in
+ * accordance with the terms of the license agreement you entered
+ * into with Trusted Logic S.A.
+ *
+ * TRUSTED LOGIC S.A. MAKES NO REPRESENTATIONS OR WARRANTIES ABOUT THE
+ * SUITABILITY OF THE SOFTWARE, EITHER EXPRESS OR IMPLIED, INCLUDING
+ * BUT NOT LIMITED TO THE IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS
+ * FOR A PARTICULAR PURPOSE, OR NON-INFRINGEMENT. TRUSTED LOGIC S.A. SHALL
+ * NOT BE LIABLE FOR ANY DAMAGES SUFFERED BY LICENSEE AS A RESULT OF USING,
+ * MODIFYING OR DISTRIBUTING THIS SOFTWARE OR ITS DERIVATIVES.
+ */
+#ifndef __COMMON_SECURE_DRIVER_PROTOCOL_H__
+#define __COMMON_SECURE_DRIVER_PROTOCOL_H__
+
+/*C2537CC3-36F0-48D9-820E-559601478029*/
+#define COMMON_SECURE_DRIVER_UUID {0xC2537CC3, 0x36F0, 0x48D9, {0x82, 0x0E, 0x55, 0x96, 0x01, 0x47, 0x80, 0x29}}
+
+#define COMMON_SECURE_DRIVER_GET_SECURE_BUFFER 0x00000000
+#define COMMON_SECURE_DRIVER_HDCP_SECURE_IS_SECURE 0x00000002
+#define COMMON_SECURE_DRIVER_ENTER_SECURE_PLAYBACK 0x00003000
+#define COMMON_SECURE_DRIVER_EXIT_SECURE_PLAYBACK 0x00003001
+#define COMMON_SECURE_DRIVER_LOCK_SECURE_PLAYBACK 0x00004000
+#define COMMON_SECURE_DRIVER_UNLOCK_SECURE_PLAYBACK 0x00004001
+
+#define COMMON_SECURE_DRIVER_CEK_UNWRAP 0x00006000
+#define COMMON_SECURE_DRIVER_KEK_WRAP 0x00006001
+#define COMMON_SECURE_DRIVER_KEK_UNWRAP 0x00006002
+
+#endif /* __COMMON_SECURE_DRIVER_PROTOCOL_H__ */
diff --git a/security/tf_sdk/include/s_type.h b/security/tf_sdk/include/s_type.h
index 72f2a8a..ae260cc 100644
--- a/security/tf_sdk/include/s_type.h
+++ b/security/tf_sdk/include/s_type.h
@@ -35,7 +35,7 @@
#define __S_TYPE_H__
/* C99 integer types */
-#if (!defined(__STDC_VERSION__) || __STDC_VERSION__ < 199901L) &&(!defined(__ANDROID32__))
+#if (!defined(__STDC_VERSION__) || __STDC_VERSION__ < 199901L) &&(!defined(ANDROID))
#include <limits.h>
diff --git a/security/tf_sdk/include/wvdrm_protocol.h b/security/tf_sdk/include/wvdrm_protocol.h
new file mode 100644
index 0000000..de51fb6
--- /dev/null
+++ b/security/tf_sdk/include/wvdrm_protocol.h
@@ -0,0 +1,131 @@
+/*
+ * Copyright (c) 2011 Trusted Logic S.A.
+ * All Rights Reserved.
+ *
+ * This software is the confidential and proprietary information of
+ * Trusted Logic S.A. ("Confidential Information"). You shall not
+ * disclose such Confidential Information and shall use it only in
+ * accordance with the terms of the license agreement you entered
+ * into with Trusted Logic S.A.
+ *
+ * TRUSTED LOGIC S.A. MAKES NO REPRESENTATIONS OR WARRANTIES ABOUT THE
+ * SUITABILITY OF THE SOFTWARE, EITHER EXPRESS OR IMPLIED, INCLUDING
+ * BUT NOT LIMITED TO THE IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS
+ * FOR A PARTICULAR PURPOSE, OR NON-INFRINGEMENT. TRUSTED LOGIC S.A. SHALL
+ * NOT BE LIABLE FOR ANY DAMAGES SUFFERED BY LICENSEE AS A RESULT OF USING,
+ * MODIFYING OR DISTRIBUTING THIS SOFTWARE OR ITS DERIVATIVES.
+ */
+#ifndef __WVDRM_PROTOCOL_H__
+#define __WVDRM_PROTOCOL_H__
+
+#include <common_secure_driver_protocol.h>
+
+/* 45544DF9-B1DF-9BEE-D0B9-0C98CE3B41F6 */
+#define WVDRM_UUID {0x45544DF9, 0xB1DF, 0x9BEE, {0xD0, 0xB9, 0x0C, 0x98, 0xCE, 0x3B, 0x41, 0xF6}}
+
+/*
+ * Persistently install the DRM "key box" previously wrapped
+ * with WRAP_KEYBOX
+ *
+ * Param #0: MEMREF_INPUT:
+ * The encrypted keybox
+ */
+#define WVDRM_INSTALL_KEYBOX 0x00001000
+
+/*
+ * Test if a keybox is provisioned and optionally get its key data
+ *
+ * #0:
+ * - NONE: for testing if the keybox is valid (returns S_ERROR_ITEM_NOT_FOUND if not)
+ * - MEMREF_OUTPUT: to actually get the key data
+ */
+#define WVDRM_GET_KEY_DATA 0x00001001
+
+/*
+ * Generate random data
+ *
+ * #0:
+ * - MEMREF_OUTPUT: buffer to fill with random data
+ */
+#define WVDRM_GET_RANDOM 0x00001002
+
+/*
+ * Get the device ID
+ *
+ * #0: MEMREF_OUTPUT: filled with the device ID
+ */
+#define WVDRM_GET_DEVICE_ID 0x00001003
+
+/*
+ * Optionally decrypt a keybox with a transport key
+ * and wrap it with a device specific key. The result
+ * can be later passed to INSTALL_KEYBOX
+ *
+ * #0: MEMREF_INPUT: the input keybox
+ * - either in cleartext if param #2 is NONE
+ * - or encrypted with the key in param #2
+ * #1: MEMREF_OUTPUT: the resulting wrapped key box
+ * #2:
+ * - NONE: param#0 is the clear-text keybox
+ * - MEMREF_INPUT: a transport key, in which case
+ * param#0 is the encryption with AES-CBC-128 of the
+ * keybox with an IV filled with zeros
+ */
+#define WVDRM_WRAP_KEYBOX 0x00001004
+
+/*
+ * Unwrap an asset key. The asset key is stored in transient memory
+ * but available globally to all sessons. There can be only one asset key
+ * at a time.
+ *
+ * #0: MEMREF_INPUT
+ */
+#define WVDRM_SET_ENTITLEMENT_KEY 0x00002000
+
+/*
+ * Decrypt the ECM (Entitlement Control Message = content key) using the asset key.
+ * Store the flags associated with the ECM. These flags will be later used, e.g.,
+ * to activate HDCP protection. Also returns the flags.
+ *
+ * #0: MEMREF_INPUT
+ * #1: VALUE_OUTPUT: a=flags
+ *
+ */
+#define WVDRM_DERIVE_CONTROL_WORD 0x00002001
+
+/*
+ * Decrypt a chunk of content from a non-secure buffer into
+ * a secure buffer opaquely referred to as an offset within
+ * the Decrypted-Encoded-Buffer part of the carveout.
+ *
+ * #0: MEMREF_INPUT: the encrypted content
+ * #1: VALUE_INPUT:
+ * [in] a=physical address of the ION handle, b=size of the handle
+ * #2: MEMREF_INOUT: the IV
+ * #3: VALUE_INOUT:
+ * [in] a=offset from the physical address of the ION handle, b=max size
+ * [out] b=actual size or required size
+ */
+#define WVDRM_DECRYPT_VIDEO 0x00002002
+
+/*
+ * Decrypt a chunk of content into a non-secure buffer. This
+ * must be used only for audio content.
+ *
+ * #0: MEMREF_INPUT: the encrypted content
+ * #1: MEMREF_OUTPUT: the decrypted content
+ * #2: MEMREF_INOUT: the IV
+ */
+#define WVDRM_DECRYPT_AUDIO 0x00002003
+
+/*
+ * Enter secure playback mode.
+ */
+#define WVDRM_ENTER_SECURE_PLAYBACK COMMON_SECURE_DRIVER_ENTER_SECURE_PLAYBACK
+
+/*
+ * Exit secure playback mode.
+ */
+#define WVDRM_EXIT_SECURE_PLAYBACK COMMON_SECURE_DRIVER_EXIT_SECURE_PLAYBACK
+
+#endif /* __WVDRM_PROTOCOL_H__ */
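/*
 * Client-side sketch (illustrative, not from this tree): invoking
 * WVDRM_GET_RANDOM through the GlobalPlatform TEE Client API, matching the
 * "#0: MEMREF_OUTPUT" parameter layout documented above.  Session-opening
 * details (login method, connection data) are assumptions.
 */
#include <string.h>
#include <tee_client_api.h>
#include "wvdrm_protocol.h"

static TEEC_Result example_wvdrm_get_random(uint8_t *out, size_t length)
{
    static const TEEC_UUID uuid = WVDRM_UUID;
    TEEC_Context context;
    TEEC_Session session;
    TEEC_Operation op;
    uint32_t origin;
    TEEC_Result res;

    res = TEEC_InitializeContext(NULL, &context);
    if (res != TEEC_SUCCESS) return res;

    res = TEEC_OpenSession(&context, &session, &uuid,
                           TEEC_LOGIN_PUBLIC, NULL, NULL, &origin);
    if (res != TEEC_SUCCESS) goto out_context;

    memset(&op, 0, sizeof(op));
    op.paramTypes = TEEC_PARAM_TYPES(TEEC_MEMREF_TEMP_OUTPUT,
                                     TEEC_NONE, TEEC_NONE, TEEC_NONE);
    op.params[0].tmpref.buffer = out;
    op.params[0].tmpref.size = length;

    res = TEEC_InvokeCommand(&session, WVDRM_GET_RANDOM, &op, &origin);

    TEEC_CloseSession(&session);
out_context:
    TEEC_FinalizeContext(&context);
    return res;
}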
diff --git a/system-core-headers-ti/ion/ion.h b/system-core-headers-ti/ion/ion.h
new file mode 100644
index 0000000..91ad802
--- /dev/null
+++ b/system-core-headers-ti/ion/ion.h
@@ -0,0 +1,50 @@
+/*
+ * ion.h
+ *
+ * Memory Allocator functions for ion
+ *
+ * Copyright 2011 Google, Inc
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __SYS_CORE_ION_H
+#define __SYS_CORE_ION_H
+
+#include <linux/ion.h>
+#include <linux/omap_ion.h>
+
+__BEGIN_DECLS
+
+int ion_open();
+int ion_close(int fd);
+int ion_alloc(int fd, size_t len, size_t align, unsigned int flags,
+ struct ion_handle **handle);
+int ion_alloc_tiler(int fd, size_t w, size_t h, int fmt, unsigned int flags,
+ struct ion_handle **handle, size_t *stride);
+int ion_sync_fd(int fd, int handle_fd);
+int ion_free(int fd, struct ion_handle *handle);
+int ion_map(int fd, struct ion_handle *handle, size_t length, int prot,
+ int flags, off_t offset, unsigned char **ptr, int *map_fd);
+int ion_share(int fd, struct ion_handle *handle, int *share_fd);
+int ion_import(int fd, int share_fd, struct ion_handle **handle);
+int ion_map_cacheable(int fd, struct ion_handle *handle, size_t length,
+ int prot, int flags, off_t offset, unsigned char **ptr, int *map_fd);
+int ion_flush_cached(int fd, struct ion_handle *handle, size_t length,
+ unsigned char *ptr);
+int ion_inval_cached(int fd, struct ion_handle *handle, size_t length,
+ unsigned char *ptr);
+
+__END_DECLS
+
+#endif /* __SYS_CORE_ION_H */
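/*
 * Usage sketch for the allocator interface above (illustrative only).  The
 * include path, heap-flags value, size and alignment are placeholders, and
 * error handling is reduced to early returns.
 */
#include <stddef.h>
#include <sys/mman.h>
#include <unistd.h>
#include <ion/ion.h>           /* include path assumed */

static int example_ion_alloc_map(size_t len, unsigned int heap_flags)
{
    struct ion_handle *handle = NULL;
    unsigned char *ptr = NULL;
    int map_fd = -1;
    int fd = ion_open();
    if (fd < 0) return -1;

    if (ion_alloc(fd, len, 4096 /* align */, heap_flags, &handle) < 0) {
        ion_close(fd);
        return -1;
    }
    if (ion_map(fd, handle, len, PROT_READ | PROT_WRITE, MAP_SHARED,
                0 /* offset */, &ptr, &map_fd) == 0) {
        ptr[0] = 0xAA;                 /* touch the mapping */
        munmap(ptr, len);
        close(map_fd);
    }
    ion_free(fd, handle);
    ion_close(fd);
    return 0;
}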
diff --git a/test/CameraHal/Android.mk b/test/CameraHal/Android.mk
index 9a31d85..73bc4d1 100644
--- a/test/CameraHal/Android.mk
+++ b/test/CameraHal/Android.mk
@@ -1,8 +1,16 @@
+
LOCAL_PATH:= $(call my-dir)
+# Temporary wrapper to disable camera_test in non-CPCAM mode.
+# The camera_test source code should respect the absence of the
+# OMAP_ENHANCEMENT and OMAP_ENHANCEMENT_CPCAM macros before it
+# can be returned to the build.
+ifdef OMAP_ENHANCEMENT_CPCAM
+
include $(CLEAR_VARS)
LOCAL_SRC_FILES:= \
+ camera_test_surfacetexture.cpp \
camera_test_menu.cpp \
camera_test_script.cpp
@@ -16,19 +24,68 @@ LOCAL_SHARED_LIBRARIES:= \
libmedia_native \
libui \
libgui \
- libcamera_client
+ libcamera_client \
+ libEGL \
+ libGLESv2 \
+
+ifdef OMAP_ENHANCEMENT_CPCAM
+LOCAL_STATIC_LIBRARIES += \
+ libcpcamcamera_client
+endif
LOCAL_C_INCLUDES += \
frameworks/base/include/ui \
frameworks/base/include/surfaceflinger \
frameworks/base/include/camera \
- frameworks/base/include/media \
- $(PV_INCLUDES)
+ frameworks/base/include/media
LOCAL_MODULE:= camera_test
LOCAL_MODULE_TAGS:= tests
-LOCAL_CFLAGS += -Wall -fno-short-enums -O0 -g -D___ANDROID___
+LOCAL_CFLAGS += -Wall -fno-short-enums -O0 -g -D___ANDROID___ $(ANDROID_API_CFLAGS)
+
+# Add TARGET FLAG for OMAP4 and OMAP5 boards only
+# First eliminate OMAP3 and then ensure that this is not used
+# for customer boards.
+ifneq ($(TARGET_BOARD_PLATFORM),omap3)
+ ifeq ($(findstring omap, $(TARGET_BOARD_PLATFORM)),omap)
+ LOCAL_CFLAGS += -DTARGET_OMAP4
+ endif
+endif
+
+include $(BUILD_HEAPTRACKED_EXECUTABLE)
+
+
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES:= \
+ surfacetexture_test.cpp
+
+LOCAL_SHARED_LIBRARIES:= \
+ libdl \
+ libui \
+ libutils \
+ libcutils \
+ libbinder \
+ libmedia \
+ libui \
+ libgui \
+ libcamera_client \
+ libEGL \
+ libGLESv2 \
+ libion
+
+LOCAL_C_INCLUDES += \
+ frameworks/base/include/ui \
+ frameworks/base/include/surfaceflinger \
+ frameworks/base/include/camera \
+ frameworks/base/include/media \
+ hardware/ti/omap4xxx/ion
+
+LOCAL_MODULE:= surfacetexture_test
+LOCAL_MODULE_TAGS:= tests
+
+LOCAL_CFLAGS += -Wall -fno-short-enums -O0 -g -D___ANDROID___ $(ANDROID_API_CFLAGS)
ifeq ($(TARGET_BOARD_PLATFORM),omap4)
LOCAL_CFLAGS += -DTARGET_OMAP4
@@ -36,4 +93,4 @@ endif
include $(BUILD_HEAPTRACKED_EXECUTABLE)
-
+endif
diff --git a/test/CameraHal/camera_test.h b/test/CameraHal/camera_test.h
index df44833..f23ad3d 100644
--- a/test/CameraHal/camera_test.h
+++ b/test/CameraHal/camera_test.h
@@ -1,15 +1,37 @@
#ifndef CAMERA_TEST_H
#define CAMERA_TEST_H
-#define PRINTOVER(arg...) ALOGD(#arg)
-#define LOG_FUNCTION_NAME ALOGD("%d: %s() ENTER", __LINE__, __FUNCTION__);
-#define LOG_FUNCTION_NAME_EXIT ALOGD("%d: %s() EXIT", __LINE__, __FUNCTION__);
+#ifdef ANDROID_API_JB_OR_LATER
+#include <gui/Surface.h>
+#include <gui/SurfaceComposerClient.h>
+#else
+#include <surfaceflinger/Surface.h>
+#include <surfaceflinger/ISurface.h>
+#include <surfaceflinger/ISurfaceComposer.h>
+#include <surfaceflinger/ISurfaceComposerClient.h>
+#include <surfaceflinger/SurfaceComposerClient.h>
+#endif
+
+#ifdef ANDROID_API_JB_OR_LATER
+# define CAMHAL_LOGV ALOGV
+# define CAMHAL_LOGE ALOGE
+# define PRINTOVER(arg...) ALOGD(#arg)
+# define LOG_FUNCTION_NAME ALOGD("%d: %s() ENTER", __LINE__, __FUNCTION__);
+# define LOG_FUNCTION_NAME_EXIT ALOGD("%d: %s() EXIT", __LINE__, __FUNCTION__);
+#else
+# define CAMHAL_LOGV LOGV
+# define CAMHAL_LOGE LOGE
+# define PRINTOVER(arg...) LOGD(#arg)
+# define LOG_FUNCTION_NAME LOGD("%d: %s() ENTER", __LINE__, __FUNCTION__);
+# define LOG_FUNCTION_NAME_EXIT LOGD("%d: %s() EXIT", __LINE__, __FUNCTION__);
+#endif
+
#define KEY_GBCE "gbce"
#define KEY_GLBCE "glbce"
#define KEY_CAMERA "camera-index"
#define KEY_SATURATION "saturation"
#define KEY_BRIGHTNESS "brightness"
-#define KEY_BURST "burst-capture"
+#define KEY_TI_BURST "burst-capture"
#define KEY_EXPOSURE "exposure"
#define KEY_CONTRAST "contrast"
#define KEY_SHARPNESS "sharpness"
@@ -19,28 +41,50 @@
#define KEY_VNF "vnf"
#define KEY_VSTAB "vstab"
#define KEY_COMPENSATION "exposure-compensation"
+#define KEY_SENSOR_ORIENTATION "sensor-orientation"
#define KEY_IPP "ipp"
#define KEY_BUFF_STARV "buff-starvation"
#define KEY_METERING_MODE "meter-mode"
-#define KEY_AUTOCONVERGENCE "auto-convergence"
-#define KEY_MANUALCONVERGENCE_VALUES "manual-convergence-values"
-#define AUTOCONVERGENCE_MODE_MANUAL "mode-manual"
+#define KEY_AUTOCONVERGENCE "auto-convergence-mode"
+#define KEY_MANUAL_CONVERGENCE "manual-convergence"
#define KEY_EXP_BRACKETING_RANGE "exp-bracketing-range"
+#define KEY_EXP_GAIN_BRACKETING_RANGE "exp-gain-bracketing-range"
#define KEY_TEMP_BRACKETING "temporal-bracketing"
#define KEY_TEMP_BRACKETING_POS "temporal-bracketing-range-positive"
#define KEY_TEMP_BRACKETING_NEG "temporal-bracketing-range-negative"
#define KEY_MEASUREMENT "measurement"
#define KEY_S3D2D_PREVIEW_MODE "s3d2d-preview"
-#define KEY_STEREO_CAMERA "s3d-supported"
+#define KEY_S3D_PRV_FRAME_LAYOUT "s3d-prv-frame-layout"
+#define KEY_S3D_CAP_FRAME_LAYOUT "s3d-cap-frame-layout"
#define KEY_EXIF_MODEL "exif-model"
#define KEY_EXIF_MAKE "exif-make"
+#define KEY_AF_TIMEOUT "af-timeout"
#define KEY_AUTO_EXPOSURE_LOCK "auto-exposure-lock"
#define KEY_AUTO_WHITEBALANCE_LOCK "auto-whitebalance-lock"
+#define KEY_MECHANICAL_MISALIGNMENT_CORRECTION "mechanical-misalignment-correction"
+
+//TI extensions for enable/disable algos
+#define KEY_ALGO_FIXED_GAMMA "ti-algo-fixed-gamma"
+#define KEY_ALGO_NSF1 "ti-algo-nsf1"
+#define KEY_ALGO_NSF2 "ti-algo-nsf2"
+#define KEY_ALGO_SHARPENING "ti-algo-sharpening"
+#define KEY_ALGO_THREELINCOLORMAP "ti-algo-threelinecolormap"
+#define KEY_ALGO_GIC "ti-algo-gic"
+
+#define KEY_TAP_OUT_SURFACES "tap-out"
+#define KEY_TAP_IN_SURFACE "tap-in"
+
+#define BRACKETING_IDX_DEFAULT 0
+#define BRACKETING_IDX_STREAM 1
+#define BRACKETING_STREAM_BUFFERS 9
+
#define SDCARD_PATH "/sdcard/"
+#define SECONDARY_SENSOR "_SEC"
+#define S3D_SENSOR "_S3D"
#define MAX_BURST 15
#define BURST_INC 5
@@ -50,19 +94,64 @@
#define MEMORY_DUMP "procrank -u"
#define KEY_METERING_MODE "meter-mode"
-#define TEST_FOCUS_AREA "(0,0,1000,1000,300),(-1000,-1000,1000,1000,300),(0,0,0,0,0)"
+#define TEST_FOCUS_AREA "(-1000,500,-500,1000,1000),(500,500,1000,1000,1)"
+#define TEST_METERING_AREA "(-1000,500,-500,1000,1000),(500,500,1000,1000,1)"
+#define TEST_METERING_AREA_CENTER "(-250,-250,250,250,1000)"
+#define TEST_METERING_AREA_AVERAGE "(-1000,-1000,1000,1000,1000)"
#define COMPENSATION_OFFSET 20
#define DELIMITER "|"
-#define MAX_PREVIEW_SURFACE_WIDTH 800
-#define MAX_PREVIEW_SURFACE_HEIGHT 480
-
#define MODEL "camera_test"
#define MAKE "camera_test"
+#define BLAZE 0
+#define BLAZE_TABLET1 1
+#define BLAZE_TABLET2 2
+
+#define MAX_LINES 80
+#define MAX_SYMBOLS 65
+
#define ARRAY_SIZE(x) (sizeof(x) / sizeof((x)[0]))
+typedef enum test_type {
+ TEST_TYPE_REGRESSION,
+ TEST_TYPE_FUNCTIONAL,
+ TEST_TYPE_API,
+ TEST_TYPE_ERROR,
+} test_type_t;
+
+typedef enum param_ExpBracketParamType_t {
+ PARAM_EXP_BRACKET_PARAM_NONE,
+ PARAM_EXP_BRACKET_PARAM_COMP,
+ PARAM_EXP_BRACKET_PARAM_PAIR,
+} param_ExpBracketParamType;
+
+typedef enum param_ExpBracketValueType_t {
+ PARAM_EXP_BRACKET_VALUE_NONE,
+ PARAM_EXP_BRACKET_VALUE_ABS,
+ PARAM_EXP_BRACKET_VALUE_REL,
+} param_ExpBracketValueType;
+
+typedef enum param_ExpBracketApplyType_t {
+ PARAM_EXP_BRACKET_APPLY_NONE,
+ PARAM_EXP_BRACKET_APPLY_ADJUST,
+ PARAM_EXP_BRACKET_APPLY_FORCED,
+} param_ExpBracketApplyType;
+
+enum logging {
+ LOGGING_LOGCAT = 0x01,
+ LOGGING_SYSLINK = 0x02
+};
+
+typedef struct cmd_args {
+ test_type_t test_type;
+ const char *script_file_name;
+ const char *output_path;
+ int platform_id;
+ int logging;
+} cmd_args_t;
+
namespace android {
class CameraHandler: public CameraListener {
public:
@@ -78,24 +167,6 @@ namespace android {
using namespace android;
-char * get_cycle_cmd(const char *aSrc);
-int execute_functional_script(char *script);
-status_t dump_mem_status();
-int openCamera();
-int closeCamera();
-void initDefaults();
-int startPreview();
-void stopPreview();
-int startRecording();
-int stopRecording();
-int closeRecorder();
-int openRecorder();
-int configureRecorder();
-void printSupportedParams();
-char *load_script(char *config);
-int start_logging(char *config, int &pid);
-int stop_logging(int &pid);
-int execute_error_script(char *script);
typedef struct pixel_format_t {
int32_t pixelFormatDesc;
@@ -107,20 +178,15 @@ typedef struct output_format_t {
const char *desc;
} outformat;
-typedef struct preview_size_t {
- int width, height;
- const char *desc;
-} preview_size;
-
typedef struct Vcapture_size_t {
int width, height;
const char *desc;
} Vcapture_size;
-typedef struct capture_Size_t {
+typedef struct param_Array_t {
int width, height;
- const char *name;
-} capture_Size;
+ char name[60];
+} param_Array;
typedef struct video_Codecs_t {
video_encoder type;
@@ -143,20 +209,277 @@ typedef struct zoom_t {
} Zoom;
typedef struct fps_ranges_t {
- const char *range;
- const char *rangeDescription;
-} fps_ranges;
-
-typedef struct fpsConst_Ranges_t {
- const char *range;
- const char *rangeDescription;
- int constFramerate;
-} fpsConst_Ranges;
-
-typedef struct fpsConst_RangesSec_t {
- const char *range;
- const char *rangeDescription;
- int constFramerate;
-} fpsConst_RangesSec;
+ int rangeMin;
+ int rangeMax;
+} fps_Array;
+
+typedef struct buffer_info {
+ int size;
+ int width;
+ int height;
+ int format;
+ size_t offset;
+ Rect crop;
+ sp<GraphicBuffer> buf;
+} buffer_info_t;
+
+typedef struct param_NamedExpBracketList_t {
+ const char *desc;
+ param_ExpBracketParamType_t param_type;
+ param_ExpBracketValueType_t value_type;
+ param_ExpBracketApplyType_t apply_type;
+ const char *value;
+} param_NamedExpBracketList;
+
+
+char * get_cycle_cmd(const char *aSrc);
+void trim_script_cmd(char *cmd);
+int execute_functional_script(char *script);
+status_t dump_mem_status();
+int openCamera();
+int closeCamera();
+void createBufferOutputSource();
+void createBufferInputSource();
+void requestBufferSourceReset();
+void initDefaults();
+void setDefaultExpGainPreset(ShotParameters &params, int idx);
+void setSingleExpGainPreset(ShotParameters &params, int idx, int exp, int gain);
+void setExpGainPreset(ShotParameters &params, const char *input, bool force, param_ExpBracketParamType_t type, bool flush);
+void calcNextSingleExpGainPreset(int idx, int &exp, int &gain);
+void updateShotConfigFlushParam();
+int startPreview();
+void stopPreview();
+int startRecording();
+int stopRecording();
+int closeRecorder();
+int openRecorder();
+int configureRecorder();
+void printSupportedParams();
+char *load_script(const char *config);
+int start_logging(int flags, int &pid);
+int stop_logging(int flags, int &pid);
+int execute_error_script(char *script);
+int getParametersFromCapabilities();
+void getSizeParametersFromCapabilities();
+int getDefaultParameter(const char* val, int numOptions, char **array);
+int getDefaultParameterResol(const char* val, int numOptions, param_Array **array);
+int getSupportedParameters(char* parameters, int* optionsCount, char ***elem);
+int getSupportedParametersCaptureSize(char* parameters, int *optionsCount, param_Array array[], int arraySize);
+int getSupportedParametersVideoCaptureSize(char* parameters, int *optionsCount, param_Array array[], int arraySize);
+int getSupportedParametersPreviewSize(char* parameters, int *optionsCount, param_Array array[], int arraySize);
+int getSupportedParametersThumbnailSize(char* parameters, int *optionsCount, param_Array array[], int arraySize);
+int getSupportedParametersNames(int width, int height, param_Array array[], int arraySize);
+int checkSupportedParamScript(char **array, int size, char *param);
+int checkSupportedParamScriptLayout(char **array, int size, char *param,int *index);
+int checkSupportedParamScriptResol(param_Array **array, int size, char *param, int *num);
+int checkSupportedParamScriptResol(param_Array **array, int size, int w, int h, int *num);
+int getSupportedParametersfps(char* parameters, int *optionsCount);
+int checkSupportedParamScriptfpsConst(int *array, int size, char *param, int *num);
+int checkSupportedParamScriptfpsRange(char **array, int size, char *param, int *num);
+int trySetVideoStabilization(bool toggle);
+int trySetVideoNoiseFilter(bool toggle);
+int trySetAutoExposureLock(bool toggle);
+int trySetAutoWhiteBalanceLock(bool toggle);
+bool isRawPixelFormat (const char *format);
+int deleteAllocatedMemory();
+void initDefaultsSec();
+
+const char KEY_S3D_PRV_FRAME_LAYOUT_VALUES[] = "s3d-prv-frame-layout-values";
+const char KEY_S3D_CAP_FRAME_LAYOUT_VALUES[] = "s3d-cap-frame-layout-values";
+
+const char KEY_SUPPORTED_PICTURE_TOPBOTTOM_SIZES[] = "supported-picture-topbottom-size-values";
+const char KEY_SUPPORTED_PREVIEW_TOPBOTTOM_SIZES[] = "supported-preview-topbottom-size-values";
+const char KEY_SUPPORTED_PICTURE_SIDEBYSIDE_SIZES[] = "supported-picture-sidebyside-size-values";
+const char KEY_SUPPORTED_PREVIEW_SIDEBYSIDE_SIZES[] = "supported-preview-sidebyside-size-values";
+const char KEY_SUPPORTED_PICTURE_SUBSAMPLED_SIZES[] = "supported-picture-subsampled-size-values";
+const char KEY_SUPPORTED_PREVIEW_SUBSAMPLED_SIZES[] = "supported-preview-subsampled-size-values";
+
+const char KEY_AUTOCONVERGENCE_MODE[] = "auto-convergence-mode";
+const char KEY_AUTOCONVERGENCE_MODE_VALUES[] = "auto-convergence-mode-values";
+
+const char KEY_MANUAL_EXPOSURE[] = "manual-exposure";
+const char KEY_MANUAL_GAIN_ISO[] = "manual-gain-iso";
+const char KEY_MANUAL_EXPOSURE_RIGHT[] = "manual-exposure-right";
+const char KEY_MANUAL_GAIN_ISO_RIGHT[] = "manual-gain-iso-right";
+
+const char KEY_SUPPORTED_MANUAL_CONVERGENCE_MIN[] = "supported-manual-convergence-min";
+const char KEY_SUPPORTED_MANUAL_CONVERGENCE_MAX[] = "supported-manual-convergence-max";
+const char KEY_SUPPORTED_MANUAL_CONVERGENCE_STEP[] = "supported-manual-convergence-step";
+
+const char KEY_SUPPORTED_MANUAL_EXPOSURE_MIN[] = "supported-manual-exposure-min";
+const char KEY_SUPPORTED_MANUAL_EXPOSURE_MAX[] = "supported-manual-exposure-max";
+const char KEY_SUPPORTED_MANUAL_EXPOSURE_STEP[] = "supported-manual-exposure-step";
+
+const char KEY_SUPPORTED_MANUAL_GAIN_ISO_MIN[] = "supported-manual-gain-iso-min";
+const char KEY_SUPPORTED_MANUAL_GAIN_ISO_MAX[] = "supported-manual-gain-iso-max";
+const char KEY_SUPPORTED_MANUAL_GAIN_ISO_STEP[] = "supported-manual-gain-iso-step";
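+
+// Tap-out consumer used by the test menu: receives captured buffers from the
+// camera, hands each one to an internal Defer thread for processing, and can
+// re-issue the next shot of an exposure/gain bracketing sequence.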
+class BufferSourceThread : public Thread {
+public:
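+    // Worker thread: add() locks a returned GraphicBuffer and queues it; the
+    // thread loop calls handleBuffer(), unlocks the buffer and notifies the
+    // owning BufferSourceThread via onHandled().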
+ class Defer : public Thread {
+ private:
+ struct DeferContainer {
+ sp<GraphicBuffer> graphicBuffer;
+ uint8_t *mappedBuffer;
+ unsigned int count;
+ unsigned int slot;
+ Rect crop;
+ };
+ public:
+ Defer(BufferSourceThread* bst) :
+ Thread(false), mBST(bst), mExiting(false) { }
+ virtual ~Defer() {
+ Mutex::Autolock lock(mFrameQueueMutex);
+ mExiting = true;
+ while (!mDeferQueue.isEmpty()) {
+ DeferContainer defer = mDeferQueue.itemAt(0);
+ defer.graphicBuffer->unlock();
+ mDeferQueue.removeAt(0);
+ }
+ mFrameQueueCondition.signal();
+ }
+
+ virtual void requestExit() {
+ Thread::requestExit();
+
+ mExiting = true;
+ mFrameQueueCondition.signal();
+ }
+
+ virtual bool threadLoop() {
+ Mutex::Autolock lock(mFrameQueueMutex);
+ while (mDeferQueue.isEmpty() && !mExiting) {
+ mFrameQueueCondition.wait(mFrameQueueMutex);
+ }
+
+ if (!mExiting) {
+ DeferContainer defer = mDeferQueue.itemAt(0);
+ printf ("=== handling buffer %d\n", defer.count);
+ mBST->handleBuffer(defer.graphicBuffer, defer.mappedBuffer,
+ defer.count, defer.crop);
+ defer.graphicBuffer->unlock();
+ mDeferQueue.removeAt(0);
+ mBST->onHandled(defer.graphicBuffer, defer.slot);
+ return true;
+ }
+ return false;
+ }
+ void add(sp<GraphicBuffer> &gbuf, const Rect &crop,
+ unsigned int count, unsigned int slot = 0) {
+ Mutex::Autolock lock(mFrameQueueMutex);
+ DeferContainer defer;
+ defer.graphicBuffer = gbuf;
+ defer.count = count;
+ defer.slot = slot;
+ defer.crop = crop;
+ gbuf->lock(GRALLOC_USAGE_SW_READ_RARELY, (void**) &defer.mappedBuffer);
+ mDeferQueue.add(defer);
+ mFrameQueueCondition.signal();
+ }
+ private:
+ Vector<DeferContainer> mDeferQueue;
+ Mutex mFrameQueueMutex;
+ Condition mFrameQueueCondition;
+ BufferSourceThread* mBST;
+ bool mExiting;
+ };
+public:
+ BufferSourceThread(sp<Camera> camera) :
+ Thread(false), mCamera(camera),
+ mDestroying(false), mRestartCapture(false),
+ mExpBracketIdx(BRACKETING_IDX_DEFAULT), mExp(0), mGain(0), mCounter(0),
+ kReturnedBuffersMaxCapacity(6) {
+
+ mDeferThread = new Defer(this);
+ mDeferThread->run();
+ }
+
+ virtual ~BufferSourceThread() {
+ mDestroying = true;
+
+        // Drop any returned buffers that were never popped.
+        mReturnedBuffers.clear();
+ mDeferThread->requestExit();
+ mDeferThread.clear();
+ }
+
+ virtual bool threadLoop() { return false;}
+ virtual void requestExit() {};
+ virtual void setBuffer(android::ShotParameters &params) {};
+ virtual void onHandled(sp<GraphicBuffer> &g, unsigned int slot) {};
+
+ bool setStreamCapture(bool restart, int expBracketIdx) {
+ Mutex::Autolock lock(mToggleStateMutex);
+ mExpBracketIdx = expBracketIdx;
+ mRestartCapture = restart;
+ return mRestartCapture;
+ }
+
+ buffer_info_t popBuffer() {
+ buffer_info_t buffer;
+ Mutex::Autolock lock(mReturnedBuffersMutex);
+ if (!mReturnedBuffers.isEmpty()) {
+ buffer = mReturnedBuffers.itemAt(0);
+ mReturnedBuffers.removeAt(0);
+ }
+ return buffer;
+ }
+
+ bool hasBuffer() {
+ Mutex::Autolock lock(mReturnedBuffersMutex);
+ return !mReturnedBuffers.isEmpty();
+ }
+
+ void handleBuffer(sp<GraphicBuffer> &, uint8_t *, unsigned int, const Rect &);
+ Rect getCrop(sp<GraphicBuffer> &buffer, const float *mtx);
+ void showMetadata(sp<IMemory> data);
+protected:
+ void restartCapture() {
+ Mutex::Autolock lock(mToggleStateMutex);
+ if (mRestartCapture) {
+ ShotParameters shotParams;
+            calcNextSingleExpGainPreset(mExpBracketIdx, mExp, mGain);
+ setSingleExpGainPreset(shotParams, mExpBracketIdx, mExp, mGain);
+ shotParams.set(ShotParameters::KEY_BURST, 1);
+ mCamera->takePictureWithParameters(0, shotParams.flatten());
+ }
+ }
+protected:
+ sp<Camera> mCamera;
+ bool mDestroying;
+ bool mRestartCapture;
+ int mExpBracketIdx;
+ int mExp;
+ int mGain;
+ sp<Defer> mDeferThread;
+ unsigned int mCounter;
+private:
+ Vector<buffer_info_t> mReturnedBuffers;
+ Mutex mReturnedBuffersMutex;
+ Mutex mToggleStateMutex;
+ const unsigned int kReturnedBuffersMaxCapacity;
+};
+
+
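+// Tap-in producer counterpart: owns the ANativeWindow used to queue buffers
+// back to the camera and a BufferSourceThread for the matching tap-out stream.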
+class BufferSourceInput : public RefBase {
+public:
+ BufferSourceInput(sp<Camera> camera) : mCamera(camera) {
+ mTapOut = new BufferSourceThread(camera);
+ mTapOut->run();
+ }
+
+ virtual ~BufferSourceInput() {
+ mTapOut->requestExit();
+ mTapOut.clear();
+ }
+
+ virtual void setInput(buffer_info_t, const char *format, ShotParameters &params);
+
+protected:
+ sp<BufferSourceThread> mTapOut;
+ sp<ANativeWindow> mWindowTapIn;
+ sp<Camera> mCamera;
+};
#endif
diff --git a/test/CameraHal/camera_test_bufferqueue.h b/test/CameraHal/camera_test_bufferqueue.h
new file mode 100644
index 0000000..a7a2ef5
--- /dev/null
+++ b/test/CameraHal/camera_test_bufferqueue.h
@@ -0,0 +1,156 @@
+#ifndef CAMERA_TEST_BUFFER_QUEUE_H
+#define CAMERA_TEST_BUFFER_QUEUE_H
+
+#ifdef ANDROID_API_JB_OR_LATER
+
+#include <gui/Surface.h>
+#include <gui/SurfaceTexture.h>
+#include <gui/SurfaceComposerClient.h>
+
+#include "camera_test.h"
+
+#define CAMHAL_LOGV ALOGV
+#define CAMHAL_LOGE ALOGE
+#define PRINTOVER(arg...) ALOGD(#arg)
+#define LOG_FUNCTION_NAME ALOGD("%d: %s() ENTER", __LINE__, __FUNCTION__);
+#define LOG_FUNCTION_NAME_EXIT ALOGD("%d: %s() EXIT", __LINE__, __FUNCTION__);
+
+using namespace android;
+
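+// Proxy consumer listener: counts frames signalled on the BufferQueue so the
+// source thread can block in waitForFrame() until a buffer has been queued.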
+class FrameConsumer : public BufferQueue::ProxyConsumerListener {
+public:
+ FrameConsumer():
+ BufferQueue::ProxyConsumerListener(NULL), mPendingFrames(0) {
+ }
+
+ virtual ~FrameConsumer() {
+ onFrameAvailable();
+ }
+
+ void waitForFrame() {
+ Mutex::Autolock lock(mMutex);
+ while (mPendingFrames == 0) {
+ mCondition.wait(mMutex);
+ }
+ mPendingFrames--;
+ }
+
+ virtual void onFrameAvailable() {
+ Mutex::Autolock lock(mMutex);
+ mPendingFrames++;
+ mCondition.signal();
+ }
+
+ virtual void onBuffersReleased() {}
+
+ int mPendingFrames;
+ Mutex mMutex;
+ Condition mCondition;
+};
+
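+// BufferQueue-backed tap-out path (JB and later): acquires buffers as the
+// FrameConsumer signals them, caches each GraphicBuffer per slot (BufferQueue
+// only returns it on the first acquire) and defers handling to Defer.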
+class BQ_BufferSourceThread : public BufferSourceThread {
+public:
+ BQ_BufferSourceThread(int tex_id, sp<Camera> camera) : BufferSourceThread(camera) {
+ mBufferQueue = new BufferQueue(true, 1);
+ mFW = new FrameConsumer();
+ mBufferQueue->setSynchronousMode(true);
+ mBufferQueue->consumerConnect(mFW);
+ mCamera->setBufferSource(NULL, mBufferQueue);
+ }
+ virtual ~BQ_BufferSourceThread() {
+ mCamera->releaseBufferSource(NULL, mBufferQueue);
+ }
+
+ virtual bool threadLoop() {
+ sp<GraphicBuffer> graphic_buffer;
+ BufferQueue::BufferItem item;
+
+ mFW->waitForFrame();
+ if (!mDestroying) {
+ status_t status;
+ status = mBufferQueue->acquireBuffer(&item);
+ if (status == BufferQueue::NO_BUFFER_AVAILABLE) {
+ // no buffer to handle, return and we'll try again
+ return true;
+ }
+ printf("=== Metadata for buffer %d ===\n", mCounter);
+ if (item.mGraphicBuffer != NULL) {
+ unsigned int slot = item.mBuf;
+ // For whatever reason, BufferQueue only gives us the graphic buffer
+ // the first time we acquire it. We are expected to hold a reference to
+ // it there after...
+ mBufferSlots[slot].mGraphicBuffer = item.mGraphicBuffer;
+ mBufferSlots[slot].mCrop = item.mCrop;
+ }
+ showMetadata(item.mMetadata);
+ printf("\n");
+ graphic_buffer = mBufferSlots[item.mBuf].mGraphicBuffer;
+ mDeferThread->add(graphic_buffer, item.mCrop, mCounter++, item.mBuf);
+ restartCapture();
+ return true;
+ }
+ return false;
+ }
+
+ virtual void requestExit() {
+ Thread::requestExit();
+
+ mDestroying = true;
+ mFW->onFrameAvailable();
+ }
+
+ virtual void setBuffer(android::ShotParameters &params) {
+ {
+ String8 id = mBufferQueue->getId();
+
+ if (!id.isEmpty()) {
+ params.set(KEY_TAP_OUT_SURFACES, id);
+ } else {
+ params.remove(KEY_TAP_OUT_SURFACES);
+ }
+ }
+ }
+
+ virtual void onHandled(sp<GraphicBuffer> &gbuf, unsigned int slot) {
+ mBufferQueue->releaseBuffer(slot, EGL_NO_DISPLAY, EGL_NO_SYNC_KHR);
+ }
+
+private:
+ sp<BufferQueue> mBufferQueue;
+ sp<FrameConsumer> mFW;
+ BufferQueue::BufferItem mBufferSlots[BufferQueue::NUM_BUFFER_SLOTS];
+};
+
+class BQ_BufferSourceInput : public BufferSourceInput {
+public:
+ BQ_BufferSourceInput(int tex_id, sp<Camera> camera) :
+ BufferSourceInput(camera), mTexId(tex_id) {
+ mBufferQueue = new BufferQueue(true, 1);
+ sp<ISurfaceTexture> surfaceTexture = mBufferQueue;
+ mWindowTapIn = new SurfaceTextureClient(surfaceTexture);
+ mCamera->setBufferSource(mBufferQueue, NULL);
+ }
+ virtual ~BQ_BufferSourceInput() {
+ mCamera->releaseBufferSource(mBufferQueue, NULL);
+ }
+
+ virtual void setInput(buffer_info_t bufinfo, const char *format, android::ShotParameters &params) {
+ mBufferQueue->setDefaultBufferSize(bufinfo.width, bufinfo.height);
+ BufferSourceInput::setInput(bufinfo, format, params);
+ {
+ String8 id = mBufferQueue->getId();
+
+ if (!id.isEmpty()) {
+ params.set(KEY_TAP_IN_SURFACE, id);
+ } else {
+ params.remove(KEY_TAP_IN_SURFACE);
+ }
+ }
+ }
+
+private:
+ sp<BufferQueue> mBufferQueue;
+ int mTexId;
+};
+#endif // ANDROID_API_JB_OR_LATER
+#endif // CAMERA_TEST_BUFFER_QUEUE_H
diff --git a/test/CameraHal/camera_test_menu.cpp b/test/CameraHal/camera_test_menu.cpp
index bd956a6..8d75710 100644
--- a/test/CameraHal/camera_test_menu.cpp
+++ b/test/CameraHal/camera_test_menu.cpp
@@ -6,9 +6,15 @@
#include <time.h>
#include <semaphore.h>
#include <pthread.h>
+#include <string.h>
+#include <assert.h>
+#include <climits>
-#include <gui/Surface.h>
-#include <gui/SurfaceComposerClient.h>
+#include <ui/DisplayInfo.h>
+
+#include <gui/SurfaceTexture.h>
+#include <gui/SurfaceTextureClient.h>
+#include <ui/GraphicBuffer.h>
#include <camera/Camera.h>
#include <camera/ICamera.h>
@@ -19,52 +25,70 @@
#include <binder/IServiceManager.h>
#include <cutils/properties.h>
#include <camera/CameraParameters.h>
+#include <camera/ShotParameters.h>
#include <system/audio.h>
#include <system/camera.h>
+#include <binder/IMemory.h>
+#include <binder/MemoryBase.h>
+#include <binder/MemoryHeapBase.h>
+
#include <cutils/memory.h>
#include <utils/Log.h>
#include <sys/wait.h>
#include "camera_test.h"
+#include "camera_test_surfacetexture.h"
+#ifdef ANDROID_API_JB_OR_LATER
+#include "camera_test_bufferqueue.h"
+#endif
using namespace android;
int camera_index = 0;
int print_menu;
+
sp<Camera> camera;
sp<MediaRecorder> recorder;
sp<SurfaceComposerClient> client;
sp<SurfaceControl> surfaceControl;
sp<Surface> previewSurface;
+sp<BufferSourceThread> bufferSourceOutputThread;
+sp<BufferSourceInput> bufferSourceInput;
+
CameraParameters params;
+ShotParameters shotParams;
float compensation = 0.0;
double latitude = 0.0;
double longitude = 0.0;
-double degree_by_step = 17.5609756;//..0975609756097;
+double degree_by_step = 17.5609756;
double altitude = 0.0;
int awb_mode = 0;
int effects_mode = 0;
int scene_mode = 0;
int caf_mode = 0;
-int vnf_mode = 0;
-int vstab_mode = 0;
-
int tempBracketRange = 1;
int tempBracketIdx = 0;
int measurementIdx = 0;
-int expBracketIdx = 0;
+int expBracketIdx = BRACKETING_IDX_DEFAULT;
int AutoConvergenceModeIDX = 0;
-int ManualConvergenceValuesIDX = 0;
-int ManualConvergenceDefaultValueIDX = 2;
int gbceIDX = 0;
int glbceIDX = 0;
int rotation = 0;
+int previewRotation = 0;
bool reSizePreview = true;
bool hardwareActive = false;
bool recordingMode = false;
bool previewRunning = false;
+bool vstabtoggle = false;
+bool AutoExposureLocktoggle = false;
+bool AutoWhiteBalanceLocktoggle = false;
+bool vnftoggle = false;
+bool faceDetectToggle = false;
+bool metaDataToggle = false;
+bool shotConfigFlush = false;
+bool streamCapture = false;
int saturation = 0;
int zoomIDX = 0;
int videoCodecIDX = 0;
@@ -73,6 +97,7 @@ int outputFormatIDX = 0;
int contrast = 0;
int brightness = 0;
unsigned int burst = 0;
+unsigned int burstCount = 0;
int sharpness = 0;
int iso_mode = 0;
int capture_mode = 0;
@@ -80,6 +105,7 @@ int exposure_mode = 0;
int ippIDX = 0;
int ippIDX_old = 0;
int previewFormat = 0;
+int pictureFormat = 0;
int jpegQuality = 85;
int thumbQuality = 85;
int flashIdx = 0;
@@ -88,19 +114,164 @@ timeval autofocus_start, picture_start;
char script_name[80];
int prevcnt = 0;
int videoFd = -1;
-int elockidx = 0;
-int wblockidx = 0;
-
+int afTimeoutIdx = 0;
+int platformID = BLAZE_TABLET2;
+int numAntibanding = 0;
+int numEffects = 0;
+int numcaptureSize = 0;
+int nummodevalues = 0;
+int numVcaptureSize = 0;
+int numpreviewSize = 0;
+int numthumbnailSize = 0;
+int numawb = 0;
+int numscene = 0;
+int numfocus = 0;
+int numflash = 0;
+int numExposureMode = 0;
+int numisoMode = 0;
+int antibanding_mode = 0;
+int effectsStrLenght = 0;
+int numfps = 0;
+int numpreviewFormat = 0;
+int numpictureFormat = 0;
+int *constFramerate = 0;
+int rangeCnt = 0;
+int constCnt = 0;
+int focus_mode = 0;
+int thumbSizeIDX = 0;
+int previewSizeIDX = 1;
+int captureSizeIDX = 0;
+int VcaptureSizeIDX = 1;
+int frameRateIDX = 0;
+char *str;
+char *param;
+char *antibandStr = 0;
+char *exposureModeStr = 0;
+char *isoModeStr = 0;
+char *effectssStr = 0;
+char *captureSizeStr = 0;
+char *modevaluesstr = 0;
+char *videosnapshotstr = 0;
+char *autoconvergencestr = 0;
+char *VcaptureSizeStr = 0;
+char *thumbnailSizeStr = 0;
+char *vstabstr = 0;
+char *vnfstr = 0;
+char *zoomstr = 0;
+char *smoothzoomstr = 0;
+char *AutoExposureLockstr = 0;
+char *AutoWhiteBalanceLockstr = 0;
+char *previewSizeStr = 0;
+char *awbStr = 0;
+char *sceneStr = 0;
+char *focusStr = 0;
+char *flashStr = 0;
+char *fpsstr = 0;
+char *previewFormatStr = 0;
+char *pictureFormatStr = 0;
+char **modevalues = 0;
+char **elem;
+char **antiband = 0;
+char **effectss = 0;
+char **awb = 0;
+char **scene = 0;
+char **focus = 0;
+char **flash = 0;
+char **exposureMode = 0;
+char **isoMode = 0;
+char **previewFormatArray = 0;
+char **pictureFormatArray = 0;
+char **fps_const_str = 0;
+char **rangeDescription = 0;
+char **fps_range_str = 0;
+param_Array ** capture_Array = 0;
+param_Array ** Vcapture_Array = 0;
+param_Array ** preview_Array = 0;
+param_Array ** thumbnail_Array = 0;
+fps_Array * fpsArray = 0;
+
+int enableMisalignmentCorrectionIdx = 0;
+
+char **autoconvergencemode = 0;
+int numAutoConvergence = 0;
+const char MeteringAreas[] = "(-656,-671,188,454,1)";
+
+char **stereoLayout;
+int numLay = 0;
+
+char **stereoCapLayout;
+int numCLay = 0;
+
+int stereoLayoutIDX = 1;
+int stereoCapLayoutIDX = 0;
+
+char *layoutstr =0;
+char *capturelayoutstr =0;
+
+char output_dir_path[256];
+char videos_dir_path[256 + 8];
+char images_dir_path[256 + 8];
+
+const char *cameras[] = {"Primary Camera", "Secondary Camera 1", "Stereo Camera"};
+const char *measurement[] = {"disable", "enable"};
-char dir_path[80] = SDCARD_PATH;
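+// Exposure bracketing presets selectable from the menu. Each entry pairs a
+// description with the raw bracketing string handed to ShotParameters; values
+// suffixed with 'F' belong to the "forced" apply type, plain values to the
+// relative/absolute adjust types.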
+param_NamedExpBracketList_t expBracketing[] = {
+ {
+ "Disabled",
+ PARAM_EXP_BRACKET_PARAM_NONE,
+ PARAM_EXP_BRACKET_VALUE_NONE,
+ PARAM_EXP_BRACKET_APPLY_NONE,
+ "0"
+ },
+ {
+ "Relative exposure compensation",
+ PARAM_EXP_BRACKET_PARAM_COMP,
+ PARAM_EXP_BRACKET_VALUE_REL,
+ PARAM_EXP_BRACKET_APPLY_ADJUST,
+ "-300,-150,0,150,300,150,0,-150,-300"
+ },
+ {
+ "Relative exposure compensation (forced)",
+ PARAM_EXP_BRACKET_PARAM_COMP,
+ PARAM_EXP_BRACKET_VALUE_REL,
+ PARAM_EXP_BRACKET_APPLY_FORCED,
+ "-300F,-150F,0F,150F,300F,150F,0F,-150F,-300F"
+ },
+ {
+ "Absolute exposure and gain",
+ PARAM_EXP_BRACKET_PARAM_PAIR,
+ PARAM_EXP_BRACKET_VALUE_ABS,
+ PARAM_EXP_BRACKET_APPLY_ADJUST,
+ "(33000,10),(0,70),(33000,100),(0,130),(33000,160),(0,180),(33000,200),(0,130),(33000,200)"
+ },
+ {
+ "Absolute exposure and gain (forced)",
+ PARAM_EXP_BRACKET_PARAM_PAIR,
+ PARAM_EXP_BRACKET_VALUE_ABS,
+ PARAM_EXP_BRACKET_APPLY_FORCED,
+ "(33000,10)F,(0,70)F,(33000,100)F,(0,130)F,(33000,160)F,(0,180)F,(33000,200)F,(0,130)F,(33000,200)F"
+ },
+ {
+ "Relative exposure and gain",
+ PARAM_EXP_BRACKET_PARAM_PAIR,
+ PARAM_EXP_BRACKET_VALUE_REL,
+ PARAM_EXP_BRACKET_APPLY_ADJUST,
+ "(-300,-100),(-300,+0),(-100, +0),(-100,+100),(+0,+0),(+100,-100),(+100,+0),(+300,+0),(+300,+100)"
+ },
+ {
+ "Relative exposure and gain (forced)",
+ PARAM_EXP_BRACKET_PARAM_PAIR,
+ PARAM_EXP_BRACKET_VALUE_REL,
+ PARAM_EXP_BRACKET_APPLY_FORCED,
+ "(-300,-100)F,(-300,+0)F,(-100, +0)F,(-100,+100)F,(+0,+0)F,(+100,-100)F,(+100,+0)F,(+300,+0)F,(+300,+100)F"
+ },
+};
-const char *cameras[] = {"Primary Camera", "Secondary Camera 1", "Stereo Camera", "USB Camera", "Fake Camera"};
-const char *measurement[] = {"disable", "enable"};
-const char *expBracketing[] = {"disable", "enable"};
-const char *expBracketingRange[] = {"", "-30,0,30,0,-30"};
-const char *tempBracketing[] = {"disable", "enable"};
+const char *tempBracketing[] = {"false", "true"};
const char *faceDetection[] = {"disable", "enable"};
-const char *lock[] = {"false", "true"};
+const char *afTimeout[] = {"enable", "disable" };
+
+const char *misalignmentCorrection[] = {"enable", "disable" };
#if defined(OMAP_ENHANCEMENT) && defined(TARGET_OMAP3)
const char *ipp_mode[] = { "off", "Chroma Suppression", "Edge Enhancement" };
@@ -108,111 +279,26 @@ const char *ipp_mode[] = { "off", "Chroma Suppression", "Edge Enhancement" };
const char *ipp_mode[] = { "off", "ldc", "nsf", "ldc-nsf" };
#endif
-const char *iso [] = { "auto", "100", "200", "400", "800", "1200", "1600"};
-
-const char *effects [] = {
-#if defined(OMAP_ENHANCEMENT) && defined(TARGET_OMAP3)
- "none",
- "mono",
- "negative",
- "solarize",
- "sepia",
- "whiteboard",
- "blackboard",
- "cool",
- "emboss"
-#else
- "none",
- "mono",
- "negative",
- "solarize",
- "sepia",
- "vivid",
- "whiteboard",
- "blackboard",
- "cool",
- "emboss",
- "blackwhite",
- "aqua",
- "posterize"
-#endif
-};
-
-const char CameraParameters::FLASH_MODE_OFF[] = "off";
-const char CameraParameters::FLASH_MODE_AUTO[] = "auto";
-const char CameraParameters::FLASH_MODE_ON[] = "on";
-const char CameraParameters::FLASH_MODE_RED_EYE[] = "red-eye";
-const char CameraParameters::FLASH_MODE_TORCH[] = "torch";
-
-const char *flashModes[] = {
- "off",
- "auto",
- "on",
- "red-eye",
- "torch",
- "fill-in",
-};
const char *caf [] = { "Off", "On" };
-const char *vnf [] = { "Off", "On" };
-const char *vstab [] = { "Off", "On" };
-
-const char *scene [] = {
-#if defined(OMAP_ENHANCEMENT) && defined(TARGET_OMAP3)
- "auto",
- "portrait",
- "landscape",
- "night",
- "night-portrait",
- "fireworks",
- "snow",
- "action",
-#else
- "auto",
- "portrait",
- "landscape",
- "night",
- "night-portrait",
- "night-indoor",
- "fireworks",
- "sport",
- "cine",
- "beach",
- "snow",
- "mood",
- "closeup",
- "underwater",
- "document",
- "barcode",
- "oldfilm",
- "candlelight",
- "party",
- "steadyphoto",
- "sunset",
- "action",
- "theatre"
-#endif
-};
-const char *strawb_mode[] = {
- "auto",
- "incandescent",
- "fluorescent",
- "daylight",
- "horizon",
- "shadow",
- "tungsten",
- "shade",
- "twilight",
- "warm-fluorescent",
- "facepriority",
- "sunset"
-};
-
-size_t length_cam = ARRAY_SIZE(cameras);
-
-
-preview_size previewSize [] = {
+int numCamera = 0;
+bool stereoMode = false;
+
+int manualExp = 0;
+int manualExpMin = 0;
+int manualExpMax = 0;
+int manualExpStep = 0;
+int manualGain = 0;
+int manualGainMin = 0;
+int manualGainMax = 0;
+int manualGainStep = 0;
+int manualConv = 0;
+int manualConvMin = 0;
+int manualConvMax = 0;
+int manualConvStep = 0;
+
+param_Array previewSize [] = {
{ 0, 0, "NULL"},
{ 128, 96, "SQCIF" },
{ 176, 144, "QCIF" },
@@ -228,20 +314,57 @@ preview_size previewSize [] = {
{ 992, 560, "WVGA4"},
{ 1280, 720, "HD" },
{ 1920, 1080, "FULLHD"},
+ { 240, 160,"240x160"},
+ { 768, 576, "768x576" },
+ { 960, 720, "960x720"},
+ { 256, 96,"SQCIF"},// stereo
+ { 128, 192, "SQCIF"},
+ { 352, 144,"QCIF"},
+ { 176, 288, "QCIF"},
+ { 480, 160, "240x160"},
+ { 240, 320, "240x160"},
+ { 704, 288, "CIF"},
+ { 352, 576, "CIF"},
+ { 640, 240,"QVGA"},
+ { 320, 480, "QVGA"},
+ { 1280, 480,"VGA"},
+ { 640, 960, "VGA"},
+ { 1536, 576,"768x576"},
+ { 768, 1152, "768x576"},
+ { 1440, 480,"NTSC"},
+ { 720, 960,"NTSC"},
+ { 1440, 576, "PAL"},
+ { 720, 1152, "PAL"},
+ { 1600, 480, "WVGA"},
+ { 800, 960,"WVGA"},
+ { 2560, 720, "HD"},
+ { 1280, 1440, "HD"}
};
size_t length_previewSize = ARRAY_SIZE(previewSize);
-Vcapture_size VcaptureSize [] = {
+param_Array thumbnailSize [] = {
+ { 0, 0, "NULL"},
{ 128, 96, "SQCIF" },
{ 176, 144, "QCIF" },
{ 352, 288, "CIF" },
{ 320, 240, "QVGA" },
+ { 352, 288, "CIF" },
{ 640, 480, "VGA" },
- { 704, 480, "TVNTSC" },
- { 704, 576, "TVPAL" },
- { 720, 480, "D1NTSC" },
- { 720, 576, "D1PAL" },
+};
+
+size_t length_thumbnailSize = ARRAY_SIZE(thumbnailSize);
+
+param_Array VcaptureSize [] = {
+ { 0, 0, "NULL"},
+ { 128, 96, "SQCIF" },
+ { 176, 144, "QCIF" },
+ { 352, 288, "CIF" },
+ { 320, 240, "QVGA" },
+ { 352, 288, "CIF" },
+ { 640, 480, "VGA" },
+ { 720, 480, "NTSC" },
+ { 720, 576, "PAL" },
{ 800, 480, "WVGA" },
#if defined(OMAP_ENHANCEMENT) && defined(TARGET_OMAP3)
{ 848, 480, "WVGA2"},
@@ -250,28 +373,66 @@ Vcapture_size VcaptureSize [] = {
#endif
{ 1280, 720, "HD" },
{ 1920, 1080, "FULLHD"},
+ { 240, 160,"240x160"},
+ { 768, 576, "768x576" },
+ { 960, 720, "960x720"},
+ { 256, 96,"SQCIF"},// stereo
+ { 128, 192, "SQCIF"},
+ { 352, 144,"QCIF"},
+ { 176, 288, "QCIF"},
+ { 480, 160, "240x160"},
+ { 240, 320, "240x160"},
+ { 704, 288, "CIF"},
+ { 352, 576, "CIF"},
+ { 640, 240,"QVGA"},
+ { 320, 480, "QVGA"},
+ { 1280, 480,"VGA"},
+ { 640, 960, "VGA"},
+ { 1536, 576,"768x576"},
+ { 768, 1152, "768x576"},
+ { 1440, 480,"NTSC"},
+ { 720, 960,"NTSC"},
+ { 1440, 576, "PAL"},
+ { 720, 1152, "PAL"},
+ { 1600, 480, "WVGA"},
+ { 800, 960,"WVGA"},
+ { 2560, 720, "HD"},
+ { 1280, 1440, "HD"}
};
size_t lenght_Vcapture_size = ARRAY_SIZE(VcaptureSize);
-capture_Size captureSize[] = {
+param_Array captureSize[] = {
{ 320, 240, "QVGA" },
{ 640, 480, "VGA" },
{ 800, 600, "SVGA" },
{ 1152, 864, "1MP" },
{ 1280, 1024, "1.3MP" },
{ 1600, 1200, "2MP" },
- { 2048, 1536, "3MP" },
+ { 2016, 1512, "3MP" },
{ 2592, 1944, "5MP" },
{ 2608, 1960, "5MP" },
{ 3264, 2448, "8MP" },
{ 3648, 2736, "10MP"},
{ 4032, 3024, "12MP"},
+ { 640, 240, "QVGA"}, //stereo
+ { 320, 480, "QVGA"},
+ { 1280, 480, "VGA"},
+ { 640, 960, "VGA"},
+ { 2560, 960, "1280x960"},
+ { 1280, 1920, "1280x960"},
+ { 2304, 864, "1MP"},
+ { 1152, 1728, "1MP"},
+ { 2560, 1024, "1.3MP"},
+ { 1280, 2048, "1.3MP"},
+ { 3200, 1200, "2MP"},
+ { 1600, 2400, "2MP"},
+ { 4096, 1536, "3MP"},
+ { 2048, 3072, "3MP"}
};
size_t length_capture_Size = ARRAY_SIZE(captureSize);
-
outformat outputFormat[] = {
{ OUTPUT_FORMAT_THREE_GPP, "3gp" },
{ OUTPUT_FORMAT_MPEG_4, "mp4" },
@@ -320,9 +481,9 @@ size_t length_V_bitRate = ARRAY_SIZE(VbitRate);
Zoom zoom[] = {
{ 0, "1x" },
- { 12, "1.5x"},
+ { 12, "1.5x"},
{ 20, "2x" },
- { 27, "2.5x"},
+ { 28, "2.5x"},
{ 32, "3x" },
{ 36, "3.5x"},
{ 40, "4x" },
@@ -331,87 +492,16 @@ Zoom zoom[] = {
size_t length_Zoom = ARRAY_SIZE(zoom);
-fps_ranges fpsRanges[] = {
- { "5000,30000", "[5:30]" },
- { "5000,10000", "[5:10]" },
- { "5000,15000", "[5:15]" },
- { "5000,20000", "[5:20]" },
-};
-
-size_t length_fps_ranges = ARRAY_SIZE(fpsRanges);
-
-fpsConst_Ranges fpsConstRanges[] = {
- { "5000,5000", "[5:5]", 5 },
- { "10000,10000", "[10:10]", 10 },
- { "15000,15000", "[15:15]", 15 },
- { "20000,20000", "[20:20]", 20 },
- { "25000,25000", "[25:25]", 25 },
- { "30000,30000", "[30:30]", 30 },
-};
-
-size_t length_fpsConst_Ranges = ARRAY_SIZE(fpsConstRanges);
-
-fpsConst_RangesSec fpsConstRangesSec[] = {
- { "5000,5000", "[5:5]", 5 },
- { "10000,10000", "[10:10]", 10 },
- { "15000,15000", "[15:15]", 15 },
- { "20000,20000", "[20:20]", 20 },
- { "25000,25000", "[25:25]", 25 },
- { "27000,27000", "[27:27]", 27 },
-};
-
-size_t length_fpsConst_RangesSec = ARRAY_SIZE(fpsConstRangesSec);
-
-const char *antibanding[] = {
- "off",
- "auto",
- "50hz",
- "60hz",
-};
-int antibanding_mode = 0;
-const char *focus[] = {
- "auto",
- "infinity",
- "macro",
- "continuous-video",
- "extended",
- "portrait",
-};
-int focus_mode = 0;
pixel_format pixelformat[] = {
{ HAL_PIXEL_FORMAT_YCbCr_422_I, CameraParameters::PIXEL_FORMAT_YUV422I },
{ HAL_PIXEL_FORMAT_YCrCb_420_SP, CameraParameters::PIXEL_FORMAT_YUV420SP },
{ HAL_PIXEL_FORMAT_RGB_565, CameraParameters::PIXEL_FORMAT_RGB565 },
{ -1, CameraParameters::PIXEL_FORMAT_JPEG },
- { -1, "raw" },
+ { -1, CameraParameters::PIXEL_FORMAT_BAYER_RGGB },
};
-const char *codingformat[] = {"yuv422i-yuyv", "yuv420sp", "rgb565", "jpeg", "raw", "jps", "mpo", "raw+jpeg", "raw+mpo"};
const char *gbce[] = {"disable", "enable"};
-int pictureFormat = 3; // jpeg
-const char *exposure[] = {"auto", "macro", "portrait", "landscape", "sports", "night", "night-portrait", "backlighting", "manual"};
-const char *capture[] = { "high-performance", "high-quality", "video-mode" };
-const char *autoconvergencemode[] = { "mode-disable", "mode-frame", "mode-center", "mode-fft", "mode-manual" };
-const char *manualconvergencevalues[] = { "-100", "-50", "-30", "-25", "0", "25", "50", "100" };
-
-const struct {
- int fps;
-} frameRate[] = {
- {0},
- {5},
- {10},
- {15},
- {20},
- {25},
- {30}
-};
-int thumbSizeIDX = 3;
-int previewSizeIDX = ARRAY_SIZE(previewSize) - 1;
-int captureSizeIDX = ARRAY_SIZE(captureSize) - 1;
-int frameRateIDX = ARRAY_SIZE(fpsConstRanges) - 1;
-int frameRateIDXSec = ARRAY_SIZE(fpsConstRangesSec) - 1;
-int VcaptureSizeIDX = ARRAY_SIZE(VcaptureSize) - 1;
int VbitRateIDX = ARRAY_SIZE(VbitRate) - 1;
static unsigned int recording_counter = 1;
@@ -425,10 +515,29 @@ const char *metering[] = {
"average",
};
int meter_mode = 0;
-bool bLogSysLinkTrace = true;
bool stressTest = false;
bool stopScript = false;
int restartCount = 0;
+bool firstTime = true;
+bool firstTimeStereo = true;
+
+//TI extensions for enable/disable algos
+const char *algoFixedGamma[] = {CameraParameters::FALSE, CameraParameters::TRUE};
+const char *algoNSF1[] = {CameraParameters::FALSE, CameraParameters::TRUE};
+const char *algoNSF2[] = {CameraParameters::FALSE, CameraParameters::TRUE};
+const char *algoSharpening[] = {CameraParameters::FALSE, CameraParameters::TRUE};
+const char *algoThreeLinColorMap[] = {CameraParameters::FALSE, CameraParameters::TRUE};
+const char *algoGIC[] = {CameraParameters::FALSE, CameraParameters::TRUE};
+int algoFixedGammaIDX = 1;
+int algoNSF1IDX = 1;
+int algoNSF2IDX = 1;
+int algoSharpeningIDX = 1;
+int algoThreeLinColorMapIDX = 1;
+int algoGICIDX = 1;
+
+/** Buffer source reset */
+bool bufferSourceInputReset = false;
+bool bufferSourceOutputReset = false;
/** Calculate delay from a reference time */
unsigned long long timeval_delay(const timeval *ref) {
@@ -451,18 +560,20 @@ void my_raw_callback(const sp<IMemory>& mem) {
unsigned char *buff = NULL;
int size;
int fd = -1;
- char fn[256];
+ char fn[384];
LOG_FUNCTION_NAME;
if (mem == NULL)
goto out;
- //Start preview after capture.
- camera->startPreview();
+ if( strcmp(modevalues[capture_mode], "cp-cam") ) {
+ //Start preview after capture.
+ camera->startPreview();
+ }
fn[0] = 0;
- sprintf(fn, "/sdcard/img%03d.raw", counter);
+ sprintf(fn, "%s/img%03d.raw", images_dir_path, counter);
fd = open(fn, O_CREAT | O_WRONLY | O_TRUNC, 0777);
if (fd < 0)
@@ -498,7 +609,7 @@ void saveFile(const sp<IMemory>& mem) {
unsigned char *buff = NULL;
int size;
int fd = -1;
- char fn[256];
+ char fn[384];
LOG_FUNCTION_NAME;
@@ -506,22 +617,22 @@ void saveFile(const sp<IMemory>& mem) {
goto out;
fn[0] = 0;
- sprintf(fn, "/sdcard/preview%03d.yuv", counter);
+ sprintf(fn, "%s/preview%03d.yuv", images_dir_path, counter);
fd = open(fn, O_CREAT | O_WRONLY | O_TRUNC, 0777);
if(fd < 0) {
- ALOGE("Unable to open file %s: %s", fn, strerror(fd));
+ CAMHAL_LOGE("Unable to open file %s: %s", fn, strerror(fd));
goto out;
}
size = mem->size();
if (size <= 0) {
- ALOGE("IMemory object is of zero size");
+ CAMHAL_LOGE("IMemory object is of zero size");
goto out;
}
buff = (unsigned char *)mem->pointer();
if (!buff) {
- ALOGE("Buffer pointer is invalid");
+ CAMHAL_LOGE("Buffer pointer is invalid");
goto out;
}
@@ -584,34 +695,42 @@ void my_jpeg_callback(const sp<IMemory>& mem) {
unsigned char *buff = NULL;
int size;
int fd = -1;
- char fn[256];
+ char fn[384];
LOG_FUNCTION_NAME;
- //Start preview after capture.
- camera->startPreview();
+ if( strcmp(modevalues[capture_mode], "cp-cam")) {
+ if(burstCount > 1) {
+ burstCount --;
+ // Restart preview if taking a single capture
+ // or after the last iteration of burstCount
+ } else if(burstCount == 0 || burstCount == 1) {
+ camera->startPreview();
+ burstCount = burst;
+ }
+ }
if (mem == NULL)
goto out;
fn[0] = 0;
- sprintf(fn, "%s/img%03d.jpg", dir_path,counter);
+ sprintf(fn, "%s/img%03d.jpg", images_dir_path, counter);
fd = open(fn, O_CREAT | O_WRONLY | O_TRUNC, 0777);
if(fd < 0) {
- ALOGE("Unable to open file %s: %s", fn, strerror(fd));
+ CAMHAL_LOGE("Unable to open file %s: %s", fn, strerror(fd));
goto out;
}
size = mem->size();
if (size <= 0) {
- ALOGE("IMemory object is of zero size");
+ CAMHAL_LOGE("IMemory object is of zero size");
goto out;
}
buff = (unsigned char *)mem->pointer();
if (!buff) {
- ALOGE("Buffer pointer is invalid");
+ CAMHAL_LOGE("Buffer pointer is invalid");
goto out;
}
@@ -685,12 +804,17 @@ void CameraHandler::notify(int32_t msgType, int32_t ext1, int32_t ext2) {
void CameraHandler::postData(int32_t msgType,
const sp<IMemory>& dataPtr,
camera_frame_metadata_t *metadata) {
+ int32_t msgMask;
printf("Data cb: %d\n", msgType);
if ( msgType & CAMERA_MSG_PREVIEW_FRAME )
my_preview_callback(dataPtr);
- if ( msgType & CAMERA_MSG_RAW_IMAGE ) {
+ msgMask = CAMERA_MSG_RAW_IMAGE;
+#ifdef OMAP_ENHANCEMENT_BURST_CAPTURE
+ msgMask |= CAMERA_MSG_RAW_BURST;
+#endif
+ if ( msgType & msgMask) {
printf("RAW done in %llu us\n", timeval_delay(&picture_start));
my_raw_callback(dataPtr);
}
@@ -706,16 +830,21 @@ void CameraHandler::postData(int32_t msgType,
if ( ( msgType & CAMERA_MSG_PREVIEW_METADATA ) &&
( NULL != metadata ) ) {
- printf("Face detected %d \n", metadata->number_of_faces);
- my_face_callback(metadata);
+ if (metaDataToggle) {
+ printf("Preview exposure: %6d Preview gain: %4d\n",
+ metadata->exposure_time, metadata->analog_gain);
+ }
+
+ if (faceDetectToggle) {
+ printf("Face detected %d \n", metadata->number_of_faces);
+ my_face_callback(metadata);
+ }
}
}
void CameraHandler::postDataTimestamp(nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr)
{
- printf("Recording cb: %d %lld %p\n", msgType, timestamp, dataPtr.get());
-
static uint32_t count = 0;
//if(count==100)
@@ -725,13 +854,26 @@ void CameraHandler::postDataTimestamp(nsecs_t timestamp, int32_t msgType, const
uint8_t *ptr = (uint8_t*) dataPtr->pointer();
- printf("VID_CB: 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x", ptr[0], ptr[1], ptr[2], ptr[3], ptr[4], ptr[5], ptr[6], ptr[7], ptr[8], ptr[9]);
-
- camera->releaseRecordingFrame(dataPtr);
+#ifdef OMAP_ENHANCEMENT_BURST_CAPTURE
+ if ( msgType & CAMERA_MSG_RAW_BURST) {
+ printf("RAW done timestamp: %llu\n", timestamp);
+ my_raw_callback(dataPtr);
+ } else
+#endif
+ {
+ printf("Recording cb: %d %lld %p\n", msgType, timestamp, dataPtr.get());
+ printf("VID_CB: 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x", ptr[0], ptr[1], ptr[2], ptr[3], ptr[4], ptr[5], ptr[6], ptr[7], ptr[8], ptr[9]);
+ camera->releaseRecordingFrame(dataPtr);
+ }
}
int createPreviewSurface(unsigned int width, unsigned int height, int32_t pixFormat) {
unsigned int previewWidth, previewHeight;
+ DisplayInfo dinfo;
+ SurfaceComposerClient::getDisplayInfo(0, &dinfo);
+
+ const unsigned MAX_PREVIEW_SURFACE_WIDTH = dinfo.w;
+ const unsigned MAX_PREVIEW_SURFACE_HEIGHT = dinfo.h;
if ( MAX_PREVIEW_SURFACE_WIDTH < width ) {
previewWidth = MAX_PREVIEW_SURFACE_WIDTH;
@@ -753,10 +895,10 @@ int createPreviewSurface(unsigned int width, unsigned int height, int32_t pixFor
return -1;
}
- surfaceControl = client->createSurface(String8("camera_test_menu"),
+ surfaceControl = client->createSurface(0,
previewWidth,
previewHeight,
- pixFormat, 0);
+ pixFormat);
previewSurface = surfaceControl->getSurface();
@@ -775,6 +917,7 @@ void printSupportedParams()
printf("\n\r\tSupported Cameras: %s", params.get("camera-indexes"));
printf("\n\r\tSupported Picture Sizes: %s", params.get(CameraParameters::KEY_SUPPORTED_PICTURE_SIZES));
printf("\n\r\tSupported Picture Formats: %s", params.get(CameraParameters::KEY_SUPPORTED_PICTURE_FORMATS));
+ printf("\n\r\tSupported Video Formats: %s", params.get(CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES));
printf("\n\r\tSupported Preview Sizes: %s", params.get(CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES));
printf("\n\r\tSupported Preview Formats: %s", params.get(CameraParameters::KEY_SUPPORTED_PREVIEW_FORMATS));
printf("\n\r\tSupported Preview Frame Rates: %s", params.get(CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES));
@@ -782,10 +925,22 @@ void printSupportedParams()
printf("\n\r\tSupported Whitebalance Modes: %s", params.get(CameraParameters::KEY_SUPPORTED_WHITE_BALANCE));
printf("\n\r\tSupported Effects: %s", params.get(CameraParameters::KEY_SUPPORTED_EFFECTS));
printf("\n\r\tSupported Scene Modes: %s", params.get(CameraParameters::KEY_SUPPORTED_SCENE_MODES));
+ printf("\n\r\tSupported ISO Modes: %s", params.get("iso-mode-values"));
printf("\n\r\tSupported Focus Modes: %s", params.get(CameraParameters::KEY_SUPPORTED_FOCUS_MODES));
printf("\n\r\tSupported Antibanding Options: %s", params.get(CameraParameters::KEY_SUPPORTED_ANTIBANDING));
printf("\n\r\tSupported Flash Modes: %s", params.get(CameraParameters::KEY_SUPPORTED_FLASH_MODES));
printf("\n\r\tSupported Focus Areas: %d", params.getInt(CameraParameters::KEY_MAX_NUM_FOCUS_AREAS));
+ printf("\n\r\tSupported Metering Areas: %d", params.getInt(CameraParameters::KEY_MAX_NUM_METERING_AREAS));
+ printf("\n\r\tSupported Preview FPS Range: %s", params.get(CameraParameters::KEY_SUPPORTED_PREVIEW_FPS_RANGE));
+ printf("\n\r\tSupported Exposure modes: %s", params.get("exposure-mode-values"));
+ printf("\n\r\tSupported VSTAB modes: %s", params.get(CameraParameters::KEY_VIDEO_STABILIZATION_SUPPORTED));
+ printf("\n\r\tSupported VNF modes: %s", params.get("vnf-supported"));
+ printf("\n\r\tSupported AutoExposureLock: %s", params.get(CameraParameters::KEY_AUTO_EXPOSURE_LOCK_SUPPORTED));
+ printf("\n\r\tSupported AutoWhiteBalanceLock: %s", params.get(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK_SUPPORTED));
+ printf("\n\r\tSupported Zoom: %s", params.get(CameraParameters::KEY_ZOOM_SUPPORTED));
+ printf("\n\r\tSupported Smooth Zoom: %s", params.get(CameraParameters::KEY_SMOOTH_ZOOM_SUPPORTED));
+ printf("\n\r\tSupported Video Snapshot: %s", params.get(CameraParameters::KEY_VIDEO_SNAPSHOT_SUPPORTED));
+ printf("\n\r\tSupported Capture modes: %s", params.get("mode-values"));
if ( NULL != params.get(CameraParameters::KEY_FOCUS_DISTANCES) ) {
printf("\n\r\tFocus Distances: %s \n", params.get(CameraParameters::KEY_FOCUS_DISTANCES));
@@ -858,8 +1013,10 @@ int closeRecorder() {
int configureRecorder() {
- char videoFile[256],vbit_string[50];
+ char videoFile[384],vbit_string[50];
videoFd = -1;
+ struct CameraInfo cameraInfo;
+ camera->getCameraInfo(camera_index, &cameraInfo);
if ( ( NULL == recorder.get() ) || ( NULL == camera.get() ) ) {
printf("invalid recorder and/or camera references\n");
@@ -904,9 +1061,7 @@ int configureRecorder() {
return -1;
}
- if(mkdir("/mnt/sdcard/videos",0777) == -1)
- printf("\n Directory --videos-- was not created \n");
- sprintf(videoFile, "/mnt/sdcard/videos/video%d.%s", recording_counter,outputFormat[outputFormatIDX].desc);
+ sprintf(videoFile, "%s/video%d.%s", videos_dir_path, recording_counter, outputFormat[outputFormatIDX].desc);
videoFd = open(videoFile, O_CREAT | O_RDWR);
@@ -924,25 +1079,18 @@ int configureRecorder() {
recording_counter++;
- if (camera_index == 0) {
- if ( recorder->setVideoFrameRate(fpsConstRanges[frameRateIDX].constFramerate) < 0 ) {
- printf("error while configuring video framerate\n");
+ if (cameraInfo.orientation == 90 || cameraInfo.orientation == 270 ) {
+ if ( recorder->setVideoSize(Vcapture_Array[VcaptureSizeIDX]->height, Vcapture_Array[VcaptureSizeIDX]->width) < 0 ) {
+ printf("error while configuring video size\n");
return -1;
}
- }
- else {
- if ( recorder->setVideoFrameRate(fpsConstRangesSec[frameRateIDXSec].constFramerate) < 0 ) {
- printf("error while configuring video framerate\n");
+ } else {
+ if ( recorder->setVideoSize(Vcapture_Array[VcaptureSizeIDX]->width, Vcapture_Array[VcaptureSizeIDX]->height) < 0 ) {
+ printf("error while configuring video size\n");
return -1;
}
}
- if ( recorder->setVideoSize(VcaptureSize[VcaptureSizeIDX].width, VcaptureSize[VcaptureSizeIDX].height) < 0 ) {
- printf("error while configuring video size\n");
-
- return -1;
- }
-
if ( recorder->setVideoEncoder(videoCodecs[videoCodecIDX].type) < 0 ) {
printf("error while configuring video codec\n");
@@ -1011,6 +1159,37 @@ int stopRecording() {
}
int openCamera() {
+
+ antibandStr = new char [256];
+ effectssStr = new char [256];
+ exposureModeStr = new char [256];
+ captureSizeStr = new char [500];
+ VcaptureSizeStr = new char [500];
+ previewSizeStr = new char [500];
+ thumbnailSizeStr = new char [500];
+ awbStr = new char [400];
+ sceneStr = new char [400];
+ isoModeStr = new char [256];
+ focusStr = new char [256];
+ flashStr = new char [256];
+ fpsstr = new char [256];
+ previewFormatStr = new char [256];
+ pictureFormatStr = new char [256];
+ constFramerate = new int[32];
+ vstabstr = new char[256];
+ vnfstr = new char[256];
+ AutoExposureLockstr = new char[256];
+ AutoWhiteBalanceLockstr = new char[256];
+ zoomstr = new char[256];
+ smoothzoomstr = new char[256];
+ modevaluesstr = new char[256];
+ videosnapshotstr = new char[256];
+ autoconvergencestr = new char[256];
+ layoutstr = new char[256];
+ capturelayoutstr = new char[256];
+
+ requestBufferSourceReset();
+
printf("openCamera(camera_index=%d)\n", camera_index);
camera = Camera::connect(camera_index);
@@ -1026,13 +1205,19 @@ int openCamera() {
}
}
- params = camera->getParameters();
+ if ( firstTime ) {
+ params = camera->getParameters();
+ firstTime = false;
+ }
+ getParametersFromCapabilities();
+ getSizeParametersFromCapabilities();
camera->setParameters(params.flatten());
-
camera->setListener(new CameraHandler());
hardwareActive = true;
+
+
return 0;
}
@@ -1043,30 +1228,98 @@ int closeCamera() {
return -1;
}
+ deleteAllocatedMemory();
+
camera->disconnect();
camera.clear();
hardwareActive = false;
-
return 0;
}
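+
+// (Re)create the tap-out/tap-in helpers. A reset is requested whenever the
+// camera is reopened so that stale buffer sources are torn down first.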
+void createBufferOutputSource() {
+ if(bufferSourceOutputThread.get() && bufferSourceOutputReset) {
+ bufferSourceOutputThread->requestExit();
+ bufferSourceOutputThread.clear();
+ }
+ if(!bufferSourceOutputThread.get()) {
+#ifdef ANDROID_API_JB_OR_LATER
+ bufferSourceOutputThread = new BQ_BufferSourceThread(123, camera);
+#else
+ bufferSourceOutputThread = new ST_BufferSourceThread(false, 123, camera);
+#endif
+ bufferSourceOutputThread->run();
+ }
+ bufferSourceOutputReset = false;
+}
+
+void createBufferInputSource() {
+ if (bufferSourceInput.get() && bufferSourceInputReset) {
+ bufferSourceInput.clear();
+ }
+ if (!bufferSourceInput.get()) {
+#ifdef ANDROID_API_JB_OR_LATER
+ bufferSourceInput = new BQ_BufferSourceInput(1234, camera);
+#else
+ bufferSourceInput = new ST_BufferSourceInput(1234, camera);
+#endif
+ }
+ bufferSourceInputReset = false;
+}
+
+void requestBufferSourceReset() {
+ bufferSourceInputReset = true;
+ bufferSourceOutputReset = true;
+}
+
int startPreview() {
int previewWidth, previewHeight;
+ struct CameraInfo cameraInfo;
+ DisplayInfo dinfo;
+ int orientation;
+ unsigned int correctedHeight;
+
+ SurfaceComposerClient::getDisplayInfo(0, &dinfo);
+
+ printf ("dinfo.orientation = %d\n", dinfo.orientation);
+ printf ("dinfo.w = %d\n", dinfo.w);
+ printf ("dinfo.h = %d\n", dinfo.h);
+
+ // calculate display orientation from sensor orientation
+ camera->getCameraInfo(camera_index, &cameraInfo);
+ if (cameraInfo.facing == CAMERA_FACING_FRONT) {
+ orientation = (cameraInfo.orientation + dinfo.orientation) % 360;
+ orientation = (360 - orientation) % 360; // compensate the mirror
+ } else { // back-facing
+ orientation = (cameraInfo.orientation - dinfo.orientation + 360) % 360;
+ }
+
+
if (reSizePreview) {
+ int orientedWidth, orientedHeight;
if(recordingMode)
{
- previewWidth = VcaptureSize[VcaptureSizeIDX].width;
- previewHeight = VcaptureSize[VcaptureSizeIDX].height;
+ previewWidth = Vcapture_Array[VcaptureSizeIDX]->width;
+ previewHeight = Vcapture_Array[VcaptureSizeIDX]->height;
}else
{
- previewWidth = previewSize[previewSizeIDX].width;
- previewHeight = previewSize[previewSizeIDX].height;
+ previewWidth = preview_Array[previewSizeIDX]->width;
+ previewHeight = preview_Array[previewSizeIDX]->height;
}
- if ( createPreviewSurface(previewWidth,
- previewHeight,
+ // corrected height for aspect ratio
+ if ((orientation == 90) || (orientation == 270)) {
+ orientedHeight = previewWidth;
+ orientedWidth = previewHeight;
+ } else {
+ orientedHeight = previewHeight;
+ orientedWidth = previewWidth;
+ }
+ correctedHeight = (dinfo.w * orientedHeight) / orientedWidth;
+ printf("correctedHeight = %d", correctedHeight);
+
+ if ( createPreviewSurface(dinfo.w, correctedHeight,
pixelformat[previewFormat].pixelFormatDesc) < 0 ) {
printf("Error while creating preview surface\n");
return -1;
@@ -1075,25 +1328,778 @@ int startPreview() {
if ( !hardwareActive ) {
openCamera();
}
+ if(stereoMode && firstTimeStereo)
+ {
+ params.set(KEY_S3D_PRV_FRAME_LAYOUT, stereoLayout[stereoLayoutIDX]);
+ params.set(KEY_S3D_CAP_FRAME_LAYOUT, stereoCapLayout[stereoCapLayoutIDX]);
+ }
+
+ if ((cameraInfo.orientation == 90 || cameraInfo.orientation == 270) && recordingMode) {
+ params.setPreviewSize(previewHeight, previewWidth);
+ } else {
+ params.setPreviewSize(previewWidth, previewHeight);
+ }
+ params.setPictureSize(capture_Array[captureSizeIDX]->width, capture_Array[captureSizeIDX]->height);
+
+ // calculate display orientation from sensor orientation
+ camera->getCameraInfo(camera_index, &cameraInfo);
+ if (cameraInfo.facing == CAMERA_FACING_FRONT) {
+ orientation = (cameraInfo.orientation + dinfo.orientation) % 360;
+ orientation= (360 - orientation) % 360; // compensate the mirror
+ } else { // back-facing
+ orientation = (cameraInfo.orientation - dinfo.orientation + 360) % 360;
+ }
+
+ if(!strcmp(params.get(KEY_MODE), "video-mode") ) {
+ orientation = 0;
+ }
- params.setPreviewSize(previewWidth, previewHeight);
- params.setPictureSize(captureSize[captureSizeIDX].width, captureSize[captureSizeIDX].height);
+ camera->sendCommand(CAMERA_CMD_SET_DISPLAY_ORIENTATION, orientation, 0);
camera->setParameters(params.flatten());
camera->setPreviewDisplay(previewSurface);
+ }
- if(!hardwareActive) prevcnt = 0;
+ if(hardwareActive) prevcnt = 0;
+ camera->startPreview();
+ previewRunning = true;
+ reSizePreview = false;
- camera->startPreview();
+ const char *format = params.getPictureFormat();
+ if((NULL != format) && isRawPixelFormat(format)) {
+ createBufferOutputSource();
+ createBufferInputSource();
+ }
+
+ return 0;
+}
+
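+// Query the HAL capabilities once the camera is open and cache the supported
+// option lists that drive the interactive menu.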
+int getParametersFromCapabilities() {
+ const char *valstr = NULL;
+
+ numCamera = camera->getNumberOfCameras();
+
+ params.unflatten(camera->getParameters());
+
+ valstr = params.get(KEY_AUTOCONVERGENCE_MODE_VALUES);
+ if (NULL != valstr) {
+ strcpy(autoconvergencestr, valstr);
+ getSupportedParameters(autoconvergencestr,&numAutoConvergence,(char***)&autoconvergencemode);
+ } else {
+ printf("no supported parameteters for autoconvergence\n\t");
+ }
+
+ valstr = params.get(CameraParameters::KEY_SUPPORTED_EFFECTS);
+ if (NULL != valstr) {
+ strcpy(effectssStr, valstr);
+ getSupportedParameters(effectssStr, &numEffects, (char***)&effectss);
+ } else {
+ printf("Color effects are not supported\n");
+ }
+
+ valstr = params.get(CameraParameters::KEY_SUPPORTED_ANTIBANDING);
+ if (NULL != valstr) {
+ strcpy(antibandStr, valstr);
+ getSupportedParameters(antibandStr, &numAntibanding, (char***)&antiband);
+ } else {
+ printf("Antibanding not supported\n");
+ }
+
+ valstr = params.get(CameraParameters::KEY_SUPPORTED_WHITE_BALANCE);
+ if (NULL != valstr) {
+ strcpy(awbStr, valstr);
+ getSupportedParameters(awbStr, &numawb, (char***)&awb);
+ } else {
+ printf("White balance is not supported\n");
+ }
+
+ valstr = params.get(KEY_S3D_PRV_FRAME_LAYOUT_VALUES);
+ if ((NULL != valstr) && (0 != strcmp(valstr, "none"))) {
+ stereoMode = true;
+ strcpy(layoutstr, valstr);
+ getSupportedParameters(layoutstr,&numLay,(char***)&stereoLayout);
+ } else {
+ stereoMode = false;
+ printf("layout is not supported\n");
+ }
+
+ valstr = params.get(KEY_S3D_CAP_FRAME_LAYOUT_VALUES);
+ if ((NULL != valstr) && (0 != strcmp(valstr, "none"))) {
+ strcpy(capturelayoutstr, valstr);
+ getSupportedParameters(capturelayoutstr,&numCLay,(char***)&stereoCapLayout);
+ } else {
+ printf("capture layout is not supported\n");
+ }
+
+ valstr = params.get(CameraParameters::KEY_SUPPORTED_SCENE_MODES);
+ if (NULL != valstr) {
+ strcpy(sceneStr, valstr);
+ getSupportedParameters(sceneStr, &numscene, (char***)&scene);
+ } else {
+ printf("Scene modes are not supported\n");
+ }
+
+ valstr = params.get(CameraParameters::KEY_SUPPORTED_FOCUS_MODES);
+ if (NULL != valstr) {
+ strcpy(focusStr, valstr);
+ getSupportedParameters(focusStr, &numfocus, (char***)&focus);
+ } else {
+ printf("Focus modes are not supported\n");
+ }
+
+ valstr = params.get(CameraParameters::KEY_SUPPORTED_FLASH_MODES);
+ if (NULL != valstr) {
+ strcpy(flashStr, valstr);
+ getSupportedParameters(flashStr, &numflash, (char***)&flash);
+ } else {
+ printf("Flash modes are not supported\n");
+ }
+
+ valstr = params.get(CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES);
+ if (NULL != valstr) {
+ strcpy(VcaptureSizeStr, valstr);
+ getSupportedParametersVideoCaptureSize(VcaptureSizeStr, &numVcaptureSize, VcaptureSize, lenght_Vcapture_size);
+ } else {
+ printf("Preview sizes are not supported\n");
+ }
+
+ valstr = params.get(CameraParameters::KEY_SUPPORTED_PREVIEW_FPS_RANGE);
+ if (NULL != valstr) {
+ strcpy(fpsstr, valstr);
+ getSupportedParametersfps(fpsstr, &numfps);
+ } else {
+ printf("Preview fps range is not supported\n");
+ }
+
+ valstr = params.get(CameraParameters::KEY_SUPPORTED_PREVIEW_FORMATS);
+ if (NULL != valstr) {
+ strcpy(previewFormatStr, valstr);
+ getSupportedParameters(previewFormatStr, &numpreviewFormat, (char ***)&previewFormatArray);
+ } else {
+ printf("Preview formats are not supported\n");
+ }
+
+ valstr = params.get(CameraParameters::KEY_SUPPORTED_PICTURE_FORMATS);
+ if (NULL != valstr) {
+ strcpy(pictureFormatStr, valstr);
+ getSupportedParameters(pictureFormatStr, &numpictureFormat, (char ***)&pictureFormatArray);
+ } else {
+ printf("Picture formats are not supported\n");
+ }
+
+ valstr = params.get("exposure-mode-values");
+ if (NULL != valstr) {
+ strcpy(exposureModeStr, valstr);
+ getSupportedParameters(exposureModeStr, &numExposureMode, (char***)&exposureMode);
+ } else {
+ printf("Exposure modes are not supported\n");
+ }
+
+ valstr = params.get("iso-mode-values");
+ if (NULL != valstr) {
+ strcpy(isoModeStr, valstr);
+ getSupportedParameters(isoModeStr, &numisoMode , (char***)&isoMode);
+ } else {
+ printf("iso modes are not supported\n");
+ }
+
+ valstr = params.get(CameraParameters::KEY_SUPPORTED_JPEG_THUMBNAIL_SIZES);
+ if (NULL != valstr) {
+ strcpy(thumbnailSizeStr, valstr);
+ getSupportedParametersThumbnailSize(thumbnailSizeStr, &numthumbnailSize, thumbnailSize, length_thumbnailSize);
+ } else {
+ printf("Thumbnail sizes are not supported\n");
+ }
+
+ valstr = params.get(CameraParameters::KEY_VIDEO_STABILIZATION_SUPPORTED);
+ if (NULL != valstr) {
+ strcpy(vstabstr, valstr);
+ } else {
+ printf("VSTAB is not supported\n");
+ }
+
+ valstr = params.get("vnf-supported");
+ if (NULL != valstr) {
+ strcpy(vnfstr, valstr);
+ } else {
+ printf("VNF is not supported\n");
+ }
+
+ valstr = params.get(CameraParameters::KEY_AUTO_EXPOSURE_LOCK_SUPPORTED);
+ if (NULL != valstr) {
+ strcpy(AutoExposureLockstr, valstr);
+ } else {
+ printf("AutoExposureLock is not supported\n");
+ }
+
+ valstr = params.get(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK_SUPPORTED);
+ if (NULL != valstr) {
+ strcpy(AutoWhiteBalanceLockstr, valstr);
+ } else {
+ printf("AutoWhiteBalanceLock is not supported\n");
+ }
+
+ valstr = params.get(CameraParameters::KEY_ZOOM_SUPPORTED);
+ if (NULL != valstr) {
+ strcpy(zoomstr, valstr);
+ } else {
+ printf("Zoom is not supported\n");
+ }
+
+ valstr = params.get(CameraParameters::KEY_SMOOTH_ZOOM_SUPPORTED);
+ if (NULL != valstr) {
+ strcpy(smoothzoomstr, valstr);
+ } else {
+ printf("SmoothZoom is not supported\n");
+ }
+
+ valstr = params.get("mode-values");
+ if (NULL != valstr) {
+ strcpy(modevaluesstr, valstr);
+ getSupportedParameters(modevaluesstr, &nummodevalues , (char***)&modevalues);
+ } else {
+ printf("Mode values is not supported\n");
+ }
+
+ valstr = params.get(CameraParameters::KEY_VIDEO_SNAPSHOT_SUPPORTED);
+ if (NULL != valstr) {
+ strcpy(videosnapshotstr, valstr);
+ } else {
+ printf("Video Snapshot is not supported\n");
+ }
+
+ if (params.get(KEY_SUPPORTED_MANUAL_CONVERGENCE_MIN) != NULL) {
+ manualConvMin = params.getInt(KEY_SUPPORTED_MANUAL_CONVERGENCE_MIN);
+ } else {
+ printf("no supported parameteters for manual convergence min\n\t");
+ }
+
+ if (params.get(KEY_SUPPORTED_MANUAL_CONVERGENCE_MAX) != NULL) {
+ manualConvMax = params.getInt(KEY_SUPPORTED_MANUAL_CONVERGENCE_MAX);
+ } else {
+ printf("no supported parameteters for manual convergence max\n\t");
+ }
+
+ if (params.get(KEY_SUPPORTED_MANUAL_CONVERGENCE_STEP) != NULL) {
+ manualConvStep = params.getInt(KEY_SUPPORTED_MANUAL_CONVERGENCE_STEP);
+ } else {
+ printf("no supported parameteters for manual convergence step\n\t");
+ }
+
+ if (params.get(KEY_SUPPORTED_MANUAL_EXPOSURE_MIN) != NULL) {
+ manualExpMin = params.getInt(KEY_SUPPORTED_MANUAL_EXPOSURE_MIN);
+ } else {
+ printf("no supported parameteters for manual exposure min\n\t");
+ }
+
+ if (params.get(KEY_SUPPORTED_MANUAL_EXPOSURE_MAX) != NULL) {
+ manualExpMax = params.getInt(KEY_SUPPORTED_MANUAL_EXPOSURE_MAX);
+ } else {
+ printf("no supported parameteters for manual exposure max\n\t");
+ }
+
+ if (params.get(KEY_SUPPORTED_MANUAL_EXPOSURE_STEP) != NULL) {
+ manualExpStep = params.getInt(KEY_SUPPORTED_MANUAL_EXPOSURE_STEP);
+ } else {
+ printf("no supported parameteters for manual exposure step\n\t");
+ }
+
+ if (params.get(KEY_SUPPORTED_MANUAL_GAIN_ISO_MIN) != NULL) {
+ manualGainMin = params.getInt(KEY_SUPPORTED_MANUAL_GAIN_ISO_MIN);
+ } else {
+ printf("no supported parameteters for manual gain min\n\t");
+ }
+
+ if (params.get(KEY_SUPPORTED_MANUAL_GAIN_ISO_MAX) != NULL) {
+ manualGainMax = params.getInt(KEY_SUPPORTED_MANUAL_GAIN_ISO_MAX);
+ } else {
+ printf("no supported parameteters for manual gain max\n\t");
+ }
+
+ if (params.get(KEY_SUPPORTED_MANUAL_GAIN_ISO_STEP) != NULL) {
+ manualGainStep = params.getInt(KEY_SUPPORTED_MANUAL_GAIN_ISO_STEP);
+ } else {
+ printf("no supported parameteters for manual gain step\n\t");
+ }
+
+ return 0;
+}
+
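+// Pick the picture/preview/video size lists according to the current stereo
+// layout (top-bottom, side-by-side or subsampled), or the mono lists otherwise.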
+void getSizeParametersFromCapabilities() {
+ if(!stereoMode) {
+ if (params.get(CameraParameters::KEY_SUPPORTED_PICTURE_SIZES) != NULL) {
+ strcpy(captureSizeStr, params.get(CameraParameters::KEY_SUPPORTED_PICTURE_SIZES));
+ } else {
+ printf("Picture sizes are not supported\n");
+ }
+
+ if (params.get(CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES) != NULL) {
+ strcpy(previewSizeStr, params.get(CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES));
+ strcpy(VcaptureSizeStr, params.get(CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES));
+ } else {
+ printf("Preview sizes are not supported\n");
+ }
+ } else { //stereo
+ if(strcmp(stereoLayout[stereoLayoutIDX],"tb-full") == 0)
+ {
+ if (params.get(KEY_SUPPORTED_PREVIEW_TOPBOTTOM_SIZES) != NULL) {
+ strcpy(previewSizeStr, params.get(KEY_SUPPORTED_PREVIEW_TOPBOTTOM_SIZES));
+ strcpy(VcaptureSizeStr, params.get(KEY_SUPPORTED_PREVIEW_TOPBOTTOM_SIZES));
+ } else {
+ printf("Preview sizes are not supported\n");
+ }
+ }
+ else if(strcmp(stereoLayout[stereoLayoutIDX],"ss-full") == 0)
+ {
+ if (params.get(KEY_SUPPORTED_PREVIEW_SIDEBYSIDE_SIZES) != NULL) {
+ strcpy(previewSizeStr, params.get(KEY_SUPPORTED_PREVIEW_SIDEBYSIDE_SIZES));
+ strcpy(VcaptureSizeStr, params.get(KEY_SUPPORTED_PREVIEW_SIDEBYSIDE_SIZES));
+ } else {
+ printf("Preview sizes are not supported\n");
+ }
+ }
+ else if(strcmp(stereoLayout[stereoLayoutIDX],"tb-subsampled") == 0)
+ {
+ if (params.get(CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES) != NULL) {
+ strcpy(previewSizeStr, params.get(CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES));
+ strcpy(VcaptureSizeStr, params.get(CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES));
+ } else {
+ printf("Preview sizes are not supported\n");
+ }
+ }
+ else if(strcmp(stereoLayout[stereoLayoutIDX],"ss-subsampled") == 0)
+ {
+ if (params.get(KEY_SUPPORTED_PREVIEW_SUBSAMPLED_SIZES) != NULL) {
+ strcpy(previewSizeStr, params.get(KEY_SUPPORTED_PREVIEW_SUBSAMPLED_SIZES));
+ strcpy(VcaptureSizeStr, params.get(KEY_SUPPORTED_PREVIEW_SUBSAMPLED_SIZES));
+ } else {
+ printf("Preview sizes are not supported\n");
+ }
+ }
+ else
+ {
+ printf("Preview sizes are not supported\n");
+ }
+ if(strcmp(stereoCapLayout[stereoCapLayoutIDX],"tb-full") == 0)
+ {
+ if (params.get(KEY_SUPPORTED_PICTURE_TOPBOTTOM_SIZES) != NULL) {
+ strcpy(captureSizeStr, params.get(KEY_SUPPORTED_PICTURE_TOPBOTTOM_SIZES));
+ } else {
+ printf("Picture sizes are not supported\n");
+ }
+ }
+ else if(strcmp(stereoCapLayout[stereoCapLayoutIDX],"ss-full") == 0)
+ {
+ if (params.get(KEY_SUPPORTED_PICTURE_SIDEBYSIDE_SIZES) != NULL) {
+ strcpy(captureSizeStr, params.get(KEY_SUPPORTED_PICTURE_SIDEBYSIDE_SIZES));
+ } else {
+ printf("Picture sizes are not supported\n");
+ }
+ }
+ else
+ {
+ printf("Picture sizes are not supported\n");
+ }
+
+ }
+ getSupportedParametersCaptureSize(captureSizeStr, &numcaptureSize, captureSize, length_capture_Size);
+ getSupportedParametersPreviewSize(previewSizeStr, &numpreviewSize, previewSize, length_previewSize);
+ getSupportedParametersVideoCaptureSize(VcaptureSizeStr, &numVcaptureSize, VcaptureSize, lenght_Vcapture_size);
+}
+
+int getDefaultParameter(const char* val, int numOptions, char **array) {
+ int cnt = 0;
+
+ if ((NULL == val) || (NULL == array)) {
+        printf("Some default parameters are not valid\n");
+ return 0;
+ }
+
+ for(cnt=0;cnt<numOptions;cnt++) {
+ if (NULL == array[cnt]) {
+            printf("Some parameter arrays are not valid\n");
+ continue;
+ }
+ if (strcmp(val, array[cnt]) ==0 ) {
+ return cnt;
+ }
+ }
+ return 0;
+}
+
+int getDefaultParameterResol(const char* val, int numOptions, param_Array **array) {
+ int cnt = 0;
+
+ for(cnt=0;cnt<numOptions;cnt++) {
+ if (strcmp(val, array[cnt]->name) ==0 ) {
+ return cnt;
+ }
+ }
+ return 0;
+}
+
+int getSupportedParameters(char* parameters, int *optionsCount, char ***elem) {
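+    // Split a comma-separated capability string (e.g. "auto,incandescent,daylight")
+    // into a newly allocated array of C strings and report how many entries were found.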
+ str = new char [400];
+ param = new char [400];
+ int cnt = 0;
+
+ strcpy(str, parameters);
+ param = strtok(str, ",");
+ *elem = new char*[30];
+
+ while (param != NULL) {
+ (*elem)[cnt] = new char[strlen(param) + 1];
+ strcpy((*elem)[cnt], param);
+ param = strtok (NULL, ",");
+ cnt++;
+ }
+ *optionsCount = cnt;
+ return 0;
+}
+
+int getSupportedParametersfps(char* parameters, int *optionsCount) {
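+    // Parse the "(min,max),(min,max),..." fps-range capability (values are fps * 1000).
+    // Ranges where min == max are additionally collected as constant frame rates.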
+ str = new char [400];
+ param = new char [400];
+ int cnt = 0;
+ constCnt = 0;
+ rangeCnt = 0;
+ strcpy(str, parameters);
+ fps_const_str = new char*[32];
+ fps_range_str = new char*[32];
+ rangeDescription = new char*[32];
+ fpsArray = new fps_Array[50];
+ param = strtok(str, "(,)");
+
+ while (param != NULL) {
+        fps_const_str[constCnt] = new char[32];
+        fps_range_str[rangeCnt] = new char[32];
+        rangeDescription[rangeCnt] = new char[32];
+ fpsArray[cnt].rangeMin = atoi(param);
+ param = strtok (NULL, "(,)");
+ fpsArray[cnt].rangeMax = atoi(param);
+ param = strtok (NULL, "(,)");
+ if (fpsArray[cnt].rangeMin == fpsArray[cnt].rangeMax) {
+ sprintf(fps_const_str[constCnt], "%d,%d", fpsArray[cnt].rangeMin, fpsArray[cnt].rangeMax);
+ constFramerate[constCnt] = fpsArray[cnt].rangeMin/1000;
+ sprintf(fps_range_str[rangeCnt], "%d,%d", fpsArray[cnt].rangeMin, fpsArray[cnt].rangeMax);
+ sprintf(rangeDescription[rangeCnt], "[%d:%d]", fpsArray[cnt].rangeMin/1000, fpsArray[cnt].rangeMax/1000);
+ constCnt ++;
+ rangeCnt ++;
+
+ } else {
+ sprintf(fps_range_str[rangeCnt], "%d,%d", fpsArray[cnt].rangeMin, fpsArray[cnt].rangeMax);
+ sprintf(rangeDescription[rangeCnt], "[%d:%d]", fpsArray[cnt].rangeMin/1000, fpsArray[cnt].rangeMax/1000);
+ rangeCnt ++;
+ }
+
+ cnt++;
+ }
+ *optionsCount = cnt;
+ return 0;
+}
+
+
+int getSupportedParametersCaptureSize(char* parameters, int *optionsCount, param_Array array[], int arraySize) {
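+    // Tokenize the "WxH,WxH,..." picture-size capability string and label each
+    // entry with a human-readable name from the static resolution table, if known.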
+ str = new char [400];
+ param = new char [400];
+ int cnt = 0;
+ strcpy(str, parameters);
+ param = strtok(str, ",x");
+ capture_Array = new param_Array*[50];
+ while (param != NULL) {
+
+ capture_Array[cnt] = new param_Array;
+ capture_Array[cnt]->width = atoi(param);
+ param = strtok (NULL, ",x");
+ capture_Array[cnt]->height = atoi(param);
+ param = strtok (NULL, ",x");
+
+ int x = getSupportedParametersNames(capture_Array[cnt]->width,
+ capture_Array[cnt]->height, array, arraySize);
+
+ if (x > -1) {
+ strcpy(capture_Array[cnt]->name, array[x].name);
+ } else {
+ strcpy(capture_Array[cnt]->name, "Needs to be added/Not supported");
+ }
+
+ cnt++;
+ }
+
+ *optionsCount = cnt;
+ return 0;
+}
+
+int getSupportedParametersVideoCaptureSize(char* parameters, int *optionsCount, param_Array array[], int arraySize) {
+ str = new char [800];
+ param = new char [800];
+ int cnt = 0;
+ strcpy(str, parameters);
+ param = strtok(str, ",x");
+ Vcapture_Array = new param_Array*[100];
+ while (param != NULL) {
+
+ Vcapture_Array[cnt] = new param_Array;
+ Vcapture_Array[cnt]->width = atoi(param);
+ param = strtok (NULL, ",x");
+ Vcapture_Array[cnt]->height = atoi(param);
+ param = strtok (NULL, ",x");
+
+ int x = getSupportedParametersNames(Vcapture_Array[cnt]->width,
+ Vcapture_Array[cnt]->height, array, arraySize);
+
+ if (x > -1) {
+ strcpy(Vcapture_Array[cnt]->name, array[x].name);
+ } else {
+ strcpy(Vcapture_Array[cnt]->name, "Needs to be added/Not supported");
+ }
+
+ cnt++;
+ }
+
+ *optionsCount = cnt;
+ return 0;
+}
+
+int getSupportedParametersPreviewSize(char* parameters, int *optionsCount, param_Array array[], int arraySize) {
+ str = new char [500];
+ param = new char [500];
+ int cnt = 0;
+ strcpy(str, parameters);
+ param = strtok(str, ",x");
+ preview_Array = new param_Array*[60];
+ while (param != NULL) {
+ preview_Array[cnt] = new param_Array;
+ preview_Array[cnt]->width = atoi(param);
+ param = strtok (NULL, ",x");
+ preview_Array[cnt]->height = atoi(param);
+ param = strtok (NULL, ",x");
+
+ int x = getSupportedParametersNames(preview_Array[cnt]->width,
+ preview_Array[cnt]->height, array, arraySize);
+ if (x > -1) {
+ strcpy(preview_Array[cnt]->name, array[x].name);
+ } else {
+ strcpy(preview_Array[cnt]->name, "Needs to be added/Not supported");
+ }
+
+ cnt++;
+ }
+
+ *optionsCount = cnt;
+ return 0;
+}
+
+int getSupportedParametersThumbnailSize(char* parameters, int *optionsCount, param_Array array[], int arraySize) {
+ str = new char [500];
+ param = new char [500];
+ int cnt = 0;
+ strcpy(str, parameters);
+ param = strtok(str, ",x");
+ thumbnail_Array = new param_Array*[60];
+ while (param != NULL) {
+ thumbnail_Array[cnt] = new param_Array;
+ thumbnail_Array[cnt]->width = atoi(param);
+ param = strtok (NULL, ",x");
+ thumbnail_Array[cnt]->height = atoi(param);
+ param = strtok (NULL, ",x");
+
+ int x = getSupportedParametersNames(thumbnail_Array[cnt]->width,
+ thumbnail_Array[cnt]->height, array, arraySize);
+ if (x > -1) {
+ strcpy(thumbnail_Array[cnt]->name, array[x].name);
+ } else {
+ strcpy(thumbnail_Array[cnt]->name, "Needs to be added/Not supported");
+ }
+
+ cnt++;
+ }
+
+ *optionsCount = cnt;
+ return 0;
+}
+
+int getSupportedParametersNames(int width, int height, param_Array array[], int arraySize) {
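+    // Linear search of the static resolution table; returns the matching index or -1.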
+ for (int i = 0; i<arraySize; i++) {
+
+ if ((width == array[i].width) && (height == array[i].height)) {
+ return (i);
+ }
+ }
+ return -1;
+}
+
+int deleteAllocatedMemory() {
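+    // Free every capability array and string allocated while parsing the
+    // supported-parameter lists, and release any buffer source threads.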
+ int i;
+
+ for (i=0; i<numAntibanding; i++){
+ delete [] antiband[i];
+ }
+
+ for (i=0; i<numEffects; i++){
+ delete [] effectss[i];
+ }
+
+ for (i=0; i<numExposureMode; i++){
+ delete [] exposureMode[i];
+ }
- previewRunning = true;
- reSizePreview = false;
+ for (i=0; i<numawb; i++) {
+ delete [] awb[i];
+ }
+
+ for (i=0; i<numscene; i++){
+ delete [] scene[i];
+ }
+
+ for (i=0; i<numfocus; i++){
+ delete [] focus[i];
+ }
+
+ for (i=0; i<numflash; i++){
+ delete [] flash[i];
+ }
+
+ for (i=0; i<numpreviewSize; i++){
+ delete [] preview_Array[i];
+ }
+
+ for (i=0; i<numcaptureSize; i++){
+ delete [] capture_Array[i];
+ }
+
+ for (i=0; i<numVcaptureSize; i++){
+ delete [] Vcapture_Array[i];
+ }
+
+ for (i=0; i<numthumbnailSize; i++){
+ delete [] thumbnail_Array[i];
+ }
+ for (i=0; i<constCnt; i++){
+ delete [] fps_const_str[i];
+ }
+
+ for (i=0; i<rangeCnt; i++){
+ delete [] fps_range_str[i];
+ }
+
+ for (i=0; i<rangeCnt; i++){
+ delete [] rangeDescription[i];
+ }
+
+ for (i=0; i<numpreviewFormat; i++){
+ delete [] previewFormatArray[i];
+ }
+
+ for (i=0; i<numpictureFormat; i++){
+ delete [] pictureFormatArray[i];
+ }
+
+ for (i=0; i<nummodevalues; i++){
+ delete [] modevalues[i];
+ }
+
+ if (numLay) {
+ for (i = 0; i < numLay; i++) {
+ delete [] stereoLayout[i];
+ }
+ numLay = 0;
+ }
+
+ if (numCLay) {
+ for (i = 0; i < numCLay; i++) {
+ delete [] stereoCapLayout[i];
+ }
+ numCLay = 0;
+ }
+
+ delete [] antibandStr;
+ delete [] effectssStr;
+ delete [] exposureModeStr;
+ delete [] awbStr;
+ delete [] sceneStr;
+ delete [] focusStr;
+ delete [] flashStr;
+ delete [] previewSizeStr;
+ delete [] captureSizeStr;
+ delete [] VcaptureSizeStr;
+ delete [] thumbnailSizeStr;
+ delete [] fpsstr;
+ delete [] previewFormatStr;
+ delete [] pictureFormatStr;
+ delete [] fpsArray;
+ delete [] vstabstr;
+ delete [] vnfstr;
+ delete [] isoModeStr;
+ delete [] AutoExposureLockstr;
+ delete [] AutoWhiteBalanceLockstr;
+ delete [] zoomstr;
+ delete [] smoothzoomstr;
+ delete [] modevaluesstr;
+ delete [] videosnapshotstr;
+ delete [] autoconvergencestr;
+ delete [] layoutstr;
+ delete [] capturelayoutstr;
+
+ // Release buffer sources if any
+ if (bufferSourceOutputThread.get()) {
+ bufferSourceOutputThread->requestExit();
+ bufferSourceOutputThread.clear();
+ }
+ if ( bufferSourceInput.get() ) {
+ bufferSourceInput.clear();
}
return 0;
}
+int trySetVideoStabilization(bool toggle) {
+ if (strcmp(vstabstr, "true") == 0) {
+ params.set(params.KEY_VIDEO_STABILIZATION, toggle ? params.TRUE : params.FALSE);
+ return 0;
+ }
+ return 0;
+}
+
+int trySetVideoNoiseFilter(bool toggle) {
+ if (strcmp(vnfstr, "true") == 0) {
+ params.set("vnf", toggle ? params.TRUE : params.FALSE);
+ return 0;
+ }
+ return 0;
+}
+
+int trySetAutoExposureLock(bool toggle) {
+ if (strcmp(AutoExposureLockstr, "true") == 0) {
+ params.set(KEY_AUTO_EXPOSURE_LOCK, toggle ? params.TRUE : params.FALSE);
+ return 0;
+ }
+ return 0;
+}
+
+int trySetAutoWhiteBalanceLock(bool toggle) {
+ if (strcmp(AutoWhiteBalanceLockstr, "true") == 0) {
+ params.set(KEY_AUTO_WHITEBALANCE_LOCK, toggle ? params.TRUE : params.FALSE);
+ return 0;
+ }
+ return 0;
+}
+
+bool isRawPixelFormat (const char *format) {
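+    // Raw/uncompressed picture formats are captured through the buffer source
+    // path instead of the regular compressed-image callback (see the 'p' case below).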
+ bool ret = false;
+ if ((0 == strcmp (format, CameraParameters::PIXEL_FORMAT_YUV422I)) ||
+ (0 == strcmp (format, CameraParameters::PIXEL_FORMAT_YUV420SP)) ||
+ (0 == strcmp (format, CameraParameters::PIXEL_FORMAT_RGB565)) ||
+ (0 == strcmp (format, CameraParameters::PIXEL_FORMAT_BAYER_RGGB))) {
+ ret = true;
+ }
+ return ret;
+}
+
void stopPreview() {
if ( hardwareActive ) {
camera->stopPreview();
@@ -1102,96 +2108,250 @@ void stopPreview() {
previewRunning = false;
reSizePreview = true;
- closeCamera();
}
}
void initDefaults() {
- camera_index = 0;
- antibanding_mode = 0;
- focus_mode = 0;
- fpsRangeIdx = 0;
- previewSizeIDX = 1; /* Default resolution set to WVGA */
- captureSizeIDX = 3; /* Default capture resolution is 8MP */
- frameRateIDX = ARRAY_SIZE(fpsConstRanges) - 1; /* Default frame rate is 30 FPS */
-#if defined(OMAP_ENHANCEMENT) && defined(TARGET_OMAP3)
- VcaptureSizeIDX = ARRAY_SIZE(VcaptureSize) - 6;/* Default video record is WVGA */
-#else
- VcaptureSizeIDX = ARRAY_SIZE(VcaptureSize) - 2;/* Default video record is WVGA */
-#endif
- VbitRateIDX = ARRAY_SIZE(VbitRate) - 4; /*Default video bit rate is 4M */
- thumbSizeIDX = 0;
+
+ struct CameraInfo cameraInfo;
+
+ camera->getCameraInfo(camera_index, &cameraInfo);
+    // Front- and back-facing sensors both report their mounting orientation here
+    rotation = cameraInfo.orientation;
+
+ antibanding_mode = getDefaultParameter("off", numAntibanding, antiband);
+ focus_mode = getDefaultParameter("auto", numfocus, focus);
+ fpsRangeIdx = getDefaultParameter("5000,30000", rangeCnt, fps_range_str);
+ afTimeoutIdx = 0;
+ previewSizeIDX = getDefaultParameterResol("VGA", numpreviewSize, preview_Array);
+ captureSizeIDX = getDefaultParameterResol("12MP", numcaptureSize, capture_Array);
+ frameRateIDX = getDefaultParameter("30000,30000", constCnt, fps_const_str);
+ VcaptureSizeIDX = getDefaultParameterResol("HD", numVcaptureSize, Vcapture_Array);
+ VbitRateIDX = 0;
+ thumbSizeIDX = getDefaultParameterResol("VGA", numthumbnailSize, thumbnail_Array);
compensation = 0.0;
- awb_mode = 0;
- effects_mode = 0;
- scene_mode = 0;
+ awb_mode = getDefaultParameter("auto", numawb, awb);
+ effects_mode = getDefaultParameter("none", numEffects, effectss);
+ scene_mode = getDefaultParameter("auto", numscene, scene);
caf_mode = 0;
- vnf_mode = 0;
- vstab_mode = 0;
- expBracketIdx = 0;
- flashIdx = 0;
- rotation = 0;
+
+ shotConfigFlush = false;
+ streamCapture = false;
+ vstabtoggle = false;
+ vnftoggle = false;
+ AutoExposureLocktoggle = false;
+ AutoWhiteBalanceLocktoggle = false;
+ faceDetectToggle = false;
+ metaDataToggle = false;
+ expBracketIdx = BRACKETING_IDX_DEFAULT;
+ flashIdx = getDefaultParameter("off", numflash, flash);
+ previewRotation = 0;
zoomIDX = 0;
videoCodecIDX = 0;
gbceIDX = 0;
glbceIDX = 0;
+ contrast = 100;
#ifdef TARGET_OMAP4
///Temporary fix until OMAP3 and OMAP4 3A values are synced
- contrast = 90;
brightness = 50;
- sharpness = 0;
- saturation = 50;
+ sharpness = 100;
#else
- contrast = 100;
brightness = 100;
sharpness = 0;
- saturation = 100;
#endif
- iso_mode = 0;
- capture_mode = 0;
- exposure_mode = 0;
- ippIDX = 0;//set the ipp to ldc-nsf as the capture mode is set to HQ by default
+ saturation = 100;
+ iso_mode = getDefaultParameter("auto", numisoMode, isoMode);
+ capture_mode = getDefaultParameter("high-quality", nummodevalues, modevalues);
+ exposure_mode = getDefaultParameter("auto", numExposureMode, exposureMode);
+ ippIDX = 0;
ippIDX_old = ippIDX;
jpegQuality = 85;
bufferStarvationTest = 0;
meter_mode = 0;
- previewFormat = 1;
- pictureFormat = 3; // jpeg
- params.setPreviewSize(previewSize[previewSizeIDX].width, previewSize[previewSizeIDX].height);
- params.setPictureSize(captureSize[captureSizeIDX].width, captureSize[captureSizeIDX].height);
+ previewFormat = getDefaultParameter("yuv420sp", numpreviewFormat, previewFormatArray);
+ pictureFormat = getDefaultParameter("jpeg", numpictureFormat, pictureFormatArray);
+ stereoCapLayoutIDX = 0;
+ stereoLayoutIDX = 1;
+ manualConv = 0;
+ manualExp = manualExpMin;
+ manualGain = manualGainMin;
+
+ algoFixedGammaIDX = 1;
+ algoNSF1IDX = 1;
+ algoNSF2IDX = 1;
+ algoSharpeningIDX = 1;
+ algoThreeLinColorMapIDX = 1;
+ algoGICIDX = 1;
+
+ params.set(params.KEY_VIDEO_STABILIZATION, params.FALSE);
+ params.set("vnf", params.FALSE);
+ params.setPreviewSize(preview_Array[previewSizeIDX]->width, preview_Array[previewSizeIDX]->height);
+ params.setPictureSize(capture_Array[captureSizeIDX]->width, capture_Array[captureSizeIDX]->height);
params.set(CameraParameters::KEY_ROTATION, rotation);
+ params.set(KEY_SENSOR_ORIENTATION, previewRotation);
params.set(KEY_COMPENSATION, (int) (compensation * 10));
- params.set(params.KEY_WHITE_BALANCE, strawb_mode[awb_mode]);
- params.set(KEY_MODE, (capture[capture_mode]));
+ params.set(params.KEY_WHITE_BALANCE, awb[awb_mode]);
+ params.set(KEY_MODE, (modevalues[capture_mode]));
params.set(params.KEY_SCENE_MODE, scene[scene_mode]);
params.set(KEY_CAF, caf_mode);
- params.set(KEY_ISO, iso_mode);
+ params.set(KEY_ISO, isoMode[iso_mode]);
params.set(KEY_GBCE, gbce[gbceIDX]);
params.set(KEY_GLBCE, gbce[glbceIDX]);
params.set(KEY_SHARPNESS, sharpness);
params.set(KEY_CONTRAST, contrast);
params.set(CameraParameters::KEY_ZOOM, zoom[zoomIDX].idx);
- params.set(KEY_EXPOSURE, exposure[exposure_mode]);
+ params.set(KEY_EXPOSURE, exposureMode[exposure_mode]);
params.set(KEY_BRIGHTNESS, brightness);
params.set(KEY_SATURATION, saturation);
- params.set(params.KEY_EFFECT, effects[effects_mode]);
- params.setPreviewFrameRate(frameRate[ARRAY_SIZE(frameRate) - 1].fps);
- params.set(params.KEY_ANTIBANDING, antibanding[antibanding_mode]);
+ params.set(params.KEY_EFFECT, effectss[effects_mode]);
+ params.set(CameraParameters::KEY_PREVIEW_FPS_RANGE, fps_const_str[frameRateIDX]);
+ params.set(params.KEY_ANTIBANDING, antiband[antibanding_mode]);
params.set(params.KEY_FOCUS_MODE, focus[focus_mode]);
params.set(KEY_IPP, ipp_mode[ippIDX]);
params.set(CameraParameters::KEY_JPEG_QUALITY, jpegQuality);
- params.setPreviewFormat(pixelformat[previewFormat].pixformat);
- params.setPictureFormat(codingformat[pictureFormat]);
- params.set(KEY_BUFF_STARV, bufferStarvationTest); //enable buffer starvation
+ params.setPreviewFormat(previewFormatArray[previewFormat]);
+ params.setPictureFormat(pictureFormatArray[pictureFormat]);
+ params.set(KEY_BUFF_STARV, bufferStarvationTest);
params.set(KEY_METERING_MODE, metering[meter_mode]);
- params.set(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH, previewSize[thumbSizeIDX].width);
- params.set(CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT, previewSize[thumbSizeIDX].height);
- ManualConvergenceValuesIDX = ManualConvergenceDefaultValueIDX;
- params.set(KEY_MANUALCONVERGENCE_VALUES, manualconvergencevalues[ManualConvergenceValuesIDX]);
+ params.set(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH, thumbnail_Array[thumbSizeIDX]->width);
+ params.set(CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT, thumbnail_Array[thumbSizeIDX]->height);
+ params.set(KEY_MANUAL_CONVERGENCE, manualConv);
+ params.set(KEY_MANUAL_EXPOSURE, manualExp);
+ params.set(KEY_MANUAL_GAIN_ISO, manualGain);
+ params.set(KEY_MANUAL_EXPOSURE_RIGHT, manualExp);
+ params.set(KEY_MANUAL_GAIN_ISO_RIGHT, manualGain);
params.set(KEY_S3D2D_PREVIEW_MODE, "off");
- params.set(KEY_STEREO_CAMERA, "false");
params.set(KEY_EXIF_MODEL, MODEL);
params.set(KEY_EXIF_MAKE, MAKE);
+
+ setDefaultExpGainPreset(shotParams, expBracketIdx);
+}
+
+void setDefaultExpGainPreset(ShotParameters &params, int idx) {
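+    // Apply the preset bracketing string for the given table index, or warn when
+    // the index falls outside the expBracketing table.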
+ if ( ((int)ARRAY_SIZE(expBracketing) > idx) && (0 <= idx) ) {
+ setExpGainPreset(params, expBracketing[idx].value, false, expBracketing[idx].param_type, shotConfigFlush);
+ } else {
+        printf("setDefaultExpGainPreset: Index (%d) is out of range 0 ~ %u\n", idx, (unsigned int) (ARRAY_SIZE(expBracketing) - 1));
+ }
+}
+
+void setSingleExpGainPreset(ShotParameters &params, int idx, int exp, int gain) {
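+    // Build a single exposure/gain entry in the preset syntax: relative values are
+    // signed (e.g. "(+50,+10)"), absolute values unsigned, with an optional trailing
+    // "F" to force the values to be applied.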
+ String8 val;
+
+ if (PARAM_EXP_BRACKET_PARAM_PAIR == expBracketing[idx].param_type) {
+ val.append("(");
+ }
+
+ if (PARAM_EXP_BRACKET_VALUE_REL == expBracketing[idx].value_type) {
+ val.appendFormat("%+d", exp);
+ } else {
+ val.appendFormat("%u", (unsigned int) exp);
+ }
+
+ if (PARAM_EXP_BRACKET_PARAM_PAIR == expBracketing[idx].param_type) {
+ if (PARAM_EXP_BRACKET_VALUE_REL == expBracketing[idx].value_type) {
+ val.appendFormat(",%+d)", gain);
+ } else {
+ val.appendFormat(",%u)", (unsigned int) gain);
+ }
+ }
+
+ if (PARAM_EXP_BRACKET_APPLY_FORCED == expBracketing[idx].apply_type) {
+ val.append("F");
+ }
+
+ setExpGainPreset(params, val, false, expBracketing[idx].param_type, false);
+}
+
+void setExpGainPreset(ShotParameters &params, const char *input, bool force, param_ExpBracketParamType_t type, bool flush) {
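+    // Three cases: a comma-separated list of relative EV compensation values,
+    // a list of "(exposure,gain)" pairs, or no bracketing at all. The burst count
+    // is derived from the number of entries in the input string.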
+ const char *startPtr = NULL;
+ size_t i = 0;
+
+ if (NULL == input) {
+ printf("setExpGainPreset: missing input string\n");
+ } else if ( (force && (NULL == strpbrk(input, "()"))) ||
+ (PARAM_EXP_BRACKET_PARAM_COMP == type) ) {
+ // parse for the number of inputs (count the number of ',' + 1)
+ startPtr = strchr(input, ',');
+ while (startPtr != NULL) {
+ i++;
+ startPtr = strchr(startPtr + 1, ',');
+ }
+ i++;
+        printf("relative EV input: \"%s\"\nnumber of relative EV values: %zu (%s)\n",
+                input, i, flush ? "reset" : "append");
+ burst = i;
+ burstCount = i;
+ params.set(ShotParameters::KEY_BURST, burst);
+ params.set(ShotParameters::KEY_EXP_COMPENSATION, input);
+ params.remove(ShotParameters::KEY_EXP_GAIN_PAIRS);
+ params.set(ShotParameters::KEY_FLUSH_CONFIG,
+ flush ? ShotParameters::TRUE : ShotParameters::FALSE);
+ } else if ( force || (PARAM_EXP_BRACKET_PARAM_PAIR == type) ) {
+ // parse for the number of inputs (count the number of '(')
+ startPtr = strchr(input, '(');
+ while (startPtr != NULL) {
+ i++;
+ startPtr = strchr(startPtr + 1, '(');
+ }
+        printf("absolute exposure,gain input: \"%s\"\nNumber of brackets: %zu (%s)\n",
+                input, i, flush ? "reset" : "append");
+ burst = i;
+ burstCount = i;
+ params.set(ShotParameters::KEY_BURST, burst);
+ params.set(ShotParameters::KEY_EXP_GAIN_PAIRS, input);
+ params.remove(ShotParameters::KEY_EXP_COMPENSATION);
+ params.set(ShotParameters::KEY_FLUSH_CONFIG,
+ flush ? ShotParameters::TRUE : ShotParameters::FALSE);
+ } else {
+ printf("no bracketing input: \"%s\"\n", input);
+ params.remove(ShotParameters::KEY_EXP_GAIN_PAIRS);
+ params.remove(ShotParameters::KEY_EXP_COMPENSATION);
+ params.remove(ShotParameters::KEY_BURST);
+ params.remove(ShotParameters::KEY_FLUSH_CONFIG);
+ }
+}
+
+void calcNextSingleExpGainPreset(int idx, int &exp, int &gain) {
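+    // Step the exposure/gain used while stream capturing: absolute values double
+    // the exposure until it exceeds 1000000 and then bump the gain; relative
+    // values sweep both in steps of 50 over [-200, 200].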
+ if (PARAM_EXP_BRACKET_VALUE_ABS == expBracketing[idx].value_type) {
+ // absolute
+ if ( (0 == exp) && (0 == gain) ) {
+ exp=100;
+ gain = 150;
+ printf("Streaming: Init default absolute exp./gain: %d,%d\n", exp, gain);
+ }
+
+ exp *= 2;
+ if (1000000 < exp) {
+ exp = 100;
+ gain += 50;
+ if(400 < gain) {
+ gain = 50;
+ }
+ }
+ } else {
+ // relative
+ exp += 50;
+ if (200 < exp) {
+ exp = -200;
+ gain += 50;
+ if(200 < gain) {
+ gain = -200;
+ }
+ }
+ }
+}
+
+void updateShotConfigFlushParam() {
+ // Will update flush shot config parameter if already present
+ // Otherwise, keep empty (will be set later in setExpGainPreset())
+ if (NULL != shotParams.get(ShotParameters::KEY_FLUSH_CONFIG)) {
+ shotParams.set(ShotParameters::KEY_FLUSH_CONFIG,
+ shotConfigFlush ? ShotParameters::TRUE : ShotParameters::FALSE);
+ }
}
int menu_gps() {
@@ -1261,7 +2421,6 @@ int menu_gps() {
break;
- case 'Q':
case 'q':
return -1;
@@ -1273,107 +2432,229 @@ int menu_gps() {
return 0;
}
-int functional_menu() {
+int menu_algo() {
char ch;
if (print_menu) {
+ printf("\n\n== ALGO ENABLE/DISABLE MENU ============\n\n");
+ printf(" a. Fixed Gamma: %s\n", algoFixedGamma[algoFixedGammaIDX]);
+ printf(" s. NSF1: %s\n", algoNSF1[algoNSF1IDX]);
+ printf(" d. NSF2: %s\n", algoNSF2[algoNSF2IDX]);
+ printf(" f. Sharpening: %s\n", algoSharpening[algoSharpeningIDX]);
+ printf(" g. Color Conversion: %s\n", algoThreeLinColorMap[algoThreeLinColorMapIDX]);
+        printf(" h. Green Imbalance Correction: %s\n", algoGIC[algoGICIDX]);
+ printf("\n");
+ printf(" q. Return to main menu\n");
+ printf("\n");
+ printf(" Choice: ");
+ }
- printf("\n\n=========== FUNCTIONAL TEST MENU ===================\n\n");
+ ch = getchar();
+ printf("%c", ch);
- printf(" \n\nSTART / STOP / GENERAL SERVICES \n");
- printf(" -----------------------------\n");
- printf(" A Select Camera %s\n", cameras[camera_index]);
- printf(" [. Resume Preview after capture\n");
- printf(" 0. Reset to defaults\n");
- printf(" q. Quit\n");
- printf(" @. Disconnect and Reconnect to CameraService \n");
- printf(" /. Enable/Disable showfps: %s\n", ((showfps)? "Enabled":"Disabled"));
- printf(" a. GEO tagging settings menu\n");
- printf(" E. Camera Capability Dump");
+ print_menu = 1;
+
+ switch (ch) {
+
+ case 'a':
+ case 'A':
+ algoFixedGammaIDX++;
+ algoFixedGammaIDX %= ARRAY_SIZE(algoFixedGamma);
+ params.set(KEY_ALGO_FIXED_GAMMA, (algoFixedGamma[algoFixedGammaIDX]));
+
+ if ( hardwareActive )
+ camera->setParameters(params.flatten());
+
+ break;
+
+ case 's':
+ case 'S':
+ algoNSF1IDX++;
+ algoNSF1IDX %= ARRAY_SIZE(algoNSF1);
+ params.set(KEY_ALGO_NSF1, (algoNSF1[algoNSF1IDX]));
+ if ( hardwareActive )
+ camera->setParameters(params.flatten());
+
+ break;
+
+ case 'd':
+ case 'D':
+ algoNSF2IDX++;
+ algoNSF2IDX %= ARRAY_SIZE(algoNSF2);
+ params.set(KEY_ALGO_NSF2, (algoNSF2[algoNSF2IDX]));
+
+ if ( hardwareActive )
+ camera->setParameters(params.flatten());
+
+ break;
+
+ case 'f':
+ case 'F':
+ algoSharpeningIDX++;
+ algoSharpeningIDX %= ARRAY_SIZE(algoSharpening);
+ params.set(KEY_ALGO_SHARPENING, (algoSharpening[algoSharpeningIDX]));
- printf(" \n\n PREVIEW SUB MENU \n");
- printf(" -----------------------------\n");
- printf(" 1. Start Preview\n");
- printf(" 2. Stop Preview\n");
- printf(" ~. Preview format %s\n", pixelformat[previewFormat].pixformat);
+ if ( hardwareActive )
+ camera->setParameters(params.flatten());
+
+ break;
+
+ case 'g':
+ case 'G':
+ algoThreeLinColorMapIDX++;
+ algoThreeLinColorMapIDX %= ARRAY_SIZE(algoThreeLinColorMap);
+ params.set(KEY_ALGO_THREELINCOLORMAP, (algoThreeLinColorMap[algoThreeLinColorMapIDX]));
+
+ if ( hardwareActive )
+ camera->setParameters(params.flatten());
+
+ break;
+
+ case 'h':
+ case 'H':
+ algoGICIDX++;
+ algoGICIDX %= ARRAY_SIZE(algoGIC);
+ params.set(KEY_ALGO_GIC, (algoGIC[algoGICIDX]));
+
+ if ( hardwareActive )
+ camera->setParameters(params.flatten());
+
+ break;
+
+ case 'Q':
+ case 'q':
+ return -1;
+
+ default:
+ print_menu = 0;
+ break;
+ }
+
+ return 0;
+}
+
+int functional_menu() {
+ char ch;
+ char area1[MAX_LINES][MAX_SYMBOLS+1];
+ char area2[MAX_LINES][MAX_SYMBOLS+1];
+ int j = 0;
+ int k = 0;
+ const char *valstr = NULL;
+ struct CameraInfo cameraInfo;
+ bool queueEmpty = true;
+
+ memset(area1, '\0', MAX_LINES*(MAX_SYMBOLS+1));
+ memset(area2, '\0', MAX_LINES*(MAX_SYMBOLS+1));
+
+ if (print_menu) {
+
+ printf("\n========================================= FUNCTIONAL TEST MENU =========================================\n");
+
+ snprintf(area1[j++], MAX_SYMBOLS, " START / STOP / GENERAL SERVICES");
+ snprintf(area1[j++], MAX_SYMBOLS, " -----------------------------");
+ snprintf(area1[j++], MAX_SYMBOLS, "A Select Camera %s", cameras[camera_index]);
+ snprintf(area1[j++], MAX_SYMBOLS, "[. Resume Preview after capture");
+ snprintf(area1[j++], MAX_SYMBOLS, "0. Reset to defaults");
+ snprintf(area1[j++], MAX_SYMBOLS, "q. Quit");
+ snprintf(area1[j++], MAX_SYMBOLS, "@. Disconnect and Reconnect to CameraService");
+ snprintf(area1[j++], MAX_SYMBOLS, "/. Enable/Disable showfps: %s", ((showfps)? "Enabled":"Disabled"));
+ snprintf(area1[j++], MAX_SYMBOLS, "a. GEO tagging settings menu");
+ snprintf(area1[j++], MAX_SYMBOLS, "E. Camera Capability Dump");
+
+ snprintf(area1[j++], MAX_SYMBOLS, " PREVIEW SUB MENU");
+ snprintf(area1[j++], MAX_SYMBOLS, " -----------------------------");
+ snprintf(area1[j++], MAX_SYMBOLS, "1. Start Preview");
+ snprintf(area1[j++], MAX_SYMBOLS, "2. Stop Preview");
+ snprintf(area1[j++], MAX_SYMBOLS, "~. Preview format %s", previewFormatArray[previewFormat]);
#if defined(OMAP_ENHANCEMENT) && defined(TARGET_OMAP3)
- printf(" 4. Preview size: %4d x %4d - %s\n",previewSize[previewSizeIDX].width, previewSize[previewSizeIDX].height, previewSize[previewSizeIDX].desc);
+ snprintf(area1[j++], MAX_SYMBOLS, "4. Preview size: %4d x %4d - %s",preview_Array[previewSizeIDX]->width, preview_Array[previewSizeIDX]->height, preview_Array[previewSizeIDX]->name);
#else
- printf(" 4. Preview size: %4d x %4d - %s\n",previewSize[previewSizeIDX].width, camera_index == 2 ? previewSize[previewSizeIDX].height*2 : previewSize[previewSizeIDX].height, previewSize[previewSizeIDX].desc);
+ snprintf(area1[j++], MAX_SYMBOLS, "4. Preview size: %4d x %4d - %s",preview_Array[previewSizeIDX]->width, stereoMode ? preview_Array[previewSizeIDX]->height*2 : preview_Array[previewSizeIDX]->height, preview_Array[previewSizeIDX]->name);
#endif
- printf(" R. Preview framerate range: %s\n", fpsRanges[fpsRangeIdx].rangeDescription);
- printf(" &. Dump a preview frame\n");
- printf(" _. Auto Convergence mode: %s\n", autoconvergencemode[AutoConvergenceModeIDX]);
- printf(" ^. Manual Convergence Value: %s\n", manualconvergencevalues[ManualConvergenceValuesIDX]);
- printf(" {. 2D Preview in 3D Stereo Mode: %s\n", params.get(KEY_S3D2D_PREVIEW_MODE));
-
- printf(" \n\n IMAGE CAPTURE SUB MENU \n");
- printf(" -----------------------------\n");
- printf(" p. Take picture/Full Press\n");
- printf(" H. Exposure Bracketing: %s\n", expBracketing[expBracketIdx]);
- printf(" U. Temporal Bracketing: %s\n", tempBracketing[tempBracketIdx]);
- printf(" W. Temporal Bracketing Range: [-%d;+%d]\n", tempBracketRange, tempBracketRange);
- printf(" $. Picture Format: %s\n", codingformat[pictureFormat]);
- printf(" 3. Picture Rotation: %3d degree\n", rotation );
- printf(" 5. Picture size: %4d x %4d - %s\n",captureSize[captureSizeIDX].width, captureSize[captureSizeIDX].height, captureSize[captureSizeIDX].name);
- printf(" i. ISO mode: %s\n", iso[iso_mode]);
- printf(" u. Capture Mode: %s\n", capture[capture_mode]);
- printf(" k. IPP Mode: %s\n", ipp_mode[ippIDX]);
- printf(" K. GBCE: %s\n", gbce[gbceIDX]);
- printf(" O. GLBCE %s\n", gbce[glbceIDX]);
- printf(" o. Jpeg Quality: %d\n", jpegQuality);
- printf(" #. Burst Images: %3d\n", burst);
- printf(" :. Thumbnail Size: %4d x %4d - %s\n",previewSize[thumbSizeIDX].width, previewSize[thumbSizeIDX].height, previewSize[thumbSizeIDX].desc);
- printf(" ': Thumbnail Quality %d\n", thumbQuality);
-
- printf(" \n\n VIDEO CAPTURE SUB MENU \n");
- printf(" -----------------------------\n");
-
- printf(" 6. Start Video Recording\n");
- printf(" 2. Stop Recording\n");
- printf(" l. Video Capture resolution: %4d x %4d - %s\n",VcaptureSize[VcaptureSizeIDX].width,VcaptureSize[VcaptureSizeIDX].height, VcaptureSize[VcaptureSizeIDX].desc);
- printf(" ]. Video Bit rate : %s\n", VbitRate[VbitRateIDX].desc);
- printf(" 9. Video Codec: %s\n", videoCodecs[videoCodecIDX].desc);
- printf(" D. Audio Codec: %s\n", audioCodecs[audioCodecIDX].desc);
- printf(" v. Output Format: %s\n", outputFormat[outputFormatIDX].desc);
-
- if (camera_index == 1) {
- printf(" r. Framerate: %d\n", fpsConstRangesSec[frameRateIDXSec].constFramerate);
- }
- else {
- printf(" r. Framerate: %d\n", fpsConstRanges[frameRateIDX].constFramerate);
- }
- printf(" *. Start Video Recording dump ( 1 raw frame ) \n");
- printf(" B VNF %s \n", vnf[vnf_mode]);
- printf(" C VSTAB %s", vstab[vstab_mode]);
-
- printf(" \n\n 3A SETTING SUB MENU \n");
- printf(" -----------------------------\n");
-
- printf(" M. Measurement Data: %s\n", measurement[measurementIdx]);
- printf(" F. Start face detection \n");
- printf(" T. Stop face detection \n");
- printf(" G. Touch/Focus area AF\n");
- printf(" f. Auto Focus/Half Press\n");
- printf(" J.Flash: %s\n", flashModes[flashIdx]);
- printf(" 7. EV offset: %4.1f\n", compensation);
- printf(" 8. AWB mode: %s\n", strawb_mode[awb_mode]);
- printf(" z. Zoom %s\n", zoom[zoomIDX].zoom_description);
- printf(" j. Exposure %s\n", exposure[exposure_mode]);
- printf(" e. Effect: %s\n", effects[effects_mode]);
- printf(" w. Scene: %s\n", scene[scene_mode]);
- printf(" s. Saturation: %d\n", saturation);
- printf(" c. Contrast: %d\n", contrast);
- printf(" h. Sharpness: %d\n", sharpness);
- printf(" b. Brightness: %d\n", brightness);
- printf(" x. Antibanding: %s\n", antibanding[antibanding_mode]);
- printf(" g. Focus mode: %s\n", focus[focus_mode]);
- printf(" m. Metering mode: %s\n" , metering[meter_mode]);
- printf(" <. Exposure Lock: %s\n", lock[elockidx]);
- printf(" >. WhiteBalance Lock: %s\n",lock[wblockidx]);
+ snprintf(area1[j++], MAX_SYMBOLS, "R. Preview framerate range: %s", rangeDescription[fpsRangeIdx]);
+ snprintf(area1[j++], MAX_SYMBOLS, "&. Dump a preview frame");
+ if (stereoMode) {
+ snprintf(area1[j++], MAX_SYMBOLS, "_. Auto Convergence mode: %s", autoconvergencemode[AutoConvergenceModeIDX]);
+            snprintf(area1[j++], MAX_SYMBOLS, "^. Manual Convergence Value: %d", manualConv);
+            snprintf(area1[j++], MAX_SYMBOLS, "L. Stereo Preview Layout: %s", stereoLayout[stereoLayoutIDX]);
+            snprintf(area1[j++], MAX_SYMBOLS, ". Stereo Capture Layout: %s", stereoCapLayout[stereoCapLayoutIDX]);
+ }
+ snprintf(area1[j++], MAX_SYMBOLS, "{. 2D Preview in 3D Stereo Mode: %s", params.get(KEY_S3D2D_PREVIEW_MODE));
+
+ snprintf(area1[j++], MAX_SYMBOLS, " IMAGE CAPTURE SUB MENU");
+ snprintf(area1[j++], MAX_SYMBOLS, " -----------------------------");
+ snprintf(area1[j++], MAX_SYMBOLS, "p. Take picture/Full Press");
+ snprintf(area1[j++], MAX_SYMBOLS, "n. Flush shot config queue: %s", shotConfigFlush ? "On" : "Off");
+ snprintf(area1[j++], MAX_SYMBOLS, "H. Exposure Bracketing: %s", expBracketing[expBracketIdx].desc);
+ snprintf(area1[j++], MAX_SYMBOLS, "U. Temporal Bracketing: %s", tempBracketing[tempBracketIdx]);
+ snprintf(area1[j++], MAX_SYMBOLS, "W. Temporal Bracketing Range: [-%d;+%d]", tempBracketRange, tempBracketRange);
+ snprintf(area1[j++], MAX_SYMBOLS, "$. Picture Format: %s", pictureFormatArray[pictureFormat]);
+ snprintf(area1[j++], MAX_SYMBOLS, "3. Picture Rotation: %3d degree", rotation );
+ snprintf(area1[j++], MAX_SYMBOLS, "V. Preview Rotation: %3d degree", previewRotation );
+ snprintf(area1[j++], MAX_SYMBOLS, "5. Picture size: %4d x %4d - %s",capture_Array[captureSizeIDX]->width, capture_Array[captureSizeIDX]->height, capture_Array[captureSizeIDX]->name);
+ snprintf(area1[j++], MAX_SYMBOLS, "i. ISO mode: %s", isoMode[iso_mode]);
+        snprintf(area1[j++], MAX_SYMBOLS, ", Manual gain iso value = %d", manualGain);
+ snprintf(area1[j++], MAX_SYMBOLS, "u. Capture Mode: %s", modevalues[capture_mode]);
+ snprintf(area1[j++], MAX_SYMBOLS, "k. IPP Mode: %s", ipp_mode[ippIDX]);
+ snprintf(area1[j++], MAX_SYMBOLS, "K. GBCE: %s", gbce[gbceIDX]);
+ snprintf(area1[j++], MAX_SYMBOLS, "O. GLBCE %s", gbce[glbceIDX]);
+ snprintf(area1[j++], MAX_SYMBOLS, "o. Jpeg Quality: %d", jpegQuality);
+ snprintf(area1[j++], MAX_SYMBOLS, "#. Burst Images: %3d", burst);
+ snprintf(area1[j++], MAX_SYMBOLS, ":. Thumbnail Size: %4d x %4d - %s",thumbnail_Array[thumbSizeIDX]->width, thumbnail_Array[thumbSizeIDX]->height, thumbnail_Array[thumbSizeIDX]->name);
+ snprintf(area1[j++], MAX_SYMBOLS, "': Thumbnail Quality %d", thumbQuality);
+
+ snprintf(area2[k++], MAX_SYMBOLS, " VIDEO CAPTURE SUB MENU");
+ snprintf(area2[k++], MAX_SYMBOLS, " -----------------------------");
+ snprintf(area2[k++], MAX_SYMBOLS, "6. Start Video Recording");
+ snprintf(area2[k++], MAX_SYMBOLS, "2. Stop Recording");
+ snprintf(area2[k++], MAX_SYMBOLS, "l. Video Capture resolution: %4d x %4d - %s",Vcapture_Array[VcaptureSizeIDX]->width,Vcapture_Array[VcaptureSizeIDX]->height, Vcapture_Array[VcaptureSizeIDX]->name);
+ snprintf(area2[k++], MAX_SYMBOLS, "]. Video Bit rate : %s", VbitRate[VbitRateIDX].desc);
+ snprintf(area2[k++], MAX_SYMBOLS, "9. Video Codec: %s", videoCodecs[videoCodecIDX].desc);
+ snprintf(area2[k++], MAX_SYMBOLS, "D. Audio Codec: %s", audioCodecs[audioCodecIDX].desc);
+ snprintf(area2[k++], MAX_SYMBOLS, "v. Output Format: %s", outputFormat[outputFormatIDX].desc);
+ snprintf(area2[k++], MAX_SYMBOLS, "r. Framerate: %d", constFramerate[frameRateIDX]);
+ snprintf(area2[k++], MAX_SYMBOLS, "*. Start Video Recording dump ( 1 raw frame )");
+ snprintf(area2[k++], MAX_SYMBOLS, "B VNF %s", vnftoggle? "On" : "Off");
+ snprintf(area2[k++], MAX_SYMBOLS, "C VSTAB %s", vstabtoggle? "On" : "Off");
+
+ snprintf(area2[k++], MAX_SYMBOLS, " 3A SETTING SUB MENU");
+ snprintf(area2[k++], MAX_SYMBOLS, " -----------------------------");
+ snprintf(area2[k++], MAX_SYMBOLS, "M. Measurement Data: %s", measurement[measurementIdx]);
+ snprintf(area2[k++], MAX_SYMBOLS, "F. Toggle face detection: %s", faceDetectToggle ? "On" : "Off");
+ snprintf(area2[k++], MAX_SYMBOLS, "T. Toggle metadata: %s", metaDataToggle ? "On" : "Off");
+ snprintf(area2[k++], MAX_SYMBOLS, "G. Touch/Focus area AF");
+ snprintf(area2[k++], MAX_SYMBOLS, "y. Metering area");
+ snprintf(area2[k++], MAX_SYMBOLS, "Y. Metering area center");
+ snprintf(area2[k++], MAX_SYMBOLS, "N. Metering area average");
+ snprintf(area2[k++], MAX_SYMBOLS, "f. Auto Focus/Half Press");
+ snprintf(area2[k++], MAX_SYMBOLS, "I. AF Timeout %s", afTimeout[afTimeoutIdx]);
+        snprintf(area2[k++], MAX_SYMBOLS, "J. Flash: %s", flash[flashIdx]);
+ snprintf(area2[k++], MAX_SYMBOLS, "7. EV offset: %4.1f", compensation);
+ snprintf(area2[k++], MAX_SYMBOLS, "8. AWB mode: %s", awb[awb_mode]);
+ snprintf(area2[k++], MAX_SYMBOLS, "z. Zoom %s", zoom[zoomIDX].zoom_description);
+ snprintf(area2[k++], MAX_SYMBOLS, "Z. Smooth Zoom %s", zoom[zoomIDX].zoom_description);
+ snprintf(area2[k++], MAX_SYMBOLS, "j. Exposure %s", exposureMode[exposure_mode]);
+        snprintf(area2[k++], MAX_SYMBOLS, "Q. Manual exposure value = %d", manualExp);
+ snprintf(area2[k++], MAX_SYMBOLS, "e. Effect: %s", effectss[effects_mode]);
+ snprintf(area2[k++], MAX_SYMBOLS, "w. Scene: %s", scene[scene_mode]);
+ snprintf(area2[k++], MAX_SYMBOLS, "s. Saturation: %d", saturation);
+ snprintf(area2[k++], MAX_SYMBOLS, "c. Contrast: %d", contrast);
+ snprintf(area2[k++], MAX_SYMBOLS, "h. Sharpness: %d", sharpness);
+ snprintf(area2[k++], MAX_SYMBOLS, "b. Brightness: %d", brightness);
+ snprintf(area2[k++], MAX_SYMBOLS, "x. Antibanding: %s", antiband[antibanding_mode]);
+ snprintf(area2[k++], MAX_SYMBOLS, "g. Focus mode: %s", focus[focus_mode]);
+ snprintf(area2[k++], MAX_SYMBOLS, "m. Metering mode: %s" , metering[meter_mode]);
+ snprintf(area2[k++], MAX_SYMBOLS, "<. Exposure Lock: %s", AutoExposureLocktoggle ? "On" : "Off");
+ snprintf(area2[k++], MAX_SYMBOLS, ">. WhiteBalance Lock: %s",AutoWhiteBalanceLocktoggle ? "On": "Off");
+ snprintf(area2[k++], MAX_SYMBOLS, "). Mechanical Misalignment Correction: %s",misalignmentCorrection[enableMisalignmentCorrectionIdx]);
+ snprintf(area2[k++], MAX_SYMBOLS, "d. Algo enable/disable functions menu");
+
printf("\n");
- printf(" Choice: ");
+ for (int i=0; (i<j || i < k) && i<MAX_LINES; i++) {
+ printf("%-65s \t %-65s\n", area1[i], area2[i]);
+ }
+ printf(" Choice:");
}
ch = getchar();
@@ -1385,43 +2666,37 @@ int functional_menu() {
case '_':
AutoConvergenceModeIDX++;
- AutoConvergenceModeIDX %= ARRAY_SIZE(autoconvergencemode);
+ AutoConvergenceModeIDX %= numAutoConvergence;
params.set(KEY_AUTOCONVERGENCE, autoconvergencemode[AutoConvergenceModeIDX]);
- if ( strcmp (autoconvergencemode[AutoConvergenceModeIDX], AUTOCONVERGENCE_MODE_MANUAL) == 0) {
- params.set(KEY_MANUALCONVERGENCE_VALUES, manualconvergencevalues[ManualConvergenceValuesIDX]);
- }
- else {
- params.set(KEY_MANUALCONVERGENCE_VALUES, manualconvergencevalues[ManualConvergenceDefaultValueIDX]);
- ManualConvergenceValuesIDX = ManualConvergenceDefaultValueIDX;
+ if ( strcmp (autoconvergencemode[AutoConvergenceModeIDX], "manual") == 0) {
+ params.set(KEY_MANUAL_CONVERGENCE, manualConv);
+ } else {
+ if ( strcmp (autoconvergencemode[AutoConvergenceModeIDX], "touch") == 0) {
+ params.set(CameraParameters::KEY_METERING_AREAS, MeteringAreas);
+ }
+ manualConv = 0;
+ params.set(KEY_MANUAL_CONVERGENCE, manualConv);
}
camera->setParameters(params.flatten());
break;
case '^':
- if ( strcmp (autoconvergencemode[AutoConvergenceModeIDX], AUTOCONVERGENCE_MODE_MANUAL) == 0) {
- ManualConvergenceValuesIDX++;
- ManualConvergenceValuesIDX %= ARRAY_SIZE(manualconvergencevalues);
- params.set(KEY_MANUALCONVERGENCE_VALUES, manualconvergencevalues[ManualConvergenceValuesIDX]);
+ if ( strcmp (autoconvergencemode[AutoConvergenceModeIDX], "manual") == 0) {
+ manualConv += manualConvStep;
+ if( manualConv > manualConvMax) {
+ manualConv = manualConvMin;
+ }
+ params.set(KEY_MANUAL_CONVERGENCE, manualConv);
camera->setParameters(params.flatten());
}
break;
case 'A':
camera_index++;
- camera_index %= ARRAY_SIZE(cameras);
- if ( camera_index == 2) {
- params.set(KEY_STEREO_CAMERA, "true");
- } else {
- params.set(KEY_STEREO_CAMERA, "false");
- }
+ camera_index %= numCamera;
+ firstTime = true;
closeCamera();
-
openCamera();
-
- if (camera_index == 0) {
- params.setPreviewFrameRate(30);
- } else {
- params.setPreviewFrameRate(27);
- }
+ initDefaults();
break;
@@ -1434,6 +2709,7 @@ int functional_menu() {
case '0':
initDefaults();
+ camera_index = 0;
break;
case '1':
@@ -1447,14 +2723,12 @@ int functional_menu() {
break;
case '2':
- stopPreview();
-
if ( recordingMode ) {
- camera->disconnect();
- camera.clear();
stopRecording();
+ stopPreview();
closeRecorder();
-
+ camera->disconnect();
+ camera.clear();
camera = Camera::connect(camera_index);
if ( NULL == camera.get() ) {
sleep(1);
@@ -1466,6 +2740,8 @@ int functional_menu() {
camera->setListener(new CameraHandler());
camera->setParameters(params.flatten());
recordingMode = false;
+ } else {
+ stopPreview();
}
break;
@@ -1479,16 +2755,21 @@ int functional_menu() {
break;
+ case 'V':
+ previewRotation += 90;
+ previewRotation %= 360;
+ params.set(KEY_SENSOR_ORIENTATION, previewRotation);
+
+ if ( hardwareActive )
+ camera->setParameters(params.flatten());
+
+ break;
+
case '4':
previewSizeIDX += 1;
- previewSizeIDX %= ARRAY_SIZE(previewSize);
- if ( NULL != params.get(KEY_STEREO_CAMERA) ) {
- if ( strcmp(params.get(KEY_STEREO_CAMERA), "false") == 0 ) {
- params.setPreviewSize(previewSize[previewSizeIDX].width, previewSize[previewSizeIDX].height);
- } else {
- params.setPreviewSize(previewSize[previewSizeIDX].width, previewSize[previewSizeIDX].height*2);
- }
- }
+ previewSizeIDX %= numpreviewSize;
+ params.setPreviewSize(preview_Array[previewSizeIDX]->width, preview_Array[previewSizeIDX]->height);
+
reSizePreview = true;
if ( hardwareActive && previewRunning ) {
@@ -1503,17 +2784,63 @@ int functional_menu() {
case '5':
captureSizeIDX += 1;
- captureSizeIDX %= ARRAY_SIZE(captureSize);
- params.setPictureSize(captureSize[captureSizeIDX].width, captureSize[captureSizeIDX].height);
+ captureSizeIDX %= numcaptureSize;
+ printf("CaptureSizeIDX %d \n", captureSizeIDX);
+ params.setPictureSize(capture_Array[captureSizeIDX]->width, capture_Array[captureSizeIDX]->height);
if ( hardwareActive )
camera->setParameters(params.flatten());
+
+ requestBufferSourceReset();
+
break;
case 'l':
- case 'L':
+
VcaptureSizeIDX++;
- VcaptureSizeIDX %= ARRAY_SIZE(VcaptureSize);
+ VcaptureSizeIDX %= numVcaptureSize;
+ break;
+
+ case 'L' :
+ stereoLayoutIDX++;
+ stereoLayoutIDX %= numLay;
+
+ if (stereoMode) {
+ firstTimeStereo = false;
+ params.set(KEY_S3D_PRV_FRAME_LAYOUT, stereoLayout[stereoLayoutIDX]);
+ }
+
+ getSizeParametersFromCapabilities();
+
+ if (hardwareActive && previewRunning) {
+ stopPreview();
+ camera->setParameters(params.flatten());
+ startPreview();
+ } else if (hardwareActive) {
+ camera->setParameters(params.flatten());
+ }
+
+ break;
+
+ case '.' :
+ stereoCapLayoutIDX++;
+ stereoCapLayoutIDX %= numCLay;
+
+ if (stereoMode) {
+ firstTimeStereo = false;
+ params.set(KEY_S3D_CAP_FRAME_LAYOUT, stereoCapLayout[stereoCapLayoutIDX]);
+ }
+
+ getSizeParametersFromCapabilities();
+
+ if (hardwareActive && previewRunning) {
+ stopPreview();
+ camera->setParameters(params.flatten());
+ startPreview();
+ } else if (hardwareActive) {
+ camera->setParameters(params.flatten());
+ }
+
break;
case ']':
@@ -1572,8 +2899,8 @@ int functional_menu() {
case '8':
awb_mode++;
- awb_mode %= ARRAY_SIZE(strawb_mode);
- params.set(params.KEY_WHITE_BALANCE, strawb_mode[awb_mode]);
+ awb_mode %= numawb;
+ params.set(params.KEY_WHITE_BALANCE, awb[awb_mode]);
if ( hardwareActive )
camera->setParameters(params.flatten());
@@ -1586,8 +2913,8 @@ int functional_menu() {
break;
case '~':
previewFormat += 1;
- previewFormat %= ARRAY_SIZE(pixelformat) - 1;
- params.setPreviewFormat(pixelformat[previewFormat].pixformat);
+ previewFormat %= numpreviewFormat;
+ params.setPreviewFormat(previewFormatArray[previewFormat]);
if ( hardwareActive )
camera->setParameters(params.flatten());
@@ -1595,29 +2922,29 @@ int functional_menu() {
break;
case '$':
pictureFormat += 1;
- if ( NULL != params.get(KEY_STEREO_CAMERA) ) {
- if ( strcmp(params.get(KEY_STEREO_CAMERA), "false") == 0 && pictureFormat > 4 )
- pictureFormat = 0;
+ pictureFormat %= numpictureFormat;
+ printf("pictureFormat %d\n", pictureFormat);
+            printf("numpictureFormat %d\n", numpictureFormat);
+ params.setPictureFormat(pictureFormatArray[pictureFormat]);
+
+ queueEmpty = true;
+ if ( bufferSourceOutputThread.get() ) {
+ if ( 0 < bufferSourceOutputThread->hasBuffer() ) {
+ queueEmpty = false;
+ }
}
- pictureFormat %= ARRAY_SIZE(codingformat);
- params.setPictureFormat(codingformat[pictureFormat]);
- if ( hardwareActive )
+ if ( hardwareActive && queueEmpty )
camera->setParameters(params.flatten());
break;
- case '?' :
- ///Set mode=3 to select video mode
- params.set(KEY_MODE, 3);
- params.set(KEY_VNF, 1);
- params.set(KEY_VSTAB, 1);
- break;
-
case ':':
thumbSizeIDX += 1;
- thumbSizeIDX %= ARRAY_SIZE(previewSize);
- params.set(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH, previewSize[thumbSizeIDX].width);
- params.set(CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT, previewSize[thumbSizeIDX].height);
+ thumbSizeIDX %= numthumbnailSize;
+ printf("ThumbnailSizeIDX %d \n", thumbSizeIDX);
+
+ params.set(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH, thumbnail_Array[thumbSizeIDX]->width);
+ params.set(CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT,thumbnail_Array[thumbSizeIDX]->height);
if ( hardwareActive )
camera->setParameters(params.flatten());
@@ -1637,21 +2964,43 @@ int functional_menu() {
break;
case 'B' :
- vnf_mode++;
- vnf_mode %= ARRAY_SIZE(vnf);
- params.set(KEY_VNF, vnf_mode);
+ if(strcmp(vnfstr, "true") == 0) {
+ if(vnftoggle == false) {
+ trySetVideoNoiseFilter(true);
+ vnftoggle = true;
+ } else {
+ trySetVideoNoiseFilter(false);
+ vnftoggle = false;
+ }
+ }else {
+ trySetVideoNoiseFilter(false);
+ vnftoggle = false;
+ printf("VNF is not supported\n");
+ }
if ( hardwareActive )
camera->setParameters(params.flatten());
+
break;
case 'C' :
- vstab_mode++;
- vstab_mode %= ARRAY_SIZE(vstab);
- params.set(KEY_VSTAB, vstab_mode);
+ if(strcmp(vstabstr, "true") == 0) {
+ if(vstabtoggle == false) {
+ trySetVideoStabilization(true);
+ vstabtoggle = true;
+ } else {
+ trySetVideoStabilization(false);
+ vstabtoggle = false;
+ }
+ } else {
+ trySetVideoStabilization(false);
+ vstabtoggle = false;
+ printf("VSTAB is not supported\n");
+ }
if ( hardwareActive )
camera->setParameters(params.flatten());
+
break;
case 'E':
@@ -1683,14 +3032,13 @@ int functional_menu() {
if ( hardwareActive )
camera->setParameters(params.flatten());
break;
+
case 'm':
- {
meter_mode = (meter_mode + 1)%ARRAY_SIZE(metering);
params.set(KEY_METERING_MODE, metering[meter_mode]);
if ( hardwareActive )
camera->setParameters(params.flatten());
break;
- }
case 'k':
ippIDX += 1;
@@ -1702,6 +3050,8 @@ int functional_menu() {
if ( hardwareActive )
camera->setParameters(params.flatten());
+ requestBufferSourceReset();
+
break;
case 'K':
@@ -1723,16 +3073,26 @@ int functional_menu() {
break;
case 'F':
+ faceDetectToggle = !faceDetectToggle;
+ if ( hardwareActive ) {
+ if (faceDetectToggle)
+ camera->sendCommand(CAMERA_CMD_START_FACE_DETECTION, 0, 0);
+ else
+ camera->sendCommand(CAMERA_CMD_STOP_FACE_DETECTION, 0, 0);
+ }
+ break;
+
+ case 'I':
+ afTimeoutIdx++;
+ afTimeoutIdx %= ARRAY_SIZE(afTimeout);
+ params.set(KEY_AF_TIMEOUT, afTimeout[afTimeoutIdx]);
if ( hardwareActive )
- camera->sendCommand(CAMERA_CMD_START_FACE_DETECTION, 0, 0);
+ camera->setParameters(params.flatten());
break;
case 'T':
-
- if ( hardwareActive )
- camera->sendCommand(CAMERA_CMD_STOP_FACE_DETECTION, 0, 0);
-
+ metaDataToggle = !metaDataToggle;
break;
case '@':
@@ -1754,7 +3114,8 @@ int functional_menu() {
} else {
burst += BURST_INC;
}
- params.set(KEY_BURST, burst);
+ burstCount = burst;
+ params.set(KEY_TI_BURST, burst);
if ( hardwareActive )
camera->setParameters(params.flatten());
@@ -1763,8 +3124,8 @@ int functional_menu() {
case 'J':
flashIdx++;
- flashIdx %= ARRAY_SIZE(flashModes);
- params.set(CameraParameters::KEY_FLASH_MODE, (flashModes[flashIdx]));
+ flashIdx %= numflash;
+ params.set(CameraParameters::KEY_FLASH_MODE, (flash[flashIdx]));
if ( hardwareActive )
camera->setParameters(params.flatten());
@@ -1773,22 +3134,52 @@ int functional_menu() {
case 'u':
capture_mode++;
- capture_mode %= ARRAY_SIZE(capture);
+ capture_mode %= nummodevalues;
// HQ should always be in ldc-nsf
// if not HQ, then return the ipp to its previous state
- if( !strcmp(capture[capture_mode], "high-quality") ) {
+ if( !strcmp(modevalues[capture_mode], "high-quality") ) {
ippIDX_old = ippIDX;
ippIDX = 3;
params.set(KEY_IPP, ipp_mode[ippIDX]);
+ params.set(CameraParameters::KEY_RECORDING_HINT, CameraParameters::FALSE);
+ previewRotation = 0;
+ params.set(KEY_SENSOR_ORIENTATION, previewRotation);
+ } else if ( !strcmp(modevalues[capture_mode], "video-mode") ) {
+ params.set(CameraParameters::KEY_RECORDING_HINT, CameraParameters::TRUE);
+ camera->getCameraInfo(camera_index, &cameraInfo);
+ previewRotation = ((360-cameraInfo.orientation)%360);
+                if (previewRotation >= 0 && previewRotation <= 360) {
+ params.set(KEY_SENSOR_ORIENTATION, previewRotation);
+ }
} else {
ippIDX = ippIDX_old;
+ params.set(CameraParameters::KEY_RECORDING_HINT, CameraParameters::FALSE);
+ previewRotation = 0;
+ params.set(KEY_SENSOR_ORIENTATION, previewRotation);
}
- params.set(KEY_MODE, (capture[capture_mode]));
+ params.set(KEY_MODE, (modevalues[capture_mode]));
- if ( hardwareActive )
+ if ( hardwareActive ) {
+ if (previewRunning) {
+ stopPreview();
+ }
+ camera->setParameters(params.flatten());
+ // Get parameters from capabilities for the new capture mode
+ params = camera->getParameters();
+ getSizeParametersFromCapabilities();
+ getParametersFromCapabilities();
+ // Set framerate 30fps and 12MP capture resolution if available for the new capture mode.
+ // If not available set framerate and capture mode under index 0 from fps_const_str and capture_Array.
+ frameRateIDX = getDefaultParameter("30000,30000", constCnt, fps_const_str);
+ captureSizeIDX = getDefaultParameterResol("12MP", numcaptureSize, capture_Array);
+ params.set(CameraParameters::KEY_PREVIEW_FPS_RANGE, fps_const_str[frameRateIDX]);
+ params.setPictureSize(capture_Array[captureSizeIDX]->width, capture_Array[captureSizeIDX]->height);
camera->setParameters(params.flatten());
+ }
+
+ requestBufferSourceReset();
break;
@@ -1805,14 +3196,28 @@ int functional_menu() {
case 'H':
expBracketIdx++;
expBracketIdx %= ARRAY_SIZE(expBracketing);
+ setDefaultExpGainPreset(shotParams, expBracketIdx);
- params.set(KEY_EXP_BRACKETING_RANGE, expBracketingRange[expBracketIdx]);
+ break;
- if ( hardwareActive )
- camera->setParameters(params.flatten());
+ case 'n':
+ if (shotConfigFlush)
+ shotConfigFlush = false;
+ else
+ shotConfigFlush = true;
+
+ updateShotConfigFlushParam();
break;
+ case '(':
+ {
+ char input[256];
+ input[0] = ch;
+ scanf("%254s", input+1);
+ setExpGainPreset(shotParams, input, true, PARAM_EXP_BRACKET_PARAM_NONE, shotConfigFlush);
+ break;
+ }
case 'W':
tempBracketRange++;
tempBracketRange %= TEMP_BRACKETING_MAX_RANGE;
@@ -1830,7 +3235,7 @@ int functional_menu() {
case 'w':
scene_mode++;
- scene_mode %= ARRAY_SIZE(scene);
+ scene_mode %= numscene;
params.set(params.KEY_SCENE_MODE, scene[scene_mode]);
if ( hardwareActive )
@@ -1840,15 +3245,20 @@ int functional_menu() {
case 'i':
iso_mode++;
- iso_mode %= ARRAY_SIZE(iso);
- params.set(KEY_ISO, iso[iso_mode]);
-
+ iso_mode %= numisoMode;
+ params.set(KEY_ISO, isoMode[iso_mode]);
if ( hardwareActive )
camera->setParameters(params.flatten());
break;
+
case 'h':
- if ( sharpness >= 100) {
+#ifdef TARGET_OMAP4
+ if ( sharpness >= 200)
+#else
+ if ( sharpness >= 100)
+#endif
+ {
sharpness = 0;
} else {
sharpness += 10;
@@ -1873,25 +3283,76 @@ int functional_menu() {
}
case 'z':
- zoomIDX++;
- zoomIDX %= ARRAY_SIZE(zoom);
- params.set(CameraParameters::KEY_ZOOM, zoom[zoomIDX].idx);
+ if(strcmp(zoomstr, "true") == 0) {
+ zoomIDX++;
+ zoomIDX %= ARRAY_SIZE(zoom);
+ params.set(CameraParameters::KEY_ZOOM, zoom[zoomIDX].idx);
- if ( hardwareActive )
- camera->setParameters(params.flatten());
+ if ( hardwareActive )
+ camera->setParameters(params.flatten());
+ }
+ break;
+
+ case 'Z':
+ if(strcmp(smoothzoomstr, "true") == 0) {
+ zoomIDX++;
+ zoomIDX %= ARRAY_SIZE(zoom);
+ if ( hardwareActive )
+ camera->sendCommand(CAMERA_CMD_START_SMOOTH_ZOOM, zoom[zoomIDX].idx, 0);
+ }
break;
case 'j':
exposure_mode++;
- exposure_mode %= ARRAY_SIZE(exposure);
- params.set(KEY_EXPOSURE, exposure[exposure_mode]);
+ exposure_mode %= numExposureMode;
+ params.set(KEY_EXPOSURE, exposureMode[exposure_mode]);
+ if ( strcmp (exposureMode[exposure_mode], "manual") == 0) {
+ params.set(KEY_MANUAL_EXPOSURE, manualExp);
+ params.set(KEY_MANUAL_GAIN_ISO, manualGain);
+ params.set(KEY_MANUAL_EXPOSURE_RIGHT, manualExp);
+ params.set(KEY_MANUAL_GAIN_ISO_RIGHT, manualGain);
+ }
+ else
+ {
+ manualExp = manualExpMin;
+ params.set(KEY_MANUAL_EXPOSURE, manualExp);
+ params.set(KEY_MANUAL_EXPOSURE_RIGHT, manualExp);
+ manualGain = manualGainMin;
+ params.set(KEY_MANUAL_GAIN_ISO, manualGain);
+ params.set(KEY_MANUAL_GAIN_ISO_RIGHT, manualGain);
+ }
- if ( hardwareActive )
+ if ( hardwareActive ) {
camera->setParameters(params.flatten());
+ }
break;
+ case 'Q':
+ if ( strcmp (exposureMode[exposure_mode], "manual") == 0) {
+ manualExp += manualExpStep;
+ if( manualExp > manualExpMax) {
+ manualExp = manualExpMin;
+ }
+ params.set(KEY_MANUAL_EXPOSURE, manualExp);
+ params.set(KEY_MANUAL_EXPOSURE_RIGHT, manualExp);
+ camera->setParameters(params.flatten());
+ }
+ break;
+
+ case ',':
+ if ( strcmp (exposureMode[exposure_mode], "manual") == 0) {
+ manualGain += manualGainStep;
+ if( manualGain > manualGainMax) {
+ manualGain = manualGainMin;
+ }
+ params.set(KEY_MANUAL_GAIN_ISO, manualGain);
+ params.set(KEY_MANUAL_GAIN_ISO_RIGHT, manualGain);
+ camera->setParameters(params.flatten());
+ }
+ break;
+
case 'c':
if( contrast >= 200){
contrast = 0;
@@ -1899,11 +3360,17 @@ int functional_menu() {
contrast += 10;
}
params.set(KEY_CONTRAST, contrast);
- if ( hardwareActive )
+ if ( hardwareActive ) {
camera->setParameters(params.flatten());
+ }
break;
case 'b':
- if ( brightness >= 200) {
+#ifdef TARGET_OMAP4
+ if ( brightness >= 100)
+#else
+ if ( brightness >= 200)
+#endif
+ {
brightness = 0;
} else {
brightness += 10;
@@ -1911,14 +3378,14 @@ int functional_menu() {
params.set(KEY_BRIGHTNESS, brightness);
- if ( hardwareActive )
+ if ( hardwareActive ) {
camera->setParameters(params.flatten());
+ }
break;
case 's':
- case 'S':
- if ( saturation >= 100) {
+ if ( saturation >= 200) {
saturation = 0;
} else {
saturation += 10;
@@ -1933,8 +3400,10 @@ int functional_menu() {
case 'e':
effects_mode++;
- effects_mode %= ARRAY_SIZE(effects);
- params.set(params.KEY_EFFECT, effects[effects_mode]);
+ effects_mode %= numEffects;
+ printf("%d", numEffects);
+ params.set(params.KEY_EFFECT, effectss[effects_mode]);
+ printf("Effects_mode %d", effects_mode);
if ( hardwareActive )
camera->setParameters(params.flatten());
@@ -1942,20 +3411,10 @@ int functional_menu() {
break;
case 'r':
-
-
- if (camera_index == 0) {
- frameRateIDX += 1;
- frameRateIDX %= ARRAY_SIZE(fpsConstRanges);
- params.set(CameraParameters::KEY_PREVIEW_FPS_RANGE, fpsConstRanges[frameRateIDX].range);
- } else
- {
- frameRateIDXSec += 1;
- frameRateIDXSec %= ARRAY_SIZE(fpsConstRangesSec);
- params.set(CameraParameters::KEY_PREVIEW_FPS_RANGE, fpsConstRangesSec[frameRateIDXSec].range);
-
-
- }
+ frameRateIDX++;
+ frameRateIDX %= constCnt;
+ params.set(CameraParameters::KEY_PREVIEW_FPS_RANGE, fps_const_str[frameRateIDX]);
+ printf("fps_const_str[frameRateIDX] %s\n", fps_const_str[frameRateIDX]);
if ( hardwareActive ) {
camera->setParameters(params.flatten());
@@ -1965,8 +3424,9 @@ int functional_menu() {
case 'R':
fpsRangeIdx += 1;
- fpsRangeIdx %= ARRAY_SIZE(fpsRanges);
- params.set(CameraParameters::KEY_PREVIEW_FPS_RANGE, fpsRanges[fpsRangeIdx].range);
+ fpsRangeIdx %= rangeCnt;
+ params.set(CameraParameters::KEY_PREVIEW_FPS_RANGE, fps_range_str[fpsRangeIdx]);
+ printf("fps_range_str[fpsRangeIdx] %s\n", fps_range_str[fpsRangeIdx]);
if ( hardwareActive ) {
camera->setParameters(params.flatten());
@@ -1976,8 +3436,9 @@ int functional_menu() {
case 'x':
antibanding_mode++;
- antibanding_mode %= ARRAY_SIZE(antibanding);
- params.set(params.KEY_ANTIBANDING, antibanding[antibanding_mode]);
+ antibanding_mode %= numAntibanding;
+ printf("%d", numAntibanding);
+ params.set(params.KEY_ANTIBANDING, antiband[antibanding_mode]);
if ( hardwareActive )
camera->setParameters(params.flatten());
@@ -1986,7 +3447,7 @@ int functional_menu() {
case 'g':
focus_mode++;
- focus_mode %= ARRAY_SIZE(focus);
+ focus_mode %= numfocus;
params.set(params.KEY_FOCUS_MODE, focus[focus_mode]);
if ( hardwareActive )
@@ -1995,16 +3456,43 @@ int functional_menu() {
break;
case 'G':
-
params.set(CameraParameters::KEY_FOCUS_AREAS, TEST_FOCUS_AREA);
if ( hardwareActive )
camera->setParameters(params.flatten());
- params.remove(CameraParameters::KEY_FOCUS_AREAS);
+ break;
- case 'f':
+ case 'y':
+ params.set(CameraParameters::KEY_METERING_AREAS, TEST_METERING_AREA);
+ if ( hardwareActive ) {
+ camera->setParameters(params.flatten());
+ }
+
+ break;
+
+ case 'Y':
+
+ params.set(CameraParameters::KEY_METERING_AREAS, TEST_METERING_AREA_CENTER);
+
+ if ( hardwareActive ) {
+ camera->setParameters(params.flatten());
+ }
+
+ break;
+
+ case 'N':
+
+ params.set(CameraParameters::KEY_METERING_AREAS, TEST_METERING_AREA_AVERAGE);
+
+ if ( hardwareActive ) {
+ camera->setParameters(params.flatten());
+ }
+
+ break;
+
+ case 'f':
gettimeofday(&autofocus_start, 0);
if ( hardwareActive )
@@ -2013,13 +3501,73 @@ int functional_menu() {
break;
case 'p':
+ {
+ int msgType = 0;
- gettimeofday(&picture_start, 0);
+ if((0 == strcmp(modevalues[capture_mode], "video-mode")) &&
+ (0 != strcmp(videosnapshotstr, "true"))) {
+ printf("Video Snapshot is not supported\n");
+ } else if ( hardwareActive ) {
+ if(isRawPixelFormat(pictureFormatArray[pictureFormat])) {
+ createBufferOutputSource();
+ if (bufferSourceOutputThread.get()) {
+ bufferSourceOutputThread->setBuffer(shotParams);
+ bufferSourceOutputThread->setStreamCapture(streamCapture, expBracketIdx);
+ }
+ } else {
+ msgType = CAMERA_MSG_COMPRESSED_IMAGE |
+ CAMERA_MSG_RAW_IMAGE;
+#ifdef OMAP_ENHANCEMENT_BURST_CAPTURE
+ msgType |= CAMERA_MSG_RAW_BURST;
+#endif
+ }
- if ( hardwareActive )
- camera->takePicture(CAMERA_MSG_COMPRESSED_IMAGE|CAMERA_MSG_RAW_IMAGE);
+ gettimeofday(&picture_start, 0);
+ camera->setParameters(params.flatten());
+ camera->takePictureWithParameters(msgType, shotParams.flatten());
+ }
+ break;
+ }
+ case 'S':
+ {
+ if (streamCapture) {
+ streamCapture = false;
+ setDefaultExpGainPreset(shotParams, expBracketIdx);
+ // Stop streaming
+ if (bufferSourceOutputThread.get()) {
+ bufferSourceOutputThread->setStreamCapture(streamCapture, expBracketIdx);
+ }
+ } else {
+ streamCapture = true;
+ setSingleExpGainPreset(shotParams, expBracketIdx, 0, 0);
+ // Queue more frames initially
+ shotParams.set(ShotParameters::KEY_BURST, BRACKETING_STREAM_BUFFERS);
+ }
break;
+ }
+
+ case 'P':
+ {
+ int msgType = CAMERA_MSG_COMPRESSED_IMAGE;
+ ShotParameters reprocParams;
+
+ gettimeofday(&picture_start, 0);
+ createBufferInputSource();
+ if (bufferSourceOutputThread.get() &&
+ bufferSourceOutputThread->hasBuffer())
+ {
+ bufferSourceOutputThread->setStreamCapture(false, expBracketIdx);
+ if (hardwareActive) camera->setParameters(params.flatten());
+
+ if (bufferSourceInput.get()) {
+ buffer_info_t info = bufferSourceOutputThread->popBuffer();
+ bufferSourceInput->setInput(info, pictureFormatArray[pictureFormat], reprocParams);
+ if (hardwareActive) camera->reprocess(msgType, reprocParams.flatten());
+ }
+ }
+ break;
+ }
case '&':
printf("Enabling Preview Callback");
@@ -2029,13 +3577,14 @@ int functional_menu() {
break;
case '{':
- if ( strcmp(params.get(KEY_S3D2D_PREVIEW_MODE), "off") == 0 )
+ valstr = params.get(KEY_S3D2D_PREVIEW_MODE);
+ if ( (NULL != valstr) && (0 == strcmp(valstr, "on")) )
{
- params.set(KEY_S3D2D_PREVIEW_MODE, "on");
+ params.set(KEY_S3D2D_PREVIEW_MODE, "off");
}
else
{
- params.set(KEY_S3D2D_PREVIEW_MODE, "off");
+ params.set(KEY_S3D2D_PREVIEW_MODE, "on");
}
if ( hardwareActive )
camera->setParameters(params.flatten());
@@ -2051,8 +3600,8 @@ int functional_menu() {
break;
case 'q':
-
stopPreview();
+ deleteAllocatedMemory();
return -1;
@@ -2071,41 +3620,90 @@ int functional_menu() {
break;
}
- case '<':
- elockidx += 1;
- elockidx %= ARRAY_SIZE(lock);
- params.set(KEY_AUTO_EXPOSURE_LOCK, lock[elockidx]);
- if ( hardwareActive )
- camera->setParameters(params.flatten());
- break;
+                    case '<':
+                        if(strcmp(AutoExposureLockstr, "true") == 0) {
+                            if(AutoExposureLocktoggle == false) {
+                                trySetAutoExposureLock(true);
+                                AutoExposureLocktoggle = true;
+                            } else {
+                                trySetAutoExposureLock(false);
+                                AutoExposureLocktoggle = false;
+                            }
+                        } else {
+                            printf("AutoExposureLock is not supported\n");
+                        }
+
+ if ( hardwareActive )
+ camera->setParameters(params.flatten());
+
+ break;
+
+                    case '>':
+                        if(strcmp(AutoWhiteBalanceLockstr, "true") == 0) {
+                            if(AutoWhiteBalanceLocktoggle == false) {
+                                trySetAutoWhiteBalanceLock(true);
+                                AutoWhiteBalanceLocktoggle = true;
+                            } else {
+                                trySetAutoWhiteBalanceLock(false);
+                                AutoWhiteBalanceLocktoggle = false;
+                            }
+                        } else {
+                            printf("AutoWhiteBalanceLock is not supported\n");
+                        }
+
+ if ( hardwareActive ) {
+ camera->setParameters(params.flatten());
+ }
+
+ break;
- case '>':
- wblockidx += 1;
- wblockidx %= ARRAY_SIZE(lock);
- params.set(KEY_AUTO_WHITEBALANCE_LOCK, lock[wblockidx]);
- if ( hardwareActive )
+ case ')':
+ enableMisalignmentCorrectionIdx++;
+ enableMisalignmentCorrectionIdx %= ARRAY_SIZE(misalignmentCorrection);
+ params.set(KEY_MECHANICAL_MISALIGNMENT_CORRECTION, misalignmentCorrection[enableMisalignmentCorrectionIdx]);
+ if ( hardwareActive ) {
camera->setParameters(params.flatten());
+ }
break;
- default:
- print_menu = 0;
+ case 'd':
+ while (1) {
+ if ( menu_algo() < 0)
+ break;
+ }
+ break;
- break;
+ default:
+ print_menu = 0;
+
+ break;
}
return 0;
}
void print_usage() {
- printf(" USAGE: camera_test <param> <script>\n");
- printf(" <param>\n-----------\n\n");
- printf(" F or f -> Functional tests \n");
- printf(" A or a -> API tests \n");
- printf(" E or e -> Error scenario tests \n");
- printf(" S or s -> Stress tests; with syslink trace \n");
- printf(" SN or sn -> Stress tests; No syslink trace \n\n");
- printf(" <script>\n----------\n");
- printf("Script name (Only for stress tests)\n\n");
+ printf(" USAGE: camera_test <options>\n");
+ printf(" <options> (case insensitive)\n");
+ printf("-----------\n");
+ printf(" -f -> Functional tests.\n");
+ printf(" -a -> API tests.\n");
+ printf(" -e [<script>] -> Error scenario tests. If no script file is provided\n");
+ printf(" the test is run in interactive mode.\n");
+ printf(" -s <script> -c <sensorID> -> Stress / regression tests.\n");
+ printf(" -l [<flags>] -> Enable different kinds of logging capture. Multiple flags\n");
+ printf(" should be combined into a string. If flags are not provided\n");
+ printf(" no logs are captured.\n");
+ printf(" <flags>\n");
+ printf(" ---------\n");
+ printf(" l -> logcat [default]\n");
+ printf(" s -> syslink [default]\n");
+ printf(" -o <path> -> Output directory to store the test results. Image and video\n");
+ printf(" files are stored in corresponding sub-directories.\n");
+ printf(" -p <platform> -> Target platform. Only for stress tests.\n");
+ printf(" <platform>\n");
+ printf(" ------------\n");
+ printf(" blaze or B -> BLAZE\n");
+ printf(" tablet1 or T1 -> Blaze TABLET-1\n");
+ printf(" tablet2 or T2 -> Blaze TABLET-2 [default]\n\n");
return;
}
@@ -2306,8 +3904,6 @@ int error_scenario() {
int restartCamera() {
- const char dir_path_name[80] = SDCARD_PATH;
-
printf("+++Restarting Camera After Error+++\n");
stopPreview();
@@ -2322,12 +3918,6 @@ int restartCamera() {
restartCount++;
- if (strcpy(dir_path, dir_path_name) == NULL)
- {
- printf("Error reseting dir name");
- return -1;
- }
-
if ( openCamera() < 0 )
{
        printf("+++Camera Restart Failed+++\n");
@@ -2343,165 +3933,358 @@ int restartCamera() {
return 0;
}
-int main(int argc, char *argv[]) {
- char *cmd;
- int pid;
- sp<ProcessState> proc(ProcessState::self());
-
- unsigned long long st, end, delay;
- timeval current_time;
-
- gettimeofday(&current_time, 0);
-
- st = current_time.tv_sec * 1000000 + current_time.tv_usec;
+int parseCommandLine(int argc, char *argv[], cmd_args_t *cmd_args) {
+ if (argc < 2) {
+ printf("Please enter at least 1 argument\n");
+ return -2;
+ }
- cmd = NULL;
+ // Set defaults
+ memset(cmd_args, 0, sizeof(*cmd_args));
+ cmd_args->logging = LOGGING_LOGCAT | LOGGING_SYSLINK;
+ cmd_args->platform_id = BLAZE_TABLET2;
- if ( argc < 2 ) {
- printf(" Please enter atleast 1 argument\n");
- print_usage();
+ for (int a = 1; a < argc; a++) {
+ const char * const arg = argv[a];
+ if (arg[0] != '-') {
+ printf("Error: Invalid argument \"%s\"\n", arg);
+ return -2;
+ }
- return 0;
- }
- system("echo camerahal_test > /sys/power/wake_lock");
- if ( argc < 3 ) {
- switch (*argv[1]) {
- case 'S':
+ switch (arg[1]) {
case 's':
- printf("This is stress / regression tests \n");
- printf("Provide script file as 2nd argument\n");
-
+ cmd_args->test_type = TEST_TYPE_REGRESSION;
+ if (a < argc - 1) {
+ cmd_args->script_file_name = argv[++a];
+ } else {
+ printf("Error: No script is specified for stress / regression test.\n");
+ return -2;
+ }
break;
- case 'F':
case 'f':
- ProcessState::self()->startThreadPool();
+ cmd_args->test_type = TEST_TYPE_FUNCTIONAL;
+ break;
- if ( openCamera() < 0 ) {
- printf("Camera initialization failed\n");
- system("echo camerahal_test > /sys/power/wake_unlock");
- return -1;
- }
+ case 'a':
+ cmd_args->test_type = TEST_TYPE_API;
+ break;
- initDefaults();
- print_menu = 1;
+ case 'e':
+ cmd_args->test_type = TEST_TYPE_ERROR;
+ if (a < argc - 1) {
+ cmd_args->script_file_name = argv[++a];
+ }
+ break;
- while ( 1 ) {
- if ( functional_menu() < 0 )
- break;
- };
+ case 'l':
+ cmd_args->logging = 0;
+
+ if (a < argc - 1 && argv[a + 1][0] != '-') {
+ const char *flags = argv[++a];
+ while (*flags) {
+ char flag = *flags++;
+ switch (flag) {
+ case 'l':
+ cmd_args->logging |= LOGGING_LOGCAT;
+ break;
+
+ case 's':
+ cmd_args->logging |= LOGGING_SYSLINK;
+ break;
+
+ default:
+ printf("Error: Unknown logging type \"%c\"\n", flag);
+ return -2;
+ }
+ }
+ }
+ break;
+ case 'p':
+ if (a < argc - 1) {
+ const char *platform = argv[++a];
+ if( strcasecmp(platform,"blaze") == 0 || strcasecmp(platform,"B") == 0 ){
+ cmd_args->platform_id = BLAZE;
+ }
+ else if( (strcasecmp(platform,"tablet1") == 0) || (strcasecmp(platform,"T1") == 0) ) {
+ cmd_args->platform_id = BLAZE_TABLET1;
+ }
+ else if( (strcasecmp(platform,"tablet2") == 0) || (strcasecmp(platform,"T2") == 0) ) {
+ cmd_args->platform_id = BLAZE_TABLET2;
+ }
+ else {
+ printf("Error: Unknown argument for platform ID.\n");
+ return -2;
+ }
+ } else {
+ printf("Error: No argument is specified for platform ID.\n");
+ return -2;
+ }
break;
- case 'A':
- case 'a':
- printf("API level test cases coming soon ... \n");
+ case 'o':
+ if (a < argc - 1) {
+ cmd_args->output_path = argv[++a];
+ } else {
+ printf("Error: No output path is specified.\n");
+ return -2;
+ }
+ break;
+ case 'c':
+ if (a < argc -1) {
+ camera_index = atoi(argv[++a]);
+ } else {
+ printf("Error: No sensorID is specified.\n");
+ return -2;
+ }
break;
- case 'E':
- case 'e': {
- ProcessState::self()->startThreadPool();
+ default:
+ printf("Error: Unknown option \"%s\"\n", argv[a]);
+ return -2;
+ }
+ }
- if ( openCamera() < 0 ) {
- printf("Camera initialization failed\n");
- system("echo camerahal_test > /sys/power/wake_unlock");
- return -1;
- }
+ return 0;
+}
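
For reference, a minimal sketch of the aggregate parseCommandLine() fills in. The real cmd_args_t is declared in camera_test.h, which is not part of this hunk, so the field names and types below are inferred from the assignments above and may differ from the actual header:

typedef struct {
    int         test_type;        // TEST_TYPE_FUNCTIONAL / _API / _ERROR / _REGRESSION
    const char *script_file_name; // -s <script>, or the optional -e argument
    const char *output_path;      // -o <path>
    int         logging;          // LOGGING_LOGCAT | LOGGING_SYSLINK bitmask, from -l
    int         platform_id;      // BLAZE / BLAZE_TABLET1 / BLAZE_TABLET2, from -p
} cmd_args_t;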
- initDefaults();
- print_menu = 1;
+int setOutputDirPath(cmd_args_t *cmd_args, int restart_count) {
+ if ((cmd_args->output_path != NULL) &&
+ (strlen(cmd_args->output_path) < sizeof(output_dir_path))) {
+ strcpy(output_dir_path, cmd_args->output_path);
+ } else {
+ strcpy(output_dir_path, SDCARD_PATH);
- while (1) {
- if (error_scenario() < 0) {
- break;
- }
+ if (cmd_args->script_file_name != NULL) {
+ const char *config = cmd_args->script_file_name;
+ char dir_name[40];
+ size_t count = 0;
+ char *p;
+
+ // remove just the '.txt' part of the config
+ while ((config[count] != '.') && ((count + 1) < sizeof(dir_name))) {
+ count++;
+ }
+
+ strncpy(dir_name, config, count);
+
+            dir_name[count] = '\0';
+ p = dir_name;
+            while (*p != '\0') {
+                if (*p == '/') {
+                    printf("SDCARD_PATH is not added to the output directory.\n");
+                    // Needed when camera_test script is executed using the OTC
+                    strcpy(output_dir_path, "");
+                    break;
+                }
+                p++;
+            }
- break;
+ strcat(output_dir_path, dir_name);
+ if (camera_index == 1) {
+ strcat(output_dir_path, SECONDARY_SENSOR);
+ }else if (camera_index == 2) {
+ strcat(output_dir_path, S3D_SENSOR);
}
+ }
+ }
- default:
- printf("INVALID OPTION USED\n");
- print_usage();
+ if (restart_count && (strlen(output_dir_path) + 16) < sizeof(output_dir_path)) {
+ char count[16];
+ sprintf(count, "_%d", restart_count);
+ strcat(output_dir_path, count);
+ }
- break;
+ if (access(output_dir_path, F_OK) == -1) {
+ if (mkdir(output_dir_path, 0777) == -1) {
+ printf("\nError: Output directory \"%s\" was not created\n", output_dir_path);
+ return -1;
}
- } else if ( ( argc == 3) && ( ( *argv[1] == 'S' ) || ( *argv[1] == 's') ) ) {
+ }
- if((argv[1][1] == 'N') || (argv[1][1] == 'n')) {
- bLogSysLinkTrace = false;
+ sprintf(videos_dir_path, "%s/videos", output_dir_path);
+
+ if (access(videos_dir_path, F_OK) == -1) {
+ if (mkdir(videos_dir_path, 0777) == -1) {
+ printf("\nError: Videos directory \"%s\" was not created\n", videos_dir_path);
+ return -1;
}
+ }
- ProcessState::self()->startThreadPool();
+ sprintf(images_dir_path, "%s/images", output_dir_path);
- if ( openCamera() < 0 ) {
- printf("Camera initialization failed\n");
- system("echo camerahal_test > /sys/power/wake_unlock");
- return -1;
+ if (access(images_dir_path, F_OK) == -1) {
+ if (mkdir(images_dir_path, 0777) == -1) {
+ printf("\nError: Images directory \"%s\" was not created\n", images_dir_path);
+ return -1;
}
+ }
- initDefaults();
+ return 0;
+}
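
setOutputDirPath() repeats the same access()/mkdir() create-if-missing stanza for the output, videos and images directories. A small helper of this shape (a sketch only, not something the patch defines) captures the pattern:

static int ensureDir(const char *path) {
    // create the directory only when it does not already exist
    if (access(path, F_OK) == -1 && mkdir(path, 0777) == -1) {
        printf("\nError: directory \"%s\" was not created\n", path);
        return -1;
    }
    return 0;
}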
- cmd = load_script(argv[2]);
+int startTest() {
+ ProcessState::self()->startThreadPool();
- if ( cmd != NULL) {
- start_logging(argv[2], pid);
- stressTest = true;
+ if (openCamera() < 0) {
+ printf("Camera initialization failed\n");
+ return -1;
+ }
- while (1)
- {
- if ( execute_functional_script(cmd) == 0 )
- {
- break;
- }
- else
- {
- printf("CameraTest Restarting Camera...\n");
+ initDefaults();
- free(cmd);
- cmd = NULL;
+ return 0;
+}
+
+int runRegressionTest(cmd_args_t *cmd_args) {
+ char *cmd;
+ int pid;
+
+ platformID = cmd_args->platform_id;
+
+ int res = startTest();
+ if (res != 0) {
+ return res;
+ }
+
+ cmd = load_script(cmd_args->script_file_name);
+
+ if (cmd != NULL) {
+ start_logging(cmd_args->logging, pid);
+ stressTest = true;
+
+ while (1) {
+ if (execute_functional_script(cmd) == 0) {
+ break;
+ }
+
+ printf("CameraTest Restarting Camera...\n");
- if ( (restartCamera() != 0) || ((cmd = load_script(argv[2])) == NULL) )
- {
- printf("ERROR::CameraTest Restarting Camera...\n");
- break;
- }
- }
- }
free(cmd);
- stop_logging(pid);
+ cmd = NULL;
+
+ if ( (restartCamera() != 0) || ((cmd = load_script(cmd_args->script_file_name)) == NULL) ) {
+                printf("ERROR::CameraTest camera restart failed\n");
+ res = -1;
+ break;
+ }
+
+ res = setOutputDirPath(cmd_args, restartCount);
+ if (res != 0) {
+ break;
+ }
}
- } else if ( ( argc == 3) && ( ( *argv[1] == 'E' ) || ( *argv[1] == 'e') ) ) {
- ProcessState::self()->startThreadPool();
+ free(cmd);
+ stop_logging(cmd_args->logging, pid);
+ }
- if ( openCamera() < 0 ) {
- printf("Camera initialization failed\n");
- system("echo camerahal_test > /sys/power/wake_unlock");
- return -1;
+ return 0;
+}
+
+int runFunctionalTest() {
+ int res = startTest();
+ if (res != 0) {
+ return res;
+ }
+
+ print_menu = 1;
+
+ while (1) {
+ if (functional_menu() < 0) {
+ break;
}
+ }
- initDefaults();
+ return 0;
+}
- cmd = load_script(argv[2]);
+int runApiTest() {
+ printf("API level test cases coming soon ... \n");
+ return 0;
+}
+
+int runErrorTest(cmd_args_t *cmd_args) {
+ int res = startTest();
+ if (res != 0) {
+ return res;
+ }
+
+ if (cmd_args->script_file_name != NULL) {
+ char *cmd;
+ int pid;
- if ( cmd != NULL) {
- start_logging(argv[2], pid);
+ cmd = load_script(cmd_args->script_file_name);
+
+ if (cmd != NULL) {
+ start_logging(cmd_args->logging, pid);
execute_error_script(cmd);
free(cmd);
- stop_logging(pid);
+ stop_logging(cmd_args->logging, pid);
}
-
} else {
- printf("INVALID OPTION USED\n");
+ print_menu = 1;
+
+ while (1) {
+ if (error_scenario() < 0) {
+ break;
+ }
+ }
+ }
+
+ return 0;
+}
+
+int main(int argc, char *argv[]) {
+ sp<ProcessState> proc(ProcessState::self());
+
+ unsigned long long st, end, delay;
+ timeval current_time;
+ cmd_args_t cmd_args;
+ int res;
+
+ res = parseCommandLine(argc, argv, &cmd_args);
+ if (res != 0) {
print_usage();
+ return res;
+ }
+
+ res = setOutputDirPath(&cmd_args, 0);
+ if (res != 0) {
+ return res;
}
gettimeofday(&current_time, 0);
+
+ st = current_time.tv_sec * 1000000 + current_time.tv_usec;
+
+ system("echo camerahal_test > /sys/power/wake_lock");
+
+ switch (cmd_args.test_type) {
+ case TEST_TYPE_REGRESSION:
+ res = runRegressionTest(&cmd_args);
+ break;
+
+ case TEST_TYPE_FUNCTIONAL:
+ res = runFunctionalTest();
+ break;
+
+ case TEST_TYPE_API:
+ res = runApiTest();
+ break;
+
+ case TEST_TYPE_ERROR:
+ res = runErrorTest(&cmd_args);
+ break;
+ }
+
+ system("echo camerahal_test > /sys/power/wake_unlock");
+
+ gettimeofday(&current_time, 0);
end = current_time.tv_sec * 1000000 + current_time.tv_usec;
delay = end - st;
- printf("Application clossed after: %llu ms\n", delay);
- system("echo camerahal_test > /sys/power/wake_unlock");
- return 0;
+ printf("Application closed after: %llu ms\n", delay);
+
+ return res;
}
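
Putting the new parser and test runners together, typical invocations look like the following (paths and sensor IDs are illustrative placeholders, not values taken from the patch):

// camera_test -f                                   functional menu, default sensor
// camera_test -s scripts/stress1.txt -c 0 -l ls    stress/regression script on sensor 0, logcat + syslink capture
// camera_test -e -o /data/camera_results           interactive error scenarios, custom output directory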
diff --git a/test/CameraHal/camera_test_script.cpp b/test/CameraHal/camera_test_script.cpp
index afd88be..21ed7e7 100644
--- a/test/CameraHal/camera_test_script.cpp
+++ b/test/CameraHal/camera_test_script.cpp
@@ -16,30 +16,37 @@
#include <binder/IServiceManager.h>
#include <cutils/properties.h>
#include <camera/CameraParameters.h>
+#include <camera/ShotParameters.h>
#include <sys/wait.h>
#include "camera_test.h"
+#include "camera_test_surfacetexture.h"
+#ifdef ANDROID_API_JB_OR_LATER
+#include "camera_test_bufferqueue.h"
+#endif
using namespace android;
extern bool stopScript;
extern bool hardwareActive;
extern sp<Camera> camera;
+extern sp<BufferSourceThread> bufferSourceOutputThread;
+extern sp<BufferSourceInput> bufferSourceInput;
extern CameraParameters params;
+extern ShotParameters shotParams;
+extern bool shotConfigFlush;
+extern bool streamCapture;
extern bool recordingMode;
extern int camera_index;
extern int rotation;
-extern const preview_size previewSize [];
-extern const Vcapture_size VcaptureSize [];
-extern const capture_Size captureSize[];
+extern int previewRotation;
+extern const param_Array captureSize[];
+extern const param_Array VcaptureSize[];
extern const outformat outputFormat[];
extern const video_Codecs videoCodecs[];
extern const audio_Codecs audioCodecs[];
extern const V_bitRate VbitRate[];
-extern const fps_ranges fpsRanges[];
-extern const fpsConst_Ranges fpsConstRanges[];
-extern const fpsConst_RangesSec fpsConstRangesSec[];
extern const Zoom zoom [];
extern int previewSizeIDX;
extern bool reSizePreview;
@@ -68,23 +75,73 @@ extern int zoomIDX;
extern int brightness;
extern int saturation;
extern int fpsRangeIdx;
+extern int numAntibanding;
+extern int numEffects;
+extern int numawb;
+extern int numExposureMode;
+extern int numscene;
+extern int numisoMode;
+extern int numflash;
+extern int numcaptureSize;
+extern int numVcaptureSize;
+extern int numpreviewSize;
+extern int numthumbnailSize;
+extern int numfocus;
+extern int numpreviewFormat;
+extern int numpictureFormat;
+extern int nummodevalues;
+extern int numLay;
+extern int numCLay;
+extern int constCnt;
+extern int rangeCnt;
+extern int * constFramerate;
+extern int frameRateIDX;
+extern int fpsRangeIdx;
+extern int stereoLayoutIDX;
+extern int stereoCapLayoutIDX;
+extern int expBracketIdx;
+int resol_index = 0;
+int a = 0;
+extern char * vstabstr;
+extern char * vnfstr;
+extern char * zoomstr;
+extern char * smoothzoomstr;
+extern char * videosnapshotstr;
+extern char ** antiband;
+extern char **effectss;
+extern bool firstTime;
+extern char **exposureMode;
+extern char **awb;
+extern char **scene;
+extern char ** isoMode;
+extern char ** modevalues;
+extern char **focus;
+extern char **flash;
+extern char **previewFormatArray;
+extern char **pictureFormatArray;
+extern char ** fps_const_str;
+extern char ** fps_range_str;
+extern char ** rangeDescription;
+extern param_Array ** capture_Array;
+extern param_Array ** Vcapture_Array;
+extern param_Array ** preview_Array;
+extern param_Array ** thumbnail_Array;
extern timeval autofocus_start, picture_start;
extern const char *cameras[];
extern double latitude;
extern double degree_by_step;
extern double longitude;
extern double altitude;
-extern char dir_path[80];
+extern char output_dir_path[];
+extern char images_dir_path[];
extern int AutoConvergenceModeIDX;
extern const char *autoconvergencemode[];
-extern const char *manualconvergencevalues[];
-extern const int ManualConvergenceDefaultValueIDX;
-extern size_t length_cam;
+extern int numCamera;
+extern bool stereoMode;
extern char script_name[];
-extern int restartCount;
-extern bool bLogSysLinkTrace;
extern int bufferStarvationTest;
extern size_t length_previewSize;
+extern size_t length_thumbnailSize;
extern size_t lenght_Vcapture_size;
extern size_t length_outformat;
extern size_t length_capture_Size;
@@ -95,7 +152,53 @@ extern size_t length_Zoom;
extern size_t length_fps_ranges;
extern size_t length_fpsConst_Ranges;
extern size_t length_fpsConst_RangesSec;
+extern int platformID;
+extern char **stereoLayout;
+extern char **stereoCapLayout;
+extern void getSizeParametersFromCapabilities();
+extern int exposure_mode;
+int manE = 0;
+extern int manualExp ;
+extern int manualExpMin ;
+extern int manualExpMax ;
+int manG = 0;
+extern int manualGain ;
+extern int manualGainMin ;
+extern int manualGainMax ;
+int manC = 0;
+extern int manualConv ;
+extern int manualConvMin ;
+extern int manualConvMax ;
+extern bool faceDetectToggle;
+extern unsigned int burstCount;
+
+/** Buffer source reset */
+extern bool bufferSourceInputReset;
+extern bool bufferSourceOutputReset;
+
+void trim_script_cmd(char *cmd) {
+ char *nl, *cr;
+
+ // first remove all carriage return symbols
+ while ( NULL != (cr = strchr(cmd, '\r'))) {
+ for (char *c = cr; '\0' != *c; c++) {
+ *c = *(c+1);
+ }
+ }
+ // then remove all single line feed symbols
+ while ( NULL != (nl = strchr(cmd, '\n'))) {
+ if (*nl == *(nl+1)) {
+ // two or more concatenated newlines:
+ // end of script found
+ break;
+ }
+ // clip the newline
+ for (char *c = nl; '\0' != *c; c++) {
+ *c = *(c+1);
+ }
+ }
+}
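
A quick illustration of what the trimming does (the command strings are made up): carriage returns are always dropped, a single line feed is spliced out, and two consecutive line feeds are treated as the end-of-script marker and left alone.

char cmd1[] = "8auto\r";        // stray CR from a DOS-edited script
trim_script_cmd(cmd1);          // cmd1 is now "8auto"
char cmd2[] = "512MP\n\n";      // double newline marks the end of the script
trim_script_cmd(cmd2);          // cmd2 is left untouched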
int execute_functional_script(char *script) {
char *cmd, *ctx, *cycle_cmd, *temp_cmd;
@@ -105,9 +208,14 @@ int execute_functional_script(char *script) {
int cycleCounter = 1;
int tLen = 0;
unsigned int iteration = 0;
+ bool zoomtoggle = false;
+ bool smoothzoomtoggle = false;
status_t ret = NO_ERROR;
- int frameR = 20;
- int frameRateIndex = 0;
+ //int frameR = 20;
+ int frameRConst = 0;
+ int frameRRange = 0;
+ struct CameraInfo cameraInfo;
+ bool queueEmpty = true;
LOG_FUNCTION_NAME;
@@ -116,6 +224,7 @@ int execute_functional_script(char *script) {
cmd = strtok_r((char *) script, DELIMITER, &ctx);
while ( NULL != cmd && (stopScript == false)) {
+ trim_script_cmd(cmd);
id = cmd[0];
printf("Full Command: %s \n", cmd);
printf("Command: %c \n", cmd[0]);
@@ -228,28 +337,25 @@ int execute_functional_script(char *script) {
break;
case '2':
- stopPreview();
-
if ( recordingMode ) {
-
- camera->disconnect();
- camera.clear();
stopRecording();
+ stopPreview();
closeRecorder();
-
+ camera->disconnect();
+ camera.clear();
camera = Camera::connect(camera_index);
if ( NULL == camera.get() ) {
sleep(1);
camera = Camera::connect(camera_index);
-
if ( NULL == camera.get() ) {
return -1;
}
}
camera->setListener(new CameraHandler());
camera->setParameters(params.flatten());
-
recordingMode = false;
+ } else {
+ stopPreview();
}
break;
@@ -263,40 +369,25 @@ int execute_functional_script(char *script) {
break;
- case '4':
- {
- printf("Setting resolution...");
- int width, height;
- for(i = 0; i < length_previewSize ; i++)
- {
- if( strcmp((cmd + 1), previewSize[i].desc) == 0)
- {
- width = previewSize[i].width;
- height = previewSize[i].height;
- previewSizeIDX = i;
- break;
- }
- }
+ case 'V':
+ previewRotation = atoi(cmd + 1);
+ params.set(KEY_SENSOR_ORIENTATION, previewRotation);
- if (i == length_previewSize ) //if the resolution is not in the supported ones
- {
- char *res = NULL;
- res = strtok(cmd + 1, "x");
- width = atoi(res);
- res = strtok(NULL, "x");
- height = atoi(res);
- }
+ if ( hardwareActive )
+ camera->setParameters(params.flatten());
- if ( NULL != params.get(KEY_STEREO_CAMERA) ) {
- if ( strcmp(params.get(KEY_STEREO_CAMERA), "true") == 0 ) {
- height *=2;
- }
- }
+ break;
- printf("Resolution: %d x %d\n", width, height);
- params.setPreviewSize(width, height);
- reSizePreview = true;
+ case '4':
+ printf("Setting resolution...");
+ a = checkSupportedParamScriptResol(preview_Array, numpreviewSize, cmd, &resol_index);
+ if (a > -1) {
+ params.setPreviewSize(preview_Array[resol_index]->width, preview_Array[resol_index]->height);
+ previewSizeIDX = resol_index;
+ } else {
+ printf("\nNot supported parameter %s from sensor %d\n\n", cmd + 1, camera_index);
+ }
if ( hardwareActive && previewRunning ) {
camera->stopPreview();
camera->setParameters(params.flatten());
@@ -304,23 +395,51 @@ int execute_functional_script(char *script) {
} else if ( hardwareActive ) {
camera->setParameters(params.flatten());
}
-
break;
- }
- case '5':
- for (i = 0; i < length_capture_Size; i++) {
- if ( strcmp((cmd + 1), captureSize[i].name) == 0)
- break;
- }
+ case '5':
+ if( strcmp((cmd + 1), "MAX_CAPTURE_SIZE") == 0) {
+ resol_index = 0;
+ for (int i=0; i<numcaptureSize; i++) {
+ if ((capture_Array[resol_index]->width * capture_Array[resol_index]->height) < (capture_Array[i]->width * capture_Array[i]->height)) {
+ resol_index = i;
+ }
+ }
+ if ((0 < capture_Array[resol_index]->width) && (0 < capture_Array[resol_index]->height)) {
+ params.setPictureSize(capture_Array[resol_index]->width, capture_Array[resol_index]->height);
+ captureSizeIDX = resol_index;
+ printf("Capture Size set: %dx%d\n", capture_Array[resol_index]->width, capture_Array[resol_index]->height);
+ } else {
+ printf("\nCapture size is 0!\n");
+ }
+ } else {
+ a = checkSupportedParamScriptResol(capture_Array, numcaptureSize, cmd, &resol_index);
+ if (camera_index != 2) {
+ if (a > -1) {
+ params.setPictureSize(capture_Array[resol_index]->width, capture_Array[resol_index]->height);
+ captureSizeIDX = resol_index;
+ } else {
+ printf("\nNot supported parameter %s from sensor %d\n\n", cmd + 1, camera_index);
+ }
+ } else {
+ int widthC, heightC;
+ char *resC = NULL;
+ resC = strtok(cmd + 1, "x");
+ widthC = atoi(resC);
+ resC = strtok(NULL, "x");
+ heightC = atoi(resC);
+ params.setPictureSize(widthC,heightC);
+ a = checkSupportedParamScriptResol(capture_Array, numcaptureSize,
+ widthC, heightC, &resol_index);
+ if (a > -1) captureSizeIDX = resol_index;
+ }
- if ( i < length_capture_Size ) {
- params.setPictureSize(captureSize[i].width, captureSize[i].height);
- captureSizeIDX = i;
+ if ( hardwareActive ) {
+ camera->setParameters(params.flatten());
+ }
}
- if ( hardwareActive )
- camera->setParameters(params.flatten());
+ requestBufferSourceReset();
break;
@@ -368,7 +487,13 @@ int execute_functional_script(char *script) {
break;
case '8':
- params.set(params.KEY_WHITE_BALANCE, (cmd + 1));
+
+ a = checkSupportedParamScript(awb, numawb, cmd);
+ if (a > -1) {
+ params.set(params.KEY_WHITE_BALANCE, (cmd + 1));
+ } else {
+ printf("\nNot supported parameter %s from sensor %d\n\n", cmd + 1, camera_index);
+ }
if ( hardwareActive )
camera->setParameters(params.flatten());
@@ -403,16 +528,37 @@ int execute_functional_script(char *script) {
break;
case '~':
- params.setPreviewFormat(cmd + 1);
+
+ a = checkSupportedParamScript(previewFormatArray, numpreviewFormat, cmd);
+ if (a > -1) {
+ params.setPreviewFormat(cmd + 1);
+ } else {
+ printf("\nNot supported parameter %s from sensor %d\n\n", cmd + 1, camera_index);
+ }
+
if ( hardwareActive )
camera->setParameters(params.flatten());
break;
case '$':
- params.setPictureFormat(cmd + 1);
- if ( hardwareActive )
+
+ a = checkSupportedParamScript(pictureFormatArray, numpictureFormat, cmd);
+ if (a > -1) {
+ params.setPictureFormat(cmd + 1);
+ } else {
+ printf("\nNot supported parameter %s from sensor %d\n\n", cmd + 1, camera_index);
+ }
+
+ queueEmpty = true;
+ if ( bufferSourceOutputThread.get() ) {
+ if ( 0 < bufferSourceOutputThread->hasBuffer() ) {
+ queueEmpty = false;
+ }
+ }
+ if ( hardwareActive && queueEmpty ) {
camera->setParameters(params.flatten());
+ }
break;
case '-':
@@ -429,14 +575,10 @@ int execute_functional_script(char *script) {
case 'A':
camera_index=atoi(cmd+1);
- // camera_index %= ARRAY_SIZE(cameras);
- camera_index %= length_cam;
- if ( camera_index == 2)
- params.set(KEY_STEREO_CAMERA, "true");
- else
- params.set(KEY_STEREO_CAMERA, "false");
+ camera_index %= numCamera;
printf("%s selected.\n", cameras[camera_index]);
+ firstTime = true;
if ( hardwareActive ) {
stopPreview();
@@ -446,11 +588,6 @@ int execute_functional_script(char *script) {
closeCamera();
openCamera();
}
-
- if (camera_index == 0) params.setPreviewFrameRate(30);
- else params.setPreviewFrameRate(27);
-
-
break;
case 'a':
@@ -471,17 +608,59 @@ int execute_functional_script(char *script) {
break;
case 'l':
+ a = checkSupportedParamScriptResol(Vcapture_Array, numVcaptureSize, cmd, &resol_index);
+ if (a > -1) {
+ VcaptureSizeIDX = resol_index;
+ } else {
+ printf("\nNot supported parameter %s from sensor %d\n\n", cmd + 1, camera_index);
+ }
+ break;
+
case 'L':
- for(i = 0; i < lenght_Vcapture_size; i++)
+ if(stereoMode)
{
- if( strcmp((cmd + 1), VcaptureSize[i].desc) == 0)
- {
- VcaptureSizeIDX = i;
- printf("Video Capture Size: %s\n", VcaptureSize[i].desc);
- break;
+ a = checkSupportedParamScriptLayout(stereoLayout, numLay, cmd, &stereoLayoutIDX);
+ if (a > -1) {
+ params.set(KEY_S3D_PRV_FRAME_LAYOUT, cmd + 1);
+ } else {
+ printf("\nNot supported parameter %s from sensor %d\n\n", cmd + 1, camera_index);
+ }
+
+
+ getSizeParametersFromCapabilities();
+ if (hardwareActive && previewRunning) {
+ stopPreview();
+ camera->setParameters(params.flatten());
+ startPreview();
+ } else if (hardwareActive) {
+ camera->setParameters(params.flatten());
+ }
+ }
+ break;
+
+
+ case '.':
+ if(stereoMode)
+ {
+ a = checkSupportedParamScriptLayout(stereoCapLayout, numCLay, cmd, &stereoCapLayoutIDX);
+ if (a > -1) {
+ params.set(KEY_S3D_CAP_FRAME_LAYOUT_VALUES, cmd + 1);
+ } else {
+ printf("\nNot supported parameter %s from sensor %d\n\n", cmd + 1, camera_index);
+ }
+
+
+ getSizeParametersFromCapabilities();
+ if (hardwareActive && previewRunning) {
+ stopPreview();
+ camera->setParameters(params.flatten());
+ startPreview();
+ } else if (hardwareActive) {
+ camera->setParameters(params.flatten());
}
}
break;
+
case ']':
for(i = 0; i < length_V_bitRate; i++)
{
@@ -494,32 +673,20 @@ int execute_functional_script(char *script) {
}
break;
case ':':
- int width, height;
- for(i = 0; i < length_previewSize ; i++)
- {
- if( strcmp((cmd + 1), previewSize[i].desc) == 0)
- {
- width = previewSize[i].width;
- height = previewSize[i].height;
- thumbSizeIDX = i;
- break;
- }
- }
- if (i == length_previewSize ) //if the resolution is not in the supported ones
- {
- char *res = NULL;
- res = strtok(cmd + 1, "x");
- width = atoi(res);
- res = strtok(NULL, "x");
- height = atoi(res);
+ a = checkSupportedParamScriptResol(thumbnail_Array, numthumbnailSize, cmd, &resol_index);
+ if (a > -1) {
+ params.set(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH, thumbnail_Array[resol_index]->width);
+ params.set(CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT,thumbnail_Array[resol_index]->height);
+ thumbSizeIDX = resol_index;
+ } else {
+ printf("\nNot supported parameter %s from sensor %d\n\n", cmd + 1, camera_index);
}
- params.set(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH, width);
- params.set(CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT, height);
-
- if ( hardwareActive )
+ if ( hardwareActive ) {
camera->setParameters(params.flatten());
+ }
+
break;
@@ -565,6 +732,8 @@ int execute_functional_script(char *script) {
if ( hardwareActive )
camera->setParameters(params.flatten());
+ requestBufferSourceReset();
+
break;
case 'K':
@@ -575,15 +744,29 @@ int execute_functional_script(char *script) {
break;
case 'F':
- if ( hardwareActive )
+ if ( hardwareActive ) {
camera->sendCommand(CAMERA_CMD_START_FACE_DETECTION, 0, 0);
+ faceDetectToggle = true;
+ }
break;
- case 'T':
+ case 'I':
+ params.set(KEY_AF_TIMEOUT, (cmd + 1));
if ( hardwareActive )
+ camera->setParameters(params.flatten());
+
+ break;
+
+ case 'T':
+
+ if ( hardwareActive ) {
camera->sendCommand(CAMERA_CMD_STOP_FACE_DETECTION, 0, 0);
+ faceDetectToggle = false;
+ }
+
+ break;
case 'O':
params.set(KEY_GLBCE, (cmd+1));
@@ -595,17 +778,53 @@ int execute_functional_script(char *script) {
case 'u':
// HQ should always be in ldc-nsf
// if not HQ, then return the ipp to its previous state
- if( !strcmp(capture[capture_mode], "high-quality") ) {
+ if ( !strcmp((cmd + 1), "high-quality") ) {
ippIDX_old = ippIDX;
ippIDX = 3;
params.set(KEY_IPP, ipp_mode[ippIDX]);
+ params.set(CameraParameters::KEY_RECORDING_HINT, CameraParameters::FALSE);
+ previewRotation = 0;
+ params.set(KEY_SENSOR_ORIENTATION, previewRotation);
+ } else if ( !strcmp((cmd + 1), "video-mode") ) {
+ params.set(CameraParameters::KEY_RECORDING_HINT, CameraParameters::TRUE);
+ camera->getCameraInfo(camera_index, &cameraInfo);
+ previewRotation = ((360-cameraInfo.orientation)%360);
+                    if (previewRotation >= 0 && previewRotation <= 360) {
+ params.set(KEY_SENSOR_ORIENTATION, previewRotation);
+ }
+ printf("previewRotation: %d\n", previewRotation);
} else {
ippIDX = ippIDX_old;
+ params.set(CameraParameters::KEY_RECORDING_HINT, CameraParameters::FALSE);
+ previewRotation = 0;
+ params.set(KEY_SENSOR_ORIENTATION, previewRotation);
+ }
+ a = checkSupportedParamScript(modevalues, nummodevalues, cmd);
+ if (a > -1) {
+ params.set(KEY_MODE, (cmd + 1));
+ } else {
+ printf("\nNot supported parameter %s from sensor %d\n\n", cmd + 1, camera_index);
}
- params.set(KEY_MODE, (cmd + 1));
- if ( hardwareActive )
+ if ( hardwareActive ) {
+ if (previewRunning) {
+ stopPreview();
+ }
+ camera->setParameters(params.flatten());
+ // Get parameters from capabilities for the new capture mode
+ params = camera->getParameters();
+ getSizeParametersFromCapabilities();
+ getParametersFromCapabilities();
+ // Set framerate 30fps and 12MP capture resolution if available for the new capture mode.
+ // If not available set framerate and capture mode under index 0 from fps_const_str and capture_Array.
+ frameRateIDX = getDefaultParameter("30000,30000", constCnt, fps_const_str);
+ captureSizeIDX = getDefaultParameterResol("12MP", numcaptureSize, capture_Array);
+ params.set(CameraParameters::KEY_PREVIEW_FPS_RANGE, fps_const_str[frameRateIDX]);
+ params.setPictureSize(capture_Array[captureSizeIDX]->width, capture_Array[captureSizeIDX]->height);
camera->setParameters(params.flatten());
+ }
+
+ requestBufferSourceReset();
break;
@@ -618,6 +837,36 @@ int execute_functional_script(char *script) {
break;
+ case 'H':
+
+ setDefaultExpGainPreset(shotParams, atoi(cmd + 1));
+ break;
+
+
+ case 'n':
+
+ switch (*(cmd + 1)) {
+                        case '0':
+                            shotConfigFlush = false;
+                            break;
+                        case '1':
+                            shotConfigFlush = true;
+ break;
+ default:
+                        printf("Malformed flush shot config command: \"%s\"\n", (cmd + 1));
+ break;
+ }
+
+ updateShotConfigFlushParam();
+
+ break;
+
+ case '?':
+
+ setExpGainPreset(shotParams, cmd + 1, true, PARAM_EXP_BRACKET_PARAM_NONE, shotConfigFlush);
+
+ break;
+
case 'W':
tempBracketRange = atoi(cmd + 1);
@@ -636,7 +885,8 @@ int execute_functional_script(char *script) {
case '#':
- params.set(KEY_BURST, atoi(cmd + 1));
+ params.set(KEY_TI_BURST, atoi(cmd + 1));
+ burstCount = atoi(cmd + 1);
if ( hardwareActive )
camera->setParameters(params.flatten());
@@ -644,7 +894,13 @@ int execute_functional_script(char *script) {
break;
case 'J':
- params.set(CameraParameters::KEY_FLASH_MODE, (cmd+1));
+
+ a = checkSupportedParamScript(flash, numflash, cmd);
+ if (a > -1) {
+ params.set(CameraParameters::KEY_FLASH_MODE, (cmd + 1));
+ } else {
+ printf("\nNot supported parameter %s from sensor %d\n\n", cmd + 1, camera_index);
+ }
if ( hardwareActive )
camera->setParameters(params.flatten());
@@ -652,25 +908,54 @@ int execute_functional_script(char *script) {
break;
case 'w':
- params.set(params.KEY_SCENE_MODE, (cmd + 1));
+ a = checkSupportedParamScript(scene, numscene, cmd);
+ if (a > -1) {
+ params.set(params.KEY_SCENE_MODE, (cmd + 1));
+ } else {
+ printf("\nNot supported parameter %s from sensor %d\n\n", cmd + 1, camera_index);
+ }
if ( hardwareActive )
camera->setParameters(params.flatten());
break;
case 'B' :
- params.set(KEY_VNF, (cmd + 1));
+ if(strcmp(vnfstr, "true") == 0) {
+ if (strcmp(cmd + 1, "1") == 0) {
+ trySetVideoNoiseFilter(true);
+ }
+ else if (strcmp(cmd + 1, "0") == 0){
+ trySetVideoNoiseFilter(false);
+ }
+ } else {
+ trySetVideoNoiseFilter(false);
+ printf("\n VNF is not supported \n\n");
+ }
- if ( hardwareActive )
+ if ( hardwareActive ) {
camera->setParameters(params.flatten());
+ }
+ break;
case 'C' :
- params.set(KEY_VSTAB, (cmd + 1));
- if ( hardwareActive )
+ if (strcmp(vstabstr, "true") == 0) {
+ if (strcmp(cmd + 1, "1") == 0) {
+ trySetVideoStabilization(true);
+ } else if (strcmp(cmd + 1, "0") == 0) {
+ trySetVideoStabilization(false);
+ } else {
+ printf("\nNot supported parameter %s from sensor %d\n\n", cmd + 1, camera_index);
+ }
+ } else {
+ printf("\nNot supported parameter vstab from sensor %d\n\n", camera_index);
+ }
+
+ if ( hardwareActive ) {
camera->setParameters(params.flatten());
+ }
break;
case 'D':
@@ -686,7 +971,11 @@ int execute_functional_script(char *script) {
case 'i':
iso_mode = atoi(cmd + 1);
- params.set(KEY_ISO, iso_mode);
+ if (iso_mode < numisoMode) {
+ params.set(KEY_ISO, isoMode[iso_mode]);
+ } else {
+ printf("\nNot supported parameter %s for iso mode from sensor %d\n\n", cmd + 1, camera_index);
+ }
if ( hardwareActive )
camera->setParameters(params.flatten());
@@ -725,31 +1014,59 @@ int execute_functional_script(char *script) {
break;
case 'z':
- case 'Z':
+ zoomtoggle = false;
+
+ if(strcmp(zoomstr, "true") == 0) {
+ for(i = 0; i < length_Zoom; i++) {
+ if( strcmp((cmd + 1), zoom[i].zoom_description) == 0) {
+ zoomIDX = i;
+ zoomtoggle = true;
+ break;
+ }
+ }
-#if defined(OMAP_ENHANCEMENT) && defined(TARGET_OMAP3)
- params.set(CameraParameters::KEY_ZOOM, atoi(cmd + 1));
-#else
+ if (!zoomtoggle) {
+ printf("\nNot supported parameter %s from sensor %d\n\n", cmd + 1, camera_index);
+ }
- for(i = 0; i < length_Zoom; i++)
- {
- if( strcmp((cmd + 1), zoom[i].zoom_description) == 0)
- {
- zoomIDX = i;
- break;
+
+ params.set(CameraParameters::KEY_ZOOM, zoom[zoomIDX].idx);
+
+ if ( hardwareActive ) {
+ camera->setParameters(params.flatten());
}
}
- params.set(CameraParameters::KEY_ZOOM, zoom[zoomIDX].idx);
-#endif
+                break;
+
+            case 'Z':
+ smoothzoomtoggle = false;
+
+ if(strcmp(smoothzoomstr, "true") == 0) {
+ for(i = 0; i < length_Zoom; i++) {
+ if( strcmp((cmd + 1), zoom[i].zoom_description) == 0) {
+ zoomIDX = i;
+ smoothzoomtoggle = true;
+ break;
+ }
+ }
- if ( hardwareActive )
- camera->setParameters(params.flatten());
+ if (!smoothzoomtoggle) {
+ printf("\nNot supported parameter %s from sensor %d\n\n", cmd + 1, camera_index);
+ }
+ if ( hardwareActive ) {
+ camera->sendCommand(CAMERA_CMD_START_SMOOTH_ZOOM, zoom[zoomIDX].idx, 0);
+ }
+ }
break;
case 'j':
- params.set(KEY_EXPOSURE, (cmd + 1));
+
+ a = checkSupportedParamScript(exposureMode, numExposureMode, cmd);
+ if (a > -1) {
+ params.set(KEY_EXPOSURE, (cmd + 1));
+ } else {
+ printf("\nNot supported parameter %s from sensor %d\n\n", cmd + 1, camera_index);
+ }
if ( hardwareActive )
camera->setParameters(params.flatten());
@@ -775,7 +1092,12 @@ int execute_functional_script(char *script) {
break;
case 'e':
- params.set(params.KEY_EFFECT, (cmd + 1));
+ a = checkSupportedParamScript(effectss, numEffects, cmd);
+ if (a > -1) {
+ params.set(params.KEY_EFFECT, (cmd + 1));
+ } else {
+ printf("\nNot supported parameter %s from sensor %d\n\n", cmd + 1, camera_index);
+ }
if ( hardwareActive )
camera->setParameters(params.flatten());
@@ -783,30 +1105,29 @@ int execute_functional_script(char *script) {
break;
case 'r':
-
- frameR = atoi(cmd + 1);
-
-
- if (camera_index == 0) {
- for (i = 0; i < length_fpsConst_Ranges; i++) {
- if (frameR == fpsConstRanges[i].constFramerate)
- frameRateIndex = i;
-
+ if (strcmp((cmd + 1), "MAX_FRAMERATE") == 0) {
+ frameRConst = 0;
+ for (int i=0; i<constCnt; i++) {
+ if (constFramerate[frameRConst] < constFramerate[i]) {
+ frameRConst = i;
+ }
+ }
+ if (0 < constFramerate[frameRConst]) {
+ params.set(CameraParameters::KEY_PREVIEW_FPS_RANGE, fps_const_str[frameRConst]);
+ frameRateIDX = frameRConst;
+ printf("Framerate set: %d fps\n", constFramerate[frameRConst]);
+ } else {
+ printf("\nFramerate is 0!\n");
}
} else {
- for (i = 0; i < length_fpsConst_RangesSec; i++) {
- if (frameR == fpsConstRangesSec[i].constFramerate)
- frameRateIndex = i;
+ a = checkSupportedParamScriptfpsConst(constFramerate, constCnt, cmd, &frameRConst);
+ if (a > -1) {
+ params.set(CameraParameters::KEY_PREVIEW_FPS_RANGE, fps_const_str[frameRConst]);
+ frameRateIDX = frameRConst;
+ } else {
+ printf("\nNot supported parameter %s from sensor %d\n\n", cmd + 1, camera_index);
}
}
-
-
- if (camera_index == 0)
- params.set(CameraParameters::KEY_PREVIEW_FPS_RANGE, fpsConstRanges[frameRateIndex].range);
- else
- params.set(CameraParameters::KEY_PREVIEW_FPS_RANGE, fpsConstRangesSec[frameRateIndex].range);
-
-
if ( hardwareActive && previewRunning ) {
camera->stopPreview();
camera->setParameters(params.flatten());
@@ -814,40 +1135,40 @@ int execute_functional_script(char *script) {
} else if ( hardwareActive ) {
camera->setParameters(params.flatten());
}
-
break;
case 'R':
- for(i = 0; i < length_fps_ranges; i++)
- {
- if( strcmp((cmd + 1), fpsRanges[i].rangeDescription) == 0)
- {
- fpsRangeIdx = i;
- printf("Selected Framerate range: %s\n", fpsRanges[i].rangeDescription);
- if ( hardwareActive ) {
- params.set(CameraParameters::KEY_PREVIEW_FPS_RANGE, fpsRanges[i].range);
- params.remove(CameraParameters::KEY_PREVIEW_FRAME_RATE);
- camera->setParameters(params.flatten());
- }
- break;
- }
+ a = checkSupportedParamScriptfpsRange(rangeDescription, rangeCnt, cmd, &frameRRange);
+ if (a > -1) {
+ params.set(CameraParameters::KEY_PREVIEW_FPS_RANGE, fps_range_str[frameRRange]);
+ fpsRangeIdx = frameRRange;
+ } else {
+ printf("\nNot supported parameter %s from sensor %d\n\n", cmd + 1, camera_index);
}
break;
case 'x':
+ a = checkSupportedParamScript(antiband, numAntibanding, cmd);
+ if (a > -1) {
params.set(params.KEY_ANTIBANDING, (cmd + 1));
+ } else {
+ printf("\nNot supported parameter %s from sensor %d\n\n", cmd + 1, camera_index);
+ }
if ( hardwareActive )
camera->setParameters(params.flatten());
-
break;
case 'g':
- params.set(params.KEY_FOCUS_MODE, (cmd + 1));
+ a = checkSupportedParamScript(focus, numfocus, cmd);
+ if (a > -1) {
+ params.set(params.KEY_FOCUS_MODE, (cmd + 1));
+ } else {
+ printf("\nNot supported parameter %s from sensor %d\n\n", cmd + 1, camera_index);
+ }
if ( hardwareActive )
camera->setParameters(params.flatten());
-
break;
case 'G':
@@ -857,7 +1178,17 @@ int execute_functional_script(char *script) {
if ( hardwareActive )
camera->setParameters(params.flatten());
- params.remove(CameraParameters::KEY_FOCUS_AREAS);
+ break;
+
+ case 'y':
+
+ params.set(CameraParameters::KEY_METERING_AREAS, (cmd + 1));
+
+ if ( hardwareActive ) {
+ camera->setParameters(params.flatten());
+ }
+
+ break;
case 'f':
gettimeofday(&autofocus_start, 0);
@@ -868,14 +1199,94 @@ int execute_functional_script(char *script) {
break;
case 'p':
+ {
+ int msgType = 0;
+ const char *format = params.getPictureFormat();
+
+ if((0 == strcmp(modevalues[capture_mode], "video-mode")) &&
+ (0 != strcmp(videosnapshotstr, "true"))) {
+ printf("Video Snapshot is not supported\n");
+ } else if ( hardwareActive ) {
+ if((NULL != format) && isRawPixelFormat(format)) {
+ createBufferOutputSource();
+ if (bufferSourceOutputThread.get()) {
+ bufferSourceOutputThread->setBuffer(shotParams);
+ bufferSourceOutputThread->setStreamCapture(streamCapture, expBracketIdx);
+ }
+ } else if(strcmp(modevalues[capture_mode], "video-mode") == 0) {
+ msgType = CAMERA_MSG_COMPRESSED_IMAGE |
+ CAMERA_MSG_RAW_IMAGE;
+#ifdef OMAP_ENHANCEMENT_BURST_CAPTURE
+ msgType |= CAMERA_MSG_RAW_BURST;
+#endif
+ } else {
+ msgType = CAMERA_MSG_POSTVIEW_FRAME |
+ CAMERA_MSG_RAW_IMAGE_NOTIFY |
+ CAMERA_MSG_COMPRESSED_IMAGE |
+ CAMERA_MSG_SHUTTER;
+#ifdef OMAP_ENHANCEMENT_BURST_CAPTURE
+ msgType |= CAMERA_MSG_RAW_BURST;
+#endif
+ }
+
+ gettimeofday(&picture_start, 0);
+ ret = camera->setParameters(params.flatten());
+ if ( ret != NO_ERROR ) {
+ printf("Error returned while setting parameters");
+ break;
+ }
+ ret = camera->takePictureWithParameters(msgType, shotParams.flatten());
+ if ( ret != NO_ERROR ) {
+ printf("Error returned while taking a picture");
+ break;
+ }
+ }
+ break;
+ }
+
+ case 'S':
+ {
+ if (streamCapture) {
+ streamCapture = false;
+ expBracketIdx = BRACKETING_IDX_DEFAULT;
+ setDefaultExpGainPreset(shotParams, expBracketIdx);
+ // Stop streaming
+ if (bufferSourceOutputThread.get()) {
+ bufferSourceOutputThread->setStreamCapture(streamCapture, expBracketIdx);
+ }
+ } else {
+ streamCapture = true;
+ expBracketIdx = BRACKETING_IDX_STREAM;
+ setSingleExpGainPreset(shotParams, expBracketIdx, 0, 0);
+ // Queue more frames initially
+ shotParams.set(ShotParameters::KEY_BURST, BRACKETING_STREAM_BUFFERS);
+ }
+ break;
+ }
+
+ case 'P':
+ {
+ int msgType = CAMERA_MSG_COMPRESSED_IMAGE;
+ ShotParameters reprocParams;
+
gettimeofday(&picture_start, 0);
- if ( hardwareActive )
- ret = camera->takePicture(CAMERA_MSG_COMPRESSED_IMAGE|CAMERA_MSG_RAW_IMAGE);
+ createBufferInputSource();
+
+ if (bufferSourceOutputThread.get() &&
+ bufferSourceOutputThread->hasBuffer())
+ {
+ bufferSourceOutputThread->setStreamCapture(false, expBracketIdx);
+ if (hardwareActive) camera->setParameters(params.flatten());
- if ( ret != NO_ERROR )
- printf("Error returned while taking a picture");
+ if (bufferSourceInput.get()) {
+ buffer_info_t info = bufferSourceOutputThread->popBuffer();
+ bufferSourceInput->setInput(info, params.getPictureFormat(), reprocParams);
+ if (hardwareActive) camera->reprocess(msgType, String8());
+ }
+ }
break;
+ }
case 'd':
dly = atoi(cmd + 1);
@@ -975,12 +1386,14 @@ int execute_functional_script(char *script) {
case 'X':
{
- char rem_str[50];
- printf("Deleting images from %s \n", dir_path);
- if(!sprintf(rem_str,"rm %s/*.jpg",dir_path))
+ char rem_str[384];
+ printf("Deleting images from %s \n", images_dir_path);
+ if (!sprintf(rem_str, "rm %s/*.jpg", images_dir_path)) {
printf("Sprintf Error");
- if(system(rem_str))
+ }
+ if (system(rem_str)) {
printf("Images were not deleted\n");
+ }
break;
}
@@ -990,24 +1403,89 @@ int execute_functional_script(char *script) {
if ( AutoConvergenceModeIDX < 0 || AutoConvergenceModeIDX > 4 )
AutoConvergenceModeIDX = 0;
params.set(KEY_AUTOCONVERGENCE, autoconvergencemode[AutoConvergenceModeIDX]);
- if ( AutoConvergenceModeIDX != 4 )
- params.set(KEY_MANUALCONVERGENCE_VALUES, manualconvergencevalues[ManualConvergenceDefaultValueIDX]);
- if ( hardwareActive )
+ if (AutoConvergenceModeIDX != 4) {
+ params.set(KEY_MANUAL_CONVERGENCE, manualConv);
+ }
+ if (hardwareActive) {
camera->setParameters(params.flatten());
+ }
break;
}
case '^':
- {
- char strtmpval[7];
- if ( strcmp (autoconvergencemode[AutoConvergenceModeIDX], AUTOCONVERGENCE_MODE_MANUAL) == 0) {
- sprintf(strtmpval,"%d", atoi(cmd + 1));
- params.set(KEY_MANUALCONVERGENCE_VALUES, strtmpval);
+ if (strcmp(autoconvergencemode[AutoConvergenceModeIDX], "manual") == 0) {
+ manC = atoi(cmd + 1);
+ if(manC >= manualConvMin && manC <= manualConvMax)
+ {
+ params.set(KEY_MANUAL_CONVERGENCE, manC);
+ }
+ else if(manC < manualConvMin)
+ {
+ printf(" wrong parameter for manual convergence \n");
+ params.set(KEY_MANUAL_CONVERGENCE, manualConvMin);
+ }
+ else
+ {
+ printf(" wrong parameter for manual convergence \n");
+ params.set(KEY_MANUAL_CONVERGENCE, manualConvMax);
+ }
if ( hardwareActive )
camera->setParameters(params.flatten());
}
break;
- }
+
+
+ case 'Q':
+ if ( strcmp (exposureMode[exposure_mode], "manual") == 0) {
+ manE = atoi(cmd + 1);
+ if(manE >= manualExpMin && manE <= manualExpMax)
+ {
+ params.set(KEY_MANUAL_EXPOSURE, manE);
+ params.set(KEY_MANUAL_EXPOSURE_RIGHT, manE);
+ }
+ else if(manE < manualExpMin)
+ {
+ printf(" wrong parameter for manual exposure \n");
+ params.set(KEY_MANUAL_EXPOSURE, manualExpMin);
+ params.set(KEY_MANUAL_EXPOSURE_RIGHT, manualExpMin);
+ }
+ else
+ {
+ printf(" wrong parameter for manual exposure \n");
+ params.set(KEY_MANUAL_EXPOSURE, manualExpMax);
+ params.set(KEY_MANUAL_EXPOSURE_RIGHT, manualExpMax);
+ }
+
+ if ( hardwareActive )
+ camera->setParameters(params.flatten());
+ }
+ break;
+
+ case ',':
+ if ( strcmp (exposureMode[exposure_mode], "manual") == 0) {
+ manG = atoi(cmd + 1);
+ if(manG >= manualGainMin && manG <= manualGainMax)
+ {
+ params.set(KEY_MANUAL_GAIN_ISO, manG);
+ params.set(KEY_MANUAL_GAIN_ISO_RIGHT, manG);
+ }
+ else if(manG < manualGainMin)
+ {
+ printf(" wrong parameter for manual gain \n");
+ params.set(KEY_MANUAL_GAIN_ISO, manualGainMin);
+ params.set(KEY_MANUAL_GAIN_ISO_RIGHT, manualGainMin);
+ }
+ else
+ {
+ printf(" wrong parameter for manual gain \n");
+ params.set(KEY_MANUAL_GAIN_ISO, manualGainMax);
+ params.set(KEY_MANUAL_GAIN_ISO_RIGHT, manualGainMax);
+ }
+
+ if ( hardwareActive )
+ camera->setParameters(params.flatten());
+ }
+ break;
default:
printf("Unrecognized command!\n");
@@ -1029,6 +1507,66 @@ exit:
}
+int checkSupportedParamScript(char **array, int size, char *param) {
+ for (int i=0; i<size; i++) {
+ if (strcmp((param + 1), array[i]) == 0) {
+ return 0;
+ }
+ }
+ return -1;
+}
+
+int checkSupportedParamScriptLayout(char **array, int size, char *param, int *index) {
+ for (int i=0; i<size; i++) {
+ if (strcmp((param + 1), array[i]) == 0) {
+ *index = i;
+ return 0;
+ }
+ }
+ return -1;
+}
+
+int checkSupportedParamScriptResol(param_Array **array, int size, char *param, int *num) {
+ for (int i=0; i<size; i++) {
+ if (strcmp((param + 1), array[i]->name) == 0) {
+ *num = i;
+ return 0;
+ }
+ }
+ return -1;
+}
+
+int checkSupportedParamScriptResol(param_Array **array, int size,
+ int width, int height, int *num) {
+ for (int i=0; i<size; i++) {
+ if ((width == array[i]->width) && (height == array[i]->height)) {
+ *num = i;
+ return 0;
+ }
+ }
+ return -1;
+}
+
+int checkSupportedParamScriptfpsConst(int *array, int size, char *param, int *num) {
+ for (int i=0; i<size; i++) {
+ if (atoi(param + 1) == array[i]) {
+ *num = i;
+ return 0;
+ }
+ }
+ return -1;
+}
+
+int checkSupportedParamScriptfpsRange(char **array, int size, char *param, int *num) {
+ for (int i=0; i<size; i++) {
+ if (strcmp(param + 1, array[i]) == 0) {
+ *num = i;
+ return 0;
+ }
+ }
+ return -1;
+}
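
All of these lookup helpers share one contract: they scan the capability array for the script argument (cmd + 1), return 0 and fill the out-parameter on a hit, and return -1 otherwise. A typical call site, mirroring the '4' (preview size) command above:

int resol_index = 0;
if (checkSupportedParamScriptResol(preview_Array, numpreviewSize, cmd, &resol_index) > -1) {
    params.setPreviewSize(preview_Array[resol_index]->width,
                          preview_Array[resol_index]->height);
} else {
    printf("\nNot supported parameter %s from sensor %d\n\n", cmd + 1, camera_index);
}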
+
char * get_cycle_cmd(const char *aSrc) {
unsigned ind = 0;
char *cycle_cmd = new char[256];
@@ -1046,48 +1584,19 @@ status_t dump_mem_status() {
return system(MEMORY_DUMP);
}
-char *load_script(char *config) {
+char *load_script(const char *config) {
FILE *infile;
size_t fileSize;
char *script;
size_t nRead = 0;
- char dir_name[40];
- size_t count;
- char rCount [5];
-
- count = 0;
infile = fopen(config, "r");
strcpy(script_name,config);
- // remove just the '.txt' part of the config
- while((config[count] != '.') && (count < sizeof(dir_name)/sizeof(dir_name[0])))
- count++;
+ printf("\n SCRIPT : <%s> is currently being executed \n", script_name);
- printf("\n SCRIPT : <%s> is currently being executed \n",script_name);
- if(strncpy(dir_name,config,count) == NULL)
- printf("Strcpy error");
-
- dir_name[count]=NULL;
-
- if(strcat(dir_path,dir_name) == NULL)
- printf("Strcat error");
-
- if(restartCount)
- {
- sprintf(rCount,"_%d",restartCount);
- if(strcat(dir_path, rCount) == NULL)
- printf("Strcat error RestartCount");
- }
-
- printf("\n COMPLETE FOLDER PATH : %s \n",dir_path);
- if(mkdir(dir_path,0777) == -1) {
- printf("\n Directory %s was not created \n",dir_path);
- } else {
- printf("\n Directory %s was created \n",dir_path);
- }
- printf("\n DIRECTORY CREATED FOR TEST RESULT IMAGES IN MMC CARD : %s \n",dir_name);
+ printf("\n DIRECTORY CREATED FOR TEST RESULT IMAGES IN MMC CARD : %s \n", output_dir_path);
if( (NULL == infile)){
printf("Error while opening script file %s!\n", config);
@@ -1098,7 +1607,7 @@ char *load_script(char *config) {
fileSize = ftell(infile);
fseek(infile, 0, SEEK_SET);
- script = (char *) malloc(fileSize);
+ script = (char *) malloc(fileSize + 1);
if ( NULL == script ) {
printf("Unable to allocate buffer for the script\n");
@@ -1106,6 +1615,8 @@ char *load_script(char *config) {
return NULL;
}
+ memset(script, 0, fileSize + 1);
+
if ((nRead = fread(script, 1, fileSize, infile)) != fileSize) {
printf("Error while reading script file!\n");
@@ -1119,25 +1630,19 @@ char *load_script(char *config) {
return script;
}
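
load_script() now allocates fileSize + 1 bytes and zero-fills the buffer, so the returned script is always NUL-terminated before the string functions in the parser touch it. A condensed view of the contract (the script name is a placeholder; DELIMITER is the separator macro the parser already uses):

char *script = load_script("stress1.txt");
if (script != NULL) {
    char *ctx = NULL;
    // safe even when the file lacks a trailing newline: the extra zero byte ends the string
    for (char *cmd = strtok_r(script, DELIMITER, &ctx); cmd != NULL;
         cmd = strtok_r(NULL, DELIMITER, &ctx)) {
        trim_script_cmd(cmd);
    }
    free(script);
}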
-int start_logging(char *config, int &pid) {
- char dir_name[40];
- size_t count = 0;
+int start_logging(int flags, int &pid) {
int status = 0;
- // remove just the '.txt' part of the config
- while((config[count] != '.') && (count < sizeof(dir_name)/sizeof(dir_name[0])))
- count++;
-
- if(strncpy(dir_name,config,count) == NULL)
- printf("Strcpy error");
-
- dir_name[count]=NULL;
+ if (flags == 0) {
+ pid = -1;
+ return 0;
+ }
pid = fork();
if (pid == 0)
{
char *command_list[] = {"sh", "-c", NULL, NULL};
- char log_cmd[120];
+        char log_cmd[1024] = "";
// child process to run logging
// set group id of this process to itself
@@ -1146,13 +1651,17 @@ int start_logging(char *config, int &pid) {
setpgid(getpid(), getpid());
/* Start logcat */
- if(!sprintf(log_cmd,"logcat > /sdcard/%s/log.txt &",dir_name))
- printf(" Sprintf Error");
+ if (flags & LOGGING_LOGCAT) {
+ if (!sprintf(log_cmd,"logcat > %s/log.txt &", output_dir_path)) {
+ printf(" Sprintf Error");
+ }
+ }
/* Start Syslink Trace */
- if(bLogSysLinkTrace) {
- if(!sprintf(log_cmd,"%s /system/bin/syslink_trace_daemon.out -l /sdcard/%s/syslink_trace.txt -f &",log_cmd, dir_name))
+ if (flags & LOGGING_SYSLINK) {
+ if (!sprintf(log_cmd,"%s /system/bin/syslink_trace_daemon.out -l %s/syslink_trace.txt -f &", log_cmd, output_dir_path)) {
printf(" Sprintf Error");
+ }
}
command_list[2] = (char *)log_cmd;
@@ -1174,18 +1683,22 @@ int start_logging(char *config, int &pid) {
return 0;
}
-int stop_logging(int &pid)
+int stop_logging(int flags, int &pid)
{
- if(pid > 0)
- {
- if(killpg(pid, SIGKILL))
- {
+ if (pid > 0) {
+ if (killpg(pid, SIGKILL)) {
printf("Exit command failed");
return -1;
} else {
- printf("\nlogging for script %s is complete\n logcat saved @ location: %s\n",script_name,dir_path);
- if (bLogSysLinkTrace)
- printf(" syslink_trace is saved @ location: %s\n\n",dir_path);
+ printf("\nlogging for script %s is complete\n", script_name);
+
+ if (flags & LOGGING_LOGCAT) {
+ printf(" logcat saved @ location: %s\n", output_dir_path);
+ }
+
+ if (flags & LOGGING_SYSLINK) {
+ printf(" syslink_trace is saved @ location: %s\n\n", output_dir_path);
+ }
}
}
return 0;
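
start_logging()/stop_logging() now take the logging bitmask from the command line instead of deriving paths from the script name. A sketch of how a caller drives the pair (the LOGGING_* flags are the ones parseCommandLine() assembles):

int log_pid = -1;
int flags = LOGGING_LOGCAT | LOGGING_SYSLINK;   // e.g. cmd_args->logging
start_logging(flags, log_pid);                  // forks a child running logcat and/or syslink_trace_daemon
// ... execute the script ...
stop_logging(flags, log_pid);                   // kills the child's process group and reports where the logs went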
diff --git a/test/CameraHal/camera_test_surfacetexture.cpp b/test/CameraHal/camera_test_surfacetexture.cpp
new file mode 100644
index 0000000..5c52094
--- /dev/null
+++ b/test/CameraHal/camera_test_surfacetexture.cpp
@@ -0,0 +1,909 @@
+#include <stdlib.h>
+#include <unistd.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <time.h>
+#include <semaphore.h>
+#include <pthread.h>
+#include <string.h>
+#include <climits>
+#include <math.h>
+
+#include <gui/SurfaceTexture.h>
+#include <gui/SurfaceTextureClient.h>
+#include <ui/GraphicBuffer.h>
+#include <ui/GraphicBufferMapper.h>
+
+#include <camera/Camera.h>
+#include <camera/ICamera.h>
+#include <media/mediarecorder.h>
+
+#include <binder/IPCThreadState.h>
+#include <binder/ProcessState.h>
+#include <binder/IServiceManager.h>
+#include <cutils/properties.h>
+#include <camera/CameraParameters.h>
+#include <camera/ShotParameters.h>
+#include <camera/CameraMetadata.h>
+#include <system/audio.h>
+#include <system/camera.h>
+
+#include <cutils/memory.h>
+#include <utils/Log.h>
+
+#include <sys/wait.h>
+
+#include "camera_test.h"
+#include "camera_test_surfacetexture.h"
+
+#define ASSERT(X) \
+ do { \
+ if(!(X)) { \
+            printf("error: %s():%d\n", __FUNCTION__, __LINE__); \
+ return; \
+ } \
+ } while(0);
+
+#define ALIGN_DOWN(x, n) ((x) & (~((n) - 1)))
+#define ALIGN_UP(x, n) ((((x) + (n) - 1)) & (~((n) - 1)))
+#define ALIGN_WIDTH 32 // Should be 32...but the calculated dimension causes an ion crash
+#define ALIGN_HEIGHT 2 // Should be 2...but the calculated dimension causes an ion crash
+
+//temporarily define format here
+#define HAL_PIXEL_FORMAT_TI_NV12 0x100
+#define HAL_PIXEL_FORMAT_TI_Y8 0x103
+#define HAL_PIXEL_FORMAT_TI_Y16 0x104
+#define HAL_PIXEL_FORMAT_TI_UYVY 0x105
+
+using namespace android;
+
+static EGLint getSurfaceWidth() {
+ return 512;
+}
+
+static EGLint getSurfaceHeight() {
+ return 512;
+}
+
+static size_t calcBufSize(int format, int width, int height)
+{
+ int buf_size;
+
+ switch (format) {
+ case HAL_PIXEL_FORMAT_TI_NV12:
+ buf_size = width * height * 3 /2;
+ break;
+ case HAL_PIXEL_FORMAT_TI_Y16:
+ case HAL_PIXEL_FORMAT_TI_UYVY:
+ buf_size = width * height * 2;
+ break;
+ // add more formats later
+ default:
+ buf_size = width * height * 3 /2;
+ break;
+ }
+
+ return buf_size;
+}
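+
+// For orientation, the sizes this returns for a 640x480 buffer (dimensions chosen only as an example):
+//     calcBufSize(HAL_PIXEL_FORMAT_TI_NV12, 640, 480)  ->  640*480*3/2 = 460800 bytes
+//     calcBufSize(HAL_PIXEL_FORMAT_TI_UYVY, 640, 480)  ->  640*480*2   = 614400 bytes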
+
+static unsigned int calcOffset(int format, unsigned int width, unsigned int top, unsigned int left)
+{
+ unsigned int bpp;
+
+ switch (format) {
+ case HAL_PIXEL_FORMAT_TI_NV12:
+ bpp = 1;
+ break;
+ case HAL_PIXEL_FORMAT_TI_UYVY:
+ case HAL_PIXEL_FORMAT_TI_Y16:
+ bpp = 2;
+ break;
+ // add more formats later
+ default:
+ bpp = 1;
+ break;
+ }
+
+ return top * width + left * bpp;
+}
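+
+// Concretely, with example numbers, the byte offset of a crop window's top-left corner as computed above:
+//     NV12 (1 byte/luma sample), 640-wide buffer, crop origin row 16, col 32:
+//         calcOffset(HAL_PIXEL_FORMAT_TI_NV12, 640, 16, 32)  ->  16*640 + 32*1 = 10272
+//     UYVY (2 bytes/pixel), same origin:
+//         calcOffset(HAL_PIXEL_FORMAT_TI_UYVY, 640, 16, 32)  ->  16*640 + 32*2 = 10304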
+
+static int getHalPixFormat(const char *format)
+{
+ int pixformat = HAL_PIXEL_FORMAT_TI_NV12;
+ if ( NULL != format ) {
+ if ( strcmp(format, CameraParameters::PIXEL_FORMAT_BAYER_RGGB) == 0 ) {
+ pixformat = HAL_PIXEL_FORMAT_TI_Y16;
+ } else if ( strcmp(format, CameraParameters::PIXEL_FORMAT_YUV420SP) == 0 ) {
+ pixformat = HAL_PIXEL_FORMAT_TI_NV12;
+ } else if ( strcmp(format, CameraParameters::PIXEL_FORMAT_YUV422I) == 0 ) {
+ pixformat = HAL_PIXEL_FORMAT_TI_UYVY;
+ } else {
+ pixformat = HAL_PIXEL_FORMAT_TI_NV12;
+ }
+ }
+
+ return pixformat;
+}
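+
+// The mapping falls back to NV12 whenever the format string is missing or unrecognised, for example:
+//     getHalPixFormat(CameraParameters::PIXEL_FORMAT_BAYER_RGGB)  ->  HAL_PIXEL_FORMAT_TI_Y16
+//     getHalPixFormat("jpeg")                                     ->  HAL_PIXEL_FORMAT_TI_NV12 (fallback)
+//     getHalPixFormat(NULL)                                       ->  HAL_PIXEL_FORMAT_TI_NV12 (fallback)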
+
+static int getUsageFromANW(int format)
+{
+ int usage = GRALLOC_USAGE_SW_READ_RARELY |
+ GRALLOC_USAGE_SW_WRITE_NEVER;
+
+ switch (format) {
+ case HAL_PIXEL_FORMAT_TI_NV12:
+ case HAL_PIXEL_FORMAT_TI_Y16:
+ // This usage flag indicates to gralloc we want the
+ // buffers to come from system heap
+ usage |= GRALLOC_USAGE_PRIVATE_0;
+ break;
+ default:
+ // No special flags needed
+ break;
+ }
+ return usage;
+}
+
+static status_t writeCroppedNV12(unsigned int offset,
+ unsigned int stride,
+ unsigned int bufWidth,
+ unsigned int bufHeight,
+ const Rect &crop,
+ int fd,
+ unsigned char *buffer)
+{
+ unsigned char *luma = NULL, *chroma = NULL, *src = NULL;
+ unsigned int uvoffset;
+ int write_size;
+
+ if (!buffer || !crop.isValid()) {
+ return BAD_VALUE;
+ }
+
+ src = buffer;
+ // offset to beginning of uv plane
+ uvoffset = stride * bufHeight;
+ // offset to beginning of valid region of uv plane
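+ // the row component of the luma offset is halved (chroma rows are subsampled
+ // 2:1 vertically) while the column component is kept, since interleaved U/V
+ // bytes advance one per luma column; this assumes an even crop offset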
+ uvoffset += (offset - (offset % stride)) / 2 + (offset % stride);
+
+ // start of valid luma region
+ luma = src + offset;
+ // start of valid chroma region
+ chroma = src + uvoffset;
+
+ // write luma line x line
+ unsigned int height = crop.height();
+ unsigned int width = crop.width();
+ write_size = width;
+ for (unsigned int i = 0; i < height; i++) {
+ if (write_size != write(fd, luma, width)) {
+ printf("Bad Write error (%d)%s\n",
+ errno, strerror(errno));
+ return UNKNOWN_ERROR;
+ }
+ luma += stride;
+ }
+
+ // write chroma line x line
+ height /= 2;
+ write_size = width;
+ for (unsigned int i = 0; i < height; i++) {
+ if (write_size != write(fd, chroma, width)) {
+ printf("Bad Write error (%d)%s\n",
+ errno, strerror(errno));
+ return UNKNOWN_ERROR;
+ }
+ chroma += stride;
+ }
+
+ return NO_ERROR;
+}
+
+static status_t writeCroppedUYVY(unsigned int offset,
+ unsigned int stride,
+ unsigned int bufWidth,
+ unsigned int bufHeight,
+ const Rect &crop,
+ int fd,
+ unsigned char *buffer)
+{
+ unsigned char *src = NULL;
+ int write_size;
+
+ if (!buffer) {
+ return BAD_VALUE;
+ }
+
+ src = buffer + offset;
+ int height = crop.height();
+ int width = crop.width();
+ write_size = width*2;
+ for (unsigned int i = 0; i < height; i++) {
+ if (write_size != write(fd, src, width*2)) {
+ printf("Bad Write error (%d)%s\n",
+ errno, strerror(errno));
+ return UNKNOWN_ERROR;
+ }
+ src += stride*2;
+ }
+
+ return NO_ERROR;
+}
+
+static status_t copyCroppedNV12(unsigned int offset,
+ unsigned int strideSrc,
+ unsigned int strideDst,
+ unsigned int bufWidth,
+ unsigned int bufHeight,
+ const Rect &crop,
+ void *bufferSrc,
+ void *bufferDst)
+{
+ unsigned char *lumaSrc = NULL, *chromaSrc = NULL;
+ unsigned char *lumaDst = NULL, *chromaDst = NULL;
+ unsigned int uvoffset;
+ int write_size;
+
+ if (!bufferSrc || !bufferDst) {
+ return BAD_VALUE;
+ }
+
+ uvoffset = strideSrc * bufHeight;
+ uvoffset += (offset - (offset % strideSrc)) / 2 + (offset % strideSrc);
+
+ lumaSrc = static_cast<unsigned char *>(bufferSrc) + offset;
+ chromaSrc = static_cast<unsigned char *>(bufferSrc) + uvoffset;
+
+ int height = crop.height();
+ int width = crop.width();
+
+ uvoffset = strideDst * height;
+
+ lumaDst = static_cast<unsigned char *>(bufferDst);
+ chromaDst = static_cast<unsigned char *>(bufferDst) + uvoffset;
+
+ write_size = width;
+ for (unsigned int i = 0; i < height; i++) {
+ memcpy(lumaDst, lumaSrc, width);
+ lumaSrc += strideSrc;
+ lumaDst += strideDst;
+ }
+
+ height /= 2;
+ write_size = width;
+ for (unsigned int i = 0; i < height; i++) {
+ memcpy(chromaDst, chromaSrc, width);
+ chromaSrc += strideSrc;
+ chromaDst += strideDst;
+ }
+
+ return NO_ERROR;
+}
+
+static status_t copyCroppedPacked16(unsigned int offset,
+ unsigned int stride,
+ unsigned int bufWidth,
+ unsigned int bufHeight,
+ const Rect &crop,
+ void *bufferSrc,
+ void *bufferDst)
+{
+ unsigned char *src = NULL, *dst = NULL;
+
+ if (!bufferSrc || !bufferDst) {
+ return BAD_VALUE;
+ }
+
+ src = static_cast<unsigned char *>(bufferSrc) + offset;
+ dst = static_cast<unsigned char *>(bufferDst);
+
+ int height = crop.height();
+ int width = crop.width();
+ for (unsigned int i = 0; i < height; i++) {
+ memcpy(dst, src, width*2);
+ src += stride*2;
+ dst += width*2;
+ }
+
+ return NO_ERROR;
+}
+
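+// When 'display' is non-zero, render into an 800x480 on-screen SurfaceFlinger
+// surface; otherwise render off-screen into a 512x512 pbuffer.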
+void GLSurface::initialize(int display) {
+ mEglDisplay = eglGetDisplay(EGL_DEFAULT_DISPLAY);
+ ASSERT(EGL_SUCCESS == eglGetError());
+ ASSERT(EGL_NO_DISPLAY != mEglDisplay);
+
+ EGLint majorVersion;
+ EGLint minorVersion;
+ ASSERT(eglInitialize(mEglDisplay, &majorVersion, &minorVersion));
+ ASSERT(EGL_SUCCESS == eglGetError());
+
+ EGLint numConfigs = 0;
+ ASSERT(eglChooseConfig(mEglDisplay, getConfigAttribs(), &mGlConfig,
+ 1, &numConfigs));
+ ASSERT(EGL_SUCCESS == eglGetError());
+
+ if (display) {
+ mComposerClient = new SurfaceComposerClient;
+ ASSERT(NO_ERROR == mComposerClient->initCheck());
+ mSurfaceControl = mComposerClient->createSurface(
+ String8("Test Surface"), 0,
+ 800, 480, HAL_PIXEL_FORMAT_YCrCb_420_SP, 0);
+
+ ASSERT(mSurfaceControl != NULL);
+ ASSERT(mSurfaceControl->isValid());
+
+ SurfaceComposerClient::openGlobalTransaction();
+ ASSERT(NO_ERROR == mSurfaceControl->setLayer(0x7FFFFFFF));
+ ASSERT(NO_ERROR == mSurfaceControl->show());
+ SurfaceComposerClient::closeGlobalTransaction();
+
+ sp<ANativeWindow> window = mSurfaceControl->getSurface();
+ mEglSurface = eglCreateWindowSurface(mEglDisplay, mGlConfig,
+ window.get(), NULL);
+ } else {
+ EGLint pbufferAttribs[] = {
+ EGL_WIDTH, getSurfaceWidth(),
+ EGL_HEIGHT, getSurfaceHeight(),
+ EGL_NONE };
+ mEglSurface = eglCreatePbufferSurface(mEglDisplay, mGlConfig,
+ pbufferAttribs);
+ }
+ ASSERT(EGL_SUCCESS == eglGetError());
+ ASSERT(EGL_NO_SURFACE != mEglSurface);
+
+ mEglContext = eglCreateContext(mEglDisplay, mGlConfig, EGL_NO_CONTEXT,
+ getContextAttribs());
+ ASSERT(EGL_SUCCESS == eglGetError());
+ ASSERT(EGL_NO_CONTEXT != mEglContext);
+
+ ASSERT(eglMakeCurrent(mEglDisplay, mEglSurface, mEglSurface,
+ mEglContext));
+ ASSERT(EGL_SUCCESS == eglGetError());
+
+ EGLint w, h;
+ ASSERT(eglQuerySurface(mEglDisplay, mEglSurface, EGL_WIDTH, &w));
+ ASSERT(EGL_SUCCESS == eglGetError());
+ ASSERT(eglQuerySurface(mEglDisplay, mEglSurface, EGL_HEIGHT, &h));
+ ASSERT(EGL_SUCCESS == eglGetError());
+
+ glViewport(0, 0, w, h);
+ ASSERT(GLenum(GL_NO_ERROR) == glGetError());
+}
+
+void GLSurface::deinit() {
+ if (mComposerClient != NULL) {
+ mComposerClient->dispose();
+ }
+
+ if (mEglContext != EGL_NO_CONTEXT) {
+ eglDestroyContext(mEglDisplay, mEglContext);
+ }
+
+ if (mEglSurface != EGL_NO_SURFACE) {
+ eglDestroySurface(mEglDisplay, mEglSurface);
+ }
+ if (mEglDisplay != EGL_NO_DISPLAY) {
+ eglMakeCurrent(mEglDisplay, EGL_NO_SURFACE, EGL_NO_SURFACE,
+ EGL_NO_CONTEXT);
+ eglTerminate(mEglDisplay);
+ }
+ ASSERT(EGL_SUCCESS == eglGetError());
+}
+
+EGLint const* GLSurface::getConfigAttribs() {
+ static EGLint sDefaultConfigAttribs[] = {
+ EGL_SURFACE_TYPE, EGL_PBUFFER_BIT,
+ EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
+ EGL_RED_SIZE, 8,
+ EGL_GREEN_SIZE, 8,
+ EGL_BLUE_SIZE, 8,
+ EGL_ALPHA_SIZE, 8,
+ EGL_DEPTH_SIZE, 16,
+ EGL_STENCIL_SIZE, 8,
+ EGL_NONE };
+
+ return sDefaultConfigAttribs;
+}
+
+EGLint const* GLSurface::getContextAttribs() {
+ static EGLint sDefaultContextAttribs[] = {
+ EGL_CONTEXT_CLIENT_VERSION, 2,
+ EGL_NONE };
+
+ return sDefaultContextAttribs;
+}
+
+void GLSurface::loadShader(GLenum shaderType, const char* pSource, GLuint* outShader) {
+ GLuint shader = glCreateShader(shaderType);
+ ASSERT(GLenum(GL_NO_ERROR) == glGetError());
+ if (shader) {
+ glShaderSource(shader, 1, &pSource, NULL);
+ ASSERT(GLenum(GL_NO_ERROR) == glGetError());
+ glCompileShader(shader);
+ ASSERT(GLenum(GL_NO_ERROR) == glGetError());
+ GLint compiled = 0;
+ glGetShaderiv(shader, GL_COMPILE_STATUS, &compiled);
+ ASSERT(GLenum(GL_NO_ERROR) == glGetError());
+ if (!compiled) {
+ GLint infoLen = 0;
+ glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &infoLen);
+ ASSERT(GLenum(GL_NO_ERROR) == glGetError());
+ if (infoLen) {
+ char* buf = (char*) malloc(infoLen);
+ if (buf) {
+ glGetShaderInfoLog(shader, infoLen, NULL, buf);
+ printf("Shader compile log:\n%s\n", buf);
+ free(buf);
+ }
+ } else {
+ char* buf = (char*) malloc(0x1000);
+ if (buf) {
+ glGetShaderInfoLog(shader, 0x1000, NULL, buf);
+ printf("Shader compile log:\n%s\n", buf);
+ free(buf);
+ }
+ }
+ glDeleteShader(shader);
+ shader = 0;
+ }
+ }
+ ASSERT(shader != 0);
+ *outShader = shader;
+}
+
+void GLSurface::createProgram(const char* pVertexSource, const char* pFragmentSource,
+ GLuint* outPgm) {
+ GLuint vertexShader, fragmentShader;
+ {
+ loadShader(GL_VERTEX_SHADER, pVertexSource, &vertexShader);
+ }
+ {
+ loadShader(GL_FRAGMENT_SHADER, pFragmentSource, &fragmentShader);
+ }
+
+ GLuint program = glCreateProgram();
+ ASSERT(GLenum(GL_NO_ERROR) == glGetError());
+ if (program) {
+ glAttachShader(program, vertexShader);
+ ASSERT(GLenum(GL_NO_ERROR) == glGetError());
+ glAttachShader(program, fragmentShader);
+ ASSERT(GLenum(GL_NO_ERROR) == glGetError());
+ glLinkProgram(program);
+ GLint linkStatus = GL_FALSE;
+ glGetProgramiv(program, GL_LINK_STATUS, &linkStatus);
+ if (linkStatus != GL_TRUE) {
+ GLint bufLength = 0;
+ glGetProgramiv(program, GL_INFO_LOG_LENGTH, &bufLength);
+ if (bufLength) {
+ char* buf = (char*) malloc(bufLength);
+ if (buf) {
+ glGetProgramInfoLog(program, bufLength, NULL, buf);
+ printf("Program link log:\n%s\n", buf);
+ free(buf);
+ }
+ }
+ glDeleteProgram(program);
+ program = 0;
+ }
+ }
+ glDeleteShader(vertexShader);
+ glDeleteShader(fragmentShader);
+ ASSERT(program != 0);
+ *outPgm = program;
+}
+
+// SurfaceTexture specific
+sp<SurfaceTexture> SurfaceTextureBase::getST() {
+ return mST;
+}
+
+void SurfaceTextureBase::initialize(int tex_id, EGLenum tex_target) {
+ mTexId = tex_id;
+ mST = new SurfaceTexture(tex_id, true, tex_target);
+ mSTC = new SurfaceTextureClient(mST);
+ mANW = mSTC;
+}
+
+void SurfaceTextureBase::deinit() {
+ mANW.clear();
+ mSTC.clear();
+
+ mST->abandon();
+ mST.clear();
+}
+
+void SurfaceTextureBase::getId(const char **name) {
+ sp<ANativeWindow> windowTapOut = mSTC;
+
+ *name = NULL;
+ if (windowTapOut.get()) {
+ windowTapOut->perform(windowTapOut.get(), NATIVE_WINDOW_GET_ID, name);
+ }
+
+ windowTapOut.clear();
+}
+
+// SurfaceTexture with GL specific
+
+void SurfaceTextureGL::initialize(int display, int tex_id) {
+ GLSurface::initialize(display);
+ SurfaceTextureBase::initialize(tex_id, GL_TEXTURE_EXTERNAL_OES);
+
+ const char vsrc[] =
+ "attribute vec4 vPosition;\n"
+ "varying vec2 texCoords;\n"
+ "uniform mat4 texMatrix;\n"
+ "void main() {\n"
+ " vec2 vTexCoords = 0.5 * (vPosition.xy + vec2(1.0, 1.0));\n"
+ " texCoords = (texMatrix * vec4(vTexCoords, 0.0, 1.0)).xy;\n"
+ " gl_Position = vPosition;\n"
+ "}\n";
+
+ const char fsrc[] =
+ "#extension GL_OES_EGL_image_external : require\n"
+ "precision mediump float;\n"
+ "uniform samplerExternalOES texSampler;\n"
+ "varying vec2 texCoords;\n"
+ "void main() {\n"
+ " gl_FragColor = texture2D(texSampler, texCoords);\n"
+ "}\n";
+
+ {
+ createProgram(vsrc, fsrc, &mPgm);
+ }
+
+ mPositionHandle = glGetAttribLocation(mPgm, "vPosition");
+ ASSERT(GLenum(GL_NO_ERROR) == glGetError());
+ ASSERT(-1 != mPositionHandle);
+ mTexSamplerHandle = glGetUniformLocation(mPgm, "texSampler");
+ ASSERT(GLenum(GL_NO_ERROR) == glGetError());
+ ASSERT(-1 != mTexSamplerHandle);
+ mTexMatrixHandle = glGetUniformLocation(mPgm, "texMatrix");
+ ASSERT(GLenum(GL_NO_ERROR) == glGetError());
+ ASSERT(-1 != mTexMatrixHandle);
+}
+
+void SurfaceTextureGL::deinit() {
+ SurfaceTextureBase::deinit();
+ GLSurface::deinit();
+}
+
+// drawTexture draws the SurfaceTexture over the entire GL viewport.
+void SurfaceTextureGL::drawTexture() {
+ const GLfloat triangleVertices[] = {
+ -1.0f, 1.0f,
+ -1.0f, -1.0f,
+ 1.0f, -1.0f,
+ 1.0f, 1.0f,
+ };
+
+ glVertexAttribPointer(mPositionHandle, 2, GL_FLOAT, GL_FALSE, 0,
+ triangleVertices);
+ ASSERT(GLenum(GL_NO_ERROR) == glGetError());
+ glEnableVertexAttribArray(mPositionHandle);
+ ASSERT(GLenum(GL_NO_ERROR) == glGetError());
+
+ glUseProgram(mPgm);
+ glUniform1i(mTexSamplerHandle, 0);
+ ASSERT(GLenum(GL_NO_ERROR) == glGetError());
+ glBindTexture(GL_TEXTURE_EXTERNAL_OES, mTexId);
+ ASSERT(GLenum(GL_NO_ERROR) == glGetError());
+
+ // XXX: These calls are not needed for GL_TEXTURE_EXTERNAL_OES as
+ // they're setting the defaults for that target, but when hacking things
+ // to use GL_TEXTURE_2D they are needed to achieve the same behavior.
+ glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MIN_FILTER,
+ GL_LINEAR);
+ ASSERT(GLenum(GL_NO_ERROR) == glGetError());
+ glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MAG_FILTER,
+ GL_LINEAR);
+ ASSERT(GLenum(GL_NO_ERROR) == glGetError());
+ glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_WRAP_S,
+ GL_CLAMP_TO_EDGE);
+ ASSERT(GLenum(GL_NO_ERROR) == glGetError());
+ glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_WRAP_T,
+ GL_CLAMP_TO_EDGE);
+ ASSERT(GLenum(GL_NO_ERROR) == glGetError());
+
+ GLfloat texMatrix[16];
+ mST->getTransformMatrix(texMatrix);
+ glUniformMatrix4fv(mTexMatrixHandle, 1, GL_FALSE, texMatrix);
+
+ glDrawArrays(GL_TRIANGLE_FAN, 0, 4);
+ ASSERT(GLenum(GL_NO_ERROR) == glGetError());
+
+ eglSwapBuffers(mEglDisplay, mEglSurface);
+}
+
+// buffer source stuff
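+// handleBuffer dumps the cropped contents of a returned buffer to
+// /sdcard/imgNNN.raw, skipping the file write while a streaming capture is
+// being restarted to avoid adding latency.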
+void BufferSourceThread::handleBuffer(sp<GraphicBuffer> &graphic_buffer, uint8_t *buffer,
+ unsigned int count, const Rect &crop) {
+ int size;
+ buffer_info_t info;
+ unsigned int offset = 0;
+ int fd = -1;
+ char fn[256];
+
+ if (!graphic_buffer.get()) {
+ printf("Invalid graphic_buffer!\n");
+ return;
+ }
+
+ size = calcBufSize((int)graphic_buffer->getPixelFormat(),
+ graphic_buffer->getWidth(),
+ graphic_buffer->getHeight());
+ if (size <= 0) {
+ printf("Can't get size!\n");
+ return;
+ }
+
+ if (!buffer) {
+ printf("Invalid mapped buffer!\n");
+ return;
+ }
+
+ info.size = size;
+ info.width = graphic_buffer->getWidth();
+ info.height = graphic_buffer->getHeight();
+ info.format = graphic_buffer->getPixelFormat();
+ info.buf = graphic_buffer;
+ info.crop = crop;
+
+ {
+ Mutex::Autolock lock(mReturnedBuffersMutex);
+ if (mReturnedBuffers.size() >= kReturnedBuffersMaxCapacity) mReturnedBuffers.removeAt(0);
+ }
+
+ // re-calculate size and offset
+ size = calcBufSize((int) graphic_buffer->getPixelFormat(), crop.width(), crop.height());
+ offset = calcOffset((int) graphic_buffer->getPixelFormat(), info.width, crop.top, crop.left);
+
+ // Do not write buffer to file if we are streaming capture
+ // It adds too much latency
+ if (!mRestartCapture) {
+ fn[0] = 0;
+ sprintf(fn, "/sdcard/img%03d.raw", count);
+ fd = open(fn, O_CREAT | O_WRONLY | O_TRUNC, 0777);
+ if (fd >= 0) {
+ if (HAL_PIXEL_FORMAT_TI_NV12 == info.format) {
+ writeCroppedNV12(offset, info.width, info.width, info.height,
+ crop, fd, buffer);
+ } else if (HAL_PIXEL_FORMAT_TI_UYVY == info.format) {
+ writeCroppedUYVY(offset, info.width, info.width, info.height,
+ crop, fd, buffer);
+ } else if (size != write(fd, buffer + offset, size)) {
+ printf("Bad Write to %s error (%d)%s\n", fn, errno, strerror(errno));
+ }
+ printf("%s: buffer=%08X, size=%d stored at %s\n"
+ "\tRect: top[%d] left[%d] right[%d] bottom[%d] width[%d] height[%d] offset[%d] stride[%d]\n",
+ __FUNCTION__, (int)buffer, size, fn,
+ crop.top, crop.left, crop.right, crop.bottom,
+ crop.width(), crop.height(),
+ offset, info.width);
+ close(fd);
+ } else {
+ printf("error opening or creating %s\n", fn);
+ }
+ }
+}
+
+Rect BufferSourceThread::getCrop(sp<GraphicBuffer> &graphic_buffer, const float *mtx) {
+ Rect crop(graphic_buffer->getWidth(), graphic_buffer->getHeight());
+
+ // calculate crop rectangle from transformation matrix
+ float sx, sy, tx, ty, h, w;
+ unsigned int rect_x, rect_y;
+ /* sx, 0, 0, 0,
+ 0, sy, 0, 0,
+ 0, 0, 1, 0,
+ tx, ty, 0, 1 */
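+ // The transform matrix is a scale/translate in normalized texture
+ // coordinates: |sx| and |sy| give the crop size as a fraction of the buffer,
+ // tx and ty its translation, and negative scales indicate h/v flips.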
+
+ sx = mtx[0];
+ sy = mtx[5];
+ tx = mtx[12];
+ ty = mtx[13];
+ w = float(graphic_buffer->getWidth());
+ h = float(graphic_buffer->getHeight());
+
+ unsigned int bottom = (unsigned int)(h - (ty * h + 1));
+ unsigned int left = (unsigned int)(tx * w -1);
+ rect_y = (unsigned int)(fabsf(sy) * h);
+ rect_x = (unsigned int)(fabsf(sx) * w);
+
+ // handle v-flip
+ if (sy < 0.0f) {
+ bottom = h - bottom;
+ }
+
+ // handle h-flip
+ if (sx < 0.0f) {
+ left = w - left;
+ }
+
+ unsigned int top = bottom - rect_y;
+ unsigned int right = left + rect_x;
+
+ Rect updatedCrop(left, top, right, bottom);
+ if (updatedCrop.isValid()) {
+ crop = updatedCrop;
+ } else {
+ printf("Crop for buffer %d is not valid: "
+ "left=%u, top=%u, right=%u, bottom=%u. "
+ "Will use default.\n",
+ mCounter,
+ left, top, right, bottom);
+ }
+
+ return crop;
+}
+
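+// setInput queues a previously captured buffer into the tap-in surface. When
+// the buffer already has the aligned dimensions it is queued directly;
+// otherwise it is copied into a freshly dequeued, correctly strided buffer.
+// The surface id is then published through KEY_TAP_IN_SURFACE.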
+void BufferSourceInput::setInput(buffer_info_t bufinfo, const char *format, ShotParameters &params) {
+ ANativeWindowBuffer* anb;
+ GraphicBufferMapper &mapper = GraphicBufferMapper::get();
+ int pixformat = HAL_PIXEL_FORMAT_TI_NV12;
+ size_t tapInMinUndequeued = 0;
+
+ int aligned_width, aligned_height;
+
+ pixformat = bufinfo.format;
+
+ // Aligning is not needed for Bayer
+ if ( ( pixformat == HAL_PIXEL_FORMAT_TI_Y16 ) ||
+ ( pixformat == HAL_PIXEL_FORMAT_TI_UYVY ) ) {
+ aligned_width = bufinfo.crop.right - bufinfo.crop.left;
+ } else {
+ aligned_width = ALIGN_UP(bufinfo.crop.right - bufinfo.crop.left, ALIGN_WIDTH);
+ }
+ aligned_height = bufinfo.crop.bottom - bufinfo.crop.top;
+ printf("aligned width: %d height: %d \n", aligned_width, aligned_height);
+
+ if (mWindowTapIn.get() == 0) {
+ return;
+ }
+
+ native_window_set_usage(mWindowTapIn.get(),
+ getUsageFromANW(pixformat));
+ mWindowTapIn->perform(mWindowTapIn.get(),
+ NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS,
+ &tapInMinUndequeued);
+ native_window_set_buffer_count(mWindowTapIn.get(), tapInMinUndequeued);
+ native_window_set_buffers_geometry(mWindowTapIn.get(),
+ aligned_width, aligned_height, bufinfo.format);
+
+ // If the buffer dimensions match the aligned dimensions, the buffer can be
+ // queued directly to the tap-in surface. If they differ from the aligned
+ // dimensions, the buffer has to be copied into our own buffer so that its
+ // stride is correct.
+ if ((aligned_width != bufinfo.width) || (aligned_height != bufinfo.height) ||
+ ( pixformat == HAL_PIXEL_FORMAT_TI_Y16 ) ||
+ ( pixformat == HAL_PIXEL_FORMAT_TI_UYVY) ) {
+ void *dest[3] = { 0 };
+ void *src[3] = { 0 };
+ Rect bounds(aligned_width, aligned_height);
+
+ mWindowTapIn->dequeueBuffer(mWindowTapIn.get(), &anb);
+ mapper.lock(anb->handle, GRALLOC_USAGE_SW_WRITE_OFTEN, bounds, dest);
+ // copy buffer to input buffer if available
+ if (bufinfo.buf.get()) {
+ bufinfo.buf->lock(GRALLOC_USAGE_SW_READ_OFTEN, src);
+ }
+ if (src[0]) {
+ switch (pixformat) {
+ case HAL_PIXEL_FORMAT_TI_Y16:
+ case HAL_PIXEL_FORMAT_TI_UYVY:
+ copyCroppedPacked16(bufinfo.offset,
+ bufinfo.width,
+ bufinfo.width,
+ bufinfo.height,
+ bufinfo.crop,
+ src[0],
+ dest[0]);
+ break;
+ case HAL_PIXEL_FORMAT_TI_NV12:
+ copyCroppedNV12(bufinfo.offset,
+ bufinfo.width,
+ aligned_width,
+ bufinfo.width,
+ bufinfo.height,
+ bufinfo.crop,
+ src[0],
+ dest[0]);
+ break;
+ default:
+ printf("Pixel format 0x%x not supported\n", pixformat);
+ exit(1);
+ break;
+ }
+ }
+ if (bufinfo.buf.get()) {
+ bufinfo.buf->unlock();
+ }
+
+ mapper.unlock(anb->handle);
+ } else {
+ mWindowTapIn->perform(mWindowTapIn.get(), NATIVE_WINDOW_ADD_BUFFER_SLOT, &bufinfo.buf);
+ anb = bufinfo.buf->getNativeBuffer();
+ }
+
+ mWindowTapIn->queueBuffer(mWindowTapIn.get(), anb);
+
+ {
+ sp<ANativeWindow> windowTapIn = mWindowTapIn;
+ const char* id = NULL;
+
+ if (windowTapIn.get()) {
+ windowTapIn->perform(windowTapIn.get(), NATIVE_WINDOW_GET_ID, &id);
+ }
+
+ if (id) {
+ params.set(KEY_TAP_IN_SURFACE, id);
+ } else {
+ params.remove(KEY_TAP_IN_SURFACE);
+ }
+
+ windowTapIn.clear();
+ }
+}
+
+void BufferSourceThread::showMetadata(sp<IMemory> data) {
+ static nsecs_t prevTime = 0;
+ nsecs_t currTime = 0;
+
+ ssize_t offset;
+ size_t size;
+
+ if ( NULL == data.get() ) {
+ printf("No Metadata!");
+ return;
+ }
+
+ sp<IMemoryHeap> heap = data->getMemory(&offset, &size);
+ camera_metadata_t * meta = static_cast<camera_metadata_t *> (heap->base());
+
+ printf(" frame number: %d\n", meta->frame_number);
+ printf(" shot number: %d\n", meta->shot_number);
+ printf(" analog gain: %d req: %d range: %d~%d dev: %d err: %d\n",
+ meta->analog_gain,
+ meta->analog_gain_req,
+ meta->analog_gain_min,
+ meta->analog_gain_max,
+ meta->analog_gain_dev,
+ meta->analog_gain_error);
+ printf(" exposure time: %d req: %d range: %d~%d dev: %d err: %d\n",
+ meta->exposure_time,
+ meta->exposure_time_req,
+ meta->exposure_time_min,
+ meta->exposure_time_max,
+ meta->exposure_time_dev,
+ meta->exposure_time_error);
+ printf(" EV compensation: req: %d dev: %d\n",
+ meta->exposure_compensation_req,
+ meta->exposure_dev);
+ printf(" awb gain: %d\n", meta->analog_gain);
+ printf(" awb offsets: %d\n", meta->offset_b);
+ printf(" awb temperature: %d\n", meta->awb_temp);
+
+ printf(" LSC table applied: %d\n", meta->lsc_table_applied);
+ if ( meta->lsc_table_applied ) {
+ uint8_t *lscTable = (uint8_t *)meta + meta->lsc_table_offset;
+ printf("LSC Table Size:%d Data[0:7]: %d:%d:%d:%d:%d:%d:%d:%d\n",
+ meta->lsc_table_size,
+ lscTable[0],
+ lscTable[1],
+ lscTable[2],
+ lscTable[3],
+ lscTable[4],
+ lscTable[5],
+ lscTable[6],
+ lscTable[7]);
+ }
+
+ printf(" Faces detected: %d\n", meta->number_of_faces);
+
+ currTime = meta->timestamp;
+ printf(" timestamp (ns): %llu\n", currTime);
+ if (prevTime) printf("inter-shot time (ms): %llu\n", (currTime - prevTime) / 1000000l);
+ prevTime = currTime;
+}
diff --git a/test/CameraHal/camera_test_surfacetexture.h b/test/CameraHal/camera_test_surfacetexture.h
new file mode 100644
index 0000000..395e82d
--- /dev/null
+++ b/test/CameraHal/camera_test_surfacetexture.h
@@ -0,0 +1,236 @@
+#ifndef CAMERA_TEST_SURFACE_TEXTURE_H
+#define CAMERA_TEST_SURFACE_TEXTURE_H
+
+#include "camera_test.h"
+
+#ifdef ANDROID_API_JB_OR_LATER
+#include <gui/Surface.h>
+#include <gui/SurfaceTexture.h>
+#include <gui/SurfaceComposerClient.h>
+#else
+#include <surfaceflinger/Surface.h>
+#include <surfaceflinger/ISurface.h>
+#include <surfaceflinger/ISurfaceComposer.h>
+#include <surfaceflinger/ISurfaceComposerClient.h>
+#include <surfaceflinger/SurfaceComposerClient.h>
+#endif
+
+#ifdef ANDROID_API_JB_OR_LATER
+# define CAMHAL_LOGV ALOGV
+# define CAMHAL_LOGE ALOGE
+# define PRINTOVER(arg...) ALOGD(#arg)
+# define LOG_FUNCTION_NAME ALOGD("%d: %s() ENTER", __LINE__, __FUNCTION__);
+# define LOG_FUNCTION_NAME_EXIT ALOGD("%d: %s() EXIT", __LINE__, __FUNCTION__);
+#else
+# define CAMHAL_LOGV LOGV
+# define CAMHAL_LOGE LOGE
+# define PRINTOVER(arg...) LOGD(#arg)
+# define LOG_FUNCTION_NAME LOGD("%d: %s() ENTER", __LINE__, __FUNCTION__);
+# define LOG_FUNCTION_NAME_EXIT LOGD("%d: %s() EXIT", __LINE__, __FUNCTION__);
+#endif
+
+using namespace android;
+
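+// Frame-available listener whose waitForFrame() blocks the caller until the
+// SurfaceTexture reports a new frame.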
+class FrameWaiter : public android::SurfaceTexture::FrameAvailableListener {
+public:
+ FrameWaiter():
+ mPendingFrames(0) {
+ }
+
+ virtual ~FrameWaiter() {
+ onFrameAvailable();
+ }
+
+ void waitForFrame() {
+ Mutex::Autolock lock(mMutex);
+ while (mPendingFrames == 0) {
+ mCondition.wait(mMutex);
+ }
+ mPendingFrames--;
+ }
+
+ virtual void onFrameAvailable() {
+ Mutex::Autolock lock(mMutex);
+ mPendingFrames++;
+ mCondition.signal();
+ }
+
+ int mPendingFrames;
+ Mutex mMutex;
+ Condition mCondition;
+};
+
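+// Owns the EGL display/surface/context used for rendering, plus an optional
+// on-screen SurfaceFlinger surface when a visible display is requested.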
+class GLSurface {
+public:
+
+ GLSurface():
+ mEglDisplay(EGL_NO_DISPLAY),
+ mEglSurface(EGL_NO_SURFACE),
+ mEglContext(EGL_NO_CONTEXT) {
+ }
+
+ virtual ~GLSurface() {}
+
+ void initialize(int display);
+ void deinit();
+ void loadShader(GLenum shaderType, const char* pSource, GLuint* outShader);
+ void createProgram(const char* pVertexSource, const char* pFragmentSource,
+ GLuint* outPgm);
+
+private:
+ EGLint const* getConfigAttribs();
+ EGLint const* getContextAttribs();
+
+protected:
+ sp<SurfaceComposerClient> mComposerClient;
+ sp<SurfaceControl> mSurfaceControl;
+
+ EGLDisplay mEglDisplay;
+ EGLSurface mEglSurface;
+ EGLContext mEglContext;
+ EGLConfig mGlConfig;
+};
+
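+// Wraps a SurfaceTexture together with the SurfaceTextureClient/ANativeWindow
+// that feeds it.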
+class SurfaceTextureBase {
+public:
+ virtual ~SurfaceTextureBase() {}
+
+ void initialize(int tex_id, EGLenum tex_target = EGL_NONE);
+ void deinit();
+ void getId(const char **name);
+
+ virtual sp<SurfaceTexture> getST();
+
+protected:
+ sp<SurfaceTexture> mST;
+ sp<SurfaceTextureClient> mSTC;
+ sp<ANativeWindow> mANW;
+ int mTexId;
+};
+
+class SurfaceTextureGL : public GLSurface, public SurfaceTextureBase {
+public:
+ virtual ~SurfaceTextureGL() {}
+
+ void initialize(int display, int tex_id);
+ void deinit();
+
+ // drawTexture draws the SurfaceTexture over the entire GL viewport.
+ void drawTexture();
+
+private:
+ GLuint mPgm;
+ GLint mPositionHandle;
+ GLint mTexSamplerHandle;
+ GLint mTexMatrixHandle;
+};
+
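+// Tap-out buffer source: waits for frames on its SurfaceTexture, reads back
+// the current GraphicBuffer and its crop, and hands them to mDeferThread for
+// further processing.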
+class ST_BufferSourceThread : public BufferSourceThread {
+public:
+ ST_BufferSourceThread(int tex_id, sp<Camera> camera) : BufferSourceThread(camera) {
+ mSurfaceTextureBase = new SurfaceTextureBase();
+ mSurfaceTextureBase->initialize(tex_id);
+ mSurfaceTexture = mSurfaceTextureBase->getST();
+ mSurfaceTexture->setSynchronousMode(true);
+ mFW = new FrameWaiter();
+ mSurfaceTexture->setFrameAvailableListener(mFW);
+#ifndef ANDROID_API_JB_OR_LATER
+ mCamera->setBufferSource(NULL, mSurfaceTexture);
+#endif
+ }
+ virtual ~ST_BufferSourceThread() {
+#ifndef ANDROID_API_JB_OR_LATER
+ mCamera->releaseBufferSource(NULL, mSurfaceTexture);
+#endif
+ mSurfaceTextureBase->deinit();
+ delete mSurfaceTextureBase;
+ }
+
+ virtual bool threadLoop() {
+ sp<GraphicBuffer> graphic_buffer;
+
+ mFW->waitForFrame();
+ if (!mDestroying) {
+ float mtx[16] = {0.0};
+ mSurfaceTexture->updateTexImage();
+ printf("=== Metadata for buffer %d ===\n", mCounter);
+#ifndef ANDROID_API_JB_OR_LATER
+ showMetadata(mSurfaceTexture->getMetadata());
+#endif
+ printf("\n");
+ graphic_buffer = mSurfaceTexture->getCurrentBuffer();
+ mSurfaceTexture->getTransformMatrix(mtx);
+ Rect crop = getCrop(graphic_buffer, mtx);
+
+ mDeferThread->add(graphic_buffer, crop, mCounter++);
+ restartCapture();
+ return true;
+ }
+ return false;
+ }
+
+ virtual void requestExit() {
+ Thread::requestExit();
+
+ mDestroying = true;
+ mFW->onFrameAvailable();
+ }
+
+ virtual void setBuffer(android::ShotParameters &params) {
+ {
+ const char* id = NULL;
+
+ mSurfaceTextureBase->getId(&id);
+
+ if (id) {
+ params.set(KEY_TAP_OUT_SURFACES, id);
+ } else {
+ params.remove(KEY_TAP_OUT_SURFACES);
+ }
+ }
+ }
+
+private:
+ SurfaceTextureBase *mSurfaceTextureBase;
+ sp<SurfaceTexture> mSurfaceTexture;
+ sp<FrameWaiter> mFW;
+};
+
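+// Tap-in buffer source: setInput() pushes a captured buffer back into its
+// SurfaceTexture and advertises the surface through ShotParameters.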
+class ST_BufferSourceInput : public BufferSourceInput {
+public:
+ ST_BufferSourceInput(int tex_id, sp<Camera> camera) :
+ BufferSourceInput(camera), mTexId(tex_id) {
+ mSurfaceTexture = new SurfaceTextureBase();
+ sp<SurfaceTexture> surface_texture;
+ mSurfaceTexture->initialize(mTexId);
+ surface_texture = mSurfaceTexture->getST();
+ surface_texture->setSynchronousMode(true);
+
+ mWindowTapIn = new SurfaceTextureClient(surface_texture);
+#ifndef ANDROID_API_JB_OR_LATER
+ mCamera->setBufferSource(mSurfaceTexture->getST(), NULL);
+#else
+ mCamera->setBufferSource(mSurfaceTexture->getST()->getBufferQueue(), NULL);
+#endif
+ }
+ virtual ~ST_BufferSourceInput() {
+#ifndef ANDROID_API_JB_OR_LATER
+ mCamera->releaseBufferSource(mSurfaceTexture->getST(), NULL);
+#else
+ mCamera->releaseBufferSource(mSurfaceTexture->getST()->getBufferQueue(), NULL);
+#endif
+ delete mSurfaceTexture;
+ }
+
+ virtual void setInput(buffer_info_t bufinfo, const char *format) {
+ android::ShotParameters params;
+ mSurfaceTexture->getST()->setDefaultBufferSize(bufinfo.width, bufinfo.height);
+ BufferSourceInput::setInput(bufinfo, format, params);
+ }
+
+private:
+ int mTexId;
+ SurfaceTextureBase *mSurfaceTexture;
+};
+
+#endif
diff --git a/test/CameraHal/surfacetexture_test.cpp b/test/CameraHal/surfacetexture_test.cpp
new file mode 100644
index 0000000..b25853e
--- /dev/null
+++ b/test/CameraHal/surfacetexture_test.cpp
@@ -0,0 +1,253 @@
+/*
+ * Copyright (c) 2010, Texas Instruments Incorporated
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * * Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * * Neither the name of Texas Instruments Incorporated nor the names of
+ * its contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
+ * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
+ * EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include <stdlib.h>
+#include <unistd.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <time.h>
+#include <semaphore.h>
+#include <pthread.h>
+#include <string.h>
+#include <climits>
+
+#include <gui/SurfaceTexture.h>
+#include <gui/SurfaceTextureClient.h>
+#include <ui/GraphicBuffer.h>
+#include <ui/GraphicBufferMapper.h>
+
+#include <camera/Camera.h>
+#include <camera/ICamera.h>
+#include <media/mediarecorder.h>
+
+#include <binder/IPCThreadState.h>
+#include <binder/ProcessState.h>
+#include <binder/IServiceManager.h>
+#include <cutils/properties.h>
+#include <camera/CameraParameters.h>
+#include <camera/ShotParameters.h>
+#include <camera/CameraMetadata.h>
+#include <system/audio.h>
+#include <system/camera.h>
+
+#include <cutils/memory.h>
+#include <utils/Log.h>
+
+#include <sys/wait.h>
+
+#include <sys/mman.h>
+
+#ifdef ANDROID_API_JB_OR_LATER
+#include <gui/Surface.h>
+#include <gui/ISurface.h>
+#include <gui/ISurfaceComposer.h>
+#include <gui/ISurfaceComposerClient.h>
+#include <gui/SurfaceComposerClient.h>
+#include <ion/ion.h>
+#else
+#include <surfaceflinger/Surface.h>
+#include <surfaceflinger/ISurface.h>
+#include <surfaceflinger/ISurfaceComposer.h>
+#include <surfaceflinger/ISurfaceComposerClient.h>
+#include <surfaceflinger/SurfaceComposerClient.h>
+#include "ion.h"
+#endif
+
+#include "camera_test.h"
+
+#define ASSERT(X) \
+ do { \
+ if(!(X)) { \
+ printf("error: %s():%d", __FUNCTION__, __LINE__); \
+ return; \
+ } \
+ } while(0);
+
+#define ALIGN_DOWN(x, n) ((x) & (~((n) - 1)))
+#define ALIGN_UP(x, n) ((((x) + (n) - 1)) & (~((n) - 1)))
+#define ALIGN_WIDTH 32 // Should be 32...but the calculated dimension causes an ion crash
+#define ALIGN_HEIGHT 2 // Should be 2...but the calculated dimension causes an ion crash
+
+//temporarily define format here
+#define HAL_PIXEL_FORMAT_TI_NV12 0x100
+#define HAL_PIXEL_FORMAT_TI_NV12_1D 0x102
+#define HAL_PIXEL_FORMAT_TI_Y8 0x103
+#define HAL_PIXEL_FORMAT_TI_Y16 0x104
+
+using namespace android;
+
+#define N_BUFFERS 15
+
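+// Dequeues, CPU-maps and cancels N_BUFFERS ANativeWindow buffers of the given
+// format to check that gralloc can allocate and map them.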
+static void
+test_format (int format, int page_mode, int width, int height)
+{
+ sp<SurfaceTexture> st;
+ SurfaceTextureClient *stc;
+ GLint tex_id = 0;
+ sp<ANativeWindow> anw;
+ ANativeWindowBuffer* anb[30] = { 0 };
+ int i;
+ unsigned int usage;
+ GraphicBufferMapper &mapper = GraphicBufferMapper::get();
+
+ printf("testing format %x, page_mode %d\n", format, page_mode);
+
+ st = new SurfaceTexture (tex_id, true, GL_TEXTURE_EXTERNAL_OES);
+
+ st->setDefaultBufferSize (width, height);
+
+ stc = new SurfaceTextureClient(st);
+ anw = stc;
+
+ usage = GRALLOC_USAGE_SW_READ_RARELY |
+ GRALLOC_USAGE_SW_WRITE_NEVER;
+ if (page_mode) {
+ usage |= GRALLOC_USAGE_PRIVATE_0;
+ }
+
+ native_window_set_usage(anw.get(), usage);
+ native_window_set_buffer_count(anw.get(), N_BUFFERS);
+ native_window_set_buffers_geometry(anw.get(),
+ width, height, format);
+
+ for(i=0;i<N_BUFFERS;i++) {
+ void *data = NULL;
+ Rect bounds(width, height);
+
+ anb[i] = NULL;
+ anw->dequeueBuffer(anw.get(), &anb[i]);
+ printf("%d: %p\n", i, anb[i]);
+ if (anb[i] == NULL) {
+ printf ("FAILED: buffer should be non-NULL\n");
+ break;
+ }
+
+ mapper.lock(anb[i]->handle, GRALLOC_USAGE_SW_READ_RARELY,
+ bounds, &data);
+ if (data == NULL) {
+ printf ("FAILED: mapping should be non-NULL\n");
+ break;
+ }
+
+ mapper.unlock(anb[i]->handle);
+ }
+ for(i=0;i<N_BUFFERS;i++) {
+ if (anb[i]) {
+ anw->cancelBuffer (anw.get(), anb[i]);
+ }
+ }
+
+ //delete stc;
+ st.clear();
+}
+
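+// Exercises the ion allocator directly: allocates ten 10 MB buffers, shares
+// and mmaps each one twice, then frees them.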
+void
+ion_test (void)
+{
+ struct ion_handle *handle;
+ int fd;
+ int map_fd;
+ unsigned char *ptr;
+ int i;
+ int ret;
+ int share_fd;
+
+ fd = ion_open ();
+
+#define SIZE (10*1024*1024)
+ for(i=0;i<10;i++){
+ handle = NULL;
+ ret = ion_alloc (fd, SIZE, 4096, (1<<0), &handle);
+ if (ret < 0) {
+ printf("ion_alloc returned error %d, %s\n", ret, strerror(errno));
+ break;
+ }
+ printf("ion_alloc returned %d\n", ret);
+
+ ret = ion_share (fd, handle, &share_fd);
+ if (ret < 0) {
+ printf("ion_share returned error %d, %s\n", ret, strerror(errno));
+ break;
+ }
+ printf("ion_share returned %d\n", ret);
+
+ ptr = (unsigned char *)mmap (NULL, SIZE, PROT_READ|PROT_WRITE,
+ MAP_SHARED, share_fd, 0);
+ printf("mmap returned %p\n", ptr);
+
+ ptr = (unsigned char *)mmap (NULL, SIZE, PROT_READ|PROT_WRITE,
+ MAP_SHARED, share_fd, 0);
+ printf("mmap returned %p\n", ptr);
+
+#if 0
+ ret = ion_map (fd, handle, SIZE, PROT_READ, 0, 0, &ptr, &map_fd);
+ if (ret < 0) {
+ printf("ion_map returned error %d, %s\n", ret, strerror(errno));
+ break;
+ }
+ printf("ion_map returned %d\n", ret);
+#endif
+
+ printf("%d: %p\n", i, ptr);
+
+ ion_free (fd, handle);
+ }
+
+}
+
+
+int
+main (int argc, char *argv[])
+{
+ int width, height;
+
+ width = 640;
+ height = 480;
+ test_format (HAL_PIXEL_FORMAT_TI_NV12, 0, width, height);
+ test_format (HAL_PIXEL_FORMAT_TI_NV12, 1, width, height);
+ test_format (HAL_PIXEL_FORMAT_TI_NV12_1D, 0, width, height);
+ test_format (HAL_PIXEL_FORMAT_TI_Y8, 1, width, height);
+ test_format (HAL_PIXEL_FORMAT_TI_Y16, 1, width, height);
+
+ width = 2608;
+ height = 1960;
+ test_format (HAL_PIXEL_FORMAT_TI_NV12, 1, width, height);
+ test_format (HAL_PIXEL_FORMAT_TI_NV12_1D, 0, width, height);
+ test_format (HAL_PIXEL_FORMAT_TI_Y8, 1, width, height);
+ test_format (HAL_PIXEL_FORMAT_TI_Y16, 1, width, height);
+
+ ion_test();
+
+ return 0;
+}
+