author     Vishal Mahaveer <vishalm@ti.com>    2016-10-26 22:20:19 -0400
committer  Vishal Mahaveer <vishalm@ti.com>    2016-10-26 22:20:19 -0400
commit     e3fc0c5c240cd2041966f58ba33054d16d276b6c (patch)
tree       47f41bbb9a58f2fa899c44e422c768d2b0820033
parent     66ba098d464051d94346dd85ae4485f350bc9e9a (diff)
parent     4abb78d52425efe38f91e0d2e36cd8ee96463414 (diff)
download   dra7xx-d-nougat-release.tar.gz
Merge remote-tracking branch 'omap-mirror/d-marshmallow-mr2-release' into d-nougat-release
-rw-r--r--  camera/Android.mk                                             |    5
-rw-r--r--  camera/DecoderFactory.cpp                                     |   67
-rw-r--r--  camera/FrameDecoder.cpp                                       |  204
-rw-r--r--  camera/OmxFrameDecoder.cpp                                    | 1077
-rw-r--r--  camera/SwFrameDecoder.cpp                                     |   85
-rw-r--r--  camera/V4LCameraAdapter/V4LCameraAdapter.cpp                  |  395
-rw-r--r--  camera/V4LCameraAdapter/V4LCapabilities.cpp                   |    2
-rw-r--r--  camera/V4LCameraAdapter/V4LM2M.cpp                            |    4
-rw-r--r--  camera/inc/DecoderFactory.h                                   |   35
-rw-r--r--  camera/inc/FrameDecoder.h                                     |   90
-rw-r--r--  camera/inc/OmxFrameDecoder.h                                  |  204
-rw-r--r--  camera/inc/SwFrameDecoder.h                                   |   47
-rw-r--r--  camera/inc/V4LCameraAdapter/V4LCameraAdapter.h                |    9
-rw-r--r--  hwcomposer/display.c                                          |  100
-rw-r--r--  hwcomposer/display.h                                          |    1
-rw-r--r--  hwcomposer/hal_public.h                                       |    2
-rw-r--r--  omx/videoencode/omx_h264_enc/src/omx_H264videoencoder.c       |   33
-rw-r--r--  omx/videoencode/omx_h264_enc/src/omx_H264videoencoderutils.c  |    2
18 files changed, 130 insertions, 2232 deletions
diff --git a/camera/Android.mk b/camera/Android.mk
index 192f99c..ad454dc 100644
--- a/camera/Android.mk
+++ b/camera/Android.mk
@@ -71,10 +71,7 @@ TI_CAMERAHAL_COMMON_SRC := \
NV12_resize.cpp \
CameraParameters.cpp \
TICameraParameters.cpp \
- CameraHalCommon.cpp \
- FrameDecoder.cpp \
- SwFrameDecoder.cpp \
- DecoderFactory.cpp
+ CameraHalCommon.cpp
TI_CAMERAHAL_USB_SRC := \
V4LCameraAdapter/V4LCameraAdapter.cpp \
diff --git a/camera/DecoderFactory.cpp b/camera/DecoderFactory.cpp
deleted file mode 100644
index ba4ae88..0000000
--- a/camera/DecoderFactory.cpp
+++ /dev/null
@@ -1,67 +0,0 @@
-/*
- * Copyright (C) Texas Instruments - http://www.ti.com/
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "FrameDecoder.h"
-#include "SwFrameDecoder.h"
-#include "OmxFrameDecoder.h"
-#include "CameraHal.h"
-#include "DecoderFactory.h"
-
-namespace Ti {
-namespace Camera {
-
-
-FrameDecoder* DecoderFactory::createDecoderByType(DecoderType type, bool forceSwDecoder) {
- FrameDecoder* decoder = NULL;
- switch (type) {
- case DecoderType_MJPEG: {
-
-#ifndef OMX_CAMERA_ADAPTER
- /* If OMX Camera Adapter is not used, OMX implementation is not available in the device, switch to sw
- * decoder
- */
- forceSwDecoder = true;
-#endif
- if (!forceSwDecoder) {
- decoder = new OmxFrameDecoder(DecoderType_MJPEG);
- CAMHAL_LOGD("Using HW Decoder for MJPEG");
- } else {
- decoder = new SwFrameDecoder();
- CAMHAL_LOGD("Using SW Decoder for MJPEG");
- }
-
- //TODO add logic that handle verification is HW Decoder is available ?
- // And if no - create SW decoder.
- break;
- }
-#ifdef OMX_CAMERA_ADAPTER
- case DecoderType_H264: {
- decoder = new OmxFrameDecoder(DecoderType_H264);
- CAMHAL_LOGD("Using HW Decoder for H264");
- break;
- }
-#endif
- default: {
- CAMHAL_LOGE("Unrecognized decoder type %d", type);
- }
- }
-
- return decoder;
-}
-
-} // namespace Camera
-} // namespace Ti
-
diff --git a/camera/FrameDecoder.cpp b/camera/FrameDecoder.cpp
deleted file mode 100644
index 80b4946..0000000
--- a/camera/FrameDecoder.cpp
+++ /dev/null
@@ -1,204 +0,0 @@
-/*
- * Copyright (C) Texas Instruments - http://www.ti.com/
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "Common.h"
-#include "FrameDecoder.h"
-
-
-namespace Ti {
-namespace Camera {
-
-FrameDecoder::FrameDecoder()
-: mCameraHal(NULL), mState(DecoderState_Uninitialized) {
-}
-
-FrameDecoder::~FrameDecoder() {
-}
-
-status_t FrameDecoder::start() {
- LOG_FUNCTION_NAME;
-
- android::AutoMutex lock(mLock);
- status_t ret;
- if (mState == DecoderState_Running) {
- return NO_INIT;
- }
- ret = doStart();
- if (ret == NO_ERROR) {
- mState = DecoderState_Running;
- }
-
- LOG_FUNCTION_NAME_EXIT;
- return ret;
-}
-
-void FrameDecoder::stop() {
- LOG_FUNCTION_NAME;
-
- android::AutoMutex lock(mLock);
- if (mState >= DecoderState_Requested_Stop) {
- return;
- }
- mState = DecoderState_Requested_Stop;
- doStop();
- mState = DecoderState_Stoppped;
-
- LOG_FUNCTION_NAME_EXIT;
-}
-
-void FrameDecoder::release() {
- LOG_FUNCTION_NAME;
-
- android::AutoMutex lock(mLock);
- if (mState <= DecoderState_Requested_Stop) {
- return;
- }
- doRelease();
- mState = DecoderState_Uninitialized;
-
- LOG_FUNCTION_NAME_EXIT;
-}
-
-void FrameDecoder::flush() {
- LOG_FUNCTION_NAME;
-
- android::AutoMutex lock(mLock);
- if (mState <= DecoderState_Requested_Stop) {
- return;
- }
- doFlush();
- mInQueue.clear();
- mOutQueue.clear();
-
-
- LOG_FUNCTION_NAME_EXIT;
-}
-
-void FrameDecoder::configure(const DecoderParameters& params) {
- LOG_FUNCTION_NAME;
-
- android::AutoMutex lock(mLock);
- if (mState == DecoderState_Running) {
- return;
- }
- mParams = params;
- mInQueue.reserve(mParams.inputBufferCount);
- mOutQueue.reserve(mParams.outputBufferCount);
- doConfigure(params);
- mState = DecoderState_Initialized;
-
- LOG_FUNCTION_NAME_EXIT;
-}
-
-status_t FrameDecoder::dequeueInputBuffer(int &id) {
- LOG_FUNCTION_NAME;
-
- android::AutoMutex lock(mLock);
-
- if (mState != DecoderState_Running) {
- CAMHAL_LOGE("Try to use Decoder not in RUNNING state");
- return INVALID_OPERATION;
- }
-
- for (size_t i = 0; i < mInQueue.size(); i++) {
- int index = mInQueue[i];
- android::sp<MediaBuffer>& in = mInBuffers->editItemAt(index);
- android::AutoMutex bufferLock(in->getLock());
- if (in->getStatus() == BufferStatus_InDecoded) {
- id = index;
- in->setStatus(BufferStatus_Unknown);
- mInQueue.removeAt(i);
- return NO_ERROR;
- }
- }
-
- LOG_FUNCTION_NAME_EXIT;
- return INVALID_OPERATION;
-}
-
-status_t FrameDecoder::dequeueOutputBuffer(int &id) {
- LOG_FUNCTION_NAME;
-
- android::AutoMutex lock(mLock);
-
- if (mState != DecoderState_Running) {
- CAMHAL_LOGE("Try to use Decoder not in RUNNING state");
- return INVALID_OPERATION;
- }
-
- for (size_t i = 0; i < mOutQueue.size(); i++) {
- int index = mOutQueue[i];
- android::sp<MediaBuffer>& out = mOutBuffers->editItemAt(index);
- android::AutoMutex bufferLock(out->getLock());
- if (out->getStatus() == BufferStatus_OutFilled) {
- id = index;
- out->setStatus(BufferStatus_Unknown);
- mOutQueue.removeAt(i);
- return NO_ERROR;
- }
- }
-
- LOG_FUNCTION_NAME_EXIT;
- return INVALID_OPERATION;
-}
-
-status_t FrameDecoder::queueOutputBuffer(int index) {
- LOG_FUNCTION_NAME;
-
- android::AutoMutex lock(mLock);
-
- //We queue all available buffers to Decoder not in recording mode - before start
- if (mState > DecoderState_Running) {
- CAMHAL_LOGE("Try to use Decoder not in RUNNING state");
- return INVALID_OPERATION;
- }
-
- android::sp<MediaBuffer>& out = mOutBuffers->editItemAt(index);
- android::AutoMutex bufferLock(out->getLock());
- out->setStatus(BufferStatus_OutQueued);
- mOutQueue.push_back(index);
-
- LOG_FUNCTION_NAME_EXIT;
- return NO_ERROR;
-}
-
-status_t FrameDecoder::queueInputBuffer(int id) {
- LOG_FUNCTION_NAME;
-
- android::AutoMutex lock(mLock);
-
- if (mState != DecoderState_Running) {
- CAMHAL_LOGE("Try to use Decoder not in RUNNING state");
- return INVALID_OPERATION;
- }
-
- {
- android::sp<MediaBuffer>& in = mInBuffers->editItemAt(id);
- android::AutoMutex bufferLock(in->getLock());
- in->setStatus(BufferStatus_InQueued);
- mInQueue.push_back(id);
- }
-
- // Since we got queued buffer - we can process it
- doProcessInputBuffer();
-
- LOG_FUNCTION_NAME_EXIT;
- return NO_ERROR;
-}
-
-
-} // namespace Camera
-} // namespace Ti
diff --git a/camera/OmxFrameDecoder.cpp b/camera/OmxFrameDecoder.cpp
deleted file mode 100644
index c65aadd..0000000
--- a/camera/OmxFrameDecoder.cpp
+++ /dev/null
@@ -1,1077 +0,0 @@
-/*
- * Copyright (C) Texas Instruments - http://www.ti.com/
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "ErrorUtils.h"
-#include "OmxFrameDecoder.h"
-#include "OMX_TI_IVCommon.h"
-#include "OMX_TI_Index.h"
-#include "Decoder_libjpeg.h"
-
-
-namespace Ti {
-namespace Camera {
-
-const static uint32_t kMaxColorFormatSupported = 1000;
-const static int kMaxStateSwitchTimeOut = 1 * 1000 * 1000 * 1000; // 1 sec
-
-static const char* gDecoderRole[2] = {"video_decoder.mjpeg", "video_decoder.avc"};
-static const OMX_VIDEO_CODINGTYPE gCompressionFormat[2] = {OMX_VIDEO_CodingMJPEG, OMX_VIDEO_CodingAVC};
-
-
-template<class T>
-static void InitOMXParams(T *params) {
- params->nSize = sizeof(T);
- params->nVersion.s.nVersionMajor = 1;
- params->nVersion.s.nVersionMinor = 0;
- params->nVersion.s.nRevision = 0;
- params->nVersion.s.nStep = 0;
-}
-
-
-
-CallbackDispatcher::CallbackDispatcher()
-: mDone(false) {
- mThread = new CallbackDispatcherThread(this);
- mThread->run("OMXCallbackDisp", ANDROID_PRIORITY_FOREGROUND);
-}
-
-CallbackDispatcher::~CallbackDispatcher() {
- {
- android::Mutex::Autolock autoLock(mLock);
-
- mDone = true;
- mQueueChanged.signal();
- }
-
- status_t status = mThread->join();
- if (status != WOULD_BLOCK) {
- //CAMHAL_ASSERT(status, (status_t)NO_ERROR);
- }
-}
-
-void CallbackDispatcher::post(const OmxMessage &msg) {
- android::Mutex::Autolock autoLock(mLock);
-
- mQueue.push_back(msg);
- mQueueChanged.signal();
-}
-
-void CallbackDispatcher::dispatch(const OmxMessage &msg) {
-
- switch(msg.type)
- {
- case OmxMessage::EVENT :
- {
- static_cast<OmxFrameDecoder*>(msg.u.eventData.appData)->eventHandler(msg.u.eventData.event, msg.u.eventData.data1, msg.u.eventData.data2, msg.u.eventData.pEventData);
- break;
- }
-
- case OmxMessage::EMPTY_BUFFER_DONE:
- {
- static_cast<OmxFrameDecoder*>(msg.u.bufferData.appData)->emptyBufferDoneHandler(msg.u.bufferData.pBuffHead);
- break;
- }
-
- case OmxMessage::FILL_BUFFER_DONE:
- {
- static_cast<OmxFrameDecoder*>(msg.u.bufferData.appData)->fillBufferDoneHandler(msg.u.bufferData.pBuffHead);
- break;
- }
- };
-}
-
-bool CallbackDispatcher::loop() {
- for (;;) {
- OmxMessage msg;
-
- {
- android::Mutex::Autolock autoLock(mLock);
- while (!mDone && mQueue.empty()) {
- mQueueChanged.wait(mLock);
- }
-
- if (mDone) {
- break;
- }
-
- msg = *mQueue.begin();
- mQueue.erase(mQueue.begin());
- }
-
- dispatch(msg);
- }
-
- return false;
-}
-
-bool CallbackDispatcherThread::threadLoop() {
- return mDispatcher->loop();
-}
-
-//Static
-OMX_ERRORTYPE OmxFrameDecoder::eventCallback(const OMX_HANDLETYPE component,
- const OMX_PTR appData, const OMX_EVENTTYPE event, const OMX_U32 data1, const OMX_U32 data2,
- const OMX_PTR pEventData) {
- OmxMessage msg;
- msg.type = OmxMessage::EVENT;
- msg.u.eventData.appData = appData;
- msg.u.eventData.event = event;
- msg.u.eventData.data1 = data1;
- msg.u.eventData.data2 = data2;
- ((OmxFrameDecoder *)appData)->mDispatcher.post(msg);
- return OMX_ErrorNone;
-}
-
-//Static
-OMX_ERRORTYPE OmxFrameDecoder::emptyBufferDoneCallback(OMX_HANDLETYPE hComponent,
- OMX_PTR appData, OMX_BUFFERHEADERTYPE* pBuffHead) {
- OmxMessage msg;
- msg.type = OmxMessage::EMPTY_BUFFER_DONE;
- msg.u.bufferData.appData = appData;
- msg.u.bufferData.pBuffHead = pBuffHead;
- ((OmxFrameDecoder *)appData)->mDispatcher.post(msg);
- return OMX_ErrorNone;
-}
-
-//Static
-OMX_ERRORTYPE OmxFrameDecoder::fillBufferDoneCallback(OMX_HANDLETYPE hComponent,
- OMX_PTR appData, OMX_BUFFERHEADERTYPE* pBuffHead) {
- OmxMessage msg;
- msg.type = OmxMessage::FILL_BUFFER_DONE;
- msg.u.bufferData.appData = appData;
- msg.u.bufferData.pBuffHead = pBuffHead;
- ((OmxFrameDecoder *)appData)->mDispatcher.post(msg);
- return OMX_ErrorNone;
-}
-
-OmxFrameDecoder::OmxFrameDecoder(DecoderType type)
- : mOmxInialized(false), mCurrentState(OmxDecoderState_Unloaded), mPreviousState(OmxDecoderState_Unloaded),
- mStopping(false), mDecoderType(type), mIsNeedCheckDHT(true), mAlwaysAppendDHT(false) {
-}
-
-OmxFrameDecoder::~OmxFrameDecoder() {
-}
-
-OMX_ERRORTYPE OmxFrameDecoder::emptyBufferDoneHandler(OMX_BUFFERHEADERTYPE* pBuffHead) {
- LOG_FUNCTION_NAME;
- android::AutoMutex lock(mHwLock);
-
- int bufferIndex = reinterpret_cast<int>(pBuffHead->pAppPrivate);
- CAMHAL_LOGD("Got header %p id = %d", pBuffHead, bufferIndex);
- android::sp<MediaBuffer>& in = mInBuffers->editItemAt(bufferIndex);
-
- android::AutoMutex itemLock(in->getLock());
- in->setStatus((getOmxState() == OmxDecoderState_Executing) ? BufferStatus_InDecoded : BufferStatus_InQueued);
-
- return OMX_ErrorNone;
-}
-
-OMX_ERRORTYPE OmxFrameDecoder::fillBufferDoneHandler(OMX_BUFFERHEADERTYPE* pBuffHead) {
- LOG_FUNCTION_NAME;
- android::AutoMutex lock(mHwLock);
-
- int index = (int)pBuffHead->pAppPrivate;
- android::sp<MediaBuffer>& out = mOutBuffers->editItemAt(index);
-
- android::AutoMutex itemLock(out->getLock());
- CameraBuffer* frame = static_cast<CameraBuffer*>(out->buffer);
- out->setOffset(pBuffHead->nOffset);
- out->setTimestamp(pBuffHead->nTimeStamp);
- out->setStatus((getOmxState() == OmxDecoderState_Executing) ? BufferStatus_OutFilled : BufferStatus_OutQueued);
-
- return OMX_ErrorNone;
-}
-
-OMX_ERRORTYPE OmxFrameDecoder::eventHandler(const OMX_EVENTTYPE event, const OMX_U32 data1, const OMX_U32 data2,
- const OMX_PTR pEventData) {
-
- LOG_FUNCTION_NAME;
-
- OMX_ERRORTYPE ret = OMX_ErrorNone;
- android::AutoMutex lock(mHwLock);
-
- switch(event) {
-
- case OMX_EventCmdComplete:
- {
- if ((data1 == OMX_CommandStateSet) && (data2 == OMX_StateIdle)) {
- CAMHAL_LOGD("Component State Changed To OMX_StateIdle\n");
- commitState(OmxDecoderState_Idle);
- mStateCondition.signal();
- }
- else if ((data1 == OMX_CommandStateSet) && (data2 == OMX_StateExecuting)) {
- CAMHAL_LOGD("Component State Changed To OMX_StateExecuting\n");
- commitState(OmxDecoderState_Executing);
- mStateCondition.signal();
- }
- else if ((data1 == OMX_CommandStateSet) && (data2 == OMX_StateLoaded)) {
- CAMHAL_LOGD("Component State Changed To OMX_StateLoaded\n");
- if(getOmxState() == OmxDecoderState_Executing)
- commitState(OmxDecoderState_Loaded);
- mStateCondition.signal();
- }
- else if (data1 == OMX_CommandFlush) {
- CAMHAL_LOGD("OMX_CommandFlush done on %d port\n", data2);
- mStateCondition.signal();
- }
- else if (data1 == OMX_CommandPortDisable) {
- CAMHAL_LOGD("OMX_CommandPortDisable done on %d port\n", data2);
- mStateCondition.signal();
- }
- else if (data1 == OMX_CommandPortEnable) {
- CAMHAL_LOGD("OMX_CommandPortEnable done on %d port\n", data2);
- mStateCondition.signal();
- } else {
- CAMHAL_LOGD("Event %d done on %d port\n", data1, data2);
- }
- break;
- }
- case OMX_EventError:
- {
- CAMHAL_LOGD("\n\n\nOMX Component reported an Error!!!! 0x%x 0x%x\n\n\n", data1, data2);
- commitState(OmxDecoderState_Error);
- omxSendCommand(OMX_CommandStateSet, OMX_StateInvalid);
- mStateCondition.signal();
- break;
- }
- case OMX_EventPortSettingsChanged:
- {
- CAMHAL_LOGD("\n\n\nOMX_EventPortSettingsChanged(port=%ld, data2=0x%08lx)\n\n\n",
- data1, data2);
- if (data2 == 0) {
- // This means that some serious change to port happens
- commitState(OmxDecoderState_Reconfigure);
- } else if (data2 == OMX_IndexConfigCommonOutputCrop) {
-#if 0
- OMX_CONFIG_RECTTYPE rect;
- InitOMXParams(&rect);
- rect.nPortIndex = PortIndexOutput;
- status_t ret = omxGetConfig(OMX_IndexConfigCommonOutputCrop, &rect);
- if (ret != NO_ERROR) {
- CAMHAL_LOGE("Can't get new crop parameters 0x%x", ret);
- break;
- }
-
- CAMHAL_LOGV("Crop should change to %d %d %d %d", rect.nLeft, rect.nTop, rect.nLeft + rect.nWidth, rect.nTop + rect.nHeight);
-#endif
- }
- break;
- }
- default:
- {
- CAMHAL_LOGD("\n\n\nOMX Unhandelled event ID=0x%x!!!!\n\n\n", event);
- }
- }
-
- LOG_FUNCTION_NAME_EXIT;
-
- return ret;
- }
-
-void OmxFrameDecoder::doConfigure(const DecoderParameters& config) {
- LOG_FUNCTION_NAME;
-
- LOG_FUNCTION_NAME_EXIT;
-}
-
-status_t OmxFrameDecoder::enableGrallockHandles() {
- OMX_TI_PARAMUSENATIVEBUFFER domxUseGrallocHandles;
- InitOMXParams(&domxUseGrallocHandles);
-
- domxUseGrallocHandles.nPortIndex = PortIndexOutput;
- domxUseGrallocHandles.bEnable = OMX_TRUE;
-
- return omxSetParameter((OMX_INDEXTYPE)OMX_TI_IndexUseNativeBuffers, &domxUseGrallocHandles);
-}
-
-status_t OmxFrameDecoder::omxSwitchToExecutingSync() {
- CAMHAL_LOGV("Try set OMX_StateExecuting");
- android::AutoMutex lock(mHwLock);
- omxSendCommand(OMX_CommandStateSet, OMX_StateExecuting);
- status_t ret = mStateCondition.waitRelative(mHwLock, kMaxStateSwitchTimeOut);
- if (ret != NO_ERROR) {
- CAMHAL_LOGE("State transition to EXECUTING ERROR 0x%x", ret);
- return UNKNOWN_ERROR;
- }
- return NO_ERROR;
-}
-
-void OmxFrameDecoder::dumpPortSettings(PortType port) {
- OMX_PARAM_PORTDEFINITIONTYPE def;
- InitOMXParams(&def);
- def.nPortIndex = port;
- omxGetParameter(OMX_IndexParamPortDefinition, &def);
- omxDumpPortSettings(def);
-}
-
-status_t OmxFrameDecoder::disablePortSync(int port) {
- OMX_ERRORTYPE eError;
- android::AutoMutex lock(mHwLock);
- eError = OMX_SendCommand(mHandleComp, OMX_CommandPortDisable, port, NULL);
- if (eError != OMX_ErrorNone) {
- CAMHAL_LOGE("OMX_CommandPortDisable OMX_ALL returned error 0x%x", eError);
- return Utils::ErrorUtils::omxToAndroidError(eError);
- }
- status_t ret = mStateCondition.waitRelative(mHwLock, kMaxStateSwitchTimeOut);
- if (ret != NO_ERROR) {
- CAMHAL_LOGE("State transition to OMX_StateLoaded ERROR 0x%x", ret);
- return UNKNOWN_ERROR;
- }
- return NO_ERROR;
-}
-
-status_t OmxFrameDecoder::enablePortSync(int port) {
- android::AutoMutex lock(mHwLock);
- OMX_ERRORTYPE eError = OMX_SendCommand(mHandleComp, OMX_CommandPortEnable, port, NULL);
- status_t ret = mStateCondition.waitRelative(mHwLock, kMaxStateSwitchTimeOut);
- if (eError != OMX_ErrorNone) {
- CAMHAL_LOGE("OMX_SendCommand OMX_CommandPortEnable OUT returned error 0x%x", eError);
- return Utils::ErrorUtils::omxToAndroidError(eError);
- }
- return NO_ERROR;
-}
-
-
-status_t OmxFrameDecoder::doPortReconfigure() {
- OMX_ERRORTYPE eError;
- status_t ret = NO_ERROR;
-
- CAMHAL_LOGD("Starting port reconfiguration !");
- dumpPortSettings(PortIndexInput);
- dumpPortSettings(PortIndexOutput);
-
- android::AutoMutex lock(mHwLock);
-
- omxSendCommand(OMX_CommandFlush, PortIndexOutput);
- ret = mStateCondition.waitRelative(mHwLock, kMaxStateSwitchTimeOut);
- if (ret != NO_ERROR) {
- CAMHAL_LOGE("State transition to OMX_CommandFlush ERROR 0x%x", ret);
- return UNKNOWN_ERROR;
- }
-
- omxSendCommand(OMX_CommandFlush, PortIndexInput);
- ret = mStateCondition.waitRelative(mHwLock, kMaxStateSwitchTimeOut);
- if (ret != NO_ERROR) {
- CAMHAL_LOGE("State transition to OMX_CommandFlush ERROR 0x%x", ret);
- return UNKNOWN_ERROR;
- }
-
- ret = omxSendCommand(OMX_CommandPortDisable, PortIndexOutput);
- if (ret != NO_ERROR) {
- CAMHAL_LOGE("OMX_CommandPortDisable PortIndexOutput returned error 0x%x", ret);
- return ret;
- }
-
- freeBuffersOnOutput();
-
- ret = mStateCondition.waitRelative(mHwLock, kMaxStateSwitchTimeOut);
- if (ret != NO_ERROR) {
- CAMHAL_LOGE("State transition to OMX_StateLoaded ERROR 0x%x", ret);
- return UNKNOWN_ERROR;
- }
-
- OMX_PARAM_PORTDEFINITIONTYPE def;
- InitOMXParams(&def);
- def.nPortIndex = PortIndexOutput;
- omxGetParameter(OMX_IndexParamPortDefinition, &def);
- def.nBufferCountActual = mParams.outputBufferCount;
- CAMHAL_LOGD("Will set def.nBufferSize=%d stride=%d height=%d", def.nBufferSize , def.format.video.nStride, def.format.video.nFrameHeight);
- omxSetParameter(OMX_IndexParamPortDefinition, &def);
-
-
-
- ret = omxSendCommand(OMX_CommandPortEnable, PortIndexOutput);
- if (ret != NO_ERROR) {
- CAMHAL_LOGE("omxSendCommand OMX_CommandPortEnable returned error 0x%x", ret);
- return ret;
- }
-
- allocateBuffersOutput();
-
- ret = mStateCondition.waitRelative(mHwLock, kMaxStateSwitchTimeOut);
- if (ret != NO_ERROR) {
- CAMHAL_LOGE("omxSendCommand OMX_CommandPortEnable timeout 0x%x", ret);
- return UNKNOWN_ERROR;
- }
-
- CAMHAL_LOGD("Port reconfiguration DONE!");
- //dumpPortSettings(PortIndexOutput);
-
- return NO_ERROR;
-}
-
-void OmxFrameDecoder::queueOutputBuffers() {
-
- LOG_FUNCTION_NAME;
-
- android::GraphicBufferMapper &mapper = android::GraphicBufferMapper::get();
-
- for (size_t i = 0; i < mOutQueue.size(); i++) {
- int index = mOutQueue[i];
- android::sp<MediaBuffer> &outBuffer = mOutBuffers->editItemAt(index);
- android::AutoMutex bufferLock(outBuffer->getLock());
- if (outBuffer->getStatus() == BufferStatus_OutQueued) {
- outBuffer->setStatus(BufferStatus_OutWaitForFill);
- CameraBuffer* frame = static_cast<CameraBuffer*>(outBuffer->buffer);
- OMX_BUFFERHEADERTYPE *pOutBufHdr = mOutBufferHeaders[outBuffer->bufferId];
- CAMHAL_LOGV("Fill this buffer cf=%p bh=%p id=%d", frame, pOutBufHdr, outBuffer->bufferId);
- status_t status = omxFillThisBuffer(pOutBufHdr);
- CAMHAL_ASSERT(status == NO_ERROR);
- }
- }
-
- LOG_FUNCTION_NAME_EXIT;
-}
-
-void OmxFrameDecoder::doProcessInputBuffer() {
-
- LOG_FUNCTION_NAME;
-
- if (getOmxState() == OmxDecoderState_Reconfigure) {
- if (doPortReconfigure() == NO_ERROR) {
- commitState(OmxDecoderState_Executing);
- queueOutputBuffers();
- } else {
- commitState(OmxDecoderState_Error);
- return;
- }
-
- }
-
- if (getOmxState() == OmxDecoderState_Idle) {
- CAMHAL_ASSERT(omxSwitchToExecutingSync() == NO_ERROR);
- queueOutputBuffers();
- }
-
- if (getOmxState() == OmxDecoderState_Executing) {
- for (size_t i = 0; i < mInQueue.size(); i++) {
- int index = mInQueue[i];
- CAMHAL_LOGD("Got in inqueue[%d] buffer id=%d", i, index);
- android::sp<MediaBuffer> &inBuffer = mInBuffers->editItemAt(index);
- android::AutoMutex bufferLock(inBuffer->getLock());
- if (inBuffer->getStatus() == BufferStatus_InQueued) {
- OMX_BUFFERHEADERTYPE *pInBufHdr = mInBufferHeaders[index];
- inBuffer->setStatus(BufferStatus_InWaitForEmpty);
- omxEmptyThisBuffer(inBuffer, pInBufHdr);
- }
- }
- queueOutputBuffers();
- }
-
- LOG_FUNCTION_NAME_EXIT;
-}
-
-status_t OmxFrameDecoder::omxInit() {
-
- LOG_FUNCTION_NAME;
-
- OMX_ERRORTYPE eError = OMX_Init();
- if (eError != OMX_ErrorNone) {
- CAMHAL_LOGEB("OMX_Init() failed, error: 0x%x", eError);
- }
- else mOmxInialized = true;
-
- LOG_FUNCTION_NAME_EXIT;
- return Utils::ErrorUtils::omxToAndroidError(eError);
-}
-
-status_t OmxFrameDecoder::omxFillThisBuffer(OMX_BUFFERHEADERTYPE *pOutBufHdr) {
- OMX_ERRORTYPE eError = OMX_ErrorUndefined;
-
- pOutBufHdr->nFilledLen = 0;
- pOutBufHdr->nOffset = 0;
- pOutBufHdr->nFlags = 0;
-
- eError = OMX_FillThisBuffer(mHandleComp, pOutBufHdr);
- if (eError != OMX_ErrorNone) {
- CAMHAL_LOGE("OMX_FillThisBuffer ERROR 0x%x", eError);
- }
- return Utils::ErrorUtils::omxToAndroidError(eError);
-}
-
-
-status_t OmxFrameDecoder::omxGetHandle(OMX_HANDLETYPE *handle, OMX_PTR pAppData,
- OMX_CALLBACKTYPE & callbacks) {
- LOG_FUNCTION_NAME;
-
- OMX_ERRORTYPE eError = OMX_ErrorUndefined;
-
- eError = OMX_GetHandle(handle, (OMX_STRING)"OMX.TI.DUCATI1.VIDEO.DECODER", pAppData, &callbacks);
- if((eError != OMX_ErrorNone) || (handle == NULL)) {
- handle = NULL;
- return Utils::ErrorUtils::omxToAndroidError(eError);
- }
- commitState(OmxDecoderState_Loaded);
-
- LOG_FUNCTION_NAME_EXIT;
-
- return Utils::ErrorUtils::omxToAndroidError(eError);
-}
-
-status_t OmxFrameDecoder::omxEmptyThisBuffer(android::sp<MediaBuffer>& inBuffer, OMX_BUFFERHEADERTYPE *pInBufHdr) {
-
- LOG_FUNCTION_NAME;
-
- OMX_PARAM_PORTDEFINITIONTYPE def;
- OMX_ERRORTYPE eError = OMX_ErrorNone;
-
- InitOMXParams(&def);
- def.nPortIndex = PortIndexInput;
- omxGetParameter(OMX_IndexParamPortDefinition, &def);
- CAMHAL_LOGD("Founded id for empty is %d ", inBuffer->bufferId);
- if (inBuffer->filledLen > def.nBufferSize) {
- CAMHAL_LOGE("Can't copy IN buffer due to it too small %d than needed %d", def.nBufferSize, inBuffer->filledLen);
- return UNKNOWN_ERROR;
- }
-
- int filledLen = inBuffer->filledLen;
- unsigned char* dataBuffer = reinterpret_cast<unsigned char*>(inBuffer->buffer);
-
- //If decoder type MJPEG we check if append DHT forced and if true append it
- //in other case we check mIsNeedCheckDHT and if true search for DHT in buffer
- //if we don't found it - will do append
- //once we find that buffer not contain DHT we will append it each time
- if ((mDecoderType == DecoderType_MJPEG) && ((mAlwaysAppendDHT) || ((mIsNeedCheckDHT) &&
- (mIsNeedCheckDHT = !Decoder_libjpeg::isDhtExist(dataBuffer, filledLen))))) {
- CAMHAL_LOGV("Will append DHT to buffer");
- Decoder_libjpeg::appendDHT(dataBuffer, filledLen, pInBufHdr->pBuffer, filledLen + Decoder_libjpeg::readDHTSize());
- filledLen += Decoder_libjpeg::readDHTSize();
- mIsNeedCheckDHT = false;
- mAlwaysAppendDHT = true;
- } else {
- memcpy(pInBufHdr->pBuffer, dataBuffer, filledLen);
- }
-
- CAMHAL_LOGV("Copied %d bytes into In buffer with bh=%p", filledLen, pInBufHdr);
- CAMHAL_LOGV("Empty this buffer id=%d timestamp %lld offset=%d", inBuffer->bufferId, pInBufHdr->nTimeStamp, pInBufHdr->nOffset);
- pInBufHdr->nFilledLen = filledLen;
- pInBufHdr->nTimeStamp = inBuffer->getTimestamp();
- pInBufHdr->nFlags = 16;
- pInBufHdr->nOffset = 0;
- eError = OMX_EmptyThisBuffer(mHandleComp, pInBufHdr);
- if (eError != OMX_ErrorNone) {
- CAMHAL_LOGE("OMX_EmptyThisBuffer ERROR 0x%x", eError);
- Utils::ErrorUtils::omxToAndroidError(eError);
- }
-
- LOG_FUNCTION_NAME_EXIT;
-
- return NO_ERROR;
-}
-
-
-status_t OmxFrameDecoder::allocateBuffersOutput() {
- LOG_FUNCTION_NAME;
-
- OMX_ERRORTYPE eError = OMX_ErrorNone;
- OMX_PARAM_PORTDEFINITIONTYPE def;
- InitOMXParams(&def);
- def.nPortIndex = PortIndexOutput;
- omxGetParameter(OMX_IndexParamPortDefinition, &def);
- def.nBufferCountActual = mParams.outputBufferCount;
-
- CAMHAL_LOGD("Will set def.nBufferSize=%d stride=%d height=%d", def.nBufferSize , def.format.video.nStride, def.format.video.nFrameHeight);
-
- OMX_BUFFERHEADERTYPE *pOutBufHdr;
- mOutBufferHeaders.clear();
- for (size_t i = 0; i < mOutBuffers->size(); i++) {
- android::sp<MediaBuffer>& outBuffer = mOutBuffers->editItemAt(i);
- android::AutoMutex lock(outBuffer->getLock());
- CameraBuffer* cb = static_cast<CameraBuffer*>(outBuffer->buffer);
- OMX_U8 * outPtr = static_cast<OMX_U8*>(camera_buffer_get_omx_ptr(cb));
- CAMHAL_LOGV("Try to set OMX_UseBuffer [0x%x] for output port with length %d ", outPtr, def.nBufferSize);
- eError = OMX_UseBuffer(mHandleComp, &pOutBufHdr, PortIndexOutput, (void*)i, def.nBufferSize, outPtr);
-
- if (eError != OMX_ErrorNone) {
- ALOGE("OMX_UseBuffer failed with error %d (0x%08x)", eError, eError);
- commitState(OmxDecoderState_Error);
- return UNKNOWN_ERROR;
- }
-
- CAMHAL_LOGD("Got buffer header %p", pOutBufHdr);
- mOutBufferHeaders.add(pOutBufHdr);
- }
-
- omxDumpPortSettings(def);
- LOG_FUNCTION_NAME_EXIT;
- return NO_ERROR;
-
-}
-
-status_t OmxFrameDecoder::allocateBuffersInput() {
- LOG_FUNCTION_NAME;
-
- OMX_PARAM_PORTDEFINITIONTYPE def;
- OMX_BUFFERHEADERTYPE *pInBufHdr;
- OMX_ERRORTYPE eError = OMX_ErrorNone;
-
- InitOMXParams(&def);
- def.nPortIndex = PortIndexInput;
- omxGetParameter(OMX_IndexParamPortDefinition, &def);
-
- // TODO: Will be changed since port reconfiguration will be handled
- def.nBufferCountActual = mInBuffers->size();
- def.bEnabled = OMX_TRUE;
- omxSetParameter(OMX_IndexParamPortDefinition, &def);
-
- mInBufferHeaders.clear();
-
- for (size_t i = 0; i < mInBuffers->size(); i++) {
- CAMHAL_LOGD("Will do OMX_AllocateBuffer for input port with size %d id=%d", def.nBufferSize, i);
- eError = OMX_AllocateBuffer(mHandleComp, &pInBufHdr, PortIndexInput, (void*)i, def.nBufferSize);
- if (eError != OMX_ErrorNone) {
- ALOGE("OMX_AllocateBuffer failed with error %d (0x%08x)", eError, eError);
- commitState(OmxDecoderState_Error);
- return UNKNOWN_ERROR;
- }
- CAMHAL_LOGD("Got new buffer header [%p] for IN port", pInBufHdr);
- mInBufferHeaders.push_back(pInBufHdr);
- }
-
- LOG_FUNCTION_NAME_EXIT;
- return NO_ERROR;
-}
-
-status_t OmxFrameDecoder::getAndConfigureDecoder() {
- status_t ret = NO_ERROR;
- OMX_ERRORTYPE eError;
-
- ret = omxInit();
- if (ret != NO_ERROR) {
- CAMHAL_LOGE("OMX_Init returned error 0x%x", ret);
- return ret;
- }
- OMX_CALLBACKTYPE callbacks;
- callbacks.EventHandler = OmxFrameDecoder::eventCallback;
- callbacks.EmptyBufferDone = OmxFrameDecoder::emptyBufferDoneCallback;
- callbacks.FillBufferDone = OmxFrameDecoder::fillBufferDoneCallback;
- ret = omxGetHandle(&mHandleComp, this, callbacks);
- if (ret != NO_ERROR) {
- CAMHAL_LOGE("OMX_GetHandle returned error 0x%x", ret);
- OMX_Deinit();
- mOmxInialized = false;
- return ret;
- }
- ret = setComponentRole();
- if (ret != NO_ERROR) {
- CAMHAL_LOGE("setComponentRole returned error 0x%x", ret);
- OMX_Deinit();
- mOmxInialized = false;
- return ret;
- }
- disablePortSync(PortIndexOutput);
- ret = setVideoOutputFormat(mParams.width, mParams.height);
- enablePortSync(PortIndexOutput);
- if (ret != NO_ERROR) {
- CAMHAL_LOGE("Can't set output format error 0x%x", ret);
- OMX_Deinit();
- mOmxInialized = false;
- return ret;
- }
- enableGrallockHandles();
- return NO_ERROR;
-}
-
-status_t OmxFrameDecoder::switchToIdle() {
- CAMHAL_ASSERT(getOmxState() == OmxDecoderState_Loaded);
- CAMHAL_LOGD("Try set OMX_StateIdle");
- android::AutoMutex lock(mHwLock);
- status_t ret = omxSendCommand(OMX_CommandStateSet, OMX_StateIdle);
- if (ret != NO_ERROR) {
- CAMHAL_LOGE("Can't omxSendCommandt error 0x%x", ret);
- OMX_Deinit();
- mOmxInialized = false;
- return ret;
- }
-
- allocateBuffersInput();
-
- OMX_PARAM_PORTDEFINITIONTYPE def;
- InitOMXParams(&def);
- def.nPortIndex = PortIndexOutput;
- omxGetParameter(OMX_IndexParamPortDefinition, &def);
- def.nBufferCountActual = mParams.outputBufferCount;
- omxSetParameter(OMX_IndexParamPortDefinition, &def);
-
- allocateBuffersOutput();
-
- ret = mStateCondition.waitRelative(mHwLock, kMaxStateSwitchTimeOut);
- if (ret != NO_ERROR) {
- CAMHAL_LOGE("State transition to IDLE ERROR 0x%x", ret);
- return ret;
- }
- commitState(OmxDecoderState_Idle);
- return NO_ERROR;
-}
-
-status_t OmxFrameDecoder::doStart() {
- LOG_FUNCTION_NAME;
-
- status_t ret = NO_ERROR;
- mStopping = false;
- OMX_ERRORTYPE eError;
-
- ret = getAndConfigureDecoder();
-
-#if 0
- OMX_TI_PARAM_ENHANCEDPORTRECONFIG tParamStruct;
- tParamStruct.nSize = sizeof(OMX_TI_PARAM_ENHANCEDPORTRECONFIG);
- tParamStruct.nVersion.s.nVersionMajor = 0x1;
- tParamStruct.nVersion.s.nVersionMinor = 0x1;
- tParamStruct.nVersion.s.nRevision = 0x0;
- tParamStruct.nVersion.s.nStep = 0x0;
- tParamStruct.nPortIndex = PortIndexOutput;
- tParamStruct.bUsePortReconfigForCrop = OMX_TRUE;
- tParamStruct.bUsePortReconfigForPadding = OMX_FALSE;
- omxSetParameter((OMX_INDEXTYPE)OMX_TI_IndexParamUseEnhancedPortReconfig, &tParamStruct);
-#endif
-
- // Transition to IDLE
- ret = switchToIdle();
- dumpPortSettings(PortIndexInput);
- dumpPortSettings(PortIndexOutput);
-
- LOG_FUNCTION_NAME_EXIT;
- return ret;
-}
-
-status_t OmxFrameDecoder::omxGetParameter(OMX_INDEXTYPE index, OMX_PTR ptr) {
- OMX_ERRORTYPE eError = OMX_GetParameter(mHandleComp, index, ptr);
- if(eError != OMX_ErrorNone) {
- CAMHAL_LOGE("OMX_GetParameter - error 0x%x", eError);
- }
- return Utils::ErrorUtils::omxToAndroidError(eError);
-}
-
-status_t OmxFrameDecoder::omxGetConfig(OMX_INDEXTYPE index, OMX_PTR ptr) {
- OMX_ERRORTYPE eError = OMX_GetConfig(mHandleComp, index, ptr);
- if(eError != OMX_ErrorNone) {
- CAMHAL_LOGE("OMX_GetConfig - error 0x%x", eError);
- }
- return Utils::ErrorUtils::omxToAndroidError(eError);
-}
-
-status_t OmxFrameDecoder::omxSetParameter(OMX_INDEXTYPE index, OMX_PTR ptr) {
- OMX_ERRORTYPE eError = OMX_SetParameter(mHandleComp, index, ptr);
- if(eError != OMX_ErrorNone) {
- CAMHAL_LOGE("OMX_SetParameter - error 0x%x", eError);
- }
- return Utils::ErrorUtils::omxToAndroidError(eError);
-}
-
-status_t OmxFrameDecoder::omxSetConfig(OMX_INDEXTYPE index, OMX_PTR ptr) {
- OMX_ERRORTYPE eError = OMX_SetConfig(mHandleComp, index, ptr);
- if(eError != OMX_ErrorNone) {
- CAMHAL_LOGE("OMX_SetConfig - error 0x%x", eError);
- }
- return Utils::ErrorUtils::omxToAndroidError(eError);
-}
-
-status_t OmxFrameDecoder::omxSendCommand(OMX_COMMANDTYPE cmd, OMX_S32 param) {
- OMX_ERRORTYPE eError = OMX_SendCommand(mHandleComp, cmd, param, NULL);
- if(eError != OMX_ErrorNone) {
- CAMHAL_LOGE("OMX_SendCommand - error 0x%x", eError);
- }
- return Utils::ErrorUtils::omxToAndroidError(eError);
-}
-
-status_t OmxFrameDecoder::setVideoOutputFormat(OMX_U32 width, OMX_U32 height) {
- LOG_FUNCTION_NAME;
-
- CAMHAL_LOGV("setVideoOutputFormat width=%ld, height=%ld", width, height);
-
- OMX_VIDEO_CODINGTYPE compressionFormat = gCompressionFormat[mDecoderType];
-
- status_t err = setVideoPortFormatType(
- PortIndexInput, compressionFormat, OMX_COLOR_FormatUnused);
-
- if (err != NO_ERROR) {
- CAMHAL_LOGE("Error during setVideoPortFormatType 0x%x", err);
- return err;
- }
-
- OMX_PARAM_PORTDEFINITIONTYPE def;
- InitOMXParams(&def);
- def.nPortIndex = PortIndexInput;
-
- OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video;
-
- err = omxGetParameter(OMX_IndexParamPortDefinition, &def);
-
- if (err != NO_ERROR) {
- return err;
- }
-
- video_def->nFrameWidth = width;
- video_def->nFrameHeight = height;
-
- video_def->eCompressionFormat = compressionFormat;
- video_def->eColorFormat = OMX_COLOR_FormatUnused;
-
-
- err = omxSetParameter(OMX_IndexParamPortDefinition, &def);
-
-
- if (err != OK) {
- return err;
- }
-
- OMX_PARAM_PORTDEFINITIONTYPE odef;
- OMX_VIDEO_PORTDEFINITIONTYPE *out_video_def = &odef.format.video;
-
- InitOMXParams(&odef);
- odef.nPortIndex = PortIndexOutput;
-
- err = omxGetParameter(OMX_IndexParamPortDefinition, &odef);
- if (err != NO_ERROR) {
- return err;
- }
-
- out_video_def->nFrameWidth = width;
- out_video_def->nFrameHeight = height;
- out_video_def->xFramerate = 30<< 16;//((width >= 720) ? 60 : 30) << 16;
- out_video_def->nStride = 4096;
-
- err = omxSetParameter(OMX_IndexParamPortDefinition, &odef);
- CAMHAL_LOGD("OUT port is configured");
- dumpPortSettings(PortIndexOutput);
-
- LOG_FUNCTION_NAME_EXIT;
- return err;
-}
-
-status_t OmxFrameDecoder::setVideoPortFormatType(
- OMX_U32 portIndex,
- OMX_VIDEO_CODINGTYPE compressionFormat,
- OMX_COLOR_FORMATTYPE colorFormat) {
-
- LOG_FUNCTION_NAME;
-
- OMX_VIDEO_PARAM_PORTFORMATTYPE format;
- InitOMXParams(&format);
- format.nPortIndex = portIndex;
- format.nIndex = 0;
- bool found = false;
-
- OMX_U32 index = 0;
- for (;;) {
- CAMHAL_LOGV("Will check index = %d", index);
- format.nIndex = index;
- OMX_ERRORTYPE eError = OMX_GetParameter(
- mHandleComp, OMX_IndexParamVideoPortFormat,
- &format);
-
- CAMHAL_LOGV("format.eCompressionFormat=0x%x format.eColorFormat=0x%x", format.eCompressionFormat, format.eColorFormat);
-
- if (format.eCompressionFormat == compressionFormat
- && format.eColorFormat == colorFormat) {
- found = true;
- break;
- }
-
- ++index;
- if (index >= kMaxColorFormatSupported) {
- CAMHAL_LOGE("color format %d or compression format %d is not supported",
- colorFormat, compressionFormat);
- return UNKNOWN_ERROR;
- }
- }
-
- if (!found) {
- return UNKNOWN_ERROR;
- }
-
- CAMHAL_LOGV("found a match.");
- OMX_ERRORTYPE eError = OMX_SetParameter(
- mHandleComp, OMX_IndexParamVideoPortFormat,
- &format);
-
- LOG_FUNCTION_NAME_EXIT;
- return Utils::ErrorUtils::omxToAndroidError(eError);
-}
-
-status_t OmxFrameDecoder::setComponentRole() {
- OMX_PARAM_COMPONENTROLETYPE roleParams;
- const char *role = gDecoderRole[mDecoderType];
- InitOMXParams(&roleParams);
-
- strncpy((char *)roleParams.cRole,
- role, OMX_MAX_STRINGNAME_SIZE - 1);
- roleParams.cRole[OMX_MAX_STRINGNAME_SIZE - 1] = '\0';
-
- return omxSetParameter(OMX_IndexParamStandardComponentRole, &roleParams);
-}
-
-void OmxFrameDecoder::freeBuffersOnOutput() {
- LOG_FUNCTION_NAME;
- for (size_t i = 0; i < mOutBufferHeaders.size(); i++) {
- OMX_BUFFERHEADERTYPE* header = mOutBufferHeaders[i];
- CAMHAL_LOGD("Freeing OUT buffer header %p", header);
- OMX_FreeBuffer(mHandleComp, PortIndexOutput, header);
- }
- mOutBufferHeaders.clear();
- LOG_FUNCTION_NAME_EXIT;
-}
-
-void OmxFrameDecoder::freeBuffersOnInput() {
- LOG_FUNCTION_NAME;
- for (size_t i = 0; i < mInBufferHeaders.size(); i++) {
- OMX_BUFFERHEADERTYPE* header = mInBufferHeaders[i];
- CAMHAL_LOGD("Freeing IN buffer header %p", header);
- OMX_FreeBuffer(mHandleComp, PortIndexInput, header);
- }
- mInBufferHeaders.clear();
- LOG_FUNCTION_NAME_EXIT;
-}
-
-void OmxFrameDecoder::doStop() {
- LOG_FUNCTION_NAME;
-
- mStopping = true;
- android::AutoMutex lock(mHwLock);
-
- CAMHAL_LOGD("HwFrameDecoder::doStop state id=%d", getOmxState());
-
- if ((getOmxState() == OmxDecoderState_Executing) || (getOmxState() == OmxDecoderState_Reconfigure)) {
-
- CAMHAL_LOGD("Try set OMX_StateIdle");
- status_t ret = omxSendCommand(OMX_CommandStateSet, OMX_StateIdle);
- if (ret != NO_ERROR) {
- CAMHAL_LOGE("Can't omxSendCommandt error 0x%x", ret);
- }
-
- ret = mStateCondition.waitRelative(mHwLock, kMaxStateSwitchTimeOut);
- if (ret != NO_ERROR) {
- CAMHAL_LOGE("State transition to IDLE ERROR 0x%x", ret);
- }
- commitState(OmxDecoderState_Idle);
- }
-
- if (getOmxState() == OmxDecoderState_Idle) {
-
- CAMHAL_LOGD("Try set OMX_StateLoaded");
- status_t ret = omxSendCommand(OMX_CommandStateSet, OMX_StateLoaded);
- if (ret != NO_ERROR) {
- CAMHAL_LOGE("Can't omxSendCommandt error 0x%x", ret);
- return;
- }
- freeBuffersOnOutput();
- freeBuffersOnInput();
- ret = mStateCondition.waitRelative(mHwLock, kMaxStateSwitchTimeOut);
- if (ret != NO_ERROR) {
- CAMHAL_LOGE("State transition to OMX_StateLoaded ERROR 0x%x", ret);
- }
- commitState(OmxDecoderState_Loaded);
-
- }
-
- if (getOmxState() == OmxDecoderState_Error) {
- CAMHAL_LOGD("In state ERROR will try to free buffers!");
- freeBuffersOnOutput();
- freeBuffersOnInput();
- }
-
- CAMHAL_LOGD("Before OMX_FreeHandle ....");
- OMX_FreeHandle(mHandleComp);
- CAMHAL_LOGD("After OMX_FreeHandle ....");
-
- LOG_FUNCTION_NAME_EXIT;
-}
-
-void OmxFrameDecoder::doFlush() {
- LOG_FUNCTION_NAME;
- mIsNeedCheckDHT = true;
- LOG_FUNCTION_NAME_EXIT;
-}
-
-void OmxFrameDecoder::doRelease() {
- LOG_FUNCTION_NAME;
-
- LOG_FUNCTION_NAME_EXIT;
-}
-
-void OmxFrameDecoder::omxDumpPortSettings(OMX_PARAM_PORTDEFINITIONTYPE& def) {
- CAMHAL_LOGD("----------Port settings start--------------------");
- CAMHAL_LOGD("nSize=%d nPortIndex=%d eDir=%d nBufferCountActual=%d", def.nSize, def.nPortIndex, def.eDir, def.nBufferCountActual);
- CAMHAL_LOGD("nBufferCountMin=%d nBufferSize=%d bEnabled=%d bPopulated=%d bBuffersContiguous=%d nBufferAlignment=%d", def.nBufferCountMin, def.nBufferSize, def.bEnabled, def.bPopulated, def.bBuffersContiguous, def.nBufferAlignment);
-
- CAMHAL_LOGD("eDomain = %d",def.eDomain);
-
- if (def.eDomain == OMX_PortDomainVideo) {
- CAMHAL_LOGD("===============Video Port===================");
- CAMHAL_LOGD("cMIMEType=%s",def.format.video.cMIMEType);
- CAMHAL_LOGD("nFrameWidth=%d nFrameHeight=%d", def.format.video.nFrameWidth, def.format.video.nFrameHeight);
- CAMHAL_LOGD("nStride=%d nSliceHeight=%d", def.format.video.nStride, def.format.video.nSliceHeight);
- CAMHAL_LOGD("nBitrate=%d xFramerate=%d", def.format.video.nBitrate, def.format.video.xFramerate>>16);
- CAMHAL_LOGD("bFlagErrorConcealment=%d eCompressionFormat=%d", def.format.video.bFlagErrorConcealment, def.format.video.eCompressionFormat);
- CAMHAL_LOGD("eColorFormat=0x%x pNativeWindow=%p", def.format.video.eColorFormat, def.format.video.pNativeWindow);
- CAMHAL_LOGD("===============END Video Part===================");
- }
- else if (def.eDomain == OMX_PortDomainImage) {
- CAMHAL_LOGD("===============Image Port===================");
- CAMHAL_LOGD("cMIMEType=%s",def.format.image.cMIMEType);
- CAMHAL_LOGD("nFrameWidth=%d nFrameHeight=%d", def.format.image.nFrameWidth, def.format.image.nFrameHeight);
- CAMHAL_LOGD("nStride=%d nSliceHeight=%d", def.format.image.nStride, def.format.image.nSliceHeight);
- CAMHAL_LOGD("bFlagErrorConcealment=%d eCompressionFormat=%d", def.format.image.bFlagErrorConcealment, def.format.image.eCompressionFormat);
- CAMHAL_LOGD("eColorFormat=0x%x pNativeWindow=%p", def.format.image.eColorFormat, def.format.image.pNativeWindow);
- CAMHAL_LOGD("===============END Image Part===================");
- }
- CAMHAL_LOGD("----------Port settings end--------------------");
-}
-
-void OmxFrameDecoder::omxDumpBufferHeader(OMX_BUFFERHEADERTYPE* bh) {
- CAMHAL_LOGD("==============OMX_BUFFERHEADERTYPE start==============");
- CAMHAL_LOGD("nAllocLen=%d nFilledLen=%d nOffset=%d nFlags=0x%x", bh->nAllocLen, bh->nFilledLen, bh->nOffset, bh->nFlags);
- CAMHAL_LOGD("pBuffer=%p nOutputPortIndex=%d nInputPortIndex=%d nSize=0x%x", bh->pBuffer, bh->nOutputPortIndex, bh->nInputPortIndex, bh->nSize);
- CAMHAL_LOGD("nVersion=0x%x", bh->nVersion);
- CAMHAL_LOGD("==============OMX_BUFFERHEADERTYPE end==============");
-}
-
-bool OmxFrameDecoder::getPaddedDimensions(size_t &width, size_t &height) {
-
- switch (height) {
-
- case 480: {
- height = 576;
- if (width == 720) {
- width = 768;
- }
- break;
- }
- case 720: {
- height = 832;
- if (width == 1280) {
- width = 1408;
- }
- break;
- }
- case 1080: {
- height = 1184;
- if (width == 1920) {
- width = 2048;
- }
- break;
- }
-
- }
-
- CAMHAL_LOGD("WxH updated to padded values : %d x %d", width, height);
- return true;
-}
-
-} // namespace Camera
-} // namespace Ti
-
diff --git a/camera/SwFrameDecoder.cpp b/camera/SwFrameDecoder.cpp
deleted file mode 100644
index 2ce2c0f..0000000
--- a/camera/SwFrameDecoder.cpp
+++ /dev/null
@@ -1,85 +0,0 @@
-/*
- * Copyright (C) Texas Instruments - http://www.ti.com/
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "Common.h"
-#include "SwFrameDecoder.h"
-
-namespace Ti {
-namespace Camera {
-
-SwFrameDecoder::SwFrameDecoder()
-: mjpegWithHdrSize(0), mJpegWithHeaderBuffer(NULL) {
-}
-
-SwFrameDecoder::~SwFrameDecoder() {
- delete [] mJpegWithHeaderBuffer;
- mJpegWithHeaderBuffer = NULL;
-}
-
-
-void SwFrameDecoder::doConfigure(const DecoderParameters& params) {
- LOG_FUNCTION_NAME;
-
- mjpegWithHdrSize = (mParams.width * mParams.height / 2) +
- mJpgdecoder.readDHTSize();
- if (mJpegWithHeaderBuffer != NULL) {
- delete [] mJpegWithHeaderBuffer;
- mJpegWithHeaderBuffer = NULL;
- }
- mJpegWithHeaderBuffer = new unsigned char[mjpegWithHdrSize];
-
- LOG_FUNCTION_NAME_EXIT;
-}
-
-
-void SwFrameDecoder::doProcessInputBuffer() {
- LOG_FUNCTION_NAME;
- nsecs_t timestamp = 0;
-
- CAMHAL_LOGV("Will add header to MJPEG");
- int final_jpg_sz = 0;
- {
- int inIndex = mInQueue.itemAt(0);
- android::sp<MediaBuffer>& inBuffer = mInBuffers->editItemAt(inIndex);
- android::AutoMutex lock(inBuffer->getLock());
- timestamp = inBuffer->getTimestamp();
- final_jpg_sz = mJpgdecoder.appendDHT(
- reinterpret_cast<unsigned char*>(inBuffer->buffer),
- inBuffer->filledLen, mJpegWithHeaderBuffer, mjpegWithHdrSize);
- inBuffer->setStatus(BufferStatus_InDecoded);
- }
- CAMHAL_LOGV("Added header to MJPEG");
- {
- int outIndex = mOutQueue.itemAt(0);
- android::sp<MediaBuffer>& outBuffer = mOutBuffers->editItemAt(outIndex);
- android::AutoMutex lock(outBuffer->getLock());
- CameraBuffer* buffer = reinterpret_cast<CameraBuffer*>(outBuffer->buffer);
- if (!mJpgdecoder.decode(mJpegWithHeaderBuffer, final_jpg_sz,
- reinterpret_cast<unsigned char*>(buffer->mapped), 4096)) {
- CAMHAL_LOGEA("Error while decoding JPEG");
- return;
- }
- outBuffer->setTimestamp(timestamp);
- outBuffer->setStatus(BufferStatus_OutFilled);
- }
- CAMHAL_LOGV("JPEG decoded!");
-
- LOG_FUNCTION_NAME_EXIT;
-}
-
-
-} // namespace Camera
-} // namespace Ti
diff --git a/camera/V4LCameraAdapter/V4LCameraAdapter.cpp b/camera/V4LCameraAdapter/V4LCameraAdapter.cpp
index 837a755..e69e4e6 100644
--- a/camera/V4LCameraAdapter/V4LCameraAdapter.cpp
+++ b/camera/V4LCameraAdapter/V4LCameraAdapter.cpp
@@ -38,9 +38,9 @@
#include <sys/select.h>
//#include <linux/videodev.h>
#include <cutils/properties.h>
-#include "DecoderFactory.h"
#include <hal_public.h>
+
#define UNLIKELY( exp ) (__builtin_expect( (exp) != 0, false ))
static int mDebugFps = 0;
@@ -52,8 +52,8 @@ extern int setupM2MDevice();
extern int closeM2MDevice();
extern int startM2MDevice();
extern int stopM2MDevice();
-extern int processFrame(void *srcHandle, int srcWidth, int srcHeight, int srcStride, const char *srcFmt, void *dstHandle,
- int dstWidth, int dstHeight, int dstStride, const char *dstFmt, bool dei, int translen, int index,
+extern int processFrame(void *srcHandle, int srcWidth, int srcHeight, int srcStride, const char *srcFmt, void *dstHandle,
+ int dstWidth, int dstHeight, int dstStride, const char *dstFmt, bool dei, int translen, int index,
int *in_index, int *out_index);
namespace Ti {
@@ -63,9 +63,7 @@ namespace Camera {
#define FPS_PERIOD 30
//Proto Types
-static void convertYUV422i_yuyvTouyvy(uint8_t *src, uint8_t *dest, size_t size );
static void convertYUV422ToNV12Tiler(unsigned char *src, android_ycbcr *dest, int width, int height );
-static void convertYUV422ToNV12(unsigned char *src, unsigned char *dest, int width, int height );
android::Mutex gV4LAdapterLock;
char device[MAX_VIDEO_DEVICES][MAX_PATH_LENGTH];
@@ -94,10 +92,6 @@ static void debugShowFPS()
/*--------------------V4L wrapper functions -------------------------------*/
-bool V4LCameraAdapter::isNeedToUseDecoder() const {
- return mPixelFormat != V4L2_PIX_FMT_YUYV;
-}
-
status_t V4LCameraAdapter::v4lIoctl (int fd, int req, void* argp) {
status_t ret = NO_ERROR;
errno = 0;
@@ -165,41 +159,11 @@ status_t V4LCameraAdapter::v4lInitMmap(int& count, int width, int height) {
mInBuffers.push_back(buffer);
}
- if (isNeedToUseDecoder()) {
- mDecoder->registerInputBuffers(&mInBuffers);
- DecoderParameters params;
- params.width = width;
- params.height = height;
- params.inputBufferCount = count;
- params.outputBufferCount = count;
- mDecoder->configure(params);
- }
-
-
-
-
LOG_FUNCTION_NAME_EXIT;
return ret;
}
-status_t V4LCameraAdapter::v4lInitUsrPtr(int& count) {
- status_t ret = NO_ERROR;
-
- mVideoInfo->rb.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
- mVideoInfo->rb.memory = V4L2_MEMORY_USERPTR;
- mVideoInfo->rb.count = count;
-
- ret = v4lIoctl(mCameraHandle, VIDIOC_REQBUFS, &mVideoInfo->rb);
- if (ret < 0) {
- CAMHAL_LOGEB("VIDIOC_REQBUFS failed for USERPTR: %s", strerror(errno));
- return ret;
- }
-
- count = mVideoInfo->rb.count;
- return ret;
-}
-
status_t V4LCameraAdapter::v4lInitDmaBuf(int& count, int width, int height) {
status_t ret = NO_ERROR;
int bytes = width * height * 2;
@@ -309,7 +273,7 @@ status_t V4LCameraAdapter::v4lSetFormat (int width, int height, uint32_t pix_for
LOG_FUNCTION_NAME;
- CAMHAL_LOGDB("Width * Height %d x %d format 0x%x", width, height, mPixelFormat);
+ CAMHAL_LOGDB("Width * Height %d x %d format 0x%x", width, height, mPixelFormat);
mVideoInfo->format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
ret = v4lIoctl(mCameraHandle, VIDIOC_G_FMT, &mVideoInfo->format);
@@ -370,6 +334,7 @@ status_t V4LCameraAdapter::restartPreview ()
v4l2_buffer buf;
buf.index = i;
+ buf.flags = 0;
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_DMABUF;
buf.m.fd = mCameraBuffers[i].dma_buf_fd;
@@ -383,14 +348,6 @@ status_t V4LCameraAdapter::restartPreview ()
nQueued++;
}
- if (isNeedToUseDecoder()) {
- for (int i = 0; i < mPreviewBufferCountQueueable; i++) {
- mDecoder->queueOutputBuffer(i);
- CAMHAL_LOGV("Queued output buffer with id=%d ", i);
- }
- mDecoder->start();
- }
-
ret = v4lStartStreaming();
CAMHAL_LOGDA("Ready for preview....");
EXIT:
@@ -501,41 +458,23 @@ status_t V4LCameraAdapter::fillThisBuffer(CameraBuffer *frameBuf, CameraFrame::F
CAMHAL_LOGEB("Wrong index = %d",idx);
return ret;
}
- if (isNeedToUseDecoder()) {
- for (int i = 0; i < mOutBuffers.size(); i++) {
- android::sp<MediaBuffer>& outBuffer = mOutBuffers.editItemAt(i);
- CameraBuffer* buffer = static_cast<CameraBuffer*>(outBuffer->buffer);
- if (buffer == frameBuf) {
- mDecoder->queueOutputBuffer(outBuffer->bufferId);
- break;
- }
- }
-
- int inIndex = -1;
- ret = mDecoder->dequeueInputBuffer(inIndex);
-
- if (ret == NO_ERROR) {
- ret = returnBufferToV4L(inIndex);
- }
-
- } else {
- v4l2_buffer buf;
- buf.index = idx;
- buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
- buf.memory = V4L2_MEMORY_DMABUF;
- buf.m.fd = mCameraBuffers[idx].dma_buf_fd;
- buf.length = mCameraBuffers[idx].size;
-
- CAMHAL_LOGD("Will return buffer to V4L with id=%d", idx);
- ret = v4lIoctl(mCameraHandle, VIDIOC_QBUF, &buf);
- if (ret < 0) {
- CAMHAL_LOGEA("VIDIOC_QBUF Failed");
- goto EXIT;
- }
+ v4l2_buffer buf;
+ buf.index = idx;
+ buf.flags = 0;
+ buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ buf.memory = V4L2_MEMORY_DMABUF;
+ buf.m.fd = mCameraBuffers[idx].dma_buf_fd;
+ buf.length = mCameraBuffers[idx].size;
- nQueued++;
+ CAMHAL_LOGD("Will return buffer to V4L with id=%d", idx);
+ ret = v4lIoctl(mCameraHandle, VIDIOC_QBUF, &buf);
+ if (ret < 0) {
+ CAMHAL_LOGEA("VIDIOC_QBUF Failed");
+ goto EXIT;
}
+ nQueued++;
+
EXIT:
LOG_FUNCTION_NAME_EXIT;
return ret;
@@ -707,9 +646,6 @@ status_t V4LCameraAdapter::UseBuffersPreview(CameraBuffer *bufArr, int num)
mOutBuffers.push_back(buffer);
CAMHAL_LOGDB("Preview- buff [%d] = 0x%x length=%d",i, mPreviewBufs[i], mFrameQueue.valueFor(mPreviewBufs[i])->mLength);
}
- if (isNeedToUseDecoder()) {
- mDecoder->registerOutputBuffers(&mOutBuffers);
- }
// Update the preview buffer count
mPreviewBufferCount = num;
}
@@ -749,10 +685,6 @@ status_t V4LCameraAdapter::takePicture() {
}
}
- if (isNeedToUseDecoder()) {
- mDecoder->stop();
- mDecoder->flush();
- }
mLock.lock();
mCapturing = true;
mPreviewing = false;
@@ -785,6 +717,7 @@ status_t V4LCameraAdapter::takePicture() {
v4l2_buffer buf;
buf.index = i;
+ buf.flags = 0;
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
@@ -933,6 +866,7 @@ status_t V4LCameraAdapter::startPreview()
}
buf.index = i;
+ buf.flags = 0;
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_DMABUF;
buf.m.fd = mCameraBuffers[i].dma_buf_fd;
@@ -946,13 +880,6 @@ status_t V4LCameraAdapter::startPreview()
nQueued++;
}
- if (isNeedToUseDecoder()) {
- for (int i = 0; i < mPreviewBufferCountQueueable; i++) {
- mDecoder->queueOutputBuffer(i);
- CAMHAL_LOGV("Queued output buffer with id=%d ", i);
- }
- mDecoder->start();
- }
ret = v4lStartStreaming();
// Create and start preview thread for receiving buffers from V4L Camera
@@ -987,12 +914,6 @@ status_t V4LCameraAdapter::stopPreview()
return NO_INIT;
}
mPreviewing = false;
- if (isNeedToUseDecoder()) {
- android::AutoMutex lock(mStopLock);
- mStopCondition.waitRelative(mStopLock, 100000000);
- mDecoder->stop();
- mDecoder->flush();
- }
ret = v4lStopStreaming(mPreviewBufferCount);
if (ret < 0) {
@@ -1054,14 +975,14 @@ char * V4LCameraAdapter::GetFrame(int &index, int &filledLen)
// a driver-wide mutex. If we use poll() or blocking VIDIOC_DQBUF ioctl
// here then we sometimes would run into a deadlock on VIDIO_QBUF ioctl.
while(true) {
- if(!mVideoInfo->isStreaming) {
- return NULL;
- }
+ if(!mVideoInfo->isStreaming) {
+ return NULL;
+ }
- ret = v4lIoctl(mCameraHandle, VIDIOC_DQBUF, &buf);
- if((ret == 0) || (errno != EAGAIN)) {
- break;
- }
+ ret = v4lIoctl(mCameraHandle, VIDIOC_DQBUF, &buf);
+ if((ret == 0) || (errno != EAGAIN)) {
+ break;
+ }
}
if (ret < 0) {
@@ -1105,9 +1026,6 @@ status_t V4LCameraAdapter::getFrameSize(size_t &width, size_t &height)
mParams.getPreviewSize(( int * ) &width,( int * ) &height);
// TODO: This will reside until correct port reconfiguration handling will done.
- if (isNeedToUseDecoder()) {
- mDecoder->getPaddedDimensions(width, height);
- }
LOG_FUNCTION_NAME_EXIT;
@@ -1143,37 +1061,6 @@ status_t V4LCameraAdapter::getPictureBufferSize(CameraFrame &frame, size_t buffe
return NO_ERROR;
}
-status_t V4LCameraAdapter::recalculateFPS()
-{
- float currentFPS;
-
- mFrameCount++;
-
- if ( ( mFrameCount % FPS_PERIOD ) == 0 )
- {
- nsecs_t now = systemTime();
- nsecs_t diff = now - mLastFPSTime;
- currentFPS = ((mFrameCount - mLastFrameCount) * float(s2ns(1))) / diff;
- mLastFPSTime = now;
- mLastFrameCount = mFrameCount;
-
- if ( 1 == mIter )
- {
- mFPS = currentFPS;
- }
- else
- {
- //cumulative moving average
- mFPS = mLastFPS + (currentFPS - mLastFPS)/mIter;
- }
-
- mLastFPS = mFPS;
- mIter++;
- }
-
- return NO_ERROR;
-}
-
void V4LCameraAdapter::onOrientationEvent(uint32_t orientation, uint32_t tilt)
{
LOG_FUNCTION_NAME;
@@ -1183,52 +1070,6 @@ void V4LCameraAdapter::onOrientationEvent(uint32_t orientation, uint32_t tilt)
LOG_FUNCTION_NAME_EXIT;
}
-void V4LCameraAdapter::setupWorkingMode() {
- char value[PROPERTY_VALUE_MAX];
- int v4lMode = 0;
-
- property_get("camera.v4l.mode", value, "3");
- v4lMode = atoi(value);
-
- if (mDecoder) {
- delete mDecoder;
- mDecoder = NULL;
- }
-
- switch (v4lMode) {
- case 0 : {
- mPixelFormat = V4L2_PIX_FMT_MJPEG;
- mCameraHal->setExternalLocking(true);
- mDecoder = DecoderFactory::createDecoderByType(DecoderType_MJPEG, false);
- CAMHAL_LOGI("Using V4L preview format: V4L2_PIX_FMT_MJPEG with HW decoding");
- break;
- }
-
- case 1 : {
- mPixelFormat = V4L2_PIX_FMT_MJPEG;
- mCameraHal->setExternalLocking(false);
- mDecoder = DecoderFactory::createDecoderByType(DecoderType_MJPEG, true);
- CAMHAL_LOGI("Using V4L preview format: V4L2_PIX_FMT_MJPEG with SW decoding");
- break;
- }
-
- case 2 : {
- mPixelFormat = V4L2_PIX_FMT_H264;
- mCameraHal->setExternalLocking(true);
- mDecoder = DecoderFactory::createDecoderByType(DecoderType_H264, false);
- CAMHAL_LOGI("Using V4L preview format: V4L2_PIX_FMT_H264");
- break;
- }
- default:
- case 3 : {
- mCameraHal->setExternalLocking(false);
- mPixelFormat = V4L2_PIX_FMT_YUYV;
- CAMHAL_LOGI("Using V4L preview format: V4L2_PIX_FMT_YUYV");
- }
-
- }
-}
-
V4LCameraAdapter::V4LCameraAdapter(size_t sensor_index, CameraHal* hal)
:mPixelFormat(DEFAULT_PIXEL_FORMAT), mFrameRate(0), mCameraHal(hal),
mSkipFramesCount(0)
@@ -1239,12 +1080,9 @@ V4LCameraAdapter::V4LCameraAdapter(size_t sensor_index, CameraHal* hal)
// Nothing useful to do in the constructor
mFramesWithEncoder = 0;
- mDecoder = 0;
nQueued = 0;
nDequeued = 0;
- setupWorkingMode();
-
setupM2MDevice();
property_get("camera.v4l.skipframes", value, "1");
@@ -1265,12 +1103,11 @@ V4LCameraAdapter::~V4LCameraAdapter()
close(mCameraHandle);
if (mVideoInfo)
- {
+ {
free(mVideoInfo);
mVideoInfo = NULL;
- }
+ }
- delete mDecoder;
mInBuffers.clear();
mOutBuffers.clear();
@@ -1283,27 +1120,6 @@ V4LCameraAdapter::~V4LCameraAdapter()
LOG_FUNCTION_NAME_EXIT;
}
-static void convertYUV422i_yuyvTouyvy(uint8_t *src, uint8_t *dest, size_t size ) {
- //convert YUV422I yuyv to uyvy format.
- uint32_t *bf = (uint32_t*)src;
- uint32_t *dst = (uint32_t*)dest;
-
- LOG_FUNCTION_NAME;
-
- if (!src || !dest) {
- return;
- }
-
- for(size_t i = 0; i < size; i = i+4)
- {
- dst[0] = ((bf[0] & 0x00FF00FF) << 8) | ((bf[0] & 0xFF00FF00) >> 8);
- bf++;
- dst++;
- }
-
- LOG_FUNCTION_NAME_EXIT;
-}
-
static void convertYUV422ToNV12Tiler(unsigned char *src, android_ycbcr *ycbcr, int width, int height ) {
//convert YUV422I to YUV420 NV12 format and copies directly to preview buffers (Tiler memory).
unsigned char *bf = src;
@@ -1394,123 +1210,15 @@ static void convertYUV422ToNV12Tiler(unsigned char *src, android_ycbcr *ycbcr, i
LOG_FUNCTION_NAME_EXIT;
}
-static void convertYUV422ToNV12(unsigned char *src, unsigned char *dest, int width, int height ) {
- //convert YUV422I to YUV420 NV12 format.
- unsigned char *bf = src;
- unsigned char *dst_y = dest;
- unsigned char *dst_uv = dest + (width * height);
-
- LOG_FUNCTION_NAME;
-
- if (width % 16 ) {
- for(int i = 0; i < height; i++) {
- for(int j = 0; j < width; j++) {
- *dst_y = *bf;
- dst_y++;
- bf = bf + 2;
- }
- }
-
- bf = src;
- bf++; //UV sample
- for(int i = 0; i < height/2; i++) {
- for(int j=0; j<width; j++) {
- *dst_uv = *bf;
- dst_uv++;
- bf = bf + 2;
- }
- bf = bf + width*2;
- }
- } else {
- //neon conversion
- for(int i = 0; i < height; i++) {
- int n = width;
- int skip = i & 0x1; // skip uv elements for the odd rows
- asm volatile (
- " pld [%[src], %[src_stride], lsl #2] \n\t"
- " cmp %[n], #16 \n\t"
- " blt 5f \n\t"
- "0: @ 16 pixel copy \n\t"
- " vld2.8 {q0, q1} , [%[src]]! @ q0 = yyyy.. q1 = uvuv.. \n\t"
- " @ now q0 = y q1 = uv \n\t"
- " vst1.32 {d0,d1}, [%[dst_y]]! \n\t"
- " cmp %[skip], #0 \n\t"
- " bne 1f \n\t"
- " vst1.32 {d2,d3},[%[dst_uv]]! \n\t"
- "1: @ skip odd rows for UV \n\t"
- " sub %[n], %[n], #16 \n\t"
- " cmp %[n], #16 \n\t"
- " bge 0b \n\t"
- "5: @ end \n\t"
-#ifdef NEEDS_ARM_ERRATA_754319_754320
- " vmov s0,s0 @ add noop for errata item \n\t"
-#endif
- : [dst_y] "+r" (dst_y), [dst_uv] "+r" (dst_uv), [src] "+r" (src), [n] "+r" (n)
- : [src_stride] "r" (width), [skip] "r" (skip)
- : "cc", "memory", "q0", "q1", "q2", "d0", "d1", "d2", "d3"
- );
- }
- }
-
- LOG_FUNCTION_NAME_EXIT;
-}
-
-
-
-
/* Preview Thread */
// ---------------------------------------------------------------------------
-void V4LCameraAdapter::returnOutputBuffer(int index)
-{
- LOG_FUNCTION_NAME;
-
- size_t width, height;
- int stride = 4096;
- CameraFrame frame;
-
- getFrameSize(width, height);
-
- android::Mutex::Autolock slock(mSubscriberLock);
-
- android::sp<MediaBuffer>& buffer = mOutBuffers.editItemAt(index);
-
- CameraBuffer* cbuffer = static_cast<CameraBuffer*>(buffer->buffer);
-
- frame.mFrameType = CameraFrame::PREVIEW_FRAME_SYNC;
- frame.mBuffer = cbuffer;
- if (isNeedToUseDecoder()) {
- //We always get NV12 on out, when using decoder.
- frame.mLength = height * stride * 3 / 2;
- } else {
- frame.mLength = CameraHal::calculateBufferSize(mParams.getPreviewFormat(), width, height);
- }
- frame.mAlignment = stride;
- frame.mOffset = buffer->getOffset();
- frame.mTimestamp = buffer->getTimestamp();
- frame.mFrameMask = (unsigned int)CameraFrame::PREVIEW_FRAME_SYNC;
-
- if (mRecording)
- {
- frame.mFrameMask |= (unsigned int)CameraFrame::VIDEO_FRAME_SYNC;
- mFramesWithEncoder++;
- }
-
- int ret = setInitFrameRefCount(frame.mBuffer, frame.mFrameMask);
- if (ret != NO_ERROR) {
- CAMHAL_LOGDB("Error in setInitFrameRefCount %d", ret);
- } else {
- ret = sendFrameToSubscribers(&frame);
- }
- //debugShowFPS();
- LOG_FUNCTION_NAME_EXIT;
-}
-
status_t V4LCameraAdapter::returnBufferToV4L(int id) {
status_t ret = NO_ERROR;
v4l2_buffer buf;
buf.index = id;
+ buf.flags = 0;
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_DMABUF;
buf.m.fd = mCameraBuffers[id].dma_buf_fd;
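The hunk above adds an explicit buf.flags = 0 before VIDIOC_QBUF; with a stack-allocated v4l2_buffer, any field that is not written carries garbage and can trip the kernel's request validation. Below is a minimal sketch of the same idea that zeroes the whole structure up front; the function name and parameters are illustrative, only dma_buf_fd comes from the surrounding code.

#include <string.h>
#include <sys/ioctl.h>
#include <linux/videodev2.h>

/* Illustrative helper, not part of the patch: queue a DMABUF-backed capture
 * buffer with every unused v4l2_buffer field cleared (flags, reserved, ...). */
static int queue_dmabuf(int cam_fd, int index, int dma_buf_fd)
{
    struct v4l2_buffer buf;

    memset(&buf, 0, sizeof(buf));
    buf.index  = index;
    buf.type   = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buf.memory = V4L2_MEMORY_DMABUF;
    buf.m.fd   = dma_buf_fd;

    return ioctl(cam_fd, VIDIOC_QBUF, &buf); /* 0 on success, -1 with errno set */
}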
@@ -1557,29 +1265,7 @@ int V4LCameraAdapter::previewThread()
}
}
- if (isNeedToUseDecoder()){
-
- CAMHAL_LOGV("########### Decoder ###########");
- int inIndex = -1, outIndex = -1;
-
- if (GetFrame(index, filledLen) != NULL) {
- CAMHAL_LOGD("Dequeued buffer from V4L with ID=%d", index);
- mDecoder->queueInputBuffer(index);
- }
-
- while (NO_ERROR == mDecoder->dequeueInputBuffer(inIndex)) {
- returnBufferToV4L(inIndex);
- }
-
- while (NO_ERROR == mDecoder->dequeueOutputBuffer(outIndex)) {
- returnOutputBuffer(outIndex);
- }
-
- CAMHAL_LOGV("########### End Decode ###########");
- goto EXIT;
- }
- else
- {
+ {
fp = GetFrame(index, filledLen);
if(!fp) {
@@ -1608,7 +1294,7 @@ int V4LCameraAdapter::previewThread()
#ifdef SAVE_RAW_FRAMES
// Use this to dump frames to files. Choose one or the other, or else they will be trying to write to the same file.
saveFile( ((unsigned char*)mPreviewBufs[out_index]->mapped), ((width*height)*3/2) ); // Dump the VPE output
-// saveFile( ((unsigned char*)((CameraBuffer*)fp)->mapped), ((width*height)*3/2) ); // Dump the VIP output
+ // saveFile( ((unsigned char*)((CameraBuffer*)fp)->mapped), ((width*height)*3/2) ); // Dump the VIP output
#endif
android::Mutex::Autolock lock(mSubscriberLock);
@@ -1634,9 +1320,8 @@ int V4LCameraAdapter::previewThread()
ret = sendFrameToSubscribers(&frame);
}
}
-
+
EXIT:
-
return ret;
}
@@ -1681,13 +1366,13 @@ void detectVideoDevice(char** video_device_list, int& num_device) {
strncat(dev_list[index],filename,sizeof(DEVICE_NAME));
index++;
}
- } //end of while()
- closedir(d);
- num_device = index;
+ } //end of while()
+ closedir(d);
+ num_device = index;
- for(int i=0; i<index; i++){
- CAMHAL_LOGDB("Video device list::dev_list[%d]= %s",i,dev_list[i]);
- }
+ for(int i=0; i<index; i++){
+ CAMHAL_LOGDB("Video device list::dev_list[%d]= %s",i,dev_list[i]);
+ }
}
}
diff --git a/camera/V4LCameraAdapter/V4LCapabilities.cpp b/camera/V4LCameraAdapter/V4LCapabilities.cpp
index e91776d..fb8e3be 100644
--- a/camera/V4LCameraAdapter/V4LCapabilities.cpp
+++ b/camera/V4LCameraAdapter/V4LCapabilities.cpp
@@ -45,7 +45,7 @@ const char V4LCameraAdapter::DEFAULT_PICTURE_FORMAT[] = "jpeg";
const char V4LCameraAdapter::DEFAULT_PICTURE_SIZE[] = "720x480";
const char V4LCameraAdapter::DEFAULT_PREVIEW_FORMAT[] = "yuv420sp";
const char V4LCameraAdapter::DEFAULT_PREVIEW_SIZE[] = "720x480";
-const char V4LCameraAdapter::DEFAULT_NUM_PREV_BUFS[] = "6";
+const char V4LCameraAdapter::DEFAULT_NUM_PREV_BUFS[] = "8";
const char V4LCameraAdapter::DEFAULT_FRAMERATE[] = "30";
const char V4LCameraAdapter::DEFAULT_FOCUS_MODE[] = "infinity";
const char V4LCameraAdapter::DEFAULT_FRAMERATE_RANGE[] = "30000,30000";
diff --git a/camera/V4LCameraAdapter/V4LM2M.cpp b/camera/V4LCameraAdapter/V4LM2M.cpp
index 25f83b7..4495e20 100644
--- a/camera/V4LCameraAdapter/V4LM2M.cpp
+++ b/camera/V4LCameraAdapter/V4LM2M.cpp
@@ -91,8 +91,8 @@ static int init_queued_count = 0;
static enum v4l2_colorspace src_colorspace, dst_colorspace;
static int src_coplanar = 0, dst_coplanar = 0;
static int srcSize = 0, dstSize = 0, srcSize_uv = 0, dstSize_uv = 0;
-static int src_numbuf = 6;
-static int dst_numbuf = 6;
+static int src_numbuf = 8;
+static int dst_numbuf = 8;
static int startup_threshold = 2;
/*
* Convert a format name string into fourcc and calculate the
diff --git a/camera/inc/DecoderFactory.h b/camera/inc/DecoderFactory.h
deleted file mode 100644
index d5e566f..0000000
--- a/camera/inc/DecoderFactory.h
+++ /dev/null
@@ -1,35 +0,0 @@
-/*
- * Copyright (C) Texas Instruments - http://www.ti.com/
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef DECODERFACTORY_H_
-#define DECODERFACTORY_H_
-
-#include "FrameDecoder.h"
-
-namespace Ti {
-namespace Camera {
-
-class DecoderFactory {
- DecoderFactory();
- ~DecoderFactory();
-public:
- static FrameDecoder* createDecoderByType(DecoderType type, bool forceSwDecoder = false);
-};
-
-} // namespace Camera
-} // namespace Ti
-
-#endif /* DECODERFACTORY_H_ */
diff --git a/camera/inc/FrameDecoder.h b/camera/inc/FrameDecoder.h
index fab0544..79ced45 100644
--- a/camera/inc/FrameDecoder.h
+++ b/camera/inc/FrameDecoder.h
@@ -25,11 +25,6 @@
namespace Ti {
namespace Camera {
-enum DecoderType {
- DecoderType_MJPEG,
- DecoderType_H264
-};
-
enum BufferStatus {
BufferStatus_Unknown,
BufferStatus_InQueued,
@@ -40,25 +35,17 @@ enum BufferStatus {
BufferStatus_OutFilled
};
-enum DecoderState {
- DecoderState_Uninitialized,
- DecoderState_Initialized,
- DecoderState_Running,
- DecoderState_Requested_Stop,
- DecoderState_Stoppped
-};
-
class MediaBuffer: public virtual android::RefBase {
public:
MediaBuffer()
: bufferId(-1), buffer(0), filledLen(0), size(0),
- mOffset(0), mTimestamp(0), mStatus(BufferStatus_Unknown) {
+ mOffset(0), mTimestamp(0) {
}
MediaBuffer(int id, void* buffer, size_t buffSize = 0)
: bufferId(id), buffer(buffer), filledLen(0), size(buffSize),
- mOffset(0), mTimestamp(0), mStatus(BufferStatus_Unknown) {
+ mOffset(0), mTimestamp(0) {
}
virtual ~MediaBuffer() {
@@ -76,14 +63,6 @@ public:
mTimestamp = ts;
}
- BufferStatus getStatus() const {
- return mStatus;
- }
-
- void setStatus(BufferStatus status) {
- mStatus = status;
- }
-
android::Mutex& getLock() const {
return mLock;
}
@@ -99,74 +78,9 @@ public:
private:
uint32_t mOffset;
nsecs_t mTimestamp;
- BufferStatus mStatus;
mutable android::Mutex mLock;
};
-struct DecoderParameters {
- int width;
- int height;
- int inputBufferCount;
- int outputBufferCount;
-};
-
-class FrameDecoder {
-public:
- FrameDecoder();
- virtual ~FrameDecoder();
- void configure(const DecoderParameters& config);
- status_t start();
- void stop();
- void release();
- void flush();
- status_t queueInputBuffer(int id);
- status_t dequeueInputBuffer(int &id);
- status_t queueOutputBuffer(int id);
- status_t dequeueOutputBuffer(int &id);
-
- void registerOutputBuffers(android::Vector< android::sp<MediaBuffer> > *outBuffers) {
- android::AutoMutex lock(mLock);
- mOutQueue.clear();
- mOutBuffers = outBuffers;
- }
-
- void registerInputBuffers(android::Vector< android::sp<MediaBuffer> > *inBuffers) {
- android::AutoMutex lock(mLock);
- mInQueue.clear();
- mInBuffers = inBuffers;
- }
-
- virtual bool getPaddedDimensions(size_t &width, size_t &height) {
- return false;
- }
-
- void setHal(CameraHal* hal) {
- mCameraHal = hal;
- }
-
-protected:
- virtual void doConfigure(const DecoderParameters& config) = 0;
- virtual void doProcessInputBuffer() = 0;
- virtual status_t doStart() = 0;
- virtual void doStop() = 0;
- virtual void doFlush() = 0;
- virtual void doRelease() = 0;
-
- DecoderParameters mParams;
-
- android::Vector<int> mInQueue;
- android::Vector<int> mOutQueue;
-
- android::Vector< android::sp<MediaBuffer> >* mInBuffers;
- android::Vector< android::sp<MediaBuffer> >* mOutBuffers;
-
- CameraHal* mCameraHal;
-
-private:
- DecoderState mState;
- android::Mutex mLock;
-};
-
} // namespace Camera
} // namespace Ti
diff --git a/camera/inc/OmxFrameDecoder.h b/camera/inc/OmxFrameDecoder.h
deleted file mode 100644
index 7cbbf2c..0000000
--- a/camera/inc/OmxFrameDecoder.h
+++ /dev/null
@@ -1,204 +0,0 @@
-/*
- * Copyright (C) Texas Instruments - http://www.ti.com/
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef OMXFRAMEDECODER_H_
-#define OMXFRAMEDECODER_H_
-
-
-#include <utils/threads.h>
-#include <utils/List.h>
-#include "FrameDecoder.h"
-#include "OMX_Types.h"
-#include "OMX_Core.h"
-#include "OMX_Component.h"
-#include "Decoder_libjpeg.h"
-
-namespace Ti {
-namespace Camera {
-
-enum OmxDecoderState {
- OmxDecoderState_Unloaded = 0,
- OmxDecoderState_Loaded,
- OmxDecoderState_Idle,
- OmxDecoderState_Executing,
- OmxDecoderState_Error,
- OmxDecoderState_Invalid,
- OmxDecoderState_Reconfigure,
- OmxDecoderState_Exit
-};
-
-enum PortType {
- PortIndexInput = 0,
- PortIndexOutput = 1
-};
-
-
-struct OmxMessage {
- enum {
- EVENT,
- EMPTY_BUFFER_DONE,
- FILL_BUFFER_DONE,
- }type;
-
- union {
- // if type == EVENT
- struct {
- OMX_PTR appData;
- OMX_EVENTTYPE event;
- OMX_U32 data1;
- OMX_U32 data2;
- OMX_PTR pEventData;
- } eventData;
-
- // if type == (EMPTY_BUFFER_DONE || FILL_BUFFER_DONE)
- struct {
- OMX_PTR appData;
- OMX_BUFFERHEADERTYPE* pBuffHead;
- } bufferData;
- } u;
-};
-
-class CallbackDispatcher;
-
-struct CallbackDispatcherThread : public android::Thread {
- CallbackDispatcherThread(CallbackDispatcher *dispatcher)
- : mDispatcher(dispatcher) {
- }
-
-private:
- CallbackDispatcher *mDispatcher;
-
- bool threadLoop();
-
- CallbackDispatcherThread(const CallbackDispatcherThread &);
- CallbackDispatcherThread &operator=(const CallbackDispatcherThread &);
-};
-
-class CallbackDispatcher
-{
-
-public:
- CallbackDispatcher();
- ~CallbackDispatcher();
-
- void post(const OmxMessage &msg);
- bool loop();
-
-private:
- void dispatch(const OmxMessage &msg);
-
- CallbackDispatcher(const CallbackDispatcher &);
- CallbackDispatcher &operator=(const CallbackDispatcher &);
-
- android::Mutex mLock;
- android::Condition mQueueChanged;
- android::List<OmxMessage> mQueue;
- android::sp<CallbackDispatcherThread> mThread;
- bool mDone;
-};
-
-class OmxFrameDecoder : public FrameDecoder
-{
-
-public:
- OmxFrameDecoder(DecoderType type = DecoderType_MJPEG);
- virtual ~OmxFrameDecoder();
-
- OMX_ERRORTYPE eventHandler(const OMX_EVENTTYPE event, const OMX_U32 data1, const OMX_U32 data2,
- const OMX_PTR pEventData);
- OMX_ERRORTYPE fillBufferDoneHandler(OMX_BUFFERHEADERTYPE* pBuffHead);
- OMX_ERRORTYPE emptyBufferDoneHandler(OMX_BUFFERHEADERTYPE* pBuffHead);
-
- static OMX_ERRORTYPE eventCallback(const OMX_HANDLETYPE component,
- const OMX_PTR appData, const OMX_EVENTTYPE event, const OMX_U32 data1, const OMX_U32 data2,
- const OMX_PTR pEventData);
- static OMX_ERRORTYPE emptyBufferDoneCallback(OMX_HANDLETYPE hComponent, OMX_PTR appData, OMX_BUFFERHEADERTYPE* pBuffHead);
- static OMX_ERRORTYPE fillBufferDoneCallback(OMX_HANDLETYPE hComponent, OMX_PTR appData, OMX_BUFFERHEADERTYPE* pBuffHead);
-
- virtual bool getPaddedDimensions(size_t &width, size_t &height);
-
-protected:
- virtual void doConfigure (const DecoderParameters& config);
- virtual void doProcessInputBuffer();
- virtual status_t doStart();
- virtual void doStop();
- virtual void doFlush();
- virtual void doRelease();
-
-private:
- status_t setComponentRole();
- status_t enableGrallockHandles();
- status_t allocateBuffersOutput();
- void freeBuffersOnOutput();
- void freeBuffersOnInput();
- status_t doPortReconfigure();
- void dumpPortSettings(PortType port);
- status_t getAndConfigureDecoder();
- status_t configureJpegPorts(int width, int height);
- status_t switchToIdle();
- status_t allocateBuffersInput();
- status_t disablePortSync(int port);
- status_t enablePortSync(int port);
- void queueOutputBuffers();
- status_t setVideoOutputFormat(OMX_U32 width, OMX_U32 height);
-
-
- status_t omxInit();
- status_t omxGetHandle(OMX_HANDLETYPE *handle, OMX_PTR pAppData, OMX_CALLBACKTYPE & callbacks);
- OmxDecoderState getOmxState() { return mCurrentState; }
- status_t commitState(OmxDecoderState state) { mPreviousState = mCurrentState; mCurrentState = state; return NO_ERROR; }
- status_t setVideoPortFormatType(
- OMX_U32 portIndex,
- OMX_VIDEO_CODINGTYPE compressionFormat,
- OMX_COLOR_FORMATTYPE colorFormat);
- status_t omxGetParameter(OMX_INDEXTYPE index, OMX_PTR ptr);
- status_t omxSetParameter(OMX_INDEXTYPE index, OMX_PTR ptr);
- status_t omxSendCommand(OMX_COMMANDTYPE cmd, OMX_S32 param);
- status_t omxGetConfig(OMX_INDEXTYPE index, OMX_PTR ptr);
- status_t omxSetConfig(OMX_INDEXTYPE index, OMX_PTR ptr);
- status_t omxFillThisBuffer(OMX_BUFFERHEADERTYPE *pOutBufHdr);
- status_t omxEmptyThisBuffer(android::sp<MediaBuffer>& inBuffer, OMX_BUFFERHEADERTYPE *pInBufHdr);
- void omxDumpPortSettings(OMX_PARAM_PORTDEFINITIONTYPE& def);
- void omxDumpBufferHeader (OMX_BUFFERHEADERTYPE* bh);
- status_t omxSwitchToExecutingSync();
-
- bool mOmxInialized;
-
- OMX_HANDLETYPE mHandleComp;
- OmxDecoderState mCurrentState;
- OmxDecoderState mPreviousState;
-
- // Condition and Mutex used during OpenMAX state transitions & command completion
- android::Condition mStateCondition;
- android::Mutex mHwLock;
-
- android::Vector<OMX_BUFFERHEADERTYPE*> mOutBufferHeaders;
- android::Vector<OMX_BUFFERHEADERTYPE*> mInBufferHeaders;
-
- CallbackDispatcher mDispatcher;
-
- bool mStopping;
- DecoderType mDecoderType;
-
- // If true we will search for DHT in JPEG buffer
- bool mIsNeedCheckDHT;
- // If true we always append DHT to JPEG buffer
- bool mAlwaysAppendDHT;
-};
-
-} //namespace Camera
-} //namespace Ti
-#endif /* OMXFRAMEDECODER_H_ */
diff --git a/camera/inc/SwFrameDecoder.h b/camera/inc/SwFrameDecoder.h
deleted file mode 100644
index f123940..0000000
--- a/camera/inc/SwFrameDecoder.h
+++ /dev/null
@@ -1,47 +0,0 @@
-/*
- * Copyright (C) Texas Instruments - http://www.ti.com/
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef SWFRAMEDECODER_H_
-#define SWFRAMEDECODER_H_
-
-#include "FrameDecoder.h"
-#include "Decoder_libjpeg.h"
-
-namespace Ti {
-namespace Camera {
-
-class SwFrameDecoder: public FrameDecoder {
-public:
- SwFrameDecoder();
- virtual ~SwFrameDecoder();
-
-protected:
- virtual void doConfigure(const DecoderParameters& config);
- virtual void doProcessInputBuffer();
- virtual status_t doStart() { return NO_ERROR; }
- virtual void doStop() { }
- virtual void doFlush() { }
- virtual void doRelease() { }
-
-private:
- int mjpegWithHdrSize;
- Decoder_libjpeg mJpgdecoder;
- unsigned char* mJpegWithHeaderBuffer;
-};
-
-} // namespace Camera
-} // namespace Ti
-#endif /* SWFRAMEDECODER_H_ */
diff --git a/camera/inc/V4LCameraAdapter/V4LCameraAdapter.h b/camera/inc/V4LCameraAdapter/V4LCameraAdapter.h
index aa4993b..728032b 100644
--- a/camera/inc/V4LCameraAdapter/V4LCameraAdapter.h
+++ b/camera/inc/V4LCameraAdapter/V4LCameraAdapter.h
@@ -199,7 +199,6 @@ private:
status_t v4lIoctl(int, int, void*);
status_t v4lInitMmap(int& count, int width, int height);
- status_t v4lInitUsrPtr(int&);
status_t v4lInitDmaBuf(int& count, int width, int height);
status_t v4lStartStreaming();
status_t v4lStopStreaming(int nBufferCount);
@@ -207,8 +206,6 @@ private:
status_t restartPreview();
status_t applyFpsValue();
status_t returnBufferToV4L(int id);
- void returnOutputBuffer(int index);
- bool isNeedToUseDecoder() const;
int mPreviewBufferCount;
int mPreviewBufferCountQueueable;
@@ -227,11 +224,6 @@ private:
int mFrameCount;
int mLastFrameCount;
- unsigned int mIter;
- nsecs_t mLastFPSTime;
-
- //variables holding the estimated framerate
- float mFPS, mLastFPS;
int mSensorIndex;
@@ -247,7 +239,6 @@ private:
int nDequeued;
int mQueuedOutputBuffers;
- FrameDecoder* mDecoder;
android::Vector< android::sp<MediaBuffer> > mInBuffers;
android::Vector< android::sp<MediaBuffer> > mOutBuffers;
diff --git a/hwcomposer/display.c b/hwcomposer/display.c
index eddb716..aabaf66 100644
--- a/hwcomposer/display.c
+++ b/hwcomposer/display.c
@@ -627,6 +627,17 @@ int init_primary_display(omap_hwc_device_t *hwc_dev)
return -EINVAL;
}
+ i = drmSetClientCap(drm_fd, DRM_CLIENT_CAP_UNIVERSAL_PLANES, 1);
+ if (i) {
+ ALOGE("failed to set DRM_CLIENT_CAP_UNIVERSAL_PLANES");
+ goto close;
+ }
+ i = drmSetClientCap(drm_fd, DRM_CLIENT_CAP_ATOMIC, 1);
+ if (i) {
+ ALOGE("omapdrm is not capable of atomic modesetting");
+ goto close;
+ }
+
resources = drmModeGetResources(drm_fd);
if (!resources) {
ALOGE("Failed to get resources: %s\n", strerror(errno));
@@ -932,7 +943,7 @@ int update_display(omap_hwc_device_t *ctx, int disp,
if (!(ctx->displays[disp]->composition.use_blitter)) {
if( is_lcd_display(ctx, disp)) {
- set_nv12_planes(ctx, &nv12_bo, width, disp);
+ //set_nv12_planes(ctx, &nv12_bo, width, disp);
}
}
@@ -944,16 +955,30 @@ int update_display(omap_hwc_device_t *ctx, int disp,
}
/*
- * We flip the back-buffer here, drmModePageFlip() assumes that the
- * crtc is already connected to a connector, but this hasn't been done
- * yet, so first up we connect them by calling drmModeSetCrtc() and do
- * page flips on subsequent iterations.
- * FIXME: Should we move this someplace else?
+ * On the first frame we need to connect the CRTC to a connector, which we
+ * do by calling drmModeSetCrtc(). For atomic page-flips the plane
+ * properties must also be set explicitly, so we use drmModeSetPlane() to
+ * program them. Both operations happen on the first frame; on subsequent
+ * page-flips we update only the drm fb object.
+ * TODO: handle any updates to width and height
*/
if (!kdisp->is_crtc_set) {
+ ALOGI("Setting up plane %d and connecting connector %d to crtc %d",
+ kdisp->plane_id, kdisp->con->connector_id, kdisp->crtc_id);
+
ret = drmModeSetCrtc(ctx->drm_fd, kdisp->crtc_id, fb_info.fb_id, 0, 0,
&kdisp->con->connector_id, 1, kdisp->mode);
if (ret) {
+ ALOGE("failed to set crtc (%d) to connector (%d)", kdisp->crtc_id,
+ kdisp->con->connector_id);
+ return ret;
+ }
+
+ ret = drmModeSetPlane(ctx->drm_fd, kdisp->plane_id, kdisp->crtc_id,
+ fb_info.fb_id, 0,
+ 0, 0, fb_info.width, fb_info.height,
+ 0, 0, fb_info.width << 16, fb_info.height << 16);
+ if (ret) {
ALOGE("cannot set CRTC for connector %u (%d): %m\n", kdisp->con->connector_id, ret);
return ret;
}
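For readers following the new first-frame path: drmModeSetCrtc() ties the connector to the CRTC with the initial framebuffer, and drmModeSetPlane() programs the plane on top of it. The destination rectangle is in whole pixels while the source rectangle is in 16.16 fixed point, which is why the hunk shifts width and height left by 16. A condensed sketch of that sequence (names are illustrative, not taken from the HAL):

#include <stdint.h>
#include <xf86drmMode.h>

/* Illustrative only: mirrors the first-frame setup added above. */
static int first_frame_setup(int drm_fd, uint32_t crtc_id, uint32_t conn_id,
                             uint32_t plane_id, uint32_t fb_id,
                             uint32_t w, uint32_t h, drmModeModeInfoPtr mode)
{
    int ret = drmModeSetCrtc(drm_fd, crtc_id, fb_id, 0, 0, &conn_id, 1, mode);
    if (ret)
        return ret;

    /* destination rect in pixels, source rect in 16.16 fixed point */
    return drmModeSetPlane(drm_fd, plane_id, crtc_id, fb_id, 0,
                           0, 0, w, h,
                           0, 0, w << 16, h << 16);
}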
@@ -970,15 +995,24 @@ int update_display(omap_hwc_device_t *ctx, int disp,
}
fence_fd = timeline_create_fence(&display->retire_sync, "hwc_retire_fence2",
TWO_FLIP_EVENTS);
- ret = drmModePageFlip(ctx->drm_fd, kdisp->crtc_id, fb_info.fb_id,
- DRM_MODE_PAGE_FLIP_EVENT, display);
+
+ drmModeAtomicReqPtr req = drmModeAtomicAlloc();
+
+ /* TODO: query the plane's FB_ID property id instead of hard-coding it to 14 */
+ drmModeAtomicAddProperty(req, kdisp->plane_id, 14, fb_info.fb_id);
+ ret = drmModeAtomicCommit(ctx->drm_fd, req,
+ DRM_MODE_PAGE_FLIP_EVENT | DRM_MODE_ATOMIC_NONBLOCK, display);
+
if (ret) {
- ALOGE("cannot flip on connector %d", kdisp->crtc_id);
+ ALOGE("cannot do atomic commit on plane %d", kdisp->plane_id);
pthread_mutex_unlock(&display->lock);
close(fence_fd);
fence_fd = -1;
goto fb_cleanup;
}
+
+ drmModeAtomicFree(req);
+
kdisp->fb_bufs.next = fb_info;
kdisp->fb_bufs.updated = true;
display->is_flip_pending = true;
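The commit path above still hard-codes the FB_ID property to 14 (see the TODO) and frees the atomic request only on the success path. A hedged sketch of how both could be handled with stock libdrm calls; the helper names are invented for illustration and are not part of this patch:

#include <string.h>
#include <stdint.h>
#include <xf86drm.h>
#include <xf86drmMode.h>

/* Resolve a plane property id by name once at init time. */
static uint32_t find_plane_prop(int drm_fd, uint32_t plane_id, const char *name)
{
    uint32_t prop_id = 0;
    drmModeObjectPropertiesPtr props =
            drmModeObjectGetProperties(drm_fd, plane_id, DRM_MODE_OBJECT_PLANE);
    if (!props)
        return 0;

    for (uint32_t i = 0; i < props->count_props && !prop_id; i++) {
        drmModePropertyPtr p = drmModeGetProperty(drm_fd, props->props[i]);
        if (p && !strcmp(p->name, name))
            prop_id = p->prop_id;
        drmModeFreeProperty(p);
    }
    drmModeFreeObjectProperties(props);
    return prop_id;   /* e.g. find_plane_prop(fd, plane_id, "FB_ID") */
}

/* Non-blocking page-flip via atomic commit; the request is freed on all paths. */
static int flip_fb(int drm_fd, uint32_t plane_id, uint32_t fb_prop_id,
                   uint32_t fb_id, void *user_data)
{
    drmModeAtomicReqPtr req = drmModeAtomicAlloc();
    if (!req)
        return -1;

    drmModeAtomicAddProperty(req, plane_id, fb_prop_id, fb_id);
    int ret = drmModeAtomicCommit(drm_fd, req,
                  DRM_MODE_PAGE_FLIP_EVENT | DRM_MODE_ATOMIC_NONBLOCK, user_data);
    drmModeAtomicFree(req);
    return ret;
}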
@@ -1159,6 +1193,9 @@ int add_external_hdmi_display(omap_hwc_device_t *hwc_dev)
pthread_cond_init(&display->cond_flip, NULL);
display->is_flip_pending = false;
+ /* get plane id */
+ get_plane_resource(hwc_dev, 1);
+
/* SurfaceFlinger currently doesn't unblank external display on reboot.
* Unblank HDMI display by default.
* See SurfaceFlinger::readyToRun() function.
@@ -1519,36 +1556,29 @@ void free_displays(omap_hwc_device_t *hwc_dev)
int get_plane_resource(omap_hwc_device_t *hwc_dev, int disp)
{
- int i;
- int drm_fd = hwc_dev->drm_fd;
- drmModePlaneRes *planes_res;
- display_t *display = hwc_dev->displays[disp];
- composition_t *comp = &display->composition;
-
- planes_res = drmModeGetPlaneResources(drm_fd);
-
- if (disp != HWC_DISPLAY_PRIMARY)
- ALOGE("DRM Planes composition is only \
- supported for primary displays\n");
-
- if (!planes_res) {
- ALOGE("Failed to get plane\n");
- goto free_plane;
- }
-
- for (i = 0; i < planes_res->count_planes; i++) {
- drmModeGetPlane(drm_fd,
- planes_res->planes[i]);
-
- comp->plane_info[i].plane_id = planes_res->planes[i];
- }
+ int err = 0;
+ int i = 0;
+ int drm_fd = hwc_dev->drm_fd;
+ drmModePlaneRes *planes_res;
+
+ planes_res = drmModeGetPlaneResources(drm_fd);
+ if (!planes_res) {
+ ALOGE("failed to get plane");
+ err = -1;
+ goto free_plane;
+ }
+ for (i = 0; i < planes_res->count_planes; i++) {
+ drmModeGetPlane(drm_fd, planes_res->planes[i]);
+ }
- return 0;
+ /* FIXME: assuming the planes are returned in display order for now */
+ hwc_dev->displays[disp]->disp_link.plane_id = planes_res->planes[disp];
+ ALOGE("plane id: %d", planes_res->planes[disp]);
free_plane:
- drmModeFreePlaneResources(planes_res);
+ drmModeFreePlaneResources(planes_res);
- return 1;
+ return err;
}
int set_nv12_planes(omap_hwc_device_t *hwc_dev, uint32_t *bo, uint32_t width, int disp)
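On the FIXME in get_plane_resource() above (planes assumed to be in display order): each drmModePlane carries a possible_crtcs bitmask indexed by the CRTC's position in drmModeRes::crtcs, so a plane can be matched to its CRTC instead of relying on enumeration order. A sketch under that assumption, with an invented helper name:

#include <stdint.h>
#include <xf86drmMode.h>

/* Pick the first plane whose possible_crtcs mask covers crtc_index
 * (the CRTC's index within drmModeRes::crtcs). Returns 0 if none found. */
static uint32_t pick_plane_for_crtc(int drm_fd, int crtc_index)
{
    uint32_t plane_id = 0;
    drmModePlaneRes *pres = drmModeGetPlaneResources(drm_fd);
    if (!pres)
        return 0;

    for (uint32_t i = 0; i < pres->count_planes && !plane_id; i++) {
        drmModePlane *plane = drmModeGetPlane(drm_fd, pres->planes[i]);
        if (plane && (plane->possible_crtcs & (1u << crtc_index)))
            plane_id = plane->plane_id;
        drmModeFreePlane(plane);
    }
    drmModeFreePlaneResources(pres);
    return plane_id;
}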
diff --git a/hwcomposer/display.h b/hwcomposer/display.h
index b1cd407..b4ccc57 100644
--- a/hwcomposer/display.h
+++ b/hwcomposer/display.h
@@ -168,6 +168,7 @@ typedef struct kms_display {
drmModeModeInfoPtr mode;
drmEventContext evctx;
drmModeFB fb;
+ int plane_id;
fb_buffers_t fb_bufs;
uint32_t last_plane_fb;
diff --git a/hwcomposer/hal_public.h b/hwcomposer/hal_public.h
index eabf911..09365a7 100644
--- a/hwcomposer/hal_public.h
+++ b/hwcomposer/hal_public.h
@@ -32,7 +32,7 @@
#include <hardware/memtrack.h>
#define ALIGN(x,a) (((x) + (a) - 1L) & ~((a) - 1L))
-#define HW_ALIGN 32
+#define HW_ALIGN 16
/* This can be tuned down as appropriate for the SOC.
*
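The HW_ALIGN change from 32 to 16 relaxes how far strides get padded by the ALIGN macro above. A quick, self-contained check of the difference for a 720-pixel-wide preview (the default preview width in this patch), purely illustrative:

#include <stdio.h>

#define ALIGN(x,a) (((x) + (a) - 1L) & ~((a) - 1L))

int main(void)
{
    printf("ALIGN(720, 32)  = %ld\n", ALIGN(720, 32));   /* 736: 32-byte padding */
    printf("ALIGN(720, 16)  = %ld\n", ALIGN(720, 16));   /* 720: no padding needed */
    printf("ALIGN(1281, 16) = %ld\n", ALIGN(1281, 16));  /* 1296: rounds up to 16 */
    return 0;
}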
diff --git a/omx/videoencode/omx_h264_enc/src/omx_H264videoencoder.c b/omx/videoencode/omx_h264_enc/src/omx_H264videoencoder.c
index 60e799a..44c8a60 100644
--- a/omx/videoencode/omx_h264_enc/src/omx_H264videoencoder.c
+++ b/omx/videoencode/omx_h264_enc/src/omx_H264videoencoder.c
@@ -568,6 +568,10 @@ static OMX_ERRORTYPE OMXH264VE_SetParameter(OMX_HANDLETYPE hComponent,
pH264VEComp->sBase.pPorts[nPortIndex]->sPortDef.nBufferCountActual = pPortDef->nBufferCountActual;
/* if frame height/width changes then change the buffer requirements accordingly */
if( nPortIndex == OMX_H264VE_INPUT_PORT ) {
+ if (pH264VEComp->bInputMetaDataBufferMode) {
+ pPortDef->format.video.nStride = ALIGN(pPortDef->format.video.nStride, HW_ALIGN);
+ }
+
OMX_CHECK(((pPortDef->format.video.nFrameWidth & 0x0F) == 0), OMX_ErrorUnsupportedSetting); /*Width should be multiple of 16*/
if( pH264VEComp->pVidEncStaticParams->videnc2Params.inputContentType == IVIDEO_PROGRESSIVE ) {
OMX_CHECK(((pPortDef->format.video.nFrameHeight & 0x01) == 0), OMX_ErrorUnsupportedSetting); /* Height should be a multiple of 2 */
@@ -613,8 +617,7 @@ static OMX_ERRORTYPE OMXH264VE_SetParameter(OMX_HANDLETYPE hComponent,
pH264VEComp->pVidEncStaticParams->videnc2Params.maxWidth = pPortDef->format.video.nFrameWidth;
pH264VEComp->pVidEncDynamicParams->videnc2DynamicParams.inputWidth = pPortDef->format.video.nFrameWidth;
- pH264VEComp->pVidEncDynamicParams->videnc2DynamicParams.captureWidth =
- pH264VEComp->pVidEncDynamicParams->videnc2DynamicParams.inputWidth;
+ pH264VEComp->pVidEncDynamicParams->videnc2DynamicParams.captureWidth = pPortDef->format.video.nStride;
pH264VEComp->pVidEncDynamicParams->videnc2DynamicParams.inputHeight = pPortDef->format.video.nFrameHeight;
pH264VEComp->sBase.pPorts[OMX_H264VE_OUTPUT_PORT]->sPortDef.nBufferSize =
@@ -627,8 +630,9 @@ static OMX_ERRORTYPE OMXH264VE_SetParameter(OMX_HANDLETYPE hComponent,
pH264VEComp->sBase.pPorts[OMX_H264VE_INPUT_PORT]->sPortDef.nBufferSize =
(pPortDef->format.video.nFrameHeight * pPortDef->format.video.nStride * 3) / 2;
- if (pH264VEComp->bInputMetaDataBufferMode)
+ if (pH264VEComp->bInputMetaDataBufferMode) {
pH264VEComp->sBase.pPorts[OMX_H264VE_INPUT_PORT]->sPortDef.nBufferSize = sizeof(OMX_MetaDataBuffer);
+ }
/* read-only field value; update it with the frame height. For now the codec does not support sub-frame processing */
pLocalPortDef->format.video.nSliceHeight=pPortDef->format.video.nFrameHeight;
@@ -1017,6 +1021,8 @@ static OMX_ERRORTYPE OMXH264VE_SetParameter(OMX_HANDLETYPE hComponent,
pH264VEComp->hCC = (void *) ((IMG_gralloc_module_public_t const *)module);
pH264VEComp->bInputMetaDataBufferMode = OMX_TRUE;
pH264VEComp->sBase.pPorts[OMX_H264VE_INPUT_PORT]->sPortDef.nBufferSize = sizeof(OMX_MetaDataBuffer);
+ pH264VEComp->sBase.pPorts[OMX_H264VE_INPUT_PORT]->sPortDef.format.video.nStride =
+ ALIGN(pH264VEComp->sBase.pPorts[OMX_H264VE_INPUT_PORT]->sPortDef.format.video.nStride, HW_ALIGN);
}
} else {
eError = OMX_ErrorUnsupportedSetting;
@@ -1832,10 +1838,6 @@ static OMX_ERRORTYPE OMXH264VE_DataNotify(OMX_HANDLETYPE hComponent)
(XDAS_Int8 *)&(pH264VEComp->pCodecInBufferBackupArray[InBufferHdrIndex].sMemHdr[0]);
pH264VEComp->pVedEncInBufs->planeDesc[i].memType = XDM_MEMTYPE_TILEDPAGE;
- pH264VEComp->pVedEncInBufs->imagePitch[0] = pH264VEComp->sBase.pPorts[OMX_H264VE_INPUT_PORT]->sPortDef.format.video.nFrameWidth;
- if (pH264VEComp->pVedEncInBufs->imagePitch[0] % 32) {
- pH264VEComp->pVedEncInBufs->imagePitch[0] += (pH264VEComp->pVedEncInBufs->imagePitch[0] % 32);
- }
}
}
} else {
@@ -1844,26 +1846,23 @@ static OMX_ERRORTYPE OMXH264VE_DataNotify(OMX_HANDLETYPE hComponent)
pH264VEComp->pVedEncInBufs->planeDesc[i].buf = (XDAS_Int8 *)&(((OMXBase_BufHdrPvtData *)(pH264VEComp->pCodecInBufferArray[InBufferHdrIndex]->pPlatformPrivate))->sMemHdr[0]);
pH264VEComp->pVedEncInBufs->planeDesc[i].memType = XDM_MEMTYPE_RAW;
- pH264VEComp->pVedEncInBufs->imagePitch[0] = pH264VEComp->sBase.pPorts[OMX_H264VE_INPUT_PORT]->sPortDef.format.video.nFrameWidth;
}
-
- pH264VEComp->pVedEncInBufs->planeDesc[i].bufSize.bytes = (pH264VEComp->sBase.pPorts[OMX_H264VE_INPUT_PORT]->sPortDef.format.video.nFrameWidth) * (pH264VEComp->sBase.pPorts[OMX_H264VE_INPUT_PORT]->sPortDef.format.video.nFrameHeight);
+ pH264VEComp->pVedEncInBufs->imagePitch[0] = pH264VEComp->sBase.pPorts[OMX_H264VE_INPUT_PORT]->sPortDef.format.video.nStride;
+ pH264VEComp->pVedEncInBufs->planeDesc[i].bufSize.bytes = (pH264VEComp->pVedEncInBufs->imagePitch[0]) * (pH264VEComp->sBase.pPorts[OMX_H264VE_INPUT_PORT]->sPortDef.format.video.nFrameHeight);
} else if( i == 1 ) {
if (pH264VEComp->bInputMetaDataBufferMode) {
//Nothing to be done; color conversion and fd translation are done during the plane0 processing
memcpy(&(pH264VEComp->pCodecInBufferBackupArray[InBufferHdrIndex].sMemHdr[1]), &(pH264VEComp->pCodecInBufferBackupArray[InBufferHdrIndex].sMemHdr[0]), sizeof(MemHeader));
- pH264VEComp->pCodecInBufferBackupArray[InBufferHdrIndex].sMemHdr[1].offset = ((pH264VEComp->sBase.pPorts[OMX_H264VE_INPUT_PORT]->sPortDef.format.video.nFrameWidth) * (pH264VEComp->sBase.pPorts[OMX_H264VE_INPUT_PORT]->sPortDef.format.video.nFrameHeight));
+ pH264VEComp->pCodecInBufferBackupArray[InBufferHdrIndex].sMemHdr[1].offset =
+ ((pH264VEComp->sBase.pPorts[OMX_H264VE_INPUT_PORT]->sPortDef.format.video.nStride) *
+ (pH264VEComp->sBase.pPorts[OMX_H264VE_INPUT_PORT]->sPortDef.format.video.nFrameHeight));
pH264VEComp->pVedEncInBufs->planeDesc[i].buf =
(XDAS_Int8 *)&(pH264VEComp->pCodecInBufferBackupArray[InBufferHdrIndex].sMemHdr[1]);
pH264VEComp->pVedEncInBufs->planeDesc[i].memType = XDM_MEMTYPE_TILEDPAGE;
- pH264VEComp->pVedEncInBufs->imagePitch[1] = pH264VEComp->sBase.pPorts[OMX_H264VE_INPUT_PORT]->sPortDef.format.video.nFrameWidth;
- if (pH264VEComp->pVedEncInBufs->imagePitch[1] % 32) {
- pH264VEComp->pVedEncInBufs->imagePitch[1] += (pH264VEComp->pVedEncInBufs->imagePitch[1] % 32);
- }
} else {
memcpy(&((OMXBase_BufHdrPvtData *)(pH264VEComp->pCodecInBufferArray[InBufferHdrIndex]->pPlatformPrivate))->sMemHdr[1], &((OMXBase_BufHdrPvtData *)(pH264VEComp->pCodecInBufferArray[InBufferHdrIndex]->pPlatformPrivate))->sMemHdr[0], sizeof(MemHeader));
@@ -1872,9 +1871,9 @@ static OMX_ERRORTYPE OMXH264VE_DataNotify(OMX_HANDLETYPE hComponent)
pH264VEComp->pVedEncInBufs->planeDesc[i].buf = (XDAS_Int8 *)&(((OMXBase_BufHdrPvtData *)(pH264VEComp->pCodecInBufferArray[InBufferHdrIndex]->pPlatformPrivate))->sMemHdr[1]);
pH264VEComp->pVedEncInBufs->planeDesc[i].memType = XDM_MEMTYPE_RAW;
- pH264VEComp->pVedEncInBufs->imagePitch[1] = pH264VEComp->sBase.pPorts[OMX_H264VE_INPUT_PORT]->sPortDef.format.video.nFrameWidth;
}
- pH264VEComp->pVedEncInBufs->planeDesc[i].bufSize.bytes = (pH264VEComp->sBase.pPorts[OMX_H264VE_INPUT_PORT]->sPortDef.format.video.nFrameWidth) * (pH264VEComp->sBase.pPorts[OMX_H264VE_INPUT_PORT]->sPortDef.format.video.nFrameHeight) / 2;
+ pH264VEComp->pVedEncInBufs->imagePitch[1] = pH264VEComp->sBase.pPorts[OMX_H264VE_INPUT_PORT]->sPortDef.format.video.nStride;
+ pH264VEComp->pVedEncInBufs->planeDesc[i].bufSize.bytes = (pH264VEComp->pVedEncInBufs->imagePitch[1]) * (pH264VEComp->sBase.pPorts[OMX_H264VE_INPUT_PORT]->sPortDef.format.video.nFrameHeight) / 2;
} else {
eError = OMX_ErrorUnsupportedSetting;
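The encoder hunks above consistently replace nFrameWidth with nStride when computing imagePitch, the plane sizes, and the chroma-plane offset, so that gralloc buffers with padded rows are described correctly to the codec. A compact sketch of that NV12 layout arithmetic, with illustrative names that are not the OMX component's:

#include <stddef.h>
#include <stdint.h>

#define ALIGN(x,a) (((x) + (a) - 1L) & ~((a) - 1L))
#define HW_ALIGN 16

struct nv12_layout {
    size_t pitch;      /* bytes per row, same for luma and chroma      */
    size_t y_bytes;    /* luma plane size: pitch * height              */
    size_t uv_offset;  /* chroma plane starts right after the luma     */
    size_t uv_bytes;   /* chroma plane size: pitch * height / 2        */
    size_t total;      /* buffer size equivalent: pitch * height * 3/2 */
};

static struct nv12_layout nv12_layout_for(uint32_t width, uint32_t height)
{
    struct nv12_layout l;
    l.pitch     = ALIGN(width, HW_ALIGN);
    l.y_bytes   = l.pitch * height;
    l.uv_offset = l.y_bytes;
    l.uv_bytes  = l.pitch * height / 2;
    l.total     = l.y_bytes + l.uv_bytes;
    return l;
}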
diff --git a/omx/videoencode/omx_h264_enc/src/omx_H264videoencoderutils.c b/omx/videoencode/omx_h264_enc/src/omx_H264videoencoderutils.c
index 4fe289c..4632fde 100644
--- a/omx/videoencode/omx_h264_enc/src/omx_H264videoencoderutils.c
+++ b/omx/videoencode/omx_h264_enc/src/omx_H264videoencoderutils.c
@@ -436,7 +436,7 @@ OMX_ERRORTYPE OMXH264VE_SetBufferDesc(OMX_HANDLETYPE hComponent, OMX_U32 nPortIn
pH264VEComp->pVedEncInBufs->imageRegion.topLeft.x = 0;
pH264VEComp->pVedEncInBufs->imageRegion.topLeft.y = 0;
- pH264VEComp->pVedEncInBufs->imageRegion.bottomRight.x = extWidth;
+ pH264VEComp->pVedEncInBufs->imageRegion.bottomRight.x = extStride;
pH264VEComp->pVedEncInBufs->activeFrameRegion.topLeft.x = 0;
pH264VEComp->pVedEncInBufs->activeFrameRegion.topLeft.y = 0;