author    Tianmi Chen <tianmi.chen@intel.com>   2015-04-29 17:10:50 +0800
committer Pat Tjin <pattjin@google.com>         2015-06-01 20:05:21 +0000
commit    1175ce579a69e847027026f615a053af76b792d5 (patch)
tree      206253f86110d1745f19af19e83c1b971f97c4a3
parent    4a085deb383b88ece9d4c157cc3e1242da93e165 (diff)
enable meta data mode on FUGU

Use meta data mode for adaptive playback.

Bug: 19197299
Change-Id: I13d992a1c74c11ce5b2fd7a9f1e543789befc947
Signed-off-by: Tianmi Chen <tianmi.chen@intel.com>
-rw-r--r--  videodecoder/VideoDecoderAVC.cpp        50
-rw-r--r--  videodecoder/VideoDecoderBase.cpp      264
-rwxr-xr-x  videodecoder/VideoDecoderBase.h          7
-rw-r--r--  videodecoder/VideoDecoderDefs.h          2
-rw-r--r--  videodecoder/VideoDecoderInterface.h     3
-rw-r--r--  videodecoder/VideoDecoderMPEG2.cpp      28
-rw-r--r--  videodecoder/VideoDecoderMPEG4.cpp      34
-rw-r--r--  videodecoder/VideoDecoderVP8.cpp        44
-rw-r--r--  videodecoder/VideoDecoderWMV.cpp        35
9 files changed, 371 insertions, 96 deletions
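
The whole change hinges on one new config flag, WANT_STORE_META_DATA, added in VideoDecoderDefs.h and latched into mStoreMetaData by VideoDecoderBase::start()/reset(). A minimal client-side sketch of enabling it follows; this is not part of the patch, and it assumes the decoder interface class is named IVideoDecoder and that the caller fills the rest of VideoConfigBuffer as it did before.

// Sketch only (not in this patch): enabling meta data mode for adaptive
// playback. Assumes the interface class is IVideoDecoder; the rest of
// VideoConfigBuffer is prepared by the caller as usual.
#include "VideoDecoderDefs.h"
#include "VideoDecoderInterface.h"

Decode_Status startAdaptivePlayback(IVideoDecoder *decoder, VideoConfigBuffer *config) {
    // WANT_STORE_META_DATA (0x400000) is the flag added in VideoDecoderDefs.h;
    // the decoder only acts on it together with USE_NATIVE_GRAPHIC_BUFFER.
    config->flag |= USE_NATIVE_GRAPHIC_BUFFER | WANT_STORE_META_DATA;
    return decoder->start(config);
}
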
diff --git a/videodecoder/VideoDecoderAVC.cpp b/videodecoder/VideoDecoderAVC.cpp
index ad4ad33..f16bfe9 100644
--- a/videodecoder/VideoDecoderAVC.cpp
+++ b/videodecoder/VideoDecoderAVC.cpp
@@ -718,6 +718,10 @@ void VideoDecoderAVC::updateFormatInfo(vbp_data_h264 *data) {
ITRACE("updateFormatInfo: current size: %d x %d, new size: %d x %d",
mVideoFormatInfo.width, mVideoFormatInfo.height, width, height);
+ if ((mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER) && mStoreMetaData) {
+ pthread_mutex_lock(&mFormatLock);
+ }
+
if ((mVideoFormatInfo.width != width ||
mVideoFormatInfo.height != height) &&
width && height) {
@@ -781,7 +785,19 @@ void VideoDecoderAVC::updateFormatInfo(vbp_data_h264 *data) {
ITRACE("actualBufferNeeded =%d", mVideoFormatInfo.actualBufferNeeded);
- mVideoFormatInfo.valid = true;
+ if ((mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER) && mStoreMetaData) {
+ if (mSizeChanged
+ || isWiDiStatusChanged()
+ || (mVideoFormatInfo.actualBufferNeeded > mConfigBuffer.surfaceNumber)) {
+ mVideoFormatInfo.valid = false;
+ } else {
+ mVideoFormatInfo.valid = true;
+ }
+
+ pthread_mutex_unlock(&mFormatLock);
+ } else {
+ mVideoFormatInfo.valid = true;
+ }
setRenderRect();
}
@@ -813,20 +829,38 @@ bool VideoDecoderAVC::isWiDiStatusChanged() {
}
Decode_Status VideoDecoderAVC::handleNewSequence(vbp_data_h264 *data) {
+ Decode_Status status;
updateFormatInfo(data);
- bool needFlush = false;
+
bool rawDataMode = !(mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER);
+ if (rawDataMode && mSizeChanged) {
+ flushSurfaceBuffers();
+ mSizeChanged = false;
+ return DECODE_FORMAT_CHANGE;
+ }
+ bool needFlush = false;
if (!rawDataMode) {
- needFlush = (mVideoFormatInfo.width > mVideoFormatInfo.surfaceWidth)
- || (mVideoFormatInfo.height > mVideoFormatInfo.surfaceHeight)
- || isWiDiStatusChanged()
- || (mVideoFormatInfo.actualBufferNeeded > mConfigBuffer.surfaceNumber);
+ if (mStoreMetaData) {
+ needFlush = mSizeChanged
+ || isWiDiStatusChanged()
+ || (mVideoFormatInfo.actualBufferNeeded > mConfigBuffer.surfaceNumber);
+ } else {
+ needFlush = (mVideoFormatInfo.width > mVideoFormatInfo.surfaceWidth)
+ || (mVideoFormatInfo.height > mVideoFormatInfo.surfaceHeight)
+ || isWiDiStatusChanged()
+ || (mVideoFormatInfo.actualBufferNeeded > mConfigBuffer.surfaceNumber);
+ }
}
- if (needFlush || (rawDataMode && mSizeChanged)) {
+ if (needFlush) {
+ if (mStoreMetaData) {
+ status = endDecodingFrame(false);
+ CHECK_STATUS("endDecodingFrame");
+ } else {
+ flushSurfaceBuffers();
+ }
mSizeChanged = false;
- flushSurfaceBuffers();
return DECODE_FORMAT_CHANGE;
} else
return DECODE_SUCCESS;
diff --git a/videodecoder/VideoDecoderBase.cpp b/videodecoder/VideoDecoderBase.cpp
index 291441f..906cf06 100644
--- a/videodecoder/VideoDecoderBase.cpp
+++ b/videodecoder/VideoDecoderBase.cpp
@@ -31,6 +31,7 @@
VideoDecoderBase::VideoDecoderBase(const char *mimeType, _vbp_parser_type type)
: mInitialized(false),
mLowDelay(false),
+ mStoreMetaData(false),
mDisplay(NULL),
mVADisplay(NULL),
mVAContext(VA_INVALID_ID),
@@ -69,8 +70,10 @@ VideoDecoderBase::VideoDecoderBase(const char *mimeType, _vbp_parser_type type)
mSignalBufferPre[i] = NULL;
}
pthread_mutex_init(&mLock, NULL);
+ pthread_mutex_init(&mFormatLock, NULL);
mVideoFormatInfo.mimeType = strdup(mimeType);
mUseGEN = false;
+ mMetaDataBuffersNum = 0;
mLibHandle = NULL;
mParserOpen = NULL;
mParserClose = NULL;
@@ -82,6 +85,7 @@ VideoDecoderBase::VideoDecoderBase(const char *mimeType, _vbp_parser_type type)
VideoDecoderBase::~VideoDecoderBase() {
pthread_mutex_destroy(&mLock);
+ pthread_mutex_destroy(&mFormatLock);
stop();
free(mVideoFormatInfo.mimeType);
}
@@ -133,6 +137,7 @@ Decode_Status VideoDecoderBase::start(VideoConfigBuffer *buffer) {
mVideoFormatInfo.surfaceHeight = buffer->graphicBufferHeight;
}
mLowDelay = buffer->flag & WANT_LOW_DELAY;
+ mStoreMetaData = buffer->flag & WANT_STORE_META_DATA;
mRawOutput = buffer->flag & WANT_RAW_OUTPUT;
if (mRawOutput) {
WTRACE("Output is raw data.");
@@ -163,6 +168,8 @@ Decode_Status VideoDecoderBase::reset(VideoConfigBuffer *buffer) {
}
mVideoFormatInfo.actualBufferNeeded = mConfigBuffer.surfaceNumber;
mLowDelay = buffer->flag & WANT_LOW_DELAY;
+ mStoreMetaData = buffer->flag & WANT_STORE_META_DATA;
+ mMetaDataBuffersNum = 0;
mRawOutput = buffer->flag & WANT_RAW_OUTPUT;
if (mRawOutput) {
WTRACE("Output is raw data.");
@@ -184,6 +191,7 @@ void VideoDecoderBase::stop(void) {
// private variables
mLowDelay = false;
+ mStoreMetaData = false;
mRawOutput = false;
mNumSurfaces = 0;
mSurfaceAcquirePos = 0;
@@ -255,9 +263,28 @@ void VideoDecoderBase::freeSurfaceBuffers(void) {
}
const VideoFormatInfo* VideoDecoderBase::getFormatInfo(void) {
+ if ((mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER) && mStoreMetaData) {
+ // Do nothing here, just to avoid thread
+ // contention in updateFormatInfo()
+ pthread_mutex_lock(&mFormatLock);
+ pthread_mutex_unlock(&mFormatLock);
+ }
+
return &mVideoFormatInfo;
}
+int VideoDecoderBase::getOutputQueueLength(void) {
+ VideoSurfaceBuffer *p = mOutputHead;
+
+ int i = 0;
+ while (p) {
+ p = p->next;
+ i++;
+ }
+
+ return i;
+}
+
const VideoRenderBuffer* VideoDecoderBase::getOutput(bool draining, VideoErrorBuffer *outErrBuf) {
if (mVAStarted == false) {
return NULL;
@@ -774,7 +801,7 @@ Decode_Status VideoDecoderBase::setupVA(uint32_t numSurface, VAProfile profile,
// if format has been changed in USE_NATIVE_GRAPHIC_BUFFER mode,
// we can not setupVA here when the graphic buffer resolution is smaller than the resolution decoder really needs
if (mSizeChanged) {
- if (mVideoFormatInfo.surfaceWidth < mVideoFormatInfo.width || mVideoFormatInfo.surfaceHeight < mVideoFormatInfo.height) {
+ if (mStoreMetaData || (!mStoreMetaData && (mVideoFormatInfo.surfaceWidth < mVideoFormatInfo.width || mVideoFormatInfo.surfaceHeight < mVideoFormatInfo.height))) {
mSizeChanged = false;
return DECODE_FORMAT_CHANGE;
}
@@ -813,7 +840,6 @@ Decode_Status VideoDecoderBase::setupVA(uint32_t numSurface, VAProfile profile,
mDisplay = "libva_driver_name=pvr";
mUseGEN = false;
}
-
#endif
mVADisplay = vaGetDisplay(mDisplay);
if (mVADisplay == NULL) {
@@ -868,59 +894,60 @@ Decode_Status VideoDecoderBase::setupVA(uint32_t numSurface, VAProfile profile,
#endif
}
if (mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER) {
- VASurfaceAttrib attribs[2];
- mVASurfaceAttrib = new VASurfaceAttribExternalBuffers;
- if (mVASurfaceAttrib == NULL) {
- return DECODE_MEMORY_FAIL;
- }
+ if (!mStoreMetaData) {
+ VASurfaceAttrib attribs[2];
+ mVASurfaceAttrib = new VASurfaceAttribExternalBuffers;
+ if (mVASurfaceAttrib == NULL) {
+ return DECODE_MEMORY_FAIL;
+ }
- mVASurfaceAttrib->buffers= (unsigned long *)malloc(sizeof(unsigned long)*mNumSurfaces);
- if (mVASurfaceAttrib->buffers == NULL) {
- return DECODE_MEMORY_FAIL;
- }
- mVASurfaceAttrib->num_buffers = mNumSurfaces;
- mVASurfaceAttrib->pixel_format = VA_FOURCC_NV12;
- mVASurfaceAttrib->width = mVideoFormatInfo.surfaceWidth;
- mVASurfaceAttrib->height = mVideoFormatInfo.surfaceHeight;
- mVASurfaceAttrib->data_size = mConfigBuffer.graphicBufferStride * mVideoFormatInfo.surfaceHeight * 1.5;
- mVASurfaceAttrib->num_planes = 2;
- mVASurfaceAttrib->pitches[0] = mConfigBuffer.graphicBufferStride;
- mVASurfaceAttrib->pitches[1] = mConfigBuffer.graphicBufferStride;
- mVASurfaceAttrib->pitches[2] = 0;
- mVASurfaceAttrib->pitches[3] = 0;
- mVASurfaceAttrib->offsets[0] = 0;
- mVASurfaceAttrib->offsets[1] = mConfigBuffer.graphicBufferStride * mVideoFormatInfo.surfaceHeight;
- mVASurfaceAttrib->offsets[2] = 0;
- mVASurfaceAttrib->offsets[3] = 0;
- mVASurfaceAttrib->private_data = (void *)mConfigBuffer.nativeWindow;
- mVASurfaceAttrib->flags = VA_SURFACE_ATTRIB_MEM_TYPE_ANDROID_GRALLOC;
- if (mConfigBuffer.flag & USE_TILING_MEMORY)
- mVASurfaceAttrib->flags |= VA_SURFACE_EXTBUF_DESC_ENABLE_TILING;
-
- for (int i = 0; i < mNumSurfaces; i++) {
- mVASurfaceAttrib->buffers[i] = (unsigned long)mConfigBuffer.graphicBufferHandler[i];
- }
+ mVASurfaceAttrib->buffers= (unsigned long *)malloc(sizeof(unsigned long)*mNumSurfaces);
+ if (mVASurfaceAttrib->buffers == NULL) {
+ return DECODE_MEMORY_FAIL;
+ }
+ mVASurfaceAttrib->num_buffers = mNumSurfaces;
+ mVASurfaceAttrib->pixel_format = VA_FOURCC_NV12;
+ mVASurfaceAttrib->width = mVideoFormatInfo.surfaceWidth;
+ mVASurfaceAttrib->height = mVideoFormatInfo.surfaceHeight;
+ mVASurfaceAttrib->data_size = mConfigBuffer.graphicBufferStride * mVideoFormatInfo.surfaceHeight * 1.5;
+ mVASurfaceAttrib->num_planes = 2;
+ mVASurfaceAttrib->pitches[0] = mConfigBuffer.graphicBufferStride;
+ mVASurfaceAttrib->pitches[1] = mConfigBuffer.graphicBufferStride;
+ mVASurfaceAttrib->pitches[2] = 0;
+ mVASurfaceAttrib->pitches[3] = 0;
+ mVASurfaceAttrib->offsets[0] = 0;
+ mVASurfaceAttrib->offsets[1] = mConfigBuffer.graphicBufferStride * mVideoFormatInfo.surfaceHeight;
+ mVASurfaceAttrib->offsets[2] = 0;
+ mVASurfaceAttrib->offsets[3] = 0;
+ mVASurfaceAttrib->private_data = (void *)mConfigBuffer.nativeWindow;
+ mVASurfaceAttrib->flags = VA_SURFACE_ATTRIB_MEM_TYPE_ANDROID_GRALLOC;
+ if (mConfigBuffer.flag & USE_TILING_MEMORY)
+ mVASurfaceAttrib->flags |= VA_SURFACE_EXTBUF_DESC_ENABLE_TILING;
- attribs[0].type = (VASurfaceAttribType)VASurfaceAttribMemoryType;
- attribs[0].flags = VA_SURFACE_ATTRIB_SETTABLE;
- attribs[0].value.type = VAGenericValueTypeInteger;
- attribs[0].value.value.i = VA_SURFACE_ATTRIB_MEM_TYPE_ANDROID_GRALLOC;
+ for (int i = 0; i < mNumSurfaces; i++) {
+ mVASurfaceAttrib->buffers[i] = (unsigned long)mConfigBuffer.graphicBufferHandler[i];
+ }
- attribs[1].type = (VASurfaceAttribType)VASurfaceAttribExternalBufferDescriptor;
- attribs[1].flags = VA_SURFACE_ATTRIB_SETTABLE;
- attribs[1].value.type = VAGenericValueTypePointer;
- attribs[1].value.value.p = (void *)mVASurfaceAttrib;
+ attribs[0].type = (VASurfaceAttribType)VASurfaceAttribMemoryType;
+ attribs[0].flags = VA_SURFACE_ATTRIB_SETTABLE;
+ attribs[0].value.type = VAGenericValueTypeInteger;
+ attribs[0].value.value.i = VA_SURFACE_ATTRIB_MEM_TYPE_ANDROID_GRALLOC;
- vaStatus = vaCreateSurfaces(
- mVADisplay,
- format,
- mVideoFormatInfo.surfaceWidth,
- mVideoFormatInfo.surfaceHeight,
- mSurfaces,
- mNumSurfaces,
- attribs,
- 2);
+ attribs[1].type = (VASurfaceAttribType)VASurfaceAttribExternalBufferDescriptor;
+ attribs[1].flags = VA_SURFACE_ATTRIB_SETTABLE;
+ attribs[1].value.type = VAGenericValueTypePointer;
+ attribs[1].value.value.p = (void *)mVASurfaceAttrib;
+ vaStatus = vaCreateSurfaces(
+ mVADisplay,
+ format,
+ mVideoFormatInfo.surfaceWidth,
+ mVideoFormatInfo.surfaceHeight,
+ mSurfaces,
+ mNumSurfaces,
+ attribs,
+ 2);
+ }
} else {
vaStatus = vaCreateSurfaces(
mVADisplay,
@@ -953,7 +980,30 @@ Decode_Status VideoDecoderBase::setupVA(uint32_t numSurface, VAProfile profile,
mVideoFormatInfo.ctxSurfaces = mSurfaces;
if ((int32_t)profile != VAProfileSoftwareDecoding) {
- vaStatus = vaCreateContext(
+ if (mStoreMetaData) {
+ if (mUseGEN) {
+ vaStatus = vaCreateContext(
+ mVADisplay,
+ mVAConfig,
+ mVideoFormatInfo.surfaceWidth,
+ mVideoFormatInfo.surfaceHeight,
+ 0,
+ NULL,
+ 0,
+ &mVAContext);
+ } else {
+ vaStatus = vaCreateContext(
+ mVADisplay,
+ mVAConfig,
+ mVideoFormatInfo.surfaceWidth,
+ mVideoFormatInfo.surfaceHeight,
+ 0,
+ NULL,
+ mNumSurfaces + mNumExtraSurfaces,
+ &mVAContext);
+ }
+ } else {
+ vaStatus = vaCreateContext(
mVADisplay,
mVAConfig,
mVideoFormatInfo.surfaceWidth,
@@ -962,6 +1012,7 @@ Decode_Status VideoDecoderBase::setupVA(uint32_t numSurface, VAProfile profile,
mSurfaces,
mNumSurfaces + mNumExtraSurfaces,
&mVAContext);
+ }
CHECK_VA_STATUS("vaCreateContext");
}
@@ -980,6 +1031,17 @@ Decode_Status VideoDecoderBase::setupVA(uint32_t numSurface, VAProfile profile,
setRotationDegrees(mConfigBuffer.rotationDegrees);
mVAStarted = true;
+
+ pthread_mutex_lock(&mLock);
+ if (mStoreMetaData) {
+ for (uint32_t i = 0; i < mMetaDataBuffersNum; i++) {
+ status = createSurfaceFromHandle(i);
+ CHECK_STATUS("createSurfaceFromHandle");
+ mSurfaceBuffers[i].renderBuffer.graphicBufferIndex = i;
+ }
+ }
+ pthread_mutex_unlock(&mLock);
+
return DECODE_SUCCESS;
}
@@ -1055,6 +1117,10 @@ Decode_Status VideoDecoderBase::terminateVA(void) {
mVAStarted = false;
mInitialized = false;
mErrReportEnabled = false;
+ if (mStoreMetaData) {
+ mMetaDataBuffersNum = 0;
+ mSurfaceAcquirePos = 0;
+ }
return DECODE_SUCCESS;
}
@@ -1079,8 +1145,6 @@ Decode_Status VideoDecoderBase::parseBuffer(uint8_t *buffer, int32_t size, bool
return DECODE_SUCCESS;
}
-
-
Decode_Status VideoDecoderBase::mapSurface(void) {
VAStatus vaStatus = VA_STATUS_SUCCESS;
VAImage image;
@@ -1237,6 +1301,65 @@ Decode_Status VideoDecoderBase::getRawDataFromSurface(VideoRenderBuffer *renderB
return DECODE_SUCCESS;
}
+Decode_Status VideoDecoderBase::createSurfaceFromHandle(int index) {
+ VAStatus vaStatus = VA_STATUS_SUCCESS;
+ Decode_Status status;
+
+ int32_t format = VA_RT_FORMAT_YUV420;
+ if (mConfigBuffer.flag & WANT_SURFACE_PROTECTION) {
+#ifndef USE_AVC_SHORT_FORMAT
+ format |= VA_RT_FORMAT_PROTECTED;
+ WTRACE("Surface is protected.");
+#endif
+ }
+ VASurfaceAttrib attribs[2];
+ VASurfaceAttribExternalBuffers surfExtBuf;
+ surfExtBuf.num_buffers = 1;
+ surfExtBuf.pixel_format = VA_FOURCC_NV12;
+ surfExtBuf.width = mVideoFormatInfo.surfaceWidth;
+ surfExtBuf.height = mVideoFormatInfo.surfaceHeight;
+ surfExtBuf.data_size = mConfigBuffer.graphicBufferStride * mVideoFormatInfo.surfaceHeight * 1.5;
+ surfExtBuf.num_planes = 2;
+ surfExtBuf.pitches[0] = mConfigBuffer.graphicBufferStride;
+ surfExtBuf.pitches[1] = mConfigBuffer.graphicBufferStride;
+ surfExtBuf.pitches[2] = 0;
+ surfExtBuf.pitches[3] = 0;
+ surfExtBuf.offsets[0] = 0;
+ surfExtBuf.offsets[1] = mConfigBuffer.graphicBufferStride * mVideoFormatInfo.surfaceHeight;
+ surfExtBuf.offsets[2] = 0;
+ surfExtBuf.offsets[3] = 0;
+ surfExtBuf.private_data = (void *)mConfigBuffer.nativeWindow;
+ surfExtBuf.flags = VA_SURFACE_ATTRIB_MEM_TYPE_ANDROID_GRALLOC;
+ if (mConfigBuffer.flag & USE_TILING_MEMORY) {
+ surfExtBuf.flags |= VA_SURFACE_EXTBUF_DESC_ENABLE_TILING;
+ }
+
+ surfExtBuf.buffers = (long unsigned int*)&(mConfigBuffer.graphicBufferHandler[index]);
+
+ attribs[0].type = (VASurfaceAttribType)VASurfaceAttribMemoryType;
+ attribs[0].flags = VA_SURFACE_ATTRIB_SETTABLE;
+ attribs[0].value.type = VAGenericValueTypeInteger;
+ attribs[0].value.value.i = VA_SURFACE_ATTRIB_MEM_TYPE_ANDROID_GRALLOC;
+
+ attribs[1].type = (VASurfaceAttribType)VASurfaceAttribExternalBufferDescriptor;
+ attribs[1].flags = VA_SURFACE_ATTRIB_SETTABLE;
+ attribs[1].value.type = VAGenericValueTypePointer;
+ attribs[1].value.value.p = (void *)&surfExtBuf;
+
+ vaStatus = vaCreateSurfaces(
+ mVADisplay,
+ format,
+ mVideoFormatInfo.surfaceWidth,
+ mVideoFormatInfo.surfaceHeight,
+ &(mSurfaces[index]),
+ 1,
+ attribs,
+ 2);
+ CHECK_VA_STATUS("vaCreateSurfaces");
+
+ return DECODE_SUCCESS;
+}
+
void VideoDecoderBase::initSurfaceBuffer(bool reset) {
bool useGraphicBuffer = mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER;
if (useGraphicBuffer && reset) {
@@ -1285,11 +1408,27 @@ void VideoDecoderBase::initSurfaceBuffer(bool reset) {
}
}
-Decode_Status VideoDecoderBase::signalRenderDone(void * graphichandler) {
+Decode_Status VideoDecoderBase::signalRenderDone(void * graphichandler, bool isNew) {
+ Decode_Status status;
if (graphichandler == NULL) {
return DECODE_SUCCESS;
}
pthread_mutex_lock(&mLock);
+ bool graphicBufferMode = mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER;
+ if (mStoreMetaData) {
+ if (!graphicBufferMode) {
+ pthread_mutex_unlock(&mLock);
+ return DECODE_SUCCESS;
+ }
+
+ if ((mMetaDataBuffersNum < mConfigBuffer.surfaceNumber) && isNew) {
+ mConfigBuffer.graphicBufferHandler[mMetaDataBuffersNum] = graphichandler;
+ if (mInitialized) {
+ mSurfaceBuffers[mMetaDataBuffersNum].renderBuffer.graphicBufferHandle = graphichandler;
+ mSurfaceBuffers[mMetaDataBuffersNum].renderBuffer.graphicBufferIndex = mMetaDataBuffersNum;
+ }
+ }
+ }
int i = 0;
if (!mInitialized) {
if (mSignalBufferSize >= MAX_GRAPHIC_BUFFER_NUM) {
@@ -1299,10 +1438,18 @@ Decode_Status VideoDecoderBase::signalRenderDone(void * graphichandler) {
mSignalBufferPre[mSignalBufferSize++] = graphichandler;
VTRACE("SignalRenderDoneFlag mInitialized = false graphichandler = %p, mSignalBufferSize = %d", graphichandler, mSignalBufferSize);
} else {
- if (!(mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER)) {
+ if (!graphicBufferMode) {
pthread_mutex_unlock(&mLock);
return DECODE_SUCCESS;
}
+ if (mStoreMetaData) {
+ if ((mMetaDataBuffersNum < mConfigBuffer.surfaceNumber) && isNew) {
+ if (mVAStarted) {
+ status = createSurfaceFromHandle(mMetaDataBuffersNum);
+ CHECK_STATUS("createSurfaceFromHandle")
+ }
+ }
+ }
for (i = 0; i < mNumSurfaces; i++) {
if (mSurfaceBuffers[i].renderBuffer.graphicBufferHandle == graphichandler) {
mSurfaceBuffers[i].renderBuffer.renderDone = true;
@@ -1311,6 +1458,13 @@ Decode_Status VideoDecoderBase::signalRenderDone(void * graphichandler) {
}
}
}
+
+ if (mStoreMetaData) {
+ if ((mMetaDataBuffersNum < mConfigBuffer.surfaceNumber) && isNew) {
+ mMetaDataBuffersNum++;
+ }
+ }
+
pthread_mutex_unlock(&mLock);
return DECODE_SUCCESS;
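
In meta data mode the gralloc handles are not known up front when setupVA() runs; they arrive one at a time through the extended signalRenderDone(graphichandler, isNew), and the backing VA surface for each slot is created lazily via createSurfaceFromHandle(). A rough caller-side sketch of that hand-off, using a hypothetical wrapper name and the same assumed IVideoDecoder interface as above:

// Sketch only: feeding gralloc handles to the decoder in meta data mode.
// "registerOutputBuffer" is a hypothetical helper, not libmix API.
#include "VideoDecoderInterface.h"

Decode_Status registerOutputBuffer(IVideoDecoder *decoder, void *grallocHandle, bool firstUse) {
    // isNew == true stores the handle in mConfigBuffer.graphicBufferHandler and,
    // once VA has started, creates the backing surface for that slot;
    // isNew == false only marks the buffer as render-done, as before this change.
    return decoder->signalRenderDone(grallocHandle, firstUse);
}
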
diff --git a/videodecoder/VideoDecoderBase.h b/videodecoder/VideoDecoderBase.h
index 9cf09e8..0838515 100755
--- a/videodecoder/VideoDecoderBase.h
+++ b/videodecoder/VideoDecoderBase.h
@@ -54,10 +54,11 @@ public:
virtual void flush(void);
virtual void freeSurfaceBuffers(void);
virtual const VideoRenderBuffer* getOutput(bool draining = false, VideoErrorBuffer *output_buf = NULL);
- virtual Decode_Status signalRenderDone(void * graphichandler);
+ virtual Decode_Status signalRenderDone(void * graphichandler, bool isNew = false);
virtual const VideoFormatInfo* getFormatInfo(void);
virtual bool checkBufferAvail();
virtual void enableErrorReport(bool enabled = false) {mErrReportEnabled = enabled; };
+ virtual int getOutputQueueLength(void);
protected:
// each acquireSurfaceBuffer must be followed by a corresponding outputSurfaceBuffer or releaseSurfaceBuffer.
@@ -90,6 +91,7 @@ protected:
virtual Decode_Status getCodecSpecificConfigs(VAProfile profile, VAConfigID *config);
#endif
virtual Decode_Status checkHardwareCapability();
+ Decode_Status createSurfaceFromHandle(int32_t index);
private:
Decode_Status mapSurface(void);
void initSurfaceBuffer(bool reset);
@@ -101,6 +103,7 @@ private:
protected:
bool mLowDelay; // when true, decoded frame is immediately output for rendering
+ bool mStoreMetaData; // when true, meta data mode is enabled for adaptive playback
VideoFormatInfo mVideoFormatInfo;
Display *mDisplay;
VADisplay mVADisplay;
@@ -122,6 +125,7 @@ protected:
int32_t mOutputWindowSize; // indicate limit of number of outstanding frames for output
int32_t mRotationDegrees;
+ pthread_mutex_t mFormatLock;
bool mErrReportEnabled;
bool mWiDiOn;
@@ -173,6 +177,7 @@ private:
void *mSignalBufferPre[MAX_GRAPHIC_BUFFER_NUM];
uint32 mSignalBufferSize;
bool mUseGEN;
+ uint32_t mMetaDataBuffersNum;
protected:
void ManageReference(bool enable) {mManageReference = enable;}
void setOutputMethod(OUTPUT_METHOD method) {mOutputMethod = method;}
diff --git a/videodecoder/VideoDecoderDefs.h b/videodecoder/VideoDecoderDefs.h
index 708725b..70d2319 100644
--- a/videodecoder/VideoDecoderDefs.h
+++ b/videodecoder/VideoDecoderDefs.h
@@ -112,6 +112,8 @@ typedef enum {
// indicate the modular drm type
IS_SUBSAMPLE_ENCRYPTION = 0x200000,
+ // indicate meta data mode
+ WANT_STORE_META_DATA = 0x400000,
} VIDEO_BUFFER_FLAG;
typedef enum
diff --git a/videodecoder/VideoDecoderInterface.h b/videodecoder/VideoDecoderInterface.h
index fdc2c12..60f4092 100644
--- a/videodecoder/VideoDecoderInterface.h
+++ b/videodecoder/VideoDecoderInterface.h
@@ -31,10 +31,11 @@ public:
virtual void freeSurfaceBuffers(void) = 0;
virtual const VideoRenderBuffer* getOutput(bool draining = false, VideoErrorBuffer *output_buf = NULL) = 0;
virtual const VideoFormatInfo* getFormatInfo(void) = 0;
- virtual Decode_Status signalRenderDone(void * graphichandler) = 0;
+ virtual Decode_Status signalRenderDone(void * graphichandler, bool isNew = false) = 0;
virtual bool checkBufferAvail() = 0;
virtual Decode_Status getRawDataFromSurface(VideoRenderBuffer *renderBuffer = NULL, uint8_t *pRawData = NULL, uint32_t *pSize = NULL, bool internal = true) = 0;
virtual void enableErrorReport(bool enabled) = 0;
+ virtual int getOutputQueueLength(void) = 0;
};
#endif /* VIDEO_DECODER_INTERFACE_H_ */
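
The new getOutputQueueLength() simply walks the mOutputHead list, so a client can poll how many decoded frames are still queued for output. An illustrative use, not taken from the patch and again assuming the IVideoDecoder interface name:

// Sketch: client-side check built on the new getOutputQueueLength() query.
#include "VideoDecoderInterface.h"

bool hasQueuedOutput(IVideoDecoder *decoder) {
    // Number of buffers currently held on the decoder's output list.
    return decoder->getOutputQueueLength() > 0;
}
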
diff --git a/videodecoder/VideoDecoderMPEG2.cpp b/videodecoder/VideoDecoderMPEG2.cpp
index 5b79a58..928ee9b 100644
--- a/videodecoder/VideoDecoderMPEG2.cpp
+++ b/videodecoder/VideoDecoderMPEG2.cpp
@@ -103,15 +103,31 @@ Decode_Status VideoDecoderMPEG2::decode(VideoDecodeBuffer *buffer) {
(data->codec_data->frame_width > 0) && (data->codec_data->frame_height)) {
// update encoded image size
ITRACE("Video size is changed. from %dx%d to %dx%d\n",mVideoFormatInfo.width,mVideoFormatInfo.height, data->codec_data->frame_width,data->codec_data->frame_height);
- bool needFlush = false;
+ if (useGraphicbuffer && mStoreMetaData) {
+ pthread_mutex_lock(&mFormatLock);
+ }
mVideoFormatInfo.width = data->codec_data->frame_width;
mVideoFormatInfo.height = data->codec_data->frame_height;
+ bool needFlush = false;
if (useGraphicbuffer) {
- needFlush = (mVideoFormatInfo.width > mVideoFormatInfo.surfaceWidth)
- || (mVideoFormatInfo.height > mVideoFormatInfo.surfaceHeight);
+ if (mStoreMetaData) {
+ needFlush = true;
+
+ mVideoFormatInfo.valid = false;
+ pthread_mutex_unlock(&mFormatLock);
+ } else {
+ needFlush = (mVideoFormatInfo.width > mVideoFormatInfo.surfaceWidth)
+ || (mVideoFormatInfo.height > mVideoFormatInfo.surfaceHeight);
+ }
}
+
if (needFlush) {
- flushSurfaceBuffers();
+ if (mStoreMetaData) {
+ status = endDecodingFrame(false);
+ CHECK_STATUS("endDecodingFrame");
+ } else {
+ flushSurfaceBuffers();
+ }
mSizeChanged = false;
return DECODE_FORMAT_CHANGE;
} else {
@@ -119,6 +135,10 @@ Decode_Status VideoDecoderMPEG2::decode(VideoDecodeBuffer *buffer) {
}
setRenderRect();
+ } else {
+ if (useGraphicbuffer && mStoreMetaData) {
+ mVideoFormatInfo.valid = true;
+ }
}
VideoDecoderBase::setRotationDegrees(buffer->rotationDegrees);
diff --git a/videodecoder/VideoDecoderMPEG4.cpp b/videodecoder/VideoDecoderMPEG4.cpp
index b54afa9..d0b25d6 100644
--- a/videodecoder/VideoDecoderMPEG4.cpp
+++ b/videodecoder/VideoDecoderMPEG4.cpp
@@ -98,17 +98,33 @@ Decode_Status VideoDecoderMPEG4::decode(VideoDecodeBuffer *buffer) {
data->codec_data.video_object_layer_width &&
data->codec_data.video_object_layer_height) {
// update encoded image size
- ITRACE("Video size is changed. from %dx%d to %dx%d\n",mVideoFormatInfo.width,mVideoFormatInfo.height,
- data->codec_data.video_object_layer_width,data->codec_data.video_object_layer_height);
- bool noNeedFlush = false;
+ ITRACE("Video size is changed. from %dx%d to %dx%d\n", mVideoFormatInfo.width, mVideoFormatInfo.height,
+ data->codec_data.video_object_layer_width,data->codec_data.video_object_layer_height);
+
+ if (useGraphicbuffer && mStoreMetaData) {
+ pthread_mutex_lock(&mFormatLock);
+ }
mVideoFormatInfo.width = data->codec_data.video_object_layer_width;
mVideoFormatInfo.height = data->codec_data.video_object_layer_height;
+ bool needFlush = false;
if (useGraphicbuffer) {
- noNeedFlush = (mVideoFormatInfo.width <= mVideoFormatInfo.surfaceWidth)
- && (mVideoFormatInfo.height <= mVideoFormatInfo.surfaceHeight);
+ if (mStoreMetaData) {
+ needFlush = true;
+
+ mVideoFormatInfo.valid = false;
+ pthread_mutex_unlock(&mFormatLock);
+ } else {
+ needFlush = (mVideoFormatInfo.width > mVideoFormatInfo.surfaceWidth)
+ || (mVideoFormatInfo.height > mVideoFormatInfo.surfaceHeight);
+ }
}
- if (!noNeedFlush) {
- flushSurfaceBuffers();
+ if (needFlush) {
+ if (mStoreMetaData) {
+ status = endDecodingFrame(false);
+ CHECK_STATUS("endDecodingFrame");
+ } else {
+ flushSurfaceBuffers();
+ }
mSizeChanged = false;
return DECODE_FORMAT_CHANGE;
} else {
@@ -116,6 +132,10 @@ Decode_Status VideoDecoderMPEG4::decode(VideoDecodeBuffer *buffer) {
}
setRenderRect();
+ } else {
+ if (useGraphicbuffer && mStoreMetaData) {
+ mVideoFormatInfo.valid = true;
+ }
}
status = decodeFrame(buffer, data);
diff --git a/videodecoder/VideoDecoderVP8.cpp b/videodecoder/VideoDecoderVP8.cpp
index 87249b4..c4c96c9 100644
--- a/videodecoder/VideoDecoderVP8.cpp
+++ b/videodecoder/VideoDecoderVP8.cpp
@@ -50,6 +50,10 @@ void VideoDecoderVP8::updateFormatInfo(vbp_data_vp8 *data) {
ITRACE("updateFormatInfo: current size: %d x %d, new size: %d x %d",
mVideoFormatInfo.width, mVideoFormatInfo.height, width, height);
+ if ((mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER) && mStoreMetaData) {
+ pthread_mutex_lock(&mFormatLock);
+ }
+
if ((mVideoFormatInfo.width != width ||
mVideoFormatInfo.height != height) &&
width && height) {
@@ -68,7 +72,17 @@ void VideoDecoderVP8::updateFormatInfo(vbp_data_vp8 *data) {
mVideoFormatInfo.cropBottom = data->codec_data->crop_bottom;
ITRACE("Cropping: left = %d, top = %d, right = %d, bottom = %d", data->codec_data->crop_left, data->codec_data->crop_top, data->codec_data->crop_right, data->codec_data->crop_bottom);
- mVideoFormatInfo.valid = true;
+ if ((mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER) && mStoreMetaData) {
+ if (mSizeChanged) {
+ mVideoFormatInfo.valid = false;
+ } else {
+ mVideoFormatInfo.valid = true;
+ }
+
+ pthread_mutex_unlock(&mFormatLock);
+ } else {
+ mVideoFormatInfo.valid = true;
+ }
setRenderRect();
}
@@ -161,21 +175,27 @@ Decode_Status VideoDecoderVP8::decodeFrame(VideoDecodeBuffer* buffer, vbp_data_v
}
if (VP8_KEY_FRAME == data->codec_data->frame_type) {
- if (mSizeChanged && !useGraphicbuffer){
+ updateFormatInfo(data);
+ if (mSizeChanged && !(mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER)) {
mSizeChanged = false;
return DECODE_FORMAT_CHANGE;
- } else {
- updateFormatInfo(data);
- bool noNeedFlush = false;
- if (useGraphicbuffer) {
- noNeedFlush = (mVideoFormatInfo.width <= mVideoFormatInfo.surfaceWidth)
- && (mVideoFormatInfo.height <= mVideoFormatInfo.surfaceHeight);
- }
- if (mSizeChanged == true && !noNeedFlush) {
+ }
+
+ bool needFlush = false;
+ if (useGraphicbuffer) {
+ // For VP8 in adaptive playback legacy mode,
+ // force buffer reallocation.
+ needFlush = mSizeChanged;
+ }
+ if (needFlush) {
+ if (mStoreMetaData) {
+ status = endDecodingFrame(false);
+ CHECK_STATUS("endDecodingFrame");
+ } else {
flushSurfaceBuffers();
- mSizeChanged = false;
- return DECODE_FORMAT_CHANGE;
}
+ mSizeChanged = false;
+ return DECODE_FORMAT_CHANGE;
}
}
diff --git a/videodecoder/VideoDecoderWMV.cpp b/videodecoder/VideoDecoderWMV.cpp
index 16c307a..4945640 100644
--- a/videodecoder/VideoDecoderWMV.cpp
+++ b/videodecoder/VideoDecoderWMV.cpp
@@ -104,23 +104,42 @@ Decode_Status VideoDecoderWMV::decode(VideoDecodeBuffer *buffer) {
data->se_data->CODED_WIDTH &&
data->se_data->CODED_HEIGHT) {
ITRACE("video size is changed from %dx%d to %dx%d", mVideoFormatInfo.width, mVideoFormatInfo.height,
- data->se_data->CODED_WIDTH, data->se_data->CODED_HEIGHT);
+ data->se_data->CODED_WIDTH, data->se_data->CODED_HEIGHT);
+ if (useGraphicbuffer && mStoreMetaData) {
+ pthread_mutex_lock(&mFormatLock);
+ }
mVideoFormatInfo.width = data->se_data->CODED_WIDTH;
mVideoFormatInfo.height = data->se_data->CODED_HEIGHT;
- bool noNeedFlush = false;
+ bool needFlush = false;
if (useGraphicbuffer) {
- noNeedFlush = (mVideoFormatInfo.width <= mVideoFormatInfo.surfaceWidth)
- && (mVideoFormatInfo.height <= mVideoFormatInfo.surfaceHeight);
+ if (mStoreMetaData) {
+ needFlush = true;
+
+ mVideoFormatInfo.valid = false;
+ pthread_mutex_unlock(&mFormatLock);
+ } else {
+ needFlush = (mVideoFormatInfo.width > mVideoFormatInfo.surfaceWidth)
+ || (mVideoFormatInfo.height > mVideoFormatInfo.surfaceHeight);
+ }
}
setRenderRect();
- if (noNeedFlush) {
- mSizeChanged = true;
- } else {
- flushSurfaceBuffers();
+ if (needFlush) {
+ if (mStoreMetaData) {
+ status = endDecodingFrame(false);
+ CHECK_STATUS("endDecodingFrame");
+ } else {
+ flushSurfaceBuffers();
+ }
mSizeChanged = false;
return DECODE_FORMAT_CHANGE;
+ } else {
+ mSizeChanged = true;
+ }
+ } else {
+ if (useGraphicbuffer && mStoreMetaData) {
+ mVideoFormatInfo.valid = true;
}
}