-rw-r--r-- Android.mk | 6
-rw-r--r-- README | 1
-rw-r--r-- videodecoder/Android.mk | 79
-rw-r--r-- videodecoder/VideoDecoderAVC.cpp | 992
-rwxr-xr-x videodecoder/VideoDecoderAVC.h | 84
-rw-r--r-- videodecoder/VideoDecoderBase.cpp | 1514
-rwxr-xr-x videodecoder/VideoDecoderBase.h | 187
-rw-r--r-- videodecoder/VideoDecoderDefs.h | 263
-rw-r--r-- videodecoder/VideoDecoderHost.cpp | 85
-rw-r--r-- videodecoder/VideoDecoderHost.h | 29
-rw-r--r-- videodecoder/VideoDecoderInterface.h | 40
-rw-r--r-- videodecoder/VideoDecoderMPEG4.cpp | 645
-rw-r--r-- videodecoder/VideoDecoderMPEG4.h | 70
-rw-r--r-- videodecoder/VideoDecoderTrace.cpp | 37
-rwxr-xr-x videodecoder/VideoDecoderTrace.h | 96
-rw-r--r-- videodecoder/VideoDecoderVP8.cpp | 449
-rw-r--r-- videodecoder/VideoDecoderVP8.h | 91
-rw-r--r-- videodecoder/VideoDecoderWMV.cpp | 568
-rw-r--r-- videodecoder/VideoDecoderWMV.h | 66
-rw-r--r-- videodecoder/securevideo/baytrail/VideoDecoderAVCSecure.cpp | 367
-rw-r--r-- videodecoder/securevideo/baytrail/VideoDecoderAVCSecure.h | 44
-rw-r--r-- videodecoder/securevideo/baytrail/secvideoparser.h | 150
-rw-r--r-- videodecoder/securevideo/baytrail/va_private.h | 64
-rw-r--r-- videodecoder/securevideo/cherrytrail/VideoDecoderAVCSecure.cpp | 351
-rw-r--r-- videodecoder/securevideo/cherrytrail/VideoDecoderAVCSecure.h | 44
-rw-r--r-- videodecoder/securevideo/cherrytrail/secvideoparser.h | 150
-rw-r--r-- videodecoder/securevideo/cherrytrail/va_private.h | 63
-rw-r--r-- videodecoder/securevideo/clovertrail/VideoDecoderAVCSecure.cpp | 507
-rw-r--r-- videodecoder/securevideo/clovertrail/VideoDecoderAVCSecure.h | 75
-rwxr-xr-x videodecoder/securevideo/merrifield/VideoDecoderAVCSecure.cpp | 858
-rwxr-xr-x videodecoder/securevideo/merrifield/VideoDecoderAVCSecure.h | 69
-rwxr-xr-x videodecoder/securevideo/merrifield/VideoFrameInfo.h | 36
-rw-r--r-- videodecoder/securevideo/merrplus/VideoDecoderAVCSecure.cpp | 510
-rw-r--r-- videodecoder/securevideo/merrplus/VideoDecoderAVCSecure.h | 75
-rw-r--r-- videodecoder/securevideo/moorefield/VideoDecoderAVCSecure.cpp | 861
-rw-r--r-- videodecoder/securevideo/moorefield/VideoDecoderAVCSecure.h | 69
-rwxr-xr-x videodecoder/securevideo/moorefield/VideoFrameInfo.h | 36
-rw-r--r-- videodecoder/use_util_sse4.h | 93
-rw-r--r-- videoencoder/Android.mk | 110
-rw-r--r-- videoencoder/IntelMetadataBuffer.cpp | 832
-rw-r--r-- videoencoder/IntelMetadataBuffer.h | 162
-rw-r--r-- videoencoder/PVSoftMPEG4Encoder.cpp | 513
-rw-r--r-- videoencoder/PVSoftMPEG4Encoder.h | 84
-rw-r--r-- videoencoder/VideoEncoderAVC.cpp | 1377
-rw-r--r-- videoencoder/VideoEncoderAVC.h | 73
-rw-r--r-- videoencoder/VideoEncoderBase.cpp | 1928
-rw-r--r-- videoencoder/VideoEncoderBase.h | 186
-rw-r--r-- videoencoder/VideoEncoderDef.h | 731
-rw-r--r-- videoencoder/VideoEncoderH263.cpp | 178
-rw-r--r-- videoencoder/VideoEncoderH263.h | 57
-rw-r--r-- videoencoder/VideoEncoderHost.cpp | 76
-rw-r--r-- videoencoder/VideoEncoderHost.h | 25
-rw-r--r-- videoencoder/VideoEncoderInterface.h | 37
-rw-r--r-- videoencoder/VideoEncoderLog.h | 61
-rw-r--r-- videoencoder/VideoEncoderMP4.cpp | 281
-rw-r--r-- videoencoder/VideoEncoderMP4.h | 61
-rw-r--r-- videoencoder/VideoEncoderUtils.cpp | 808
-rw-r--r-- videoencoder/VideoEncoderUtils.h | 85
-rw-r--r-- videoencoder/VideoEncoderVP8.cpp | 521
-rw-r--r-- videoencoder/VideoEncoderVP8.h | 58
-rw-r--r-- videoencoder/bitstream.h | 403
61 files changed, 18371 insertions, 0 deletions
diff --git a/Android.mk b/Android.mk
new file mode 100644
index 0000000..7c49108
--- /dev/null
+++ b/Android.mk
@@ -0,0 +1,6 @@
+AUDIO_PATH := $(call my-dir)
+
+ifeq ($(INTEL_VA),true)
+ include $(AUDIO_PATH)/videodecoder/Android.mk
+ include $(AUDIO_PATH)/videoencoder/Android.mk
+endif
diff --git a/README b/README
new file mode 100644
index 0000000..825359e
--- /dev/null
+++ b/README
@@ -0,0 +1 @@
+Intel Moorestown MIX library
diff --git a/videodecoder/Android.mk b/videodecoder/Android.mk
new file mode 100644
index 0000000..885b325
--- /dev/null
+++ b/videodecoder/Android.mk
@@ -0,0 +1,79 @@
+LOCAL_PATH := $(call my-dir)
+
+include $(CLEAR_VARS)
+
+ifeq ($(TARGET_HAS_VPP),true)
+LOCAL_CFLAGS += -DTARGET_HAS_VPP
+endif
+
+LOCAL_SRC_FILES := \
+ VideoDecoderHost.cpp \
+ VideoDecoderBase.cpp \
+ VideoDecoderWMV.cpp \
+ VideoDecoderMPEG4.cpp \
+ VideoDecoderAVC.cpp \
+ VideoDecoderTrace.cpp
+
+LOCAL_C_INCLUDES := \
+ $(TARGET_OUT_HEADERS)/libva \
+ $(TARGET_OUT_HEADERS)/libmixvbp
+
+ifeq ($(USE_INTEL_SECURE_AVC),true)
+LOCAL_CFLAGS += -DUSE_INTEL_SECURE_AVC
+LOCAL_SRC_FILES += securevideo/$(TARGET_BOARD_PLATFORM)/VideoDecoderAVCSecure.cpp
+LOCAL_C_INCLUDES += $(LOCAL_PATH)/securevideo/$(TARGET_BOARD_PLATFORM)
+endif
+
+PLATFORM_USE_GEN_HW := \
+ baytrail \
+ cherrytrail
+
+ifneq ($(filter $(TARGET_BOARD_PLATFORM),$(PLATFORM_USE_GEN_HW)),)
+ LOCAL_CFLAGS += -DUSE_AVC_SHORT_FORMAT -DUSE_GEN_HW
+endif
+
+
+PLATFORM_USE_HYBRID_DRIVER := \
+ baytrail
+
+ifneq ($(filter $(TARGET_BOARD_PLATFORM),$(PLATFORM_USE_HYBRID_DRIVER)),)
+ LOCAL_CFLAGS += -DUSE_HYBRID_DRIVER
+endif
+
+PLATFORM_SUPPORT_SLICE_HEADER_PARSER := \
+ merrifield \
+ moorefield
+
+ifneq ($(filter $(TARGET_BOARD_PLATFORM),$(PLATFORM_SUPPORT_SLICE_HEADER_PARSER)),)
+ LOCAL_CFLAGS += -DUSE_SLICE_HEADER_PARSING
+endif
+
+LOCAL_SHARED_LIBRARIES := \
+ libcutils \
+ libva \
+ libva-android \
+ libva-tpi \
+ libdl
+
+LOCAL_COPY_HEADERS_TO := libmix_videodecoder
+
+LOCAL_COPY_HEADERS := \
+ VideoDecoderHost.h \
+ VideoDecoderInterface.h \
+ VideoDecoderDefs.h
+
+ifneq ($(filter $(TARGET_BOARD_PLATFORM),$(PLATFORM_SUPPORT_SLICE_HEADER_PARSER)),)
+ LOCAL_COPY_HEADERS += securevideo/$(TARGET_BOARD_PLATFORM)/VideoFrameInfo.h
+endif
+
+LOCAL_CFLAGS += -Werror
+LOCAL_MODULE_TAGS := optional
+LOCAL_MODULE := libva_videodecoder
+
+ifeq ($(USE_HW_VP8),true)
+LOCAL_SRC_FILES += VideoDecoderVP8.cpp
+LOCAL_CFLAGS += -DUSE_HW_VP8
+endif
+
+include $(BUILD_SHARED_LIBRARY)
diff --git a/videodecoder/VideoDecoderAVC.cpp b/videodecoder/VideoDecoderAVC.cpp
new file mode 100644
index 0000000..8ed91f9
--- /dev/null
+++ b/videodecoder/VideoDecoderAVC.cpp
@@ -0,0 +1,992 @@
+/*
+* Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+#include "VideoDecoderAVC.h"
+#include "VideoDecoderTrace.h"
+#include <string.h>
+#include <cutils/properties.h>
+
+// Macros for actual buffer needed calculation
+#define WIDI_CONSUMED 6
+#define HDMI_CONSUMED 2
+#define NW_CONSUMED 2
+#define POC_DEFAULT 0x7FFFFFFF
+
+VideoDecoderAVC::VideoDecoderAVC(const char *mimeType)
+ : VideoDecoderBase(mimeType, VBP_H264),
+ mToggleDPB(0),
+      mErrorConcealment(false) {
+
+ invalidateDPB(0);
+ invalidateDPB(1);
+ mLastPictureFlags = VA_PICTURE_H264_INVALID;
+}
+
+VideoDecoderAVC::~VideoDecoderAVC() {
+ stop();
+}
+
+Decode_Status VideoDecoderAVC::start(VideoConfigBuffer *buffer) {
+ Decode_Status status;
+
+ status = VideoDecoderBase::start(buffer);
+ CHECK_STATUS("VideoDecoderBase::start");
+
+ // We don't want base class to manage reference.
+ VideoDecoderBase::ManageReference(false);
+ // output by picture order count
+ VideoDecoderBase::setOutputMethod(OUTPUT_BY_POC);
+
+ mErrorConcealment = buffer->flag & WANT_ERROR_CONCEALMENT;
+ if (buffer->data == NULL || buffer->size == 0) {
+ WTRACE("No config data to start VA.");
+ if ((buffer->flag & HAS_SURFACE_NUMBER) && (buffer->flag & HAS_VA_PROFILE)) {
+ ITRACE("Used client supplied profile and surface to start VA.");
+ return VideoDecoderBase::setupVA(buffer->surfaceNumber, buffer->profile);
+ }
+ return DECODE_SUCCESS;
+ }
+
+ vbp_data_h264 *data = NULL;
+ status = VideoDecoderBase::parseBuffer(buffer->data, buffer->size, true, (void**)&data);
+ CHECK_STATUS("VideoDecoderBase::parseBuffer");
+
+ status = startVA(data);
+ return status;
+}
+
+void VideoDecoderAVC::stop(void) {
+ // drop the last frame and ignore return value
+ endDecodingFrame(true);
+ VideoDecoderBase::stop();
+ invalidateDPB(0);
+ invalidateDPB(1);
+ mToggleDPB = 0;
+ mErrorConcealment = false;
+ mLastPictureFlags = VA_PICTURE_H264_INVALID;
+}
+
+void VideoDecoderAVC::flush(void) {
+ // drop the frame and ignore return value
+ VideoDecoderBase::flush();
+ invalidateDPB(0);
+ invalidateDPB(1);
+ mToggleDPB = 0;
+ mLastPictureFlags = VA_PICTURE_H264_INVALID;
+}
+
+Decode_Status VideoDecoderAVC::decode(VideoDecodeBuffer *buffer) {
+ Decode_Status status;
+ vbp_data_h264 *data = NULL;
+ if (buffer == NULL) {
+ return DECODE_INVALID_DATA;
+ }
+ status = VideoDecoderBase::parseBuffer(
+ buffer->data,
+ buffer->size,
+ false,
+ (void**)&data);
+ CHECK_STATUS("VideoDecoderBase::parseBuffer");
+
+ if (!mVAStarted) {
+ if (data->has_sps && data->has_pps) {
+ status = startVA(data);
+ CHECK_STATUS("startVA");
+ } else {
+ WTRACE("Can't start VA as either SPS or PPS is still not available.");
+ return DECODE_SUCCESS;
+ }
+ }
+
+ VideoDecoderBase::setRotationDegrees(buffer->rotationDegrees);
+
+ status = decodeFrame(buffer, data);
+ if (status == DECODE_MULTIPLE_FRAME) {
+ buffer->ext = &mExtensionBuffer;
+ mExtensionBuffer.extType = PACKED_FRAME_TYPE;
+ mExtensionBuffer.extSize = sizeof(mPackedFrame);
+ mExtensionBuffer.extData = (uint8_t*)&mPackedFrame;
+ }
+ return status;
+}
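+
+// Caller-side sketch for DECODE_MULTIPLE_FRAME (illustrative only; assumes the
+// caller owns the "buffer" that decode() just returned DECODE_MULTIPLE_FRAME
+// for, and that buffer->ext->extType == PACKED_FRAME_TYPE): the second frame
+// in the packed buffer is resubmitted from the reported offset with the
+// reported timestamp.
+#if 0
+void handlePackedFrame(IVideoDecoder *decoder, VideoDecodeBuffer *buffer) {
+    PackedFrameData *pf = (PackedFrameData*)buffer->ext->extData;
+    VideoDecodeBuffer second = *buffer;
+    second.data += pf->offSet;
+    second.size -= pf->offSet;
+    second.timeStamp = pf->timestamp;
+    decoder->decode(&second); // decode the remaining frame of the packed buffer
+}
+#endif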
+
+Decode_Status VideoDecoderAVC::decodeFrame(VideoDecodeBuffer *buffer, vbp_data_h264 *data) {
+ Decode_Status status;
+ if (data->has_sps == 0 || data->has_pps == 0) {
+ return DECODE_NO_CONFIG;
+ }
+
+ mVideoFormatInfo.flags = 0;
+ uint32_t fieldFlags = 0;
+ for (unsigned int i = 0; i < data->num_pictures; i++) {
+ VAPictureH264 &pic = data->pic_data[i].pic_parms->CurrPic;
+ fieldFlags |= pic.flags;
+    // Don't remove the following code; it can be enabled for debugging the DPB.
+#if 0
+ VTRACE("%d: decoding frame %.2f, poc top = %d, poc bottom = %d, flags = %d, reference = %d",
+ i,
+ buffer->timeStamp/1E6,
+ pic.TopFieldOrderCnt,
+ pic.BottomFieldOrderCnt,
+ pic.flags,
+ (pic.flags & VA_PICTURE_H264_SHORT_TERM_REFERENCE) ||
+ (pic.flags & VA_PICTURE_H264_LONG_TERM_REFERENCE));
+#endif
+ }
+ int32_t topField = fieldFlags & VA_PICTURE_H264_TOP_FIELD;
+ int32_t botField = fieldFlags & VA_PICTURE_H264_BOTTOM_FIELD;
+ if ((topField == 0 && botField != 0) || (topField != 0 && botField == 0)) {
+ mVideoFormatInfo.flags |= IS_SINGLE_FIELD;
+ }
+
+ if (data->new_sps || data->new_pps) {
+ status = handleNewSequence(data);
+ CHECK_STATUS("handleNewSequence");
+ }
+
+ if (isWiDiStatusChanged()) {
+ mSizeChanged = false;
+ flushSurfaceBuffers();
+ return DECODE_FORMAT_CHANGE;
+ }
+
+ // first pic_data always exists, check if any slice is parsed
+ if (data->pic_data[0].num_slices == 0) {
+ ITRACE("No slice available for decoding.");
+ status = mSizeChanged ? DECODE_FORMAT_CHANGE : DECODE_SUCCESS;
+ mSizeChanged = false;
+ return status;
+ }
+
+ uint64_t lastPTS = mCurrentPTS;
+ mCurrentPTS = buffer->timeStamp;
+ //if (lastPTS != mCurrentPTS) {
+ if (isNewFrame(data, lastPTS == mCurrentPTS)) {
+ if (mLowDelay) {
+ // start decoding a new frame
+ status = beginDecodingFrame(data);
+ if (status != DECODE_SUCCESS) {
+ Decode_Status st = status;
+ // finish decoding the last frame if
+ // encounter error when decode the new frame
+ status = endDecodingFrame(false);
+ CHECK_STATUS("endDecodingFrame");
+ return st;
+ }
+ }
+
+ // finish decoding the last frame
+ status = endDecodingFrame(false);
+ CHECK_STATUS("endDecodingFrame");
+
+ if (!mLowDelay) {
+ // start decoding a new frame
+ status = beginDecodingFrame(data);
+ CHECK_STATUS("beginDecodingFrame");
+ }
+ } else {
+ status = continueDecodingFrame(data);
+ CHECK_STATUS("continueDecodingFrame");
+ }
+
+ // HAS_COMPLETE_FRAME is not reliable as it may indicate end of a field
+#if 0
+ if (buffer->flag & HAS_COMPLETE_FRAME) {
+ // finish decoding current frame
+ status = endDecodingFrame(false);
+ CHECK_STATUS("endDecodingFrame");
+ }
+#endif
+ return DECODE_SUCCESS;
+}
+
+Decode_Status VideoDecoderAVC::beginDecodingFrame(vbp_data_h264 *data) {
+ Decode_Status status;
+
+ status = acquireSurfaceBuffer();
+ CHECK_STATUS("acquireSurfaceBuffer");
+ VAPictureH264 *picture = &(data->pic_data[0].pic_parms->CurrPic);
+ if ((picture->flags & VA_PICTURE_H264_SHORT_TERM_REFERENCE) ||
+ (picture->flags & VA_PICTURE_H264_LONG_TERM_REFERENCE)) {
+ mAcquiredBuffer->referenceFrame = true;
+ } else {
+ mAcquiredBuffer->referenceFrame = false;
+ }
+ // set asReference in updateDPB
+
+ if (picture->flags & VA_PICTURE_H264_TOP_FIELD) {
+ mAcquiredBuffer->renderBuffer.scanFormat = VA_BOTTOM_FIELD | VA_TOP_FIELD;
+ } else {
+ mAcquiredBuffer->renderBuffer.scanFormat = VA_FRAME_PICTURE;
+ }
+
+ // TODO: Set the discontinuity flag
+ mAcquiredBuffer->renderBuffer.flag = 0;
+ mAcquiredBuffer->renderBuffer.timeStamp = mCurrentPTS;
+ mAcquiredBuffer->pictureOrder = getPOC(picture);
+
+ if (mSizeChanged) {
+ mAcquiredBuffer->renderBuffer.flag |= IS_RESOLUTION_CHANGE;
+ mSizeChanged = false;
+ }
+
+ status = continueDecodingFrame(data);
+ // surface buffer is released if decode fails
+ return status;
+}
+
+
+Decode_Status VideoDecoderAVC::continueDecodingFrame(vbp_data_h264 *data) {
+ Decode_Status status;
+ vbp_picture_data_h264 *picData = data->pic_data;
+
+    // TODO: remove this debugging check
+ if (mAcquiredBuffer == NULL || mAcquiredBuffer->renderBuffer.surface == VA_INVALID_SURFACE) {
+ ETRACE("mAcquiredBuffer is NULL. Implementation bug.");
+ return DECODE_FAIL;
+ }
+ for (uint32_t picIndex = 0; picIndex < data->num_pictures; picIndex++, picData++) {
+ // sanity check
+ if (picData == NULL || picData->pic_parms == NULL || picData->slc_data == NULL || picData->num_slices == 0) {
+ return DECODE_PARSER_FAIL;
+ }
+
+ if (picIndex > 0 &&
+ (picData->pic_parms->CurrPic.flags & (VA_PICTURE_H264_TOP_FIELD | VA_PICTURE_H264_BOTTOM_FIELD)) == 0) {
+ // it is a packed frame buffer
+ vbp_picture_data_h264 *lastPic = &data->pic_data[picIndex - 1];
+ vbp_slice_data_h264 *sliceData = &(lastPic->slc_data[lastPic->num_slices - 1]);
+ mPackedFrame.offSet = sliceData->slice_size + sliceData->slice_offset;
+ mPackedFrame.timestamp = mCurrentPTS; // use the current time stamp for the packed frame
+ ITRACE("slice data offset= %d, size = %d", sliceData->slice_offset, sliceData->slice_size);
+ return DECODE_MULTIPLE_FRAME;
+ }
+
+ for (uint32_t sliceIndex = 0; sliceIndex < picData->num_slices; sliceIndex++) {
+ status = decodeSlice(data, picIndex, sliceIndex);
+ if (status != DECODE_SUCCESS) {
+ endDecodingFrame(true);
+ // TODO: this is new code
+ // remove current frame from DPB as it can't be decoded.
+ removeReferenceFromDPB(picData->pic_parms);
+ return status;
+ }
+ }
+ }
+ return DECODE_SUCCESS;
+}
+
+Decode_Status VideoDecoderAVC::decodeSlice(vbp_data_h264 *data, uint32_t picIndex, uint32_t sliceIndex) {
+ Decode_Status status;
+ VAStatus vaStatus;
+ uint32_t bufferIDCount = 0;
+ // maximum 4 buffers to render a slice: picture parameter, IQMatrix, slice parameter, slice data
+ VABufferID bufferIDs[4];
+
+ vbp_picture_data_h264 *picData = &(data->pic_data[picIndex]);
+ vbp_slice_data_h264 *sliceData = &(picData->slc_data[sliceIndex]);
+ VAPictureParameterBufferH264 *picParam = picData->pic_parms;
+ VASliceParameterBufferH264 *sliceParam = &(sliceData->slc_parms);
+
+ if (sliceParam->first_mb_in_slice == 0 || mDecodingFrame == false) {
+ // either condition indicates start of a new frame
+ if (sliceParam->first_mb_in_slice != 0) {
+ WTRACE("The first slice is lost.");
+ // TODO: handle the first slice lost
+ }
+ if (mDecodingFrame) {
+            // interlaced content: complete decoding of the first field
+ vaStatus = vaEndPicture(mVADisplay, mVAContext);
+ CHECK_VA_STATUS("vaEndPicture");
+
+            // for interlaced content, the top field may be valid only after the second field is parsed
+ int32_t poc = getPOC(&(picParam->CurrPic));
+ if (poc < mAcquiredBuffer->pictureOrder) {
+ mAcquiredBuffer->pictureOrder = poc;
+ }
+ }
+
+        // Check that no reference frame is lost before decoding a frame
+
+ // Update the reference frames and surface IDs for DPB and current frame
+ status = updateDPB(picParam);
+ CHECK_STATUS("updateDPB");
+
+#ifndef USE_AVC_SHORT_FORMAT
+    // We have to provide a hacked DPB rather than the complete DPB to libva as a workaround
+ status = updateReferenceFrames(picData);
+ CHECK_STATUS("updateReferenceFrames");
+#endif
+ vaStatus = vaBeginPicture(mVADisplay, mVAContext, mAcquiredBuffer->renderBuffer.surface);
+ CHECK_VA_STATUS("vaBeginPicture");
+
+ // start decoding a frame
+ mDecodingFrame = true;
+
+ vaStatus = vaCreateBuffer(
+ mVADisplay,
+ mVAContext,
+ VAPictureParameterBufferType,
+ sizeof(VAPictureParameterBufferH264),
+ 1,
+ picParam,
+ &bufferIDs[bufferIDCount]);
+ CHECK_VA_STATUS("vaCreatePictureParameterBuffer");
+ bufferIDCount++;
+
+ vaStatus = vaCreateBuffer(
+ mVADisplay,
+ mVAContext,
+ VAIQMatrixBufferType,
+ sizeof(VAIQMatrixBufferH264),
+ 1,
+ data->IQ_matrix_buf,
+ &bufferIDs[bufferIDCount]);
+ CHECK_VA_STATUS("vaCreateIQMatrixBuffer");
+ bufferIDCount++;
+ }
+
+#ifndef USE_AVC_SHORT_FORMAT
+
+ status = setReference(sliceParam);
+ CHECK_STATUS("setReference");
+
+ vaStatus = vaCreateBuffer(
+ mVADisplay,
+ mVAContext,
+ VASliceParameterBufferType,
+ sizeof(VASliceParameterBufferH264),
+ 1,
+ sliceParam,
+ &bufferIDs[bufferIDCount]);
+#else
+ vaStatus = vaCreateBuffer(
+ mVADisplay,
+ mVAContext,
+ VASliceParameterBufferType,
+ sizeof(VASliceParameterBufferH264Base),
+ 1,
+ sliceParam,
+ &bufferIDs[bufferIDCount]);
+#endif
+ CHECK_VA_STATUS("vaCreateSliceParameterBuffer");
+ bufferIDCount++;
+
+ vaStatus = vaCreateBuffer(
+ mVADisplay,
+ mVAContext,
+ VASliceDataBufferType,
+ sliceData->slice_size, //size
+ 1, //num_elements
+ sliceData->buffer_addr + sliceData->slice_offset,
+ &bufferIDs[bufferIDCount]);
+ CHECK_VA_STATUS("vaCreateSliceDataBuffer");
+ bufferIDCount++;
+
+ vaStatus = vaRenderPicture(
+ mVADisplay,
+ mVAContext,
+ bufferIDs,
+ bufferIDCount);
+ CHECK_VA_STATUS("vaRenderPicture");
+
+ return DECODE_SUCCESS;
+}
+
+Decode_Status VideoDecoderAVC::setReference(VASliceParameterBufferH264 *sliceParam) {
+ int32_t numList = 1;
+ // TODO: set numList to 0 if it is I slice
+ if (sliceParam->slice_type == 1 || sliceParam->slice_type == 6) {
+ // B slice
+ numList = 2;
+ }
+
+ int32_t activeMinus1 = sliceParam->num_ref_idx_l0_active_minus1;
+ VAPictureH264 *ref = sliceParam->RefPicList0;
+
+ for (int32_t i = 0; i < numList; i++) {
+ if (activeMinus1 >= REF_LIST_SIZE) {
+ ETRACE("Invalid activeMinus1 (%d)", activeMinus1);
+ return DECODE_PARSER_FAIL;
+ }
+ for (int32_t j = 0; j <= activeMinus1; j++, ref++) {
+ if (!(ref->flags & VA_PICTURE_H264_INVALID)) {
+ ref->picture_id = findSurface(ref);
+ if (ref->picture_id == VA_INVALID_SURFACE) {
+                    // Error DecodeRefMissing is counted once even if there are multiple missing references
+ mAcquiredBuffer->renderBuffer.errBuf.errorNumber = 1;
+ mAcquiredBuffer->renderBuffer.errBuf.errorArray[0].type = DecodeRefMissing;
+
+ if (mLastReference) {
+ WTRACE("Reference frame %d is missing. Use last reference", getPOC(ref));
+ ref->picture_id = mLastReference->renderBuffer.surface;
+ } else {
+ ETRACE("Reference frame %d is missing. Stop decoding.", getPOC(ref));
+ return DECODE_NO_REFERENCE;
+ }
+ }
+ }
+ }
+ activeMinus1 = sliceParam->num_ref_idx_l1_active_minus1;
+ ref = sliceParam->RefPicList1;
+ }
+ return DECODE_SUCCESS;
+}
+
+Decode_Status VideoDecoderAVC::updateDPB(VAPictureParameterBufferH264 *picParam) {
+ clearAsReference(mToggleDPB);
+ // pointer to toggled DPB (new)
+ DecodedPictureBuffer *dpb = mDPBs[!mToggleDPB];
+ VAPictureH264 *ref = picParam->ReferenceFrames;
+
+ // update current picture ID
+ picParam->CurrPic.picture_id = mAcquiredBuffer->renderBuffer.surface;
+
+ // build new DPB
+ for (int32_t i = 0; i < MAX_REF_NUMBER; i++, ref++) {
+ if (ref->flags & VA_PICTURE_H264_INVALID) {
+ continue;
+ }
+#ifdef USE_AVC_SHORT_FORMAT
+ ref->picture_id = findSurface(ref);
+#endif
+ dpb->poc = getPOC(ref);
+        // look for the latest ref frame in the DPB with the specified POC, in case frames share the same POC
+ dpb->surfaceBuffer = findRefSurfaceBuffer(ref);
+ if (dpb->surfaceBuffer == NULL) {
+ ETRACE("Reference frame %d is missing for current frame %d", dpb->poc, getPOC(&(picParam->CurrPic)));
+ // Error DecodeRefMissing is counted once even there're multiple
+ mAcquiredBuffer->renderBuffer.errBuf.errorNumber = 1;
+ mAcquiredBuffer->renderBuffer.errBuf.errorArray[0].type = DecodeRefMissing;
+ if (dpb->poc == getPOC(&(picParam->CurrPic))) {
+ WTRACE("updateDPB: Using the current picture for missing reference.");
+ dpb->surfaceBuffer = mAcquiredBuffer;
+ } else if (mLastReference) {
+ WTRACE("updateDPB: Use last reference frame %d for missing reference.", mLastReference->pictureOrder);
+ // TODO: this is new code for error resilience
+ dpb->surfaceBuffer = mLastReference;
+ } else {
+ WTRACE("updateDPB: Unable to recover the missing reference frame.");
+                // continue building the DPB without updating the dpb pointer,
+                // as this reference may not actually be used;
+                // this typically happens after seeking to a non-IDR I frame.
+                continue;
+                //return DECODE_NO_REFERENCE;
+ }
+ }
+ if (dpb->surfaceBuffer) {
+            // this surface is used as a reference
+ dpb->surfaceBuffer->asReferernce = true;
+ }
+ dpb++;
+ }
+
+ // add current frame to DPB if it is a reference frame
+ if ((picParam->CurrPic.flags & VA_PICTURE_H264_SHORT_TERM_REFERENCE) ||
+ (picParam->CurrPic.flags & VA_PICTURE_H264_LONG_TERM_REFERENCE)) {
+ dpb->poc = getPOC(&(picParam->CurrPic));
+ dpb->surfaceBuffer = mAcquiredBuffer;
+ dpb->surfaceBuffer->asReferernce = true;
+ }
+    // invalidate the currently used DPB
+ invalidateDPB(mToggleDPB);
+ mToggleDPB = !mToggleDPB;
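+    // Debug aid (disabled by default, mirroring the "#if 0" block in
+    // decodeFrame): dump the DPB that was just built. Illustrative only.
+#if 0
+    DecodedPictureBuffer *d = mDPBs[mToggleDPB];
+    for (int32_t i = 0; i < DPB_SIZE; i++, d++) {
+        if (d->surfaceBuffer != NULL) {
+            VTRACE("DPB[%d]: poc = %d, surface = 0x%x", i, d->poc,
+                d->surfaceBuffer->renderBuffer.surface);
+        }
+    }
+#endif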
+ return DECODE_SUCCESS;
+}
+
+Decode_Status VideoDecoderAVC::updateReferenceFrames(vbp_picture_data_h264 *picData) {
+ bool found = false;
+ uint32_t flags = 0;
+ VAPictureParameterBufferH264 *picParam = picData->pic_parms;
+ VASliceParameterBufferH264 *sliceParam = NULL;
+ uint8_t activeMinus1 = 0;
+ VAPictureH264 *refList = NULL;
+ VAPictureH264 *dpb = picParam->ReferenceFrames;
+ VAPictureH264 *refFrame = NULL;
+
+ // invalidate DPB in the picture buffer
+ memset(picParam->ReferenceFrames, 0xFF, sizeof(picParam->ReferenceFrames));
+ picParam->num_ref_frames = 0;
+
+ // update DPB from the reference list in each slice.
+ for (uint32_t slice = 0; slice < picData->num_slices; slice++) {
+ sliceParam = &(picData->slc_data[slice].slc_parms);
+
+ for (int32_t list = 0; list < 2; list++) {
+ refList = (list == 0) ? sliceParam->RefPicList0 :
+ sliceParam->RefPicList1;
+ activeMinus1 = (list == 0) ? sliceParam->num_ref_idx_l0_active_minus1 :
+ sliceParam->num_ref_idx_l1_active_minus1;
+ if (activeMinus1 >= REF_LIST_SIZE) {
+ return DECODE_PARSER_FAIL;
+ }
+ for (uint8_t item = 0; item < (uint8_t)(activeMinus1 + 1); item++, refList++) {
+ if (refList->flags & VA_PICTURE_H264_INVALID) {
+ break;
+ }
+ found = false;
+ refFrame = picParam->ReferenceFrames;
+ for (uint8_t frame = 0; frame < picParam->num_ref_frames; frame++, refFrame++) {
+ if (refFrame->TopFieldOrderCnt == refList->TopFieldOrderCnt) {
+                        // check for a complementary field
+ flags = refFrame->flags | refList->flags;
+ //If both TOP and BOTTOM are set, we'll clear those flags
+ if ((flags & VA_PICTURE_H264_TOP_FIELD) &&
+ (flags & VA_PICTURE_H264_BOTTOM_FIELD)) {
+ refFrame->flags = VA_PICTURE_H264_SHORT_TERM_REFERENCE;
+ }
+ found = true; //already in the DPB; will not add this one
+ break;
+ }
+ }
+ if (found == false) {
+ // add a new reference to the DPB
+ dpb->picture_id = findSurface(refList);
+ if (dpb->picture_id == VA_INVALID_SURFACE) {
+ if (mLastReference != NULL) {
+ dpb->picture_id = mLastReference->renderBuffer.surface;
+ } else {
+ ETRACE("Reference frame %d is missing. Stop updating references frames.", getPOC(refList));
+ return DECODE_NO_REFERENCE;
+ }
+ }
+ dpb->flags = refList->flags;
+                    // if a bottom field is in the DPB, its top field must be in the DPB as well,
+                    // so clear the bottom flag, or the VED will be confused when addressing the top field
+ if (dpb->flags & VA_PICTURE_H264_BOTTOM_FIELD)
+ dpb->flags &= (~VA_PICTURE_H264_BOTTOM_FIELD);
+ dpb->frame_idx = refList->frame_idx;
+ dpb->TopFieldOrderCnt = refList->TopFieldOrderCnt;
+ dpb->BottomFieldOrderCnt = refList->BottomFieldOrderCnt;
+ dpb++;
+ picParam->num_ref_frames++;
+ }
+ }
+ }
+ }
+ return DECODE_SUCCESS;
+}
+
+void VideoDecoderAVC::removeReferenceFromDPB(VAPictureParameterBufferH264 *picParam) {
+ // remove the current frame from DPB as it can't be decoded.
+ if ((picParam->CurrPic.flags & VA_PICTURE_H264_SHORT_TERM_REFERENCE) ||
+ (picParam->CurrPic.flags & VA_PICTURE_H264_LONG_TERM_REFERENCE)) {
+ DecodedPictureBuffer *dpb = mDPBs[mToggleDPB];
+ int32_t poc = getPOC(&(picParam->CurrPic));
+ for (int32_t i = 0; i < DPB_SIZE; i++, dpb++) {
+ if (poc == dpb->poc) {
+ dpb->poc = (int32_t)POC_DEFAULT;
+ if (dpb->surfaceBuffer) {
+ dpb->surfaceBuffer->asReferernce = false;
+ }
+ dpb->surfaceBuffer = NULL;
+ break;
+ }
+ }
+ }
+}
+
+int32_t VideoDecoderAVC::getPOC(VAPictureH264 *pic) {
+ if (pic->flags & VA_PICTURE_H264_BOTTOM_FIELD) {
+ return pic->BottomFieldOrderCnt;
+ }
+ return pic->TopFieldOrderCnt;
+}
+
+VASurfaceID VideoDecoderAVC::findSurface(VAPictureH264 *pic) {
+ VideoSurfaceBuffer *p = findSurfaceBuffer(pic);
+ if (p == NULL) {
+ ETRACE("Could not find surface for poc %d", getPOC(pic));
+ return VA_INVALID_SURFACE;
+ }
+ return p->renderBuffer.surface;
+}
+
+VideoSurfaceBuffer* VideoDecoderAVC::findSurfaceBuffer(VAPictureH264 *pic) {
+ DecodedPictureBuffer *dpb = mDPBs[mToggleDPB];
+ for (int32_t i = 0; i < DPB_SIZE; i++, dpb++) {
+ if (dpb->poc == pic->BottomFieldOrderCnt ||
+ dpb->poc == pic->TopFieldOrderCnt) {
+            // TODO: remove this debugging check
+ if (dpb->surfaceBuffer == NULL) {
+ ETRACE("Invalid surface buffer in the DPB for poc %d.", getPOC(pic));
+ }
+ return dpb->surfaceBuffer;
+ }
+ }
+ // ETRACE("Unable to find surface for poc %d", getPOC(pic));
+ return NULL;
+}
+
+VideoSurfaceBuffer* VideoDecoderAVC::findRefSurfaceBuffer(VAPictureH264 *pic) {
+ DecodedPictureBuffer *dpb = mDPBs[mToggleDPB];
+    // always look for the latest one in the DPB, in case ref frames share the same POC
+ dpb += (DPB_SIZE - 1);
+ for (int32_t i = DPB_SIZE; i > 0; i--, dpb--) {
+ if (dpb->poc == pic->BottomFieldOrderCnt ||
+ dpb->poc == pic->TopFieldOrderCnt) {
+            // TODO: remove this debugging check
+ if (dpb->surfaceBuffer == NULL) {
+ ETRACE("Invalid surface buffer in the DPB for poc %d.", getPOC(pic));
+ }
+ return dpb->surfaceBuffer;
+ }
+ }
+ ETRACE("Unable to find surface for poc %d", getPOC(pic));
+ return NULL;
+}
+
+void VideoDecoderAVC::invalidateDPB(int toggle) {
+ DecodedPictureBuffer* p = mDPBs[toggle];
+ for (int i = 0; i < DPB_SIZE; i++) {
+ p->poc = (int32_t) POC_DEFAULT;
+ p->surfaceBuffer = NULL;
+ p++;
+ }
+}
+
+void VideoDecoderAVC::clearAsReference(int toggle) {
+ DecodedPictureBuffer* p = mDPBs[toggle];
+ for (int i = 0; i < DPB_SIZE; i++) {
+ if (p->surfaceBuffer) {
+ p->surfaceBuffer->asReferernce = false;
+ }
+ p++;
+ }
+}
+
+Decode_Status VideoDecoderAVC::startVA(vbp_data_h264 *data) {
+ int32_t DPBSize = getDPBSize(data);
+
+    // Use high profile for all H.264 profiles (baseline, main and high) except constrained baseline
+ VAProfile vaProfile = VAProfileH264High;
+
+ // TODO: determine when to use VAProfileH264ConstrainedBaseline, set only if we are told to do so
+ if ((data->codec_data->profile_idc == 66 || data->codec_data->constraint_set0_flag == 1) &&
+ data->codec_data->constraint_set1_flag == 1) {
+ if (mErrorConcealment) {
+ vaProfile = VAProfileH264ConstrainedBaseline;
+ }
+ }
+
+ VideoDecoderBase::setOutputWindowSize(mConfigBuffer.flag & WANT_ADAPTIVE_PLAYBACK ? OUTPUT_WINDOW_SIZE : DPBSize);
+ updateFormatInfo(data);
+
+    // for 1080p, limit the total number of surfaces to 19 per the hardware limitation.
+    // (An earlier workaround that cut the maximum from 19 to 10 for memory shortage
+    // has been removed.)
+    if (mVideoFormatInfo.height == 1088 && DPBSize + AVC_EXTRA_SURFACE_NUMBER > 19) {
+ DPBSize = 19 - AVC_EXTRA_SURFACE_NUMBER;
+ }
+
+ if (mConfigBuffer.flag & WANT_ADAPTIVE_PLAYBACK) {
+        // When adaptive playback is enabled, turn off low delay mode.
+        // Otherwise there may be a 240 ms stutter if the output mode changes from LowDelay to Delay.
+ enableLowDelayMode(false);
+ } else {
+ // for baseline profile, enable low delay mode automatically
+ enableLowDelayMode(data->codec_data->profile_idc == 66);
+ }
+
+ return VideoDecoderBase::setupVA(DPBSize + AVC_EXTRA_SURFACE_NUMBER, vaProfile);
+}
+
+void VideoDecoderAVC::updateFormatInfo(vbp_data_h264 *data) {
+ // new video size
+ uint32_t width = (data->pic_data[0].pic_parms->picture_width_in_mbs_minus1 + 1) * 16;
+ uint32_t height = (data->pic_data[0].pic_parms->picture_height_in_mbs_minus1 + 1) * 16;
+ ITRACE("updateFormatInfo: current size: %d x %d, new size: %d x %d",
+ mVideoFormatInfo.width, mVideoFormatInfo.height, width, height);
+
+ if ((mVideoFormatInfo.width != width ||
+ mVideoFormatInfo.height != height) &&
+ width && height) {
+ if (VideoDecoderBase::alignMB(mVideoFormatInfo.width) != width ||
+ VideoDecoderBase::alignMB(mVideoFormatInfo.height) != height) {
+ mSizeChanged = true;
+ ITRACE("Video size is changed.");
+ }
+ mVideoFormatInfo.width = width;
+ mVideoFormatInfo.height = height;
+ }
+
+ // video_range has default value of 0.
+ mVideoFormatInfo.videoRange = data->codec_data->video_full_range_flag;
+
+ switch (data->codec_data->matrix_coefficients) {
+ case 1:
+ mVideoFormatInfo.colorMatrix = VA_SRC_BT709;
+ break;
+
+ // ITU-R Recommendation BT.470-6 System B, G (MP4), same as
+ // SMPTE 170M/BT601
+ case 5:
+ case 6:
+ mVideoFormatInfo.colorMatrix = VA_SRC_BT601;
+ break;
+
+ default:
+ // unknown color matrix, set to 0 so color space flag will not be set.
+ mVideoFormatInfo.colorMatrix = 0;
+ break;
+ }
+ mVideoFormatInfo.aspectX = data->codec_data->sar_width;
+ mVideoFormatInfo.aspectY = data->codec_data->sar_height;
+ mVideoFormatInfo.bitrate = data->codec_data->bit_rate;
+ mVideoFormatInfo.cropLeft = data->codec_data->crop_left;
+ mVideoFormatInfo.cropRight = data->codec_data->crop_right;
+ mVideoFormatInfo.cropTop = data->codec_data->crop_top;
+ mVideoFormatInfo.cropBottom = data->codec_data->crop_bottom;
+
+ ITRACE("Cropping: left = %d, top = %d, right = %d, bottom = %d",
+ data->codec_data->crop_left,
+ data->codec_data->crop_top,
+ data->codec_data->crop_right,
+ data->codec_data->crop_bottom);
+
+ if (mConfigBuffer.flag & WANT_SURFACE_PROTECTION) {
+ mVideoFormatInfo.actualBufferNeeded = mConfigBuffer.surfaceNumber;
+ } else {
+        // The actual number of buffers needed is
+        // outputQueue + nativewindow_owned + num_ref_frames + widi_need_max + 1 (spare buffer),
+        // where outputQueue = min(DPB, 8)
+ mVideoFormatInfo.actualBufferNeeded = mOutputWindowSize + NW_CONSUMED /* Owned by native window */
+ + data->codec_data->num_ref_frames
+#ifndef USE_GEN_HW
+ + HDMI_CONSUMED /* Two extra buffers are needed for native window buffer cycling */
+ + (mWiDiOn ? WIDI_CONSUMED : 0) /* WiDi maximum needs */
+#endif
+ + 1;
+ }
+
+ ITRACE("actualBufferNeeded =%d", mVideoFormatInfo.actualBufferNeeded);
+
+ mVideoFormatInfo.valid = true;
+
+ setRenderRect();
+}
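+
+// A worked instance of the budget above (illustrative numbers, not a fixed
+// configuration): mOutputWindowSize = 8, num_ref_frames = 4, WiDi off,
+// non-GEN build. Disabled like the other debug snippets in this file.
+#if 0
+static const uint32_t kExampleBufferNeed =
+        8               /* output queue */
+        + NW_CONSUMED   /* owned by native window */
+        + 4             /* reference frames */
+        + HDMI_CONSUMED /* native window buffer cycling */
+        + 1;            /* spare buffer */ // = 17
+#endif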
+
+bool VideoDecoderAVC::isWiDiStatusChanged() {
+#ifndef USE_GEN_HW
+ if (mWiDiOn)
+ return false;
+
+ if (mConfigBuffer.flag & WANT_SURFACE_PROTECTION)
+ return false;
+
+ if (!(mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER))
+ return false;
+
+ char prop[PROPERTY_VALUE_MAX];
+ bool widi_on = (property_get("media.widi.enabled", prop, NULL) > 0) &&
+ (!strcmp(prop, "1") || !strcasecmp(prop, "true"));
+ if (widi_on) {
+ mVideoFormatInfo.actualBufferNeeded += WIDI_CONSUMED;
+ mWiDiOn = true;
+ ITRACE("WiDi is enabled, actual buffer needed is %d", mVideoFormatInfo.actualBufferNeeded);
+ return true;
+ }
+ return false;
+#else
+ return false;
+#endif
+}
+
+Decode_Status VideoDecoderAVC::handleNewSequence(vbp_data_h264 *data) {
+ updateFormatInfo(data);
+ bool needFlush = false;
+ bool rawDataMode = !(mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER);
+
+ if (!rawDataMode) {
+ needFlush = (mVideoFormatInfo.width > mVideoFormatInfo.surfaceWidth)
+ || (mVideoFormatInfo.height > mVideoFormatInfo.surfaceHeight)
+ || isWiDiStatusChanged()
+ || (mVideoFormatInfo.actualBufferNeeded > mConfigBuffer.surfaceNumber);
+ }
+
+ if (needFlush || (rawDataMode && mSizeChanged)) {
+ mSizeChanged = false;
+ flushSurfaceBuffers();
+ return DECODE_FORMAT_CHANGE;
+ } else
+ return DECODE_SUCCESS;
+}
+
+bool VideoDecoderAVC::isNewFrame(vbp_data_h264 *data, bool equalPTS) {
+ if (data->num_pictures == 0) {
+ ETRACE("num_pictures == 0");
+ return true;
+ }
+
+ vbp_picture_data_h264* picData = data->pic_data;
+ if (picData->num_slices == 0) {
+ ETRACE("num_slices == 0");
+ return true;
+ }
+
+ bool newFrame = false;
+ uint32_t fieldFlags = VA_PICTURE_H264_TOP_FIELD | VA_PICTURE_H264_BOTTOM_FIELD;
+
+ if (picData->slc_data[0].slc_parms.first_mb_in_slice != 0) {
+        // not the first slice; assume it is the continuation of a partial frame
+        // TODO: check whether it is a new frame boundary, as the first slice may get lost in the streaming case.
+ WTRACE("first_mb_in_slice != 0");
+ if (!equalPTS) {
+            // return true if the timestamp differs; this is a workaround for a streaming case
+ WTRACE("different PTS, treat it as a new frame");
+ return true;
+ }
+ } else {
+ if ((picData->pic_parms->CurrPic.flags & fieldFlags) == fieldFlags) {
+ ETRACE("Current picture has both odd field and even field.");
+ }
+        // the current picture is a field or a frame, and the buffer contains the first slice; check whether
+        // the current picture and the last picture form an opposite field pair
+ if (((mLastPictureFlags | picData->pic_parms->CurrPic.flags) & fieldFlags) == fieldFlags) {
+ // opposite field
+ newFrame = false;
+ WTRACE("current picture is not at frame boundary.");
+ mLastPictureFlags = 0;
+ } else {
+ newFrame = true;
+ mLastPictureFlags = 0;
+ for (uint32_t i = 0; i < data->num_pictures; i++) {
+ mLastPictureFlags |= data->pic_data[i].pic_parms->CurrPic.flags;
+ }
+ if ((mLastPictureFlags & fieldFlags) == fieldFlags) {
+ // current buffer contains both odd field and even field.
+ mLastPictureFlags = 0;
+ }
+ }
+ }
+
+ return newFrame;
+}
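+
+// The pairing rule above, in isolation (illustrative helper, never compiled):
+// two pictures form an opposite-field pair iff the union of their field flags
+// contains both the TOP and BOTTOM bits.
+#if 0
+static bool formsFieldPair(uint32_t lastFlags, uint32_t currFlags) {
+    const uint32_t fieldFlags = VA_PICTURE_H264_TOP_FIELD | VA_PICTURE_H264_BOTTOM_FIELD;
+    // formsFieldPair(TOP, BOTTOM) -> true  (second field of the same frame)
+    // formsFieldPair(TOP, TOP)    -> false (a new frame starts)
+    return ((lastFlags | currFlags) & fieldFlags) == fieldFlags;
+}
+#endif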
+
+int32_t VideoDecoderAVC::getDPBSize(vbp_data_h264 *data) {
+    // maxDPBSize = min(1024 * MaxDPB / (PicWidthInMbs * FrameHeightInMbs * 384), 16)
+ struct DPBTable {
+ int32_t level;
+ float maxDPB;
+ } dpbTable[] = {
+ {9, 148.5},
+ {10, 148.5},
+ {11, 337.5},
+ {12, 891.0},
+ {13, 891.0},
+ {20, 891.0},
+ {21, 1782.0},
+ {22, 3037.5},
+ {30, 3037.5},
+ {31, 6750.0},
+ {32, 7680.0},
+ {40, 12288.0},
+ {41, 12288.0},
+ {42, 13056.0},
+ {50, 41400.0},
+ {51, 69120.0}
+ };
+
+ int32_t count = sizeof(dpbTable)/sizeof(DPBTable);
+ float maxDPB = 0;
+ for (int32_t i = 0; i < count; i++)
+ {
+ if (dpbTable[i].level == data->codec_data->level_idc) {
+ maxDPB = dpbTable[i].maxDPB;
+ break;
+ }
+ }
+
+ int32_t maxDPBSize = maxDPB * 1024 / (
+ (data->pic_data[0].pic_parms->picture_width_in_mbs_minus1 + 1) *
+ (data->pic_data[0].pic_parms->picture_height_in_mbs_minus1 + 1) *
+ 384);
+
+ if (maxDPBSize > 16) {
+ maxDPBSize = 16;
+ } else if (maxDPBSize == 0) {
+ maxDPBSize = 3;
+ }
+    if (maxDPBSize < data->codec_data->num_ref_frames) {
+ maxDPBSize = data->codec_data->num_ref_frames;
+ }
+
+ // add one extra frame for current frame.
+ maxDPBSize += 1;
+ ITRACE("maxDPBSize = %d, num_ref_frame = %d", maxDPBSize, data->codec_data->num_ref_frames);
+ return maxDPBSize;
+}
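+
+// A worked instance of the calculation above (illustrative, never compiled):
+#if 0
+static int32_t exampleDPBSizeFor1080pLevel41() {
+    float maxDPB = 12288.0;   // level 41 entry in dpbTable
+    int32_t mbs = 120 * 68;   // PicWidthInMbs * FrameHeightInMbs for 1920x1088
+    int32_t size = (int32_t)(maxDPB * 1024 / (mbs * 384)); // = 4
+    return size + 1;          // plus one for the current frame -> 5
+}
+#endif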
+
+Decode_Status VideoDecoderAVC::checkHardwareCapability() {
+#ifndef USE_GEN_HW
+ VAStatus vaStatus;
+ VAConfigAttrib cfgAttribs[2];
+ cfgAttribs[0].type = VAConfigAttribMaxPictureWidth;
+ cfgAttribs[1].type = VAConfigAttribMaxPictureHeight;
+ vaStatus = vaGetConfigAttributes(mVADisplay, VAProfileH264High,
+ VAEntrypointVLD, cfgAttribs, 2);
+ CHECK_VA_STATUS("vaGetConfigAttributes");
+ if (cfgAttribs[0].value * cfgAttribs[1].value < (uint32_t)mVideoFormatInfo.width * (uint32_t)mVideoFormatInfo.height) {
+ ETRACE("hardware supports resolution %d * %d smaller than the clip resolution %d * %d",
+ cfgAttribs[0].value, cfgAttribs[1].value, mVideoFormatInfo.width, mVideoFormatInfo.height);
+ return DECODE_DRIVER_FAIL;
+ }
+#endif
+ return DECODE_SUCCESS;
+}
+
+#ifdef USE_AVC_SHORT_FORMAT
+Decode_Status VideoDecoderAVC::getCodecSpecificConfigs(
+ VAProfile profile, VAConfigID *config)
+{
+ VAStatus vaStatus;
+ VAConfigAttrib attrib[2];
+
+ if (config == NULL) {
+ ETRACE("Invalid parameter!");
+ return DECODE_FAIL;
+ }
+
+ attrib[0].type = VAConfigAttribRTFormat;
+ attrib[0].value = VA_RT_FORMAT_YUV420;
+ attrib[1].type = VAConfigAttribDecSliceMode;
+ attrib[1].value = VA_DEC_SLICE_MODE_NORMAL;
+
+    vaStatus = vaGetConfigAttributes(mVADisplay, profile, VAEntrypointVLD, &attrib[1], 1);
+    CHECK_VA_STATUS("vaGetConfigAttributes");
+
+ if (attrib[1].value & VA_DEC_SLICE_MODE_BASE) {
+ ITRACE("AVC short format used");
+ attrib[1].value = VA_DEC_SLICE_MODE_BASE;
+ } else if (attrib[1].value & VA_DEC_SLICE_MODE_NORMAL) {
+ ITRACE("AVC long format ssed");
+ attrib[1].value = VA_DEC_SLICE_MODE_NORMAL;
+ } else {
+ ETRACE("Unsupported Decode Slice Mode!");
+ return DECODE_FAIL;
+ }
+
+ vaStatus = vaCreateConfig(
+ mVADisplay,
+ profile,
+ VAEntrypointVLD,
+ &attrib[0],
+ 2,
+ config);
+ CHECK_VA_STATUS("vaCreateConfig");
+
+ return DECODE_SUCCESS;
+}
+#endif
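+
+// End-to-end usage sketch (illustrative only; assumes the createVideoDecoder()
+// and releaseVideoDecoder() factories declared in VideoDecoderHost.h and
+// caller-built config/decode buffers):
+#if 0
+#include "VideoDecoderHost.h"
+
+void decodeLoopExample(VideoConfigBuffer *config, VideoDecodeBuffer *au) {
+    IVideoDecoder *decoder = createVideoDecoder("video/avc");
+    if (decoder == NULL) {
+        return;
+    }
+    if (decoder->start(config) == DECODE_SUCCESS) {
+        // feed one access unit; real callers loop until the stream ends
+        decoder->decode(au); // may also return DECODE_FORMAT_CHANGE, etc.
+        const VideoRenderBuffer *out = decoder->getOutput(false, NULL);
+        // hand "out" to the renderer when it is non-NULL
+        decoder->getOutput(true, NULL); // drain at end of stream
+        decoder->stop();
+    }
+    releaseVideoDecoder(decoder);
+}
+#endif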
diff --git a/videodecoder/VideoDecoderAVC.h b/videodecoder/VideoDecoderAVC.h
new file mode 100755
index 0000000..6129703
--- /dev/null
+++ b/videodecoder/VideoDecoderAVC.h
@@ -0,0 +1,84 @@
+/*
+* Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+#ifndef VIDEO_DECODER_AVC_H_
+#define VIDEO_DECODER_AVC_H_
+
+#include "VideoDecoderBase.h"
+
+
+class VideoDecoderAVC : public VideoDecoderBase {
+public:
+ VideoDecoderAVC(const char *mimeType);
+ virtual ~VideoDecoderAVC();
+
+ virtual Decode_Status start(VideoConfigBuffer *buffer);
+ virtual void stop(void);
+ virtual void flush(void);
+ virtual Decode_Status decode(VideoDecodeBuffer *buffer);
+
+protected:
+ virtual Decode_Status decodeFrame(VideoDecodeBuffer *buffer, vbp_data_h264 *data);
+ virtual Decode_Status beginDecodingFrame(vbp_data_h264 *data);
+ virtual Decode_Status continueDecodingFrame(vbp_data_h264 *data);
+ virtual Decode_Status decodeSlice(vbp_data_h264 *data, uint32_t picIndex, uint32_t sliceIndex);
+ Decode_Status setReference(VASliceParameterBufferH264 *sliceParam);
+ Decode_Status updateDPB(VAPictureParameterBufferH264 *picParam);
+ Decode_Status updateReferenceFrames(vbp_picture_data_h264 *picData);
+ void removeReferenceFromDPB(VAPictureParameterBufferH264 *picParam);
+ int32_t getPOC(VAPictureH264 *pic); // Picture Order Count
+ inline VASurfaceID findSurface(VAPictureH264 *pic);
+ inline VideoSurfaceBuffer* findSurfaceBuffer(VAPictureH264 *pic);
+ inline VideoSurfaceBuffer* findRefSurfaceBuffer(VAPictureH264 *pic);
+ inline void invalidateDPB(int toggle);
+ inline void clearAsReference(int toggle);
+ Decode_Status startVA(vbp_data_h264 *data);
+ void updateFormatInfo(vbp_data_h264 *data);
+ Decode_Status handleNewSequence(vbp_data_h264 *data);
+ bool isNewFrame(vbp_data_h264 *data, bool equalPTS);
+ int32_t getDPBSize(vbp_data_h264 *data);
+ virtual Decode_Status checkHardwareCapability();
+#ifdef USE_AVC_SHORT_FORMAT
+ virtual Decode_Status getCodecSpecificConfigs(VAProfile profile, VAConfigID*config);
+#endif
+ bool isWiDiStatusChanged();
+
+private:
+ struct DecodedPictureBuffer {
+ VideoSurfaceBuffer *surfaceBuffer;
+ int32_t poc; // Picture Order Count
+ };
+
+ enum {
+ AVC_EXTRA_SURFACE_NUMBER = 11,
+ // maximum DPB (Decoded Picture Buffer) size
+ MAX_REF_NUMBER = 16,
+ DPB_SIZE = 17, // DPB_SIZE = MAX_REF_NUMBER + 1,
+ REF_LIST_SIZE = 32,
+ };
+
+ // maintain 2 ping-pong decoded picture buffers
+ DecodedPictureBuffer mDPBs[2][DPB_SIZE];
+ uint8_t mToggleDPB; // 0 or 1
+ bool mErrorConcealment;
+ uint32_t mLastPictureFlags;
+ VideoExtensionBuffer mExtensionBuffer;
+ PackedFrameData mPackedFrame;
+};
+
+
+
+#endif /* VIDEO_DECODER_AVC_H_ */
diff --git a/videodecoder/VideoDecoderBase.cpp b/videodecoder/VideoDecoderBase.cpp
new file mode 100644
index 0000000..1065cd4
--- /dev/null
+++ b/videodecoder/VideoDecoderBase.cpp
@@ -0,0 +1,1514 @@
+/*
+* Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+#include "VideoDecoderBase.h"
+#include "VideoDecoderTrace.h"
+#include <string.h>
+#include <va/va_android.h>
+#include <va/va_tpi.h>
+#ifdef __SSE4_1__
+#include "use_util_sse4.h"
+#endif
+
+#define INVALID_PTS ((uint64_t)-1)
+#define MAXIMUM_POC 0x7FFFFFFF
+#define MINIMUM_POC 0x80000000
+#define ANDROID_DISPLAY_HANDLE 0x18C34078
+
+VideoDecoderBase::VideoDecoderBase(const char *mimeType, _vbp_parser_type type)
+ : mInitialized(false),
+ mLowDelay(false),
+ mDisplay(NULL),
+ mVADisplay(NULL),
+ mVAContext(VA_INVALID_ID),
+ mVAConfig(VA_INVALID_ID),
+ mVAStarted(false),
+ mCurrentPTS(INVALID_PTS),
+ mAcquiredBuffer(NULL),
+ mLastReference(NULL),
+ mForwardReference(NULL),
+ mDecodingFrame(false),
+ mSizeChanged(false),
+ mShowFrame(true),
+ mOutputWindowSize(OUTPUT_WINDOW_SIZE),
+ mRotationDegrees(0),
+ mErrReportEnabled(false),
+ mWiDiOn(false),
+ mRawOutput(false),
+ mManageReference(true),
+ mOutputMethod(OUTPUT_BY_PCT),
+ mNumSurfaces(0),
+ mSurfaceBuffers(NULL),
+ mOutputHead(NULL),
+ mOutputTail(NULL),
+ mSurfaces(NULL),
+ mVASurfaceAttrib(NULL),
+ mSurfaceUserPtr(NULL),
+ mSurfaceAcquirePos(0),
+ mNextOutputPOC(MINIMUM_POC),
+ mParserType(type),
+ mParserHandle(NULL),
+ mSignalBufferSize(0) {
+
+ memset(&mVideoFormatInfo, 0, sizeof(VideoFormatInfo));
+ memset(&mConfigBuffer, 0, sizeof(mConfigBuffer));
+ for (int i = 0; i < MAX_GRAPHIC_BUFFER_NUM; i++) {
+ mSignalBufferPre[i] = NULL;
+ }
+ pthread_mutex_init(&mLock, NULL);
+ mVideoFormatInfo.mimeType = strdup(mimeType);
+ mUseGEN = false;
+ mLibHandle = NULL;
+ mParserOpen = NULL;
+ mParserClose = NULL;
+ mParserParse = NULL;
+ mParserQuery = NULL;
+ mParserFlush = NULL;
+ mParserUpdate = NULL;
+}
+
+VideoDecoderBase::~VideoDecoderBase() {
+ pthread_mutex_destroy(&mLock);
+ stop();
+ free(mVideoFormatInfo.mimeType);
+}
+
+Decode_Status VideoDecoderBase::start(VideoConfigBuffer *buffer) {
+ if (buffer == NULL) {
+ return DECODE_INVALID_DATA;
+ }
+
+ if (mParserHandle != NULL) {
+ WTRACE("Decoder has already started.");
+ return DECODE_SUCCESS;
+ }
+ mLibHandle = dlopen("libmixvbp.so", RTLD_NOW);
+ if (mLibHandle == NULL) {
+ return DECODE_NO_PARSER;
+ }
+ mParserOpen = (OpenFunc)dlsym(mLibHandle, "vbp_open");
+ mParserClose = (CloseFunc)dlsym(mLibHandle, "vbp_close");
+ mParserParse = (ParseFunc)dlsym(mLibHandle, "vbp_parse");
+ mParserQuery = (QueryFunc)dlsym(mLibHandle, "vbp_query");
+ mParserFlush = (FlushFunc)dlsym(mLibHandle, "vbp_flush");
+ if (mParserOpen == NULL || mParserClose == NULL || mParserParse == NULL
+ || mParserQuery == NULL || mParserFlush == NULL) {
+ return DECODE_NO_PARSER;
+ }
+#if (defined USE_AVC_SHORT_FORMAT || defined USE_SLICE_HEADER_PARSING)
+ mParserUpdate = (UpdateFunc)dlsym(mLibHandle, "vbp_update");
+ if (mParserUpdate == NULL) {
+ return DECODE_NO_PARSER;
+ }
+#endif
+ if ((int32_t)mParserType != VBP_INVALID) {
+ ITRACE("mParserType = %d", mParserType);
+ if (mParserOpen(mParserType, &mParserHandle) != VBP_OK) {
+ ETRACE("Failed to open VBP parser.");
+ return DECODE_NO_PARSER;
+ }
+ }
+    // keep a copy of the config buffer (metadata only); it can be used to override VA setup parameters.
+ mConfigBuffer = *buffer;
+ mConfigBuffer.data = NULL;
+ mConfigBuffer.size = 0;
+
+ mVideoFormatInfo.width = buffer->width;
+ mVideoFormatInfo.height = buffer->height;
+ if (buffer->flag & USE_NATIVE_GRAPHIC_BUFFER) {
+ mVideoFormatInfo.surfaceWidth = buffer->graphicBufferWidth;
+ mVideoFormatInfo.surfaceHeight = buffer->graphicBufferHeight;
+ }
+ mLowDelay = buffer->flag & WANT_LOW_DELAY;
+ mRawOutput = buffer->flag & WANT_RAW_OUTPUT;
+ if (mRawOutput) {
+ WTRACE("Output is raw data.");
+ }
+
+ return DECODE_SUCCESS;
+}
+
+
+Decode_Status VideoDecoderBase::reset(VideoConfigBuffer *buffer) {
+ if (buffer == NULL) {
+ return DECODE_INVALID_DATA;
+ }
+
+ // if VA is already started, terminate VA as graphic buffers are reallocated by omxcodec
+ terminateVA();
+
+    // reset mConfigBuffer so it can be passed to startVA.
+ mConfigBuffer = *buffer;
+ mConfigBuffer.data = NULL;
+ mConfigBuffer.size = 0;
+
+ mVideoFormatInfo.width = buffer->width;
+ mVideoFormatInfo.height = buffer->height;
+ if (buffer->flag & USE_NATIVE_GRAPHIC_BUFFER) {
+ mVideoFormatInfo.surfaceWidth = buffer->graphicBufferWidth;
+ mVideoFormatInfo.surfaceHeight = buffer->graphicBufferHeight;
+ }
+ mVideoFormatInfo.actualBufferNeeded = mConfigBuffer.surfaceNumber;
+ mLowDelay = buffer->flag & WANT_LOW_DELAY;
+ mRawOutput = buffer->flag & WANT_RAW_OUTPUT;
+ if (mRawOutput) {
+ WTRACE("Output is raw data.");
+ }
+ return DECODE_SUCCESS;
+}
+
+
+
+void VideoDecoderBase::stop(void) {
+ terminateVA();
+
+ mCurrentPTS = INVALID_PTS;
+ mAcquiredBuffer = NULL;
+ mLastReference = NULL;
+ mForwardReference = NULL;
+ mDecodingFrame = false;
+ mSizeChanged = false;
+
+ // private variables
+ mLowDelay = false;
+ mRawOutput = false;
+ mNumSurfaces = 0;
+ mSurfaceAcquirePos = 0;
+ mNextOutputPOC = MINIMUM_POC;
+ mVideoFormatInfo.valid = false;
+ if (mParserHandle){
+ mParserClose(mParserHandle);
+ mParserHandle = NULL;
+ }
+ if (mLibHandle) {
+ dlclose(mLibHandle);
+ mLibHandle = NULL;
+ }
+}
+
+void VideoDecoderBase::flush(void) {
+ if (mVAStarted == false) {
+ // nothing to flush at this stage
+ return;
+ }
+
+ endDecodingFrame(true);
+
+ VideoSurfaceBuffer *p = mOutputHead;
+ // check if there's buffer with DRC flag in the output queue
+ while (p) {
+ if (p->renderBuffer.flag & IS_RESOLUTION_CHANGE) {
+ mSizeChanged = true;
+ break;
+ }
+ p = p->next;
+ }
+ // avoid setting mSurfaceAcquirePos to 0 as it may cause tearing
+ // (surface is still being rendered)
+ mSurfaceAcquirePos = (mSurfaceAcquirePos + 1) % mNumSurfaces;
+ mNextOutputPOC = MINIMUM_POC;
+ mCurrentPTS = INVALID_PTS;
+ mAcquiredBuffer = NULL;
+ mLastReference = NULL;
+ mForwardReference = NULL;
+ mOutputHead = NULL;
+ mOutputTail = NULL;
+ mDecodingFrame = false;
+
+ // flush vbp parser
+ if (mParserHandle && (mParserFlush(mParserHandle) != VBP_OK)) {
+ WTRACE("Failed to flush parser. Continue");
+ }
+
+ // initialize surface buffer without resetting mapped/raw data
+ initSurfaceBuffer(false);
+
+}
+
+void VideoDecoderBase::freeSurfaceBuffers(void) {
+ if (mVAStarted == false) {
+        // no surface buffers to free at this stage
+ return;
+ }
+
+ pthread_mutex_lock(&mLock);
+
+ endDecodingFrame(true);
+
+ // if VA is already started, terminate VA as graphic buffers are reallocated by omxcodec
+ terminateVA();
+
+ pthread_mutex_unlock(&mLock);
+}
+
+const VideoFormatInfo* VideoDecoderBase::getFormatInfo(void) {
+ return &mVideoFormatInfo;
+}
+
+const VideoRenderBuffer* VideoDecoderBase::getOutput(bool draining, VideoErrorBuffer *outErrBuf) {
+ VAStatus vaStatus;
+ if (mVAStarted == false) {
+ return NULL;
+ }
+ bool useGraphicBuffer = mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER;
+
+ if (draining) {
+ // complete decoding the last frame and ignore return
+ endDecodingFrame(false);
+ }
+
+ if (mOutputHead == NULL) {
+ return NULL;
+ }
+
+ // output by position (the first buffer)
+ VideoSurfaceBuffer *outputByPos = mOutputHead;
+
+ if (mLowDelay) {
+ mOutputHead = mOutputHead->next;
+ if (mOutputHead == NULL) {
+ mOutputTail = NULL;
+ }
+ vaStatus = vaSetTimestampForSurface(mVADisplay, outputByPos->renderBuffer.surface, outputByPos->renderBuffer.timeStamp);
+ if (useGraphicBuffer && !mUseGEN) {
+ vaSyncSurface(mVADisplay, outputByPos->renderBuffer.surface);
+ fillDecodingErrors(&(outputByPos->renderBuffer));
+ }
+ if (draining && mOutputTail == NULL) {
+ outputByPos->renderBuffer.flag |= IS_EOS;
+ }
+ drainDecodingErrors(outErrBuf, &(outputByPos->renderBuffer));
+
+ return &(outputByPos->renderBuffer);
+ }
+
+ // output by presentation time stamp (the smallest pts)
+ VideoSurfaceBuffer *outputByPts = findOutputByPts();
+
+ VideoSurfaceBuffer *output = NULL;
+ if (mOutputMethod == OUTPUT_BY_POC) {
+ output = findOutputByPoc(draining);
+ } else if (mOutputMethod == OUTPUT_BY_PCT) {
+ output = findOutputByPct(draining);
+ } else {
+ ETRACE("Invalid output method.");
+ return NULL;
+ }
+
+ if (output == NULL) {
+ return NULL;
+ }
+
+ if (output != outputByPts) {
+ // swap time stamp
+ uint64_t ts = output->renderBuffer.timeStamp;
+ output->renderBuffer.timeStamp = outputByPts->renderBuffer.timeStamp;
+ outputByPts->renderBuffer.timeStamp = ts;
+ }
+
+ if (output != outputByPos) {
+ // remove this output from middle or end of the list
+ VideoSurfaceBuffer *p = outputByPos;
+ while (p->next != output) {
+ p = p->next;
+ }
+ p->next = output->next;
+ if (mOutputTail == output) {
+ mOutputTail = p;
+ }
+ } else {
+ // remove this output from head of the list
+ mOutputHead = mOutputHead->next;
+ if (mOutputHead == NULL) {
+ mOutputTail = NULL;
+ }
+ }
+ //VTRACE("Output POC %d for display (pts = %.2f)", output->pictureOrder, output->renderBuffer.timeStamp/1E6);
+ vaStatus = vaSetTimestampForSurface(mVADisplay, output->renderBuffer.surface, output->renderBuffer.timeStamp);
+
+ if (useGraphicBuffer && !mUseGEN) {
+ vaSyncSurface(mVADisplay, output->renderBuffer.surface);
+ fillDecodingErrors(&(output->renderBuffer));
+ }
+
+ if (draining && mOutputTail == NULL) {
+ output->renderBuffer.flag |= IS_EOS;
+ }
+
+ drainDecodingErrors(outErrBuf, &(output->renderBuffer));
+
+ return &(output->renderBuffer);
+}
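+
+// Timestamp handling above, by example (illustrative): if the queue holds
+// {POC 2, pts 33} and {POC 4, pts 0} and POC 2 is selected by POC order, its
+// timestamp is swapped with the smallest pending pts, so POC 2 leaves with
+// pts 0 and POC 4 later leaves with pts 33; display order thus carries
+// monotonically increasing timestamps.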
+
+VideoSurfaceBuffer* VideoDecoderBase::findOutputByPts() {
+ // output by presentation time stamp - buffer with the smallest time stamp is output
+ VideoSurfaceBuffer *p = mOutputHead;
+ VideoSurfaceBuffer *outputByPts = NULL;
+ uint64_t pts = INVALID_PTS;
+ do {
+ if ((uint64_t)(p->renderBuffer.timeStamp) <= pts) {
+ // find buffer with the smallest PTS
+ pts = p->renderBuffer.timeStamp;
+ outputByPts = p;
+ }
+ p = p->next;
+ } while (p != NULL);
+
+ return outputByPts;
+}
+
+VideoSurfaceBuffer* VideoDecoderBase::findOutputByPct(bool draining) {
+ // output by picture coding type (PCT)
+    // if there is more than one reference frame, the first reference frame is output; otherwise,
+    // output a non-reference frame if there is any.
+
+ VideoSurfaceBuffer *p = mOutputHead;
+ VideoSurfaceBuffer *outputByPct = NULL;
+ int32_t reference = 0;
+ do {
+ if (p->referenceFrame) {
+ reference++;
+ if (reference > 1) {
+ // mOutputHead must be a reference frame
+ outputByPct = mOutputHead;
+ break;
+ }
+ } else {
+ // first non-reference frame
+ outputByPct = p;
+ break;
+ }
+ p = p->next;
+ } while (p != NULL);
+
+ if (outputByPct == NULL && draining) {
+ outputByPct = mOutputHead;
+ }
+ return outputByPct;
+}
+
+#if 0
+VideoSurfaceBuffer* VideoDecoderBase::findOutputByPoc(bool draining) {
+ // output by picture order count (POC)
+ // Output criteria:
+ // if there is IDR frame (POC == 0), all the frames before IDR must be output;
+ // Otherwise, if draining flag is set or list is full, frame with the least POC is output;
+ // Otherwise, NOTHING is output
+
+ int32_t dpbFullness = 0;
+ for (int32_t i = 0; i < mNumSurfaces; i++) {
+ // count num of reference frames
+ if (mSurfaceBuffers[i].asReferernce) {
+ dpbFullness++;
+ }
+ }
+
+ if (mAcquiredBuffer && mAcquiredBuffer->asReferernce) {
+ // frame is being decoded and is not ready for output yet
+ dpbFullness--;
+ }
+
+ VideoSurfaceBuffer *p = mOutputHead;
+ while (p != NULL) {
+ // count dpbFullness with non-reference frame in the output queue
+ if (p->asReferernce == false) {
+ dpbFullness++;
+ }
+ p = p->next;
+ }
+
+Retry:
+ p = mOutputHead;
+ VideoSurfaceBuffer *outputByPoc = NULL;
+ int32_t count = 0;
+ int32_t poc = MAXIMUM_POC;
+
+ do {
+ if (p->pictureOrder == 0) {
+ // output picture with the least POC before IDR
+ if (outputByPoc != NULL) {
+ mNextOutputPOC = outputByPoc->pictureOrder + 1;
+ return outputByPoc;
+ } else {
+ mNextOutputPOC = MINIMUM_POC;
+ }
+ }
+
+ // POC of the output candidate must not be less than mNextOutputPOC
+ if (p->pictureOrder < mNextOutputPOC) {
+ break;
+ }
+
+ if (p->pictureOrder < poc) {
+ // update the least POC.
+ poc = p->pictureOrder;
+ outputByPoc = p;
+ }
+ count++;
+ p = p->next;
+ } while (p != NULL && count < mOutputWindowSize);
+
+ if (draining == false && dpbFullness < mOutputWindowSize) {
+ // list is not full and we are not in draining state
+ // if DPB is already full, one frame must be output
+ return NULL;
+ }
+
+ if (outputByPoc == NULL) {
+ mNextOutputPOC = MINIMUM_POC;
+ goto Retry;
+ }
+
+ // for debugging purpose
+ if (outputByPoc->pictureOrder != 0 && outputByPoc->pictureOrder < mNextOutputPOC) {
+ ETRACE("Output POC is not incremental, expected %d, actual %d", mNextOutputPOC, outputByPoc->pictureOrder);
+ //gaps_in_frame_num_value_allowed_flag is not currently supported
+ }
+
+ mNextOutputPOC = outputByPoc->pictureOrder + 1;
+
+ return outputByPoc;
+}
+#else
+VideoSurfaceBuffer* VideoDecoderBase::findOutputByPoc(bool draining) {
+ VideoSurfaceBuffer *output = NULL;
+ VideoSurfaceBuffer *p = mOutputHead;
+ int32_t count = 0;
+ int32_t poc = MAXIMUM_POC;
+ VideoSurfaceBuffer *outputleastpoc = mOutputHead;
+ do {
+ count++;
+ if (p->pictureOrder == 0) {
+ // any picture before this POC (new IDR) must be output
+ if (output == NULL) {
+ mNextOutputPOC = MINIMUM_POC;
+ // looking for any POC with negative value
+ } else {
+ mNextOutputPOC = output->pictureOrder + 1;
+ break;
+ }
+ }
+ if (p->pictureOrder < poc && p->pictureOrder >= mNextOutputPOC) {
+            // this POC meets the output criteria.
+ poc = p->pictureOrder;
+ output = p;
+ outputleastpoc = p;
+ }
+ if (poc == mNextOutputPOC || count == mOutputWindowSize) {
+ if (output != NULL) {
+ // this indicates two cases:
+ // 1) the next output POC is found.
+ // 2) output queue is full and there is at least one buffer meeting the output criteria.
+ mNextOutputPOC = output->pictureOrder + 1;
+ break;
+ } else {
+ // this indicates output queue is full and no buffer in the queue meets the output criteria
+ // restart processing as queue is FULL and output criteria is changed. (next output POC is 0)
+ mNextOutputPOC = MINIMUM_POC;
+ count = 0;
+ poc = MAXIMUM_POC;
+ p = mOutputHead;
+ continue;
+ }
+ }
+ if (p->next == NULL) {
+ output = NULL;
+ }
+
+ p = p->next;
+ } while (p != NULL);
+
+ if (draining == true && output == NULL) {
+ output = outputleastpoc;
+ }
+
+ return output;
+}
+#endif
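+
+// Example (illustrative): if the queue holds POCs {4, 2} and a new IDR
+// (POC 0) arrives, the scan releases 2 first, then 4, before the IDR itself
+// is considered, so frames leave in display order.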
+
+bool VideoDecoderBase::checkBufferAvail(void) {
+ if (!mInitialized) {
+ if ((mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER) == 0) {
+ return true;
+ }
+ for (int i = 0; i < MAX_GRAPHIC_BUFFER_NUM; i++) {
+ if (mSignalBufferPre[i] != NULL) {
+ return true;
+ }
+ }
+ return false;
+ }
+    // check whether there is a buffer available for decoding
+ // TODO: check frame being referenced for frame skipping
+ VideoSurfaceBuffer *buffer = NULL;
+ for (int32_t i = 0; i < mNumSurfaces; i++) {
+ buffer = mSurfaceBuffers + i;
+
+ if (buffer->asReferernce == false &&
+ buffer->renderBuffer.renderDone == true) {
+ querySurfaceRenderStatus(buffer);
+ if (buffer->renderBuffer.driverRenderDone == true)
+ return true;
+ }
+ }
+ return false;
+}
+
+Decode_Status VideoDecoderBase::acquireSurfaceBuffer(void) {
+ if (mVAStarted == false) {
+ return DECODE_FAIL;
+ }
+
+ if (mAcquiredBuffer != NULL) {
+ ETRACE("mAcquiredBuffer is not NULL. Implementation bug.");
+ return DECODE_FAIL;
+ }
+
+ int nextAcquire = mSurfaceAcquirePos;
+ VideoSurfaceBuffer *acquiredBuffer = NULL;
+ bool acquired = false;
+
+ while (acquired == false) {
+ acquiredBuffer = mSurfaceBuffers + nextAcquire;
+
+ querySurfaceRenderStatus(acquiredBuffer);
+
+ if (acquiredBuffer->asReferernce == false && acquiredBuffer->renderBuffer.renderDone == true && acquiredBuffer->renderBuffer.driverRenderDone == true) {
+            // this is a potential buffer for acquisition. Check if it is referenced by another surface for frame skipping
+ VideoSurfaceBuffer *temp;
+ acquired = true;
+ for (int i = 0; i < mNumSurfaces; i++) {
+ if (i == nextAcquire) {
+ continue;
+ }
+ temp = mSurfaceBuffers + i;
+                // use mSurfaces[nextAcquire] instead of acquiredBuffer->renderBuffer.surface as it's the actual surface to use.
+ if (temp->renderBuffer.surface == mSurfaces[nextAcquire] &&
+ temp->renderBuffer.renderDone == false) {
+ ITRACE("Surface is referenced by other surface buffer.");
+ acquired = false;
+ break;
+ }
+ }
+ }
+ if (acquired) {
+ break;
+ }
+ nextAcquire++;
+ if (nextAcquire == mNumSurfaces) {
+ nextAcquire = 0;
+ }
+ if (nextAcquire == mSurfaceAcquirePos) {
+ return DECODE_NO_SURFACE;
+ }
+ }
+
+ if (acquired == false) {
+ return DECODE_NO_SURFACE;
+ }
+
+ mAcquiredBuffer = acquiredBuffer;
+ mSurfaceAcquirePos = nextAcquire;
+
+    // set the surface again as it may have been reset by a skipped frame.
+    // a skipped frame is a "non-coded frame" and the decoder needs to duplicate the previous reference frame as the output.
+ mAcquiredBuffer->renderBuffer.surface = mSurfaces[mSurfaceAcquirePos];
+ if (mSurfaceUserPtr && mAcquiredBuffer->mappedData) {
+ mAcquiredBuffer->mappedData->data = mSurfaceUserPtr[mSurfaceAcquirePos];
+ }
+ mAcquiredBuffer->renderBuffer.timeStamp = INVALID_PTS;
+ mAcquiredBuffer->renderBuffer.display = mVADisplay;
+ mAcquiredBuffer->renderBuffer.flag = 0;
+ mAcquiredBuffer->renderBuffer.renderDone = false;
+ mAcquiredBuffer->asReferernce = false;
+ mAcquiredBuffer->renderBuffer.errBuf.errorNumber = 0;
+ mAcquiredBuffer->renderBuffer.errBuf.timeStamp = INVALID_PTS;
+
+ return DECODE_SUCCESS;
+}
+
+Decode_Status VideoDecoderBase::outputSurfaceBuffer(void) {
+ Decode_Status status;
+ if (mAcquiredBuffer == NULL) {
+ ETRACE("mAcquiredBuffer is NULL. Implementation bug.");
+ return DECODE_FAIL;
+ }
+
+ if (mRawOutput) {
+ status = getRawDataFromSurface();
+ CHECK_STATUS();
+ }
+
+    // frame is successfully decoded to the current surface; it is ready for output
+ if (mShowFrame) {
+ mAcquiredBuffer->renderBuffer.renderDone = false;
+ } else {
+ mAcquiredBuffer->renderBuffer.renderDone = true;
+ }
+
+    // the decoder must set the asReference and referenceFrame flags properly
+
+ // update reference frames
+ if (mAcquiredBuffer->referenceFrame) {
+ if (mManageReference) {
+ // managing reference for MPEG4/H.263/WMV.
+ // AVC should manage reference frame in a different way
+ if (mForwardReference != NULL) {
+                // this forward reference is no longer needed
+ mForwardReference->asReferernce = false;
+ }
+            // Forward reference for either P or B frame prediction
+ mForwardReference = mLastReference;
+ mAcquiredBuffer->asReferernce = true;
+ }
+
+ // the last reference frame.
+ mLastReference = mAcquiredBuffer;
+ }
+ // add to the output list
+ if (mShowFrame) {
+ if (mOutputHead == NULL) {
+ mOutputHead = mAcquiredBuffer;
+ } else {
+ mOutputTail->next = mAcquiredBuffer;
+ }
+ mOutputTail = mAcquiredBuffer;
+ mOutputTail->next = NULL;
+ }
+
+ //VTRACE("Pushing POC %d to queue (pts = %.2f)", mAcquiredBuffer->pictureOrder, mAcquiredBuffer->renderBuffer.timeStamp/1E6);
+
+ mAcquiredBuffer = NULL;
+ mSurfaceAcquirePos = (mSurfaceAcquirePos + 1 ) % mNumSurfaces;
+ return DECODE_SUCCESS;
+}
+
+Decode_Status VideoDecoderBase::releaseSurfaceBuffer(void) {
+ if (mAcquiredBuffer == NULL) {
+        // this is a harmless error
+ return DECODE_SUCCESS;
+ }
+
+ // frame is not decoded to the acquired buffer, current surface is invalid, and can't be output.
+ mAcquiredBuffer->asReferernce = false;
+ mAcquiredBuffer->renderBuffer.renderDone = true;
+ mAcquiredBuffer = NULL;
+ return DECODE_SUCCESS;
+}
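+
+// Sketch of the acquire/output/release contract used by codec-specific subclasses
+// (illustrative only; `decodedOk` is a stand-in for the subclass's own result check):
+#if 0
+status = acquireSurfaceBuffer();           // claim one free surface (mAcquiredBuffer)
+CHECK_STATUS("acquireSurfaceBuffer");
+// ... submit picture/slice parameters to the driver for mAcquiredBuffer ...
+if (decodedOk) {
+    status = outputSurfaceBuffer();        // queue frame for output, update references
+} else {
+    status = releaseSurfaceBuffer();       // hand the surface back untouched
+}
+#endif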
+
+void VideoDecoderBase::flushSurfaceBuffers(void) {
+ endDecodingFrame(true);
+ VideoSurfaceBuffer *p = NULL;
+ while (mOutputHead) {
+ mOutputHead->renderBuffer.renderDone = true;
+ p = mOutputHead;
+ mOutputHead = mOutputHead->next;
+ p->next = NULL;
+ }
+ mOutputHead = NULL;
+ mOutputTail = NULL;
+}
+
+Decode_Status VideoDecoderBase::endDecodingFrame(bool dropFrame) {
+ Decode_Status status = DECODE_SUCCESS;
+ VAStatus vaStatus;
+
+ if (mDecodingFrame == false) {
+ if (mAcquiredBuffer != NULL) {
+ //ETRACE("mAcquiredBuffer is not NULL. Implementation bug.");
+ releaseSurfaceBuffer();
+ status = DECODE_FAIL;
+ }
+ return status;
+ }
+ // return through exit label to reset mDecodingFrame
+ if (mAcquiredBuffer == NULL) {
+ ETRACE("mAcquiredBuffer is NULL. Implementation bug.");
+ status = DECODE_FAIL;
+ goto exit;
+ }
+
+ vaStatus = vaEndPicture(mVADisplay, mVAContext);
+ if (vaStatus != VA_STATUS_SUCCESS) {
+ releaseSurfaceBuffer();
+ ETRACE("vaEndPicture failed. vaStatus = %d", vaStatus);
+ status = DECODE_DRIVER_FAIL;
+ goto exit;
+ }
+
+ if (dropFrame) {
+ // we are asked to drop this decoded picture
+ VTRACE("Frame dropped in endDecodingFrame");
+ vaStatus = vaSyncSurface(mVADisplay, mAcquiredBuffer->renderBuffer.surface);
+ releaseSurfaceBuffer();
+ goto exit;
+ }
+ status = outputSurfaceBuffer();
+ // fall through
+exit:
+ mDecodingFrame = false;
+ return status;
+}
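+
+// Note on the two call modes above: endDecodingFrame(false) submits vaEndPicture()
+// and queues the decoded frame for output, while endDecodingFrame(true) also syncs
+// the surface and releases it, dropping the picture; flush and stop paths use the
+// latter.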
+
+
+Decode_Status VideoDecoderBase::setupVA(uint32_t numSurface, VAProfile profile, uint32_t numExtraSurface) {
+ VAStatus vaStatus = VA_STATUS_SUCCESS;
+ Decode_Status status;
+ VAConfigAttrib attrib;
+
+ if (mVAStarted) {
+ return DECODE_SUCCESS;
+ }
+
+ mRotationDegrees = 0;
+ if (mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER){
+#ifdef TARGET_HAS_VPP
+ if (mVideoFormatInfo.actualBufferNeeded > mConfigBuffer.surfaceNumber - mConfigBuffer.vppBufferNum)
+#else
+ if (mVideoFormatInfo.actualBufferNeeded > mConfigBuffer.surfaceNumber)
+#endif
+ return DECODE_FORMAT_CHANGE;
+
+ numSurface = mConfigBuffer.surfaceNumber;
+        // if the format has been changed in USE_NATIVE_GRAPHIC_BUFFER mode,
+        // we cannot set up VA here when the graphic buffer resolution is smaller than the resolution the decoder really needs
+ if (mSizeChanged) {
+ if (mVideoFormatInfo.surfaceWidth < mVideoFormatInfo.width || mVideoFormatInfo.surfaceHeight < mVideoFormatInfo.height) {
+ mSizeChanged = false;
+ return DECODE_FORMAT_CHANGE;
+ }
+ }
+ }
+
+ // TODO: validate profile
+ if (numSurface == 0) {
+ return DECODE_FAIL;
+ }
+
+ if (mConfigBuffer.flag & HAS_MINIMUM_SURFACE_NUMBER) {
+ if (numSurface < mConfigBuffer.surfaceNumber) {
+ WTRACE("surface to allocated %d is less than minimum number required %d",
+ numSurface, mConfigBuffer.surfaceNumber);
+ numSurface = mConfigBuffer.surfaceNumber;
+ }
+ }
+
+ if (mVADisplay != NULL) {
+ ETRACE("VA is partially started.");
+ return DECODE_FAIL;
+ }
+
+ // Display is defined as "unsigned int"
+#ifndef USE_HYBRID_DRIVER
+ mDisplay = new Display;
+ *mDisplay = ANDROID_DISPLAY_HANDLE;
+#else
+ if (profile >= VAProfileH264Baseline && profile <= VAProfileVC1Advanced) {
+ ITRACE("Using GEN driver");
+ mDisplay = "libva_driver_name=i965";
+ mUseGEN = true;
+ } else {
+ ITRACE("Using PVR driver");
+ mDisplay = "libva_driver_name=pvr";
+ mUseGEN = false;
+ }
+
+#endif
+ mVADisplay = vaGetDisplay(mDisplay);
+ if (mVADisplay == NULL) {
+ ETRACE("vaGetDisplay failed.");
+ return DECODE_DRIVER_FAIL;
+ }
+
+ int majorVersion, minorVersion;
+ vaStatus = vaInitialize(mVADisplay, &majorVersion, &minorVersion);
+ CHECK_VA_STATUS("vaInitialize");
+
+ if ((int32_t)profile != VAProfileSoftwareDecoding) {
+
+ status = checkHardwareCapability();
+ CHECK_STATUS("checkHardwareCapability");
+
+#if (defined USE_AVC_SHORT_FORMAT || defined USE_SLICE_HEADER_PARSING)
+ status = getCodecSpecificConfigs(profile, &mVAConfig);
+ CHECK_STATUS("getCodecSpecificAttributes");
+#else
+        // We are requesting RT format attributes
+ attrib.type = VAConfigAttribRTFormat;
+ attrib.value = VA_RT_FORMAT_YUV420;
+
+ vaStatus = vaCreateConfig(
+ mVADisplay,
+ profile,
+ VAEntrypointVLD,
+ &attrib,
+ 1,
+ &mVAConfig);
+ CHECK_VA_STATUS("vaCreateConfig");
+#endif
+ }
+
+ mNumSurfaces = numSurface;
+ mNumExtraSurfaces = numExtraSurface;
+ mSurfaces = new VASurfaceID [mNumSurfaces + mNumExtraSurfaces];
+ mExtraSurfaces = mSurfaces + mNumSurfaces;
+ if (mSurfaces == NULL) {
+ return DECODE_MEMORY_FAIL;
+ }
+
+ setRenderRect();
+
+ int32_t format = VA_RT_FORMAT_YUV420;
+ if (mConfigBuffer.flag & WANT_SURFACE_PROTECTION) {
+#ifndef USE_AVC_SHORT_FORMAT
+ format |= VA_RT_FORMAT_PROTECTED;
+ WTRACE("Surface is protected.");
+#endif
+ }
+ if (mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER) {
+ VASurfaceAttrib attribs[2];
+ mVASurfaceAttrib = new VASurfaceAttribExternalBuffers;
+ if (mVASurfaceAttrib == NULL) {
+ return DECODE_MEMORY_FAIL;
+ }
+
+ mVASurfaceAttrib->buffers= (unsigned long *)malloc(sizeof(unsigned long)*mNumSurfaces);
+ if (mVASurfaceAttrib->buffers == NULL) {
+ return DECODE_MEMORY_FAIL;
+ }
+ mVASurfaceAttrib->num_buffers = mNumSurfaces;
+ mVASurfaceAttrib->pixel_format = VA_FOURCC_NV12;
+ mVASurfaceAttrib->width = mVideoFormatInfo.surfaceWidth;
+ mVASurfaceAttrib->height = mVideoFormatInfo.surfaceHeight;
+ mVASurfaceAttrib->data_size = mConfigBuffer.graphicBufferStride * mVideoFormatInfo.surfaceHeight * 1.5;
+ mVASurfaceAttrib->num_planes = 2;
+ mVASurfaceAttrib->pitches[0] = mConfigBuffer.graphicBufferStride;
+ mVASurfaceAttrib->pitches[1] = mConfigBuffer.graphicBufferStride;
+ mVASurfaceAttrib->pitches[2] = 0;
+ mVASurfaceAttrib->pitches[3] = 0;
+ mVASurfaceAttrib->offsets[0] = 0;
+ mVASurfaceAttrib->offsets[1] = mConfigBuffer.graphicBufferStride * mVideoFormatInfo.surfaceHeight;
+ mVASurfaceAttrib->offsets[2] = 0;
+ mVASurfaceAttrib->offsets[3] = 0;
+ mVASurfaceAttrib->private_data = (void *)mConfigBuffer.nativeWindow;
+ mVASurfaceAttrib->flags = VA_SURFACE_ATTRIB_MEM_TYPE_ANDROID_GRALLOC;
+ if (mConfigBuffer.flag & USE_TILING_MEMORY)
+ mVASurfaceAttrib->flags |= VA_SURFACE_EXTBUF_DESC_ENABLE_TILING;
+
+ for (int i = 0; i < mNumSurfaces; i++) {
+ mVASurfaceAttrib->buffers[i] = (unsigned long)mConfigBuffer.graphicBufferHandler[i];
+ }
+
+ attribs[0].type = (VASurfaceAttribType)VASurfaceAttribMemoryType;
+ attribs[0].flags = VA_SURFACE_ATTRIB_SETTABLE;
+ attribs[0].value.type = VAGenericValueTypeInteger;
+ attribs[0].value.value.i = VA_SURFACE_ATTRIB_MEM_TYPE_ANDROID_GRALLOC;
+
+ attribs[1].type = (VASurfaceAttribType)VASurfaceAttribExternalBufferDescriptor;
+ attribs[1].flags = VA_SURFACE_ATTRIB_SETTABLE;
+ attribs[1].value.type = VAGenericValueTypePointer;
+ attribs[1].value.value.p = (void *)mVASurfaceAttrib;
+
+ vaStatus = vaCreateSurfaces(
+ mVADisplay,
+ format,
+ mVideoFormatInfo.surfaceWidth,
+ mVideoFormatInfo.surfaceHeight,
+ mSurfaces,
+ mNumSurfaces,
+ attribs,
+ 2);
+
+ } else {
+ vaStatus = vaCreateSurfaces(
+ mVADisplay,
+ format,
+ mVideoFormatInfo.width,
+ mVideoFormatInfo.height,
+ mSurfaces,
+ mNumSurfaces,
+ NULL,
+ 0);
+ mVideoFormatInfo.surfaceWidth = mVideoFormatInfo.width;
+ mVideoFormatInfo.surfaceHeight = mVideoFormatInfo.height;
+ }
+ CHECK_VA_STATUS("vaCreateSurfaces");
+
+ if (mNumExtraSurfaces != 0) {
+ vaStatus = vaCreateSurfaces(
+ mVADisplay,
+ format,
+ mVideoFormatInfo.surfaceWidth,
+ mVideoFormatInfo.surfaceHeight,
+ mExtraSurfaces,
+ mNumExtraSurfaces,
+ NULL,
+ 0);
+ CHECK_VA_STATUS("vaCreateSurfaces");
+ }
+
+ mVideoFormatInfo.surfaceNumber = mNumSurfaces;
+ mVideoFormatInfo.ctxSurfaces = mSurfaces;
+
+ if ((int32_t)profile != VAProfileSoftwareDecoding) {
+ vaStatus = vaCreateContext(
+ mVADisplay,
+ mVAConfig,
+ mVideoFormatInfo.surfaceWidth,
+ mVideoFormatInfo.surfaceHeight,
+ 0,
+ mSurfaces,
+ mNumSurfaces + mNumExtraSurfaces,
+ &mVAContext);
+ CHECK_VA_STATUS("vaCreateContext");
+ }
+
+ mSurfaceBuffers = new VideoSurfaceBuffer [mNumSurfaces];
+ if (mSurfaceBuffers == NULL) {
+ return DECODE_MEMORY_FAIL;
+ }
+ initSurfaceBuffer(true);
+
+ if ((int32_t)profile == VAProfileSoftwareDecoding) {
+ // derive user pointer from surface for direct access
+ status = mapSurface();
+ CHECK_STATUS("mapSurface")
+ }
+
+ setRotationDegrees(mConfigBuffer.rotationDegrees);
+
+ mVAStarted = true;
+ return DECODE_SUCCESS;
+}
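+
+// Worked example of the external-buffer NV12 layout computed above (illustrative
+// numbers): with graphicBufferStride = 2048 and surfaceHeight = 1088,
+//   data_size  = 2048 * 1088 * 1.5 = 3,342,336 bytes
+//   offsets[0] = 0                       (Y plane)
+//   offsets[1] = 2048 * 1088 = 2,228,224 (interleaved UV plane)
+// and pitches[0] == pitches[1] == the gralloc stride (2048).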
+
+Decode_Status VideoDecoderBase::terminateVA(void) {
+ mSignalBufferSize = 0;
+ for (int i = 0; i < MAX_GRAPHIC_BUFFER_NUM; i++) {
+ mSignalBufferPre[i] = NULL;
+ }
+
+ if (mVAStarted == false) {
+ // VA hasn't been started yet
+ return DECODE_SUCCESS;
+ }
+
+ if (mSurfaceBuffers) {
+ for (int32_t i = 0; i < mNumSurfaces; i++) {
+ if (mSurfaceBuffers[i].renderBuffer.rawData) {
+ if (mSurfaceBuffers[i].renderBuffer.rawData->data) {
+ delete [] mSurfaceBuffers[i].renderBuffer.rawData->data;
+ }
+ delete mSurfaceBuffers[i].renderBuffer.rawData;
+ }
+ if (mSurfaceBuffers[i].mappedData) {
+ // don't delete data pointer as it is mapped from surface
+ delete mSurfaceBuffers[i].mappedData;
+ }
+ }
+ delete [] mSurfaceBuffers;
+ mSurfaceBuffers = NULL;
+ }
+
+ if (mVASurfaceAttrib) {
+ if (mVASurfaceAttrib->buffers) free(mVASurfaceAttrib->buffers);
+ delete mVASurfaceAttrib;
+ mVASurfaceAttrib = NULL;
+ }
+
+
+ if (mSurfaceUserPtr) {
+ delete [] mSurfaceUserPtr;
+ mSurfaceUserPtr = NULL;
+ }
+
+ if (mSurfaces)
+ {
+ vaDestroySurfaces(mVADisplay, mSurfaces, mNumSurfaces + mNumExtraSurfaces);
+ delete [] mSurfaces;
+ mSurfaces = NULL;
+ }
+
+ if (mVAContext != VA_INVALID_ID) {
+ vaDestroyContext(mVADisplay, mVAContext);
+ mVAContext = VA_INVALID_ID;
+ }
+
+ if (mVAConfig != VA_INVALID_ID) {
+ vaDestroyConfig(mVADisplay, mVAConfig);
+ mVAConfig = VA_INVALID_ID;
+ }
+
+ if (mVADisplay) {
+ vaTerminate(mVADisplay);
+ mVADisplay = NULL;
+ }
+
+ if (mDisplay) {
+#ifndef USE_HYBRID_DRIVER
+ delete mDisplay;
+#endif
+ mDisplay = NULL;
+ }
+
+ mVAStarted = false;
+ mInitialized = false;
+ mErrReportEnabled = false;
+ return DECODE_SUCCESS;
+}
+
+Decode_Status VideoDecoderBase::parseBuffer(uint8_t *buffer, int32_t size, bool config, void** vbpData) {
+ // DON'T check if mVAStarted == true
+ if (mParserHandle == NULL) {
+ return DECODE_NO_PARSER;
+ }
+
+ uint32_t vbpStatus;
+ if (buffer == NULL || size <= 0) {
+ return DECODE_INVALID_DATA;
+ }
+
+ uint8_t configFlag = config ? 1 : 0;
+ vbpStatus = mParserParse(mParserHandle, buffer, size, configFlag);
+ CHECK_VBP_STATUS("vbp_parse");
+
+ vbpStatus = mParserQuery(mParserHandle, vbpData);
+ CHECK_VBP_STATUS("vbp_query");
+
+ return DECODE_SUCCESS;
+}
+
+
+
+Decode_Status VideoDecoderBase::mapSurface(void) {
+ VAStatus vaStatus = VA_STATUS_SUCCESS;
+ VAImage image;
+ uint8_t *userPtr;
+ mSurfaceUserPtr = new uint8_t* [mNumSurfaces];
+ if (mSurfaceUserPtr == NULL) {
+ return DECODE_MEMORY_FAIL;
+ }
+
+ for (int32_t i = 0; i< mNumSurfaces; i++) {
+ vaStatus = vaDeriveImage(mVADisplay, mSurfaces[i], &image);
+ CHECK_VA_STATUS("vaDeriveImage");
+ vaStatus = vaMapBuffer(mVADisplay, image.buf, (void**)&userPtr);
+ CHECK_VA_STATUS("vaMapBuffer");
+ mSurfaceUserPtr[i] = userPtr;
+ mSurfaceBuffers[i].mappedData = new VideoFrameRawData;
+ if (mSurfaceBuffers[i].mappedData == NULL) {
+ return DECODE_MEMORY_FAIL;
+ }
+ mSurfaceBuffers[i].mappedData->own = false; // derived from surface so can't be released
+ mSurfaceBuffers[i].mappedData->data = NULL; // specified during acquireSurfaceBuffer
+ mSurfaceBuffers[i].mappedData->fourcc = image.format.fourcc;
+ mSurfaceBuffers[i].mappedData->width = mVideoFormatInfo.width;
+ mSurfaceBuffers[i].mappedData->height = mVideoFormatInfo.height;
+ mSurfaceBuffers[i].mappedData->size = image.data_size;
+ for (int pi = 0; pi < 3; pi++) {
+ mSurfaceBuffers[i].mappedData->pitch[pi] = image.pitches[pi];
+ mSurfaceBuffers[i].mappedData->offset[pi] = image.offsets[pi];
+ }
+ // debug information
+ if (image.pitches[0] != image.pitches[1] ||
+ image.width != mVideoFormatInfo.width ||
+ image.height != mVideoFormatInfo.height ||
+ image.offsets[0] != 0) {
+ WTRACE("Unexpected VAImage format, w = %d, h = %d, offset = %d", image.width, image.height, image.offsets[0]);
+ }
+ // TODO: do we need to unmap buffer?
+ //vaStatus = vaUnmapBuffer(mVADisplay, image.buf);
+        //CHECK_VA_STATUS("vaUnmapBuffer");
+ vaStatus = vaDestroyImage(mVADisplay,image.image_id);
+ CHECK_VA_STATUS("vaDestroyImage");
+
+ }
+ return DECODE_SUCCESS;
+}
+
+Decode_Status VideoDecoderBase::getRawDataFromSurface(VideoRenderBuffer *renderBuffer, uint8_t *pRawData, uint32_t *pSize, bool internal) {
+ if (internal) {
+ if (mAcquiredBuffer == NULL) {
+ return DECODE_FAIL;
+ }
+ renderBuffer = &(mAcquiredBuffer->renderBuffer);
+ }
+
+ VAStatus vaStatus;
+ VAImageFormat imageFormat;
+ VAImage vaImage;
+ vaStatus = vaSyncSurface(renderBuffer->display, renderBuffer->surface);
+ CHECK_VA_STATUS("vaSyncSurface");
+
+ vaStatus = vaDeriveImage(renderBuffer->display, renderBuffer->surface, &vaImage);
+ CHECK_VA_STATUS("vaDeriveImage");
+
+ void *pBuf = NULL;
+ vaStatus = vaMapBuffer(renderBuffer->display, vaImage.buf, &pBuf);
+ CHECK_VA_STATUS("vaMapBuffer");
+
+
+ // size in NV12 format
+ uint32_t cropWidth = mVideoFormatInfo.width - (mVideoFormatInfo.cropLeft + mVideoFormatInfo.cropRight);
+ uint32_t cropHeight = mVideoFormatInfo.height - (mVideoFormatInfo.cropBottom + mVideoFormatInfo.cropTop);
+ int32_t size = cropWidth * cropHeight * 3 / 2;
+
+ if (internal) {
+ VideoFrameRawData *rawData = NULL;
+ if (renderBuffer->rawData == NULL) {
+ rawData = new VideoFrameRawData;
+ if (rawData == NULL) {
+ return DECODE_MEMORY_FAIL;
+ }
+ memset(rawData, 0, sizeof(VideoFrameRawData));
+ renderBuffer->rawData = rawData;
+ } else {
+ rawData = renderBuffer->rawData;
+ }
+
+ if (rawData->data != NULL && rawData->size != size) {
+ delete [] rawData->data;
+ rawData->data = NULL;
+ rawData->size = 0;
+ }
+ if (rawData->data == NULL) {
+ rawData->data = new uint8_t [size];
+ if (rawData->data == NULL) {
+ return DECODE_MEMORY_FAIL;
+ }
+ }
+
+ rawData->own = true; // allocated by this library
+ rawData->width = cropWidth;
+ rawData->height = cropHeight;
+ rawData->pitch[0] = cropWidth;
+ rawData->pitch[1] = cropWidth;
+ rawData->pitch[2] = 0; // interleaved U/V, two planes
+ rawData->offset[0] = 0;
+ rawData->offset[1] = cropWidth * cropHeight;
+ rawData->offset[2] = cropWidth * cropHeight * 3 / 2;
+ rawData->size = size;
+ rawData->fourcc = 'NV12';
+
+ pRawData = rawData->data;
+ } else {
+ *pSize = size;
+ }
+
+ if (size == (int32_t)vaImage.data_size) {
+#ifdef __SSE4_1__
+ stream_memcpy(pRawData, pBuf, size);
+#else
+ memcpy(pRawData, pBuf, size);
+#endif
+ } else {
+ // copy Y data
+ uint8_t *src = (uint8_t*)pBuf;
+ uint8_t *dst = pRawData;
+ uint32_t row = 0;
+ for (row = 0; row < cropHeight; row++) {
+#ifdef __SSE4_1__
+ stream_memcpy(dst, src, cropWidth);
+#else
+ memcpy(dst, src, cropWidth);
+#endif
+ dst += cropWidth;
+ src += vaImage.pitches[0];
+ }
+ // copy interleaved V and U data
+ src = (uint8_t*)pBuf + vaImage.offsets[1];
+ for (row = 0; row < cropHeight / 2; row++) {
+#ifdef __SSE4_1__
+ stream_memcpy(dst, src, cropWidth);
+#else
+ memcpy(dst, src, cropWidth);
+#endif
+ dst += cropWidth;
+ src += vaImage.pitches[1];
+ }
+ }
+
+ vaStatus = vaUnmapBuffer(renderBuffer->display, vaImage.buf);
+ CHECK_VA_STATUS("vaUnmapBuffer");
+
+ vaStatus = vaDestroyImage(renderBuffer->display, vaImage.image_id);
+ CHECK_VA_STATUS("vaDestroyImage");
+
+ return DECODE_SUCCESS;
+}
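+
+// Worked example of the copy-size logic above (illustrative numbers): for a
+// 1920x1088 surface cropped to 1920x1080 (cropBottom = 8), size is
+// 1920 * 1080 * 3 / 2 = 3,110,400 bytes. Since vaImage.data_size covers the
+// uncropped, pitch-padded surface, the sizes differ and the row-by-row path is
+// taken, copying each row at the crop width and skipping the driver padding.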
+
+void VideoDecoderBase::initSurfaceBuffer(bool reset) {
+ bool useGraphicBuffer = mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER;
+ if (useGraphicBuffer && reset) {
+ pthread_mutex_lock(&mLock);
+ }
+ for (int32_t i = 0; i < mNumSurfaces; i++) {
+ mSurfaceBuffers[i].renderBuffer.display = mVADisplay;
+ mSurfaceBuffers[i].renderBuffer.surface = VA_INVALID_SURFACE; // set in acquireSurfaceBuffer
+ mSurfaceBuffers[i].renderBuffer.flag = 0;
+ mSurfaceBuffers[i].renderBuffer.scanFormat = VA_FRAME_PICTURE;
+ mSurfaceBuffers[i].renderBuffer.timeStamp = 0;
+ mSurfaceBuffers[i].referenceFrame = false;
+ mSurfaceBuffers[i].asReferernce= false;
+ mSurfaceBuffers[i].pictureOrder = 0;
+ mSurfaceBuffers[i].next = NULL;
+ if (reset == true) {
+ mSurfaceBuffers[i].renderBuffer.rawData = NULL;
+ mSurfaceBuffers[i].mappedData = NULL;
+ }
+ if (useGraphicBuffer) {
+ if (reset) {
+ mSurfaceBuffers[i].renderBuffer.graphicBufferHandle = mConfigBuffer.graphicBufferHandler[i];
+ mSurfaceBuffers[i].renderBuffer.renderDone = false; //default false
+ for (uint32_t j = 0; j < mSignalBufferSize; j++) {
+ if(mSignalBufferPre[j] != NULL && mSignalBufferPre[j] == mSurfaceBuffers[i].renderBuffer.graphicBufferHandle) {
+ mSurfaceBuffers[i].renderBuffer.renderDone = true;
+ VTRACE("initSurfaceBuffer set renderDone = true index = %d", i);
+ mSignalBufferPre[j] = NULL;
+ break;
+ }
+ }
+ } else {
+ mSurfaceBuffers[i].renderBuffer.renderDone = false;
+ }
+ } else {
+ mSurfaceBuffers[i].renderBuffer.graphicBufferHandle = NULL;
+ mSurfaceBuffers[i].renderBuffer.renderDone = true;
+ }
+ mSurfaceBuffers[i].renderBuffer.graphicBufferIndex = i;
+ }
+
+ if (useGraphicBuffer && reset) {
+ mInitialized = true;
+ mSignalBufferSize = 0;
+ pthread_mutex_unlock(&mLock);
+ }
+}
+
+Decode_Status VideoDecoderBase::signalRenderDone(void * graphichandler) {
+ if (graphichandler == NULL) {
+ return DECODE_SUCCESS;
+ }
+ pthread_mutex_lock(&mLock);
+ int i = 0;
+ if (!mInitialized) {
+ if (mSignalBufferSize >= MAX_GRAPHIC_BUFFER_NUM) {
+ pthread_mutex_unlock(&mLock);
+ return DECODE_INVALID_DATA;
+ }
+ mSignalBufferPre[mSignalBufferSize++] = graphichandler;
+ VTRACE("SignalRenderDoneFlag mInitialized = false graphichandler = %p, mSignalBufferSize = %d", graphichandler, mSignalBufferSize);
+ } else {
+ if (!(mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER)) {
+ pthread_mutex_unlock(&mLock);
+ return DECODE_SUCCESS;
+ }
+ for (i = 0; i < mNumSurfaces; i++) {
+ if (mSurfaceBuffers[i].renderBuffer.graphicBufferHandle == graphichandler) {
+ mSurfaceBuffers[i].renderBuffer.renderDone = true;
+ VTRACE("SignalRenderDoneFlag mInitialized = true index = %d", i);
+ break;
+ }
+ }
+ }
+ pthread_mutex_unlock(&mLock);
+
+ return DECODE_SUCCESS;
+
+}
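+
+// Hypothetical render-loop usage (a sketch; `decoder` and `renderBuffer` are
+// stand-in names): once the client has consumed a graphic buffer it must signal
+// it back, otherwise the decoder eventually starves and reports DECODE_NO_SURFACE.
+#if 0
+// after displaying the frame:
+decoder->signalRenderDone(renderBuffer->graphicBufferHandle);
+#endif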
+
+void VideoDecoderBase::querySurfaceRenderStatus(VideoSurfaceBuffer* surface) {
+ VASurfaceStatus surfStat = VASurfaceReady;
+ VAStatus vaStat = VA_STATUS_SUCCESS;
+
+ if (!surface) {
+ LOGW("SurfaceBuffer not ready yet");
+ return;
+ }
+ surface->renderBuffer.driverRenderDone = true;
+
+#ifndef USE_GEN_HW
+ if (surface->renderBuffer.surface != VA_INVALID_SURFACE &&
+ (mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER)) {
+
+ vaStat = vaQuerySurfaceStatus(mVADisplay, surface->renderBuffer.surface, &surfStat);
+
+ if ((vaStat == VA_STATUS_SUCCESS) && (surfStat != VASurfaceReady))
+ surface->renderBuffer.driverRenderDone = false;
+
+ }
+#endif
+
+}
+
+// This function should be called before start() to load different types of parsers
+#if (defined USE_AVC_SHORT_FORMAT || defined USE_SLICE_HEADER_PARSING)
+Decode_Status VideoDecoderBase::setParserType(_vbp_parser_type type) {
+ if ((int32_t)type != VBP_INVALID) {
+ ITRACE("Parser Type = %d", (int32_t)type);
+ mParserType = type;
+ return DECODE_SUCCESS;
+ } else {
+ ETRACE("Invalid parser type = %d", (int32_t)type);
+ return DECODE_NO_PARSER;
+ }
+}
+
+Decode_Status VideoDecoderBase::updateBuffer(uint8_t *buffer, int32_t size, void** vbpData) {
+ if (mParserHandle == NULL) {
+ return DECODE_NO_PARSER;
+ }
+
+ uint32_t vbpStatus;
+ if (buffer == NULL || size <= 0) {
+ return DECODE_INVALID_DATA;
+ }
+
+ vbpStatus = mParserUpdate(mParserHandle, buffer, size, vbpData);
+ CHECK_VBP_STATUS("vbp_update");
+
+ return DECODE_SUCCESS;
+}
+
+Decode_Status VideoDecoderBase::queryBuffer(void** vbpData) {
+ if (mParserHandle == NULL) {
+ return DECODE_NO_PARSER;
+ }
+
+ uint32_t vbpStatus;
+ vbpStatus = mParserQuery(mParserHandle, vbpData);
+ CHECK_VBP_STATUS("vbp_query");
+
+ return DECODE_SUCCESS;
+}
+
+Decode_Status VideoDecoderBase::getCodecSpecificConfigs(VAProfile profile, VAConfigID *config) {
+ VAStatus vaStatus;
+ VAConfigAttrib attrib;
+ attrib.type = VAConfigAttribRTFormat;
+ attrib.value = VA_RT_FORMAT_YUV420;
+
+ if (config == NULL) {
+ ETRACE("Invalid parameter!");
+ return DECODE_FAIL;
+ }
+
+ vaStatus = vaCreateConfig(
+ mVADisplay,
+ profile,
+ VAEntrypointVLD,
+ &attrib,
+ 1,
+ config);
+
+ CHECK_VA_STATUS("vaCreateConfig");
+
+ return DECODE_SUCCESS;
+}
+#endif
+Decode_Status VideoDecoderBase::checkHardwareCapability() {
+ return DECODE_SUCCESS;
+}
+
+void VideoDecoderBase::drainDecodingErrors(VideoErrorBuffer *outErrBuf, VideoRenderBuffer *currentSurface) {
+ if (mErrReportEnabled && outErrBuf && currentSurface) {
+ memcpy(outErrBuf, &(currentSurface->errBuf), sizeof(VideoErrorBuffer));
+
+ currentSurface->errBuf.errorNumber = 0;
+ currentSurface->errBuf.timeStamp = INVALID_PTS;
+ }
+ if (outErrBuf)
+ VTRACE("%s: error number is %d", __FUNCTION__, outErrBuf->errorNumber);
+}
+
+void VideoDecoderBase::fillDecodingErrors(VideoRenderBuffer *currentSurface) {
+ VAStatus ret;
+
+ if (mErrReportEnabled) {
+ currentSurface->errBuf.timeStamp = currentSurface->timeStamp;
+ // TODO: is 10 a suitable number?
+ VASurfaceDecodeMBErrors *err_drv_output = NULL;
+ ret = vaQuerySurfaceError(mVADisplay, currentSurface->surface, VA_STATUS_ERROR_DECODING_ERROR, (void **)&err_drv_output);
+ if (ret || !err_drv_output) {
+ WTRACE("vaQuerySurfaceError failed.");
+ return;
+ }
+
+        int offset = 0x1 & currentSurface->errBuf.errorNumber; // offset is either 0 or 1
+ for (int i = 0; i < MAX_ERR_NUM - offset; i++) {
+ if (err_drv_output[i].status != -1) {
+ currentSurface->errBuf.errorNumber++;
+ currentSurface->errBuf.errorArray[i + offset].type = DecodeMBError;
+ currentSurface->errBuf.errorArray[i + offset].error_data.mb_pos.start_mb = err_drv_output[i].start_mb;
+ currentSurface->errBuf.errorArray[i + offset].error_data.mb_pos.end_mb = err_drv_output[i].end_mb;
+ currentSurface->errBuf.errorArray[i + offset].num_mbs = err_drv_output[i].end_mb - err_drv_output[i].start_mb + 1;
+ ITRACE("Error Index[%d]: type = %d, start_mb = %d, end_mb = %d",
+ currentSurface->errBuf.errorNumber - 1,
+ currentSurface->errBuf.errorArray[i + offset].type,
+ currentSurface->errBuf.errorArray[i + offset].error_data.mb_pos.start_mb,
+ currentSurface->errBuf.errorArray[i + offset].error_data.mb_pos.end_mb);
+ } else break;
+ }
+ ITRACE("%s: error number of current surface is %d, timestamp @%llu",
+ __FUNCTION__, currentSurface->errBuf.errorNumber, currentSurface->timeStamp);
+ }
+}
+
+void VideoDecoderBase::setRotationDegrees(int32_t rotationDegrees) {
+ if (mRotationDegrees == rotationDegrees) {
+ return;
+ }
+
+ ITRACE("set new rotation degree: %d", rotationDegrees);
+ VADisplayAttribute rotate;
+ rotate.type = VADisplayAttribRotation;
+ rotate.value = VA_ROTATION_NONE;
+ if (rotationDegrees == 0)
+ rotate.value = VA_ROTATION_NONE;
+ else if (rotationDegrees == 90)
+ rotate.value = VA_ROTATION_90;
+ else if (rotationDegrees == 180)
+ rotate.value = VA_ROTATION_180;
+ else if (rotationDegrees == 270)
+ rotate.value = VA_ROTATION_270;
+
+ VAStatus ret = vaSetDisplayAttributes(mVADisplay, &rotate, 1);
+ if (ret) {
+ ETRACE("Failed to set rotation degree.");
+ }
+ mRotationDegrees = rotationDegrees;
+}
+
+void VideoDecoderBase::setRenderRect() {
+
+ if (!mVADisplay)
+ return;
+
+ VAStatus ret;
+ VARectangle rect;
+ rect.x = mVideoFormatInfo.cropLeft;
+ rect.y = mVideoFormatInfo.cropTop;
+ rect.width = mVideoFormatInfo.width - (mVideoFormatInfo.cropLeft + mVideoFormatInfo.cropRight);
+ rect.height = mVideoFormatInfo.height - (mVideoFormatInfo.cropBottom + mVideoFormatInfo.cropTop);
+
+ VADisplayAttribute render_rect;
+ render_rect.type = VADisplayAttribRenderRect;
+ render_rect.value = (long)&rect;
+
+ ret = vaSetDisplayAttributes(mVADisplay, &render_rect, 1);
+ if (ret) {
+ ETRACE("Failed to set rotation degree.");
+ }
+}
diff --git a/videodecoder/VideoDecoderBase.h b/videodecoder/VideoDecoderBase.h
new file mode 100755
index 0000000..9cf09e8
--- /dev/null
+++ b/videodecoder/VideoDecoderBase.h
@@ -0,0 +1,187 @@
+/*
+* Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+#ifndef VIDEO_DECODER_BASE_H_
+#define VIDEO_DECODER_BASE_H_
+
+#include <va/va.h>
+#include <va/va_tpi.h>
+#include "VideoDecoderDefs.h"
+#include "VideoDecoderInterface.h"
+#include <pthread.h>
+#include <dlfcn.h>
+
+extern "C" {
+#include "vbp_loader.h"
+}
+
+#ifndef Display
+#ifdef USE_GEN_HW
+typedef char Display;
+#else
+typedef unsigned int Display;
+#endif
+#endif
+
+// TODO: check what is the best number. Must be at least 2 to support one backward reference frame.
+// Currently set to 8 to support 7 backward reference frames. This value is used for AVC frame reordering only.
+// e.g.:
+// POC: 4P, 8P, 10P, 6B and mNextOutputPOC = 5
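+// In that example, 8P and 10P are held in the window because 6B (the next POC to
+// display) has not yet been output; the window lets up to OUTPUT_WINDOW_SIZE decoded
+// frames accumulate so a late B frame can be reordered ahead of them.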
+#define OUTPUT_WINDOW_SIZE 8
+
+class VideoDecoderBase : public IVideoDecoder {
+public:
+ VideoDecoderBase(const char *mimeType, _vbp_parser_type type);
+ virtual ~VideoDecoderBase();
+
+ virtual Decode_Status start(VideoConfigBuffer *buffer);
+    virtual Decode_Status reset(VideoConfigBuffer *buffer);
+ virtual void stop(void);
+ //virtual Decode_Status decode(VideoDecodeBuffer *buffer);
+ virtual void flush(void);
+ virtual void freeSurfaceBuffers(void);
+ virtual const VideoRenderBuffer* getOutput(bool draining = false, VideoErrorBuffer *output_buf = NULL);
+ virtual Decode_Status signalRenderDone(void * graphichandler);
+ virtual const VideoFormatInfo* getFormatInfo(void);
+ virtual bool checkBufferAvail();
+ virtual void enableErrorReport(bool enabled = false) {mErrReportEnabled = enabled; };
+
+protected:
+ // each acquireSurfaceBuffer must be followed by a corresponding outputSurfaceBuffer or releaseSurfaceBuffer.
+ // Only one surface buffer can be acquired at any given time
+ virtual Decode_Status acquireSurfaceBuffer(void);
+ // frame is successfully decoded to the acquired surface buffer and surface is ready for output
+ virtual Decode_Status outputSurfaceBuffer(void);
+ // acquired surface buffer is not used
+ virtual Decode_Status releaseSurfaceBuffer(void);
+ // flush all decoded but not rendered buffers
+ virtual void flushSurfaceBuffers(void);
+ virtual Decode_Status endDecodingFrame(bool dropFrame);
+ virtual VideoSurfaceBuffer* findOutputByPoc(bool draining = false);
+ virtual VideoSurfaceBuffer* findOutputByPct(bool draining = false);
+ virtual VideoSurfaceBuffer* findOutputByPts();
+ virtual Decode_Status setupVA(uint32_t numSurface, VAProfile profile, uint32_t numExtraSurface = 0);
+ virtual Decode_Status terminateVA(void);
+ virtual Decode_Status parseBuffer(uint8_t *buffer, int32_t size, bool config, void** vbpData);
+
+ static inline uint32_t alignMB(uint32_t a) {
+ return ((a + 15) & (~15));
+ }
+
+ virtual Decode_Status getRawDataFromSurface(VideoRenderBuffer *renderBuffer = NULL, uint8_t *pRawData = NULL, uint32_t *pSize = NULL, bool internal = true);
+
+#if (defined USE_AVC_SHORT_FORMAT) || (defined USE_SLICE_HEADER_PARSING)
+ Decode_Status updateBuffer(uint8_t *buffer, int32_t size, void** vbpData);
+ Decode_Status queryBuffer(void **vbpData);
+ Decode_Status setParserType(_vbp_parser_type type);
+ virtual Decode_Status getCodecSpecificConfigs(VAProfile profile, VAConfigID *config);
+#endif
+ virtual Decode_Status checkHardwareCapability();
+private:
+ Decode_Status mapSurface(void);
+ void initSurfaceBuffer(bool reset);
+ void drainDecodingErrors(VideoErrorBuffer *outErrBuf, VideoRenderBuffer *currentSurface);
+ void fillDecodingErrors(VideoRenderBuffer *currentSurface);
+
+ bool mInitialized;
+ pthread_mutex_t mLock;
+
+protected:
+ bool mLowDelay; // when true, decoded frame is immediately output for rendering
+ VideoFormatInfo mVideoFormatInfo;
+ Display *mDisplay;
+ VADisplay mVADisplay;
+ VAContextID mVAContext;
+ VAConfigID mVAConfig;
+ VASurfaceID *mExtraSurfaces; // extra surfaces array
+ int32_t mNumExtraSurfaces;
+ bool mVAStarted;
+    uint64_t mCurrentPTS; // current presentation time stamp (unit depends on the framework: GStreamer uses 100-nanosecond units, Android uses microseconds)
+ // the following three member variables should be set using
+ // acquireSurfaceBuffer/outputSurfaceBuffer/releaseSurfaceBuffer
+ VideoSurfaceBuffer *mAcquiredBuffer;
+ VideoSurfaceBuffer *mLastReference;
+ VideoSurfaceBuffer *mForwardReference;
+    VideoConfigBuffer mConfigBuffer; // only stores configuration metadata.
+ bool mDecodingFrame; // indicate whether a frame is being decoded
+ bool mSizeChanged; // indicate whether video size is changed.
+ bool mShowFrame; // indicate whether the decoded frame is for display
+
+ int32_t mOutputWindowSize; // indicate limit of number of outstanding frames for output
+ int32_t mRotationDegrees;
+
+ bool mErrReportEnabled;
+ bool mWiDiOn;
+ typedef uint32_t (*OpenFunc)(uint32_t, void **);
+ typedef uint32_t (*CloseFunc)(void *);
+ typedef uint32_t (*ParseFunc)(void *, uint8_t *, uint32_t, uint8_t);
+ typedef uint32_t (*QueryFunc)(void *, void **);
+ typedef uint32_t (*FlushFunc)(void *);
+ typedef uint32_t (*UpdateFunc)(void *, void *, uint32_t, void **);
+ void *mLibHandle;
+ OpenFunc mParserOpen;
+ CloseFunc mParserClose;
+ ParseFunc mParserParse;
+ QueryFunc mParserQuery;
+ FlushFunc mParserFlush;
+ UpdateFunc mParserUpdate;
+ enum {
+ // TODO: move this to vbp_loader.h
+ VBP_INVALID = 0xFF,
+ // TODO: move this to va.h
+ VAProfileSoftwareDecoding = 0xFF,
+ };
+
+ enum OUTPUT_METHOD {
+ // output by Picture Coding Type (I, P, B)
+ OUTPUT_BY_PCT,
+ // output by Picture Order Count (for AVC only)
+ OUTPUT_BY_POC,
+ //OUTPUT_BY_POS,
+ //OUTPUT_BY_PTS,
+ };
+
+private:
+ bool mRawOutput; // whether to output NV12 raw data
+ bool mManageReference; // this should stay true for VC1/MP4 decoder, and stay false for AVC decoder. AVC handles reference frame using DPB
+ OUTPUT_METHOD mOutputMethod;
+
+ int32_t mNumSurfaces;
+ VideoSurfaceBuffer *mSurfaceBuffers;
+ VideoSurfaceBuffer *mOutputHead; // head of output buffer list
+ VideoSurfaceBuffer *mOutputTail; // tail of output buffer list
+ VASurfaceID *mSurfaces; // surfaces array
+ VASurfaceAttribExternalBuffers *mVASurfaceAttrib;
+ uint8_t **mSurfaceUserPtr; // mapped user space pointer
+ int32_t mSurfaceAcquirePos; // position of surface to start acquiring
+ int32_t mNextOutputPOC; // Picture order count of next output
+ _vbp_parser_type mParserType;
+ void *mParserHandle;
+ void *mSignalBufferPre[MAX_GRAPHIC_BUFFER_NUM];
+ uint32 mSignalBufferSize;
+ bool mUseGEN;
+protected:
+ void ManageReference(bool enable) {mManageReference = enable;}
+ void setOutputMethod(OUTPUT_METHOD method) {mOutputMethod = method;}
+ void setOutputWindowSize(int32_t size) {mOutputWindowSize = (size < OUTPUT_WINDOW_SIZE) ? size : OUTPUT_WINDOW_SIZE;}
+ void querySurfaceRenderStatus(VideoSurfaceBuffer* surface);
+ void enableLowDelayMode(bool enable) {mLowDelay = enable;}
+ void setRotationDegrees(int32_t rotationDegrees);
+ void setRenderRect(void);
+};
+
+
+#endif // VIDEO_DECODER_BASE_H_
diff --git a/videodecoder/VideoDecoderDefs.h b/videodecoder/VideoDecoderDefs.h
new file mode 100644
index 0000000..c9b5d30
--- /dev/null
+++ b/videodecoder/VideoDecoderDefs.h
@@ -0,0 +1,263 @@
+/*
+* Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+#ifndef VIDEO_DECODER_DEFS_H_
+#define VIDEO_DECODER_DEFS_H_
+
+#include <va/va.h>
+#include <stdint.h>
+
+// format specific data, for future extension.
+struct VideoExtensionBuffer {
+ int32_t extType;
+ int32_t extSize;
+ uint8_t *extData;
+};
+
+typedef enum {
+ PACKED_FRAME_TYPE,
+} VIDEO_EXTENSION_TYPE;
+
+struct VideoFrameRawData {
+ int32_t width;
+ int32_t height;
+ int32_t pitch[3];
+ int32_t offset[3];
+ uint32_t fourcc; //NV12
+ int32_t size;
+ uint8_t *data;
+    bool own; // own data or derived from surface. If true, the library will release the memory during cleanup
+};
+
+struct PackedFrameData {
+ int64_t timestamp;
+ int32_t offSet;
+};
+
+// flags for VideoDecodeBuffer, VideoConfigBuffer and VideoRenderBuffer
+typedef enum {
+    // indicates if the sample has a discontinuity in time stamp (usually happens after seeking)
+ HAS_DISCONTINUITY = 0x01,
+
+    // indicates whether the sample contains a complete frame or the end of a frame.
+ HAS_COMPLETE_FRAME = 0x02,
+
+ // indicate whether surfaceNumber field in the VideoConfigBuffer is valid
+ HAS_SURFACE_NUMBER = 0x04,
+
+ // indicate whether profile field in the VideoConfigBuffer is valid
+ HAS_VA_PROFILE = 0x08,
+
+    // indicates whether the output order will be the same as the decoding order
+ WANT_LOW_DELAY = 0x10, // make display order same as decoding order
+
+ // indicates whether error concealment algorithm should be enabled to automatically conceal error.
+ WANT_ERROR_CONCEALMENT = 0x20,
+
+    // indicates whether raw data should be output.
+ WANT_RAW_OUTPUT = 0x40,
+
+ // indicate sample is decoded but should not be displayed.
+ WANT_DECODE_ONLY = 0x80,
+
+ // indicate surfaceNumber field is valid and it contains minimum surface number to allocate.
+ HAS_MINIMUM_SURFACE_NUMBER = 0x100,
+
+ // indicates surface created will be protected
+ WANT_SURFACE_PROTECTION = 0x400,
+
+ // indicates if extra data is appended at end of buffer
+ HAS_EXTRADATA = 0x800,
+
+ // indicates if buffer contains codec data
+ HAS_CODECDATA = 0x1000,
+
+    // indicates whether a native graphic buffer is used.
+ USE_NATIVE_GRAPHIC_BUFFER = 0x2000,
+
+ // indicate whether it is a sync frame in container
+ IS_SYNC_FRAME = 0x4000,
+
+ // indicate whether video decoder buffer contains secure data
+ IS_SECURE_DATA = 0x8000,
+
+ // indicate it's the last output frame of the sequence
+ IS_EOS = 0x10000,
+
+    // indicates tiling surfaces should be allocated
+ USE_TILING_MEMORY = 0x20000,
+
+ // indicate the frame has resolution change
+ IS_RESOLUTION_CHANGE = 0x40000,
+
+ // indicate whether video decoder buffer contains only one field
+ IS_SINGLE_FIELD = 0x80000,
+
+ // indicate adaptive playback mode
+ WANT_ADAPTIVE_PLAYBACK = 0x100000,
+
+ // indicate the modular drm type
+ IS_SUBSAMPLE_ENCRYPTION = 0x200000,
+
+} VIDEO_BUFFER_FLAG;
+
+typedef enum
+{
+ DecodeHeaderError = 0,
+ DecodeMBError = 1,
+ DecodeSliceMissing = 2,
+ DecodeRefMissing = 3,
+} VideoDecodeErrorType;
+
+#define MAX_ERR_NUM 10
+
+struct VideoDecodeBuffer {
+ uint8_t *data;
+ int32_t size;
+ int64_t timeStamp;
+ uint32_t flag;
+ uint32_t rotationDegrees;
+ VideoExtensionBuffer *ext;
+};
+
+
+//#define MAX_GRAPHIC_BUFFER_NUM (16 + 1 + 11) // max DPB + 1 + AVC_EXTRA_NUM
+#define MAX_GRAPHIC_BUFFER_NUM 64 // extended for VPP
+
+struct VideoConfigBuffer {
+ uint8_t *data;
+ int32_t size;
+ int32_t width;
+ int32_t height;
+ uint32_t surfaceNumber;
+ VAProfile profile;
+ uint32_t flag;
+ void *graphicBufferHandler[MAX_GRAPHIC_BUFFER_NUM];
+ uint32_t graphicBufferStride;
+ uint32_t graphicBufferColorFormat;
+ uint32_t graphicBufferWidth;
+ uint32_t graphicBufferHeight;
+ VideoExtensionBuffer *ext;
+ void* nativeWindow;
+ uint32_t rotationDegrees;
+#ifdef TARGET_HAS_VPP
+ uint32_t vppBufferNum;
+#endif
+};
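+
+// Minimal configuration sketch (hypothetical values; only the fields a simple
+// client without native graphic buffers might fill):
+#if 0
+VideoConfigBuffer config;
+memset(&config, 0, sizeof(config));
+config.width = 1920;
+config.height = 1080;
+config.surfaceNumber = 8;
+config.flag = HAS_SURFACE_NUMBER | WANT_RAW_OUTPUT;  // see VIDEO_BUFFER_FLAG above
+config.data = codecData;                             // e.g. SPS/PPS for AVC
+config.size = codecDataSize;
+#endif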
+
+struct VideoErrorInfo {
+ VideoDecodeErrorType type;
+ uint32_t num_mbs;
+ union {
+ struct {uint32_t start_mb; uint32_t end_mb;} mb_pos;
+ } error_data;
+};
+
+struct VideoErrorBuffer {
+ uint32_t errorNumber; // Error number should be no more than MAX_ERR_NUM
+ int64_t timeStamp; // presentation time stamp
+ VideoErrorInfo errorArray[MAX_ERR_NUM];
+};
+
+struct VideoRenderBuffer {
+ VASurfaceID surface;
+ VADisplay display;
+ int32_t scanFormat; //progressive, top-field first, or bottom-field first
+ int64_t timeStamp; // presentation time stamp
+    mutable volatile bool renderDone; // indicates whether the frame is rendered; this must be set to false by the client of this library once
+ // surface is rendered. Not setting this flag will lead to DECODE_NO_SURFACE error.
+ void * graphicBufferHandle;
+    int32_t graphicBufferIndex; // the index in the graphic buffer handle array
+ uint32_t flag;
+ mutable volatile bool driverRenderDone;
+ VideoFrameRawData *rawData;
+
+ VideoErrorBuffer errBuf;
+};
+
+struct VideoSurfaceBuffer {
+ VideoRenderBuffer renderBuffer;
+ int32_t pictureOrder; // picture order count, valid only for AVC format
+    bool referenceFrame; // indicates whether the frame associated with this surface is a reference I/P frame
+    bool asReferernce; // indicates whether the frame is used as a reference (as a result the surface cannot be reused for decoding)
+ VideoFrameRawData *mappedData;
+ VideoSurfaceBuffer *next;
+};
+
+struct VideoFormatInfo {
+ bool valid; // indicates whether format info is valid. MimeType is always valid.
+ char *mimeType;
+ uint32_t width;
+ uint32_t height;
+ uint32_t surfaceWidth;
+ uint32_t surfaceHeight;
+ uint32_t surfaceNumber;
+ VASurfaceID *ctxSurfaces;
+ int32_t aspectX;
+ int32_t aspectY;
+ int32_t cropLeft;
+ int32_t cropRight;
+ int32_t cropTop;
+ int32_t cropBottom;
+ int32_t colorMatrix;
+ int32_t videoRange;
+ int32_t bitrate;
+ int32_t framerateNom;
+ int32_t framerateDenom;
+ uint32_t actualBufferNeeded;
+ int32_t flags; // indicate whether current picture is field or frame
+ VideoExtensionBuffer *ext;
+};
+
+// TODO: categorize the following errors as fatal and non-fatal.
+typedef enum {
+ DECODE_NOT_STARTED = -10,
+ DECODE_NEED_RESTART = -9,
+ DECODE_NO_CONFIG = -8,
+ DECODE_NO_SURFACE = -7,
+ DECODE_NO_REFERENCE = -6,
+ DECODE_NO_PARSER = -5,
+ DECODE_INVALID_DATA = -4,
+ DECODE_DRIVER_FAIL = -3,
+ DECODE_PARSER_FAIL = -2,
+ DECODE_MEMORY_FAIL = -1,
+ DECODE_FAIL = 0,
+ DECODE_SUCCESS = 1,
+ DECODE_FORMAT_CHANGE = 2,
+ DECODE_FRAME_DROPPED = 3,
+ DECODE_MULTIPLE_FRAME = 4,
+} VIDEO_DECODE_STATUS;
+
+typedef int32_t Decode_Status;
+
+#ifndef NULL
+#define NULL 0
+#endif
+
+inline bool checkFatalDecoderError(Decode_Status status) {
+ if (status == DECODE_NOT_STARTED ||
+ status == DECODE_NEED_RESTART ||
+ status == DECODE_NO_PARSER ||
+ status == DECODE_INVALID_DATA ||
+ status == DECODE_MEMORY_FAIL ||
+ status == DECODE_FAIL) {
+ return true;
+ } else {
+ return false;
+ }
+}
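+
+// Typical use (a sketch; `decoder` and `buffer` are stand-in names): distinguish
+// fatal from recoverable statuses after each decode call. Non-fatal statuses such
+// as DECODE_FORMAT_CHANGE or DECODE_NO_SURFACE can be handled and decoding resumed.
+#if 0
+Decode_Status status = decoder->decode(&buffer);
+if (checkFatalDecoderError(status)) {
+    // tear the decoder down and restart it
+}
+#endif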
+
+#endif // VIDEO_DECODER_DEFS_H_
diff --git a/videodecoder/VideoDecoderHost.cpp b/videodecoder/VideoDecoderHost.cpp
new file mode 100644
index 0000000..56f55d7
--- /dev/null
+++ b/videodecoder/VideoDecoderHost.cpp
@@ -0,0 +1,85 @@
+/*
+* Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+#include "VideoDecoderWMV.h"
+#include "VideoDecoderMPEG4.h"
+#include "VideoDecoderAVC.h"
+
+#ifdef USE_INTEL_SECURE_AVC
+#include "VideoDecoderAVCSecure.h"
+#endif
+
+#ifdef USE_HW_VP8
+#include "VideoDecoderVP8.h"
+#endif
+#include "VideoDecoderHost.h"
+#include "VideoDecoderTrace.h"
+#include <string.h>
+
+IVideoDecoder* createVideoDecoder(const char* mimeType) {
+ if (mimeType == NULL) {
+ ETRACE("NULL mime type.");
+ return NULL;
+ }
+
+ if (strcasecmp(mimeType, "video/wmv") == 0 ||
+ strcasecmp(mimeType, "video/vc1") == 0 ||
+ strcasecmp(mimeType, "video/x-ms-wmv") == 0) {
+ VideoDecoderWMV *p = new VideoDecoderWMV(mimeType);
+ return (IVideoDecoder *)p;
+ } else if (strcasecmp(mimeType, "video/avc") == 0 ||
+ strcasecmp(mimeType, "video/h264") == 0) {
+ VideoDecoderAVC *p = new VideoDecoderAVC(mimeType);
+ return (IVideoDecoder *)p;
+ } else if (strcasecmp(mimeType, "video/mp4v-es") == 0 ||
+ strcasecmp(mimeType, "video/mpeg4") == 0 ||
+ strcasecmp(mimeType, "video/h263") == 0 ||
+ strcasecmp(mimeType, "video/3gpp") == 0) {
+ VideoDecoderMPEG4 *p = new VideoDecoderMPEG4(mimeType);
+ return (IVideoDecoder *)p;
+ }
+#ifdef USE_INTEL_SECURE_AVC
+ else if (strcasecmp(mimeType, "video/avc-secure") == 0) {
+ VideoDecoderAVC *p = new VideoDecoderAVCSecure(mimeType);
+ return (IVideoDecoder *)p;
+ }
+#endif
+
+#ifdef USE_HW_VP8
+ else if (strcasecmp(mimeType, "video/vp8") == 0 ||
+ strcasecmp(mimeType, "video/x-vnd.on2.vp8") == 0) {
+ VideoDecoderVP8 *p = new VideoDecoderVP8(mimeType);
+ return (IVideoDecoder *)p;
+ }
+#endif
+
+ else {
+ ETRACE("Unknown mime type: %s", mimeType);
+ }
+ return NULL;
+}
+
+void releaseVideoDecoder(IVideoDecoder* p) {
+ if (p) {
+ const VideoFormatInfo *info = p->getFormatInfo();
+ if (info && info->mimeType) {
+ ITRACE("Deleting decoder for %s", info->mimeType);
+ }
+ }
+ delete p;
+}
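+
+// End-to-end usage sketch (hypothetical client; error handling and buffer setup
+// elided):
+#if 0
+IVideoDecoder *decoder = createVideoDecoder("video/avc");
+decoder->start(&configBuffer);
+while (haveInput) {
+    decoder->decode(&decodeBuffer);
+    const VideoRenderBuffer *out = decoder->getOutput();
+    // render `out`, then return it via decoder->signalRenderDone(...)
+}
+decoder->stop();
+releaseVideoDecoder(decoder);
+#endif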
+
+
diff --git a/videodecoder/VideoDecoderHost.h b/videodecoder/VideoDecoderHost.h
new file mode 100644
index 0000000..1f053b6
--- /dev/null
+++ b/videodecoder/VideoDecoderHost.h
@@ -0,0 +1,29 @@
+/*
+* Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+
+#ifndef VIDEO_DECODER_HOST_H_
+#define VIDEO_DECODER_HOST_H_
+
+
+#include "VideoDecoderInterface.h"
+
+IVideoDecoder* createVideoDecoder(const char* mimeType);
+void releaseVideoDecoder(IVideoDecoder *p);
+
+
+
+#endif /* VIDEO_DECODER_HOST_H_ */
diff --git a/videodecoder/VideoDecoderInterface.h b/videodecoder/VideoDecoderInterface.h
new file mode 100644
index 0000000..fdc2c12
--- /dev/null
+++ b/videodecoder/VideoDecoderInterface.h
@@ -0,0 +1,40 @@
+/*
+* Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+
+#ifndef VIDEO_DECODER_INTERFACE_H_
+#define VIDEO_DECODER_INTERFACE_H_
+
+#include "VideoDecoderDefs.h"
+
+class IVideoDecoder {
+public:
+ virtual ~IVideoDecoder() {}
+ virtual Decode_Status start(VideoConfigBuffer *buffer) = 0;
+ virtual Decode_Status reset(VideoConfigBuffer *buffer) = 0;
+ virtual void stop(void) = 0;
+ virtual void flush() = 0;
+ virtual Decode_Status decode(VideoDecodeBuffer *buffer) = 0;
+ virtual void freeSurfaceBuffers(void) = 0;
+ virtual const VideoRenderBuffer* getOutput(bool draining = false, VideoErrorBuffer *output_buf = NULL) = 0;
+ virtual const VideoFormatInfo* getFormatInfo(void) = 0;
+ virtual Decode_Status signalRenderDone(void * graphichandler) = 0;
+ virtual bool checkBufferAvail() = 0;
+ virtual Decode_Status getRawDataFromSurface(VideoRenderBuffer *renderBuffer = NULL, uint8_t *pRawData = NULL, uint32_t *pSize = NULL, bool internal = true) = 0;
+ virtual void enableErrorReport(bool enabled) = 0;
+};
+
+#endif /* VIDEO_DECODER_INTERFACE_H_ */
diff --git a/videodecoder/VideoDecoderMPEG4.cpp b/videodecoder/VideoDecoderMPEG4.cpp
new file mode 100644
index 0000000..b54afa9
--- /dev/null
+++ b/videodecoder/VideoDecoderMPEG4.cpp
@@ -0,0 +1,645 @@
+/*
+* Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+#include "VideoDecoderMPEG4.h"
+#include "VideoDecoderTrace.h"
+#include <string.h>
+
+VideoDecoderMPEG4::VideoDecoderMPEG4(const char *mimeType)
+ : VideoDecoderBase(mimeType, VBP_MPEG4),
+ mLastVOPTimeIncrement(0),
+ mExpectingNVOP(false),
+ mSendIQMatrixBuf(false),
+ mLastVOPCodingType(MP4_VOP_TYPE_I),
+ mIsShortHeader(false) {
+}
+
+VideoDecoderMPEG4::~VideoDecoderMPEG4() {
+ stop();
+}
+
+Decode_Status VideoDecoderMPEG4::start(VideoConfigBuffer *buffer) {
+ Decode_Status status;
+
+ status = VideoDecoderBase::start(buffer);
+ CHECK_STATUS("VideoDecoderBase::start");
+
+ if (buffer->data == NULL || buffer->size == 0) {
+ WTRACE("No config data to start VA.");
+ return DECODE_SUCCESS;
+ }
+
+ vbp_data_mp42 *data = NULL;
+ status = VideoDecoderBase::parseBuffer(buffer->data, buffer->size, true, (void**)&data);
+ CHECK_STATUS("VideoDecoderBase::parseBuffer");
+
+ status = startVA(data);
+ return status;
+}
+
+void VideoDecoderMPEG4::stop(void) {
+ // drop the last frame and ignore return value
+ endDecodingFrame(true);
+ VideoDecoderBase::stop();
+
+ mLastVOPTimeIncrement = 0;
+ mExpectingNVOP = false;
+ mLastVOPCodingType = MP4_VOP_TYPE_I;
+}
+
+Decode_Status VideoDecoderMPEG4::decode(VideoDecodeBuffer *buffer) {
+ Decode_Status status;
+ vbp_data_mp42 *data = NULL;
+ bool useGraphicbuffer = mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER;
+ if (buffer == NULL) {
+ return DECODE_INVALID_DATA;
+ }
+ if (buffer->flag & IS_SYNC_FRAME) {
+ mIsSyncFrame = true;
+ } else {
+ mIsSyncFrame = false;
+ }
+ buffer->ext = NULL;
+ status = VideoDecoderBase::parseBuffer(
+ buffer->data,
+ buffer->size,
+ false,
+ (void**)&data);
+ CHECK_STATUS("VideoDecoderBase::parseBuffer");
+
+ if (!mVAStarted) {
+ status = startVA(data);
+ CHECK_STATUS("startVA");
+ }
+
+ if (mSizeChanged && !useGraphicbuffer) {
+        // some containers have an incorrect width/height.
+ // send the format change to OMX to update the crop info.
+ mSizeChanged = false;
+ ITRACE("Video size is changed during startVA");
+ return DECODE_FORMAT_CHANGE;
+ }
+
+ if ((mVideoFormatInfo.width != (uint32_t)data->codec_data.video_object_layer_width ||
+ mVideoFormatInfo.height != (uint32_t)data->codec_data.video_object_layer_height) &&
+ data->codec_data.video_object_layer_width &&
+ data->codec_data.video_object_layer_height) {
+ // update encoded image size
+ ITRACE("Video size is changed. from %dx%d to %dx%d\n",mVideoFormatInfo.width,mVideoFormatInfo.height,
+ data->codec_data.video_object_layer_width,data->codec_data.video_object_layer_height);
+ bool noNeedFlush = false;
+ mVideoFormatInfo.width = data->codec_data.video_object_layer_width;
+ mVideoFormatInfo.height = data->codec_data.video_object_layer_height;
+ if (useGraphicbuffer) {
+ noNeedFlush = (mVideoFormatInfo.width <= mVideoFormatInfo.surfaceWidth)
+ && (mVideoFormatInfo.height <= mVideoFormatInfo.surfaceHeight);
+ }
+ if (!noNeedFlush) {
+ flushSurfaceBuffers();
+ mSizeChanged = false;
+ return DECODE_FORMAT_CHANGE;
+ } else {
+ mSizeChanged = true;
+ }
+
+ setRenderRect();
+ }
+
+ status = decodeFrame(buffer, data);
+ CHECK_STATUS("decodeFrame");
+
+ return status;
+}
+
+void VideoDecoderMPEG4::flush(void) {
+ VideoDecoderBase::flush();
+
+ mExpectingNVOP = false;
+ mLastVOPTimeIncrement = 0;
+ mLastVOPCodingType = MP4_VOP_TYPE_I;
+}
+
+Decode_Status VideoDecoderMPEG4::decodeFrame(VideoDecodeBuffer *buffer, vbp_data_mp42 *data) {
+ Decode_Status status;
+    // check if any slice was parsed; we may have received only configuration data
+ if (data->number_picture_data == 0) {
+ WTRACE("number_picture_data == 0");
+ return DECODE_SUCCESS;
+ }
+
+    // When the MPEG4 parser returns invalid parameters, check them
+    // and return an error to OMX to avoid a mediaserver crash.
+ if (data->picture_data && (data->picture_data->picture_param.vop_width == 0
+ || data->picture_data->picture_param.vop_height == 0)) {
+ return DECODE_PARSER_FAIL;
+ }
+
+ uint64_t lastPTS = mCurrentPTS;
+ mCurrentPTS = buffer->timeStamp;
+
+ if (lastPTS != mCurrentPTS) {
+ // finish decoding the last frame
+ status = endDecodingFrame(false);
+ CHECK_STATUS("endDecodingFrame");
+
+ // start decoding a new frame
+ status = beginDecodingFrame(data);
+ if (status == DECODE_MULTIPLE_FRAME) {
+ buffer->ext = &mExtensionBuffer;
+ mExtensionBuffer.extType = PACKED_FRAME_TYPE;
+ mExtensionBuffer.extSize = sizeof(mPackedFrame);
+ mExtensionBuffer.extData = (uint8_t*)&mPackedFrame;
+ } else if (status != DECODE_SUCCESS) {
+ endDecodingFrame(true);
+ }
+ CHECK_STATUS("beginDecodingFrame");
+ } else {
+ status = continueDecodingFrame(data);
+ if (status == DECODE_MULTIPLE_FRAME) {
+ buffer->ext = &mExtensionBuffer;
+ mExtensionBuffer.extType = PACKED_FRAME_TYPE;
+ mExtensionBuffer.extSize = sizeof(mPackedFrame);
+ mExtensionBuffer.extData = (uint8_t*)&mPackedFrame;
+ } else if (status != DECODE_SUCCESS) {
+ endDecodingFrame(true);
+ }
+ CHECK_STATUS("continueDecodingFrame");
+ }
+
+ if (buffer->flag & HAS_COMPLETE_FRAME) {
+ // finish decoding current frame
+ status = endDecodingFrame(false);
+ CHECK_STATUS("endDecodingFrame");
+ }
+
+ return DECODE_SUCCESS;
+}
+
+Decode_Status VideoDecoderMPEG4::beginDecodingFrame(vbp_data_mp42 *data) {
+
+ Decode_Status status = DECODE_SUCCESS;
+ vbp_picture_data_mp42 *picData = data->picture_data;
+ VAPictureParameterBufferMPEG4 *picParam = &(picData->picture_param);
+ int codingType = picParam->vop_fields.bits.vop_coding_type;
+
+ // start sanity checking
+ if (mExpectingNVOP) {
+ // if we are waiting for n-vop for packed frame, and the new frame is coded, the coding type
+ // of this frame must be B
+ // for example: {PB} B N P B B P...
+ if (picData->vop_coded == 1 && codingType != MP4_VOP_TYPE_B) {
+ WTRACE("Invalid coding type while waiting for n-vop for packed frame.");
+ mExpectingNVOP = false;
+ }
+ }
+
+    // handle an N-VOP picture; it could be a skipped frame or a simple placeholder for a packed frame
+ if (picData->vop_coded == 0) {
+ if (mLastReference == NULL) {
+ WTRACE("The last reference is unavailable to construct skipped frame.");
+ flush();
+ mExpectingNVOP = false;
+ // TODO: handle this case
+ return DECODE_SUCCESS;
+ }
+
+ if (mExpectingNVOP) {
+ // P frame is already in queue, just need to update time stamp.
+ mLastReference->renderBuffer.timeStamp = mCurrentPTS;
+ mExpectingNVOP = false;
+ }
+ else {
+            // Do nothing for a skipped frame as the last frame will be rendered again natively
+            // No need to handle the reference frame either
+#if 0
+ // this is skipped frame, use the last reference frame as output
+ status = acquireSurfaceBuffer();
+ CHECK_STATUS("acquireSurfaceBuffer");
+ mAcquiredBuffer->renderBuffer.timeStamp = mCurrentPTS;
+ mAcquiredBuffer->renderBuffer.flag = 0;
+ mAcquiredBuffer->renderBuffer.scanFormat = mLastReference->renderBuffer.scanFormat;
+ mAcquiredBuffer->renderBuffer.surface = mLastReference->renderBuffer.surface;
+ // No need to update mappedData for HW decoding
+ //mAcquiredBuffer->mappedData.data = mLastReference->mappedData.data;
+ mAcquiredBuffer->referenceFrame = true;
+ status = outputSurfaceBuffer();
+ CHECK_STATUS("outputSurfaceBuffer");
+#endif
+ }
+
+ if (data->number_picture_data > 1) {
+ WTRACE("Unexpected to have more picture data following a non-coded VOP.");
+            // The picture data is thrown away. No issue if it is for an N-VOP;
+            // if it is for a coded picture, a frame is lost.
+ // TODO: handle this case
+ // return DECODE_FAIL;
+ }
+ return DECODE_SUCCESS;
+ }
+ else {
+ // Check if we have reference frame(s) for decoding
+ if (codingType == MP4_VOP_TYPE_B) {
+ if (mForwardReference == NULL ||
+ mLastReference == NULL) {
+ if (mIsShortHeader) {
+ status = DECODE_SUCCESS;
+ VTRACE("%s: No reference frame but keep decoding", __FUNCTION__);
+ } else
+ return DECODE_NO_REFERENCE;
+ }
+ } else if (codingType == MP4_VOP_TYPE_P || codingType == MP4_VOP_TYPE_S) {
+ if (mLastReference == NULL && mIsSyncFrame == false) {
+ if (mIsShortHeader) {
+ status = DECODE_SUCCESS;
+ VTRACE("%s: No reference frame but keep decoding", __FUNCTION__);
+ } else
+ return DECODE_NO_REFERENCE;
+ }
+ }
+ // all sanity checks pass, continue decoding through continueDecodingFrame
+ status = continueDecodingFrame(data);
+ }
+ return status;
+}
+
+Decode_Status VideoDecoderMPEG4::continueDecodingFrame(vbp_data_mp42 *data) {
+ Decode_Status status = DECODE_SUCCESS;
+ VAStatus vaStatus = VA_STATUS_SUCCESS;
+ bool useGraphicBuffer = mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER;
+
+ /*
+ Packed Frame Assumption:
+
+ 1. In one packed frame, there's only one P or I frame and only one B frame.
+ 2. In packed frame, there's no skipped frame (vop_coded = 0)
+    3. For each packed frame, one N-VOP frame will follow it (though not necessarily immediately).
+ 4. N-VOP frame is the frame with vop_coded = 0.
+ 5. The timestamp of N-VOP frame will be used for P or I frame in the packed frame
+
+
+ I, P, {P, B}, B, N, P, N, I, ...
+ I, P, {P, B}, N, P, N, I, ...
+
+    The first N is a placeholder for the P frame in the packed frame.
+    The second N is a skipped frame.
+ */
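+
+    /*
+    Worked example of the timestamp math below (illustrative numbers only):
+    with vop_time_increment_resolution = 30000, a packed-frame P picture whose
+    vop_time_increment is 3000, and a B picture whose vop_time_increment is 1500,
+    the distance is (3000 - 1500 + 30000) % 30000 = 1500 ticks, which converts
+    to 1500 * 1e6 / 30000 = 50000 micro-seconds.
+    */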
+
+ vbp_picture_data_mp42 *picData = data->picture_data;
+ for (uint32_t i = 0; i < data->number_picture_data; i++, picData = picData->next_picture_data) {
+        // each slice has its own picture data; a video_packet_header following a resync_marker may reset the picture header (see MP4 spec)
+ VAPictureParameterBufferMPEG4 *picParam = &(picData->picture_param);
+ int codingType = picParam->vop_fields.bits.vop_coding_type;
+ if (codingType == MP4_VOP_TYPE_S && picParam->no_of_sprite_warping_points > 1) {
+ WTRACE("Hardware only supports up to one warping point (stationary or translation)");
+ }
+
+ if (picData->vop_coded == 0) {
+ ETRACE("Unexpected to have non-coded VOP.");
+ return DECODE_FAIL;
+ }
+ if (picData->new_picture_flag == 1 || mDecodingFrame == false) {
+ // either condition indicates start of a new frame
+ if (picData->new_picture_flag == 0) {
+ WTRACE("First slice of picture is lost!");
+ // TODO: handle this case
+ }
+ if (mDecodingFrame) {
+ if (codingType == MP4_VOP_TYPE_B){
+ // this indicates the start of a new frame in the packed frame
+ // Update timestamp for P frame in the packed frame as timestamp here is for the B frame!
+ if (picParam->vop_time_increment_resolution){
+ uint64_t increment = mLastVOPTimeIncrement - picData->vop_time_increment +
+ picParam->vop_time_increment_resolution;
+ increment = increment % picParam->vop_time_increment_resolution;
+ // convert to micro-second
+                        // TODO: the unit of the timestamp varies across frameworks
+ increment = increment * 1e6 / picParam->vop_time_increment_resolution;
+ mAcquiredBuffer->renderBuffer.timeStamp += increment;
+ if (useGraphicBuffer){
+ mPackedFrame.timestamp = mCurrentPTS;
+ mCurrentPTS = mAcquiredBuffer->renderBuffer.timeStamp;
+ }
+ }
+ } else {
+                    // this indicates the start of a new frame in the packed frame, with no B frame in the packet;
+                    // update the timestamp according to the increment
+ if (picParam->vop_time_increment_resolution){
+ int64_t increment = picData->vop_time_increment - mLastVOPTimeIncrement + picParam->vop_time_increment_resolution;
+ increment = increment % picParam->vop_time_increment_resolution;
+ //convert to micro-second
+ increment = increment * 1e6 / picParam->vop_time_increment_resolution;
+ if (useGraphicBuffer) {
+ mPackedFrame.timestamp = mCurrentPTS + increment;
+ }
+ else {
+ mCurrentPTS += increment;
+ }
+
+ } else {
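+                        // No time-increment resolution is available: fall back to
+                        // a fixed 30000-microsecond (30 ms) step, i.e. roughly a
+                        // 33 fps cadence (a fallback assumption, not stream-derived).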
+ if (useGraphicBuffer) {
+ mPackedFrame.timestamp = mCurrentPTS + 30000;
+ }
+ else {
+ mCurrentPTS += 30000;
+ }
+ }
+ }
+ endDecodingFrame(false);
+ mExpectingNVOP = true;
+ if (codingType != MP4_VOP_TYPE_B) {
+ mExpectingNVOP = false;
+ }
+ if (useGraphicBuffer) {
+ int32_t count = i - 1;
+ if (count < 0) {
+ WTRACE("Shuld not be here!");
+ return DECODE_SUCCESS;
+ }
+ vbp_picture_data_mp42 *lastpic = data->picture_data;
+ for(int k = 0; k < count; k++ ) {
+ lastpic = lastpic->next_picture_data;
+ }
+ mPackedFrame.offSet = lastpic->slice_data.slice_offset + lastpic->slice_data.slice_size;
+ VTRACE("Report OMX to handle for Multiple frame offset=%d time=%lld",mPackedFrame.offSet,mPackedFrame.timestamp);
+ return DECODE_MULTIPLE_FRAME;
+ }
+ }
+
+ // acquire a new surface buffer
+ status = acquireSurfaceBuffer();
+ CHECK_STATUS("acquireSurfaceBuffer");
+
+        // sprite is treated as a P frame in display order, so only B frames are not used as reference
+ mAcquiredBuffer->referenceFrame = (codingType != MP4_VOP_TYPE_B);
+ if (picData->picture_param.vol_fields.bits.interlaced) {
+            // only the MPEG-4 studio profile can have field coding. All other profiles
+            // use frame coding only, i.e., there is no field VOP (see vop_structure in MP4 spec)
+ mAcquiredBuffer->renderBuffer.scanFormat = VA_BOTTOM_FIELD | VA_TOP_FIELD;
+ } else {
+ mAcquiredBuffer->renderBuffer.scanFormat = VA_FRAME_PICTURE;
+ }
+ // TODO: set discontinuity flag
+ mAcquiredBuffer->renderBuffer.flag = 0;
+ mAcquiredBuffer->renderBuffer.timeStamp = mCurrentPTS;
+ if (mSizeChanged) {
+ mAcquiredBuffer->renderBuffer.flag |= IS_RESOLUTION_CHANGE;
+ mSizeChanged = false;
+ }
+ if (codingType != MP4_VOP_TYPE_B) {
+ mLastVOPCodingType = codingType;
+ mLastVOPTimeIncrement = picData->vop_time_increment;
+ }
+
+ // start decoding a frame
+ vaStatus = vaBeginPicture(mVADisplay, mVAContext, mAcquiredBuffer->renderBuffer.surface);
+ CHECK_VA_STATUS("vaBeginPicture");
+
+ mDecodingFrame = true;
+ mSendIQMatrixBuf = true;
+ }
+
+ status = decodeSlice(data, picData);
+ CHECK_STATUS("decodeSlice");
+ }
+
+ return DECODE_SUCCESS;
+}
+
+
+Decode_Status VideoDecoderMPEG4::decodeSlice(vbp_data_mp42 *data, vbp_picture_data_mp42 *picData) {
+ Decode_Status status;
+ VAStatus vaStatus;
+ uint32_t bufferIDCount = 0;
+ // maximum 4 buffers to render a slice: picture parameter, IQMatrix, slice parameter, slice data
+ VABufferID bufferIDs[4];
+
+ VAPictureParameterBufferMPEG4 *picParam = &(picData->picture_param);
+ vbp_slice_data_mp42 *sliceData = &(picData->slice_data);
+ VASliceParameterBufferMPEG4 *sliceParam = &(sliceData->slice_param);
+
+    // send picture parameters for each slice
+ status = setReference(picParam);
+ CHECK_STATUS("setReference");
+
+ vaStatus = vaCreateBuffer(
+ mVADisplay,
+ mVAContext,
+ VAPictureParameterBufferType,
+ sizeof(VAPictureParameterBufferMPEG4),
+ 1,
+ picParam,
+ &bufferIDs[bufferIDCount]);
+ CHECK_VA_STATUS("vaCreatePictureParameterBuffer");
+
+ bufferIDCount++;
+ if (picParam->vol_fields.bits.quant_type && mSendIQMatrixBuf)
+ {
+ // only send IQ matrix for the first slice in the picture
+ vaStatus = vaCreateBuffer(
+ mVADisplay,
+ mVAContext,
+ VAIQMatrixBufferType,
+ sizeof(VAIQMatrixBufferMPEG4),
+ 1,
+ &(data->iq_matrix_buffer),
+ &bufferIDs[bufferIDCount]);
+ CHECK_VA_STATUS("vaCreateIQMatrixBuffer");
+
+ mSendIQMatrixBuf = false;
+ bufferIDCount++;
+ }
+
+ vaStatus = vaCreateBuffer(
+ mVADisplay,
+ mVAContext,
+ VASliceParameterBufferType,
+ sizeof(VASliceParameterBufferMPEG4),
+ 1,
+ sliceParam,
+ &bufferIDs[bufferIDCount]);
+ CHECK_VA_STATUS("vaCreateSliceParameterBuffer");
+
+ bufferIDCount++;
+
+    // slice data buffer pointer
+    // Note that this is the original data buffer ptr;
+    // the offset to the actual slice data is provided in
+    // slice_data_offset in VASliceParameterBufferMPEG4
+
+ vaStatus = vaCreateBuffer(
+ mVADisplay,
+ mVAContext,
+ VASliceDataBufferType,
+ sliceData->slice_size, //size
+ 1, //num_elements
+ sliceData->buffer_addr + sliceData->slice_offset,
+ &bufferIDs[bufferIDCount]);
+ CHECK_VA_STATUS("vaCreateSliceDataBuffer");
+
+ bufferIDCount++;
+
+ vaStatus = vaRenderPicture(
+ mVADisplay,
+ mVAContext,
+ bufferIDs,
+ bufferIDCount);
+ CHECK_VA_STATUS("vaRenderPicture");
+
+
+ return DECODE_SUCCESS;
+}
+
+Decode_Status VideoDecoderMPEG4::setReference(VAPictureParameterBufferMPEG4 *picParam) {
+ switch (picParam->vop_fields.bits.vop_coding_type) {
+ case MP4_VOP_TYPE_I:
+ picParam->forward_reference_picture = VA_INVALID_SURFACE;
+ picParam->backward_reference_picture = VA_INVALID_SURFACE;
+ break;
+ case MP4_VOP_TYPE_P:
+ if (mLastReference == NULL && mIsSyncFrame == false && !mIsShortHeader) {
+ return DECODE_NO_REFERENCE;
+ }
+ if (mLastReference != NULL) {
+ picParam->forward_reference_picture = mLastReference->renderBuffer.surface;
+ } else {
+ VTRACE("%s: no reference frame, but keep decoding", __FUNCTION__);
+ picParam->forward_reference_picture = VA_INVALID_SURFACE;
+ }
+ picParam->backward_reference_picture = VA_INVALID_SURFACE;
+ break;
+ case MP4_VOP_TYPE_B:
+ picParam->vop_fields.bits.backward_reference_vop_coding_type = mLastVOPCodingType;
+        // TODO: verify this reference assignment; it looks suspicious
+ if (mIsShortHeader) {
+ if (mLastReference != NULL) {
+ picParam->forward_reference_picture = mLastReference->renderBuffer.surface;
+ } else {
+ VTRACE("%s: no forward reference frame, but keep decoding", __FUNCTION__);
+ picParam->forward_reference_picture = VA_INVALID_SURFACE;
+ }
+ if (mForwardReference != NULL) {
+ picParam->backward_reference_picture = mForwardReference->renderBuffer.surface;
+ } else {
+ VTRACE("%s: no backward reference frame, but keep decoding", __FUNCTION__);
+ picParam->backward_reference_picture = VA_INVALID_SURFACE;
+ }
+ } else if (mLastReference == NULL || mForwardReference == NULL) {
+ return DECODE_NO_REFERENCE;
+ } else {
+ picParam->forward_reference_picture = mLastReference->renderBuffer.surface;
+ picParam->backward_reference_picture = mForwardReference->renderBuffer.surface;
+ }
+ break;
+ case MP4_VOP_TYPE_S:
+        // TODO: verify; this previously used mForwardReference
+ if (mLastReference == NULL) {
+ return DECODE_NO_REFERENCE;
+ }
+ picParam->forward_reference_picture = mLastReference->renderBuffer.surface;
+ picParam->backward_reference_picture = VA_INVALID_SURFACE;
+ break;
+
+ default:
+ // Will never reach here;
+ return DECODE_PARSER_FAIL;
+ }
+ return DECODE_SUCCESS;
+}
+
+Decode_Status VideoDecoderMPEG4::startVA(vbp_data_mp42 *data) {
+ updateFormatInfo(data);
+
+ VAProfile vaProfile;
+
+ if ((data->codec_data.profile_and_level_indication & 0xF8) == 0xF0) {
+ vaProfile = VAProfileMPEG4AdvancedSimple;
+ } else {
+ vaProfile = VAProfileMPEG4Simple;
+ }
+
+ mIsShortHeader = data->codec_data.short_video_header;
+
+ return VideoDecoderBase::setupVA(MP4_SURFACE_NUMBER, vaProfile);
+}
+
+void VideoDecoderMPEG4::updateFormatInfo(vbp_data_mp42 *data) {
+ ITRACE("updateFormatInfo: current size: %d x %d, new size: %d x %d",
+ mVideoFormatInfo.width, mVideoFormatInfo.height,
+ data->codec_data.video_object_layer_width,
+ data->codec_data.video_object_layer_height);
+
+ mVideoFormatInfo.cropBottom = data->codec_data.video_object_layer_height > mVideoFormatInfo.height ?
+ data->codec_data.video_object_layer_height - mVideoFormatInfo.height : 0;
+ mVideoFormatInfo.cropRight = data->codec_data.video_object_layer_width > mVideoFormatInfo.width ?
+ data->codec_data.video_object_layer_width - mVideoFormatInfo.width : 0;
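+    // Illustrative example: a reported layer size of 1920x1088 against current
+    // format info of 1920x1080 yields cropRight = 0 and cropBottom = 8.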
+
+ if ((mVideoFormatInfo.width != (uint32_t)data->codec_data.video_object_layer_width ||
+ mVideoFormatInfo.height != (uint32_t)data->codec_data.video_object_layer_height) &&
+ data->codec_data.video_object_layer_width &&
+ data->codec_data.video_object_layer_height) {
+ // update encoded image size
+ mVideoFormatInfo.width = data->codec_data.video_object_layer_width;
+ mVideoFormatInfo.height = data->codec_data.video_object_layer_height;
+ mSizeChanged = true;
+ ITRACE("Video size is changed.");
+ }
+
+ // video_range has default value of 0. Y ranges from 16 to 235.
+ mVideoFormatInfo.videoRange = data->codec_data.video_range;
+
+ switch (data->codec_data.matrix_coefficients) {
+ case 1:
+ mVideoFormatInfo.colorMatrix = VA_SRC_BT709;
+ break;
+
+ // ITU-R Recommendation BT.470-6 System B, G (MP4), same as
+ // SMPTE 170M/BT601
+ case 5:
+ case 6:
+ mVideoFormatInfo.colorMatrix = VA_SRC_BT601;
+ break;
+
+ default:
+ // unknown color matrix, set to 0 so color space flag will not be set.
+ mVideoFormatInfo.colorMatrix = 0;
+ break;
+ }
+
+ mVideoFormatInfo.aspectX = data->codec_data.par_width;
+ mVideoFormatInfo.aspectY = data->codec_data.par_height;
+ //mVideoFormatInfo.bitrate = data->codec_data.bit_rate;
+ mVideoFormatInfo.valid = true;
+
+ setRenderRect();
+}
+
+Decode_Status VideoDecoderMPEG4::checkHardwareCapability() {
+ VAStatus vaStatus;
+ VAConfigAttrib cfgAttribs[2];
+ cfgAttribs[0].type = VAConfigAttribMaxPictureWidth;
+ cfgAttribs[1].type = VAConfigAttribMaxPictureHeight;
+ vaStatus = vaGetConfigAttributes(mVADisplay,
+ mIsShortHeader ? VAProfileH263Baseline : VAProfileMPEG4AdvancedSimple,
+ VAEntrypointVLD, cfgAttribs, 2);
+ CHECK_VA_STATUS("vaGetConfigAttributes");
+ if (cfgAttribs[0].value * cfgAttribs[1].value < (uint32_t)mVideoFormatInfo.width * (uint32_t)mVideoFormatInfo.height) {
+ ETRACE("hardware supports resolution %d * %d smaller than the clip resolution %d * %d",
+ cfgAttribs[0].value, cfgAttribs[1].value, mVideoFormatInfo.width, mVideoFormatInfo.height);
+ return DECODE_DRIVER_FAIL;
+ }
+
+ return DECODE_SUCCESS;
+}
diff --git a/videodecoder/VideoDecoderMPEG4.h b/videodecoder/VideoDecoderMPEG4.h
new file mode 100644
index 0000000..8fa319e
--- /dev/null
+++ b/videodecoder/VideoDecoderMPEG4.h
@@ -0,0 +1,70 @@
+/*
+* Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+#ifndef VIDEO_DECODER_MPEG4_H_
+#define VIDEO_DECODER_MPEG4_H_
+
+#include "VideoDecoderBase.h"
+
+
+class VideoDecoderMPEG4 : public VideoDecoderBase {
+public:
+ VideoDecoderMPEG4(const char *mimeType);
+ virtual ~VideoDecoderMPEG4();
+
+ virtual Decode_Status start(VideoConfigBuffer *buffer);
+ virtual void stop(void);
+ virtual void flush(void);
+ virtual Decode_Status decode(VideoDecodeBuffer *buffer);
+
+protected:
+ virtual Decode_Status checkHardwareCapability();
+
+private:
+ Decode_Status decodeFrame(VideoDecodeBuffer *buffer, vbp_data_mp42 *data);
+ Decode_Status beginDecodingFrame(vbp_data_mp42 *data);
+ Decode_Status continueDecodingFrame(vbp_data_mp42 *data);
+ Decode_Status decodeSlice(vbp_data_mp42 *data, vbp_picture_data_mp42 *picData);
+ Decode_Status setReference(VAPictureParameterBufferMPEG4 *picParam);
+ Decode_Status startVA(vbp_data_mp42 *data);
+ void updateFormatInfo(vbp_data_mp42 *data);
+
+private:
+ // Value of VOP type defined here follows MP4 spec
+ enum {
+ MP4_VOP_TYPE_I = 0,
+ MP4_VOP_TYPE_P = 1,
+ MP4_VOP_TYPE_B = 2,
+ MP4_VOP_TYPE_S = 3,
+ };
+
+ enum {
+ MP4_SURFACE_NUMBER = 10,
+ };
+
+ uint64_t mLastVOPTimeIncrement;
+    bool mExpectingNVOP; // indicates whether a future N-VOP is a placeholder for a packed frame
+    bool mSendIQMatrixBuf; // indicates whether iq_matrix_buffer has been sent to the driver
+    int32_t mLastVOPCodingType;
+    bool mIsSyncFrame; // indicates whether it is a sync frame in the container
+    bool mIsShortHeader; // indicates whether it is the short header format
+ VideoExtensionBuffer mExtensionBuffer;
+ PackedFrameData mPackedFrame;
+};
+
+
+
+#endif /* VIDEO_DECODER_MPEG4_H_ */
diff --git a/videodecoder/VideoDecoderTrace.cpp b/videodecoder/VideoDecoderTrace.cpp
new file mode 100644
index 0000000..1075419
--- /dev/null
+++ b/videodecoder/VideoDecoderTrace.cpp
@@ -0,0 +1,37 @@
+/*
+* Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+
+
+#include "VideoDecoderTrace.h"
+
+#ifdef ENABLE_VIDEO_DECODER_TRACE
+
+void TraceVideoDecoder(const char* cat, const char* fun, int line, const char* format, ...)
+{
+ if (NULL == cat || NULL == fun || NULL == format)
+ return;
+
+ printf("%s %s(#%d): ", cat, fun, line);
+ va_list args;
+ va_start(args, format);
+ vprintf(format, args);
+ va_end(args);
+ printf("\n");
+}
+
+#endif
+
diff --git a/videodecoder/VideoDecoderTrace.h b/videodecoder/VideoDecoderTrace.h
new file mode 100755
index 0000000..c4c1001
--- /dev/null
+++ b/videodecoder/VideoDecoderTrace.h
@@ -0,0 +1,96 @@
+/*
+* Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+
+#ifndef VIDEO_DECODER_TRACE_H_
+#define VIDEO_DECODER_TRACE_H_
+
+
+#define ENABLE_VIDEO_DECODER_TRACE
+//#define ANDROID
+
+
+#ifdef ENABLE_VIDEO_DECODER_TRACE
+
+#ifndef ANDROID
+
+#include <stdio.h>
+#include <stdarg.h>
+
+extern void TraceVideoDecoder(const char* cat, const char* fun, int line, const char* format, ...);
+#define VIDEO_DECODER_TRACE(cat, format, ...) \
+TraceVideoDecoder(cat, __FUNCTION__, __LINE__, format, ##__VA_ARGS__)
+
+#define ETRACE(format, ...) VIDEO_DECODER_TRACE("ERROR: ", format, ##__VA_ARGS__)
+#define WTRACE(format, ...) VIDEO_DECODER_TRACE("WARNING: ", format, ##__VA_ARGS__)
+#define ITRACE(format, ...) VIDEO_DECODER_TRACE("INFO: ", format, ##__VA_ARGS__)
+#define VTRACE(format, ...) VIDEO_DECODER_TRACE("VERBOSE: ", format, ##__VA_ARGS__)
+
+#else
+// for Android OS
+
+//#define LOG_NDEBUG 0
+
+#define LOG_TAG "VideoDecoder"
+
+#include <wrs_omxil_core/log.h>
+#define ETRACE(...) LOGE(__VA_ARGS__)
+#define WTRACE(...) LOGW(__VA_ARGS__)
+#define ITRACE(...) LOGI(__VA_ARGS__)
+#define VTRACE(...) LOGV(__VA_ARGS__)
+
+#endif
+
+
+#else
+
+#define ETRACE(format, ...)
+#define WTRACE(format, ...)
+#define ITRACE(format, ...)
+#define VTRACE(format, ...)
+
+
+#endif /* ENABLE_VIDEO_DECODER_TRACE*/
+
+
+#define CHECK_STATUS(FUNC)\
+ if (status != DECODE_SUCCESS) {\
+ if (status > DECODE_SUCCESS) {\
+ WTRACE(FUNC" failed. status = %d", status);\
+ } else {\
+ ETRACE(FUNC" failed. status = %d", status);\
+ }\
+ return status;\
+ }
+
+#define CHECK_VA_STATUS(FUNC)\
+ if (vaStatus != VA_STATUS_SUCCESS) {\
+ ETRACE(FUNC" failed. vaStatus = 0x%x", vaStatus);\
+ return DECODE_DRIVER_FAIL;\
+ }
+
+#define CHECK_VBP_STATUS(FUNC)\
+ if (vbpStatus != VBP_OK) {\
+ ETRACE(FUNC" failed. vbpStatus = %d", (int)vbpStatus);\
+ if (vbpStatus == VBP_ERROR) {\
+ return DECODE_FAIL;\
+ }\
+ return DECODE_PARSER_FAIL;\
+ }
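+
+// Illustrative usage: each macro expects a correspondingly named local variable
+// ("status", "vaStatus", or "vbpStatus") in the calling scope, e.g.:
+//
+//     Decode_Status status = acquireSurfaceBuffer();
+//     CHECK_STATUS("acquireSurfaceBuffer");
+//
+//     VAStatus vaStatus = vaBeginPicture(mVADisplay, mVAContext, surface);
+//     CHECK_VA_STATUS("vaBeginPicture");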
+
+#endif /*VIDEO_DECODER_TRACE_H_*/
+
+
diff --git a/videodecoder/VideoDecoderVP8.cpp b/videodecoder/VideoDecoderVP8.cpp
new file mode 100644
index 0000000..87249b4
--- /dev/null
+++ b/videodecoder/VideoDecoderVP8.cpp
@@ -0,0 +1,449 @@
+/*
+* Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+#include "VideoDecoderVP8.h"
+#include "VideoDecoderTrace.h"
+#include <string.h>
+
+VideoDecoderVP8::VideoDecoderVP8(const char *mimeType)
+ : VideoDecoderBase(mimeType, VBP_VP8) {
+ invalidateReferenceFrames(0);
+ invalidateReferenceFrames(1);
+}
+
+VideoDecoderVP8::~VideoDecoderVP8() {
+ stop();
+}
+
+void VideoDecoderVP8::invalidateReferenceFrames(int toggle) {
+ ReferenceFrameBuffer *p = mRFBs[toggle];
+ for (int i = 0; i < VP8_REF_SIZE; i++) {
+ p->index = (uint32_t) -1;
+ p->surfaceBuffer = NULL;
+ p++;
+ }
+}
+
+void VideoDecoderVP8::clearAsReference(int toggle, int ref_type) {
+ ReferenceFrameBuffer ref = mRFBs[toggle][ref_type];
+ if (ref.surfaceBuffer) {
+ ref.surfaceBuffer->asReferernce = false;
+ }
+}
+
+void VideoDecoderVP8::updateFormatInfo(vbp_data_vp8 *data) {
+ uint32_t width = data->codec_data->frame_width;
+ uint32_t height = data->codec_data->frame_height;
+ ITRACE("updateFormatInfo: current size: %d x %d, new size: %d x %d",
+ mVideoFormatInfo.width, mVideoFormatInfo.height, width, height);
+
+ if ((mVideoFormatInfo.width != width ||
+ mVideoFormatInfo.height != height) &&
+ width && height) {
+ if ((VideoDecoderBase::alignMB(mVideoFormatInfo.width) != width) ||
+ (VideoDecoderBase::alignMB(mVideoFormatInfo.height) != height)) {
+ mSizeChanged = true;
+ ITRACE("Video size is changed.");
+ }
+ mVideoFormatInfo.width = width;
+ mVideoFormatInfo.height = height;
+ }
+
+ mVideoFormatInfo.cropLeft = data->codec_data->crop_left;
+ mVideoFormatInfo.cropRight = data->codec_data->crop_right;
+ mVideoFormatInfo.cropTop = data->codec_data->crop_top;
+ mVideoFormatInfo.cropBottom = data->codec_data->crop_bottom;
+ ITRACE("Cropping: left = %d, top = %d, right = %d, bottom = %d", data->codec_data->crop_left, data->codec_data->crop_top, data->codec_data->crop_right, data->codec_data->crop_bottom);
+
+ mVideoFormatInfo.valid = true;
+
+ setRenderRect();
+}
+
+Decode_Status VideoDecoderVP8::startVA(vbp_data_vp8 *data) {
+ updateFormatInfo(data);
+
+ VAProfile vaProfile = VAProfileVP8Version0_3;
+ if (data->codec_data->version_num > 3) {
+ return DECODE_PARSER_FAIL;
+ }
+
+ enableLowDelayMode(true);
+
+ return VideoDecoderBase::setupVA(VP8_SURFACE_NUMBER + VP8_REF_SIZE, vaProfile);
+}
+
+Decode_Status VideoDecoderVP8::start(VideoConfigBuffer *buffer) {
+ Decode_Status status;
+
+ status = VideoDecoderBase::start(buffer);
+ CHECK_STATUS("VideoDecoderBase::start");
+
+ // We don't want base class to manage reference.
+ VideoDecoderBase::ManageReference(false);
+
+ if (buffer->data == NULL || buffer->size == 0) {
+ WTRACE("No config data to start VA.");
+ return DECODE_SUCCESS;
+ }
+
+ vbp_data_vp8 *data = NULL;
+ status = VideoDecoderBase::parseBuffer(buffer->data, buffer->size, true, (void**)&data);
+ CHECK_STATUS("VideoDecoderBase::parseBuffer");
+
+ status = startVA(data);
+ return status;
+}
+
+void VideoDecoderVP8::stop(void) {
+ VideoDecoderBase::stop();
+
+ invalidateReferenceFrames(0);
+ invalidateReferenceFrames(1);
+}
+
+void VideoDecoderVP8::flush(void) {
+ VideoDecoderBase::flush();
+
+ invalidateReferenceFrames(0);
+ invalidateReferenceFrames(1);
+}
+
+Decode_Status VideoDecoderVP8::decode(VideoDecodeBuffer *buffer) {
+ Decode_Status status;
+ vbp_data_vp8 *data = NULL;
+ if (buffer == NULL) {
+ ETRACE("VideoDecodeBuffer is NULL.");
+ return DECODE_INVALID_DATA;
+ }
+
+ status = VideoDecoderBase::parseBuffer(
+ buffer->data,
+ buffer->size,
+ false,
+ (void**)&data);
+ CHECK_STATUS("VideoDecoderBase::parseBuffer");
+
+ mShowFrame = data->codec_data->show_frame;
+
+ if (!mVAStarted) {
+ status = startVA(data);
+ CHECK_STATUS("startVA");
+ }
+
+ VideoDecoderBase::setRotationDegrees(buffer->rotationDegrees);
+
+ status = decodeFrame(buffer, data);
+
+ return status;
+}
+
+Decode_Status VideoDecoderVP8::decodeFrame(VideoDecodeBuffer* buffer, vbp_data_vp8 *data) {
+ Decode_Status status;
+ bool useGraphicbuffer = mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER;
+ mCurrentPTS = buffer->timeStamp;
+ if (0 == data->num_pictures || NULL == data->pic_data) {
+ WTRACE("Number of pictures is 0.");
+ return DECODE_SUCCESS;
+ }
+
+ if (VP8_KEY_FRAME == data->codec_data->frame_type) {
+ if (mSizeChanged && !useGraphicbuffer){
+ mSizeChanged = false;
+ return DECODE_FORMAT_CHANGE;
+ } else {
+ updateFormatInfo(data);
+ bool noNeedFlush = false;
+ if (useGraphicbuffer) {
+ noNeedFlush = (mVideoFormatInfo.width <= mVideoFormatInfo.surfaceWidth)
+ && (mVideoFormatInfo.height <= mVideoFormatInfo.surfaceHeight);
+ }
+ if (mSizeChanged == true && !noNeedFlush) {
+ flushSurfaceBuffers();
+ mSizeChanged = false;
+ return DECODE_FORMAT_CHANGE;
+ }
+ }
+ }
+
+ if (data->codec_data->frame_type == VP8_SKIPPED_FRAME) {
+        // Do nothing for a skipped frame as the last frame will be rendered again natively
+ return DECODE_SUCCESS;
+ }
+
+ status = acquireSurfaceBuffer();
+ CHECK_STATUS("acquireSurfaceBuffer");
+
+    // set referenceFrame to true if the decoded frame is a key or inter frame, false otherwise.
+    int frameType = data->codec_data->frame_type;
+    mAcquiredBuffer->referenceFrame = (frameType == VP8_KEY_FRAME || frameType == VP8_INTER_FRAME);
+    // assume it is a frame picture.
+ mAcquiredBuffer->renderBuffer.scanFormat = VA_FRAME_PICTURE;
+ mAcquiredBuffer->renderBuffer.timeStamp = buffer->timeStamp;
+ mAcquiredBuffer->renderBuffer.flag = 0;
+ if (buffer->flag & WANT_DECODE_ONLY) {
+ mAcquiredBuffer->renderBuffer.flag |= WANT_DECODE_ONLY;
+ }
+ if (mSizeChanged) {
+ mSizeChanged = false;
+ mAcquiredBuffer->renderBuffer.flag |= IS_RESOLUTION_CHANGE;
+ }
+
+ // Here data->num_pictures is always equal to 1
+ for (uint32_t index = 0; index < data->num_pictures; index++) {
+ status = decodePicture(data, index);
+ if (status != DECODE_SUCCESS) {
+ endDecodingFrame(true);
+ return status;
+ }
+ }
+
+ if (frameType != VP8_SKIPPED_FRAME) {
+ updateReferenceFrames(data);
+ }
+
+ // if sample is successfully decoded, call outputSurfaceBuffer(); otherwise
+ // call releaseSurfacebuffer();
+ status = outputSurfaceBuffer();
+ return status;
+}
+
+Decode_Status VideoDecoderVP8::decodePicture(vbp_data_vp8 *data, int32_t picIndex) {
+ VAStatus vaStatus = VA_STATUS_SUCCESS;
+ Decode_Status status;
+ uint32_t bufferIDCount = 0;
+ VABufferID bufferIDs[5];
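+    // Sizing note: 5 covers one picture parameter, one probability buffer, one
+    // IQ matrix, plus one slice parameter and one slice data buffer; this relies
+    // on num_slices being 1, as noted in the slice loop below.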
+
+ vbp_picture_data_vp8 *picData = &(data->pic_data[picIndex]);
+ VAPictureParameterBufferVP8 *picParams = picData->pic_parms;
+
+ status = setReference(picParams);
+ CHECK_STATUS("setReference");
+
+ vaStatus = vaBeginPicture(mVADisplay, mVAContext, mAcquiredBuffer->renderBuffer.surface);
+ CHECK_VA_STATUS("vaBeginPicture");
+ // setting mDecodingFrame to true so vaEndPicture will be invoked to end the picture decoding.
+ mDecodingFrame = true;
+
+ vaStatus = vaCreateBuffer(
+ mVADisplay,
+ mVAContext,
+ VAPictureParameterBufferType,
+ sizeof(VAPictureParameterBufferVP8),
+ 1,
+ picParams,
+ &bufferIDs[bufferIDCount]);
+ CHECK_VA_STATUS("vaCreatePictureParameterBuffer");
+ bufferIDCount++;
+
+ vaStatus = vaCreateBuffer(
+ mVADisplay,
+ mVAContext,
+ VAProbabilityBufferType,
+ sizeof(VAProbabilityDataBufferVP8),
+ 1,
+ data->prob_data,
+ &bufferIDs[bufferIDCount]);
+ CHECK_VA_STATUS("vaCreateProbabilityBuffer");
+ bufferIDCount++;
+
+ vaStatus = vaCreateBuffer(
+ mVADisplay,
+ mVAContext,
+ VAIQMatrixBufferType,
+ sizeof(VAIQMatrixBufferVP8),
+ 1,
+ data->IQ_matrix_buf,
+ &bufferIDs[bufferIDCount]);
+ CHECK_VA_STATUS("vaCreateIQMatrixBuffer");
+ bufferIDCount++;
+
+ /* Here picData->num_slices is always equal to 1 */
+ for (uint32_t i = 0; i < picData->num_slices; i++) {
+ vaStatus = vaCreateBuffer(
+ mVADisplay,
+ mVAContext,
+ VASliceParameterBufferType,
+ sizeof(VASliceParameterBufferVP8),
+ 1,
+ &(picData->slc_data[i].slc_parms),
+ &bufferIDs[bufferIDCount]);
+ CHECK_VA_STATUS("vaCreateSliceParameterBuffer");
+ bufferIDCount++;
+
+ vaStatus = vaCreateBuffer(
+ mVADisplay,
+ mVAContext,
+ VASliceDataBufferType,
+ picData->slc_data[i].slice_size, //size
+ 1, //num_elements
+ picData->slc_data[i].buffer_addr + picData->slc_data[i].slice_offset,
+ &bufferIDs[bufferIDCount]);
+ CHECK_VA_STATUS("vaCreateSliceDataBuffer");
+ bufferIDCount++;
+ }
+
+ vaStatus = vaRenderPicture(
+ mVADisplay,
+ mVAContext,
+ bufferIDs,
+ bufferIDCount);
+ CHECK_VA_STATUS("vaRenderPicture");
+
+ vaStatus = vaEndPicture(mVADisplay, mVAContext);
+ mDecodingFrame = false;
+ CHECK_VA_STATUS("vaEndPicture");
+
+ return DECODE_SUCCESS;
+}
+
+Decode_Status VideoDecoderVP8::setReference(VAPictureParameterBufferVP8 *picParam) {
+ int frameType = picParam->pic_fields.bits.key_frame;
+ switch (frameType) {
+ case VP8_KEY_FRAME:
+ picParam->last_ref_frame = VA_INVALID_SURFACE;
+ picParam->alt_ref_frame = VA_INVALID_SURFACE;
+ picParam->golden_ref_frame = VA_INVALID_SURFACE;
+ break;
+ case VP8_INTER_FRAME:
+ if (mRFBs[0][VP8_LAST_REF_PIC].surfaceBuffer == NULL ||
+ mRFBs[0][VP8_ALT_REF_PIC].surfaceBuffer == NULL ||
+ mRFBs[0][VP8_GOLDEN_REF_PIC].surfaceBuffer == NULL) {
+ mAcquiredBuffer->renderBuffer.errBuf.errorNumber = 1;
+ mAcquiredBuffer->renderBuffer.errBuf.errorArray[0].type = DecodeRefMissing;
+ return DECODE_NO_REFERENCE;
+ }
+ //mRFBs[0][VP8_LAST_REF_PIC].surfaceBuffer = mLastReference;
+ picParam->last_ref_frame = mRFBs[0][VP8_LAST_REF_PIC].surfaceBuffer->renderBuffer.surface;
+ picParam->alt_ref_frame = mRFBs[0][VP8_ALT_REF_PIC].surfaceBuffer->renderBuffer.surface;
+ picParam->golden_ref_frame = mRFBs[0][VP8_GOLDEN_REF_PIC].surfaceBuffer->renderBuffer.surface;
+ break;
+ case VP8_SKIPPED_FRAME:
+ // will never happen here
+ break;
+ default:
+ return DECODE_PARSER_FAIL;
+ }
+
+ return DECODE_SUCCESS;
+}
+
+void VideoDecoderVP8::updateReferenceFrames(vbp_data_vp8 *data) {
+ /* Refresh last frame reference buffer using the currently reconstructed frame */
+ refreshLastReference(data);
+
+ /* Refresh golden frame reference buffer using the currently reconstructed frame */
+ refreshGoldenReference(data);
+
+ /* Refresh alternative frame reference buffer using the currently reconstructed frame */
+ refreshAltReference(data);
+
+    /* Release previous-generation reference surfaces that are no longer used by any current reference slot */
+ for (int i = 0; i < VP8_REF_SIZE; i++) {
+ VideoSurfaceBuffer *p = mRFBs[1][i].surfaceBuffer;
+ int j;
+ for (j = 0; j < VP8_REF_SIZE; j++) {
+ if (p == mRFBs[0][j].surfaceBuffer) {
+ break;
+ }
+ }
+ if (j == VP8_REF_SIZE) {
+ clearAsReference(1, i);
+ }
+ }
+}
+
+void VideoDecoderVP8::refreshLastReference(vbp_data_vp8 *data) {
+ /* Save previous last reference */
+ mRFBs[1][VP8_LAST_REF_PIC].surfaceBuffer = mRFBs[0][VP8_LAST_REF_PIC].surfaceBuffer;
+ mRFBs[1][VP8_LAST_REF_PIC].index = mRFBs[0][VP8_LAST_REF_PIC].index;
+
+ /* For key frame, this is always true */
+ if (data->codec_data->refresh_last_frame) {
+ mRFBs[0][VP8_LAST_REF_PIC].surfaceBuffer = mAcquiredBuffer;
+ mRFBs[0][VP8_LAST_REF_PIC].index = mAcquiredBuffer->renderBuffer.surface;
+ mRFBs[0][VP8_LAST_REF_PIC].surfaceBuffer->asReferernce = true;
+ }
+}
+
+void VideoDecoderVP8::refreshGoldenReference(vbp_data_vp8 *data) {
+ /* Save previous golden reference */
+ mRFBs[1][VP8_GOLDEN_REF_PIC].surfaceBuffer = mRFBs[0][VP8_GOLDEN_REF_PIC].surfaceBuffer;
+ mRFBs[1][VP8_GOLDEN_REF_PIC].index = mRFBs[0][VP8_GOLDEN_REF_PIC].index;
+
+ if (data->codec_data->golden_copied != BufferCopied_NoneToGolden) {
+ if (data->codec_data->golden_copied == BufferCopied_LastToGolden) {
+ /* LastFrame is copied to GoldenFrame */
+ mRFBs[0][VP8_GOLDEN_REF_PIC].surfaceBuffer = mRFBs[1][VP8_LAST_REF_PIC].surfaceBuffer;
+ mRFBs[0][VP8_GOLDEN_REF_PIC].index = mRFBs[1][VP8_LAST_REF_PIC].index;
+ } else if (data->codec_data->golden_copied == BufferCopied_AltRefToGolden) {
+ /* AltRefFrame is copied to GoldenFrame */
+ mRFBs[0][VP8_GOLDEN_REF_PIC].surfaceBuffer = mRFBs[0][VP8_ALT_REF_PIC].surfaceBuffer;
+ mRFBs[0][VP8_GOLDEN_REF_PIC].index = mRFBs[0][VP8_ALT_REF_PIC].index;
+ }
+ }
+
+ /* For key frame, this is always true */
+ if (data->codec_data->refresh_golden_frame) {
+ mRFBs[0][VP8_GOLDEN_REF_PIC].surfaceBuffer = mAcquiredBuffer;
+ mRFBs[0][VP8_GOLDEN_REF_PIC].index = mAcquiredBuffer->renderBuffer.surface;
+ mRFBs[0][VP8_GOLDEN_REF_PIC].surfaceBuffer->asReferernce = true;
+ }
+}
+
+void VideoDecoderVP8::refreshAltReference(vbp_data_vp8 *data) {
+ /* Save previous alternative reference */
+ mRFBs[1][VP8_ALT_REF_PIC].surfaceBuffer = mRFBs[0][VP8_ALT_REF_PIC].surfaceBuffer;
+ mRFBs[1][VP8_ALT_REF_PIC].index = mRFBs[0][VP8_ALT_REF_PIC].index;
+
+ if (data->codec_data->altref_copied != BufferCopied_NoneToAltRef) {
+ if (data->codec_data->altref_copied == BufferCopied_LastToAltRef) {
+ /* LastFrame is copied to AltRefFrame */
+ mRFBs[0][VP8_ALT_REF_PIC].surfaceBuffer = mRFBs[1][VP8_LAST_REF_PIC].surfaceBuffer;
+ mRFBs[0][VP8_ALT_REF_PIC].index = mRFBs[1][VP8_LAST_REF_PIC].index;
+ } else if (data->codec_data->altref_copied == BufferCopied_GoldenToAltRef) {
+ /* GoldenFrame is copied to AltRefFrame */
+ mRFBs[0][VP8_ALT_REF_PIC].surfaceBuffer = mRFBs[1][VP8_GOLDEN_REF_PIC].surfaceBuffer;
+ mRFBs[0][VP8_ALT_REF_PIC].index = mRFBs[1][VP8_GOLDEN_REF_PIC].index;
+ }
+ }
+
+ /* For key frame, this is always true */
+ if (data->codec_data->refresh_alt_frame) {
+ mRFBs[0][VP8_ALT_REF_PIC].surfaceBuffer = mAcquiredBuffer;
+ mRFBs[0][VP8_ALT_REF_PIC].index = mAcquiredBuffer->renderBuffer.surface;
+ mRFBs[0][VP8_ALT_REF_PIC].surfaceBuffer->asReferernce = true;
+ }
+}
+
+
+Decode_Status VideoDecoderVP8::checkHardwareCapability() {
+ VAStatus vaStatus;
+ VAConfigAttrib cfgAttribs[2];
+ cfgAttribs[0].type = VAConfigAttribMaxPictureWidth;
+ cfgAttribs[1].type = VAConfigAttribMaxPictureHeight;
+ vaStatus = vaGetConfigAttributes(mVADisplay, VAProfileVP8Version0_3,
+ VAEntrypointVLD, cfgAttribs, 2);
+ CHECK_VA_STATUS("vaGetConfigAttributes");
+ if (cfgAttribs[0].value * cfgAttribs[1].value < (uint32_t)mVideoFormatInfo.width * (uint32_t)mVideoFormatInfo.height) {
+ ETRACE("hardware supports resolution %d * %d smaller than the clip resolution %d * %d",
+ cfgAttribs[0].value, cfgAttribs[1].value, mVideoFormatInfo.width, mVideoFormatInfo.height);
+ return DECODE_DRIVER_FAIL;
+ }
+
+ return DECODE_SUCCESS;
+}
+
diff --git a/videodecoder/VideoDecoderVP8.h b/videodecoder/VideoDecoderVP8.h
new file mode 100644
index 0000000..1daecaf
--- /dev/null
+++ b/videodecoder/VideoDecoderVP8.h
@@ -0,0 +1,91 @@
+/*
+* Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+#ifndef VIDEO_DECODER_VP8_H_
+#define VIDEO_DECODER_VP8_H_
+
+#include "VideoDecoderBase.h"
+
+
+class VideoDecoderVP8 : public VideoDecoderBase {
+public:
+ VideoDecoderVP8(const char *mimeType);
+ virtual ~VideoDecoderVP8();
+
+ virtual Decode_Status start(VideoConfigBuffer *buffer);
+ virtual void stop(void);
+ virtual void flush(void);
+ virtual Decode_Status decode(VideoDecodeBuffer *buffer);
+
+protected:
+ virtual Decode_Status checkHardwareCapability();
+
+private:
+ Decode_Status decodeFrame(VideoDecodeBuffer* buffer, vbp_data_vp8 *data);
+ Decode_Status decodePicture(vbp_data_vp8 *data, int32_t picIndex);
+ Decode_Status setReference(VAPictureParameterBufferVP8 *picParam);
+ Decode_Status startVA(vbp_data_vp8 *data);
+ void updateReferenceFrames(vbp_data_vp8 *data);
+ void refreshLastReference(vbp_data_vp8 *data);
+ void refreshGoldenReference(vbp_data_vp8 *data);
+ void refreshAltReference(vbp_data_vp8 *data);
+ void updateFormatInfo(vbp_data_vp8 *data);
+ void invalidateReferenceFrames(int toggle);
+ void clearAsReference(int toggle, int ref_type);
+
+private:
+ enum {
+ VP8_SURFACE_NUMBER = 9,
+ VP8_REF_SIZE = 3,
+ };
+
+ enum {
+ VP8_KEY_FRAME = 0,
+ VP8_INTER_FRAME,
+ VP8_SKIPPED_FRAME,
+ };
+
+ enum {
+ VP8_LAST_REF_PIC = 0,
+ VP8_GOLDEN_REF_PIC,
+ VP8_ALT_REF_PIC,
+ };
+
+ enum {
+ BufferCopied_NoneToGolden = 0,
+ BufferCopied_LastToGolden = 1,
+ BufferCopied_AltRefToGolden = 2
+ };
+
+ enum {
+ BufferCopied_NoneToAltRef = 0,
+ BufferCopied_LastToAltRef = 1,
+ BufferCopied_GoldenToAltRef = 2
+ };
+
+ struct ReferenceFrameBuffer {
+ VideoSurfaceBuffer *surfaceBuffer;
+ int32_t index;
+ };
+
+    // [2] : index 0 holds the current set of reference frames, index 1 the previous set
+    // [VP8_REF_SIZE] : 0 for the last ref pic, 1 for the golden ref pic, 2 for the alt ref pic
+ ReferenceFrameBuffer mRFBs[2][VP8_REF_SIZE];
+};
+
+
+
+#endif /* VIDEO_DECODER_VP8_H_ */
diff --git a/videodecoder/VideoDecoderWMV.cpp b/videodecoder/VideoDecoderWMV.cpp
new file mode 100644
index 0000000..16c307a
--- /dev/null
+++ b/videodecoder/VideoDecoderWMV.cpp
@@ -0,0 +1,568 @@
+/*
+* Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+#include "VideoDecoderWMV.h"
+#include "VideoDecoderTrace.h"
+#include <string.h>
+
+VideoDecoderWMV::VideoDecoderWMV(const char *mimeType)
+ : VideoDecoderBase(mimeType, VBP_VC1),
+ mBufferIDs(NULL),
+ mNumBufferIDs(0),
+ mConfigDataParsed(false),
+ mRangeMapped(false),
+ mDeblockedCurrPicIndex(0),
+ mDeblockedLastPicIndex(1),
+ mDeblockedForwardPicIndex(2) {
+}
+
+
+VideoDecoderWMV::~VideoDecoderWMV() {
+ stop();
+}
+
+Decode_Status VideoDecoderWMV::start(VideoConfigBuffer *buffer) {
+ Decode_Status status;
+
+ status = VideoDecoderBase::start(buffer);
+ CHECK_STATUS("VideoDecoderBase::start");
+
+ if (buffer->data == NULL || buffer->size == 0) {
+ WTRACE("No config data to start VA.");
+ return DECODE_SUCCESS;
+ }
+
+ vbp_data_vc1 *data = NULL;
+ status = parseBuffer(buffer->data, buffer->size, &data);
+ CHECK_STATUS("parseBuffer");
+
+ status = startVA(data);
+ return status;
+}
+
+void VideoDecoderWMV::stop(void) {
+ if (mBufferIDs) {
+ delete [] mBufferIDs;
+ mBufferIDs = NULL;
+ }
+ mNumBufferIDs = 0;
+ mConfigDataParsed = false;
+ mRangeMapped = false;
+
+ mDeblockedCurrPicIndex = 0;
+ mDeblockedLastPicIndex = 1;
+ mDeblockedForwardPicIndex = 2;
+
+ VideoDecoderBase::stop();
+}
+
+void VideoDecoderWMV::flush(void) {
+ VideoDecoderBase::flush();
+
+ mRangeMapped = false;
+ mDeblockedCurrPicIndex = 0;
+ mDeblockedLastPicIndex = 1;
+ mDeblockedForwardPicIndex = 2;
+}
+
+Decode_Status VideoDecoderWMV::decode(VideoDecodeBuffer *buffer) {
+ Decode_Status status;
+ vbp_data_vc1 *data = NULL;
+ bool useGraphicbuffer = mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER;
+ if (buffer == NULL) {
+ return DECODE_INVALID_DATA;
+ }
+
+ status = parseBuffer(buffer->data, buffer->size, &data);
+ CHECK_STATUS("parseBuffer");
+
+ if (!mVAStarted) {
+ status = startVA(data);
+ CHECK_STATUS("startVA");
+ }
+
+ if (mSizeChanged && !useGraphicbuffer) {
+ mSizeChanged = false;
+ return DECODE_FORMAT_CHANGE;
+ }
+
+ if ((mVideoFormatInfo.width != data->se_data->CODED_WIDTH ||
+ mVideoFormatInfo.height != data->se_data->CODED_HEIGHT) &&
+ data->se_data->CODED_WIDTH &&
+ data->se_data->CODED_HEIGHT) {
+ ITRACE("video size is changed from %dx%d to %dx%d", mVideoFormatInfo.width, mVideoFormatInfo.height,
+ data->se_data->CODED_WIDTH, data->se_data->CODED_HEIGHT);
+ mVideoFormatInfo.width = data->se_data->CODED_WIDTH;
+ mVideoFormatInfo.height = data->se_data->CODED_HEIGHT;
+ bool noNeedFlush = false;
+ if (useGraphicbuffer) {
+ noNeedFlush = (mVideoFormatInfo.width <= mVideoFormatInfo.surfaceWidth)
+ && (mVideoFormatInfo.height <= mVideoFormatInfo.surfaceHeight);
+ }
+
+ setRenderRect();
+
+ if (noNeedFlush) {
+ mSizeChanged = true;
+ } else {
+ flushSurfaceBuffers();
+ mSizeChanged = false;
+ return DECODE_FORMAT_CHANGE;
+ }
+ }
+
+ status = decodeFrame(buffer, data);
+ CHECK_STATUS("decodeFrame");
+ return status;
+}
+
+Decode_Status VideoDecoderWMV::decodeFrame(VideoDecodeBuffer* buffer, vbp_data_vc1 *data) {
+ Decode_Status status;
+ mCurrentPTS = buffer->timeStamp;
+ if (0 == data->num_pictures || NULL == data->pic_data) {
+ WTRACE("Number of pictures is 0, buffer contains configuration data only?");
+ return DECODE_SUCCESS;
+ }
+
+ if (data->pic_data[0].picture_is_skipped == VC1_PTYPE_SKIPPED) {
+
+        // Do nothing for a skipped frame as the last frame will be rendered again natively.
+        // No need to handle the reference frame either.
+ return DECODE_SUCCESS;
+#if 0
+ //use the last P or I frame surface for skipped frame and treat it as P frame
+ if (mLastReference == NULL) {
+ // TODO: handle this case
+ WTRACE("The last reference is unavailable to construct skipped frame.");
+ return DECODE_SUCCESS;
+ }
+
+ status = acquireSurfaceBuffer();
+ CHECK_STATUS("acquireSurfaceBuffer");
+ mAcquiredBuffer->renderBuffer.timeStamp = mCurrentPTS;
+ mAcquiredBuffer->renderBuffer.flag = 0;
+ mAcquiredBuffer->renderBuffer.scanFormat = mLastReference->renderBuffer.scanFormat;
+ mAcquiredBuffer->renderBuffer.surface = mLastReference->renderBuffer.surface;
+ // No need to update mappedData for HW decoding
+ //mAcquiredBuffer->mappedData.data = mLastReference->mappedData.data;
+ mAcquiredBuffer->referenceFrame = true;
+ // let outputSurfaceBuffer handle "asReference" for VC1
+ status = outputSurfaceBuffer();
+ return status;
+#endif
+ }
+
+ status = acquireSurfaceBuffer();
+ CHECK_STATUS("acquireSurfaceBuffer");
+
+ mAcquiredBuffer->renderBuffer.timeStamp = buffer->timeStamp;
+ if (buffer->flag & HAS_DISCONTINUITY) {
+ mAcquiredBuffer->renderBuffer.flag |= HAS_DISCONTINUITY;
+ }
+ if (buffer->flag & WANT_DECODE_ONLY) {
+ mAcquiredBuffer->renderBuffer.flag |= WANT_DECODE_ONLY;
+ }
+ if (mSizeChanged) {
+ mSizeChanged = false;
+ mAcquiredBuffer->renderBuffer.flag |= IS_RESOLUTION_CHANGE;
+ }
+
+ if (data->num_pictures > 1) {
+ if (data->pic_data[0].pic_parms->picture_fields.bits.is_first_field) {
+ mAcquiredBuffer->renderBuffer.scanFormat = VA_TOP_FIELD;
+ } else {
+ mAcquiredBuffer->renderBuffer.scanFormat = VA_BOTTOM_FIELD;
+ }
+ } else {
+ mAcquiredBuffer->renderBuffer.scanFormat = VA_FRAME_PICTURE;
+ }
+
+ mRangeMapped = (data->se_data->RANGE_MAPY_FLAG || data->se_data->RANGE_MAPUV_FLAG || data->se_data->RANGERED);
+
+ int frameType = data->pic_data[0].pic_parms->picture_fields.bits.picture_type;
+ mAcquiredBuffer->referenceFrame = (frameType == VC1_PTYPE_I || frameType == VC1_PTYPE_P);
+
+ // TODO: handle multiple frames parsed from a sample buffer
+ int numPictures = (data->num_pictures > 1) ? 2 : 1;
+
+ for (int index = 0; index < numPictures; index++) {
+ status = decodePicture(data, index);
+ if (status != DECODE_SUCCESS) {
+ endDecodingFrame(true);
+ return status;
+ }
+ }
+
+ if (mRangeMapped) {
+ updateDeblockedPicIndexes(frameType);
+ }
+
+ // let outputSurfaceBuffer handle "asReference" for VC1
+ status = outputSurfaceBuffer();
+ return status;
+}
+
+
+Decode_Status VideoDecoderWMV::decodePicture(vbp_data_vc1 *data, int32_t picIndex) {
+ VAStatus vaStatus = VA_STATUS_SUCCESS;
+ Decode_Status status;
+ int32_t bufferIDCount = 0;
+ vbp_picture_data_vc1 *picData = &(data->pic_data[picIndex]);
+ VAPictureParameterBufferVC1 *picParams = picData->pic_parms;
+
+ if (picParams == NULL) {
+ return DECODE_PARSER_FAIL;
+ }
+
+ status = allocateVABufferIDs(picData->num_slices * 2 + 2);
+ CHECK_STATUS("allocateVABufferIDs");
+
+ status = setReference(picParams, picIndex, mAcquiredBuffer->renderBuffer.surface);
+ CHECK_STATUS("setReference");
+
+ if (mRangeMapped) {
+ // keep the destination surface for the picture after decoding and in-loop filtering
+ picParams->inloop_decoded_picture = mExtraSurfaces[mDeblockedCurrPicIndex];
+ } else {
+ picParams->inloop_decoded_picture = VA_INVALID_SURFACE;
+ }
+
+ vaStatus = vaBeginPicture(mVADisplay, mVAContext, mAcquiredBuffer->renderBuffer.surface);
+ CHECK_VA_STATUS("vaBeginPicture");
+ // setting mDecodingFrame to true so vaEndPicture will be invoked to end the picture decoding.
+ mDecodingFrame = true;
+
+ vaStatus = vaCreateBuffer(
+ mVADisplay,
+ mVAContext,
+ VAPictureParameterBufferType,
+ sizeof(VAPictureParameterBufferVC1),
+ 1,
+ picParams,
+ &mBufferIDs[bufferIDCount]);
+ CHECK_VA_STATUS("vaCreatePictureParameterBuffer");
+ bufferIDCount++;
+
+ if (picParams->bitplane_present.value) {
+ vaStatus = vaCreateBuffer(
+ mVADisplay,
+ mVAContext,
+ VABitPlaneBufferType,
+ picData->size_bitplanes,
+ 1,
+ picData->packed_bitplanes,
+ &mBufferIDs[bufferIDCount]);
+ CHECK_VA_STATUS("vaCreateBitPlaneBuffer");
+ bufferIDCount++;
+ }
+
+ for (uint32_t i = 0; i < picData->num_slices; i++) {
+ vaStatus = vaCreateBuffer(
+ mVADisplay,
+ mVAContext,
+ VASliceParameterBufferType,
+ sizeof(VASliceParameterBufferVC1),
+ 1,
+ &(picData->slc_data[i].slc_parms),
+ &mBufferIDs[bufferIDCount]);
+ CHECK_VA_STATUS("vaCreateSliceParameterBuffer");
+ bufferIDCount++;
+
+ vaStatus = vaCreateBuffer(
+ mVADisplay,
+ mVAContext,
+ VASliceDataBufferType,
+ //size
+ picData->slc_data[i].slice_size,
+ //num_elements
+ 1,
+ //slice data buffer pointer
+ //Note that this is the original data buffer ptr;
+ // offset to the actual slice data is provided in
+ // slice_data_offset in VASliceParameterBufferVC1
+ picData->slc_data[i].buffer_addr + picData->slc_data[i].slice_offset,
+ &mBufferIDs[bufferIDCount]);
+ CHECK_VA_STATUS("vaCreateSliceDataBuffer");
+ bufferIDCount++;
+ }
+
+ vaStatus = vaRenderPicture(
+ mVADisplay,
+ mVAContext,
+ mBufferIDs,
+ bufferIDCount);
+ CHECK_VA_STATUS("vaRenderPicture");
+
+ vaStatus = vaEndPicture(mVADisplay, mVAContext);
+ mDecodingFrame = false;
+ CHECK_VA_STATUS("vaRenderPicture");
+
+ return DECODE_SUCCESS;
+}
+
+
+Decode_Status VideoDecoderWMV::setReference(
+ VAPictureParameterBufferVC1 *params,
+ int32_t picIndex,
+ VASurfaceID current) {
+ int frameType = params->picture_fields.bits.picture_type;
+ switch (frameType) {
+ case VC1_PTYPE_I:
+ params->forward_reference_picture = current;
+ params->backward_reference_picture = current;
+ break;
+ case VC1_PTYPE_P:
+ // check REFDIST in the picture parameter buffer
+ if (0 != params->reference_fields.bits.reference_distance_flag &&
+ 0 != params->reference_fields.bits.reference_distance) {
+ /* The previous decoded frame (distance is up to 16 but not 0) is used
+ for reference. Not supported here.
+ */
+ return DECODE_NO_REFERENCE;
+ }
+ if (1 == picIndex) {
+ // handle interlace field coding case
+ if (1 == params->reference_fields.bits.num_reference_pictures ||
+ 1 == params->reference_fields.bits.reference_field_pic_indicator) {
+ /*
+ two reference fields or the second closest I/P field is used for
+ prediction. Set forward reference picture to INVALID so it will be
+ updated to a valid previous reconstructed reference frame later.
+ */
+ params->forward_reference_picture = VA_INVALID_SURFACE;
+ } else {
+ /* the closest I/P is used for reference so it must be the
+ complementary field in the same surface.
+ */
+ params->forward_reference_picture = current;
+ }
+ }
+ if (VA_INVALID_SURFACE == params->forward_reference_picture) {
+ if (mLastReference == NULL) {
+ return DECODE_NO_REFERENCE;
+ }
+ params->forward_reference_picture = mLastReference->renderBuffer.surface;
+ }
+ params->backward_reference_picture = VA_INVALID_SURFACE;
+ break;
+ case VC1_PTYPE_B:
+ if (mForwardReference == NULL || mLastReference == NULL) {
+ return DECODE_NO_REFERENCE;
+ }
+ params->forward_reference_picture = mForwardReference->renderBuffer.surface;
+ params->backward_reference_picture = mLastReference->renderBuffer.surface;
+ break;
+ case VC1_PTYPE_BI:
+ params->forward_reference_picture = VA_INVALID_SURFACE;
+ params->backward_reference_picture = VA_INVALID_SURFACE;
+ break;
+ case VC1_PTYPE_SKIPPED:
+ //Will never happen here
+ break;
+ default:
+ break;
+ }
+ return DECODE_SUCCESS;
+}
+
+void VideoDecoderWMV::updateDeblockedPicIndexes(int frameType) {
+ int32_t curPicIndex = mDeblockedCurrPicIndex;
+
+ /* Out Loop (range map) buffers */
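+    /* Rotation sketch (illustrative): starting from (curr, last, fwd) = (0, 1, 2),
+       an I or P frame swaps curr and last, giving (1, 0, 2); a B frame swaps curr
+       and fwd, giving (2, 1, 0). A skipped frame leaves the indexes untouched. */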
+ if (frameType != VC1_PTYPE_SKIPPED) {
+ if ((frameType == VC1_PTYPE_I) || (frameType == VC1_PTYPE_P)) {
+ mDeblockedCurrPicIndex = mDeblockedLastPicIndex;
+ mDeblockedLastPicIndex = curPicIndex;
+ } else {
+ mDeblockedCurrPicIndex = mDeblockedForwardPicIndex;
+ mDeblockedForwardPicIndex = curPicIndex;
+ }
+ }
+}
+
+Decode_Status VideoDecoderWMV::updateConfigData(
+ uint8_t *configData,
+ int32_t configDataLen,
+ uint8_t **newConfigData,
+ int32_t* newConfigDataLen) {
+ int32_t i = 0;
+ uint8_t *p = configData;
+
+    /* Check for start codes. If one exists, then this is VC-1, not WMV. */
+ while (i < configDataLen - 2) {
+ if ((p[i] == 0) &&
+ (p[i + 1] == 0) &&
+ (p[i + 2] == 1)) {
+ *newConfigData = NULL;
+ *newConfigDataLen = 0;
+ return DECODE_SUCCESS;
+ }
+ i++;
+ }
+
+ *newConfigDataLen = configDataLen + 9;
+ p = *newConfigData = new uint8_t [*newConfigDataLen];
+ if (!p) {
+ return DECODE_MEMORY_FAIL;
+ }
+
+ /* If we get here we have 4+ bytes of codec data that must be formatted */
+ /* to pass through as an RCV sequence header. */
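+    /* Worked example (illustrative): for a 720x480 clip the bytes built below are
+       00 00 01 0F 02 D0 01 E0, followed by the codec data and a trailing 0x80. */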
+ p[0] = 0;
+ p[1] = 0;
+ p[2] = 1;
+ p[3] = 0x0f; /* Start code. */
+ p[4] = (mVideoFormatInfo.width >> 8) & 0x0ff;
+ p[5] = mVideoFormatInfo.width & 0x0ff;
+ p[6] = (mVideoFormatInfo.height >> 8) & 0x0ff;
+ p[7] = mVideoFormatInfo.height & 0x0ff;
+
+ memcpy(p + 8, configData, configDataLen);
+ *(p + configDataLen + 8) = 0x80;
+
+ return DECODE_SUCCESS;
+}
+
+Decode_Status VideoDecoderWMV::startVA(vbp_data_vc1 *data) {
+ updateFormatInfo(data);
+
+ VAProfile vaProfile;
+ switch (data->se_data->PROFILE) {
+ case 0:
+ vaProfile = VAProfileVC1Simple;
+ break;
+ case 1:
+ vaProfile = VAProfileVC1Main;
+ break;
+ default:
+ vaProfile = VAProfileVC1Advanced;
+ break;
+ }
+
+ return VideoDecoderBase::setupVA(VC1_SURFACE_NUMBER, vaProfile, VC1_EXTRA_SURFACE_NUMBER);
+}
+
+void VideoDecoderWMV::updateFormatInfo(vbp_data_vc1 *data) {
+ ITRACE("updateFormatInfo: current size: %d x %d, new size: %d x %d",
+ mVideoFormatInfo.width, mVideoFormatInfo.height,
+ data->se_data->CODED_WIDTH, data->se_data->CODED_HEIGHT);
+
+ mVideoFormatInfo.cropBottom = data->se_data->CODED_HEIGHT > mVideoFormatInfo.height ?
+ data->se_data->CODED_HEIGHT - mVideoFormatInfo.height : 0;
+ mVideoFormatInfo.cropRight = data->se_data->CODED_WIDTH > mVideoFormatInfo.width ?
+ data->se_data->CODED_WIDTH - mVideoFormatInfo.width : 0;
+
+ if ((mVideoFormatInfo.width != data->se_data->CODED_WIDTH ||
+ mVideoFormatInfo.height != data->se_data->CODED_HEIGHT) &&
+ data->se_data->CODED_WIDTH &&
+ data->se_data->CODED_HEIGHT) {
+ // encoded image size
+ mVideoFormatInfo.width = data->se_data->CODED_WIDTH;
+ mVideoFormatInfo.height = data->se_data->CODED_HEIGHT;
+ mSizeChanged = true;
+ ITRACE("Video size is changed.");
+ }
+
+ // scaling has been performed on the decoded image.
+ mVideoFormatInfo.videoRange = 1;
+
+ switch (data->se_data->MATRIX_COEF) {
+ case 1:
+ mVideoFormatInfo.colorMatrix = VA_SRC_BT709;
+ break;
+ // ITU-R BT.1700, ITU-R BT.601-5, and SMPTE 293M-1996.
+ case 6:
+ mVideoFormatInfo.colorMatrix = VA_SRC_BT601;
+ break;
+ default:
+ // unknown color matrix, set to 0 so color space flag will not be set.
+ mVideoFormatInfo.colorMatrix = 0;
+ break;
+ }
+
+ mVideoFormatInfo.aspectX = data->se_data->ASPECT_HORIZ_SIZE;
+ mVideoFormatInfo.aspectY = data->se_data->ASPECT_VERT_SIZE;
+ mVideoFormatInfo.bitrate = 0; //data->se_data->bitrate;
+ mVideoFormatInfo.valid = true;
+
+ setRenderRect();
+}
+
+Decode_Status VideoDecoderWMV::allocateVABufferIDs(int32_t number) {
+    if (mNumBufferIDs >= number) {
+ return DECODE_SUCCESS;
+ }
+ if (mBufferIDs) {
+ delete [] mBufferIDs;
+ }
+ mBufferIDs = NULL;
+ mNumBufferIDs = 0;
+ mBufferIDs = new VABufferID [number];
+ if (mBufferIDs == NULL) {
+ return DECODE_MEMORY_FAIL;
+ }
+ mNumBufferIDs = number;
+ return DECODE_SUCCESS;
+}
+
+Decode_Status VideoDecoderWMV::parseBuffer(uint8_t *data, int32_t size, vbp_data_vc1 **vbpData) {
+ Decode_Status status;
+
+ if (data == NULL || size == 0) {
+ return DECODE_INVALID_DATA;
+ }
+
+ if (mConfigDataParsed) {
+ status = VideoDecoderBase::parseBuffer(data, size, false, (void**)vbpData);
+ CHECK_STATUS("VideoDecoderBase::parseBuffer");
+ } else {
+ uint8_t *newData = NULL;
+ int32_t newSize = 0;
+ status = updateConfigData(data, size, &newData, &newSize);
+ CHECK_STATUS("updateConfigData");
+
+ if (newSize) {
+ status = VideoDecoderBase::parseBuffer(newData, newSize, true, (void**)vbpData);
+ delete [] newData;
+ } else {
+ status = VideoDecoderBase::parseBuffer(data, size, true, (void**)vbpData);
+ }
+ CHECK_STATUS("VideoDecoderBase::parseBuffer");
+ mConfigDataParsed = true;
+ }
+ return DECODE_SUCCESS;
+}
+
+
+Decode_Status VideoDecoderWMV::checkHardwareCapability() {
+#ifndef USE_GEN_HW
+ VAStatus vaStatus;
+ VAConfigAttrib cfgAttribs[2];
+ cfgAttribs[0].type = VAConfigAttribMaxPictureWidth;
+ cfgAttribs[1].type = VAConfigAttribMaxPictureHeight;
+ vaStatus = vaGetConfigAttributes(mVADisplay, VAProfileVC1Advanced,
+ VAEntrypointVLD, cfgAttribs, 2);
+ CHECK_VA_STATUS("vaGetConfigAttributes");
+ if (cfgAttribs[0].value * cfgAttribs[1].value < (uint32_t)mVideoFormatInfo.width * (uint32_t)mVideoFormatInfo.height) {
+ ETRACE("hardware supports resolution %d * %d smaller than the clip resolution %d * %d",
+ cfgAttribs[0].value, cfgAttribs[1].value, mVideoFormatInfo.width, mVideoFormatInfo.height);
+ return DECODE_DRIVER_FAIL;
+ }
+#endif
+ return DECODE_SUCCESS;
+}
+
+
diff --git a/videodecoder/VideoDecoderWMV.h b/videodecoder/VideoDecoderWMV.h
new file mode 100644
index 0000000..40e4a5c
--- /dev/null
+++ b/videodecoder/VideoDecoderWMV.h
@@ -0,0 +1,66 @@
+/*
+* Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+#ifndef VIDEO_DECODER_WMV_H_
+#define VIDEO_DECODER_WMV_H_
+
+#include "VideoDecoderBase.h"
+
+
+class VideoDecoderWMV : public VideoDecoderBase {
+public:
+ VideoDecoderWMV(const char *mimeType);
+ virtual ~VideoDecoderWMV();
+
+ virtual Decode_Status start(VideoConfigBuffer *buffer);
+ virtual void stop(void);
+ virtual void flush(void);
+ virtual Decode_Status decode(VideoDecodeBuffer *buffer);
+
+protected:
+ virtual Decode_Status checkHardwareCapability();
+
+
+private:
+ Decode_Status decodeFrame(VideoDecodeBuffer *buffer, vbp_data_vc1 *data);
+ Decode_Status decodePicture(vbp_data_vc1 *data, int32_t picIndex);
+ Decode_Status setReference(VAPictureParameterBufferVC1 *params, int32_t picIndex, VASurfaceID current);
+ void updateDeblockedPicIndexes(int frameType);
+ Decode_Status updateConfigData(uint8_t *configData, int32_t configDataLen, uint8_t **newConfigData, int32_t *newConfigDataLen);
+ Decode_Status startVA(vbp_data_vc1 *data);
+ void updateFormatInfo(vbp_data_vc1 *data);
+ inline Decode_Status allocateVABufferIDs(int32_t number);
+ Decode_Status parseBuffer(uint8_t *data, int32_t size, vbp_data_vc1 **vbpData);
+
+private:
+ enum {
+ VC1_SURFACE_NUMBER = 10,
+ VC1_EXTRA_SURFACE_NUMBER = 3,
+ };
+
+ VABufferID *mBufferIDs;
+ int32_t mNumBufferIDs;
+ bool mConfigDataParsed;
+ bool mRangeMapped;
+
+ int32_t mDeblockedCurrPicIndex;
+ int32_t mDeblockedLastPicIndex;
+ int32_t mDeblockedForwardPicIndex;
+};
+
+
+
+#endif /* VIDEO_DECODER_WMV_H_ */
diff --git a/videodecoder/securevideo/baytrail/VideoDecoderAVCSecure.cpp b/videodecoder/securevideo/baytrail/VideoDecoderAVCSecure.cpp
new file mode 100644
index 0000000..52a5285
--- /dev/null
+++ b/videodecoder/securevideo/baytrail/VideoDecoderAVCSecure.cpp
@@ -0,0 +1,367 @@
+/*
+* Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+#include "va_private.h"
+#include "VideoDecoderAVCSecure.h"
+#include "VideoDecoderTrace.h"
+#include <string.h>
+
+#define STARTCODE_PREFIX_LEN 3
+#define NALU_TYPE_MASK 0x1F
+#define MAX_NALU_HEADER_BUFFER 8192
+static const uint8_t startcodePrefix[STARTCODE_PREFIX_LEN] = {0x00, 0x00, 0x01};
+
+VideoDecoderAVCSecure::VideoDecoderAVCSecure(const char *mimeType)
+ : VideoDecoderAVC(mimeType),
+ mNaluHeaderBuffer(NULL),
+ mSliceHeaderBuffer(NULL) {
+ setParserType(VBP_H264SECURE);
+}
+
+VideoDecoderAVCSecure::~VideoDecoderAVCSecure() {
+}
+
+Decode_Status VideoDecoderAVCSecure::start(VideoConfigBuffer *buffer) {
+ Decode_Status status = VideoDecoderAVC::start(buffer);
+ if (status != DECODE_SUCCESS) {
+ return status;
+ }
+
+ mNaluHeaderBuffer = new uint8_t [MAX_NALU_HEADER_BUFFER];
+
+ if (mNaluHeaderBuffer == NULL) {
+ ETRACE("Failed to allocate memory for mNaluHeaderBuffer");
+ return DECODE_MEMORY_FAIL;
+ }
+
+ mSliceHeaderBuffer = new uint8_t [MAX_NALU_HEADER_BUFFER];
+ if (mSliceHeaderBuffer == NULL) {
+ ETRACE("Failed to allocate memory for mSliceHeaderBuffer");
+ if (mNaluHeaderBuffer) {
+ delete [] mNaluHeaderBuffer;
+ mNaluHeaderBuffer = NULL;
+ }
+ return DECODE_MEMORY_FAIL;
+ }
+
+ return status;
+}
+
+void VideoDecoderAVCSecure::stop(void) {
+ VideoDecoderAVC::stop();
+
+ if (mNaluHeaderBuffer) {
+ delete [] mNaluHeaderBuffer;
+ mNaluHeaderBuffer = NULL;
+ }
+
+ if (mSliceHeaderBuffer) {
+ delete [] mSliceHeaderBuffer;
+ mSliceHeaderBuffer = NULL;
+ }
+
+}
+
+Decode_Status VideoDecoderAVCSecure::decode(VideoDecodeBuffer *buffer) {
+ Decode_Status status;
+ int32_t sizeAccumulated = 0;
+ int32_t sliceHeaderSize = 0;
+ int32_t sliceIdx = 0;
+ uint8_t naluType;
+ frame_info_t* pFrameInfo;
+
+ mFrameSize = 0;
+ if (buffer->flag & IS_SECURE_DATA) {
+ VTRACE("Decoding protected video ...");
+ mIsEncryptData = 1;
+ } else {
+ VTRACE("Decoding clear video ...");
+ mIsEncryptData = 0;
+ return VideoDecoderAVC::decode(buffer);
+ }
+
+ if (buffer->size != sizeof(frame_info_t)) {
+ ETRACE("Not enough data to read frame_info_t!");
+ return DECODE_INVALID_DATA;
+ }
+ pFrameInfo = (frame_info_t*) buffer->data;
+
+ mFrameSize = pFrameInfo->length;
+ VTRACE("mFrameSize = %d", mFrameSize);
+
+ memcpy(&mEncParam, pFrameInfo->pavp, sizeof(pavp_info_t));
+    for (uint32_t i = 0; i < pFrameInfo->num_nalus; i++) {
+ naluType = pFrameInfo->nalus[i].type & NALU_TYPE_MASK;
+ if (naluType >= h264_NAL_UNIT_TYPE_SLICE && naluType <= h264_NAL_UNIT_TYPE_IDR) {
+ memcpy(mSliceHeaderBuffer + sliceHeaderSize,
+ &sliceIdx,
+ sizeof(int32_t));
+ sliceHeaderSize += 4;
+
+ memcpy(mSliceHeaderBuffer + sliceHeaderSize,
+ &pFrameInfo->data,
+ sizeof(uint8_t*));
+ sliceHeaderSize += sizeof(uint8_t*);
+
+ memcpy(mSliceHeaderBuffer + sliceHeaderSize,
+ &pFrameInfo->nalus[i].offset,
+ sizeof(uint32_t));
+ sliceHeaderSize += sizeof(uint32_t);
+
+ memcpy(mSliceHeaderBuffer + sliceHeaderSize,
+ &pFrameInfo->nalus[i].length,
+ sizeof(uint32_t));
+ sliceHeaderSize += sizeof(uint32_t);
+
+ memcpy(mSliceHeaderBuffer + sliceHeaderSize,
+ pFrameInfo->nalus[i].slice_header,
+ sizeof(slice_header_t));
+ sliceHeaderSize += sizeof(slice_header_t);
+ if (pFrameInfo->nalus[i].type & 0x60) {
+ memcpy(mSliceHeaderBuffer+sliceHeaderSize, pFrameInfo->dec_ref_pic_marking, sizeof(dec_ref_pic_marking_t));
+ } else {
+ memset(mSliceHeaderBuffer+sliceHeaderSize, 0, sizeof(dec_ref_pic_marking_t));
+ }
+ sliceHeaderSize += sizeof(dec_ref_pic_marking_t);
+ sliceIdx++;
+ } else if (naluType >= h264_NAL_UNIT_TYPE_SEI && naluType <= h264_NAL_UNIT_TYPE_PPS) {
+ memcpy(mNaluHeaderBuffer + sizeAccumulated,
+ startcodePrefix,
+ STARTCODE_PREFIX_LEN);
+ sizeAccumulated += STARTCODE_PREFIX_LEN;
+ memcpy(mNaluHeaderBuffer + sizeAccumulated,
+ pFrameInfo->nalus[i].data,
+ pFrameInfo->nalus[i].length);
+ sizeAccumulated += pFrameInfo->nalus[i].length;
+ } else {
+ WTRACE("Failure: DECODE_FRAME_DROPPED");
+ return DECODE_FRAME_DROPPED;
+ }
+ }
+
+ vbp_data_h264 *data = NULL;
+ int new_sequence_to_handle = 0;
+
+ if (sizeAccumulated > 0) {
+ status = VideoDecoderBase::parseBuffer(
+ mNaluHeaderBuffer,
+ sizeAccumulated,
+ false,
+ (void**)&data);
+ CHECK_STATUS("VideoDecoderBase::parseBuffer");
+
+        // [FIX DRC zoom issue] If one buffer contains more than one NALU,
+        // for example SPS+PPS+IDR, the new_sps/new_pps flags set in parseBuffer
+        // will be flushed by the following updateBuffer call, so
+        // handleNewSequence would not be triggered in decodeFrame().
+ if (data->new_sps || data->new_pps) {
+ new_sequence_to_handle = 1;
+ }
+ }
+
+ if (sliceHeaderSize > 0) {
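+        // Terminate the record stream with a 0xFFFFFFFF sentinel word; the
+        // secure parser appears to treat it as an end-of-records marker
+        // (assumption based on usage, not on parser documentation).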
+ memset(mSliceHeaderBuffer + sliceHeaderSize, 0xFF, 4);
+ sliceHeaderSize += 4;
+ status = VideoDecoderBase::updateBuffer(
+ mSliceHeaderBuffer,
+ sliceHeaderSize,
+ (void**)&data);
+ CHECK_STATUS("VideoDecoderBase::updateBuffer");
+
+        // Restore the flags if they were flushed but a new sequence still needs to be handled.
+ if ((1 == new_sequence_to_handle) &&
+ ((data->new_sps == 0) || (data->new_pps == 0))) {
+ data->new_sps = 1;
+ data->new_pps = 1;
+ }
+ }
+
+ if (data == NULL) {
+ ETRACE("Invalid data returned by parser!");
+ return DECODE_MEMORY_FAIL;
+ }
+
+ if (!mVAStarted) {
+ if (data->has_sps && data->has_pps) {
+ status = startVA(data);
+ CHECK_STATUS("startVA");
+ } else {
+ WTRACE("Can't start VA as either SPS or PPS is still not available.");
+ return DECODE_SUCCESS;
+ }
+ }
+ status = decodeFrame(buffer, data);
+ return status;
+}
+
+Decode_Status VideoDecoderAVCSecure::decodeSlice(vbp_data_h264 *data, uint32_t picIndex, uint32_t sliceIndex) {
+ Decode_Status status;
+ VAStatus vaStatus;
+ uint32_t bufferIDCount = 0;
+    // maximum 5 buffers to render a slice: picture parameter, IQMatrix, encryption parameter, slice parameter, slice data
+ VABufferID bufferIDs[5];
+
+ vbp_picture_data_h264 *picData = &(data->pic_data[picIndex]);
+ vbp_slice_data_h264 *sliceData = &(picData->slc_data[sliceIndex]);
+ VAPictureParameterBufferH264 *picParam = picData->pic_parms;
+ VASliceParameterBufferH264 *sliceParam = &(sliceData->slc_parms);
+ VAEncryptionParameterBuffer encryptParam;
+
+ if (sliceParam->first_mb_in_slice == 0 || mDecodingFrame == false) {
+ // either condition indicates start of a new frame
+ if (sliceParam->first_mb_in_slice != 0) {
+ WTRACE("The first slice is lost.");
+ // TODO: handle the first slice lost
+ }
+ if (mDecodingFrame) {
+ // interlace content, complete decoding the first field
+ vaStatus = vaEndPicture(mVADisplay, mVAContext);
+ CHECK_VA_STATUS("vaEndPicture");
+
+ // for interlace content, top field may be valid only after the second field is parsed
+            mAcquiredBuffer->pictureOrder = picParam->CurrPic.TopFieldOrderCnt;
+ }
+
+ // Update the reference frames and surface IDs for DPB and current frame
+ status = updateDPB(picParam);
+ CHECK_STATUS("updateDPB");
+
+ vaStatus = vaBeginPicture(mVADisplay, mVAContext, mAcquiredBuffer->renderBuffer.surface);
+ CHECK_VA_STATUS("vaBeginPicture");
+
+ // start decoding a frame
+ mDecodingFrame = true;
+
+ vaStatus = vaCreateBuffer(
+ mVADisplay,
+ mVAContext,
+ VAPictureParameterBufferType,
+ sizeof(VAPictureParameterBufferH264),
+ 1,
+ picParam,
+ &bufferIDs[bufferIDCount]);
+ CHECK_VA_STATUS("vaCreatePictureParameterBuffer");
+ bufferIDCount++;
+
+ vaStatus = vaCreateBuffer(
+ mVADisplay,
+ mVAContext,
+ VAIQMatrixBufferType,
+ sizeof(VAIQMatrixBufferH264),
+ 1,
+ data->IQ_matrix_buf,
+ &bufferIDs[bufferIDCount]);
+ CHECK_VA_STATUS("vaCreateIQMatrixBuffer");
+ bufferIDCount++;
+
+ if (mIsEncryptData) {
+ memset(&encryptParam, 0, sizeof(VAEncryptionParameterBuffer));
+ encryptParam.pavpCounterMode = 4;
+ encryptParam.pavpEncryptionType = 2;
+ encryptParam.hostEncryptMode = 2;
+ encryptParam.pavpHasBeenEnabled = 1;
+ encryptParam.app_id = 0;
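+            // mEncParam.iv carries the 128-bit AES-CTR counter produced by the
+            // secure parser; it is passed through to the driver unchanged.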
+ memcpy(encryptParam.pavpAesCounter, mEncParam.iv, 16);
+
+ vaStatus = vaCreateBuffer(
+ mVADisplay,
+ mVAContext,
+ (VABufferType)VAEncryptionParameterBufferType,
+ sizeof(VAEncryptionParameterBuffer),
+ 1,
+ &encryptParam,
+ &bufferIDs[bufferIDCount]);
+ CHECK_VA_STATUS("vaCreateEncryptionParameterBuffer");
+ bufferIDCount++;
+ }
+
+ vaStatus = vaCreateBuffer(
+ mVADisplay,
+ mVAContext,
+ VASliceDataBufferType,
+ mFrameSize, //size
+ 1, //num_elements
+ sliceData->buffer_addr + sliceData->slice_offset,
+ &bufferIDs[bufferIDCount]);
+ CHECK_VA_STATUS("vaCreateSliceDataBuffer");
+ bufferIDCount++;
+
+ }
+
+ vaStatus = vaCreateBuffer(
+ mVADisplay,
+ mVAContext,
+ VASliceParameterBufferType,
+ sizeof(VASliceParameterBufferH264Base),
+ 1,
+ sliceParam,
+ &bufferIDs[bufferIDCount]);
+
+ CHECK_VA_STATUS("vaCreateSliceParameterBuffer");
+ bufferIDCount++;
+
+ vaStatus = vaRenderPicture(
+ mVADisplay,
+ mVAContext,
+ bufferIDs,
+ bufferIDCount);
+ CHECK_VA_STATUS("vaRenderPicture");
+
+ return DECODE_SUCCESS;
+}
+
+Decode_Status VideoDecoderAVCSecure::getCodecSpecificConfigs(
+ VAProfile profile, VAConfigID *config)
+{
+ VAStatus vaStatus;
+ VAConfigAttrib attrib[2];
+
+ if (config == NULL) {
+ ETRACE("Invalid parameter!");
+ return DECODE_FAIL;
+ }
+
+ attrib[0].type = VAConfigAttribRTFormat;
+ attrib[0].value = VA_RT_FORMAT_YUV420;
+ attrib[1].type = VAConfigAttribDecSliceMode;
+ attrib[1].value = VA_DEC_SLICE_MODE_NORMAL;
+
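+    // Probe which slice modes the driver supports: prefer the short (BASE)
+    // format when available, otherwise fall back to the normal long format.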
+    vaStatus = vaGetConfigAttributes(mVADisplay, profile, VAEntrypointVLD, &attrib[1], 1);
+    CHECK_VA_STATUS("vaGetConfigAttributes");
+
+ if (attrib[1].value & VA_DEC_SLICE_MODE_BASE)
+ {
+ ITRACE("AVC short format used");
+ attrib[1].value = VA_DEC_SLICE_MODE_BASE;
+ } else if (attrib[1].value & VA_DEC_SLICE_MODE_NORMAL) {
+ ITRACE("AVC long format ssed");
+ attrib[1].value = VA_DEC_SLICE_MODE_NORMAL;
+ } else {
+ ETRACE("Unsupported Decode Slice Mode!");
+ return DECODE_FAIL;
+ }
+
+ vaStatus = vaCreateConfig(
+ mVADisplay,
+ profile,
+ VAEntrypointVLD,
+ &attrib[0],
+ 2,
+ config);
+ CHECK_VA_STATUS("vaCreateConfig");
+
+ return DECODE_SUCCESS;
+}
diff --git a/videodecoder/securevideo/baytrail/VideoDecoderAVCSecure.h b/videodecoder/securevideo/baytrail/VideoDecoderAVCSecure.h
new file mode 100644
index 0000000..2214075
--- /dev/null
+++ b/videodecoder/securevideo/baytrail/VideoDecoderAVCSecure.h
@@ -0,0 +1,44 @@
+/*
+* Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+#ifndef VIDEO_DECODER_AVC_SECURE_H_
+#define VIDEO_DECODER_AVC_SECURE_H_
+
+#include "VideoDecoderAVC.h"
+#include "secvideoparser.h"
+
+class VideoDecoderAVCSecure : public VideoDecoderAVC {
+public:
+ VideoDecoderAVCSecure(const char *mimeType);
+ virtual ~VideoDecoderAVCSecure();
+ virtual Decode_Status start(VideoConfigBuffer *buffer);
+ virtual void stop(void);
+ virtual Decode_Status decode(VideoDecodeBuffer *buffer);
+
+protected:
+    virtual Decode_Status getCodecSpecificConfigs(VAProfile profile, VAConfigID *config);
+
+private:
+ virtual Decode_Status decodeSlice(vbp_data_h264 *data, uint32_t picIndex, uint32_t sliceIndex);
+private:
+ pavp_info_t mEncParam;
+ uint8_t *mNaluHeaderBuffer;
+ uint8_t *mSliceHeaderBuffer;
+ uint32_t mIsEncryptData;
+ uint32_t mFrameSize;
+};
+
+#endif /* VIDEO_DECODER_AVC_SECURE_H_ */
diff --git a/videodecoder/securevideo/baytrail/secvideoparser.h b/videodecoder/securevideo/baytrail/secvideoparser.h
new file mode 100644
index 0000000..f27580a
--- /dev/null
+++ b/videodecoder/securevideo/baytrail/secvideoparser.h
@@ -0,0 +1,150 @@
+/*
+* Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+#ifndef SEC_VIDEO_PARSER_H_
+#define SEC_VIDEO_PARSER_H_
+
+#include <stdint.h>
+
+/* H264 start code values */
+typedef enum _h264_nal_unit_type
+{
+ h264_NAL_UNIT_TYPE_unspecified = 0,
+ h264_NAL_UNIT_TYPE_SLICE,
+ h264_NAL_UNIT_TYPE_DPA,
+ h264_NAL_UNIT_TYPE_DPB,
+ h264_NAL_UNIT_TYPE_DPC,
+ h264_NAL_UNIT_TYPE_IDR,
+ h264_NAL_UNIT_TYPE_SEI,
+ h264_NAL_UNIT_TYPE_SPS,
+ h264_NAL_UNIT_TYPE_PPS,
+ h264_NAL_UNIT_TYPE_Acc_unit_delimiter,
+ h264_NAL_UNIT_TYPE_EOSeq,
+ h264_NAL_UNIT_TYPE_EOstream,
+ h264_NAL_UNIT_TYPE_filler_data,
+ h264_NAL_UNIT_TYPE_SPS_extension,
+ h264_NAL_UNIT_TYPE_ACP = 19,
+ h264_NAL_UNIT_TYPE_Slice_extension = 20
+} h264_nal_unit_type_t;
+
+#define MAX_OP 16
+
+enum dec_ref_pic_marking_flags {
+ IDR_PIC_FLAG = 0,
+ NO_OUTPUT_OF_PRIOR_PICS_FLAG,
+ LONG_TERM_REFERENCE_FLAG,
+ ADAPTIVE_REF_PIC_MARKING_MODE_FLAG
+};
+
+typedef struct _dec_ref_pic_marking_t {
+ union {
+ uint8_t flags;
+ struct {
+ uint8_t idr_pic_flag:1;
+ uint8_t no_output_of_prior_pics_flag:1;
+ uint8_t long_term_reference_flag:1;
+ uint8_t adaptive_ref_pic_marking_mode_flag:1;
+ };
+ };
+ struct {
+ uint8_t memory_management_control_operation;
+ union {
+ struct {
+ uint8_t difference_of_pic_nums_minus1;
+ } op1;
+ struct {
+ uint8_t long_term_pic_num;
+ } op2;
+ struct {
+ uint8_t difference_of_pic_nums_minus1;
+ uint8_t long_term_frame_idx;
+ } op3;
+ struct {
+ uint8_t max_long_term_frame_idx_plus1;
+ } op4;
+ struct {
+ uint8_t long_term_frame_idx;
+ } op6;
+ };
+ } op[MAX_OP];
+} dec_ref_pic_marking_t;
+
+enum slice_header_flags {
+ FIELD_PIC_FLAG = 0,
+ BOTTOM_FIELD_FLAG
+};
+
+typedef struct _slice_header_t {
+ uint8_t nal_unit_type;
+ uint8_t pps_id;
+    uint8_t padding; // TODO: padding is needed because the flags field in the secfw implementation is a big-endian uint16_t
+ union {
+ uint8_t flags;
+ struct {
+ uint8_t field_pic_flag:1;
+ uint8_t bottom_field_flag:1;
+ };
+ };
+ uint32_t first_mb_in_slice;
+ uint32_t frame_num;
+ uint16_t idr_pic_id;
+ uint16_t pic_order_cnt_lsb;
+ int32_t delta_pic_order_cnt[2];
+ int32_t delta_pic_order_cnt_bottom;
+} slice_header_t;
+
+typedef struct {
+ uint8_t type;
+ uint32_t offset;
+ uint8_t* data;
+ uint32_t length;
+ slice_header_t* slice_header;
+} nalu_info_t;
+
+typedef struct {
+ uint32_t iv[4];
+ uint32_t mode;
+ uint32_t app_id;
+} pavp_info_t;
+
+#define MAX_NUM_NALUS 20
+
+typedef struct {
+ uint8_t* data;
+ uint32_t length;
+ pavp_info_t* pavp;
+ dec_ref_pic_marking_t* dec_ref_pic_marking;
+ uint32_t num_nalus;
+ nalu_info_t nalus[MAX_NUM_NALUS];
+} frame_info_t;
+
+int parser_init(void);
+int parse_frame(uint8_t* frame, uint32_t frame_size, uint8_t* nalu_data, uint32_t* nalu_data_size);
+
+// DEBUG PRINTING
+void print_slice_header(slice_header_t* slice_header);
+void print_dec_ref_pic_marking(dec_ref_pic_marking_t* dec_ref_pic_marking);
+void print_data_bytes(uint8_t* data, uint32_t count);
+void print_nalu_data(uint8_t* nalu_data, uint32_t nalu_data_size);
+
+// BYTESWAPPING
+uint16_t byteswap_16(uint16_t word);
+uint32_t byteswap_32(uint32_t dword);
+void byteswap_slice_header(slice_header_t* slice_header);
+void byteswap_dec_ref_pic_marking(dec_ref_pic_marking_t* dec_ref_pic_marking);
+void byteswap_nalu_data(uint8_t* nalu_data, uint32_t nalu_data_size);
+
+#endif /* SEC_VIDEO_PARSER_H_ */
diff --git a/videodecoder/securevideo/baytrail/va_private.h b/videodecoder/securevideo/baytrail/va_private.h
new file mode 100644
index 0000000..34a4e1b
--- /dev/null
+++ b/videodecoder/securevideo/baytrail/va_private.h
@@ -0,0 +1,64 @@
+/*
+* Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+
+#ifndef __VA_PRIVATE_H__
+#define __VA_PRIVATE_H__
+#include <va/va.h>
+#define ENABLE_PAVP_LINUX 1
+// Misc parameter for encoder
+#define VAEncMiscParameterTypePrivate -2
+// encryption parameters for PAVP
+#define VAEncryptionParameterBufferType -3
+
+typedef struct _VAEncMiscParameterPrivate
+{
+ unsigned int target_usage; // Valid values 1-7 for AVC & MPEG2.
+ unsigned int reserved[7]; // Reserved for future use.
+} VAEncMiscParameterPrivate;
+
+/* VAEncryptionParameterBuffer */
+typedef struct _VAEncryptionParameterBuffer
+{
+ //Not used currently
+ unsigned int encryptionSupport;
+ //Not used currently
+ unsigned int hostEncryptMode;
+ // For IV, Counter input
+ unsigned int pavpAesCounter[2][4];
+ // not used currently
+ unsigned int pavpIndex;
+    // PAVP mode: CTR, CBC, DEDE, etc.
+ unsigned int pavpCounterMode;
+ unsigned int pavpEncryptionType;
+ // not used currently
+ unsigned int pavpInputSize[2];
+ // not used currently
+ unsigned int pavpBufferSize[2];
+ // not used currently
+ VABufferID pvap_buf;
+ // set to TRUE if protected media
+ unsigned int pavpHasBeenEnabled;
+ // not used currently
+ unsigned int IntermmediatedBufReq;
+ // not used currently
+ unsigned int uiCounterIncrement;
+    // AppId: PAVP session index from the application
+ unsigned int app_id;
+
+} VAEncryptionParameterBuffer;
+
+#endif
diff --git a/videodecoder/securevideo/cherrytrail/VideoDecoderAVCSecure.cpp b/videodecoder/securevideo/cherrytrail/VideoDecoderAVCSecure.cpp
new file mode 100644
index 0000000..18c87b9
--- /dev/null
+++ b/videodecoder/securevideo/cherrytrail/VideoDecoderAVCSecure.cpp
@@ -0,0 +1,351 @@
+/*
+* Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+#include "va_private.h"
+#include "VideoDecoderAVCSecure.h"
+#include "VideoDecoderTrace.h"
+#include <string.h>
+
+#define STARTCODE_PREFIX_LEN 3
+#define NALU_TYPE_MASK 0x1F
+#define MAX_NALU_HEADER_BUFFER 8192
+static const uint8_t startcodePrefix[STARTCODE_PREFIX_LEN] = {0x00, 0x00, 0x01};
+
+VideoDecoderAVCSecure::VideoDecoderAVCSecure(const char *mimeType)
+ : VideoDecoderAVC(mimeType),
+ mNaluHeaderBuffer(NULL),
+ mSliceHeaderBuffer(NULL) {
+ setParserType(VBP_H264SECURE);
+}
+
+VideoDecoderAVCSecure::~VideoDecoderAVCSecure() {
+}
+
+Decode_Status VideoDecoderAVCSecure::start(VideoConfigBuffer *buffer) {
+ Decode_Status status = VideoDecoderAVC::start(buffer);
+ if (status != DECODE_SUCCESS) {
+ return status;
+ }
+
+ mNaluHeaderBuffer = new uint8_t [MAX_NALU_HEADER_BUFFER];
+
+ if (mNaluHeaderBuffer == NULL) {
+ ETRACE("Failed to allocate memory for mNaluHeaderBuffer");
+ return DECODE_MEMORY_FAIL;
+ }
+
+ mSliceHeaderBuffer = new uint8_t [MAX_NALU_HEADER_BUFFER];
+ if (mSliceHeaderBuffer == NULL) {
+ ETRACE("Failed to allocate memory for mSliceHeaderBuffer");
+ if (mNaluHeaderBuffer) {
+ delete [] mNaluHeaderBuffer;
+ mNaluHeaderBuffer = NULL;
+ }
+ return DECODE_MEMORY_FAIL;
+ }
+
+ return status;
+}
+
+void VideoDecoderAVCSecure::stop(void) {
+ VideoDecoderAVC::stop();
+
+ if (mNaluHeaderBuffer) {
+ delete [] mNaluHeaderBuffer;
+ mNaluHeaderBuffer = NULL;
+ }
+
+ if (mSliceHeaderBuffer) {
+ delete [] mSliceHeaderBuffer;
+ mSliceHeaderBuffer = NULL;
+ }
+
+}
+
+Decode_Status VideoDecoderAVCSecure::decode(VideoDecodeBuffer *buffer) {
+ Decode_Status status;
+ int32_t sizeAccumulated = 0;
+ int32_t sliceHeaderSize = 0;
+ int32_t sliceIdx = 0;
+ uint8_t naluType;
+ frame_info_t* pFrameInfo;
+
+ mFrameSize = 0;
+ if (buffer->flag & IS_SECURE_DATA) {
+ VTRACE("Decoding protected video ...");
+ mIsEncryptData = 1;
+ } else {
+ VTRACE("Decoding clear video ...");
+ mIsEncryptData = 0;
+ return VideoDecoderAVC::decode(buffer);
+ }
+
+ if (buffer->size != sizeof(frame_info_t)) {
+ ETRACE("Not enough data to read frame_info_t!");
+ return DECODE_INVALID_DATA;
+ }
+ pFrameInfo = (frame_info_t*) buffer->data;
+
+ mFrameSize = pFrameInfo->length;
+ VTRACE("mFrameSize = %d", mFrameSize);
+
+ memcpy(&mEncParam, pFrameInfo->pavp, sizeof(pavp_info_t));
+    for (uint32_t i = 0; i < pFrameInfo->num_nalus; i++) {
+ naluType = pFrameInfo->nalus[i].type & NALU_TYPE_MASK;
+ if (naluType >= h264_NAL_UNIT_TYPE_SLICE && naluType <= h264_NAL_UNIT_TYPE_IDR) {
+ memcpy(mSliceHeaderBuffer + sliceHeaderSize,
+ &sliceIdx,
+ sizeof(int32_t));
+ sliceHeaderSize += 4;
+
+ memcpy(mSliceHeaderBuffer + sliceHeaderSize,
+ &pFrameInfo->data,
+ sizeof(uint8_t*));
+ sliceHeaderSize += sizeof(uint8_t*);
+
+ memcpy(mSliceHeaderBuffer + sliceHeaderSize,
+ &pFrameInfo->nalus[i].offset,
+ sizeof(uint32_t));
+ sliceHeaderSize += sizeof(uint32_t);
+
+ memcpy(mSliceHeaderBuffer + sliceHeaderSize,
+ &pFrameInfo->nalus[i].length,
+ sizeof(uint32_t));
+ sliceHeaderSize += sizeof(uint32_t);
+
+ memcpy(mSliceHeaderBuffer + sliceHeaderSize,
+ pFrameInfo->nalus[i].slice_header,
+ sizeof(slice_header_t));
+ sliceHeaderSize += sizeof(slice_header_t);
+ if (pFrameInfo->nalus[i].type & 0x60) {
+ memcpy(mSliceHeaderBuffer+sliceHeaderSize, pFrameInfo->dec_ref_pic_marking, sizeof(dec_ref_pic_marking_t));
+ } else {
+ memset(mSliceHeaderBuffer+sliceHeaderSize, 0, sizeof(dec_ref_pic_marking_t));
+ }
+ sliceHeaderSize += sizeof(dec_ref_pic_marking_t);
+ sliceIdx++;
+ } else if (naluType >= h264_NAL_UNIT_TYPE_SEI && naluType <= h264_NAL_UNIT_TYPE_PPS) {
+ memcpy(mNaluHeaderBuffer + sizeAccumulated,
+ startcodePrefix,
+ STARTCODE_PREFIX_LEN);
+ sizeAccumulated += STARTCODE_PREFIX_LEN;
+ memcpy(mNaluHeaderBuffer + sizeAccumulated,
+ pFrameInfo->nalus[i].data,
+ pFrameInfo->nalus[i].length);
+ sizeAccumulated += pFrameInfo->nalus[i].length;
+ } else {
+ WTRACE("Failure: DECODE_FRAME_DROPPED");
+ return DECODE_FRAME_DROPPED;
+ }
+ }
+
+ vbp_data_h264 *data = NULL;
+
+ if (sizeAccumulated > 0) {
+ status = VideoDecoderBase::parseBuffer(
+ mNaluHeaderBuffer,
+ sizeAccumulated,
+ false,
+ (void**)&data);
+ CHECK_STATUS("VideoDecoderBase::parseBuffer");
+ }
+
+ if (sliceHeaderSize > 0) {
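+        // Terminate the record stream with a 0xFFFFFFFF sentinel word; the
+        // secure parser appears to treat it as an end-of-records marker
+        // (assumption based on usage, not on parser documentation).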
+ memset(mSliceHeaderBuffer + sliceHeaderSize, 0xFF, 4);
+ sliceHeaderSize += 4;
+ status = VideoDecoderBase::updateBuffer(
+ mSliceHeaderBuffer,
+ sliceHeaderSize,
+ (void**)&data);
+ CHECK_STATUS("VideoDecoderBase::updateBuffer");
+ }
+
+ if (data == NULL) {
+ ETRACE("Invalid data returned by parser!");
+ return DECODE_MEMORY_FAIL;
+ }
+
+ if (!mVAStarted) {
+ if (data->has_sps && data->has_pps) {
+ status = startVA(data);
+ CHECK_STATUS("startVA");
+ } else {
+ WTRACE("Can't start VA as either SPS or PPS is still not available.");
+ return DECODE_SUCCESS;
+ }
+ }
+ status = decodeFrame(buffer, data);
+ return status;
+}
+
+Decode_Status VideoDecoderAVCSecure::decodeSlice(vbp_data_h264 *data, uint32_t picIndex, uint32_t sliceIndex) {
+ Decode_Status status;
+ VAStatus vaStatus;
+ uint32_t bufferIDCount = 0;
+    // maximum 5 buffers to render a slice: picture parameter, IQMatrix, encryption parameter, slice parameter, slice data
+ VABufferID bufferIDs[5];
+
+ vbp_picture_data_h264 *picData = &(data->pic_data[picIndex]);
+ vbp_slice_data_h264 *sliceData = &(picData->slc_data[sliceIndex]);
+ VAPictureParameterBufferH264 *picParam = picData->pic_parms;
+ VASliceParameterBufferH264 *sliceParam = &(sliceData->slc_parms);
+ VAEncryptionParameterBuffer encryptParam;
+
+ if (sliceParam->first_mb_in_slice == 0 || mDecodingFrame == false) {
+ // either condition indicates start of a new frame
+ if (sliceParam->first_mb_in_slice != 0) {
+ WTRACE("The first slice is lost.");
+ // TODO: handle the first slice lost
+ }
+ if (mDecodingFrame) {
+ // interlace content, complete decoding the first field
+ vaStatus = vaEndPicture(mVADisplay, mVAContext);
+ CHECK_VA_STATUS("vaEndPicture");
+
+ // for interlace content, top field may be valid only after the second field is parsed
+            mAcquiredBuffer->pictureOrder = picParam->CurrPic.TopFieldOrderCnt;
+ }
+
+ // Update the reference frames and surface IDs for DPB and current frame
+ status = updateDPB(picParam);
+ CHECK_STATUS("updateDPB");
+
+ vaStatus = vaBeginPicture(mVADisplay, mVAContext, mAcquiredBuffer->renderBuffer.surface);
+ CHECK_VA_STATUS("vaBeginPicture");
+
+ // start decoding a frame
+ mDecodingFrame = true;
+
+ vaStatus = vaCreateBuffer(
+ mVADisplay,
+ mVAContext,
+ VAPictureParameterBufferType,
+ sizeof(VAPictureParameterBufferH264),
+ 1,
+ picParam,
+ &bufferIDs[bufferIDCount]);
+ CHECK_VA_STATUS("vaCreatePictureParameterBuffer");
+ bufferIDCount++;
+
+ vaStatus = vaCreateBuffer(
+ mVADisplay,
+ mVAContext,
+ VAIQMatrixBufferType,
+ sizeof(VAIQMatrixBufferH264),
+ 1,
+ data->IQ_matrix_buf,
+ &bufferIDs[bufferIDCount]);
+ CHECK_VA_STATUS("vaCreateIQMatrixBuffer");
+ bufferIDCount++;
+
+ if (mIsEncryptData) {
+ memset(&encryptParam, 0, sizeof(VAEncryptionParameterBuffer));
+ encryptParam.pavpCounterMode = 4;
+ encryptParam.pavpEncryptionType = 2;
+ encryptParam.hostEncryptMode = 2;
+ encryptParam.pavpHasBeenEnabled = 1;
+ encryptParam.app_id = 0;
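+            // mEncParam.iv carries the 128-bit AES-CTR counter produced by the
+            // secure parser; it is passed through to the driver unchanged.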
+ memcpy(encryptParam.pavpAesCounter, mEncParam.iv, 16);
+
+ vaStatus = vaCreateBuffer(
+ mVADisplay,
+ mVAContext,
+ (VABufferType)VAEncryptionParameterBufferType,
+ sizeof(VAEncryptionParameterBuffer),
+ 1,
+ &encryptParam,
+ &bufferIDs[bufferIDCount]);
+ CHECK_VA_STATUS("vaCreateEncryptionParameterBuffer");
+ bufferIDCount++;
+ }
+
+ vaStatus = vaCreateBuffer(
+ mVADisplay,
+ mVAContext,
+ VASliceDataBufferType,
+ mFrameSize, //size
+ 1, //num_elements
+ sliceData->buffer_addr + sliceData->slice_offset,
+ &bufferIDs[bufferIDCount]);
+ CHECK_VA_STATUS("vaCreateSliceDataBuffer");
+ bufferIDCount++;
+
+ }
+
+ vaStatus = vaCreateBuffer(
+ mVADisplay,
+ mVAContext,
+ VASliceParameterBufferType,
+ sizeof(VASliceParameterBufferH264Base),
+ 1,
+ sliceParam,
+ &bufferIDs[bufferIDCount]);
+
+ CHECK_VA_STATUS("vaCreateSliceParameterBuffer");
+ bufferIDCount++;
+
+ vaStatus = vaRenderPicture(
+ mVADisplay,
+ mVAContext,
+ bufferIDs,
+ bufferIDCount);
+ CHECK_VA_STATUS("vaRenderPicture");
+
+ return DECODE_SUCCESS;
+}
+
+Decode_Status VideoDecoderAVCSecure::getCodecSpecificConfigs(
+ VAProfile profile, VAConfigID *config)
+{
+ VAStatus vaStatus;
+ VAConfigAttrib attrib[2];
+
+ if (config == NULL) {
+ ETRACE("Invalid parameter!");
+ return DECODE_FAIL;
+ }
+
+ attrib[0].type = VAConfigAttribRTFormat;
+ attrib[0].value = VA_RT_FORMAT_YUV420;
+ attrib[1].type = VAConfigAttribDecSliceMode;
+ attrib[1].value = VA_DEC_SLICE_MODE_NORMAL;
+
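+    // Probe which slice modes the driver supports: prefer the short (BASE)
+    // format when available, otherwise fall back to the normal long format.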
+    vaStatus = vaGetConfigAttributes(mVADisplay, profile, VAEntrypointVLD, &attrib[1], 1);
+    CHECK_VA_STATUS("vaGetConfigAttributes");
+
+ if (attrib[1].value & VA_DEC_SLICE_MODE_BASE)
+ {
+ ITRACE("AVC short format used");
+ attrib[1].value = VA_DEC_SLICE_MODE_BASE;
+ } else if (attrib[1].value & VA_DEC_SLICE_MODE_NORMAL) {
+ ITRACE("AVC long format ssed");
+ attrib[1].value = VA_DEC_SLICE_MODE_NORMAL;
+ } else {
+ ETRACE("Unsupported Decode Slice Mode!");
+ return DECODE_FAIL;
+ }
+
+ vaStatus = vaCreateConfig(
+ mVADisplay,
+ profile,
+ VAEntrypointVLD,
+ &attrib[0],
+ 2,
+ config);
+ CHECK_VA_STATUS("vaCreateConfig");
+
+ return DECODE_SUCCESS;
+}
diff --git a/videodecoder/securevideo/cherrytrail/VideoDecoderAVCSecure.h b/videodecoder/securevideo/cherrytrail/VideoDecoderAVCSecure.h
new file mode 100644
index 0000000..2214075
--- /dev/null
+++ b/videodecoder/securevideo/cherrytrail/VideoDecoderAVCSecure.h
@@ -0,0 +1,44 @@
+/*
+* Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+#ifndef VIDEO_DECODER_AVC_SECURE_H_
+#define VIDEO_DECODER_AVC_SECURE_H_
+
+#include "VideoDecoderAVC.h"
+#include "secvideoparser.h"
+
+class VideoDecoderAVCSecure : public VideoDecoderAVC {
+public:
+ VideoDecoderAVCSecure(const char *mimeType);
+ virtual ~VideoDecoderAVCSecure();
+ virtual Decode_Status start(VideoConfigBuffer *buffer);
+ virtual void stop(void);
+ virtual Decode_Status decode(VideoDecodeBuffer *buffer);
+
+protected:
+    virtual Decode_Status getCodecSpecificConfigs(VAProfile profile, VAConfigID *config);
+
+private:
+ virtual Decode_Status decodeSlice(vbp_data_h264 *data, uint32_t picIndex, uint32_t sliceIndex);
+private:
+ pavp_info_t mEncParam;
+ uint8_t *mNaluHeaderBuffer;
+ uint8_t *mSliceHeaderBuffer;
+ uint32_t mIsEncryptData;
+ uint32_t mFrameSize;
+};
+
+#endif /* VIDEO_DECODER_AVC_SECURE_H_ */
diff --git a/videodecoder/securevideo/cherrytrail/secvideoparser.h b/videodecoder/securevideo/cherrytrail/secvideoparser.h
new file mode 100644
index 0000000..f27580a
--- /dev/null
+++ b/videodecoder/securevideo/cherrytrail/secvideoparser.h
@@ -0,0 +1,150 @@
+/*
+* Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+#ifndef SEC_VIDEO_PARSER_H_
+#define SEC_VIDEO_PARSER_H_
+
+#include <stdint.h>
+
+/* H264 start code values */
+typedef enum _h264_nal_unit_type
+{
+ h264_NAL_UNIT_TYPE_unspecified = 0,
+ h264_NAL_UNIT_TYPE_SLICE,
+ h264_NAL_UNIT_TYPE_DPA,
+ h264_NAL_UNIT_TYPE_DPB,
+ h264_NAL_UNIT_TYPE_DPC,
+ h264_NAL_UNIT_TYPE_IDR,
+ h264_NAL_UNIT_TYPE_SEI,
+ h264_NAL_UNIT_TYPE_SPS,
+ h264_NAL_UNIT_TYPE_PPS,
+ h264_NAL_UNIT_TYPE_Acc_unit_delimiter,
+ h264_NAL_UNIT_TYPE_EOSeq,
+ h264_NAL_UNIT_TYPE_EOstream,
+ h264_NAL_UNIT_TYPE_filler_data,
+ h264_NAL_UNIT_TYPE_SPS_extension,
+ h264_NAL_UNIT_TYPE_ACP = 19,
+ h264_NAL_UNIT_TYPE_Slice_extension = 20
+} h264_nal_unit_type_t;
+
+#define MAX_OP 16
+
+enum dec_ref_pic_marking_flags {
+ IDR_PIC_FLAG = 0,
+ NO_OUTPUT_OF_PRIOR_PICS_FLAG,
+ LONG_TERM_REFERENCE_FLAG,
+ ADAPTIVE_REF_PIC_MARKING_MODE_FLAG
+};
+
+typedef struct _dec_ref_pic_marking_t {
+ union {
+ uint8_t flags;
+ struct {
+ uint8_t idr_pic_flag:1;
+ uint8_t no_output_of_prior_pics_flag:1;
+ uint8_t long_term_reference_flag:1;
+ uint8_t adaptive_ref_pic_marking_mode_flag:1;
+ };
+ };
+ struct {
+ uint8_t memory_management_control_operation;
+ union {
+ struct {
+ uint8_t difference_of_pic_nums_minus1;
+ } op1;
+ struct {
+ uint8_t long_term_pic_num;
+ } op2;
+ struct {
+ uint8_t difference_of_pic_nums_minus1;
+ uint8_t long_term_frame_idx;
+ } op3;
+ struct {
+ uint8_t max_long_term_frame_idx_plus1;
+ } op4;
+ struct {
+ uint8_t long_term_frame_idx;
+ } op6;
+ };
+ } op[MAX_OP];
+} dec_ref_pic_marking_t;
+
+enum slice_header_flags {
+ FIELD_PIC_FLAG = 0,
+ BOTTOM_FIELD_FLAG
+};
+
+typedef struct _slice_header_t {
+ uint8_t nal_unit_type;
+ uint8_t pps_id;
+    uint8_t padding; // TODO: padding is needed because the flags field in the secfw implementation is a big-endian uint16_t
+ union {
+ uint8_t flags;
+ struct {
+ uint8_t field_pic_flag:1;
+ uint8_t bottom_field_flag:1;
+ };
+ };
+ uint32_t first_mb_in_slice;
+ uint32_t frame_num;
+ uint16_t idr_pic_id;
+ uint16_t pic_order_cnt_lsb;
+ int32_t delta_pic_order_cnt[2];
+ int32_t delta_pic_order_cnt_bottom;
+} slice_header_t;
+
+typedef struct {
+ uint8_t type;
+ uint32_t offset;
+ uint8_t* data;
+ uint32_t length;
+ slice_header_t* slice_header;
+} nalu_info_t;
+
+typedef struct {
+ uint32_t iv[4];
+ uint32_t mode;
+ uint32_t app_id;
+} pavp_info_t;
+
+#define MAX_NUM_NALUS 20
+
+typedef struct {
+ uint8_t* data;
+ uint32_t length;
+ pavp_info_t* pavp;
+ dec_ref_pic_marking_t* dec_ref_pic_marking;
+ uint32_t num_nalus;
+ nalu_info_t nalus[MAX_NUM_NALUS];
+} frame_info_t;
+
+int parser_init(void);
+int parse_frame(uint8_t* frame, uint32_t frame_size, uint8_t* nalu_data, uint32_t* nalu_data_size);
+
+// DEBUG PRINTING
+void print_slice_header(slice_header_t* slice_header);
+void print_dec_ref_pic_marking(dec_ref_pic_marking_t* dec_ref_pic_marking);
+void print_data_bytes(uint8_t* data, uint32_t count);
+void print_nalu_data(uint8_t* nalu_data, uint32_t nalu_data_size);
+
+// BYTESWAPPING
+uint16_t byteswap_16(uint16_t word);
+uint32_t byteswap_32(uint32_t dword);
+void byteswap_slice_header(slice_header_t* slice_header);
+void byteswap_dec_ref_pic_marking(dec_ref_pic_marking_t* dec_ref_pic_marking);
+void byteswap_nalu_data(uint8_t* nalu_data, uint32_t nalu_data_size);
+
+#endif /* SEC_VIDEO_PARSER_H_ */
diff --git a/videodecoder/securevideo/cherrytrail/va_private.h b/videodecoder/securevideo/cherrytrail/va_private.h
new file mode 100644
index 0000000..e53e31d
--- /dev/null
+++ b/videodecoder/securevideo/cherrytrail/va_private.h
@@ -0,0 +1,63 @@
+/*
+* Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+#ifndef __VA_PRIVATE_H__
+#define __VA_PRIVATE_H__
+#include <va/va.h>
+#define ENABLE_PAVP_LINUX 1
+// Misc parameter for encoder
+#define VAEncMiscParameterTypePrivate -2
+// encryption parameters for PAVP
+#define VAEncryptionParameterBufferType -3
+
+typedef struct _VAEncMiscParameterPrivate
+{
+ unsigned int target_usage; // Valid values 1-7 for AVC & MPEG2.
+ unsigned int reserved[7]; // Reserved for future use.
+} VAEncMiscParameterPrivate;
+
+/* VAEncryptionParameterBuffer */
+typedef struct _VAEncryptionParameterBuffer
+{
+ //Not used currently
+ unsigned int encryptionSupport;
+ //Not used currently
+ unsigned int hostEncryptMode;
+ // For IV, Counter input
+ unsigned int pavpAesCounter[2][4];
+ // not used currently
+ unsigned int pavpIndex;
+    // PAVP mode: CTR, CBC, DEDE, etc.
+ unsigned int pavpCounterMode;
+ unsigned int pavpEncryptionType;
+ // not used currently
+ unsigned int pavpInputSize[2];
+ // not used currently
+ unsigned int pavpBufferSize[2];
+ // not used currently
+ VABufferID pvap_buf;
+ // set to TRUE if protected media
+ unsigned int pavpHasBeenEnabled;
+ // not used currently
+ unsigned int IntermmediatedBufReq;
+ // not used currently
+ unsigned int uiCounterIncrement;
+ // AppId: PAVP sessin Index from application
+ unsigned int app_id;
+
+} VAEncryptionParameterBuffer;
+
+#endif
diff --git a/videodecoder/securevideo/clovertrail/VideoDecoderAVCSecure.cpp b/videodecoder/securevideo/clovertrail/VideoDecoderAVCSecure.cpp
new file mode 100644
index 0000000..d9da2ac
--- /dev/null
+++ b/videodecoder/securevideo/clovertrail/VideoDecoderAVCSecure.cpp
@@ -0,0 +1,507 @@
+/*
+* Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+#include "VideoDecoderAVCSecure.h"
+#include "VideoDecoderTrace.h"
+#include <string.h>
+
+
+#define STARTCODE_00 0x00
+#define STARTCODE_01 0x01
+#define STARTCODE_PREFIX_LEN 3
+#define NALU_TYPE_MASK 0x1F
+
+
+// masks for little endian, to mask the second and fourth bytes in the byte stream
+#define STARTCODE_MASK0 0xFF000000 //0x00FF0000
+#define STARTCODE_MASK1 0x0000FF00 //0x000000FF
+
+
+typedef enum {
+ NAL_UNIT_TYPE_unspecified0 = 0,
+ NAL_UNIT_TYPE_SLICE,
+ NAL_UNIT_TYPE_DPA,
+ NAL_UNIT_TYPE_DPB,
+ NAL_UNIT_TYPE_DPC,
+ NAL_UNIT_TYPE_IDR,
+ NAL_UNIT_TYPE_SEI,
+ NAL_UNIT_TYPE_SPS,
+ NAL_UNIT_TYPE_PPS,
+ NAL_UNIT_TYPE_Acc_unit_delimiter,
+ NAL_UNIT_TYPE_EOSeq,
+ NAL_UNIT_TYPE_EOstream,
+ NAL_UNIT_TYPE_filler_data,
+ NAL_UNIT_TYPE_SPS_extension,
+ NAL_UNIT_TYPE_Reserved14,
+ NAL_UNIT_TYPE_Reserved15,
+ NAL_UNIT_TYPE_Reserved16,
+ NAL_UNIT_TYPE_Reserved17,
+ NAL_UNIT_TYPE_Reserved18,
+ NAL_UNIT_TYPE_ACP,
+ NAL_UNIT_TYPE_Reserved20,
+ NAL_UNIT_TYPE_Reserved21,
+ NAL_UNIT_TYPE_Reserved22,
+ NAL_UNIT_TYPE_Reserved23,
+ NAL_UNIT_TYPE_unspecified24,
+} NAL_UNIT_TYPE;
+
+#ifndef min
+#define min(X, Y) ((X) < (Y) ? (X) : (Y))
+#endif
+
+
+static const uint8_t startcodePrefix[STARTCODE_PREFIX_LEN] = {0x00, 0x00, 0x01};
+
+
+VideoDecoderAVCSecure::VideoDecoderAVCSecure(const char *mimeType)
+ : VideoDecoderAVC(mimeType),
+ mNaluHeaderBuffer(NULL),
+ mInputBuffer(NULL) {
+
+ memset(&mMetadata, 0, sizeof(NaluMetadata));
+ memset(&mByteStream, 0, sizeof(NaluByteStream));
+}
+
+VideoDecoderAVCSecure::~VideoDecoderAVCSecure() {
+}
+
+Decode_Status VideoDecoderAVCSecure::start(VideoConfigBuffer *buffer) {
+ Decode_Status status = VideoDecoderAVC::start(buffer);
+ if (status != DECODE_SUCCESS) {
+ return status;
+ }
+
+ mMetadata.naluInfo = new NaluInfo [MAX_NALU_NUMBER];
+ mByteStream.byteStream = new uint8_t [MAX_NALU_HEADER_BUFFER];
+ mNaluHeaderBuffer = new uint8_t [MAX_NALU_HEADER_BUFFER];
+
+ if (mMetadata.naluInfo == NULL ||
+ mByteStream.byteStream == NULL ||
+ mNaluHeaderBuffer == NULL) {
+ ETRACE("Failed to allocate memory.");
+ // TODO: release all allocated memory
+ return DECODE_MEMORY_FAIL;
+ }
+ return status;
+}
+
+void VideoDecoderAVCSecure::stop(void) {
+ VideoDecoderAVC::stop();
+
+ if (mMetadata.naluInfo) {
+ delete [] mMetadata.naluInfo;
+ mMetadata.naluInfo = NULL;
+ }
+
+ if (mByteStream.byteStream) {
+ delete [] mByteStream.byteStream;
+ mByteStream.byteStream = NULL;
+ }
+
+ if (mNaluHeaderBuffer) {
+ delete [] mNaluHeaderBuffer;
+ mNaluHeaderBuffer = NULL;
+ }
+}
+
+Decode_Status VideoDecoderAVCSecure::decode(VideoDecodeBuffer *buffer) {
+ Decode_Status status;
+ int32_t sizeAccumulated = 0;
+ int32_t sizeLeft = 0;
+ uint8_t *pByteStream = NULL;
+ NaluInfo *pNaluInfo = mMetadata.naluInfo;
+
+ if (buffer->flag & IS_SECURE_DATA) {
+ pByteStream = buffer->data;
+ sizeLeft = buffer->size;
+ mInputBuffer = NULL;
+ } else {
+ status = parseAnnexBStream(buffer->data, buffer->size, &mByteStream);
+ CHECK_STATUS("parseAnnexBStream");
+ pByteStream = mByteStream.byteStream;
+ sizeLeft = mByteStream.streamPos;
+ mInputBuffer = buffer->data;
+ }
+ if (sizeLeft < 4) {
+ ETRACE("Not enough data to read number of NALU.");
+ return DECODE_INVALID_DATA;
+ }
+
+ // read number of NALU
+ memcpy(&(mMetadata.naluNumber), pByteStream, sizeof(int32_t));
+ pByteStream += 4;
+ sizeLeft -= 4;
+
+ if (mMetadata.naluNumber == 0) {
+ WTRACE("Number of NALU is ZERO!");
+ return DECODE_SUCCESS;
+ }
+
+ for (int32_t i = 0; i < mMetadata.naluNumber; i++) {
+ if (sizeLeft < 12) {
+ ETRACE("Not enough data to parse NALU offset, size, header length for NALU %d, left = %d", i, sizeLeft);
+ return DECODE_INVALID_DATA;
+ }
+ sizeLeft -= 12;
+ // read NALU offset
+ memcpy(&(pNaluInfo->naluOffset), pByteStream, sizeof(int32_t));
+ pByteStream += 4;
+
+ // read NALU size
+ memcpy(&(pNaluInfo->naluLen), pByteStream, sizeof(int32_t));
+ pByteStream += 4;
+
+ // read NALU header length
+ memcpy(&(pNaluInfo->naluHeaderLen), pByteStream, sizeof(int32_t));
+ pByteStream += 4;
+
+ if (sizeLeft < pNaluInfo->naluHeaderLen) {
+ ETRACE("Not enough data to copy NALU header for %d, left = %d, header len = %d", i, sizeLeft, pNaluInfo->naluHeaderLen);
+ return DECODE_INVALID_DATA;
+ }
+
+ sizeLeft -= pNaluInfo->naluHeaderLen;
+
+ if (pNaluInfo->naluHeaderLen) {
+ // copy start code prefix to buffer
+ memcpy(mNaluHeaderBuffer + sizeAccumulated,
+ startcodePrefix,
+ STARTCODE_PREFIX_LEN);
+ sizeAccumulated += STARTCODE_PREFIX_LEN;
+
+ // copy NALU header
+ memcpy(mNaluHeaderBuffer + sizeAccumulated, pByteStream, pNaluInfo->naluHeaderLen);
+ pByteStream += pNaluInfo->naluHeaderLen;
+
+ sizeAccumulated += pNaluInfo->naluHeaderLen;
+ } else {
+ WTRACE("header len is zero for NALU %d", i);
+ }
+
+ // for next NALU
+ pNaluInfo++;
+ }
+
+ buffer->data = mNaluHeaderBuffer;
+ buffer->size = sizeAccumulated;
+
+ return VideoDecoderAVC::decode(buffer);
+}
+
+
+Decode_Status VideoDecoderAVCSecure::decodeSlice(vbp_data_h264 *data, uint32_t picIndex, uint32_t sliceIndex) {
+
+ Decode_Status status;
+ VAStatus vaStatus;
+ uint32_t bufferIDCount = 0;
+ // maximum 4 buffers to render a slice: picture parameter, IQMatrix, slice parameter, slice data
+ VABufferID bufferIDs[4];
+
+ vbp_picture_data_h264 *picData = &(data->pic_data[picIndex]);
+ vbp_slice_data_h264 *sliceData = &(picData->slc_data[sliceIndex]);
+ VAPictureParameterBufferH264 *picParam = picData->pic_parms;
+ VASliceParameterBufferH264 *sliceParam = &(sliceData->slc_parms);
+
+ if (sliceParam->first_mb_in_slice == 0 || mDecodingFrame == false) {
+ // either condition indicates start of a new frame
+ if (sliceParam->first_mb_in_slice != 0) {
+ WTRACE("The first slice is lost.");
+ // TODO: handle the first slice lost
+ }
+ if (mDecodingFrame) {
+ // interlace content, complete decoding the first field
+ vaStatus = vaEndPicture(mVADisplay, mVAContext);
+ CHECK_VA_STATUS("vaEndPicture");
+
+ // for interlace content, top field may be valid only after the second field is parsed
+            mAcquiredBuffer->pictureOrder = picParam->CurrPic.TopFieldOrderCnt;
+ }
+
+ // Check there is no reference frame loss before decoding a frame
+
+ // Update the reference frames and surface IDs for DPB and current frame
+ status = updateDPB(picParam);
+ CHECK_STATUS("updateDPB");
+
+        // We have to provide a hacked DPB rather than the complete DPB to libva as a workaround
+ status = updateReferenceFrames(picData);
+ CHECK_STATUS("updateReferenceFrames");
+
+ vaStatus = vaBeginPicture(mVADisplay, mVAContext, mAcquiredBuffer->renderBuffer.surface);
+ CHECK_VA_STATUS("vaBeginPicture");
+
+ // start decoding a frame
+ mDecodingFrame = true;
+
+ vaStatus = vaCreateBuffer(
+ mVADisplay,
+ mVAContext,
+ VAPictureParameterBufferType,
+ sizeof(VAPictureParameterBufferH264),
+ 1,
+ picParam,
+ &bufferIDs[bufferIDCount]);
+ CHECK_VA_STATUS("vaCreatePictureParameterBuffer");
+ bufferIDCount++;
+
+ vaStatus = vaCreateBuffer(
+ mVADisplay,
+ mVAContext,
+ VAIQMatrixBufferType,
+ sizeof(VAIQMatrixBufferH264),
+ 1,
+ data->IQ_matrix_buf,
+ &bufferIDs[bufferIDCount]);
+ CHECK_VA_STATUS("vaCreateIQMatrixBuffer");
+ bufferIDCount++;
+ }
+
+ status = setReference(sliceParam);
+ CHECK_STATUS("setReference");
+
+ // find which naluinfo is correlated to current slice
+ int naluIndex = 0;
+ uint32_t accumulatedHeaderLen = 0;
+ uint32_t headerLen = 0;
+ for (; naluIndex < mMetadata.naluNumber; naluIndex++) {
+ headerLen = mMetadata.naluInfo[naluIndex].naluHeaderLen;
+ if (headerLen == 0) {
+ WTRACE("lenght of current NAL unit is 0.");
+ continue;
+ }
+ accumulatedHeaderLen += STARTCODE_PREFIX_LEN;
+ if (accumulatedHeaderLen + headerLen > sliceData->slice_offset) {
+ break;
+ }
+ accumulatedHeaderLen += headerLen;
+ }
+
+ if (sliceData->slice_offset != accumulatedHeaderLen) {
+ WTRACE("unexpected slice offset %d, accumulatedHeaderLen = %d", sliceData->slice_offset, accumulatedHeaderLen);
+ }
+
+ sliceParam->slice_data_size = mMetadata.naluInfo[naluIndex].naluLen;
+ sliceData->slice_size = sliceParam->slice_data_size;
+
+ // no need to update:
+ // sliceParam->slice_data_offset - 0 always
+ // sliceParam->slice_data_bit_offset - relative to sliceData->slice_offset
+
+ vaStatus = vaCreateBuffer(
+ mVADisplay,
+ mVAContext,
+ VASliceParameterBufferType,
+ sizeof(VASliceParameterBufferH264),
+ 1,
+ sliceParam,
+ &bufferIDs[bufferIDCount]);
+ CHECK_VA_STATUS("vaCreateSliceParameterBuffer");
+ bufferIDCount++;
+
+    // sliceData->slice_offset - accumulatedHeaderLen is the absolute offset to the start code of the current NAL unit;
+    // sliceOffset points to the first byte of the NAL unit itself
+ uint32_t sliceOffset = mMetadata.naluInfo[naluIndex].naluOffset;
+ if (mInputBuffer != NULL) {
+ vaStatus = vaCreateBuffer(
+ mVADisplay,
+ mVAContext,
+ VASliceDataBufferType,
+ sliceData->slice_size, //size
+ 1, //num_elements
+ mInputBuffer + sliceOffset,
+ &bufferIDs[bufferIDCount]);
+ } else {
+ vaStatus = vaCreateBuffer(
+ mVADisplay,
+ mVAContext,
+ VAProtectedSliceDataBufferType,
+ sliceData->slice_size, //size
+ 1, //num_elements
+ (uint8_t*)sliceOffset, // IMR offset
+ &bufferIDs[bufferIDCount]);
+ }
+ CHECK_VA_STATUS("vaCreateSliceDataBuffer");
+ bufferIDCount++;
+
+ vaStatus = vaRenderPicture(
+ mVADisplay,
+ mVAContext,
+ bufferIDs,
+ bufferIDCount);
+ CHECK_VA_STATUS("vaRenderPicture");
+
+ return DECODE_SUCCESS;
+}
+
+
+// Parse byte string pattern "0x000001" (3 bytes) in the current buffer.
+// Returns offset of position following the pattern in the buffer if pattern is found or -1 if not found.
+int32_t VideoDecoderAVCSecure::findNalUnitOffset(uint8_t *stream, int32_t offset, int32_t length) {
+ uint8_t *ptr;
+ uint32_t left = 0, data = 0, phase = 0;
+ uint8_t mask1 = 0, mask2 = 0;
+
+    /* Meaning of phase:
+        0: initial state, the "0x000001" pattern has not been found so far;
+        1: one "0x00" byte has been found;
+        2: two or more consecutive "0x00" bytes have been found;
+        3: the "0x000001" pattern has been found;
+        4: at least one more byte follows "0x000001";
+    */
+
+ left = length;
+ ptr = (uint8_t *) (stream + offset);
+ phase = 0;
+
+ // parse until there is more data and start code not found
+ while ((left > 0) && (phase < 3)) {
+        // Check if the address is 32-bit aligned and phase == 0; if that's the case we can check 4 bytes at a time instead of one.
+ if (((((uint32_t)ptr) & 0x3) == 0) && (phase == 0)) {
+ while (left > 3) {
+ data = *((uint32_t *)ptr);
+ mask1 = (STARTCODE_00 != (data & STARTCODE_MASK0));
+ mask2 = (STARTCODE_00 != (data & STARTCODE_MASK1));
+                // If the second and fourth bytes are not zeros then we cannot have a start code here,
+ // as we need two consecutive zero bytes for a start code pattern.
+ if (mask1 && mask2) {
+ // skip 4 bytes and start over
+ ptr += 4;
+ left -=4;
+ continue;
+ } else {
+ break;
+ }
+ }
+ }
+
+ // At this point either data is not on a 32-bit boundary or phase > 0 so we look at one byte at a time
+ if (left > 0) {
+ if (*ptr == STARTCODE_00) {
+ phase++;
+ if (phase > 2) {
+ // more than 2 consecutive '0x00' bytes is found
+ phase = 2;
+ }
+ } else if ((*ptr == STARTCODE_01) && (phase == 2)) {
+ // start code is found
+ phase = 3;
+ } else {
+ // reset lookup
+ phase = 0;
+ }
+ ptr++;
+ left--;
+ }
+ }
+
+ if ((left > 0) && (phase == 3)) {
+ phase = 4;
+ // return offset of position following the pattern in the buffer which matches "0x000001" byte string
+ return (int32_t)(ptr - stream);
+ }
+ return -1;
+}
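+
+// Minimal usage sketch (hypothetical buffer, not part of the driver):
+//   uint8_t s[] = {0x00, 0x00, 0x01, 0x67, 0x42};
+//   int32_t pos = findNalUnitOffset(s, 0, sizeof(s));
+//   // pos == 3, the offset of the NAL header byte (0x67) following the start code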
+
+
+Decode_Status VideoDecoderAVCSecure::copyNaluHeader(uint8_t *stream, NaluByteStream *naluStream) {
+ uint8_t naluType;
+ int32_t naluHeaderLen;
+
+ naluType = *(uint8_t *)(stream + naluStream->naluOffset);
+ naluType &= NALU_TYPE_MASK;
+ // first update nalu header length based on nalu type
+ if (naluType >= NAL_UNIT_TYPE_SLICE && naluType <= NAL_UNIT_TYPE_IDR) {
+ // coded slice, return only up to MAX_SLICE_HEADER_SIZE bytes
+ naluHeaderLen = min(naluStream->naluLen, MAX_SLICE_HEADER_SIZE);
+ } else if (naluType >= NAL_UNIT_TYPE_SEI && naluType <= NAL_UNIT_TYPE_PPS) {
+ //sps, pps, sei, etc, return the entire NAL unit in clear
+ naluHeaderLen = naluStream->naluLen;
+ } else {
+ return DECODE_FRAME_DROPPED;
+ }
+
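+    // Append one record: [naluOffset][naluLen][naluHeaderLen] followed by
+    // naluHeaderLen bytes of clear header data.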
+ memcpy(naluStream->byteStream + naluStream->streamPos, &(naluStream->naluOffset), sizeof(int32_t));
+ naluStream->streamPos += 4;
+
+ memcpy(naluStream->byteStream + naluStream->streamPos, &(naluStream->naluLen), sizeof(int32_t));
+ naluStream->streamPos += 4;
+
+ memcpy(naluStream->byteStream + naluStream->streamPos, &naluHeaderLen, sizeof(int32_t));
+ naluStream->streamPos += 4;
+
+ if (naluHeaderLen) {
+ memcpy(naluStream->byteStream + naluStream->streamPos, (uint8_t*)(stream + naluStream->naluOffset), naluHeaderLen);
+ naluStream->streamPos += naluHeaderLen;
+ }
+ return DECODE_SUCCESS;
+}
+
+
+// parse a start-code-prefixed stream, also known as an Annex B byte stream, commonly used in AVI, ES, and MPEG-2 TS containers
+Decode_Status VideoDecoderAVCSecure::parseAnnexBStream(uint8_t *stream, int32_t length, NaluByteStream *naluStream) {
+ int32_t naluOffset, offset, left;
+ NaluInfo *info;
+ uint32_t ret = DECODE_SUCCESS;
+
+ naluOffset = 0;
+ offset = 0;
+ left = length;
+
+ // leave 4 bytes to copy nalu count
+ naluStream->streamPos = 4;
+ naluStream->naluCount = 0;
+ memset(naluStream->byteStream, 0, MAX_NALU_HEADER_BUFFER);
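+    // Resulting byteStream layout (sketch): a 4-byte naluCount followed by one
+    // record per NALU written by copyNaluHeader; naluCount is patched in at the end.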
+
+ for (; ;) {
+ naluOffset = findNalUnitOffset(stream, offset, left);
+ if (naluOffset == -1) {
+ break;
+ }
+
+ if (naluStream->naluCount == 0) {
+ naluStream->naluOffset = naluOffset;
+ } else {
+ naluStream->naluLen = naluOffset - naluStream->naluOffset - STARTCODE_PREFIX_LEN;
+ ret = copyNaluHeader(stream, naluStream);
+ if (ret != DECODE_SUCCESS && ret != DECODE_FRAME_DROPPED) {
+ LOGW("copyNaluHeader returned %d", ret);
+ return ret;
+ }
+ // starting position for next NALU
+ naluStream->naluOffset = naluOffset;
+ }
+
+ if (ret == DECODE_SUCCESS) {
+ naluStream->naluCount++;
+ }
+
+ // update next lookup position and length
+ offset = naluOffset + 1; // skip one byte of NAL unit type
+ left = length - offset;
+ }
+
+ if (naluStream->naluCount > 0) {
+ naluStream->naluLen = length - naluStream->naluOffset;
+ memcpy(naluStream->byteStream, &(naluStream->naluCount), sizeof(int32_t));
+ // ignore return value, either DECODE_SUCCESS or DECODE_FRAME_DROPPED
+ copyNaluHeader(stream, naluStream);
+ return DECODE_SUCCESS;
+ }
+
+ LOGW("number of valid NALU is 0!");
+ return DECODE_SUCCESS;
+}
+
diff --git a/videodecoder/securevideo/clovertrail/VideoDecoderAVCSecure.h b/videodecoder/securevideo/clovertrail/VideoDecoderAVCSecure.h
new file mode 100644
index 0000000..ee16073
--- /dev/null
+++ b/videodecoder/securevideo/clovertrail/VideoDecoderAVCSecure.h
@@ -0,0 +1,75 @@
+/*
+* Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+#ifndef VIDEO_DECODER_AVC_SECURE_H_
+#define VIDEO_DECODER_AVC_SECURE_H_
+
+#include "VideoDecoderAVC.h"
+
+
+class VideoDecoderAVCSecure : public VideoDecoderAVC {
+public:
+ VideoDecoderAVCSecure(const char *mimeType);
+ virtual ~VideoDecoderAVCSecure();
+
+ virtual Decode_Status start(VideoConfigBuffer *buffer);
+ virtual void stop(void);
+
+ // data in the decoded buffer is all encrypted.
+ virtual Decode_Status decode(VideoDecodeBuffer *buffer);
+
+private:
+ enum {
+ MAX_SLICE_HEADER_SIZE = 30,
+ MAX_NALU_HEADER_BUFFER = 8192,
+ MAX_NALU_NUMBER = 400, // > 4096/12
+ };
+
+ // Information of Network Abstraction Layer Unit
+ struct NaluInfo {
+ int32_t naluOffset; // offset of NAL unit in the firewalled buffer
+ int32_t naluLen; // length of NAL unit
+ int32_t naluHeaderLen; // length of NAL unit header
+ };
+
+ struct NaluMetadata {
+ NaluInfo *naluInfo;
+ int32_t naluNumber; // number of NAL units
+ };
+
+ struct NaluByteStream {
+ int32_t naluOffset;
+ int32_t naluLen;
+ int32_t streamPos;
+        uint8_t *byteStream; // 4 bytes of naluCount, then per NALU: 4 bytes of naluOffset, 4 bytes of naluLen, 4 bytes of naluHeaderLen, followed by naluHeaderData
+ int32_t naluCount;
+ };
+
+ virtual Decode_Status decodeSlice(vbp_data_h264 *data, uint32_t picIndex, uint32_t sliceIndex);
+ int32_t findNalUnitOffset(uint8_t *stream, int32_t offset, int32_t length);
+ Decode_Status copyNaluHeader(uint8_t *stream, NaluByteStream *naluStream);
+ Decode_Status parseAnnexBStream(uint8_t *stream, int32_t length, NaluByteStream *naluStream);
+
+private:
+ NaluMetadata mMetadata;
+ NaluByteStream mByteStream;
+ uint8_t *mNaluHeaderBuffer;
+ uint8_t *mInputBuffer;
+};
+
+
+
+#endif /* VIDEO_DECODER_AVC_SECURE_H_ */
diff --git a/videodecoder/securevideo/merrifield/VideoDecoderAVCSecure.cpp b/videodecoder/securevideo/merrifield/VideoDecoderAVCSecure.cpp
new file mode 100755
index 0000000..649402d
--- /dev/null
+++ b/videodecoder/securevideo/merrifield/VideoDecoderAVCSecure.cpp
@@ -0,0 +1,858 @@
+/*
+* Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+#include <va/va.h>
+#include "VideoDecoderBase.h"
+#include "VideoDecoderAVC.h"
+#include "VideoDecoderTrace.h"
+#include "vbp_loader.h"
+#include "VideoDecoderAVCSecure.h"
+#include "VideoFrameInfo.h"
+
+#define MAX_SLICEHEADER_BUFFER_SIZE 4096
+#define STARTCODE_PREFIX_LEN 3
+#define NALU_TYPE_MASK 0x1F
+#define MAX_NALU_HEADER_BUFFER 8192
+static const uint8_t startcodePrefix[STARTCODE_PREFIX_LEN] = {0x00, 0x00, 0x01};
+
+/* H264 start code values */
+typedef enum _h264_nal_unit_type
+{
+ h264_NAL_UNIT_TYPE_unspecified = 0,
+ h264_NAL_UNIT_TYPE_SLICE,
+ h264_NAL_UNIT_TYPE_DPA,
+ h264_NAL_UNIT_TYPE_DPB,
+ h264_NAL_UNIT_TYPE_DPC,
+ h264_NAL_UNIT_TYPE_IDR,
+ h264_NAL_UNIT_TYPE_SEI,
+ h264_NAL_UNIT_TYPE_SPS,
+ h264_NAL_UNIT_TYPE_PPS,
+ h264_NAL_UNIT_TYPE_Acc_unit_delimiter,
+ h264_NAL_UNIT_TYPE_EOSeq,
+ h264_NAL_UNIT_TYPE_EOstream,
+ h264_NAL_UNIT_TYPE_filler_data,
+ h264_NAL_UNIT_TYPE_SPS_extension,
+ h264_NAL_UNIT_TYPE_ACP = 19,
+ h264_NAL_UNIT_TYPE_Slice_extension = 20
+} h264_nal_unit_type_t;
+
+VideoDecoderAVCSecure::VideoDecoderAVCSecure(const char *mimeType)
+    : VideoDecoderAVC(mimeType) {
+ mFrameSize = 0;
+ mFrameData = NULL;
+ mIsEncryptData = 0;
+ mClearData = NULL;
+ mCachedHeader = NULL;
+ setParserType(VBP_H264SECURE);
+ mFrameIdx = 0;
+ mModularMode = 0;
+ mSliceNum = 0;
+}
+
+Decode_Status VideoDecoderAVCSecure::start(VideoConfigBuffer *buffer) {
+ VTRACE("VideoDecoderAVCSecure::start");
+
+ Decode_Status status = VideoDecoderAVC::start(buffer);
+ if (status != DECODE_SUCCESS) {
+ return status;
+ }
+
+ mClearData = new uint8_t [MAX_NALU_HEADER_BUFFER];
+ if (mClearData == NULL) {
+ ETRACE("Failed to allocate memory for mClearData");
+ return DECODE_MEMORY_FAIL;
+ }
+
+    mCachedHeader = new uint8_t [MAX_SLICEHEADER_BUFFER_SIZE];
+ if (mCachedHeader == NULL) {
+ ETRACE("Failed to allocate memory for mCachedHeader");
+ return DECODE_MEMORY_FAIL;
+ }
+
+ return status;
+}
+
+void VideoDecoderAVCSecure::stop(void) {
+ VTRACE("VideoDecoderAVCSecure::stop");
+ VideoDecoderAVC::stop();
+
+ if (mClearData) {
+ delete [] mClearData;
+ mClearData = NULL;
+ }
+
+ if (mCachedHeader) {
+ delete [] mCachedHeader;
+ mCachedHeader = NULL;
+ }
+}
+
+Decode_Status VideoDecoderAVCSecure::processModularInputBuffer(VideoDecodeBuffer *buffer, vbp_data_h264 **data)
+{
+ VTRACE("processModularInputBuffer +++");
+ Decode_Status status;
+ int32_t clear_data_size = 0;
+ uint8_t *clear_data = NULL;
+
+ int32_t nalu_num = 0;
+ uint8_t nalu_type = 0;
+ int32_t nalu_offset = 0;
+ uint32_t nalu_size = 0;
+ uint8_t naluType = 0;
+ uint8_t *nalu_data = NULL;
+ uint32_t sliceidx = 0;
+
+ frame_info_t *pFrameInfo = NULL;
+ mSliceNum = 0;
+ memset(&mSliceInfo, 0, sizeof(mSliceInfo));
+ mIsEncryptData = 0;
+
+ if (buffer->flag & IS_SECURE_DATA) {
+ VTRACE("Decoding protected video ...");
+ pFrameInfo = (frame_info_t *) buffer->data;
+ if (pFrameInfo == NULL) {
+ ETRACE("Invalid parameter: pFrameInfo is NULL!");
+ return DECODE_MEMORY_FAIL;
+ }
+
+ mFrameData = pFrameInfo->data;
+ mFrameSize = pFrameInfo->size;
+ VTRACE("mFrameData = %p, mFrameSize = %d", mFrameData, mFrameSize);
+
+ nalu_num = pFrameInfo->num_nalus;
+ VTRACE("nalu_num = %d", nalu_num);
+
+ if (nalu_num <= 0 || nalu_num >= MAX_NUM_NALUS) {
+ ETRACE("Invalid parameter: nalu_num = %d", nalu_num);
+ return DECODE_MEMORY_FAIL;
+ }
+
+ for (int32_t i = 0; i < nalu_num; i++) {
+
+ nalu_size = pFrameInfo->nalus[i].length;
+ nalu_type = pFrameInfo->nalus[i].type;
+ nalu_offset = pFrameInfo->nalus[i].offset;
+ nalu_data = pFrameInfo->nalus[i].data;
+ naluType = nalu_type & NALU_TYPE_MASK;
+
+ VTRACE("nalu_type = 0x%x, nalu_size = %d, nalu_offset = 0x%x", nalu_type, nalu_size, nalu_offset);
+
+ if (naluType >= h264_NAL_UNIT_TYPE_SLICE && naluType <= h264_NAL_UNIT_TYPE_IDR) {
+
+ mIsEncryptData = 1;
+ VTRACE("slice idx = %d", sliceidx);
+ mSliceInfo[sliceidx].sliceHeaderByte = nalu_type;
+ mSliceInfo[sliceidx].sliceStartOffset = (nalu_offset >> 4) << 4;
+ mSliceInfo[sliceidx].sliceByteOffset = nalu_offset - mSliceInfo[sliceidx].sliceStartOffset;
+ mSliceInfo[sliceidx].sliceLength = mSliceInfo[sliceidx].sliceByteOffset + nalu_size;
+ mSliceInfo[sliceidx].sliceSize = (mSliceInfo[sliceidx].sliceByteOffset + nalu_size + 0xF) & ~0xF;
+ VTRACE("sliceHeaderByte = 0x%x", mSliceInfo[sliceidx].sliceHeaderByte);
+ VTRACE("sliceStartOffset = %d", mSliceInfo[sliceidx].sliceStartOffset);
+ VTRACE("sliceByteOffset = %d", mSliceInfo[sliceidx].sliceByteOffset);
+ VTRACE("sliceSize = %d", mSliceInfo[sliceidx].sliceSize);
+ VTRACE("sliceLength = %d", mSliceInfo[sliceidx].sliceLength);
+#if 0
+ uint32_t testsize;
+ uint8_t *testdata;
+ testsize = mSliceInfo[sliceidx].sliceSize > 64 ? 64 : mSliceInfo[sliceidx].sliceSize ;
+ testdata = (uint8_t *)(mFrameData);
+ for (int i = 0; i < testsize; i++) {
+ VTRACE("testdata[%d] = 0x%x", i, testdata[i]);
+ }
+#endif
+ sliceidx++;
+
+ } else if (naluType == h264_NAL_UNIT_TYPE_SPS || naluType == h264_NAL_UNIT_TYPE_PPS) {
+ if (nalu_data == NULL) {
+ ETRACE("Invalid parameter: nalu_data = NULL for naluType 0x%x", naluType);
+ return DECODE_MEMORY_FAIL;
+ }
+ memcpy(mClearData + clear_data_size,
+ nalu_data,
+ nalu_size);
+ clear_data_size += nalu_size;
+ } else {
+ ITRACE("Nalu type = 0x%x is skipped", naluType);
+ continue;
+ }
+ }
+ clear_data = mClearData;
+ mSliceNum = sliceidx;
+
+ } else {
+ VTRACE("Decoding clear video ...");
+ mIsEncryptData = 0;
+ mFrameSize = buffer->size;
+ mFrameData = buffer->data;
+ clear_data = buffer->data;
+ clear_data_size = buffer->size;
+ }
+
+ if (clear_data_size > 0) {
+ status = VideoDecoderBase::parseBuffer(
+ clear_data,
+ clear_data_size,
+ false,
+ (void**)data);
+ CHECK_STATUS("VideoDecoderBase::parseBuffer");
+ } else {
+ status = VideoDecoderBase::queryBuffer((void**)data);
+ CHECK_STATUS("VideoDecoderBase::queryBuffer");
+ }
+ return DECODE_SUCCESS;
+}
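+// Worked example (illustrative only, values assumed): for an encrypted slice
+// NALU of 100 bytes at nalu_offset 0x2A, the 16-byte alignment above gives
+//   sliceStartOffset = (0x2A >> 4) << 4    == 0x20
+//   sliceByteOffset  = 0x2A - 0x20         == 10
+//   sliceLength      = 10 + 100            == 110
+//   sliceSize        = (110 + 0xF) & ~0xF  == 112
+// i.e. the decoder is handed a 16-byte-aligned, 16-byte-padded window with
+// the slice starting sliceByteOffset bytes into it.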
+
+Decode_Status VideoDecoderAVCSecure::processClassicInputBuffer(VideoDecodeBuffer *buffer, vbp_data_h264 **data)
+{
+ Decode_Status status;
+ int32_t clear_data_size = 0;
+ uint8_t *clear_data = NULL;
+ uint8_t naluType = 0;
+
+ int32_t num_nalus;
+ int32_t nalu_offset;
+ int32_t offset;
+ uint8_t *data_src;
+ uint8_t *nalu_data;
+ uint32_t nalu_size;
+
+ if (buffer->flag & IS_SECURE_DATA) {
+ VTRACE("Decoding protected video ...");
+ mIsEncryptData = 1;
+
+ mFrameData = buffer->data;
+ mFrameSize = buffer->size;
+ VTRACE("mFrameData = %p, mFrameSize = %d", mFrameData, mFrameSize);
+ num_nalus = *(uint32_t *)(buffer->data + buffer->size + sizeof(uint32_t));
+ VTRACE("num_nalus = %d", num_nalus);
+ offset = 4;
+ for (int32_t i = 0; i < num_nalus; i++) {
+ VTRACE("%d nalu, offset = %d", i, offset);
+ data_src = buffer->data + buffer->size + sizeof(uint32_t) + offset;
+ nalu_size = *(uint32_t *)(data_src + 2 * sizeof(uint32_t));
+ nalu_size = (nalu_size + 0x03) & (~0x03);
+
+ nalu_data = data_src + 3 *sizeof(uint32_t);
+ naluType = nalu_data[0] & NALU_TYPE_MASK;
+ offset += nalu_size + 3 *sizeof(uint32_t);
+ VTRACE("naluType = 0x%x", naluType);
+ VTRACE("nalu_size = %d, nalu_data = %p", nalu_size, nalu_data);
+
+ if (naluType >= h264_NAL_UNIT_TYPE_SLICE && naluType <= h264_NAL_UNIT_TYPE_IDR) {
+ ETRACE("Slice NALU received!");
+ return DECODE_INVALID_DATA;
+ }
+
+ else if (naluType >= h264_NAL_UNIT_TYPE_SEI && naluType <= h264_NAL_UNIT_TYPE_PPS) {
+ memcpy(mClearData + clear_data_size,
+ startcodePrefix,
+ STARTCODE_PREFIX_LEN);
+ clear_data_size += STARTCODE_PREFIX_LEN;
+ memcpy(mClearData + clear_data_size,
+ nalu_data,
+ nalu_size);
+ clear_data_size += nalu_size;
+ } else {
+ ETRACE("Failure: DECODE_FRAME_DROPPED");
+ return DECODE_FRAME_DROPPED;
+ }
+ }
+ clear_data = mClearData;
+ } else {
+ VTRACE("Decoding clear video ...");
+ mIsEncryptData = 0;
+ mFrameSize = buffer->size;
+ mFrameData = buffer->data;
+ clear_data = buffer->data;
+ clear_data_size = buffer->size;
+ }
+
+ if (clear_data_size > 0) {
+ status = VideoDecoderBase::parseBuffer(
+ clear_data,
+ clear_data_size,
+ false,
+ (void**)data);
+ CHECK_STATUS("VideoDecoderBase::parseBuffer");
+ } else {
+ status = VideoDecoderBase::queryBuffer((void**)data);
+ CHECK_STATUS("VideoDecoderBase::queryBuffer");
+ }
+ return DECODE_SUCCESS;
+}
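+// Metadata layout assumed by the classic path above (a sketch inferred from
+// the pointer arithmetic, not a documented format): appended right after the
+// encrypted bitstream at buffer->data + buffer->size are
+//   [4 bytes, not read here][4 bytes: num_nalus]
+// followed by one record per NALU:
+//   [three 4-byte fields, of which only the third (the NALU size) is read
+//    here][the NALU bytes, with the size rounded up to a 4-byte boundary]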
+
+Decode_Status VideoDecoderAVCSecure::decode(VideoDecodeBuffer *buffer) {
+ VTRACE("VideoDecoderAVCSecure::decode");
+ Decode_Status status;
+ vbp_data_h264 *data = NULL;
+ if (buffer == NULL) {
+ return DECODE_INVALID_DATA;
+ }
+
+#if 0
+ uint32_t testsize;
+ uint8_t *testdata;
+ testsize = buffer->size > 16 ? 16:buffer->size ;
+ testdata = (uint8_t *)(buffer->data);
+    for (int i = 0; i < testsize; i++) {
+ VTRACE("testdata[%d] = 0x%x", i, testdata[i]);
+ }
+#endif
+ if (buffer->flag & IS_SUBSAMPLE_ENCRYPTION) {
+ mModularMode = 1;
+ }
+
+ if (mModularMode) {
+ status = processModularInputBuffer(buffer,&data);
+ CHECK_STATUS("processModularInputBuffer");
+ }
+ else {
+ status = processClassicInputBuffer(buffer,&data);
+ CHECK_STATUS("processClassicInputBuffer");
+ }
+
+ if (!mVAStarted) {
+ if (data->has_sps && data->has_pps) {
+ status = startVA(data);
+ CHECK_STATUS("startVA");
+ } else {
+ WTRACE("Can't start VA as either SPS or PPS is still not available.");
+ return DECODE_SUCCESS;
+ }
+ }
+
+ status = decodeFrame(buffer, data);
+
+ return status;
+}
+
+Decode_Status VideoDecoderAVCSecure::decodeFrame(VideoDecodeBuffer *buffer, vbp_data_h264 *data) {
+ VTRACE("VideoDecoderAVCSecure::decodeFrame");
+ Decode_Status status;
+ VTRACE("data->has_sps = %d, data->has_pps = %d", data->has_sps, data->has_pps);
+
+#if 0
+    // Don't remove the following code; it can be enabled for debugging the DPB.
+ for (unsigned int i = 0; i < data->num_pictures; i++) {
+ VAPictureH264 &pic = data->pic_data[i].pic_parms->CurrPic;
+ VTRACE("%d: decoding frame %.2f, poc top = %d, poc bottom = %d, flags = %d, reference = %d",
+ i,
+ buffer->timeStamp/1E6,
+ pic.TopFieldOrderCnt,
+ pic.BottomFieldOrderCnt,
+ pic.flags,
+ (pic.flags & VA_PICTURE_H264_SHORT_TERM_REFERENCE) ||
+ (pic.flags & VA_PICTURE_H264_LONG_TERM_REFERENCE));
+ }
+#endif
+
+ if (data->new_sps || data->new_pps) {
+ status = handleNewSequence(data);
+ CHECK_STATUS("handleNewSequence");
+ }
+
+ if (mModularMode && (!mIsEncryptData)) {
+ if (data->pic_data[0].num_slices == 0) {
+ ITRACE("No slice available for decoding.");
+ status = mSizeChanged ? DECODE_FORMAT_CHANGE : DECODE_SUCCESS;
+ mSizeChanged = false;
+ return status;
+ }
+ }
+
+ uint64_t lastPTS = mCurrentPTS;
+ mCurrentPTS = buffer->timeStamp;
+
+ // start decoding a new frame
+ status = acquireSurfaceBuffer();
+ CHECK_STATUS("acquireSurfaceBuffer");
+
+    if (mModularMode) {
+        status = parseModularSliceHeader(buffer, data);
+    } else {
+        status = parseClassicSliceHeader(buffer, data);
+    }
+
+ if (status != DECODE_SUCCESS) {
+ endDecodingFrame(true);
+ return status;
+ }
+
+ status = beginDecodingFrame(data);
+ CHECK_STATUS("beginDecodingFrame");
+
+ // finish decoding the last frame
+ status = endDecodingFrame(false);
+ CHECK_STATUS("endDecodingFrame");
+
+ if (isNewFrame(data, lastPTS == mCurrentPTS) == 0) {
+ ETRACE("Can't handle interlaced frames yet");
+ return DECODE_FAIL;
+ }
+
+ return DECODE_SUCCESS;
+}
+
+Decode_Status VideoDecoderAVCSecure::beginDecodingFrame(vbp_data_h264 *data) {
+ VTRACE("VideoDecoderAVCSecure::beginDecodingFrame");
+ Decode_Status status;
+ VAPictureH264 *picture = &(data->pic_data[0].pic_parms->CurrPic);
+ if ((picture->flags & VA_PICTURE_H264_SHORT_TERM_REFERENCE) ||
+ (picture->flags & VA_PICTURE_H264_LONG_TERM_REFERENCE)) {
+ mAcquiredBuffer->referenceFrame = true;
+ } else {
+ mAcquiredBuffer->referenceFrame = false;
+ }
+
+ if (picture->flags & VA_PICTURE_H264_TOP_FIELD) {
+ mAcquiredBuffer->renderBuffer.scanFormat = VA_BOTTOM_FIELD | VA_TOP_FIELD;
+ } else {
+ mAcquiredBuffer->renderBuffer.scanFormat = VA_FRAME_PICTURE;
+ }
+
+ mAcquiredBuffer->renderBuffer.flag = 0;
+ mAcquiredBuffer->renderBuffer.timeStamp = mCurrentPTS;
+ mAcquiredBuffer->pictureOrder = getPOC(picture);
+
+ if (mSizeChanged) {
+ mAcquiredBuffer->renderBuffer.flag |= IS_RESOLUTION_CHANGE;
+ mSizeChanged = false;
+ }
+
+ status = continueDecodingFrame(data);
+ return status;
+}
+
+Decode_Status VideoDecoderAVCSecure::continueDecodingFrame(vbp_data_h264 *data) {
+ VTRACE("VideoDecoderAVCSecure::continueDecodingFrame");
+ Decode_Status status;
+ vbp_picture_data_h264 *picData = data->pic_data;
+
+ if (mAcquiredBuffer == NULL || mAcquiredBuffer->renderBuffer.surface == VA_INVALID_SURFACE) {
+ ETRACE("mAcquiredBuffer is NULL. Implementation bug.");
+ return DECODE_FAIL;
+ }
+ VTRACE("data->num_pictures = %d", data->num_pictures);
+ for (uint32_t picIndex = 0; picIndex < data->num_pictures; picIndex++, picData++) {
+ if (picData == NULL || picData->pic_parms == NULL || picData->slc_data == NULL || picData->num_slices == 0) {
+ return DECODE_PARSER_FAIL;
+ }
+
+ if (picIndex > 0 &&
+ (picData->pic_parms->CurrPic.flags & (VA_PICTURE_H264_TOP_FIELD | VA_PICTURE_H264_BOTTOM_FIELD)) == 0) {
+ ETRACE("Packed frame is not supported yet!");
+ return DECODE_FAIL;
+ }
+ VTRACE("picData->num_slices = %d", picData->num_slices);
+ for (uint32_t sliceIndex = 0; sliceIndex < picData->num_slices; sliceIndex++) {
+ status = decodeSlice(data, picIndex, sliceIndex);
+ if (status != DECODE_SUCCESS) {
+ endDecodingFrame(true);
+ // remove current frame from DPB as it can't be decoded.
+ removeReferenceFromDPB(picData->pic_parms);
+ return status;
+ }
+ }
+ }
+ return DECODE_SUCCESS;
+}
+
+Decode_Status VideoDecoderAVCSecure::parseClassicSliceHeader(VideoDecodeBuffer *buffer, vbp_data_h264 *data) {
+ Decode_Status status;
+ VAStatus vaStatus;
+
+ VABufferID sliceheaderbufferID;
+ VABufferID pictureparameterparsingbufferID;
+ VABufferID mSlicebufferID;
+
+ if (mFrameSize <= 0) {
+ return DECODE_SUCCESS;
+ }
+ vaStatus = vaBeginPicture(mVADisplay, mVAContext, mAcquiredBuffer->renderBuffer.surface);
+ CHECK_VA_STATUS("vaBeginPicture");
+
+ vaStatus = vaCreateBuffer(
+ mVADisplay,
+ mVAContext,
+ VAParseSliceHeaderGroupBufferType,
+ MAX_SLICEHEADER_BUFFER_SIZE,
+ 1,
+ NULL,
+ &sliceheaderbufferID);
+ CHECK_VA_STATUS("vaCreateSliceHeaderGroupBuffer");
+
+ void *sliceheaderbuf;
+ vaStatus = vaMapBuffer(
+ mVADisplay,
+ sliceheaderbufferID,
+ &sliceheaderbuf);
+ CHECK_VA_STATUS("vaMapBuffer");
+
+ memset(sliceheaderbuf, 0, MAX_SLICEHEADER_BUFFER_SIZE);
+
+ vaStatus = vaUnmapBuffer(
+ mVADisplay,
+ sliceheaderbufferID);
+ CHECK_VA_STATUS("vaUnmapBuffer");
+
+
+ vaStatus = vaCreateBuffer(
+ mVADisplay,
+ mVAContext,
+ VASliceDataBufferType,
+ mFrameSize, //size
+ 1, //num_elements
+ mFrameData,
+ &mSlicebufferID);
+ CHECK_VA_STATUS("vaCreateSliceDataBuffer");
+
+ data->pic_parse_buffer->frame_buf_id = mSlicebufferID;
+ data->pic_parse_buffer->slice_headers_buf_id = sliceheaderbufferID;
+ data->pic_parse_buffer->frame_size = mFrameSize;
+ data->pic_parse_buffer->slice_headers_size = MAX_SLICEHEADER_BUFFER_SIZE;
+
+#if 0
+
+ VTRACE("flags.bits.frame_mbs_only_flag = %d", data->pic_parse_buffer->flags.bits.frame_mbs_only_flag);
+ VTRACE("flags.bits.pic_order_present_flag = %d", data->pic_parse_buffer->flags.bits.pic_order_present_flag);
+ VTRACE("flags.bits.delta_pic_order_always_zero_flag = %d", data->pic_parse_buffer->flags.bits.delta_pic_order_always_zero_flag);
+ VTRACE("flags.bits.redundant_pic_cnt_present_flag = %d", data->pic_parse_buffer->flags.bits.redundant_pic_cnt_present_flag);
+ VTRACE("flags.bits.weighted_pred_flag = %d", data->pic_parse_buffer->flags.bits.weighted_pred_flag);
+ VTRACE("flags.bits.entropy_coding_mode_flag = %d", data->pic_parse_buffer->flags.bits.entropy_coding_mode_flag);
+ VTRACE("flags.bits.deblocking_filter_control_present_flag = %d", data->pic_parse_buffer->flags.bits.deblocking_filter_control_present_flag);
+ VTRACE("flags.bits.weighted_bipred_idc = %d", data->pic_parse_buffer->flags.bits.weighted_bipred_idc);
+
+ VTRACE("pic_parse_buffer->expected_pic_parameter_set_id = %d", data->pic_parse_buffer->expected_pic_parameter_set_id);
+ VTRACE("pic_parse_buffer->num_slice_groups_minus1 = %d", data->pic_parse_buffer->num_slice_groups_minus1);
+ VTRACE("pic_parse_buffer->chroma_format_idc = %d", data->pic_parse_buffer->chroma_format_idc);
+ VTRACE("pic_parse_buffer->log2_max_pic_order_cnt_lsb_minus4 = %d", data->pic_parse_buffer->log2_max_pic_order_cnt_lsb_minus4);
+ VTRACE("pic_parse_buffer->pic_order_cnt_type = %d", data->pic_parse_buffer->pic_order_cnt_type);
+ VTRACE("pic_parse_buffer->residual_colour_transform_flag = %d", data->pic_parse_buffer->residual_colour_transform_flag);
+ VTRACE("pic_parse_buffer->num_ref_idc_l0_active_minus1 = %d", data->pic_parse_buffer->num_ref_idc_l0_active_minus1);
+ VTRACE("pic_parse_buffer->num_ref_idc_l1_active_minus1 = %d", data->pic_parse_buffer->num_ref_idc_l1_active_minus1);
+#endif
+
+ vaStatus = vaCreateBuffer(
+ mVADisplay,
+ mVAContext,
+ VAParsePictureParameterBufferType,
+ sizeof(VAParsePictureParameterBuffer),
+ 1,
+ data->pic_parse_buffer,
+ &pictureparameterparsingbufferID);
+ CHECK_VA_STATUS("vaCreatePictureParameterParsingBuffer");
+
+ vaStatus = vaRenderPicture(
+ mVADisplay,
+ mVAContext,
+ &pictureparameterparsingbufferID,
+ 1);
+ CHECK_VA_STATUS("vaRenderPicture");
+
+ vaStatus = vaMapBuffer(
+ mVADisplay,
+ sliceheaderbufferID,
+ &sliceheaderbuf);
+ CHECK_VA_STATUS("vaMapBuffer");
+
+ status = updateSliceParameter(data,sliceheaderbuf);
+ CHECK_STATUS("processSliceHeader");
+
+ vaStatus = vaUnmapBuffer(
+ mVADisplay,
+ sliceheaderbufferID);
+ CHECK_VA_STATUS("vaUnmapBuffer");
+
+ return DECODE_SUCCESS;
+}
+
+Decode_Status VideoDecoderAVCSecure::parseModularSliceHeader(VideoDecodeBuffer *buffer, vbp_data_h264 *data) {
+ Decode_Status status;
+ VAStatus vaStatus;
+
+ VABufferID sliceheaderbufferID;
+ VABufferID pictureparameterparsingbufferID;
+ VABufferID mSlicebufferID;
+ int32_t sliceIdx;
+
+ vaStatus = vaBeginPicture(mVADisplay, mVAContext, mAcquiredBuffer->renderBuffer.surface);
+ CHECK_VA_STATUS("vaBeginPicture");
+
+    if (mFrameSize <= 0 || mSliceNum <= 0) {
+ return DECODE_SUCCESS;
+ }
+ void *sliceheaderbuf;
+ memset(mCachedHeader, 0, MAX_SLICEHEADER_BUFFER_SIZE);
+ int32_t offset = 0;
+ int32_t size = 0;
+
+ for (sliceIdx = 0; sliceIdx < mSliceNum; sliceIdx++) {
+ vaStatus = vaCreateBuffer(
+ mVADisplay,
+ mVAContext,
+ VAParseSliceHeaderGroupBufferType,
+ MAX_SLICEHEADER_BUFFER_SIZE,
+ 1,
+ NULL,
+ &sliceheaderbufferID);
+ CHECK_VA_STATUS("vaCreateSliceHeaderGroupBuffer");
+
+ vaStatus = vaMapBuffer(
+ mVADisplay,
+ sliceheaderbufferID,
+ &sliceheaderbuf);
+ CHECK_VA_STATUS("vaMapBuffer");
+
+ memset(sliceheaderbuf, 0, MAX_SLICEHEADER_BUFFER_SIZE);
+
+ vaStatus = vaUnmapBuffer(
+ mVADisplay,
+ sliceheaderbufferID);
+ CHECK_VA_STATUS("vaUnmapBuffer");
+
+ vaStatus = vaCreateBuffer(
+ mVADisplay,
+ mVAContext,
+ VASliceDataBufferType,
+ mSliceInfo[sliceIdx].sliceSize, //size
+ 1, //num_elements
+ mFrameData + mSliceInfo[sliceIdx].sliceStartOffset,
+ &mSlicebufferID);
+ CHECK_VA_STATUS("vaCreateSliceDataBuffer");
+
+ data->pic_parse_buffer->frame_buf_id = mSlicebufferID;
+ data->pic_parse_buffer->slice_headers_buf_id = sliceheaderbufferID;
+ data->pic_parse_buffer->frame_size = mSliceInfo[sliceIdx].sliceLength;
+ data->pic_parse_buffer->slice_headers_size = MAX_SLICEHEADER_BUFFER_SIZE;
+ data->pic_parse_buffer->nalu_header.value = mSliceInfo[sliceIdx].sliceHeaderByte;
+ data->pic_parse_buffer->slice_offset = mSliceInfo[sliceIdx].sliceByteOffset;
+
+#if 0
+ VTRACE("data->pic_parse_buffer->slice_offset = 0x%x", data->pic_parse_buffer->slice_offset);
+ VTRACE("pic_parse_buffer->nalu_header.value = %x", data->pic_parse_buffer->nalu_header.value = mSliceInfo[sliceIdx].sliceHeaderByte);
+ VTRACE("flags.bits.frame_mbs_only_flag = %d", data->pic_parse_buffer->flags.bits.frame_mbs_only_flag);
+ VTRACE("flags.bits.pic_order_present_flag = %d", data->pic_parse_buffer->flags.bits.pic_order_present_flag);
+ VTRACE("flags.bits.delta_pic_order_always_zero_flag = %d", data->pic_parse_buffer->flags.bits.delta_pic_order_always_zero_flag);
+ VTRACE("flags.bits.redundant_pic_cnt_present_flag = %d", data->pic_parse_buffer->flags.bits.redundant_pic_cnt_present_flag);
+ VTRACE("flags.bits.weighted_pred_flag = %d", data->pic_parse_buffer->flags.bits.weighted_pred_flag);
+ VTRACE("flags.bits.entropy_coding_mode_flag = %d", data->pic_parse_buffer->flags.bits.entropy_coding_mode_flag);
+ VTRACE("flags.bits.deblocking_filter_control_present_flag = %d", data->pic_parse_buffer->flags.bits.deblocking_filter_control_present_flag);
+ VTRACE("flags.bits.weighted_bipred_idc = %d", data->pic_parse_buffer->flags.bits.weighted_bipred_idc);
+ VTRACE("pic_parse_buffer->expected_pic_parameter_set_id = %d", data->pic_parse_buffer->expected_pic_parameter_set_id);
+ VTRACE("pic_parse_buffer->num_slice_groups_minus1 = %d", data->pic_parse_buffer->num_slice_groups_minus1);
+ VTRACE("pic_parse_buffer->chroma_format_idc = %d", data->pic_parse_buffer->chroma_format_idc);
+ VTRACE("pic_parse_buffer->log2_max_pic_order_cnt_lsb_minus4 = %d", data->pic_parse_buffer->log2_max_pic_order_cnt_lsb_minus4);
+ VTRACE("pic_parse_buffer->pic_order_cnt_type = %d", data->pic_parse_buffer->pic_order_cnt_type);
+ VTRACE("pic_parse_buffer->residual_colour_transform_flag = %d", data->pic_parse_buffer->residual_colour_transform_flag);
+ VTRACE("pic_parse_buffer->num_ref_idc_l0_active_minus1 = %d", data->pic_parse_buffer->num_ref_idc_l0_active_minus1);
+ VTRACE("pic_parse_buffer->num_ref_idc_l1_active_minus1 = %d", data->pic_parse_buffer->num_ref_idc_l1_active_minus1);
+#endif
+ vaStatus = vaCreateBuffer(
+ mVADisplay,
+ mVAContext,
+ VAParsePictureParameterBufferType,
+ sizeof(VAParsePictureParameterBuffer),
+ 1,
+ data->pic_parse_buffer,
+ &pictureparameterparsingbufferID);
+ CHECK_VA_STATUS("vaCreatePictureParameterParsingBuffer");
+
+ vaStatus = vaRenderPicture(
+ mVADisplay,
+ mVAContext,
+ &pictureparameterparsingbufferID,
+ 1);
+ CHECK_VA_STATUS("vaRenderPicture");
+
+ vaStatus = vaMapBuffer(
+ mVADisplay,
+ sliceheaderbufferID,
+ &sliceheaderbuf);
+ CHECK_VA_STATUS("vaMapBuffer");
+
+ size = *(uint32 *)((uint8 *)sliceheaderbuf + 4) + 4;
+ VTRACE("slice header size = 0x%x, offset = 0x%x", size, offset);
+ if (offset + size <= MAX_SLICEHEADER_BUFFER_SIZE - 4) {
+ memcpy(mCachedHeader+offset, sliceheaderbuf, size);
+ offset += size;
+ } else {
+ WTRACE("Cached slice header is not big enough!");
+ }
+ vaStatus = vaUnmapBuffer(
+ mVADisplay,
+ sliceheaderbufferID);
+ CHECK_VA_STATUS("vaUnmapBuffer");
+ }
+ memset(mCachedHeader + offset, 0xFF, 4);
+ status = updateSliceParameter(data,mCachedHeader);
+ CHECK_STATUS("processSliceHeader");
+ return DECODE_SUCCESS;
+}
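+// Note on the header cache above (sketch): for each slice the size of the
+// parsed header group is read from bytes 4..7 of the mapped VA buffer, plus
+// 4 bytes for that size field itself; the groups are concatenated into
+// mCachedHeader, and a 4-byte 0xFF sentinel terminates the list before it is
+// fed back to the parser through updateSliceParameter().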
+
+
+Decode_Status VideoDecoderAVCSecure::updateSliceParameter(vbp_data_h264 *data, void *sliceheaderbuf) {
+ VTRACE("VideoDecoderAVCSecure::updateSliceParameter");
+ Decode_Status status;
+ status = VideoDecoderBase::updateBuffer(
+ (uint8_t *)sliceheaderbuf,
+ MAX_SLICEHEADER_BUFFER_SIZE,
+ (void**)&data);
+ CHECK_STATUS("updateBuffer");
+ return DECODE_SUCCESS;
+}
+
+Decode_Status VideoDecoderAVCSecure::decodeSlice(vbp_data_h264 *data, uint32_t picIndex, uint32_t sliceIndex) {
+ Decode_Status status;
+ VAStatus vaStatus;
+ uint32_t bufferIDCount = 0;
+ // maximum 3 buffers to render a slice: picture parameter, IQMatrix, slice parameter
+ VABufferID bufferIDs[3];
+
+ vbp_picture_data_h264 *picData = &(data->pic_data[picIndex]);
+ vbp_slice_data_h264 *sliceData = &(picData->slc_data[sliceIndex]);
+ VAPictureParameterBufferH264 *picParam = picData->pic_parms;
+ VASliceParameterBufferH264 *sliceParam = &(sliceData->slc_parms);
+ uint32_t slice_data_size = 0;
+ uint8_t* slice_data_addr = NULL;
+
+ if (sliceParam->first_mb_in_slice == 0 || mDecodingFrame == false) {
+ // either condition indicates start of a new frame
+ if (sliceParam->first_mb_in_slice != 0) {
+ WTRACE("The first slice is lost.");
+ }
+ VTRACE("Current frameidx = %d", mFrameIdx++);
+ // Update the reference frames and surface IDs for DPB and current frame
+ status = updateDPB(picParam);
+ CHECK_STATUS("updateDPB");
+
+        // We have to provide a hacked DPB rather than the complete DPB to libva as a workaround
+ status = updateReferenceFrames(picData);
+ CHECK_STATUS("updateReferenceFrames");
+
+ mDecodingFrame = true;
+
+ vaStatus = vaCreateBuffer(
+ mVADisplay,
+ mVAContext,
+ VAPictureParameterBufferType,
+ sizeof(VAPictureParameterBufferH264),
+ 1,
+ picParam,
+ &bufferIDs[bufferIDCount]);
+ CHECK_VA_STATUS("vaCreatePictureParameterBuffer");
+ bufferIDCount++;
+
+ vaStatus = vaCreateBuffer(
+ mVADisplay,
+ mVAContext,
+ VAIQMatrixBufferType,
+ sizeof(VAIQMatrixBufferH264),
+ 1,
+ data->IQ_matrix_buf,
+ &bufferIDs[bufferIDCount]);
+ CHECK_VA_STATUS("vaCreateIQMatrixBuffer");
+ bufferIDCount++;
+ }
+
+ status = setReference(sliceParam);
+ CHECK_STATUS("setReference");
+
+ if (mModularMode) {
+ if (mIsEncryptData) {
+ sliceParam->slice_data_size = mSliceInfo[sliceIndex].sliceSize;
+ slice_data_size = mSliceInfo[sliceIndex].sliceSize;
+ slice_data_addr = mFrameData + mSliceInfo[sliceIndex].sliceStartOffset;
+ } else {
+ slice_data_size = sliceData->slice_size;
+ slice_data_addr = sliceData->buffer_addr + sliceData->slice_offset;
+ }
+ } else {
+ sliceParam->slice_data_size = mFrameSize;
+ slice_data_size = mFrameSize;
+ slice_data_addr = mFrameData;
+ }
+
+ vaStatus = vaCreateBuffer(
+ mVADisplay,
+ mVAContext,
+ VASliceParameterBufferType,
+ sizeof(VASliceParameterBufferH264),
+ 1,
+ sliceParam,
+ &bufferIDs[bufferIDCount]);
+ CHECK_VA_STATUS("vaCreateSliceParameterBuffer");
+ bufferIDCount++;
+
+ vaStatus = vaRenderPicture(
+ mVADisplay,
+ mVAContext,
+ bufferIDs,
+ bufferIDCount);
+ CHECK_VA_STATUS("vaRenderPicture");
+
+ VABufferID slicebufferID;
+
+ vaStatus = vaCreateBuffer(
+ mVADisplay,
+ mVAContext,
+ VASliceDataBufferType,
+ slice_data_size, //size
+ 1, //num_elements
+ slice_data_addr,
+ &slicebufferID);
+ CHECK_VA_STATUS("vaCreateSliceDataBuffer");
+
+ vaStatus = vaRenderPicture(
+ mVADisplay,
+ mVAContext,
+ &slicebufferID,
+ 1);
+ CHECK_VA_STATUS("vaRenderPicture");
+
+ return DECODE_SUCCESS;
+}
+
+Decode_Status VideoDecoderAVCSecure::getCodecSpecificConfigs(
+ VAProfile profile, VAConfigID *config)
+{
+ VAStatus vaStatus;
+ VAConfigAttrib attrib[2];
+
+ if (config == NULL) {
+ ETRACE("Invalid parameter!");
+ return DECODE_FAIL;
+ }
+
+ attrib[0].type = VAConfigAttribRTFormat;
+ attrib[0].value = VA_RT_FORMAT_YUV420;
+ attrib[1].type = VAConfigAttribDecSliceMode;
+ attrib[1].value = VA_DEC_SLICE_MODE_NORMAL;
+ if (mModularMode) {
+ attrib[1].value = VA_DEC_SLICE_MODE_SUBSAMPLE;
+ }
+
+ vaStatus = vaCreateConfig(
+ mVADisplay,
+ profile,
+ VAEntrypointVLD,
+ &attrib[0],
+ 2,
+ config);
+ CHECK_VA_STATUS("vaCreateConfig");
+
+ return DECODE_SUCCESS;
+}
diff --git a/videodecoder/securevideo/merrifield/VideoDecoderAVCSecure.h b/videodecoder/securevideo/merrifield/VideoDecoderAVCSecure.h
new file mode 100755
index 0000000..d4a9f15
--- /dev/null
+++ b/videodecoder/securevideo/merrifield/VideoDecoderAVCSecure.h
@@ -0,0 +1,69 @@
+/*
+* Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+#ifndef VIDEO_DECODER_AVC_SECURE_H
+#define VIDEO_DECODER_AVC_SECURE_H
+
+#include "VideoDecoderBase.h"
+#include "VideoDecoderAVC.h"
+#include "VideoDecoderDefs.h"
+
+class VideoDecoderAVCSecure : public VideoDecoderAVC {
+public:
+ VideoDecoderAVCSecure(const char *mimeType);
+ virtual Decode_Status start(VideoConfigBuffer *buffer);
+ virtual void stop(void);
+
+ // data in the decoded buffer is all encrypted.
+ virtual Decode_Status decode(VideoDecodeBuffer *buffer);
+protected:
+ virtual Decode_Status decodeFrame(VideoDecodeBuffer *buffer, vbp_data_h264 *data);
+ virtual Decode_Status continueDecodingFrame(vbp_data_h264 *data);
+ virtual Decode_Status beginDecodingFrame(vbp_data_h264 *data);
+ virtual Decode_Status getCodecSpecificConfigs(VAProfile profile, VAConfigID*config);
+ Decode_Status parseClassicSliceHeader(VideoDecodeBuffer *buffer, vbp_data_h264 *data);
+ Decode_Status parseModularSliceHeader(VideoDecodeBuffer *buffer, vbp_data_h264 *data);
+
+ Decode_Status updateSliceParameter(vbp_data_h264 *data, void *sliceheaderbuf);
+ virtual Decode_Status decodeSlice(vbp_data_h264 *data, uint32_t picIndex, uint32_t sliceIndex);
+private:
+ Decode_Status processClassicInputBuffer(VideoDecodeBuffer *buffer, vbp_data_h264 **data);
+ Decode_Status processModularInputBuffer(VideoDecodeBuffer *buffer, vbp_data_h264 **data);
+ int32_t mIsEncryptData;
+ int32_t mFrameSize;
+ uint8_t* mFrameData;
+ uint8_t* mClearData;
+ uint8_t* mCachedHeader;
+ int32_t mFrameIdx;
+ int32_t mModularMode;
+
+ enum {
+ MAX_SLICE_HEADER_NUM = 256,
+ };
+ int32_t mSliceNum;
+    // Information about slices in the modular DRM mode
+ struct SliceInfo {
+ uint8_t sliceHeaderByte; // first byte of the slice header
+ uint32_t sliceStartOffset; // offset of Slice unit in the firewalled buffer
+ uint32_t sliceByteOffset; // extra offset from the blockAligned slice offset
+ uint32_t sliceSize; // block aligned length of slice unit
+ uint32_t sliceLength; // actual size of the slice
+ };
+
+ SliceInfo mSliceInfo[MAX_SLICE_HEADER_NUM];
+};
+
+#endif
diff --git a/videodecoder/securevideo/merrifield/VideoFrameInfo.h b/videodecoder/securevideo/merrifield/VideoFrameInfo.h
new file mode 100755
index 0000000..485b0da
--- /dev/null
+++ b/videodecoder/securevideo/merrifield/VideoFrameInfo.h
@@ -0,0 +1,36 @@
+/*
+* Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+#ifndef VIDEO_FRAME_INFO_H_
+#define VIDEO_FRAME_INFO_H_
+
+#define MAX_NUM_NALUS 16
+
+typedef struct {
+ uint8_t type; // nalu type + nal_ref_idc
+ uint32_t offset; // offset to the pointer of the encrypted data
+ uint8_t* data; // if the nalu is encrypted, this field is useless; if current NALU is SPS/PPS, data is the pointer to clear SPS/PPS data
+ uint32_t length; // nalu length
+} nalu_info_t;
+
+typedef struct {
+ uint8_t* data; // pointer to the encrypted data
+ uint32_t size; // encrypted data size
+ uint32_t num_nalus; // number of NALU
+ nalu_info_t nalus[MAX_NUM_NALUS];
+} frame_info_t;
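+// Usage sketch (illustrative only; enc_buf, sps_ptr and friends are
+// hypothetical names, not part of this interface): a frame carrying one
+// clear SPS plus one encrypted IDR slice could be described as
+//   frame_info_t fi;
+//   fi.data = enc_buf;  fi.size = enc_size;  fi.num_nalus = 2;
+//   fi.nalus[0] = { 0x67, 0,          sps_ptr, sps_len }; // SPS, clear data
+//   fi.nalus[1] = { 0x65, idr_offset, NULL,    idr_len }; // encrypted IDR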
+
+#endif
diff --git a/videodecoder/securevideo/merrplus/VideoDecoderAVCSecure.cpp b/videodecoder/securevideo/merrplus/VideoDecoderAVCSecure.cpp
new file mode 100644
index 0000000..38039e2
--- /dev/null
+++ b/videodecoder/securevideo/merrplus/VideoDecoderAVCSecure.cpp
@@ -0,0 +1,510 @@
+/*
+* Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+#include "VideoDecoderAVCSecure.h"
+#include "VideoDecoderTrace.h"
+#include <string.h>
+
+
+#define STARTCODE_00 0x00
+#define STARTCODE_01 0x01
+#define STARTCODE_PREFIX_LEN 3
+#define NALU_TYPE_MASK 0x1F
+
+
+// mask for little endian, to mask the second and fourth bytes in the byte stream
+#define STARTCODE_MASK0 0xFF000000 //0x00FF0000
+#define STARTCODE_MASK1 0x0000FF00 //0x000000FF
+
+
+typedef enum {
+ NAL_UNIT_TYPE_unspecified0 = 0,
+ NAL_UNIT_TYPE_SLICE,
+ NAL_UNIT_TYPE_DPA,
+ NAL_UNIT_TYPE_DPB,
+ NAL_UNIT_TYPE_DPC,
+ NAL_UNIT_TYPE_IDR,
+ NAL_UNIT_TYPE_SEI,
+ NAL_UNIT_TYPE_SPS,
+ NAL_UNIT_TYPE_PPS,
+ NAL_UNIT_TYPE_Acc_unit_delimiter,
+ NAL_UNIT_TYPE_EOSeq,
+ NAL_UNIT_TYPE_EOstream,
+ NAL_UNIT_TYPE_filler_data,
+ NAL_UNIT_TYPE_SPS_extension,
+ NAL_UNIT_TYPE_Reserved14,
+ NAL_UNIT_TYPE_Reserved15,
+ NAL_UNIT_TYPE_Reserved16,
+ NAL_UNIT_TYPE_Reserved17,
+ NAL_UNIT_TYPE_Reserved18,
+ NAL_UNIT_TYPE_ACP,
+ NAL_UNIT_TYPE_Reserved20,
+ NAL_UNIT_TYPE_Reserved21,
+ NAL_UNIT_TYPE_Reserved22,
+ NAL_UNIT_TYPE_Reserved23,
+ NAL_UNIT_TYPE_unspecified24,
+} NAL_UNIT_TYPE;
+
+#ifndef min
+#define min(X, Y) ((X) < (Y) ? (X) : (Y))
+#endif
+
+
+static const uint8_t startcodePrefix[STARTCODE_PREFIX_LEN] = {0x00, 0x00, 0x01};
+
+
+VideoDecoderAVCSecure::VideoDecoderAVCSecure(const char *mimeType)
+ : VideoDecoderAVC(mimeType),
+ mNaluHeaderBuffer(NULL),
+ mInputBuffer(NULL) {
+
+ memset(&mMetadata, 0, sizeof(NaluMetadata));
+ memset(&mByteStream, 0, sizeof(NaluByteStream));
+}
+
+VideoDecoderAVCSecure::~VideoDecoderAVCSecure() {
+}
+
+Decode_Status VideoDecoderAVCSecure::start(VideoConfigBuffer *buffer) {
+ Decode_Status status = VideoDecoderAVC::start(buffer);
+ if (status != DECODE_SUCCESS) {
+ return status;
+ }
+
+ mMetadata.naluInfo = new NaluInfo [MAX_NALU_NUMBER];
+ mByteStream.byteStream = new uint8_t [MAX_NALU_HEADER_BUFFER];
+ mNaluHeaderBuffer = new uint8_t [MAX_NALU_HEADER_BUFFER];
+
+ if (mMetadata.naluInfo == NULL ||
+ mByteStream.byteStream == NULL ||
+ mNaluHeaderBuffer == NULL) {
+ ETRACE("Failed to allocate memory.");
+ // TODO: release all allocated memory
+ return DECODE_MEMORY_FAIL;
+ }
+ return status;
+}
+
+void VideoDecoderAVCSecure::stop(void) {
+ VideoDecoderAVC::stop();
+
+ if (mMetadata.naluInfo) {
+ delete [] mMetadata.naluInfo;
+ mMetadata.naluInfo = NULL;
+ }
+
+ if (mByteStream.byteStream) {
+ delete [] mByteStream.byteStream;
+ mByteStream.byteStream = NULL;
+ }
+
+ if (mNaluHeaderBuffer) {
+ delete [] mNaluHeaderBuffer;
+ mNaluHeaderBuffer = NULL;
+ }
+}
+
+Decode_Status VideoDecoderAVCSecure::decode(VideoDecodeBuffer *buffer) {
+ Decode_Status status;
+ int32_t sizeAccumulated = 0;
+ int32_t sizeLeft = 0;
+ uint8_t *pByteStream = NULL;
+ NaluInfo *pNaluInfo = mMetadata.naluInfo;
+
+ if (buffer->flag & IS_SECURE_DATA) {
+ // NALU headers are appended to encrypted video bitstream
+ // |...encrypted video bitstream (16 bytes aligned)...| 4 bytes of header size |...NALU headers..|
+ pByteStream = buffer->data + buffer->size + 4;
+ sizeLeft = *(int32_t *)(buffer->data + buffer->size);
+ VTRACE("%s sizeLeft: %d buffer->size: %#x", __func__, sizeLeft, buffer->size);
+ mInputBuffer = buffer->data;
+ } else {
+ status = parseAnnexBStream(buffer->data, buffer->size, &mByteStream);
+ CHECK_STATUS("parseAnnexBStream");
+ pByteStream = mByteStream.byteStream;
+ sizeLeft = mByteStream.streamPos;
+ mInputBuffer = buffer->data;
+ }
+ if (sizeLeft < 4) {
+ ETRACE("Not enough data to read number of NALU.");
+ return DECODE_INVALID_DATA;
+ }
+
+ // read number of NALU
+ memcpy(&(mMetadata.naluNumber), pByteStream, sizeof(int32_t));
+ pByteStream += 4;
+ sizeLeft -= 4;
+
+ if (mMetadata.naluNumber == 0) {
+ WTRACE("Number of NALU is ZERO!");
+ return DECODE_SUCCESS;
+ }
+
+ for (int32_t i = 0; i < mMetadata.naluNumber; i++) {
+ if (sizeLeft < 12) {
+ ETRACE("Not enough data to parse NALU offset, size, header length for NALU %d, left = %d", i, sizeLeft);
+ return DECODE_INVALID_DATA;
+ }
+ sizeLeft -= 12;
+ // read NALU offset
+ memcpy(&(pNaluInfo->naluOffset), pByteStream, sizeof(int32_t));
+ pByteStream += 4;
+
+ // read NALU size
+ memcpy(&(pNaluInfo->naluLen), pByteStream, sizeof(int32_t));
+ pByteStream += 4;
+
+ // read NALU header length
+ memcpy(&(pNaluInfo->naluHeaderLen), pByteStream, sizeof(int32_t));
+ pByteStream += 4;
+
+
+ if (sizeLeft < pNaluInfo->naluHeaderLen) {
+ ETRACE("Not enough data to copy NALU header for %d, left = %d, header len = %d", i, sizeLeft, pNaluInfo->naluHeaderLen);
+ return DECODE_INVALID_DATA;
+ }
+
+ sizeLeft -= pNaluInfo->naluHeaderLen;
+
+ if (pNaluInfo->naluHeaderLen) {
+ // copy start code prefix to buffer
+ memcpy(mNaluHeaderBuffer + sizeAccumulated,
+ startcodePrefix,
+ STARTCODE_PREFIX_LEN);
+ sizeAccumulated += STARTCODE_PREFIX_LEN;
+
+ // copy NALU header
+ memcpy(mNaluHeaderBuffer + sizeAccumulated, pByteStream, pNaluInfo->naluHeaderLen);
+ pByteStream += pNaluInfo->naluHeaderLen;
+
+ sizeAccumulated += pNaluInfo->naluHeaderLen;
+ } else {
+ WTRACE("header len is zero for NALU %d", i);
+ }
+
+ // for next NALU
+ pNaluInfo++;
+ }
+
+ buffer->data = mNaluHeaderBuffer;
+ buffer->size = sizeAccumulated;
+
+ return VideoDecoderAVC::decode(buffer);
+}
+
+
+Decode_Status VideoDecoderAVCSecure::decodeSlice(vbp_data_h264 *data, uint32_t picIndex, uint32_t sliceIndex) {
+
+ Decode_Status status;
+ VAStatus vaStatus;
+ uint32_t bufferIDCount = 0;
+ // maximum 4 buffers to render a slice: picture parameter, IQMatrix, slice parameter, slice data
+ VABufferID bufferIDs[4];
+
+ vbp_picture_data_h264 *picData = &(data->pic_data[picIndex]);
+ vbp_slice_data_h264 *sliceData = &(picData->slc_data[sliceIndex]);
+ VAPictureParameterBufferH264 *picParam = picData->pic_parms;
+ VASliceParameterBufferH264 *sliceParam = &(sliceData->slc_parms);
+
+ if (sliceParam->first_mb_in_slice == 0 || mDecodingFrame == false) {
+ // either condition indicates start of a new frame
+ if (sliceParam->first_mb_in_slice != 0) {
+ WTRACE("The first slice is lost.");
+ // TODO: handle the first slice lost
+ }
+ if (mDecodingFrame) {
+ // interlace content, complete decoding the first field
+ vaStatus = vaEndPicture(mVADisplay, mVAContext);
+ CHECK_VA_STATUS("vaEndPicture");
+
+ // for interlace content, top field may be valid only after the second field is parsed
+            mAcquiredBuffer->pictureOrder = picParam->CurrPic.TopFieldOrderCnt;
+ }
+
+ // Check there is no reference frame loss before decoding a frame
+
+ // Update the reference frames and surface IDs for DPB and current frame
+ status = updateDPB(picParam);
+ CHECK_STATUS("updateDPB");
+
+        // We have to provide a hacked DPB rather than the complete DPB to libva as a workaround
+ status = updateReferenceFrames(picData);
+ CHECK_STATUS("updateReferenceFrames");
+
+ vaStatus = vaBeginPicture(mVADisplay, mVAContext, mAcquiredBuffer->renderBuffer.surface);
+ CHECK_VA_STATUS("vaBeginPicture");
+
+ // start decoding a frame
+ mDecodingFrame = true;
+
+ vaStatus = vaCreateBuffer(
+ mVADisplay,
+ mVAContext,
+ VAPictureParameterBufferType,
+ sizeof(VAPictureParameterBufferH264),
+ 1,
+ picParam,
+ &bufferIDs[bufferIDCount]);
+ CHECK_VA_STATUS("vaCreatePictureParameterBuffer");
+ bufferIDCount++;
+
+ vaStatus = vaCreateBuffer(
+ mVADisplay,
+ mVAContext,
+ VAIQMatrixBufferType,
+ sizeof(VAIQMatrixBufferH264),
+ 1,
+ data->IQ_matrix_buf,
+ &bufferIDs[bufferIDCount]);
+ CHECK_VA_STATUS("vaCreateIQMatrixBuffer");
+ bufferIDCount++;
+ }
+
+ status = setReference(sliceParam);
+ CHECK_STATUS("setReference");
+
+    // find which NaluInfo corresponds to the current slice
+ int naluIndex = 0;
+ uint32_t accumulatedHeaderLen = 0;
+ uint32_t headerLen = 0;
+ for (; naluIndex < mMetadata.naluNumber; naluIndex++) {
+ headerLen = mMetadata.naluInfo[naluIndex].naluHeaderLen;
+ if (headerLen == 0) {
+ WTRACE("lenght of current NAL unit is 0.");
+ continue;
+ }
+ accumulatedHeaderLen += STARTCODE_PREFIX_LEN;
+ if (accumulatedHeaderLen + headerLen > sliceData->slice_offset) {
+ break;
+ }
+ accumulatedHeaderLen += headerLen;
+ }
+
+ if (sliceData->slice_offset != accumulatedHeaderLen) {
+ WTRACE("unexpected slice offset %d, accumulatedHeaderLen = %d", sliceData->slice_offset, accumulatedHeaderLen);
+ }
+
+ sliceParam->slice_data_size = mMetadata.naluInfo[naluIndex].naluLen;
+ uint32_t sliceOffset = mMetadata.naluInfo[naluIndex].naluOffset;
+ uint32_t slice_offset_shift = sliceOffset % 16;
+ sliceParam->slice_data_offset += slice_offset_shift;
+ sliceData->slice_size = (sliceParam->slice_data_size + slice_offset_shift + 0xF) & ~0xF;
+
+ vaStatus = vaCreateBuffer(
+ mVADisplay,
+ mVAContext,
+ VASliceParameterBufferType,
+ sizeof(VASliceParameterBufferH264),
+ 1,
+ sliceParam,
+ &bufferIDs[bufferIDCount]);
+ CHECK_VA_STATUS("vaCreateSliceParameterBuffer");
+ bufferIDCount++;
+
+    // sliceData->slice_offset - accumulatedHeaderLen is the absolute offset to the start code of the current NAL unit;
+    // the offset points to the first byte of the NAL unit.
+
+ if (mInputBuffer != NULL) {
+ vaStatus = vaCreateBuffer(
+ mVADisplay,
+ mVAContext,
+ VASliceDataBufferType,
+ sliceData->slice_size, //Slice size
+ 1, // num_elements
+ mInputBuffer + sliceOffset - slice_offset_shift,
+ &bufferIDs[bufferIDCount]);
+ } else {
+ vaStatus = vaCreateBuffer(
+ mVADisplay,
+ mVAContext,
+ VAProtectedSliceDataBufferType,
+ sliceData->slice_size, //size
+ 1, //num_elements
+ (uint8_t*)sliceOffset, // IMR offset
+ &bufferIDs[bufferIDCount]);
+ }
+ CHECK_VA_STATUS("vaCreateSliceDataBuffer");
+ bufferIDCount++;
+
+ vaStatus = vaRenderPicture(
+ mVADisplay,
+ mVAContext,
+ bufferIDs,
+ bufferIDCount);
+ CHECK_VA_STATUS("vaRenderPicture");
+
+ return DECODE_SUCCESS;
+}
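+// Worked example (illustrative only, values assumed): for naluOffset == 0x2A,
+// slice_offset_shift == 0x2A % 16 == 10, so slice_data_offset is advanced by
+// 10, slice_size is rounded up to the next 16-byte multiple, and the slice
+// data buffer is created from the aligned address mInputBuffer + 0x20.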
+
+
+// Parse byte string pattern "0x000001" (3 bytes) in the current buffer.
+// Returns offset of position following the pattern in the buffer if pattern is found or -1 if not found.
+int32_t VideoDecoderAVCSecure::findNalUnitOffset(uint8_t *stream, int32_t offset, int32_t length) {
+ uint8_t *ptr;
+ uint32_t left = 0, data = 0, phase = 0;
+ uint8_t mask1 = 0, mask2 = 0;
+
+    /* Meaning of phase:
+        0: initial status, "0x000001" bytes are not found so far;
+        1: one "0x00" byte is found;
+        2: two or more consecutive "0x00" bytes are found;
+        3: "0x000001" pattern is found;
+        4: there is one more byte after "0x000001";
+    */
+
+ left = length;
+ ptr = (uint8_t *) (stream + offset);
+ phase = 0;
+
+ // parse until there is more data and start code not found
+ while ((left > 0) && (phase < 3)) {
+        // Check if the address is 32-bit aligned & phase=0; if that's the case we can check 4 bytes instead of one byte at a time.
+ if (((((uint32_t)ptr) & 0x3) == 0) && (phase == 0)) {
+ while (left > 3) {
+ data = *((uint32_t *)ptr);
+ mask1 = (STARTCODE_00 != (data & STARTCODE_MASK0));
+ mask2 = (STARTCODE_00 != (data & STARTCODE_MASK1));
+                // If the second and fourth bytes are not zeros then we cannot have a start code here,
+ // as we need two consecutive zero bytes for a start code pattern.
+ if (mask1 && mask2) {
+ // skip 4 bytes and start over
+ ptr += 4;
+ left -=4;
+ continue;
+ } else {
+ break;
+ }
+ }
+ }
+
+ // At this point either data is not on a 32-bit boundary or phase > 0 so we look at one byte at a time
+ if (left > 0) {
+ if (*ptr == STARTCODE_00) {
+ phase++;
+ if (phase > 2) {
+ // more than 2 consecutive '0x00' bytes is found
+ phase = 2;
+ }
+ } else if ((*ptr == STARTCODE_01) && (phase == 2)) {
+ // start code is found
+ phase = 3;
+ } else {
+ // reset lookup
+ phase = 0;
+ }
+ ptr++;
+ left--;
+ }
+ }
+
+ if ((left > 0) && (phase == 3)) {
+ phase = 4;
+ // return offset of position following the pattern in the buffer which matches "0x000001" byte string
+ return (int32_t)(ptr - stream);
+ }
+ return -1;
+}
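+// Trace sketch (illustrative, bytes assumed): scanning {0x00, 0x00, 0x00,
+// 0x01, 0x9A, ...} moves phase 0 -> 1 -> 2 -> 2 (extra zero) -> 3 on the
+// 0x01, then returns offset 4, i.e. the position of 0x9A, the first byte of
+// the NAL unit after the start code.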
+
+
+Decode_Status VideoDecoderAVCSecure::copyNaluHeader(uint8_t *stream, NaluByteStream *naluStream) {
+ uint8_t naluType;
+ int32_t naluHeaderLen;
+
+ naluType = *(uint8_t *)(stream + naluStream->naluOffset);
+ naluType &= NALU_TYPE_MASK;
+ // first update nalu header length based on nalu type
+ if (naluType >= NAL_UNIT_TYPE_SLICE && naluType <= NAL_UNIT_TYPE_IDR) {
+ // coded slice, return only up to MAX_SLICE_HEADER_SIZE bytes
+ naluHeaderLen = min(naluStream->naluLen, MAX_SLICE_HEADER_SIZE);
+ } else if (naluType >= NAL_UNIT_TYPE_SEI && naluType <= NAL_UNIT_TYPE_PPS) {
+        // SPS, PPS, SEI, etc.: return the entire NAL unit in the clear
+ naluHeaderLen = naluStream->naluLen;
+ } else {
+ return DECODE_FRAME_DROPPED;
+ }
+
+ memcpy(naluStream->byteStream + naluStream->streamPos, &(naluStream->naluOffset), sizeof(int32_t));
+ naluStream->streamPos += 4;
+
+ memcpy(naluStream->byteStream + naluStream->streamPos, &(naluStream->naluLen), sizeof(int32_t));
+ naluStream->streamPos += 4;
+
+ memcpy(naluStream->byteStream + naluStream->streamPos, &naluHeaderLen, sizeof(int32_t));
+ naluStream->streamPos += 4;
+
+ if (naluHeaderLen) {
+ memcpy(naluStream->byteStream + naluStream->streamPos, (uint8_t*)(stream + naluStream->naluOffset), naluHeaderLen);
+ naluStream->streamPos += naluHeaderLen;
+ }
+ return DECODE_SUCCESS;
+}
+
+
+// parse start-code prefixed stream, also known as an Annex B byte stream, commonly used in AVI, ES, and MPEG-2 TS containers
+Decode_Status VideoDecoderAVCSecure::parseAnnexBStream(uint8_t *stream, int32_t length, NaluByteStream *naluStream) {
+ int32_t naluOffset, offset, left;
+ NaluInfo *info;
+ uint32_t ret = DECODE_SUCCESS;
+
+ naluOffset = 0;
+ offset = 0;
+ left = length;
+
+ // leave 4 bytes to copy nalu count
+ naluStream->streamPos = 4;
+ naluStream->naluCount = 0;
+ memset(naluStream->byteStream, 0, MAX_NALU_HEADER_BUFFER);
+
+ for (; ;) {
+ naluOffset = findNalUnitOffset(stream, offset, left);
+ if (naluOffset == -1) {
+ break;
+ }
+
+ if (naluStream->naluCount == 0) {
+ naluStream->naluOffset = naluOffset;
+ } else {
+ naluStream->naluLen = naluOffset - naluStream->naluOffset - STARTCODE_PREFIX_LEN;
+ ret = copyNaluHeader(stream, naluStream);
+ if (ret != DECODE_SUCCESS && ret != DECODE_FRAME_DROPPED) {
+ LOGW("copyNaluHeader returned %d", ret);
+ return ret;
+ }
+ // starting position for next NALU
+ naluStream->naluOffset = naluOffset;
+ }
+
+ if (ret == DECODE_SUCCESS) {
+ naluStream->naluCount++;
+ }
+
+ // update next lookup position and length
+ offset = naluOffset + 1; // skip one byte of NAL unit type
+ left = length - offset;
+ }
+
+ if (naluStream->naluCount > 0) {
+ naluStream->naluLen = length - naluStream->naluOffset;
+ memcpy(naluStream->byteStream, &(naluStream->naluCount), sizeof(int32_t));
+ // ignore return value, either DECODE_SUCCESS or DECODE_FRAME_DROPPED
+ copyNaluHeader(stream, naluStream);
+ return DECODE_SUCCESS;
+ }
+
+ LOGW("number of valid NALU is 0!");
+ return DECODE_SUCCESS;
+}
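+// Example (illustrative, stream assumed): given the Annex B stream
+//   00 00 01 67 ...sps... 00 00 01 65 ...idr...
+// the first iteration only records where the SPS starts; the second computes
+// the SPS length from the next start code and copies its header into
+// byteStream; the code after the loop flushes the final (IDR) unit and
+// writes naluCount into the first 4 bytes of byteStream.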
+
diff --git a/videodecoder/securevideo/merrplus/VideoDecoderAVCSecure.h b/videodecoder/securevideo/merrplus/VideoDecoderAVCSecure.h
new file mode 100644
index 0000000..ee16073
--- /dev/null
+++ b/videodecoder/securevideo/merrplus/VideoDecoderAVCSecure.h
@@ -0,0 +1,75 @@
+/*
+* Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+#ifndef VIDEO_DECODER_AVC_SECURE_H_
+#define VIDEO_DECODER_AVC_SECURE_H_
+
+#include "VideoDecoderAVC.h"
+
+
+class VideoDecoderAVCSecure : public VideoDecoderAVC {
+public:
+ VideoDecoderAVCSecure(const char *mimeType);
+ virtual ~VideoDecoderAVCSecure();
+
+ virtual Decode_Status start(VideoConfigBuffer *buffer);
+ virtual void stop(void);
+
+ // data in the decoded buffer is all encrypted.
+ virtual Decode_Status decode(VideoDecodeBuffer *buffer);
+
+private:
+ enum {
+ MAX_SLICE_HEADER_SIZE = 30,
+ MAX_NALU_HEADER_BUFFER = 8192,
+ MAX_NALU_NUMBER = 400, // > 4096/12
+ };
+
+    // Information about a Network Abstraction Layer (NAL) unit
+ struct NaluInfo {
+ int32_t naluOffset; // offset of NAL unit in the firewalled buffer
+ int32_t naluLen; // length of NAL unit
+ int32_t naluHeaderLen; // length of NAL unit header
+ };
+
+ struct NaluMetadata {
+ NaluInfo *naluInfo;
+ int32_t naluNumber; // number of NAL units
+ };
+
+ struct NaluByteStream {
+ int32_t naluOffset;
+ int32_t naluLen;
+ int32_t streamPos;
+        uint8_t *byteStream; // 4 bytes of naluCount, then per NALU: 4 bytes of naluOffset, 4 bytes of naluLen, 4 bytes of naluHeaderLen, followed by naluHeaderData
+ int32_t naluCount;
+ };
+
+ virtual Decode_Status decodeSlice(vbp_data_h264 *data, uint32_t picIndex, uint32_t sliceIndex);
+ int32_t findNalUnitOffset(uint8_t *stream, int32_t offset, int32_t length);
+ Decode_Status copyNaluHeader(uint8_t *stream, NaluByteStream *naluStream);
+ Decode_Status parseAnnexBStream(uint8_t *stream, int32_t length, NaluByteStream *naluStream);
+
+private:
+ NaluMetadata mMetadata;
+ NaluByteStream mByteStream;
+ uint8_t *mNaluHeaderBuffer;
+ uint8_t *mInputBuffer;
+};
+
+
+
+#endif /* VIDEO_DECODER_AVC_SECURE_H_ */
diff --git a/videodecoder/securevideo/moorefield/VideoDecoderAVCSecure.cpp b/videodecoder/securevideo/moorefield/VideoDecoderAVCSecure.cpp
new file mode 100644
index 0000000..2867ad9
--- /dev/null
+++ b/videodecoder/securevideo/moorefield/VideoDecoderAVCSecure.cpp
@@ -0,0 +1,861 @@
+/*
+* Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+#include <va/va.h>
+#include "VideoDecoderBase.h"
+#include "VideoDecoderAVC.h"
+#include "VideoDecoderTrace.h"
+#include "vbp_loader.h"
+#include "VideoDecoderAVCSecure.h"
+#include "VideoFrameInfo.h"
+
+#define MAX_SLICEHEADER_BUFFER_SIZE 4096
+#define STARTCODE_PREFIX_LEN 3
+#define NALU_TYPE_MASK 0x1F
+#define MAX_NALU_HEADER_BUFFER 8192
+static const uint8_t startcodePrefix[STARTCODE_PREFIX_LEN] = {0x00, 0x00, 0x01};
+
+/* H264 start code values */
+typedef enum _h264_nal_unit_type
+{
+ h264_NAL_UNIT_TYPE_unspecified = 0,
+ h264_NAL_UNIT_TYPE_SLICE,
+ h264_NAL_UNIT_TYPE_DPA,
+ h264_NAL_UNIT_TYPE_DPB,
+ h264_NAL_UNIT_TYPE_DPC,
+ h264_NAL_UNIT_TYPE_IDR,
+ h264_NAL_UNIT_TYPE_SEI,
+ h264_NAL_UNIT_TYPE_SPS,
+ h264_NAL_UNIT_TYPE_PPS,
+ h264_NAL_UNIT_TYPE_Acc_unit_delimiter,
+ h264_NAL_UNIT_TYPE_EOSeq,
+ h264_NAL_UNIT_TYPE_EOstream,
+ h264_NAL_UNIT_TYPE_filler_data,
+ h264_NAL_UNIT_TYPE_SPS_extension,
+ h264_NAL_UNIT_TYPE_ACP = 19,
+ h264_NAL_UNIT_TYPE_Slice_extension = 20
+} h264_nal_unit_type_t;
+
+VideoDecoderAVCSecure::VideoDecoderAVCSecure(const char *mimeType)
+    : VideoDecoderAVC(mimeType) {
+ mFrameSize = 0;
+ mFrameData = NULL;
+ mIsEncryptData = 0;
+ mClearData = NULL;
+ mCachedHeader = NULL;
+ setParserType(VBP_H264SECURE);
+ mFrameIdx = 0;
+ mModularMode = 0;
+ mSliceNum = 0;
+}
+
+Decode_Status VideoDecoderAVCSecure::start(VideoConfigBuffer *buffer) {
+ VTRACE("VideoDecoderAVCSecure::start");
+
+ Decode_Status status = VideoDecoderAVC::start(buffer);
+ if (status != DECODE_SUCCESS) {
+ return status;
+ }
+
+ mClearData = new uint8_t [MAX_NALU_HEADER_BUFFER];
+ if (mClearData == NULL) {
+ ETRACE("Failed to allocate memory for mClearData");
+ return DECODE_MEMORY_FAIL;
+ }
+
+    mCachedHeader = new uint8_t [MAX_SLICEHEADER_BUFFER_SIZE];
+ if (mCachedHeader == NULL) {
+ ETRACE("Failed to allocate memory for mCachedHeader");
+ return DECODE_MEMORY_FAIL;
+ }
+
+ return status;
+}
+
+void VideoDecoderAVCSecure::stop(void) {
+ VTRACE("VideoDecoderAVCSecure::stop");
+ VideoDecoderAVC::stop();
+
+ if (mClearData) {
+ delete [] mClearData;
+ mClearData = NULL;
+ }
+
+ if (mCachedHeader) {
+ delete [] mCachedHeader;
+ mCachedHeader = NULL;
+ }
+}
+
+Decode_Status VideoDecoderAVCSecure::processModularInputBuffer(VideoDecodeBuffer *buffer, vbp_data_h264 **data)
+{
+ VTRACE("processModularInputBuffer +++");
+ Decode_Status status;
+ int32_t clear_data_size = 0;
+ uint8_t *clear_data = NULL;
+
+ int32_t nalu_num = 0;
+ uint8_t nalu_type = 0;
+ int32_t nalu_offset = 0;
+ uint32_t nalu_size = 0;
+ uint8_t naluType = 0;
+ uint8_t *nalu_data = NULL;
+ uint32_t sliceidx = 0;
+
+ frame_info_t *pFrameInfo = NULL;
+ mSliceNum = 0;
+ memset(&mSliceInfo, 0, sizeof(mSliceInfo));
+ mIsEncryptData = 0;
+
+ if (buffer->flag & IS_SECURE_DATA) {
+ VTRACE("Decoding protected video ...");
+ pFrameInfo = (frame_info_t *) buffer->data;
+ if (pFrameInfo == NULL) {
+ ETRACE("Invalid parameter: pFrameInfo is NULL!");
+ return DECODE_MEMORY_FAIL;
+ }
+
+ mFrameData = pFrameInfo->data;
+ mFrameSize = pFrameInfo->size;
+ VTRACE("mFrameData = %p, mFrameSize = %d", mFrameData, mFrameSize);
+
+ nalu_num = pFrameInfo->num_nalus;
+ VTRACE("nalu_num = %d", nalu_num);
+
+ if (nalu_num <= 0 || nalu_num >= MAX_NUM_NALUS) {
+ ETRACE("Invalid parameter: nalu_num = %d", nalu_num);
+ return DECODE_MEMORY_FAIL;
+ }
+
+ for (int32_t i = 0; i < nalu_num; i++) {
+
+ nalu_size = pFrameInfo->nalus[i].length;
+ nalu_type = pFrameInfo->nalus[i].type;
+ nalu_offset = pFrameInfo->nalus[i].offset;
+ nalu_data = pFrameInfo->nalus[i].data;
+ naluType = nalu_type & NALU_TYPE_MASK;
+
+ VTRACE("nalu_type = 0x%x, nalu_size = %d, nalu_offset = 0x%x", nalu_type, nalu_size, nalu_offset);
+
+ if (naluType >= h264_NAL_UNIT_TYPE_SLICE && naluType <= h264_NAL_UNIT_TYPE_IDR) {
+
+ mIsEncryptData = 1;
+ VTRACE("slice idx = %d", sliceidx);
+ mSliceInfo[sliceidx].sliceHeaderByte = nalu_type;
+ mSliceInfo[sliceidx].sliceStartOffset = (nalu_offset >> 4) << 4;
+ mSliceInfo[sliceidx].sliceByteOffset = nalu_offset - mSliceInfo[sliceidx].sliceStartOffset;
+ mSliceInfo[sliceidx].sliceLength = mSliceInfo[sliceidx].sliceByteOffset + nalu_size;
+ mSliceInfo[sliceidx].sliceSize = (mSliceInfo[sliceidx].sliceByteOffset + nalu_size + 0xF) & ~0xF;
+ VTRACE("sliceHeaderByte = 0x%x", mSliceInfo[sliceidx].sliceHeaderByte);
+ VTRACE("sliceStartOffset = %d", mSliceInfo[sliceidx].sliceStartOffset);
+ VTRACE("sliceByteOffset = %d", mSliceInfo[sliceidx].sliceByteOffset);
+ VTRACE("sliceSize = %d", mSliceInfo[sliceidx].sliceSize);
+ VTRACE("sliceLength = %d", mSliceInfo[sliceidx].sliceLength);
+
+#if 0
+ uint32_t testsize;
+ uint8_t *testdata;
+ testsize = mSliceInfo[sliceidx].sliceSize > 64 ? 64 : mSliceInfo[sliceidx].sliceSize ;
+ testdata = (uint8_t *)(mFrameData);
+ for (int i = 0; i < testsize; i++) {
+ VTRACE("testdata[%d] = 0x%x", i, testdata[i]);
+ }
+#endif
+ sliceidx++;
+
+ } else if (naluType == h264_NAL_UNIT_TYPE_SPS || naluType == h264_NAL_UNIT_TYPE_PPS) {
+ if (nalu_data == NULL) {
+ ETRACE("Invalid parameter: nalu_data = NULL for naluType 0x%x", naluType);
+ return DECODE_MEMORY_FAIL;
+ }
+ memcpy(mClearData + clear_data_size,
+ nalu_data,
+ nalu_size);
+ clear_data_size += nalu_size;
+ } else {
+ ITRACE("Nalu type = 0x%x is skipped", naluType);
+ continue;
+ }
+ }
+ clear_data = mClearData;
+ mSliceNum = sliceidx;
+
+ } else {
+ VTRACE("Decoding clear video ...");
+ mIsEncryptData = 0;
+ mFrameSize = buffer->size;
+ mFrameData = buffer->data;
+ clear_data = buffer->data;
+ clear_data_size = buffer->size;
+ }
+
+ if (clear_data_size > 0) {
+ status = VideoDecoderBase::parseBuffer(
+ clear_data,
+ clear_data_size,
+ false,
+ (void**)data);
+ CHECK_STATUS("VideoDecoderBase::parseBuffer");
+ } else {
+ status = VideoDecoderBase::queryBuffer((void**)data);
+ CHECK_STATUS("VideoDecoderBase::queryBuffer");
+ }
+ return DECODE_SUCCESS;
+}
+
+Decode_Status VideoDecoderAVCSecure::processClassicInputBuffer(VideoDecodeBuffer *buffer, vbp_data_h264 **data)
+{
+ Decode_Status status;
+ int32_t clear_data_size = 0;
+ uint8_t *clear_data = NULL;
+ uint8_t naluType = 0;
+
+ int32_t num_nalus;
+ int32_t nalu_offset;
+ int32_t offset;
+ uint8_t *data_src;
+ uint8_t *nalu_data;
+ uint32_t nalu_size;
+
+ if (buffer->flag & IS_SECURE_DATA) {
+ VTRACE("Decoding protected video ...");
+ mIsEncryptData = 1;
+
+ mFrameData = buffer->data;
+ mFrameSize = buffer->size;
+ VTRACE("mFrameData = %p, mFrameSize = %d", mFrameData, mFrameSize);
+ num_nalus = *(uint32_t *)(buffer->data + buffer->size + sizeof(uint32_t));
+ VTRACE("num_nalus = %d", num_nalus);
+ offset = 4;
+ for (int32_t i = 0; i < num_nalus; i++) {
+ VTRACE("%d nalu, offset = %d", i, offset);
+ data_src = buffer->data + buffer->size + sizeof(uint32_t) + offset;
+ nalu_size = *(uint32_t *)(data_src + 2 * sizeof(uint32_t));
+ nalu_size = (nalu_size + 0x03) & (~0x03);
+
+ nalu_data = data_src + 3 *sizeof(uint32_t);
+ naluType = nalu_data[0] & NALU_TYPE_MASK;
+ offset += nalu_size + 3 *sizeof(uint32_t);
+ VTRACE("naluType = 0x%x", naluType);
+ VTRACE("nalu_size = %d, nalu_data = %p", nalu_size, nalu_data);
+
+ if (naluType >= h264_NAL_UNIT_TYPE_SLICE && naluType <= h264_NAL_UNIT_TYPE_IDR) {
+ ETRACE("Slice NALU received!");
+ return DECODE_INVALID_DATA;
+ }
+
+ else if (naluType >= h264_NAL_UNIT_TYPE_SEI && naluType <= h264_NAL_UNIT_TYPE_PPS) {
+ memcpy(mClearData + clear_data_size,
+ startcodePrefix,
+ STARTCODE_PREFIX_LEN);
+ clear_data_size += STARTCODE_PREFIX_LEN;
+ memcpy(mClearData + clear_data_size,
+ nalu_data,
+ nalu_size);
+ clear_data_size += nalu_size;
+ } else {
+ ETRACE("Failure: DECODE_FRAME_DROPPED");
+ return DECODE_FRAME_DROPPED;
+ }
+ }
+ clear_data = mClearData;
+ } else {
+ VTRACE("Decoding clear video ...");
+ mIsEncryptData = 0;
+ mFrameSize = buffer->size;
+ mFrameData = buffer->data;
+ clear_data = buffer->data;
+ clear_data_size = buffer->size;
+ }
+
+ if (clear_data_size > 0) {
+ status = VideoDecoderBase::parseBuffer(
+ clear_data,
+ clear_data_size,
+ false,
+ (void**)data);
+ CHECK_STATUS("VideoDecoderBase::parseBuffer");
+ } else {
+ status = VideoDecoderBase::queryBuffer((void**)data);
+ CHECK_STATUS("VideoDecoderBase::queryBuffer");
+ }
+ return DECODE_SUCCESS;
+}
+
+Decode_Status VideoDecoderAVCSecure::decode(VideoDecodeBuffer *buffer) {
+ VTRACE("VideoDecoderAVCSecure::decode");
+ Decode_Status status;
+ vbp_data_h264 *data = NULL;
+ if (buffer == NULL) {
+ return DECODE_INVALID_DATA;
+ }
+
+#if 0
+ uint32_t testsize;
+ uint8_t *testdata;
+    testsize = buffer->size > 16 ? 16 : buffer->size;
+    testdata = (uint8_t *)(buffer->data);
+    for (uint32_t i = 0; i < testsize; i++) {
+ VTRACE("testdata[%d] = 0x%x", i, testdata[i]);
+ }
+#endif
+ if (buffer->flag & IS_SUBSAMPLE_ENCRYPTION) {
+ mModularMode = 1;
+ }
+
+ if (mModularMode) {
+        status = processModularInputBuffer(buffer, &data);
+ CHECK_STATUS("processModularInputBuffer");
+ }
+ else {
+        status = processClassicInputBuffer(buffer, &data);
+ CHECK_STATUS("processClassicInputBuffer");
+ }
+
+ if (!mVAStarted) {
+ if (data->has_sps && data->has_pps) {
+ status = startVA(data);
+ CHECK_STATUS("startVA");
+ } else {
+ WTRACE("Can't start VA as either SPS or PPS is still not available.");
+ return DECODE_SUCCESS;
+ }
+ }
+
+ status = decodeFrame(buffer, data);
+
+ return status;
+}
+
+Decode_Status VideoDecoderAVCSecure::decodeFrame(VideoDecodeBuffer *buffer, vbp_data_h264 *data) {
+ VTRACE("VideoDecoderAVCSecure::decodeFrame");
+ Decode_Status status;
+ VTRACE("data->has_sps = %d, data->has_pps = %d", data->has_sps, data->has_pps);
+
+#if 0
+ // Don't remove the following codes, it can be enabled for debugging DPB.
+ for (unsigned int i = 0; i < data->num_pictures; i++) {
+ VAPictureH264 &pic = data->pic_data[i].pic_parms->CurrPic;
+ VTRACE("%d: decoding frame %.2f, poc top = %d, poc bottom = %d, flags = %d, reference = %d",
+ i,
+ buffer->timeStamp/1E6,
+ pic.TopFieldOrderCnt,
+ pic.BottomFieldOrderCnt,
+ pic.flags,
+ (pic.flags & VA_PICTURE_H264_SHORT_TERM_REFERENCE) ||
+ (pic.flags & VA_PICTURE_H264_LONG_TERM_REFERENCE));
+ }
+#endif
+
+ if (data->new_sps || data->new_pps) {
+ status = handleNewSequence(data);
+ CHECK_STATUS("handleNewSequence");
+ }
+
+ if (mModularMode && (!mIsEncryptData)) {
+ if (data->pic_data[0].num_slices == 0) {
+ ITRACE("No slice available for decoding.");
+ status = mSizeChanged ? DECODE_FORMAT_CHANGE : DECODE_SUCCESS;
+ mSizeChanged = false;
+ return status;
+ }
+ }
+
+ uint64_t lastPTS = mCurrentPTS;
+ mCurrentPTS = buffer->timeStamp;
+
+ // start decoding a new frame
+ status = acquireSurfaceBuffer();
+ CHECK_STATUS("acquireSurfaceBuffer");
+
+    if (mModularMode) {
+        status = parseModularSliceHeader(data);
+    }
+    else {
+        status = parseClassicSliceHeader(data);
+    }
+
+ if (status != DECODE_SUCCESS) {
+ endDecodingFrame(true);
+ return status;
+ }
+
+ status = beginDecodingFrame(data);
+ CHECK_STATUS("beginDecodingFrame");
+
+ // finish decoding the last frame
+ status = endDecodingFrame(false);
+ CHECK_STATUS("endDecodingFrame");
+
+ if (isNewFrame(data, lastPTS == mCurrentPTS) == 0) {
+ ETRACE("Can't handle interlaced frames yet");
+ return DECODE_FAIL;
+ }
+
+ return DECODE_SUCCESS;
+}
+
+Decode_Status VideoDecoderAVCSecure::beginDecodingFrame(vbp_data_h264 *data) {
+ VTRACE("VideoDecoderAVCSecure::beginDecodingFrame");
+ Decode_Status status;
+ VAPictureH264 *picture = &(data->pic_data[0].pic_parms->CurrPic);
+ if ((picture->flags & VA_PICTURE_H264_SHORT_TERM_REFERENCE) ||
+ (picture->flags & VA_PICTURE_H264_LONG_TERM_REFERENCE)) {
+ mAcquiredBuffer->referenceFrame = true;
+ } else {
+ mAcquiredBuffer->referenceFrame = false;
+ }
+
+ if (picture->flags & VA_PICTURE_H264_TOP_FIELD) {
+ mAcquiredBuffer->renderBuffer.scanFormat = VA_BOTTOM_FIELD | VA_TOP_FIELD;
+ } else {
+ mAcquiredBuffer->renderBuffer.scanFormat = VA_FRAME_PICTURE;
+ }
+
+ mAcquiredBuffer->renderBuffer.flag = 0;
+ mAcquiredBuffer->renderBuffer.timeStamp = mCurrentPTS;
+ mAcquiredBuffer->pictureOrder = getPOC(picture);
+
+ if (mSizeChanged) {
+ mAcquiredBuffer->renderBuffer.flag |= IS_RESOLUTION_CHANGE;
+ mSizeChanged = false;
+ }
+
+ status = continueDecodingFrame(data);
+ return status;
+}
+
+Decode_Status VideoDecoderAVCSecure::continueDecodingFrame(vbp_data_h264 *data) {
+ VTRACE("VideoDecoderAVCSecure::continueDecodingFrame");
+ Decode_Status status;
+ vbp_picture_data_h264 *picData = data->pic_data;
+
+ if (mAcquiredBuffer == NULL || mAcquiredBuffer->renderBuffer.surface == VA_INVALID_SURFACE) {
+ ETRACE("mAcquiredBuffer is NULL. Implementation bug.");
+ return DECODE_FAIL;
+ }
+ VTRACE("data->num_pictures = %d", data->num_pictures);
+ for (uint32_t picIndex = 0; picIndex < data->num_pictures; picIndex++, picData++) {
+ if (picData == NULL || picData->pic_parms == NULL || picData->slc_data == NULL || picData->num_slices == 0) {
+ return DECODE_PARSER_FAIL;
+ }
+
+ if (picIndex > 0 &&
+ (picData->pic_parms->CurrPic.flags & (VA_PICTURE_H264_TOP_FIELD | VA_PICTURE_H264_BOTTOM_FIELD)) == 0) {
+ ETRACE("Packed frame is not supported yet!");
+ return DECODE_FAIL;
+ }
+ VTRACE("picData->num_slices = %d", picData->num_slices);
+ for (uint32_t sliceIndex = 0; sliceIndex < picData->num_slices; sliceIndex++) {
+ status = decodeSlice(data, picIndex, sliceIndex);
+ if (status != DECODE_SUCCESS) {
+ endDecodingFrame(true);
+ // remove current frame from DPB as it can't be decoded.
+ removeReferenceFromDPB(picData->pic_parms);
+ return status;
+ }
+ }
+ }
+ mDecodingFrame = true;
+
+ return DECODE_SUCCESS;
+}
+
+Decode_Status VideoDecoderAVCSecure::parseClassicSliceHeader(vbp_data_h264 *data) {
+ Decode_Status status;
+ VAStatus vaStatus;
+
+ VABufferID sliceheaderbufferID;
+ VABufferID pictureparameterparsingbufferID;
+    VABufferID slicebufferID;
+
+ if (mFrameSize <= 0) {
+ return DECODE_SUCCESS;
+ }
+ vaStatus = vaBeginPicture(mVADisplay, mVAContext, mAcquiredBuffer->renderBuffer.surface);
+ CHECK_VA_STATUS("vaBeginPicture");
+
+ vaStatus = vaCreateBuffer(
+ mVADisplay,
+ mVAContext,
+ VAParseSliceHeaderGroupBufferType,
+ MAX_SLICEHEADER_BUFFER_SIZE,
+ 1,
+ NULL,
+ &sliceheaderbufferID);
+ CHECK_VA_STATUS("vaCreateSliceHeaderGroupBuffer");
+
+ void *sliceheaderbuf;
+ vaStatus = vaMapBuffer(
+ mVADisplay,
+ sliceheaderbufferID,
+ &sliceheaderbuf);
+ CHECK_VA_STATUS("vaMapBuffer");
+
+ memset(sliceheaderbuf, 0, MAX_SLICEHEADER_BUFFER_SIZE);
+
+ vaStatus = vaUnmapBuffer(
+ mVADisplay,
+ sliceheaderbufferID);
+ CHECK_VA_STATUS("vaUnmapBuffer");
+
+
+ vaStatus = vaCreateBuffer(
+ mVADisplay,
+ mVAContext,
+ VASliceDataBufferType,
+ mFrameSize, //size
+ 1, //num_elements
+ mFrameData,
+        &slicebufferID);
+ CHECK_VA_STATUS("vaCreateSliceDataBuffer");
+
+    data->pic_parse_buffer->frame_buf_id = slicebufferID;
+ data->pic_parse_buffer->slice_headers_buf_id = sliceheaderbufferID;
+ data->pic_parse_buffer->frame_size = mFrameSize;
+ data->pic_parse_buffer->slice_headers_size = MAX_SLICEHEADER_BUFFER_SIZE;
+
+#if 0
+
+ VTRACE("flags.bits.frame_mbs_only_flag = %d", data->pic_parse_buffer->flags.bits.frame_mbs_only_flag);
+ VTRACE("flags.bits.pic_order_present_flag = %d", data->pic_parse_buffer->flags.bits.pic_order_present_flag);
+ VTRACE("flags.bits.delta_pic_order_always_zero_flag = %d", data->pic_parse_buffer->flags.bits.delta_pic_order_always_zero_flag);
+ VTRACE("flags.bits.redundant_pic_cnt_present_flag = %d", data->pic_parse_buffer->flags.bits.redundant_pic_cnt_present_flag);
+ VTRACE("flags.bits.weighted_pred_flag = %d", data->pic_parse_buffer->flags.bits.weighted_pred_flag);
+ VTRACE("flags.bits.entropy_coding_mode_flag = %d", data->pic_parse_buffer->flags.bits.entropy_coding_mode_flag);
+ VTRACE("flags.bits.deblocking_filter_control_present_flag = %d", data->pic_parse_buffer->flags.bits.deblocking_filter_control_present_flag);
+ VTRACE("flags.bits.weighted_bipred_idc = %d", data->pic_parse_buffer->flags.bits.weighted_bipred_idc);
+
+ VTRACE("pic_parse_buffer->expected_pic_parameter_set_id = %d", data->pic_parse_buffer->expected_pic_parameter_set_id);
+ VTRACE("pic_parse_buffer->num_slice_groups_minus1 = %d", data->pic_parse_buffer->num_slice_groups_minus1);
+ VTRACE("pic_parse_buffer->chroma_format_idc = %d", data->pic_parse_buffer->chroma_format_idc);
+ VTRACE("pic_parse_buffer->log2_max_pic_order_cnt_lsb_minus4 = %d", data->pic_parse_buffer->log2_max_pic_order_cnt_lsb_minus4);
+ VTRACE("pic_parse_buffer->pic_order_cnt_type = %d", data->pic_parse_buffer->pic_order_cnt_type);
+ VTRACE("pic_parse_buffer->residual_colour_transform_flag = %d", data->pic_parse_buffer->residual_colour_transform_flag);
+ VTRACE("pic_parse_buffer->num_ref_idc_l0_active_minus1 = %d", data->pic_parse_buffer->num_ref_idc_l0_active_minus1);
+ VTRACE("pic_parse_buffer->num_ref_idc_l1_active_minus1 = %d", data->pic_parse_buffer->num_ref_idc_l1_active_minus1);
+#endif
+
+ vaStatus = vaCreateBuffer(
+ mVADisplay,
+ mVAContext,
+ VAParsePictureParameterBufferType,
+ sizeof(VAParsePictureParameterBuffer),
+ 1,
+ data->pic_parse_buffer,
+ &pictureparameterparsingbufferID);
+ CHECK_VA_STATUS("vaCreatePictureParameterParsingBuffer");
+
+ vaStatus = vaRenderPicture(
+ mVADisplay,
+ mVAContext,
+ &pictureparameterparsingbufferID,
+ 1);
+ CHECK_VA_STATUS("vaRenderPicture");
+
+ vaStatus = vaMapBuffer(
+ mVADisplay,
+ sliceheaderbufferID,
+ &sliceheaderbuf);
+ CHECK_VA_STATUS("vaMapBuffer");
+
+    status = updateSliceParameter(data, sliceheaderbuf);
+    CHECK_STATUS("updateSliceParameter");
+
+ vaStatus = vaUnmapBuffer(
+ mVADisplay,
+ sliceheaderbufferID);
+ CHECK_VA_STATUS("vaUnmapBuffer");
+
+ return DECODE_SUCCESS;
+}
+
+Decode_Status VideoDecoderAVCSecure::parseModularSliceHeader(vbp_data_h264 *data) {
+ Decode_Status status;
+ VAStatus vaStatus;
+
+ VABufferID sliceheaderbufferID;
+ VABufferID pictureparameterparsingbufferID;
+    VABufferID slicebufferID;
+ int32_t sliceIdx;
+
+ vaStatus = vaBeginPicture(mVADisplay, mVAContext, mAcquiredBuffer->renderBuffer.surface);
+ CHECK_VA_STATUS("vaBeginPicture");
+
+    if (mFrameSize <= 0 || mSliceNum <= 0) {
+ return DECODE_SUCCESS;
+ }
+ void *sliceheaderbuf;
+ memset(mCachedHeader, 0, MAX_SLICEHEADER_BUFFER_SIZE);
+ int32_t offset = 0;
+ int32_t size = 0;
+
+ for (sliceIdx = 0; sliceIdx < mSliceNum; sliceIdx++) {
+ vaStatus = vaCreateBuffer(
+ mVADisplay,
+ mVAContext,
+ VAParseSliceHeaderGroupBufferType,
+ MAX_SLICEHEADER_BUFFER_SIZE,
+ 1,
+ NULL,
+ &sliceheaderbufferID);
+ CHECK_VA_STATUS("vaCreateSliceHeaderGroupBuffer");
+
+ vaStatus = vaMapBuffer(
+ mVADisplay,
+ sliceheaderbufferID,
+ &sliceheaderbuf);
+ CHECK_VA_STATUS("vaMapBuffer");
+
+ memset(sliceheaderbuf, 0, MAX_SLICEHEADER_BUFFER_SIZE);
+
+ vaStatus = vaUnmapBuffer(
+ mVADisplay,
+ sliceheaderbufferID);
+ CHECK_VA_STATUS("vaUnmapBuffer");
+
+ vaStatus = vaCreateBuffer(
+ mVADisplay,
+ mVAContext,
+ VASliceDataBufferType,
+ mSliceInfo[sliceIdx].sliceSize, //size
+ 1, //num_elements
+ mFrameData + mSliceInfo[sliceIdx].sliceStartOffset,
+            &slicebufferID);
+ CHECK_VA_STATUS("vaCreateSliceDataBuffer");
+
+        data->pic_parse_buffer->frame_buf_id = slicebufferID;
+ data->pic_parse_buffer->slice_headers_buf_id = sliceheaderbufferID;
+ data->pic_parse_buffer->frame_size = mSliceInfo[sliceIdx].sliceLength;
+ data->pic_parse_buffer->slice_headers_size = MAX_SLICEHEADER_BUFFER_SIZE;
+ data->pic_parse_buffer->nalu_header.value = mSliceInfo[sliceIdx].sliceHeaderByte;
+ data->pic_parse_buffer->slice_offset = mSliceInfo[sliceIdx].sliceByteOffset;
+
+#if 0
+ VTRACE("data->pic_parse_buffer->slice_offset = 0x%x", data->pic_parse_buffer->slice_offset);
+ VTRACE("pic_parse_buffer->nalu_header.value = %x", data->pic_parse_buffer->nalu_header.value = mSliceInfo[sliceIdx].sliceHeaderByte);
+ VTRACE("flags.bits.frame_mbs_only_flag = %d", data->pic_parse_buffer->flags.bits.frame_mbs_only_flag);
+ VTRACE("flags.bits.pic_order_present_flag = %d", data->pic_parse_buffer->flags.bits.pic_order_present_flag);
+ VTRACE("flags.bits.delta_pic_order_always_zero_flag = %d", data->pic_parse_buffer->flags.bits.delta_pic_order_always_zero_flag);
+ VTRACE("flags.bits.redundant_pic_cnt_present_flag = %d", data->pic_parse_buffer->flags.bits.redundant_pic_cnt_present_flag);
+ VTRACE("flags.bits.weighted_pred_flag = %d", data->pic_parse_buffer->flags.bits.weighted_pred_flag);
+ VTRACE("flags.bits.entropy_coding_mode_flag = %d", data->pic_parse_buffer->flags.bits.entropy_coding_mode_flag);
+ VTRACE("flags.bits.deblocking_filter_control_present_flag = %d", data->pic_parse_buffer->flags.bits.deblocking_filter_control_present_flag);
+ VTRACE("flags.bits.weighted_bipred_idc = %d", data->pic_parse_buffer->flags.bits.weighted_bipred_idc);
+ VTRACE("pic_parse_buffer->expected_pic_parameter_set_id = %d", data->pic_parse_buffer->expected_pic_parameter_set_id);
+ VTRACE("pic_parse_buffer->num_slice_groups_minus1 = %d", data->pic_parse_buffer->num_slice_groups_minus1);
+ VTRACE("pic_parse_buffer->chroma_format_idc = %d", data->pic_parse_buffer->chroma_format_idc);
+ VTRACE("pic_parse_buffer->log2_max_pic_order_cnt_lsb_minus4 = %d", data->pic_parse_buffer->log2_max_pic_order_cnt_lsb_minus4);
+ VTRACE("pic_parse_buffer->pic_order_cnt_type = %d", data->pic_parse_buffer->pic_order_cnt_type);
+ VTRACE("pic_parse_buffer->residual_colour_transform_flag = %d", data->pic_parse_buffer->residual_colour_transform_flag);
+ VTRACE("pic_parse_buffer->num_ref_idc_l0_active_minus1 = %d", data->pic_parse_buffer->num_ref_idc_l0_active_minus1);
+ VTRACE("pic_parse_buffer->num_ref_idc_l1_active_minus1 = %d", data->pic_parse_buffer->num_ref_idc_l1_active_minus1);
+#endif
+ vaStatus = vaCreateBuffer(
+ mVADisplay,
+ mVAContext,
+ VAParsePictureParameterBufferType,
+ sizeof(VAParsePictureParameterBuffer),
+ 1,
+ data->pic_parse_buffer,
+ &pictureparameterparsingbufferID);
+ CHECK_VA_STATUS("vaCreatePictureParameterParsingBuffer");
+
+ vaStatus = vaRenderPicture(
+ mVADisplay,
+ mVAContext,
+ &pictureparameterparsingbufferID,
+ 1);
+ CHECK_VA_STATUS("vaRenderPicture");
+
+ vaStatus = vaMapBuffer(
+ mVADisplay,
+ sliceheaderbufferID,
+ &sliceheaderbuf);
+ CHECK_VA_STATUS("vaMapBuffer");
+
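+        // the parsed header length is written at byte offset 4 of the output
+        // buffer; add 4 so the copy also includes the length field itself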
+ size = *(uint32 *)((uint8 *)sliceheaderbuf + 4) + 4;
+ VTRACE("slice header size = 0x%x, offset = 0x%x", size, offset);
+ if (offset + size <= MAX_SLICEHEADER_BUFFER_SIZE - 4) {
+ memcpy(mCachedHeader+offset, sliceheaderbuf, size);
+ offset += size;
+ } else {
+ WTRACE("Cached slice header is not big enough!");
+ }
+ vaStatus = vaUnmapBuffer(
+ mVADisplay,
+ sliceheaderbufferID);
+ CHECK_VA_STATUS("vaUnmapBuffer");
+ }
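+    // append a 4-byte 0xFF end-of-headers marker after the cached slice headers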
+ memset(mCachedHeader + offset, 0xFF, 4);
+    status = updateSliceParameter(data, mCachedHeader);
+    CHECK_STATUS("updateSliceParameter");
+ return DECODE_SUCCESS;
+}
+
+
+Decode_Status VideoDecoderAVCSecure::updateSliceParameter(vbp_data_h264 *data, void *sliceheaderbuf) {
+ VTRACE("VideoDecoderAVCSecure::updateSliceParameter");
+ Decode_Status status;
+ status = VideoDecoderBase::updateBuffer(
+ (uint8_t *)sliceheaderbuf,
+ MAX_SLICEHEADER_BUFFER_SIZE,
+ (void**)&data);
+ CHECK_STATUS("updateBuffer");
+ return DECODE_SUCCESS;
+}
+
+Decode_Status VideoDecoderAVCSecure::decodeSlice(vbp_data_h264 *data, uint32_t picIndex, uint32_t sliceIndex) {
+ Decode_Status status;
+ VAStatus vaStatus;
+ uint32_t bufferIDCount = 0;
+ // maximum 3 buffers to render a slice: picture parameter, IQMatrix, slice parameter
+ VABufferID bufferIDs[3];
+
+ vbp_picture_data_h264 *picData = &(data->pic_data[picIndex]);
+ vbp_slice_data_h264 *sliceData = &(picData->slc_data[sliceIndex]);
+ VAPictureParameterBufferH264 *picParam = picData->pic_parms;
+ VASliceParameterBufferH264 *sliceParam = &(sliceData->slc_parms);
+ uint32_t slice_data_size = 0;
+ uint8_t* slice_data_addr = NULL;
+
+ if (sliceParam->first_mb_in_slice == 0 || mDecodingFrame == false) {
+ // either condition indicates start of a new frame
+ if (sliceParam->first_mb_in_slice != 0) {
+ WTRACE("The first slice is lost.");
+ }
+ VTRACE("Current frameidx = %d", mFrameIdx++);
+ // Update the reference frames and surface IDs for DPB and current frame
+ status = updateDPB(picParam);
+ CHECK_STATUS("updateDPB");
+
+        //We have to provide a hacked DPB rather than the complete DPB to libva as a workaround
+ status = updateReferenceFrames(picData);
+ CHECK_STATUS("updateReferenceFrames");
+
+ mDecodingFrame = true;
+
+ vaStatus = vaCreateBuffer(
+ mVADisplay,
+ mVAContext,
+ VAPictureParameterBufferType,
+ sizeof(VAPictureParameterBufferH264),
+ 1,
+ picParam,
+ &bufferIDs[bufferIDCount]);
+ CHECK_VA_STATUS("vaCreatePictureParameterBuffer");
+ bufferIDCount++;
+
+ vaStatus = vaCreateBuffer(
+ mVADisplay,
+ mVAContext,
+ VAIQMatrixBufferType,
+ sizeof(VAIQMatrixBufferH264),
+ 1,
+ data->IQ_matrix_buf,
+ &bufferIDs[bufferIDCount]);
+ CHECK_VA_STATUS("vaCreateIQMatrixBuffer");
+ bufferIDCount++;
+ }
+
+ status = setReference(sliceParam);
+ CHECK_STATUS("setReference");
+
+ if (mModularMode) {
+ if (mIsEncryptData) {
+ sliceParam->slice_data_size = mSliceInfo[sliceIndex].sliceSize;
+ slice_data_size = mSliceInfo[sliceIndex].sliceSize;
+ slice_data_addr = mFrameData + mSliceInfo[sliceIndex].sliceStartOffset;
+ } else {
+ slice_data_size = sliceData->slice_size;
+ slice_data_addr = sliceData->buffer_addr + sliceData->slice_offset;
+ }
+ } else {
+ sliceParam->slice_data_size = mFrameSize;
+ slice_data_size = mFrameSize;
+ slice_data_addr = mFrameData;
+ }
+
+ vaStatus = vaCreateBuffer(
+ mVADisplay,
+ mVAContext,
+ VASliceParameterBufferType,
+ sizeof(VASliceParameterBufferH264),
+ 1,
+ sliceParam,
+ &bufferIDs[bufferIDCount]);
+ CHECK_VA_STATUS("vaCreateSliceParameterBuffer");
+ bufferIDCount++;
+
+ vaStatus = vaRenderPicture(
+ mVADisplay,
+ mVAContext,
+ bufferIDs,
+ bufferIDCount);
+ CHECK_VA_STATUS("vaRenderPicture");
+
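+    // the slice data is submitted in a separate buffer, after the parameter buffers above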
+ VABufferID slicebufferID;
+
+ vaStatus = vaCreateBuffer(
+ mVADisplay,
+ mVAContext,
+ VASliceDataBufferType,
+ slice_data_size, //size
+ 1, //num_elements
+ slice_data_addr,
+ &slicebufferID);
+ CHECK_VA_STATUS("vaCreateSliceDataBuffer");
+
+ vaStatus = vaRenderPicture(
+ mVADisplay,
+ mVAContext,
+ &slicebufferID,
+ 1);
+ CHECK_VA_STATUS("vaRenderPicture");
+
+ return DECODE_SUCCESS;
+
+}
+
+Decode_Status VideoDecoderAVCSecure::getCodecSpecificConfigs(
+ VAProfile profile, VAConfigID *config)
+{
+ VAStatus vaStatus;
+ VAConfigAttrib attrib[2];
+
+ if (config == NULL) {
+ ETRACE("Invalid parameter!");
+ return DECODE_FAIL;
+ }
+
+ attrib[0].type = VAConfigAttribRTFormat;
+ attrib[0].value = VA_RT_FORMAT_YUV420;
+ attrib[1].type = VAConfigAttribDecSliceMode;
+ attrib[1].value = VA_DEC_SLICE_MODE_NORMAL;
+ if (mModularMode) {
+ attrib[1].value = VA_DEC_SLICE_MODE_SUBSAMPLE;
+ }
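+    // modular (subsample encryption) DRM requires the subsample slice decoding mode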
+
+ vaStatus = vaCreateConfig(
+ mVADisplay,
+ profile,
+ VAEntrypointVLD,
+ &attrib[0],
+ 2,
+ config);
+ CHECK_VA_STATUS("vaCreateConfig");
+
+ return DECODE_SUCCESS;
+}
diff --git a/videodecoder/securevideo/moorefield/VideoDecoderAVCSecure.h b/videodecoder/securevideo/moorefield/VideoDecoderAVCSecure.h
new file mode 100644
index 0000000..f66d7b8
--- /dev/null
+++ b/videodecoder/securevideo/moorefield/VideoDecoderAVCSecure.h
@@ -0,0 +1,69 @@
+/*
+* Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+#ifndef VIDEO_DECODER_AVC_SECURE_H
+#define VIDEO_DECODER_AVC_SECURE_H
+
+#include "VideoDecoderBase.h"
+#include "VideoDecoderAVC.h"
+#include "VideoDecoderDefs.h"
+
+class VideoDecoderAVCSecure : public VideoDecoderAVC {
+public:
+ VideoDecoderAVCSecure(const char *mimeType);
+ virtual Decode_Status start(VideoConfigBuffer *buffer);
+ virtual void stop(void);
+
+    // data in the buffer to be decoded may be (partially or fully) encrypted.
+ virtual Decode_Status decode(VideoDecodeBuffer *buffer);
+protected:
+ virtual Decode_Status decodeFrame(VideoDecodeBuffer *buffer, vbp_data_h264 *data);
+ virtual Decode_Status continueDecodingFrame(vbp_data_h264 *data);
+ virtual Decode_Status beginDecodingFrame(vbp_data_h264 *data);
+    virtual Decode_Status getCodecSpecificConfigs(VAProfile profile, VAConfigID *config);
+ Decode_Status parseClassicSliceHeader(vbp_data_h264 *data);
+ Decode_Status parseModularSliceHeader(vbp_data_h264 *data);
+
+ Decode_Status updateSliceParameter(vbp_data_h264 *data, void *sliceheaderbuf);
+ virtual Decode_Status decodeSlice(vbp_data_h264 *data, uint32_t picIndex, uint32_t sliceIndex);
+private:
+ Decode_Status processClassicInputBuffer(VideoDecodeBuffer *buffer, vbp_data_h264 **data);
+ Decode_Status processModularInputBuffer(VideoDecodeBuffer *buffer, vbp_data_h264 **data);
+ int32_t mIsEncryptData;
+ int32_t mFrameSize;
+ uint8_t* mFrameData;
+ uint8_t* mClearData;
+ uint8_t* mCachedHeader;
+ int32_t mFrameIdx;
+ int32_t mModularMode;
+
+ enum {
+ MAX_SLICE_HEADER_NUM = 256,
+ };
+ int32_t mSliceNum;
+    // Information about slices in the modular DRM mode
+ struct SliceInfo {
+ uint8_t sliceHeaderByte; // first byte of the slice header
+ uint32_t sliceStartOffset; // offset of Slice unit in the firewalled buffer
+ uint32_t sliceByteOffset; // extra offset from the blockAligned slice offset
+ uint32_t sliceSize; // block aligned length of slice unit
+ uint32_t sliceLength; // actual size of the slice
+ };
+
+ SliceInfo mSliceInfo[MAX_SLICE_HEADER_NUM];
+};
+
+#endif
diff --git a/videodecoder/securevideo/moorefield/VideoFrameInfo.h b/videodecoder/securevideo/moorefield/VideoFrameInfo.h
new file mode 100755
index 0000000..485b0da
--- /dev/null
+++ b/videodecoder/securevideo/moorefield/VideoFrameInfo.h
@@ -0,0 +1,36 @@
+/*
+* Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+#ifndef VIDEO_FRAME_INFO_H_
+#define VIDEO_FRAME_INFO_H_
+
+#define MAX_NUM_NALUS 16
+
+typedef struct {
+ uint8_t type; // nalu type + nal_ref_idc
+ uint32_t offset; // offset to the pointer of the encrypted data
+ uint8_t* data; // if the nalu is encrypted, this field is useless; if current NALU is SPS/PPS, data is the pointer to clear SPS/PPS data
+ uint32_t length; // nalu length
+} nalu_info_t;
+
+typedef struct {
+ uint8_t* data; // pointer to the encrypted data
+ uint32_t size; // encrypted data size
+ uint32_t num_nalus; // number of NALU
+ nalu_info_t nalus[MAX_NUM_NALUS];
+} frame_info_t;
+
+#endif
diff --git a/videodecoder/use_util_sse4.h b/videodecoder/use_util_sse4.h
new file mode 100644
index 0000000..454099d
--- /dev/null
+++ b/videodecoder/use_util_sse4.h
@@ -0,0 +1,93 @@
+/*
+* Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+#include <emmintrin.h>
+#include <x86intrin.h>
+#include <string.h> // for memcpy
+
+inline void stream_memcpy(void* dst_buff, const void* src_buff, size_t size)
+{
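+    // _mm_stream_load_si128 requires 16-byte aligned addresses; fall back to plain memcpy otherwise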
+ bool isAligned = (((size_t)(src_buff) | (size_t)(dst_buff)) & 0xF) == 0;
+ if (!isAligned) {
+ memcpy(dst_buff, src_buff, size);
+ return;
+ }
+
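+    // the main loop streams 8 x 16 = 128 bytes per iteration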
+ static const size_t regs_count = 8;
+
+ __m128i xmm_data0, xmm_data1, xmm_data2, xmm_data3;
+ __m128i xmm_data4, xmm_data5, xmm_data6, xmm_data7;
+
+ size_t remain_data = size & (regs_count * sizeof(xmm_data0) - 1);
+ size_t end_position = 0;
+
+ __m128i* pWb_buff = (__m128i*)dst_buff;
+ __m128i* pWb_buff_end = pWb_buff + ((size - remain_data) >> 4);
+ __m128i* pWc_buff = (__m128i*)src_buff;
+
+    /*sync the WC (write-combining) memory data before the streaming loads*/
+ _mm_mfence();
+
+ while (pWb_buff < pWb_buff_end)
+ {
+ xmm_data0 = _mm_stream_load_si128(pWc_buff);
+ xmm_data1 = _mm_stream_load_si128(pWc_buff + 1);
+ xmm_data2 = _mm_stream_load_si128(pWc_buff + 2);
+ xmm_data3 = _mm_stream_load_si128(pWc_buff + 3);
+ xmm_data4 = _mm_stream_load_si128(pWc_buff + 4);
+ xmm_data5 = _mm_stream_load_si128(pWc_buff + 5);
+ xmm_data6 = _mm_stream_load_si128(pWc_buff + 6);
+ xmm_data7 = _mm_stream_load_si128(pWc_buff + 7);
+
+ pWc_buff += regs_count;
+ _mm_store_si128(pWb_buff, xmm_data0);
+ _mm_store_si128(pWb_buff + 1, xmm_data1);
+ _mm_store_si128(pWb_buff + 2, xmm_data2);
+ _mm_store_si128(pWb_buff + 3, xmm_data3);
+ _mm_store_si128(pWb_buff + 4, xmm_data4);
+ _mm_store_si128(pWb_buff + 5, xmm_data5);
+ _mm_store_si128(pWb_buff + 6, xmm_data6);
+ _mm_store_si128(pWb_buff + 7, xmm_data7);
+
+ pWb_buff += regs_count;
+ }
+
+    /*copy the remainder in 16-byte steps*/
+ if (remain_data >= 16)
+ {
+ size = remain_data;
+ remain_data = size & 15;
+ end_position = size >> 4;
+ for (size_t i = 0; i < end_position; ++i)
+ {
+ pWb_buff[i] = _mm_stream_load_si128(pWc_buff + i);
+ }
+ }
+
+    /*copy any remaining tail bytes*/
+ if (remain_data)
+ {
+ __m128i temp_data = _mm_stream_load_si128(pWc_buff + end_position);
+
+ char* psrc_buf = (char*)(&temp_data);
+ char* pdst_buf = (char*)(pWb_buff + end_position);
+
+ for (size_t i = 0; i < remain_data; ++i)
+ {
+ pdst_buf[i] = psrc_buf[i];
+ }
+ }
+
+}
diff --git a/videoencoder/Android.mk b/videoencoder/Android.mk
new file mode 100644
index 0000000..1fc4d9a
--- /dev/null
+++ b/videoencoder/Android.mk
@@ -0,0 +1,110 @@
+LOCAL_PATH := $(call my-dir)
+include $(CLEAR_VARS)
+
+ifeq ($(ENABLE_IMG_GRAPHICS),)
+LOCAL_CFLAGS += \
+ -DBX_RC \
+ -DOSCL_IMPORT_REF= \
+ -DOSCL_UNUSED_ARG= \
+ -DOSCL_EXPORT_REF=
+
+LOCAL_STATIC_LIBRARIES := \
+ libstagefright_m4vh263enc
+endif
+
+LOCAL_SRC_FILES := \
+ VideoEncoderBase.cpp \
+ VideoEncoderAVC.cpp \
+ VideoEncoderH263.cpp \
+ VideoEncoderMP4.cpp \
+ VideoEncoderVP8.cpp \
+ VideoEncoderUtils.cpp \
+ VideoEncoderHost.cpp
+
+ifeq ($(ENABLE_IMG_GRAPHICS),)
+ LOCAL_SRC_FILES += PVSoftMPEG4Encoder.cpp
+endif
+
+LOCAL_C_INCLUDES := \
+ $(TARGET_OUT_HEADERS)/libva \
+ $(call include-path-for, frameworks-native) \
+ $(TARGET_OUT_HEADERS)/pvr
+
+ifeq ($(ENABLE_IMG_GRAPHICS),)
+LOCAL_C_INCLUDES += \
+ frameworks/av/media/libstagefright/codecs/m4v_h263/enc/include \
+ frameworks/av/media/libstagefright/codecs/m4v_h263/enc/src \
+ frameworks/av/media/libstagefright/codecs/common/include \
+ frameworks/native/include/media/openmax \
+ frameworks/native/include/media/hardware \
+ frameworks/av/media/libstagefright/include
+endif
+
+LOCAL_SHARED_LIBRARIES := \
+    libcutils \
+    libutils \
+    libva \
+    libva-android \
+    libva-tpi \
+    libui \
+    libhardware \
+    libintelmetadatabuffer
+
+LOCAL_COPY_HEADERS_TO := libmix_videoencoder
+
+LOCAL_COPY_HEADERS := \
+ VideoEncoderHost.h \
+ VideoEncoderInterface.h \
+ VideoEncoderDef.h
+
+ifeq ($(VIDEO_ENC_LOG_ENABLE),true)
+LOCAL_CPPFLAGS += -DVIDEO_ENC_LOG_ENABLE
+endif
+
+ifeq ($(NO_BUFFER_SHARE),true)
+LOCAL_CPPFLAGS += -DNO_BUFFER_SHARE
+endif
+
+ifeq ($(VIDEO_ENC_STATISTICS_ENABLE),true)
+LOCAL_CPPFLAGS += -DVIDEO_ENC_STATISTICS_ENABLE
+endif
+
+ifeq ($(ENABLE_IMG_GRAPHICS),true)
+ LOCAL_CFLAGS += -DIMG_GFX
+
+ ifeq ($(ENABLE_MRFL_GRAPHICS),true)
+ LOCAL_CFLAGS += -DMRFLD_GFX
+ endif
+endif
+
+LOCAL_CFLAGS += -Werror
+LOCAL_MODULE_TAGS := optional
+LOCAL_MODULE := libva_videoencoder
+
+include $(BUILD_SHARED_LIBRARY)
+
+# For libintelmetadatabuffer
+# =====================================================
+
+include $(CLEAR_VARS)
+
+VIDEO_ENC_LOG_ENABLE := true
+
+LOCAL_SRC_FILES := \
+ IntelMetadataBuffer.cpp
+
+LOCAL_COPY_HEADERS_TO := libmix_videoencoder
+
+LOCAL_COPY_HEADERS := \
+ IntelMetadataBuffer.h
+
+ifeq ($(INTEL_VIDEO_XPROC_SHARING),true)
+LOCAL_SHARED_LIBRARIES := liblog libutils libbinder libgui \
+ libui libcutils libhardware
+endif
+LOCAL_CFLAGS += -Werror
+LOCAL_MODULE_TAGS := optional
+LOCAL_MODULE := libintelmetadatabuffer
+
+include $(BUILD_SHARED_LIBRARY)
diff --git a/videoencoder/IntelMetadataBuffer.cpp b/videoencoder/IntelMetadataBuffer.cpp
new file mode 100644
index 0000000..28f8e63
--- /dev/null
+++ b/videoencoder/IntelMetadataBuffer.cpp
@@ -0,0 +1,832 @@
+/*
+* Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "IntelMetadataBuffer"
+#include <wrs_omxil_core/log.h>
+
+#include "IntelMetadataBuffer.h"
+#include <string.h>
+#include <stdio.h>
+
+#ifdef INTEL_VIDEO_XPROC_SHARING
+#include <binder/IServiceManager.h>
+#include <binder/MemoryBase.h>
+#include <binder/Parcel.h>
+#include <utils/List.h>
+#include <utils/threads.h>
+#include <ui/GraphicBuffer.h>
+
+//#define TEST
+
+struct ShareMemMap {
+ uint32_t sessionflag;
+ intptr_t value;
+ intptr_t value_backup;
+ uint32_t type;
+ sp<MemoryBase> membase;
+ sp<GraphicBuffer> gbuffer;
+};
+
+List <ShareMemMap *> gShareMemMapList;
+Mutex gShareMemMapListLock;
+
+enum {
+ SHARE_MEM = IBinder::FIRST_CALL_TRANSACTION,
+ GET_MEM,
+ CLEAR_MEM,
+};
+
+enum {
+ ST_MEMBASE = 0,
+ ST_GFX,
+ ST_MAX,
+};
+
+#define REMOTE_PROVIDER 0x80000000
+#define REMOTE_CONSUMER 0x40000000
+
+static ShareMemMap* ReadMemObjFromBinder(const Parcel& data, uint32_t sessionflag, intptr_t value) {
+
+ uint32_t type = data.readInt32();
+ if (type >= ST_MAX)
+ return NULL;
+
+ ShareMemMap* map = new ShareMemMap;
+ map->sessionflag = sessionflag;
+ map->type = type;
+ map->value_backup = value;
+ map->membase = NULL;
+ map->gbuffer= NULL;
+
+// LOGI("ReadMemObjFromBinder");
+
+ if (type == ST_MEMBASE) /*offset, size, heap*/
+ {
+ ssize_t offset = data.readInt32();
+ size_t size = data.readInt32();
+
+ sp<IMemoryHeap> heap = interface_cast<IMemoryHeap>(data.readStrongBinder());
+
+ sp<MemoryBase> mem = new MemoryBase(heap, offset, size);
+ if (mem == NULL)
+ {
+ delete map;
+ return NULL;
+ }
+
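+        // publish the pointer rounded up to the next 4KB page boundary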
+ map->value = (intptr_t)( mem->pointer() + 0x0FFF) & ~0x0FFF;
+ map->membase = mem;
+
+#ifdef TEST
+ ALOGI("membase heapID:%d, pointer:%x data:%x, aligned value:%x", \
+ heap->getHeapID(), mem->pointer(), *((intptr_t *)(mem->pointer())), map->value);
+#endif
+
+ }
+ else if (type == ST_GFX) /*graphicbuffer*/
+ {
+ sp<GraphicBuffer> buffer = new GraphicBuffer();
+ if (buffer == NULL)
+ {
+ delete map;
+ return NULL;
+ }
+ data.read(*buffer);
+
+ map->value = (intptr_t)buffer->handle;
+ map->gbuffer = buffer;
+
+#ifdef TEST
+ void* usrptr[3];
+ buffer->lock(GraphicBuffer::USAGE_HW_TEXTURE | GraphicBuffer::USAGE_SW_READ_OFTEN, &usrptr[0]);
+ buffer->unlock();
+ ALOGI("gfx handle:%p data:%x", (intptr_t)buffer->handle, *((intptr_t *)usrptr[0]));
+#endif
+ }
+
+ gShareMemMapListLock.lock();
+ gShareMemMapList.push_back(map);
+ gShareMemMapListLock.unlock();
+ return map;
+}
+
+static status_t WriteMemObjToBinder(Parcel& data, ShareMemMap* smem) {
+
+ if (smem->type >= ST_MAX)
+ return BAD_VALUE;
+
+// LOGI("WriteMemObjToBinder");
+
+ data.writeInt32(smem->type);
+
+ if (smem->type == ST_MEMBASE) /*offset, size, heap*/
+ {
+ ssize_t offset;
+ size_t size;
+ sp<IMemoryHeap> heap = smem->membase->getMemory(&offset, &size);
+ data.writeInt32(offset);
+ data.writeInt32(size);
+ data.writeStrongBinder(heap->asBinder());
+#ifdef TEST
+ ALOGI("membase heapID:%d pointer:%x data:%x", \
+ heap->getHeapID(), smem->membase->pointer(), *((int *)(smem->membase->pointer())));
+#endif
+ }
+ else if (smem->type == ST_GFX) /*graphicbuffer*/
+ data.write(*(smem->gbuffer));
+
+ return NO_ERROR;
+}
+
+static void ClearLocalMem(uint32_t sessionflag)
+{
+ List<ShareMemMap *>::iterator node;
+
+ gShareMemMapListLock.lock();
+
+ for(node = gShareMemMapList.begin(); node != gShareMemMapList.end(); )
+ {
+        if ((*node)->sessionflag == sessionflag) //remove all buffers belonging to this session
+ {
+ (*node)->membase = NULL;
+ (*node)->gbuffer = NULL;
+ delete (*node);
+ node = gShareMemMapList.erase(node);
+ }
+ else
+            node++;
+ }
+
+ gShareMemMapListLock.unlock();
+}
+
+static ShareMemMap* FindShareMem(uint32_t sessionflag, intptr_t value, bool isBackup)
+{
+ List<ShareMemMap *>::iterator node;
+
+ gShareMemMapListLock.lock();
+ for(node = gShareMemMapList.begin(); node != gShareMemMapList.end(); node++)
+ {
+ if (isBackup)
+ {
+ if ((*node)->sessionflag == sessionflag && (*node)->value_backup == value)
+ {
+ gShareMemMapListLock.unlock();
+ return (*node);
+ }
+ }
+ else if ((*node)->sessionflag == sessionflag && (*node)->value == value)
+ {
+ gShareMemMapListLock.unlock();
+ return (*node);
+ }
+ }
+ gShareMemMapListLock.unlock();
+
+ return NULL;
+}
+
+static ShareMemMap* PopShareMem(uint32_t sessionflag, intptr_t value)
+{
+ List<ShareMemMap *>::iterator node;
+
+ gShareMemMapListLock.lock();
+ for(node = gShareMemMapList.begin(); node != gShareMemMapList.end(); node++)
+ {
+ if ((*node)->sessionflag == sessionflag && (*node)->value == value)
+ {
+ gShareMemMapList.erase(node);
+ gShareMemMapListLock.unlock();
+ return (*node);
+ }
+ }
+ gShareMemMapListLock.unlock();
+
+ return NULL;
+}
+
+static void PushShareMem(ShareMemMap* &smem)
+{
+ gShareMemMapListLock.lock();
+ gShareMemMapList.push_back(smem);
+ gShareMemMapListLock.unlock();
+}
+
+static sp<IBinder> GetIntelBufferSharingService() {
+
+ sp<IServiceManager> sm = defaultServiceManager();
+ sp<IBinder> binder = sm->checkService(String16("media.IntelBufferSharing"));
+
+ if (binder == 0)
+ ALOGE("media.IntelBufferSharing service is not published");
+
+ return binder;
+}
+
+IntelBufferSharingService* IntelBufferSharingService::gBufferService = NULL;
+
+status_t IntelBufferSharingService::instantiate(){
+ status_t ret = NO_ERROR;
+
+ if (gBufferService == NULL) {
+ gBufferService = new IntelBufferSharingService();
+ ret = defaultServiceManager()->addService(String16("media.IntelBufferSharing"), gBufferService);
+ LOGI("IntelBufferSharingService::instantiate() ret = %d\n", ret);
+ }
+
+ return ret;
+}
+
+status_t IntelBufferSharingService::onTransact(uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags) {
+
+    //TODO: confirm that pid_t fits in an int32
+ pid_t pid = data.readInt32();
+ uint32_t sessionflag = data.readInt32();
+
+ switch(code)
+ {
+ case SHARE_MEM:
+ {
+
+ if (pid == getpid()) //in same process, should not use binder
+ {
+ ALOGE("onTransact in same process, wrong sessionflag?");
+ return UNKNOWN_ERROR;
+ }
+
+ intptr_t value = data.readIntPtr();
+
+// LOGI("onTransact SHARE_MEM value=%x", value);
+
+ //different process
+ ShareMemMap* map = ReadMemObjFromBinder(data, sessionflag, value);
+ if (map == NULL)
+ return UNKNOWN_ERROR;
+
+ reply->writeIntPtr(map->value);
+
+ return NO_ERROR;
+ }
+ case CLEAR_MEM:
+ {
+// LOGI("onTransact CLEAR_MEM sessionflag=%x", sessionflag);
+
+ if (pid == getpid()) //in same process, should not use binder
+ {
+ //same process, return same pointer in data
+ ALOGE("onTransact CLEAR_MEM in same process, wrong sessionflag?");
+ return UNKNOWN_ERROR;
+ }
+
+ ClearLocalMem(sessionflag);
+ return NO_ERROR;
+ }
+ case GET_MEM:
+ {
+
+ if (pid == getpid()) //in same process, should not use binder
+ {
+ ALOGE("onTransact GET_MEM in same process, wrong sessionflag?");
+ return UNKNOWN_ERROR;
+ }
+
+ intptr_t value = data.readIntPtr();
+
+// LOGI("onTransact GET_MEM value=%x", value);
+
+ ShareMemMap* smem = FindShareMem(sessionflag, value, false);
+ if (smem && (NO_ERROR == WriteMemObjToBinder(*reply, smem)))
+ return NO_ERROR;
+ else
+ ALOGE("onTransact GET_MEM: Not find mem");
+
+ return UNKNOWN_ERROR;
+ }
+ default:
+ return BBinder::onTransact(code, data, reply, flags);
+
+ }
+ return NO_ERROR;
+}
+#endif
+
+IntelMetadataBuffer::IntelMetadataBuffer()
+{
+ mType = IntelMetadataBufferTypeCameraSource;
+ mValue = 0;
+ mInfo = NULL;
+ mExtraValues = NULL;
+ mExtraValues_Count = 0;
+ mBytes = NULL;
+ mSize = 0;
+#ifdef INTEL_VIDEO_XPROC_SHARING
+ mSessionFlag = 0;
+#endif
+}
+
+IntelMetadataBuffer::IntelMetadataBuffer(IntelMetadataBufferType type, intptr_t value)
+{
+ mType = type;
+ mValue = value;
+ mInfo = NULL;
+ mExtraValues = NULL;
+ mExtraValues_Count = 0;
+ mBytes = NULL;
+ mSize = 0;
+#ifdef INTEL_VIDEO_XPROC_SHARING
+ mSessionFlag = 0;
+#endif
+}
+
+IntelMetadataBuffer::~IntelMetadataBuffer()
+{
+ if (mInfo)
+ delete mInfo;
+
+ if (mExtraValues)
+ delete[] mExtraValues;
+
+ if (mBytes)
+ delete[] mBytes;
+}
+
+
+IntelMetadataBuffer::IntelMetadataBuffer(const IntelMetadataBuffer& imb)
+ :mType(imb.mType), mValue(imb.mValue), mInfo(NULL), mExtraValues(NULL),
+ mExtraValues_Count(imb.mExtraValues_Count), mBytes(NULL), mSize(imb.mSize)
+#ifdef INTEL_VIDEO_XPROC_SHARING
+ ,mSessionFlag(imb.mSessionFlag)
+#endif
+{
+ if (imb.mInfo)
+ mInfo = new ValueInfo(*imb.mInfo);
+
+ if (imb.mExtraValues)
+ {
+ mExtraValues = new intptr_t[mExtraValues_Count];
+ memcpy(mExtraValues, imb.mExtraValues, sizeof(mValue) * mExtraValues_Count);
+ }
+
+ if (imb.mBytes)
+ {
+ mBytes = new uint8_t[mSize];
+ memcpy(mBytes, imb.mBytes, mSize);
+ }
+}
+
+const IntelMetadataBuffer& IntelMetadataBuffer::operator=(const IntelMetadataBuffer& imb)
+{
+ mType = imb.mType;
+ mValue = imb.mValue;
+ mInfo = NULL;
+ mExtraValues = NULL;
+ mExtraValues_Count = imb.mExtraValues_Count;
+ mBytes = NULL;
+ mSize = imb.mSize;
+#ifdef INTEL_VIDEO_XPROC_SHARING
+ mSessionFlag = imb.mSessionFlag;
+#endif
+
+ if (imb.mInfo)
+ mInfo = new ValueInfo(*imb.mInfo);
+
+ if (imb.mExtraValues)
+ {
+ mExtraValues = new intptr_t[mExtraValues_Count];
+ memcpy(mExtraValues, imb.mExtraValues, sizeof(mValue) * mExtraValues_Count);
+ }
+
+ if (imb.mBytes)
+ {
+ mBytes = new uint8_t[mSize];
+ memcpy(mBytes, imb.mBytes, mSize);
+ }
+
+ return *this;
+}
+
+IMB_Result IntelMetadataBuffer::GetType(IntelMetadataBufferType& type)
+{
+ type = mType;
+
+ return IMB_SUCCESS;
+}
+
+IMB_Result IntelMetadataBuffer::SetType(IntelMetadataBufferType type)
+{
+ if (type < IntelMetadataBufferTypeLast)
+ mType = type;
+ else
+ return IMB_INVAL_PARAM;
+
+ return IMB_SUCCESS;
+}
+
+IMB_Result IntelMetadataBuffer::GetValue(intptr_t& value)
+{
+ value = mValue;
+
+#ifndef INTEL_VIDEO_XPROC_SHARING
+ return IMB_SUCCESS;
+#else
+ if ((mSessionFlag & REMOTE_CONSUMER) == 0) //no sharing or is local consumer
+ return IMB_SUCCESS;
+
+ //try to find if it is already cached.
+ ShareMemMap* smem = FindShareMem(mSessionFlag, mValue, true);
+ if(smem)
+ {
+ value = smem->value;
+ return IMB_SUCCESS;
+ }
+
+    //not found in the local cache; pull the memory object from the service
+ sp<IBinder> binder = GetIntelBufferSharingService();
+ if (binder == 0)
+ return IMB_NO_SERVICE;
+
+    //query IntelBufferSharingService for the shared memory object
+ Parcel data, reply;
+
+ //send pid, sessionflag, and memtype
+ pid_t pid = getpid();
+    //TODO: confirm that pid_t fits in an int32
+ data.writeInt32(pid);
+ data.writeInt32(mSessionFlag);
+ data.writeIntPtr(mValue);
+
+    //do transaction
+ if (binder->transact(GET_MEM, data, &reply) != NO_ERROR)
+ return IMB_SERVICE_FAIL;
+
+ //get type/Mem OBJ
+ smem = ReadMemObjFromBinder(reply, mSessionFlag, mValue);
+ if (smem)
+ value = smem->value;
+ else
+ return IMB_SERVICE_FAIL;
+
+ return IMB_SUCCESS;
+#endif
+}
+
+IMB_Result IntelMetadataBuffer::SetValue(intptr_t value)
+{
+ mValue = value;
+
+ return IMB_SUCCESS;
+}
+
+IMB_Result IntelMetadataBuffer::GetValueInfo(ValueInfo* &info)
+{
+ info = mInfo;
+
+ return IMB_SUCCESS;
+}
+
+IMB_Result IntelMetadataBuffer::SetValueInfo(ValueInfo* info)
+{
+ if (info)
+ {
+ if (mInfo == NULL)
+ mInfo = new ValueInfo;
+
+ memcpy(mInfo, info, sizeof(ValueInfo));
+ }
+ else
+ return IMB_INVAL_PARAM;
+
+ return IMB_SUCCESS;
+}
+
+IMB_Result IntelMetadataBuffer::GetExtraValues(intptr_t* &values, uint32_t& num)
+{
+ values = mExtraValues;
+ num = mExtraValues_Count;
+
+ return IMB_SUCCESS;
+}
+
+IMB_Result IntelMetadataBuffer::SetExtraValues(intptr_t* values, uint32_t num)
+{
+ if (values && num > 0)
+ {
+ if (mExtraValues && mExtraValues_Count != num)
+ {
+ delete[] mExtraValues;
+ mExtraValues = NULL;
+ }
+
+ if (mExtraValues == NULL)
+ mExtraValues = new intptr_t[num];
+
+ memcpy(mExtraValues, values, sizeof(intptr_t) * num);
+ mExtraValues_Count = num;
+ }
+ else
+ return IMB_INVAL_PARAM;
+
+ return IMB_SUCCESS;
+}
+
+IMB_Result IntelMetadataBuffer::UnSerialize(uint8_t* data, uint32_t size)
+{
+    if (!data || size < sizeof(IntelMetadataBufferType) + sizeof(intptr_t))
+ return IMB_INVAL_PARAM;
+
+ IntelMetadataBufferType type;
+ intptr_t value;
+ uint32_t extrasize = size - sizeof(type) - sizeof(value);
+ ValueInfo* info = NULL;
+ intptr_t* ExtraValues = NULL;
+ uint32_t ExtraValues_Count = 0;
+
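+    // serialized layout: type | value | [ValueInfo] | [extra values ...]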
+ memcpy(&type, data, sizeof(type));
+ data += sizeof(type);
+ memcpy(&value, data, sizeof(value));
+ data += sizeof(value);
+
+ switch (type)
+ {
+ case IntelMetadataBufferTypeCameraSource:
+ case IntelMetadataBufferTypeEncoder:
+ case IntelMetadataBufferTypeUser:
+ {
+            if (extrasize > 0 && extrasize < sizeof(ValueInfo))
+ return IMB_INVAL_BUFFER;
+
+ if (extrasize > sizeof(ValueInfo)) //has extravalues
+ {
+ if ( (extrasize - sizeof(ValueInfo)) % sizeof(mValue) != 0 )
+ return IMB_INVAL_BUFFER;
+ ExtraValues_Count = (extrasize - sizeof(ValueInfo)) / sizeof(mValue);
+ }
+
+ if (extrasize > 0)
+ {
+ info = new ValueInfo;
+ memcpy(info, data, sizeof(ValueInfo));
+ data += sizeof(ValueInfo);
+ }
+
+ if (ExtraValues_Count > 0)
+ {
+ ExtraValues = new intptr_t[ExtraValues_Count];
+ memcpy(ExtraValues, data, ExtraValues_Count * sizeof(mValue));
+ }
+
+ break;
+ }
+ case IntelMetadataBufferTypeGrallocSource:
+ if (extrasize > 0)
+ return IMB_INVAL_BUFFER;
+
+ break;
+ default:
+ return IMB_INVAL_BUFFER;
+ }
+
+ //store data
+ mType = type;
+ mValue = value;
+ if (mInfo)
+ delete mInfo;
+ mInfo = info;
+ if (mExtraValues)
+ delete[] mExtraValues;
+ mExtraValues = ExtraValues;
+ mExtraValues_Count = ExtraValues_Count;
+#ifdef INTEL_VIDEO_XPROC_SHARING
+ if (mInfo != NULL)
+ mSessionFlag = mInfo->sessionFlag;
+#endif
+ return IMB_SUCCESS;
+}
+
+IMB_Result IntelMetadataBuffer::Serialize(uint8_t* &data, uint32_t& size)
+{
+ if (mBytes == NULL)
+ {
+ if (mType == IntelMetadataBufferTypeGrallocSource && mInfo)
+ return IMB_INVAL_PARAM;
+
+        //assemble the byte stream from the member fields
+ mSize = sizeof(mType) + sizeof(mValue);
+ if (mInfo)
+ {
+ mSize += sizeof(ValueInfo);
+ if (mExtraValues)
+ mSize += sizeof(mValue) * mExtraValues_Count;
+ }
+
+ mBytes = new uint8_t[mSize];
+ uint8_t *ptr = mBytes;
+ memcpy(ptr, &mType, sizeof(mType));
+ ptr += sizeof(mType);
+ memcpy(ptr, &mValue, sizeof(mValue));
+ ptr += sizeof(mValue);
+
+ if (mInfo)
+ {
+ #ifdef INTEL_VIDEO_XPROC_SHARING
+ mInfo->sessionFlag = mSessionFlag;
+ #endif
+ memcpy(ptr, mInfo, sizeof(ValueInfo));
+ ptr += sizeof(ValueInfo);
+
+ if (mExtraValues)
+ memcpy(ptr, mExtraValues, mExtraValues_Count * sizeof(mValue));
+ }
+ }
+
+ data = mBytes;
+ size = mSize;
+
+ return IMB_SUCCESS;
+}
+
+uint32_t IntelMetadataBuffer::GetMaxBufferSize()
+{
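+    // fixed upper bound on the size of any serialized IntelMetadataBuffer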
+ return 256;
+}
+
+#ifdef INTEL_VIDEO_XPROC_SHARING
+IMB_Result IntelMetadataBuffer::GetSessionFlag(uint32_t& sessionflag)
+{
+ sessionflag = mSessionFlag;
+
+ return IMB_SUCCESS;
+}
+
+IMB_Result IntelMetadataBuffer::SetSessionFlag(uint32_t sessionflag)
+{
+ mSessionFlag = sessionflag;
+
+ return IMB_SUCCESS;
+}
+
+IMB_Result IntelMetadataBuffer::ShareValue(sp<MemoryBase> mem)
+{
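+    // share the pointer rounded up to a 4KB page boundary (matching the consumer-side alignment)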
+ mValue = (intptr_t)((intptr_t) ( mem->pointer() + 0x0FFF) & ~0x0FFF);
+
+ if ( !(mSessionFlag & REMOTE_PROVIDER) && !(mSessionFlag & REMOTE_CONSUMER)) //no sharing
+ return IMB_SUCCESS;
+
+ if (mSessionFlag & REMOTE_PROVIDER) //is remote provider
+ {
+ sp<IBinder> binder = GetIntelBufferSharingService();
+ if (binder == 0)
+ return IMB_NO_SERVICE;
+
+ //Detect IntelBufferSharingService, share mem to service
+ Parcel data, reply;
+
+ //send pid, sessionflag, and value
+ pid_t pid = getpid();
+        //TODO: confirm that pid_t fits in an int32
+ data.writeInt32(pid);
+ data.writeInt32(mSessionFlag);
+ data.writeIntPtr(mValue);
+
+ //send type/obj (offset/size/MemHeap)
+ ShareMemMap smem;
+ smem.membase = mem;
+ smem.type = ST_MEMBASE;
+ if (WriteMemObjToBinder(data, &smem) != NO_ERROR)
+ return IMB_SERVICE_FAIL;
+
+        //do transaction
+ if (binder->transact(SHARE_MEM, data, &reply) != NO_ERROR)
+ return IMB_SERVICE_FAIL;
+
+ //set new value gotten from peer
+ mValue = reply.readIntPtr();
+//        LOGI("ShareValue(membase) Get reply from service, new value:%x\n", mValue);
+ }
+ else //is local provider , direct access list
+ {
+ ShareMemMap* smem = new ShareMemMap;
+ smem->sessionflag = mSessionFlag;
+ smem->value = mValue;
+ smem->value_backup = mValue;
+ smem->type = ST_MEMBASE;
+ smem->membase = mem;
+ smem->gbuffer = NULL;
+ PushShareMem(smem);
+ }
+
+ return IMB_SUCCESS;
+}
+
+IMB_Result IntelMetadataBuffer::ShareValue(sp<GraphicBuffer> gbuffer)
+{
+ mValue = (intptr_t)gbuffer->handle;
+
+ if ( !(mSessionFlag & REMOTE_PROVIDER) && !(mSessionFlag & REMOTE_CONSUMER)) //no sharing
+ return IMB_SUCCESS;
+
+    if (mSessionFlag & REMOTE_PROVIDER) //is remote provider
+ {
+ sp<IBinder> binder = GetIntelBufferSharingService();
+ if (binder == 0)
+ return IMB_NO_SERVICE;
+
+ Parcel data, reply;
+
+ //send pid, sessionflag, and memtype
+ pid_t pid = getpid();
+        //TODO: confirm that pid_t fits in an int32
+ data.writeInt32(pid);
+ data.writeInt32(mSessionFlag);
+ data.writeIntPtr(mValue);
+
+ //send value/graphicbuffer obj
+ ShareMemMap smem;
+ smem.gbuffer = gbuffer;
+ smem.type = ST_GFX;
+ if (WriteMemObjToBinder(data, &smem) != NO_ERROR)
+ return IMB_SERVICE_FAIL;
+
+        //do transaction
+ if (binder->transact(SHARE_MEM, data, &reply) != NO_ERROR)
+ return IMB_SERVICE_FAIL;
+
+ //set new value gotten from peer
+ mValue = reply.readIntPtr();
+//        LOGI("ShareValue(gfx) Get reply from service, new value:%x\n", mValue);
+ }
+ else //is local provider, direct access list
+ {
+ ShareMemMap* smem = new ShareMemMap;
+ smem->sessionflag = mSessionFlag;
+ smem->value = mValue;
+ smem->value_backup = mValue;
+ smem->type = ST_GFX;
+ smem->membase = NULL;
+ smem->gbuffer = gbuffer;
+ PushShareMem(smem);
+ }
+
+ return IMB_SUCCESS;
+}
+
+IMB_Result IntelMetadataBuffer::ClearContext(uint32_t sessionflag, bool isProvider)
+{
+ if ( !(sessionflag & REMOTE_PROVIDER) && !(sessionflag & REMOTE_CONSUMER)) //no sharing
+ return IMB_SUCCESS;
+
+ //clear local firstly
+ ClearLocalMem(sessionflag);
+
+ //clear mem on service if it is remote user
+ if ((isProvider && (sessionflag & REMOTE_PROVIDER)) || (!isProvider && (sessionflag & REMOTE_CONSUMER)))
+ {
+// LOGI("CLEAR_MEM sessionflag=%x", sessionflag);
+
+ sp<IBinder> binder = GetIntelBufferSharingService();
+ if (binder == 0)
+ return IMB_NO_SERVICE;
+
+ //Detect IntelBufferSharingService, unshare mem from service
+ Parcel data, reply;
+
+ //send pid and sessionflag
+ pid_t pid = getpid();
+        //TODO: confirm that pid_t fits in an int32
+ data.writeInt32(pid);
+ data.writeInt32(sessionflag);
+
+ if (binder->transact(CLEAR_MEM, data, &reply) != NO_ERROR)
+ return IMB_SERVICE_FAIL;
+ }
+
+ return IMB_SUCCESS;
+}
+
+uint32_t IntelMetadataBuffer::MakeSessionFlag(bool remoteProvider, bool remoteConsumer, uint16_t sindex)
+{
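+    // flag layout: bit 31 = remote provider, bit 30 = remote consumer, low 16 bits = session index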
+ uint32_t sessionflag = 0;
+
+    if (remoteProvider)
+ sessionflag |= REMOTE_PROVIDER;
+
+ if (remoteConsumer)
+ sessionflag |= REMOTE_CONSUMER;
+
+ return sessionflag + sindex;
+}
+#endif
diff --git a/videoencoder/IntelMetadataBuffer.h b/videoencoder/IntelMetadataBuffer.h
new file mode 100644
index 0000000..20a9590
--- /dev/null
+++ b/videoencoder/IntelMetadataBuffer.h
@@ -0,0 +1,162 @@
+/*
+* Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+#ifndef _INTEL_METADATA_BUFFER_H_
+#define _INTEL_METADATA_BUFFER_H_
+
+#include <stdint.h>
+
+//#define INTEL_VIDEO_XPROC_SHARING
+
+#ifdef INTEL_VIDEO_XPROC_SHARING
+#include <binder/MemoryBase.h>
+#include <ui/GraphicBuffer.h>
+
+using namespace android;
+#endif
+#define STRING_TO_FOURCC(format) ((uint32_t)(((format)[0])|((format)[1]<<8)|((format)[2]<<16)|((format)[3]<<24)))
+
+typedef enum {
+ IMB_SUCCESS = 0,
+ IMB_INVAL_PARAM = 1,
+ IMB_INVAL_BUFFER = 2,
+#ifdef INTEL_VIDEO_XPROC_SHARING
+ IMB_NO_SERVICE = 3,
+ IMB_SERVICE_FAIL = 4,
+#endif
+}IMB_Result;
+
+typedef enum {
+ MEM_MODE_MALLOC = 1,
+ MEM_MODE_CI = 2,
+ MEM_MODE_V4L2 = 4,
+ MEM_MODE_SURFACE = 8,
+ MEM_MODE_USRPTR = 16,
+ MEM_MODE_GFXHANDLE = 32,
+ MEM_MODE_KBUFHANDLE = 64,
+ MEM_MODE_ION = 128,
+ MEM_MODE_NONECACHE_USRPTR = 256,
+}MemMode;
+
+typedef struct {
+ MemMode mode; //memory type, vasurface/malloc/gfx/ion/v4l2/ci etc
+ intptr_t handle; //handle
+ uint32_t size; //memory size
+ uint32_t width; //picture width
+ uint32_t height; //picture height
+ uint32_t lumaStride; //picture luma stride
+ uint32_t chromStride; //picture chrom stride
+ uint32_t format; //color format
+ uint32_t s3dformat; //S3D format
+#ifdef INTEL_VIDEO_XPROC_SHARING
+ uint32_t sessionFlag; //for buffer sharing session
+#endif
+}ValueInfo;
+
+typedef enum {
+    IntelMetadataBufferTypeCameraSource = 0, //same as kMetadataBufferTypeCameraSource in framework
+    IntelMetadataBufferTypeGrallocSource = 1, //same as kMetadataBufferTypeGrallocSource in framework
+
+ IntelMetadataBufferTypeExtension = 0xFF, //intel extended type
+ IntelMetadataBufferTypeEncoder = IntelMetadataBufferTypeExtension, //for WiDi clone mode
+ IntelMetadataBufferTypeUser = IntelMetadataBufferTypeExtension + 1, //for WiDi user mode
+ IntelMetadataBufferTypeLast = IntelMetadataBufferTypeExtension + 2, //type number
+}IntelMetadataBufferType;
+
+class IntelMetadataBuffer {
+public:
+ IntelMetadataBuffer(); //for generator
+ IntelMetadataBuffer(IntelMetadataBufferType type, intptr_t value); //for quick generator
+ ~IntelMetadataBuffer();
+
+ IntelMetadataBuffer(const IntelMetadataBuffer& imb);
+ const IntelMetadataBuffer& operator=(const IntelMetadataBuffer& imb);
+
+ IMB_Result GetType(IntelMetadataBufferType &type);
+ IMB_Result SetType(IntelMetadataBufferType type);
+ IMB_Result GetValue(intptr_t &value);
+ IMB_Result SetValue(intptr_t value);
+ IMB_Result GetValueInfo(ValueInfo* &info);
+ IMB_Result SetValueInfo(ValueInfo *info);
+ IMB_Result GetExtraValues(intptr_t* &values, uint32_t &num);
+ IMB_Result SetExtraValues(intptr_t *values, uint32_t num);
+
+    //New API for bytes input/output, UnSerialize=SetBytes, Serialize=GetBytes
+ IMB_Result UnSerialize(uint8_t* data, uint32_t size);
+ IMB_Result Serialize(uint8_t* &data, uint32_t& size);
+
+ //Static, for get max IntelMetadataBuffer size
+ static uint32_t GetMaxBufferSize();
+
+private:
+ IntelMetadataBufferType mType;
+ intptr_t mValue;
+ ValueInfo* mInfo;
+
+ intptr_t* mExtraValues;
+ uint32_t mExtraValues_Count;
+
+ uint8_t* mBytes;
+ uint32_t mSize;
+
+#ifdef INTEL_VIDEO_XPROC_SHARING
+public:
+ IMB_Result ShareValue(sp<MemoryBase> mem);
+ IMB_Result ShareValue(sp<GraphicBuffer> gbuffer);
+
+ IMB_Result GetSessionFlag(uint32_t &sessionflag);
+ IMB_Result SetSessionFlag(uint32_t sessionflag);
+
+ //Static, for clear context
+ static IMB_Result ClearContext(uint32_t sessionflag, bool isProvider = true);
+
+ static const uint16_t CAMERA_BASE = 0x0000;
+ static const uint16_t WIDI_BASE = 0x1000;
+ static const uint16_t WEBRTC_BASE = 0x2000;
+ static const uint16_t VIDEOEDIT_BASE = 0x3000;
+
+    static uint32_t MakeSessionFlag(bool remoteProvider, bool remoteConsumer, uint16_t sindex);
+
+private:
+ uint32_t mSessionFlag;
+#endif
+
+};
+
+#ifdef INTEL_VIDEO_XPROC_SHARING
+
+class IntelBufferSharingService : public BBinder
+{
+private:
+ static IntelBufferSharingService *gBufferService;
+
+public:
+ static status_t instantiate();
+
+ IntelBufferSharingService(){
+ ALOGI("IntelBufferSharingService instance is created");
+ }
+
+ ~IntelBufferSharingService(){
+ ALOGI("IntelBufferSharingService instance is destroyed");
+ }
+
+ status_t onTransact(uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags);
+};
+#endif
+
+#endif
+
diff --git a/videoencoder/PVSoftMPEG4Encoder.cpp b/videoencoder/PVSoftMPEG4Encoder.cpp
new file mode 100644
index 0000000..6b893df
--- /dev/null
+++ b/videoencoder/PVSoftMPEG4Encoder.cpp
@@ -0,0 +1,513 @@
+/*
+* Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "PVSoftMPEG4Encoder"
+#include <wrs_omxil_core/log.h>
+
+#include "mp4enc_api.h"
+#include "OMX_Video.h"
+
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/Utils.h>
+
+#include <ui/GraphicBufferMapper.h>
+#include <ui/Rect.h>
+
+#include "PVSoftMPEG4Encoder.h"
+#include "VideoEncoderLog.h"
+
+#define ALIGN(x, align) (((x) + (align) - 1) & (~((align) - 1)))
+
+inline static void ConvertYUV420SemiPlanarToYUV420Planar(
+ uint8_t *inyuv, uint8_t* outyuv,
+ int32_t width, int32_t height) {
+
+ int32_t outYsize = width * height;
+ uint32_t *outy = (uint32_t *) outyuv;
+ uint16_t *outcb = (uint16_t *) (outyuv + outYsize);
+ uint16_t *outcr = (uint16_t *) (outyuv + outYsize + (outYsize >> 2));
+
+ /* Y copying */
+ memcpy(outy, inyuv, outYsize);
+
+ /* U & V copying */
+ uint32_t *inyuv_4 = (uint32_t *) (inyuv + outYsize);
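+    // each 32-bit load covers two interleaved chroma pairs, which are repacked
+    // into 16-bit pairs for the two planar chroma output pointers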
+ for (int32_t i = height >> 1; i > 0; --i) {
+ for (int32_t j = width >> 2; j > 0; --j) {
+ uint32_t temp = *inyuv_4++;
+ uint32_t tempU = temp & 0xFF;
+ tempU = tempU | ((temp >> 8) & 0xFF00);
+
+ uint32_t tempV = (temp >> 8) & 0xFF;
+ tempV = tempV | ((temp >> 16) & 0xFF00);
+
+ // Flip U and V
+ *outcb++ = tempU;
+ *outcr++ = tempV;
+ }
+ }
+}
+
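+// Copy an image out of a padded (stride x alignedHeight) buffer into a tightly
+// packed (width x height) buffer, dropping the row and height padding.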
+inline static void trimBuffer(uint8_t *dataIn, uint8_t *dataOut,
+ int32_t width, int32_t height,
+ int32_t alignedHeight, int32_t stride) {
+ int32_t h;
+ uint8_t *y_start, *uv_start, *_y_start, *_uv_start;
+ y_start = dataOut;
+ uv_start = dataOut + width * height;
+ _y_start = dataIn;
+ _uv_start = dataIn + stride * alignedHeight;
+
+ for (h = 0; h < height; h++)
+ memcpy(y_start + h * width, _y_start + h * stride, width);
+ for (h = 0; h < height / 2; h++)
+ memcpy(uv_start + h * width,
+ _uv_start + h * stride, width);
+}
+
+PVSoftMPEG4Encoder::PVSoftMPEG4Encoder(const char *name)
+ : mEncodeMode(COMBINE_MODE_WITH_ERR_RES),
+ mVideoWidth(176),
+ mVideoHeight(144),
+ mVideoFrameRate(30),
+ mVideoBitRate(192000),
+ mVideoColorFormat(OMX_COLOR_FormatYUV420SemiPlanar),
+ mStoreMetaDataInBuffers(false),
+ mIDRFrameRefreshIntervalInSec(1),
+ mNumInputFrames(-1),
+ mStarted(false),
+ mSawInputEOS(false),
+ mSignalledError(false),
+ mHandle(new tagvideoEncControls),
+ mEncParams(new tagvideoEncOptions),
+ mInputFrameData(NULL)
+{
+
+ if (!strcmp(name, "OMX.google.h263.encoder")) {
+ mEncodeMode = H263_MODE;
+ LOG_I("construct h263 encoder");
+ } else {
+ CHECK(!strcmp(name, "OMX.google.mpeg4.encoder"));
+ LOG_I("construct mpeg4 encoder");
+ }
+
+ setDefaultParams();
+#if NO_BUFFER_SHARE
+ mVASurfaceMappingAction |= MAPACT_COPY;
+#endif
+
+ LOG_I("Construct PVSoftMPEG4Encoder");
+
+}
+
+PVSoftMPEG4Encoder::~PVSoftMPEG4Encoder() {
+ LOG_I("Destruct PVSoftMPEG4Encoder");
+ releaseEncoder();
+
+}
+
+void PVSoftMPEG4Encoder::setDefaultParams() {
+
+ // Set default value for input parameters
+ mComParams.profile = VAProfileH264Baseline;
+ mComParams.level = 41;
+ mComParams.rawFormat = RAW_FORMAT_NV12;
+ mComParams.frameRate.frameRateNum = 30;
+ mComParams.frameRate.frameRateDenom = 1;
+ mComParams.resolution.width = 0;
+ mComParams.resolution.height = 0;
+ mComParams.intraPeriod = 30;
+ mComParams.rcMode = RATE_CONTROL_NONE;
+ mComParams.rcParams.initQP = 15;
+ mComParams.rcParams.minQP = 0;
+ mComParams.rcParams.bitRate = 640000;
+ mComParams.rcParams.targetPercentage= 0;
+ mComParams.rcParams.windowSize = 0;
+ mComParams.rcParams.disableFrameSkip = 0;
+ mComParams.rcParams.disableBitsStuffing = 1;
+ mComParams.cyclicFrameInterval = 30;
+ mComParams.refreshType = VIDEO_ENC_NONIR;
+ mComParams.airParams.airMBs = 0;
+ mComParams.airParams.airThreshold = 0;
+ mComParams.airParams.airAuto = 1;
+ mComParams.disableDeblocking = 2;
+ mComParams.syncEncMode = false;
+ mComParams.codedBufNum = 2;
+
+}
+
+Encode_Status PVSoftMPEG4Encoder::initEncParams() {
+ CHECK(mHandle != NULL);
+ memset(mHandle, 0, sizeof(tagvideoEncControls));
+
+ CHECK(mEncParams != NULL);
+ memset(mEncParams, 0, sizeof(tagvideoEncOptions));
+ if (!PVGetDefaultEncOption(mEncParams, 0)) {
+ LOG_E("Failed to get default encoding parameters");
+ return ENCODE_FAIL;
+ }
+ mEncParams->encMode = mEncodeMode;
+ mEncParams->encWidth[0] = mVideoWidth;
+ mEncParams->encHeight[0] = mVideoHeight;
+ mEncParams->encFrameRate[0] = mVideoFrameRate;
+ mEncParams->rcType = VBR_1;
+ mEncParams->vbvDelay = 5.0f;
+
+ // FIXME:
+ // Add more profile and level support for MPEG4 encoder
+ mEncParams->profile_level = CORE_PROFILE_LEVEL2;
+ mEncParams->packetSize = 32;
+ mEncParams->rvlcEnable = PV_OFF;
+ mEncParams->numLayers = 1;
+ mEncParams->timeIncRes = 1000;
+ mEncParams->tickPerSrc = mEncParams->timeIncRes / mVideoFrameRate;
+
+ mEncParams->bitRate[0] = mVideoBitRate <= 2000000 ? mVideoBitRate : 2000000;
+ mEncParams->iQuant[0] = 15;
+ mEncParams->pQuant[0] = 12;
+ mEncParams->quantType[0] = 0;
+ mEncParams->noFrameSkipped = PV_OFF;
+
+ mTrimedInputData =
+ (uint8_t *) malloc((mVideoWidth * mVideoHeight * 3 ) >> 1);
+ CHECK(mTrimedInputData != NULL);
+
+ if (mVideoColorFormat == OMX_COLOR_FormatYUV420SemiPlanar) {
+ // Color conversion is needed.
+ CHECK(mInputFrameData == NULL);
+ mInputFrameData =
+ (uint8_t *) malloc((mVideoWidth * mVideoHeight * 3 ) >> 1);
+ CHECK(mInputFrameData != NULL);
+ }
+
+    // PV's MPEG4 encoder requires the video dimensions to be multiples of 16
+ if (mVideoWidth % 16 != 0 || mVideoHeight % 16 != 0) {
+ LOG_E("Video frame size %dx%d must be a multiple of 16",
+ mVideoWidth, mVideoHeight);
+ return ENCODE_INVALID_PARAMS;
+ }
+
+ // Set IDR frame refresh interval
+ if (mIDRFrameRefreshIntervalInSec < 0) {
+ mEncParams->intraPeriod = -1;
+ } else if (mIDRFrameRefreshIntervalInSec == 0) {
+ mEncParams->intraPeriod = 1; // All I frames
+ } else {
+ mEncParams->intraPeriod =
+ (mIDRFrameRefreshIntervalInSec * mVideoFrameRate);
+ }
+
+ mEncParams->numIntraMB = 0;
+ mEncParams->sceneDetect = PV_ON;
+ mEncParams->searchRange = 16;
+ mEncParams->mv8x8Enable = PV_OFF;
+ mEncParams->gobHeaderInterval = 0;
+ mEncParams->useACPred = PV_ON;
+ mEncParams->intraDCVlcTh = 0;
+
+ return ENCODE_SUCCESS;
+}
+
+Encode_Status PVSoftMPEG4Encoder::initEncoder() {
+ LOG_V("Begin\n");
+
+ CHECK(!mStarted);
+
+ Encode_Status ret = ENCODE_SUCCESS;
+ if (ENCODE_SUCCESS != (ret = initEncParams())) {
+ LOG_E("Failed to initialized encoder params");
+ mSignalledError = true;
+ return ret;
+ }
+
+ if (!PVInitVideoEncoder(mHandle, mEncParams)) {
+ LOG_E("Failed to initialize the encoder");
+ mSignalledError = true;
+ return ENCODE_FAIL;
+ }
+
+ mNumInputFrames = -1; // 1st buffer for codec specific data
+ mStarted = true;
+ mCurTimestampUs = 0;
+ mLastTimestampUs = 0;
+ mVolHeaderLength = 256;
+
+ LOG_V("End\n");
+
+ return ENCODE_SUCCESS;
+}
+
+Encode_Status PVSoftMPEG4Encoder::releaseEncoder() {
+ LOG_V("Begin\n");
+
+ if (!mStarted) {
+ return ENCODE_SUCCESS;
+ }
+
+ PVCleanUpVideoEncoder(mHandle);
+
+    // mTrimedInputData and mInputFrameData were allocated with malloc(),
+    // so they must be released with free(), not delete
+    free(mTrimedInputData);
+    mTrimedInputData = NULL;
+
+    free(mInputFrameData);
+    mInputFrameData = NULL;
+
+ delete mEncParams;
+ mEncParams = NULL;
+
+ delete mHandle;
+ mHandle = NULL;
+
+ mStarted = false;
+
+ LOG_V("End\n");
+
+ return ENCODE_SUCCESS;
+}
+
+Encode_Status PVSoftMPEG4Encoder::setParameters(
+ VideoParamConfigSet *videoEncParams)
+{
+
+ Encode_Status ret = ENCODE_SUCCESS;
+ CHECK_NULL_RETURN_IFFAIL(videoEncParams);
+ LOG_I("Config type = %d\n", (int)videoEncParams->type);
+
+ if (mStarted) {
+ LOG_E("Encoder has been initialized, should use setConfig to change configurations\n");
+ return ENCODE_ALREADY_INIT;
+ }
+
+ switch (videoEncParams->type) {
+ case VideoParamsTypeCommon: {
+
+ VideoParamsCommon *paramsCommon =
+ reinterpret_cast <VideoParamsCommon *> (videoEncParams);
+ if (paramsCommon->size != sizeof (VideoParamsCommon)) {
+ return ENCODE_INVALID_PARAMS;
+ }
+ if(paramsCommon->codedBufNum < 2)
+ paramsCommon->codedBufNum =2;
+ mComParams = *paramsCommon;
+
+ mVideoWidth = mComParams.resolution.width;
+ mVideoHeight = mComParams.resolution.height;
+ mVideoFrameRate = mComParams.frameRate.frameRateNum / \
+ mComParams.frameRate.frameRateDenom;
+ mVideoBitRate = mComParams.rcParams.bitRate;
+ mVideoColorFormat = OMX_COLOR_FormatYUV420SemiPlanar;
+ break;
+ }
+
+ case VideoParamsTypeStoreMetaDataInBuffers: {
+ VideoParamsStoreMetaDataInBuffers *metadata =
+ reinterpret_cast <VideoParamsStoreMetaDataInBuffers *> (videoEncParams);
+
+ if (metadata->size != sizeof (VideoParamsStoreMetaDataInBuffers)) {
+ return ENCODE_INVALID_PARAMS;
+ }
+
+ mStoreMetaDataInBuffers = metadata->isEnabled;
+
+ break;
+ }
+
+ default: {
+ LOG_I ("Wrong ParamType here\n");
+ break;
+ }
+ }
+
+ return ret;
+}
+
+Encode_Status PVSoftMPEG4Encoder::getParameters(
+ VideoParamConfigSet *videoEncParams) {
+
+ Encode_Status ret = ENCODE_SUCCESS;
+ CHECK_NULL_RETURN_IFFAIL(videoEncParams);
+ LOG_I("Config type = %d\n", (int)videoEncParams->type);
+
+ switch (videoEncParams->type) {
+ case VideoParamsTypeCommon: {
+
+ VideoParamsCommon *paramsCommon =
+ reinterpret_cast <VideoParamsCommon *> (videoEncParams);
+
+ if (paramsCommon->size != sizeof (VideoParamsCommon)) {
+ return ENCODE_INVALID_PARAMS;
+ }
+ *paramsCommon = mComParams;
+ break;
+ }
+
+ case VideoParamsTypeStoreMetaDataInBuffers: {
+ VideoParamsStoreMetaDataInBuffers *metadata =
+ reinterpret_cast <VideoParamsStoreMetaDataInBuffers *> (videoEncParams);
+
+ if (metadata->size != sizeof (VideoParamsStoreMetaDataInBuffers)) {
+ return ENCODE_INVALID_PARAMS;
+ }
+
+ metadata->isEnabled = mStoreMetaDataInBuffers;
+
+ break;
+ }
+
+ default: {
+ LOG_I ("Wrong ParamType here\n");
+ break;
+ }
+
+ }
+ return ret;
+}
+
+Encode_Status PVSoftMPEG4Encoder::encode(VideoEncRawBuffer *inBuffer, uint32_t timeout)
+{
+ LOG_V("Begin\n");
+
+ Encode_Status ret = ENCODE_SUCCESS;
+
+ if (mCurTimestampUs <= inBuffer->timeStamp) {
+ mLastTimestampUs = mCurTimestampUs;
+ mCurTimestampUs = inBuffer->timeStamp;
+ }
+
+ if (mNumInputFrames < 0) {
+ if (!PVGetVolHeader(mHandle, mVolHeader, &mVolHeaderLength, 0)) {
+ LOG_E("Failed to get VOL header");
+ mSignalledError = true;
+ return ENCODE_FAIL;
+ }
+ LOG_I("Output VOL header: %d bytes", mVolHeaderLength);
+ mNumInputFrames++;
+ //return ENCODE_SUCCESS;
+ }
+
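+    // Two input paths: with metadata buffers, the payload is a serialized
+    // IntelMetadataBuffer whose ValueInfo (if present) describes the surface
+    // layout and whose value is a CPU pointer; without ValueInfo the value
+    // is a gralloc handle that must be locked to obtain a CPU pointer.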
+ if (mStoreMetaDataInBuffers) {
+ IntelMetadataBuffer imb;
+ int32_t type;
+ int32_t value;
+ uint8_t *img;
+ const android::Rect rect(mVideoWidth, mVideoHeight);
+ android::status_t res;
+ ValueInfo vinfo;
+ ValueInfo *pvinfo = &vinfo;
+ CHECK(IMB_SUCCESS == imb.UnSerialize(inBuffer->data, inBuffer->size));
+ imb.GetType((::IntelMetadataBufferType&)type);
+ imb.GetValue(value);
+ imb.GetValueInfo(pvinfo);
+ if(pvinfo == NULL) {
+ res = android::GraphicBufferMapper::get().lock((buffer_handle_t)value,
+ GRALLOC_USAGE_SW_READ_MASK,
+ rect, (void**)&img);
+ } else {
+ img = (uint8_t*)value;
+ }
+ if (pvinfo != NULL)
+ trimBuffer(img, mTrimedInputData, pvinfo->width, pvinfo->height,
+ pvinfo->height, pvinfo->lumaStride);
+ else {
+ //NV12 Y-TILED
+ trimBuffer(img, mTrimedInputData, mVideoWidth, mVideoHeight,
+ ALIGN(mVideoHeight, 32), ALIGN(mVideoWidth, 128));
+ android::GraphicBufferMapper::get().unlock((buffer_handle_t)value);
+ }
+ } else {
+ memcpy(mTrimedInputData, inBuffer->data,
+ (mVideoWidth * mVideoHeight * 3 ) >> 1);
+ }
+
+ if (mVideoColorFormat != OMX_COLOR_FormatYUV420Planar) {
+ ConvertYUV420SemiPlanarToYUV420Planar(
+ mTrimedInputData, mInputFrameData, mVideoWidth, mVideoHeight);
+ } else {
+        // copy the packed frame into mInputFrameData, which getOutput() reads
+        memcpy(mInputFrameData, mTrimedInputData,
+            (mVideoWidth * mVideoHeight * 3 ) >> 1);
+ }
+
+ LOG_V("End\n");
+
+ return ret;
+}
+
+Encode_Status PVSoftMPEG4Encoder::getOutput(VideoEncOutputBuffer *outBuffer, uint32_t timeout)
+{
+ LOG_V("Begin\n");
+
+ Encode_Status ret = ENCODE_SUCCESS;
+ uint8_t *outPtr = outBuffer->data;
+ int32_t dataLength = outBuffer->bufferSize;
+ outBuffer->flag = 0;
+
+ if ((mEncodeMode == COMBINE_MODE_WITH_ERR_RES) &&
+ (outBuffer->format == OUTPUT_CODEC_DATA)) {
+ memcpy(outPtr, mVolHeader, mVolHeaderLength);
+ ++mNumInputFrames;
+ outBuffer->flag |= ENCODE_BUFFERFLAG_CODECCONFIG;
+ outBuffer->flag |= ENCODE_BUFFERFLAG_ENDOFFRAME;
+ outBuffer->flag |= ENCODE_BUFFERFLAG_SYNCFRAME;
+ outBuffer->dataSize = mVolHeaderLength;
+ outBuffer->remainingSize = 0;
+ return ENCODE_SUCCESS;
+ }
+
+ outBuffer->timeStamp = mCurTimestampUs;
+ LOG_I("info.mTimeUs %lld\n", outBuffer->timeStamp);
+
+ VideoEncFrameIO vin, vout;
+ memset(&vin, 0, sizeof(vin));
+ memset(&vout, 0, sizeof(vout));
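+    // The PV encoder consumes planar I420 with 16-aligned luma dimensions,
+    // so derive the chroma plane pointers from the aligned pitch and height.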
+ vin.height = ((mVideoHeight + 15) >> 4) << 4;
+ vin.pitch = ((mVideoWidth + 15) >> 4) << 4;
+ vin.timestamp = (outBuffer->timeStamp + 500) / 1000; // in ms
+ vin.yChan = mInputFrameData;
+ vin.uChan = vin.yChan + vin.height * vin.pitch;
+ vin.vChan = vin.uChan + ((vin.height * vin.pitch) >> 2);
+
+ unsigned long modTimeMs = 0;
+ int32_t nLayer = 0;
+ MP4HintTrack hintTrack;
+ if (!PVEncodeVideoFrame(mHandle, &vin, &vout,
+ &modTimeMs, outPtr, &dataLength, &nLayer) ||
+ !PVGetHintTrack(mHandle, &hintTrack)) {
+ LOG_E("Failed to encode frame or get hink track at frame %lld",
+ mNumInputFrames);
+ mSignalledError = true;
+ hintTrack.CodeType = 0;
+ ret = ENCODE_FAIL;
+ }
+ LOG_I("dataLength %d\n", dataLength);
+ CHECK(NULL == PVGetOverrunBuffer(mHandle));
+ if (hintTrack.CodeType == 0) { // I-frame serves as sync frame
+ outBuffer->flag |= ENCODE_BUFFERFLAG_SYNCFRAME;
+ }
+
+ ++mNumInputFrames;
+
+ outBuffer->flag |= ENCODE_BUFFERFLAG_ENDOFFRAME;
+ outBuffer->dataSize = dataLength;
+
+ LOG_V("End\n");
+
+ return ret;
+}
+
diff --git a/videoencoder/PVSoftMPEG4Encoder.h b/videoencoder/PVSoftMPEG4Encoder.h
new file mode 100644
index 0000000..5d34e9f
--- /dev/null
+++ b/videoencoder/PVSoftMPEG4Encoder.h
@@ -0,0 +1,84 @@
+/*
+* Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+#ifndef __PV_SOFT_MPEG4_ENCODER__
+#define __PV_SOFT_MPEG4_ENCODER__
+
+#include <va/va.h>
+#include <va/va_tpi.h>
+#include "VideoEncoderDef.h"
+#include "VideoEncoderInterface.h"
+#include "IntelMetadataBuffer.h"
+
+#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/foundation/ABase.h>
+#include "SimpleSoftOMXComponent.h"
+#include "mp4enc_api.h"
+
+class PVSoftMPEG4Encoder : IVideoEncoder {
+
+public:
+ PVSoftMPEG4Encoder(const char *name);
+ virtual ~PVSoftMPEG4Encoder();
+
+ virtual Encode_Status start(void) {return initEncoder();}
+ virtual void flush(void) { }
+ virtual Encode_Status stop(void) {return releaseEncoder();}
+ virtual Encode_Status encode(VideoEncRawBuffer *inBuffer, uint32_t timeout);
+
+ virtual Encode_Status getOutput(VideoEncOutputBuffer *outBuffer, uint32_t timeout);
+
+ virtual Encode_Status getParameters(VideoParamConfigSet *videoEncParams);
+ virtual Encode_Status setParameters(VideoParamConfigSet *videoEncParams);
+ virtual Encode_Status setConfig(VideoParamConfigSet *videoEncConfig) {return ENCODE_SUCCESS;}
+ virtual Encode_Status getConfig(VideoParamConfigSet *videoEncConfig) {return ENCODE_SUCCESS;}
+ virtual Encode_Status getMaxOutSize(uint32_t *maxSize) {return ENCODE_SUCCESS;}
+
+private:
+ void setDefaultParams(void);
+ VideoParamsCommon mComParams;
+
+ MP4EncodingMode mEncodeMode;
+ int32_t mVideoWidth;
+ int32_t mVideoHeight;
+ int32_t mVideoFrameRate;
+ int32_t mVideoBitRate;
+ int32_t mVideoColorFormat;
+ bool mStoreMetaDataInBuffers;
+ int32_t mIDRFrameRefreshIntervalInSec;
+
+ int64_t mNumInputFrames;
+ bool mStarted;
+ bool mSawInputEOS;
+ bool mSignalledError;
+ int64_t mCurTimestampUs;
+ int64_t mLastTimestampUs;
+
+ tagvideoEncControls *mHandle;
+ tagvideoEncOptions *mEncParams;
+ uint8_t *mInputFrameData;
+ uint8_t *mTrimedInputData;
+ uint8_t mVolHeader[256];
+ int32_t mVolHeaderLength;
+
+ Encode_Status initEncParams();
+ Encode_Status initEncoder();
+ Encode_Status releaseEncoder();
+
+ DISALLOW_EVIL_CONSTRUCTORS(PVSoftMPEG4Encoder);
+};
+
+#endif
diff --git a/videoencoder/VideoEncoderAVC.cpp b/videoencoder/VideoEncoderAVC.cpp
new file mode 100644
index 0000000..47d8174
--- /dev/null
+++ b/videoencoder/VideoEncoderAVC.cpp
@@ -0,0 +1,1377 @@
+/*
+* Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+#include <string.h>
+#include <stdlib.h>
+#include "VideoEncoderLog.h"
+#include "VideoEncoderAVC.h"
+#include <va/va_tpi.h>
+#include <va/va_enc_h264.h>
+#include <bitstream.h>
+
+VideoEncoderAVC::VideoEncoderAVC()
+ :VideoEncoderBase() {
+ if(VideoEncoderBase::queryProfileLevelConfig(mVADisplay, VAProfileH264High) == ENCODE_SUCCESS){
+ mComParams.profile = VAProfileH264High;
+ mComParams.level = 42;
+ }else if(VideoEncoderBase::queryProfileLevelConfig(mVADisplay, VAProfileH264Main) == ENCODE_SUCCESS){
+ mComParams.profile = VAProfileH264Main;
+ mComParams.level = 41;
+ }
+ mVideoParamsAVC.basicUnitSize = 0;
+ mVideoParamsAVC.VUIFlag = 0;
+ mVideoParamsAVC.sliceNum.iSliceNum = 2;
+ mVideoParamsAVC.sliceNum.pSliceNum = 2;
+ mVideoParamsAVC.idrInterval = 2;
+ mVideoParamsAVC.ipPeriod = 1;
+ mVideoParamsAVC.maxSliceSize = 0;
+ mVideoParamsAVC.delimiterType = AVC_DELIMITER_ANNEXB;
+ mSliceNum = 2;
+ mVideoParamsAVC.crop.LeftOffset = 0;
+ mVideoParamsAVC.crop.RightOffset = 0;
+ mVideoParamsAVC.crop.TopOffset = 0;
+ mVideoParamsAVC.crop.BottomOffset = 0;
+ mVideoParamsAVC.SAR.SarWidth = 0;
+ mVideoParamsAVC.SAR.SarHeight = 0;
+ mVideoParamsAVC.bEntropyCodingCABAC = 0;
+ mVideoParamsAVC.bWeightedPPrediction = 0;
+ mVideoParamsAVC.bDirect8x8Inference = 0;
+ mVideoParamsAVC.bConstIpred = 0;
+ mAutoReferenceSurfaceNum = 4;
+
+ packed_seq_header_param_buf_id = VA_INVALID_ID;
+ packed_seq_buf_id = VA_INVALID_ID;
+ packed_pic_header_param_buf_id = VA_INVALID_ID;
+ packed_pic_buf_id = VA_INVALID_ID;
+ packed_sei_header_param_buf_id = VA_INVALID_ID; /* the SEI buffer */
+ packed_sei_buf_id = VA_INVALID_ID;
+}
+
+Encode_Status VideoEncoderAVC::start() {
+
+ Encode_Status ret = ENCODE_SUCCESS;
+ LOG_V( "Begin\n");
+
+ if (mComParams.rcMode == VA_RC_VCM) {
+ // If we are in VCM, we will set slice num to max value
+ // mVideoParamsAVC.sliceNum.iSliceNum = (mComParams.resolution.height + 15) / 16;
+ // mVideoParamsAVC.sliceNum.pSliceNum = mVideoParamsAVC.sliceNum.iSliceNum;
+ }
+
+ ret = VideoEncoderBase::start ();
+ CHECK_ENCODE_STATUS_RETURN("VideoEncoderBase::start");
+
+ LOG_V( "end\n");
+ return ret;
+}
+
+Encode_Status VideoEncoderAVC::derivedSetParams(VideoParamConfigSet *videoEncParams) {
+
+ CHECK_NULL_RETURN_IFFAIL(videoEncParams);
+ VideoParamsAVC *encParamsAVC = reinterpret_cast <VideoParamsAVC *> (videoEncParams);
+
+    // AVC params
+ if (encParamsAVC->size != sizeof (VideoParamsAVC)) {
+ return ENCODE_INVALID_PARAMS;
+ }
+
+ if(encParamsAVC->ipPeriod == 0 || encParamsAVC->ipPeriod >4)
+ return ENCODE_INVALID_PARAMS;
+
+ if((mComParams.intraPeriod >1)&&(mComParams.intraPeriod % encParamsAVC->ipPeriod !=0))
+ return ENCODE_INVALID_PARAMS;
+
+ mVideoParamsAVC = *encParamsAVC;
+ if(mComParams.profile == VAProfileH264Baseline){
+ mVideoParamsAVC.bEntropyCodingCABAC = 0;
+ mVideoParamsAVC.bDirect8x8Inference = 0;
+ mVideoParamsAVC.bWeightedPPrediction = 0;
+ }
+ return ENCODE_SUCCESS;
+}
+
+Encode_Status VideoEncoderAVC:: derivedGetParams(VideoParamConfigSet *videoEncParams) {
+
+ CHECK_NULL_RETURN_IFFAIL(videoEncParams);
+ VideoParamsAVC *encParamsAVC = reinterpret_cast <VideoParamsAVC *> (videoEncParams);
+
+    // AVC params
+ if (encParamsAVC->size != sizeof (VideoParamsAVC)) {
+ return ENCODE_INVALID_PARAMS;
+ }
+
+ *encParamsAVC = mVideoParamsAVC;
+ return ENCODE_SUCCESS;
+
+}
+
+Encode_Status VideoEncoderAVC::derivedSetConfig(VideoParamConfigSet *videoEncConfig) {
+
+ CHECK_NULL_RETURN_IFFAIL(videoEncConfig);
+ LOG_I("Config type = %d\n", (int)videoEncConfig->type);
+
+ switch (videoEncConfig->type) {
+ case VideoConfigTypeAVCIntraPeriod: {
+
+ VideoConfigAVCIntraPeriod *configAVCIntraPeriod =
+ reinterpret_cast <VideoConfigAVCIntraPeriod *> (videoEncConfig);
+            // Config Intra Period
+ if (configAVCIntraPeriod->size != sizeof (VideoConfigAVCIntraPeriod)) {
+ return ENCODE_INVALID_PARAMS;
+ }
+
+ if(configAVCIntraPeriod->ipPeriod == 0 || configAVCIntraPeriod->ipPeriod >4)
+ return ENCODE_INVALID_PARAMS;
+ if((configAVCIntraPeriod->intraPeriod >1)&&(configAVCIntraPeriod->intraPeriod % configAVCIntraPeriod->ipPeriod !=0))
+ return ENCODE_INVALID_PARAMS;
+
+ mVideoParamsAVC.idrInterval = configAVCIntraPeriod->idrInterval;
+ mVideoParamsAVC.ipPeriod = configAVCIntraPeriod->ipPeriod;
+ mComParams.intraPeriod = configAVCIntraPeriod->intraPeriod;
+ mNewHeader = true;
+ break;
+ }
+ case VideoConfigTypeNALSize: {
+ // Config MTU
+ VideoConfigNALSize *configNALSize =
+ reinterpret_cast <VideoConfigNALSize *> (videoEncConfig);
+ if (configNALSize->size != sizeof (VideoConfigNALSize)) {
+ return ENCODE_INVALID_PARAMS;
+ }
+
+ mVideoParamsAVC.maxSliceSize = configNALSize->maxSliceSize;
+ mRenderMaxSliceSize = true;
+ break;
+ }
+ case VideoConfigTypeIDRRequest: {
+ if(mVideoParamsAVC.ipPeriod >1)
+ return ENCODE_FAIL;
+ else
+ mNewHeader = true;
+ break;
+ }
+ case VideoConfigTypeSliceNum: {
+
+ VideoConfigSliceNum *configSliceNum =
+ reinterpret_cast <VideoConfigSliceNum *> (videoEncConfig);
+ // Config Slice size
+ if (configSliceNum->size != sizeof (VideoConfigSliceNum)) {
+ return ENCODE_INVALID_PARAMS;
+ }
+
+ mVideoParamsAVC.sliceNum = configSliceNum->sliceNum;
+ break;
+ }
+ default: {
+ LOG_E ("Invalid Config Type");
+ break;
+ }
+ }
+
+ return ENCODE_SUCCESS;
+}
+
+Encode_Status VideoEncoderAVC:: derivedGetConfig(
+ VideoParamConfigSet *videoEncConfig) {
+
+ CHECK_NULL_RETURN_IFFAIL(videoEncConfig);
+ LOG_I("Config type = %d\n", (int)videoEncConfig->type);
+
+ switch (videoEncConfig->type) {
+
+ case VideoConfigTypeAVCIntraPeriod: {
+
+ VideoConfigAVCIntraPeriod *configAVCIntraPeriod =
+ reinterpret_cast <VideoConfigAVCIntraPeriod *> (videoEncConfig);
+ if (configAVCIntraPeriod->size != sizeof (VideoConfigAVCIntraPeriod)) {
+ return ENCODE_INVALID_PARAMS;
+ }
+
+ configAVCIntraPeriod->idrInterval = mVideoParamsAVC.idrInterval;
+ configAVCIntraPeriod->intraPeriod = mComParams.intraPeriod;
+ configAVCIntraPeriod->ipPeriod = mVideoParamsAVC.ipPeriod;
+
+ break;
+ }
+ case VideoConfigTypeNALSize: {
+
+ VideoConfigNALSize *configNALSize =
+ reinterpret_cast <VideoConfigNALSize *> (videoEncConfig);
+ if (configNALSize->size != sizeof (VideoConfigNALSize)) {
+ return ENCODE_INVALID_PARAMS;
+ }
+
+ configNALSize->maxSliceSize = mVideoParamsAVC.maxSliceSize;
+ break;
+ }
+ case VideoConfigTypeIDRRequest: {
+ break;
+
+ }
+ case VideoConfigTypeSliceNum: {
+
+ VideoConfigSliceNum *configSliceNum =
+ reinterpret_cast <VideoConfigSliceNum *> (videoEncConfig);
+ if (configSliceNum->size != sizeof (VideoConfigSliceNum)) {
+ return ENCODE_INVALID_PARAMS;
+ }
+
+ configSliceNum->sliceNum = mVideoParamsAVC.sliceNum;
+ break;
+ }
+ default: {
+ LOG_E ("Invalid Config Type");
+ break;
+ }
+ }
+
+ return ENCODE_SUCCESS;
+}
+
+Encode_Status VideoEncoderAVC::updateFrameInfo(EncodeTask* task) {
+    uint32_t idrPeriod = mComParams.intraPeriod * mVideoParamsAVC.idrInterval;
+    FrameType frametype;
+    uint32_t frame_num = mFrameNum;
+    uint32_t intraPeriod = mComParams.intraPeriod;
+
+    if (idrPeriod != 0) {
+        if (mVideoParamsAVC.ipPeriod > 1)
+            frame_num = frame_num % (idrPeriod + 1);
+        else
+            frame_num = frame_num % idrPeriod;
+    } else {
+        if (mComParams.intraPeriod == 0)
+            intraPeriod = 0xFFFFFFFF;
+    }
+
+
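+    // Derive the frame type from the position inside the GOP: frame 0 of
+    // each IDR period is an IDR, frames on intraPeriod boundaries are I
+    // frames, and with ipPeriod > 1 the frames between P anchors are B.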
+ if(frame_num ==0){
+ frametype = FTYPE_IDR;
+    }else if(intraPeriod ==1)
+        // intraPeriod == 1 means an I-frame-only stream
+        frametype = FTYPE_I;
+ else if(mVideoParamsAVC.ipPeriod == 1){ // no B frame
+ if((frame_num > 1) &&((frame_num -1)%intraPeriod == 0))
+ frametype = FTYPE_I;
+ else
+ frametype = FTYPE_P;
+ } else {
+ if(((frame_num-1)%intraPeriod == 0)&&(frame_num >intraPeriod))
+ frametype = FTYPE_I;
+ else{
+ frame_num = frame_num%intraPeriod;
+ if(frame_num == 0)
+ frametype = FTYPE_B;
+ else if((frame_num-1)%mVideoParamsAVC.ipPeriod == 0)
+ frametype = FTYPE_P;
+ else
+ frametype = FTYPE_B;
+ }
+ }
+
+ if (frametype == FTYPE_IDR || frametype == FTYPE_I)
+ task->flag |= ENCODE_BUFFERFLAG_SYNCFRAME;
+
+ if (frametype != task->type) {
+ const char* FrameTypeStr[10] = {"UNKNOWN", "I", "P", "B", "SI", "SP", "EI", "EP", "S", "IDR"};
+ if ((uint32_t) task->type < 9)
+ LOG_V("libMIX thinks it is %s Frame, the input is %s Frame", FrameTypeStr[frametype], FrameTypeStr[task->type]);
+ else
+ LOG_V("Wrong Frame type %d, type may not be initialized ?\n", task->type);
+ }
+
+// temporarily commented out to avoid an uninitialized-value error
+// if (task->type == FTYPE_UNKNOWN || (uint32_t) task->type > 9)
+ task->type = frametype;
+
+ return ENCODE_SUCCESS;
+}
+
+Encode_Status VideoEncoderAVC::getExtFormatOutput(VideoEncOutputBuffer *outBuffer) {
+
+ Encode_Status ret = ENCODE_SUCCESS;
+
+ LOG_V("Begin\n");
+
+ switch (outBuffer->format) {
+ case OUTPUT_CODEC_DATA: {
+ // Output the codec data
+ ret = outputCodecData(outBuffer);
+ CHECK_ENCODE_STATUS_CLEANUP("outputCodecData");
+ break;
+ }
+
+ case OUTPUT_ONE_NAL: {
+ // Output only one NAL unit
+ ret = outputOneNALU(outBuffer, true);
+ CHECK_ENCODE_STATUS_CLEANUP("outputOneNALU");
+ break;
+ }
+
+ case OUTPUT_ONE_NAL_WITHOUT_STARTCODE: {
+ ret = outputOneNALU(outBuffer, false);
+ CHECK_ENCODE_STATUS_CLEANUP("outputOneNALU");
+ break;
+ }
+
+ case OUTPUT_LENGTH_PREFIXED: {
+ // Output length prefixed
+ ret = outputLengthPrefixed(outBuffer);
+ CHECK_ENCODE_STATUS_CLEANUP("outputLengthPrefixed");
+ break;
+ }
+
+ case OUTPUT_NALULENGTHS_PREFIXED: {
+ // Output nalu lengths ahead of bitstream
+ ret = outputNaluLengthsPrefixed(outBuffer);
+ CHECK_ENCODE_STATUS_CLEANUP("outputNaluLengthsPrefixed");
+ break;
+ }
+
+ default:
+ LOG_E("Invalid buffer mode\n");
+ ret = ENCODE_FAIL;
+ break;
+ }
+
+ LOG_I("out size is = %d\n", outBuffer->dataSize);
+
+
+CLEAN_UP:
+
+
+ LOG_V("End\n");
+ return ret;
+}
+
+Encode_Status VideoEncoderAVC::getOneNALUnit(
+ uint8_t *inBuffer, uint32_t bufSize, uint32_t *nalSize,
+ uint32_t *nalType, uint32_t *nalOffset, uint32_t status) {
+ uint32_t pos = 0;
+ uint32_t zeroByteCount = 0;
+ uint32_t singleByteTable[3][2] = {{1,0},{2,0},{2,3}};
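+    // singleByteTable is a small start-code state machine indexed by
+    // [zeroByteCount][byteValue] for bytes 0 and 1: it tracks how much of
+    // the next 0x00 0x00 0x01 start code has been matched so far.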
+ uint32_t dataRemaining = 0;
+ uint8_t *dataPtr;
+
+    // No parameter checks needed here; the caller has already validated them
+ while ((inBuffer[pos++] == 0x00)) {
+ zeroByteCount ++;
+ if (pos >= bufSize) //to make sure the buffer to be accessed is valid
+ break;
+ }
+
+ if (inBuffer[pos - 1] != 0x01 || zeroByteCount < 2) {
+ LOG_E("The stream is not AnnexB format \n");
+ LOG_E("segment status is %x \n", status);
+ return ENCODE_FAIL; //not AnnexB, we won't process it
+ }
+
+ *nalType = (*(inBuffer + pos)) & 0x1F;
+ LOG_I ("NAL type = 0x%x\n", *nalType);
+
+ zeroByteCount = 0;
+ *nalOffset = pos;
+
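+    // If the driver marked this segment as a single NAL unit, the rest of
+    // the buffer is the payload and no start-code scan is needed.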
+ if (status & VA_CODED_BUF_STATUS_SINGLE_NALU) {
+ *nalSize = bufSize - pos;
+ return ENCODE_SUCCESS;
+ }
+
+ dataPtr = inBuffer + pos;
+ dataRemaining = bufSize - pos + 1;
+
+ while ((dataRemaining > 0) && (zeroByteCount < 3)) {
+ if (((((intptr_t)dataPtr) & 0xF ) == 0) && (0 == zeroByteCount)
+ && (dataRemaining > 0xF)) {
+
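+            // SSE2 fast path on 16-byte aligned data: pcmpeqb compares 16
+            // bytes against zero at once; testing only the odd-indexed
+            // bytes (mask 0xAAAA) is sufficient because a start code
+            // requires two consecutive zero bytes.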
+ __asm__ (
+ //Data input
+ "movl %1, %%ecx\n\t"//data_ptr=>ecx
+ "movl %0, %%eax\n\t"//data_remaing=>eax
+ //Main compare loop
+ //
+ "0:\n\t" //MATCH_8_ZERO:
+ "pxor %%xmm0,%%xmm0\n\t"//set 0=>xmm0
+ "pcmpeqb (%%ecx),%%xmm0\n\t"//data_ptr=xmm0,(byte==0)?0xFF:0x00
+ "pmovmskb %%xmm0, %%edx\n\t"//edx[0]=xmm0[7],edx[1]=xmm0[15],...,edx[15]=xmm0[127]
+ "test $0xAAAA, %%edx\n\t"//edx& 1010 1010 1010 1010b
+ "jnz 2f\n\t"//Not equal to zero means that at least one byte 0x00
+
+ "1:\n\t" //PREPARE_NEXT_MATCH:
+ "sub $0x10, %%eax\n\t"//16 + ecx --> ecx
+ "add $0x10, %%ecx\n\t"//eax-16 --> eax
+ "cmp $0x10, %%eax\n\t"
+ "jge 0b\n\t"//search next 16 bytes
+
+ "2:\n\t" //DATA_RET:
+ "movl %%ecx, %1\n\t"//output ecx->data_ptr
+ "movl %%eax, %0\n\t"//output eax->data_remaining
+ : "+m"(dataRemaining), "+m"(dataPtr)
+ :
+ :"eax", "ecx", "edx", "xmm0"
+ );
+ if (0 >= dataRemaining) {
+ break;
+ }
+
+ }
+ //check the value of each byte
+ if ((*dataPtr) >= 2) {
+
+ zeroByteCount = 0;
+
+ }
+ else {
+ zeroByteCount = singleByteTable[zeroByteCount][*dataPtr];
+ }
+
+ dataPtr ++;
+ dataRemaining --;
+ }
+
+ if ((3 == zeroByteCount) && (dataRemaining > 0)) {
+
+ *nalSize = bufSize - dataRemaining - *nalOffset - 3;
+
+ } else if (0 == dataRemaining) {
+
+ *nalSize = bufSize - *nalOffset;
+ }
+ return ENCODE_SUCCESS;
+}
+
+Encode_Status VideoEncoderAVC::getHeader(
+ uint8_t *inBuffer, uint32_t bufSize, uint32_t *headerSize, uint32_t status) {
+
+ uint32_t nalType = 0;
+ uint32_t nalSize = 0;
+ uint32_t nalOffset = 0;
+ uint32_t size = 0;
+ uint8_t *buf = inBuffer;
+ Encode_Status ret = ENCODE_SUCCESS;
+
+ *headerSize = 0;
+ CHECK_NULL_RETURN_IFFAIL(inBuffer);
+
+ if (bufSize == 0) {
+        // bufSize should not be 0; an error has occurred
+ LOG_E("Buffer size is 0\n");
+ return ENCODE_FAIL;
+ }
+
+ while (1) {
+ nalType = nalSize = nalOffset = 0;
+ ret = getOneNALUnit(buf, bufSize, &nalSize, &nalType, &nalOffset, status);
+ CHECK_ENCODE_STATUS_RETURN("getOneNALUnit");
+
+ LOG_I("NAL type = %d, NAL size = %d, offset = %d\n", nalType, nalSize, nalOffset);
+ size = nalSize + nalOffset;
+
+ // Codec_data should be SPS or PPS
+ if (nalType == 7 || nalType == 8) {
+ *headerSize += size;
+ buf += size;
+ bufSize -= size;
+ } else {
+ LOG_V("No header found or no header anymore\n");
+ break;
+ }
+ }
+
+ return ENCODE_SUCCESS;
+}
+
+Encode_Status VideoEncoderAVC::outputCodecData(
+ VideoEncOutputBuffer *outBuffer) {
+
+ Encode_Status ret = ENCODE_SUCCESS;
+ uint32_t headerSize = 0;
+
+ ret = getHeader((uint8_t *)mCurSegment->buf + mOffsetInSeg,
+ mCurSegment->size - mOffsetInSeg, &headerSize, mCurSegment->status);
+ CHECK_ENCODE_STATUS_RETURN("getHeader");
+ if (headerSize == 0) {
+ outBuffer->dataSize = 0;
+ mCurSegment = NULL;
+ return ENCODE_NO_REQUEST_DATA;
+ }
+
+ if (headerSize <= outBuffer->bufferSize) {
+ memcpy(outBuffer->data, (uint8_t *)mCurSegment->buf + mOffsetInSeg, headerSize);
+ mTotalSizeCopied += headerSize;
+ mOffsetInSeg += headerSize;
+ outBuffer->dataSize = headerSize;
+ outBuffer->remainingSize = 0;
+ outBuffer->flag |= ENCODE_BUFFERFLAG_ENDOFFRAME;
+ outBuffer->flag |= ENCODE_BUFFERFLAG_CODECCONFIG;
+ outBuffer->flag |= ENCODE_BUFFERFLAG_SYNCFRAME;
+ } else {
+ // we need a big enough buffer, otherwise we won't output anything
+ outBuffer->dataSize = 0;
+ outBuffer->remainingSize = headerSize;
+ outBuffer->flag |= ENCODE_BUFFERFLAG_DATAINVALID;
+ LOG_E("Buffer size too small\n");
+ return ENCODE_BUFFER_TOO_SMALL;
+ }
+
+ return ret;
+}
+
+Encode_Status VideoEncoderAVC::outputOneNALU(
+ VideoEncOutputBuffer *outBuffer, bool startCode) {
+
+ uint32_t nalType = 0;
+ uint32_t nalSize = 0;
+ uint32_t nalOffset = 0;
+ uint32_t sizeToBeCopied = 0;
+
+ Encode_Status ret = ENCODE_SUCCESS;
+ CHECK_NULL_RETURN_IFFAIL(mCurSegment->buf);
+
+ ret = getOneNALUnit((uint8_t *)mCurSegment->buf + mOffsetInSeg,
+ mCurSegment->size - mOffsetInSeg, &nalSize, &nalType, &nalOffset, mCurSegment->status);
+ CHECK_ENCODE_STATUS_RETURN("getOneNALUnit");
+
+ // check if we need startcode along with the payload
+ if (startCode) {
+ sizeToBeCopied = nalSize + nalOffset;
+ } else {
+ sizeToBeCopied = nalSize;
+ }
+
+ if (sizeToBeCopied <= outBuffer->bufferSize) {
+ if (startCode) {
+ memcpy(outBuffer->data, (uint8_t *)mCurSegment->buf + mOffsetInSeg, sizeToBeCopied);
+ } else {
+ memcpy(outBuffer->data, (uint8_t *)mCurSegment->buf + mOffsetInSeg + nalOffset,
+ sizeToBeCopied);
+ }
+ mTotalSizeCopied += sizeToBeCopied;
+ mOffsetInSeg += (nalSize + nalOffset);
+ outBuffer->dataSize = sizeToBeCopied;
+ outBuffer->flag |= ENCODE_BUFFERFLAG_PARTIALFRAME;
+ outBuffer->remainingSize = 0;
+ } else {
+ // if nothing to be copied out, set flag to invalid
+ outBuffer->dataSize = 0;
+ outBuffer->flag |= ENCODE_BUFFERFLAG_DATAINVALID;
+ outBuffer->remainingSize = sizeToBeCopied;
+ LOG_W("Buffer size too small\n");
+ return ENCODE_BUFFER_TOO_SMALL;
+ }
+
+ // check if all data in current segment has been copied out
+ if (mCurSegment->size == mOffsetInSeg) {
+ if (mCurSegment->next != NULL) {
+ mCurSegment = (VACodedBufferSegment *)mCurSegment->next;
+ mOffsetInSeg = 0;
+ } else {
+ LOG_V("End of stream\n");
+ outBuffer->flag |= ENCODE_BUFFERFLAG_ENDOFFRAME;
+ mCurSegment = NULL;
+ }
+ }
+
+ return ENCODE_SUCCESS;
+}
+
+Encode_Status VideoEncoderAVC::outputLengthPrefixed(VideoEncOutputBuffer *outBuffer) {
+
+ Encode_Status ret = ENCODE_SUCCESS;
+ uint32_t nalType = 0;
+ uint32_t nalSize = 0;
+ uint32_t nalOffset = 0;
+ uint32_t sizeCopiedHere = 0;
+
+ CHECK_NULL_RETURN_IFFAIL(mCurSegment->buf);
+
+ while (1) {
+
+ if (mCurSegment->size < mOffsetInSeg || outBuffer->bufferSize < sizeCopiedHere) {
+ LOG_E("mCurSegment->size < mOffsetInSeg || outBuffer->bufferSize < sizeCopiedHere\n");
+ return ENCODE_FAIL;
+ }
+
+ // we need to handle the whole bitstream NAL by NAL
+ ret = getOneNALUnit(
+ (uint8_t *)mCurSegment->buf + mOffsetInSeg,
+ mCurSegment->size - mOffsetInSeg, &nalSize, &nalType, &nalOffset, mCurSegment->status);
+ CHECK_ENCODE_STATUS_RETURN("getOneNALUnit");
+
+ if (nalSize + 4 <= outBuffer->bufferSize - sizeCopiedHere) {
+ // write the NAL length to bit stream
+ outBuffer->data[sizeCopiedHere] = (nalSize >> 24) & 0xff;
+ outBuffer->data[sizeCopiedHere + 1] = (nalSize >> 16) & 0xff;
+ outBuffer->data[sizeCopiedHere + 2] = (nalSize >> 8) & 0xff;
+ outBuffer->data[sizeCopiedHere + 3] = nalSize & 0xff;
+
+ sizeCopiedHere += 4;
+ mTotalSizeCopied += 4;
+
+ memcpy(outBuffer->data + sizeCopiedHere,
+ (uint8_t *)mCurSegment->buf + mOffsetInSeg + nalOffset, nalSize);
+
+ sizeCopiedHere += nalSize;
+ mTotalSizeCopied += nalSize;
+ mOffsetInSeg += (nalSize + nalOffset);
+
+ } else {
+ outBuffer->dataSize = sizeCopiedHere;
+            // The start code may be only 3 bytes while we always write a
+            // 4-byte length prefix, so remainingSize may be larger than
+            // the actual remaining data size
+ outBuffer->remainingSize = mTotalSize - mTotalSizeCopied + 100;
+ outBuffer->flag |= ENCODE_BUFFERFLAG_PARTIALFRAME;
+ LOG_E("Buffer size too small\n");
+ return ENCODE_BUFFER_TOO_SMALL;
+ }
+
+ // check if all data in current segment has been copied out
+ if (mCurSegment->size == mOffsetInSeg) {
+ if (mCurSegment->next != NULL) {
+ mCurSegment = (VACodedBufferSegment *)mCurSegment->next;
+ mOffsetInSeg = 0;
+ } else {
+ LOG_V("End of stream\n");
+ outBuffer->dataSize = sizeCopiedHere;
+ outBuffer->remainingSize = 0;
+ outBuffer->flag |= ENCODE_BUFFERFLAG_ENDOFFRAME;
+ mCurSegment = NULL;
+ break;
+ }
+ }
+ }
+
+ return ENCODE_SUCCESS;
+}
+
+Encode_Status VideoEncoderAVC::outputNaluLengthsPrefixed(VideoEncOutputBuffer *outBuffer) {
+
+ Encode_Status ret = ENCODE_SUCCESS;
+ uint32_t nalType = 0;
+ uint32_t nalSize = 0;
+ uint32_t nalOffset = 0;
+ uint32_t sizeCopiedHere = 0;
+ const uint32_t NALUINFO_OFFSET = 256;
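+    // Output layout: bytes [0..3] hold the magic 0x4E414C4C ("NALL"),
+    // bytes [4..7] the NAL count, then one 32-bit length per NAL unit;
+    // the bitstream itself starts at NALUINFO_OFFSET.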
+ uint32_t nalNum = 0;
+
+ CHECK_NULL_RETURN_IFFAIL(mCurSegment->buf);
+
+ while (1) {
+
+ if (mCurSegment->size < mOffsetInSeg || outBuffer->bufferSize < sizeCopiedHere) {
+ LOG_E("mCurSegment->size < mOffsetInSeg || outBuffer->bufferSize < sizeCopiedHere\n");
+ return ENCODE_FAIL;
+ }
+
+ // we need to handle the whole bitstream NAL by NAL
+ ret = getOneNALUnit(
+ (uint8_t *)mCurSegment->buf + mOffsetInSeg,
+ mCurSegment->size - mOffsetInSeg, &nalSize, &nalType, &nalOffset, mCurSegment->status);
+ CHECK_ENCODE_STATUS_RETURN("getOneNALUnit");
+
+ if (nalSize + 4 <= outBuffer->bufferSize - NALUINFO_OFFSET - sizeCopiedHere) {
+
+ memcpy(outBuffer->data + NALUINFO_OFFSET + sizeCopiedHere,
+ (uint8_t *)mCurSegment->buf + mOffsetInSeg, nalSize + nalOffset);
+
+ sizeCopiedHere += nalSize + nalOffset;
+ mTotalSizeCopied += nalSize + nalOffset;
+ mOffsetInSeg += (nalSize + nalOffset);
+
+ } else {
+ outBuffer->dataSize = sizeCopiedHere;
+            // The start code may be only 3 bytes while we always write a
+            // 4-byte length prefix, so remainingSize may be larger than
+            // the actual remaining data size
+ outBuffer->remainingSize = mTotalSize - mTotalSizeCopied + 100;
+ outBuffer->flag |= ENCODE_BUFFERFLAG_PARTIALFRAME;
+ LOG_E("Buffer size too small\n");
+ return ENCODE_BUFFER_TOO_SMALL;
+ }
+
+ nalNum ++;
+ uint32_t *nalLength = (uint32_t *) (outBuffer->data + (nalNum+1) * 4);
+
+ *nalLength = nalSize + nalOffset;
+
+ // check if all data in current segment has been copied out
+ if (mCurSegment->size == mOffsetInSeg) {
+ if (mCurSegment->next != NULL) {
+ mCurSegment = (VACodedBufferSegment *)mCurSegment->next;
+ mOffsetInSeg = 0;
+ } else {
+ LOG_V("End of stream\n");
+ outBuffer->dataSize = sizeCopiedHere;
+ outBuffer->remainingSize = 0;
+ outBuffer->flag |= ENCODE_BUFFERFLAG_ENDOFFRAME;
+ mCurSegment = NULL;
+ break;
+ }
+ }
+ }
+
+ outBuffer->offset = NALUINFO_OFFSET;
+ uint32_t *nalHead = (uint32_t *) outBuffer->data;
+    *nalHead = 0x4E414C4C; //'NALL'
+ *(++nalHead) = nalNum;
+
+ return ENCODE_SUCCESS;
+}
+
+Encode_Status VideoEncoderAVC::sendEncodeCommand(EncodeTask *task) {
+ Encode_Status ret = ENCODE_SUCCESS;
+
+ LOG_V( "Begin\n");
+
+ if (mFrameNum == 0 || mNewHeader) {
+ if (mRenderHrd) {
+ ret = renderHrd();
+ mRenderHrd = false;
+ CHECK_ENCODE_STATUS_RETURN("renderHrd");
+ }
+
+ mFrameNum = 0;
+ ret = renderSequenceParams(task);
+ CHECK_ENCODE_STATUS_RETURN("renderSequenceParams");
+ if (mNewHeader) {
+            mNewHeader = false; // reset the new-header-required flag
+ mFrameNum = 0; //reset mFrameNum to 0
+ updateFrameInfo(task); //recalculate frame info if mNewHeader is set true after PrepareFrameInfo in encode()
+ }
+ }
+
+ if (mRenderMaxSliceSize && mVideoParamsAVC.maxSliceSize != 0) {
+ ret = renderMaxSliceSize();
+ CHECK_ENCODE_STATUS_RETURN("renderMaxSliceSize");
+ mRenderMaxSliceSize = false;
+ }
+
+ if (mComParams.rcParams.enableIntraFrameQPControl && (task->type == FTYPE_IDR || task->type == FTYPE_I))
+ mRenderBitRate = true;
+
+ if (mRenderBitRate) {
+ ret = VideoEncoderBase::renderDynamicBitrate(task);
+ CHECK_ENCODE_STATUS_RETURN("renderDynamicBitrate");
+ }
+
+ if (mRenderAIR &&
+ (mComParams.refreshType == VIDEO_ENC_AIR ||
+ mComParams.refreshType == VIDEO_ENC_BOTH)) {
+
+ ret = renderAIR();
+ CHECK_ENCODE_STATUS_RETURN("renderAIR");
+
+ mRenderAIR = false;
+ }
+
+ if (mRenderCIR) {
+
+ ret = renderCIR();
+ CHECK_ENCODE_STATUS_RETURN("renderCIR");
+
+ mRenderCIR = false;
+ }
+
+ if (mRenderFrameRate) {
+
+ ret = VideoEncoderBase::renderDynamicFrameRate();
+ CHECK_ENCODE_STATUS_RETURN("renderDynamicFrameRate");
+
+ mRenderFrameRate = false;
+ }
+
+ ret = renderPictureParams(task);
+ CHECK_ENCODE_STATUS_RETURN("renderPictureParams");
+
+ if (mFrameNum == 0 && (mEncPackedHeaders != VA_ATTRIB_NOT_SUPPORTED)) {
+ ret = renderPackedSequenceParams(task);
+ CHECK_ENCODE_STATUS_RETURN("renderPackedSequenceParams");
+
+ ret = renderPackedPictureParams(task);
+ CHECK_ENCODE_STATUS_RETURN("renderPackedPictureParams");
+ }
+
+ ret = renderSliceParams(task);
+ CHECK_ENCODE_STATUS_RETURN("renderSliceParams");
+
+ LOG_V( "End\n");
+ return ENCODE_SUCCESS;
+}
+
+
+Encode_Status VideoEncoderAVC::renderMaxSliceSize() {
+
+ VAStatus vaStatus = VA_STATUS_SUCCESS;
+ LOG_V( "Begin\n\n");
+
+ if (mComParams.rcMode != RATE_CONTROL_VCM) {
+ LOG_W ("Not in VCM mode, but call send_max_slice_size\n");
+ return ENCODE_SUCCESS;
+ }
+
+ VAEncMiscParameterBuffer *miscEncParamBuf;
+ VAEncMiscParameterMaxSliceSize *maxSliceSizeParam;
+ VABufferID miscParamBufferID;
+
+ vaStatus = vaCreateBuffer(
+ mVADisplay, mVAContext,
+ VAEncMiscParameterBufferType,
+ sizeof(VAEncMiscParameterBuffer) + sizeof(VAEncMiscParameterMaxSliceSize),
+ 1, NULL, &miscParamBufferID);
+ CHECK_VA_STATUS_RETURN("vaCreateBuffer");
+
+ vaStatus = vaMapBuffer(mVADisplay, miscParamBufferID, (void **)&miscEncParamBuf);
+ CHECK_VA_STATUS_RETURN("vaMapBuffer");
+
+ miscEncParamBuf->type = VAEncMiscParameterTypeMaxSliceSize;
+ maxSliceSizeParam = (VAEncMiscParameterMaxSliceSize *)miscEncParamBuf->data;
+
+ maxSliceSizeParam->max_slice_size = mVideoParamsAVC.maxSliceSize;
+
+    LOG_I( "max slice size = %d\n", maxSliceSizeParam->max_slice_size);
+
+    // unmap only after logging: the mapped pointer is invalid afterwards
+    vaStatus = vaUnmapBuffer(mVADisplay, miscParamBufferID);
+    CHECK_VA_STATUS_RETURN("vaUnmapBuffer");
+
+ vaStatus = vaRenderPicture(mVADisplay, mVAContext, &miscParamBufferID, 1);
+ CHECK_VA_STATUS_RETURN("vaRenderPicture");
+
+ return ENCODE_SUCCESS;
+}
+
+Encode_Status VideoEncoderAVC::renderCIR(){
+ VAStatus vaStatus = VA_STATUS_SUCCESS;
+ LOG_I( "%s Begin\n", __FUNCTION__);
+
+ VABufferID miscParamBufferCIRid;
+ VAEncMiscParameterBuffer *misc_param;
+ VAEncMiscParameterCIR *misc_cir_param;
+
+ vaStatus = vaCreateBuffer(mVADisplay, mVAContext,
+ VAEncMiscParameterBufferType,
+ sizeof(VAEncMiscParameterBuffer) + sizeof(VAEncMiscParameterCIR),
+ 1,
+ NULL,
+ &miscParamBufferCIRid);
+ CHECK_VA_STATUS_RETURN("vaCreateBuffer");
+
+ vaStatus = vaMapBuffer(mVADisplay, miscParamBufferCIRid, (void **)&misc_param);
+ CHECK_VA_STATUS_RETURN("vaMapBuffer");
+
+ misc_param->type = VAEncMiscParameterTypeCIR;
+ misc_cir_param = (VAEncMiscParameterCIR *)misc_param->data;
+ misc_cir_param->cir_num_mbs = mComParams.cirParams.cir_num_mbs;
+ LOG_I( "cir_num_mbs %d \n", misc_cir_param->cir_num_mbs);
+
+    vaStatus = vaUnmapBuffer(mVADisplay, miscParamBufferCIRid);
+ CHECK_VA_STATUS_RETURN("vaUnmapBuffer");
+
+ vaStatus = vaRenderPicture(mVADisplay, mVAContext, &miscParamBufferCIRid, 1);
+ CHECK_VA_STATUS_RETURN("vaRenderPicture");
+
+ return ENCODE_SUCCESS;
+}
+
+Encode_Status VideoEncoderAVC::renderAIR() {
+ VAStatus vaStatus = VA_STATUS_SUCCESS;
+ LOG_V( "Begin\n\n");
+
+ VAEncMiscParameterBuffer *miscEncParamBuf;
+ VAEncMiscParameterAIR *airParams;
+ VABufferID miscParamBufferID;
+
+ vaStatus = vaCreateBuffer(
+ mVADisplay, mVAContext,
+ VAEncMiscParameterBufferType,
+        sizeof(VAEncMiscParameterBuffer) + sizeof(VAEncMiscParameterAIR),
+ 1, NULL, &miscParamBufferID);
+ CHECK_VA_STATUS_RETURN("vaCreateBuffer");
+
+ vaStatus = vaMapBuffer(mVADisplay, miscParamBufferID, (void **)&miscEncParamBuf);
+ CHECK_VA_STATUS_RETURN("vaMapBuffer");
+
+ miscEncParamBuf->type = VAEncMiscParameterTypeAIR;
+ airParams = (VAEncMiscParameterAIR *)miscEncParamBuf->data;
+
+ airParams->air_num_mbs = mComParams.airParams.airMBs;
+ airParams->air_threshold= mComParams.airParams.airThreshold;
+ airParams->air_auto = mComParams.airParams.airAuto;
+
+ vaStatus = vaUnmapBuffer(mVADisplay, miscParamBufferID);
+ CHECK_VA_STATUS_RETURN("vaUnmapBuffer");
+
+ vaStatus = vaRenderPicture(mVADisplay, mVAContext, &miscParamBufferID, 1);
+ CHECK_VA_STATUS_RETURN("vaRenderPicture");
+
+ LOG_I( "airThreshold = %d\n", airParams->air_threshold);
+ return ENCODE_SUCCESS;
+}
+
+int VideoEncoderAVC::calcLevel(int numMbs) {
+ int level = 30;
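+    // Pick the lowest level whose MaxFS (maximum frame size in macroblocks,
+    // H.264 spec Table A-1) covers the picture: 1620 MBs for level 3.0,
+    // 3600 for 3.1, 5120 for 3.2, 8192 for 4.0/4.1, and so on up to 5.1.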
+
+ if (numMbs < 1620) {
+ level = 30;
+ } else if (numMbs < 3600) {
+ level = 31;
+ } else if (numMbs < 5120) {
+ level = 32;
+ } else if (numMbs < 8192) {
+ level = 41;
+ } else if (numMbs < 8704) {
+ level = 42;
+ } else if (numMbs < 22080) {
+ level = 50;
+ } else if (numMbs < 36864) {
+ level = 51;
+ } else {
+ LOG_W("No such level can support that resolution");
+ level = 51;
+ }
+ return level;
+}
+
+Encode_Status VideoEncoderAVC::renderSequenceParams(EncodeTask *) {
+
+ VAStatus vaStatus = VA_STATUS_SUCCESS;
+ VAEncSequenceParameterBufferH264 avcSeqParams = VAEncSequenceParameterBufferH264();
+ VAEncMiscParameterBuffer *miscEncRCParamBuf;
+ VAEncMiscParameterBuffer *miscEncFrameRateParamBuf;
+ VAEncMiscParameterRateControl *rcMiscParam;
+ VAEncMiscParameterFrameRate *framerateParam;
+ int level;
+ uint32_t frameRateNum = mComParams.frameRate.frameRateNum;
+ uint32_t frameRateDenom = mComParams.frameRate.frameRateDenom;
+
+ LOG_V( "Begin\n\n");
+ vaStatus = vaCreateBuffer(mVADisplay, mVAContext,
+ VAEncMiscParameterBufferType,
+ sizeof (VAEncMiscParameterBuffer) + sizeof (VAEncMiscParameterRateControl),
+ 1, NULL,
+ &mRcParamBuf);
+ CHECK_VA_STATUS_RETURN("vaCreateBuffer");
+ vaStatus = vaMapBuffer(mVADisplay, mRcParamBuf, (void **)&miscEncRCParamBuf);
+ CHECK_VA_STATUS_RETURN("vaMapBuffer");
+
+ vaStatus = vaCreateBuffer(mVADisplay, mVAContext,
+ VAEncMiscParameterBufferType,
+ sizeof (VAEncMiscParameterBuffer) + sizeof (VAEncMiscParameterFrameRate),
+ 1, NULL,
+ &mFrameRateParamBuf);
+ CHECK_VA_STATUS_RETURN("vaCreateBuffer");
+ vaStatus = vaMapBuffer(mVADisplay, mFrameRateParamBuf, (void **)&miscEncFrameRateParamBuf);
+ CHECK_VA_STATUS_RETURN("vaMapBuffer");
+
+ miscEncRCParamBuf->type = VAEncMiscParameterTypeRateControl;
+ rcMiscParam = (VAEncMiscParameterRateControl *)miscEncRCParamBuf->data;
+ miscEncFrameRateParamBuf->type = VAEncMiscParameterTypeFrameRate;
+ framerateParam = (VAEncMiscParameterFrameRate *)miscEncFrameRateParamBuf->data;
+ // set up the sequence params for HW
+ // avcSeqParams.level_idc = mLevel;
+ avcSeqParams.intra_period = mComParams.intraPeriod;
+ avcSeqParams.intra_idr_period = mVideoParamsAVC.idrInterval;
+ avcSeqParams.ip_period = mVideoParamsAVC.ipPeriod;
+ avcSeqParams.picture_width_in_mbs = (mComParams.resolution.width + 15) / 16;
+ avcSeqParams.picture_height_in_mbs = (mComParams.resolution.height + 15) / 16;
+
+ level = calcLevel (avcSeqParams.picture_width_in_mbs * avcSeqParams.picture_height_in_mbs);
+ avcSeqParams.level_idc = level;
+ avcSeqParams.bits_per_second = mComParams.rcParams.bitRate;
+ framerateParam->framerate =
+ (unsigned int) (frameRateNum + frameRateDenom /2 ) / frameRateDenom;
+ rcMiscParam->initial_qp = mComParams.rcParams.initQP;
+ rcMiscParam->min_qp = mComParams.rcParams.minQP;
+ rcMiscParam->max_qp = mComParams.rcParams.maxQP;
+ if (mComParams.rcParams.enableIntraFrameQPControl) {
+ rcMiscParam->min_qp = mComParams.rcParams.I_minQP;
+ rcMiscParam->max_qp = mComParams.rcParams.I_maxQP;
+ }
+ rcMiscParam->window_size = mComParams.rcParams.windowSize;
+ //target bitrate is sent to libva through Sequence Parameter Buffer
+ rcMiscParam->bits_per_second = 0;
+ rcMiscParam->basic_unit_size = mVideoParamsAVC.basicUnitSize; //for rate control usage
+ avcSeqParams.intra_period = mComParams.intraPeriod;
+ //avcSeqParams.vui_flag = 248;
+ avcSeqParams.vui_parameters_present_flag = mVideoParamsAVC.VUIFlag;
+ avcSeqParams.num_units_in_tick = frameRateDenom;
+ avcSeqParams.time_scale = 2 * frameRateNum;
+ avcSeqParams.seq_parameter_set_id = 0;
+ if (mVideoParamsAVC.crop.LeftOffset ||
+ mVideoParamsAVC.crop.RightOffset ||
+ mVideoParamsAVC.crop.TopOffset ||
+ mVideoParamsAVC.crop.BottomOffset) {
+ avcSeqParams.frame_cropping_flag = true;
+ avcSeqParams.frame_crop_left_offset = mVideoParamsAVC.crop.LeftOffset;
+ avcSeqParams.frame_crop_right_offset = mVideoParamsAVC.crop.RightOffset;
+ avcSeqParams.frame_crop_top_offset = mVideoParamsAVC.crop.TopOffset;
+ avcSeqParams.frame_crop_bottom_offset = mVideoParamsAVC.crop.BottomOffset;
+ } else {
+ avcSeqParams.frame_cropping_flag = false;
+
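+        // For non-MB-aligned dimensions the coded size is rounded up to a
+        // multiple of 16 and the excess is signalled through frame cropping;
+        // offsets are halved because crop units are two pixels for 4:2:0.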
+ if (mComParams.resolution.width & 0xf) {
+ avcSeqParams.frame_cropping_flag = true;
+ uint32_t AWidth = (mComParams.resolution.width + 0xf) & (~0xf);
+ avcSeqParams.frame_crop_right_offset = ( AWidth - mComParams.resolution.width ) / 2;
+ }
+
+ if (mComParams.resolution.height & 0xf) {
+ avcSeqParams.frame_cropping_flag = true;
+ uint32_t AHeight = (mComParams.resolution.height + 0xf) & (~0xf);
+ avcSeqParams.frame_crop_bottom_offset = ( AHeight - mComParams.resolution.height ) / 2;
+ }
+ }
+
+ if(avcSeqParams.vui_parameters_present_flag && (mVideoParamsAVC.SAR.SarWidth || mVideoParamsAVC.SAR.SarHeight)) {
+ avcSeqParams.vui_fields.bits.aspect_ratio_info_present_flag = true;
+ avcSeqParams.aspect_ratio_idc = 0xff /* Extended_SAR */;
+ avcSeqParams.sar_width = mVideoParamsAVC.SAR.SarWidth;
+ avcSeqParams.sar_height = mVideoParamsAVC.SAR.SarHeight;
+ }
+
+ avcSeqParams.max_num_ref_frames = 1;
+
+ if(avcSeqParams.ip_period > 1)
+ avcSeqParams.max_num_ref_frames = 2;
+
+ LOG_V("===h264 sequence params===\n");
+ LOG_I( "seq_parameter_set_id = %d\n", (uint32_t)avcSeqParams.seq_parameter_set_id);
+ LOG_I( "level_idc = %d\n", (uint32_t)avcSeqParams.level_idc);
+ LOG_I( "intra_period = %d\n", avcSeqParams.intra_period);
+ LOG_I( "idr_interval = %d\n", avcSeqParams.intra_idr_period);
+ LOG_I( "picture_width_in_mbs = %d\n", avcSeqParams.picture_width_in_mbs);
+ LOG_I( "picture_height_in_mbs = %d\n", avcSeqParams.picture_height_in_mbs);
+ LOG_I( "bitrate = %d\n", rcMiscParam->bits_per_second);
+ LOG_I( "frame_rate = %d\n", framerateParam->framerate);
+ LOG_I( "initial_qp = %d\n", rcMiscParam->initial_qp);
+ LOG_I( "min_qp = %d\n", rcMiscParam->min_qp);
+ LOG_I( "basic_unit_size = %d\n", rcMiscParam->basic_unit_size);
+ LOG_I( "bDirect8x8Inference = %d\n",mVideoParamsAVC.bDirect8x8Inference);
+
+ // Not sure whether these settings work for all drivers
+ avcSeqParams.seq_fields.bits.frame_mbs_only_flag = 1;
+ avcSeqParams.seq_fields.bits.pic_order_cnt_type = 0;
+ avcSeqParams.seq_fields.bits.direct_8x8_inference_flag = mVideoParamsAVC.bDirect8x8Inference;
+
+ avcSeqParams.seq_fields.bits.log2_max_frame_num_minus4 = 0;
+ avcSeqParams.seq_fields.bits.log2_max_pic_order_cnt_lsb_minus4 = 2;
+// avcSeqParams.time_scale = 900;
+// avcSeqParams.num_units_in_tick = 15; /* Tc = num_units_in_tick / time_sacle */
+ // Not sure whether these settings work for all drivers
+
+ vaStatus = vaUnmapBuffer(mVADisplay, mRcParamBuf);
+ CHECK_VA_STATUS_RETURN("vaUnmapBuffer");
+ vaStatus = vaUnmapBuffer(mVADisplay, mFrameRateParamBuf);
+ CHECK_VA_STATUS_RETURN("vaUnmapBuffer");
+ vaStatus = vaCreateBuffer(
+ mVADisplay, mVAContext,
+ VAEncSequenceParameterBufferType,
+ sizeof(avcSeqParams), 1, &avcSeqParams,
+ &mSeqParamBuf);
+ CHECK_VA_STATUS_RETURN("vaCreateBuffer");
+ vaStatus = vaRenderPicture(mVADisplay, mVAContext, &mFrameRateParamBuf, 1);
+ CHECK_VA_STATUS_RETURN("vaRenderPicture");
+ vaStatus = vaRenderPicture(mVADisplay, mVAContext, &mSeqParamBuf, 1);
+ CHECK_VA_STATUS_RETURN("vaRenderPicture");
+ vaStatus = vaRenderPicture(mVADisplay, mVAContext, &mRcParamBuf, 1);
+ CHECK_VA_STATUS_RETURN("vaRenderPicture");
+
+ return ENCODE_SUCCESS;
+}
+
+Encode_Status VideoEncoderAVC::renderPackedSequenceParams(EncodeTask *) {
+
+ VAStatus vaStatus = VA_STATUS_SUCCESS;
+ VAEncSequenceParameterBufferH264 *avcSeqParams;
+ VAEncPackedHeaderParameterBuffer packed_header_param_buffer;
+ unsigned char *packed_seq_buffer = NULL;
+    unsigned int length_in_bits;
+
+ LOG_V("Begin\n");
+
+ vaStatus = vaMapBuffer(mVADisplay, mSeqParamBuf, (void **)&avcSeqParams);
+ CHECK_VA_STATUS_RETURN("vaMapBuffer");
+
+ length_in_bits = build_packed_seq_buffer(&packed_seq_buffer, mComParams.profile, avcSeqParams);
+ packed_header_param_buffer.type = VAEncPackedHeaderSequence;
+ packed_header_param_buffer.bit_length = length_in_bits;
+ packed_header_param_buffer.has_emulation_bytes = 0;
+ vaStatus = vaCreateBuffer(mVADisplay, mVAContext,
+ VAEncPackedHeaderParameterBufferType,
+ sizeof(packed_header_param_buffer), 1, &packed_header_param_buffer,
+ &packed_seq_header_param_buf_id);
+ CHECK_VA_STATUS_RETURN("vaCreateBuffer");
+
+ vaStatus = vaCreateBuffer(mVADisplay, mVAContext,
+ VAEncPackedHeaderDataBufferType,
+ (length_in_bits + 7) / 8, 1, packed_seq_buffer,
+ &packed_seq_buf_id);
+ CHECK_VA_STATUS_RETURN("vaCreateBuffer");
+
+ vaStatus = vaRenderPicture(mVADisplay, mVAContext, &packed_seq_header_param_buf_id, 1);
+ CHECK_VA_STATUS_RETURN("vaRenderPicture");
+
+ vaStatus = vaRenderPicture(mVADisplay, mVAContext, &packed_seq_buf_id, 1);
+ CHECK_VA_STATUS_RETURN("vaRenderPicture");
+
+ vaStatus = vaUnmapBuffer(mVADisplay, mSeqParamBuf);
+ CHECK_VA_STATUS_RETURN("vaUnmapBuffer");
+
+ free(packed_seq_buffer);
+
+ LOG_V("End\n");
+
+    return ENCODE_SUCCESS;
+}
+
+Encode_Status VideoEncoderAVC::renderPictureParams(EncodeTask *task) {
+
+ VAStatus vaStatus = VA_STATUS_SUCCESS;
+ VAEncPictureParameterBufferH264 avcPicParams = VAEncPictureParameterBufferH264();
+ uint32_t RefFrmIdx;
+
+ LOG_V( "Begin\n\n");
+ // set picture params for HW
+ if (mAutoReference == false) {
+ for (RefFrmIdx = 0; RefFrmIdx < 16; RefFrmIdx++) {
+ avcPicParams.ReferenceFrames[RefFrmIdx].picture_id = VA_INVALID_ID;
+ avcPicParams.ReferenceFrames[RefFrmIdx].flags = VA_PICTURE_H264_INVALID;
+ }
+ avcPicParams.ReferenceFrames[0].picture_id= task->ref_surface;
+ avcPicParams.ReferenceFrames[0].flags = VA_PICTURE_H264_SHORT_TERM_REFERENCE;
+ avcPicParams.CurrPic.picture_id= task->rec_surface;
+ // Not sure whether these settings work for all drivers
+ avcPicParams.CurrPic.TopFieldOrderCnt = mFrameNum * 2;
+
+ avcPicParams.pic_fields.bits.transform_8x8_mode_flag = 0;
+ avcPicParams.seq_parameter_set_id = 0;
+ avcPicParams.pic_parameter_set_id = 0;
+
+ avcPicParams.last_picture = 0;
+ avcPicParams.frame_num = 0;
+
+ avcPicParams.pic_init_qp = 26;
+ avcPicParams.num_ref_idx_l0_active_minus1 = 0;
+ avcPicParams.num_ref_idx_l1_active_minus1 = 0;
+
+ avcPicParams.pic_fields.bits.idr_pic_flag = 0;
+ avcPicParams.pic_fields.bits.reference_pic_flag = 0;
+ avcPicParams.pic_fields.bits.entropy_coding_mode_flag = 0;
+ avcPicParams.pic_fields.bits.weighted_pred_flag = 0;
+ avcPicParams.pic_fields.bits.weighted_bipred_idc = 0;
+ avcPicParams.pic_fields.bits.transform_8x8_mode_flag = 0;
+ avcPicParams.pic_fields.bits.deblocking_filter_control_present_flag = 1;
+
+ avcPicParams.frame_num = mFrameNum;
+ avcPicParams.pic_fields.bits.reference_pic_flag = 1;
+ // Not sure whether these settings work for all drivers
+ }else {
+ avcPicParams.CurrPic.picture_id= VA_INVALID_SURFACE;
+ for(uint32_t i =0; i< mAutoReferenceSurfaceNum; i++)
+ avcPicParams.ReferenceFrames[i].picture_id = mAutoRefSurfaces[i];
+ }
+
+ avcPicParams.pic_fields.bits.idr_pic_flag = (mFrameNum == 0);
+ avcPicParams.pic_fields.bits.entropy_coding_mode_flag = mVideoParamsAVC.bEntropyCodingCABAC;
+ avcPicParams.coded_buf = task->coded_buffer;
+ avcPicParams.last_picture = 0;
+
+ LOG_V("======h264 picture params======\n");
+ LOG_I( "reference_picture = 0x%08x\n", avcPicParams.ReferenceFrames[0].picture_id);
+ LOG_I( "reconstructed_picture = 0x%08x\n", avcPicParams.CurrPic.picture_id);
+ LOG_I( "coded_buf = 0x%08x\n", avcPicParams.coded_buf);
+ //LOG_I( "picture_width = %d\n", avcPicParams.picture_width);
+ //LOG_I( "picture_height = %d\n\n", avcPicParams.picture_height);
+
+ vaStatus = vaCreateBuffer(
+ mVADisplay, mVAContext,
+ VAEncPictureParameterBufferType,
+ sizeof(avcPicParams),
+ 1,&avcPicParams,
+ &mPicParamBuf);
+ CHECK_VA_STATUS_RETURN("vaCreateBuffer");
+
+ vaStatus = vaRenderPicture(mVADisplay, mVAContext, &mPicParamBuf, 1);
+ CHECK_VA_STATUS_RETURN("vaRenderPicture");
+
+ LOG_V( "end\n");
+ return ENCODE_SUCCESS;
+}
+
+Encode_Status VideoEncoderAVC::renderPackedPictureParams(EncodeTask *) {
+
+ VAStatus vaStatus = VA_STATUS_SUCCESS;
+ VAEncPictureParameterBufferH264 *avcPicParams;
+ VAEncPackedHeaderParameterBuffer packed_header_param_buffer;
+ unsigned char *packed_pic_buffer = NULL;
+    unsigned int length_in_bits;
+
+ LOG_V("Begin\n");
+
+ vaStatus = vaMapBuffer(mVADisplay, mPicParamBuf, (void **)&avcPicParams);
+ CHECK_VA_STATUS_RETURN("vaMapBuffer");
+
+ length_in_bits = build_packed_pic_buffer(&packed_pic_buffer, avcPicParams);
+ packed_header_param_buffer.type = VAEncPackedHeaderPicture;
+ packed_header_param_buffer.bit_length = length_in_bits;
+ packed_header_param_buffer.has_emulation_bytes = 0;
+ vaStatus = vaCreateBuffer(mVADisplay, mVAContext,
+ VAEncPackedHeaderParameterBufferType,
+ sizeof(packed_header_param_buffer), 1, &packed_header_param_buffer,
+ &packed_pic_header_param_buf_id);
+ CHECK_VA_STATUS_RETURN("vaCreateBuffer");
+
+ vaStatus = vaCreateBuffer(mVADisplay, mVAContext,
+ VAEncPackedHeaderDataBufferType,
+ (length_in_bits + 7) / 8, 1, packed_pic_buffer,
+ &packed_pic_buf_id);
+ CHECK_VA_STATUS_RETURN("vaCreateBuffer");
+
+ vaStatus = vaRenderPicture(mVADisplay, mVAContext, &packed_pic_header_param_buf_id, 1);
+ CHECK_VA_STATUS_RETURN("vaRenderPicture");
+
+ vaStatus = vaRenderPicture(mVADisplay, mVAContext, &packed_pic_buf_id, 1);
+ CHECK_VA_STATUS_RETURN("vaRenderPicture");
+
+    // unmap the picture parameter buffer that was mapped above
+    vaStatus = vaUnmapBuffer(mVADisplay, mPicParamBuf);
+ CHECK_VA_STATUS_RETURN("vaUnmapBuffer");
+
+ free(packed_pic_buffer);
+
+ LOG_V("End\n");
+
+    return ENCODE_SUCCESS;
+}
+
+Encode_Status VideoEncoderAVC::renderSliceParams(EncodeTask *task) {
+
+ VAStatus vaStatus = VA_STATUS_SUCCESS;
+
+ uint32_t sliceNum = 0;
+ uint32_t sliceIndex = 0;
+ uint32_t sliceHeightInMB = 0;
+ uint32_t maxSliceNum = 0;
+ uint32_t minSliceNum = 0;
+ uint32_t actualSliceHeightInMB = 0;
+ uint32_t startRowInMB = 0;
+ uint32_t modulus = 0;
+ uint32_t RefFrmIdx;
+
+ LOG_V( "Begin\n\n");
+
+ maxSliceNum = (mComParams.resolution.height + 15) / 16;
+ minSliceNum = 1;
+
+ if (task->type == FTYPE_I || task->type == FTYPE_IDR) {
+ sliceNum = mVideoParamsAVC.sliceNum.iSliceNum;
+ } else {
+ sliceNum = mVideoParamsAVC.sliceNum.pSliceNum;
+ }
+
+ if (sliceNum < minSliceNum) {
+ LOG_W("Slice Number is too small");
+ sliceNum = minSliceNum;
+ }
+
+ if (sliceNum > maxSliceNum) {
+ LOG_W("Slice Number is too big");
+ sliceNum = maxSliceNum;
+ }
+
+ mSliceNum= sliceNum;
+ modulus = maxSliceNum % sliceNum;
+ sliceHeightInMB = (maxSliceNum - modulus) / sliceNum ;
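+    // Distribute MB rows as evenly as possible: every slice gets
+    // sliceHeightInMB rows and the first 'modulus' slices get one extra.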
+
+ vaStatus = vaCreateBuffer(
+ mVADisplay, mVAContext,
+ VAEncSliceParameterBufferType,
+ sizeof(VAEncSliceParameterBufferH264),
+ sliceNum, NULL,
+ &mSliceParamBuf);
+ CHECK_VA_STATUS_RETURN("vaCreateBuffer");
+
+ VAEncSliceParameterBufferH264 *sliceParams, *currentSlice;
+
+ vaStatus = vaMapBuffer(mVADisplay, mSliceParamBuf, (void **)&sliceParams);
+ CHECK_VA_STATUS_RETURN("vaMapBuffer");
+    if(!sliceParams)
+        return ENCODE_NULL_PTR;
+    // zero all sliceNum entries, not just the first one
+    memset(sliceParams, 0, sliceNum * sizeof(VAEncSliceParameterBufferH264));
+
+ currentSlice = sliceParams;
+ startRowInMB = 0;
+ for (sliceIndex = 0; sliceIndex < sliceNum; sliceIndex++) {
+ currentSlice = sliceParams + sliceIndex;
+ actualSliceHeightInMB = sliceHeightInMB;
+ if (sliceIndex < modulus) {
+ actualSliceHeightInMB ++;
+ }
+
+ // starting MB row number for this slice, suppose macroblock 16x16
+ currentSlice->macroblock_address = startRowInMB * ((mComParams.resolution.width + 0xf) & ~0xf) / 16;
+ // slice height measured in MB
+ currentSlice->num_macroblocks = actualSliceHeightInMB * ((mComParams.resolution.width + 0xf) & ~0xf) / 16;
+ if(task->type == FTYPE_I||task->type == FTYPE_IDR)
+ currentSlice->slice_type = 2;
+ else if(task->type == FTYPE_P)
+ currentSlice->slice_type = 0;
+ else if(task->type == FTYPE_B)
+ currentSlice->slice_type = 1;
+ currentSlice->disable_deblocking_filter_idc = mComParams.disableDeblocking;
+
+ // This is a temporary fix suggested by Binglin for bad encoding quality issue
+ // TODO: We need a long term design for this field
+ //currentSlice->slice_flags.bits.uses_long_term_ref = 0;
+ //currentSlice->slice_flags.bits.is_long_term_ref = 0;
+
+ LOG_V("======AVC slice params======\n");
+ LOG_I( "slice_index = %d\n", (int) sliceIndex);
+ LOG_I( "macroblock_address = %d\n", (int) currentSlice->macroblock_address);
+ LOG_I( "slice_height_in_mb = %d\n", (int) currentSlice->num_macroblocks);
+ LOG_I( "slice.type = %d\n", (int) currentSlice->slice_type);
+ LOG_I("disable_deblocking_filter_idc = %d\n\n", (int) currentSlice->disable_deblocking_filter_idc);
+
+ // Not sure whether these settings work for all drivers
+ currentSlice->pic_parameter_set_id = 0;
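+        // pic_order_cnt counts fields in H.264, so POC advances by 2 per progressive frame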
+ currentSlice->pic_order_cnt_lsb = mFrameNum * 2;
+ currentSlice->direct_spatial_mv_pred_flag = 0;
+ currentSlice->num_ref_idx_l0_active_minus1 = 0; /* FIXME: ??? */
+ currentSlice->num_ref_idx_l1_active_minus1 = 0;
+ currentSlice->cabac_init_idc = 0;
+ currentSlice->slice_qp_delta = 0;
+ currentSlice->slice_alpha_c0_offset_div2 = 2;
+ currentSlice->slice_beta_offset_div2 = 2;
+ currentSlice->idr_pic_id = 0;
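+        // Invalidate the whole reference list, then point entry 0 at the single
+        // short-term reference surface used by this encoder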
+ for (RefFrmIdx = 0; RefFrmIdx < 32; RefFrmIdx++) {
+ currentSlice->RefPicList0[RefFrmIdx].picture_id = VA_INVALID_ID;
+ currentSlice->RefPicList0[RefFrmIdx].flags = VA_PICTURE_H264_INVALID;
+ }
+ currentSlice->RefPicList0[0].picture_id = task->ref_surface;
+ currentSlice->RefPicList0[0].flags = VA_PICTURE_H264_SHORT_TERM_REFERENCE;
+ // Not sure whether these settings work for all drivers
+
+ startRowInMB += actualSliceHeightInMB;
+ }
+
+ vaStatus = vaUnmapBuffer(mVADisplay, mSliceParamBuf);
+ CHECK_VA_STATUS_RETURN("vaUnmapBuffer");
+
+ vaStatus = vaRenderPicture(mVADisplay, mVAContext, &mSliceParamBuf, 1);
+ CHECK_VA_STATUS_RETURN("vaRenderPicture");
+ LOG_V( "end\n");
+ return ENCODE_SUCCESS;
+}
diff --git a/videoencoder/VideoEncoderAVC.h b/videoencoder/VideoEncoderAVC.h
new file mode 100644
index 0000000..87c9407
--- /dev/null
+++ b/videoencoder/VideoEncoderAVC.h
@@ -0,0 +1,73 @@
+/*
+* Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+#ifndef __VIDEO_ENCODER_AVC_H__
+#define __VIDEO_ENCODER_AVC_H__
+
+#include "VideoEncoderBase.h"
+
+class VideoEncoderAVC : public VideoEncoderBase {
+
+public:
+ VideoEncoderAVC();
+ ~VideoEncoderAVC() {};
+
+ virtual Encode_Status start();
+
+ virtual Encode_Status derivedSetParams(VideoParamConfigSet *videoEncParams);
+ virtual Encode_Status derivedGetParams(VideoParamConfigSet *videoEncParams);
+ virtual Encode_Status derivedGetConfig(VideoParamConfigSet *videoEncConfig);
+ virtual Encode_Status derivedSetConfig(VideoParamConfigSet *videoEncConfig);
+
+protected:
+
+ virtual Encode_Status sendEncodeCommand(EncodeTask *task);
+ virtual Encode_Status getExtFormatOutput(VideoEncOutputBuffer *outBuffer);
+ virtual Encode_Status updateFrameInfo(EncodeTask* task);
+private:
+ // Local Methods
+
+ Encode_Status getOneNALUnit(uint8_t *inBuffer, uint32_t bufSize, uint32_t *nalSize, uint32_t *nalType, uint32_t *nalOffset, uint32_t status);
+ Encode_Status getHeader(uint8_t *inBuffer, uint32_t bufSize, uint32_t *headerSize, uint32_t status);
+ Encode_Status outputCodecData(VideoEncOutputBuffer *outBuffer);
+ Encode_Status outputOneNALU(VideoEncOutputBuffer *outBuffer, bool startCode);
+ Encode_Status outputLengthPrefixed(VideoEncOutputBuffer *outBuffer);
+ Encode_Status outputNaluLengthsPrefixed(VideoEncOutputBuffer *outBuffer);
+
+ Encode_Status renderMaxSliceSize();
+ Encode_Status renderAIR();
+ Encode_Status renderCIR();
+ Encode_Status renderSequenceParams(EncodeTask *task);
+ Encode_Status renderPictureParams(EncodeTask *task);
+ Encode_Status renderSliceParams(EncodeTask *task);
+ int calcLevel(int numMbs);
+ Encode_Status renderPackedSequenceParams(EncodeTask *task);
+ Encode_Status renderPackedPictureParams(EncodeTask *task);
+
+public:
+
+ VideoParamsAVC mVideoParamsAVC;
+ uint32_t mSliceNum;
+ VABufferID packed_seq_header_param_buf_id;
+ VABufferID packed_seq_buf_id;
+ VABufferID packed_pic_header_param_buf_id;
+ VABufferID packed_pic_buf_id;
+ VABufferID packed_sei_header_param_buf_id; /* the SEI buffer */
+ VABufferID packed_sei_buf_id;
+
+};
+
+#endif /* __VIDEO_ENCODER_AVC_H__ */
diff --git a/videoencoder/VideoEncoderBase.cpp b/videoencoder/VideoEncoderBase.cpp
new file mode 100644
index 0000000..b3fd3c2
--- /dev/null
+++ b/videoencoder/VideoEncoderBase.cpp
@@ -0,0 +1,1928 @@
+/*
+* Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+#include <string.h>
+#include "VideoEncoderLog.h"
+#include "VideoEncoderBase.h"
+#include "IntelMetadataBuffer.h"
+#include <va/va_tpi.h>
+#include <va/va_android.h>
+
+VideoEncoderBase::VideoEncoderBase()
+ :mInitialized(true)
+ ,mStarted(false)
+ ,mVADisplay(NULL)
+ ,mVAContext(VA_INVALID_ID)
+ ,mVAConfig(VA_INVALID_ID)
+ ,mVAEntrypoint(VAEntrypointEncSlice)
+ ,mNewHeader(false)
+ ,mRenderMaxSliceSize(false)
+ ,mRenderQP (false)
+ ,mRenderAIR(false)
+ ,mRenderCIR(false)
+ ,mRenderFrameRate(false)
+ ,mRenderBitRate(false)
+ ,mRenderHrd(false)
+ ,mRenderMultiTemporal(false)
+ ,mForceKFrame(false)
+ ,mSeqParamBuf(0)
+ ,mPicParamBuf(0)
+ ,mSliceParamBuf(0)
+ ,mAutoRefSurfaces(NULL)
+ ,mRefSurface(VA_INVALID_SURFACE)
+ ,mRecSurface(VA_INVALID_SURFACE)
+ ,mFrameNum(0)
+ ,mCodedBufSize(0)
+ ,mAutoReference(false)
+ ,mAutoReferenceSurfaceNum(4)
+ ,mEncPackedHeaders(VA_ATTRIB_NOT_SUPPORTED)
+ ,mSliceSizeOverflow(false)
+ ,mCurOutputTask(NULL)
+ ,mOutCodedBuffer(0)
+ ,mOutCodedBufferPtr(NULL)
+ ,mCurSegment(NULL)
+ ,mOffsetInSeg(0)
+ ,mTotalSize(0)
+ ,mTotalSizeCopied(0)
+ ,mFrameSkipped(false)
+ ,mSupportedSurfaceMemType(0)
+ ,mVASurfaceMappingAction(0)
+#ifdef INTEL_VIDEO_XPROC_SHARING
+ ,mSessionFlag(0)
+#endif
+ {
+
+ VAStatus vaStatus = VA_STATUS_SUCCESS;
+ // here the display can be any value, use following one
+ // just for consistence purpose, so don't define it
+ unsigned int display = 0x18C34078;
+ int majorVersion = -1;
+ int minorVersion = -1;
+
+ setDefaultParams();
+
+ LOG_V("vaGetDisplay \n");
+ mVADisplay = vaGetDisplay(&display);
+ if (mVADisplay == NULL) {
+ LOG_E("vaGetDisplay failed.");
+ }
+
+ vaStatus = vaInitialize(mVADisplay, &majorVersion, &minorVersion);
+ LOG_V("vaInitialize \n");
+ if (vaStatus != VA_STATUS_SUCCESS) {
+ LOG_E( "Failed vaInitialize, vaStatus = %d\n", vaStatus);
+ mInitialized = false;
+ }
+}
+
+VideoEncoderBase::~VideoEncoderBase() {
+
+ VAStatus vaStatus = VA_STATUS_SUCCESS;
+
+ stop();
+
+ vaStatus = vaTerminate(mVADisplay);
+ LOG_V( "vaTerminate\n");
+ if (vaStatus != VA_STATUS_SUCCESS) {
+ LOG_W( "Failed vaTerminate, vaStatus = %d\n", vaStatus);
+ } else {
+ mVADisplay = NULL;
+ }
+
+#ifdef INTEL_VIDEO_XPROC_SHARING
+ IntelMetadataBuffer::ClearContext(mSessionFlag, false);
+#endif
+}
+
+Encode_Status VideoEncoderBase::start() {
+
+ Encode_Status ret = ENCODE_SUCCESS;
+ VAStatus vaStatus = VA_STATUS_SUCCESS;
+
+ if (!mInitialized) {
+ LOGE("Encoder Initialize fail can not start");
+ return ENCODE_DRIVER_FAIL;
+ }
+
+ if (mStarted) {
+ LOG_V("Encoder has been started\n");
+ return ENCODE_ALREADY_INIT;
+ }
+
+ if (mComParams.rawFormat != RAW_FORMAT_NV12)
+#ifdef IMG_GFX
+ mVASurfaceMappingAction |= MAP_ACTION_COLORCONVERT;
+#else
+ return ENCODE_NOT_SUPPORTED;
+#endif
+
+ if (mComParams.resolution.width > 2048 || mComParams.resolution.height > 2048){
+ LOGE("Unsupported resolution width %d, height %d\n",
+ mComParams.resolution.width, mComParams.resolution.height);
+ return ENCODE_NOT_SUPPORTED;
+ }
+ queryAutoReferenceConfig(mComParams.profile);
+
+    VAConfigAttrib vaAttrib_tmp[6], vaAttrib[VAConfigAttribTypeMax];
+ int vaAttribNumber = 0;
+ vaAttrib_tmp[0].type = VAConfigAttribRTFormat;
+ vaAttrib_tmp[1].type = VAConfigAttribRateControl;
+ vaAttrib_tmp[2].type = VAConfigAttribEncAutoReference;
+ vaAttrib_tmp[3].type = VAConfigAttribEncPackedHeaders;
+ vaAttrib_tmp[4].type = VAConfigAttribEncMaxRefFrames;
+ vaAttrib_tmp[5].type = VAConfigAttribEncRateControlExt;
+
+ vaStatus = vaGetConfigAttributes(mVADisplay, mComParams.profile,
+ VAEntrypointEncSlice, &vaAttrib_tmp[0], 6);
+ CHECK_VA_STATUS_RETURN("vaGetConfigAttributes");
+
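+    // Only attributes the driver reports as supported are copied into vaAttrib
+    // and passed to vaCreateConfig below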
+ if((vaAttrib_tmp[0].value & VA_RT_FORMAT_YUV420) != 0)
+ {
+ vaAttrib[vaAttribNumber].type = VAConfigAttribRTFormat;
+ vaAttrib[vaAttribNumber].value = VA_RT_FORMAT_YUV420;
+ vaAttribNumber++;
+ }
+
+ vaAttrib[vaAttribNumber].type = VAConfigAttribRateControl;
+ vaAttrib[vaAttribNumber].value = mComParams.rcMode;
+ vaAttribNumber++;
+
+ vaAttrib[vaAttribNumber].type = VAConfigAttribEncAutoReference;
+ vaAttrib[vaAttribNumber].value = mAutoReference ? 1 : VA_ATTRIB_NOT_SUPPORTED;
+ vaAttribNumber++;
+
+    if(vaAttrib_tmp[3].value != VA_ATTRIB_NOT_SUPPORTED)
+    {
+        vaAttrib[vaAttribNumber].type = VAConfigAttribEncPackedHeaders;
+        vaAttrib[vaAttribNumber].value = vaAttrib_tmp[3].value;
+        vaAttribNumber++;
+        mEncPackedHeaders = vaAttrib_tmp[3].value;
+    }
+
+    if(vaAttrib_tmp[4].value != VA_ATTRIB_NOT_SUPPORTED)
+    {
+        vaAttrib[vaAttribNumber].type = VAConfigAttribEncMaxRefFrames;
+        vaAttrib[vaAttribNumber].value = vaAttrib_tmp[4].value;
+        vaAttribNumber++;
+        mEncMaxRefFrames = vaAttrib_tmp[4].value;
+    }
+
+ if(vaAttrib_tmp[5].value != VA_ATTRIB_NOT_SUPPORTED)
+ {
+ vaAttrib[vaAttribNumber].type = VAConfigAttribEncRateControlExt;
+ vaAttrib[vaAttribNumber].value = mComParams.numberOfLayer;
+ vaAttribNumber++;
+ }
+
+ LOG_V( "======VA Configuration======\n");
+ LOG_I( "profile = %d\n", mComParams.profile);
+ LOG_I( "mVAEntrypoint = %d\n", mVAEntrypoint);
+ LOG_I( "vaAttrib[0].type = %d\n", vaAttrib[0].type);
+ LOG_I( "vaAttrib[1].type = %d\n", vaAttrib[1].type);
+ LOG_I( "vaAttrib[2].type = %d\n", vaAttrib[2].type);
+ LOG_I( "vaAttrib[0].value (Format) = %d\n", vaAttrib[0].value);
+ LOG_I( "vaAttrib[1].value (RC mode) = %d\n", vaAttrib[1].value);
+ LOG_I( "vaAttrib[2].value (AutoReference) = %d\n", vaAttrib[2].value);
+ LOG_I( "vaAttribNumber is %d\n", vaAttribNumber);
+ LOG_I( "mComParams.numberOfLayer is %d\n", mComParams.numberOfLayer);
+
+ LOG_V( "vaCreateConfig\n");
+
+ vaStatus = vaCreateConfig(
+ mVADisplay, mComParams.profile, mVAEntrypoint,
+ &vaAttrib[0], vaAttribNumber, &(mVAConfig));
+// &vaAttrib[0], 3, &(mVAConfig)); //uncomment this after psb_video supports
+ CHECK_VA_STATUS_RETURN("vaCreateConfig");
+
+ querySupportedSurfaceMemTypes();
+
+ if (mComParams.rcMode == VA_RC_VCM) {
+ // Following three features are only enabled in VCM mode
+ mRenderMaxSliceSize = true;
+ mRenderAIR = true;
+ mRenderBitRate = true;
+ }
+
+ LOG_V( "======VA Create Surfaces for Rec/Ref frames ======\n");
+
+ uint32_t stride_aligned, height_aligned;
+ if(mAutoReference == false){
+ stride_aligned = (mComParams.resolution.width + 15) & ~15;
+ height_aligned = (mComParams.resolution.height + 15) & ~15;
+ }else{
+ // this alignment is used for AVC. For vp8 encode, driver will handle the alignment
+ if(mComParams.profile == VAProfileVP8Version0_3)
+ {
+ stride_aligned = mComParams.resolution.width;
+ height_aligned = mComParams.resolution.height;
+ mVASurfaceMappingAction |= MAP_ACTION_COPY;
+ }
+ else
+ {
+ stride_aligned = (mComParams.resolution.width + 63) & ~63; //on Merr, stride must be 64 aligned.
+ height_aligned = (mComParams.resolution.height + 31) & ~31;
+ mVASurfaceMappingAction |= MAP_ACTION_ALIGN64;
+ }
+ }
+
+ if(mAutoReference == false){
+ mRefSurface = CreateNewVASurface(mVADisplay, stride_aligned, height_aligned);
+ mRecSurface = CreateNewVASurface(mVADisplay, stride_aligned, height_aligned);
+
+ }else {
+ mAutoRefSurfaces = new VASurfaceID [mAutoReferenceSurfaceNum];
+ for(uint32_t i = 0; i < mAutoReferenceSurfaceNum; i ++)
+ mAutoRefSurfaces[i] = CreateNewVASurface(mVADisplay, stride_aligned, height_aligned);
+ }
+ CHECK_VA_STATUS_RETURN("vaCreateSurfaces");
+
+ //Prepare all Surfaces to be added into Context
+ uint32_t contextSurfaceCnt;
+ if(mAutoReference == false )
+ contextSurfaceCnt = 2 + mSrcSurfaceMapList.size();
+ else
+ contextSurfaceCnt = mAutoReferenceSurfaceNum + mSrcSurfaceMapList.size();
+
+ VASurfaceID *contextSurfaces = new VASurfaceID[contextSurfaceCnt];
+ int32_t index = -1;
+ android::List<VASurfaceMap *>::iterator map_node;
+
+ for(map_node = mSrcSurfaceMapList.begin(); map_node != mSrcSurfaceMapList.end(); map_node++)
+ {
+ contextSurfaces[++index] = (*map_node)->getVASurface();
+ (*map_node)->setTracked();
+ }
+
+ if(mAutoReference == false){
+ contextSurfaces[++index] = mRefSurface;
+ contextSurfaces[++index] = mRecSurface;
+ } else {
+ for (uint32_t i=0; i < mAutoReferenceSurfaceNum; i++)
+ contextSurfaces[++index] = mAutoRefSurfaces[i];
+ }
+
+ //Initialize and save the VA context ID
+ LOG_V( "vaCreateContext\n");
+ vaStatus = vaCreateContext(mVADisplay, mVAConfig,
+#ifdef IMG_GFX
+ mComParams.resolution.width,
+ mComParams.resolution.height,
+#else
+ stride_aligned,
+ height_aligned,
+#endif
+ VA_PROGRESSIVE, contextSurfaces, contextSurfaceCnt,
+ &(mVAContext));
+ CHECK_VA_STATUS_RETURN("vaCreateContext");
+
+ delete [] contextSurfaces;
+
+ LOG_I("Success to create libva context width %d, height %d\n",
+ mComParams.resolution.width, mComParams.resolution.height);
+
+ uint32_t maxSize = 0;
+ ret = getMaxOutSize(&maxSize);
+ CHECK_ENCODE_STATUS_RETURN("getMaxOutSize");
+
+ // Create CodedBuffer for output
+ VABufferID VACodedBuffer;
+
+ for(uint32_t i = 0; i <mComParams.codedBufNum; i++) {
+ vaStatus = vaCreateBuffer(mVADisplay, mVAContext,
+ VAEncCodedBufferType,
+ mCodedBufSize,
+ 1, NULL,
+ &VACodedBuffer);
+ CHECK_VA_STATUS_RETURN("vaCreateBuffer::VAEncCodedBufferType");
+
+ mVACodedBufferList.push_back(VACodedBuffer);
+ }
+
+ if (ret == ENCODE_SUCCESS)
+ mStarted = true;
+
+ LOG_V( "end\n");
+ return ret;
+}
+
+Encode_Status VideoEncoderBase::encode(VideoEncRawBuffer *inBuffer, uint32_t timeout) {
+
+ Encode_Status ret = ENCODE_SUCCESS;
+ VAStatus vaStatus = VA_STATUS_SUCCESS;
+
+ if (!mStarted) {
+ LOG_E("Encoder has not initialized yet\n");
+ return ENCODE_NOT_INIT;
+ }
+
+ CHECK_NULL_RETURN_IFFAIL(inBuffer);
+
+    //======Prepare all resources the encoder needs======
+
+ //Prepare encode vaSurface
+ VASurfaceID sid = VA_INVALID_SURFACE;
+ ret = manageSrcSurface(inBuffer, &sid);
+ CHECK_ENCODE_STATUS_RETURN("manageSrcSurface");
+
+ //Prepare CodedBuffer
+ mCodedBuffer_Lock.lock();
+ if(mVACodedBufferList.empty()){
+ if(timeout == FUNC_BLOCK)
+ mCodedBuffer_Cond.wait(mCodedBuffer_Lock);
+ else if (timeout > 0) {
+            if(NO_ERROR != mCodedBuffer_Cond.waitRelative(mCodedBuffer_Lock, 1000000*timeout)){
+ mCodedBuffer_Lock.unlock();
+ LOG_E("Time out wait for Coded buffer.\n");
+ return ENCODE_DEVICE_BUSY;
+ }
+ }
+ else {//Nonblock
+ mCodedBuffer_Lock.unlock();
+ LOG_E("Coded buffer is not ready now.\n");
+ return ENCODE_DEVICE_BUSY;
+ }
+ }
+
+ if(mVACodedBufferList.empty()){
+ mCodedBuffer_Lock.unlock();
+ return ENCODE_DEVICE_BUSY;
+ }
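+    // Pop the first free coded buffer from the pool; it is returned to the pool
+    // in cleanupForOutput() or on the error path below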
+ VABufferID coded_buf = (VABufferID) *(mVACodedBufferList.begin());
+ mVACodedBufferList.erase(mVACodedBufferList.begin());
+ mCodedBuffer_Lock.unlock();
+
+ LOG_V("CodedBuffer ID 0x%08x\n", coded_buf);
+
+ //All resources are ready, start to assemble EncodeTask
+ EncodeTask* task = new EncodeTask();
+
+ task->completed = false;
+ task->enc_surface = sid;
+ task->coded_buffer = coded_buf;
+ task->timestamp = inBuffer->timeStamp;
+ task->priv = inBuffer->priv;
+
+ //Setup frame info, like flag ( SYNCFRAME), frame number, type etc
+ task->type = inBuffer->type;
+ task->flag = inBuffer->flag;
+ PrepareFrameInfo(task);
+
+ if(mAutoReference == false){
+ //Setup ref /rec frames
+ //TODO: B frame support, temporary use same logic
+ switch (inBuffer->type) {
+ case FTYPE_UNKNOWN:
+ case FTYPE_IDR:
+ case FTYPE_I:
+ case FTYPE_P:
+ {
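+                // Ping-pong rec/ref: the previous frame's reconstruction becomes the
+                // new reference; if the previous frame was skipped, keep the old reference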
+ if(!mFrameSkipped) {
+ VASurfaceID tmpSurface = mRecSurface;
+ mRecSurface = mRefSurface;
+ mRefSurface = tmpSurface;
+ }
+
+ task->ref_surface = mRefSurface;
+ task->rec_surface = mRecSurface;
+
+ break;
+ }
+ case FTYPE_B:
+ default:
+ LOG_V("Something wrong, B frame may not be supported in this mode\n");
+ ret = ENCODE_NOT_SUPPORTED;
+ goto CLEAN_UP;
+ }
+ }else {
+ task->ref_surface = VA_INVALID_SURFACE;
+ task->rec_surface = VA_INVALID_SURFACE;
+ }
+ //======Start Encoding, add task to list======
+ LOG_V("Start Encoding vaSurface=0x%08x\n", task->enc_surface);
+
+ vaStatus = vaBeginPicture(mVADisplay, mVAContext, task->enc_surface);
+ CHECK_VA_STATUS_GOTO_CLEANUP("vaBeginPicture");
+
+ ret = sendEncodeCommand(task);
+ CHECK_ENCODE_STATUS_CLEANUP("sendEncodeCommand");
+
+ vaStatus = vaEndPicture(mVADisplay, mVAContext);
+ CHECK_VA_STATUS_GOTO_CLEANUP("vaEndPicture");
+
+ LOG_V("Add Task %p into Encode Task list\n", task);
+ mEncodeTask_Lock.lock();
+ mEncodeTaskList.push_back(task);
+ mEncodeTask_Cond.signal();
+ mEncodeTask_Lock.unlock();
+
+ mFrameNum ++;
+
+ LOG_V("encode return Success\n");
+
+ return ENCODE_SUCCESS;
+
+CLEAN_UP:
+
+ delete task;
+ mCodedBuffer_Lock.lock();
+ mVACodedBufferList.push_back(coded_buf); //push to CodedBuffer pool again since it is not used
+ mCodedBuffer_Cond.signal();
+ mCodedBuffer_Lock.unlock();
+
+ LOG_V("encode return error=%x\n", ret);
+
+ return ret;
+}
+
+/*
+  1. First check whether a task is already outputting data; if so, continue with it, otherwise take one from the list.
+  2. Depending on the mode (block / non-block / block with timeout), if the task is not yet completed, sync the
+     surface before outputting its data.
+  3. mCurOutputTask records the task getOutput() is working on, so it is not pushed back onto the list when a
+     non-block or block-with-timeout call fails.
+  4. Once all output data has been copied out, mCurOutputTask is reset to NULL.
+*/
+Encode_Status VideoEncoderBase::getOutput(VideoEncOutputBuffer *outBuffer, uint32_t timeout) {
+
+ Encode_Status ret = ENCODE_SUCCESS;
+ VAStatus vaStatus = VA_STATUS_SUCCESS;
+ bool useLocalBuffer = false;
+
+ CHECK_NULL_RETURN_IFFAIL(outBuffer);
+
+ if (mCurOutputTask == NULL) {
+ mEncodeTask_Lock.lock();
+ if(mEncodeTaskList.empty()) {
+ LOG_V("getOutput CurrentTask is NULL\n");
+ if(timeout == FUNC_BLOCK) {
+ LOG_V("waiting for task....\n");
+ mEncodeTask_Cond.wait(mEncodeTask_Lock);
+ } else if (timeout > 0) {
+ LOG_V("waiting for task in %i ms....\n", timeout);
+ if(NO_ERROR != mEncodeTask_Cond.waitRelative(mEncodeTask_Lock, 1000000*timeout)) {
+ mEncodeTask_Lock.unlock();
+ LOG_E("Time out wait for encode task.\n");
+ return ENCODE_NO_REQUEST_DATA;
+ }
+ } else {//Nonblock
+ mEncodeTask_Lock.unlock();
+ return ENCODE_NO_REQUEST_DATA;
+ }
+ }
+
+ if(mEncodeTaskList.empty()){
+ mEncodeTask_Lock.unlock();
+ return ENCODE_DATA_NOT_READY;
+ }
+ mCurOutputTask = *(mEncodeTaskList.begin());
+ mEncodeTaskList.erase(mEncodeTaskList.begin());
+ mEncodeTask_Lock.unlock();
+ }
+
+ //sync/query/wait task if not completed
+ if (mCurOutputTask->completed == false) {
+ VASurfaceStatus vaSurfaceStatus;
+
+ if (timeout == FUNC_BLOCK) {
+ //block mode, direct sync surface to output data
+
+ mOutCodedBuffer = mCurOutputTask->coded_buffer;
+
+ // Check frame skip
+ // Need encoding to be completed before calling query surface below to
+ // get the right skip frame flag for current frame
+ // It is a requirement of video driver
+            // vaSyncSurface syncs the wrong frame when the same surface is rendered
+            // multiple times, so use vaMapBuffer instead
+ LOG_I ("block mode, vaMapBuffer ID = 0x%08x\n", mOutCodedBuffer);
+ if (mOutCodedBufferPtr == NULL) {
+ vaStatus = vaMapBuffer (mVADisplay, mOutCodedBuffer, (void **)&mOutCodedBufferPtr);
+ CHECK_VA_STATUS_GOTO_CLEANUP("vaMapBuffer");
+ CHECK_NULL_RETURN_IFFAIL(mOutCodedBufferPtr);
+ }
+
+ vaStatus = vaQuerySurfaceStatus(mVADisplay, mCurOutputTask->enc_surface, &vaSurfaceStatus);
+ CHECK_VA_STATUS_RETURN("vaQuerySurfaceStatus");
+ mFrameSkipped = vaSurfaceStatus & VASurfaceSkipped;
+
+ mCurOutputTask->completed = true;
+
+ } else {
+ //For both block with timeout and non-block mode, query surface, if ready, output data
+ LOG_I ("non-block mode, vaQuerySurfaceStatus ID = 0x%08x\n", mCurOutputTask->enc_surface);
+
+ vaStatus = vaQuerySurfaceStatus(mVADisplay, mCurOutputTask->enc_surface, &vaSurfaceStatus);
+ if (vaSurfaceStatus & VASurfaceReady) {
+ mOutCodedBuffer = mCurOutputTask->coded_buffer;
+ mFrameSkipped = vaSurfaceStatus & VASurfaceSkipped;
+ mCurOutputTask->completed = true;
+ //if need to call SyncSurface again ?
+
+            } else { //encode not complete yet; keep all context and return directly
+ return ENCODE_DATA_NOT_READY;
+ }
+
+ }
+
+ }
+
+ //start to output data
+ ret = prepareForOutput(outBuffer, &useLocalBuffer);
+ CHECK_ENCODE_STATUS_CLEANUP("prepareForOutput");
+
+ //copy all flags to outBuffer
+ outBuffer->offset = 0;
+ outBuffer->flag = mCurOutputTask->flag;
+ outBuffer->type = mCurOutputTask->type;
+ outBuffer->timeStamp = mCurOutputTask->timestamp;
+ outBuffer->priv = mCurOutputTask->priv;
+
+ if (outBuffer->format == OUTPUT_EVERYTHING || outBuffer->format == OUTPUT_FRAME_DATA) {
+ ret = outputAllData(outBuffer);
+ CHECK_ENCODE_STATUS_CLEANUP("outputAllData");
+ }else {
+ ret = getExtFormatOutput(outBuffer);
+ CHECK_ENCODE_STATUS_CLEANUP("getExtFormatOutput");
+ }
+
+ LOG_I("out size for this getOutput call = %d\n", outBuffer->dataSize);
+
+ ret = cleanupForOutput();
+ CHECK_ENCODE_STATUS_CLEANUP("cleanupForOutput");
+
+ LOG_V("getOutput return Success, Frame skip is %d\n", mFrameSkipped);
+
+ return ENCODE_SUCCESS;
+
+CLEAN_UP:
+
+ if (outBuffer->data && (useLocalBuffer == true)) {
+ delete[] outBuffer->data;
+ outBuffer->data = NULL;
+ useLocalBuffer = false;
+ }
+
+ if (mOutCodedBufferPtr != NULL) {
+ vaStatus = vaUnmapBuffer(mVADisplay, mOutCodedBuffer);
+ mOutCodedBufferPtr = NULL;
+ mCurSegment = NULL;
+ }
+
+ delete mCurOutputTask;
+ mCurOutputTask = NULL;
+ mCodedBuffer_Lock.lock();
+ mVACodedBufferList.push_back(mOutCodedBuffer);
+ mCodedBuffer_Cond.signal();
+ mCodedBuffer_Lock.unlock();
+
+ LOG_V("getOutput return error=%x\n", ret);
+ return ret;
+}
+
+void VideoEncoderBase::flush() {
+
+ LOG_V( "Begin\n");
+
+    // reset the properties
+ mFrameNum = 0;
+
+ LOG_V( "end\n");
+}
+
+Encode_Status VideoEncoderBase::stop() {
+
+ VAStatus vaStatus = VA_STATUS_SUCCESS;
+ Encode_Status ret = ENCODE_SUCCESS;
+
+ LOG_V( "Begin\n");
+
+    // It is possible that the pointers below were allocated
+    // before mStarted was set to true
+ if (!mStarted) {
+ LOG_V("Encoder has been stopped\n");
+ return ENCODE_SUCCESS;
+ }
+ if (mAutoRefSurfaces) {
+ delete[] mAutoRefSurfaces;
+ mAutoRefSurfaces = NULL;
+ }
+
+ mCodedBuffer_Lock.lock();
+ mVACodedBufferList.clear();
+ mCodedBuffer_Lock.unlock();
+ mCodedBuffer_Cond.broadcast();
+
+ //Delete all uncompleted tasks
+ mEncodeTask_Lock.lock();
+ while(! mEncodeTaskList.empty())
+ {
+ delete *mEncodeTaskList.begin();
+ mEncodeTaskList.erase(mEncodeTaskList.begin());
+ }
+ mEncodeTask_Lock.unlock();
+ mEncodeTask_Cond.broadcast();
+
+ //Release Src Surface Buffer Map, destroy surface manually since it is not added into context
+ LOG_V( "Rlease Src Surface Map\n");
+ while(! mSrcSurfaceMapList.empty())
+ {
+ delete (*mSrcSurfaceMapList.begin());
+ mSrcSurfaceMapList.erase(mSrcSurfaceMapList.begin());
+ }
+
+ LOG_V( "vaDestroyContext\n");
+ if (mVAContext != VA_INVALID_ID) {
+ vaStatus = vaDestroyContext(mVADisplay, mVAContext);
+ CHECK_VA_STATUS_GOTO_CLEANUP("vaDestroyContext");
+ }
+
+ LOG_V( "vaDestroyConfig\n");
+ if (mVAConfig != VA_INVALID_ID) {
+ vaStatus = vaDestroyConfig(mVADisplay, mVAConfig);
+ CHECK_VA_STATUS_GOTO_CLEANUP("vaDestroyConfig");
+ }
+
+CLEAN_UP:
+
+ mStarted = false;
+ mSliceSizeOverflow = false;
+    mCurOutputTask = NULL;
+    mOutCodedBuffer = 0;
+    mCurSegment = NULL;
+    mOffsetInSeg = 0;
+ mTotalSize = 0;
+ mTotalSizeCopied = 0;
+ mFrameSkipped = false;
+ mSupportedSurfaceMemType = 0;
+
+ LOG_V( "end\n");
+ return ret;
+}
+
+Encode_Status VideoEncoderBase::prepareForOutput(
+ VideoEncOutputBuffer *outBuffer, bool *useLocalBuffer) {
+
+ VAStatus vaStatus = VA_STATUS_SUCCESS;
+ VACodedBufferSegment *vaCodedSeg = NULL;
+ uint32_t status = 0;
+
+ LOG_V( "begin\n");
+    // Parameters are not checked here because the caller already checked them.
+    // mCurSegment == NULL means this is the first call after a frame finished encoding
+ if (mCurSegment == NULL) {
+ if (mOutCodedBufferPtr == NULL) {
+ vaStatus = vaMapBuffer (mVADisplay, mOutCodedBuffer, (void **)&mOutCodedBufferPtr);
+ CHECK_VA_STATUS_RETURN("vaMapBuffer");
+ CHECK_NULL_RETURN_IFFAIL(mOutCodedBufferPtr);
+ }
+
+ LOG_I ("Coded Buffer ID been mapped = 0x%08x\n", mOutCodedBuffer);
+
+ mTotalSize = 0;
+ mOffsetInSeg = 0;
+ mTotalSizeCopied = 0;
+ vaCodedSeg = (VACodedBufferSegment *)mOutCodedBufferPtr;
+ mCurSegment = (VACodedBufferSegment *)mOutCodedBufferPtr;
+
+ while (1) {
+
+ mTotalSize += vaCodedSeg->size;
+ status = vaCodedSeg->status;
+#ifndef IMG_GFX
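+            // Skip leading 0xFF padding bytes (up to 16) that the driver may
+            // prepend to a coded segment on non-IMG graphics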
+ uint8_t *pTemp;
+ uint32_t ii;
+ pTemp = (uint8_t*)vaCodedSeg->buf;
+ for(ii = 0; ii < 16;){
+ if (*(pTemp + ii) == 0xFF)
+ ii++;
+ else
+ break;
+ }
+ if (ii > 0) {
+ mOffsetInSeg = ii;
+ }
+#endif
+ if (!mSliceSizeOverflow) {
+ mSliceSizeOverflow = status & VA_CODED_BUF_STATUS_SLICE_OVERFLOW_MASK;
+ }
+
+ if (vaCodedSeg->next == NULL)
+ break;
+
+ vaCodedSeg = (VACodedBufferSegment *)vaCodedSeg->next;
+ }
+ }
+
+    // Two buffer allocation modes are supported:
+    // either the application allocates the buffer and passes it to encode,
+    // or the encoder allocates the memory itself.
+
+    // outBuffer->data == NULL means the app did not allocate the buffer, so the encoder allocates it
+ if (outBuffer->data == NULL) {
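+        // allocate the remaining (not yet copied) size plus 100 bytes of slack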
+ *useLocalBuffer = true;
+ outBuffer->data = new uint8_t[mTotalSize - mTotalSizeCopied + 100];
+ if (outBuffer->data == NULL) {
+ LOG_E( "outBuffer->data == NULL\n");
+ return ENCODE_NO_MEMORY;
+ }
+        outBuffer->bufferSize = mTotalSize - mTotalSizeCopied + 100;
+ outBuffer->dataSize = 0;
+ }
+
+ // Clear all flag for every call
+ outBuffer->flag = 0;
+ if (mSliceSizeOverflow) outBuffer->flag |= ENCODE_BUFFERFLAG_SLICEOVERFOLOW;
+
+ if (!mCurSegment)
+ return ENCODE_FAIL;
+
+ if (mCurSegment->size < mOffsetInSeg) {
+ LOG_E("mCurSegment->size < mOffsetInSeg\n");
+ return ENCODE_FAIL;
+ }
+
+ // Make sure we have data in current segment
+ if (mCurSegment->size == mOffsetInSeg) {
+ if (mCurSegment->next != NULL) {
+ mCurSegment = (VACodedBufferSegment *)mCurSegment->next;
+ mOffsetInSeg = 0;
+ } else {
+ LOG_V("No more data available\n");
+ outBuffer->flag |= ENCODE_BUFFERFLAG_DATAINVALID;
+ outBuffer->dataSize = 0;
+ mCurSegment = NULL;
+ return ENCODE_NO_REQUEST_DATA;
+ }
+ }
+
+ LOG_V( "end\n");
+ return ENCODE_SUCCESS;
+}
+
+Encode_Status VideoEncoderBase::cleanupForOutput() {
+
+ VAStatus vaStatus = VA_STATUS_SUCCESS;
+
+ //mCurSegment is NULL means all data has been copied out
+ if (mCurSegment == NULL && mOutCodedBufferPtr) {
+ vaStatus = vaUnmapBuffer(mVADisplay, mOutCodedBuffer);
+ CHECK_VA_STATUS_RETURN("vaUnmapBuffer");
+ mOutCodedBufferPtr = NULL;
+ mTotalSize = 0;
+ mOffsetInSeg = 0;
+ mTotalSizeCopied = 0;
+
+ delete mCurOutputTask;
+ mCurOutputTask = NULL;
+ mCodedBuffer_Lock.lock();
+ mVACodedBufferList.push_back(mOutCodedBuffer);
+ mCodedBuffer_Cond.signal();
+ mCodedBuffer_Lock.unlock();
+
+ LOG_V("All data has been outputted, return CodedBuffer 0x%08x to pool\n", mOutCodedBuffer);
+ }
+ return ENCODE_SUCCESS;
+}
+
+Encode_Status VideoEncoderBase::queryProfileLevelConfig(VADisplay dpy, VAProfile profile) {
+
+ VAStatus vaStatus = VA_STATUS_SUCCESS;
+ VAEntrypoint entryPtr[8];
+ int i, entryPtrNum;
+
+ if(profile == VAProfileH264Main) //need to be fixed
+ return ENCODE_NOT_SUPPORTED;
+
+ vaStatus = vaQueryConfigEntrypoints(dpy, profile, entryPtr, &entryPtrNum);
+ CHECK_VA_STATUS_RETURN("vaQueryConfigEntrypoints");
+
+ for(i=0; i<entryPtrNum; i++){
+ if(entryPtr[i] == VAEntrypointEncSlice)
+ return ENCODE_SUCCESS;
+ }
+
+ return ENCODE_NOT_SUPPORTED;
+}
+
+Encode_Status VideoEncoderBase::queryAutoReferenceConfig(VAProfile profile) {
+
+ VAStatus vaStatus = VA_STATUS_SUCCESS;
+ VAConfigAttrib attrib_list;
+ attrib_list.type = VAConfigAttribEncAutoReference;
+ attrib_list.value = VA_ATTRIB_NOT_SUPPORTED;
+
+ vaStatus = vaGetConfigAttributes(mVADisplay, profile, VAEntrypointEncSlice, &attrib_list, 1);
+ if(attrib_list.value == VA_ATTRIB_NOT_SUPPORTED )
+ mAutoReference = false;
+ else
+ mAutoReference = true;
+
+ return ENCODE_SUCCESS;
+}
+
+Encode_Status VideoEncoderBase::querySupportedSurfaceMemTypes() {
+
+ VAStatus vaStatus = VA_STATUS_SUCCESS;
+
+ unsigned int num = 0;
+
+ VASurfaceAttrib* attribs = NULL;
+
+ //get attribs number
+ vaStatus = vaQuerySurfaceAttributes(mVADisplay, mVAConfig, attribs, &num);
+ CHECK_VA_STATUS_RETURN("vaGetSurfaceAttributes");
+
+ if (num == 0)
+ return ENCODE_SUCCESS;
+
+ attribs = new VASurfaceAttrib[num];
+
+ vaStatus = vaQuerySurfaceAttributes(mVADisplay, mVAConfig, attribs, &num);
+ CHECK_VA_STATUS_RETURN("vaGetSurfaceAttributes");
+
+    for(uint32_t i = 0; i < num; i ++) {
+        if (attribs[i].type == VASurfaceAttribMemoryType) {
+            mSupportedSurfaceMemType = attribs[i].value.value.i;
+            break;
+        }
+    }
+
+    delete[] attribs;
+
+ return ENCODE_SUCCESS;
+}
+
+Encode_Status VideoEncoderBase::outputAllData(VideoEncOutputBuffer *outBuffer) {
+
+    // Data size copied in this single call
+ uint32_t sizeCopiedHere = 0;
+ uint32_t sizeToBeCopied = 0;
+
+ CHECK_NULL_RETURN_IFFAIL(outBuffer->data);
+
+ while (1) {
+
+ LOG_I("mCurSegment->size = %d, mOffsetInSeg = %d\n", mCurSegment->size, mOffsetInSeg);
+ LOG_I("outBuffer->bufferSize = %d, sizeCopiedHere = %d, mTotalSizeCopied = %d\n",
+ outBuffer->bufferSize, sizeCopiedHere, mTotalSizeCopied);
+
+ if (mCurSegment->size < mOffsetInSeg || outBuffer->bufferSize < sizeCopiedHere) {
+ LOG_E("mCurSegment->size < mOffsetInSeg || outBuffer->bufferSize < sizeCopiedHere\n");
+ return ENCODE_FAIL;
+ }
+
+ if ((mCurSegment->size - mOffsetInSeg) <= outBuffer->bufferSize - sizeCopiedHere) {
+ sizeToBeCopied = mCurSegment->size - mOffsetInSeg;
+ memcpy(outBuffer->data + sizeCopiedHere,
+ (uint8_t *)mCurSegment->buf + mOffsetInSeg, sizeToBeCopied);
+ sizeCopiedHere += sizeToBeCopied;
+ mTotalSizeCopied += sizeToBeCopied;
+ mOffsetInSeg = 0;
+ } else {
+ sizeToBeCopied = outBuffer->bufferSize - sizeCopiedHere;
+ memcpy(outBuffer->data + sizeCopiedHere,
+ (uint8_t *)mCurSegment->buf + mOffsetInSeg, outBuffer->bufferSize - sizeCopiedHere);
+ mTotalSizeCopied += sizeToBeCopied;
+ mOffsetInSeg += sizeToBeCopied;
+ outBuffer->dataSize = outBuffer->bufferSize;
+ outBuffer->remainingSize = mTotalSize - mTotalSizeCopied;
+ outBuffer->flag |= ENCODE_BUFFERFLAG_PARTIALFRAME;
+ return ENCODE_BUFFER_TOO_SMALL;
+ }
+
+ if (mCurSegment->next == NULL) {
+ outBuffer->dataSize = sizeCopiedHere;
+ outBuffer->remainingSize = 0;
+ outBuffer->flag |= ENCODE_BUFFERFLAG_ENDOFFRAME;
+ mCurSegment = NULL;
+ return ENCODE_SUCCESS;
+ }
+
+ mCurSegment = (VACodedBufferSegment *)mCurSegment->next;
+ mOffsetInSeg = 0;
+ }
+}
+
+void VideoEncoderBase::setDefaultParams() {
+
+ // Set default value for input parameters
+ mComParams.profile = VAProfileH264Baseline;
+ mComParams.level = 41;
+ mComParams.rawFormat = RAW_FORMAT_NV12;
+ mComParams.frameRate.frameRateNum = 30;
+ mComParams.frameRate.frameRateDenom = 1;
+ mComParams.resolution.width = 0;
+ mComParams.resolution.height = 0;
+ mComParams.intraPeriod = 30;
+ mComParams.rcMode = RATE_CONTROL_NONE;
+ mComParams.rcParams.initQP = 15;
+ mComParams.rcParams.minQP = 0;
+ mComParams.rcParams.maxQP = 0;
+ mComParams.rcParams.I_minQP = 0;
+ mComParams.rcParams.I_maxQP = 0;
+ mComParams.rcParams.bitRate = 640000;
+ mComParams.rcParams.targetPercentage= 0;
+ mComParams.rcParams.windowSize = 0;
+ mComParams.rcParams.disableFrameSkip = 0;
+ mComParams.rcParams.disableBitsStuffing = 1;
+ mComParams.rcParams.enableIntraFrameQPControl = 0;
+ mComParams.rcParams.temporalFrameRate = 0;
+ mComParams.rcParams.temporalID = 0;
+ mComParams.cyclicFrameInterval = 30;
+ mComParams.refreshType = VIDEO_ENC_NONIR;
+ mComParams.airParams.airMBs = 0;
+ mComParams.airParams.airThreshold = 0;
+ mComParams.airParams.airAuto = 1;
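+    // H.264 disable_deblocking_filter_idc: 0 = filter enabled, 1 = disabled,
+    // 2 = enabled except across slice boundaries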
+ mComParams.disableDeblocking = 2;
+ mComParams.syncEncMode = false;
+ mComParams.codedBufNum = 2;
+ mComParams.numberOfLayer = 1;
+ mComParams.nPeriodicity = 0;
+ memset(mComParams.nLayerID,0,32*sizeof(uint32_t));
+
+ mHrdParam.bufferSize = 0;
+ mHrdParam.initBufferFullness = 0;
+
+ mStoreMetaDataInBuffers.isEnabled = false;
+}
+
+Encode_Status VideoEncoderBase::setParameters(
+ VideoParamConfigSet *videoEncParams) {
+
+ Encode_Status ret = ENCODE_SUCCESS;
+ CHECK_NULL_RETURN_IFFAIL(videoEncParams);
+ LOG_I("Config type = %x\n", (int)videoEncParams->type);
+
+ if (mStarted) {
+ LOG_E("Encoder has been initialized, should use setConfig to change configurations\n");
+ return ENCODE_ALREADY_INIT;
+ }
+
+ switch (videoEncParams->type) {
+ case VideoParamsTypeCommon: {
+
+ VideoParamsCommon *paramsCommon =
+ reinterpret_cast <VideoParamsCommon *> (videoEncParams);
+ if (paramsCommon->size != sizeof (VideoParamsCommon)) {
+ return ENCODE_INVALID_PARAMS;
+ }
+ if(paramsCommon->codedBufNum < 2)
+            paramsCommon->codedBufNum = 2;
+ mComParams = *paramsCommon;
+ break;
+ }
+
+ case VideoParamsTypeUpSteamBuffer: {
+
+ VideoParamsUpstreamBuffer *upStreamBuffer =
+ reinterpret_cast <VideoParamsUpstreamBuffer *> (videoEncParams);
+
+ if (upStreamBuffer->size != sizeof (VideoParamsUpstreamBuffer)) {
+ return ENCODE_INVALID_PARAMS;
+ }
+
+ ret = setUpstreamBuffer(upStreamBuffer);
+ break;
+ }
+
+ case VideoParamsTypeUsrptrBuffer: {
+
+        // usrptr can only be retrieved via getParameters,
+        // so this case should not happen
+ break;
+ }
+
+ case VideoParamsTypeHRD: {
+ VideoParamsHRD *hrd =
+ reinterpret_cast <VideoParamsHRD *> (videoEncParams);
+
+ if (hrd->size != sizeof (VideoParamsHRD)) {
+ return ENCODE_INVALID_PARAMS;
+ }
+
+ mHrdParam.bufferSize = hrd->bufferSize;
+ mHrdParam.initBufferFullness = hrd->initBufferFullness;
+ mRenderHrd = true;
+
+ break;
+ }
+
+ case VideoParamsTypeStoreMetaDataInBuffers: {
+ VideoParamsStoreMetaDataInBuffers *metadata =
+ reinterpret_cast <VideoParamsStoreMetaDataInBuffers *> (videoEncParams);
+
+ if (metadata->size != sizeof (VideoParamsStoreMetaDataInBuffers)) {
+ return ENCODE_INVALID_PARAMS;
+ }
+
+ mStoreMetaDataInBuffers.isEnabled = metadata->isEnabled;
+
+ break;
+ }
+
+ case VideoParamsTypeTemporalLayer:{
+ VideoParamsTemporalLayer *temporallayer =
+ reinterpret_cast <VideoParamsTemporalLayer *> (videoEncParams);
+
+ if (temporallayer->size != sizeof(VideoParamsTemporalLayer)) {
+ return ENCODE_INVALID_PARAMS;
+ }
+
+ mComParams.numberOfLayer = temporallayer->numberOfLayer;
+ mComParams.nPeriodicity = temporallayer->nPeriodicity;
+ for(uint32_t i=0;i<temporallayer->nPeriodicity;i++)
+ mComParams.nLayerID[i] = temporallayer->nLayerID[i];
+ mRenderMultiTemporal = true;
+ break;
+ }
+
+ case VideoParamsTypeAVC:
+ case VideoParamsTypeH263:
+ case VideoParamsTypeMP4:
+ case VideoParamsTypeVC1:
+ case VideoParamsTypeVP8: {
+ ret = derivedSetParams(videoEncParams);
+ break;
+ }
+
+ default: {
+ LOG_E ("Wrong ParamType here\n");
+ return ENCODE_INVALID_PARAMS;
+ }
+ }
+ return ret;
+}
+
+Encode_Status VideoEncoderBase::getParameters(
+ VideoParamConfigSet *videoEncParams) {
+
+ Encode_Status ret = ENCODE_SUCCESS;
+ CHECK_NULL_RETURN_IFFAIL(videoEncParams);
+ LOG_I("Config type = %d\n", (int)videoEncParams->type);
+
+ switch (videoEncParams->type) {
+ case VideoParamsTypeCommon: {
+
+ VideoParamsCommon *paramsCommon =
+ reinterpret_cast <VideoParamsCommon *> (videoEncParams);
+
+ if (paramsCommon->size != sizeof (VideoParamsCommon)) {
+ return ENCODE_INVALID_PARAMS;
+ }
+ *paramsCommon = mComParams;
+ break;
+ }
+
+ case VideoParamsTypeUpSteamBuffer: {
+
+        // Getting the upstream buffer is possible,
+        // but not very meaningful
+ break;
+ }
+
+ case VideoParamsTypeUsrptrBuffer: {
+ VideoParamsUsrptrBuffer *usrptrBuffer =
+ reinterpret_cast <VideoParamsUsrptrBuffer *> (videoEncParams);
+
+ if (usrptrBuffer->size != sizeof (VideoParamsUsrptrBuffer)) {
+ return ENCODE_INVALID_PARAMS;
+ }
+
+ ret = getNewUsrptrFromSurface(
+ usrptrBuffer->width, usrptrBuffer->height, usrptrBuffer->format,
+ usrptrBuffer->expectedSize, &(usrptrBuffer->actualSize),
+ &(usrptrBuffer->stride), &(usrptrBuffer->usrPtr));
+
+ break;
+ }
+
+ case VideoParamsTypeHRD: {
+ VideoParamsHRD *hrd =
+ reinterpret_cast <VideoParamsHRD *> (videoEncParams);
+
+ if (hrd->size != sizeof (VideoParamsHRD)) {
+ return ENCODE_INVALID_PARAMS;
+ }
+
+ hrd->bufferSize = mHrdParam.bufferSize;
+ hrd->initBufferFullness = mHrdParam.initBufferFullness;
+
+ break;
+ }
+
+ case VideoParamsTypeStoreMetaDataInBuffers: {
+ VideoParamsStoreMetaDataInBuffers *metadata =
+ reinterpret_cast <VideoParamsStoreMetaDataInBuffers *> (videoEncParams);
+
+ if (metadata->size != sizeof (VideoParamsStoreMetaDataInBuffers)) {
+ return ENCODE_INVALID_PARAMS;
+ }
+
+ metadata->isEnabled = mStoreMetaDataInBuffers.isEnabled;
+
+ break;
+ }
+
+ case VideoParamsTypeProfileLevel: {
+ VideoParamsProfileLevel *profilelevel =
+ reinterpret_cast <VideoParamsProfileLevel *> (videoEncParams);
+
+ if (profilelevel->size != sizeof (VideoParamsProfileLevel)) {
+ return ENCODE_INVALID_PARAMS;
+ }
+
+ profilelevel->level = 0;
+ if(queryProfileLevelConfig(mVADisplay, profilelevel->profile) == ENCODE_SUCCESS){
+ profilelevel->isSupported = true;
+ if(profilelevel->profile == VAProfileH264High)
+ profilelevel->level = 42;
+ else if(profilelevel->profile == VAProfileH264Main)
+ profilelevel->level = 42;
+ else if(profilelevel->profile == VAProfileH264Baseline)
+ profilelevel->level = 41;
+ else{
+ profilelevel->level = 0;
+ profilelevel->isSupported = false;
+ }
+ }
+        break;
+    }
+
+ case VideoParamsTypeTemporalLayer:{
+ VideoParamsTemporalLayer *temporallayer =
+ reinterpret_cast <VideoParamsTemporalLayer *> (videoEncParams);
+
+ if(temporallayer->size != sizeof(VideoParamsTemporalLayer)) {
+ return ENCODE_INVALID_PARAMS;
+ }
+
+ temporallayer->numberOfLayer = mComParams.numberOfLayer;
+
+ break;
+ }
+
+ case VideoParamsTypeAVC:
+ case VideoParamsTypeH263:
+ case VideoParamsTypeMP4:
+ case VideoParamsTypeVC1:
+ case VideoParamsTypeVP8: {
+ derivedGetParams(videoEncParams);
+ break;
+ }
+
+ default: {
+ LOG_E ("Wrong ParamType here\n");
+ break;
+ }
+
+ }
+ return ENCODE_SUCCESS;
+}
+
+Encode_Status VideoEncoderBase::setConfig(VideoParamConfigSet *videoEncConfig) {
+
+ Encode_Status ret = ENCODE_SUCCESS;
+ CHECK_NULL_RETURN_IFFAIL(videoEncConfig);
+ LOG_I("Config type = %d\n", (int)videoEncConfig->type);
+
+ // workaround
+#if 0
+ if (!mStarted) {
+ LOG_E("Encoder has not initialized yet, can't call setConfig\n");
+ return ENCODE_NOT_INIT;
+ }
+#endif
+
+ switch (videoEncConfig->type) {
+ case VideoConfigTypeFrameRate: {
+ VideoConfigFrameRate *configFrameRate =
+ reinterpret_cast <VideoConfigFrameRate *> (videoEncConfig);
+
+ if (configFrameRate->size != sizeof (VideoConfigFrameRate)) {
+ return ENCODE_INVALID_PARAMS;
+ }
+ mComParams.frameRate = configFrameRate->frameRate;
+ mRenderFrameRate = true;
+ break;
+ }
+
+ case VideoConfigTypeBitRate: {
+ VideoConfigBitRate *configBitRate =
+ reinterpret_cast <VideoConfigBitRate *> (videoEncConfig);
+
+ if (configBitRate->size != sizeof (VideoConfigBitRate)) {
+ return ENCODE_INVALID_PARAMS;
+ }
+
+ if(mComParams.numberOfLayer == 1)
+ {
+ mComParams.rcParams = configBitRate->rcParams;
+ mRenderBitRate = true;
+ }
+ else
+ {
+ mTemporalLayerBitrateFramerate[configBitRate->rcParams.temporalID].nLayerID = configBitRate->rcParams.temporalID;
+ mTemporalLayerBitrateFramerate[configBitRate->rcParams.temporalID].bitRate = configBitRate->rcParams.bitRate;
+ mTemporalLayerBitrateFramerate[configBitRate->rcParams.temporalID].frameRate = configBitRate->rcParams.temporalFrameRate;
+ }
+ break;
+ }
+
+ case VideoConfigTypeResolution: {
+
+ // Not Implemented
+ break;
+ }
+ case VideoConfigTypeIntraRefreshType: {
+
+ VideoConfigIntraRefreshType *configIntraRefreshType =
+ reinterpret_cast <VideoConfigIntraRefreshType *> (videoEncConfig);
+
+ if (configIntraRefreshType->size != sizeof (VideoConfigIntraRefreshType)) {
+ return ENCODE_INVALID_PARAMS;
+ }
+ mComParams.refreshType = configIntraRefreshType->refreshType;
+ break;
+ }
+
+ case VideoConfigTypeCyclicFrameInterval: {
+ VideoConfigCyclicFrameInterval *configCyclicFrameInterval =
+ reinterpret_cast <VideoConfigCyclicFrameInterval *> (videoEncConfig);
+ if (configCyclicFrameInterval->size != sizeof (VideoConfigCyclicFrameInterval)) {
+ return ENCODE_INVALID_PARAMS;
+ }
+
+ mComParams.cyclicFrameInterval = configCyclicFrameInterval->cyclicFrameInterval;
+ break;
+ }
+
+ case VideoConfigTypeAIR: {
+
+ VideoConfigAIR *configAIR = reinterpret_cast <VideoConfigAIR *> (videoEncConfig);
+
+ if (configAIR->size != sizeof (VideoConfigAIR)) {
+ return ENCODE_INVALID_PARAMS;
+ }
+
+ mComParams.airParams = configAIR->airParams;
+ mRenderAIR = true;
+ break;
+ }
+ case VideoConfigTypeCIR: {
+
+ VideoConfigCIR *configCIR = reinterpret_cast <VideoConfigCIR *> (videoEncConfig);
+
+ if (configCIR->size != sizeof (VideoConfigCIR)) {
+ return ENCODE_INVALID_PARAMS;
+ }
+
+ mComParams.cirParams = configCIR->cirParams;
+ mRenderCIR = true;
+ break;
+ }
+ case VideoConfigTypeAVCIntraPeriod:
+ case VideoConfigTypeNALSize:
+ case VideoConfigTypeIDRRequest:
+ case VideoConfigTypeSliceNum:
+ case VideoConfigTypeVP8:
+ case VideoConfigTypeVP8ReferenceFrame:
+ case VideoConfigTypeVP8MaxFrameSizeRatio:{
+ ret = derivedSetConfig(videoEncConfig);
+ break;
+ }
+ default: {
+ LOG_E ("Wrong Config Type here\n");
+ break;
+ }
+ }
+ return ret;
+}
+
+Encode_Status VideoEncoderBase::getConfig(VideoParamConfigSet *videoEncConfig) {
+
+ Encode_Status ret = ENCODE_SUCCESS;
+ CHECK_NULL_RETURN_IFFAIL(videoEncConfig);
+ LOG_I("Config type = %d\n", (int)videoEncConfig->type);
+
+ switch (videoEncConfig->type) {
+ case VideoConfigTypeFrameRate: {
+ VideoConfigFrameRate *configFrameRate =
+ reinterpret_cast <VideoConfigFrameRate *> (videoEncConfig);
+
+ if (configFrameRate->size != sizeof (VideoConfigFrameRate)) {
+ return ENCODE_INVALID_PARAMS;
+ }
+
+ configFrameRate->frameRate = mComParams.frameRate;
+ break;
+ }
+
+ case VideoConfigTypeBitRate: {
+ VideoConfigBitRate *configBitRate =
+ reinterpret_cast <VideoConfigBitRate *> (videoEncConfig);
+
+ if (configBitRate->size != sizeof (VideoConfigBitRate)) {
+ return ENCODE_INVALID_PARAMS;
+ }
+ configBitRate->rcParams = mComParams.rcParams;
+
+
+ break;
+ }
+ case VideoConfigTypeResolution: {
+ // Not Implemented
+ break;
+ }
+ case VideoConfigTypeIntraRefreshType: {
+
+ VideoConfigIntraRefreshType *configIntraRefreshType =
+ reinterpret_cast <VideoConfigIntraRefreshType *> (videoEncConfig);
+
+ if (configIntraRefreshType->size != sizeof (VideoConfigIntraRefreshType)) {
+ return ENCODE_INVALID_PARAMS;
+ }
+ configIntraRefreshType->refreshType = mComParams.refreshType;
+ break;
+ }
+
+ case VideoConfigTypeCyclicFrameInterval: {
+ VideoConfigCyclicFrameInterval *configCyclicFrameInterval =
+ reinterpret_cast <VideoConfigCyclicFrameInterval *> (videoEncConfig);
+ if (configCyclicFrameInterval->size != sizeof (VideoConfigCyclicFrameInterval)) {
+ return ENCODE_INVALID_PARAMS;
+ }
+
+ configCyclicFrameInterval->cyclicFrameInterval = mComParams.cyclicFrameInterval;
+ break;
+ }
+
+ case VideoConfigTypeAIR: {
+
+ VideoConfigAIR *configAIR = reinterpret_cast <VideoConfigAIR *> (videoEncConfig);
+
+ if (configAIR->size != sizeof (VideoConfigAIR)) {
+ return ENCODE_INVALID_PARAMS;
+ }
+
+ configAIR->airParams = mComParams.airParams;
+ break;
+ }
+ case VideoConfigTypeCIR: {
+
+ VideoConfigCIR *configCIR = reinterpret_cast <VideoConfigCIR *> (videoEncConfig);
+
+ if (configCIR->size != sizeof (VideoConfigCIR)) {
+ return ENCODE_INVALID_PARAMS;
+ }
+
+ configCIR->cirParams = mComParams.cirParams;
+ break;
+ }
+ case VideoConfigTypeAVCIntraPeriod:
+ case VideoConfigTypeNALSize:
+ case VideoConfigTypeIDRRequest:
+ case VideoConfigTypeSliceNum:
+ case VideoConfigTypeVP8: {
+
+ ret = derivedGetConfig(videoEncConfig);
+ break;
+ }
+ default: {
+ LOG_E ("Wrong ParamType here\n");
+ break;
+ }
+ }
+ return ret;
+}
+
+void VideoEncoderBase::PrepareFrameInfo(EncodeTask* task) {
+    if (mNewHeader) mFrameNum = 0;
+    LOG_I( "mFrameNum = %d ", mFrameNum);
+
+    updateFrameInfo(task);
+}
+
+Encode_Status VideoEncoderBase::updateFrameInfo(EncodeTask* task) {
+
+ task->type = FTYPE_P;
+
+ // determine the picture type
+ if (mFrameNum == 0)
+ task->type = FTYPE_I;
+ if (mComParams.intraPeriod != 0 && ((mFrameNum % mComParams.intraPeriod) == 0))
+ task->type = FTYPE_I;
+
+ if (task->type == FTYPE_I)
+ task->flag |= ENCODE_BUFFERFLAG_SYNCFRAME;
+
+ return ENCODE_SUCCESS;
+}
+
+Encode_Status VideoEncoderBase::getMaxOutSize (uint32_t *maxSize) {
+
+ uint32_t size = mComParams.resolution.width * mComParams.resolution.height;
+
+ if (maxSize == NULL) {
+ LOG_E("maxSize == NULL\n");
+ return ENCODE_NULL_PTR;
+ }
+
+ LOG_V( "Begin\n");
+
+ if (mCodedBufSize > 0) {
+ *maxSize = mCodedBufSize;
+ LOG_V ("Already calculate the max encoded size, get the value directly");
+ return ENCODE_SUCCESS;
+ }
+
+ // here, VP8 is different from AVC/H263
+ if(mComParams.profile == VAProfileVP8Version0_3) // for VP8 encode
+ {
+        // According to VIED suggestions, in CBR mode the coded buffer should be 3 bytes per luma pixel,
+        // and in CBR_HRD mode it should be 5 * rc_buf_sz * rc_target_bitrate;
+        // for now mCodedBufSize is hardcoded to 2M to work around a coded buffer size issue
+ /*
+ if(mComParams.rcMode == VA_RC_CBR) // CBR_HRD mode
+ mCodedBufSize = 5 * mComParams.rcParams.bitRate * 6000;
+ else // CBR mode
+ mCodedBufSize = 3 * mComParams.resolution.width * mComParams.resolution.height;
+ */
+ mCodedBufSize = (2 * 1024 * 1024 + 31) & (~31);
+ }
+ else // for AVC/H263/MPEG4 encode
+ {
+        // based on the rate control mode, calculate the default encoded buffer size
+ if (mComParams.rcMode == VA_RC_NONE) {
+ mCodedBufSize = (size * 400) / (16 * 16);
+ // set to value according to QP
+ } else {
+ mCodedBufSize = mComParams.rcParams.bitRate / 4;
+ }
+
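+        // Clamp: at least 400 bytes per 16x16 macroblock, at most 12 bytes per
+        // pixel (1.5 * 8), then round up to a 16-byte boundary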
+ mCodedBufSize = max (mCodedBufSize , (size * 400) / (16 * 16));
+
+ // in case got a very large user input bit rate value
+ mCodedBufSize = min(mCodedBufSize, (size * 1.5 * 8));
+ mCodedBufSize = (mCodedBufSize + 15) &(~15);
+ }
+
+ *maxSize = mCodedBufSize;
+ return ENCODE_SUCCESS;
+}
+
+Encode_Status VideoEncoderBase::getNewUsrptrFromSurface(
+ uint32_t width, uint32_t height, uint32_t format,
+ uint32_t expectedSize, uint32_t *outsize, uint32_t *stride, uint8_t **usrptr) {
+
+ Encode_Status ret = ENCODE_FAIL;
+ VAStatus vaStatus = VA_STATUS_SUCCESS;
+
+ VASurfaceID surface = VA_INVALID_SURFACE;
+ VAImage image;
+ uint32_t index = 0;
+
+ LOG_V( "Begin\n");
+ // If encode session has been configured, we can not request surface creation anymore
+ if (mStarted) {
+ LOG_E( "Already Initialized, can not request VA surface anymore\n");
+ return ENCODE_WRONG_STATE;
+ }
+ if (width<=0 || height<=0 ||outsize == NULL ||stride == NULL || usrptr == NULL) {
+ LOG_E("width<=0 || height<=0 || outsize == NULL || stride == NULL ||usrptr == NULL\n");
+ return ENCODE_NULL_PTR;
+ }
+
+    // Currently only NV12 is supported in the VA API.
+    // The format tells us the number of planes
+ if (format != STRING_TO_FOURCC("NV12")) {
+ LOG_W ("Format is not supported\n");
+ return ENCODE_NOT_SUPPORTED;
+ }
+
+ surface = CreateNewVASurface(mVADisplay, width, height);
+ if (surface == VA_INVALID_SURFACE)
+ return ENCODE_DRIVER_FAIL;
+
+ vaStatus = vaDeriveImage(mVADisplay, surface, &image);
+ CHECK_VA_STATUS_RETURN("vaDeriveImage");
+ LOG_V( "vaDeriveImage Done\n");
+ vaStatus = vaMapBuffer(mVADisplay, image.buf, (void **) usrptr);
+ CHECK_VA_STATUS_RETURN("vaMapBuffer");
+
+    // make sure the physical pages have been allocated
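+    // (touch one byte in every 4096-byte page; the read-then-conditional-write keeps contents intact)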
+ for (index = 0; index < image.data_size; index = index + 4096) {
+ unsigned char tmp = *(*usrptr + index);
+ if (tmp == 0)
+ *(*usrptr + index) = 0;
+ }
+
+ *outsize = image.data_size;
+ *stride = image.pitches[0];
+
+ LOG_I( "surface = 0x%08x\n",(uint32_t)surface);
+ LOG_I("image->pitches[0] = %d\n", image.pitches[0]);
+ LOG_I("image->pitches[1] = %d\n", image.pitches[1]);
+ LOG_I("image->offsets[0] = %d\n", image.offsets[0]);
+ LOG_I("image->offsets[1] = %d\n", image.offsets[1]);
+ LOG_I("image->num_planes = %d\n", image.num_planes);
+ LOG_I("image->width = %d\n", image.width);
+ LOG_I("image->height = %d\n", image.height);
+ LOG_I ("data_size = %d\n", image.data_size);
+ LOG_I ("usrptr = 0x%p\n", *usrptr);
+
+ vaStatus = vaUnmapBuffer(mVADisplay, image.buf);
+ CHECK_VA_STATUS_RETURN("vaUnmapBuffer");
+ vaStatus = vaDestroyImage(mVADisplay, image.image_id);
+ CHECK_VA_STATUS_RETURN("vaDestroyImage");
+
+ if (*outsize < expectedSize) {
+ LOG_E ("Allocated buffer size is small than the expected size, destroy the surface");
+ LOG_I ("Allocated size is %d, expected size is %d\n", *outsize, expectedSize);
+ vaStatus = vaDestroySurfaces(mVADisplay, &surface, 1);
+ CHECK_VA_STATUS_RETURN("vaDestroySurfaces");
+ return ENCODE_FAIL;
+ }
+
+ VASurfaceMap *map = new VASurfaceMap(mVADisplay, mSupportedSurfaceMemType);
+ if (map == NULL) {
+ LOG_E( "new VASurfaceMap failed\n");
+ return ENCODE_NO_MEMORY;
+ }
+
+    map->setVASurface(surface); // special case: the VA surface is already set, so doMapping has nothing to do
+// map->setType(MetadataBufferTypeEncoder);
+ map->setValue((intptr_t)*usrptr);
+ ValueInfo vinfo;
+ memset(&vinfo, 0, sizeof(ValueInfo));
+ vinfo.mode = (MemMode)MEM_MODE_USRPTR;
+ vinfo.handle = 0;
+ vinfo.size = 0;
+ vinfo.width = width;
+ vinfo.height = height;
+ vinfo.lumaStride = width;
+ vinfo.chromStride = width;
+ vinfo.format = VA_FOURCC_NV12;
+ vinfo.s3dformat = 0xffffffff;
+ map->setValueInfo(vinfo);
+ map->doMapping();
+
+ mSrcSurfaceMapList.push_back(map);
+
+ ret = ENCODE_SUCCESS;
+
+ return ret;
+}
+
+Encode_Status VideoEncoderBase::setUpstreamBuffer(VideoParamsUpstreamBuffer *upStreamBuffer) {
+
+ Encode_Status status = ENCODE_SUCCESS;
+
+ CHECK_NULL_RETURN_IFFAIL(upStreamBuffer);
+ if (upStreamBuffer->bufCnt == 0) {
+ LOG_E("bufCnt == 0\n");
+ return ENCODE_FAIL;
+ }
+
+ for(unsigned int i=0; i < upStreamBuffer->bufCnt; i++) {
+ if (findSurfaceMapByValue(upStreamBuffer->bufList[i]) != NULL) //already mapped
+ continue;
+
+ //wrap upstream buffer into vaSurface
+ VASurfaceMap *map = new VASurfaceMap(mVADisplay, mSupportedSurfaceMemType);
+
+// map->setType(MetadataBufferTypeUser);
+ map->setValue(upStreamBuffer->bufList[i]);
+ ValueInfo vinfo;
+ memset(&vinfo, 0, sizeof(ValueInfo));
+ vinfo.mode = (MemMode)upStreamBuffer->bufferMode;
+ vinfo.handle = (intptr_t)upStreamBuffer->display;
+ vinfo.size = 0;
+ if (upStreamBuffer->bufAttrib) {
+ vinfo.width = upStreamBuffer->bufAttrib->realWidth;
+ vinfo.height = upStreamBuffer->bufAttrib->realHeight;
+ vinfo.lumaStride = upStreamBuffer->bufAttrib->lumaStride;
+ vinfo.chromStride = upStreamBuffer->bufAttrib->chromStride;
+ vinfo.format = upStreamBuffer->bufAttrib->format;
+ }
+ vinfo.s3dformat = 0xFFFFFFFF;
+ map->setValueInfo(vinfo);
+ status = map->doMapping();
+
+ if (status == ENCODE_SUCCESS)
+ mSrcSurfaceMapList.push_back(map);
+ else
+ delete map;
+ }
+
+ return status;
+}
+
+Encode_Status VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer, VASurfaceID *sid) {
+
+ Encode_Status ret = ENCODE_SUCCESS;
+ IntelMetadataBufferType type;
+ intptr_t value;
+ ValueInfo vinfo;
+ ValueInfo *pvinfo = &vinfo;
+ intptr_t *extravalues = NULL;
+ unsigned int extravalues_count = 0;
+
+ IntelMetadataBuffer imb;
+ VASurfaceMap *map = NULL;
+
+ memset(&vinfo, 0, sizeof(ValueInfo));
+ if (mStoreMetaDataInBuffers.isEnabled) {
+ //metadatabuffer mode
+ LOG_I("in metadata mode, data=%p, size=%d\n", inBuffer->data, inBuffer->size);
+ if (imb.UnSerialize(inBuffer->data, inBuffer->size) != IMB_SUCCESS) {
+ //fail to parse buffer
+ return ENCODE_NO_REQUEST_DATA;
+ }
+
+ imb.GetType(type);
+ imb.GetValue(value);
+ } else {
+ //raw mode
+ LOG_I("in raw mode, data=%p, size=%d\n", inBuffer->data, inBuffer->size);
+ if (! inBuffer->data || inBuffer->size == 0) {
+ return ENCODE_NULL_PTR;
+ }
+
+ type = IntelMetadataBufferTypeUser;
+ value = (intptr_t)inBuffer->data;
+ }
+
+#ifdef INTEL_VIDEO_XPROC_SHARING
+ uint32_t sflag = mSessionFlag;
+ imb.GetSessionFlag(mSessionFlag);
+ if (mSessionFlag != sflag) {
+ //new sharing session, flush buffer sharing cache
+ IntelMetadataBuffer::ClearContext(sflag, false);
+ //flush surfacemap cache
+ LOG_V( "Flush Src Surface Map\n");
+ while(! mSrcSurfaceMapList.empty())
+ {
+ delete (*mSrcSurfaceMapList.begin());
+ mSrcSurfaceMapList.erase(mSrcSurfaceMapList.begin());
+ }
+ }
+#endif
+
+ //find if mapped
+ map = (VASurfaceMap*) findSurfaceMapByValue(value);
+
+ if (map) {
+        // already mapped: get the surface ID directly and do all necessary actions
+        LOG_I("found surface %d directly from value %i\n", map->getVASurface(), value);
+ *sid = map->getVASurface();
+ map->doMapping();
+ return ret;
+ }
+
+    // not found in the list; try to map the value using the available parameters
+    LOG_I("no surface found in cache for value %i, start mapping if there is enough information\n", value);
+
+ if (mStoreMetaDataInBuffers.isEnabled) {
+
+ //if type is IntelMetadataBufferTypeGrallocSource, use default parameters since no ValueInfo
+ if (type == IntelMetadataBufferTypeGrallocSource) {
+ vinfo.mode = MEM_MODE_GFXHANDLE;
+ vinfo.handle = 0;
+ vinfo.size = 0;
+ vinfo.width = mComParams.resolution.width;
+ vinfo.height = mComParams.resolution.height;
+ vinfo.lumaStride = mComParams.resolution.width;
+ vinfo.chromStride = mComParams.resolution.width;
+ vinfo.format = VA_FOURCC_NV12;
+ vinfo.s3dformat = 0xFFFFFFFF;
+ } else {
+ //get all info mapping needs
+ imb.GetValueInfo(pvinfo);
+ imb.GetExtraValues(extravalues, extravalues_count);
+ }
+
+ } else {
+
+ //raw mode
+ vinfo.mode = MEM_MODE_MALLOC;
+ vinfo.handle = 0;
+ vinfo.size = inBuffer->size;
+ vinfo.width = mComParams.resolution.width;
+ vinfo.height = mComParams.resolution.height;
+ vinfo.lumaStride = mComParams.resolution.width;
+ vinfo.chromStride = mComParams.resolution.width;
+ vinfo.format = VA_FOURCC_NV12;
+ vinfo.s3dformat = 0xFFFFFFFF;
+ }
+
+    /* Start mapping: if pvinfo is not NULL, there is enough info to map;
+     * if extravalues is not NULL, additional mappings are needed
+     */
+ if (pvinfo){
+ //map according info, and add to surfacemap list
+ map = new VASurfaceMap(mVADisplay, mSupportedSurfaceMemType);
+ map->setValue(value);
+ map->setValueInfo(*pvinfo);
+ map->setAction(mVASurfaceMappingAction);
+
+ ret = map->doMapping();
+ if (ret == ENCODE_SUCCESS) {
+ LOG_I("surface mapping success, map value %i into surface %d\n", value, map->getVASurface());
+ mSrcSurfaceMapList.push_back(map);
+ } else {
+ delete map;
+ LOG_E("surface mapping failed, wrong info or meet serious error\n");
+ return ret;
+ }
+
+ *sid = map->getVASurface();
+
+ } else {
+ //can't map due to no info
+ LOG_E("surface mapping failed, missing information\n");
+ return ENCODE_NO_REQUEST_DATA;
+ }
+
+ if (extravalues) {
+ //map more using same ValueInfo
+ for(unsigned int i=0; i<extravalues_count; i++) {
+ map = new VASurfaceMap(mVADisplay, mSupportedSurfaceMemType);
+ map->setValue(extravalues[i]);
+ map->setValueInfo(vinfo);
+
+ ret = map->doMapping();
+ if (ret == ENCODE_SUCCESS) {
+ LOG_I("surface mapping extravalue success, map value %i into surface %d\n", extravalues[i], map->getVASurface());
+ mSrcSurfaceMapList.push_back(map);
+ } else {
+ delete map;
+ map = NULL;
+ LOG_E( "surface mapping extravalue failed, extravalue is %i\n", extravalues[i]);
+ }
+ }
+ }
+
+ return ret;
+}
+
+Encode_Status VideoEncoderBase::renderDynamicBitrate(EncodeTask* task) {
+ VAStatus vaStatus = VA_STATUS_SUCCESS;
+
+ LOG_V( "Begin\n\n");
+ // disabling bit stuffing and frame skipping applies to all rate control modes
+
+ VAEncMiscParameterBuffer *miscEncParamBuf;
+ VAEncMiscParameterRateControl *bitrateControlParam;
+ VABufferID miscParamBufferID;
+
+ vaStatus = vaCreateBuffer(mVADisplay, mVAContext,
+ VAEncMiscParameterBufferType,
+ sizeof (VAEncMiscParameterBuffer) + sizeof (VAEncMiscParameterRateControl),
+ 1, NULL,
+ &miscParamBufferID);
+
+ CHECK_VA_STATUS_RETURN("vaCreateBuffer");
+
+ vaStatus = vaMapBuffer(mVADisplay, miscParamBufferID, (void **)&miscEncParamBuf);
+ CHECK_VA_STATUS_RETURN("vaMapBuffer");
+
+ miscEncParamBuf->type = VAEncMiscParameterTypeRateControl;
+ bitrateControlParam = (VAEncMiscParameterRateControl *)miscEncParamBuf->data;
+
+ bitrateControlParam->bits_per_second = mComParams.rcParams.bitRate;
+ bitrateControlParam->initial_qp = mComParams.rcParams.initQP;
+ if(mComParams.rcParams.enableIntraFrameQPControl && (task->type == FTYPE_IDR || task->type == FTYPE_I)) {
+ bitrateControlParam->min_qp = mComParams.rcParams.I_minQP;
+ bitrateControlParam->max_qp = mComParams.rcParams.I_maxQP;
+ mRenderBitRate = true;
+ LOG_I("apply I min/max qp for IDR or I frame\n");
+ } else {
+ bitrateControlParam->min_qp = mComParams.rcParams.minQP;
+ bitrateControlParam->max_qp = mComParams.rcParams.maxQP;
+ mRenderBitRate = false;
+ LOG_I("revert to original min/max qp after IDR or I frame\n");
+ }
+ bitrateControlParam->target_percentage = mComParams.rcParams.targetPercentage;
+ bitrateControlParam->window_size = mComParams.rcParams.windowSize;
+ bitrateControlParam->rc_flags.bits.disable_frame_skip = mComParams.rcParams.disableFrameSkip;
+ bitrateControlParam->rc_flags.bits.disable_bit_stuffing = mComParams.rcParams.disableBitsStuffing;
+ bitrateControlParam->basic_unit_size = 0;
+
+ LOG_I("bits_per_second = %d\n", bitrateControlParam->bits_per_second);
+ LOG_I("initial_qp = %d\n", bitrateControlParam->initial_qp);
+ LOG_I("min_qp = %d\n", bitrateControlParam->min_qp);
+ LOG_I("max_qp = %d\n", bitrateControlParam->max_qp);
+ LOG_I("target_percentage = %d\n", bitrateControlParam->target_percentage);
+ LOG_I("window_size = %d\n", bitrateControlParam->window_size);
+ LOG_I("disable_frame_skip = %d\n", bitrateControlParam->rc_flags.bits.disable_frame_skip);
+ LOG_I("disable_bit_stuffing = %d\n", bitrateControlParam->rc_flags.bits.disable_bit_stuffing);
+
+ vaStatus = vaUnmapBuffer(mVADisplay, miscParamBufferID);
+ CHECK_VA_STATUS_RETURN("vaUnmapBuffer");
+
+ vaStatus = vaRenderPicture(mVADisplay, mVAContext,
+ &miscParamBufferID, 1);
+ CHECK_VA_STATUS_RETURN("vaRenderPicture");
+
+ return ENCODE_SUCCESS;
+}
+
+
+Encode_Status VideoEncoderBase::renderDynamicFrameRate() {
+
+ VAStatus vaStatus = VA_STATUS_SUCCESS;
+
+ if (mComParams.rcMode != RATE_CONTROL_VCM) {
+
+ LOG_W("Not in VCM mode, but call SendDynamicFramerate\n");
+ return ENCODE_SUCCESS;
+ }
+
+ VAEncMiscParameterBuffer *miscEncParamBuf;
+ VAEncMiscParameterFrameRate *frameRateParam;
+ VABufferID miscParamBufferID;
+
+ vaStatus = vaCreateBuffer(mVADisplay, mVAContext,
+ VAEncMiscParameterBufferType,
+ sizeof(VAEncMiscParameterBuffer) + sizeof(VAEncMiscParameterFrameRate),
+ 1, NULL, &miscParamBufferID);
+ CHECK_VA_STATUS_RETURN("vaCreateBuffer");
+
+ vaStatus = vaMapBuffer(mVADisplay, miscParamBufferID, (void **)&miscEncParamBuf);
+ CHECK_VA_STATUS_RETURN("vaMapBuffer");
+
+ miscEncParamBuf->type = VAEncMiscParameterTypeFrameRate;
+ frameRateParam = (VAEncMiscParameterFrameRate *)miscEncParamBuf->data;
+ frameRateParam->framerate =
+ (unsigned int) (mComParams.frameRate.frameRateNum + mComParams.frameRate.frameRateDenom/2)
+ / mComParams.frameRate.frameRateDenom;
+
+ vaStatus = vaUnmapBuffer(mVADisplay, miscParamBufferID);
+ CHECK_VA_STATUS_RETURN("vaUnmapBuffer");
+
+ vaStatus = vaRenderPicture(mVADisplay, mVAContext, &miscParamBufferID, 1);
+ CHECK_VA_STATUS_RETURN("vaRenderPicture");
+
+ LOG_I( "frame rate = %d\n", frameRateParam->framerate);
+ return ENCODE_SUCCESS;
+}
+
+Encode_Status VideoEncoderBase::renderHrd() {
+
+ VAStatus vaStatus = VA_STATUS_SUCCESS;
+
+ VAEncMiscParameterBuffer *miscEncParamBuf;
+ VAEncMiscParameterHRD *hrdParam;
+ VABufferID miscParamBufferID;
+
+ vaStatus = vaCreateBuffer(mVADisplay, mVAContext,
+ VAEncMiscParameterBufferType,
+ sizeof(VAEncMiscParameterBuffer) + sizeof(VAEncMiscParameterHRD),
+ 1, NULL, &miscParamBufferID);
+ CHECK_VA_STATUS_RETURN("vaCreateBuffer");
+
+ vaStatus = vaMapBuffer(mVADisplay, miscParamBufferID, (void **)&miscEncParamBuf);
+ CHECK_VA_STATUS_RETURN("vaMapBuffer");
+
+ miscEncParamBuf->type = VAEncMiscParameterTypeHRD;
+ hrdParam = (VAEncMiscParameterHRD *)miscEncParamBuf->data;
+
+ hrdParam->buffer_size = mHrdParam.bufferSize;
+ hrdParam->initial_buffer_fullness = mHrdParam.initBufferFullness;
+
+ vaStatus = vaUnmapBuffer(mVADisplay, miscParamBufferID);
+ CHECK_VA_STATUS_RETURN("vaUnmapBuffer");
+
+ vaStatus = vaRenderPicture(mVADisplay, mVAContext, &miscParamBufferID, 1);
+ CHECK_VA_STATUS_RETURN("vaRenderPicture");
+
+ return ENCODE_SUCCESS;
+}
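+
+// renderDynamicBitrate(), renderDynamicFrameRate() and renderHrd() above all
+// follow the same VA misc-parameter pattern: create an empty buffer sized for
+// the header plus the payload, map it, fill in the type and payload, unmap,
+// then render. A minimal sketch of that shared pattern (the helper name and
+// the memcpy-based fill are assumptions, not part of this codebase):
+/*
+static VAStatus renderMiscParam(VADisplay dpy, VAContextID ctx,
+                                VAEncMiscParameterType type,
+                                const void *payload, size_t payloadSize) {
+    VABufferID buf;
+    VAEncMiscParameterBuffer *p;
+    VAStatus s = vaCreateBuffer(dpy, ctx, VAEncMiscParameterBufferType,
+                                sizeof(VAEncMiscParameterBuffer) + payloadSize,
+                                1, NULL, &buf);
+    if (s != VA_STATUS_SUCCESS) return s;
+    s = vaMapBuffer(dpy, buf, (void **)&p);
+    if (s != VA_STATUS_SUCCESS) return s;
+    p->type = type;
+    memcpy(p->data, payload, payloadSize); // payload is e.g. a VAEncMiscParameterHRD
+    vaUnmapBuffer(dpy, buf);
+    return vaRenderPicture(dpy, ctx, &buf, 1);
+}
+*/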
+
+VASurfaceMap *VideoEncoderBase::findSurfaceMapByValue(intptr_t value) {
+ android::List<VASurfaceMap *>::iterator node;
+
+ for(node = mSrcSurfaceMapList.begin(); node != mSrcSurfaceMapList.end(); node++)
+ {
+ if ((*node)->getValue() == value)
+ return *node;
+ else
+ continue;
+ }
+
+ return NULL;
+}
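+
+// The surface cache above is a plain linear list keyed by the client buffer
+// value, so lookup is O(n) in the number of distinct input buffers; a typical
+// pipeline cycles through a handful of handles, so the list stays short.
+// Hypothetical lookup-or-map usage:
+/*
+    VASurfaceMap *m = findSurfaceMapByValue((intptr_t)handle);
+    if (m == NULL) {
+        // first time this buffer is seen: build a VASurfaceMap and cache it,
+        // as manageSrcSurface() does above
+    }
+*/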
diff --git a/videoencoder/VideoEncoderBase.h b/videoencoder/VideoEncoderBase.h
new file mode 100644
index 0000000..bf1eecf
--- /dev/null
+++ b/videoencoder/VideoEncoderBase.h
@@ -0,0 +1,186 @@
+/*
+* Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+#ifndef __VIDEO_ENCODER_BASE_H__
+#define __VIDEO_ENCODER_BASE_H__
+
+#include <va/va.h>
+#include <va/va_tpi.h>
+#include "VideoEncoderDef.h"
+#include "VideoEncoderInterface.h"
+#include "IntelMetadataBuffer.h"
+#include <utils/List.h>
+#include <utils/threads.h>
+#include "VideoEncoderUtils.h"
+
+struct SurfaceMap {
+ VASurfaceID surface;
+ VASurfaceID surface_backup;
+ IntelMetadataBufferType type;
+ int32_t value;
+ ValueInfo vinfo;
+ bool added;
+};
+
+struct EncodeTask {
+ VASurfaceID enc_surface;
+ VASurfaceID ref_surface;
+ VASurfaceID rec_surface;
+ VABufferID coded_buffer;
+
+ FrameType type;
+ int flag;
+ int64_t timestamp; //corresponding input frame timestamp
+ void *priv; //input buffer data
+
+ bool completed; // whether the encode task has been completed by HW
+};
+
+class VideoEncoderBase : IVideoEncoder {
+
+public:
+ VideoEncoderBase();
+ virtual ~VideoEncoderBase();
+
+ virtual Encode_Status start(void);
+ virtual void flush(void);
+ virtual Encode_Status stop(void);
+ virtual Encode_Status encode(VideoEncRawBuffer *inBuffer, uint32_t timeout);
+
+ /*
+ * getOutput can be called several times for one frame (e.g. first for codec data, then for the rest);
+ * the encoder provides encoded data according to the requested format (whole frame, codec_data, single NAL, etc.).
+ * If the buffer passed in is not big enough, this call returns ENCODE_BUFFER_TOO_SMALL
+ * and the caller should provide a larger buffer and call again.
+ */
+ virtual Encode_Status getOutput(VideoEncOutputBuffer *outBuffer, uint32_t timeout);
+
+ virtual Encode_Status getParameters(VideoParamConfigSet *videoEncParams);
+ virtual Encode_Status setParameters(VideoParamConfigSet *videoEncParams);
+ virtual Encode_Status setConfig(VideoParamConfigSet *videoEncConfig);
+ virtual Encode_Status getConfig(VideoParamConfigSet *videoEncConfig);
+ virtual Encode_Status getMaxOutSize(uint32_t *maxSize);
+
+protected:
+ virtual Encode_Status sendEncodeCommand(EncodeTask* task) = 0;
+ virtual Encode_Status derivedSetParams(VideoParamConfigSet *videoEncParams) = 0;
+ virtual Encode_Status derivedGetParams(VideoParamConfigSet *videoEncParams) = 0;
+ virtual Encode_Status derivedGetConfig(VideoParamConfigSet *videoEncConfig) = 0;
+ virtual Encode_Status derivedSetConfig(VideoParamConfigSet *videoEncConfig) = 0;
+ virtual Encode_Status getExtFormatOutput(VideoEncOutputBuffer *outBuffer) = 0;
+ virtual Encode_Status updateFrameInfo(EncodeTask* task);
+
+ Encode_Status renderDynamicFrameRate();
+ Encode_Status renderDynamicBitrate(EncodeTask* task);
+ Encode_Status renderHrd();
+ Encode_Status queryProfileLevelConfig(VADisplay dpy, VAProfile profile);
+
+private:
+ void setDefaultParams(void);
+ Encode_Status setUpstreamBuffer(VideoParamsUpstreamBuffer *upStreamBuffer);
+ Encode_Status getNewUsrptrFromSurface(uint32_t width, uint32_t height, uint32_t format,
+ uint32_t expectedSize, uint32_t *outsize, uint32_t *stride, uint8_t **usrptr);
+ VASurfaceMap* findSurfaceMapByValue(intptr_t value);
+ Encode_Status manageSrcSurface(VideoEncRawBuffer *inBuffer, VASurfaceID *sid);
+ void PrepareFrameInfo(EncodeTask* task);
+
+ Encode_Status prepareForOutput(VideoEncOutputBuffer *outBuffer, bool *useLocalBuffer);
+ Encode_Status cleanupForOutput();
+ Encode_Status outputAllData(VideoEncOutputBuffer *outBuffer);
+ Encode_Status queryAutoReferenceConfig(VAProfile profile);
+ Encode_Status querySupportedSurfaceMemTypes();
+ Encode_Status copySurfaces(VASurfaceID srcId, VASurfaceID destId);
+ VASurfaceID CreateSurfaceFromExternalBuf(int32_t value, ValueInfo& vinfo);
+
+protected:
+
+ bool mInitialized;
+ bool mStarted;
+ VADisplay mVADisplay;
+ VAContextID mVAContext;
+ VAConfigID mVAConfig;
+ VAEntrypoint mVAEntrypoint;
+
+
+ VideoParamsCommon mComParams;
+ VideoParamsHRD mHrdParam;
+ VideoParamsStoreMetaDataInBuffers mStoreMetaDataInBuffers;
+
+ bool mNewHeader;
+
+ bool mRenderMaxSliceSize; //Max Slice Size
+ bool mRenderQP;
+ bool mRenderAIR;
+ bool mRenderCIR;
+ bool mRenderFrameRate;
+ bool mRenderBitRate;
+ bool mRenderHrd;
+ bool mRenderMaxFrameSize;
+ bool mRenderMultiTemporal;
+ bool mForceKFrame;
+
+ VABufferID mSeqParamBuf;
+ VABufferID mRcParamBuf;
+ VABufferID mFrameRateParamBuf;
+ VABufferID mPicParamBuf;
+ VABufferID mSliceParamBuf;
+ VASurfaceID* mAutoRefSurfaces;
+
+ android::List <VASurfaceMap *> mSrcSurfaceMapList; // all surfaces mapped from input buffers
+ android::List <EncodeTask *> mEncodeTaskList; // all queued encode tasks
+ android::List <VABufferID> mVACodedBufferList; // all available coded buffers
+
+ VASurfaceID mRefSurface; //reference surface, only used in base
+ VASurfaceID mRecSurface; //reconstructed surface, only used in base
+ uint32_t mFrameNum;
+ uint32_t mCodedBufSize;
+ bool mAutoReference;
+ uint32_t mAutoReferenceSurfaceNum;
+ uint32_t mEncPackedHeaders;
+ uint32_t mEncMaxRefFrames;
+
+ bool mSliceSizeOverflow;
+
+ //Current Outputting task
+ EncodeTask *mCurOutputTask;
+
+ //Current outputting CodedBuffer status
+ VABufferID mOutCodedBuffer;
+ bool mCodedBufferMapped;
+ uint8_t *mOutCodedBufferPtr;
+ VACodedBufferSegment *mCurSegment;
+ uint32_t mOffsetInSeg;
+ uint32_t mTotalSize;
+ uint32_t mTotalSizeCopied;
+ android::Mutex mCodedBuffer_Lock, mEncodeTask_Lock;
+ android::Condition mCodedBuffer_Cond, mEncodeTask_Cond;
+
+ bool mFrameSkipped;
+
+ //supported surface memory types
+ int mSupportedSurfaceMemType;
+
+ //VASurface mapping extra action
+ int mVASurfaceMappingAction;
+
+ // For Temporal Layer Bitrate FrameRate settings
+ VideoConfigTemperalLayerBitrateFramerate mTemporalLayerBitrateFramerate[3];
+
+#ifdef INTEL_VIDEO_XPROC_SHARING
+ uint32_t mSessionFlag;
+#endif
+};
+#endif /* __VIDEO_ENCODER_BASE_H__ */
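+
+// A sketch of the getOutput() contract documented above: the caller retries
+// with a larger buffer when ENCODE_BUFFER_TOO_SMALL is returned. Buffer names
+// here are hypothetical.
+/*
+    VideoEncOutputBuffer out;
+    out.format = OUTPUT_EVERYTHING;
+    out.data = buf;
+    out.bufferSize = bufSize;
+    Encode_Status s = encoder->getOutput(&out, FUNC_BLOCK);
+    if (s == ENCODE_BUFFER_TOO_SMALL) {
+        // out.remainingSize reports how many bytes are still pending
+        out.data = biggerBuf;
+        out.bufferSize = biggerBufSize;
+        s = encoder->getOutput(&out, FUNC_BLOCK);
+    }
+*/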
diff --git a/videoencoder/VideoEncoderDef.h b/videoencoder/VideoEncoderDef.h
new file mode 100644
index 0000000..d89d93a
--- /dev/null
+++ b/videoencoder/VideoEncoderDef.h
@@ -0,0 +1,731 @@
+/*
+* Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+#ifndef __VIDEO_ENCODER_DEF_H__
+#define __VIDEO_ENCODER_DEF_H__
+
+#include <stdint.h>
+
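+// Packs a 4-character string into a little-endian FOURCC code, e.g. STRING_TO_FOURCC("NV12").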
+#define STRING_TO_FOURCC(format) ((uint32_t)(((format)[0])|((format)[1]<<8)|((format)[2]<<16)|((format)[3]<<24)))
+#define min(X,Y) (((X) < (Y)) ? (X) : (Y))
+#define max(X,Y) (((X) > (Y)) ? (X) : (Y))
+
+typedef int32_t Encode_Status;
+
+// Video encode error code
+enum {
+ ENCODE_INVALID_SURFACE = -11,
+ ENCODE_NO_REQUEST_DATA = -10,
+ ENCODE_WRONG_STATE = -9,
+ ENCODE_NOTIMPL = -8,
+ ENCODE_NO_MEMORY = -7,
+ ENCODE_NOT_INIT = -6,
+ ENCODE_DRIVER_FAIL = -5,
+ ENCODE_INVALID_PARAMS = -4,
+ ENCODE_NOT_SUPPORTED = -3,
+ ENCODE_NULL_PTR = -2,
+ ENCODE_FAIL = -1,
+ ENCODE_SUCCESS = 0,
+ ENCODE_ALREADY_INIT = 1,
+ ENCODE_SLICESIZE_OVERFLOW = 2,
+ ENCODE_BUFFER_TOO_SMALL = 3, // The buffer passed to encode is too small to contain encoded data
+ ENCODE_DEVICE_BUSY = 4,
+ ENCODE_DATA_NOT_READY = 5,
+};
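+
+// By convention negative values are errors, ENCODE_SUCCESS (0) is success and
+// positive values are non-fatal conditions. Small helpers along these lines
+// (a sketch, not part of the API) keep call sites readable:
+/*
+static inline bool encodeFailed(Encode_Status s) { return s < ENCODE_SUCCESS; }
+static inline bool encodeOk(Encode_Status s) { return s >= ENCODE_SUCCESS; }
+*/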
+
+typedef enum {
+ OUTPUT_EVERYTHING = 0, //Output whatever driver generates
+ OUTPUT_CODEC_DATA = 1,
+ OUTPUT_FRAME_DATA = 2, // same as OUTPUT_EVERYTHING when no header accompanies the frame data
+ OUTPUT_ONE_NAL = 4,
+ OUTPUT_ONE_NAL_WITHOUT_STARTCODE = 8,
+ OUTPUT_LENGTH_PREFIXED = 16,
+ OUTPUT_CODEDBUFFER = 32,
+ OUTPUT_NALULENGTHS_PREFIXED = 64,
+ OUTPUT_BUFFER_LAST
+} VideoOutputFormat;
+
+typedef enum {
+ RAW_FORMAT_NONE = 0,
+ RAW_FORMAT_YUV420 = 1,
+ RAW_FORMAT_YUV422 = 2,
+ RAW_FORMAT_YUV444 = 4,
+ RAW_FORMAT_NV12 = 8,
+ RAW_FORMAT_RGBA = 16,
+ RAW_FORMAT_OPAQUE = 32,
+ RAW_FORMAT_PROTECTED = 0x80000000,
+ RAW_FORMAT_LAST
+} VideoRawFormat;
+
+typedef enum {
+ RATE_CONTROL_NONE = 1,
+ RATE_CONTROL_CBR = 2,
+ RATE_CONTROL_VBR = 4,
+ RATE_CONTROL_VCM = 8,
+ RATE_CONTROL_LAST
+} VideoRateControl;
+
+typedef enum {
+ PROFILE_MPEG2SIMPLE = 0,
+ PROFILE_MPEG2MAIN,
+ PROFILE_MPEG4SIMPLE,
+ PROFILE_MPEG4ADVANCEDSIMPLE,
+ PROFILE_MPEG4MAIN,
+ PROFILE_H264BASELINE,
+ PROFILE_H264MAIN,
+ PROFILE_H264HIGH,
+ PROFILE_VC1SIMPLE,
+ PROFILE_VC1MAIN,
+ PROFILE_VC1ADVANCED,
+ PROFILE_H263BASELINE
+} VideoProfile;
+
+typedef enum {
+ AVC_DELIMITER_LENGTHPREFIX = 0,
+ AVC_DELIMITER_ANNEXB
+} AVCDelimiterType;
+
+typedef enum {
+ VIDEO_ENC_NONIR, // Non intra refresh
+ VIDEO_ENC_CIR, // Cyclic intra refresh
+ VIDEO_ENC_AIR, // Adaptive intra refresh
+ VIDEO_ENC_BOTH,
+ VIDEO_ENC_LAST
+} VideoIntraRefreshType;
+
+enum VideoBufferSharingMode {
+ BUFFER_SHARING_NONE = 1, // non-shared buffer mode
+ BUFFER_SHARING_CI = 2,
+ BUFFER_SHARING_V4L2 = 4,
+ BUFFER_SHARING_SURFACE = 8,
+ BUFFER_SHARING_USRPTR = 16,
+ BUFFER_SHARING_GFXHANDLE = 32,
+ BUFFER_SHARING_KBUFHANDLE = 64,
+ BUFFER_LAST
+};
+
+typedef enum {
+ FTYPE_UNKNOWN = 0, // Unknown
+ FTYPE_I = 1, // General I-frame type
+ FTYPE_P = 2, // General P-frame type
+ FTYPE_B = 3, // General B-frame type
+ FTYPE_SI = 4, // H.263 SI-frame type
+ FTYPE_SP = 5, // H.263 SP-frame type
+ FTYPE_EI = 6, // H.264 EI-frame type
+ FTYPE_EP = 7, // H.264 EP-frame type
+ FTYPE_S = 8, // MPEG-4 S-frame type
+ FTYPE_IDR = 9, // IDR-frame type
+}FrameType;
+
+//function call mode
+#define FUNC_BLOCK 0xFFFFFFFF
+#define FUNC_NONBLOCK 0
+
+// Output buffer flag
+#define ENCODE_BUFFERFLAG_ENDOFFRAME 0x00000001
+#define ENCODE_BUFFERFLAG_PARTIALFRAME 0x00000002
+#define ENCODE_BUFFERFLAG_SYNCFRAME 0x00000004
+#define ENCODE_BUFFERFLAG_CODECCONFIG 0x00000008
+#define ENCODE_BUFFERFLAG_DATACORRUPT 0x00000010
+#define ENCODE_BUFFERFLAG_DATAINVALID 0x00000020
+#define ENCODE_BUFFERFLAG_SLICEOVERFOLOW 0x00000040
+#define ENCODE_BUFFERFLAG_ENDOFSTREAM 0x00000080
+#define ENCODE_BUFFERFLAG_NSTOPFRAME 0x00000100
+
+typedef struct {
+ uint8_t *data;
+ uint32_t bufferSize; //buffer size
+ uint32_t dataSize; //actual size
+ uint32_t offset; //buffer offset
+ uint32_t remainingSize;
+ int flag; //Key frame, Codec Data etc
+ VideoOutputFormat format; //output format
+ int64_t timeStamp; //reserved
+ FrameType type;
+ void *priv; //indicate corresponding input data
+} VideoEncOutputBuffer;
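+
+// The ENCODE_BUFFERFLAG_* values above are single-bit masks OR-ed into the
+// flag field. Typical checks (sketch):
+/*
+    if (outBuffer.flag & ENCODE_BUFFERFLAG_CODECCONFIG) {
+        // configuration data (e.g. SPS/PPS), not picture data
+    }
+    if (outBuffer.flag & ENCODE_BUFFERFLAG_SYNCFRAME) {
+        // frame is a random access point
+    }
+*/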
+
+typedef struct {
+ uint8_t *data;
+ uint32_t size;
+ bool bufAvailable; //To indicate whether this buffer can be reused
+ int64_t timeStamp; //reserved
+ FrameType type; //frame type expected to be encoded
+ int flag; // flag to indicate buffer property
+ void *priv; //indicate corresponding input data
+} VideoEncRawBuffer;
+
+struct VideoEncSurfaceBuffer {
+ VASurfaceID surface;
+ uint8_t *usrptr;
+ uint32_t index;
+ bool bufAvailable;
+ VideoEncSurfaceBuffer *next;
+};
+
+struct CirParams {
+ uint32_t cir_num_mbs;
+
+ CirParams &operator=(const CirParams &other) {
+ if (this == &other) return *this;
+
+ this->cir_num_mbs = other.cir_num_mbs;
+ return *this;
+ }
+};
+
+struct AirParams {
+ uint32_t airMBs;
+ uint32_t airThreshold;
+ uint32_t airAuto;
+
+ AirParams &operator=(const AirParams &other) {
+ if (this == &other) return *this;
+
+ this->airMBs= other.airMBs;
+ this->airThreshold= other.airThreshold;
+ this->airAuto = other.airAuto;
+ return *this;
+ }
+};
+
+struct VideoFrameRate {
+ uint32_t frameRateNum;
+ uint32_t frameRateDenom;
+
+ VideoFrameRate &operator=(const VideoFrameRate &other) {
+ if (this == &other) return *this;
+
+ this->frameRateNum = other.frameRateNum;
+ this->frameRateDenom = other.frameRateDenom;
+ return *this;
+ }
+};
+
+struct VideoResolution {
+ uint32_t width;
+ uint32_t height;
+
+ VideoResolution &operator=(const VideoResolution &other) {
+ if (this == &other) return *this;
+
+ this->width = other.width;
+ this->height = other.height;
+ return *this;
+ }
+};
+
+struct VideoRateControlParams {
+ uint32_t bitRate;
+ uint32_t initQP;
+ uint32_t minQP;
+ uint32_t maxQP;
+ uint32_t I_minQP;
+ uint32_t I_maxQP;
+ uint32_t windowSize;
+ uint32_t targetPercentage;
+ uint32_t disableFrameSkip;
+ uint32_t disableBitsStuffing;
+ uint32_t enableIntraFrameQPControl;
+ uint32_t temporalFrameRate;
+ uint32_t temporalID;
+
+ VideoRateControlParams &operator=(const VideoRateControlParams &other) {
+ if (this == &other) return *this;
+
+ this->bitRate = other.bitRate;
+ this->initQP = other.initQP;
+ this->minQP = other.minQP;
+ this->maxQP = other.maxQP;
+ this->I_minQP = other.I_minQP;
+ this->I_maxQP = other.I_maxQP;
+ this->windowSize = other.windowSize;
+ this->targetPercentage = other.targetPercentage;
+ this->disableFrameSkip = other.disableFrameSkip;
+ this->disableBitsStuffing = other.disableBitsStuffing;
+ this->enableIntraFrameQPControl = other.enableIntraFrameQPControl;
+ this->temporalFrameRate = other.temporalFrameRate;
+ this->temporalID = other.temporalID;
+
+ return *this;
+ }
+};
+
+struct SliceNum {
+ uint32_t iSliceNum;
+ uint32_t pSliceNum;
+
+ SliceNum &operator=(const SliceNum &other) {
+ if (this == &other) return *this;
+
+ this->iSliceNum = other.iSliceNum;
+ this->pSliceNum= other.pSliceNum;
+ return *this;
+ }
+};
+
+typedef struct {
+ uint32_t realWidth;
+ uint32_t realHeight;
+ uint32_t lumaStride;
+ uint32_t chromStride;
+ uint32_t format;
+} ExternalBufferAttrib;
+
+struct Cropping {
+ uint32_t LeftOffset;
+ uint32_t RightOffset;
+ uint32_t TopOffset;
+ uint32_t BottomOffset;
+
+ Cropping &operator=(const Cropping &other) {
+ if (this == &other) return *this;
+
+ this->LeftOffset = other.LeftOffset;
+ this->RightOffset = other.RightOffset;
+ this->TopOffset = other.TopOffset;
+ this->BottomOffset = other.BottomOffset;
+ return *this;
+ }
+};
+
+struct SamplingAspectRatio {
+ uint16_t SarWidth;
+ uint16_t SarHeight;
+
+ SamplingAspectRatio &operator=(const SamplingAspectRatio &other) {
+ if (this == &other) return *this;
+
+ this->SarWidth = other.SarWidth;
+ this->SarHeight = other.SarHeight;
+ return *this;
+ }
+};
+
+enum VideoParamConfigType {
+ VideoParamsTypeStartUnused = 0x01000000,
+ VideoParamsTypeCommon,
+ VideoParamsTypeAVC,
+ VideoParamsTypeH263,
+ VideoParamsTypeMP4,
+ VideoParamsTypeVC1,
+ VideoParamsTypeUpSteamBuffer,
+ VideoParamsTypeUsrptrBuffer,
+ VideoParamsTypeHRD,
+ VideoParamsTypeStoreMetaDataInBuffers,
+ VideoParamsTypeProfileLevel,
+ VideoParamsTypeVP8,
+ VideoParamsTypeTemporalLayer,
+
+ VideoConfigTypeFrameRate,
+ VideoConfigTypeBitRate,
+ VideoConfigTypeResolution,
+ VideoConfigTypeIntraRefreshType,
+ VideoConfigTypeAIR,
+ VideoConfigTypeCyclicFrameInterval,
+ VideoConfigTypeAVCIntraPeriod,
+ VideoConfigTypeNALSize,
+ VideoConfigTypeIDRRequest,
+ VideoConfigTypeSliceNum,
+ VideoConfigTypeVP8,
+ VideoConfigTypeVP8ReferenceFrame,
+ VideoConfigTypeCIR,
+ VideoConfigTypeVP8MaxFrameSizeRatio,
+ VideoConfigTypeTemperalLayerBitrateFramerate,
+
+ VideoParamsConfigExtension
+};
+
+struct VideoParamConfigSet {
+ VideoParamConfigType type;
+ uint32_t size;
+
+ VideoParamConfigSet &operator=(const VideoParamConfigSet &other) {
+ if (this == &other) return *this;
+ this->type = other.type;
+ this->size = other.size;
+ return *this;
+ }
+};
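+
+// Every derived params/config struct stamps its own type and size in its
+// constructor, so implementations can dispatch on type and sanity-check size
+// before downcasting. Assumed usage, mirroring what the derivedSetParams()
+// hooks receive:
+/*
+    Encode_Status setParams(VideoParamConfigSet *p) {
+        if (p->type == VideoParamsTypeCommon) {
+            if (p->size != sizeof(VideoParamsCommon)) return ENCODE_INVALID_PARAMS;
+            VideoParamsCommon *c = static_cast<VideoParamsCommon *>(p);
+            // ... use c ...
+        }
+        return ENCODE_SUCCESS;
+    }
+*/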
+
+struct VideoParamsCommon : VideoParamConfigSet {
+
+ VAProfile profile;
+ uint8_t level;
+ VideoRawFormat rawFormat;
+ VideoResolution resolution;
+ VideoFrameRate frameRate;
+ int32_t intraPeriod;
+ VideoRateControl rcMode;
+ VideoRateControlParams rcParams;
+ VideoIntraRefreshType refreshType;
+ int32_t cyclicFrameInterval;
+ AirParams airParams;
+ CirParams cirParams;
+ uint32_t disableDeblocking;
+ bool syncEncMode;
+ //CodedBuffer properties
+ uint32_t codedBufNum;
+ uint32_t numberOfLayer;
+ uint32_t nPeriodicity;
+ uint32_t nLayerID[32];
+
+ VideoParamsCommon() {
+ type = VideoParamsTypeCommon;
+ size = sizeof(VideoParamsCommon);
+ }
+
+ VideoParamsCommon &operator=(const VideoParamsCommon &other) {
+ if (this == &other) return *this;
+
+ VideoParamConfigSet::operator=(other);
+ this->profile = other.profile;
+ this->level = other.level;
+ this->rawFormat = other.rawFormat;
+ this->resolution = other.resolution;
+ this->frameRate = other.frameRate;
+ this->intraPeriod = other.intraPeriod;
+ this->rcMode = other.rcMode;
+ this->rcParams = other.rcParams;
+ this->refreshType = other.refreshType;
+ this->cyclicFrameInterval = other.cyclicFrameInterval;
+ this->airParams = other.airParams;
+ this->cirParams = other.cirParams;
+ this->disableDeblocking = other.disableDeblocking;
+ this->syncEncMode = other.syncEncMode;
+ this->codedBufNum = other.codedBufNum;
+ this->numberOfLayer = other.numberOfLayer;
+ this->nPeriodicity = other.nPeriodicity;
+ for (uint32_t i = 0; i < 32; i++)
+ this->nLayerID[i] = other.nLayerID[i];
+ return *this;
+ }
+};
+
+struct VideoParamsAVC : VideoParamConfigSet {
+ uint32_t basicUnitSize; //for rate control
+ uint8_t VUIFlag;
+ int32_t maxSliceSize;
+ uint32_t idrInterval;
+ uint32_t ipPeriod;
+ uint32_t refFrames;
+ SliceNum sliceNum;
+ AVCDelimiterType delimiterType;
+ Cropping crop;
+ SamplingAspectRatio SAR;
+ uint32_t refIdx10ActiveMinus1;
+ uint32_t refIdx11ActiveMinus1;
+ bool bFrameMBsOnly;
+ bool bMBAFF;
+ bool bEntropyCodingCABAC;
+ bool bWeightedPPrediction;
+ uint32_t weightedBipredicitonMode;
+ bool bConstIpred ;
+ bool bDirect8x8Inference;
+ bool bDirectSpatialTemporal;
+ uint32_t cabacInitIdc;
+
+ VideoParamsAVC() {
+ type = VideoParamsTypeAVC;
+ size = sizeof(VideoParamsAVC);
+ }
+
+ VideoParamsAVC &operator=(const VideoParamsAVC &other) {
+ if (this == &other) return *this;
+
+ VideoParamConfigSet::operator=(other);
+ this->basicUnitSize = other.basicUnitSize;
+ this->VUIFlag = other.VUIFlag;
+ this->maxSliceSize = other.maxSliceSize;
+ this->idrInterval = other.idrInterval;
+ this->ipPeriod = other.ipPeriod;
+ this->refFrames = other.refFrames;
+ this->sliceNum = other.sliceNum;
+ this->delimiterType = other.delimiterType;
+ this->crop.LeftOffset = other.crop.LeftOffset;
+ this->crop.RightOffset = other.crop.RightOffset;
+ this->crop.TopOffset = other.crop.TopOffset;
+ this->crop.BottomOffset = other.crop.BottomOffset;
+ this->SAR.SarWidth = other.SAR.SarWidth;
+ this->SAR.SarHeight = other.SAR.SarHeight;
+
+ this->refIdx10ActiveMinus1 = other.refIdx10ActiveMinus1;
+ this->refIdx11ActiveMinus1 = other.refIdx11ActiveMinus1;
+ this->bFrameMBsOnly = other.bFrameMBsOnly;
+ this->bMBAFF = other.bMBAFF;
+ this->bEntropyCodingCABAC = other.bEntropyCodingCABAC;
+ this->bWeightedPPrediction = other.bWeightedPPrediction;
+ this->weightedBipredicitonMode = other.weightedBipredicitonMode;
+ this->bConstIpred = other.bConstIpred;
+ this->bDirect8x8Inference = other.bDirect8x8Inference;
+ this->bDirectSpatialTemporal = other.bDirectSpatialTemporal;
+ this->cabacInitIdc = other.cabacInitIdc;
+ return *this;
+ }
+};
+
+struct VideoParamsUpstreamBuffer : VideoParamConfigSet {
+
+ VideoParamsUpstreamBuffer() {
+ type = VideoParamsTypeUpSteamBuffer;
+ size = sizeof(VideoParamsUpstreamBuffer);
+ }
+
+ VideoBufferSharingMode bufferMode;
+ intptr_t *bufList;
+ uint32_t bufCnt;
+ ExternalBufferAttrib *bufAttrib;
+ void *display;
+};
+
+struct VideoParamsUsrptrBuffer : VideoParamConfigSet {
+
+ VideoParamsUsrptrBuffer() {
+ type = VideoParamsTypeUsrptrBuffer;
+ size = sizeof(VideoParamsUsrptrBuffer);
+ }
+
+ //input
+ uint32_t width;
+ uint32_t height;
+ uint32_t format;
+ uint32_t expectedSize;
+
+ //output
+ uint32_t actualSize;
+ uint32_t stride;
+ uint8_t *usrPtr;
+};
+
+struct VideoParamsHRD : VideoParamConfigSet {
+
+ VideoParamsHRD() {
+ type = VideoParamsTypeHRD;
+ size = sizeof(VideoParamsHRD);
+ }
+
+ uint32_t bufferSize;
+ uint32_t initBufferFullness;
+};
+
+struct VideoParamsStoreMetaDataInBuffers : VideoParamConfigSet {
+
+ VideoParamsStoreMetaDataInBuffers() {
+ type = VideoParamsTypeStoreMetaDataInBuffers;
+ size = sizeof(VideoParamsStoreMetaDataInBuffers);
+ }
+
+ bool isEnabled;
+};
+
+struct VideoParamsProfileLevel : VideoParamConfigSet {
+
+ VideoParamsProfileLevel() {
+ type = VideoParamsTypeProfileLevel;
+ size = sizeof(VideoParamsProfileLevel);
+ }
+
+ VAProfile profile;
+ uint32_t level;
+ bool isSupported;
+};
+
+struct VideoParamsTemporalLayer : VideoParamConfigSet {
+
+ VideoParamsTemporalLayer() {
+ type = VideoParamsTypeTemporalLayer;
+ size = sizeof(VideoParamsTemporalLayer);
+ }
+
+ uint32_t numberOfLayer;
+ uint32_t nPeriodicity;
+ uint32_t nLayerID[32];
+};
+
+
+struct VideoConfigFrameRate : VideoParamConfigSet {
+
+ VideoConfigFrameRate() {
+ type = VideoConfigTypeFrameRate;
+ size = sizeof(VideoConfigFrameRate);
+ }
+
+ VideoFrameRate frameRate;
+};
+
+struct VideoConfigBitRate : VideoParamConfigSet {
+
+ VideoConfigBitRate() {
+ type = VideoConfigTypeBitRate;
+ size = sizeof(VideoConfigBitRate);
+ }
+
+ VideoRateControlParams rcParams;
+};
+
+struct VideoConfigAVCIntraPeriod : VideoParamConfigSet {
+
+ VideoConfigAVCIntraPeriod() {
+ type = VideoConfigTypeAVCIntraPeriod;
+ size = sizeof(VideoConfigAVCIntraPeriod);
+ }
+
+ uint32_t idrInterval; // number of I-frames between two IDR frames
+ uint32_t intraPeriod;
+ uint32_t ipPeriod;
+};
+
+struct VideoConfigNALSize : VideoParamConfigSet {
+
+ VideoConfigNALSize() {
+ type = VideoConfigTypeNALSize;
+ size = sizeof(VideoConfigNALSize);
+ }
+
+ uint32_t maxSliceSize;
+};
+
+struct VideoConfigResolution : VideoParamConfigSet {
+
+ VideoConfigResolution() {
+ type = VideoConfigTypeResolution;
+ size = sizeof(VideoConfigResolution);
+ }
+
+ VideoResolution resolution;
+};
+
+struct VideoConfigIntraRefreshType : VideoParamConfigSet {
+
+ VideoConfigIntraRefreshType() {
+ type = VideoConfigTypeIntraRefreshType;
+ size = sizeof(VideoConfigIntraRefreshType);
+ }
+
+ VideoIntraRefreshType refreshType;
+};
+
+struct VideoConfigCyclicFrameInterval : VideoParamConfigSet {
+
+ VideoConfigCyclicFrameInterval() {
+ type = VideoConfigTypeCyclicFrameInterval;
+ size = sizeof(VideoConfigCyclicFrameInterval);
+ }
+
+ int32_t cyclicFrameInterval;
+};
+
+struct VideoConfigCIR : VideoParamConfigSet {
+
+ VideoConfigCIR() {
+ type = VideoConfigTypeCIR;
+ size = sizeof(VideoConfigCIR);
+ }
+
+ CirParams cirParams;
+};
+
+struct VideoConfigAIR : VideoParamConfigSet {
+
+ VideoConfigAIR() {
+ type = VideoConfigTypeAIR;
+ size = sizeof(VideoConfigAIR);
+ }
+
+ AirParams airParams;
+};
+
+struct VideoConfigSliceNum : VideoParamConfigSet {
+
+ VideoConfigSliceNum() {
+ type = VideoConfigTypeSliceNum;
+ size = sizeof(VideoConfigSliceNum);
+ }
+
+ SliceNum sliceNum;
+};
+
+struct VideoParamsVP8 : VideoParamConfigSet {
+
+ uint32_t profile;
+ uint32_t error_resilient;
+ uint32_t num_token_partitions;
+ uint32_t kf_auto;
+ uint32_t kf_min_dist;
+ uint32_t kf_max_dist;
+ uint32_t min_qp;
+ uint32_t max_qp;
+ uint32_t init_qp;
+ uint32_t rc_undershoot;
+ uint32_t rc_overshoot;
+ uint32_t hrd_buf_size;
+ uint32_t hrd_buf_initial_fullness;
+ uint32_t hrd_buf_optimal_fullness;
+ uint32_t max_frame_size_ratio;
+
+ VideoParamsVP8() {
+ type = VideoParamsTypeVP8;
+ size = sizeof(VideoParamsVP8);
+ }
+};
+
+struct VideoConfigVP8 : VideoParamConfigSet {
+
+ uint32_t force_kf;
+ uint32_t refresh_entropy_probs;
+ uint32_t value;
+ unsigned char sharpness_level;
+
+ VideoConfigVP8 () {
+ type = VideoConfigTypeVP8;
+ size = sizeof(VideoConfigVP8);
+ }
+};
+
+struct VideoConfigVP8ReferenceFrame : VideoParamConfigSet {
+
+ uint32_t no_ref_last;
+ uint32_t no_ref_gf;
+ uint32_t no_ref_arf;
+ uint32_t refresh_last;
+ uint32_t refresh_golden_frame;
+ uint32_t refresh_alternate_frame;
+
+ VideoConfigVP8ReferenceFrame () {
+ type = VideoConfigTypeVP8ReferenceFrame;
+ size = sizeof(VideoConfigVP8ReferenceFrame);
+ }
+};
+
+struct VideoConfigVP8MaxFrameSizeRatio : VideoParamConfigSet {
+
+ VideoConfigVP8MaxFrameSizeRatio() {
+ type = VideoConfigTypeVP8MaxFrameSizeRatio;
+ size = sizeof(VideoConfigVP8MaxFrameSizeRatio);
+ }
+
+ uint32_t max_frame_size_ratio;
+};
+
+struct VideoConfigTemperalLayerBitrateFramerate : VideoParamConfigSet {
+
+ VideoConfigTemperalLayerBitrateFramerate() {
+ type = VideoConfigTypeTemperalLayerBitrateFramerate;
+ size = sizeof(VideoConfigTemperalLayerBitrateFramerate);
+ }
+
+ uint32_t nLayerID;
+ uint32_t bitRate;
+ uint32_t frameRate;
+};
+
+#endif /* __VIDEO_ENCODER_DEF_H__ */
diff --git a/videoencoder/VideoEncoderH263.cpp b/videoencoder/VideoEncoderH263.cpp
new file mode 100644
index 0000000..b9f3a9c
--- /dev/null
+++ b/videoencoder/VideoEncoderH263.cpp
@@ -0,0 +1,178 @@
+/*
+* Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+#include <string.h>
+#include <stdlib.h>
+#include "VideoEncoderLog.h"
+#include "VideoEncoderH263.h"
+#include <va/va_tpi.h>
+
+VideoEncoderH263::VideoEncoderH263() {
+ mComParams.profile = (VAProfile)PROFILE_H263BASELINE;
+ mAutoReferenceSurfaceNum = 2;
+}
+
+Encode_Status VideoEncoderH263::sendEncodeCommand(EncodeTask *task) {
+
+ Encode_Status ret = ENCODE_SUCCESS;
+ LOG_V( "Begin\n");
+
+ if (mFrameNum == 0) {
+ ret = renderSequenceParams(task);
+ CHECK_ENCODE_STATUS_RETURN("renderSequenceParams");
+ }
+
+ ret = renderPictureParams(task);
+ CHECK_ENCODE_STATUS_RETURN("renderPictureParams");
+
+ ret = renderSliceParams(task);
+ CHECK_ENCODE_STATUS_RETURN("renderSliceParams");
+
+ LOG_V( "End\n");
+ return ENCODE_SUCCESS;
+}
+
+
+Encode_Status VideoEncoderH263::renderSequenceParams(EncodeTask *) {
+
+ VAStatus vaStatus = VA_STATUS_SUCCESS;
+ VAEncSequenceParameterBufferH263 h263SequenceParam = VAEncSequenceParameterBufferH263();
+ uint32_t frameRateNum = mComParams.frameRate.frameRateNum;
+ uint32_t frameRateDenom = mComParams.frameRate.frameRateDenom;
+
+ LOG_V( "Begin\n\n");
+ //set up the sequence params for HW
+ h263SequenceParam.bits_per_second= mComParams.rcParams.bitRate;
+ h263SequenceParam.frame_rate =
+ (unsigned int) (frameRateNum + frameRateDenom / 2) / frameRateDenom; // rounded to the nearest integer; the driver expects an integer frame rate
+ h263SequenceParam.initial_qp = mComParams.rcParams.initQP;
+ h263SequenceParam.min_qp = mComParams.rcParams.minQP;
+ h263SequenceParam.intra_period = mComParams.intraPeriod;
+
+ //h263_seq_param.fixed_vop_rate = 30;
+
+ LOG_V("===h263 sequence params===\n");
+ LOG_I( "bitrate = %d\n", h263SequenceParam.bits_per_second);
+ LOG_I( "frame_rate = %d\n", h263SequenceParam.frame_rate);
+ LOG_I( "initial_qp = %d\n", h263SequenceParam.initial_qp);
+ LOG_I( "min_qp = %d\n", h263SequenceParam.min_qp);
+ LOG_I( "intra_period = %d\n\n", h263SequenceParam.intra_period);
+
+ vaStatus = vaCreateBuffer(
+ mVADisplay, mVAContext,
+ VAEncSequenceParameterBufferType,
+ sizeof(h263SequenceParam),
+ 1, &h263SequenceParam,
+ &mSeqParamBuf);
+ CHECK_VA_STATUS_RETURN("vaCreateBuffer");
+
+ vaStatus = vaRenderPicture(mVADisplay, mVAContext, &mSeqParamBuf, 1);
+ CHECK_VA_STATUS_RETURN("vaRenderPicture");
+
+ LOG_V( "end\n");
+ return ENCODE_SUCCESS;
+}
+
+Encode_Status VideoEncoderH263::renderPictureParams(EncodeTask *task) {
+
+ VAStatus vaStatus = VA_STATUS_SUCCESS;
+ VAEncPictureParameterBufferH263 h263PictureParams = VAEncPictureParameterBufferH263();
+
+ LOG_V( "Begin\n\n");
+
+ // set picture params for HW
+ if(mAutoReference == false){
+ h263PictureParams.reference_picture = task->ref_surface;
+ h263PictureParams.reconstructed_picture = task->rec_surface;
+ }else {
+ h263PictureParams.reference_picture = mAutoRefSurfaces[0];
+ h263PictureParams.reconstructed_picture = mAutoRefSurfaces[1];
+ }
+
+ h263PictureParams.coded_buf = task->coded_buffer;
+ h263PictureParams.picture_width = mComParams.resolution.width;
+ h263PictureParams.picture_height = mComParams.resolution.height;
+ h263PictureParams.picture_type = (task->type == FTYPE_I) ? VAEncPictureTypeIntra : VAEncPictureTypePredictive;
+
+ LOG_V("======h263 picture params======\n");
+ LOG_I( "reference_picture = 0x%08x\n", h263PictureParams.reference_picture);
+ LOG_I( "reconstructed_picture = 0x%08x\n", h263PictureParams.reconstructed_picture);
+ LOG_I( "coded_buf = 0x%08x\n", h263PictureParams.coded_buf);
+// LOG_I( "coded_buf_index = %d\n", mCodedBufIndex);
+ LOG_I( "picture_width = %d\n", h263PictureParams.picture_width);
+ LOG_I( "picture_height = %d\n",h263PictureParams.picture_height);
+ LOG_I( "picture_type = %d\n\n",h263PictureParams.picture_type);
+
+ vaStatus = vaCreateBuffer(
+ mVADisplay, mVAContext,
+ VAEncPictureParameterBufferType,
+ sizeof(h263PictureParams),
+ 1,&h263PictureParams,
+ &mPicParamBuf);
+ CHECK_VA_STATUS_RETURN("vaCreateBuffer");
+
+ vaStatus = vaRenderPicture(mVADisplay, mVAContext, &mPicParamBuf , 1);
+ CHECK_VA_STATUS_RETURN("vaRenderPicture");
+
+ LOG_V( "end\n");
+ return ENCODE_SUCCESS;
+}
+
+Encode_Status VideoEncoderH263::renderSliceParams(EncodeTask *task) {
+
+ VAStatus vaStatus = VA_STATUS_SUCCESS;
+ uint32_t sliceHeight;
+ uint32_t sliceHeightInMB;
+
+ LOG_V("Begin\n\n");
+
+ sliceHeight = mComParams.resolution.height;
+ sliceHeight += 15;
+ sliceHeight &= (~15); // round up to a multiple of 16 (the macroblock size)
+ sliceHeightInMB = sliceHeight / 16;
+
+ vaStatus = vaCreateBuffer(
+ mVADisplay, mVAContext,
+ VAEncSliceParameterBufferType,
+ sizeof(VAEncSliceParameterBuffer),
+ 1, NULL, &mSliceParamBuf);
+ CHECK_VA_STATUS_RETURN("vaCreateBuffer");
+
+ VAEncSliceParameterBuffer *sliceParams;
+ vaStatus = vaMapBuffer(mVADisplay, mSliceParamBuf, (void **)&sliceParams);
+ CHECK_VA_STATUS_RETURN("vaMapBuffer");
+
+ // starting MB row number for this slice
+ sliceParams->start_row_number = 0;
+ // slice height measured in MB
+ sliceParams->slice_height = sliceHeightInMB;
+ sliceParams->slice_flags.bits.is_intra = (task->type == FTYPE_I)?1:0;
+ sliceParams->slice_flags.bits.disable_deblocking_filter_idc = 0;
+
+ LOG_V("======h263 slice params======\n");
+ LOG_I("start_row_number = %d\n", (int) sliceParams->start_row_number);
+ LOG_I("slice_height_in_mb = %d\n", (int) sliceParams->slice_height);
+ LOG_I("slice.is_intra = %d\n", (int) sliceParams->slice_flags.bits.is_intra);
+
+ vaStatus = vaUnmapBuffer(mVADisplay, mSliceParamBuf);
+ CHECK_VA_STATUS_RETURN("vaUnmapBuffer");
+
+ vaStatus = vaRenderPicture(mVADisplay, mVAContext, &mSliceParamBuf, 1);
+ CHECK_VA_STATUS_RETURN("vaRenderPicture");
+
+ LOG_V("end\n");
+ return ENCODE_SUCCESS;
+}
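+
+// The single slice covers the whole picture: the height is rounded up to a
+// multiple of 16 (the macroblock size) and converted to macroblock rows, so
+// for example 1080 rounds up to 1088, i.e. 68 MB rows. An equivalent helper
+// (illustrative only):
+/*
+static inline uint32_t heightInMBs(uint32_t pixelHeight) {
+    return (pixelHeight + 15) / 16;
+}
+*/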
diff --git a/videoencoder/VideoEncoderH263.h b/videoencoder/VideoEncoderH263.h
new file mode 100644
index 0000000..4d0e7a2
--- /dev/null
+++ b/videoencoder/VideoEncoderH263.h
@@ -0,0 +1,57 @@
+/*
+* Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+#ifndef __VIDEO_ENCODER_H263_H__
+#define __VIDEO_ENCODER_H263_H__
+
+#include "VideoEncoderBase.h"
+
+/**
+ * H.263 Encoder class, derived from VideoEncoderBase
+ */
+class VideoEncoderH263: public VideoEncoderBase {
+public:
+ VideoEncoderH263();
+ virtual ~VideoEncoderH263() {};
+
+protected:
+ virtual Encode_Status sendEncodeCommand(EncodeTask *task);
+ virtual Encode_Status derivedSetParams(VideoParamConfigSet *) {
+ return ENCODE_SUCCESS;
+ }
+ virtual Encode_Status derivedGetParams(VideoParamConfigSet *) {
+ return ENCODE_SUCCESS;
+ }
+ virtual Encode_Status derivedGetConfig(VideoParamConfigSet *) {
+ return ENCODE_SUCCESS;
+ }
+ virtual Encode_Status derivedSetConfig(VideoParamConfigSet *) {
+ return ENCODE_SUCCESS;
+ }
+ virtual Encode_Status getExtFormatOutput(VideoEncOutputBuffer *) {
+ return ENCODE_NOT_SUPPORTED;
+ }
+ //virtual Encode_Status updateFrameInfo(EncodeTask* task);
+
+ // Local Methods
+private:
+ Encode_Status renderSequenceParams(EncodeTask *task);
+ Encode_Status renderPictureParams(EncodeTask *task);
+ Encode_Status renderSliceParams(EncodeTask *task);
+};
+
+#endif /* __VIDEO_ENCODER_H263_H__ */
+
diff --git a/videoencoder/VideoEncoderHost.cpp b/videoencoder/VideoEncoderHost.cpp
new file mode 100644
index 0000000..e4ea968
--- /dev/null
+++ b/videoencoder/VideoEncoderHost.cpp
@@ -0,0 +1,76 @@
+/*
+* Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+#include "VideoEncoderMP4.h"
+#include "VideoEncoderH263.h"
+#include "VideoEncoderAVC.h"
+#include "VideoEncoderVP8.h"
+#ifndef IMG_GFX
+#include "PVSoftMPEG4Encoder.h"
+#endif
+#include "VideoEncoderHost.h"
+#include <string.h>
+#include <cutils/properties.h>
+#include <wrs_omxil_core/log.h>
+
+int32_t gLogLevel = 0;
+
+IVideoEncoder *createVideoEncoder(const char *mimeType) {
+
+ char logLevelProp[PROPERTY_VALUE_MAX];
+
+ if (property_get("libmix.debug", logLevelProp, NULL)) {
+ gLogLevel = atoi(logLevelProp);
+ LOGD("Debug level is %d", gLogLevel);
+ }
+
+ if (mimeType == NULL) {
+ LOGE("NULL mime type");
+ return NULL;
+ }
+
+ if (strcasecmp(mimeType, "video/avc") == 0 ||
+ strcasecmp(mimeType, "video/h264") == 0) {
+ VideoEncoderAVC *p = new VideoEncoderAVC();
+ return (IVideoEncoder *)p;
+ } else if (strcasecmp(mimeType, "video/h263") == 0) {
+#ifdef IMG_GFX
+ VideoEncoderH263 *p = new VideoEncoderH263();
+#else
+ PVSoftMPEG4Encoder *p = new PVSoftMPEG4Encoder("OMX.google.h263.encoder");
+#endif
+ return (IVideoEncoder *)p;
+ } else if (strcasecmp(mimeType, "video/mpeg4") == 0 ||
+ strcasecmp(mimeType, "video/mp4v-es") == 0) {
+#ifdef IMG_GFX
+ VideoEncoderMP4 *p = new VideoEncoderMP4();
+#else
+ PVSoftMPEG4Encoder *p = new PVSoftMPEG4Encoder("OMX.google.mpeg4.encoder");
+#endif
+ return (IVideoEncoder *)p;
+ } else if (strcasecmp(mimeType, "video/x-vnd.on2.vp8") == 0) {
+ VideoEncoderVP8 *p = new VideoEncoderVP8();
+ return (IVideoEncoder *)p;
+ } else {
+ LOGE ("Unknown mime type: %s", mimeType);
+ }
+ return NULL;
+}
+
+void releaseVideoEncoder(IVideoEncoder *p) {
+ if (p) delete p;
+}
+
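+// Typical factory usage (illustrative sketch; any unsupported MIME string
+// returns NULL):
+/*
+    IVideoEncoder *enc = createVideoEncoder("video/avc");
+    if (enc != NULL) {
+        // configure with setParameters(), then start()/encode()/getOutput()
+        releaseVideoEncoder(enc);
+    }
+*/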
diff --git a/videoencoder/VideoEncoderHost.h b/videoencoder/VideoEncoderHost.h
new file mode 100644
index 0000000..ad5df6e
--- /dev/null
+++ b/videoencoder/VideoEncoderHost.h
@@ -0,0 +1,25 @@
+/*
+* Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+#ifndef VIDEO_ENCODER_HOST_H_
+#define VIDEO_ENCODER_HOST_H_
+
+#include "VideoEncoderInterface.h"
+
+IVideoEncoder *createVideoEncoder(const char *mimeType);
+void releaseVideoEncoder(IVideoEncoder *p);
+
+#endif /* VIDEO_ENCODER_HOST_H_ */
\ No newline at end of file
diff --git a/videoencoder/VideoEncoderInterface.h b/videoencoder/VideoEncoderInterface.h
new file mode 100644
index 0000000..00604ce
--- /dev/null
+++ b/videoencoder/VideoEncoderInterface.h
@@ -0,0 +1,37 @@
+/*
+* Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+#ifndef VIDEO_ENCODER_INTERFACE_H_
+#define VIDEO_ENCODER_INTERFACE_H_
+
+#include "VideoEncoderDef.h"
+
+class IVideoEncoder {
+public:
+ virtual ~IVideoEncoder() {};
+ virtual Encode_Status start(void) = 0;
+ virtual Encode_Status stop(void) = 0;
+ virtual void flush(void) = 0;
+ virtual Encode_Status encode(VideoEncRawBuffer *inBuffer, uint32_t timeout = FUNC_BLOCK) = 0;
+ virtual Encode_Status getOutput(VideoEncOutputBuffer *outBuffer, uint32_t timeout = FUNC_BLOCK) = 0;
+ virtual Encode_Status getParameters(VideoParamConfigSet *videoEncParams) = 0;
+ virtual Encode_Status setParameters(VideoParamConfigSet *videoEncParams) = 0;
+ virtual Encode_Status getConfig(VideoParamConfigSet *videoEncConfig) = 0;
+ virtual Encode_Status setConfig(VideoParamConfigSet *videoEncConfig) = 0;
+ virtual Encode_Status getMaxOutSize(uint32_t *maxSize) = 0;
+};
+
+#endif /* VIDEO_ENCODER_INTERFACE_H_ */
diff --git a/videoencoder/VideoEncoderLog.h b/videoencoder/VideoEncoderLog.h
new file mode 100644
index 0000000..c38eb94
--- /dev/null
+++ b/videoencoder/VideoEncoderLog.h
@@ -0,0 +1,61 @@
+/*
+* Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+#ifndef __VIDEO_ENCODER_LOG_H__
+#define __VIDEO_ENCODER_LOG_H__
+
+#define LOG_TAG "VideoEncoder"
+
+#include <wrs_omxil_core/log.h>
+
+#define LOG_V ALOGV
+#define LOG_D ALOGD
+#define LOG_I ALOGI
+#define LOG_W ALOGW
+#define LOG_E ALOGE
+
+extern int32_t gLogLevel;
+#define CHECK_VA_STATUS_RETURN(FUNC)\
+ if (vaStatus != VA_STATUS_SUCCESS) {\
+ LOG_E(FUNC" failed. vaStatus = %d\n", vaStatus);\
+ return ENCODE_DRIVER_FAIL;\
+ }
+
+#define CHECK_VA_STATUS_GOTO_CLEANUP(FUNC)\
+ if (vaStatus != VA_STATUS_SUCCESS) {\
+ LOG_E(FUNC" failed. vaStatus = %d\n", vaStatus);\
+ ret = ENCODE_DRIVER_FAIL; \
+ goto CLEAN_UP;\
+ }
+
+#define CHECK_ENCODE_STATUS_RETURN(FUNC)\
+ if (ret != ENCODE_SUCCESS) { \
+ LOG_E(FUNC"Failed. ret = 0x%08x\n", ret); \
+ return ret; \
+ }
+
+#define CHECK_ENCODE_STATUS_CLEANUP(FUNC)\
+ if (ret != ENCODE_SUCCESS) { \
+ LOG_E(FUNC"Failed, ret = 0x%08x\n", ret); \
+ goto CLEAN_UP;\
+ }
+
+#define CHECK_NULL_RETURN_IFFAIL(POINTER)\
+ if (POINTER == NULL) { \
+ LOG_E("Invalid pointer\n"); \
+ return ENCODE_NULL_PTR;\
+ }
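+
+// Note these macros expand to unbraced if-statements and reference local
+// variables by name: CHECK_VA_STATUS_* requires a local VAStatus named
+// vaStatus, CHECK_ENCODE_STATUS_* a local Encode_Status named ret, and the
+// *_CLEANUP variants additionally a CLEAN_UP label. Usage sketch:
+/*
+    VAStatus vaStatus = vaBeginPicture(mVADisplay, mVAContext, surface);
+    CHECK_VA_STATUS_RETURN("vaBeginPicture");
+*/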
+#endif /* __VIDEO_ENCODER_LOG_H__ */
diff --git a/videoencoder/VideoEncoderMP4.cpp b/videoencoder/VideoEncoderMP4.cpp
new file mode 100644
index 0000000..b414c1d
--- /dev/null
+++ b/videoencoder/VideoEncoderMP4.cpp
@@ -0,0 +1,281 @@
+/*
+* Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+#include <string.h>
+#include <stdlib.h>
+
+#include "VideoEncoderLog.h"
+#include "VideoEncoderMP4.h"
+#include <va/va_tpi.h>
+
+VideoEncoderMP4::VideoEncoderMP4()
+ :mProfileLevelIndication(3)
+ ,mFixedVOPTimeIncrement(0) {
+ mComParams.profile = (VAProfile)PROFILE_MPEG4SIMPLE;
+ mAutoReferenceSurfaceNum = 2;
+}
+
+Encode_Status VideoEncoderMP4::getHeaderPos(
+ uint8_t *inBuffer, uint32_t bufSize, uint32_t *headerSize) {
+
+ uint32_t bytesLeft = bufSize;
+
+ *headerSize = 0;
+ CHECK_NULL_RETURN_IFFAIL(inBuffer);
+
+ if (bufSize < 4) {
+ // bufSize must be at least 4 (a start code is 4 bytes)
+ LOG_E("Buffer size too small\n");
+ return ENCODE_FAIL;
+ }
+
+ // scan forward for the first VOP (00 00 01 B6) or GOV (00 00 01 B3) start
+ // code; everything before it is configuration header data
+ while (bytesLeft > 4 &&
+ (memcmp("\x00\x00\x01\xB6", &inBuffer[bufSize - bytesLeft], 4) &&
+ memcmp("\x00\x00\x01\xB3", &inBuffer[bufSize - bytesLeft], 4))) {
+ --bytesLeft;
+ }
+
+ if (bytesLeft <= 4) {
+ LOG_E("no header found\n");
+ *headerSize = 0;
+ } else {
+ *headerSize = bufSize - bytesLeft;
+ }
+
+ return ENCODE_SUCCESS;
+}
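+
+// Example (illustrative): for a coded buffer laid out as
+// [VOS/VO/VOL headers][00 00 01 B6 ...vop...], getHeaderPos() returns the
+// byte offset of the VOP start code, i.e. the length of the config data:
+/*
+    uint8_t buf[] = { 0x00,0x00,0x01,0xB0, 0x08,   // VOS header (truncated)
+                      0x00,0x00,0x01,0xB6, 0x10 }; // VOP start code
+    uint32_t headerSize = 0;
+    getHeaderPos(buf, sizeof(buf), &headerSize);   // headerSize == 5
+*/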
+
+Encode_Status VideoEncoderMP4::outputConfigData(
+ VideoEncOutputBuffer *outBuffer) {
+
+ Encode_Status ret = ENCODE_SUCCESS;
+ uint32_t headerSize = 0;
+
+ ret = getHeaderPos((uint8_t *)mCurSegment->buf + mOffsetInSeg,
+ mCurSegment->size - mOffsetInSeg, &headerSize);
+ CHECK_ENCODE_STATUS_RETURN("getHeaderPos");
+ if (headerSize == 0) {
+ outBuffer->dataSize = 0;
+ mCurSegment = NULL;
+ return ENCODE_NO_REQUEST_DATA;
+ }
+
+ if (headerSize <= outBuffer->bufferSize) {
+ memcpy(outBuffer->data, (uint8_t *)mCurSegment->buf + mOffsetInSeg, headerSize);
+ mTotalSizeCopied += headerSize;
+ mOffsetInSeg += headerSize;
+ outBuffer->dataSize = headerSize;
+ outBuffer->remainingSize = 0;
+ outBuffer->flag |= ENCODE_BUFFERFLAG_ENDOFFRAME;
+ outBuffer->flag |= ENCODE_BUFFERFLAG_CODECCONFIG;
+ outBuffer->flag |= ENCODE_BUFFERFLAG_SYNCFRAME;
+ } else {
+ // we need a big enough buffer, otherwise we won't output anything
+ outBuffer->dataSize = 0;
+ outBuffer->remainingSize = headerSize;
+ outBuffer->flag |= ENCODE_BUFFERFLAG_DATAINVALID;
+ LOG_E("Buffer size too small\n");
+ return ENCODE_BUFFER_TOO_SMALL;
+ }
+
+ return ret;
+}
+
+Encode_Status VideoEncoderMP4::getExtFormatOutput(VideoEncOutputBuffer *outBuffer) {
+
+ Encode_Status ret = ENCODE_SUCCESS;
+
+ LOG_V("Begin\n");
+ CHECK_NULL_RETURN_IFFAIL(outBuffer);
+
+ switch (outBuffer->format) {
+ case OUTPUT_CODEC_DATA: {
+ // Output the codec config data
+ ret = outputConfigData(outBuffer);
+ CHECK_ENCODE_STATUS_CLEANUP("outputCodecData");
+ break;
+ }
+ default:
+ LOG_E("Invalid buffer mode for MPEG-4:2\n");
+ ret = ENCODE_FAIL;
+ break;
+ }
+
+ LOG_I("out size is = %d\n", outBuffer->dataSize);
+
+
+CLEAN_UP:
+
+ LOG_V("End\n");
+ return ret;
+}
+
+Encode_Status VideoEncoderMP4::renderSequenceParams(EncodeTask *) {
+
+ VAStatus vaStatus = VA_STATUS_SUCCESS;
+ VAEncSequenceParameterBufferMPEG4 mp4SequenceParams = VAEncSequenceParameterBufferMPEG4();
+
+ uint32_t frameRateNum = mComParams.frameRate.frameRateNum;
+ uint32_t frameRateDenom = mComParams.frameRate.frameRateDenom;
+
+ LOG_V( "Begin\n\n");
+ // set up the sequence params for HW
+ mp4SequenceParams.profile_and_level_indication = mProfileLevelIndication;
+ mp4SequenceParams.video_object_layer_width= mComParams.resolution.width;
+ mp4SequenceParams.video_object_layer_height= mComParams.resolution.height;
+ mp4SequenceParams.vop_time_increment_resolution =
+ (unsigned int) (frameRateNum + frameRateDenom /2) / frameRateDenom;
+ mp4SequenceParams.fixed_vop_time_increment= mFixedVOPTimeIncrement;
+ mp4SequenceParams.bits_per_second= mComParams.rcParams.bitRate;
+ mp4SequenceParams.frame_rate =
+ (unsigned int) (frameRateNum + frameRateDenom /2) / frameRateDenom;
+ mp4SequenceParams.initial_qp = mComParams.rcParams.initQP;
+ mp4SequenceParams.min_qp = mComParams.rcParams.minQP;
+ mp4SequenceParams.intra_period = mComParams.intraPeriod;
+ //mpeg4_seq_param.fixed_vop_rate = 30;
+
+ LOG_V("===mpeg4 sequence params===\n");
+ LOG_I("profile_and_level_indication = %d\n", (uint32_t)mp4SequenceParams.profile_and_level_indication);
+ LOG_I("intra_period = %d\n", mp4SequenceParams.intra_period);
+ LOG_I("video_object_layer_width = %d\n", mp4SequenceParams.video_object_layer_width);
+ LOG_I("video_object_layer_height = %d\n", mp4SequenceParams.video_object_layer_height);
+ LOG_I("vop_time_increment_resolution = %d\n", mp4SequenceParams.vop_time_increment_resolution);
+ LOG_I("fixed_vop_rate = %d\n", mp4SequenceParams.fixed_vop_rate);
+ LOG_I("fixed_vop_time_increment = %d\n", mp4SequenceParams.fixed_vop_time_increment);
+ LOG_I("bitrate = %d\n", mp4SequenceParams.bits_per_second);
+ LOG_I("frame_rate = %d\n", mp4SequenceParams.frame_rate);
+ LOG_I("initial_qp = %d\n", mp4SequenceParams.initial_qp);
+ LOG_I("min_qp = %d\n", mp4SequenceParams.min_qp);
+ LOG_I("intra_period = %d\n\n", mp4SequenceParams.intra_period);
+
+ vaStatus = vaCreateBuffer(
+ mVADisplay, mVAContext,
+ VAEncSequenceParameterBufferType,
+ sizeof(mp4SequenceParams),
+ 1, &mp4SequenceParams,
+ &mSeqParamBuf);
+ CHECK_VA_STATUS_RETURN("vaCreateBuffer");
+
+ vaStatus = vaRenderPicture(mVADisplay, mVAContext, &mSeqParamBuf, 1);
+ CHECK_VA_STATUS_RETURN("vaRenderPicture");
+
+ LOG_V( "end\n");
+ return ENCODE_SUCCESS;
+}
+
+Encode_Status VideoEncoderMP4::renderPictureParams(EncodeTask *task) {
+
+ VAStatus vaStatus = VA_STATUS_SUCCESS;
+ VAEncPictureParameterBufferMPEG4 mpeg4_pic_param = VAEncPictureParameterBufferMPEG4();
+ LOG_V( "Begin\n\n");
+ // set picture params for HW
+ if(mAutoReference == false){
+ mpeg4_pic_param.reference_picture = task->ref_surface;
+ mpeg4_pic_param.reconstructed_picture = task->rec_surface;
+ }else {
+ mpeg4_pic_param.reference_picture = mAutoRefSurfaces[0];
+ mpeg4_pic_param.reconstructed_picture = mAutoRefSurfaces[1];
+ }
+
+ mpeg4_pic_param.coded_buf = task->coded_buffer;
+ mpeg4_pic_param.picture_width = mComParams.resolution.width;
+ mpeg4_pic_param.picture_height = mComParams.resolution.height;
+ mpeg4_pic_param.vop_time_increment= mFrameNum;
+ mpeg4_pic_param.picture_type = (task->type == FTYPE_I) ? VAEncPictureTypeIntra : VAEncPictureTypePredictive;
+
+ LOG_V("======mpeg4 picture params======\n");
+ LOG_I("reference_picture = 0x%08x\n", mpeg4_pic_param.reference_picture);
+ LOG_I("reconstructed_picture = 0x%08x\n", mpeg4_pic_param.reconstructed_picture);
+ LOG_I("coded_buf = 0x%08x\n", mpeg4_pic_param.coded_buf);
+// LOG_I("coded_buf_index = %d\n", mCodedBufIndex);
+ LOG_I("picture_width = %d\n", mpeg4_pic_param.picture_width);
+ LOG_I("picture_height = %d\n", mpeg4_pic_param.picture_height);
+ LOG_I("vop_time_increment = %d\n", mpeg4_pic_param.vop_time_increment);
+ LOG_I("picture_type = %d\n\n", mpeg4_pic_param.picture_type);
+
+ vaStatus = vaCreateBuffer(
+ mVADisplay, mVAContext,
+ VAEncPictureParameterBufferType,
+ sizeof(mpeg4_pic_param),
+ 1,&mpeg4_pic_param,
+ &mPicParamBuf);
+ CHECK_VA_STATUS_RETURN("vaCreateBuffer");
+
+ vaStatus = vaRenderPicture(mVADisplay, mVAContext, &mPicParamBuf, 1);
+ CHECK_VA_STATUS_RETURN("vaRenderPicture");
+
+ return ENCODE_SUCCESS;
+}
+
+
+Encode_Status VideoEncoderMP4::renderSliceParams(EncodeTask *task) {
+
+ VAStatus vaStatus = VA_STATUS_SUCCESS;
+ uint32_t sliceHeight;
+ uint32_t sliceHeightInMB;
+
+ VAEncSliceParameterBuffer sliceParams;
+
+ LOG_V( "Begin\n\n");
+
+ sliceHeight = mComParams.resolution.height;
+ sliceHeight += 15;
+ sliceHeight &= (~15); // round up to a multiple of 16 (the macroblock size)
+ sliceHeightInMB = sliceHeight / 16;
+
+ sliceParams.start_row_number = 0;
+ sliceParams.slice_height = sliceHeightInMB;
+ sliceParams.slice_flags.bits.is_intra = (task->type == FTYPE_I)?1:0;
+ sliceParams.slice_flags.bits.disable_deblocking_filter_idc = 0;
+
+ LOG_V("======mpeg4 slice params======\n");
+ LOG_I( "start_row_number = %d\n", (int) sliceParams.start_row_number);
+ LOG_I( "sliceHeightInMB = %d\n", (int) sliceParams.slice_height);
+ LOG_I( "is_intra = %d\n", (int) sliceParams.slice_flags.bits.is_intra);
+
+ vaStatus = vaCreateBuffer(
+ mVADisplay, mVAContext,
+ VAEncSliceParameterBufferType,
+ sizeof(VAEncSliceParameterBuffer),
+ 1, &sliceParams,
+ &mSliceParamBuf);
+ CHECK_VA_STATUS_RETURN("vaCreateBuffer");
+
+ vaStatus = vaRenderPicture(mVADisplay, mVAContext, &mSliceParamBuf, 1);
+ CHECK_VA_STATUS_RETURN("vaRenderPicture");
+
+ LOG_V( "end\n");
+ return ENCODE_SUCCESS;
+}
+
+Encode_Status VideoEncoderMP4::sendEncodeCommand(EncodeTask *task) {
+ Encode_Status ret = ENCODE_SUCCESS;
+ LOG_V( "Begin\n");
+
+ if (mFrameNum == 0) {
+ ret = renderSequenceParams(task);
+ CHECK_ENCODE_STATUS_RETURN("renderSequenceParams");
+ }
+
+ ret = renderPictureParams(task);
+ CHECK_ENCODE_STATUS_RETURN("renderPictureParams");
+
+ ret = renderSliceParams(task);
+ CHECK_ENCODE_STATUS_RETURN("renderPictureParams");
+
+ LOG_V( "End\n");
+ return ENCODE_SUCCESS;
+}
diff --git a/videoencoder/VideoEncoderMP4.h b/videoencoder/VideoEncoderMP4.h
new file mode 100644
index 0000000..2691aab
--- /dev/null
+++ b/videoencoder/VideoEncoderMP4.h
@@ -0,0 +1,61 @@
+/*
+* Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+#ifndef __VIDEO_ENCODER__MPEG4_H__
+#define __VIDEO_ENCODER__MPEG4_H__
+
+#include "VideoEncoderBase.h"
+
+/**
+ * MPEG-4:2 Encoder class, derived from VideoEncoderBase
+ */
+class VideoEncoderMP4: public VideoEncoderBase {
+public:
+ VideoEncoderMP4();
+ virtual ~VideoEncoderMP4() {};
+
+// Encode_Status getOutput(VideoEncOutputBuffer *outBuffer);
+
+protected:
+ virtual Encode_Status sendEncodeCommand(EncodeTask *task);
+ virtual Encode_Status derivedSetParams(VideoParamConfigSet *) {
+ return ENCODE_SUCCESS;
+ }
+ virtual Encode_Status derivedGetParams(VideoParamConfigSet *) {
+ return ENCODE_SUCCESS;
+ }
+ virtual Encode_Status derivedGetConfig(VideoParamConfigSet *) {
+ return ENCODE_SUCCESS;
+ }
+ virtual Encode_Status derivedSetConfig(VideoParamConfigSet *) {
+ return ENCODE_SUCCESS;
+ }
+ virtual Encode_Status getExtFormatOutput(VideoEncOutputBuffer *outBuffer);
+ //virtual Encode_Status updateFrameInfo(EncodeTask* task);
+
+ // Local Methods
+private:
+ Encode_Status getHeaderPos(uint8_t *inBuffer, uint32_t bufSize, uint32_t *headerSize);
+ Encode_Status outputConfigData(VideoEncOutputBuffer *outBuffer);
+ Encode_Status renderSequenceParams(EncodeTask *task);
+ Encode_Status renderPictureParams(EncodeTask *task);
+ Encode_Status renderSliceParams(EncodeTask *task);
+
+ unsigned char mProfileLevelIndication;
+ uint32_t mFixedVOPTimeIncrement;
+};
+
+#endif /* __VIDEO_ENCODER__MPEG4_H__ */
diff --git a/videoencoder/VideoEncoderUtils.cpp b/videoencoder/VideoEncoderUtils.cpp
new file mode 100644
index 0000000..8b55bb0
--- /dev/null
+++ b/videoencoder/VideoEncoderUtils.cpp
@@ -0,0 +1,808 @@
+/*
+* Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+#include "VideoEncoderLog.h"
+#include "VideoEncoderUtils.h"
+#include <va/va_android.h>
+#include <va/va_drmcommon.h>
+
+#ifdef IMG_GFX
+#include <hal/hal_public.h>
+#include <hardware/gralloc.h>
+
+//#define GFX_DUMP
+
+#define OMX_INTEL_COLOR_FormatYUV420PackedSemiPlanar 0x7FA00E00
+
+static hw_module_t const *gModule = NULL;
+static gralloc_module_t *gAllocMod = NULL; /* cast from the hw_module_t above */
+static alloc_device_t *gAllocDev = NULL;
+
+static int gfx_init(void) {
+
+ int err = hw_get_module(GRALLOC_HARDWARE_MODULE_ID, &gModule);
+ if (err) {
+ LOG_E("FATAL: can't find the %s module", GRALLOC_HARDWARE_MODULE_ID);
+ return -1;
+ } else
+ LOG_V("hw_get_module returned\n");
+ gAllocMod = (gralloc_module_t *)gModule;
+
+ return 0;
+}
+
+static int gfx_alloc(uint32_t w, uint32_t h, int format,
+ int usage, buffer_handle_t* handle, int32_t* stride) {
+
+ int err;
+
+ if (!gAllocDev) {
+ if (!gModule) {
+ if (gfx_init()) {
+ LOG_E("can't find the %s module", GRALLOC_HARDWARE_MODULE_ID);
+ return -1;
+ }
+ }
+
+ err = gralloc_open(gModule, &gAllocDev);
+ if (err) {
+ LOG_E("FATAL: gralloc open failed\n");
+ return -1;
+ }
+ }
+
+ err = gAllocDev->alloc(gAllocDev, w, h, format, usage, handle, stride);
+ if (err) {
+ LOG_E("alloc(%u, %u, %d, %08x, ...) failed %d (%s)\n",
+ w, h, format, usage, err, strerror(-err));
+ }
+
+ return err;
+}
+
+static int gfx_free(buffer_handle_t handle) {
+
+ int err;
+
+ if (!gAllocDev) {
+ if (!gModule) {
+ if (gfx_init()) {
+ LOG_E("can't find the %s module", GRALLOC_HARDWARE_MODULE_ID);
+ return -1;
+ }
+ }
+
+ err = gralloc_open(gModule, &gAllocDev);
+ if (err) {
+ LOG_E("FATAL: gralloc open failed\n");
+ return -1;
+ }
+ }
+
+ err = gAllocDev->free(gAllocDev, handle);
+ if (err) {
+ LOG_E("free(...) failed %d (%s)\n", err, strerror(-err));
+ }
+
+ return err;
+}
+
+static int gfx_lock(buffer_handle_t handle, int usage,
+ int left, int top, int width, int height, void** vaddr) {
+
+ int err;
+
+ if (!gAllocMod) {
+ if (gfx_init()) {
+ LOG_E("can't find the %s module", GRALLOC_HARDWARE_MODULE_ID);
+ return -1;
+ }
+ }
+
+ err = gAllocMod->lock(gAllocMod, handle, usage,
+ left, top, width, height, vaddr);
+ LOG_V("gfx_lock: handle is %x, usage is %x, vaddr is %x.\n", (unsigned int)handle, usage, (unsigned int)*vaddr);
+
+ if (err){
+ LOG_E("lock(...) failed %d (%s).\n", err, strerror(-err));
+ return -1;
+ } else
+ LOG_V("lock returned with address %p\n", *vaddr);
+
+ return err;
+}
+
+static int gfx_unlock(buffer_handle_t handle) {
+
+ int err;
+
+ if (!gAllocMod) {
+ if (gfx_init()) {
+ LOG_E("can't find the %s module", GRALLOC_HARDWARE_MODULE_ID);
+ return -1;
+ }
+ }
+
+ err = gAllocMod->unlock(gAllocMod, handle);
+ if (err) {
+ LOG_E("unlock(...) failed %d (%s)", err, strerror(-err));
+ return -1;
+ } else
+ LOG_V("unlock returned\n");
+
+ return err;
+}
+
+static int gfx_Blit(buffer_handle_t src, buffer_handle_t dest,
+ int w, int h, int , int )
+{
+ int err;
+
+ if (!gAllocMod) {
+ if (gfx_init()) {
+ LOG_E("can't find the %s module", GRALLOC_HARDWARE_MODULE_ID);
+ return -1;
+ }
+ }
+
+ IMG_gralloc_module_public_t* GrallocMod = (IMG_gralloc_module_public_t*)gModule;
+
+#ifdef MRFLD_GFX
+ err = GrallocMod->Blit(GrallocMod, src, dest, w, h, 0, 0, 0, 0);
+#else
+ err = GrallocMod->Blit2(GrallocMod, src, dest, w, h, 0, 0);
+#endif
+
+ if (err) {
+ LOG_E("Blit(...) failed %d (%s)", err, strerror(-err));
+ return -1;
+ } else
+ LOG_V("Blit returned\n");
+
+ return err;
+}
+
+Encode_Status GetGfxBufferInfo(intptr_t handle, ValueInfo& vinfo){
+
+ /* only support OMX_INTEL_COLOR_FormatYUV420PackedSemiPlanar
+ HAL_PIXEL_FORMAT_NV12
+ HAL_PIXEL_FORMAT_BGRA_8888
+ HAL_PIXEL_FORMAT_RGBA_8888
+ HAL_PIXEL_FORMAT_RGBX_8888
+ HAL_PIXEL_FORMAT_BGRX_8888 */
+ IMG_native_handle_t* h = (IMG_native_handle_t*) handle;
+
+ vinfo.width = h->iWidth;
+ vinfo.height = h->iHeight;
+ vinfo.lumaStride = h->iWidth;
+
+ LOG_I("GetGfxBufferInfo: gfx iWidth=%d, iHeight=%d, iFormat=%x in handle structure\n", h->iWidth, h->iHeight, h->iFormat);
+
+ if (h->iFormat == HAL_PIXEL_FORMAT_NV12) {
+ #ifdef MRFLD_GFX
+ if((h->usage & GRALLOC_USAGE_HW_CAMERA_READ) || (h->usage & GRALLOC_USAGE_HW_CAMERA_WRITE) )
+ vinfo.lumaStride = (h->iWidth + 63) & ~63; //64 aligned
+ else
+ vinfo.lumaStride = (h->iWidth + 31) & ~31; //32 aligned
+ #else //on CTP
+ if (h->iWidth > 512)
+ vinfo.lumaStride = (h->iWidth + 63) & ~63; //64 aligned
+ else
+ vinfo.lumaStride = 512;
+ #endif
+ } else if ((h->iFormat == HAL_PIXEL_FORMAT_BGRA_8888)||
+ (h->iFormat == HAL_PIXEL_FORMAT_RGBA_8888)||
+ (h->iFormat == HAL_PIXEL_FORMAT_RGBX_8888)||
+ (h->iFormat == HAL_PIXEL_FORMAT_BGRX_8888)) {
+ vinfo.lumaStride = (h->iWidth + 31) & ~31;
+ } else if (h->iFormat == OMX_INTEL_COLOR_FormatYUV420PackedSemiPlanar) {
+ //nothing to do
+ } else
+ return ENCODE_NOT_SUPPORTED;
+
+ vinfo.format = h->iFormat;
+
+ LOG_I(" Actual Width=%d, Height=%d, Stride=%d\n\n", vinfo.width, vinfo.height, vinfo.lumaStride);
+ return ENCODE_SUCCESS;
+}
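+
+// Stride examples under the rules above (illustrative values): a 720-wide NV12
+// camera buffer on MRFLD gets lumaStride = (720 + 63) & ~63 = 768, while a
+// 720-wide RGBA buffer gets (720 + 31) & ~31 = 736.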
+
+#ifdef GFX_DUMP
+void DumpGfx(intptr_t handle, char* filename) {
+ ValueInfo vinfo;
+ void* vaddr[3];
+ FILE* fp;
+ int usage = GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_SW_READ_OFTEN;
+
+ GetGfxBufferInfo(handle, vinfo);
+ if (gfx_lock((buffer_handle_t)handle, usage, 0, 0, vinfo.width, vinfo.height, &vaddr[0]) != 0)
+        return;
+ fp = fopen(filename, "wb");
+ fwrite(vaddr[0], 1, vinfo.lumaStride * vinfo.height * 4, fp);
+ fclose(fp);
+ LOG_I("dump %d bytes data to %s\n", vinfo.lumaStride * vinfo.height * 4, filename);
+ gfx_unlock((buffer_handle_t)handle);
+
+ return;
+}
+#endif
+
+#endif
+
+extern "C" {
+VAStatus vaLockSurface(VADisplay dpy,
+ VASurfaceID surface,
+ unsigned int *fourcc,
+ unsigned int *luma_stride,
+ unsigned int *chroma_u_stride,
+ unsigned int *chroma_v_stride,
+ unsigned int *luma_offset,
+ unsigned int *chroma_u_offset,
+ unsigned int *chroma_v_offset,
+ unsigned int *buffer_name,
+ void **buffer
+);
+
+VAStatus vaUnlockSurface(VADisplay dpy,
+ VASurfaceID surface
+);
+}
+
+VASurfaceMap::VASurfaceMap(VADisplay display, int hwcap) {
+
+ mVADisplay = display;
+ mSupportedSurfaceMemType = hwcap;
+ mValue = 0;
+ mVASurface = VA_INVALID_SURFACE;
+ mTracked = false;
+ mAction = 0;
+ memset(&mVinfo, 0, sizeof(ValueInfo));
+#ifdef IMG_GFX
+ mGfxHandle = NULL;
+#endif
+}
+
+VASurfaceMap::~VASurfaceMap() {
+
+ if (!mTracked && (mVASurface != VA_INVALID_SURFACE))
+ vaDestroySurfaces(mVADisplay, &mVASurface, 1);
+
+#ifdef IMG_GFX
+ if (mGfxHandle)
+ gfx_free(mGfxHandle);
+#endif
+}
+
+Encode_Status VASurfaceMap::doMapping() {
+
+ Encode_Status ret = ENCODE_SUCCESS;
+
+ if (mVASurface == VA_INVALID_SURFACE) {
+
+ int width = mVASurfaceWidth = mVinfo.width;
+ int height = mVASurfaceHeight = mVinfo.height;
+ int stride = mVASurfaceStride = mVinfo.lumaStride;
+
+ if (mAction & MAP_ACTION_COLORCONVERT) {
+
+ //only support gfx buffer
+ if (mVinfo.mode != MEM_MODE_GFXHANDLE)
+ return ENCODE_NOT_SUPPORTED;
+
+ #ifdef IMG_GFX //only enable on IMG chip
+
+ //do not trust valueinfo for gfx case, directly get from structure
+ ValueInfo tmp;
+
+ ret = GetGfxBufferInfo(mValue, tmp);
+ CHECK_ENCODE_STATUS_RETURN("GetGfxBufferInfo");
+ width = tmp.width;
+ height = tmp.height;
+ stride = tmp.lumaStride;
+
+ if (HAL_PIXEL_FORMAT_NV12 == tmp.format || OMX_INTEL_COLOR_FormatYUV420PackedSemiPlanar == tmp.format)
+ mAction &= ~MAP_ACTION_COLORCONVERT;
+ else {
+ //allocate new gfx buffer if format is not NV12
+ int usage = GRALLOC_USAGE_HW_RENDER | GRALLOC_USAGE_HW_TEXTURE;
+
+ //use same size with original and HAL_PIXEL_FORMAT_NV12 format
+ if (gfx_alloc(width, height, HAL_PIXEL_FORMAT_NV12, usage, &mGfxHandle, &stride) != 0)
+ return ENCODE_DRIVER_FAIL;
+
+ LOG_I("Create an new gfx buffer handle 0x%p for color convert, width=%d, height=%d, stride=%d\n",
+ mGfxHandle, width, height, stride);
+ }
+
+ #else
+ return ENCODE_NOT_SUPPORTED;
+ #endif
+ }
+
+ if (mAction & MAP_ACTION_ALIGN64 && stride % 64 != 0) {
+            //stride is not 64 aligned, so allocate a new 64-aligned vasurface and copy into it
+ stride = (stride + 63 ) & ~63;
+ mAction |= MAP_ACTION_COPY;
+ }
+
+ if(mAction & MAP_ACTION_ALIGN64 && width <= 320 && height <= 240) {
+ mAction |= MAP_ACTION_COPY;
+ }
+
+ if (mAction & MAP_ACTION_COPY) { //must allocate new vasurface(EXternalMemoryNULL, uncached)
+ //allocate new vasurface
+ mVASurface = CreateNewVASurface(mVADisplay, stride, height);
+ if (mVASurface == VA_INVALID_SURFACE)
+ return ENCODE_DRIVER_FAIL;
+ mVASurfaceWidth = mVASurfaceStride = stride;
+ mVASurfaceHeight = height;
+ LOGI("create new vaSurface for MAP_ACTION_COPY\n");
+ } else {
+ #ifdef IMG_GFX
+ if (mGfxHandle != NULL) {
+ //map new gfx handle to vasurface
+ ret = MappingGfxHandle((intptr_t)mGfxHandle);
+ CHECK_ENCODE_STATUS_RETURN("MappingGfxHandle");
+ LOGI("map new allocated gfx handle to vaSurface\n");
+ } else
+ #endif
+ {
+ //map original value to vasurface
+ ret = MappingToVASurface();
+ CHECK_ENCODE_STATUS_RETURN("MappingToVASurface");
+ }
+ }
+ }
+
+ if (mAction & MAP_ACTION_COLORCONVERT) {
+ ret = doActionColConv();
+ CHECK_ENCODE_STATUS_RETURN("doActionColConv");
+ }
+
+ if (mAction & MAP_ACTION_COPY) {
+        //src color format is NV12 by this point, so a plain copy is sufficient
+ ret = doActionCopy();
+ CHECK_ENCODE_STATUS_RETURN("doActionCopy");
+ }
+
+ return ENCODE_SUCCESS;
+}
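+
+// Decision sketch for doMapping() (hypothetical inputs, for illustration only):
+//   gfx NV12 handle, stride already 64-aligned -> mapped directly, zero copy
+//   gfx RGBA handle                            -> MAP_ACTION_COLORCONVERT: blit into a new NV12 gfx buffer
+//   malloc'd NV12 with stride 736, ALIGN64 set -> MAP_ACTION_COPY: memcpy into a new 768-stride vasurface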
+
+Encode_Status VASurfaceMap::MappingToVASurface() {
+
+ Encode_Status ret = ENCODE_SUCCESS;
+
+ if (mVASurface != VA_INVALID_SURFACE) {
+ LOG_I("VASurface is already set before, nothing to do here\n");
+ return ENCODE_SUCCESS;
+ }
+ LOG_I("MappingToVASurface mode=%d, value=%p\n", mVinfo.mode, (void*)mValue);
+
+ const char *mode = NULL;
+ switch (mVinfo.mode) {
+ case MEM_MODE_SURFACE:
+ mode = "SURFACE";
+ ret = MappingSurfaceID(mValue);
+ break;
+ case MEM_MODE_GFXHANDLE:
+ mode = "GFXHANDLE";
+ ret = MappingGfxHandle(mValue);
+ break;
+ case MEM_MODE_KBUFHANDLE:
+ mode = "KBUFHANDLE";
+ ret = MappingKbufHandle(mValue);
+ break;
+ case MEM_MODE_MALLOC:
+ case MEM_MODE_NONECACHE_USRPTR:
+ mode = "MALLOC or NONCACHE_USRPTR";
+ ret = MappingMallocPTR(mValue);
+ break;
+ case MEM_MODE_ION:
+ case MEM_MODE_V4L2:
+ case MEM_MODE_USRPTR:
+ case MEM_MODE_CI:
+ default:
+ LOG_I("UnSupported memory mode 0x%08x", mVinfo.mode);
+ return ENCODE_NOT_SUPPORTED;
+ }
+
+ LOG_I("%s: Format=%x, lumaStride=%d, width=%d, height=%d\n", mode, mVinfo.format, mVinfo.lumaStride, mVinfo.width, mVinfo.height);
+ LOG_I("vaSurface 0x%08x is created for value = 0x%p\n", mVASurface, (void*)mValue);
+
+ return ret;
+}
+
+Encode_Status VASurfaceMap::MappingSurfaceID(intptr_t value) {
+
+ VAStatus vaStatus = VA_STATUS_SUCCESS;
+ VASurfaceID surface;
+
+ //try to get kbufhandle from SurfaceID
+ uint32_t fourCC = 0;
+ uint32_t lumaStride = 0;
+ uint32_t chromaUStride = 0;
+ uint32_t chromaVStride = 0;
+ uint32_t lumaOffset = 0;
+ uint32_t chromaUOffset = 0;
+ uint32_t chromaVOffset = 0;
+ uint32_t kBufHandle = 0;
+
+ vaStatus = vaLockSurface(
+ (VADisplay)mVinfo.handle, (VASurfaceID)value,
+ &fourCC, &lumaStride, &chromaUStride, &chromaVStride,
+ &lumaOffset, &chromaUOffset, &chromaVOffset, &kBufHandle, NULL);
+
+ CHECK_VA_STATUS_RETURN("vaLockSurface");
+ LOG_I("Surface incoming = 0x%p\n", (void*)value);
+ LOG_I("lumaStride = %d, chromaUStride = %d, chromaVStride=%d\n", lumaStride, chromaUStride, chromaVStride);
+ LOG_I("lumaOffset = %d, chromaUOffset = %d, chromaVOffset = %d\n", lumaOffset, chromaUOffset, chromaVOffset);
+ LOG_I("kBufHandle = 0x%08x, fourCC = %d\n", kBufHandle, fourCC);
+
+ vaStatus = vaUnlockSurface((VADisplay)mVinfo.handle, (VASurfaceID)value);
+ CHECK_VA_STATUS_RETURN("vaUnlockSurface");
+
+ mVinfo.mode = MEM_MODE_KBUFHANDLE;
+    mVinfo.size = mVinfo.lumaStride * mVinfo.height * 3 / 2;
+
+ mVASurface = CreateSurfaceFromExternalBuf(kBufHandle, mVinfo);
+ if (mVASurface == VA_INVALID_SURFACE)
+ return ENCODE_INVALID_SURFACE;
+
+ mVASurfaceWidth = mVinfo.width;
+ mVASurfaceHeight = mVinfo.height;
+ mVASurfaceStride = mVinfo.lumaStride;
+ return ENCODE_SUCCESS;
+}
+
+Encode_Status VASurfaceMap::MappingGfxHandle(intptr_t value) {
+
+ LOG_I("MappingGfxHandle %p......\n", (void*)value);
+ LOG_I("format = 0x%08x, lumaStride = %d in ValueInfo\n", mVinfo.format, mVinfo.lumaStride);
+
+ //default value for all HW platforms, maybe not accurate
+ mVASurfaceWidth = mVinfo.width;
+ mVASurfaceHeight = mVinfo.height;
+ mVASurfaceStride = mVinfo.lumaStride;
+
+#ifdef IMG_GFX
+ Encode_Status ret;
+ ValueInfo tmp;
+
+ ret = GetGfxBufferInfo(value, tmp);
+ CHECK_ENCODE_STATUS_RETURN("GetGfxBufferInfo");
+ mVASurfaceWidth = tmp.width;
+ mVASurfaceHeight = tmp.height;
+ mVASurfaceStride = tmp.lumaStride;
+#endif
+
+ LOG_I("Mapping vasurface Width=%d, Height=%d, Stride=%d\n", mVASurfaceWidth, mVASurfaceHeight, mVASurfaceStride);
+
+ ValueInfo vinfo;
+ memset(&vinfo, 0, sizeof(ValueInfo));
+ vinfo.mode = MEM_MODE_GFXHANDLE;
+ vinfo.width = mVASurfaceWidth;
+ vinfo.height = mVASurfaceHeight;
+ vinfo.lumaStride = mVASurfaceStride;
+ mVASurface = CreateSurfaceFromExternalBuf(value, vinfo);
+ if (mVASurface == VA_INVALID_SURFACE)
+ return ENCODE_INVALID_SURFACE;
+
+ return ENCODE_SUCCESS;
+}
+
+Encode_Status VASurfaceMap::MappingKbufHandle(intptr_t value) {
+
+ LOG_I("MappingKbufHandle value=%p\n", (void*)value);
+
+    mVinfo.size = mVinfo.lumaStride * mVinfo.height * 3 / 2;
+ mVASurface = CreateSurfaceFromExternalBuf(value, mVinfo);
+ if (mVASurface == VA_INVALID_SURFACE)
+ return ENCODE_INVALID_SURFACE;
+
+ mVASurfaceWidth = mVinfo.width;
+ mVASurfaceHeight = mVinfo.height;
+ mVASurfaceStride = mVinfo.lumaStride;
+
+ return ENCODE_SUCCESS;
+}
+
+Encode_Status VASurfaceMap::MappingMallocPTR(intptr_t value) {
+
+ mVASurface = CreateSurfaceFromExternalBuf(value, mVinfo);
+ if (mVASurface == VA_INVALID_SURFACE)
+ return ENCODE_INVALID_SURFACE;
+
+ mVASurfaceWidth = mVinfo.width;
+ mVASurfaceHeight = mVinfo.height;
+ mVASurfaceStride = mVinfo.lumaStride;
+
+ return ENCODE_SUCCESS;
+}
+
+//always copy with same color format NV12
+Encode_Status VASurfaceMap::doActionCopy() {
+
+ VAStatus vaStatus = VA_STATUS_SUCCESS;
+
+ uint32_t width = 0, height = 0, stride = 0;
+ uint8_t *pSrcBuffer, *pDestBuffer;
+ intptr_t handle = 0;
+
+ LOG_I("Copying Src Buffer data to VASurface\n");
+
+ if (mVinfo.mode != MEM_MODE_MALLOC && mVinfo.mode != MEM_MODE_GFXHANDLE) {
+ LOG_E("Not support copy in mode %d", mVinfo.mode);
+ return ENCODE_NOT_SUPPORTED;
+ }
+
+ LOG_I("Src Buffer information\n");
+ LOG_I("Mode = %d, width = %d, stride = %d, height = %d\n",
+ mVinfo.mode, mVinfo.width, mVinfo.lumaStride, mVinfo.height);
+
+ uint32_t srcY_offset, srcUV_offset;
+ uint32_t srcY_pitch, srcUV_pitch;
+
+ if (mVinfo.mode == MEM_MODE_MALLOC) {
+ width = mVinfo.width;
+ height = mVinfo.height;
+ stride = mVinfo.lumaStride;
+ pSrcBuffer = (uint8_t*) mValue;
+ srcY_offset = 0;
+ srcUV_offset = stride * height;
+ srcY_pitch = stride;
+ srcUV_pitch = stride;
+ } else {
+
+ #ifdef IMG_GFX //only enable on IMG chips
+ int usage = GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_SW_READ_OFTEN;
+
+ //do not trust valueinfo, directly get from structure
+ Encode_Status ret;
+ ValueInfo tmp;
+
+ if (mGfxHandle)
+ handle = (intptr_t) mGfxHandle;
+ else
+ handle = mValue;
+
+ ret = GetGfxBufferInfo(handle, tmp);
+ CHECK_ENCODE_STATUS_RETURN("GetGfxBufferInfo");
+ width = tmp.width;
+ height = tmp.height;
+ stride = tmp.lumaStride;
+
+ //only support HAL_PIXEL_FORMAT_NV12 & OMX_INTEL_COLOR_FormatYUV420PackedSemiPlanar
+ if (HAL_PIXEL_FORMAT_NV12 != tmp.format && OMX_INTEL_COLOR_FormatYUV420PackedSemiPlanar != tmp.format) {
+ LOG_E("Not support gfx buffer format %x", tmp.format);
+ return ENCODE_NOT_SUPPORTED;
+ }
+
+ srcY_offset = 0;
+ srcUV_offset = stride * height;
+ srcY_pitch = stride;
+ srcUV_pitch = stride;
+
+ //lock gfx handle with buffer real size
+ void* vaddr[3];
+ if (gfx_lock((buffer_handle_t) handle, usage, 0, 0, width, height, &vaddr[0]) != 0)
+ return ENCODE_DRIVER_FAIL;
+ pSrcBuffer = (uint8_t*)vaddr[0];
+ #else
+
+ return ENCODE_NOT_SUPPORTED;
+ #endif
+ }
+
+
+ VAImage destImage;
+ vaStatus = vaDeriveImage(mVADisplay, mVASurface, &destImage);
+ CHECK_VA_STATUS_RETURN("vaDeriveImage");
+ vaStatus = vaMapBuffer(mVADisplay, destImage.buf, (void **)&pDestBuffer);
+ CHECK_VA_STATUS_RETURN("vaMapBuffer");
+
+ LOG_I("\nDest VASurface information\n");
+ LOG_I("pitches[0] = %d\n", destImage.pitches[0]);
+ LOG_I("pitches[1] = %d\n", destImage.pitches[1]);
+ LOG_I("offsets[0] = %d\n", destImage.offsets[0]);
+ LOG_I("offsets[1] = %d\n", destImage.offsets[1]);
+ LOG_I("num_planes = %d\n", destImage.num_planes);
+ LOG_I("width = %d\n", destImage.width);
+ LOG_I("height = %d\n", destImage.height);
+
+ if (width > destImage.width || height > destImage.height) {
+ LOG_E("src buffer is bigger than destination buffer\n");
+ return ENCODE_INVALID_PARAMS;
+ }
+
+ uint8_t *srcY, *dstY;
+ uint8_t *srcU, *srcV;
+ uint8_t *srcUV, *dstUV;
+
+ srcY = pSrcBuffer + srcY_offset;
+ dstY = pDestBuffer + destImage.offsets[0];
+ srcUV = pSrcBuffer + srcUV_offset;
+ dstUV = pDestBuffer + destImage.offsets[1];
+
+ for (uint32_t i = 0; i < height; i++) {
+ memcpy(dstY, srcY, width);
+ srcY += srcY_pitch;
+ dstY += destImage.pitches[0];
+ }
+
+ for (uint32_t i = 0; i < height / 2; i++) {
+ memcpy(dstUV, srcUV, width);
+ srcUV += srcUV_pitch;
+ dstUV += destImage.pitches[1];
+ }
+
+ vaStatus = vaUnmapBuffer(mVADisplay, destImage.buf);
+ CHECK_VA_STATUS_RETURN("vaUnmapBuffer");
+ vaStatus = vaDestroyImage(mVADisplay, destImage.image_id);
+ CHECK_VA_STATUS_RETURN("vaDestroyImage");
+
+#ifdef IMG_GFX
+ if (mVinfo.mode == MEM_MODE_GFXHANDLE) {
+ //unlock gfx handle
+ gfx_unlock((buffer_handle_t) handle);
+ }
+#endif
+ LOG_I("Copying Src Buffer data to VASurface Complete\n");
+
+ return ENCODE_SUCCESS;
+}
+
+Encode_Status VASurfaceMap::doActionColConv() {
+
+#ifdef IMG_GFX
+ if (mGfxHandle == NULL) {
+ LOG_E("something wrong, why new gfxhandle is not allocated? \n");
+ return ENCODE_FAIL;
+ }
+
+ LOG_I("doActionColConv gfx_Blit width=%d, height=%d\n", mVinfo.width, mVinfo.height);
+ if (gfx_Blit((buffer_handle_t)mValue, mGfxHandle,
+ mVinfo.width, mVinfo.height, 0, 0) != 0)
+ return ENCODE_DRIVER_FAIL;
+
+ #ifdef GFX_DUMP
+ LOG_I("dumpping gfx data.....\n");
+ DumpGfx(mValue, "/data/dump.rgb");
+ DumpGfx((intptr_t)mGfxHandle, "/data/dump.yuv");
+ #endif
+ return ENCODE_SUCCESS;
+
+#else
+ return ENCODE_NOT_SUPPORTED;
+#endif
+}
+
+VASurfaceID VASurfaceMap::CreateSurfaceFromExternalBuf(intptr_t value, ValueInfo& vinfo) {
+
+ VAStatus vaStatus;
+ VASurfaceAttribExternalBuffers extbuf;
+ VASurfaceAttrib attribs[2];
+ VASurfaceID surface = VA_INVALID_SURFACE;
+ int type;
+ unsigned long data = value;
+
+ extbuf.pixel_format = VA_FOURCC_NV12;
+ extbuf.width = vinfo.width;
+ extbuf.height = vinfo.height;
+ extbuf.data_size = vinfo.size;
+ if (extbuf.data_size == 0)
+        extbuf.data_size = vinfo.lumaStride * vinfo.height * 3 / 2;
+ extbuf.num_buffers = 1;
+ extbuf.num_planes = 3;
+ extbuf.pitches[0] = vinfo.lumaStride;
+ extbuf.pitches[1] = vinfo.lumaStride;
+ extbuf.pitches[2] = vinfo.lumaStride;
+ extbuf.pitches[3] = 0;
+ extbuf.offsets[0] = 0;
+ extbuf.offsets[1] = vinfo.lumaStride * vinfo.height;
+ extbuf.offsets[2] = extbuf.offsets[1];
+ extbuf.offsets[3] = 0;
+ extbuf.buffers = &data;
+ extbuf.flags = 0;
+ extbuf.private_data = NULL;
+
+ switch(vinfo.mode) {
+ case MEM_MODE_GFXHANDLE:
+ type = VA_SURFACE_ATTRIB_MEM_TYPE_ANDROID_GRALLOC;
+ break;
+ case MEM_MODE_KBUFHANDLE:
+ type = VA_SURFACE_ATTRIB_MEM_TYPE_KERNEL_DRM;
+ break;
+ case MEM_MODE_MALLOC:
+ type = VA_SURFACE_ATTRIB_MEM_TYPE_USER_PTR;
+ break;
+ case MEM_MODE_NONECACHE_USRPTR:
+ type = VA_SURFACE_ATTRIB_MEM_TYPE_USER_PTR;
+ extbuf.flags |= VA_SURFACE_EXTBUF_DESC_UNCACHED;
+ break;
+ case MEM_MODE_SURFACE:
+ case MEM_MODE_ION:
+ case MEM_MODE_V4L2:
+ case MEM_MODE_USRPTR:
+ case MEM_MODE_CI:
+ default:
+        //not supported
+ return VA_INVALID_SURFACE;
+ }
+
+ if (!(mSupportedSurfaceMemType & type))
+ return VA_INVALID_SURFACE;
+
+ attribs[0].type = (VASurfaceAttribType)VASurfaceAttribMemoryType;
+ attribs[0].flags = VA_SURFACE_ATTRIB_SETTABLE;
+ attribs[0].value.type = VAGenericValueTypeInteger;
+ attribs[0].value.value.i = type;
+
+ attribs[1].type = (VASurfaceAttribType)VASurfaceAttribExternalBufferDescriptor;
+ attribs[1].flags = VA_SURFACE_ATTRIB_SETTABLE;
+ attribs[1].value.type = VAGenericValueTypePointer;
+ attribs[1].value.value.p = (void *)&extbuf;
+
+ vaStatus = vaCreateSurfaces(mVADisplay, VA_RT_FORMAT_YUV420, vinfo.width,
+ vinfo.height, &surface, 1, attribs, 2);
+ if (vaStatus != VA_STATUS_SUCCESS){
+ LOG_E("vaCreateSurfaces failed. vaStatus = %d\n", vaStatus);
+ surface = VA_INVALID_SURFACE;
+ }
+ return surface;
+}
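+
+// Usage sketch (hypothetical values; in practice this private method is reached
+// via doMapping() -> MappingMallocPTR()): wrapping a malloc'd NV12 frame so the
+// encoder can read it in place:
+//
+//   ValueInfo vinfo;
+//   memset(&vinfo, 0, sizeof(vinfo));
+//   vinfo.mode = MEM_MODE_MALLOC;
+//   vinfo.width = 640; vinfo.height = 480; vinfo.lumaStride = 640;
+//   vinfo.size = 640 * 480 * 3 / 2;
+//   VASurfaceID s = CreateSurfaceFromExternalBuf((intptr_t)ptr, vinfo);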
+
+VASurfaceID CreateNewVASurface(VADisplay display, int32_t width, int32_t height) {
+
+ VAStatus vaStatus;
+ VASurfaceID surface = VA_INVALID_SURFACE;
+ VASurfaceAttrib attribs[2];
+ VASurfaceAttribExternalBuffers extbuf;
+ unsigned long data;
+
+ extbuf.pixel_format = VA_FOURCC_NV12;
+ extbuf.width = width;
+ extbuf.height = height;
+ extbuf.data_size = width * height * 3 / 2;
+ extbuf.num_buffers = 1;
+ extbuf.num_planes = 3;
+ extbuf.pitches[0] = width;
+ extbuf.pitches[1] = width;
+ extbuf.pitches[2] = width;
+ extbuf.pitches[3] = 0;
+ extbuf.offsets[0] = 0;
+ extbuf.offsets[1] = width * height;
+ extbuf.offsets[2] = extbuf.offsets[1];
+ extbuf.offsets[3] = 0;
+ extbuf.buffers = &data;
+ extbuf.flags = 0;
+ extbuf.private_data = NULL;
+
+ attribs[0].type = (VASurfaceAttribType)VASurfaceAttribMemoryType;
+ attribs[0].flags = VA_SURFACE_ATTRIB_SETTABLE;
+ attribs[0].value.type = VAGenericValueTypeInteger;
+ attribs[0].value.value.i = VA_SURFACE_ATTRIB_MEM_TYPE_VA;
+
+ attribs[1].type = (VASurfaceAttribType)VASurfaceAttribExternalBufferDescriptor;
+ attribs[1].flags = VA_SURFACE_ATTRIB_SETTABLE;
+ attribs[1].value.type = VAGenericValueTypePointer;
+ attribs[1].value.value.p = (void *)&extbuf;
+
+ vaStatus = vaCreateSurfaces(display, VA_RT_FORMAT_YUV420, width,
+ height, &surface, 1, attribs, 2);
+ if (vaStatus != VA_STATUS_SUCCESS)
+ LOG_E("vaCreateSurfaces failed. vaStatus = %d\n", vaStatus);
+
+ return surface;
+}
diff --git a/videoencoder/VideoEncoderUtils.h b/videoencoder/VideoEncoderUtils.h
new file mode 100644
index 0000000..05911cd
--- /dev/null
+++ b/videoencoder/VideoEncoderUtils.h
@@ -0,0 +1,85 @@
+/*
+* Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+#ifndef __VIDEO_ENCODER_UTILS_H__
+#define __VIDEO_ENCODER_UTILS_H__
+#include <va/va.h>
+#include <va/va_tpi.h>
+#include "VideoEncoderDef.h"
+#include "IntelMetadataBuffer.h"
+#ifdef IMG_GFX
+#include <hardware/gralloc.h>
+#endif
+
+#define MAP_ACTION_COPY 0x00000001 //mem copy
+#define MAP_ACTION_ALIGN64 0x00000002 //align 64
+#define MAP_ACTION_COLORCONVERT 0x00000004 //color convert
+#define MAP_ACTION_RESIZE 0x00000008 //resize
+
+class VASurfaceMap {
+public:
+ VASurfaceMap(VADisplay display, int hwcap);
+ ~VASurfaceMap();
+
+ Encode_Status doMapping();
+ VASurfaceID getVASurface() {return mVASurface;}
+ intptr_t getValue() {return mValue;}
+ ValueInfo* getValueInfo() {return &mVinfo;}
+
+ void setVASurface(VASurfaceID surface) {mVASurface = surface;}
+ void setValue(intptr_t value) {mValue = value;}
+ void setValueInfo(ValueInfo& vinfo) {memcpy(&mVinfo, &vinfo, sizeof(ValueInfo));}
+ void setTracked() {mTracked = true;}
+ void setAction(int32_t action) {mAction = action;}
+
+private:
+ Encode_Status doActionCopy();
+ Encode_Status doActionColConv();
+ Encode_Status MappingToVASurface();
+ Encode_Status MappingSurfaceID(intptr_t value);
+ Encode_Status MappingGfxHandle(intptr_t value);
+ Encode_Status MappingKbufHandle(intptr_t value);
+ Encode_Status MappingMallocPTR(intptr_t value);
+ VASurfaceID CreateSurfaceFromExternalBuf(intptr_t value, ValueInfo& vinfo);
+
+ VADisplay mVADisplay;
+
+ intptr_t mValue;
+
+ VASurfaceID mVASurface;
+ int32_t mVASurfaceWidth;
+ int32_t mVASurfaceHeight;
+ int32_t mVASurfaceStride;
+
+// MetadataBufferType mType;
+
+ ValueInfo mVinfo;
+ bool mTracked;
+
+ int32_t mAction;
+
+ int32_t mSupportedSurfaceMemType;
+
+#ifdef IMG_GFX
+ //special for gfx color format converter
+ buffer_handle_t mGfxHandle;
+#endif
+};
+
+VASurfaceID CreateNewVASurface(VADisplay display, int32_t width, int32_t height);
+
+#endif
+
diff --git a/videoencoder/VideoEncoderVP8.cpp b/videoencoder/VideoEncoderVP8.cpp
new file mode 100644
index 0000000..d65b385
--- /dev/null
+++ b/videoencoder/VideoEncoderVP8.cpp
@@ -0,0 +1,521 @@
+/*
+* Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+#include <string.h>
+#include <stdlib.h>
+#include "VideoEncoderLog.h"
+#include "VideoEncoderVP8.h"
+#include <va/va_tpi.h>
+#include <va/va_enc_vp8.h>
+
+VideoEncoderVP8::VideoEncoderVP8()
+ :VideoEncoderBase() {
+
+ mVideoParamsVP8.profile = 0;
+ mVideoParamsVP8.error_resilient = 0;
+ mVideoParamsVP8.num_token_partitions = 4;
+ mVideoParamsVP8.kf_auto = 0;
+ mVideoParamsVP8.kf_min_dist = 128;
+ mVideoParamsVP8.kf_max_dist = 128;
+ mVideoParamsVP8.min_qp = 0;
+ mVideoParamsVP8.max_qp = 63;
+ mVideoParamsVP8.init_qp = 26;
+ mVideoParamsVP8.rc_undershoot = 100;
+ mVideoParamsVP8.rc_overshoot = 100;
+ mVideoParamsVP8.hrd_buf_size = 1000;
+ mVideoParamsVP8.hrd_buf_initial_fullness = 500;
+ mVideoParamsVP8.hrd_buf_optimal_fullness = 600;
+ mVideoParamsVP8.max_frame_size_ratio = 0;
+
+ mVideoConfigVP8.force_kf = 0;
+ mVideoConfigVP8.refresh_entropy_probs = 0;
+ mVideoConfigVP8.value = 0;
+ mVideoConfigVP8.sharpness_level = 2;
+
+ mVideoConfigVP8ReferenceFrame.no_ref_last = 0;
+ mVideoConfigVP8ReferenceFrame.no_ref_gf = 0;
+ mVideoConfigVP8ReferenceFrame.no_ref_arf = 0;
+ mVideoConfigVP8ReferenceFrame.refresh_last = 1;
+ mVideoConfigVP8ReferenceFrame.refresh_golden_frame = 1;
+ mVideoConfigVP8ReferenceFrame.refresh_alternate_frame = 1;
+
+ mComParams.profile = VAProfileVP8Version0_3;
+}
+
+VideoEncoderVP8::~VideoEncoderVP8() {
+}
+
+Encode_Status VideoEncoderVP8::start() {
+
+ Encode_Status ret = ENCODE_SUCCESS;
+ LOG_V( "Begin\n");
+
+ ret = VideoEncoderBase::start ();
+ CHECK_ENCODE_STATUS_RETURN("VideoEncoderBase::start");
+
+ if (mComParams.rcMode == VA_RC_VCM) {
+ mRenderBitRate = false;
+ }
+
+ LOG_V( "end\n");
+ return ret;
+}
+
+
+Encode_Status VideoEncoderVP8::renderSequenceParams() {
+ Encode_Status ret = ENCODE_SUCCESS;
+ VAStatus vaStatus = VA_STATUS_SUCCESS;
+ VAEncSequenceParameterBufferVP8 vp8SeqParam = VAEncSequenceParameterBufferVP8();
+
+ LOG_V( "Begin\n");
+
+ vp8SeqParam.frame_width = mComParams.resolution.width;
+ vp8SeqParam.frame_height = mComParams.resolution.height;
+ vp8SeqParam.error_resilient = mVideoParamsVP8.error_resilient;
+ vp8SeqParam.kf_auto = mVideoParamsVP8.kf_auto;
+ vp8SeqParam.kf_min_dist = mVideoParamsVP8.kf_min_dist;
+ vp8SeqParam.kf_max_dist = mVideoParamsVP8.kf_max_dist;
+ vp8SeqParam.bits_per_second = mComParams.rcParams.bitRate;
+    memcpy(vp8SeqParam.reference_frames, mAutoRefSurfaces, sizeof(VASurfaceID) * mAutoReferenceSurfaceNum);
+
+ vaStatus = vaCreateBuffer(
+ mVADisplay, mVAContext,
+ VAEncSequenceParameterBufferType,
+ sizeof(vp8SeqParam),
+ 1, &vp8SeqParam,
+ &mSeqParamBuf);
+ CHECK_VA_STATUS_RETURN("vaCreateBuffer");
+
+ vaStatus = vaRenderPicture(mVADisplay, mVAContext, &mSeqParamBuf, 1);
+ CHECK_VA_STATUS_RETURN("vaRenderPicture");
+
+ LOG_V( "End\n");
+ return ret;
+}
+
+Encode_Status VideoEncoderVP8::renderPictureParams(EncodeTask *task) {
+ Encode_Status ret = ENCODE_SUCCESS;
+ VAStatus vaStatus = VA_STATUS_SUCCESS;
+ VAEncPictureParameterBufferVP8 vp8PicParam = VAEncPictureParameterBufferVP8();
+ LOG_V( "Begin\n");
+
+ vp8PicParam.coded_buf = task->coded_buffer;
+ vp8PicParam.pic_flags.value = 0;
+    vp8PicParam.ref_flags.bits.force_kf = mVideoConfigVP8.force_kf;
+ if(!vp8PicParam.ref_flags.bits.force_kf) {
+ vp8PicParam.ref_flags.bits.no_ref_last = mVideoConfigVP8ReferenceFrame.no_ref_last;
+ vp8PicParam.ref_flags.bits.no_ref_arf = mVideoConfigVP8ReferenceFrame.no_ref_arf;
+ vp8PicParam.ref_flags.bits.no_ref_gf = mVideoConfigVP8ReferenceFrame.no_ref_gf;
+ }
+ vp8PicParam.pic_flags.bits.refresh_entropy_probs = 0;
+ vp8PicParam.sharpness_level = 2;
+ vp8PicParam.pic_flags.bits.num_token_partitions = 2;
+ vp8PicParam.pic_flags.bits.refresh_last = mVideoConfigVP8ReferenceFrame.refresh_last;
+ vp8PicParam.pic_flags.bits.refresh_golden_frame = mVideoConfigVP8ReferenceFrame.refresh_golden_frame;
+ vp8PicParam.pic_flags.bits.refresh_alternate_frame = mVideoConfigVP8ReferenceFrame.refresh_alternate_frame;
+
+ vaStatus = vaCreateBuffer(
+ mVADisplay, mVAContext,
+ VAEncPictureParameterBufferType,
+ sizeof(vp8PicParam),
+ 1, &vp8PicParam,
+ &mPicParamBuf);
+ CHECK_VA_STATUS_RETURN("vaCreateBuffer");
+
+ vaStatus = vaRenderPicture(mVADisplay, mVAContext, &mPicParamBuf, 1);
+ CHECK_VA_STATUS_RETURN("vaRenderPicture");
+
+ LOG_V( "End\n");
+ return ret;
+}
+
+Encode_Status VideoEncoderVP8::renderRCParams(uint32_t layer_id, bool total_bitrate)
+{
+ VABufferID rc_param_buf;
+ VAStatus vaStatus = VA_STATUS_SUCCESS;
+ VAEncMiscParameterBuffer *misc_param;
+ VAEncMiscParameterRateControl *misc_rate_ctrl;
+
+ vaStatus = vaCreateBuffer(mVADisplay, mVAContext,
+ VAEncMiscParameterBufferType,
+ sizeof(VAEncMiscParameterBuffer) + sizeof(VAEncMiscParameterRateControl),
+ 1,NULL,&rc_param_buf);
+ CHECK_VA_STATUS_RETURN("vaCreateBuffer");
+
+ vaMapBuffer(mVADisplay, rc_param_buf,(void **)&misc_param);
+
+ misc_param->type = VAEncMiscParameterTypeRateControl;
+ misc_rate_ctrl = (VAEncMiscParameterRateControl *)misc_param->data;
+ memset(misc_rate_ctrl, 0, sizeof(*misc_rate_ctrl));
+
+ if(total_bitrate)
+ misc_rate_ctrl->bits_per_second = mComParams.rcParams.bitRate;
+ else
+ {
+ misc_rate_ctrl->rc_flags.bits.temporal_id = layer_id;
+ if(mTemporalLayerBitrateFramerate[layer_id].bitRate != 0)
+ misc_rate_ctrl->bits_per_second = mTemporalLayerBitrateFramerate[layer_id].bitRate;
+ }
+
+ misc_rate_ctrl->target_percentage = 100;
+ misc_rate_ctrl->window_size = 1000;
+ misc_rate_ctrl->initial_qp = mVideoParamsVP8.init_qp;
+ misc_rate_ctrl->min_qp = mVideoParamsVP8.min_qp;
+ misc_rate_ctrl->basic_unit_size = 0;
+ misc_rate_ctrl->max_qp = mVideoParamsVP8.max_qp;
+
+ vaUnmapBuffer(mVADisplay, rc_param_buf);
+
+ vaStatus = vaRenderPicture(mVADisplay,mVAContext, &rc_param_buf, 1);
+ CHECK_VA_STATUS_RETURN("vaRenderPicture");;
+ return 0;
+}
+
+Encode_Status VideoEncoderVP8::renderFrameRateParams(uint32_t layer_id, bool total_framerate)
+{
+ VABufferID framerate_param_buf;
+ VAStatus vaStatus = VA_STATUS_SUCCESS;
+ VAEncMiscParameterBuffer *misc_param;
+ VAEncMiscParameterFrameRate * misc_framerate;
+ uint32_t frameRateNum = mComParams.frameRate.frameRateNum;
+ uint32_t frameRateDenom = mComParams.frameRate.frameRateDenom;
+
+ vaStatus = vaCreateBuffer(mVADisplay, mVAContext,
+ VAEncMiscParameterBufferType,
+ sizeof(VAEncMiscParameterBuffer) + sizeof(VAEncMiscParameterFrameRate),
+ 1,NULL,&framerate_param_buf);
+ CHECK_VA_STATUS_RETURN("vaCreateBuffer");
+
+ vaMapBuffer(mVADisplay, framerate_param_buf,(void **)&misc_param);
+ misc_param->type = VAEncMiscParameterTypeFrameRate;
+ misc_framerate = (VAEncMiscParameterFrameRate *)misc_param->data;
+ memset(misc_framerate, 0, sizeof(*misc_framerate));
+
+ if(total_framerate)
+ misc_framerate->framerate = (unsigned int) (frameRateNum + frameRateDenom /2) / frameRateDenom;
+ else
+ {
+ misc_framerate->framerate_flags.bits.temporal_id = layer_id;
+ if(mTemporalLayerBitrateFramerate[layer_id].frameRate != 0)
+ misc_framerate->framerate = mTemporalLayerBitrateFramerate[layer_id].frameRate;
+ }
+
+ vaUnmapBuffer(mVADisplay, framerate_param_buf);
+
+ vaStatus = vaRenderPicture(mVADisplay,mVAContext, &framerate_param_buf, 1);
+ CHECK_VA_STATUS_RETURN("vaRenderPicture");;
+
+ return 0;
+}
+
+Encode_Status VideoEncoderVP8::renderHRDParams(void)
+{
+ VABufferID hrd_param_buf;
+ VAStatus vaStatus = VA_STATUS_SUCCESS;
+ VAEncMiscParameterBuffer *misc_param;
+ VAEncMiscParameterHRD * misc_hrd;
+ vaStatus = vaCreateBuffer(mVADisplay, mVAContext,
+ VAEncMiscParameterBufferType,
+ sizeof(VAEncMiscParameterBuffer) + sizeof(VAEncMiscParameterHRD),
+ 1,NULL,&hrd_param_buf);
+ CHECK_VA_STATUS_RETURN("vaCreateBuffer");
+
+ vaMapBuffer(mVADisplay, hrd_param_buf,(void **)&misc_param);
+ misc_param->type = VAEncMiscParameterTypeHRD;
+ misc_hrd = (VAEncMiscParameterHRD *)misc_param->data;
+ memset(misc_hrd, 0, sizeof(*misc_hrd));
+ misc_hrd->buffer_size = 1000;
+ misc_hrd->initial_buffer_fullness = 500;
+ misc_hrd->optimal_buffer_fullness = 600;
+ vaUnmapBuffer(mVADisplay, hrd_param_buf);
+
+ vaStatus = vaRenderPicture(mVADisplay,mVAContext, &hrd_param_buf, 1);
+ CHECK_VA_STATUS_RETURN("vaRenderPicture");;
+
+ return 0;
+}
+
+Encode_Status VideoEncoderVP8::renderMaxFrameSizeParams(void)
+{
+ VABufferID max_frame_size_param_buf;
+ VAStatus vaStatus = VA_STATUS_SUCCESS;
+ VAEncMiscParameterBuffer *misc_param;
+ VAEncMiscParameterBufferMaxFrameSize * misc_maxframesize;
+ unsigned int frameRateNum = mComParams.frameRate.frameRateNum;
+ unsigned int frameRateDenom = mComParams.frameRate.frameRateDenom;
+    unsigned int frameRate = (unsigned int)((frameRateNum + frameRateDenom / 2) / frameRateDenom);
+ unsigned int bitRate = mComParams.rcParams.bitRate;
+
+ vaStatus = vaCreateBuffer(mVADisplay, mVAContext,
+ VAEncMiscParameterBufferType,
+                sizeof(VAEncMiscParameterBuffer) + sizeof(VAEncMiscParameterBufferMaxFrameSize),
+ 1,NULL,&max_frame_size_param_buf);
+ CHECK_VA_STATUS_RETURN("vaCreateBuffer");
+
+ vaMapBuffer(mVADisplay, max_frame_size_param_buf,(void **)&misc_param);
+ misc_param->type = VAEncMiscParameterTypeMaxFrameSize;
+ misc_maxframesize = (VAEncMiscParameterBufferMaxFrameSize *)misc_param->data;
+ memset(misc_maxframesize, 0, sizeof(*misc_maxframesize));
+ misc_maxframesize->max_frame_size = (unsigned int)((bitRate/frameRate) * mVideoParamsVP8.max_frame_size_ratio);
+ vaUnmapBuffer(mVADisplay, max_frame_size_param_buf);
+
+ vaStatus = vaRenderPicture(mVADisplay,mVAContext, &max_frame_size_param_buf, 1);
+ CHECK_VA_STATUS_RETURN("vaRenderPicture");;
+
+ return 0;
+}
+
+Encode_Status VideoEncoderVP8::renderLayerStructureParam(void)
+{
+ VABufferID layer_struc_buf;
+ VAStatus vaStatus = VA_STATUS_SUCCESS;
+ VAEncMiscParameterBuffer *misc_param;
+ VAEncMiscParameterTemporalLayerStructure *misc_layer_struc;
+ uint32_t i;
+
+ vaStatus = vaCreateBuffer(mVADisplay, mVAContext,
+ VAEncMiscParameterBufferType,
+ sizeof(VAEncMiscParameterBuffer) + sizeof(VAEncMiscParameterTemporalLayerStructure),
+ 1, NULL, &layer_struc_buf);
+
+ CHECK_VA_STATUS_RETURN("vaCreateBuffer");
+ vaMapBuffer(mVADisplay, layer_struc_buf, (void **)&misc_param);
+ misc_param->type = VAEncMiscParameterTypeTemporalLayerStructure;
+ misc_layer_struc = (VAEncMiscParameterTemporalLayerStructure *)misc_param->data;
+ memset(misc_layer_struc, 0, sizeof(*misc_layer_struc));
+
+ misc_layer_struc->number_of_layers = mComParams.numberOfLayer;
+ misc_layer_struc->periodicity = mComParams.nPeriodicity;
+ LOGE("renderLayerStructureParam misc_layer_struc->number_of_layers is %d",misc_layer_struc->number_of_layers);
+
+ for(i=0;i<mComParams.nPeriodicity;i++)
+ {
+ misc_layer_struc->layer_id[i] = mComParams.nLayerID[i];
+ }
+
+ vaUnmapBuffer(mVADisplay, layer_struc_buf);
+
+ vaStatus = vaRenderPicture(mVADisplay, mVAContext, &layer_struc_buf, 1);
+ CHECK_VA_STATUS_RETURN("vaRenderPicture");;
+
+ return 0;
+}
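+
+// Example layer pattern (assumed configuration, for illustration): with
+// numberOfLayer = 3 and nPeriodicity = 4, a typical nLayerID[] of {0, 2, 1, 2}
+// repeats every four frames: base layer on frame 0, top layer on frames 1 and 3,
+// middle layer on frame 2.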
+
+
+Encode_Status VideoEncoderVP8::sendEncodeCommand(EncodeTask *task) {
+
+ Encode_Status ret = ENCODE_SUCCESS;
+ uint32_t i;
+
+ if (mFrameNum == 0) {
+ ret = renderSequenceParams();
+ ret = renderFrameRateParams(0,true);
+ ret = renderRCParams(0,true);
+ ret = renderHRDParams();
+ ret = renderMaxFrameSizeParams();
+ if(mRenderMultiTemporal)
+ {
+ ret = renderLayerStructureParam();
+ mRenderMultiTemporal = false;
+
+ }
+
+ if(mComParams.numberOfLayer > 1)
+ for(i=0;i<mComParams.numberOfLayer;i++)
+ {
+ ret = renderFrameRateParams(i, false);
+ ret = renderRCParams(i, false);
+ }
+
+ CHECK_ENCODE_STATUS_RETURN("renderSequenceParams");
+ }
+
+ if (mRenderBitRate){
+ ret = renderRCParams(0,true);
+ CHECK_ENCODE_STATUS_RETURN("renderRCParams");
+
+ mRenderBitRate = false;
+ }
+
+ if (mRenderFrameRate) {
+ ret = renderFrameRateParams(0,true);
+ CHECK_ENCODE_STATUS_RETURN("renderFrameRateParams");
+
+ mRenderFrameRate = false;
+ }
+
+ if (mRenderMaxFrameSize) {
+ ret = renderMaxFrameSizeParams();
+ CHECK_ENCODE_STATUS_RETURN("renderMaxFrameSizeParams");
+
+ mRenderMaxFrameSize = false;
+ }
+
+ ret = renderPictureParams(task);
+ CHECK_ENCODE_STATUS_RETURN("renderPictureParams");
+
+ if(mForceKFrame) {
+        mVideoConfigVP8.force_kf = 0; //reset it to the default value
+ mForceKFrame = false;
+ }
+
+ LOG_V( "End\n");
+ return ret;
+}
+
+
+Encode_Status VideoEncoderVP8::derivedSetParams(VideoParamConfigSet *videoEncParams) {
+
+ CHECK_NULL_RETURN_IFFAIL(videoEncParams);
+ VideoParamsVP8 *encParamsVP8 = reinterpret_cast <VideoParamsVP8*> (videoEncParams);
+
+ if (encParamsVP8->size != sizeof(VideoParamsVP8)) {
+ return ENCODE_INVALID_PARAMS;
+ }
+
+ mVideoParamsVP8 = *encParamsVP8;
+ return ENCODE_SUCCESS;
+}
+
+Encode_Status VideoEncoderVP8::derivedGetParams(VideoParamConfigSet *videoEncParams) {
+
+ CHECK_NULL_RETURN_IFFAIL(videoEncParams);
+ VideoParamsVP8 *encParamsVP8 = reinterpret_cast <VideoParamsVP8*> (videoEncParams);
+
+ if (encParamsVP8->size != sizeof(VideoParamsVP8)) {
+ return ENCODE_INVALID_PARAMS;
+ }
+
+ *encParamsVP8 = mVideoParamsVP8;
+ return ENCODE_SUCCESS;
+}
+
+Encode_Status VideoEncoderVP8::derivedGetConfig(VideoParamConfigSet *videoEncConfig) {
+
+ CHECK_NULL_RETURN_IFFAIL(videoEncConfig);
+
+ switch (videoEncConfig->type)
+ {
+ case VideoConfigTypeVP8:{
+ VideoConfigVP8 *encConfigVP8 =
+ reinterpret_cast<VideoConfigVP8*> (videoEncConfig);
+
+ if (encConfigVP8->size != sizeof(VideoConfigVP8)) {
+ return ENCODE_INVALID_PARAMS;
+ }
+
+ *encConfigVP8 = mVideoConfigVP8;
+ }
+ break;
+
+ case VideoConfigTypeVP8ReferenceFrame:{
+
+ VideoConfigVP8ReferenceFrame *encConfigVP8ReferenceFrame =
+ reinterpret_cast<VideoConfigVP8ReferenceFrame*> (videoEncConfig);
+
+ if (encConfigVP8ReferenceFrame->size != sizeof(VideoConfigVP8ReferenceFrame)) {
+ return ENCODE_INVALID_PARAMS;
+ }
+
+ *encConfigVP8ReferenceFrame = mVideoConfigVP8ReferenceFrame;
+
+ }
+ break;
+
+ case VideoConfigTypeVP8MaxFrameSizeRatio :{
+
+ VideoConfigVP8MaxFrameSizeRatio *encConfigVP8MaxFrameSizeRatio =
+ reinterpret_cast<VideoConfigVP8MaxFrameSizeRatio*> (videoEncConfig);
+
+ if (encConfigVP8MaxFrameSizeRatio->size != sizeof(VideoConfigVP8MaxFrameSizeRatio)) {
+ return ENCODE_INVALID_PARAMS;
+ }
+
+ encConfigVP8MaxFrameSizeRatio->max_frame_size_ratio = mVideoParamsVP8.max_frame_size_ratio;
+ }
+ break;
+
+ default: {
+ LOG_E ("Invalid Config Type");
+ break;
+ }
+ }
+
+ return ENCODE_SUCCESS;
+}
+
+Encode_Status VideoEncoderVP8::derivedSetConfig(VideoParamConfigSet *videoEncConfig) {
+
+ CHECK_NULL_RETURN_IFFAIL(videoEncConfig);
+
+ switch (videoEncConfig->type)
+ {
+ case VideoConfigTypeVP8:{
+ VideoConfigVP8 *encConfigVP8 =
+ reinterpret_cast<VideoConfigVP8*> (videoEncConfig);
+
+ if (encConfigVP8->size != sizeof(VideoConfigVP8)) {
+ return ENCODE_INVALID_PARAMS;
+ }
+
+ mVideoConfigVP8 = *encConfigVP8;
+ }
+ break;
+
+ case VideoConfigTypeVP8ReferenceFrame:{
+ VideoConfigVP8ReferenceFrame *encConfigVP8ReferenceFrame =
+ reinterpret_cast<VideoConfigVP8ReferenceFrame*> (videoEncConfig);
+
+ if (encConfigVP8ReferenceFrame->size != sizeof(VideoConfigVP8ReferenceFrame)) {
+ return ENCODE_INVALID_PARAMS;
+ }
+
+ mVideoConfigVP8ReferenceFrame = *encConfigVP8ReferenceFrame;
+
+ }
+ break;
+
+ case VideoConfigTypeVP8MaxFrameSizeRatio:{
+ VideoConfigVP8MaxFrameSizeRatio *encConfigVP8MaxFrameSizeRatio =
+ reinterpret_cast<VideoConfigVP8MaxFrameSizeRatio*> (videoEncConfig);
+
+ if (encConfigVP8MaxFrameSizeRatio->size != sizeof(VideoConfigVP8MaxFrameSizeRatio)) {
+ return ENCODE_INVALID_PARAMS;
+ }
+
+ mVideoParamsVP8.max_frame_size_ratio = encConfigVP8MaxFrameSizeRatio->max_frame_size_ratio;
+ mRenderMaxFrameSize = true;
+ }
+ break;
+
+ case VideoConfigTypeIDRRequest:{
+
+ mVideoConfigVP8.force_kf = 1;
+ mForceKFrame = true;
+ }
+ break;
+
+ default: {
+ LOG_E ("Invalid Config Type");
+ break;
+ }
+ }
+ return ENCODE_SUCCESS;
+}
diff --git a/videoencoder/VideoEncoderVP8.h b/videoencoder/VideoEncoderVP8.h
new file mode 100644
index 0000000..1a4360b
--- /dev/null
+++ b/videoencoder/VideoEncoderVP8.h
@@ -0,0 +1,58 @@
+/*
+* Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+#ifndef __VIDEO_ENCODER_VP8_H__
+#define __VIDEO_ENCODER_VP8_H__
+
+#include "VideoEncoderBase.h"
+
+/**
+ * VP8 Encoder class, derived from VideoEncoderBase
+ */
+class VideoEncoderVP8: public VideoEncoderBase {
+public:
+ VideoEncoderVP8();
+ virtual ~VideoEncoderVP8();
+ virtual Encode_Status start();
+
+
+
+protected:
+ virtual Encode_Status sendEncodeCommand(EncodeTask *task);
+ virtual Encode_Status derivedSetParams(VideoParamConfigSet *videoEncParams);
+ virtual Encode_Status derivedGetParams(VideoParamConfigSet *videoEncParams);
+ virtual Encode_Status derivedGetConfig(VideoParamConfigSet *videoEncConfig);
+ virtual Encode_Status derivedSetConfig(VideoParamConfigSet *videoEncConfig);
+ virtual Encode_Status getExtFormatOutput(VideoEncOutputBuffer *) {
+ return ENCODE_NOT_SUPPORTED;
+ }
+
+ // Local Methods
+private:
+ Encode_Status renderSequenceParams();
+ Encode_Status renderPictureParams(EncodeTask *task);
+ Encode_Status renderRCParams(uint32_t layer_id, bool total_bitrate);
+ Encode_Status renderHRDParams(void);
+ Encode_Status renderFrameRateParams(uint32_t layer_id, bool total_framerate);
+ Encode_Status renderMaxFrameSizeParams(void);
+ Encode_Status renderLayerStructureParam(void);
+
+ VideoConfigVP8 mVideoConfigVP8;
+ VideoParamsVP8 mVideoParamsVP8;
+ VideoConfigVP8ReferenceFrame mVideoConfigVP8ReferenceFrame;
+};
+
+#endif /* __VIDEO_ENCODER_VP8_H__ */
diff --git a/videoencoder/bitstream.h b/videoencoder/bitstream.h
new file mode 100644
index 0000000..c7f919e
--- /dev/null
+++ b/videoencoder/bitstream.h
@@ -0,0 +1,403 @@
+/*
+* Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+#ifndef __BITSTREAM_H__
+#define __BITSTREAM_H__
+
+#include <VideoEncoderBase.h>
+#include <assert.h>
+
+struct bitstream {
+ unsigned int *buffer;
+ int bit_offset;
+ int max_size_in_dword;
+};
+
+#define BITSTREAM_ALLOCATE_STEPPING 4096
+
+static unsigned int va_swap32(unsigned int val)
+{
+ unsigned char *pval = (unsigned char *)&val;
+
+ return ((pval[0] << 24) |
+ (pval[1] << 16) |
+ (pval[2] << 8) |
+ (pval[3] << 0));
+}
+
+static void bitstream_start(bitstream *bs)
+{
+ bs->max_size_in_dword = BITSTREAM_ALLOCATE_STEPPING;
+ bs->buffer = (unsigned int*)calloc(bs->max_size_in_dword * sizeof(int), 1);
+ bs->bit_offset = 0;
+}
+
+static void bitstream_end(bitstream *bs)
+{
+ int pos = (bs->bit_offset >> 5);
+ int bit_offset = (bs->bit_offset & 0x1f);
+ int bit_left = 32 - bit_offset;
+
+ if (bit_offset) {
+ bs->buffer[pos] = va_swap32((bs->buffer[pos] << bit_left));
+ }
+}
+
+static void bitstream_put_ui(bitstream *bs, unsigned int val, int size_in_bits)
+{
+ int pos = (bs->bit_offset >> 5);
+ int bit_offset = (bs->bit_offset & 0x1f);
+ int bit_left = 32 - bit_offset;
+
+ if (!size_in_bits)
+ return;
+
+ bs->bit_offset += size_in_bits;
+
+ if (bit_left > size_in_bits) {
+ bs->buffer[pos] = (bs->buffer[pos] << size_in_bits | val);
+ } else {
+ size_in_bits -= bit_left;
+ bs->buffer[pos] = (bs->buffer[pos] << bit_left) | (val >> size_in_bits);
+ bs->buffer[pos] = va_swap32(bs->buffer[pos]);
+
+ if (pos + 1 == bs->max_size_in_dword) {
+ bs->max_size_in_dword += BITSTREAM_ALLOCATE_STEPPING;
+ bs->buffer = (unsigned int*)realloc(bs->buffer, bs->max_size_in_dword * sizeof(unsigned int));
+ if (bs->buffer == NULL)
+ abort();
+ }
+
+ bs->buffer[pos + 1] = val;
+ }
+}
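+
+// Worked example: with bit_offset = 28 and an 8-bit put (bit_left = 4), the top
+// 4 bits of val complete dword[0], which is then byte-swapped to stream order,
+// and the remaining low bits are parked in dword[1] until it fills or ends.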
+
+static void bitstream_put_ue(bitstream *bs, unsigned int val)
+{
+ int size_in_bits = 0;
+ int tmp_val = ++val;
+
+ while (tmp_val) {
+ tmp_val >>= 1;
+ size_in_bits++;
+ }
+
+    bitstream_put_ui(bs, 0, size_in_bits - 1); // leading zeros
+ bitstream_put_ui(bs, val, size_in_bits);
+}
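+
+// Worked example (unsigned Exp-Golomb): val = 3 gives val+1 = 4 = 0b100
+// (3 bits), so two leading zeros are written followed by 100: codeword "00100".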
+
+static void bitstream_put_se(bitstream *bs, int val)
+{
+ unsigned int new_val;
+
+ if (val <= 0)
+ new_val = -2 * val;
+ else
+ new_val = 2 * val - 1;
+
+ bitstream_put_ue(bs, new_val);
+}
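+
+// The signed mapping above is the standard H.264 one: 0 -> 0, 1 -> 1, -1 -> 2,
+// 2 -> 3, -2 -> 4, ..., so se(-1) is coded as ue(2) = "011".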
+
+static void bitstream_byte_aligning(bitstream *bs, int bit)
+{
+ int bit_offset = (bs->bit_offset & 0x7);
+ int bit_left = 8 - bit_offset;
+ int new_val;
+
+ if (!bit_offset)
+ return;
+
+ assert(bit == 0 || bit == 1);
+
+ if (bit)
+ new_val = (1 << bit_left) - 1;
+ else
+ new_val = 0;
+
+ bitstream_put_ui(bs, new_val, bit_left);
+}
+
+static void rbsp_trailing_bits(bitstream *bs)
+{
+ bitstream_put_ui(bs, 1, 1);
+ bitstream_byte_aligning(bs, 0);
+}
+
+static void nal_start_code_prefix(bitstream *bs)
+{
+ bitstream_put_ui(bs, 0x00000001, 32);
+}
+
+static void nal_header(bitstream *bs, int nal_ref_idc, int nal_unit_type)
+{
+ bitstream_put_ui(bs, 0, 1); /* forbidden_zero_bit: 0 */
+ bitstream_put_ui(bs, nal_ref_idc, 2);
+ bitstream_put_ui(bs, nal_unit_type, 5);
+}
+
+#define NAL_REF_IDC_NONE 0
+#define NAL_REF_IDC_LOW 1
+#define NAL_REF_IDC_MEDIUM 2
+#define NAL_REF_IDC_HIGH 3
+
+#define NAL_NON_IDR 1
+#define NAL_IDR 5
+#define NAL_SPS 7
+#define NAL_PPS 8
+#define NAL_SEI 6
+
+#define SLICE_TYPE_P 0
+#define SLICE_TYPE_B 1
+#define SLICE_TYPE_I 2
+
+#define ENTROPY_MODE_CAVLC 0
+#define ENTROPY_MODE_CABAC 1
+
+#define PROFILE_IDC_BASELINE 66
+#define PROFILE_IDC_MAIN 77
+#define PROFILE_IDC_HIGH 100
+
+static void sps_rbsp(bitstream *bs, VAProfile profile, int frame_bit_rate, VAEncSequenceParameterBufferH264 *seq_param)
+{
+ int profile_idc = 0;
+ int constraint_set_flag = 0;
+
+ if (profile == VAProfileH264High) {
+ profile_idc = PROFILE_IDC_HIGH;
+ constraint_set_flag |= (1 << 3); /* Annex A.2.4 */
+ }
+ else if (profile == VAProfileH264Main) {
+ profile_idc = PROFILE_IDC_MAIN;
+ constraint_set_flag |= (1 << 1); /* Annex A.2.2 */
+ } else {
+ profile_idc = PROFILE_IDC_BASELINE;
+ constraint_set_flag |= (1 << 0); /* Annex A.2.1 */
+ }
+
+ bitstream_put_ui(bs, profile_idc, 8); /* profile_idc */
+ bitstream_put_ui(bs, !!(constraint_set_flag & 1), 1); /* constraint_set0_flag */
+ bitstream_put_ui(bs, !!(constraint_set_flag & 2), 1); /* constraint_set1_flag */
+ bitstream_put_ui(bs, !!(constraint_set_flag & 4), 1); /* constraint_set2_flag */
+ bitstream_put_ui(bs, !!(constraint_set_flag & 8), 1); /* constraint_set3_flag */
+ bitstream_put_ui(bs, 0, 4); /* reserved_zero_4bits */
+ bitstream_put_ui(bs, seq_param->level_idc, 8); /* level_idc */
+ bitstream_put_ue(bs, seq_param->seq_parameter_set_id); /* seq_parameter_set_id */
+
+ if ( profile_idc == PROFILE_IDC_HIGH) {
+ bitstream_put_ue(bs, 1); /* chroma_format_idc = 1, 4:2:0 */
+ bitstream_put_ue(bs, 0); /* bit_depth_luma_minus8 */
+ bitstream_put_ue(bs, 0); /* bit_depth_chroma_minus8 */
+ bitstream_put_ui(bs, 0, 1); /* qpprime_y_zero_transform_bypass_flag */
+ bitstream_put_ui(bs, 0, 1); /* seq_scaling_matrix_present_flag */
+ }
+
+ bitstream_put_ue(bs, seq_param->seq_fields.bits.log2_max_frame_num_minus4); /* log2_max_frame_num_minus4 */
+ bitstream_put_ue(bs, seq_param->seq_fields.bits.pic_order_cnt_type); /* pic_order_cnt_type */
+
+ if (seq_param->seq_fields.bits.pic_order_cnt_type == 0)
+ bitstream_put_ue(bs, seq_param->seq_fields.bits.log2_max_pic_order_cnt_lsb_minus4); /* log2_max_pic_order_cnt_lsb_minus4 */
+ else {
+ assert(0);
+ }
+
+ bitstream_put_ue(bs, seq_param->max_num_ref_frames); /* num_ref_frames */
+ bitstream_put_ui(bs, 0, 1); /* gaps_in_frame_num_value_allowed_flag */
+
+ bitstream_put_ue(bs, seq_param->picture_width_in_mbs - 1); /* pic_width_in_mbs_minus1 */
+ bitstream_put_ue(bs, seq_param->picture_height_in_mbs - 1); /* pic_height_in_map_units_minus1 */
+ bitstream_put_ui(bs, seq_param->seq_fields.bits.frame_mbs_only_flag, 1); /* frame_mbs_only_flag */
+
+ if (!seq_param->seq_fields.bits.frame_mbs_only_flag) {
+ assert(0);
+ }
+
+ bitstream_put_ui(bs, seq_param->seq_fields.bits.direct_8x8_inference_flag, 1); /* direct_8x8_inference_flag */
+ bitstream_put_ui(bs, seq_param->frame_cropping_flag, 1); /* frame_cropping_flag */
+
+ if (seq_param->frame_cropping_flag) {
+ bitstream_put_ue(bs, seq_param->frame_crop_left_offset); /* frame_crop_left_offset */
+ bitstream_put_ue(bs, seq_param->frame_crop_right_offset); /* frame_crop_right_offset */
+ bitstream_put_ue(bs, seq_param->frame_crop_top_offset); /* frame_crop_top_offset */
+ bitstream_put_ue(bs, seq_param->frame_crop_bottom_offset); /* frame_crop_bottom_offset */
+ }
+
+ if ( frame_bit_rate < 0 ) {
+ bitstream_put_ui(bs, 0, 1); /* vui_parameters_present_flag */
+ } else {
+ bitstream_put_ui(bs, 1, 1); /* vui_parameters_present_flag */
+ bitstream_put_ui(bs, 0, 1); /* aspect_ratio_info_present_flag */
+ bitstream_put_ui(bs, 0, 1); /* overscan_info_present_flag */
+ bitstream_put_ui(bs, 0, 1); /* video_signal_type_present_flag */
+ bitstream_put_ui(bs, 0, 1); /* chroma_loc_info_present_flag */
+ bitstream_put_ui(bs, 1, 1); /* timing_info_present_flag */
+ {
+            bitstream_put_ui(bs, 15, 32);   /* num_units_in_tick */
+            bitstream_put_ui(bs, 900, 32);  /* time_scale */
+            bitstream_put_ui(bs, 1, 1);     /* fixed_frame_rate_flag: 900/(2*15) = 30 fps */
+ }
+ bitstream_put_ui(bs, 1, 1); /* nal_hrd_parameters_present_flag */
+ {
+ // hrd_parameters
+ bitstream_put_ue(bs, 0); /* cpb_cnt_minus1 */
+ bitstream_put_ui(bs, 4, 4); /* bit_rate_scale */
+ bitstream_put_ui(bs, 6, 4); /* cpb_size_scale */
+
+ bitstream_put_ue(bs, frame_bit_rate - 1); /* bit_rate_value_minus1[0] */
+ bitstream_put_ue(bs, frame_bit_rate*8 - 1); /* cpb_size_value_minus1[0] */
+ bitstream_put_ui(bs, 1, 1); /* cbr_flag[0] */
+
+ bitstream_put_ui(bs, 23, 5); /* initial_cpb_removal_delay_length_minus1 */
+ bitstream_put_ui(bs, 23, 5); /* cpb_removal_delay_length_minus1 */
+ bitstream_put_ui(bs, 23, 5); /* dpb_output_delay_length_minus1 */
+ bitstream_put_ui(bs, 23, 5); /* time_offset_length */
+ }
+ bitstream_put_ui(bs, 0, 1); /* vcl_hrd_parameters_present_flag */
+ bitstream_put_ui(bs, 0, 1); /* low_delay_hrd_flag */
+
+ bitstream_put_ui(bs, 0, 1); /* pic_struct_present_flag */
+ bitstream_put_ui(bs, 0, 1); /* bitstream_restriction_flag */
+ }
+
+ rbsp_trailing_bits(bs); /* rbsp_trailing_bits */
+}
+
+static void pps_rbsp(bitstream *bs, VAEncPictureParameterBufferH264 *pic_param)
+{
+
+ bitstream_put_ue(bs, pic_param->pic_parameter_set_id); /* pic_parameter_set_id */
+ bitstream_put_ue(bs, pic_param->seq_parameter_set_id); /* seq_parameter_set_id */
+
+ bitstream_put_ui(bs, pic_param->pic_fields.bits.entropy_coding_mode_flag, 1); /* entropy_coding_mode_flag */
+
+ bitstream_put_ui(bs, 0, 1); /* pic_order_present_flag: 0 */
+
+ bitstream_put_ue(bs, 0); /* num_slice_groups_minus1 */
+
+ bitstream_put_ue(bs, pic_param->num_ref_idx_l0_active_minus1); /* num_ref_idx_l0_active_minus1 */
+ bitstream_put_ue(bs, pic_param->num_ref_idx_l1_active_minus1); /* num_ref_idx_l1_active_minus1 1 */
+
+ bitstream_put_ui(bs, pic_param->pic_fields.bits.weighted_pred_flag, 1); /* weighted_pred_flag: 0 */
+ bitstream_put_ui(bs, pic_param->pic_fields.bits.weighted_bipred_idc, 2); /* weighted_bipred_idc: 0 */
+
+ bitstream_put_se(bs, pic_param->pic_init_qp - 26); /* pic_init_qp_minus26 */
+ bitstream_put_se(bs, 0); /* pic_init_qs_minus26 */
+ bitstream_put_se(bs, 0); /* chroma_qp_index_offset */
+
+ bitstream_put_ui(bs, pic_param->pic_fields.bits.deblocking_filter_control_present_flag, 1); /* deblocking_filter_control_present_flag */
+ bitstream_put_ui(bs, 0, 1); /* constrained_intra_pred_flag */
+ bitstream_put_ui(bs, 0, 1); /* redundant_pic_cnt_present_flag */
+
+ /* more_rbsp_data */
+ bitstream_put_ui(bs, pic_param->pic_fields.bits.transform_8x8_mode_flag, 1); /*transform_8x8_mode_flag */
+ bitstream_put_ui(bs, 0, 1); /* pic_scaling_matrix_present_flag */
+ bitstream_put_se(bs, pic_param->second_chroma_qp_index_offset ); /*second_chroma_qp_index_offset */
+
+ rbsp_trailing_bits(bs);
+}
+
+int build_packed_seq_buffer(unsigned char **header_buffer, VAProfile profile, VAEncSequenceParameterBufferH264 *seq_param)
+{
+ bitstream bs;
+
+ bitstream_start(&bs);
+ nal_start_code_prefix(&bs);
+ nal_header(&bs, NAL_REF_IDC_HIGH, NAL_SPS);
+ sps_rbsp(&bs, profile, seq_param->bits_per_second, seq_param);
+ bitstream_end(&bs);
+
+ *header_buffer = (unsigned char *)bs.buffer;
+ return bs.bit_offset;
+}
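+
+// Usage sketch (hedged; the exact packed-header plumbing depends on the libva
+// driver in use): the returned bit count and malloc'd buffer are typically
+// wrapped as a packed sequence header, e.g.
+//
+//   unsigned char *sps = NULL;
+//   int bits = build_packed_seq_buffer(&sps, VAProfileH264Main, &seq_param);
+//   VAEncPackedHeaderParameterBuffer ph;
+//   memset(&ph, 0, sizeof(ph));
+//   ph.type = VAEncPackedHeaderSequence;
+//   ph.bit_length = bits;
+//   ph.has_emulation_bytes = 0;
+//   // vaCreateBuffer(VAEncPackedHeaderParameterBufferType, ...) then
+//   // vaCreateBuffer(VAEncPackedHeaderDataBufferType, ...) with sps; free(sps).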
+
+int build_packed_pic_buffer(unsigned char **header_buffer, VAEncPictureParameterBufferH264 *pic_param)
+{
+ bitstream bs;
+
+ bitstream_start(&bs);
+ nal_start_code_prefix(&bs);
+ nal_header(&bs, NAL_REF_IDC_HIGH, NAL_PPS);
+ pps_rbsp(&bs, pic_param);
+ bitstream_end(&bs);
+
+ *header_buffer = (unsigned char *)bs.buffer;
+ return bs.bit_offset;
+}
+
+int build_packed_sei_buffer_timing(unsigned int init_cpb_removal_delay,
+ unsigned int init_cpb_removal_delay_offset,
+ unsigned int cpb_removal_length,
+ unsigned int cpb_removal_delay,
+ unsigned int dpb_output_length,
+ unsigned int dpb_output_delay,
+ unsigned char **sei_buffer)
+{
+ unsigned char *byte_buf;
+ int bp_byte_size, i, pic_byte_size;
+
+ bitstream nal_bs;
+ bitstream sei_bp_bs, sei_pic_bs;
+
+ bitstream_start(&sei_bp_bs);
+ bitstream_put_ue(&sei_bp_bs, 0); /*seq_parameter_set_id*/
+ bitstream_put_ui(&sei_bp_bs, init_cpb_removal_delay, cpb_removal_length);
+ bitstream_put_ui(&sei_bp_bs, init_cpb_removal_delay_offset, cpb_removal_length);
+ if ( sei_bp_bs.bit_offset & 0x7) {
+ bitstream_put_ui(&sei_bp_bs, 1, 1);
+ }
+ bitstream_end(&sei_bp_bs);
+ bp_byte_size = (sei_bp_bs.bit_offset + 7) / 8;
+
+ bitstream_start(&sei_pic_bs);
+ bitstream_put_ui(&sei_pic_bs, cpb_removal_delay, cpb_removal_length);
+ bitstream_put_ui(&sei_pic_bs, dpb_output_delay, dpb_output_length);
+ if ( sei_pic_bs.bit_offset & 0x7) {
+ bitstream_put_ui(&sei_pic_bs, 1, 1);
+ }
+ bitstream_end(&sei_pic_bs);
+ pic_byte_size = (sei_pic_bs.bit_offset + 7) / 8;
+
+ bitstream_start(&nal_bs);
+ nal_start_code_prefix(&nal_bs);
+ nal_header(&nal_bs, NAL_REF_IDC_NONE, NAL_SEI);
+
+ /* Write the SEI buffer period data */
+ bitstream_put_ui(&nal_bs, 0, 8);
+ bitstream_put_ui(&nal_bs, bp_byte_size, 8);
+
+ byte_buf = (unsigned char *)sei_bp_bs.buffer;
+ for(i = 0; i < bp_byte_size; i++) {
+ bitstream_put_ui(&nal_bs, byte_buf[i], 8);
+ }
+ free(byte_buf);
+ /* write the SEI timing data */
+ bitstream_put_ui(&nal_bs, 0x01, 8);
+ bitstream_put_ui(&nal_bs, pic_byte_size, 8);
+
+ byte_buf = (unsigned char *)sei_pic_bs.buffer;
+ for(i = 0; i < pic_byte_size; i++) {
+ bitstream_put_ui(&nal_bs, byte_buf[i], 8);
+ }
+ free(byte_buf);
+
+ rbsp_trailing_bits(&nal_bs);
+ bitstream_end(&nal_bs);
+
+ *sei_buffer = (unsigned char *)nal_bs.buffer;
+
+ return nal_bs.bit_offset;
+}
+
+#endif