aboutsummaryrefslogtreecommitdiff
path: root/mix_vbp/viddec_fw
diff options
context:
space:
mode:
authorwonjong.lee <wongjong.lee@windriver.com>2010-02-08 11:30:37 +0900
committerPatrick Tjin <pattjin@google.com>2014-07-21 21:51:20 -0700
commitbd8388b4555645b3d29abc6a94c303638064d69a (patch)
tree54e18444e70d2b95f7376f89e38b0bf34fcf3d2d /mix_vbp/viddec_fw
parent94d1758a877c17a2caf639be527688f7a24b6048 (diff)
downloadlibmix-bd8388b4555645b3d29abc6a94c303638064d69a.tar.gz
start new branch (intel-cdk-moblin-libmix-20100129)
from: Ho-Eun, Ryu
Diffstat (limited to 'mix_vbp/viddec_fw')
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/h264/include/h264.h1034
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/h264/include/h264parse.h172
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/h264/include/h264parse_dpb.h107
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/h264/include/h264parse_sei.h314
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse.c786
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_bsd.c228
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_dpb.c4171
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_math.c82
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_mem.c198
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_pps.c128
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sei.c1176
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sh.c740
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sps.c513
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/h264/parser/mix_vbp_h264_stubs.c575
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/h264/parser/viddec_h264_parse.c559
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/h264/parser/viddec_h264_workload.c1306
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/mp2/include/mpeg2.h195
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/mp2/include/viddec_mpeg2.h231
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/mp2/parser/mix_vbp_mpeg2_stubs.c32
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/mp2/parser/viddec_mpeg2_frame_attr.c114
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/mp2/parser/viddec_mpeg2_metadata.c1039
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/mp2/parser/viddec_mpeg2_parse.c380
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/mp2/parser/viddec_mpeg2_workload.c461
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/mp4/include/viddec_fw_mp4.h231
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_fw_mp4_workload.c371
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_decodevideoobjectplane.c98
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_decodevideoobjectplane.h11
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_parse.c278
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_parse.h527
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_shortheader.c134
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_shortheader.h11
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectlayer.c596
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectlayer.h17
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectplane.c422
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectplane.h11
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_visualobject.c287
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_visualobject.h13
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_parse_sc_mp4.c143
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/vc1/include/vc1common.h111
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/vc1/parser/mix_vbp_vc1_stubs.c16
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1.h224
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse.c557
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse.h136
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_bitplane.c753
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_bpic.c100
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_bpic_adv.c257
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_common_defs.h608
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_common_tables.c198
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_huffman.c97
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_ipic.c101
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_ipic_adv.c257
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_mv_com.c82
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_pic_com.c101
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_pic_com_adv.c403
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_ppic.c149
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_ppic_adv.c368
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_vopdq.c130
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/vc1/parser/viddec_vc1_parse.c345
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/vc1/parser/viddec_vc1_workload.c691
-rw-r--r--mix_vbp/viddec_fw/fw/include/stdint.h23
-rw-r--r--mix_vbp/viddec_fw/fw/include/viddec_debug.h31
-rw-r--r--mix_vbp/viddec_fw/fw/include/viddec_fw_version.h7
-rw-r--r--mix_vbp/viddec_fw/fw/parser/Makefile.am205
-rw-r--r--mix_vbp/viddec_fw/fw/parser/gv_sven_devh.c224
-rw-r--r--mix_vbp/viddec_fw/fw/parser/include/fw_pvt.h114
-rw-r--r--mix_vbp/viddec_fw/fw/parser/include/ipc_fw_custom.h87
-rw-r--r--mix_vbp/viddec_fw/fw/parser/include/viddec_emitter.h96
-rw-r--r--mix_vbp/viddec_fw/fw/parser/include/viddec_fw_debug.h80
-rw-r--r--mix_vbp/viddec_fw/fw/parser/include/viddec_fw_parser_fw_ipc.h194
-rw-r--r--mix_vbp/viddec_fw/fw/parser/include/viddec_h264_parse.h6
-rw-r--r--mix_vbp/viddec_fw/fw/parser/include/viddec_mp4_parse.h6
-rw-r--r--mix_vbp/viddec_fw/fw/parser/include/viddec_mpeg2_parse.h6
-rw-r--r--mix_vbp/viddec_fw/fw/parser/include/viddec_parser_ops.h106
-rw-r--r--mix_vbp/viddec_fw/fw/parser/include/viddec_pm.h95
-rw-r--r--mix_vbp/viddec_fw/fw/parser/include/viddec_pm_parse.h24
-rw-r--r--mix_vbp/viddec_fw/fw/parser/include/viddec_pm_tags.h17
-rw-r--r--mix_vbp/viddec_fw/fw/parser/include/viddec_pm_utils_bstream.h81
-rw-r--r--mix_vbp/viddec_fw/fw/parser/include/viddec_pm_utils_list.h51
-rw-r--r--mix_vbp/viddec_fw/fw/parser/include/viddec_vc1_parse.h6
-rw-r--r--mix_vbp/viddec_fw/fw/parser/main.c608
-rw-r--r--mix_vbp/viddec_fw/fw/parser/utils.c253
-rw-r--r--mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c1568
-rw-r--r--mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.h48
-rw-r--r--mix_vbp/viddec_fw/fw/parser/vbp_loader.c162
-rw-r--r--mix_vbp/viddec_fw/fw/parser/vbp_loader.h318
-rw-r--r--mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c1277
-rw-r--r--mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.h49
-rw-r--r--mix_vbp/viddec_fw/fw/parser/vbp_trace.c28
-rw-r--r--mix_vbp/viddec_fw/fw/parser/vbp_trace.h47
-rw-r--r--mix_vbp/viddec_fw/fw/parser/vbp_utils.c548
-rw-r--r--mix_vbp/viddec_fw/fw/parser/vbp_utils.h106
-rw-r--r--mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.c1029
-rw-r--r--mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.h54
-rw-r--r--mix_vbp/viddec_fw/fw/parser/viddec_emit.c78
-rw-r--r--mix_vbp/viddec_fw/fw/parser/viddec_intr.c56
-rw-r--r--mix_vbp/viddec_fw/fw/parser/viddec_parse_sc.c119
-rw-r--r--mix_vbp/viddec_fw/fw/parser/viddec_parse_sc_fast_loop.c190
-rw-r--r--mix_vbp/viddec_fw/fw/parser/viddec_parse_sc_stub.c6
-rw-r--r--mix_vbp/viddec_fw/fw/parser/viddec_pm.c554
-rw-r--r--mix_vbp/viddec_fw/fw/parser/viddec_pm_flush.c127
-rw-r--r--mix_vbp/viddec_fw/fw/parser/viddec_pm_parser_ops.c178
-rw-r--r--mix_vbp/viddec_fw/fw/parser/viddec_pm_stubs.c21
-rw-r--r--mix_vbp/viddec_fw/fw/parser/viddec_pm_tags.c304
-rw-r--r--mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_bstream.c472
-rw-r--r--mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_list.c221
-rw-r--r--mix_vbp/viddec_fw/include/viddec_fw_common_defs.h200
-rw-r--r--mix_vbp/viddec_fw/include/viddec_fw_decoder_host.h242
-rw-r--r--mix_vbp/viddec_fw/include/viddec_fw_frame_attr.h281
-rw-r--r--mix_vbp/viddec_fw/include/viddec_fw_item_types.h738
-rw-r--r--mix_vbp/viddec_fw/include/viddec_fw_parser_host.h237
-rw-r--r--mix_vbp/viddec_fw/include/viddec_fw_workload.h152
111 files changed, 35105 insertions, 0 deletions
diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/include/h264.h b/mix_vbp/viddec_fw/fw/codecs/h264/include/h264.h
new file mode 100644
index 0000000..51f0602
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/h264/include/h264.h
@@ -0,0 +1,1034 @@
+/* ///////////////////////////////////////////////////////////////////////
+//
+// INTEL CORPORATION PROPRIETARY INFORMATION
+// This software is supplied under the terms of a license agreement or
+// nondisclosure agreement with Intel Corporation and may not be copied
+// or disclosed except in accordance with the terms of that agreement.
+// Copyright (c) 2001-2006 Intel Corporation. All Rights Reserved.
+//
+// Description: H.264 parser header.
+//
+*/
+
+
+#ifndef _H264_H_
+#define _H264_H_
+
+#ifdef HOST_ONLY
+#include <stdio.h>
+#include <stdlib.h>
+#include <memory.h>
+#endif
+
+#include "stdint.h"
+#include "viddec_debug.h"
+
+#include "viddec_fw_workload.h"
+#include "h264parse_sei.h"
+
+
+#ifdef WIN32
+#define mfd_printf OS_INFO
+#endif
+
+#ifdef H264_VERBOSE
+#define PRINTF(format, args...) OS_INFO("%s: %s[%d]:: " format, __FILE__, __FUNCTION__ , __LINE__ , ## args )
+#else
+//#define PRINTF(args...)
+#endif
+
+//#pragma warning(disable : 4710) // function not inlined
+//#pragma warning(disable : 4514) // unreferenced inline function has been removed CL
+//#pragma warning(disable : 4100) // unreferenced formal parameter CL
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#define MAX_INT32_VALUE 0x7fffffff
+
+#define MAX_NUM_REF_FRAMES_IN_PIC_ORDER_CNT_CYCLE 256
+#define MAX_CPB_CNT 32
+#define MAX_NUM_SLICE_GRPS 1 //As per Annex A for high profile, the num_slice_groups_minus1 is 0
+#define MAX_PIC_LIST_NUM 8
+
+//#define MAX_PIC_SIZE_IN_MAP_UNITS 1024 //0 ???????? Henry
+#define MAX_NUM_REF_IDX_L0_ACTIVE 32
+//#define STARTCODE_BUF_SIZE 2048+1024
+
+#define NUM_MMCO_OPERATIONS 17
+
+// Used to check whether the SEI RP is the only way for recovery (cisco contents)
+// This threshold decides the recovery interval: recovery is forced even when no error is detected, if no IDR arrives within this many pictures
+#define SEI_REC_CHECK_TH 8
+
+//SPS
+#define MAX_NUM_SPS 32
+#define SCL_DEFAULT 1
+
+//PPS
+#define MAX_PIC_PARAMS 255
+#define MAX_NUM_REF_FRAMES 32
+#define MAX_QP 51
+#define MAX_NUM_PPS 256
+
+#define PUT_FS_IDC_BITS(w) (w&0x1F)
+#define PUT_LIST_INDEX_FIELD_BIT(w) ((w&0x1)<<5)
+#define PUT_LIST_LONG_TERM_BITS(w) ((w&0x1)<<6)
+#define PUT_LIST_PTR_LIST_ID_BIT(id) (id<<5)
+
+
+// DPB
+#define FRAME_FLAG_DANGLING_TOP_FIELD ( 0x1 << 3 )
+#define FRAME_FLAG_DANGLING_BOTTOM_FIELD ( 0x1 << 4 )
+
+#define MPD_DPB_FS_NULL_IDC 31 // May need to be changed if we alter gaps_in_frame_num to use
+
+#define MFD_H264_MAX_FRAME_BUFFERS 17
+#define NUM_DPB_FRAME_STORES (MFD_H264_MAX_FRAME_BUFFERS + 1) // 1 extra for storing non-existent pictures.
+
+//Scaling Matrix Type
+#define PPS_QM 0
+#define SPS_QM 1
+#define FB_QM 2
+#define DEFAULT_QM 3
+
+//Frame Type
+#define FRAME_TYPE_IDR 0x00
+#define FRAME_TYPE_I 0x01
+#define FRAME_TYPE_P 0x02
+#define FRAME_TYPE_B 0x03
+#define FRAME_TYPE_INVALID 0x04
+
+
+#define FRAME_TYPE_FRAME_OFFSET 3
+#define FRAME_TYPE_TOP_OFFSET 3
+#define FRAME_TYPE_BOTTOM_OFFSET 0
+#define FRAME_TYPE_STRUCTRUE_OFFSET 6
+
+//// Error handling
+#define FIELD_ERR_OFFSET 17 //offset for Field error flag ----refer to the structure definition viddec_fw_workload_error_codes in viddec_fw_common_defs.h
+
+////Bits Handling
+#define h264_bitfields_extract(x_32, start, mask) (((x_32) >> (start)) & (mask) )
+#define h264_bitfields_insert(x_32, val_32, start, mask) ((x_32) = (((x_32) & ~( (mask) << (start))) | (((val_32) & (mask)) << (start))))
+
+
+//// PIP
+typedef enum _pip_setting_t
+{
+ PIP_SCALER_DISABLED,
+ PIP_SCALE_FACTOR_1_BY_4,
+ PIP_SCALE_FACTOR_1_BY_2,
+ PIP_SCALER_INVALID,
+
+} pip_setting_t;
+
+
+#ifdef VERBOSE
+#define DEBUGGETBITS(args...) OS_INFO( args )
+#else
+//#define DEBUGGETBITS(args...)
+#endif
+
+/* status codes */
+typedef enum _h264_Status
+{
+ H264_STATUS_EOF = 1, // end of file
+ H264_STATUS_OK = 0, // no error
+ H264_STATUS_NO_MEM = 2, // out of memory
+ H264_STATUS_FILE_ERROR = 3, // file error
+ H264_STATUS_NOTSUPPORT = 4, // not supported mode
+    H264_STATUS_PARSE_ERROR  = 5,   // failed to parse the H.264 stream
+ H264_STATUS_ERROR = 6, // unknown/unspecified error
+ H264_NAL_ERROR,
+ H264_SPS_INVALID_PROFILE,
+ H264_SPS_INVALID_LEVEL,
+ H264_SPS_INVALID_SEQ_PARAM_ID,
+ H264_SPS_ERROR,
+ H264_PPS_INVALID_PIC_ID,
+ H264_PPS_INVALID_SEQ_ID,
+ H264_PPS_ERROR,
+ H264_SliceHeader_INVALID_MB,
+ H264_SliceHeader_ERROR,
+ H264_FRAME_DONE,
+ H264_SLICE_DONE,
+ H264_STATUS_POLL_ONCE_ERROR,
+ H264_STATUS_DEC_MEMINIT_ERROR,
+ H264_STATUS_NAL_UNIT_TYPE_ERROR,
+ H264_STATUS_SEI_ERROR,
+ H264_STATUS_SEI_DONE,
+} h264_Status;
+
+
+
+typedef enum _picture_structure_t
+{
+ TOP_FIELD = 1,
+ BOTTOM_FIELD = 2,
+ FRAME = 3,
+ INVALID = 4
+} picture_structure_t;
+
+///// Chroma format
+
+typedef enum _h264_chroma_format_t
+{
+ H264_CHROMA_MONOCHROME,
+ H264_CHROMA_420,
+ H264_CHROMA_422,
+ H264_CHROMA_444,
+}h264_chroma_format_t;
+
+/* H264 start code values */
+typedef enum _h264_nal_unit_type
+{
+ h264_NAL_UNIT_TYPE_unspecified = 0,
+ h264_NAL_UNIT_TYPE_SLICE,
+ h264_NAL_UNIT_TYPE_DPA,
+ h264_NAL_UNIT_TYPE_DPB,
+ h264_NAL_UNIT_TYPE_DPC,
+ h264_NAL_UNIT_TYPE_IDR,
+ h264_NAL_UNIT_TYPE_SEI,
+ h264_NAL_UNIT_TYPE_SPS,
+ h264_NAL_UNIT_TYPE_PPS,
+ h264_NAL_UNIT_TYPE_Acc_unit_delimiter,
+ h264_NAL_UNIT_TYPE_EOSeq,
+ h264_NAL_UNIT_TYPE_EOstream,
+ h264_NAL_UNIT_TYPE_filler_data,
+ h264_NAL_UNIT_TYPE_SPS_extension,
+ h264_NAL_UNIT_TYPE_Reserved1 =14, /*14-18*/
+ h264_NAL_UNIT_TYPE_Reserved2 =15, /*14-18*/
+ h264_NAL_UNIT_TYPE_Reserved3 =16, /*14-18*/
+ h264_NAL_UNIT_TYPE_Reserved4 =17, /*14-18*/
+ h264_NAL_UNIT_TYPE_Reserved5 =18, /*14-18*/
+ h264_NAL_UNIT_TYPE_ACP =19,
+ h264_NAL_UNIT_TYPE_Reserved6 =20, /*20-23*/
+ h264_NAL_UNIT_TYPE_unspecified2 =24, /*24-31*/
+} h264_nal_unit_type;
+
+#define h264_NAL_PRIORITY_HIGHEST 3
+#define h264_NAL_PRIORITY_HIGH 2
+#define h264_NAL_PRIRITY_LOW 1
+#define h264_NAL_PRIORITY_DISPOSABLE 0
+
+
+typedef enum _h264_Profile
+{
+ h264_ProfileBaseline = 66, /** Baseline profile */
+ h264_ProfileMain = 77, /** Main profile */
+ h264_ProfileExtended = 88, /** Extended profile */
+ h264_ProfileHigh = 100 , /** High profile */
+ h264_ProfileHigh10 = 110, /** High 10 profile */
+ h264_ProfileHigh422 = 122, /** High profile 4:2:2 */
+ h264_ProfileHigh444 = 144, /** High profile 4:4:4 */
+} h264_Profile;
+
+
+typedef enum _h264_Level
+{
+ h264_Level1b = 9, /** Level 1b */
+ h264_Level1 = 10, /** Level 1 */
+ h264_Level11 = 11, /** Level 1.1 */
+ h264_Level12 = 12, /** Level 1.2 */
+ h264_Level13 = 13, /** Level 1.3 */
+ h264_Level2 = 20, /** Level 2 */
+ h264_Level21 = 21, /** Level 2.1 */
+ h264_Level22 = 22, /** Level 2.2 */
+ h264_Level3 = 30, /** Level 3 */
+ h264_Level31 = 31, /** Level 3.1 */
+ h264_Level32 = 32, /** Level 3.2 */
+ h264_Level4 = 40, /** Level 4 */
+ h264_Level41 = 41, /** Level 4.1 */
+ h264_Level42 = 42, /** Level 4.2 */
+ h264_Level5 = 50, /** Level 5 */
+ h264_Level51 = 51, /** Level 5.1 */
+ h264_LevelReserved = 255 /** Unknown profile */
+} h264_Level;
+
+
+typedef enum _h264_video_format
+{
+ h264_Component =0,
+ h264_PAL,
+ h264_NTSC,
+ h264_SECAM,
+ h264_MAC,
+ h264_unspecified,
+ h264_Reserved6,
+ h264_Reserved7
+}h264_video_format;
+
+
+typedef enum _h264_fcm
+{
+ h264_ProgressiveFrame = 0,
+ h264_InterlacedFrame = 1,
+ h264_InterlacedField = 3,
+ h264_PictureFormatNone
+} h264_fcm;
+
+
+///// Define the picture types []
+typedef enum _h264_ptype_t
+{
+ h264_PtypeP = 0,
+ h264_PtypeB = 1,
+ h264_PtypeI = 2,
+ h264_PtypeSP = 3,
+ h264_PtypeSI = 4,
+ h264_Ptype_unspecified,
+} h264_ptype_t;
+
+
+///// Aspect ratio
+typedef enum _h264_aspect_ratio
+{
+ h264_AR_Unspecified = 0,
+ h264_AR_1_1 = 1,
+ h264_AR_12_11 = 2,
+ h264_AR_10_11 = 3,
+ h264_AR_16_11 = 4,
+ h264_AR_40_33 = 5,
+ h264_AR_24_11 = 6,
+ h264_AR_20_11 = 7,
+ h264_AR_32_11 = 8,
+ h264_AR_80_33 = 9,
+ h264_AR_18_11 = 10,
+ h264_AR_15_11 = 11,
+ h264_AR_64_33 = 12,
+ h264_AR_160_99 = 13,
+ h264_AR_4_3 = 14,
+ h264_AR_3_2 = 15,
+ h264_AR_2_1 = 16,
+ h264_AR_RESERVED = 17,
+ h264_AR_Extended_SAR = 255,
+}h264_aspect_ratio;
+
+
+//////////////////////////////////////////////
+
+//////////////////////////////////////////////
+// storable_picture
+
+/* Structure details
+   NOTE(review): previous size note ("11 ints, 44 bytes") was stale.
+   Current layout: 3 x int32_t + 4 x uint8_t = 16 bytes (assuming no padding).
+*/
+
+typedef struct
+{
+ int32_t poc;
+ int32_t pic_num;
+
+ int32_t long_term_pic_num;
+
+ uint8_t long_term_frame_idx;
+ uint8_t is_long_term;
+ uint8_t used_for_reference;
+ uint8_t pad_flag; // Used to indicate the status
+
+} storable_picture, *storable_picture_ptr;
+
+//////////////////////////////////////////////
+// frame store
+
+/* Structure details
+ If all members remain ints
+ Size = 46 ints, i.e. 184 bytes
+*/
+
+typedef struct _frame_store
+{
+ storable_picture frame;
+ storable_picture top_field;
+ storable_picture bottom_field;
+
+ int32_t frame_num;
+
+ int32_t frame_num_wrap;
+
+
+ uint8_t fs_idc;
+ uint8_t pic_type; //bit7 structure: 1 frame , 0 field;
+ //bit4,5,6 top field (frame) pic type, 00 IDR 01 I 10 P 11 B 100 INVALID
+ //bit1,2,3 bottom pic type, 00 IDR 01 I 10 P 11 B 100 INVALID
+ uint8_t long_term_frame_idx; // No two frame stores may have the same long-term frame index
+
+ #define viddec_h264_get_dec_structure(x) h264_bitfields_extract( (x)->fs_flag_1, 0, 0x03)
+ #define viddec_h264_set_dec_structure(x, val) h264_bitfields_insert ( (x)->fs_flag_1, (val), 0, 0x03)
+ #define viddec_h264_get_is_used(x) h264_bitfields_extract( (x)->fs_flag_1, 2, 0x03)
+ #define viddec_h264_set_is_frame_used(x, val) h264_bitfields_insert ( (x)->fs_flag_1, (val), 2, 0x03)
+ #define viddec_h264_set_is_top_used(x, val) h264_bitfields_insert ( (x)->fs_flag_1, (val), 2, 0x01)
+ #define viddec_h264_set_is_bottom_used(x, val) h264_bitfields_insert ( (x)->fs_flag_1, (val), 3, 0x01)
+ #define viddec_h264_get_is_skipped(x) h264_bitfields_extract( (x)->fs_flag_1, 4, 0x03)
+ #define viddec_h264_set_is_frame_skipped(x, val) h264_bitfields_insert ( (x)->fs_flag_1, (val), 4, 0x03)
+ #define viddec_h264_set_is_top_skipped(x, val) h264_bitfields_insert ( (x)->fs_flag_1, (val), 4, 0x01)
+ #define viddec_h264_set_is_bottom_skipped(x, val) h264_bitfields_insert ( (x)->fs_flag_1, (val), 5, 0x01)
+ #define viddec_h264_get_is_long_term(x) h264_bitfields_extract( (x)->fs_flag_1, 6, 0x03)
+ #define viddec_h264_set_is_frame_long_term(x, val) h264_bitfields_insert ( (x)->fs_flag_1, (val), 6, 0x03)
+ #define viddec_h264_set_is_top_long_term(x, val) h264_bitfields_insert ( (x)->fs_flag_1, (val), 6, 0x01)
+ #define viddec_h264_set_is_bottom_long_term(x, val) h264_bitfields_insert ( (x)->fs_flag_1, (val), 7, 0x01)
+ uint8_t fs_flag_1;
+
+
+ #define viddec_h264_get_is_non_existent(x) h264_bitfields_extract( (x)->fs_flag_2, 0, 0x01)
+ #define viddec_h264_set_is_non_existent(x, val) h264_bitfields_insert ( (x)->fs_flag_2, (val), 0, 0x01)
+ #define viddec_h264_get_is_output(x) h264_bitfields_extract( (x)->fs_flag_2, 1, 0x01)
+ #define viddec_h264_set_is_output(x, val) h264_bitfields_insert ( (x)->fs_flag_2, (val), 1, 0x01)
+ #define viddec_h264_get_is_dangling(x) h264_bitfields_extract( (x)->fs_flag_2, 2, 0x01)
+ #define viddec_h264_set_is_dangling(x, val) h264_bitfields_insert ( (x)->fs_flag_2, (val), 2, 0x01)
+ #define viddec_h264_get_recovery_pt_picture(x) h264_bitfields_extract( (x)->fs_flag_2, 3, 0x01)
+ #define viddec_h264_set_recovery_pt_picture(x, val) h264_bitfields_insert ( (x)->fs_flag_2, (val), 3, 0x01)
+ #define viddec_h264_get_broken_link_picture(x) h264_bitfields_extract( (x)->fs_flag_2, 4, 0x01)
+ #define viddec_h264_set_broken_link_picture(x, val) h264_bitfields_insert ( (x)->fs_flag_2, (val), 4, 0x01)
+ #define viddec_h264_get_open_gop_entry(x) h264_bitfields_extract( (x)->fs_flag_2, 5, 0x01)
+ #define viddec_h264_set_open_gop_entry(x, val) h264_bitfields_insert ( (x)->fs_flag_2, (val), 5, 0x01)
+ #define viddec_h264_get_first_field_intra(x) h264_bitfields_extract( (x)->fs_flag_2, 6, 0x01)
+ #define viddec_h264_set_first_field_intra(x, val) h264_bitfields_insert ( (x)->fs_flag_2, (val), 6, 0x01)
+ uint8_t fs_flag_2;
+
+ uint8_t fs_flag_reserve_1;
+ uint8_t fs_flag_reserve_2;
+ uint8_t fs_flag_reserve_3;
+
+ // If non-reference, may have skipped pixel decode
+ //uint8_t non_ref_skipped;
+} frame_store, *frame_param_ptr;
+
+//! Decoded Picture Buffer
+typedef struct _h264_decoded_picture_buffer
+{
+ ///
+ int32_t last_output_poc;
+ int32_t max_long_term_pic_idx;
+
+ //// Resolutions
+ int32_t PicWidthInMbs;
+ int32_t FrameHeightInMbs;
+
+ frame_store fs[NUM_DPB_FRAME_STORES];
+
+ uint8_t fs_ref_idc[16];
+ uint8_t fs_ltref_idc[16];
+
+ uint8_t fs_dpb_idc[NUM_DPB_FRAME_STORES+2];
+
+ uint8_t listX_0[33+3]; // [bit5}:field_flag:0 for top, 1 for bottom, [bit4~0]:fs_idc
+ uint8_t listX_1[33+3];
+
+ uint8_t listXsize[2]; // 1 to 32
+ uint8_t nInitListSize[2];
+
+ //uint32_t size;
+ uint8_t fs_dec_idc;
+ uint8_t fs_non_exist_idc;
+ uint8_t BumpLevel;
+ uint8_t used_size;
+
+ uint8_t OutputLevel;
+ uint8_t OutputLevelValid;
+ uint8_t OutputCtrl;
+ uint8_t num_ref_frames;
+
+ uint8_t ref_frames_in_buffer;
+ uint8_t ltref_frames_in_buffer;
+ uint8_t SuspendOutput;
+ uint8_t WaitSeiRecovery;
+
+
+ uint8_t frame_numbers_need_to_be_allocated;
+ uint8_t frame_id_need_to_be_allocated;
+
+ //// frame list to release from dpb, need be displayed
+ uint8_t frame_numbers_need_to_be_removed;
+ uint8_t frame_id_need_to_be_removed[17];
+
+ //// frame list to removed from dpb but not display
+ uint8_t frame_numbers_need_to_be_dropped;
+ uint8_t frame_id_need_to_be_dropped[17];
+
+ //// frame list to display (in display order)
+ uint8_t frame_numbers_need_to_be_displayed;
+ uint8_t frame_id_need_to_be_displayed[17];
+
+
+} h264_DecodedPictureBuffer;
+
+
+//////////////////////////////////////////////
+// qm_matrix_set
+typedef struct _qm_matrix_set
+{
+ // uint8_t scaling_default_vector;
+ uint8_t scaling_list[56]; // 0 to 23 for qm 0 to 5 (4x4), 24 to 55 for qm 6 & 7 (8x8)
+
+} qm_matrix_set, *qm_matrix_set_ptr;
+
+/*
+///////// Currently not enabled in parser fw///////////////////
+typedef struct _h264_SPS_Extension_RBSP {
+ int32_t seq_parameter_set_id; //UE
+ int32_t aux_format_idc; //UE
+ int32_t bit_depth_aux_minus8; //UE
+ int32_t alpha_incr_flag;
+ int32_t alpha_opaque_value;
+ int32_t alpha_transparent_value;
+ int32_t additional_extension_flag;
+// h264_rbsp_trail_set* rbsp_trail_ptr;
+}h264_SPS_Extension_RBSP_t;
+*/
+
+typedef struct _h264_hrd_param_set {
+ int32_t bit_rate_value_minus1[MAX_CPB_CNT]; // ue(v), 0 to (2^32)-2
+ int32_t cpb_size_value_minus1[MAX_CPB_CNT]; // ue(v), 0 to (2^32)-2
+
+ uint8_t cbr_flag[MAX_CPB_CNT]; // u(1) * 32
+
+} h264_hrd_param_set, *h264_hrd_param_set_ptr;
+
+typedef struct _vui_seq_parameters_t_used
+{
+ uint32_t num_units_in_tick; // u(32)
+ uint32_t time_scale; // u(32)
+
+ int32_t num_reorder_frames; // ue(v), 0 to max_dec_frame_buffering
+ int32_t max_dec_frame_buffering; // ue(v), 0 to MaxDpbSize, specified in subclause A.3
+
+ uint16_t sar_width; // u(16)
+ uint16_t sar_height; // u(16)
+
+ uint8_t aspect_ratio_info_present_flag; // u(1)
+ uint8_t aspect_ratio_idc; // u(8)
+ uint8_t video_signal_type_present_flag; // u(1)
+ uint8_t video_format; // u(3)
+
+ uint8_t colour_description_present_flag; // u(1)
+ uint8_t colour_primaries; // u(8)
+ uint8_t transfer_characteristics; // u(8)
+ uint8_t timing_info_present_flag; // u(1)
+
+ uint8_t fixed_frame_rate_flag; // u(1)
+ uint8_t low_delay_hrd_flag; // u(1)
+ uint8_t bitstream_restriction_flag; // u(1)
+ uint8_t pic_struct_present_flag;
+
+ uint8_t nal_hrd_parameters_present_flag; // u(1)
+ uint8_t nal_hrd_cpb_removal_delay_length_minus1; // u(5)
+ uint8_t nal_hrd_dpb_output_delay_length_minus1; // u(5)
+ uint8_t nal_hrd_time_offset_length; // u(5)
+
+ uint8_t nal_hrd_cpb_cnt_minus1; // ue(v), 0 to 31
+ uint8_t nal_hrd_initial_cpb_removal_delay_length_minus1; // u(5)
+ uint8_t vcl_hrd_parameters_present_flag; // u(1)
+ uint8_t vcl_hrd_cpb_removal_delay_length_minus1; // u(5)
+
+ uint8_t vcl_hrd_dpb_output_delay_length_minus1; // u(5)
+ uint8_t vcl_hrd_time_offset_length; // u(5)
+ uint8_t vcl_hrd_cpb_cnt_minus1; // ue(v), 0 to 31
+ uint8_t vcl_hrd_initial_cpb_removal_delay_length_minus1; // u(5)
+
+ /////// Here should be kept as 32-bits aligned for next structures
+ /// 2 structures for NAL&VCL HRD
+
+
+} vui_seq_parameters_t_used;
+
+
+typedef struct _vui_seq_parameters_t_not_used
+{
+ int16_t chroma_sample_loc_type_top_field; // ue(v)
+ int16_t chroma_sample_loc_type_bottom_field; // ue(v)
+
+ uint8_t overscan_info_present_flag; // u(1)
+ uint8_t overscan_appropriate_flag; // u(1)
+
+ uint8_t video_full_range_flag; // u(1)
+ uint8_t matrix_coefficients; // u(8)
+
+ uint8_t chroma_location_info_present_flag; // u(1)
+ uint8_t max_bytes_per_pic_denom; // ue(v), 0 to 16
+ uint8_t max_bits_per_mb_denom; // ue(v), 0 to 16
+ uint8_t log2_max_mv_length_vertical; // ue(v), 0 to 16, default to 16
+ uint8_t log2_max_mv_length_horizontal; // ue(v), 0 to 16, default to 16
+
+ uint8_t motion_vectors_over_pic_boundaries_flag; // u(1)
+
+ uint8_t nal_hrd_bit_rate_scale; // u(4)
+ uint8_t nal_hrd_cpb_size_scale; // u(4)
+
+ uint8_t vcl_hrd_bit_rate_scale; // u(4)
+ uint8_t vcl_hrd_cpb_size_scale; // u(4)
+
+ h264_hrd_param_set nal_hrd_parameters;
+ h264_hrd_param_set vcl_hrd_parameters;
+
+
+} vui_seq_parameters_t_not_used, *vui_seq_parameters_t_not_used_ptr;
+
+
+//////////////////////////////////////////////
+// picture parameter set
+
+typedef struct _PPS_PAR
+{
+ //int32_t DOUBLE_ALIGN valid; // indicates the parameter set is valid
+
+ int32_t pic_init_qp_minus26; // se(v), -26 to +25
+ int32_t pic_init_qs_minus26; // se(v), -26 to +25
+ int32_t chroma_qp_index_offset; // se(v), -12 to +12
+ int32_t second_chroma_qp_index_offset;
+
+ uint8_t pic_parameter_set_id; // ue(v), 0 to 255, restricted to 0 to 127 by MPD_CTRL_MAXPPS = 128
+ uint8_t seq_parameter_set_id; // ue(v), 0 to 31
+ uint8_t entropy_coding_mode_flag; // u(1)
+ uint8_t pic_order_present_flag; // u(1)
+
+ uint8_t num_slice_groups_minus1; // ue(v), shall be 0 for MP
+ // Below are not relevant for main profile...
+ uint8_t slice_group_map_type; // ue(v), 0 to 6
+ uint8_t num_ref_idx_l0_active; // ue(v), 0 to 31
+ uint8_t num_ref_idx_l1_active; // ue(v), 0 to 31
+
+ uint8_t weighted_pred_flag; // u(1)
+ uint8_t weighted_bipred_idc; // u(2)
+ uint8_t deblocking_filter_control_present_flag; // u(1)
+ uint8_t constrained_intra_pred_flag; // u(1)
+
+ uint8_t redundant_pic_cnt_present_flag; // u(1)
+ uint8_t transform_8x8_mode_flag;
+ uint8_t pic_scaling_matrix_present_flag;
+ uint8_t pps_status_flag;
+
+ //// Keep here with 32-bits aligned
+ uint8_t pic_scaling_list_present_flag[MAX_PIC_LIST_NUM];
+
+ qm_matrix_set pps_qm;
+
+ uint8_t ScalingList4x4[6][16];
+ uint8_t ScalingList8x8[2][64];
+ uint8_t UseDefaultScalingMatrix4x4Flag[6+2];
+ uint8_t UseDefaultScalingMatrix8x8Flag[6+2];
+
+} pic_param_set, *pic_param_set_ptr, h264_PicParameterSet_t;
+
+typedef union _list_reordering_num_t
+{
+ int32_t abs_diff_pic_num_minus1;
+ int32_t long_term_pic_num;
+} list_reordering_num_t;
+
+typedef struct _h264_Ref_Pic_List_Reordering ////size = 8*33+ 1 + 33
+{
+ list_reordering_num_t list_reordering_num[MAX_NUM_REF_FRAMES+1];
+
+ uint8_t ref_pic_list_reordering_flag;
+ uint8_t reordering_of_pic_nums_idc[MAX_NUM_REF_FRAMES+1]; //UE
+
+}h264_Ref_Pic_List_Reordering_t;
+
+typedef enum _H264_DANGLING_TYPE
+{
+ DANGLING_TYPE_LAST_FIELD,
+ DANGLING_TYPE_DPB_RESET,
+ DANGLING_TYPE_FIELD,
+ DANGLING_TYPE_FRAME,
+ DANGLING_TYPE_GAP_IN_FRAME
+
+} H264_DANGLING_TYPE;
+
+
+typedef struct _h264_Dec_Ref_Pic_Marking //size = 17*4*2 + 17*3 + 4 + 1
+{
+ int32_t difference_of_pic_num_minus1[NUM_MMCO_OPERATIONS];
+ int32_t long_term_pic_num[NUM_MMCO_OPERATIONS];
+
+ /// MMCO
+ uint8_t memory_management_control_operation[NUM_MMCO_OPERATIONS];
+ uint8_t max_long_term_frame_idx_plus1[NUM_MMCO_OPERATIONS];
+ uint8_t long_term_frame_idx[NUM_MMCO_OPERATIONS];
+ uint8_t long_term_reference_flag;
+
+ uint8_t adaptive_ref_pic_marking_mode_flag;
+ uint8_t dec_ref_pic_marking_count;
+ uint8_t no_output_of_prior_pics_flag;
+
+ uint8_t pad;
+}h264_Dec_Ref_Pic_Marking_t;
+
+
+
+typedef struct old_slice_par
+{
+ int32_t frame_num;
+ int32_t pic_order_cnt_lsb;
+ int32_t delta_pic_order_cnt_bottom;
+ int32_t delta_pic_order_cnt[2];
+
+ uint8_t field_pic_flag;
+ uint8_t bottom_field_flag;
+ uint8_t nal_ref_idc;
+ uint8_t structure;
+
+ uint8_t idr_flag;
+ uint8_t idr_pic_id;
+ uint8_t pic_parameter_id;
+ uint8_t status;
+} OldSliceParams;
+
+#ifdef VBP
+typedef struct _h264__pred_weight_table
+{
+ uint8_t luma_log2_weight_denom;
+ uint8_t chroma_log2_weight_denom;
+ uint8_t luma_weight_l0_flag;
+ int16_t luma_weight_l0[32];
+ int8_t luma_offset_l0[32];
+ uint8_t chroma_weight_l0_flag;
+ int16_t chroma_weight_l0[32][2];
+ int8_t chroma_offset_l0[32][2];
+
+ uint8_t luma_weight_l1_flag;
+ int16_t luma_weight_l1[32];
+ int8_t luma_offset_l1[32];
+ uint8_t chroma_weight_l1_flag;
+ int16_t chroma_weight_l1[32][2];
+ int8_t chroma_offset_l1[32][2];
+} h264_pred_weight_table;
+#endif
+
+typedef struct _h264_Slice_Header
+{
+ int32_t first_mb_in_slice; //UE
+ int32_t frame_num; //UV
+ int32_t pic_order_cnt_lsb; //UV
+ int32_t delta_pic_order_cnt_bottom; //SE
+ int32_t delta_pic_order_cnt[2]; //SE
+ int32_t redundant_pic_cnt; //UE
+
+ uint32_t num_ref_idx_l0_active; //UE
+ uint32_t num_ref_idx_l1_active; //UE
+
+ int32_t slice_qp_delta; //SE
+ int32_t slice_qs_delta; //SE
+ int32_t slice_alpha_c0_offset_div2; //SE
+ int32_t slice_beta_offset_div2; //SE
+ int32_t slice_group_change_cycle; //UV
+
+#ifdef VBP
+ h264_pred_weight_table sh_predwttbl;
+#endif
+
+ ///// Flags or IDs
+ //h264_ptype_t slice_type; //UE
+ uint8_t slice_type;
+ uint8_t nal_ref_idc;
+ uint8_t structure;
+ uint8_t pic_parameter_id; //UE
+
+ uint8_t field_pic_flag;
+ uint8_t bottom_field_flag;
+ uint8_t idr_flag; //UE
+ uint8_t idr_pic_id; //UE
+
+ uint8_t sh_error;
+ uint8_t cabac_init_idc; //UE
+ uint8_t sp_for_switch_flag;
+ uint8_t disable_deblocking_filter_idc; //UE
+
+ uint8_t direct_spatial_mv_pred_flag;
+ uint8_t num_ref_idx_active_override_flag;
+ int16_t current_slice_nr;
+
+ //// For Ref list reordering
+ h264_Dec_Ref_Pic_Marking_t sh_dec_refpic;
+ h264_Ref_Pic_List_Reordering_t sh_refpic_l0;
+ h264_Ref_Pic_List_Reordering_t sh_refpic_l1;
+
+} h264_Slice_Header_t;
+
+
+#define MAX_USER_DATA_SIZE 1024
+typedef struct _h264_user_data_t
+{
+ h264_sei_payloadtype user_data_type;
+
+ int32_t user_data_id;
+ int32_t dsn;
+ int32_t user_data_size;
+ int32_t user_data[MAX_USER_DATA_SIZE>>2];
+} h264_user_data_t;
+
+// SPS DISPLAY parameters: seq_param_set_disp, *seq_param_set_disp_ptr;
+typedef struct _SPS_DISP
+{
+ ///// VUI info
+ vui_seq_parameters_t_used vui_seq_parameters; //size =
+
+ ///// Resolution
+ int16_t pic_width_in_mbs_minus1;
+ int16_t pic_height_in_map_units_minus1;
+
+ ///// Cropping
+ int16_t frame_crop_rect_left_offset;
+ int16_t frame_crop_rect_right_offset;
+
+ int16_t frame_crop_rect_top_offset;
+ int16_t frame_crop_rect_bottom_offset;
+
+ ///// Flags
+ uint8_t frame_mbs_only_flag;
+ uint8_t mb_adaptive_frame_field_flag;
+ uint8_t direct_8x8_inference_flag;
+ uint8_t frame_cropping_flag;
+
+ uint16_t vui_parameters_present_flag;
+ uint16_t chroma_format_idc;
+} seq_param_set_disp, *seq_param_set_disp_ptr;
+
+
+////SPS: seq_param_set, *seq_param_set_ptr;
+
+typedef struct _SPS_PAR_USED
+{
+ uint32_t is_updated;
+
+ /////////// Required for display section //////////////////////////
+ seq_param_set_disp sps_disp;
+
+ int32_t expectedDeltaPerPOCCycle;
+ int32_t offset_for_non_ref_pic; // se(v), -2^31 to (2^31)-1, 32-bit integer
+ int32_t offset_for_top_to_bottom_field; // se(v), -2^31 to (2^31)-1, 32-bit integer
+
+ /////////// IDC
+ uint8_t profile_idc; // u(8), 0x77 for MP
+ uint8_t constraint_set_flags; // bit 0 to 3 for set0 to set3
+ uint8_t level_idc; // u(8)
+ uint8_t seq_parameter_set_id; // ue(v), 0 to 31
+
+
+ uint8_t pic_order_cnt_type; // ue(v), 0 to 2
+ uint8_t log2_max_frame_num_minus4; // ue(v), 0 to 12
+ uint8_t log2_max_pic_order_cnt_lsb_minus4; // ue(v), 0 to 12
+ uint8_t num_ref_frames_in_pic_order_cnt_cycle; // ue(v), 0 to 255
+
+ //int32_t offset_for_ref_frame[MAX_NUM_REF_FRAMES_IN_PIC_ORDER_CNT_CYCLE]; // se(v), -2^31 to (2^31)-1, 32-bit integer
+ uint8_t num_ref_frames; // ue(v), 0 to 16,
+ uint8_t gaps_in_frame_num_value_allowed_flag; // u(1)
+ // This is my addition, we should calculate this once and leave it with the sps
+ // as opposed to calculating it each time in h264_hdr_decoding_POC()
+
+ uint8_t delta_pic_order_always_zero_flag; // u(1)
+ uint8_t residual_colour_transform_flag;
+
+ uint8_t bit_depth_luma_minus8;
+ uint8_t bit_depth_chroma_minus8;
+ uint8_t lossless_qpprime_y_zero_flag;
+ uint8_t seq_scaling_matrix_present_flag;
+
+ uint8_t seq_scaling_list_present_flag[MAX_PIC_LIST_NUM]; //0-7
+
+ //// Combine the scaling matrix to word ( 24 + 32)
+ uint8_t ScalingList4x4[6][16];
+ uint8_t ScalingList8x8[2][64];
+ uint8_t UseDefaultScalingMatrix4x4Flag[6];
+ uint8_t UseDefaultScalingMatrix8x8Flag[6];
+
+} seq_param_set_used, *seq_param_set_used_ptr;
+
+
+typedef struct _SPS_PAR_ALL
+{
+
+ seq_param_set_used sps_par_used;
+ vui_seq_parameters_t_not_used sps_vui_par_not_used;
+
+}seq_param_set_all, *seq_param_set_all_ptr;
+
+
+///// Image control parameter////////////
+typedef struct _h264_img_par
+{
+ int32_t frame_num; // decoding num of current frame
+ int32_t frame_count; // count of decoded frames
+ int32_t current_slice_num;
+ int32_t gaps_in_frame_num;
+
+ // POC decoding
+ int32_t num_ref_frames_in_pic_order_cnt_cycle;
+ int32_t delta_pic_order_always_zero_flag;
+ int32_t offset_for_non_ref_pic;
+ int32_t offset_for_top_to_bottom_field;
+
+ int32_t pic_order_cnt_lsb;
+ int32_t pic_order_cnt_msb;
+ int32_t delta_pic_order_cnt_bottom;
+ int32_t delta_pic_order_cnt[2];
+
+ int32_t PicOrderCntMsb;
+ int32_t CurrPicOrderCntMsb;
+ int32_t PrevPicOrderCntLsb;
+
+ int32_t FrameNumOffset;
+
+ int32_t PreviousFrameNum;
+ int32_t PreviousFrameNumOffset;
+
+ int32_t toppoc;
+ int32_t bottompoc;
+ int32_t framepoc;
+ int32_t ThisPOC;
+
+ //int32_t sei_freeze_this_image;
+
+ ///////////////////// Resolutions
+ int32_t PicWidthInMbs;
+ int32_t FrameHeightInMbs;
+
+ ///////////////////// MMCO
+ uint8_t last_has_mmco_5;
+ uint8_t curr_has_mmco_5;
+
+ /////////////////// Flags
+ uint8_t g_new_frame;
+ uint8_t g_new_pic;
+
+ uint8_t structure;
+ uint8_t second_field; // Set to one if this is the second field of a set of paired fields...
+ uint8_t field_pic_flag;
+ uint8_t last_pic_bottom_field;
+
+ uint8_t bottom_field_flag;
+ uint8_t MbaffFrameFlag;
+ uint8_t no_output_of_prior_pics_flag;
+ uint8_t long_term_reference_flag;
+
+ uint8_t skip_this_pic;
+ uint8_t pic_order_cnt_type;
+ // Recovery
+ uint8_t recovery_point_found;
+ uint8_t used_for_reference;
+} h264_img_par;
+
+
+typedef struct _h264_slice_reg_data
+{
+ uint32_t h264_bsd_slice_p1; // 0x150
+ //uint32_t h264_mpr_list0[8]; // from 0x380 to 0x3BC
+ uint32_t h264_bsd_slice_p2; // 0x154
+ uint32_t h264_bsd_slice_start; // 0x158
+
+} h264_slice_data;
+
+
+typedef struct _h264_pic_data
+{
+ uint32_t h264_dpb_init; // 0x40
+ //info For current pic
+ uint32_t h264_cur_bsd_img_init; // 0x140
+ uint32_t h264_cur_mpr_tf_poc; // 0x300
+ uint32_t h264_cur_mpr_bf_poc; // 0x304
+
+	//info For frames in DPB
+ //uint32_t h264_dpb_bsd_img_init[16]; //0x140
+ //uint32_t h264_dpb_mpr_tf_poc[16]; // 0x300
+ //uint32_t h264_dpb_mpr_bf_poc[16]; // 0x304
+} h264_pic_data;
+
+enum h264_workload_item_type
+{
+ VIDDEC_WORKLOAD_H264_SLICE_REG = VIDDEC_WORKLOAD_DECODER_SPECIFIC,
+ VIDDEC_WORKLOAD_H264_PIC_REG,
+ VIDDEC_WORKLOAD_H264_DPB_FRAME_POC,
+ VIDDEC_WORKLOAD_H264_SH_BITS_OFFSET,
+ VIDDEC_WORKLOAD_H264_PWT_BITS_OFFSET,
+ VIDDEC_WORKLOAD_H264_PWT_ES_BYTES,
+ VIDDEC_WORKLOAD_H264_SCALING_MATRIX,
+ VIDDEC_WORKLOAD_H264_DEBUG
+};
+
+
+
+////////////////////////////////////////////
+/* Full Info set*/
+////////////////////////////////////////////
+typedef struct _h264_Info
+{
+
+ h264_DecodedPictureBuffer dpb;
+
+ //// Structures
+	//// need to guarantee active_SPS and active_PPS start from a 4-byte-aligned address
+ seq_param_set_used active_SPS;
+ pic_param_set active_PPS;
+
+
+ h264_Slice_Header_t SliceHeader;
+ OldSliceParams old_slice;
+ sei_info sei_information;
+
+ h264_img_par img;
+
+ uint32_t SPS_PADDR_GL;
+ uint32_t PPS_PADDR_GL;
+ uint32_t OFFSET_REF_FRAME_PADDR_GL;
+ uint32_t TMP_OFFSET_REFFRM_PADDR_GL;
+
+ uint32_t h264_list_replacement;
+
+ uint32_t h264_pwt_start_byte_offset;
+ uint32_t h264_pwt_start_bit_offset;
+ uint32_t h264_pwt_end_byte_offset;
+ uint32_t h264_pwt_end_bit_offset;
+ uint32_t h264_pwt_enabled;
+
+ uint32_t sps_valid;
+
+ uint8_t slice_ref_list0[32];
+ uint8_t slice_ref_list1[32];
+
+
+ uint8_t qm_present_list;
+ //h264_NAL_Unit_t
+ uint8_t nal_unit_type;
+ uint8_t old_nal_unit_type;
+ uint8_t got_start;
+
+ //workload
+ uint8_t push_to_cur;
+ uint8_t Is_first_frame_in_stream;
+ uint8_t Is_SPS_updated;
+ uint8_t number_of_first_au_info_nal_before_first_slice;
+
+ uint8_t is_frame_boundary_detected_by_non_slice_nal;
+ uint8_t is_frame_boundary_detected_by_slice_nal;
+ uint8_t is_current_workload_done;
+ uint8_t primary_pic_type_plus_one; //AUD---[0,7]
+
+ //Error handling
+ uint8_t sei_rp_received;
+ uint8_t last_I_frame_idc;
+ uint8_t sei_b_state_ready;
+ uint8_t gop_err_flag;
+
+
+ uint32_t wl_err_curr;
+ uint32_t wl_err_next;
+
+} h264_Info;
+
+
+
+struct h264_viddec_parser
+{
+ uint32_t sps_pps_ddr_paddr;
+ h264_Info info;
+};
+
+
+
+
+
+#endif //_H264_H_
+
+
diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/include/h264parse.h b/mix_vbp/viddec_fw/fw/codecs/h264/include/h264parse.h
new file mode 100644
index 0000000..c255980
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/h264/include/h264parse.h
@@ -0,0 +1,172 @@
+#ifndef __H264PARSE_H_
+#define __H264PARSE_H_
+
+#include "h264.h"
+
+#ifndef MFD_FIRMWARE
+#define true 1
+#define false 0
+#endif
+
+////////////////////////////////////////////////////////////////////
+// The following part is only for Parser Debug
+///////////////////////////////////////////////////////////////////
+
+
+
+enum h264_debug_point_id
+{
+ WARNING_H264_GENERAL = 0xff000000,
+ WARNING_H264_DPB,
+ WARNING_H264_REFLIST,
+ WARNING_H264_SPS,
+ WARNING_H264_PPS,
+ WARNING_H264_SEI,
+ WARNING_H264_VCL,
+
+ ERROR_H264_GENERAL = 0xffff0000,
+ ERROR_H264_DPB,
+ ERROR_H264_REFLIST,
+ ERROR_H264_SPS,
+ ERROR_H264_PPS,
+ ERROR_H264_SEI,
+ ERROR_H264_VCL
+};
+
+static inline void MFD_PARSER_DEBUG(int debug_point_id)
+{
+#ifdef H264_MFD_DEBUG
+
+ int p1,p2,p3,p4,p5,p6;
+
+ p1 = 0x0BAD;
+ p2 = 0xC0DE;
+ p3 = debug_point_id;
+ p4=p5=p6 = 0;
+
+ DEBUG_WRITE(p1,p2,p3,p4,p5,p6);
+#endif
+
+ debug_point_id = debug_point_id;
+
+ return;
+}
+
+
+
+
+////////////////////////////////////////////////////////////////////
+///////////////////////////// Init functions
+////////////////////////////////////////////////////////////////////
+extern void h264_init_old_slice(h264_Info* pInfo);
+extern void h264_init_img(h264_Info* pInfo);
+extern void h264_init_Info(h264_Info* pInfo);
+extern void h264_init_Info_under_sps_pps_level(h264_Info* pInfo);
+extern void h264_init_sps_pps(struct h264_viddec_parser* parser, uint32_t *persist_mem);
+
+extern void h264_update_old_slice(h264_Info * pInfo,h264_Slice_Header_t next_SliceHeader);
+extern void h264_sei_stream_initialise (h264_Info* pInfo);
+extern void h264_update_img_info(h264_Info * pInfo );
+extern void h264_update_frame_type(h264_Info * pInfo );
+
+extern int32_t h264_check_previous_frame_end(h264_Info * pInfo);
+
+
+////////////////////////////////////////////////////////////////////
+///////////////////////////// bsd functions
+////////////////////////////////////////////////////////////////////
+extern uint8_t h264_More_RBSP_Data(void *parent, h264_Info * pInfo);
+////// VLE and bit operation
+extern uint32_t h264_get_codeNum(void *parent,h264_Info* pInfo);
+extern int32_t h264_GetVLCElement(void *parent,h264_Info* pInfo, uint8_t bIsSigned);
+
+
+
+////////////////////////////////////////////////////////////////////
+///////////////////////////// parse functions
+////////////////////////////////////////////////////////////////////
+
+//NAL
+extern h264_Status h264_Parse_NAL_Unit(void *parent, h264_Info* pInfo, uint8_t *nal_ref_idc);
+
+////// Slice header
+extern h264_Status h264_Parse_Slice_Layer_Without_Partitioning_RBSP(void *parent, h264_Info* pInfo, h264_Slice_Header_t *SliceHeader);
+extern h264_Status h264_Parse_Slice_Header_1(void *parent, h264_Info* pInfo, h264_Slice_Header_t *SliceHeader);
+extern h264_Status h264_Parse_Slice_Header_2(void *parent, h264_Info* pInfo, h264_Slice_Header_t *SliceHeader);
+extern h264_Status h264_Parse_Slice_Header_3(void *parent, h264_Info* pInfo, h264_Slice_Header_t *SliceHeader);
+
+
+////// SPS
+extern h264_Status h264_Parse_SeqParameterSet(void *parent, h264_Info * pInfo,seq_param_set_used_ptr SPS, vui_seq_parameters_t_not_used_ptr pVUI_Seq_Not_Used, int32_t* pOffset_ref_frame);
+//extern h264_Status h264_Parse_SeqParameterSet_Extension(void *parent, h264_Info * pInfo);
+extern h264_Status h264_Parse_PicParameterSet(void *parent, h264_Info * pInfo,h264_PicParameterSet_t* PictureParameterSet);
+
+////// SEI functions
+h264_Status h264_Parse_Supplemental_Enhancement_Information_Message(void *parent,h264_Info* pInfo);
+h264_Status h264_SEI_payload(void *parent, h264_Info* pInfo, h264_sei_payloadtype payloadType, int32_t payloadSize);
+
+//////
+extern h264_Status h264_Scaling_List(void *parent, uint8_t *scalingList, int32_t sizeOfScalingList, uint8_t *UseDefaultScalingMatrix, h264_Info* pInfo);
+extern h264_Status h264_Parse_Ref_Pic_List_Reordering(void *parent,h264_Info* pInfo,h264_Slice_Header_t *SliceHeader);
+extern h264_Status h264_Parse_Pred_Weight_Table(void *parent,h264_Info* pInfo,h264_Slice_Header_t *SliceHeader);
+extern h264_Status h264_Parse_Dec_Ref_Pic_Marking(void *parent,h264_Info* pInfo,h264_Slice_Header_t *SliceHeader);
+
+
+
+////////////////////////////////////////////////////////////////////
+///////////////////////////// utils functions
+////////////////////////////////////////////////////////////////////
+extern int32_t h264_is_new_picture_start(h264_Info* pInfo, h264_Slice_Header_t cur_slice, h264_Slice_Header_t old_slice);
+extern int32_t h264_is_second_field(h264_Info * pInfo);
+///// Math functions
+uint32_t ldiv_mod_u(uint32_t a, uint32_t b, uint32_t * mod);
+uint32_t mult_u(uint32_t var1, uint32_t var2);
+///// Mem functions
+extern void* h264_memset( void* buf, uint32_t c, uint32_t num );
+extern void* h264_memcpy( void* dest, void* src, uint32_t num );
+
+extern void h264_Parse_Copy_Sps_To_DDR(h264_Info* pInfo, seq_param_set_used_ptr SPS, uint32_t nSPSId);
+extern void h264_Parse_Copy_Sps_From_DDR(h264_Info* pInfo, seq_param_set_used_ptr SPS, uint32_t nSPSId);
+
+extern void h264_Parse_Copy_Pps_To_DDR(h264_Info* pInfo, pic_param_set_ptr PPS, uint32_t nPPSId);
+extern void h264_Parse_Copy_Pps_From_DDR(h264_Info* pInfo, pic_param_set_ptr PPS, uint32_t nPPSId);
+
+extern void h264_Parse_Copy_Offset_Ref_Frames_To_DDR(h264_Info* pInfo, int32_t* pOffset_ref_frames, uint32_t nSPSId);
+extern void h264_Parse_Copy_Offset_Ref_Frames_From_DDR(h264_Info* pInfo, int32_t* pOffset_ref_frames, uint32_t nSPSId);
+extern uint32_t h264_Parse_Check_Sps_Updated_Flag(h264_Info* pInfo, uint32_t nSPSId);
+extern void h264_Parse_Clear_Sps_Updated_Flag(h264_Info* pInfo, uint32_t nSPSId);
+
+
+////////////////////////////////////////////////////////////////////
+///////////////////////////// workload functions
+////////////////////////////////////////////////////////////////////
+
+extern void h264_parse_emit_current_slice( void *parent, h264_Info *pInfo );
+
+extern void h264_parse_emit_current_pic( void *parent, h264_Info *pInfo );
+
+extern void h264_parse_emit_start_new_frame( void *parent, h264_Info *pInfo );
+extern void h264_parse_emit_eos( void *parent, h264_Info *pInfo );
+
+
+
+////////////////////////////////////////////////////////////////////
+///////////////////////////// utils functions outside h264
+////////////////////////////////////////////////////////////////////
+
+extern void *memset(void *s, int32_t c, uint32_t n);
+extern void *memcpy(void *dest, const void *src, uint32_t n);
+extern uint32_t cp_using_dma(uint32_t ddr_addr, uint32_t local_addr, uint32_t size, char to_ddr, char swap);
+extern int32_t viddec_pm_get_bits(void *parent, uint32_t *data, uint32_t num_bits);
+extern int32_t viddec_pm_peek_bits(void *parent, uint32_t *data, uint32_t num_bits);
+
+
+
+////////////////////////////////////////////////////////////////////
+///////////////////////////// Second level parse functions
+////////////////////////////////////////////////////////////////////
+
+#endif ////__H264PARSE_H_
+
+
+
diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/include/h264parse_dpb.h b/mix_vbp/viddec_fw/fw/codecs/h264/include/h264parse_dpb.h
new file mode 100644
index 0000000..2a19b5f
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/h264/include/h264parse_dpb.h
@@ -0,0 +1,107 @@
+/* ///////////////////////////////////////////////////////////////////////
+//
+// INTEL CORPORATION PROPRIETARY INFORMATION
+// This software is supplied under the terms of a license agreement or
+// nondisclosure agreement with Intel Corporation and may not be copied
+// or disclosed except in accordance with the terms of that agreement.
+// Copyright (c) 2001-2006 Intel Corporation. All Rights Reserved.
+//
+// Description: H.264 DPB (decoded picture buffer) control header.
+//
+*/
+
+
+#ifndef _H264_DPB_CTL_H_
+#define _H264_DPB_CTL_H_
+
+
+#include "h264.h"
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+////////////////////////////////////////////////////////////////////
+///////////////////////////// Parser control functions
+////////////////////////////////////////////////////////////////////
+
+///// Reference list
+extern void h264_dpb_update_ref_lists(h264_Info * pInfo);
+extern void h264_dpb_reorder_lists(h264_Info * pInfo);
+
+extern void h264_dpb_insert_ref_lists(h264_DecodedPictureBuffer * p_dpb,int32_t NonExisting);
+
+///// POC
+extern void h264_hdr_decoding_poc (h264_Info * pInfo,int32_t NonExisting, int32_t frame_num);
+extern void h264_hdr_post_poc(h264_Info* pInfo,int32_t NonExisting, int32_t frame_num, int32_t use_old);
+
+///// DPB buffer management
+extern void h264_init_dpb(h264_DecodedPictureBuffer * p_dpb);
+
+extern void h264_dpb_unmark_for_reference(h264_DecodedPictureBuffer *p_dpb, int32_t fs_idc);
+extern void h264_dpb_unmark_for_long_term_reference(h264_DecodedPictureBuffer *p_dpb, int32_t fs_idc);
+extern void h264_dpb_unmark_long_term_frame_for_reference_by_frame_idx(h264_DecodedPictureBuffer *p_dpb, int32_t long_term_frame_idx);
+extern void h264_dpb_unmark_long_term_field_for_reference_by_frame_idx(h264_DecodedPictureBuffer *p_dpb, int32_t long_term_frame_idx, int32_t fs_idc, int32_t polarity);
+extern void h264_dpb_mark_pic_long_term(h264_Info * pInfo, int32_t long_term_frame_idx, int32_t picNumX);
+extern void h264_dpb_mark_dangling_field(h264_DecodedPictureBuffer *p_dpb, int32_t fs_idc);
+
+extern void h264_dpb_update_queue_dangling_field(h264_Info * pInfo);
+extern void h264_dpb_is_used_for_reference(int32_t * flag);
+
+
+extern void h264_dpb_set_active_fs(h264_DecodedPictureBuffer * p_dpb,int32_t index);
+extern void h264_dpb_flush_dpb (h264_Info * pInfo,int32_t output_all, int32_t keep_complement, int32_t num_ref_frames);
+
+extern void h264_dpb_idr_memory_management (h264_Info * pInfo,
+ seq_param_set_used_ptr active_sps,
+ int32_t no_output_of_prior_pics_flag);
+
+extern void h264_dpb_init_frame_store(h264_Info * pInfo);
+extern void h264_dpb_reset_dpb(h264_Info * pInfo,int32_t PicWidthInMbs, int32_t FrameHeightInMbs,
+ int32_t SizeChange, int32_t no_output_of_prior_pics_flag);
+
+extern void h264_dpb_gaps_in_frame_num_mem_management(h264_Info * pInfo);
+
+extern int32_t h264_dpb_assign_frame_store(h264_Info * pInfo, int32_t NonExisting);
+
+extern void h264_dpb_get_smallest_poc(h264_DecodedPictureBuffer *p_dpb, int32_t *poc, int32_t *pos);
+extern void h264_dpb_remove_unused_frame_from_dpb(h264_DecodedPictureBuffer *p_dpb, int32_t * flag);
+
+extern void h264_dpb_sliding_window_memory_management(h264_DecodedPictureBuffer *p_dpb,
+ int32_t NonExisting,
+ int32_t num_ref_frames);
+extern int32_t h264_dpb_queue_update(h264_Info * pInfo,
+ int32_t push,
+ int32_t direct,
+ int32_t frame_request,
+ int32_t num_ref_frames);
+
+extern void h264_dpb_split_field (h264_Info * pInfo);
+extern void h264_dpb_combine_field(int32_t use_old);
+
+extern void h264_dpb_insert_picture_in_dpb(h264_Info * pInfo,
+ int32_t used_for_reference,
+ int32_t add2dpb,
+ int32_t NonExisting,
+ int32_t use_old);
+
+extern void h264_dpb_store_previous_picture_in_dpb(h264_Info * pInfo,
+ int32_t NonExisting,
+ int32_t use_old);
+
+extern void h264_dpb_adaptive_memory_management (h264_Info * pInfo);
+
+extern int32_t h264_dpb_output_one_frame_from_dpb(h264_Info* pInfo,
+ int32_t direct, int32_t request, int32_t num_ref_frames);
+
+extern void h264_dpb_remove_frame_from_dpb(h264_DecodedPictureBuffer *p_dpb, int32_t idx);
+extern void h264_dpb_frame_output(h264_Info * pInfo,int32_t fs_idc, int32_t direct, int32_t * existing);
+
+//////////////////////////////////////////////////////////// Globals
+extern frame_store *active_fs;
+
+
+
+#endif //_H264_DPB_CTL_H_
+
+
diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/include/h264parse_sei.h b/mix_vbp/viddec_fw/fw/codecs/h264/include/h264parse_sei.h
new file mode 100644
index 0000000..e5903cd
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/h264/include/h264parse_sei.h
@@ -0,0 +1,314 @@
+/* ///////////////////////////////////////////////////////////////////////
+//
+// INTEL CORPORATION PROPRIETARY INFORMATION
+// This software is supplied under the terms of a license agreement or
+// nondisclosure agreement with Intel Corporation and may not be copied
+// or disclosed except in accordance with the terms of that agreement.
+// Copyright (c) 2001-2006 Intel Corporation. All Rights Reserved.
+//
+// Description: H.264 SEI (supplemental enhancement information) header.
+//
+*/
+
+
+#ifndef _H264_SEI_H_
+#define _H264_SEI_H_
+
+#include "h264.h"
+
+
+//defines for SEI
+#define MAX_CPB_CNT 32
+#define MAX_NUM_CLOCK_TS 3
+#define MAX_PAN_SCAN_CNT 3
+#define MAX_NUM_SPARE_PICS 16
+#define MAX_SUB_SEQ_LAYERS 256
+#define MAX_SLICE_GRPS 1 // for high profile
+#define NUM_SPS 32
+#define MAX_NUM_REF_SUBSEQS 256
+
+
+#define SEI_SCAN_FORMAT_INTERLACED 0x1
+#define SEI_SCAN_FORMAT_PROGRESSIVE 0x3
+#define SEI_SCAN_FORMAT_VALID(r) (r&0x1)
+#define SEI_SCAN_FORMAT(r) ((r&0x2)>>1)
+
+typedef enum
+{
+ SEI_BUF_PERIOD = 0,
+ SEI_PIC_TIMING,
+ SEI_PAN_SCAN,
+ SEI_FILLER_PAYLOAD,
+ SEI_REG_USERDATA,
+ SEI_UNREG_USERDATA,
+ SEI_RECOVERY_POINT,
+ SEI_DEC_REF_PIC_MARKING_REP,
+ SEI_SPARE_PIC,
+ SEI_SCENE_INFO,
+ SEI_SUB_SEQ_INFO,
+ SEI_SUB_SEQ_LAYER,
+ SEI_SUB_SEQ,
+ SEI_FULL_FRAME_FREEZE,
+ SEI_FULL_FRAME_FREEZE_RELEASE,
+ SEI_FULL_FRAME_SNAPSHOT,
+ SEI_PROGRESSIVE_SEGMENT_START,
+ SEI_PROGRESSIVE_SEGMENT_END,
+ SEI_MOTION_CONSTRAINED_SLICE_GRP_SET,
+ SEI_FILM_GRAIN_CHARACTERISTICS,
+ SEI_DEBLK_FILTER_DISPLAY_PREFERENCE,
+ SEI_STEREO_VIDEO_INFO,
+ SEI_RESERVED,
+}h264_sei_payloadtype;
+
+
+
+typedef struct _h264_SEI_buffering_period
+{
+ int32_t seq_param_set_id;
+ int32_t initial_cpb_removal_delay_nal;
+ int32_t initial_cpb_removal_delay_offset_nal;
+ int32_t initial_cpb_removal_delay_vcl;
+ int32_t initial_cpb_removal_delay_offset_vcl;
+
+}h264_SEI_buffering_period_t;
+
+typedef struct _h264_SEI_pic_timing
+{
+ int32_t cpb_removal_delay;
+ int32_t dpb_output_delay;
+ int32_t pic_struct;
+}h264_SEI_pic_timing_t;
+
+#if 0
+int32_t clock_timestamp_flag[MAX_NUM_CLOCK_TS];
+int32_t ct_type[MAX_NUM_CLOCK_TS];
+int32_t nuit_field_based_flag[MAX_NUM_CLOCK_TS];
+int32_t counting_type[MAX_NUM_CLOCK_TS];
+int32_t full_timestamp_flag[MAX_NUM_CLOCK_TS];
+int32_t discontinuity_flag[MAX_NUM_CLOCK_TS];
+int32_t cnt_dropped_flag[MAX_NUM_CLOCK_TS];
+int32_t n_frames[MAX_NUM_CLOCK_TS];
+int32_t seconds_value[MAX_NUM_CLOCK_TS];
+int32_t minutes_value[MAX_NUM_CLOCK_TS];
+int32_t hours_value[MAX_NUM_CLOCK_TS];
+int32_t seconds_flag[MAX_NUM_CLOCK_TS];
+int32_t minutes_flag[MAX_NUM_CLOCK_TS];
+int32_t hours_flag[MAX_NUM_CLOCK_TS];
+int32_t time_offset[MAX_NUM_CLOCK_TS];
+
+#endif
+
+typedef struct _h264_SEI_pan_scan_rectangle
+{
+ int32_t pan_scan_rect_id;
+ int32_t pan_scan_rect_cancel_flag;
+ int32_t pan_scan_cnt_minus1;
+ int32_t pan_scan_rect_left_offset[MAX_PAN_SCAN_CNT];
+ int32_t pan_scan_rect_right_offset[MAX_PAN_SCAN_CNT];
+ int32_t pan_scan_rect_top_offset[MAX_PAN_SCAN_CNT];
+ int32_t pan_scan_rect_bottom_offset[MAX_PAN_SCAN_CNT];
+ int32_t pan_scan_rect_repetition_period;
+}h264_SEI_pan_scan_rectangle_t;
+
+typedef struct _h264_SEI_filler_payload
+{
+ int32_t ff_byte;
+}h264_SEI_filler_payload_t;
+
+typedef struct _h264_SEI_userdata_registered
+{
+ int32_t itu_t_t35_country_code;
+ int32_t itu_t_t35_country_code_extension_byte;
+ int32_t itu_t_t35_payload_byte;
+}h264_SEI_userdata_registered_t;
+
+typedef struct _h264_SEI_userdata_unregistered
+{
+ int32_t uuid_iso_iec_11578[4];
+ int32_t user_data_payload_byte;
+}h264_SEI_userdata_unregistered_t;
+
+typedef struct _h264_SEI_recovery_point
+{
+ int32_t recovery_frame_cnt;
+ int32_t exact_match_flag;
+ int32_t broken_link_flag;
+ int32_t changing_slice_group_idc;
+}h264_SEI_recovery_point_t;
+
+typedef struct _h264_SEI_decoded_ref_pic_marking_repetition
+{
+ int32_t original_idr_flag;
+ int32_t original_frame_num;
+ int32_t orignal_field_pic_flag;
+ int32_t original_bottom_field_pic_flag;
+ int32_t no_output_of_prior_pics_flag;
+ int32_t long_term_reference_flag;
+ int32_t adaptive_ref_pic_marking_mode_flag;
+ int32_t memory_management_control_operation; //UE
+ int32_t difference_of_pics_num_minus1; //UE
+ int32_t long_term_pic_num; //UE
+ int32_t long_term_frame_idx; //UE
+ int32_t max_long_term_frame_idx_plus1; //UE
+}h264_SEI_decoded_ref_pic_marking_repetition_t;
+
+typedef struct _h264_SEI_spare_picture
+{
+ int32_t target_frame_num;
+ int32_t spare_field_flag;
+ int32_t target_bottom_field_flag;
+ int32_t num_spare_pics_minus1;
+ int32_t delta_spare_frame_num[MAX_NUM_SPARE_PICS];
+ int32_t spare_bottom_field_flag[MAX_NUM_SPARE_PICS];
+ int32_t spare_area_idc[MAX_NUM_SPARE_PICS]; // not complete
+}h264_SEI_spare_picture_t;
+
+typedef struct _h264_SEI_scene_info
+{
+ int32_t scene_info_present_flag;
+ int32_t scene_id;
+ int32_t scene_transitioning_type;
+ int32_t second_scene_id;
+}h264_SEI_scene_info_t;
+
+typedef struct _h264_SEI_sub_sequence_info
+{
+ int32_t sub_seq_layer_num;
+ int32_t sub_seq_id;
+ int32_t first_ref_pic_flag;
+ int32_t leading_non_ref_pic_flag;
+ int32_t last_pic_flag;
+ int32_t sub_seq_frame_num_flag;
+ int32_t sub_seq_frame_num;
+}h264_SEI_sub_sequence_info_t;
+
+typedef struct _h264_SEI_sub_sequence_layer
+{
+ int32_t num_sub_seq_layers_minus1;
+ int32_t accurate_statistics_flag[MAX_SUB_SEQ_LAYERS];
+ int32_t average_bit_rate[MAX_SUB_SEQ_LAYERS];
+ int32_t average_frame_rate[MAX_SUB_SEQ_LAYERS];
+}h264_SEI_sub_sequence_layer_t;
+
+typedef struct _h264_SEI_sub_sequence
+{
+ int32_t sub_seq_layer_num;
+ int32_t sub_seq_id;
+ int32_t duration_flag;
+ int32_t sub_seq_duration;
+ int32_t average_rate_flag;
+ int32_t average_statistics_flag;
+ int32_t average_bit_rate;
+ int32_t average_frame_rate;
+ int32_t num_referenced_subseqs;
+ int32_t ref_sub_seq_layer_num;
+ int32_t ref_sub_seq_id;
+ int32_t ref_sub_seq_direction;
+}h264_SEI_sub_sequence_t;
+
+typedef struct _h264_SEI_full_frame_freeze
+{
+ int32_t full_frame_freeze_repetition_period;
+}h264_SEI_full_frame_freeze_t;
+
+typedef struct _h264_SEI_full_frame_snapshot
+{
+ int32_t snapshot_id;
+}h264_SEI_full_frame_snapshot_t;
+
+typedef struct _h264_SEI_progressive_segment_start
+{
+ int32_t progressive_refinement_id;
+ int32_t num_refinement_steps_minus1;
+}h264_SEI_progressive_segment_start_t;
+
+typedef struct _h264_SEI_progressive_segment_end
+{
+ int32_t progressive_refinement_id;
+}h264_SEI_progressive_segment_end_t;
+
+typedef struct _h264_SEI_motion_constrained_slice_group
+{
+ int32_t num_slice_groups_in_set_minus1;
+ int32_t slice_group_id[MAX_SLICE_GRPS];
+ int32_t exact_sample_value_match_flag;
+ int32_t pan_scan_rect_flag;
+ int32_t pan_scan_rect_id;
+}h264_SEI_motion_constrained_slice_group_t;
+
+typedef struct _h264_SEI_deblocking_filter_display_pref
+{
+ int32_t devlocking_display_preference_cancel_flag;
+ int32_t display_prior_to_deblocking_preferred_flag;
+ int32_t dec_frame_buffering_constraint_flag;
+ int32_t deblocking_display_preference_repetition_period;
+}h264_SEI_deblocking_filter_display_pref_t;
+
+typedef struct _h264_SEI_stereo_video_info
+{
+ int32_t field_views_flag;
+ int32_t top_field_is_left_view_flag;
+ int32_t curent_frame_is_left_view_flag;
+ int32_t next_frame_is_second_view_flag;
+ int32_t left_view_self_contained_flag;
+ int32_t right_view_self_contained_flag;
+}h264_SEI_stereo_video_info_t;
+
+typedef struct _h264_SEI_reserved
+{
+ int32_t reserved_sei_message_payload_byte;
+}h264_SEI_reserved_t;
+
+
+////////////////////////////
+// SEI Info
+/////////////////////////////
+
+typedef struct sei_info
+{
+ int32_t recovery_point;
+ int32_t recovery_frame_num;
+
+ int32_t capture_POC;
+ int32_t freeze_POC;
+ int32_t release_POC; // The POC which when reached will allow display update to re-commence
+ int32_t disp_frozen; // Indicates display is currently frozen
+ int32_t freeze_rep_period;
+ int32_t recovery_frame_cnt;
+ int32_t capture_fn;
+ int32_t recovery_fn;
+ int32_t broken_link;
+ int32_t scan_format;
+ int32_t broken_link_pic;
+}sei_info, *sei_info_ptr;
+
+/*typedef struct _h264_SEI
+{
+ h264_SEI_buffering_period_t buf_period;
+ h264_SEI_pic_timing_t pic_timing;
+ h264_SEI_pan_scan_rectangle_t pan_scan_timing;
+ h264_SEI_filler_payload_t filler_payload;
+ h264_SEI_userdata_registered_t userdata_reg;
+ h264_SEI_userdata_unregistered_t userdata_unreg;
+ h264_SEI_recovery_point_t recovery_point;
+ h264_SEI_decoded_ref_pic_marking_repetition_t dec_ref_pic_marking_rep;
+ h264_SEI_spare_picture_t spare_pic;
+ h264_SEI_scene_info_t scene_info;
+ h264_SEI_sub_sequence_info_t sub_sequence_info;
+ h264_SEI_sub_sequence_layer_t sub_sequence_layer;
+ h264_SEI_sub_sequence_t sub_sequence;
+ h264_SEI_full_frame_snapshot_t full_frame_snapshot;
+ h264_SEI_full_frame_t full_frame;
+ h264_SEI_progressive_segment_start_t progressive_segment_start;
+ h264_SEI_progressive_segment_end_t progressive_segment_end;
+ h264_SEI_motion_constrained_slice_group_t motion_constrained_slice_grp;
+ h264_SEI_deblocking_filter_display_pref_t deblk_filter_display_pref;
+ h264_SEI_stereo_video_info_t stereo_video_info;
+ h264_SEI_reserved_t reserved;
+}h264_SEI_t;
+*/
+
+
+#endif //_H264_SEI_H_
+
+
diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse.c
new file mode 100644
index 0000000..a96285d
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse.c
@@ -0,0 +1,786 @@
+/* ///////////////////////////////////////////////////////////////////////
+//
+// INTEL CORPORATION PROPRIETARY INFORMATION
+// This software is supplied under the terms of a license agreement or
+// nondisclosure agreement with Intel Corporation and may not be copied
+// or disclosed except in accordance with the terms of that agreement.
+// Copyright (c) 2001-2006 Intel Corporation. All Rights Reserved.
+//
+// Description: h264 parser
+//
+///////////////////////////////////////////////////////////////////////*/
+
+
+#include "h264.h"
+#include "h264parse.h"
+#include "h264parse_dpb.h"
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+
+
+
+h264_Status h264_Scaling_List(void *parent, uint8_t *scalingList, int32_t sizeOfScalingList, uint8_t *UseDefaultScalingMatrix, h264_Info* pInfo)
+{
+ int32_t j, scanj;
+ int32_t delta_scale, lastScale, nextScale;
+
+#if 0
+ const uint8_t ZZ_SCAN[16] =
+ { 0, 1, 4, 8, 5, 2, 3, 6, 9, 12, 13, 10, 7, 11, 14, 15
+ };
+
+ const uint8_t ZZ_SCAN8[64] =
+ { 0, 1, 8, 16, 9, 2, 3, 10, 17, 24, 32, 25, 18, 11, 4, 5,
+ 12, 19, 26, 33, 40, 48, 41, 34, 27, 20, 13, 6, 7, 14, 21, 28,
+ 35, 42, 49, 56, 57, 50, 43, 36, 29, 22, 15, 23, 30, 37, 44, 51,
+ 58, 59, 52, 45, 38, 31, 39, 46, 53, 60, 61, 54, 47, 55, 62, 63
+ };
+#endif
+
+ lastScale = 8;
+ nextScale = 8;
+ scanj = 0;
+
+ for(j=0; j<sizeOfScalingList; j++)
+ {
+ //scanj = (sizeOfScalingList==16)?ZZ_SCAN[j]:ZZ_SCAN8[j];
+
+ if(nextScale!=0)
+ {
+ delta_scale = h264_GetVLCElement(parent, pInfo, true);
+ nextScale = (lastScale + delta_scale + 256) % 256;
+ *UseDefaultScalingMatrix = (uint8_t) (scanj==0 && nextScale==0);
+ }
+
+ scalingList[scanj] = (nextScale==0) ? lastScale:nextScale;
+ lastScale = scalingList[scanj];
+ scanj ++;
+ }
+
+ return H264_STATUS_OK;
+}
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+
/// Activate the SPS/PPS referenced by this slice and derive the basic
/// picture geometry (PicWidthInMbs / FrameHeightInMbs) from the active SPS.
///
/// The parameter sets are reloaded from DDR when either
///   1) this is the first MB of a slice (possible context switch), or
///   2) the slice references a different PPS id than the active one.
///
/// Returns H264_STATUS_OK, or H264_PPS_INVALID_PIC_ID when the referenced
/// PPS/SPS ids are out of range (invalid or never-initialized sets).
h264_Status h264_active_par_set(h264_Info*pInfo,h264_Slice_Header_t* SliceHeader)
{
   //h264_Slice_Header_t* SliceHeader = &pInfo->SliceHeader;

   ///////////////////////////////////////////////////
   // Reload SPS/PPS while
   //	1) Start of Frame (in case of context switch)
   //	2) PPS id changed
   ///////////////////////////////////////////////////
   if((SliceHeader->first_mb_in_slice == 0) || (SliceHeader->pic_parameter_id != pInfo->active_PPS.pic_parameter_set_id))
   {
#ifndef WIN32
      h264_Parse_Copy_Pps_From_DDR(pInfo, &pInfo->active_PPS, SliceHeader->pic_parameter_id);

      if(pInfo->active_PPS.seq_parameter_set_id >= MAX_NUM_SPS)
      {
         return H264_PPS_INVALID_PIC_ID;  /// Invalid PPS detected
      }

      // SPS id changed: reload the SPS and flag it as updated for callers.
      if(pInfo->active_PPS.seq_parameter_set_id != pInfo->active_SPS.seq_parameter_set_id)
      {
         pInfo->Is_SPS_updated =1;
         h264_Parse_Copy_Sps_From_DDR(pInfo, &pInfo->active_SPS, pInfo->active_PPS.seq_parameter_set_id);
         h264_Parse_Clear_Sps_Updated_Flag(pInfo, pInfo->active_PPS.seq_parameter_set_id);
      }
      else
      {
         // Same SPS id, but its contents may have been rewritten upstream.
         if(h264_Parse_Check_Sps_Updated_Flag(pInfo, pInfo->active_PPS.seq_parameter_set_id))
         {
            pInfo->Is_SPS_updated =1;
            h264_Parse_Copy_Sps_From_DDR(pInfo, &pInfo->active_SPS, pInfo->active_PPS.seq_parameter_set_id);
            h264_Parse_Clear_Sps_Updated_Flag(pInfo, pInfo->active_PPS.seq_parameter_set_id);
         }
      }

#else
      // WIN32 builds keep parameter sets in global arrays instead of DDR.
      pInfo->active_PPS = PPS_GL[SliceHeader->pic_parameter_id];
      pInfo->active_SPS = SPS_GL[pInfo->active_PPS.seq_parameter_set_id];
#endif

      if(pInfo->active_SPS.seq_parameter_set_id >= MAX_NUM_SPS)
      {
         return H264_PPS_INVALID_PIC_ID;  //// Invalid SPS detected
      }
   }
   else {
      if((pInfo->active_PPS.seq_parameter_set_id >= MAX_NUM_SPS) || (pInfo->active_SPS.seq_parameter_set_id >= MAX_NUM_SPS))
      {
         return H264_PPS_INVALID_PIC_ID;  /// Invalid PPS detected
      }
   }

   // Derive picture size in macroblocks; map-unit height doubles when the
   // stream may contain field pictures (frame_mbs_only_flag == 0).
   pInfo->img.PicWidthInMbs = (pInfo->active_SPS.sps_disp.pic_width_in_mbs_minus1 + 1);
   //pInfo->img.PicHeightInMapUnits = (pInfo->active_SPS.sps_disp.pic_height_in_map_units_minus1 + 1);
   pInfo->img.FrameHeightInMbs = pInfo->active_SPS.sps_disp.frame_mbs_only_flag? \
		(pInfo->active_SPS.sps_disp.pic_height_in_map_units_minus1 + 1): \
		((pInfo->active_SPS.sps_disp.pic_height_in_map_units_minus1 + 1)<<1);


   return H264_STATUS_OK;
};   //// End of h264_active_par_set
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+
//////////////////////////////////////////////////
// Parse slice header info
//////////////////////////////////////////////////
// Parses the full slice header (parts 1-3) of a
// slice_layer_without_partitioning RBSP and activates the SPS/PPS the
// slice references. SliceHeader->sh_error accumulates a bitmask of which
// stage failed: 1 = header part 1 / parameter-set activation,
// 2 = part 2, 4 = part 3. Slice data (the MB loop) is NOT parsed here.
h264_Status h264_Parse_Slice_Layer_Without_Partitioning_RBSP(void *parent, h264_Info* pInfo, h264_Slice_Header_t *SliceHeader)
{
   h264_Status retStatus = H264_STATUS_ERROR;

   ////////////////////////////////////////////////////
   //// Parse slice header info
   //// Part1: not depend on the active PPS/SPS
   //// Part2/3: depend on the active parset
   //////////////////////////////////////////////////

   //retStatus = h264_Parse_Slice_Header_1(pInfo);

   SliceHeader->sh_error = 0;

   if(h264_Parse_Slice_Header_1(parent, pInfo, SliceHeader) == H264_STATUS_OK)
   {
      //////////////////////////////////////////
      //// Active parameter set for this slice
      //////////////////////////////////////////
      retStatus = h264_active_par_set(pInfo, SliceHeader);
   }

   if(retStatus == H264_STATUS_OK) {
      switch(pInfo->active_SPS.profile_idc)
      {
         case h264_ProfileBaseline:
         case h264_ProfileMain:
         case h264_ProfileExtended:
            // These profiles do not carry the high-profile PPS extension
            // fields, so force their default values here.
            pInfo->active_PPS.transform_8x8_mode_flag=0;
            pInfo->active_PPS.pic_scaling_matrix_present_flag =0;
            pInfo->active_PPS.second_chroma_qp_index_offset = pInfo->active_PPS.chroma_qp_index_offset;
            /* fallthrough */

         default:
            break;
      }

      if( h264_Parse_Slice_Header_2(parent, pInfo, SliceHeader) != H264_STATUS_OK)
      {
         SliceHeader->sh_error |= 2;
      }
      else if( h264_Parse_Slice_Header_3(parent, pInfo, SliceHeader) != H264_STATUS_OK)
      {
         SliceHeader->sh_error |= 4;
      }

   } else {
      SliceHeader->sh_error |= 1;
   }


   //if(SliceHeader->sh_error) {
      //pInfo->wl_err_flag |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE;
   //}



   //////////////////////////////////
   //// Parse slice data (MB loop)
   //////////////////////////////////
   //retStatus = h264_Parse_Slice_Data(pInfo);
   {
      //uint32_t data = 0;
      //if( viddec_pm_peek_bits(parent, &data, 32) == -1)
         //retStatus = H264_STATUS_ERROR;
   }
   //h264_Parse_rbsp_trailing_bits(pInfo);

   return retStatus;
}
+
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+
// Parse the NAL unit header. Under VBP the start code has already been
// consumed upstream, so only the single NAL header byte is read; otherwise
// the 24-bit start code plus header byte (32 bits total) is read. On
// success *nal_ref_idc and pInfo->nal_unit_type are extracted from the
// header byte; returns H264_STATUS_ERROR when the read fails.
h264_Status h264_Parse_NAL_Unit(void *parent, h264_Info* pInfo, uint8_t *nal_ref_idc)
{
   h264_Status ret = H264_STATUS_ERROR;

   //h264_NAL_Unit_t* NAL = &pInfo->NAL;
   uint32_t code;
#if 0
   // Dead reference code retained from the original implementation.
   viddec_pm_get_bits(void * parent,uint32_t * data,uint32_t num_bits)(parent, &code, 24);
   viddec_pm_get_bits(parent, &code, 1);   //forbidden_zero_bit

   viddec_pm_get_bits(parent, &code, 2);
   SliceHeader->nal_ref_idc = (uint8_t)code;

   viddec_pm_get_bits(parent, &code, 5);
   pInfo->nal_unit_type = (uint8_t)code;
#else
#ifdef VBP
   if( viddec_pm_get_bits(parent, &code,  8) != -1)
#else
   //// 24bit SC, 1 bit: forbidden_zero_bit, 2 bits: nal_ref_idc, 5 bits: nal_unit_type
   if( viddec_pm_get_bits(parent, &code,  32) != -1)
#endif
   {
      *nal_ref_idc = (uint8_t)((code>>5)&0x3);            // bits 6..5 of the header byte
      pInfo->nal_unit_type = (uint8_t)((code>>0)&0x1f);   // bits 4..0 of the header byte
      ret = H264_STATUS_OK;
   }
#endif

   return ret;
}
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+
+/*!
+ ************************************************************************
+ * \brief
+ * set defaults for old_slice
+ * NAL unit of a picture"
+ ************************************************************************
+ */
// Fallback definitions when <limits.h> is unavailable (firmware build).
// BUG FIX: the original definitions were swapped — INT_MAX was given the
// unsigned all-ones value (0xFFFFFFFF, i.e. -1 as a signed 32-bit int) and
// UINT_MAX the signed maximum. That made the h264_init_old_slice() sentinel
// frame_num equal to -1 instead of the intended maximum.
#ifndef INT_MAX
#define INT_MAX 0x7FFFFFFF
#endif

#ifndef UINT_MAX
#define UINT_MAX 0xFFFFFFFF
#endif
+
// Reset pInfo->SliceHeader to sentinel values that cannot match any real
// slice header, so the first slice parsed afterwards is always detected as
// the start of a new picture by h264_is_new_picture_start().
void h264_init_old_slice(h264_Info* pInfo)
{
	pInfo->SliceHeader.field_pic_flag = 0;

	pInfo->SliceHeader.pic_parameter_id = 0xFF;   // out-of-range PPS id

	pInfo->SliceHeader.frame_num = INT_MAX;       // out-of-range frame number

	pInfo->SliceHeader.nal_ref_idc = 0xFF;        // real nal_ref_idc is only 2 bits

	pInfo->SliceHeader.idr_flag = 0;

	// POC fields set out of range for both pic_order_cnt_type 0 and 1.
	pInfo->SliceHeader.pic_order_cnt_lsb = UINT_MAX;
	pInfo->SliceHeader.delta_pic_order_cnt_bottom = INT_MAX;

	pInfo->SliceHeader.delta_pic_order_cnt[0] = INT_MAX;
	pInfo->SliceHeader.delta_pic_order_cnt[1] = INT_MAX;

	return;
}
+
+
+void h264_init_img(h264_Info* pInfo)
+{
+ h264_memset(&(pInfo->img), 0x0, sizeof(h264_img_par) );
+
+
+ return;
+}
+
+
// Lay out the persistent (DDR) storage for all SPS/PPS parameter sets and
// stamp every slot with an invalid seq_parameter_set_id (0xff) so stale
// slots can be detected later.
//
// NOTE(review): persist_mem is a pointer cast to uint32_t, which truncates
// on 64-bit hosts. Presumably the firmware target uses 32-bit addresses —
// confirm before reusing this code elsewhere.
void h264_init_sps_pps(struct h264_viddec_parser* parser, uint32_t *persist_mem)
{
	int32_t i;

	h264_Info * pInfo = &(parser->info);

	parser->sps_pps_ddr_paddr = (uint32_t)persist_mem;

	// Carve the persistent buffer into: SPS array, PPS array, the
	// pic-order-cnt offset tables, and a scratch offset table.
	pInfo->SPS_PADDR_GL = parser->sps_pps_ddr_paddr;
	pInfo->PPS_PADDR_GL = pInfo->SPS_PADDR_GL + MAX_NUM_SPS * sizeof(seq_param_set_all);
	pInfo->OFFSET_REF_FRAME_PADDR_GL = pInfo->PPS_PADDR_GL + MAX_NUM_PPS * sizeof(pic_param_set);
	pInfo->TMP_OFFSET_REFFRM_PADDR_GL = pInfo->OFFSET_REF_FRAME_PADDR_GL +
	                              MAX_NUM_SPS * sizeof(int32_t) * MAX_NUM_REF_FRAMES_IN_PIC_ORDER_CNT_CYCLE;

	h264_memset( &(pInfo->active_SPS), 0x0, sizeof(seq_param_set_used) );
	h264_memset( &(pInfo->active_PPS), 0x0, sizeof(pic_param_set) );

	/* Global for SPS & PPS */
	for(i=0;i<MAX_NUM_SPS;i++)
	{
		pInfo->active_SPS.seq_parameter_set_id = 0xff;   // mark slot as invalid
		h264_Parse_Copy_Sps_To_DDR (pInfo, &(pInfo->active_SPS), i);
	}
	for(i=0;i<MAX_NUM_PPS;i++)
	{
		pInfo->active_PPS.seq_parameter_set_id = 0xff;   // mark slot as invalid
		h264_Parse_Copy_Pps_To_DDR (pInfo, &(pInfo->active_PPS), i);
	}

	pInfo->active_SPS.seq_parameter_set_id = 0xff;
	pInfo->sps_valid = 0;
	pInfo->got_start = 0;

	return;
}
+
+
// Reset all per-stream state that sits below the SPS/PPS level: DPB, slice
// headers, SEI state, image parameters, reference lists, and the various
// frame-boundary / error bookkeeping flags. The active SPS/PPS themselves
// are intentionally left untouched.
void h264_init_Info_under_sps_pps_level(h264_Info* pInfo)
{
	int32_t i=0;

	h264_memset( &(pInfo->dpb), 0x0, sizeof(h264_DecodedPictureBuffer) );
	h264_memset( &(pInfo->SliceHeader), 0x0, sizeof(h264_Slice_Header_t) );
	h264_memset( &(pInfo->old_slice), 0x0, sizeof(OldSliceParams) );
	h264_memset( &(pInfo->sei_information), 0x0, sizeof(sei_info) );
	h264_memset( &(pInfo->img), 0x0, sizeof(h264_img_par) );

	pInfo->h264_list_replacement = 0;

	// Predictive-weight-table byte/bit window: nothing captured yet.
	pInfo->h264_pwt_start_byte_offset = 0;
	pInfo->h264_pwt_start_bit_offset = 0;
	pInfo->h264_pwt_end_byte_offset = 0;
	pInfo->h264_pwt_end_bit_offset = 0;
	pInfo->h264_pwt_enabled = 0;

	for(i=0;i<32;i++)
	{
		pInfo->slice_ref_list0[i] = 0;
		pInfo->slice_ref_list1[i] = 0;
	}

	pInfo->qm_present_list = 0;

	pInfo->nal_unit_type = 0;
	pInfo->old_nal_unit_type = 0xff;   // "no previous NAL" sentinel

	pInfo->push_to_cur = 0;
	pInfo->Is_first_frame_in_stream = 1;
	pInfo->Is_SPS_updated = 0;
	pInfo->number_of_first_au_info_nal_before_first_slice = 0;

	pInfo->is_frame_boundary_detected_by_non_slice_nal = 0;
	pInfo->is_frame_boundary_detected_by_slice_nal = 0;
	pInfo->is_current_workload_done = 0;

	pInfo->sei_rp_received = 0;
	pInfo->last_I_frame_idc = 255;     // no I frame decoded yet
	pInfo->wl_err_curr = 0;
	pInfo->wl_err_next = 0;

	pInfo->primary_pic_type_plus_one = 0;
	pInfo->sei_b_state_ready = 0;

	/* Init old slice structure (sentinels for new-picture detection) */
	h264_init_old_slice(pInfo);

	/* init_dpb */
	h264_init_dpb(&(pInfo->dpb));

	/* init_sei */
	h264_sei_stream_initialise(pInfo);

}
+
+void h264_init_Info(h264_Info* pInfo)
+{
+ h264_memset(pInfo, 0x0, sizeof(h264_Info));
+
+ pInfo->old_nal_unit_type = 0xff;
+
+ pInfo->Is_first_frame_in_stream =1;
+ pInfo->img.frame_count = 0;
+ pInfo->last_I_frame_idc = 255;
+
+ return;
+}
+
+ /* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+
+/////////////////////////////////////////////////////
+//
+// Judge whether it is the first VCL of a new picture
+//
+/////////////////////////////////////////////////////
+ int32_t h264_is_second_field(h264_Info * pInfo)
+ {
+ h264_Slice_Header_t cur_slice = pInfo->SliceHeader;
+ OldSliceParams old_slice = pInfo->old_slice;
+
+ int result = 0;
+
+ //pInfo->img.second_field = 0;
+
+ /// is it second field?
+
+ //OS_INFO( "xxx is_used = %d\n", pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].is_used);
+
+ if (cur_slice.structure != FRAME)
+ {
+ if( ( MPD_DPB_FS_NULL_IDC != pInfo->dpb.fs_dec_idc)&&(3 != viddec_h264_get_is_used(&(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc])) )
+ &&(0 != viddec_h264_get_is_used(&(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc])) ))
+ {
+ if ((cur_slice.frame_num == old_slice.frame_num)||(cur_slice.idr_flag))
+ {
+
+ if(old_slice.structure != cur_slice.structure)
+ {
+
+ if (((cur_slice.structure == TOP_FIELD &&old_slice.structure == BOTTOM_FIELD) || // Condition 1:
+ (old_slice.structure == TOP_FIELD && cur_slice.structure == BOTTOM_FIELD)) && \
+ ((old_slice.nal_ref_idc ==0 && cur_slice.nal_ref_idc == 0) || // Condition 2:
+ (old_slice.nal_ref_idc !=0 &&cur_slice.nal_ref_idc != 0)))
+ {
+ //pInfo->img.second_field = 1;
+ result = 1;
+ }
+ }
+ }
+
+
+ }
+
+
+ }
+
+
+
+ return result;
+
+ } //// End of h264_is_second_field
+
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+
// First-VCL-NAL-of-new-picture detection, following the conditions listed
// in H.264 spec section 7.4.1.2.4. Returns nonzero when the current slice
// starts a new picture relative to the previous slice.
//
// NOTE(review): both slice headers are passed by value, so the structs are
// copied on every call; the interface is kept as-is here.
int32_t h264_is_new_picture_start(h264_Info * pInfo, h264_Slice_Header_t cur_slice, h264_Slice_Header_t old_slice)
{
	int result = 0;

	// Any AU-delimiting NAL (AUD/SPS/PPS/SEI/...) seen before this slice
	// already forced a picture boundary.
	if(pInfo->number_of_first_au_info_nal_before_first_slice)
	{
		pInfo->number_of_first_au_info_nal_before_first_slice = 0;
		return 1;
	}



	result |= (old_slice.pic_parameter_id != cur_slice.pic_parameter_id);
	result |= (old_slice.frame_num != cur_slice.frame_num);
	result |= (old_slice.field_pic_flag != cur_slice.field_pic_flag);
	if(cur_slice.field_pic_flag && old_slice.field_pic_flag)
	{
		result |= (old_slice.bottom_field_flag != cur_slice.bottom_field_flag);
	}

	// nal_ref_idc differs AND one of them is zero (reference <-> non-reference).
	result |= (old_slice.nal_ref_idc != cur_slice.nal_ref_idc) && \
		((old_slice.nal_ref_idc == 0) || (cur_slice.nal_ref_idc == 0));
	result |= ( old_slice.idr_flag != cur_slice.idr_flag);

	if (cur_slice.idr_flag && old_slice.idr_flag)
	{
		result |= (old_slice.idr_pic_id != cur_slice.idr_pic_id);
	}

	// POC comparison depends on the active pic_order_cnt_type.
	if (pInfo->active_SPS.pic_order_cnt_type == 0)
	{
		result |= (old_slice.pic_order_cnt_lsb != cur_slice.pic_order_cnt_lsb);
		result |= (old_slice.delta_pic_order_cnt_bottom != cur_slice.delta_pic_order_cnt_bottom);
	}

	if (pInfo->active_SPS.pic_order_cnt_type == 1)
	{
		result |= (old_slice.delta_pic_order_cnt[0] != cur_slice.delta_pic_order_cnt[0]);
		result |= (old_slice.delta_pic_order_cnt[1] != cur_slice.delta_pic_order_cnt[1]);
	}

	return result;
}
+
+
// Detect a frame boundary from a non-VCL NAL: if the previous NAL was a
// slice (or IDR slice) and the current NAL is a unit that can only start a
// new access unit (AUD/SPS/PPS/SEI/EOSeq/EOstream/reserved), the previous
// picture is complete — provided it was a full frame or its second field.
// Returns 1 when a boundary was detected (and marks the workload done),
// 0 otherwise.
int32_t h264_check_previous_frame_end(h264_Info * pInfo)
{
   int result = 0;

   if( (h264_NAL_UNIT_TYPE_SLICE==pInfo->old_nal_unit_type)||(h264_NAL_UNIT_TYPE_IDR==pInfo->old_nal_unit_type) )
   {

      switch ( pInfo->nal_unit_type )
      {
         case h264_NAL_UNIT_TYPE_Acc_unit_delimiter:
         case h264_NAL_UNIT_TYPE_SPS:
         case h264_NAL_UNIT_TYPE_PPS:
         case h264_NAL_UNIT_TYPE_SEI:
         case h264_NAL_UNIT_TYPE_EOSeq:
         case h264_NAL_UNIT_TYPE_EOstream:
         case h264_NAL_UNIT_TYPE_Reserved1:
         case h264_NAL_UNIT_TYPE_Reserved2:
         case h264_NAL_UNIT_TYPE_Reserved3:
         case h264_NAL_UNIT_TYPE_Reserved4:
         case h264_NAL_UNIT_TYPE_Reserved5:
         {
            pInfo->img.current_slice_num = 0;

            // Only a complete picture (a frame, or the second of two
            // fields) closes the current workload.
            if((pInfo->img.structure == FRAME) || (pInfo->img.second_field)) {
               pInfo->is_frame_boundary_detected_by_non_slice_nal =1;
               pInfo->is_current_workload_done=1;
               result=1;
            }
            break;
         }
         default:
            break;
      }

   }

   return result;

}
+
+
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+
//////////////////////////////////////////////////////////////
// 1) Update old slice structure for frame boundary detection
//////////////////////////////////////////////////////////////
// Snapshot into pInfo->old_slice every field of the just-parsed slice
// header that h264_is_new_picture_start() compares, then promote
// next_SliceHeader to be the current pInfo->SliceHeader (the memcpy at the
// end must stay last — it overwrites the source of the snapshot).
void h264_update_old_slice(h264_Info * pInfo,h264_Slice_Header_t next_SliceHeader)
{
	pInfo->old_slice.pic_parameter_id = pInfo->SliceHeader.pic_parameter_id;

	pInfo->old_slice.frame_num = pInfo->SliceHeader.frame_num;

	pInfo->old_slice.field_pic_flag = pInfo->SliceHeader.field_pic_flag;

	if(pInfo->SliceHeader.field_pic_flag)
	{
		pInfo->old_slice.bottom_field_flag = pInfo->SliceHeader.bottom_field_flag;
	}

	pInfo->old_slice.nal_ref_idc = pInfo->SliceHeader.nal_ref_idc;

	pInfo->old_slice.structure = pInfo->SliceHeader.structure;

	pInfo->old_slice.idr_flag = pInfo->SliceHeader.idr_flag;
	if (pInfo->SliceHeader.idr_flag)
	{
		// idr_pic_id is only meaningful for IDR slices.
		pInfo->old_slice.idr_pic_id = pInfo->SliceHeader.idr_pic_id;
	}

	// Snapshot the POC fields relevant to the active pic_order_cnt_type.
	if (pInfo->active_SPS.pic_order_cnt_type == 0)
	{
		pInfo->old_slice.pic_order_cnt_lsb = pInfo->SliceHeader.pic_order_cnt_lsb;
		pInfo->old_slice.delta_pic_order_cnt_bottom = pInfo->SliceHeader.delta_pic_order_cnt_bottom;
	}

	if (pInfo->active_SPS.pic_order_cnt_type == 1)
	{
		pInfo->old_slice.delta_pic_order_cnt[0] = pInfo->SliceHeader.delta_pic_order_cnt[0];
		pInfo->old_slice.delta_pic_order_cnt[1] = pInfo->SliceHeader.delta_pic_order_cnt[1];
	}

	////////////////////////////// Next to current
	memcpy(&pInfo->SliceHeader, &next_SliceHeader, sizeof(h264_Slice_Header_t));

	return;
}
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+
//////////////////////////////////////////////////////////////////////////////
// Initialization for new picture
//////////////////////////////////////////////////////////////////////////////
// Copy the per-picture parameters from the active slice header / SPS into
// pInfo->img, refresh the SEI recovery-point frame number, and reset the
// soft DPB when the picture resolution changed.
void h264_update_img_info(h264_Info * pInfo )
{
	h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;

	pInfo->img.frame_num = pInfo->SliceHeader.frame_num;
	pInfo->img.structure = pInfo->SliceHeader.structure;

	pInfo->img.field_pic_flag = pInfo->SliceHeader.field_pic_flag;
	pInfo->img.bottom_field_flag = pInfo->SliceHeader.bottom_field_flag;

	// MBAFF only applies to frame pictures of an MBAFF-capable SPS.
	pInfo->img.MbaffFrameFlag = pInfo->active_SPS.sps_disp.mb_adaptive_frame_field_flag & (!(pInfo->SliceHeader.field_pic_flag));
	pInfo->img.pic_order_cnt_type = pInfo->active_SPS.pic_order_cnt_type;

	if(pInfo->img.pic_order_cnt_type == 1) {
		pInfo->img.num_ref_frames_in_pic_order_cnt_cycle = pInfo->active_SPS.num_ref_frames_in_pic_order_cnt_cycle;
		pInfo->img.delta_pic_order_always_zero_flag = pInfo->active_SPS.delta_pic_order_always_zero_flag;
		pInfo->img.offset_for_non_ref_pic = pInfo->active_SPS.offset_for_non_ref_pic;
		pInfo->img.offset_for_top_to_bottom_field = pInfo->active_SPS.offset_for_top_to_bottom_field;
	}

	pInfo->img.pic_order_cnt_lsb = pInfo->SliceHeader.pic_order_cnt_lsb;
	//pInfo->img.pic_order_cnt_msb = pInfo->SliceHeader.pic_order_cnt_msb;
	pInfo->img.delta_pic_order_cnt_bottom = pInfo->SliceHeader.delta_pic_order_cnt_bottom;
	pInfo->img.delta_pic_order_cnt[0] = pInfo->SliceHeader.delta_pic_order_cnt[0];
	pInfo->img.delta_pic_order_cnt[1] = pInfo->SliceHeader.delta_pic_order_cnt[1];


	pInfo->img.PreviousFrameNum = pInfo->old_slice.frame_num;

	pInfo->img.no_output_of_prior_pics_flag = pInfo->SliceHeader.sh_dec_refpic.no_output_of_prior_pics_flag;

	////////////////////////////////////////////////// Check SEI recovery point
	if (pInfo->sei_information.recovery_point) {
		// recovery_frame_num = (frame_num + recovery_frame_cnt) mod MaxFrameNum
		int32_t MaxFrameNum = 1 << (pInfo->active_SPS.log2_max_frame_num_minus4 + 4);
		pInfo->sei_information.recovery_frame_num = (pInfo->img.frame_num + pInfo->sei_information.recovery_frame_cnt) % MaxFrameNum;
	}

	if (pInfo->SliceHeader.idr_flag)
		pInfo->sei_information.recovery_frame_num = pInfo->img.frame_num;   // IDR is an immediate recovery point



	/////////////////////////////////////////////////Resolution Change
	pInfo->img.curr_has_mmco_5 = 0;

	if ( (pInfo->img.PicWidthInMbs != p_dpb->PicWidthInMbs)||
		(pInfo->img.FrameHeightInMbs != p_dpb->FrameHeightInMbs) )
	{
		int32_t no_output_old_pics = (pInfo->SliceHeader.idr_flag)? pInfo->img.no_output_of_prior_pics_flag : 0;

		// If resolution changed, reset the soft DPB here
		h264_dpb_reset_dpb(pInfo, pInfo->img.PicWidthInMbs, pInfo->img.FrameHeightInMbs, 1, no_output_old_pics);
	}

	return;

}	///// End of init new frame
+
+
+void h264_update_frame_type(h264_Info * pInfo )
+{
+
+//update frame type
+ if(pInfo->img.structure == FRAME)
+ {
+ if(pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR)
+ {
+ pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (0x1 << FRAME_TYPE_STRUCTRUE_OFFSET)|(FRAME_TYPE_IDR << FRAME_TYPE_FRAME_OFFSET);
+ //pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = 0xff;
+ //pInfo->dpb.fs[0].pic_type = pInfo->dpb.fs_dec_idc;
+
+ }
+ else
+ {
+ #if 1
+ switch(pInfo->SliceHeader.slice_type)
+ {
+ case h264_PtypeB:
+ pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (0x1 << FRAME_TYPE_STRUCTRUE_OFFSET)|(FRAME_TYPE_B << FRAME_TYPE_FRAME_OFFSET);
+ break;
+ case h264_PtypeSP:
+ case h264_PtypeP:
+ if( ((pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type) & (0x7 << FRAME_TYPE_FRAME_OFFSET))>>FRAME_TYPE_FRAME_OFFSET != FRAME_TYPE_B)
+ pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (0x1 << FRAME_TYPE_STRUCTRUE_OFFSET)|(FRAME_TYPE_P << FRAME_TYPE_FRAME_OFFSET);
+ break;
+ case h264_PtypeI:
+ case h264_PtypeSI:
+ if( ((pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type) & (0x7 << FRAME_TYPE_FRAME_OFFSET))>>FRAME_TYPE_FRAME_OFFSET == FRAME_TYPE_INVALID)
+ {
+ pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (0x1 << FRAME_TYPE_STRUCTRUE_OFFSET)|(FRAME_TYPE_I << FRAME_TYPE_FRAME_OFFSET);
+ }
+ pInfo->last_I_frame_idc = pInfo->dpb.fs_dec_idc;
+
+ break;
+ default:
+ break;
+
+ }
+ #endif
+
+ }
+
+ }
+ else if(pInfo->img.structure == TOP_FIELD)
+ {
+ if(pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR)
+ {
+ pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (FRAME_TYPE_IDR << FRAME_TYPE_TOP_OFFSET)|(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type & (0x7 << FRAME_TYPE_BOTTOM_OFFSET));;
+ }
+ else
+ {
+ switch(pInfo->SliceHeader.slice_type)
+ {
+ case h264_PtypeB:
+ pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (FRAME_TYPE_B << FRAME_TYPE_TOP_OFFSET)|(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type & (0x7 << FRAME_TYPE_BOTTOM_OFFSET));
+ break;
+ case h264_PtypeSP:
+ case h264_PtypeP:
+ if( ((pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type) & (0x7 << FRAME_TYPE_TOP_OFFSET))>>FRAME_TYPE_TOP_OFFSET != FRAME_TYPE_B)
+ pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (FRAME_TYPE_P << FRAME_TYPE_TOP_OFFSET)|(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type & (0x7 << FRAME_TYPE_BOTTOM_OFFSET));
+ break;
+ case h264_PtypeI:
+ case h264_PtypeSI:
+ if( ((pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type) & (0x7 << FRAME_TYPE_TOP_OFFSET))>>FRAME_TYPE_TOP_OFFSET == FRAME_TYPE_INVALID)
+ {
+ pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (FRAME_TYPE_I << FRAME_TYPE_TOP_OFFSET)|(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type & (0x7 << FRAME_TYPE_BOTTOM_OFFSET));
+ }
+ break;
+ default:
+ break;
+
+ }
+
+ }
+
+
+ }else if(pInfo->img.structure == BOTTOM_FIELD)
+ {
+ if(pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR)
+ {
+ pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (FRAME_TYPE_IDR << FRAME_TYPE_BOTTOM_OFFSET)|(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type & (0x7 << FRAME_TYPE_TOP_OFFSET));;
+ }
+ else
+ {
+ switch(pInfo->SliceHeader.slice_type)
+ {
+ case h264_PtypeB:
+ pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (FRAME_TYPE_B << FRAME_TYPE_BOTTOM_OFFSET)|(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type & (0x7 << FRAME_TYPE_TOP_OFFSET));
+ break;
+ case h264_PtypeSP:
+ case h264_PtypeP:
+ if( ((pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type) & (0x7 << FRAME_TYPE_BOTTOM_OFFSET))>>FRAME_TYPE_BOTTOM_OFFSET != FRAME_TYPE_B)
+ pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (FRAME_TYPE_P << FRAME_TYPE_BOTTOM_OFFSET)|(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type & (0x7 << FRAME_TYPE_TOP_OFFSET));
+ break;
+ case h264_PtypeI:
+ case h264_PtypeSI:
+ if( ((pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type) & (0x7 << FRAME_TYPE_BOTTOM_OFFSET))>>FRAME_TYPE_BOTTOM_OFFSET == FRAME_TYPE_INVALID)
+ {
+ pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (FRAME_TYPE_I << FRAME_TYPE_BOTTOM_OFFSET)|(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type & (0x7 << FRAME_TYPE_TOP_OFFSET));
+ }
+ break;
+ default:
+ break;
+
+ }
+
+ }
+
+ }
+ return;
+
+}
+
+
+//////#endif ///////////// IFDEF H264_PARSE_C///////////////////
+
diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_bsd.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_bsd.c
new file mode 100644
index 0000000..c4e00ee
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_bsd.c
@@ -0,0 +1,228 @@
+/* ///////////////////////////////////////////////////////////////////////
+//
+// INTEL CORPORATION PROPRIETARY INFORMATION
+// This software is supplied under the terms of a license agreement or
+// nondisclosure agreement with Intel Corporation and may not be copied
+// or disclosed except in accordance with the terms of that agreement.
+// Copyright (c) 2001-2006 Intel Corporation. All Rights Reserved.
+//
+// Description: h264 bitstream decoding
+//
+///////////////////////////////////////////////////////////////////////*/
+
+
+#include "h264.h"
+#include "h264parse.h"
+#include "viddec_parser_ops.h"
+
+
+
+
+
/**
  get_codeNum: decode one Exp-Golomb codeNum per section 9.1 of the H.264 spec.
  @param parent : bitstream context; buffer address & size are part inputs,
                  and the read position is advanced past the decoded code.
  @param pInfo  : parser state (unused here; kept for interface symmetry).
  Assumption: codeNum is a max of 32 bits.

  @retval codeNum : successfully found a code; read position is updated.
  @retval MAX_INT32_VALUE : couldn't find a complete code in the current buffer.
*/

uint32_t h264_get_codeNum(void *parent, h264_Info* pInfo)
{
   int32_t    leadingZeroBits= 0;
   uint32_t   temp = 0, match = 0, noOfBits = 0, count = 0;
   uint32_t   codeNum =0;
   uint32_t   bits_offset =0, byte_offset =0;
   uint8_t    is_emul =0;
   uint8_t    is_first_byte = 1;
   uint32_t   length =0;
   uint32_t   bits_need_add_in_first_byte =0;
   int32_t    bits_operation_result=0;

   //remove warning
   pInfo = pInfo;

   ////// Step 1: parse through zero bits until we find a bit with value 1.
   viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul);


   while(!match)
   {
      if ((bits_offset != 0) && ( is_first_byte == 1))
      {
         //we handle byte at a time, if we have offset then for first
         //  byte handle only 8 - offset bits
         noOfBits = (uint8_t)(8 - bits_offset);
         bits_operation_result = viddec_pm_peek_bits(parent, &temp, noOfBits);


         // Left-align the partial byte so the 0x80 test below works.
         temp = (temp << bits_offset);
         if(temp!=0)
         {
            bits_need_add_in_first_byte = bits_offset;
         }
         is_first_byte =0;
      }
      else
      {
         noOfBits = 8;/* always 8 bits as we read a byte at a time */
         bits_operation_result = viddec_pm_peek_bits(parent, &temp, 8);

      }

      if(-1==bits_operation_result)
      {
         // Ran out of data before finding the leading 1 bit.
         return MAX_INT32_VALUE;
      }

      if(temp != 0)
      {
         // if byte!=0 we have at least one bit with value 1.
         count=1;
         while(((temp & 0x80) != 0x80) && (count <= noOfBits))
         {
            count++;
            temp = temp <<1;
         }
         //At this point we get the bit position of 1 in current byte(count).

         match = 1;
         leadingZeroBits += count;
      }
      else
      {
         // we don't have a 1 in current byte
         leadingZeroBits += noOfBits;
      }

      if(!match)
      {
         //actually move the bitoff by viddec_pm_get_bits
         viddec_pm_get_bits(parent, &temp, noOfBits);
      }
      else
      {
         //actually move the bitoff by viddec_pm_get_bits
         viddec_pm_get_bits(parent, &temp, count);
      }

   }
   ////// step 2: Now read the next (leadingZeroBits-1) bits to get the encoded value.


   if(match)
   {

      viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul);
      /* bit position in current byte */
      //count = (uint8_t)((leadingZeroBits + bits_offset)& 0x7);
      count = ((count + bits_need_add_in_first_byte)& 0x7);

      leadingZeroBits --;
      length = leadingZeroBits;
      codeNum = 0;
      noOfBits = 8 - count;

      // Read the leadingZeroBits suffix bits, at most one byte at a time.
      while(leadingZeroBits > 0)
      {
         if(noOfBits < (uint32_t)leadingZeroBits)
         {
            viddec_pm_get_bits(parent, &temp, noOfBits);


            codeNum = (codeNum << noOfBits) | temp;
            leadingZeroBits -= noOfBits;
         }
         else
         {
            viddec_pm_get_bits(parent, &temp, leadingZeroBits);

            codeNum = (codeNum << leadingZeroBits) | temp;
            leadingZeroBits = 0;
         }


         noOfBits = 8;
      }
      // update codeNum = 2 ** (leadingZeroBits) -1 + read_bits(leadingZeroBits).
      codeNum = codeNum + (1 << length) -1;

   }

   // When stopping mid-byte, peek the remaining bits (does not move the
   // read position) — presumably to keep emulation-prevention state in
   // sync for the next read; confirm against the pm API.
   viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul);
   if(bits_offset!=0)
   {
      viddec_pm_peek_bits(parent, &temp, 8-bits_offset);
   }

   return codeNum;
}
+
+
+/*---------------------------------------*/
+/*---------------------------------------*/
+int32_t h264_GetVLCElement(void *parent, h264_Info* pInfo, uint8_t bIsSigned)
+{
+ int32_t sval = 0;
+ signed char sign;
+
+ sval = h264_get_codeNum(parent , pInfo);
+
+ if(bIsSigned) //get signed integer golomb code else the value is unsigned
+ {
+ sign = (sval & 0x1)?1:-1;
+ sval = (sval +1) >> 1;
+ sval = sval * sign;
+ }
+
+ return sval;
+} // Ipp32s H264Bitstream::GetVLCElement(bool bIsSigned)
+
///
/// Check whether more RBSP data left in current NAL
///
/// Scans from the current bit position for rbsp_trailing_bits: if the next
/// bit is not the stop bit (1), or any later bit in the current byte is
/// set, more RBSP payload remains. Returns 1 when more data is available,
/// 0 when only the stop bit and trailing zero bits remain.
/// NOTE(review): this assumes viddec_pm_is_nomoredata() returns nonzero
/// when the buffer IS exhausted — confirm against the pm API.
uint8_t h264_More_RBSP_Data(void *parent, h264_Info * pInfo)
{
   uint8_t cnt = 0;

   uint8_t  is_emul =0;
   uint8_t  cur_byte = 0;
   int32_t  shift_bits =0;
   uint32_t ctr_bit = 0;
   uint32_t bits_offset =0, byte_offset =0;

   //remove warning
   pInfo = pInfo;

   if (!viddec_pm_is_nomoredata(parent))
      return 1;

   viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul);

   shift_bits = 7-bits_offset;

   // read one byte
   viddec_pm_get_cur_byte(parent, &cur_byte);

   ctr_bit = ((cur_byte)>> (shift_bits--)) & 0x01;

   // a stop bit has to be one
   if (ctr_bit==0)
      return 1;

   // Any set bit after the stop bit means this was not rbsp_trailing_bits.
   while (shift_bits>=0 && !cnt)
   {
      cnt |= (((cur_byte)>> (shift_bits--)) & 0x01);   // set up control bit
   }

   return (cnt);
}
+
+
+
+///////////// EOF/////////////////////
+
diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_dpb.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_dpb.c
new file mode 100644
index 0000000..d1b693b
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_dpb.c
@@ -0,0 +1,4171 @@
+
+/*!
+ ***********************************************************************
+ * \file: h264_dpb_ctl.c
+ *
+ ***********************************************************************
+ */
+
+#include "viddec_fw_debug.h"
+#include "viddec_parser_ops.h"
+
+#include "viddec_fw_workload.h"
+#include "viddec_pm.h"
+#include "viddec_h264_parse.h"
+
+
+
+//#include <limits.h>
+#include "h264parse.h"
+#include "h264parse_dpb.h"
+//#include "h264_debug.h"
+
+#ifndef NULL
+#define NULL 0
+#endif
+//#ifndef USER_MODE
+//#define NULL 0
+//#endif
+
+////////////////////////// Declare Globals///////////////////////////////
+// Module-global cursor: the frame store currently being operated on.
+// Set via h264_dpb_set_active_fs(); read by most DPB helpers below.
+frame_store *active_fs;
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+///////////////////////// DPB init //////////////////////////////////////////
+//////////////////////////////////////////////////////////////////////////////
+// Init DPB
+// Description: init dpb, which should be called while open
+//
+//////////////////////////////////////////////////////////////////////////////
+
+void h264_init_dpb(h264_DecodedPictureBuffer * p_dpb)
+{
+    // Reset the decoded picture buffer: mark every frame store slot as
+    // unused and clear the bookkeeping indices.  Called once at open time.
+    int32_t slot;
+
+    for (slot = 0; slot < NUM_DPB_FRAME_STORES; slot++)
+    {
+        p_dpb->fs[slot].fs_idc  = MPD_DPB_FS_NULL_IDC;
+        p_dpb->fs_dpb_idc[slot] = MPD_DPB_FS_NULL_IDC;
+    }
+
+    p_dpb->used_size        = 0;
+    p_dpb->fs_dec_idc       = MPD_DPB_FS_NULL_IDC;
+    p_dpb->fs_non_exist_idc = MPD_DPB_FS_NULL_IDC;
+
+    return;
+}
+
+
+///////////////////////// Reference list management //////////////////////////
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_add_ref_list ()
+//
+// Adds an idc to the short term reference list
+//////////////////////////////////////////////////////////////////////////////
+void h264_dpb_add_ref_list(h264_DecodedPictureBuffer * p_dpb, int32_t ref_idc)
+{
+    // Append ref_idc at the tail of the short term reference list and
+    // grow the list by one.  No capacity check -- caller's responsibility.
+    p_dpb->fs_ref_idc[p_dpb->ref_frames_in_buffer++] = ref_idc;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_add_ltref_list ()
+//
+// Adds an idc to the long term reference list
+//////////////////////////////////////////////////////////////////////////////
+void h264_dpb_add_ltref_list(h264_DecodedPictureBuffer * p_dpb, int32_t ref_idc)
+{
+    // Append ref_idc at the tail of the long term reference list and
+    // grow the list by one.  No capacity check -- caller's responsibility.
+    p_dpb->fs_ltref_idc[p_dpb->ltref_frames_in_buffer++] = ref_idc;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_insert_ref_lists (h264_DecodedPictureBuffer * p_dpb, int32_t NonExisting)
+//
+// Decide whether the current picture needs to be added to the reference lists
+// active_fs should be set-up prior to calling this function
+//
+// Check if we need to search the lists here
+// or can we go straight to adding to ref lists..
+//////////////////////////////////////////////////////////////////////////////
+
+void h264_dpb_insert_ref_lists(h264_DecodedPictureBuffer * p_dpb, int32_t NonExisting)
+{
+ // Select the picture to classify: either the synthesised "non existing"
+ // frame store or the one just decoded, then add its fs_idc to the short
+ // or long term reference list as appropriate.
+ if(NonExisting)
+ h264_dpb_set_active_fs(p_dpb,p_dpb->fs_non_exist_idc);
+ else
+ h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc);
+
+ //if(active_fs->is_reference)
+ if(active_fs->frame.used_for_reference)
+ {
+ if(viddec_h264_get_is_long_term(active_fs))
+ {
+ if(viddec_h264_get_dec_structure(active_fs) == FRAME)
+ h264_dpb_add_ltref_list(p_dpb, active_fs->fs_idc);
+ else
+ {
+ // Field picture: the frame store may already be listed from its
+ // first field -- only add it once.
+ uint32_t found_in_list = 0, i = 0;
+ for (i = 0; (i < p_dpb->ltref_frames_in_buffer) && (found_in_list == 0); i++) {
+ if(p_dpb->fs_ltref_idc[i] == active_fs->fs_idc) found_in_list = 1;
+ }
+
+ if(found_in_list == 0) h264_dpb_add_ltref_list(p_dpb, active_fs->fs_idc);
+ }
+ }
+ else
+ {
+ if(viddec_h264_get_dec_structure(active_fs) == FRAME) {
+ h264_dpb_add_ref_list(p_dpb, active_fs->fs_idc);
+ } else
+ {
+ // Same duplicate guard for the short term list.
+ uint32_t found_in_list = 0, i = 0;
+
+ for (i = 0; (i < p_dpb->ref_frames_in_buffer) && (found_in_list == 0); i++)
+ {
+ if(p_dpb->fs_ref_idc[i] == active_fs->fs_idc) found_in_list = 1;
+ }
+
+ if(found_in_list == 0) h264_dpb_add_ref_list(p_dpb, active_fs->fs_idc);
+ }
+ }
+ }
+
+ return;
+
+}
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// Set active fs
+//////////////////////////////////////////////////////////////////////////////
+
+void h264_dpb_set_active_fs(h264_DecodedPictureBuffer * p_dpb, int32_t index)
+{
+ // Point the module-global active_fs at frame store 'index' of this DPB.
+ // NOTE(review): no bounds check -- callers must pass a valid slot index.
+ active_fs = &p_dpb->fs[index];
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// Sort reference list
+//////////////////////////////////////////////////////////////////////////////
+
+void h264_list_sort(uint8_t *list, int32_t *sort_indices, int32_t size, int32_t desc)
+{
+ // In-place sort of sort_indices[0..size-1] with 'list' permuted in step:
+ // desc==1 sorts descending, desc==0 ascending.  O(n^2), but n <= 16 here.
+ // NOTE(review): the bitwise select below is only correct when desc is
+ // exactly 0 or 1 (relies on desc/~desc masking a 0-or-1 comparison
+ // result); every caller in this file passes 0 or 1.
+ int32_t j, k, temp, idc;
+
+ // Dodgy looking for embedded code here...
+ if(size > 1)
+ {
+ for (j = 0; j < size-1; j = j + 1) {
+ for (k = j + 1; k < size; k = k + 1) {
+ if ((desc & (sort_indices[j] < sort_indices[k]))|
+ (~desc & (sort_indices[j] > sort_indices[k])) )
+ {
+ // Swap both the sort keys and the parallel idc list.
+ temp = sort_indices[k];
+ sort_indices[k] = sort_indices[j];
+ sort_indices[j] = temp;
+ idc = list[k];
+ list[k] = list[j];
+ list[j] = idc;
+ }
+ }
+ }
+ }
+}
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_pic_is_bottom_field_ref ()
+//
+// Returns 1 when the bottom field of active_fs is a reference of the
+// requested kind (long_term selects long vs short term), 0 otherwise
+//////////////////////////////////////////////////////////////////////////////
+
+int32_t h264_dpb_pic_is_bottom_field_ref(int32_t long_term)
+{
+    // Returns 1 when the bottom field of active_fs is used for reference
+    // and its long/short term status matches the requested kind.
+    int32_t is_ref = 0;
+
+    if (active_fs->bottom_field.used_for_reference)
+    {
+        if (long_term)
+            is_ref = (active_fs->bottom_field.is_long_term) ? 1 : 0;
+        else
+            is_ref = (active_fs->bottom_field.is_long_term) ? 0 : 1;
+    }
+
+    return is_ref;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_pic_is_top_field_ref ()
+//
+// Returns 1 when the top field of active_fs is a reference of the
+// requested kind (long_term selects long vs short term), 0 otherwise
+//////////////////////////////////////////////////////////////////////////////
+
+int32_t h264_dpb_pic_is_top_field_ref(int32_t long_term)
+{
+    // Returns 1 when the top field of active_fs is used for reference
+    // and its long/short term status matches the requested kind.
+    int32_t is_ref = 0;
+
+    if (active_fs->top_field.used_for_reference)
+    {
+        if (long_term)
+            is_ref = (active_fs->top_field.is_long_term) ? 1 : 0;
+        else
+            is_ref = (active_fs->top_field.is_long_term) ? 0 : 1;
+    }
+
+    return is_ref;
+}
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_gen_pic_list_from_frame_list ()
+//
+// Builds a field picture list from a sorted frame-store list, alternating
+// field parities starting with the current picture's parity
+//////////////////////////////////////////////////////////////////////////////
+
+int32_t h264_dpb_gen_pic_list_from_frame_list(h264_DecodedPictureBuffer *p_dpb, uint8_t *pic_list, uint8_t *frame_list, int32_t currPicStructure, int32_t list_size, int32_t long_term)
+{
+ // Turn a sorted frame-store list into a field picture list, alternating
+ // parities starting with the same parity as the current picture
+ // (top-first when decoding a TOP_FIELD, bottom-first for BOTTOM_FIELD).
+ // Each output entry packs: long-term flag | fs index | field-parity bit.
+ // Returns the number of entries written to pic_list.
+ int32_t top_idx, bot_idx, got_pic, list_idx;
+ int32_t lterm;
+
+ list_idx = 0;
+ lterm = (long_term)? 1:0;
+
+ if(list_size){
+
+
+ top_idx = 0;
+ bot_idx = 0;
+
+ if (currPicStructure == TOP_FIELD) {
+ while ((top_idx < list_size)||(bot_idx < list_size))
+ {
+ /////////////////////////////////////////// ref Top Field
+ got_pic = 0;
+ // NOTE(review): '& ~got_pic' only works because got_pic is 0 or 1
+ // (~1 still masks a 0/1 comparison result to 0).
+ while ((top_idx < list_size) & ~got_pic)
+ {
+ h264_dpb_set_active_fs(p_dpb, frame_list[top_idx]);
+ if ((viddec_h264_get_is_used(active_fs))&0x1)
+ {
+ if(h264_dpb_pic_is_top_field_ref(long_term))
+ {
+ pic_list[list_idx] = PUT_LIST_LONG_TERM_BITS(lterm) + frame_list[top_idx] + PUT_LIST_INDEX_FIELD_BIT(0); // top_field
+ list_idx++;
+ got_pic = 1;
+ }
+ }
+ top_idx++;
+ }
+
+ /////////////////////////////////////////// ref Bottom Field
+ got_pic = 0;
+ while ((bot_idx < list_size) & ~got_pic)
+ {
+ h264_dpb_set_active_fs(p_dpb, frame_list[bot_idx]);
+ if ((viddec_h264_get_is_used(active_fs))&0x2)
+ {
+ if(h264_dpb_pic_is_bottom_field_ref(long_term))
+ {
+ pic_list[list_idx] = PUT_LIST_LONG_TERM_BITS(lterm) + frame_list[bot_idx] + PUT_LIST_INDEX_FIELD_BIT(1); // bottom_field
+ list_idx++;
+ got_pic = 1;
+ }
+ }
+ bot_idx++;
+ }
+ }
+ }
+
+ /////////////////////////////////////////////// current Bottom Field
+ if (currPicStructure == BOTTOM_FIELD) {
+ while ((top_idx < list_size)||(bot_idx < list_size))
+ {
+ // Same interleave as above but bottom fields take priority.
+ /////////////////////////////////////////// ref Top Field
+ got_pic = 0;
+ while ((bot_idx < list_size) && (!(got_pic)))
+ {
+ h264_dpb_set_active_fs(p_dpb, frame_list[bot_idx]);
+ if ((viddec_h264_get_is_used(active_fs))&0x2) {
+ if(h264_dpb_pic_is_bottom_field_ref(long_term)) {
+ // short term ref pic
+ pic_list[list_idx] = PUT_LIST_LONG_TERM_BITS(lterm) + frame_list[bot_idx] + PUT_LIST_INDEX_FIELD_BIT(1); // bottom_field
+ list_idx++;
+ got_pic = 1;
+ }
+ }
+ bot_idx++;
+ }
+
+ /////////////////////////////////////////// ref Bottom Field
+ got_pic = 0;
+ while ((top_idx < list_size) && (!(got_pic)))
+ {
+ h264_dpb_set_active_fs(p_dpb, frame_list[top_idx]);
+ if ((viddec_h264_get_is_used(active_fs))&0x1) {
+ if(h264_dpb_pic_is_top_field_ref(long_term)){
+ // short term ref pic
+ pic_list[list_idx] = PUT_LIST_LONG_TERM_BITS(lterm) + frame_list[top_idx] + PUT_LIST_INDEX_FIELD_BIT(0); // top_field
+ list_idx++;
+ got_pic = 1;
+ }
+ }
+ top_idx++;
+ }
+ }
+ }
+ }
+
+ return list_idx;
+}
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_remove_ref_list ()
+//
+// Removes an idc from the reference list and updates list after
+//
+
+void h264_dpb_remove_ref_list(h264_DecodedPictureBuffer * p_dpb, int32_t ref_idc)
+{
+    // Locate ref_idc in the short term reference list; when present, close
+    // the gap by shifting the tail entries up and shrink the list by one.
+    uint8_t pos;
+
+    for (pos = 0; pos < p_dpb->ref_frames_in_buffer; pos++)
+    {
+        if (p_dpb->fs_ref_idc[pos] == ref_idc)
+            break;
+    }
+
+    if (pos < p_dpb->ref_frames_in_buffer)
+    {
+        for (; pos < p_dpb->ref_frames_in_buffer - 1; pos++)
+            p_dpb->fs_ref_idc[pos] = p_dpb->fs_ref_idc[pos + 1];
+
+        p_dpb->fs_ref_idc[pos] = MPD_DPB_FS_NULL_IDC; // Clear the last one
+        p_dpb->ref_frames_in_buffer--;
+    }
+
+    return;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_remove_ltref_list ()
+//
+// Removes an idc from the long term reference list and updates list after
+//////////////////////////////////////////////////////////////////////////////
+
+void h264_dpb_remove_ltref_list(h264_DecodedPictureBuffer * p_dpb,int32_t ref_idc)
+{
+    // Locate ref_idc in the long term reference list; when present, close
+    // the gap by shifting the tail entries up and shrink the list by one.
+    uint8_t pos;
+
+    for (pos = 0; pos < p_dpb->ltref_frames_in_buffer; pos++)
+    {
+        if (p_dpb->fs_ltref_idc[pos] == ref_idc)
+            break;
+    }
+
+    if (pos < p_dpb->ltref_frames_in_buffer)
+    {
+        while (pos < (uint8_t)(p_dpb->ltref_frames_in_buffer - 1))
+        {
+            p_dpb->fs_ltref_idc[pos] = p_dpb->fs_ltref_idc[pos + 1];
+            pos++;
+        }
+        p_dpb->fs_ltref_idc[pos] = MPD_DPB_FS_NULL_IDC; // Clear the last one
+
+        p_dpb->ltref_frames_in_buffer--;
+    }
+
+    return;
+}
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_update_ref_lists ()
+//
+// Used to initialise the reference lists
+// Also assigns picture numbers and long term picture numbers if P OR B slice
+//////////////////////////////////////////////////////////////////////////////
+void h264_dpb_update_ref_lists(h264_Info * pInfo)
+{
+ //
+ // Build the initial reference picture lists (listX_0 / listX_1) for the
+ // current slice.  Short term references come first (sorted), long term
+ // references follow (entries flagged with bit 6).  Frame and field
+ // decodes take separate paths; P slices fill list0 only, B slices build
+ // both lists around the current POC.  Finally the lists are clipped to
+ // num_ref_idx_lX_active and handed to the reordering stage.
+ //
+ h264_DecodedPictureBuffer * p_dpb = &pInfo->dpb;
+
+ int32_t MaxFrameNum = 1 << (pInfo->active_SPS.log2_max_frame_num_minus4 + 4);
+
+ uint8_t list0idx, list0idx_1, listltidx;
+ uint8_t idx;
+
+ uint8_t add_top, add_bottom, diff;
+ uint8_t list_idc;
+ uint8_t check_non_existing, skip_picture;
+
+
+ uint8_t gen_pic_fs_list0[16];
+ uint8_t gen_pic_fs_list1[16];
+ uint8_t gen_pic_fs_listlt[16];
+ uint8_t gen_pic_pic_list[32]; // check out these sizes...
+
+ uint8_t sort_fs_idc[16];
+ int32_t list_sort_number[16];
+
+#ifdef DUMP_HEADER_INFO
+ // Debug-only hook; cc1 is never advanced in this function.
+ static int cc1 = 0;
+ //OS_INFO("-------------cc1= %d\n",cc1); /////// DEBUG info
+ if(cc1 == 255)
+ idx = 0;
+#endif
+
+ list0idx = list0idx_1 = listltidx = 0;
+
+ if (pInfo->SliceHeader.structure == FRAME)
+ {
+ ////////////////////////////////////////////////// short term handling
+ for (idx = 0; idx < p_dpb->ref_frames_in_buffer; idx++)
+ {
+ h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ref_idc[idx]);
+
+ if((viddec_h264_get_is_used(active_fs) == 3)&&(active_fs->frame.used_for_reference == 3))
+ {
+ // FrameNumWrap: frame numbers above the current one wrap below it.
+ if (active_fs->frame_num > pInfo->img.frame_num)
+ active_fs->frame_num_wrap = active_fs->frame_num - MaxFrameNum;
+ else
+ active_fs->frame_num_wrap = active_fs->frame_num;
+
+ active_fs->frame.pic_num = active_fs->frame_num_wrap;
+
+ // Use this opportunity to sort list for a p-frame
+ if(pInfo->SliceHeader.slice_type == h264_PtypeP)
+ {
+ sort_fs_idc[list0idx] = p_dpb->fs_ref_idc[idx];
+ list_sort_number[list0idx] = active_fs->frame.pic_num;
+ list0idx++;
+ }
+ }
+ }
+
+ if(pInfo->SliceHeader.slice_type == h264_PtypeP)
+ {
+ // P list0 short term refs: descending pic_num.
+ h264_list_sort(sort_fs_idc, list_sort_number, list0idx, 1);
+ for (idx = 0; idx < list0idx; idx++)
+ p_dpb->listX_0[idx] = (sort_fs_idc[idx]); // frame
+
+ p_dpb->listXsize[0] = list0idx;
+ }
+
+ ////////////////////////////////////////////////// long term handling
+ for (idx = 0; idx < p_dpb->ltref_frames_in_buffer; idx++)
+ {
+ h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ltref_idc[idx]);
+ if ((viddec_h264_get_is_used(active_fs) == 3) && (viddec_h264_get_is_long_term(active_fs) == 3) && (active_fs->frame.used_for_reference == 3))
+ {
+ active_fs->frame.long_term_pic_num = active_fs->frame.long_term_frame_idx;
+
+ if(pInfo->SliceHeader.slice_type == h264_PtypeP)
+ {
+ sort_fs_idc[list0idx-p_dpb->listXsize[0]] = p_dpb->fs_ltref_idc[idx];
+ list_sort_number[list0idx-p_dpb->listXsize[0]] = active_fs->frame.long_term_pic_num;
+ list0idx++;
+ }
+ }
+ }
+
+ if(pInfo->SliceHeader.slice_type == h264_PtypeP)
+ {
+ // Long term refs appended ascending; bit 6 marks a long term entry.
+ h264_list_sort(sort_fs_idc, list_sort_number, list0idx-p_dpb->listXsize[0], 0);
+ for (idx = p_dpb->listXsize[0]; idx < list0idx; idx++) {
+ p_dpb->listX_0[idx] = (1<<6) + sort_fs_idc[idx-p_dpb->listXsize[0]];
+ }
+ p_dpb->listXsize[0] = list0idx;
+ }
+ }
+ else /// Field base
+ {
+ // Parity offsets for field pic_num derivation: same-parity fields get
+ // the +1 (add_top for a top field picture, add_bottom for a bottom).
+ if (pInfo->SliceHeader.structure == TOP_FIELD)
+ {
+ add_top = 1;
+ add_bottom = 0;
+ }
+ else
+ {
+ add_top = 0;
+ add_bottom = 1;
+ }
+
+ ////////////////////////////////////////////P0: Short term handling
+ for (idx = 0; idx < p_dpb->ref_frames_in_buffer; idx++)
+ {
+ h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ref_idc[idx]);
+ if (active_fs->frame.used_for_reference)
+ {
+ if(active_fs->frame_num > pInfo->SliceHeader.frame_num) {
+ active_fs->frame_num_wrap = active_fs->frame_num - MaxFrameNum;
+ } else {
+ active_fs->frame_num_wrap = active_fs->frame_num;
+ }
+
+ if ((active_fs->frame.used_for_reference)&0x1) {
+ active_fs->top_field.pic_num = (active_fs->frame_num_wrap << 1) + add_top;
+ }
+
+ if ((active_fs->frame.used_for_reference)&0x2) {
+ active_fs->bottom_field.pic_num = (active_fs->frame_num_wrap << 1) + add_bottom;
+ }
+
+ if(pInfo->SliceHeader.slice_type == h264_PtypeP) {
+ sort_fs_idc[list0idx] = p_dpb->fs_ref_idc[idx];
+ list_sort_number[list0idx] = active_fs->frame_num_wrap;
+ list0idx++;
+ }
+ }
+ }
+
+ if(pInfo->SliceHeader.slice_type == h264_PtypeP)
+ {
+ h264_list_sort(sort_fs_idc, list_sort_number, list0idx, 1);
+ for (idx = 0; idx < list0idx; idx++) {
+ gen_pic_fs_list0[idx] = sort_fs_idc[idx];
+ }
+
+ p_dpb->listXsize[0] = 0;
+ p_dpb->listXsize[0] = h264_dpb_gen_pic_list_from_frame_list(p_dpb, gen_pic_pic_list, gen_pic_fs_list0, pInfo->img.structure, list0idx, 0);
+
+ for (idx = 0; idx < p_dpb->listXsize[0]; idx++)
+ {
+ p_dpb->listX_0[idx] = gen_pic_pic_list[idx];
+ }
+ }
+
+ ////////////////////////////////////////////P0: long term handling
+ for (idx = 0; idx < p_dpb->ltref_frames_in_buffer; idx++)
+ {
+ h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ltref_idc[idx]);
+
+ if (viddec_h264_get_is_long_term(active_fs)&0x1) {
+ active_fs->top_field.long_term_pic_num = (active_fs->top_field.long_term_frame_idx << 1) + add_top;
+ }
+
+ if (viddec_h264_get_is_long_term(active_fs)&0x2) {
+ active_fs->bottom_field.long_term_pic_num = (active_fs->bottom_field.long_term_frame_idx << 1) + add_bottom;
+ }
+
+ if(pInfo->SliceHeader.slice_type == h264_PtypeP)
+ {
+ sort_fs_idc[listltidx] = p_dpb->fs_ltref_idc[idx];
+ list_sort_number[listltidx] = active_fs->long_term_frame_idx;
+ listltidx++;
+ }
+ }
+
+ if(pInfo->SliceHeader.slice_type == h264_PtypeP)
+ {
+ h264_list_sort(sort_fs_idc, list_sort_number, listltidx, 0);
+ for (idx = 0; idx < listltidx; idx++) {
+ gen_pic_fs_listlt[idx] = sort_fs_idc[idx];
+ }
+ list0idx_1 = h264_dpb_gen_pic_list_from_frame_list(p_dpb, gen_pic_pic_list, gen_pic_fs_listlt, pInfo->img.structure, listltidx, 1);
+
+ for (idx = 0; idx < list0idx_1; idx++) {
+ p_dpb->listX_0[p_dpb->listXsize[0]+idx] = gen_pic_pic_list[idx];
+ }
+ p_dpb->listXsize[0] += list0idx_1;
+ }
+ }
+
+
+ if (pInfo->SliceHeader.slice_type == h264_PtypeI)
+ {
+ // I slices reference nothing.
+ p_dpb->listXsize[0] = 0;
+ p_dpb->listXsize[1] = 0;
+ return;
+ }
+
+ if(pInfo->SliceHeader.slice_type == h264_PtypeP)
+ {
+ //// Forward done above
+ p_dpb->listXsize[1] = 0;
+ }
+
+
+ // B-Slice
+ // Do not include non-existing frames for B-pictures when cnt_type is zero
+
+ if(pInfo->SliceHeader.slice_type == h264_PtypeB)
+ {
+ list0idx = list0idx_1 = listltidx = 0;
+ skip_picture = 0;
+
+ if(pInfo->active_SPS.pic_order_cnt_type == 0)
+ check_non_existing = 1;
+ else
+ check_non_existing = 0;
+
+ if (pInfo->SliceHeader.structure == FRAME)
+ {
+ // list0 = refs at or before current POC (descending) then refs
+ // after it (ascending); list1 is the same two runs swapped.
+ for (idx = 0; idx < p_dpb->ref_frames_in_buffer; idx++)
+ {
+ h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ref_idc[idx]);
+ if (viddec_h264_get_is_used(active_fs) == 3)
+ {
+ if(check_non_existing)
+ {
+ if(viddec_h264_get_is_non_existent(active_fs)) skip_picture = 1;
+ else skip_picture = 0;
+ }
+
+ if(skip_picture == 0)
+ {
+ if ((active_fs->frame.used_for_reference==3) && (!(active_fs->frame.is_long_term)))
+ {
+ if (pInfo->img.framepoc >= active_fs->frame.poc)
+ {
+ sort_fs_idc[list0idx] = p_dpb->fs_ref_idc[idx];
+ list_sort_number[list0idx] = active_fs->frame.poc;
+ list0idx++;
+ }
+ }
+ }
+ }
+ }
+
+ h264_list_sort(sort_fs_idc, list_sort_number, list0idx, 1);
+ for (idx = 0; idx < list0idx; idx++) {
+ p_dpb->listX_0[idx] = sort_fs_idc[idx];
+ }
+
+ list0idx_1 = list0idx;
+
+ /////////////////////////////////////////B0: Short term handling
+ for (idx = 0; idx < p_dpb->ref_frames_in_buffer; idx++)
+ {
+ h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ref_idc[idx]);
+
+ if (viddec_h264_get_is_used(active_fs) == 3)
+ {
+ if(check_non_existing)
+ {
+ if(viddec_h264_get_is_non_existent(active_fs)) skip_picture = 1;
+ else skip_picture = 0;
+ }
+
+ if(skip_picture == 0)
+ {
+ if ((active_fs->frame.used_for_reference) && (!(active_fs->frame.is_long_term)))
+ {
+ if (pInfo->img.framepoc < active_fs->frame.poc)
+ {
+ sort_fs_idc[list0idx-list0idx_1] = p_dpb->fs_ref_idc[idx];
+ list_sort_number[list0idx-list0idx_1] = active_fs->frame.poc;
+ list0idx++;
+ }
+ }
+ }
+ }
+ }
+
+ h264_list_sort(sort_fs_idc, list_sort_number, list0idx-list0idx_1, 0);
+ for (idx = list0idx_1; idx < list0idx; idx++) {
+ p_dpb->listX_0[idx] = sort_fs_idc[idx-list0idx_1];
+ }
+
+ for (idx = 0; idx < list0idx_1; idx++) {
+ p_dpb->listX_1[list0idx-list0idx_1+idx] = p_dpb->listX_0[idx];
+ }
+
+ for (idx = list0idx_1; idx < list0idx; idx++) {
+ p_dpb->listX_1[idx-list0idx_1] = p_dpb->listX_0[idx];
+ }
+
+ p_dpb->listXsize[0] = list0idx;
+ p_dpb->listXsize[1] = list0idx;
+
+ /////////////////////////////////////////B0: long term handling
+ list0idx = 0;
+
+ // Can non-existent pics be set as long term??
+ for (idx = 0; idx < p_dpb->ltref_frames_in_buffer; idx++)
+ {
+ h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ltref_idc[idx]);
+
+ if ((viddec_h264_get_is_used(active_fs) == 3) && (viddec_h264_get_is_long_term(active_fs) == 3))
+ {
+ // if we have two fields, both must be long-term
+ sort_fs_idc[list0idx] = p_dpb->fs_ltref_idc[idx];
+ list_sort_number[list0idx] = active_fs->frame.long_term_pic_num;
+ list0idx++;
+ }
+ }
+
+ h264_list_sort(sort_fs_idc, list_sort_number, list0idx, 0);
+ for (idx = p_dpb->listXsize[0]; idx < (p_dpb->listXsize[0]+list0idx); idx = idx + 1)
+ {
+ // bit 6 marks a long term entry; appended to both lists.
+ p_dpb->listX_0[idx] = (1<<6) + sort_fs_idc[idx-p_dpb->listXsize[0]];
+ p_dpb->listX_1[idx] = (1<<6) + sort_fs_idc[idx-p_dpb->listXsize[0]];
+ }
+
+ p_dpb->listXsize[0] += list0idx;
+ p_dpb->listXsize[1] += list0idx;
+ }
+ else // Field
+ {
+ for (idx = 0; idx < p_dpb->ref_frames_in_buffer; idx++)
+ {
+ h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ref_idc[idx]);
+
+ if (viddec_h264_get_is_used(active_fs)) {
+ if(check_non_existing) {
+ if(viddec_h264_get_is_non_existent(active_fs))
+ skip_picture = 1;
+ else
+ skip_picture = 0;
+ }
+
+ if(skip_picture == 0) {
+ if (pInfo->img.ThisPOC >= active_fs->frame.poc) {
+ sort_fs_idc[list0idx] = p_dpb->fs_ref_idc[idx];
+ list_sort_number[list0idx] = active_fs->frame.poc;
+ list0idx++;
+ }
+ }
+ }
+ }
+
+ h264_list_sort(sort_fs_idc, list_sort_number, list0idx, 1);
+ for (idx = 0; idx < list0idx; idx = idx + 1) {
+ gen_pic_fs_list0[idx] = sort_fs_idc[idx];
+ }
+
+ list0idx_1 = list0idx;
+
+ ///////////////////////////////////////////// B1: Short term handling
+ for (idx = 0; idx < p_dpb->ref_frames_in_buffer; idx++)
+ {
+ h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ref_idc[idx]);
+ if (viddec_h264_get_is_used(active_fs))
+ {
+ if(check_non_existing) {
+ if(viddec_h264_get_is_non_existent(active_fs))
+ skip_picture = 1;
+ else
+ skip_picture = 0;
+ }
+
+ if(skip_picture == 0) {
+ if (pInfo->img.ThisPOC < active_fs->frame.poc) {
+ sort_fs_idc[list0idx-list0idx_1] = p_dpb->fs_ref_idc[idx];
+ list_sort_number[list0idx-list0idx_1] = active_fs->frame.poc;
+ list0idx++;
+ }
+ }
+ }
+ }
+
+ ///// Generate frame list from sorted fs
+ /////
+ h264_list_sort(sort_fs_idc, list_sort_number, list0idx-list0idx_1, 0);
+ for (idx = list0idx_1; idx < list0idx; idx++)
+ gen_pic_fs_list0[idx] = sort_fs_idc[idx-list0idx_1];
+
+ for (idx = 0; idx < list0idx_1; idx++)
+ gen_pic_fs_list1[list0idx-list0idx_1+idx] = gen_pic_fs_list0[idx];
+
+ for (idx = list0idx_1; idx < list0idx; idx++)
+ gen_pic_fs_list1[idx-list0idx_1] = gen_pic_fs_list0[idx];
+
+ ///// Generate List_X0
+ /////
+ p_dpb->listXsize[0] = h264_dpb_gen_pic_list_from_frame_list(p_dpb, gen_pic_pic_list, gen_pic_fs_list0, pInfo->img.structure, list0idx, 0);
+
+ for (idx = 0; idx < p_dpb->listXsize[0]; idx++)
+ p_dpb->listX_0[idx] = gen_pic_pic_list[idx];
+
+ //// Generate List X1
+ ////
+ p_dpb->listXsize[1] = h264_dpb_gen_pic_list_from_frame_list(p_dpb, gen_pic_pic_list, gen_pic_fs_list1, pInfo->img.structure, list0idx, 0);
+
+ for (idx = 0; idx < p_dpb->listXsize[1]; idx++)
+ p_dpb->listX_1[idx] = gen_pic_pic_list[idx];
+
+ ///////////////////////////////////////////// B1: long term handling
+ for (idx = 0; idx < p_dpb->ltref_frames_in_buffer; idx++)
+ {
+ h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ltref_idc[idx]);
+ sort_fs_idc[listltidx] = p_dpb->fs_ltref_idc[idx];
+ list_sort_number[listltidx] = active_fs->long_term_frame_idx;
+ listltidx++;
+ }
+
+ h264_list_sort(sort_fs_idc, list_sort_number, listltidx, 0);
+ for (idx = 0; idx < listltidx; idx++)
+ gen_pic_fs_listlt[idx] = sort_fs_idc[idx];
+
+ list0idx_1 = h264_dpb_gen_pic_list_from_frame_list(p_dpb, gen_pic_pic_list, gen_pic_fs_listlt, pInfo->img.structure, listltidx, 1);
+
+ for (idx = 0; idx < list0idx_1; idx++)
+ {
+ p_dpb->listX_0[p_dpb->listXsize[0]+idx] = gen_pic_pic_list[idx];
+ p_dpb->listX_1[p_dpb->listXsize[1]+idx] = gen_pic_pic_list[idx];
+ }
+
+ p_dpb->listXsize[0] += list0idx_1;
+ p_dpb->listXsize[1] += list0idx_1;
+ }
+ }
+
+ // Setup initial list sizes at this point
+ p_dpb->nInitListSize[0] = p_dpb->listXsize[0];
+ p_dpb->nInitListSize[1] = p_dpb->listXsize[1];
+ if(pInfo->SliceHeader.slice_type != h264_PtypeI)
+ {
+ if ((p_dpb->listXsize[0]==p_dpb->listXsize[1]) && (p_dpb->listXsize[0] > 1))
+ {
+ // check if lists are identical, if yes swap first two elements of listX[1]
+ diff = 0;
+ for (idx = 0; idx < p_dpb->listXsize[0]; idx = idx + 1)
+ {
+ if (p_dpb->listX_0[idx] != p_dpb->listX_1[idx]) diff = 1;
+ }
+
+
+ if (!(diff))
+ {
+ list_idc = p_dpb->listX_1[0];
+ p_dpb->listX_1[0] = p_dpb->listX_1[1];
+ p_dpb->listX_1[1] = list_idc;
+ }
+ }
+
+ // set max size
+ if (p_dpb->listXsize[0] > pInfo->SliceHeader.num_ref_idx_l0_active)
+ {
+ p_dpb->listXsize[0] = pInfo->SliceHeader.num_ref_idx_l0_active;
+ }
+
+
+ if (p_dpb->listXsize[1] > pInfo->SliceHeader.num_ref_idx_l1_active)
+ {
+ p_dpb->listXsize[1] = pInfo->SliceHeader.num_ref_idx_l1_active;
+ }
+
+
+
+ }
+
+
+
+ /// DPB reorder list
+ h264_dpb_reorder_lists(pInfo);
+
+ return;
+} //// End of init_dpb_list
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_get_short_term_pic ()
+//
+// Returns the frame store containing the short term reference picture
+// with the given picNum, or NULL when it is not available; sets
+// *bottom_field_bit when the match is a bottom field
+//
+static frame_param_ptr h264_dpb_get_short_term_pic(h264_Info * pInfo,int32_t pic_num, int32_t *bottom_field_bit)
+{
+ // Search the short term reference list for the picture whose pic_num
+ // matches; returns the owning frame store, or NULL when not found.
+ // *bottom_field_bit is set only when the match is a bottom field.
+ register uint32_t idx;
+ register frame_param_ptr temp_fs;
+
+ h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;
+
+ *bottom_field_bit = 0;
+ for (idx = 0; idx < p_dpb->ref_frames_in_buffer; idx++)
+ {
+ temp_fs = &p_dpb->fs[p_dpb->fs_ref_idc[idx]];
+ if (pInfo->SliceHeader.structure == FRAME)
+ {
+ // Frame decode: both fields must be short term references.
+ if(temp_fs->frame.used_for_reference == 3)
+ if (!(temp_fs->frame.is_long_term))
+ if (temp_fs->frame.pic_num == pic_num) return temp_fs;
+ }
+ else // current picture is a field
+ {
+ if (temp_fs->frame.used_for_reference&0x1)
+ if (!(temp_fs->top_field.is_long_term))
+ if (temp_fs->top_field.pic_num == pic_num)
+ {
+ return temp_fs;
+ }
+
+ if (temp_fs->frame.used_for_reference&0x2)
+ if (!(temp_fs->bottom_field.is_long_term))
+ if (temp_fs->bottom_field.pic_num == pic_num)
+ {
+ *bottom_field_bit = PUT_LIST_INDEX_FIELD_BIT(1);
+ return temp_fs;
+ }
+ }
+ }
+ return NULL;
+}
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_get_long_term_pic ()
+//
+// Returns the frame store containing the long term reference picture with
+// the given long_term_pic_num, or NULL when it is not available
+//
+
+static frame_param_ptr h264_dpb_get_long_term_pic(h264_Info * pInfo,int32_t long_term_pic_num, int32_t *bottom_field_bit)
+{
+ // Search the long term reference list for the picture whose
+ // long_term_pic_num matches; returns the owning frame store, or NULL
+ // when not found.  *bottom_field_bit is set only for a bottom field hit.
+ register uint32_t idx;
+ register frame_param_ptr temp_fs;
+ h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;
+
+ *bottom_field_bit = 0;
+ for (idx = 0; idx < p_dpb->ltref_frames_in_buffer; idx++)
+ {
+ temp_fs = &p_dpb->fs[p_dpb->fs_ltref_idc[idx]];
+ if (pInfo->SliceHeader.structure == FRAME)
+ {
+ // Frame decode: both fields must be long term references.
+ if (temp_fs->frame.used_for_reference == 3)
+ if (temp_fs->frame.is_long_term)
+ if (temp_fs->frame.long_term_pic_num == long_term_pic_num)
+ return temp_fs;
+ }
+ else
+ {
+ if (temp_fs->frame.used_for_reference&0x1)
+ if (temp_fs->top_field.is_long_term)
+ if (temp_fs->top_field.long_term_pic_num == long_term_pic_num)
+ return temp_fs;
+
+ if (temp_fs->frame.used_for_reference&0x2)
+ if (temp_fs->bottom_field.is_long_term)
+ if (temp_fs->bottom_field.long_term_pic_num == long_term_pic_num)
+ {
+ *bottom_field_bit = PUT_LIST_INDEX_FIELD_BIT(1);
+ return temp_fs;
+ }
+ }
+ }
+ return NULL;
+}
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_reorder_ref_pic_list ()
+//
+// Used to sort a list based on a corresponding sort indices
+//
+
+// Node of the fixed-capacity singly linked list used for reference list
+// reordering.
+struct list_value_t
+{
+ int32_t value; // packed list entry (fs idc plus flag bits)
+ struct list_value_t *next; // next node, NULL at the tail
+};
+
+// Linked list over a fixed 32-slot backing array (no heap allocation).
+struct linked_list_t
+{
+ struct list_value_t *begin; // head of the chain
+ struct list_value_t *end; // tail of the chain
+ struct list_value_t *entry; // current insertion/inspection point
+ struct list_value_t *prev_entry; // node immediately before 'entry'
+ struct list_value_t list[32]; // backing storage for the nodes
+};
+
+static void linked_list_initialize (struct linked_list_t *lp, uint8_t *vp, int32_t size)
+{
+    // Build a chain over the first 'size' slots of lp->list, seeding each
+    // node's value from vp[].  The entry cursor starts at the head.
+    // NOTE(review): assumes 1 <= size <= 32 -- confirm against callers.
+    int32_t i;
+
+    lp->begin      = &lp->list[0];
+    lp->end        = &lp->list[size - 1];
+    lp->entry      = lp->begin;
+    lp->prev_entry = NULL;
+
+    for (i = 0; i < size; i++)
+    {
+        lp->list[i].value = vp[i];
+        lp->list[i].next  = &lp->list[i] + 1;
+    }
+    lp->end->next = NULL;
+
+    return;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+// Apply one reordering command: place 'list_value' at the current entry
+// position, preserving the relative order of the remaining entries.
+// This mirrors the H.264 list-modification step on the scratch list.
+static void linked_list_reorder (struct linked_list_t *lp, int32_t list_value)
+{
+ register struct list_value_t *lvp = lp->entry;
+ register struct list_value_t *lvp_prev;
+
+ if (lvp == NULL) {
+ lp->end->value = list_value; // entry exhausted: replace the end entry
+ } else if ((lp->begin==lp->end)||(lvp==lp->end)) // replace the begin/end entry and set the entry to NULL
+ {
+ lp->entry->value = list_value;
+ lp->prev_entry = lp->entry;
+ lp->entry = NULL;
+ }
+ else if (lvp->value==list_value) // the entry point already matches
+ {
+ lp->prev_entry = lvp;
+ lp->entry = lvp->next;
+ }
+ else if (lvp->next == lp->end) // the entry is just before the end
+ {
+ // replace the end and swap the end and entry points
+ // lvp
+ // prev_entry => entry => old_end
+ // old_end & new_prev_entry => new_end & entry
+ lp->end->value = list_value;
+
+ if (lp->prev_entry)
+ lp->prev_entry->next = lp->end;
+ else
+ lp->begin = lp->end;
+
+ lp->prev_entry = lp->end;
+ lp->end->next = lvp;
+ lp->end = lvp;
+ lvp->next = NULL;
+ }
+ else
+ {
+ // General case: search for the value from the entry point onward.
+ // NOTE(review): lvp_prev is always assigned before any break here,
+ // because the lvp->value==list_value case was handled above, so the
+ // first iteration cannot break immediately.
+ lvp_prev = NULL;
+ while (lvp->next) // do not check the end but we'll be in the loop at least once
+ {
+ if (lvp->value == list_value) break;
+ lvp_prev = lvp;
+ lvp = lvp->next;
+ }
+ lvp->value = list_value; // force end matches
+
+ // remove lvp from the list
+ lvp_prev->next = lvp->next;
+ if (lvp==lp->end) lp->end = lvp_prev;
+
+ // insert lvp in front of lp->entry
+ if (lp->entry==lp->begin)
+ {
+ lvp->next = lp->begin;
+ lp->begin = lvp;
+ }
+ else
+ {
+ lvp->next = lp->entry;
+ lp->prev_entry->next = lvp;
+ }
+ lp->prev_entry = lvp;
+ }
+ return;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+// Copy the (reordered) list values out into vp, in chain order.
+static void linked_list_output (struct linked_list_t *lp, int32_t *vp)
+{
+ struct list_value_t *node;
+ int32_t *out = vp;
+
+ for (node = lp->begin; node != NULL; node = node->next)
+ {
+ *out++ = node->value;
+ }
+ return;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+// Apply the slice-header reference-picture-list reordering commands
+// (H.264 8.2.4.3) to list 0 (list_num == 0) or list 1, starting from the
+// initialised list in p_dpb->listX_0 / listX_1. The reordered result is
+// written to pInfo->slice_ref_list0/1; the DPB copy is deliberately left
+// untouched (see comment at the bottom). Returns num_ref_idx_active.
+int32_t h264_dpb_reorder_ref_pic_list(h264_Info * pInfo,int32_t list_num, int32_t num_ref_idx_active)
+{
+ h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;
+ uint8_t *remapping_of_pic_nums_idc;
+ list_reordering_num_t *list_reordering_num;
+ int32_t bottom_field_bit;
+
+ int32_t maxPicNum, currPicNum, picNumLXNoWrap, picNumLXPred, pic_num;
+ int32_t i;
+
+ int32_t PicList[32] = {0};
+ struct linked_list_t ll;
+ struct linked_list_t *lp = &ll; // should consider use the scratch space
+
+ frame_param_ptr temp_fs;
+ int32_t temp;
+ uint8_t *ip1;
+
+ // MaxFrameNum = 2^(log2_max_frame_num_minus4 + 4) from the active SPS
+ maxPicNum = 1 << (pInfo->active_SPS.log2_max_frame_num_minus4 + 4);
+
+ // Select the initialised list and the matching slice-header commands
+ if (list_num == 0) // i.e list 0
+ {
+ ip1 = p_dpb->listX_0;
+ remapping_of_pic_nums_idc = pInfo->SliceHeader.sh_refpic_l0.reordering_of_pic_nums_idc;
+ list_reordering_num = pInfo->SliceHeader.sh_refpic_l0.list_reordering_num;
+ }
+ else
+ {
+ ip1 = p_dpb->listX_1;
+ remapping_of_pic_nums_idc = pInfo->SliceHeader.sh_refpic_l1.reordering_of_pic_nums_idc;
+ list_reordering_num = pInfo->SliceHeader.sh_refpic_l1.list_reordering_num;
+ }
+
+ linked_list_initialize (lp, ip1, num_ref_idx_active);
+
+ currPicNum = pInfo->SliceHeader.frame_num;
+ if (pInfo->SliceHeader.structure != FRAME)
+ {
+ // Field decoding: pic nums are doubled (+1 for same parity), since the
+ // list is based on field polarity
+ maxPicNum <<= 1;
+ currPicNum <<= 1;
+ currPicNum++;
+ }
+
+ picNumLXPred = currPicNum;
+
+ // Walk the reordering commands until the end marker (idc == 3).
+ // Bounds fix: test i BEFORE reading remapping_of_pic_nums_idc[i].
+ // The previous code read the array in the loop condition and only then
+ // checked (i > MAX_NUM_REF_FRAMES), so a command list with no end marker
+ // in range caused a read one past the last checked slot. The set of
+ // processed indices (0..MAX_NUM_REF_FRAMES) is unchanged.
+ for (i = 0; i <= MAX_NUM_REF_FRAMES; i++)
+ {
+ if (remapping_of_pic_nums_idc[i] == 3)
+ {
+ break; // end of reordering commands
+ }
+
+ if (remapping_of_pic_nums_idc[i] < 2) // idc 0/1 - short-term re-ordering
+ {
+ temp = (list_reordering_num[i].abs_diff_pic_num_minus1 + 1);
+ if (remapping_of_pic_nums_idc[i] == 0)
+ {
+ // idc 0: subtract the difference, wrapping below zero
+ temp = picNumLXPred - temp;
+ if (temp < 0 ) picNumLXNoWrap = temp + maxPicNum;
+ else picNumLXNoWrap = temp;
+ }
+ else // (remapping_of_pic_nums_idc[i] == 1)
+ {
+ // idc 1: add the difference, wrapping above maxPicNum
+ temp += picNumLXPred;
+ if (temp >= maxPicNum) picNumLXNoWrap = temp - maxPicNum;
+ else picNumLXNoWrap = temp;
+ }
+
+ // Updates for next iteration of the loop
+ picNumLXPred = picNumLXNoWrap;
+
+ if (picNumLXNoWrap > currPicNum ) pic_num = picNumLXNoWrap - maxPicNum;
+ else pic_num = picNumLXNoWrap;
+
+ temp_fs = h264_dpb_get_short_term_pic(pInfo, pic_num, &bottom_field_bit);
+ if (temp_fs)
+ {
+ // Pack fs_idc + field polarity into a list entry and move it
+ temp = bottom_field_bit + PUT_FS_IDC_BITS(temp_fs->fs_idc);
+ linked_list_reorder (lp, temp);
+ }
+ }
+ else //(remapping_of_pic_nums_idc[i] == 2) long-term re-ordering
+ {
+ pic_num = list_reordering_num[i].long_term_pic_num;
+
+ temp_fs = h264_dpb_get_long_term_pic(pInfo, pic_num, &bottom_field_bit);
+ if (temp_fs)
+ {
+ temp = PUT_LIST_LONG_TERM_BITS(1) + bottom_field_bit + PUT_FS_IDC_BITS(temp_fs->fs_idc);
+ linked_list_reorder (lp, temp);
+ }
+ }
+ }
+
+ linked_list_output (lp, PicList);
+
+ // Publish the reordered list to the slice-local copy
+ if(0 == list_num )
+ {
+ for(i=0; i<num_ref_idx_active; i++)
+ {
+ pInfo->slice_ref_list0[i]=(uint8_t)PicList[i];
+ }
+ }
+ else
+ {
+ for(i=0; i<num_ref_idx_active; i++)
+ {
+ pInfo->slice_ref_list1[i]=(uint8_t)PicList[i];
+ }
+ }
+
+ // Instead of updating the now reordered list here, just write it down...
+ // This way, we can continue to hold the initialised list in p_dpb->listX_0
+ // and therefore not need to update it every slice
+
+ return num_ref_idx_active;
+}
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+
+
+// Recovery-point (RP) sanity check on the reference list, run after list
+// reordering. Flags undecodable B slices that precede the RP, and for the
+// first P slice after an RP repairs a single-entry list 0 to point at the
+// last decoded I frame.
+void h264_dpb_RP_check_list (h264_Info * pInfo)
+{
+ h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;
+ uint8_t *p_list = pInfo->slice_ref_list0;
+
+ //
+ // If decoding starts from an RP without an exact entry point, all B frames
+ // belonging to the previous GOP must be thrown away (marked not decodable).
+ //
+
+ if((pInfo->SliceHeader.slice_type == h264_PtypeB)&&(pInfo->sei_b_state_ready ==0) && pInfo->sei_rp_received) {
+ pInfo->wl_err_curr |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE;
+ pInfo->wl_err_curr |= (FRAME << FIELD_ERR_OFFSET);
+ }
+
+
+ //
+ // Repair the reference list if it was damaged — RP recovery only
+ //
+ if((pInfo->SliceHeader.slice_type == h264_PtypeP) && pInfo->sei_rp_received)
+ {
+
+ int32_t idx, rp_found = 0;
+
+ // Only the single-reference case is repaired here
+ if(pInfo->SliceHeader.num_ref_idx_l0_active == 1)
+ {
+ // Choose the list actually in use: the reordered slice-local copy,
+ // or the initialised DPB list when no reordering was signalled
+ if(pInfo->SliceHeader.sh_refpic_l0.ref_pic_list_reordering_flag)
+ {
+ p_list = pInfo->slice_ref_list0;
+ }
+ else
+ {
+ p_list = pInfo->dpb.listX_0;
+ //pInfo->sei_rp_received = 0;
+ //return;
+ }
+
+
+ // Is the last decoded I frame still in the DPB?
+ for(idx = 0; idx < p_dpb->used_size; idx++) {
+ if(p_dpb->fs_dpb_idc[idx] == pInfo->last_I_frame_idc) {
+ rp_found = 1;
+ break;
+ }
+ }
+ if(rp_found) {
+#if 0
+ // NOTE(review): disabled alternative that flushed the whole previous
+ // GOP from the DPB instead of patching a single list entry.
+ int32_t poc;
+
+ ///// Clear long-term ref list
+ for (idx = 0; idx < p_dpb->ltref_frames_in_buffer; idx++)
+ {
+ h264_dpb_unmark_for_reference(p_dpb, p_dpb->fs_ltref_idc[0]);
+ h264_dpb_remove_ltref_list(p_dpb, p_dpb->fs_ltref_idc[0]);
+ }
+
+ ///// Clear short-term ref list
+ //while(p_dpb->used_size>1)
+ for(idx = 0; idx < p_dpb->used_size; idx++)
+ {
+ int32_t idx_pos;
+ //// find smallest non-output POC
+ h264_dpb_get_smallest_poc(p_dpb, &poc, &idx_pos);
+
+ //// Remove all frames in previous GOP
+ if ((idx_pos != MPD_DPB_FS_NULL_IDC) && (p_dpb->fs_dpb_idc[idx_pos] != pInfo->last_I_frame_idc))
+ {
+ // Remove from ref-list
+ h264_dpb_unmark_for_reference(p_dpb, p_dpb->fs_dpb_idc[idx_pos]);
+ h264_dpb_remove_ref_list(p_dpb, p_dpb->fs_dpb_idc[idx_pos]);
+
+ // Output from DPB
+ //h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dpb_idc[idx]);
+ //if((active_fs->is_output == 0) && (active_fs->is_non_existent == 0))
+ {
+ //int32_t existing;
+ //h264_dpb_frame_output(pInfo, p_dpb->fs_dpb_idc[idx], 0, &existing);
+ //p_dpb->last_output_poc = poc;
+ }
+ //h264_dpb_remove_frame_from_dpb(p_dpb, idx); // Remove dpb.fs_dpb_idc[pos]
+
+ }
+ }
+#endif
+ ///// Set the reference to the last I frame
+ // (255 is used here as the "no last I frame" sentinel)
+ if( (pInfo->last_I_frame_idc!=255)&&(pInfo->last_I_frame_idc!=p_list[0]))
+ {
+ /// Repair the reference list now: drop the damaged entry and
+ /// substitute the last I frame
+ h264_dpb_unmark_for_reference(p_dpb, p_list[0]);
+ h264_dpb_remove_ref_list(p_dpb, p_list[0]);
+ p_list[0] = pInfo->last_I_frame_idc;
+ }
+
+ }
+ }
+
+ // RP handled; subsequent B slices are decodable again
+ pInfo->sei_rp_received = 0;
+ pInfo->sei_b_state_ready = 1;
+
+ }
+
+
+ return;
+}
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_reorder_lists ()
+//
+// Used to sort a list based on a corresponding sort indices
+//
+
+// Apply slice-header reference-list reordering (if signalled) for the
+// current slice, record the active list sizes, then run the recovery-point
+// list repair check.
+void h264_dpb_reorder_lists(h264_Info * pInfo)
+{
+ int32_t slice_type = pInfo->SliceHeader.slice_type;
+
+ if (slice_type == h264_PtypeP)
+ {
+ // P slice: only the forward list (list 0) can be reordered
+ if (pInfo->SliceHeader.sh_refpic_l0.ref_pic_list_reordering_flag)
+ {
+ h264_dpb_reorder_ref_pic_list(pInfo, 0, pInfo->SliceHeader.num_ref_idx_l0_active);
+ }
+ pInfo->dpb.listXsize[0] = pInfo->SliceHeader.num_ref_idx_l0_active;
+ }
+ else if (slice_type == h264_PtypeB)
+ {
+ // B slice: forward (list 0) and backward (list 1) reordering
+ if (pInfo->SliceHeader.sh_refpic_l0.ref_pic_list_reordering_flag)
+ {
+ h264_dpb_reorder_ref_pic_list(pInfo, 0, pInfo->SliceHeader.num_ref_idx_l0_active);
+ }
+ pInfo->dpb.listXsize[0] = pInfo->SliceHeader.num_ref_idx_l0_active;
+
+ if (pInfo->SliceHeader.sh_refpic_l1.ref_pic_list_reordering_flag)
+ {
+ h264_dpb_reorder_ref_pic_list(pInfo, 1, pInfo->SliceHeader.num_ref_idx_l1_active);
+ }
+ pInfo->dpb.listXsize[1] = pInfo->SliceHeader.num_ref_idx_l1_active;
+ }
+
+ //// Check if the reference list needs repairing against a previous recovery point
+ h264_dpb_RP_check_list(pInfo);
+
+ return;
+}
+
+////////////////////////////////////////// DPB management //////////////////////
+
+//////////////////////////////////////////////////////////////////////////////
+// avc_dpb_get_non_output_frame_number ()
+//
+// get total non output frame number in the DPB.
+//
+// Count the frame stores in the DPB that have not yet been output.
+// Note: iterating via h264_dpb_set_active_fs leaves the file-scope
+// active_fs pointing at the last visited store.
+static int32_t avc_dpb_get_non_output_frame_number(h264_Info * pInfo)
+{
+ h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;
+ int32_t count = 0;
+ int32_t i;
+
+ for (i = 0; i < p_dpb->used_size; i++)
+ {
+ h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dpb_idc[i]);
+ if (viddec_h264_get_is_output(active_fs) == 0)
+ {
+ count++;
+ }
+ }
+
+ return count;
+}
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//// Store previous picture in DPB, and then update DPB queue, remove unused frames from DPB
+
+// Finalise the just-decoded (or non-existing / frame-num-gap) picture into
+// the DPB: set its reference marking, run IDR / adaptive / sliding-window
+// memory management, insert the picture (or its second field), drive frame
+// output, and update the reference lists.
+//
+// NonExisting: non-zero when storing a "non-existing" frame generated for a
+// frame_num gap (works on fs_non_exist_idc instead of fs_dec_idc).
+// use_old: non-zero to take slice parameters from pInfo->old_slice
+// instead of pInfo->SliceHeader.
+void h264_dpb_store_previous_picture_in_dpb(h264_Info * pInfo,int32_t NonExisting, int32_t use_old)
+{
+ h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;
+
+ int32_t used_for_reference;
+ int32_t is_direct_output;
+ int32_t second_field_stored = 0; // NOTE(review): written below but never read afterwards
+ int32_t poc;
+ int32_t pos;
+ int32_t flag;
+ int32_t first_field_non_ref = 0;
+ int32_t idr_flag;
+
+ // Point active_fs at the store being finalised; bail if there is none
+ if(NonExisting) {
+ if(p_dpb->fs_non_exist_idc == MPD_DPB_FS_NULL_IDC)
+ return;
+ h264_dpb_set_active_fs(p_dpb, p_dpb->fs_non_exist_idc);
+ } else {
+ if(p_dpb->fs_dec_idc == MPD_DPB_FS_NULL_IDC)
+ return;
+ h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc);
+ }
+
+ if(NonExisting == 0)
+ {
+ //active_fs->sps_disp_index = (next_sps_disp_entry == 0)? 7 : next_sps_disp_entry - 1;
+ pInfo->img.last_has_mmco_5 = 0;
+ pInfo->img.last_pic_bottom_field = pInfo->img.bottom_field_flag;
+
+ // A picture is a reference unless its nal_ref_idc is 0
+ //used_for_reference = (use_old) ? !(old_pInfo->img.old_disposable_flag) : !(pInfo->img.disposable_flag);
+ used_for_reference = (use_old) ? !(pInfo->old_slice.nal_ref_idc==0) : !(pInfo->SliceHeader.nal_ref_idc==0);
+
+ // Mark the decoded structure (field bits: 1 = top, 2 = bottom, 3 = frame)
+ switch (viddec_h264_get_dec_structure(active_fs))
+ {
+ case(TOP_FIELD) : {
+ active_fs->top_field.used_for_reference = used_for_reference;
+ viddec_h264_set_is_top_used(active_fs, 1);
+ //active_fs->crc_field_coded = 1;
+ }break;
+ case(BOTTOM_FIELD): {
+ active_fs->bottom_field.used_for_reference = used_for_reference << 1;
+ viddec_h264_set_is_bottom_used(active_fs, 1);
+ //active_fs->crc_field_coded = 1;
+ }break;
+ default: {
+ active_fs->frame.used_for_reference = used_for_reference?3:0;
+ viddec_h264_set_is_frame_used(active_fs, 3);
+ //if(pInfo->img.MbaffFrameFlag) active_fs->crc_field_coded = 1;
+
+ }break;
+ }
+
+ //freeze_assert = use_old ? old_pInfo->img.sei_freeze_this_image : pInfo->img.sei_freeze_this_image;
+ //if (freeze_assert) sei_information.disp_frozen = 1;
+
+ // IDR pictures reset the DPB; otherwise adaptive marking may apply
+ idr_flag = use_old ? pInfo->old_slice.idr_flag : pInfo->SliceHeader.idr_flag;
+ if (idr_flag) {
+ h264_dpb_idr_memory_management (pInfo, &pInfo->active_SPS, pInfo->img.no_output_of_prior_pics_flag);
+ } else {
+ // adaptive memory management
+ // NOTE(review): bitwise '&' acts as logical AND here — both operands
+ // are 0/1 flags, so the result matches '&&'
+ if (used_for_reference & pInfo->SliceHeader.sh_dec_refpic.adaptive_ref_pic_marking_mode_flag) {
+ h264_dpb_adaptive_memory_management(pInfo);
+ }
+ }
+ // Reset the active frame store - could have changed in mem management ftns
+ h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc);
+
+ if ((viddec_h264_get_dec_structure(active_fs) == TOP_FIELD)||(viddec_h264_get_dec_structure(active_fs) == BOTTOM_FIELD))
+ {
+ // check for frame store with same pic_number -- always true in my case, YH
+ // when we allocate frame store for the second field, we make sure the frame store for the second
+ // field is the one that contains the first field of the frame- see h264_dpb_init_frame_store()
+ // This is different from JM model.
+ // In this way we don't need to move image data around and can reduce memory bandwidth.
+ // simply check if the check if the other field has been decoded or not
+
+ if (viddec_h264_get_is_used(active_fs) != 0)
+ {
+ if(pInfo->img.second_field)
+ {
+ // Second field of a pair: insert without adding a new DPB slot
+ h264_dpb_insert_picture_in_dpb(pInfo, used_for_reference, 0, NonExisting, use_old);
+ second_field_stored = 1;
+ }
+ }
+ }
+ }
+ else
+ { // Set up locals for non-existing frames
+ used_for_reference = 1;
+
+ active_fs->frame.used_for_reference = used_for_reference?3:0;
+ viddec_h264_set_is_frame_used(active_fs, 3);
+ viddec_h264_set_dec_structure(active_fs, FRAME);
+ pInfo->img.structure = FRAME;
+ }
+
+ // Decide whether this picture may bypass the DPB reorder buffer entirely
+ is_direct_output = 0;
+ if (NonExisting == 0)
+ {
+ if(p_dpb->used_size >= p_dpb->BumpLevel)
+ {
+ // non-reference frames may be output directly
+ h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc);
+
+ if ((used_for_reference == 0) && (viddec_h264_get_is_used(active_fs) == 3))
+ {
+ h264_dpb_get_smallest_poc (p_dpb, &poc, &pos);
+ h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc);
+ if ((pos == MPD_DPB_FS_NULL_IDC) || (pInfo->img.ThisPOC < poc))
+ {
+ is_direct_output = 1;
+ }
+ }
+ }
+ }
+
+ // Sliding-window marking: only when no IDR and no adaptive marking ran,
+ // and only once per frame (first field)
+ if (NonExisting) {
+ h264_dpb_sliding_window_memory_management(p_dpb, NonExisting, pInfo->active_SPS.num_ref_frames);
+ } else if(pInfo->SliceHeader.idr_flag == 0) {
+ if(used_for_reference){
+ if(pInfo->img.second_field == 0) {
+ if (pInfo->SliceHeader.sh_dec_refpic.adaptive_ref_pic_marking_mode_flag == 0) {
+ h264_dpb_sliding_window_memory_management(p_dpb, NonExisting, pInfo->active_SPS.num_ref_frames);
+ }
+ }
+ }
+ }
+
+ h264_dpb_remove_unused_frame_from_dpb(p_dpb, &flag);
+
+ //if (is_direct_output == 0)
+ {
+ // First field or full frame (or a non-existing frame): add to the DPB
+ if ((pInfo->img.second_field == 0) || (NonExisting))
+ {
+ h264_dpb_insert_picture_in_dpb(pInfo, used_for_reference, 1, NonExisting, use_old);
+ }
+
+ // In an errored stream we saw a condition where
+ // p_dpb->ref_frames_in_buffer + p_dpb->ltref_frames_in_buffer > p_dpb->BumpLevel,
+ // which in itself is an error, but this means first_field_non_ref will
+ // not get set and causes problems for h264_dpb_queue_update()
+ if((pInfo->img.structure != FRAME) && (pInfo->img.second_field == 0)) {
+ if(used_for_reference == 0)
+ if(p_dpb->ref_frames_in_buffer + p_dpb->ltref_frames_in_buffer == p_dpb->BumpLevel)
+ first_field_non_ref = 1;
+ }
+
+ }
+
+ if(NonExisting)
+ h264_dpb_set_active_fs(p_dpb, p_dpb->fs_non_exist_idc);
+ else
+ h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc);
+
+ // Output control: only once the picture is complete (frame or 2nd field)
+ if(NonExisting == 0)
+ {
+ if((pInfo->img.second_field == 1) || (pInfo->img.structure == FRAME))
+ {
+ //h264_send_new_decoded_frame();
+ if((p_dpb->OutputCtrl) && (is_direct_output == 0))
+ h264_dpb_output_one_frame_from_dpb(pInfo, 0, 0,pInfo->active_SPS.num_ref_frames);
+
+ // Pictures inserted by this point - check if we have reached the specified output
+ // level (if one has been specified) so we can begin on next call
+
+ /*
+ Fixed HSD 212625---------------should compare OutputLevel with non-output frame number in dpb, not the used number in dpb
+ if((p_dpb->OutputLevelValid)&&(p_dpb->OutputCtrl == 0))
+ {
+ if(p_dpb->used_size == p_dpb->OutputLevel)
+ p_dpb->OutputCtrl = 1;
+ }
+ */
+
+ if(p_dpb->OutputLevelValid)
+ {
+ int32_t non_output_frame_number=0;
+ non_output_frame_number = avc_dpb_get_non_output_frame_number(pInfo);
+
+ if(non_output_frame_number == p_dpb->OutputLevel)
+ p_dpb->OutputCtrl = 1;
+ else
+ p_dpb->OutputCtrl = 0;
+ }
+ else {
+ p_dpb->OutputCtrl = 0;
+ }
+ }
+ }
+
+ // Bump frames out until the DPB is back within its fill level
+ while(p_dpb->used_size > (p_dpb->BumpLevel + first_field_non_ref))
+ //while(p_dpb->used_size > p_dpb->BumpLevel)
+ {
+ h264_dpb_queue_update(pInfo, 1, 0, 0,pInfo->active_SPS.num_ref_frames); // flush a frame
+ //h264_dpb_remove_unused_frame_from_dpb(p_dpb, &flag);
+ }
+
+ //
+ // Do not output "direct output" pictures until the sempahore has been set that the pic is
+ // decoded!!
+ //
+ if(is_direct_output) {
+ h264_dpb_queue_update(pInfo, 1, 1, 0,pInfo->active_SPS.num_ref_frames);
+ //h264_dpb_remove_unused_frame_from_dpb(p_dpb, &flag);
+ }
+
+ //
+ // Add reference pictures into Reference list
+ //
+ if(used_for_reference) {
+ h264_dpb_insert_ref_lists(&pInfo->dpb, NonExisting);
+ }
+
+ h264_dpb_remove_unused_frame_from_dpb(p_dpb, &flag);
+
+
+ return;
+} ////////////// End of DPB store pic
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_insert_picture_in_dpb ()
+//
+// Insert the decoded picture into the DPB. A free DPB position is necessary
+// for frames, .
+// This ftn tends to fill out the framestore's top level parameters from the
+// storable picture's parameters within it. It is called from h264_dpb_store_picture_in_dpb()
+//
+// This function finishes by updating the reference lists - this means it must be called after
+// h264_dpb_sliding_window_memory_management()
+//
+// In the case of a frame it will call h264_dpb_split_field()
+// In the case of the second field of a complementary field pair it calls h264_dpb_combine_field()
+//
+
+// Insert the active picture into the DPB (see block comment above).
+// add2dpb: non-zero to append the frame store to fs_dpb_idc[] — zero when
+// storing the second field into an already-inserted store.
+void h264_dpb_insert_picture_in_dpb(h264_Info * pInfo,int32_t used_for_reference, int32_t add2dpb, int32_t NonExisting, int32_t use_old)
+{
+ h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;
+
+ // Select the frame store and record its frame_num
+ if(NonExisting == 0) {
+ h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc);
+ active_fs->frame_num = (use_old) ? pInfo->old_slice.frame_num : pInfo->SliceHeader.frame_num;
+ }
+ else {
+ h264_dpb_set_active_fs(p_dpb, p_dpb->fs_non_exist_idc);
+ active_fs->frame_num = active_fs->frame.pic_num;
+ }
+
+ if (add2dpb) {
+ p_dpb->fs_dpb_idc[p_dpb->used_size] = active_fs->fs_idc;
+ p_dpb->used_size++;
+ }
+
+
+ switch (viddec_h264_get_dec_structure(active_fs))
+ {
+ case FRAME :{
+ viddec_h264_set_is_frame_used(active_fs, 3);
+ // NOTE(review): this assignment is redundant when used_for_reference
+ // is non-zero — the if-branch below overwrites it with 3 again
+ active_fs->frame.used_for_reference = used_for_reference?3:0;
+ if (used_for_reference)
+ {
+ active_fs->frame.used_for_reference = 3;
+ if (active_fs->frame.is_long_term)
+ viddec_h264_set_is_frame_long_term(active_fs, 3);
+ }
+ // Split frame to 2 fields for prediction
+ h264_dpb_split_field(pInfo);
+
+ }break;
+ case TOP_FIELD :{
+ viddec_h264_set_is_top_used(active_fs, 1);
+
+ active_fs->top_field.used_for_reference = used_for_reference;
+ if (used_for_reference)
+ {
+ active_fs->frame.used_for_reference |= 0x1; // bit 0 = top field
+ if (active_fs->top_field.is_long_term)
+ {
+ viddec_h264_set_is_top_long_term(active_fs, 1);
+ active_fs->long_term_frame_idx = active_fs->top_field.long_term_frame_idx;
+ }
+ }
+ // Both fields present now? Build the combined frame view
+ if (viddec_h264_get_is_used(active_fs) == 3) {
+ h264_dpb_combine_field(use_old); // generate frame view
+ }
+ else
+ {
+ active_fs->frame.poc = active_fs->top_field.poc;
+ }
+
+ }break;
+ case BOTTOM_FIELD :{
+ viddec_h264_set_is_bottom_used(active_fs, 1);
+
+ active_fs->bottom_field.used_for_reference = (used_for_reference<<1);
+ if (used_for_reference)
+ {
+ active_fs->frame.used_for_reference |= 0x2; // bit 1 = bottom field
+ if (active_fs->bottom_field.is_long_term)
+ {
+ viddec_h264_set_is_bottom_long_term(active_fs, 1);
+ active_fs->long_term_frame_idx = active_fs->bottom_field.long_term_frame_idx;
+ }
+ }
+ // Both fields present now? Build the combined frame view
+ if (viddec_h264_get_is_used(active_fs) == 3) {
+ h264_dpb_combine_field(use_old); // generate frame view
+ }
+ else
+ {
+ active_fs->frame.poc = active_fs->bottom_field.poc;
+ }
+
+ }break;
+ }
+/*
+ if ( gRestartMode.LastRestartType == RESTART_SEI )
+ {
+ if ( active_fs->open_gop_entry ) dpb.WaitSeiRecovery = 1;
+ }
+
+ gRestartMode.LastRestartType = 0xFFFF;
+*/
+
+ return;
+} ////// End of insert picture in DPB
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_mm_unmark_short_term_for_reference ()
+//
+// Adaptive Memory Management: Mark short term picture unused
+//
+
+// MMCO 1 (H.264 8.2.5.4.1): mark the short-term picture identified by
+// difference_of_pic_nums_minus1 as unused for reference. Frame decoding
+// unmarks a whole frame; field decoding unmarks the single matching field
+// and removes the store from the short-term list only when neither field
+// remains referenced.
+void h264_dpb_mm_unmark_short_term_for_reference(h264_Info * pInfo, int32_t difference_of_pic_nums_minus1)
+{
+ int32_t picNumX;
+ int32_t currPicNum;
+ uint32_t idx;
+ int32_t unmark_done;
+ h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;
+
+ // CurrPicNum: frame_num for frames, 2*frame_num+1 for fields
+ if (pInfo->img.structure == FRAME)
+ currPicNum = pInfo->img.frame_num;
+ else
+ currPicNum = (pInfo->img.frame_num << 1) + 1;
+
+ picNumX = currPicNum - (difference_of_pic_nums_minus1 + 1);
+
+ unmark_done = 0;
+
+ for (idx =0; (idx < p_dpb->ref_frames_in_buffer) && (!(unmark_done)); idx++)
+ {
+ h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ref_idc[idx]);
+
+ if (pInfo->img.structure == FRAME)
+ {
+ /* If all pic numbers in the list are different (and they should be)
+ we should terminate the for loop the moment we match pic numbers,
+ no need to continue to check - hence set unmark_done
+ */
+
+ if ((active_fs->frame.used_for_reference == 3) && (viddec_h264_get_is_long_term(active_fs) == 0) &&
+ (active_fs->frame.pic_num == picNumX))
+ {
+ h264_dpb_unmark_for_reference(p_dpb, active_fs->fs_idc);
+ h264_dpb_remove_ref_list(p_dpb, active_fs->fs_idc);
+ unmark_done = 1;
+ }
+ }
+ else
+ {
+ /*
+ If we wish to unmark a short-term picture by picture number when the current picture
+ is a field, we have to unmark the corresponding field as unused for reference,
+ and also if it was part of a frame or complementary reference field pair, the
+ frame is to be marked as unused. However the opposite field may still be used as a
+ reference for future fields
+
+ How will this affect the reference list update ftn coming after??
+
+ */
+ // Top field match (reference bit 0, short-term only)
+ if ((active_fs->frame.used_for_reference&0x1) && (!(viddec_h264_get_is_long_term(active_fs)&0x01))&&
+ (active_fs->top_field.pic_num == picNumX) )
+ {
+ active_fs->top_field.used_for_reference = 0;
+ active_fs->frame.used_for_reference &= 2; // keep only the bottom-field bit
+
+ unmark_done = 1;
+
+ //Check if other field is used for short-term reference, if not remove from list...
+ if(active_fs->bottom_field.used_for_reference == 0)
+ h264_dpb_remove_ref_list(p_dpb, p_dpb->fs_ref_idc[idx]);
+ }
+ // Bottom field match (reference bit 1, short-term only)
+ if ((active_fs->frame.used_for_reference&0x2) && (!(viddec_h264_get_is_long_term(active_fs)&0x2)) &&
+ (active_fs->bottom_field.pic_num == picNumX) )
+ {
+ active_fs->bottom_field.used_for_reference = 0;
+ active_fs->frame.used_for_reference &= 1; // keep only the top-field bit
+
+ unmark_done = 1;
+
+ //Check if other field is used for reference, if not remove from list...
+ if(active_fs->top_field.used_for_reference == 0)
+ h264_dpb_remove_ref_list(p_dpb, p_dpb->fs_ref_idc[idx]);
+ }
+ }
+ }
+
+ return;
+}
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+////////////////////////////////////////////////////////////////////////////////////
+// h264_dpb_mm_unmark_long_term_for_reference ()
+//
+// Adaptive Memory Management: Mark long term picture unused
+//
+// In a frame situation the long_term_pic_num will refer to another frame.
+// Thus we can call h264_dpb_unmark_for_long_term_reference() and then remove the picture
+// from the list
+//
+// If the current picture is a field, long_term_pic_num will refer to another field
+// It is also the case that each individual field should have a unique picture number
+// 8.2.5.4.2 suggests that when curr pic is a field, an mmco == 2 operation
+// should be accompanied by a second op to unmark the other field as being unused
+///////////////////////////////////////////////////////////////////////////////////
+
+// MMCO 2 (H.264 8.2.5.4.2): mark the long-term picture identified by
+// long_term_pic_num as unused for reference (see block comment above for
+// the frame/field distinction).
+void h264_dpb_mm_unmark_long_term_for_reference (h264_Info * pInfo, int32_t long_term_pic_num)
+{
+ uint32_t idx;
+ int32_t unmark_done;
+ h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;
+
+ unmark_done = 0;
+ for (idx = 0; (idx < p_dpb->ltref_frames_in_buffer) && (!(unmark_done)); idx++)
+ {
+ h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ltref_idc[idx]);
+
+ if (pInfo->img.structure == FRAME)
+ {
+ // Whole long-term frame: unmark and drop from the long-term list
+ if ((active_fs->frame.used_for_reference==3) && (viddec_h264_get_is_long_term(active_fs)==3) &&
+ (active_fs->frame.long_term_pic_num == long_term_pic_num))
+ {
+ h264_dpb_unmark_for_long_term_reference(p_dpb, p_dpb->fs_ltref_idc[idx]);
+ h264_dpb_remove_ltref_list(p_dpb, p_dpb->fs_ltref_idc[idx]);
+ unmark_done = 1;
+ }
+ }
+ else
+ {
+ /// Check top field
+ if ((active_fs->frame.used_for_reference&0x1) && (viddec_h264_get_is_long_term(active_fs)&0x1) &&
+ (active_fs->top_field.long_term_pic_num == long_term_pic_num) )
+ {
+ active_fs->top_field.used_for_reference = 0;
+ active_fs->top_field.is_long_term = 0;
+ active_fs->frame.used_for_reference &= 2; // keep only the bottom-field bit
+ viddec_h264_set_is_frame_long_term(active_fs, 2);
+
+ unmark_done = 1;
+
+ //Check if other field is used for long term reference, if not remove from list...
+ if ((active_fs->bottom_field.used_for_reference == 0) || (active_fs->bottom_field.is_long_term == 0))
+ h264_dpb_remove_ltref_list(p_dpb, p_dpb->fs_ltref_idc[idx]);
+ }
+
+ /// Check Bottom field
+ if ((active_fs->frame.used_for_reference&0x2) && (viddec_h264_get_is_long_term(active_fs)&0x2) &&
+ (active_fs->bottom_field.long_term_pic_num == long_term_pic_num) )
+ {
+ active_fs->bottom_field.used_for_reference = 0;
+ active_fs->bottom_field.is_long_term = 0;
+ active_fs->frame.used_for_reference &= 1; // keep only the top-field bit
+ viddec_h264_set_is_frame_long_term(active_fs, 1);
+
+ unmark_done = 1;
+ //Check if other field is used for long term reference, if not remove from list...
+ if ((active_fs->top_field.used_for_reference == 0) || (active_fs->top_field.is_long_term == 0))
+ {
+ h264_dpb_remove_ltref_list(p_dpb, p_dpb->fs_ltref_idc[idx]);
+ }
+ }
+ } // field structure
+ } //for(idx)
+
+ return;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_get_pic_struct_by_pic_num
+//
+// Searches the fields appearing in short term reference list
+// Returns the polarity of the field with pic_num = picNumX
+//////////////////////////////////////////////////////////////////////////////
+
+// Scan the short-term reference list for a field with pic_num == picNumX
+// and return its polarity (TOP_FIELD / BOTTOM_FIELD), or INVALID when no
+// field matches. Leaves the file-scope active_fs pointing at the matching
+// frame store when a match is found.
+int32_t h264_dpb_get_pic_struct_by_pic_num(h264_DecodedPictureBuffer *p_dpb, int32_t picNumX)
+{
+ int32_t polarity = INVALID;
+ int32_t match = 0;
+ uint32_t i;
+
+ for (i = 0; (i < p_dpb->ref_frames_in_buffer) && (match == 0); i++)
+ {
+ h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ref_idc[i]);
+
+ // Short-term top field with the requested pic num?
+ if ((active_fs->frame.used_for_reference & 0x1) &&
+ (!(viddec_h264_get_is_long_term(active_fs) & 0x1)) &&
+ (active_fs->top_field.pic_num == picNumX))
+ {
+ match = 1;
+ polarity = TOP_FIELD;
+ }
+ // Short-term bottom field with the requested pic num?
+ // (checked second, so it wins if both fields of a store match)
+ if ((active_fs->frame.used_for_reference & 0x2) &&
+ (!(viddec_h264_get_is_long_term(active_fs) & 0x2)) &&
+ (active_fs->bottom_field.pic_num == picNumX))
+ {
+ match = 1;
+ polarity = BOTTOM_FIELD;
+ }
+ }
+
+ return polarity;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_mm_assign_long_term_frame_idx ()
+//
+// Assign a long term frame index to a short term picture
+// Both lists must be updated as part of this process...
+//////////////////////////////////////////////////////////////////////////////
+
+// MMCO 3: assign long_term_frame_idx to the short-term picture identified
+// by difference_of_pic_nums_minus1, first removing any existing picture
+// that already holds that index.
+void h264_dpb_mm_assign_long_term_frame_idx(h264_Info * pInfo, int32_t difference_of_pic_nums_minus1, int32_t long_term_frame_idx)
+{
+ h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;
+ int32_t picNumX;
+ int32_t currPicNum;
+ int32_t polarity = 0;
+
+ // CurrPicNum: frame_num for frames, 2*frame_num+1 for fields
+ if (pInfo->img.structure == FRAME) {
+ currPicNum = pInfo->img.frame_num;
+ } else {
+ currPicNum = (pInfo->img.frame_num << 1) + 1;
+ }
+
+ picNumX = currPicNum - (difference_of_pic_nums_minus1 + 1);
+
+ // remove frames / fields with same long_term_frame_idx
+ if (pInfo->img.structure == FRAME) {
+ h264_dpb_unmark_long_term_frame_for_reference_by_frame_idx(p_dpb, long_term_frame_idx);
+ } else {
+ polarity = h264_dpb_get_pic_struct_by_pic_num(p_dpb, picNumX);
+
+ // NOTE(review): active_fs->fs_idc below relies on a side effect —
+ // h264_dpb_get_pic_struct_by_pic_num() leaves active_fs pointing at
+ // the frame store of the matching field (its search loop stops on the
+ // match), so this is the store holding picNumX.
+ if(polarity != INVALID)
+ h264_dpb_unmark_long_term_field_for_reference_by_frame_idx(p_dpb, long_term_frame_idx, active_fs->fs_idc, polarity);
+ }
+
+ h264_dpb_mark_pic_long_term(pInfo, long_term_frame_idx, picNumX);
+
+ return;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_mm_update_max_long_term_frame_idx ()
+//
+// Set new max long_term_frame_idx
+//
+
+// MMCO 4: record the new maximum long-term frame index and drop every
+// long-term reference whose long_term_frame_idx now exceeds it.
+void h264_dpb_mm_update_max_long_term_frame_idx(h264_DecodedPictureBuffer *p_dpb,int32_t max_long_term_frame_idx_plus1)
+{
+    int32_t i;
+    int32_t cur;
+    int32_t initial_count;
+    int32_t dropped = 0;
+
+    p_dpb->max_long_term_pic_idx = max_long_term_frame_idx_plus1 - 1;
+
+    initial_count = p_dpb->ltref_frames_in_buffer;
+
+    // Walk the long-term list, evicting out-of-range entries. Each removal
+    // compacts fs_ltref_idc[], so subtract the number already dropped to
+    // index the shrinking list correctly.
+    for (i = 0; i < initial_count; i++)
+    {
+        cur = i - dropped;
+        h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ltref_idc[cur]);
+
+        if (active_fs->long_term_frame_idx > p_dpb->max_long_term_pic_idx)
+        {
+            dropped++;
+            h264_dpb_unmark_for_long_term_reference(p_dpb, p_dpb->fs_ltref_idc[cur]);
+            h264_dpb_remove_ltref_list(p_dpb, p_dpb->fs_ltref_idc[cur]);
+        }
+    }
+    return;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_mm_unmark_all_short_term_for_reference ()
+//
+// Unmark all short term refernce pictures
+//
+
+// MMCO 5 helper: unmark every short-term reference picture. Each removal
+// shifts fs_ref_idc[] down, so we always operate on slot 0 and use the
+// saved count to know how many removals to perform.
+void h264_dpb_mm_unmark_all_short_term_for_reference (h264_DecodedPictureBuffer *p_dpb)
+{
+    int32_t count = p_dpb->ref_frames_in_buffer;
+    int32_t i;
+
+    for (i = 0; i < count; i++)
+    {
+        h264_dpb_unmark_for_reference(p_dpb, p_dpb->fs_ref_idc[0]);
+        h264_dpb_remove_ref_list(p_dpb, p_dpb->fs_ref_idc[0]);
+    }
+    return;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_mm_mark_current_picture_long_term ()
+//
+// Marks the current picture as long term after unmarking any long term picture
+// already assigned with the same long term frame index
+//
+
+// MMCO 6: mark the picture currently being decoded as a long-term reference
+// with the given long_term_frame_idx, first unmarking any other long-term
+// picture holding the same index so the index stays unique.
+// Fix: removed the local picNumX, which was computed in both field branches
+// but never read afterwards (dead store).
+void h264_dpb_mm_mark_current_picture_long_term(h264_DecodedPictureBuffer *p_dpb, int32_t long_term_frame_idx)
+{
+    h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc);
+
+    if (viddec_h264_get_dec_structure(active_fs) == FRAME)
+    {
+        h264_dpb_unmark_long_term_frame_for_reference_by_frame_idx(p_dpb, long_term_frame_idx);
+        // The unmark call above may move active_fs; point it back at the
+        // current decode target before marking it.
+        h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc);
+        active_fs->frame.is_long_term = 1;
+        active_fs->frame.long_term_frame_idx = long_term_frame_idx;
+        active_fs->frame.long_term_pic_num = long_term_frame_idx;
+    }
+    else
+    {
+        if(viddec_h264_get_dec_structure(active_fs) == TOP_FIELD)
+        {
+            active_fs->top_field.is_long_term = 1;
+            active_fs->top_field.long_term_frame_idx = long_term_frame_idx;
+
+            // Long-term pic num for a same-parity field: 2 * idx + 1.
+            active_fs->top_field.long_term_pic_num = (long_term_frame_idx << 1) + 1;
+        }
+        else
+        {
+            active_fs->bottom_field.is_long_term = 1;
+            active_fs->bottom_field.long_term_frame_idx = long_term_frame_idx;
+
+            // Long-term pic num for a same-parity field: 2 * idx + 1.
+            active_fs->bottom_field.long_term_pic_num = (long_term_frame_idx << 1) + 1;
+
+        }
+        h264_dpb_unmark_long_term_field_for_reference_by_frame_idx(p_dpb, long_term_frame_idx, p_dpb->fs_dec_idc, viddec_h264_get_dec_structure(active_fs));
+    }
+    // Add to long term list
+    //h264_dpb_add_ltref_list(p_dpb->fs_dec_idc);
+
+    return;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_unmark_long_term_frame_for_reference_by_frame_idx ()
+//
+// Mark a long-term reference frame or complementary field pair unused for referemce
+// NOTE: Obviously this ftn cannot be used to unmark individual fields...
+//////////////////////////////////////////////////////////////////////////////
+
+// Unmark every long-term reference frame (or complementary field pair)
+// holding long_term_frame_idx and remove it from the long-term list.
+// Fix: h264_dpb_remove_ltref_list() compacts fs_ltref_idc[], so iterating
+// forward without compensation skipped the entry following each removal.
+// Use the same removed-count compensation as
+// h264_dpb_mm_update_max_long_term_frame_idx().
+void h264_dpb_unmark_long_term_frame_for_reference_by_frame_idx(h264_DecodedPictureBuffer *p_dpb, int32_t long_term_frame_idx)
+{
+    uint32_t idx;
+    uint32_t cur;
+    uint32_t removed = 0;
+    uint32_t initial_count = p_dpb->ltref_frames_in_buffer;
+
+    for(idx = 0; idx < initial_count; idx++)
+    {
+        // Compensate for entries already removed from the shrinking list.
+        cur = idx - removed;
+        h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ltref_idc[cur]);
+
+        if (active_fs->long_term_frame_idx == long_term_frame_idx)
+        {
+            removed++;
+            h264_dpb_unmark_for_long_term_reference(p_dpb, p_dpb->fs_ltref_idc[cur]);
+            h264_dpb_remove_ltref_list(p_dpb, p_dpb->fs_ltref_idc[cur]);
+        }
+    }
+    return;
+}
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_unmark_long_term_field_for_reference_by_frame_idx ()
+//
+// Mark a long-term reference field unused for reference. However if it is the
+// complementary field (opposite polarity) of the picture stored in fs_idc,
+// we do not unmark it
+//////////////////////////////////////////////////////////////////////////////
+
+// Unmark the long-term reference field holding long_term_frame_idx, unless
+// it is the opposite-polarity (complementary) field of the picture stored in
+// fs_idc, in which case it is left marked.
+void h264_dpb_unmark_long_term_field_for_reference_by_frame_idx(h264_DecodedPictureBuffer *p_dpb, int32_t long_term_frame_idx, int32_t fs_idc, int32_t polarity)
+{
+ uint32_t idx;
+ int32_t found = 0;
+ int32_t is_complement = 0;
+
+ // Scan the long-term list for a frame store using this index.
+ for (idx = 0; (idx < p_dpb->ltref_frames_in_buffer) && (found == 0); idx++)
+ {
+ h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ltref_idc[idx]);
+ if (active_fs->long_term_frame_idx == long_term_frame_idx)
+ {
+ if(active_fs->fs_idc == fs_idc)
+ {
+ // The match lives in the same frame store as the current picture:
+ // if the opposite-polarity field is long-term, the match is the
+ // complement and must not be unmarked.
+ // Again these seem like redundant checks but for safety while until JM is updated
+ if (polarity == TOP_FIELD)
+ is_complement = (active_fs->bottom_field.is_long_term)? 1:0;
+ else if(polarity == BOTTOM_FIELD)
+ is_complement = (active_fs->top_field.is_long_term) ? 1:0;
+ }
+ found = 1;
+ }
+ }
+
+ if(found) {
+ if(is_complement == 0)
+ {
+ // The loop post-incremented idx once after the hit, so the matching
+ // entry is at idx-1.
+ h264_dpb_unmark_for_long_term_reference(p_dpb, p_dpb->fs_ltref_idc[idx-1]);
+ h264_dpb_remove_ltref_list(p_dpb, p_dpb->fs_ltref_idc[idx-1]);
+ }
+ }
+
+ return;
+}
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_mark_pic_long_term ()
+//
+// This is used on a picture already in the dpb - i.e. not for the current picture
+// dpb_split / dpb_combine field will perform ftnality in that case
+//
+// Marks a picture as used for long-term reference. Adds it to the long-term
+// reference list. Also removes it from the short term reference list if required
+//
+// Note: if the current picture is a frame, the picture to be marked will be a
+// short-term reference frame or short-term complemenetary reference field pair
+// We use the pic_num assigned to the frame part of the structure to locate it
+// Both its fields will have their long_term_frame_idx and long_term_pic_num
+// assigned to be equal to long_term_frame_idx
+//
+// If the current picture is a field, the picture to be marked will be a
+// short-term reference field. We use the pic_nums assigned to the field parts of
+// the structure to identify the appropriate field. We assign the long_term_frame_idx
+// of the field equal to long_term_frame_idx.
+//
+// We also check to see if this marking has resulted in both fields of the frame
+// becoming long_term. If it has, we update the frame part of the structure by
+// setting its long_term_frame_idx
+//////////////////////////////////////////////////////////////////////////////
+
+void h264_dpb_mark_pic_long_term(h264_Info * pInfo, int32_t long_term_frame_idx, int32_t picNumX)
+{
+ h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;
+ uint32_t idx;
+ int32_t mark_done;
+ int32_t polarity = 0;
+
+ mark_done = 0;
+
+ if (pInfo->img.structure == FRAME)
+ {
+ // Frame coding: find the short-term reference frame (or complementary
+ // reference field pair) whose frame-level pic_num matches picNumX.
+ for (idx = 0; (idx < p_dpb->ref_frames_in_buffer) && (!(mark_done)); idx++)
+ {
+ h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ref_idc[idx]);
+
+ // used_for_reference == 3 means both fields are references.
+ if (active_fs->frame.used_for_reference == 3)
+ {
+ if ((!(active_fs->frame.is_long_term))&&(active_fs->frame.pic_num == picNumX))
+ {
+ // Propagate the index to the frame and both fields.
+ active_fs->long_term_frame_idx = long_term_frame_idx;
+ active_fs->frame.long_term_frame_idx = long_term_frame_idx;
+ active_fs->top_field.long_term_frame_idx = long_term_frame_idx;
+ active_fs->bottom_field.long_term_frame_idx = long_term_frame_idx;
+
+ active_fs->frame.is_long_term = 1;
+ active_fs->top_field.is_long_term = 1;
+ active_fs->bottom_field.is_long_term = 1;
+
+ viddec_h264_set_is_frame_long_term(active_fs, 3);
+ mark_done = 1;
+
+ // Assign long-term pic num
+ active_fs->frame.long_term_pic_num = long_term_frame_idx;
+ active_fs->top_field.long_term_pic_num = long_term_frame_idx;
+ active_fs->bottom_field.long_term_pic_num = long_term_frame_idx;
+ // Add to long term list
+ h264_dpb_add_ltref_list(p_dpb, p_dpb->fs_ref_idc[idx]);
+ // Remove from short-term list
+ h264_dpb_remove_ref_list(p_dpb, p_dpb->fs_ref_idc[idx]);
+ }
+ }
+ }
+ }
+ else
+ {
+ // Field coding: the lookup sets the file-global active_fs to the store
+ // containing the matching field (when one exists).
+ polarity = h264_dpb_get_pic_struct_by_pic_num(p_dpb, picNumX);
+ // NOTE(review): flagged /////BUG in the original. If polarity ==
+ // INVALID, active_fs is whatever store the lookup last visited, so
+ // this write lands on an unrelated frame store -- confirm callers
+ // guarantee a match before relying on this path.
+ active_fs->long_term_frame_idx = long_term_frame_idx; /////BUG
+
+ if(polarity == TOP_FIELD)
+ {
+ active_fs->top_field.long_term_frame_idx = long_term_frame_idx;
+ active_fs->top_field.is_long_term = 1;
+ viddec_h264_set_is_top_long_term(active_fs, 1);
+
+ // Long-term pic num: 2*idx + 1 if same parity as the current field.
+ active_fs->top_field.long_term_pic_num = (long_term_frame_idx << 1) + ((pInfo->img.structure == TOP_FIELD) ? 1 : 0);
+
+ }
+ else if (polarity == BOTTOM_FIELD)
+ {
+ active_fs->bottom_field.long_term_frame_idx = long_term_frame_idx;
+ active_fs->bottom_field.is_long_term = 1;
+ viddec_h264_set_is_bottom_long_term(active_fs, 1);
+
+ // Long-term pic num: 2*idx + 1 if same parity as the current field.
+ active_fs->bottom_field.long_term_pic_num = (long_term_frame_idx << 1) + ((pInfo->img.structure == BOTTOM_FIELD) ? 1 : 0);
+ }
+
+ // Both fields now long-term: promote the whole frame to long-term and
+ // take it off the short-term list.
+ if (viddec_h264_get_is_long_term(active_fs) == 3)
+ {
+ active_fs->frame.is_long_term = 1;
+ active_fs->frame.long_term_frame_idx = long_term_frame_idx;
+ h264_dpb_remove_ref_list(p_dpb, active_fs->fs_idc);
+ }
+ else
+ {
+ // We need to add this idc to the long term ref list...
+ h264_dpb_add_ltref_list(p_dpb, active_fs->fs_idc);
+
+ // If the opposite field is not a short term reference, remove it from the
+ // short term list. Since we know top field is a reference but both are not long term
+ // we can simply check that both fields are not references...
+ if(active_fs->frame.used_for_reference != 3)
+ h264_dpb_remove_ref_list(p_dpb, active_fs->fs_idc);
+ }
+ }
+ return;
+} ///// End of mark pic long term
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_adaptive_memory_management ()
+//
+// Perform Adaptive memory control decoded reference picture marking process
+//////////////////////////////////////////////////////////////////////////////
+
+// Applies each memory_management_control_operation parsed from the slice
+// header, in order (H.264 spec 8.2.5.4), then performs the MMCO-5 reset of
+// frame_num and POC if one was seen.
+void h264_dpb_adaptive_memory_management (h264_Info * pInfo)
+{
+ h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;
+ int32_t idx;
+
+ idx = 0;
+
+ while (idx < pInfo->SliceHeader.sh_dec_refpic.dec_ref_pic_marking_count)
+ {
+ switch(pInfo->SliceHeader.sh_dec_refpic.memory_management_control_operation[idx])
+ {
+ case 1:{ //Mark a short-term reference picture as "unused for reference"
+ h264_dpb_mm_unmark_short_term_for_reference(pInfo,
+ pInfo->SliceHeader.sh_dec_refpic.difference_of_pic_num_minus1[idx]);
+ } break;
+ case 2:{ //Mark a long-term reference picture as "unused for reference"
+ h264_dpb_mm_unmark_long_term_for_reference(pInfo,
+ pInfo->SliceHeader.sh_dec_refpic.long_term_pic_num[idx]);
+ }break;
+ case 3:{ //Mark a short-term reference picture as "used for long-term reference" and assign a long-term frame index to it
+ h264_dpb_mm_assign_long_term_frame_idx(pInfo,
+ pInfo->SliceHeader.sh_dec_refpic.difference_of_pic_num_minus1[idx],
+ pInfo->SliceHeader.sh_dec_refpic.long_term_frame_idx[idx]);
+ }break;
+ case 4:{ //Specify the maximum long-term frame index and
+ //mark all long-term reference pictures having long-term frame indices greater than
+ //the maximum value as "unused for reference"
+ h264_dpb_mm_update_max_long_term_frame_idx (&pInfo->dpb,
+ pInfo->SliceHeader.sh_dec_refpic.max_long_term_frame_idx_plus1[idx]);
+ }break;
+ case 5:{ //Mark all reference pictures as "unused for reference" and set the MaxLongTermFrameIdx variable to
+ // "no long-term frame indices"
+ h264_dpb_mm_unmark_all_short_term_for_reference(&pInfo->dpb);
+ h264_dpb_mm_update_max_long_term_frame_idx(&pInfo->dpb, 0);
+ pInfo->img.last_has_mmco_5 = 1;
+ }break;
+ case 6:{ //Mark the current picture as "used for long-term reference" and assign a long-term frame index to it
+ h264_dpb_mm_mark_current_picture_long_term(&pInfo->dpb,
+ pInfo->SliceHeader.sh_dec_refpic.long_term_frame_idx[idx]);
+ }break;
+ // Any other operation value is silently ignored here.
+ }
+ idx++;
+ }
+
+
+ // MMCO 5: behave as if the current picture had frame_num 0 and rebase its
+ // POC to zero, then flush the DPB (H.264 spec 8.2.5.4.5).
+ if (pInfo->img.last_has_mmco_5)
+ {
+ pInfo->img.frame_num = 0;
+ pInfo->SliceHeader.frame_num=0;
+ h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc);
+
+ if (viddec_h264_get_dec_structure(active_fs) == FRAME)
+ {
+ // Shift the running POC trackers so the current frame's POC is 0.
+ pInfo->img.bottompoc -= active_fs->frame.poc;
+ pInfo->img.toppoc -= active_fs->frame.poc;
+
+
+ active_fs->frame.poc = 0;
+ active_fs->frame.pic_num = 0;
+ active_fs->frame_num = 0;
+ }
+
+ else if (viddec_h264_get_dec_structure(active_fs) == TOP_FIELD)
+ {
+ active_fs->top_field.poc = active_fs->top_field.pic_num = 0;
+ pInfo->img.toppoc = active_fs->top_field.poc;
+ }
+ else if (viddec_h264_get_dec_structure(active_fs) == BOTTOM_FIELD)
+ {
+ active_fs->bottom_field.poc = active_fs->bottom_field.pic_num = 0;
+ pInfo->img.bottompoc = 0;
+ }
+
+ h264_dpb_flush_dpb(pInfo, 1, pInfo->img.second_field,pInfo->active_SPS.num_ref_frames);
+ }
+ // Reset the marking count operations for the current picture...
+ pInfo->SliceHeader.sh_dec_refpic.dec_ref_pic_marking_count = 0;
+
+ return;
+} ////// End of adaptive memory management
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_gaps_in_frame_num_mem_management ()
+//
+// Produces a set of frame_nums pertaining to "non-existing" pictures
+// Calls h264_dpb_store_picture_in_dpb
+//////////////////////////////////////////////////////////////////////////////
+
+// Detects gaps in frame_num and synthesizes "non-existing" reference frames
+// for each missing frame_num, storing them in the DPB (H.264 spec 8.2.5.2).
+void h264_dpb_gaps_in_frame_num_mem_management(h264_Info * pInfo)
+{
+ int32_t temp_frame_num = 0;
+ int32_t idx, prev_idc;
+ int32_t prev_frame_num_plus1_wrap;
+ uint32_t temp;
+ // MaxFrameNum = 2^(log2_max_frame_num_minus4 + 4); always >= 16, so the
+ // "if (MaxFrameNum)" guards below are always taken and temp is written
+ // before it is read.
+ int32_t MaxFrameNum = 1 << (pInfo->active_SPS.log2_max_frame_num_minus4 + 4);
+ seq_param_set_used_ptr active_sps = &pInfo->active_SPS;
+ h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;
+
+ pInfo->img.gaps_in_frame_num = 0;
+
+ // pInfo->img.last_has_mmco_5 set thru store_picture_in_dpb
+ if (pInfo->img.last_has_mmco_5)
+ {
+ // If the previous picture was an unpaired field, mark it as a dangler
+ if(p_dpb->used_size)
+ {
+ idx = p_dpb->used_size-1;
+ prev_idc = p_dpb->fs_dpb_idc[idx];
+ if (prev_idc != MPD_DPB_FS_NULL_IDC)
+ {
+ h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dpb_idc[idx]);
+ active_fs->frame_num =0;
+ }
+ }
+ // MMCO 5 behaves as if the previous picture had frame_num 0.
+ pInfo->img.PreviousFrameNumOffset = 0;
+ //CONFORMANCE_ISSUE
+ pInfo->img.PreviousFrameNum = 0;
+
+ }
+
+ // Check for gaps in frame_num
+ if(pInfo->SliceHeader.idr_flag) {
+ // An IDR resets numbering: no gap by definition.
+ pInfo->img.PreviousFrameNum = pInfo->img.frame_num;
+ }
+ // Have we re-started following a recovery point message?
+/*
+ else if(got_sei_recovery || aud_got_restart){
+ pInfo->img.PreviousFrameNum = pInfo->img.frame_num;
+ //got_sei_recovery = 0;
+ //aud_got_restart = 0;
+ }
+*/
+ else if(pInfo->img.frame_num != pInfo->img.PreviousFrameNum)
+ {
+ // temp = (PreviousFrameNum + 1) mod MaxFrameNum.
+ if (MaxFrameNum)
+ ldiv_mod_u((uint32_t)(pInfo->img.PreviousFrameNum + 1), (uint32_t)MaxFrameNum, &temp);
+
+ prev_frame_num_plus1_wrap = temp;
+ if(pInfo->img.frame_num != prev_frame_num_plus1_wrap)
+ {
+ // Count the missing frame_num values, accounting for wrap-around.
+ pInfo->img.gaps_in_frame_num = (pInfo->img.frame_num < pInfo->img.PreviousFrameNum)? ((MaxFrameNum + pInfo->img.frame_num -1) - pInfo->img.PreviousFrameNum): (pInfo->img.frame_num - pInfo->img.PreviousFrameNum - 1);
+ // We should test for an error here - should infer an unintentional loss of pictures
+ }
+ }
+
+
+ //if(active_sps->gaps_in_frame_num_value_allowed_flag == 0) {
+ if(pInfo->img.gaps_in_frame_num && (active_sps->gaps_in_frame_num_value_allowed_flag == 0)) {
+ // infer an unintentional loss of pictures
+ // only invoke following process for a conforming bitstream
+ // when gaps_in_frame_num_value_allowed_flag is equal to 1
+ pInfo->img.gaps_in_frame_num = 0;
+
+ //mfd_printf("ERROR STREAM??\n");
+ ////// Error handling here----
+ }
+
+ /////// Removed following OLO source (Sodaville H.D)
+ //else if (pInfo->img.gaps_in_frame_num > active_sps->num_ref_frames) {
+ // // No need to produce any more non-existent frames than the amount required to flush the dpb
+ // pInfo->img.gaps_in_frame_num = active_sps->num_ref_frames;
+ //mfd_printf("gaps in frame: %d\n", gaps_in_frame_num);
+ //}
+
+ // If the previous picture was an unpaired field, mark it as a dangler
+ if(p_dpb->used_size)
+ {
+ idx = p_dpb->used_size-1;
+ prev_idc = p_dpb->fs_dpb_idc[idx];
+ if (prev_idc != MPD_DPB_FS_NULL_IDC)
+ {
+ h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dpb_idc[idx]);
+ // is_used != 3 means only one field was decoded -> unpaired.
+ if(viddec_h264_get_is_used(active_fs) != 3) {
+ h264_dpb_mark_dangling_field(p_dpb, active_fs->fs_idc); //, DANGLING_TYPE_GAP_IN_FRAME
+ }
+ }
+ }
+
+ // Synthesize one non-existing frame per missing frame_num.
+ while(temp_frame_num < pInfo->img.gaps_in_frame_num)
+ {
+ // Grab a frame store; the '1' flags it as non-existing. This also sets
+ // the file-global active_fs used below.
+ h264_dpb_assign_frame_store(pInfo, 1);
+
+ // Set up initial markings - not sure if all are needed
+ viddec_h264_set_dec_structure(active_fs, FRAME);
+
+ if(MaxFrameNum)
+ ldiv_mod_u((uint32_t)(pInfo->img.PreviousFrameNum + 1), (uint32_t)MaxFrameNum, &temp);
+
+ active_fs->frame.pic_num = temp;
+ active_fs->long_term_frame_idx = 0;
+ active_fs->frame.long_term_pic_num = 0;
+ viddec_h264_set_is_frame_long_term(active_fs, 0);
+
+ // Note the call below will overwrite some aspects of the img structure with info relating to the
+ // non-existent picture
+ // However, since this is called before h264_hdr_decoding_poc() for the current existing picture
+ // it should be o.k.
+ if(pInfo->img.pic_order_cnt_type)
+ h264_hdr_decoding_poc(pInfo, 1, temp);
+
+ pInfo->img.structure = FRAME;
+ active_fs->frame.poc = pInfo->img.framepoc;
+
+ // call store_picture_in_dpb
+
+ h264_dpb_store_previous_picture_in_dpb(pInfo, 1, 0);
+
+ // Advance PreviousFrameNum / POC state past the synthesized frame.
+ h264_hdr_post_poc(pInfo, 1, temp, 0);
+
+ temp_frame_num++;
+ }
+}
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_unmark_for_reference ()
+//
+// Mark FrameStore unused for reference. Removes it from the short term reference list
+//////////////////////////////////////////////////////////////////////////////
+
+// Clear the "used for reference" state on every picture present in the
+// given frame store, removing it from short-term reference eligibility.
+void h264_dpb_unmark_for_reference(h264_DecodedPictureBuffer *p_dpb, int32_t fs_idc)
+{
+    int32_t used;
+
+    h264_dpb_set_active_fs(p_dpb, fs_idc);
+    used = viddec_h264_get_is_used(active_fs);
+
+    if (used & 0x1)
+        active_fs->top_field.used_for_reference = 0;
+    if (used & 0x2)
+        active_fs->bottom_field.used_for_reference = 0;
+    if (used == 3)
+        active_fs->frame.used_for_reference = 0;
+
+    // The frame-level flag is cleared unconditionally, whatever is present.
+    active_fs->frame.used_for_reference = 0;
+    return;
+}
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_unmark_for_long_term_reference ()
+//
+// mark FrameStore unused for reference and reset long term flags
+// This function does not remove it form the long term list
+//////////////////////////////////////////////////////////////////////////////
+
+// Clear both the reference and long-term state on every picture present in
+// the given frame store. Does NOT remove the store from the long-term list;
+// callers do that separately.
+void h264_dpb_unmark_for_long_term_reference(h264_DecodedPictureBuffer *p_dpb, int32_t fs_idc)
+{
+    int32_t used;
+
+    h264_dpb_set_active_fs(p_dpb, fs_idc);
+    used = viddec_h264_get_is_used(active_fs);
+
+    if (used & 0x1)
+    {
+        active_fs->top_field.used_for_reference = 0;
+        active_fs->top_field.is_long_term = 0;
+    }
+    if (used & 0x2)
+    {
+        active_fs->bottom_field.used_for_reference = 0;
+        active_fs->bottom_field.is_long_term = 0;
+    }
+    if (used == 3)
+    {
+        active_fs->frame.used_for_reference = 0;
+        active_fs->frame.is_long_term = 0;
+    }
+
+    // Frame-level flags are cleared unconditionally.
+    active_fs->frame.used_for_reference = 0;
+    viddec_h264_set_is_frame_long_term(active_fs, 0);
+
+    return;
+}
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_mark_dangling_field
+//
+// Tells HW previous field was dangling
+// Marks it in SW as so
+// Takes appropriate actions. - sys_data needs thought through...
+//////////////////////////////////////////////////////////////////////////////
+
+// Flag the given frame store as holding a dangling (unpaired) field.
+void h264_dpb_mark_dangling_field(h264_DecodedPictureBuffer *p_dpb, int32_t fs_idc)
+{
+    int32_t dec_structure;
+
+    h264_dpb_set_active_fs(p_dpb, fs_idc);
+
+    //PRINTF(MFD_NONE, " fs_idc = %d DANGLING_TYPE = %d \n", fs_idc, reason);
+    /*
+     Only mark once. This covers the situation of a dangling field followed
+     by a frame which is direct output (i.e. never entered into the dpb):
+     we could otherwise attempt to mark the prev unpaired field as a dangler
+     twice, which would upset the HW dpb_disp_q count.
+    */
+
+    if(viddec_h264_get_is_dangling(active_fs) == 0)
+    {
+        dec_structure = viddec_h264_get_dec_structure(active_fs);
+
+        // Only a lone field can dangle; both cases get the same marking.
+        if ((dec_structure == TOP_FIELD) || (dec_structure == BOTTOM_FIELD))
+        {
+            viddec_h264_set_is_dangling(active_fs, 1);
+        }
+        // Any other structure here would be a caller error; leave untouched.
+
+        //h264_send_new_decoded_frame();
+    }
+    return;
+}
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+
+
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_is_used_for_reference ()
+//
+// Check if one of the frames/fields in active_fs is used for reference
+//
+// Report via *flag whether any picture in the (file-global) active_fs frame
+// store is used for reference: the frame itself, or either field.
+void h264_dpb_is_used_for_reference(int32_t * flag)
+{
+    int32_t used = viddec_h264_get_is_used(active_fs);
+
+    /* Check out below for embedded */
+    *flag = 0;
+
+    if (active_fs->frame.used_for_reference)
+    {
+        *flag = 1;
+    }
+    else if (used == 3)     // complete frame
+    {
+        *flag = active_fs->frame.used_for_reference;
+    }
+    else
+    {
+        // Field-by-field: OR together whichever fields are present.
+        if (used & 0x1)     // top field
+            *flag = active_fs->top_field.used_for_reference;
+        if (used & 0x2)     // bottom field
+            *flag = *flag || active_fs->bottom_field.used_for_reference;
+    }
+}
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_idr_memory_management ()
+//
+// Perform Memory management for idr pictures
+//////////////////////////////////////////////////////////////////////////////
+
+// Memory management for IDR pictures: empty the DPB (with or without
+// outputting the buffered pictures), apply long_term_reference_flag to the
+// current picture, and recompute the bumping level from the level's MaxDPB.
+// Fix: the long_term_reference_flag switch fell through all cases (no
+// breaks), so a TOP_FIELD picture also marked its undecoded bottom field as
+// long-term. Per H.264 8.2.5.1 a field marks only itself; a frame marks the
+// frame and both fields.
+void h264_dpb_idr_memory_management (h264_Info * pInfo,seq_param_set_used_ptr active_sps, int32_t no_output_of_prior_pics_flag)
+{
+    h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;
+    uint32_t idx;
+    uint32_t i;
+    int32_t DPB_size;
+    int32_t FrameSizeInBytes, FrameSizeInMbs;
+    uint32_t data;
+    int32_t num_ref_frames = active_sps->num_ref_frames;
+    int32_t level_idc = active_sps->level_idc;
+    uint32_t temp_bump_level=0;
+
+
+    /// H.D-----
+    /// There are 2 kinds of dpb flush defined, one is with display, the other is without display
+    /// The function name dpb_flush actually is just the first, and the 2nd one is for error case or no_prior_output
+    /// We will rewrite the code below to make it clean and clear
+    ///
+    if (no_output_of_prior_pics_flag)
+    {
+
+        // free all stored pictures without displaying them
+        for (idx = 0; idx < p_dpb->used_size; idx = idx + 1)
+        {
+            h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dpb_idc[idx]);
+
+            //mfd_printf(" directly freeing fs_idc = %d DSN = 0x%x \n",active_fs->fs_idc, active_fs->first_dsn);
+            viddec_h264_set_is_frame_used(active_fs, 0);
+            //if( (active_fs->frame_sent == 0x01) && (active_fs->is_output == 0x0))
+            {
+                //DECODED_FRAME sent but not DISPLAY_FRAME
+                h264_dpb_unmark_for_reference(p_dpb, active_fs->fs_idc);
+                h264_dpb_remove_ref_list(p_dpb, active_fs->fs_idc);
+                //h264_send_new_display_frame(0x01); //send ignore_frame signal to Host
+
+                /// Add into drop-out list for all frms in dpb without display
+                if(!(viddec_h264_get_is_non_existent(active_fs))) {
+                    if( viddec_h264_get_is_output(&(p_dpb->fs[p_dpb->fs_dpb_idc[idx]])) ) { //// This frame has been displayed but not released
+                        p_dpb->frame_id_need_to_be_removed[p_dpb->frame_numbers_need_to_be_removed] = p_dpb->fs_dpb_idc[idx];
+                        p_dpb->frame_numbers_need_to_be_removed ++;
+                    } else { //// This frame will be removed without display
+                        p_dpb->frame_id_need_to_be_dropped[p_dpb->frame_numbers_need_to_be_dropped] = p_dpb->fs_dpb_idc[idx];
+                        p_dpb->frame_numbers_need_to_be_dropped ++;
+                    }
+                }
+            }
+
+        }
+
+        ////////////////////////////////////////// Reset Reference list
+        for (i = 0; i < p_dpb->ref_frames_in_buffer; i++)
+            p_dpb->fs_ref_idc[i] = MPD_DPB_FS_NULL_IDC;
+
+        for (i = 0; i < p_dpb->ltref_frames_in_buffer; i++)
+            p_dpb->fs_ltref_idc[i] = MPD_DPB_FS_NULL_IDC;
+
+        ////////////////////////////////////////// Reset DPB and dpb list
+        for (i = 0; i < p_dpb->used_size; i++) {
+            p_dpb->fs[p_dpb->fs_dpb_idc[i]].fs_idc = MPD_DPB_FS_NULL_IDC;
+            p_dpb->fs_dpb_idc[i] = MPD_DPB_FS_NULL_IDC;
+        }
+
+        p_dpb->used_size = 0;
+        p_dpb->ref_frames_in_buffer = 0;
+        p_dpb->ltref_frames_in_buffer = 0;
+
+        p_dpb->last_output_poc = 0x80000000;
+    }
+    else {
+        // Normal IDR: flush with display of the buffered pictures.
+        h264_dpb_flush_dpb(pInfo, 1, pInfo->img.second_field, num_ref_frames);
+    }
+
+    if (p_dpb->fs_dec_idc != MPD_DPB_FS_NULL_IDC) // added condition for use of DPB initialization
+    {
+        h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc);
+        if (pInfo->img.long_term_reference_flag)
+        {
+            p_dpb->max_long_term_pic_idx = 0;
+            // H.264 8.2.5.1: mark the current picture long-term. A frame
+            // marks itself and both fields; a lone field marks only itself.
+            switch (viddec_h264_get_dec_structure(active_fs))
+            {
+            case FRAME :
+                active_fs->frame.is_long_term = 1;
+                active_fs->top_field.is_long_term = 1;
+                active_fs->bottom_field.is_long_term = 1;
+                break;
+            case TOP_FIELD :
+                active_fs->top_field.is_long_term = 1;
+                break;
+            case BOTTOM_FIELD :
+                active_fs->bottom_field.is_long_term = 1;
+                break;
+            }
+            active_fs->long_term_frame_idx = 0;
+        }
+        else
+        {
+            p_dpb->max_long_term_pic_idx = MPD_DPB_FS_NULL_IDC;
+            viddec_h264_set_is_frame_long_term(active_fs, 0);
+        }
+    }
+
+    p_dpb->OutputLevel = 0;
+    p_dpb->OutputLevelValid = 0;
+    p_dpb->OutputCtrl = 0;
+
+
+    // Set up bumping level - do this every time a parameters set is activated...
+    if(active_sps->sps_disp.vui_parameters_present_flag)
+    {
+        if(active_sps->sps_disp.vui_seq_parameters.bitstream_restriction_flag)
+        {
+            //p_dpb->OutputLevel = active_sps->sps_disp.vui_seq_parameters.num_reorder_frames;
+            //p_dpb->OutputLevelValid = 1;
+        }
+    }
+
+    // DPB_size below is presumably MaxDPB from Table A-1 in units of 1024
+    // bytes (see the << 10 when dividing by the frame size) -- confirm
+    // against the spec table if levels are ever added.
+    switch(level_idc)
+    {
+    case h264_Level1b:
+    case h264_Level1:
+    {
+        if ((active_sps->profile_idc < 100) && ((active_sps->constraint_set_flags & 0x1) == 0)) {
+            DPB_size = 338;
+        }
+        else {
+            DPB_size = 149;
+        }
+
+        break;
+    }
+    case h264_Level11:
+    {
+        DPB_size = 338;
+        break;
+    }
+    case h264_Level12:
+    case h264_Level13:
+    case h264_Level2:
+    {
+        DPB_size = 891;
+        break;
+    }
+    case h264_Level21:
+    {
+        DPB_size = 1782;
+        break;
+    }
+    case h264_Level22:
+    case h264_Level3:
+    {
+        DPB_size = 3038;
+        break;
+    }
+    case h264_Level31:
+    {
+        DPB_size = 6750;
+        break;
+    }
+    case h264_Level32:
+    {
+        DPB_size = 7680;
+        break;
+    }
+    case h264_Level4:
+    case h264_Level41:
+    {
+        DPB_size = 12288;
+        break;
+    }
+    case h264_Level42:
+    {
+        DPB_size = 13056;
+        break;
+    }
+    case h264_Level5:
+    {
+        DPB_size = 41400;
+        break;
+    }
+    case h264_Level51:
+    {
+        DPB_size = 69120;
+        break;
+    }
+    default : DPB_size = 69120; break;
+    }
+
+    // 4:2:0 frame bytes: 256 luma + 128 chroma bytes per macroblock.
+    FrameSizeInMbs = pInfo->img.PicWidthInMbs * pInfo->img.FrameHeightInMbs;
+    FrameSizeInBytes = (FrameSizeInMbs << 8) + (FrameSizeInMbs << 7);
+
+    if(FrameSizeInBytes)
+    {
+
+        temp_bump_level = ldiv_mod_u((DPB_size << 10), FrameSizeInBytes, &data);
+
+        if(temp_bump_level > 255)
+        {
+            p_dpb->BumpLevel = 255;
+        }
+        else
+        {
+            p_dpb->BumpLevel = (uint8_t)temp_bump_level;
+        }
+    }
+
+    if (p_dpb->BumpLevel == 0)
+        p_dpb->BumpLevel = active_sps->num_ref_frames + 1;
+
+    if (p_dpb->BumpLevel > 16)
+        p_dpb->BumpLevel = 16;
+
+
+    // VUI bitstream restrictions may lower the bumping level further.
+    if(active_sps->sps_disp.vui_parameters_present_flag && active_sps->sps_disp.vui_seq_parameters.bitstream_restriction_flag) {
+
+        if (active_sps->sps_disp.vui_seq_parameters.max_dec_frame_buffering > p_dpb->BumpLevel) {
+            //MFD_PARSER_DEBUG(ERROR_H264_DPB);
+            //// err handling here
+        }
+        else {
+            p_dpb->BumpLevel = (active_sps->sps_disp.vui_seq_parameters.max_dec_frame_buffering > 1) ?
+                (active_sps->sps_disp.vui_seq_parameters.max_dec_frame_buffering) : 1;
+        }
+    }
+
+
+    // A new sequence means automatic frame release
+    //sei_information.disp_frozen = 0;
+
+    return;
+} //// End --- dpb_idr_memory_management
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_remove_frame_from_dpb ()
+//
+// remove one frame from DPB
+// The parameter index, is the location of the frame to be removed in the
+// fs_dpb_idc list. The used size is decremented by one
+//////////////////////////////////////////////////////////////////////////////
+
+void h264_dpb_remove_frame_from_dpb(h264_DecodedPictureBuffer *p_dpb, int32_t idx)
+{
+	int32_t fs_idc;
+	uint32_t i;
+
+	fs_idc = p_dpb->fs_dpb_idc[idx];
+
+	// Mark the frame store as no longer holding a decoded frame.
+	h264_dpb_set_active_fs(p_dpb, fs_idc);
+	viddec_h264_set_is_frame_used(active_fs, 0);
+
+	//add to support frame relocation interface to host
+	// Only real (existing) frames are reported to the host for removal;
+	// non-existent frames (frame_num gap fillers) were never delivered.
+	if(!(viddec_h264_get_is_non_existent(active_fs)))
+	{
+		p_dpb->frame_id_need_to_be_removed[p_dpb->frame_numbers_need_to_be_removed] = p_dpb->fs[fs_idc].fs_idc;
+		p_dpb->frame_numbers_need_to_be_removed ++;
+	}
+
+	///////////////////////////////////////// Reset FS
+	p_dpb->fs[fs_idc].fs_idc = MPD_DPB_FS_NULL_IDC;
+
+	/////Remove unused frame from dpb-list
+	// Compact fs_dpb_idc by shifting every entry after `idx` down one slot.
+	i = idx;
+	while( (i + 1)< p_dpb->used_size)
+	{
+		p_dpb->fs_dpb_idc[i] = p_dpb->fs_dpb_idc[i + 1];
+		i ++;
+	}
+	p_dpb->fs_dpb_idc[i] = MPD_DPB_FS_NULL_IDC;
+
+	////////////////////////////
+	p_dpb->used_size--;
+
+	return;
+}
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_remove_unused_frame_from_dpb ()
+//
+// Remove a picture from DPB which is no longer needed.
+// Search for a frame which is not used for reference and has previously been placed
+// in the output queue - if find one call h264_dpb_remove_frame_from_dpb() and
+// set flag 1
+//////////////////////////////////////////////////////////////////////////////
+
+void h264_dpb_remove_unused_frame_from_dpb(h264_DecodedPictureBuffer *p_dpb, int32_t * flag)
+{
+	uint32_t idx;
+	int32_t first_non_exist_valid, non_exist_idx;
+	int32_t used_for_reference = 0;
+
+	*flag = 0;
+	first_non_exist_valid = 0x0;
+	non_exist_idx = 0x0;
+
+	// Find (at most) one frame that has already been placed on the output
+	// queue and is no longer used for reference; remove it and set *flag=1.
+	for (idx = 0; (idx < p_dpb->used_size) && (*flag == 0); idx++)
+	{
+		h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dpb_idc[idx]);
+		h264_dpb_is_used_for_reference(&used_for_reference);
+
+		//if( (used_for_reference == 0x0 ) && active_fs->is_output && active_fs->is_non_existent == 0x0)
+		//{
+			//PRINTF(MFD_NONE, " requesting to send FREE: fs_idc = %d fb_id = %d \n", active_fs->fs_idc, active_fs->fb_id);
+			//dpb_release_fb(&h264_dpb, active_fs->fb_id, 1);
+		//}
+
+		if (viddec_h264_get_is_output(active_fs) && (used_for_reference == 0))
+		{
+			h264_dpb_remove_frame_from_dpb(p_dpb, idx);
+			*flag = 1;
+		}
+/*
+/////// Removed following OLO source (Sodaville H.D)
+		else if ( (first_non_exist_valid == 0x0) && active_fs->is_non_existent )
+		{
+			first_non_exist_valid = 0x01;
+			non_exist_idx = idx;
+		}
+*/
+	}
+/*
+/////// Removed following OLO source (Sodaville H.D)
+	if ( *flag == 0x0 && first_non_exist_valid) {
+		h264_dpb_remove_frame_from_dpb(p_dpb,non_exist_idx);
+		*flag = 1;
+	}
+*/
+	return;
+}	//// End of h264_dpb_remove_unused_frame_from_dpb
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_get_smallest_poc ()
+//
+// find smallest POC in the DPB which has not as yet been output
+// This function only checks for frames and dangling fields...
+// unless the dpb used size is one, in which case it will accept an unpaired field
+//////////////////////////////////////////////////////////////////////////////
+void h264_dpb_get_smallest_poc(h264_DecodedPictureBuffer *p_dpb, int32_t *poc, int32_t *pos)
+{
+	int32_t poc_int;
+	uint32_t idx;
+	int32_t first_non_output = 1;
+
+	*pos = MPD_DPB_FS_NULL_IDC;
+
+	// NOTE(review): fs_dpb_idc[0] is read before the loop; callers appear to
+	// guarantee used_size > 0 when calling this - confirm at call sites.
+	h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dpb_idc[0]);
+	poc_int = active_fs->frame.poc;
+
+	for (idx = 0; idx < p_dpb->used_size; idx++)
+	{
+		h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dpb_idc[idx]);
+
+		// Only consider entries not yet placed on the output queue.
+		if (viddec_h264_get_is_output(active_fs) == 0)
+		{
+			//PRINTF(MFD_NONE, " active_fs->fs_idc = %d active_fs->is_used = %d, active_fs->is_dangling = %d , active_fs->poc = %d \n", active_fs->fs_idc, active_fs->is_used, active_fs->is_dangling, active_fs->poc);
+			// Complete frames (is_used == 3) and dangling fields qualify...
+			if ((viddec_h264_get_is_used(active_fs) == 3) || (viddec_h264_get_is_dangling(active_fs)))
+			{
+				if (first_non_output)
+				{
+					*pos = idx;
+					first_non_output = 0;
+					poc_int = active_fs->frame.poc;
+				}
+				else if (poc_int > active_fs->frame.poc)
+				{
+					poc_int = active_fs->frame.poc;
+					*pos = idx;
+				}
+			}
+			// ...unless the DPB holds a single entry, in which case an
+			// unpaired field is also accepted.
+			else if (p_dpb->used_size == 1)
+			{
+				poc_int = active_fs->frame.poc;
+				*pos = idx;
+			}
+		}
+	}
+
+	*poc = poc_int;
+
+	return;
+}
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_split_field ()
+//
+// Extract field information from a frame
+//////////////////////////////////////////////////////////////////////////////
+
+void h264_dpb_split_field (h264_Info * pInfo)
+{
+	// Propagate frame-level POC, reference and long-term state of the
+	// active frame store down to its two field entries.
+	int32_t ref_mask = active_fs->frame.used_for_reference;
+	int32_t lt_flag  = active_fs->frame.is_long_term;
+	int32_t lt_idx   = active_fs->frame.long_term_frame_idx;
+
+	// Field POCs come straight from the current picture info.
+	active_fs->top_field.poc    = pInfo->img.toppoc;
+	active_fs->bottom_field.poc = pInfo->img.bottompoc;
+
+	// Frame reference mask: bit 0 = top field, bit 1 = bottom field.
+	active_fs->top_field.used_for_reference    = ref_mask & 1;
+	active_fs->bottom_field.used_for_reference = ref_mask >> 1;
+
+	active_fs->top_field.is_long_term    = lt_flag;
+	active_fs->bottom_field.is_long_term = lt_flag;
+
+	active_fs->long_term_frame_idx              = lt_idx;
+	active_fs->top_field.long_term_frame_idx    = lt_idx;
+	active_fs->bottom_field.long_term_frame_idx = lt_idx;
+
+	// Field motion vectors are derived from the frame MVs by the hardware
+	// through memory mapping; nothing further to do in software.
+	return;
+}
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_combine_field (int32_t use_old)
+//
+// Generate a frame from top and bottom fields
+//////////////////////////////////////////////////////////////////////////////
+
+void h264_dpb_combine_field(int32_t use_old)
+{
+	// Build frame-level state of the active frame store from its two
+	// field entries (the inverse of h264_dpb_split_field).
+	int32_t top_poc = active_fs->top_field.poc;
+	int32_t bot_poc = active_fs->bottom_field.poc;
+
+	use_old = use_old;  // parameter unused; self-assign silences the warning
+
+	// The frame POC is the smaller of the two field POCs.
+	active_fs->frame.poc = (top_poc < bot_poc) ? top_poc : bot_poc;
+
+	// A frame is a reference if either of its fields is.
+	active_fs->frame.used_for_reference =
+		active_fs->top_field.used_for_reference | (active_fs->bottom_field.used_for_reference);
+
+	// Long-term flags packed as: bit 0 = top field, bit 1 = bottom field.
+	active_fs->frame.is_long_term =
+		active_fs->top_field.is_long_term | (active_fs->bottom_field.is_long_term << 1);
+
+	if (active_fs->frame.is_long_term)
+		active_fs->frame.long_term_frame_idx = active_fs->long_term_frame_idx;
+
+	return;
+}
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_sliding_window_memory_management ()
+//
+// Perform Sliding window decoded reference picture marking process
+// It must be the reference frame, complementary reference field pair
+// or non-paired reference field that has the smallest value of
+// FrameNumWrap which is marked as unused for reference. Note : We CANNOT
+// simply use frame_num!!!!
+//
+// Although we hold frame_num_wrap in SW, currently, this is not
+// being updated for every picture (the b-picture parameter non-update
+// phenomenon of the reference software)
+//////////////////////////////////////////////////////////////////////////////
+
+void h264_dpb_sliding_window_memory_management(h264_DecodedPictureBuffer *p_dpb, int32_t NonExisting, int32_t num_ref_frames)
+{
+	// Sliding-window marking: once the number of short-term references
+	// reaches the window size (num_ref_frames minus the long-term count),
+	// the head of the short-term list (fs_ref_idc[0]) is unmarked and
+	// removed. Note: adaptive marking can run on a slice-by-slice basis,
+	// so long-term pictures may coexist with sliding-window mode.
+	if (p_dpb->ref_frames_in_buffer < (num_ref_frames - p_dpb->ltref_frames_in_buffer))
+		return;
+
+	h264_dpb_unmark_for_reference(p_dpb, p_dpb->fs_ref_idc[0]);
+	h264_dpb_remove_ref_list(p_dpb, p_dpb->fs_ref_idc[0]);
+
+	// For a real (existing) picture, also clear the long-term flag on the
+	// frame store currently being decoded.
+	if (NonExisting == 0)
+	{
+		h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc);
+		viddec_h264_set_is_frame_long_term(active_fs, 0);
+	}
+}
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_store_picture_in_dpb ()
+//
+// First we run the marking procedure.
+// Then, before we add the current frame_store to the list of reference stores we run some checks
+// These include checking the number of existing reference frames
+// in DPB and if necessary, flushing frames.
+//
+// \param NonExisting
+// If non-zero this is called to store a non-existing frame resulting from gaps_in_frame_num
+//////////////////////////////////////////////////////////////////////////////
+
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_frame_output ()
+//
+// If direct == 1, Directly output a frame without storing it in the p_dpb->
+// Therefore we must set is_used to 0, which I guess means it will not appear
+// in the fs_dpb_idc list and is_output to 1 which means it should be in the
+// fs_output_idc list.
+//
+// If it is a non-existing picture we do not actually place it in the output queue
+//////////////////////////////////////////////////////////////////////////////
+
+void h264_dpb_frame_output(h264_Info * pInfo,int32_t fs_idc, int32_t direct, int32_t * existing)
+{
+	h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;
+
+	h264_dpb_set_active_fs(p_dpb, fs_idc);
+
+	//h264_dpb_push_output_queue();
+	// SEI "display freeze": fields whose POC falls inside the
+	// [freeze_POC, release_POC) window are marked skipped instead of shown.
+	if(pInfo->sei_information.disp_frozen)
+	{
+		// check pocs
+		if(active_fs->top_field.poc >= pInfo->sei_information.freeze_POC)
+		{
+			if(active_fs->top_field.poc < pInfo->sei_information.release_POC)
+			{
+				viddec_h264_set_is_top_skipped(active_fs, 1);
+			}
+			else
+			{
+				// Past the release point - the freeze ends here.
+				pInfo->sei_information.disp_frozen = 0;
+			}
+		}
+
+		if(active_fs->bottom_field.poc >= pInfo->sei_information.freeze_POC)
+		{
+			if(active_fs->bottom_field.poc < pInfo->sei_information.release_POC)
+			{
+				viddec_h264_set_is_bottom_skipped(active_fs, 1);
+			}
+			else
+			{
+				pInfo->sei_information.disp_frozen = 0;
+			}
+		}
+	}
+
+	if ( viddec_h264_get_broken_link_picture(active_fs) )
+		pInfo->sei_information.broken_link = 1;
+
+	if( pInfo->sei_information.broken_link)
+	{
+		// Check if this was the recovery point picture - going to have recovery point on
+		// a frame basis
+		if(viddec_h264_get_recovery_pt_picture(active_fs))
+		{
+			pInfo->sei_information.broken_link = 0;
+			// Also reset wait on sei recovery point picture
+			p_dpb->WaitSeiRecovery = 0;
+		}
+		else
+		{
+			// Not recovered yet: suppress display of both fields (3 = top|bottom).
+			viddec_h264_set_is_frame_skipped(active_fs, 3);
+		}
+	}
+	else
+	{
+		// even if this is not a broken - link, we need to follow SEI recovery point rules
+		// Did we use SEI recovery point for the last restart?
+		if ( p_dpb->WaitSeiRecovery )
+		{
+			if ( viddec_h264_get_recovery_pt_picture(active_fs) ) {
+				p_dpb->WaitSeiRecovery = 0;
+			} else {
+				viddec_h264_set_is_frame_skipped(active_fs, 3);
+			}
+		}
+	}
+
+	// Output suspended until an open-GOP entry point is reached.
+	if ( p_dpb->SuspendOutput )
+	{
+		if ( viddec_h264_get_open_gop_entry(active_fs) ) {
+			p_dpb->SuspendOutput = 0;
+		} else{
+			viddec_h264_set_is_frame_skipped(active_fs, 3);
+		}
+	}
+
+	//h264_send_new_display_frame(0x0);
+	viddec_h264_set_is_output(active_fs, 1);
+
+	// Only real frames are queued for display; non-existent frames
+	// (frame_num gap fillers) are merely marked as output.
+	if(viddec_h264_get_is_non_existent(active_fs) == 0)
+	{
+		*existing = 1;
+		p_dpb->frame_id_need_to_be_displayed[p_dpb->frame_numbers_need_to_be_displayed]=active_fs->fs_idc;
+		p_dpb->frame_numbers_need_to_be_displayed++;
+
+		//if(direct)
+			//h264_dpb_remove_frame_from_dpb(p_dpb, active_fs->fs_idc);		// Remove dpb.fs_dpb_idc[pos]
+	}
+	else
+	{
+		*existing = 0;
+	}
+
+	// Direct output bypasses DPB storage: clear all use/reference state so
+	// the frame store is immediately reusable.
+	if(direct) {
+		viddec_h264_set_is_frame_used(active_fs, 0);
+		active_fs->frame.used_for_reference = 0;
+		active_fs->top_field.used_for_reference = 0;
+		active_fs->bottom_field.used_for_reference = 0;
+		active_fs->fs_idc = MPD_DPB_FS_NULL_IDC;
+	}
+	return;
+}	///////// End of dpb frame output
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_output_one_frame_from_dpb ()
+//
+// Output one frame stored in the DPB. Basically this results in its placement
+// in the fs_output_idc list.
+// Placement in the output queue should cause an automatic removal from the dpb
+// if the frame store is not being used as a reference
+// This may need another param for a frame request so that it definitely outputs one non-existing frame
+//////////////////////////////////////////////////////////////////////////////
+int32_t h264_dpb_output_one_frame_from_dpb(h264_Info* pInfo,int32_t direct, int32_t request, int32_t num_ref_frames)
+{
+	h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;
+	int32_t poc;
+	int32_t pos;
+	int32_t used_for_reference;
+
+	int32_t existing = 0;
+	int32_t is_refused = 0;
+	int32_t is_pushed = 0;
+
+	//remove warning
+	request = request;
+
+	// Direct mode: output the picture currently being decoded without
+	// storing it in the DPB.
+	if(direct)
+	{
+		h264_dpb_frame_output(pInfo, p_dpb->fs_dec_idc, 1, &existing);
+	}
+	else
+	{
+		if(p_dpb->used_size != 0)
+		{
+			// Should this be dpb.not_as_yet_output_num > 0 ??
+			// There should maybe be a is_refused == 0 condition instead...
+			while ((p_dpb->used_size > 0) && (existing == 0) && (is_refused == 0))
+			{
+				// find smallest non-output POC
+				h264_dpb_get_smallest_poc(p_dpb, &poc, &pos);
+				if (pos != MPD_DPB_FS_NULL_IDC)
+				{
+					// put it into the output queue
+					h264_dpb_frame_output(pInfo, p_dpb->fs_dpb_idc[pos], 0, &existing);
+
+					p_dpb->last_output_poc = poc;
+					if (existing) is_pushed = 1;
+					// If non-reference, free frame store and move empty store to end of buffer
+
+					h264_dpb_is_used_for_reference(&used_for_reference);
+					if (!(used_for_reference))
+						h264_dpb_remove_frame_from_dpb(p_dpb, pos);		// Remove dpb.fs_dpb_idc[pos]
+				}
+				else
+				{
+					int32_t flag;
+					uint32_t idx;
+
+					// This is basically an error condition caused by too many reference frames in the DPB.
+					// It should only happen in errored streams, and can happen if this picture had an MMCO,
+					// thus disabling h264_dpb_sliding_window_memory_management(), which would normally have
+					// unmarked the oldest reference frame.
+					h264_dpb_sliding_window_memory_management(p_dpb, 0,num_ref_frames);
+					h264_dpb_remove_unused_frame_from_dpb(p_dpb, &flag);
+
+					// If sliding-window eviction freed nothing, forcibly
+					// unmark the first reference frame found and retry removal.
+					if (flag == 0) {
+						for (idx = 0; idx < p_dpb->used_size; idx++)
+						{
+							h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dpb_idc[idx]);
+							h264_dpb_is_used_for_reference(&used_for_reference);
+
+							if (used_for_reference) {
+								break;
+							}
+						}
+
+						if (idx < p_dpb->used_size) {
+							// Short term
+							h264_dpb_unmark_for_reference(p_dpb, p_dpb->fs_dpb_idc[idx]);
+							h264_dpb_remove_ref_list(p_dpb, p_dpb->fs_dpb_idc[idx]);
+
+							// Long term
+							h264_dpb_unmark_for_long_term_reference(p_dpb, p_dpb->fs_dpb_idc[idx]);
+							h264_dpb_remove_ltref_list(p_dpb, p_dpb->fs_dpb_idc[idx]);
+
+							// Remove from DPB
+							h264_dpb_remove_unused_frame_from_dpb(p_dpb, &flag);
+						}
+					}
+					// Report a frame output even on this error path.
+					return 1;
+				}
+			}
+		}
+	}
+
+	return is_pushed;
+}
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_queue_update
+//
+// This should be called anytime the output queue might be changed
+//////////////////////////////////////////////////////////////////////////////
+
+int32_t h264_dpb_queue_update(h264_Info* pInfo,int32_t push, int32_t direct, int32_t frame_request, int32_t num_ref_frames)
+{
+	// Called whenever the output queue may change: either push the next
+	// frame out of the DPB (push != 0) or service an explicit frame
+	// request. Returns non-zero when a frame was actually output.
+	if (push)
+		return h264_dpb_output_one_frame_from_dpb(pInfo, direct, 0, num_ref_frames);
+
+	if (frame_request)
+		return h264_dpb_output_one_frame_from_dpb(pInfo, 0, 1, num_ref_frames);
+
+	return 0;
+}
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_flush_dpb ()
+//
+// Unmarks all reference pictures in the short-term and long term lists and
+// in doing so resets the lists.
+//
+// Flushing the dpb, adds all the current frames in the dpb, not already on the output list
+// to the output list and removes them from the dpb (they will all be marked as unused for
+// reference first)
+//////////////////////////////////////////////////////////////////////////////
+
+void h264_dpb_flush_dpb (h264_Info* pInfo,int32_t output_all, int32_t keep_complement, int32_t num_ref_frames)
+{
+	h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;
+
+	int32_t idx, flag;
+	int32_t ref_frames_in_buffer;
+
+	// Unmark every short-term reference. Removal shifts the list down, so
+	// the head (fs_ref_idc[0]) is dropped on every iteration.
+	ref_frames_in_buffer = p_dpb->ref_frames_in_buffer;
+
+	for (idx = 0; idx < ref_frames_in_buffer; idx++){
+		h264_dpb_unmark_for_reference(p_dpb, p_dpb->fs_ref_idc[0]);
+		h264_dpb_remove_ref_list(p_dpb, p_dpb->fs_ref_idc[0]);
+	}
+
+	// Same for the long-term reference list.
+	ref_frames_in_buffer = p_dpb->ltref_frames_in_buffer;
+
+	for (idx = 0; idx < ref_frames_in_buffer; idx++)
+	{
+		h264_dpb_unmark_for_long_term_reference(p_dpb, p_dpb->fs_ltref_idc[0]);
+		h264_dpb_remove_ltref_list(p_dpb, p_dpb->fs_ltref_idc[0]);
+	}
+
+	// output frames in POC order, keeping `keep_complement` entries behind
+	// (used to retain an unpaired field awaiting its complement).
+	if (output_all) {
+		while (p_dpb->used_size - keep_complement) {
+			h264_dpb_queue_update(pInfo, 1, 0, 0,num_ref_frames);
+		}
+	}
+
+	// Keep removing frames until nothing removable remains.
+	flag = 1;
+	while (flag) {
+		h264_dpb_remove_unused_frame_from_dpb(p_dpb, &flag);
+	}
+
+	return;
+}
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_reset_dpb ()
+//
+// Used to reset the contents of dpb
+// Must calculate memory (aligned) pointers for each of the possible frame stores
+//
+// Also want to calculate possible max dpb size in terms of frames
+// We should have an active SPS when we call this ftn to calc bumping level
+//////////////////////////////////////////////////////////////////////////////
+void h264_dpb_reset_dpb(h264_Info * pInfo,int32_t PicWidthInMbs, int32_t FrameHeightInMbs, int32_t SizeChange, int32_t no_output_of_prior_pics_flag)
+{
+	h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;
+
+	int32_t num_ref_frames = pInfo->active_SPS.num_ref_frames;
+
+
+	// If half way through a frame then Frame in progress will still be high,
+	// so mark the previous field as a dangling field. This is also needed to
+	// keep cs7050_sif_dpb_disp_numb_ptr correct. Better to reset instead?
+	if(p_dpb->used_size)
+	{
+		int32_t idx;
+		idx = p_dpb->used_size-1;
+		if (p_dpb->fs_dpb_idc[idx] != MPD_DPB_FS_NULL_IDC)
+		{
+			h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dpb_idc[idx]);
+
+			// is_used != 3 means only one field of the pair was decoded.
+			if(viddec_h264_get_is_used(active_fs) != 3)
+				h264_dpb_mark_dangling_field(p_dpb, active_fs->fs_idc);  //, DANGLING_TYPE_DPB_RESET
+		}
+	}
+
+	// initialize software DPB
+	if(active_fs) {
+		viddec_h264_set_dec_structure(active_fs, INVALID);
+	}
+	h264_dpb_idr_memory_management(pInfo, &pInfo->active_SPS, no_output_of_prior_pics_flag);  // implied no_output_of_prior_pics_flag==1
+
+
+	// May always be a size change which calls this function now...
+	// could eliminate below branch
+	if(SizeChange)
+	{
+
+		/***
+		Note : 21/03/2005 14:16
+		Danger associated with resetting curr_alloc_mem as it would allow the FW to reallocate
+		frame stores from 0 -> NUM_FRAME_STORES again - could lead to queue overflow and corruption
+
+		Placed in size change condition in the hope that this will only ensure dpb is empty
+		and thus this behaviour is valid before continuing again
+		***/
+
+
+		p_dpb->PicWidthInMbs = PicWidthInMbs;
+		p_dpb->FrameHeightInMbs = FrameHeightInMbs;
+
+		p_dpb->fs_dec_idc = MPD_DPB_FS_NULL_IDC;
+		//Flush the current DPB.
+		h264_dpb_flush_dpb(pInfo, 1,0,num_ref_frames);
+	}
+
+	return;
+}	///// End of reset DPB
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+// ---------------------------------------------------------------------------
+// Note that if an 'missing_pip_fb' condition exists, the message will
+// sent to the host each time setup_free_fb is called. However, since this
+// condition is not expected to happen if pre-defined steps are followed, we let
+// it be for now and will change it if required. Basically, as long as host
+// enables PiP after adding PiP buffers and disables PiP before removing buffers
+// and matches PiP fb_id's with normal decode fb_id's this condition should
+// not occur.
+// ---------------------------------------------------------------------------
+int32_t dpb_setup_free_fb( h264_DecodedPictureBuffer *p_dpb, uint8_t* fb_id, pip_setting_t* pip_setting )
+{
+	// Scan the frame-store pool for an unused slot. On success, claim the
+	// slot, write its index to *fb_id and return 0; return 1 when the pool
+	// is exhausted. pip_setting is currently unused.
+	uint8_t slot;
+
+	pip_setting = pip_setting;  // silence unused-parameter warning
+
+	for (slot = 0; slot < NUM_DPB_FRAME_STORES; slot++)
+	{
+		if (p_dpb->fs[slot].fs_idc == MPD_DPB_FS_NULL_IDC)
+		{
+			*fb_id = slot;
+			break;
+		}
+	}
+
+	if (slot == NUM_DPB_FRAME_STORES)
+		return 1;
+
+	p_dpb->fs[slot].fs_idc = slot;
+
+	return 0;
+}
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_assign_frame_store ()
+//
+// may need a non-existing option parameter
+//
+
+int32_t h264_dpb_assign_frame_store(h264_Info * pInfo, int32_t NonExisting)
+{
+	uint8_t idc = MPD_DPB_FS_NULL_IDC;
+	pip_setting_t pip_setting;
+	h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;
+
+
+	while( dpb_setup_free_fb(p_dpb, &idc, &pip_setting) != 0 ) {
+		///
+		/// Generally this is triggered by an error case, no more frame buffer available for next
+		/// What we do here is just remove one with min-POC before getting more info
+		///
+
+		int32_t pos = 0, poc = 0, existing = 1;
+
+		// find smallest non-output POC
+		h264_dpb_get_smallest_poc(p_dpb, &poc, &pos);
+		if (pos != MPD_DPB_FS_NULL_IDC)
+		{
+			// put it into the output queue
+			h264_dpb_frame_output(pInfo, p_dpb->fs_dpb_idc[pos], 0, &existing);
+			p_dpb->last_output_poc = poc;
+			h264_dpb_remove_frame_from_dpb(p_dpb, pos);		// Remove dpb.fs_dpb_idc[pos]
+		}
+	}
+
+
+	// The freed slot becomes either the non-existent store (frame_num gap
+	// handling) or the store for the picture about to be decoded.
+	if(NonExisting) {
+		p_dpb->fs_non_exist_idc = idc;
+	}else {
+		p_dpb->fs_dec_idc = idc;
+	}
+
+	//add to support frame relocation interface to host
+	if(!NonExisting)
+	{
+		p_dpb->frame_numbers_need_to_be_allocated = 1;
+		p_dpb->frame_id_need_to_be_allocated = p_dpb->fs_dec_idc;
+	}
+
+
+	///////////////////////////////h264_dpb_reset_fs();
+	h264_dpb_set_active_fs(p_dpb, idc);
+	active_fs->fs_flag_1 = 0;
+	active_fs->fs_flag_2 = 0;
+	viddec_h264_set_is_non_existent(active_fs, NonExisting);
+	viddec_h264_set_is_output(active_fs, (NonExisting?1:0));
+
+	active_fs->pic_type = ((FRAME_TYPE_INVALID<<FRAME_TYPE_TOP_OFFSET)|(FRAME_TYPE_INVALID<<FRAME_TYPE_BOTTOM_OFFSET)); //----
+
+	// Only put members in here which will not be reset somewhere else
+	// and which could be used before they are overwritten again with
+	// new valid values
+	// eg ->is_used is reset on removal from dpb, no need for it here
+	// ->poc would only be changed when we overwrite on insert_Picture_in_dpb()
+	//    but would be used by get_smallest_poc()
+	// ->top.poc would also not be overwritten until a new valid value comes along,
+	//    but I don't think it is used before then so no need to reset
+	//active_fs->is_long_term = 0;
+	active_fs->frame.used_for_reference = 0;
+	active_fs->frame.poc = 0;
+
+	return 1;
+}
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_update_queue_dangling_field (h264_Info * pInfo)
+//
+// Update DPB for Dangling field special case
+//
+void h264_dpb_update_queue_dangling_field(h264_Info * pInfo)
+{
+	h264_DecodedPictureBuffer *dpb_ptr = &pInfo->dpb;
+	int32_t prev_pic_unpaired_field = 0;
+
+	// Only act when the DPB has grown past its bumping level.
+	if(dpb_ptr->used_size > dpb_ptr->BumpLevel)
+	{
+		if (dpb_ptr->fs_dpb_idc[dpb_ptr->used_size-1] != MPD_DPB_FS_NULL_IDC)
+		{
+			h264_dpb_set_active_fs(dpb_ptr, dpb_ptr->fs_dpb_idc[dpb_ptr->used_size-1]);
+			// is_used != 3 means only one field of the newest store is filled.
+			if(viddec_h264_get_is_used(active_fs) != 3)
+			{
+				prev_pic_unpaired_field = 1;
+			}
+		}
+
+		if (pInfo->img.structure != FRAME)
+		{
+			// To prove this is the second field,
+			// 1) The previous picture is an (as yet) unpaired field
+			if(prev_pic_unpaired_field)
+			{
+				// If we establish the previous pic was an unpaired field and this picture is not
+				// its complement, the previous picture was a dangling field
+				if(pInfo->img.second_field == 0) {
+					while(dpb_ptr->used_size > dpb_ptr->BumpLevel)
+						h264_dpb_queue_update(pInfo, 1, 0, 0,pInfo->active_SPS.num_ref_frames); // flush a frame
+				}
+			}
+		}
+		else if (prev_pic_unpaired_field) {
+			while(dpb_ptr->used_size > dpb_ptr->BumpLevel)
+				h264_dpb_queue_update(pInfo, 1, 0, 0,pInfo->active_SPS.num_ref_frames); // flush a frame
+		}
+	}
+
+
+	return;
+}	///// End of h264_dpb_update_queue_dangling_field
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_init_frame_store (h264_Info * pInfo)
+//
+// Set the frame store to be used in decoding the picture
+//
+
+void h264_dpb_init_frame_store(h264_Info * pInfo)
+{
+	h264_DecodedPictureBuffer *dpb_ptr = &pInfo->dpb;
+
+	int32_t free_fs_found;
+	int32_t idx = 0;
+	int32_t prev_pic_unpaired_field = 0;
+	int32_t prev_idc = MPD_DPB_FS_NULL_IDC;
+	int32_t structure = pInfo->img.structure;
+
+	if(dpb_ptr->used_size)
+	{
+		idx = dpb_ptr->used_size-1;
+		prev_idc = dpb_ptr->fs_dpb_idc[idx];
+	}
+
+	// Determine whether the newest DPB entry is a still-unpaired field.
+	if (prev_idc != MPD_DPB_FS_NULL_IDC)
+	{
+		h264_dpb_set_active_fs(dpb_ptr, dpb_ptr->fs_dpb_idc[dpb_ptr->used_size-1]);
+		if(viddec_h264_get_is_used(active_fs) != 3)
+		{
+			//PRINTF(MFD_NONE, " FN: %d active_fs->is_used = %d \n", (h264_frame_number+1), active_fs->is_used);
+			prev_pic_unpaired_field = 1;
+		}
+	}
+
+	//if ((pInfo->img.curr_has_mmco_5) || (pInfo->img.idr_flag)) curr_fld_not_prev_comp = 1;
+
+	if (structure != FRAME)
+	{
+
+		// To prove this is the second field,
+		// 1) The previous picture is an (as yet) unpaired field
+		if(prev_pic_unpaired_field)
+		{
+			// If we establish the previous pic was an unpaired field and this picture is not
+			// its complement, the previous picture was a dangling field
+			if(pInfo->img.second_field == 0)
+				h264_dpb_mark_dangling_field(dpb_ptr, active_fs->fs_idc);  //, DANGLING_TYPE_FIELD
+		}
+	}
+	else if (prev_pic_unpaired_field) {
+		h264_dpb_mark_dangling_field(dpb_ptr, active_fs->fs_idc);  //, DANGLING_TYPE_FRAME
+	}
+
+	free_fs_found = 0;
+
+	// If this is not a second field, we must find a free space for the current picture
+	// (a second field reuses the frame store of its first field).
+	if (!(pInfo->img.second_field))
+	{
+		dpb_ptr->fs_dec_idc = MPD_DPB_FS_NULL_IDC;
+		free_fs_found = h264_dpb_assign_frame_store(pInfo, 0);
+		//h264_frame_number++;
+		//PRINTF(MFD_NONE, " FN: %d (inc) fs_idc =  %d \n", (h264_frame_number+1), dpb.fs_dec_idc);
+	}
+
+	h264_dpb_set_active_fs(dpb_ptr, dpb_ptr->fs_dec_idc);
+
+	////////////// TODO: The following init
+#if 1
+	// Track whether the first field of the pair was intra-coded.
+	if( pInfo->img.second_field) {
+		//active_fs->second_dsn = pInfo->img.dsn;
+		//active_fs->prev_dsn = pInfo->img.prev_dsn;
+		if (active_fs->pic_type == FRAME_TYPE_IDR ||
+			active_fs->pic_type == FRAME_TYPE_I) {
+
+			viddec_h264_set_first_field_intra(active_fs, 1);
+		} else {
+			viddec_h264_set_first_field_intra(active_fs, 0);
+		}
+
+	}
+	else {
+		//active_fs->first_dsn = pInfo->img.dsn;
+		//active_fs->prev_dsn = pInfo->img.prev_dsn;
+		viddec_h264_set_first_field_intra(active_fs, 0);
+	}
+
+	if (pInfo->img.structure == FRAME) {
+		//active_fs->second_dsn = 0x0;
+	}
+
+	if ( pInfo->sei_information.broken_link_pic )
+	{
+		viddec_h264_set_broken_link_picture(active_fs, 1);
+		pInfo->sei_information.broken_link_pic = 0;
+	}
+
+	// SEI recovery point: a reference picture at recovery_frame_num marks
+	// the point from which display may resume.
+	if ((pInfo->img.frame_num == pInfo->sei_information.recovery_frame_num)&&(pInfo->SliceHeader.nal_ref_idc != 0))
+		viddec_h264_set_recovery_pt_picture(active_fs, 1);
+
+	//if ((( gRestartMode.aud ) || ( gRestartMode.sei )) && ( !gRestartMode.idr))
+	if(pInfo->img.recovery_point_found == 6)
+	{
+		viddec_h264_set_open_gop_entry(active_fs, 1);
+		pInfo->dpb.SuspendOutput = 1;
+	}
+#endif
+
+	// Initialise the picture-level fields of the chosen frame store.
+	if ((pInfo->img.second_field) || (free_fs_found))
+	{
+		viddec_h264_set_dec_structure(active_fs, pInfo->img.structure);
+		viddec_h264_set_is_output(active_fs, 0);
+
+		switch(pInfo->img.structure)
+		{
+			case (FRAME)	:{
+				active_fs->frame.pic_num = pInfo->img.frame_num;
+				active_fs->frame.long_term_frame_idx = 0;
+				active_fs->frame.long_term_pic_num = 0;
+				active_fs->frame.used_for_reference = 0;
+				active_fs->frame.is_long_term = 0;
+				//active_fs->frame.structure = pInfo->img.structure;
+				active_fs->frame.poc = pInfo->img.framepoc;
+			}break;
+			case (TOP_FIELD)	:{
+				active_fs->top_field.pic_num = pInfo->img.frame_num;
+				active_fs->top_field.long_term_frame_idx = 0;
+				active_fs->top_field.long_term_pic_num = 0;
+				active_fs->top_field.used_for_reference = 0;
+				active_fs->top_field.is_long_term = 0;
+				//active_fs->top_field.structure = pInfo->img.structure;
+				active_fs->top_field.poc = pInfo->img.toppoc;
+			}break;
+			case(BOTTOM_FIELD)	:{
+				active_fs->bottom_field.pic_num = pInfo->img.frame_num;
+				active_fs->bottom_field.long_term_frame_idx = 0;
+				active_fs->bottom_field.long_term_pic_num = 0;
+				active_fs->bottom_field.used_for_reference = 0;
+				active_fs->bottom_field.is_long_term = 0;
+				//active_fs->bottom_field.structure = pInfo->img.structure;
+				active_fs->bottom_field.poc = pInfo->img.bottompoc;
+			}break;
+		}
+	}
+	else
+	{
+		// Need to drop a frame or something here
+	}
+
+	return;
+}	///// End of init Frame Store
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// Decoding POC for current Picture
+// 1) pic_order_cnt_type (0, 1, 2)
+//
+//////////////////////////////////////////////////////////////////////////////
+
// Computes the picture order count (POC) of the current picture — toppoc,
// bottompoc, framepoc and ThisPOC in pInfo->img — following the three POC
// modes of the H.264 spec (subclause 8.2.1, selected by pic_order_cnt_type).
//
// NonExisting: non-zero when called for a "non-existing" frame inserted to
//              fill a gap in frame_num; then the explicit frame_num argument
//              is used instead of the parsed slice-header value.
// frame_num:   frame number to use when NonExisting is set.
void h264_hdr_decoding_poc (h264_Info * pInfo,int32_t NonExisting, int32_t frame_num)
{
    int32_t MaxPicOrderCntLsb = (1<<(pInfo->active_SPS.log2_max_pic_order_cnt_lsb_minus4+4));
    int32_t delta_pic_order_count[2];
    int32_t MaxFrameNum = 1 << (pInfo->active_SPS.log2_max_frame_num_minus4 + 4);

    int32_t AbsFrameNum =0;
    int32_t ExpectedDeltaPerPicOrderCntCycle =0;
    int32_t PicOrderCntCycleCnt = 0;
    int32_t FrameNumInPicOrderCntCycle =0;
    int32_t ExpectedPicOrderCnt =0;

    int32_t actual_frame_num =0;


    // Non-existing frames carry no slice header, so take the caller's number.
    if(NonExisting)  actual_frame_num = frame_num;
    else             actual_frame_num = pInfo->img.frame_num;

    switch (pInfo->active_SPS.pic_order_cnt_type)
    {
    case 0:
        // POC type 0 (spec 8.2.1.1): lsb parsed from the slice header, msb
        // tracked here across pictures.  Not defined for non-existing frames.
        if(NonExisting != 0) break;

        if (pInfo->SliceHeader.idr_flag)
        {
            pInfo->img.PicOrderCntMsb = 0;
            pInfo->img.PrevPicOrderCntLsb = 0;
        }
        else if (pInfo->img.last_has_mmco_5)
        {
            // mmco 5 on the previous picture resets the POC reference point.
            if (pInfo->img.last_pic_bottom_field)
            {
                pInfo->img.PicOrderCntMsb = 0;
                pInfo->img.PrevPicOrderCntLsb = 0;
            }
            else
            {
                pInfo->img.PicOrderCntMsb = 0;
                pInfo->img.PrevPicOrderCntLsb = pInfo->img.toppoc;
            }
        }

        // Calculate the MSBs of current picture: detect lsb wrap-around in
        // either direction and step the msb by MaxPicOrderCntLsb accordingly.
        if((pInfo->img.pic_order_cnt_lsb < pInfo->img.PrevPicOrderCntLsb)  &&
            ((pInfo->img.PrevPicOrderCntLsb - pInfo->img.pic_order_cnt_lsb )>=(MaxPicOrderCntLsb>>1)) )
        {
            pInfo->img.CurrPicOrderCntMsb = pInfo->img.PicOrderCntMsb + MaxPicOrderCntLsb;
        } else if ((pInfo->img.pic_order_cnt_lsb > pInfo->img.PrevPicOrderCntLsb)  &&
            ((pInfo->img.pic_order_cnt_lsb - pInfo->img.PrevPicOrderCntLsb ) > (MaxPicOrderCntLsb>>1)) )
        {
            pInfo->img.CurrPicOrderCntMsb = pInfo->img.PicOrderCntMsb - MaxPicOrderCntLsb;
        } else
        {
            pInfo->img.CurrPicOrderCntMsb = pInfo->img.PicOrderCntMsb;
        }

        // 2nd: derive the field/frame POCs from msb+lsb.

        if(pInfo->img.field_pic_flag==0)
        {
            //frame pix
            pInfo->img.toppoc = pInfo->img.CurrPicOrderCntMsb + pInfo->img.pic_order_cnt_lsb;
            pInfo->img.bottompoc = pInfo->img.toppoc + pInfo->img.delta_pic_order_cnt_bottom;
            pInfo->img.ThisPOC = pInfo->img.framepoc = (pInfo->img.toppoc < pInfo->img.bottompoc)? pInfo->img.toppoc : pInfo->img.bottompoc; // POC200301
        }
        else if (pInfo->img.bottom_field_flag==0)
        {  //top field
            pInfo->img.ThisPOC= pInfo->img.toppoc = pInfo->img.CurrPicOrderCntMsb + pInfo->img.pic_order_cnt_lsb;
        }
        else
        {  //bottom field
            pInfo->img.ThisPOC= pInfo->img.bottompoc = pInfo->img.CurrPicOrderCntMsb + pInfo->img.pic_order_cnt_lsb;
        }
        pInfo->img.framepoc=pInfo->img.ThisPOC;

        // NOTE(review): relies on old_slice holding the previous slice's
        // header — confirm it is refreshed before this is called.
        if ( pInfo->img.frame_num != pInfo->old_slice.frame_num)
            pInfo->img.PreviousFrameNum = pInfo->img.frame_num;

        // Only reference pictures advance the msb/lsb tracking state.
        if(pInfo->SliceHeader.nal_ref_idc)
        {
            pInfo->img.PrevPicOrderCntLsb = pInfo->img.pic_order_cnt_lsb;
            pInfo->img.PicOrderCntMsb = pInfo->img.CurrPicOrderCntMsb;
        }

        break;
    case 1: {
        // POC type 1 (spec 8.2.1.2): POC derived from frame_num plus the
        // SPS-signalled expected-delta cycle; slice deltas refine it.
        if(NonExisting)
        {
            delta_pic_order_count[0] = 0;
            delta_pic_order_count[1] = 0;
        }
        else
        {
            delta_pic_order_count[0] = ( pInfo->img.delta_pic_order_always_zero_flag ) ? 0 : pInfo->img.delta_pic_order_cnt[0];
            delta_pic_order_count[1] = ( pInfo->img.delta_pic_order_always_zero_flag ) ? 0 :
                ( (!pInfo->active_PPS.pic_order_present_flag)  && (!(pInfo->img.field_pic_flag))) ? 0 :
                pInfo->img.delta_pic_order_cnt[1];
        }

        // this if branch should not be taken during processing of a gap_in_frame_num pic since
        // an IDR picture cannot produce non-existent frames...
        if(pInfo->SliceHeader.idr_flag)
        {
            pInfo->img.FrameNumOffset = 0;
        }
        else
        {
            // frame_num wrapped => advance the offset by one MaxFrameNum period.
            if (actual_frame_num < pInfo->img.PreviousFrameNum)
            {
                pInfo->img.FrameNumOffset = pInfo->img.PreviousFrameNumOffset + MaxFrameNum;
            }
            else
            {
                pInfo->img.FrameNumOffset = pInfo->img.PreviousFrameNumOffset;
            }
        }

        // pInfo->img.num_ref_frames_in_pic_order_cnt_cycle set from SPS
        // so constant between existent and non-existent frames
        if (pInfo->img.num_ref_frames_in_pic_order_cnt_cycle)
            AbsFrameNum = pInfo->img.FrameNumOffset + actual_frame_num;
        else
            AbsFrameNum = 0;

        // pInfo->img.disposable_flag should never be true for a non-existent frame since these are always
        // references...
        if ((pInfo->SliceHeader.nal_ref_idc == 0) && (AbsFrameNum > 0)) AbsFrameNum = AbsFrameNum - 1;

        // 3rd: accumulate the expected POC over completed cycles plus the
        // per-frame offsets within the current cycle.
        ExpectedDeltaPerPicOrderCntCycle = pInfo->active_SPS.expectedDeltaPerPOCCycle;

        if (AbsFrameNum)
        {
            // Rem: pInfo->img.num_ref_frames_in_pic_order_cnt_cycle takes max value of 255 (8 bit)
            // Frame NUm may be 2^16 (17 bits)
            // I guess we really have to treat AbsFrameNum as a 32 bit number
            uint32_t temp = 0;
            int32_t i=0;
            int32_t offset_for_ref_frame[MAX_NUM_REF_FRAMES_IN_PIC_ORDER_CNT_CYCLE];

            if (pInfo->img.num_ref_frames_in_pic_order_cnt_cycle)
                PicOrderCntCycleCnt = ldiv_mod_u((uint32_t)(AbsFrameNum-1), (uint32_t)pInfo->img.num_ref_frames_in_pic_order_cnt_cycle, &temp);

            ExpectedPicOrderCnt = mult_u((uint32_t)PicOrderCntCycleCnt, (uint32_t)ExpectedDeltaPerPicOrderCntCycle);

            FrameNumInPicOrderCntCycle = temp;

            //ExpectedPicOrderCnt +=pInfo->active_SPS.expectedDeltaPerPOCCycle;
#ifndef USER_MODE
            // Firmware build: offsets live in DDR, fetch the table first.
            h264_Parse_Copy_Offset_Ref_Frames_From_DDR(pInfo, offset_for_ref_frame, pInfo->active_SPS.seq_parameter_set_id);
            for (i = 0; i <= FrameNumInPicOrderCntCycle; i++)
                ExpectedPicOrderCnt += offset_for_ref_frame[i];
#else
            for (i = 0; i <= FrameNumInPicOrderCntCycle; i++)
                ExpectedPicOrderCnt += pInfo->active_SPS.offset_for_ref_frame[i];
#endif
        }
        else {
            ExpectedPicOrderCnt = 0;
        }

        if (pInfo->SliceHeader.nal_ref_idc == 0)
            ExpectedPicOrderCnt += pInfo->img.offset_for_non_ref_pic;

        if (!(pInfo->img.field_pic_flag))
        {
            pInfo->img.toppoc = ExpectedPicOrderCnt + delta_pic_order_count[0];
            pInfo->img.bottompoc = pInfo->img.toppoc + pInfo->img.offset_for_top_to_bottom_field + delta_pic_order_count[1];
            pInfo->img.framepoc = (pInfo->img.toppoc < pInfo->img.bottompoc)? pInfo->img.toppoc : pInfo->img.bottompoc;
            pInfo->img.ThisPOC = pInfo->img.framepoc;
        }
        else if (!(pInfo->img.bottom_field_flag))
        {
            //top field
            pInfo->img.toppoc = ExpectedPicOrderCnt + delta_pic_order_count[0];
            pInfo->img.ThisPOC = pInfo->img.toppoc;
            pInfo->img.bottompoc = 0;
        }
        else
        {
            //bottom field
            pInfo->img.toppoc = 0;
            pInfo->img.bottompoc = ExpectedPicOrderCnt + pInfo->img.offset_for_top_to_bottom_field + delta_pic_order_count[0];
            pInfo->img.ThisPOC = pInfo->img.bottompoc;
        }

        //CONFORMANCE_ISSUE
        pInfo->img.framepoc=pInfo->img.ThisPOC;

        //CONFORMANCE_ISSUE
        pInfo->img.PreviousFrameNum=pInfo->img.frame_num;
        pInfo->img.PreviousFrameNumOffset=pInfo->img.FrameNumOffset;

        }
        break;
    case 2: {   // POC MODE 2
        // POC type 2 (spec 8.2.1.3): POC is 2*frame number, minus one for
        // non-reference pictures; display order equals decode order.
        if (pInfo->SliceHeader.idr_flag)
        {
            pInfo->img.FrameNumOffset = 0;
            pInfo->img.framepoc = 0;
            pInfo->img.toppoc = 0;
            pInfo->img.bottompoc = 0;
            pInfo->img.ThisPOC = 0;
        }
        else
        {
            if (pInfo->img.last_has_mmco_5)
            {
                pInfo->img.PreviousFrameNum = 0;
                pInfo->img.PreviousFrameNumOffset = 0;
            }
            if (actual_frame_num < pInfo->img.PreviousFrameNum)
                pInfo->img.FrameNumOffset = pInfo->img.PreviousFrameNumOffset + MaxFrameNum;
            else
                pInfo->img.FrameNumOffset = pInfo->img.PreviousFrameNumOffset;

            AbsFrameNum = pInfo->img.FrameNumOffset + actual_frame_num;
            if (pInfo->SliceHeader.nal_ref_idc == 0) pInfo->img.ThisPOC = (AbsFrameNum<<1) - 1;
            else                                     pInfo->img.ThisPOC = (AbsFrameNum<<1);

            if (!(pInfo->img.field_pic_flag))
            {
                pInfo->img.toppoc    = pInfo->img.ThisPOC;
                pInfo->img.bottompoc = pInfo->img.ThisPOC;
                pInfo->img.framepoc  = pInfo->img.ThisPOC;
            }
            else if (!(pInfo->img.bottom_field_flag))
            {
                pInfo->img.toppoc   = pInfo->img.ThisPOC;
                pInfo->img.framepoc = pInfo->img.ThisPOC;
            }
            else
            {
                pInfo->img.bottompoc = pInfo->img.ThisPOC;
                pInfo->img.framepoc  = pInfo->img.ThisPOC;
            }
        }

        //CONFORMANCE_ISSUE
        pInfo->img.PreviousFrameNum = pInfo->img.frame_num;
        pInfo->img.PreviousFrameNumOffset = pInfo->img.FrameNumOffset;
        }
        break;
    default:
        break;
    }

    return;
} //// End of decoding_POC
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_hdr_post_poc ()
+//
+//////////////////////////////////////////////////////////////////////////////
+
+void h264_hdr_post_poc(h264_Info* pInfo, int32_t NonExisting, int32_t frame_num, int32_t use_old)
+{
+ int32_t actual_frame_num = (NonExisting)? frame_num :
+ (use_old)? pInfo->old_slice.frame_num :
+ pInfo->img.frame_num;
+
+ int32_t disposable_flag = (use_old)?(pInfo->old_slice.nal_ref_idc == 0) :
+ (pInfo->SliceHeader.nal_ref_idc == 0);
+
+ switch(pInfo->img.pic_order_cnt_type)
+ {
+ case 0: {
+ pInfo->img.PreviousFrameNum = actual_frame_num;
+ if ((disposable_flag == 0) && (NonExisting == 0))
+ {
+ pInfo->img.PrevPicOrderCntLsb = (use_old)? pInfo->old_slice.pic_order_cnt_lsb :
+ pInfo->SliceHeader.pic_order_cnt_lsb;
+ pInfo->img.PicOrderCntMsb = pInfo->img.CurrPicOrderCntMsb;
+ }
+ }
+ break;
+ case 1: {
+ pInfo->img.PreviousFrameNum = actual_frame_num;
+ pInfo->img.PreviousFrameNumOffset = pInfo->img.FrameNumOffset;
+ }
+ break;
+ case 2: {
+ pInfo->img.PreviousFrameNum = actual_frame_num;
+ pInfo->img.PreviousFrameNumOffset = pInfo->img.FrameNumOffset;
+
+ }break;
+
+ default: {
+ }break;
+ }
+
+ return;
+} ///// End of h264_hdr_post_poc
+
+
diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_math.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_math.c
new file mode 100644
index 0000000..b5df6d9
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_math.c
@@ -0,0 +1,82 @@
+//#include "math.h"
+// Arithmatic functions using add & subtract
+
// Shift-and-add multiply for targets without a hardware multiplier.
// Returns var1 * var2 (modulo the width of unsigned long, as the native
// '*' operator would).
unsigned long mult_u(register unsigned long var1, register unsigned long var2)
{
    register unsigned long acc = 0;

    // Classic binary long multiplication: for every set bit of var2,
    // add the correspondingly shifted var1 into the accumulator.
    for ( ; var2 != 0; var2 >>= 1, var1 <<= 1)
    {
        if (var2 & 1)
        {
            acc += var1;
        }
    }

    return acc;
}// mult_u
+
// Restoring long division for targets without a hardware divider.
// Returns a / b and stores a % b through *mod.  A zero divisor yields the
// sentinel 0xffffffff with *mod = 0.  The normalisation loop assumes the
// divisor fits below bit 31.
unsigned long ldiv_mod_u(register unsigned long a, register unsigned long b, unsigned long * mod)
{
    register unsigned long denom = b;
    register unsigned long quot  = 0;
    register unsigned long mask  = 0x1;

    if (denom == 0)
    {
        *mod = 0;
        return 0xffffffff ; // Div by 0
    }

    if (a < b)
    {
        // Divisor larger than dividend: quotient 0, remainder is a itself.
        *mod = a;
        return 0;
    }

    // Normalise: slide the divisor up until its top bit sits at bit 31,
    // tracking the matching quotient-bit weight in 'mask'.
    while (!(denom & 0x80000000))
    {
        denom <<= 1;
        mask  <<= 1;
    }

    // Walk the divisor back down, subtracting wherever it fits and
    // collecting the corresponding quotient bits.
    while (mask)
    {
        if (denom <= a)
        {
            quot |= mask;
            a    -= denom;
        }
        denom >>= 1;
        mask  >>= 1;
    }

    *mod = a;
    return quot;
}// ldiv_mod_u
+
+
// Restoring division with a 16.16 shifted divisor: returns a / b for
// quotients that fit in 16 bits (and b < 0x10000 so b << 16 cannot
// overflow).
//
// Fix: the comparison used '<' where the sibling ldiv_mod_u() above uses
// '<=', so whenever the shifted divisor exactly equalled the remaining
// dividend the quotient bit was skipped — e.g. ldiv_u(4, 2) returned 1
// instead of 2.
unsigned ldiv_u(register unsigned a, register unsigned b)
{
    register unsigned div = b << 16;
    register unsigned res = 0;
    register unsigned bit = 0x10000;

    while (bit)
    {
        div >>= 1;
        bit >>= 1;
        if (div <= a)   // '<=' so exact multiples contribute their bit
        {
            res |= bit;
            a -= div;
        }
    }

    return res;
}
+
+
diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_mem.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_mem.c
new file mode 100644
index 0000000..a956607
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_mem.c
@@ -0,0 +1,198 @@
+
+/*!
+ ***********************************************************************
+ * \file: h264_dpb_ctl.c
+ *
+ ***********************************************************************
+ */
+
+//#include <limits.h>
+
+#include "h264parse.h"
+
+
+// ---------------------------------------------------------------------------
+// IMPORTANT: note that in this implementation int c is an int not a char
+// ---------------------------------------------------------------------------
void* h264_memset( void* buf, uint32_t c, uint32_t num )
{
    // Word-granular fill: writes the full 32-bit pattern 'c' over the
    // first (num / 4) words of buf.  Any trailing bytes (num % 4) are
    // deliberately left untouched — callers pass word-aligned sizes.
    uint32_t* dst   = buf;
    uint32_t  words = num >> 2;

    while (words--)
    {
        *dst++ = c;
    }

    return buf;
}
+
+
void* h264_memcpy( void* dest, void* src, uint32_t num )
{
    // Word-granular copy of (num / 4) 32-bit words from src to dest.
    // Trailing bytes (num % 4) are not copied; regions must not overlap.
    int32_t* d     = dest;
    int32_t* s     = src;
    uint32_t words = num >> 2;

    while (words--)
    {
        *d++ = *s++;
    }

    return dest;
}
+
+
+#ifndef USER_MODE
+
+//h264_Parse_Copy_Sps_To_DDR () copy local sps to ddr mem
+void h264_Parse_Copy_Pps_To_DDR(h264_Info* pInfo, pic_param_set_ptr PPS, uint32_t nPPSId)
+{
+ uint32_t copy_size = sizeof(pic_param_set);
+ uint32_t pps_entry_ptr = pInfo->PPS_PADDR_GL+nPPSId*copy_size;
+
+ if(nPPSId < MAX_NUM_PPS)
+ {
+ cp_using_dma(pps_entry_ptr, (uint32_t)PPS, copy_size, 1, 0);
+ }
+
+ return;
+
+}
+//end of h264_Parse_Copy_Pps_To_DDR
+
+
+// h264_Parse_Copy_Pps_From_DDR copy a pps with nPPSId from ddr mem to local PPS
+void h264_Parse_Copy_Pps_From_DDR(h264_Info* pInfo, pic_param_set_ptr PPS, uint32_t nPPSId)
+{
+
+ uint32_t copy_size= sizeof(pic_param_set);
+ uint32_t pps_entry_ptr = pInfo->PPS_PADDR_GL+nPPSId*copy_size;
+
+ if( nPPSId < MAX_NUM_PPS)
+ {
+ cp_using_dma(pps_entry_ptr, (uint32_t)PPS, copy_size, 0, 0);
+ }
+
+ return;
+}
+//end of h264_Parse_Copy_Pps_From_DDR
+
+
+//h264_Parse_Copy_Sps_To_DDR () copy local sps to ddr mem with nSPSId
+void h264_Parse_Copy_Sps_To_DDR(h264_Info* pInfo, seq_param_set_used_ptr SPS, uint32_t nSPSId)
+{
+ uint32_t copy_size = sizeof(seq_param_set_used);
+ uint32_t sps_entry_ptr = pInfo->SPS_PADDR_GL+nSPSId*sizeof(seq_param_set_all);
+
+ if(nSPSId < MAX_NUM_SPS)
+ {
+ cp_using_dma(sps_entry_ptr, (uint32_t)SPS, copy_size, 1, 0);
+ }
+
+ //OS_INFO("SPS->seq_parameter_set_id = %d\n", SPS->seq_parameter_set_id);
+
+
+ return;
+}
+
+//end of h264_Parse_Copy_Sps_To_DDR
+
+
+// h264_Parse_Copy_Sps_From_DDR copy a sps with nSPSId from ddr mem to local SPS
+void h264_Parse_Copy_Sps_From_DDR(h264_Info* pInfo, seq_param_set_used_ptr SPS, uint32_t nSPSId)
+{
+ uint32_t copy_size= sizeof(seq_param_set_used);
+ uint32_t sps_entry_ptr = pInfo->SPS_PADDR_GL+nSPSId*sizeof(seq_param_set_all);
+
+ if(nSPSId < MAX_NUM_SPS)
+ {
+ cp_using_dma(sps_entry_ptr, (uint32_t)SPS, copy_size, 0, 0);
+ }
+
+ return;
+
+}
+//end of h264_Parse_Copy_Sps_From_DDR
+
+//h264_Parse_Copy_Offset_Ref_Frames_To_DDR () copy local offset_ref_frames to ddr mem with nSPSId
+void h264_Parse_Copy_Offset_Ref_Frames_To_DDR(h264_Info* pInfo, int32_t* pOffset_ref_frames, uint32_t nSPSId)
+{
+ uint32_t copy_size = sizeof(int32_t)*MAX_NUM_REF_FRAMES_IN_PIC_ORDER_CNT_CYCLE;
+ uint32_t offset_ref_frames_entry_ptr = pInfo->OFFSET_REF_FRAME_PADDR_GL+nSPSId*copy_size;
+
+ if(nSPSId < MAX_NUM_SPS)
+ {
+ //cp_using_dma(offset_ref_frames_entry_ptr, (uint32_t)pOffset_ref_frames, copy_size, 1, 0);
+ h264_memcpy((int32_t *)offset_ref_frames_entry_ptr,pOffset_ref_frames, copy_size);
+ }
+
+ return;
+}
+
+//end of h264_Parse_Copy_Offset_Ref_Frames_To_DDR
+
+
+// h264_Parse_Copy_Offset_Ref_Frames_From_DDR copy a offset_ref_frames with nSPSId from ddr mem to local offset_ref_frames
+void h264_Parse_Copy_Offset_Ref_Frames_From_DDR(h264_Info* pInfo, int32_t* pOffset_ref_frames, uint32_t nSPSId)
+{
+ uint32_t copy_size= sizeof(int32_t)*MAX_NUM_REF_FRAMES_IN_PIC_ORDER_CNT_CYCLE;
+ uint32_t offset_ref_frames_entry_ptr = pInfo->OFFSET_REF_FRAME_PADDR_GL+nSPSId*copy_size;
+
+ if(nSPSId < MAX_NUM_SPS)
+ {
+ //cp_using_dma(offset_ref_frames_entry_ptr, (uint32_t)pOffset_ref_frames, copy_size, 0, 0);
+ h264_memcpy(pOffset_ref_frames, (int32_t *)offset_ref_frames_entry_ptr, copy_size);
+ }
+
+ return;
+
+}
+//end of h264_Parse_Copy_Offset_Ref_Frames_From_DDR
+
+
+//h264_Parse_Check_Sps_Updated_Flag () copy local sps to ddr mem with nSPSId
+uint32_t h264_Parse_Check_Sps_Updated_Flag(h264_Info* pInfo, uint32_t nSPSId)
+{
+ uint32_t is_updated=0;
+ uint32_t copy_size = sizeof(uint32_t);
+ uint32_t sps_entry_ptr = pInfo->SPS_PADDR_GL+nSPSId*copy_size;
+
+
+ if(nSPSId < MAX_NUM_SPS)
+ {
+ cp_using_dma(sps_entry_ptr, (uint32_t)(&is_updated), copy_size, 1, 0);
+ }
+
+ //OS_INFO("SPS->seq_parameter_set_id = %d\n", SPS->seq_parameter_set_id);
+
+
+ return is_updated;
+}
+
+//end of h264_Parse_Check_Sps_Updated_Flag
+
+
+// h264_Parse_Clear_Sps_Updated_Flag copy a sps with nSPSId from ddr mem to local SPS
+void h264_Parse_Clear_Sps_Updated_Flag(h264_Info* pInfo, uint32_t nSPSId)
+{
+ uint32_t is_updated=0;
+ uint32_t copy_size= sizeof(uint32_t);
+ uint32_t sps_entry_ptr = pInfo->SPS_PADDR_GL+nSPSId*copy_size;
+
+ if(nSPSId < MAX_NUM_SPS)
+ {
+ cp_using_dma(sps_entry_ptr, (uint32_t)(&is_updated), copy_size, 0, 0);
+ }
+
+ return;
+
+}
+//end of h264_Parse_Clear_Sps_Updated_Flag
+
+
+#endif
+
+
diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_pps.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_pps.c
new file mode 100644
index 0000000..a1281c2
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_pps.c
@@ -0,0 +1,128 @@
+
+
+#include "h264.h"
+#include "h264parse.h"
+
+/*---------------------------------------------*/
+/*---------------------------------------------*/
+/*---------------------------------------------*/
// Parses a Picture Parameter Set RBSP (H.264 spec 7.3.2.2) from the
// bitstream reader 'parent' into *PictureParameterSet.
// Returns H264_STATUS_OK on success, H264_PPS_ERROR on any range or read
// failure (the do/while(0) + break pattern bails out early).
h264_Status h264_Parse_PicParameterSet(void *parent,h264_Info * pInfo,h264_PicParameterSet_t* PictureParameterSet)
{
    h264_Status ret = H264_PPS_ERROR;

    //h264_PicParameterSet_t* PictureParameterSet = &pInfo->PictureParameterSet;
    uint32_t code=0, i = 0;

    do {
        ///// PPS par1: pic_parameter_set_id & seq_parameter_set_id
        code = h264_GetVLCElement(parent, pInfo, false);
        if(code > MAX_PIC_PARAMS) {
            break;
        }
        PictureParameterSet->pic_parameter_set_id = (uint8_t)code;


        code = h264_GetVLCElement(parent, pInfo, false);
        if(code > MAX_NUM_SPS-1) {
            break;
        }
        PictureParameterSet->seq_parameter_set_id = (uint8_t)code;

        ///// entropy_coding_mode_flag
        viddec_pm_get_bits(parent, &code, 1);
        PictureParameterSet->entropy_coding_mode_flag = (uint8_t)code;
        ///// pic_order_present_flag
        viddec_pm_get_bits(parent, &code, 1);
        PictureParameterSet->pic_order_present_flag = (uint8_t)code;

        PictureParameterSet->num_slice_groups_minus1 = h264_GetVLCElement(parent, pInfo, false);

        //
        // In main profile, FMO is excluded and num_slice_groups_minus1 should be 0
        //
        if(PictureParameterSet->num_slice_groups_minus1 > 0) //MAX_NUM_SLICE_GRPS)
            break;

        // Note: the stored values are the active counts, i.e. syntax + 1.
        PictureParameterSet->num_ref_idx_l0_active = h264_GetVLCElement(parent, pInfo, false)+1;
        PictureParameterSet->num_ref_idx_l1_active = h264_GetVLCElement(parent, pInfo, false)+1;

        //// PPS->num_ref_idx_l0_active --- [0,32]
        if(((PictureParameterSet->num_ref_idx_l0_active) > MAX_NUM_REF_FRAMES) || ((PictureParameterSet->num_ref_idx_l1_active) > MAX_NUM_REF_FRAMES))
        {
            break;
        }

        //// weighting prediction
        viddec_pm_get_bits(parent, &code, 1);
        PictureParameterSet->weighted_pred_flag = (uint8_t)code;

        viddec_pm_get_bits(parent, &code, 2);
        PictureParameterSet->weighted_bipred_idc = (uint8_t)code;

        //// QP
        PictureParameterSet->pic_init_qp_minus26 = h264_GetVLCElement(parent, pInfo, true);
        PictureParameterSet->pic_init_qs_minus26 = h264_GetVLCElement(parent, pInfo, true);
        if(((PictureParameterSet->pic_init_qp_minus26+26) > MAX_QP) || ((PictureParameterSet->pic_init_qs_minus26+26) > MAX_QP))
            break;
        PictureParameterSet->chroma_qp_index_offset = h264_GetVLCElement(parent, pInfo, true);

        //// Deblocking ctl parameters
        viddec_pm_get_bits(parent, &code, 1);
        PictureParameterSet->deblocking_filter_control_present_flag = (uint8_t)code;

        viddec_pm_get_bits(parent, &code, 1);
        PictureParameterSet->constrained_intra_pred_flag = (uint8_t)code;

        if( viddec_pm_get_bits(parent, &code, 1) == -1)
            break;
        PictureParameterSet->redundant_pic_cnt_present_flag = (uint8_t)code;

        //// Check if have more RBSP Data for additional parameters
        //// (High-profile extensions: 8x8 transform and scaling matrices.)
        if(h264_More_RBSP_Data(parent, pInfo))
        {
            viddec_pm_get_bits(parent, &code, 1);
            PictureParameterSet->transform_8x8_mode_flag = (uint8_t)code;

            if( viddec_pm_get_bits(parent, &code, 1) == -1)
                break;
            PictureParameterSet->pic_scaling_matrix_present_flag = (uint8_t)code;

            if(PictureParameterSet->pic_scaling_matrix_present_flag)
            {
                // 6 4x4 lists, plus 2 8x8 lists when 8x8 transform is enabled.
                uint32_t n_ScalingList = 6 + (PictureParameterSet->transform_8x8_mode_flag << 1);
                for(i=0; i<n_ScalingList; i++)
                {
                    // NOTE(review): this break only exits the for loop, not
                    // the outer do/while — on a read failure parsing falls
                    // through to second_chroma_qp_index_offset and still
                    // returns OK.  Confirm whether that is intended.
                    if( viddec_pm_get_bits(parent, &code, 1) == -1)
                        break;
                    PictureParameterSet->pic_scaling_list_present_flag[i] = (uint8_t)code;

                    if(PictureParameterSet->pic_scaling_list_present_flag[i])
                    {
                        if(i<6)
                            h264_Scaling_List(parent, PictureParameterSet->ScalingList4x4[i], 16, &PictureParameterSet->UseDefaultScalingMatrix4x4Flag[i], pInfo);
                        else
                            h264_Scaling_List(parent, PictureParameterSet->ScalingList8x8[i-6], 64, &PictureParameterSet->UseDefaultScalingMatrix8x8Flag[i-6], pInfo);
                    }
                }
            }

            PictureParameterSet->second_chroma_qp_index_offset = h264_GetVLCElement(parent, pInfo, true); //fix
            //if((PictureParameterSet->second_chroma_qp_index_offset>12) || (PictureParameterSet->second_chroma_qp_index_offset < -12))
            //	break;
        }
        else
        {
            // No extension data: spec-mandated inferred defaults.
            PictureParameterSet->transform_8x8_mode_flag = 0;
            PictureParameterSet->pic_scaling_matrix_present_flag = 0;
            PictureParameterSet->second_chroma_qp_index_offset = PictureParameterSet->chroma_qp_index_offset;
        }

        ret = H264_STATUS_OK;
    }while(0);

    //h264_Parse_rbsp_trailing_bits(pInfo);
    return ret;
}
+
+////////// EOF///////////////
+
diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sei.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sei.c
new file mode 100644
index 0000000..829eb55
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sei.c
@@ -0,0 +1,1176 @@
+#define H264_PARSE_SEI_C
+
+#ifdef H264_PARSE_SEI_C
+
+#include "h264.h"
+#include "h264parse.h"
+#include "h264parse_dpb.h"
+
+#include "viddec_parser_ops.h"
+
+#include "viddec_fw_item_types.h"
+#include "viddec_fw_workload.h"
+
+//////////////////////////////////////////////////////////////////////////////
+// avc_sei_stream_initialise ()
+//
+//
+
+void h264_sei_stream_initialise (h264_Info* pInfo)
+{
+ pInfo->sei_information.capture_POC = 0;
+ pInfo->sei_information.disp_frozen = 0;
+ pInfo->sei_information.release_POC = 0;
+ pInfo->sei_information.capture_fn = 0;
+ pInfo->sei_information.recovery_fn = 0xFFFFFFFF;
+ pInfo->sei_information.scan_format = 0;
+ pInfo->sei_information.broken_link_pic = 0;
+ return;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+h264_Status h264_sei_buffering_period(void *parent,h264_Info* pInfo)
+{
+ h264_Status ret = H264_STATUS_SEI_ERROR;
+
+ h264_SEI_buffering_period_t* sei_msg_ptr;
+ h264_SEI_buffering_period_t sei_buffering_period;
+ int32_t SchedSelIdx;
+ int num_bits = 0;
+
+ sei_msg_ptr = (h264_SEI_buffering_period_t *)(&sei_buffering_period);
+
+ do{
+ if(pInfo->active_SPS.sps_disp.vui_seq_parameters.nal_hrd_parameters_present_flag == 1)
+ {
+ num_bits = pInfo->active_SPS.sps_disp.vui_seq_parameters.nal_hrd_initial_cpb_removal_delay_length_minus1 + 1;
+ }
+ else if(pInfo->active_SPS.sps_disp.vui_seq_parameters.vcl_hrd_parameters_present_flag)
+ {
+ num_bits = pInfo->active_SPS.sps_disp.vui_seq_parameters.nal_hrd_initial_cpb_removal_delay_length_minus1 + 1;
+ }
+
+ sei_msg_ptr->seq_param_set_id = h264_GetVLCElement(parent, pInfo, false);
+ if(sei_msg_ptr->seq_param_set_id >= NUM_SPS)
+ break;
+
+ //check if this id is same as the id of the current SPS //fix
+
+ if(pInfo->active_SPS.sps_disp.vui_seq_parameters.nal_hrd_parameters_present_flag == 1)
+ {
+ if(pInfo->active_SPS.sps_disp.vui_seq_parameters.nal_hrd_cpb_cnt_minus1 >= MAX_CPB_CNT)
+ break;
+
+ for(SchedSelIdx = 0; SchedSelIdx <= pInfo->active_SPS.sps_disp.vui_seq_parameters.nal_hrd_cpb_cnt_minus1; SchedSelIdx++)
+ {
+ viddec_pm_get_bits(parent, (uint32_t *)&sei_msg_ptr->initial_cpb_removal_delay_nal, num_bits);
+ viddec_pm_get_bits(parent, (uint32_t *)&sei_msg_ptr->initial_cpb_removal_delay_offset_nal, num_bits);
+ }
+ }
+
+ if(pInfo->active_SPS.sps_disp.vui_seq_parameters.vcl_hrd_parameters_present_flag == 1)
+ {
+ if(pInfo->active_SPS.sps_disp.vui_seq_parameters.vcl_hrd_cpb_cnt_minus1 >= MAX_CPB_CNT)
+ break;
+
+ for(SchedSelIdx = 0; SchedSelIdx <= pInfo->active_SPS.sps_disp.vui_seq_parameters.vcl_hrd_cpb_cnt_minus1; SchedSelIdx++)
+ {
+ viddec_pm_get_bits(parent, (uint32_t *)&sei_msg_ptr->initial_cpb_removal_delay_vcl, num_bits);
+ viddec_pm_get_bits(parent, (uint32_t *)&sei_msg_ptr->initial_cpb_removal_delay_offset_vcl, num_bits);
+ }
+ }
+
+ ret = H264_STATUS_OK;
+ } while (0);
+
+ return H264_STATUS_OK;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
// Parses a pic_timing SEI message (H.264 spec D.1.2): CPB/DPB delays, the
// pic_struct field (used to classify the stream as progressive/interlaced),
// and the optional per-picture clock timestamps.  Only pic_struct and the
// derived scan_format are retained; timestamp fields are read to keep the
// bitstream position correct and then discarded.
h264_Status h264_sei_pic_timing(void *parent,h264_Info* pInfo)
{
    int32_t CpbDpbDelaysPresentFlag = 0;
    h264_SEI_pic_timing_t* sei_msg_ptr;
    h264_SEI_pic_timing_t  sei_pic_timing;
    int32_t num_bits_cpb = 0, num_bits_dpb = 0, time_offset_length = 0;
    uint32_t code;
    uint32_t clock_timestamp_flag = 0;
    uint32_t full_timestamp_flag = 0;
    uint32_t seconds_flag = 0;
    uint32_t minutes_flag = 0;
    uint32_t hours_flag = 0;
    uint32_t time_offset = 0;




    sei_msg_ptr = (h264_SEI_pic_timing_t *)(&sei_pic_timing);

    // Delay field widths come from whichever HRD is present (NAL preferred).
    // NOTE(review): if neither flag is set the widths stay 0, and the
    // forced CpbDpbDelaysPresentFlag below still issues 0-bit reads —
    // presumably viddec_pm_get_bits tolerates that; confirm.
    if(pInfo->active_SPS.sps_disp.vui_seq_parameters.nal_hrd_parameters_present_flag)
    {
        num_bits_cpb = pInfo->active_SPS.sps_disp.vui_seq_parameters.nal_hrd_cpb_removal_delay_length_minus1 +1;
        num_bits_dpb = pInfo->active_SPS.sps_disp.vui_seq_parameters.nal_hrd_dpb_output_delay_length_minus1 + 1;
        time_offset_length = pInfo->active_SPS.sps_disp.vui_seq_parameters.nal_hrd_time_offset_length;
    }
    else if(pInfo->active_SPS.sps_disp.vui_seq_parameters.vcl_hrd_parameters_present_flag)
    {
        num_bits_cpb = pInfo->active_SPS.sps_disp.vui_seq_parameters.vcl_hrd_cpb_removal_delay_length_minus1 +1;
        num_bits_dpb = pInfo->active_SPS.sps_disp.vui_seq_parameters.vcl_hrd_dpb_output_delay_length_minus1 + 1;
    }


    CpbDpbDelaysPresentFlag = 1;		// as per amphion code
    if(CpbDpbDelaysPresentFlag)
    {
        viddec_pm_get_bits(parent, (uint32_t *)&sei_msg_ptr->cpb_removal_delay, num_bits_cpb);
        viddec_pm_get_bits(parent, (uint32_t *)&sei_msg_ptr->dpb_output_delay, num_bits_dpb);
    }

    if(pInfo->active_SPS.sps_disp.vui_seq_parameters.pic_struct_present_flag)
    {
        int32_t i = 0, NumClockTS = 0;

        viddec_workload_item_t     wi;

        wi.vwi_payload[0] = wi.vwi_payload[1] = wi.vwi_payload[2] = 0;
        viddec_pm_get_bits(parent, &code , 4);
        sei_msg_ptr->pic_struct = (uint8_t)code;


        // pic_struct 0 (frame), 7 (frame doubling), 8 (frame tripling)
        // indicate progressive content; everything else is field-based.
        if((sei_msg_ptr->pic_struct == 0) || (sei_msg_ptr->pic_struct == 7) || (sei_msg_ptr->pic_struct == 8)) {
            pInfo->sei_information.scan_format = SEI_SCAN_FORMAT_PROGRESSIVE;
        } else {
            pInfo->sei_information.scan_format = SEI_SCAN_FORMAT_INTERLACED;
        }

        wi.vwi_type = VIDDEC_WORKLOAD_SEI_PIC_TIMING;
        wi.h264_sei_pic_timing.pic_struct = sei_msg_ptr->pic_struct;

#ifndef VBP
        //Push to current if we are in first frame, or we do not detect previous frame end
        if( (pInfo->Is_first_frame_in_stream)||(!pInfo->is_current_workload_done) ) {
            viddec_pm_append_workitem( parent, &wi );
        } else {
            viddec_pm_append_workitem_next( parent, &wi );
        }
#endif

        // Number of clock timestamps carried, per Table D-1.
        if(sei_msg_ptr->pic_struct < 3) {
            NumClockTS = 1;
        } else if((sei_msg_ptr->pic_struct < 5) || (sei_msg_ptr->pic_struct == 7)) {
            NumClockTS = 2;
        } else {
            NumClockTS = 3;
        }

        // Skip over each clock timestamp.  The per-field stores are
        // commented out throughout: values are read only to keep the
        // bit position in sync.
        for(i = 0; i < NumClockTS; i++)
        {
            viddec_pm_get_bits(parent, &code , 1);
            clock_timestamp_flag = code;
            //sei_msg_ptr->clock_timestamp_flag[i] = (uint8_t)code;

            if(clock_timestamp_flag)
            {
                viddec_pm_get_bits(parent, &code , 2);
                //sei_msg_ptr->ct_type[i] = (uint8_t)code;

                viddec_pm_get_bits(parent, &code , 1);
                //sei_msg_ptr->nuit_field_based_flag[i] = (uint8_t)code;

                viddec_pm_get_bits(parent, &code , 5);
                //sei_msg_ptr->counting_type[i] = (uint8_t)code;

                viddec_pm_get_bits(parent, &code , 1);
                //sei_msg_ptr->full_timestamp_flag[i] = (uint8_t)code;
                full_timestamp_flag = code;

                viddec_pm_get_bits(parent, &code , 1);
                //sei_msg_ptr->discontinuity_flag[i] = (uint8_t)code;

                viddec_pm_get_bits(parent, &code , 1);
                //sei_msg_ptr->cnt_dropped_flag[i] = (uint8_t)code;

                viddec_pm_get_bits(parent, &code , 8);
                //sei_msg_ptr->n_frames[i] = (uint8_t)code;


                if(full_timestamp_flag)
                {
                    // Full form: seconds(6) minutes(6) hours(5).
                    viddec_pm_get_bits(parent, &code , 6);
                    //sei_msg_ptr->seconds_value[i] = (uint8_t)code;

                    viddec_pm_get_bits(parent, &code , 6);
                    //sei_msg_ptr->minutes_value[i] = (uint8_t)code;

                    viddec_pm_get_bits(parent, &code , 5);
                    //sei_msg_ptr->hours_value[i] = (uint8_t)code;
                }
                else
                {
                    // Short form: each component is preceded by its own
                    // presence flag, nested seconds -> minutes -> hours.
                    viddec_pm_get_bits(parent, &code , 1);
                    //sei_msg_ptr->seconds_flag[i] = (uint8_t)code;
                    seconds_flag = code;

                    if(seconds_flag)
                    {
                        viddec_pm_get_bits(parent, &code , 6);
                        //sei_msg_ptr->seconds_value[i] = (uint8_t)code;

                        viddec_pm_get_bits(parent, &code , 1);
                        //sei_msg_ptr->minutes_flag[i] = (uint8_t)code;
                        minutes_flag = code;

                        if(minutes_flag)
                        {
                            viddec_pm_get_bits(parent, &code , 6);
                            //sei_msg_ptr->minutes_value[i] = (uint8_t)code;

                            viddec_pm_get_bits(parent, &code , 1);
                            //sei_msg_ptr->hours_flag[i] = (uint8_t)code;
                            hours_flag = code;

                            if(hours_flag){
                                viddec_pm_get_bits(parent, &code , 6);
                                //sei_msg_ptr->hours_value[i] = (uint8_t)code;
                            }
                        }
                    }
                }

                // time_offset is parsed but never propagated anywhere.
                if(time_offset_length > 0)
                {
                    viddec_pm_get_bits(parent, (uint32_t *)&time_offset, time_offset_length);
                }
            }
        }
    }


    return H264_STATUS_OK;
}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
// Parses a pan_scan_rect SEI message (H.264 spec D.1.3) and emits up to two
// workload items: a VIDDEC_WORKLOAD_H264_PAN_SCAN summary item and, when
// rectangles are present, one VIDDEC_WORKLOAD_SEI_PAN_SCAN_RECT item per
// rectangle.  Returns H264_STATUS_SEI_ERROR if the rectangle count exceeds
// MAX_PAN_SCAN_CNT, otherwise H264_STATUS_OK.
h264_Status h264_sei_pan_scan(void *parent,h264_Info* pInfo)
{
    h264_SEI_pan_scan_rectangle_t* sei_msg_ptr;
    h264_SEI_pan_scan_rectangle_t  sei_pan_scan;
    uint32_t code;

    viddec_workload_item_t     wi;

    // Zero the local message so untouched fields are deterministic.
    h264_memset( &(sei_pan_scan), 0x0, sizeof(h264_SEI_pan_scan_rectangle_t) );

    viddec_fw_reset_workload_item(&wi);
    wi.vwi_type = VIDDEC_WORKLOAD_H264_PAN_SCAN;

    sei_msg_ptr = (h264_SEI_pan_scan_rectangle_t *)(&sei_pan_scan);

    sei_msg_ptr->pan_scan_rect_id = h264_GetVLCElement(parent, pInfo, false);

    wi.h264_sei_pan_scan.pan_scan_rect_id = sei_msg_ptr->pan_scan_rect_id;

    viddec_pm_get_bits(parent, &code , 1);
    sei_msg_ptr->pan_scan_rect_cancel_flag = (uint8_t)code;
    viddec_fw_h264_sei_pan_scan_set_cancel_flag(&(wi.h264_sei_pan_scan), sei_msg_ptr->pan_scan_rect_cancel_flag);

    // cancel_flag set means "stop using previous rectangles": no payload follows.
    if(!sei_msg_ptr->pan_scan_rect_cancel_flag)
    {
        int32_t i;
        sei_msg_ptr->pan_scan_cnt_minus1 = h264_GetVLCElement(parent, pInfo, false);

        viddec_fw_h264_sei_pan_scan_set_cnt_minus1(&(wi.h264_sei_pan_scan), sei_msg_ptr->pan_scan_cnt_minus1);
        if(sei_msg_ptr->pan_scan_cnt_minus1 > MAX_PAN_SCAN_CNT -1)
        {
            return H264_STATUS_SEI_ERROR;
        }
        for(i=0; i<= sei_msg_ptr->pan_scan_cnt_minus1; i++)
        {
            sei_msg_ptr->pan_scan_rect_left_offset[i] = h264_GetVLCElement(parent, pInfo, true);
            sei_msg_ptr->pan_scan_rect_right_offset[i] = h264_GetVLCElement(parent, pInfo, true);
            sei_msg_ptr->pan_scan_rect_top_offset[i] = h264_GetVLCElement(parent, pInfo, true);
            sei_msg_ptr->pan_scan_rect_bottom_offset[i] = h264_GetVLCElement(parent, pInfo, true);
        }
        sei_msg_ptr->pan_scan_rect_repetition_period = h264_GetVLCElement(parent, pInfo, false);
        wi.h264_sei_pan_scan.pan_scan_rect_repetition_period = sei_msg_ptr->pan_scan_rect_repetition_period;
    }

#ifndef VBP
    if((pInfo->Is_first_frame_in_stream)||(!pInfo->is_current_workload_done)) //cur is first frame
    {
        viddec_pm_append_workitem( parent, &wi );
    }
    else
    {
        viddec_pm_append_workitem_next( parent, &wi );
    }
#endif

    if(!sei_msg_ptr->pan_scan_rect_cancel_flag)
    {
        int32_t i;

        viddec_fw_reset_workload_item(&wi);
        wi.vwi_type = VIDDEC_WORKLOAD_SEI_PAN_SCAN_RECT;

        for(i=0; i<= sei_msg_ptr->pan_scan_cnt_minus1; i++)
        {
            viddec_fw_h264_pan_scan_set_left(&(wi.h264_pan_scan_rect), sei_msg_ptr->pan_scan_rect_left_offset[i]);
            viddec_fw_h264_pan_scan_set_right(&(wi.h264_pan_scan_rect), sei_msg_ptr->pan_scan_rect_right_offset[i]);
            viddec_fw_h264_pan_scan_set_top(&(wi.h264_pan_scan_rect), sei_msg_ptr->pan_scan_rect_top_offset[i]);
            viddec_fw_h264_pan_scan_set_bottom(&(wi.h264_pan_scan_rect), sei_msg_ptr->pan_scan_rect_bottom_offset[i]);

#ifndef VBP
            // NOTE(review): this current/next decision checks only
            // Is_first_frame_in_stream, while the summary item above also
            // checks !is_current_workload_done — confirm the asymmetry is
            // intentional.
            if(pInfo->Is_first_frame_in_stream) { //cur is first frame
                viddec_pm_append_workitem( parent, &wi );
            } else {
                viddec_pm_append_workitem_next( parent, &wi );
            }
#endif
        }
    }

    return H264_STATUS_OK;
}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+// Consume an SEI filler payload (H.264 Annex D.1.5).
+// Filler data carries no information: read and discard payload_size bytes
+// (each nominally 0xFF) so the parser stays aligned with the next message.
+h264_Status h264_sei_filler_payload(void *parent,h264_Info* pInfo, uint32_t payload_size)
+{
+ h264_SEI_filler_payload_t filler;
+ uint32_t byte_idx;
+ uint32_t bits = 0;
+
+ //remove warning
+ pInfo = pInfo;
+
+ for(byte_idx = 0; byte_idx < payload_size; byte_idx++)
+ {
+  viddec_pm_get_bits(parent, &bits, 8);
+  filler.ff_byte = (uint8_t)bits;
+ }
+
+ return H264_STATUS_OK;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+// Parse SEI user_data_registered_itu_t_t35 (H.264 Annex D.1.6) and forward the
+// user data bytes to the workload in 11-byte chunks.
+// payload_size is the SEI payload length in bytes, including the ITU-T T.35
+// country-code byte and its optional extension byte.
+h264_Status h264_sei_userdata_reg(void *parent,h264_Info* pInfo, uint32_t payload_size)
+{
+ h264_SEI_userdata_registered_t* sei_msg_ptr;
+ h264_SEI_userdata_registered_t sei_userdata_registered;
+ uint32_t i;
+ int32_t byte = 0;
+ uint32_t code = 0;
+ viddec_workload_item_t wi;
+
+ wi.vwi_type = VIDDEC_WORKLOAD_SEI_USER_DATA_REGISTERED;
+ wi.vwi_payload[0] = wi.vwi_payload[1] = wi.vwi_payload[2] = 0;
+ //remove warning
+ pInfo = pInfo;
+
+ sei_msg_ptr = (h264_SEI_userdata_registered_t *)(&sei_userdata_registered);
+
+ // itu_t_t35_country_code: u(8); 0xFF means an extension byte follows.
+ viddec_pm_get_bits(parent, &code , 8);
+ sei_msg_ptr->itu_t_t35_country_code = (uint8_t)code;
+
+ if(sei_msg_ptr->itu_t_t35_country_code != 0xff) {
+  i = 1;
+ } else {
+  viddec_pm_get_bits(parent, &code , 8);
+  sei_msg_ptr->itu_t_t35_country_code_extension_byte = (uint8_t)code;
+  i = 2;
+ }
+
+ wi.user_data.size =0;
+
+ // BUGFIX: this was a do/while loop, which always consumed at least one byte
+ // and therefore read past the SEI payload whenever the country code (plus
+ // optional extension byte) already accounted for the whole payload
+ // (i >= payload_size on entry).
+ while(i < payload_size)
+ {
+  viddec_pm_get_bits(parent, (uint32_t *)&byte, 8);
+
+  wi.user_data.data_payload[wi.user_data.size]=(uint8_t)byte;
+  wi.user_data.size++;
+
+  // Flush each full 11-byte chunk into a workload item.
+  if(11 == wi.user_data.size)
+  {
+   viddec_pm_setup_userdata(&wi);
+#ifndef VBP
+   if((pInfo->Is_first_frame_in_stream)||(!pInfo->is_current_workload_done)) //cur is first frame
+   {
+    viddec_pm_append_workitem( parent, &wi );
+   }
+   else
+   {
+    viddec_pm_append_workitem_next( parent, &wi );
+   }
+#endif
+   wi.user_data.size =0;
+  }
+
+  i++;
+ }
+
+ // Flush any remaining partial chunk.
+ if(0!=wi.user_data.size)
+ {
+  viddec_pm_setup_userdata(&wi);
+
+#ifndef VBP
+  if(pInfo->Is_first_frame_in_stream) //cur is first frame
+  {
+   viddec_pm_append_workitem( parent, &wi );
+  }
+  else
+  {
+   viddec_pm_append_workitem_next( parent, &wi );
+  }
+#endif
+ }
+
+ return H264_STATUS_OK;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+// Parse SEI user_data_unregistered (H.264 Annex D.1.7): a 16-byte UUID
+// followed by (payload_size - 16) bytes of opaque user data, which are
+// forwarded to the workload in 11-byte chunks.
+h264_Status h264_sei_userdata_unreg(void *parent, h264_Info* pInfo, uint32_t payload_size)
+{
+
+ h264_SEI_userdata_unregistered_t* sei_msg_ptr;
+ h264_SEI_userdata_unregistered_t sei_userdata_unregistered;
+ uint32_t i;
+ int32_t byte = 0;
+ uint32_t code;
+
+ viddec_workload_item_t wi;
+
+ wi.vwi_type = VIDDEC_WORKLOAD_SEI_USER_DATA_UNREGISTERED;
+
+ //remove warning
+ pInfo = pInfo;
+
+ sei_msg_ptr = (h264_SEI_userdata_unregistered_t *)(&sei_userdata_unregistered);
+
+ // uuid_iso_iec_11578: 128 bits, read as four 32-bit words.
+ // NOTE(review): each word is stored through a (uint8_t) cast, keeping only
+ // its low byte — confirm the element type of uuid_iso_iec_11578; this looks
+ // like a truncation bug if the field is wider than 8 bits.
+ for (i = 0; i < 4; i++)
+ {
+ viddec_pm_get_bits(parent, &code , 32);
+ sei_msg_ptr->uuid_iso_iec_11578[i] = (uint8_t)code;
+ }
+
+ // Remaining payload bytes (after the 16-byte UUID) are user data.
+ wi.user_data.size =0;
+ for(i = 16; i < payload_size; i++)
+ {
+
+ viddec_pm_get_bits(parent, (uint32_t *)&byte, 8);
+
+ wi.user_data.data_payload[wi.user_data.size]=(uint8_t)byte;
+ wi.user_data.size++;
+
+ // Flush each full 11-byte chunk into a workload item.
+ // NOTE(review): unlike h264_sei_userdata_reg, these appends are not
+ // guarded by #ifndef VBP — confirm that is intentional.
+ if(11 == wi.user_data.size)
+ {
+ viddec_pm_setup_userdata(&wi);
+ if((pInfo->Is_first_frame_in_stream)||(!pInfo->is_current_workload_done)) //cur is first frame
+ {
+ viddec_pm_append_workitem( parent, &wi );
+ }
+ else
+ {
+ viddec_pm_append_workitem_next( parent, &wi );
+ }
+
+ wi.user_data.size =0;
+ }
+ }
+
+ // Flush any remaining partial chunk.
+ if(0!=wi.user_data.size)
+ {
+ viddec_pm_setup_userdata(&wi);
+ if(pInfo->Is_first_frame_in_stream) //cur is first frame
+ {
+ viddec_pm_append_workitem( parent, &wi );
+ }
+ else
+ {
+ viddec_pm_append_workitem_next( parent, &wi );
+ }
+ }
+
+ return H264_STATUS_OK;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+// Parse SEI recovery_point (H.264 Annex D.1.8), record the recovery state in
+// pInfo->sei_information, and append a recovery-point workload item.
+h264_Status h264_sei_recovery_point(void *parent, h264_Info* pInfo)
+{
+
+ h264_SEI_recovery_point_t* sei_msg_ptr;
+ h264_SEI_recovery_point_t sei_recovery_point;
+ uint32_t code;
+ viddec_workload_item_t wi;
+
+
+ sei_msg_ptr = (h264_SEI_recovery_point_t *)(&sei_recovery_point);
+
+ // recovery_frame_cnt: ue(v)
+ sei_msg_ptr->recovery_frame_cnt = h264_GetVLCElement(parent, pInfo, false);
+
+ viddec_pm_get_bits(parent, &code , 1);
+ sei_msg_ptr->exact_match_flag = (uint8_t)code;
+
+ viddec_pm_get_bits(parent, &code , 1);
+ sei_msg_ptr->broken_link_flag = (uint8_t)code;
+
+ viddec_pm_get_bits(parent, &code , 2);
+ sei_msg_ptr->changing_slice_group_idc = (uint8_t)code;
+
+ // Latch recovery-point state for the decode loop.
+ pInfo->sei_information.recovery_point = 1;
+ pInfo->sei_information.recovery_frame_cnt = (int32_t) sei_msg_ptr->recovery_frame_cnt;
+ pInfo->sei_information.capture_fn = 1;
+ pInfo->sei_information.broken_link_pic = sei_msg_ptr->broken_link_flag;
+
+ if(pInfo->got_start) {
+ pInfo->img.recovery_point_found |= 2;
+
+ //// Enable the RP recovery if no IDR ---Cisco
+ if((pInfo->img.recovery_point_found & 1)==0)
+ pInfo->sei_rp_received = 1;
+ }
+
+ //
+ /// Append workload for SEI
+ //
+ viddec_fw_reset_workload_item(&wi);
+ wi.vwi_type = VIDDEC_WORKLOAD_SEI_RECOVERY_POINT;
+ wi.h264_sei_recovery_point.recovery_frame_cnt = sei_msg_ptr->recovery_frame_cnt;
+ viddec_fw_h264_h264_sei_recovery_set_exact_match_flag(&(wi.h264_sei_recovery_point), sei_msg_ptr->exact_match_flag);
+ viddec_fw_h264_h264_sei_recovery_set_broken_link_flag(&(wi.h264_sei_recovery_point), sei_msg_ptr->broken_link_flag);
+ wi.h264_sei_recovery_point.changing_slice_group_idc = sei_msg_ptr->changing_slice_group_idc;
+
+ // Append to the current workload, or the next one if already complete.
+ if((pInfo->Is_first_frame_in_stream)||(!pInfo->is_current_workload_done)) //cur is first frame
+ {
+ viddec_pm_append_workitem( parent, &wi );
+ }
+ else
+ {
+ viddec_pm_append_workitem_next( parent, &wi );
+ }
+
+ return H264_STATUS_OK;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+// Parse SEI dec_ref_pic_marking_repetition (H.264 Annex D.1.10): the original
+// IDR flag / frame number / field information, followed by a repetition of the
+// dec_ref_pic_marking() syntax.
+h264_Status h264_sei_dec_ref_pic_marking_rep(void *parent,h264_Info* pInfo)
+{
+
+ h264_SEI_decoded_ref_pic_marking_repetition_t* sei_msg_ptr;
+ h264_SEI_decoded_ref_pic_marking_repetition_t sei_ref_pic;
+ uint32_t code;
+
+ sei_msg_ptr = (h264_SEI_decoded_ref_pic_marking_repetition_t *)(&sei_ref_pic);
+
+ viddec_pm_get_bits(parent, &code , 1);
+ sei_msg_ptr->original_idr_flag = (uint8_t)code;
+
+ sei_msg_ptr->original_frame_num = h264_GetVLCElement(parent, pInfo, false);
+
+ // Field information is only coded for interlaced-capable streams.
+ if(!(pInfo->active_SPS.sps_disp.frame_mbs_only_flag))
+ {
+ viddec_pm_get_bits(parent, &code , 1);
+ sei_msg_ptr->orignal_field_pic_flag = (uint8_t)code;
+
+ if(sei_msg_ptr->orignal_field_pic_flag)
+ {
+ viddec_pm_get_bits(parent, &code , 1);
+ sei_msg_ptr->original_bottom_field_pic_flag = (uint8_t)code;
+ }
+ }
+ // NOTE(review): this re-parses the marking syntax directly into
+ // pInfo->SliceHeader, overwriting its dec_ref_pic_marking fields — confirm
+ // the caller expects that side effect.
+ h264_Parse_Dec_Ref_Pic_Marking(parent, pInfo, &pInfo->SliceHeader);
+ return H264_STATUS_OK;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+// SEI spare_pic (H.264 Annex D.1.9) is not supported by this parser; the
+// payload is skipped by the caller, so simply report success.
+h264_Status h264_sei_spare_pic(void *parent,h264_Info* pInfo)
+{
+ // Silence unused-parameter warnings.
+ pInfo = pInfo;
+ parent = parent;
+
+ return H264_STATUS_OK;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+// Parse SEI scene_info (H.264 Annex D.1.22): scene id and transition type,
+// with a second scene id for transitions blending two scenes.
+h264_Status h264_sei_scene_info(void *parent,h264_Info* pInfo)
+{
+ h264_SEI_scene_info_t scene_info;
+ h264_SEI_scene_info_t *msg = &scene_info;
+ uint32_t bits = 0;
+
+ // scene_info_present_flag: u(1)
+ viddec_pm_get_bits(parent, &bits, 1);
+ msg->scene_info_present_flag = (uint8_t)bits;
+
+ if(msg->scene_info_present_flag)
+ {
+  msg->scene_id = h264_GetVLCElement(parent, pInfo, false);
+  msg->scene_transitioning_type = h264_GetVLCElement(parent, pInfo, false);
+
+  // Transition types above 3 involve a second scene.
+  if(msg->scene_transitioning_type > 3)
+  {
+   msg->second_scene_id = h264_GetVLCElement(parent, pInfo, false);
+  }
+ }
+
+ return H264_STATUS_OK;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+// Parse SEI sub_seq_info (H.264 Annex D.1.11): sub-sequence layer/id,
+// four picture flags, and an optional sub-sequence frame number.
+h264_Status h264_sei_sub_seq_info(void *parent,h264_Info* pInfo)
+{
+ h264_SEI_sub_sequence_info_t info;
+ h264_SEI_sub_sequence_info_t *msg = &info;
+ uint32_t bits = 0;
+
+ // sub_seq_layer_num / sub_seq_id: ue(v)
+ msg->sub_seq_layer_num = h264_GetVLCElement(parent, pInfo, false);
+ msg->sub_seq_id = h264_GetVLCElement(parent, pInfo, false);
+
+ // Four single-bit flags, in bitstream order.
+ viddec_pm_get_bits(parent, &bits, 1);
+ msg->first_ref_pic_flag = (uint8_t)bits;
+
+ viddec_pm_get_bits(parent, &bits, 1);
+ msg->leading_non_ref_pic_flag = (uint8_t)bits;
+
+ viddec_pm_get_bits(parent, &bits, 1);
+ msg->last_pic_flag = (uint8_t)bits;
+
+ viddec_pm_get_bits(parent, &bits, 1);
+ msg->sub_seq_frame_num_flag = (uint8_t)bits;
+
+ // sub_seq_frame_num is only coded when flagged.
+ if(msg->sub_seq_frame_num_flag)
+ {
+  msg->sub_seq_frame_num = h264_GetVLCElement(parent, pInfo, false);
+ }
+
+ return H264_STATUS_OK;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+// Parse SEI sub_seq_layer_characteristics (H.264 Annex D.1.12): per-layer
+// statistics flag, average bit rate, and average frame rate.
+// Returns H264_STATUS_SEI_ERROR when the layer count exceeds storage.
+h264_Status h264_sei_sub_seq_layer(void *parent,h264_Info* pInfo)
+{
+ h264_SEI_sub_sequence_layer_t layer_msg;
+ uint32_t bits = 0;
+ int32_t n;
+
+ // num_sub_seq_layers_minus1: ue(v); bounded by the fixed-size arrays below.
+ layer_msg.num_sub_seq_layers_minus1 = h264_GetVLCElement(parent, pInfo, false);
+ if(layer_msg.num_sub_seq_layers_minus1 >= MAX_SUB_SEQ_LAYERS)
+ {
+  return H264_STATUS_SEI_ERROR;
+ }
+
+ // Per layer: accurate_statistics_flag u(1), average_bit_rate u(16),
+ // average_frame_rate u(16).
+ for(n = 0; n <= layer_msg.num_sub_seq_layers_minus1; n++)
+ {
+  viddec_pm_get_bits(parent, &bits, 1);
+  layer_msg.accurate_statistics_flag[n] = (uint8_t)bits;
+
+  viddec_pm_get_bits(parent, &bits, 16);
+  layer_msg.average_bit_rate[n] = (uint16_t)bits;
+
+  viddec_pm_get_bits(parent, &bits, 16);
+  layer_msg.average_frame_rate[n] = (uint16_t)bits;
+ }
+
+ return H264_STATUS_OK;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+// Parse SEI sub_seq_characteristics (H.264 Annex D.1.13): duration, average
+// rate statistics, and the list of referenced sub-sequences.
+// Returns H264_STATUS_SEI_ERROR when num_referenced_subseqs exceeds storage.
+h264_Status h264_sei_sub_seq(void *parent,h264_Info* pInfo)
+{
+ int32_t n;
+ uint32_t code;
+
+ h264_SEI_sub_sequence_t* sei_msg_ptr;
+ h264_SEI_sub_sequence_t sei_sub_sequence;
+
+ sei_msg_ptr = (h264_SEI_sub_sequence_t *)(&sei_sub_sequence);
+
+ sei_msg_ptr->sub_seq_layer_num = h264_GetVLCElement(parent, pInfo, false);
+ sei_msg_ptr->sub_seq_id= h264_GetVLCElement(parent, pInfo, false);
+
+ viddec_pm_get_bits(parent, &code , 1);
+ sei_msg_ptr->duration_flag = (uint8_t)code;
+
+ if(sei_msg_ptr->duration_flag)
+ {
+  // sub_seq_duration: u(32)
+  viddec_pm_get_bits(parent, (uint32_t *)&sei_msg_ptr->sub_seq_duration, 32);
+ }
+
+ viddec_pm_get_bits(parent, &code , 1);
+ sei_msg_ptr->average_rate_flag = (uint8_t)code;
+
+ if(sei_msg_ptr->average_rate_flag)
+ {
+  viddec_pm_get_bits(parent, &code , 1);
+  sei_msg_ptr->average_statistics_flag = (uint8_t)code;
+
+  // BUGFIX: average_bit_rate and average_frame_rate are 16-bit syntax
+  // elements (u(16)); the previous (uint8_t) casts discarded the high byte.
+  // Use (uint16_t), matching h264_sei_sub_seq_layer().
+  viddec_pm_get_bits(parent, &code , 16);
+  sei_msg_ptr->average_bit_rate = (uint16_t)code;
+
+  viddec_pm_get_bits(parent, &code , 16);
+  sei_msg_ptr->average_frame_rate = (uint16_t)code;
+
+ }
+ sei_msg_ptr->num_referenced_subseqs = h264_GetVLCElement(parent, pInfo, false);
+ if(sei_msg_ptr->num_referenced_subseqs >= MAX_NUM_REF_SUBSEQS)
+ {
+  return H264_STATUS_SEI_ERROR;
+ }
+
+ for(n = 0; n < sei_msg_ptr->num_referenced_subseqs; n++)
+ {
+  sei_msg_ptr->ref_sub_seq_layer_num= h264_GetVLCElement(parent, pInfo, false);
+  sei_msg_ptr->ref_sub_seq_id= h264_GetVLCElement(parent, pInfo, false);
+
+  viddec_pm_get_bits(parent, &code , 1);
+  sei_msg_ptr->ref_sub_seq_direction = (uint8_t)code;
+ }
+ return H264_STATUS_OK;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+// Parse SEI full_frame_freeze (H.264 Annex D.1.14) and latch the repetition
+// period so the decode loop can hold the displayed frame.
+h264_Status h264_sei_full_frame_freeze(void *parent,h264_Info* pInfo)
+{
+ h264_SEI_full_frame_freeze_t freeze_msg;
+
+ // full_frame_freeze_repetition_period: ue(v)
+ freeze_msg.full_frame_freeze_repetition_period = h264_GetVLCElement(parent, pInfo, false);
+
+ pInfo->sei_information.capture_POC = 1;
+ pInfo->sei_information.freeze_rep_period = freeze_msg.full_frame_freeze_repetition_period;
+
+ return H264_STATUS_OK;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+// SEI full_frame_freeze_release (H.264 Annex D.1.15) carries no payload
+// fields; there is nothing to parse.
+h264_Status h264_sei_full_frame_freeze_release(void *parent,h264_Info* pInfo)
+{
+ // Silence unused-parameter warnings.
+ parent = parent;
+ pInfo = pInfo;
+
+ return H264_STATUS_OK;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+// Parse SEI full_frame_snapshot (H.264 Annex D.1.16).
+// The snapshot id is read only to keep the bitstream position correct;
+// its value is not consumed by this parser.
+h264_Status h264_sei_full_frame_snapshot(void *parent,h264_Info* pInfo)
+{
+ h264_SEI_full_frame_snapshot_t snapshot;
+
+ // snapshot_id: ue(v)
+ snapshot.snapshot_id = h264_GetVLCElement(parent, pInfo, false);
+
+ return H264_STATUS_OK;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+// Parse SEI progressive_refinement_segment_start (H.264 Annex D.1.17).
+// Values are read only to advance the bitstream; they are not used.
+h264_Status h264_sei_progressive_segement_start(void *parent,h264_Info* pInfo)
+{
+ h264_SEI_progressive_segment_start_t seg_start;
+
+ // progressive_refinement_id / num_refinement_steps_minus1: ue(v)
+ seg_start.progressive_refinement_id = h264_GetVLCElement(parent, pInfo, false);
+ seg_start.num_refinement_steps_minus1 = h264_GetVLCElement(parent, pInfo, false);
+
+ return H264_STATUS_OK;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+// Parse SEI progressive_refinement_segment_end (H.264 Annex D.1.18).
+// The id is read only to advance the bitstream; it is not used.
+h264_Status h264_sei_progressive_segment_end(void *parent,h264_Info* pInfo)
+{
+ h264_SEI_progressive_segment_end_t seg_end;
+
+ // progressive_refinement_id: ue(v)
+ seg_end.progressive_refinement_id = h264_GetVLCElement(parent, pInfo, false);
+
+ return H264_STATUS_OK;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+// Parse SEI motion_constrained_slice_group_set (H.264 Annex D.1.19).
+// Returns H264_STATUS_SEI_ERROR when the set size exceeds storage.
+h264_Status h264_sei_motion_constrained_slice_grp_set(void *parent, h264_Info* pInfo)
+{
+ int32_t i;
+ uint32_t code;
+ h264_SEI_motion_constrained_slice_group_t* sei_msg_ptr;
+ h264_SEI_motion_constrained_slice_group_t sei_motion_constrained_slice_group;
+
+ sei_msg_ptr = (h264_SEI_motion_constrained_slice_group_t *)(&sei_motion_constrained_slice_group);
+
+ sei_msg_ptr->num_slice_groups_in_set_minus1= h264_GetVLCElement(parent, pInfo, false);
+ if(sei_msg_ptr->num_slice_groups_in_set_minus1 >= MAX_NUM_SLICE_GRPS)
+ {
+ return H264_STATUS_SEI_ERROR;
+ }
+
+ // NOTE(review): the spec codes slice_group_id[i] with
+ // ceil(log2(num_slice_groups_minus1 + 1)) bits; reading a fixed single bit
+ // is only correct for streams with at most two slice groups — confirm.
+ for(i=0; i<= sei_msg_ptr->num_slice_groups_in_set_minus1; i++)
+ {
+ viddec_pm_get_bits(parent, &code , 1);
+ sei_msg_ptr->slice_group_id[i] = (uint8_t)code;
+ }
+ viddec_pm_get_bits(parent, &code , 1);
+ sei_msg_ptr->exact_sample_value_match_flag = (uint8_t)code;
+
+ viddec_pm_get_bits(parent, &code , 1);
+ sei_msg_ptr->pan_scan_rect_flag = (uint8_t)code;
+
+
+ // Optional pan-scan rectangle reference.
+ if(sei_msg_ptr->pan_scan_rect_flag)
+ {
+ sei_msg_ptr->pan_scan_rect_id= h264_GetVLCElement(parent, pInfo, false);
+ }
+ return H264_STATUS_OK;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+// SEI film_grain_characteristics (H.264 Annex D.1.21) is not supported by
+// this parser; the payload is skipped by the caller, so report success.
+h264_Status h264_sei_film_grain_characteristics(void *parent,h264_Info* pInfo)
+{
+ // Silence unused-parameter warnings.
+ parent = parent;
+ pInfo = pInfo;
+
+ return H264_STATUS_OK;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+// SEI deblocking_filter_display_preference (H.264 Annex D.1.20) is not
+// supported by this parser; the payload is skipped by the caller.
+h264_Status h264_sei_deblocking_filter_display_preferences(void *parent,h264_Info* pInfo)
+{
+ // Silence unused-parameter warnings.
+ parent = parent;
+ pInfo = pInfo;
+
+ return H264_STATUS_OK;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+// SEI stereo_video_info (H.264 Annex D.1.23) is not supported by this
+// parser; the payload is skipped by the caller, so report success.
+h264_Status h264_sei_stereo_video_info(void *parent,h264_Info* pInfo)
+{
+ // Silence unused-parameter warnings.
+ parent = parent;
+ pInfo = pInfo;
+
+ return H264_STATUS_OK;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+// Consume a reserved/unsupported SEI payload (H.264 Annex D.1.24).
+// Reads exactly payload_size bytes from the bitstream so the parser stays
+// aligned with the next SEI message. The bytes are packed little-endian into
+// 32-bit words but are currently not forwarded anywhere (the workload-append
+// code was removed).
+h264_Status h264_sei_reserved_sei_message(void *parent, h264_Info* pInfo, uint32_t payload_size)
+{
+ int32_t byte_index, user_data_byte_index;
+ uint32_t i;
+ int32_t word, bits;
+ uint32_t user_data = 0;
+
+ //remove warning
+ pInfo = pInfo;
+
+ byte_index = 0;
+ word = 0;
+ user_data_byte_index = 0x0;
+
+ for(i = 0; i < payload_size; i++)
+ {
+  if(byte_index == 0) word = 0;
+  viddec_pm_get_bits(parent, (uint32_t *)&bits, 8);
+
+  // Pack the byte into its position within the current 32-bit word.
+  switch (byte_index)
+  {
+   case 1:
+    word = (bits << 8) | word;
+    break;
+   case 2:
+    word = (bits << 16) | word;
+    break;
+   case 3:
+    word = (bits << 24) | word;
+    break;
+   default :
+    word = bits;
+    break;
+  }
+
+  if(byte_index == 3)
+  {
+   byte_index = 0;
+   user_data = word;
+  }
+  else
+  {
+   byte_index++;
+  }
+
+  user_data_byte_index++;
+  if ( user_data_byte_index == MAX_USER_DATA_SIZE)
+  {
+   byte_index = 0;
+   word = 0;
+   user_data_byte_index = 0x0;
+  }
+ }
+
+ // Capture a trailing partial word, if any.
+ if(byte_index)
+  user_data = word;
+
+ (void)user_data;
+
+ // BUGFIX: this function previously did "return user_data_byte_index;"
+ // followed by an unreachable "return H264_STATUS_OK;". The caller
+ // (h264_SEI_payload) interprets the return value as an h264_Status, so a
+ // non-zero byte count was misreported as an error. Always report success.
+ return H264_STATUS_OK;
+}
+
+////// TODO
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+// Dispatch a single SEI payload to its type-specific parser.
+// payloadType and payloadSize come from
+// h264_Parse_Supplemental_Enhancement_Information_Message(); unknown payload
+// types are consumed (and thereby skipped) via h264_sei_reserved_sei_message.
+// Returns the sub-parser's status.
+h264_Status h264_SEI_payload(void *parent, h264_Info* pInfo, h264_sei_payloadtype payloadType, int32_t payloadSize)
+{
+ h264_Status status = H264_STATUS_OK;
+
+ switch(payloadType)
+ {
+  case SEI_BUF_PERIOD:
+   status = h264_sei_buffering_period(parent, pInfo);
+   break;
+  case SEI_PIC_TIMING:
+   status = h264_sei_pic_timing(parent, pInfo);
+   break;
+  case SEI_PAN_SCAN:
+   status = h264_sei_pan_scan(parent, pInfo);
+   break;
+  case SEI_FILLER_PAYLOAD:
+   status = h264_sei_filler_payload(parent, pInfo, payloadSize);
+   break;
+  case SEI_REG_USERDATA:
+   status = h264_sei_userdata_reg(parent, pInfo, payloadSize);
+   break;
+  case SEI_UNREG_USERDATA:
+   status = h264_sei_userdata_unreg(parent, pInfo, payloadSize);
+   break;
+  case SEI_RECOVERY_POINT:
+   // BUGFIX: the return status was previously discarded for this case.
+   status = h264_sei_recovery_point(parent, pInfo);
+   break;
+  case SEI_DEC_REF_PIC_MARKING_REP:
+   status = h264_sei_dec_ref_pic_marking_rep(parent, pInfo);
+   break;
+  case SEI_SPARE_PIC:
+   status = h264_sei_spare_pic(parent, pInfo);
+   break;
+  case SEI_SCENE_INFO:
+   status = h264_sei_scene_info(parent, pInfo);
+   break;
+  case SEI_SUB_SEQ_INFO:
+   status = h264_sei_sub_seq_info(parent, pInfo);
+   break;
+  case SEI_SUB_SEQ_LAYER:
+   status = h264_sei_sub_seq_layer(parent, pInfo);
+   break;
+  case SEI_SUB_SEQ:
+   status = h264_sei_sub_seq(parent, pInfo);
+   break;
+  case SEI_FULL_FRAME_FREEZE:
+   status = h264_sei_full_frame_freeze(parent, pInfo);
+   break;
+  case SEI_FULL_FRAME_FREEZE_RELEASE:
+   // BUGFIX: the return status was previously discarded for this case.
+   status = h264_sei_full_frame_freeze_release(parent, pInfo);
+   break;
+  case SEI_FULL_FRAME_SNAPSHOT:
+   status = h264_sei_full_frame_snapshot(parent, pInfo);
+   break;
+  case SEI_PROGRESSIVE_SEGMENT_START:
+   status = h264_sei_progressive_segement_start(parent, pInfo);
+   break;
+  case SEI_PROGRESSIVE_SEGMENT_END:
+   status = h264_sei_progressive_segment_end(parent, pInfo);
+   break;
+  case SEI_MOTION_CONSTRAINED_SLICE_GRP_SET:
+   status = h264_sei_motion_constrained_slice_grp_set(parent, pInfo);
+   break;
+  case SEI_FILM_GRAIN_CHARACTERISTICS:
+   status = h264_sei_film_grain_characteristics(parent, pInfo);
+   break;
+  case SEI_DEBLK_FILTER_DISPLAY_PREFERENCE:
+   status = h264_sei_deblocking_filter_display_preferences(parent, pInfo);
+   break;
+  case SEI_STEREO_VIDEO_INFO:
+   status = h264_sei_stereo_video_info(parent, pInfo);
+   break;
+  default:
+   // Unknown/reserved payload: consume payloadSize bytes to stay aligned.
+   status = h264_sei_reserved_sei_message(parent, pInfo, payloadSize);
+   break;
+ }
+
+ return status;
+}
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+// Parse a complete SEI NAL unit (H.264 7.3.2.3): iterate over the contained
+// sei_message() structures, decoding the ff-byte-extended payload type and
+// size for each, dispatching to h264_SEI_payload(), and realigning to the
+// next byte boundary until the rbsp_trailing_bits pattern (0x80) is seen.
+h264_Status h264_Parse_Supplemental_Enhancement_Information_Message(void *parent, h264_Info* pInfo)
+{
+ h264_Status status = H264_STATUS_OK;
+ int32_t payload_type, payload_size;
+ uint32_t next_8_bits = 0,bits_offset=0,byte_offset = 0;
+ uint8_t is_emul = 0;
+ int32_t bits_operation_result = 0;
+
+ do {
+ //// payload_type
+ // last_payload_type: each 0xFF byte adds 255, terminated by the final byte.
+ // NOTE(review): the first get_bits of each field is not checked for
+ // end-of-data, unlike the reads inside the loops — confirm.
+ payload_type = 0;
+ viddec_pm_get_bits(parent, (uint32_t *)&next_8_bits, 8);
+ while (next_8_bits == 0xFF)
+ {
+ bits_operation_result = viddec_pm_get_bits(parent, (uint32_t *)&next_8_bits, 8);
+ if(-1 == bits_operation_result)
+ {
+ status = H264_STATUS_SEI_ERROR;
+ return status;
+ }
+ payload_type += 255;
+
+ }
+ //viddec_pm_get_bits(parent, (uint32_t *)&next_8_bits, 8);
+ payload_type += next_8_bits;
+
+ //// payload_size
+ // last_payload_size: same ff-byte extension coding as the type.
+ payload_size = 0;
+ viddec_pm_get_bits(parent, (uint32_t *)&next_8_bits, 8);
+ while (next_8_bits == 0xFF)
+ {
+ payload_size += 255;
+ bits_operation_result = viddec_pm_get_bits(parent, (uint32_t *)&next_8_bits, 8);
+ if(-1 == bits_operation_result)
+ {
+ status = H264_STATUS_SEI_ERROR;
+ return status;
+ }
+ }
+ //viddec_pm_get_bits(parent, (uint32_t *)&next_8_bits, 8);
+ payload_size += next_8_bits;
+
+ //PRINTF(MFD_NONE, " SEI: payload type = %d, payload size = %d \n", payload_type, payload_size);
+
+
+ /////////////////////////////////
+ // Parse SEI payloads
+ /////////////////////////////////
+ status = h264_SEI_payload(parent, pInfo, payload_type, payload_size);
+ if(status != H264_STATUS_OK)
+ break;
+
+ viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul);
+ // OS_INFO("SEI byte_offset 3= %d, bits_offset=%d\n", byte_offset, bits_offset);
+
+ // Discard any bit_equal_to_zero padding to realign to a byte boundary.
+ if(bits_offset!=0)
+ {
+ viddec_pm_get_bits(parent, (uint32_t *)&next_8_bits, 8-bits_offset);
+ }
+
+ // Peek for the rbsp_trailing_bits stop bit pattern without consuming it.
+ bits_operation_result = viddec_pm_peek_bits(parent, (uint32_t *)&next_8_bits, 8);
+ if(-1 == bits_operation_result)
+ {
+ status = H264_STATUS_SEI_ERROR;
+ return status;
+ }
+
+ // OS_INFO("next_8_bits = %08x\n", next_8_bits);
+
+ }while(next_8_bits != 0x80);
+
+ //} while (h264_More_RBSP_Data(parent, pInfo) && status == H264_STATUS_OK);
+
+ return status;
+}
+
+#endif
+
diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sh.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sh.c
new file mode 100644
index 0000000..3134ae0
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sh.c
@@ -0,0 +1,740 @@
+//#define H264_PARSE_SLICE_HDR
+//#ifdef H264_PARSE_SLICE_HDR
+
+#include "h264.h"
+#include "h264parse.h"
+
+extern int32_t viddec_pm_get_au_pos(void *parent, uint32_t *bit, uint32_t *byte, unsigned char *is_emul);
+
+
+/*-----------------------------------------------------------------------------------------*/
+// Slice header 1----
+// 1) first_mb_in_slice, slice_type, pic_parameter_id
+/*-----------------------------------------------------------------------------------------*/
+// Parse the leading slice-header syntax elements: first_mb_in_slice,
+// slice_type and pic_parameter_set_id (all ue(v) coded, read via
+// h264_GetVLCElement).
+// Returns H264_STATUS_OK on success, H264_STATUS_NOTSUPPORT when the
+// (modulo-5 reduced) slice type is beyond h264_PtypeI, or
+// H264_PPS_INVALID_PIC_ID when the PPS id exceeds MAX_PIC_PARAMS.
+h264_Status h264_Parse_Slice_Header_1(void *parent,h264_Info* pInfo, h264_Slice_Header_t *SliceHeader)
+{
+ h264_Status ret = H264_STATUS_ERROR;
+
+ //h264_Slice_Header_t* SliceHeader = &pInfo->SliceHeader;
+ int32_t slice_type =0;
+ uint32_t data =0;
+
+ do {
+ ///// first_mb_in_slice
+ SliceHeader->first_mb_in_slice = h264_GetVLCElement(parent, pInfo, false);
+
+ ///// slice_type
+ slice_type = h264_GetVLCElement(parent, pInfo, false);
+ // Per the H.264 spec, slice_type values 5..9 are the "all slices in
+ // this picture share the type" variants of 0..4; fold them together.
+ SliceHeader->slice_type = (slice_type%5);
+
+ if(SliceHeader->slice_type > h264_PtypeI) {
+ ret = H264_STATUS_NOTSUPPORT;
+ break;
+ }
+
+
+ ////// pic_parameter_id
+ data = h264_GetVLCElement(parent, pInfo, false);
+ // NOTE(review): spec-valid PPS ids are 0..MAX_PIC_PARAMS-1; this also
+ // accepts data == MAX_PIC_PARAMS -- confirm against the PPS table size.
+ if(data > MAX_PIC_PARAMS) {
+ ret = H264_PPS_INVALID_PIC_ID;
+ break;
+ }
+ SliceHeader->pic_parameter_id = (uint8_t)data;
+ ret = H264_STATUS_OK;
+ }while(0);
+
+ return ret;
+}
+
+/*-----------------------------------------------------------------------------------------*/
+// slice header 2
+// frame_num
+// field_pic_flag, structure
+// idr_pic_id
+// pic_order_cnt_lsb, delta_pic_order_cnt_bottom
+/*-----------------------------------------------------------------------------------------*/
+
+// Parse the second part of the slice header: frame_num, field/frame picture
+// structure, idr_pic_id (IDR only), POC syntax (pic_order_cnt_lsb /
+// delta_pic_order_cnt*) and redundant_pic_cnt. Also validates
+// first_mb_in_slice against the picture's macroblock count (doubled for
+// MBAFF frames, since first_mb_in_slice then addresses MB pairs).
+// Returns H264_STATUS_OK on success, H264_SliceHeader_ERROR otherwise.
+h264_Status h264_Parse_Slice_Header_2(void *parent, h264_Info* pInfo, h264_Slice_Header_t *SliceHeader)
+{
+ h264_Status ret = H264_SliceHeader_ERROR;
+
+ //h264_Slice_Header_t* SliceHeader = &pInfo->SliceHeader;
+ uint32_t code;
+ int32_t max_mb_num=0;
+
+ do {
+ //////////////////////////////////// Slice header part 2//////////////////
+
+ /// Frame_num
+ viddec_pm_get_bits(parent, &code, pInfo->active_SPS.log2_max_frame_num_minus4+4);
+ SliceHeader->frame_num = (int32_t)code;
+
+ /// Picture structure: default to FRAME, refined below for field coding.
+ SliceHeader->structure = FRAME;
+ SliceHeader->field_pic_flag = 0;
+ SliceHeader->bottom_field_flag = 0;
+
+ if(!(pInfo->active_SPS.sps_disp.frame_mbs_only_flag))
+ {
+ /// field_pic_flag
+ viddec_pm_get_bits(parent, &code, 1);
+ SliceHeader->field_pic_flag = (uint8_t)code;
+
+ if(SliceHeader->field_pic_flag)
+ {
+ viddec_pm_get_bits(parent, &code, 1);
+ SliceHeader->bottom_field_flag = (uint8_t)code;
+
+ SliceHeader->structure = SliceHeader->bottom_field_flag? BOTTOM_FIELD: TOP_FIELD;
+ }
+ }
+
+ ////// Check valid or not of first_mb_in_slice
+ if(SliceHeader->structure == FRAME) {
+ max_mb_num = pInfo->img.FrameHeightInMbs * pInfo->img.PicWidthInMbs;
+ } else {
+ max_mb_num = pInfo->img.FrameHeightInMbs * pInfo->img.PicWidthInMbs/2;
+ }
+
+
+ ///if(pInfo->img.MbaffFrameFlag)
+ // BUGFIX: use the SliceHeader being parsed (not the stale global
+ // pInfo->SliceHeader) and logical && instead of bitwise & -- the old
+ // form read another struct's field_pic_flag, giving a wrong MBAFF
+ // decision when the caller passes a different SliceHeader.
+ if(pInfo->active_SPS.sps_disp.mb_adaptive_frame_field_flag && (!(SliceHeader->field_pic_flag))) {
+ SliceHeader->first_mb_in_slice <<=1;
+ }
+
+ if(SliceHeader->first_mb_in_slice >= max_mb_num)
+ break;
+
+
+ if(pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR)
+ {
+ SliceHeader->idr_pic_id = h264_GetVLCElement(parent, pInfo, false);
+ }
+
+ if(pInfo->active_SPS.pic_order_cnt_type == 0)
+ {
+ viddec_pm_get_bits(parent, &code , pInfo->active_SPS.log2_max_pic_order_cnt_lsb_minus4+4);
+ SliceHeader->pic_order_cnt_lsb = (uint32_t)code;
+
+
+ if((pInfo->active_PPS.pic_order_present_flag) && !(SliceHeader->field_pic_flag))
+ {
+ SliceHeader->delta_pic_order_cnt_bottom = h264_GetVLCElement(parent, pInfo, true);
+ }
+ else
+ {
+ SliceHeader->delta_pic_order_cnt_bottom = 0;
+ }
+ }
+
+ if((pInfo->active_SPS.pic_order_cnt_type == 1) && !(pInfo->active_SPS.delta_pic_order_always_zero_flag))
+ {
+ SliceHeader->delta_pic_order_cnt[0] = h264_GetVLCElement(parent, pInfo, true);
+ if((pInfo->active_PPS.pic_order_present_flag) && !(SliceHeader->field_pic_flag))
+ {
+ SliceHeader->delta_pic_order_cnt[1] = h264_GetVLCElement(parent, pInfo, true);
+ }
+ }
+
+ if(pInfo->active_PPS.redundant_pic_cnt_present_flag)
+ {
+ SliceHeader->redundant_pic_cnt = h264_GetVLCElement(parent, pInfo, false);
+ if(SliceHeader->redundant_pic_cnt > 127)
+ break;
+ } else {
+ SliceHeader->redundant_pic_cnt = 0;
+ }
+
+ ret = H264_STATUS_OK;
+ } while (0);
+
+ //////////// FMO is not supported curently, so comment out the following code
+ //if((pInfo->active_PPS.num_slice_groups_minus1 > 0) && (pInfo->active_PPS.slice_group_map_type >= 3) && (pInfo->active_PPS.slice_group_map_type <= 5) )
+ //{
+ // SliceHeader->slice_group_change_cycle = 0; //one of the variables is not known in the high profile
+ //}
+
+ return ret;
+}
+
+/*-----------------------------------------------------------------------------------------*/
+// slice header 3
+// (direct_spatial_mv_pred_flag, num_ref_idx, pic_list_reorder, PWT, ref_pic_remark, alpha, beta, etc)
+/*-----------------------------------------------------------------------------------------*/
+
+// Parse the remainder of the slice header: direct_spatial_mv_pred_flag,
+// active reference counts (with optional override), reference picture list
+// reordering, prediction weight table (position recorded for HW reparse),
+// decoded reference picture marking, CABAC init, QP/QS deltas and the
+// deblocking filter controls.
+// Returns H264_STATUS_OK on success, H264_SliceHeader_ERROR on any
+// out-of-range value or sub-parser failure.
+h264_Status h264_Parse_Slice_Header_3(void *parent, h264_Info* pInfo, h264_Slice_Header_t *SliceHeader)
+{
+ h264_Status ret = H264_SliceHeader_ERROR;
+
+ //h264_Slice_Header_t* SliceHeader = &pInfo->SliceHeader;
+ int32_t slice_alpha_c0_offset, slice_beta_offset;
+ uint32_t code;
+ uint32_t bits_offset =0, byte_offset =0;
+ uint8_t is_emul =0;
+
+ do {
+ /// direct_spatial_mv_pred_flag -- only coded for B slices.
+ if(SliceHeader->slice_type == h264_PtypeB)
+ {
+ viddec_pm_get_bits(parent, &code , 1);
+ SliceHeader->direct_spatial_mv_pred_flag = (uint8_t)code;
+ }
+ else
+ {
+ SliceHeader->direct_spatial_mv_pred_flag = 0;
+ }
+
+ //
+ // Reset ref_idx and Overide it if exist
+ //
+ SliceHeader->num_ref_idx_l0_active = pInfo->active_PPS.num_ref_idx_l0_active;
+ SliceHeader->num_ref_idx_l1_active = pInfo->active_PPS.num_ref_idx_l1_active;
+
+ if((SliceHeader->slice_type == h264_PtypeP) || (SliceHeader->slice_type == h264_PtypeSP) || (SliceHeader->slice_type == h264_PtypeB))
+ {
+ viddec_pm_get_bits(parent, &code, 1);
+ SliceHeader->num_ref_idx_active_override_flag = (uint8_t)code;
+
+ if(SliceHeader->num_ref_idx_active_override_flag)
+ {
+ SliceHeader->num_ref_idx_l0_active = h264_GetVLCElement(parent, pInfo, false) + 1;
+ if(SliceHeader->slice_type == h264_PtypeB)
+ {
+ SliceHeader->num_ref_idx_l1_active = h264_GetVLCElement(parent, pInfo, false)+1;
+ }
+ }
+ }
+
+ // List 1 only exists for B slices.
+ if(SliceHeader->slice_type != h264_PtypeB) {
+ SliceHeader->num_ref_idx_l1_active = 0;
+ }
+
+ if((SliceHeader->num_ref_idx_l0_active > MAX_NUM_REF_FRAMES) || (SliceHeader->num_ref_idx_l1_active > MAX_NUM_REF_FRAMES))
+ {
+ break;
+ }
+
+ if(h264_Parse_Ref_Pic_List_Reordering(parent, pInfo, SliceHeader) != H264_STATUS_OK)
+ {
+ break;
+ }
+
+
+ ////
+ //// Parse Pred_weight_table but not store it becasue it will be reparsed in HW
+ //// The bit/byte offsets bracketing the table are recorded in pInfo so
+ //// the hardware accelerator can locate and reparse it.
+ if(((pInfo->active_PPS.weighted_pred_flag) && ((SliceHeader->slice_type == h264_PtypeP) || (SliceHeader->slice_type == h264_PtypeSP))) || ((pInfo->active_PPS.weighted_bipred_idc == 1) && (SliceHeader->slice_type == h264_PtypeB)))
+ {
+
+ viddec_pm_get_bits(parent, &bits_offset, &byte_offset, &is_emul);
+
+ pInfo->h264_pwt_enabled = 1;
+ pInfo->h264_pwt_start_byte_offset = byte_offset;
+ pInfo->h264_pwt_start_bit_offset = bits_offset;
+
+ if(h264_Parse_Pred_Weight_Table(parent, pInfo, SliceHeader) != H264_STATUS_OK)
+ {
+ break;
+ }
+
+ viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul);
+
+ // Normalize the end position: a zero bit offset means the table
+ // ended exactly on the previous byte boundary.
+ if(0 == bits_offset)
+ {
+ pInfo->h264_pwt_end_byte_offset = byte_offset-1;
+ pInfo->h264_pwt_end_bit_offset = 8;
+ }
+ else
+ {
+ pInfo->h264_pwt_end_byte_offset = byte_offset;
+ pInfo->h264_pwt_end_bit_offset = bits_offset;
+ }
+
+ }
+
+
+
+ ////
+ //// Parse Ref_pic marking if there
+ //// (dec_ref_pic_marking is only present for reference pictures.)
+ if(SliceHeader->nal_ref_idc != 0)
+ {
+ if(h264_Parse_Dec_Ref_Pic_Marking(parent, pInfo, SliceHeader) != H264_STATUS_OK)
+ {
+ break;
+ }
+ }
+
+ if((pInfo->active_PPS.entropy_coding_mode_flag) && (SliceHeader->slice_type != h264_PtypeI) && (SliceHeader->slice_type != h264_PtypeSI))
+ {
+ SliceHeader->cabac_init_idc = h264_GetVLCElement(parent, pInfo, false);
+ }
+ else
+ {
+ SliceHeader->cabac_init_idc = 0;
+ }
+
+ if(SliceHeader->cabac_init_idc > 2)
+ {
+ break;
+ }
+
+ // slice_qp_delta: resulting QP must stay within [0,51], expressed here
+ // relative to pic_init_qp_minus26.
+ SliceHeader->slice_qp_delta = h264_GetVLCElement(parent, pInfo, true);
+ if( (SliceHeader->slice_qp_delta > (25-pInfo->active_PPS.pic_init_qp_minus26)) || (SliceHeader->slice_qp_delta < -(26+pInfo->active_PPS.pic_init_qp_minus26)))
+ break;
+
+
+ if((SliceHeader->slice_type == h264_PtypeSP)|| (SliceHeader->slice_type == h264_PtypeSI) )
+ {
+ if(SliceHeader->slice_type == h264_PtypeSP)
+ {
+ viddec_pm_get_bits(parent, &code, 1);
+ SliceHeader->sp_for_switch_flag = (uint8_t)code;
+
+ }
+ SliceHeader->slice_qs_delta = h264_GetVLCElement(parent, pInfo, true);
+
+ if( (SliceHeader->slice_qs_delta > (25-pInfo->active_PPS.pic_init_qs_minus26)) || (SliceHeader->slice_qs_delta < -(26+pInfo->active_PPS.pic_init_qs_minus26)) )
+ break;
+ }
+
+ if(pInfo->active_PPS.deblocking_filter_control_present_flag)
+ {
+ SliceHeader->disable_deblocking_filter_idc = h264_GetVLCElement(parent, pInfo, false);
+ if(SliceHeader->disable_deblocking_filter_idc != 1)
+ {
+ SliceHeader->slice_alpha_c0_offset_div2 = h264_GetVLCElement(parent, pInfo, true);
+ slice_alpha_c0_offset = SliceHeader->slice_alpha_c0_offset_div2 << 1;
+ if (slice_alpha_c0_offset < -12 || slice_alpha_c0_offset > 12) {
+ break;
+ }
+
+ SliceHeader->slice_beta_offset_div2 = h264_GetVLCElement(parent, pInfo, true);
+ slice_beta_offset = SliceHeader->slice_beta_offset_div2 << 1;
+ if (slice_beta_offset < -12 || slice_beta_offset > 12) {
+ break;
+ }
+ }
+ else
+ {
+ SliceHeader->slice_alpha_c0_offset_div2 = 0;
+ SliceHeader->slice_beta_offset_div2 = 0;
+ }
+ }
+
+ ret = H264_STATUS_OK;
+ } while (0);
+
+ //////////// FMO is not supported curently, so comment out the following code
+ //if((pInfo->active_PPS.num_slice_groups_minus1 > 0) && (pInfo->active_PPS.slice_group_map_type >= 3) && (pInfo->active_PPS.slice_group_map_type <= 5) )
+ //{
+ // SliceHeader->slice_group_change_cycle = 0; //one of the variables is not known in the high profile
+ //}
+
+ return ret;
+}
+
+
+/*--------------------------------------------------------------------------------------------------*/
+//
+// The syntax elements reordering_of_pic_nums_idc, abs_diff_pic_num_minus1, and long_term_pic_num
+// specify the change from the initial reference picture lists to the reference picture lists to be used
+// for decoding the slice
+
+// reordering_of_pic_nums_idc:
+// 0: abs_diff_pic_num_minus1 is present and corresponds to a difference to subtract from a picture number prediction value
+// 1: abs_diff_pic_num_minus1 is present and corresponds to a difference to add to a picture number prediction value
+// 2: long_term_pic_num is present and specifies the long-term picture number for a reference picture
+// 3: End loop for reordering of the initial reference picture list
+//
+/*--------------------------------------------------------------------------------------------------*/
+
+// Parse ref_pic_list_reordering() for list 0 (non-I/SI slices) and list 1
+// (B slices). Each list is a sequence of reordering_of_pic_nums_idc commands
+// terminated by idc == 3; idc 0/1 carry abs_diff_pic_num_minus1, idc 2
+// carries long_term_pic_num.
+// Returns H264_STATUS_OK, or H264_SliceHeader_ERROR when a list contains
+// more than MAX_NUM_REF_FRAMES commands.
+h264_Status h264_Parse_Ref_Pic_List_Reordering(void *parent, h264_Info* pInfo,h264_Slice_Header_t *SliceHeader)
+{
+ //h264_Slice_Header_t* SliceHeader = &pInfo->SliceHeader;
+ int32_t reorder= -1;
+ uint32_t code;
+
+
+ if((SliceHeader->slice_type != h264_PtypeI) && (SliceHeader->slice_type != h264_PtypeSI))
+ {
+ viddec_pm_get_bits(parent, &code, 1);
+ SliceHeader->sh_refpic_l0.ref_pic_list_reordering_flag = (uint8_t)code;
+
+ if(SliceHeader->sh_refpic_l0.ref_pic_list_reordering_flag)
+ {
+
+ reorder= -1;
+ do
+ {
+ reorder++;
+
+ // NOTE(review): this rejects reorder > MAX_NUM_REF_FRAMES, so an
+ // index equal to MAX_NUM_REF_FRAMES is still written; confirm the
+ // reordering arrays hold MAX_NUM_REF_FRAMES+1 entries, else this
+ // is an off-by-one overflow.
+ if(reorder > MAX_NUM_REF_FRAMES)
+ {
+ return H264_SliceHeader_ERROR;
+ }
+
+ SliceHeader->sh_refpic_l0.reordering_of_pic_nums_idc[reorder] = h264_GetVLCElement(parent, pInfo, false);
+ if((SliceHeader->sh_refpic_l0.reordering_of_pic_nums_idc[reorder] == 0) || (SliceHeader->sh_refpic_l0.reordering_of_pic_nums_idc[reorder] == 1))
+ {
+ SliceHeader->sh_refpic_l0.list_reordering_num[reorder].abs_diff_pic_num_minus1 = h264_GetVLCElement(parent, pInfo, false);
+ }
+ else if (SliceHeader->sh_refpic_l0.reordering_of_pic_nums_idc[reorder] == 2)
+ {
+ SliceHeader->sh_refpic_l0.list_reordering_num[reorder].long_term_pic_num = h264_GetVLCElement(parent, pInfo, false);
+ }
+
+ }while(SliceHeader->sh_refpic_l0.reordering_of_pic_nums_idc[reorder] != 3);
+ }
+ }
+
+ if(SliceHeader->slice_type == h264_PtypeB)
+ {
+ viddec_pm_get_bits(parent, &code, 1);
+ SliceHeader->sh_refpic_l1.ref_pic_list_reordering_flag = (uint8_t)code;
+
+ if(SliceHeader->sh_refpic_l1.ref_pic_list_reordering_flag)
+ {
+
+ reorder = -1;
+ do
+ {
+ reorder++;
+ // NOTE(review): same boundary concern as the list-0 loop above.
+ if(reorder > MAX_NUM_REF_FRAMES)
+ {
+ return H264_SliceHeader_ERROR;
+ }
+ SliceHeader->sh_refpic_l1.reordering_of_pic_nums_idc[reorder] = h264_GetVLCElement(parent, pInfo, false);
+ if((SliceHeader->sh_refpic_l1.reordering_of_pic_nums_idc[reorder] == 0) || (SliceHeader->sh_refpic_l1.reordering_of_pic_nums_idc[reorder] == 1))
+ {
+ SliceHeader->sh_refpic_l1.list_reordering_num[reorder].abs_diff_pic_num_minus1 = h264_GetVLCElement(parent, pInfo, false);
+ }
+ else if (SliceHeader->sh_refpic_l1.reordering_of_pic_nums_idc[reorder] == 2)
+ {
+ SliceHeader->sh_refpic_l1.list_reordering_num[reorder].long_term_pic_num = h264_GetVLCElement(parent, pInfo, false);
+ }
+ }while(SliceHeader->sh_refpic_l1.reordering_of_pic_nums_idc[reorder] != 3);
+ }
+ }
+
+ //currently just two reference frames but in case mroe than two, then should use an array for the above structures that is why reorder
+ return H264_STATUS_OK;
+
+}
+
+#ifdef VBP
+// VBP build: parse pred_weight_table() and STORE the result into
+// SliceHeader->sh_predwttbl (the non-VBP variant below only skips the bits).
+// Explicit weights/offsets are read when the per-entry flag is set; otherwise
+// the spec defaults (weight = 1 << log2_weight_denom, offset = 0) are filled
+// in. Chroma entries are parsed only when chroma_format_idc != 0.
+// NOTE(review): luma/chroma_weight_lX_flag are scalar fields overwritten on
+// every loop iteration, so only the flag of the LAST reference index
+// survives -- verify consumers do not need per-index flags.
+h264_Status h264_Parse_Pred_Weight_Table(void *parent, h264_Info* pInfo,h264_Slice_Header_t *SliceHeader)
+{
+ uint32_t i =0, j=0;
+ uint32_t flag;
+
+ SliceHeader->sh_predwttbl.luma_log2_weight_denom = h264_GetVLCElement(parent, pInfo, false);
+
+ if(pInfo->active_SPS.sps_disp.chroma_format_idc != 0)
+ {
+ SliceHeader->sh_predwttbl.chroma_log2_weight_denom = h264_GetVLCElement(parent,pInfo, false);
+ }
+
+ // List 0 weights.
+ for(i=0; i< SliceHeader->num_ref_idx_l0_active; i++)
+ {
+ viddec_pm_get_bits(parent, (uint32_t *)&flag, 1);
+ SliceHeader->sh_predwttbl.luma_weight_l0_flag = flag;
+
+ if(SliceHeader->sh_predwttbl.luma_weight_l0_flag)
+ {
+ SliceHeader->sh_predwttbl.luma_weight_l0[i] = h264_GetVLCElement(parent, pInfo, true);
+ SliceHeader->sh_predwttbl.luma_offset_l0[i] = h264_GetVLCElement(parent, pInfo, true);
+ }
+ else
+ {
+ SliceHeader->sh_predwttbl.luma_weight_l0[i] = (1 << SliceHeader->sh_predwttbl.luma_log2_weight_denom);
+ SliceHeader->sh_predwttbl.luma_offset_l0[i] = 0;
+ }
+
+ if(pInfo->active_SPS.sps_disp.chroma_format_idc != 0)
+ {
+ viddec_pm_get_bits(parent, (uint32_t *)&flag, 1);
+ SliceHeader->sh_predwttbl.chroma_weight_l0_flag = flag;
+
+ if(SliceHeader->sh_predwttbl.chroma_weight_l0_flag)
+ {
+ for(j=0; j <2; j++) // j: 0 = Cb, 1 = Cr
+ {
+ SliceHeader->sh_predwttbl.chroma_weight_l0[i][j] = h264_GetVLCElement(parent, pInfo, true);
+ SliceHeader->sh_predwttbl.chroma_offset_l0[i][j] = h264_GetVLCElement(parent, pInfo, true);
+ }
+ }
+ else
+ {
+ for(j=0; j <2; j++)
+ {
+ SliceHeader->sh_predwttbl.chroma_weight_l0[i][j] = (1 << SliceHeader->sh_predwttbl.chroma_log2_weight_denom);
+ SliceHeader->sh_predwttbl.chroma_offset_l0[i][j] = 0;
+ }
+ }
+ }
+
+ }
+
+ // List 1 weights -- present only for B slices.
+ if(SliceHeader->slice_type == h264_PtypeB)
+ {
+ for(i=0; i< SliceHeader->num_ref_idx_l1_active; i++)
+ {
+ viddec_pm_get_bits(parent, (uint32_t *)&flag, 1);
+ SliceHeader->sh_predwttbl.luma_weight_l1_flag = flag;
+
+ if(SliceHeader->sh_predwttbl.luma_weight_l1_flag)
+ {
+ SliceHeader->sh_predwttbl.luma_weight_l1[i] = h264_GetVLCElement(parent, pInfo, true);
+ SliceHeader->sh_predwttbl.luma_offset_l1[i] = h264_GetVLCElement(parent, pInfo, true);
+ }
+ else
+ {
+ SliceHeader->sh_predwttbl.luma_weight_l1[i] = (1 << SliceHeader->sh_predwttbl.luma_log2_weight_denom);
+ SliceHeader->sh_predwttbl.luma_offset_l1[i] = 0;
+ }
+
+ if(pInfo->active_SPS.sps_disp.chroma_format_idc != 0)
+ {
+ viddec_pm_get_bits(parent, (uint32_t *)&flag, 1);
+ SliceHeader->sh_predwttbl.chroma_weight_l1_flag = flag;
+
+ if(SliceHeader->sh_predwttbl.chroma_weight_l1_flag)
+ {
+ for(j=0; j <2; j++)
+ {
+ SliceHeader->sh_predwttbl.chroma_weight_l1[i][j] = h264_GetVLCElement(parent, pInfo, true);
+ SliceHeader->sh_predwttbl.chroma_offset_l1[i][j] = h264_GetVLCElement(parent, pInfo, true);
+ }
+ }
+ else
+ {
+ for(j=0; j <2; j++)
+ {
+ SliceHeader->sh_predwttbl.chroma_weight_l1[i][j] = (1 << SliceHeader->sh_predwttbl.chroma_log2_weight_denom);
+ SliceHeader->sh_predwttbl.chroma_offset_l1[i][j] = 0;
+ }
+ }
+ }
+
+ }
+ }
+
+ return H264_STATUS_OK;
+} ///// End of h264_Parse_Pred_Weight_Table
+
+#else
+
+/*--------------------------------------------------------------------------------------------------*/
+//
+// Parse Prediction weight table
+// Note: This table will be reparsed in HW Accelerator, so needn't keep it in parser
+//
+/*--------------------------------------------------------------------------------------------------*/
+
+
+// Non-VBP build: parse pred_weight_table() purely to ADVANCE the bitstream
+// position -- every value is read into the throwaway local `val` and
+// discarded, because the hardware accelerator reparses the table itself
+// (see the offsets recorded in h264_Parse_Slice_Header_3). The bit-reading
+// sequence must mirror the storing variant above exactly.
+// Always returns H264_STATUS_OK.
+h264_Status h264_Parse_Pred_Weight_Table(void *parent, h264_Info* pInfo,h264_Slice_Header_t *SliceHeader)
+{
+ uint32_t i =0, j=0;
+ uint32_t flag, val; // val: sink for skipped syntax elements
+ //h264_Slice_Header_t* SliceHeader = &pInfo->SPS.SliceHeader;
+
+ //SliceHeader->sh_predwttbl.luma_log2_weight_denom = h264_GetVLCElement(pInfo, false, "luma_log2_weight_denom");
+ val = h264_GetVLCElement(parent, pInfo, false);
+
+ if(pInfo->active_SPS.sps_disp.chroma_format_idc != 0)
+ {
+ //SliceHeader->sh_predwttbl.chroma_log2_weight_denom = h264_GetVLCElement(pInfo, false, "chroma_log2_weight_denom");
+ val = h264_GetVLCElement(parent,pInfo, false);
+ }
+
+ for(i=0; i< SliceHeader->num_ref_idx_l0_active; i++)
+ {
+ //SliceHeader->sh_predwttbl.luma_weight_l0_flag = h264_GetBits(pInfo, 1, "luma_weight_l0_flag");
+ viddec_pm_get_bits(parent, (uint32_t *)&flag, 1);
+
+ //if(SliceHeader->sh_predwttbl.luma_weight_l0_flag)
+ if(flag)
+ {
+ //SliceHeader->sh_predwttbl.luma_weight_l0[i] = h264_GetVLCElement(pInfo, true, "luma_weight_l0");
+ val = h264_GetVLCElement(parent, pInfo, true);
+ //SliceHeader->sh_predwttbl.luma_offset_l0[i] = h264_GetVLCElement(pInfo, true, "luma_offset_l0");
+ val = h264_GetVLCElement(parent, pInfo, true);
+ }
+ else
+ {
+ // Nothing coded in the stream; defaults would apply but are not stored.
+ //SliceHeader->sh_predwttbl.luma_weight_l0[i] = (1 << SliceHeader->sh_predwttbl.luma_log2_weight_denom);
+ //SliceHeader->sh_predwttbl.luma_offset_l0[i] = 0;
+ }
+
+ if(pInfo->active_SPS.sps_disp.chroma_format_idc != 0)
+ {
+ //SliceHeader->sh_predwttbl.chroma_weight_l0_flag = h264_GetBits(pInfo, 1, "chroma_weight_l0_flag");
+ viddec_pm_get_bits(parent, (uint32_t *)&flag, 1);
+ if(flag)
+ {
+ for(j=0; j <2; j++)
+ {
+ //SliceHeader->sh_predwttbl.chroma_weight_l0[i][j] = h264_GetVLCElement(pInfo, true, "chroma_weight_l0");
+ val = h264_GetVLCElement(parent, pInfo, true);
+ //SliceHeader->sh_predwttbl.chroma_offset_l0[i][j] = h264_GetVLCElement(pInfo, true, "chroma_offset_l0");
+ val = h264_GetVLCElement(parent, pInfo, true);
+ }
+ }
+ else
+ {
+ for(j=0; j <2; j++)
+ {
+ //SliceHeader->sh_predwttbl.chroma_weight_l0[i][j] = (1 << SliceHeader->sh_predwttbl.chroma_log2_weight_denom);
+ //SliceHeader->sh_predwttbl.chroma_offset_l0[i][j] = 0;
+ }
+ }
+ }
+
+ }
+
+ if(SliceHeader->slice_type == h264_PtypeB)
+ {
+ for(i=0; i< SliceHeader->num_ref_idx_l1_active; i++)
+ {
+ //SliceHeader->sh_predwttbl.luma_weight_l1_flag = h264_GetBits(pInfo, 1, "luma_weight_l1_flag");
+ viddec_pm_get_bits(parent, (uint32_t *)&flag, 1);
+ if(flag)
+ {
+ //SliceHeader->sh_predwttbl.luma_weight_l1[i] = h264_GetVLCElement(pInfo, true, "luma_weight_l1");
+ val = h264_GetVLCElement(parent, pInfo, true);
+ //SliceHeader->sh_predwttbl.luma_offset_l1[i] = h264_GetVLCElement(pInfo, true, "luma_offset_l1");
+ val = h264_GetVLCElement(parent, pInfo, true);
+ }
+ else
+ {
+ //SliceHeader->sh_predwttbl.luma_weight_l1[i] = (1 << SliceHeader->sh_predwttbl.luma_log2_weight_denom);
+ //SliceHeader->sh_predwttbl.luma_offset_l1[i] = 0;
+ }
+
+ if(pInfo->active_SPS.sps_disp.chroma_format_idc != 0)
+ {
+ //SliceHeader->sh_predwttbl.chroma_weight_l1_flag = h264_GetBits(pInfo, 1, "chroma_weight_l1_flag");
+ viddec_pm_get_bits(parent, (uint32_t *)&flag, 1);
+ if(flag)
+ {
+ for(j=0; j <2; j++)
+ {
+ //SliceHeader->sh_predwttbl.chroma_weight_l1[i][j] = h264_GetVLCElement(pInfo, true, "chroma_weight_l1");
+ val = h264_GetVLCElement(parent, pInfo, true);
+ //SliceHeader->sh_predwttbl.chroma_offset_l1[i][j] = h264_GetVLCElement(pInfo, true, "chroma_offset_l1");
+ val = h264_GetVLCElement(parent, pInfo, true);
+ }
+ }
+ else
+ {
+ for(j=0; j <2; j++)
+ {
+ //SliceHeader->sh_predwttbl.chroma_weight_l1[i][j] = (1 << SliceHeader->sh_predwttbl.chroma_log2_weight_denom);
+ //SliceHeader->sh_predwttbl.chroma_offset_l1[i][j] = 0;
+ }
+ }
+ }
+
+ }
+ }
+
+ return H264_STATUS_OK;
+} ///// End of h264_Parse_Pred_Weight_Table
+
+#endif
+
+/*--------------------------------------------------------------------------------------------------*/
+// The syntax elements specify marking of the reference pictures.
+// 1)IDR: no_output_of_prior_pics_flag,
+// long_term_reference_flag,
+// 2)NonIDR: adaptive_ref_pic_marking_mode_flag,
+// memory_management_control_operation,
+// difference_of_pic_nums_minus1,
+// long_term_frame_idx,
+// long_term_pic_num, and
+// max_long_term_frame_idx_plus1
+//
+//The marking of a reference picture can be "unused for reference", "used for short-term reference", or "used for longterm
+// reference", but only one among these three.
+/*--------------------------------------------------------------------------------------------------*/
+
+
+// Parse dec_ref_pic_marking(): for IDR pictures, no_output_of_prior_pics_flag
+// and long_term_reference_flag; for non-IDR reference pictures, either
+// sliding-window marking (flag == 0) or an explicit list of MMCO commands
+// terminated by memory_management_control_operation == 0. MMCO 5 sets
+// pInfo->img.curr_has_mmco_5. The number of parsed entries is stored in
+// dec_ref_pic_marking_count.
+// Returns H264_STATUS_OK, or H264_STATUS_ERROR when the MMCO list exceeds
+// the sh_dec_refpic arrays.
+h264_Status h264_Parse_Dec_Ref_Pic_Marking(void *parent, h264_Info* pInfo,h264_Slice_Header_t *SliceHeader)
+{
+ //h264_Slice_Header_t* SliceHeader = &pInfo->SliceHeader;
+ uint8_t i = 0;
+ uint32_t code = 0;
+
+ if(pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR)
+ {
+ viddec_pm_get_bits(parent, &code, 1);
+ SliceHeader->sh_dec_refpic.no_output_of_prior_pics_flag = (uint8_t)code;
+
+ viddec_pm_get_bits(parent, &code, 1);
+ SliceHeader->sh_dec_refpic.long_term_reference_flag = (uint8_t)code;
+ pInfo->img.long_term_reference_flag = (uint8_t)code;
+ }
+ else
+ {
+ viddec_pm_get_bits(parent, &code, 1);
+ SliceHeader->sh_dec_refpic.adaptive_ref_pic_marking_mode_flag = (uint8_t)code;
+
+ ///////////////////////////////////////////////////////////////////////////////////////
+ //adaptive_ref_pic_marking_mode_flag Reference picture marking mode specified
+ // 0 Sliding window reference picture marking mode: A marking mode
+ // providing a first-in first-out mechanism for short-term reference pictures.
+ // 1 Adaptive reference picture marking mode: A reference picture
+ // marking mode providing syntax elements to specify marking of
+ // reference pictures as "unused for reference" and to assign long-term
+ // frame indices.
+ ///////////////////////////////////////////////////////////////////////////////////////
+
+ if(SliceHeader->sh_dec_refpic.adaptive_ref_pic_marking_mode_flag)
+ {
+ do
+ {
+ // BUGFIX: bound-check BEFORE storing. The original checked
+ // i>NUM_MMCO_OPERATIONS only after writing index i (and after the
+ // i++ in the loop condition), letting a malformed stream write one
+ // or two entries past the end of the mmco arrays.
+ // (Assumes the sh_dec_refpic arrays hold NUM_MMCO_OPERATIONS
+ // entries -- confirm in h264.h.)
+ if(i >= NUM_MMCO_OPERATIONS) {
+ return H264_STATUS_ERROR;
+ }
+
+ SliceHeader->sh_dec_refpic.memory_management_control_operation[i] = h264_GetVLCElement(parent, pInfo, false);
+ if((SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 1) || (SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 3))
+ {
+ SliceHeader->sh_dec_refpic.difference_of_pic_num_minus1[i] = h264_GetVLCElement(parent, pInfo, false);
+ }
+
+ if(SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 2)
+ {
+ SliceHeader->sh_dec_refpic.long_term_pic_num[i] = h264_GetVLCElement(parent, pInfo, false);
+ }
+
+ if((SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 3) || (SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 6))
+ {
+ SliceHeader->sh_dec_refpic.long_term_frame_idx[i] = h264_GetVLCElement(parent, pInfo, false);
+ }
+
+ if(SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 4)
+ {
+ SliceHeader->sh_dec_refpic.max_long_term_frame_idx_plus1[i] = h264_GetVLCElement(parent, pInfo, false);
+ }
+
+ if(SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 5)
+ {
+ pInfo->img.curr_has_mmco_5 = 1;
+ }
+
+ }while(SliceHeader->sh_dec_refpic.memory_management_control_operation[i++] != 0);
+ }
+ }
+
+
+
+ SliceHeader->sh_dec_refpic.dec_ref_pic_marking_count = i;
+
+ return H264_STATUS_OK;
+}
+
+
+
+//#endif
diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sps.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sps.c
new file mode 100644
index 0000000..29ef54d
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sps.c
@@ -0,0 +1,513 @@
+//#define H264_PARSE_SPS_C
+//#ifdef H264_PARSE_SPS_C
+
+#include "h264.h"
+#include "h264parse.h"
+
+
+/// SPS extension unit (unit_type = 13)
+///
+#if 0
+// NOTE(review): dead code -- compiled out by the surrounding #if 0, and the
+// body is additionally commented out. Would parse an SPS extension NAL
+// (nal_unit_type 13: aux_format_idc, alpha plane parameters). Kept for
+// reference only.
+h264_Status h264_Parse_SeqParameterSet_Extension(void *parent,h264_Info * pInfo)
+{
+ /*h264_SPS_Extension_RBSP_t* SPS_ext = pInfo->p_active_SPS_ext;
+
+ SPS_ext->seq_parameter_set_id = h264_GetVLCElement(pInfo, false);
+ if(SPS_ext->seq_parameter_set_id > MAX_SEQ_PARAMS-1)
+ {
+ return H264_SPS_ERROR;
+ }
+ SPS_ext->aux_format_idc = h264_GetVLCElement(pInfo, false);
+ if(SPS_ext->aux_format_idc > 3)
+ {
+ return H264_SPS_ERROR;
+ }
+ if(SPS_ext->aux_format_idc != 0)
+ {
+ SPS_ext->bit_depth_aux_minus8 = h264_GetVLCElement(pInfo, false);
+ if(SPS_ext->bit_depth_aux_minus8 + 8 > 12)
+ {
+ return H264_SPS_ERROR;
+ }
+
+ SPS_ext->alpha_incr_flag = h264_GetBits(pInfo, 1, "alpha_incr_flag");
+ if(SPS_ext->alpha_incr_flag > 1)
+ {
+ return H264_SPS_ERROR;
+ }
+
+ SPS_ext->alpha_opaque_value = h264_GetBits(pInfo,(SPS_ext->bit_depth_aux_minus8+8+1), "alpha_opaque_value"); //+8 to get the bit_depth value
+ SPS_ext->alpha_transparent_value = h264_GetBits(pInfo,(SPS_ext->bit_depth_aux_minus8+8+1), "alpha_transparent_value"); //+8 to get the bit_depth value
+ }
+ SPS_ext->additional_extension_flag = h264_GetBits(pInfo, 1, "additional_extension_flag");
+*/
+ return H264_STATUS_OK;
+}
+#endif
+
+
+// Parse hrd_parameters() from the VUI. nal_hrd selects the destination
+// fields: non-zero fills the NAL-HRD members, zero the VCL-HRD members; the
+// two branches read the identical syntax (cpb_cnt_minus1, rate/size scales,
+// per-CPB bit_rate/cpb_size/cbr, then four 5-bit delay lengths packed into
+// one 20-bit read). Display-relevant values land in SPS; the rest go to the
+// throwaway pVUI_Seq_Not_Used struct.
+// NOTE(review): only the final 20-bit read checks viddec_pm_get_bits for
+// failure; earlier reads are unchecked. Stray ";;" on the unpacking lines
+// are harmless empty statements.
+// Returns H264_STATUS_OK, or H264_SPS_ERROR on bad cpb_cnt or read failure.
+h264_Status h264_Parse_HRD_Parameters(void *parent, h264_Info* pInfo, int nal_hrd,seq_param_set_used_ptr SPS, vui_seq_parameters_t_not_used_ptr pVUI_Seq_Not_Used)
+{
+ //seq_param_set_ptr SPS = pInfo->p_active_SPS;
+ int32_t i = 0;
+ uint32_t code;
+
+
+ if(nal_hrd)
+ {
+ SPS->sps_disp.vui_seq_parameters.nal_hrd_cpb_cnt_minus1 = h264_GetVLCElement(parent, pInfo, false);
+
+ if(SPS->sps_disp.vui_seq_parameters.nal_hrd_cpb_cnt_minus1 >= MAX_CPB_CNT)
+ {
+ return H264_SPS_ERROR;
+ }
+
+ // bit_rate_scale (high nibble) and cpb_size_scale (low nibble).
+ viddec_pm_get_bits(parent, &code, 8);
+ pVUI_Seq_Not_Used->nal_hrd_bit_rate_scale = (uint8_t)(code>>4);
+ pVUI_Seq_Not_Used->nal_hrd_cpb_size_scale = (uint8_t)(code & 0xf);
+
+ for(i=0; i<=SPS->sps_disp.vui_seq_parameters.nal_hrd_cpb_cnt_minus1; i++)
+ {
+ pVUI_Seq_Not_Used->nal_hrd_parameters.bit_rate_value_minus1[i] = h264_GetVLCElement(parent, pInfo, false);
+ pVUI_Seq_Not_Used->nal_hrd_parameters.cpb_size_value_minus1[i] = h264_GetVLCElement(parent, pInfo, false);
+
+ viddec_pm_get_bits(parent, &code, 1);
+ pVUI_Seq_Not_Used->nal_hrd_parameters.cbr_flag[i] = (uint8_t)code;
+ }
+
+ // Four 5-bit "length minus 1" fields read as one 20-bit chunk.
+ if( viddec_pm_get_bits(parent, &code, 20) == -1)
+ return H264_SPS_ERROR;
+
+ SPS->sps_disp.vui_seq_parameters.nal_hrd_initial_cpb_removal_delay_length_minus1 = (uint8_t)((code>>15)&0x1f);
+ SPS->sps_disp.vui_seq_parameters.nal_hrd_cpb_removal_delay_length_minus1 = (uint8_t)((code>>10)&0x1f);;
+ SPS->sps_disp.vui_seq_parameters.nal_hrd_dpb_output_delay_length_minus1 = (uint8_t)((code>>5)&0x1f);;
+ SPS->sps_disp.vui_seq_parameters.nal_hrd_time_offset_length = (uint8_t)(code&0x1f);;
+
+ }
+ else
+ {
+ SPS->sps_disp.vui_seq_parameters.vcl_hrd_cpb_cnt_minus1 = h264_GetVLCElement(parent, pInfo, false);
+
+ if(SPS->sps_disp.vui_seq_parameters.vcl_hrd_cpb_cnt_minus1 >= MAX_CPB_CNT)
+ {
+ return H264_SPS_ERROR;
+ }
+
+ viddec_pm_get_bits(parent, &code, 8);
+ pVUI_Seq_Not_Used->vcl_hrd_bit_rate_scale = (uint8_t)(code>>4);
+ pVUI_Seq_Not_Used->vcl_hrd_cpb_size_scale = (uint8_t)(code&0xf);
+
+ for(i=0; i<=SPS->sps_disp.vui_seq_parameters.vcl_hrd_cpb_cnt_minus1; i++)
+ {
+ pVUI_Seq_Not_Used->vcl_hrd_parameters.bit_rate_value_minus1[i] = h264_GetVLCElement(parent, pInfo, false);
+ pVUI_Seq_Not_Used->vcl_hrd_parameters.cpb_size_value_minus1[i] = h264_GetVLCElement(parent, pInfo, false);
+ viddec_pm_get_bits(parent, &code, 1);
+ pVUI_Seq_Not_Used->vcl_hrd_parameters.cbr_flag[i] = (uint8_t)code;
+ }
+
+ if( viddec_pm_get_bits(parent, &code, 20) == -1)
+ return H264_SPS_ERROR;
+
+ SPS->sps_disp.vui_seq_parameters.vcl_hrd_initial_cpb_removal_delay_length_minus1 = (uint8_t)((code>>15)&0x1f);
+ SPS->sps_disp.vui_seq_parameters.vcl_hrd_cpb_removal_delay_length_minus1 = (uint8_t)((code>>10)&0x1f);;
+ SPS->sps_disp.vui_seq_parameters.vcl_hrd_dpb_output_delay_length_minus1 = (uint8_t)((code>>5)&0x1f);;
+ SPS->sps_disp.vui_seq_parameters.vcl_hrd_time_offset_length = (uint8_t)(code&0x1f);;
+ }
+
+ return H264_STATUS_OK;
+}
+
+
+
+h264_Status h264_Parse_Vui_Parameters(void *parent, h264_Info* pInfo, seq_param_set_used_ptr SPS, vui_seq_parameters_t_not_used_ptr pVUI_Seq_Not_Used)
+{
+ h264_Status ret = H264_STATUS_OK;
+ //seq_param_set_ptr SPS = pInfo->p_active_SPS;
+ int32_t nal_hrd = 0;
+ uint32_t code;
+
+ do {
+ viddec_pm_get_bits(parent, &code, 1);
+ SPS->sps_disp.vui_seq_parameters.aspect_ratio_info_present_flag = (uint8_t)code;
+
+
+ if(SPS->sps_disp.vui_seq_parameters.aspect_ratio_info_present_flag)
+ {
+ viddec_pm_get_bits(parent, &code, 8);
+ SPS->sps_disp.vui_seq_parameters.aspect_ratio_idc = (uint8_t)code;
+
+ if(SPS->sps_disp.vui_seq_parameters.aspect_ratio_idc == h264_AR_Extended_SAR)
+ {
+ viddec_pm_get_bits(parent, &code, 16);
+ SPS->sps_disp.vui_seq_parameters.sar_width = (uint16_t)code;
+
+ viddec_pm_get_bits(parent, &code, 16);
+ SPS->sps_disp.vui_seq_parameters.sar_height = (uint16_t)code;
+
+ }
+ }
+
+ viddec_pm_get_bits(parent, &code, 1);
+ pVUI_Seq_Not_Used->overscan_info_present_flag = (uint8_t)code;
+
+ if(pVUI_Seq_Not_Used->overscan_info_present_flag)
+ {
+ viddec_pm_get_bits(parent, &code, 1);
+ pVUI_Seq_Not_Used->overscan_appropriate_flag = (uint8_t)code;
+ }
+
+ viddec_pm_get_bits(parent, &code, 1);
+ SPS->sps_disp.vui_seq_parameters.video_signal_type_present_flag = (uint8_t)code;
+
+ if(SPS->sps_disp.vui_seq_parameters.video_signal_type_present_flag)
+ {
+ viddec_pm_get_bits(parent, &code, 3);
+ SPS->sps_disp.vui_seq_parameters.video_format = (uint8_t)code;
+
+ viddec_pm_get_bits(parent, &code, 1);
+ pVUI_Seq_Not_Used->video_full_range_flag = (uint8_t)code;
+
+ viddec_pm_get_bits(parent, &code, 1);
+ SPS->sps_disp.vui_seq_parameters.colour_description_present_flag = (uint8_t)code;
+
+ if(SPS->sps_disp.vui_seq_parameters.colour_description_present_flag)
+ {
+ viddec_pm_get_bits(parent, &code, 8);
+ SPS->sps_disp.vui_seq_parameters.colour_primaries = (uint8_t)code;
+
+ viddec_pm_get_bits(parent, &code, 8);
+ SPS->sps_disp.vui_seq_parameters.transfer_characteristics = (uint8_t)code;
+
+ viddec_pm_get_bits(parent, &code, 8);
+ pVUI_Seq_Not_Used->matrix_coefficients = (uint8_t)code;
+ }
+ }
+
+ viddec_pm_get_bits(parent, &code, 1);
+ pVUI_Seq_Not_Used->chroma_location_info_present_flag = (uint8_t)code;
+
+ if(pVUI_Seq_Not_Used->chroma_location_info_present_flag)
+ {
+ pVUI_Seq_Not_Used->chroma_sample_loc_type_top_field = h264_GetVLCElement(parent, pInfo, false);
+ pVUI_Seq_Not_Used->chroma_sample_loc_type_bottom_field = h264_GetVLCElement(parent, pInfo, false);
+ }
+
+ viddec_pm_get_bits(parent, &code, 1);
+ SPS->sps_disp.vui_seq_parameters.timing_info_present_flag = (uint8_t)code;
+
+ if(SPS->sps_disp.vui_seq_parameters.timing_info_present_flag == 1)
+ {
+ viddec_pm_get_bits(parent, &code, 32);
+ SPS->sps_disp.vui_seq_parameters.num_units_in_tick = (uint32_t)code;
+
+ viddec_pm_get_bits(parent, &code, 32);
+ SPS->sps_disp.vui_seq_parameters.time_scale = (uint32_t)code;
+
+ viddec_pm_get_bits(parent, &code, 1);
+ SPS->sps_disp.vui_seq_parameters.fixed_frame_rate_flag = (uint8_t)code;
+ }
+
+ viddec_pm_get_bits(parent, &code, 1);
+ SPS->sps_disp.vui_seq_parameters.nal_hrd_parameters_present_flag = (uint8_t)code;
+
+ if(SPS->sps_disp.vui_seq_parameters.nal_hrd_parameters_present_flag == 1)
+ {
+ nal_hrd = 1;
+ ret |= h264_Parse_HRD_Parameters(parent,pInfo, nal_hrd,SPS, pVUI_Seq_Not_Used);
+ }
+
+ viddec_pm_get_bits(parent, &code, 1);
+ SPS->sps_disp.vui_seq_parameters.vcl_hrd_parameters_present_flag = (uint8_t)code;
+
+ if(SPS->sps_disp.vui_seq_parameters.vcl_hrd_parameters_present_flag == 1)
+ {
+ nal_hrd = 0;
+ ret |= h264_Parse_HRD_Parameters(parent,pInfo, nal_hrd,SPS, pVUI_Seq_Not_Used);
+ }
+
+ if((SPS->sps_disp.vui_seq_parameters.nal_hrd_parameters_present_flag == 1) || (SPS->sps_disp.vui_seq_parameters.vcl_hrd_parameters_present_flag == 1))
+ {
+ viddec_pm_get_bits(parent, &code, 1);
+ SPS->sps_disp.vui_seq_parameters.low_delay_hrd_flag = (uint8_t)code;
+ }
+
+ viddec_pm_get_bits(parent, &code, 1);
+ SPS->sps_disp.vui_seq_parameters.pic_struct_present_flag = (uint8_t)code;
+
+ if(viddec_pm_get_bits(parent, &code, 1) == -1) {
+ ret = H264_STATUS_ERROR;
+ break;
+ }
+ SPS->sps_disp.vui_seq_parameters.bitstream_restriction_flag = (uint8_t)code;
+
+ if(SPS->sps_disp.vui_seq_parameters.bitstream_restriction_flag)
+ {
+ viddec_pm_get_bits(parent, &code, 1);
+ pVUI_Seq_Not_Used->motion_vectors_over_pic_boundaries_flag = (uint8_t)code;
+
+ pVUI_Seq_Not_Used->max_bytes_per_pic_denom = h264_GetVLCElement(parent, pInfo, false);
+ pVUI_Seq_Not_Used->max_bits_per_mb_denom = h264_GetVLCElement(parent, pInfo, false);
+ pVUI_Seq_Not_Used->log2_max_mv_length_horizontal = h264_GetVLCElement(parent, pInfo, false);
+ pVUI_Seq_Not_Used->log2_max_mv_length_vertical = h264_GetVLCElement(parent, pInfo, false);
+ SPS->sps_disp.vui_seq_parameters.num_reorder_frames = h264_GetVLCElement(parent, pInfo, false);
+ SPS->sps_disp.vui_seq_parameters.max_dec_frame_buffering = h264_GetVLCElement(parent, pInfo, false);
+
+ if(SPS->sps_disp.vui_seq_parameters.max_dec_frame_buffering == MAX_INT32_VALUE)
+ ret = H264_STATUS_ERROR;
+ }
+ }while (0);
+
+ return ret;
+}
+
+
+h264_Status h264_Parse_SeqParameterSet(void *parent,h264_Info * pInfo, seq_param_set_used_ptr SPS, vui_seq_parameters_t_not_used_ptr pVUI_Seq_Not_Used, int32_t* pOffset_ref_frame)
+{ /* Parses one SPS RBSP into *SPS; returns H264_STATUS_OK on success or an H264_SPS_* error status. */
+ h264_Status ret = H264_SPS_ERROR;
+
+ int32_t i = 0, tmp = 0;
+ int32_t PicWidthInMbs, PicHeightInMapUnits, FrameHeightInMbs;
+ uint32_t code = 0;
+ uint32_t data = 0;
+
+ //SPS->profile_idc = h264_GetBits(pInfo, 8, "Profile");
+ viddec_pm_get_bits(parent, &code, 8);
+ SPS->profile_idc = (uint8_t)code;
+
+ switch(SPS->profile_idc)
+ {
+ case h264_ProfileBaseline:
+ case h264_ProfileMain:
+ case h264_ProfileExtended:
+ case h264_ProfileHigh10:
+ case h264_ProfileHigh422:
+ case h264_ProfileHigh444:
+ case h264_ProfileHigh:
+ break;
+ default:
+ return H264_SPS_INVALID_PROFILE;
+ break;
+ }
+
+ //SPS->constraint_set0_flag = h264_GetBits(pInfo, 1, "constraint_set0_flag");
+ //SPS->constraint_set1_flag = h264_GetBits(pInfo, 1, "constraint_set1_flag"); //should be 1
+ //SPS->constraint_set2_flag = h264_GetBits(pInfo, 1, "constraint_set2_flag");
+ //SPS->constraint_set3_flag = h264_GetBits(pInfo, 1, "constraint_set3_flag");
+
+ viddec_pm_get_bits(parent, &code, 4);
+ SPS->constraint_set_flags = (uint8_t)code;
+
+ //// reserved_zero_4bits (read and discarded)
+ viddec_pm_get_bits(parent, (uint32_t *)&code, 4);
+
+ viddec_pm_get_bits(parent, &code, 8);
+ SPS->level_idc = (uint8_t)code;
+
+ switch(SPS->level_idc)
+ {
+ case h264_Level1b:
+ case h264_Level1:
+ case h264_Level11:
+ case h264_Level12:
+ case h264_Level13:
+ case h264_Level2:
+ case h264_Level21:
+ case h264_Level22:
+ case h264_Level3:
+ case h264_Level31:
+ case h264_Level32:
+ case h264_Level4:
+ case h264_Level41:
+ case h264_Level42:
+ case h264_Level5:
+ case h264_Level51:
+ break;
+ default:
+ return H264_SPS_INVALID_LEVEL;
+ }
+
+ do {
+ SPS->seq_parameter_set_id = h264_GetVLCElement(parent, pInfo, false);
+
+ //// seq_parameter_set_id ---[0,31]
+ if(SPS->seq_parameter_set_id > MAX_NUM_SPS -1)
+ break;
+
+ if((SPS->profile_idc == h264_ProfileHigh) || (SPS->profile_idc == h264_ProfileHigh10) ||
+ (SPS->profile_idc == h264_ProfileHigh422) || (SPS->profile_idc == h264_ProfileHigh444) )
+ {
+ //// chroma_format_idc ---[0,3], currently we don't support 444, so [0,2]
+ data = h264_GetVLCElement(parent, pInfo, false);
+ if( data > H264_CHROMA_422)
+ break;
+ SPS->sps_disp.chroma_format_idc = (uint8_t)data;
+ //if(SPS->sps_disp.chroma_format_idc == H264_CHROMA_444) {}
+
+ //// bit_depth_luma_minus8 ---[0,4], -----only support 8-bit pixel
+ data = h264_GetVLCElement(parent, pInfo, false);
+ if( data)
+ break;
+ SPS->bit_depth_luma_minus8 = (uint8_t)data;
+
+ //// bit_depth_chroma_minus8 ---[0,4]
+ data = h264_GetVLCElement(parent, pInfo, false);
+ if( data )
+ break;
+ SPS->bit_depth_chroma_minus8 = (uint8_t)data;
+
+
+ viddec_pm_get_bits(parent, &code, 1);
+ SPS->lossless_qpprime_y_zero_flag = (uint8_t)code;
+
+ viddec_pm_get_bits(parent, &code, 1);
+ SPS->seq_scaling_matrix_present_flag = (uint8_t)code;
+
+ if(SPS->seq_scaling_matrix_present_flag == 1)
+ {
+ //int n_ScalingList = (SPS->sps_disp.chroma_format_idc != H264_CHROMA_444) ? 8 : 12;
+ int n_ScalingList = 8; /// We do not support 444 currently
+
+ for(i=0; i<n_ScalingList; i++)
+ {
+ viddec_pm_get_bits(parent, &code, 1);
+ SPS->seq_scaling_list_present_flag[i] = (uint8_t)code;
+
+ if(SPS->seq_scaling_list_present_flag[i])
+ {
+ if(i<6)
+ h264_Scaling_List(parent, SPS->ScalingList4x4[i], 16, &SPS->UseDefaultScalingMatrix4x4Flag[i], pInfo);
+ else
+ h264_Scaling_List(parent, SPS->ScalingList8x8[i-6], 64, &SPS->UseDefaultScalingMatrix8x8Flag[i-6], pInfo);
+ }
+ }
+ }
+ }
+ else
+ {
+ SPS->sps_disp.chroma_format_idc = 1;
+ SPS->seq_scaling_matrix_present_flag = 0;
+
+ SPS->bit_depth_luma_minus8 = 0;
+ SPS->bit_depth_chroma_minus8 = 0;
+ //h264_SetDefaultScalingLists(pInfo);
+ }
+
+ //// log2_max_frame_num_minus4 ---[0,12]
+ data = (h264_GetVLCElement(parent, pInfo, false));
+ if( data > 12)
+ break;
+ SPS->log2_max_frame_num_minus4 = (uint8_t)data;
+
+ //// pic_order_cnt_type ---- [0,2]
+ data = h264_GetVLCElement(parent, pInfo, false);
+ if( data > 2)
+ break;
+ SPS->pic_order_cnt_type = (uint8_t)data;
+
+
+ SPS->expectedDeltaPerPOCCycle = 0;
+ if(SPS->pic_order_cnt_type == 0) {
+ SPS->log2_max_pic_order_cnt_lsb_minus4 = h264_GetVLCElement(parent, pInfo, false);
+ } else if(SPS->pic_order_cnt_type == 1){
+ viddec_pm_get_bits(parent, &code, 1);
+ SPS->delta_pic_order_always_zero_flag = (uint8_t)code;
+
+ SPS->offset_for_non_ref_pic = h264_GetVLCElement(parent, pInfo, true);
+ SPS->offset_for_top_to_bottom_field = h264_GetVLCElement(parent, pInfo, true);
+
+ //// num_ref_frames_in_pic_order_cnt_cycle ---- [0,255]
+ data = h264_GetVLCElement(parent, pInfo, false);
+ if( data > 255)
+ break;
+ SPS->num_ref_frames_in_pic_order_cnt_cycle = (uint8_t)data;
+
+
+ //Alloc memory for frame offset -- FIXME
+ for(i=0; i< SPS->num_ref_frames_in_pic_order_cnt_cycle; i++)
+ {
+ /////SPS->offset_for_ref_frame[i] could be removed from SPS
+#ifndef USER_MODE
+ tmp = h264_GetVLCElement(parent, pInfo, true);
+ pOffset_ref_frame[i]=tmp;
+ SPS->expectedDeltaPerPOCCycle += tmp;
+#else
+ tmp = h264_GetVLCElement(parent, pInfo, true);
+ SPS->offset_for_ref_frame[i]=tmp;
+ SPS->expectedDeltaPerPOCCycle += tmp;
+#endif
+ }
+ }
+
+ //// num_ref_frames ---[0,16]
+ data = h264_GetVLCElement(parent, pInfo, false);
+ if( data > 16)
+ break;
+ SPS->num_ref_frames = (uint8_t)data;
+
+ viddec_pm_get_bits(parent, &code, 1);
+ SPS->gaps_in_frame_num_value_allowed_flag = (uint8_t)code;
+
+
+ SPS->sps_disp.pic_width_in_mbs_minus1 = h264_GetVLCElement(parent, pInfo, false);
+ SPS->sps_disp.pic_height_in_map_units_minus1 = h264_GetVLCElement(parent, pInfo, false);
+ viddec_pm_get_bits(parent, &code, 1);
+ SPS->sps_disp.frame_mbs_only_flag = (uint8_t)code;
+
+ /// err check for size: width/height in macroblocks must lie in [2,128]
+ PicWidthInMbs = (SPS->sps_disp.pic_width_in_mbs_minus1 + 1);
+ PicHeightInMapUnits = (SPS->sps_disp.pic_height_in_map_units_minus1 + 1);
+ FrameHeightInMbs = SPS->sps_disp.frame_mbs_only_flag? PicHeightInMapUnits: (PicHeightInMapUnits<<1);
+ if((PicWidthInMbs < 2) || (PicWidthInMbs > 128) || (FrameHeightInMbs < 2) || (FrameHeightInMbs>128))
+ break;
+
+ if(!SPS->sps_disp.frame_mbs_only_flag)
+ {
+ viddec_pm_get_bits(parent, &code, 1);
+ SPS->sps_disp.mb_adaptive_frame_field_flag = (uint8_t)code;
+ }
+
+ //SPS->frame_height_in_mbs = (2-SPS->sps_disp.frame_mbs_only_flag)*(SPS->sps_disp.pic_height_in_map_units_minus1+1);
+ //SPS->pic_size_in_map_units = (SPS->sps_disp.pic_width_in_mbs_minus1+1)*SPS->sps_disp.frame_height_in_mbs;
+
+ viddec_pm_get_bits(parent, &code, 1);
+ SPS->sps_disp.direct_8x8_inference_flag = (uint8_t)code;
+
+ viddec_pm_get_bits(parent, &code, 1);
+ SPS->sps_disp.frame_cropping_flag = (uint8_t)code;
+
+ if(SPS->sps_disp.frame_cropping_flag)
+ {
+ SPS->sps_disp.frame_crop_rect_left_offset = h264_GetVLCElement(parent, pInfo, false);
+ SPS->sps_disp.frame_crop_rect_right_offset = h264_GetVLCElement(parent, pInfo, false);
+ SPS->sps_disp.frame_crop_rect_top_offset = h264_GetVLCElement(parent, pInfo, false);
+ SPS->sps_disp.frame_crop_rect_bottom_offset = h264_GetVLCElement(parent, pInfo, false);
+ }
+
+ //// when frame_mbs_only_flag is equal to 0, direct_8x8_inference_flag shall be equal to 1
+ if (SPS->sps_disp.frame_mbs_only_flag == 0 && SPS->sps_disp.direct_8x8_inference_flag == 0){
+ break;
+ }
+
+ ////// vui_parameters
+ if(viddec_pm_get_bits(parent, &code, 1) == -1)
+ break;
+ SPS->sps_disp.vui_parameters_present_flag = (uint8_t)code;
+ ret = H264_STATUS_OK;
+
+ if(SPS->sps_disp.vui_parameters_present_flag)
+ {
+#ifndef VBP // Ignore VUI parsing result
+ ret =
+#endif
+ h264_Parse_Vui_Parameters(parent, pInfo, SPS, pVUI_Seq_Not_Used);
+ }
+
+ }while(0);
+
+ //h264_Parse_rbsp_trailing_bits(pInfo);
+
+ return ret;
+}
+
+//#endif
+
diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/mix_vbp_h264_stubs.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/mix_vbp_h264_stubs.c
new file mode 100644
index 0000000..87959f3
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/mix_vbp_h264_stubs.c
@@ -0,0 +1,575 @@
+#include "viddec_fw_debug.h"
+#include "viddec_parser_ops.h"
+#include "h264.h"
+#include "h264parse.h"
+#include "viddec_fw_item_types.h"
+#include "h264parse_dpb.h"
+#include <glib.h>
+
+extern void* h264_memcpy( void* dest, void* src, uint32_t num );
+
+uint32_t cp_using_dma(uint32_t ddr_addr, uint32_t local_addr, uint32_t size, char to_ddr, char swap) /* host-side stub: plain memcpy stands in for FW DMA; always returns 0 */
+{
+ if (swap != 0)
+ {
+ g_warning("swap copying is not implemented."); /* byte-swapped copy unsupported; warn and fall through to plain copy */
+ }
+
+ if (to_ddr)
+ {
+ memcpy((void*)ddr_addr, (void*)local_addr, size); /* local buffer -> DDR */
+ }
+ else
+ {
+ memcpy((void*)local_addr, (void*)ddr_addr, size); /* DDR -> local buffer */
+ }
+
+ return (0);
+}
+
+#if 0
+void h264_parse_emit_start_new_frame( void *parent, h264_Info *pInfo ) /* stub (#if 0 branch): sets push_to_cur and clears DPB emit counters only */
+{
+
+ if(pInfo->Is_first_frame_in_stream) //new stream, fill new frame in cur
+ {
+
+ pInfo->img.g_new_frame = 0;
+ pInfo->Is_first_frame_in_stream =0;
+ pInfo->push_to_cur = 1;
+
+ }
+ else // move to next for new frame
+ {
+ pInfo->push_to_cur = 0;
+ }
+
+
+
+ //fill DPB management info: counters reset below, no workload items are emitted in this stub
+
+
+
+
+ pInfo->dpb.frame_numbers_need_to_be_displayed =0;
+ pInfo->dpb.frame_numbers_need_to_be_removed =0;
+ pInfo->dpb.frame_numbers_need_to_be_allocated =0;
+
+
+}
+
+void h264_parse_emit_eos( void *parent, h264_Info *pInfo ) /* stub (#if 0 branch): flushes DPB at end-of-stream without emitting workload items */
+{
+ ////
+ //// Now we can flush out all frames in DPB for display
+ if(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].is_used != 3)
+ {
+ h264_dpb_mark_dangling_field(&pInfo->dpb, pInfo->dpb.fs_dec_idc); //, DANGLING_TYPE_GAP_IN_FRAME
+ }
+
+ h264_dpb_store_previous_picture_in_dpb(pInfo, 0,0);
+ h264_dpb_flush_dpb(pInfo, 1, 0, pInfo->active_SPS.num_ref_frames);
+
+
+ pInfo->dpb.frame_numbers_need_to_be_displayed =0;
+ pInfo->dpb.frame_numbers_need_to_be_removed =0;
+
+}
+
+void h264_parse_emit_current_pic( void *parent, h264_Info *pInfo ) /* stub (#if 0 branch): only clears the quant-matrix presence bitmask */
+{
+ pInfo->qm_present_list=0;
+}
+
+void h264_parse_emit_current_slice( void *parent, h264_Info *pInfo )
+{ /* stub (#if 0 branch): for P/B slices, records the first ref-list-0 entry that is not marked non-existent in h264_list_replacement */
+#if 1
+ uint32_t i, nitems=0;
+
+
+ if( (h264_PtypeB==pInfo->SliceHeader.slice_type)||(h264_PtypeP==pInfo->SliceHeader.slice_type) )
+ {
+ if(pInfo->SliceHeader.sh_refpic_l0.ref_pic_list_reordering_flag)
+ {
+ nitems = pInfo->SliceHeader.num_ref_idx_l0_active;
+
+ for(i=0; i<nitems;i++)
+ {
+ if(viddec_h264_get_is_non_existent(&(pInfo->dpb.fs[pInfo->slice_ref_list0[i]&0x1f]))==0)
+ {
+ pInfo->h264_list_replacement = (pInfo->slice_ref_list0[i]&0xFF)|0x80;
+ break;
+ }
+ }
+ }
+ else
+ {
+ nitems = pInfo->dpb.listXsize[0];
+
+ for(i=0; i<nitems;i++)
+ {
+ if(viddec_h264_get_is_non_existent(&(pInfo->dpb.fs[pInfo->dpb.listX_0[i]&0x1f]))==0)
+ {
+ pInfo->h264_list_replacement = (pInfo->dpb.listX_0[i]&0xFF)|0x80;
+ break;
+ }
+ }
+ }
+
+ }
+ else
+ {
+ nitems =0;
+ }
+#endif
+}
+#else
+
+
+void h264_parse_emit_current_slice( void *parent, h264_Info *pInfo )
+{ /* compiled (#else) branch: updates h264_list_replacement and prepares (mostly disabled) slice workload items */
+
+ viddec_workload_item_t wi;
+ h264_slice_data slice_data; // NOTE(review): never filled — h264_fill_slice_data call below is commented out, so wi payloads read indeterminate values
+
+ uint32_t i=0, nitems=0, data=0;
+ uint32_t bits_offset =0, byte_offset =0;
+ uint8_t is_emul =0;
+
+
+ ////////////////////// Update Reference list //////////////////
+ if( (h264_PtypeB==pInfo->SliceHeader.slice_type)||(h264_PtypeP==pInfo->SliceHeader.slice_type) )
+ {
+ if(pInfo->SliceHeader.sh_refpic_l0.ref_pic_list_reordering_flag)
+ {
+ nitems = pInfo->SliceHeader.num_ref_idx_l0_active;
+
+ for(i=0; i<nitems;i++)
+ {
+ if(viddec_h264_get_is_non_existent(&(pInfo->dpb.fs[pInfo->slice_ref_list0[i]&0x1f]))==0)
+ {
+ pInfo->h264_list_replacement = (pInfo->slice_ref_list0[i]&0xFF)|0x80;
+ break;
+ }
+ }
+ }
+ else
+ {
+ nitems = pInfo->dpb.listXsize[0];
+
+ for(i=0; i<nitems;i++)
+ {
+ if(viddec_h264_get_is_non_existent(&(pInfo->dpb.fs[pInfo->dpb.listX_0[i]&0x1f]))==0)
+ {
+ pInfo->h264_list_replacement = (pInfo->dpb.listX_0[i]&0xFF)|0x80;
+ break;
+ }
+ }
+ }
+
+ }
+ else
+ {
+ nitems =0;
+ }
+ ///// fill ref list 0
+ // h264_parse_emit_ref_list(parent, pInfo, 0);
+
+ ///// fill ref list 1
+ //h264_parse_emit_ref_list(parent, pInfo, 1);
+
+ ///////////////////////////////////// Slice Data ////////////////////////////////
+ // h264_fill_slice_data(pInfo, &slice_data);
+
+ wi.vwi_type = VIDDEC_WORKLOAD_H264_SLICE_REG;
+
+ wi.data.data_offset = slice_data.h264_bsd_slice_start;
+ wi.data.data_payload[0] = slice_data.h264_bsd_slice_p1;
+ wi.data.data_payload[1] = slice_data.h264_bsd_slice_p2;
+
+ if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+ {
+ // viddec_pm_append_workitem( parent , &wi);
+ }
+ else
+ {
+ // viddec_pm_append_workitem_next( parent , &wi);
+ }
+
+
+ ///////////////////////////predict weight table item and data if have///////////////////////////
+ if(pInfo->h264_pwt_enabled)
+ {
+ wi.vwi_type = VIDDEC_WORKLOAD_H264_PWT_BITS_OFFSET;
+ wi.data.data_offset = pInfo->h264_pwt_end_byte_offset- pInfo->h264_pwt_start_byte_offset+1;
+ wi.data.data_payload[0] = pInfo->h264_pwt_start_bit_offset;
+ wi.data.data_payload[1] = pInfo->h264_pwt_end_bit_offset;
+
+ if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+ {
+ // viddec_pm_append_workitem( parent , &wi);
+
+ wi.vwi_type = VIDDEC_WORKLOAD_H264_PWT_ES_BYTES;
+ wi.es.es_flags = 0;
+ // viddec_pm_append_misc_tags(parent, pInfo->h264_pwt_start_byte_offset, pInfo->h264_pwt_end_byte_offset,&wi,1);
+ }
+ else
+ {
+ // viddec_pm_append_workitem_next( parent , &wi);
+
+ wi.vwi_type = VIDDEC_WORKLOAD_H264_PWT_ES_BYTES;
+ wi.es.es_flags = 0;
+ // viddec_pm_append_misc_tags(parent, pInfo->h264_pwt_start_byte_offset, pInfo->h264_pwt_end_byte_offset,&wi,0);
+ }
+ }
+
+
+ ////////////////////////////////// Update ES Buffer for Slice ///////////////////////
+ viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul);
+
+ //OS_INFO("DEBUG---entropy_coding_mode_flag:%d, bits_offset: %d\n", pInfo->active_PPS.entropy_coding_mode_flag, bits_offset);
+
+ if(pInfo->active_PPS.entropy_coding_mode_flag)
+ {
+ if(0!=bits_offset) {
+ data = data; // fix compilation warning
+ // don't skip byte-aligned bits as those bits are actually
+ // part of slice_data
+ //viddec_pm_get_bits(parent, &data, 8-bits_offset);
+ }
+ }
+ else
+ {
+ if(0!=bits_offset) {
+ wi.vwi_type = VIDDEC_WORKLOAD_H264_SH_BITS_OFFSET;
+ wi.data.data_offset = bits_offset;
+ wi.data.data_payload[0]=0;
+ wi.data.data_payload[1]=0;
+
+ if(pInfo->push_to_cur) { //cur is empty, fill new frame in cur
+ // viddec_pm_append_workitem( parent , &wi);
+ }
+ else {
+ //viddec_pm_append_workitem_next( parent , &wi);
+ }
+ }
+ }
+
+ if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+ {
+ //viddec_pm_append_pixeldata( parent );
+ }
+ else
+ {
+ //viddec_pm_append_pixeldata_next( parent);
+ }
+
+ return;
+}
+
+
+void h264_parse_emit_current_pic( void *parent, h264_Info *pInfo )
+{ /* compiled (#else) branch: clears qm_present_list and walks pic_data as QWORD payloads (emit calls disabled) */
+
+ viddec_workload_item_t wi;
+
+ const uint32_t *pl;
+ uint32_t i=0,nitems=0;
+
+ h264_pic_data pic_data; // NOTE(review): never filled — h264_fill_pic_data call below is commented out, so payloads read indeterminate values
+
+ pInfo->qm_present_list=0;
+
+ //h264_parse_emit_4X4_scaling_matrix(parent, pInfo);
+ // h264_parse_emit_8X8_scaling_matrix(parent, pInfo);
+
+ // h264_fill_pic_data(pInfo, &pic_data);
+
+ // How many payloads must be generated
+ nitems = (sizeof(h264_pic_data) + 7) / 8; // In QWORDs rounded up
+
+ pl = (const uint32_t *) &pic_data;
+
+ // Dump slice data to an array of workitems, to do pl access non valid mem
+ for( i = 0; i < nitems; i++ )
+ {
+ wi.vwi_type = VIDDEC_WORKLOAD_H264_PIC_REG;
+ wi.data.data_offset = (unsigned int)pl - (unsigned int)&pic_data; // offset within struct
+ wi.data.data_payload[0] = pl[0];
+ wi.data.data_payload[1] = pl[1];
+ pl += 2;
+
+ if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+ {
+
+ // viddec_pm_append_workitem( parent, &wi );
+ }
+ else
+ {
+ //viddec_pm_append_workitem_next( parent, &wi );
+ }
+ }
+
+ return;
+}
+
+void h264_parse_emit_start_new_frame( void *parent, h264_Info *pInfo )
+{ /* compiled (#else) branch: decides cur/next workload and builds display/release/drop/DPB items (append calls disabled) */
+
+ viddec_workload_item_t wi;
+ uint32_t i=0,nitems=0;
+
+ ///////////////////////// Frame attributes//////////////////////////
+
+ //Push data into current workload if first frame or frame_boundary already detected by non slice nal
+ if( (pInfo->Is_first_frame_in_stream)||(pInfo->is_frame_boundary_detected_by_non_slice_nal))
+ {
+ //viddec_workload_t *wl_cur = viddec_pm_get_header( parent );
+ //pInfo->img.g_new_frame = 0;
+ pInfo->Is_first_frame_in_stream =0;
+ pInfo->is_frame_boundary_detected_by_non_slice_nal=0;
+ pInfo->push_to_cur = 1;
+ //h264_translate_parser_info_to_frame_attributes(wl_cur, pInfo);
+ }
+ else // move to cur if frame boundary detected by previous non slice nal, or move to next if not
+ {
+ //viddec_workload_t *wl_next = viddec_pm_get_next_header (parent);
+
+ pInfo->push_to_cur = 0;
+ //h264_translate_parser_info_to_frame_attributes(wl_next, pInfo);
+
+ pInfo->is_current_workload_done=1;
+ }
+
+ ///////////////////// SPS/////////////////////
+ // h264_parse_emit_sps(parent, pInfo);
+
+ /////////////////////display frames/////////////////////
+ nitems = pInfo->dpb.frame_numbers_need_to_be_displayed;
+
+ for(i=0; i<nitems; i++)
+ {
+ wi.vwi_type = VIDDEC_WORKLOAD_REF_FRAME_DISPLAY_0 + pInfo->dpb.frame_id_need_to_be_displayed[i];
+ wi.ref_frame.reference_id = pInfo->dpb.frame_id_need_to_be_displayed[i];
+ wi.ref_frame.luma_phys_addr = 0;
+ wi.ref_frame.chroma_phys_addr = 0;
+
+ if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+ {
+ // viddec_pm_append_workitem( parent, &wi );
+ }
+ else
+ {
+ // viddec_pm_append_workitem_next( parent, &wi );
+ }
+ }
+ pInfo->dpb.frame_numbers_need_to_be_displayed =0;
+
+
+ /////////////////////release frames/////////////////////
+ nitems = pInfo->dpb.frame_numbers_need_to_be_removed;
+
+ for(i=0; i<nitems; i++)
+ {
+ wi.vwi_type = VIDDEC_WORKLOAD_REF_FRAME_RELEASE_0 + pInfo->dpb.frame_id_need_to_be_removed[i];
+ wi.ref_frame.reference_id = pInfo->dpb.frame_id_need_to_be_removed[i];
+ wi.ref_frame.luma_phys_addr = 0;
+ wi.ref_frame.chroma_phys_addr = 0;
+
+ if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+ {
+ //viddec_pm_append_workitem( parent, &wi );
+ }
+ else
+ {
+ // viddec_pm_append_workitem_next( parent, &wi );
+ }
+
+ }
+ pInfo->dpb.frame_numbers_need_to_be_removed =0;
+
+ /////////////////////flush frames (do not display)/////////////////////
+ nitems = pInfo->dpb.frame_numbers_need_to_be_dropped;
+
+ for(i=0; i<nitems; i++)
+ {
+ wi.vwi_type = VIDDEC_WORKLOAD_REF_FRAME_DROPOUT_0 + pInfo->dpb.frame_id_need_to_be_dropped[i];
+ wi.ref_frame.reference_id = pInfo->dpb.frame_id_need_to_be_dropped[i];
+ wi.ref_frame.luma_phys_addr = 0;
+ wi.ref_frame.chroma_phys_addr = 0;
+
+ if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+ {
+ //viddec_pm_append_workitem( parent, &wi );
+ }
+ else
+ {
+ // viddec_pm_append_workitem_next( parent, &wi );
+ }
+
+ }
+ pInfo->dpb.frame_numbers_need_to_be_dropped =0;
+
+ /////////////////////update DPB frames/////////////////////
+ nitems = pInfo->dpb.used_size;
+ for(i=0; i<nitems; i++)
+ {
+ uint8_t fs_id = pInfo->dpb.fs_dpb_idc[i];
+
+ if(viddec_h264_get_is_non_existent(&(pInfo->dpb.fs[fs_id])) == 0)
+ {
+ wi.vwi_type = VIDDEC_WORKLOAD_DPB_ACTIVE_FRAME_0+fs_id;
+ wi.ref_frame.reference_id = fs_id;
+ wi.ref_frame.luma_phys_addr = 0;
+ wi.ref_frame.chroma_phys_addr = 0;
+
+ if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+ {
+ // viddec_pm_append_workitem( parent, &wi );
+ }
+ else
+ {
+ //viddec_pm_append_workitem_next( parent, &wi );
+ }
+ }
+ }
+
+
+ /////////////////////update DPB frames info (poc)/////////////////////
+ nitems = pInfo->dpb.used_size;
+ for(i=0; i<nitems; i++)
+ {
+ uint8_t fs_id = pInfo->dpb.fs_dpb_idc[i];
+
+ if(viddec_h264_get_is_non_existent(&(pInfo->dpb.fs[fs_id])) == 0)
+ {
+ wi.vwi_type = VIDDEC_WORKLOAD_H264_DPB_FRAME_POC;
+ wi.data.data_offset = fs_id;
+ //printf("is_used = %d, tpoc = %d, bpoc = %d\n", pInfo->dpb.fs[fs_id].is_used, pInfo->dpb.fs[fs_id].top_field.poc, pInfo->dpb.fs[fs_id].bottom_field.poc);
+
+ switch(viddec_h264_get_is_used(&(pInfo->dpb.fs[fs_id])))
+ {
+ case (FRAME):{
+ wi.data.data_payload[0] = pInfo->dpb.fs[fs_id].top_field.poc;
+ wi.data.data_payload[1] = pInfo->dpb.fs[fs_id].bottom_field.poc;
+ break;
+ };
+
+ case (TOP_FIELD):{
+ wi.data.data_payload[0] = pInfo->dpb.fs[fs_id].top_field.poc;
+ wi.data.data_payload[1] = 0;
+ break;
+ };
+
+ case (BOTTOM_FIELD):{
+ wi.data.data_payload[0] = 0;
+ wi.data.data_payload[1] = pInfo->dpb.fs[fs_id].bottom_field.poc;
+ break;
+ };
+
+ default : {
+ wi.data.data_payload[0] = 0;
+ wi.data.data_payload[1] = 0;
+ break;
+ };
+ }
+
+
+ if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+ {
+ // viddec_pm_append_workitem( parent, &wi );
+ }
+ else
+ {
+ //viddec_pm_append_workitem_next( parent, &wi );
+ }
+
+ }
+ }
+
+ /////////////////////Alloc buffer for current Existing frame/////////////////////
+ if(0!=pInfo->dpb.frame_numbers_need_to_be_allocated)
+ {
+ if(pInfo->push_to_cur)
+ {
+ // viddec_workload_t *wl_cur = viddec_pm_get_header (parent);
+ // wl_cur->is_reference_frame |= WORKLOAD_REFERENCE_FRAME | (pInfo->dpb.frame_id_need_to_be_allocated & 0x1f);
+ }
+ else
+ {
+ // viddec_workload_t *wl_next = viddec_pm_get_next_header (parent);
+ //wl_next->is_reference_frame |= WORKLOAD_REFERENCE_FRAME | (pInfo->dpb.frame_id_need_to_be_allocated & 0x1f);
+ }
+ }
+ pInfo->dpb.frame_numbers_need_to_be_allocated =0;
+
+ return;
+}
+
+
+
+void h264_parse_emit_eos( void *parent, h264_Info *pInfo )
+{ /* compiled (#else) branch: flushes the DPB at end-of-stream and flags remaining frames as errored */
+
+ uint32_t nitems=0, i=0;
+ viddec_workload_item_t wi;
+
+ ////
+ //// Now we can flush out all frames in DPB for display
+ if(viddec_h264_get_is_used(&(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc])) != 3)
+ {
+ h264_dpb_mark_dangling_field(&pInfo->dpb, pInfo->dpb.fs_dec_idc); //, DANGLING_TYPE_GAP_IN_FRAME
+ }
+
+ h264_dpb_store_previous_picture_in_dpb(pInfo, 0,0);
+ h264_dpb_flush_dpb(pInfo, 1, 0, pInfo->active_SPS.num_ref_frames);
+
+
+ /////////////////////display frames/////////////////////
+ nitems = pInfo->dpb.frame_numbers_need_to_be_displayed;
+
+ for(i=0; i<nitems; i++)
+ {
+ wi.vwi_type = VIDDEC_WORKLOAD_EOS_DISPLAY_FRAME_0 + pInfo->dpb.frame_id_need_to_be_displayed[i];
+ wi.ref_frame.reference_id = pInfo->dpb.frame_id_need_to_be_displayed[i];
+ wi.ref_frame.luma_phys_addr = 0;
+ wi.ref_frame.chroma_phys_addr = 0;
+
+ if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+ {
+ //viddec_pm_append_workitem( parent, &wi );
+ }
+ else
+ {
+ //viddec_pm_append_workitem_next( parent, &wi );
+ }
+ }
+ pInfo->dpb.frame_numbers_need_to_be_displayed =0;
+
+
+ /////////////////////release frames/////////////////////
+ nitems = pInfo->dpb.frame_numbers_need_to_be_removed;
+
+ for(i=0; i<nitems; i++)
+ {
+ wi.vwi_type = VIDDEC_WORKLOAD_EOS_RELEASE_FRAME_0 + pInfo->dpb.frame_id_need_to_be_removed[i];
+ wi.ref_frame.reference_id = pInfo->dpb.frame_id_need_to_be_removed[i];
+ wi.ref_frame.luma_phys_addr = 0;
+ wi.ref_frame.chroma_phys_addr = 0;
+
+ if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+ {
+ // viddec_pm_append_workitem( parent, &wi );
+ viddec_pm_set_next_frame_error_on_eos(parent, VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE);
+ }
+ else
+ {
+ // viddec_pm_append_workitem_next( parent, &wi );
+ viddec_pm_set_next_frame_error_on_eos(parent, pInfo->wl_err_next);
+ }
+ }
+ pInfo->dpb.frame_numbers_need_to_be_removed =0;
+
+ return;
+}
+#endif
diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/viddec_h264_parse.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/viddec_h264_parse.c
new file mode 100644
index 0000000..9388d81
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/viddec_h264_parse.c
@@ -0,0 +1,559 @@
+#include "viddec_fw_debug.h"
+#include "viddec_parser_ops.h"
+
+#include "viddec_fw_workload.h"
+#include "viddec_pm.h"
+
+#include "h264.h"
+#include "h264parse.h"
+
+#include "viddec_h264_parse.h"
+#include "h264parse_dpb.h"
+
+/* Init function which can be called to initialize local context on open and flush and preserve*/
+#ifdef VBP
+void viddec_h264_init(void *ctxt, uint32_t *persist_mem, uint32_t preserve)
+#else
+static void viddec_h264_init(void *ctxt, uint32_t *persist_mem, uint32_t preserve)
+#endif
+{
+ struct h264_viddec_parser* parser = ctxt;
+ h264_Info * pInfo = &(parser->info);
+
+ if(!preserve)
+ {
+ /* we don't initialize this data if we want to preserve
+ sequence and gop information */
+ h264_init_sps_pps(parser,persist_mem);
+ }
+ /* picture level info which will always be initialized */
+ h264_init_Info_under_sps_pps_level(pInfo);
+ return;
+}
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+#ifdef VBP
+uint32_t viddec_h264_parse(void *parent, void *ctxt)
+#else
+static uint32_t viddec_h264_parse(void *parent, void *ctxt)
+#endif
+{
+ struct h264_viddec_parser* parser = ctxt;
+
+ h264_Info * pInfo = &(parser->info);
+
+ h264_Status status = H264_STATUS_ERROR;
+
+
+ uint8_t nal_ref_idc = 0;
+
+ ///// Parse NAL Unit header
+ pInfo->img.g_new_frame = 0;
+ pInfo->push_to_cur = 1;
+ pInfo->is_current_workload_done =0;
+ pInfo->nal_unit_type = 0;
+
+ h264_Parse_NAL_Unit(parent, pInfo, &nal_ref_idc);
+
+ ///// Check frame bounday for non-vcl elimitter
+ h264_check_previous_frame_end(pInfo);
+
+ //OS_INFO("========================nal_type: %d=================\n", pInfo->nal_unit_type);
+ //DEBUG_WRITE(pInfo->nal_unit_type, pInfo->got_start, pInfo->wl_err_flag, pInfo->is_current_workload_done, 0, 0);
+#if 0
+ devh_SVEN_WriteModuleEvent( NULL,
+ SVEN_MODULE_EVENT_GV_FW_PARSER_DEBUG_P0,
+ pInfo->got_start,pInfo->nal_unit_type, pInfo->wl_err_curr, pInfo->is_current_workload_done, 0, pInfo->img.frame_num);
+#endif
+
+ //////// Parse valid NAL unit
+ switch ( pInfo->nal_unit_type )
+ {
+ case h264_NAL_UNIT_TYPE_IDR:
+ if(pInfo->got_start) {
+ pInfo->img.recovery_point_found |= 1;
+ }
+
+ pInfo->sei_rp_received = 0;
+
+ case h264_NAL_UNIT_TYPE_SLICE:
+ ////////////////////////////////////////////////////////////////////////////
+ // Step 1: Check start point
+ ////////////////////////////////////////////////////////////////////////////
+ //
+ /// Slice parsing must start from the valid start point( SPS, PPS, IDR or recovery point or primary_I)
+ /// 1) No start point reached, append current ES buffer to workload and release it
+ /// 2) else, start parsing
+ //
+ //if(pInfo->got_start && ((pInfo->sei_information.recovery_point) || (pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR)))
+ //{
+ //pInfo->img.recovery_point_found = 1;
+ //}
+ {
+
+ h264_Slice_Header_t next_SliceHeader;
+
+ /// Reset next slice header
+ h264_memset(&next_SliceHeader, 0x0, sizeof(h264_Slice_Header_t));
+ next_SliceHeader.nal_ref_idc = nal_ref_idc;
+
+ if( (1==pInfo->primary_pic_type_plus_one)&&(pInfo->got_start))
+ {
+ pInfo->img.recovery_point_found |=4;
+ }
+ pInfo->primary_pic_type_plus_one = 0;
+
+
+
+ if(pInfo->img.recovery_point_found == 0) {
+ pInfo->img.structure = FRAME;
+ pInfo->wl_err_curr |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE;
+ pInfo->wl_err_curr |= (FRAME << FIELD_ERR_OFFSET);
+ break;
+ }
+
+ ////////////////////////////////////////////////////////////////////////////
+ // Step 2: Parsing slice header
+ ////////////////////////////////////////////////////////////////////////////
+ /// PWT
+ pInfo->h264_pwt_start_byte_offset=0;
+ pInfo->h264_pwt_start_bit_offset=0;
+ pInfo->h264_pwt_end_byte_offset=0;
+ pInfo->h264_pwt_end_bit_offset=0;
+ pInfo->h264_pwt_enabled =0;
+ /// IDR flag
+ next_SliceHeader.idr_flag = (pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR);
+
+
+ /// Pass slice header
+ status = h264_Parse_Slice_Layer_Without_Partitioning_RBSP(parent, pInfo, &next_SliceHeader);
+
+ pInfo->sei_information.recovery_point = 0;
+
+ if(next_SliceHeader.sh_error & 3) {
+ pInfo->wl_err_curr |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE;
+
+ // Error type definition, refer to viddec_fw_common_defs.h
+ // if error in top field, VIDDEC_FW_WORKLOAD_ERR_TOPFIELD = (1 << 17)
+ // if error in bottom field, VIDDEC_FW_WORKLOAD_ERR_BOTTOMFIELD = (1 << 18)
+ // if this is frame based, both 2 bits should be set
+ pInfo->wl_err_curr |= (FRAME << FIELD_ERR_OFFSET);
+
+ break;
+ }
+ pInfo->img.current_slice_num++;
+
+
+#ifdef DUMP_HEADER_INFO
+dump_slice_header(pInfo, &next_SliceHeader);
+////h264_print_decoder_values(pInfo);
+#endif
+
+
+ ////////////////////////////////////////////////////////////////////////////
+ // Step 3: Processing if new picture coming
+ // 1) if it's the second field
+ // 2) if it's a new frame
+ ////////////////////////////////////////////////////////////////////////////
+ //AssignQuantParam(pInfo);
+ if(h264_is_new_picture_start(pInfo, next_SliceHeader, pInfo->SliceHeader))
+ {
+ //
+ ///----------------- New Picture.boundary detected--------------------
+ //
+ pInfo->img.g_new_pic++;
+
+ //
+ // Complete previous picture
+ h264_dpb_store_previous_picture_in_dpb(pInfo, 0, 0); //curr old
+ //h264_hdr_post_poc(0, 0, use_old);
+
+ //
+ // Update slice structures:
+ h264_update_old_slice(pInfo, next_SliceHeader); //cur->old; next->cur;
+
+ //
+ // 1) if resolution change: reset dpb
+ // 2) else: init frame store
+ h264_update_img_info(pInfo); //img, dpb
+
+ //
+ ///----------------- New frame.boundary detected--------------------
+ //
+ pInfo->img.second_field = h264_is_second_field(pInfo);
+ if(pInfo->img.second_field == 0)
+ {
+ pInfo->img.g_new_frame = 1;
+ h264_dpb_update_queue_dangling_field(pInfo);
+
+ //
+ /// DPB management
+ /// 1) check the gaps
+ /// 2) assign fs for non-exist frames
+ /// 3) fill the gaps
+ /// 4) store frame into DPB if ...
+ //
+ //if(pInfo->SliceHeader.redundant_pic_cnt)
+ {
+ h264_dpb_gaps_in_frame_num_mem_management(pInfo);
+ }
+
+#ifdef DUMP_HEADER_INFO
+ dump_new_picture_attr(pInfo, pInfo->SliceHeader.frame_num);
+#endif
+ }
+ //
+ /// Decoding POC
+ h264_hdr_decoding_poc (pInfo, 0, 0);
+
+ //
+ /// Init Frame Store for next frame
+ h264_dpb_init_frame_store (pInfo);
+ pInfo->img.current_slice_num = 1;
+
+ if(pInfo->SliceHeader.first_mb_in_slice != 0)
+ {
+ ////Come here means we have slice lost at the beginning, since no FMO support
+ pInfo->SliceHeader.sh_error |= (pInfo->SliceHeader.structure << 17);
+ }
+
+ //
+ /// Emit out the New Frame
+ if(pInfo->img.g_new_frame)
+ {
+ h264_parse_emit_start_new_frame(parent, pInfo);
+ }
+
+ h264_parse_emit_current_pic(parent, pInfo);
+ }
+ else ///////////////////////////////////////////////////// If Not a picture start
+ {
+ //
+ /// Update slice structures: cur->old; next->cur;
+ h264_update_old_slice(pInfo, next_SliceHeader);
+
+ //
+ /// 1) if resolution change: reset dpb
+ /// 2) else: update img info
+ h264_update_img_info(pInfo);
+ }
+
+
+ //////////////////////////////////////////////////////////////
+ // Step 4: DPB reference list init and reordering
+ //////////////////////////////////////////////////////////////
+
+ //////////////////////////////////////////////// Update frame Type--- IDR/I/P/B for frame or field
+ h264_update_frame_type(pInfo);
+
+
+ h264_dpb_update_ref_lists( pInfo);
+
+#ifdef DUMP_HEADER_INFO
+ dump_ref_list(pInfo);
+#endif
+ /// Emit out the current "good" slice
+ h264_parse_emit_current_slice(parent, pInfo);
+
+ }
+ break;
+
+ ///// * Main profile doesn't support Data Partition, skipped.... *////
+ case h264_NAL_UNIT_TYPE_DPA:
+ case h264_NAL_UNIT_TYPE_DPB:
+ case h264_NAL_UNIT_TYPE_DPC:
+ //OS_INFO("***********************DP feature, not supported currently*******************\n");
+ pInfo->wl_err_curr |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE;
+ status = H264_STATUS_NOTSUPPORT;
+ break;
+
+ //// * Parsing SEI info *////
+ case h264_NAL_UNIT_TYPE_SEI:
+ status = H264_STATUS_OK;
+
+ //OS_INFO("*****************************SEI**************************************\n");
+ if(pInfo->sps_valid){
+ //h264_user_data_t user_data; /// Replace with tmp buffer while porting to FW
+ pInfo->number_of_first_au_info_nal_before_first_slice++;
+ /// parsing the SEI info
+ status = h264_Parse_Supplemental_Enhancement_Information_Message(parent, pInfo);
+ }
+
+ //h264_rbsp_trailing_bits(pInfo);
+ break;
+ case h264_NAL_UNIT_TYPE_SPS:
+ {
+ //OS_INFO("*****************************SPS**************************************\n");
+ ///
+ /// Can not define local SPS since the Current local stack size limitation!
+ /// Could be changed after the limitation gone
+ ///
+ uint8_t old_sps_id=0;
+ vui_seq_parameters_t_not_used vui_seq_not_used;
+
+ old_sps_id = pInfo->active_SPS.seq_parameter_set_id;
+ h264_memset(&(pInfo->active_SPS), 0x0, sizeof(seq_param_set_used));
+
+
+ status = h264_Parse_SeqParameterSet(parent, pInfo, &(pInfo->active_SPS), &vui_seq_not_used, (int32_t *)pInfo->TMP_OFFSET_REFFRM_PADDR_GL);
+ if(status == H264_STATUS_OK) {
+ h264_Parse_Copy_Sps_To_DDR(pInfo, &(pInfo->active_SPS), pInfo->active_SPS.seq_parameter_set_id);
+ pInfo->sps_valid = 1;
+
+ if(1==pInfo->active_SPS.pic_order_cnt_type) {
+ h264_Parse_Copy_Offset_Ref_Frames_To_DDR(pInfo,(int32_t *)pInfo->TMP_OFFSET_REFFRM_PADDR_GL,pInfo->active_SPS.seq_parameter_set_id);
+ }
+
+#ifdef DUMP_HEADER_INFO
+ dump_sps(&(pInfo->active_SPS));
+#endif
+
+ }
+ ///// Restore the active SPS if new arrival's id changed
+ if(old_sps_id>=MAX_NUM_SPS) {
+ h264_memset(&(pInfo->active_SPS), 0x0, sizeof(seq_param_set_used));
+ pInfo->active_SPS.seq_parameter_set_id = 0xff;
+ }
+ else {
+ if(old_sps_id!=pInfo->active_SPS.seq_parameter_set_id) {
+ h264_Parse_Copy_Sps_From_DDR(pInfo, &(pInfo->active_SPS), old_sps_id);
+ }
+ else {
+ //h264_memset(&(pInfo->active_SPS), 0x0, sizeof(seq_param_set));
+ pInfo->active_SPS.seq_parameter_set_id = 0xff;
+ }
+ }
+
+ pInfo->number_of_first_au_info_nal_before_first_slice++;
+ }
+ break;
+ case h264_NAL_UNIT_TYPE_PPS:
+ {
+ //OS_INFO("*****************************PPS**************************************\n");
+
+ uint32_t old_sps_id = pInfo->active_SPS.seq_parameter_set_id;
+ uint32_t old_pps_id = pInfo->active_PPS.pic_parameter_set_id;
+
+ h264_memset(&pInfo->active_PPS, 0x0, sizeof(pic_param_set));
+ pInfo->number_of_first_au_info_nal_before_first_slice++;
+
+ if (h264_Parse_PicParameterSet(parent, pInfo, &pInfo->active_PPS)== H264_STATUS_OK)
+ {
+ h264_Parse_Copy_Sps_From_DDR(pInfo, &(pInfo->active_SPS), pInfo->active_PPS.seq_parameter_set_id);
+ if(old_sps_id != pInfo->active_SPS.seq_parameter_set_id)
+ {
+ pInfo->Is_SPS_updated = 1;
+ }
+ if(pInfo->active_SPS.seq_parameter_set_id != 0xff) {
+ h264_Parse_Copy_Pps_To_DDR(pInfo, &pInfo->active_PPS, pInfo->active_PPS.pic_parameter_set_id);
+ pInfo->got_start = 1;
+ if(pInfo->sei_information.recovery_point)
+ {
+ pInfo->img.recovery_point_found |= 2;
+
+ //// Enable the RP recovery if no IDR ---Cisco
+ if((pInfo->img.recovery_point_found & 1)==0)
+ pInfo->sei_rp_received = 1;
+ }
+ }
+ else
+ {
+ h264_Parse_Copy_Sps_From_DDR(pInfo, &(pInfo->active_SPS), old_sps_id);
+ }
+ #ifdef DUMP_HEADER_INFO
+ dump_pps(&(pInfo->active_PPS));
+ #endif
+ } else {
+ if(old_sps_id<MAX_NUM_SPS)
+ h264_Parse_Copy_Sps_From_DDR(pInfo, &(pInfo->active_SPS), old_sps_id);
+ if(old_pps_id<MAX_NUM_PPS)
+ h264_Parse_Copy_Pps_From_DDR(pInfo, &(pInfo->active_PPS), old_pps_id);
+ }
+
+ } //// End of PPS parsing
+ break;
+
+
+ case h264_NAL_UNIT_TYPE_EOSeq:
+ case h264_NAL_UNIT_TYPE_EOstream:
+
+ h264_parse_emit_eos(parent, pInfo);
+ h264_init_dpb(&(pInfo->dpb));
+
+ /* picture level info which will always be initialized */
+ //h264_init_Info_under_sps_pps_level(pInfo);
+
+ ////reset the pInfo here
+ //viddec_h264_init(ctxt, (uint32_t *)parser->sps_pps_ddr_paddr, false);
+
+
+ status = H264_STATUS_OK;
+ pInfo->number_of_first_au_info_nal_before_first_slice++;
+ break;
+
+ case h264_NAL_UNIT_TYPE_Acc_unit_delimiter:
+#if 1
+ ///// primary_pic_type
+ {
+ uint32_t code = 0xff;
+ int32_t ret = 0;
+ ret = viddec_pm_get_bits(parent, (uint32_t *)&(code), 3);
+
+ if(ret != -1) {
+ //if(pInfo->got_start && (code == 0))
+ //{
+ //pInfo->img.recovery_point_found |= 4;
+ //}
+ pInfo->primary_pic_type_plus_one = (uint8_t)(code)+1;
+ status = H264_STATUS_OK;
+ }
+ pInfo->number_of_first_au_info_nal_before_first_slice++;
+ break;
+ }
+#endif
+
+ case h264_NAL_UNIT_TYPE_Reserved1:
+ case h264_NAL_UNIT_TYPE_Reserved2:
+ case h264_NAL_UNIT_TYPE_Reserved3:
+ case h264_NAL_UNIT_TYPE_Reserved4:
+ case h264_NAL_UNIT_TYPE_Reserved5:
+ status = H264_STATUS_OK;
+ pInfo->number_of_first_au_info_nal_before_first_slice++;
+ break;
+
+ case h264_NAL_UNIT_TYPE_filler_data:
+ status = H264_STATUS_OK;
+ break;
+ case h264_NAL_UNIT_TYPE_ACP:
+ break;
+ case h264_NAL_UNIT_TYPE_SPS_extension:
+ case h264_NAL_UNIT_TYPE_unspecified:
+ case h264_NAL_UNIT_TYPE_unspecified2:
+ status = H264_STATUS_OK;
+ //nothing
+ break;
+ default:
+ status = H264_STATUS_OK;
+ break;
+ }
+
+ //pInfo->old_nal_unit_type = pInfo->nal_unit_type;
+ switch ( pInfo->nal_unit_type )
+ {
+ case h264_NAL_UNIT_TYPE_IDR:
+ case h264_NAL_UNIT_TYPE_SLICE:
+ case h264_NAL_UNIT_TYPE_Acc_unit_delimiter:
+ case h264_NAL_UNIT_TYPE_SPS:
+ case h264_NAL_UNIT_TYPE_PPS:
+ case h264_NAL_UNIT_TYPE_SEI:
+ case h264_NAL_UNIT_TYPE_EOSeq:
+ case h264_NAL_UNIT_TYPE_EOstream:
+ case h264_NAL_UNIT_TYPE_Reserved1:
+ case h264_NAL_UNIT_TYPE_Reserved2:
+ case h264_NAL_UNIT_TYPE_Reserved3:
+ case h264_NAL_UNIT_TYPE_Reserved4:
+ case h264_NAL_UNIT_TYPE_Reserved5:
+ {
+ pInfo->old_nal_unit_type = pInfo->nal_unit_type;
+ break;
+ }
+ default:
+ break;
+ }
+
+ return status;
+}
+
+
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+
+static uint32_t viddec_h264_is_frame_start(void *ctxt)
+{
+   /* Report whether the parser has detected the start of a new frame.
+    * Returns 1 when slice processing flagged a new frame boundary
+    * (pInfo->img.g_new_frame non-zero), 0 otherwise. */
+   struct h264_viddec_parser* parser = ctxt;
+   h264_Info * pInfo = &(parser->info);
+
+   return (pInfo->img.g_new_frame) ? 1 : 0;
+}
+
+#ifdef VBP
+uint32_t viddec_h264_wkld_done(void *parent, void *ctxt, unsigned int next_sc,
+ uint32_t *codec_specific_errors)
+#else
+static uint32_t viddec_h264_wkld_done(void *parent, void *ctxt, unsigned int next_sc, uint32_t *codec_specific_errors)
+#endif
+{
+ struct h264_viddec_parser* parser = ctxt;
+ uint32_t ret = VIDDEC_PARSE_SUCESS;
+ h264_Info * pInfo = &(parser->info);
+ uint8_t is_stream_forced_to_complete=false;
+
+ is_stream_forced_to_complete = (VIDDEC_PARSE_EOS == next_sc) || (VIDDEC_PARSE_DISCONTINUITY == next_sc);
+
+ if(is_stream_forced_to_complete || (pInfo->is_current_workload_done))
+ {
+ viddec_workload_t *wl;
+ viddec_frame_attributes_t *attrs;
+
+ wl = viddec_pm_get_header( parent );
+ attrs = &wl->attrs;
+
+ if((attrs->cont_size.width < 32) || (attrs->cont_size.width > 2048) || (attrs->cont_size.height < 32) || (attrs->cont_size.height>2048))
+ {
+ attrs->cont_size.width = 32;
+ attrs->cont_size.height = 32;
+ pInfo->wl_err_curr |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE;
+ pInfo->wl_err_curr |= (FRAME << FIELD_ERR_OFFSET);
+ }
+
+ *codec_specific_errors = pInfo->wl_err_curr;
+ pInfo->wl_err_curr = pInfo->wl_err_next;
+ pInfo->wl_err_next = 0;
+
+ if(is_stream_forced_to_complete)
+ {
+ h264_parse_emit_eos(parent, pInfo);
+ }
+ ret = VIDDEC_PARSE_FRMDONE;
+ }
+
+ return ret;
+}
+
+#ifdef VBP
+void viddec_h264_get_context_size(viddec_parser_memory_sizes_t *size)
+#else
+static void viddec_h264_get_context_size(viddec_parser_memory_sizes_t *size)
+#endif
+{
+ /* Should return size of my structure */
+ size->context_size = sizeof(struct h264_viddec_parser);
+ size->persist_size = MAX_NUM_SPS * sizeof(seq_param_set_all)
+ + MAX_NUM_PPS * sizeof(pic_param_set)
+ + MAX_NUM_SPS * sizeof(int32_t) * MAX_NUM_REF_FRAMES_IN_PIC_ORDER_CNT_CYCLE
+ + sizeof(int32_t) * MAX_NUM_REF_FRAMES_IN_PIC_ORDER_CNT_CYCLE;
+}
+
+void viddec_h264_get_ops(viddec_parser_ops_t *ops)
+{
+   /* Register the H.264 parser entry points with the parser-manager
+    * operations table. */
+   ops->init           = viddec_h264_init;
+   ops->parse_syntax   = viddec_h264_parse;
+   ops->get_cxt_size   = viddec_h264_get_context_size;
+   ops->is_wkld_done   = viddec_h264_wkld_done;
+   ops->is_frame_start = viddec_h264_is_frame_start;
+}
+
diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/viddec_h264_workload.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/viddec_h264_workload.c
new file mode 100644
index 0000000..4fc2f1a
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/viddec_h264_workload.c
@@ -0,0 +1,1306 @@
+/* Any workload management goes in this file */
+
+#include "viddec_fw_debug.h"
+#include "viddec_parser_ops.h"
+#include "h264.h"
+#include "h264parse.h"
+#include "viddec_fw_item_types.h"
+#include "h264parse_dpb.h"
+
+
+#include "viddec_fw_workload.h"
+#include <auto_eas/gen4_mfd.h>
+#include "viddec_pm_utils_bstream.h"
+
+// picture parameter 1
+#define PUT_BSD_PP1_IMG_DISPOSABLE_FLAG_BIT(w) (((uint32_t)w)&0x1)
+#define PUT_BSD_PP1_SLICE_TYPE_BITS(w) ((((uint32_t)w)&0x7)<<1)
+#define PUT_BSD_PP1_WEIGHTED_BIPRED_IDC_BITS(w) ((((uint32_t)w)&0x3)<<4)
+#define PUT_BSD_PP1_WEIGHTED_PRED_FLAG_BIT(w) ((((uint32_t)w)&0x1)<<6)
+#define PUT_BSD_PP1_NUM_REF_IDX_L0_BITS(w) ((((uint32_t)w)&0x3F)<<8)
+#define PUT_BSD_PP1_NUM_REF_IDX_L1_BITS(w) ((((uint32_t)w)&0x3F)<<16)
+
+// picture parameter 2
+#define PUT_BSD_PP2_CABAC_INIT_IDC_BITS(w) (((uint32_t)w)&0x3)
+#define PUT_BSD_PP2_QP_BITS(w) ((((uint32_t)w)&0x3F)<<2)
+#define PUT_BSD_PP2_DISABLE_DBF_IDC_BITS(w) ((((uint32_t)w)&0x3)<<8)
+#define PUT_BSD_PP2_ALPHA_C0_OFFSET_DIV2_BITS(w) ((((uint32_t)w)&0xF)<<10)
+#define PUT_BSD_PP2_BETA_OFFSET_DIV2_BITS(w) ((((uint32_t)w)&0xF)<<14)
+#define PUT_BSD_PP2_IMG_DIRECT_TYPE_BIT(w) ((((uint32_t)w)&0x1)<<18)
+#define PUT_BSD_PP2_CHROMA_QP_OFFSET_BITS(w) ((((uint32_t)w)&0x1F)<<19)
+#define PUT_BSD_PP2_CHROMA_QP_OFFSET_2_BITS(w) ((((uint32_t)w)&0x1F)<<24)
+
+
+// slice start parameter
+#define PUT_BSD_SS_START_ADDR_BITS(w) (((uint32_t)w)&0x7fff) // 14:0 current slice start address
+#define PUT_BSD_SS_SKIP_FS_IDC_BITS(w) ((((uint32_t)w)&0x3f)<<16) // [5:0], [4:0] frame store idc, [5] - 0: top-filed, 1: bottom field
+#define PUT_BSD_SS_SKIP_TYPE_BIT(w) ((((uint32_t)w)&0x1)<<24) // 0: P-skip, 1: I-skip
+#define PUT_BSD_SS_SKIP_REWIND_BITS(w) ((((uint32_t)w)&0xf)<<28) // number of MB or MBAFF pairs to rewind before skip
+
+//h264_dpb_init
+#define PUT_FRAME_WIDTH_MB_BITS(w) (((uint32_t)w)&0x7F)
+#define PUT_FRAME_HEIGHT_MB_BITS(w) ((((uint32_t)w)&0x7F)<<16)
+
+//dpb lut table init
+//#define PUT_BSD_IMAGE_FRAME_STORE_IDC_BITS(w) ((((uint32_t)w)&0x1F)<<8)
+
+//h264 img init
+#define PUT_BSD_IMAGE_STRUCTURE_BITS(w) (((uint32_t)w)&0x3)
+#define PUT_BSD_IMAGE_IDR_BIT(w) ((((uint32_t)w)&0x1)<<2)
+#define PUT_BSD_IMAGE_MBAFF_FRAME_FLAG_BIT(w) ((((uint32_t)w)&0x1)<<3)
+#define PUT_BSD_IMAGE_ENTROPY_CODING_MODE_FLAG_BIT(w) ((((uint32_t)w)&0x1)<<4)
+#define PUT_BSD_IMAGE_CONSTRAINED_INTRA_PRED_FLAG_BIT(w) ((((uint32_t)w)&0x1)<<5)
+#define PUT_BSD_IMG_FRAME_MBS_ONLY_FLAG_BIT(w) ((((uint32_t)w)&0x1)<<6)
+#define PUT_BSD_IMG_DIRECT_8X8_INFER_FLAG_BIT(w) ((((uint32_t)w)&0x1)<<7)
+#define PUT_BSD_IMAGE_FRAME_STORE_IDC_BITS(w) ((((uint32_t)w)&0x1F)<<8)
+
+#define PUT_HPD_BSD_IMG_TRANSFORM_8X8_MODE_FLAG_BIT(w) ((((uint32_t)w)&0x1)<<13)
+#define PUT_HPD_BSD_IMG_MONOCHROME_FLAG_BIT(w) ((((uint32_t)w)&0x1)<<14)
+#define PUT_HPD_BSD_IMG_GREY_NONEXISTING_FLAG_BIT(w) ((((uint32_t)w)&0x1)<<15)
+#define PUT_HPD_BSD_IMG_QM_PRESENT_FLAG_BIT(w) ((((uint32_t)w)&0x1)<<16)
+#define PUT_HPD_BSD_IMG_QM_LIST_FLAGS_BITS(w) ((((uint32_t)w)&0xFF)<<17)
+#define PUT_HPD_BSD_IMG_MONOCHROME_PWT_FLAG_BIT(w) ((((uint32_t)w)&0x1)<<25)
+
+
+extern void h264_dpb_store_previous_picture_in_dpb(h264_Info * pInfo,
+ int32_t NonExisting,
+ int32_t use_old);
+
+extern void h264_dpb_flush_dpb (h264_Info * pInfo,int32_t output_all, int32_t keep_complement, int32_t num_ref_frames);
+
+
+
+/* Translate parser state (pInfo) into workload frame attributes:
+ * content size in pixels (with bottom frame-cropping applied) and
+ * zero-initialized per-slice H.264 attributes that slice-level code
+ * fills in later. */
+void h264_translate_parser_info_to_frame_attributes(viddec_workload_t *wl, h264_Info *pInfo)
+{
+
+ viddec_frame_attributes_t *attrs = &wl->attrs;
+
+
+
+ //// Cont_size: macroblock counts converted to luma pixels (x16)
+ attrs->cont_size.height = pInfo->img.FrameHeightInMbs*16;
+ attrs->cont_size.width = pInfo->img.PicWidthInMbs*16;
+
+ //// The following attributes will be updated in slice level
+ attrs->h264.used_for_reference = 0;
+ attrs->h264.top_field_first = 0;
+ attrs->h264.top_field_poc = 0;
+ attrs->h264.bottom_field_poc = 0;
+ attrs->h264.field_pic_flag = 0;
+
+#if 1
+/// Double check the size late!!!!!
+ //attrs->h264.cropped_size.width = pInfo->img.PicWidthInMbs*16;
+ //attrs->h264.cropped_size.height = pInfo->img.PicWidthInMbs*16;
+
+ if( (pInfo->active_SPS.sps_disp.frame_cropping_flag) &&
+ (pInfo->active_SPS.sps_disp.chroma_format_idc < 4))
+ {
+ int32_t CropUnitX, CropUnitY;
+ int32_t SubWidthC, SubHeightC;
+
+ if(pInfo->active_SPS.sps_disp.chroma_format_idc == 0)
+ {
+ /* Monochrome: crop units are luma samples (doubled vertically
+ for interlaced content, i.e. when frame_mbs_only_flag == 0). */
+ CropUnitX = 1;
+ CropUnitY = 2 - pInfo->active_SPS.sps_disp.frame_mbs_only_flag;
+ }
+ else
+ {
+ /* Derive chroma subsampling factors from chroma_format_idc
+ (1=4:2:0 -> 2x2, 2=4:2:2 -> 2x1, 3=4:4:4 -> 1x1). */
+ SubWidthC = 2 - ((pInfo->active_SPS.sps_disp.chroma_format_idc - 1) >> 1);
+ SubHeightC = 2 - ((pInfo->active_SPS.sps_disp.chroma_format_idc - 1) >>1)
+ - ((pInfo->active_SPS.sps_disp.chroma_format_idc - 1) & 0x1);
+ CropUnitX = SubWidthC;
+ CropUnitY = SubHeightC * (2 - pInfo->active_SPS.sps_disp.frame_mbs_only_flag);
+ }
+
+ /* NOTE(review): only the bottom crop offset is applied to the
+ reported height; left/right/top offsets (and width cropping) are
+ not applied here — confirm this is intentional for cont_size. */
+ if ((int32_t)attrs->cont_size.height >(pInfo->active_SPS.sps_disp.frame_crop_rect_bottom_offset*CropUnitY))
+ {
+ attrs->cont_size.height -= (pInfo->active_SPS.sps_disp.frame_crop_rect_bottom_offset*CropUnitY);
+ //attrs->h264.cropped_size.height-= (pInfo->active_SPS.sps_disp.frame_crop_rect_bottom_offset*CropUnitY);
+ }
+ }
+/// Pan-Scan Info
+
+#endif
+
+}
+
+
+/* Update the frame attributes of the workload currently being filled:
+ * decode the packed pic_type of the frame store under decode into the
+ * viddec frame-type enums (frame vs. top/bottom field), and refresh
+ * reference flag, POCs, top-field-first and field_pic_flag. */
+static void h264_parse_update_frame_attributes(void *parent, h264_Info *pInfo)
+{
+ viddec_workload_t *wl_cur, *wl_next;
+ viddec_frame_attributes_t *attrs;
+ uint8_t frame_type=0;
+
+
+ /* Pick the workload to update: the current one if it is being filled
+ with a new frame, otherwise the next queued workload. */
+ if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+ {
+ wl_cur = viddec_pm_get_header( parent );
+ attrs = &wl_cur->attrs;
+ }
+ else
+ {
+ wl_next = viddec_pm_get_next_header (parent);
+ attrs = &wl_next->attrs;
+ }
+
+ /////////update frame type
+ /* pic_type packs a structure bit plus 3-bit type fields for
+ frame / top field / bottom field at the FRAME_TYPE_*_OFFSET
+ positions; the structure bit selects which fields are valid. */
+ if((pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type)&(0x1 << FRAME_TYPE_STRUCTRUE_OFFSET))
+ {
+ /* Frame-coded picture: one type for the whole frame. */
+ frame_type = ( (pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type)&((0x7 << FRAME_TYPE_FRAME_OFFSET)) )>> FRAME_TYPE_FRAME_OFFSET;
+ switch(frame_type)
+ {
+ case FRAME_TYPE_IDR: attrs->frame_type = VIDDEC_FRAME_TYPE_IDR; break;
+ case FRAME_TYPE_I: attrs->frame_type = VIDDEC_FRAME_TYPE_I; break;
+ case FRAME_TYPE_P: attrs->frame_type = VIDDEC_FRAME_TYPE_P; break;
+ case FRAME_TYPE_B: attrs->frame_type = VIDDEC_FRAME_TYPE_B; break;
+ default: attrs->frame_type = VIDDEC_FRAME_TYPE_INVALID; break;
+ }
+
+ attrs->bottom_field_type = VIDDEC_FRAME_TYPE_INVALID;
+ }
+ else
+ {
+ /* Field-coded picture: top field type goes into frame_type,
+ bottom field type into bottom_field_type. */
+ frame_type = ( (pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type)&((0x7 << FRAME_TYPE_TOP_OFFSET)) )>> FRAME_TYPE_TOP_OFFSET;
+ switch(frame_type)
+ {
+ case FRAME_TYPE_IDR: attrs->frame_type = VIDDEC_FRAME_TYPE_IDR; break;
+ case FRAME_TYPE_I: attrs->frame_type = VIDDEC_FRAME_TYPE_I; break;
+ case FRAME_TYPE_P: attrs->frame_type = VIDDEC_FRAME_TYPE_P; break;
+ case FRAME_TYPE_B: attrs->frame_type = VIDDEC_FRAME_TYPE_B; break;
+ default: attrs->frame_type = VIDDEC_FRAME_TYPE_INVALID; break;
+
+ }
+
+ frame_type = ( (pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type)&((0x7 << FRAME_TYPE_BOTTOM_OFFSET)) )>> FRAME_TYPE_BOTTOM_OFFSET;
+ switch(frame_type)
+ {
+ case FRAME_TYPE_IDR: attrs->bottom_field_type = VIDDEC_FRAME_TYPE_IDR; break;
+ case FRAME_TYPE_I: attrs->bottom_field_type = VIDDEC_FRAME_TYPE_I; break;
+ case FRAME_TYPE_P: attrs->bottom_field_type = VIDDEC_FRAME_TYPE_P; break;
+ case FRAME_TYPE_B: attrs->bottom_field_type = VIDDEC_FRAME_TYPE_B; break;
+ default: attrs->bottom_field_type = VIDDEC_FRAME_TYPE_INVALID; break;
+
+ }
+ }
+
+ /////////update is_referece flag: sticky-OR so any reference slice marks the frame
+ attrs->h264.used_for_reference |= (pInfo->SliceHeader.nal_ref_idc == 0)? 0: 1;
+
+ /////////update POC
+ attrs->h264.top_field_poc = pInfo->img.toppoc;
+ attrs->h264.bottom_field_poc = pInfo->img.bottompoc;
+
+ //////// update TFF: top field displays first when its POC is not later
+ if(attrs->h264.top_field_poc <= attrs->h264.bottom_field_poc) {
+ attrs->h264.top_field_first = 1;
+ } else {
+ attrs->h264.top_field_first = 0;
+ }
+
+ /////// update field_pic_flag (sticky-OR across slices)
+ //attrs->h264.field_pic_flag |= (pInfo->SliceHeader.field_pic_flag << pInfo->SliceHeader.bottom_field_flag);
+ attrs->h264.field_pic_flag |= pInfo->SliceHeader.field_pic_flag;
+
+ return;
+}
+
+
+/* Pack the current slice header and PPS state into the three hardware
+ * slice words (h264_bsd_slice_p1 / p2 / slice_start) using the
+ * PUT_BSD_* bit-layout macros defined at the top of this file. */
+static void h264_fill_slice_data(h264_Info *pInfo, h264_slice_data * p_slice_data)
+{
+ uint32_t data=0;
+ uint32_t first_mb_in_slice =0;
+
+
+
+ ////////////fill pic parameters 1: disposable flag, slice type,
+ //// weighted-pred controls, active reference list sizes
+ data = PUT_BSD_PP1_IMG_DISPOSABLE_FLAG_BIT( (pInfo->SliceHeader.nal_ref_idc == 0) ) +
+ PUT_BSD_PP1_SLICE_TYPE_BITS(pInfo->SliceHeader.slice_type) +
+ PUT_BSD_PP1_WEIGHTED_BIPRED_IDC_BITS(pInfo->active_PPS.weighted_bipred_idc) +
+ PUT_BSD_PP1_WEIGHTED_PRED_FLAG_BIT(pInfo->active_PPS.weighted_pred_flag) +
+ PUT_BSD_PP1_NUM_REF_IDX_L0_BITS(pInfo->SliceHeader.num_ref_idx_l0_active) +
+ PUT_BSD_PP1_NUM_REF_IDX_L1_BITS(pInfo->SliceHeader.num_ref_idx_l1_active);
+ p_slice_data->h264_bsd_slice_p1 = data;
+
+
+ ///////////fill pic parameters 2: CABAC init, effective slice QP
+ //// (pic_init_qp_minus26 + 26 + slice delta), deblocking controls,
+ //// direct mode and chroma QP offsets
+ data = PUT_BSD_PP2_CABAC_INIT_IDC_BITS(pInfo->SliceHeader.cabac_init_idc) +
+ PUT_BSD_PP2_QP_BITS( (pInfo->SliceHeader.slice_qp_delta + pInfo->active_PPS.pic_init_qp_minus26+26) ) +
+ PUT_BSD_PP2_DISABLE_DBF_IDC_BITS(pInfo->SliceHeader.disable_deblocking_filter_idc) +
+ PUT_BSD_PP2_ALPHA_C0_OFFSET_DIV2_BITS(pInfo->SliceHeader.slice_alpha_c0_offset_div2) +
+ PUT_BSD_PP2_BETA_OFFSET_DIV2_BITS(pInfo->SliceHeader.slice_beta_offset_div2) +
+ PUT_BSD_PP2_IMG_DIRECT_TYPE_BIT(pInfo->SliceHeader.direct_spatial_mv_pred_flag) +
+ PUT_BSD_PP2_CHROMA_QP_OFFSET_BITS(pInfo->active_PPS.chroma_qp_index_offset) +
+ PUT_BSD_PP2_CHROMA_QP_OFFSET_2_BITS(pInfo->active_PPS.second_chroma_qp_index_offset);
+
+ p_slice_data->h264_bsd_slice_p2 = data;
+
+ /////////fill slice start: first MB address, skip frame-store idc and
+ //// rewind amount (2 for MBAFF pairs, else 3 — per macro comments above)
+ first_mb_in_slice = pInfo->SliceHeader.first_mb_in_slice;
+
+ data = PUT_BSD_SS_START_ADDR_BITS(first_mb_in_slice);
+ data |= PUT_BSD_SS_SKIP_FS_IDC_BITS( pInfo->h264_list_replacement) |
+ PUT_BSD_SS_SKIP_TYPE_BIT(0) |
+ PUT_BSD_SS_SKIP_REWIND_BITS((pInfo->img.MbaffFrameFlag? 2: 3));
+
+ p_slice_data->h264_bsd_slice_start = data;
+
+}
+
+
+/* Emit workload items for the six 4x4 scaling lists (indices 0..5).
+ * For each list, decide its source — SPS, PPS (PPS overrides SPS),
+ * spec default, or fall-back (FB_QM, no item payload needed) — update
+ * pInfo->qm_present_list accordingly, then append the 16 coefficients
+ * as two 8-byte workload items (SPS/PPS case) or a single marker item
+ * (default case). */
+static void h264_parse_emit_4X4_scaling_matrix( void *parent, h264_Info *pInfo )
+{
+
+ viddec_workload_item_t wi;
+
+ uint32_t i=0, n_items=0;
+ uint32_t qm_type=0;
+
+
+ for( i = 0; i < 6; i++ )
+ {
+ qm_type = FB_QM;
+ if (pInfo->active_SPS.seq_scaling_matrix_present_flag) // check sps first
+ {
+ if (pInfo->active_SPS.seq_scaling_list_present_flag[i])
+ {
+ pInfo->qm_present_list |= ((0x1)<<i);
+
+ if (pInfo->active_SPS.UseDefaultScalingMatrix4x4Flag[i]) {
+ qm_type = DEFAULT_QM;
+ } else {
+ qm_type = SPS_QM;
+ }
+ }
+ }
+
+ if (pInfo->active_PPS.pic_scaling_matrix_present_flag) // then check pps
+ {
+ if (pInfo->active_PPS.pic_scaling_list_present_flag[i])
+ {
+ pInfo->qm_present_list |= ((0x1)<<i);
+ if (pInfo->active_PPS.UseDefaultScalingMatrix4x4Flag[i]) {
+ qm_type = DEFAULT_QM;
+ } else {
+ qm_type = PPS_QM;
+ }
+ }
+ else
+ {
+ /* PPS matrix present but this list absent: lists other than 0
+ and 3 fall back and are cleared from the present mask.
+ (The (i < 6) term is always true in this loop.) */
+ if ((i != 0) && (i != 3) && (i < 6)) {
+ pInfo->qm_present_list &= ~((0x1)<<i);
+ qm_type = FB_QM;
+ }
+ }
+ }
+
+
+ ///////////////////// Emit out Scaling_matrix//////////////////////
+ wi.vwi_type = VIDDEC_WORKLOAD_H264_SCALING_MATRIX;
+ // data_offset 0x aa bb cc dd
+ // bb is the workload item offset
+ // cc is the qm_type
+ // dd is the matrix number
+ //
+ switch (qm_type)
+ {
+ case (SPS_QM):{
+
+ /* Two items of 8 coefficients each, packed little-endian
+ byte-wise into the two payload words. */
+ for(n_items =0; n_items<2; n_items++)
+ {
+ wi.data.data_offset = i + (SPS_QM << 4) + (n_items <<8);
+ wi.data.data_payload[0] = ((uint32_t)(pInfo->active_SPS.ScalingList4x4[i][n_items*8+0]))+
+ (((uint32_t)(pInfo->active_SPS.ScalingList4x4[i][n_items*8+1]))<<8)+
+ (((uint32_t)(pInfo->active_SPS.ScalingList4x4[i][n_items*8+2]))<<16)+
+ (((uint32_t)(pInfo->active_SPS.ScalingList4x4[i][n_items*8+3]))<<24);
+ wi.data.data_payload[1] = ((uint32_t)(pInfo->active_SPS.ScalingList4x4[i][n_items*8+4]))+
+ (((uint32_t)(pInfo->active_SPS.ScalingList4x4[i][n_items*8+5]))<<8)+
+ (((uint32_t)(pInfo->active_SPS.ScalingList4x4[i][n_items*8+6]))<<16)+
+ (((uint32_t)(pInfo->active_SPS.ScalingList4x4[i][n_items*8+7]))<<24);
+
+ if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+ {
+ viddec_pm_append_workitem( parent, &wi );
+ }
+ else
+ {
+ viddec_pm_append_workitem_next( parent, &wi );
+ }
+
+ }
+
+ break;
+ }
+ case (PPS_QM):{
+
+ for(n_items =0; n_items<2; n_items++)
+ {
+ wi.data.data_offset = i + (PPS_QM << 4) + (n_items <<8);
+ wi.data.data_payload[0] = ((uint32_t)(pInfo->active_PPS.ScalingList4x4[i][n_items*8+0]))+
+ (((uint32_t)(pInfo->active_PPS.ScalingList4x4[i][n_items*8+1]))<<8)+
+ (((uint32_t)(pInfo->active_PPS.ScalingList4x4[i][n_items*8+2]))<<16)+
+ (((uint32_t)(pInfo->active_PPS.ScalingList4x4[i][n_items*8+3]))<<24);
+ wi.data.data_payload[1] = ((uint32_t)(pInfo->active_PPS.ScalingList4x4[i][n_items*8+4]))+
+ (((uint32_t)(pInfo->active_PPS.ScalingList4x4[i][n_items*8+5]))<<8)+
+ (((uint32_t)(pInfo->active_PPS.ScalingList4x4[i][n_items*8+6]))<<16)+
+ (((uint32_t)(pInfo->active_PPS.ScalingList4x4[i][n_items*8+7]))<<24);
+
+ if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+ {
+ viddec_pm_append_workitem( parent, &wi );
+ }
+ else
+ {
+ viddec_pm_append_workitem_next( parent, &wi );
+ }
+ }
+
+ break;
+ }
+ case (DEFAULT_QM):
+ {
+
+ /* Default matrix: no coefficients, the type tag in data_offset
+ is enough for the consumer. */
+ wi.data.data_offset = i + (DEFAULT_QM << 4);
+ wi.data.data_payload[0] = 0;
+ wi.data.data_payload[1] = 0;
+ if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+ {
+ viddec_pm_append_workitem( parent, &wi );
+ }
+ else
+ {
+ viddec_pm_append_workitem_next( parent, &wi );
+ }
+ break;
+ }
+ default:
+ {
+ /* FB_QM: nothing to emit. */
+ break;
+ }
+ }
+ }
+
+}
+
+/* Emit workload items for the two 8x8 scaling lists (indices 6..7,
+ * stored at [i-6] in the ScalingList8x8 arrays). Source selection
+ * mirrors the 4x4 variant: SPS checked first, PPS overrides, default
+ * matrix emits a marker item only.
+ * NOTE(review): unlike the 4x4 variant, the PPS-present-but-list-absent
+ * path does not clear qm_present_list / reset to FB_QM here — confirm
+ * whether that asymmetry is intentional. */
+static void h264_parse_emit_8X8_scaling_matrix( void *parent, h264_Info *pInfo )
+{
+
+ viddec_workload_item_t wi;
+
+ uint32_t i=0, n_items=0;
+ uint32_t qm_type=0;
+
+ for( i = 6; i < 8; i++ )
+ {
+ qm_type = FB_QM;
+ if (pInfo->active_SPS.seq_scaling_matrix_present_flag) // check sps first
+ {
+ if (pInfo->active_SPS.seq_scaling_list_present_flag[i])
+ {
+ pInfo->qm_present_list |= ((0x1)<<i);
+
+ if (pInfo->active_SPS.UseDefaultScalingMatrix8x8Flag[i-6])
+ {
+ qm_type = DEFAULT_QM;
+ }
+ else
+ {
+ qm_type = SPS_QM;
+ }
+ }
+ }
+
+ if (pInfo->active_PPS.pic_scaling_matrix_present_flag) // then check pps
+ {
+ if (pInfo->active_PPS.pic_scaling_list_present_flag[i])
+ {
+ pInfo->qm_present_list |= ((0x1)<<i);
+
+ if (pInfo->active_PPS.UseDefaultScalingMatrix8x8Flag[i-6])
+ {
+ qm_type = DEFAULT_QM;
+ }
+ else
+ {
+ qm_type = PPS_QM;
+ }
+ }
+ }
+ wi.vwi_type = VIDDEC_WORKLOAD_H264_SCALING_MATRIX;
+
+ // data_offset 0x aa bb cc dd
+ // bb is the workload item offset
+ // cc is the qm_type
+ // dd is the matrix number
+ //
+ switch (qm_type)
+ {
+ case (SPS_QM):
+ {
+ /* 64 coefficients emitted as eight items of 8 bytes,
+ packed little-endian byte-wise into the payload words. */
+ for(n_items =0; n_items<8; n_items++)
+ {
+ wi.data.data_offset = i + (SPS_QM << 4) + (n_items <<8);
+ wi.data.data_payload[0] = ((uint32_t)(pInfo->active_SPS.ScalingList8x8[i-6][n_items*8+0]))+
+ (((uint32_t)(pInfo->active_SPS.ScalingList8x8[i-6][n_items*8+1]))<<8)+
+ (((uint32_t)(pInfo->active_SPS.ScalingList8x8[i-6][n_items*8+2]))<<16)+
+ (((uint32_t)(pInfo->active_SPS.ScalingList8x8[i-6][n_items*8+3]))<<24);
+ wi.data.data_payload[1] = ((uint32_t)(pInfo->active_SPS.ScalingList8x8[i-6][n_items*8+4]))+
+ (((uint32_t)(pInfo->active_SPS.ScalingList8x8[i-6][n_items*8+5]))<<8)+
+ (((uint32_t)(pInfo->active_SPS.ScalingList8x8[i-6][n_items*8+6]))<<16)+
+ (((uint32_t)(pInfo->active_SPS.ScalingList8x8[i-6][n_items*8+7]))<<24);
+
+ if(pInfo->push_to_cur) { //cur is empty, fill new frame in cur
+ viddec_pm_append_workitem( parent, &wi );
+ } else {
+ viddec_pm_append_workitem_next( parent, &wi );
+ }
+ }
+ break;
+ }
+ case (PPS_QM):
+ {
+ for(n_items =0; n_items<8; n_items++)
+ {
+ wi.data.data_offset = i + (PPS_QM << 4) + (n_items <<8);
+ wi.data.data_payload[0] = ((uint32_t)(pInfo->active_PPS.ScalingList8x8[i-6][n_items*8+0]))+
+ (((uint32_t)(pInfo->active_PPS.ScalingList8x8[i-6][n_items*8+1]))<<8)+
+ (((uint32_t)(pInfo->active_PPS.ScalingList8x8[i-6][n_items*8+2]))<<16)+
+ (((uint32_t)(pInfo->active_PPS.ScalingList8x8[i-6][n_items*8+3]))<<24);
+ wi.data.data_payload[1] = ((uint32_t)(pInfo->active_PPS.ScalingList8x8[i-6][n_items*8+4]))+
+ (((uint32_t)(pInfo->active_PPS.ScalingList8x8[i-6][n_items*8+5]))<<8)+
+ (((uint32_t)(pInfo->active_PPS.ScalingList8x8[i-6][n_items*8+6]))<<16)+
+ (((uint32_t)(pInfo->active_PPS.ScalingList8x8[i-6][n_items*8+7]))<<24);
+
+ if(pInfo->push_to_cur) { //cur is empty, fill new frame in cur
+ viddec_pm_append_workitem( parent, &wi );
+ } else {
+ viddec_pm_append_workitem_next( parent, &wi );
+ }
+ }
+
+ break;
+ }
+ case (DEFAULT_QM):
+ {
+ /* Default matrix: type tag in data_offset only, no coefficients. */
+ wi.data.data_offset = i + (DEFAULT_QM << 4);
+ wi.data.data_payload[0] = 0;
+ wi.data.data_payload[1] = 0;
+ if(pInfo->push_to_cur) { //cur is empty, fill new frame in cur
+ viddec_pm_append_workitem( parent, &wi );
+ } else {
+ viddec_pm_append_workitem_next( parent, &wi );
+ }
+
+ break;
+ }
+ default:{
+ /* FB_QM: nothing to emit. */
+ break;
+ }
+ }
+ }
+
+}
+
+
+
+/* Pack picture-level parser state into the hardware picture words:
+ * DPB init (frame size in MBs), the current-image init word (structure,
+ * IDR, MBAFF, entropy mode, QM presence, frame-store idc, ...) via the
+ * PUT_BSD_IMAGE_* / PUT_HPD_BSD_IMG_* macros, and the top/bottom-field
+ * POCs according to the picture structure. */
+static void h264_fill_pic_data(h264_Info *pInfo, h264_pic_data * p_pic_data)
+{
+ uint32_t data=0;
+ uint32_t dec_idc =0;
+ uint32_t frame_structure =0;
+
+ //fill h264_dpb_init: picture dimensions in macroblock units
+ data = PUT_FRAME_WIDTH_MB_BITS(pInfo->dpb.PicWidthInMbs) +
+ PUT_FRAME_HEIGHT_MB_BITS(pInfo->dpb.FrameHeightInMbs);
+
+ p_pic_data->h264_dpb_init = data;
+
+ ////////////////////////////////file current pic info
+ data = 0;
+ dec_idc = pInfo->dpb.fs_dec_idc;
+ frame_structure = pInfo->img.structure;
+ /* The 2-bit hardware structure field encodes FRAME as 0. */
+ if(frame_structure == FRAME)
+ frame_structure=0;
+ //data = PUT_BSD_IMAGE_FRAME_STORE_IDC_BITS(pInfo->dpb.fs[dec_idc].fs_idc);
+
+ //p_pic_data->h264_cur_bsd_img_init= data;
+
+ data = PUT_BSD_IMAGE_STRUCTURE_BITS(frame_structure) +
+ PUT_BSD_IMAGE_IDR_BIT(pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR) +
+ PUT_BSD_IMAGE_MBAFF_FRAME_FLAG_BIT(pInfo->img.MbaffFrameFlag) +
+ PUT_BSD_IMAGE_ENTROPY_CODING_MODE_FLAG_BIT(pInfo->active_PPS.entropy_coding_mode_flag) +
+ PUT_BSD_IMAGE_CONSTRAINED_INTRA_PRED_FLAG_BIT(pInfo->active_PPS.constrained_intra_pred_flag) +
+ PUT_BSD_IMG_FRAME_MBS_ONLY_FLAG_BIT(pInfo->active_SPS.sps_disp.frame_mbs_only_flag) +
+ PUT_BSD_IMG_DIRECT_8X8_INFER_FLAG_BIT(pInfo->active_SPS.sps_disp.direct_8x8_inference_flag) +
+ PUT_HPD_BSD_IMG_TRANSFORM_8X8_MODE_FLAG_BIT(pInfo->active_PPS.transform_8x8_mode_flag) +
+ PUT_HPD_BSD_IMG_MONOCHROME_FLAG_BIT(((pInfo->active_SPS.sps_disp.chroma_format_idc==0)? 0x1: 0x0)) +
+ PUT_HPD_BSD_IMG_GREY_NONEXISTING_FLAG_BIT(0x0) +
+ PUT_HPD_BSD_IMG_QM_PRESENT_FLAG_BIT((pInfo->active_PPS.pic_scaling_matrix_present_flag||pInfo->active_SPS.seq_scaling_matrix_present_flag)) +
+ PUT_HPD_BSD_IMG_QM_LIST_FLAGS_BITS(pInfo->qm_present_list) +
+ PUT_HPD_BSD_IMG_MONOCHROME_PWT_FLAG_BIT(0x1) +
+ PUT_BSD_IMAGE_FRAME_STORE_IDC_BITS(pInfo->dpb.fs[dec_idc].fs_idc);
+
+ p_pic_data->h264_cur_bsd_img_init= data;
+
+ //to do: add qm list
+ //PUT_HPD_BSD_IMG_QM_LIST_FLAGS_BITS(pInfo->img.q .qm_present_list) +
+ //printf("structure = %d, tpoc = %d, bpoc = %d\n", pInfo->img.structure, pInfo->img.toppoc, pInfo->img.bottompoc);
+
+ /* POCs: a frame carries both; a single field carries only its own
+ POC, with the other zeroed; unknown structure zeroes both. */
+ if(pInfo->img.structure == FRAME)
+ {
+ // Write down POC
+ p_pic_data->h264_cur_mpr_tf_poc = pInfo->img.toppoc;
+ p_pic_data->h264_cur_mpr_bf_poc = pInfo->img.bottompoc;
+ }else if (pInfo->img.structure == TOP_FIELD)
+ {
+ // Write down POC
+ p_pic_data->h264_cur_mpr_tf_poc = pInfo->img.toppoc;
+ p_pic_data->h264_cur_mpr_bf_poc = 0;
+ }
+ else if (pInfo->img.structure == BOTTOM_FIELD)
+ {
+ // Write down POC
+ p_pic_data->h264_cur_mpr_tf_poc = 0;
+ p_pic_data->h264_cur_mpr_bf_poc = pInfo->img.bottompoc;
+ }
+ else
+ {
+ // Write down POC
+ p_pic_data->h264_cur_mpr_tf_poc = 0;
+ p_pic_data->h264_cur_mpr_bf_poc = 0;
+ }
+
+ return;
+}
+
+static void h264_parse_emit_sps(void *parent, h264_Info *pInfo)
+{
+ viddec_workload_item_t wi;
+
+ if(pInfo->Is_SPS_updated)
+ {
+ viddec_fw_reset_workload_item(&wi);
+ wi.vwi_type = VIDDEC_WORKLOAD_SEQUENCE_INFO;
+
+ viddec_fw_h264_sps_set_profile_idc(&(wi.h264_sps), pInfo->active_SPS.profile_idc);
+ viddec_fw_h264_sps_set_level_idc(&(wi.h264_sps), pInfo->active_SPS.level_idc);
+ viddec_fw_h264_sps_set_chroma_format_idc(&(wi.h264_sps), pInfo->active_SPS.sps_disp.chroma_format_idc);
+ viddec_fw_h264_sps_set_num_ref_frames(&(wi.h264_sps), pInfo->active_SPS.num_ref_frames);
+ viddec_fw_h264_sps_set_gaps_in_frame_num_value_allowed_flag(&(wi.h264_sps), pInfo->active_SPS.gaps_in_frame_num_value_allowed_flag);
+ viddec_fw_h264_sps_set_frame_mbs_only_flag(&(wi.h264_sps), pInfo->active_SPS.sps_disp.frame_mbs_only_flag);
+ viddec_fw_h264_sps_set_frame_cropping_flag(&(wi.h264_sps), pInfo->active_SPS.sps_disp.frame_cropping_flag);
+ viddec_fw_h264_sps_set_vui_parameters_present_flag(&(wi.h264_sps), pInfo->active_SPS.sps_disp.vui_parameters_present_flag);
+ wi.h264_sps.pic_width_in_mbs_minus1 = pInfo->active_SPS.sps_disp.pic_width_in_mbs_minus1;
+ wi.h264_sps.pic_height_in_map_units_minus1 = pInfo->active_SPS.sps_disp.pic_height_in_map_units_minus1;
+
+ if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+ {
+ viddec_pm_append_workitem( parent, &wi );
+ }
+ else
+ {
+ viddec_pm_append_workitem_next( parent, &wi );
+ }
+
+ viddec_fw_reset_workload_item(&wi);
+ if(pInfo->active_SPS.sps_disp.frame_cropping_flag)
+ {
+ wi.vwi_type = VIDDEC_WORKLOAD_H264_CROPPING;
+ viddec_fw_h264_cropping_set_left(&(wi.h264_cropping), pInfo->active_SPS.sps_disp.frame_crop_rect_left_offset);
+ viddec_fw_h264_cropping_set_right(&(wi.h264_cropping), pInfo->active_SPS.sps_disp.frame_crop_rect_right_offset);
+ viddec_fw_h264_cropping_set_top(&(wi.h264_cropping), pInfo->active_SPS.sps_disp.frame_crop_rect_top_offset);
+ viddec_fw_h264_cropping_set_bottom(&(wi.h264_cropping), pInfo->active_SPS.sps_disp.frame_crop_rect_bottom_offset);
+
+ if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+ {
+ viddec_pm_append_workitem( parent, &wi );
+ }
+ else
+ {
+ viddec_pm_append_workitem_next( parent, &wi );
+ }
+ }
+ viddec_fw_reset_workload_item(&wi);
+ if(pInfo->active_SPS.sps_disp.vui_parameters_present_flag == 1)
+ {
+ wi.vwi_type = VIDDEC_WORKLOAD_DISPLAY_INFO;
+ viddec_fw_h264_vui_set_aspect_ratio_info_present_flag(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.aspect_ratio_info_present_flag);
+ viddec_fw_h264_vui_set_video_signal_type_present_flag(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.video_signal_type_present_flag);
+ viddec_fw_h264_vui_set_pic_struct_present_flag(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.pic_struct_present_flag);
+ viddec_fw_h264_vui_set_timing_info_present_flag(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.timing_info_present_flag);
+ viddec_fw_h264_vui_set_nal_hrd_parameters_present_flag(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.nal_hrd_parameters_present_flag);
+ viddec_fw_h264_vui_set_vcl_hrd_parameters_present_flag(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.vcl_hrd_parameters_present_flag);
+
+ if(pInfo->active_SPS.sps_disp.vui_seq_parameters.aspect_ratio_info_present_flag == 1)
+ {
+ viddec_fw_h264_vui_set_aspect_ratio_idc(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.aspect_ratio_idc);
+ if(h264_AR_Extended_SAR == pInfo->active_SPS.sps_disp.vui_seq_parameters.aspect_ratio_idc)
+ {
+ viddec_fw_h264_vui_set_sar_width(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.sar_width);
+ viddec_fw_h264_vui_set_sar_height(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.sar_height);
+ }
+ }
+
+
+ if(pInfo->active_SPS.sps_disp.vui_seq_parameters.video_signal_type_present_flag)
+ {
+ viddec_fw_h264_vui_set_colour_description_present_flag(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.colour_description_present_flag);
+ if(pInfo->active_SPS.sps_disp.vui_seq_parameters.colour_description_present_flag)
+ {
+ viddec_fw_h264_vui_set_colour_primaries(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.colour_primaries);
+ viddec_fw_h264_vui_set_transfer_characteristics(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.transfer_characteristics);
+ }
+ viddec_fw_h264_vui_set_video_format(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.video_format);
+ }
+
+ if(pInfo->active_SPS.sps_disp.vui_seq_parameters.timing_info_present_flag == 1)
+ {
+ viddec_fw_h264_vui_set_fixed_frame_rate_flag(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.fixed_frame_rate_flag);
+ }
+
+ if( (pInfo->active_SPS.sps_disp.vui_seq_parameters.nal_hrd_parameters_present_flag == 1)
+ || (pInfo->active_SPS.sps_disp.vui_seq_parameters.vcl_hrd_parameters_present_flag == 1))
+ {
+ viddec_fw_h264_vui_set_low_delay_hrd_flag(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.low_delay_hrd_flag);
+ }
+
+ if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+ {
+ viddec_pm_append_workitem( parent, &wi );
+ }
+ else
+ {
+ viddec_pm_append_workitem_next( parent, &wi );
+ }
+ }
+
+ viddec_fw_reset_workload_item(&wi);
+
+ if(pInfo->active_SPS.sps_disp.vui_seq_parameters.timing_info_present_flag == 1)
+ {
+ wi.vwi_type = VIDDEC_WORKLOAD_H264_VUI_TIMING_INFO;
+
+ wi.h264_vui_time_info.num_units_in_tick = pInfo->active_SPS.sps_disp.vui_seq_parameters.num_units_in_tick;
+ wi.h264_vui_time_info.time_scale = pInfo->active_SPS.sps_disp.vui_seq_parameters.time_scale;
+ if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+ {
+ viddec_pm_append_workitem( parent, &wi );
+ }
+ else
+ {
+ viddec_pm_append_workitem_next( parent, &wi );
+ }
+ }
+
+
+ pInfo->Is_SPS_updated =0;
+
+ }
+
+ return;
+}
+
+
+
+
+static void h264_parse_emit_ref_list( void *parent, h264_Info *pInfo, uint32_t list_id)
+{
+ uint32_t i=0, nitems=0, byte_index=0, data=0, data_writed=0;
+ uint8_t *p_list;
+ viddec_workload_item_t wi;
+
+ if(0 == list_id)
+ {
+ wi.vwi_type = VIDDEC_WORKLOAD_H264_REFR_LIST_0;
+
+ if( (h264_PtypeB==pInfo->SliceHeader.slice_type)||(h264_PtypeP==pInfo->SliceHeader.slice_type) )
+ {
+ nitems = pInfo->SliceHeader.num_ref_idx_l0_active;
+ if(pInfo->SliceHeader.sh_refpic_l0.ref_pic_list_reordering_flag)
+ {
+ p_list = pInfo->slice_ref_list0;
+ }
+ else
+ {
+ p_list = pInfo->dpb.listX_0;
+ }
+ }
+ else
+ {
+ nitems =0;
+ p_list = pInfo->dpb.listX_0;
+ }
+ }
+ else
+ {
+ wi.vwi_type = VIDDEC_WORKLOAD_H264_REFR_LIST_1;
+
+ if( h264_PtypeB==pInfo->SliceHeader.slice_type)
+ {
+ nitems = pInfo->SliceHeader.num_ref_idx_l1_active;
+ if(pInfo->SliceHeader.sh_refpic_l1.ref_pic_list_reordering_flag)
+ {
+ p_list = pInfo->slice_ref_list1;
+ }
+ else
+ {
+ p_list = pInfo->dpb.listX_1;
+ }
+ }
+ else
+ {
+ nitems = 0;
+ p_list = pInfo->dpb.listX_1;
+ }
+
+ }
+
+ if(0 == nitems)
+ {
+ return;
+ }
+
+ byte_index =0;
+ data_writed=0;
+
+
+ for (i=0; i < 32; i++)
+ {
+ if(byte_index == 0) data = 0;
+
+ if(i<nitems)
+ {
+ if( viddec_h264_get_is_non_existent(&(pInfo->dpb.fs[ (p_list[i]&0x1f) ])))
+ {
+ data |= (pInfo->h264_list_replacement) << byte_index;
+ }
+ else
+ {
+ data |= (p_list[i] & 0x7f) << byte_index;
+ }
+ }
+ else
+ {
+ data |= (0x80) << byte_index;
+ }
+
+
+ if(byte_index == 24)
+ {
+ byte_index = 0;
+ wi.data.data_offset = data_writed&(~0x1);
+ wi.data.data_payload[data_writed&0x1]=data;
+
+ data =0;
+
+ if(data_writed&0x1)
+ {
+ if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+ {
+ viddec_pm_append_workitem( parent, &wi );
+ }
+ else
+ {
+ viddec_pm_append_workitem_next( parent, &wi );
+ }
+ }
+ data_writed ++;
+ }
+ else
+ {
+ byte_index += 8;
+ }
+ }
+
+}
+
+
+
+void h264_parse_emit_current_slice( void *parent, h264_Info *pInfo )
+{
+
+ viddec_workload_item_t wi;
+ h264_slice_data slice_data;
+
+ uint32_t i=0, nitems=0, data=0;
+ uint32_t bits_offset =0, byte_offset =0;
+ uint8_t is_emul =0;
+
+ ////////////////////// Update frame attributes/////////////////
+ h264_parse_update_frame_attributes(parent,pInfo);
+
+
+ if(pInfo->SliceHeader.sh_error) {
+ // Error type definition, refer to viddec_fw_common_defs.h
+ // if error in top field, VIDDEC_FW_WORKLOAD_ERR_TOPFIELD = (1 << 17)
+ // if error in bottom field, VIDDEC_FW_WORKLOAD_ERR_BOTTOMFIELD = (1 << 18)
+ // if this is frame based, both 2 bits should be set
+
+ if(pInfo->push_to_cur) {
+ pInfo->wl_err_curr |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE;
+ pInfo->wl_err_curr |= (pInfo->SliceHeader.structure << FIELD_ERR_OFFSET);
+ } else {
+ pInfo->wl_err_next |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE;
+ pInfo->wl_err_next |= (pInfo->SliceHeader.structure << FIELD_ERR_OFFSET);
+ }
+ }
+
+
+ ////////////////////// Update Reference list //////////////////
+ if( (h264_PtypeB==pInfo->SliceHeader.slice_type)||(h264_PtypeP==pInfo->SliceHeader.slice_type) )
+ {
+ if(pInfo->SliceHeader.sh_refpic_l0.ref_pic_list_reordering_flag)
+ {
+ nitems = pInfo->SliceHeader.num_ref_idx_l0_active;
+
+ for(i=0; i<nitems;i++)
+ {
+ if(viddec_h264_get_is_non_existent(&(pInfo->dpb.fs[pInfo->slice_ref_list0[i]&0x1f]))==0)
+ {
+ pInfo->h264_list_replacement = (pInfo->slice_ref_list0[i]&0xFF)|0x80;
+ break;
+ }
+ }
+ }
+ else
+ {
+ nitems = pInfo->dpb.listXsize[0];
+
+ for(i=0; i<nitems;i++)
+ {
+ if(viddec_h264_get_is_non_existent(&(pInfo->dpb.fs[pInfo->dpb.listX_0[i]&0x1f]))==0)
+ {
+ pInfo->h264_list_replacement = (pInfo->dpb.listX_0[i]&0xFF)|0x80;
+ break;
+ }
+ }
+ }
+
+ }
+ else
+ {
+ nitems =0;
+ }
+	/////fill ref list 0
+ h264_parse_emit_ref_list(parent, pInfo, 0);
+
+	/////fill ref list 1
+ h264_parse_emit_ref_list(parent, pInfo, 1);
+
+ ///////////////////////////////////// Slice Data ////////////////////////////////
+ h264_fill_slice_data(pInfo, &slice_data);
+
+ wi.vwi_type = VIDDEC_WORKLOAD_H264_SLICE_REG;
+
+ wi.data.data_offset = slice_data.h264_bsd_slice_start;
+ wi.data.data_payload[0] = slice_data.h264_bsd_slice_p1;
+ wi.data.data_payload[1] = slice_data.h264_bsd_slice_p2;
+
+ if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+ {
+ viddec_pm_append_workitem( parent , &wi);
+ }
+ else
+ {
+ viddec_pm_append_workitem_next( parent , &wi);
+ }
+
+
+ ///////////////////////////predict weight table item and data if have///////////////////////////
+ if(pInfo->h264_pwt_enabled)
+ {
+ wi.vwi_type = VIDDEC_WORKLOAD_H264_PWT_BITS_OFFSET;
+ wi.data.data_offset = pInfo->h264_pwt_end_byte_offset- pInfo->h264_pwt_start_byte_offset+1;
+ wi.data.data_payload[0] = pInfo->h264_pwt_start_bit_offset;
+ wi.data.data_payload[1] = pInfo->h264_pwt_end_bit_offset;
+
+ if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+ {
+ viddec_pm_append_workitem( parent , &wi);
+
+ wi.vwi_type = VIDDEC_WORKLOAD_H264_PWT_ES_BYTES;
+ wi.es.es_flags = 0;
+ viddec_pm_append_misc_tags(parent, pInfo->h264_pwt_start_byte_offset, pInfo->h264_pwt_end_byte_offset,&wi,1);
+ }
+ else
+ {
+ viddec_pm_append_workitem_next( parent , &wi);
+
+ wi.vwi_type = VIDDEC_WORKLOAD_H264_PWT_ES_BYTES;
+ wi.es.es_flags = 0;
+ viddec_pm_append_misc_tags(parent, pInfo->h264_pwt_start_byte_offset, pInfo->h264_pwt_end_byte_offset,&wi,0);
+ }
+ }
+
+
+ ////////////////////////////////// Update ES Buffer for Slice ///////////////////////
+ viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul);
+
+ //OS_INFO("DEBUG---entropy_coding_mode_flag:%d, bits_offset: %d\n", pInfo->active_PPS.entropy_coding_mode_flag, bits_offset);
+
+ if(pInfo->active_PPS.entropy_coding_mode_flag)
+ {
+ if(0!=bits_offset) {
+ viddec_pm_get_bits(parent, &data, 8-bits_offset);
+ }
+ }
+ else
+ {
+ if(0!=bits_offset) {
+ wi.vwi_type = VIDDEC_WORKLOAD_H264_SH_BITS_OFFSET;
+ wi.data.data_offset = bits_offset;
+ wi.data.data_payload[0]=0;
+ wi.data.data_payload[1]=0;
+
+ if(pInfo->push_to_cur) { //cur is empty, fill new frame in cur
+ viddec_pm_append_workitem( parent , &wi);
+ }
+ else {
+ viddec_pm_append_workitem_next( parent , &wi);
+ }
+ }
+ }
+
+ if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+ {
+ viddec_pm_append_pixeldata( parent );
+ }
+ else
+ {
+ viddec_pm_append_pixeldata_next( parent);
+ }
+
+ return;
+}
+
+
+void h264_parse_emit_current_pic( void *parent, h264_Info *pInfo )
+{
+
+ viddec_workload_item_t wi;
+
+ const uint32_t *pl;
+ uint32_t i=0,nitems=0;
+
+ h264_pic_data pic_data;
+
+ pInfo->qm_present_list=0;
+
+ h264_parse_emit_4X4_scaling_matrix(parent, pInfo);
+ h264_parse_emit_8X8_scaling_matrix(parent, pInfo);
+
+ h264_fill_pic_data(pInfo, &pic_data);
+
+ // How many payloads must be generated
+ nitems = (sizeof(h264_pic_data) + 7) / 8; // In QWORDs rounded up
+
+ pl = (const uint32_t *) &pic_data;
+
+	// Dump picture data to an array of workitems. NOTE(review): nitems is rounded up to whole QWORDs, so on the last iteration pl[1] may read past the end of pic_data when sizeof(h264_pic_data) is not a multiple of 8 — confirm struct padding makes this safe.
+ for( i = 0; i < nitems; i++ )
+ {
+ wi.vwi_type = VIDDEC_WORKLOAD_H264_PIC_REG;
+ wi.data.data_offset = (unsigned int)pl - (unsigned int)&pic_data; // offset within struct
+ wi.data.data_payload[0] = pl[0];
+ wi.data.data_payload[1] = pl[1];
+ pl += 2;
+
+ if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+ {
+
+ viddec_pm_append_workitem( parent, &wi );
+ }
+ else
+ {
+ viddec_pm_append_workitem_next( parent, &wi );
+ }
+ }
+
+ return;
+}
+
+void h264_parse_emit_start_new_frame( void *parent, h264_Info *pInfo )
+{
+
+ viddec_workload_item_t wi;
+ uint32_t i=0,nitems=0;
+
+ ///////////////////////// Frame attributes//////////////////////////
+
+ //Push data into current workload if first frame or frame_boundary already detected by non slice nal
+ if( (pInfo->Is_first_frame_in_stream)||(pInfo->is_frame_boundary_detected_by_non_slice_nal))
+ {
+ viddec_workload_t *wl_cur = viddec_pm_get_header( parent );
+ //pInfo->img.g_new_frame = 0;
+ pInfo->Is_first_frame_in_stream =0;
+ pInfo->is_frame_boundary_detected_by_non_slice_nal=0;
+ pInfo->push_to_cur = 1;
+ h264_translate_parser_info_to_frame_attributes(wl_cur, pInfo);
+ }
+ else // move to cur if frame boundary detected by previous non slice nal, or move to next if not
+ {
+ viddec_workload_t *wl_next = viddec_pm_get_next_header (parent);
+
+ pInfo->push_to_cur = 0;
+ h264_translate_parser_info_to_frame_attributes(wl_next, pInfo);
+
+ pInfo->is_current_workload_done=1;
+ }
+
+ ///////////////////// SPS/////////////////////
+ h264_parse_emit_sps(parent, pInfo);
+
+ /////////////////////display frames/////////////////////
+ nitems = pInfo->dpb.frame_numbers_need_to_be_displayed;
+
+ for(i=0; i<nitems; i++)
+ {
+ wi.vwi_type = VIDDEC_WORKLOAD_REF_FRAME_DISPLAY_0 + pInfo->dpb.frame_id_need_to_be_displayed[i];
+ wi.ref_frame.reference_id = pInfo->dpb.frame_id_need_to_be_displayed[i];
+ wi.ref_frame.luma_phys_addr = 0;
+ wi.ref_frame.chroma_phys_addr = 0;
+
+ if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+ {
+ viddec_pm_append_workitem( parent, &wi );
+ }
+ else
+ {
+ viddec_pm_append_workitem_next( parent, &wi );
+ }
+ }
+ pInfo->dpb.frame_numbers_need_to_be_displayed =0;
+
+
+ /////////////////////release frames/////////////////////
+ nitems = pInfo->dpb.frame_numbers_need_to_be_removed;
+
+ for(i=0; i<nitems; i++)
+ {
+ wi.vwi_type = VIDDEC_WORKLOAD_REF_FRAME_RELEASE_0 + pInfo->dpb.frame_id_need_to_be_removed[i];
+ wi.ref_frame.reference_id = pInfo->dpb.frame_id_need_to_be_removed[i];
+ wi.ref_frame.luma_phys_addr = 0;
+ wi.ref_frame.chroma_phys_addr = 0;
+
+ if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+ {
+ viddec_pm_append_workitem( parent, &wi );
+ }
+ else
+ {
+ viddec_pm_append_workitem_next( parent, &wi );
+ }
+
+ }
+ pInfo->dpb.frame_numbers_need_to_be_removed =0;
+
+	/////////////////////flush frames (do not display)/////////////////////
+ nitems = pInfo->dpb.frame_numbers_need_to_be_dropped;
+
+ for(i=0; i<nitems; i++)
+ {
+ wi.vwi_type = VIDDEC_WORKLOAD_REF_FRAME_DROPOUT_0 + pInfo->dpb.frame_id_need_to_be_dropped[i];
+ wi.ref_frame.reference_id = pInfo->dpb.frame_id_need_to_be_dropped[i];
+ wi.ref_frame.luma_phys_addr = 0;
+ wi.ref_frame.chroma_phys_addr = 0;
+
+ if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+ {
+ viddec_pm_append_workitem( parent, &wi );
+ }
+ else
+ {
+ viddec_pm_append_workitem_next( parent, &wi );
+ }
+
+ }
+ pInfo->dpb.frame_numbers_need_to_be_dropped =0;
+
+	/////////////////////update DPB frames/////////////////////
+ nitems = pInfo->dpb.used_size;
+ for(i=0; i<nitems; i++)
+ {
+ uint8_t fs_id = pInfo->dpb.fs_dpb_idc[i];
+
+ if(viddec_h264_get_is_non_existent(&(pInfo->dpb.fs[fs_id])) == 0)
+ {
+ wi.vwi_type = VIDDEC_WORKLOAD_DPB_ACTIVE_FRAME_0+fs_id;
+ wi.ref_frame.reference_id = fs_id;
+ wi.ref_frame.luma_phys_addr = 0;
+ wi.ref_frame.chroma_phys_addr = 0;
+
+ if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+ {
+ viddec_pm_append_workitem( parent, &wi );
+ }
+ else
+ {
+ viddec_pm_append_workitem_next( parent, &wi );
+ }
+ }
+ }
+
+
+	/////////////////////update DPB frames info (POC)/////////////////////
+ nitems = pInfo->dpb.used_size;
+ for(i=0; i<nitems; i++)
+ {
+ uint8_t fs_id = pInfo->dpb.fs_dpb_idc[i];
+
+ if(viddec_h264_get_is_non_existent(&(pInfo->dpb.fs[fs_id])) == 0)
+ {
+ wi.vwi_type = VIDDEC_WORKLOAD_H264_DPB_FRAME_POC;
+ wi.data.data_offset = fs_id;
+ //printf("is_used = %d, tpoc = %d, bpoc = %d\n", pInfo->dpb.fs[fs_id].is_used, pInfo->dpb.fs[fs_id].top_field.poc, pInfo->dpb.fs[fs_id].bottom_field.poc);
+
+ switch(viddec_h264_get_is_used(&(pInfo->dpb.fs[fs_id])))
+ {
+ case (FRAME):{
+ wi.data.data_payload[0] = pInfo->dpb.fs[fs_id].top_field.poc;
+ wi.data.data_payload[1] = pInfo->dpb.fs[fs_id].bottom_field.poc;
+ break;
+ };
+
+ case (TOP_FIELD):{
+ wi.data.data_payload[0] = pInfo->dpb.fs[fs_id].top_field.poc;
+ wi.data.data_payload[1] = 0;
+ break;
+ };
+
+ case (BOTTOM_FIELD):{
+ wi.data.data_payload[0] = 0;
+ wi.data.data_payload[1] = pInfo->dpb.fs[fs_id].bottom_field.poc;
+ break;
+ };
+
+ default : {
+ wi.data.data_payload[0] = 0;
+ wi.data.data_payload[1] = 0;
+ break;
+ };
+ }
+
+
+ if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+ {
+ viddec_pm_append_workitem( parent, &wi );
+ }
+ else
+ {
+ viddec_pm_append_workitem_next( parent, &wi );
+ }
+
+ }
+ }
+
+ /////////////////////Alloc buffer for current Existing frame/////////////////////
+ if(0!=pInfo->dpb.frame_numbers_need_to_be_allocated)
+ {
+ if(pInfo->push_to_cur)
+ {
+ viddec_workload_t *wl_cur = viddec_pm_get_header (parent);
+ wl_cur->is_reference_frame |= WORKLOAD_REFERENCE_FRAME | (pInfo->dpb.frame_id_need_to_be_allocated & 0x1f);
+ }
+ else
+ {
+ viddec_workload_t *wl_next = viddec_pm_get_next_header (parent);
+ wl_next->is_reference_frame |= WORKLOAD_REFERENCE_FRAME | (pInfo->dpb.frame_id_need_to_be_allocated & 0x1f);
+ }
+ }
+ pInfo->dpb.frame_numbers_need_to_be_allocated =0;
+
+ return;
+}
+
+
+
+void h264_parse_emit_eos( void *parent, h264_Info *pInfo )
+{
+
+ uint32_t nitems=0, i=0;
+ viddec_workload_item_t wi;
+
+
+ wi.vwi_type = VIDDEC_WORKLOAD_EOS_BEGIN_BOUNDARY;
+ wi.ref_frame.reference_id = 0;
+ wi.ref_frame.luma_phys_addr = 0;
+ wi.ref_frame.chroma_phys_addr = 0;
+
+ if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+ {
+ viddec_pm_append_workitem( parent, &wi );
+ }
+ else
+ {
+ viddec_pm_append_workitem_next( parent, &wi );
+ }
+
+ ////
+	//// Now we can flush out all frames in DPB for display
+
+ if(MPD_DPB_FS_NULL_IDC != pInfo->dpb.fs_dec_idc)
+ {
+ if(viddec_h264_get_is_used(&(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc])) != 3)
+ {
+ h264_dpb_mark_dangling_field(&pInfo->dpb, pInfo->dpb.fs_dec_idc); //, DANGLING_TYPE_GAP_IN_FRAME
+ }
+ }
+
+
+ h264_dpb_store_previous_picture_in_dpb(pInfo, 0,0);
+ h264_dpb_flush_dpb(pInfo, 1, 0, pInfo->active_SPS.num_ref_frames);
+
+
+ /////////////////////display frames/////////////////////
+ nitems = pInfo->dpb.frame_numbers_need_to_be_displayed;
+
+ for(i=0; i<nitems; i++)
+ {
+ wi.vwi_type = VIDDEC_WORKLOAD_EOS_DISPLAY_FRAME_0 + pInfo->dpb.frame_id_need_to_be_displayed[i];
+ wi.ref_frame.reference_id = pInfo->dpb.frame_id_need_to_be_displayed[i];
+ wi.ref_frame.luma_phys_addr = 0;
+ wi.ref_frame.chroma_phys_addr = 0;
+
+ if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+ {
+ viddec_pm_append_workitem( parent, &wi );
+ }
+ else
+ {
+ viddec_pm_append_workitem_next( parent, &wi );
+ }
+ }
+ pInfo->dpb.frame_numbers_need_to_be_displayed =0;
+
+
+ /////////////////////release frames/////////////////////
+ nitems = pInfo->dpb.frame_numbers_need_to_be_removed;
+
+ for(i=0; i<nitems; i++)
+ {
+ wi.vwi_type = VIDDEC_WORKLOAD_EOS_RELEASE_FRAME_0 + pInfo->dpb.frame_id_need_to_be_removed[i];
+ wi.ref_frame.reference_id = pInfo->dpb.frame_id_need_to_be_removed[i];
+ wi.ref_frame.luma_phys_addr = 0;
+ wi.ref_frame.chroma_phys_addr = 0;
+
+ if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+ {
+ viddec_pm_append_workitem( parent, &wi );
+ viddec_pm_set_next_frame_error_on_eos(parent, VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE);
+ }
+ else
+ {
+ viddec_pm_append_workitem_next( parent, &wi );
+ viddec_pm_set_next_frame_error_on_eos(parent, pInfo->wl_err_next);
+ }
+ }
+ pInfo->dpb.frame_numbers_need_to_be_removed =0;
+
+ return;
+}
+
+
+
+
+
+
diff --git a/mix_vbp/viddec_fw/fw/codecs/mp2/include/mpeg2.h b/mix_vbp/viddec_fw/fw/codecs/mp2/include/mpeg2.h
new file mode 100644
index 0000000..aa2a712
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/mp2/include/mpeg2.h
@@ -0,0 +1,195 @@
+#ifndef _MPEG2_H
+#define _MPEG2_H
+
+/**
+ * mpeg2.h
+ * -------
+ * This file contains all the necessary enumerations and structures needed from
+ * the MPEG-2 Specification.
+ */
+
+/* Max Pan-Scan offsets */
+#define MPEG2_MAX_VID_OFFSETS 3
+
+/* Quantization matrix size */
+#define MPEG2_QUANT_MAT_SIZE 64
+
+/* MPEG2 Start Code Values */
+typedef enum {
+ MPEG2_SC_PICTURE = 0x00,
+ MPEG2_SC_SLICE_HDR = 0x01,
+ MPEG2_SC_SLICE_MIN = 0x01,
+ MPEG2_SC_SLICE_MAX = 0xAF,
+ MPEG2_SC_USER_DATA = 0xB2,
+ MPEG2_SC_SEQ_HDR = 0xB3,
+ MPEG2_SC_SEQ_ERR = 0xB4,
+ MPEG2_SC_EXT = 0xB5,
+ MPEG2_SC_SEQ_END = 0xB7,
+ MPEG2_SC_GROUP = 0xB8,
+ MPEG2_SC_SYS_MIN = 0xB9,
+ MPEG2_SC_SYS_MAX = 0xFF,
+ MPEG2_SC_ALL = 0xFF
+} mpeg2_start_codes;
+
+/* MPEG2 Extension Start Code ID */
+typedef enum {
+ MPEG2_EXT_SEQ = 1,
+ MPEG2_EXT_SEQ_DISP = 2,
+ MPEG2_EXT_QUANT_MAT = 3,
+ MPEG2_EXT_COPYRIGHT = 4,
+ MPEG2_EXT_SEQ_SCAL = 5,
+ MPEG2_EXT_PIC_DISP = 7,
+ MPEG2_EXT_PIC_CODING = 8,
+ MPEG2_EXT_PIC_SPA_SCAL = 9,
+ MPEG2_EXT_PIC_TEMP_SCAL = 10,
+ MPEG2_EXT_ALL = 11
+} mpeg2_ext_start_codes;
+
+/* MPEG2 Picture Coding Type Values */
+typedef enum {
+ MPEG2_PC_TYPE_FORBIDDEN = 0,
+ MPEG2_PC_TYPE_I = 1,
+ MPEG2_PC_TYPE_P = 2,
+ MPEG2_PC_TYPE_B = 3
+} mpeg2_picture_type;
+
+/* MPEG2 Picture Structure Type Values */
+typedef enum {
+ MPEG2_PIC_STRUCT_RESERVED = 0,
+ MPEG2_PIC_STRUCT_TOP = 1,
+ MPEG2_PIC_STRUCT_BOTTOM = 2,
+ MPEG2_PIC_STRUCT_FRAME = 3
+} mpeg2_picture_structure;
+
+/* MPEG2 Chroma Format Values */
+typedef enum {
+ MPEG2_CF_RESERVED = 0,
+ MPEG2_CF_420 = 1,
+ MPEG2_CF_422 = 2,
+ MPEG2_CF_444 = 3
+} mpeg2_chroma_format;
+
+/* MPEG2 Parser Structures */
+/* Sequence Header Info */
+struct mpeg2_sequence_hdr_info
+{
+ uint32_t horizontal_size_value;
+ uint32_t vertical_size_value;
+ uint32_t aspect_ratio_information;
+ uint32_t frame_rate_code;
+ uint32_t bit_rate_value;
+ uint32_t vbv_buffer_size_value;
+ uint32_t constrained_parameters_flag;
+};
+
+/* Group of Pictures Header Info */
+struct mpeg2_gop_hdr_info
+{
+ uint32_t closed_gop;
+ uint32_t broken_link;
+};
+
+/* Picture Header */
+struct mpeg2_picture_hdr_info
+{
+ uint32_t temporal_reference;
+ uint32_t picture_coding_type;
+ uint32_t full_pel_forward_vect;
+ uint32_t forward_f_code;
+ uint32_t full_pel_backward_vect;
+ uint32_t backward_f_code;
+};
+
+/* Sequence Extension Info */
+struct mpeg2_sequence_ext_info
+{
+ uint32_t profile_and_level_indication;
+ uint32_t progressive_sequence;
+ uint32_t chroma_format;
+ uint32_t horizontal_size_extension;
+ uint32_t vertical_size_extension;
+ uint32_t bit_rate_extension;
+ uint32_t vbv_buffer_size_extension;
+ uint32_t frame_rate_extension_n;
+ uint32_t frame_rate_extension_d;
+};
+
+/* Sequence Display Extension Info */
+struct mpeg2_sequence_disp_ext_info
+{
+ uint32_t video_format;
+ uint32_t colour_description;
+ uint32_t colour_primaries;
+ uint32_t transfer_characteristics;
+ uint32_t display_horizontal_size;
+ uint32_t display_vertical_size;
+};
+
+/* Sequence scalable extension Info */
+struct mpeg2_sequence_scal_ext_info
+{
+ uint32_t scalable_mode;
+};
+
+/* Picture Coding Extension */
+struct mpeg2_picture_coding_ext_info
+{
+ uint32_t fcode00;
+ uint32_t fcode01;
+ uint32_t fcode10;
+ uint32_t fcode11;
+ uint32_t intra_dc_precision;
+ uint32_t picture_structure;
+ uint32_t top_field_first;
+ uint32_t frame_pred_frame_dct;
+ uint32_t concealment_motion_vectors;
+ uint32_t q_scale_type;
+ uint32_t intra_vlc_format;
+ uint32_t alternate_scan;
+ uint32_t repeat_first_field;
+ uint32_t chroma_420_type;
+ uint32_t progressive_frame;
+ uint32_t composite_display_flag;
+};
+
+/* Picture Display Extension */
+struct mpeg2_picture_disp_ext_info
+{
+ uint32_t frame_center_horizontal_offset[MPEG2_MAX_VID_OFFSETS];
+ uint32_t frame_center_vertical_offset[MPEG2_MAX_VID_OFFSETS];
+};
+
+/* Quantization Matrix Extension */
+struct mpeg2_quant_ext_info
+{
+ uint32_t load_intra_quantiser_matrix;
+ uint32_t load_non_intra_quantiser_matrix;
+ uint32_t load_chroma_intra_quantiser_matrix;
+ uint32_t load_chroma_non_intra_quantiser_matrix;
+};
+
+/* Quantization Matrices */
+struct mpeg2_quant_matrices
+{
+ uint8_t intra_quantiser_matrix[MPEG2_QUANT_MAT_SIZE];
+ uint8_t non_intra_quantiser_matrix[MPEG2_QUANT_MAT_SIZE];
+ uint8_t chroma_intra_quantiser_matrix[MPEG2_QUANT_MAT_SIZE];
+ uint8_t chroma_non_intra_quantiser_matrix[MPEG2_QUANT_MAT_SIZE];
+};
+
+/* MPEG2 Info */
+struct mpeg2_info
+{
+ struct mpeg2_sequence_hdr_info seq_hdr;
+ struct mpeg2_gop_hdr_info gop_hdr;
+ struct mpeg2_picture_hdr_info pic_hdr;
+ struct mpeg2_sequence_ext_info seq_ext;
+ struct mpeg2_sequence_disp_ext_info seq_disp_ext;
+ struct mpeg2_sequence_scal_ext_info seq_scal_ext;
+ struct mpeg2_picture_coding_ext_info pic_cod_ext;
+ struct mpeg2_picture_disp_ext_info pic_disp_ext;
+ struct mpeg2_quant_ext_info qnt_ext;
+ struct mpeg2_quant_matrices qnt_mat;
+};
+
+#endif
diff --git a/mix_vbp/viddec_fw/fw/codecs/mp2/include/viddec_mpeg2.h b/mix_vbp/viddec_fw/fw/codecs/mp2/include/viddec_mpeg2.h
new file mode 100644
index 0000000..a6d8c2c
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/mp2/include/viddec_mpeg2.h
@@ -0,0 +1,231 @@
+#ifndef _VIDDEC_MPEG2_H
+#define _VIDDEC_MPEG2_H
+
+/**
+ * viddec_mpeg2.h
+ * --------------
+ * This header file contains all the necessary state information and function
+ * prototypes for the MPEG2 parser. This header also defines the debug macros
+ * used by the MPEG2 parser to emit debug messages in host mode.
+ */
+
+#include "viddec_fw_debug.h"
+#include "viddec_parser_ops.h"
+#include "mpeg2.h"
+
+/* Debug Print Macros */
+#define MPEG2_DEB(x...) DEB("MPEG2_Parser: "x)
+#define MPEG2_FA_DEB(x...) DEB("MPEG2_Frame_attribute: "x)
+
+/* Bit masks */
+#define MPEG2_BIT_MASK_11 0x7ff /* Used for masking Height and Width */
+#define MPEG2_BIT_MASK_8            0xff /* Used for masking start code byte */
+#define MPEG2_BIT_MASK_4 0xf /* Used for masking Level */
+#define MPEG2_BIT_MASK_3 0x7 /* Used for masking Profile */
+
+/* MPEG2 Start code and prefix size */
+#define MPEG2_SC_AND_PREFIX_SIZE 32
+
+/* Number of DMEM Workload Items */
+#define MPEG2_NUM_DMEM_WL_ITEMS 2
+
+/* Number of Quantization Matrix Workload Items */
+#define MPEG2_NUM_QMAT_WL_ITEMS 32
+
+/* Maximum supported content size */
+#define MPEG2_MAX_CONTENT_WIDTH 2048
+#define MPEG2_MAX_CONTENT_HEIGHT 2048
+
+/* Others */
+#define MPEG2_BITS_EIGHT 8
+
+
+/* MPEG2 Stream Levels */
+typedef enum {
+ MPEG2_LEVEL_SEQ = 0,
+ MPEG2_LEVEL_GOP,
+ MPEG2_LEVEL_PIC
+} mpeg2_stream_levels;
+
+/* MPEG2 Headers and Extensions */
+typedef enum {
+ MPEG2_HEADER_NONE = 0,
+ MPEG2_HEADER_SEQ = 1 << 0,
+ MPEG2_HEADER_SEQ_EXT = 1 << 1,
+ MPEG2_HEADER_SEQ_DISP_EXT = 1 << 2,
+ MPEG2_HEADER_GOP = 1 << 3,
+ MPEG2_HEADER_PIC = 1 << 4,
+ MPEG2_HEADER_PIC_COD_EXT = 1 << 5,
+ MPEG2_HEADER_PIC_DISP_EXT = 1 << 6,
+ MPEG2_HEADER_SEQ_SCAL_EXT = 1 << 7
+} mpeg2_headers;
+
+/* MPEG2 Parser Status Codes */
+typedef enum {
+ MPEG2_SUCCESS = 0, /* No error */
+ MPEG2_FRAME_COMPLETE = 1, /* Frame parsing complete found */
+ MPEG2_PARSE_ERROR = 2, /* Failure in parsing */
+} mpeg2_status;
+
+/* MPEG2 Current Workload Status Codes */
+typedef enum {
+ MPEG2_WL_EMPTY = 0,
+ MPEG2_WL_DMEM_DATA = (1 << 0),
+ MPEG2_WL_REF_INFO = (1 << 1),
+ MPEG2_WL_PARTIAL_SLICE = (1 << 2),
+ MPEG2_WL_DANGLING_FIELD = (1 << 3),
+ MPEG2_WL_COMPLETE = (1 << 4),
+ MPEG2_WL_MISSING_TF = (1 << 5),
+ MPEG2_WL_MISSING_BF = (1 << 6),
+ MPEG2_WL_UNSUPPORTED = (1 << 7),
+ /* Error codes */
+ MPEG2_WL_CORRUPTED_SEQ_HDR = (1 << 8),
+ MPEG2_WL_CORRUPTED_SEQ_EXT = (1 << 9),
+ MPEG2_WL_CORRUPTED_SEQ_DISP_EXT = (1 << 10),
+ MPEG2_WL_CORRUPTED_GOP_HDR = (1 << 11),
+ MPEG2_WL_CORRUPTED_PIC_HDR = (1 << 12),
+ MPEG2_WL_CORRUPTED_PIC_COD_EXT = (1 << 13),
+ MPEG2_WL_CORRUPTED_PIC_DISP_EXT = (1 << 14),
+ MPEG2_WL_CORRUPTED_QMAT_EXT = (1 << 15),
+ /* Error concealment codes */
+ MPEG2_WL_CONCEALED_PIC_COD_TYPE = (1 << 16),
+ MPEG2_WL_CONCEALED_PIC_STRUCT = (1 << 17),
+ MPEG2_WL_CONCEALED_CHROMA_FMT = (1 << 18),
+ /* Type of dangling field */
+ MPEG2_WL_DANGLING_FIELD_TOP = (1 << 24),
+ MPEG2_WL_DANGLING_FIELD_BOTTOM = (1 << 25),
+ MPEG2_WL_REPEAT_FIELD = (1 << 26),
+} mpeg2_wl_status_codes;
+
+/* MPEG2 Parser Workload types */
+typedef enum
+{
+ /* MPEG2 Decoder Specific data */
+ VIDDEC_WORKLOAD_MPEG2_DMEM = VIDDEC_WORKLOAD_DECODER_SPECIFIC,
+
+ /* MPEG2 Quantization Matrix data */
+ VIDDEC_WORKLOAD_MPEG2_QMAT,
+
+ /* Past reference frame */
+ VIDDEC_WORKLOAD_MPEG2_REF_PAST = VIDDEC_WORKLOAD_REF_FRAME_SOURCE_0,
+
+ /* Future reference frame */
+ VIDDEC_WORKLOAD_MPEG2_REF_FUTURE,
+
+ /* Use current frame as reference */
+ VIDDEC_WORKLOAD_MPEG2_REF_CURRENT_FRAME,
+
+ /* User Data */
+ VIDDEC_WORKLOAD_MPEG2_USERDATA = VIDDEC_WORKLOAD_USERDATA
+} viddec_mpeg2_workloads;
+
+/* MPEG2 Decoder Specific Workitems */
+struct mpeg2_workitems
+{
+ /* Core Sequence Info 1 */
+ uint32_t csi1;
+
+ /* Core Sequence Info 2 */
+ uint32_t csi2;
+
+ /* Core Picture Info 1 */
+ uint32_t cpi1;
+
+ /* Core Picture Coding Extension Info 1 */
+ uint32_t cpce1;
+
+ /* Quantization Matrices */
+ /* 0-15: Intra Quantization Matrix */
+ /* 16-31: Non-Intra Quantization Matrix */
+ /* 32-47: Chroma Intra Quantization Matrix */
+ /* 48-63: Chroma Non-Intra Quantization Matrix */
+ uint32_t qmat[MPEG2_QUANT_MAT_SIZE];
+};
+
+/* MPEG2 Video Parser Context */
+struct viddec_mpeg2_parser
+{
+ /* MPEG2 Metadata Structure */
+ struct mpeg2_info info;
+
+ /* MPEG2 Workitems */
+ struct mpeg2_workitems wi;
+
+ /* Workload Status */
+ uint32_t mpeg2_wl_status;
+
+ /* Last parsed start code */
+ int32_t mpeg2_last_parsed_sc;
+
+ /* Last parsed slice start code. Used to start emitting workload items. */
+ int32_t mpeg2_last_parsed_slice_sc;
+
+ /* Current sequence headers parsed */
+ uint8_t mpeg2_curr_seq_headers;
+
+ /* Current frame headers parsed */
+ uint8_t mpeg2_curr_frame_headers;
+
+ /* Flag to indicate a valid sequence header was successfully parsed for */
+ /* the current stream. */
+ uint8_t mpeg2_valid_seq_hdr_parsed;
+
+ /* Flag to indicate if quantization matrices are updated */
+ uint8_t mpeg2_custom_qmat_parsed;
+
+ /* Flag to indicate if reference table is updated with an entry */
+ uint8_t mpeg2_ref_table_updated;
+
+ /* Flag to indicate if the stream is MPEG2 */
+ uint8_t mpeg2_stream;
+
+ /* Flag to indicate if the previous picture metadata is parsed */
+ uint8_t mpeg2_pic_metadata_complete;
+
+ /* Number of active pan scan offsets */
+ uint8_t mpeg2_num_pan_scan_offsets;
+
+ /* Indicates the current stream level (Sequence/GOP/Picture) */
+ /* Used for identifying the level for User Data */
+ uint8_t mpeg2_stream_level;
+
+ /* Flag to indicate if the current picture is interlaced or not */
+ uint8_t mpeg2_picture_interlaced;
+
+ /* Flag to indicate if the current field for interlaced picture is first */
+ /* field or not. This flag is used only when mpeg2_picture_interlaced is */
+ /* set to 1. */
+ uint8_t mpeg2_first_field;
+
+ /* Flag to indicate if the current parsed data has start of a frame */
+ uint8_t mpeg2_frame_start;
+
+ /* Temporal reference of the previous picture - Used to detect dangling fields */
+ uint32_t mpeg2_prev_temp_ref;
+
+ /* Previous picture structure - Used to identify the type of missing field */
+ uint8_t mpeg2_prev_picture_structure;
+
+ /* Flag to decide whether to use the current or next workload to dump workitems */
+ uint8_t mpeg2_use_next_workload;
+ uint8_t mpeg2_first_slice_flag;
+};
+
+/* External Function Declarations */
+extern void *memset(void *s, int32_t c, uint32_t n);
+
+/* MPEG2 Parser Function Prototypes */
+void viddec_mpeg2_translate_attr (void *parent, void *ctxt);
+void viddec_mpeg2_emit_workload (void *parent, void *ctxt);
+void viddec_mpeg2_parse_seq_hdr (void *parent, void *ctxt);
+void viddec_mpeg2_parse_gop_hdr (void *parent, void *ctxt);
+void viddec_mpeg2_parse_pic_hdr (void *parent, void *ctxt);
+void viddec_mpeg2_parse_and_append_user_data(void *parent, void *ctxt);
+void viddec_mpeg2_parse_and_append_slice_data(void *parent, void *ctxt);
+void viddec_mpeg2_parse_ext (void *parent, void *ctxt);
+
+/* MPEG2 wrapper functions for workload operations */
+void viddec_mpeg2_append_workitem (void *parent, viddec_workload_item_t *wi, uint8_t flag);
+void viddec_mpeg2_append_pixeldata (void *parent, uint8_t flag);
+viddec_workload_t* viddec_mpeg2_get_header (void *parent, uint8_t flag);
+#endif
diff --git a/mix_vbp/viddec_fw/fw/codecs/mp2/parser/mix_vbp_mpeg2_stubs.c b/mix_vbp/viddec_fw/fw/codecs/mp2/parser/mix_vbp_mpeg2_stubs.c
new file mode 100644
index 0000000..6aa6120
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/mp2/parser/mix_vbp_mpeg2_stubs.c
@@ -0,0 +1,32 @@
+#include "viddec_mpeg2.h"
+#include "viddec_fw_item_types.h"
+
+
+void viddec_mpeg2_append_workitem(void *parent, viddec_workload_item_t *wi, uint8_t flag)
+{
+ return;
+}
+
+void viddec_mpeg2_emit_workload(void *parent, void *ctxt)
+{
+ return;
+}
+
+void viddec_mpeg2_append_pixeldata(void *parent, uint8_t flag)
+{
+ return;
+}
+
+viddec_workload_t* viddec_mpeg2_get_header (void *parent, uint8_t flag)
+{
+ viddec_workload_t *ret;
+ if (flag)
+ {
+ ret = viddec_pm_get_next_header(parent);
+ }
+ else
+ {
+ ret = viddec_pm_get_header(parent);
+ }
+ return ret;
+}
diff --git a/mix_vbp/viddec_fw/fw/codecs/mp2/parser/viddec_mpeg2_frame_attr.c b/mix_vbp/viddec_fw/fw/codecs/mp2/parser/viddec_mpeg2_frame_attr.c
new file mode 100644
index 0000000..e33a6d6
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/mp2/parser/viddec_mpeg2_frame_attr.c
@@ -0,0 +1,114 @@
+/**
+ * viddec_mpeg2_frame_attr.c
+ * -------------------------
+ * This is a helper file for viddec_mpeg2_workload.c to translate the data
+ * stored in the parser context into frame attributes in the workload.
+ */
+
+#include "viddec_mpeg2.h"
+
+/* viddec_mpeg2_print_attr() - Prints collected frame attributes */
+static inline void viddec_mpeg2_print_attr(viddec_frame_attributes_t *attr)
+{
+ unsigned int index = 0;
+
+ MPEG2_FA_DEB("Content_Size=%dx%d\n", attr->cont_size.width,
+ attr->cont_size.height);
+ MPEG2_FA_DEB("Repeat=%d\n", attr->mpeg2.repeat_first_field);
+ MPEG2_FA_DEB("Frame_Type=%d\n", attr->frame_type);
+ MPEG2_FA_DEB("Temporal_Reference=%d\n", attr->mpeg2.temporal_ref);
+ MPEG2_FA_DEB("Top_Field_First=%d\n", attr->mpeg2.top_field_first);
+ MPEG2_FA_DEB("Progressive_Frame=%d\n", attr->mpeg2.progressive_frame);
+ MPEG2_FA_DEB("Picture_Struct=%d\n", attr->mpeg2.picture_struct);
+ MPEG2_FA_DEB("Pan_Scan_Offsets=%d\n", attr->mpeg2.number_of_frame_center_offsets);
+
+ for (index = 0; index < attr->mpeg2.number_of_frame_center_offsets; index++)
+ {
+ MPEG2_FA_DEB("\tPan_Scan_Offset_%d= %dx%d\n", index,
+ attr->mpeg2.frame_center_offset[index].horz,
+ attr->mpeg2.frame_center_offset[index].vert);
+ }
+
+ return;
+}
+
+/* viddec_mpeg2_set_default_values() - Resets attributes that are optional */
+/* in the bitstream to their default values. */
+static inline void viddec_mpeg2_set_default_values(viddec_frame_attributes_t *attrs)
+{
+ unsigned int index = 0;
+
+ attrs->mpeg2.number_of_frame_center_offsets = 0;
+ for (index = 0; index < MPEG2_MAX_VID_OFFSETS ; index++)
+ {
+ attrs->mpeg2.frame_center_offset[index].horz = 0;
+ attrs->mpeg2.frame_center_offset[index].vert = 0;
+ }
+
+ return;
+}
+
+/* viddec_mpeg2_translate_attr() - Translates metadata parsed into frame */
+/* attributes in the workload */
+void viddec_mpeg2_translate_attr(void *parent, void *ctxt)
+{
+ /* Get MPEG2 Parser context */
+ struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;
+
+ /* Get workload */
+ viddec_workload_t *wl = viddec_pm_get_header( parent );
+
+ /* Get attributes in workload */
+ viddec_frame_attributes_t *attrs = &wl->attrs;
+
+ /* Get the default values for optional attributes */
+ viddec_mpeg2_set_default_values(attrs);
+
+ /* Populate attributes from parser context */
+ /* Content Size */
+ attrs->cont_size.height = ((parser->info.seq_ext.vertical_size_extension << 12)
+ | parser->info.seq_hdr.vertical_size_value);
+ attrs->cont_size.width = ((parser->info.seq_ext.horizontal_size_extension << 12)
+ | parser->info.seq_hdr.horizontal_size_value);
+
+ /* Repeat field */
+ attrs->mpeg2.repeat_first_field = parser->info.pic_cod_ext.repeat_first_field;
+
+ /* Temporal Reference */
+ attrs->mpeg2.temporal_ref = parser->info.pic_hdr.temporal_reference;
+
+ /* Top field first */
+ attrs->mpeg2.top_field_first = parser->info.pic_cod_ext.top_field_first;
+
+ /* Progressive frame */
+ attrs->mpeg2.progressive_frame = parser->info.pic_cod_ext.progressive_frame;
+
+ /* Picture Structure */
+ attrs->mpeg2.picture_struct = parser->info.pic_cod_ext.picture_structure;
+
+ /* Populate the frame type */
+ switch (parser->info.pic_hdr.picture_coding_type)
+ {
+ case MPEG2_PC_TYPE_I: attrs->frame_type = VIDDEC_FRAME_TYPE_I; break;
+ case MPEG2_PC_TYPE_P: attrs->frame_type = VIDDEC_FRAME_TYPE_P; break;
+ case MPEG2_PC_TYPE_B: attrs->frame_type = VIDDEC_FRAME_TYPE_B; break;
+ default: attrs->frame_type = VIDDEC_FRAME_TYPE_INVALID;
+ }
+
+ /* Update PanScan data */
+ if (parser->mpeg2_curr_frame_headers & MPEG2_HEADER_PIC_DISP_EXT)
+ {
+ unsigned int index = 0;
+ attrs->mpeg2.number_of_frame_center_offsets = parser->mpeg2_num_pan_scan_offsets;
+ for (index = 0; index < parser->mpeg2_num_pan_scan_offsets; index++)
+ {
+ attrs->mpeg2.frame_center_offset[index].horz = parser->info.pic_disp_ext.frame_center_horizontal_offset[index];
+ attrs->mpeg2.frame_center_offset[index].vert = parser->info.pic_disp_ext.frame_center_vertical_offset[index];
+ }
+ }
+
+ /* Print frame attributes */
+ viddec_mpeg2_print_attr(attrs);
+
+ return;
+}
diff --git a/mix_vbp/viddec_fw/fw/codecs/mp2/parser/viddec_mpeg2_metadata.c b/mix_vbp/viddec_fw/fw/codecs/mp2/parser/viddec_mpeg2_metadata.c
new file mode 100644
index 0000000..56604a4
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/mp2/parser/viddec_mpeg2_metadata.c
@@ -0,0 +1,1039 @@
+/**
+ * viddec_mpeg2_metadata.c
+ * -----------------------
+ * This file contains all the routines to parse the information from MPEG2
+ * elementary stream and store it in the parser context. Based on the data
+ * parsed, the state information in the context is updated.
+ *
+ * Headers currently parsed from MPEG2 stream include:
+ * - Sequence Header
+ * - Sequence Extension
+ * - Sequence Display Extension
+ * - GOP Header
+ * - Picture Header
+ * - Picture Coding Extension
+ * - Quantization Matrix Extension
+ * - Picture Display Extension
+ *
+ * The slice data is parsed and appended into workload in viddec_mpeg2_parse.c
+ */
+
+#include "viddec_mpeg2.h"
+
+/* Default quantization matrix values */
+const uint8_t mpeg2_default_intra_quant_matrix[MPEG2_QUANT_MAT_SIZE] = {
+ 8, 16, 19, 22, 26, 27, 29, 34,
+ 16, 16, 22, 24, 27, 29, 34, 37,
+ 19, 22, 26, 27, 29, 34, 34, 38,
+ 22, 22, 26, 27, 29, 34, 37, 40,
+ 22, 26, 27, 29, 32, 35, 40, 48,
+ 26, 27, 29, 32, 35, 40, 48, 58,
+ 26, 27, 29, 34, 38, 46, 56, 69,
+ 27, 29, 35, 38, 46, 56, 69, 83
+};
+const uint8_t mpeg2_default_non_intra_quant_matrix[MPEG2_QUANT_MAT_SIZE] = {
+ 16, 16, 16, 16, 16, 16, 16, 16,
+ 16, 16, 16, 16, 16, 16, 16, 16,
+ 16, 16, 16, 16, 16, 16, 16, 16,
+ 16, 16, 16, 16, 16, 16, 16, 16,
+ 16, 16, 16, 16, 16, 16, 16, 16,
+ 16, 16, 16, 16, 16, 16, 16, 16,
+ 16, 16, 16, 16, 16, 16, 16, 16,
+ 16, 16, 16, 16, 16, 16, 16, 16
+};
+
+/* Matrix for converting scan order */
+const uint8_t mpeg2_classic_scan[MPEG2_QUANT_MAT_SIZE] = {
+ 0, 1, 8, 16, 9, 2, 3, 10,
+ 17, 24, 32, 25, 18, 11, 4, 5,
+ 12, 19, 26, 33, 40, 48, 41, 34,
+ 27, 20, 13, 6, 7, 14, 21, 28,
+ 35, 42, 49, 56, 57, 50, 43, 36,
+ 29, 22, 15, 23, 30, 37, 44, 51,
+ 58, 59, 52, 45, 38, 31, 39, 46,
+ 53, 60, 61, 54, 47, 55, 62, 63
+};
+const uint8_t mpeg2_alternate_scan[MPEG2_QUANT_MAT_SIZE] = {
+ 0, 8, 16, 24, 1, 9, 2, 10,
+ 17, 25, 32, 40, 48, 56, 57, 49,
+ 41, 33, 26, 18, 3, 11, 4, 12,
+ 19, 27, 34, 42, 50, 58, 35, 43,
+ 51, 59, 20, 28, 5, 13, 6, 14,
+ 21, 29, 36, 44, 52, 60, 37, 45,
+ 53, 61, 22, 30, 7, 15, 23, 31,
+ 38, 46, 54, 62, 39, 47, 55, 63
+};
+
+/* Look-up tables for macro block address increment VLC */
+const uint8_t mb_addr_inc_tab1[16] = {
+ 0, 0, 7, 6, 5, 5, 4, 4,
+ 3, 3, 3, 3, 2, 2, 2, 2
+};
+const uint8_t mb_addr_inc_tab2[8] = {
+ 13, 12, 11, 10, 9, 9, 8, 8
+};
+const uint8_t mb_addr_inc_tab3[40] = {
+ 33, 32, 31, 30, 29, 28, 27, 26,
+ 25, 24, 23, 22, 21, 21, 20, 20,
+ 19, 19, 18, 18, 17, 17, 16, 16,
+ 15, 15, 15, 15, 15, 15, 15, 15,
+ 14, 14, 14, 14, 14, 14, 14, 14
+};
+
+/* viddec_mpeg2_copy_default_matrix() - Copies quantization matrix from src */
+/* to dst */
+static inline void mpeg2_copy_matrix(const uint8_t *src, uint8_t *dst)
+{
+ register uint32_t index = 0;
+ for(index=0; index < MPEG2_QUANT_MAT_SIZE; index++)
+ dst[index] = src[index];
+}
+
+/* viddec_mpeg2_copy_matrix() - Copies next 64bytes in the stream into given */
+/* matrix */
+static inline int32_t mpeg2_get_quant_matrix(void *parent, uint8_t *matrix, uint32_t alternate_scan)
+{
+ int32_t ret = 1;
+ uint32_t index = 0, code = 0;
+ const uint8_t *zigzag_scan = (const uint8_t *) mpeg2_classic_scan;
+
+ if (alternate_scan)
+ {
+ zigzag_scan = (const uint8_t *) mpeg2_alternate_scan;
+ }
+
+ /* Start extracting matrix co-efficients and copy them in */
+ /* inverse zigzag scan order */
+ for (index = 0; index < MPEG2_QUANT_MAT_SIZE; index++)
+ {
+ ret = viddec_pm_get_bits(parent, &code, MPEG2_BITS_EIGHT);
+        /* Quantization values cannot be zero. If a zero value is found, */
+        /* further parsing is stopped and the existing values are used.*/
+ if ((ret != 1) || (code == 0))
+ {
+ ret = -1;
+ break;
+ }
+ matrix[zigzag_scan[index]] = (uint8_t)(code & 0xFF);
+ }
+
+ return ret;
+}
+
+/* viddec_mpeg2_parse_seq_hdr() - Parse sequence header metadata and store */
+/* in parser context */
+void viddec_mpeg2_parse_seq_hdr(void *parent, void *ctxt)
+{
+ int32_t ret_code = 0;
+
+ /* Get MPEG2 Parser context */
+ struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;
+
+ /* Get Horizontal Frame Size */
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_hdr.horizontal_size_value, 12);
+
+ /* Get Vertical Frame Size */
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_hdr.vertical_size_value, 12);
+
+ /* Get Frame Aspect Ratio */
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_hdr.aspect_ratio_information, 4);
+
+ /* Get Frame Rate */
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_hdr.frame_rate_code, 4);
+
+ /* Get Bit Rate */
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_hdr.bit_rate_value, 18);
+
+ /* Skip Marker bit */
+ ret_code |= viddec_pm_skip_bits(parent, 1);
+
+ /* Get VBV Buffer Size Value */
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_hdr.vbv_buffer_size_value, 10);
+
+ /* Get Constrained Parameters Flag */
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_hdr.constrained_parameters_flag, 1);
+
+ /* Quantization Matrix Support */
+ /* Get Intra Quantizer matrix, if available or use default values */
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.qnt_ext.load_intra_quantiser_matrix, 1);
+ if (parser->info.qnt_ext.load_intra_quantiser_matrix)
+ {
+ ret_code |= mpeg2_get_quant_matrix(parent, parser->info.qnt_mat.intra_quantiser_matrix, 0);
+ mpeg2_copy_matrix(parser->info.qnt_mat.intra_quantiser_matrix, parser->info.qnt_mat.chroma_intra_quantiser_matrix);
+ }
+ else
+ {
+ if (!parser->mpeg2_custom_qmat_parsed)
+ {
+ mpeg2_copy_matrix(mpeg2_default_intra_quant_matrix, parser->info.qnt_mat.intra_quantiser_matrix);
+ mpeg2_copy_matrix(mpeg2_default_intra_quant_matrix, parser->info.qnt_mat.chroma_intra_quantiser_matrix);
+ }
+ }
+
+    /* Get Non-Intra Quantizer matrix, if available or use default values */
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.qnt_ext.load_non_intra_quantiser_matrix, 1);
+ if (parser->info.qnt_ext.load_non_intra_quantiser_matrix)
+ {
+ ret_code |= mpeg2_get_quant_matrix(parent, parser->info.qnt_mat.non_intra_quantiser_matrix, 0);
+ mpeg2_copy_matrix(parser->info.qnt_mat.non_intra_quantiser_matrix, parser->info.qnt_mat.chroma_non_intra_quantiser_matrix);
+ }
+ else
+ {
+ if (!parser->mpeg2_custom_qmat_parsed)
+ {
+ mpeg2_copy_matrix(mpeg2_default_non_intra_quant_matrix, parser->info.qnt_mat.non_intra_quantiser_matrix);
+ mpeg2_copy_matrix(mpeg2_default_non_intra_quant_matrix, parser->info.qnt_mat.chroma_non_intra_quantiser_matrix);
+ }
+ }
+
+ /* Error handling */
+ /* The return value from get_bits() function is accumulated. If the return value is not 1, */
+ /* then there was an error getting the required information from the stream and the status */
+ /* is updated for the current workload. */
+ if (ret_code == 1)
+ {
+ /* This flag indicates a valid sequence header has been parsed and so even if */
+    /* a sequence header is corrupted in the future, this valid sequence header  */
+ /* could be reused. */
+ parser->mpeg2_valid_seq_hdr_parsed = true;
+ /* This flag indicates a valid custom quantization matrix has been parsed. */
+ /* So, if in the future, there is an error parsing quantization matrix, the */
+ /* parser will use the previously parsed custom values. */
+ if ((parser->info.qnt_ext.load_intra_quantiser_matrix)
+ || (parser->info.qnt_ext.load_non_intra_quantiser_matrix))
+ {
+ parser->mpeg2_custom_qmat_parsed = true;
+ }
+ MPEG2_DEB("Seqeunce header parsed successfully.\n");
+ }
+ else
+ {
+ /* Setting status to mark parser error while emitting the current workload. */
+ parser->mpeg2_wl_status |= MPEG2_WL_CORRUPTED_SEQ_HDR;
+ MPEG2_DEB("Sequence header corrupted.\n");
+ }
+
+ parser->mpeg2_stream = false;
+ parser->mpeg2_curr_seq_headers |= MPEG2_HEADER_SEQ;
+ parser->mpeg2_curr_frame_headers |= MPEG2_HEADER_SEQ;
+ parser->mpeg2_stream_level = MPEG2_LEVEL_SEQ;
+
+ return;
+}
+
+/* viddec_mpeg2_parse_gop_hdr() - Parse group of pictures header info and */
+/* store it in parser context */
+void viddec_mpeg2_parse_gop_hdr(void *parent, void *ctxt)
+{
+ int32_t ret_code = 0;
+
+ /* Get MPEG2 Parser context */
+ struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;
+
+ /* Skip first 25 bits */
+ /* Skip time_code */
+ ret_code |= viddec_pm_skip_bits(parent, 25);
+
+ /* Get closed gop info */
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.gop_hdr.closed_gop, 1);
+
+ /* Get broken link info */
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.gop_hdr.broken_link, 1);
+
+ if (ret_code == 1)
+ {
+ MPEG2_DEB("GOP Header parsed successfully.\n");
+ }
+ else
+ {
+ parser->mpeg2_wl_status |= MPEG2_WL_CORRUPTED_GOP_HDR;
+ MPEG2_DEB("GOP header corrupted.\n");
+ }
+
+ parser->mpeg2_curr_frame_headers |= MPEG2_HEADER_GOP;
+ parser->mpeg2_stream_level = MPEG2_LEVEL_GOP;
+
+ return;
+}
+
+/* viddec_mpeg2_parse_pic_hdr() - Parse picture header info and store it in */
+/* parser context */
+void viddec_mpeg2_parse_pic_hdr(void *parent, void *ctxt)
+{
+ int32_t ret_code = 0, found_error = 0;
+
+ /* Get MPEG2 Parser context */
+ struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;
+
+ /* Get Temporal Reference info */
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_hdr.temporal_reference, 10);
+
+ /* Get Picture Coding type and skip the following byte */
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_hdr.picture_coding_type, 3);
+
+ /* Error Handling and Concealment */
+ /* Picture coding type should be one I, P or B */
+ if ((parser->info.pic_hdr.picture_coding_type != MPEG2_PC_TYPE_I) &&
+ (parser->info.pic_hdr.picture_coding_type != MPEG2_PC_TYPE_P) &&
+ (parser->info.pic_hdr.picture_coding_type != MPEG2_PC_TYPE_B))
+ {
+ found_error = 1;
+ }
+ /* The first frame after a gop header should be a coded I picture as per */
+ /* section 6.3.1 in MPEG2 Specification. */
+ else if (parser->mpeg2_curr_frame_headers & MPEG2_HEADER_GOP)
+ {
+ if (parser->info.pic_hdr.picture_coding_type != MPEG2_PC_TYPE_I)
+ {
+ found_error = 1;
+ }
+ }
+ /* The first frame after a sequence header cannot be a coded B picture as per */
+ /* section 6.1.1.6 in MPEG2 Specification. */
+ else if (parser->mpeg2_curr_frame_headers & MPEG2_HEADER_SEQ)
+ {
+ if (parser->info.pic_hdr.picture_coding_type == MPEG2_PC_TYPE_B)
+ {
+ found_error = 1;
+ }
+ }
+
+ /* If there is an error parsing picture coding type, do error concealment and continue. */
+ if ((ret_code != 1) || (found_error))
+ {
+ if (found_error)
+ {
+ /* Setting status to mark parser error while emitting the current workload. */
+ parser->mpeg2_wl_status |= MPEG2_WL_CORRUPTED_PIC_HDR;
+ MPEG2_DEB("Picture header corrupted.\n");
+ }
+
+ /* Error concealment for picture coding type - Default to I picture. */
+ parser->info.pic_hdr.picture_coding_type = MPEG2_PC_TYPE_I;
+ parser->mpeg2_wl_status |= MPEG2_WL_CONCEALED_PIC_COD_TYPE;
+ MPEG2_DEB("Picture Coding Type corrupted. Concealing to I type.\n");
+ }
+
+ /* Skip next 16 bits */
+ /* Skip vbv_delay */
+ ret_code |= viddec_pm_skip_bits(parent, 16);
+
+ /* If Picture Coding type is either P or B then */
+ /* Get forward vector code */
+ if ((MPEG2_PC_TYPE_P == parser->info.pic_hdr.picture_coding_type) ||
+ (MPEG2_PC_TYPE_B == parser->info.pic_hdr.picture_coding_type))
+ {
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_hdr.full_pel_forward_vect, 1);
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_hdr.forward_f_code, 3);
+ }
+ else
+ {
+ parser->info.pic_hdr.full_pel_forward_vect = 0;
+ parser->info.pic_hdr.forward_f_code = 0;
+ }
+
+ /* If Picture coding type is B then */
+ /* Get backward vector code */
+ if (MPEG2_PC_TYPE_B == parser->info.pic_hdr.picture_coding_type)
+ {
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_hdr.full_pel_backward_vect, 1);
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_hdr.backward_f_code, 3);
+ }
+ else
+ {
+ parser->info.pic_hdr.full_pel_backward_vect = 0;
+ parser->info.pic_hdr.backward_f_code = 0;
+ }
+
+ if (ret_code == 1)
+ {
+ MPEG2_DEB("Picture header parsed successfully.\n")
+ }
+ else
+ {
+ /* Setting status to mark parser error while emitting the current workload. */
+ parser->mpeg2_wl_status |= MPEG2_WL_CORRUPTED_PIC_HDR;
+ MPEG2_DEB("Picture header corrupted.\n");
+ }
+
+ parser->mpeg2_curr_frame_headers |= MPEG2_HEADER_PIC;
+ parser->mpeg2_stream_level = MPEG2_LEVEL_PIC;
+
+ return;
+}
+
+/* viddec_mpeg2_parse_ext_seq() - Parse Sequence extension metadata and */
+/* store in parser context */
+void viddec_mpeg2_parse_ext_seq(void *parent, void *ctxt)
+{
+ int32_t ret_code = 0;
+
+ /* Get MPEG2 Parser context */
+ struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;
+
+ /* Get Profile and Level info */
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_ext.profile_and_level_indication, 8);
+
+ /* Get Progressive Sequence Flag */
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_ext.progressive_sequence, 1);
+
+ /* Get Chroma Format */
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_ext.chroma_format, 2);
+
+ /* Error Concealment */
+ /* If there is an error parsing chroma format, do error concealment and continue. */
+ if ((ret_code != 1) || (parser->info.seq_ext.chroma_format == MPEG2_CF_RESERVED))
+ {
+ if (parser->info.seq_ext.chroma_format == MPEG2_CF_RESERVED)
+ {
+ /* Setting status to mark parser error while emitting the current workload. */
+ parser->mpeg2_wl_status |= MPEG2_WL_CORRUPTED_SEQ_EXT;
+ MPEG2_DEB("Sequence extension corrupted.\n")
+ }
+
+ /* Error concealment for chroma format - Default to 4:2:0 */
+ parser->info.seq_ext.chroma_format = MPEG2_CF_420;
+ parser->mpeg2_wl_status |= MPEG2_WL_CONCEALED_CHROMA_FMT;
+ MPEG2_DEB("Chroma Format corrupted. Concealing to 4:2:0.\n");
+ }
+
+ /* Get Content Size Extension Data */
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_ext.horizontal_size_extension, 2);
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_ext.vertical_size_extension, 2);
+
+ /* Get Bit Rate Extension */
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_ext.bit_rate_extension, 12);
+
+ /* Skip Marker bit */
+ ret_code |= viddec_pm_skip_bits(parent, 1);
+
+ /* Get VBV Buffer Size Extension Data */
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_ext.vbv_buffer_size_extension, 8);
+
+ /* Skip 1 bit */
+ /* Skip low_delay */
+ ret_code |= viddec_pm_skip_bits(parent, 1);
+
+ /* Get Frame Rate extension data */
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_ext.frame_rate_extension_n, 2);
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_ext.frame_rate_extension_d, 5);
+
+ if (ret_code == 1)
+ {
+ MPEG2_DEB("Sequence extension header parsed successfully.\n")
+ }
+ else
+ {
+ /* Setting status to mark parser error while emitting the current workload. */
+ parser->mpeg2_wl_status |= MPEG2_WL_CORRUPTED_SEQ_EXT;
+ MPEG2_DEB("Sequence extension corrupted.\n")
+ }
+
+ /* Check if the last parsed start code was that of sequence header. */
+ /* If true, seq extension followed seq header => MPEG2 Stream */
+ parser->mpeg2_stream = (parser->mpeg2_last_parsed_sc == MPEG2_SC_SEQ_HDR) ? true:false;
+ parser->mpeg2_curr_seq_headers |= MPEG2_HEADER_SEQ_EXT;
+ parser->mpeg2_curr_frame_headers |= MPEG2_HEADER_SEQ_EXT;
+
+ return;
+}
+
+/* viddec_mpeg2_parse_ext_seq_disp() - Parse Sequence Display extension */
+/* metadata and store in parser context */
+void viddec_mpeg2_parse_ext_seq_disp(void *parent, void *ctxt)
+{
+ int32_t ret_code = 0;
+
+ /* Get MPEG2 Parser context */
+ struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;
+
+ /* Get video format */
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_disp_ext.video_format, 3);
+
+ /* Check if color description info is present */
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_disp_ext.colour_description, 1);
+
+ /* If color description is found, get color primaries info */
+ /* and transfer characteristics */
+ if (parser->info.seq_disp_ext.colour_description)
+ {
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_disp_ext.colour_primaries, 8);
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_disp_ext.transfer_characteristics, 8);
+ ret_code |= viddec_pm_skip_bits(parent, 8);
+ }
+
+ /* Get Display Horizontal Size */
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_disp_ext.display_horizontal_size, 14);
+ ret_code |= viddec_pm_skip_bits(parent, 1);
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_disp_ext.display_vertical_size, 14);
+
+ if (ret_code == 1)
+ {
+ MPEG2_DEB("Sequence display extension parsed successfully.\n");
+ }
+ else
+ {
+ /* Setting status to mark parser error while emitting the current workload. */
+ parser->mpeg2_wl_status |= MPEG2_WL_CORRUPTED_SEQ_DISP_EXT;
+ MPEG2_DEB("Sequence display extension corrupted.\n")
+ }
+
+ /* Set flag to indicate Sequence Display Extension is present */
+ parser->mpeg2_curr_frame_headers |= MPEG2_HEADER_SEQ_DISP_EXT;
+ parser->mpeg2_curr_seq_headers |= MPEG2_HEADER_SEQ_DISP_EXT;
+
+ return;
+}
+
+/* viddec_mpeg2_parse_ext_seq_scal() - Parse Sequence Scalable extension */
+/* metadata and store in parser context */
+void viddec_mpeg2_parse_ext_seq_scal(void *parent, void *ctxt)
+{
+ int32_t ret_code = 0;
+
+ /* Get MPEG2 Parser context */
+ struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;
+
+ /* Get video format */
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_scal_ext.scalable_mode, 2);
+
+ if (ret_code == 1)
+ {
+ MPEG2_DEB("Sequence scalable extension parsed successfully.\n");
+ }
+
+ /* Set flag to indicate Sequence Display Extension is present */
+ parser->mpeg2_curr_frame_headers |= MPEG2_HEADER_SEQ_SCAL_EXT;
+ parser->mpeg2_curr_seq_headers |= MPEG2_HEADER_SEQ_SCAL_EXT;
+
+ return;
+}
+
+/* viddec_mpeg2_parse_ext_pic() - Parse Picture Coding extension */
+/* metadata and store in parser context */
+void viddec_mpeg2_parse_ext_pic(void *parent, void *ctxt)
+{
+ int32_t ret_code = 0, found_error = 0;
+
+ /* Get MPEG2 Parser context */
+ struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;
+
+ /* Get Forward/Backward, Horizontal/Vertical codes */
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.fcode00, 4);
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.fcode01, 4);
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.fcode10, 4);
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.fcode11, 4);
+
+ /* Get Intra DC Precision */
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.intra_dc_precision, 2);
+
+ /* Get Picture Structure */
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.picture_structure, 2);
+
+ /* Error Handling and Concealment */
+ /* Picture structure should be frame, top field or bottom field */
+ if (parser->info.pic_cod_ext.picture_structure == MPEG2_PIC_STRUCT_RESERVED)
+ {
+ found_error = 1;
+ }
+ /* All pictures in progressive sequence should be frame picture */
+ else if (parser->info.seq_ext.progressive_sequence)
+ {
+ if (parser->info.pic_cod_ext.picture_structure != MPEG2_PIC_STRUCT_FRAME)
+ {
+ found_error = 1;
+ }
+ }
+
+ /* If there is an error parsing picture structure, do error concealment and continue. */
+ if ((ret_code != 1) || (found_error))
+ {
+ if (found_error)
+ {
+ /* Setting status to mark parser error while emitting the current workload. */
+ parser->mpeg2_wl_status |= MPEG2_WL_CORRUPTED_PIC_COD_EXT;
+ MPEG2_DEB("Picture coding extension corrupted.\n");
+ }
+
+ /* Error concealment for picture structure - Default to frame picture. */
+ parser->info.pic_cod_ext.picture_structure = MPEG2_PIC_STRUCT_FRAME;
+ parser->mpeg2_wl_status |= MPEG2_WL_CONCEALED_PIC_STRUCT;
+ MPEG2_DEB("Picture Structure corrupted. Concealing to Frame picture.\n");
+ }
+
+ /* Get flags */
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.top_field_first, 1);
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.frame_pred_frame_dct, 1);
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.concealment_motion_vectors, 1);
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.q_scale_type, 1);
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.intra_vlc_format, 1);
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.alternate_scan, 1);
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.repeat_first_field, 1);
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.chroma_420_type, 1);
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.progressive_frame, 1);
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.composite_display_flag, 1);
+
+ /* Error concealment for frame picture */
+ if ((parser->info.pic_cod_ext.top_field_first)
+ || (parser->info.pic_cod_ext.frame_pred_frame_dct)
+ || (parser->info.pic_cod_ext.repeat_first_field)
+ || (parser->info.pic_cod_ext.progressive_frame))
+ {
+ if (parser->info.pic_cod_ext.picture_structure != MPEG2_PIC_STRUCT_FRAME)
+ {
+ parser->info.pic_cod_ext.picture_structure = MPEG2_PIC_STRUCT_FRAME;
+ parser->mpeg2_wl_status |= MPEG2_WL_CONCEALED_PIC_STRUCT;
+ MPEG2_DEB("Picture Structure corrupted. Concealing to Frame picture.\n");
+ }
+ }
+
+ if (ret_code == 1)
+ {
+ MPEG2_DEB("Picture coding extension parsed successfully.\n");
+ }
+ else
+ {
+ /* Setting status to mark parser error while emitting the current workload. */
+ parser->mpeg2_wl_status |= MPEG2_WL_CORRUPTED_PIC_COD_EXT;
+ MPEG2_DEB("Picture coding extension corrupted.\n");
+ }
+
+ /* Dangling field detection */
+ /* If the previous picture is the first field, then the temporal reference number */
+ /* should match with the second field. Otherwise, one of the fields in the previous */
+ /* picture is missing and dangling field error is marked. The workload containing */
+ /* the previous picture is emitted out and current picture data is added to the next */
+ /* workload. The mpeg2_use_next_workload variable is used as a flag to direct the */
+ /* items into the current/next workload. */
+ if ((parser->mpeg2_picture_interlaced) && (parser->mpeg2_first_field))
+ {
+ if (parser->mpeg2_prev_temp_ref != parser->info.pic_hdr.temporal_reference)
+ {
+ /* Mark dangling field info in workload status */
+ parser->mpeg2_wl_status |= MPEG2_WL_DANGLING_FIELD;
+ if (parser->mpeg2_prev_picture_structure == MPEG2_PIC_STRUCT_BOTTOM)
+ {
+ parser->mpeg2_wl_status |= MPEG2_WL_DANGLING_FIELD_TOP;
+ }
+ else
+ {
+ parser->mpeg2_wl_status |= MPEG2_WL_DANGLING_FIELD_BOTTOM;
+ }
+ /* Set flag stating current workload is done */
+ parser->mpeg2_pic_metadata_complete = true;
+ /* Set flag to use the next workload for adding workitems for */
+ /* the current frame */
+ parser->mpeg2_use_next_workload = true;
+ /* Toggle first field flag to compensate for missing field */
+ parser->mpeg2_first_field = (parser->mpeg2_first_field) ? false : true;
+ }
+ else
+ {
+ /* Same field repeated */
+ if (parser->mpeg2_prev_picture_structure == parser->info.pic_cod_ext.picture_structure)
+ {
+ /* Mark unsupported in workload status */
+ parser->mpeg2_wl_status |= MPEG2_WL_REPEAT_FIELD;
+ }
+ }
+ }
+
+ /* Set context variables for interlaced picture handling */
+ if (parser->info.pic_cod_ext.picture_structure == MPEG2_PIC_STRUCT_FRAME)
+ {
+ /* Frame picture found. Reset variables used for interlaced fields picture. */
+ parser->mpeg2_picture_interlaced = false;
+ parser->mpeg2_first_field = false;
+ parser->mpeg2_use_next_workload = false;
+ }
+ else
+ {
+ /* Interlaced fields picture found. */
+ parser->mpeg2_picture_interlaced = true;
+ parser->mpeg2_first_field = (parser->mpeg2_first_field) ? false : true;
+ }
+
+ /* Set flags */
+ parser->mpeg2_curr_frame_headers |= MPEG2_HEADER_PIC_COD_EXT;
+ parser->mpeg2_prev_temp_ref = parser->info.pic_hdr.temporal_reference;
+ parser->mpeg2_prev_picture_structure = parser->info.pic_cod_ext.picture_structure;
+ if ((!parser->mpeg2_picture_interlaced)
+ || ((parser->mpeg2_picture_interlaced) && (parser->mpeg2_first_field)))
+ {
+ parser->mpeg2_frame_start = true;
+ }
+
+ return;
+}
+
+/* viddec_mpeg2_parse_ext_pic_disp() - Parse Picture Display extension */
+/* metadata and store in parser context */
+void viddec_mpeg2_parse_ext_pic_disp(void *parent, void *ctxt)
+{
+ int32_t ret_code = 0;
+ uint32_t index = 0;
+
+ /* Get MPEG2 Parser context */
+ struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;
+
+ /* Determine number of offsets */
+ if (parser->info.seq_ext.progressive_sequence)
+ {
+ if (parser->info.pic_cod_ext.repeat_first_field)
+ {
+ parser->mpeg2_num_pan_scan_offsets =
+ (parser->info.pic_cod_ext.top_field_first) ? 3 : 2;
+ }
+ else /* Not repeat field */
+ parser->mpeg2_num_pan_scan_offsets = 1;
+ }
+ else /* Not progressive sequence */
+ {
+ /* Check if picture structure is a field */
+ if ((parser->info.pic_cod_ext.picture_structure == MPEG2_PIC_STRUCT_TOP) ||
+ (parser->info.pic_cod_ext.picture_structure == MPEG2_PIC_STRUCT_BOTTOM))
+ {
+ parser->mpeg2_num_pan_scan_offsets = 1;
+ }
+ else
+ {
+ parser->mpeg2_num_pan_scan_offsets =
+ (parser->info.pic_cod_ext.repeat_first_field) ? 3 : 2;
+ }
+ }
+
+ /* Get the offsets */
+ for (index = 0; index < parser->mpeg2_num_pan_scan_offsets; index++)
+ {
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_disp_ext.frame_center_horizontal_offset[index], 16);
+ ret_code |= viddec_pm_skip_bits(parent, 1);
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_disp_ext.frame_center_vertical_offset[index], 16);
+ ret_code |= viddec_pm_skip_bits(parent, 1);
+ }
+
+ if (ret_code == 1)
+ {
+ MPEG2_DEB("Picture display extension parsed successfully.\n");
+ }
+ else
+ {
+ /* Setting status to mark parser error while emitting the current workload. */
+ parser->mpeg2_wl_status |= MPEG2_WL_CORRUPTED_PIC_DISP_EXT;
+ MPEG2_DEB("Picture display extension corrupted.\n");
+ }
+
+ /* Set flag to indicate picture display extension is found */
+ parser->mpeg2_curr_frame_headers |= MPEG2_HEADER_PIC_DISP_EXT;
+ return;
+}
+
+/* viddec_mpeg2_parse_ext_quant() - Parse Quantization Matrix extension */
+/* metadata and store in parser context */
+void viddec_mpeg2_parse_ext_quant(void *parent, void *ctxt)
+{
+ int32_t ret_code = 0;
+
+ /* Get MPEG2 Parser context */
+ struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;
+
+ /* Quantization Matrix Support */
+ /* Get Intra Quantizer matrix, if available or use default values */
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.qnt_ext.load_intra_quantiser_matrix, 1);
+ if (parser->info.qnt_ext.load_intra_quantiser_matrix)
+ {
+ ret_code |= mpeg2_get_quant_matrix(parent,
+ parser->info.qnt_mat.intra_quantiser_matrix,
+ parser->info.pic_cod_ext.alternate_scan);
+ mpeg2_copy_matrix(parser->info.qnt_mat.intra_quantiser_matrix,
+ parser->info.qnt_mat.chroma_intra_quantiser_matrix);
+ }
+
+    /* Get Non-Intra Quantizer matrix, if available */
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.qnt_ext.load_non_intra_quantiser_matrix, 1);
+ if (parser->info.qnt_ext.load_non_intra_quantiser_matrix)
+ {
+ ret_code |= mpeg2_get_quant_matrix(parent,
+ parser->info.qnt_mat.non_intra_quantiser_matrix,
+ parser->info.pic_cod_ext.alternate_scan);
+ mpeg2_copy_matrix(parser->info.qnt_mat.non_intra_quantiser_matrix,
+ parser->info.qnt_mat.chroma_non_intra_quantiser_matrix);
+ }
+
+ /* Get Chroma Intra Quantizer matrix, if available */
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.qnt_ext.load_chroma_intra_quantiser_matrix, 1);
+ if (parser->info.qnt_ext.load_chroma_intra_quantiser_matrix)
+ {
+ ret_code |= mpeg2_get_quant_matrix(parent,
+ parser->info.qnt_mat.chroma_intra_quantiser_matrix,
+ parser->info.pic_cod_ext.alternate_scan);
+ }
+
+ /* Get Chroma Non-Intra Quantizer matrix, if available */
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.qnt_ext.load_chroma_non_intra_quantiser_matrix, 1);
+ if (parser->info.qnt_ext.load_chroma_non_intra_quantiser_matrix)
+ {
+ ret_code |= mpeg2_get_quant_matrix(parent,
+ parser->info.qnt_mat.chroma_non_intra_quantiser_matrix,
+ parser->info.pic_cod_ext.alternate_scan);
+ }
+
+ if (ret_code == 1)
+ {
+ MPEG2_DEB("Quantization matrix extension parsed successfully.\n");
+ }
+ else
+ {
+ /* Setting status to mark parser error while emitting the current workload. */
+ parser->mpeg2_wl_status |= MPEG2_WL_CORRUPTED_QMAT_EXT;
+ MPEG2_DEB("Quantization matrix extension corrupted.\n");
+ }
+
+ /* Set quantization matrices updated flag */
+ if ( (parser->info.qnt_ext.load_intra_quantiser_matrix) ||
+ (parser->info.qnt_ext.load_non_intra_quantiser_matrix) ||
+ (parser->info.qnt_ext.load_chroma_intra_quantiser_matrix) ||
+ (parser->info.qnt_ext.load_chroma_non_intra_quantiser_matrix) )
+ {
+ MPEG2_DEB("Custom quantization matrix found.\n");
+ }
+
+ return;
+}
+
+/* viddec_mpeg2_parse_ext() - Parse extension metadata and store in parser */
+/* context */
+void viddec_mpeg2_parse_ext(void *parent, void *ctxt)
+{
+ uint32_t ext_code = 0;
+
+ /* Get extension start code */
+ viddec_pm_get_bits(parent, &ext_code, 4);
+
+ /* Switch on extension type */
+ switch ( ext_code )
+ {
+ /* Sequence Extension Info */
+ case MPEG2_EXT_SEQ:
+ viddec_mpeg2_parse_ext_seq(parent, ctxt);
+ break;
+
+ /* Sequence Display Extension info */
+ case MPEG2_EXT_SEQ_DISP:
+ viddec_mpeg2_parse_ext_seq_disp(parent, ctxt);
+ break;
+
+ case MPEG2_EXT_SEQ_SCAL:
+ viddec_mpeg2_parse_ext_seq_scal(parent, ctxt);
+ break;
+
+ /* Picture Coding Extension */
+ case MPEG2_EXT_PIC_CODING:
+ viddec_mpeg2_parse_ext_pic(parent, ctxt);
+ break;
+
+ /* Picture Display Extension */
+ case MPEG2_EXT_PIC_DISP:
+ viddec_mpeg2_parse_ext_pic_disp(parent, ctxt);
+ break;
+
+ /* Quantization Extension*/
+ case MPEG2_EXT_QUANT_MAT:
+ viddec_mpeg2_parse_ext_quant(parent, ctxt);
+ break;
+
+ default:
+ break;
+ } /* Switch, on extension type */
+
+ return;
+}
+
+/* viddec_mpeg2_parse_and_append_user_data() - Parse user data and append to workload. */
+void viddec_mpeg2_parse_and_append_user_data(void *parent, void *ctxt)
+{
+ uint32_t user_data = 0;
+ viddec_workload_item_t wi;
+
+ /* Get MPEG2 Parser context */
+ struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;
+
+ /* Set the user data level (SEQ/GOP/PIC) in the workitem type. */
+ switch (parser->mpeg2_stream_level)
+ {
+ case MPEG2_LEVEL_SEQ:
+ {
+ wi.vwi_type = VIDDEC_WORKLOAD_SEQ_USER_DATA;
+ break;
+ }
+ case MPEG2_LEVEL_GOP:
+ {
+ wi.vwi_type = VIDDEC_WORKLOAD_GOP_USER_DATA;
+ break;
+ }
+ case MPEG2_LEVEL_PIC:
+ {
+ wi.vwi_type = VIDDEC_WORKLOAD_FRM_USER_DATA;
+ break;
+ }
+ default:
+ {
+ wi.vwi_type = VIDDEC_WORKLOAD_INVALID;
+ break;
+ }
+ }
+
+ /* Read 1 byte of user data and store it in workitem for the current */
+ /* stream level (SEQ/GOP/PIC). Keep adding data payloads till it reaches */
+ /* size 11. When it is 11, the maximum user data payload size, append the */
+ /* workitem. This loop is repeated till all user data is extracted and */
+ /* appended. */
+ wi.user_data.size = 0;
+ memset(&(wi.user_data), 0, sizeof(wi.user_data));
+ while(viddec_pm_get_bits(parent, &user_data, MPEG2_BITS_EIGHT) != -1)
+ {
+ /* Store the valid byte in data payload */
+ wi.user_data.data_payload[wi.user_data.size] = user_data;
+ wi.user_data.size++;
+
+ /* When size exceeds payload size, append workitem and continue */
+ if (wi.user_data.size >= 11)
+ {
+ viddec_pm_setup_userdata(&wi);
+ viddec_mpeg2_append_workitem(parent, &wi, parser->mpeg2_use_next_workload);
+ viddec_fw_reset_workload_item(&wi);
+ wi.user_data.size = 0;
+ }
+ }
+ /* If size is not 0, append remaining user data. */
+ if (wi.user_data.size > 0)
+ {
+ viddec_pm_setup_userdata(&wi);
+ viddec_mpeg2_append_workitem(parent, &wi, parser->mpeg2_use_next_workload);
+ wi.user_data.size = 0;
+ }
+
+ MPEG2_DEB("User data @ Level %d found.\n", parser->mpeg2_stream_level);
+ return;
+}
+
+static inline uint32_t get_mb_addr_increment(uint32_t *data)
+{
+ if (*data >= 1024)
+ {
+ return 1;
+ }
+ else if (*data >= 128)
+ {
+ *data >>= 6;
+ return mb_addr_inc_tab1[*data];
+ }
+ else if (*data >= 64)
+ {
+ *data >>= 3;
+ *data -= 8;
+ return mb_addr_inc_tab2[*data];
+ }
+ else
+ {
+ *data -= 24;
+ return mb_addr_inc_tab3[*data];
+ }
+}
+
+static void viddec_mpeg2_get_first_mb_number(void *parent, void *ctxt, uint32_t *first_mb)
+{
+ uint32_t mb_row = 0, mb_width = 0, prev_mb_addr = 0;
+ uint32_t temp = 0;
+
+ /* Get MPEG2 Parser context */
+ struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;
+ *first_mb = 0;
+ mb_row = ((parser->mpeg2_last_parsed_slice_sc & 0xFF) - 1);
+ mb_width = parser->info.seq_hdr.horizontal_size_value >> 4;
+ prev_mb_addr = (mb_row * mb_width) - 1;
+
+ /* Skip slice start code */
+ viddec_pm_skip_bits(parent, 32);
+
+ if (parser->info.seq_hdr.vertical_size_value > 2800)
+ {
+ /* Get 3 bits of slice_vertical_position_extension */
+ viddec_pm_get_bits(parent, &temp, 3);
+ mb_row += (temp << 7);
+ }
+
+    /* Skip priority_breakpoint if sequence scalable extension is present */
+ if (parser->mpeg2_curr_seq_headers & MPEG2_HEADER_SEQ_SCAL_EXT)
+ {
+ /* Skip 7 bits if scalable mode is 00 (Data partition) */
+ if (parser->info.seq_scal_ext.scalable_mode == 0)
+ {
+ viddec_pm_skip_bits(parent, 7);
+ }
+ }
+
+ /* Skip quantizer_scale */
+ viddec_pm_skip_bits(parent, 5);
+
+ /* Skip a few bits with slice information */
+ temp = 0;
+ viddec_pm_peek_bits(parent, &temp, 1);
+ if (temp == 0x1)
+ {
+ /* Skip intra_slice_flag(1), intra_slice(1) and reserved_bits(7) */
+ viddec_pm_skip_bits(parent, 9);
+ temp=0;
+ viddec_pm_peek_bits(parent, &temp, 1);
+ while (temp == 0x1)
+ {
+ /* Skip extra_bit_slice(1) and extra_information_slice(8) */
+ viddec_pm_skip_bits(parent, 9);
+ temp=0;
+ viddec_pm_peek_bits(parent, &temp, 1);
+ }
+ }
+
+ /* Skip extra_bit_slice flag */
+ viddec_pm_skip_bits(parent, 1);
+
+ /* Increment prev_mb_addr by 33 for every 11 bits of macroblock_escape string */
+ temp=0;
+ viddec_pm_peek_bits(parent, &temp, 11);
+ while (temp == 0x8)
+ {
+ viddec_pm_skip_bits(parent, 11);
+ prev_mb_addr += 33;
+ temp=0;
+ viddec_pm_peek_bits(parent, &temp, 11);
+ }
+
+ /* Get the mb_addr_increment and add it to prev_mb_addr to get the current mb number. */
+ *first_mb = prev_mb_addr + get_mb_addr_increment(&temp);
+ MPEG2_DEB("First MB number in slice is 0x%08X.\n", *first_mb);
+
+ return;
+}
+
+/* Parse slice data to get the number of macroblocks in the current slice and then */
+/* append as pixel data. */
+void viddec_mpeg2_parse_and_append_slice_data(void *parent, void *ctxt)
+{
+ uint32_t bit_off=0, start_byte=0, first_mb = 0;
+ uint8_t is_emul=0;
+ viddec_workload_item_t wi;
+
+ /* Get MPEG2 Parser context */
+ struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;
+
+ /* Get current byte position */
+ viddec_pm_get_au_pos(parent, &bit_off, &start_byte, &is_emul);
+
+ /* Populate wi type */
+ viddec_mpeg2_get_first_mb_number(parent, ctxt, &first_mb);
+ wi.vwi_type = VIDDEC_WORKLOAD_PIXEL_ES;
+ wi.es.es_flags = (first_mb << 16);
+
+ /* Append data from given byte position as pixel data */
+ viddec_pm_append_misc_tags(parent, start_byte, (unsigned int) -1, &wi, !parser->mpeg2_use_next_workload);
+ return;
+}
diff --git a/mix_vbp/viddec_fw/fw/codecs/mp2/parser/viddec_mpeg2_parse.c b/mix_vbp/viddec_fw/fw/codecs/mp2/parser/viddec_mpeg2_parse.c
new file mode 100644
index 0000000..a7b6ef7
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/mp2/parser/viddec_mpeg2_parse.c
@@ -0,0 +1,380 @@
+/**
+ * viddec_mpeg2_parse.c
+ * --------------------
+ * This file acts as the main interface between the parser manager and MPEG2
+ * parser. All the operations done by the MPEG2 parser are defined here and
+ * functions pointers for each operation is returned to the parser manager.
+ */
+
+#include "viddec_mpeg2.h"
+
+/* viddec_mpeg2_parser_init() - Initializes parser context. */
+static void viddec_mpeg2_parser_init
+(
+ void *ctxt,
+ uint32_t *persist_mem,
+ uint32_t preserve
+)
+{
+ struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;
+
+ /* Avoid compiler warning */
+ persist_mem = persist_mem;
+
+ /* Initialize state variables */
+ parser->mpeg2_pic_metadata_complete = false;
+ parser->mpeg2_picture_interlaced = false;
+ parser->mpeg2_first_field = false;
+ parser->mpeg2_frame_start = false;
+ parser->mpeg2_ref_table_updated = false;
+ parser->mpeg2_use_next_workload = false;
+ parser->mpeg2_first_slice_flag = false;
+ parser->mpeg2_curr_frame_headers = MPEG2_HEADER_NONE;
+ parser->mpeg2_last_parsed_sc = MPEG2_SC_ALL;
+ parser->mpeg2_last_parsed_slice_sc = MPEG2_SC_SLICE_MAX;
+ parser->mpeg2_wl_status = MPEG2_WL_EMPTY;
+ parser->mpeg2_prev_picture_structure = MPEG2_PIC_STRUCT_FRAME;
+ parser->mpeg2_prev_temp_ref = 0;
+ parser->mpeg2_num_pan_scan_offsets = 0;
+
+ if(preserve)
+ {
+ /* Init all picture level header info */
+ memset(&parser->info.pic_hdr, 0, sizeof(struct mpeg2_picture_hdr_info));
+ memset(&parser->info.pic_cod_ext, 0, sizeof(struct mpeg2_picture_coding_ext_info));
+ memset(&parser->info.pic_disp_ext, 0, sizeof(struct mpeg2_picture_disp_ext_info));
+ }
+ else
+ {
+ /* Init all header info */
+ memset(&parser->info, 0, sizeof(struct mpeg2_info));
+
+ parser->mpeg2_stream = false;
+ parser->mpeg2_custom_qmat_parsed = false;
+ parser->mpeg2_valid_seq_hdr_parsed = false;
+ parser->mpeg2_curr_seq_headers = MPEG2_HEADER_NONE;
+ }
+
+ MPEG2_DEB("MPEG2 Parser: Context Initialized.\n");
+
+ return;
+}
+
+/* viddec_mpeg2_get_context_size() - Returns the memory size required by the */
+/* MPEG2 parser. */
+static void viddec_mpeg2_get_context_size
+(
+ viddec_parser_memory_sizes_t *size
+)
+{
+ /* Should return size of my structure */
+ size->context_size = sizeof(struct viddec_mpeg2_parser);
+ size->persist_size = 0;
+}
+
+/* viddec_mpeg2_get_error_code() - Returns the error code for the current */
+/* workload. */
+static void viddec_mpeg2_get_error_code
+(
+ struct viddec_mpeg2_parser *parser,
+ viddec_workload_t *wl,
+ uint32_t *error_code
+)
+{
+ *error_code = 0;
+
+ /* Dangling field error */
+ if (parser->mpeg2_wl_status & MPEG2_WL_DANGLING_FIELD)
+ {
+ *error_code |= VIDDEC_FW_WORKLOAD_ERR_DANGLING_FLD;
+ if (parser->mpeg2_wl_status & MPEG2_WL_DANGLING_FIELD_TOP)
+ {
+ *error_code |= VIDDEC_FW_WORKLOAD_ERR_TOPFIELD;
+ }
+ else
+ {
+ *error_code |= VIDDEC_FW_WORKLOAD_ERR_BOTTOMFIELD;
+ }
+ }
+
+ /* Repeated same field */
+ if (parser->mpeg2_wl_status & MPEG2_WL_REPEAT_FIELD)
+ {
+ *error_code |= (VIDDEC_FW_WORKLOAD_ERR_DANGLING_FLD
+ | VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE);
+ }
+
+ /* If workload is not complete, set non-decodeable flag */
+ if (!(parser->mpeg2_wl_status & MPEG2_WL_COMPLETE))
+ {
+ *error_code |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE;
+ }
+
+ /* If reference info is not updated, set missing reference flag */
+ if (!(parser->mpeg2_wl_status & MPEG2_WL_REF_INFO))
+ {
+ *error_code |= VIDDEC_FW_WORKLOAD_ERR_MISSING_REFERENCE;
+ }
+
+ /* Missing DMEM data flag and irrecoverable flag is set */
+ if (!(parser->mpeg2_wl_status & MPEG2_WL_DMEM_DATA))
+ {
+ *error_code |= ( VIDDEC_FW_WORKLOAD_ERR_MISSING_DMEM
+ | VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE ) ;
+ }
+
+ /* Missing sequence header and irrecoverable flag is set */
+ if ((!(parser->mpeg2_curr_seq_headers & MPEG2_HEADER_SEQ))
+ && (!parser->mpeg2_valid_seq_hdr_parsed))
+ {
+ *error_code |= ( VIDDEC_FW_WORKLOAD_ERR_MISSING_SEQ_INFO
+ | VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE ) ;
+ }
+
+ /* Unsupported features found in stream */
+ if (parser->mpeg2_wl_status & MPEG2_WL_UNSUPPORTED)
+ {
+ *error_code |= ( VIDDEC_FW_WORKLOAD_ERR_UNSUPPORTED
+ | VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE ) ;
+ }
+
+ /* If frame type is unknown, default to I frame. */
+ if ((wl->attrs.frame_type != VIDDEC_FRAME_TYPE_I)
+ && (wl->attrs.frame_type != VIDDEC_FRAME_TYPE_P)
+ && (wl->attrs.frame_type != VIDDEC_FRAME_TYPE_B))
+ {
+ wl->attrs.frame_type = VIDDEC_FRAME_TYPE_I;
+ }
+
+ /* If there is a mismatch between the frame type and reference information */
+ /* then mark the workload as not decodable */
+ if (wl->attrs.frame_type == VIDDEC_FRAME_TYPE_B)
+ {
+ if (wl->is_reference_frame != 0) *error_code |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE;
+ }
+ else
+ {
+ if (wl->is_reference_frame == 0) *error_code |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE;
+ }
+
+ /* For non-decodable frames, do not set reference info so that the workload */
+ /* manager does not increment ref count. */
+ if (*error_code & VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE)
+ {
+ wl->is_reference_frame = 0;
+ }
+
+ /* Corrupted header notification */
+ if (parser->mpeg2_wl_status & MPEG2_WL_CORRUPTED_SEQ_HDR)
+ *error_code |= VIDDEC_FW_MPEG2_ERR_CORRUPTED_SEQ_HDR;
+ if (parser->mpeg2_wl_status & MPEG2_WL_CORRUPTED_SEQ_EXT)
+ *error_code |= VIDDEC_FW_MPEG2_ERR_CORRUPTED_SEQ_EXT;
+ if (parser->mpeg2_wl_status & MPEG2_WL_CORRUPTED_SEQ_DISP_EXT)
+ *error_code |= VIDDEC_FW_MPEG2_ERR_CORRUPTED_SEQ_DISP_EXT;
+ if (parser->mpeg2_wl_status & MPEG2_WL_CORRUPTED_GOP_HDR)
+ *error_code |= VIDDEC_FW_MPEG2_ERR_CORRUPTED_GOP_HDR;
+ if (parser->mpeg2_wl_status & MPEG2_WL_CORRUPTED_PIC_HDR)
+ *error_code |= VIDDEC_FW_MPEG2_ERR_CORRUPTED_PIC_HDR;
+ if (parser->mpeg2_wl_status & MPEG2_WL_CORRUPTED_PIC_COD_EXT)
+ *error_code |= VIDDEC_FW_MPEG2_ERR_CORRUPTED_PIC_COD_EXT;
+ if (parser->mpeg2_wl_status & MPEG2_WL_CORRUPTED_PIC_DISP_EXT)
+ *error_code |= VIDDEC_FW_MPEG2_ERR_CORRUPTED_PIC_DISP_EXT;
+ if (parser->mpeg2_wl_status & MPEG2_WL_CORRUPTED_QMAT_EXT)
+ *error_code |= VIDDEC_FW_MPEG2_ERR_CORRUPTED_QMAT_EXT;
+
+ MPEG2_DEB("Workload error code: 0x%8X.\n", *error_code);
+ return;
+}
+
+/* viddec_mpeg2_is_start_frame() - Returns if the current chunk of parsed */
+/* data has start of a frame. */
+static uint32_t viddec_mpeg2_is_start_frame
+(
+ void *ctxt
+)
+{
+ struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;
+ return (parser->mpeg2_frame_start);
+}
+
+/* viddec_mpeg2_is_workload_done() - Returns current frame parsing status */
+/* to the parser manager. */
+static uint32_t viddec_mpeg2_is_workload_done
+(
+ void *parent,
+ void *ctxt,
+ unsigned int next_sc,
+ uint32_t *codec_specific_errors
+)
+{
+ struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;
+ viddec_workload_t *wl = viddec_pm_get_header(parent);
+ uint32_t ret = VIDDEC_PARSE_SUCESS;
+ uint32_t frame_boundary = 0;
+ uint8_t force_frame_complete = 0;
+ parent = parent;
+
+ /* Detect Frame Boundary */
+ frame_boundary = ((MPEG2_SC_PICTURE == next_sc) || (MPEG2_SC_SEQ_HDR == next_sc) || (MPEG2_SC_GROUP == next_sc));
+ if (frame_boundary)
+ {
+ parser->mpeg2_first_slice_flag = false;
+ }
+
+ force_frame_complete = ((VIDDEC_PARSE_EOS == next_sc) || (VIDDEC_PARSE_DISCONTINUITY == next_sc));
+
+ if (force_frame_complete || (frame_boundary && (parser->mpeg2_pic_metadata_complete)))
+ {
+ if(!force_frame_complete)
+ {
+ parser->mpeg2_wl_status |= MPEG2_WL_COMPLETE;
+ parser->mpeg2_last_parsed_slice_sc = MPEG2_SC_SLICE_MAX;
+ parser->mpeg2_pic_metadata_complete = false;
+ parser->mpeg2_first_slice_flag = false;
+ }
+
+ viddec_mpeg2_get_error_code(parser, wl, codec_specific_errors);
+ parser->mpeg2_wl_status = MPEG2_WL_EMPTY;
+ parser->mpeg2_curr_frame_headers = MPEG2_HEADER_NONE;
+ /* Reset mpeg2_use_next_workload flag if it is set */
+ if (parser->mpeg2_use_next_workload)
+ {
+ viddec_pm_set_late_frame_detect(parent);
+ parser->mpeg2_use_next_workload = false;
+ }
+ ret = VIDDEC_PARSE_FRMDONE;
+ }
+ return ret;
+}
+
+/* viddec_mpeg2_parse() - Parse metadata info from the buffer for the prev */
+/* start code found. */
+static mpeg2_status viddec_mpeg2_parse
+(
+ void *parent,
+ void *ctxt
+)
+{
+ uint32_t current_sc = 0, sc_bits = MPEG2_SC_AND_PREFIX_SIZE;
+ int32_t ret = MPEG2_SUCCESS;
+ struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;
+
+    /* Reset frame start flag. For MPEG1 we want to set frame start after
+       we parse the picture header, since there is no extension */
+ parser->mpeg2_frame_start = (!parser->mpeg2_stream) && (parser->mpeg2_last_parsed_sc == MPEG2_SC_PICTURE);
+
+    /* Peek current start code - First 32 bits of the stream */
+ ret = viddec_pm_peek_bits(parent, &current_sc, sc_bits);
+ if (ret == -1)
+ {
+ MPEG2_DEB("Unable to get start code.\n");
+ return MPEG2_PARSE_ERROR;
+ }
+ current_sc &= MPEG2_BIT_MASK_8;
+ MPEG2_DEB("Start Code found = 0x%.8X\n", current_sc);
+
+ /* Get rid of the start code prefix for all start codes except slice */
+ /* start codes. */
+ if ((current_sc < MPEG2_SC_SLICE_MIN) || (current_sc > MPEG2_SC_SLICE_MAX))
+ {
+ viddec_pm_skip_bits(parent, sc_bits);
+ }
+
+ /* Parse Metadata based on the start code found */
+ switch( current_sc )
+ {
+ /* Sequence Start Code */
+ case MPEG2_SC_SEQ_HDR:
+ {
+ parser->mpeg2_curr_seq_headers = MPEG2_HEADER_NONE;
+ viddec_mpeg2_parse_seq_hdr(parent, ctxt);
+ }
+ break;
+
+ /* Picture Start Code */
+ case MPEG2_SC_PICTURE:
+ {
+ viddec_mpeg2_parse_pic_hdr(parent, ctxt);
+ }
+ break;
+
+ /* Extension Code */
+ case MPEG2_SC_EXT:
+ {
+ viddec_mpeg2_parse_ext(parent, ctxt);
+ }
+ break;
+
+ /* Group of Pictures Header */
+ case MPEG2_SC_GROUP:
+ {
+ viddec_mpeg2_parse_gop_hdr(parent, ctxt);
+ }
+ break;
+
+ /* Unused Start Code */
+ case MPEG2_SC_SEQ_END:
+ case MPEG2_SC_SEQ_ERR:
+ break;
+
+ /* User Data */
+ case MPEG2_SC_USER_DATA:
+ {
+ viddec_mpeg2_parse_and_append_user_data(parent, ctxt);
+ }
+ break;
+
+ default:
+ {
+ /* Slice Data - Append slice data to the workload */
+ if ((current_sc >= MPEG2_SC_SLICE_MIN) &&
+ (current_sc <= MPEG2_SC_SLICE_MAX))
+ {
+ if (!parser->mpeg2_first_slice_flag)
+ {
+ /* At this point, all the metadata required by the MPEG2 */
+ /* hardware for decoding is extracted and stored. So the */
+ /* metadata can be packed into workitems and emitted out.*/
+ viddec_mpeg2_emit_workload(parent, ctxt);
+
+ /* If the current picture is progressive or it is the */
+ /* second field of interlaced field picture then, set */
+ /* the workload done flag. */
+ if ((!parser->mpeg2_picture_interlaced)
+ || ((parser->mpeg2_picture_interlaced) && (!parser->mpeg2_first_field)))
+ {
+ parser->mpeg2_pic_metadata_complete = true;
+ }
+ else if ((parser->mpeg2_picture_interlaced) && (parser->mpeg2_first_field))
+ {
+ parser->mpeg2_curr_frame_headers = MPEG2_HEADER_NONE;
+ }
+
+ parser->mpeg2_first_slice_flag = true;
+ }
+ parser->mpeg2_last_parsed_slice_sc = current_sc;
+ viddec_mpeg2_parse_and_append_slice_data(parent, ctxt);
+ parser->mpeg2_wl_status |= MPEG2_WL_PARTIAL_SLICE;
+ }
+ }
+ } /* Switch */
+
+ /* Save last parsed start code */
+ parser->mpeg2_last_parsed_sc = current_sc;
+ return ret;
+}
+
+/* viddec_mpeg2_get_ops() - Register parser ops with the parser manager. */
+void viddec_mpeg2_get_ops
+(
+ viddec_parser_ops_t *ops
+)
+{
+ ops->init = viddec_mpeg2_parser_init;
+ ops->parse_syntax = viddec_mpeg2_parse;
+ ops->get_cxt_size = viddec_mpeg2_get_context_size;
+ ops->is_wkld_done = viddec_mpeg2_is_workload_done;
+ ops->is_frame_start = viddec_mpeg2_is_start_frame;
+ return;
+}
+
diff --git a/mix_vbp/viddec_fw/fw/codecs/mp2/parser/viddec_mpeg2_workload.c b/mix_vbp/viddec_fw/fw/codecs/mp2/parser/viddec_mpeg2_workload.c
new file mode 100644
index 0000000..503ded5
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/mp2/parser/viddec_mpeg2_workload.c
@@ -0,0 +1,461 @@
+/**
+ * viddec_mpeg2_workload.c
+ * -----------------------
+ * This file packs the data parsed and stored in the context into workload and
+ * emits it out. The current list of workitems emitted into the workload
+ * include:
+ *
+ * - DMEM - Register Data
+ * - Past and Future picture references
+ * - Quantization matrix data
+ *
+ * Slice data gets appended into the workload in viddec_mpeg2_parse.c
+ *
+ * Also, the frame attributes are updated in the workload.
+ */
+
+#include "viddec_mpeg2.h"
+#include "viddec_fw_item_types.h"
+
+void viddec_mpeg2_append_workitem(void *parent, viddec_workload_item_t *wi, uint8_t next_wl)
+{
+ if (next_wl)
+ {
+ viddec_pm_append_workitem_next(parent, wi);
+ }
+ else
+ {
+ viddec_pm_append_workitem(parent, wi);
+ }
+ return;
+}
+
+viddec_workload_t* viddec_mpeg2_get_header(void *parent, uint8_t next_wl)
+{
+ viddec_workload_t *ret;
+ if (next_wl)
+ {
+ ret = viddec_pm_get_next_header(parent);
+ }
+ else
+ {
+ ret = viddec_pm_get_header(parent);
+ }
+ return ret;
+}
+
+/* viddec_mpeg2_set_seq_ext_defaults() - Sets non-zero default values for */
+/* sequence extension items in case sequence extension is not present. */
+static void viddec_mpeg2_set_seq_ext_defaults(struct viddec_mpeg2_parser *parser)
+{
+ parser->info.seq_ext.progressive_sequence = true;
+ parser->info.seq_ext.chroma_format = MPEG2_CF_420;
+}
+
+/* viddec_mpeg2_set_pic_cod_ext_defaults() - Sets non-zero default values for*/
+/* picture coding extension items in case picture coding extension is not */
+/* present. */
+static void viddec_mpeg2_set_pic_cod_ext_defaults(struct viddec_mpeg2_parser *parser)
+{
+ parser->info.pic_cod_ext.picture_structure = MPEG2_PIC_STRUCT_FRAME;
+ parser->info.pic_cod_ext.frame_pred_frame_dct = true;
+ parser->info.pic_cod_ext.progressive_frame = true;
+}
+
+/* viddec_mpeg2_pack_qmat() - Packs the 256 byte quantization matrix data */
+/* into 64 32-bit values. */
+#ifdef MFDBIGENDIAN
+static void viddec_mpeg2_pack_qmat(struct viddec_mpeg2_parser *parser)
+{
+ /* Quantization Matrix Support */
+ /* Populate Quantization Matrices */
+ uint32_t index = 0;
+ uint32_t *qmat_packed, *qmat_unpacked;
+
+ /* When transferring the quantization matrix data from the parser */
+ /* context into workload items, we are packing four 8 bit */
+ /* quantization values into one DWORD (32 bits). To do this, the */
+ /* array of values of type uint8_t, is typecast as uint32 * and */
+ /* read. */
+ qmat_packed = (uint32_t *) parser->wi.qmat;
+ qmat_unpacked = (uint32_t *) &parser->info.qnt_mat;
+
+ for (index=0; index<MPEG2_QUANT_MAT_SIZE; index++)
+ {
+ qmat_packed[index] = qmat_unpacked[index];
+ }
+ return;
+}
+#else
+static void viddec_mpeg2_pack_qmat(struct viddec_mpeg2_parser *parser)
+{
+ /* Quantization Matrix Support */
+ /* Populate Quantization Matrices */
+ uint32_t index = 0;
+ uint32_t *qmat_packed;
+ uint8_t *qmat_unpacked;
+
+ /* When transferring the quantization matrix data from the parser */
+ /* context into workload items, we are packing four 8 bit */
+ /* quantization values into one DWORD (32 bits). To do this, the */
+ /* array of values of type uint8_t, is typecast as uint32 * and */
+ /* read. */
+ qmat_packed = (uint32_t *) parser->wi.qmat;
+ qmat_unpacked = (uint8_t *) &parser->info.qnt_mat;
+
+ for (index=0; index<MPEG2_QUANT_MAT_SIZE; index++)
+ {
+ qmat_packed[index] =
+ (((uint32_t)qmat_unpacked[(index<<2)+0])<< 24) |
+ (((uint32_t)qmat_unpacked[(index<<2)+1])<< 16) |
+ (((uint32_t)qmat_unpacked[(index<<2)+2])<< 8) |
+ (((uint32_t)qmat_unpacked[(index<<2)+3])<< 0) ;
+ }
+ return;
+}
+#endif
+
+/* viddec_mpeg2_trans_metadata_workitems() - Transfers the metadata stored */
+/* in parser context into workitems by bit masking. These workitems are then */
+/* sent through emitter */
+static void viddec_mpeg2_trans_metadata_workitems(void *ctxt)
+{
+ struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;
+
+ /* Reset register values */
+ parser->wi.csi1 = 0x0;
+ parser->wi.csi2 = 0x0;
+ parser->wi.cpi1 = 0x0;
+ parser->wi.cpce1 = 0x0;
+
+ /* Set defaults for missing fields */
+ if (!(parser->mpeg2_curr_seq_headers & MPEG2_HEADER_SEQ_EXT))
+ {
+ viddec_mpeg2_set_seq_ext_defaults(parser);
+ }
+ if (!(parser->mpeg2_curr_frame_headers & MPEG2_HEADER_PIC_COD_EXT))
+ {
+ viddec_mpeg2_set_pic_cod_ext_defaults(parser);
+ }
+
+ /* Populate Core Sequence Info 1 */
+ parser->wi.csi1 |= (parser->mpeg2_stream) << 1;
+ parser->wi.csi1 |= (parser->info.seq_hdr.constrained_parameters_flag) << 2;
+ parser->wi.csi1 |= (parser->info.seq_ext.progressive_sequence) << 3;
+ parser->wi.csi1 |= (parser->info.seq_ext.chroma_format) << 16;
+ parser->wi.csi1 |= (parser->info.qnt_ext.load_intra_quantiser_matrix) << 19;
+ parser->wi.csi1 |= (parser->info.qnt_ext.load_non_intra_quantiser_matrix) << 20;
+ parser->wi.csi1 |= (parser->info.qnt_ext.load_chroma_intra_quantiser_matrix) << 21;
+ parser->wi.csi1 |= (parser->info.qnt_ext.load_chroma_non_intra_quantiser_matrix) << 22;
+ MPEG2_DEB("Core Sequence Info 1: 0x%.8X\n", parser->wi.csi1);
+
+ /* Populate Core Sequence Info 2 */
+ parser->wi.csi2 |= (parser->info.seq_hdr.horizontal_size_value & MPEG2_BIT_MASK_11);
+ parser->wi.csi2 |= (parser->info.seq_hdr.vertical_size_value & MPEG2_BIT_MASK_11) << 14;
+ MPEG2_DEB("Core Sequence Info 2: 0x%.8X\n", parser->wi.csi2);
+
+ /* Populate Core Picture Info */
+ parser->wi.cpi1 |= (parser->info.pic_hdr.full_pel_forward_vect);
+ parser->wi.cpi1 |= (parser->info.pic_hdr.forward_f_code) << 1;
+ parser->wi.cpi1 |= (parser->info.pic_hdr.full_pel_backward_vect) << 4;
+ parser->wi.cpi1 |= (parser->info.pic_hdr.backward_f_code) << 5;
+ parser->wi.cpi1 |= (parser->info.pic_cod_ext.fcode00) << 8;
+ parser->wi.cpi1 |= (parser->info.pic_cod_ext.fcode01) << 12;
+ parser->wi.cpi1 |= (parser->info.pic_cod_ext.fcode10) << 16;
+ parser->wi.cpi1 |= (parser->info.pic_cod_ext.fcode11) << 20;
+ parser->wi.cpi1 |= (parser->info.pic_cod_ext.intra_dc_precision) << 24;
+ parser->wi.cpi1 |= (parser->info.pic_hdr.picture_coding_type-1) << 26;
+ MPEG2_DEB("Core Picture Info 1: 0x%.8X\n", parser->wi.cpi1);
+
+ /* Populate Core Picture Extension Info */
+ parser->wi.cpce1 |= (parser->info.pic_cod_ext.composite_display_flag);
+ parser->wi.cpce1 |= (parser->info.pic_cod_ext.progressive_frame) << 1;
+ parser->wi.cpce1 |= (parser->info.pic_cod_ext.chroma_420_type) << 2;
+ parser->wi.cpce1 |= (parser->info.pic_cod_ext.repeat_first_field) << 3;
+ parser->wi.cpce1 |= (parser->info.pic_cod_ext.alternate_scan) << 4;
+ parser->wi.cpce1 |= (parser->info.pic_cod_ext.intra_vlc_format) << 5;
+ parser->wi.cpce1 |= (parser->info.pic_cod_ext.q_scale_type) << 6;
+ parser->wi.cpce1 |= (parser->info.pic_cod_ext.concealment_motion_vectors) << 7;
+ parser->wi.cpce1 |= (parser->info.pic_cod_ext.frame_pred_frame_dct) << 8;
+ parser->wi.cpce1 |= (parser->info.pic_cod_ext.top_field_first) << 9;
+ parser->wi.cpce1 |= (parser->info.pic_cod_ext.picture_structure) << 10;
+ MPEG2_DEB("Core Picture Ext Info 1: 0x%.8X\n", parser->wi.cpce1);
+
+ return;
+}
+
+/* mpeg2_emit_display_frame() - Sends the frame id as a workload item. */
+static inline void mpeg2_emit_frameid(void *parent, int32_t wl_type, uint8_t flag)
+{
+ viddec_workload_item_t wi;
+ wi.vwi_type = wl_type;
+
+ wi.ref_frame.reference_id = 0;
+ wi.ref_frame.luma_phys_addr = 0;
+ wi.ref_frame.chroma_phys_addr = 0;
+ viddec_mpeg2_append_workitem( parent, &wi, flag );
+}
+
+/* mpeg2_send_ref_reorder() - Reorders reference frames */
+static inline void mpeg2_send_ref_reorder(void *parent, uint8_t flag)
+{
+ viddec_workload_item_t wi;
+
+ wi.vwi_type = VIDDEC_WORKLOAD_REFERENCE_FRAME_REORDER;
+ wi.ref_reorder.ref_table_offset = 0;
+ /* Reorder index 1 to index 0 only */
+ wi.ref_reorder.ref_reorder_00010203 = 0x01010203;
+ wi.ref_reorder.ref_reorder_04050607 = 0x04050607;
+ viddec_mpeg2_append_workitem( parent, &wi, flag );
+}
+
+/* viddec_mpeg2_manage_ref() - Manages frame references by inserting the */
+/* past and future references (if any) for every frame inserted in the */
+/* workload. */
+static void viddec_mpeg2_manage_ref(void *parent, void *ctxt)
+{
+ int32_t frame_id = 1;
+ int32_t frame_type;
+
+ /* Get MPEG2 Parser context */
+ struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;
+ viddec_workload_t *wl = viddec_mpeg2_get_header( parent, parser->mpeg2_use_next_workload );
+ wl->is_reference_frame = 0;
+
+ /* Identify the frame type (I, P or B) */
+ frame_type = parser->info.pic_hdr.picture_coding_type;
+
+ /* Send reference frame information based on whether the picture is a */
+ /* frame picture or field picture. */
+ if ((!parser->mpeg2_picture_interlaced)
+ || ((parser->mpeg2_picture_interlaced) && (parser->mpeg2_first_field)))
+ {
+ /* Check if we need to reorder frame references/send frame for display */
+ /* in case of I or P type */
+ if (frame_type != MPEG2_PC_TYPE_B)
+ {
+ /* Checking reorder */
+ if (parser->mpeg2_ref_table_updated)
+ {
+ mpeg2_send_ref_reorder(parent, parser->mpeg2_use_next_workload);
+ }
+ }
+
+ /* Send reference frame workitems */
+ switch(frame_type)
+ {
+ case MPEG2_PC_TYPE_I:
+ {
+ break;
+ }
+ case MPEG2_PC_TYPE_P:
+ {
+ mpeg2_emit_frameid(parent, VIDDEC_WORKLOAD_MPEG2_REF_PAST, parser->mpeg2_use_next_workload);
+ break;
+ }
+ case MPEG2_PC_TYPE_B:
+ {
+ mpeg2_emit_frameid(parent, VIDDEC_WORKLOAD_MPEG2_REF_PAST, parser->mpeg2_use_next_workload);
+ mpeg2_emit_frameid(parent, VIDDEC_WORKLOAD_MPEG2_REF_FUTURE, parser->mpeg2_use_next_workload);
+ }
+ }
+
+ /* Set reference information updated flag */
+ if (!parser->mpeg2_picture_interlaced)
+ {
+ parser->mpeg2_wl_status |= MPEG2_WL_REF_INFO;
+ }
+ }
+ else
+ {
+        /* Set reference information updated flag for the second field */
+ parser->mpeg2_wl_status |= MPEG2_WL_REF_INFO;
+ }
+
+ /* Set the reference frame flags for I and P types */
+ if (frame_type != MPEG2_PC_TYPE_B)
+ {
+ wl->is_reference_frame |= WORKLOAD_REFERENCE_FRAME | (frame_id & WORKLOAD_REFERENCE_FRAME_BMASK);
+ parser->mpeg2_ref_table_updated = true;
+ }
+
+ return;
+}
+
+/* viddec_mpeg2_check_unsupported() - Check for unsupported feature in the stream */
+static void viddec_mpeg2_check_unsupported(void *parent, void *ctxt)
+{
+ unsigned int unsupported_feature_found = 0;
+
+ /* Get MPEG2 Parser context */
+ struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;
+
+ /* Get workload */
+ viddec_workload_t *wl = viddec_mpeg2_get_header( parent, parser->mpeg2_use_next_workload );
+
+ /* Get attributes in workload */
+ viddec_frame_attributes_t *attrs = &wl->attrs;
+
+ /* Check for unsupported content size */
+ unsupported_feature_found |= (attrs->cont_size.height > MPEG2_MAX_CONTENT_HEIGHT);
+ unsupported_feature_found |= (attrs->cont_size.width > MPEG2_MAX_CONTENT_WIDTH);
+
+ /* Update parser status, if found */
+ if (unsupported_feature_found)
+ {
+ parser->mpeg2_wl_status |= MPEG2_WL_UNSUPPORTED;
+ }
+
+ return;
+}
+
+/* viddec_mpeg2_append_metadata() - Appends meta data from the stream.    */
+/* One workload item is emitted for each header type (sequence header,    */
+/* sequence extension, sequence display extension, GOP header) whose      */
+/* presence is flagged in parser->mpeg2_curr_frame_headers for the        */
+/* current frame.                                                         */
+void viddec_mpeg2_append_metadata(void *parent, void *ctxt)
+{
+    /* Get MPEG2 Parser context */
+    struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;
+
+    /* Scratch workload item, zeroed before each use below */
+    viddec_workload_item_t wi;
+
+    /* Append sequence info, if found with current frame */
+    if (parser->mpeg2_curr_frame_headers & MPEG2_HEADER_SEQ)
+    {
+        memset(&wi, 0, sizeof(viddec_workload_item_t));
+        wi.vwi_type = VIDDEC_WORKLOAD_SEQUENCE_INFO;
+
+        viddec_fw_mp2_sh_set_horizontal_size_value    ( &(wi.mp2_sh) , parser->info.seq_hdr.horizontal_size_value);
+        viddec_fw_mp2_sh_set_vertical_size_value      ( &(wi.mp2_sh) , parser->info.seq_hdr.vertical_size_value);
+        viddec_fw_mp2_sh_set_aspect_ratio_information ( &(wi.mp2_sh) , parser->info.seq_hdr.aspect_ratio_information);
+        viddec_fw_mp2_sh_set_frame_rate_code          ( &(wi.mp2_sh) , parser->info.seq_hdr.frame_rate_code);
+        viddec_fw_mp2_sh_set_bit_rate_value           ( &(wi.mp2_sh) , parser->info.seq_hdr.bit_rate_value);
+        viddec_fw_mp2_sh_set_vbv_buffer_size_value    ( &(wi.mp2_sh) , parser->info.seq_hdr.vbv_buffer_size_value);
+
+        viddec_mpeg2_append_workitem(parent, &wi, parser->mpeg2_use_next_workload);
+    }
+
+    /* Append sequence extension info, if found with current frame */
+    if (parser->mpeg2_curr_frame_headers & MPEG2_HEADER_SEQ_EXT)
+    {
+        memset(&wi, 0, sizeof(viddec_workload_item_t));
+        wi.vwi_type = VIDDEC_WORKLOAD_MPEG2_SEQ_EXT;
+
+        viddec_fw_mp2_se_set_profile_and_level_indication( &(wi.mp2_se) , parser->info.seq_ext.profile_and_level_indication);
+        viddec_fw_mp2_se_set_progressive_sequence        ( &(wi.mp2_se) , parser->info.seq_ext.progressive_sequence);
+        viddec_fw_mp2_se_set_chroma_format               ( &(wi.mp2_se) , parser->info.seq_ext.chroma_format);
+        viddec_fw_mp2_se_set_horizontal_size_extension   ( &(wi.mp2_se) , parser->info.seq_ext.horizontal_size_extension);
+        viddec_fw_mp2_se_set_vertical_size_extension     ( &(wi.mp2_se) , parser->info.seq_ext.vertical_size_extension);
+        viddec_fw_mp2_se_set_bit_rate_extension          ( &(wi.mp2_se) , parser->info.seq_ext.bit_rate_extension);
+        viddec_fw_mp2_se_set_vbv_buffer_size_extension   ( &(wi.mp2_se) , parser->info.seq_ext.vbv_buffer_size_extension);
+        viddec_fw_mp2_se_set_frame_rate_extension_n      ( &(wi.mp2_se) , parser->info.seq_ext.frame_rate_extension_n);
+        viddec_fw_mp2_se_set_frame_rate_extension_d      ( &(wi.mp2_se) , parser->info.seq_ext.frame_rate_extension_d);
+
+        viddec_mpeg2_append_workitem(parent, &wi, parser->mpeg2_use_next_workload);
+    }
+
+    /* Append Display info, if present */
+    if (parser->mpeg2_curr_frame_headers & MPEG2_HEADER_SEQ_DISP_EXT)
+    {
+        memset(&wi, 0, sizeof(viddec_workload_item_t));
+        wi.vwi_type = VIDDEC_WORKLOAD_DISPLAY_INFO;
+
+        viddec_fw_mp2_sde_set_video_format            ( &(wi.mp2_sde) , parser->info.seq_disp_ext.video_format);
+        viddec_fw_mp2_sde_set_color_description       ( &(wi.mp2_sde) , parser->info.seq_disp_ext.colour_description);
+        viddec_fw_mp2_sde_set_color_primaries         ( &(wi.mp2_sde) , parser->info.seq_disp_ext.colour_primaries);
+        viddec_fw_mp2_sde_set_transfer_characteristics( &(wi.mp2_sde) , parser->info.seq_disp_ext.transfer_characteristics);
+        viddec_fw_mp2_sde_set_display_horizontal_size ( &(wi.mp2_sde) , parser->info.seq_disp_ext.display_horizontal_size);
+        viddec_fw_mp2_sde_set_display_vertical_size   ( &(wi.mp2_sde) , parser->info.seq_disp_ext.display_vertical_size);
+
+        viddec_mpeg2_append_workitem(parent, &wi, parser->mpeg2_use_next_workload);
+    }
+
+    /* Append GOP info, if present */
+    if (parser->mpeg2_curr_frame_headers & MPEG2_HEADER_GOP)
+    {
+        memset(&wi, 0, sizeof(viddec_workload_item_t));
+        wi.vwi_type = VIDDEC_WORKLOAD_GOP_INFO;
+
+        viddec_fw_mp2_gop_set_closed_gop ( &(wi.mp2_gop) , parser->info.gop_hdr.closed_gop);
+        viddec_fw_mp2_gop_set_broken_link( &(wi.mp2_gop) , parser->info.gop_hdr.broken_link);
+
+        viddec_mpeg2_append_workitem(parent, &wi, parser->mpeg2_use_next_workload);
+    }
+
+    return;
+}
+
+/* viddec_mpeg2_append_workitems() - Appends decoder specific workitems   */
+/* to the workload starting at the address and length specified. Each     */
+/* item carries two consecutive DWORDs from the buffer plus their byte    */
+/* offset from the start of that buffer.                                  */
+static void viddec_mpeg2_append_workitems
+(
+    void     *parent,
+    uint32_t *address,
+    int       workitem_type,
+    int       num_items,
+    uint8_t   flag
+)
+{
+    const uint32_t        *base = address;
+    int32_t                item;
+    viddec_workload_item_t wi;
+
+    for (item = 0; item < num_items; item++, address += 2)
+    {
+        wi.vwi_type             = workitem_type;
+        wi.data.data_offset     = (char *) address - (const char *) base;
+        wi.data.data_payload[0] = address[0];
+        wi.data.data_payload[1] = address[1];
+
+        viddec_mpeg2_append_workitem(parent, &wi, flag);
+    }
+
+    return;
+}
+
+/* viddec_mpeg2_emit_workload() - Emits MPEG2 parser generated work load */
+/* items. */
+/* Items include: MPEG2 DMEM Data, Quantization Matrices. */
+/* Pixel ES data sent separately whenever parser sees slice data */
+void viddec_mpeg2_emit_workload(void *parent, void *ctxt)
+{
+    MPEG2_DEB("Emitting workloads.\n");
+
+    /* Get MPEG2 Parser context */
+    struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;
+
+    /* Append meta data workitems */
+    viddec_mpeg2_append_metadata(parent, ctxt);
+
+    /* Transfer metadata into attributes */
+    viddec_mpeg2_translate_attr(parent, ctxt);
+
+    /* Check for unsupported features in the stream and update parser status. */
+    /* Must run after viddec_mpeg2_translate_attr(): the check reads the       */
+    /* content size from the workload attributes.                              */
+    viddec_mpeg2_check_unsupported(parent, ctxt);
+
+    /* Transfer all stored metadata into MPEG2 Hardware Info */
+    viddec_mpeg2_trans_metadata_workitems(parser);
+
+    /* Send MPEG2 DMEM workitems (two DWORDs of parser->wi per item) */
+    viddec_mpeg2_append_workitems(parent,
+                                  (uint32_t *) &parser->wi,
+                                  VIDDEC_WORKLOAD_MPEG2_DMEM,
+                                  MPEG2_NUM_DMEM_WL_ITEMS,
+                                  parser->mpeg2_use_next_workload);
+    parser->mpeg2_wl_status |= MPEG2_WL_DMEM_DATA;
+    MPEG2_DEB("Adding %d items as DMEM Data.\n", MPEG2_NUM_DMEM_WL_ITEMS);
+
+    /* Send MPEG2 Quantization Matrix workitems, if updated */
+    viddec_mpeg2_pack_qmat(parser);
+    viddec_mpeg2_append_workitems(parent,
+                                  (uint32_t *) parser->wi.qmat,
+                                  VIDDEC_WORKLOAD_MPEG2_QMAT,
+                                  MPEG2_NUM_QMAT_WL_ITEMS,
+                                  parser->mpeg2_use_next_workload);
+    MPEG2_DEB("Adding %d items as QMAT Data.\n", MPEG2_NUM_QMAT_WL_ITEMS);
+
+    /* Manage reference frames */
+    viddec_mpeg2_manage_ref(parent, ctxt);
+
+    return;
+}
+
diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/include/viddec_fw_mp4.h b/mix_vbp/viddec_fw/fw/codecs/mp4/include/viddec_fw_mp4.h
new file mode 100644
index 0000000..7084161
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/mp4/include/viddec_fw_mp4.h
@@ -0,0 +1,231 @@
+#ifndef VIDDEC_FW_MP4_H
+#define VIDDEC_FW_MP4_H
+
+#include "viddec_fw_workload.h"
+
+// Reference frame identifiers; these values are written into the
+// frame_info field of viddec_fw_mp4_vop_info_t via the *_frame_id
+// accessors defined below.
+enum viddec_fw_mp4_ref_frame_id
+{
+    VIDDEC_MP4_FRAME_CURRENT = 0,
+    VIDDEC_MP4_FRAME_PAST    = 1,
+    VIDDEC_MP4_FRAME_FUTURE  = 2,
+    VIDDEC_MP4_FRAME_MAX     = 3,
+};
+
+// MP4-specific workload item types. Reference-frame items are allocated
+// from the VIDDEC_WORKLOAD_REF_FRAME_SOURCE_0 range; the remaining items
+// start at VIDDEC_WORKLOAD_DECODER_SPECIFIC.
+enum mp4_workload_item_type
+{
+    VIDDEC_WORKLOAD_MP4_PAST_FRAME = VIDDEC_WORKLOAD_REF_FRAME_SOURCE_0,
+    VIDDEC_WORKLOAD_MP4_FUTURE_FRAME,
+    VIDDEC_WORKLOAD_MP4_VOL_INFO = VIDDEC_WORKLOAD_DECODER_SPECIFIC,
+    VIDDEC_WORKLOAD_MP4_VOP_INFO,
+    VIDDEC_WORKLOAD_MP4_BVOP_INFO,
+    VIDDEC_WORKLOAD_MP4_SPRT_TRAJ,
+    VIDDEC_WORKLOAD_MP4_IQUANT,
+    VIDDEC_WORKLOAD_MP4_NIQUANT,
+    VIDDEC_WORKLOAD_MP4_SVH,
+};
+
+// VOP coding types; the values fit the 2-bit vop_coding_type field of
+// viddec_fw_mp4_vop_info_t::vop_data.
+enum viddec_fw_mp4_vop_coding_type_t
+{
+    VIDDEC_MP4_VOP_TYPE_I = 0,
+    VIDDEC_MP4_VOP_TYPE_P,
+    VIDDEC_MP4_VOP_TYPE_B,
+    VIDDEC_MP4_VOP_TYPE_S
+};
+
+// This structure contains the information extracted from the Video Object Layer.
+// This info will be populated in the workload as item type VIDDEC_WORKLOAD_MP4_VOL_INFO, using
+// the "vwi_payload" array in viddec_workload_item_t.
+// The get/set macros below take a pointer to a viddec_fw_mp4_vol_info_t and
+// read/write one bitfield of the named member.
+// TODO: Add default values in the comments for each item
+typedef struct
+{
+    // Flags extracted from the Video Object Layer
+    // 0:0 - short_video_header
+    // 1:2 - vol_shape
+    // 3:3 - interlaced
+    // 4:4 - obmc_disable
+    // 5:5 - quarter_sample
+    // 6:6 - resync_marker_disable
+    // 7:7 - data_partitioned
+    // 8:8 - reversible_vlc
+    #define viddec_fw_mp4_get_reversible_vlc(x) viddec_fw_bitfields_extract((x)->vol_flags, 8, 0x1)
+    #define viddec_fw_mp4_set_reversible_vlc(x, val) viddec_fw_bitfields_insert((x)->vol_flags, val, 8, 0x1)
+    #define viddec_fw_mp4_get_data_partitioned(x) viddec_fw_bitfields_extract((x)->vol_flags, 7, 0x1)
+    #define viddec_fw_mp4_set_data_partitioned(x, val) viddec_fw_bitfields_insert((x)->vol_flags, val, 7, 0x1)
+    #define viddec_fw_mp4_get_resync_marker_disable(x) viddec_fw_bitfields_extract((x)->vol_flags, 6, 0x1)
+    #define viddec_fw_mp4_set_resync_marker_disable(x, val) viddec_fw_bitfields_insert((x)->vol_flags, val, 6, 0x1)
+    #define viddec_fw_mp4_get_quarter_sample(x) viddec_fw_bitfields_extract((x)->vol_flags, 5, 0x1)
+    #define viddec_fw_mp4_set_quarter_sample(x, val) viddec_fw_bitfields_insert((x)->vol_flags, val, 5, 0x1)
+    #define viddec_fw_mp4_get_obmc_disable(x) viddec_fw_bitfields_extract((x)->vol_flags, 4, 0x1)
+    #define viddec_fw_mp4_set_obmc_disable(x, val) viddec_fw_bitfields_insert((x)->vol_flags, val, 4, 0x1)
+    #define viddec_fw_mp4_get_interlaced(x) viddec_fw_bitfields_extract((x)->vol_flags, 3, 0x1)
+    #define viddec_fw_mp4_set_interlaced(x, val) viddec_fw_bitfields_insert((x)->vol_flags, val, 3, 0x1)
+    #define viddec_fw_mp4_get_vol_shape(x) viddec_fw_bitfields_extract((x)->vol_flags, 1, 0x3)
+    #define viddec_fw_mp4_set_vol_shape(x, val) viddec_fw_bitfields_insert((x)->vol_flags, val, 1, 0x3)
+    #define viddec_fw_mp4_get_short_video_header_flag(x) viddec_fw_bitfields_extract((x)->vol_flags, 0, 0x1)
+    #define viddec_fw_mp4_set_short_video_header_flag(x, val) viddec_fw_bitfields_insert((x)->vol_flags, val, 0, 0x1)
+    unsigned int vol_flags;
+
+    // Size extracted from the Video Object Layer
+    // 0:12 - width
+    // 13:25 - height
+    // MFD_MPG4VD_MB_PER_ROW can be calculated as (width+15) >> 4
+    // MFD_MPG4VD_MB_ROWS can be calculated as (height+15) >> 4
+    #define viddec_fw_mp4_get_vol_width(x) viddec_fw_bitfields_extract((x)->vol_size, 13, 0x1FFF)
+    #define viddec_fw_mp4_set_vol_width(x, val) viddec_fw_bitfields_insert((x)->vol_size, val, 13, 0x1FFF)
+    #define viddec_fw_mp4_get_vol_height(x) viddec_fw_bitfields_extract((x)->vol_size, 0, 0x1FFF)
+    #define viddec_fw_mp4_set_vol_height(x, val) viddec_fw_bitfields_insert((x)->vol_size, val, 0, 0x1FFF)
+    unsigned int vol_size;
+
+    // Sprite, time increments and quantization details from the Video Object Layer
+    // 0:15 - vop_time_increment_resolution
+    // 16:17 - sprite_enable
+    // 18:23 - sprite_warping_points
+    // 24:25 - sprite_warping_accuracy
+    // 26:29 - quant_precision
+    // 30:30 - quant_type
+    #define viddec_fw_mp4_get_quant_type(x) viddec_fw_bitfields_extract((x)->vol_item, 30, 0x1)
+    #define viddec_fw_mp4_set_quant_type(x, val) viddec_fw_bitfields_insert((x)->vol_item, val, 30, 0x1)
+    #define viddec_fw_mp4_get_quant_precision(x) viddec_fw_bitfields_extract((x)->vol_item, 26, 0xF)
+    #define viddec_fw_mp4_set_quant_precision(x, val) viddec_fw_bitfields_insert((x)->vol_item, val, 26, 0xF)
+    #define viddec_fw_mp4_get_sprite_warping_accuracy(x) viddec_fw_bitfields_extract((x)->vol_item, 24, 0x3)
+    #define viddec_fw_mp4_set_sprite_warping_accuracy(x, val) viddec_fw_bitfields_insert((x)->vol_item, val, 24, 0x3)
+    #define viddec_fw_mp4_get_sprite_warping_points(x) viddec_fw_bitfields_extract((x)->vol_item, 18, 0x3F)
+    #define viddec_fw_mp4_set_sprite_warping_points(x, val) viddec_fw_bitfields_insert((x)->vol_item, val, 18, 0x3F)
+    #define viddec_fw_mp4_get_sprite_enable(x) viddec_fw_bitfields_extract((x)->vol_item, 16, 0x3)
+    #define viddec_fw_mp4_set_sprite_enable(x, val) viddec_fw_bitfields_insert((x)->vol_item, val, 16, 0x3)
+    #define viddec_fw_mp4_get_vop_time_increment_resolution(x) viddec_fw_bitfields_extract((x)->vol_item, 0, 0xFFFF)
+    #define viddec_fw_mp4_set_vop_time_increment_resolution(x, val) viddec_fw_bitfields_insert((x)->vol_item, val, 0, 0xFFFF)
+    unsigned int vol_item;
+
+} viddec_fw_mp4_vol_info_t;
+
+// This structure contains the information extracted from the Video Object Plane.
+// (The original comment said "Video Object Layer"; the fields below are VOP-level.)
+// This info will be populated in the workload as item type VIDDEC_WORKLOAD_MP4_VOP_INFO, using
+// the "vwi_payload" array in viddec_workload_item_t.
+// TODO: Add default values in the comments for each item
+typedef struct
+{
+    // Frame Info - to populate register MFD_MPG4VD_BSP_FRAME_INFO
+    // 0:4 - current_frame_id
+    // 5:5 - current_field_frame
+    // 6:10 - future_frame_id
+    // 11:11 - future_field_frame
+    // 12:16 - past_frame_id
+    // 17:17 - past_field_frame
+    #define viddec_fw_mp4_get_past_field_frame(x) viddec_fw_bitfields_extract((x)->frame_info, 17, 0x1)
+    #define viddec_fw_mp4_set_past_field_frame(x, val) viddec_fw_bitfields_insert((x)->frame_info, val, 17, 0x1)
+    #define viddec_fw_mp4_get_past_frame_id(x) viddec_fw_bitfields_extract((x)->frame_info, 12, 0x1F)
+    #define viddec_fw_mp4_set_past_frame_id(x, val) viddec_fw_bitfields_insert((x)->frame_info, val, 12, 0x1F)
+    #define viddec_fw_mp4_get_future_field_frame(x) viddec_fw_bitfields_extract((x)->frame_info, 11, 0x1)
+    #define viddec_fw_mp4_set_future_field_frame(x, val) viddec_fw_bitfields_insert((x)->frame_info, val, 11, 0x1)
+    #define viddec_fw_mp4_get_future_frame_id(x) viddec_fw_bitfields_extract((x)->frame_info, 6, 0x1F)
+    #define viddec_fw_mp4_set_future_frame_id(x, val) viddec_fw_bitfields_insert((x)->frame_info, val, 6, 0x1F)
+    #define viddec_fw_mp4_get_current_field_frame(x) viddec_fw_bitfields_extract((x)->frame_info, 5, 0x1)
+    #define viddec_fw_mp4_set_current_field_frame(x, val) viddec_fw_bitfields_insert((x)->frame_info, val, 5, 0x1)
+    #define viddec_fw_mp4_get_current_frame_id(x) viddec_fw_bitfields_extract((x)->frame_info, 0, 0x1F)
+    #define viddec_fw_mp4_set_current_frame_id(x, val) viddec_fw_bitfields_insert((x)->frame_info, val, 0, 0x1F)
+    unsigned int frame_info;
+
+    // Video Object Plane Info
+    // 0:1 - vop_coding_type
+    // 2:2 - vop_rounding_type
+    // 3:5 - intra_dc_vlc_thr
+    // 6:6 - top_field_first
+    // 7:7 - alternate_vertical_scan_flag
+    // 8:16 - vop_quant
+    // 17:19 - vop_fcode_forward
+    // 20:22 - vop_fcode_backward
+    // 23:31 - quant_scale
+    #define viddec_fw_mp4_get_vop_quant_scale(x) viddec_fw_bitfields_extract((x)->vop_data, 23, 0x1FF)
+    #define viddec_fw_mp4_set_vop_quant_scale(x, val) viddec_fw_bitfields_insert((x)->vop_data, val, 23, 0x1FF)
+    #define viddec_fw_mp4_get_vop_fcode_backward(x) viddec_fw_bitfields_extract((x)->vop_data, 20, 0x7)
+    #define viddec_fw_mp4_set_vop_fcode_backward(x, val) viddec_fw_bitfields_insert((x)->vop_data, val, 20, 0x7)
+    #define viddec_fw_mp4_get_vop_fcode_forward(x) viddec_fw_bitfields_extract((x)->vop_data, 17, 0x7)
+    #define viddec_fw_mp4_set_vop_fcode_forward(x, val) viddec_fw_bitfields_insert((x)->vop_data, val, 17, 0x7)
+    #define viddec_fw_mp4_get_vop_quant(x) viddec_fw_bitfields_extract((x)->vop_data, 8, 0x1FF)
+    #define viddec_fw_mp4_set_vop_quant(x, val) viddec_fw_bitfields_insert((x)->vop_data, val, 8, 0x1FF)
+    #define viddec_fw_mp4_get_alternate_vertical_scan_flag(x) viddec_fw_bitfields_extract((x)->vop_data, 7, 0x1)
+    #define viddec_fw_mp4_set_alternate_vertical_scan_flag(x, val) viddec_fw_bitfields_insert((x)->vop_data, val, 7, 0x1)
+    #define viddec_fw_mp4_get_top_field_first(x) viddec_fw_bitfields_extract((x)->vop_data, 6, 0x1)
+    #define viddec_fw_mp4_set_top_field_first(x, val) viddec_fw_bitfields_insert((x)->vop_data, val, 6, 0x1)
+    #define viddec_fw_mp4_get_intra_dc_vlc_thr(x) viddec_fw_bitfields_extract((x)->vop_data, 3, 0x7)
+    #define viddec_fw_mp4_set_intra_dc_vlc_thr(x, val) viddec_fw_bitfields_insert((x)->vop_data, val, 3, 0x7)
+    #define viddec_fw_mp4_get_vop_rounding_type(x) viddec_fw_bitfields_extract((x)->vop_data, 2, 0x1)
+    #define viddec_fw_mp4_set_vop_rounding_type(x, val) viddec_fw_bitfields_insert((x)->vop_data, val, 2, 0x1)
+    #define viddec_fw_mp4_get_vop_coding_type(x) viddec_fw_bitfields_extract((x)->vop_data, 0, 0x3)
+    #define viddec_fw_mp4_set_vop_coding_type(x, val) viddec_fw_bitfields_insert((x)->vop_data, val, 0, 0x3)
+    unsigned int vop_data;
+
+    // No of bits used in first byte of MB data
+    unsigned int bit_offset;
+
+} viddec_fw_mp4_vop_info_t;
+
+// This structure contains the B-VOP timing information (Tframe/TRD/TRB)
+// extracted from the stream.
+// This info will be populated in the workload as item type VIDDEC_WORKLOAD_MP4_BVOP_INFO, using
+// the "vwi_payload" array in viddec_workload_item_t.
+// TODO: Add default values in the comments for each item
+typedef struct
+{
+    // Frame period = T(first B-VOP after VOL) - T(past reference of first B-VOP after VOL)
+    unsigned int Tframe;
+
+    // TRD is the difference in temporal reference of the temporally next reference VOP with
+    // temporally previous reference VOP, assuming B-VOPs or skipped VOPs in between.
+    unsigned int TRD;
+
+    // TRB is the difference in temporal reference of the B-VOP and the previous reference VOP.
+    unsigned int TRB;
+
+} viddec_fw_mp4_bvop_info_t;
+
+// This structure contains the information extracted from the sprite trajectory.
+// This info will be populated in the workload as item type VIDDEC_WORKLOAD_MP4_SPRT_TRAJ,
+// using the fields vwi_payload in viddec_workload_item_t.
+// NOTE: unlike the other accessors in this file, these macros take the 32-bit
+// payload word itself (an element of warping_mv_code[]), not a struct pointer.
+// TODO: Add default values in the comments for each item
+typedef struct
+{
+    // Sprite Trajectory can have dmv_codes for each warping point.
+    // 0:13 - warping_mv_code_dv
+    // 14:27 - warping_mv_code_du
+    // 28:31 - warping_point_index - identifies which warping point the warping code refers to.
+    //         The default value for index is 0xF which should be treated as invalid.
+    #define viddec_fw_mp4_get_warping_point_index(x) viddec_fw_bitfields_extract((x), 28, 0xF)
+    #define viddec_fw_mp4_set_warping_point_index(x, val) viddec_fw_bitfields_insert((x), val, 28, 0xF)
+    #define viddec_fw_mp4_get_warping_mv_code_du(x) viddec_fw_bitfields_extract((x), 14, 0x3FFF)
+    #define viddec_fw_mp4_set_warping_mv_code_du(x, val) viddec_fw_bitfields_insert((x), val, 14, 0x3FFF)
+    #define viddec_fw_mp4_get_warping_mv_code_dv(x) viddec_fw_bitfields_extract((x), 0, 0x3FFF)
+    #define viddec_fw_mp4_set_warping_mv_code_dv(x, val) viddec_fw_bitfields_insert((x), val, 0, 0x3FFF)
+    unsigned int warping_mv_code[3];
+} viddec_fw_mp4_sprite_trajectory_t;
+
+// IQUANT entries will be populated in the workload using items of type VIDDEC_WORKLOAD_MP4_IQUANT and the
+// vwi_payload array. The entries will be in the order in which they need to be programmed in the registers.
+// There is no need for a separate structure for these values.
+
+// This structure contains the information extracted from the Video Plane with Short Header.
+// This info will be populated in the workload as item type VIDDEC_WORKLOAD_MP4_SVH, using
+// the "vwi_payload" array in viddec_workload_item_t.
+// TODO: Add default values in the comments for each item
+typedef struct
+{
+    // Video Plane with Short Header
+    // 0:7 - temporal_reference
+    // 8:19 - num_macroblocks_in_gob
+    // 20:24 - num_gobs_in_vop
+    // 25:27 - num_rows_in_gob
+    #define viddec_fw_mp4_get_num_rows_in_gob(x) viddec_fw_bitfields_extract((x)->svh_data, 25, 0x7)
+    #define viddec_fw_mp4_set_num_rows_in_gob(x, val) viddec_fw_bitfields_insert((x)->svh_data, val, 25, 0x7)
+    #define viddec_fw_mp4_get_num_gobs_in_vop(x) viddec_fw_bitfields_extract((x)->svh_data, 20, 0x1F)
+    #define viddec_fw_mp4_set_num_gobs_in_vop(x, val) viddec_fw_bitfields_insert((x)->svh_data, val, 20, 0x1F)
+    #define viddec_fw_mp4_get_num_macroblocks_in_gob(x) viddec_fw_bitfields_extract((x)->svh_data, 8, 0xFFF)
+    #define viddec_fw_mp4_set_num_macroblocks_in_gob(x, val) viddec_fw_bitfields_insert((x)->svh_data, val, 8, 0xFFF)
+    #define viddec_fw_mp4_get_temporal_reference(x) viddec_fw_bitfields_extract((x)->svh_data, 0, 0xFF)
+    #define viddec_fw_mp4_set_temporal_reference(x, val) viddec_fw_bitfields_insert((x)->svh_data, val, 0, 0xFF)
+    unsigned int svh_data;
+
+    // Zero padding so the structure fills the 3-DWORD workload payload.
+    unsigned int pad1;
+    unsigned int pad2;
+} viddec_fw_mp4_svh_t;
+
+#endif
diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_fw_mp4_workload.c b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_fw_mp4_workload.c
new file mode 100644
index 0000000..f595c91
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_fw_mp4_workload.c
@@ -0,0 +1,371 @@
+#include "viddec_fw_workload.h"
+#include "viddec_parser_ops.h"
+#include "viddec_fw_mp4.h"
+#include "viddec_mp4_parse.h"
+
+// Fills the generic frame attributes of the workload (content size,
+// frame type, top-field-first) from the parsed Video Object Layer/Plane.
+// Always returns MP4_STATUS_OK.
+uint32_t viddec_fw_mp4_populate_attr(viddec_workload_t *wl, viddec_mp4_parser_t *parser)
+{
+    viddec_frame_attributes_t *attr = &(wl->attrs);
+    mp4_VideoObjectLayer_t    *vol  = &(parser->info.VisualObject.VideoObject);
+
+    memset(attr, 0, sizeof(viddec_frame_attributes_t));
+
+    // Content size comes straight from the VOL header.
+    attr->cont_size.width  = vol->video_object_layer_width;
+    attr->cont_size.height = vol->video_object_layer_height;
+
+    // Map the MP4 VOP coding type onto the generic frame type enum;
+    // unknown values leave frame_type at its zeroed default.
+    switch (vol->VideoObjectPlane.vop_coding_type)
+    {
+        case MP4_VOP_TYPE_I: attr->frame_type = VIDDEC_FRAME_TYPE_I; break;
+        case MP4_VOP_TYPE_P: attr->frame_type = VIDDEC_FRAME_TYPE_P; break;
+        case MP4_VOP_TYPE_B: attr->frame_type = VIDDEC_FRAME_TYPE_B; break;
+        case MP4_VOP_TYPE_S: attr->frame_type = VIDDEC_FRAME_TYPE_S; break;
+        default:             break;
+    } // switch on vop_coding_type
+
+    attr->mpeg4.top_field_first = vol->VideoObjectPlane.top_field_first;
+
+    return MP4_STATUS_OK;
+} // viddec_fw_mp4_populate_attr
+
+// Packs the parsed Video Object Layer fields into a 3-DWORD
+// VIDDEC_WORKLOAD_MP4_VOL_INFO workload item and appends it.
+// Returns the status of viddec_pm_append_workitem().
+uint32_t viddec_fw_mp4_insert_vol_workitem(void *parent, viddec_mp4_parser_t *parser)
+{
+    uint32_t result = MP4_STATUS_OK;
+    viddec_workload_item_t wi;
+    viddec_fw_mp4_vol_info_t vol_info;
+    mp4_VideoObjectLayer_t *vol = &(parser->info.VisualObject.VideoObject);
+
+    memset(&vol_info, 0, sizeof(viddec_fw_mp4_vol_info_t));
+
+    // Get vol_flags
+    viddec_fw_mp4_set_reversible_vlc(&vol_info, vol->reversible_vlc);
+    viddec_fw_mp4_set_data_partitioned(&vol_info, vol->data_partitioned);
+    viddec_fw_mp4_set_resync_marker_disable(&vol_info, vol->resync_marker_disable);
+    viddec_fw_mp4_set_quarter_sample(&vol_info, vol->quarter_sample);
+    viddec_fw_mp4_set_obmc_disable(&vol_info, vol->obmc_disable);
+    viddec_fw_mp4_set_interlaced(&vol_info, vol->interlaced);
+    viddec_fw_mp4_set_vol_shape(&vol_info, vol->video_object_layer_shape);
+    viddec_fw_mp4_set_short_video_header_flag(&vol_info, vol->short_video_header);
+
+    // Get vol_size
+    viddec_fw_mp4_set_vol_width(&vol_info, vol->video_object_layer_width);
+    viddec_fw_mp4_set_vol_height(&vol_info, vol->video_object_layer_height);
+
+    // Get vol_item
+    viddec_fw_mp4_set_quant_type(&vol_info, vol->quant_type);
+    viddec_fw_mp4_set_quant_precision(&vol_info, vol->quant_precision);
+    viddec_fw_mp4_set_sprite_warping_accuracy(&vol_info, vol->sprite_info.sprite_warping_accuracy);
+    viddec_fw_mp4_set_sprite_warping_points(&vol_info, vol->sprite_info.no_of_sprite_warping_points);
+    viddec_fw_mp4_set_sprite_enable(&vol_info, vol->sprite_enable);
+    viddec_fw_mp4_set_vop_time_increment_resolution(&vol_info, vol->vop_time_increment_resolution);
+
+    // wi needs no memset here: vwi_type and all three payload words are set.
+    wi.vwi_type = VIDDEC_WORKLOAD_MP4_VOL_INFO;
+    wi.vwi_payload[0] = vol_info.vol_flags;
+    wi.vwi_payload[1] = vol_info.vol_size;
+    wi.vwi_payload[2] = vol_info.vol_item;
+
+    result = viddec_pm_append_workitem(parent, &wi);
+
+    return result;
+} // viddec_fw_mp4_insert_vol_workitem
+
+// Packs the parsed Video Object Plane fields into a 3-DWORD
+// VIDDEC_WORKLOAD_MP4_VOP_INFO workload item and appends it.
+// Returns the status of viddec_pm_append_workitem().
+uint32_t viddec_fw_mp4_insert_vop_workitem(void *parent, viddec_mp4_parser_t *parser)
+{
+    uint32_t result = MP4_STATUS_OK;
+    viddec_workload_item_t wi;
+    viddec_fw_mp4_vop_info_t vop_info;
+    mp4_VideoObjectPlane_t *vop = &(parser->info.VisualObject.VideoObject.VideoObjectPlane);
+    uint32_t byte = 0;
+    unsigned char is_emul;
+
+    memset(&vop_info, 0, sizeof(viddec_fw_mp4_vop_info_t));
+
+    // Get frame_info
+    // NOTE(review): these six set_* calls are dead stores - frame_info is
+    // unconditionally overwritten with the constant 0x10200 just below.
+    viddec_fw_mp4_set_past_field_frame(&vop_info, parser->ref_frame[VIDDEC_MP4_INDX_2].is_field);
+    viddec_fw_mp4_set_past_frame_id(&vop_info, VIDDEC_MP4_FRAME_PAST);
+    viddec_fw_mp4_set_future_field_frame(&vop_info, parser->ref_frame[VIDDEC_MP4_INDX_1].is_field);
+    viddec_fw_mp4_set_future_frame_id(&vop_info, VIDDEC_MP4_FRAME_FUTURE);
+    viddec_fw_mp4_set_current_field_frame(&vop_info, parser->ref_frame[VIDDEC_MP4_INDX_0].is_field);
+    viddec_fw_mp4_set_current_frame_id(&vop_info, VIDDEC_MP4_FRAME_CURRENT);
+
+    // HW has a limitation that the enums for PAST(1), FUTURE(2) and CURRENT(0) cannot be changed and
+    // the spec does not support field pictures. Hence the field_frame bits are always zero.
+    // This gives us the constant 0x10200.
+    vop_info.frame_info = 0x10200;
+
+    // Get vop_data
+    // Quant scale is in the video_packet_header or the gob_layer - both of which are parsed by the BSP
+    viddec_fw_mp4_set_vop_quant_scale(&vop_info, 0);
+    viddec_fw_mp4_set_vop_fcode_backward(&vop_info, vop->vop_fcode_backward);
+    viddec_fw_mp4_set_vop_fcode_forward(&vop_info, vop->vop_fcode_forward);
+    viddec_fw_mp4_set_vop_quant(&vop_info, vop->vop_quant);
+    viddec_fw_mp4_set_alternate_vertical_scan_flag(&vop_info, vop->alternate_vertical_scan_flag);
+    viddec_fw_mp4_set_top_field_first(&vop_info, vop->top_field_first);
+    viddec_fw_mp4_set_intra_dc_vlc_thr(&vop_info, vop->intra_dc_vlc_thr);
+    viddec_fw_mp4_set_vop_rounding_type(&vop_info, vop->vop_rounding_type);
+    viddec_fw_mp4_set_vop_coding_type(&vop_info, vop->vop_coding_type);
+
+    // Get vol_item
+    // NOTE(review): the status returned by viddec_pm_get_au_pos() is
+    // overwritten by the append below; a failure here is silently ignored.
+    result = viddec_pm_get_au_pos(parent, &vop_info.bit_offset, &byte, &is_emul);
+
+    wi.vwi_type = VIDDEC_WORKLOAD_MP4_VOP_INFO;
+    wi.vwi_payload[0] = vop_info.frame_info;
+    wi.vwi_payload[1] = vop_info.vop_data;
+    wi.vwi_payload[2] = vop_info.bit_offset;
+
+    result = viddec_pm_append_workitem(parent, &wi);
+
+    return result;
+} // viddec_fw_mp4_insert_vop_workitem
+
+// Packs the parsed Video Plane with Short Header (H.263 mode) fields into
+// a VIDDEC_WORKLOAD_MP4_SVH workload item and appends it.
+// Returns the status of viddec_pm_append_workitem().
+uint32_t viddec_fw_mp4_insert_vpsh_workitem(void *parent, viddec_mp4_parser_t *parser)
+{
+    uint32_t result = MP4_STATUS_OK;
+    viddec_workload_item_t wi;
+    viddec_fw_mp4_svh_t svh_info;
+    mp4_VideoObjectPlaneH263 *svh = &(parser->info.VisualObject.VideoObject.VideoObjectPlaneH263);
+
+    memset(&svh_info, 0, sizeof(viddec_fw_mp4_svh_t));
+
+    // Get svh_data
+    viddec_fw_mp4_set_temporal_reference(&svh_info, svh->temporal_reference);
+    viddec_fw_mp4_set_num_macroblocks_in_gob(&svh_info, svh->num_macroblocks_in_gob);
+    viddec_fw_mp4_set_num_gobs_in_vop(&svh_info, svh->num_gobs_in_vop);
+    viddec_fw_mp4_set_num_rows_in_gob(&svh_info, svh->num_rows_in_gob);
+
+    // pad1/pad2 are zero from the memset above and fill out the payload.
+    wi.vwi_type = VIDDEC_WORKLOAD_MP4_SVH;
+    wi.vwi_payload[0] = svh_info.svh_data;
+    wi.vwi_payload[1] = svh_info.pad1;
+    wi.vwi_payload[2] = svh_info.pad2;
+
+    result = viddec_pm_append_workitem(parent, &wi);
+
+    return result;
+} // viddec_fw_mp4_insert_vpsh_workitem
+
+// Packs the sprite trajectory warping codes into one or two
+// VIDDEC_WORKLOAD_MP4_SPRT_TRAJ workload items (3 warping points per item)
+// and appends them. Does nothing when the VOL defines no warping points.
+// Returns the status of the last viddec_pm_append_workitem() call.
+uint32_t viddec_fw_mp4_insert_sprite_workitem(void *parent, viddec_mp4_parser_t *parser)
+{
+    uint32_t result = MP4_STATUS_OK;
+    viddec_workload_item_t wi;
+    viddec_fw_mp4_sprite_trajectory_t sprite_info;
+    mp4_VideoObjectLayer_t *vol = &(parser->info.VisualObject.VideoObject);
+    mp4_VideoObjectPlane_t *vop = &(parser->info.VisualObject.VideoObject.VideoObjectPlane);
+    uint8_t no_of_entries_per_item = 3;
+    uint8_t no_of_sprite_workitems = 0;
+    uint8_t warp_index = 0;
+    int i, j;
+
+    if(!vol->sprite_info.no_of_sprite_warping_points)
+        return result;
+
+    // Up to 3 warping points fit in one item; a second item is only
+    // needed when the VOL declares more than 3 points.
+    no_of_sprite_workitems = (vol->sprite_info.no_of_sprite_warping_points > 3) ? 2 : 1;
+
+    for(i=0; i<no_of_sprite_workitems; i++)
+    {
+        memset(&sprite_info, 0, sizeof(viddec_fw_mp4_sprite_trajectory_t));
+
+        for(j=0; j<no_of_entries_per_item; j++)
+        {
+            if(warp_index < vol->sprite_info.no_of_sprite_warping_points)
+            {
+                viddec_fw_mp4_set_warping_point_index(sprite_info.warping_mv_code[j], warp_index);
+                viddec_fw_mp4_set_warping_mv_code_du(sprite_info.warping_mv_code[j], vop->warping_mv_code_du[warp_index]);
+                viddec_fw_mp4_set_warping_mv_code_dv(sprite_info.warping_mv_code[j], vop->warping_mv_code_dv[warp_index]);
+            }
+            else
+            {
+                // Unused slots carry the invalid warping point index (0xF).
+                // Fixed: was '0xF << 28', which left-shifts a signed int into
+                // the sign bit (undefined behavior); use an unsigned operand.
+                sprite_info.warping_mv_code[j] = 0xFU << 28;
+            }
+            warp_index++;
+        }
+
+        // wi needs no memset: vwi_type and all three payload words are set.
+        wi.vwi_type = VIDDEC_WORKLOAD_MP4_SPRT_TRAJ;
+        wi.vwi_payload[0] = sprite_info.warping_mv_code[0];
+        wi.vwi_payload[1] = sprite_info.warping_mv_code[1];
+        wi.vwi_payload[2] = sprite_info.warping_mv_code[2];
+
+        result = viddec_pm_append_workitem(parent, &wi);
+    }
+
+    return result;
+} // viddec_fw_mp4_insert_sprite_workitem
+
+// Appends the B-VOP timing item (Tframe, TRD, TRB) to the workload and
+// returns the append status.
+uint32_t viddec_fw_mp4_insert_bvop_workitem(void *parent, viddec_mp4_parser_t *parser)
+{
+    viddec_workload_item_t wi;
+    mp4_VideoObjectLayer_t *vol = &(parser->info.VisualObject.VideoObject);
+
+    wi.vwi_type       = VIDDEC_WORKLOAD_MP4_BVOP_INFO;
+    wi.vwi_payload[0] = vol->Tframe;
+    wi.vwi_payload[1] = vol->TRD;
+    wi.vwi_payload[2] = vol->TRB;
+
+    return viddec_pm_append_workitem(parent, &wi);
+} // viddec_fw_mp4_insert_bvop_workitem
+
+// Appends one 64-entry quantization matrix to the workload as a series of
+// IQUANT (intra) or NIQUANT (non-intra) workload items.
+// 'qmat' points at the matrix: 64 8-bit entries => 16 DWORDS. Each item
+// carries up to 3 DWORDS, so 6 items are emitted: five full items and a
+// final item holding the one remaining DWORD.
+// Returns the status of the last viddec_pm_append_workitem() call.
+uint32_t viddec_fw_mp4_insert_qmat(void *parent, uint8_t intra_quant_flag, uint32_t *qmat)
+{
+    uint32_t result = MP4_STATUS_OK;
+    viddec_workload_item_t wi;
+    uint8_t i;
+
+    // No of items = (64/4 Dwords / 3 entries per workload item)
+    // 64 8b entries => 64 * 8 / 32 DWORDS => 64/4 DWORDS => 16 DWORDS
+    // Each item can store 3 DWORDS, 16 DWORDS => 16/3 items => 6 items
+    for(i=0; i<6; i++)
+    {
+        memset(&wi, 0, sizeof(viddec_workload_item_t));
+
+        if(intra_quant_flag)
+            wi.vwi_type = VIDDEC_WORKLOAD_MP4_IQUANT;
+        else
+            wi.vwi_type = VIDDEC_WORKLOAD_MP4_NIQUANT;
+
+        // Fixed: was 'i == 6', which is unreachable inside this loop; the
+        // last item (i == 5) holds only the 16th DWORD and must not read
+        // qmat[1]/qmat[2] beyond the end of the 16-DWORD matrix.
+        if(i == 5)
+        {
+            wi.vwi_payload[0] = qmat[0];
+            wi.vwi_payload[1] = 0;
+            wi.vwi_payload[2] = 0;
+        }
+        else
+        {
+            wi.vwi_payload[0] = qmat[0];
+            wi.vwi_payload[1] = qmat[1];
+            wi.vwi_payload[2] = qmat[2];
+        }
+
+        qmat += 3;
+
+        result = viddec_pm_append_workitem(parent, &wi);
+    }
+
+    return result;
+} // viddec_fw_mp4_insert_qmat
+
+// Emits the quantization matrices the VOL actually loaded (intra and/or
+// non-intra) as workload items via viddec_fw_mp4_insert_qmat().
+// NOTE(review): when both matrices are present, the status of the first
+// insert is overwritten by the second.
+uint32_t viddec_fw_mp4_insert_inversequant_workitem(void *parent, mp4_VOLQuant_mat_t *qmat)
+{
+    uint32_t result = MP4_STATUS_OK;
+
+    if(qmat->load_intra_quant_mat)
+    {
+        result = viddec_fw_mp4_insert_qmat(parent, true, (uint32_t *) &(qmat->intra_quant_mat));
+    }
+
+    if(qmat->load_nonintra_quant_mat)
+    {
+        result = viddec_fw_mp4_insert_qmat(parent, false, (uint32_t *) &(qmat->nonintra_quant_mat));
+    }
+
+    return result;
+} // viddec_fw_mp4_insert_inversequant_workitem
+
+// Appends a zero-filled past-reference-frame descriptor item to the
+// workload and returns the append status.
+uint32_t viddec_fw_mp4_insert_past_frame_workitem(void *parent)
+{
+    viddec_workload_item_t wi;
+
+    wi.vwi_type                   = VIDDEC_WORKLOAD_MP4_PAST_FRAME;
+    wi.ref_frame.reference_id     = 0;
+    wi.ref_frame.luma_phys_addr   = 0;
+    wi.ref_frame.chroma_phys_addr = 0;
+
+    return viddec_pm_append_workitem(parent, &wi);
+} // viddec_fw_mp4_insert_past_frame_workitem
+
+// Appends a zero-filled future-reference-frame descriptor item to the
+// workload and returns the append status.
+uint32_t viddec_fw_mp4_insert_future_frame_workitem(void *parent)
+{
+    viddec_workload_item_t wi;
+
+    wi.vwi_type                   = VIDDEC_WORKLOAD_MP4_FUTURE_FRAME;
+    wi.ref_frame.reference_id     = 0;
+    wi.ref_frame.luma_phys_addr   = 0;
+    wi.ref_frame.chroma_phys_addr = 0;
+
+    return viddec_pm_append_workitem(parent, &wi);
+} // viddec_fw_mp4_insert_future_frame_workitem
+
+// Appends a reference-frame reorder item and returns the append status.
+uint32_t viddec_fw_mp4_insert_reorder_workitem(void *parent)
+{
+    uint32_t result = MP4_STATUS_OK;
+    viddec_workload_item_t wi;
+
+    // Move frame at location 1 of the reference table to location 0
+    // (each byte of ref_reorder_00010203 appears to name the source slot
+    // for destination slots 0..3, so 0x01010203 maps slot 1 into slot 0).
+    wi.vwi_type = VIDDEC_WORKLOAD_REFERENCE_FRAME_REORDER;
+    wi.ref_reorder.ref_table_offset = 0;
+    wi.ref_reorder.ref_reorder_00010203 = 0x01010203;
+    wi.ref_reorder.ref_reorder_04050607 = 0x04050607;
+
+    result = viddec_pm_append_workitem(parent, &wi);
+
+    return result;
+} // viddec_fw_mp4_insert_reorder_workitem
+
+// Emits the MP4 parser generated workload: frame attributes, VOL/VOP
+// items, sprite trajectory, quantization matrices, short-video-header
+// item (when applicable), reference management items, and pixel ES data.
+// NOTE(review): each call below overwrites 'result'; only the status of
+// the final viddec_pm_append_pixeldata() reaches the caller.
+uint32_t viddec_fw_mp4_emit_workload(void *parent, void *ctxt)
+{
+    uint32_t result = 0;
+    viddec_mp4_parser_t *parser = (viddec_mp4_parser_t *) ctxt;
+    viddec_workload_t *wl = viddec_pm_get_header(parent);
+
+    result = viddec_fw_mp4_populate_attr(wl, parser);
+    result = viddec_fw_mp4_insert_vol_workitem(parent, parser);
+    result = viddec_fw_mp4_insert_vop_workitem(parent, parser);
+    result = viddec_fw_mp4_insert_sprite_workitem(parent, parser);
+    result = viddec_fw_mp4_insert_inversequant_workitem(parent, &(parser->info.VisualObject.VideoObject.quant_mat_info));
+
+    if(parser->info.VisualObject.VideoObject.short_video_header)
+        result = viddec_fw_mp4_insert_vpsh_workitem(parent, parser);
+
+    // An uncoded VOP is flagged as a skipped frame.
+    if(!parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_coded)
+        wl->is_reference_frame |= WORKLOAD_SKIPPED_FRAME;
+
+    // Send reference re-order tag for all reference frame types
+    if (parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_coding_type != MP4_VOP_TYPE_B)
+    {
+        result = viddec_fw_mp4_insert_reorder_workitem(parent);
+    }
+
+    // Handle vop_coding_type based information
+    switch(parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_coding_type)
+    {
+        case MP4_VOP_TYPE_B:
+            result = viddec_fw_mp4_insert_bvop_workitem(parent, parser);
+            result = viddec_fw_mp4_insert_past_frame_workitem(parent);
+            result = viddec_fw_mp4_insert_future_frame_workitem(parent);
+            break;
+        case MP4_VOP_TYPE_P:
+        case MP4_VOP_TYPE_S:
+            result = viddec_fw_mp4_insert_past_frame_workitem(parent);
+            // Deliberate fall-thru to type I
+        case MP4_VOP_TYPE_I:
+            wl->is_reference_frame |= WORKLOAD_REFERENCE_FRAME | (1 & WORKLOAD_REFERENCE_FRAME_BMASK);
+            // Swap reference information
+            parser->ref_frame[VIDDEC_MP4_INDX_2] = parser->ref_frame[VIDDEC_MP4_INDX_1];
+            parser->ref_frame[VIDDEC_MP4_INDX_1] = parser->ref_frame[VIDDEC_MP4_INDX_0];
+            break;  // Fixed: a second, unreachable duplicate 'break;' followed here.
+        default:
+            break;
+    } // switch on vop_coding_type
+
+    result = viddec_pm_append_pixeldata(parent);
+
+    return result;
+} // viddec_fw_mp4_emit_workload
+
diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_decodevideoobjectplane.c b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_decodevideoobjectplane.c
new file mode 100644
index 0000000..4ae9135
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_decodevideoobjectplane.c
@@ -0,0 +1,98 @@
+#include "viddec_mp4_decodevideoobjectplane.h"
+
/* Derive the presentation time (vop_time) of the current VOP and maintain the
 * layer's running clocks: vop_sync_time / vop_sync_time_b (modulo-time bases
 * for anchor and B-VOPs) plus pastFrameTime / futureFrameTime and the TRB/TRD
 * intervals used by B-VOP direct mode.
 * Always returns MP4_STATUS_OK; the commented-out code referencing the
 * vo->currentFrame/pastFrame/futureFrame members is retained from the
 * reference implementation for context. */
mp4_Status_t mp4_DecodeVideoObjectPlane(mp4_Info_t* pInfo)
{
    mp4_Status_t status = MP4_STATUS_OK;
    uint32_t vop_time=0;
//  mp4_VisualObject_t *vo = &(pInfo->VisualObject);
    mp4_VideoObjectLayer_t *vol = &(pInfo->VisualObject.VideoObject);
    mp4_GroupOfVideoObjectPlane_t *gvop = &(pInfo->VisualObject.VideoObject.GroupOfVideoObjectPlane);
    mp4_VideoObjectPlane_t *vop = &(pInfo->VisualObject.VideoObject.VideoObjectPlane);

    // set VOP time
    if (vol->short_video_header)
    {
        // Short video header: time advances by a fixed 1001 units per
        // 8-bit temporal_reference tick (29.97 fps clock).
        vop_time = vol->vop_sync_time +
                   pInfo->VisualObject.VideoObject.VideoObjectPlaneH263.temporal_reference * 1001;

//      if (vo->currentFrame.time > vop_time)
        {
            // NOTE(review): the wrap guard above is commented out, so this
            // 256-tick (temporal_reference wrap) adjustment is applied
            // unconditionally — confirm that is intended.
            vol->vop_sync_time += 256 * 1001;
            vop_time += 256 * 1001;
        }
    }
    else
    {
        if (vop->vop_coding_type == MP4_VOP_TYPE_B)
        {
            // B-VOPs are timed against the anchor clock snapshot (sync_time_b).
            vop_time = vol->vop_sync_time_b + vop->modulo_time_base * vol->vop_time_increment_resolution + vop->vop_time_increment;
        }
        else
        {
            // A GOV header resets the anchor clock forward.
            if (gvop->time_base > vol->vop_sync_time)
                vol->vop_sync_time = gvop->time_base;

            vop_time = vol->vop_sync_time + vop->modulo_time_base * vol->vop_time_increment_resolution + vop->vop_time_increment;

            // Keep the B-VOP clock at least up to the anchor clock.
            if (vol->vop_sync_time_b < vol->vop_sync_time)
                vol->vop_sync_time_b = vol->vop_sync_time;

            // modulo_time_base ticks advance the anchor clock to the start
            // of the second containing this VOP.
            if (vop->modulo_time_base != 0)
                vol->vop_sync_time = vop_time - vop->vop_time_increment;
        }
    }

    if(vop->vop_coded)
    {
        switch (vop->vop_coding_type)
        {
            case MP4_VOP_TYPE_S:
                // Only GMC sprites behave like anchor frames here.
                if (vol->sprite_enable != MP4_SPRITE_GMC)
                    break;
                // Deliberate fall-through from this case
            case MP4_VOP_TYPE_I:
            case MP4_VOP_TYPE_P:
                // set past and future time for B-VOP
                vol->pastFrameTime = vol->futureFrameTime;
                vol->futureFrameTime = vop_time;
                break;
            default:
                break;
        }
    }

    if (vop->vop_coded)
//      || (vop_time != vo->currentFrame.time && vop_time != vo->pastFrame.time && vop_time != vo->futureFrame.time) )
    {
        if(vop->vop_coding_type == MP4_VOP_TYPE_B)
        {
            if (!vol->Tframe)
                vol->Tframe = (int) (vop_time); // - vo->pastFrame.time);

            // NOTE(review): this inner vop_coded check is redundant while the
            // disjunct above stays commented out.
            if (vop->vop_coded)
            {
                // TRB = distance from past anchor, TRD = anchor-to-anchor
                // distance; both used by B-VOP direct-mode MV scaling.
                vol->TRB = (int) (vop_time - vol->pastFrameTime);
                vol->TRD = (int) (vol->futureFrameTime - vol->pastFrameTime);

                // defense from bad streams when B-VOPs are before Past and/or Future
                if (vol->TRB <= 0)
                    vol->TRB = 1;

                if (vol->TRD <= 0)
                    vol->TRD = 2;

                if (vol->TRD <= vol->TRB)
                {
                    vol->TRB = 1;
                    vol->TRD = 2;
                }

                if (vol->Tframe >= vol->TRD)
                    vol->Tframe = vol->TRB;
            }
        }
    }

    return status;
} // mp4_DecodeVideoObjectPlane
+
diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_decodevideoobjectplane.h b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_decodevideoobjectplane.h
new file mode 100644
index 0000000..2cb3c87
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_decodevideoobjectplane.h
@@ -0,0 +1,11 @@
#ifndef VIDDEC_MP4_DECODEVIDEOOBJECTPLANE_H
#define VIDDEC_MP4_DECODEVIDEOOBJECTPLANE_H
#include "viddec_fw_debug.h"
#include "viddec_parser_ops.h"
#include "viddec_mp4_parse.h"

/* Derives VOP timing (sync clocks, past/future frame times, TRB/TRD) for the
 * current video object plane held in pInfo. Returns MP4_STATUS_OK. */
mp4_Status_t mp4_DecodeVideoObjectPlane(mp4_Info_t *pInfo);

//void mp4_copy_info_to_dmem(mp4_Info_t *pInfo, mp4_MBHWInterface *ptr_parameters);

#endif
diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_parse.c b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_parse.c
new file mode 100644
index 0000000..b4cc302
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_parse.c
@@ -0,0 +1,278 @@
+#include "viddec_fw_debug.h"
+#include "viddec_parser_ops.h"
+#include "viddec_mp4_parse.h"
+#include "viddec_mp4_decodevideoobjectplane.h"
+#include "viddec_mp4_shortheader.h"
+#include "viddec_mp4_videoobjectlayer.h"
+#include "viddec_mp4_videoobjectplane.h"
+#include "viddec_mp4_visualobject.h"
+
+extern uint32_t viddec_parse_sc_mp4(void *in, void *pcxt, void *sc_state);
+
+void viddec_mp4_get_context_size(viddec_parser_memory_sizes_t *size)
+{
+ /* Should return size of my structure */
+ size->context_size = sizeof(viddec_mp4_parser_t);
+ size->persist_size = 0;
+ return;
+} // viddec_mp4_get_context_size
+
/* Decide whether the current workload (frame) is complete given the next
 * start code, and report codec-specific error flags for it.
 * Returns VIDDEC_PARSE_FRMDONE when next_sc marks a frame boundary (or EOS /
 * discontinuity forces completion), otherwise VIDDEC_PARSE_SUCESS.
 * On frame completion, bitstream_error is reduced to header-level errors and
 * sc_seen keeps only the VOL bit, so header state survives into the next frame. */
uint32_t viddec_mp4_wkld_done(void *parent, void *ctxt, uint32_t next_sc, uint32_t *codec_specific_errors)
{
    viddec_mp4_parser_t *parser = (viddec_mp4_parser_t *) ctxt;
    int result = VIDDEC_PARSE_SUCESS;
    uint8_t frame_boundary = false;
    uint8_t force_frame_complete = false;

    //DEB("entering is_wkld_done: next_sc: 0x%x, sc_seen: %d\n", next_sc, parser->sc_seen);

    parent = parent;  // unused parameter; self-assignment silences the warning

    // VS, VO, VOL, VOP or GVOP start codes indicate frame boundary.
    // (next_sc & 0xFC) == 0x80 matches the short-video-header picture start.
    frame_boundary = ( (MP4_SC_VISUAL_OBJECT_SEQUENCE == next_sc) ||
                       (MP4_SC_VISUAL_OBJECT == next_sc) ||
                       ((MP4_SC_VIDEO_OBJECT_LAYER_MIN <= next_sc) && (next_sc <= MP4_SC_VIDEO_OBJECT_LAYER_MAX)) ||
                       (next_sc <= MP4_SC_VIDEO_OBJECT_MAX) ||
                       (MP4_SC_VIDEO_OBJECT_PLANE == next_sc) ||
                       ((SHORT_THIRD_STARTCODE_BYTE & 0xFC) == (next_sc & 0xFC)) ||
                       (MP4_SC_GROUP_OF_VOP == next_sc) );

    // EOS and discontinuity should force workload completion.
    force_frame_complete = ((VIDDEC_PARSE_EOS == next_sc) || (VIDDEC_PARSE_DISCONTINUITY == next_sc));

    if(frame_boundary | force_frame_complete)
    {
        *codec_specific_errors = 0;

        // Frame is considered complete and without errors, if a VOL was received since startup and
        // if a VOP was received for this workload (or a short-video-header picture was seen).
        if (!((parser->sc_seen & MP4_SC_SEEN_VOL) && (parser->sc_seen & MP4_SC_SEEN_VOP)) && !(parser->sc_seen & MP4_SC_SEEN_SVH))
            *codec_specific_errors |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE;

        /*
        This is the strategy for error detection.
        Errors in any field needed by the firmware (parser/decoder) are treated as non-decodable.
        Errors in other fields will be considered decodable.
        Defaults/alternate strategies will be considered on a case-by-case basis as customer content is seen.

        ERROR_TYPE    | PARSING       | INVALID/UNSUPPORTED  | BS = Bitstream error
        -----------------------------------------------------------------            UNSUP = Un-supported
        DFLT_PRESENT  | YES  | NO     | YES       | NO       |                        ND = Non-decodable
        COMPONENT USED|      |        |           |          |                        DFLT = Populate defaults
        -----------------------------------------------------------------
        FIRMWARE      | BS+ND| BS+ND  | UNSUP+ND  | UNSUP+ND |
        DRIVER/USER   | BS+DFLT| BS   | UNSUP     | UNSUP    |
        NONE          | BS   | BS     | UNSUP     | UNSUP    |
                      |      |        | Continue Parsing     |
        */
        if((parser->bitstream_error & MP4_BS_ERROR_HDR_NONDEC) || (parser->bitstream_error & MP4_BS_ERROR_FRM_NONDEC))
            *codec_specific_errors |= (VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE | VIDDEC_FW_WORKLOAD_ERR_MISSING_DMEM);

        if((parser->bitstream_error & MP4_BS_ERROR_HDR_UNSUP) || (parser->bitstream_error & MP4_BS_ERROR_FRM_UNSUP))
            *codec_specific_errors |= VIDDEC_FW_WORKLOAD_ERR_UNSUPPORTED;

        if((parser->bitstream_error & MP4_BS_ERROR_HDR_PARSE) || (parser->bitstream_error & MP4_BS_ERROR_FRM_PARSE))
            *codec_specific_errors |= VIDDEC_FW_WORKLOAD_ERR_BITSTREAM_ERROR;

        // Carry only header-scope state into the next workload.
        parser->bitstream_error &= MP4_HDR_ERROR_MASK;
        parser->sc_seen &= MP4_SC_SEEN_VOL;
        result = VIDDEC_PARSE_FRMDONE;
    }
    //DEB("exiting is_wkld_done: next_sc: 0x%x, sc_seen: %d, err: %d, fr_bnd:%d, force:%d\n",
    //    next_sc, parser->sc_seen, *codec_specific_errors, frame_boundary, force_frame_complete);

    return result;
} // viddec_mp4_wkld_done
+
+void viddec_mp4_init(void *ctxt, uint32_t *persist_mem, uint32_t preserve)
+{
+ viddec_mp4_parser_t *parser = (viddec_mp4_parser_t *) ctxt;
+
+ persist_mem = persist_mem;
+ parser->is_frame_start = false;
+ parser->prev_sc = MP4_SC_INVALID;
+ parser->current_sc = MP4_SC_INVALID;
+ parser->cur_sc_prefix = false;
+ parser->next_sc_prefix = false;
+ parser->ignore_scs = false;
+
+ if(preserve)
+ {
+ // Need to maintain information till VOL
+ parser->sc_seen &= MP4_SC_SEEN_VOL;
+ parser->bitstream_error &= MP4_HDR_ERROR_MASK;
+
+ // Reset only frame related data
+ memset(&(parser->info.VisualObject.VideoObject.VideoObjectPlane), 0, sizeof(mp4_VideoObjectPlane_t));
+ memset(&(parser->info.VisualObject.VideoObject.VideoObjectPlaneH263), 0, sizeof(mp4_VideoObjectPlaneH263));
+ }
+ else
+ {
+ parser->sc_seen = MP4_SC_SEEN_INVALID;
+ parser->bitstream_error = MP4_BS_ERROR_NONE;
+ memset(&(parser->info), 0, sizeof(mp4_Info_t));
+ }
+
+ return;
+} // viddec_mp4_init
+
+static uint32_t viddec_mp4_decodevop_and_emitwkld(void *parent, void *ctxt)
+{
+ int status = MP4_STATUS_OK;
+ viddec_mp4_parser_t *cxt = (viddec_mp4_parser_t *)ctxt;
+
+ status = mp4_DecodeVideoObjectPlane(&(cxt->info));
+
+#ifndef VBP
+ status = viddec_fw_mp4_emit_workload(parent, ctxt);
+#endif
+
+ return status;
+} // viddec_mp4_decodevop_and_emitwkld
+
+uint32_t viddec_mp4_parse(void *parent, void *ctxt)
+{
+ uint32_t sc=0;
+ viddec_mp4_parser_t *cxt;
+ uint8_t is_svh=0;
+ int32_t getbits=0;
+ int32_t status = 0;
+
+ cxt = (viddec_mp4_parser_t *)ctxt;
+ is_svh = (cxt->cur_sc_prefix) ? false: true;
+ if((getbits = viddec_pm_peek_bits(parent, &sc, 32)) == -1)
+ {
+ DEB("Start code not found\n");
+ return VIDDEC_PARSE_ERROR;
+ }
+
+ if(!is_svh)
+ {
+ viddec_pm_get_bits(parent, &sc, 32);
+ sc = sc & 0xFF;
+ cxt->current_sc = sc;
+ cxt->current_sc |= 0x100;
+ DEB("current_sc=0x%.8X, prev_sc=0x%x\n", sc, cxt->prev_sc);
+
+ switch(sc)
+ {
+ case MP4_SC_VISUAL_OBJECT_SEQUENCE:
+ {
+ status = mp4_Parse_VisualSequence(parent, cxt);
+ cxt->prev_sc = MP4_SC_VISUAL_OBJECT_SEQUENCE;
+ DEB("MP4_VISUAL_OBJECT_SEQUENCE_SC: \n");
+ break;
+ }
+ case MP4_SC_VISUAL_OBJECT_SEQUENCE_EC:
+ {/* Not required to do anything */
+ break;
+ }
+ case MP4_SC_USER_DATA:
+ { /* Copy userdata to user-visible buffer (EMIT) */
+ status = mp4_Parse_UserData(parent, cxt);
+ DEB("MP4_USER_DATA_SC: \n");
+ break;
+ }
+ case MP4_SC_GROUP_OF_VOP:
+ {
+ status = mp4_Parse_GroupOfVideoObjectPlane(parent, cxt);
+ cxt->prev_sc = MP4_SC_GROUP_OF_VOP;
+ DEB("MP4_GROUP_OF_VOP_SC:0x%.8X\n", status);
+ break;
+ }
+ case MP4_SC_VIDEO_SESSION_ERROR:
+ {/* Not required to do anything?? */
+ break;
+ }
+ case MP4_SC_VISUAL_OBJECT:
+ {
+ status = mp4_Parse_VisualObject(parent, cxt);
+ cxt->prev_sc = MP4_SC_VISUAL_OBJECT;
+ DEB("MP4_VISUAL_OBJECT_SC: status=%.8X\n", status);
+ break;
+ }
+ case MP4_SC_VIDEO_OBJECT_PLANE:
+ {
+ /* We must decode the VOP Header information, it does not end on a byte boundary, so we need to emit
+ a starting bit offset after parsing the header. */
+ status = mp4_Parse_VideoObjectPlane(parent, cxt);
+ status = viddec_mp4_decodevop_and_emitwkld(parent, cxt);
+ // TODO: Fix this for interlaced
+ cxt->is_frame_start = true;
+ cxt->sc_seen |= MP4_SC_SEEN_VOP;
+
+ DEB("MP4_VIDEO_OBJECT_PLANE_SC: status=0x%.8X\n", status);
+ break;
+ }
+ case MP4_SC_STUFFING:
+ {
+ break;
+ }
+ default:
+ {
+ if( (sc >= MP4_SC_VIDEO_OBJECT_LAYER_MIN) && (sc <= MP4_SC_VIDEO_OBJECT_LAYER_MAX) )
+ {
+ status = mp4_Parse_VideoObjectLayer(parent, cxt);
+ cxt->sc_seen = MP4_SC_SEEN_VOL;
+ cxt->prev_sc = MP4_SC_VIDEO_OBJECT_LAYER_MIN;
+ DEB("MP4_VIDEO_OBJECT_LAYER_MIN_SC:status=0x%.8X\n", status);
+ sc = MP4_SC_VIDEO_OBJECT_LAYER_MIN;
+ }
+ // sc is unsigned and will be >= 0, so no check needed for sc >= MP4_SC_VIDEO_OBJECT_MIN
+ else if(sc <= MP4_SC_VIDEO_OBJECT_MAX)
+ {
+ // If there is more data, it is short video header, else the next start code is expected to be VideoObjectLayer
+ getbits = viddec_pm_get_bits(parent, &sc, 22);
+ if(getbits != -1)
+ {
+ cxt->current_sc = sc;
+ status = mp4_Parse_VideoObject_svh(parent, cxt);
+ status = viddec_mp4_decodevop_and_emitwkld(parent, cxt);
+ cxt->sc_seen = MP4_SC_SEEN_SVH;
+ cxt->is_frame_start = true;
+ DEB("MP4_SCS_SVH: status=0x%.8X 0x%.8X %.8X\n", status, cxt->current_sc, sc);
+ DEB("MP4_VIDEO_OBJECT_MIN_SC:status=0x%.8X\n", status);
+ }
+ }
+ else
+ {
+ DEB("UNKWON Cod:0x%08X\n", sc);
+ }
+ }
+ break;
+ }
+ }
+ else
+ {
+ viddec_pm_get_bits(parent, &sc, 22);
+ cxt->current_sc = sc;
+ DEB("current_sc=0x%.8X, prev_sc=0x%x\n", sc, cxt->prev_sc);
+ status = mp4_Parse_VideoObject_svh(parent, cxt);
+ status = viddec_mp4_decodevop_and_emitwkld(parent, cxt);
+ cxt->sc_seen = MP4_SC_SEEN_SVH;
+ cxt->is_frame_start = true;
+ DEB("SVH: MP4_SCS_SVH: status=0x%.8X 0x%.8X %.8X\n", status, cxt->current_sc, sc);
+ }
+
+ // Current sc becomes the previous sc
+ cxt->prev_sc = sc;
+
+ return VIDDEC_PARSE_SUCESS;
+} // viddec_mp4_parse
+
+uint32_t viddec_mp4_is_frame_start(void *ctxt)
+{
+ viddec_mp4_parser_t *parser = (viddec_mp4_parser_t *)ctxt;
+ return parser->is_frame_start;
+} // viddec_mp4_is_frame_start
+
+void viddec_mp4_get_ops(viddec_parser_ops_t *ops)
+{
+ ops->parse_syntax = viddec_mp4_parse;
+ ops->get_cxt_size = viddec_mp4_get_context_size;
+ ops->is_wkld_done = viddec_mp4_wkld_done;
+ ops->parse_sc = viddec_parse_sc_mp4;
+ ops->is_frame_start = viddec_mp4_is_frame_start;
+ ops->init = viddec_mp4_init;
+ return;
+} // viddec_mp4_get_ops
diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_parse.h b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_parse.h
new file mode 100644
index 0000000..12447a4
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_parse.h
@@ -0,0 +1,527 @@
#ifndef VIDDEC_MP4_PARSE_H
#define VIDDEC_MP4_PARSE_H

#include "viddec_fw_debug.h"
#include "viddec_fw_mp4.h"

/* Macros for MP4 start code detection */
#define FIRST_STARTCODE_BYTE        0x00
#define SECOND_STARTCODE_BYTE       0x00
#define THIRD_STARTCODE_BYTE        0x01
#define SHORT_THIRD_STARTCODE_BYTE  0x80  /* short-video-header (H.263) prefix */
#define SC_BYTE_MASK0               0x00ff0000
#define SC_BYTE_MASK1               0x000000ff

/* status codes (bit flags, so multiple conditions can be OR-ed) */
typedef enum
{
    MP4_STATUS_OK              =  0,       /* Success */
    MP4_STATUS_PARSE_ERROR     = (1 << 0), /* Invalid syntax */
    MP4_STATUS_NOTSUPPORT      = (1 << 1), /* unsupported feature */
    MP4_STATUS_REQD_DATA_ERROR = (1 << 2), /* supported data either invalid or missing */
} mp4_Status_t;

/* feature codes */
typedef enum
{
    MP4_VOP_FEATURE_DEFAULT = 0, // Default VOP features, no code image update needed
    MP4_VOP_FEATURE_SVH     = 1, // VOP has Short Video Header
    MP4_VOP_FEATURE_DP      = 2  // VOP is Data Partitioned
} mp4_Vop_feature;
+
/* MPEG-4 start code values: Table 6-3 */
typedef enum
{
    MP4_SC_VIDEO_OBJECT_MIN          = 0x00,
    MP4_SC_VIDEO_OBJECT_MAX          = 0x1F,
    MP4_SC_VIDEO_OBJECT_LAYER_MIN    = 0x20,
    MP4_SC_VIDEO_OBJECT_LAYER_MAX    = 0x2F,
    MP4_SC_FGS_BP_MIN                = 0x40, /* Unsupported */
    MP4_SC_FGS_BP_MAX                = 0x5F, /* Unsupported */
    MP4_SC_VISUAL_OBJECT_SEQUENCE    = 0xB0,
    MP4_SC_VISUAL_OBJECT_SEQUENCE_EC = 0xB1,
    MP4_SC_USER_DATA                 = 0xB2,
    MP4_SC_GROUP_OF_VOP              = 0xB3,
    MP4_SC_VIDEO_SESSION_ERROR       = 0xB4,
    MP4_SC_VISUAL_OBJECT             = 0xB5,
    MP4_SC_VIDEO_OBJECT_PLANE        = 0xB6,
    MP4_SC_SLICE                     = 0xB7, /* Unsupported */
    MP4_SC_EXTENSION                 = 0xB8, /* Unsupported */
    MP4_SC_FGS_VOP                   = 0xB9, /* Unsupported */
    MP4_SC_FBA_OBJECT                = 0xBA, /* Unsupported */
    MP4_SC_FBA_OBJECT_PLANE          = 0xBB, /* Unsupported */
    MP4_SC_MESH_OBJECT               = 0xBC, /* Unsupported */
    MP4_SC_MESH_OBJECT_PLANE         = 0xBD, /* Unsupported */
    MP4_SC_STILL_TEXTURE_OBJECT      = 0xBE, /* Unsupported */
    MP4_SC_TEXTURE_SPATIAL_LAYER     = 0xBF, /* Unsupported */
    MP4_SC_TEXTURE_SNR_LAYER         = 0xC0, /* Unsupported */
    MP4_SC_TEXTURE_TILE              = 0xC1, /* Unsupported */
    MP4_SC_TEXTURE_SHAPE_LAYER       = 0xC2, /* Unsupported */
    MP4_SC_STUFFING                  = 0xC3,
    MP4_SC_SYTEM_MIN                 = 0xC6, /* Unsupported */
    MP4_SC_SYTEM_MAX                 = 0xFF, /* Unsupported */
    MP4_SC_INVALID                   = 0x100, /* Invalid (outside the 8-bit start code range) */
}mp4_start_code_values_t;
+
/* MPEG-4 code values
   ISO/IEC 14496-2:2004 table 6-6 (visual_object_type) */
enum
{
    MP4_VISUAL_OBJECT_TYPE_VIDEO   = 1,
    MP4_VISUAL_OBJECT_TYPE_TEXTURE = 2,
    MP4_VISUAL_OBJECT_TYPE_MESH    = 3,
    MP4_VISUAL_OBJECT_TYPE_FBA     = 4,
    MP4_VISUAL_OBJECT_TYPE_3DMESH  = 5
};

/* ISO/IEC 14496-2:2004 table 6-7 (video_format) */
enum
{
    MP4_VIDEO_FORMAT_COMPONENT   = 0,
    MP4_VIDEO_FORMAT_PAL         = 1,
    MP4_VIDEO_FORMAT_NTSC        = 2,
    MP4_VIDEO_FORMAT_SECAM       = 3,
    MP4_VIDEO_FORMAT_MAC         = 4,
    MP4_VIDEO_FORMAT_UNSPECIFIED = 5
};

/* ISO/IEC 14496-2:2004 table 6-8..10 (colour description) */
enum
{
    MP4_VIDEO_COLORS_FORBIDDEN         = 0,
    MP4_VIDEO_COLORS_ITU_R_BT_709      = 1,
    MP4_VIDEO_COLORS_UNSPECIFIED       = 2,
    MP4_VIDEO_COLORS_RESERVED          = 3,
    MP4_VIDEO_COLORS_ITU_R_BT_470_2_M  = 4,
    MP4_VIDEO_COLORS_ITU_R_BT_470_2_BG = 5,
    MP4_VIDEO_COLORS_SMPTE_170M        = 6,
    MP4_VIDEO_COLORS_SMPTE_240M        = 7,
    MP4_VIDEO_COLORS_GENERIC_FILM      = 8
};

/* ISO/IEC 14496-2:2004 table 6-11 (video_object_type_indication) */
enum
{
    MP4_VIDEO_OBJECT_TYPE_SIMPLE                     = 1,
    MP4_VIDEO_OBJECT_TYPE_SIMPLE_SCALABLE            = 2,
    MP4_VIDEO_OBJECT_TYPE_CORE                       = 3,
    MP4_VIDEO_OBJECT_TYPE_MAIN                       = 4,
    MP4_VIDEO_OBJECT_TYPE_NBIT                       = 5,
    MP4_VIDEO_OBJECT_TYPE_2DTEXTURE                  = 6,
    MP4_VIDEO_OBJECT_TYPE_2DMESH                     = 7,
    MP4_VIDEO_OBJECT_TYPE_SIMPLE_FACE                = 8,
    MP4_VIDEO_OBJECT_TYPE_STILL_SCALABLE_TEXTURE     = 9,
    MP4_VIDEO_OBJECT_TYPE_ADVANCED_REAL_TIME_SIMPLE  = 10,
    MP4_VIDEO_OBJECT_TYPE_CORE_SCALABLE              = 11,
    MP4_VIDEO_OBJECT_TYPE_ADVANCED_CODING_EFFICIENCY = 12,
    MP4_VIDEO_OBJECT_TYPE_ADVANCED_SCALABLE_TEXTURE  = 13,
    MP4_VIDEO_OBJECT_TYPE_SIMPLE_FBA                 = 14,
    MP4_VIDEO_OBJECT_TYPE_SIMPLE_STUDIO              = 15,
    MP4_VIDEO_OBJECT_TYPE_CORE_STUDIO                = 16,
    MP4_VIDEO_OBJECT_TYPE_ADVANCED_SIMPLE            = 17,
    MP4_VIDEO_OBJECT_TYPE_FINE_GRANULARITY_SCALABLE  = 18
};

/* ISO/IEC 14496-2:2004 table 6.17 (maximum defined video_object_layer_shape_extension) */
#define MP4_SHAPE_EXT_NUM 13

/* ISO/IEC 14496-2:2004 table 6-14 (aspect_ratio_info) */
enum
{
    MP4_ASPECT_RATIO_FORBIDDEN = 0,
    MP4_ASPECT_RATIO_1_1       = 1,
    MP4_ASPECT_RATIO_12_11     = 2,
    MP4_ASPECT_RATIO_10_11     = 3,
    MP4_ASPECT_RATIO_16_11     = 4,
    MP4_ASPECT_RATIO_40_33     = 5,
    MP4_ASPECT_RATIO_EXTPAR    = 15  /* extended PAR: explicit par_width/par_height follow */
};

/* ISO/IEC 14496-2:2004 table 6-15 (chroma_format) */
#define MP4_CHROMA_FORMAT_420 1

/* ISO/IEC 14496-2:2004 table 6-16 (video_object_layer_shape) */
enum
{
    MP4_SHAPE_TYPE_RECTANGULAR = 0,
    MP4_SHAPE_TYPE_BINARY      = 1,
    MP4_SHAPE_TYPE_BINARYONLY  = 2,
    MP4_SHAPE_TYPE_GRAYSCALE   = 3
};

/* ISO/IEC 14496-2:2004 table 6-19 (sprite_enable) */
#define MP4_SPRITE_STATIC 1
#define MP4_SPRITE_GMC    2

/* ISO/IEC 14496-2:2004 table 6-24 (vop_coding_type) */
enum
{
    MP4_VOP_TYPE_I = 0,  /* intra */
    MP4_VOP_TYPE_P = 1,  /* predicted */
    MP4_VOP_TYPE_B = 2,  /* bidirectionally predicted */
    MP4_VOP_TYPE_S = 3,  /* sprite (static or GMC) */
};

/* ISO/IEC 14496-2:2004 table 6-26 (sprite_transmit_mode) */
enum
{
    MP4_SPRITE_TRANSMIT_MODE_STOP   = 0,
    MP4_SPRITE_TRANSMIT_MODE_PIECE  = 1,
    MP4_SPRITE_TRANSMIT_MODE_UPDATE = 2,
    MP4_SPRITE_TRANSMIT_MODE_PAUSE  = 3
};

/* ISO/IEC 14496-2:2004 table 7-3 (binary alpha block types) */
enum
{
    MP4_BAB_TYPE_MVDSZ_NOUPDATE  = 0,
    MP4_BAB_TYPE_MVDSNZ_NOUPDATE = 1,
    MP4_BAB_TYPE_TRANSPARENT     = 2,
    MP4_BAB_TYPE_OPAQUE          = 3,
    MP4_BAB_TYPE_INTRACAE        = 4,
    MP4_BAB_TYPE_MVDSZ_INTERCAE  = 5,
    MP4_BAB_TYPE_MVDSNZ_INTERCAE = 6
};

/* Resync markers used by data-partitioned VOPs */
#define MP4_DC_MARKER 0x6B001 // 110 1011 0000 0000 0001
#define MP4_MV_MARKER 0x1F001 //   1 1111 0000 0000 0001


/* ISO/IEC 14496-2:2004 table G.1 (profile_and_level_indication) */
enum
{
    MP4_SIMPLE_PROFILE_LEVEL_1                     = 0x01,
    MP4_SIMPLE_PROFILE_LEVEL_2                     = 0x02,
    MP4_SIMPLE_PROFILE_LEVEL_3                     = 0x03,
    MP4_SIMPLE_PROFILE_LEVEL_4a                    = 0x04,
    MP4_SIMPLE_PROFILE_LEVEL_5                     = 0x05,
    MP4_SIMPLE_PROFILE_LEVEL_6                     = 0x06,
    MP4_SIMPLE_PROFILE_LEVEL_0                     = 0x08,
    MP4_CORE_PROFILE_LEVEL_1                       = 0x21,
    MP4_CORE_PROFILE_LEVEL_2                       = 0x22,
    MP4_MAIN_PROFILE_LEVEL_2                       = 0x32,
    MP4_MAIN_PROFILE_LEVEL_3                       = 0x33,
    MP4_MAIN_PROFILE_LEVEL_4                       = 0x34,
    MP4_ADVANCED_REAL_TIME_SIMPLE_PROFILE_LEVEL_1  = 0x91,
    MP4_ADVANCED_REAL_TIME_SIMPLE_PROFILE_LEVEL_2  = 0x92,
    MP4_ADVANCED_REAL_TIME_SIMPLE_PROFILE_LEVEL_3  = 0x93,
    MP4_ADVANCED_REAL_TIME_SIMPLE_PROFILE_LEVEL_4  = 0x94,
    MP4_ADVANCED_CODING_EFFICIENCY_PROFILE_LEVEL_1 = 0xB1,
    MP4_ADVANCED_CODING_EFFICIENCY_PROFILE_LEVEL_2 = 0xB2,
    MP4_ADVANCED_CODING_EFFICIENCY_PROFILE_LEVEL_3 = 0xB3,
    MP4_ADVANCED_CODING_EFFICIENCY_PROFILE_LEVEL_4 = 0xB4,
    MP4_ADVANCED_CORE_PROFILE_LEVEL_1              = 0xC1,
    MP4_ADVANCED_CORE_PROFILE_LEVEL_2              = 0xC2,
    MP4_ADVANCED_SIMPLE_PROFILE_LEVEL_0            = 0xF0,
    MP4_ADVANCED_SIMPLE_PROFILE_LEVEL_1            = 0xF1,
    MP4_ADVANCED_SIMPLE_PROFILE_LEVEL_2            = 0xF2,
    MP4_ADVANCED_SIMPLE_PROFILE_LEVEL_3            = 0xF3,
    MP4_ADVANCED_SIMPLE_PROFILE_LEVEL_4            = 0xF4,
    MP4_ADVANCED_SIMPLE_PROFILE_LEVEL_5            = 0xF5,
    MP4_ADVANCED_SIMPLE_PROFILE_LEVEL_3B           = 0xF7
};
+
/* Group Of Video Object Plane Info (GOV header: time code + gov flags).
 * NOTE: field order is ABI; dummy fields pad for alignment. */
typedef struct
{
    uint8_t  closed_gov;
    uint8_t  broken_link;
    uint8_t  time_code_hours;
    uint8_t  time_code_minutes;
    uint8_t  time_code_seconds;
    uint8_t  dummy1;
    uint16_t dummy2;
    uint32_t time_base;  /* GOV time code converted to layer time units */
} mp4_GroupOfVideoObjectPlane_t;


/* Video Object Plane Info (one frame's VOP header fields) */
typedef struct
{
    uint8_t  vop_coding_type;   /* MP4_VOP_TYPE_* */
    uint32_t modulo_time_base;  /* count of '1' bits before the marker */
    uint16_t vop_time_increment;
    uint8_t  vop_coded;         /* 0 => skipped (not coded) VOP */

    uint16_t vop_id;
    uint16_t vop_id_for_prediction;
    uint8_t  is_vop_id_for_prediction_indication;
    uint8_t  vop_rounding_type;
    uint8_t  vop_reduced_resolution;
    uint8_t  align_dummy;

    uint16_t vop_width;
    uint16_t vop_height;
    uint16_t vop_horizontal_mc_spatial_ref;
    uint16_t vop_vertical_mc_spatial_ref;

    uint8_t  background_composition;
    uint8_t  change_conv_ratio_disable;
    uint8_t  is_vop_constant_alpha;
    uint8_t  vop_constant_alpha_value;
    uint8_t  intra_dc_vlc_thr;
    uint8_t  top_field_first;
    uint8_t  alternate_vertical_scan_flag;
    uint8_t  sprite_transmit_mode;  /* MP4_SPRITE_TRANSMIT_MODE_* */

    int32_t  brightness_change_factor;
    uint16_t vop_quant;
    uint8_t  vop_fcode_forward;
    uint8_t  vop_fcode_backward;

    /* Global motion warping vectors (up to 4 warping points) */
    uint16_t warping_mv_code_du[4];
    uint16_t warping_mv_code_dv[4];

} mp4_VideoObjectPlane_t;

/* VOLControlParameters Info (optional vol_control_parameters block) */
typedef struct
{
    uint8_t  chroma_format;
    uint8_t  low_delay;
    uint8_t  vbv_parameters;
    uint8_t  align_dummy1;
    uint32_t bit_rate;
    uint32_t vbv_buffer_size;
    uint32_t vbv_occupancy;
} mp4_VOLControlParameters_t;

/* Video Object Plane with short header Info (H.263 baseline picture header) */
typedef struct _mp4_VideoObjectPlaneH263
{
    uint8_t  temporal_reference;
    uint8_t  split_screen_indicator;
    uint8_t  document_camera_indicator;
    uint8_t  full_picture_freeze_release;
    uint8_t  source_format;        /* 1..5: sub-QCIF..16CIF */
    uint8_t  picture_coding_type;  /* 0 = INTRA, 1 = INTER */
    uint8_t  vop_quant;
    uint16_t num_gobs_in_vop;
    uint16_t num_macroblocks_in_gob;
    uint8_t  num_rows_in_gob;

#if 0
    uint8_t  gob_number;
    int      gob_header_empty;
    int      gob_frame_id;
    int      quant_scale;
#endif
} mp4_VideoObjectPlaneH263;

/* Static/GMC sprite parameters from the VOL header */
typedef struct
{
    uint16_t sprite_width;
    uint16_t sprite_height;
    uint16_t sprite_left_coordinate;
    uint16_t sprite_top_coordinate;
    uint16_t no_of_sprite_warping_points;
    uint16_t sprite_warping_accuracy;
    uint16_t sprite_brightness_change;
    uint16_t low_latency_sprite_enable;
}mp4_VOLSpriteInfo_t;

/* Custom quantization matrices from the VOL header (used when quant_type == 1) */
typedef struct
{
    uint8_t  load_intra_quant_mat;
    uint8_t  load_nonintra_quant_mat;
    uint16_t align_dummy1;
    uint8_t  intra_quant_mat[64];
    uint8_t  nonintra_quant_mat[64];
}mp4_VOLQuant_mat_t;
+
/* Video Object Layer Info (sequence-level VOL header plus per-layer timing
 * state). NOTE: field order is ABI; do not reorder. */
typedef struct
{
    uint8_t  video_object_layer_id; /* Last 4 bits of start code. */
    uint8_t  short_video_header;    /* 1 when stream uses H.263 SVH syntax */
    uint8_t  random_accessible_vol;
    uint8_t  video_object_type_indication;

    uint8_t  is_object_layer_identifier;
    uint8_t  video_object_layer_verid;
    uint8_t  video_object_layer_priority;
    uint8_t  aspect_ratio_info;

    uint8_t  aspect_ratio_info_par_width;
    uint8_t  aspect_ratio_info_par_height;
    uint8_t  align_dummy1;
    uint8_t  is_vol_control_parameters;

    mp4_VOLControlParameters_t VOLControlParameters;

    uint8_t  video_object_layer_shape;  /* MP4_SHAPE_TYPE_* */
    uint16_t vop_time_increment_resolution;  /* ticks per second */
    uint8_t  vop_time_increment_resolution_bits;

    uint8_t  fixed_vop_rate;
    uint16_t fixed_vop_time_increment;
    uint16_t video_object_layer_width;
    uint16_t video_object_layer_height;
    uint8_t  interlaced;

    uint8_t  obmc_disable;
    uint8_t  sprite_enable;  /* 0 = none, MP4_SPRITE_STATIC or MP4_SPRITE_GMC */
    mp4_VOLSpriteInfo_t sprite_info;
    uint8_t  not_8_bit;
    uint8_t  quant_precision;

    uint8_t  bits_per_pixel;
    uint8_t  quant_type;  /* 0 = H.263 method, 1 = MPEG method (matrices) */
    mp4_VOLQuant_mat_t quant_mat_info;
    uint8_t  quarter_sample;
    uint8_t  complexity_estimation_disable;

    uint8_t  resync_marker_disable;
    uint8_t  data_partitioned;
    uint8_t  reversible_vlc;
    uint8_t  newpred_enable;

    uint8_t  reduced_resolution_vop_enable; // verid != 1
    uint8_t  scalability;
    uint8_t  low_latency_sprite_enable;

    mp4_GroupOfVideoObjectPlane_t GroupOfVideoObjectPlane;
    mp4_VideoObjectPlane_t        VideoObjectPlane;
    mp4_VideoObjectPlaneH263      VideoObjectPlaneH263;

    // for interlaced B-VOP direct mode
    uint32_t Tframe;
    // for B-VOP direct mode
    uint32_t TRB, TRD;
    // time increment of past and future VOP for B-VOP
    uint32_t pastFrameTime, futureFrameTime;
    // VOP global time (anchor clock and B-VOP clock snapshot)
    uint32_t vop_sync_time, vop_sync_time_b;

} mp4_VideoObjectLayer_t;
+
/* video_signal_type Info */
typedef struct
{
    uint8_t is_video_signal_type;
    uint8_t video_format;           /* MP4_VIDEO_FORMAT_* */
    uint8_t video_range;
    uint8_t is_colour_description;
    uint8_t colour_primaries;
    uint8_t transfer_characteristics;
    uint8_t matrix_coefficients;
} mp4_VideoSignalType_t;

/* Timestamp holder for a decoded frame */
typedef struct _mp4_Frame {
    long long int time;
} mp4_Frame;

/* Visual Object Info */
typedef struct
{
    uint8_t is_visual_object_identifier;
    uint8_t visual_object_verid;
    uint8_t visual_object_priority;
    uint8_t visual_object_type;  /* MP4_VISUAL_OBJECT_TYPE_* */
    mp4_VideoSignalType_t  VideoSignalType;
    mp4_VideoObjectLayer_t VideoObject;

    mp4_Frame currentFrame;  // current
    mp4_Frame pastFrame;     // reference in past
    mp4_Frame futureFrame;   // reference in future
} mp4_VisualObject_t;

/* Full Info (root of all parsed bitstream state) */
typedef struct
{
    mp4_VisualObject_t VisualObject;
    uint8_t profile_and_level_indication;
} mp4_Info_t;

/* Bit flags recording which start codes were seen in the current workload */
enum
{
    MP4_SC_SEEN_INVALID = 0x0,
    MP4_SC_SEEN_VOL     = 0x1,
    MP4_SC_SEEN_VOP     = 0x2,
    MP4_SC_SEEN_SVH     = 0x4,
};

/* Bitstream error flags, split by scope: HDR (sequence headers, survive a
 * frame boundary) vs FRM (per-frame, cleared each workload) */
enum
{
    MP4_BS_ERROR_NONE       = (0 << 0),
    MP4_BS_ERROR_HDR_PARSE  = (1 << 0),
    MP4_BS_ERROR_HDR_NONDEC = (1 << 1),
    MP4_BS_ERROR_HDR_UNSUP  = (1 << 2),
    MP4_BS_ERROR_FRM_PARSE  = (1 << 3),
    MP4_BS_ERROR_FRM_NONDEC = (1 << 4),
    MP4_BS_ERROR_FRM_UNSUP  = (1 << 5),
};

#define MP4_HDR_ERROR_MASK (MP4_BS_ERROR_HDR_PARSE | MP4_BS_ERROR_HDR_NONDEC | MP4_BS_ERROR_HDR_UNSUP)

/* Reference frame slots (0 = newest) */
typedef enum
{
    VIDDEC_MP4_INDX_0   = 0,
    VIDDEC_MP4_INDX_1   = 1,
    VIDDEC_MP4_INDX_2   = 2,
    VIDDEC_MP4_INDX_MAX = 3,
} viddec_fw_mp4_ref_index_t;

typedef struct
{
    uint8_t is_field;
} viddec_mp4_ref_info_t;

/* Per-stream parser context */
typedef struct
{
    // The relevant bitstream data for current stream
    mp4_Info_t info;

    // The previous start code (without the prefix)
    uint32_t   prev_sc;

    // The current start code (without the prefix)
    // TODO: Revisit for SVH
    uint32_t   current_sc;

    // Indicates if we look for both short and long video header or just the long video header
    // If false, sc detection looks for both short and long video headers.
    // If true, long video header has been seen and sc detection does not look for short video header any more.
    uint8_t    ignore_scs;

    // Indicates if the current start code prefix is long (if true).
    uint8_t    cur_sc_prefix;

    // Indicates if the next start code prefix is long (if true).
    uint8_t    next_sc_prefix;

    // Indicates start of a frame
    uint8_t    is_frame_start;

    // Indicates which start codes were seen for this workload (MP4_SC_SEEN_*)
    uint8_t    sc_seen;

    // Indicates bitstream errors if any (MP4_BS_ERROR_* flags)
    uint16_t   bitstream_error;

    // Reference frame information
    viddec_mp4_ref_info_t ref_frame[VIDDEC_MP4_INDX_MAX];

}viddec_mp4_parser_t;

/* Break out of the enclosing loop with a parse error when a bit-read failed */
#define BREAK_GETBITS_FAIL(x, ret) {            \
        if(x == -1){                            \
            FWTRACE;                            \
            ret = MP4_STATUS_PARSE_ERROR;       \
            break;}                             \
    }

/* Break out of the enclosing loop when required data could not be read */
#define BREAK_GETBITS_REQD_MISSING(x, ret) {    \
        if(x == -1){                            \
            FWTRACE;                            \
            ret = MP4_STATUS_REQD_DATA_ERROR;   \
            break;}                             \
    }

// NOTE(review): firmware-local memset prototype with a nonstandard signature
// (int32_t/uint32_t instead of int/size_t) — confirm against the fw runtime.
extern void *memset(void *s, int32_t c, uint32_t n);

uint32_t viddec_fw_mp4_emit_workload(void *parent, void *ctxt);

void mp4_set_hdr_bitstream_error(viddec_mp4_parser_t *parser, uint8_t hdr_flag, mp4_Status_t parse_status);

#endif
diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_shortheader.c b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_shortheader.c
new file mode 100644
index 0000000..a3d894d
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_shortheader.c
@@ -0,0 +1,134 @@
+#include "viddec_mp4_shortheader.h"
+
/* Per-source_format picture geometry for short video header (H.263). */
typedef struct
{
    uint16_t vop_width;
    uint16_t vop_height;
    uint16_t num_macroblocks_in_gob;
    uint16_t num_gobs_in_vop;
    uint8_t  num_rows_in_gob;
} svh_src_fmt_params_t;

/* Indexed by (source_format - 1): sub-QCIF, QCIF, CIF, 4CIF, 16CIF. */
const svh_src_fmt_params_t svh_src_fmt_defaults[5] =
{
    {128,    96,   8,  6, 1},
    {176,   144,  11,  9, 1},
    {352,   288,  22, 18, 1},
    {704,   576,  88, 18, 2},
    {1408, 1152, 352, 18, 4},
};
+
/* Parse the 27-bit tail of an H.263 (short video header) picture header into
 * parser->info...VideoObjectPlaneH263, then consume the PEI/PSUPP extension
 * loop. Anything after this point is pixel data for the decoder.
 * Returns MP4_STATUS_OK, MP4_STATUS_REQD_DATA_ERROR on a short read of the
 * fixed fields, MP4_STATUS_PARSE_ERROR on a short read in the PEI loop, or
 * MP4_STATUS_NOTSUPPORT for a bad source_format. */
mp4_Status_t mp4_Parse_VideoObjectPlane_svh(void *parent, viddec_mp4_parser_t *parser)
{
    mp4_Status_t ret = MP4_STATUS_OK;
    unsigned int data;
    mp4_VideoObjectPlaneH263 *svh = &(parser->info.VisualObject.VideoObject.VideoObjectPlaneH263);
    int32_t getbits = 0;

    do
    {
        // Read the 27 remaining header bits in one go, then unpack from the
        // least-significant end upward.
        getbits = viddec_pm_get_bits(parent, &data, 27);
        BREAK_GETBITS_REQD_MISSING(getbits, ret);

        data = data >> 1; // zero_bit
        svh->vop_quant = (data & 0x1F);
        data = data >> 9; // vop_quant + four_reserved_zero_bits
        svh->picture_coding_type = (data & 0x1);
        data = data >> 1; // picture_coding_type
        svh->source_format = (data & 0x7);
        data = data >> 8; // source_format + full_picture_freeze_release + document_camera_indicator + split_screen_indicator + zero_bit + marker_bit
        svh->temporal_reference = data;

        // Only source formats 1..5 (sub-QCIF..16CIF) are defined.
        if (svh->source_format == 0 || svh->source_format > 5)
        {
            DEB("Error: Bad value for VideoPlaneWithShortHeader.source_format\n");
            ret = MP4_STATUS_NOTSUPPORT;
            break;
        }

        // PEI loop: each '1' bit announces one 8-bit PSUPP byte (skipped).
        for (;;)
        {
            getbits = viddec_pm_get_bits(parent, &data, 1); // pei
            BREAK_GETBITS_FAIL(getbits, ret);
            if (!data)
                break;
            getbits = viddec_pm_get_bits(parent, &data, 8); // psupp
            BREAK_GETBITS_FAIL(getbits, ret);
        }

        // Anything after this needs to be fed to the decoder as PIXEL_ES
    } while(0);

    return ret;
}
+
/* Parse a short-video-header picture and, on success, populate the VOL/VOP/
 * video-signal structures with the H.263 baseline defaults mandated by
 * ISO/IEC 14496-2 for SVH streams, then append the VPSH workload item.
 * Returns the parse status (workitem append result mapped to MP4_STATUS_OK
 * when it returns 1). */
mp4_Status_t mp4_Parse_VideoObject_svh(void *parent, viddec_mp4_parser_t *parser)
{
    mp4_Status_t ret=MP4_STATUS_OK;
    mp4_Info_t *pInfo = &(parser->info);
    mp4_VideoSignalType_t *vst = &(pInfo->VisualObject.VideoSignalType);
    mp4_VideoObjectLayer_t *vol = &(pInfo->VisualObject.VideoObject);
    mp4_VideoObjectPlane_t *vop = &(pInfo->VisualObject.VideoObject.VideoObjectPlane);
    mp4_VideoObjectPlaneH263 *svh = &(pInfo->VisualObject.VideoObject.VideoObjectPlaneH263);
    uint8_t index = 0;

    ret = mp4_Parse_VideoObjectPlane_svh(parent, parser);
    if(ret == MP4_STATUS_OK)
    {
        // Populate defaults for the svh (fixed by the spec for SVH streams)
        vol->short_video_header = 1;
        vol->video_object_layer_shape = MP4_SHAPE_TYPE_RECTANGULAR;
        vol->obmc_disable = 1;
        vol->quant_type = 0;
        vol->resync_marker_disable = 1;
        vol->data_partitioned = 0;
        vol->reversible_vlc = 0;
        vol->interlaced = 0;
        vol->complexity_estimation_disable = 1;
        vol->scalability = 0;
        vol->not_8_bit = 0;
        vol->bits_per_pixel = 8;
        vol->quant_precision = 5;
        vol->vop_time_increment_resolution = 30000;  // 29.97 fps clock
        vol->fixed_vop_time_increment = 1001;
        vol->aspect_ratio_info = MP4_ASPECT_RATIO_12_11;

        vop->vop_rounding_type = 0;
        vop->vop_fcode_forward = 1;
        vop->vop_coded = 1;
        vop->vop_coding_type = svh->picture_coding_type ? MP4_VOP_TYPE_P: MP4_VOP_TYPE_I;
        vop->vop_quant = svh->vop_quant;

        vst->colour_primaries = 1;
        vst->transfer_characteristics = 1;
        vst->matrix_coefficients = 6;

        // Picture geometry from the source_format lookup table.
        index = svh->source_format - 1;
        vol->video_object_layer_width = svh_src_fmt_defaults[index].vop_width;
        vol->video_object_layer_height = svh_src_fmt_defaults[index].vop_height;
        svh->num_macroblocks_in_gob = svh_src_fmt_defaults[index].num_macroblocks_in_gob;
        svh->num_gobs_in_vop = svh_src_fmt_defaults[index].num_gobs_in_vop;
        svh->num_rows_in_gob = svh_src_fmt_defaults[index].num_rows_in_gob;
    }

    mp4_set_hdr_bitstream_error(parser, false, ret);

    // POPULATE WORKLOAD ITEM
    {
        viddec_workload_item_t wi;

        wi.vwi_type = VIDDEC_WORKLOAD_MPEG4_VIDEO_PLANE_SHORT;

        wi.mp4_vpsh.info = 0;
        wi.mp4_vpsh.pad1 = 0;
        wi.mp4_vpsh.pad2 = 0;

        viddec_fw_mp4_vpsh_set_source_format(&wi.mp4_vpsh, svh->source_format);

        // Append returns 1 on success; normalize to MP4_STATUS_OK.
        ret = viddec_pm_append_workitem(parent, &wi);
        if(ret == 1)
            ret = MP4_STATUS_OK;
    }

    return ret;
}
diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_shortheader.h b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_shortheader.h
new file mode 100644
index 0000000..e2ecaaa
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_shortheader.h
@@ -0,0 +1,11 @@
#ifndef VIDDEC_MP4_SHORTHEADER_H
#define VIDDEC_MP4_SHORTHEADER_H
#include "viddec_fw_debug.h"
#include "viddec_parser_ops.h"
#include "viddec_mp4_parse.h"

/* Parses a short-video-header VOP (picture layer) into the parser context. */
mp4_Status_t mp4_Parse_VideoObjectPlane_svh(void *parent, viddec_mp4_parser_t *cxt);

/* Parses a short-video-header video object; also populates VOL/VOP/visual
 * defaults for the short-header case (see mp4_Parse_VideoObject_svh). */
mp4_Status_t mp4_Parse_VideoObject_svh(void *parent, viddec_mp4_parser_t *cxt);

#endif
diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectlayer.c b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectlayer.c
new file mode 100644
index 0000000..6df06b6
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectlayer.c
@@ -0,0 +1,596 @@
+#include "viddec_mp4_videoobjectlayer.h"
+
/* Default intra quantization matrix, used when load_intra_quant_mat is 0
 * in the VOL header (see mp4_Parse_VOL_quant_mat). */
const unsigned char mp4_DefaultIntraQuantMatrix[64] = {
    8, 17, 18, 19, 21, 23, 25, 27,
    17, 18, 19, 21, 23, 25, 27, 28,
    20, 21, 22, 23, 24, 26, 28, 30,
    21, 22, 23, 24, 26, 28, 30, 32,
    22, 23, 24, 26, 28, 30, 32, 35,
    23, 24, 26, 28, 30, 32, 35, 38,
    25, 26, 28, 30, 32, 35, 38, 41,
    27, 28, 30, 32, 35, 38, 41, 45
};
/* Default non-intra quantization matrix, used when load_nonintra_quant_mat
 * is 0 in the VOL header. */
const unsigned char mp4_DefaultNonIntraQuantMatrix[64] = {
    16, 17, 18, 19, 20, 21, 22, 23,
    17, 18, 19, 20, 21, 22, 23, 24,
    18, 19, 20, 21, 22, 23, 24, 25,
    19, 20, 21, 22, 23, 24, 26, 27,
    20, 21, 22, 23, 25, 26, 27, 28,
    21, 22, 23, 24, 26, 27, 28, 30,
    22, 23, 24, 26, 27, 28, 30, 31,
    23, 24, 25, 27, 28, 30, 31, 33
};
/* Classical (non-alternate) zigzag scan order; maps scan position to the
 * raster index used when reading quant matrices from the bitstream. */
const unsigned char mp4_ClassicalZigzag[64] = {
    0, 1, 8, 16, 9, 2, 3, 10, 17, 24, 32, 25, 18, 11, 4, 5,
    12, 19, 26, 33, 40, 48, 41, 34, 27, 20, 13, 6, 7, 14, 21, 28,
    35, 42, 49, 56, 57, 50, 43, 36, 29, 22, 15, 23, 30, 37, 44, 51,
    58, 59, 52, 45, 38, 31, 39, 46, 53, 60, 61, 54, 47, 55, 62, 63
};
+
/* Number of bits needed to code a macroblock number in 0..nmb-1,
 * i.e. ceil(log2(nmb)), with a minimum of 1 bit. */
static inline int mp4_GetMacroBlockNumberSize(int nmb)
{
    int bits = 1;
    int v = (nmb - 1) >> 1;

    while (v != 0)
    {
        v >>= 1;
        bits++;
    }
    return bits;
}
+
/* Byte-wise copy of a default quantization table into the context copy. */
static inline void mp4_copy_default_table(const uint8_t *src, uint8_t *dst, uint32_t len)
{
    uint32_t n = len;
    while (n != 0)
    {
        n--;
        dst[n] = src[n];
    }
}
+
+
+static inline mp4_Status_t mp4_Parse_QuantMatrix(void *parent, uint8_t *pQM)
+{
+ uint32_t i,code=0;
+ uint8_t last=0;
+ int32_t getbits=0;
+ mp4_Status_t ret = MP4_STATUS_OK;
+
+ for (i = 0; i < 64; i ++)
+ {
+ getbits = viddec_pm_get_bits(parent, &code, 8);
+ BREAK_GETBITS_REQD_MISSING(getbits, ret);
+ if (code == 0) break;
+ pQM[mp4_ClassicalZigzag[i]] = (uint8_t)(code & 0xFF);
+ }
+ last = pQM[mp4_ClassicalZigzag[i-1]];
+ for (; i < 64; i ++)
+ {
+ pQM[mp4_ClassicalZigzag[i]] = last;
+ }
+ return ret;;
+}
+
/* True when video_object_type_indication is in the defined range [1,18];
 * 0 and values above 18 are invalid.
 *
 * Fix: the original used '||', which is true for every uint8_t value and
 * made the "unknown type, forcing to 1" fallback in
 * mp4_Parse_VideoObjectLayer unreachable; the range test needs '&&'.
 */
static inline uint8_t mp4_pvt_valid_object_type_indication(uint8_t val)
{
    return ((1 <= val) && (val <= 18));
}
+
/* A video_object_layer_verid / visual_object_verid is valid only for
 * versions 1, 2, 4 and 5; everything else (including 0 and 3) is not. */
static inline uint8_t mp4_pvt_valid_object_layer_verid(uint8_t val)
{
    return (val == 1) || (val == 2) || (val == 4) || (val == 5);
}
+
+static mp4_Status_t
+mp4_pvt_VOL_volcontrolparameters(void *parent, viddec_mp4_parser_t *parser)
+{
+ mp4_VOLControlParameters_t *cxt = &(parser->info.VisualObject.VideoObject.VOLControlParameters);
+ mp4_Status_t ret = MP4_STATUS_PARSE_ERROR;
+ int32_t getbits=0;
+ uint32_t code=0;
+
+ do
+ {
+ getbits = viddec_pm_get_bits(parent, &(code), 4);
+ BREAK_GETBITS_REQD_MISSING(getbits, ret);
+ cxt->chroma_format = (code >> 2) & 0x3;
+ cxt->low_delay = ((code & 0x2) > 0);
+ cxt->vbv_parameters = code & 0x1;
+
+ if (cxt->chroma_format != MP4_CHROMA_FORMAT_420)
+ {
+ DEB("Warning: mp4_Parse_VideoObject:vol_control_parameters.chroma_format != 4:2:0\n");
+ cxt->chroma_format= MP4_CHROMA_FORMAT_420;
+ parser->bitstream_error |= MP4_BS_ERROR_HDR_UNSUP;
+ ret = MP4_STATUS_NOTSUPPORT;
+ }
+
+ if(cxt->vbv_parameters)
+ {/* TODO: Check for validity of marker bits */
+ getbits = viddec_pm_get_bits(parent, &(code), 32);
+ BREAK_GETBITS_REQD_MISSING(getbits, ret);
+ /* 32 bits= firsthalf(15) + M + LatterHalf(15) + M */
+ cxt->bit_rate = (code & 0xFFFE) >> 1; // Get rid of 1 marker bit
+ cxt->bit_rate |= ((code & 0xFFFE0000) >> 2); // Get rid of 2 marker bits
+
+ if(cxt->bit_rate == 0)
+ {
+ DEB("Error: mp4_Parse_VideoObject:vidObjLay->VOLControlParameters.bit_rate = 0\n");
+ parser->bitstream_error |= MP4_BS_ERROR_HDR_UNSUP;
+ ret = MP4_STATUS_NOTSUPPORT;
+ // Do we need to really break here? Why not just set an error and proceed
+ //break;
+ }
+
+ getbits = viddec_pm_get_bits(parent, &(code), 19);
+ BREAK_GETBITS_REQD_MISSING(getbits, ret);
+ /* 19 bits= firsthalf(15) + M + LatterHalf(3)*/
+ cxt->vbv_buffer_size = code & 0x7;
+ cxt->vbv_buffer_size |= ( (code >> 4) & 0x7FFF);
+ if(cxt->vbv_buffer_size == 0)
+ {
+ DEB("Error: mp4_Parse_VideoObject:vidObjLay->VOLControlParameters.vbv_buffer_size = 0\n");
+ parser->bitstream_error |= MP4_BS_ERROR_HDR_UNSUP;
+ ret = MP4_STATUS_NOTSUPPORT;
+ // Do we need to really break here? Why not just set an error and proceed
+ //break;
+ }
+
+ getbits = viddec_pm_get_bits(parent, &(code), 28);
+ BREAK_GETBITS_REQD_MISSING(getbits, ret);
+ /* 28 bits= firsthalf(11) + M + LatterHalf(15) + M */
+ code = code >>1;
+ cxt->vbv_occupancy = code & 0x7FFF;
+ code = code >>16;
+ cxt->vbv_occupancy |= (code & 0x07FF);
+ }
+ ret = MP4_STATUS_OK;
+ } while(0);
+
+ return ret;
+}
+
/* Smallest number of bits that can represent 'val'; returns 1 for val == 0. */
static uint32_t mp4_pvt_count_number_of_bits(uint32_t val)
{
    uint32_t bits = 1;
    for (val >>= 1; val != 0; val >>= 1)
    {
        bits++;
    }
    return bits;
}
+
+static mp4_Status_t
+mp4_Parse_VOL_sprite(void *parent, viddec_mp4_parser_t *parser)
+{
+ mp4_VideoObjectLayer_t *vidObjLay = (&parser->info.VisualObject.VideoObject);
+ mp4_VOLSpriteInfo_t *cxt = &(vidObjLay->sprite_info);
+ uint32_t sprite_enable = vidObjLay->sprite_enable;
+ uint32_t code;
+ mp4_Status_t ret = MP4_STATUS_PARSE_ERROR;
+ int32_t getbits=0;
+
+ do{
+ if ((sprite_enable == MP4_SPRITE_STATIC) ||
+ (sprite_enable == MP4_SPRITE_GMC))
+ {
+ if (sprite_enable != MP4_SPRITE_GMC)
+ {
+ /* This is not a supported type by HW */
+ DEB("ERROR: mp4_Parse_VideoObject:sprite_enable = %.2X\n", sprite_enable);
+ ret = MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR;
+ break;
+ }
+
+ getbits = viddec_pm_get_bits(parent, &(code), 9);
+ BREAK_GETBITS_REQD_MISSING(getbits, ret);
+ cxt->sprite_brightness_change = code & 0x1;
+ cxt->sprite_warping_accuracy = (code >> 1) & 0x3;
+ cxt->no_of_sprite_warping_points = code >> 3;
+ if(cxt->no_of_sprite_warping_points > 1)
+ {
+ DEB("Error: mp4_Parse_VideoObject:bad no_of_sprite_warping_points %d\n",
+ cxt->no_of_sprite_warping_points);
+ ret = MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR;
+ break;
+ }
+
+ if((vidObjLay->sprite_enable == MP4_SPRITE_GMC) && (cxt->sprite_brightness_change))
+ {
+ DEB("Error: mp4_Parse_VideoObject:sprite_brightness_change should be 0 for GMC sprites\n");
+ ret = MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR;
+ break;
+ }
+
+ if (vidObjLay->sprite_enable != MP4_SPRITE_GMC)
+ {
+ DEB("ERROR: mp4_Parse_VideoObject:sprite_enable = %.2X\n", sprite_enable);
+ ret = MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR;
+ break;
+ }
+ }
+ ret = MP4_STATUS_OK;
+ }while(0);
+
+ return ret;
+}
+
/* Parses the quantization-matrix section of a VOL header: for each of the
 * intra and non-intra matrices, a 1-bit load flag selects between a
 * bitstream-coded matrix (mp4_Parse_QuantMatrix) and the spec default
 * table.  Returns MP4_STATUS_OK unless a required bit read fails. */
static mp4_Status_t mp4_Parse_VOL_quant_mat(void *parent, mp4_VideoObjectLayer_t *vidObjLay)
{
    uint32_t code;
    mp4_Status_t ret = MP4_STATUS_PARSE_ERROR;
    int32_t getbits=0;
    mp4_VOLQuant_mat_t *quant = &(vidObjLay->quant_mat_info);

    do{
        /* load_intra_quant_mat (1 bit) */
        getbits = viddec_pm_get_bits(parent, &(code), 1);
        BREAK_GETBITS_REQD_MISSING(getbits, ret);
        quant->load_intra_quant_mat = code;
        if (quant->load_intra_quant_mat)
        {
            /* NOTE(review): parse status of the matrix read is not
             * propagated -- confirm this is intentional. */
            mp4_Parse_QuantMatrix(parent, &(quant->intra_quant_mat[0]));
        }
        else
        {
            mp4_copy_default_table((const uint8_t *)&mp4_DefaultIntraQuantMatrix[0], (uint8_t *)&(quant->intra_quant_mat[0]), 64);
        }

        /* load_nonintra_quant_mat (1 bit) */
        getbits = viddec_pm_get_bits(parent, &(code), 1);
        BREAK_GETBITS_REQD_MISSING(getbits, ret);
        quant->load_nonintra_quant_mat = code;
        if (quant->load_nonintra_quant_mat)
        {
            mp4_Parse_QuantMatrix(parent, &(quant->nonintra_quant_mat[0]));
        }
        else
        {
            mp4_copy_default_table((const uint8_t *)&mp4_DefaultNonIntraQuantMatrix[0], (uint8_t *)&(quant->nonintra_quant_mat[0]), 64);
        }
        ret = MP4_STATUS_OK;
    }while(0);
    return ret;
}
+
/* Parses the non-binary-shape remainder of a VideoObjectLayer header:
 * frame dimensions (rectangular shape only), interlace/OBMC flags, sprite
 * info, bit depth, quantization type/matrices, and the various feature
 * flags.  Unsupported features (non-8-bit, grayscale shape, complexity
 * estimation, NEWPRED, scalability) abort with
 * MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR.  Parsing stops
 * after the scalability flag; later fields are not needed downstream. */
static mp4_Status_t mp4_Parse_VOL_notbinaryonly(void *parent, viddec_mp4_parser_t *parser)
{
    uint32_t code;
    mp4_Info_t *pInfo = &(parser->info);
    mp4_VideoObjectLayer_t *vidObjLay = &(pInfo->VisualObject.VideoObject);
    mp4_Status_t ret = MP4_STATUS_PARSE_ERROR;
    int32_t getbits=0;

    do{
        if (vidObjLay->video_object_layer_shape == MP4_SHAPE_TYPE_RECTANGULAR)
        {
            /* TODO: check for validity of marker bits */
            /* 29 bits = M + width(13) + M + height(13) + M */
            getbits = viddec_pm_get_bits(parent, &(code), 29);
            BREAK_GETBITS_REQD_MISSING(getbits, ret);
            vidObjLay->video_object_layer_height = (code >> 1) & 0x1FFF;
            vidObjLay->video_object_layer_width = (code >> 15) & 0x1FFF;
        }

        /* interlaced(1) + obmc_disable(1) */
        getbits = viddec_pm_get_bits(parent, &(code), 2);
        BREAK_GETBITS_REQD_MISSING(getbits, ret);
        vidObjLay->interlaced = ((code & 0x2) > 0);
        vidObjLay->obmc_disable = ((code & 0x1) > 0);

        {
            /* sprite_enable is 1 bit in verid 1, 2 bits otherwise */
            uint32_t num_bits=1;
            if(vidObjLay->video_object_layer_verid != 1) num_bits=2;
            getbits = viddec_pm_get_bits(parent, &(code), num_bits);
            BREAK_GETBITS_REQD_MISSING(getbits, ret);
            vidObjLay->sprite_enable = code;
        }

        ret = mp4_Parse_VOL_sprite(parent, parser);
        if(ret != MP4_STATUS_OK)
        {
            break;
        }

        if ((vidObjLay->video_object_layer_verid != 1) &&
            (vidObjLay->video_object_layer_shape != MP4_SHAPE_TYPE_RECTANGULAR))
        {
            /* not supported shape*/
            DEB("Error: mp4_Parse_VideoObject: sadct_disable, not supp\n");
            ret = MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR;
            break;
        }

        getbits = viddec_pm_get_bits(parent, &(code), 1);
        BREAK_GETBITS_FAIL(getbits, ret);
        vidObjLay->not_8_bit = (code > 0 );
        if(vidObjLay->not_8_bit)
        {
            /* 8 bit is only supported mode*/
            DEB("Error: mp4_Parse_VideoObject: not_8_bit, not supp\n");
            ret = MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR;
            break;
        }
        else
        {/* We use default values since only 8 bit mode is supported */
            vidObjLay->quant_precision = 5;
            vidObjLay->bits_per_pixel = 8;
        }

        if (vidObjLay->video_object_layer_shape == MP4_SHAPE_TYPE_GRAYSCALE)
        {
            /* Should not get here as shape is checked earlier */
            DEB("Error: mp4_Parse_VideoObject: GRAYSCALE, not supp\n");
            ret = MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR;
            break;
        }

        /* quant_type: selects MPEG-style (with matrices) vs H.263 quant */
        getbits = viddec_pm_get_bits(parent, &(code), 1);
        BREAK_GETBITS_REQD_MISSING(getbits, ret);
        vidObjLay->quant_type = code;
        if (vidObjLay->quant_type)
        {
            ret = mp4_Parse_VOL_quant_mat(parent, vidObjLay);
            if(ret != MP4_STATUS_OK)
            {
                break;
            }
        }

        if (vidObjLay->video_object_layer_verid != 1)
        {
            getbits = viddec_pm_get_bits(parent, &(code), 1);
            BREAK_GETBITS_REQD_MISSING(getbits, ret);
            vidObjLay->quarter_sample = code;
        }

        getbits = viddec_pm_get_bits(parent, &(code), 1);
        BREAK_GETBITS_REQD_MISSING(getbits, ret);
        vidObjLay->complexity_estimation_disable = code;
        if(!vidObjLay->complexity_estimation_disable)
        {/* complexity estimation not supported */
            DEB("Error: mp4_Parse_VideoObject: vidObjLay->complexity_estimation_disable, not supp\n");
            ret = MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR;
            break;
        }

        /* resync_marker_disable(1) + data_partitioned(1) */
        getbits = viddec_pm_get_bits(parent, &(code), 2);
        BREAK_GETBITS_REQD_MISSING(getbits, ret);
        vidObjLay->resync_marker_disable = ((code & 0x2) > 0);
        vidObjLay->data_partitioned = code & 0x1;
        if(vidObjLay->data_partitioned)
        {
            getbits = viddec_pm_get_bits(parent, &(code), 1);
            BREAK_GETBITS_REQD_MISSING(getbits, ret);
            vidObjLay->reversible_vlc = code;
        }

        if (vidObjLay->video_object_layer_verid != 1)
        {
            getbits = viddec_pm_get_bits(parent, &(code), 1);
            BREAK_GETBITS_FAIL(getbits, ret);
            vidObjLay->newpred_enable = code;
            if(vidObjLay->newpred_enable)
            {
                DEB("Error: NEWPRED mode is not supported\n");
                ret = MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR;
                break;
            }
            getbits = viddec_pm_get_bits(parent, &(code), 1);
            BREAK_GETBITS_FAIL(getbits, ret);
            vidObjLay->reduced_resolution_vop_enable = code;
        }

        getbits = viddec_pm_get_bits(parent, &(code), 1);
        BREAK_GETBITS_FAIL(getbits, ret);
        vidObjLay->scalability = code;
        if(vidObjLay->scalability)
        {
            DEB("Error: VOL scalability is not supported\n");
            ret = MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR;
            break;
        }

        // No need to parse further - none of the fields are interesting to parser/decoder/user
        ret = MP4_STATUS_OK;
    }while(0);
    return ret;
}
+
/* Parses a VideoObjectLayer (VOL) header into
 * parser->info.VisualObject.VideoObject, records any header bitstream
 * error, and always emits a VIDDEC_WORKLOAD_MPEG4_VIDEO_OBJ workload
 * item plus the container frame size.  Non-rectangular shapes, FGS
 * object types, and invalid verids abort with
 * MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR. */
mp4_Status_t mp4_Parse_VideoObjectLayer(void *parent, viddec_mp4_parser_t *parser)
{
    uint32_t code;
    mp4_Info_t *pInfo = &(parser->info);
    mp4_VisualObject_t *visObj = &(pInfo->VisualObject);
    mp4_VideoObjectLayer_t *vidObjLay = &(pInfo->VisualObject.VideoObject);
    mp4_Status_t ret = MP4_STATUS_PARSE_ERROR;
    int32_t getbits=0;

//DEB("entering mp4_Parse_VideoObjectLayer: bs_err: %d, ret: %d\n", parser->bitstream_error, ret);
    do{
        vidObjLay->VideoObjectPlane.sprite_transmit_mode = MP4_SPRITE_TRANSMIT_MODE_PIECE;

        vidObjLay->short_video_header = 0;
        /* The low nibble of the VOL start code carries the layer id. */
        vidObjLay->video_object_layer_id = (parser->current_sc & 0xF);

        /* random_accessible_vol(1) + video_object_type_indication(8) */
        getbits = viddec_pm_get_bits(parent, &code, 9);
        BREAK_GETBITS_REQD_MISSING(getbits, ret);
        vidObjLay->video_object_type_indication = code & 0xFF;
        vidObjLay->random_accessible_vol = ((code & 0x100) > 0);

        if(!mp4_pvt_valid_object_type_indication(vidObjLay->video_object_type_indication))
        { /* Streams with "unknown" type mismatch with ref */
            DEB("Warning: video_object_type_indication = %d, forcing to 1\n",
                vidObjLay->video_object_type_indication);
            vidObjLay->video_object_type_indication = 1;
        }

        if(vidObjLay->video_object_type_indication == MP4_VIDEO_OBJECT_TYPE_FINE_GRANULARITY_SCALABLE)
        {/* This is not a supported type by HW */
            DEB("ERROR: mp4_Parse_VideoObject:video_object_type_indication = %.2X\n",
                vidObjLay->video_object_type_indication);
            ret = MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR;
            break;
        }
        else
        {
            getbits = viddec_pm_get_bits(parent, &(code), 1);
            BREAK_GETBITS_REQD_MISSING(getbits, ret);
            vidObjLay->is_object_layer_identifier = code;
            /* Default the layer verid from the visual object verid when valid. */
            vidObjLay->video_object_layer_verid =
                (mp4_pvt_valid_object_layer_verid(visObj->visual_object_verid)) ? visObj->visual_object_verid : 1;

            if (vidObjLay->is_object_layer_identifier)
            {
                /* video_object_layer_verid(4) + video_object_layer_priority(3) */
                getbits = viddec_pm_get_bits(parent, &(code), 7);
                BREAK_GETBITS_REQD_MISSING(getbits, ret);
                vidObjLay->video_object_layer_priority = code & 0x7;
                vidObjLay->video_object_layer_verid = (code >> 3) & 0xF;
                if(!mp4_pvt_valid_object_layer_verid(vidObjLay->video_object_layer_verid))
                {
                    DEB("Error: mp4_Parse_VideoObject:is_identifier = %d, expected[1,5]\n",
                        vidObjLay->video_object_layer_verid);
                    ret = MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR;
                    break;
                }
                /* Video object layer ID supercedes visual object ID */
                visObj->visual_object_verid = vidObjLay->video_object_layer_verid;
            }

            getbits = viddec_pm_get_bits(parent, &(code), 4);
            BREAK_GETBITS_REQD_MISSING(getbits, ret);
            vidObjLay->aspect_ratio_info = code & 0xF;
            if(vidObjLay->aspect_ratio_info == MP4_ASPECT_RATIO_EXTPAR)
            {
                /* par_width(8) + par_height(8) */
                getbits = viddec_pm_get_bits(parent, &(code), 16);
                BREAK_GETBITS_REQD_MISSING(getbits, ret);
                vidObjLay->aspect_ratio_info_par_width = (code >> 8) & 0xFF;
                vidObjLay->aspect_ratio_info_par_height = code & 0xFF;
            }

            getbits = viddec_pm_get_bits(parent, &(code), 1);
            BREAK_GETBITS_REQD_MISSING(getbits, ret);
            vidObjLay->is_vol_control_parameters = code;
            if(vidObjLay->is_vol_control_parameters)
            {
                ret = mp4_pvt_VOL_volcontrolparameters(parent, parser);
                if(ret != MP4_STATUS_OK)
                {
                    break;
                }
            }

            getbits = viddec_pm_get_bits(parent, &(code), 2);
            BREAK_GETBITS_REQD_MISSING(getbits, ret);
            vidObjLay->video_object_layer_shape = code;
            /* If shape is not rectangluar exit early without parsing */
            if (vidObjLay->video_object_layer_shape != MP4_SHAPE_TYPE_RECTANGULAR)
            {
                DEB("Error: mp4_Parse_VideoObject: shape not rectangluar(%d):%d\n",
                    MP4_SHAPE_TYPE_RECTANGULAR, vidObjLay->video_object_layer_shape);
                ret = MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR;
                break;
            }

            if ((vidObjLay->video_object_layer_verid != 1) &&
                (vidObjLay->video_object_layer_shape == MP4_SHAPE_TYPE_GRAYSCALE))
            {/* Grayscale not supported */
                DEB("Error: MP4_SHAPE_TYPE_GRAYSCALE not supported\n");
                ret = MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR;
                break;
            }

            /* 19 bits = M + vop_time_increment_resolution(16) + M + fixed_vop_rate(1) */
            getbits = viddec_pm_get_bits(parent, &(code), 19);
            BREAK_GETBITS_REQD_MISSING(getbits, ret);
            /* TODO: check validity of marker */
            vidObjLay->vop_time_increment_resolution = (code >> 2) & 0xFFFF;
            vidObjLay->fixed_vop_rate = code & 0x1;

            if(vidObjLay->vop_time_increment_resolution == 0)
            {
                DEB("Error: 0 value for vop_time_increment_resolution\n");
                ret = MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR;
                break;
            }
            /* calculate number bits in vop_time_increment_resolution */
            vidObjLay->vop_time_increment_resolution_bits = (uint8_t)mp4_pvt_count_number_of_bits(
                (uint32_t)(vidObjLay->vop_time_increment_resolution -1));

            if(vidObjLay->fixed_vop_rate)
            {
                getbits = viddec_pm_get_bits(parent, &(code), vidObjLay->vop_time_increment_resolution_bits);
                BREAK_GETBITS_REQD_MISSING(getbits, ret);
                vidObjLay->fixed_vop_time_increment = code;
            }

            if (vidObjLay->video_object_layer_shape != MP4_SHAPE_TYPE_BINARYONLY)
            {
                ret = mp4_Parse_VOL_notbinaryonly(parent, parser);
                if(ret != MP4_STATUS_OK)
                {
                    break;
                }
            }
            else
            {
                DEB("Error: MP4_SHAPE_TYPE_BINARYONLY not supported\n");
                ret = MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR;
                break;
            }
        }

        vidObjLay->VideoObjectPlane.sprite_transmit_mode = MP4_SPRITE_TRANSMIT_MODE_PIECE;
        ret = MP4_STATUS_OK;
    } while(0);

    mp4_set_hdr_bitstream_error(parser, true, ret);
    if(ret != MP4_STATUS_OK)
        parser->bitstream_error |= MP4_BS_ERROR_HDR_NONDEC;
//DEB("before wkld mp4_Parse_VideoObjectLayer: bs_err: %d, ret: %d\n", parser->bitstream_error, ret);

    // POPULATE WORKLOAD ITEM
    {
        viddec_workload_item_t wi;
        viddec_workload_t *wl = viddec_pm_get_header(parent);

        wi.vwi_type = VIDDEC_WORKLOAD_MPEG4_VIDEO_OBJ;

        wi.mp4_vol.vol_aspect_ratio = 0;
        wi.mp4_vol.vol_bit_rate = 0;
        wi.mp4_vol.vol_frame_rate = 0;

        viddec_fw_mp4_vol_set_aspect_ratio_info(&wi.mp4_vol, vidObjLay->aspect_ratio_info);
        viddec_fw_mp4_vol_set_par_width(&wi.mp4_vol, vidObjLay->aspect_ratio_info_par_width);
        viddec_fw_mp4_vol_set_par_height(&wi.mp4_vol, vidObjLay->aspect_ratio_info_par_height);
        viddec_fw_mp4_vol_set_control_param(&wi.mp4_vol, vidObjLay->is_vol_control_parameters);
        viddec_fw_mp4_vol_set_chroma_format(&wi.mp4_vol, vidObjLay->VOLControlParameters.chroma_format);
        viddec_fw_mp4_vol_set_interlaced(&wi.mp4_vol, vidObjLay->interlaced);
        viddec_fw_mp4_vol_set_fixed_vop_rate(&wi.mp4_vol, vidObjLay->fixed_vop_rate);

        viddec_fw_mp4_vol_set_vbv_param(&wi.mp4_vol, vidObjLay->VOLControlParameters.vbv_parameters);
        viddec_fw_mp4_vol_set_bit_rate(&wi.mp4_vol, vidObjLay->VOLControlParameters.bit_rate);

        viddec_fw_mp4_vol_set_fixed_vop_time_increment(&wi.mp4_vol, vidObjLay->fixed_vop_time_increment);
        viddec_fw_mp4_vol_set_vop_time_increment_resolution(&wi.mp4_vol, vidObjLay->vop_time_increment_resolution);

        /* NOTE(review): append_workitem's success value is assumed to be 1
         * here and mapped onto MP4_STATUS_OK -- confirm against its API. */
        ret = viddec_pm_append_workitem(parent, &wi);
        if(ret == 1)
            ret = MP4_STATUS_OK;

        /* Reset frame attributes and publish the container size. */
        memset(&(wl->attrs), 0, sizeof(viddec_frame_attributes_t));

        wl->attrs.cont_size.width = vidObjLay->video_object_layer_width;
        wl->attrs.cont_size.height = vidObjLay->video_object_layer_height;
    }

    return ret;
}
diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectlayer.h b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectlayer.h
new file mode 100644
index 0000000..4540b6b
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectlayer.h
@@ -0,0 +1,17 @@
#ifndef VIDDEC_MP4_VIDEOOBJECTLAYER_H
#define VIDDEC_MP4_VIDEOOBJECTLAYER_H
#include "viddec_fw_debug.h"
#include "viddec_parser_ops.h"
#include "viddec_mp4_parse.h"

/* Resets VOL-related parser state (defined elsewhere). */
void mp4_ResetVOL(mp4_Info_t *pInfo);

/* Initializes VOL-related parser state (defined elsewhere). */
mp4_Status_t mp4_InitVOL(mp4_Info_t *pInfo);

/* Releases VOL-related parser state (defined elsewhere). */
mp4_Status_t mp4_FreeVOL(mp4_Info_t *pInfo);

/* Parses a VideoObjectLayer header and emits its workload item. */
mp4_Status_t mp4_Parse_VideoObjectLayer(void *parent, viddec_mp4_parser_t *cxt);



#endif
diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectplane.c b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectplane.c
new file mode 100644
index 0000000..9840af4
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectplane.c
@@ -0,0 +1,422 @@
+#include "viddec_mp4_videoobjectplane.h"
+
/* Parses a Group_of_VideoObjectPlane (GOV) header: closed_gov,
 * broken_link and the hours/minutes/seconds time code.  Derives
 * data->time_base in vop_time_increment ticks and emits a
 * VIDDEC_WORKLOAD_MPEG4_GRP_VIDEO_OBJ workload item. */
mp4_Status_t mp4_Parse_GroupOfVideoObjectPlane(void *parent, viddec_mp4_parser_t *parser)
{
    mp4_Info_t* pInfo = &(parser->info);
    uint32_t code;
    int32_t getbits=0;
    mp4_Status_t ret = MP4_STATUS_REQD_DATA_ERROR;
    mp4_GroupOfVideoObjectPlane_t *data;
    uint32_t time_code = 0;

    data = &(pInfo->VisualObject.VideoObject.GroupOfVideoObjectPlane);

    do
    {
        /* 20 bits = hours(5) + minutes(6) + marker(1) + seconds(6)
         *         + closed_gov(1) + broken_link(1) */
        getbits = viddec_pm_get_bits(parent, &code, 20);
        BREAK_GETBITS_FAIL(getbits, ret);
        ret = MP4_STATUS_OK;

        data->broken_link = ((code & 0x1) > 0);
        data->closed_gov = ((code & 0x2) > 0);
        time_code = code = code >> 2;
        data->time_code_seconds = code & 0x3F;
        code = code >> 6;
        if((code & 1) == 0)
        {/* SGA:Should we ignore marker bit? */
            DEB("Error:mp4_Parse_GroupOfVideoObjectPlane: Invalid marker\n");
        }
        code = code >>1;
        data->time_code_minutes = code & 0x3F;
        code = code >> 6;
        data->time_code_hours = code & 0x1F;

        // This is the timebase in full second units
        data->time_base = data->time_code_seconds + (60*data->time_code_minutes) + (3600*data->time_code_hours);
        // Need to convert this into no. of ticks
        data->time_base *= pInfo->VisualObject.VideoObject.vop_time_increment_resolution;

    } while(0);

    mp4_set_hdr_bitstream_error(parser, true, ret);

    // POPULATE WORKLOAD ITEM
    {
        viddec_workload_item_t wi;

        wi.vwi_type = VIDDEC_WORKLOAD_MPEG4_GRP_VIDEO_OBJ;

        wi.mp4_gvop.gvop_info = 0;
        wi.mp4_gvop.pad1 = 0;
        wi.mp4_gvop.pad2 = 0;

        viddec_fw_mp4_gvop_set_broken_link(&wi.mp4_gvop, data->broken_link);
        viddec_fw_mp4_gvop_set_closed_gov(&wi.mp4_gvop, data->closed_gov);
        viddec_fw_mp4_gvop_set_time_code(&wi.mp4_gvop, time_code);

        /* NOTE(review): append_workitem's success value is assumed to be 1
         * and mapped onto MP4_STATUS_OK -- confirm against its API. */
        ret = viddec_pm_append_workitem(parent, &wi);
        if(ret == 1)
            ret = MP4_STATUS_OK;
    }

    return ret;
}
+
/* Decodes the variable-length brightness_change_factor used with sprite
 * brightness change.  A 4-bit peek classifies the prefix (1/2/3/4 leading
 * one-bits) and selects the width and offset of the following value field.
 * NOTE(review): the offsets (625+, 113+, -112/-15, -48, -16) look like
 * the ISO/IEC 14496-2 brightness_change_factor table -- confirm against
 * the spec.  Intermediate getbits results are not checked individually;
 * only the final getbits value decides the return status. */
static inline mp4_Status_t mp4_brightness_change(void *parent, int32_t *b_change)
{
    uint32_t code;
    int32_t getbits=0;

    *b_change = 0;
    getbits = viddec_pm_peek_bits(parent, &code, 4);
    if (code == 15)
    {
        /* prefix "1111": 10-bit value, range 625..1648 */
        getbits = viddec_pm_skip_bits(parent, 4);
        getbits = viddec_pm_get_bits(parent, &code, 10);
        *b_change = 625 + code;
    }
    else if (code == 14)
    {
        /* prefix "1110": 9-bit value, range 113..624 */
        getbits = viddec_pm_skip_bits(parent, 4);
        getbits = viddec_pm_get_bits(parent, &code, 9);
        *b_change = 113 + code;
    }
    else if (code >= 12)
    {
        /* prefix "110": 7-bit value, split negative/positive ranges */
        getbits = viddec_pm_skip_bits(parent, 3);
        getbits = viddec_pm_get_bits(parent, &code, 7);
        *b_change = (code < 64) ? ((int32_t)code - 112) : ((int32_t)code - 15);
    }
    else if (code >= 8)
    {
        /* prefix "10": 6-bit value */
        getbits = viddec_pm_skip_bits(parent, 2);
        getbits = viddec_pm_get_bits(parent, &code, 6);
        *b_change = (code < 32) ? ((int32_t)code - 48) : ((int32_t)code - 15);
    }
    else
    {
        /* prefix "0": 5-bit value */
        getbits = viddec_pm_skip_bits(parent, 1);
        getbits = viddec_pm_get_bits(parent, &code, 5);
        *b_change = (code < 16) ? ((int32_t)code - 16) : ((int32_t)code - 15);
    }

    return ( (getbits == -1) ? MP4_STATUS_PARSE_ERROR: MP4_STATUS_OK);
}
/* Decodes the VLC-coded dmv_length for one sprite-trajectory component.
 * A 3-bit peek distinguishes short codes from the "111" escape, where the
 * number of additional leading 1-bits extends the length (5 + count).
 * For the short codes, lengths map as (code - 1); a "00x" peek yields
 * 0 or -1, both of which callers treat as "no dmv_code bits".
 * Returns an mp4_Status_t value despite the int32_t return type. */
static inline int32_t mp4_Sprite_dmv_length(void * parent, int32_t *dmv_length)
{
    uint32_t code, skip;
    int32_t getbits=0;
    mp4_Status_t ret= MP4_STATUS_PARSE_ERROR;
    *dmv_length=0;
    skip=3;
    do{
        getbits = viddec_pm_peek_bits(parent, &code, skip);
        BREAK_GETBITS_REQD_MISSING(getbits, ret);

        if(code == 7)
        {
            /* Escape "111": length = 5 + number of following 1 bits. */
            viddec_pm_skip_bits(parent, skip);
            getbits = viddec_pm_peek_bits(parent, &code, 9);
            BREAK_GETBITS_REQD_MISSING(getbits, ret);

            skip=1;
            while((code & 256) != 0)
            {/* count number of 1 bits */
                code <<=1;
                skip++;
            }
            *dmv_length = 5 + skip;
        }
        else
        {
            /* Short codes: 2-bit "00" prefix or 3-bit "010".."110". */
            skip=(code <= 1) ? 2 : 3;
            *dmv_length = code - 1;
        }
        viddec_pm_skip_bits(parent, skip);
        ret= MP4_STATUS_OK;

    }while(0);
    return ret;
}
+
+static inline mp4_Status_t
+mp4_Sprite_Trajectory(void *parent, mp4_VideoObjectLayer_t *vidObjLay, mp4_VideoObjectPlane_t *vidObjPlane)
+{
+ uint32_t code, i;
+ int32_t dmv_length=0, dmv_code=0, getbits=0;
+ mp4_Status_t ret = MP4_STATUS_OK;
+ for(i=0; i < (uint32_t)vidObjLay->sprite_info.no_of_sprite_warping_points; i++ )
+ {
+ ret = mp4_Sprite_dmv_length(parent, &dmv_length);
+ if(ret != MP4_STATUS_OK)
+ {
+ break;
+ }
+ if(dmv_length <= 0)
+ {
+ dmv_code = 0;
+ }
+ else
+ {
+ getbits = viddec_pm_get_bits(parent, &code, (uint32_t)dmv_length);
+ BREAK_GETBITS_REQD_MISSING(getbits, ret);
+ dmv_code = (int32_t)code;
+ if ((dmv_code & (1 << (dmv_length - 1))) == 0)
+ {
+ dmv_code -= (1 << dmv_length) - 1;
+ }
+ }
+ getbits = viddec_pm_get_bits(parent, &code, 1);
+ BREAK_GETBITS_REQD_MISSING(getbits, ret);
+ if(code != 1)
+ {
+ ret = MP4_STATUS_NOTSUPPORT;
+ break;
+ }
+ vidObjPlane->warping_mv_code_du[i] = dmv_code;
+ /* TODO: create another inline function to avoid code duplication */
+ ret = mp4_Sprite_dmv_length(parent, &dmv_length);
+ if(ret != MP4_STATUS_OK)
+ {
+ break;
+ }
+ if(dmv_length <= 0)
+ {
+ dmv_code = 0;
+ }
+ else
+ {
+ getbits = viddec_pm_get_bits(parent, &code, (uint32_t)dmv_length);
+ BREAK_GETBITS_REQD_MISSING(getbits, ret);
+ dmv_code = (int32_t)code;
+ if ((dmv_code & (1 << (dmv_length - 1))) == 0)
+ {
+ dmv_code -= (1 << dmv_length) - 1;
+ }
+ }
+ getbits = viddec_pm_get_bits(parent, &code, 1);
+ BREAK_GETBITS_REQD_MISSING(getbits, ret);
+ if(code != 1)
+ {
+ ret = MP4_STATUS_NOTSUPPORT;
+ break;
+ }
+ vidObjPlane->warping_mv_code_dv[i] = dmv_code;
+
+ }
+ return ret;
+}
+
/* Reads modulo_time_base from a VOP header: a unary run of '1' bits
 * terminated by a '0'; *base receives the count of '1' bits read. */
static inline mp4_Status_t mp4_pvt_extract_modulotimebase_from_VideoObjectPlane(void *parent, uint32_t *base)
{
    mp4_Status_t ret= MP4_STATUS_OK;
    int32_t getbits=0;
    uint32_t code = 0;

    *base = 0;
    do
    {
        getbits = viddec_pm_get_bits(parent, &code, 1);
        BREAK_GETBITS_REQD_MISSING(getbits, ret);
        *base += code;
    }while(code != 0);
    return ret;
}
+
/* Parses a VideoObjectPlane (VOP) header into
 * parser->info.VisualObject.VideoObject.VideoObjectPlane, relying on the
 * previously parsed VOL for field widths and feature flags.  Parsing
 * stops at the macroblock data (handled by the BSP).  Rectangular,
 * non-scalable, non-binary shapes only; unsupported features return
 * MP4_STATUS_NOTSUPPORT.  An uncoded VOP (vop_coded == 0) is a valid
 * early-out. */
mp4_Status_t mp4_Parse_VideoObjectPlane(void *parent, viddec_mp4_parser_t *parser)
{
    uint32_t code;
    mp4_Info_t *pInfo = &(parser->info);
    mp4_VideoObjectLayer_t *vidObjLay = &(pInfo->VisualObject.VideoObject);
    mp4_VideoObjectPlane_t *vidObjPlane = &(pInfo->VisualObject.VideoObject.VideoObjectPlane);
    int32_t getbits=0;
    mp4_Status_t ret= MP4_STATUS_PARSE_ERROR;

    do
    {
        /* vop_coding_type (2 bits: I/P/B/S) */
        getbits = viddec_pm_get_bits(parent, &code, 2);
        BREAK_GETBITS_REQD_MISSING(getbits, ret);
        vidObjPlane->vop_coding_type = code & 0x3;
        if( mp4_pvt_extract_modulotimebase_from_VideoObjectPlane(parent,
            &(vidObjPlane->modulo_time_base)) == MP4_STATUS_REQD_DATA_ERROR)
        {
            break;
        }

        /* marker bit -- NOTE(review): result is not checked here */
        getbits = viddec_pm_get_bits(parent, &code, 1);
        /* TODO: check for marker bit validity */
        {
            uint32_t numbits=0;
            numbits = vidObjLay->vop_time_increment_resolution_bits;
            if(numbits == 0) numbits=1; /*TODO:check if its greater than 16 bits ?? */
            getbits = viddec_pm_get_bits(parent, &code, numbits);
            BREAK_GETBITS_REQD_MISSING(getbits, ret);
            vidObjPlane->vop_time_increment = code;
        }

        /* marker(1) + vop_coded(1) */
        getbits = viddec_pm_get_bits(parent, &code, 2);
        BREAK_GETBITS_REQD_MISSING(getbits, ret);

        vidObjPlane->vop_coded = code & 0x1;
        if(vidObjPlane->vop_coded == 0)
        {
            ret = MP4_STATUS_OK;/* Exit point 1 */
            break;
        }

        if(vidObjLay->newpred_enable)
        {
            /* New pred mode not supported in HW */
            DEB("Error: mp4_Parse_VideoObjectPlane: New pred in vidObjPlane is not supported\n");
            ret = MP4_STATUS_NOTSUPPORT;
            break;
        }

        /* vop_rounding_type: present for P-VOPs and GMC S-VOPs */
        if ((vidObjLay->video_object_layer_shape != MP4_SHAPE_TYPE_BINARYONLY) &&
            ((vidObjPlane->vop_coding_type == MP4_VOP_TYPE_P) ||
             ((vidObjPlane->vop_coding_type == MP4_VOP_TYPE_S) &&
              (vidObjLay->sprite_enable == MP4_SPRITE_GMC))))
        {
            getbits = viddec_pm_get_bits(parent, &code, 1);
            BREAK_GETBITS_REQD_MISSING(getbits, ret);
            vidObjPlane->vop_rounding_type = code;
        }

        if (vidObjLay->reduced_resolution_vop_enable &&
            (vidObjLay->video_object_layer_shape == MP4_SHAPE_TYPE_RECTANGULAR) &&
            ((vidObjPlane->vop_coding_type == MP4_VOP_TYPE_I) ||
             (vidObjPlane->vop_coding_type == MP4_VOP_TYPE_P)))
        {
            getbits = viddec_pm_get_bits(parent, &code, 1);
            BREAK_GETBITS_REQD_MISSING(getbits, ret);
            vidObjPlane->vop_reduced_resolution = code;
            if (vidObjPlane->vop_reduced_resolution)
            {
                DEB("Error: mp4_Parse_VideoObjectPlane: Reduced Resolution vidObjPlane is not supported\n");
                ret = MP4_STATUS_NOTSUPPORT;
                break;
            }
        }

        if (vidObjLay->video_object_layer_shape != MP4_SHAPE_TYPE_RECTANGULAR)
        {
            /* we support only rectangular shapes so the following logic is not required */
            ret = MP4_STATUS_NOTSUPPORT;
            break;
        }

        if ((vidObjLay->video_object_layer_shape != MP4_SHAPE_TYPE_BINARYONLY) &&
            (!vidObjLay->complexity_estimation_disable))
        {
            /* Not required according to DE team */
            //read_vop_complexity_estimation_header();
            ret = MP4_STATUS_NOTSUPPORT;
            break;
        }

        if (vidObjLay->video_object_layer_shape != MP4_SHAPE_TYPE_BINARYONLY)
        {
            /* intra_dc_vlc_thr (3 bits) */
            getbits = viddec_pm_get_bits(parent, &code, 3);
            BREAK_GETBITS_REQD_MISSING(getbits, ret);
            vidObjPlane->intra_dc_vlc_thr = code;
            if (vidObjLay->interlaced)
            {
                /* top_field_first(1) + alternate_vertical_scan_flag(1) */
                getbits = viddec_pm_get_bits(parent, &code, 2);
                BREAK_GETBITS_REQD_MISSING(getbits, ret);
                vidObjPlane->top_field_first = ((code & 0x2) > 0);
                vidObjPlane->alternate_vertical_scan_flag = code & 0x1;
            }
        }

        /* Sprite trajectory and brightness change for S-VOPs */
        if (((vidObjLay->sprite_enable == MP4_SPRITE_STATIC) || (vidObjLay->sprite_enable == MP4_SPRITE_GMC)) &&
            (vidObjPlane->vop_coding_type == MP4_VOP_TYPE_S))
        {
            if (vidObjLay->sprite_info.no_of_sprite_warping_points > 0){
                if (mp4_Sprite_Trajectory(parent, vidObjLay, vidObjPlane) != MP4_STATUS_OK){
                    break;
                }
            }
            vidObjPlane->brightness_change_factor = 0;
            if (vidObjLay->sprite_info.sprite_brightness_change)
            {
                int32_t change=0;
                if(mp4_brightness_change(parent, &change) == MP4_STATUS_PARSE_ERROR)
                {
                    break;
                }
                vidObjPlane->brightness_change_factor = change;
            }

            if (vidObjLay->sprite_enable == MP4_SPRITE_STATIC)
            {
                /* SGA: IS decode sprite not required. Is static even supported */
                ret = MP4_STATUS_OK;/* Exit point 2 */
                break;
            }
        }

        if (vidObjLay->video_object_layer_shape != MP4_SHAPE_TYPE_BINARYONLY)
        {
            // Length of vop_quant is specified by quant_precision
            getbits = viddec_pm_get_bits(parent, &code, vidObjLay->quant_precision);
            BREAK_GETBITS_REQD_MISSING(getbits, ret);
            vidObjPlane->vop_quant = code;
            if (vidObjLay->video_object_layer_shape == MP4_SHAPE_TYPE_GRAYSCALE)
            {
                ret = MP4_STATUS_NOTSUPPORT;
                break;
            }
            /* vop_fcode_forward: motion vector range for P/B/S VOPs; 0 is illegal */
            if (vidObjPlane->vop_coding_type != MP4_VOP_TYPE_I)
            {
                vidObjPlane->vop_fcode_forward = 0;
                getbits = viddec_pm_get_bits(parent, &code, 3);
                BREAK_GETBITS_REQD_MISSING(getbits, ret);
                vidObjPlane->vop_fcode_forward = code & 0x7;
                if (vidObjPlane->vop_fcode_forward == 0)
                {
                    DEB("Error: vop_fcode_forward == 0\n");
                    break;
                }
            }
            /* vop_fcode_backward: only for B-VOPs; 0 is illegal */
            if (vidObjPlane->vop_coding_type == MP4_VOP_TYPE_B)
            {
                vidObjPlane->vop_fcode_backward = 0;
                getbits = viddec_pm_get_bits(parent, &code, 3);
                BREAK_GETBITS_REQD_MISSING(getbits, ret);
                vidObjPlane->vop_fcode_backward = code &0x7;
                if (vidObjPlane->vop_fcode_backward == 0)
                {
                    DEB("Error: vop_fcode_backward == 0\n");
                    break;
                }
            }
            if (!vidObjLay->scalability)
            {
                if ((vidObjLay->video_object_layer_shape != MP4_SHAPE_TYPE_RECTANGULAR) &&
                    (vidObjPlane->vop_coding_type != MP4_VOP_TYPE_I))
                {
                    ret = MP4_STATUS_NOTSUPPORT;
                    break;
                }
                // The remaining data contains the macroblock information that is handled by the BSP
                // The offsets to be sent to the BSP are obtained in the workload population
            }
            else
            {
                ret = MP4_STATUS_NOTSUPPORT;
                break;
            }
        }
        else
        {/* Binary Not supported */
            ret = MP4_STATUS_NOTSUPPORT;
            break;
        }
        /* Since we made it all the way here it a success condition */
        ret = MP4_STATUS_OK; /* Exit point 3 */
    }while(0);

    mp4_set_hdr_bitstream_error(parser, false, ret);

    return ret;
} // mp4_Parse_VideoObjectPlane
diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectplane.h b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectplane.h
new file mode 100644
index 0000000..b54f642
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectplane.h
@@ -0,0 +1,11 @@
#ifndef VIDDEC_MP4_VIDEOOBJECTPLANE_H
#define VIDDEC_MP4_VIDEOOBJECTPLANE_H
#include "viddec_fw_debug.h"
#include "viddec_parser_ops.h"
#include "viddec_mp4_parse.h"

/* Parse a group_of_vop() header at the current bitstream position. */
mp4_Status_t mp4_Parse_GroupOfVideoObjectPlane(void *parent, viddec_mp4_parser_t *parser);

/* Parse a vop() header at the current bitstream position. */
mp4_Status_t mp4_Parse_VideoObjectPlane(void *parent, viddec_mp4_parser_t *parser);

#endif
diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_visualobject.c b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_visualobject.c
new file mode 100644
index 0000000..36c0b29
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_visualobject.c
@@ -0,0 +1,287 @@
+#include "viddec_mp4_visualobject.h"
+
/* Return true (1) iff 'id' is a visual_object_verid value this parser
   accepts: 1, 2, 4 or 5.  Every other value is rejected. */
static inline uint8_t mp4_pvt_isValid_verID(uint8_t id)
{
    return (uint8_t)((id == 1) || (id == 2) || (id == 4) || (id == 5));
} // mp4_pvt_isValid_verID
+
/* Parse the optional video_signal_type() block of the VisualObject header
 * (colour/range metadata).  Spec-mandated defaults are written first so the
 * caller always sees valid values even when the optional fields are absent.
 * Returns MP4_STATUS_OK on success; BREAK_GETBITS_FAIL exits the do/while
 * early with ret still MP4_STATUS_PARSE_ERROR on a bitstream read failure. */
static mp4_Status_t mp4_Parse_video_signal_type(void *parent, mp4_VideoSignalType_t *vidSignal)
{
    uint32_t data=0;
    int32_t getbits=0;
    mp4_Status_t ret = MP4_STATUS_PARSE_ERROR;

    /* Set default values defined in spec first */
    vidSignal->video_format = 5;
    vidSignal->video_range = 0;
    vidSignal->colour_primaries = 1;
    vidSignal->transfer_characteristics = 1;
    vidSignal->matrix_coefficients = 1;
    do
    {
        getbits = viddec_pm_get_bits(parent, &data, 1);    /* video_signal_type flag */
        BREAK_GETBITS_FAIL(getbits, ret);
        vidSignal->is_video_signal_type = (data > 0);
        if(vidSignal->is_video_signal_type)
        {
            /* One 5-bit fetch: video_format (3), video_range (1),
               colour_description flag (1); bit 0 is the last-read field. */
            getbits = viddec_pm_get_bits(parent, &data, 5);
            BREAK_GETBITS_FAIL(getbits, ret);
            vidSignal->is_colour_description = data & 0x1;
            vidSignal->video_range = ((data & 0x2) > 0);
            data = data >> 2;
            vidSignal->video_format = data & 0x7;
            if(vidSignal->is_colour_description)
            {
                /* colour_primaries, transfer_characteristics and
                   matrix_coefficients: one byte each, fetched as 24 bits. */
                getbits = viddec_pm_get_bits(parent, &data, 24);
                BREAK_GETBITS_FAIL(getbits, ret);
                vidSignal->colour_primaries = (data >> 16) & 0xFF;
                vidSignal->transfer_characteristics = (data >> 8) & 0xFF;
                vidSignal->matrix_coefficients = data & 0xFF;
            }
        }
        ret = MP4_STATUS_OK;
    }while(0);

    return ret;
} // mp4_Parse_video_signal_type
+
+void mp4_set_hdr_bitstream_error(viddec_mp4_parser_t *parser, uint8_t hdr_flag, mp4_Status_t parse_status)
+{
+ //DEB("Entering mp4_set_hdr_bitstream_error: bs_err: 0x%x, hdr: %d, parse_status: %d\n",
+ // parser->bitstream_error, hdr_flag, parse_status);
+
+ if(hdr_flag)
+ {
+ if(parse_status & MP4_STATUS_NOTSUPPORT)
+ parser->bitstream_error |= MP4_BS_ERROR_HDR_UNSUP;
+ if(parse_status & MP4_STATUS_PARSE_ERROR)
+ parser->bitstream_error |= MP4_BS_ERROR_HDR_PARSE;
+ if(parse_status & MP4_STATUS_REQD_DATA_ERROR)
+ parser->bitstream_error |= MP4_BS_ERROR_HDR_NONDEC;
+ parser->bitstream_error &= MP4_HDR_ERROR_MASK;
+ }
+ else
+ {
+ if(parse_status & MP4_STATUS_NOTSUPPORT)
+ parser->bitstream_error |= MP4_BS_ERROR_FRM_UNSUP;
+ if(parse_status & MP4_STATUS_PARSE_ERROR)
+ parser->bitstream_error |= MP4_BS_ERROR_FRM_PARSE;
+ if(parse_status & MP4_STATUS_REQD_DATA_ERROR)
+ parser->bitstream_error |= MP4_BS_ERROR_FRM_NONDEC;
+ }
+
+ //DEB("Exiting mp4_set_hdr_bitstream_error: bs_err: 0x%x\n", parser->bitstream_error);
+
+ return;
+} // mp4_set_hdr_bitstream_error
+
/* Parse a VisualObjectSequence header: a single 8-bit
 * profile_and_level_indication.  Only the Simple and Advanced Simple profile
 * levels listed below are accepted; anything else is flagged as unsupported
 * and non-decodable.  Note: this function writes parser->bitstream_error
 * directly rather than via mp4_set_hdr_bitstream_error(). */
mp4_Status_t mp4_Parse_VisualSequence(void *parent, viddec_mp4_parser_t *parser)
{
    uint32_t data=0;
    int32_t getbits=0;
    mp4_Status_t ret = MP4_STATUS_PARSE_ERROR;

    getbits = viddec_pm_get_bits(parent, &data, 8);
    if(getbits != -1)
    {
        parser->info.profile_and_level_indication = data & 0xFF;
        // If present, check for validity
        switch(parser->info.profile_and_level_indication)
        {
        case MP4_SIMPLE_PROFILE_LEVEL_0:
        case MP4_SIMPLE_PROFILE_LEVEL_1:
        case MP4_SIMPLE_PROFILE_LEVEL_2:
        case MP4_SIMPLE_PROFILE_LEVEL_3:
        case MP4_SIMPLE_PROFILE_LEVEL_4a:
        case MP4_SIMPLE_PROFILE_LEVEL_5:
        case MP4_SIMPLE_PROFILE_LEVEL_6:
        case MP4_ADVANCED_SIMPLE_PROFILE_LEVEL_0:
        case MP4_ADVANCED_SIMPLE_PROFILE_LEVEL_1:
        case MP4_ADVANCED_SIMPLE_PROFILE_LEVEL_2:
        case MP4_ADVANCED_SIMPLE_PROFILE_LEVEL_3:
        case MP4_ADVANCED_SIMPLE_PROFILE_LEVEL_4:
        case MP4_ADVANCED_SIMPLE_PROFILE_LEVEL_5:
        case MP4_ADVANCED_SIMPLE_PROFILE_LEVEL_3B:
            /* Supported profile/level: clear any sticky error state. */
            parser->bitstream_error = MP4_BS_ERROR_NONE;
            ret = MP4_STATUS_OK;
            break;
        default:
            parser->bitstream_error = MP4_BS_ERROR_HDR_UNSUP | MP4_BS_ERROR_HDR_NONDEC;
            break;
        }
    }
    else
    {
        /* Could not even read the 8-bit indication: parse + non-decodable. */
        parser->bitstream_error = MP4_BS_ERROR_HDR_PARSE | MP4_BS_ERROR_HDR_NONDEC;
    }

    return ret;
} // mp4_Parse_VisualSequence
+
/* Parse a VisualObject header (following the visual_object start code).
 * Only visual_object_type == VIDEO is supported; colour metadata is handled
 * by mp4_Parse_video_signal_type().  Header errors are folded into
 * parser->bitstream_error, and a VISUAL_SEQ_OBJ workload item is emitted
 * even when parsing broke early (it then carries the defaults set so far). */
mp4_Status_t mp4_Parse_VisualObject(void *parent, viddec_mp4_parser_t *parser)
{
    mp4_Info_t *pInfo = &(parser->info);
    mp4_VisualObject_t *visObj = &(pInfo->VisualObject);
    uint32_t data=0;
    int32_t getbits=0;
    mp4_Status_t ret = MP4_STATUS_PARSE_ERROR;

    do
    {
        getbits = viddec_pm_get_bits(parent, &data, 1);    /* is_visual_object_identifier */
        BREAK_GETBITS_FAIL(getbits, ret);
        visObj->is_visual_object_identifier = (data > 0);

        visObj->visual_object_verid = 1; /* Default value as per spec */
        if (visObj->is_visual_object_identifier)
        {
            /* 7-bit fetch: verid in the high 4 bits, priority in the low 3.
               NOTE(review): unlike the other reads, this one is not checked
               for failure -- confirm this is intentional. */
            viddec_pm_get_bits(parent, &data, 7);
            visObj->visual_object_priority = data & 0x7;
            data = data >> 3;
            if(mp4_pvt_isValid_verID(data & 0xF))
            {
                visObj->visual_object_verid = data & 0xF;
            }
            else
            {
                DEB("Warning: Unsupported visual_object_verid\n");
                parser->bitstream_error |= MP4_BS_ERROR_HDR_UNSUP;
                // Continue parsing as it is not a required field for decoder
            }
        }

        getbits = viddec_pm_get_bits(parent, &data, 4);    /* visual_object_type */
        BREAK_GETBITS_FAIL(getbits, ret);
        visObj->visual_object_type = data;
        if (visObj->visual_object_type != MP4_VISUAL_OBJECT_TYPE_VIDEO)
        {
            /* VIDEO is the only supported type */
            DEB("Error: Unsupported object: visual_object_type != video ID\n");
            parser->bitstream_error |= MP4_BS_ERROR_HDR_UNSUP;
            break;
        }

        /* Not required to check for visual_object_type as we already handle it above */
        ret = mp4_Parse_video_signal_type(parent, &(visObj->VideoSignalType));

        // No need to check for user data or visual object layer because they have a different start code
        // and will not be part of this header

    } while(0);

    mp4_set_hdr_bitstream_error(parser, true, ret);

    // POPULATE WORKLOAD ITEM
    {
        viddec_workload_item_t wi;
        mp4_VideoSignalType_t *vst = &(visObj->VideoSignalType);

        wi.vwi_type = VIDDEC_WORKLOAD_MPEG4_VISUAL_SEQ_OBJ;

        wi.mp4_vs_vo.vs_item = 0;
        wi.mp4_vs_vo.video_signal_type = 0;
        wi.mp4_vs_vo.color_desc = 0;

        viddec_fw_mp4_vs_set_profile_and_level_indication(&wi.mp4_vs_vo, pInfo->profile_and_level_indication);

        viddec_fw_mp4_vo_set_video_signal_type(&wi.mp4_vs_vo, vst->is_video_signal_type);
        if(vst->is_video_signal_type)
        {
            viddec_fw_mp4_vo_set_video_range(&wi.mp4_vs_vo, vst->video_range);
            viddec_fw_mp4_vo_set_video_format(&wi.mp4_vs_vo, vst->video_format);
            viddec_fw_mp4_vo_set_colour_description(&wi.mp4_vs_vo, vst->is_colour_description);
            if(vst->is_colour_description)
            {
                viddec_fw_mp4_vo_set_transfer_char(&wi.mp4_vs_vo, vst->transfer_characteristics);
                viddec_fw_mp4_vo_set_color_primaries(&wi.mp4_vs_vo, vst->colour_primaries);
            }
        }

        /* NOTE(review): 'ret' is reused for the append result here, so any
           parse status from above is overwritten -- confirm intended. */
        ret = viddec_pm_append_workitem(parent, &wi);
        if(ret == 1)
            ret = MP4_STATUS_OK;
    }

    return ret;
} // mp4_Parse_VisualObject
+
/* Copy user_data bytes (everything up to the next start code) into workload
 * items, tagged by the stream level (VS/VO/GVOP/VOL) of the preceding start
 * code.  Payloads are flushed every 11 bytes -- the workitem payload
 * capacity (NOTE(review): assumed to equal sizeof data_payload; confirm). */
mp4_Status_t mp4_Parse_UserData(void *parent, viddec_mp4_parser_t *parser)
{
    mp4_Status_t ret = MP4_STATUS_PARSE_ERROR;
    uint32_t user_data;
    viddec_workload_item_t wi;

    DEB("ParseUser-prev_sc: 0x%x\n", parser->prev_sc);

    /* find the scope based on start code sc */
    switch(parser->prev_sc) {
        case MP4_SC_VISUAL_OBJECT_SEQUENCE:
            wi.vwi_type = VIDDEC_WORKLOAD_SEQ_USER_DATA;
            break;
        case MP4_SC_VISUAL_OBJECT:
            wi.vwi_type = VIDDEC_WORKLOAD_VISUAL_OBJ_USER_DATA;
            break;
        case MP4_SC_GROUP_OF_VOP:
            wi.vwi_type = VIDDEC_WORKLOAD_GOP_USER_DATA;
            break;
        case MP4_SC_VIDEO_OBJECT_LAYER_MIN:
            wi.vwi_type = VIDDEC_WORKLOAD_VIDEO_OBJ_USER_DATA;
            break;
        default:
            wi.vwi_type = VIDDEC_WORKLOAD_INVALID; //ERROR - should not happen
            break;
    }

    /* Read 1 byte of user data and store it in workitem for the current stream level (VS/VO/VOL/GVOP).
       Keep adding data payloads till it reaches size 11. When it is 11, the maximum user data payload size,
       append the workitem. This loop is repeated till all user data is extracted and appended. */
    wi.user_data.size = 0;
    while(viddec_pm_get_bits(parent, &user_data, 8) != -1)
    {
        /* Store the valid byte in data payload */
        wi.user_data.data_payload[wi.user_data.size] = user_data;
        wi.user_data.size++;

        /* When size exceeds payload size, append workitem and continue */
        if (wi.user_data.size >= 11)
        {
            viddec_pm_setup_userdata(&wi);
            ret = viddec_pm_append_workitem(parent, &wi);
            wi.user_data.size = 0;
        }
    }
    /* If size is not 0, append remaining user data (zero-padded to 11). */
    if (wi.user_data.size > 0)
    {
        int i;
        for(i=wi.user_data.size;i<11;i++)
        {
            wi.user_data.data_payload[i] = 0;
        }
        viddec_pm_setup_userdata(&wi);
        ret = viddec_pm_append_workitem(parent, &wi);
        wi.user_data.size = 0;
    }

    /* NOTE(review): ret holds the last append result; translated to OK when 1. */
    if(ret == 1)
        ret = MP4_STATUS_OK;

    return ret;
} // mp4_Parse_UserData
+
diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_visualobject.h b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_visualobject.h
new file mode 100644
index 0000000..0aec9ad
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_visualobject.h
@@ -0,0 +1,13 @@
#ifndef VIDDEC_MP4_VISUALOBJECT_H
#define VIDDEC_MP4_VISUALOBJECT_H
#include "viddec_fw_debug.h"
#include "viddec_parser_ops.h"
#include "viddec_mp4_parse.h"

/* Parse a VisualObjectSequence header (profile_and_level_indication). */
mp4_Status_t mp4_Parse_VisualSequence(void *parent, viddec_mp4_parser_t *parser);

/* Parse a VisualObject header and emit its workload item. */
mp4_Status_t mp4_Parse_VisualObject(void *parent, viddec_mp4_parser_t *parser);

/* Copy user_data bytes into workload items for the current stream level. */
mp4_Status_t mp4_Parse_UserData(void *parent, viddec_mp4_parser_t *parser);

#endif
diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_parse_sc_mp4.c b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_parse_sc_mp4.c
new file mode 100644
index 0000000..6a34500
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_parse_sc_mp4.c
@@ -0,0 +1,143 @@
+#include "viddec_pm_parse.h"
+#include "viddec_fw_debug.h"
+#include "viddec_mp4_parse.h"
+
/* Parse for an SC code of pattern 0x00 0x00 0xXX in the current buffer. Returns either "sc found"
   or success. The context is updated with the current phase and the sc_code position in the buffer.

   What is phase?: phase is a value between [0-4]; we keep track of consecutive '0's with it.
   Any time a '0' is found it is incremented by 1 (up to 2), and it is reset to 0 if a non-zero
   byte is found. If a 0xXX code is found while the current phase is 2, the phase is changed to 3,
   which means we found the pattern we are looking for. It is incremented to 4 once we see a byte
   after this pattern.

   For MP4 there are two start-code patterns, LVH & SVH. LVH is the same as in other codecs
   (00 00 01); SVH, a.k.a. H.263, is (00 00 8X). So we have to look for both kinds of start codes.
   The spec does not explicitly say whether both of them can exist in the same stream, so the
   current implementation assumes that only one of them is present in a given stream to simplify
   the implementation. The reason it can get complicated is that a resync marker in LVH can
   potentially be (00 00 8), which would cause a false detect of an SVH start code.
*/
+
+uint32_t viddec_parse_sc_mp4(void *in, void *pcxt, void *sc_state)
+{
+ uint8_t *ptr;
+ uint32_t size;
+ uint32_t data_left=0, phase = 0, ret = 0;
+ viddec_sc_parse_cubby_cxt_t *cxt;
+ viddec_mp4_parser_t *p_info;
+
+ cxt = ( viddec_sc_parse_cubby_cxt_t *)in;
+ viddec_sc_prefix_state_t *state = (viddec_sc_prefix_state_t *)sc_state;
+ size = 0;
+ data_left = cxt->size;
+ ptr = cxt->buf;
+ phase = cxt->phase;
+ cxt->sc_end_pos = -1;
+ p_info = (viddec_mp4_parser_t *)pcxt;
+
+ /* parse until there is more data and start code not found */
+ while((data_left > 0) &&(phase < 3))
+ {
+ /* Check if we are byte aligned & phase=0, if thats the case we can check
+ work at a time instead of byte*/
+ if(((((uint32_t)ptr) & 0x3) == 0) && (phase == 0))
+ {
+ while(data_left > 3)
+ {
+ uint32_t data;
+ char mask1 = 0, mask2=0;
+
+ data = *((uint32_t *)ptr);
+#ifndef MFDBIGENDIAN
+ data = SWAP_WORD(data);
+#endif
+ mask1 = (FIRST_STARTCODE_BYTE != (data & SC_BYTE_MASK0));
+ mask2 = (FIRST_STARTCODE_BYTE != (data & SC_BYTE_MASK1));
+ /* If second byte and fourth byte are not zero's then we cannot have a start code here as we need
+ two consecutive zero bytes for a start code pattern */
+ if(mask1 && mask2)
+ {/* Success so skip 4 bytes and start over */
+ ptr+=4;size+=4;data_left-=4;
+ continue;
+ }
+ else
+ {
+ break;
+ }
+ }
+ }
+
+ /* At this point either data is not on a word boundary or phase > 0 or On a word boundary but we detected
+ two zero bytes in the word so we look one byte at a time*/
+ if(data_left > 0)
+ {
+ if(*ptr == FIRST_STARTCODE_BYTE)
+ {/* Phase can be 3 only if third start code byte is found */
+ phase++;
+ ptr++;size++;data_left--;
+ if(phase > 2)
+ {
+ phase = 2;
+
+ if ( (((uint32_t)ptr) & 0x3) == 0 )
+ {
+ while( data_left > 3 )
+ {
+ if(*((uint32_t *)ptr) != 0)
+ {
+ break;
+ }
+ ptr+=4;size+=4;data_left-=4;
+ }
+ }
+ }
+ }
+ else
+ {
+ uint8_t normal_sc=0, short_sc=0;
+ if(phase == 2)
+ {
+ normal_sc = (*ptr == THIRD_STARTCODE_BYTE);
+ short_sc = (p_info->ignore_scs == 0) && (SHORT_THIRD_STARTCODE_BYTE == ( *ptr & 0xFC));
+ }
+
+ if(!(normal_sc | short_sc))
+ {
+ phase = 0;
+ }
+ else
+ {/* Match for start code so update context with byte position */
+ cxt->sc_end_pos = size;
+ phase = 3;
+ p_info->cur_sc_prefix = p_info->next_sc_prefix;
+ p_info->next_sc_prefix = (normal_sc) ? 1: 0;
+ if(normal_sc)
+ {
+ p_info->ignore_scs=1;
+ }
+ else
+ {
+ /* For short start code since start code is in one nibble just return at this point */
+ phase += 1;
+ state->next_sc = *ptr;
+ state->second_scprfx_length = 2;
+ ret=1;
+ break;
+ }
+ }
+ ptr++;size++;data_left--;
+ }
+ }
+ }
+ if((data_left > 0) && (phase == 3))
+ {
+ cxt->sc_end_pos++;
+ state->next_sc = cxt->buf[cxt->sc_end_pos];
+ state->second_scprfx_length = 3;
+ phase++;
+ ret = 1;
+ }
+ cxt->phase = phase;
+ /* Return SC found only if phase is 4, else always success */
+ return ret;
+}
diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/include/vc1common.h b/mix_vbp/viddec_fw/fw/codecs/vc1/include/vc1common.h
new file mode 100644
index 0000000..d57a9bf
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/vc1/include/vc1common.h
@@ -0,0 +1,111 @@
+/* ///////////////////////////////////////////////////////////////////////
+//
+// INTEL CORPORATION PROPRIETARY INFORMATION
+// This software is supplied under the terms of a license agreement or
+// nondisclosure agreement with Intel Corporation and may not be copied
+// or disclosed except in accordance with the terms of that agreement.
+// Copyright (c) 2001-2006 Intel Corporation. All Rights Reserved.
+//
+// Description: VC1 header.
+//
+*/
+
+#ifndef _VC1_COMMON_H_
+#define _VC1_COMMON_H_
+
/* If the pixel data is left near an emulation prevention sequence, the decoder will be unaware
   unless we send some previous bytes */
//#define PADDING_FOR_EMUL 3
#define PADDING_FOR_EMUL 0

/* Token-paste helpers building register bit-field LSB/mask macro names. */
#define GET_BLSB( name, bitf ) BLSB_MFD_##name##_##bitf
#define GET_BMSK( name, bitf ) BMSK_MFD_##name##_##bitf

/* Read/write a named bit-field of a register value.  'value' is now
   parenthesized in the expansions: the previous unparenthesized form
   mis-expanded when callers passed an expression with lower-precedence
   operators (e.g. "a ^ b") as the value argument. */
#define BF_READ( name, bitf, value ) (((value) & GET_BMSK(name, bitf) ) >> GET_BLSB(name, bitf) )
#define BF_WRITE( name, bitf, value, data ) value = (((value) & ~GET_BMSK(name, bitf)) | ((data) << GET_BLSB(name, bitf)))
+
/* VC-1 specific workload item types, allocated from the decoder-specific
   range; the past/future frame items alias the generic reference-frame
   source slots. */
enum vc1_workload_item_type
{
    VIDDEC_WORKLOAD_VC1_DMEM = VIDDEC_WORKLOAD_DECODER_SPECIFIC,
    VIDDEC_WORKLOAD_VC1_BITOFFSET,
    VIDDEC_WORKLOAD_VC1_BITPLANE0,
    VIDDEC_WORKLOAD_VC1_BITPLANE1,
    VIDDEC_WORKLOAD_VC1_BITPLANE2,
    VIDDEC_WORKLOAD_VC1_PAST_FRAME = VIDDEC_WORKLOAD_REF_FRAME_SOURCE_0,
    VIDDEC_WORKLOAD_VC1_FUTURE_FRAME,
};

/* Frame coding mode values.  NOTE(review): value 1 is unused here --
   presumably because FCM is a variable-length code; confirm against
   SMPTE 421M before relying on the numeric values. */
typedef enum
{
    vc1_ProgressiveFrame = 0,
    vc1_InterlacedFrame = 2,
    vc1_InterlacedField = 3,
    vc1_PictureFormatNone
} vc1_fcm;
+
/** This enumeration defines the various frame types as defined in PTYPE syntax
element.
PTYPE interpretation depends on bitstream profile. The value that needs to get
programmed in the frame_type register 0x2218 is this generic enum obtained
from Canmore code.
Changing this enum to match the spec for each profile caused md5 mismatches.
TODO: Why are these the values to program - is this the case with reference decoder?
*/
enum
{
    VC1_I_FRAME = (1 << 0),
    VC1_P_FRAME = (1 << 1),
    VC1_B_FRAME = (1 << 2),
    VC1_BI_FRAME = VC1_I_FRAME | VC1_B_FRAME,     /* combines the I and B bits */
    VC1_SKIPPED_FRAME = (1 << 3) | VC1_P_FRAME    /* P bit plus a dedicated skip bit */
};

/* Parsing-progress flags; bit 8 is reserved for the error condition. */
enum {
    vc1_FrameDone = 1 << 0,
    vc1_FieldDone = 1 << 1,
    vc1_SliceDone = 1 << 2,
    vc1_Field1Done = 1 << 3,
    vc1_Field2Done = 1 << 4,
    vc1_FrameError = 1 << 8,
};
+
/* Shadow copy of the VC-1 decoder's special-purpose register block.  The
   leading comment on each field is its byte offset within that block, so
   the field order and packing must not change. */
typedef struct {
    /* 0x00 */ uint32_t general;
    /* 0x04 */ uint32_t stream_format1;
    /* 0x08 */ uint32_t coded_size;
    /* 0x0c */ uint32_t stream_format2;
    /* 0x10 */ uint32_t entrypoint1;
    /* 0x14 */ uint32_t range_map;
    /* 0x18 */ uint32_t frame_type;
    /* 0x1c */ uint32_t recon_control;
    /* 0x20 */ uint32_t mv_control;
    /* 0x24 */ uint32_t intcomp_fwd_top;
    /* 0x28 */ uint32_t ref_bfraction;
    /* 0x2c */ uint32_t blk_control;
    /* 0x30 */ uint32_t trans_data;
    /* 0x34 */ uint32_t vop_dquant;
#define NUM_REF_ID 4
    /* 0x38-0x48 */ uint32_t ref_frm_id[NUM_REF_ID];
    /* 0x48 */ uint32_t fieldref_ctrl_id;
    /* 0x4c */ uint32_t auxfrmctrl;
    /* 0x50 */ uint32_t imgstruct;
    /* 0x54 */ uint32_t alt_frame_type;
    /* 0x58 */ uint32_t intcomp_fwd_bot;
    /* 0x5c */ uint32_t intcomp_bwd_top;
    /* 0x60 */ uint32_t intcomp_bwd_bot;
    /* 0x64 */ uint32_t _stuffing;
} VC1D_SPR_REGS;
+
/*
In VC1, past reference is the fwd reference and future reference is the backward reference
i.e. P frame has only a forward reference and B frame has both a forward and a backward reference.
*/
/* Indices into the decoder's frame slots. */
enum {
    VC1_FRAME_CURRENT_REF = 0,   /* current frame, reference-quality surface */
    VC1_FRAME_CURRENT_DIS,       /* current frame, display surface */
    VC1_FRAME_PAST,              /* forward (past) reference */
    VC1_FRAME_FUTURE,            /* backward (future) reference */
};
+
+#endif //_VC1_COMMON_H_
+
diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/mix_vbp_vc1_stubs.c b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/mix_vbp_vc1_stubs.c
new file mode 100644
index 0000000..a2d6721
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/mix_vbp_vc1_stubs.c
@@ -0,0 +1,16 @@
+#include "vc1.h"
+
/* Stub for the VBP (host) build: frame-start handling is a no-op here;
   the functional implementation lives outside this stubs file. */
void vc1_start_new_frame (void *parent, vc1_viddec_parser_t *parser )
{
    return;
}
+
/* Stub for the VBP (host) build: end-of-frame handling is a no-op here. */
void vc1_end_frame (vc1_viddec_parser_t *parser)
{
    return;
}
+
/* Stub for the VBP (host) build: emits nothing and always reports success (0). */
int32_t vc1_parse_emit_current_frame( void *parent, vc1_viddec_parser_t *parser )
{
    return(0);
}
diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1.h b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1.h
new file mode 100644
index 0000000..8416b24
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1.h
@@ -0,0 +1,224 @@
+/* ///////////////////////////////////////////////////////////////////////
+//
+// INTEL CORPORATION PROPRIETARY INFORMATION
+// This software is supplied under the terms of a license agreement or
+// nondisclosure agreement with Intel Corporation and may not be copied
+// or disclosed except in accordance with the terms of that agreement.
+// Copyright (c) 2001-2006 Intel Corporation. All Rights Reserved.
+//
+// Description: VC1 header.
+//
+*/
+
+#ifndef _VC1_H_
+#define _VC1_H_
+
#ifdef MFD_FIRMWARE
    /* Firmware build: no C library available, so supply size_t ourselves and
       compile the logging macro out entirely. */
    typedef unsigned int size_t;
    #define LOG(...)
#else
    #include <stdio.h>
    #include <unistd.h>
    #include <stdint.h>
    /* Log severities, least to most verbose after NONE.
       NOTE(review): this defines a variable (log_level) at file scope in a
       header, so every TU including vc1.h gets its own copy -- confirm
       whether an anonymous-enum typedef was intended instead. */
    enum {
        NONE = 0,
        CRITICAL,
        WARNING,
        INFO,
        DEBUG,
    } log_level;

    #define vc1_log_level DEBUG

    /* Emit when the message severity is at or below the compile-time level. */
    #define LOG( log_lev, format, args ... ) \
      if (vc1_log_level >= log_lev) { OS_INFO("%s[%d]:: " format "\n", __FUNCTION__ , __LINE__ , ## args ); }
#endif
+
+#include "viddec_fw_workload.h"
+#include "vc1parse_common_defs.h"
+#include "vc1common.h"
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#define LOG_CRIT(format, args ... ) LOG( CRITICAL, format, ## args)
+#define LOG_WARN(format, args ... ) LOG( WARNING, format, ## args)
+#define LOG_INFO(format, args ... ) LOG( INFO, format, ## args)
+#define LOG_DEBUG(format, args ... ) LOG( DEBUG, format, ## args)
+
// Seems to be hardware bug: DO NOT TRY TO SWAP BITPLANE0 and BITPLANE2
// Block Control Register at offset 222C uses Bitplane_raw_ID0 to indicate directmb/fieldtx while
// and Bitplane_raw_ID2 for acpred/mvtypemb/forwardmb
// but when we send bitplane index 0 for directmb/fieldtx and bitplane index 2 for acpred/mvtypemb/forwardmb
// md5 mismatches are seen
typedef enum
{
    BPP_FORWARDMB  =  VIDDEC_WORKLOAD_VC1_BITPLANE0,
    BPP_ACPRED     =  VIDDEC_WORKLOAD_VC1_BITPLANE0,
    BPP_MVTYPEMB   =  VIDDEC_WORKLOAD_VC1_BITPLANE0,
    BPP_OVERFLAGS  =  VIDDEC_WORKLOAD_VC1_BITPLANE1,
    BPP_SKIPMB     =  VIDDEC_WORKLOAD_VC1_BITPLANE1,
    BPP_DIRECTMB   =  VIDDEC_WORKLOAD_VC1_BITPLANE2,
    BPP_FIELDTX    =  VIDDEC_WORKLOAD_VC1_BITPLANE2,
} vc1_bpp_type_t;

/* status codes.
   NOTE(review): every failure kind below shares the value 2, so callers
   cannot distinguish NO_MEM / FILE_ERROR / NOTSUPPORT / PARSE_ERROR /
   ERROR from each other -- confirm whether this collapsing is intended. */
typedef enum {
    VC1_STATUS_EOF          =  1,   // end of file
    VC1_STATUS_OK           =  0,   // no error
    VC1_STATUS_NO_MEM       =  2,   // out of memory
    VC1_STATUS_FILE_ERROR   =  2,   // file error
    VC1_STATUS_NOTSUPPORT   =  2,   // not supported mode
    VC1_STATUS_PARSE_ERROR  =  2,   // fail to parse the bitstream
    VC1_STATUS_ERROR        =  2    // unknown/unspecified error
} vc1_Status;
+
/* VC1 start code values (suffix byte following the 00 00 01 prefix);
   the Reserved/Forbidden entries mark edges of reserved ranges. */
typedef enum {
    vc1_Forbidden           = 0x80,/*0x80-0xFF*/
    vc1_Reserved1           = 0x09,/*0x00-0x09*/
    vc1_Reserved2           = 0x10,
    vc1_Reserved3           = 0x1A,
    vc1_Reserved4           = 0x20,/*0x20-0x7F*/
    vc1_SCEndOfSequence     = 0x0A,
    vc1_SCSlice             = 0x0B,
    vc1_SCField             = 0x0C,
    vc1_SCFrameHeader       = 0x0D,
    vc1_SCEntryPointHeader  = 0x0E,
    vc1_SCSequenceHeader    = 0x0F,
    vc1_SCSliceUser         = 0x1B,
    vc1_SCFieldUser         = 0x1C,
    vc1_SCFrameUser         = 0x1D,
    vc1_SCEntryPointUser    = 0x1E,
    vc1_SCSequenceUser      = 0x1F
} vc1_sc;
+
+#if 0
+typedef enum
+{
+ vc1_ProfileSimple = 0, /** Simple profile */
+ vc1_ProfileMain, /** Main profile */
+ vc1_ProfileReserved, /** Reserved */
+ vc1_ProfileAdvanced /** Advanced profile */
+} vc1_Profile;
+#endif
+
/* Picture types as programmed to hardware (see frame-type enum comment in
   vc1common.h): I and B bits combine for BI, P plus bit 3 for Skipped. */
typedef enum
{
    vc1_PtypeI  = 1,
    vc1_PtypeP  = 2,
    vc1_PtypeB  = 4,
    vc1_PtypeBI = 5,
    vc1_PtypeSkipped  = 8|2,
} vc1_ptype;

/* Field-pair picture types for interlaced field pictures (top/bottom). */
typedef enum
{
    vc1_PtypeII   = 0,
    vc1_PtypeIP   = 1,
    vc1_PtypePI   = 2,
    vc1_PtypePP   = 3,
    vc1_PtypeBB   = 4,
    vc1_PtypeBBI  = 5,
    vc1_PtypeBIB  = 6,
    vc1_PtypeBIBI = 7
} vc1_fptype;

/* Bitplane coding modes (IMODE); trailing comments show the IMODE codeword. */
typedef enum
{
    vc1_Imode_Raw  = 0,     //0x0000
    vc1_Imode_Norm2,        //0x10
    vc1_Imode_Diff2,        //0x001
    vc1_Imode_Norm6,        //0x11
    vc1_Imode_Diff6,        //0x0001
    vc1_Imode_Rowskip,      //0x010
    vc1_Imode_Colskip,      //0x011
} vc1_Imode;

/* calculation of MAX_BITPLANE_SZ 2048/16x1088/16 pel= 128x68 bit used for bitplane
 * as rows are packed in DWORDS
 * we have (128)/32 * 68 Dwords needed for bitplane storage
 */
#define MAX_BITPLANE_SZ 272

/* Full Info: per-stream parsing state shared across picture layers. */
typedef struct {
    unsigned char* bufptr;           /* current frame, point to header or data */
    int bitoff;                      /* mostly point to next frame header or PSC */
    int picture_info_has_changed;    /* set when picture-layer info differs from previous */
    vc1_metadata_t metadata;         /* sequence/entry-point level metadata */
    vc1_PictureLayerHeader picLayerHeader;
    uint32_t bitplane[MAX_BITPLANE_SZ];   /* packed bitplane storage, see above */
} vc1_Info;
+
+#ifdef __cplusplus
+}
+#endif
+
/* Indices into the parser's reference-frame array (time-relative slots). */
enum {
    VC1_REF_FRAME_T_MINUS_1 = 0,
    VC1_REF_FRAME_T_MINUS_2,
    VC1_REF_FRAME_T_MINUS_0,
    VC1_NUM_REFERENCE_FRAMES,   /* count, not a slot */
};
+
/* Flags recording which start codes have been seen; the *_MASK macros give
   the set of start codes that must precede a given layer. */
enum vc1_sc_seen_flags
{
    VC1_SC_INVALID = 0 << 0,
    VC1_SC_SEQ     = 1 << 0,
    VC1_SC_EP      = 1 << 1,
    VC1_SC_FRM     = 1 << 2,
    VC1_SC_FLD     = 1 << 3,
    VC1_SC_SLC     = 1 << 4,
    VC1_SC_UD      = 1 << 5,
};
/* Mask bodies are parenthesized: the previous unparenthesized definitions
   mis-expanded inside larger expressions -- e.g. "x & VC1_FRM_MASK" became
   "(x & VC1_SC_SEQ) | VC1_SC_EP | VC1_SC_FRM" because "&" binds tighter
   than "|". */
#define VC1_SEQ_MASK (VC1_SC_SEQ)
#define VC1_EP_MASK  (VC1_SC_SEQ | VC1_SC_EP)
#define VC1_FRM_MASK (VC1_SC_SEQ | VC1_SC_EP | VC1_SC_FRM)
#define VC1_FLD_MASK (VC1_SC_SEQ | VC1_SC_EP | VC1_SC_FRM | VC1_SC_FLD)
+
/* Bookkeeping for one stored reference frame. */
typedef struct {
    int id;                 /* frame identifier assigned by the workload layer */
    uint32_t intcomp_top;   /* intensity-compensation state, top field */
    uint32_t intcomp_bot;   /* intensity-compensation state, bottom field */
    int fcm;                /* frame coding mode */
    int type;               /* picture type (vc1_ptype values) */
    int anchor[2];          /* one per field */
    int rr_en;              /* range reduction enable flag at sequence layer */
    int rr_frm;             /* range reduction flag at picture layer */
} ref_frame_t;
+
/* Top-level VC-1 parser context: stream info, the hardware register shadow
   and the reference-frame bookkeeping, plus (VBP builds only) host-side
   bitplane storage. */
typedef struct
{
    uint32_t sc_seen_since_last_wkld;   /* SC flags accumulated since the last workload */
    uint32_t sc_seen;                   /* SC flags for the current picture */
    uint32_t is_frame_start;
    uint8_t is_reference_picture;
    uint32_t intcomp_last[4];           /* for B frames */
    uint32_t intcomp_top[2];
    uint32_t intcomp_bot[2];
    vc1_Info info;                      /* parsed stream state */
    VC1D_SPR_REGS spr;                  /* shadow of decoder registers */
    ref_frame_t ref_frame[VC1_NUM_REFERENCE_FRAMES];
#ifdef VBP
    /* A storage area is provided for each type of bit plane. Only one of */
    /* each type will ever be used for a picture and never more than three */
    /* bit-planes per picture, and often only one is used. We never clear */
    /* this data and writes into it when we need to. vc1parse_bitplane.c */
    /* makes use of these set them to one of the bitplane types included */
    /* in the picture header structure. Those structures are set every */
    /* time a picture parse begins. */
    uint32_t bp_forwardmb[4096];
    uint32_t bp_acpred[4096];
    uint32_t bp_mvtypemb[4096];
    uint32_t bp_overflags[4096];
    uint32_t bp_skipmb[4096];
    uint32_t bp_directmb[4096];
    uint32_t bp_fieldtx[4096];
    uint32_t start_code;
#endif
} vc1_viddec_parser_t;
+
+#endif //_VC1_H_
+
diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse.c b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse.c
new file mode 100644
index 0000000..a033385
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse.c
@@ -0,0 +1,557 @@
+/* ///////////////////////////////////////////////////////////////////////
+//
+// INTEL CORPORATION PROPRIETARY INFORMATION
+// This software is supplied under the terms of a license agreement or
+// nondisclosure agreement with Intel Corporation and may not be copied
+// or disclosed except in accordance with the terms of that agreement.
+// Copyright (c) 2008 Intel Corporation. All Rights Reserved.
+//
+// Description: Parses VC-1 bitstream layers down to but not including
+// macroblock layer.
+//
+*/
+
+#include "viddec_fw_debug.h"
+#include "vc1parse.h"
+
+#define VC1_PIXEL_IN_LUMA 16
+
/*------------------------------------------------------------------------------
 * Parse modified rcv file, start codes are inserted using rcv2vc1.c.
 * source is in
 * http://svn.jf.intel.com/svn/DHG_Src/CESWE_Src/DEV/trunk/sv/mfd/tools/utils.
 * Assume rcv file width < 90,112 pixels to differentiate from a real VC1
 * advanced profile header.
 * Original rcv description is in annex L
 * Table 263 of SMPTE 421M.
 */
vc1_Status vc1_ParseRCVSequenceLayer (void* ctxt, vc1_Info *pInfo)
{
    uint32_t result;
    vc1_Status status = VC1_STATUS_OK;
    vc1_metadata_t *md = &pInfo->metadata;
    vc1_RcvSequenceHeader rcv;

    memset(&rcv, 0, sizeof(vc1_RcvSequenceHeader));

    /* STRUCT_A: coded dimensions.
       NOTE(review): 'result' from viddec_pm_get_bits is never checked in this
       function -- a short read leaves zeroed fields from the memset. */
    result = viddec_pm_get_bits(ctxt, &rcv.struct_a_rcv, 32);
    md->width = rcv.struct_a.HORIZ_SIZE;
    md->height = rcv.struct_a.VERT_SIZE;

    /* STRUCT_C: profile and coding-tool flags. */
    result = viddec_pm_get_bits(ctxt, &rcv.struct_c_rcv, 32);
    md->PROFILE = rcv.struct_c.PROFILE >> 2;
    md->LOOPFILTER = rcv.struct_c.LOOPFILTER;
    md->MULTIRES = rcv.struct_c.MULTIRES;
    md->FASTUVMC = rcv.struct_c.FASTUVMC;
    md->EXTENDED_MV = rcv.struct_c.EXTENDED_MV;
    md->DQUANT = rcv.struct_c.DQUANT;
    md->VSTRANSFORM = rcv.struct_c.VSTRANSFORM;
    md->OVERLAP = rcv.struct_c.OVERLAP;
    md->RANGERED = rcv.struct_c.RANGERED;
    md->MAXBFRAMES = rcv.struct_c.MAXBFRAMES;
    md->QUANTIZER = rcv.struct_c.QUANTIZER;
    md->FINTERPFLAG = rcv.struct_c.FINTERPFLAG;
#ifdef VBP
    md->SYNCMARKER = rcv.struct_c.SYNCMARKER;
#endif

    /* DQUANT is not available in Simple profile, nor with MULTIRES in Main. */
    if ((md->PROFILE == VC1_PROFILE_SIMPLE) ||
        (md->MULTIRES && md->PROFILE == VC1_PROFILE_MAIN))
    {
        md->DQUANT = 0;
    }
    // TODO: NEED TO CHECK RESERVED BITS ARE 0

    /* Macroblock counts: round pixel dimensions up to 16-pel luma blocks. */
    md->widthMB = (md->width + 15 ) / VC1_PIXEL_IN_LUMA;
    md->heightMB = (md->height + 15) / VC1_PIXEL_IN_LUMA;

    DEB("rcv: beforemod: res: %dx%d\n", md->width, md->height);

    /* WL takes resolution in unit of 2 pel - sec. 6.2.13.1 */
    md->width = md->width/2 -1;
    md->height = md->height/2 -1;

    DEB("rcv: res: %dx%d\n", md->width, md->height);

    // POPULATE WORKLOAD ITEM
    {
        viddec_workload_item_t wi;

        wi.vwi_type = VIDDEC_WORKLOAD_VC1_SEQ_HDR_STRUCT_A_C;

        wi.vc1_sh_struct_a_c.size = 0;
        wi.vc1_sh_struct_a_c.flags = 0;
        wi.vc1_sh_struct_a_c.pad = 0;

        viddec_fw_vc1_set_rcv_horiz_size(&wi.vc1_sh_struct_a_c, rcv.struct_a.HORIZ_SIZE);
        viddec_fw_vc1_set_rcv_vert_size(&wi.vc1_sh_struct_a_c, rcv.struct_a.VERT_SIZE);

        viddec_fw_vc1_set_rcv_bitrtq_postproc(&wi.vc1_sh_struct_a_c, rcv.struct_c.BITRTQ_POSTPROC);
        viddec_fw_vc1_set_rcv_frmrtq_postproc(&wi.vc1_sh_struct_a_c, rcv.struct_c.FRMRTQ_POSTPROC);
        viddec_fw_vc1_set_rcv_profile(&wi.vc1_sh_struct_a_c, rcv.struct_c.PROFILE);
        viddec_fw_vc1_set_rcv_level(&wi.vc1_sh_struct_a_c, 0);
        viddec_fw_vc1_set_rcv_cbr(&wi.vc1_sh_struct_a_c, 0);
        viddec_fw_vc1_set_rcv_rangered(&wi.vc1_sh_struct_a_c, rcv.struct_c.RANGERED);
        viddec_fw_vc1_set_rcv_maxbframes(&wi.vc1_sh_struct_a_c, rcv.struct_c.MAXBFRAMES);
        viddec_fw_vc1_set_rcv_finterpflag(&wi.vc1_sh_struct_a_c, rcv.struct_c.FINTERPFLAG);

        result = viddec_pm_append_workitem(ctxt, &wi);
    }

    return status;
}
+
+/*------------------------------------------------------------------------------
+ * Parse sequence layer. This function is only applicable to advanced profile
+ * as simple and main profiles use other mechanisms to communicate these
+ * metadata.
+ * Table 3 of SMPTE 421M.
+ * Table 13 of SMPTE 421M for HRD_PARAM().
+ *------------------------------------------------------------------------------
+ */
+
+vc1_Status vc1_ParseSequenceLayer(void* ctxt, vc1_Info *pInfo)
+{
+ uint32_t tempValue;
+ vc1_Status status = VC1_STATUS_OK;
+ vc1_metadata_t *md = &pInfo->metadata;
+ vc1_SequenceLayerHeader sh;
+ uint32_t result;
+
+ memset(&sh, 0, sizeof(vc1_SequenceLayerHeader));
+
+ // PARSE SEQUENCE HEADER
+ result = viddec_pm_get_bits(ctxt, &sh.flags, 15);
+ if(result == 1)
+ {
+ md->PROFILE = sh.seq_flags.PROFILE;
+#ifdef VBP
+ md->LEVEL = sh.seq_flags.LEVEL;
+#endif
+ }
+
+ result = viddec_pm_get_bits(ctxt, &sh.max_size, 32);
+ if(result == 1)
+ {
+ md->POSTPROCFLAG = sh.seq_max_size.POSTPROCFLAG;
+ md->width = sh.seq_max_size.MAX_CODED_WIDTH;
+ md->height = sh.seq_max_size.MAX_CODED_HEIGHT;
+ md->PULLDOWN = sh.seq_max_size.PULLDOWN;
+ md->INTERLACE = sh.seq_max_size.INTERLACE;
+ md->TFCNTRFLAG = sh.seq_max_size.TFCNTRFLAG;
+ md->FINTERPFLAG = sh.seq_max_size.FINTERPFLAG;
+ md->PSF = sh.seq_max_size.PSF;
+ }
+
+ if (sh.seq_max_size.DISPLAY_EXT == 1)
+ {
+ result = viddec_pm_get_bits(ctxt, &sh.disp_size, 29);
+ if(result == 1)
+ {
+ if (sh.seq_disp_size.ASPECT_RATIO_FLAG == 1)
+ {
+ result = viddec_pm_get_bits(ctxt, &tempValue, 4);
+ sh.ASPECT_RATIO = tempValue;
+ if (sh.ASPECT_RATIO == 15)
+ {
+ result = viddec_pm_get_bits(ctxt, &sh.aspect_size, 16);
+ }
+ }
+
+ result = viddec_pm_get_bits(ctxt, &tempValue, 1);
+ sh.FRAMERATE_FLAG = tempValue;
+ if (sh.FRAMERATE_FLAG == 1)
+ {
+ result = viddec_pm_get_bits(ctxt, &tempValue, 1);
+ sh.FRAMERATEIND = tempValue;
+ if (sh.FRAMERATEIND == 0)
+ {
+ result = viddec_pm_get_bits(ctxt, &sh.framerate_fraction, 12);
+ }
+ else
+ {
+ result = viddec_pm_get_bits(ctxt, &tempValue, 16);
+ sh.FRAMERATEEXP = tempValue;
+ }
+ }
+
+ result = viddec_pm_get_bits(ctxt, &tempValue, 1);
+ sh.COLOR_FORMAT_FLAG = tempValue;
+ if (sh.COLOR_FORMAT_FLAG == 1)
+ {
+ result = viddec_pm_get_bits(ctxt, &sh.color_format, 24);
+ }
+ } // Successful get of display size
+ } // DISPLAY_EXT is 1
+
+ result = viddec_pm_get_bits(ctxt, &tempValue, 1);
+ sh.HRD_PARAM_FLAG = tempValue;
+ if (sh.HRD_PARAM_FLAG == 1)
+ {
+ /* HRD_PARAM(). */
+ result = viddec_pm_get_bits(ctxt, &tempValue, 5);
+ sh.HRD_NUM_LEAKY_BUCKETS = tempValue;
+ md->HRD_NUM_LEAKY_BUCKETS = sh.HRD_NUM_LEAKY_BUCKETS;
+ // Skip the rest of the parsing - hrdinfo is not required for decode or for attributes
+ }
+ else
+ {
+ md->HRD_NUM_LEAKY_BUCKETS = 0;
+ }
+
+ md->widthMB = (((md->width + 1) * 2) + 15) / VC1_PIXEL_IN_LUMA;
+ md->heightMB = (((md->height + 1) * 2) + 15) / VC1_PIXEL_IN_LUMA;
+
+ DEB("md: res: %dx%d\n", md->width, md->height);
+ DEB("sh: dispres: %dx%d\n", sh.seq_disp_size.DISP_HORIZ_SIZE, sh.seq_disp_size.DISP_VERT_SIZE);
+
+ // POPULATE WORKLOAD ITEM
+ {
+ viddec_workload_item_t wi_sl, wi_de;
+
+ wi_sl.vwi_type = VIDDEC_WORKLOAD_SEQUENCE_INFO;
+
+ wi_sl.vc1_sl.size = 0;
+ wi_sl.vc1_sl.flags = 0;
+ wi_sl.vc1_sl.pad = 0;
+
+ viddec_fw_vc1_set_profile(&wi_sl.vc1_sl, sh.seq_flags.PROFILE);
+ viddec_fw_vc1_set_level(&wi_sl.vc1_sl, sh.seq_flags.LEVEL);
+ viddec_fw_vc1_set_colordiff_format(&wi_sl.vc1_sl, sh.seq_flags.COLORDIFF_FORMAT);
+ viddec_fw_vc1_set_pulldown(&wi_sl.vc1_sl, sh.seq_max_size.PULLDOWN);
+ viddec_fw_vc1_set_max_coded_width(&wi_sl.vc1_sl, sh.seq_max_size.MAX_CODED_WIDTH);
+ viddec_fw_vc1_set_max_coded_height(&wi_sl.vc1_sl, sh.seq_max_size.MAX_CODED_HEIGHT);
+
+ viddec_fw_vc1_set_bitrtq_postproc(&wi_sl.vc1_sl, sh.seq_flags.BITRTQ_POSTPROC);
+ viddec_fw_vc1_set_frmrtq_postproc(&wi_sl.vc1_sl, sh.seq_flags.FRMRTQ_POSTPROC);
+ viddec_fw_vc1_set_interlace(&wi_sl.vc1_sl, sh.seq_max_size.INTERLACE);
+ viddec_fw_vc1_set_tfcntrflag(&wi_sl.vc1_sl, sh.seq_max_size.TFCNTRFLAG);
+ viddec_fw_vc1_set_finterpflag(&wi_sl.vc1_sl, sh.seq_max_size.FINTERPFLAG);
+ viddec_fw_vc1_set_psf(&wi_sl.vc1_sl, sh.seq_max_size.PSF);
+ viddec_fw_vc1_set_display_ext(&wi_sl.vc1_sl, sh.seq_max_size.DISPLAY_EXT);
+
+ result = viddec_pm_append_workitem(ctxt, &wi_sl);
+
+ // send DISPLAY EXTENSION metadata if present
+ if (sh.seq_max_size.DISPLAY_EXT)
+ {
+ wi_de.vwi_type = VIDDEC_WORKLOAD_DISPLAY_INFO;
+
+ wi_de.vc1_sl_de.size = 0;
+ wi_de.vc1_sl_de.framerate = 0;
+ wi_de.vc1_sl_de.aspectsize = 0;
+
+ viddec_fw_vc1_set_disp_horiz_size(&wi_de.vc1_sl_de, sh.seq_disp_size.DISP_HORIZ_SIZE);
+ viddec_fw_vc1_set_disp_vert_size(&wi_de.vc1_sl_de, sh.seq_disp_size.DISP_VERT_SIZE);
+ viddec_fw_vc1_set_disp_aspect_ratio_flag(&wi_de.vc1_sl_de, sh.seq_disp_size.ASPECT_RATIO_FLAG);
+ viddec_fw_vc1_set_disp_color_format_flag(&wi_de.vc1_sl_de, sh.COLOR_FORMAT_FLAG);
+ viddec_fw_vc1_set_disp_framerate_flag(&wi_de.vc1_sl_de, sh.FRAMERATE_FLAG);
+ viddec_fw_vc1_set_disp_framerateind(&wi_de.vc1_sl_de, sh.FRAMERATEIND);
+
+ viddec_fw_vc1_set_disp_aspect_ratio(&wi_de.vc1_sl_de, sh.ASPECT_RATIO);
+ viddec_fw_vc1_set_disp_frameratenr(&wi_de.vc1_sl_de, sh.seq_framerate_fraction.FRAMERATENR);
+ viddec_fw_vc1_set_disp_frameratedr(&wi_de.vc1_sl_de, sh.seq_framerate_fraction.FRAMERATEDR);
+ viddec_fw_vc1_set_disp_framerateexp(&wi_de.vc1_sl_de, sh.FRAMERATEEXP);
+
+ viddec_fw_vc1_set_disp_aspect_ratio_horiz_size(&wi_de.vc1_sl_de, sh.seq_aspect_size.ASPECT_HORIZ_SIZE);
+ viddec_fw_vc1_set_disp_aspect_ratio_vert_size(&wi_de.vc1_sl_de, sh.seq_aspect_size.ASPECT_VERT_SIZE);
+ viddec_fw_vc1_set_disp_color_prim(&wi_de.vc1_sl_de, sh.seq_color_format.COLOR_PRIM);
+ viddec_fw_vc1_set_disp_transfer_char(&wi_de.vc1_sl_de, sh.seq_color_format.TRANSFER_CHAR);
+
+ result = viddec_pm_append_workitem(ctxt, &wi_de);
+ }
+ }
+
+ return status;
+}
+
+/*------------------------------------------------------------------------------
+ * Parse entry point layer. This function is only applicable for advanced
+ * profile and is used to signal a random access point and changes in coding
+ * control parameters.
+ * Table 14 of SMPTE 421M.
+ * Table 15 of SMPTE 421M for HRD_FULLNESS().
+ *------------------------------------------------------------------------------
+ */
+vc1_Status vc1_ParseEntryPointLayer(void* ctxt, vc1_Info *pInfo)
+{
+ vc1_Status status = VC1_STATUS_OK;
+ vc1_metadata_t *md = &pInfo->metadata;
+ vc1_EntryPointHeader ep;
+ uint32_t result;
+ uint32_t temp;
+
+ memset(&ep, 0, sizeof(vc1_EntryPointHeader));
+
+ // PARSE ENTRYPOINT HEADER
+ result = viddec_pm_get_bits(ctxt, &ep.flags, 13);
+ if(result == 1)
+ {
+ // Skip the flags already peeked at (13) and the unneeded hrd_full data
+ // NOTE: HRD_NUM_LEAKY_BUCKETS is initialized to 0 when HRD_PARAM_FLAG is not present
+ int hrd_bits = md->HRD_NUM_LEAKY_BUCKETS * 8;
+ while(hrd_bits >= 32)
+ {
+ result = viddec_pm_skip_bits(ctxt, 32);
+ hrd_bits -= 32;
+ }
+ result = viddec_pm_skip_bits(ctxt, hrd_bits);
+
+ md->REFDIST = 0;
+ md->PANSCAN_FLAG = ep.ep_flags.PANSCAN_FLAG;
+ md->REFDIST_FLAG = ep.ep_flags.REFDIST_FLAG;
+ md->LOOPFILTER = ep.ep_flags.LOOPFILTER;
+ md->FASTUVMC = ep.ep_flags.FASTUVMC;
+ md->EXTENDED_MV = ep.ep_flags.EXTENDED_MV;
+ md->DQUANT = ep.ep_flags.DQUANT;
+ md->VSTRANSFORM = ep.ep_flags.VSTRANSFORM;
+ md->OVERLAP = ep.ep_flags.OVERLAP;
+ md->QUANTIZER = ep.ep_flags.QUANTIZER;
+
+ result = viddec_pm_get_bits(ctxt, &temp, 1);
+ if(result == 1)
+ {
+ ep.CODED_SIZE_FLAG = temp;
+ if(ep.CODED_SIZE_FLAG)
+ {
+ result = viddec_pm_get_bits(ctxt, &ep.size, 24);
+ md->width = ep.ep_size.CODED_WIDTH;
+ md->height = ep.ep_size.CODED_HEIGHT;
+ }
+ }
+ if(ep.ep_flags.EXTENDED_MV)
+ {
+ result = viddec_pm_get_bits(ctxt, &temp, 1);
+ md->EXTENDED_DMV = ep.EXTENDED_DMV = temp;
+ }
+
+ result = viddec_pm_get_bits(ctxt, &temp, 1);
+ if(result == 1)
+ {
+ md->RANGE_MAPY_FLAG = ep.RANGE_MAPY_FLAG = temp;
+ if(ep.RANGE_MAPY_FLAG)
+ {
+ result = viddec_pm_get_bits(ctxt, &temp, 3);
+ md->RANGE_MAPY = ep.RANGE_MAPY = temp;
+ }
+ }
+
+ result = viddec_pm_get_bits(ctxt, &temp, 1);
+ if(result == 1)
+ {
+ md->RANGE_MAPUV_FLAG = ep.RANGE_MAPUV_FLAG = temp;
+ if(ep.RANGE_MAPUV_FLAG)
+ {
+ result = viddec_pm_get_bits(ctxt, &temp, 3);
+ md->RANGE_MAPUV = ep.RANGE_MAPUV = temp;
+ }
+ }
+ }
+
+ // POPULATE WORKLOAD ITEM
+ {
+ viddec_workload_item_t wi;
+
+ wi.vwi_type = VIDDEC_WORKLOAD_GOP_INFO;
+
+ wi.vc1_ep.size = 0;
+ wi.vc1_ep.flags = 0;
+ wi.vc1_ep.pad = 0;
+
+ viddec_fw_vc1_set_ep_size_flag(&wi.vc1_ep, ep.CODED_SIZE_FLAG);
+ viddec_fw_vc1_set_ep_horiz_size(&wi.vc1_ep, ep.ep_size.CODED_WIDTH);
+ viddec_fw_vc1_set_ep_vert_size(&wi.vc1_ep, ep.ep_size.CODED_HEIGHT);
+
+ viddec_fw_vc1_set_ep_broken_link(&wi.vc1_ep, ep.ep_flags.BROKEN_LINK);
+ viddec_fw_vc1_set_ep_closed_entry(&wi.vc1_ep, ep.ep_flags.CLOSED_ENTRY);
+ viddec_fw_vc1_set_ep_panscan_flag(&wi.vc1_ep, ep.ep_flags.PANSCAN_FLAG);
+ viddec_fw_vc1_set_ep_range_mapy_flag(&wi.vc1_ep, ep.RANGE_MAPY_FLAG);
+ viddec_fw_vc1_set_ep_range_mapy(&wi.vc1_ep, ep.RANGE_MAPY);
+ viddec_fw_vc1_set_ep_range_mapuv_flag(&wi.vc1_ep, ep.RANGE_MAPUV_FLAG);
+ viddec_fw_vc1_set_ep_range_mapuv(&wi.vc1_ep, ep.RANGE_MAPUV);
+
+ result = viddec_pm_append_workitem(ctxt, &wi);
+ }
+
+#ifdef VBP
+ md->BROKEN_LINK = ep.ep_flags.BROKEN_LINK;
+ md->CLOSED_ENTRY = ep.ep_flags.CLOSED_ENTRY;
+#endif
+
+ DEB("ep: res: %dx%d\n", ep.ep_size.CODED_WIDTH, ep.ep_size.CODED_HEIGHT);
+ DEB("md: after ep: res: %dx%d\n", md->width, md->height);
+ return status;
+}
+
+/*------------------------------------------------------------------------------
+ * Parse picture layer. This function parses the picture layer.
+ *------------------------------------------------------------------------------
+ */
+
+vc1_Status vc1_ParsePictureLayer(void* ctxt, vc1_Info *pInfo)
+{
+ vc1_Status status = VC1_STATUS_OK;
+ uint32_t temp;
+ int i;
+
+ for(i=0; i<VC1_MAX_BITPLANE_CHUNKS; i++)
+ {
+ pInfo->metadata.bp_raw[i] = true;
+ }
+
+ if (pInfo->metadata.PROFILE == VC1_PROFILE_ADVANCED)
+ {
+ VC1_PEEK_BITS(2, temp); /* fcm */
+ if( (pInfo->metadata.INTERLACE == 1) && (temp == VC1_FCM_FIELD_INTERLACE))
+ {
+ status = vc1_ParseFieldHeader_Adv(ctxt, pInfo);
+ }
+ else
+ {
+ status = vc1_ParsePictureHeader_Adv(ctxt, pInfo);
+ }
+ }
+ else
+ {
+ status = vc1_ParsePictureHeader(ctxt, pInfo);
+ }
+
+ return status;
+}
+
+/*------------------------------------------------------------------------------
+ * Parse field picture layer. This function parses the field picture layer.
+ *------------------------------------------------------------------------------
+ */
+
+vc1_Status vc1_ParseFieldLayer(void* ctxt, vc1_Info *pInfo)
+{
+ vc1_Status status = VC1_STATUS_PARSE_ERROR;
+ vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader;
+
+ if (pInfo->metadata.PROFILE == VC1_PROFILE_ADVANCED) {
+ if (picLayerHeader->CurrField == 0)
+ {
+ picLayerHeader->PTYPE = picLayerHeader->PTypeField1;
+ picLayerHeader->BottomField = (uint8_t) (1 - picLayerHeader->TFF);
+ }
+ else
+ {
+ picLayerHeader->BottomField = (uint8_t) (picLayerHeader->TFF);
+ picLayerHeader->PTYPE = picLayerHeader->PTypeField2;
+ }
+ status = vc1_ParsePictureFieldHeader_Adv(ctxt, pInfo);
+ }
+
+ return status;
+}
+
+/*------------------------------------------------------------------------------
+ * Parse slice layer. This function parses the slice layer, which is only
+ * supported by advanced profile.
+ * Table 26 of SMPTE 421M but skipping parsing of macroblock layer.
+ *------------------------------------------------------------------------------
+ */
+
+vc1_Status vc1_ParseSliceLayer(void* ctxt, vc1_Info *pInfo)
+{
+ uint32_t tempValue;
+ uint32_t SLICE_ADDR;
+ vc1_Status status = VC1_STATUS_OK;
+
+ VC1_GET_BITS9(9, SLICE_ADDR);
+ VC1_GET_BITS9(1, tempValue); /* PIC_HEADER_FLAG. */
+ if (tempValue == 1) {
+ uint8_t *last_bufptr = pInfo->bufptr;
+ uint32_t last_bitoff = pInfo->bitoff;
+ status = vc1_ParsePictureLayer(ctxt, pInfo);
+ pInfo->picture_info_has_changed = 1;
+ if( status ) {
+ /* FIXME - is this a good way of handling this? Failed, see if it's for fields */
+ pInfo->bufptr = last_bufptr;
+ pInfo->bitoff = last_bitoff;
+ status = vc1_ParseFieldHeader_Adv(ctxt, pInfo);
+ }
+ } else
+ pInfo->picture_info_has_changed = 0;
+
+ pInfo->picLayerHeader.SLICE_ADDR = SLICE_ADDR;
+
+ return status;
+}
+
+/*------------------------------------------------------------------------------
+ * This function parses the user data information as defined in SMPTE 421M annex F.
+ * It then appends that data to the workload.
+ * Assume the flush byte 0x80 is within the 3 bytes before next start code.
+ * let's put 1 byte per item first
+ *------------------------------------------------------------------------------
+ */
+vc1_Status vc1_ParseAndAppendUserData(void* ctxt, uint32_t sc)
+{
+ vc1_Status status = VC1_STATUS_OK;
+ uint32_t user_data;
+ viddec_workload_item_t wi;
+ uint32_t ud_id;
+
+ /* find the scope based on start code sc */
+ switch(sc) {
+ case vc1_SCSequenceUser:
+ wi.vwi_type = VIDDEC_WORKLOAD_SEQ_USER_DATA;
+ break;
+ case vc1_SCEntryPointUser:
+ wi.vwi_type = VIDDEC_WORKLOAD_GOP_USER_DATA;
+ break;
+ case vc1_SCFrameUser:
+ wi.vwi_type = VIDDEC_WORKLOAD_FRM_USER_DATA;
+ break;
+ case vc1_SCFieldUser:
+ wi.vwi_type = VIDDEC_WORKLOAD_FLD_USER_DATA;
+ break;
+ case vc1_SCSliceUser:
+ wi.vwi_type = VIDDEC_WORKLOAD_SLC_USER_DATA;
+ break;
+ default:
+ wi.vwi_type = VIDDEC_WORKLOAD_INVALID; //ERROR - should not happen
+ break;
+ }
+
+ /* get identifier - 4 bytes*/
+ // Extract this information but discard it for now
+ VC1_GET_BITS(32, ud_id);
+
+ /* Read 1 byte of user data and store it in workitem for the current stream level (SEQ/GOP/PIC).
+ Keep adding data payloads till it reaches size 11. When it is 11, the maximum user data payload size,
+ append the workitem. This loop is repeated till all user data is extracted and appended. */
+ wi.user_data.size = 0;
+ while(viddec_pm_get_bits(ctxt, &user_data, 8) != -1)
+ {
+ /* Store the valid byte in data payload */
+ wi.user_data.data_payload[wi.user_data.size] = user_data;
+ wi.user_data.size++;
+
+ /* When size exceeds payload size, append workitem and continue */
+ if (wi.user_data.size >= 11)
+ {
+ viddec_pm_setup_userdata(&wi);
+ viddec_pm_append_workitem(ctxt, &wi);
+ wi.user_data.size = 0;
+ }
+ if(user_data == 0x80) // flushing byte
+ break;
+ }
+ /* If size is not 0, append remaining user data. */
+ if (wi.user_data.size > 0)
+ {
+ int i;
+ for(i=wi.user_data.size;i<11;i++)
+ {
+ wi.user_data.data_payload[i] = 0;
+ }
+ viddec_pm_setup_userdata(&wi);
+ viddec_pm_append_workitem(ctxt, &wi);
+ wi.user_data.size = 0;
+ }
+
+ return(status);
+} // vc1_ParseAndAppendUserData
diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse.h b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse.h
new file mode 100644
index 0000000..d0e2f00
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse.h
@@ -0,0 +1,136 @@
+/* ///////////////////////////////////////////////////////////////////////
+//
+// INTEL CORPORATION PROPRIETARY INFORMATION
+// This software is supplied under the terms of a license agreement or
+// nondisclosure agreement with Intel Corporation and may not be copied
+// or disclosed except in accordance with the terms of that agreement.
+// Copyright (c) 2008 Intel Corporation. All Rights Reserved.
+//
+// Description: Common functions for parsing VC-1 bitstreams.
+//
+*/
+
+#ifndef _VC1PARSE_H_
+#define _VC1PARSE_H_
+
+#include "viddec_parser_ops.h"
+#include "vc1.h"
+
+/** @weakgroup vc1parse_defs VC-1 Parse Definitions */
+/** @ingroup vc1parse_defs */
+/*@{*/
+
+/* This macro gets the next less-than-nine bits from the bitstream. It is
+assumed that numBits is less than ten. */
+#ifdef VC1_VERBOSE
+#include <stdio.h>
+#define AUTO_TRACE OS_INFO("trace: %s\n", __FUNCTION__)
+#define DEBUGBITS(arg1, args ...) OS_INFO( arg1, ## args)
+#else
+#define AUTO_TRACE
+#define DEBUGBITS(...)
+#endif
+
+extern void *memset(void *s, int32_t c, uint32_t n);
+
+/* This macro gets the next numBits from the bitstream. */
+#define VC1_GET_BITS VC1_GET_BITS9
+#define VC1_GET_BITS9(numBits, value) \
+{ uint32_t __tmp__; \
+ viddec_pm_get_bits(ctxt, (uint32_t*)&__tmp__, numBits ); \
+ value = __tmp__;\
+ DEBUGBITS("BIT:%40s= 0x%x\tNo. of bits=%d\tbyte = %02x\t%s[%d]\n", #value, value, numBits, 0, __FILE__, __LINE__); \
+}
+
+#define VC1_PEEK_BITS(numBits, value) \
+{ uint32_t __tmp__; \
+ viddec_pm_peek_bits(ctxt, (uint32_t*)&__tmp__, numBits ); \
+ value = __tmp__;\
+ DEBUGBITS("PEEK%40s= 0x%x\tNo. of bits=%d\tbyte = %02x\t%s[%d]\n", #value, value, numBits, 0, __FILE__, __LINE__); \
+}
+
+/* This macro asserts if the condition is not true. */
+#ifdef VC1_VERBOSE
+#define VC1_ASSERT(condition) \
+{ \
+ if (! (condition)) \
+ OS_INFO("Failed " #condition "!\n"); \
+}
+#else
+#define VC1_ASSERT(condition)
+#endif
+
+/*@}*/
+
+/** @weakgroup vc1parse VC-1 Parse Functions */
+/** @ingroup vc1parse */
+/*@{*/
+
+extern const uint8_t VC1_MVMODE_LOW_TBL[];
+extern const uint8_t VC1_MVMODE_HIGH_TBL[];
+extern const int32_t VC1_BITPLANE_IMODE_TBL[];
+extern const int32_t VC1_BITPLANE_K_TBL[];
+extern const int32_t VC1_BFRACTION_TBL[];
+extern const int32_t VC1_REFDIST_TBL[];
+
+void vc1_end_frame(vc1_viddec_parser_t *parser);
+
+/* Top-level functions to parse bitstream layers for rcv format. */
+vc1_Status vc1_ParseRCVSequenceLayer (void* ctxt, vc1_Info *pInfo);
+
+/* Top-level functions to parse bitstream layers for the various profiles. */
+vc1_Status vc1_ParseSequenceLayer(void* ctxt, vc1_Info *pInfo);
+vc1_Status vc1_ParseEntryPointLayer(void* ctxt, vc1_Info *pInfo);
+vc1_Status vc1_ParseSliceLayer(void* ctxt, vc1_Info *pInfo);
+vc1_Status vc1_ParsePictureLayer(void* ctxt, vc1_Info *pInfo);
+vc1_Status vc1_ParseFieldLayer(void* ctxt, vc1_Info *pInfo);
+
+/* Top-level functions to parse headers for various picture layers for the
+simple and main profiles. */
+vc1_Status vc1_ParsePictureHeader(void* ctxt, vc1_Info *pInfo);
+vc1_Status vc1_ParsePictureHeader_ProgressiveIpicture(void* ctxt, vc1_Info *pInfo);
+vc1_Status vc1_ParsePictureHeader_ProgressivePpicture(void* ctxt, vc1_Info *pInfo);
+vc1_Status vc1_ParsePictureHeader_ProgressiveBpicture(void* ctxt, vc1_Info *pInfo);
+
+/* Top-level functions to parse common part of the headers for various picture
+layers for the advanced profile. */
+vc1_Status vc1_ParsePictureHeader_Adv(void* ctxt, vc1_Info *pInfo);
+vc1_Status vc1_ParseFieldHeader_Adv (void* ctxt, vc1_Info *pInfo);
+vc1_Status vc1_ParsePictureFieldHeader_Adv(void* ctxt, vc1_Info *pInfo);
+
+/* Functions to parse remainder part of the headers for various progressive
+picture layers for the advanced profile. */
+vc1_Status vc1_ParsePictureHeader_ProgressiveIpicture_Adv(void* ctxt, vc1_Info *pInfo);
+vc1_Status vc1_ParsePictureHeader_ProgressivePpicture_Adv(void* ctxt, vc1_Info *pInfo);
+vc1_Status vc1_ParsePictureHeader_ProgressiveBpicture_Adv(void* ctxt, vc1_Info *pInfo);
+
+/* Functions to parse remainder part of the headers for various interlace frame
+layers for the advanced profile. */
+vc1_Status vc1_ParsePictureHeader_InterlaceIpicture_Adv(void* ctxt, vc1_Info *pInfo);
+vc1_Status vc1_ParsePictureHeader_InterlacePpicture_Adv(void* ctxt, vc1_Info *pInfo);
+vc1_Status vc1_ParsePictureHeader_InterlaceBpicture_Adv(void* ctxt, vc1_Info *pInfo);
+
+/* Functions to parse remainder part of the headers for various interlace frame
+layers for the advanced profile. */
+vc1_Status vc1_ParseFieldHeader_InterlaceIpicture_Adv(void* ctxt, vc1_Info *pInfo);
+vc1_Status vc1_ParseFieldHeader_InterlacePpicture_Adv(void* ctxt, vc1_Info *pInfo);
+vc1_Status vc1_ParseFieldHeader_InterlaceBpicture_Adv(void* ctxt, vc1_Info *pInfo);
+
+/* Functions to parse syntax element in bitstream. */
+vc1_Status vc1_MVRangeDecode(void* ctxt, vc1_Info *pInfo);
+vc1_Status vc1_DMVRangeDecode(void* ctxt, vc1_Info *pInfo);
+vc1_Status vc1_CalculatePQuant(vc1_Info *pInfo);
+vc1_Status vc1_VOPDQuant(void* ctxt, vc1_Info *pInfo);
+vc1_Status vc1_DecodeBitplane(void* ctxt, vc1_Info *pInfo, uint32_t width, uint32_t height, vc1_bpp_type_t bptype);
+vc1_Status vc1_DecodeHuffmanOne(void* ctxt, int32_t *pDst, const int32_t *pDecodeTable);
+vc1_Status vc1_DecodeHuffmanPair(void* ctxt, const int32_t *pDecodeTable, int8_t *pFirst, int16_t *pSecond);
+
+void vc1_start_new_frame(void *parent, vc1_viddec_parser_t *parser);
+int32_t vc1_parse_emit_current_frame(void *parent, vc1_viddec_parser_t *parser);
+
+/* function to handle user data */
+vc1_Status vc1_ParseAndAppendUserData(void* ctxt, uint32_t sc);
+
+/*@}*/
+
+#endif /* _VC1PARSE_H_. */
diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_bitplane.c b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_bitplane.c
new file mode 100644
index 0000000..5ee9e18
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_bitplane.c
@@ -0,0 +1,753 @@
+/* ///////////////////////////////////////////////////////////////////////
+//
+// INTEL CORPORATION PROPRIETARY INFORMATION
+// This software is supplied under the terms of a license agreement or
+// nondisclosure agreement with Intel Corporation and may not be copied
+// or disclosed except in accordance with the terms of that agreement.
+// Copyright (c) 2008 Intel Corporation. All Rights Reserved.
+//
+// Description: Parses VC-1 bitstreams.
+//
+*/
+
+#include "vc1parse.h"
+
+#ifdef VBP
+#include "viddec_pm.h"
+#endif
+
+/*----------------------------------------------------------------------------*/
+
+
+/* put one bit into a buffer
+ * used for bitplane decoding, each bit correspond to a MB
+ * HW requires row to start at DW (32 bits) boundary
+ * input: value - bit value
+ * mbx - image width in MB
+ * mby - image height in MB
+ * x - x location (column) of MB in MB unit
+ * y - y location (row) of MB in MB unit
+ * output: outp - buffer to fill
+ */
+//#define put_bit(value,x,y,mbx,mby,invert,outp)
+static inline void put_bit( uint32_t value, int x, int y, int mbx, int mby, uint8_t invert, uint32_t* outp)
+{
+ int bit;
+ uint32_t *out;
+
+ bit = mby;
+
+ value ^= invert;
+ if (!value) return; /* assume buffer is initialized with zeros */
+
+ out = outp;
+ /* go to corresponding row location in DW unit */
+ out += (( mbx + 31 ) >> 5) * y;
+ out += x >> 5; /* go to corresponding column location in DW unit */
+ bit = x & 0x1f; /* compute remaining bits */
+ *out |= 1 << bit; /* put bit */
+}
+
+/* if b is the bit at location (x,y)
+ * b = b^invert
+ * used for bitplane decoding, each bit correspond to a MB
+ * HW requires row to start at DW (32 bits) boundary
+ * input: value - bit value
+ * x - x location (column) of MB in MB unit
+ * y - y location (row) of MB in MB unit
+ * mbx - image width in MB
+ * output: outp - buffer to fill
+ * returns bit value
+ */
+static inline int xor_bit( int x, int y, int mbx, uint32_t invert, uint32_t* outp)
+{
+ int bit;
+ uint32_t *out;
+ uint8_t value;
+ //if (invert == 0) return; /* do nothing if XOR with 0 */
+
+ out = outp;
+ out += (( mbx + 31 ) >> 5) * y; /* go to corresponding row location in DW unit */
+ out += x >> 5; /* go to corresponding row location in DW unit */
+ bit = x & 0x1f; /* compute remaining bits */
+
+ if (invert == 1)
+ *out ^= (1 << bit); /* put XOR bit */
+ value = (*out & (1 << bit)) >> bit; /* return bit value */
+
+ return(value);
+
+}
+
+/* get bit at location (x,y)
+ * used for bitplane decoding, each bit correspond to a MB
+ * HW requires row to start at DW (32 bits) boundary
+ * input: value - bit value
+ * x - x location (column) of MB in MB unit
+ * y - y location (row) of MB in MB unit
+ * mbx - image width in MB
+ * outp - bit buffer in dwords
+ * returns bit value
+ */
+static inline int get_bit( int x, int y, int mbx, uint32_t* outp)
+{
+ int bit;
+ uint32_t *out;
+ uint8_t value;
+
+ out = outp;
+ out += (( mbx + 31 ) >> 5) * y; /* go to corresponding row location in DW unit */
+ out += x >> 5; /* go to corresponding row location in DW unit */
+ bit = x & 0x1f; /* compute remaining bits */
+ value = (*out & (1 << bit)) >> bit; /* return bit value */
+
+ return(value);
+
+}
+
+static void vc1_InverseDiff(vc1_Bitplane *pBitplane, int32_t widthMB, int32_t heightMB)
+{
+ int32_t i, j, previousBit=0, temp;
+
+ for (i = 0; i < heightMB; i++)
+ {
+ for (j = 0; j < widthMB; j++)
+ {
+ if ((i == 0 && j == 0))
+ {
+ previousBit=xor_bit(j, i, widthMB, pBitplane->invert,
+ pBitplane->databits);
+ }
+ else if (j == 0) /* XOR with TOP */
+ {
+ previousBit = get_bit(0, i-1, widthMB, pBitplane->databits);
+ temp=xor_bit(j, i, widthMB, previousBit,
+ pBitplane->databits);
+ previousBit = temp;
+ }
+ //TODO isSameAsTop can be optimized
+ else if (((i > 0) && (previousBit !=
+ get_bit(j, i-1, widthMB, pBitplane->databits))))
+ {
+ temp=xor_bit(j, i, widthMB, pBitplane->invert,
+ pBitplane->databits);
+ previousBit = temp;
+ }
+ else
+ {
+ temp=xor_bit(j, i, widthMB, previousBit,
+ pBitplane->databits);
+ previousBit = temp;
+ }
+ }
+ }
+}
+
+
+/*----------------------------------------------------------------------------*/
+/* implement normal 2 mode bitplane decoding, SMPTE 412M 8.7.3.2
+ * width, height are in MB unit.
+ */
+static void vc1_Norm2ModeDecode(void* ctxt, vc1_Bitplane *pBitplane,
+ int32_t width, int32_t height)
+{
+ int32_t i;
+ int32_t tmp_databits = 0;
+
+ int32_t row[2], col[2];
+ int8_t tmp=0;
+
+ /* disable pBitplane->invert in the Norm2 decode stage of
+ VC1_BITPLANE_DIFF2_MODE */
+ if (pBitplane->imode == VC1_BITPLANE_DIFF2_MODE)
+ {
+ tmp = pBitplane->invert;
+ pBitplane->invert=0;
+ }
+
+ // By default, initialize the values for the even case
+ col[0] = 0; /* i%width; */
+ row[0] = 0; /* i/width; */
+ col[1] = 1; /* (i+1)%width; */
+ row[1] = 0; /* (i+1)/width; */
+
+ // If width*height is odd, the first bit is the value of the bitplane
+ // for the first macroblock
+ if ((width*height) & 1) /* first bit if size is odd */
+ {
+ VC1_GET_BITS(1, tmp_databits);
+ put_bit(tmp_databits, 0, 0, width, height, pBitplane->invert,
+ pBitplane->databits);
+
+ // Modify initialization for odd sizes
+ col[0] = 1; /* i%width; */
+ col[1] = 2; /* (i+1)%width; */
+
+ // Consider special case where width is 1
+ if(width == 1)
+ {
+ col[0] = 0; /* i%width; */
+ row[0] = 1; /* i/width; */
+ col[1] = 0; /* (i+1)%width; */
+ row[1] = 2; /* (i+1)/width; */
+ }
+ }
+
+ /* decode every pair of bits in natural scan order */
+ for (i = (width*height) & 1; i < (width*height/2)*2; i += 2)
+ {
+ int32_t tmp = 0;
+
+ //col[0]=i%width;
+ //row[0]=i/width;
+ //col[1]=(i+1)%width;
+ //row[1]=(i+1)/width;
+
+ VC1_GET_BITS(1, tmp);
+ if (tmp == 0)
+ {
+ put_bit(0, col[0],row[0], width, height, pBitplane->invert,
+ pBitplane->databits);
+ put_bit(0, col[1],row[1], width, height, pBitplane->invert,
+ pBitplane->databits);
+ }
+ else
+ {
+ VC1_GET_BITS(1, tmp);
+ if (tmp == 1)
+ {
+ put_bit(1, col[0],row[0], width, height, pBitplane->invert,
+ pBitplane->databits);
+ put_bit(1, col[1],row[1], width, height, pBitplane->invert,
+ pBitplane->databits);
+ }
+ else
+ {
+ VC1_GET_BITS(1, tmp);
+ if (tmp == 0)
+ {
+ put_bit(1, col[0],row[0], width, height, pBitplane->invert,
+ pBitplane->databits);
+ put_bit(0, col[1],row[1], width, height, pBitplane->invert,
+ pBitplane->databits);
+ }
+ else
+ {
+ put_bit(0, col[0],row[0], width, height, pBitplane->invert,
+ pBitplane->databits);
+ put_bit(1, col[1],row[1], width, height, pBitplane->invert,
+ pBitplane->databits);
+ }
+ }
+ }
+
+ // Consider special case where width is 1
+ if(width == 1)
+ {
+ row[0] += 2;
+ row[1] += 2;
+ }
+ else
+ {
+ col[0] += 2; /* i%width; */
+ if ( col[0] >= width )
+ {
+ // For odd sizes, col[0] can alternatively start at 0 and 1
+ col[0] -= width;
+ row[0]++;
+ }
+
+ col[1] += 2; /* (i+1)%width; */
+ if ( col[1] >= width )
+ {
+ // For odd sizes, col[1] can alternatively start at 0 and 1
+ col[1] -= width;
+ row[1]++;
+ }
+ }
+ }
+
+ /* restore value */
+ pBitplane->invert=tmp;
+}
+
+/*----------------------------------------------------------------------------*/
+/* compute Normal-6 mode bitplane decoding
+ * algorithm is described in SMPTE 421M 8.7.3.4
+ * width, height are in MB unit.
+ */
+static void vc1_Norm6ModeDecode(void* ctxt, vc1_Bitplane *pBitplane,
+ int32_t width, int32_t height)
+{
+ vc1_Status status;
+ int32_t i, j, k;
+ int32_t ResidualX = 0;
+ int32_t ResidualY = 0;
+ uint8_t _2x3tiled = (((width%3)!=0)&&((height%3)==0));
+
+ int32_t row, col;
+ int8_t tmp=0;
+
+ /* disable pBitplane->invert in the Norm2 decode stage of
+ VC1_BITPLANE_DIFF2_MODE */
+ if (pBitplane->imode == VC1_BITPLANE_DIFF6_MODE)
+ {
+ tmp = pBitplane->invert;
+ pBitplane->invert=0;
+ }
+
+ if (_2x3tiled)
+ {
+ int32_t sizeW = width/2;
+ int32_t sizeH = height/3;
+
+ for (i = 0; i < sizeH; i++)
+ {
+ row = 3*i; /* compute row location for tile */
+
+ for (j = 0; j < sizeW; j++)
+ {
+ col = 2*j + (width & 1); /* compute column location for tile */
+
+ /* get k=sum(bi2^i) were i is the ith bit of the tile */
+ status = vc1_DecodeHuffmanOne(ctxt, &k, VC1_BITPLANE_K_TBL);
+ VC1_ASSERT(status == VC1_STATUS_OK);
+
+ /* put bits in tile */
+ put_bit(k&1, col, row, width, height, pBitplane->invert,
+ pBitplane->databits);
+ put_bit(((k&2)>>1), col+1, row, width, height,
+ pBitplane->invert,pBitplane->databits);
+
+ put_bit(((k&4)>>2), col, row+1, width, height,
+ pBitplane->invert,pBitplane->databits);
+ put_bit(((k&8)>>3), col+1, row+1, width, height,
+ pBitplane->invert,pBitplane->databits);
+
+ put_bit(((k&16)>>4), col, row+2, width, height,
+ pBitplane->invert,pBitplane->databits);
+ put_bit(((k&32)>>5), col+1, row+2, width,
+ height,pBitplane->invert, pBitplane->databits);
+ }
+ }
+ ResidualX = width & 1;
+ ResidualY = 0;
+ }
+ else /* 3x2 tile */
+ {
+ int32_t sizeW = width/3;
+ int32_t sizeH = height/2;
+
+ for (i = 0; i < sizeH; i++)
+ {
+ row = 2*i + (height&1) ; /* compute row location for tile */
+
+ for (j = 0; j < sizeW; j++)
+ {
+ col = 3*j + (width%3); /* compute column location for tile */
+
+ /* get k=sum(bi2^i) were i is the ith bit of the tile */
+ status = vc1_DecodeHuffmanOne(ctxt, &k, VC1_BITPLANE_K_TBL);
+ VC1_ASSERT(status == VC1_STATUS_OK);
+
+ put_bit(k&1, col, row, width, height,pBitplane->invert,
+ pBitplane->databits);
+ put_bit((k&2)>>1, col+1, row, width, height, pBitplane->invert,
+ pBitplane->databits);
+ put_bit((k&4)>>2, col+2, row, width, height, pBitplane->invert,
+ pBitplane->databits);
+
+ put_bit((k&8)>>3, col, row+1, width, height,pBitplane->invert,
+ pBitplane->databits);
+ put_bit((k&16)>>4, col+1, row+1, width,
+ height,pBitplane->invert, pBitplane->databits);
+ put_bit((k&32)>>5, col+2, row+1, width,
+ height,pBitplane->invert, pBitplane->databits);
+ }
+ }
+ ResidualX = width % 3;
+ ResidualY = height & 1;
+ }
+
+#ifndef VBP
+ for (i = 0; i < ResidualX; i++)
+ {
+ int32_t ColSkip;
+ VC1_GET_BITS(1, ColSkip);
+
+ if (1 == ColSkip)
+ {
+ for(j = 0; j < height; j++)
+ {
+ int32_t Value = 0;
+ VC1_GET_BITS(1, Value);
+ put_bit(Value, i, j, width, height,pBitplane->invert,
+ pBitplane->databits);
+ }
+ }
+ }
+
+ for (j = 0; j < ResidualY; j++)
+ {
+ int32_t RowSkip;
+ VC1_GET_BITS(1, RowSkip);
+ if (1 == RowSkip)
+ {
+ for (i = ResidualX; i < width; i++)
+ {
+ int32_t Value = 0;
+ VC1_GET_BITS(1, Value);
+ put_bit(Value, i, j, width, height,pBitplane->invert,
+ pBitplane->databits);
+ }
+ }
+ }
+ #else
+ int32_t Value = 0;
+ for (i = 0; i < ResidualX; i++)
+ {
+ int32_t ColSkip;
+ VC1_GET_BITS(1, ColSkip);
+ Value = 0;
+ for(j = 0; j < height; j++)
+ {
+ if (1 == ColSkip)
+ {
+ VC1_GET_BITS(1, Value);
+ }
+ put_bit(Value, i, j, width, height,pBitplane->invert,
+ pBitplane->databits);
+ }
+ }
+
+ for (j = 0; j < ResidualY; j++)
+ {
+ int32_t RowSkip;
+ VC1_GET_BITS(1, RowSkip);
+ Value = 0;
+ for (i = ResidualX; i < width; i++)
+ {
+ if (1 == RowSkip)
+ {
+ VC1_GET_BITS(1, Value);
+ }
+ put_bit(Value, i, j, width, height,pBitplane->invert,
+ pBitplane->databits);
+ }
+ }
+ #endif
+
+ /* restore value */
+ pBitplane->invert=tmp;
+
+}
+
+/*----------------------------------------------------------------------------*/
+/* initialize bitplane to array of zeros
+ * each row begins with a dword
+ * input:
+ * width: widh in MB unit
+ * height: height in MB unit
+ * returns even bitplane size in dwords
+ */
+int initBitplane(vc1_Bitplane *pBitplane,uint32_t width, uint32_t height)
+{
+ int i;
+ int numDword = 0;
+
+ numDword = ((width + 31)>>5) * height;
+ numDword += numDword & 1; /* add 1 in case numDword is odd */
+
+ for (i=0;i<numDword;i++) pBitplane->databits[i] = 0;
+ return(numDword);
+}
+
+/*----------------------------------------------------------------------------*/
+/* modified IPP code for bitplane decoding
+ * width: width in MB unit
+ * height: height in MB unit
+ */
+vc1_Status vc1_DecodeBitplane(void* ctxt, vc1_Info *pInfo,
+ uint32_t width, uint32_t height, vc1_bpp_type_t bpnum)
+{
+ uint32_t i, j;
+ uint32_t tempValue;
+ vc1_Status status = VC1_STATUS_OK;
+ uint32_t biplaneSz; /* bitplane sz in dwords */
+ vc1_Bitplane bp;
+ vc1_Bitplane *bpp = &bp;
+
+ // By default, set imode to raw
+ pInfo->metadata.bp_raw[bpnum - VIDDEC_WORKLOAD_VC1_BITPLANE0] = true;
+
+ // bitplane data would be temporarily stored in the vc1 context
+ bpp->databits = pInfo->bitplane;
+
+ /* init bitplane to zero, function retunr bitplane buffer size in dword */
+ biplaneSz = initBitplane(bpp, width, height);
+
+ VC1_GET_BITS(1, tempValue);
+ bpp->invert = (uint8_t) tempValue;
+
+ if ((status = vc1_DecodeHuffmanOne(ctxt, &bpp->imode,
+ VC1_BITPLANE_IMODE_TBL)) != VC1_STATUS_OK)
+ {
+ return status;
+ }
+
+ // If the imode is VC1_BITPLANE_RAW_MODE: bitplane information is in the MB layer
+ // there is no need to parse for bitplane information in the picture layer
+ // Only bits need to be appropriately set in the block control register
+ // In all other modes, bitplane information follows and needs to be parsed and sent to the decoder
+
+ if (bpp->imode == VC1_BITPLANE_NORM2_MODE)
+ {
+ vc1_Norm2ModeDecode(ctxt, bpp, width, height);
+ }
+ else if (bpp->imode == VC1_BITPLANE_DIFF2_MODE)
+ {
+ vc1_Norm2ModeDecode(ctxt, bpp, width, height);
+ vc1_InverseDiff(bpp, width, height);
+ }
+ else if (bpp->imode == VC1_BITPLANE_NORM6_MODE)
+ {
+ vc1_Norm6ModeDecode(ctxt, bpp, width, height);
+
+ }
+ else if (bpp->imode == VC1_BITPLANE_DIFF6_MODE)
+ {
+ vc1_Norm6ModeDecode(ctxt, bpp, width, height);
+ vc1_InverseDiff(bpp, width, height);
+ }
+ else if (bpp->imode == VC1_BITPLANE_ROWSKIP_MODE)
+ {
+
+ for (i = 0; i < height; i++)
+ {
+ VC1_GET_BITS(1, tempValue);
+ /* if tempValue==0, put row of zeros Dwords*/
+ if (tempValue == 1)
+ {
+ for (j = 0; j < width; j++)
+ {
+ VC1_GET_BITS(1, tempValue);
+ put_bit( tempValue, j, i, width, height, bpp->invert,
+ bpp->databits);
+ }
+ }
+ else if (bpp->invert) { //TO TEST
+ for (j = 0; j < width; j++) {
+ put_bit( 0, j, i, width, height, bpp->invert,
+ bpp->databits);
+ }
+ }
+ }
+
+ }
+ else if (bpp->imode == VC1_BITPLANE_COLSKIP_MODE)
+ {
+ for (i = 0; i < width; i++)
+ {
+ VC1_GET_BITS(1, tempValue);
+ /* if tempValue==0, and invert == 0, fill column with zeros */
+ if (tempValue == 1)
+ {
+ for (j = 0; j < height; j++)
+ {
+ VC1_GET_BITS(1, tempValue);
+ put_bit( tempValue, i, j, width, height, bpp->invert,
+ bpp->databits);
+ }
+ }
+ else if (bpp->invert) { // fill column with ones
+ for (j = 0; j < height; j++) {
+ put_bit( 0, i, j, width, height, bpp->invert,
+ bpp->databits);
+ }
+ }//end for else
+ }
+ }
+
+ if(bpp->imode != VC1_BITPLANE_RAW_MODE)
+ {
+ uint32_t* pl;
+ int sizeinbytes,nitems,i;
+ viddec_workload_item_t wi;
+ uint32_t *bit_dw;
+
+ pInfo->metadata.bp_raw[bpnum - VIDDEC_WORKLOAD_VC1_BITPLANE0] = false;
+
+ sizeinbytes = ((( width + 31 ) / 32)) * (height) * 4;
+
+ pl = bpp->databits;
+ bit_dw = bpp->databits;
+
+ // How many payloads must be generated
+ nitems = (sizeinbytes + (sizeof(wi.data.data_payload) - 1)) /
+ sizeof(wi.data.data_payload);
+
+ // Dump DMEM to an array of workitems
+ for( i = 0; i < nitems; i++ )
+ {
+ wi.vwi_type = bpnum;
+ wi.data.data_offset = (char *)pl - (char *)bit_dw; // offset within struct
+ wi.data.data_payload[0] = pl[0];
+ wi.data.data_payload[1] = pl[1];
+ pl += 2;
+
+ viddec_pm_append_workitem( ctxt, &wi );
+ }
+ }
+
+#ifdef VBP
+ {
+ viddec_pm_cxt_t *cxt = (viddec_pm_cxt_t *)ctxt;
+ vc1_viddec_parser_t *parser = (vc1_viddec_parser_t *)(cxt->codec_data);
+
+ if (biplaneSz > 4096)
+ {
+ /* bigger than we got, so let's bail with a non meaningful error. */
+ return VC1_STATUS_ERROR;
+ }
+
+ /* At this point bp contains the information we need for the bit-plane */
+ /* bpnum is the enumeration that tells us which bitplane this is for. */
+ /* pInfo->picLayerHeader.ACPRED is one of the bitplanes I need to fill.*/
+ switch (bpnum)
+ {
+ case VIDDEC_WORKLOAD_VC1_BITPLANE0:
+ if (pInfo->picLayerHeader.PTYPE == VC1_B_FRAME)
+ {
+ if(bp.imode != VC1_BITPLANE_RAW_MODE)
+ {
+ pInfo->picLayerHeader.FORWARDMB.invert = bp.invert;
+ pInfo->picLayerHeader.FORWARDMB.imode = bp.imode;
+ for (i = 0; i < biplaneSz; i++)
+ {
+ parser->bp_forwardmb[i] = bp.databits[i];
+ }
+ pInfo->picLayerHeader.FORWARDMB.databits = parser->bp_forwardmb;
+ }
+ else
+ {
+ pInfo->picLayerHeader.raw_FORWARDMB = 1;
+ }
+ }
+ if ( (pInfo->picLayerHeader.PTYPE == VC1_I_FRAME)
+ || (pInfo->picLayerHeader.PTYPE == VC1_BI_FRAME) )
+ {
+ if(bp.imode != VC1_BITPLANE_RAW_MODE)
+ {
+ pInfo->picLayerHeader.ACPRED.invert = bp.invert;
+ pInfo->picLayerHeader.ACPRED.imode = bp.imode;
+ for (i = 0; i < biplaneSz; i++)
+ {
+ parser->bp_acpred[i] = bp.databits[i];
+ }
+ pInfo->picLayerHeader.ACPRED.databits = parser->bp_acpred;
+ }
+ else
+ {
+ pInfo->picLayerHeader.raw_ACPRED = 1;
+ }
+ }
+ if (pInfo->picLayerHeader.PTYPE == VC1_P_FRAME)
+ {
+ if(bp.imode != VC1_BITPLANE_RAW_MODE)
+ {
+ pInfo->picLayerHeader.MVTYPEMB.invert = bp.invert;
+ pInfo->picLayerHeader.MVTYPEMB.imode = bp.imode;
+ for (i = 0; i < biplaneSz; i++)
+ {
+ parser->bp_mvtypemb[i] = bp.databits[i];
+ }
+ pInfo->picLayerHeader.MVTYPEMB.databits = parser->bp_mvtypemb;
+ }
+ else
+ {
+ pInfo->picLayerHeader.raw_MVTYPEMB = 1;
+ }
+ }
+ break;
+ case VIDDEC_WORKLOAD_VC1_BITPLANE1:
+ if ( (pInfo->picLayerHeader.PTYPE == VC1_I_FRAME)
+ || (pInfo->picLayerHeader.PTYPE == VC1_BI_FRAME) )
+ {
+ if(bp.imode != VC1_BITPLANE_RAW_MODE)
+ {
+ pInfo->picLayerHeader.OVERFLAGS.invert = bp.invert;
+ pInfo->picLayerHeader.OVERFLAGS.imode = bp.imode;
+ for (i = 0; i < biplaneSz; i++)
+ {
+ parser->bp_overflags[i] = bp.databits[i];
+ }
+ pInfo->picLayerHeader.OVERFLAGS.databits = parser->bp_overflags;
+ }
+ else
+ {
+ pInfo->picLayerHeader.raw_OVERFLAGS = 1;
+ }
+ }
+ if ( (pInfo->picLayerHeader.PTYPE == VC1_P_FRAME)
+ || (pInfo->picLayerHeader.PTYPE == VC1_B_FRAME) )
+ {
+ if(bp.imode != VC1_BITPLANE_RAW_MODE)
+ {
+ pInfo->picLayerHeader.SKIPMB.invert = bp.invert;
+ pInfo->picLayerHeader.SKIPMB.imode = bp.imode;
+ for (i = 0; i < biplaneSz; i++)
+ {
+ parser->bp_skipmb[i] = bp.databits[i];
+ }
+ pInfo->picLayerHeader.SKIPMB.databits = parser->bp_skipmb;
+ }
+ else
+ {
+ pInfo->picLayerHeader.raw_SKIPMB = 1;
+ }
+ }
+ break;
+ case VIDDEC_WORKLOAD_VC1_BITPLANE2:
+ if ( (pInfo->picLayerHeader.PTYPE == VC1_P_FRAME)
+ || (pInfo->picLayerHeader.PTYPE == VC1_B_FRAME) )
+ {
+ if(bp.imode != VC1_BITPLANE_RAW_MODE)
+ {
+ pInfo->picLayerHeader.DIRECTMB.invert = bp.invert;
+ pInfo->picLayerHeader.DIRECTMB.imode = bp.imode;
+ for (i = 0; i < biplaneSz; i++)
+ {
+ parser->bp_directmb[i] = bp.databits[i];
+ }
+ pInfo->picLayerHeader.DIRECTMB.databits = parser->bp_directmb;
+ }
+ else
+ {
+ pInfo->picLayerHeader.raw_DIRECTMB = 1;
+ }
+ }
+ if ( (pInfo->picLayerHeader.PTYPE == VC1_I_FRAME)
+ || (pInfo->picLayerHeader.PTYPE == VC1_BI_FRAME) )
+ {
+ if(bp.imode != VC1_BITPLANE_RAW_MODE)
+ {
+ pInfo->picLayerHeader.FIELDTX.invert = bp.invert;
+ pInfo->picLayerHeader.FIELDTX.imode = bp.imode;
+ for (i = 0; i < biplaneSz; i++)
+ {
+ parser->bp_fieldtx[i] = bp.databits[i];
+ }
+ pInfo->picLayerHeader.FIELDTX.databits = parser->bp_fieldtx;
+ }
+ else
+ {
+ pInfo->picLayerHeader.raw_FIELDTX = 1;
+ }
+ }
+ break;
+ }
+ }
+#endif
+
+ return status;
+}
diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_bpic.c b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_bpic.c
new file mode 100644
index 0000000..e73cde3
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_bpic.c
@@ -0,0 +1,100 @@
+/* ///////////////////////////////////////////////////////////////////////
+//
+// INTEL CORPORATION PROPRIETARY INFORMATION
+// This software is supplied under the terms of a license agreement or
+// nondisclosure agreement with Intel Corporation and may not be copied
+// or disclosed except in accordance with the terms of that agreement.
+// Copyright (c) 2008 Intel Corporation. All Rights Reserved.
+//
+// Description: Parses VC-1 picture layer for progressive B picture in simple
+// or main profile bitstream.
+//
+*/
+
+#include "vc1parse.h"
+#include "viddec_fw_debug.h" // For DEB
+
+/*------------------------------------------------------------------------------
+ * Parse picture layer. This function parses progressive B picture for main
+ * profile bitstream. This parser starts after PTYPE was parsed but stops
+ * before parsing of macroblock layer.
+ * Table 21 of SMPTE 421M after processing up to PTYPE for B picture.
+ *------------------------------------------------------------------------------
+ */
+
+vc1_Status vc1_ParsePictureHeader_ProgressiveBpicture(void* ctxt, vc1_Info *pInfo)
+{
+ vc1_Status status = VC1_STATUS_OK;
+ vc1_metadata_t *md = &pInfo->metadata;
+ vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader;
+
+ if ((status = vc1_DecodeHuffmanPair(ctxt, VC1_BFRACTION_TBL,
+ &picLayerHeader->BFRACTION_NUM, &picLayerHeader->BFRACTION_DEN)) !=
+ VC1_STATUS_OK)
+ {
+ return status;
+ }
+
+ VC1_GET_BITS9(5, picLayerHeader->PQINDEX);
+ if ((status = vc1_CalculatePQuant(pInfo)) != VC1_STATUS_OK)
+ return status;
+
+ if (picLayerHeader->PQINDEX <= 8)
+ {
+ VC1_GET_BITS9(1, picLayerHeader->HALFQP);
+ }
+ else picLayerHeader->HALFQP=0;
+
+ if (md->QUANTIZER == 1)
+ {
+ VC1_GET_BITS9(1, picLayerHeader->PQUANTIZER);
+ }
+
+ if ((status = vc1_MVRangeDecode(ctxt, pInfo)) != VC1_STATUS_OK)
+ return status;
+
+ VC1_GET_BITS9(1, picLayerHeader->MVMODE);
+ picLayerHeader->MVMODE = (picLayerHeader->MVMODE == 1) ?
+ VC1_MVMODE_1MV : VC1_MVMODE_HPELBI_1MV;
+
+ if ((status = vc1_DecodeBitplane(ctxt, pInfo,
+ md->widthMB, md->heightMB, BPP_DIRECTMB)) != VC1_STATUS_OK)
+ {
+ return status;
+ }
+
+ if ((status = vc1_DecodeBitplane(ctxt, pInfo,
+ md->widthMB, md->heightMB, BPP_SKIPMB)) != VC1_STATUS_OK)
+ {
+ return status;
+ }
+
+ VC1_GET_BITS9(2, picLayerHeader->MVTAB);
+ VC1_GET_BITS9(2, picLayerHeader->CBPTAB);
+
+ if ((status = vc1_VOPDQuant(ctxt, pInfo)) != VC1_STATUS_OK)
+ return status;
+
+ if (md->VSTRANSFORM == 1)
+ {
+ VC1_GET_BITS9(1, picLayerHeader->TTMBF);
+ if (picLayerHeader->TTMBF)
+ {
+ VC1_GET_BITS9(2, picLayerHeader->TTFRM);
+ }
+ }
+
+ VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM);
+ if (picLayerHeader->TRANSACFRM == 1)
+ {
+ VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM);
+ picLayerHeader->TRANSACFRM += 2;
+ }
+
+ VC1_GET_BITS9(1, picLayerHeader->TRANSDCTAB);
+
+ /* Skip parsing of macroblock layer. */
+
+ return status;
+}
+
diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_bpic_adv.c b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_bpic_adv.c
new file mode 100644
index 0000000..4074309
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_bpic_adv.c
@@ -0,0 +1,257 @@
+/* ///////////////////////////////////////////////////////////////////////
+//
+// INTEL CORPORATION PROPRIETARY INFORMATION
+// This software is supplied under the terms of a license agreement or
+// nondisclosure agreement with Intel Corporation and may not be copied
+// or disclosed except in accordance with the terms of that agreement.
+// Copyright (c) 2008 Intel Corporation. All Rights Reserved.
+//
+// Description: Parses VC-1 picture layer for progressive B picture in advanced
+// profile bitstream.
+//
+*/
+
+#include "vc1parse.h"
+#include "viddec_fw_debug.h" // For DEB
+
+/*------------------------------------------------------------------------------
+ * Parse picture layer. This function parses progressive B picture for advanced
+ * profile bitstream.
+ * Table 22 of SMPTE 421M after processing up to POSTPROC by
+ * vc1_ParsePictureHeader_Adv() but stopping before processing of macroblock
+ * layer.
+ *------------------------------------------------------------------------------
+ */
+
+vc1_Status vc1_ParsePictureHeader_ProgressiveBpicture_Adv(void* ctxt, vc1_Info *pInfo)
+{
+ vc1_Status status = VC1_STATUS_OK;
+ vc1_metadata_t *md = &pInfo->metadata;
+ vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader;
+
+ if ((status = vc1_MVRangeDecode(ctxt, pInfo)) != VC1_STATUS_OK)
+ return status;
+
+ VC1_GET_BITS9(1, picLayerHeader->MVMODE);
+ picLayerHeader->MVMODE = (picLayerHeader->MVMODE == 1) ?
+ VC1_MVMODE_1MV : VC1_MVMODE_HPELBI_1MV;
+
+ if ((status = vc1_DecodeBitplane(ctxt, pInfo,
+ md->widthMB, md->heightMB, BPP_DIRECTMB)) != VC1_STATUS_OK)
+ {
+ return status;
+ }
+
+ if ((status = vc1_DecodeBitplane(ctxt, pInfo,
+ md->widthMB, md->heightMB, BPP_SKIPMB)) != VC1_STATUS_OK)
+ {
+ return status;
+ }
+
+ VC1_GET_BITS9(2, picLayerHeader->MVTAB);
+ VC1_GET_BITS9(2, picLayerHeader->CBPTAB);
+
+ if ((status = vc1_VOPDQuant(ctxt, pInfo)) != VC1_STATUS_OK)
+ return status;
+
+ if (md->VSTRANSFORM == 1)
+ {
+ VC1_GET_BITS9(1, picLayerHeader->TTMBF);
+ if (picLayerHeader->TTMBF == 1)
+ {
+ VC1_GET_BITS9(2, picLayerHeader->TTFRM);
+ }
+ }
+
+ VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM);
+ if (picLayerHeader->TRANSACFRM == 1)
+ {
+ VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM);
+ picLayerHeader->TRANSACFRM += 2;
+ }
+
+ VC1_GET_BITS9(1, picLayerHeader->TRANSDCTAB);
+
+ /* Skip parsing of macroblock layer. */
+
+ return status;
+}
+
+/*------------------------------------------------------------------------------
+ * Parse picture layer. This function parses interlace B frame for advanced
+ * profile bitstream.
+ * Table 84 of SMPTE 421M after processing up to POSTPROC by
+ * vc1_ParsePictureHeader_Adv() but stopping before processing of macroblock
+ * layer.
+ *------------------------------------------------------------------------------
+ */
+
+vc1_Status vc1_ParsePictureHeader_InterlaceBpicture_Adv(void* ctxt, vc1_Info *pInfo)
+{
+ vc1_Status status = VC1_STATUS_OK;
+ vc1_metadata_t *md = &pInfo->metadata;
+ vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader;
+
+ if ((status = vc1_DecodeHuffmanPair(ctxt, VC1_BFRACTION_TBL,
+ &picLayerHeader->BFRACTION_NUM, &picLayerHeader->BFRACTION_DEN)) !=
+ VC1_STATUS_OK)
+ {
+ return status;
+ }
+
+ if ((status = vc1_MVRangeDecode(ctxt, pInfo)) != VC1_STATUS_OK)
+ return status;
+
+ if ((status = vc1_DMVRangeDecode(ctxt, pInfo)) != VC1_STATUS_OK)
+ return status;
+
+ VC1_GET_BITS9(1, picLayerHeader->INTCOMP);
+
+ if ((status = vc1_DecodeBitplane(ctxt, pInfo,
+ md->widthMB, md->heightMB, BPP_DIRECTMB)) != VC1_STATUS_OK)
+ {
+ return status;
+ }
+
+ if ((status = vc1_DecodeBitplane(ctxt, pInfo,
+ md->widthMB, md->heightMB, BPP_SKIPMB)) != VC1_STATUS_OK)
+ {
+ return status;
+ }
+
+ // EPC picLayerHeader->MVMODE = VC1_MVMODE_1MV;
+ VC1_GET_BITS9(2, picLayerHeader->MBMODETAB);
+ VC1_GET_BITS9(2, picLayerHeader->MVTAB); /* IMVTAB. */
+ VC1_GET_BITS9(3, picLayerHeader->CBPTAB); /* ICBPTAB. */
+ VC1_GET_BITS9(2, picLayerHeader->MV2BPTAB); /* 2MVBPTAB. */
+ VC1_GET_BITS9(2, picLayerHeader->MV4BPTAB); /* 4MVBPTAB. */
+
+ if ((status = vc1_VOPDQuant(ctxt, pInfo)) != VC1_STATUS_OK)
+ return status;
+
+ if (md->VSTRANSFORM == 1)
+ {
+ VC1_GET_BITS9(1, picLayerHeader->TTMBF);
+ if (picLayerHeader->TTMBF == 1)
+ {
+ VC1_GET_BITS9(2, picLayerHeader->TTFRM);
+ }
+ }
+
+ VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM);
+ if (picLayerHeader->TRANSACFRM == 1)
+ {
+ VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM);
+ picLayerHeader->TRANSACFRM += 2;
+ }
+
+ VC1_GET_BITS9(1, picLayerHeader->TRANSDCTAB);
+
+ /* Skip parsing of macroblock layer. */
+
+ return status;
+}
+
+/*------------------------------------------------------------------------------
+ * Parse picture layer. This function parses interlace B field for advanced
+ * profile bitstream.
+ * Table 89 of SMPTE 421M after processing up to BFRACTION by
+ * vc1_ParseFieldHeader_Adv() but stopping before processing of macroblock
+ * layer.
+ *------------------------------------------------------------------------------
+ */
+
+vc1_Status vc1_ParseFieldHeader_InterlaceBpicture_Adv(void* ctxt, vc1_Info *pInfo)
+{
+ uint8_t bit_count;
+ const uint8_t *table;
+ vc1_Status status = VC1_STATUS_OK;
+ vc1_metadata_t *md = &pInfo->metadata;
+ vc1_PictureLayerHeader* picLayerHeader = &pInfo->picLayerHeader;
+
+ VC1_GET_BITS9(5, picLayerHeader->PQINDEX);
+
+ if ((status = vc1_CalculatePQuant(pInfo)) != VC1_STATUS_OK)
+ return status;
+
+ if (picLayerHeader->PQINDEX <= 8)
+ {
+ VC1_GET_BITS9(1, picLayerHeader->HALFQP);
+ }
+ else
+ picLayerHeader->HALFQP = 0;
+
+ if (md->QUANTIZER == 1)
+ {
+ VC1_GET_BITS9(1, picLayerHeader->PQUANTIZER);
+ picLayerHeader->UniformQuant = picLayerHeader->PQUANTIZER;
+ }
+
+ if (md->POSTPROCFLAG == 1)
+ {
+ VC1_GET_BITS9(2, picLayerHeader->POSTPROC);
+ }
+
+ if ((status = vc1_MVRangeDecode(ctxt, pInfo)) != VC1_STATUS_OK)
+ return status;
+
+ if ((status = vc1_DMVRangeDecode(ctxt, pInfo)) != VC1_STATUS_OK)
+ return status;
+
+ if (picLayerHeader->PQUANT > 12)
+ table = VC1_MVMODE_LOW_TBL;
+ else
+ table = VC1_MVMODE_HIGH_TBL;
+
+ bit_count = 0;
+ VC1_GET_BITS9(1, picLayerHeader->MVMODE);
+ while ((picLayerHeader->MVMODE == 0) && (bit_count < 2))
+ {
+ VC1_GET_BITS9(1, picLayerHeader->MVMODE);
+ bit_count++;
+ }
+ if ((bit_count == 2) && (picLayerHeader->MVMODE == 0))
+ bit_count++;
+ picLayerHeader->MVMODE = table[bit_count];
+
+ if ((status = vc1_DecodeBitplane(ctxt, pInfo,
+ md->widthMB, (md->heightMB+1)/2, BPP_FORWARDMB)) !=
+ VC1_STATUS_OK)
+ {
+ return status;
+ }
+
+ VC1_GET_BITS9(3, picLayerHeader->MBMODETAB);
+ VC1_GET_BITS9(3, picLayerHeader->MVTAB); /* IMVTAB. */
+ VC1_GET_BITS9(3, picLayerHeader->CBPTAB); /* ICBPTAB. */
+
+ if (picLayerHeader->MVMODE == VC1_MVMODE_MIXED_MV)
+ {
+ VC1_GET_BITS9(2, picLayerHeader->MV4BPTAB); /* 4MVBPTAB. */
+ }
+
+ if ((status = vc1_VOPDQuant(ctxt, pInfo)) != VC1_STATUS_OK)
+ return status;
+
+ if (md->VSTRANSFORM == 1)
+ {
+ VC1_GET_BITS9(1, picLayerHeader->TTMBF);
+ if (picLayerHeader->TTMBF == 1)
+ {
+ VC1_GET_BITS9(2, picLayerHeader->TTFRM);
+ }
+ }
+
+ VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM);
+ if (picLayerHeader->TRANSACFRM == 1)
+ {
+ VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM);
+ picLayerHeader->TRANSACFRM += 2;
+ }
+
+ VC1_GET_BITS9(1, picLayerHeader->TRANSDCTAB);
+
+ /* Skip parsing of macroblock layer. */
+
+ return status;
+}
diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_common_defs.h b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_common_defs.h
new file mode 100644
index 0000000..9e621fc
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_common_defs.h
@@ -0,0 +1,608 @@
+/* ///////////////////////////////////////////////////////////////////////
+//
+// INTEL CORPORATION PROPRIETARY INFORMATION
+// This software is supplied under the terms of a license agreement or
+// nondisclosure agreement with Intel Corporation and may not be copied
+// or disclosed except in accordance with the terms of that agreement.
+// Copyright (c) 2008 Intel Corporation. All Rights Reserved.
+//
+// Description: Common definitions for parsing VC-1 bitstreams.
+//
+*/
+
+#ifndef _VC1PARSE_COMMON_DEFS_H_
+#define _VC1PARSE_COMMON_DEFS_H_
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#include <stdint.h>
+
+/** @weakgroup vc1parse_common_defs VC-1 Common Definitions */
+/** @ingroup vc1parse_common_defs */
+/*@{*/
+
+/** This defines the maximum number of horizontal macroblocks in a picture. */
+#define VC1_WIDTH_MB_MAX ((2048+15)/16)
+
+/** This defines the maximum number of vertical macroblocks in a picture. */
+#define VC1_HEIGHT_MB_MAX ((1088+15)/16)
+
+/** This defines the maximum number of bitplane storage per picture. */
+#define VC1_MAX_BITPLANE_CHUNKS 3
+
+/** This defines the value for an invalid BFRACTION syntax element. */
+#define VC1_BFRACTION_INVALID 0
+
+/** This defines the value for BFRACTION syntax element that defines a BI
+picture. */
+#define VC1_BFRACTION_BI 9
+
+/** This enumeration defines the various supported profiles as defined in
+PROFILE syntax element. */
+enum
+{
+ VC1_PROFILE_SIMPLE,
+ VC1_PROFILE_MAIN,
+ VC1_PROFILE_RESERVED,
+ VC1_PROFILE_ADVANCED
+};
+
+/** This enumeration defines the frame coding mode as defined in FCM syntax
+element. */
+enum
+{
+ VC1_FCM_PROGRESSIVE,
+ VC1_FCM_FRAME_INTERLACE = 2,
+ VC1_FCM_FIELD_INTERLACE = 3
+};
+
+/** This enumeration defines the various bitplane types as defined in IMODE
+syntax element. */
+enum
+{
+ VC1_BITPLANE_RAW_MODE,
+ VC1_BITPLANE_NORM2_MODE,
+ VC1_BITPLANE_DIFF2_MODE,
+ VC1_BITPLANE_NORM6_MODE,
+ VC1_BITPLANE_DIFF6_MODE,
+ VC1_BITPLANE_ROWSKIP_MODE,
+ VC1_BITPLANE_COLSKIP_MODE
+};
+
+/** This enumeration defines the various motion vector modes as defined in
+MVMODE or MVMODE2 syntax element. */
+enum
+{
+ VC1_MVMODE_1MV,
+#ifdef VBP
+ VC1_MVMODE_HPELBI_1MV,
+ VC1_MVMODE_HPEL_1MV,
+#else
+ VC1_MVMODE_HPEL_1MV,
+ VC1_MVMODE_HPELBI_1MV,
+#endif
+ VC1_MVMODE_MIXED_MV,
+ VC1_MVMODE_INTENSCOMP
+};
+
+/** This enumeration defines the extended differential motion vector range flag
+as defined in DMVRANGE syntax element. */
+enum
+{
+ VC1_DMVRANGE_NONE,
+ VC1_DMVRANGE_HORIZONTAL_RANGE,
+ VC1_DMVRANGE_VERTICAL_RANGE,
+ VC1_DMVRANGE_HORIZONTAL_VERTICAL_RANGE
+};
+
+/** This enumeration defines the intensity compensation field as defined in
+INTCOMPFIELD syntax element. */
+enum
+{
+ VC1_INTCOMP_TOP_FIELD = 1,
+ VC1_INTCOMP_BOTTOM_FIELD = 2,
+ VC1_INTCOMP_BOTH_FIELD = 3
+};
+
+/** This enumeration defines the differential quantizer profiles as defined in
+DQPROFILE syntax element. */
+enum
+{
+ VC1_DQPROFILE_ALL4EDGES,
+ VC1_DQPROFILE_DBLEDGES,
+ VC1_DQPROFILE_SNGLEDGES,
+ VC1_DQPROFILE_ALLMBLKS
+};
+
+/** This enumeration defines the conditional overlap flag as defined in CONDOVER
+syntax element. */
+enum
+{
+ VC1_CONDOVER_FLAG_NONE = 0,
+ VC1_CONDOVER_FLAG_ALL = 2,
+ VC1_CONDOVER_FLAG_SOME = 3
+};
+
+/** This enumeration defines the type of quantizer to be used and is derived
+from bitstream syntax. */
+enum
+{
+ VC1_QUANTIZER_NONUNIFORM,
+ VC1_QUANTIZER_UNIFORM
+};
+
+/** This structure represents the various bitplanes within VC-1 bitstream. */
+typedef struct
+{
+ uint8_t invert;
+ int32_t imode;
+ uint32_t *databits;
+} vc1_Bitplane;
+
+/** This structure represents all bitstream metadata needed for register programming. */
+typedef struct
+{
+ // From Sequence Layer for Advanced Profile
+ uint8_t PROFILE; /** 2 bit(s). */
+#ifdef VBP
+ uint8_t LEVEL;
+#endif
+ uint8_t POSTPROCFLAG; /** 1 bit(s). */
+ uint8_t PULLDOWN; /** 1 bit(s). */
+ uint8_t INTERLACE; /** 1 bit(s). */
+ uint8_t TFCNTRFLAG; /** 1 bit(s). */
+ uint8_t FINTERPFLAG; /** 1 bit(s). */
+ uint8_t PSF; /** 1 bit(s). */
+ uint8_t HRD_NUM_LEAKY_BUCKETS; /** 5 bit(s). */
+
+ // From STRUCT_C
+ uint8_t MAXBFRAMES; /** 3 bit(s). */
+ uint8_t MULTIRES; /** 1 bit(s). */
+
+ // From EntryPoint Layer for Advanced Profile
+ uint8_t PANSCAN_FLAG;
+ uint8_t REFDIST_FLAG;
+ uint8_t LOOPFILTER;
+ uint8_t FASTUVMC;
+ uint8_t EXTENDED_MV;
+ uint8_t DQUANT;
+ uint8_t VSTRANSFORM;
+ uint8_t OVERLAP;
+ uint8_t QUANTIZER;
+ uint8_t EXTENDED_DMV;
+ uint8_t RANGE_MAPY_FLAG;
+ uint8_t RANGE_MAPY;
+ uint8_t RANGE_MAPUV_FLAG;
+ uint8_t RANGE_MAPUV;
+
+ // From Picture Header
+ uint8_t RANGERED; /** 1 bit(s). */
+ uint8_t RNDCTRL; /** 1 bit(s), rcv specific. */
+
+ // REFDIST is present only in field-interlaced mode on I/I, I/P, P/I, P/P frames
+ // From Canmore, looks like this needs to be propagated to following B frames
+ uint8_t REFDIST;
+ uint8_t INTCOMPFIELD; /** ? bit(s)? */
+ uint8_t LUMSCALE2; /** 6 bit(s). */
+ uint8_t LUMSHIFT2; /** 6 bit(s). */
+ uint8_t bp_raw[VC1_MAX_BITPLANE_CHUNKS];
+
+ // From SequenceLayerHeader, EntryPointHeader or Struct_A
+ uint16_t width;
+ uint16_t height;
+ uint16_t widthMB;
+ uint16_t heightMB;
+
+#ifdef VBP
+ uint8_t CLOSED_ENTRY;
+ uint8_t BROKEN_LINK;
+ uint8_t SYNCMARKER;
+#endif
+
+} vc1_metadata_t;
+
+/** This structure represents the sequence header for advanced profile. */
+typedef struct
+{
+ union
+ {
+#ifndef MFDBIGENDIAN
+ struct
+ {
+ unsigned BITRTQ_POSTPROC:5;
+ unsigned FRMRTQ_POSTPROC:3;
+ unsigned COLORDIFF_FORMAT:2;
+ unsigned LEVEL:3;
+ unsigned PROFILE:2;
+ unsigned pad:17;
+ } seq_flags;
+#else
+ struct
+ {
+ unsigned pad:17;
+ unsigned PROFILE:2;
+ unsigned LEVEL:3;
+ unsigned COLORDIFF_FORMAT:2;
+ unsigned FRMRTQ_POSTPROC:3;
+ unsigned BITRTQ_POSTPROC:5;
+ } seq_flags;
+#endif
+ uint32_t flags;
+ };
+
+ union
+ {
+#ifndef MFDBIGENDIAN
+ struct
+ {
+ unsigned DISPLAY_EXT:1;
+ unsigned PSF:1;
+ unsigned RESERVED:1;
+ unsigned FINTERPFLAG:1;
+ unsigned TFCNTRFLAG:1;
+ unsigned INTERLACE:1;
+ unsigned PULLDOWN:1;
+ unsigned MAX_CODED_HEIGHT:12;
+ unsigned MAX_CODED_WIDTH:12;
+ unsigned POSTPROCFLAG:1;
+ } seq_max_size;
+#else
+ struct
+ {
+ unsigned POSTPROCFLAG:1;
+ unsigned MAX_CODED_WIDTH:12;
+ unsigned MAX_CODED_HEIGHT:12;
+ unsigned PULLDOWN:1;
+ unsigned INTERLACE:1;
+ unsigned TFCNTRFLAG:1;
+ unsigned FINTERPFLAG:1;
+ unsigned RESERVED:1;
+ unsigned PSF:1;
+ unsigned DISPLAY_EXT:1;
+ } seq_max_size;
+#endif
+ uint32_t max_size;
+ };
+
+ union
+ {
+#ifndef MFDBIGENDIAN
+ struct
+ {
+ unsigned ASPECT_RATIO_FLAG:1;
+ unsigned DISP_VERT_SIZE:14;
+ unsigned DISP_HORIZ_SIZE:14;
+ unsigned pad:3;
+ } seq_disp_size;
+#else
+ struct
+ {
+ unsigned pad:3;
+ unsigned DISP_HORIZ_SIZE:14;
+ unsigned DISP_VERT_SIZE:14;
+ unsigned ASPECT_RATIO_FLAG:1;
+ } seq_disp_size;
+#endif
+ uint32_t disp_size;
+ };
+
+ uint8_t ASPECT_RATIO; // 4 bits
+
+ union
+ {
+#ifndef MFDBIGENDIAN
+ struct
+ {
+ unsigned ASPECT_VERT_SIZE:8;
+ unsigned ASPECT_HORIZ_SIZE:8;
+ unsigned pad:16;
+ } seq_aspect_size;
+#else
+ struct
+ {
+ unsigned pad:16;
+ unsigned ASPECT_HORIZ_SIZE:8;
+ unsigned ASPECT_VERT_SIZE:8;
+ } seq_aspect_size;
+#endif
+ uint32_t aspect_size;
+ };
+
+ uint8_t FRAMERATE_FLAG; // 1b
+ uint8_t FRAMERATEIND; // 1b
+
+ union
+ {
+#ifndef MFDBIGENDIAN
+ struct
+ {
+ unsigned FRAMERATEDR:4;
+ unsigned FRAMERATENR:8;
+ unsigned pad:20;
+ } seq_framerate_fraction;
+#else
+ struct
+ {
+ unsigned pad:20;
+ unsigned FRAMERATENR:8;
+ unsigned FRAMERATEDR:4;
+ } seq_framerate_fraction;
+#endif
+ uint32_t framerate_fraction;
+ };
+
+ uint16_t FRAMERATEEXP; // 16b
+ uint8_t COLOR_FORMAT_FLAG; // 1b
+
+ union
+ {
+#ifndef MFDBIGENDIAN
+ struct
+ {
+ unsigned MATRIX_COEF:8;
+ unsigned TRANSFER_CHAR:8;
+ unsigned COLOR_PRIM:8;
+ unsigned pad:8;
+ } seq_color_format;
+#else
+ struct
+ {
+ unsigned pad:8;
+ unsigned COLOR_PRIM:8;
+ unsigned TRANSFER_CHAR:8;
+ unsigned MATRIX_COEF:8;
+ } seq_color_format;
+#endif
+ uint32_t color_format;
+ };
+
+ uint8_t HRD_PARAM_FLAG; // 1b
+ uint8_t HRD_NUM_LEAKY_BUCKETS; // 5b
+ // No need to parse remaining items - not needed so far
+} vc1_SequenceLayerHeader;
+
+/** This structure represents metadata for struct c. */
+typedef struct
+{
+ union
+ {
+#ifndef MFDBIGENDIAN
+ struct
+ {
+ unsigned res6:1;
+ unsigned FINTERPFLAG:1;
+ unsigned QUANTIZER:2;
+ unsigned MAXBFRAMES:3;
+ unsigned RANGERED:1;
+ unsigned SYNCMARKER:1;
+ unsigned OVERLAP:1;
+ unsigned res5:1;
+ unsigned VSTRANSFORM:1;
+ unsigned DQUANT:2;
+ unsigned EXTENDED_MV:1;
+ unsigned FASTUVMC:1;
+ unsigned res4:1;
+ unsigned MULTIRES:1;
+ unsigned res3:1;
+ unsigned LOOPFILTER:1;
+ unsigned BITRTQ_POSTPROC:5;
+ unsigned FRMRTQ_POSTPROC:3;
+ unsigned PROFILE:4;
+ } struct_c;
+#else
+ struct
+ {
+ unsigned PROFILE:4;
+ unsigned FRMRTQ_POSTPROC:3;
+ unsigned BITRTQ_POSTPROC:5;
+ unsigned LOOPFILTER:1;
+ unsigned res3:1;
+ unsigned MULTIRES:1;
+ unsigned res4:1;
+ unsigned FASTUVMC:1;
+ unsigned EXTENDED_MV:1;
+ unsigned DQUANT:2;
+ unsigned VSTRANSFORM:1;
+ unsigned res5:1;
+ unsigned OVERLAP:1;
+ unsigned SYNCMARKER:1;
+ unsigned RANGERED:1;
+ unsigned MAXBFRAMES:3;
+ unsigned QUANTIZER:2;
+ unsigned FINTERPFLAG:1;
+ unsigned res6:1;
+ } struct_c;
+#endif
+ uint32_t struct_c_rcv;
+ };
+
+ union
+ {
+#ifndef MFDBIGENDIAN
+ struct
+ {
+ unsigned VERT_SIZE:16;
+ unsigned HORIZ_SIZE:16;
+ } struct_a;
+#else
+ struct
+ {
+ unsigned HORIZ_SIZE:16;
+ unsigned VERT_SIZE:16;
+ } struct_a;
+#endif
+ uint32_t struct_a_rcv;
+ };
+
+} vc1_RcvSequenceHeader;
+
+/** This structure represents metadata for entry point layers. */
+typedef struct
+{
+ union
+ {
+#ifndef MFDBIGENDIAN
+ struct
+ {
+ unsigned QUANTIZER:2;
+ unsigned OVERLAP:1;
+ unsigned VSTRANSFORM:1;
+ unsigned DQUANT:2;
+ unsigned EXTENDED_MV:1;
+ unsigned FASTUVMC:1;
+ unsigned LOOPFILTER:1;
+ unsigned REFDIST_FLAG:1;
+ unsigned PANSCAN_FLAG:1;
+ unsigned CLOSED_ENTRY:1;
+ unsigned BROKEN_LINK:1;
+ unsigned pad1:19;
+ } ep_flags;
+#else
+ struct
+ {
+ unsigned pad1:19;
+ unsigned BROKEN_LINK:1;
+ unsigned CLOSED_ENTRY:1;
+ unsigned PANSCAN_FLAG:1;
+ unsigned REFDIST_FLAG:1;
+ unsigned LOOPFILTER:1;
+ unsigned FASTUVMC:1;
+ unsigned EXTENDED_MV:1;
+ unsigned DQUANT:2;
+ unsigned VSTRANSFORM:1;
+ unsigned OVERLAP:1;
+ unsigned QUANTIZER:2;
+ } ep_flags;
+#endif
+ uint32_t flags;
+ };
+
+ // Skipping HRD data because it is not needed for our processing
+
+ union
+ {
+#ifndef MFDBIGENDIAN
+ struct
+ {
+ unsigned CODED_HEIGHT:12;
+ unsigned CODED_WIDTH:12;
+ unsigned pad2:8;
+ } ep_size;
+#else
+ struct
+ {
+ unsigned pad2:8;
+ unsigned CODED_WIDTH:12;
+ unsigned CODED_HEIGHT:12;
+ } ep_size;
+#endif
+ uint32_t size;
+ };
+
+ uint8_t CODED_SIZE_FLAG; /** 1 bit(s). */
+ uint8_t EXTENDED_DMV; /** 1 bit(s). */
+ uint8_t RANGE_MAPY_FLAG; /** 1 bit(s). */
+ uint8_t RANGE_MAPY; /** 3 bit(s). */
+ uint8_t RANGE_MAPUV_FLAG; /** 1 bit(s). */
+ uint8_t RANGE_MAPUV; /** 3 bit(s). */
+} vc1_EntryPointHeader;
+
+/** This structure represents metadata for slice and picture layers. */
+typedef struct
+{
+ /* Slice layer. */
+ uint16_t SLICE_ADDR; /** 9 bit(s). */
+
+ /* Picture layer for simple or main profile. */
+ uint8_t RANGEREDFRM; /** 1 bit(s). */
+ uint8_t PTYPE; /** 4 bit(s)? */
+ int8_t BFRACTION_NUM; /** ? bit(s). */
+ int16_t BFRACTION_DEN; /** ? bit(s). */
+ uint8_t PQINDEX; /** 5 bit(s). */
+ uint8_t HALFQP; /** 1 bit(s). */
+ uint8_t PQUANTIZER; /** 1 bit(s). */
+ uint8_t MVRANGE; /** 3 bit(s)? */
+ uint8_t MVMODE; /** 4 bit(s)? */
+ uint8_t MVMODE2; /** 3 bit(s)? */
+ uint8_t LUMSCALE; /** 6 bit(s). */
+ uint8_t LUMSHIFT; /** 6 bit(s). */
+ uint8_t MVTAB; /** 2 bit(s). */
+ uint8_t CBPTAB; /** 2 bit(s). */
+ uint8_t TTMBF; /** 1 bit(s). */
+ uint8_t TTFRM; /** 2 bit(s). */
+ uint8_t TRANSACFRM; /** 2 bit(s)? */
+ uint8_t TRANSACFRM2; /** 2 bit(s)? */
+ uint8_t TRANSDCTAB; /** 1 bit(s). */
+
+ /* Picture layer for advanced profile. */
+ uint8_t FCM; /** 2 bit(s)? */
+ uint8_t FPTYPE; /** 3 bit(s). */
+ uint8_t TFCNTR; /** 8 bit(s) */
+ uint8_t RPTFRM; /** 2 bit(s) */
+ uint8_t TFF; /** 1 bit(s). */
+ uint8_t RFF; /** 1 bit(s) */
+ uint8_t RNDCTRL; /** 1 bit(s). */
+ uint8_t UVSAMP; /** 1 bit(s). */
+ uint8_t POSTPROC; /** 2 bit(s). */
+ uint8_t CONDOVER; /** 2 bit(s)? */
+ uint8_t DMVRANGE; /** ? bit(s)? */
+ uint8_t MV4SWITCH; /** 1 bit(s). */
+ uint8_t INTCOMP; /** 1 bit(s). */
+ uint8_t MBMODETAB; /** 2 bit(s). */
+ uint8_t MV2BPTAB; /** 2 bit(s). */
+ uint8_t MV4BPTAB; /** 2 bit(s). */
+ uint8_t NUMREF; /** 1 bit(s). */
+ uint8_t REFFIELD; /** 1 bit(s). */
+
+ /* PAN SCAN */
+ uint8_t PS_PRESENT; /** 1 bit(s). */
+ uint8_t number_of_pan_scan_window; /** 4 max. */
+ viddec_vc1_pan_scan_window_t PAN_SCAN_WINDOW[VIDDEC_PANSCAN_MAX_OFFSETS];
+
+ /* VOPDQUANT. */
+ uint8_t PQDIFF; /** 3 bit(s). */
+ uint8_t ABSPQ; /** 5 bit(s). */
+ uint8_t DQUANTFRM; /** 1 bit(s). */
+ uint8_t DQPROFILE; /** 2 bit(s). */
+ uint8_t DQSBEDGE; /** 2 bit(s). */
+ uint8_t DQBILEVEL; /** 1 bit(s). */
+
+ /* Others. */
+ uint8_t PTypeField1;
+ uint8_t PTypeField2;
+ uint32_t PQUANT;
+ uint8_t CurrField;
+ uint8_t BottomField;
+ uint32_t UniformQuant;
+
+#ifdef VBP
+ uint8_t raw_MVTYPEMB;
+ uint8_t raw_DIRECTMB;
+ uint8_t raw_SKIPMB;
+ uint8_t raw_ACPRED;
+ uint8_t raw_FIELDTX;
+ uint8_t raw_OVERFLAGS;
+ uint8_t raw_FORWARDMB;
+
+ vc1_Bitplane MVTYPEMB;
+ vc1_Bitplane DIRECTMB;
+ vc1_Bitplane SKIPMB;
+ vc1_Bitplane ACPRED;
+ vc1_Bitplane FIELDTX;
+ vc1_Bitplane OVERFLAGS;
+ vc1_Bitplane FORWARDMB;
+ uint32_t ALTPQUANT;
+ uint8_t DQDBEDGE;
+#endif
+
+} vc1_PictureLayerHeader;
+
+/*@}*/
+
+#ifdef __cplusplus
+}
+#endif /* __cplusplus. */
+
+#endif /* _VC1PARSE_COMMON_DEFS_H_. */
diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_common_tables.c b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_common_tables.c
new file mode 100644
index 0000000..6fec35a
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_common_tables.c
@@ -0,0 +1,198 @@
+/* ///////////////////////////////////////////////////////////////////////
+//
+// INTEL CORPORATION PROPRIETARY INFORMATION
+// This software is supplied under the terms of a license agreement or
+// nondisclosure agreement with Intel Corporation and may not be copied
+// or disclosed except in accordance with the terms of that agreement.
+// Copyright (c) 2008 Intel Corporation. All Rights Reserved.
+//
+// Description: Contains tables for VLC decoding of syntax elements in simple
+// or main profile of VC-1 bitstream.
+//
+*/
+
+#include "vc1parse.h"
+
+/* MVMODE code-to-mode mapping used for the "low rate" PQUANT range;
+   indexed by the decoded MVMODE VLC value (see SMPTE 421M MVMODE
+   tables -- TODO confirm exact table number against the spec). */
+const uint8_t VC1_MVMODE_LOW_TBL[] =
+{
+    VC1_MVMODE_HPELBI_1MV,
+    VC1_MVMODE_1MV,
+    VC1_MVMODE_HPEL_1MV,
+    VC1_MVMODE_MIXED_MV,
+    VC1_MVMODE_INTENSCOMP
+};
+
+/* MVMODE code-to-mode mapping used for the "high rate" PQUANT range;
+   same index scheme as VC1_MVMODE_LOW_TBL but with a different mode
+   order (SMPTE 421M MVMODE tables -- TODO confirm exact table number). */
+const uint8_t VC1_MVMODE_HIGH_TBL[] =
+{
+    VC1_MVMODE_1MV,
+    VC1_MVMODE_MIXED_MV,
+    VC1_MVMODE_HPEL_1MV,
+    VC1_MVMODE_HPELBI_1MV,
+    VC1_MVMODE_INTENSCOMP
+};
+
+/* VLC table for the bitplane coding mode (IMODE).
+   Layout consumed by vc1_DecodeHuffmanOne():
+     [0] max code length in bits,
+     [1] number of sub-tables,
+     then one size per sub-table (sizes sum to max bits),
+     then per code length: a count followed by (code, value) pairs,
+     terminated by -1. */
+const int32_t VC1_BITPLANE_IMODE_TBL[] =
+{
+    4, /* max bits */
+    1, /* total subtables */
+    4, /* subtable sizes */
+
+    0, /* 1-bit codes */
+    2, /* 2-bit codes */
+        2, VC1_BITPLANE_NORM2_MODE,
+        3, VC1_BITPLANE_NORM6_MODE,
+    3, /* 3-bit codes */
+        1, VC1_BITPLANE_DIFF2_MODE,
+        2, VC1_BITPLANE_ROWSKIP_MODE,
+        3, VC1_BITPLANE_COLSKIP_MODE,
+    2, /* 4-bit codes */
+        0, VC1_BITPLANE_RAW_MODE,
+        1, VC1_BITPLANE_DIFF6_MODE,
+    -1 /* end of table */
+};
+
+/* This VLC table is used for decoding of k in bitplane. */
+/* This VLC table is used for decoding of k in bitplane.
+   Decoded values span 0..63, i.e. one 6-bit symbol per 2x3/3x2 pixel
+   group in NORM-6/DIFF-6 bitplane modes (presumably the "6-tile" code
+   of SMPTE 421M -- TODO confirm table number).
+   Same [max bits][subtables][sizes][per-length count + (code,value)
+   pairs]...-1 layout consumed by vc1_DecodeHuffmanOne(); codes longer
+   than 8 bits carry a sub-table prefix OR'ed into the code word. */
+const int32_t VC1_BITPLANE_K_TBL[] =
+{
+    13, /* max bits */
+    2, /* total subtables */
+    6,7,/* subtable sizes */
+
+    1, /* 1-bit codes */
+        1, 0 ,
+    0, /* 2-bit codes */
+    0, /* 3-bit codes */
+    6, /* 4-bit codes */
+        2, 1, 3, 2, 4, 4, 5, 8,
+        6, 16, 7, 32,
+    0, /* 5-bit codes */
+    1, /* 6-bit codes */
+        (3 << 1)| 1, 63,
+    0, /* 7-bit codes */
+    15, /* 8-bit codes */
+        0, 3, 1, 5, 2, 6, 3, 9,
+        4, 10, 5, 12, 6, 17, 7, 18,
+        8, 20, 9, 24, 10, 33, 11, 34,
+        12, 36, 13, 40, 14, 48,
+    6, /* 9-bit codes */
+        (3 << 4)| 7, 31,
+        (3 << 4)| 6, 47,
+        (3 << 4)| 5, 55,
+        (3 << 4)| 4, 59,
+
+        (3 << 4)| 3, 61,
+        (3 << 4)| 2, 62,
+    20, /* 10-bit codes */
+        (1 << 6)| 11, 11,
+        (1 << 6)| 7, 7 ,
+        (1 << 6)| 13, 13,
+        (1 << 6)| 14, 14,
+
+        (1 << 6)| 19, 19,
+        (1 << 6)| 21, 21,
+        (1 << 6)| 22, 22,
+        (1 << 6)| 25, 25,
+
+        (1 << 6)| 26, 26,
+        (1 << 6)| 28, 28,
+        (1 << 6)| 3, 35,
+        (1 << 6)| 5, 37,
+
+        (1 << 6)| 6, 38,
+        (1 << 6)| 9, 41,
+        (1 << 6)| 10, 42,
+        (1 << 6)| 12, 44,
+
+        (1 << 6)| 17, 49,
+        (1 << 6)| 18, 50,
+        (1 << 6)| 20, 52,
+        (1 << 6)| 24, 56,
+    0, /* 11-bit codes */
+    0, /* 12-bit codes */
+    15, /* 13-bit codes */
+        (3 << 8)| 14, 15,
+        (3 << 8)| 13, 23,
+        (3 << 8)| 12, 27,
+        (3 << 8)| 11, 29,
+
+        (3 << 8)| 10, 30,
+        (3 << 8)| 9, 39,
+        (3 << 8)| 8, 43,
+        (3 << 8)| 7, 45,
+
+        (3 << 8)| 6, 46,
+        (3 << 8)| 5, 51,
+        (3 << 8)| 4, 53,
+        (3 << 8)| 3, 54,
+
+        (3 << 8)| 2, 57,
+        (3 << 8)| 1, 58,
+        (3 << 8)| 0, 60,
+    -1 /* end of table */
+};
+
+/* This VLC table is used for decoding of BFRACTION. */
+/* This VLC table is used for decoding of BFRACTION.
+   Consumed by vc1_DecodeHuffmanPair(): entries are triplets of
+   (code, numerator, denominator).  Code 0x7E maps to the INVALID
+   sentinel and 0x7F marks a BI picture (both pair members set to
+   VC1_BFRACTION_BI). */
+const int32_t VC1_BFRACTION_TBL[] =
+{
+    7, /* max bits */
+    2, /* total subtables */
+    3,4, /* subtable sizes */
+    0, /* 1-bit codes */
+    0, /* 2-bit codes */
+    7, /* 3-bit codes */
+        0x00,1,2, 0x01,1,3, 0x02,2,3, 0x03,1,4,
+        0x04,3,4, 0x05,1,5, 0x06,2,5,
+    0, /* 4-bit codes */
+    0, /* 5-bit codes */
+    0, /* 6-bit codes */
+    16, /* 7-bit codes */
+        0x70, 3,5, 0x71, 4,5, 0x72, 1,6, 0x73, 5,6,
+        0x74, 1,7, 0x75, 2,7, 0x76, 3,7, 0x77, 4,7,
+        0x78, 5,7, 0x79, 6,7, 0x7A, 1,8, 0x7B, 3,8,
+        0x7C, 5,8, 0x7D, 7,8,
+        0x7E, VC1_BFRACTION_INVALID,VC1_BFRACTION_INVALID,
+        0x7F, VC1_BFRACTION_BI, VC1_BFRACTION_BI,
+
+    -1 /* end of table */
+};
+
+/* This table is used for VLC decoding of REFDIST. */
+/* This table is used for VLC decoding of REFDIST.
+   Consumed by vc1_DecodeHuffmanOne(); the code is effectively unary
+   past 2 bits (each extra 1-bit extends the value by one), decoding
+   to reference distances 0..16. */
+const int32_t VC1_REFDIST_TBL[] =
+{
+    16, /* Max bits. */
+    3, /* Total sub-tables. */
+    5, 6, 5, /* Sub-table sizes. */
+
+    0, /* 1-bit codes. */
+    3, /* 2-bit codes. */
+        0, 0, 1, 1, 2, 2,
+    1, /* 3-bit codes. */
+        6, 3,
+    1, /* 4-bit codes. */
+        14, 4,
+    1, /* 5-bit codes. */
+        30, 5,
+    1, /* 6-bit codes. */
+        62, 6,
+    1, /* 7-bit codes. */
+        126, 7,
+    1, /* 8-bit codes. */
+        254, 8,
+    1, /* 9-bit codes. */
+        510, 9,
+    1, /* 10-bit codes. */
+        1022, 10,
+    1, /* 11-bit codes. */
+        2046, 11,
+    1, /* 12-bit codes. */
+        4094, 12,
+    1, /* 13-bit codes. */
+        8190, 13,
+    1, /* 14-bit codes. */
+        16382, 14,
+    1, /* 15-bit codes. */
+        32766, 15,
+    1, /* 16-bit codes. */
+        65534, 16,
+    -1 /* end of table. */
+};
diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_huffman.c b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_huffman.c
new file mode 100644
index 0000000..c2f5985
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_huffman.c
@@ -0,0 +1,97 @@
+/* ///////////////////////////////////////////////////////////////////////
+//
+// INTEL CORPORATION PROPRIETARY INFORMATION
+// This software is supplied under the terms of a license agreement or
+// nondisclosure agreement with Intel Corporation and may not be copied
+// or disclosed except in accordance with the terms of that agreement.
+// Copyright (c) 2008 Intel Corporation. All Rights Reserved.
+//
+// Description: Parses VLC syntax elements within VC-1 bitstream.
+//
+*/
+
+#include "vc1parse.h"
+
+/*----------------------------------------------------------------------------*/
+
+/* Decode one VLC-coded syntax element from the bitstream.
+ *
+ * pDecodeTable layout (see vc1parse_common_tables.c):
+ *   [0] max code length in bits, [1] sub-table count, then one size per
+ *   sub-table (sizes must sum to max bits), then for each code length a
+ *   count followed by (code, value) pairs, terminated by -1.
+ * On a match the decoded value is stored in *pDst and VC1_STATUS_OK is
+ * returned.
+ * NOTE(review): if no code matches before the -1 terminator, the
+ * function still returns VC1_STATUS_OK and leaves *pDst unwritten --
+ * confirm callers tolerate this before changing it.
+ */
+vc1_Status vc1_DecodeHuffmanOne(void* ctxt, int32_t *pDst, const int32_t *pDecodeTable)
+{
+    uint32_t tempValue;
+    const int32_t *pTable = pDecodeTable;
+    vc1_Status status = VC1_STATUS_OK;
+    int32_t i, j, maxBits, loopCount, totalBits, value;
+
+    maxBits = *pTable++;
+    loopCount = *pTable++;
+    totalBits = 0;
+    for (i = 0; i < loopCount; i++)
+        totalBits += *pTable++;
+
+    /* Consistency check: sub-table sizes must sum to the maximum code
+       length, otherwise the table is malformed. */
+    if (totalBits != maxBits)
+        return VC1_STATUS_PARSE_ERROR;
+
+    value = 0;
+    for (i = 0; i < maxBits; i++)
+    {
+        /* Pull one more bit and extend the candidate code word. */
+        VC1_GET_BITS9(1, tempValue);
+        value = (value << 1) | tempValue;
+        loopCount = *pTable++;
+        if (loopCount == -1)
+            break;
+        for (j = 0; j < loopCount; j++)
+        {
+            if (value == *pTable++)
+            {
+                *pDst = *pTable;
+                return status;
+            }
+            else
+                pTable++; /* skip the value half of a non-matching pair */
+        }
+    }
+
+    return status;
+}
+
+/*----------------------------------------------------------------------------*/
+
+/* Decode one VLC code that maps to a PAIR of values (e.g. BFRACTION's
+ * numerator/denominator from VC1_BFRACTION_TBL).
+ *
+ * Table layout is the same as for vc1_DecodeHuffmanOne() except each
+ * entry is a triplet (code, first, second).  On a match *pFirst and
+ * *pSecond receive the pair and VC1_STATUS_OK is returned.
+ * NOTE(review): as with vc1_DecodeHuffmanOne(), a no-match fall-through
+ * returns VC1_STATUS_OK without writing the outputs -- confirm callers
+ * tolerate this before changing it.
+ */
+vc1_Status vc1_DecodeHuffmanPair(void* ctxt, const int32_t *pDecodeTable,
+                         int8_t *pFirst, int16_t *pSecond)
+{
+    uint32_t tempValue;
+    const int32_t *pTable = pDecodeTable;
+    vc1_Status status = VC1_STATUS_OK;
+    int32_t i, j, maxBits, loopCount, totalBits, value;
+
+    maxBits = *pTable++;
+    loopCount = *pTable++;
+    totalBits = 0;
+    for (i = 0; i < loopCount; i++)
+        totalBits += *pTable++;
+
+    /* Consistency check: sub-table sizes must sum to the maximum code
+       length, otherwise the table is malformed. */
+    if (totalBits != maxBits)
+        return VC1_STATUS_PARSE_ERROR;
+
+    value = 0;
+    for (i = 0; i < maxBits; i++)
+    {
+        /* Pull one more bit and extend the candidate code word. */
+        VC1_GET_BITS9(1, tempValue);
+        value = (value << 1) | tempValue;
+        loopCount = *pTable++;
+        if (loopCount == -1)
+            break;
+        for (j = 0; j < loopCount; j++)
+        {
+            if (value == *pTable++)
+            {
+                *pFirst = *pTable++;
+                *pSecond = *pTable;
+                return status;
+            }
+            else
+                pTable += 2; /* skip both values of a non-matching triplet */
+        }
+    }
+
+    return status;
+}
diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_ipic.c b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_ipic.c
new file mode 100644
index 0000000..1a37929
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_ipic.c
@@ -0,0 +1,101 @@
+/* ///////////////////////////////////////////////////////////////////////
+//
+// INTEL CORPORATION PROPRIETARY INFORMATION
+// This software is supplied under the terms of a license agreement or
+// nondisclosure agreement with Intel Corporation and may not be copied
+// or disclosed except in accordance with the terms of that agreement.
+// Copyright (c) 2008 Intel Corporation. All Rights Reserved.
+//
+// Description: Parses VC-1 picture layer for progressive I picture in simple
+// or main profile bitstream or progressive BI picture in main profile
+// bitstream.
+//
+*/
+
+#include "vc1parse.h"
+
+/*------------------------------------------------------------------------------
+ * Parse picture layer. This function parses progressive I picture for simple
+ * or main profile bitstream or progressive BI picture in main profile
+ * bitstream. This parser starts after PTYPE was parsed but stops before
+ * parsing of macroblock layer.
+ * Table 16 of SMPTE 421M after processing up to PTYPE for I picture.
+ * Table 17 of SMPTE 421M after processing up to PTYPE for BI picture.
+ *------------------------------------------------------------------------------
+ */
+
+/* Parse a progressive I picture (simple/main profile) or progressive BI
+ * picture (main profile).  Entered after PTYPE has already been parsed;
+ * stops before the macroblock layer.  Fills pInfo->picLayerHeader.
+ * Returns VC1_STATUS_OK or a parse error from a sub-parser.
+ */
+vc1_Status vc1_ParsePictureHeader_ProgressiveIpicture(void* ctxt, vc1_Info *pInfo)
+{
+    uint32_t tempValue;
+    vc1_Status status = VC1_STATUS_OK;
+    vc1_metadata_t *md = &pInfo->metadata;
+    vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader;
+
+    /* rounding control is implied for simple and main profile, SMPTE 421M 8.3.7.
+       For each I or BI frame, RND shall be set to 1 */
+    if (md->PROFILE != VC1_PROFILE_ADVANCED)
+    {
+        picLayerHeader->RNDCTRL = md->RNDCTRL | 1 ;
+        md->RNDCTRL = picLayerHeader->RNDCTRL;
+    }
+
+
+    if (picLayerHeader->PTYPE == VC1_BI_FRAME)
+    {
+        /* A BI picture must carry the dedicated BI BFRACTION codeword. */
+        if ((status = vc1_DecodeHuffmanPair(ctxt, VC1_BFRACTION_TBL,
+            &picLayerHeader->BFRACTION_NUM, &picLayerHeader->BFRACTION_DEN))
+            != VC1_STATUS_OK)
+        {
+            return status;
+        }
+        if (picLayerHeader->BFRACTION_DEN != VC1_BFRACTION_BI)
+            return VC1_STATUS_PARSE_ERROR;
+    }
+
+    VC1_GET_BITS9(7, tempValue); /* BF. */
+    VC1_GET_BITS9(5, picLayerHeader->PQINDEX);
+
+    /* Derive PQUANT (and quantizer selection) from PQINDEX. */
+    if ((status = vc1_CalculatePQuant(pInfo)) != VC1_STATUS_OK)
+        return status;
+
+    /* HALFQP is only present for low PQINDEX values. */
+    if (picLayerHeader->PQINDEX <= 8)
+    {
+        VC1_GET_BITS9(1, picLayerHeader->HALFQP);
+    }
+    else picLayerHeader->HALFQP=0;
+
+    if (md->QUANTIZER == 1)
+    {
+        VC1_GET_BITS9(1, picLayerHeader->PQUANTIZER);
+        picLayerHeader->UniformQuant = picLayerHeader->PQUANTIZER;
+    }
+
+    /* MVRANGE but only for main profile. */
+    if ((status = vc1_MVRangeDecode(ctxt, pInfo)) != VC1_STATUS_OK)
+        return status;
+
+    if (md->MULTIRES == 1 && picLayerHeader->PTYPE != VC1_BI_FRAME)
+    {
+        VC1_GET_BITS9(2, tempValue); /* RESPIC. */
+    }
+
+    /* TRANSACFRM: 1-bit escape then one more bit, yielding 0, 2 or 3. */
+    VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM);
+    if (picLayerHeader->TRANSACFRM)
+    {
+        VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM);
+        picLayerHeader->TRANSACFRM += 2;
+    }
+
+    VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM2);
+    if (picLayerHeader->TRANSACFRM2)
+    {
+        VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM2);
+        picLayerHeader->TRANSACFRM2 += 2;
+    }
+
+    VC1_GET_BITS9(1, picLayerHeader->TRANSDCTAB);
+
+    /* Skip parsing of macroblock layer. */
+
+    return status;
+}
diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_ipic_adv.c b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_ipic_adv.c
new file mode 100644
index 0000000..03aeb79
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_ipic_adv.c
@@ -0,0 +1,257 @@
+/* ///////////////////////////////////////////////////////////////////////
+//
+// INTEL CORPORATION PROPRIETARY INFORMATION
+// This software is supplied under the terms of a license agreement or
+// nondisclosure agreement with Intel Corporation and may not be copied
+// or disclosed except in accordance with the terms of that agreement.
+// Copyright (c) 2008 Intel Corporation. All Rights Reserved.
+//
+// Description: Parses VC-1 picture layer for progressive I or BI picture in
+// advanced profile bitstream.
+//
+*/
+
+#include "vc1parse.h"
+#include "viddec_fw_debug.h"
+/*------------------------------------------------------------------------------
+ * Parse picture layer. This function parses progressive I or BI picture for
+ * advanced profile bitstream.
+ * Table 18 of SMPTE 421M after processing up to POSTPROC by
+ * vc1_ParsePictureHeader_Adv() but stopping before processing of macroblock
+ * layer.
+ *------------------------------------------------------------------------------
+ */
+
+/* Parse a progressive I or BI picture for advanced profile, continuing
+ * from where vc1_ParsePictureHeader_Adv() stopped (after POSTPROC) and
+ * stopping before the macroblock layer (SMPTE 421M Table 18).
+ */
+vc1_Status vc1_ParsePictureHeader_ProgressiveIpicture_Adv(void* ctxt, vc1_Info *pInfo)
+{
+    vc1_Status status = VC1_STATUS_OK;
+    vc1_metadata_t *md = &pInfo->metadata;
+    vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader;
+
+    /* ACPRED bitplane covers the full frame (widthMB x heightMB). */
+    if ((status = vc1_DecodeBitplane(ctxt, pInfo,
+        md->widthMB, md->heightMB, BPP_ACPRED)) != VC1_STATUS_OK)
+    {
+        return status;
+    }
+
+    /* CONDOVER is only coded when overlap filtering is on at low PQUANT;
+       decode the 1- or 2-bit code into NONE/ALL/SOME, reading the
+       OVERFLAGS bitplane only in the SOME case. */
+    if ((md->OVERLAP == 1) && (picLayerHeader->PQUANT <= 8))
+    {
+        VC1_GET_BITS9(1, picLayerHeader->CONDOVER);
+        if (picLayerHeader->CONDOVER)
+        {
+            VC1_GET_BITS9(1, picLayerHeader->CONDOVER);
+            if (! picLayerHeader->CONDOVER)
+                picLayerHeader->CONDOVER = VC1_CONDOVER_FLAG_ALL;
+            else
+            {
+                picLayerHeader->CONDOVER = VC1_CONDOVER_FLAG_SOME;
+                if ((status = vc1_DecodeBitplane(ctxt, pInfo,
+                    md->widthMB,
+                    md->heightMB, BPP_OVERFLAGS)) != VC1_STATUS_OK)
+                {
+                    return status;
+                }
+            }
+        }
+        else
+            picLayerHeader->CONDOVER = VC1_CONDOVER_FLAG_NONE;
+    }
+
+    /* TRANSACFRM: 1-bit escape then one more bit, yielding 0, 2 or 3. */
+    VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM);
+    if (picLayerHeader->TRANSACFRM)
+    {
+        VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM);
+        picLayerHeader->TRANSACFRM += 2;
+    }
+
+    VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM2);
+    if (picLayerHeader->TRANSACFRM2)
+    {
+        VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM2);
+        picLayerHeader->TRANSACFRM2 += 2;
+    }
+
+    VC1_GET_BITS9(1, picLayerHeader->TRANSDCTAB);
+
+    status = vc1_VOPDQuant(ctxt, pInfo);
+
+    /* Skip parsing of macroblock layer. */
+
+    return status;
+}
+
+/*------------------------------------------------------------------------------
+ * Parse picture layer. This function parses interlace I or BI frame for
+ * advanced profile bitstream.
+ * Table 82 of SMPTE 421M after processing up to POSTPROC by
+ * vc1_ParsePictureHeader_Adv() but stopping before processing of macroblock
+ * layer.
+ *------------------------------------------------------------------------------
+ */
+
+/* Parse an interlace I or BI FRAME for advanced profile, continuing
+ * from where vc1_ParsePictureHeader_Adv() stopped (after POSTPROC) and
+ * stopping before the macroblock layer (SMPTE 421M Table 82).
+ * Identical to the progressive variant except for the additional
+ * FIELDTX bitplane read first.
+ */
+vc1_Status vc1_ParsePictureHeader_InterlaceIpicture_Adv(void* ctxt, vc1_Info *pInfo)
+{
+    vc1_Status status = VC1_STATUS_OK;
+    vc1_metadata_t *md = &pInfo->metadata;
+    vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader;
+
+    /* FIELDTX bitplane: per-MB field/frame transform decision. */
+    if ((status = vc1_DecodeBitplane(ctxt, pInfo,
+        md->widthMB, md->heightMB, BPP_FIELDTX)) != VC1_STATUS_OK)
+    {
+        return status;
+    }
+
+    if ((status = vc1_DecodeBitplane(ctxt, pInfo,
+        md->widthMB, md->heightMB, BPP_ACPRED)) != VC1_STATUS_OK)
+    {
+        return status;
+    }
+
+    /* CONDOVER: NONE/ALL/SOME; OVERFLAGS bitplane only in the SOME case. */
+    if ((md->OVERLAP == 1) && (picLayerHeader->PQUANT <= 8))
+    {
+        VC1_GET_BITS9(1, picLayerHeader->CONDOVER);
+        if (picLayerHeader->CONDOVER)
+        {
+            VC1_GET_BITS9(1, picLayerHeader->CONDOVER);
+            if (! picLayerHeader->CONDOVER)
+                picLayerHeader->CONDOVER = VC1_CONDOVER_FLAG_ALL;
+            else
+            {
+                picLayerHeader->CONDOVER = VC1_CONDOVER_FLAG_SOME;
+                if ((status = vc1_DecodeBitplane(ctxt, pInfo,
+                    md->widthMB,
+                    md->heightMB, BPP_OVERFLAGS)) != VC1_STATUS_OK)
+                {
+                    return status;
+                }
+            }
+        }
+        else
+            picLayerHeader->CONDOVER = VC1_CONDOVER_FLAG_NONE;
+    }
+
+    /* TRANSACFRM: 1-bit escape then one more bit, yielding 0, 2 or 3. */
+    VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM);
+    if (picLayerHeader->TRANSACFRM)
+    {
+        VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM);
+        picLayerHeader->TRANSACFRM += 2;
+    }
+
+    VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM2);
+    if (picLayerHeader->TRANSACFRM2)
+    {
+        VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM2);
+        picLayerHeader->TRANSACFRM2 += 2;
+    }
+
+    VC1_GET_BITS9(1, picLayerHeader->TRANSDCTAB);
+
+    status = vc1_VOPDQuant(ctxt, pInfo);
+
+    /* Skip parsing of macroblock layer. */
+
+    return status;
+}
+
+/*------------------------------------------------------------------------------
+ * Parse picture layer. This function parses interlace I or BI field for
+ * advanced profile bitstream.
+ * Table 87 of SMPTE 421M after processing up to BFRACTION by
+ * vc1_ParseFieldHeader_Adv() but stopping before processing of macroblock
+ * layer.
+ *------------------------------------------------------------------------------
+ */
+
+/* Parse an interlace I or BI FIELD for advanced profile, continuing
+ * from where vc1_ParseFieldHeader_Adv() stopped (after BFRACTION) and
+ * stopping before the macroblock layer (SMPTE 421M Table 87).
+ * Bitplanes here cover only one field, hence the (heightMB+1)/2 height.
+ */
+vc1_Status vc1_ParseFieldHeader_InterlaceIpicture_Adv(void* ctxt, vc1_Info *pInfo)
+{
+    uint32_t tempValue;
+    vc1_Status status = VC1_STATUS_OK;
+    vc1_metadata_t *md = &pInfo->metadata;
+    vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader;
+
+    // Reset MVMODE when the second field is an I picture
+    // to avoid carrying forward the mvmode values from previous field
+    // especially the intensity compensation value
+    picLayerHeader->MVMODE = 0;
+
+    VC1_GET_BITS9(5, picLayerHeader->PQINDEX);
+    if ((status = vc1_CalculatePQuant(pInfo)) != VC1_STATUS_OK) {
+        DEB("Error parsing I field \n");
+        return status;
+    }
+
+    /* HALFQP is only present for low PQINDEX values. */
+    if (picLayerHeader->PQINDEX <= 8)
+    {
+        VC1_GET_BITS9(1, picLayerHeader->HALFQP);
+    }
+    else
+        picLayerHeader->HALFQP = 0;
+
+    if (md->QUANTIZER == 1) {
+        VC1_GET_BITS9(1, picLayerHeader->PQUANTIZER);
+        picLayerHeader->UniformQuant = picLayerHeader->PQUANTIZER;
+    }
+
+    if (md->POSTPROCFLAG == 1)
+        VC1_GET_BITS9(2, tempValue); /* POSTPROC. */
+
+    /* ACPRED bitplane for one field: full width, half height. */
+    if ((status = vc1_DecodeBitplane(ctxt, pInfo,
+        md->widthMB, (md->heightMB+1)/2, BPP_ACPRED)) !=
+        VC1_STATUS_OK)
+    {
+        DEB("Error parsing I field \n");
+        return status;
+    }
+
+    /* CONDOVER: NONE/ALL/SOME; OVERFLAGS bitplane only in the SOME case. */
+    if ((md->OVERLAP == 1) && (picLayerHeader->PQUANT <= 8))
+    {
+        VC1_GET_BITS9(1, picLayerHeader->CONDOVER);
+        if (picLayerHeader->CONDOVER)
+        {
+            VC1_GET_BITS9(1, picLayerHeader->CONDOVER);
+            if (! picLayerHeader->CONDOVER)
+                picLayerHeader->CONDOVER = VC1_CONDOVER_FLAG_ALL;
+            else
+            {
+                picLayerHeader->CONDOVER = VC1_CONDOVER_FLAG_SOME;
+
+                if ((status = vc1_DecodeBitplane(ctxt, pInfo,
+                    md->widthMB,
+                    (md->heightMB+1)/2, BPP_OVERFLAGS)) !=
+                    VC1_STATUS_OK)
+                {
+                    DEB("Error parsing I field \n");
+                    return status;
+                }
+            }
+        }
+        else
+            picLayerHeader->CONDOVER = VC1_CONDOVER_FLAG_NONE;
+    }
+
+    /* TRANSACFRM: 1-bit escape then one more bit, yielding 0, 2 or 3. */
+    VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM);
+    if (picLayerHeader->TRANSACFRM)
+    {
+        VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM);
+        picLayerHeader->TRANSACFRM += 2;
+    }
+
+    VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM2);
+    if (picLayerHeader->TRANSACFRM2)
+    {
+        VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM2);
+        picLayerHeader->TRANSACFRM2 += 2;
+    }
+
+    VC1_GET_BITS9(1, picLayerHeader->TRANSDCTAB);
+
+    status = vc1_VOPDQuant(ctxt, pInfo);
+    if (status != VC1_STATUS_OK) {
+        DEB("Error parsing I field \n");
+        return status;
+    }
+
+    /* Skip parsing of macroblock layer. */
+
+    return status;
+}
diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_mv_com.c b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_mv_com.c
new file mode 100644
index 0000000..7cbcc34
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_mv_com.c
@@ -0,0 +1,82 @@
+/* ///////////////////////////////////////////////////////////////////////
+//
+// INTEL CORPORATION PROPRIETARY INFORMATION
+// This software is supplied under the terms of a license agreement or
+// nondisclosure agreement with Intel Corporation and may not be copied
+// or disclosed except in accordance with the terms of that agreement.
+// Copyright (c) 2008 Intel Corporation. All Rights Reserved.
+//
+// Description: Parses VC-1 syntax elements MVRANGE and DMVRANGE.
+//
+*/
+
+#include "vc1parse.h"
+
+/*------------------------------------------------------------------------------
+ * Parse syntax element MVRANGE, which exists for main and advanced profiles.
+ *------------------------------------------------------------------------------
+ */
+
+/* Decode the MVRANGE syntax element (main and advanced profiles).
+ * Only present when EXTENDED_MV is set; the code is a truncated unary
+ * VLC: 0 -> 0, 10 -> 1, 110 -> 2, 111 -> 3.  Without EXTENDED_MV the
+ * range defaults to 0.  Always returns VC1_STATUS_OK.
+ */
+vc1_Status vc1_MVRangeDecode(void* ctxt, vc1_Info *pInfo)
+{
+    vc1_Status status = VC1_STATUS_OK;
+    vc1_metadata_t *md = &pInfo->metadata;
+    vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader;
+
+    if (md->EXTENDED_MV == 1)
+    {
+        VC1_GET_BITS9(1, picLayerHeader->MVRANGE);
+        if (picLayerHeader->MVRANGE)
+        {
+            VC1_GET_BITS9(1, picLayerHeader->MVRANGE);
+            if (picLayerHeader->MVRANGE)
+            {
+                VC1_GET_BITS9(1, picLayerHeader->MVRANGE);
+                picLayerHeader->MVRANGE += 1;
+            }
+            picLayerHeader->MVRANGE += 1;
+        }
+    }
+    else
+        picLayerHeader->MVRANGE = 0;
+
+    return status;
+}
+
+/*------------------------------------------------------------------------------
+ * Parse syntax element DMVRANGE.
+ *------------------------------------------------------------------------------
+ */
+
+/* Decode the DMVRANGE syntax element (extended differential MV range).
+ * Only read when EXTENDED_DMV is set; truncated unary VLC:
+ *   0   -> VC1_DMVRANGE_NONE
+ *   10  -> VC1_DMVRANGE_HORIZONTAL_RANGE
+ *   110 -> VC1_DMVRANGE_VERTICAL_RANGE
+ *   111 -> VC1_DMVRANGE_HORIZONTAL_VERTICAL_RANGE
+ * NOTE(review): when EXTENDED_DMV is 0, DMVRANGE is left untouched
+ * (presumably stale from the previous picture) -- confirm callers only
+ * read it when EXTENDED_DMV is set.  Always returns VC1_STATUS_OK.
+ */
+vc1_Status vc1_DMVRangeDecode(void* ctxt, vc1_Info *pInfo)
+{
+    vc1_Status status = VC1_STATUS_OK;
+    vc1_metadata_t *md = &pInfo->metadata;
+    vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader;
+
+    if (md->EXTENDED_DMV == 1)
+    {
+        VC1_GET_BITS9(1, picLayerHeader->DMVRANGE);
+        if (picLayerHeader->DMVRANGE == 0)
+            picLayerHeader->DMVRANGE = VC1_DMVRANGE_NONE;
+        else
+        {
+            VC1_GET_BITS9(1, picLayerHeader->DMVRANGE);
+            if (picLayerHeader->DMVRANGE == 0)
+                picLayerHeader->DMVRANGE = VC1_DMVRANGE_HORIZONTAL_RANGE;
+            else
+            {
+                VC1_GET_BITS9(1, picLayerHeader->DMVRANGE);
+                if (picLayerHeader->DMVRANGE == 0)
+                    picLayerHeader->DMVRANGE = VC1_DMVRANGE_VERTICAL_RANGE;
+                else
+                {
+                    picLayerHeader->DMVRANGE =
+                        VC1_DMVRANGE_HORIZONTAL_VERTICAL_RANGE;
+                }
+            }
+        }
+    }
+
+    return status;
+}
diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_pic_com.c b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_pic_com.c
new file mode 100644
index 0000000..c363456
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_pic_com.c
@@ -0,0 +1,101 @@
+/* ///////////////////////////////////////////////////////////////////////
+//
+// INTEL CORPORATION PROPRIETARY INFORMATION
+// This software is supplied under the terms of a license agreement or
+// nondisclosure agreement with Intel Corporation and may not be copied
+// or disclosed except in accordance with the terms of that agreement.
+// Copyright (c) 2008 Intel Corporation. All Rights Reserved.
+//
+// Description: Parses VC-1 picture layer for simple and main profiles.
+//
+*/
+
+#include "vc1parse.h"
+
+/*------------------------------------------------------------------------------
+ * Parse picture layer. This function parses the picture header for simple or
+ * main profile down to macroblock layer.
+ * Table 16 of SMPTE 421M after processing up to PTYPE for I picture.
+ * Table 17 of SMPTE 421M after processing up to PTYPE for BI picture.
+ * Table 19 of SMPTE 421M after processing up to PTYPE for P picture.
+ * Table 21 of SMPTE 421M after processing up to PTYPE for B picture.
+ *------------------------------------------------------------------------------
+ */
+
+/* Parse the picture header for simple or main profile and dispatch to
+ * the picture-type-specific parser (I/BI, P, or B), stopping before the
+ * macroblock layer.  SMPTE 421M Tables 16/17/19/21 up to PTYPE.
+ * Returns VC1_STATUS_PARSE_ERROR for an unrecognized picture type.
+ */
+vc1_Status vc1_ParsePictureHeader(void* ctxt, vc1_Info *pInfo)
+{
+    uint32_t tempValue;
+    vc1_Status status = VC1_STATUS_OK;
+    vc1_metadata_t *md = &pInfo->metadata;
+    vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader;
+    int32_t result;
+
+    if (md->PROFILE != VC1_PROFILE_ADVANCED)
+    {
+        // As per spec, for main/simple profile, if the size of the coded picture is <= 1B,
+        // it shall be treated as a skipped frame.
+        // In content with skipped frames, the data is "00".
+        // rcv to vc1 conversion process adds an additional byte (0x80) to the picture, hence
+        // the data looks like "00 80"
+        // Hence if data is <= 2B, we will consider it skipped (check for 16+1b, if it fails, the frame is skipped).
+        result = viddec_pm_peek_bits(ctxt, &tempValue, 17);
+        if(result == -1)
+        {
+            picLayerHeader->PTYPE = VC1_SKIPPED_FRAME;
+            return status;
+        }
+    }
+
+    if (md->FINTERPFLAG == 1)
+    {
+        VC1_GET_BITS9(1, tempValue); /* INTERPFRM. */
+    }
+
+    VC1_GET_BITS9(2, tempValue); /* FRMCNT. */
+
+    if (md->RANGERED == 1)
+    {
+        VC1_GET_BITS9(1, picLayerHeader->RANGEREDFRM);
+    }
+
+    /* PTYPE coding depends on whether B frames can occur: with
+       MAXBFRAMES == 0 it is a single bit (0=I, 1=P); otherwise a VLC
+       where 0x distinguishes B/BI from I. */
+    if (md->MAXBFRAMES == 0)
+    {
+        VC1_GET_BITS9(1, picLayerHeader->PTYPE);
+        if (picLayerHeader->PTYPE == 0)
+            picLayerHeader->PTYPE = VC1_I_FRAME;
+        else
+            picLayerHeader->PTYPE = VC1_P_FRAME;
+    }
+    else
+    {
+        VC1_GET_BITS9(1, picLayerHeader->PTYPE);
+        if (picLayerHeader->PTYPE == 0)
+        {
+            VC1_GET_BITS9(1, picLayerHeader->PTYPE);
+            if (picLayerHeader->PTYPE == 0) {
+                picLayerHeader->PTYPE = VC1_B_FRAME; /* Or VC1_BI_FRAME. */
+                /* if peek(7) = 0b1111111 then ptype = bi */
+                VC1_PEEK_BITS( 7, tempValue );
+                if ( tempValue == 0x7f )
+                    picLayerHeader->PTYPE = VC1_BI_FRAME;
+            } else
+                picLayerHeader->PTYPE = VC1_I_FRAME;
+        }
+        else
+            picLayerHeader->PTYPE = VC1_P_FRAME;
+    }
+
+    /* Dispatch to the per-type header parser. */
+    if (picLayerHeader->PTYPE == VC1_I_FRAME ||
+        picLayerHeader->PTYPE == VC1_BI_FRAME)
+    {
+        status = vc1_ParsePictureHeader_ProgressiveIpicture(ctxt, pInfo);
+    }
+    else if (picLayerHeader->PTYPE == VC1_P_FRAME)
+        status = vc1_ParsePictureHeader_ProgressivePpicture(ctxt, pInfo);
+    else if (picLayerHeader->PTYPE == VC1_B_FRAME)
+        status = vc1_ParsePictureHeader_ProgressiveBpicture(ctxt, pInfo);
+    else
+        status = VC1_STATUS_PARSE_ERROR;
+
+    return status;
+}
diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_pic_com_adv.c b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_pic_com_adv.c
new file mode 100644
index 0000000..fa9c3c7
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_pic_com_adv.c
@@ -0,0 +1,403 @@
+/* ///////////////////////////////////////////////////////////////////////
+//
+// INTEL CORPORATION PROPRIETARY INFORMATION
+// This software is supplied under the terms of a license agreement or
+// nondisclosure agreement with Intel Corporation and may not be copied
+// or disclosed except in accordance with the terms of that agreement.
+// Copyright (c) 2008 Intel Corporation. All Rights Reserved.
+//
+// Description: Parses VC-1 picture layer for advanced profile.
+//
+*/
+
+#include "vc1parse.h"
+#include "viddec_fw_debug.h"
+
+/*------------------------------------------------------------------------------
+ * Parse picture layer. This function parses the picture header for advanced
+ * profile down to POSTPROC syntax element.
+ * Table 18 of SMPTE 421M for progressive I or BI picture.
+ * Table 20 of SMPTE 421M for progressive P picture.
+ * Table 22 of SMPTE 421M for progressive B picture.
+ * Table 23 of SMPTE 421M for skipped picture.
+ * Table 82 of SMPTE 421M for interlace I or BI frame.
+ * Table 83 of SMPTE 421M for interlace P frame.
+ * Table 84 of SMPTE 421M for interlace B frame.
+ *------------------------------------------------------------------------------
+ */
+
+/* Parse the advanced-profile picture header down to the POSTPROC
+ * element, then dispatch via vc1_ParsePictureFieldHeader_Adv().
+ * Handles progressive and frame-interlace pictures (SMPTE 421M
+ * Tables 18/20/22/23 and 82-84); field-interlace pictures are rejected
+ * here and must come through vc1_ParseFieldHeader_Adv() instead.
+ */
+vc1_Status vc1_ParsePictureHeader_Adv(void* ctxt, vc1_Info *pInfo)
+{
+    uint32_t i = 0;
+    uint32_t tempValue;
+    vc1_Status status = VC1_STATUS_OK;
+    uint32_t number_of_pan_scan_window;
+    vc1_metadata_t *md = &pInfo->metadata;
+    vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader;
+
+    /* FCM VLC: 0 -> progressive, 10 -> frame interlace,
+       11 -> field interlace (not handled by this function). */
+    if (md->INTERLACE == 1)
+    {
+        VC1_GET_BITS9(1, picLayerHeader->FCM);
+        if (picLayerHeader->FCM)
+        {
+            VC1_GET_BITS9(1, picLayerHeader->FCM);
+            if (picLayerHeader->FCM)
+            {
+                picLayerHeader->FCM = VC1_FCM_FIELD_INTERLACE;
+                return VC1_STATUS_PARSE_ERROR;
+            }
+            else
+                picLayerHeader->FCM = VC1_FCM_FRAME_INTERLACE;
+        }
+        else
+            picLayerHeader->FCM = VC1_FCM_PROGRESSIVE;
+    }
+    else
+        picLayerHeader->FCM = VC1_FCM_PROGRESSIVE;
+
+
+    /* PTYPE VLC: 0 -> P, 10 -> B, 110 -> I, 1110 -> BI, 1111 -> skipped. */
+    VC1_GET_BITS9(1, picLayerHeader->PTYPE);
+    if (picLayerHeader->PTYPE)
+    {
+        VC1_GET_BITS9(1, picLayerHeader->PTYPE);
+        if (picLayerHeader->PTYPE)
+        {
+            VC1_GET_BITS9(1, picLayerHeader->PTYPE);
+            if (picLayerHeader->PTYPE)
+            {
+                VC1_GET_BITS9(1, picLayerHeader->PTYPE);
+                if (picLayerHeader->PTYPE)
+                    picLayerHeader->PTYPE = VC1_SKIPPED_FRAME;
+                else
+                    picLayerHeader->PTYPE = VC1_BI_FRAME;
+            }
+            else
+                picLayerHeader->PTYPE = VC1_I_FRAME;
+        }
+        else
+            picLayerHeader->PTYPE = VC1_B_FRAME;
+    }
+    else
+        picLayerHeader->PTYPE = VC1_P_FRAME;
+
+    if (picLayerHeader->PTYPE != VC1_SKIPPED_FRAME)
+    {
+        if (md->TFCNTRFLAG)
+        {
+            VC1_GET_BITS9(8, picLayerHeader->TFCNTR); /* TFCNTR. */
+        }
+    }
+
+    /* Pulldown flags: RPTFRM for progressive/PSF content, TFF/RFF for
+       interlaced display. */
+    if (md->PULLDOWN)
+    {
+        if ((md->INTERLACE == 0) || (md->PSF == 1))
+        {
+            VC1_GET_BITS9(2, picLayerHeader->RPTFRM);
+        }
+        else
+        {
+            VC1_GET_BITS9(1, picLayerHeader->TFF);
+            VC1_GET_BITS9(1, picLayerHeader->RFF);
+        }
+    }
+
+    if (md->PANSCAN_FLAG == 1)
+    {
+        VC1_GET_BITS9(1, picLayerHeader->PS_PRESENT); /* PS_PRESENT. */
+        if (picLayerHeader->PS_PRESENT == 1)
+        {
+            /* Window count depends on interlacing and pulldown
+               (repeated fields/frames each get their own window). */
+            if ((md->INTERLACE == 1) &&
+                (md->PSF == 0))
+            {
+                if (md->PULLDOWN == 1)
+                    number_of_pan_scan_window = 2 + picLayerHeader->RFF;
+                else
+                    number_of_pan_scan_window = 2;
+            }
+            else
+            {
+                if (md->PULLDOWN == 1)
+                    number_of_pan_scan_window = 1 + picLayerHeader->RPTFRM;
+                else
+                    number_of_pan_scan_window = 1;
+            }
+            picLayerHeader->number_of_pan_scan_window = number_of_pan_scan_window;
+
+            for (i = 0; i < number_of_pan_scan_window; i++)
+            {
+                VC1_GET_BITS(18, picLayerHeader->PAN_SCAN_WINDOW[i].hoffset); /* PS_HOFFSET. */
+                VC1_GET_BITS(18, picLayerHeader->PAN_SCAN_WINDOW[i].voffset); /* PS_VOFFSET. */
+                VC1_GET_BITS(14, picLayerHeader->PAN_SCAN_WINDOW[i].width); /* PS_WIDTH. */
+                VC1_GET_BITS(14, picLayerHeader->PAN_SCAN_WINDOW[i].height); /* PS_HEIGHT. */
+            }
+        }
+    }
+
+    if (picLayerHeader->PTYPE != VC1_SKIPPED_FRAME)
+    {
+        VC1_GET_BITS9(1, picLayerHeader->RNDCTRL);
+        md->RNDCTRL = picLayerHeader->RNDCTRL;
+
+        if ((md->INTERLACE == 1) ||
+            (picLayerHeader->FCM != VC1_FCM_PROGRESSIVE))
+        {
+            VC1_GET_BITS9(1, picLayerHeader->UVSAMP);
+        }
+
+        if ((md->FINTERPFLAG == 1) &&
+            (picLayerHeader->FCM == VC1_FCM_PROGRESSIVE))
+        {
+            VC1_GET_BITS9(1, tempValue); /* INTERPFRM. */
+        }
+
+        /* BFRACTION only for progressive B frames. */
+        if ((picLayerHeader->PTYPE == VC1_B_FRAME) &&
+            (picLayerHeader->FCM == VC1_FCM_PROGRESSIVE))
+        {
+            if ((status = vc1_DecodeHuffmanPair(ctxt, VC1_BFRACTION_TBL,
+                &picLayerHeader->BFRACTION_NUM, &picLayerHeader->BFRACTION_DEN))
+                != VC1_STATUS_OK)
+            {
+                return status;
+            }
+        }
+
+        VC1_GET_BITS9(5, picLayerHeader->PQINDEX);
+        if ((status = vc1_CalculatePQuant(pInfo)) != VC1_STATUS_OK)
+            return status;
+
+        /* HALFQP is only present for low PQINDEX values. */
+        if (picLayerHeader->PQINDEX <= 8)
+        {
+            VC1_GET_BITS9(1, picLayerHeader->HALFQP);
+        }
+        else
+            picLayerHeader->HALFQP = 0;
+
+        if (md->QUANTIZER == 1)
+        {
+            VC1_GET_BITS9(1, picLayerHeader->PQUANTIZER);
+            picLayerHeader->UniformQuant = picLayerHeader->PQUANTIZER;
+        }
+
+        if (md->POSTPROCFLAG == 1)
+        {
+            VC1_GET_BITS9(2, picLayerHeader->POSTPROC);
+        }
+    }
+
+    /* Continue with the picture-type-specific remainder of the header. */
+    return vc1_ParsePictureFieldHeader_Adv(ctxt, pInfo);
+}
+
+/*------------------------------------------------------------------------------
+ * Parse picture layer. This function parses the picture header for advanced
+ * profile down to BFRACTION syntax element.
+ * Table 85 of SMPTE 421M.
+ *------------------------------------------------------------------------------
+ */
+
+/* Parse the advanced-profile FIELD picture header down to BFRACTION
+ * (SMPTE 421M Table 85), derive the per-field picture types from
+ * FPTYPE, then dispatch via vc1_ParsePictureFieldHeader_Adv().
+ * Only FCM == field-interlace is accepted.
+ */
+vc1_Status vc1_ParseFieldHeader_Adv(void* ctxt, vc1_Info *pInfo)
+{
+    uint32_t i = 0;
+    vc1_Status status = VC1_STATUS_OK;
+    uint32_t number_of_pan_scan_window;
+    vc1_metadata_t *md = &pInfo->metadata;
+    vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader;
+
+    /* FCM VLC: 0 -> progressive, 10 -> frame interlace, 11 -> field
+       interlace; anything but field interlace is an error here. */
+    VC1_GET_BITS9(1, picLayerHeader->FCM);
+    if (picLayerHeader->FCM)
+    {
+        VC1_GET_BITS9(1, picLayerHeader->FCM);
+        if (picLayerHeader->FCM)
+            picLayerHeader->FCM = VC1_FCM_FIELD_INTERLACE;
+        else
+            picLayerHeader->FCM = VC1_FCM_FRAME_INTERLACE;
+    }
+    else
+        picLayerHeader->FCM = VC1_FCM_PROGRESSIVE;
+    if (picLayerHeader->FCM != VC1_FCM_FIELD_INTERLACE)
+        return VC1_STATUS_PARSE_ERROR;
+
+    /* FPTYPE (3 bits) selects the (first, second) field type pair:
+       0..3 are I/P combinations, 4..7 are B/BI combinations. */
+    VC1_GET_BITS9(3, picLayerHeader->FPTYPE);
+    if (picLayerHeader->FPTYPE == 0)
+    {
+        picLayerHeader->PTypeField1 = VC1_I_FRAME;
+        picLayerHeader->PTypeField2 = VC1_I_FRAME;
+    }
+    else if (picLayerHeader->FPTYPE == 1)
+    {
+        picLayerHeader->PTypeField1 = VC1_I_FRAME;
+        picLayerHeader->PTypeField2 = VC1_P_FRAME;
+    }
+    else if (picLayerHeader->FPTYPE == 2)
+    {
+        picLayerHeader->PTypeField1 = VC1_P_FRAME;
+        picLayerHeader->PTypeField2 = VC1_I_FRAME;
+    }
+    else if (picLayerHeader->FPTYPE == 3)
+    {
+        picLayerHeader->PTypeField1 = VC1_P_FRAME;
+        picLayerHeader->PTypeField2 = VC1_P_FRAME;
+    }
+    else if (picLayerHeader->FPTYPE == 4)
+    {
+        picLayerHeader->PTypeField1 = VC1_B_FRAME;
+        picLayerHeader->PTypeField2 = VC1_B_FRAME;
+    }
+    else if (picLayerHeader->FPTYPE == 5)
+    {
+        picLayerHeader->PTypeField1 = VC1_B_FRAME;
+        picLayerHeader->PTypeField2 = VC1_BI_FRAME;
+    }
+    else if (picLayerHeader->FPTYPE == 6)
+    {
+        picLayerHeader->PTypeField1 = VC1_BI_FRAME;
+        picLayerHeader->PTypeField2 = VC1_B_FRAME;
+    }
+    else if (picLayerHeader->FPTYPE == 7)
+    {
+        picLayerHeader->PTypeField1 = VC1_BI_FRAME;
+        picLayerHeader->PTypeField2 = VC1_BI_FRAME;
+    }
+
+    if (md->TFCNTRFLAG)
+    {
+        VC1_GET_BITS9(8, picLayerHeader->TFCNTR);
+    }
+
+    if (md->PULLDOWN == 1)
+    {
+        if (md->PSF == 1)
+        {
+            VC1_GET_BITS9(2, picLayerHeader->RPTFRM);
+        }
+        else
+        {
+            VC1_GET_BITS9(1, picLayerHeader->TFF);
+            VC1_GET_BITS9(1, picLayerHeader->RFF);
+        }
+    } else
+        /* Without pulldown the top field is implicitly first. */
+        picLayerHeader->TFF = 1;
+
+    if (md->PANSCAN_FLAG == 1)
+    {
+        VC1_GET_BITS9(1, picLayerHeader->PS_PRESENT);
+        if (picLayerHeader->PS_PRESENT)
+        {
+            /* Two windows per field pair, plus one more per repeated field. */
+            if (md->PULLDOWN)
+                number_of_pan_scan_window = 2 + picLayerHeader->RFF;
+            else
+                number_of_pan_scan_window = 2;
+            picLayerHeader->number_of_pan_scan_window =number_of_pan_scan_window;
+
+            for (i = 0; i < number_of_pan_scan_window; i++)
+            {
+                VC1_GET_BITS(18, picLayerHeader->PAN_SCAN_WINDOW[i].hoffset); /* PS_HOFFSET. */
+                VC1_GET_BITS(18, picLayerHeader->PAN_SCAN_WINDOW[i].voffset); /* PS_VOFFSET. */
+                VC1_GET_BITS(14, picLayerHeader->PAN_SCAN_WINDOW[i].width); /* PS_WIDTH. */
+                VC1_GET_BITS(14, picLayerHeader->PAN_SCAN_WINDOW[i].height); /* PS_HEIGHT. */
+            }
+        }
+    }
+    VC1_GET_BITS9(1, md->RNDCTRL);
+
+#ifdef VBP
+    picLayerHeader->RNDCTRL = md->RNDCTRL;
+#endif
+
+    VC1_GET_BITS9(1, picLayerHeader->UVSAMP);
+
+    /* REFDIST is only coded for I/P field pairs (FPTYPE <= 3). */
+    if ((md->REFDIST_FLAG == 1) && (picLayerHeader->FPTYPE <= 3))
+    {
+        int32_t tmp;
+        if ((status = vc1_DecodeHuffmanOne(ctxt, &tmp,
+            VC1_REFDIST_TBL)) != VC1_STATUS_OK)
+        {
+            return status;
+        }
+        md->REFDIST = tmp;
+    }
+
+    /* BFRACTION is only coded for B/BI field pairs (FPTYPE >= 4). */
+    if ((picLayerHeader->FPTYPE >= 4) && (picLayerHeader->FPTYPE <= 7))
+    {
+        if ((status = vc1_DecodeHuffmanPair(ctxt, VC1_BFRACTION_TBL,
+            &picLayerHeader->BFRACTION_NUM, &picLayerHeader->BFRACTION_DEN)) !=
+            VC1_STATUS_OK)
+        {
+            return status;
+        }
+    }
+
+    /* Select this field's picture type and top/bottom position from
+       CurrField and TFF. */
+    if (picLayerHeader->CurrField == 0)
+    {
+        picLayerHeader->PTYPE = picLayerHeader->PTypeField1;
+        picLayerHeader->BottomField = (uint8_t) (1 - picLayerHeader->TFF);
+    }
+    else
+    {
+        picLayerHeader->BottomField = (uint8_t) (picLayerHeader->TFF);
+        picLayerHeader->PTYPE = picLayerHeader->PTypeField2;
+    }
+
+    return vc1_ParsePictureFieldHeader_Adv(ctxt, pInfo);
+}
+
+/*------------------------------------------------------------------------------
+ * Parse picture layer. This function calls the appropriate function to further
+ * parse the picture header for advanced profile down to macroblock layer.
+ *------------------------------------------------------------------------------
+ */
+
+/* Dispatch to the appropriate type-specific parser for the remainder of
+ * an advanced-profile picture/field header, keyed on FCM and picture
+ * type (for field interlace, the type of the current field).
+ * Skipped frames need no further parsing; an unrecognized combination
+ * yields VC1_STATUS_PARSE_ERROR (the initialized default).
+ */
+vc1_Status vc1_ParsePictureFieldHeader_Adv(void* ctxt, vc1_Info *pInfo)
+{
+    vc1_Status status = VC1_STATUS_PARSE_ERROR;
+
+    if (pInfo->picLayerHeader.FCM == VC1_FCM_PROGRESSIVE)
+    {
+        if ((pInfo->picLayerHeader.PTYPE == VC1_I_FRAME) ||
+            (pInfo->picLayerHeader.PTYPE == VC1_BI_FRAME))
+        {
+            status = vc1_ParsePictureHeader_ProgressiveIpicture_Adv(ctxt, pInfo);
+        }
+        else if (pInfo->picLayerHeader.PTYPE == VC1_P_FRAME)
+            status = vc1_ParsePictureHeader_ProgressivePpicture_Adv(ctxt, pInfo);
+        else if (pInfo->picLayerHeader.PTYPE == VC1_B_FRAME)
+            status = vc1_ParsePictureHeader_ProgressiveBpicture_Adv(ctxt, pInfo);
+        else if (pInfo->picLayerHeader.PTYPE == VC1_SKIPPED_FRAME)
+            status = VC1_STATUS_OK;
+    }
+    else if (pInfo->picLayerHeader.FCM == VC1_FCM_FRAME_INTERLACE)
+    {
+        if ((pInfo->picLayerHeader.PTYPE == VC1_I_FRAME) ||
+            (pInfo->picLayerHeader.PTYPE == VC1_BI_FRAME))
+        {
+            status = vc1_ParsePictureHeader_InterlaceIpicture_Adv(ctxt, pInfo);
+        }
+        else if (pInfo->picLayerHeader.PTYPE == VC1_P_FRAME)
+            status = vc1_ParsePictureHeader_InterlacePpicture_Adv(ctxt, pInfo);
+        else if (pInfo->picLayerHeader.PTYPE == VC1_B_FRAME)
+            status = vc1_ParsePictureHeader_InterlaceBpicture_Adv(ctxt, pInfo);
+        else if (pInfo->picLayerHeader.PTYPE == VC1_SKIPPED_FRAME)
+            status = VC1_STATUS_OK;
+    }
+    else if (pInfo->picLayerHeader.FCM == VC1_FCM_FIELD_INTERLACE)
+    {
+        /* For field pictures, the per-field type (not PTYPE) selects
+           the parser. */
+        int ptype;
+        if( pInfo->picLayerHeader.CurrField == 0)
+            ptype = pInfo->picLayerHeader.PTypeField1;
+        else
+            ptype = pInfo->picLayerHeader.PTypeField2;
+
+        if ((ptype == VC1_I_FRAME) ||
+            (ptype == VC1_BI_FRAME))
+        {
+            status = vc1_ParseFieldHeader_InterlaceIpicture_Adv(ctxt, pInfo);
+        }
+        else if (ptype == VC1_P_FRAME)
+            status = vc1_ParseFieldHeader_InterlacePpicture_Adv(ctxt, pInfo);
+        else if (ptype == VC1_B_FRAME)
+            status = vc1_ParseFieldHeader_InterlaceBpicture_Adv(ctxt, pInfo);
+        else if (ptype == VC1_SKIPPED_FRAME)
+            status = VC1_STATUS_OK;
+    }
+
+    return status;
+}
diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_ppic.c b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_ppic.c
new file mode 100644
index 0000000..ba9c756
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_ppic.c
@@ -0,0 +1,149 @@
+/* ///////////////////////////////////////////////////////////////////////
+//
+// INTEL CORPORATION PROPRIETARY INFORMATION
+// This software is supplied under the terms of a license agreement or
+// nondisclosure agreement with Intel Corporation and may not be copied
+// or disclosed except in accordance with the terms of that agreement.
+// Copyright (c) 2008 Intel Corporation. All Rights Reserved.
+//
+// Description: Parses VC-1 picture layer for progressive P picture in simple
+// or main profile bitstream.
+//
+*/
+
+#include "vc1parse.h"
+
+/*------------------------------------------------------------------------------
+ * Parse picture layer. This function parses progressive P picture for simple
+ * or main profile bitstream. This parser starts after PTYPE was parsed but
+ * stops before parsing of macroblock layer.
+ * Table 19 of SMPTE 421M after processing up to PTYPE for P picture.
+ *------------------------------------------------------------------------------
+ */
+
+vc1_Status vc1_ParsePictureHeader_ProgressivePpicture(void* ctxt, vc1_Info *pInfo)
+{
+ uint8_t bit_count;
+ const uint8_t *table;
+ uint32_t tempValue;
+ vc1_Status status = VC1_STATUS_OK;
+ vc1_metadata_t *md = &pInfo->metadata;
+ vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader;
+
+ /* rounding control is implied for simple and main profile, SMPTE 421M 8.3.7.
+ It toggles back and forth between 0 and 1 for P frames */
+ if (md->PROFILE != VC1_PROFILE_ADVANCED)
+ {
+ picLayerHeader->RNDCTRL = md->RNDCTRL ^ 1 ;
+ md->RNDCTRL = picLayerHeader->RNDCTRL;
+ }
+
+ VC1_GET_BITS9(5, picLayerHeader->PQINDEX);
+ if ((status = vc1_CalculatePQuant(pInfo)) != VC1_STATUS_OK)
+ return status;
+
+ if (picLayerHeader->PQINDEX <= 8)
+ {
+ VC1_GET_BITS9(1, picLayerHeader->HALFQP);
+ }
+ else picLayerHeader->HALFQP=0;
+
+ if (md->QUANTIZER == 1)
+ {
+ VC1_GET_BITS9(1, picLayerHeader->PQUANTIZER);
+ picLayerHeader->UniformQuant = picLayerHeader->PQUANTIZER;
+ }
+
+ /* MVRANGE. */
+ if ((status = vc1_MVRangeDecode(ctxt, pInfo)) != VC1_STATUS_OK)
+ return status;
+
+ if (md->MULTIRES == 1)
+ VC1_GET_BITS9(2, tempValue); /* RESPIC. */
+
+ if (picLayerHeader->PQUANT > 12)
+ table = VC1_MVMODE_LOW_TBL;
+ else
+ table = VC1_MVMODE_HIGH_TBL;
+
+ bit_count = 0;
+ VC1_GET_BITS9(1, picLayerHeader->MVMODE);
+ while ((picLayerHeader->MVMODE == 0) && (bit_count < 3))
+ {
+ VC1_GET_BITS9(1, picLayerHeader->MVMODE);
+ bit_count++;
+ }
+ if (bit_count == 3)
+ bit_count += picLayerHeader->MVMODE;
+ picLayerHeader->MVMODE = table[bit_count];
+
+ if (picLayerHeader->MVMODE == VC1_MVMODE_INTENSCOMP)
+ {
+ bit_count = 0;
+ VC1_GET_BITS9(1, picLayerHeader->MVMODE2);
+ while ((picLayerHeader->MVMODE2 == 0) && (bit_count < 2))
+ {
+ VC1_GET_BITS9(1, picLayerHeader->MVMODE2);
+ bit_count++;
+ }
+ if (bit_count == 2 && picLayerHeader->MVMODE2 == 0)
+ bit_count++;
+ picLayerHeader->MVMODE2 = table[bit_count];
+ VC1_GET_BITS9(6, picLayerHeader->LUMSCALE);
+ VC1_GET_BITS9(6, picLayerHeader->LUMSHIFT);
+ }
+#ifdef VBP
+ else
+ picLayerHeader->MVMODE2 = 0;
+#else
+ else
+ picLayerHeader->MVMODE2 = picLayerHeader->MVMODE;
+#endif
+
+ if ((picLayerHeader->MVMODE == VC1_MVMODE_MIXED_MV) ||
+ ((picLayerHeader->MVMODE == VC1_MVMODE_INTENSCOMP) &&
+ (picLayerHeader->MVMODE2 == VC1_MVMODE_MIXED_MV)))
+ {
+ if ((status = vc1_DecodeBitplane(ctxt, pInfo,
+ md->widthMB, md->heightMB, BPP_MVTYPEMB))
+ != VC1_STATUS_OK)
+ {
+ return status;
+ }
+ }
+
+ if ((status = vc1_DecodeBitplane(ctxt, pInfo,
+ md->widthMB, md->heightMB, BPP_SKIPMB)) != VC1_STATUS_OK)
+ {
+ return status;
+ }
+
+ VC1_GET_BITS9(2, picLayerHeader->MVTAB);
+ VC1_GET_BITS9(2, picLayerHeader->CBPTAB);
+
+ if ((status = vc1_VOPDQuant(ctxt, pInfo)) != VC1_STATUS_OK)
+ return status;
+
+ if (md->VSTRANSFORM == 1)
+ {
+ VC1_GET_BITS9(1, picLayerHeader->TTMBF);
+ if (picLayerHeader->TTMBF == 1)
+ {
+ VC1_GET_BITS9(2, picLayerHeader->TTFRM);
+ }
+ }
+
+ VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM);
+ if (picLayerHeader->TRANSACFRM == 1)
+ {
+ VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM);
+ picLayerHeader->TRANSACFRM += 2;
+ }
+
+ VC1_GET_BITS9(1, picLayerHeader->TRANSDCTAB);
+
+ /* Skip parsing of macroblock layer. */
+
+ return status;
+}
+
diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_ppic_adv.c b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_ppic_adv.c
new file mode 100644
index 0000000..144c138
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_ppic_adv.c
@@ -0,0 +1,368 @@
+/* ///////////////////////////////////////////////////////////////////////
+//
+// INTEL CORPORATION PROPRIETARY INFORMATION
+// This software is supplied under the terms of a license agreement or
+// nondisclosure agreement with Intel Corporation and may not be copied
+// or disclosed except in accordance with the terms of that agreement.
+// Copyright (c) 2008 Intel Corporation. All Rights Reserved.
+//
+// Description: Parses VC-1 picture layer for progressive P picture in advanced
+// profile bitstream.
+//
+*/
+
+#include "vc1parse.h"
+#include "viddec_fw_debug.h"
+/*------------------------------------------------------------------------------
+ * Parse picture layer. This function parses progressive P picture for advanced
+ * profile bitstream.
+ * Table 20 of SMPTE 421M after processing up to POSTPROC by
+ * vc1_ParsePictureHeader_Adv() but stopping before processing of macroblock
+ * layer.
+ *------------------------------------------------------------------------------
+ */
+
+vc1_Status vc1_ParsePictureHeader_ProgressivePpicture_Adv(void* ctxt, vc1_Info *pInfo)
+{
+ uint8_t bit_count;
+ const uint8_t *table;
+ vc1_Status status = VC1_STATUS_OK;
+ vc1_metadata_t *md = &pInfo->metadata;
+ vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader;
+
+ /* MVRANGE. */
+ if ((status = vc1_MVRangeDecode(ctxt, pInfo)) != VC1_STATUS_OK)
+ return status;
+
+ if (picLayerHeader->PQUANT > 12)
+ table = VC1_MVMODE_LOW_TBL;
+ else
+ table = VC1_MVMODE_HIGH_TBL;
+
+ bit_count = 0;
+ VC1_GET_BITS9(1, picLayerHeader->MVMODE);
+ while ((picLayerHeader->MVMODE == 0) && (bit_count < 3))
+ {
+ VC1_GET_BITS9(1, picLayerHeader->MVMODE);
+ bit_count++;
+ }
+ if (bit_count == 3)
+ bit_count += picLayerHeader->MVMODE;
+ picLayerHeader->MVMODE = table[bit_count];
+
+ if (picLayerHeader->MVMODE == VC1_MVMODE_INTENSCOMP)
+ {
+ bit_count = 0;
+ VC1_GET_BITS9(1, picLayerHeader->MVMODE2);
+ while ((picLayerHeader->MVMODE2 == 0) && (bit_count < 2))
+ {
+ VC1_GET_BITS9(1, picLayerHeader->MVMODE2);
+ bit_count++;
+ }
+ if (bit_count == 2 && picLayerHeader->MVMODE2 == 0)
+ bit_count++;
+ picLayerHeader->MVMODE2 = table[bit_count];
+ VC1_GET_BITS9(6, picLayerHeader->LUMSCALE);
+ VC1_GET_BITS9(6, picLayerHeader->LUMSHIFT);
+ md->LUMSCALE2 = picLayerHeader->LUMSCALE;
+ md->LUMSHIFT2 = picLayerHeader->LUMSHIFT;
+ }
+ else
+#ifdef VBP
+ picLayerHeader->MVMODE2 = 0;
+#else
+ picLayerHeader->MVMODE2 = picLayerHeader->MVMODE;
+#endif
+
+ if ((picLayerHeader->MVMODE == VC1_MVMODE_MIXED_MV) ||
+ ((picLayerHeader->MVMODE == VC1_MVMODE_INTENSCOMP) &&
+ (picLayerHeader->MVMODE2 == VC1_MVMODE_MIXED_MV)))
+ {
+ if ((status = vc1_DecodeBitplane(ctxt, pInfo,
+ md->widthMB, md->heightMB, BPP_MVTYPEMB)) !=
+ VC1_STATUS_OK)
+ {
+ return status;
+ }
+ }
+
+ if ((status = vc1_DecodeBitplane(ctxt, pInfo,
+ md->widthMB, md->heightMB, BPP_SKIPMB)) != VC1_STATUS_OK)
+ {
+ return status;
+ }
+
+ VC1_GET_BITS9(2, picLayerHeader->MVTAB);
+ VC1_GET_BITS9(2, picLayerHeader->CBPTAB);
+
+ if ((status = vc1_VOPDQuant(ctxt, pInfo)) != VC1_STATUS_OK)
+ return status;
+
+ if (md->VSTRANSFORM == 1)
+ {
+ VC1_GET_BITS9(1, picLayerHeader->TTMBF);
+ if (picLayerHeader->TTMBF == 1)
+ {
+ VC1_GET_BITS9(2, picLayerHeader->TTFRM);
+ }
+ }
+
+ VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM);
+ if (picLayerHeader->TRANSACFRM == 1)
+ {
+ VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM);
+ picLayerHeader->TRANSACFRM += 2;
+ }
+
+ VC1_GET_BITS9(1, picLayerHeader->TRANSDCTAB);
+
+ /* Skip parsing of macroblock layer. */
+
+ return status;
+}
+
+/*------------------------------------------------------------------------------
+ * Parse picture layer. This function parses interlace P frame for advanced
+ * profile bitstream.
+ * Table 83 of SMPTE 421M after processing up to POSTPROC by
+ * vc1_ParsePictureHeader_Adv() but stopping before processing of macroblock
+ * layer.
+ *------------------------------------------------------------------------------
+ */
+
+vc1_Status vc1_ParsePictureHeader_InterlacePpicture_Adv(void* ctxt, vc1_Info *pInfo)
+{
+ vc1_Status status = VC1_STATUS_OK;
+ vc1_metadata_t *md = &pInfo->metadata;
+ vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader;
+
+ /* MVRANGE. */
+ if ((status = vc1_MVRangeDecode(ctxt, pInfo)) != VC1_STATUS_OK)
+ return status;
+
+ /* DMVRANGE. */
+ if ((status = vc1_DMVRangeDecode(ctxt, pInfo)) != VC1_STATUS_OK)
+ return status;
+
+ VC1_GET_BITS9(1, picLayerHeader->MV4SWITCH);
+
+ VC1_GET_BITS9(1, picLayerHeader->INTCOMP);
+ if (picLayerHeader->INTCOMP)
+ {
+ VC1_GET_BITS9(6, picLayerHeader->LUMSCALE);
+ VC1_GET_BITS9(6, picLayerHeader->LUMSHIFT);
+ md->LUMSCALE2 = picLayerHeader->LUMSCALE;
+ md->LUMSHIFT2 = picLayerHeader->LUMSHIFT;
+ }
+
+ if ((status = vc1_DecodeBitplane(ctxt, pInfo,
+ md->widthMB, md->heightMB, BPP_SKIPMB)) != VC1_STATUS_OK)
+ {
+ return status;
+ }
+
+ VC1_GET_BITS9(2, picLayerHeader->MBMODETAB);
+ VC1_GET_BITS9(2, picLayerHeader->MVTAB); /* IMVTAB. */
+ VC1_GET_BITS9(3, picLayerHeader->CBPTAB); /* ICBPTAB. */
+ VC1_GET_BITS9(2, picLayerHeader->MV2BPTAB); /* 2MVBPTAB. */
+
+ if (picLayerHeader->MV4SWITCH == 1)
+ {
+ VC1_GET_BITS9(2, picLayerHeader->MV4BPTAB); /* 4MVBPTAB. */
+ }
+
+ if ((status = vc1_VOPDQuant(ctxt, pInfo)) != VC1_STATUS_OK)
+ return status;
+
+ if (md->VSTRANSFORM == 1)
+ {
+ VC1_GET_BITS9(1, picLayerHeader->TTMBF);
+ if (picLayerHeader->TTMBF == 1)
+ {
+ VC1_GET_BITS9(2, picLayerHeader->TTFRM);
+ }
+ }
+
+ VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM);
+ if (picLayerHeader->TRANSACFRM == 1)
+ {
+ VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM);
+ picLayerHeader->TRANSACFRM += 2;
+ }
+
+ VC1_GET_BITS9(1, picLayerHeader->TRANSDCTAB);
+
+ /* Skip parsing of macroblock layer. */
+
+ return status;
+}
+
+/*------------------------------------------------------------------------------
+ * Parse picture layer. This function parses interlace P field for advanced
+ * profile bitstream.
+ * Table 88 of SMPTE 421M after processing up to BFRACTION by
+ * vc1_ParseFieldHeader_Adv() but stopping before processing of macroblock
+ * layer.
+ *------------------------------------------------------------------------------
+ */
+
+vc1_Status vc1_ParseFieldHeader_InterlacePpicture_Adv(void* ctxt, vc1_Info *pInfo)
+{
+ uint8_t bit_count;
+ const uint8_t *table;
+ vc1_Status status = VC1_STATUS_OK;
+ vc1_metadata_t *md = &pInfo->metadata;
+ vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader;
+
+
+ VC1_GET_BITS9(5, picLayerHeader->PQINDEX);
+ if ((status = vc1_CalculatePQuant(pInfo)) != VC1_STATUS_OK)
+ return status;
+
+ if (picLayerHeader->PQINDEX <= 8)
+ {
+ VC1_GET_BITS9(1, picLayerHeader->HALFQP);
+ }
+ else
+ picLayerHeader->HALFQP = 0;
+
+
+ if (md->QUANTIZER == 1)
+ {
+ VC1_GET_BITS9(1, picLayerHeader->PQUANTIZER);
+ picLayerHeader->UniformQuant = picLayerHeader->PQUANTIZER;
+ }
+
+ if (md->POSTPROCFLAG == 1)
+ {
+ VC1_GET_BITS9(2, picLayerHeader->POSTPROC);
+ }
+
+ VC1_GET_BITS9(1, picLayerHeader->NUMREF);
+
+ if (picLayerHeader->NUMREF == 0)
+ {
+ VC1_GET_BITS9(1, picLayerHeader->REFFIELD);
+ }
+
+ if ((status = vc1_MVRangeDecode(ctxt, pInfo)) != VC1_STATUS_OK) {
+ DEB("Error in vc1_MVRangeDecode \n");
+ return status;
+ }
+
+ if ((status = vc1_DMVRangeDecode(ctxt, pInfo)) != VC1_STATUS_OK)
+ return status;
+
+ if (picLayerHeader->PQUANT > 12)
+ table = VC1_MVMODE_LOW_TBL;
+ else
+ table = VC1_MVMODE_HIGH_TBL;
+
+ bit_count = 0;
+ VC1_GET_BITS9(1, picLayerHeader->MVMODE);
+ while ((picLayerHeader->MVMODE == 0) && (bit_count < 2))
+ {
+ VC1_GET_BITS9(1, picLayerHeader->MVMODE);
+ bit_count++;
+ }
+ if (bit_count == 2 && picLayerHeader->MVMODE == 0) {
+ VC1_GET_BITS9(1, picLayerHeader->MVMODE);
+
+ if ( picLayerHeader->MVMODE == 1)
+ bit_count ++;
+
+ bit_count++;
+ }
+ picLayerHeader->MVMODE = table[bit_count];
+
+ if (picLayerHeader->MVMODE == VC1_MVMODE_INTENSCOMP)
+ {
+ bit_count = 0;
+ VC1_GET_BITS9(1, picLayerHeader->MVMODE2);
+ while ((picLayerHeader->MVMODE2 == 0) && (bit_count < 2))
+ {
+ VC1_GET_BITS9(1, picLayerHeader->MVMODE2);
+ bit_count++;
+ }
+ if (bit_count == 2 && picLayerHeader->MVMODE2 == 0)
+ bit_count++;
+ picLayerHeader->MVMODE2 = table[bit_count];
+
+ VC1_GET_BITS9(1, md->INTCOMPFIELD);
+ if (md->INTCOMPFIELD == 1)
+ md->INTCOMPFIELD = VC1_INTCOMP_BOTH_FIELD;
+ else
+ {
+ VC1_GET_BITS9(1, md->INTCOMPFIELD);
+ if(md->INTCOMPFIELD == 1)
+ md->INTCOMPFIELD = VC1_INTCOMP_BOTTOM_FIELD;
+ else
+ md->INTCOMPFIELD = VC1_INTCOMP_TOP_FIELD;
+ }
+ VC1_GET_BITS9(6, picLayerHeader->LUMSCALE); /* LUMSCALE1. */
+ VC1_GET_BITS9(6, picLayerHeader->LUMSHIFT); /* LUMSHIFT1. */
+ if ( md->INTCOMPFIELD == VC1_INTCOMP_BOTTOM_FIELD ) {
+ md->LUMSCALE2 = picLayerHeader->LUMSCALE;
+ md->LUMSHIFT2 = picLayerHeader->LUMSHIFT;
+ }
+ if (md->INTCOMPFIELD == VC1_INTCOMP_BOTH_FIELD)
+ {
+ VC1_GET_BITS9(6, md->LUMSCALE2);
+ VC1_GET_BITS9(6, md->LUMSHIFT2);
+ }
+ }
+ else
+#ifdef VBP
+ picLayerHeader->MVMODE2 = 0;
+#else
+ picLayerHeader->MVMODE2 = picLayerHeader->MVMODE;
+#endif
+
+ VC1_GET_BITS9(3, picLayerHeader->MBMODETAB);
+
+ if (picLayerHeader->NUMREF)
+ {
+ VC1_GET_BITS9(3, picLayerHeader->MVTAB); /* IMVTAB. */
+ }
+ else
+ {
+ VC1_GET_BITS9(2, picLayerHeader->MVTAB); /* IMVTAB. */
+ }
+
+ VC1_GET_BITS9(3, picLayerHeader->CBPTAB); /* ICBPTAB. */
+
+#ifdef VBP
+ if (picLayerHeader->MVMODE == VC1_MVMODE_MIXED_MV)
+#else
+ if (picLayerHeader->MVMODE2 == VC1_MVMODE_MIXED_MV)
+#endif
+ {
+ VC1_GET_BITS9(2, picLayerHeader->MV4BPTAB); /* 4MVBPTAB. */
+ }
+
+ if ((status = vc1_VOPDQuant(ctxt, pInfo)) != VC1_STATUS_OK)
+ return status;
+
+ if (md->VSTRANSFORM == 1)
+ {
+ VC1_GET_BITS9(1, picLayerHeader->TTMBF);
+ if (picLayerHeader->TTMBF == 1)
+ {
+ VC1_GET_BITS9(2, picLayerHeader->TTFRM);
+ }
+ }
+
+ VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM);
+ if (picLayerHeader->TRANSACFRM == 1)
+ {
+ VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM);
+ picLayerHeader->TRANSACFRM += 2;
+ }
+ picLayerHeader->TRANSACFRM2 = 0;
+
+ VC1_GET_BITS9(1, picLayerHeader->TRANSDCTAB);
+
+ /* Skip parsing of macroblock layer. */
+
+ return status;
+}
diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_vopdq.c b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_vopdq.c
new file mode 100644
index 0000000..559a0dd
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_vopdq.c
@@ -0,0 +1,130 @@
+/* ///////////////////////////////////////////////////////////////////////
+//
+// INTEL CORPORATION PROPRIETARY INFORMATION
+// This software is supplied under the terms of a license agreement or
+// nondisclosure agreement with Intel Corporation and may not be copied
+// or disclosed except in accordance with the terms of that agreement.
+// Copyright (c) 2008 Intel Corporation. All Rights Reserved.
+//
+// Description: Parses VC-1 syntax elements VOPDQUANT and DQUANT.
+//
+*/
+
+#include "vc1parse.h"
+
+#define VC1_UNDEF_PQUANT 0
+
+static const uint8_t MapPQIndToQuant_Impl[] =
+{
+ VC1_UNDEF_PQUANT,
+ 1, 2, 3, 4, 5, 6, 7, 8,
+ 6, 7, 8, 9, 10, 11, 12, 13,
+ 14, 15, 16, 17, 18, 19, 20, 21,
+ 22, 23, 24, 25, 27, 29, 31
+};
+
+/*------------------------------------------------------------------------------
+ * Parse syntax element VOPDQuant as defined in Table 24 of SMPTE 421M.
+ *------------------------------------------------------------------------------
+ */
+
+vc1_Status vc1_VOPDQuant(void* ctxt, vc1_Info *pInfo)
+{
+ vc1_Status status = VC1_STATUS_OK;
+ vc1_metadata_t *md = &pInfo->metadata;
+ vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader;
+
+ if (md->DQUANT == 0)
+ return status;
+
+ if (md->DQUANT == 2)
+ {
+ VC1_GET_BITS9(3, picLayerHeader->PQDIFF);
+ if (picLayerHeader->PQDIFF == 7)
+ {
+ VC1_GET_BITS9(5, picLayerHeader->ABSPQ);
+ }
+ }
+ else
+ {
+ VC1_GET_BITS9(1, picLayerHeader->DQUANTFRM);
+ if (picLayerHeader->DQUANTFRM == 1)
+ {
+ VC1_GET_BITS9(2, picLayerHeader->DQPROFILE);
+ if (picLayerHeader->DQPROFILE == VC1_DQPROFILE_SNGLEDGES)
+ {
+ VC1_GET_BITS9(2, picLayerHeader->DQSBEDGE);
+ }
+ else if (picLayerHeader->DQPROFILE == VC1_DQPROFILE_DBLEDGES)
+ {
+#ifdef VBP
+ VC1_GET_BITS9(2, picLayerHeader->DQDBEDGE);
+#else
+ VC1_GET_BITS9(2, picLayerHeader->DQSBEDGE); /* DQDBEDGE. */
+#endif
+ }
+ else if (picLayerHeader->DQPROFILE == VC1_DQPROFILE_ALLMBLKS)
+ {
+ VC1_GET_BITS9(1, picLayerHeader->DQBILEVEL);
+ }
+ if (! (picLayerHeader->DQPROFILE == VC1_DQPROFILE_ALLMBLKS &&
+ picLayerHeader->DQBILEVEL == 0))
+ {
+ VC1_GET_BITS9(3, picLayerHeader->PQDIFF);
+ if (picLayerHeader->PQDIFF == 7)
+ {
+ VC1_GET_BITS9(5, picLayerHeader->ABSPQ);
+ }
+ }
+ }
+ }
+#ifdef VBP
+ if ((picLayerHeader->DQUANTFRM == 1 && md->DQUANT == 1) || (md->DQUANT == 2))
+ {
+ if (picLayerHeader->PQDIFF == 7)
+ {
+ picLayerHeader->ALTPQUANT = picLayerHeader->ABSPQ;
+ }
+ else
+ {
+ picLayerHeader->ALTPQUANT = picLayerHeader->PQUANT + picLayerHeader->PQDIFF + 1;
+ }
+ }
+#endif
+ return status;
+}
+
+/*------------------------------------------------------------------------------
+ * Compute value for PQUANT syntax element that does not exist in bitstreams for
+ * progressive I and BI pictures.
+ *------------------------------------------------------------------------------
+ */
+
+vc1_Status vc1_CalculatePQuant(vc1_Info *pInfo)
+{
+ vc1_Status status = VC1_STATUS_OK;
+ vc1_metadata_t *md = &pInfo->metadata;
+ vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader;
+
+ picLayerHeader->PQUANT = picLayerHeader->PQINDEX;
+ picLayerHeader->UniformQuant = VC1_QUANTIZER_UNIFORM;
+
+ if (md->QUANTIZER == 0)
+ {
+ if (picLayerHeader->PQINDEX < 9)
+ picLayerHeader->UniformQuant = VC1_QUANTIZER_UNIFORM;
+ else
+ {
+ picLayerHeader->UniformQuant = VC1_QUANTIZER_NONUNIFORM;
+ picLayerHeader->PQUANT =
+ MapPQIndToQuant_Impl[picLayerHeader->PQINDEX];
+ }
+ }
+ else
+ {
+ if (md->QUANTIZER == 2)
+ picLayerHeader->UniformQuant = VC1_QUANTIZER_NONUNIFORM;
+ }
+
+ return status;
+}
diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/viddec_vc1_parse.c b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/viddec_vc1_parse.c
new file mode 100644
index 0000000..6af6f09
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/viddec_vc1_parse.c
@@ -0,0 +1,345 @@
+#include "viddec_fw_debug.h" // For DEB
+#include "viddec_parser_ops.h" // For parser helper functions
+#include "vc1.h" // For the parser structure
+#include "vc1parse.h" // For vc1 parser helper functions
+#ifdef VBP
+#include "viddec_pm.h"
+#endif
+#define vc1_is_frame_start_code( ch ) \
+ (( vc1_SCField == ch ||vc1_SCSlice == ch || vc1_SCFrameHeader == ch ) ? 1 : 0)
+
+/* init function */
+#ifdef VBP
+void viddec_vc1_init(void *ctxt, uint32_t *persist_mem, uint32_t preserve)
+#else
+static void viddec_vc1_init(void *ctxt, uint32_t *persist_mem, uint32_t preserve)
+#endif
+{
+ vc1_viddec_parser_t *parser = ctxt;
+ int i;
+
+ persist_mem = persist_mem;
+
+ for(i=0; i<VC1_NUM_REFERENCE_FRAMES; i++)
+ {
+ parser->ref_frame[i].id = -1; /* first I frame checks that value */
+ parser->ref_frame[i].anchor[0] = 1;
+ parser->ref_frame[i].anchor[1] = 1;
+ parser->ref_frame[i].intcomp_top = 0;
+ parser->ref_frame[i].intcomp_bot = 0;
+ }
+
+ parser->intcomp_top[0] = 0;
+ parser->intcomp_bot[0] = 0;
+ parser->intcomp_top[1] = 0;
+ parser->intcomp_bot[1] = 0;
+ parser->is_reference_picture = false;
+
+ memset(&parser->info.picLayerHeader, 0, sizeof(vc1_PictureLayerHeader));
+
+ if(preserve)
+ {
+ parser->sc_seen &= VC1_EP_MASK;
+ parser->sc_seen_since_last_wkld &= VC1_EP_MASK;
+ }
+ else
+ {
+ parser->sc_seen = VC1_SC_INVALID;
+ parser->sc_seen_since_last_wkld = VC1_SC_INVALID;
+ memset(&parser->info.metadata, 0, sizeof(parser->info.metadata));
+ }
+
+ return;
+} // viddec_vc1_init
+
+static void vc1_swap_intcomp(vc1_viddec_parser_t *parser)
+{
+ parser->intcomp_top[1] = parser->intcomp_top[0];
+ parser->intcomp_bot[1] = parser->intcomp_bot[0];
+ parser->intcomp_top[0] = 0;
+ parser->intcomp_bot[0] = 0;
+
+ return;
+} // vc1_swap_intcomp
+
+#ifdef VBP
+uint32_t viddec_vc1_parse(void *parent, void *ctxt)
+#else
+static uint32_t viddec_vc1_parse(void *parent, void *ctxt)
+#endif
+{
+ vc1_viddec_parser_t *parser = ctxt;
+ uint32_t sc=0x0;
+ int32_t ret=0, status=0;
+
+#ifdef VBP
+ /* This works only if there is one slice and no start codes */
+ /* A better fix would be to insert start codes if there aren't any. */
+ ret = viddec_pm_peek_bits(parent, &sc, 32);
+ if ((sc > 0x0100) && (sc < 0x0200)) /* a Start code will be in this range. */
+ {
+ ret = viddec_pm_get_bits(parent, &sc, 32);
+ }
+ else
+ {
+ /* In cases where we get a buffer with no start codes, we assume */
+ /* that this is a frame of data. We may have to fix this later. */
+ sc = vc1_SCFrameHeader;
+ }
+#else
+ ret = viddec_pm_get_bits(parent, &sc, 32);
+#endif
+ sc = sc & 0xFF;
+ parser->is_frame_start = (sc == vc1_SCFrameHeader);
+ DEB("START_CODE = %02x\n", sc);
+ switch( sc )
+ {
+ case vc1_SCSequenceHeader:
+ {
+ uint32_t data=0;
+ parser->ref_frame[0].anchor[0] = 1;
+ parser->ref_frame[0].anchor[1] = 1;
+ parser->ref_frame[1].anchor[0] = 1;
+ parser->ref_frame[1].anchor[1] = 1;
+ memset( &parser->info.metadata, 0, sizeof(parser->info.metadata));
+ /* look if we have a rcv header for main or simple profile */
+ ret = viddec_pm_peek_bits(parent,&data ,2);
+
+ if (data == 3)
+ {
+ status = vc1_ParseSequenceLayer(parent, &parser->info);
+ }
+ else
+ {
+ status = vc1_ParseRCVSequenceLayer(parent, &parser->info);
+ }
+ parser->sc_seen = VC1_SC_SEQ;
+ parser->sc_seen_since_last_wkld |= VC1_SC_SEQ;
+#ifdef VBP
+ parser->start_code = VC1_SC_SEQ;
+#endif
+ break;
+ }
+
+ case vc1_SCEntryPointHeader:
+ {
+ status = vc1_ParseEntryPointLayer(parent, &parser->info);
+ parser->sc_seen |= VC1_SC_EP;
+ // Clear all bits indicating data below ep header
+ parser->sc_seen &= VC1_EP_MASK;
+ parser->sc_seen_since_last_wkld |= VC1_SC_EP;
+#ifdef VBP
+ parser->start_code = VC1_SC_EP;
+#endif
+ break;
+ }
+
+ case vc1_SCFrameHeader:
+ {
+ memset(&parser->info.picLayerHeader, 0, sizeof(vc1_PictureLayerHeader));
+ status = vc1_ParsePictureLayer(parent, &parser->info);
+ if((parser->info.picLayerHeader.PTypeField1 == VC1_I_FRAME) ||
+ (parser->info.picLayerHeader.PTypeField1 == VC1_P_FRAME) ||
+ (parser->info.picLayerHeader.PTYPE == VC1_I_FRAME) ||
+ (parser->info.picLayerHeader.PTYPE == VC1_P_FRAME))
+ {
+ vc1_swap_intcomp(parser);
+ }
+ parser->sc_seen |= VC1_SC_FRM;
+ // Clear all bits indicating data below frm header
+ parser->sc_seen &= VC1_FRM_MASK;
+ parser->sc_seen_since_last_wkld |= VC1_SC_FRM;
+ vc1_start_new_frame ( parent, parser );
+#ifdef VBP
+ parser->start_code = VC1_SC_FRM;
+#endif
+ break;
+ }
+
+ case vc1_SCSlice:
+ {
+ status = vc1_ParseSliceLayer(parent, &parser->info);
+ parser->sc_seen_since_last_wkld |= VC1_SC_SLC;
+#ifdef VBP
+ parser->start_code = VC1_SC_SLC;
+#endif
+ break;
+ }
+
+ case vc1_SCField:
+ {
+ parser->info.picLayerHeader.SLICE_ADDR = 0;
+ parser->info.picLayerHeader.CurrField = 1;
+ parser->info.picLayerHeader.REFFIELD = 0;
+ parser->info.picLayerHeader.NUMREF = 0;
+ parser->info.picLayerHeader.MBMODETAB = 0;
+ parser->info.picLayerHeader.MV4SWITCH = 0;
+ parser->info.picLayerHeader.DMVRANGE = 0;
+ parser->info.picLayerHeader.MVTAB = 0;
+ parser->info.picLayerHeader.MVMODE = 0;
+ parser->info.picLayerHeader.MVRANGE = 0;
+#ifdef VBP
+ parser->info.picLayerHeader.raw_MVTYPEMB = 0;
+ parser->info.picLayerHeader.raw_DIRECTMB = 0;
+ parser->info.picLayerHeader.raw_SKIPMB = 0;
+ parser->info.picLayerHeader.raw_ACPRED = 0;
+ parser->info.picLayerHeader.raw_FIELDTX = 0;
+ parser->info.picLayerHeader.raw_OVERFLAGS = 0;
+ parser->info.picLayerHeader.raw_FORWARDMB = 0;
+
+ memset(&(parser->info.picLayerHeader.MVTYPEMB), 0, sizeof(vc1_Bitplane));
+ memset(&(parser->info.picLayerHeader.DIRECTMB), 0, sizeof(vc1_Bitplane));
+ memset(&(parser->info.picLayerHeader.SKIPMB), 0, sizeof(vc1_Bitplane));
+ memset(&(parser->info.picLayerHeader.ACPRED), 0, sizeof(vc1_Bitplane));
+ memset(&(parser->info.picLayerHeader.FIELDTX), 0, sizeof(vc1_Bitplane));
+ memset(&(parser->info.picLayerHeader.OVERFLAGS), 0, sizeof(vc1_Bitplane));
+ memset(&(parser->info.picLayerHeader.FORWARDMB), 0, sizeof(vc1_Bitplane));
+
+ parser->info.picLayerHeader.ALTPQUANT = 0;
+ parser->info.picLayerHeader.DQDBEDGE = 0;
+ #endif
+
+ status = vc1_ParseFieldLayer(parent, &parser->info);
+ if((parser->info.picLayerHeader.PTypeField2 == VC1_I_FRAME) ||
+ (parser->info.picLayerHeader.PTypeField2 == VC1_P_FRAME))
+ {
+ vc1_swap_intcomp(parser);
+ }
+
+ parser->sc_seen |= VC1_SC_FLD;
+ parser->sc_seen_since_last_wkld |= VC1_SC_FLD;
+#ifdef VBP
+ parser->start_code = VC1_SC_FLD;
+#endif
+ break;
+ }
+
+ case vc1_SCSequenceUser:
+ case vc1_SCEntryPointUser:
+ case vc1_SCFrameUser:
+ case vc1_SCSliceUser:
+ case vc1_SCFieldUser:
+ {/* Handle user data */
+ status = vc1_ParseAndAppendUserData(parent, sc); //parse and add items
+ parser->sc_seen_since_last_wkld |= VC1_SC_UD;
+#ifdef VBP
+ parser->start_code = VC1_SC_UD;
+#endif
+ break;
+ }
+
+ case vc1_SCEndOfSequence:
+ {
+ parser->sc_seen = VC1_SC_INVALID;
+ parser->sc_seen_since_last_wkld |= VC1_SC_INVALID;
+#ifdef VBP
+ parser->start_code = VC1_SC_INVALID;
+#endif
+ break;
+ }
+ default: /* Any other SC that is not handled */
+ {
+ DEB("SC = %02x - unhandled\n", sc );
+#ifdef VBP
+ parser->start_code = VC1_SC_INVALID;
+#endif
+ break;
+ }
+ }
+
+ if( vc1_is_frame_start_code( sc ) ) {
+ vc1_parse_emit_current_frame( parent, parser );
+ }
+
+ return VIDDEC_PARSE_SUCESS;
+} // viddec_vc1_parse
+
+/**
+ If a picture header was seen and the next start code is a sequence header, entrypoint header,
+ end of sequence or another frame header, this api returns frame done.
+ If a sequence header and a frame header was not seen before this point, all the
+ information needed for decode is not present and parser errors are reported.
+*/
+#ifdef VBP
+uint32_t viddec_vc1_wkld_done(void *parent, void *ctxt, unsigned int next_sc, uint32_t *codec_specific_errors)
+#else
+static uint32_t viddec_vc1_wkld_done(void *parent, void *ctxt, unsigned int next_sc, uint32_t *codec_specific_errors)
+#endif
+{
+ vc1_viddec_parser_t *parser = ctxt;
+ int ret = VIDDEC_PARSE_SUCESS;
+ parent = parent;
+ switch (next_sc)
+ {
+ case vc1_SCFrameHeader:
+ if(((parser->sc_seen_since_last_wkld & VC1_SC_EP) ||
+ (parser->sc_seen_since_last_wkld & VC1_SC_SEQ)) &&
+ (!(parser->sc_seen_since_last_wkld & VC1_SC_FRM)))
+ {
+ break;
+ }
+ // Deliberate fall-thru case
+ case vc1_SCEntryPointHeader:
+ if((next_sc == vc1_SCEntryPointHeader) &&
+ (parser->sc_seen_since_last_wkld & VC1_SC_SEQ) &&
+ (!(parser->sc_seen_since_last_wkld & VC1_SC_EP)))
+ {
+ break;
+ }
+ // Deliberate fall-thru case
+ case vc1_SCSequenceHeader:
+ case vc1_SCEndOfSequence:
+ case VIDDEC_PARSE_EOS:
+ case VIDDEC_PARSE_DISCONTINUITY:
+ ret = VIDDEC_PARSE_FRMDONE;
+ // Set errors for progressive
+ if((parser->sc_seen & VC1_SC_SEQ) && (parser->sc_seen & VC1_SC_FRM))
+ *codec_specific_errors = 0;
+ else
+ *codec_specific_errors |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE;
+ vc1_end_frame(parser);
+ parser->sc_seen_since_last_wkld = VC1_SC_INVALID;
+ // TODO: Need to check for interlaced
+ break;
+ default:
+ ret = VIDDEC_PARSE_SUCESS;
+ break;
+ } //switch
+ DEB("sc: 0x%x, sc_seen: 0x%x, sc_since_last_wkld:%d, error:%d, ret: %d\n",
+ next_sc, parser->sc_seen, parser->sc_seen_since_last_wkld,
+ *codec_specific_errors, ret);
+
+ return ret;
+} // viddec_vc1_wkld_done
+
+#ifdef VBP
+void viddec_vc1_get_context_size(viddec_parser_memory_sizes_t *size)
+#else
+static void viddec_vc1_get_context_size(viddec_parser_memory_sizes_t *size)
+#endif
+{
+ size->context_size = sizeof(vc1_viddec_parser_t);
+ size->persist_size = 0;
+ return;
+} // viddec_vc1_get_context_size
+
+#ifdef VBP
+uint32_t viddec_vc1_is_start_frame(void *ctxt)
+#else
+static uint32_t viddec_vc1_is_start_frame(void *ctxt)
+#endif
+{
+ vc1_viddec_parser_t *parser = (vc1_viddec_parser_t *) ctxt;
+ return parser->is_frame_start;
+} // viddec_vc1_is_start_frame
+
+void viddec_vc1_get_ops(viddec_parser_ops_t *ops)
+{
+ ops->init = viddec_vc1_init;
+ ops->parse_syntax = viddec_vc1_parse;
+ ops->get_cxt_size = viddec_vc1_get_context_size;
+ ops->is_wkld_done = viddec_vc1_wkld_done;
+ ops->is_frame_start = viddec_vc1_is_start_frame;
+ return;
+} // viddec_vc1_get_ops
+
diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/viddec_vc1_workload.c b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/viddec_vc1_workload.c
new file mode 100644
index 0000000..b787831
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/viddec_vc1_workload.c
@@ -0,0 +1,691 @@
+/* Any workload management goes in this file */
+
+#include "viddec_fw_debug.h"
+#include "vc1.h"
+#include "vc1parse.h"
+#include "viddec_fw_workload.h"
+#include <auto_eas/gen4_mfd.h>
+#include "viddec_pm_utils_bstream.h"
+
+/* this function returns workload frame types corresponding to VC1 PTYPES (frame types)
+ * VC1 frame types: can be found in vc1parse_common_defs.h
+ * workload frame types are in viddec_workload.h
+*/
+static inline uint32_t vc1_populate_frame_type(uint32_t vc1_frame_type)
+{
+ uint32_t viddec_frame_type;
+
+ switch(vc1_frame_type)
+ {
+ case VC1_I_FRAME:
+ viddec_frame_type = VIDDEC_FRAME_TYPE_I;
+ break;
+ case VC1_P_FRAME:
+ viddec_frame_type = VIDDEC_FRAME_TYPE_P;
+ break;
+ case VC1_B_FRAME:
+ viddec_frame_type = VIDDEC_FRAME_TYPE_B;
+ break;
+ case VC1_BI_FRAME:
+ viddec_frame_type = VIDDEC_FRAME_TYPE_BI;
+ break;
+ case VC1_SKIPPED_FRAME :
+ viddec_frame_type = VIDDEC_FRAME_TYPE_SKIP;
+ break;
+ default:
+ viddec_frame_type = VIDDEC_FRAME_TYPE_INVALID;
+ break;
+ } // switch on vc1 frame type
+
+ return(viddec_frame_type);
+} // vc1_populate_frame_type
+
+static void translate_parser_info_to_frame_attributes(void *parent, vc1_viddec_parser_t *parser)
+{
+ viddec_workload_t *wl = viddec_pm_get_header( parent );
+ viddec_frame_attributes_t *attrs = &wl->attrs;
+ vc1_Info *info = &parser->info;
+ unsigned i;
+
+ /* typical sequence layer and entry_point data */
+ attrs->cont_size.height = info->metadata.height * 2 + 2;
+ attrs->cont_size.width = info->metadata.width * 2 + 2;
+
+ /* frame type */
+ /* we can have two fields with different types for field interlace coding mode */
+ if (info->picLayerHeader.FCM == VC1_FCM_FIELD_INTERLACE) {
+ attrs->frame_type = vc1_populate_frame_type(info->picLayerHeader.PTypeField1);
+ attrs->bottom_field_type = vc1_populate_frame_type(info->picLayerHeader.PTypeField2);
+ } else {
+ attrs->frame_type = vc1_populate_frame_type(info->picLayerHeader.PTYPE);
+ attrs->bottom_field_type = VIDDEC_FRAME_TYPE_INVALID; //unknown
+ }
+
+ /* frame counter */
+ attrs->vc1.tfcntr = info->picLayerHeader.TFCNTR;
+
+ /* TFF, repeat frame, field */
+ attrs->vc1.tff = info->picLayerHeader.TFF;
+ attrs->vc1.rptfrm = info->picLayerHeader.RPTFRM;
+ attrs->vc1.rff = info->picLayerHeader.RFF;
+
+ /* PAN Scan */
+ attrs->vc1.ps_present = info->picLayerHeader.PS_PRESENT;
+ attrs->vc1.num_of_pan_scan_windows = info->picLayerHeader.number_of_pan_scan_window;
+ for (i=0;i<attrs->vc1.num_of_pan_scan_windows;i++) {
+ attrs->vc1.pan_scan_window[i].hoffset = info->picLayerHeader.PAN_SCAN_WINDOW[i].hoffset;
+ attrs->vc1.pan_scan_window[i].voffset = info->picLayerHeader.PAN_SCAN_WINDOW[i].voffset;
+ attrs->vc1.pan_scan_window[i].width = info->picLayerHeader.PAN_SCAN_WINDOW[i].width;
+ attrs->vc1.pan_scan_window[i].height = info->picLayerHeader.PAN_SCAN_WINDOW[i].height;
+ } //end for i
+
+ return;
+} // translate_parser_info_to_frame_attributes
+
+void vc1_intcomp(vc1_viddec_parser_t *parser, vc1_Info *pInfo, VC1D_SPR_REGS *spr)
+{
+ vc1_metadata_t *md = &pInfo->metadata;
+ vc1_PictureLayerHeader *pic = &pInfo->picLayerHeader;
+ uint32_t intcomp1 = 1;
+ uint32_t intcomp2 = 0;
+
+ // Get the intensity compensation from the bitstream
+ BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, LUMA_SCALE_1, intcomp1, pic->LUMSCALE);
+ BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, LUMA_SHIFT_1, intcomp1, pic->LUMSHIFT);
+
+ if(md->INTCOMPFIELD == VC1_INTCOMP_BOTH_FIELD)
+ {
+ intcomp2 = 1;
+ BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, LUMA_SCALE_1, intcomp2, md->LUMSCALE2);
+ BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, LUMA_SHIFT_1, intcomp2, md->LUMSHIFT2);
+ }
+
+ switch(md->INTCOMPFIELD)
+ {
+ case VC1_INTCOMP_TOP_FIELD:
+ if(pic->CurrField == 0) // First field decoded
+ {
+ if(pic->TFF)
+ {
+ //parser->intcomp_bot[0] = intcomp1 << 13;
+ BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_2, spr->intcomp_fwd_top, intcomp1);
+ }
+ else
+ {
+ parser->intcomp_top[0] = intcomp1;
+ parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].intcomp_top = intcomp1;
+ BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_1, spr->intcomp_fwd_top, intcomp1);
+ }
+ }
+ else // Second field
+ {
+ if(pic->TFF)
+ {
+ parser->intcomp_top[0] = intcomp1;
+ parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].intcomp_top = intcomp1;
+ BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_1, spr->intcomp_bwd_top, intcomp1);
+ }
+ else
+ {
+ BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_2, spr->intcomp_fwd_top, intcomp1);
+ }
+ }
+ break;
+ case VC1_INTCOMP_BOTTOM_FIELD:
+ if(pic->CurrField == 0) // First field decoded
+ {
+ if(pic->TFF)
+ {
+ parser->intcomp_bot[0] = intcomp1;
+ parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].intcomp_bot = intcomp1;
+ BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_1, spr->intcomp_fwd_bot, intcomp1);
+ }
+ else
+ {
+ parser->intcomp_bot[0] = intcomp1 << 13;
+ BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_2, spr->intcomp_fwd_bot, intcomp1);
+ }
+ }
+ else // Second field
+ {
+ if(pic->TFF)
+ {
+ BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_2, spr->intcomp_fwd_bot, intcomp1);
+ }
+ else
+ {
+ parser->intcomp_bot[0] = intcomp1;
+ parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].intcomp_bot = intcomp1;
+ BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_1, spr->intcomp_bwd_bot, intcomp1);
+ }
+ }
+ break;
+ case VC1_INTCOMP_BOTH_FIELD:
+ if(pic->CurrField == 0) // First field decoded
+ {
+ if(pic->TFF)
+ {
+ parser->intcomp_bot[0] = intcomp2;
+ parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].intcomp_bot = intcomp2;
+ BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_2, spr->intcomp_fwd_top, intcomp1);
+ BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_1, spr->intcomp_fwd_bot, intcomp2);
+ }
+ else
+ {
+ parser->intcomp_top[0] = intcomp2;
+ parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].intcomp_top = intcomp2;
+ BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_2, spr->intcomp_fwd_bot, intcomp1);
+ BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_1, spr->intcomp_fwd_top, intcomp2);
+ }
+ }
+ else // Second field
+ {
+ if(pic->TFF)
+ {
+ parser->intcomp_top[0] = intcomp1;
+ parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].intcomp_top = intcomp1;
+ BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_1, spr->intcomp_bwd_top, intcomp1);
+ BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_2, spr->intcomp_fwd_bot, intcomp2);
+ }
+ else
+ {
+ parser->intcomp_bot[0] = intcomp1;
+ parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].intcomp_bot = intcomp1;
+ BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_1, spr->intcomp_bwd_bot, intcomp1);
+ BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_2, spr->intcomp_fwd_top, intcomp2);
+ }
+ }
+ break;
+ default:
+ break;
+ } // switch on INTCOMPFIELD
+
+ return;
+} // vc1_intcomp
+
+static void handle_intensity_compensation(vc1_viddec_parser_t *parser, vc1_Info *pInfo, VC1D_SPR_REGS *spr)
+{
+ vc1_PictureLayerHeader *pic = &pInfo->picLayerHeader;
+ uint8_t intcomp_present = false;
+
+ if((pic->MVMODE == VC1_MVMODE_INTENSCOMP) || (pic->INTCOMP))
+ {
+ intcomp_present = true;
+ if(pic->FCM == VC1_FCM_FIELD_INTERLACE)
+ {
+ vc1_intcomp(parser, pInfo, spr);
+ }
+ else
+ {
+ BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_1, spr->intcomp_fwd_top, 1);
+ BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, LUMA_SCALE_1, spr->intcomp_fwd_top, pic->LUMSCALE);
+ BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, LUMA_SHIFT_1, spr->intcomp_fwd_top, pic->LUMSHIFT);
+
+ if(parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].fcm == VC1_FCM_FIELD_INTERLACE)
+ {
+ BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_2, spr->intcomp_fwd_bot, 1);
+ BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, LUMA_SCALE_2, spr->intcomp_fwd_bot, pic->LUMSCALE);
+ BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, LUMA_SHIFT_2, spr->intcomp_fwd_bot, pic->LUMSHIFT);
+ }
+
+ parser->intcomp_top[0] = spr->intcomp_fwd_top;
+ parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].intcomp_top = spr->intcomp_fwd_top;
+ parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].intcomp_bot = spr->intcomp_fwd_top;
+ }
+ }
+
+ // Propagate the previous picture's intensity compensation
+ if(pic->FCM == VC1_FCM_FIELD_INTERLACE)
+ {
+ if( (pic->CurrField) ||
+ ((pic->CurrField == 0) && (parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].fcm == VC1_FCM_FIELD_INTERLACE)))
+ {
+ spr->intcomp_fwd_top |= parser->intcomp_top[1];
+ spr->intcomp_fwd_bot |= parser->intcomp_bot[1];
+ }
+ }
+ if(pic->FCM == VC1_FCM_FRAME_INTERLACE)
+ {
+ if( (pic->CurrField) ||
+ ((pic->CurrField == 0) && (parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].fcm == VC1_FCM_FIELD_INTERLACE)))
+ {
+ spr->intcomp_fwd_bot |= parser->intcomp_bot[1];
+ }
+ }
+
+ switch(pic->PTYPE)
+ {
+ case VC1_B_FRAME:
+ spr->intcomp_fwd_top = parser->intcomp_last[0];
+ spr->intcomp_fwd_bot = parser->intcomp_last[1];
+ spr->intcomp_bwd_top = parser->intcomp_last[2];
+ spr->intcomp_bwd_bot = parser->intcomp_last[3];
+ break;
+ case VC1_P_FRAME:
+ // If first field, store the intcomp values to propagate.
+ // If second field has valid intcomp values, store them
+ // to propagate.
+ if(pic->CurrField == 0) // first field
+ {
+ parser->intcomp_last[0] = spr->intcomp_fwd_top;
+ parser->intcomp_last[1] = spr->intcomp_fwd_bot;
+ parser->intcomp_last[2] = spr->intcomp_bwd_top;
+ parser->intcomp_last[3] = spr->intcomp_bwd_bot;
+ }
+ else // Second field
+ {
+ parser->intcomp_last[0] |= spr->intcomp_fwd_top;
+ parser->intcomp_last[1] |= spr->intcomp_fwd_bot;
+ parser->intcomp_last[2] |= spr->intcomp_bwd_top;
+ parser->intcomp_last[3] |= spr->intcomp_bwd_bot;
+ }
+ break;
+ case VC1_I_FRAME:
+ case VC1_BI_FRAME:
+ break;
+ default:
+ break;
+ }
+
+ return;
+} // handle_intensity_compensation
+
+/**
+ * This function populates the registers for range reduction (main profile)
+ * This function assumes pInfo->metadata.RANGERED is ON at the sequence layer (J.1.17)
+ * A frame is marked as range reduced by the RANGEREDFRM flag at the picture layer,
+ * and the output of the decoded range reduced frame needs to be scaled up (8.1.1.4).
+ * Previous reference frame needs be upscaled or downscaled based on the RR status of
+ * current and previous frame (8.3.4.11)
+ */
+static inline void vc1_fill_RR_hw_struct(vc1_viddec_parser_t *parser, vc1_Info *pInfo, VC1D_SPR_REGS *spr)
+{
+ vc1_PictureLayerHeader *pic = &pInfo->picLayerHeader;
+ int is_previous_ref_rr=0;
+
+ /* range reduction applies to luma and chroma component
+ which are the same register bit as RANGE_MAPY_FLAG, RANGE_MAPUV_FLAG */
+ BF_WRITE(VC1_0_SEQPIC_RANGE_MAP, RANGE_MAP_Y_FLAG, spr->range_map, pic->RANGEREDFRM);
+ BF_WRITE(VC1_0_SEQPIC_RANGE_MAP, RANGE_MAP_UV_FLAG, spr->range_map, pic->RANGEREDFRM);
+
+ /* Get the range reduced status of the previous frame */
+ switch (pic->PTYPE)
+ {
+ case VC1_P_FRAME:
+ {
+ is_previous_ref_rr = parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].rr_frm;
+ break;
+ }
+ case VC1_B_FRAME:
+ {
+ is_previous_ref_rr = parser->ref_frame[VC1_REF_FRAME_T_MINUS_2].rr_frm;
+ break;
+ }
+ default:
+ {
+ break;
+ }
+ }
+
+ /* if current frame is RR and previous frame is not
+ downscale the reference pixel ( RANGE_REF_RED_TYPE =1 in register) */
+ if(pic->RANGEREDFRM)
+ {
+ if(!is_previous_ref_rr)
+ {
+ BF_WRITE(VC1_0_SEQPIC_RECON_CONTROL, RANGE_REF_RED_EN, spr->recon_control, 1);
+ BF_WRITE(VC1_0_SEQPIC_RECON_CONTROL, RANGE_REF_RED_TYPE, spr->recon_control, 1);
+ }
+ }
+ else
+ {
+ /* if current frame is not RR but previous was RR, scale up the reference frame ( RANGE_REF_RED_TYPE = 0) */
+ if(is_previous_ref_rr)
+ {
+ BF_WRITE(VC1_0_SEQPIC_RECON_CONTROL, RANGE_REF_RED_EN, spr->recon_control, 1);
+ BF_WRITE(VC1_0_SEQPIC_RECON_CONTROL, RANGE_REF_RED_TYPE, spr->recon_control, 0);
+ }
+ } // end for RR upscale
+
+} // vc1_fill_RR_hw_struct
+
+/**
+ * fill workload items that will load registers for HW decoder
+ */
+static void vc1_fill_hw_struct(vc1_viddec_parser_t *parser, vc1_Info* pInfo, VC1D_SPR_REGS *spr)
+{
+ vc1_metadata_t *md = &pInfo->metadata;
+ vc1_PictureLayerHeader *pic = &pInfo->picLayerHeader;
+ int field = pic->CurrField;
+ int ptype;
+
+ ptype = pic->PTYPE;
+
+ LOG_CRIT("ptype = %d, field = %d, topfield = %d, slice = %d", ptype, pic->CurrField, pic->BottomField, pic->SLICE_ADDR);
+
+ /* Common to both fields */
+ BF_WRITE(VC1_0_SEQPIC_STREAM_FORMAT_1, PROFILE, spr->stream_format1, md->PROFILE);
+
+ BF_WRITE(VC1_0_SEQPIC_CODED_SIZE, WIDTH, spr->coded_size, md->width);
+ BF_WRITE(VC1_0_SEQPIC_CODED_SIZE, HEIGHT, spr->coded_size, md->height);
+
+ BF_WRITE(VC1_0_SEQPIC_STREAM_FORMAT_2, INTERLACE, spr->stream_format2, md->INTERLACE);
+
+ BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, LOOPFILTER, spr->entrypoint1, md->LOOPFILTER);
+ BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, FASTUVMC, spr->entrypoint1, md->FASTUVMC);
+ BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, EXTENDED_MV, spr->entrypoint1, md->EXTENDED_MV);
+ BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, DQUANT, spr->entrypoint1, md->DQUANT);
+ BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, VS_TRANSFORM, spr->entrypoint1, md->VSTRANSFORM);
+ BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, OVERLAP, spr->entrypoint1, md->OVERLAP);
+ BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, QUANTIZER, spr->entrypoint1, md->QUANTIZER);
+ BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, EXTENDED_DMV, spr->entrypoint1, md->EXTENDED_DMV);
+
+ /* if range reduction is indicated at seq. layer, populate range reduction registers for the frame*/
+ if(md->RANGERED)
+ {
+ vc1_fill_RR_hw_struct(parser, pInfo, spr );
+ }
+ else
+ { //range mapping
+ BF_WRITE( VC1_0_SEQPIC_RANGE_MAP, RANGE_MAP_Y_FLAG, spr->range_map, md->RANGE_MAPY_FLAG);
+ BF_WRITE( VC1_0_SEQPIC_RANGE_MAP, RANGE_MAP_Y, spr->range_map, md->RANGE_MAPY);
+ BF_WRITE( VC1_0_SEQPIC_RANGE_MAP, RANGE_MAP_UV_FLAG, spr->range_map, md->RANGE_MAPUV_FLAG);
+ BF_WRITE( VC1_0_SEQPIC_RANGE_MAP, RANGE_MAP_UV, spr->range_map, md->RANGE_MAPUV);
+ }
+
+ BF_WRITE(VC1_0_SEQPIC_FRAME_TYPE, FCM, spr->frame_type, pic->FCM);
+ BF_WRITE(VC1_0_SEQPIC_FRAME_TYPE, PTYPE, spr->frame_type, pic->PTYPE);
+
+ BF_WRITE( VC1_0_SEQPIC_RECON_CONTROL, RNDCTRL, spr->recon_control, md->RNDCTRL);
+ BF_WRITE( VC1_0_SEQPIC_RECON_CONTROL, UVSAMP, spr->recon_control, pic->UVSAMP);
+ BF_WRITE( VC1_0_SEQPIC_RECON_CONTROL, PQUANT, spr->recon_control, pic->PQUANT);
+ BF_WRITE( VC1_0_SEQPIC_RECON_CONTROL, HALFQP, spr->recon_control, pic->HALFQP);
+ BF_WRITE( VC1_0_SEQPIC_RECON_CONTROL, UNIFORM_QNT, spr->recon_control, pic->UniformQuant);
+ BF_WRITE( VC1_0_SEQPIC_RECON_CONTROL, POSTPROC, spr->recon_control, pic->POSTPROC);
+ BF_WRITE( VC1_0_SEQPIC_RECON_CONTROL, CONDOVER, spr->recon_control, pic->CONDOVER);
+ BF_WRITE( VC1_0_SEQPIC_RECON_CONTROL, PQINDEX_LE8, spr->recon_control, (pic->PQINDEX <= 8));
+
+ BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, MVRANGE, spr->mv_control, pic->MVRANGE);
+ if ( pic->MVMODE == VC1_MVMODE_INTENSCOMP)
+ BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, MVMODE, spr->mv_control, pic->MVMODE2);
+ else
+ BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, MVMODE, spr->mv_control, pic->MVMODE);
+ BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, MVTAB, spr->mv_control, pic->MVTAB);
+ BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, DMVRANGE, spr->mv_control, pic->DMVRANGE);
+ BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, MV4SWITCH, spr->mv_control, pic->MV4SWITCH);
+ BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, MBMODETAB, spr->mv_control, pic->MBMODETAB);
+ BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, NUMREF, spr->mv_control,
+ pic->NUMREF || ((pic->PTYPE == VC1_B_FRAME) && ( pic->FCM == VC1_FCM_FIELD_INTERLACE ) ));
+ BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, REFFIELD, spr->mv_control, pic->REFFIELD);
+
+ handle_intensity_compensation(parser, pInfo, spr);
+
+ BF_WRITE(VC1_0_SEQPIC_REFERENCE_B_FRACTION, BFRACTION_DEN, spr->ref_bfraction, pic->BFRACTION_DEN);
+ BF_WRITE(VC1_0_SEQPIC_REFERENCE_B_FRACTION, BFRACTION_NUM, spr->ref_bfraction, pic->BFRACTION_NUM);
+ BF_WRITE(VC1_0_SEQPIC_REFERENCE_B_FRACTION, REFDIST, spr->ref_bfraction, md->REFDIST);
+
+ // BLOCK CONTROL REGISTER Offset 0x2C
+ BF_WRITE( VC1_0_SEQPIC_BLOCK_CONTROL, CBPTAB, spr->blk_control, pic->CBPTAB);
+ BF_WRITE(VC1_0_SEQPIC_BLOCK_CONTROL, TTMFB, spr->blk_control, pic->TTMBF);
+ BF_WRITE(VC1_0_SEQPIC_BLOCK_CONTROL, TTFRM, spr->blk_control, pic->TTFRM);
+ BF_WRITE(VC1_0_SEQPIC_BLOCK_CONTROL, MV2BPTAB, spr->blk_control, pic->MV2BPTAB);
+ BF_WRITE(VC1_0_SEQPIC_BLOCK_CONTROL, MV4BPTAB, spr->blk_control, pic->MV4BPTAB);
+ if((field == 1) && (pic->SLICE_ADDR))
+ {
+ int mby = md->height * 2 + 2;
+ mby = (mby + 15 ) / 16;
+ pic->SLICE_ADDR -= (mby/2);
+ }
+ BF_WRITE(VC1_0_SEQPIC_BLOCK_CONTROL, INITIAL_MV_Y, spr->blk_control, pic->SLICE_ADDR);
+ BF_WRITE(VC1_0_SEQPIC_BLOCK_CONTROL, BP_RAW_ID2, spr->blk_control, md->bp_raw[0]);
+ BF_WRITE(VC1_0_SEQPIC_BLOCK_CONTROL, BP_RAW_ID1, spr->blk_control, md->bp_raw[1]);
+ BF_WRITE(VC1_0_SEQPIC_BLOCK_CONTROL, BP_RAW_ID0, spr->blk_control, md->bp_raw[2]);
+
+ BF_WRITE( VC1_0_SEQPIC_TRANSFORM_DATA, TRANSACFRM, spr->trans_data, pic->TRANSACFRM);
+ BF_WRITE( VC1_0_SEQPIC_TRANSFORM_DATA, TRANSACFRM2, spr->trans_data, pic->TRANSACFRM2);
+ BF_WRITE( VC1_0_SEQPIC_TRANSFORM_DATA, TRANSDCTAB, spr->trans_data, pic->TRANSDCTAB);
+
+ // When DQUANT is 1 or 2, we have the VOPDQUANT structure in the bitstream that
+ // controls the value calculated for ALTPQUANT
+ // ALTPQUANT must be in the range of 1 and 31 for it to be valid
+ // DQUANTFRM is present only when DQUANT is 1 and ALTPQUANT setting should be dependent on DQUANT instead
+ if(md->DQUANT)
+ {
+ if(pic->PQDIFF == 7)
+ BF_WRITE( VC1_0_SEQPIC_VOP_DEQUANT, PQUANT_ALT, spr->vop_dquant, pic->ABSPQ);
+ else if (pic->DQUANTFRM == 1)
+ BF_WRITE( VC1_0_SEQPIC_VOP_DEQUANT, PQUANT_ALT, spr->vop_dquant, pic->PQUANT + pic->PQDIFF + 1);
+ }
+ BF_WRITE( VC1_0_SEQPIC_VOP_DEQUANT, DQUANTFRM, spr->vop_dquant, pic->DQUANTFRM);
+ BF_WRITE( VC1_0_SEQPIC_VOP_DEQUANT, DQPROFILE, spr->vop_dquant, pic->DQPROFILE);
+ BF_WRITE( VC1_0_SEQPIC_VOP_DEQUANT, DQES, spr->vop_dquant, pic->DQSBEDGE);
+ BF_WRITE( VC1_0_SEQPIC_VOP_DEQUANT, DQBILEVEL, spr->vop_dquant, pic->DQBILEVEL);
+
+ BF_WRITE(VC1_0_SEQPIC_CURR_FRAME_ID,FCM, spr->ref_frm_id[VC1_FRAME_CURRENT_REF], pic->FCM );
+
+ if ( ptype == VC1_B_FRAME) {
+ // Forward reference is past reference and is the second temporally closest reference - hence minus_2
+ BF_WRITE(VC1_0_SEQPIC_FWD_REF_FRAME_ID, FCM, parser->spr.ref_frm_id[VC1_FRAME_PAST], parser->ref_frame[VC1_REF_FRAME_T_MINUS_2].fcm );
+ // Backward reference is future reference frame and is temporally the closest - hence minus_1
+ BF_WRITE(VC1_0_SEQPIC_BWD_REF_FRAME_ID, FCM, parser->spr.ref_frm_id[VC1_FRAME_FUTURE], parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].fcm );
+ } else {
+ // Only Forward reference is valid and is the temporally closest reference - hence minus_1, backward is set same as forward
+ BF_WRITE(VC1_0_SEQPIC_FWD_REF_FRAME_ID, FCM, parser->spr.ref_frm_id[VC1_FRAME_PAST], parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].fcm );
+ BF_WRITE(VC1_0_SEQPIC_BWD_REF_FRAME_ID, FCM, parser->spr.ref_frm_id[VC1_FRAME_FUTURE], parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].fcm );
+ }
+
+ BF_WRITE( VC1_0_SEQPIC_FIELD_REF_FRAME_ID, TOP_FIELD, spr->fieldref_ctrl_id, pic->BottomField);
+ BF_WRITE( VC1_0_SEQPIC_FIELD_REF_FRAME_ID, SECOND_FIELD, spr->fieldref_ctrl_id, pic->CurrField);
+ if(parser->info.picLayerHeader.PTYPE == VC1_I_FRAME)
+ {
+ BF_WRITE(VC1_0_SEQPIC_FIELD_REF_FRAME_ID, ANCHOR, spr->fieldref_ctrl_id, 1);
+ }
+ else
+ {
+ BF_WRITE(VC1_0_SEQPIC_FIELD_REF_FRAME_ID, ANCHOR, spr->fieldref_ctrl_id, parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].anchor[pic->CurrField]);
+ }
+
+ if( pic->FCM == VC1_FCM_FIELD_INTERLACE ) {
+ BF_WRITE(VC1_0_SEQPIC_IMAGE_STRUCTURE, IMG_STRUC, spr->imgstruct, (pic->BottomField) ? 2 : 1);
+ }
+
+ return;
+} // vc1_fill_hw_struct
+
+int32_t vc1_parse_emit_current_frame(void *parent, vc1_viddec_parser_t *parser)
+{
+ viddec_workload_item_t wi;
+ const uint32_t *pl;
+ int i;
+ int nitems;
+
+ if( parser->info.picLayerHeader.PTYPE == VC1_SKIPPED_FRAME ) {
+ translate_parser_info_to_frame_attributes( parent, parser );
+ return 0;
+ }
+
+ translate_parser_info_to_frame_attributes( parent, parser );
+ memset(&parser->spr, 0, sizeof(VC1D_SPR_REGS));
+ vc1_fill_hw_struct( parser, &parser->info, &parser->spr );
+
+ /* STUFF BSP Data Memory it into a variety of workload items */
+
+ pl = (const uint32_t *) &parser->spr;
+
+ // How many payloads must be generated
+ nitems = (sizeof(parser->spr) + 7) / 8; /* In QWORDs rounded up */
+
+
+ // Dump DMEM to an array of workitems
+ for( i = 0; (i < nitems) && ( (parser->info.picLayerHeader.SLICE_ADDR == 0) || parser->info.picture_info_has_changed ); i++ )
+ {
+ wi.vwi_type = VIDDEC_WORKLOAD_DECODER_SPECIFIC;
+ wi.data.data_offset = (unsigned int)pl - (unsigned int)&parser->spr; // offset within struct
+ wi.data.data_payload[0] = pl[0];
+ wi.data.data_payload[1] = pl[1];
+ pl += 2;
+
+ viddec_pm_append_workitem( parent, &wi );
+ }
+
+ {
+ uint32_t bit, byte;
+ uint8_t is_emul;
+ viddec_pm_get_au_pos(parent, &bit, &byte, &is_emul);
+ // Send current bit offset and current slice
+ wi.vwi_type = VIDDEC_WORKLOAD_VC1_BITOFFSET;
+ // If slice data starts in the middle of the emulation prevention sequence -
+ // Eg: 00 00 03 01 - slice data starts at the second byte of 0s, we still feed the data
+ // to the decoder starting at the first byte of 0s so that the decoder can detect the
+ // emulation prevention. But the actual data starts at offset 8 in this bit sequence.
+ wi.vwi_payload[0] = bit + (is_emul*8);
+ wi.vwi_payload[1] = parser->info.picLayerHeader.SLICE_ADDR;
+ wi.vwi_payload[2] = 0xdeaddead;
+ viddec_pm_append_workitem( parent, &wi );
+ }
+
+ viddec_pm_append_pixeldata( parent );
+
+ return(0);
+}
+
+/* sends VIDDEC_WORKLOAD_VC1_PAST_FRAME item */
+static inline void vc1_send_past_ref_items(void *parent)
+{
+ viddec_workload_item_t wi;
+ wi.vwi_type = VIDDEC_WORKLOAD_VC1_PAST_FRAME;
+ wi.ref_frame.reference_id = 0;
+ wi.ref_frame.luma_phys_addr = 0;
+ wi.ref_frame.chroma_phys_addr = 0;
+ viddec_pm_append_workitem( parent, &wi );
+ return;
+}
+
+/* send future frame item */
+static inline void vc1_send_future_ref_items(void *parent)
+{
+ viddec_workload_item_t wi;
+ wi.vwi_type = VIDDEC_WORKLOAD_VC1_FUTURE_FRAME;
+ wi.ref_frame.reference_id = 0;
+ wi.ref_frame.luma_phys_addr = 0;
+ wi.ref_frame.chroma_phys_addr = 0;
+ viddec_pm_append_workitem( parent, &wi );
+ return;
+}
+
+/* send reorder frame item to host
+ * future frame gets push to past */
+static inline void send_reorder_ref_items(void *parent)
+{
+ viddec_workload_item_t wi;
+ wi.vwi_type = VIDDEC_WORKLOAD_REFERENCE_FRAME_REORDER;
+ wi.ref_reorder.ref_table_offset = 0;
+ wi.ref_reorder.ref_reorder_00010203 = 0x01010203; //put reference frame index 1 as reference index 0
+ wi.ref_reorder.ref_reorder_04050607 = 0x04050607; // index 4,5,6,7 stay the same
+ viddec_pm_append_workitem( parent, &wi );
+ return;
+} // send_reorder_ref_items
+
+/** update workload with more workload items for ref and update values to store...
+ */
+void vc1_start_new_frame(void *parent, vc1_viddec_parser_t *parser)
+{
+ vc1_metadata_t *md = &(parser->info.metadata);
+ viddec_workload_t *wl = viddec_pm_get_header(parent);
+ int frame_type = parser->info.picLayerHeader.PTYPE;
+ int frame_id = 1; // new reference frame is assigned index 1
+
+ /* init */
+ memset(&parser->spr, 0, sizeof(parser->spr));
+ wl->is_reference_frame = 0;
+
+ /* set flag - extra output frame needed for range adjustment (range mapping or range reduction) */
+ if (parser->info.metadata.RANGE_MAPY_FLAG ||
+ parser->info.metadata.RANGE_MAPUV_FLAG ||
+ parser->info.picLayerHeader.RANGEREDFRM)
+ {
+ wl->is_reference_frame |= WORKLOAD_FLAGS_RA_FRAME;
+ }
+
+ LOG_CRIT("vc1_start_new_frame: frame_type=%d \n",frame_type);
+
+ parser->is_reference_picture = ((VC1_B_FRAME != frame_type) && (VC1_BI_FRAME != frame_type));
+
+ /* reference / anchor frames processing
+ * we need to send reorder before reference frames */
+ if (parser->is_reference_picture)
+ {
+ /* one frame has been sent */
+ if (parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].id != -1)
+ {
+ /* there is a frame in the reference buffer, move it to the past */
+ send_reorder_ref_items(parent);
+ }
+ }
+
+ /* send workitems for reference frames */
+ switch( frame_type )
+ {
+ case VC1_B_FRAME:
+ {
+ vc1_send_past_ref_items(parent);
+ vc1_send_future_ref_items(parent);
+ break;
+ }
+ case VC1_SKIPPED_FRAME:
+ {
+ wl->is_reference_frame |= WORKLOAD_SKIPPED_FRAME;
+ vc1_send_past_ref_items(parent);
+ break;
+ }
+ case VC1_P_FRAME:
+ {
+ vc1_send_past_ref_items( parent);
+ break;
+ }
+ default:
+ break;
+ }
+
+ /* reference / anchor frames from previous code
+ * we may need it for frame reduction */
+ if (parser->is_reference_picture)
+ {
+ wl->is_reference_frame |= WORKLOAD_REFERENCE_FRAME | (frame_id & WORKLOAD_REFERENCE_FRAME_BMASK);
+
+ parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].id = frame_id;
+ parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].fcm = parser->info.picLayerHeader.FCM;
+ parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].anchor[0] = (parser->info.picLayerHeader.PTYPE == VC1_I_FRAME);
+ if(parser->info.picLayerHeader.FCM == VC1_FCM_FIELD_INTERLACE)
+ {
+ parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].anchor[1] = (parser->info.picLayerHeader.PTypeField2 == VC1_I_FRAME);
+ }
+ else
+ {
+ parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].anchor[1] = parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].anchor[0];
+ }
+
+ parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].type = parser->info.picLayerHeader.PTYPE;
+ parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].rr_en = md->RANGERED;
+ parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].rr_frm = parser->info.picLayerHeader.RANGEREDFRM;
+
+ LOG_CRIT("anchor[0] = %d, anchor[1] = %d",
+ parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].anchor[0],
+ parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].anchor[1] );
+ }
+
+ return;
+} // vc1_start_new_frame
+
+void vc1_end_frame(vc1_viddec_parser_t *parser)
+{
+ /* update status of reference frames */
+ if(parser->is_reference_picture)
+ {
+ parser->ref_frame[VC1_REF_FRAME_T_MINUS_2] = parser->ref_frame[VC1_REF_FRAME_T_MINUS_1];
+ parser->ref_frame[VC1_REF_FRAME_T_MINUS_1] = parser->ref_frame[VC1_REF_FRAME_T_MINUS_0];
+ }
+
+ return;
+} // vc1_end_frame
+
diff --git a/mix_vbp/viddec_fw/fw/include/stdint.h b/mix_vbp/viddec_fw/fw/include/stdint.h
new file mode 100644
index 0000000..885cfe1
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/include/stdint.h
@@ -0,0 +1,23 @@
+#ifndef __STDINT_H
+#define __STDINT_H
+
+typedef unsigned char uint8_t;
+typedef unsigned short uint16_t;
+typedef unsigned int uint32_t;
+typedef unsigned long long uint64_t;
+
+//#ifndef _MACHTYPES_H_
+typedef signed char int8_t;
+typedef signed short int16_t;
+typedef signed int int32_t;
+typedef signed long long int64_t;
+//#endif
+
+#ifndef NULL
+#define NULL (void*)0x0
+#endif
+
+#define true 1
+#define false 0
+
+#endif
diff --git a/mix_vbp/viddec_fw/fw/include/viddec_debug.h b/mix_vbp/viddec_fw/fw/include/viddec_debug.h
new file mode 100644
index 0000000..23db98f
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/include/viddec_debug.h
@@ -0,0 +1,31 @@
+#ifndef VIDDEC_DEBUG_H
+#define VIDDEC_DEBUG_H
+
+#ifndef VBP
+
+#ifdef HOST_ONLY
+ #include <stdio.h>
+ #include <osal.h>
+ #define DEB OS_PRINT
+ #define FWTRACE OS_PRINT("trace:%s %d\n", __FUNCTION__, __LINE__ );
+// #define DEB(format, args...)
+// #define FWTRACE
+ #define DEB_FNAME(format, args...) OS_PRINT("%s: %s[%d]:: " format, __FILE__, __FUNCTION__ , __LINE__ , ## args )
+ #define CDEB(a, format, args...) if(a != 0) {DEB(format, ##args);}
+#else
+ #define DEB(format, args...)
+ #define FWTRACE
+ #define CDEB(a, format, args...)
+ #define DEB_FNAME(format, args...)
+#endif
+
+#else // VBP is defined
+
+#define DEB(format, args...)
+#define FWTRACE
+#define CDEB(a, format, args...)
+#define DEB_FNAME(format, args...)
+
+#endif // end of VBP
+
+#endif
diff --git a/mix_vbp/viddec_fw/fw/include/viddec_fw_version.h b/mix_vbp/viddec_fw/fw/include/viddec_fw_version.h
new file mode 100644
index 0000000..099be69
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/include/viddec_fw_version.h
@@ -0,0 +1,7 @@
+#ifndef VIDDEC_FW_VERSION_H
+#define VIDDEC_FW_VERSION_H
+
+#define VIDDEC_FW_MAJOR_NUM 0
+#define VIDDEC_FW_MINOR_NUM 8
+#define VIDDEC_FW_BUILD_NUM 11
+#endif
diff --git a/mix_vbp/viddec_fw/fw/parser/Makefile.am b/mix_vbp/viddec_fw/fw/parser/Makefile.am
new file mode 100644
index 0000000..c94b935
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/Makefile.am
@@ -0,0 +1,205 @@
+#INTEL CONFIDENTIAL
+#Copyright 2009 Intel Corporation All Rights Reserved.
+#The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel's prior express written permission.
+
+#No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+#
+VC1PATH=./../codecs/vc1/parser
+MP2PATH=./../codecs/mp2/parser
+MP4PATH=./../codecs/mp4/parser
+H264PATH=./../codecs/h264/parser
+
+PARSER_INCLUDE_PATH=-I./include \
+ -I../include \
+ -I../../include \
+ -I./vc1/include \
+ -I../codecs/vc1/include \
+ -I../codecs/mp2/include \
+ -I../codecs/mp4/include \
+ -I../codecs/h264/include \
+ -I../codecs/vc1/parser
+
+
+PARSER_MACROS= -DVBP \
+ -DHOST_ONLY \
+ -DG_LOG_DOMAIN=\"vbp\"
+
+
+la_CFLAGS = $(GLIB_CFLAGS) \
+ $(MIX_CFLAGS) \
+ $(GOBJECT_CFLAGS) \
+ $(GTHREAD_CFLAGS) \
+ $(PARSER_INCLUDE_PATH) \
+ $(PARSER_MACROS) \
+ -DMIXVBP_CURRENT=@MIXVBP_CURRENT@ \
+ -DMIXVBP_AGE=@MIXVBP_AGE@ \
+ -DMIXVBP_REVISION=@MIXVBP_REVISION@
+
+la_LIBADD = $(GLIB_LIBS) \
+ $(GOBJECT_LIBS) \
+ $(GTHREAD_LIBS)
+
+la_LDFLAGS = $(GLIB_LIBS) \
+ $(GOBJECT_LIBS) \
+ $(GTHREAD_LIBS) \
+ -version-info @MIXVBP_CURRENT@:@MIXVBP_REVISION@:@MIXVBP_AGE@
+
+lib_LTLIBRARIES = libmixvbp.la \
+ libmixvbp_vc1.la \
+ libmixvbp_mpeg2.la \
+ libmixvbp_mpeg4.la \
+ libmixvbp_h264.la
+
+
+###################################### vbp loader ########################################
+
+# sources used to compile
+libmixvbp_la_SOURCES = vbp_loader.c \
+ vbp_utils.c \
+ vbp_trace.c \
+ vbp_h264_parser.c \
+ vbp_vc1_parser.c \
+ vbp_mp42_parser.c \
+ viddec_pm.c \
+ viddec_pm_stubs.c \
+ viddec_pm_parser_ops.c \
+ viddec_pm_utils_bstream.c \
+ viddec_pm_tags.c \
+ viddec_emit.c \
+ viddec_pm_utils_list.c \
+ viddec_parse_sc.c \
+ viddec_parse_sc_stub.c
+
+libmixvbp_la_CFLAGS = $(la_CFLAGS)
+libmixvbp_la_LIBADD = $(la_LIBADD)
+libmixvbp_la_LDFLAGS = $(la_LDFLAGS)
+libmixvbp_la_LIBTOOLFLAGS = --tag=disable-static
+
+###################################### VC-1 parser ########################################
+
+libmixvbp_vc1_la_SOURCES = $(VC1PATH)/vc1parse.c \
+ $(VC1PATH)/vc1parse_bitplane.c \
+ $(VC1PATH)/vc1parse_bpic.c \
+ $(VC1PATH)/vc1parse_bpic_adv.c \
+ $(VC1PATH)/vc1parse_common_tables.c \
+ $(VC1PATH)/vc1parse_huffman.c \
+ $(VC1PATH)/vc1parse_ipic.c \
+ $(VC1PATH)/vc1parse_ipic_adv.c \
+ $(VC1PATH)/vc1parse_mv_com.c \
+ $(VC1PATH)/vc1parse_pic_com.c \
+ $(VC1PATH)/vc1parse_pic_com_adv.c \
+ $(VC1PATH)/vc1parse_ppic.c \
+ $(VC1PATH)/vc1parse_ppic_adv.c \
+ $(VC1PATH)/vc1parse_vopdq.c \
+ $(VC1PATH)/viddec_vc1_parse.c \
+ $(VC1PATH)/mix_vbp_vc1_stubs.c
+
+libmixvbp_vc1_la_CFLAGS = $(la_CFLAGS)
+libmixvbp_vc1_la_LIBADD = $(la_LIBADD) libmixvbp.la
+libmixvbp_vc1_la_LDFLAGS = $(la_LDFLAGS)
+libmixvbp_vc1_la_LIBTOOLFLAGS = --tag=disable-static
+
+###################################### MPEG-2 parser ########################################
+
+libmixvbp_mpeg2_la_SOURCES = $(MP2PATH)/viddec_mpeg2_metadata.c \
+ $(MP2PATH)/viddec_mpeg2_parse.c \
+ $(MP2PATH)/mix_vbp_mpeg2_stubs.c
+
+libmixvbp_mpeg2_la_CFLAGS = $(la_CFLAGS)
+libmixvbp_mpeg2_la_LIBADD = $(la_LIBADD) libmixvbp.la
+libmixvbp_mpeg2_la_LDFLAGS = $(la_LDFLAGS)
+libmixvbp_mpeg2_la_LIBTOOLFLAGS = --tag=disable-static
+
+###################################### MPEG-4 parser ########################################
+
+libmixvbp_mpeg4_la_SOURCES = $(MP4PATH)/viddec_mp4_parse.c \
+ $(MP4PATH)/viddec_parse_sc_mp4.c \
+ $(MP4PATH)/viddec_mp4_visualobject.c \
+ $(MP4PATH)/viddec_mp4_videoobjectplane.c \
+ $(MP4PATH)/viddec_mp4_shortheader.c \
+ $(MP4PATH)/viddec_mp4_videoobjectlayer.c \
+ $(MP4PATH)/viddec_mp4_decodevideoobjectplane.c
+
+libmixvbp_mpeg4_la_CFLAGS = $(la_CFLAGS)
+libmixvbp_mpeg4_la_LIBADD = $(la_LIBADD) libmixvbp.la
+libmixvbp_mpeg4_la_LDFLAGS = $(la_LDFLAGS)
+libmixvbp_mpeg4_la_LIBTOOLFLAGS = --tag=disable-static
+
+###################################### H.264 parser ########################################
+
+libmixvbp_h264_la_SOURCES = $(H264PATH)/h264parse.c \
+ $(H264PATH)/h264parse_bsd.c \
+ $(H264PATH)/h264parse_math.c \
+ $(H264PATH)/h264parse_mem.c \
+ $(H264PATH)/h264parse_sei.c \
+ $(H264PATH)/h264parse_sh.c \
+ $(H264PATH)/h264parse_pps.c \
+ $(H264PATH)/h264parse_sps.c \
+ $(H264PATH)/h264parse_dpb.c \
+ $(H264PATH)/viddec_h264_parse.c \
+ $(H264PATH)/mix_vbp_h264_stubs.c
+
+libmixvbp_h264_la_CFLAGS = $(la_CFLAGS)
+libmixvbp_h264_la_LIBADD = $(la_LIBADD) libmixvbp.la
+libmixvbp_h264_la_LDFLAGS = $(la_LDFLAGS)
+libmixvbp_h264_la_LIBTOOLFLAGS = --tag=disable-static
+
+##############################################################################################
+
+# headers we need but don't want installed
+noinst_HEADERS = ./vbp_h264_parser.h \
+ ./vbp_mp42_parser.h \
+ ./vbp_vc1_parser.h \
+ ./vbp_trace.h \
+ ./vbp_loader.h \
+ ./vbp_utils.h \
+ ./include/fw_pvt.h \
+ ./include/ipc_fw_custom.h \
+ ./include/viddec_emitter.h \
+ ./include/viddec_fw_debug.h \
+ ./include/viddec_fw_parser_fw_ipc.h \
+ ./include/viddec_h264_parse.h \
+ ./include/viddec_mp4_parse.h \
+ ./include/viddec_mpeg2_parse.h \
+ ./include/viddec_parser_ops.h \
+ ./include/viddec_pm.h \
+ ./include/viddec_pm_parse.h \
+ ./include/viddec_pm_tags.h \
+ ./include/viddec_pm_utils_bstream.h \
+ ./include/viddec_pm_utils_list.h \
+ ./include/viddec_vc1_parse.h \
+ ../include/stdint.h \
+ ../include/viddec_debug.h \
+ ../include/viddec_fw_version.h \
+ ../../include/viddec_fw_common_defs.h \
+ ../../include/viddec_fw_decoder_host.h \
+ ../../include/viddec_fw_frame_attr.h \
+ ../../include/viddec_fw_item_types.h \
+ ../../include/viddec_fw_parser_host.h \
+ ../../include/viddec_fw_workload.h \
+ ../../fw/include/stdint.h \
+ ../../fw/include/viddec_debug.h \
+ ../../fw/include/viddec_fw_version.h \
+ ../../fw/codecs/h264/include/h264.h \
+ ../../fw/codecs/h264/include/h264parse.h \
+ ../../fw/codecs/h264/include/h264parse_dpb.h \
+ ../../fw/codecs/h264/include/h264parse_sei.h \
+ ../../fw/codecs/mp2/include/mpeg2.h \
+ ../../fw/codecs/mp2/include/viddec_mpeg2.h \
+ ../../fw/codecs/mp4/include/viddec_fw_mp4.h \
+ ../../fw/codecs/mp4/parser/viddec_mp4_decodevideoobjectplane.h \
+ ../../fw/codecs/mp4/parser/viddec_mp4_parse.h \
+ ../../fw/codecs/mp4/parser/viddec_mp4_shortheader.h \
+ ../../fw/codecs/mp4/parser/viddec_mp4_videoobjectlayer.h \
+ ../../fw/codecs/mp4/parser/viddec_mp4_videoobjectplane.h \
+ ../../fw/codecs/mp4/parser/viddec_mp4_visualobject.h \
+ ../../fw/codecs/vc1/include/vc1common.h \
+ ../../fw/codecs/vc1/parser/vc1.h \
+ ../../fw/codecs/vc1/parser/vc1parse.h \
+ ../../fw/codecs/vc1/parser/vc1parse_common_defs.h
+
+
+mixincludedir=$(includedir)/mixvbp
+mixinclude_HEADERS = vbp_loader.h
+
+##############################################################################################
diff --git a/mix_vbp/viddec_fw/fw/parser/gv_sven_devh.c b/mix_vbp/viddec_fw/fw/parser/gv_sven_devh.c
new file mode 100644
index 0000000..299dbce
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/gv_sven_devh.c
@@ -0,0 +1,224 @@
+/*
+
+ This file is provided under a dual BSD/GPLv2 license. When using or
+ redistributing this file, you may do so under either license.
+
+ GPL LICENSE SUMMARY
+
+ Copyright(c) 2005-2008 Intel Corporation. All rights reserved.
+
+ This program is free software; you can redistribute it and/or modify
+ it under the terms of version 2 of the GNU General Public License as
+ published by the Free Software Foundation.
+
+ This program is distributed in the hope that it will be useful, but
+ WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program; if not, write to the Free Software
+ Foundation, Inc., 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA.
+ The full GNU General Public License is included in this distribution
+ in the file called LICENSE.GPL.
+
+ Contact Information:
+ Intel Corporation
+ 2200 Mission College Blvd.
+ Santa Clara, CA 97052
+
+ BSD LICENSE
+
+ Copyright(c) 2005-2008 Intel Corporation. All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in
+ the documentation and/or other materials provided with the
+ distribution.
+ * Neither the name of Intel Corporation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+*/
+
+#ifndef SVEN_FW_H
+#include "sven_fw.h"
+#endif
+
+#define _OSAL_IO_MEMMAP_H /* to prevent errors when including sven_devh.h */
+#define _OSAL_ASSERT_H /* to prevent errors when including sven_devh.h */
+#include "sven_devh.h"
+
+#include "fw_pvt.h"
+
+static os_devhandle_t g_svenh;
+
+#define FW_SVEN_DEVH_DISABLE_SVEN_REGISTER_IO
+//#define SVEN_DEVH_DISABLE_SVEN
+
+extern int sven_fw_is_tx_enabled(
+ struct SVENHandle *svenh );
+
+#ifndef SVEN_DEVH_DISABLE_SVEN
+static void sven_write_event(
+ struct SVENHandle *svenh,
+ struct SVENEvent *ev )
+{
+ if ( NULL == svenh )
+ svenh = &g_svenh.devh_svenh;
+
+ if ( NULL != svenh->phot )
+ sven_fw_write_event(svenh,ev);
+}
+
+static void sven_fw_initialize_event_top(
+ struct SVENEvent *ev,
+ int module,
+ int unit,
+ int event_type,
+ int event_subtype )
+{
+ ev->se_et.et_gencount = 0;
+ ev->se_et.et_module = module;
+ ev->se_et.et_unit = unit;
+ ev->se_et.et_type = event_type;
+ ev->se_et.et_subtype = event_subtype;
+}
+#endif
+
+uint32_t sven_get_timestamp()
+{
+ uint32_t value = 0;
+
+ if ( NULL != g_svenh.devh_svenh.ptime )
+ {
+ value = sven_fw_read_external_register( &g_svenh.devh_svenh, g_svenh.devh_svenh.ptime );
+ }
+
+ return(value);
+}
+
+/* ---------------------------------------------------------------------- */
+/* ---------------------------------------------------------------------- */
+
+void devh_SVEN_SetModuleUnit(
+ os_devhandle_t *devh,
+ int sven_module,
+ int sven_unit )
+{
+#ifndef SVEN_DEVH_DISABLE_SVEN
+ if ( NULL == devh )
+ devh = &g_svenh;
+ devh->devh_sven_module = sven_module;
+ devh->devh_sven_unit = sven_unit;
+#endif
+}
+
+os_devhandle_t *devhandle_factory( const char *desc )
+{
+ /* pointer to global vsparc local registers */
+ g_svenh.devh_regs_ptr = (void *) 0x10000000; /* firmware address to Local (GV) registers */
+
+ return( &g_svenh );
+}
+
+int devhandle_connect_name(
+ os_devhandle_t *devh,
+ const char *devname )
+{
+ return(1);
+}
+
+/* ---------------------------------------------------------------------- */
+/* ---------------------------------------------------------------------- */
+
+void devh_SVEN_WriteModuleEvent(
+ os_devhandle_t *devh,
+ int module_event_subtype,
+ unsigned int payload0,
+ unsigned int payload1,
+ unsigned int payload2,
+ unsigned int payload3,
+ unsigned int payload4,
+ unsigned int payload5 )
+{
+#ifndef SVEN_DEVH_DISABLE_SVEN
+ struct SVENEvent ev __attribute__ ((aligned(8)));
+
+ devh = (NULL != devh) ? devh : &g_svenh;
+
+ if ( ! sven_fw_is_tx_enabled( &devh->devh_svenh ) )
+ return;
+
+ sven_fw_initialize_event_top( &ev,
+ devh->devh_sven_module,
+ 1 /* devh->devh_sven_unit */,
+ SVEN_event_type_module_specific,
+ module_event_subtype );
+
+ ev.u.se_uint[0] = payload0;
+ ev.u.se_uint[1] = payload1;
+ ev.u.se_uint[2] = payload2;
+ ev.u.se_uint[3] = payload3;
+ ev.u.se_uint[4] = payload4;
+ ev.u.se_uint[5] = payload5;
+
+ sven_write_event( &devh->devh_svenh, &ev );
+#endif
+}
+
+/* ---------------------------------------------------------------------- */
+/* SVEN FW TX: Required custom routines to enable FW TX */
+/* ---------------------------------------------------------------------- */
+int sven_fw_set_globals(
+ struct SVEN_FW_Globals *fw_globals )
+{
+ sven_fw_attach( &g_svenh.devh_svenh, fw_globals );
+ devh_SVEN_SetModuleUnit( &g_svenh, SVEN_module_GEN4_GV, 1 );
+ return(0);
+}
+
+uint32_t cp_using_dma_phys(uint32_t ddr_addr, uint32_t local_addr, uint32_t size, char to_ddr, char swap);
+
+unsigned int sven_fw_read_external_register(
+ struct SVENHandle *svenh,
+ volatile unsigned int *preg )
+{
+ unsigned int reg __attribute__ ((aligned(8)));
+
+ (void)svenh; // argument unused
+
+ cp_using_dma_phys( (uint32_t) preg, (uint32_t) &reg, 4, 0, 0 );
+
+ return( reg );
+}
+
+void sven_fw_copy_event_to_host_mem(
+ struct SVENHandle *svenh,
+ volatile struct SVENEvent *to,
+ const struct SVENEvent *from )
+{
+ (void)svenh; // argument unused
+
+ cp_using_dma_phys( (uint32_t) to, (uint32_t) from, sizeof(*to), 1, 0 );
+}
+/* ---------------------------------------------------------------------- */
+/* ---------------------------------------------------------------------- */
diff --git a/mix_vbp/viddec_fw/fw/parser/include/fw_pvt.h b/mix_vbp/viddec_fw/fw/parser/include/fw_pvt.h
new file mode 100644
index 0000000..0928ad3
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/include/fw_pvt.h
@@ -0,0 +1,114 @@
+#ifndef FW_PVT_H
+#define FW_PVT_H
+
+#include <stdint.h>
+#include "viddec_fw_parser_fw_ipc.h"
+#include "viddec_fw_parser_ipclib_config.h"
+#include "viddec_emitter.h"
+#include "viddec_pm.h"
+#include "viddec_fw_debug.h"
+
+#define GET_IPC_HANDLE(x) (FW_IPC_Handle *)&(x.fwIpc)
+#define GV_DDR_MEM_MASK 0x80000000
+/* Macros for Interrupts */
+#define TRAPS_ENABLE __asm__ volatile ("mov %%psr, %%l0; or %%l0, 0x20, %%l0; mov %%l0, %%psr; nop; nop; nop;":::"l0")
+#define TRAPS_DISABLE __asm__ volatile ("mov %%psr, %%l0; and %%l0, ~0x20, %%l0; mov %%l0, %%psr; nop; nop; nop;":::"l0")
+
+#define TRAPS_INT_ENABLE __asm__ volatile ("mov %%psr, %%l0; and %%l0, ~0xF00, %%l0; mov %%l0, %%psr; nop; nop; nop;":::"l0")
+#define TRAPS_INT_DISABLE __asm__ volatile ("mov %%psr, %%l0; or %%l0, 0xF00, %%l0; mov %%l0, %%psr; nop; nop; nop;":::"l0")
+
+#define TRAPS_ENABLED(enabled) __asm__ volatile ("mov %%psr, %0; and %0, 0x20, %0": "=r" (enabled):)
+
+#define TRAPS_INT_DISABLED(enabled) __asm__ volatile ("mov %%psr, %0; and %0, 0xF00, %0": "=r" (enabled):)
+
+#define VIDDEC_WATCHDOG_COUNTER_MAX (0x000FFFFF)
+
+/* Synchronous message buffer, which is shared by both Host and Fw for handling synchronous messages */
+typedef struct
+{
+ uint8_t data[CONFIG_IPC_SYNC_MESSAGE_BUF_SIZE];
+}mfd_sync_msg_t;
+
+/* Required Information needed by Parser Kernel for each stream */
+typedef struct
+{
+ uint32_t ddr_cxt; /* phys addr of swap space where Parser kernel stores pvt information */
+ uint32_t cxt_size; /* size of context buffer */
+ uint32_t strm_type; /* Current stream information*/
+ uint32_t wl_time; /* ticks for processing current workload */
+ uint32_t es_time; /* ticks for processing current workload */
+ uint32_t low_watermark; /* On crossing this value we generate low watermark interrupt */
+ uint8_t state; /* Current state of stream ... start(1), stop(0).. */
+ uint8_t priority; /* Priority of current stream Real time or Non real time */
+ uint8_t buffered_data;/* Do we have data from past buffer */
+ uint8_t pending_interrupt;/* Whether an Interrupt needs to be generated for this stream */
+}mfd_stream_info;
+
+/* Global data for Parser kernel */
+typedef struct
+{
+ int32_t low_id; /* last scheduled low priority stream id */
+ int32_t high_id;/* last scheduled high priority stream id */
+ uint32_t g_parser_tables; /* should point to global_parser_table in DDR */
+}mfd_pk_data_t;
+
+typedef struct
+{
+ ipc_msg_data input;
+ ipc_msg_data wkld1;
+ ipc_msg_data wkld2;
+ viddec_pm_cxt_t pm;
+}mfd_pk_strm_cxt;
+
+/* This structure defines the layout of local memory */
+typedef struct
+{
+ mfd_sync_msg_t buf;
+ _IPC_int_state_t int_status[FW_SUPPORTED_STREAMS];
+ FW_IPC_Handle fwIpc;
+ mfd_stream_info stream_info[FW_SUPPORTED_STREAMS];
+ mfd_pk_data_t g_pk_data;
+ mfd_pk_strm_cxt srm_cxt;
+}dmem_t;
+
+/* Pvt Functions which will be used by multiple modules */
+
+static inline void reg_write(uint32_t offset, uint32_t value)
+{
+ *((volatile uint32_t*) (GV_SI_MMR_BASE_ADDRESS + offset)) = value;
+}
+
+static inline uint32_t reg_read(uint32_t offset)
+{
+ uint32_t value=0;
+ value = *((volatile uint32_t*) (GV_SI_MMR_BASE_ADDRESS + offset));
+ return value;
+}
+
+
+static inline void DEBUG(uint32_t print, uint32_t code, uint32_t val)
+{
+ if(print > 0)
+ {
+ DUMP_TO_MEM(code);
+ DUMP_TO_MEM(val);
+ dump_ptr = (dump_ptr + 7) & ~0x7;
+ }
+}
+
+void *memcpy(void *dest, const void *src, uint32_t n);
+
+void *memset(void *s, int32_t c, uint32_t n);
+
+uint32_t cp_using_dma(uint32_t ddr_addr, uint32_t local_addr, uint32_t size, char to_ddr, char swap);
+
+uint32_t set_wdog(uint32_t offset);
+
+void get_wdog(uint32_t *value);
+
+void enable_intr(void);
+
+uint32_t get_total_ticks(uint32_t start, uint32_t end);
+
+void viddec_fw_init_swap_memory(unsigned int stream_id, unsigned int swap, unsigned int clean);
+#endif
diff --git a/mix_vbp/viddec_fw/fw/parser/include/ipc_fw_custom.h b/mix_vbp/viddec_fw/fw/parser/include/ipc_fw_custom.h
new file mode 100644
index 0000000..adfdabf
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/include/ipc_fw_custom.h
@@ -0,0 +1,87 @@
+/*
+
+This file is provided under a dual BSD/GPLv2 license. When using or
+redistributing this file, you may do so under either license.
+
+GPL LICENSE SUMMARY
+
+Copyright(c) 2005-2008 Intel Corporation. All rights reserved.
+
+This program is free software; you can redistribute it and/or modify
+it under the terms of version 2 of the GNU General Public License as
+published by the Free Software Foundation.
+
+This program is distributed in the hope that it will be useful, but
+WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+General Public License for more details.
+
+You should have received a copy of the GNU General Public License
+along with this program; if not, write to the Free Software
+Foundation, Inc., 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA.
+The full GNU General Public License is included in this distribution
+in the file called LICENSE.GPL.
+
+Contact Information:
+Intel Corporation
+2200 Mission College Blvd.
+Santa Clara, CA 97052
+
+BSD LICENSE
+
+Copyright(c) 2005-2008 Intel Corporation. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions
+are met:
+
+* Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+* Redistributions in binary form must reproduce the above copyright
+notice, this list of conditions and the following disclaimer in
+the documentation and/or other materials provided with the
+distribution.
+* Neither the name of Intel Corporation nor the names of its
+contributors may be used to endorse or promote products derived
+from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+*/
+#ifndef IPC_FW_CUSTOM_H
+#define IPC_FW_CUSTOM_H 1
+
+#include "viddec_fw_parser_fw_ipc.h"
+
+extern void custom_ipclib_firmware_out_of_reset(void);
+
+extern struct FW_IPC_Handler *custom_ipclib_get_fwipc(void);
+extern void *custom_ipclib_get_sync_message_area(void);
+
+extern void custom_ipclib_firmware_setup(void);
+extern void custom_ipclib_firmware_ready(void);
+
+extern int custom_ipclib_firmware_is_sync_command_requested(void);
+extern void custom_ipclib_firmware_ack_sync_command(void);
+
+void custom_ipclib_memcpy_to_host_mem(
+ void *to,
+ const void *from,
+ int size );
+
+void custom_ipclib_memcpy_from_host_mem(
+ void *to,
+ const void *from,
+ int size );
+
+#endif /* IPC_FW_CUSTOM_H */
diff --git a/mix_vbp/viddec_fw/fw/parser/include/viddec_emitter.h b/mix_vbp/viddec_fw/fw/parser/include/viddec_emitter.h
new file mode 100644
index 0000000..bb96bab
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/include/viddec_emitter.h
@@ -0,0 +1,96 @@
+#ifndef VIDDEC_EMITTER_H
+#define VIDDEC_EMITTER_H
+
+#include <stdint.h>
+#ifndef HOST_ONLY
+#define DDR_MEM_MASK 0x80000000
+#else
+#define DDR_MEM_MASK 0x0
+#endif
+#include "viddec_fw_workload.h"
+#include "viddec_fw_common_defs.h"
+#include "viddec_fw_debug.h"
+
+typedef struct
+{
+ viddec_workload_t *data;
+ uint32_t max_items;
+ uint32_t num_items;
+ uint32_t result;
+}viddec_emitter_wkld;
+
+typedef struct
+{
+ viddec_emitter_wkld cur;
+ viddec_emitter_wkld next;
+}viddec_emitter;
+
+/*
+ whats this for? Emitting current tag for ES buffer
+*/
+int32_t viddec_emit_assoc_tag(viddec_emitter *emit, uint32_t id, uint32_t using_next);
+
+int32_t viddec_emit_contr_tag(viddec_emitter *emit, viddec_input_buffer_t *ibuf, uint8_t incomplete, uint32_t using_next);
+
+int32_t viddec_emit_flush_current_wkld(viddec_emitter *emit);
+
+int32_t viddec_emit_append(viddec_emitter_wkld *cxt, viddec_workload_item_t *item);
+
+/*
+ Init function for setting up emitter context.
+*/
+static inline void viddec_emit_init(viddec_emitter *cxt)
+{
+ cxt->cur.data = cxt->next.data = 0;
+ cxt->cur.max_items = cxt->next.max_items = 0;
+ cxt->cur.num_items = cxt->next.num_items = 0;
+ cxt->cur.result = cxt->next.result = VIDDEC_FW_WORKLOAD_SUCCESS;
+}
+
+static inline void viddec_emit_update(viddec_emitter *cxt, uint32_t cur, uint32_t next, uint32_t cur_size, uint32_t next_size)
+{
+ cxt->cur.data = (cur != 0) ? (viddec_workload_t *)(cur | DDR_MEM_MASK) : NULL;
+ cxt->next.data = (next != 0) ? (viddec_workload_t *)(next | DDR_MEM_MASK): NULL;
+ cxt->cur.max_items = (cur_size - sizeof(viddec_workload_t))/sizeof(viddec_workload_item_t);
+ cxt->next.max_items = (next_size - sizeof(viddec_workload_t))/sizeof(viddec_workload_item_t);
+}
+
+static inline void viddec_emit_time(viddec_emitter *cxt, uint32_t time)
+{
+ viddec_emitter_wkld *cur;
+ cur = &(cxt->cur);
+ cur->data->time = time;
+}
+
+static inline void viddec_emit_set_codec(viddec_emitter *emit, uint32_t codec_type)
+{
+ emit->cur.data->codec = codec_type;
+}
+
+static inline void viddec_emit_set_codec_errors(viddec_emitter *emit, uint32_t codec_error)
+{
+ emit->cur.result |= codec_error;
+ WRITE_SVEN(SVEN_MODULE_EVENT_GV_FW_PM_WORKLOAD_STATUS, (int)emit->cur.result, (int)emit->cur.data,
+ (int)emit->cur.num_items, 0, 0, 0);
+}
+
+static inline void viddec_emit_set_workload_error(viddec_emitter *emit, uint32_t error, uint32_t using_next)
+{
+ viddec_emitter_wkld *cur_wkld;
+ cur_wkld = (using_next == false)? &(emit->cur):&(emit->next);
+ cur_wkld->result |= error;
+ WRITE_SVEN(SVEN_MODULE_EVENT_GV_FW_PM_WORKLOAD_STATUS, (int)cur_wkld->result, (int)cur_wkld->data,
+ (int)cur_wkld->num_items, using_next, 0, 0);
+}
+
+static inline void viddec_emit_set_inband_tag(viddec_emitter *emit, uint32_t type, uint32_t using_next)
+{
+ viddec_emitter_wkld *cur_wkld;
+ viddec_workload_item_t item;
+ cur_wkld = (using_next == false)? &(emit->cur):&(emit->next);
+ item.vwi_type = type;
+ item.vwi_payload[0] = item.vwi_payload[1] = item.vwi_payload[2] = 0;
+ viddec_emit_append(cur_wkld, &item);
+}
+
+#endif /* VIDDEC_EMITTER_H */
diff --git a/mix_vbp/viddec_fw/fw/parser/include/viddec_fw_debug.h b/mix_vbp/viddec_fw/fw/parser/include/viddec_fw_debug.h
new file mode 100644
index 0000000..cccc437
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/include/viddec_fw_debug.h
@@ -0,0 +1,80 @@
+#ifndef VIDDEC_FW_DEBUG_H
+#define VIDDEC_FW_DEBUG_H
+
+//#define SWAP_BYTE(x,y,z) (( ( (x)>>(8*y))& 0xFF) << (8*z))
+#define SWAP_BYTE(x,y,z) (( ( (x) >> ((y) << 3))& 0xFF) << ((z) << 3))
+#define SWAP_WORD(x) ( SWAP_BYTE((x),0,3) | SWAP_BYTE((x),1,2) |SWAP_BYTE((x),2,1) |SWAP_BYTE((x),3,0))
+
+#ifndef VBP
+
+#ifndef HOST_ONLY
+#define _OSAL_IO_MEMMAP_H /* to prevent errors when including sven_devh.h */
+#define _OSAL_ASSERT_H /* to prevent errors when including sven_devh.h */
+#endif
+#include <stdint.h>
+#include "viddec_debug.h"
+#include "sven_devh.h"
+#include "auto_eas/gen4_gv.h"
+
+#ifdef HOST_ONLY
+#define DUMP_TO_MEM(x) DEB("0x%.08X ",x);
+#define WRITE_SVEN(event, p1, p2, p3, p4, p5, p6) DEB("Sven evnt=0x%.8X p1=%d p2=%d p3=%d p4=%d p5=%d p6=%d\n",event, p1, p2, p3, p4, p5, p6)
+#define read_ret(x)
+#define read_fp(x)
+#define read_sp(x)
+#define read_wim(x)
+#define read_psr(x)
+#else
+extern uint32_t dump_ptr;
+/* Macros for Dumping data to DDR */
+#define DUMP_TO_MEM(x) ((volatile unsigned int *)0x8F000000)[dump_ptr++] = SWAP_WORD(x);
+#define read_ret(x) asm("mov %%i7, %0\n":"=r" (x))
+#define read_fp(x) asm("mov %%i6, %0\n":"=r" (x))
+#define read_sp(x) asm("mov %%sp, %0\n":"=r" (x))
+#define read_wim(x) asm("mov %%wim, %0\n":"=r" (x))
+#define read_psr(x) asm("mov %%psr, %0\n":"=r" (x))
+#define WRITE_SVEN(event, p1, p2, p3, p4, p5, p6) devh_SVEN_WriteModuleEvent( NULL, event, p1, p2, p3, p4, p5, p6)
+#endif
+
+#else // VBP is defined
+
+#include <stdint.h>
+#include "viddec_debug.h"
+#define DUMP_TO_MEM(x)
+#define WRITE_SVEN(event, p1, p2, p3, p4, p5, p6)
+#define read_ret(x)
+#define read_fp(x)
+#define read_sp(x)
+#define read_wim(x)
+#define read_psr(x)
+
+
+#endif
+
+static inline void DEBUG_WRITE(uint32_t p1, uint32_t p2, uint32_t p3, uint32_t p4, uint32_t p5, uint32_t p6)
+{
+ //uint32_t temp;
+ DUMP_TO_MEM(0xCACAFEED);
+ DUMP_TO_MEM(p1);
+ DUMP_TO_MEM(p2);
+ DUMP_TO_MEM(p3);
+ DUMP_TO_MEM(p4);
+ DUMP_TO_MEM(p5);
+ DUMP_TO_MEM(p6);
+ DUMP_TO_MEM(0xCACA0000);
+ //temp = dump_ptr;
+ //DUMP_TO_MEM(temp);
+}
+static inline void DUMP_SPARC_REG(void)
+{
+ uint32_t ret1, fp, sp, wim, psr;
+ read_ret(ret1);
+ read_fp(fp);
+ read_sp(sp);
+ read_wim(wim);
+ read_psr(psr);
+ //crash = (uint32_t *)0x1000bf0c;
+ //DEBUG_WRITE(sp, wim, fp, ret1, (*crash), 0xFED);
+ DEBUG_WRITE(sp, wim, fp, ret1, psr, 0xFFFFFFFF);
+}
+#endif
diff --git a/mix_vbp/viddec_fw/fw/parser/include/viddec_fw_parser_fw_ipc.h b/mix_vbp/viddec_fw/fw/parser/include/viddec_fw_parser_fw_ipc.h
new file mode 100644
index 0000000..a77b645
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/include/viddec_fw_parser_fw_ipc.h
@@ -0,0 +1,194 @@
+/*
+
+ This file is provided under a dual BSD/GPLv2 license. When using or
+ redistributing this file, you may do so under either license.
+
+ GPL LICENSE SUMMARY
+
+ Copyright(c) 2005-2008 Intel Corporation. All rights reserved.
+
+ This program is free software; you can redistribute it and/or modify
+ it under the terms of version 2 of the GNU General Public License as
+ published by the Free Software Foundation.
+
+ This program is distributed in the hope that it will be useful, but
+ WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program; if not, write to the Free Software
+ Foundation, Inc., 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA.
+ The full GNU General Public License is included in this distribution
+ in the file called LICENSE.GPL.
+
+ Contact Information:
+ Intel Corporation
+ 2200 Mission College Blvd.
+ Santa Clara, CA 97052
+
+ BSD LICENSE
+
+ Copyright(c) 2005-2008 Intel Corporation. All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in
+ the documentation and/or other materials provided with the
+ distribution.
+ * Neither the name of Intel Corporation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+*/
+#ifndef VIDDEC_FW_PARSER_FW_IPC_H
+#define VIDDEC_FW_PARSER_FW_IPC_H 1
+
+#include "viddec_fw_parser_ipclib.h"
+
+/** Generic Firmware-to-host Message Send Queue */
+typedef struct
+{
+ struct IPC_MsgQueue mq; /* local MSGQueue handle */
+} FW_IPC_SendQue;
+
+/** Generic Host-to-Firmware Message Receive Queue */
+typedef struct
+{
+ struct IPC_MsgQueue mq; /* local MSGQueue handle */
+} FW_IPC_ReceiveQue;
+
+typedef struct
+{
+ unsigned int state;
+ unsigned int priority;
+}FW_IPC_stream_info;
+
+/* ---------------------------------------------------------------------- */
+/* ---------------------------------------------------------------------- */
+
+typedef struct
+{
+ /** Synchronous Message Buffer, shared between host and firmware */
+ volatile char *sync_msg_buf;
+
+ /** WARNING: EACH OF THESE STRUCTS MUST BE 8 BYTE ALIGNED */
+ FW_IPC_SendQue snd_q[CONFIG_IPC_HOST_MAX_RX_QUEUES];
+
+ /** WARNING: EACH OF THESE STRUCTS MUST BE 8 BYTE ALIGNED */
+ FW_IPC_ReceiveQue rcv_q[CONFIG_IPC_FW_MAX_RX_QUEUES];
+ /** WARNING: EACH OF THESE STRUCTS MUST BE 8 BYTE ALIGNED */
+ FW_IPC_ReceiveQue wkld_q[CONFIG_IPC_FW_MAX_RX_QUEUES];
+
+ /** FIRMWARE_TO_HOST Message Queues (outbound) */
+ struct _IPC_QueueHeader *snd_q_shared[CONFIG_IPC_HOST_MAX_RX_QUEUES];
+ /** HOST_TO_FIRMWARE Message Queues (inbound) */
+ struct _IPC_QueueHeader *rcv_q_shared[CONFIG_IPC_FW_MAX_RX_QUEUES];
+ /** HOST_TO_FIRMWARE Message Queues (inbound) */
+ struct _IPC_QueueHeader *wkld_q_shared[CONFIG_IPC_FW_MAX_RX_QUEUES];
+ /** Actual qheaders allocated in FW memory */
+ struct _IPC_QueueHeader snd_qh[CONFIG_IPC_HOST_MAX_RX_QUEUES];
+ struct _IPC_QueueHeader rcv_qh[CONFIG_IPC_FW_MAX_RX_QUEUES];
+ struct _IPC_QueueHeader wkld_qh[CONFIG_IPC_FW_MAX_RX_QUEUES];
+
+ /** Stream-related info like priority */
+ FW_IPC_stream_info strm_info[CONFIG_IPC_FW_MAX_RX_QUEUES];
+
+ unsigned int one_msg_size;
+ unsigned char one_msg[CONFIG_IPC_MESSAGE_MAX_SIZE];
+} FW_IPC_Handle;
+
+/*@}*/
+
+/** @weakgroup Host IPC Functions */
+/** @ingroup fw_ipc */
+/*@{*/
+
+/**
+This function allows us to check and see if there's space available on the send queue(output) of fw
+for the message of size(message_size). It also provides us the amount of space available.
+@param[in] fwipc : Ipc handle.
+@param[in] message_size : size of message that we want to write.
+@param[out] bytes : returns the amount of space available for writing.
+@retval 0 : if space is not available for current message.
+@retval 1 : if space is available for current message.
+*/
+int FwIPC_SpaceAvailForMessage(FW_IPC_Handle *fwipc, FW_IPC_SendQue *snd_q, unsigned int message_size, unsigned int *bytes);
+
+/**
+This function writes the message of message_size into queue(host_rx_queue).
+@param[in] fwipc : Ipc handle.
+@param[in] host_rx_queue : id of the queue that needs to be written.
+@param[in] message : Message that we want to write.
+@param[in] message_size : size of message that we want to write.
+@retval 0 : if write fails.
+@retval 1 : if write succeeds.
+*/
+int FwIPC_SendMessage(FW_IPC_Handle *fwipc, unsigned int host_rx_queue, const char *message, unsigned int message_size );
+
+/**
+This function reads a message(which is <= max_message_size) from rcv_queue of firmware into input parameter message.
+@param[in] fwipc : Ipc handle.
+@param[in] rcv_q : Receive queue to read from.
+@param[out] message : Message that we want to read.
+@param[in] max_message_size : max possible size of the message.
+@retval : The size of message that was read.
+*/
+int FwIPC_ReadMessage(FW_IPC_Handle *fwipc, FW_IPC_ReceiveQue *rcv_q, char *message, unsigned int max_message_size );
+
+/**
+This function Initialises shared queue headers and sync command buffer for IPC.
+@param[in] fwipc : Ipc handle.
+@param[in] synchronous_command_buffer : update handle with pointer to shared memory
+ between host and FW.
+@retval 0 : if initialization succeeds.
+*/
+int FwIPC_Initialize(FW_IPC_Handle *fwipc, volatile char *synchronous_command_buffer );
+
+/**
+This function Initialises Sendqueue with circular buffer which has actual data.
+@param[in] fwipc : Ipc handle.
+@param[in] snd_q : Send queue that needs to be initialized.
+@param[in] snd_circbuf : Address of circular buffer.
+*/
+void FWIPC_SendQueue_Init(FW_IPC_Handle *fwipc, FW_IPC_SendQue *snd_q, void *snd_circbuf );
+
+/**
+This function Initialises Recvqueue with circular buffer which has actual data.
+@param[in] fwipc : Ipc handle.
+@param[in] rcv_q : Receive queue that needs to be initialized.
+@param[in] rcv_circbuf : Address of circular buffer.
+*/
+void FwIPC_ReceiveQueue_Init(FW_IPC_Handle *fwipc, FW_IPC_ReceiveQue *rcv_q, void *rcv_circbuf );
+
+/**
+This function reads the nth(index) message(which is <= max_message_size ) from rcv_queue of firmware into input parameter message
+by peeking the queue.
+@param[in] fwipc : Ipc handle.
+@param[in] rcv_q : Receive queue to read from.
+@param[out] message : Message that we want to read.
+@param[in] max_message_size : max possible size of the message.
+@param[in] index : nth message(index >=0).
+@retval : The size of message that was read.
+*/
+int FwIPC_PeekReadMessage(FW_IPC_Handle *fwipc, FW_IPC_ReceiveQue *rcv_q, char *message, unsigned int max_message_size, unsigned int index );
+
+/*@}*/
+#endif /* VIDDEC_FW_PARSER_FW_IPC_H */
diff --git a/mix_vbp/viddec_fw/fw/parser/include/viddec_h264_parse.h b/mix_vbp/viddec_fw/fw/parser/include/viddec_h264_parse.h
new file mode 100644
index 0000000..4712be7
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/include/viddec_h264_parse.h
@@ -0,0 +1,6 @@
+#ifndef VIDDEC_H264_PARSE_H
+#define VIDDEC_H264_PARSE_H
+
+void viddec_h264_get_ops(viddec_parser_ops_t *ops);
+
+#endif
diff --git a/mix_vbp/viddec_fw/fw/parser/include/viddec_mp4_parse.h b/mix_vbp/viddec_fw/fw/parser/include/viddec_mp4_parse.h
new file mode 100644
index 0000000..e3e795a
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/include/viddec_mp4_parse.h
@@ -0,0 +1,6 @@
+#ifndef VIDDEC_MP4_PARSE_H
+#define VIDDEC_MP4_PARSE_H
+
+void viddec_mp4_get_ops(viddec_parser_ops_t *ops);
+
+#endif
diff --git a/mix_vbp/viddec_fw/fw/parser/include/viddec_mpeg2_parse.h b/mix_vbp/viddec_fw/fw/parser/include/viddec_mpeg2_parse.h
new file mode 100644
index 0000000..7c0efea
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/include/viddec_mpeg2_parse.h
@@ -0,0 +1,6 @@
+#ifndef VIDDEC_MPEG2_PARSE_H
+#define VIDDEC_MPEG2_PARSE_H
+
+void viddec_mpeg2_get_ops(viddec_parser_ops_t *ops);
+
+#endif
diff --git a/mix_vbp/viddec_fw/fw/parser/include/viddec_parser_ops.h b/mix_vbp/viddec_fw/fw/parser/include/viddec_parser_ops.h
new file mode 100644
index 0000000..a61e340
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/include/viddec_parser_ops.h
@@ -0,0 +1,106 @@
+#ifndef VIDDEC_PARSER_OPS_H
+#define VIDDEC_PARSER_OPS_H
+
+#include "viddec_fw_workload.h"
+
+#define VIDDEC_PARSE_INVALID_POS 0xFFFFFFFF
+
+typedef enum
+{
+ VIDDEC_PARSE_EOS = 0x0FFF, /* Dummy start code to force EOS */
+ VIDDEC_PARSE_DISCONTINUITY, /* Dummy start code to force completion and flush */
+}viddec_parser_inband_messages_t;
+
+typedef struct
+{
+ uint32_t context_size;
+ uint32_t persist_size;
+}viddec_parser_memory_sizes_t;
+
+typedef struct
+{
+ void (*init)(void *ctxt, uint32_t *persist, uint32_t preserve);
+ uint32_t (*parse_sc) (void *ctxt, void *pcxt, void *sc_state);
+ uint32_t (*parse_syntax) (void *parent, void *ctxt);
+ void (*get_cxt_size) (viddec_parser_memory_sizes_t *size);
+ uint32_t (*is_wkld_done)(void *parent, void *ctxt, uint32_t next_sc, uint32_t *codec_specific_errors);
+ uint32_t (*is_frame_start)(void *ctxt);
+ uint32_t (*gen_contrib_tags)(void *parent, uint32_t ignore_partial);
+ uint32_t (*gen_assoc_tags)(void *parent);
+}viddec_parser_ops_t;
+
+
+typedef enum
+{
+ VIDDEC_PARSE_ERROR = 0xF0,
+ VIDDEC_PARSE_SUCESS = 0xF1,
+ VIDDEC_PARSE_FRMDONE = 0xF2,
+}viddec_parser_error_t;
+
+/*
+ *
+ *Functions used by Parsers
+ *
+ */
+
+/* This function returns the requested number of bits(<=32) and increments au byte position.
+ */
+int32_t viddec_pm_get_bits(void *parent, uint32_t *data, uint32_t num_bits);
+
+/* This function returns the requested number of bits(<=32) without incrementing the au byte position
+ */
+int32_t viddec_pm_peek_bits(void *parent, uint32_t *data, uint32_t num_bits);
+
+/* This function skips requested number of bits(<=32) by incrementing au byte position.
+ */
+int32_t viddec_pm_skip_bits(void *parent, uint32_t num_bits);
+
+/* This function appends a work item to current workload.
+ */
+int32_t viddec_pm_append_workitem(void *parent, viddec_workload_item_t *item);
+
+/* This function appends a work item to next workload.
+ */
+int32_t viddec_pm_append_workitem_next(void *parent, viddec_workload_item_t *item);
+
+/* This function gets current byte and bit positions and information on whether an emulation byte is present after
+current byte.
+ */
+int32_t viddec_pm_get_au_pos(void *parent, uint32_t *bit, uint32_t *byte, unsigned char *is_emul);
+
+/* This function appends Pixel tag to current work load starting from current position to end of au unit.
+ */
+int32_t viddec_pm_append_pixeldata(void *parent);
+
+/* This function appends Pixel tag to next work load starting from current position to end of au unit.
+ */
+int32_t viddec_pm_append_pixeldata_next(void *parent);
+
+/* This function provides the workload header for parsers to fill in attribute values
+ */
+viddec_workload_t* viddec_pm_get_header(void *parent);
+
+/* This function provides the next workload header for parsers to fill in attribute values
+ */
+viddec_workload_t* viddec_pm_get_next_header(void *parent);
+
+/* Returns the current byte value where offset is on */
+uint32_t viddec_pm_get_cur_byte(void *parent, uint8_t *byte);
+
+/* Tells us if there is more data that needs to be parsed */
+int32_t viddec_pm_is_nomoredata(void *parent);
+
+/* This function appends misc tag to work load starting from start position to end position of au unit */
+int32_t viddec_pm_append_misc_tags(void *parent, uint32_t start, uint32_t end, viddec_workload_item_t *wi, uint32_t using_next);
+
+void viddec_pm_set_next_frame_error_on_eos(void *parent, uint32_t error);
+
+void viddec_pm_set_late_frame_detect(void *parent);
+
+static inline void viddec_fw_reset_workload_item(viddec_workload_item_t *wi)
+{
+ wi->vwi_payload[0] = wi->vwi_payload[1] = wi->vwi_payload[2] = 0;
+}
+
+void viddec_pm_setup_userdata(viddec_workload_item_t *wi);
+#endif
diff --git a/mix_vbp/viddec_fw/fw/parser/include/viddec_pm.h b/mix_vbp/viddec_fw/fw/parser/include/viddec_pm.h
new file mode 100644
index 0000000..6d1d2be
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/include/viddec_pm.h
@@ -0,0 +1,95 @@
+#ifndef VIDDEC_PM_H
+#define VIDDEC_PM_H
+
+#include <stdint.h>
+#include "viddec_emitter.h"
+#include "viddec_pm_utils_list.h"
+#include "viddec_pm_utils_bstream.h"
+#include "viddec_pm_parse.h"
+#include "viddec_parser_ops.h"
+
+#define SC_DETECT_BUF_SIZE 1024
+#define MAX_CODEC_CXT_SIZE 4096
+
+typedef enum
+{
+ PM_SUCCESS = 0,
+ /* Messages to indicate more ES data */
+ PM_NO_DATA = 0x100,
+ /* Messages to indicate SC found */
+ PM_SC_FOUND = 0x200,
+ PM_FIRST_SC_FOUND = 0x201,
+ /* Messages to indicate Frame done */
+ PM_WKLD_DONE = 0x300,
+ /* Messages to indicate Error conditions */
+ PM_OVERFLOW = 0x400,
+ /* Messages to indicate inband conditions */
+ PM_INBAND_MESSAGES = 0x500,
+ PM_EOS = 0x501,
+ PM_DISCONTINUITY = 0x502,
+}pm_parse_state_t;
+
+/* This is a temporary structure for first pass sc parsing. index tells us where we are in list of es buffers
+ cur_es points to current es buffer we are parsing. */
+typedef struct
+{
+ int32_t list_index; /* current index of list */
+ uint32_t cur_offset;
+ uint32_t cur_size;
+ viddec_input_buffer_t *cur_es;
+}viddec_pm_sc_cur_buf_t;
+
+typedef struct
+{
+ uint32_t pending_tags[MAX_IBUFS_PER_SC];
+ uint8_t dummy;
+ uint8_t frame_done;
+ uint8_t first_buf_aligned;
+ uint8_t using_next;
+}vidded_pm_pending_tags_t;
+
+/* This structure holds all necessary data required by parser manager for stream parsing.
+ */
+typedef struct
+{
+ /* Actual buffer where data gets DMA'd. 8 padding bytes for alignment */
+ uint8_t scbuf[SC_DETECT_BUF_SIZE + 8];
+ viddec_sc_parse_cubby_cxt_t parse_cubby;
+ viddec_pm_utils_list_t list;
+ /* Place to store tags to be added to next to next workload */
+ viddec_pm_sc_cur_buf_t cur_buf;
+ viddec_emitter emitter;
+ viddec_pm_utils_bstream_cxt_t getbits;
+ viddec_sc_prefix_state_t sc_prefix_info;
+ vidded_pm_pending_tags_t pending_tags;
+ uint8_t word_align_dummy;
+ uint8_t late_frame_detect;
+ uint8_t frame_start_found;
+ uint8_t found_fm_st_in_current_au;
+ uint32_t next_workload_error_eos;
+ uint32_t pending_inband_tags;
+#ifdef VBP
+ uint32_t codec_data[MAX_CODEC_CXT_SIZE<<3];
+#else
+ uint32_t codec_data[MAX_CODEC_CXT_SIZE>>2];
+#endif
+}viddec_pm_cxt_t;
+
+/*
+ *
+ * Functions used by Parser kernel
+ *
+ */
+
+/* This is for initialising parser manager context to default values */
+void viddec_pm_init_context(viddec_pm_cxt_t *cxt, uint32_t codec_type, uint32_t *persist_mem, uint32_t clean);
+
+/* This is the main parse function which returns state information that parser kernel can understand.*/
+uint32_t viddec_pm_parse_es_buffer(viddec_pm_cxt_t *cxt, uint32_t codec_type, viddec_input_buffer_t *es_buf);
+
+void viddec_pm_init_ops();
+
+void viddec_pm_update_time(viddec_pm_cxt_t *cxt, uint32_t time);
+
+uint32_t viddec_pm_get_parser_sizes(uint32_t codec_type, viddec_parser_memory_sizes_t *size);
+#endif
diff --git a/mix_vbp/viddec_fw/fw/parser/include/viddec_pm_parse.h b/mix_vbp/viddec_fw/fw/parser/include/viddec_pm_parse.h
new file mode 100644
index 0000000..703d65d
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/include/viddec_pm_parse.h
@@ -0,0 +1,24 @@
+#ifndef VIDDEC_PM_PARSE_H
+#define VIDDEC_PM_PARSE_H
+
+#include <stdint.h>
+/* This structure is used by first pass parsing(sc detect), the pm passes information on number of bytes
+ that needs to be parsed and if start code found then sc_end_pos contains the index of last sc code byte
+ in the current buffer */
+typedef struct
+{
+ uint32_t size; /* size pointed to by buf */
+ uint8_t *buf; /* ptr to data */
+ int32_t sc_end_pos; /* return value end position of sc */
+ uint32_t phase; /* phase information(state) for sc */
+}viddec_sc_parse_cubby_cxt_t;
+
+typedef struct
+{
+ uint16_t next_sc;
+ uint8_t second_scprfx_length;
+ uint8_t first_sc_detect;
+}viddec_sc_prefix_state_t;
+
+uint32_t viddec_parse_sc(void *in, void *pcxt, void *sc_state);
+#endif
diff --git a/mix_vbp/viddec_fw/fw/parser/include/viddec_pm_tags.h b/mix_vbp/viddec_fw/fw/parser/include/viddec_pm_tags.h
new file mode 100644
index 0000000..f035e53
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/include/viddec_pm_tags.h
@@ -0,0 +1,17 @@
+#ifndef VIDDEC_PM_TAGS_H
+#define VIDDEC_PM_TAGS_H
+
+#include "viddec_pm.h"
+#include "viddec_emitter.h"
+
+/* Define to initialize temporary association list */
+#define INVALID_ENTRY ((uint32_t) -1)
+
+void viddec_pm_generate_tags_for_unused_buffers_to_flush(viddec_pm_cxt_t *cxt);
+uint32_t viddec_generic_add_association_tags(void *parent);
+uint32_t viddec_h264_add_association_tags(void *parent);
+uint32_t viddec_mpeg2_add_association_tags(void *parent);
+uint32_t viddec_pm_lateframe_generate_contribution_tags(void *parent, uint32_t ignore_partial);
+uint32_t viddec_pm_generic_generate_contribution_tags(void *parent, uint32_t ignore_partial);
+uint32_t viddec_pm_generate_missed_association_tags(viddec_pm_cxt_t *cxt, uint32_t using_next);
+#endif
diff --git a/mix_vbp/viddec_fw/fw/parser/include/viddec_pm_utils_bstream.h b/mix_vbp/viddec_fw/fw/parser/include/viddec_pm_utils_bstream.h
new file mode 100644
index 0000000..1971a36
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/include/viddec_pm_utils_bstream.h
@@ -0,0 +1,81 @@
+#ifndef VIDDEC_PM_UTILS_BSTREAM_H
+#define VIDDEC_PM_UTILS_BSTREAM_H
+
+#include "viddec_pm_utils_list.h"
+
+#define CUBBY_SIZE 1024
+//#define CUBBY_SIZE 512
+#define SCRATCH_SIZE 20
+#define MIN_DATA 8
+
+typedef struct
+{
+#ifdef VBP
+ uint8_t *buf;
+#else
+ uint8_t buf[CUBBY_SIZE + 8 + MIN_DATA];/* extra 8 bytes for alignment, extra 8 bytes for old data */
+#endif
+ uint32_t buf_st; /* start pos in buf */
+ uint32_t buf_end; /* first invalid byte in buf */
+ uint32_t buf_index; /* current index in buf */
+ uint32_t buf_bitoff; /* bit offset in current index position */
+}viddec_pm_utils_bstream_buf_cxt_t;
+
+typedef struct
+{
+ uint8_t buf_scratch[SCRATCH_SIZE];/* scratch for boundary reads*/
+ uint32_t st; /* start index of valid byte */
+ uint32_t size;/* Total number of bytes in current buffer */
+ uint32_t bitoff; /* bit offset in first valid byte */
+}viddec_pm_utils_bstream_scratch_cxt_t;
+
+typedef struct
+{
+#ifdef VBP
+ /* counter of emulation prevention bytes */
+ uint32_t emulation_byte_counter;
+#endif
+ /* After First pass of scan we figure out how many bytes are in the current access unit(N bytes). We store
+ the bstream buffer's first valid byte index wrt the access unit in this variable */
+ uint32_t au_pos;
+ /* This is for keeping track of which list item was used to load data last */
+ uint32_t list_off;
+ /* This is for tracking emulation prevention bytes */
+ uint32_t phase;
+ /* This flag tells us whether to look for emulation prevention or not */
+ uint32_t is_emul_reqd;
+ /* A pointer to list of es buffers which contribute to current access unit */
+ viddec_pm_utils_list_t *list;
+ /* scratch buffer to stage data on boundaries and reloads */
+ viddec_pm_utils_bstream_scratch_cxt_t scratch;
+ /* Actual context which has valid data for get bits functionality */
+ viddec_pm_utils_bstream_buf_cxt_t bstrm_buf;
+}viddec_pm_utils_bstream_cxt_t;
+
+void viddec_pm_utils_bstream_init(viddec_pm_utils_bstream_cxt_t *cxt, viddec_pm_utils_list_t *list, uint32_t is_emul);
+
+int32_t viddec_pm_utils_bstream_skipbits(viddec_pm_utils_bstream_cxt_t *cxt, uint32_t num_bits);
+
+int32_t viddec_pm_utils_bstream_peekbits(viddec_pm_utils_bstream_cxt_t *cxt, uint32_t *out, uint32_t num_bits, uint8_t skip);
+
+int32_t viddec_pm_utils_bstream_get_current_byte(viddec_pm_utils_bstream_cxt_t *cxt, uint8_t *byte);
+
+uint8_t viddec_pm_utils_bstream_nomoredata(viddec_pm_utils_bstream_cxt_t *cxt);
+
+uint8_t viddec_pm_utils_bstream_nomorerbspdata(viddec_pm_utils_bstream_cxt_t *cxt);
+
+static inline void viddec_pm_utils_bstream_get_au_offsets(viddec_pm_utils_bstream_cxt_t *cxt, uint32_t *bit, uint32_t *byte, uint8_t *is_emul)
+{
+ uint32_t phase=cxt->phase;
+
+ *bit = cxt->bstrm_buf.buf_bitoff;
+ *byte = cxt->au_pos + (cxt->bstrm_buf.buf_index - cxt->bstrm_buf.buf_st);
+ if(cxt->phase > 0)
+ {
+ phase = phase - ((cxt->bstrm_buf.buf_bitoff != 0)? 1: 0 );
+ }
+ *is_emul = (cxt->is_emul_reqd) && (phase > 0) &&
+ (cxt->bstrm_buf.buf[cxt->bstrm_buf.buf_index] == 0) &&
+ (cxt->bstrm_buf.buf[cxt->bstrm_buf.buf_index+1] == 0x3);
+}
+#endif
diff --git a/mix_vbp/viddec_fw/fw/parser/include/viddec_pm_utils_list.h b/mix_vbp/viddec_fw/fw/parser/include/viddec_pm_utils_list.h
new file mode 100644
index 0000000..98f2d46
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/include/viddec_pm_utils_list.h
@@ -0,0 +1,51 @@
+#ifndef VIDDEC_PM_COMMON_LIST_H
+#define VIDDEC_PM_COMMON_LIST_H
+
+#include "viddec_emitter.h"
+
+/* Limitation:This is the maximum numbers of es buffers between start codes. Needs to change if we encounter
+ a case where this is not sufficient */
+#ifdef VBP
+#define MAX_IBUFS_PER_SC 512
+#else
+#define MAX_IBUFS_PER_SC 64
+#endif
+
+/* This structure is for storing information on byte position in the current access unit.
+ stpos is the au byte index of first byte in current es buffer.edpos is the au byte index+1 of last
+ valid byte in current es buffer.*/
+typedef struct
+{
+ uint32_t stpos;
+ uint32_t edpos;
+}viddec_pm_utils_au_bytepos_t;
+
+/* this structure is for storing all necessary information for list handling */
+typedef struct
+{
+ uint16_t num_items; /* Number of buffers in List */
+ uint16_t first_scprfx_length; /* Length of first sc prefix in this list */
+ int32_t start_offset; /* starting offset of unused data including sc prefix in first buffer */
+ int32_t end_offset; /* Offset of unused data in last buffer including 2nd sc prefix */
+ viddec_input_buffer_t sc_ibuf[MAX_IBUFS_PER_SC]; /* Place to store buffer descriptors */
+ viddec_pm_utils_au_bytepos_t data[MAX_IBUFS_PER_SC]; /* place to store au byte positions */
+ int32_t total_bytes; /* total bytes for current access unit including first sc prefix*/
+}viddec_pm_utils_list_t;
+
+/* This function initialises the list to default values */
+void viddec_pm_utils_list_init(viddec_pm_utils_list_t *cxt);
+
+/* This function adds a new entry to list and will emit tags if needed */
+uint32_t viddec_pm_utils_list_addbuf(viddec_pm_utils_list_t *list, viddec_input_buffer_t *es_buf);
+
+/* This function updates au byte position of the current list. This should be called after sc codes are detected and before
+ syntax parsing as get bits requires this to be initialized. */
+void viddec_pm_utils_list_updatebytepos(viddec_pm_utils_list_t *list, uint8_t sc_prefix_length);
+
+/* This function walks through the list and removes consumed buffers based on total bytes. It then moves
+ unused entries to the top of the list. */
+void viddec_pm_utils_list_remove_used_entries(viddec_pm_utils_list_t *list, uint32_t length);
+
+/* this function returns 1 if the requested byte is not found. If found returns list and offset into list */
+uint32_t viddec_pm_utils_list_getbyte_position(viddec_pm_utils_list_t *list, uint32_t byte, uint32_t *list_index, uint32_t *offset);
+#endif
diff --git a/mix_vbp/viddec_fw/fw/parser/include/viddec_vc1_parse.h b/mix_vbp/viddec_fw/fw/parser/include/viddec_vc1_parse.h
new file mode 100644
index 0000000..c77aed1
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/include/viddec_vc1_parse.h
@@ -0,0 +1,6 @@
+#ifndef VIDDEC_VC1_PARSE_H
+#define VIDDEC_VC1_PARSE_H
+
+void viddec_vc1_get_ops(viddec_parser_ops_t *ops);
+
+#endif
diff --git a/mix_vbp/viddec_fw/fw/parser/main.c b/mix_vbp/viddec_fw/fw/parser/main.c
new file mode 100644
index 0000000..1bb368a
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/main.c
@@ -0,0 +1,608 @@
+#include "fw_pvt.h"
+#include "viddec_fw_parser_ipclib_config.h"
+#include "viddec_fw_common_defs.h"
+#include "viddec_fw_parser.h"
+#include "viddec_fw_debug.h"
+
+/* This define makes sure that the structure is stored in Local memory.
+    This is shared memory between host and FW.*/
+volatile dmem_t _dmem __attribute__ ((section (".exchange")));
+/* Debug index should be disabled for Production FW */
+uint32_t dump_ptr=0;
+/* NOTE(review): consumed by get_total_ticks() in utils.c under B0_TIMER_FIX --
+   appears to count watchdog rollover periods; confirm before reuse. */
+uint32_t timer=0;
+
+/* Auto Api definitions */
+/* Unmarshalling dispatch table for host->FW synchronous (auto api) commands. */
+ismd_api_group viddec_fw_api_array[2];
+
+extern void viddec_fw_parser_register_callbacks(void);
+
+/*------------------------------------------------------------------------------
+ * Function: initialize firmware SVEN TX Output
+ *------------------------------------------------------------------------------
+ */
+int SMDEXPORT viddec_fw_parser_sven_init(struct SVEN_FW_Globals *sven_fw_globals )
+{
+    extern int sven_fw_set_globals(struct SVEN_FW_Globals *fw_globals );
+    /* Forward the host-supplied SVEN globals to the trace library; returns its status. */
+    return(sven_fw_set_globals(sven_fw_globals));
+}
+
+/*------------------------------------------------------------------------------
+ * Function: viddec_fw_check_watermark_boundary
+ * This function figures out if we crossesd watermark boundary on input data.
+ * before represents the ES Queue data when we started and current represents ES Queue data
+ * when we are ready to swap.Threshold is the amount of data specified by the driver to trigger an
+ * interrupt.
+ * We return true if threshold is between before and current.
+ *------------------------------------------------------------------------------
+ */
static inline uint32_t viddec_fw_check_watermark_boundary(uint32_t before, uint32_t current, uint32_t threshold)
{
    /* True exactly when the ES queue level crossed the watermark downwards:
       it was at or above 'threshold' before parsing and is below it now. */
    uint32_t was_at_or_above = (before >= threshold);
    uint32_t is_now_below    = (current < threshold);
    return (was_at_or_above && is_now_below);
}
+
+/*------------------------------------------------------------------------------
+ * Function: viddec_fw_get_total_input_Q_data
+ * This function figures out how much data is available in input queue of the FW
+ *------------------------------------------------------------------------------
+ */
+static uint32_t viddec_fw_get_total_input_Q_data(uint32_t indx)
+{
+    FW_IPC_Handle *fwipc = GET_IPC_HANDLE(_dmem);
+    uint32_t ret;
+    int32_t pos=0;
+    FW_IPC_ReceiveQue *rcv_q;
+
+    rcv_q = &fwipc->rcv_q[indx];
+    /* count the cubby buffer which we already read if present */
+    ret = (_dmem.stream_info[indx].buffered_data) ? CONFIG_IPC_MESSAGE_MAX_SIZE:0;
+    /* add whatever is still unread in this stream's receive message queue */
+    ret += ipc_mq_read_avail(&rcv_q->mq, (int32_t *)&pos);
+    return ret;
+}
+
+/*------------------------------------------------------------------------------
+ * Function: mfd_round_robin
+ * Params:
+ * [in] pri: Priority of the stream
+ * [in] indx: stream id number of the last stream that was scheduled.
+ * [out] qnum: Stream id of priority(pri) which has data.
+ * This function is responsible for figuring out which stream needs to be scheduled next.
+ * It starts after the last scheduled stream and walks through all streams until it finds
+ * a stream which is of required priority, in start state, has space on output and data in
+ * input.
+ * If no such stream is found qnum is not updated and return value is 0.
+ * If a stream is found then qnum is updated with that id and function returns 1.
+ *------------------------------------------------------------------------------
+ */
+
+uint32_t mfd_round_robin(uint32_t pri, int32_t *qnum, int32_t indx)
+{
+    FW_IPC_Handle *fwipc = GET_IPC_HANDLE(_dmem);
+    int32_t i = CONFIG_IPC_FW_MAX_RX_QUEUES;
+    uint32_t ret = 0;
+    /* Go through all queues until we find a valid queue of reqd priority */
+    /* Scan starts at the stream AFTER 'indx' and wraps, so equal-priority
+       streams get fair time slices across successive calls. */
+    while(i>0)
+    {
+        indx++;
+        if(indx >= CONFIG_IPC_FW_MAX_RX_QUEUES) indx = 0;
+
+        /* We should look only at queues which match priority and
+           in running state */
+        if( (_dmem.stream_info[indx].state == 1)
+            && (_dmem.stream_info[indx].priority == pri))
+        {
+            uint32_t inpt_avail=0, output_avail=0, wklds_avail =0 , pos;
+            FW_IPC_ReceiveQue *rcv_q;
+            rcv_q = &fwipc->rcv_q[indx];
+            /* Input: either a partially consumed (buffered) ES buffer or unread queue data. */
+            inpt_avail = (_dmem.stream_info[indx].buffered_data > 0) || (ipc_mq_read_avail(&rcv_q->mq, (int32_t *)&pos) > 0);
+            /* we have to check for two workloads to protect against error cases where we might have to push both current and next workloads */
+            output_avail = FwIPC_SpaceAvailForMessage(fwipc, &fwipc->snd_q[indx], CONFIG_IPC_MESSAGE_MAX_SIZE, &pos) >= 2;
+            pos = 0;
+            /* Need at least current and next to proceed */
+            wklds_avail = (ipc_mq_read_avail(&fwipc->wkld_q[indx].mq, (int32_t *)&pos) >= (CONFIG_IPC_MESSAGE_MAX_SIZE << 1));
+            if(inpt_avail && output_avail && wklds_avail)
+            {/* Success condition: we have some data on input and enough space on output queue */
+                *qnum = indx;
+                ret =1;
+                break;
+            }
+        }
+        i--;
+    }
+    return ret;
+}
+static inline void mfd_setup_emitter(FW_IPC_Handle *fwipc, FW_IPC_ReceiveQue *rcv_q, mfd_pk_strm_cxt *cxt)
+{
+    int32_t ret1=0,ret=0;
+    /* We don't check return values for the peek as round robin guarantee's that we have required free workloads */
+    /* Peek (without consuming) the next two workload descriptors: current and next frame. */
+    ret = FwIPC_PeekReadMessage(fwipc, rcv_q, (char *)&(cxt->wkld1), sizeof(ipc_msg_data), 0);
+    ret1 = FwIPC_PeekReadMessage(fwipc, rcv_q, (char *)&(cxt->wkld2), sizeof(ipc_msg_data), 1);
+    /* Hand both workload buffers (physical address + length) to the emitter. */
+    viddec_emit_update(&(cxt->pm.emitter), cxt->wkld1.phys, cxt->wkld2.phys, cxt->wkld1.len, cxt->wkld2.len);
+}
+
+static inline void mfd_init_swap_memory(viddec_pm_cxt_t *pm, uint32_t codec_type, uint32_t start_addr, uint32_t clean)
+{
+    uint32_t *persist_mem;
+    /* Per-stream persistent memory lives in DDR at 'start_addr'.
+       NOTE(review): OR-ing GV_DDR_MEM_MASK appears to map the address into the
+       DDR aperture seen by the FW core -- confirm against the memory map. */
+    persist_mem = (uint32_t *)(start_addr | GV_DDR_MEM_MASK);
+    viddec_pm_init_context(pm,codec_type, persist_mem, clean);
+    /* Force the parser to treat the next start code as the first one of the stream. */
+    pm->sc_prefix_info.first_sc_detect = 1;
+    viddec_emit_init(&(pm->emitter));
+}
+
+/* Drive a debug value onto OMAR-visible wires (via the host doorbell register).
+   Compiled to a no-op outside RTL simulation builds. */
+void output_omar_wires( unsigned int value )
+{
+#ifdef RTL_SIMULATION
+    reg_write(CONFIG_IPC_ROFF_HOST_DOORBELL, value );
+#endif
+}
+
+/*------------------------------------------------------------------------------
+ * Function: viddec_fw_init_swap_memory
+ * This function is responsible for setting the swap memory to a good state for current stream.
+ * The swap parameter tells us whether we need to dma the context to local memory.
+ * We call init on emitter and parser manager which inturn calls init of the codec we are opening the stream for.
+ *------------------------------------------------------------------------------
+ */
+
+void viddec_fw_init_swap_memory(unsigned int stream_id, unsigned int swap, unsigned int clean)
+{
+    mfd_pk_strm_cxt *cxt;
+    mfd_stream_info *cxt_swap;
+    cxt = (mfd_pk_strm_cxt *)&(_dmem.srm_cxt);
+    cxt_swap = (mfd_stream_info *)&(_dmem.stream_info[stream_id]);
+
+    if(swap)
+    {/* Swap context into local memory */
+        cp_using_dma(cxt_swap->ddr_cxt, (uint32_t) &(cxt->pm), sizeof(viddec_pm_cxt_t), false, false);
+    }
+
+    {
+        /* (Re)initialise parser manager + emitter for this stream and clear its timing stats.
+           Persistent memory starts right after the swap context in DDR (ddr_cxt + cxt_size). */
+        mfd_init_swap_memory(&(cxt->pm), cxt_swap->strm_type, cxt_swap->ddr_cxt+cxt_swap->cxt_size, clean);
+        cxt_swap->wl_time = 0;
+        cxt_swap->es_time = 0;
+    }
+    if(swap)
+    {/* Swap context into DDR */
+        cp_using_dma(cxt_swap->ddr_cxt, (uint32_t) &(cxt->pm), sizeof(viddec_pm_cxt_t), true, false);
+    }
+}
+
+/*------------------------------------------------------------------------------
+ * Function: viddec_fw_push_current_frame_to_output
+ * This is a helper function to read a workload from input queue and push to output queue.
+ * This is called when are done with a frame.
+ *------------------------------------------------------------------------------
+ */
+static inline void viddec_fw_push_current_frame_to_output(FW_IPC_Handle *fwipc, uint32_t cur)
+{
+    ipc_msg_data wkld_to_push;
+    /* Consume the current workload descriptor from the stream's workload queue... */
+    FwIPC_ReadMessage(fwipc, &fwipc->wkld_q[cur], (char *)&(wkld_to_push), sizeof(ipc_msg_data));
+    /* ...and forward it to the host on the stream's send queue. */
+    FwIPC_SendMessage(fwipc, cur, (char *)&(wkld_to_push), sizeof(ipc_msg_data));
+}
+
+/*------------------------------------------------------------------------------
+ * Function: viddec_fw_get_next_stream_to_schedule
+ * This is a helper function to figure out which active stream needs to be scheduled next.
+ * If none of the streams are active it returns -1.
+ *------------------------------------------------------------------------------
+ */
+static inline int viddec_fw_get_next_stream_to_schedule(void)
+{
+    int32_t cur = -1;
+
+    /* Realtime streams always take precedence over background streams. */
+    if(mfd_round_robin(viddec_stream_priority_REALTIME, &cur, _dmem.g_pk_data.high_id))
+    {
+        /* On success store the stream id */
+        _dmem.g_pk_data.high_id = cur;
+    }
+    else
+    {
+        /* Check Low priority Queues, Since we couldn't find a valid realtime stream */
+        if(mfd_round_robin(viddec_stream_priority_BACKGROUND, &cur, _dmem.g_pk_data.low_id))
+        {
+            _dmem.g_pk_data.low_id = cur;
+        }
+    }
+
+    /* -1 when no stream of either priority is schedulable. */
+    return cur;
+}
+
+/*------------------------------------------------------------------------------
+ * Function: viddec_fw_update_pending_interrupt_flag
+ * This is a helper function to figure out if we need to mark an interrupt pending for this stream.
+ * We update status value here if we find any of the interrupt conditions are true.
+ * If this stream has a interrupt pending which we could not send to host, we don't overwrite past status info.
+ *------------------------------------------------------------------------------
+ */
+static inline void viddec_fw_update_pending_interrupt_flag(int32_t cur, mfd_stream_info *cxt_swap, uint8_t pushed_a_workload,
+                                                           uint32_t es_Q_data_at_start)
+{
+    /* Interrupts are only considered when the host enabled a mask for this stream. */
+    if(_dmem.int_status[cur].mask)
+    {
+        /* An already-pending (unsent) interrupt keeps its status; don't overwrite it. */
+        if(!cxt_swap->pending_interrupt)
+        {
+            uint32_t es_Q_data_now;
+            uint8_t wmark_boundary_reached=false;
+            es_Q_data_now = viddec_fw_get_total_input_Q_data((uint32_t)cur);
+            wmark_boundary_reached = viddec_fw_check_watermark_boundary(es_Q_data_at_start, es_Q_data_now, cxt_swap->low_watermark);
+            _dmem.int_status[cur].status = 0;
+            if(pushed_a_workload)
+            {
+                _dmem.int_status[cur].status |= VIDDEC_FW_WKLD_DATA_AVAIL;
+            }
+            if(wmark_boundary_reached)
+            {
+                _dmem.int_status[cur].status |= VIDDEC_FW_INPUT_WATERMARK_REACHED;
+            }
+            /* Pending only if at least one condition was raised this pass. */
+            cxt_swap->pending_interrupt = ( _dmem.int_status[cur].status != 0);
+        }
+    }
+    else
+    {
+        cxt_swap->pending_interrupt = false;
+    }
+}
+
+/* Push the (current) workload out for fatal/in-band conditions, then reset the
+   stream's swap memory: EOS/overflow reinit with clean=true, discontinuity with clean=false. */
+static inline void viddec_fw_handle_error_and_inband_messages(int32_t cur, uint32_t pm_ret)
+{
+    FW_IPC_Handle *fwipc = GET_IPC_HANDLE(_dmem);
+
+    viddec_fw_push_current_frame_to_output(fwipc, cur);
+    switch(pm_ret)
+    {
+        case PM_EOS:
+        case PM_OVERFLOW:
+        {
+            viddec_fw_init_swap_memory(cur, false, true);
+        }
+        break;
+        case PM_DISCONTINUITY:
+        {
+            viddec_fw_init_swap_memory(cur, false, false);
+        }
+        break;
+        default:
+            break;
+    }
+}
+
+/* Emit a SVEN trace record with queue occupancy for stream 'indx' at schedule
+   start (start=1) or end (start=0). */
+void viddec_fw_debug_scheduled_stream_state(int32_t indx, int32_t start)
+{
+    FW_IPC_Handle *fwipc = GET_IPC_HANDLE(_dmem);
+    uint32_t inpt_avail=0, output_avail=0, wklds_avail =0 , pos;
+    FW_IPC_ReceiveQue *rcv_q;
+    uint32_t message;
+
+    message = (start) ? SVEN_MODULE_EVENT_GV_FW_PK_SCHDL_STRM_START: SVEN_MODULE_EVENT_GV_FW_PK_SCHDL_STRM_END;
+    rcv_q = &fwipc->rcv_q[indx];
+    inpt_avail = ipc_mq_read_avail(&rcv_q->mq, (int32_t *)&pos);
+    inpt_avail += ((_dmem.stream_info[indx].buffered_data > 0) ? CONFIG_IPC_MESSAGE_MAX_SIZE: 0);
+    /* NOTE(review): >>4 scales byte counts down (units of 16 bytes) before tracing -- confirm intended unit. */
+    inpt_avail = inpt_avail >> 4;
+    pos = 0;
+    output_avail = ipc_mq_read_avail(&fwipc->snd_q[indx].mq, (int32_t *)&pos);
+    output_avail = output_avail >> 4;
+    pos = 0;
+    wklds_avail = ipc_mq_read_avail(&fwipc->wkld_q[indx].mq, (int32_t *)&pos);
+    wklds_avail = wklds_avail >> 4;
+    WRITE_SVEN(message, (int)indx, (int)inpt_avail, (int)output_avail,
+               (int)wklds_avail, 0, 0);
+}
+
+/*------------------------------------------------------------------------------
+ * Function: viddec_fw_process_async_queues(A.K.A -> Parser Kernel)
+ * This function is responsible for handling the asynchronous queues.
+ *
+ * The first step is to figure out which stream to run. The current algorithm
+ * will go through all high priority queues for a valid stream, if not found we
+ * go through lower priority queues.
+ *
+ * If a valid stream is found we swap the required context from DDR to DMEM and do all necessary
+ * things to setup the stream.
+ * Once a stream is setup we call the parser manager and wait until a wrkld is created or no more input
+ * data left.
+ * Once we find a wkld we push it to host and save the current context to DDR.
+ *------------------------------------------------------------------------------
+ */
+
+static inline int32_t viddec_fw_process_async_queues()
+{
+    int32_t cur = -1;
+
+    cur = viddec_fw_get_next_stream_to_schedule();
+
+    if(cur != -1)
+    {
+        FW_IPC_Handle *fwipc = GET_IPC_HANDLE(_dmem);
+        FW_IPC_ReceiveQue *rcv_q;
+        /* bits captured by OMAR */
+        output_omar_wires( 0x0 );
+        rcv_q = &fwipc->rcv_q[cur];
+        {
+            mfd_pk_strm_cxt *cxt;
+            mfd_stream_info *cxt_swap;
+            cxt = (mfd_pk_strm_cxt *)&(_dmem.srm_cxt);
+            cxt_swap = (mfd_stream_info *)&(_dmem.stream_info[cur]);
+
+            /* Step 1: Swap rodata to local memory. Not doing this currently as all the rodata fits in local memory. */
+            {/* Step 2: Swap context into local memory */
+                cp_using_dma(cxt_swap->ddr_cxt, (uint32_t) &(cxt->pm), sizeof(viddec_pm_cxt_t), false, false);
+            }
+            /* Step 3:setup emitter by reading input data and workloads and initialising it */
+            mfd_setup_emitter(fwipc, &fwipc->wkld_q[cur], cxt);
+            viddec_fw_debug_scheduled_stream_state(cur, true);
+            /* Step 4: Call Parser Manager until workload done or No more ES buffers */
+            {
+                ipc_msg_data *data = 0;
+                uint8_t stream_active = true, pushed_a_workload=false;
+                uint32_t pm_ret = PM_SUCCESS, es_Q_data_at_start;
+                uint32_t start_time, time=0;
+
+                start_time = set_wdog(VIDDEC_WATCHDOG_COUNTER_MAX);
+                timer=0;
+                es_Q_data_at_start = viddec_fw_get_total_input_Q_data((uint32_t)cur);
+                /* Parse ES buffers until a frame completes, a fatal/in-band event
+                   occurs, or the input queue runs dry. 'data' is NULL while the
+                   parser is still consuming the previously supplied buffer. */
+                do
+                {
+                    output_omar_wires( 0x1 );
+                    {
+                        /* Time each parse call separately to report per-buffer ES parse cost. */
+                        uint32_t es_t0,es_t1;
+                        get_wdog(&es_t0);
+                        pm_ret = viddec_pm_parse_es_buffer(&(cxt->pm), cxt_swap->strm_type, data);
+                        get_wdog(&es_t1);
+                        cxt_swap->es_time += get_total_ticks(es_t0, es_t1);
+                    }
+                    switch(pm_ret)
+                    {
+                        case PM_EOS:
+                        case PM_WKLD_DONE:
+                        case PM_OVERFLOW:
+                        case PM_DISCONTINUITY:
+                        {/* Finished a frame worth of data or encountered fatal error*/
+                            stream_active = false;
+                        }
+                        break;
+                        case PM_NO_DATA:
+                        {
+                            uint32_t next_ret=0;
+                            if ( (NULL != data) && (0 != cxt_swap->es_time) )
+                            {
+                                /* print performance info for this buffer */
+                                WRITE_SVEN(SVEN_MODULE_EVENT_GV_FW_PK_ES_DONE, (int)cur, (int)cxt_swap->es_time, (int)cxt->input.phys,
+                                           (int)cxt->input.len, (int)cxt->input.id, (int)cxt->input.flags );
+                                cxt_swap->es_time = 0;
+                            }
+
+                            next_ret = FwIPC_ReadMessage(fwipc, rcv_q, (char *)&(cxt->input), sizeof(ipc_msg_data));
+                            if(next_ret != 0)
+                            {
+                                data = &(cxt->input);
+                                WRITE_SVEN(SVEN_MODULE_EVENT_GV_FW_PK_ES_START, (int)cur, (int)cxt_swap->wl_time,
+                                           (int)cxt->input.phys, (int)cxt->input.len, (int)cxt->input.id, (int)cxt->input.flags );
+                            }
+                            else
+                            {/* No data on input queue */
+                                cxt_swap->buffered_data = 0;
+                                stream_active = false;
+                            }
+                        }
+                        break;
+                        default:
+                        {/* Not done with current buffer */
+                            data = NULL;
+                        }
+                        break;
+                    }
+                }while(stream_active);
+                get_wdog(&time);
+                cxt_swap->wl_time += get_total_ticks(start_time, time);
+                /* Step 5: If workload done push workload out */
+                switch(pm_ret)
+                {
+                    case PM_EOS:
+                    case PM_WKLD_DONE:
+                    case PM_OVERFLOW:
+                    case PM_DISCONTINUITY:
+                    {/* Push current workload as we are done with the frame */
+                        /* NOTE(review): only a normal frame-done can leave a partially
+                           consumed ES buffer behind for the next schedule -- confirm. */
+                        cxt_swap->buffered_data = (PM_WKLD_DONE == pm_ret) ? true: false;
+                        viddec_pm_update_time(&(cxt->pm), cxt_swap->wl_time);
+
+                        /* xmit performance info for this workload output */
+                        WRITE_SVEN( SVEN_MODULE_EVENT_GV_FW_PK_WL_DONE, (int)cur, (int)cxt_swap->wl_time, (int)cxt->wkld1.phys,
+                                    (int)cxt->wkld1.len, (int)cxt->wkld1.id, (int)cxt->wkld1.flags );
+                        cxt_swap->wl_time = 0;
+
+                        viddec_fw_push_current_frame_to_output(fwipc, cur);
+                        if(pm_ret != PM_WKLD_DONE)
+                        {
+                            viddec_fw_handle_error_and_inband_messages(cur, pm_ret);
+                        }
+                        pushed_a_workload = true;
+                    }
+                    break;
+                    default:
+                        break;
+                }
+                /* Update information on whether we have active interrupt for this stream */
+                viddec_fw_update_pending_interrupt_flag(cur, cxt_swap, pushed_a_workload, es_Q_data_at_start);
+            }
+            viddec_fw_debug_scheduled_stream_state(cur, false);
+            /* Step 6: swap context into DDR */
+            {
+                cp_using_dma(cxt_swap->ddr_cxt, (uint32_t) &(cxt->pm), sizeof(viddec_pm_cxt_t), true, false);
+            }
+        }
+
+    }
+    return cur;
+}
+
+
+/*------------------------------------------------------------------------------
+ * Function: process_command
+ * This magic function figures out which function to excute based on autoapi.
+ *------------------------------------------------------------------------------
+ */
+
+static inline void process_command(uint32_t cmd_id, unsigned char *command)
+{
+    /* NOTE(review): top byte of cmd_id selects the api group (offset by 13),
+       low 24 bits the function slot -- confirm against the auto api generator. */
+    int32_t groupid = ((cmd_id >> 24) - 13) & 0xff;
+    int32_t funcid = cmd_id & 0xffffff;
+    /* writing func pointer to host doorbell */
+    output_omar_wires( (int) viddec_fw_api_array[groupid].unmarshal[funcid] );
+    WRITE_SVEN( SVEN_MODULE_EVENT_GV_FW_AUTOAPI_CMD,(int) cmd_id, (int) command, ((int *)command)[0],
+                ((int *)command)[1], ((int *)command)[2], ((int *)command)[3] );
+
+    /* Dispatch to the unmarshalling stub which decodes 'command' and runs the API. */
+    viddec_fw_api_array[groupid].unmarshal[funcid](0, command);
+
+}
+
+/*------------------------------------------------------------------------------
+ * Function: viddec_fw_process_sync_queues(A.K.A auto api)
+ * Params:
+ * [in] msg: common sync structure where all required parameters are present for autoapi.
+ *
+ * This function is responsible for handling synchronous messages. All synchronous messages
+ * are handled through auto api.
+ * What are synchronous messages? Anything related to teardown or opening a stream. Ex: open, close, flush etc.
+ *
+ * Only one synchronous message is handled at a time. When a synchronous message arrives its id is placed in the cp doorbell. Once
+ * we are done handling the synchronous message through auto api we release the doorbell to let the host write the next
+ * message.
+ *------------------------------------------------------------------------------
+ */
+
+static inline int32_t viddec_fw_process_sync_queues(unsigned char *msg)
+{
+    int32_t ret = -1;
+
+    /* Doorbell status 0 means the host has posted a command for us. */
+    if(0 == reg_read(CONFIG_IPC_ROFF_RISC_DOORBELL_STATUS))
+    {
+        uint32_t command1=0;
+        command1 = reg_read(CONFIG_IPC_ROFF_RISC_RX_DOORBELL);
+        process_command(command1, msg);
+        reg_write(CONFIG_IPC_ROFF_RISC_DOORBELL_STATUS, 0x2); /* Inform Host we are done with this message */
+        ret = 0;
+    }
+    /* -1: nothing was pending; 0: one command processed. */
+    return ret;
+}
+
+/*------------------------------------------------------------------------------
+ * Function: viddec_fw_check_for_pending_int
+ * This function walks through all active streams to see if atleast one stream has a pending interrupt
+ * and returns true if it finds one.
+ *------------------------------------------------------------------------------
+ */
+static inline uint32_t viddec_fw_check_for_pending_int(void)
+{
+    uint32_t i=0, ret=false;
+    /* start from 0 to max streams that fw can handle*/
+    while(i < FW_SUPPORTED_STREAMS)
+    {
+        /* Only running streams (state == 1) are considered. */
+        if(_dmem.stream_info[i].state == 1)
+        {
+            if((_dmem.stream_info[i].pending_interrupt) && _dmem.int_status[i].mask)
+            {
+                ret = true;
+            }
+            else
+            {/* If this is not in INT state clear the status before sending it to host */
+                _dmem.int_status[i].status = 0;
+            }
+        }
+        i++;
+    }
+    return ret;
+}
+
+/*------------------------------------------------------------------------------
+ * Function: viddec_fw_clear_processed_int
+ * This function walks through all active streams to clear pending interrupt state.This is
+ * called after a INT was issued.
+ *------------------------------------------------------------------------------
+ */
+static inline void viddec_fw_clear_processed_int(void)
+{
+    uint32_t i=0;
+    /* start from 0 to max streams that fw can handle*/
+    /* Cleared unconditionally for every stream (not just running ones) since
+       the single shared INT line has just been raised for all of them. */
+    while(i < FW_SUPPORTED_STREAMS)
+    {
+        //if(_dmem.stream_info[i].state == 1)
+        _dmem.stream_info[i].pending_interrupt = false;
+        i++;
+    }
+    return;
+}
+
+/*------------------------------------------------------------------------------
+ * Function: viddec_fw_int_host
+ * This function interrupts host if data is available for host or any other status
+ * is valid which the host configures the FW to.
+ * There is only one interrupt line so this is a shared Int for all streams, Host should
+ * look at status of all streams when it receives a Int.
+ * The FW will interrupt the host only if host doorbell is free, in other words the host
+ * should always make the doorbell free at the End of its ISR.
+ *------------------------------------------------------------------------------
+ */
+
+static inline int32_t viddec_fw_int_host()
+{
+    /* We Interrupt the host only if host is ready to receive an interrupt */
+    if((reg_read(CONFIG_IPC_ROFF_HOST_DOORBELL_STATUS) & GV_DOORBELL_STATS) == GV_DOORBELL_STATS)
+    {
+        if(viddec_fw_check_for_pending_int())
+        {
+            /* If a pending interrupt is found trigger INT */
+            reg_write(CONFIG_IPC_ROFF_HOST_DOORBELL, VIDDEC_FW_PARSER_IPC_HOST_INT);
+            /* Clear all stream's pending Interrupt info since we use a global INT for all streams */
+            viddec_fw_clear_processed_int();
+        }
+    }
+    return 1;
+}
+/* Canary word placed in its own section at the stack boundary; checked (check
+   currently compiled out with #if 0) to detect stack overflow. */
+volatile unsigned int stack_corrupted __attribute__ ((section (".stckovrflwchk")));
+/*------------------------------------------------------------------------------
+ * Function: main
+ * This function is the main firmware function. Its a infinite loop where it polls
+ * for messages and processes them if they are available. Currently we ping pong between
+ * synchronous and asynchronous messages one at a time. If we have multiple aysnchronous
+ * queues we always process only one between synchronous messages.
+ *
+ * For multiple asynchronous queues we round robin through the high priorities first and pick
+ * the first one available. Next time when we come around for asynchronous message we start
+ * from the next stream onwards so this guarantees that we give equal time slices for same
+ * priority queues. If no high priority queues are active we go to low priority queues and repeat
+ * the same process.
+ *------------------------------------------------------------------------------
+ */
+
+int main(void)
+{
+    unsigned char *msg = (uint8_t *)&(_dmem.buf.data[0]);
+
+    /* We wait until host reads sync message */
+    reg_write(CONFIG_IPC_ROFF_HOST_RX_DOORBELL, GV_FW_IPC_HOST_SYNC);
+
+    while ( GV_DOORBELL_STATS != reg_read(CONFIG_IPC_ROFF_HOST_DOORBELL_STATUS) )
+    { /*poll register until done bit is set */
+        /* Host re-writes Vsparc DRAM (BSS) in this loop and will hit the DONE bit when complete */
+    }
+    enable_intr();
+    /* Initialize State for queues */
+    viddec_fw_parser_register_callbacks();
+    FwIPC_Initialize(GET_IPC_HANDLE(_dmem), (volatile char *)msg);
+    /* -1 so the first round robin scan starts at stream 0. */
+    _dmem.g_pk_data.high_id = _dmem.g_pk_data.low_id = -1;
+    viddec_pm_init_ops();
+    stack_corrupted = 0xDEADBEEF;
+    while(1)
+    {
+        /* Scheduler loop: one sync (auto api) command, one async (parse) slice, then host INT. */
+        viddec_fw_process_sync_queues(msg);
+        viddec_fw_process_async_queues();
+        viddec_fw_int_host();
+#if 0
+        if(stack_corrupted != 0xDEADBEEF)
+        {
+            WRITE_SVEN(SVEN_MODULE_EVENT_GV_FW_FATAL_STACK_CORRPON, 0, 0, 0, 0, 0, 0);
+            while(1);
+        }
+#endif
+    }
+    return 1;
+}
diff --git a/mix_vbp/viddec_fw/fw/parser/utils.c b/mix_vbp/viddec_fw/fw/parser/utils.c
new file mode 100644
index 0000000..5a22e5b
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/utils.c
@@ -0,0 +1,253 @@
+#include "fw_pvt.h"
+#include "viddec_fw_parser_ipclib_config.h"
+
+extern uint32_t timer;
+
+/*------------------------------------------------------------------------------
+ * Function: memcpy
+ * This is a memory-copy function.
+ *------------------------------------------------------------------------------
+ */
+/* NOTE: we are inventing memcpy since we don't want to include string libs as part of FW Due to size limitations*/
/* NOTE: we are inventing memcpy since we don't want to include string libs as part of FW Due to size limitations*/
/*------------------------------------------------------------------------------
 * Function: memcpy
 * Copy n bytes from src to dest and return dest. When both pointers share the
 * same word alignment the bulk is copied word by word, with byte copies for the
 * unaligned head and the trailing remainder; otherwise a plain byte copy is used.
 * Bug fix vs original: the alignment prologue tested an always-zero counter
 * ('align' was initialised to 0 and never set), so for a shared nonzero
 * misalignment the leading bytes were never copied, 'bytes_left' was reduced by
 * the wrong amount, and the word loop ran on misaligned pointers.
 *------------------------------------------------------------------------------
 */
void *memcpy(void *dest, const void *src, uint32_t n)
{
    uint8_t *ptr8_frm, *ptr8_to;
    uint32_t *ptr32_frm, *ptr32_to;
    uint32_t bytes_left = n;
    uint32_t head, trail;

    ptr8_frm = (uint8_t *)src;
    ptr8_to = (uint8_t *)dest;

    /* Word copy is only possible when src and dest share the same word offset. */
    if(((((uintptr_t)ptr8_frm) & 0x3) == (((uintptr_t)ptr8_to) & 0x3)) && (n > 4))
    {
        /* bytes needed to reach the next word boundary (0 if already aligned) */
        head = (uint32_t)((4 - (((uintptr_t)ptr8_frm) & 0x3)) & 0x3);
        bytes_left -= head;
        while(head > 0){
            *ptr8_to ++ = *ptr8_frm ++;
            head--;
        }
        /* trailing bytes that do not fill a whole word */
        trail = bytes_left & 0x3;
        bytes_left -= trail;
        ptr32_to = (uint32_t *)ptr8_to;
        ptr32_frm = (uint32_t *)ptr8_frm;
        /* copy word by word */
        while(bytes_left > 0){
            *ptr32_to ++ = *ptr32_frm ++;
            bytes_left -= 4;
        }
        /* If there are any trailing bytes do a byte copy */
        ptr8_to = (uint8_t *)ptr32_to;
        ptr8_frm = (uint8_t *)ptr32_frm;
        while(trail > 0){
            *ptr8_to ++ = *ptr8_frm ++;
            trail--;
        }
    }
    else
    {/* case when src and dest addr are not on same alignment.
        Just do a byte copy */
        while(bytes_left > 0){
            *ptr8_to ++ = *ptr8_frm ++;
            bytes_left -= 1;
        }
    }
    return dest;
}
+
+/*------------------------------------------------------------------------------
+ * Function: memset
+ * This is a function to copy specificed value into memory array.
+ *------------------------------------------------------------------------------
+ */
+/* NOTE: we are inventing memset since we don't want to include string libs as part of FW Due to size limitations*/
/* NOTE: we are inventing memset since we don't want to include string libs as part of FW Due to size limitations*/
/*------------------------------------------------------------------------------
 * Function: memset
 * Fill n bytes at s with the byte value (c & 0xFF) and return s. Whole words are
 * written with a replicated fill pattern; partially covered words at either end
 * are updated with read-modify-write.
 * NOTE(review): the partial-word shift arithmetic assumes big-endian byte lanes
 * (low word bits correspond to higher byte addresses) -- confirm against the
 * target core before reusing on another platform.
 * Bug fix vs original: for n < 4 the function returned without writing any
 * byte at all; small fills now use a plain byte loop.
 *------------------------------------------------------------------------------
 */
void *memset(void *s, int32_t c, uint32_t n)
{
    uint8_t *ptr8 = (uint8_t *)s;
    uint32_t *ptr32, data;
    uint32_t mask = 0, bytes_left = n;

    /* replicate the fill byte into all four byte lanes of a word */
    mask = c & 0xFF;
    mask |= (mask << 8);
    mask |= (mask << 16);
    if(n >= 4)
    {
        uint32_t trail=0;
        /* bytes from s up to the next word boundary */
        trail = 4 - (uint32_t)(((uintptr_t)ptr8) & 0x3);
        if(trail < 4)
        {
            /* read-modify-write the partially covered leading word */
            ptr32 = (uint32_t *)(((uintptr_t)ptr8) & ~(uintptr_t)0x3);
            data = (*ptr32 >> (8*trail)) << (8*trail);
            data |= (mask >> (32 - (8*trail)));
            *ptr32 = data;
            bytes_left -= trail;
            ptr8 += trail;
        }
        /* ptr8 is now word aligned; fill whole words */
        ptr32 = (uint32_t *)((uintptr_t)ptr8);
        while(bytes_left >= 4)
        {
            *ptr32 = mask;
            ptr32++;
            bytes_left -=4;
        }
        if(bytes_left > 0)
        {
            /* read-modify-write the partially covered trailing word */
            data = (*ptr32 << (8*bytes_left)) >> (8*bytes_left);
            data |= (mask << (32 - (8*bytes_left)));
            *ptr32=data;
        }
    }
    else
    {
        /* Bug fix: the original wrote nothing for n < 4. */
        while(bytes_left > 0)
        {
            *ptr8++ = (uint8_t)(c & 0xFF);
            bytes_left--;
        }
    }

    return s;
}
+
+/*------------------------------------------------------------------------------
+ * Function: cp_using_dma
+ * This is a function to copy data from local memory to/from system memory.
+ * Params:
+ * [in] ddr_addr : Word aligned ddr address.
+ * [in] local_addr: Word aligned local address.
+ * [in] size : No of bytes to transfer.
+ * [in] to_ddr : Direction of copy, if true copy to ddr else copy to local memory.
+ * [in] swap : Enable or disable byte swap(endian).
+ * [out] return : Actual number of bytes copied, which can be more than what was requested
+ * since we can only copy words at a time.
+ * Limitations: DMA can transfer Words only, Local addr & DDR addr should be word aligned.
+ *------------------------------------------------------------------------------
+ */
+uint32_t cp_using_dma(uint32_t ddr_addr, uint32_t local_addr, uint32_t size, char to_ddr, char swap)
+{
+    uint32_t val=0, wrote = size;
+
+    while((reg_read(DMA_CONTROL_STATUS) & DMA_CTRL_STATUS_BUSY) != 0)
+    {
+        /* wait if DMA is busy with a transcation Error condition??*/
+    }
+
+    /* Program word-aligned addresses; GV_DDR_MEM_MASK is stripped to give the engine the raw DDR address. */
+    reg_write(DMA_SYSTEM_ADDRESS, (ddr_addr & ~3) & ~GV_DDR_MEM_MASK);
+    reg_write(DMA_LOCAL_ADDRESS, (local_addr & 0xfffc));
+    //wrote += (ddr_addr & 0x3);
+    wrote = (wrote+3)>>2;/* make number of bytes multiple of 4 */
+    val=(wrote & 0xffff) << 2;
+    /* Clear any stale DONE flag before starting the new transfer. */
+    reg_write(DMA_CONTROL_STATUS, DMA_CTRL_STATUS_DONE);
+    val |= DMA_CTRL_STATUS_START;
+    /* If transfer is more than 64 words use 128 byte burst speed */
+    if(wrote > 64)
+        val |= (1<<18);
+    if(swap) /* Endian swap if needed */
+        val |= DMA_CTRL_STATUS_SWAP;
+    if(to_ddr)
+        val = val | DMA_CTRL_STATUS_DIRCN;
+    reg_write(DMA_CONTROL_STATUS, val);
+    while((reg_read(DMA_CONTROL_STATUS) & DMA_CTRL_STATUS_DONE) == 0)
+    {
+        /* wait till DMA is done */
+    }
+    reg_write(DMA_CONTROL_STATUS, DMA_CTRL_STATUS_DONE);
+
+    /* Bytes actually transferred (rounded up to whole words). */
+    return (wrote << 2);
+}
+
+/*------------------------------------------------------------------------------
+ * Function: cp_using_dma_phys
+ * This is a function to copy data between local memory and system memory using a physical DDR address (no GV_DDR_MEM_MASK adjustment).
+ * Params:
+ * [in] ddr_addr : Word aligned ddr address.
+ * [in] local_addr: Word aligned local address.
+ * [in] size : No of bytes to transfer.
+ * [in] to_ddr : Direction of copy, if true copy to ddr else copy to local memory.
+ * [in] swap : Enable or disable byte swap(endian).
+ * [out] return : Actual number of bytes copied, which can be more than what was requested
+ * since we can only copy words at a time.
+ * Limitations: DMA can transfer Words only, Local addr & DDR addr should be word aligned.
+ *------------------------------------------------------------------------------
+ */
+uint32_t cp_using_dma_phys(uint32_t ddr_addr, uint32_t local_addr, uint32_t size, char to_ddr, char swap)
+{
+    uint32_t val=0, wrote = size;
+
+    while((reg_read(DMA_CONTROL_STATUS) & DMA_CTRL_STATUS_BUSY) != 0)
+    {
+        /* wait if DMA is busy with a transcation Error condition??*/
+    }
+
+    /* Same as cp_using_dma() except ddr_addr is already a physical address:
+       no GV_DDR_MEM_MASK stripping is done here. */
+    reg_write(DMA_SYSTEM_ADDRESS, (ddr_addr & ~3));
+    reg_write(DMA_LOCAL_ADDRESS, (local_addr & 0xfffc));
+    //wrote += (ddr_addr & 0x3);
+    wrote = (wrote+3)>>2;/* make number of bytes multiple of 4 */
+    val=(wrote & 0xffff) << 2;
+    /* Clear any stale DONE flag before starting the new transfer. */
+    reg_write(DMA_CONTROL_STATUS, DMA_CTRL_STATUS_DONE);
+    val |= DMA_CTRL_STATUS_START;
+    /* If transfer is more than 64 words use 128 byte burst speed */
+    if(wrote > 64)
+        val |= (1<<18);
+    if(swap) /* Endian swap if needed */
+        val |= DMA_CTRL_STATUS_SWAP;
+    if(to_ddr)
+        val = val | DMA_CTRL_STATUS_DIRCN;
+    reg_write(DMA_CONTROL_STATUS, val);
+    while((reg_read(DMA_CONTROL_STATUS) & DMA_CTRL_STATUS_DONE) == 0)
+    {
+        /* wait till DMA is done */
+    }
+    reg_write(DMA_CONTROL_STATUS, DMA_CTRL_STATUS_DONE);
+
+    /* Bytes actually transferred (rounded up to whole words). */
+    return (wrote << 2);
+}
+
+/* Set (enable != 0) or clear (enable == 0) the 'mask' bits in the CP control
+   register via read-modify-write. */
+void update_ctrl_reg(uint8_t enable, uint32_t mask)
+{
+    uint32_t read_val = 0;
+    read_val = reg_read(CONFIG_CP_CONTROL_REG);
+    if(enable)
+    {
+        read_val = read_val | mask;
+    }
+    else
+    {
+        read_val = read_val & ~mask;
+    }
+    reg_write(CONFIG_CP_CONTROL_REG, read_val);
+    return;
+
+}
+
+extern uint32_t sven_get_timestamp();
+
+/* Start a timing measurement; returns the start value to later pass to get_total_ticks(). */
+uint32_t set_wdog(uint32_t offset)
+{
+#ifdef B0_TIMER_FIX
+    /* B0 parts: arm the down-counting watchdog with 'offset' ticks and enable its interrupt. */
+    update_ctrl_reg(0, WATCH_DOG_ENABLE);
+    reg_write(INT_REG, INT_WDOG_ENABLE);
+    reg_write(WATCH_DOG_COUNTER, offset & WATCH_DOG_MASK);
+    update_ctrl_reg(1, WATCH_DOG_ENABLE);
+    return offset & WATCH_DOG_MASK;
+#else
+    /* Other parts: use the free-running SVEN timestamp instead. */
+    return sven_get_timestamp();
+#endif
+}
+
+/* Read the current timer value into *value; on B0 parts this also disables the
+   watchdog counter and its interrupt. */
+void get_wdog(uint32_t *value)
+{
+#ifdef B0_TIMER_FIX
+    *value = reg_read(WATCH_DOG_COUNTER) & WATCH_DOG_MASK;
+    reg_write(INT_REG, ~INT_WDOG_ENABLE);
+    update_ctrl_reg(0, WATCH_DOG_ENABLE);
+#else
+    *value = sven_get_timestamp();
+#endif
+}
+
/* Elapsed ticks between two values returned by set_wdog()/get_wdog(). */
uint32_t get_total_ticks(uint32_t start, uint32_t end)
{
#ifdef B0_TIMER_FIX
    /* Watchdog counts down, so within one period elapsed is start - end; add
       one full period (length 'start') per rollover recorded in 'timer'. */
    uint32_t elapsed = (start - end) + (start * timer);
    timer = 0;
    return elapsed;
#else
    /* Free-running timestamp counter: plain difference. */
    return (end - start);/* convert to 1 MHz clocks */
#endif
}
diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c b/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c
new file mode 100644
index 0000000..033f6b6
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c
@@ -0,0 +1,1568 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+
+#include <glib.h>
+#include <dlfcn.h>
+
+#include "h264.h"
+#include "vbp_loader.h"
+#include "vbp_utils.h"
+#include "vbp_h264_parser.h"
+
+
+/* number of bytes used to encode length of NAL payload. Default is 4 bytes. */
+static int NAL_length_size = 4;
+
+/* default scaling list table */
/* Default intra 4x4 scaling list -- values match the H.264 spec's default
 * lists (Rec. ITU-T H.264, Table 7-3); confirm scan order against consumers. */
unsigned char Default_4x4_Intra[16] =
{
    6,13,20,28,
    13,20,28,32,
    20,28,32,37,
    28,32,37,42
};

/* Default inter 4x4 scaling list (H.264 Table 7-3). */
unsigned char Default_4x4_Inter[16] =
{
    10,14,20,24,
    14,20,24,27,
    20,24,27,30,
    24,27,30,34
};

/* Default intra 8x8 scaling list (H.264 Table 7-4). */
unsigned char Default_8x8_Intra[64] =
{
    6,10,13,16,18,23,25,27,
    10,11,16,18,23,25,27,29,
    13,16,18,23,25,27,29,31,
    16,18,23,25,27,29,31,33,
    18,23,25,27,29,31,33,36,
    23,25,27,29,31,33,36,38,
    25,27,29,31,33,36,38,40,
    27,29,31,33,36,38,40,42
};

/* Default inter 8x8 scaling list (H.264 Table 7-4). */
unsigned char Default_8x8_Inter[64] =
{
    9,13,15,17,19,21,22,24,
    13,13,17,19,21,22,24,25,
    15,17,19,21,22,24,25,27,
    17,19,21,22,24,25,27,28,
    19,21,22,24,25,27,28,30,
    21,22,24,25,27,28,30,32,
    22,24,25,27,28,30,32,33,
    24,25,27,28,30,32,33,35
};

/* Flat (all-16) 4x4 list used when no scaling matrices are signalled. */
unsigned char quant_flat[16] =
{
    16,16,16,16,
    16,16,16,16,
    16,16,16,16,
    16,16,16,16
};

/* Flat (all-16) 8x8 list used when no scaling matrices are signalled. */
unsigned char quant8_flat[64] =
{
    16,16,16,16,16,16,16,16,
    16,16,16,16,16,16,16,16,
    16,16,16,16,16,16,16,16,
    16,16,16,16,16,16,16,16,
    16,16,16,16,16,16,16,16,
    16,16,16,16,16,16,16,16,
    16,16,16,16,16,16,16,16,
    16,16,16,16,16,16,16,16
};

/* Default list for each scaling-list index: 0-2 intra 4x4, 3-5 inter 4x4,
 * 6 intra 8x8, 7 inter 8x8 (mirrors the index order used by
 * vbp_set_scaling_list_h264 below). */
unsigned char* UseDefaultList[8] =
{
    Default_4x4_Intra, Default_4x4_Intra, Default_4x4_Intra,
    Default_4x4_Inter, Default_4x4_Inter, Default_4x4_Inter,
    Default_8x8_Intra,
    Default_8x8_Inter
};
+
+/**
+ *
+ */
+uint32 vbp_init_parser_entries_h264(vbp_context *pcontext)
+{
+ if (NULL == pcontext->parser_ops)
+ {
+ return VBP_PARM;
+ }
+ pcontext->parser_ops->init = dlsym(pcontext->fd_parser, "viddec_h264_init");
+ if (NULL == pcontext->parser_ops->init)
+ {
+ ETRACE ("Failed to set entry point." );
+ return VBP_LOAD;
+ }
+
+ pcontext->parser_ops->parse_sc = viddec_parse_sc;
+
+ pcontext->parser_ops->parse_syntax = dlsym(pcontext->fd_parser, "viddec_h264_parse");
+ if (NULL == pcontext->parser_ops->parse_syntax)
+ {
+ ETRACE ("Failed to set entry point." );
+ return VBP_LOAD;
+ }
+
+ pcontext->parser_ops->get_cxt_size = dlsym(pcontext->fd_parser, "viddec_h264_get_context_size");
+ if (NULL == pcontext->parser_ops->get_cxt_size)
+ {
+ ETRACE ("Failed to set entry point." );
+ return VBP_LOAD;
+ }
+
+ pcontext->parser_ops->is_wkld_done = dlsym(pcontext->fd_parser, "viddec_h264_wkld_done");
+ if (NULL == pcontext->parser_ops->is_wkld_done)
+ {
+ ETRACE ("Failed to set entry point." );
+ return VBP_LOAD;
+ }
+
+ /* entry point not needed */
+ pcontext->parser_ops->is_frame_start = NULL;
+ return VBP_OK;
+}
+
+
+/**
+ *
+ */
+uint32 vbp_allocate_query_data_h264(vbp_context *pcontext)
+{
+ if (NULL != pcontext->query_data)
+ {
+ return VBP_PARM;
+ }
+
+ pcontext->query_data = NULL;
+ vbp_data_h264 *query_data = NULL;
+
+ query_data = g_try_new0(vbp_data_h264, 1);
+ if (NULL == query_data)
+ {
+ goto cleanup;
+ }
+
+ /* assign the pointer */
+ pcontext->query_data = (void *)query_data;
+
+ query_data->pic_data = g_try_new0(vbp_picture_data_h264, MAX_NUM_PICTURES);
+ if (NULL == query_data->pic_data)
+ {
+ goto cleanup;
+ }
+
+ int i;
+ for (i = 0; i < MAX_NUM_PICTURES; i++)
+ {
+ query_data->pic_data[i].pic_parms = g_try_new0(VAPictureParameterBufferH264, 1);
+ if (NULL == query_data->pic_data[i].pic_parms)
+ {
+ goto cleanup;
+ }
+ query_data->pic_data[i].num_slices = 0;
+ query_data->pic_data[i].slc_data = g_try_new0(vbp_slice_data_h264, MAX_NUM_SLICES);
+ if (NULL == query_data->pic_data[i].slc_data)
+ {
+ goto cleanup;
+ }
+ }
+
+
+ query_data->IQ_matrix_buf = g_try_new0(VAIQMatrixBufferH264, 1);
+ if (NULL == query_data->IQ_matrix_buf)
+ {
+ goto cleanup;
+ }
+
+ query_data->codec_data = g_try_new0(vbp_codec_data_h264, 1);
+ if (NULL == query_data->codec_data)
+ {
+ goto cleanup;
+ }
+
+ return VBP_OK;
+
+cleanup:
+ vbp_free_query_data_h264(pcontext);
+
+ return VBP_MEM;
+}
+
+uint32 vbp_free_query_data_h264(vbp_context *pcontext)
+{
+ if (NULL == pcontext->query_data)
+ {
+ return VBP_OK;
+ }
+
+ int i;
+ vbp_data_h264 *query_data;
+ query_data = (vbp_data_h264 *)pcontext->query_data;
+
+ if (query_data->pic_data)
+ {
+ for (i = 0; i < MAX_NUM_PICTURES; i++)
+ {
+ g_free(query_data->pic_data[i].slc_data);
+ g_free(query_data->pic_data[i].pic_parms);
+ }
+ g_free(query_data->pic_data);
+ }
+
+ g_free(query_data->IQ_matrix_buf);
+ g_free(query_data->codec_data);
+ g_free(query_data);
+
+ pcontext->query_data = NULL;
+
+ return VBP_OK;
+}
+
+
/* Read a 16-bit big-endian value from a (possibly unaligned) byte pointer. */
static inline uint16_t vbp_utils_ntohs(uint8_t* p)
{
    return (uint16_t)((p[0] << 8) | p[1]);
}
+
/* Read a 32-bit big-endian value from a (possibly unaligned) byte pointer.
 *
 * Fixed: the original computed ((*p) << 24) + ..., which left-shifts a value
 * promoted to signed int; for p[0] >= 0x80 the shift overflows int, which is
 * undefined behavior in C.  Accumulating in uint32_t keeps all arithmetic
 * unsigned and well-defined while producing the same value. */
static inline uint32_t vbp_utils_ntohl(uint8_t* p)
{
    uint32_t v = p[0];
    v = (v << 8) | p[1];
    v = (v << 8) | p[2];
    v = (v << 8) | p[3];
    return v;
}
+
+
+static inline void vbp_set_VAPicture_h264(
+ int curr_picture_structure,
+ int bottom_field,
+ frame_store* store,
+ VAPictureH264* pic)
+{
+ if (FRAME == curr_picture_structure)
+ {
+ if (FRAME != viddec_h264_get_dec_structure(store))
+ {
+ WTRACE("Reference picture structure is not frame for current frame picture!");
+ }
+ pic->flags = 0;
+ pic->TopFieldOrderCnt = store->frame.poc;
+ pic->BottomFieldOrderCnt = store->frame.poc;
+ }
+ else
+ {
+ if (FRAME == viddec_h264_get_dec_structure(store))
+ {
+ WTRACE("reference picture structure is frame for current field picture!");
+ }
+ if (bottom_field)
+ {
+ pic->flags = VA_PICTURE_H264_BOTTOM_FIELD;
+ pic->TopFieldOrderCnt = store->top_field.poc;
+ pic->BottomFieldOrderCnt = store->bottom_field.poc;
+ }
+ else
+ {
+ pic->flags = VA_PICTURE_H264_TOP_FIELD;
+ pic->TopFieldOrderCnt = store->top_field.poc;
+ pic->BottomFieldOrderCnt = store->bottom_field.poc;
+ }
+ }
+}
+
/* Build RefPicList0/RefPicList1 in the VA slice parameters from the parser's
 * decoded picture buffer.
 *
 * All 32 entries of both lists are first marked invalid.  Then, for each
 * list that is active for this slice type (list 0 for P/B, list 1 for B
 * only), indices are taken from the reordered slice list when
 * ref_pic_list_reordering was signalled, otherwise from the default DPB
 * list.  Each stored index packs the DPB slot in bits 0-4 and a
 * "bottom field" flag in bit 5. */
static inline void vbp_set_slice_ref_list_h264(
    struct h264_viddec_parser* h264_parser,
    VASliceParameterBufferH264 *slc_parms)
{
    int i, j;
    int num_ref_idx_active = 0;
    h264_Slice_Header_t* slice_header = &(h264_parser->info.SliceHeader);
    uint8_t* p_list = NULL;
    VAPictureH264* refPicListX = NULL;
    frame_store* fs = NULL;

    /* initialize both ref picture lists: set picture id and flags to invalid */

    for (i = 0; i < 2; i++)
    {
        refPicListX = (i == 0) ? &(slc_parms->RefPicList0[0]) : &(slc_parms->RefPicList1[0]);
        for (j = 0; j < 32; j++)
        {
            refPicListX->picture_id = VA_INVALID_SURFACE;
            refPicListX->frame_idx = 0;
            refPicListX->flags = VA_PICTURE_H264_INVALID;
            refPicListX->TopFieldOrderCnt = 0;
            refPicListX->BottomFieldOrderCnt = 0;
            refPicListX++;
        }
    }

    for (i = 0; i < 2; i++)
    {
        refPicListX = (i == 0) ? &(slc_parms->RefPicList0[0]) : &(slc_parms->RefPicList1[0]);

        /* list 0 is used by P and B slices */
        if ((i == 0) &&
            ((h264_PtypeB == slice_header->slice_type) ||
             (h264_PtypeP == slice_header->slice_type)))
        {
            num_ref_idx_active = slice_header->num_ref_idx_l0_active;
            if (slice_header->sh_refpic_l0.ref_pic_list_reordering_flag)
            {
                /* explicit reordering was signalled in the slice header */
                p_list = h264_parser->info.slice_ref_list0;
            }
            else
            {
                /* default DPB ordering */
                p_list = h264_parser->info.dpb.listX_0;
            }
        }
        /* list 1 is used by B slices only */
        else if((i == 1) && (h264_PtypeB == slice_header->slice_type))
        {
            num_ref_idx_active = slice_header->num_ref_idx_l1_active;
            if (slice_header->sh_refpic_l1.ref_pic_list_reordering_flag)
            {
                p_list = h264_parser->info.slice_ref_list1;
            }
            else
            {
                p_list = h264_parser->info.dpb.listX_1;
            }
        }
        else
        {
            /* list not used for this slice type: leave all entries invalid */
            num_ref_idx_active = 0;
            p_list = NULL;
        }


        for (j = 0; j < num_ref_idx_active; j++)
        {
            /* bits 0-4 of the packed index select the DPB slot */
            fs = &(h264_parser->info.dpb.fs[(p_list[j] & 0x1f)]);

            /* bit 5 indicates if reference picture is bottom field */
            vbp_set_VAPicture_h264(
                h264_parser->info.img.structure,
                (p_list[j] & 0x20) >> 5,
                fs,
                refPicListX);

            refPicListX->frame_idx = fs->frame_num;
            refPicListX->flags |= viddec_h264_get_is_long_term(fs) ? VA_PICTURE_H264_LONG_TERM_REFERENCE : VA_PICTURE_H264_SHORT_TERM_REFERENCE;
            refPicListX++;
        }
    }
}
+
+static inline void vbp_set_pre_weight_table_h264(
+ struct h264_viddec_parser* h264_parser,
+ VASliceParameterBufferH264 *slc_parms)
+{
+ h264_Slice_Header_t* slice_header = &(h264_parser->info.SliceHeader);
+ int i, j;
+
+ if ((((h264_PtypeP == slice_header->slice_type) ||
+ (h264_PtypeB == slice_header->slice_type)) &&
+ h264_parser->info.active_PPS.weighted_pred_flag) ||
+ ((h264_PtypeB == slice_header->slice_type) &&
+ (1 == h264_parser->info.active_PPS.weighted_bipred_idc)))
+ {
+ slc_parms->luma_log2_weight_denom = slice_header->sh_predwttbl.luma_log2_weight_denom;
+ slc_parms->chroma_log2_weight_denom = slice_header->sh_predwttbl.chroma_log2_weight_denom;
+ slc_parms->luma_weight_l0_flag = slice_header->sh_predwttbl.luma_weight_l0_flag;
+ slc_parms->chroma_weight_l0_flag = slice_header->sh_predwttbl.chroma_weight_l0_flag;
+ slc_parms->luma_weight_l1_flag = slice_header->sh_predwttbl.luma_weight_l1_flag;
+ slc_parms->chroma_weight_l1_flag = slice_header->sh_predwttbl.chroma_weight_l1_flag;
+
+ for (i = 0; i < 32; i++)
+ {
+ slc_parms->luma_weight_l0[i] = slice_header->sh_predwttbl.luma_weight_l0[i];
+ slc_parms->luma_offset_l0[i] = slice_header->sh_predwttbl.luma_offset_l0[i];
+ slc_parms->luma_weight_l1[i] = slice_header->sh_predwttbl.luma_weight_l1[i];
+ slc_parms->luma_offset_l1[i] = slice_header->sh_predwttbl.luma_offset_l1[i];
+
+ for (j = 0; j < 2; j++)
+ {
+ slc_parms->chroma_weight_l0[i][j] = slice_header->sh_predwttbl.chroma_weight_l0[i][j];
+ slc_parms->chroma_offset_l0[i][j] = slice_header->sh_predwttbl.chroma_offset_l0[i][j];
+ slc_parms->chroma_weight_l1[i][j] = slice_header->sh_predwttbl.chroma_weight_l1[i][j];
+ slc_parms->chroma_offset_l1[i][j] = slice_header->sh_predwttbl.chroma_offset_l1[i][j];
+ }
+ }
+ }
+ else
+ {
+ /* default weight table */
+ slc_parms->luma_log2_weight_denom = 5;
+ slc_parms->chroma_log2_weight_denom = 5;
+ slc_parms->luma_weight_l0_flag = 0;
+ slc_parms->luma_weight_l1_flag = 0;
+ slc_parms->chroma_weight_l0_flag = 0;
+ slc_parms->chroma_weight_l1_flag = 0;
+ for (i = 0; i < 32; i++)
+ {
+ slc_parms->luma_weight_l0[i] = 0;
+ slc_parms->luma_offset_l0[i] = 0;
+ slc_parms->luma_weight_l1[i] = 0;
+ slc_parms->luma_offset_l1[i] = 0;
+
+ for (j = 0; j < 2; j++)
+ {
+ slc_parms->chroma_weight_l0[i][j] = 0;
+ slc_parms->chroma_offset_l0[i][j] = 0;
+ slc_parms->chroma_weight_l1[i][j] = 0;
+ slc_parms->chroma_offset_l1[i][j] = 0;
+ }
+ }
+ }
+}
+
+
/* Fill pic_parms->ReferenceFrames[] and num_ref_frames from the DPB:
 * first all entries are invalidated, then short-term references are copied
 * in, followed by long-term references.  For field pictures the POCs come
 * from the individual fields and TOP/BOTTOM flags mark which field(s) are
 * actually used for reference.
 *
 * NOTE(review): frame_idx is incremented unconditionally at the bottom of
 * both loops, even when viddec_h264_get_is_used(store) is false -- an unused
 * slot then leaves an INVALID entry counted in num_ref_frames.  Verify this
 * is intentional (it preserves DPB slot ordering) before changing.
 *
 * NOTE(review): the long-term loop never sets
 * ReferenceFrames[frame_idx].frame_idx (the short-term loop sets it to
 * store->frame_num); for long-term references VA-API expects the long-term
 * frame index there.  Confirm against the VA-API consumer. */
static inline void vbp_set_reference_frames_h264(
    struct h264_viddec_parser *parser,
    VAPictureParameterBufferH264* pic_parms)
{
    int buffer_idx;
    int frame_idx;
    frame_store* store = NULL;
    h264_DecodedPictureBuffer* dpb = &(parser->info.dpb);
    /* initialize reference frames */
    for (frame_idx = 0; frame_idx < 16; frame_idx++)
    {
        pic_parms->ReferenceFrames[frame_idx].picture_id = VA_INVALID_SURFACE;
        pic_parms->ReferenceFrames[frame_idx].frame_idx = 0;
        pic_parms->ReferenceFrames[frame_idx].flags = VA_PICTURE_H264_INVALID;
        pic_parms->ReferenceFrames[frame_idx].TopFieldOrderCnt = 0;
        pic_parms->ReferenceFrames[frame_idx].BottomFieldOrderCnt = 0;
    }
    pic_parms->num_ref_frames = 0;

    frame_idx = 0;

    /* ITRACE("short term frame in dpb %d", dpb->ref_frames_in_buffer); */
    /* set short term reference frames */
    for (buffer_idx = 0; buffer_idx < dpb->ref_frames_in_buffer; buffer_idx++)
    {
        if (frame_idx >= 16)
        {
            WTRACE("Frame index is out of bound.");
            break;
        }

        store = &dpb->fs[dpb->fs_ref_idc[buffer_idx]];
        /* if (store->is_used == 3 && store->frame.used_for_reference == 3) */
        if (viddec_h264_get_is_used(store))
        {
            pic_parms->ReferenceFrames[frame_idx].frame_idx = store->frame_num;
            pic_parms->ReferenceFrames[frame_idx].flags = VA_PICTURE_H264_SHORT_TERM_REFERENCE;
            if (FRAME == parser->info.img.structure)
            {
                pic_parms->ReferenceFrames[frame_idx].TopFieldOrderCnt = store->frame.poc;
                pic_parms->ReferenceFrames[frame_idx].BottomFieldOrderCnt = store->frame.poc;
            }
            else
            {
                pic_parms->ReferenceFrames[frame_idx].TopFieldOrderCnt = store->top_field.poc;
                pic_parms->ReferenceFrames[frame_idx].BottomFieldOrderCnt = store->bottom_field.poc;
                if (store->top_field.used_for_reference && store->bottom_field.used_for_reference)
                {
                    /* if both fields are used for reference, just set flag to be frame (0) */
                }
                else
                {
                    if (store->top_field.used_for_reference)
                        pic_parms->ReferenceFrames[frame_idx].flags |= VA_PICTURE_H264_TOP_FIELD;
                    if (store->bottom_field.used_for_reference)
                        pic_parms->ReferenceFrames[frame_idx].flags |= VA_PICTURE_H264_BOTTOM_FIELD;
                }
            }
        }
        frame_idx++;
    }

    /* set long term reference frames */
    for (buffer_idx = 0; buffer_idx < dpb->ltref_frames_in_buffer; buffer_idx++)
    {
        if (frame_idx >= 16)
        {
            WTRACE("Frame index is out of bound.");
            break;
        }
        store = &dpb->fs[dpb->fs_ltref_idc[buffer_idx]];
        if (!viddec_h264_get_is_long_term(store))
        {
            WTRACE("long term frame is not marked as long term.");
        }
        /*if (store->is_used == 3 && store->is_long_term && store->frame.used_for_reference == 3) */
        if (viddec_h264_get_is_used(store))
        {
            pic_parms->ReferenceFrames[frame_idx].flags = VA_PICTURE_H264_LONG_TERM_REFERENCE;
            if (FRAME == parser->info.img.structure)
            {
                pic_parms->ReferenceFrames[frame_idx].TopFieldOrderCnt = store->frame.poc;
                pic_parms->ReferenceFrames[frame_idx].BottomFieldOrderCnt = store->frame.poc;
            }
            else
            {
                pic_parms->ReferenceFrames[frame_idx].TopFieldOrderCnt = store->top_field.poc;
                pic_parms->ReferenceFrames[frame_idx].BottomFieldOrderCnt = store->bottom_field.poc;
                if (store->top_field.used_for_reference && store->bottom_field.used_for_reference)
                {
                    /* if both fields are used for reference, just set flag to be frame (0)*/
                }
                else
                {
                    if (store->top_field.used_for_reference)
                        pic_parms->ReferenceFrames[frame_idx].flags |= VA_PICTURE_H264_TOP_FIELD;
                    if (store->bottom_field.used_for_reference)
                        pic_parms->ReferenceFrames[frame_idx].flags |= VA_PICTURE_H264_BOTTOM_FIELD;
                }
            }
        }
        frame_idx++;
    }

    pic_parms->num_ref_frames = frame_idx;

    if (frame_idx > parser->info.active_SPS.num_ref_frames)
    {
        WTRACE("actual num_ref_frames (%d) exceeds the value in the sequence header (%d).",
               frame_idx, parser->info.active_SPS.num_ref_frames);
    }
}
+
+
+static inline void vbp_set_scaling_list_h264(
+ struct h264_viddec_parser *parser,
+ VAIQMatrixBufferH264* IQ_matrix_buf)
+{
+ int i;
+ if (parser->info.active_PPS.pic_scaling_matrix_present_flag)
+ {
+ for (i = 0; i < 6 + 2 * parser->info.active_PPS.transform_8x8_mode_flag; i++)
+ {
+ if (parser->info.active_PPS.pic_scaling_list_present_flag[i])
+ {
+ if (((i < 6) && parser->info.active_PPS.UseDefaultScalingMatrix4x4Flag[i]) ||
+ ((i >= 6) && parser->info.active_PPS.UseDefaultScalingMatrix8x8Flag[i-6]))
+ {
+ /* use default scaling list */
+ if (i < 6)
+ {
+ memcpy(IQ_matrix_buf->ScalingList4x4[i], UseDefaultList[i], 16);
+ }
+ else
+ {
+ memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], UseDefaultList[i], 64);
+ }
+ }
+ else
+ {
+ /* use PPS list */
+ if (i < 6)
+ {
+ memcpy(IQ_matrix_buf->ScalingList4x4[i], parser->info.active_PPS.ScalingList4x4[i], 16);
+ }
+ else
+ {
+ memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], parser->info.active_PPS.ScalingList8x8[i - 6], 64);
+ }
+ }
+ }
+ else /* pic_scaling_list not present */
+ {
+ if (parser->info.active_SPS.seq_scaling_matrix_present_flag)
+ {
+ /* SPS matrix present - use fallback rule B */
+ switch (i)
+ {
+ case 0:
+ case 3:
+ memcpy(IQ_matrix_buf->ScalingList4x4[i],
+ parser->info.active_SPS.seq_scaling_list_present_flag[i] ? parser->info.active_PPS.ScalingList4x4[i] : UseDefaultList[i],
+ 16);
+ break;
+
+ case 6:
+ case 7:
+ memcpy(IQ_matrix_buf->ScalingList8x8[i - 6],
+ parser->info.active_SPS.seq_scaling_list_present_flag[i] ? parser->info.active_PPS.ScalingList8x8[i - 6] : UseDefaultList[i],
+ 64);
+ break;
+
+ case 1:
+ case 2:
+ case 4:
+ case 5:
+ memcpy(IQ_matrix_buf->ScalingList4x4[i],
+ IQ_matrix_buf->ScalingList4x4[i - 1],
+ 16);
+ break;
+
+ default:
+ g_warning("invalid scaling list index.");
+ break;
+ }
+ }
+ else /* seq_scaling_matrix not present */
+ {
+ /* SPS matrix not present - use fallback rule A */
+ switch (i)
+ {
+ case 0:
+ case 3:
+ memcpy(IQ_matrix_buf->ScalingList4x4[i], UseDefaultList[i], 16);
+ break;
+
+ case 6:
+ case 7:
+ memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], UseDefaultList[i], 64);
+ break;
+
+ case 1:
+ case 2:
+ case 4:
+ case 5:
+ memcpy(IQ_matrix_buf->ScalingList4x4[i],
+ IQ_matrix_buf->ScalingList4x4[i - 1],
+ 16);
+ break;
+
+ default:
+ WTRACE("invalid scaling list index.");
+ break;
+ }
+ } /* end of seq_scaling_matrix not present */
+ } /* end of pic_scaling_list not present */
+ } /* for loop for each index from 0 to 7 */
+ } /* end of pic_scaling_matrix present */
+ else
+ {
+ /* PPS matrix not present, use SPS information */
+ if (parser->info.active_SPS.seq_scaling_matrix_present_flag)
+ {
+ for (i = 0; i < 6 + 2 * parser->info.active_PPS.transform_8x8_mode_flag; i++)
+ {
+ if (parser->info.active_SPS.seq_scaling_list_present_flag[i])
+ {
+ if (((i < 6) && parser->info.active_SPS.UseDefaultScalingMatrix4x4Flag[i]) ||
+ ((i >= 6) && parser->info.active_SPS.UseDefaultScalingMatrix8x8Flag[i - 6]))
+ {
+ /* use default scaling list */
+ if (i < 6)
+ {
+ memcpy(IQ_matrix_buf->ScalingList4x4[i], UseDefaultList[i], 16);
+ }
+ else
+ {
+ memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], UseDefaultList[i], 64);
+ }
+ }
+ else
+ {
+ /* use SPS list */
+ if (i < 6)
+ {
+ memcpy(IQ_matrix_buf->ScalingList4x4[i], parser->info.active_SPS.ScalingList4x4[i], 16);
+ }
+ else
+ {
+ memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], parser->info.active_SPS.ScalingList8x8[i - 6], 64);
+ }
+ }
+ }
+ else
+ {
+ /* SPS list not present - use fallback rule A */
+ switch (i)
+ {
+ case 0:
+ case 3:
+ memcpy(IQ_matrix_buf->ScalingList4x4[i], UseDefaultList[i], 16);
+ break;
+
+ case 6:
+ case 7:
+ memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], UseDefaultList[i], 64);
+ break;
+
+ case 1:
+ case 2:
+ case 4:
+ case 5:
+ memcpy(IQ_matrix_buf->ScalingList4x4[i],
+ IQ_matrix_buf->ScalingList4x4[i - 1],
+ 16);
+ break;
+
+ default:
+ WTRACE("invalid scaling list index.");
+ break;
+ }
+ }
+ }
+ }
+ else
+ {
+ /* SPS matrix not present - use flat lists */
+ for (i = 0; i < 6; i++)
+ {
+ memcpy(IQ_matrix_buf->ScalingList4x4[i], quant_flat, 16);
+ }
+ for (i = 0; i < 2; i++)
+ {
+ memcpy(IQ_matrix_buf->ScalingList8x8[i], quant8_flat, 64);
+ }
+ }
+ }
+
+ if ((0 == parser->info.active_PPS.transform_8x8_mode_flag) &&
+ (parser->info.active_PPS.pic_scaling_matrix_present_flag ||
+ parser->info.active_SPS.seq_scaling_matrix_present_flag))
+ {
+ for (i = 0; i < 2; i++)
+ {
+ memcpy(IQ_matrix_buf->ScalingList8x8[i], quant8_flat, 64);
+ }
+ }
+}
+
/* Copy sequence/picture level metadata from the active SPS/PPS into the
 * flat vbp_codec_data_h264 structure handed to the client. */
static void vbp_set_codec_data_h264(
    struct h264_viddec_parser *parser,
    vbp_codec_data_h264* codec_data)
{
    /* parameter id */
    codec_data->seq_parameter_set_id = parser->info.active_SPS.seq_parameter_set_id;
    codec_data->pic_parameter_set_id = parser->info.active_PPS.pic_parameter_set_id;

    /* profile and level */
    codec_data->profile_idc = parser->info.active_SPS.profile_idc;
    codec_data->level_idc = parser->info.active_SPS.level_idc;


    /* NOTE(review): assumes constraint_set1 lives in bit 2 of the packed
     * constraint_set_flags -- confirm against the SPS parser's packing */
    codec_data->constraint_set1_flag = (parser->info.active_SPS.constraint_set_flags & 0x4) >> 2;


    /* reference frames */
    codec_data->num_ref_frames = parser->info.active_SPS.num_ref_frames;

    if (!parser->info.active_SPS.sps_disp.frame_mbs_only_flag &&
        !parser->info.active_SPS.sps_disp.mb_adaptive_frame_field_flag)
    {
        /* no longer necessary: two fields share the same interlaced surface */
        /* codec_data->num_ref_frames *= 2; */
    }

    codec_data->gaps_in_frame_num_value_allowed_flag = parser->info.active_SPS.gaps_in_frame_num_value_allowed_flag;

    /* frame coding */
    codec_data->frame_mbs_only_flag = parser->info.active_SPS.sps_disp.frame_mbs_only_flag;
    codec_data->mb_adaptive_frame_field_flag = parser->info.active_SPS.sps_disp.mb_adaptive_frame_field_flag;

    /* frame dimension in pixels (16 per macroblock; height doubled for
     * field coding) */
    codec_data->frame_width = (parser->info.active_SPS.sps_disp.pic_width_in_mbs_minus1 + 1 ) * 16;

    codec_data->frame_height = (2 - parser->info.active_SPS.sps_disp.frame_mbs_only_flag) *
        (parser->info.active_SPS.sps_disp.pic_height_in_map_units_minus1 + 1) * 16;

    /* frame cropping */
    codec_data->frame_cropping_flag =
        parser->info.active_SPS.sps_disp.frame_cropping_flag;

    codec_data->frame_crop_rect_left_offset =
        parser->info.active_SPS.sps_disp.frame_crop_rect_left_offset;

    codec_data->frame_crop_rect_right_offset =
        parser->info.active_SPS.sps_disp.frame_crop_rect_right_offset;

    codec_data->frame_crop_rect_top_offset =
        parser->info.active_SPS.sps_disp.frame_crop_rect_top_offset;

    codec_data->frame_crop_rect_bottom_offset =
        parser->info.active_SPS.sps_disp.frame_crop_rect_bottom_offset;

    /* aspect ratio */
    codec_data->aspect_ratio_info_present_flag =
        parser->info.active_SPS.sps_disp.vui_seq_parameters.aspect_ratio_info_present_flag;

    codec_data->aspect_ratio_idc =
        parser->info.active_SPS.sps_disp.vui_seq_parameters.aspect_ratio_idc;

    codec_data->sar_width =
        parser->info.active_SPS.sps_disp.vui_seq_parameters.sar_width;

    codec_data->sar_height =
        parser->info.active_SPS.sps_disp.vui_seq_parameters.sar_height;

    /* video format */
    codec_data->video_format =
        parser->info.active_SPS.sps_disp.vui_seq_parameters.video_format;

    /* NOTE(review): BUG -- this second assignment overwrites video_format
     * with video_signal_type_present_flag, making the store above dead.
     * Almost certainly a copy/paste error: the flag presumably belongs in
     * its own member of vbp_codec_data_h264.  Confirm against the struct
     * definition and its consumers before fixing. */
    codec_data->video_format =
        parser->info.active_SPS.sps_disp.vui_seq_parameters.video_signal_type_present_flag;
}
+
+
/* Record a new picture (or a continuation of the current one) in the query
 * data.  A slice whose first_mb_in_slice is 0 opens a new picture; its
 * VAPictureParameterBufferH264 is filled once from the active SPS/PPS and
 * the slice header, then the reference-frame array is refreshed for every
 * slice.  Returns VBP_DATA when the picture count would exceed
 * MAX_NUM_PICTURES or the stream does not start at MB 0, VBP_OK otherwise. */
static uint32_t vbp_add_pic_data_h264(vbp_context *pcontext, int list_index)
{
    viddec_pm_cxt_t *cxt = pcontext->parser_cxt;

    vbp_data_h264 *query_data = (vbp_data_h264 *)pcontext->query_data;
    struct h264_viddec_parser* parser = NULL;
    vbp_picture_data_h264* pic_data = NULL;
    VAPictureParameterBufferH264* pic_parms = NULL;

    parser = (struct h264_viddec_parser *)cxt->codec_data;

    if (0 == parser->info.SliceHeader.first_mb_in_slice)
    {
        /* a new picture is parsed */
        query_data->num_pictures++;
    }

    if (query_data->num_pictures > MAX_NUM_PICTURES)
    {
        ETRACE("num of pictures exceeds the limit (%d).", MAX_NUM_PICTURES);
        return VBP_DATA;
    }

    int pic_data_index = query_data->num_pictures - 1;
    if (pic_data_index < 0)
    {
        WTRACE("MB address does not start from 0!");
        return VBP_DATA;
    }

    pic_data = &(query_data->pic_data[pic_data_index]);
    pic_parms = pic_data->pic_parms;

    /* NOTE(review): this re-tests the same condition as the num_pictures++
     * branch above -- the two could be merged into one block */
    if (parser->info.SliceHeader.first_mb_in_slice == 0)
    {
        /**
        * picture parameter only needs to be set once,
        * even multiple slices may be encoded
        */

        /* VAPictureParameterBufferH264 */
        pic_parms->CurrPic.picture_id = VA_INVALID_SURFACE;
        pic_parms->CurrPic.frame_idx = 0;
        if (parser->info.img.field_pic_flag == 1)
        {
            if (parser->info.img.bottom_field_flag)
            {
                pic_parms->CurrPic.flags = VA_PICTURE_H264_BOTTOM_FIELD;
            }
            else
            {
                /* also OK set to 0 (from test suite) */
                pic_parms->CurrPic.flags = VA_PICTURE_H264_TOP_FIELD;
            }
        }
        else
        {
            pic_parms->CurrPic.flags = 0; /* frame picture */
        }
        pic_parms->CurrPic.TopFieldOrderCnt = parser->info.img.toppoc;
        pic_parms->CurrPic.BottomFieldOrderCnt = parser->info.img.bottompoc;
        pic_parms->CurrPic.frame_idx = parser->info.SliceHeader.frame_num;

        /* don't care if current frame is used as long term reference */
        if (parser->info.SliceHeader.nal_ref_idc != 0)
        {
            pic_parms->CurrPic.flags |= VA_PICTURE_H264_SHORT_TERM_REFERENCE;
        }

        pic_parms->picture_width_in_mbs_minus1 = parser->info.active_SPS.sps_disp.pic_width_in_mbs_minus1;

        /* frame height in MBS */
        pic_parms->picture_height_in_mbs_minus1 = (2 - parser->info.active_SPS.sps_disp.frame_mbs_only_flag) *
            (parser->info.active_SPS.sps_disp.pic_height_in_map_units_minus1 + 1) - 1;

        pic_parms->bit_depth_luma_minus8 = parser->info.active_SPS.bit_depth_luma_minus8;
        pic_parms->bit_depth_chroma_minus8 = parser->info.active_SPS.bit_depth_chroma_minus8;


        pic_parms->seq_fields.value = 0;
        pic_parms->seq_fields.bits.chroma_format_idc = parser->info.active_SPS.sps_disp.chroma_format_idc;
        pic_parms->seq_fields.bits.residual_colour_transform_flag = parser->info.active_SPS.residual_colour_transform_flag;
        pic_parms->seq_fields.bits.frame_mbs_only_flag = parser->info.active_SPS.sps_disp.frame_mbs_only_flag;
        pic_parms->seq_fields.bits.mb_adaptive_frame_field_flag = parser->info.active_SPS.sps_disp.mb_adaptive_frame_field_flag;
        pic_parms->seq_fields.bits.direct_8x8_inference_flag = parser->info.active_SPS.sps_disp.direct_8x8_inference_flag;

        /* new fields in libva 0.31 */
        pic_parms->seq_fields.bits.gaps_in_frame_num_value_allowed_flag = parser->info.active_SPS.gaps_in_frame_num_value_allowed_flag;
        pic_parms->seq_fields.bits.log2_max_frame_num_minus4 = parser->info.active_SPS.log2_max_frame_num_minus4;
        pic_parms->seq_fields.bits.pic_order_cnt_type = parser->info.active_SPS.pic_order_cnt_type;
        pic_parms->seq_fields.bits.log2_max_pic_order_cnt_lsb_minus4 = parser->info.active_SPS.log2_max_pic_order_cnt_lsb_minus4;
        pic_parms->seq_fields.bits.delta_pic_order_always_zero_flag =parser->info.active_SPS.delta_pic_order_always_zero_flag;


        /* referenced from UMG_Moorstown_TestSuites */
        pic_parms->seq_fields.bits.MinLumaBiPredSize8x8 = (parser->info.active_SPS.level_idc > 30) ? 1 : 0;

        pic_parms->num_slice_groups_minus1 = parser->info.active_PPS.num_slice_groups_minus1;
        pic_parms->slice_group_map_type = parser->info.active_PPS.slice_group_map_type;
        pic_parms->slice_group_change_rate_minus1 = 0;
        pic_parms->pic_init_qp_minus26 = parser->info.active_PPS.pic_init_qp_minus26;
        pic_parms->pic_init_qs_minus26 = 0;
        pic_parms->chroma_qp_index_offset = parser->info.active_PPS.chroma_qp_index_offset;
        pic_parms->second_chroma_qp_index_offset = parser->info.active_PPS.second_chroma_qp_index_offset;

        pic_parms->pic_fields.value = 0;
        pic_parms->pic_fields.bits.entropy_coding_mode_flag = parser->info.active_PPS.entropy_coding_mode_flag;
        pic_parms->pic_fields.bits.weighted_pred_flag = parser->info.active_PPS.weighted_pred_flag;
        pic_parms->pic_fields.bits.weighted_bipred_idc = parser->info.active_PPS.weighted_bipred_idc;
        pic_parms->pic_fields.bits.transform_8x8_mode_flag = parser->info.active_PPS.transform_8x8_mode_flag;

        /* new LibVA fields in v0.31*/
        pic_parms->pic_fields.bits.pic_order_present_flag = parser->info.active_PPS.pic_order_present_flag;
        pic_parms->pic_fields.bits.deblocking_filter_control_present_flag = parser->info.active_PPS.deblocking_filter_control_present_flag;
        pic_parms->pic_fields.bits.redundant_pic_cnt_present_flag = parser->info.active_PPS.redundant_pic_cnt_present_flag;
        pic_parms->pic_fields.bits.reference_pic_flag = parser->info.SliceHeader.nal_ref_idc != 0;

        /* all slices in the picture have the same field_pic_flag */
        pic_parms->pic_fields.bits.field_pic_flag = parser->info.SliceHeader.field_pic_flag;
        pic_parms->pic_fields.bits.constrained_intra_pred_flag = parser->info.active_PPS.constrained_intra_pred_flag;

        pic_parms->frame_num = parser->info.SliceHeader.frame_num;
    }


    /* set reference frames, and num_ref_frames */
    vbp_set_reference_frames_h264(parser, pic_parms);
    if (parser->info.nal_unit_type == h264_NAL_UNIT_TYPE_IDR)
    {
        /* num of reference frame is 0 if current picture is IDR */
        pic_parms->num_ref_frames = 0;
    }
    else
    {
        /* actual num_ref_frames is set in vbp_set_reference_frames_h264 */
    }

    return VBP_OK;
}
+
#if 0
/* Disabled alternative ("method A") for post-processing the reference frame
 * array: keep only ReferenceFrames[] entries whose TopFieldOrderCnt appears
 * in some slice's RefPicList0/1, compact them to the front, and invalidate
 * the remainder.  Kept for reference; not compiled. */
static inline void vbp_update_reference_frames_h264_methodA(vbp_picture_data_h264* pic_data)
{
    VAPictureParameterBufferH264* pic_parms = pic_data->pic_parms;

    char is_used[16];
    memset(is_used, 0, sizeof(is_used));

    int ref_list;
    int slice_index;
    int i, j;
    VAPictureH264* pRefList = NULL;

    for (slice_index = 0; slice_index < pic_data->num_slices; slice_index++)
    {
        VASliceParameterBufferH264* slice_parms =
            &(pic_data->slc_data[slice_index].slc_parms);

        for (ref_list = 0; ref_list < 2; ref_list++)
        {
            if (0 == ref_list)
                pRefList = slice_parms->RefPicList0;
            else
                pRefList = slice_parms->RefPicList1;

            for (i = 0; i < 32; i++, pRefList++)
            {
                if (VA_PICTURE_H264_INVALID == pRefList->flags)
                    break;

                for (j = 0; j < 16; j++)
                {
                    if (pic_parms->ReferenceFrames[j].TopFieldOrderCnt ==
                        pRefList->TopFieldOrderCnt)
                    {
                        is_used[j] = 1;
                        break;
                    }
                }
            }
        }
    }

    int frame_idx = 0;
    VAPictureH264* pRefFrame = pic_parms->ReferenceFrames;
    for (i = 0; i < 16; i++)
    {
        if (is_used[i])
        {
            memcpy(pRefFrame,
                   &(pic_parms->ReferenceFrames[i]),
                   sizeof(VAPictureH264));

            pRefFrame++;
            frame_idx++;
        }
    }
    pic_parms->num_ref_frames = frame_idx;

    for (; frame_idx < 16; frame_idx++)
    {
        pRefFrame->picture_id = VA_INVALID_SURFACE;
        pRefFrame->frame_idx = -1;
        pRefFrame->flags = VA_PICTURE_H264_INVALID;
        pRefFrame->TopFieldOrderCnt = -1;
        pRefFrame->BottomFieldOrderCnt = -1;
        pRefFrame++;
    }
}
#endif
+
#if 0
/* Disabled alternative ("method B") for rebuilding the reference frame
 * array: start from an all-invalid list and merge in every entry found in
 * the slices' RefPicList0/1, matching by TopFieldOrderCnt and combining
 * top/bottom field flags into a frame entry (flags 0) when both fields are
 * referenced.  Kept for reference; not compiled. */
static inline void vbp_update_reference_frames_h264_methodB(vbp_picture_data_h264* pic_data)
{
    VAPictureParameterBufferH264* pic_parms = pic_data->pic_parms;
    int i;
    VAPictureH264* pRefFrame = pic_parms->ReferenceFrames;
    for (i = 0; i < 16; i++)
    {
        pRefFrame->picture_id = VA_INVALID_SURFACE;
        pRefFrame->frame_idx = -1;
        pRefFrame->flags = VA_PICTURE_H264_INVALID;
        pRefFrame->TopFieldOrderCnt = -1;
        pRefFrame->BottomFieldOrderCnt = -1;
        pRefFrame++;
    }

    pic_parms->num_ref_frames = 0;


    int ref_list;
    int slice_index;
    int j;
    VAPictureH264* pRefList = NULL;

    for (slice_index = 0; slice_index < pic_data->num_slices; slice_index++)
    {
        VASliceParameterBufferH264* slice_parms =
            &(pic_data->slc_data[slice_index].slc_parms);

        for (ref_list = 0; ref_list < 2; ref_list++)
        {
            if (0 == ref_list)
                pRefList = slice_parms->RefPicList0;
            else
                pRefList = slice_parms->RefPicList1;

            for (i = 0; i < 32; i++, pRefList++)
            {
                if (VA_PICTURE_H264_INVALID == pRefList->flags)
                    break;

                for (j = 0; j < 16; j++)
                {
                    if (pic_parms->ReferenceFrames[j].TopFieldOrderCnt ==
                        pRefList->TopFieldOrderCnt)
                    {
                        pic_parms->ReferenceFrames[j].flags |=
                            pRefList->flags;

                        if ((pic_parms->ReferenceFrames[j].flags & VA_PICTURE_H264_TOP_FIELD) &&
                            (pic_parms->ReferenceFrames[j].flags & VA_PICTURE_H264_BOTTOM_FIELD))
                        {
                            pic_parms->ReferenceFrames[j].flags = 0;
                        }
                        break;
                    }
                }
                if (j == 16)
                {
                    memcpy(&(pic_parms->ReferenceFrames[pic_parms->num_ref_frames++]),
                           pRefList,
                           sizeof(VAPictureH264));
                }

            }
        }
    }
}
#endif
+
+
+/* Fill in the next vbp_slice_data_h264 / VASliceParameterBufferH264 entry of
+ * the current picture (query_data->pic_data[num_pictures - 1]) for the slice
+ * NAL unit at cxt->list.data[index], using the slice header the codec parser
+ * just decoded into h264_parser->info.SliceHeader.
+ * Returns VBP_OK on success, VBP_DATA if no picture has been added yet or
+ * the per-picture slice limit is exceeded. */
+static uint32_t vbp_add_slice_data_h264(vbp_context *pcontext, int index)
+{
+ viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
+ uint32 bit, byte;
+ uint8 is_emul;
+
+ vbp_data_h264 *query_data = (vbp_data_h264 *)pcontext->query_data;
+ VASliceParameterBufferH264 *slc_parms = NULL;
+ vbp_slice_data_h264 *slc_data = NULL;
+ struct h264_viddec_parser* h264_parser = NULL;
+ h264_Slice_Header_t* slice_header = NULL;
+ vbp_picture_data_h264* pic_data = NULL;
+
+
+ h264_parser = (struct h264_viddec_parser *)cxt->codec_data;
+ int pic_data_index = query_data->num_pictures - 1;
+ if (pic_data_index < 0)
+ {
+ ETRACE("invalid picture data index.");
+ return VBP_DATA;
+ }
+
+ pic_data = &(query_data->pic_data[pic_data_index]);
+
+ /* NOTE(review): slc_data is indexed with num_slices BEFORE the limit
+ * check at the bottom of this function; if num_slices can equal
+ * MAX_NUM_SLICES here this writes one entry past a MAX_NUM_SLICES-sized
+ * array — confirm slc_data's allocated size. */
+ slc_data = &(pic_data->slc_data[pic_data->num_slices]);
+ slc_data->buffer_addr = cxt->parse_cubby.buf;
+ slc_parms = &(slc_data->slc_parms);
+
+ /* byte: how many bytes have been parsed */
+ /* bit: bits parsed within the current parsing position */
+ viddec_pm_get_au_pos(cxt, &bit, &byte, &is_emul);
+
+
+#if 0
+ /* add 4 bytes of start code prefix */
+ slc_parms->slice_data_size = slc_data->slice_size =
+ pcontext->parser_cxt->list.data[index].edpos -
+ pcontext->parser_cxt->list.data[index].stpos + 4;
+
+ slc_data->slice_offset = pcontext->parser_cxt->list.data[index].stpos - 4;
+
+ /* overwrite the "length" bytes to start code (0x00000001) */
+ *(slc_data->buffer_addr + slc_data->slice_offset) = 0;
+ *(slc_data->buffer_addr + slc_data->slice_offset + 1) = 0;
+ *(slc_data->buffer_addr + slc_data->slice_offset + 2) = 0;
+ *(slc_data->buffer_addr + slc_data->slice_offset + 3) = 1;
+
+
+ /* the offset to the NAL start code for this slice */
+ slc_parms->slice_data_offset = 0;
+
+ /* whole slice is in this buffer */
+ slc_parms->slice_data_flag = VA_SLICE_DATA_FLAG_ALL;
+
+ /* bit offset from NAL start code to the beginning of slice data */
+ /* slc_parms->slice_data_bit_offset = bit;*/
+ slc_parms->slice_data_bit_offset = (byte + 4)* 8 + bit;
+
+#else
+ slc_parms->slice_data_size = slc_data->slice_size =
+ pcontext->parser_cxt->list.data[index].edpos -
+ pcontext->parser_cxt->list.data[index].stpos;
+
+ /* the offset to the NAL start code for this slice */
+ slc_data->slice_offset = cxt->list.data[index].stpos;
+ slc_parms->slice_data_offset = 0;
+
+ /* whole slice is in this buffer */
+ slc_parms->slice_data_flag = VA_SLICE_DATA_FLAG_ALL;
+
+ /* bit offset from NAL start code to the beginning of slice data */
+ slc_parms->slice_data_bit_offset = bit + byte * 8;
+#endif
+
+ if (is_emul)
+ {
+ WTRACE("next byte is emulation prevention byte.");
+ /*slc_parms->slice_data_bit_offset += 8; */
+ }
+
+ /* The getbits engine counted emulation-prevention bytes it consumed;
+ * subtract them so the bit offset refers to the raw escaped stream. */
+ if (cxt->getbits.emulation_byte_counter != 0)
+ {
+ slc_parms->slice_data_bit_offset -= cxt->getbits.emulation_byte_counter * 8;
+ }
+
+ slice_header = &(h264_parser->info.SliceHeader);
+ slc_parms->first_mb_in_slice = slice_header->first_mb_in_slice;
+
+ /* MBAFF frame pictures (adaptive flag set, not a field picture) count
+ * macroblock pairs, hence the divide by 2. Note: bitwise '&' with a
+ * logical '!' operand — both sides are 0/1 here so it acts like '&&'. */
+ if(h264_parser->info.active_SPS.sps_disp.mb_adaptive_frame_field_flag &
+ (!(h264_parser->info.SliceHeader.field_pic_flag)))
+ {
+ slc_parms->first_mb_in_slice /= 2;
+ }
+
+ slc_parms->slice_type = slice_header->slice_type;
+
+ slc_parms->direct_spatial_mv_pred_flag = slice_header->direct_spatial_mv_pred_flag;
+
+ /* Active reference counts apply only to P (list 0) and B (both lists);
+ * I slices reference nothing. */
+ slc_parms->num_ref_idx_l0_active_minus1 = 0;
+ slc_parms->num_ref_idx_l1_active_minus1 = 0;
+ if (slice_header->slice_type == h264_PtypeI)
+ {
+ }
+ else if (slice_header->slice_type == h264_PtypeP)
+ {
+ slc_parms->num_ref_idx_l0_active_minus1 = slice_header->num_ref_idx_l0_active - 1;
+ }
+ else if (slice_header->slice_type == h264_PtypeB)
+ {
+ slc_parms->num_ref_idx_l0_active_minus1 = slice_header->num_ref_idx_l0_active - 1;
+ slc_parms->num_ref_idx_l1_active_minus1 = slice_header->num_ref_idx_l1_active - 1;
+ }
+ else
+ {
+ WTRACE("slice type %d is not supported.", slice_header->slice_type);
+ }
+
+ slc_parms->cabac_init_idc = slice_header->cabac_init_idc;
+ slc_parms->slice_qp_delta = slice_header->slice_qp_delta;
+ slc_parms->disable_deblocking_filter_idc = slice_header->disable_deblocking_filter_idc;
+ slc_parms->slice_alpha_c0_offset_div2 = slice_header->slice_alpha_c0_offset_div2;
+ slc_parms->slice_beta_offset_div2 = slice_header->slice_beta_offset_div2;
+
+
+ vbp_set_pre_weight_table_h264(h264_parser, slc_parms);
+ vbp_set_slice_ref_list_h264(h264_parser, slc_parms);
+
+
+ pic_data->num_slices++;
+
+ //vbp_update_reference_frames_h264_methodB(pic_data);
+ if (pic_data->num_slices > MAX_NUM_SLICES)
+ {
+ ETRACE("number of slices per picture exceeds the limit (%d).", MAX_NUM_SLICES);
+ return VBP_DATA;
+ }
+ return VBP_OK;
+}
+
+/**
+* Parse decoder configuration data: an AVCDecoderConfigurationRecord as
+* defined in MPEG-4 part 15. Records the NAL length-field size in the
+* file-scope NAL_length_size and appends one cxt->list item per SPS and
+* per PPS found, so the codec parser can process them afterwards.
+* Returns VBP_OK on success, VBP_DATA if the buffer is truncated.
+*/
+uint32 vbp_parse_init_data_h264(vbp_context* pcontext)
+{
+ /* parsing AVCDecoderConfigurationRecord structure (see MPEG-4 part 15 spec) */
+
+ uint8 configuration_version = 0;
+ uint8 AVC_profile_indication = 0;
+ uint8 profile_compatibility = 0;
+ uint8 AVC_level_indication = 0;
+ uint8 length_size_minus_one = 0;
+ uint8 num_of_sequence_parameter_sets = 0;
+ uint8 num_of_picture_parameter_sets = 0;
+ uint16 sequence_parameter_set_length = 0;
+ uint16 picture_parameter_set_length = 0;
+
+ int i = 0;
+ viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
+ uint8* cur_data = cxt->parse_cubby.buf;
+
+
+ if (cxt->parse_cubby.size < 6)
+ {
+ /* need at least 6 bytes to start parsing the structure, see spec 15 */
+ return VBP_DATA;
+ }
+
+ /* Fixed 4-byte header of the configuration record. */
+ configuration_version = *cur_data++;
+ AVC_profile_indication = *cur_data++;
+
+ /*ITRACE("Profile indication: %d", AVC_profile_indication); */
+
+ profile_compatibility = *cur_data++;
+ AVC_level_indication = *cur_data++;
+
+ /* ITRACE("Level indication: %d", AVC_level_indication);*/
+ /* 2 bits of length_size_minus_one, 6 bits of reserved (11111) */
+ length_size_minus_one = (*cur_data) & 0x3;
+
+ if (length_size_minus_one != 3)
+ {
+ WTRACE("length size (%d) is not equal to 4.", length_size_minus_one + 1);
+ }
+
+ /* Remembered globally; consumed by vbp_get_NAL_length_h264(). */
+ NAL_length_size = length_size_minus_one + 1;
+
+ cur_data++;
+
+ /* 3 bits of reserved (111) and 5 bits of num_of_sequence_parameter_sets */
+ num_of_sequence_parameter_sets = (*cur_data) & 0x1f;
+ if (num_of_sequence_parameter_sets > 1)
+ {
+ WTRACE("num_of_sequence_parameter_sets is %d.", num_of_sequence_parameter_sets);
+ }
+ if (num_of_sequence_parameter_sets > MAX_NUM_SPS)
+ {
+ /* this would never happen as MAX_NUM_SPS = 32 */
+ WTRACE("num_of_sequence_parameter_sets (%d) exceeds the limit (%d).", num_of_sequence_parameter_sets, MAX_NUM_SPS);
+ }
+ cur_data++;
+
+ /* Queue each SPS as a separate list item (stpos/edpos are offsets into
+ * the parse cubby; edpos is exclusive). */
+ cxt->list.num_items = 0;
+ for (i = 0; i < num_of_sequence_parameter_sets; i++)
+ {
+ if (cur_data - cxt->parse_cubby.buf + 2 > cxt->parse_cubby.size)
+ {
+ /* need at least 2 bytes to parse sequence_parameter_set_length */
+ return VBP_DATA;
+ }
+
+ /* 16 bits */
+ sequence_parameter_set_length = vbp_utils_ntohs(cur_data);
+
+
+ cur_data += 2;
+
+ if (cur_data - cxt->parse_cubby.buf + sequence_parameter_set_length > cxt->parse_cubby.size)
+ {
+ /* need at least sequence_parameter_set_length bytes for SPS */
+ return VBP_DATA;
+ }
+
+ cxt->list.data[cxt->list.num_items].stpos = cur_data - cxt->parse_cubby.buf;
+
+ /* end pos is exclusive */
+ cxt->list.data[cxt->list.num_items].edpos =
+ cxt->list.data[cxt->list.num_items].stpos + sequence_parameter_set_length;
+
+ cxt->list.num_items++;
+
+ cur_data += sequence_parameter_set_length;
+ }
+
+ if (cur_data - cxt->parse_cubby.buf + 1 > cxt->parse_cubby.size)
+ {
+ /* need at least one more byte to parse num_of_picture_parameter_sets */
+ return VBP_DATA;
+ }
+
+ num_of_picture_parameter_sets = *cur_data++;
+ if (num_of_picture_parameter_sets > 1)
+ {
+ /* g_warning("num_of_picture_parameter_sets is %d.", num_of_picture_parameter_sets); */
+ }
+
+ /* Queue each PPS the same way, appended after the SPS items. */
+ for (i = 0; i < num_of_picture_parameter_sets; i++)
+ {
+ if (cur_data - cxt->parse_cubby.buf + 2 > cxt->parse_cubby.size)
+ {
+ /* need at least 2 bytes to parse picture_parameter_set_length */
+ return VBP_DATA;
+ }
+
+ /* 16 bits */
+ picture_parameter_set_length = vbp_utils_ntohs(cur_data);
+
+ cur_data += 2;
+
+ if (cur_data - cxt->parse_cubby.buf + picture_parameter_set_length > cxt->parse_cubby.size)
+ {
+ /* need at least picture_parameter_set_length bytes for PPS */
+ return VBP_DATA;
+ }
+
+ cxt->list.data[cxt->list.num_items].stpos = cur_data - cxt->parse_cubby.buf;
+
+ /* end pos is exclusive */
+ cxt->list.data[cxt->list.num_items].edpos =
+ cxt->list.data[cxt->list.num_items].stpos + picture_parameter_set_length;
+
+ cxt->list.num_items++;
+
+ cur_data += picture_parameter_set_length;
+ }
+
+ if ((cur_data - cxt->parse_cubby.buf) != cxt->parse_cubby.size)
+ {
+ WTRACE("Not all initialization data is parsed. Size = %d, parsed = %d.",
+ cxt->parse_cubby.size, (cur_data - cxt->parse_cubby.buf));
+ }
+
+ return VBP_OK;
+}
+
+/* Read the big-endian NAL unit length field at p, honoring the configured
+ * NAL_length_size (1-4 bytes, set from the AVCDecoderConfigurationRecord).
+ * An invalid configured size is reset to the default of 4 bytes. */
+static inline uint32_t vbp_get_NAL_length_h264(uint8_t* p)
+{
+ switch (NAL_length_size)
+ {
+ case 4:
+ return vbp_utils_ntohl(p);
+
+ case 3:
+ {
+ uint32_t i = ((*p) << 16) + ((*(p+1)) << 8) + ((*(p+2)));
+ return i;
+ }
+
+ case 2:
+ return vbp_utils_ntohs(p);
+
+ case 1:
+ return *p;
+
+ default:
+ WTRACE("invalid NAL_length_size: %d.", NAL_length_size);
+ /* default to 4 bytes for length */
+ NAL_length_size = 4;
+ return vbp_utils_ntohl(p);
+ }
+}
+
+/**
+* H.264 elementary stream in this container has no start codes; each NAL
+* unit is prefixed by its length (AVC sample format, see MPEG-4 part 15).
+* Walk the sample buffer, resetting the per-sample query data, and emit
+* one cxt->list item per NAL unit payload.
+*/
+uint32 vbp_parse_start_code_h264(vbp_context *pcontext)
+{
+ viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
+ int32_t size_left = 0;
+ int32_t size_parsed = 0;
+ int32_t NAL_length = 0;
+ viddec_sc_parse_cubby_cxt_t* cubby = NULL;
+
+ /* reset query data for the new sample buffer */
+ vbp_data_h264* query_data = (vbp_data_h264*)pcontext->query_data;
+ int i;
+
+ for (i = 0; i < MAX_NUM_PICTURES; i++)
+ {
+ query_data->pic_data[i].num_slices = 0;
+ }
+ query_data->num_pictures = 0;
+
+
+ cubby = &(cxt->parse_cubby);
+
+ cxt->list.num_items = 0;
+
+ /* start code emulation prevention byte is present in NAL */
+ cxt->getbits.is_emul_reqd = 1;
+
+ size_left = cubby->size;
+
+ while (size_left >= NAL_length_size)
+ {
+ NAL_length = vbp_get_NAL_length_h264(cubby->buf + size_parsed);
+
+ /* NOTE(review): NAL_length is not validated against size_left; a
+ * corrupt length field yields an edpos past the buffer end —
+ * confirm downstream consumers tolerate this. */
+ size_parsed += NAL_length_size;
+ cxt->list.data[cxt->list.num_items].stpos = size_parsed;
+ size_parsed += NAL_length; /* skip NAL bytes */
+ /* end position is exclusive */
+ cxt->list.data[cxt->list.num_items].edpos = size_parsed;
+ cxt->list.num_items++;
+ if (cxt->list.num_items >= MAX_IBUFS_PER_SC)
+ {
+ ETRACE("num of list items exceeds the limit (%d).", MAX_IBUFS_PER_SC);
+ break;
+ }
+
+ size_left = cubby->size - size_parsed;
+ }
+
+ if (size_left != 0)
+ {
+ WTRACE("Elementary stream is not aligned (%d).", size_left);
+ }
+ return VBP_OK;
+}
+
+/**
+*
+* Process the parsing result after one NAL unit has been parsed.
+* Slice and IDR NAL units add picture data and then slice data to the
+* query structure; header units (SEI/SPS/PPS/AUD/EOSeq/EOStream) need no
+* per-unit bookkeeping here. @param i: index of the NAL unit in the list.
+*
+*/
+uint32 vbp_process_parsing_result_h264( vbp_context *pcontext, int i)
+{
+ if (i >= MAX_NUM_SLICES)
+ {
+ return VBP_PARM;
+ }
+
+ uint32 error = VBP_OK;
+
+ struct h264_viddec_parser* parser = NULL;
+ parser = (struct h264_viddec_parser *)&( pcontext->parser_cxt->codec_data[0]);
+ switch (parser->info.nal_unit_type)
+ {
+ case h264_NAL_UNIT_TYPE_SLICE:
+ /* ITRACE("slice header is parsed."); */
+ error = vbp_add_pic_data_h264(pcontext, i);
+ if (VBP_OK == error)
+ {
+ error = vbp_add_slice_data_h264(pcontext, i);
+ }
+ break;
+
+ case h264_NAL_UNIT_TYPE_IDR:
+ /* ITRACE("IDR header is parsed."); */
+ error = vbp_add_pic_data_h264(pcontext, i);
+ if (VBP_OK == error)
+ {
+ error = vbp_add_slice_data_h264(pcontext, i);
+ }
+ break;
+
+ case h264_NAL_UNIT_TYPE_SEI:
+ /* ITRACE("SEI header is parsed."); */
+ break;
+
+ case h264_NAL_UNIT_TYPE_SPS:
+ /*ITRACE("SPS header is parsed."); */
+ break;
+
+ case h264_NAL_UNIT_TYPE_PPS:
+ /* ITRACE("PPS header is parsed."); */
+ break;
+
+ case h264_NAL_UNIT_TYPE_Acc_unit_delimiter:
+ /* ITRACE("ACC unit delimiter is parsed."); */
+ break;
+
+ case h264_NAL_UNIT_TYPE_EOSeq:
+ /* ITRACE("EOSeq is parsed."); */
+ break;
+
+ case h264_NAL_UNIT_TYPE_EOstream:
+ /* ITRACE("EOStream is parsed."); */
+ break;
+
+ default:
+ WTRACE("unknown header %d is parsed.", parser->info.nal_unit_type);
+ break;
+ }
+ return error;
+}
+
+/*
+*
+* Fill the query data structure after a whole sample buffer is parsed:
+* codec data, rolling buffer counter, IQ matrix, and — when the buffer
+* carried no slices — a dummy picture built from current SPS/PPS state.
+*
+*/
+uint32 vbp_populate_query_data_h264(vbp_context *pcontext)
+{
+ vbp_data_h264 *query_data = NULL;
+ struct h264_viddec_parser *parser = NULL;
+
+ parser = (struct h264_viddec_parser *)pcontext->parser_cxt->codec_data;
+ query_data = (vbp_data_h264 *)pcontext->query_data;
+
+ vbp_set_codec_data_h264(parser, query_data->codec_data);
+
+ /* buffer number */
+ query_data->buf_number = buffer_counter;
+
+ /* VAIQMatrixBufferH264 */
+ vbp_set_scaling_list_h264(parser, query_data->IQ_matrix_buf);
+
+ if (query_data->num_pictures > 0)
+ {
+ /*
+ * picture parameter buffer and slice parameter buffer have been populated
+ */
+ }
+ else
+ {
+ /**
+ * add a dummy picture that contains picture parameters parsed
+ from SPS and PPS.
+ */
+ vbp_add_pic_data_h264(pcontext, 0);
+ }
+ return VBP_OK;
+}
diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.h b/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.h
new file mode 100644
index 0000000..6ed4499
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.h
@@ -0,0 +1,48 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+#ifndef VBP_H264_PARSER_H
+#define VBP_H264_PARSER_H
+
+/*
+ * setup parser's entry points
+ */
+uint32 vbp_init_parser_entries_h264(vbp_context *pcontext);
+
+/*
+ * allocate query data
+ */
+uint32 vbp_allocate_query_data_h264(vbp_context *pcontext);
+
+/*
+ * free query data
+ */
+uint32 vbp_free_query_data_h264(vbp_context *pcontext);
+
+/*
+ * parse initialization data
+ */
+uint32 vbp_parse_init_data_h264(vbp_context *pcontext);
+
+/*
+ * parse start code. Only supports length-prefixed mode. Start
+ * code prefixed is not supported.
+ */
+uint32 vbp_parse_start_code_h264(vbp_context *pcontext);
+
+/*
+ * process parsing result
+ */
+uint32 vbp_process_parsing_result_h264(vbp_context *pcontext, int list_index);
+
+/*
+ * query parsing result
+ */
+uint32 vbp_populate_query_data_h264(vbp_context *pcontext);
+
+#endif /*VBP_H264_PARSER_H*/
diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_loader.c b/mix_vbp/viddec_fw/fw/parser/vbp_loader.c
new file mode 100644
index 0000000..27a2dd0
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/vbp_loader.c
@@ -0,0 +1,162 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+#include <glib.h>
+
+#include "vbp_loader.h"
+#include "vbp_utils.h"
+
+/**
+ * Create a parser context for the given parser type and return it through
+ * hcontext. On failure *hcontext is left NULL and an error code is returned.
+ */
+uint32 vbp_open(uint32 parser_type, Handle *hcontext)
+{
+ vbp_context **ppcontext;
+ uint32 error;
+
+ if (NULL == hcontext)
+ {
+ return VBP_PARM;
+ }
+
+ *hcontext = NULL; /* prepare for failure. */
+
+ ppcontext = (vbp_context **)hcontext;
+
+ /**
+ * TO DO:
+ * check if vbp context has been created.
+ */
+
+ error = vbp_utils_create_context(parser_type, ppcontext);
+ if (VBP_OK != error)
+ {
+ ETRACE("Failed to create context: %d.", error);
+ }
+
+ return error;
+}
+
+/**
+ * Destroy a parser context previously created by vbp_open. The handle is
+ * validated against MAGIC_NUMBER before destruction.
+ */
+uint32 vbp_close(Handle hcontext)
+{
+ uint32 error;
+
+ if (NULL == hcontext)
+ {
+ return VBP_PARM;
+ }
+
+ vbp_context *pcontext = (vbp_context *)hcontext;
+
+ if (MAGIC_NUMBER != pcontext->identifier)
+ {
+ /* not a valid vbp context. */
+ ETRACE("context is not initialized");
+ return VBP_INIT;
+ }
+ error = vbp_utils_destroy_context(pcontext);
+ if (VBP_OK != error)
+ {
+ ETRACE("Failed to destroy context: %d.", error);
+ }
+
+ return error;
+}
+
+
+/**
+ * Parse one bitstream buffer. init_data_flag selects whether the buffer
+ * holds codec configuration data or elementary stream data.
+ */
+uint32 vbp_parse(Handle hcontext, uint8 *data, uint32 size, uint8 init_data_flag)
+{
+ vbp_context *pcontext;
+ uint32 error = VBP_OK;
+
+ if ((NULL == hcontext) || (NULL == data) || (0 == size))
+ {
+ ETRACE("Invalid input parameters.");
+ return VBP_PARM;
+ }
+
+ pcontext = (vbp_context *)hcontext;
+
+ if (MAGIC_NUMBER != pcontext->identifier)
+ {
+ ETRACE("context is not initialized");
+ return VBP_INIT;
+ }
+
+ error = vbp_utils_parse_buffer(pcontext, data, size, init_data_flag);
+
+ if (VBP_OK != error)
+ {
+ ETRACE("Failed to parse buffer: %d.", error);
+ }
+ return error;
+}
+
+/**
+ * Retrieve the parsing result. *data receives a parser-type-specific blob
+ * (e.g. vbp_data_h264) owned by the context.
+ */
+uint32 vbp_query(Handle hcontext, void **data)
+{
+ vbp_context *pcontext;
+ uint32 error = VBP_OK;
+
+ if ((NULL == hcontext) || (NULL == data))
+ {
+ ETRACE("Invalid input parameters.");
+ return VBP_PARM;
+ }
+
+ pcontext = (vbp_context *)hcontext;
+
+ if (MAGIC_NUMBER != pcontext->identifier)
+ {
+ ETRACE("context is not initialized");
+ return VBP_INIT;
+ }
+
+ error = vbp_utils_query(pcontext, data);
+
+ if (VBP_OK != error)
+ {
+ ETRACE("Failed to query parsing result: %d.", error);
+ }
+ return error;
+}
+
+/**
+ * Flush any un-parsed bitstream held by the context.
+ */
+uint32 vbp_flush(Handle hcontext)
+{
+ vbp_context *pcontext;
+ uint32 error = VBP_OK;
+
+ if (NULL == hcontext)
+ {
+ ETRACE("Invalid input parameters.");
+ return VBP_PARM;
+ }
+
+ pcontext = (vbp_context *)hcontext;
+
+ if (MAGIC_NUMBER != pcontext->identifier)
+ {
+ ETRACE("context is not initialized");
+ return VBP_INIT;
+ }
+
+ error = vbp_utils_flush(pcontext);
+
+ return error;
+}
diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_loader.h b/mix_vbp/viddec_fw/fw/parser/vbp_loader.h
new file mode 100644
index 0000000..66169dd
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/vbp_loader.h
@@ -0,0 +1,318 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+#ifndef VBP_LOADER_H
+#define VBP_LOADER_H
+
+#include <va/va.h>
+
+#ifndef TRUE
+#define TRUE 1
+#endif
+
+#ifndef FALSE
+#define FALSE 0
+#endif
+
+
+#ifndef uint8
+typedef unsigned char uint8;
+#endif
+#ifndef uint16
+typedef unsigned short uint16;
+#endif
+#ifndef uint32
+typedef unsigned int uint32;
+#endif
+#ifndef bool
+typedef int bool;
+#endif
+
+typedef void *Handle;
+
+/*
+ * MPEG-4 Part 2 data structure
+ */
+
+typedef struct _vbp_codec_data_mp42
+{
+ uint8 profile_and_level_indication;
+} vbp_codec_data_mp42;
+
+typedef struct _vbp_slice_data_mp42
+{
+ uint8* buffer_addr;
+ uint32 slice_offset;
+ uint32 slice_size;
+ VASliceParameterBufferMPEG4 slice_param;
+} vbp_slice_data_mp42;
+
+typedef struct _vbp_picture_data_mp42
+{
+ uint8 vop_coded;
+ VAPictureParameterBufferMPEG4 picture_param;
+ VAIQMatrixBufferMPEG4 iq_matrix_buffer;
+
+ uint32 number_slices;
+ vbp_slice_data_mp42 *slice_data;
+
+} vbp_picture_data_mp42;
+
+typedef struct _vbp_data_mp42
+{
+ vbp_codec_data_mp42 codec_data;
+
+ uint32 number_pictures;
+ vbp_picture_data_mp42 *picture_data;
+
+} vbp_data_mp42;
+
+/*
+ * H.264 data structure
+ */
+
+/* Codec-level parameters (from SPS/PPS/VUI) exported for H.264 streams. */
+typedef struct _vbp_codec_data_h264
+{
+ uint8 pic_parameter_set_id;
+ uint8 seq_parameter_set_id;
+
+ uint8 profile_idc;
+ uint8 level_idc;
+ uint8 constraint_set1_flag;
+
+ uint8 num_ref_frames;
+ uint8 gaps_in_frame_num_value_allowed_flag;
+
+ uint8 frame_mbs_only_flag;
+ uint8 mb_adaptive_frame_field_flag;
+
+ int frame_width;
+ int frame_height;
+
+ /* frame cropping rectangle (SPS cropping offsets) */
+ uint8 frame_cropping_flag;
+ int frame_crop_rect_left_offset;
+ int frame_crop_rect_right_offset;
+ int frame_crop_rect_top_offset;
+ int frame_crop_rect_bottom_offset;
+
+ uint8 vui_parameters_present_flag;
+ /* aspect ratio */
+ uint8 aspect_ratio_info_present_flag;
+ uint8 aspect_ratio_idc;
+ uint16 sar_width;
+ uint16 sar_height;
+
+ /* video format */
+ uint8 video_signal_type_present_flag;
+ uint8 video_format;
+
+} vbp_codec_data_h264;
+
+typedef struct _vbp_slice_data_h264
+{
+ uint8* buffer_addr;
+
+ uint32 slice_offset; /* slice data offset */
+
+ uint32 slice_size; /* slice data size */
+
+ VASliceParameterBufferH264 slc_parms;
+
+} vbp_slice_data_h264;
+
+
+ typedef struct _vbp_picture_data_h264
+ {
+ VAPictureParameterBufferH264* pic_parms;
+
+ uint32 num_slices;
+
+ vbp_slice_data_h264* slc_data;
+
+ } vbp_picture_data_h264;
+
+
+typedef struct _vbp_data_h264
+{
+ /* rolling counter of buffers sent by vbp_parse */
+ uint32 buf_number;
+
+ uint32 num_pictures;
+
+ vbp_picture_data_h264* pic_data;
+
+ /**
+ * do we need to send matrix to VA for each picture? If not, we need
+ * a flag indicating whether it is updated.
+ */
+ VAIQMatrixBufferH264* IQ_matrix_buf;
+
+ vbp_codec_data_h264* codec_data;
+
+} vbp_data_h264;
+
+/*
+ * vc1 data structure
+ */
+typedef struct _vbp_codec_data_vc1
+{
+ /* Sequence layer. */
+ uint8 PROFILE;
+ uint8 LEVEL;
+ uint8 POSTPROCFLAG;
+ uint8 PULLDOWN;
+ uint8 INTERLACE;
+ uint8 TFCNTRFLAG;
+ uint8 FINTERPFLAG;
+ uint8 PSF;
+
+ /* Entry point layer. */
+ uint8 BROKEN_LINK;
+ uint8 CLOSED_ENTRY;
+ uint8 PANSCAN_FLAG;
+ uint8 REFDIST_FLAG;
+ uint8 LOOPFILTER;
+ uint8 FASTUVMC;
+ uint8 EXTENDED_MV;
+ uint8 DQUANT;
+ uint8 VSTRANSFORM;
+ uint8 OVERLAP;
+ uint8 QUANTIZER;
+ uint16 CODED_WIDTH;
+ uint16 CODED_HEIGHT;
+ uint8 EXTENDED_DMV;
+ uint8 RANGE_MAPY_FLAG;
+ uint8 RANGE_MAPY;
+ uint8 RANGE_MAPUV_FLAG;
+ uint8 RANGE_MAPUV;
+
+ /* Others. */
+ uint8 RANGERED;
+ uint8 MAXBFRAMES;
+ uint8 MULTIRES;
+ uint8 SYNCMARKER;
+ uint8 RNDCTRL;
+ uint8 REFDIST;
+ uint16 widthMB;
+ uint16 heightMB;
+
+ uint8 INTCOMPFIELD;
+ uint8 LUMSCALE2;
+ uint8 LUMSHIFT2;
+} vbp_codec_data_vc1;
+
+typedef struct _vbp_slice_data_vc1
+{
+ uint8 *buffer_addr;
+ uint32 slice_offset;
+ uint32 slice_size;
+ VASliceParameterBufferVC1 slc_parms; /* pointer to slice parms */
+} vbp_slice_data_vc1;
+
+
+typedef struct _vbp_picture_data_vc1
+{
+ uint32 picture_is_skipped; /* VC1_PTYPE_SKIPPED is PTYPE is skipped. */
+ VAPictureParameterBufferVC1 *pic_parms; /* current parsed picture header */
+ uint32 size_bitplanes; /* based on number of MBs */
+ uint8 *packed_bitplanes; /* contains up to three bitplanes packed for libVA */
+ uint32 num_slices; /* number of slices. always at least one */
+ vbp_slice_data_vc1 *slc_data; /* pointer to array of slice data */
+} vbp_picture_data_vc1;
+
+typedef struct _vbp_data_vc1
+{
+ uint32 buf_number; /* rolling counter of buffers sent by vbp_parse */
+ vbp_codec_data_vc1 *se_data; /* parsed SH/EPs */
+
+ uint32 num_pictures;
+
+ vbp_picture_data_vc1* pic_data;
+} vbp_data_vc1;
+
+enum _picture_type
+{
+ VC1_PTYPE_I,
+ VC1_PTYPE_P,
+ VC1_PTYPE_B,
+ VC1_PTYPE_BI,
+ VC1_PTYPE_SKIPPED
+};
+
+enum _vbp_parser_error
+{
+ VBP_OK,
+ VBP_TYPE,
+ VBP_LOAD,
+ VBP_UNLOAD,
+ VBP_INIT,
+ VBP_DATA,
+ VBP_DONE,
+ VBP_GLIB,
+ VBP_MEM,
+ VBP_PARM,
+ VBP_CXT,
+ VBP_IMPL
+};
+
+enum _vbp_parser_type
+{
+ VBP_VC1,
+ VBP_MPEG2,
+ VBP_MPEG4,
+ VBP_H264
+};
+
+/*
+ * open video bitstream parser to parse a specific media type.
+ * @param parser_type: one of the types defined in #vbp_parser_type
+ * @param hcontext: pointer to hold returned VBP context handle.
+ * @return VBP_OK on success, anything else on failure.
+ *
+ */
+uint32 vbp_open(uint32 parser_type, Handle *hcontext);
+
+/*
+ * close video bitstream parser.
+ * @param hcontext: VBP context handle.
+ * @returns VBP_OK on success, anything else on failure.
+ *
+ */
+uint32 vbp_close(Handle hcontext);
+
+/*
+ * parse bitstream.
+ * @param hcontext: handle to VBP context.
+ * @param data: pointer to bitstream buffer.
+ * @param size: size of bitstream buffer.
+ * @param init_flag: 1 if buffer contains bitstream configuration data, 0 otherwise.
+ * @return VBP_OK on success, anything else on failure.
+ *
+ */
+uint32 vbp_parse(Handle hcontext, uint8 *data, uint32 size, uint8 init_data_flag);
+
+/*
+ * query parsing result.
+ * @param hcontext: handle to VBP context.
+ * @param data: pointer to hold a data blob that contains parsing result.
+ * Structure of data blob is determined by the media type.
+ * @return VBP_OK on success, anything else on failure.
+ *
+ */
+uint32 vbp_query(Handle hcontext, void **data);
+
+
+/*
+ * flush any un-parsed bitstream.
+ * @param hcontext: handle to VBP context.
+ * @returns VBP_OK on success, anything else on failure.
+ *
+ */
+uint32 vbp_flush(Handle hcontent);
+
+#endif /* VBP_LOADER_H */
diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c b/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c
new file mode 100644
index 0000000..87beca4
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c
@@ -0,0 +1,1277 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+
+#include <glib.h>
+#include <dlfcn.h>
+
+#include <string.h>
+#include "vbp_loader.h"
+#include "vbp_utils.h"
+#include "vbp_mp42_parser.h"
+#include "../codecs/mp4/parser/viddec_mp4_parse.h"
+
+#define MIX_VBP_COMP "mixvbp"
+
+/*
+ * Some divX avi files contain 2 frames in one gstbuffer.
+ */
+#define MAX_NUM_PICTURES_MP42 8
+
+uint32 vbp_get_sc_pos_mp42(uint8 *buf, uint32 length,
+ uint32* sc_phase, uint32 *sc_end_pos, uint8 *is_normal_sc);
+
+void vbp_on_vop_mp42(vbp_context *pcontext, int list_index);
+void vbp_on_vop_svh_mp42(vbp_context *pcontext, int list_index);
+void vbp_dump_query_data(vbp_context *pcontext, int list_index);
+
+uint32 vbp_process_slices_mp42(vbp_context *pcontext, int list_index);
+uint32 vbp_process_slices_svh_mp42(vbp_context *pcontext, int list_index);
+
+/* This is copied from the DHG mp42 parser */
+static inline mp4_Status_t
+vbp_sprite_trajectory_mp42(void *parent, mp4_VideoObjectLayer_t *vidObjLay,
+ mp4_VideoObjectPlane_t *vidObjPlane);
+
+/* This is copied from the DHG mp42 parser */
+static inline int32_t vbp_sprite_dmv_length_mp42(void * parent,
+ int32_t *dmv_length);
+
+/**
+ * Resolve the MPEG-4 parser entry points from the already-opened parser
+ * library handle (pcontext->fd_parser) and store them in
+ * pcontext->parser_ops.
+ *
+ * @param pcontext: VBP context whose parser_ops table is to be filled.
+ * @return VBP_OK on success; VBP_PARM if parser_ops is NULL;
+ *         VBP_LOAD if any viddec_mp4_* symbol cannot be resolved.
+ */
+uint32 vbp_init_parser_entries_mp42( vbp_context *pcontext)
+{
+ if (NULL == pcontext->parser_ops)
+ {
+ /* absolutely impossible, just sanity check */
+ return VBP_PARM;
+ }
+ pcontext->parser_ops->init = dlsym(pcontext->fd_parser, "viddec_mp4_init");
+ if (pcontext->parser_ops->init == NULL)
+ {
+ ETRACE ("Failed to set entry point." );
+ return VBP_LOAD;
+ }
+
+ pcontext->parser_ops->parse_sc = dlsym(pcontext->fd_parser, "viddec_parse_sc_mp4");
+ if (pcontext->parser_ops->parse_sc == NULL)
+ {
+ ETRACE ("Failed to set entry point." );
+ return VBP_LOAD;
+ }
+
+ pcontext->parser_ops->parse_syntax = dlsym(pcontext->fd_parser, "viddec_mp4_parse");
+ if (pcontext->parser_ops->parse_syntax == NULL)
+ {
+ ETRACE ("Failed to set entry point." );
+ return VBP_LOAD;
+ }
+
+ pcontext->parser_ops->get_cxt_size = dlsym(pcontext->fd_parser, "viddec_mp4_get_context_size");
+ if (pcontext->parser_ops->get_cxt_size == NULL)
+ {
+ ETRACE ("Failed to set entry point." );
+ return VBP_LOAD;
+ }
+
+ pcontext->parser_ops->is_wkld_done = dlsym(pcontext->fd_parser, "viddec_mp4_wkld_done");
+ if (pcontext->parser_ops->is_wkld_done == NULL)
+ {
+ ETRACE ("Failed to set entry point." );
+ return VBP_LOAD;
+ }
+
+ return VBP_OK;
+}
+
+
+/*
+ * Handle the codec_data (configuration headers) passed by gstreamer.
+ * Simply runs the regular start-code scan over the buffer; the actual
+ * header parsing happens in the per-item syntax parsing that follows.
+ * Always returns VBP_OK (vbp_parse_start_code_mp42 cannot fail).
+ */
+uint32 vbp_parse_init_data_mp42(vbp_context *pcontext)
+{
+ VTRACE ("begin\n");
+ vbp_parse_start_code_mp42(pcontext);
+ VTRACE ("end\n");
+
+ return VBP_OK;
+}
+
+/*
+ * Post-process the parser state after one list item has been parsed:
+ * record the profile/level on sequence or VOL headers, and emit a picture
+ * (vbp_on_vop_*) when a VOP / short-video-header frame was parsed.
+ *
+ * @param pcontext: VBP context holding parser state and query data.
+ * @param list_index: index of the just-parsed item in parser_cxt->list.
+ * @return always VBP_OK.
+ */
+uint32 vbp_process_parsing_result_mp42(vbp_context *pcontext, int list_index)
+{
+ vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data;
+ viddec_mp4_parser_t *parser =
+ (viddec_mp4_parser_t *) &(pcontext->parser_cxt->codec_data[0]);
+
+ uint8 is_svh = 0;
+ uint32 current_sc = parser->current_sc;
+ /* cur_sc_prefix != 0 means a normal 00 00 01 start code was seen;
+  * otherwise the stream is short-video-header (H.263 baseline) style. */
+ is_svh = parser->cur_sc_prefix ? false : true;
+
+ VTRACE ("begin\n");
+
+ VTRACE ("current_sc = 0x%x profile_and_level_indication = 0x%x\n",
+ parser->current_sc, parser->info.profile_and_level_indication);
+
+ if (!is_svh)
+ {
+ /* remove prefix from current_sc */
+ current_sc &= 0x0FF;
+ switch (current_sc)
+ {
+ case MP4_SC_VISUAL_OBJECT_SEQUENCE:
+ VTRACE ("MP4_SC_VISUAL_OBJECT_SEQUENCE\n");
+
+ query_data->codec_data.profile_and_level_indication
+ = parser->info.profile_and_level_indication;
+
+ break;
+ case MP4_SC_VIDEO_OBJECT_PLANE:
+ VTRACE ("MP4_SC_VIDEO_OBJECT_PLANE\n");
+ vbp_on_vop_mp42(pcontext, list_index);
+ break;
+ default: {
+ /* VOL start codes also carry profile/level; anything else in the
+  * video-object range may be an SVH frame already parsed. */
+ if ((current_sc >= MP4_SC_VIDEO_OBJECT_LAYER_MIN) && (current_sc
+ <= MP4_SC_VIDEO_OBJECT_LAYER_MAX)) {
+ query_data->codec_data.profile_and_level_indication
+ = parser->info.profile_and_level_indication;
+ } else if (current_sc <= MP4_SC_VIDEO_OBJECT_MAX) {
+ if (parser->sc_seen == MP4_SC_SEEN_SVH) {
+ VTRACE ("parser->sc_seen == MP4_SC_SEEN_SVH\n");
+ vbp_on_vop_svh_mp42(pcontext, list_index);
+ }
+ }
+ }
+ break;
+ }
+
+ } else {
+ if (parser->sc_seen == MP4_SC_SEEN_SVH) {
+ VTRACE ("parser->sc_seen == MP4_SC_SEEN_SVH\n");
+ vbp_on_vop_svh_mp42(pcontext, list_index);
+ }
+ }
+
+ VTRACE ("End\n");
+
+ return VBP_OK;
+}
+
+/*
+ * This function fills viddec_pm_cxt_t by start codes:
+ * it scans the whole sample buffer (parse_cubby) with
+ * vbp_get_sc_pos_mp42() and records one list item per start code, where
+ * each item's [stpos, edpos) covers the start code and its payload. If no
+ * start code is found at all, the entire buffer becomes a single item.
+ * Also resets query_data->number_pictures for the new sample.
+ * I may change the codes to make it more efficient later.
+ * @return always VBP_OK.
+ */
+
+uint32 vbp_parse_start_code_mp42(vbp_context *pcontext)
+{
+ viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
+ /*viddec_parser_ops_t *func = pcontext->parser_ops; */
+ uint8 *buf = NULL;
+ uint32 size = 0;
+ uint32 sc_phase = 0;
+ uint32 sc_end_pos = -1;
+
+ uint32 bytes_parsed = 0;
+
+ viddec_mp4_parser_t *pinfo = NULL;
+
+ vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data;
+ /* reset query data for the new sample buffer */
+ query_data->number_pictures = 0;
+
+ /* emulation prevention byte is always present */
+ cxt->getbits.is_emul_reqd = 1;
+
+ cxt->list.num_items = 0;
+ cxt->list.data[0].stpos = 0;
+ cxt->list.data[0].edpos = cxt->parse_cubby.size;
+
+ buf = cxt->parse_cubby.buf;
+ size = cxt->parse_cubby.size;
+
+ pinfo = (viddec_mp4_parser_t *) &(cxt->codec_data[0]);
+
+ uint8 is_normal_sc = 0;
+
+ uint32 found_sc = 0;
+
+ VTRACE ("begin cxt->parse_cubby.size= %d\n", size);
+
+ while (1) {
+
+ sc_phase = 0;
+
+ found_sc = vbp_get_sc_pos_mp42(buf + bytes_parsed, size
+ - bytes_parsed, &sc_phase, &sc_end_pos, &is_normal_sc);
+
+ if (found_sc) {
+
+ VTRACE ("sc_end_pos = %d\n", sc_end_pos);
+
+ /* sc_end_pos is one past the last start-code byte; back up 3 so the
+  * item starts at the first 0x00 of the 00 00 01 prefix. */
+ cxt->list.data[cxt->list.num_items].stpos = bytes_parsed
+ + sc_end_pos - 3;
+ if (cxt->list.num_items != 0) {
+ cxt->list.data[cxt->list.num_items - 1].edpos = bytes_parsed
+ + sc_end_pos - 3;
+ }
+ bytes_parsed += sc_end_pos;
+
+ cxt->list.num_items++;
+ pinfo->cur_sc_prefix = is_normal_sc;
+
+ } else {
+
+ if (cxt->list.num_items != 0) {
+ /* close the last item at the end of the buffer */
+ cxt->list.data[cxt->list.num_items - 1].edpos
+ = cxt->parse_cubby.size;
+ break;
+ } else {
+
+ VTRACE ("I didn't find any sc in cubby buffer! The size of cubby is %d\n",
+ size);
+
+ cxt->list.num_items = 1;
+ cxt->list.data[0].stpos = 0;
+ cxt->list.data[0].edpos = cxt->parse_cubby.size;
+ break;
+ }
+ }
+ }
+
+ return VBP_OK;
+}
+
+/*
+ * Finalize query data for the sample — a no-op for MPEG-4, since the data
+ * is filled incrementally by vbp_on_vop_*().
+ * NOTE(review): the disabled debug call below passes only pcontext, but
+ * vbp_dump_query_data() takes (pcontext, list_index); it would not compile
+ * if re-enabled as-is.
+ */
+uint32 vbp_populate_query_data_mp42(vbp_context *pcontext)
+{
+#if 0
+ vbp_dump_query_data(pcontext);
+#endif
+ return VBP_OK;
+}
+
+/*
+ * Copy the stream-level profile/level indication from the parser state
+ * into query_data->codec_data. list_index is unused here.
+ */
+void vbp_fill_codec_data(vbp_context *pcontext, int list_index)
+{
+
+ /* fill vbp_codec_data_mp42 data */
+ viddec_mp4_parser_t *parser =
+ (viddec_mp4_parser_t *) &(pcontext->parser_cxt->codec_data[0]);
+ vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data;
+ query_data->codec_data.profile_and_level_indication
+ = parser->info.profile_and_level_indication;
+}
+
+/*
+ * Dispatch slice extraction for the current picture: normal MPEG-4 streams
+ * go through the resync-marker-aware path, short-video-header streams
+ * through the single-slice SVH path.
+ */
+void vbp_fill_slice_data(vbp_context *pcontext, int list_index)
+{
+
+ viddec_mp4_parser_t *parser =
+ (viddec_mp4_parser_t *) &(pcontext->parser_cxt->codec_data[0]);
+
+ if (!parser->info.VisualObject.VideoObject.short_video_header) {
+ vbp_process_slices_mp42(pcontext, list_index);
+ } else {
+ vbp_process_slices_svh_mp42(pcontext, list_index);
+ }
+}
+
+/*
+ * Fill the VAPictureParameterBufferMPEG4 for the picture currently being
+ * assembled (query_data->picture_data[number_pictures]) from the parsed
+ * VOL/VOP state. Reference surfaces are left as VA_INVALID_SURFACE; the
+ * renderer is expected to patch them in. list_index is unused here.
+ */
+void vbp_fill_picture_param(vbp_context *pcontext, int list_index)
+{
+
+ viddec_mp4_parser_t *parser =
+ (viddec_mp4_parser_t *) &(pcontext->parser_cxt->codec_data[0]);
+ vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data;
+
+ vbp_picture_data_mp42 *picture_data = NULL;
+ VAPictureParameterBufferMPEG4 *picture_param = NULL;
+
+ picture_data = &(query_data->picture_data[query_data->number_pictures]);
+
+ picture_param = &(picture_data->picture_param);
+
+ uint8 idx = 0;
+
+ picture_data->vop_coded
+ = parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_coded;
+ VTRACE ("vop_coded = %d\n", picture_data->vop_coded);
+
+ /*
+ * fill picture_param
+ */
+
+ /* NOTE: for short video header, the parser saves vop_width and vop_height
+ * to VOL->video_object_layer_width and VOL->video_object_layer_height
+ */
+ picture_param->vop_width
+ = parser->info.VisualObject.VideoObject.video_object_layer_width;
+ picture_param->vop_height
+ = parser->info.VisualObject.VideoObject.video_object_layer_height;
+
+ picture_param->forward_reference_picture = VA_INVALID_SURFACE;
+ picture_param->backward_reference_picture = VA_INVALID_SURFACE;
+
+ /*
+ * VAPictureParameterBufferMPEG4::vol_fields
+ */
+ picture_param->vol_fields.bits.short_video_header
+ = parser->info.VisualObject.VideoObject.short_video_header;
+ picture_param->vol_fields.bits.chroma_format
+ = parser->info.VisualObject.VideoObject.VOLControlParameters.chroma_format;
+
+ /* TODO: find out why testsuite always set this value to be 0 */
+ // picture_param->vol_fields.bits.chroma_format = 0;
+
+ picture_param->vol_fields.bits.interlaced
+ = parser->info.VisualObject.VideoObject.interlaced;
+ picture_param->vol_fields.bits.obmc_disable
+ = parser->info.VisualObject.VideoObject.obmc_disable;
+ picture_param->vol_fields.bits.sprite_enable
+ = parser->info.VisualObject.VideoObject.sprite_enable;
+ picture_param->vol_fields.bits.sprite_warping_accuracy
+ = parser->info.VisualObject.VideoObject.sprite_info.sprite_warping_accuracy;
+ picture_param->vol_fields.bits.quant_type
+ = parser->info.VisualObject.VideoObject.quant_type;
+ picture_param->vol_fields.bits.quarter_sample
+ = parser->info.VisualObject.VideoObject.quarter_sample;
+ picture_param->vol_fields.bits.data_partitioned
+ = parser->info.VisualObject.VideoObject.data_partitioned;
+ picture_param->vol_fields.bits.reversible_vlc
+ = parser->info.VisualObject.VideoObject.reversible_vlc;
+ picture_param->vol_fields.bits.resync_marker_disable
+ = parser->info.VisualObject.VideoObject.resync_marker_disable;
+
+ picture_param->no_of_sprite_warping_points
+ = parser->info.VisualObject.VideoObject.sprite_info.no_of_sprite_warping_points;
+
+ /* only the first 3 warping points are exported (libva limit) */
+ for (idx = 0; idx < 3; idx++) {
+ picture_param->sprite_trajectory_du[idx]
+ = parser->info.VisualObject.VideoObject.VideoObjectPlane.warping_mv_code_du[idx];
+ picture_param->sprite_trajectory_dv[idx]
+ = parser->info.VisualObject.VideoObject.VideoObjectPlane.warping_mv_code_dv[idx];
+ }
+
+ picture_param->quant_precision
+ = parser->info.VisualObject.VideoObject.quant_precision;
+
+ /*
+ * VAPictureParameterBufferMPEG4::vop_fields
+ */
+
+ if (!parser->info.VisualObject.VideoObject.short_video_header) {
+ picture_param->vop_fields.bits.vop_coding_type
+ = parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_coding_type;
+ } else {
+ picture_param->vop_fields.bits.vop_coding_type
+ = parser->info.VisualObject.VideoObject.VideoObjectPlaneH263.picture_coding_type;
+ }
+
+ /* TODO:
+ * fill picture_param->vop_fields.bits.backward_reference_vop_coding_type
+ * This shall be done in mixvideoformat_mp42. See M42 spec 7.6.7
+ */
+
+ if (picture_param->vop_fields.bits.vop_coding_type != MP4_VOP_TYPE_B) {
+ picture_param->vop_fields.bits.backward_reference_vop_coding_type
+ = picture_param->vop_fields.bits.vop_coding_type;
+ }
+
+ picture_param->vop_fields.bits.vop_rounding_type
+ = parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_rounding_type;
+ picture_param->vop_fields.bits.intra_dc_vlc_thr
+ = parser->info.VisualObject.VideoObject.VideoObjectPlane.intra_dc_vlc_thr;
+ picture_param->vop_fields.bits.top_field_first
+ = parser->info.VisualObject.VideoObject.VideoObjectPlane.top_field_first;
+ picture_param->vop_fields.bits.alternate_vertical_scan_flag
+ = parser->info.VisualObject.VideoObject.VideoObjectPlane.alternate_vertical_scan_flag;
+
+ picture_param->vop_fcode_forward
+ = parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_fcode_forward;
+ picture_param->vop_fcode_backward
+ = parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_fcode_backward;
+ picture_param->vop_time_increment_resolution
+ = parser->info.VisualObject.VideoObject.vop_time_increment_resolution;
+
+ /* short header related */
+ picture_param->num_gobs_in_vop
+ = parser->info.VisualObject.VideoObject.VideoObjectPlaneH263.num_gobs_in_vop;
+ picture_param->num_macroblocks_in_gob
+ = parser->info.VisualObject.VideoObject.VideoObjectPlaneH263.num_macroblocks_in_gob;
+
+ /* for direct mode prediction */
+ picture_param->TRB = parser->info.VisualObject.VideoObject.TRB;
+ picture_param->TRD = parser->info.VisualObject.VideoObject.TRD;
+
+#if 0
+ printf(
+ "parser->info.VisualObject.VideoObject.reduced_resolution_vop_enable = %d\n",
+ parser->info.VisualObject.VideoObject.reduced_resolution_vop_enable);
+
+ printf("parser->info.VisualObject.VideoObject.data_partitioned = %d\n",
+ parser->info.VisualObject.VideoObject.data_partitioned);
+
+ printf(
+ "####parser->info.VisualObject.VideoObject.resync_marker_disable = %d####\n",
+ parser->info.VisualObject.VideoObject.resync_marker_disable);
+#endif
+}
+
+/*
+ * Copy the 64-entry intra/non-intra quantization matrices (and their
+ * load flags) from the parsed VOL state into the current picture's
+ * VAIQMatrixBufferMPEG4. list_index is unused here.
+ */
+void vbp_fill_iq_matrix_buffer(vbp_context *pcontext, int list_index)
+{
+
+ viddec_mp4_parser_t *parser =
+ (viddec_mp4_parser_t *) &(pcontext->parser_cxt->codec_data[0]);
+ vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data;
+
+ mp4_VOLQuant_mat_t *quant_mat_info =
+ &(parser->info.VisualObject.VideoObject.quant_mat_info);
+
+ vbp_picture_data_mp42 *picture_data = NULL;
+ VAIQMatrixBufferMPEG4 *iq_matrix = NULL;
+
+ picture_data = &(query_data->picture_data[query_data->number_pictures]);
+ iq_matrix = &(picture_data->iq_matrix_buffer);
+
+ iq_matrix->load_intra_quant_mat = quant_mat_info->load_intra_quant_mat;
+ iq_matrix->load_non_intra_quant_mat
+ = quant_mat_info->load_nonintra_quant_mat;
+ memcpy(iq_matrix->intra_quant_mat, quant_mat_info->intra_quant_mat, 64);
+ memcpy(iq_matrix->non_intra_quant_mat, quant_mat_info->nonintra_quant_mat,
+ 64);
+}
+
+/*
+ * A complete MPEG-4 VOP has been parsed: emit one picture into query_data
+ * (codec data, picture params, IQ matrix, slices) and advance
+ * number_pictures.
+ * NOTE(review): number_pictures is not bounds-checked against
+ * MAX_NUM_PICTURES_MP42 here — presumably the caller limits pictures per
+ * sample; verify.
+ */
+void vbp_on_vop_mp42(vbp_context *pcontext, int list_index)
+{
+ vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data;
+
+ vbp_fill_codec_data(pcontext, list_index);
+
+ vbp_fill_picture_param(pcontext, list_index);
+ vbp_fill_iq_matrix_buffer(pcontext, list_index);
+ vbp_fill_slice_data(pcontext, list_index);
+
+ query_data->number_pictures++;
+}
+
+/*
+ * Short-video-header (H.263 style) variant of vbp_on_vop_mp42: emits one
+ * picture into query_data and advances number_pictures. The fill helpers
+ * themselves branch on short_video_header where needed.
+ */
+void vbp_on_vop_svh_mp42(vbp_context *pcontext, int list_index)
+{
+ vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data;
+
+ vbp_fill_codec_data(pcontext, list_index);
+
+ vbp_fill_picture_param(pcontext, list_index);
+ vbp_fill_iq_matrix_buffer(pcontext, list_index);
+ vbp_fill_slice_data(pcontext, list_index);
+
+ query_data->number_pictures++;
+}
+
+/*
+ * Scan buf[0..length) for the next MPEG-4 start code (00 00 01 xx) or a
+ * short-video-header start code (third byte matched against
+ * SHORT_THIRD_STARTCODE_BYTE on its top 6 bits).
+ *
+ * @param buf/length: window to scan.
+ * @param sc_phase: in/out count of prefix bytes already matched, so a
+ *        start code split across calls can still be detected.
+ * @param sc_end_pos: out; offset one past the last start-code byte
+ *        (set to (uint32)-1 when nothing is found).
+ * @param is_normal_sc: out; nonzero for a normal 00 00 01 start code,
+ *        zero for the short (SVH) form.
+ * @return 1 if a complete start code was found, 0 otherwise.
+ */
+uint32 vbp_get_sc_pos_mp42(
+ uint8 *buf,
+ uint32 length,
+ uint32* sc_phase,
+ uint32 *sc_end_pos,
+ uint8 *is_normal_sc)
+{
+ uint8 *ptr = buf;
+ uint32 size;
+ uint32 data_left = 0, phase = 0, ret = 0;
+ size = 0;
+
+ data_left = length;
+ phase = *sc_phase;
+ *sc_end_pos = -1;
+
+ /* parse until there is more data and start code not found */
+ while ((data_left > 0) && (phase < 3)) {
+ /* Check if we are byte aligned & phase=0, if thats the case we can check
+ work at a time instead of byte*/
+ if (((((uint32) ptr) & 0x3) == 0) && (phase == 0)) {
+ while (data_left > 3) {
+ uint32 data;
+ char mask1 = 0, mask2 = 0;
+
+ data = *((uint32 *) ptr);
+#ifndef MFDBIGENDIAN
+ data = SWAP_WORD(data);
+#endif
+ mask1 = (FIRST_STARTCODE_BYTE != (data & SC_BYTE_MASK0));
+ mask2 = (FIRST_STARTCODE_BYTE != (data & SC_BYTE_MASK1));
+ /* If second byte and fourth byte are not zero's then we cannot have a start code here as we need
+ two consecutive zero bytes for a start code pattern */
+ if (mask1 && mask2) {/* Success so skip 4 bytes and start over */
+ ptr += 4;
+ size += 4;
+ data_left -= 4;
+ continue;
+ } else {
+ break;
+ }
+ }
+ }
+
+ /* At this point either data is not on a word boundary or phase > 0 or On a word boundary but we detected
+ two zero bytes in the word so we look one byte at a time*/
+ if (data_left > 0) {
+ if (*ptr == FIRST_STARTCODE_BYTE) {/* Phase can be 3 only if third start code byte is found */
+ phase++;
+ ptr++;
+ size++;
+ data_left--;
+ if (phase > 2) {
+ phase = 2;
+
+ /* long zero runs: skip whole words of zeros while aligned */
+ if ((((uint32) ptr) & 0x3) == 0) {
+ while (data_left > 3) {
+ if (*((uint32 *) ptr) != 0) {
+ break;
+ }
+ ptr += 4;
+ size += 4;
+ data_left -= 4;
+ }
+ }
+ }
+ } else {
+ uint8 normal_sc = 0, short_sc = 0;
+ if (phase == 2) {
+ normal_sc = (*ptr == THIRD_STARTCODE_BYTE);
+ short_sc = (SHORT_THIRD_STARTCODE_BYTE == (*ptr & 0xFC));
+
+ VTRACE ("short_sc = %d\n", short_sc);
+
+ *is_normal_sc = normal_sc;
+ }
+
+ if (!(normal_sc | short_sc)) {
+ phase = 0;
+ } else {/* Match for start code so update context with byte position */
+ *sc_end_pos = size;
+ phase = 3;
+
+ if (normal_sc) {
+ } else {
+ /* For short start code since start code is in one nibble just return at this point */
+ phase += 1;
+ ret = 1;
+ break;
+ }
+ }
+ ptr++;
+ size++;
+ data_left--;
+ }
+ }
+ }
+ if ((data_left > 0) && (phase == 3)) {
+ (*sc_end_pos)++;
+ phase++;
+ ret = 1;
+ }
+ *sc_phase = phase;
+ /* Return SC found only if phase is 4, else always success */
+ return ret;
+}
+
+/*
+ * Number of bits needed to code a macroblock number in [0, numOfMbs-1],
+ * i.e. the bit length of (numOfMbs - 1), minimum 1.
+ * NOTE(review): numOfMbs == 0 would underflow and loop 32 times; callers
+ * pass mbs_x * mbs_y which is assumed >= 1 — confirm.
+ */
+uint32 vbp_macroblock_number_length_mp42(uint32 numOfMbs)
+{
+ uint32 length = 0;
+ numOfMbs--;
+ do {
+ numOfMbs >>= 1;
+ length++;
+ } while (numOfMbs);
+ return length;
+}
+
+/*
+ * Parse a video_packet_header (the data following a resync marker) per
+ * ISO/IEC 14496-2, consuming bits from the parent parser context.
+ *
+ * @param parent: viddec parser-manager context (bit source).
+ * @param parser_cxt: MPEG-4 parser state (VOL/VOP already parsed).
+ * @param quant_scale: out; quant_scale read from the packet header.
+ * @param macroblock_number: out; first macroblock number of the packet.
+ * @return MP4_STATUS_OK, MP4_STATUS_NOTSUPPORT for non-rectangular shapes
+ *         or newpred streams, or a getbits failure status.
+ */
+mp4_Status_t vbp_video_packet_header_mp42(
+ void *parent,
+ viddec_mp4_parser_t *parser_cxt,
+ uint16_t *quant_scale,
+ uint32 *macroblock_number)
+{
+
+ mp4_Status_t ret = MP4_STATUS_OK;
+ mp4_Info_t *pInfo = &(parser_cxt->info);
+ mp4_VideoObjectLayer_t *vidObjLay = &(pInfo->VisualObject.VideoObject);
+ mp4_VideoObjectPlane_t *vidObjPlane =
+ &(pInfo->VisualObject.VideoObject.VideoObjectPlane);
+
+ uint32 code = 0;
+ int32_t getbits = 0;
+
+ uint16_t _quant_scale = 0;
+ uint32 _macroblock_number = 0;
+ uint32 header_extension_codes = 0;
+ uint8 vop_coding_type = vidObjPlane->vop_coding_type;
+
+ do {
+ if (vidObjLay->video_object_layer_shape != MP4_SHAPE_TYPE_RECTANGULAR) {
+ ret = MP4_STATUS_NOTSUPPORT;
+ break;
+ }
+
+ /* get macroblock_number */
+ {
+ uint16_t mbs_x = (vidObjLay->video_object_layer_width + 15) >> 4;
+ uint16_t mbs_y = (vidObjLay->video_object_layer_height + 15) >> 4;
+ uint32 length = vbp_macroblock_number_length_mp42(mbs_x
+ * mbs_y);
+
+ getbits = viddec_pm_get_bits(parent, &code, length);
+ BREAK_GETBITS_FAIL(getbits, ret);
+
+ /* NOTE(review): this stores the parsed macroblock number into
+ * 'length' and then discards it, so _macroblock_number (and the
+ * caller's *macroblock_number) is always 0. This looks like it
+ * should be "_macroblock_number = code;". */
+ length = code;
+ }
+
+ /* quant_scale */
+ if (vidObjLay->video_object_layer_shape != MP4_SHAPE_TYPE_BINARYONLY) {
+ getbits = viddec_pm_get_bits(parent, &code,
+ vidObjLay->quant_precision);
+ BREAK_GETBITS_FAIL(getbits, ret);
+ _quant_scale = code;
+ }
+
+ /* header_extension_codes */
+ if (vidObjLay->video_object_layer_shape == MP4_SHAPE_TYPE_RECTANGULAR) {
+ getbits = viddec_pm_get_bits(parent, &code, 1);
+ BREAK_GETBITS_FAIL(getbits, ret);
+ header_extension_codes = code;
+ }
+
+ if (header_extension_codes) {
+ /* modulo_time_base: a run of 1-bits terminated by 0 */
+ do {
+ getbits = viddec_pm_get_bits(parent, &code, 1);
+ BREAK_GETBITS_FAIL(getbits, ret);
+ } while (code);
+
+ /* marker_bit */
+ getbits = viddec_pm_get_bits(parent, &code, 1);
+ BREAK_GETBITS_FAIL(getbits, ret);
+
+ /* vop_time_increment */
+ {
+ uint32 numbits = 0;
+ numbits = vidObjLay->vop_time_increment_resolution_bits;
+ if (numbits == 0) {
+ numbits = 1;
+ }
+ getbits = viddec_pm_get_bits(parent, &code, numbits);
+ BREAK_GETBITS_FAIL(getbits, ret);
+ }
+ /* marker_bit */
+ getbits = viddec_pm_get_bits(parent, &code, 1);
+ BREAK_GETBITS_FAIL(getbits, ret);
+
+ /* vop_coding_type */
+ getbits = viddec_pm_get_bits(parent, &code, 2);
+ BREAK_GETBITS_FAIL(getbits, ret);
+
+ vop_coding_type = code & 0x3;
+
+ /* Fixed Klocwork issue: Code is unreachable.
+ * Comment the following codes because we have
+ * already checked video_object_layer_shape
+ */
+ /* if (vidObjLay->video_object_layer_shape
+ != MP4_SHAPE_TYPE_RECTANGULAR) {
+ ret = MP4_STATUS_NOTSUPPORT;
+ break;
+ }
+ */
+ if (vidObjLay->video_object_layer_shape
+ != MP4_SHAPE_TYPE_BINARYONLY) {
+ /* intra_dc_vlc_thr */
+ getbits = viddec_pm_get_bits(parent, &code, 3);
+ BREAK_GETBITS_FAIL(getbits, ret);
+ if ((vidObjLay->sprite_enable == MP4_SPRITE_GMC)
+ && (vop_coding_type == MP4_VOP_TYPE_S)
+ && (vidObjLay->sprite_info.no_of_sprite_warping_points
+ > 0)) {
+ if (vbp_sprite_trajectory_mp42(parent, vidObjLay,
+ vidObjPlane) != MP4_STATUS_OK) {
+ break;
+ }
+ }
+
+ if (vidObjLay->reduced_resolution_vop_enable
+ && (vidObjLay->video_object_layer_shape
+ == MP4_SHAPE_TYPE_RECTANGULAR)
+ && ((vop_coding_type == MP4_VOP_TYPE_I)
+ || (vop_coding_type == MP4_VOP_TYPE_P))) {
+ /* vop_reduced_resolution */
+ getbits = viddec_pm_get_bits(parent, &code, 1);
+ BREAK_GETBITS_FAIL(getbits, ret);
+ }
+
+ /* NOTE(review): 14496-2 reads vop_fcode_forward for non-I
+ * (P/S) VOPs; this condition (== I) looks inverted — verify
+ * against the spec's video_packet_header() syntax. */
+ if (vop_coding_type == MP4_VOP_TYPE_I) {
+ /* vop_fcode_forward */
+ getbits = viddec_pm_get_bits(parent, &code, 3);
+ BREAK_GETBITS_FAIL(getbits, ret);
+ }
+
+ if (vop_coding_type == MP4_VOP_TYPE_B) {
+ /* vop_fcode_backward */
+ getbits = viddec_pm_get_bits(parent, &code, 3);
+ BREAK_GETBITS_FAIL(getbits, ret);
+ }
+ }
+ }
+
+ if (vidObjLay->newpred_enable) {
+ /* New pred mode not supported in HW, but, does libva support this? */
+ ret = MP4_STATUS_NOTSUPPORT;
+ break;
+ }
+
+ *quant_scale = _quant_scale;
+ *macroblock_number = _macroblock_number;
+ } while (0);
+ return ret;
+}
+
+/*
+ * Compute the resync marker length in bits for the current VOP:
+ * 17 for I-VOPs, 16 + max(fcode_forward, fcode_backward) for B-VOPs,
+ * and 16 + fcode_forward otherwise (P/S-VOPs), per ISO/IEC 14496-2.
+ */
+uint32 vbp_resync_marker_Length_mp42(viddec_mp4_parser_t *parser_cxt)
+{
+
+ mp4_Info_t *pInfo = &(parser_cxt->info);
+ mp4_VideoObjectPlane_t *vidObjPlane =
+ &(pInfo->VisualObject.VideoObject.VideoObjectPlane);
+
+ uint32 resync_marker_length = 0;
+ if (vidObjPlane->vop_coding_type == MP4_VOP_TYPE_I) {
+ resync_marker_length = 17;
+ } else if (vidObjPlane->vop_coding_type == MP4_VOP_TYPE_B) {
+ uint8 fcode_max = vidObjPlane->vop_fcode_forward;
+ if (fcode_max < vidObjPlane->vop_fcode_backward) {
+ fcode_max = vidObjPlane->vop_fcode_backward;
+ }
+ resync_marker_length = 16 + fcode_max;
+ } else {
+ resync_marker_length = 16 + vidObjPlane->vop_fcode_forward;
+ }
+ return resync_marker_length;
+}
+
+/*
+ * Build slice data for a short-video-header picture: SVH frames are
+ * treated as a single slice spanning the whole list item, with offsets
+ * taken relative to parse_cubby.buf at the current bit position.
+ * @return MP4_STATUS_OK (no failure paths here).
+ */
+uint32 vbp_process_slices_svh_mp42(vbp_context *pcontext, int list_index)
+{
+ uint32 ret = MP4_STATUS_OK;
+
+ vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data;
+ viddec_pm_cxt_t *parent = pcontext->parser_cxt;
+ viddec_mp4_parser_t *parser_cxt =
+ (viddec_mp4_parser_t *) &(parent->codec_data[0]);
+
+ VTRACE ("begin\n");
+
+ vbp_picture_data_mp42 *picture_data =
+ &(query_data->picture_data[query_data->number_pictures]);
+ vbp_slice_data_mp42 *slice_data = &(picture_data->slice_data[0]);
+ VASliceParameterBufferMPEG4* slice_param = &(slice_data->slice_param);
+
+ picture_data->number_slices = 1;
+
+ uint8 is_emul = 0;
+ uint32 bit_offset = 0;
+ uint32 byte_offset = 0;
+
+ /* The offsets are relative to parent->parse_cubby.buf */
+ viddec_pm_get_au_pos(parent, &bit_offset, &byte_offset, &is_emul);
+
+ slice_data->buffer_addr = parent->parse_cubby.buf;
+
+ slice_data->slice_offset = byte_offset
+ + parent->list.data[list_index].stpos;
+ slice_data->slice_size = parent->list.data[list_index].edpos
+ - parent->list.data[list_index].stpos - byte_offset;
+
+ slice_param->slice_data_size = slice_data->slice_size;
+ slice_param->slice_data_flag = VA_SLICE_DATA_FLAG_ALL;
+ slice_param->slice_data_offset = 0;
+ slice_param->macroblock_offset = bit_offset;
+ slice_param->macroblock_number = 0;
+ slice_param->quant_scale
+ = parser_cxt->info.VisualObject.VideoObject.VideoObjectPlaneH263.vop_quant;
+
+ VTRACE ("end\n");
+
+ return ret;
+}
+
+/*
+ * Build slice data for a regular MPEG-4 picture. The first slice starts at
+ * the current bit position; if resync markers are enabled, the remaining
+ * bitstream is scanned for markers and each video packet becomes another
+ * slice (its header parsed via vbp_video_packet_header_mp42). Slice count
+ * is capped at MAX_NUM_SLICES.
+ * @return MP4_STATUS_OK, or MP4_STATUS_PARSE_ERROR on a getbits failure
+ *         or slice overflow.
+ */
+mp4_Status_t vbp_process_slices_mp42(vbp_context *pcontext, int list_index)
+{
+
+ vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data;
+ viddec_pm_cxt_t *parent = pcontext->parser_cxt;
+ viddec_mp4_parser_t *parser_cxt =
+ (viddec_mp4_parser_t *) &(parent->codec_data[0]);
+
+ vbp_picture_data_mp42 *picture_data = NULL;
+ vbp_slice_data_mp42 *slice_data = NULL;
+ VASliceParameterBufferMPEG4* slice_param = NULL;
+
+ uint32 ret = MP4_STATUS_OK;
+
+ uint8 is_emul = 0;
+ uint32 bit_offset = 0;
+ uint32 byte_offset = 0;
+
+ uint32 code = 0;
+ int32_t getbits = 0;
+ uint32 resync_marker_length = 0;
+
+ uint32 slice_index = 0;
+
+#ifdef VBP_TRACE
+ uint32 list_size_at_index = parent->list.data[list_index].edpos
+ - parent->list.data[list_index].stpos;
+#endif
+
+ VTRACE ("list_index = %d list_size_at_index = %d\n", list_index,
+ list_size_at_index);
+
+ VTRACE ("list_index = %d edpos = %d stpos = %d\n", list_index,
+ parent->list.data[list_index].edpos,
+ parent->list.data[list_index].stpos);
+
+ /* The offsets are relative to parent->parse_cubby.buf */
+ viddec_pm_get_au_pos(parent, &bit_offset, &byte_offset, &is_emul);
+
+#if 0
+ if (is_emul) {
+ g_print("*** emul != 0\n");
+ /*byte_offset += 1;*/
+ }
+#endif
+
+ /* first slice: from the current position to the end of the list item */
+ picture_data = &(query_data->picture_data[query_data->number_pictures]);
+ slice_data = &(picture_data->slice_data[slice_index]);
+ slice_param = &(slice_data->slice_param);
+
+ slice_data->buffer_addr = parent->parse_cubby.buf;
+
+ slice_data->slice_offset = byte_offset
+ + parent->list.data[list_index].stpos;
+ slice_data->slice_size = parent->list.data[list_index].edpos
+ - parent->list.data[list_index].stpos - byte_offset;
+
+ slice_param->slice_data_size = slice_data->slice_size;
+ slice_param->slice_data_flag = VA_SLICE_DATA_FLAG_ALL;
+ slice_param->slice_data_offset = 0;
+ slice_param->macroblock_offset = bit_offset;
+ slice_param->macroblock_number = 0;
+ slice_param->quant_scale
+ = parser_cxt->info.VisualObject.VideoObject.VideoObjectPlane.vop_quant;
+
+ slice_index++;
+ picture_data->number_slices = slice_index;
+
+ /*
+ * scan for resync_marker
+ */
+
+ if (!parser_cxt->info.VisualObject.VideoObject.resync_marker_disable) {
+
+ /* resync markers are byte-aligned: skip to the next byte boundary */
+ viddec_pm_get_au_pos(parent, &bit_offset, &byte_offset, &is_emul);
+ if (bit_offset) {
+ getbits = viddec_pm_get_bits(parent, &code, 8 - bit_offset);
+ if (getbits == -1) {
+ ret = MP4_STATUS_PARSE_ERROR;
+ return ret;
+ }
+ }
+
+ /*
+ * get resync_marker_length
+ */
+ resync_marker_length = vbp_resync_marker_Length_mp42(parser_cxt);
+
+ while (1) {
+
+ uint16_t quant_scale = 0;
+ uint32 macroblock_number = 0;
+
+ getbits = viddec_pm_peek_bits(parent, &code, resync_marker_length);
+ BREAK_GETBITS_FAIL(getbits, ret);
+
+ /* a resync marker is (resync_marker_length-1) zeros then a 1 */
+ if (code != 1) {
+ getbits = viddec_pm_get_bits(parent, &code, 8);
+ BREAK_GETBITS_FAIL(getbits, ret);
+ continue;
+ }
+
+ /*
+ * We found resync_marker
+ */
+
+ viddec_pm_get_au_pos(parent, &bit_offset, &byte_offset, &is_emul);
+
+ /* shrink the previous slice so it ends at the marker */
+ slice_data->slice_size -= (parent->list.data[list_index].edpos
+ - parent->list.data[list_index].stpos - byte_offset);
+ slice_param->slice_data_size = slice_data->slice_size;
+
+ slice_data = &(picture_data->slice_data[slice_index]);
+ slice_param = &(slice_data->slice_param);
+
+ /*
+ * parse video_packet_header
+ */
+ getbits = viddec_pm_get_bits(parent, &code, resync_marker_length);
+ BREAK_GETBITS_FAIL(getbits, ret);
+
+ vbp_video_packet_header_mp42(parent, parser_cxt,
+ &quant_scale, &macroblock_number);
+
+ viddec_pm_get_au_pos(parent, &bit_offset, &byte_offset, &is_emul);
+
+ slice_data->buffer_addr = parent->parse_cubby.buf;
+
+ slice_data->slice_offset = byte_offset
+ + parent->list.data[list_index].stpos;
+ slice_data->slice_size = parent->list.data[list_index].edpos
+ - parent->list.data[list_index].stpos - byte_offset;
+
+ slice_param->slice_data_size = slice_data->slice_size;
+ slice_param->slice_data_flag = VA_SLICE_DATA_FLAG_ALL;
+ slice_param->slice_data_offset = 0;
+ slice_param->macroblock_offset = bit_offset;
+ slice_param->macroblock_number = macroblock_number;
+ slice_param->quant_scale = quant_scale;
+
+ slice_index++;
+
+ if (slice_index >= MAX_NUM_SLICES) {
+ ret = MP4_STATUS_PARSE_ERROR;
+ break;
+ }
+
+ picture_data->number_slices = slice_index;
+ }
+ }
+ return ret;
+}
+
+/* This is copied from the DHG MP42 parser: decode the VLC-coded dmv_length
+ * used by sprite trajectory points (14496-2). Codes 00/01 map directly;
+ * the escape prefix 111 is followed by a run of 1-bits counted to get the
+ * length. Returns MP4_STATUS_OK or MP4_STATUS_PARSE_ERROR (note the
+ * return type is int32_t holding an mp4_Status_t value). */
+static inline int32_t vbp_sprite_dmv_length_mp42(
+ void * parent,
+ int32_t *dmv_length)
+{
+ uint32 code, skip;
+ int32_t getbits = 0;
+ mp4_Status_t ret = MP4_STATUS_PARSE_ERROR;
+ *dmv_length = 0;
+ skip = 3;
+ do {
+ getbits = viddec_pm_peek_bits(parent, &code, skip);
+ BREAK_GETBITS_FAIL(getbits, ret);
+
+ if (code == 7) {
+ viddec_pm_skip_bits(parent, skip);
+ getbits = viddec_pm_peek_bits(parent, &code, 9);
+ BREAK_GETBITS_FAIL(getbits, ret);
+
+ skip = 1;
+ while ((code & 256) != 0) {/* count number of 1 bits */
+ code <<= 1;
+ skip++;
+ }
+ *dmv_length = 5 + skip;
+ } else {
+ skip = (code <= 1) ? 2 : 3;
+ *dmv_length = code - 1;
+ }
+ viddec_pm_skip_bits(parent, skip);
+ ret = MP4_STATUS_OK;
+
+ } while (0);
+ return ret;
+}
+
+/* This is copied from the DHG MP42 parser: read the sprite trajectory —
+ * for each warping point, a (dmv_length, dmv_code, marker_bit) pair for du
+ * then dv, sign-extending dmv_code per 14496-2, and store the results in
+ * vidObjPlane->warping_mv_code_du/dv. Returns MP4_STATUS_OK or
+ * MP4_STATUS_PARSE_ERROR on a bad marker bit / getbits failure. */
+static inline mp4_Status_t vbp_sprite_trajectory_mp42(
+ void *parent,
+ mp4_VideoObjectLayer_t *vidObjLay,
+ mp4_VideoObjectPlane_t *vidObjPlane)
+{
+ uint32 code, i;
+ int32_t dmv_length = 0, dmv_code = 0, getbits = 0;
+ mp4_Status_t ret = MP4_STATUS_OK;
+ for (i = 0; i
+ < (uint32) vidObjLay->sprite_info.no_of_sprite_warping_points; i++) {
+ ret = vbp_sprite_dmv_length_mp42(parent, &dmv_length);
+ if (ret != MP4_STATUS_OK) {
+ break;
+ }
+ if (dmv_length <= 0) {
+ dmv_code = 0;
+ } else {
+ getbits = viddec_pm_get_bits(parent, &code, (uint32) dmv_length);
+ BREAK_GETBITS_FAIL(getbits, ret);
+ dmv_code = (int32_t) code;
+ /* sign extension: a clear top bit means a negative delta */
+ if ((dmv_code & (1 << (dmv_length - 1))) == 0) {
+ dmv_code -= (1 << dmv_length) - 1;
+ }
+ }
+ getbits = viddec_pm_get_bits(parent, &code, 1);
+ BREAK_GETBITS_FAIL(getbits, ret);
+ if (code != 1) {
+ ret = MP4_STATUS_PARSE_ERROR;
+ break;
+ }
+ vidObjPlane->warping_mv_code_du[i] = dmv_code;
+ /* TODO: create another inline function to avoid code duplication */
+ ret = vbp_sprite_dmv_length_mp42(parent, &dmv_length);
+ if (ret != MP4_STATUS_OK) {
+ break;
+ }
+ if (dmv_length <= 0) {
+ dmv_code = 0;
+ } else {
+ getbits = viddec_pm_get_bits(parent, &code, (uint32) dmv_length);
+ BREAK_GETBITS_FAIL(getbits, ret);
+ dmv_code = (int32_t) code;
+ if ((dmv_code & (1 << (dmv_length - 1))) == 0) {
+ dmv_code -= (1 << dmv_length) - 1;
+ }
+ }
+ getbits = viddec_pm_get_bits(parent, &code, 1);
+ BREAK_GETBITS_FAIL(getbits, ret);
+ if (code != 1) {
+ ret = MP4_STATUS_PARSE_ERROR;
+ break;
+ }
+ vidObjPlane->warping_mv_code_dv[i] = dmv_code;
+
+ }
+ return ret;
+}
+
+/*
+ * free memory of vbp_data_mp42 structure and its members.
+ * Safe to call with partially-allocated or NULL query data (g_free(NULL)
+ * is a no-op); always clears pcontext->query_data and returns VBP_OK.
+ */
+uint32 vbp_free_query_data_mp42(vbp_context *pcontext)
+{
+
+ vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data;
+ gint idx = 0;
+
+ if (query_data) {
+ if (query_data->picture_data) {
+ for (idx = 0; idx < MAX_NUM_PICTURES_MP42; idx++) {
+ g_free(query_data->picture_data[idx].slice_data);
+ }
+ g_free(query_data->picture_data);
+ }
+
+ g_free(query_data);
+ }
+
+ pcontext->query_data = NULL;
+ return VBP_OK;
+}
+
+/*
+ * Allocate memory for vbp_data_mp42 structure and all its members:
+ * MAX_NUM_PICTURES_MP42 picture slots, each with MAX_NUM_SLICES slice
+ * entries, all zero-initialized. On any allocation failure everything
+ * allocated so far is released and VBP_MEM is returned; on success
+ * pcontext->query_data owns the allocation (freed by
+ * vbp_free_query_data_mp42).
+ */
+uint32 vbp_allocate_query_data_mp42(vbp_context *pcontext)
+{
+
+ gint idx = 0;
+ vbp_data_mp42 *query_data;
+ pcontext->query_data = NULL;
+
+ query_data = g_try_new0(vbp_data_mp42, 1);
+ if (query_data == NULL) {
+ goto cleanup;
+ }
+
+ query_data->picture_data = g_try_new0(vbp_picture_data_mp42,
+ MAX_NUM_PICTURES_MP42);
+ if (NULL == query_data->picture_data) {
+ goto cleanup;
+ }
+
+ for (idx = 0; idx < MAX_NUM_PICTURES_MP42; idx++) {
+ query_data->picture_data[idx].number_slices = 0;
+ query_data->picture_data[idx].slice_data = g_try_new0(
+ vbp_slice_data_mp42, MAX_NUM_SLICES);
+
+ if (query_data->picture_data[idx].slice_data == NULL) {
+ goto cleanup;
+ }
+ }
+
+ pcontext->query_data = (void *) query_data;
+ return VBP_OK;
+
+ cleanup:
+
+ if (query_data) {
+ if (query_data->picture_data) {
+ for (idx = 0; idx < MAX_NUM_PICTURES_MP42; idx++) {
+ g_free(query_data->picture_data[idx].slice_data);
+ }
+ g_free(query_data->picture_data);
+ }
+
+ g_free(query_data);
+ }
+
+ return VBP_MEM;
+}
+
+/*
+ * Debug helper: dump the parsed MPEG-4 query data (codec data, VA picture
+ * parameters, the first slice, IQ matrices and the leading 64 bytes of the
+ * slice buffer) for every parsed picture via g_print.
+ * NOTE(review): the list_index parameter is currently unused.
+ */
+void vbp_dump_query_data(vbp_context *pcontext, int list_index)
+{
+ vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data;
+
+ vbp_picture_data_mp42 *picture_data = NULL;
+ VAPictureParameterBufferMPEG4 *picture_param = NULL;
+ vbp_slice_data_mp42 *slice_data = NULL;
+
+ uint32 idx = 0, jdx = 0;
+
+ /* nothing to dump when query data was never allocated */
+ if (NULL == query_data) {
+ return;
+ }
+
+ for (idx = 0; idx < query_data->number_pictures; idx++) {
+
+ picture_data = &(query_data->picture_data[idx]);
+ picture_param = &(picture_data->picture_param);
+ slice_data = &(picture_data->slice_data[0]);
+
+ g_print("======================= dump_begin ======================\n\n");
+ g_print("======================= codec_data ======================\n");
+
+ /* codec_data */
+ g_print("codec_data.profile_and_level_indication = 0x%x\n",
+ query_data->codec_data.profile_and_level_indication);
+
+ g_print("==================== picture_param =======================\n");
+
+ /* picture_param */
+ g_print("picture_param->vop_width = %d\n", picture_param->vop_width);
+ g_print("picture_param->vop_height = %d\n", picture_param->vop_height);
+
+ g_print("picture_param->vol_fields.bits.short_video_header = %d\n",
+ picture_param->vol_fields.bits.short_video_header);
+ g_print("picture_param->vol_fields.bits.chroma_format = %d\n",
+ picture_param->vol_fields.bits.chroma_format);
+ g_print("picture_param->vol_fields.bits.interlaced = %d\n",
+ picture_param->vol_fields.bits.interlaced);
+ g_print("picture_param->vol_fields.bits.obmc_disable = %d\n",
+ picture_param->vol_fields.bits.obmc_disable);
+ g_print("picture_param->vol_fields.bits.sprite_enable = %d\n",
+ picture_param->vol_fields.bits.sprite_enable);
+ g_print(
+ "picture_param->vol_fields.bits.sprite_warping_accuracy = %d\n",
+ picture_param->vol_fields.bits.sprite_warping_accuracy);
+ g_print("picture_param->vol_fields.bits.quant_type = %d\n",
+ picture_param->vol_fields.bits.quant_type);
+ g_print("picture_param->vol_fields.bits.quarter_sample = %d\n",
+ picture_param->vol_fields.bits.quarter_sample);
+ g_print("picture_param->vol_fields.bits.data_partitioned = %d\n",
+ picture_param->vol_fields.bits.data_partitioned);
+ g_print("picture_param->vol_fields.bits.reversible_vlc = %d\n",
+ picture_param->vol_fields.bits.reversible_vlc);
+
+ g_print("picture_param->no_of_sprite_warping_points = %d\n",
+ picture_param->no_of_sprite_warping_points);
+ g_print("picture_param->quant_precision = %d\n",
+ picture_param->quant_precision);
+ g_print("picture_param->sprite_trajectory_du = %d, %d, %d\n",
+ picture_param->sprite_trajectory_du[0],
+ picture_param->sprite_trajectory_du[1],
+ picture_param->sprite_trajectory_du[2]);
+ g_print("picture_param->sprite_trajectory_dv = %d, %d, %d\n",
+ picture_param->sprite_trajectory_dv[0],
+ picture_param->sprite_trajectory_dv[1],
+ picture_param->sprite_trajectory_dv[2]);
+
+ g_print("picture_param->vop_fields.bits.vop_coding_type = %d\n",
+ picture_param->vop_fields.bits.vop_coding_type);
+ g_print(
+ "picture_param->vop_fields.bits.backward_reference_vop_coding_type = %d\n",
+ picture_param->vop_fields.bits.backward_reference_vop_coding_type);
+ g_print("picture_param->vop_fields.bits.vop_rounding_type = %d\n",
+ picture_param->vop_fields.bits.vop_rounding_type);
+ g_print("picture_param->vop_fields.bits.intra_dc_vlc_thr = %d\n",
+ picture_param->vop_fields.bits.intra_dc_vlc_thr);
+ g_print("picture_param->vop_fields.bits.top_field_first = %d\n",
+ picture_param->vop_fields.bits.top_field_first);
+ g_print(
+ "picture_param->vop_fields.bits.alternate_vertical_scan_flag = %d\n",
+ picture_param->vop_fields.bits.alternate_vertical_scan_flag);
+
+ g_print("picture_param->vop_fcode_forward = %d\n",
+ picture_param->vop_fcode_forward);
+ g_print("picture_param->vop_fcode_backward = %d\n",
+ picture_param->vop_fcode_backward);
+ g_print("picture_param->num_gobs_in_vop = %d\n",
+ picture_param->num_gobs_in_vop);
+ g_print("picture_param->num_macroblocks_in_gob = %d\n",
+ picture_param->num_macroblocks_in_gob);
+ g_print("picture_param->TRB = %d\n", picture_param->TRB);
+ g_print("picture_param->TRD = %d\n", picture_param->TRD);
+
+ g_print("==================== slice_data ==========================\n");
+
+ /* print the pointer with %p: casting to unsigned int would truncate
+ * the address on 64-bit builds */
+ g_print("slice_data.buffer_addr = %p\n",
+ (void *) slice_data->buffer_addr);
+ g_print("slice_data.slice_offset = 0x%x\n", slice_data->slice_offset);
+ g_print("slice_data.slice_size = 0x%x\n", slice_data->slice_size);
+
+ g_print("slice_data.slice_param.macroblock_number = %d\n",
+ slice_data->slice_param.macroblock_number);
+ g_print("slice_data.slice_param.macroblock_offset = 0x%x\n",
+ slice_data->slice_param.macroblock_offset);
+ g_print("slice_data.slice_param.quant_scale = %d\n",
+ slice_data->slice_param.quant_scale);
+ g_print("slice_data.slice_param.slice_data_flag = %d\n",
+ slice_data->slice_param.slice_data_flag);
+ g_print("slice_data.slice_param.slice_data_offset = %d\n",
+ slice_data->slice_param.slice_data_offset);
+ g_print("slice_data.slice_param.slice_data_size = %d\n",
+ slice_data->slice_param.slice_data_size);
+
+ g_print("================= iq_matrix_buffer ======================\n");
+ g_print("iq_matrix_buffer.load_intra_quant_mat = %d\n",
+ picture_data->iq_matrix_buffer.load_intra_quant_mat);
+ g_print("iq_matrix_buffer.load_non_intra_quant_mat = %d\n",
+ picture_data->iq_matrix_buffer.load_non_intra_quant_mat);
+
+ g_print("------- iq_matrix_buffer.intra_quant_mat ----------\n");
+ for (jdx = 0; jdx < 64; jdx++) {
+
+ g_print("%02x ",
+ picture_data->iq_matrix_buffer.intra_quant_mat[jdx]);
+
+ if ((jdx + 1) % 8 == 0) {
+ g_print("\n");
+ }
+ }
+
+ g_print("----- iq_matrix_buffer.non_intra_quant_mat --------\n");
+ for (jdx = 0; jdx < 64; jdx++) {
+
+ g_print("%02x ",
+ picture_data->iq_matrix_buffer.non_intra_quant_mat[jdx]);
+
+ if ((jdx + 1) % 8 == 0) {
+ g_print("\n");
+ }
+ }
+
+ g_print("-------- slice buffer begin ------------\n");
+
+ for (jdx = 0; jdx < 64; jdx++) {
+ g_print("%02x ", *(slice_data->buffer_addr
+ + slice_data->slice_offset + jdx));
+ if ((jdx + 1) % 8 == 0) {
+ g_print("\n");
+ }
+ }
+ /* the closing banner used to repeat "begin"; it marks the end */
+ g_print("-------- slice buffer end ------------\n");
+
+ g_print("\n\n============== dump_end ==========================\n\n");
+
+ }
+}
+
diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.h b/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.h
new file mode 100644
index 0000000..c0deaa4
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.h
@@ -0,0 +1,49 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+#ifndef VBP_MP42_PARSER_H
+#define VBP_MP42_PARSER_H
+
+/*
+ * setup parser's entry points
+ */
+
+uint32 vbp_init_parser_entries_mp42(vbp_context *pcontext);
+
+
+/*
+ * allocate query data
+ */
+uint32 vbp_allocate_query_data_mp42(vbp_context *pcontext);
+
+/*
+ * free query data
+ */
+uint32 vbp_free_query_data_mp42(vbp_context *pcontext);
+
+/*
+ * parse initialization data
+ */
+uint32 vbp_parse_init_data_mp42(vbp_context *pcontext);
+
+/*
+ * parse start code.
+ */
+uint32 vbp_parse_start_code_mp42(vbp_context *pcontext);
+
+/*
+ * process parsing result
+ */
+uint32 vbp_process_parsing_result_mp42(vbp_context *pcontext, int list_index);
+
+/*
+ * query parsing result
+ */
+uint32 vbp_populate_query_data_mp42(vbp_context *pcontext);
+
+#endif /*VBP_MP42_PARSER_H*/
diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_trace.c b/mix_vbp/viddec_fw/fw/parser/vbp_trace.c
new file mode 100644
index 0000000..d87bfd8
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/vbp_trace.c
@@ -0,0 +1,28 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+
+#include "vbp_trace.h"
+
+#ifdef VBP_TRACE
+
+/*
+ * Emit a single trace line to stdout in the form
+ * "<cat> <fun>(#<line>): <formatted message>\n".
+ * Silently ignores calls where any required string argument is NULL.
+ */
+void vbp_trace_util(const char* cat, const char* fun, int line, const char* format, ...)
+{
+ va_list ap;
+
+ if ((cat == NULL) || (fun == NULL) || (format == NULL))
+ {
+ return;
+ }
+
+ printf("%s %s(#%d): ", cat, fun, line);
+ va_start(ap, format);
+ vprintf(format, ap);
+ va_end(ap);
+ printf("\n");
+}
+
+#endif
+
diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_trace.h b/mix_vbp/viddec_fw/fw/parser/vbp_trace.h
new file mode 100644
index 0000000..9f2a21c
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/vbp_trace.h
@@ -0,0 +1,47 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+
+#ifndef VBP_TRACE_H_
+#define VBP_TRACE_H_
+
+
+
+//#define VBP_TRACE
+
+
+#ifdef VBP_TRACE /* if VBP_TRACE is defined*/
+
+#include <stdio.h>
+#include <stdarg.h>
+
+extern void vbp_trace_util(const char* cat, const char* fun, int line, const char* format, ...);
+
+#define VBP_TRACE_UTIL(cat, format, ...) \
+vbp_trace_util(cat, __FUNCTION__, __LINE__, format, ##__VA_ARGS__)
+
+
+#define ETRACE(format, ...) VBP_TRACE_UTIL("ERROR: ", format, ##__VA_ARGS__)
+#define WTRACE(format, ...) VBP_TRACE_UTIL("WARNING: ", format, ##__VA_ARGS__)
+#define ITRACE(format, ...) VBP_TRACE_UTIL("INFO: ", format, ##__VA_ARGS__)
+#define VTRACE(format, ...) VBP_TRACE_UTIL("VERBOSE: ", format, ##__VA_ARGS__)
+
+#else /* if VBP_TRACE is not defined */
+
+#define ETRACE(format, ...)
+#define WTRACE(format, ...)
+#define ITRACE(format, ...)
+#define VTRACE(format, ...)
+
+
+#endif /* VBP_TRACE*/
+
+
+#endif /*VBP_TRACE_H_*/
+
+
diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_utils.c b/mix_vbp/viddec_fw/fw/parser/vbp_utils.c
new file mode 100644
index 0000000..651b801
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/vbp_utils.c
@@ -0,0 +1,548 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+
+#include <glib.h>
+#include <dlfcn.h>
+
+#include "vc1.h"
+#include "h264.h"
+#include "vbp_loader.h"
+#include "vbp_utils.h"
+#include "vbp_vc1_parser.h"
+#include "vbp_h264_parser.h"
+#include "vbp_mp42_parser.h"
+
+
+
+/* buffer counter */
+uint32 buffer_counter = 0;
+
+
+/**
+ *
+ * Uninitialize the parser context: release the parser-ops table and close
+ * the dlopen'ed parser shared object. NULL-safe; always returns VBP_OK.
+ *
+ */
+static uint32 vbp_utils_uninitialize_context(vbp_context *pcontext)
+{
+ uint32 error = VBP_OK;
+
+ if (NULL == pcontext)
+ {
+ return error;
+ }
+
+ /* no need to reset parser entry points. */
+
+ g_free(pcontext->parser_ops);
+ pcontext->parser_ops = NULL;
+
+
+ if (pcontext->fd_parser)
+ {
+ dlclose(pcontext->fd_parser);
+ pcontext->fd_parser = NULL;
+ }
+
+ return error;
+}
+
+/**
+ *
+ * Initialize the parser context: map parser_type to a parser shared-object
+ * name, dlopen it, allocate the parser-ops table, install the
+ * format-specific vbp_* callbacks, then ask the format module to fill in
+ * the parser's own entry points.
+ * Returns VBP_OK, VBP_TYPE (unsupported type), VBP_LOAD (dlopen failed) or
+ * VBP_MEM (allocation failed). On failure the context is uninitialized.
+ *
+ */
+static uint32 vbp_utils_initialize_context(vbp_context *pcontext)
+{
+ uint32 error = VBP_OK;
+ char *parser_name;
+
+ switch (pcontext->parser_type)
+ {
+ case VBP_VC1:
+ parser_name = "libmixvbp_vc1.so.0";
+ break;
+
+ /* MPEG-2 parser is not supported. */
+
+ /* case VBP_MPEG2:
+ parser_name = "libmixvbp_mpeg2.so.0";
+ break;*/
+
+ case VBP_MPEG4:
+ parser_name = "libmixvbp_mpeg4.so.0";
+ break;
+
+ case VBP_H264:
+ parser_name = "libmixvbp_h264.so.0";
+ break;
+
+ default:
+ g_warning ("Warning! Unsupported parser type!");
+ return VBP_TYPE;
+ }
+
+ pcontext->fd_parser = dlopen(parser_name, RTLD_LAZY);
+ if (NULL == pcontext->fd_parser)
+ {
+ ETRACE("Failed to load parser %s.", parser_name);
+ error = VBP_LOAD;
+ goto cleanup;
+ }
+
+ pcontext->parser_ops = g_try_new(viddec_parser_ops_t, 1);
+ if (NULL == pcontext->parser_ops)
+ {
+ ETRACE("Failed to allocate memory");
+ error = VBP_MEM;
+ goto cleanup;
+ }
+
+/* X is a VBP parser type constant; Y is the suffix of the format-specific
+ * vbp_* functions (vc1, mp42, h264). Expands to one switch case. */
+#define SET_FUNC_POINTER(X, Y)\
+ case X:\
+ pcontext->func_init_parser_entries = vbp_init_parser_entries_##Y;\
+ pcontext->func_allocate_query_data = vbp_allocate_query_data_##Y;\
+ pcontext->func_free_query_data = vbp_free_query_data_##Y;\
+ pcontext->func_parse_init_data = vbp_parse_init_data_##Y;\
+ pcontext->func_parse_start_code = vbp_parse_start_code_##Y;\
+ pcontext->func_process_parsing_result = vbp_process_parsing_result_##Y;\
+ pcontext->func_populate_query_data = vbp_populate_query_data_##Y;\
+ break;
+
+ /* no default case needed: unsupported types were rejected above */
+ switch (pcontext->parser_type)
+ {
+ SET_FUNC_POINTER(VBP_VC1, vc1);
+ SET_FUNC_POINTER(VBP_MPEG4, mp42);
+ SET_FUNC_POINTER(VBP_H264, h264);
+ }
+
+ /* set entry points for parser operations:
+ init
+ parse_sc
+ parse_syntax
+ get_cxt_size
+ is_wkld_done
+ is_frame_start
+ */
+ error = pcontext->func_init_parser_entries(pcontext);
+
+cleanup:
+
+ if (VBP_OK != error)
+ {
+ /* no need to log error. the loader would have done so already. */
+ vbp_utils_uninitialize_context(pcontext);
+ }
+
+ return error;
+}
+
+/**
+*
+* Free all parser memory hanging off the context: the format-specific query
+* data, both workload buffers, the persistent memory and the viddec parser
+* context. NULL-safe (g_free(NULL) is a no-op); always returns VBP_OK.
+*
+*/
+static uint32 vbp_utils_free_parser_memory(vbp_context *pcontext)
+{
+ if (NULL == pcontext)
+ {
+ return VBP_OK;
+ }
+
+ /* format-specific query data is released by the format module */
+ if (pcontext->func_free_query_data)
+ {
+ pcontext->func_free_query_data(pcontext);
+ }
+
+ g_free(pcontext->workload2);
+ pcontext->workload2 = NULL;
+
+ g_free(pcontext->workload1);
+ pcontext->workload1 = NULL;
+
+ g_free(pcontext->persist_mem);
+ pcontext->persist_mem = NULL;
+
+ g_free(pcontext->parser_cxt);
+ pcontext->parser_cxt = NULL;
+
+ return VBP_OK;
+}
+
+
+/**
+ *
+ * Allocate all memory the parser needs: the viddec parser context,
+ * optional persistent memory (mandatory for H.264, optional for VC-1,
+ * MPEG-2 and MPEG-4), two workload buffers of MAX_WORKLOAD_ITEMS items
+ * each, and the format-specific query data.
+ * On any failure, everything allocated so far is released and VBP_MEM
+ * (or VBP_CXT for a missing mandatory persist size) is returned.
+ *
+ */
+static uint32 vbp_utils_allocate_parser_memory(vbp_context *pcontext)
+{
+ /* pcontext is guaranteed to be valid input. */
+ uint32 error = VBP_OK;
+ viddec_parser_memory_sizes_t sizes;
+
+ pcontext->parser_cxt = g_try_new(viddec_pm_cxt_t, 1);
+ if (NULL == pcontext->parser_cxt)
+ {
+ ETRACE("Failed to allocate memory");
+ error = VBP_MEM;
+ goto cleanup;
+ }
+
+ /* invoke parser entry to get context size */
+ /* no return value, should always succeed. */
+ pcontext->parser_ops->get_cxt_size(&sizes);
+
+ /* allocate persistent memory for parser */
+ if (sizes.persist_size)
+ {
+ pcontext->persist_mem = g_try_malloc(sizes.persist_size);
+ if (NULL == pcontext->persist_mem)
+ {
+ ETRACE("Failed to allocate memory");
+ error = VBP_MEM;
+ goto cleanup;
+ }
+ }
+ else
+ {
+ /* OK for VC-1, MPEG2 and MPEG4. */
+ if ((VBP_VC1 == pcontext->parser_type) ||
+ (VBP_MPEG2 == pcontext->parser_type) ||
+ (VBP_MPEG4 == pcontext->parser_type))
+ {
+ pcontext->persist_mem = NULL;
+ }
+ else
+ {
+ /* mandatory for H.264 */
+ ETRACE("Failed to allocate memory");
+ error = VBP_CXT;
+ goto cleanup;
+ }
+ }
+
+ /* allocate a new workload with 1000 items. */
+ pcontext->workload1 = g_try_malloc(sizeof(viddec_workload_t) +
+ (MAX_WORKLOAD_ITEMS * sizeof(viddec_workload_item_t)));
+ if (NULL == pcontext->workload1)
+ {
+ ETRACE("Failed to allocate memory");
+ error = VBP_MEM;
+ goto cleanup;
+ }
+
+ /* allocate a second workload with 1000 items. */
+ pcontext->workload2 = g_try_malloc(sizeof(viddec_workload_t) +
+ (MAX_WORKLOAD_ITEMS * sizeof(viddec_workload_item_t)));
+ if (NULL == pcontext->workload2)
+ {
+ ETRACE("Failed to allocate memory");
+ error = VBP_MEM;
+ goto cleanup;
+ }
+
+ /* allocate format-specific query data */
+ error = pcontext->func_allocate_query_data(pcontext);
+
+cleanup:
+ if (error != VBP_OK)
+ {
+ vbp_utils_free_parser_memory(pcontext);
+ }
+ return error;
+}
+
+
+
+/**
+ *
+ * Parse the elementary sample buffer or codec configuration data.
+ * First builds the item list (sequence/picture/slice headers found in the
+ * cubby buffer) via the format-specific parse_init_data or parse_start_code
+ * callback, then runs the parser's parse_syntax entry and the
+ * format-specific result processing for each item.
+ * Returns VBP_OK on success or the first error from start-code parsing or
+ * result processing; parse_syntax errors are currently ignored (see below).
+ *
+ */
+static uint32 vbp_utils_parse_es_buffer(vbp_context *pcontext, uint8 init_data_flag)
+{
+ viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
+ viddec_parser_ops_t *ops = pcontext->parser_ops;
+ uint32 error = VBP_OK;
+ int i;
+
+ /* reset list number. func_parse_init_data or func_parse_start_code will
+ * set it equal to number of sequence headers, picture headers or slices headers
+ * found in the sample buffer
+ */
+ cxt->list.num_items = 0;
+
+ /**
+ * READ THIS NOTE: cxt->getbits.is_emul_reqd must be set to 1
+ * for H.264 and MPEG-4, VC1 advanced profile and set to 0
+ * for VC1 simple or main profile when parsing the frame
+ * buffer. When parsing the sequence header, it must be set to 1
+ * always.
+ *
+ * PARSER IMPLEMENTOR: set this flag in the parser.
+ */
+
+ /*
+ if ((codec_type == VBP_H264) || (codec_type == VBP_MPEG4))
+ {
+ cxt->getbits.is_emul_reqd = 1;
+ }
+ */
+
+
+ /* populate the list.*/
+ if (init_data_flag)
+ {
+ error = pcontext->func_parse_init_data(pcontext);
+ }
+ else
+ {
+ error = pcontext->func_parse_start_code(pcontext);
+ }
+
+ if (VBP_OK != error)
+ {
+ ETRACE("Failed to parse the start code!");
+ return error;
+ }
+
+ /* set up bitstream buffer */
+ cxt->getbits.list = &(cxt->list);
+
+ /* setup buffer pointer */
+ cxt->getbits.bstrm_buf.buf = cxt->parse_cubby.buf;
+
+ /*
+ * TO DO:
+ * check if cxt->getbits.is_emul_reqd is set properly
+ */
+
+ /* parse each item (header/slice span) found in the buffer */
+ for (i = 0; i < cxt->list.num_items; i++)
+ {
+ /* setup bitstream parser */
+ cxt->getbits.bstrm_buf.buf_index = cxt->list.data[i].stpos;
+ cxt->getbits.bstrm_buf.buf_st = cxt->list.data[i].stpos;
+ cxt->getbits.bstrm_buf.buf_end = cxt->list.data[i].edpos;
+
+ /* It is possible to end up with buf_offset not equal zero. */
+ cxt->getbits.bstrm_buf.buf_bitoff = 0;
+
+ cxt->getbits.au_pos = 0;
+ cxt->getbits.list_off = 0;
+ cxt->getbits.phase = 0;
+ cxt->getbits.emulation_byte_counter = 0;
+
+ cxt->list.start_offset = cxt->list.data[i].stpos;
+ cxt->list.end_offset = cxt->list.data[i].edpos;
+ cxt->list.total_bytes = cxt->list.data[i].edpos - cxt->list.data[i].stpos;
+
+ /* invoke parse entry point to parse the buffer */
+ error = ops->parse_syntax((void *)cxt, (void *)&(cxt->codec_data[0]));
+
+ /* can't return error for now. Needs further investigation */
+
+ /*if (0 != error)
+ {
+ ETRACE("failed to parse the syntax: %d!", error);
+ return error;
+ }*/
+
+ /*
+ * process parsing result
+ */
+ error = pcontext->func_process_parsing_result(pcontext, i);
+
+ if (0 != error)
+ {
+ ETRACE("Failed to process parsing result.");
+ return error;
+ }
+ }
+
+ /* currently always assume a complete frame is supplied for parsing, so
+ * there is no need to check if workload is done
+ */
+
+ /*
+ uint32_t codec_errors = 0;
+ uint32_t state;
+
+ error = ops->is_wkld_done(
+ (void *)cxt,
+ (void *)&(cxt->codec_data[0]),
+ (uint32_t)cxt->sc_prefix_info.next_sc,
+ &codec_errors);
+ state = (ret == VIDDEC_PARSE_FRMDONE) ? VBP_DONE : VBP_OK;
+ return state;
+ */
+
+ return VBP_OK;
+}
+
+
+/**
+ *
+ * Create and fully initialize a parser context for the given parser_type.
+ * Loads the parser shared object, allocates all parser memory, initializes
+ * the viddec list/bitstream/emitter state and marks the context with
+ * MAGIC_NUMBER. On success *ppcontext receives the new context; on failure
+ * *ppcontext is NULL, everything is released and an error code
+ * (VBP_MEM, VBP_TYPE, VBP_LOAD, ...) is returned.
+ *
+ */
+uint32 vbp_utils_create_context(uint32 parser_type, vbp_context **ppcontext)
+{
+ uint32 error = VBP_OK;
+ vbp_context *pcontext = NULL;
+
+ /* prevention from the failure */
+ *ppcontext = NULL;
+
+ pcontext = g_try_new0(vbp_context, 1);
+ if (NULL == pcontext)
+ {
+ error = VBP_MEM;
+ goto cleanup;
+ }
+
+ pcontext->parser_type = parser_type;
+
+ /* load parser, initialize parser operators and entry points */
+ error = vbp_utils_initialize_context(pcontext);
+ if (VBP_OK != error)
+ {
+ goto cleanup;
+ }
+
+ /* allocate parser context, persistent memory, query data and workload */
+ error = vbp_utils_allocate_parser_memory(pcontext);
+ if (VBP_OK != error)
+ {
+ goto cleanup;
+ }
+
+ viddec_pm_utils_list_init(&(pcontext->parser_cxt->list));
+ viddec_pm_utils_bstream_init(&(pcontext->parser_cxt->getbits), NULL, 0);
+ pcontext->parser_cxt->cur_buf.list_index = -1;
+ pcontext->parser_cxt->parse_cubby.phase = 0;
+
+ /* invoke the entry point to initialize the parser. */
+ pcontext->parser_ops->init(
+ (void *)pcontext->parser_cxt->codec_data,
+ (void *)pcontext->persist_mem,
+ FALSE);
+
+ viddec_emit_init(&(pcontext->parser_cxt->emitter));
+
+ /* overwrite init with our number of items. */
+ pcontext->parser_cxt->emitter.cur.max_items = MAX_WORKLOAD_ITEMS;
+ pcontext->parser_cxt->emitter.next.max_items = MAX_WORKLOAD_ITEMS;
+
+ /* set up to find the first start code. */
+ pcontext->parser_cxt->sc_prefix_info.first_sc_detect = 1;
+
+ /* indicates initialized OK. */
+ pcontext->identifier = MAGIC_NUMBER;
+ *ppcontext = pcontext;
+ error = VBP_OK;
+
+cleanup:
+
+ if (VBP_OK != error)
+ {
+ /* unwind in reverse order; all three calls are NULL-safe */
+ vbp_utils_free_parser_memory(pcontext);
+ vbp_utils_uninitialize_context(pcontext);
+ g_free(pcontext);
+ pcontext = NULL;
+ }
+
+ return error;
+}
+
+/**
+ *
+ * Destroy the context: release all parser memory, unload the parser shared
+ * object and free the context itself. NULL-safe via the callees.
+ * Always returns VBP_OK.
+ *
+ */
+uint32 vbp_utils_destroy_context(vbp_context *pcontext)
+{
+ /* entry point, no need to validate input parameters. */
+ vbp_utils_free_parser_memory(pcontext);
+ vbp_utils_uninitialize_context(pcontext);
+ g_free(pcontext);
+ /* note: no point in nulling pcontext here - it is a by-value parameter,
+ * so the assignment would be a dead store invisible to the caller */
+
+ return VBP_OK;
+}
+
+
+/**
+ *
+ * Parse one sample buffer (init_data_flag == 0) or parser configuration
+ * data (init_data_flag != 0) of `size` bytes at `data`.
+ * Wires the workloads into the emitter, points the parse cubby at the
+ * caller's buffer and delegates to vbp_utils_parse_es_buffer.
+ * The global buffer_counter is incremented only for real sample buffers.
+ *
+ */
+uint32 vbp_utils_parse_buffer(vbp_context *pcontext, uint8 *data, uint32 size, uint8 init_data_flag)
+{
+ /* entry point, not need to validate input parameters. */
+
+ uint32 error = VBP_OK;
+
+ /* ITRACE("buffer counter: %d",buffer_counter); */
+
+ /* set up emitter. */
+ pcontext->parser_cxt->emitter.cur.data = pcontext->workload1;
+ pcontext->parser_cxt->emitter.next.data = pcontext->workload2;
+
+ /* reset bit offset */
+ pcontext->parser_cxt->getbits.bstrm_buf.buf_bitoff = 0;
+
+
+ /* set up cubby. */
+ pcontext->parser_cxt->parse_cubby.buf = data;
+ pcontext->parser_cxt->parse_cubby.size = size;
+ pcontext->parser_cxt->parse_cubby.phase = 0;
+
+ error = vbp_utils_parse_es_buffer(pcontext, init_data_flag);
+
+ /* rolling count of buffers. */
+ if (0 == init_data_flag)
+ {
+ buffer_counter++;
+ }
+ return error;
+}
+
+/**
+ *
+ * Provide query data back to the consumer: ask the format module to
+ * populate its query data, then hand the pointer back through *data.
+ * On failure *data is cleared and the error code is returned.
+ *
+ */
+uint32 vbp_utils_query(vbp_context *pcontext, void **data)
+{
+ /* entry point, no need to validate input parameters. */
+ uint32 status = pcontext->func_populate_query_data(pcontext);
+
+ /* expose the query data only on success; otherwise clear the out pointer */
+ *data = (VBP_OK == status) ? pcontext->query_data : NULL;
+
+ return status;
+}
+
+/**
+ *
+ * Flush parsing buffer. Currently it is no op: it always returns VBP_IMPL
+ * (presumably "not implemented" - confirm against vbp_loader.h).
+ *
+ */
+uint32 vbp_utils_flush(vbp_context *pcontext)
+{
+ return VBP_IMPL;
+}
+
diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_utils.h b/mix_vbp/viddec_fw/fw/parser/vbp_utils.h
new file mode 100644
index 0000000..67ff3e8
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/vbp_utils.h
@@ -0,0 +1,106 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+#ifndef VBP_UTILS_H
+#define VBP_UTILS_H
+
+#include "viddec_parser_ops.h"
+#include "viddec_pm_parse.h"
+#include "viddec_pm.h"
+#include "vbp_trace.h"
+
+#define MAGIC_NUMBER 0x0DEADBEEF
+#define MAX_WORKLOAD_ITEMS 1000
+
+/* maximum 256 slices per sample buffer */
+#define MAX_NUM_SLICES 256
+
+/* maximum two pictures per sample buffer */
+#define MAX_NUM_PICTURES 2
+
+
+extern uint32 viddec_parse_sc(void *in, void *pcxt, void *sc_state);
+
+/* rolling counter of sample buffer */
+extern uint32 buffer_counter;
+
+typedef struct vbp_context_t vbp_context;
+
+typedef uint32 (*function_init_parser_entries)(vbp_context* cxt);
+typedef uint32 (*function_allocate_query_data)(vbp_context* cxt);
+typedef uint32 (*function_free_query_data)(vbp_context* cxt);
+typedef uint32 (*function_parse_init_data)(vbp_context* cxt);
+typedef uint32 (*function_parse_start_code)(vbp_context* cxt);
+typedef uint32 (*function_process_parsing_result)(vbp_context* cxt, int i);
+typedef uint32 (*function_populate_query_data)(vbp_context* cxt);
+
+
+
+struct vbp_context_t
+{
+ /* magic number; set to MAGIC_NUMBER once the context is fully initialized */
+ uint32 identifier;
+
+ /* parser type, eg, MPEG-2, MPEG-4, H.264, VC1 */
+ uint32 parser_type;
+
+ /* handle to parser (shared object) returned by dlopen */
+ void *fd_parser;
+
+ /* parser (shared object) entry points */
+ viddec_parser_ops_t *parser_ops;
+
+ /* parser context */
+ viddec_pm_cxt_t *parser_cxt;
+
+ /* current/next workload buffers handed to the emitter for each parse */
+ viddec_workload_t *workload1, *workload2;
+
+ /* persistent memory for parser; may be NULL except for H.264 */
+ uint32 *persist_mem;
+
+ /* format specific query data (e.g. vbp_data_mp42), owned by the format module */
+ void *query_data;
+
+
+ /* format-specific callbacks installed by vbp_utils_initialize_context */
+ function_init_parser_entries func_init_parser_entries;
+ function_allocate_query_data func_allocate_query_data;
+ function_free_query_data func_free_query_data;
+ function_parse_init_data func_parse_init_data;
+ function_parse_start_code func_parse_start_code;
+ function_process_parsing_result func_process_parsing_result;
+ function_populate_query_data func_populate_query_data;
+
+};
+
+/**
+ * create VBP context
+ */
+uint32 vbp_utils_create_context(uint32 parser_type, vbp_context **ppcontext);
+
+/*
+ * destroy VBP context
+ */
+uint32 vbp_utils_destroy_context(vbp_context *pcontext);
+
+/*
+ * parse bitstream
+ */
+uint32 vbp_utils_parse_buffer(vbp_context *pcontext, uint8 *data, uint32 size, uint8 init_data_flag);
+
+/*
+ * query parsing result
+ */
+uint32 vbp_utils_query(vbp_context *pcontext, void **data);
+
+/*
+ * flush un-parsed bitstream
+ */
+uint32 vbp_utils_flush(vbp_context *pcontext);
+
+#endif /* VBP_UTILS_H */
diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.c b/mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.c
new file mode 100644
index 0000000..502cdc6
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.c
@@ -0,0 +1,1029 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+
+#include <glib.h>
+#include <dlfcn.h>
+#include <string.h>
+
+#include "vc1.h"
+#include "vbp_loader.h"
+#include "vbp_utils.h"
+#include "vbp_vc1_parser.h"
+
+/* maximum number of Macroblock divided by 2, see va.h */
+#define MAX_BITPLANE_SIZE 16384
+
+/* Start code prefix is 001 which is 3 bytes. */
+#define PREFIX_SIZE 3
+
+/* Maps VC-1 BFRACTION (numerator, denominator) to the b_picture_fraction
+ * index expected by libva. Row = numerator (0..7), column = denominator
+ * (0..8); entries that do not correspond to a legal fraction are 0. */
+static uint32 b_fraction_table[][9] = {
+ /* num 0 1 2 3 4 5 6 7 8 den */
+ /* 0 */ { 0, 0, 0, 0, 0, 0, 0, 0, 0 },
+ /* 1 */ { 0, 0, 0, 1, 3, 5, 9, 11, 17 },
+ /* 2 */ { 0, 0, 0, 2, 0, 6, 0, 12, 0 },
+ /* 3 */ { 0, 0, 0, 0, 4, 7, 0, 13, 18 },
+ /* 4 */ { 0, 0, 0, 0, 0, 8, 0, 14, 0 },
+ /* 5 */ { 0, 0, 0, 0, 0, 0, 10, 15, 19 },
+ /* 6 */ { 0, 0, 0, 0, 0, 0, 0, 16, 0 },
+ /* 7 */ { 0, 0, 0, 0, 0, 0, 0, 0, 20 }
+};
+
+
+
+/**
+ * set parser entry points
+ */
+/**
+ * set parser entry points
+ * Resolves the VC-1 parser's entry points from the already-dlopen()ed
+ * shared object (pcontext->fd_parser) into parser_ops. parse_sc is bound
+ * to the statically linked viddec_parse_sc, not looked up via dlsym.
+ * Returns VBP_OK, VBP_PARM if parser_ops is NULL, or VBP_LOAD if any
+ * symbol cannot be resolved.
+ */
+uint32 vbp_init_parser_entries_vc1(vbp_context *pcontext)
+{
+ if (NULL == pcontext->parser_ops)
+ {
+ /* impossible, just sanity check */
+ return VBP_PARM;
+ }
+
+ pcontext->parser_ops->init = dlsym(pcontext->fd_parser, "viddec_vc1_init");
+ if (NULL == pcontext->parser_ops->init)
+ {
+ ETRACE ("Failed to set entry point.");
+ return VBP_LOAD;
+ }
+
+ /* start-code scan uses the common (non-codec-specific) routine */
+ pcontext->parser_ops->parse_sc = viddec_parse_sc;
+
+ pcontext->parser_ops->parse_syntax = dlsym(pcontext->fd_parser, "viddec_vc1_parse");
+ if (NULL == pcontext->parser_ops->parse_syntax)
+ {
+ ETRACE ("Failed to set entry point.");
+ return VBP_LOAD;
+ }
+
+ pcontext->parser_ops->get_cxt_size = dlsym(pcontext->fd_parser, "viddec_vc1_get_context_size");
+ if (NULL == pcontext->parser_ops->get_cxt_size)
+ {
+ ETRACE ("Failed to set entry point.");
+ return VBP_LOAD;
+ }
+
+ pcontext->parser_ops->is_wkld_done = dlsym(pcontext->fd_parser, "viddec_vc1_wkld_done");
+ if (NULL == pcontext->parser_ops->is_wkld_done)
+ {
+ ETRACE ("Failed to set entry point.");
+ return VBP_LOAD;
+ }
+
+ pcontext->parser_ops->is_frame_start = dlsym(pcontext->fd_parser, "viddec_vc1_is_start_frame");
+ if (NULL == pcontext->parser_ops->is_frame_start)
+ {
+ ETRACE ("Failed to set entry point.");
+ return VBP_LOAD;
+ }
+
+ return VBP_OK;
+}
+
+/**
+ * allocate query data structure
+ */
+/**
+ * allocate query data structure
+ * Allocates the vbp_data_vc1 query structure plus, for each of the
+ * MAX_NUM_PICTURES picture slots, a VA picture-parameter buffer, a packed
+ * bitplane buffer (MAX_BITPLANE_SIZE bytes) and a slice-data array
+ * (MAX_NUM_SLICES entries). Uses g_try_* allocators, which return NULL on
+ * failure instead of aborting. On any failure the partially built
+ * structure is torn down via vbp_free_query_data_vc1 and VBP_MEM is
+ * returned; on success pcontext->query_data owns the result.
+ */
+uint32 vbp_allocate_query_data_vc1(vbp_context *pcontext)
+{
+ if (NULL != pcontext->query_data)
+ {
+ /* impossible, just sanity check */
+ return VBP_PARM;
+ }
+
+ pcontext->query_data = NULL;
+
+ vbp_data_vc1 *query_data = NULL;
+ query_data = g_try_new0(vbp_data_vc1, 1);
+ if (NULL == query_data)
+ {
+ return VBP_MEM;
+ }
+
+ /* assign the pointer */
+ pcontext->query_data = (void *)query_data;
+
+ query_data->se_data = g_try_new0(vbp_codec_data_vc1, 1);
+ if (NULL == query_data->se_data)
+ {
+ goto cleanup;
+ }
+ query_data->pic_data = g_try_new0(vbp_picture_data_vc1, MAX_NUM_PICTURES);
+ if (NULL == query_data->pic_data)
+ {
+ goto cleanup;
+ }
+
+ int i;
+ for (i = 0; i < MAX_NUM_PICTURES; i++)
+ {
+ query_data->pic_data[i].pic_parms = g_try_new0(VAPictureParameterBufferVC1, 1);
+ if (NULL == query_data->pic_data[i].pic_parms)
+ {
+ goto cleanup;
+ }
+
+ query_data->pic_data[i].packed_bitplanes = g_try_malloc0(MAX_BITPLANE_SIZE);
+ if (NULL == query_data->pic_data[i].packed_bitplanes)
+ {
+ goto cleanup;
+ }
+
+ query_data->pic_data[i].slc_data = g_try_malloc0(MAX_NUM_SLICES * sizeof(vbp_slice_data_vc1));
+ if (NULL == query_data->pic_data[i].slc_data)
+ {
+ goto cleanup;
+ }
+ }
+
+ return VBP_OK;
+
+cleanup:
+ /* pcontext->query_data is already set, so the free routine can walk
+ * and release whatever was successfully allocated above. */
+ vbp_free_query_data_vc1(pcontext);
+
+ return VBP_MEM;
+}
+
+
+/**
+ * free query data structure
+ */
+/**
+ * free query data structure
+ * Releases everything allocated by vbp_allocate_query_data_vc1 and resets
+ * pcontext->query_data to NULL. Safe to call on a partially allocated
+ * structure: g_free(NULL) is a no-op. Always returns VBP_OK.
+ */
+uint32 vbp_free_query_data_vc1(vbp_context *pcontext)
+{
+ vbp_data_vc1 *query_data = NULL;
+
+ if (NULL == pcontext->query_data)
+ {
+ return VBP_OK;
+ }
+
+ query_data = (vbp_data_vc1 *)pcontext->query_data;
+
+ if (query_data->pic_data)
+ {
+ int i = 0;
+ for (i = 0; i < MAX_NUM_PICTURES; i++)
+ {
+ g_free(query_data->pic_data[i].slc_data);
+ g_free(query_data->pic_data[i].packed_bitplanes);
+ g_free(query_data->pic_data[i].pic_parms);
+ }
+ }
+
+ g_free(query_data->pic_data);
+
+ g_free(query_data->se_data);
+
+ g_free(query_data);
+
+ pcontext->query_data = NULL;
+
+ return VBP_OK;
+}
+
+
+/**
+ * We want to create a list of buffer segments where each segment is a start
+ * code followed by all the data up to the next start code or to the end of
+ * the buffer. In VC-1, it is common to get buffers with no start codes. The
+ * parser proper, doesn't really handle the situation where there are no SCs.
+ * In this case, I will bypass the stripping of the SC code and assume a frame.
+ */
+/* Scans cxt->parse_cubby for VC-1 start codes and fills cxt->list with one
+ * segment per start code (stpos at the first byte of the 001 prefix, edpos
+ * at the start of the next prefix or end of buffer). Only SC types
+ * 0x0A-0x0F are kept. If no start code is found at all, the whole buffer
+ * is recorded as a single segment. init_data_flag non-zero means the
+ * buffer is codec configuration data, which is always SC-prefixed, so the
+ * interlaced-field special case below is skipped. Always returns VBP_OK.
+ */
+static uint32 vbp_parse_start_code_helper_vc1(
+ viddec_pm_cxt_t *cxt,
+ viddec_parser_ops_t *ops,
+ int init_data_flag)
+{
+ uint32_t ret = VBP_OK;
+ viddec_sc_parse_cubby_cxt_t cubby;
+
+ /* make copy of cubby */
+ /* this doesn't copy the buffer, merely the structure that holds the buffer */
+ /* pointer. Below, where we call parse_sc() the code starts the search for */
+ /* SCs at the beginning of the buffer pointed to by the cubby, so in our */
+ /* cubby copy we increment the pointer as we move through the buffer. If */
+ /* you think of each start code followed either by another start code or the */
+ /* end of the buffer, then parse_sc() is returning information relative to */
+ /* current segment. */
+
+ cubby = cxt->parse_cubby;
+
+ cxt->list.num_items = 0;
+ cxt->list.data[0].stpos = 0;
+ cxt->getbits.is_emul_reqd = 1;
+
+ /* codec initialization data is always start code prefixed. (may not start at position 0)
+ * sample buffer for AP has three start code patterns here:
+ * pattern 0: no start code at all, the whole buffer is a single segment item
+ * pattern 1: start codes for all segment items
+ * pattern 2: no start code for the first segment item, start codes for the rest segment items
+ */
+
+ gboolean is_pattern_two = FALSE;
+
+ unsigned char start_code = 0;
+
+ while(1)
+ {
+ /* parse the created buffer for sc */
+ ret = ops->parse_sc((void *)&cubby, (void *)&(cxt->codec_data[0]), &(cxt->sc_prefix_info));
+ if(ret == 1)
+ {
+ cubby.phase = 0;
+ /* sc_end_pos indexes the byte after the 001 prefix, i.e. the SC type */
+ start_code = *(unsigned char*)(cubby.buf + cubby.sc_end_pos);
+#if 1
+ if (0 == init_data_flag &&
+ PREFIX_SIZE != cubby.sc_end_pos &&
+ 0 == cxt->list.num_items)
+ {
+ /* buffer does not have start code at the beginning */
+ vc1_viddec_parser_t *parser = NULL;
+ vc1_metadata_t *seqLayerHeader = NULL;
+
+ parser = (vc1_viddec_parser_t *)cxt->codec_data;
+ seqLayerHeader = &(parser->info.metadata);
+ if (1 == seqLayerHeader->INTERLACE)
+ {
+ /* this is a hack for interlaced field coding */
+ /* handle field interlace coding. One sample contains two fields, where:
+ * the first field does not have start code prefix,
+ * the second field has start code prefix.
+ */
+ cxt->list.num_items = 1;
+ cxt->list.data[0].stpos = 0;
+ is_pattern_two = TRUE;
+ }
+ }
+#endif
+ if (cxt->list.num_items == 0) /* found first SC. */
+ {
+ /* sc_end_pos gets us to the SC type. We need to back up to the first zero */
+ cxt->list.data[0].stpos = cubby.sc_end_pos - PREFIX_SIZE;
+ }
+ else
+ {
+ /* First we set the end position of the last segment. */
+ /* Since the SC parser searches from SC type to SC type and the */
+ /* sc_end_pos is relative to this segment only, we merely add */
+ /* sc_end_pos to the start to find the end. */
+ cxt->list.data[cxt->list.num_items - 1].edpos =
+ cubby.sc_end_pos + cxt->list.data[cxt->list.num_items - 1].stpos;
+
+ /* Then we set the start position of the current segment. */
+ /* So I need to subtract 1 ??? */
+ cxt->list.data[cxt->list.num_items].stpos =
+ cxt->list.data[cxt->list.num_items - 1].edpos;
+
+ if (is_pattern_two)
+ {
+ /* first segment had no prefix, so the running position is
+ * off by one prefix length; compensate once, then clear */
+ cxt->list.data[cxt->list.num_items].stpos -= PREFIX_SIZE;
+ /* restore to normal pattern */
+ is_pattern_two = FALSE;
+ }
+ }
+ /* We need to set up the cubby buffer for the next time through parse_sc(). */
+ /* But even though we want the list to contain a segment as described */
+ /* above, we want the cubby buffer to start just past the prefix, or it will */
+ /* find the same SC again. So I bump the cubby buffer past the prefix. */
+ cubby.buf = cubby.buf +
+ cxt->list.data[cxt->list.num_items].stpos +
+ PREFIX_SIZE;
+
+ cubby.size = cxt->parse_cubby.size -
+ cxt->list.data[cxt->list.num_items].stpos -
+ PREFIX_SIZE;
+
+ if (start_code >= 0x0A && start_code <= 0x0F)
+ {
+ /* only put known start code to the list
+ * 0x0A: end of sequence
+ * 0x0B: slice header
+ * 0x0C: frame header
+ * 0x0D: field header
+ * 0x0E: entry point header
+ * 0x0F: sequence header
+ */
+ cxt->list.num_items++;
+ }
+ else
+ {
+ ITRACE("skipping unknown start code :%d", start_code);
+ }
+
+ if (cxt->list.num_items >= MAX_IBUFS_PER_SC)
+ {
+ WTRACE("Num items exceeds the limit!");
+ /* not fatal, just stop parsing */
+ break;
+ }
+ }
+ else
+ {
+ /* we get here, if we reach the end of the buffer while looking or a SC. */
+ /* If we never found a SC, then num_items will never get incremented. */
+ if (cxt->list.num_items == 0)
+ {
+ /* If we don't find a SC we probably still have a frame of data. */
+ /* So let's bump the num_items or else later we will not parse the */
+ /* frame. */
+ cxt->list.num_items = 1;
+ }
+ /* now we can set the end position of the last segment. */
+ cxt->list.data[cxt->list.num_items - 1].edpos = cxt->parse_cubby.size;
+ break;
+ }
+ }
+ return VBP_OK;
+}
+
+/*
+* parse initialization data (decoder configuration data)
+* for VC1 advanced profile, data is sequence header and
+* entry pointer header.
+* for VC1 main/simple profile, data format
+* is defined in VC1 spec: Annex J, (Decoder initialization metadata
+* structure 1 and structure 3
+*/
+uint32 vbp_parse_init_data_vc1(vbp_context *pcontext)
+{
+ /**
+ * init data (aka decoder configuration data) must
+ * be start-code prefixed
+ */
+
+ viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
+ viddec_parser_ops_t *ops = pcontext->parser_ops;
+ /* init_data_flag = 1: skip the "no SC at buffer start" field hack */
+ return vbp_parse_start_code_helper_vc1(cxt, ops, 1);
+}
+
+
+
+/**
+* Parse start codes, VC1 main/simple profile does not have start code;
+* VC1 advanced may not have start code either.
+* Resets the per-sample query data (picture/slice counters), then either
+* delegates to the start-code scanner (advanced profile) or records the
+* whole buffer as a single segment (simple/main profile, no start codes).
+*/
+uint32_t vbp_parse_start_code_vc1(vbp_context *pcontext)
+{
+ viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
+ viddec_parser_ops_t *ops = pcontext->parser_ops;
+
+ vc1_viddec_parser_t *parser = NULL;
+ vc1_metadata_t *seqLayerHeader = NULL;
+
+ vbp_data_vc1 *query_data = (vbp_data_vc1 *) pcontext->query_data;
+
+ /* Reset query data for the new sample buffer */
+ int i = 0;
+ for (i = 0; i < MAX_NUM_PICTURES; i++)
+ {
+ query_data->num_pictures = 0;
+ query_data->pic_data[i].num_slices = 0;
+ query_data->pic_data[i].picture_is_skipped = 0;
+ }
+
+ parser = (vc1_viddec_parser_t *)cxt->codec_data;
+ seqLayerHeader = &(parser->info.metadata);
+
+
+ /* WMV codec data will have a start code, but the WMV picture data won't. */
+ if (VC1_PROFILE_ADVANCED == seqLayerHeader->PROFILE)
+ {
+ return vbp_parse_start_code_helper_vc1(cxt, ops, 0);
+ }
+ else
+ {
+ /* WMV: vc1 simple or main profile. No start code present.
+ */
+
+ /* must set is_emul_reqd to 0! */
+ cxt->getbits.is_emul_reqd = 0;
+ cxt->list.num_items = 1;
+ cxt->list.data[0].stpos = 0;
+ cxt->list.data[0].edpos = cxt->parse_cubby.size;
+ }
+
+ return VBP_OK;
+}
+
+
+/**
+ * Reads one bit from the 32-bit word array `data`, advancing the
+ * (*current_word, *current_bit) cursor. Bits are consumed LSB-first
+ * within each word (bit 0 up to bit 31, then on to the next word).
+ * Returns the bit value (0 or 1).
+ */
+static inline uint8 vbp_get_bit_vc1(uint32 *data, uint32 *current_word, uint32 *current_bit)
+{
+ uint8 value;
+
+ value = (data[*current_word] >> *current_bit) & 1;
+
+ /* Fix up bit/byte offsets. endianess?? */
+ if (*current_bit < 31)
+ {
+ ++(*current_bit);
+ }
+ else
+ {
+ ++(*current_word);
+ *current_bit = 0;
+ }
+
+ return value;
+}
+
+
+/**
+ * Copies one decoded bitplane (`from_plane`, one bit per macroblock, rows
+ * padded to a 32-bit word stride) into the packed `to_plane` buffer used
+ * by libva: two macroblocks per byte, the bit for each MB placed at
+ * `nibble_shift` within its nibble (first MB of a pair goes in the high
+ * nibble). `to_plane` is expected to be pre-zeroed since bits are OR-ed
+ * in; several planes may be merged into the same buffer at different
+ * nibble_shift values. Returns VBP_OK.
+ */
+static uint32 vbp_pack_bitplane_vc1(
+ uint32 *from_plane,
+ uint8 *to_plane,
+ uint32 width,
+ uint32 height,
+ uint32 nibble_shift)
+{
+ uint32 error = VBP_OK;
+ uint32 current_word = 0;
+ uint32 current_bit = 0; /* must agree with number in vbp_get_bit_vc1 */
+ uint32 i, j, n;
+ uint8 value;
+ uint32 stride = 0;
+
+ /* rows in from_plane are padded up to a whole number of 32-bit words */
+ stride = 32 * ((width + 31) / 32);
+
+ for (i = 0, n = 0; i < height; i++)
+ {
+ for (j = 0; j < stride; j++)
+ {
+ if (j < width)
+ {
+ value = vbp_get_bit_vc1(
+ from_plane,
+ &current_word,
+ &current_bit);
+
+ to_plane[n / 2] |= value << (nibble_shift + ((n % 2) ? 0 : 4));
+ n++;
+ }
+ else
+ {
+ break;
+ }
+ }
+ if (stride > width)
+ {
+ /* skip the row's padding bits by jumping to the next word */
+ current_word++;
+ current_bit = 0;
+ }
+ }
+
+ return error;
+}
+
+
+/**
+ * Maps the VC-1 BFRACTION numerator/denominator pair to the
+ * b_picture_fraction code expected by libva, via b_fraction_table.
+ * Out-of-range inputs (numerator >= 8 or denominator >= 9) yield 0.
+ */
+static inline uint32 vbp_map_bfraction(uint32 numerator, uint32 denominator)
+{
+ uint32 b_fraction = 0;
+
+ if ((numerator < 8) && (denominator < 9))
+ {
+ b_fraction = b_fraction_table[numerator][denominator];
+ }
+
+ return b_fraction;
+}
+
+/**
+ * Packs the picture's imode-coded bitplanes into
+ * pic_data->packed_bitplanes, in the nibble layout libva expects.
+ * Which planes are merged depends on the picture type:
+ * I/BI: OVERFLAGS (bit 2), ACPRED (bit 1), FIELDTX (bit 0)
+ * P: MVTYPEMB (bit 2), SKIPMB (bit 1), DIRECTMB (bit 0)
+ * B: FORWARDMB (bit 2), SKIPMB (bit 1), DIRECTMB (bit 0)
+ * If bitplane_present.value is 0 the buffer is simply zeroed. Returns
+ * VBP_OK, or VBP_TYPE if a plane inconsistent with the picture type is
+ * present (sanity check).
+ * NOTE: pic_data->pic_parms must already be filled in (see caller).
+ */
+static uint32 vbp_pack_bitplanes_vc1(
+ vbp_context *pcontext,
+ int index,
+ vbp_picture_data_vc1* pic_data)
+{
+ uint32 error = VBP_OK;
+ if (0 == pic_data->pic_parms->bitplane_present.value)
+ {
+ /* return if bitplane is not present */
+ pic_data->size_bitplanes = 0;
+ memset(pic_data->packed_bitplanes, 0, MAX_BITPLANE_SIZE);
+ return error;
+ }
+
+ vc1_viddec_parser_t *parser = (vc1_viddec_parser_t *)pcontext->parser_cxt->codec_data;
+ vc1_metadata_t *seqLayerHeader = &(parser->info.metadata);
+ vc1_PictureLayerHeader *picLayerHeader = &(parser->info.picLayerHeader);
+
+
+ /* set bit plane size: one MB per nibble, two MBs per byte (rounded up) */
+ pic_data->size_bitplanes = ((seqLayerHeader->widthMB * seqLayerHeader->heightMB) + 1) / 2;
+
+
+ /* planes are OR-ed into the buffer, so it must start zeroed */
+ memset(pic_data->packed_bitplanes, 0, pic_data->size_bitplanes);
+
+ /* see libva library va.h for nibble bit */
+ switch (picLayerHeader->PTYPE)
+ {
+ case VC1_I_FRAME:
+ case VC1_BI_FRAME:
+ if (picLayerHeader->OVERFLAGS.imode)
+ {
+ vbp_pack_bitplane_vc1(
+ picLayerHeader->OVERFLAGS.databits,
+ pic_data->packed_bitplanes,
+ seqLayerHeader->widthMB,
+ seqLayerHeader->heightMB,
+ 2);
+ }
+ if (picLayerHeader->ACPRED.imode)
+ {
+ vbp_pack_bitplane_vc1(
+ picLayerHeader->ACPRED.databits,
+ pic_data->packed_bitplanes,
+ seqLayerHeader->widthMB,
+ seqLayerHeader->heightMB,
+ 1);
+ }
+ if (picLayerHeader->FIELDTX.imode)
+ {
+ vbp_pack_bitplane_vc1(
+ picLayerHeader->FIELDTX.databits,
+ pic_data->packed_bitplanes,
+ seqLayerHeader->widthMB,
+ seqLayerHeader->heightMB,
+ 0);
+ }
+ /* sanity check */
+ if (picLayerHeader->MVTYPEMB.imode ||
+ picLayerHeader->DIRECTMB.imode ||
+ picLayerHeader->SKIPMB.imode ||
+ picLayerHeader->FORWARDMB.imode)
+ {
+ ETRACE("Unexpected bit-plane type.");
+ error = VBP_TYPE;
+ }
+ break;
+
+ case VC1_P_FRAME:
+ if (picLayerHeader->MVTYPEMB.imode)
+ {
+ vbp_pack_bitplane_vc1(
+ picLayerHeader->MVTYPEMB.databits,
+ pic_data->packed_bitplanes,
+ seqLayerHeader->widthMB,
+ seqLayerHeader->heightMB,
+ 2);
+ }
+ if (picLayerHeader->SKIPMB.imode)
+ {
+ vbp_pack_bitplane_vc1(
+ picLayerHeader->SKIPMB.databits,
+ pic_data->packed_bitplanes,
+ seqLayerHeader->widthMB,
+ seqLayerHeader->heightMB,
+ 1);
+ }
+ if (picLayerHeader->DIRECTMB.imode)
+ {
+ vbp_pack_bitplane_vc1(
+ picLayerHeader->DIRECTMB.databits,
+ pic_data->packed_bitplanes,
+ seqLayerHeader->widthMB,
+ seqLayerHeader->heightMB,
+ 0);
+ }
+ /* sanity check */
+ if (picLayerHeader->FIELDTX.imode ||
+ picLayerHeader->FORWARDMB.imode ||
+ picLayerHeader->ACPRED.imode ||
+ picLayerHeader->OVERFLAGS.imode )
+ {
+ ETRACE("Unexpected bit-plane type.");
+ error = VBP_TYPE;
+ }
+ break;
+
+ case VC1_B_FRAME:
+ if (picLayerHeader->FORWARDMB.imode)
+ {
+ vbp_pack_bitplane_vc1(
+ picLayerHeader->FORWARDMB.databits,
+ pic_data->packed_bitplanes,
+ seqLayerHeader->widthMB,
+ seqLayerHeader->heightMB,
+ 2);
+ }
+ if (picLayerHeader->SKIPMB.imode)
+ {
+ vbp_pack_bitplane_vc1(
+ picLayerHeader->SKIPMB.databits,
+ pic_data->packed_bitplanes,
+ seqLayerHeader->widthMB,
+ seqLayerHeader->heightMB,
+ 1);
+ }
+ if (picLayerHeader->DIRECTMB.imode)
+ {
+ vbp_pack_bitplane_vc1(
+ picLayerHeader->DIRECTMB.databits,
+ pic_data->packed_bitplanes,
+ seqLayerHeader->widthMB,
+ seqLayerHeader->heightMB,
+ 0);
+ }
+ /* sanity check */
+ if (picLayerHeader->MVTYPEMB.imode ||
+ picLayerHeader->FIELDTX.imode ||
+ picLayerHeader->ACPRED.imode ||
+ picLayerHeader->OVERFLAGS.imode)
+ {
+ ETRACE("Unexpected bit-plane type.");
+ error = VBP_TYPE;
+ }
+ break;
+ }
+ return error;
+}
+
+
+/**
+ * fill the query data structure after sequence header, entry point header
+ * or a complete frame is parsed.
+ * Copies the sequence/entry-point metadata out of the parser context into
+ * query_data->se_data and updates the buffer counter. Always returns VBP_OK.
+ * NOTE: currently partial frame is not handled properly
+ */
+uint32 vbp_populate_query_data_vc1(vbp_context *pcontext)
+{
+ uint32 error = VBP_OK;
+
+ vc1_viddec_parser_t *parser = (vc1_viddec_parser_t *)pcontext->parser_cxt->codec_data;
+ vc1_metadata_t *seqLayerHeader = &(parser->info.metadata);
+
+ vbp_data_vc1 *query_data = (vbp_data_vc1 *)pcontext->query_data;
+
+ /* first we get the SH/EP data. Can we cut down on this? */
+ vbp_codec_data_vc1 *se_data = query_data->se_data;
+ se_data->PROFILE = seqLayerHeader->PROFILE;
+ se_data->LEVEL = seqLayerHeader->LEVEL;
+ se_data->POSTPROCFLAG = seqLayerHeader->POSTPROCFLAG;
+ se_data->PULLDOWN = seqLayerHeader->PULLDOWN;
+ se_data->INTERLACE = seqLayerHeader->INTERLACE;
+ se_data->TFCNTRFLAG = seqLayerHeader->TFCNTRFLAG;
+ se_data->FINTERPFLAG = seqLayerHeader->FINTERPFLAG;
+ se_data->PSF = seqLayerHeader->PSF;
+ se_data->BROKEN_LINK = seqLayerHeader->BROKEN_LINK;
+ se_data->CLOSED_ENTRY = seqLayerHeader->CLOSED_ENTRY;
+ se_data->PANSCAN_FLAG = seqLayerHeader->PANSCAN_FLAG;
+ se_data->REFDIST_FLAG = seqLayerHeader->REFDIST_FLAG;
+ se_data->LOOPFILTER = seqLayerHeader->LOOPFILTER;
+ se_data->FASTUVMC = seqLayerHeader->FASTUVMC;
+ se_data->EXTENDED_MV = seqLayerHeader->EXTENDED_MV;
+ se_data->DQUANT = seqLayerHeader->DQUANT;
+ se_data->VSTRANSFORM = seqLayerHeader->VSTRANSFORM;
+ se_data->OVERLAP = seqLayerHeader->OVERLAP;
+ se_data->QUANTIZER = seqLayerHeader->QUANTIZER;
+ /* header stores (dimension / 2) - 1; convert back to pixels */
+ se_data->CODED_WIDTH = (seqLayerHeader->width + 1) << 1;
+ se_data->CODED_HEIGHT = (seqLayerHeader->height + 1) << 1;
+ se_data->EXTENDED_DMV = seqLayerHeader->EXTENDED_DMV;
+ se_data->RANGE_MAPY_FLAG = seqLayerHeader->RANGE_MAPY_FLAG;
+ se_data->RANGE_MAPY = seqLayerHeader->RANGE_MAPY;
+ se_data->RANGE_MAPUV_FLAG = seqLayerHeader->RANGE_MAPUV_FLAG;
+ se_data->RANGE_MAPUV = seqLayerHeader->RANGE_MAPUV;
+ se_data->RANGERED = seqLayerHeader->RANGERED;
+ se_data->MAXBFRAMES = seqLayerHeader->MAXBFRAMES;
+ se_data->MULTIRES = seqLayerHeader->MULTIRES;
+ se_data->SYNCMARKER = seqLayerHeader->SYNCMARKER;
+ se_data->RNDCTRL = seqLayerHeader->RNDCTRL;
+ se_data->REFDIST = seqLayerHeader->REFDIST;
+ se_data->widthMB = seqLayerHeader->widthMB;
+ se_data->heightMB = seqLayerHeader->heightMB;
+ se_data->INTCOMPFIELD = seqLayerHeader->INTCOMPFIELD;
+ se_data->LUMSCALE2 = seqLayerHeader->LUMSCALE2;
+ se_data->LUMSHIFT2 = seqLayerHeader->LUMSHIFT2;
+
+ /* update buffer number */
+ query_data->buf_number = buffer_counter;
+
+ if (query_data->num_pictures > 2)
+ {
+ WTRACE("sample buffer contains %d pictures", query_data->num_pictures);
+ }
+ return error;
+}
+
+
+
+/* Fills pic_data->pic_parms (VAPictureParameterBufferVC1) from the
+ * parser's current sequence and picture layer headers. Reference-surface
+ * fields are set to VA_INVALID_SURFACE; the decoder client is expected to
+ * patch them in. `index` (the segment index) is currently unused here. */
+static void vbp_pack_picture_params_vc1(
+ vbp_context *pcontext,
+ int index,
+ vbp_picture_data_vc1* pic_data)
+{
+ viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
+ vc1_viddec_parser_t *parser = (vc1_viddec_parser_t *)cxt->codec_data;
+ vc1_metadata_t *seqLayerHeader = &(parser->info.metadata);
+ vc1_PictureLayerHeader *picLayerHeader = &(parser->info.picLayerHeader);
+
+
+ VAPictureParameterBufferVC1 *pic_parms = pic_data->pic_parms;
+
+ /* Then we get the picture header data. Picture type need translation. */
+ pic_parms->forward_reference_picture = VA_INVALID_SURFACE;
+ pic_parms->backward_reference_picture = VA_INVALID_SURFACE;
+ pic_parms->inloop_decoded_picture = VA_INVALID_SURFACE;
+
+ pic_parms->sequence_fields.value = 0;
+ pic_parms->sequence_fields.bits.interlace = seqLayerHeader->INTERLACE;
+ pic_parms->sequence_fields.bits.syncmarker = seqLayerHeader->SYNCMARKER;
+ pic_parms->sequence_fields.bits.overlap = seqLayerHeader->OVERLAP;
+
+ /* header stores (dimension / 2) - 1; convert back to pixels */
+ pic_parms->coded_width = (seqLayerHeader->width + 1) << 1;
+ pic_parms->coded_height = (seqLayerHeader->height + 1) << 1;
+
+ pic_parms->entrypoint_fields.value = 0;
+ pic_parms->entrypoint_fields.bits.closed_entry = seqLayerHeader->CLOSED_ENTRY;
+ pic_parms->entrypoint_fields.bits.broken_link = seqLayerHeader->BROKEN_LINK;
+ pic_parms->entrypoint_fields.bits.loopfilter = seqLayerHeader->LOOPFILTER;
+
+ pic_parms->conditional_overlap_flag = picLayerHeader->CONDOVER;
+ pic_parms->fast_uvmc_flag = seqLayerHeader->FASTUVMC;
+
+ pic_parms->range_mapping_fields.value = 0;
+ pic_parms->range_mapping_fields.bits.luma_flag = seqLayerHeader->RANGE_MAPY_FLAG;
+ pic_parms->range_mapping_fields.bits.luma = seqLayerHeader->RANGE_MAPY;
+ pic_parms->range_mapping_fields.bits.chroma_flag = seqLayerHeader->RANGE_MAPUV_FLAG;
+ pic_parms->range_mapping_fields.bits.chroma = seqLayerHeader->RANGE_MAPUV;
+
+ pic_parms->b_picture_fraction =
+ vbp_map_bfraction(picLayerHeader->BFRACTION_NUM, picLayerHeader->BFRACTION_DEN);
+
+ pic_parms->cbp_table = picLayerHeader->CBPTAB;
+ pic_parms->mb_mode_table = picLayerHeader->MBMODETAB;
+ pic_parms->range_reduction_frame = picLayerHeader->RANGEREDFRM;
+ pic_parms->rounding_control = picLayerHeader->RNDCTRL;
+ pic_parms->post_processing = picLayerHeader->POSTPROC;
+ /* fix this. Add RESPIC to parser. */
+ pic_parms->picture_resolution_index = 0;
+ pic_parms->luma_scale = picLayerHeader->LUMSCALE;
+ pic_parms->luma_shift = picLayerHeader->LUMSHIFT;
+
+ /* translate the parser's PTYPE into the VA picture-type enum */
+ pic_parms->picture_fields.value = 0;
+ switch (picLayerHeader->PTYPE)
+ {
+ case VC1_I_FRAME:
+ pic_parms->picture_fields.bits.picture_type = VC1_PTYPE_I;
+ break;
+
+ case VC1_P_FRAME:
+ pic_parms->picture_fields.bits.picture_type = VC1_PTYPE_P;
+ break;
+
+ case VC1_B_FRAME:
+ pic_parms->picture_fields.bits.picture_type = VC1_PTYPE_B;
+ break;
+
+ case VC1_BI_FRAME:
+ pic_parms->picture_fields.bits.picture_type = VC1_PTYPE_BI;
+ break;
+
+ case VC1_SKIPPED_FRAME:
+ /* skipped frames are flagged on pic_data, not in picture_fields */
+ pic_data->picture_is_skipped = VC1_PTYPE_SKIPPED;
+ break;
+
+ default:
+ /* to do: handle this case */
+ break;
+ }
+ pic_parms->picture_fields.bits.frame_coding_mode = picLayerHeader->FCM;
+ if (0 == seqLayerHeader->PROFILE || 1 == seqLayerHeader->PROFILE)
+ {
+ /* simple or main profile, top field flag is not present, default to 1.*/
+ pic_parms->picture_fields.bits.top_field_first = 1;
+ }
+ else
+ {
+ pic_parms->picture_fields.bits.top_field_first = picLayerHeader->TFF;
+ }
+
+ pic_parms->picture_fields.bits.is_first_field = !(picLayerHeader->CurrField);
+ /* This seems to be set based on the MVMODE and MVMODE2 syntax. */
+ /* This is a hack. Probably will need refining. */
+ if ((VC1_MVMODE_INTENSCOMP == picLayerHeader->MVMODE) ||
+ (VC1_MVMODE_INTENSCOMP == picLayerHeader->MVMODE2))
+ {
+ pic_parms->picture_fields.bits.intensity_compensation = 1;
+ }
+ else
+ {
+ pic_parms->picture_fields.bits.intensity_compensation = picLayerHeader->INTCOMP;
+ }
+
+ /* Lets store the raw-mode BP bits. */
+ pic_parms->raw_coding.value = 0;
+ pic_parms->raw_coding.flags.mv_type_mb = picLayerHeader->raw_MVTYPEMB;
+ pic_parms->raw_coding.flags.direct_mb = picLayerHeader->raw_DIRECTMB;
+ pic_parms->raw_coding.flags.skip_mb = picLayerHeader->raw_SKIPMB;
+ pic_parms->raw_coding.flags.field_tx = picLayerHeader->raw_FIELDTX;
+ pic_parms->raw_coding.flags.forward_mb = picLayerHeader->raw_FORWARDMB;
+ pic_parms->raw_coding.flags.ac_pred = picLayerHeader->raw_ACPRED;
+ pic_parms->raw_coding.flags.overflags = picLayerHeader->raw_OVERFLAGS;
+
+ /* imode 1/0 indicates bitmap presence in Pic Hdr. */
+ /* each plane counts as "present" if it is raw-coded OR imode-coded */
+ pic_parms->bitplane_present.value = 0;
+
+ pic_parms->bitplane_present.flags.bp_mv_type_mb =
+ pic_parms->raw_coding.flags.mv_type_mb ? 1 :
+ (picLayerHeader->MVTYPEMB.imode ? 1: 0);
+
+ pic_parms->bitplane_present.flags.bp_direct_mb =
+ pic_parms->raw_coding.flags.direct_mb ? 1 :
+ (picLayerHeader->DIRECTMB.imode ? 1: 0);
+
+ pic_parms->bitplane_present.flags.bp_skip_mb =
+ pic_parms->raw_coding.flags.skip_mb ? 1 :
+ (picLayerHeader->SKIPMB.imode ? 1: 0);
+
+ pic_parms->bitplane_present.flags.bp_field_tx =
+ pic_parms->raw_coding.flags.field_tx ? 1 :
+ (picLayerHeader->FIELDTX.imode ? 1: 0);
+
+ pic_parms->bitplane_present.flags.bp_forward_mb =
+ pic_parms->raw_coding.flags.forward_mb ? 1 :
+ (picLayerHeader->FORWARDMB.imode ? 1: 0);
+
+ pic_parms->bitplane_present.flags.bp_ac_pred =
+ pic_parms->raw_coding.flags.ac_pred ? 1 :
+ (picLayerHeader->ACPRED.imode ? 1: 0);
+
+ pic_parms->bitplane_present.flags.bp_overflags =
+ pic_parms->raw_coding.flags.overflags ? 1 :
+ (picLayerHeader->OVERFLAGS.imode ? 1: 0);
+
+ pic_parms->reference_fields.value = 0;
+ pic_parms->reference_fields.bits.reference_distance_flag =
+ seqLayerHeader->REFDIST_FLAG;
+
+ pic_parms->reference_fields.bits.reference_distance =
+ seqLayerHeader->REFDIST;
+
+ pic_parms->reference_fields.bits.num_reference_pictures =
+ picLayerHeader->NUMREF;
+
+ pic_parms->reference_fields.bits.reference_field_pic_indicator =
+ picLayerHeader->REFFIELD;
+
+ pic_parms->mv_fields.value = 0;
+ pic_parms->mv_fields.bits.mv_mode = picLayerHeader->MVMODE;
+ pic_parms->mv_fields.bits.mv_mode2 = picLayerHeader->MVMODE2;
+
+ pic_parms->mv_fields.bits.mv_table = picLayerHeader->MVTAB;
+ pic_parms->mv_fields.bits.two_mv_block_pattern_table = picLayerHeader->MV2BPTAB;
+ pic_parms->mv_fields.bits.four_mv_switch = picLayerHeader->MV4SWITCH;
+ pic_parms->mv_fields.bits.four_mv_block_pattern_table = picLayerHeader->MV4BPTAB;
+ pic_parms->mv_fields.bits.extended_mv_flag = seqLayerHeader->EXTENDED_MV;
+ pic_parms->mv_fields.bits.extended_mv_range = picLayerHeader->MVRANGE;
+ pic_parms->mv_fields.bits.extended_dmv_flag = seqLayerHeader->EXTENDED_DMV;
+ pic_parms->mv_fields.bits.extended_dmv_range = picLayerHeader->DMVRANGE;
+
+ pic_parms->pic_quantizer_fields.value = 0;
+ pic_parms->pic_quantizer_fields.bits.dquant = seqLayerHeader->DQUANT;
+ pic_parms->pic_quantizer_fields.bits.quantizer = seqLayerHeader->QUANTIZER;
+ pic_parms->pic_quantizer_fields.bits.half_qp = picLayerHeader->HALFQP;
+ pic_parms->pic_quantizer_fields.bits.pic_quantizer_scale = picLayerHeader->PQUANT;
+ pic_parms->pic_quantizer_fields.bits.pic_quantizer_type = picLayerHeader->UniformQuant;
+ pic_parms->pic_quantizer_fields.bits.dq_frame = picLayerHeader->DQUANTFRM;
+ pic_parms->pic_quantizer_fields.bits.dq_profile = picLayerHeader->DQPROFILE;
+ pic_parms->pic_quantizer_fields.bits.dq_sb_edge = picLayerHeader->DQSBEDGE;
+ pic_parms->pic_quantizer_fields.bits.dq_db_edge = picLayerHeader->DQDBEDGE;
+ pic_parms->pic_quantizer_fields.bits.dq_binary_level = picLayerHeader->DQBILEVEL;
+ pic_parms->pic_quantizer_fields.bits.alt_pic_quantizer = picLayerHeader->ALTPQUANT;
+
+ pic_parms->transform_fields.value = 0;
+ pic_parms->transform_fields.bits.variable_sized_transform_flag =
+ seqLayerHeader->VSTRANSFORM;
+
+ pic_parms->transform_fields.bits.mb_level_transform_type_flag = picLayerHeader->TTMBF;
+ pic_parms->transform_fields.bits.frame_level_transform_type = picLayerHeader->TTFRM;
+
+ /* TRANSACFRM/TRANSACFRM2 are 1-based when present; VA wants 0-based */
+ pic_parms->transform_fields.bits.transform_ac_codingset_idx1 =
+ (picLayerHeader->TRANSACFRM > 0) ? picLayerHeader->TRANSACFRM - 1 : 0;
+
+ pic_parms->transform_fields.bits.transform_ac_codingset_idx2 =
+ (picLayerHeader->TRANSACFRM2 > 0) ? picLayerHeader->TRANSACFRM2 - 1 : 0;
+
+ pic_parms->transform_fields.bits.intra_transform_dc_table = picLayerHeader->TRANSDCTAB;
+}
+
+
+/* Appends one slice entry for segment `index` to pic_data->slc_data and
+ * increments num_slices. The slice payload is referenced in place
+ * (buffer_addr points into the sample buffer, no copy); `byte`/`bit` from
+ * viddec_pm_get_au_pos give the parser's current position, used as the
+ * offset past the already-consumed header. Caller must check num_slices
+ * against MAX_NUM_SLICES before calling. */
+static void vbp_pack_slice_data_vc1(
+ vbp_context *pcontext,
+ int index,
+ vbp_picture_data_vc1* pic_data)
+{
+ viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
+ uint32 slice_size = cxt->list.data[index].edpos - cxt->list.data[index].stpos;
+ uint32 bit;
+ uint32 byte;
+ uint8 is_emul;
+ viddec_pm_get_au_pos(cxt, &bit, &byte, &is_emul);
+
+ vbp_slice_data_vc1 *slc_data = &(pic_data->slc_data[pic_data->num_slices]);
+ VASliceParameterBufferVC1 *slc_parms = &(slc_data->slc_parms);
+
+ /*uint32 data_offset = byte - cxt->list.data[index].stpos;*/
+
+ slc_data->buffer_addr = cxt->parse_cubby.buf + cxt->list.data[index].stpos;
+ slc_data->slice_size = slice_size - byte;
+ slc_data->slice_offset = byte;
+
+ slc_parms->slice_data_size = slc_data->slice_size;
+ slc_parms->slice_data_offset = 0;
+
+ /* fix this. we need to be able to handle partial slices. */
+ slc_parms->slice_data_flag = VA_SLICE_DATA_FLAG_ALL;
+
+ /* bit offset of the first macroblock within the slice data */
+ slc_parms->macroblock_offset = bit;
+
+ /* fix this. we need o get the slice_vertical_position from the code */
+ slc_parms->slice_vertical_position = pic_data->num_slices;
+
+ pic_data->num_slices++;
+}
+
+/**
+ * process parsing result
+ * Called once per parsed segment (`index` into cxt->list). Frame/field
+ * start codes open a new picture (picture params + bitplanes packed);
+ * every frame/field/slice segment then gets a slice entry. Other start
+ * codes (sequence, entry point, end of sequence) are ignored here.
+ * Returns VBP_OK, VBP_DATA when picture/slice limits are exceeded, or the
+ * bitplane-packing error.
+ */
+uint32_t vbp_process_parsing_result_vc1(vbp_context *pcontext, int index)
+{
+ viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
+ uint32 error = VBP_OK;
+
+ vc1_viddec_parser_t *parser = (vc1_viddec_parser_t *)cxt->codec_data;
+ if (parser->start_code != VC1_SC_FRM && parser->start_code != VC1_SC_FLD &&
+ parser->start_code != VC1_SC_SLC)
+ {
+ /* only handle frame data, field data and slice data here
+ */
+ return VBP_OK;
+ }
+ vbp_data_vc1 *query_data = (vbp_data_vc1 *)pcontext->query_data;
+
+ /* a frame or field start code opens a new picture */
+ if (parser->start_code == VC1_SC_FRM || parser->start_code == VC1_SC_FLD)
+ {
+ query_data->num_pictures++;
+ }
+
+ if (query_data->num_pictures > MAX_NUM_PICTURES)
+ {
+ ETRACE("Num of pictures per sample buffer exceeds the limit (%d).", MAX_NUM_PICTURES);
+ return VBP_DATA;
+ }
+
+ if (query_data->num_pictures == 0)
+ {
+ /* a slice arrived before any frame/field start code */
+ ETRACE("Unexpected num of pictures.");
+ return VBP_DATA;
+ }
+
+ /* start packing data */
+ int picture_index = query_data->num_pictures - 1;
+ vbp_picture_data_vc1* pic_data = &(query_data->pic_data[picture_index]);
+
+ if (parser->start_code == VC1_SC_FRM || parser->start_code == VC1_SC_FLD)
+ {
+ /* setup picture parameter first*/
+ vbp_pack_picture_params_vc1(pcontext, index, pic_data);
+
+ /* setup bitplane after setting up picture parameter (so that bitplane_present is updated) */
+ error = vbp_pack_bitplanes_vc1(pcontext, index, pic_data);
+ if (VBP_OK != error)
+ {
+ ETRACE("Failed to pack bitplane.");
+ return error;
+ }
+
+ }
+
+ /* Always pack slice parameter. The first macroblock in the picture CANNOT
+ * be preceded by a slice header, so we will have first slice parsed always.
+ *
+ */
+
+ if (pic_data->num_slices >= MAX_NUM_SLICES)
+ {
+ ETRACE("Num of slices exceeds the limit (%d).", MAX_NUM_SLICES);
+ return VBP_DATA;
+ }
+
+ /* set up slice parameter */
+ vbp_pack_slice_data_vc1(pcontext, index, pic_data);
+
+
+ return VBP_OK;
+}
diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.h b/mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.h
new file mode 100644
index 0000000..510e16c
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.h
@@ -0,0 +1,54 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+#ifndef VBP_VC1_PARSER_H
+#define VBP_VC1_PARSER_H
+
+
+/*
+ * setup parser's entry pointer
+ */
+uint32 vbp_init_parser_entries_vc1(vbp_context *pcontext);
+
+/*
+ * allocate query data structure - vbp_vc1_data
+ */
+uint32 vbp_allocate_query_data_vc1(vbp_context *pcontext);
+
+/*
+ * free query data structure
+ */
+uint32 vbp_free_query_data_vc1(vbp_context *pcontext);
+
+/*
+ * parse bitstream configuration data
+ */
+uint32 vbp_parse_init_data_vc1(vbp_context *pcontext);
+
+/*
+ * parse bitstream start code and fill the viddec_input_buffer_t list.
+ * WMV has no start code so the whole buffer will be treated as a single frame.
+ * For VC1 progressive, if start code is not found, the whole buffer will be treated as a
+ * single frame as well.
+ * For VC1 interlace, the first field is not start code prefixed, but the second field
+ * is always start code prefixed.
+ */
+uint32 vbp_parse_start_code_vc1(vbp_context *pcontext);
+
+/*
+ * process parsing result
+ */
+uint32 vbp_process_parsing_result_vc1(vbp_context *pcontext, int list_index);
+
+/*
+ * populate query data structure
+ */
+uint32 vbp_populate_query_data_vc1(vbp_context *pcontext);
+
+
+#endif /*VBP_VC1_PARSER_H*/
diff --git a/mix_vbp/viddec_fw/fw/parser/viddec_emit.c b/mix_vbp/viddec_fw/fw/parser/viddec_emit.c
new file mode 100644
index 0000000..f6e6a8a
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/viddec_emit.c
@@ -0,0 +1,78 @@
+#include "viddec_emitter.h"
+#include "viddec_fw_workload.h"
+#include "viddec_fw_debug.h"
+
+int32_t viddec_emit_flush_current_wkld(viddec_emitter *emit)
+{
+ if(emit->cur.data != NULL)
+ {
+ emit->cur.data->num_items = emit->cur.num_items;
+ }
+ if(emit->next.data != NULL)
+ {
+ emit->next.data->num_items = emit->next.num_items;
+ }
+ emit->cur.num_items = emit->next.num_items;
+ emit->next.num_items = 0;
+ if(emit->cur.data != NULL)
+ {
+ emit->cur.data->result = emit->cur.result;
+ }
+ if(emit->next.data != NULL)
+ {
+ emit->next.data->result = emit->next.result;
+ }
+ emit->cur.result = emit->next.result;
+ emit->next.result = 0;
+ return 1;
+}
+
+int32_t viddec_emit_append(viddec_emitter_wkld *cxt, viddec_workload_item_t *item)
+{
+ int32_t ret =0;
+ if((cxt->num_items < cxt->max_items) && (cxt->data != NULL))
+ {
+ cxt->data->item[cxt->num_items] = *item;
+ cxt->num_items++;
+ ret = 1;
+ CDEB(0, "%s: item(%02d) = [%08x %08x %08x %08x]\n",__FUNCTION__, cxt->num_items - 1, item->vwi_type, item->vwi_payload[0], item->vwi_payload[1], item->vwi_payload[2]);
+ }
+ else
+ {
+ cxt->result |= (VIDDEC_FW_WORKLOAD_ERR_ITEMS_OVERFLOW | VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE);
+ WRITE_SVEN(SVEN_MODULE_EVENT_GV_FW_FATAL_WKLD_OVERLFOW, (int)item->vwi_type, (int)(cxt->data), 0, 0, 0, 0);
+ }
+ return ret;
+}
+
+int32_t viddec_emit_contr_tag(viddec_emitter *emit, viddec_input_buffer_t *ibuf, uint8_t incomplete, uint32_t using_next)
+{
+ viddec_workload_item_t item;
+ viddec_emitter_wkld *cur_wkld;
+
+ cur_wkld = (using_next == 0)? &(emit->cur):&(emit->next);
+
+ if(!incomplete)
+ item.vwi_type = VIDDEC_WORKLOAD_IBUF_DONE;
+ else
+ item.vwi_type = VIDDEC_WORKLOAD_IBUF_CONTINUED;
+ item.tag.tag_phys_addr = ibuf->phys;
+ item.tag.tag_phys_len = ibuf->len;
+ item.tag.tag_value = ibuf->id;
+
+ return viddec_emit_append(cur_wkld, &item);
+}
+
+int32_t viddec_emit_assoc_tag(viddec_emitter *emit, uint32_t id, uint32_t using_next)
+{
+ viddec_workload_item_t item;
+ viddec_emitter_wkld *cur_wkld;
+
+ cur_wkld = (using_next == false)? &(emit->cur):&(emit->next);
+ item.vwi_type = VIDDEC_WORKLOAD_TAG;
+ item.tag.tag_phys_addr = -1;
+ item.tag.tag_phys_len = -1;
+ item.tag.tag_value = id;
+ return viddec_emit_append(cur_wkld, &item);
+}
+
diff --git a/mix_vbp/viddec_fw/fw/parser/viddec_intr.c b/mix_vbp/viddec_fw/fw/parser/viddec_intr.c
new file mode 100644
index 0000000..fa6c1f2
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/viddec_intr.c
@@ -0,0 +1,56 @@
+#include "fw_pvt.h"
+#include "viddec_fw_parser_ipclib_config.h"
+#include "viddec_fw_debug.h"
+
+extern uint32_t timer;
+
+void enable_intr(void)
+{
+ TRAPS_ENABLE;
+ TRAPS_INT_ENABLE;
+ //reg_write(INT_REG, 0);
+}
+
+/*------------------------------------------------------------------------------
+ * Function: mfd_trap_handler
+ * This is the FW's ISR, Currently we don't support any INT as we are running parsers only on GV which
+ * are pure SW modules.
+ *------------------------------------------------------------------------------
+ */
+void mfd_trap_handler()
+{
+ uint32_t reg=0, temp=0;
+ temp = reg_read(INT_STATUS);
+ //DEBUG_WRITE(0xff, temp, timer, 0, 0, 0);
+ if(temp & INT_WDOG_ENABLE)
+ {
+ timer++;
+ set_wdog(VIDDEC_WATCHDOG_COUNTER_MAX);
+ reg = reg_read(INT_STATUS);
+ }
+ if(temp & 0x4)
+ {
+
+ temp = temp & (~0x4);
+ reg_write(INT_REG, temp);
+ //val = reg_read(DMA_CONTROL_STATUS);
+ //val |=DMA_CTRL_STATUS_DONE;
+ //reg_write(DMA_CONTROL_STATUS, val);
+ //reg = reg_read(INT_STATUS);
+ }
+ if(temp & 0x2)
+ {
+
+ temp = temp & (~0x2);
+ reg_write(INT_REG, temp);
+ }
+
+ if(temp & 0x1)
+ {
+ temp = temp & (~0x1);
+ reg_write(INT_REG, temp);
+ }
+ //DEBUG_WRITE(0xff, timer, temp, reg, 0, val);
+ __asm__("nop");
+
+}
diff --git a/mix_vbp/viddec_fw/fw/parser/viddec_parse_sc.c b/mix_vbp/viddec_fw/fw/parser/viddec_parse_sc.c
new file mode 100644
index 0000000..85b6b8e
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/viddec_parse_sc.c
@@ -0,0 +1,119 @@
+#include "viddec_pm_parse.h"
+#include "viddec_fw_debug.h"
+
+#define FIRST_STARTCODE_BYTE 0x00
+#define SECOND_STARTCODE_BYTE 0x00
+#define THIRD_STARTCODE_BYTE 0x01
+
+/* BIG ENDIAN: Must be the second and fourth byte of the bytestream for this to work */
+/* LITTLE ENDIAN: Must be the second and fourth byte of the bytestream for this to work */
+/* these are little-endian defines */
+#define SC_BYTE_MASK0 0x00ff0000 /* little-endian */
+#define SC_BYTE_MASK1 0x000000ff /* little-endian */
+
+/* Parse for Sc code of pattern 0x00 0x00 0xXX in the current buffer. Returns either sc found or success.
+ The context is updated with current phase and sc_code position in the buffer.
+*/
+uint32_t viddec_parse_sc(void *in, void *pcxt, void *sc_state)
+{
+ uint8_t *ptr;
+ uint32_t size;
+ uint32_t data_left=0, phase = 0, ret = 0;
+ viddec_sc_parse_cubby_cxt_t *cxt;
+ /* What is phase?: phase is a value between [0-4], we keep track of consecutive '0's with this.
+ Any time a '0' is found it's incremented by 1 (up to 2) and reset to '0' if a zero is not found.
+ if a 0xXX code is found and the current phase is 2, it's changed to 3 which means we found the pattern
+ we are looking for. It's incremented to 4 once we see a byte after this pattern */
+ cxt = ( viddec_sc_parse_cubby_cxt_t *)in;
+ size = 0;
+ data_left = cxt->size;
+ ptr = cxt->buf;
+ phase = cxt->phase;
+ cxt->sc_end_pos = -1;
+ pcxt=pcxt;
+
+ /* parse until there is more data and start code not found */
+ while((data_left > 0) &&(phase < 3))
+ {
+ /* Check if we are byte aligned & phase=0; if that's the case we can check
+ a word at a time instead of a byte */
+ if(((((uint32_t)ptr) & 0x3) == 0) && (phase == 0))
+ {
+ while(data_left > 3)
+ {
+ uint32_t data;
+ char mask1 = 0, mask2=0;
+
+ data = *((uint32_t *)ptr);
+#ifndef MFDBIGENDIAN
+ data = SWAP_WORD(data);
+#endif
+ mask1 = (FIRST_STARTCODE_BYTE != (data & SC_BYTE_MASK0));
+ mask2 = (FIRST_STARTCODE_BYTE != (data & SC_BYTE_MASK1));
+ /* If second byte and fourth byte are not zero's then we cannot have a start code here as we need
+ two consecutive zero bytes for a start code pattern */
+ if(mask1 && mask2)
+ {/* Success so skip 4 bytes and start over */
+ ptr+=4;size+=4;data_left-=4;
+ continue;
+ }
+ else
+ {
+ break;
+ }
+ }
+ }
+
+ /* At this point either data is not on a word boundary or phase > 0 or On a word boundary but we detected
+ two zero bytes in the word so we look one byte at a time*/
+ if(data_left > 0)
+ {
+ if(*ptr == FIRST_STARTCODE_BYTE)
+ {/* Phase can be 3 only if third start code byte is found */
+ phase++;
+ ptr++;size++;data_left--;
+ if(phase > 2)
+ {
+ phase = 2;
+
+ if ( (((uint32_t)ptr) & 0x3) == 0 )
+ {
+ while( data_left > 3 )
+ {
+ if(*((uint32_t *)ptr) != 0)
+ {
+ break;
+ }
+ ptr+=4;size+=4;data_left-=4;
+ }
+ }
+ }
+ }
+ else
+ {
+ if((*ptr == THIRD_STARTCODE_BYTE) && (phase == 2))
+ {/* Match for start code so update context with byte position */
+ phase = 3;
+ cxt->sc_end_pos = size;
+ }
+ else
+ {
+ phase = 0;
+ }
+ ptr++;size++;data_left--;
+ }
+ }
+ }
+ if((data_left > 0) && (phase == 3))
+ {
+ viddec_sc_prefix_state_t *state = (viddec_sc_prefix_state_t *)sc_state;
+ cxt->sc_end_pos++;
+ state->next_sc = cxt->buf[cxt->sc_end_pos];
+ state->second_scprfx_length = 3;
+ phase++;
+ ret = 1;
+ }
+ cxt->phase = phase;
+ /* Return SC found only if phase is 4, else always success */
+ return ret;
+}
diff --git a/mix_vbp/viddec_fw/fw/parser/viddec_parse_sc_fast_loop.c b/mix_vbp/viddec_fw/fw/parser/viddec_parse_sc_fast_loop.c
new file mode 100644
index 0000000..6f00d27
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/viddec_parse_sc_fast_loop.c
@@ -0,0 +1,190 @@
+#include "viddec_pm_parse.h"
+#include "viddec_fw_debug.h"
+
+#define FIRST_STARTCODE_BYTE 0x00
+#define SECOND_STARTCODE_BYTE 0x00
+#define THIRD_STARTCODE_BYTE 0x01
+
+/* BIG ENDIAN: Must be the second and fourth byte of the bytestream for this to work */
+/* LITTLE ENDIAN: Must be the second and fourth byte of the bytestream for this to work */
+/* these are little-endian defines */
+#define SC_BYTE_MASK0 0x00ff0000 /* little-endian */
+#define SC_BYTE_MASK1 0x000000ff /* little-endian */
+
+// This is the 2.25 clocks per byte loop
+#define USE_2p25_CLOCK_PER_BYTE_LOOP
+
+#ifdef USE_2p25_CLOCK_PER_BYTE_LOOP
+static int parser_find_next_startcode(
+ const unsigned char *buf,
+ int i,
+ int len,
+ unsigned int *pphase )
+{
+ int sc_pos = -1;
+ int in_slow_loop;
+ register unsigned int scphase;
+
+ scphase = *pphase;
+
+ in_slow_loop = 1;
+ if ( (0 == (0x3 & i)) && /* dword aligned */
+ (0 == scphase) && /* no "potential" SC detected */
+ ((len - i) >= 4) ) /* more than four bytes left */
+ {
+ in_slow_loop = 0; /* go to fast loop */
+ }
+
+ while( i < len )
+ {
+ if ( in_slow_loop )
+ {
+/* ------- slow SC Detect Loop, used when 0 detected in stream --------*/
+sc_detect_slow_loop:
+
+ while ( i < len )
+ {
+ unsigned char ch;
+
+ ch = buf[i];
+
+ /* searching for a zero, ignore phase for now */
+ if ( FIRST_STARTCODE_BYTE == ch )
+ {
+ /* if we've already got two zeros, hold at phase == 2 */
+ if ( scphase < 2 )
+ {
+ scphase++;
+ }
+ else if ( scphase > 2 )
+ {
+ /* RARE Valid Condition, SC == 00 00 01 00 */
+ /* if we've already got two zeros hold at phase == 2
+ * we also enter here if we're at phase 3
+ * meaning we've got 00 00 01 00 which is a valid SC
+ */
+ /* 00 00 01 00 */
+ sc_pos = i;
+ *pphase = scphase;
+ return(sc_pos);
+ }
+ else /* implies scphase == 2, holding while receiving 0's */
+ {
+ }
+ }
+ else if ( THIRD_STARTCODE_BYTE == ch )
+ {
+ if ( 2 == scphase )
+ {
+ /* next byte is the SC */
+ scphase++;
+ }
+ else if ( scphase < 2 )
+ {
+ scphase = 0; /* start over */
+ }
+ else if ( scphase > 2 )
+ {
+ /* RARE Valid Condition, SC == 00 00 01 01 */
+ sc_pos = i;
+ *pphase = scphase;
+ return(sc_pos);
+ }
+ }
+ else if ( 3 == scphase )
+ {
+ /* Valid Condition, SC == 00 00 01 xx */
+ sc_pos = i;
+ *pphase = scphase;
+ return(sc_pos);
+ }
+ else
+ {
+ scphase = 0;
+
+ if ( (3 == (0x3 & i)) && /* dword aligned? */
+ ((len - i) > 4) ) /* more than four bytes left */
+ {
+ i++;
+ in_slow_loop = 0; /* go to fast loop */
+
+ /* WARNING: Performance GoTo */
+ goto sc_detect_fast_loop;
+ }
+ }
+
+ i++;
+ }
+ }
+ else /* we're in the fast loop */
+ {
+/* ------- FAST SC Detect Loop, used to skip at high bandwidth --------*/
+sc_detect_fast_loop:
+
+ /* FAST start-code scanning loop (Krebs Algorithm) */
+ while ( i <= (len - 4) )
+ {
+ register unsigned int dw;
+
+ dw = *((unsigned int *)&buf[i]);
+#ifndef MFDBIGENDIAN
+ dw = SWAP_WORD(dw);
+#endif
+ if ( 0 != (dw & SC_BYTE_MASK0) )
+ {
+ if ( 0 != (dw & SC_BYTE_MASK1) )
+ {
+ /* most common code path */
+ i += 4;
+ continue;
+ }
+ }
+
+ break;
+ }
+ /* potential SC detected or at end of loop */
+ in_slow_loop = 1;
+
+ /* WARNING: performance goto */
+ goto sc_detect_slow_loop;
+ }
+ }
+
+ *pphase = scphase;
+ return(sc_pos);
+}
+unsigned int viddec_parse_sc(void *in, void *pcxt)
+{
+ viddec_sc_parse_cubby_cxt_t *cxt;
+ int boff;
+ int retval=0;
+
+ cxt = (viddec_sc_parse_cubby_cxt_t *)in;
+
+ /* get to four-byte alignment */
+ boff = (int)cxt->buf & 0x3;
+
+ cxt->sc_end_pos = parser_find_next_startcode(
+ (const unsigned char *)cxt->buf - boff,
+ boff,
+ cxt->size + boff,
+ &cxt->phase );
+
+ if ( (int)cxt->sc_end_pos >= 0 )
+ {
+ cxt->sc_end_pos -= boff;
+
+ /* have not fully finished the buffer */
+ if ( cxt->sc_end_pos < cxt->size )
+ cxt->phase++;
+
+ retval = 1;
+ }
+ else
+ {
+ /* No startcode found */
+ }
+
+ return(retval);
+}
+#endif
diff --git a/mix_vbp/viddec_fw/fw/parser/viddec_parse_sc_stub.c b/mix_vbp/viddec_fw/fw/parser/viddec_parse_sc_stub.c
new file mode 100644
index 0000000..5aa2e9c
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/viddec_parse_sc_stub.c
@@ -0,0 +1,6 @@
+#include <stdint.h>
+
+uint32_t cp_using_dma(uint32_t ddr_addr, uint32_t local_addr, uint32_t size, char to_ddr, char swap)
+{
+ return (0);
+}
diff --git a/mix_vbp/viddec_fw/fw/parser/viddec_pm.c b/mix_vbp/viddec_fw/fw/parser/viddec_pm.c
new file mode 100644
index 0000000..ffcff11
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/viddec_pm.c
@@ -0,0 +1,554 @@
+#include "viddec_pm.h"
+#include "viddec_fw_debug.h"
+#include "viddec_fw_common_defs.h"
+#include "viddec_pm_tags.h"
+#include "viddec_parser_ops.h"
+#include "viddec_vc1_parse.h"
+#include "viddec_mp4_parse.h"
+#include "viddec_mpeg2_parse.h"
+#include "viddec_h264_parse.h"
+/*
+ Overview of Parser manager:
+ Parser manager is the glue between Kernel(main.c) and actual codecs. We abstract common functionality as much as we can
+ in this module. The parser Manager context allocates memory for Parsers. At any point in time there is only one active stream.
+ During open stream we setup all necessary initialisation for the codec we are handling. The parser manager context is
+ stored on DDR when the current stream gets swapped out by the kernel. When the next stream comes in it has its own
+ version of parser manager.
+ Parser manager is responsible for providing information on when it is a good time to swap a stream.
+ High level algorithm of parser Manager once a stream is opened and active(RET's are returns to Kernel):
+
+ 1. create a list data structure to hold any incoming ES descriptors.
+ 2. Check to see if any of the ES buffers Desc in current list has data to be processed. If not request kernel(RET) for a buffer.
+ 3. If data is present parse until a scprefix+sc is found. If not goto step2.
+ 4. If startcode detected update list state to make ES data look like Linear buffer.
+ 5. Setup required state to provide getbits interface for codecs to access bit stream maximum 32bits at a time.
+ 6. Setup Current & Next workloads provided by Kernel.
+ 7. Call the codec to parse the data we collected between start codes.
+ 8. Query to see if we parsed frame worth of data.
+ 9. Do necessary TAG association and remove used buffers from List.
+ 10. Send information to kernel on whether workload is done or Not.(RET). When kernel reschedules start from step2.
+
+ Kernel can swap current stream at RET points described above.
+
+ Other additional things supported:
+ - Generic start code detect function which is same for most of codecs.
+ - Memory Management.
+ - Flush of stream.
+ - Emulation prevention.
+ - Interface to emit necessary tags for codec specific types.
+*/
+
+
+/* check to see if codec needs emulation prevention */
+#define EMUL_REQD(codec) ((codec == MFD_STREAM_FORMAT_VC1) || (codec_type == MFD_STREAM_FORMAT_H264) ? 1: 0)
+
+#ifdef RTL_SIMULATION
+extern void output_omar_wires( unsigned int value );
+#else
+#define output_omar_wires(x)
+#endif
+
+/* Place to store Function pointers for all supported interfaces for each codec */
+viddec_parser_ops_t parser_ops[MFD_STREAM_FORMAT_MAX];
+
+
+
+/* we need to define as external function so that for host mode we can use the same code without
+ modifications by overloading dma function with a copy function
+*/
+extern uint32_t cp_using_dma(uint32_t ddr_addr, uint32_t local_addr, uint32_t size, char to_ddr, char swap);
+
+void viddec_pm_init_ops()
+{
+ viddec_vc1_get_ops(&parser_ops[MFD_STREAM_FORMAT_VC1]);
+ parser_ops[MFD_STREAM_FORMAT_VC1].parse_sc = viddec_parse_sc;
+ parser_ops[MFD_STREAM_FORMAT_VC1].gen_contrib_tags = viddec_pm_generic_generate_contribution_tags;
+ parser_ops[MFD_STREAM_FORMAT_VC1].gen_assoc_tags = viddec_generic_add_association_tags;
+
+ viddec_mpeg2_get_ops(&parser_ops[MFD_STREAM_FORMAT_MPEG]);
+ parser_ops[MFD_STREAM_FORMAT_MPEG].parse_sc = viddec_parse_sc;
+ parser_ops[MFD_STREAM_FORMAT_MPEG].gen_contrib_tags = viddec_pm_generic_generate_contribution_tags;
+ parser_ops[MFD_STREAM_FORMAT_MPEG].gen_assoc_tags = viddec_mpeg2_add_association_tags;
+
+ viddec_h264_get_ops(&parser_ops[MFD_STREAM_FORMAT_H264]);
+ parser_ops[MFD_STREAM_FORMAT_H264].parse_sc = viddec_parse_sc;
+ parser_ops[MFD_STREAM_FORMAT_H264].gen_contrib_tags = viddec_pm_lateframe_generate_contribution_tags;
+ parser_ops[MFD_STREAM_FORMAT_H264].gen_assoc_tags = viddec_h264_add_association_tags;
+
+ viddec_mp4_get_ops(&parser_ops[MFD_STREAM_FORMAT_MPEG42]);
+ parser_ops[MFD_STREAM_FORMAT_MPEG42].gen_contrib_tags = viddec_pm_generic_generate_contribution_tags;
+ parser_ops[MFD_STREAM_FORMAT_MPEG42].gen_assoc_tags = viddec_generic_add_association_tags;
+}
+
+/*
+ Returns size of persistent DDR memory required for the codec. If the required memory is less than max allocated
+ scratch memory in FW we always give the max scratch size.
+*/
+uint32_t viddec_pm_get_parser_sizes(uint32_t codec_type, viddec_parser_memory_sizes_t *size)
+{
+ parser_ops[codec_type].get_cxt_size(size);
+ if(size->context_size > MAX_CODEC_CXT_SIZE)
+ {
+ DEB("ERROR: size(%d) of context for codec=%d is greater than max=%d\n",size->context_size,codec_type,MAX_CODEC_CXT_SIZE);
+ }
+ size->context_size = sizeof(viddec_pm_cxt_t);
+ return 1;
+}
+
+/*
+ Initialize the scratch memory allocated to the stream based on clean. if clean is true initialize to
+ start state, if not then preserve stream information.
+*/
+void viddec_pm_init_context(viddec_pm_cxt_t *cxt, uint32_t codec_type, uint32_t *persist_mem, uint32_t clean)
+{
+ int i;
+
+ for(i=0; i<MAX_IBUFS_PER_SC; i++)
+ {
+ cxt->pending_tags.pending_tags[i] = INVALID_ENTRY;
+ }
+ cxt->frame_start_found = false;
+ cxt->found_fm_st_in_current_au = false;
+ cxt->late_frame_detect = (MFD_STREAM_FORMAT_H264 == codec_type) ? true:false;
+ cxt->pending_tags.first_buf_aligned = cxt->pending_tags.using_next = cxt->pending_tags.frame_done =false;
+ cxt->next_workload_error_eos = VIDDEC_FW_WORKLOAD_ERR_FLUSHED_FRAME | VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE;
+ viddec_pm_utils_list_init(&(cxt->list));
+ cxt->cur_buf.list_index = -1;
+ cxt->parse_cubby.phase=0;
+ parser_ops[codec_type].init((void *)&(cxt->codec_data[0]), persist_mem, !clean);
+ if(clean)
+ {
+ cxt->pending_inband_tags = 0;
+ }
+ else
+ {
+ /* TODO: Enable this once codecs support this function */
+ //parser_ops[codec_type].flush_preserve((void *)&(cxt->codec_data[0]), persist_mem);
+ }
+
+}
+
+void viddec_pm_update_time(viddec_pm_cxt_t *cxt, uint32_t time)
+{
+ viddec_emit_time(&(cxt->emitter), time);
+}
+
+/* add an esbuffer to list */
+static inline uint32_t viddec_pm_add_es_buf_to_list(viddec_pm_cxt_t *cxt, viddec_input_buffer_t *es_buf)
+{
+ uint32_t val , ret = PM_OVERFLOW;
+
+ val = viddec_pm_utils_list_addbuf(&(cxt->list), es_buf);
+ if(val == 1) ret = PM_SUCCESS;
+ return ret;
+}
+
+static inline uint32_t viddec_pm_check_inband_messages(viddec_pm_sc_cur_buf_t *cur_buf, uint32_t *type)
+{
+ uint32_t ret=false;
+ if(cur_buf->cur_es->flags != 0)
+ {
+ /* update offset to point to next position for loading data */
+ cur_buf->cur_offset +=(cur_buf->cur_size);
+ cur_buf->cur_size = 0;
+ switch(cur_buf->cur_es->flags)
+ {
+ case VIDDEC_STREAM_EOS:
+ {
+ *type = PM_EOS;
+ }
+ break;
+ case VIDDEC_STREAM_DISCONTINUITY:
+ {
+ *type = PM_DISCONTINUITY;
+ }
+ default:
+ break;
+ }
+ ret =true;
+ }
+ return ret;
+}
+
+/* creates an ibuf from the current position in list. Fills sc_parse_cubby_cxt */
+uint32_t viddec_pm_create_ibuf(viddec_pm_cxt_t *cxt)
+{
+ uint32_t ret = PM_NO_DATA;
+#ifndef VBP
+ viddec_sc_parse_cubby_cxt_t *cubby = &(cxt->parse_cubby);
+#endif
+ viddec_pm_sc_cur_buf_t *cur_buf = &(cxt->cur_buf);
+ viddec_pm_utils_list_t *list = &(cxt->list);
+
+ /* Step1: check if list is Empty, If yes return No data */
+ if(list->num_items > 0)
+ {
+ /* Step 2: Check to see If current index into list is empty & we have data in list,
+ if so increment index and initialise it*/
+ if(cur_buf->list_index == -1)
+ {
+ if(viddec_pm_utils_list_getbyte_position(list,
+ list->first_scprfx_length+1,
+ (uint32_t *)&(cur_buf->list_index),
+ &(cur_buf->cur_offset)) != 1)
+ {/* This returns the offset and index from where we have to start for sc detect */
+ cur_buf->cur_size = 0;
+ cur_buf->cur_es = &(list->sc_ibuf[cur_buf->list_index]);
+ }
+ else
+ {
+ return PM_NO_DATA;
+ }
+ }
+
+ /* Step3: If we are done with current buffer then try to go to next item in list */
+ if((cur_buf->cur_offset + cur_buf->cur_size) >= cur_buf->cur_es->len)
+ {
+ /* Need to handle In band messages before going to next buffer */
+ //if(viddec_pm_check_inband_messages(cur_buf))
+ if(viddec_pm_check_inband_messages(cur_buf, &ret))
+ {
+ return ret;
+ }
+ /* If no items in list after the current buffer return no data */
+ if((uint32_t)(cur_buf->list_index + 1) >= list->num_items)
+ {
+ return PM_NO_DATA;
+ }
+ cur_buf->list_index++;
+ cur_buf->cur_es = &(list->sc_ibuf[cur_buf->list_index]);
+ cur_buf->cur_offset = cur_buf->cur_size = 0;
+ }
+ /* Step4: Fill the cubby with data to send to parser sc code function */
+ {
+ int32_t data_left;
+ /* data left is the leftout size in current ES buffer */
+ data_left = cur_buf->cur_es->len - (cur_buf->cur_offset + cur_buf->cur_size);
+
+ /* update offset to point to next position for loading data */
+ cur_buf->cur_offset +=(cur_buf->cur_size);
+
+#ifndef VBP
+ /* Load maximum of array size */
+ if(data_left >= SC_DETECT_BUF_SIZE)
+ {
+ data_left = SC_DETECT_BUF_SIZE;
+ }
+ /* can be zero if we have zero sized buffers in our list.EX:NEW segment */
+ if(data_left > 0)
+ {/* do a copy using Linear Dma */
+ uint32_t size , ddr_addr = 0, ddr_mask=0;
+ /* get ddr adress of current offset in ES buffer */
+#ifdef HOST_ONLY
+ ddr_addr = cur_buf->cur_offset + (uint32_t)cur_buf->cur_es->buf;
+#else
+ ddr_addr = cur_buf->cur_offset + cur_buf->cur_es->phys;
+#endif
+ ddr_mask = (ddr_addr & 3);
+ ddr_addr = ddr_addr & ~3;
+ /* return from this function can be more bytes based on input buf alignment.
+ The address for local memory we are sending is on a DWORD boundary so it should be safe.
+ */
+
+ size = cp_using_dma(ddr_addr, (uint32_t)&(cxt->scbuf[0]), data_left+ddr_mask, 0,1);//false, true);
+ cubby->size = data_left;
+
+ /* point to the actual memory location which has the data (skip alignment bytes) */
+ cubby->buf = &(cxt->scbuf[ddr_mask]);
+ cur_buf->cur_size = data_left;
+ ret = PM_SUCCESS;
+ }
+ else
+ {
+ /* If we completely consumed this buffer or this is a zero sized buffer we want to check inband messages */
+ //if(viddec_pm_check_inband_messages(cur_buf))
+ if(viddec_pm_check_inband_messages(cur_buf, &ret))
+ {
+ return ret;
+ }
+ }
+#else
+ ret = PM_SUCCESS;
+#endif
+ }
+ }
+
+ return ret;
+}
+
+/*
+ Read data from esbuffer list and parse for start codes or EOS. If we consumed all the data we return no data left.
+*/
+static inline uint32_t viddec_pm_parse_for_sccode(viddec_pm_cxt_t *cxt, viddec_parser_ops_t *func)
+{
+ uint32_t ret = PM_NO_DATA;
+ uint32_t sc_boundary_found = 0;
+
+ while(!sc_boundary_found)
+ {
+ /* Create an buffer from list to parse */
+ ret = viddec_pm_create_ibuf(cxt);
+ switch(ret)
+ {
+ case PM_NO_DATA:
+ {/* No data in esbuffer list for parsing sc */
+ sc_boundary_found = 1;
+ }
+ break;
+ case PM_EOS:
+ case PM_DISCONTINUITY:
+ {
+ sc_boundary_found = 1;
+ cxt->list.end_offset = cxt->cur_buf.cur_offset+1;
+ cxt->parse_cubby.phase = 0;
+ /* we didn't find a start code so second start code length would be 0 */
+ cxt->sc_prefix_info.second_scprfx_length = 0;
+ //cxt->sc_prefix_info.next_sc = VIDDEC_PARSE_EOS;
+ if(ret == PM_EOS)
+ {
+ cxt->sc_prefix_info.next_sc = VIDDEC_PARSE_EOS;
+ }
+ if(ret == PM_DISCONTINUITY)
+ {
+ cxt->sc_prefix_info.next_sc = VIDDEC_PARSE_DISCONTINUITY;
+ }
+ }
+ break;
+ case PM_SUCCESS:
+ default:
+ {
+ /* parse the created buffer for sc */
+ ret = func->parse_sc((void *)&(cxt->parse_cubby), (void *)&(cxt->codec_data[0]), &(cxt->sc_prefix_info));
+ if(ret == 1)
+ {
+ cxt->list.end_offset = cxt->parse_cubby.sc_end_pos + cxt->cur_buf.cur_offset;
+ cxt->parse_cubby.phase = 0;
+ cxt->list.total_bytes+=cxt->parse_cubby.sc_end_pos;
+ ret = PM_SC_FOUND;
+ sc_boundary_found = 1;
+ break;
+ }
+ else
+ {
+ cxt->list.total_bytes+=cxt->cur_buf.cur_size;
+ }
+ }
+ break;
+ }
+ }
+
+ return ret;
+}
+
+/*
+ Once we are ready to flush the current workload, we update current workload on DDR with our internal information
+ that was not written before like num of items in workload, errors in stream etc...
+*/
+void viddec_pm_finalize_workload(viddec_pm_cxt_t *cxt, uint32_t codec_type, uint32_t codec_errors)
+{
+ viddec_emit_set_codec(&(cxt->emitter), codec_type);
+ viddec_emit_set_codec_errors(&(cxt->emitter), codec_errors);
+ viddec_emit_flush_current_wkld(&(cxt->emitter));
+ output_omar_wires( 0x5 );
+ output_omar_wires( 0x1 );
+}
+
+/*
+ After parsing between start codes we cleanup our list so that it has only buffers that are not consumed yet.
+*/
+uint32_t viddec_pm_finalize_list(viddec_pm_cxt_t *cxt)
+{
+ uint32_t ret=1;
+
+ viddec_pm_utils_list_remove_used_entries(&(cxt->list), cxt->sc_prefix_info.second_scprfx_length);
+ cxt->cur_buf.list_index = -1;
+ cxt->list.first_scprfx_length = cxt->sc_prefix_info.second_scprfx_length;
+ return ret;
+}
+
+/* Case to handle if we encounter list overflow without seeing second start code */
+void viddec_pm_handle_buffer_overflow(viddec_pm_cxt_t *cxt, uint32_t codec_type, viddec_input_buffer_t *es_buf)
+{
+ uint32_t indx=0;
+ while(indx< (uint32_t)cxt->list.num_items)
+ {/* Dump tags for all entries in list to prevent buffer leak */
+ viddec_emit_contr_tag(&(cxt->emitter), &(cxt->list.sc_ibuf[indx]), false, true);
+ viddec_emit_assoc_tag(&(cxt->emitter), cxt->list.sc_ibuf[indx].id, true);
+ indx++;
+ }
+ /* Dump tags for the new buffer that was received */
+ viddec_emit_contr_tag(&(cxt->emitter), es_buf, 0, true);
+ viddec_emit_assoc_tag(&(cxt->emitter), es_buf->id, true);
+ /* Set errors on both current and next as both can be invalid */
+ viddec_emit_set_workload_error(&(cxt->emitter),
+ (VIDDEC_FW_WORKLOAD_ERR_BUFFERS_OVERFLOW | VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE),
+ true);
+ viddec_emit_set_workload_error(&(cxt->emitter),
+ (VIDDEC_FW_WORKLOAD_ERR_BUFFERS_OVERFLOW | VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE),
+ false);
+ /* cleanup the pending tags */
+ viddec_pm_generate_missed_association_tags(cxt, true);
+ viddec_pm_finalize_workload(cxt, codec_type, 0);
+ WRITE_SVEN(SVEN_MODULE_EVENT_GV_FW_FATAL_BUFFER_OVERLFOW, (int)es_buf->phys, (int)es_buf->len, 0, 0, 0, 0);
+}
+
+static inline void viddec_pm_handle_post_inband_messages(viddec_pm_cxt_t *cxt, uint32_t m_type)
+{
+ if((m_type & ~(0xFF))== PM_INBAND_MESSAGES)
+ {
+ /* If EOS decide set error on next workload too */
+ viddec_emit_set_workload_error(&(cxt->emitter), cxt->next_workload_error_eos, true);
+ if(m_type == PM_EOS)
+ {
+ viddec_emit_set_inband_tag(&(cxt->emitter), VIDDEC_WORKLOAD_IBUF_EOS, true);
+ }
+ if(m_type == PM_DISCONTINUITY)
+ {
+ cxt->pending_inband_tags = PM_DISCONTINUITY;
+ }
+ }
+}
+
+static inline uint32_t viddec_pm_handle_new_es_buffer(viddec_pm_cxt_t *cxt, uint32_t codec_type, viddec_input_buffer_t *es_buf)
+{
+ uint32_t state = PM_SUCCESS;
+ if(es_buf != NULL)
+ {
+ state = viddec_pm_add_es_buf_to_list(cxt, es_buf);
+ if(state == PM_OVERFLOW)
+ {
+ viddec_pm_handle_buffer_overflow(cxt, codec_type, es_buf);
+ }
+ }
+ return state;
+}
+
+static inline void viddec_pm_handle_pre_inband_messages(viddec_pm_cxt_t *cxt)
+{
+ if(cxt->pending_inband_tags == PM_DISCONTINUITY)
+ {
+ viddec_emit_set_inband_tag(&(cxt->emitter), VIDDEC_WORKLOAD_IBUF_DISCONTINUITY, false);
+ cxt->pending_inband_tags = 0;
+ }
+}
+
+/*
+ Main function of parser manager.
+ It searches until start codes are found in the list; if not, the return type indicates the kernel should provide more buffers.
+ If a start code is found it calls the codec to parse the syntax data it accumulated so far.
+ If codec says a frame is not done then continues to find the next start code.
+ If codec says frame is done it does tag association and indicates kernel a frame is done.
+*/
+uint32_t viddec_pm_parse_es_buffer(viddec_pm_cxt_t *cxt, uint32_t codec_type, viddec_input_buffer_t *es_buf)
+{
+ uint32_t state = PM_SUCCESS;
+
+ /* Step1: Append Es buffer to list */
+ viddec_pm_handle_pre_inband_messages(cxt);
+ state = viddec_pm_handle_new_es_buffer(cxt, codec_type, es_buf);
+ if(state == PM_SUCCESS)
+ {
+ uint32_t scdetect_ret;
+ output_omar_wires( 0x3 );
+ /* Step2: Phase1 of parsing, parse until a sc is found */
+ scdetect_ret = viddec_pm_parse_for_sccode(cxt,&parser_ops[codec_type]);
+ switch(scdetect_ret)
+ {
+ case PM_NO_DATA:
+ {
+ /* Step3: If we consumed all the data indicate we need more buffers */
+ state = PM_NO_DATA;
+ break;
+ }
+ case PM_EOS:
+ case PM_DISCONTINUITY:
+ case PM_SC_FOUND:
+ {
+ uint32_t codec_errors=0;
+ /* Create necessary state information to make the ES buffers look like linear data */
+ viddec_pm_utils_list_updatebytepos(&(cxt->list), cxt->sc_prefix_info.second_scprfx_length);
+ if(cxt->sc_prefix_info.first_sc_detect != 1)
+ {
+ /* Step4: If we saw two start codes init state and call codec to parse */
+ uint32_t codec_ret;
+ /* Initialise the state to provide get bits for codecs */
+ viddec_pm_utils_bstream_init(&(cxt->getbits), &(cxt->list), EMUL_REQD(codec_type));
+ output_omar_wires( 0x1 );
+ /* call the codec to do syntax parsing */
+ parser_ops[codec_type].parse_syntax((void *)cxt, (void *)&(cxt->codec_data[0]));
+ /* Check and see if frame start was detected. If we did update frame start in current au */
+ if(parser_ops[codec_type].is_frame_start((void *)&(cxt->codec_data[0])) == true)
+ {
+ cxt->frame_start_found += 1;
+ cxt->found_fm_st_in_current_au = true;
+ }
+ /* Query to see if we reached end of current frame */
+ codec_ret = parser_ops[codec_type].is_wkld_done((void *)cxt,
+ (void *)&(cxt->codec_data[0]),
+ (uint32_t)(cxt->sc_prefix_info.next_sc),
+ &codec_errors);
+
+ state = (codec_ret == VIDDEC_PARSE_FRMDONE) ? PM_WKLD_DONE : PM_SUCCESS;
+ /* generate contribution and association tags */
+ cxt->pending_tags.frame_done = (codec_ret == VIDDEC_PARSE_FRMDONE);
+ parser_ops[codec_type].gen_assoc_tags(cxt);
+ parser_ops[codec_type].gen_contrib_tags(cxt, (state != PM_WKLD_DONE));
+ }
+ else
+ {
+ /* Step4: If this is the first start code in this stream, clean up and return */
+ if(cxt->list.total_bytes != 0)
+ {
+ viddec_pm_generic_generate_contribution_tags(cxt, true);
+ viddec_generic_add_association_tags(cxt);
+ }
+ else
+ {
+ if(cxt->list.num_items >= 1)
+ {
+ uint32_t indx=0;
+ while((indx< (uint32_t)cxt->list.num_items) && (cxt->list.sc_ibuf[indx].len == 0))
+ {/* Dump all zero sized buffers until we see a buffer with valid data */
+ viddec_emit_contr_tag(&(cxt->emitter), &(cxt->list.sc_ibuf[indx]), false, false);
+ viddec_emit_assoc_tag(&(cxt->emitter), cxt->list.sc_ibuf[indx].id, false);
+ indx++;
+ }
+ }
+ }
+ if((scdetect_ret & ~(0xFF))!= PM_INBAND_MESSAGES)
+ {
+ state = PM_SUCCESS;//state = PM_FIRST_SC_FOUND;
+ cxt->sc_prefix_info.first_sc_detect = 0;
+ }
+ else
+ {
+ state = PM_WKLD_DONE;
+ }
+ }
+
+ viddec_pm_handle_post_inband_messages(cxt, scdetect_ret);
+
+ /* Step 5: If current frame is done, finalise the workload state with necessary information */
+ if(state == PM_WKLD_DONE)
+ {
+ DEB("\nFRAME ... DONE\n");
+ /* we decrement frame start. This can be 0 in cases like sending junk data with EOS */
+ cxt->frame_start_found -= (cxt->frame_start_found)? 1: 0;
+ if((scdetect_ret & ~(0xFF))== PM_INBAND_MESSAGES)
+ {/* If EOS dump pending tags and set state */
+ viddec_pm_generate_missed_association_tags(cxt, false);
+ state = scdetect_ret;
+ }
+ /* Write back stored state of workloads to memory to prepare for pushing to output queue */
+ viddec_pm_finalize_workload(cxt, codec_type, codec_errors);
+ }
+ /* Step 6: Reset the list to prepare for next iteration */
+ viddec_pm_finalize_list(cxt);
+ break;
+ }
+ default:
+ break;
+ }
+ }//if(state == PM_SUCCESS)
+ return state;
+} // viddec_pm_parse_es_buffer
diff --git a/mix_vbp/viddec_fw/fw/parser/viddec_pm_flush.c b/mix_vbp/viddec_fw/fw/parser/viddec_pm_flush.c
new file mode 100644
index 0000000..f16fbcd
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/viddec_pm_flush.c
@@ -0,0 +1,127 @@
+#include "fw_pvt.h"
+#include "viddec_fw_parser_ipclib_config.h"
+#include "viddec_fw_common_defs.h"
+#include "viddec_pm_tags.h"
+#include "viddec_fw_parser.h"
+
+extern dmem_t _dmem;
+extern viddec_parser_ops_t parser_ops[MFD_STREAM_FORMAT_MAX];
+
+static void viddec_fw_parser_peekmessages(viddec_pm_cxt_t *pm, ipc_msg_data *wkld_cur, ipc_msg_data *wkld_next, int32_t *ret_cur, int32_t *ret_next, uint32_t stream_id)
+{
+ FW_IPC_Handle *fwipc = GET_IPC_HANDLE(_dmem);
+ wkld_cur->phys = wkld_next->phys = 0;
+ /* read current and next workloads by peeking to free wkld queue.This would only give us a copy
+ of message but won't actually pull it out of queue*/
+
+ *ret_cur = FwIPC_PeekReadMessage(fwipc, &(fwipc->wkld_q[stream_id]), (char *)wkld_cur, sizeof(ipc_msg_data), 0);
+ *ret_next = FwIPC_PeekReadMessage(fwipc, &(fwipc->wkld_q[stream_id]), (char *)wkld_next, sizeof(ipc_msg_data), 1);
+ /* NOTE: I am passing length of current workload as size for next, since next workload might not exist. This is safe since in flush we always append to current workload */
+ viddec_emit_update(&(pm->emitter), wkld_cur->phys, wkld_next->phys, wkld_cur->len, wkld_cur->len);
+}
+
+static void viddec_fw_parser_push_error_workload(viddec_pm_cxt_t *pm, ipc_msg_data *wkld_cur, uint32_t stream_id)
+{
+ FW_IPC_Handle *fwipc = GET_IPC_HANDLE(_dmem);
+ /* Push the current wkld */
+ viddec_emit_set_workload_error(&(pm->emitter),
+ (VIDDEC_FW_WORKLOAD_ERR_FLUSHED_FRAME | VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE),
+ false);
+ viddec_emit_flush_current_wkld(&(pm->emitter));
+ FwIPC_SendMessage(fwipc, stream_id, (char *)wkld_cur, sizeof(ipc_msg_data));
+ FwIPC_ReadMessage(fwipc, &(fwipc->wkld_q[stream_id]), (char *)wkld_cur, sizeof(ipc_msg_data));
+}
+
+int viddec_fw_parser_flush(unsigned int stream_id, unsigned int flush_type)
+{
+ FW_IPC_Handle *fwipc = GET_IPC_HANDLE(_dmem);
+ mfd_pk_strm_cxt *cxt;
+ mfd_stream_info *cxt_swap;
+ viddec_pm_cxt_t *pm;
+ int32_t pos=0, ret = VIDDEC_FW_SUCCESS;/* success */
+ uint32_t workloads_in_input_q = 0;
+ cxt = (mfd_pk_strm_cxt *)&(_dmem.srm_cxt);
+ cxt_swap = (mfd_stream_info *)&(_dmem.stream_info[stream_id]);
+ pm = &(cxt->pm);
+
+ workloads_in_input_q = ipc_mq_read_avail(&fwipc->wkld_q[stream_id].mq, (int32_t *)&pos);
+ pos = 0;
+ /* Check to see if output queue has space for next message */
+ if(ipc_mq_write_avail(&fwipc->snd_q[stream_id].mq,&pos) >= workloads_in_input_q)
+ {
+ /* Check how many free workloads are available. Need at least 1 */
+ if(workloads_in_input_q >= CONFIG_IPC_MESSAGE_MAX_SIZE)
+ {
+ ipc_msg_data wkld_cur, wkld_next, cur_es;
+ int32_t ret_cur=0,ret_next=0;
+
+ {/* Swap context into local memory */
+ cp_using_dma(cxt_swap->ddr_cxt, (uint32_t) pm, sizeof(viddec_pm_cxt_t), false, false);
+ }
+
+ viddec_fw_parser_peekmessages(pm, &wkld_cur, &wkld_next, &ret_cur, &ret_next, stream_id);
+ if(workloads_in_input_q >= (CONFIG_IPC_MESSAGE_MAX_SIZE << 1))
+ {/* If we have more than 2 workloads, most likely current workload has partial data. To avoid overflow
+ lets push current and use next which is most likely empty .If there's only one workload it was
+ next for previous frame so most likely its empty in which case we don't do this logic*/
+ viddec_fw_parser_push_error_workload(pm, &wkld_cur, stream_id);
+ viddec_fw_parser_peekmessages(pm, &wkld_cur, &wkld_next, &ret_cur, &ret_next, stream_id);
+ }
+ /* Empty current es buffers in list */
+ /* TODO(Assumption): we have to make sure that list flush is really successful by checking return values.
+ If our workload size is big enough to accommodate buf done tags then it's not necessary
+ since we are guaranteed successful writes for all es buffers */
+ viddec_pm_generate_tags_for_unused_buffers_to_flush(pm);
+ /* Check the number of ES buffers and append them to current wkld */
+ while(FwIPC_ReadMessage(fwipc, &(fwipc->rcv_q[stream_id]), (char *)&cur_es, sizeof(ipc_msg_data)) != 0)
+ {
+ /* NOTE(Assumption): Again we have to define workload size to be big enough to make sure we can fit
+ all the es buffers into current workload */
+ viddec_emit_contr_tag(&(pm->emitter), &cur_es, 0, false);
+ viddec_emit_assoc_tag(&(pm->emitter), cur_es.id, false);
+ }
+ viddec_fw_parser_push_error_workload(pm, &wkld_cur, stream_id);
+ do
+ {/* Read until no workloads left */
+ viddec_fw_parser_peekmessages(pm, &wkld_cur, &wkld_next, &ret_cur, &ret_next, stream_id);
+ if(ret_cur == 0)
+ {
+ break;
+ }
+ viddec_fw_parser_push_error_workload(pm, &wkld_cur, stream_id);
+ }while(1);
+ switch(flush_type)
+ {
+ case VIDDEC_STREAM_FLUSH_DISCARD:
+ {
+ /* Reset pm_context */
+ viddec_fw_init_swap_memory(stream_id, 0, 1);
+ }
+ break;
+ case VIDDEC_STREAM_FLUSH_PRESERVE:
+ {
+ /* Reset just stream information */
+ viddec_fw_init_swap_memory(stream_id, 0, 0);
+ }
+ default:
+ break;
+ }
+ {/* swap context into DDR */
+ cp_using_dma(cxt_swap->ddr_cxt, (uint32_t) pm, sizeof(viddec_pm_cxt_t), true, false);
+ }
+ }
+ else
+ {
+ pos = 0;
+ /* check to see if I have any es buffers on input queue. If none are present we don't have to do anything */
+ if(ipc_mq_read_avail(&fwipc->rcv_q[stream_id].mq, (int32_t *)&pos) != 0)
+ ret = VIDDEC_FW_NEED_FREE_WKLD;
+ }
+ }
+ else
+ {
+ /* data present in output queue. */
+ ret =VIDDEC_FW_PORT_FULL;
+ }
+ return ret;
+}
diff --git a/mix_vbp/viddec_fw/fw/parser/viddec_pm_parser_ops.c b/mix_vbp/viddec_fw/fw/parser/viddec_pm_parser_ops.c
new file mode 100644
index 0000000..9a7d828
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/viddec_pm_parser_ops.c
@@ -0,0 +1,178 @@
+#include "viddec_pm.h"
+#include "viddec_fw_debug.h"
+#include "viddec_parser_ops.h"
+#include "viddec_emitter.h"
+#include "viddec_fw_workload.h"
+#include "viddec_pm_utils_bstream.h"
+
+extern void viddec_pm_utils_list_emit_pixel_tags(viddec_pm_utils_list_t *list, uint32_t start, viddec_emitter *emitter, uint32_t using_next);
+extern void viddec_pm_utils_list_emit_slice_tags(viddec_pm_utils_list_t *list, uint32_t start, uint32_t end, viddec_emitter *emitter, uint32_t is_cur_wkld, viddec_workload_item_t *wi);
+
+int32_t viddec_pm_get_bits(void *parent, uint32_t *data, uint32_t num_bits)
+{
+ int32_t ret = 1;
+ viddec_pm_cxt_t *cxt;
+
+ cxt = (viddec_pm_cxt_t *)parent;
+ ret = viddec_pm_utils_bstream_peekbits(&(cxt->getbits), data, num_bits, 1);
+ if(ret == -1)
+ {DEB("FAILURE!!!! getbits returned %d\n", ret);}
+
+ return ret;
+}
+
+int32_t viddec_pm_peek_bits(void *parent, uint32_t *data, uint32_t num_bits)
+{
+ int32_t ret = 1;
+ viddec_pm_cxt_t *cxt;
+
+ cxt = (viddec_pm_cxt_t *)parent;
+ ret = viddec_pm_utils_bstream_peekbits(&(cxt->getbits), data, num_bits, 0);
+ return ret;
+}
+
+int32_t viddec_pm_skip_bits(void *parent, uint32_t num_bits)
+{
+ int32_t ret = 1;
+ viddec_pm_cxt_t *cxt;
+
+ cxt = (viddec_pm_cxt_t *)parent;
+ ret = viddec_pm_utils_bstream_skipbits(&(cxt->getbits), num_bits);
+ return ret;
+}
+
+int32_t viddec_pm_append_workitem(void *parent, viddec_workload_item_t *item)
+{
+ int32_t ret = 1;
+ viddec_pm_cxt_t *cxt;
+
+ cxt = (viddec_pm_cxt_t *)parent;
+ ret = viddec_emit_append(&(cxt->emitter.cur), item);
+ return ret;
+}
+
+int32_t viddec_pm_append_workitem_next(void *parent, viddec_workload_item_t *item)
+{
+ int32_t ret = 1;
+ viddec_pm_cxt_t *cxt;
+
+ cxt = (viddec_pm_cxt_t *)parent;
+ ret = viddec_emit_append(&(cxt->emitter.next), item);
+ return ret;
+}
+
+int32_t viddec_pm_get_au_pos(void *parent, uint32_t *bit, uint32_t *byte, uint8_t *is_emul)
+{
+ int32_t ret = 1;
+ viddec_pm_cxt_t *cxt;
+
+ cxt = (viddec_pm_cxt_t *)parent;
+ viddec_pm_utils_bstream_get_au_offsets(&(cxt->getbits), bit, byte, is_emul);
+
+ return ret;
+
+}
+
+static inline int32_t viddec_pm_append_restof_pixel_data(void *parent, uint32_t cur_wkld)
+{
+ int32_t ret = 1;
+ viddec_pm_cxt_t *cxt;
+ uint32_t start=0, b_off=0;
+ uint8_t emul=0;
+ viddec_workload_item_t wi;
+
+ cxt = (viddec_pm_cxt_t *)parent;
+ viddec_pm_utils_bstream_get_au_offsets(&(cxt->getbits), &b_off, &start, &emul);
+ if(emul) start--;
+
+ wi.vwi_type = VIDDEC_WORKLOAD_PIXEL_ES;
+ wi.es.es_flags = 0;
+ viddec_pm_utils_list_emit_slice_tags(&(cxt->list), start, cxt->list.total_bytes -1, &(cxt->emitter), cur_wkld, &wi);
+ return ret;
+}
+
+int32_t viddec_pm_append_pixeldata(void *parent)
+{
+ return viddec_pm_append_restof_pixel_data(parent, 1);
+}
+
+int32_t viddec_pm_append_pixeldata_next(void *parent)
+{
+ return viddec_pm_append_restof_pixel_data(parent, 0);
+}
+
+viddec_workload_t* viddec_pm_get_header(void *parent)
+{
+ viddec_pm_cxt_t *cxt;
+
+ cxt = (viddec_pm_cxt_t *)parent;
+
+ return cxt->emitter.cur.data;
+}
+
+viddec_workload_t* viddec_pm_get_next_header(void *parent)
+{
+ viddec_pm_cxt_t *cxt;
+
+ cxt = (viddec_pm_cxt_t *)parent;
+
+ return cxt->emitter.next.data;
+}
+
+int32_t viddec_pm_is_nomoredata(void *parent)
+{
+ int32_t ret=0;
+ viddec_pm_cxt_t *cxt;
+
+ cxt = (viddec_pm_cxt_t *)parent;
+ ret = viddec_pm_utils_bstream_nomorerbspdata(&(cxt->getbits));
+ return ret;
+}
+
+uint32_t viddec_pm_get_cur_byte(void *parent, uint8_t *byte)
+{
+ int32_t ret=-1;
+ viddec_pm_cxt_t *cxt;
+
+ cxt = (viddec_pm_cxt_t *)parent;
+ ret = viddec_pm_utils_bstream_get_current_byte(&(cxt->getbits), byte);
+ return ret;
+}
+
+int32_t viddec_pm_append_misc_tags(void *parent, uint32_t start, uint32_t end, viddec_workload_item_t *wi, uint32_t using_next)
+{
+ int32_t ret = 1;
+ viddec_pm_cxt_t *cxt;
+
+ cxt = (viddec_pm_cxt_t *)parent;
+ if (end == VIDDEC_PARSE_INVALID_POS) end = (cxt->list.total_bytes -1);
+ viddec_pm_utils_list_emit_slice_tags(&(cxt->list), start, end, &(cxt->emitter), using_next, wi);
+
+ return ret;
+
+}
+
+void viddec_pm_set_next_frame_error_on_eos(void *parent, uint32_t error)
+{
+ viddec_pm_cxt_t *cxt;
+ cxt = (viddec_pm_cxt_t *)parent;
+ cxt->next_workload_error_eos = error;
+}
+
+void viddec_pm_set_late_frame_detect(void *parent)
+{
+ viddec_pm_cxt_t *cxt;
+ cxt = (viddec_pm_cxt_t *)parent;
+ cxt->late_frame_detect = true;
+}
+
+void viddec_pm_setup_userdata(viddec_workload_item_t *wi)
+{
+#ifdef MFDBIGENDIAN
+ wi->vwi_payload[0] = SWAP_WORD(wi->vwi_payload[0]);
+ wi->vwi_payload[1] = SWAP_WORD(wi->vwi_payload[1]);
+ wi->vwi_payload[2] = SWAP_WORD(wi->vwi_payload[2]);
+#else
+ wi=wi;
+#endif
+}
diff --git a/mix_vbp/viddec_fw/fw/parser/viddec_pm_stubs.c b/mix_vbp/viddec_fw/fw/parser/viddec_pm_stubs.c
new file mode 100644
index 0000000..0a6f09b
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/viddec_pm_stubs.c
@@ -0,0 +1,21 @@
+#include "viddec_parser_ops.h"
+
+void viddec_vc1_get_ops(viddec_parser_ops_t *ops)
+{
+ return;
+}
+
+void viddec_mpeg2_get_ops(viddec_parser_ops_t *ops)
+{
+ return;
+}
+
+void viddec_mp4_get_ops(viddec_parser_ops_t *ops)
+{
+ return;
+}
+
+void viddec_h264_get_ops(viddec_parser_ops_t *ops)
+{
+ return;
+}
diff --git a/mix_vbp/viddec_fw/fw/parser/viddec_pm_tags.c b/mix_vbp/viddec_fw/fw/parser/viddec_pm_tags.c
new file mode 100644
index 0000000..b0d8842
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/viddec_pm_tags.c
@@ -0,0 +1,304 @@
+#include "viddec_pm.h"
+#include "viddec_fw_debug.h"
+#include "viddec_fw_common_defs.h"
+#include "viddec_pm_tags.h"
+/*
+ Overview of tag association:
+
+ Contribution flags:
+ The current list has all the buffers which contribute to this particular workload. So we walk through the
+ list and throw buf done for all the buffers which were consumed. This can be deduced from the total bytes
+ in the list, which represent the bytes that were used for this access unit.
+ For buffers which were partially used and this can only be the last buffer we throw continued tag. The
+ Parser manager tells us when to throw a continued tag. This will only happen when parser Manager detects
+ that we reached end of current frame.
+
+ Association Tags:
+ These are the tags that FW generates which indicates how to associate metadata with Frames.
+ The policy to determine which tag belongs to which frame is based on sc prefix position. If ES buffer starts with
+ or has a sc prefix its associated to next decodable frame(based on first slice or header depending on codec).
+ We use three state variables to determine where the frame starts and ends.
+ frame_start_found: Indicates we saw the beginning of frame in current list of ES buffers(which represent current access unit).
+ This is decremented on workload done since it normally means we detected frame end.
+ found_fm_st_in_current_au:Indicates we saw the first slice in current access unit. Its mainly used to decide whether the first buffer
+ belongs to current frame or next frame. Its reset after its use.
+ Frame Done: Indicates we detected end of frame pointed by current workload.
+
+ Basic algo:
+ If we find frame start and the first buffer doesn't start with SC prefix, every consumed buffer belongs to Next frame. If first buffer
+ starts with SC prefix then that buffer belongs to Current frame.
+ If we haven't found frame start every buffer belongs to current frame.
+
+ TODO: Check for return codes from emitter
+*/
+
+
+/*
+ This function generates contribution tags for the current workload by walking through the list of consumed buffers.
+ If frame is done(ignore_partial is false) we generate continue tags for the last item in list(if it's not completely consumed).
+ This is used for all codecs except H264.
+ */
+uint32_t viddec_pm_generic_generate_contribution_tags(void *parent, uint32_t ignore_partial)
+{
+ uint32_t ret = PM_SUCCESS;
+ viddec_pm_cxt_t *cxt = (viddec_pm_cxt_t *)parent;
+ viddec_pm_utils_list_t *list = &(cxt->list);
+
+ if(list->num_items != 0)
+ {
+ if(!cxt->late_frame_detect)
+ {
+ uint32_t num_items = 0;
+ while((num_items < list->num_items) && (list->data[num_items].edpos <= (uint32_t)list->total_bytes))
+ {/* Walkthrough Consumed buffers and dump the tags */
+ viddec_emit_contr_tag(&(cxt->emitter), &(list->sc_ibuf[num_items]), false, false);
+ num_items++;
+ }
+ /* Dump incomplete tags if required */
+ if(!ignore_partial)
+ {/* check to see if last item is not consumed and dump continued flag */
+ if((num_items < list->num_items)
+ && (list->data[num_items].edpos >= (uint32_t)list->total_bytes))
+ {
+ viddec_emit_contr_tag(&(cxt->emitter), &(list->sc_ibuf[num_items]), true, false);
+ }
+ }
+ }
+ else
+ {
+ /* Only happens for dangling fields in MP2 Field pictures, in which case we find out the current frame was done in
+ last access unit, which is similar to H264 */
+ ret = viddec_pm_lateframe_generate_contribution_tags(parent, ignore_partial);
+ cxt->late_frame_detect = false;
+ }
+ }
+ return ret;
+}
+
+/*
+ For H264 when a frame is done it really means current frame was done in last access unit. The current access unit represented
+ by list belongs to next frame. ignore_partial is false for frame done.
+ When frame is not done we dump all consumed buffers into next workload else they go to current workload.
+ If frame is done we throw a continued flag for first buffer in current workload if it was used in last access unit.
+ */
+uint32_t viddec_pm_lateframe_generate_contribution_tags(void *parent, uint32_t ignore_partial)
+{
+ uint32_t ret = PM_SUCCESS;
+ viddec_pm_cxt_t *cxt = (viddec_pm_cxt_t *)parent;
+ viddec_pm_utils_list_t *list = &(cxt->list);
+
+ if(list->num_items != 0)
+ {
+ uint32_t num_items = 0;
+ /* If start offset is not 0 then it was partially used in last access unit. !ignore_partial means frame done*/
+ if((list->start_offset!= 0) && !ignore_partial)
+ {/* Emit continue in current if necessary. */
+ viddec_emit_contr_tag(&(cxt->emitter), &(list->sc_ibuf[num_items]), true, false);
+ }
+
+ while((num_items < list->num_items) && (list->data[num_items].edpos <= (uint32_t)list->total_bytes))
+ { /* Walkthrough Consumed buffers and dump the tags to current or Next*/
+ viddec_emit_contr_tag(&(cxt->emitter), &(list->sc_ibuf[num_items]), false, !ignore_partial);
+ num_items++;
+ }
+ }
+ return ret;
+}
+
+/*
+ This function dumps tags from temporary array into a workload(we indicate either current or next from using_next).
+*/
+uint32_t viddec_pm_generate_missed_association_tags(viddec_pm_cxt_t *cxt, uint32_t using_next)
+{
+ uint32_t i=0, ret = PM_SUCCESS;
+
+ while((i < MAX_IBUFS_PER_SC) && (cxt->pending_tags.pending_tags[i] != INVALID_ENTRY))
+ {
+ viddec_emit_assoc_tag(&(cxt->emitter), cxt->pending_tags.pending_tags[i], using_next);
+ cxt->pending_tags.pending_tags[i] = INVALID_ENTRY;
+ i++;
+ }
+ return ret;
+}
+
+/* This function adds current list of es buffer to pending list. ignore_first when set tells us to ignore the first
+ buffer in list.
+*/
+void viddec_pm_add_tags_to_pendinglist(viddec_pm_cxt_t *cxt, uint32_t ignore_first)
+{
+ viddec_pm_utils_list_t *list = &(cxt->list);
+ vidded_pm_pending_tags_t *pend = &(cxt->pending_tags);
+ uint32_t index=0, t_index=0;
+
+ if(!ignore_first && (list->start_offset == 0))
+ {/* If start offset is 0 we are saying that first buffer in list starts with start code */
+ pend->first_buf_aligned = true;
+ }
+ else
+ {/* We are ignoring first item in list since we already threw a tag for this buffer */
+ index++;
+ pend->first_buf_aligned = false;
+ }
+
+ while( (index < list->num_items) && (list->data[index].edpos <= (uint32_t)list->total_bytes))
+ {/* walk through consumed buffers and buffer id's in pending list */
+ pend->pending_tags[t_index] = list->sc_ibuf[index].id;
+ index++;t_index++;
+ }
+ if( (index < list->num_items) && (list->data[index].stpos < (uint32_t)list->total_bytes))
+ {/* If last item is partially consumed still add it to pending tags since tag association is based on start of ES buffer */
+ pend->pending_tags[t_index] = list->sc_ibuf[index].id;
+ }
+}
+
+/* Helper function to emit a association tag from pending list and resetting the value to invalid entry */
+static inline void viddec_pm_emit_pending_tag_item(viddec_emitter *emit, vidded_pm_pending_tags_t *pend, uint32_t index, uint32_t using_next)
+{
+ viddec_emit_assoc_tag(emit, pend->pending_tags[index], using_next);
+ pend->pending_tags[index] = INVALID_ENTRY;
+}
+
+/*
+ Tag association for mpeg2:
+ start frame is detected in pict header extension, but pict header represents start of frame.
+ To handle this we always store current AU list in temporary pending list. At the start of function
+ we look to see if a frame start was found, if we did we start dumping items from pending list based
+ on byte position of sc in first buffer of pending list. At the end we copy current list items to
+ pending list.
+ Limitation With Dangling fields: If we have AF1 AF2 BF1 CF1 CF2 as the sequence of fields
+ Tag assocaiation will be fine for A & B, However the first buffer tag on C will fall into B
+ We donot want to fix this issue right now as it means doubling size of pending list which
+ increases memory usage. Normally dangling fields are thrown away so worst case we will miss
+ one original PTS, So its OK not to fix it right now.
+ */
+uint32_t viddec_mpeg2_add_association_tags(void *parent)
+{
+ uint32_t ret = PM_SUCCESS;
+ viddec_pm_cxt_t *cxt = (viddec_pm_cxt_t *)parent;
+ vidded_pm_pending_tags_t *pend = &(cxt->pending_tags);
+ uint32_t first_slice = false, index = 0;
+ /* check to see if we found a frame start in current access unit */
+ first_slice = cxt->frame_start_found && cxt->found_fm_st_in_current_au;
+ cxt->found_fm_st_in_current_au = false;
+ /* If we found frame start and first item in pending tags is start with start code
+ then it needs to go to current frame. */
+ if(first_slice && pend->first_buf_aligned && (pend->pending_tags[index] != INVALID_ENTRY))
+ {
+ viddec_pm_emit_pending_tag_item(&(cxt->emitter), pend, index, false);
+ index++;
+ }
+ /* rest of list goes to current if frame start is not found else next frame */
+ while((index < MAX_IBUFS_PER_SC) && (pend->pending_tags[index] != INVALID_ENTRY))
+ {
+ viddec_pm_emit_pending_tag_item(&(cxt->emitter), pend, index, cxt->frame_start_found);
+ index++;
+ }
+ /* Copy items to temporary List */
+ viddec_pm_add_tags_to_pendinglist(cxt, false);
+ return ret;
+}
+
+/*
+ Tag association for h264:
+ In this case when we get frame done it means current frame was done in last access unit. The data in current list belongs
+ to next frame. To handle this we always dump the buffered tags from last list and throw them in current/next frame based on pend state.
+ If the first item in current list is on sc boundary, it has to go into next so we always throw that tag in next.
+ For rest of items we store them in pending tags array and store information on where these stored tags should go into for
+ next run. This is determined by start frame. We do this because at this state our next should be current and "next next" should
+ be next.
+ */
+uint32_t viddec_h264_add_association_tags(void *parent)
+{
+ uint32_t ret = PM_SUCCESS;
+ viddec_pm_cxt_t *cxt = (viddec_pm_cxt_t *)parent;
+ viddec_pm_utils_list_t *list = &(cxt->list);
+ vidded_pm_pending_tags_t *pend = &(cxt->pending_tags);
+ uint32_t first_slice = false, index = 0;
+
+ /* Throw tags for items from pending list based on stored state from last run */
+ viddec_pm_generate_missed_association_tags(cxt, pend->using_next);
+ first_slice = cxt->frame_start_found && cxt->found_fm_st_in_current_au;
+ cxt->found_fm_st_in_current_au = false;
+ /* If we saw frame start and first buffer is aligned to start code throw it into next */
+ if(first_slice && (list->start_offset == 0))
+ {
+ viddec_emit_assoc_tag(&(cxt->emitter), list->sc_ibuf[index].id, cxt->frame_start_found && cxt->pending_tags.frame_done);
+ index++;
+ }
+ /* add tags to pending list */
+ viddec_pm_add_tags_to_pendinglist(cxt, (index != 0));
+ /* We want to figure out where these buffers should go into. There are three possible cases
+ current: If no frame start found these should go into next.
+ next: If one frame start is found and frame is not done then it should go to next.
+ if a frame is done then pm will push current out and next time we come here previous next is current.
+ next next: If two frame starts are found then we want it to be next next workload, which is what next will be
+ when we get called next time.
+ */
+ pend->using_next = (!cxt->pending_tags.frame_done && (cxt->frame_start_found == 1)) || (cxt->frame_start_found > 1);
+ return ret;
+}
+
+/*
+ Tag association for vc1:
+ Frame header represents start of new frame. If we saw a frame start in current access unit and the buffer starts
+ with start code it needs to go to current frame. Rest of items go to next if frame start found else current frame.
+ */
+uint32_t viddec_generic_add_association_tags(void *parent)
+{
+ uint32_t ret = PM_SUCCESS;
+ viddec_pm_cxt_t *cxt = (viddec_pm_cxt_t *)parent;
+ viddec_pm_utils_list_t *list = &(cxt->list);
+ uint32_t not_first_slice = false, index = 0;
+
+ /* We check to see if this access unit is not the first one with frame start. This evaluates to true in that case */
+ not_first_slice = cxt->frame_start_found && !cxt->found_fm_st_in_current_au;
+ cxt->found_fm_st_in_current_au = false;
+ if(list->start_offset == 0)
+ {/* If start offset is 0, we have start code at beginning of buffer. If frame start was detected in this
+ access unit we put the tag in current else it goes to next */
+ viddec_emit_assoc_tag(&(cxt->emitter), list->sc_ibuf[index].id, not_first_slice);
+ }
+ /* Skip first item always, for start_offset=0 its already been handled above*/
+ index++;
+ while( (index < list->num_items) && (list->data[index].edpos <= (uint32_t)list->total_bytes))
+ {/* Walkthrough Consumed buffers and dump the tags to current or next*/
+ viddec_emit_assoc_tag(&(cxt->emitter), list->sc_ibuf[index].id, cxt->frame_start_found);
+ index++;
+ }
+ if( (index < list->num_items) && (list->data[index].stpos < (uint32_t)list->total_bytes))
+ {/* Dump last item if it was partially consumed */
+ viddec_emit_assoc_tag(&(cxt->emitter), list->sc_ibuf[index].id, cxt->frame_start_found);
+ }
+ return ret;
+}
+
+/*
+ This function throws tags for buffers which were not used yet during flush.
+ */
+void viddec_pm_generate_tags_for_unused_buffers_to_flush(viddec_pm_cxt_t *cxt)
+{
+ viddec_pm_utils_list_t *list;
+ uint32_t index=0;
+
+ list = &(cxt->list);
+ /* Generate association tags from temporary pending array */
+ viddec_pm_generate_missed_association_tags(cxt, false);
+ if(list->num_items > 0)
+ {
+ /* Throw contribution flag for first item as done */
+ viddec_emit_contr_tag(&(cxt->emitter), &(list->sc_ibuf[index]), false, false);
+ if(cxt->list.start_offset == 0)
+ {/* Throw association for first item if it was not done already */
+ viddec_emit_assoc_tag(&(cxt->emitter), list->sc_ibuf[index].id, false);
+ }
+ index++;
+ while(index < list->num_items)
+ {/* Walk through list and throw contribution and association flags */
+ viddec_emit_contr_tag(&(cxt->emitter), &(list->sc_ibuf[index]), false, false);
+ viddec_emit_assoc_tag(&(cxt->emitter), list->sc_ibuf[index].id, false);
+ index++;
+ }
+ }
+ /* Not required to re init list structure as flush takes care of it */
+}
+
diff --git a/mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_bstream.c b/mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_bstream.c
new file mode 100644
index 0000000..8d3f329
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_bstream.c
@@ -0,0 +1,472 @@
+#include "viddec_pm_utils_bstream.h"
+#include "viddec_fw_debug.h"
+
+/* Internal scratch type for bit extraction: the same 8 bytes viewed either
+   byte-wise (while copying and stripping emulation-prevention bytes) or as
+   two 32-bit words (when shifting the requested bits into place). */
+typedef union
+{
+    uint8_t byte[8];
+    uint32_t word[2];
+}viddec_pm_utils_getbits_t;
+
+void viddec_pm_utils_bstream_reload(viddec_pm_utils_bstream_cxt_t *cxt);
+uint32_t viddec_pm_utils_bstream_getphys(viddec_pm_utils_bstream_cxt_t *cxt, uint32_t pos, uint32_t lst_index);
+extern uint32_t cp_using_dma(uint32_t ddr_addr, uint32_t local_addr, uint32_t size, char to_ddr, char swap);
+
+/* Bytes left in cubby buffer which were not consumed yet.
+   buf_end is the index of the first invalid byte, buf_index the current
+   read position, so the difference is the unread byte count. */
+static inline uint32_t viddec_pm_utils_bstream_bytesincubby(viddec_pm_utils_bstream_buf_cxt_t *cxt)
+{
+    return (cxt->buf_end - cxt->buf_index);
+}
+
+/*
+   This function checks to see if we are at the last valid byte for current access unit.
+   Returns true (1) when the current byte is the last RBSP byte of the AU.
+*/
+uint8_t viddec_pm_utils_bstream_nomorerbspdata(viddec_pm_utils_bstream_cxt_t *cxt)
+{
+    uint32_t data_remaining = 0;
+    uint8_t ret = false;
+
+    /* How much data is remaining including current byte to be processed.*/
+    data_remaining = cxt->list->total_bytes - (cxt->au_pos + (cxt->bstrm_buf.buf_index - cxt->bstrm_buf.buf_st));
+
+    /* Start code prefix can be 000001 or 0000001. We always only check for 000001.
+       data_remaining should be 1 for 000001, as we don't count sc prefix and 1 represents current byte.
+       data_remaining should be 2 for 00000001, as we don't count sc prefix, its current byte and extra 00 as we check for 000001.
+       NOTE: This is used for H264 only.
+    */
+    switch(data_remaining)
+    {
+        case 2:
+            /* If next byte is 0 and its the last byte in access unit */
+            ret = (cxt->bstrm_buf.buf[cxt->bstrm_buf.buf_index+1] == 0x0);
+            break;
+        case 1:
+            /* if the current byte is last byte */
+            ret = true;
+            break;
+        default:
+            break;
+    }
+    return ret;
+}
+
+/*
+   This function returns true if cubby buffer has the last byte of access unit.
+*/
+uint8_t viddec_pm_utils_bstream_nomoredata(viddec_pm_utils_bstream_cxt_t *cxt)
+{
+    uint32_t last_byte_offset_plus_one=0;
+    uint8_t ret = false;
+    /* Check to see if the last byte access unit offset is the last byte for current access unit.
+       End represents the first invalid byte, so (end - st) will give number of bytes.*/
+    last_byte_offset_plus_one = cxt->au_pos + (cxt->bstrm_buf.buf_end - cxt->bstrm_buf.buf_st);
+    if((int32_t)last_byte_offset_plus_one >= cxt->list->total_bytes)
+    {
+        ret = true;
+    }
+    return ret;
+}
+
+/* This function initializes scratch buffer, which is used for staging already read data, due to DMA limitations.
+   Resets start, size and bit offset; the scratch byte storage itself needs no clearing. */
+static inline void viddec_pm_utils_bstream_scratch_init(viddec_pm_utils_bstream_scratch_cxt_t *cxt)
+{
+    cxt->st = cxt->size = cxt->bitoff=0;
+}
+
+/* This function tells us how much more data is in the current es buffer from current position. Its used to figure out if
+   we need to go to next es buffer.
+   Returns 0 when 'offset' is at or past the (clamped) end of the buffer at 'index'. */
+static inline uint32_t viddec_pm_utils_bstream_datafromindex(viddec_pm_utils_list_t *list, uint32_t index, uint32_t offset)
+{
+    uint32_t ret=0;
+    int32_t val=0;
+    /* Clamp this buffer's end position to the size of the current access unit */
+    val = (list->data[index].edpos <= (uint32_t)list->total_bytes) ? list->data[index].edpos: (uint32_t)list->total_bytes;
+    val = val - (int32_t)offset;
+    if(val > 0) ret = (uint32_t)val;
+    /* BUGFIX: return the clamped count 'ret', not 'val'. The original returned
+       'val', which is negative when offset is past this buffer's end and would
+       wrap to a huge uint32_t in the caller. */
+    return ret;
+}
+
+/* This function seeks to byte offset position starting from lst_index, if more data is present in current ES buffer pointed by
+   lst_index returns the remaining data in current buffer along with physical address of byte offset. The lst_index parameter
+   at return holds the index of the ES buffer in list which has byte_offset.
+   Returns 0 when byte_offset is not inside any remaining buffer. */
+static inline uint32_t viddec_pm_utils_bstream_maxbytes_from_index(viddec_pm_utils_bstream_cxt_t *cxt,
+                                                                   uint32_t *lst_index,
+                                                                   uint32_t byte_offset,
+                                                                   uint32_t *physaddr)
+{
+    viddec_pm_utils_list_t *list;
+    uint32_t last_byte_offst=0, bytes_left=0;/* default return value is 0 bytes */
+
+    list = cxt->list;
+    while(*lst_index < list->num_items)
+    {
+        /* Check to see if we reached the buffer with last valid byte of current access unit, List can have data beyond current access unit */
+        last_byte_offst = (list->data[*lst_index].edpos <= (uint32_t)list->total_bytes) ? list->data[*lst_index].edpos: (uint32_t)list->total_bytes;
+        if(byte_offset < last_byte_offst)
+        {/* Found a match so return with data remaining */
+/* NOTE(review): the #if 1 branch is an inlined, corrected copy of
+   viddec_pm_utils_bstream_datafromindex (which returns the wrong variable);
+   the #else branch is dead code kept for reference. */
+#if 1
+            int32_t val=0;
+            val = last_byte_offst - (int32_t)byte_offset;
+            if(val > 0) bytes_left = (uint32_t)val;
+#else
+            bytes_left = viddec_pm_utils_bstream_datafromindex(list, *lst_index, byte_offset);
+#endif
+            *physaddr = viddec_pm_utils_bstream_getphys(cxt, byte_offset, *lst_index);
+            break;
+        }
+        *lst_index+=1;
+    }
+    return bytes_left;
+}
+
+/* This function is for copying trailing bytes of cubby bitstream buffer to scratch buffer.
+   Appends num_bytes from 'data' into buf_scratch starting at index 0 and bumps cxt->size.
+   NOTE(review): no bounds check against the scratch capacity — assumes callers
+   never pass more than the scratch buffer can hold (TODO confirm buf_scratch size). */
+static inline void viddec_pm_utils_bstream_scratch_copyto(viddec_pm_utils_bstream_scratch_cxt_t *cxt, uint8_t *data, uint32_t num_bytes)
+{
+    uint32_t i=0;
+    for(i=0; i<num_bytes;i++)
+    {
+        cxt->buf_scratch[i] = *data;
+        data++;cxt->size++;
+    }
+}
+
+/* This function is for copying trailing bytes from scratch buffer to bitstream buffer.
+   Writes cxt->size bytes out to 'data'; caller guarantees the destination has room. */
+static inline void viddec_pm_utils_bstream_scratch_copyfrom(viddec_pm_utils_bstream_scratch_cxt_t *cxt, uint8_t *data)
+{
+    uint32_t i=0;
+    for(i=0; i<cxt->size;i++)
+    {
+        *data = cxt->buf_scratch[i];
+        data++;
+    }
+}
+
+/* This function populates requested number of bytes into data parameter, skips emulation prevention bytes if needed.
+   The emulation 'phase' counts consecutive zero bytes seen so far; when phase == 2
+   a following 0x03 is the H.264 emulation_prevention_three_byte and is skipped.
+   Returns 1 on success, -1 if the read ran past buf_end (the buffer has MIN_DATA
+   slack bytes so the overread itself is safe). */
+static inline int32_t viddec_pm_utils_getbytes(viddec_pm_utils_bstream_buf_cxt_t *bstream,
+                                               viddec_pm_utils_getbits_t *data,/* gets populated with read bytes*/
+                                               uint32_t *act_bytes, /* actual number of bytes read can be more due to emulation prev bytes*/
+                                               uint32_t *phase, /* Phase for emulation */
+                                               uint32_t num_bytes,/* requested number of bytes*/
+                                               uint32_t emul_reqd, /* On true we look for emulation prevention */
+                                               uint8_t is_offset_zero /* Are we on aligned byte position for first byte*/
+                                               )
+{
+    int32_t ret = 1;
+    uint8_t cur_byte = 0, valid_bytes_read = 0;
+    *act_bytes = 0;
+
+    while(valid_bytes_read < num_bytes)
+    {
+        cur_byte = bstream->buf[bstream->buf_index + *act_bytes];
+        if((cur_byte == 0x3) &&(*phase == 2))
+        {/* skip emulation byte. we update the phase only if emulation prevention is enabled */
+            *phase = 0;
+        }
+        else
+        {
+            data->byte[valid_bytes_read] = cur_byte;
+            /*
+              We only update phase for first byte if bit offset is 0. If its not 0 then it was already accounted for in the past.
+              From second byte onwards we always look to update phase.
+            */
+            if((*act_bytes != 0) || (is_offset_zero))
+            {
+                if(cur_byte == 0)
+                {
+                    /* Update phase only if emulation prevention is required */
+                    *phase +=( ((*phase < 2) && emul_reqd ) ? 1: 0 );
+                }
+                else
+                {
+                    *phase=0;
+                }
+            }
+            valid_bytes_read++;
+        }
+        *act_bytes +=1;
+    }
+    /* Check to see if we reached end during above operation. We might be out of range but its safe since our array
+       has at least MIN_DATA extra bytes and the maximum out of bounds we will go is 5 bytes */
+    if((bstream->buf_index + *act_bytes -1) >= bstream->buf_end)
+    {
+        ret = -1;
+    }
+    return ret;
+}
+
+/*
+   This function checks to see if we have minimum amount of data else tries to reload as much as it can.
+   Always returns the data left in current buffer in parameter.
+   In VBP builds the whole access unit is already resident, so no reload is attempted.
+*/
+static inline void viddec_pm_utils_check_bstream_reload(viddec_pm_utils_bstream_cxt_t *cxt, uint32_t *data_left)
+{
+#ifdef VBP
+    *data_left = viddec_pm_utils_bstream_bytesincubby(&(cxt->bstrm_buf));
+#else
+    uint8_t isReload=0;
+
+    *data_left = viddec_pm_utils_bstream_bytesincubby(&(cxt->bstrm_buf));
+    /* If we have minimum data we should continue, else try to read more data */
+    if(*data_left <MIN_DATA)
+    {
+        /* Check to see if we already read last byte of current access unit */
+        isReload = !(viddec_pm_utils_bstream_nomoredata(cxt) == 1);
+        while(isReload)
+        {
+            /* We have more data in access unit so keep reading until we get at least minimum data */
+            viddec_pm_utils_bstream_reload(cxt);
+            *data_left = viddec_pm_utils_bstream_bytesincubby(&(cxt->bstrm_buf));
+            /* Break out of loop if we reached last byte or we have enough data */
+            isReload = !((*data_left >= MIN_DATA) || (viddec_pm_utils_bstream_nomoredata(cxt) == 1));
+        }
+    }
+#endif
+}
+/*
+   This function moves the stream position by N bits(parameter bits). The bytes parameter tells us how many bytes were
+   read for this N bits(can be different due to emulation bytes).
+   When the new position is byte aligned all bytes are fully consumed; otherwise
+   the last byte is only partially consumed, so buf_index stops one short and
+   the remainder lives in buf_bitoff.
+*/
+static inline void viddec_pm_utils_update_skipoffsets(viddec_pm_utils_bstream_buf_cxt_t *bstream, uint32_t bits, uint32_t bytes)
+{
+    if((bits & 0x7) == 0)
+    {
+        bstream->buf_bitoff = 0;
+        bstream->buf_index +=bytes;
+    }
+    else
+    {
+        bstream->buf_bitoff = bits & 0x7;
+        bstream->buf_index +=(bytes - 1);
+    }
+}
+
+/*
+   This function gets physical address of the requested au offset(pos).
+   Walks the ES buffer list starting at lst_index until the buffer containing
+   'pos' is found, then converts the linear AU offset to an address within it.
+   Returns 0 if pos lies beyond every buffer.
+*/
+
+uint32_t viddec_pm_utils_bstream_getphys(viddec_pm_utils_bstream_cxt_t *cxt, uint32_t pos, uint32_t lst_index)
+{
+    uint32_t ret = 0, last_byte_offst=0;
+    viddec_pm_utils_list_t *list;
+
+    list = cxt->list;
+    while(lst_index < list->num_items)
+    {
+        last_byte_offst = (list->data[lst_index].edpos <= (uint32_t)list->total_bytes) ? list->data[lst_index].edpos: (uint32_t)list->total_bytes;
+        if(pos < last_byte_offst)
+        {
+/* On little-endian host builds the virtual pointer is used; firmware builds use
+   the physical address. NOTE(review): casting the pointer to uint32_t assumes a
+   32-bit address space — confirm for 64-bit hosts. */
+#ifndef MFDBIGENDIAN
+            ret = (uint32_t)list->sc_ibuf[lst_index].buf;
+#else
+            ret = list->sc_ibuf[lst_index].phys;
+#endif
+            ret +=(pos - list->data[lst_index].stpos);
+            if(lst_index == 0) ret+=list->start_offset;
+            break;
+        }
+        lst_index++;
+    }
+    return ret;
+}
+
+/*
+   Actual reload function which uses dma to refill bitstream buffer.
+   Preserves any unconsumed tail bytes in the scratch buffer, DMAs the next
+   chunk (word-aligned) into the cubby after the MIN_DATA slack region, then
+   copies the scratch bytes back in front of the fresh data and fixes offsets.
+*/
+void viddec_pm_utils_bstream_reload(viddec_pm_utils_bstream_cxt_t *cxt)
+{
+    viddec_pm_utils_bstream_buf_cxt_t *bstream;
+
+    bstream = &(cxt->bstrm_buf);
+
+    /* Update current offset positions */
+    cxt->au_pos += (bstream->buf_index - bstream->buf_st);
+    bstream->buf_st = bstream->buf_index;
+    /* copy leftover bytes into scratch */
+    {
+        int32_t cur_bytes=0;
+        viddec_pm_utils_bstream_scratch_init(&(cxt->scratch));
+        cur_bytes = viddec_pm_utils_bstream_bytesincubby(&(cxt->bstrm_buf));
+        if(cur_bytes > 0)
+        {
+            viddec_pm_utils_bstream_scratch_copyto(&(cxt->scratch), &(bstream->buf[bstream->buf_index]), cur_bytes);
+            cxt->scratch.bitoff = bstream->buf_bitoff;
+        }
+    }
+    /* Initiate DMA and copyback scratch data */
+    {
+        /* ddr_mask is declared at this scope because the copy-back block below
+           needs the alignment adjustment computed for the DMA. */
+        uint32_t data_left = 0, ddr_mask=0;
+        /* calculate necessary alignments and copy data */
+        {
+            uint32_t ddr_addr=0, data_wrote=0;
+            uint32_t byte_pos;
+            /* byte pos points to the position from where we want to read data.*/
+            byte_pos = cxt->au_pos + cxt->scratch.size;
+            data_left = viddec_pm_utils_bstream_maxbytes_from_index(cxt, &(cxt->list_off), byte_pos, &ddr_addr);
+            if(data_left > CUBBY_SIZE)
+            {
+                data_left = CUBBY_SIZE;
+            }
+            if(data_left != 0)
+            {
+                /* Word-align the DDR address; ddr_mask bytes of leading slack are
+                   DMAed in and skipped by the index math below. */
+                ddr_mask = ddr_addr & 0x3;
+                ddr_addr = ddr_addr & ~0x3;
+                /* NOTE(review): data_wrote captures the DMA return value but is
+                   never checked — confirm cp_using_dma cannot short-copy here. */
+                data_wrote = cp_using_dma(ddr_addr, (uint32_t)&(bstream->buf[MIN_DATA]), (data_left + ddr_mask), 0, 1);
+            }
+        }
+        /* copy scratch data back to buffer and update offsets */
+        {
+            uint32_t index=0;
+            index = MIN_DATA + ddr_mask;
+            index -= cxt->scratch.size;
+            viddec_pm_utils_bstream_scratch_copyfrom(&(cxt->scratch), &(bstream->buf[index]));
+            bstream->buf_st = bstream->buf_index = index;
+            bstream->buf_end = data_left + cxt->scratch.size + bstream->buf_st;
+            bstream->buf_bitoff = cxt->scratch.bitoff;
+        }
+    }
+}
+
+/*
+   Init function called by parser manager after sc code detected.
+   Resets all bitstream-cursor state for a new access unit; is_emul enables
+   H.264 emulation-prevention-byte removal during bit reads.
+*/
+void viddec_pm_utils_bstream_init(viddec_pm_utils_bstream_cxt_t *cxt, viddec_pm_utils_list_t *list, uint32_t is_emul)
+{
+#ifdef VBP
+    /* NOTE(review): this counter is also written by skipbits/peekbits without a
+       VBP guard — confirm the field exists in non-VBP builds. */
+    cxt->emulation_byte_counter = 0;
+#endif
+
+    cxt->au_pos = 0;
+    cxt->list = list;
+    cxt->list_off = 0;
+    cxt->phase = 0;
+    cxt->is_emul_reqd = is_emul;
+    cxt->bstrm_buf.buf_st = cxt->bstrm_buf.buf_end = cxt->bstrm_buf.buf_index = cxt->bstrm_buf.buf_bitoff = 0;
+}
+
+/* Get the requested byte position. If the byte is already present in cubby its returned
+   else we seek forward and get the requested byte.
+   Limitation:Once we seek forward we can't return back.
+   Returns 1 and stores the byte in *byte on success, -1 when no data is left. */
+int32_t viddec_pm_utils_bstream_get_current_byte(viddec_pm_utils_bstream_cxt_t *cxt, uint8_t *byte)
+{
+    int32_t ret = -1;
+    uint32_t data_left=0;
+    viddec_pm_utils_bstream_buf_cxt_t *bstream;
+
+    bstream = &(cxt->bstrm_buf);
+    viddec_pm_utils_check_bstream_reload(cxt, &data_left);
+    if(data_left != 0)
+    {
+        /* Peek only: buf_index is not advanced */
+        *byte = bstream->buf[bstream->buf_index];
+        ret = 1;
+    }
+    return ret;
+}
+
+/*
+   Function to skip N bits ( N<= 32).
+   Returns 1 on success, -1 if the request is invalid or not enough data remains.
+   Emulation-prevention bytes encountered while skipping are consumed and
+   accounted for in emulation_byte_counter.
+*/
+int32_t viddec_pm_utils_bstream_skipbits(viddec_pm_utils_bstream_cxt_t *cxt, uint32_t num_bits)
+{
+    int32_t ret = -1;
+    uint32_t data_left=0;
+    viddec_pm_utils_bstream_buf_cxt_t *bstream;
+
+    bstream = &(cxt->bstrm_buf);
+    viddec_pm_utils_check_bstream_reload(cxt, &data_left);
+    if((num_bits <= 32) && (num_bits > 0) && (data_left != 0))
+    {
+        uint8_t bytes_required=0;
+
+        /* Number of whole bytes touched by the requested bit span */
+        bytes_required = (bstream->buf_bitoff + num_bits + 7)>>3;
+        if(bytes_required <= data_left)
+        {
+            viddec_pm_utils_getbits_t data;
+            uint32_t act_bytes =0;
+            if(viddec_pm_utils_getbytes(bstream, &data, &act_bytes, &(cxt->phase), bytes_required, cxt->is_emul_reqd, (bstream->buf_bitoff == 0)) != -1)
+            {
+                uint32_t total_bits=0;
+                total_bits=num_bits+bstream->buf_bitoff;
+                viddec_pm_utils_update_skipoffsets(bstream, total_bits, act_bytes);
+                ret=1;
+
+                /* act_bytes > bytes_required means emulation bytes were skipped.
+                   NOTE(review): this assigns rather than accumulates, unlike
+                   peekbits which uses += — confirm which is intended. */
+                if (act_bytes > bytes_required)
+                {
+                    cxt->emulation_byte_counter = act_bytes - bytes_required;
+                }
+            }
+        }
+    }
+    return ret;
+}
+
+/*
+   Function to get N bits ( N<= 32).
+   Reads num_bits into *out, MSB-first. When 'skip' is nonzero the stream
+   position is advanced past the bits read, otherwise this is a pure peek
+   (the emulation phase is then also left untouched).
+   Returns 1 on success, -1 on invalid request or insufficient data.
+*/
+int32_t viddec_pm_utils_bstream_peekbits(viddec_pm_utils_bstream_cxt_t *cxt, uint32_t *out, uint32_t num_bits, uint8_t skip)
+{
+    uint32_t data_left=0;
+    int32_t ret = -1;
+    /* STEP 1: Make sure that we have at least minimum data before we calculate bits */
+    viddec_pm_utils_check_bstream_reload(cxt, &data_left);
+
+    if((num_bits <= 32) && (num_bits > 0) && (data_left != 0))
+    {
+        uint32_t bytes_required=0;
+        viddec_pm_utils_bstream_buf_cxt_t *bstream;
+
+        bstream = &(cxt->bstrm_buf);
+        bytes_required = (bstream->buf_bitoff + num_bits + 7)>>3;
+
+        /* Step 2: Make sure we have bytes for requested bits */
+        if(bytes_required <= data_left)
+        {
+            uint32_t act_bytes, phase;
+            viddec_pm_utils_getbits_t data;
+            /* Work on a local phase copy so a pure peek leaves context untouched */
+            phase = cxt->phase;
+            /* Step 3: Due to emulation prevention bytes sometimes the bytes_required > actual_required bytes */
+            if(viddec_pm_utils_getbytes(bstream, &data, &act_bytes, &phase, bytes_required, cxt->is_emul_reqd, (bstream->buf_bitoff == 0)) != -1)
+            {
+                uint32_t total_bits=0;
+                uint32_t shift_by=0;
+                /* zero out upper bits */
+                /* LIMITATION:For some reason compiler is optimizing it to NOP if i do both shifts
+                   in single statement */
+                data.byte[0] <<= bstream->buf_bitoff;
+                data.byte[0] >>= bstream->buf_bitoff;
+
+/* Bytes were stored big-endian style; swap words on little-endian builds */
+#ifndef MFDBIGENDIAN
+                data.word[0] = SWAP_WORD(data.word[0]);
+                data.word[1] = SWAP_WORD(data.word[1]);
+#endif
+                total_bits = num_bits+bstream->buf_bitoff;
+                if(total_bits > 32)
+                {
+                    /* We have to use both the words to get required data */
+                    shift_by = total_bits - 32;
+                    data.word[0] = (data.word[0] << shift_by) | ( data.word[1] >> (32 - shift_by));
+                    //total_bits -= shift_by;/* BUG */
+                }
+                else
+                {
+                    shift_by = 32 - total_bits;
+                    data.word[0] = data.word[0] >> shift_by;
+                }
+                *out = data.word[0];
+                if(skip)
+                {
+                    /* update au byte position if needed */
+                    viddec_pm_utils_update_skipoffsets(bstream, total_bits, act_bytes);
+                    cxt->phase = phase;
+
+                    /* Accumulate any emulation bytes consumed by this read */
+                    if (act_bytes > bytes_required)
+                    {
+                        cxt->emulation_byte_counter += act_bytes - bytes_required;
+                    }
+                }
+
+                ret =1;
+            }
+        }
+    }
+    return ret;
+}
diff --git a/mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_list.c b/mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_list.c
new file mode 100644
index 0000000..ccc83b3
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_list.c
@@ -0,0 +1,221 @@
+#include "viddec_pm_utils_list.h"
+#include "viddec_fw_debug.h"
+
+/*
+   Initialize list.
+   Empties the ES buffer list; end_offset of -1 marks "no end found yet".
+ */
+void viddec_pm_utils_list_init(viddec_pm_utils_list_t *cxt)
+{
+    cxt->num_items = 0;
+    cxt->start_offset = 0;
+    cxt->end_offset = -1;
+    cxt->total_bytes = 0;
+    cxt->first_scprfx_length = 0;
+}
+
+/*
+   Add a new ES buffer to list (copied by value). Returns 1 on success,
+   0 when the list already holds MAX_IBUFS_PER_SC entries.
+ */
+uint32_t viddec_pm_utils_list_addbuf(viddec_pm_utils_list_t *list, viddec_input_buffer_t *es_buf)
+{
+    uint32_t ret = 0;
+    if((list->num_items + 1) <= MAX_IBUFS_PER_SC)
+    {
+        list->num_items +=1;
+        list->sc_ibuf[list->num_items - 1] = *es_buf;
+        ret = 1;
+    }
+    return ret;
+}
+
+/*
+   We return the index of es buffer and the offset into it for the requested byte offset.
+   EX: if byte=4, and the first es buffer in list is of length 100, we return list_index=0, offset=3.
+   byte value should range from [1-N].
+   Returns 0 on success, 1 when the byte lies beyond all buffers in the list.
+ */
+uint32_t viddec_pm_utils_list_getbyte_position(viddec_pm_utils_list_t *list, uint32_t byte, uint32_t *list_index, uint32_t *offset)
+{
+    uint32_t index = 0, accumulated_size=0;
+
+    /* First buffer in list is always special case, since start offset is tied to it */
+    accumulated_size = list->sc_ibuf[index].len - list->start_offset;
+    if( accumulated_size >= byte)
+    {
+        /* we found a match in first buffer itself */
+        *offset = list->start_offset + byte - 1;
+        *list_index = index;
+        return 0;
+    }
+    index++;
+    /* walkthrough the list until we find the byte */
+    while(index < list->num_items)
+    {
+        if((accumulated_size + list->sc_ibuf[index].len) >= byte)
+        {
+            *offset = byte - accumulated_size - 1;
+            *list_index = index;
+            return 0;
+        }
+        accumulated_size += list->sc_ibuf[index].len;
+        index++;
+    }
+    return 1;
+}
+
+/*
+   Since the stream data can span multiple ES buffers on different DDR locations, for our purpose
+   we store start and end position on each ES buffer to make the data look linear.
+   The start represents the linear offset of the first byte in list.
+   end-1 represents linear offset of last byte in list.
+   Buffers entirely past the access unit get degenerate (start==end) ranges so
+   later walks skip them.
+ */
+void viddec_pm_utils_list_updatebytepos(viddec_pm_utils_list_t *list, uint8_t sc_prefix_length)
+{
+    uint32_t items=0;
+    uint32_t start=0, end=0;
+
+    if(list->num_items != 0)
+    {
+        /* First buffer: its usable length starts after start_offset */
+        end = list->sc_ibuf[0].len - list->start_offset;
+        if((int32_t)end >= list->total_bytes) end = list->total_bytes;
+        list->data[items].stpos = start;
+        list->data[items].edpos = end;
+        items++;
+        while((int32_t)end < list->total_bytes)
+        {
+            start = end;
+            end += list->sc_ibuf[items].len;
+            if((int32_t)end >= list->total_bytes) end = list->total_bytes;
+            list->data[items].stpos = start;
+            list->data[items].edpos = end;
+            items++;
+        }
+        /* Mark any remaining buffers as lying beyond the access unit */
+        while(items < list->num_items)
+        {
+            if(sc_prefix_length != 0)
+            {
+                start = end = list->total_bytes+1;
+            }
+            else
+            {
+                start = end = list->total_bytes;
+            }
+            list->data[items].stpos = start;
+            list->data[items].edpos = end;
+            items++;
+        }
+        /* Normal access unit sequence is SC+data+SC. We read SC+data+SC bytes so far.
+           but the current access unit should be SC+data, the Second SC belongs to next access unit.
+           So we subtract SC length to reflect that */
+        list->total_bytes -= sc_prefix_length;
+    }
+}
+
+/* Append a slice ES workload item, dropping zero-length entries unless they
+   carry the END_SLICE flag (which must always reach the decoder). */
+static inline void viddec_pm_utils_list_emit_slice_tags_append(viddec_emitter_wkld *cur_wkld, viddec_workload_item_t *wi)
+{
+    /*
+      Most of the time len >0. However we can have a condition on EOS where the last buffer can be
+      zero sized in which case we want to make sure that we emit END of SLICE information.
+    */
+    if((wi->es.es_phys_len != 0) || (wi->es.es_flags&VIDDEC_WORKLOAD_FLAGS_ES_END_SLICE))
+    {
+        viddec_emit_append(cur_wkld, wi);
+    }
+}
+
+/*
+   Emit requested tags for data from start to end position. The tags should include end byte too.
+   start/end are linear AU offsets; the data may span several ES buffers, in
+   which case one workload item is emitted per buffer, with START_SLICE on the
+   first and END_SLICE on the last.
+ */
+void viddec_pm_utils_list_emit_slice_tags(viddec_pm_utils_list_t *list, uint32_t start, uint32_t end, viddec_emitter *emitter, uint32_t is_cur_wkld, viddec_workload_item_t *wi)
+{
+    if((list->num_items != 0) && ((int32_t)start < (list->total_bytes)) && ((int32_t)end <= (list->total_bytes)))
+    {
+        uint32_t flags=0, items=0;
+        viddec_emitter_wkld *cur_wkld;
+
+        flags = wi->es.es_flags;
+        cur_wkld = (is_cur_wkld != 0) ? &(emitter->cur):&(emitter->next);
+        /* Seek until we find a ES buffer entry which has the start position */
+        while(start >= list->data[items].edpos) items++;
+
+        if(end < list->data[items].edpos)
+        { /* One ES buffer has both start and end in it. So dump a single entry */
+            wi->es.es_phys_len = end - start + 1;
+            wi->es.es_phys_addr = list->sc_ibuf[items].phys + start - list->data[items].stpos;
+            /* Account for start_offset if its the first buffer in List */
+            if(items == 0) wi->es.es_phys_addr += list->start_offset;
+
+            wi->es.es_flags = flags | VIDDEC_WORKLOAD_FLAGS_ES_START_SLICE | VIDDEC_WORKLOAD_FLAGS_ES_END_SLICE;
+            viddec_pm_utils_list_emit_slice_tags_append(cur_wkld, wi);
+        }
+        else
+        {
+            /* We know that there are at least two buffers for the requested data. Dump the first item */
+            wi->es.es_phys_len = list->data[items].edpos - start;
+            wi->es.es_phys_addr = list->sc_ibuf[items].phys + start - list->data[items].stpos;
+            if(items == 0) wi->es.es_phys_addr += list->start_offset;
+            wi->es.es_flags = flags | VIDDEC_WORKLOAD_FLAGS_ES_START_SLICE;
+            viddec_pm_utils_list_emit_slice_tags_append(cur_wkld, wi);
+            items++;
+            /* Dump everything in between if any until the last buffer */
+            while(end >= list->data[items].edpos)
+            {
+                wi->es.es_phys_len = list->data[items].edpos - list->data[items].stpos;
+                wi->es.es_phys_addr = list->sc_ibuf[items].phys;
+                wi->es.es_flags = flags;
+                viddec_pm_utils_list_emit_slice_tags_append(cur_wkld, wi);
+                items++;
+            }
+            /* Dump ES buffer which has end in it along with end slice flag */
+            wi->es.es_phys_len = end - list->data[items].stpos + 1;
+            wi->es.es_phys_addr = list->sc_ibuf[items].phys;
+            wi->es.es_flags = flags | VIDDEC_WORKLOAD_FLAGS_ES_END_SLICE;
+            viddec_pm_utils_list_emit_slice_tags_append(cur_wkld, wi);
+        }
+    }
+}
+
+/*
+   We delete the consumed buffers in our list. If there are any buffers left over which have more data
+   they get moved to the top of the list array.
+   'length' is the number of unconsumed bytes that remain (0 empties the list).
+ */
+void viddec_pm_utils_list_remove_used_entries(viddec_pm_utils_list_t *list, uint32_t length)
+{
+    list->end_offset = -1;
+
+    if(list->num_items != 0)
+    {
+        if(length != 0)
+        {
+            uint32_t items = list->num_items-1, byte_pos;
+            uint32_t index=0;
+            viddec_input_buffer_t *es_buf;
+            byte_pos = list->total_bytes;
+            /* Walk back to the first buffer still holding live data */
+            while((list->data[items].edpos > byte_pos) && (list->data[items].stpos > byte_pos))
+            {
+                items--;
+            }
+            if(items != 0)
+            {
+                /* Compact surviving buffers to the front of the array */
+                list->start_offset = byte_pos - list->data[items].stpos;
+                while(items < list->num_items)
+                {
+                    es_buf = &(list->sc_ibuf[items]);
+                    list->sc_ibuf[index] = *es_buf;
+                    index++;
+                    items++;
+                }
+                list->num_items = index;
+            }
+            else
+            {
+                list->start_offset += (byte_pos - list->data[items].stpos);
+            }
+        }
+        else
+        {
+            list->num_items = 0;
+            list->start_offset = 0;
+        }
+        list->total_bytes = length;
+    }
+}
diff --git a/mix_vbp/viddec_fw/include/viddec_fw_common_defs.h b/mix_vbp/viddec_fw/include/viddec_fw_common_defs.h
new file mode 100644
index 0000000..bc2c239
--- /dev/null
+++ b/mix_vbp/viddec_fw/include/viddec_fw_common_defs.h
@@ -0,0 +1,200 @@
+/*
+ This file is provided under a dual BSD/GPLv2 license. When using or
+ redistributing this file, you may do so under either license.
+
+ GPL LICENSE SUMMARY
+
+ Copyright(c) 2007-2009 Intel Corporation. All rights reserved.
+
+ This program is free software; you can redistribute it and/or modify
+ it under the terms of version 2 of the GNU General Public License as
+ published by the Free Software Foundation.
+
+ This program is distributed in the hope that it will be useful, but
+ WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program; if not, write to the Free Software
+ Foundation, Inc., 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA.
+ The full GNU General Public License is included in this distribution
+ in the file called LICENSE.GPL.
+
+ Contact Information:
+
+ BSD LICENSE
+
+ Copyright(c) 2007-2009 Intel Corporation. All rights reserved.
+ All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in
+ the documentation and/or other materials provided with the
+ distribution.
+ * Neither the name of Intel Corporation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+*/
+#ifndef VIDDEC_FW_COMMON_DEFS_H
+#define VIDDEC_FW_COMMON_DEFS_H
+
+#define VIDDEC_FW_PARSER_IPC_HOST_INT 0x87654321
+#define EMITTER_WORKLOAD_ENTRIES 2048
+
+/* This enum defines priority level for opening a stream. */
+enum viddec_stream_priority
+{
+    viddec_stream_priority_BACKGROUND, /* Lowest priority stream */
+    viddec_stream_priority_REALTIME, /* Real time highest priority stream */
+    viddec_stream_priority_INVALID, /* Sentinel; not a valid priority */
+};
+
+/* This enum defines supported flush types. */
+enum viddec_stream_flushtype
+{
+    VIDDEC_STREAM_FLUSH_DISCARD, /* Reinitialise to start state */
+    VIDDEC_STREAM_FLUSH_PRESERVE, /* Reinitialise to start state by preserving sequence info*/
+};
+
+/* In-band message flags carried in viddec_input_buffer.flags */
+enum viddec_stream_inband_flags
+{
+    VIDDEC_STREAM_DEFAULT_FLAG=0, /* Default value for flags */
+    VIDDEC_STREAM_EOS, /* End of stream message */
+    VIDDEC_STREAM_DISCONTINUITY, /* new segment which forces flush and preserve */
+};
+
+/* Message descriptor for Parser's Input and output queues. needs to be 8 byte aligned */
+typedef struct viddec_input_buffer
+{
+    unsigned int flags; /* Flags for Inband messages like EOS, valid range defined in viddec_stream_inband_flags */
+    unsigned int phys;/* DDR addr of where ES/WKLD is at. */
+    unsigned int len;/* size of buffer at phys_addr */
+    unsigned int id;/* A id for the buffer which is not used or modified by the FW. */
+#ifdef HOST_ONLY
+    unsigned char *buf; /* virt pointer to buffer. This is a don't care for FW */
+#endif
+}ipc_msg_data;
+
+/* Aliases used by host-side and IPC code respectively */
+typedef ipc_msg_data viddec_input_buffer_t;
+typedef ipc_msg_data viddec_ipc_msg_data;
+
+/* Return types for interface functions */
+typedef enum
+{
+    VIDDEC_FW_SUCCESS, /* Successful with current operation */
+    VIDDEC_FW_NORESOURCES, /* No resources to execute the requested functionality */
+    VIDDEC_FW_FAILURE, /* Failed for unknown reason */
+    VIDDEC_FW_INVALID_PARAM, /* The parameters that were passed are Invalid */
+    VIDDEC_FW_PORT_FULL, /* The operation failed since queue is full */
+    VIDDEC_FW_PORT_EMPTY, /* The operation failed since queue is empty */
+    VIDDEC_FW_NEED_FREE_WKLD, /* The operation failed since a free wkld is not available */
+}viddec_fw_return_types_t;
+
+/* Defines for Interrupt mask and status (bit flags, can be OR'ed) */
+typedef enum
+{
+    VIDDEC_FW_WKLD_DATA_AVAIL=1, /* A processed workload is available */
+    VIDDEC_FW_INPUT_WATERMARK_REACHED=2, /* The input path is below the set watermark for current stream */
+}viddec_fw_parser_int_status_t;
+
+/* Defines for attributes on stream. If not set explicitly, default values apply. */
+typedef enum
+{
+    VIDDEC_FW_INPUT_Q_WATERMARK, /* Define for setting Input queue watermarks */
+    VIDDEC_FW_STREAM_PRIORITY, /* Define for setting stream priority */
+}viddec_fw_stream_attributes_t;
+
+/* Snapshot of parser-side queue occupancy for one stream */
+typedef struct
+{
+    unsigned int input_q_space; /* Num of messages that can be written to input queue */
+    unsigned int output_q_data; /* Num of messages in output queue */
+    unsigned int workload_q_status; /* Number of free wklds available to parser */
+}viddec_fw_q_status_t;
+
+/* Snapshot of decoder-side queue occupancy for one stream */
+typedef struct
+{
+    unsigned int to_fw_q_space; /* Num of messages that can be written to input queue */
+    unsigned int from_fw_q_data; /* Num of messages in output queue */
+}viddec_fw_decoder_q_status_t;
+
+/* Decoder interrupt status bits: one bit per stream, plus shared high-stream
+   and auto-API completion bits. */
+enum viddec_fw_decoder_int_status
+{
+    VIDDEC_FW_DECODER_INT_STATUS_STREAM_0 = (1<< 0), /* Decoder Stream 0 Requires Service */
+    VIDDEC_FW_DECODER_INT_STATUS_STREAM_1 = (1<< 1), /* Decoder Stream 1 Requires Service */
+    VIDDEC_FW_DECODER_INT_STATUS_STREAM_2 = (1<< 2), /* Decoder Stream 2 Requires Service */
+
+
+    VIDDEC_FW_DECODER_INT_STATUS_STREAM_HIGH = (1<<30), /* Any Decoder Stream >= 30 Requires Service */
+    VIDDEC_FW_DECODER_INT_STATUS_AUTO_API = (1<<31) /* An Auto-API Function has completed */
+};
+
+/** Hardware Accelerated stream formats */
+enum viddec_stream_format
+{
+    MFD_STREAM_FORMAT_MPEG=1,
+    MFD_STREAM_FORMAT_H264,
+    MFD_STREAM_FORMAT_VC1,
+    MFD_STREAM_FORMAT_MPEG42,
+
+    MFD_STREAM_FORMAT_MAX, /* must be last */
+    MFD_STREAM_FORMAT_INVALID
+};
+
+/* Workload specific error codes (bit flags OR'ed into the workload error field).
+   Bits 0-7 are reserved for non-decodable errors; bits 24+ are codec specific
+   (see viddec_fw_mpeg2_error_codes). */
+enum viddec_fw_workload_error_codes
+{
+    VIDDEC_FW_WORKLOAD_SUCCESS = 0,
+    VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE = (1 << 0),/* Parser/Decoder detected a non decodable error with this workload */
+    VIDDEC_FW_WORKLOAD_ERR_BUFFERS_OVERFLOW = (1 << 1),/* Parser Detected more than 64 buffers between two start codes */
+    VIDDEC_FW_WORKLOAD_ERR_ITEMS_OVERFLOW = (1 << 2),/* Parser Detected overflow of currently allocated workload memory */
+    VIDDEC_FW_WORKLOAD_ERR_FLUSHED_FRAME = (1 << 3),/* This is a partial or empty frame which was flushed by Parser/Decoder */
+    VIDDEC_FW_WORKLOAD_ERR_MISSING_DMEM = (1 << 4),/* This is a partial or empty frame from Parser/Decoder */
+    VIDDEC_FW_WORKLOAD_ERR_UNSUPPORTED = (1 << 5),/* Parser Detected unsupported feature in the stream */
+    /* First 8 bits reserved for Non Decodable errors */
+    VIDDEC_FW_WORKLOAD_ERR_CONCEALED = (1 << 9),/* The decoder concealed some errors in this frame */
+    VIDDEC_FW_WORKLOAD_ERR_MISSING_REFERENCE = (1 << 10),/* Decoder/parser detected at least one of the required reference frames is missing */
+    VIDDEC_FW_WORKLOAD_ERR_IN_REFERENCE = (1 << 11),/* Decoder/parser detected at least one of the reference frames has errors in it */
+    VIDDEC_FW_WORKLOAD_ERR_DANGLING_FLD = (1 << 12),/* Parser detected at least one of the fields are missing */
+    VIDDEC_FW_WORKLOAD_ERR_PARTIAL_SLICE = (1 << 13),/* Decoder detected at least one of the fields are missing */
+    VIDDEC_FW_WORKLOAD_ERR_MACROBLOCK = (1 << 14),/* Decoder detected macroblock errors */
+    VIDDEC_FW_WORKLOAD_ERR_MISSING_SEQ_INFO = (1 << 16),/* Parser detected sequence information is missing */
+
+    VIDDEC_FW_WORKLOAD_ERR_TOPFIELD = (1 << 17),/* Decoder/Parser detected errors in "top field" or "frame"*/
+    VIDDEC_FW_WORKLOAD_ERR_BOTTOMFIELD = (1 << 18),/* Decoder/Parser detected errors in "bottom field" or "frame" */
+    VIDDEC_FW_WORKLOAD_ERR_BITSTREAM_ERROR = (1 << 19),/* Parser detected errors */
+
+};
+
+/* MPEG2-specific error bits, occupying the codec-specific range above bit 23.
+   NOTE(review): bit values (1 << 26) and (1 << 27) are each assigned twice
+   (SEQ_DISP_EXT/PIC_HDR and GOP_HDR/PIC_COD_EXT), so those error pairs are
+   indistinguishable to the host — confirm intended values before changing,
+   since hosts may already match on these bits. */
+enum viddec_fw_mpeg2_error_codes
+{
+    VIDDEC_FW_MPEG2_ERR_CORRUPTED_SEQ_HDR = (1 << 24),/* Parser detected corruption in sequence header. Will use the previous good sequence info, if found. */
+    VIDDEC_FW_MPEG2_ERR_CORRUPTED_SEQ_EXT = (1 << 25),/* Parser detected corruption in sequence extension. */
+    VIDDEC_FW_MPEG2_ERR_CORRUPTED_SEQ_DISP_EXT = (1 << 26),/* Parser detected corruption in sequence display extension. */
+    VIDDEC_FW_MPEG2_ERR_CORRUPTED_GOP_HDR = (1 << 27),/* Parser detected corruption in GOP header. */
+    VIDDEC_FW_MPEG2_ERR_CORRUPTED_PIC_HDR = (1 << 26),/* Parser detected corruption in picture header. */
+    VIDDEC_FW_MPEG2_ERR_CORRUPTED_PIC_COD_EXT = (1 << 27),/* Parser detected corruption in picture coding extension. */
+    VIDDEC_FW_MPEG2_ERR_CORRUPTED_PIC_DISP_EXT = (1 << 28),/* Parser detected corruption in picture display extension. */
+    VIDDEC_FW_MPEG2_ERR_CORRUPTED_QMAT_EXT = (1 << 29),/* Parser detected corruption in quantization matrix extension. */
+};
+
+#endif
diff --git a/mix_vbp/viddec_fw/include/viddec_fw_decoder_host.h b/mix_vbp/viddec_fw/include/viddec_fw_decoder_host.h
new file mode 100644
index 0000000..3a07af0
--- /dev/null
+++ b/mix_vbp/viddec_fw/include/viddec_fw_decoder_host.h
@@ -0,0 +1,242 @@
+/*
+ This file is provided under a dual BSD/GPLv2 license. When using or
+ redistributing this file, you may do so under either license.
+
+ GPL LICENSE SUMMARY
+
+ Copyright(c) 2007-2009 Intel Corporation. All rights reserved.
+
+ This program is free software; you can redistribute it and/or modify
+ it under the terms of version 2 of the GNU General Public License as
+ published by the Free Software Foundation.
+
+ This program is distributed in the hope that it will be useful, but
+ WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program; if not, write to the Free Software
+ Foundation, Inc., 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA.
+ The full GNU General Public License is included in this distribution
+ in the file called LICENSE.GPL.
+
+ Contact Information:
+
+ BSD LICENSE
+
+ Copyright(c) 2007-2009 Intel Corporation. All rights reserved.
+ All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in
+ the documentation and/or other materials provided with the
+ distribution.
+ * Neither the name of Intel Corporation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+*/
+
+#ifndef VIDDEC_FW_DECODER_HOST_H
+#define VIDDEC_FW_DECODER_HOST_H
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#include "viddec_fw_common_defs.h"
+
+/** @weakgroup viddec Fw Decoder interface Functions */
+/** @ingroup viddec_fw_decoder */
+/*@{*/
+
+/**
+ This function returns the size required for loading fw.
+ @retval size : Required size.
+*/
+ uint32_t viddec_fw_decoder_query_fwsize(void);
+
+/**
+ This function loads Decoder Firmware and initialises necessary state information.
+ @param[in] phys : Physical address on where firmware should be loaded.
+ @param[in] len : Length of data allocated at phys.
+ @retval VIDDEC_FW_SUCCESS : Successfully loaded firmware.
+ @retval VIDDEC_FW_FAILURE : Failed to communicate with firmware.
+ @retval VIDDEC_FW_NORESOURCES : Failed to allocate resources for Loading firmware.
+ @retval VIDDEC_FW_INVALID_PARAM: The input parameters are not valid.
+*/
+ uint32_t viddec_fw_decoder_loadfw(uint32_t phys, uint32_t len);
+
+/**
+ This function returns required size for global memory for all supported decoders. This is a synchronous message to FW.
+ @param[out] size : returns the size required.
+ @retval VIDDEC_FW_SUCCESS : Successfully got required information from FW.
+ @retval VIDDEC_FW_FAILURE : Failed to communicate with firmware.
+*/
+ uint32_t viddec_fw_decoder_query_fwsize_scratchmem(uint32_t *size);
+
+/**
+ This function sets global memory for the firmware to use.This is a synchronous message to FW.
+ @param[in] phys : Physical address on where global memory starts.
+ @param[in] len : Length of data allocated at phys.
+ @retval VIDDEC_FW_SUCCESS : Successfully setup global memory.
+ @retval VIDDEC_FW_FAILURE : Failed to communicate with firmware.
+*/
+ uint32_t viddec_fw_decoder_set_fw_scratchmem(uint32_t phys, uint32_t len);
+
+/**
+ This function returns the size required opening a stream. This a synchronous message to FW.
+ @param[in] codec_type : Type of codec that we want information about.
+ @param[out] size : Size of memory required for opening a stream.
+ @retval VIDDEC_FW_SUCCESS : Successfully talked to FW and got required size.
+ @retval VIDDEC_FW_FAILURE : Failed to communicate with firmware.
+*/
+ uint32_t viddec_fw_decoder_query_streamsize(uint32_t codec_type, uint32_t *size);
+
+/**
+ This function opens requested codec.This a synchronous message to FW.
+ @param[in] codec_type : Type of codec that we want to open.
+ @param[in] phys : Physical address of allocated memory for this codec.
+ @param[in] priority : Priority of stream. 1 for realtime and 0 for background.
+ @param[out] strm_handle : Handle of the opened stream.
+ @retval VIDDEC_FW_SUCCESS : Successfully Opened the stream.
+ @retval VIDDEC_FW_FAILURE : Failed to Open a stream.
+*/
+ uint32_t viddec_fw_decoder_openstream(uint32_t codec_type, uint32_t *strm_handle, uint32_t phys, uint32_t priority);
+
+
+/**
+ This function closes stream.This a synchronous message to FW.
+ @param[in] strm_handle : Handle of the stream to close.
+*/
+ void viddec_fw_decoder_closestream(uint32_t strm_handle);
+
+/**
+ This function allows to get current status of the decoder workload queues. If the current stream is active we return
+ number of input messages that can be written to input queue and the number of messages in output queue of the stream.
+
+ Normally this is called when Host receives an interrupt from decoder, In which case before releasing the INT
+ Host will try its best to keep the FW busy. Normally when a interrupt is received it means at least one workload is
+ written into output queue of a stream.
+ @param[in] strm_handle : The handle of stream that we want to get status of queues.
+ @param[out] status : The status of each queue gets updated in here.
+ @retval VIDDEC_FW_SUCCESS : Successfully Got the status information.
+ @retval VIDDEC_FW_INVALID_PARAM: Invalid parameter in this case an inactive stream.
+*/
+ uint32_t viddec_fw_decoder_get_queue_status(uint32_t strm_handle, viddec_fw_decoder_q_status_t *status);
+
+/**
+ This function flushes the current stream. This is a synchronous message to FW.
+ Before calling this function the host has to make sure the output queue of the firmware
+ is empty. After this function is executed the FW will read all entries in input
+ wkld buffer queue into output queue. After this operation the host has to read all entries
+ in output queue again to finish the flush operation.
+ @param[in] flush_type : Type of flush we want to perform.ex:flush and discard.
+ @param[in] strm_handle : Handle of the stream we want to flush.
+ @retval VIDDEC_FW_SUCCESS : Successfully flushed the stream.
+ @retval VIDDEC_FW_FAILURE : Failed to flush a stream.
+*/
+ uint32_t viddec_fw_decoder_flushstream(uint32_t strm_handle, uint32_t flush_type);
+
+/**
+ This function sends an input workload buffer. The host should provide required frame buffers in this workload before
+ sending it to fw.
+ @param[in] strm_handle : The handle of stream that we want to send workload buffer to.
+ @param[in] cur_wkld : The workload buffer we want to send.
+ @retval VIDDEC_FW_SUCCESS : Successfully Sent the message.
+ @retval VIDDEC_FW_PORT_FULL : Port to fw full, unsuccessful in sending message.
+*/
+ uint32_t viddec_fw_decoder_send(uint32_t strm_handle, ipc_msg_data *cur_wkld);
+
+/**
+ This function gets the decoded workload from fw.
+ @param[in] strm_handle : The handle of stream that we want to read workload from.
+ @param[out] cur_wkld : The workload descriptor.
+ @retval VIDDEC_FW_SUCCESS : Successfully Sent the message.
+ @retval VIDDEC_FW_PORT_EMPTY : Workload port is empty, unsuccessful in reading wkld.
+*/
+ uint32_t viddec_fw_decoder_recv(uint32_t strm_handle, ipc_msg_data *cur_wkld);
+
+/**
+ This function unloads Decoder Firmware and frees the resources allocated in Load fw.
+ If this function is called before load fw it will crash with a segmentation fault.
+*/
+ void viddec_fw_decoder_deinit(void);
+
+/**
+ This function gets the major and minor revision numbers of the loaded firmware.
+ @param[out] major : The major revision number.
+ @param[out] minor : The minor revision number.
+ @param[out] build : The Internal Build number.
+*/
+ void viddec_fw_decoder_get_version_number(unsigned int *major, unsigned int *minor, unsigned int *build);
+
+/**
+ This function returns the interrupt status of all streams which need to be processed. A value of zero
+ means no active streams which generated this interrupt.
+*/
+ uint32_t viddec_fw_decoder_active_pending_interrupts(void);
+
+/**
+ This function clears the interrupts for all active streams represented by status input parameter.
+ The status should always be a value that was returned by viddec_fw_decoder_active_pending_interrupts().
+ @param[in] status : The status value that was returned by viddec_fw_decoder_active_pending_interrupts().
+*/
+ void viddec_fw_decoder_clear_all_pending_interrupts(uint32_t status);
+
+/**
+ This function enables/disables interrupt for the stream specified.
+ @param[in] strm_handle : The handle of stream that we want enable or disable interrupts for.
+ @param[in] enable : Boolean value if ==0 means disable Interrupts else enable.
+ @retval VIDDEC_FW_SUCCESS : Successfully Sent the message.
+ @retval VIDDEC_FW_INVALID_PARAM: Invalid stream handle was passed.
+*/
+ uint32_t viddec_fw_decoder_set_stream_interrupt_mask(uint32_t stream_handle, uint32_t enable);
+
+/**
+ This function returns which stream interrupted in the past based on status, which is a snapshot of
+ interrupt status that was cleared in the past. The host has to call clear with status information
+ before calling this function again with status value. The Host should do this operation until this function
+ returns 0, which means all the streams that generated interrupt have been processed.
+ @param[out]strm_handle : The handle of a stream that generated interrupt.
+ @param[in] status : Snapshot of Interrupt status which was returned by viddec_fw_decoder_active_pending_interrupts().
+ @retval 1 : A valid stream handle was found.
+ @retval 0 : No more streams from the status which caused interrupt.
+*/
+ uint32_t viddec_fw_decoder_get_next_stream_from_interrupt_status(uint32_t status, uint32_t *stream_handle);
+
+/**
+ This function clears the stream_handle from the status snapshot that we got from viddec_fw_decoder_active_pending_interrupts(),
+ This should be called after host performs all necessary actions for the stream.
+ @param[in] strm_handle : The handle of a stream that we want to clear to indicate we handled it.
+ @param[in] status : Snapshot of Interrupt status which was returned by viddec_fw_decoder_active_pending_interrupts().
+ @retval 1 : Operation was successful.
+ @retval 0 : Invalid stream handle was passed.
+*/
+ uint32_t viddec_fw_decoder_clear_stream_from_interrupt_status(uint32_t *status, uint32_t stream_handle);
+
+/*@}*/
+#ifdef __cplusplus
+}
+#endif
+
+#endif//#ifndef VIDDEC_FW_DECODER_HOST_H
diff --git a/mix_vbp/viddec_fw/include/viddec_fw_frame_attr.h b/mix_vbp/viddec_fw/include/viddec_fw_frame_attr.h
new file mode 100644
index 0000000..a816dd4
--- /dev/null
+++ b/mix_vbp/viddec_fw/include/viddec_fw_frame_attr.h
@@ -0,0 +1,281 @@
+/*
+ This file is provided under a dual BSD/GPLv2 license. When using or
+ redistributing this file, you may do so under either license.
+
+ GPL LICENSE SUMMARY
+
+ Copyright(c) 2007-2009 Intel Corporation. All rights reserved.
+
+ This program is free software; you can redistribute it and/or modify
+ it under the terms of version 2 of the GNU General Public License as
+ published by the Free Software Foundation.
+
+ This program is distributed in the hope that it will be useful, but
+ WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program; if not, write to the Free Software
+ Foundation, Inc., 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA.
+ The full GNU General Public License is included in this distribution
+ in the file called LICENSE.GPL.
+
+ Contact Information:
+
+ BSD LICENSE
+
+ Copyright(c) 2007-2009 Intel Corporation. All rights reserved.
+ All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in
+ the documentation and/or other materials provided with the
+ distribution.
+ * Neither the name of Intel Corporation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+*/
+#ifndef VIDDEC_FW_FRAME_ATTR_H
+#define VIDDEC_FW_FRAME_ATTR_H
+
+#define VIDDEC_PANSCAN_MAX_OFFSETS 4
+#define VIDDEC_MAX_CPB_CNT 32
+
+/**
+This enumeration lists all the frame types defined by the MPEG, VC1 and H264 specifications.
+Frame types applicable to a single codec are specified in the comments.
+*/
+typedef enum
+{
+ VIDDEC_FRAME_TYPE_INVALID=0, /** Unknown type - default value */
+ VIDDEC_FRAME_TYPE_IDR=0x1, /** IDR frame - h264 only */
+ VIDDEC_FRAME_TYPE_I=0x2, /** I frame */
+ VIDDEC_FRAME_TYPE_P=0x3, /** P frame */
+ VIDDEC_FRAME_TYPE_B=0x4, /** B frame */
+ VIDDEC_FRAME_TYPE_BI=0x5, /** BI frame - Intracoded B frame - vc1 only */
+ VIDDEC_FRAME_TYPE_SKIP=0x6, /** Skipped frame - vc1 only */
+ VIDDEC_FRAME_TYPE_D=0x7, /** D frame - mpeg1 only */
+ VIDDEC_FRAME_TYPE_S=0x8, /** SVOP frame - mpeg4 only - sprite encoded frame - treat as P */
+ VIDDEC_FRAME_TYPE_MAX,
+} viddec_frame_type_t;
+
+/**
+This structure contains the content size info extracted from the stream.
+*/
+typedef struct viddec_rect_size
+{
+ unsigned int width;
+ unsigned int height;
+}viddec_rect_size_t;
+
+/**
+This structure contains MPEG2 specific pan scan offsets extracted from the stream.
+*/
+typedef struct viddec_mpeg2_frame_center_offset
+{
+ int horz;
+ int vert;
+}viddec_mpeg2_frame_center_offset_t;
+
+/**
+This structure contains the MPEG2 specific frame attributes.
+*/
+typedef struct viddec_mpeg2_frame_attributes
+{
+ /**
+ 10 bit unsigned integer corresponding to the display order of each coded picture
+ in the stream (or gop if gop header is present).
+ Refer to "temporal_reference" of the picture header in ITU-T H.262 Specification.
+ */
+ unsigned int temporal_ref;
+
+ /**
+ Pan/Scan rectangle info
+ Refer to the picture display extension in ITU-T H.262 Specification.
+ */
+ viddec_mpeg2_frame_center_offset_t frame_center_offset[VIDDEC_PANSCAN_MAX_OFFSETS];
+ unsigned int number_of_frame_center_offsets;
+
+ /**
+ Top-Field first flag
+ Refer to "top_field_first" of the picture coding extension in ITU-T H.262 Specification.
+ */
+ unsigned int top_field_first;
+
+ /**
+ Progressive frame flag - Indicates if current frame is progressive or not.
+ Refer to "progressive_frame" of the picture coding extension in ITU-T H.262 Specification.
+ */
+ unsigned int progressive_frame;
+
+ /**
+ Frame/field polarity for each coded picture.
+ Refer to Table 6-14 in ITU-T H.262 Specification.
+ */
+ unsigned int picture_struct;
+
+ /**
+ Repeat field/frame flag.
+ Refer to "repeat_first_field" of the picture coding extension in ITU-T H.262 Specification.
+ */
+ unsigned int repeat_first_field;
+
+}viddec_mpeg2_frame_attributes_t;
+
+/**
+This structure contains the VC1 specific pan scan window extracted from the stream.
+*/
+typedef struct viddec_vc1_pan_scan_window
+{
+ unsigned int hoffset;
+ unsigned int voffset;
+ unsigned int width;
+ unsigned int height;
+}viddec_vc1_pan_scan_window_t;
+
+/**
+This structure contains the VC1 specific frame attributes.
+*/
+typedef struct viddec_vc1_frame_attributes
+{
+ /**
+ Temporal Reference of frame/field.
+ Refer to "TFCNTR" in the picture layer of the SMPTE VC1 Specification.
+ */
+ unsigned int tfcntr;
+
+ /**
+ Frame/field repeat information in the bitstream.
+ Refer to "RPTFRM", "TFF", "BFF" in the picture layer
+ of the SMPTE VC1 Specification.
+ */
+ unsigned int rptfrm;
+ unsigned int tff;
+ unsigned int rff;
+
+ /**
+ Pan-scan information in the bitstream.
+ Refer to "PANSCAN_FLAG" in the entrypoint layer, "PS_PRESENT", "PS_HOFFSET", "PS_VOFFSET",
+ "PS_WIDTH" and "PS_HEIGHT" in the picture layer of the SMPTE VC1 Specification.
+ */
+ unsigned int panscan_flag;
+ unsigned int ps_present;
+ unsigned int num_of_pan_scan_windows;
+ viddec_vc1_pan_scan_window_t pan_scan_window[VIDDEC_PANSCAN_MAX_OFFSETS];
+
+}viddec_vc1_frame_attributes_t;
+
+/**
+This structure contains the H264 specific frame attributes.
+*/
+typedef struct viddec_h264_frame_attributes
+{
+ /**
+ used_for_reference : 1 means this frame is used as a reference frame by other frames; 0 means no frame references this frame
+ */
+ ///// NOTE: this flag has not been enabled so far
+ unsigned int used_for_reference;
+
+
+ /**
+ Picture Order Count for the current frame/field.
+ This value is computed using information from the bitstream.
+ Refer to Section 8.2.1, function 8-1 of the ITU-T H.264 Specification.
+ */
+ // These fields will be supported in the future
+ int top_field_poc;
+ int bottom_field_poc;
+
+ /**
+ Display size, which is cropped from content size.
+ Currently, the cont_size is cropped, so this parameter is redundant, but in future, cont_size may be changed
+ */
+ viddec_rect_size_t cropped_size;
+
+ /**
+ top_field_first: 0 means bottom_field_POC is smaller than top_field_POC, else 1
+ */
+ unsigned int top_field_first;
+
+ /**
+ field_pic_flag: 0 means all slices of this frame are frame-base encoded, else 1
+ */
+ unsigned int field_pic_flag;
+
+}viddec_h264_frame_attributes_t;
+
+/**
+This structure contains the MPEG4 specific frame attributes.
+*/
+typedef struct viddec_mpeg4_frame_attributes
+{
+ /**
+ Top-Field first flag
+ Refer to "top_field_first" of the Video Object Plane of the MPEG4 Spec.
+ */
+ unsigned int top_field_first;
+
+}viddec_mpeg4_frame_attributes_t;
+
+/**
+This structure groups all the frame attributes that are exported by the firmware.
+The frame attributes are split into attributes that are common to all codecs and
+that are specific to codec type.
+As of this release, it is populated for mpeg2 only.
+*/
+typedef struct viddec_frame_attributes
+{
+ /**
+ Content size specified in the stream.
+ For MPEG2, refer to "horizontal_size_value, vertical_size_value" of the sequence header and
+ "horizontal_size_extension, vertical_size_extension" of the sequence extension in ITU-T H.262 Specification.
+ For H264, refer to "pic_width_in_mbs_minus1" and "pic_height_in_map_units_minus1" of the
+ sequence parameter set in ITU-T H.264 Specification.
+ For VC1, refer to "MAX_CODED_WIDTH" and "MAX_CODED_HEIGHT" in the sequence layer,
+ "CODED_SIZE_FLAG", "CODED_WIDTH" and "CODED_HEIGHT" in the entrypoint layer of the SMPTE VC1 Specification.
+ */
+ viddec_rect_size_t cont_size;
+
+ /**
+ Type of frame populated in the workload.
+ frame_type contains the frame type for progressive frame and the field type for the top field for interlaced frames.
+ bottom_field_type contains the field type for the bottom field for interlaced frames.
+ For MPEG2, refer to "picture_coding_type" in picture header (Table 6-12) in ITU-T H.262 Specification.
+ For H264, refer to "slice_type" in slice header (Table 7-6) in ITU-T H.264 Specification.
+ For VC1, refer to "PTYPE" and FPTYPE in the picture layer (Tables 33, 34, 35, 105) in SMPTE VC1 Specification.
+ */
+ viddec_frame_type_t frame_type;
+ viddec_frame_type_t bottom_field_type;
+
+ /** Codec specific attributes */
+ union
+ {
+ viddec_mpeg2_frame_attributes_t mpeg2;
+ viddec_vc1_frame_attributes_t vc1;
+ viddec_h264_frame_attributes_t h264;
+ viddec_mpeg4_frame_attributes_t mpeg4;
+ };
+
+}viddec_frame_attributes_t;
+
+#endif /* VIDDEC_FRAME_ATTR_H */
diff --git a/mix_vbp/viddec_fw/include/viddec_fw_item_types.h b/mix_vbp/viddec_fw/include/viddec_fw_item_types.h
new file mode 100644
index 0000000..66e5f59
--- /dev/null
+++ b/mix_vbp/viddec_fw/include/viddec_fw_item_types.h
@@ -0,0 +1,738 @@
+/*
+ This file is provided under a dual BSD/GPLv2 license. When using or
+ redistributing this file, you may do so under either license.
+
+ GPL LICENSE SUMMARY
+
+ Copyright(c) 2007-2009 Intel Corporation. All rights reserved.
+
+ This program is free software; you can redistribute it and/or modify
+ it under the terms of version 2 of the GNU General Public License as
+ published by the Free Software Foundation.
+
+ This program is distributed in the hope that it will be useful, but
+ WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program; if not, write to the Free Software
+ Foundation, Inc., 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA.
+ The full GNU General Public License is included in this distribution
+ in the file called LICENSE.GPL.
+
+ Contact Information:
+
+ BSD LICENSE
+
+ Copyright(c) 2007-2009 Intel Corporation. All rights reserved.
+ All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in
+ the documentation and/or other materials provided with the
+ distribution.
+ * Neither the name of Intel Corporation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+*/
+#ifndef VIDDEC_FW_ITEM_TYPES_H
+#define VIDDEC_FW_ITEM_TYPES_H
+
+
+/* The following macros are defined to pack data into 32 bit words.
+ mask: A 32 bit value of N 1 bits starting from lsb where N represents the length of data we are packing.
+ start: Bit start position of data we want.
+ ex: If we want to pack Height(16bits), width(16bits) where width is from (1:16) and height is from (17:32), these are
+ the start and mask values for width and height.
+ width: start = 0 mask=0xFFFF
+ Height:start= 16 mask=0xFFFF
+
+ extract: will extract data from x_32 of unsigned integer type from bit start offset(0 to 31) of length based on mask and returns in
+ unsigned integer type.
+ insert: Will insert N bits from val_32 ,where N is length based on mask, into x_32 from bit offset based on start. val_32 is expected to
+ be a unsigned int of N bits starting with lsb.
+*/
+
+#define viddec_fw_bitfields_extract(x_32, start, mask) (((x_32) >> (start)) & (mask) )
+#define viddec_fw_bitfields_insert(x_32, val_32, start, mask) ((x_32) = (((x_32) & ~( (mask) << (start))) | (((val_32) & (mask)) << (start))))
+
+/* Workload items type. Each item here represents data that Parser detected ex:slice data which
+ is used either by host or decoder.*/
+enum workload_item_type
+{
+ VIDDEC_WORKLOAD_INVALID=0x0, /* Unknown type */
+ VIDDEC_WORKLOAD_PIXEL_ES=0x100, /* Slice data tag */
+ VIDDEC_WORKLOAD_TAG=0x200, /* Frame association tag */
+ VIDDEC_WORKLOAD_USERDATA=0x300, /* user data tag */
+ VIDDEC_WORKLOAD_DECODER_INFO=0x400, /* decoder specific data tag which decoder module understands*/
+ VIDDEC_WORKLOAD_IBUF_DONE=0x500, /* Es buffer completely used tag */
+ VIDDEC_WORKLOAD_IBUF_CONTINUED=0x600, /* Es buffer partially used tag */
+ VIDDEC_WORKLOAD_TAG_BUFFER_LOOSE_START=0x700, /* ??? */
+ VIDDEC_WORKLOAD_REFERENCE_FRAME_REORDER=0x800, /* Reorder frames in DPB tag */
+ VIDDEC_WORKLOAD_DISPLAY_FRAME=0x900, /* Display order in DPB tag, for H264 NOT required??? */
+
+ VIDDEC_WORKLOAD_SEQUENCE_INFO=0xa00, /* MPEG2 Seq Hdr, H264 SPS, VC1 SeqLayer */
+ VIDDEC_WORKLOAD_DISPLAY_INFO=0xb00, /* MPEG2 Seq Disp Ext, H264 VUI */
+ VIDDEC_WORKLOAD_GOP_INFO=0xc00, /* MPEG2 GOP, VC1 Entrypoint */
+ VIDDEC_WORKLOAD_SEQ_USER_DATA=0xd00, /* MPEG2, VC1 Sequence Level User data */
+ VIDDEC_WORKLOAD_GOP_USER_DATA=0xe00, /* MPEG2, VC1 Gop Level User data */
+ VIDDEC_WORKLOAD_FRM_USER_DATA=0xf00, /* MPEG2 Picture User data, VC1 Frame User data */
+ VIDDEC_WORKLOAD_FLD_USER_DATA=0x1000, /* MPEG2, VC1 Field User data */
+ VIDDEC_WORKLOAD_SLC_USER_DATA=0x1100, /* VC1 Slice User data */
+ VIDDEC_WORKLOAD_VISUAL_OBJ_USER_DATA=0x1200, /* MPEG4 Visual Object User data */
+ VIDDEC_WORKLOAD_VIDEO_OBJ_USER_DATA=0x1300, /* MPEG4 Video Object Layer User data */
+
+ VIDDEC_WORKLOAD_MPEG2_SEQ_EXT=0x1150, /* MPEG2 Only - Sequence Extension */
+ VIDDEC_WORKLOAD_VC1_SEQ_HDR_STRUCT_A_C=0x1200, /* VC1 Only - FIXME(review): value 0x1200 collides with VIDDEC_WORKLOAD_VISUAL_OBJ_USER_DATA above; confirm intended value */
+
+ VIDDEC_WORKLOAD_H264_CROPPING=0x1400, /* H264 only */
+ VIDDEC_WORKLOAD_H264_PAN_SCAN=0x1500, /* H264 only */
+ VIDDEC_WORKLOAD_H264_VUI_TIMING_INFO=0x2100, /* H264 only */
+ VIDDEC_WORKLOAD_SEI_PIC_TIMING=0x1600, /* H264 only */
+ VIDDEC_WORKLOAD_SEI_PAN_SCAN_RECT=0x1700, /* H264 only */
+ VIDDEC_WORKLOAD_SEI_USER_DATA_REGISTERED=0x1800, /* H264 only */
+ VIDDEC_WORKLOAD_SEI_USER_DATA_UNREGISTERED=0x1900, /* H264 only */
+ VIDDEC_WORKLOAD_SEI_RECOVERY_POINT=0x1a00, /* H264 only */
+ VIDDEC_WORKLOAD_IBUF_EOS=0x1b00, /* EOS tag on last workload used for current stream */
+ VIDDEC_WORKLOAD_IBUF_DISCONTINUITY=0x1c00, /* Discontinuity tag on first workload after discontinuity */
+
+ VIDDEC_WORKLOAD_MPEG4_VISUAL_SEQ_OBJ=0x1d00, /* MPEG4 Only - Visual Sequence */
+ VIDDEC_WORKLOAD_MPEG4_VIDEO_OBJ=0x1e00, /* MPEG4 Only - Video Object Layer */
+ VIDDEC_WORKLOAD_MPEG4_GRP_VIDEO_OBJ=0x1f00, /* MPEG4 Only - Group of Video Object Planes */
+ VIDDEC_WORKLOAD_MPEG4_VIDEO_PLANE_SHORT=0x2000, /* MPEG4 Only - Video Plane with Short Header */
+
+ VIDDEC_WORKLOAD_REF_FRAME_SOURCE_0 = 0x10000, /* required reference frames tag, last eight bits tell the id of frame in dpb */
+ VIDDEC_WORKLOAD_REF_FRAME_RELEASE_0 = 0x20000, /* release frames tag, last eight bits tell the id of frame in dpb */
+ VIDDEC_WORKLOAD_REF_FRAME_DISPLAY_0 = 0x30000, /* Display order in DPB tag, for H264 */
+ VIDDEC_WORKLOAD_REF_FRAME_DROPOUT_0 = 0x40000, /* Release frames but not display, for H264 */
+ VIDDEC_WORKLOAD_EOS_RELEASE_FRAME_0 = 0x50000, /* Release list while EOS, last eight bits tell the id of frame in dpb */
+ VIDDEC_WORKLOAD_EOS_DISPLAY_FRAME_0 = 0x60000, /* Display list while EOS, last eight bits tell the id of frame in dpb */
+
+ VIDDEC_WORKLOAD_DPB_ACTIVE_FRAME_0 = 0x70000, /* required for H264 as it needs whole DPB for each frame */
+ VIDDEC_WORKLOAD_H264_REFR_LIST_0 = 0x80000, /* ref list 0 for H264 */
+ VIDDEC_WORKLOAD_H264_REFR_LIST_1 = 0x90000, /* ref list 1 for H264 */
+ VIDDEC_WORKLOAD_EOS_BEGIN_BOUNDARY = 0xa0000, /* eos items begin after this */
+ VIDDEC_WORKLOAD_DECODER_SPECIFIC = 0x100000, /* pvt info for decoder tags */
+ VIDDEC_WORKLOAD_MAX,
+};
+
+/* 16-byte workload */
+typedef struct viddec_workload_item
+{
+ enum workload_item_type vwi_type;
+ union
+ {
+ struct
+ {
+ unsigned int es_phys_addr;
+ unsigned int es_phys_len;
+ unsigned int es_flags;
+ }es;
+ struct
+ {
+ unsigned int tag_phys_addr;
+ unsigned int tag_phys_len;
+ unsigned int tag_value;
+ }tag;
+ struct
+ {
+ unsigned int data_offset;
+ unsigned int data_payload[2];
+ }data;
+ struct
+ {
+ signed int reference_id; /* Assigned by parser */
+ unsigned int luma_phys_addr; /* assigned by host, for DM */
+ unsigned int chroma_phys_addr; /* assigned by host, for DM */
+ }ref_frame;
+ struct /* when vwi_type == VIDDEC_WORKLOAD_REFERENCE_FRAME_REORDER */
+ {
+ signed int ref_table_offset; /* Index of first "reordered" */
+ /* index from Current[] for Next[offset+0], Ref[offset+1], Ref[offset+2], Ref[offset+3] */
+ unsigned int ref_reorder_00010203;
+ /* index from Current[] for Next[offset+4], Ref[offset+5], Ref[offset+6], Ref[offset+7] */
+ unsigned int ref_reorder_04050607;
+ } ref_reorder;
+ struct
+ {
+ /* we pack a maximum of 11 bytes of user data and 1 byte for size */
+ /* TODO: we can pack 12 bytes and use bottom 8 bits of type to indicate size */
+#define viddec_fw_get_user_data_size(x) ((x)->user_data.size)
+#define viddec_fw_get_user_data(x) (unsigned char *)&((x)->user_data.data_payload[0])
+ unsigned char size;
+ unsigned char data_payload[11];
+ /*
+ ITEM TYPES WHICH use this:
+ VIDDEC_WORKLOAD_SEI_USER_DATA_UNREGISTERED, VIDDEC_WORKLOAD_SEI_USER_DATA_REGISTERED,
+ VIDDEC_WORKLOAD_SEQ_USER_DATA, VIDDEC_WORKLOAD_GOP_USER_DATA,
+ VIDDEC_WORKLOAD_FRM_USER_DATA, VIDDEC_WORKLOAD_FLD_USER_DATA, VIDDEC_WORKLOAD_SLC_USER_DATA,
+ */
+ }user_data;
+ struct
+ {
+ /* MPEG-2 sequence_header() fields, bit-packed into two 32-bit items */
+ /* via the get/set macros below; the trailing pad keeps the payload */
+ /* three words wide like the sibling item structs. */
+ // Sequence Header Item I (From LSB):
+ // - horizontal_size_value - 12 bits
+ // - vertical_size_value - 12 bits
+ // - aspect_ratio_information - 4 bits
+ // - frame_rate_code - 4 bits
+ #define viddec_fw_mp2_sh_get_horizontal_size_value(x) viddec_fw_bitfields_extract( (x)->seq_hdr_item_1, 0, 0xFFF)
+ #define viddec_fw_mp2_sh_get_vertical_size_value(x) viddec_fw_bitfields_extract( (x)->seq_hdr_item_1, 12, 0xFFF)
+ #define viddec_fw_mp2_sh_get_aspect_ratio_information(x) viddec_fw_bitfields_extract( (x)->seq_hdr_item_1, 24, 0xF)
+ #define viddec_fw_mp2_sh_get_frame_rate_code(x) viddec_fw_bitfields_extract( (x)->seq_hdr_item_1, 28, 0xF)
+ #define viddec_fw_mp2_sh_set_horizontal_size_value(x, val) viddec_fw_bitfields_insert ( (x)->seq_hdr_item_1, val, 0, 0xFFF)
+ #define viddec_fw_mp2_sh_set_vertical_size_value(x, val) viddec_fw_bitfields_insert ( (x)->seq_hdr_item_1, val, 12, 0xFFF)
+ #define viddec_fw_mp2_sh_set_aspect_ratio_information(x, val) viddec_fw_bitfields_insert ( (x)->seq_hdr_item_1, val, 24, 0xF)
+ #define viddec_fw_mp2_sh_set_frame_rate_code(x, val) viddec_fw_bitfields_insert ( (x)->seq_hdr_item_1, val, 28, 0xF)
+ unsigned int seq_hdr_item_1;
+
+ // Sequence Header Item II (From LSB):
+ // - bit_rate_value - 18 bits
+ // - vbv_buffer_size_value - 10 bits
+ // - remaining pad bits
+ #define viddec_fw_mp2_sh_get_bit_rate_value(x) viddec_fw_bitfields_extract( (x)->seq_hdr_item_2, 0, 0x3FFFF)
+ #define viddec_fw_mp2_sh_get_vbv_buffer_size_value(x) viddec_fw_bitfields_extract( (x)->seq_hdr_item_2, 18, 0x3FF)
+ #define viddec_fw_mp2_sh_set_bit_rate_value(x, val) viddec_fw_bitfields_insert ( (x)->seq_hdr_item_2, val, 0, 0x3FFFF)
+ #define viddec_fw_mp2_sh_set_vbv_buffer_size_value(x, val) viddec_fw_bitfields_insert ( (x)->seq_hdr_item_2, val, 18, 0x3FF)
+ unsigned int seq_hdr_item_2;
+
+ unsigned int pad;
+ } mp2_sh; // mp2 item of type VIDDEC_WORKLOAD_SEQUENCE_INFO
+ struct
+ {
+ /* MPEG-2 sequence_extension() fields, bit-packed per the layout */
+ /* comments below; offsets/masks in the macros match those layouts. */
+ // Sequence Extension Item I (From LSB):
+ // - profile_and_level_indication - 8 bits
+ // - progressive_sequence - 1 bit
+ // - chroma_format - 2 bits
+ // - horizontal_size_extension - 2 bits
+ // - vertical_size_extension - 2 bits
+ // - bit_rate_extension - 12 bits
+ // - remaining pad bits
+ #define viddec_fw_mp2_se_get_profile_and_level_indication(x) viddec_fw_bitfields_extract( (x)->seq_ext_item_1, 0, 0xFF)
+ #define viddec_fw_mp2_se_get_progressive_sequence(x) viddec_fw_bitfields_extract( (x)->seq_ext_item_1, 8, 0x1)
+ #define viddec_fw_mp2_se_get_chroma_format(x) viddec_fw_bitfields_extract( (x)->seq_ext_item_1, 9, 0x3)
+ #define viddec_fw_mp2_se_get_horizontal_size_extension(x) viddec_fw_bitfields_extract( (x)->seq_ext_item_1, 11, 0x3)
+ #define viddec_fw_mp2_se_get_vertical_size_extension(x) viddec_fw_bitfields_extract( (x)->seq_ext_item_1, 13, 0x3)
+ #define viddec_fw_mp2_se_get_bit_rate_extension(x) viddec_fw_bitfields_extract( (x)->seq_ext_item_1, 15, 0xFFF)
+ #define viddec_fw_mp2_se_set_profile_and_level_indication(x, val) viddec_fw_bitfields_insert ( (x)->seq_ext_item_1, val, 0, 0xFF)
+ #define viddec_fw_mp2_se_set_progressive_sequence(x, val) viddec_fw_bitfields_insert ( (x)->seq_ext_item_1, val, 8, 0x1)
+ #define viddec_fw_mp2_se_set_chroma_format(x, val) viddec_fw_bitfields_insert ( (x)->seq_ext_item_1, val, 9, 0x3)
+ #define viddec_fw_mp2_se_set_horizontal_size_extension(x, val) viddec_fw_bitfields_insert ( (x)->seq_ext_item_1, val, 11, 0x3)
+ #define viddec_fw_mp2_se_set_vertical_size_extension(x, val) viddec_fw_bitfields_insert ( (x)->seq_ext_item_1, val, 13, 0x3)
+ #define viddec_fw_mp2_se_set_bit_rate_extension(x, val) viddec_fw_bitfields_insert ( (x)->seq_ext_item_1, val, 15, 0xFFF)
+ unsigned int seq_ext_item_1;
+
+ // Sequence Extension Item II (From LSB):
+ // - vbv_buffer_size_extension - 8 bits
+ // - frame_rate_extension_n - 2 bits
+ // - frame_rate_extension_d - 5 bits
+ // - remaining pad bits
+ #define viddec_fw_mp2_se_get_vbv_buffer_size_extension(x) viddec_fw_bitfields_extract( (x)->seq_ext_item_2, 0, 0xFF)
+ #define viddec_fw_mp2_se_get_frame_rate_extension_n(x) viddec_fw_bitfields_extract( (x)->seq_ext_item_2, 8, 0x3)
+ #define viddec_fw_mp2_se_get_frame_rate_extension_d(x) viddec_fw_bitfields_extract( (x)->seq_ext_item_2, 10, 0x1F)
+ #define viddec_fw_mp2_se_set_vbv_buffer_size_extension(x, val) viddec_fw_bitfields_insert ( (x)->seq_ext_item_2, val, 0, 0xFF)
+ #define viddec_fw_mp2_se_set_frame_rate_extension_n(x, val) viddec_fw_bitfields_insert ( (x)->seq_ext_item_2, val, 8, 0x3)
+ #define viddec_fw_mp2_se_set_frame_rate_extension_d(x, val) viddec_fw_bitfields_insert ( (x)->seq_ext_item_2, val, 10, 0x1F)
+ unsigned int seq_ext_item_2;
+
+ unsigned int pad;
+ } mp2_se; // mp2 item of type VIDDEC_WORKLOAD_MPEG2_SEQ_EXT
+ struct
+ {
+ /* MPEG-2 sequence_display_extension() fields. NOTE(review): only */
+ /* color_primaries and transfer_characteristics are carried here; */
+ /* matrix_coefficients is not stored in this item. */
+ // Sequence Display Extension Item I (From LSB):
+ // - display_horizontal_size - 14 bits
+ // - display_vertical_size - 14 bits
+ // - video_format - 3 bits
+ // - color_description - 1 bit
+ #define viddec_fw_mp2_sde_get_display_horizontal_size(x) viddec_fw_bitfields_extract( (x)->seq_disp_ext_item_1, 0, 0x3FFF)
+ #define viddec_fw_mp2_sde_get_display_vertical_size(x) viddec_fw_bitfields_extract( (x)->seq_disp_ext_item_1, 14, 0x3FFF)
+ #define viddec_fw_mp2_sde_get_video_format(x) viddec_fw_bitfields_extract( (x)->seq_disp_ext_item_1, 28, 0x7)
+ #define viddec_fw_mp2_sde_get_color_description(x) viddec_fw_bitfields_extract( (x)->seq_disp_ext_item_1, 31, 0x1)
+ #define viddec_fw_mp2_sde_set_display_horizontal_size(x, val) viddec_fw_bitfields_insert ( (x)->seq_disp_ext_item_1, val, 0, 0x3FFF)
+ #define viddec_fw_mp2_sde_set_display_vertical_size(x, val) viddec_fw_bitfields_insert ( (x)->seq_disp_ext_item_1, val, 14, 0x3FFF)
+ #define viddec_fw_mp2_sde_set_video_format(x, val) viddec_fw_bitfields_insert ( (x)->seq_disp_ext_item_1, val, 28, 0x7)
+ #define viddec_fw_mp2_sde_set_color_description(x, val) viddec_fw_bitfields_insert ( (x)->seq_disp_ext_item_1, val, 31, 0x1)
+ unsigned int seq_disp_ext_item_1;
+
+ // Sequence Display Extension II (From LSB):
+ // - color_primaries - 8 bits
+ // - transfer_characteristics - 8 bits
+ // - remaining pad bits
+ #define viddec_fw_mp2_sde_get_color_primaries(x) viddec_fw_bitfields_extract( (x)->seq_disp_ext_item_2, 0, 0xFF)
+ #define viddec_fw_mp2_sde_get_transfer_characteristics(x) viddec_fw_bitfields_extract( (x)->seq_disp_ext_item_2, 8, 0xFF)
+ #define viddec_fw_mp2_sde_set_color_primaries(x, val) viddec_fw_bitfields_insert ( (x)->seq_disp_ext_item_2, val, 0, 0xFF)
+ #define viddec_fw_mp2_sde_set_transfer_characteristics(x, val) viddec_fw_bitfields_insert ( (x)->seq_disp_ext_item_2, val, 8, 0xFF)
+ unsigned int seq_disp_ext_item_2;
+
+ unsigned int pad;
+ } mp2_sde; // mp2 item of type VIDDEC_WORKLOAD_DISPLAY_INFO
+ struct
+ {
+ /* MPEG-2 group_of_pictures_header() flags; timecode fields are not */
+ /* carried in this item, only closed_gop and broken_link. */
+ // Group of Pictures Header Item I (From LSB):
+ // - closed_gop - 1 bit
+ // - broken_link - 1 bit
+ // - remaining pad bits
+ #define viddec_fw_mp2_gop_get_closed_gop(x) viddec_fw_bitfields_extract( (x)->gop_hdr_item_1, 0, 0x1)
+ #define viddec_fw_mp2_gop_get_broken_link(x) viddec_fw_bitfields_extract( (x)->gop_hdr_item_1, 1, 0x1)
+ #define viddec_fw_mp2_gop_set_closed_gop(x, val) viddec_fw_bitfields_insert ( (x)->gop_hdr_item_1, val, 0, 0x1)
+ #define viddec_fw_mp2_gop_set_broken_link(x, val) viddec_fw_bitfields_insert ( (x)->gop_hdr_item_1, val, 1, 0x1)
+ unsigned int gop_hdr_item_1;
+
+ unsigned int pad1;
+ unsigned int pad2;
+ } mp2_gop; // mp2 item of type VIDDEC_WORKLOAD_GOP_INFO
+ struct
+ {
+ /* VC-1 sequence-layer fields, bit-packed into 'size' and 'flags' per */
+ /* the layout comments on those members; use only the macros to access. */
+ #define viddec_fw_vc1_get_profile(x) viddec_fw_bitfields_extract((x)->size, 30, 0x3)
+ #define viddec_fw_vc1_set_profile(x, val) viddec_fw_bitfields_insert((x)->size, val, 30, 0x3)
+
+ #define viddec_fw_vc1_get_level(x) viddec_fw_bitfields_extract((x)->size, 27, 0x7)
+ #define viddec_fw_vc1_set_level(x, val) viddec_fw_bitfields_insert((x)->size, val, 27, 0x7)
+
+ #define viddec_fw_vc1_get_colordiff_format(x) viddec_fw_bitfields_extract((x)->size, 25, 0x3)
+ #define viddec_fw_vc1_set_colordiff_format(x, val) viddec_fw_bitfields_insert((x)->size, val, 25, 0x3)
+
+ #define viddec_fw_vc1_get_pulldown(x) viddec_fw_bitfields_extract((x)->size, 24, 0x1)
+ #define viddec_fw_vc1_set_pulldown(x, val) viddec_fw_bitfields_insert((x)->size, val, 24, 0x1)
+
+ #define viddec_fw_vc1_get_max_coded_width(x) viddec_fw_bitfields_extract((x)->size, 12, 0xFFF)
+ #define viddec_fw_vc1_set_max_coded_width(x, val) viddec_fw_bitfields_insert((x)->size, val, 12, 0xFFF)
+
+ #define viddec_fw_vc1_get_max_coded_height(x) viddec_fw_bitfields_extract((x)->size, 0, 0xFFF)
+ #define viddec_fw_vc1_set_max_coded_height(x, val) viddec_fw_bitfields_insert((x)->size, val, 0, 0xFFF)
+
+ #define viddec_fw_vc1_get_bitrtq_postproc(x) viddec_fw_bitfields_extract((x)->flags, 8, 0x1F)
+ #define viddec_fw_vc1_set_bitrtq_postproc(x, val) viddec_fw_bitfields_insert((x)->flags, val, 8, 0x1F)
+
+ #define viddec_fw_vc1_get_frmrtq_postproc(x) viddec_fw_bitfields_extract((x)->flags, 5, 0x7)
+ #define viddec_fw_vc1_set_frmrtq_postproc(x, val) viddec_fw_bitfields_insert((x)->flags, val, 5, 0x7)
+
+ #define viddec_fw_vc1_get_interlace(x) viddec_fw_bitfields_extract((x)->flags, 4, 0x1)
+ #define viddec_fw_vc1_set_interlace(x, val) viddec_fw_bitfields_insert((x)->flags, val, 4, 0x1)
+
+ #define viddec_fw_vc1_get_tfcntrflag(x) viddec_fw_bitfields_extract((x)->flags, 3, 0x1)
+ #define viddec_fw_vc1_set_tfcntrflag(x, val) viddec_fw_bitfields_insert((x)->flags, val, 3, 0x1)
+
+ #define viddec_fw_vc1_get_finterpflag(x) viddec_fw_bitfields_extract((x)->flags, 2, 0x1)
+ #define viddec_fw_vc1_set_finterpflag(x, val) viddec_fw_bitfields_insert((x)->flags, val, 2, 0x1)
+
+ #define viddec_fw_vc1_get_psf(x) viddec_fw_bitfields_extract((x)->flags, 1, 0x1)
+ #define viddec_fw_vc1_set_psf(x, val) viddec_fw_bitfields_insert((x)->flags, val, 1, 0x1)
+
+ #define viddec_fw_vc1_get_display_ext(x) viddec_fw_bitfields_extract((x)->flags, 0, 0x1)
+ #define viddec_fw_vc1_set_display_ext(x, val) viddec_fw_bitfields_insert((x)->flags, val, 0, 0x1)
+
+ uint32_t size; // profile:2, level:3, colordiff_format:2, pulldown:1, max_coded_width:12, max_coded_height:12
+ uint32_t flags; // bitrtq_postproc:5, frmrtq_postproc:3, interlace:1, tfcntrflag:1, finterpflag:1, psf:1, display_ext:1
+ uint32_t pad;
+ } vc1_sl; // vc1 item of type VIDDEC_WORKLOAD_SEQUENCE_INFO
+ struct
+ {
+ /* VC-1 sequence-layer display extension. Presence of the aspect-ratio, */
+ /* color-format and framerate groups is gated by the flag bits in 'size'. */
+ // This item is populated when display_ext flag is set in the sequence layer
+ // therefore, no need to provide this flag
+ #define viddec_fw_vc1_get_disp_horiz_size(x) viddec_fw_bitfields_extract((x)->size, 18, 0x3FFF)
+ #define viddec_fw_vc1_set_disp_horiz_size(x, val) viddec_fw_bitfields_insert((x)->size, val, 18, 0x3FFF)
+
+ #define viddec_fw_vc1_get_disp_vert_size(x) viddec_fw_bitfields_extract((x)->size, 4, 0x3FFF)
+ #define viddec_fw_vc1_set_disp_vert_size(x, val) viddec_fw_bitfields_insert((x)->size, val, 4, 0x3FFF)
+
+ #define viddec_fw_vc1_get_disp_aspect_ratio_flag(x) viddec_fw_bitfields_extract((x)->size, 3, 0x1)
+ #define viddec_fw_vc1_set_disp_aspect_ratio_flag(x, val) viddec_fw_bitfields_insert((x)->size, val, 3, 0x1)
+
+ #define viddec_fw_vc1_get_disp_color_format_flag(x) viddec_fw_bitfields_extract((x)->size, 2, 0x1)
+ #define viddec_fw_vc1_set_disp_color_format_flag(x, val) viddec_fw_bitfields_insert((x)->size, val, 2, 0x1)
+
+ #define viddec_fw_vc1_get_disp_framerate_flag(x) viddec_fw_bitfields_extract((x)->size, 1, 0x1)
+ #define viddec_fw_vc1_set_disp_framerate_flag(x, val) viddec_fw_bitfields_insert((x)->size, val, 1, 0x1)
+
+ #define viddec_fw_vc1_get_disp_framerateind(x) viddec_fw_bitfields_extract((x)->size, 0, 0x1)
+ #define viddec_fw_vc1_set_disp_framerateind(x, val) viddec_fw_bitfields_insert((x)->size, val, 0, 0x1)
+
+ #define viddec_fw_vc1_get_disp_aspect_ratio(x) viddec_fw_bitfields_extract((x)->framerate, 28, 0xF)
+ #define viddec_fw_vc1_set_disp_aspect_ratio(x, val) viddec_fw_bitfields_insert((x)->framerate, val, 28, 0xF)
+
+ #define viddec_fw_vc1_get_disp_frameratenr(x) viddec_fw_bitfields_extract((x)->framerate, 20, 0xFF)
+ #define viddec_fw_vc1_set_disp_frameratenr(x, val) viddec_fw_bitfields_insert((x)->framerate, val, 20, 0xFF)
+
+ #define viddec_fw_vc1_get_disp_frameratedr(x) viddec_fw_bitfields_extract((x)->framerate, 16, 0xF)
+ #define viddec_fw_vc1_set_disp_frameratedr(x, val) viddec_fw_bitfields_insert((x)->framerate, val, 16, 0xF)
+
+ #define viddec_fw_vc1_get_disp_framerateexp(x) viddec_fw_bitfields_extract((x)->framerate, 0, 0xFFFF)
+ #define viddec_fw_vc1_set_disp_framerateexp(x, val) viddec_fw_bitfields_insert((x)->framerate, val, 0, 0xFFFF)
+
+ #define viddec_fw_vc1_get_disp_aspect_ratio_horiz_size(x) viddec_fw_bitfields_extract((x)->aspectsize, 24, 0xFF)
+ #define viddec_fw_vc1_set_disp_aspect_ratio_horiz_size(x, val) viddec_fw_bitfields_insert((x)->aspectsize, val, 24, 0xFF)
+
+ #define viddec_fw_vc1_get_disp_aspect_ratio_vert_size(x) viddec_fw_bitfields_extract((x)->aspectsize, 16, 0xFF)
+ #define viddec_fw_vc1_set_disp_aspect_ratio_vert_size(x, val) viddec_fw_bitfields_insert((x)->aspectsize, val, 16, 0xFF)
+
+ #define viddec_fw_vc1_get_disp_color_prim(x) viddec_fw_bitfields_extract((x)->aspectsize, 8, 0xFF)
+ #define viddec_fw_vc1_set_disp_color_prim(x, val) viddec_fw_bitfields_insert((x)->aspectsize, val, 8, 0xFF)
+
+ #define viddec_fw_vc1_get_disp_transfer_char(x) viddec_fw_bitfields_extract((x)->aspectsize, 0, 0xFF)
+ #define viddec_fw_vc1_set_disp_transfer_char(x, val) viddec_fw_bitfields_insert((x)->aspectsize, val, 0, 0xFF)
+
+ uint32_t size; // disp_horiz_size:14, disp_vert_size:14, aspect_ratio_flag:1, color_format_flag:1, framerate_flag:1, framerateind:1
+ uint32_t framerate; // aspect_ratio:4, frameratenr:8, frameratedr:4, framerateexp:16
+ uint32_t aspectsize; // aspect_ratio_horiz_size:8, aspect_ratio_vert_size:8, color_prim:8, transfer_char:8
+ } vc1_sl_de; // vc1 item of type VIDDEC_WORKLOAD_DISPLAY_INFO
<itcontract>
</itcontract>
+ struct
+ {
+ /* VC-1 sequence header STRUCT_A/STRUCT_C payload (member name and the */
+ /* 'rcv' macro prefix indicate RCV-container streams — TODO confirm */
+ /* which profiles populate this against the parser code). */
+ #define viddec_fw_vc1_get_rcv_horiz_size(x) viddec_fw_bitfields_extract((x)->size, 16, 0xFFFF)
+ #define viddec_fw_vc1_set_rcv_horiz_size(x, val) viddec_fw_bitfields_insert((x)->size, val, 16, 0xFFFF)
+
+ #define viddec_fw_vc1_get_rcv_vert_size(x) viddec_fw_bitfields_extract((x)->size, 0, 0xFFFF)
+ #define viddec_fw_vc1_set_rcv_vert_size(x, val) viddec_fw_bitfields_insert((x)->size, val, 0, 0xFFFF)
+
+ #define viddec_fw_vc1_get_rcv_bitrtq_postproc(x) viddec_fw_bitfields_extract((x)->flags, 16, 0x1F)
+ #define viddec_fw_vc1_set_rcv_bitrtq_postproc(x, val) viddec_fw_bitfields_insert((x)->flags, val, 16, 0x1F)
+
+ #define viddec_fw_vc1_get_rcv_frmrtq_postproc(x) viddec_fw_bitfields_extract((x)->flags, 13, 0x7)
+ #define viddec_fw_vc1_set_rcv_frmrtq_postproc(x, val) viddec_fw_bitfields_insert((x)->flags, val, 13, 0x7)
+
+ #define viddec_fw_vc1_get_rcv_profile(x) viddec_fw_bitfields_extract((x)->flags, 9, 0xF)
+ #define viddec_fw_vc1_set_rcv_profile(x, val) viddec_fw_bitfields_insert((x)->flags, val, 9, 0xF)
+
+ #define viddec_fw_vc1_get_rcv_level(x) viddec_fw_bitfields_extract((x)->flags, 6, 0x7)
+ #define viddec_fw_vc1_set_rcv_level(x, val) viddec_fw_bitfields_insert((x)->flags, val, 6, 0x7)
+
+ #define viddec_fw_vc1_get_rcv_cbr(x) viddec_fw_bitfields_extract((x)->flags, 5, 0x1)
+ #define viddec_fw_vc1_set_rcv_cbr(x, val) viddec_fw_bitfields_insert((x)->flags, val, 5, 0x1)
+
+ #define viddec_fw_vc1_get_rcv_rangered(x) viddec_fw_bitfields_extract((x)->flags, 4, 0x1)
+ #define viddec_fw_vc1_set_rcv_rangered(x, val) viddec_fw_bitfields_insert((x)->flags, val, 4, 0x1)
+
+ #define viddec_fw_vc1_get_rcv_maxbframes(x) viddec_fw_bitfields_extract((x)->flags, 1, 0x7)
+ #define viddec_fw_vc1_set_rcv_maxbframes(x, val) viddec_fw_bitfields_insert((x)->flags, val, 1, 0x7)
+
+ #define viddec_fw_vc1_get_rcv_finterpflag(x) viddec_fw_bitfields_extract((x)->flags, 0, 0x1)
+ #define viddec_fw_vc1_set_rcv_finterpflag(x, val) viddec_fw_bitfields_insert((x)->flags, val, 0, 0x1)
+
+ uint32_t size; // horiz_size:16, vert_size:16
+ uint32_t flags; // bitrtq_postproc:5, frmrtq_postproc:3, profile:4, level:3, cbr:1, rangered:1, maxbframes:3, finterpflag:1
+ uint32_t pad;
+ } vc1_sh_struct_a_c; // vc1 item of type VIDDEC_WORKLOAD_VC1_SEQ_HDR_STRUCT_A_C
+ struct
+ {
+ /* VC-1 entry-point header fields: optional coded size (gated by */
+ /* ep_size_flag) plus link/panscan/range-mapping flags. */
+ #define viddec_fw_vc1_get_ep_size_flag(x) viddec_fw_bitfields_extract((x)->size, 24, 0x1)
+ #define viddec_fw_vc1_set_ep_size_flag(x, val) viddec_fw_bitfields_insert((x)->size, val, 24, 0x1)
+
+ #define viddec_fw_vc1_get_ep_horiz_size(x) viddec_fw_bitfields_extract((x)->size, 12, 0xFFF)
+ #define viddec_fw_vc1_set_ep_horiz_size(x, val) viddec_fw_bitfields_insert((x)->size, val, 12, 0xFFF)
+
+ #define viddec_fw_vc1_get_ep_vert_size(x) viddec_fw_bitfields_extract((x)->size, 0, 0xFFF)
+ #define viddec_fw_vc1_set_ep_vert_size(x, val) viddec_fw_bitfields_insert((x)->size, val, 0, 0xFFF)
+
+ #define viddec_fw_vc1_get_ep_broken_link(x) viddec_fw_bitfields_extract((x)->flags, 10, 0x1)
+ #define viddec_fw_vc1_set_ep_broken_link(x, val) viddec_fw_bitfields_insert((x)->flags, val, 10, 0x1)
+
+ #define viddec_fw_vc1_get_ep_closed_entry(x) viddec_fw_bitfields_extract((x)->flags, 9, 0x1)
+ #define viddec_fw_vc1_set_ep_closed_entry(x, val) viddec_fw_bitfields_insert((x)->flags, val, 9, 0x1)
+
+ #define viddec_fw_vc1_get_ep_panscan_flag(x) viddec_fw_bitfields_extract((x)->flags, 8, 0x1)
+ #define viddec_fw_vc1_set_ep_panscan_flag(x, val) viddec_fw_bitfields_insert((x)->flags, val, 8, 0x1)
+
+ #define viddec_fw_vc1_get_ep_range_mapy_flag(x) viddec_fw_bitfields_extract((x)->flags, 7, 0x1)
+ #define viddec_fw_vc1_set_ep_range_mapy_flag(x, val) viddec_fw_bitfields_insert((x)->flags, val, 7, 0x1)
+
+ #define viddec_fw_vc1_get_ep_range_mapy(x) viddec_fw_bitfields_extract((x)->flags, 4, 0x7)
+ #define viddec_fw_vc1_set_ep_range_mapy(x, val) viddec_fw_bitfields_insert((x)->flags, val, 4, 0x7)
+
+ #define viddec_fw_vc1_get_ep_range_mapuv_flag(x) viddec_fw_bitfields_extract((x)->flags, 3, 0x1)
+ #define viddec_fw_vc1_set_ep_range_mapuv_flag(x, val) viddec_fw_bitfields_insert((x)->flags, val, 3, 0x1)
+
+ #define viddec_fw_vc1_get_ep_range_mapuv(x) viddec_fw_bitfields_extract((x)->flags, 0, 0x7)
+ #define viddec_fw_vc1_set_ep_range_mapuv(x, val) viddec_fw_bitfields_insert((x)->flags, val, 0, 0x7)
+
+ uint32_t size; // coded_size_flag:1, coded_width:12, coded_height:12
+ uint32_t flags; // broken_link:1, closed_entry:1, panscan_flag:1, range_mapy_flag:1, range_mapy:3, range_mapuv_flag:1, range_mapuv:3
+ uint32_t pad;
+ } vc1_ep; // vc1 item of type VIDDEC_WORKLOAD_GOP_INFO
+ struct
+ {
+ /* H.264 SPS summary: scalar flags/IDs bit-packed in sps_messages per */
+ /* the map below; picture dimensions carried unpacked as full words. */
+ /*
+ 0-7 bits for profile_idc.
+ 8-15 bits for level_idc.
+ 16-17 bits for chroma_format_idc.
+ 18-22 bits for num_ref_frames.
+ 23 for gaps_in_frame_num_value_allowed_flag.
+ 24 for frame_mbs_only_flag.
+ 25 for frame_cropping_flag.
+ 26 for vui_parameters_present_flag.
+ */
+#define viddec_fw_h264_sps_get_profile_idc(x) viddec_fw_bitfields_extract( (x)->sps_messages, 0, 0xFF)
+#define viddec_fw_h264_sps_set_profile_idc(x, val) viddec_fw_bitfields_insert( (x)->sps_messages, val, 0, 0xFF)
+#define viddec_fw_h264_sps_get_level_idc(x) viddec_fw_bitfields_extract( (x)->sps_messages, 8, 0xFF)
+#define viddec_fw_h264_sps_set_level_idc(x, val) viddec_fw_bitfields_insert( (x)->sps_messages, val, 8, 0xFF)
+#define viddec_fw_h264_sps_get_chroma_format_idc(x) viddec_fw_bitfields_extract( (x)->sps_messages, 16, 0x3)
+#define viddec_fw_h264_sps_set_chroma_format_idc(x, val) viddec_fw_bitfields_insert( (x)->sps_messages, val, 16, 0x3)
+#define viddec_fw_h264_sps_get_num_ref_frames(x) viddec_fw_bitfields_extract( (x)->sps_messages, 18, 0x1F)
+#define viddec_fw_h264_sps_set_num_ref_frames(x, val) viddec_fw_bitfields_insert( (x)->sps_messages, val, 18, 0x1F)
+#define viddec_fw_h264_sps_get_gaps_in_frame_num_value_allowed_flag(x) viddec_fw_bitfields_extract( (x)->sps_messages, 23, 0x1)
+#define viddec_fw_h264_sps_set_gaps_in_frame_num_value_allowed_flag(x, val) viddec_fw_bitfields_insert( (x)->sps_messages, val, 23, 0x1)
+#define viddec_fw_h264_sps_get_frame_mbs_only_flag(x) viddec_fw_bitfields_extract( (x)->sps_messages, 24, 0x1)
+#define viddec_fw_h264_sps_set_frame_mbs_only_flag(x, val) viddec_fw_bitfields_insert( (x)->sps_messages, val, 24, 0x1)
+#define viddec_fw_h264_sps_get_frame_cropping_flag(x) viddec_fw_bitfields_extract( (x)->sps_messages, 25, 0x1)
+#define viddec_fw_h264_sps_set_frame_cropping_flag(x, val) viddec_fw_bitfields_insert( (x)->sps_messages, val, 25, 0x1)
+#define viddec_fw_h264_sps_get_vui_parameters_present_flag(x) viddec_fw_bitfields_extract( (x)->sps_messages, 26, 0x1)
+#define viddec_fw_h264_sps_set_vui_parameters_present_flag(x, val) viddec_fw_bitfields_insert( (x)->sps_messages, val, 26, 0x1)
+ unsigned int sps_messages;
+ unsigned int pic_width_in_mbs_minus1;
+ unsigned int pic_height_in_map_units_minus1;
+ } h264_sps; // h264 item of type VIDDEC_WORKLOAD_SEQUENCE_INFO
+
+ struct
+ {
+ /* H.264 SPS frame-cropping offsets, two per 32-bit word (16 bits each). */
+#define viddec_fw_h264_cropping_get_left(x) viddec_fw_bitfields_extract( (x)->left_right, 16, 0xFFFF)
+#define viddec_fw_h264_cropping_get_right(x) viddec_fw_bitfields_extract( (x)->left_right, 0, 0xFFFF)
+#define viddec_fw_h264_cropping_set_left(x, val) viddec_fw_bitfields_insert( (x)->left_right, val, 16, 0xFFFF)
+#define viddec_fw_h264_cropping_set_right(x, val) viddec_fw_bitfields_insert( (x)->left_right, val, 0, 0xFFFF)
+ unsigned int left_right; /* Left in upper 16 bits and right in Lower 16 bits */
+#define viddec_fw_h264_cropping_get_top(x) viddec_fw_bitfields_extract( (x)->top_bottom, 16, 0xFFFF)
+#define viddec_fw_h264_cropping_get_bottom(x) viddec_fw_bitfields_extract( (x)->top_bottom, 0, 0xFFFF)
+#define viddec_fw_h264_cropping_set_top(x, val) viddec_fw_bitfields_insert( (x)->top_bottom, val, 16, 0xFFFF)
+#define viddec_fw_h264_cropping_set_bottom(x, val) viddec_fw_bitfields_insert( (x)->top_bottom, val, 0, 0xFFFF)
+ unsigned int top_bottom; /* top in upper 16 bits and bottom in lower 16 bits */
+ unsigned int pad;
+ } h264_cropping; // h264 item of type VIDDEC_WORKLOAD_H264_CROPPING
+
+ struct
+ {
+ /* H.264 VUI parameters: presence flags + video_format in one word, */
+ /* aspect/colour/transfer IDs in the next, SAR dimensions in the third. */
+ /* 0 bit for aspect_ratio_info_present_flag
+ 1 st bit for video_signal_type_present_flag
+ 2 nd bit for colour_description_present_flag
+ 3 rd bit for timing_info_present_flag
+ 4 th bit for nal_hrd_parameters_present_flag
+ 5 th bit for vcl_hrd_parameters_present_flag
+ 6 th bit for fixed_frame_rate_flag
+ 7 th bit for pic_struct_present_flag
+ 8 th bit for low_delay_hrd_flag
+ 9,10,11 bits for video_format
+ */
+#define viddec_fw_h264_vui_get_aspect_ratio_info_present_flag(x) viddec_fw_bitfields_extract( (x)->vui_flags_and_format, 0, 0x1)
+#define viddec_fw_h264_vui_set_aspect_ratio_info_present_flag(x, val) viddec_fw_bitfields_insert( (x)->vui_flags_and_format, val, 0, 0x1)
+#define viddec_fw_h264_vui_get_video_signal_type_present_flag(x) viddec_fw_bitfields_extract( (x)->vui_flags_and_format, 1, 0x1)
+#define viddec_fw_h264_vui_set_video_signal_type_present_flag(x, val) viddec_fw_bitfields_insert( (x)->vui_flags_and_format, val, 1, 0x1)
+#define viddec_fw_h264_vui_get_colour_description_present_flag(x) viddec_fw_bitfields_extract( (x)->vui_flags_and_format, 2, 0x1)
+#define viddec_fw_h264_vui_set_colour_description_present_flag(x, val) viddec_fw_bitfields_insert( (x)->vui_flags_and_format, val, 2, 0x1)
+#define viddec_fw_h264_vui_get_timing_info_present_flag(x) viddec_fw_bitfields_extract( (x)->vui_flags_and_format, 3, 0x1)
+#define viddec_fw_h264_vui_set_timing_info_present_flag(x, val) viddec_fw_bitfields_insert( (x)->vui_flags_and_format, val, 3, 0x1)
+#define viddec_fw_h264_vui_get_nal_hrd_parameters_present_flag(x) viddec_fw_bitfields_extract( (x)->vui_flags_and_format, 4, 0x1)
+#define viddec_fw_h264_vui_set_nal_hrd_parameters_present_flag(x, val) viddec_fw_bitfields_insert( (x)->vui_flags_and_format, val, 4, 0x1)
+#define viddec_fw_h264_vui_get_vcl_hrd_parameters_present_flag(x) viddec_fw_bitfields_extract( (x)->vui_flags_and_format, 5, 0x1)
+#define viddec_fw_h264_vui_set_vcl_hrd_parameters_present_flag(x, val) viddec_fw_bitfields_insert( (x)->vui_flags_and_format, val, 5, 0x1)
+#define viddec_fw_h264_vui_get_fixed_frame_rate_flag(x) viddec_fw_bitfields_extract( (x)->vui_flags_and_format, 6, 0x1)
+#define viddec_fw_h264_vui_set_fixed_frame_rate_flag(x, val) viddec_fw_bitfields_insert( (x)->vui_flags_and_format, val, 6, 0x1)
+#define viddec_fw_h264_vui_get_pic_struct_present_flag(x) viddec_fw_bitfields_extract( (x)->vui_flags_and_format, 7, 0x1)
+#define viddec_fw_h264_vui_set_pic_struct_present_flag(x, val) viddec_fw_bitfields_insert( (x)->vui_flags_and_format, val, 7, 0x1)
+#define viddec_fw_h264_vui_get_low_delay_hrd_flag(x) viddec_fw_bitfields_extract( (x)->vui_flags_and_format, 8, 0x1)
+#define viddec_fw_h264_vui_set_low_delay_hrd_flag(x, val) viddec_fw_bitfields_insert( (x)->vui_flags_and_format, val, 8, 0x1)
+#define viddec_fw_h264_vui_get_video_format(x) viddec_fw_bitfields_extract( (x)->vui_flags_and_format, 9, 0x7)
+#define viddec_fw_h264_vui_set_video_format(x, val) viddec_fw_bitfields_insert( (x)->vui_flags_and_format, val, 9, 0x7)
+ unsigned int vui_flags_and_format;
+
+#define viddec_fw_h264_vui_get_aspect_ratio_idc(x) viddec_fw_bitfields_extract( (x)->aspc_color_transfer, 0, 0xFF)
+#define viddec_fw_h264_vui_set_aspect_ratio_idc(x, val) viddec_fw_bitfields_insert( (x)->aspc_color_transfer, val, 0, 0xFF)
+#define viddec_fw_h264_vui_get_colour_primaries(x) viddec_fw_bitfields_extract( (x)->aspc_color_transfer, 8, 0xFF)
+#define viddec_fw_h264_vui_set_colour_primaries(x, val) viddec_fw_bitfields_insert( (x)->aspc_color_transfer, val, 8, 0xFF)
+#define viddec_fw_h264_vui_get_transfer_characteristics(x) viddec_fw_bitfields_extract( (x)->aspc_color_transfer, 16, 0xFF)
+#define viddec_fw_h264_vui_set_transfer_characteristics(x, val) viddec_fw_bitfields_insert( (x)->aspc_color_transfer, val, 16, 0xFF)
+ /* lower 8 bits for aspect_ratio, next 8bits for color primaries and next 8 bits for transfer characteristics */
+ unsigned int aspc_color_transfer;
+
+#define viddec_fw_h264_vui_get_sar_width(x) viddec_fw_bitfields_extract( (x)->sar_width_height, 16, 0xFFFF)
+#define viddec_fw_h264_vui_get_sar_height(x) viddec_fw_bitfields_extract( (x)->sar_width_height, 0, 0xFFFF)
+#define viddec_fw_h264_vui_set_sar_width(x, val) viddec_fw_bitfields_insert( (x)->sar_width_height, val, 16, 0xFFFF)
+#define viddec_fw_h264_vui_set_sar_height(x, val) viddec_fw_bitfields_insert( (x)->sar_width_height, val, 0, 0xFFFF)
+ unsigned int sar_width_height; /* Lower 16 for height upper 16 for width */
+ } h264_vui; // h264 item of type VIDDEC_WORKLOAD_DISPLAY_INFO
+ struct
+ {
+ /* H.264 VUI timing info (num_units_in_tick / time_scale). */
+ /* NOTE(review): the "_flag" suffix on these accessors is a misnomer — */
+ /* the 0xFFFFFFFF mask means they read/write the entire 32-bit value. */
+#define viddec_fw_h264_vui_get_num_units_in_tick_flag(x) viddec_fw_bitfields_extract( (x)->num_units_in_tick, 0, 0xFFFFFFFF)
+#define viddec_fw_h264_vui_set_num_units_in_tick_flag(x, val) viddec_fw_bitfields_insert( (x)->num_units_in_tick, val, 0, 0xFFFFFFFF)
+#define viddec_fw_h264_vui_get_time_scale_flag(x) viddec_fw_bitfields_extract( (x)->time_scale, 0, 0xFFFFFFFF)
+#define viddec_fw_h264_vui_set_time_scale_flag(x, val) viddec_fw_bitfields_insert( (x)->time_scale, val, 0, 0xFFFFFFFF)
+ unsigned int num_units_in_tick;
+ unsigned int time_scale;
+ unsigned int pad1;
+ } h264_vui_time_info; // VIDDEC_WORKLOAD_H264_VUI_TIMING_INFO
+ struct
+ {
+ /* H.264 SEI picture-timing: only pic_struct is carried (low 4 bits */
+ /* valid per the comment); no accessor macros are defined for it. */
+ unsigned int pic_struct; /* 4 bit length */
+ unsigned int pad1;
+ unsigned int pad2;
+ } h264_sei_pic_timing; // h264 item of type VIDDEC_WORKLOAD_SEI_PIC_TIMING
+ struct
+ {
+ /* H.264 SEI pan-scan header: rect id, cancel flag + rect count, and */
+ /* repetition period; the rectangles themselves travel in the */
+ /* separate h264_pan_scan_rect item. */
+ unsigned int pan_scan_rect_id;
+
+#define viddec_fw_h264_sei_pan_scan_get_cancel_flag(x) viddec_fw_bitfields_extract( (x)->pan_scan_cancel_and_cnt, 0, 0x1)
+#define viddec_fw_h264_sei_pan_scan_get_cnt_minus1(x) viddec_fw_bitfields_extract( (x)->pan_scan_cancel_and_cnt, 1, 0x3)
+#define viddec_fw_h264_sei_pan_scan_set_cancel_flag(x, val) viddec_fw_bitfields_insert( (x)->pan_scan_cancel_and_cnt, val, 0, 0x1)
+#define viddec_fw_h264_sei_pan_scan_set_cnt_minus1(x, val) viddec_fw_bitfields_insert( (x)->pan_scan_cancel_and_cnt, val, 1, 0x3)
+ unsigned int pan_scan_cancel_and_cnt; /* 0 bit for cancel flag and 2 bits for cnt_minus1 */
+ unsigned int pan_scan_rect_repetition_period;
+ } h264_sei_pan_scan; // h264 item of type VIDDEC_WORKLOAD_H264_PAN_SCAN
+
+ struct
+ {
+ /* One H.264 pan-scan rectangle: offsets packed two per word, 16 bits each. */
+
+#define viddec_fw_h264_pan_scan_get_left(x) viddec_fw_bitfields_extract( (x)->left_right, 16, 0xFFFF)
+#define viddec_fw_h264_pan_scan_get_right(x) viddec_fw_bitfields_extract( (x)->left_right, 0, 0xFFFF)
+#define viddec_fw_h264_pan_scan_set_left(x, val) viddec_fw_bitfields_insert( (x)->left_right, val, 16, 0xFFFF)
+#define viddec_fw_h264_pan_scan_set_right(x, val) viddec_fw_bitfields_insert( (x)->left_right, val, 0, 0xFFFF)
+ unsigned int left_right; /* Left in upper 16 bits and right in Lower 16 bits */
+
+#define viddec_fw_h264_pan_scan_get_top(x) viddec_fw_bitfields_extract( (x)->top_bottom, 16, 0xFFFF)
+#define viddec_fw_h264_pan_scan_get_bottom(x) viddec_fw_bitfields_extract( (x)->top_bottom, 0, 0xFFFF)
+#define viddec_fw_h264_pan_scan_set_top(x, val) viddec_fw_bitfields_insert( (x)->top_bottom, val, 16, 0xFFFF)
+#define viddec_fw_h264_pan_scan_set_bottom(x, val) viddec_fw_bitfields_insert( (x)->top_bottom, val, 0, 0xFFFF)
+ unsigned int top_bottom; /* top in upper 16 bits and bottom in lower 16 bits */
+
+ unsigned int pad;
+ } h264_pan_scan_rect; // h264 item of type VIDDEC_WORKLOAD_SEI_PAN_SCAN_RECT
+ struct
+ {
+ /* H.264 SEI recovery point: frame count to recovery, exact-match and */
+ /* broken-link flags, and changing_slice_group_idc. */
+ unsigned int recovery_frame_cnt;
+#define viddec_fw_h264_h264_sei_recovery_get_exact_match_flag(x) viddec_fw_bitfields_extract( (x)->broken_and_exctmatch_flags, 0, 0x1)
+#define viddec_fw_h264_h264_sei_recovery_get_broken_link_flag(x) viddec_fw_bitfields_extract( (x)->broken_and_exctmatch_flags, 1, 0x1)
+#define viddec_fw_h264_h264_sei_recovery_set_exact_match_flag(x, val) viddec_fw_bitfields_insert( (x)->broken_and_exctmatch_flags, val, 0, 0x1)
+#define viddec_fw_h264_h264_sei_recovery_set_broken_link_flag(x, val) viddec_fw_bitfields_insert( (x)->broken_and_exctmatch_flags, val, 1, 0x1)
+ unsigned int broken_and_exctmatch_flags; /* 0 bit for exact match, 1 bit for brokenlink */
+
+ unsigned int changing_slice_group_idc; /* 2bit value for slice_group idc */
+
+ } h264_sei_recovery_point; // h264 item of type VIDDEC_WORKLOAD_SEI_RECOVERY_POINT
+
+ struct
+ {
+ /* MPEG-4 VisualSequence + VisualObject fields: profile/level, */
+ /* video_signal_type group, and colour description, one word each. */
+ // Visual Sequence (From LSB):
+ // - profile_and_level_indication - 8 bits
+ #define viddec_fw_mp4_vs_get_profile_and_level_indication(x) viddec_fw_bitfields_extract( (x)->vs_item, 0, 0xFF)
+ #define viddec_fw_mp4_vs_set_profile_and_level_indication(x, val) viddec_fw_bitfields_insert ( (x)->vs_item, val, 0, 0xFF)
+ unsigned int vs_item;
+
+ // Visual Object - video_signal_type
+ // - video_signal_type - 1b
+ // - video_format - 3b
+ // - video_range - 1b
+ // - colour_description - 1b
+ #define viddec_fw_mp4_vo_get_colour_description(x) viddec_fw_bitfields_extract( (x)->video_signal_type, 5, 0x1)
+ #define viddec_fw_mp4_vo_set_colour_description(x, val) viddec_fw_bitfields_insert ( (x)->video_signal_type, val, 5, 0x1)
+ #define viddec_fw_mp4_vo_get_video_range(x) viddec_fw_bitfields_extract( (x)->video_signal_type, 4, 0x1)
+ #define viddec_fw_mp4_vo_set_video_range(x, val) viddec_fw_bitfields_insert ( (x)->video_signal_type, val, 4, 0x1)
+ #define viddec_fw_mp4_vo_get_video_format(x) viddec_fw_bitfields_extract( (x)->video_signal_type, 1, 0x7)
+ #define viddec_fw_mp4_vo_set_video_format(x, val) viddec_fw_bitfields_insert ( (x)->video_signal_type, val, 1, 0x7)
+ #define viddec_fw_mp4_vo_get_video_signal_type(x) viddec_fw_bitfields_extract( (x)->video_signal_type, 0, 0x1)
+ #define viddec_fw_mp4_vo_set_video_signal_type(x, val) viddec_fw_bitfields_insert ( (x)->video_signal_type, val, 0, 0x1)
+ unsigned int video_signal_type;
+
+ // Visual Object - video_signal_type
+ // - color_primaries - 8 bits
+ // - transfer_characteristics - 8 bits
+ #define viddec_fw_mp4_vo_get_transfer_char(x) viddec_fw_bitfields_extract( (x)->color_desc, 8, 0xFF)
+ #define viddec_fw_mp4_vo_set_transfer_char(x, val) viddec_fw_bitfields_insert ( (x)->color_desc, val, 8, 0xFF)
+ #define viddec_fw_mp4_vo_get_color_primaries(x) viddec_fw_bitfields_extract( (x)->color_desc, 0, 0xFF)
+ #define viddec_fw_mp4_vo_set_color_primaries(x, val) viddec_fw_bitfields_insert ( (x)->color_desc, val, 0, 0xFF)
+ unsigned int color_desc;
+ } mp4_vs_vo; // mp4 item of type VIDDEC_WORKLOAD_MPEG4_VISUAL_SEQ_OBJ
+
+ struct
+ {
+ // Video Object Layer(From LSB):
+ // - aspect_ratio_info - 4b
+ // - par_width - 8b
+ // - par_height - 8b
+ // - vol_control_param - 1b
+ // - chroma_format - 2b
+ // - interlaced - 1b
+ // - fixed_vop_rate - 1b
+ #define viddec_fw_mp4_vol_get_fixed_vop_rate(x) viddec_fw_bitfields_extract( (x)->vol_aspect_ratio, 24, 0x1)
+ #define viddec_fw_mp4_vol_set_fixed_vop_rate(x, val) viddec_fw_bitfields_insert ( (x)->vol_aspect_ratio, val, 24, 0x1)
+ #define viddec_fw_mp4_vol_get_interlaced(x) viddec_fw_bitfields_extract( (x)->vol_aspect_ratio, 23, 0x1)
+ #define viddec_fw_mp4_vol_set_interlaced(x, val) viddec_fw_bitfields_insert ( (x)->vol_aspect_ratio, val, 23, 0x1)
+ #define viddec_fw_mp4_vol_get_chroma_format(x) viddec_fw_bitfields_extract( (x)->vol_aspect_ratio, 21, 0x3)
+ #define viddec_fw_mp4_vol_set_chroma_format(x, val) viddec_fw_bitfields_insert ( (x)->vol_aspect_ratio, val, 21, 0x3)
+ #define viddec_fw_mp4_vol_get_control_param(x) viddec_fw_bitfields_extract( (x)->vol_aspect_ratio, 20, 0x1)
+ #define viddec_fw_mp4_vol_set_control_param(x, val) viddec_fw_bitfields_insert ( (x)->vol_aspect_ratio, val, 20, 0x1)
+ #define viddec_fw_mp4_vol_get_par_height(x) viddec_fw_bitfields_extract( (x)->vol_aspect_ratio, 12, 0xFF)
+ #define viddec_fw_mp4_vol_set_par_height(x, val) viddec_fw_bitfields_insert ( (x)->vol_aspect_ratio, val, 12, 0xFF)
+ #define viddec_fw_mp4_vol_get_par_width(x) viddec_fw_bitfields_extract( (x)->vol_aspect_ratio, 4, 0xFF)
+ #define viddec_fw_mp4_vol_set_par_width(x, val) viddec_fw_bitfields_insert ( (x)->vol_aspect_ratio, val, 4, 0xFF)
+ #define viddec_fw_mp4_vol_get_aspect_ratio_info(x) viddec_fw_bitfields_extract( (x)->vol_aspect_ratio, 0, 0xF)
+ #define viddec_fw_mp4_vol_set_aspect_ratio_info(x, val) viddec_fw_bitfields_insert ( (x)->vol_aspect_ratio, val, 0, 0xF)
+ unsigned int vol_aspect_ratio;
+
+ // Video Object Layer(From LSB):
+ // - vbv_parameters - 1b
+ // - bit_rate - 30b
+ #define viddec_fw_mp4_vol_get_bit_rate(x) viddec_fw_bitfields_extract( (x)->vol_bit_rate, 1, 0x3FFFFFFF)
+ #define viddec_fw_mp4_vol_set_bit_rate(x, val) viddec_fw_bitfields_insert ( (x)->vol_bit_rate, val, 1, 0x3FFFFFFF)
+ #define viddec_fw_mp4_vol_get_vbv_param(x) viddec_fw_bitfields_extract( (x)->vol_bit_rate, 0, 0x1)
+ #define viddec_fw_mp4_vol_set_vbv_param(x, val) viddec_fw_bitfields_insert ( (x)->vol_bit_rate, val, 0, 0x1)
+ unsigned int vol_bit_rate;
+
+ // Video Object Layer(From LSB):
+ // - fixed_vop_time_increment - 16b
+ // - vop_time_increment_resolution - 16b
+ #define viddec_fw_mp4_vol_get_vop_time_increment_resolution(x) viddec_fw_bitfields_extract((x)->vol_frame_rate, 16, 0xFFFF)
+ #define viddec_fw_mp4_vol_set_vop_time_increment_resolution(x, val) viddec_fw_bitfields_insert((x)->vol_frame_rate, val, 16, 0xFFFF)
+ #define viddec_fw_mp4_vol_get_fixed_vop_time_increment(x) viddec_fw_bitfields_extract((x)->vol_frame_rate, 1, 0xFFFF)
+ #define viddec_fw_mp4_vol_set_fixed_vop_time_increment(x, val) viddec_fw_bitfields_insert((x)->vol_frame_rate, val, 1, 0xFFFF)
+ unsigned int vol_frame_rate;
+ } mp4_vol; // mp4 item of type VIDDEC_WORKLOAD_MPEG4_VIDEO_OBJ
+
+ struct
+ {
+ // Group of Video Object Planes(From LSB):
+ // - time_code - 18b
+ // - closed_gov - 1b
+ // - broken_link - 1b
+ #define viddec_fw_mp4_gvop_get_broken_link(x) viddec_fw_bitfields_extract((x)->gvop_info, 19, 0x1)
+ #define viddec_fw_mp4_gvop_set_broken_link(x, val) viddec_fw_bitfields_insert((x)->gvop_info, val, 19, 0x1)
+ #define viddec_fw_mp4_gvop_get_closed_gov(x) viddec_fw_bitfields_extract((x)->gvop_info, 18, 0x1)
+ #define viddec_fw_mp4_gvop_set_closed_gov(x, val) viddec_fw_bitfields_insert((x)->gvop_info, val, 18, 0x1)
+ #define viddec_fw_mp4_gvop_get_time_code(x) viddec_fw_bitfields_extract((x)->gvop_info, 0, 0x3FFFF)
+ #define viddec_fw_mp4_gvop_set_time_code(x, val) viddec_fw_bitfields_insert((x)->gvop_info, val, 0, 0x3FFFF)
+ unsigned int gvop_info;
+
+ unsigned int pad1;
+ unsigned int pad2;
+ } mp4_gvop; // mp4 item of type VIDDEC_WORKLOAD_MPEG4_GRP_VIDEO_OBJ
+
+ struct
+ {
+                // Video Plane with Short Header(From LSB):
+ // - source_format - 3b
+ #define viddec_fw_mp4_vpsh_get_source_format(x) viddec_fw_bitfields_extract((x)->info, 0, 0x7)
+ #define viddec_fw_mp4_vpsh_set_source_format(x, val) viddec_fw_bitfields_insert((x)->info, val, 0, 0x7)
+ unsigned int info;
+
+ unsigned int pad1;
+ unsigned int pad2;
+ } mp4_vpsh; // mp4 item of type VIDDEC_WORKLOAD_MPEG4_VIDEO_PLANE_SHORT
+
+ unsigned int vwi_payload[3];
+ };
+}viddec_workload_item_t;
+
+#endif /* VIDDEC_ITEM_TYPES_H */
diff --git a/mix_vbp/viddec_fw/include/viddec_fw_parser_host.h b/mix_vbp/viddec_fw/include/viddec_fw_parser_host.h
new file mode 100644
index 0000000..6d26555
--- /dev/null
+++ b/mix_vbp/viddec_fw/include/viddec_fw_parser_host.h
@@ -0,0 +1,237 @@
+/*
+ This file is provided under a dual BSD/GPLv2 license. When using or
+ redistributing this file, you may do so under either license.
+
+ GPL LICENSE SUMMARY
+
+ Copyright(c) 2007-2009 Intel Corporation. All rights reserved.
+
+ This program is free software; you can redistribute it and/or modify
+ it under the terms of version 2 of the GNU General Public License as
+ published by the Free Software Foundation.
+
+ This program is distributed in the hope that it will be useful, but
+ WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program; if not, write to the Free Software
+ Foundation, Inc., 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA.
+ The full GNU General Public License is included in this distribution
+ in the file called LICENSE.GPL.
+
+ Contact Information:
+
+ BSD LICENSE
+
+ Copyright(c) 2007-2009 Intel Corporation. All rights reserved.
+ All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in
+ the documentation and/or other materials provided with the
+ distribution.
+ * Neither the name of Intel Corporation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+*/
+
+#ifndef VIDDEC_FW_PARSER_HOST_H
+#define VIDDEC_FW_PARSER_HOST_H
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+#include "viddec_fw_common_defs.h"
+
+/** @weakgroup viddec Fw Parser interface Functions */
+/** @ingroup viddec_fw_parser */
+/*@{*/
+
+/**
+ This function returns the size required for loading fw.
+ @retval size : Required size.
+*/
+ uint32_t viddec_fw_parser_query_fwsize(void);
+
+/**
+   This function loads Parser Firmware and initialises necessary state information. This is a synchronous message to FW.
+ @param[in] phys : Physical address on where firmware should be loaded.
+ @param[in] len : Length of data allocated at phys.
+ @retval VIDDEC_FW_SUCCESS : Successfully loaded firmware.
+ @retval VIDDEC_FW_FAILURE : Failed to communicate with firmware.
+ @retval VIDDEC_FW_NORESOURCES : Failed to allocate resources for Loading firmware.
+ @retval VIDDEC_FW_INVALID_PARAM: The input parameters are not valid.
+*/
+ uint32_t viddec_fw_parser_loadfw(uint32_t phys, uint32_t len);
+
+/**
+   This function returns the size required for opening a stream. This is a synchronous message to FW.
+ @param[in] codec_type : Type of codec that we want information about.
+ @param[out] num_wklds : Number of wklds required for initialisation.
+ @param[out] size : Size of memory required for opening a stream.
+*/
+ void viddec_fw_parser_query_streamsize(uint32_t codec_type, uint32_t *num_wklds, uint32_t *size);
+
+/**
+   This function opens the requested codec. This is a synchronous message to FW.
+ @param[in] codec_type : Type of codec that we want to open.
+ @param[in] phys : Physical address of allocated memory for this codec.
+   @param[in] priority       : Priority of stream. 1 for realtime and 0 for background.
+ @param[out] strm_handle : Handle of the opened stream.
+ @retval VIDDEC_FW_SUCCESS : Successfully Opened the stream.
+ @retval VIDDEC_FW_FAILURE : Failed to Open a stream.
+ @retval VIDDEC_FW_NORESOURCES : Failed to Open a stream as we are out of resources.
+*/
+ uint32_t viddec_fw_parser_openstream(uint32_t codec_type, uint32_t *strm_handle, uint32_t phys, uint32_t priority);
+
+/**
+   This function closes the stream. This is a synchronous message to FW.
+ For the close stream to be effective, host has to do flush with discard first and then close the stream.
+ @param[in] strm_handle : Handle of the stream to close.
+*/
+ void viddec_fw_parser_closestream(uint32_t strm_handle);
+
+/**
+ This function flushes the current stream. This is a synchronous message to FW.
+ Before calling this function the host has to make sure the output queue of the firmware
+ is empty. After this function is executed the FW will read all entries in input
+ es buffer queue into a free or partial workload and push it into output queue.
+ After this operation the host has to read all entries in output queue again to
+ finish the flush operation.
+ @param[in] flush_type : Type of flush we want to perform.ex:flush and discard.
+ @param[in] strm_handle : Handle of the stream we want to flush.
+ @retval VIDDEC_FW_SUCCESS : Successfully flushed the stream.
+ @retval VIDDEC_FW_INVALID_PARAM: The input parameters are not valid.
+   @retval VIDDEC_FW_NEED_FREE_WKLD  : Failed to flush since a free wkld was not available.
+*/
+ uint32_t viddec_fw_parser_flushstream(uint32_t strm_handle, uint32_t flush_type);
+
+/**
+ This function sends an input es buffer.
+ @param[in] strm_handle : The handle of stream that we want to send es buffer to.
+ @param[in] message : The es buffer we want to send.
+ @retval VIDDEC_FW_SUCCESS : Successfully Sent the message.
+   @retval VIDDEC_FW_PORT_FULL    : Port to fw is full, unsuccessful in sending message.
+ @retval VIDDEC_FW_INVALID_PARAM: The input parameters are not valid.
+*/
+ uint32_t viddec_fw_parser_send(uint32_t strm_handle, ipc_msg_data *message);
+
+/**
+ This function gets the next processed workload. The host is required to add free workloads
+ to keep the parser busy. The FW will stall when it doesn't have enough workloads(2) to continue.
+ @param[in] strm_handle : The handle of stream that we want to read workload from.
+ @param[out] message : The workload descriptor.
+ @retval VIDDEC_FW_SUCCESS : Successfully Sent the message.
+   @retval VIDDEC_FW_PORT_EMPTY   : Workload port is empty, unsuccessful in reading wkld.
+ @retval VIDDEC_FW_INVALID_PARAM: The input parameters are not valid.
+*/
+ uint32_t viddec_fw_parser_recv(uint32_t strm_handle, ipc_msg_data *message);
+
+/**
+ This function adds a free workload to current stream.
+ @param[in] strm_handle : The handle of stream that we want to write workload to.
+ @param[out] message : The workload descriptor.
+ @retval VIDDEC_FW_SUCCESS : Successfully Sent the message.
+   @retval VIDDEC_FW_PORT_FULL    : Workload port is full, unsuccessful in writing wkld.
+ @retval VIDDEC_FW_INVALID_PARAM: The input parameters are not valid.
+*/
+ uint32_t viddec_fw_parser_addwkld(uint32_t strm_handle, ipc_msg_data *message);
+
+/**
+ This function enables or disables Interrupts for a stream. By default the FW will always enable interrupts.
+ The driver can disable/enable Interrupts if it needs for this particular stream.
+
+ @param[in] strm_handle : The handle of stream that we want to get mask from
+ @param[in] mask : This is read as boolean variable, true to enable, false to disable.
+ @retval VIDDEC_FW_SUCCESS : Successfully set mask.
+ @retval VIDDEC_FW_INVALID_PARAM: The input parameters are not valid.
+*/
+ uint32_t viddec_fw_parser_set_interruptmask(uint32_t strm_handle, uint32_t mask);
+/**
+ This function gets the interrupt status for current stream.
+ When the host gets Interrupted since its a global interrupt it's expected that host will look at all active streams,
+ by calling this function. The status is what the FW thinks the current state of stream is. The status information that
+ FW provides is complete information on all possible events that are defined. The host should only access this information
+ in its ISR at which state FW doesn't modify this information.
+
+ @param[in] strm_handle : The handle of stream that we want to get mask from
+ @param[out] status : The status of the stream based on viddec_fw_parser_int_status_t enum.
+ @retval VIDDEC_FW_SUCCESS : Successfully in reading status.
+ @retval VIDDEC_FW_INVALID_PARAM: The input parameters are not valid.
+*/
+ uint32_t viddec_fw_parser_getstatus(uint32_t strm_handle, uint32_t *status);
+
+/**
+ This function allows to set stream attributes that are supported.
+ @param[in] strm_handle : The handle of stream that we want to set attribute on.
+ @param[in] type : The type of attribute we want to set, this should be one of items in viddec_fw_stream_attributes_t.
+ @param[in] value : The value of the type that we want to set.
+ @retval VIDDEC_FW_SUCCESS : Successfully Set the attribute.
+ @retval VIDDEC_FW_INVALID_PARAM: The input parameters are not valid.
+*/
+ uint32_t viddec_fw_parser_set_stream_attributes(uint32_t strm_handle, uint32_t type, uint32_t value);
+
+/**
+ This function allows to get current status of all the parser queues. If the current stream is active we return
+   number of input messages that can be written to input queue, no of messages in output queue and number of
+ free available workloads the stream has.
+ Normally this is called when Host receives an interrupt from parser, In which case before releasing the INT
+   Host will try its best to keep the FW busy. We always get an interrupt if we passed the watermark on input or
+ a workload was pushed into output and INT line is free. If host holds onto INT when firmware tries to send an INT
+ FW would send the Interrupt after host releases INT. Since we have EDGE triggered interrupts we cannot guarantee
+ one interrupt per frame, ex: If three frames are generated and after the first frame FW was able to provide an INT
+ to host, but host held on to INT while the FW finished the next two frames, after host releases the INT the FW will
+ give only one INT and host should try to empty output queue.
+ @param[in] strm_handle : The handle of stream that we want to get status of queues.
+ @param[out] status : The status of each queue gets updated in here.
+ @retval VIDDEC_FW_SUCCESS : Successfully Got the status information.
+ @retval VIDDEC_FW_INVALID_PARAM: Invalid parameter in this case an inactive stream.
+*/
+ uint32_t viddec_fw_parser_get_queue_status(uint32_t strm_handle, viddec_fw_q_status_t *status);
+
+/**
+   This function unloads Parser Firmware and frees the resources allocated in Load fw.
+ If this function is called before load fw it will crash with a segmentation fault.
+*/
+ void viddec_fw_parser_deinit(void);
+
+/**
+   This function gets the major and minor revision numbers of the loaded firmware.
+   @param[out] major : The major revision number.
+ @param[out] minor : The minor revision number.
+ @param[out] build : The Internal Build number.
+*/
+ void viddec_fw_parser_get_version_number(unsigned int *major, unsigned int *minor, unsigned int *build);
+
+/**
+ This function clears the global interrupt. This is the last thing host calls before exiting ISR.
+*/
+ void viddec_fw_parser_clear_global_interrupt(void);
+
+/*@}*/
+#ifdef __cplusplus
+}
+#endif
+
+#endif//#ifndef VIDDEC_FW_PARSER_HOST_H
diff --git a/mix_vbp/viddec_fw/include/viddec_fw_workload.h b/mix_vbp/viddec_fw/include/viddec_fw_workload.h
new file mode 100644
index 0000000..73c5ab3
--- /dev/null
+++ b/mix_vbp/viddec_fw/include/viddec_fw_workload.h
@@ -0,0 +1,152 @@
+/*
+ This file is provided under a dual BSD/GPLv2 license. When using or
+ redistributing this file, you may do so under either license.
+
+ GPL LICENSE SUMMARY
+
+ Copyright(c) 2007-2009 Intel Corporation. All rights reserved.
+
+ This program is free software; you can redistribute it and/or modify
+ it under the terms of version 2 of the GNU General Public License as
+ published by the Free Software Foundation.
+
+ This program is distributed in the hope that it will be useful, but
+ WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program; if not, write to the Free Software
+ Foundation, Inc., 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA.
+ The full GNU General Public License is included in this distribution
+ in the file called LICENSE.GPL.
+
+ Contact Information:
+
+ BSD LICENSE
+
+ Copyright(c) 2007-2009 Intel Corporation. All rights reserved.
+ All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in
+ the documentation and/or other materials provided with the
+ distribution.
+ * Neither the name of Intel Corporation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+*/
+#ifndef VIDDEC_FW_WORKLOAD_H
+#define VIDDEC_FW_WORKLOAD_H
+
+#include <stdint.h>
+#include "viddec_fw_item_types.h"
+#include "viddec_fw_frame_attr.h"
+#include "viddec_fw_common_defs.h"
+
+#define VIDDEC_WORKLOAD_FLAGS_ES_START_FRAME (1 << 0)
+#define VIDDEC_WORKLOAD_FLAGS_ES_START_SLICE (1 << 1)
+#define VIDDEC_WORKLOAD_FLAGS_ES_END_SLICE (1 << 2)
+#define VIDDEC_WORKLOAD_FLAGS_ES_END_FRAME (1 << 3)
+
+#define VIDDEC_FRAME_REFERENCE_IS_VALID (0x1<<1)
+// PIP Output Frame request bits
+#define BLSB_VIDDEC_FRAME_REFERENCE_PIP_MODE 24
+#define BMSK_VIDDEC_FRAME_REFERENCE_PIP_MODE (0x3<<BLSB_VIDDEC_FRAME_REFERENCE_PIP_MODE)
+#define VIDDEC_FRAME_REFERENCE_PIP_MODE_NORMAL 0x0
+#define VIDDEC_FRAME_REFERENCE_PIP_MODE_W_HALF 0x1
+#define VIDDEC_FRAME_REFERENCE_PIP_MODE_W_QUARTER 0x2
+
+/** Frame reference information to pass to video decoder when performing a workload (frame decode) */
+typedef struct viddec_frame_reference
+{
+ signed int driver_frame_id;
+ unsigned int luma_phys_addr;
+ unsigned int chroma_phys_addr;
+ int internal_id; /* Used by workload manager only */
+}viddec_frame_reference_t;
+
+#define WORKLOAD_REFERENCE_FRAME (1 << 16)
+#define WORKLOAD_SKIPPED_FRAME (1 << 17)
+/**
+Bitmask to indicate that this workload has range adjustment and needs a range_adjusted_out buffer for successful decode.
+Will be used for VC1 only.
+*/
+#define WORKLOAD_FLAGS_RA_FRAME (1 << 21)
+#define WORKLOAD_REFERENCE_FRAME_BMASK 0x000000ff
+
+/** This structure contains all the information required to fully decode one frame of data */
+/**
+ num_error_mb: This field is populated at the output of the decoder.
+ Currently, its valid only for MPEG2.
+ For other codecs, it defaults to 0.
+
+ range_adjusted_out: Frame buffer needed to store range adjusted frames for VC1 only.
+ Range adjustment in VC1 requires that the luma/chroma values in the decoded frame be modified
+   before the frame can be displayed. In this case, we need a new frame buffer to store the adjusted values.
+ The parser will indicate this requirement by setting the WORKLOAD_FLAGS_RA_FRAME bit in the
+ is_reference_frame of the workload. The decoder expects this field to be valid when range adjustment
+ is indicated and populates this frame buffer along with frame_out.
+
+ Expectation from user:
+ Before feeding workload to the decoder, do the following:
+ If pip is indicated/needed,
+ provide the pip_out buffer
+ If range adjustment is indicated (WORKLOAD_FLAGS_RA_FRAME bit in is_reference_frame is set),
+ provide range_adjusted_out buffer
+ Provide frame_out buffer.
+
+ After workload is returned from the decoder, do the following:
+ If pip is indicated,
+ display the pip_out buffer
+ Else If range adjustment is indicated,
+ display range_adjusted_out buffer
+ Else
+ display frame_out buffer.
+*/
+typedef struct viddec_workload
+{
+ enum viddec_stream_format codec;
+ signed int is_reference_frame;
+ unsigned int result;
+ unsigned int time;
+ unsigned int num_items;/* number of viddec_workload_item_t in current workload */
+ unsigned int num_error_mb; /* Number of error macroblocks in the current picture. */
+ viddec_frame_attributes_t attrs;
+
+ viddec_frame_reference_t frame_out; /* output frame */
+ viddec_frame_reference_t range_adjusted_out; /* for VC1 only */
+ viddec_frame_reference_t pip_out; /* PIP Buffer */
+
+ /* Alignment is needed because the packing different between host and vSparc */
+ __attribute__ ((aligned (16))) viddec_workload_item_t item[1];
+
+ /* ------------------------------------------------------ */
+ /* ------------------------------------------------------ */
+ /* ------------------------------------------------------ */
+ /* This structure is ALLOC_EXTENDED with workload_items */
+ /* ------------------------------------------------------ */
+ /* ------------------------------------------------------ */
+ /* ------------------------------------------------------ */
+} viddec_workload_t;
+
+#endif /* VIDDEC_FW_WORKLOAD_H */