aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--mix_audio/AUTHORS1
-rw-r--r--mix_audio/COPYING26
-rw-r--r--mix_audio/ChangeLog112
-rw-r--r--mix_audio/INSTALL4
-rw-r--r--mix_audio/Makefile.am7
-rw-r--r--mix_audio/NEWS1
-rw-r--r--mix_audio/README2
-rw-r--r--mix_audio/autogen.sh19
-rw-r--r--mix_audio/configure.ac137
-rw-r--r--mix_audio/docs/Makefile.am4
-rw-r--r--mix_audio/docs/reference/Makefile.am4
-rw-r--r--mix_audio/docs/reference/MixAudio/Makefile.am96
-rw-r--r--mix_audio/docs/reference/MixAudio/MixAudio-docs.sgml39
-rw-r--r--mix_audio/docs/reference/MixAudio/MixAudio-sections.txt187
-rw-r--r--mix_audio/docs/reference/MixAudio/MixAudio.types6
-rw-r--r--mix_audio/docs/reference/MixAudio/html/MixAudio-MixAudioConfigParams.html689
-rw-r--r--mix_audio/docs/reference/MixAudio/html/MixAudio-MixAudioConfigParamsAAC.html823
-rw-r--r--mix_audio/docs/reference/MixAudio/html/MixAudio-MixAudioConfigParamsMP3.html221
-rw-r--r--mix_audio/docs/reference/MixAudio/html/MixAudio-MixAudioConfigParamsWMA.html391
-rw-r--r--mix_audio/docs/reference/MixAudio/html/MixAudio-MixAudioInitParams.html139
-rw-r--r--mix_audio/docs/reference/MixAudio/html/MixAudio-mixaudiotypes.html94
-rw-r--r--mix_audio/docs/reference/MixAudio/html/MixAudio.devhelp124
-rw-r--r--mix_audio/docs/reference/MixAudio/html/MixAudio.devhelp2186
-rw-r--r--mix_audio/docs/reference/MixAudio/html/MixAudio.html1286
-rw-r--r--mix_audio/docs/reference/MixAudio/html/api-index-full.html259
-rw-r--r--mix_audio/docs/reference/MixAudio/html/ch01.html56
-rw-r--r--mix_audio/docs/reference/MixAudio/html/index.html60
-rw-r--r--mix_audio/docs/reference/MixAudio/html/index.sgml134
-rw-r--r--mix_audio/docs/reference/MixAudio/html/style.css167
-rw-r--r--mix_audio/docs/reference/MixAudio/html/tree-hierarchy.html37
-rw-r--r--mix_audio/m4/Makefile.am1
-rw-r--r--mix_audio/m4/as-mix-version.m435
-rw-r--r--mix_audio/mixaudio.spec56
-rw-r--r--mix_audio/pkgconfig/Makefile.am11
-rw-r--r--mix_audio/pkgconfig/mixaudio.pc.in12
-rw-r--r--mix_audio/src/Makefile.am61
-rw-r--r--mix_audio/src/amhelper.c120
-rw-r--r--mix_audio/src/amhelper.h25
-rw-r--r--mix_audio/src/intel_sst_ioctl.h337
-rw-r--r--mix_audio/src/mixacp.c322
-rw-r--r--mix_audio/src/mixacp.h367
-rw-r--r--mix_audio/src/mixacpaac.c360
-rw-r--r--mix_audio/src/mixacpaac.h413
-rw-r--r--mix_audio/src/mixacpmp3.c175
-rw-r--r--mix_audio/src/mixacpmp3.h170
-rw-r--r--mix_audio/src/mixacpwma.c205
-rw-r--r--mix_audio/src/mixacpwma.h235
-rw-r--r--mix_audio/src/mixaip.c167
-rw-r--r--mix_audio/src/mixaip.h132
-rw-r--r--mix_audio/src/mixaudio.c2092
-rw-r--r--mix_audio/src/mixaudio.h574
-rw-r--r--mix_audio/src/mixaudiotypes.h27
-rw-r--r--mix_audio/src/pvt.h9
-rw-r--r--mix_audio/src/sst_proxy.c435
-rw-r--r--mix_audio/src/sst_proxy.h17
-rw-r--r--mix_audio/tests/Makefile.am2
-rw-r--r--mix_audio/tests/smoke/Makefile.am25
-rw-r--r--mix_audio/tests/smoke/mixaudiosmoke.c77
-rw-r--r--mix_common/AUTHORS1
-rw-r--r--mix_common/COPYING26
-rw-r--r--mix_common/ChangeLog28
-rw-r--r--mix_common/INSTALL291
-rw-r--r--mix_common/Makefile.am10
-rw-r--r--mix_common/NEWS1
-rw-r--r--mix_common/README1
-rw-r--r--mix_common/autogen.sh8
-rw-r--r--mix_common/configure.ac39
-rw-r--r--mix_common/m4/as-mix-version.m435
-rw-r--r--mix_common/mixcommon.pc.in11
-rw-r--r--mix_common/mixcommon.spec43
-rw-r--r--mix_common/src/Makefile.am23
-rw-r--r--mix_common/src/mixdrmparams.c163
-rw-r--r--mix_common/src/mixdrmparams.h123
-rw-r--r--mix_common/src/mixlog.c257
-rw-r--r--mix_common/src/mixlog.h47
-rw-r--r--mix_common/src/mixparams.c274
-rw-r--r--mix_common/src/mixparams.h202
-rw-r--r--mix_common/src/mixresult.h90
-rw-r--r--mix_vbp/AUTHORS1
-rw-r--r--mix_vbp/COPYING26
-rw-r--r--mix_vbp/ChangeLog2
-rw-r--r--mix_vbp/INSTALL4
-rw-r--r--mix_vbp/Makefile.am9
-rw-r--r--mix_vbp/Merge_readme.txt2
-rw-r--r--mix_vbp/NEWS2
-rw-r--r--mix_vbp/README2
-rw-r--r--mix_vbp/autogen.sh19
-rw-r--r--mix_vbp/configure.ac77
-rw-r--r--mix_vbp/m4/Makefile.am1
-rw-r--r--mix_vbp/m4/as-mix-version.m435
-rw-r--r--mix_vbp/mixvbp.pc.in11
-rw-r--r--mix_vbp/mixvbp.spec52
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/h264/include/h264.h1034
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/h264/include/h264parse.h172
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/h264/include/h264parse_dpb.h107
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/h264/include/h264parse_sei.h314
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse.c786
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_bsd.c228
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_dpb.c4171
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_math.c82
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_mem.c198
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_pps.c128
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sei.c1176
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sh.c740
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sps.c513
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/h264/parser/mix_vbp_h264_stubs.c575
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/h264/parser/viddec_h264_parse.c559
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/h264/parser/viddec_h264_workload.c1306
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/mp2/include/mpeg2.h195
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/mp2/include/viddec_mpeg2.h231
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/mp2/parser/mix_vbp_mpeg2_stubs.c32
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/mp2/parser/viddec_mpeg2_frame_attr.c114
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/mp2/parser/viddec_mpeg2_metadata.c1039
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/mp2/parser/viddec_mpeg2_parse.c380
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/mp2/parser/viddec_mpeg2_workload.c461
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/mp4/include/viddec_fw_mp4.h231
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_fw_mp4_workload.c371
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_decodevideoobjectplane.c98
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_decodevideoobjectplane.h11
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_parse.c278
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_parse.h527
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_shortheader.c134
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_shortheader.h11
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectlayer.c596
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectlayer.h17
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectplane.c422
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectplane.h11
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_visualobject.c287
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_visualobject.h13
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_parse_sc_mp4.c143
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/vc1/include/vc1common.h111
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/vc1/parser/mix_vbp_vc1_stubs.c16
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1.h224
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse.c557
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse.h136
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_bitplane.c753
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_bpic.c100
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_bpic_adv.c257
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_common_defs.h608
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_common_tables.c198
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_huffman.c97
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_ipic.c101
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_ipic_adv.c257
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_mv_com.c82
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_pic_com.c101
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_pic_com_adv.c403
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_ppic.c149
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_ppic_adv.c368
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_vopdq.c130
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/vc1/parser/viddec_vc1_parse.c345
-rw-r--r--mix_vbp/viddec_fw/fw/codecs/vc1/parser/viddec_vc1_workload.c691
-rw-r--r--mix_vbp/viddec_fw/fw/include/stdint.h23
-rw-r--r--mix_vbp/viddec_fw/fw/include/viddec_debug.h31
-rw-r--r--mix_vbp/viddec_fw/fw/include/viddec_fw_version.h7
-rw-r--r--mix_vbp/viddec_fw/fw/parser/Makefile.am205
-rw-r--r--mix_vbp/viddec_fw/fw/parser/gv_sven_devh.c224
-rw-r--r--mix_vbp/viddec_fw/fw/parser/include/fw_pvt.h114
-rw-r--r--mix_vbp/viddec_fw/fw/parser/include/ipc_fw_custom.h87
-rw-r--r--mix_vbp/viddec_fw/fw/parser/include/viddec_emitter.h96
-rw-r--r--mix_vbp/viddec_fw/fw/parser/include/viddec_fw_debug.h80
-rw-r--r--mix_vbp/viddec_fw/fw/parser/include/viddec_fw_parser_fw_ipc.h194
-rw-r--r--mix_vbp/viddec_fw/fw/parser/include/viddec_h264_parse.h6
-rw-r--r--mix_vbp/viddec_fw/fw/parser/include/viddec_mp4_parse.h6
-rw-r--r--mix_vbp/viddec_fw/fw/parser/include/viddec_mpeg2_parse.h6
-rw-r--r--mix_vbp/viddec_fw/fw/parser/include/viddec_parser_ops.h106
-rw-r--r--mix_vbp/viddec_fw/fw/parser/include/viddec_pm.h95
-rw-r--r--mix_vbp/viddec_fw/fw/parser/include/viddec_pm_parse.h24
-rw-r--r--mix_vbp/viddec_fw/fw/parser/include/viddec_pm_tags.h17
-rw-r--r--mix_vbp/viddec_fw/fw/parser/include/viddec_pm_utils_bstream.h81
-rw-r--r--mix_vbp/viddec_fw/fw/parser/include/viddec_pm_utils_list.h51
-rw-r--r--mix_vbp/viddec_fw/fw/parser/include/viddec_vc1_parse.h6
-rw-r--r--mix_vbp/viddec_fw/fw/parser/main.c608
-rw-r--r--mix_vbp/viddec_fw/fw/parser/utils.c253
-rw-r--r--mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c1568
-rw-r--r--mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.h48
-rw-r--r--mix_vbp/viddec_fw/fw/parser/vbp_loader.c162
-rw-r--r--mix_vbp/viddec_fw/fw/parser/vbp_loader.h318
-rw-r--r--mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c1277
-rw-r--r--mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.h49
-rw-r--r--mix_vbp/viddec_fw/fw/parser/vbp_trace.c28
-rw-r--r--mix_vbp/viddec_fw/fw/parser/vbp_trace.h47
-rw-r--r--mix_vbp/viddec_fw/fw/parser/vbp_utils.c548
-rw-r--r--mix_vbp/viddec_fw/fw/parser/vbp_utils.h106
-rw-r--r--mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.c1029
-rw-r--r--mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.h54
-rw-r--r--mix_vbp/viddec_fw/fw/parser/viddec_emit.c78
-rw-r--r--mix_vbp/viddec_fw/fw/parser/viddec_intr.c56
-rw-r--r--mix_vbp/viddec_fw/fw/parser/viddec_parse_sc.c119
-rw-r--r--mix_vbp/viddec_fw/fw/parser/viddec_parse_sc_fast_loop.c190
-rw-r--r--mix_vbp/viddec_fw/fw/parser/viddec_parse_sc_stub.c6
-rw-r--r--mix_vbp/viddec_fw/fw/parser/viddec_pm.c554
-rw-r--r--mix_vbp/viddec_fw/fw/parser/viddec_pm_flush.c127
-rw-r--r--mix_vbp/viddec_fw/fw/parser/viddec_pm_parser_ops.c178
-rw-r--r--mix_vbp/viddec_fw/fw/parser/viddec_pm_stubs.c21
-rw-r--r--mix_vbp/viddec_fw/fw/parser/viddec_pm_tags.c304
-rw-r--r--mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_bstream.c472
-rw-r--r--mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_list.c221
-rw-r--r--mix_vbp/viddec_fw/include/viddec_fw_common_defs.h200
-rw-r--r--mix_vbp/viddec_fw/include/viddec_fw_decoder_host.h242
-rw-r--r--mix_vbp/viddec_fw/include/viddec_fw_frame_attr.h281
-rw-r--r--mix_vbp/viddec_fw/include/viddec_fw_item_types.h738
-rw-r--r--mix_vbp/viddec_fw/include/viddec_fw_parser_host.h237
-rw-r--r--mix_vbp/viddec_fw/include/viddec_fw_workload.h152
-rw-r--r--mix_video/AUTHORS1
-rw-r--r--mix_video/COPYING26
-rw-r--r--mix_video/ChangeLog2
-rw-r--r--mix_video/INSTALL4
-rw-r--r--mix_video/Makefile.am9
-rw-r--r--mix_video/NEWS2
-rw-r--r--mix_video/README2
-rw-r--r--mix_video/autogen.sh19
-rw-r--r--mix_video/configure.ac137
-rw-r--r--mix_video/m4/Makefile.am1
-rw-r--r--mix_video/m4/as-mix-version.m435
-rw-r--r--mix_video/mixvideo.pc.in12
-rw-r--r--mix_video/mixvideo.spec67
-rw-r--r--mix_video/mixvideoint.pc.in12
-rw-r--r--mix_video/src/Makefile.am136
-rw-r--r--mix_video/src/Makefile.old40
-rw-r--r--mix_video/src/mixbuffer.c220
-rw-r--r--mix_video/src/mixbuffer.h130
-rw-r--r--mix_video/src/mixbuffer_private.h39
-rw-r--r--mix_video/src/mixbufferpool.c484
-rw-r--r--mix_video/src/mixbufferpool.h150
-rw-r--r--mix_video/src/mixdisplay.c539
-rw-r--r--mix_video/src/mixdisplay.h233
-rw-r--r--mix_video/src/mixdisplayx11.c205
-rw-r--r--mix_video/src/mixdisplayx11.h141
-rw-r--r--mix_video/src/mixdrmparams.c189
-rw-r--r--mix_video/src/mixdrmparams.h126
-rw-r--r--mix_video/src/mixframemanager.c775
-rw-r--r--mix_video/src/mixframemanager.h164
-rw-r--r--mix_video/src/mixsurfacepool.c652
-rw-r--r--mix_video/src/mixsurfacepool.h158
-rw-r--r--mix_video/src/mixvideo.c1638
-rw-r--r--mix_video/src/mixvideo.h208
-rw-r--r--mix_video/src/mixvideo_private.h57
-rw-r--r--mix_video/src/mixvideocaps.c261
-rw-r--r--mix_video/src/mixvideocaps.h137
-rw-r--r--mix_video/src/mixvideoconfigparams.c157
-rw-r--r--mix_video/src/mixvideoconfigparams.h126
-rw-r--r--mix_video/src/mixvideoconfigparamsdec.c534
-rw-r--r--mix_video/src/mixvideoconfigparamsdec.h195
-rw-r--r--mix_video/src/mixvideoconfigparamsdec_h264.c213
-rw-r--r--mix_video/src/mixvideoconfigparamsdec_h264.h130
-rw-r--r--mix_video/src/mixvideoconfigparamsdec_mp42.c244
-rw-r--r--mix_video/src/mixvideoconfigparamsdec_mp42.h141
-rw-r--r--mix_video/src/mixvideoconfigparamsdec_vc1.c188
-rw-r--r--mix_video/src/mixvideoconfigparamsdec_vc1.h134
-rw-r--r--mix_video/src/mixvideoconfigparamsenc.c688
-rw-r--r--mix_video/src/mixvideoconfigparamsenc.h254
-rw-r--r--mix_video/src/mixvideoconfigparamsenc_h264.c322
-rw-r--r--mix_video/src/mixvideoconfigparamsenc_h264.h160
-rw-r--r--mix_video/src/mixvideoconfigparamsenc_mpeg4.c300
-rw-r--r--mix_video/src/mixvideoconfigparamsenc_mpeg4.h152
-rw-r--r--mix_video/src/mixvideoconfigparamsenc_preview.c222
-rw-r--r--mix_video/src/mixvideoconfigparamsenc_preview.h124
-rw-r--r--mix_video/src/mixvideodecodeparams.c204
-rw-r--r--mix_video/src/mixvideodecodeparams.h139
-rw-r--r--mix_video/src/mixvideodef.h114
-rw-r--r--mix_video/src/mixvideoencodeparams.c204
-rw-r--r--mix_video/src/mixvideoencodeparams.h140
-rw-r--r--mix_video/src/mixvideoformat.c401
-rw-r--r--mix_video/src/mixvideoformat.h160
-rw-r--r--mix_video/src/mixvideoformat_h264.c1663
-rw-r--r--mix_video/src/mixvideoformat_h264.h129
-rw-r--r--mix_video/src/mixvideoformat_mp42.c1416
-rw-r--r--mix_video/src/mixvideoformat_mp42.h117
-rw-r--r--mix_video/src/mixvideoformat_vc1.c1749
-rw-r--r--mix_video/src/mixvideoformat_vc1.h123
-rw-r--r--mix_video/src/mixvideoformatenc.c502
-rw-r--r--mix_video/src/mixvideoformatenc.h178
-rw-r--r--mix_video/src/mixvideoformatenc_h264.c1954
-rw-r--r--mix_video/src/mixvideoformatenc_h264.h137
-rw-r--r--mix_video/src/mixvideoformatenc_mpeg4.c1713
-rw-r--r--mix_video/src/mixvideoformatenc_mpeg4.h137
-rw-r--r--mix_video/src/mixvideoformatenc_preview.c1187
-rw-r--r--mix_video/src/mixvideoformatenc_preview.h133
-rw-r--r--mix_video/src/mixvideoformatqueue.h24
-rw-r--r--mix_video/src/mixvideoframe.c391
-rw-r--r--mix_video/src/mixvideoframe.h144
-rw-r--r--mix_video/src/mixvideoframe_private.h68
-rw-r--r--mix_video/src/mixvideoinitparams.c219
-rw-r--r--mix_video/src/mixvideoinitparams.h138
-rw-r--r--mix_video/src/mixvideolog.h25
-rw-r--r--mix_video/src/mixvideorenderparams.c420
-rw-r--r--mix_video/src/mixvideorenderparams.h158
-rw-r--r--mix_video/src/mixvideorenderparams_internal.h36
-rw-r--r--mix_video/src/test.c87
-rw-r--r--mix_video/test/Makefile.am2
-rw-r--r--mix_video/test/autogen.sh1
-rw-r--r--mix_video/test/configure.ac53
-rw-r--r--mix_video/test/src/Makefile.am22
-rw-r--r--mix_video/test/src/test_framemanager.c200
294 files changed, 74577 insertions, 0 deletions
diff --git a/mix_audio/AUTHORS b/mix_audio/AUTHORS
new file mode 100644
index 0000000..d74d027
--- /dev/null
+++ b/mix_audio/AUTHORS
@@ -0,0 +1 @@
+echo.choi@intel.com
diff --git a/mix_audio/COPYING b/mix_audio/COPYING
new file mode 100644
index 0000000..a4f852c
--- /dev/null
+++ b/mix_audio/COPYING
@@ -0,0 +1,26 @@
+INTEL SOFTWARE LICENSE AGREEMENT (Alpha, Beta, Prototype Site License)
+
+IMPORTANT - READ BEFORE COPYING, INSTALLING OR USING.
+Do not use or load this software and any associated materials (collectively, the “Software”) until you have carefully read the following terms and conditions. By loading or using the Software, you agree to the terms of this Agreement. If you do not wish to so agree, do not install or use the Software.
+
+
+LICENSE. Intel hereby grants you a limited, nontransferable, non-sublicenseable, nonexclusive, royalty-free, fully-paid license under Intel’s copyrights to use the Software on your organization’s computers solely for your organization’s internal evaluation and testing in connection with Intel products designed for the Software, and you may make a reasonable number of  copies of the Software for internal use only, subject to these conditions:
+1. You may not copy, modify, rent, sell, distribute, externally display, externally perform or transfer any part of the Software except as provided in this Agreement, and you agree to prevent unauthorized copying of the Software.
+2. You may not reverse engineer, decompile, or disassemble the Software.
+3. You may not sublicense the Software.
+4. The Software may include portions offered on terms in addition to those set out here, as set out in a license accompanying those portions.
+5. You may not subject the Software, in whole or in part, to any license obligations of Open Source Software including without limitation combining or distributing the Software with Open Source Software in a manner that subjects the Software or any portion of the Software provided by Intel hereunder to any license obligations of such Open Source Software. "Open Source Software" means any software that requires as a condition of use, modification and/or distribution of such software that such software or other software incorporated into, derived from or distributed with such software (a) be disclosed or distributed in source code form; or (b) be licensed by the user to third parties for the purpose of making and/or distributing derivative works; or (c) be redistributable at no charge. Open Source Software includes, without limitation, software licensed or distributed under any of the following licenses or distribution models, or licenses or distribution models substantially similar to any of the following: (a) GNU’s General Public License (GPL) or Lesser/Library GPL (LGPL), (b) the Artistic License (e.g., PERL), (c) the Mozilla Public License, (d) the Netscape Public License, (e) the Sun Community Source License (SCSL), (f) the Sun Industry Source License (SISL), (g) the Apache Software license and (h) the Common Public License (CPL).
+OWNERSHIP OF SOFTWARE AND COPYRIGHTS. Title to all copies of the Software remains with Intel or its suppliers. The Software is copyrighted and protected by the laws of the United States and other countries, and international treaty provisions. You may not remove any copyright notices from the Software. Intel may make changes to the Software, or to items referenced therein, at any time without notice, but is not obligated to support, update, upgrade or provide training for the Software. Except as otherwise expressly provided, Intel grants no express or implied right under Intel patents, copyrights, trademarks, or other intellectual property rights.
+EXCLUSION OF WARRANTIES . THE SOFTWARE IS PROVIDED "AS IS" WITHOUT ANY EXPRESS OR IMPLIED WARRANTY OF ANY KIND INCLUDING WARRANTIES OF MERCHANTABILITY, NONINFRINGEMENT, OR FITNESS FOR A PARTICULAR PURPOSE. Intel does not warrant or assume responsibility for the accuracy or completeness of any information, text, graphics, links or other items contained within the Software.
+LIMITATION OF LIABILITY. IN NO EVENT SHALL INTEL OR ITS SUPPLIERS BE LIABLE FOR ANY DAMAGES WHATSOEVER (INCLUDING, WITHOUT LIMITATION, LOST PROFITS, BUSINESS INTERRUPTION OR LOST INFORMATION) ARISING OUT OF THE USE OF OR INABILITY TO USE THE SOFTWARE, EVEN IF INTEL HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. SOME JURISDICTIONS PROHIBIT EXCLUSION OR LIMITATION OF LIABILITY FOR IMPLIED WARRANTIES OR CONSEQUENTIAL OR INCIDENTAL DAMAGES, SO THE ABOVE LIMITATION MAY NOT APPLY TO YOU. YOU MAY ALSO HAVE OTHER LEGAL RIGHTS THAT VARY FROM JURISDICTION TO JURISDICTION. THE SOFTWARE LICENSED HEREUNDER IS NOT DESIGNED OR INTENDED FOR USE IN ANY APPLICATION IN WHICH THE FAILURE OF THE SOFTWARE COULD LEAD TO PERSONAL INJURY OR DEATH. YOU SHALL INDEMNIFY AND HOLD INTEL AND THE INTEL PARTIES HARMLESS AGAINST ALL CLAIMS, COSTS, DAMAGES, AND EXPENSES, AND REASONABLE ATTORNEY FEES ARISING OUT OF, DIRECTLY OR INDIRECTLY, THE UNINTENDED USE OF THE SOFTWARE AND ANY CLAIM OF PRODUCT LIABILITY, PERSONAL INJURY OR DEATH ASSOCIATED WITH ANY UNINTENDED USE, EVEN IF SUCH CLAIM ALLEGES THAT AN INTEL PARTY WAS NEGLIGENT REGARDING THE DESIGN OR MANUFACTURE OF THE SOFTWARE. THE LIMITED REMEDIES, WARRANTY DISCLAIMER AND LIMITED LIABILITY ARE FUNDAMENTAL ELEMENTS OF THE BASIS OF THE BARGAIN BETWEEN INTEL AND YOU. INTEL WOULD NOT BE ABLE TO PROVIDE THE SOFTWARE WITHOUT SUCH LIMITATIONS.
+TERMINATION OF THIS AGREEMENT. Intel may terminate this Agreement at any time if you violate its terms. Upon termination, you will immediately destroy the Software or return all copies of the Software to Intel (including providing certification of such destruction back to Intel). In the event of termination of this Agreement, all licenses granted to you hereunder shall immediately terminate.
+APPLICABLE LAWS. Claims arising under this Agreement shall be governed by the laws of Delaware, excluding its principles of conflict of laws and the United Nations Convention on Contracts for the Sale of Goods. You may not export the Software in violation of applicable export laws and regulations.
+GOVERNMENT RESTRICTED RIGHTS. The Software is provided with "RESTRICTED RIGHTS." Use, duplication or disclosure by the government is subject to restrictions as set forth in FAR52.227-14 and DFAR252.227-7013 et seq. or their successors. Use of the Software by the government constitutes acknowledgment of Intel's proprietary rights therein. Contractor or Manufacturer is Intel Corporation, 2200 Mission College Blvd., Santa Clara, CA 95052.
+CONFIDENTIALITY. You shall not disclose the terms or existence of this Agreement or use Intel's name in any publications, advertisements, or other announcements without Intel's prior written consent. You do not have any rights to use any Intel trademarks or logos.
+ASSIGNMENT. You may not delegate, assign or transfer this Agreement, the license(s) granted or any of your rights or duties hereunder, expressly, by implication, by operation of law, by way of merger (regardless of whether you are the surviving entity) or acquisition, or otherwise and any attempt to do so, without Intel’s express prior written consent, shall be null and void. Intel may assign this Agreement, and its rights and obligations hereunder, in its sole discretion.
+ENTIRE AGREEMENT. The terms and conditions of this Agreement constitutes the entire agreement between the parties with respect to the subject matter hereof, and merges and supersedes all prior, contemporaneous agreements, understandings, negotiations and discussions. Neither of the parties hereto shall be bound by any conditions, definitions, warranties, understandings or representations with respect to the subject matter hereof other than as expressly provided for herein. Intel is not obligated under any other agreements unless they are in writing and signed by an authorized representative of Intel.
+NO AGENCY Nothing contained herein shall be construed as creating any agency, employment relationship, partnership, principal-agent or other form of joint enterprise between the parties.
+SEVERABILITY In the event that any provision of this Agreement shall be unenforceable or invalid under any applicable law or be so held by an applicable court decision, such unenforceability or invalidity shall not render this Agreement unenforceable or invalid as a whole, and, in such event, such provision shall be changed and interpreted so as to best accomplish the objectives of such unenforceable or invalid provision within the limits of applicable law or applicable court decisions.
+WAIVER The failure of either party to require performance by the other party of any provision hereof shall not affect the full right to require such performance at any time thereafter; nor shall the waiver by either party of a breach of any provision hereof be taken or held to be a waiver of the provision itself.
+CONTROLLING LANGUAGE. Translated versions of this Agreement may be provided in local languages for informational purposes only, provided however, that the English language version of this Agreement shall be controlling in all jurisdictions.
+
diff --git a/mix_audio/ChangeLog b/mix_audio/ChangeLog
new file mode 100644
index 0000000..3eb86f3
--- /dev/null
+++ b/mix_audio/ChangeLog
@@ -0,0 +1,112 @@
+2010-01-18 Echo Choi <echo@firefly>
+
+ * Updated version to 0.3.5 and submit for build.
+ * Updated call to Audio Manager to use stream name.
+ * Removed the check to allow decode to be called during PAUSE.
+
+2010-01-11 Echo Choi <echo@firefly>
+
+ * Updated version to 0.3.4
+ * Updated MixCommon dependency to v 0.1.6.
+ * Updated the parameter conversion code for AAC to detect codec value from parameters.
+ * Fixed and added more enum types for AAC parameters definitions.
+ * Added methods to replace AAC parameters direct accessing.
+ * Added psPresentFlag for AAC param object.
+ * Updated gtk-doc documentation.
+ * Added get_stream_byte_decoded API.
+
+2010-01-04 Echo Choi <echo@firefly>
+
+ * Fixed code review issues: declare const for char*
+ * Fixed code review issues: array size calculation.
+
+2009-12-23 Echo Choi <echo@firefly>
+
+ * Added aac core operating frequency param for AAC Param object. Needed to configure HE-AAC decoder.
+ * Fixed the log message category for DRAIN debug log.
+
+2009-11-19 Echo Choi <echo@firefly>
+
+ * Added more utility function to populate param object.
+ * Added MixAudio API to read output configuration (get params)
+
+2009-11-18 Echo Choi <echo@firefly>
+
+ * Added return code that inform caller to interpret errno for error.
+ * Fixed more error checkings.
+
+2009-11-17 Echo Choi <echo@firefly>
+
+ * Added default invalid value for various enumerations.
+ * Fixed some bugs in type declarations.
+ * Cleaned up code. Added pointer checks, state checks.
+
+2009-11-15 Echo Choi <echo@firefly>
+
+ * Updated version to 0.3.3 and package for build.
+ * Fixed DRAIN state test condition.
+
+2009-11-13 Echo Choi <echo@firefly>
+
+ * Updated MixCommon version dependency as MixAudio is using new definitions from MixCommon.
+ * Fixed issues reported by klocwork.
+
+2009-11-11 Echo Choi <echo@firefly>
+
+ * Fixed a mem leak in the stub code.
+
+2009-11-01 Echo Choi <echo@firefly>
+
+ * Increased version number to 0.3.2 and package for build.
+
+2009-10-28 Echo Choi <echo@firefly>
+
+ * Renamed MPEG_FORMAT member of AAC params to MPEG_ID.
+
+2009-10-23 Echo Choi <echo@firefly>
+
+ * Updated version to 0.3.1 for build.
+ * Added code to assign op_align to sst structure in deterministic case.
+ * Added stub code to write input bytes to file during dnr request.
+ * Fixed MixAudio::decode() method to use correct 64-bit type for decode consumed/produced.
+
+2009-10-18 Echo Choi <echo@firefly>
+
+ * Added dbus-glib dependency.
+ * Updated AAC param object to include additional fields for HE-AAC support.
+
+2009-10-16 Echo Choi <echo@firefly>
+
+ * Moved mixdrmparams.* to MixCommon package.
+ * Changed mix_audio_decode API to include output parameters for bytes consumed and produced.
+ * Updated version to 0.3.0 to reflect API change in mix_audio_decode.
+
+2009-10-08 Echo Choi <echo@firefly>
+
+ * Package for 0.2.6 build.
+
+2009-10-02 Echo Choi <echo@firefly>
+
+ * Updated version number to 0.2.6
+ * Defined new range for error code that encapsulate errno when system calls to SST API shall fail.
+ * Added internal states to track PAUSED_DRAINING, and added code to deal with this state.
+
+2009-08-17 Echo Choi <echo@firefly>
+
+ * Updated SST API struct to align with build 0.04.008.
+ * Added bit-mask based runtime log mechanism.
+
+2009-08-14 Echo Choi <echo@firefly>
+
+ * Fixed return value check after DROP call.
+ * Added method to dump status upon SST call failure.
+
+2009-08-13 Echo Choi <echo@firefly>
+
+ * Updated API definitions to sync with v0.5 documentation.
+
+2009-08-10 Echo Choi <echo@firefly>
+
+ * Fixed stop_drop so it is called even if the state is STOPPED
+
+
diff --git a/mix_audio/INSTALL b/mix_audio/INSTALL
new file mode 100644
index 0000000..50e1648
--- /dev/null
+++ b/mix_audio/INSTALL
@@ -0,0 +1,4 @@
+run the following to build and install:
+./autogen.sh
+./configure
+make
diff --git a/mix_audio/Makefile.am b/mix_audio/Makefile.am
new file mode 100644
index 0000000..2ed4bcd
--- /dev/null
+++ b/mix_audio/Makefile.am
@@ -0,0 +1,7 @@
+SUBDIRS = src tests pkgconfig
+
+#Uncomment the following line if building documentation using gtkdoc
+#SUBDIRS += docs
+
+EXTRA_DIST = autogen.sh m4
+DISTCHECK_CONFIGURE_FLAGS=--enable-gtk-doc
diff --git a/mix_audio/NEWS b/mix_audio/NEWS
new file mode 100644
index 0000000..82302b4
--- /dev/null
+++ b/mix_audio/NEWS
@@ -0,0 +1 @@
+no.
diff --git a/mix_audio/README b/mix_audio/README
new file mode 100644
index 0000000..b4292a0
--- /dev/null
+++ b/mix_audio/README
@@ -0,0 +1,2 @@
+MIX Audio is a user library interface for various hardware audio codecs available on the platform.
+
diff --git a/mix_audio/autogen.sh b/mix_audio/autogen.sh
new file mode 100644
index 0000000..13a1d76
--- /dev/null
+++ b/mix_audio/autogen.sh
@@ -0,0 +1,19 @@
+#INTEL CONFIDENTIAL
+#Copyright 2009 Intel Corporation All Rights Reserved.
+#The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+#No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+#
+
+package=MixAudio
+
+#Uncomment the follow line if building documentation using gtkdoc
+#gtkdocize --flavour no-tmpl || exit 1
+aclocal -I m4/ $ACLOCAL_FLAGS || exit 1
+libtoolize --copy --force || exit 1
+autoheader -v || exit 1
+autoconf -v || exit 1
+automake -a -c -v || exit 1
+
+echo "Now type ./configure to configure $package."
+exit 0
diff --git a/mix_audio/configure.ac b/mix_audio/configure.ac
new file mode 100644
index 0000000..bcbb4ab
--- /dev/null
+++ b/mix_audio/configure.ac
@@ -0,0 +1,137 @@
+AC_INIT("","",[echo.choi@intel.com])
+
+AC_CONFIG_MACRO_DIR(m4)
+
+AS_MIX_VERSION(mixaudio, MIXAUDIO, 0, 3, 5)
+
+dnl AM_MAINTAINER_MODE provides the option to enable maintainer mode
+AM_MAINTAINER_MODE
+
+AM_INIT_AUTOMAKE($PACKAGE, $VERSION)
+
+MIXAUDIO_PKG_DEPS="glib-2.0, gobject-2.0, gthread-2.0 mixcommon"
+AC_SUBST(MIXAUDIO_PKG_DEPS)
+
+dnl make aclocal work in maintainer mode
+AC_SUBST(ACLOCAL_AMFLAGS, "-I m4")
+
+AM_CONFIG_HEADER(config.h)
+
+dnl check for tools
+AC_PROG_CC_C_O
+AC_PROG_CC
+AC_PROG_LIBTOOL
+
+MIX_CFLAGS="-Wall -Werror -O"
+
+AC_ARG_ENABLE([lpestub],
+ [ --enable-lpestub Stub LPE methods],
+ [case "${enableval}" in
+ yes) lpestub=true ;;
+ no) lpestub=false ;;
+ *) AC_MSG_ERROR([bad value ${enableval} for --enable-lpestub]) ;;
+ esac],[lpestub=false])
+
+AM_CONDITIONAL([LPESTUB], [test x$lpestub = xtrue])
+
+AC_ARG_ENABLE([workaround],
+ [ --enable-workaround Enable workaround for LPE DROP],
+ [case "${enableval}" in
+ yes) workaround=true ;;
+ no) workaround=false ;;
+ *) AC_MSG_ERROR([bad value ${enableval} for --enable-workaround]) ;;
+ esac],[workaround=false])
+
+AM_CONDITIONAL([WORKAROUND], [test x$workaround = xtrue])
+
+AC_ARG_ENABLE([audiomanager],
+ [ --enable-audiomanager Audio Manager methods(default=enable)],
+ [case "${enableval}" in
+ yes) audiomanager=true ;;
+ no) audiomanager=false ;;
+ *) AC_MSG_ERROR([bad value ${enableval} for --enable-audiomanager]) ;;
+ esac],[audiomanager=true])
+
+AM_CONDITIONAL([AUDIO_MANAGER], [test x$audiomanager = xtrue])
+
+dnl decide on error flags
+dnl AS_COMPILER_FLAG(-Wall, MIX_ERROR="$MIX_ERROR -Wall", MIX_ERROR="$MIX_ERROR")
+dnl AS_COMPILER_FLAG(-Werror,MIX_ERROR="$MIX_ERROR -Werror",MIX_ERROR="$MIX_ERROR")
+
+dnl Check for pkgconfig first
+AC_CHECK_PROG(HAVE_PKGCONFIG, pkg-config, yes, no)
+
+dnl Give error and exit if we don't have pkgconfig
+if test "x$HAVE_PKGCONFIG" = "xno"; then
+ AC_MSG_ERROR(you need to have pkgconfig installed !)
+fi
+
+dnl GLib
+dnl FIXME: need to align with moblin glib version
+dnl FIXME: currently using an earlier version so it can be built on dev box.
+GLIB_REQ=2.18
+
+dnl Check for glib2 without extra fat, useful for the unversioned tool frontends
+dnl PKG_CHECK_MODULES(GLIB_ONLY, glib-2.0 >= $GLIB_REQ,HAVE_GLIB=yes,HAVE_GLIB=no)
+PKG_CHECK_MODULES(GLIB, glib-2.0 >= $GLIB_REQ,HAVE_GLIB=yes,HAVE_GLIB=no)
+if test "x$HAVE_GLIB" = "xno"; then
+ AC_MSG_ERROR(You need glib development packages installed !)
+fi
+
+PKG_CHECK_MODULES(GOBJECT, gobject-2.0 >= $GLIB_REQ,HAVE_GOBJECT=yes,HAVE_GOBJECT=no)
+if test "x$HAVE_GOBJECT" = "xno"; then
+ AC_MSG_ERROR(You need glib development packages installed !)
+fi
+
+PKG_CHECK_MODULES(GTHREAD, gthread-2.0 >= $GLIB_REQ,HAVE_GTHREAD=yes,HAVE_GTHREAD=no)
+if test "x$HAVE_GTHREAD" = "xno"; then
+ AC_MSG_ERROR(You need glib development packages installed !)
+fi
+
+MIXCOMMON_REQUIRED=0.1.6
+PKG_CHECK_MODULES(MIXCOMMON, mixcommon >= $MIXCOMMON_REQUIRED , HAVE_MIXCOMMON=yes, HAVE_MIXCOMMON=no)
+if test "x$HAVE_MIXCOMMON" = "xno"; then
+ AC_MSG_ERROR(You need mixcommon development package $MIXCOMMON_REQUIRED installed !)
+fi
+
+if test "x$audiomanager" = "xtrue"; then
+ PKG_CHECK_MODULES(DBUS_GLIB, dbus-glib-1)
+fi
+
+dnl Check for documentation xrefs
+dnl GLIB_PREFIX="`$PKG_CONFIG --variable=prefix glib-2.0`"
+dnl AC_SUBST(GLIB_PREFIX)
+
+AC_SUBST(GLIB_CFLAGS)
+AC_SUBST(GLIB_LIBS)
+AC_SUBST(GOBJECT_CFLAGS)
+AC_SUBST(GOBJECT_LIBS)
+AC_SUBST(MIX_CFLAGS)
+AC_SUBST(GTHREAD_CFLAGS)
+AC_SUBST(GTHREAD_LIBS)
+AC_SUBST(MIXCOMMON_CFLAGS)
+AC_SUBST(MIXCOMMON_LIBS)
+
+dnl check for gtk-doc
+dnl GTK_DOC_CHECK(1.9)
+
+AC_CONFIG_FILES(
+Makefile
+src/Makefile
+tests/Makefile
+tests/smoke/Makefile
+pkgconfig/Makefile
+pkgconfig/mixaudio.pc
+)
+
+dnl Additional Makefiles if we are building document with gtkdoc.
+dnl Un-comment this section to enable building of documentation.
+dnl AC_CONFIG_FILES(
+dnl docs/Makefile
+dnl docs/reference/Makefile
+dnl docs/reference/MixAudio/Makefile
+dnl )
+
+AC_OUTPUT
+
+
diff --git a/mix_audio/docs/Makefile.am b/mix_audio/docs/Makefile.am
new file mode 100644
index 0000000..621e3f7
--- /dev/null
+++ b/mix_audio/docs/Makefile.am
@@ -0,0 +1,4 @@
+SUBDIRS = reference
+
+DIST_SUBDIRS = reference
+
diff --git a/mix_audio/docs/reference/Makefile.am b/mix_audio/docs/reference/Makefile.am
new file mode 100644
index 0000000..85bde95
--- /dev/null
+++ b/mix_audio/docs/reference/Makefile.am
@@ -0,0 +1,4 @@
+SUBDIRS = MixAudio
+
+DIST_SUBDIRS = MixAudio
+
diff --git a/mix_audio/docs/reference/MixAudio/Makefile.am b/mix_audio/docs/reference/MixAudio/Makefile.am
new file mode 100644
index 0000000..adf494c
--- /dev/null
+++ b/mix_audio/docs/reference/MixAudio/Makefile.am
@@ -0,0 +1,96 @@
+#INTEL CONFIDENTIAL
+#Copyright 2009 Intel Corporation All Rights Reserved.
+#The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+#No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+#
+
+
+## Process this file with automake to produce Makefile.in
+
+# We require automake 1.6 at least.
+AUTOMAKE_OPTIONS = 1.6
+
+# This is a blank Makefile.am for using gtk-doc.
+# Copy this to your project's API docs directory and modify the variables to
+# suit your project. See the GTK+ Makefiles in gtk+/docs/reference for examples
+# of using the various options.
+
+# The name of the module, e.g. 'glib'.
+DOC_MODULE=MixAudio
+
+# The top-level SGML file. You can change this if you want to.
+DOC_MAIN_SGML_FILE=$(DOC_MODULE)-docs.sgml
+
+# The directory containing the source code. Relative to $(srcdir).
+# gtk-doc will search all .c & .h files beneath here for inline comments
+# documenting the functions and macros.
+# e.g. DOC_SOURCE_DIR=../../../gtk
+DOC_SOURCE_DIR=$(top_srcdir)/src
+
+# Extra options to pass to gtkdoc-scangobj. Not normally needed.
+#SCANGOBJ_OPTIONS=--type-init-func="g_init(NULL,NULL)"
+
+# Extra options to supply to gtkdoc-scan.
+# e.g. SCAN_OPTIONS=--deprecated-guards="GTK_DISABLE_DEPRECATED"
+SCAN_OPTIONS=--rebuild-sections --rebuild-types
+#SCAN_OPTIONS=--rebuild-sections
+
+# Extra options to supply to gtkdoc-mkdb.
+# e.g. MKDB_OPTIONS=--sgml-mode --output-format=xml
+MKDB_OPTIONS=--sgml-mode --output-format=xml
+
+# Extra options to supply to gtkdoc-mktmpl
+# e.g. MKTMPL_OPTIONS=--only-section-tmpl
+MKTMPL_OPTIONS=
+
+# Extra options to supply to gtkdoc-fixref. Not normally needed.
+# e.g. FIXXREF_OPTIONS=--extra-dir=../gdk-pixbuf/html --extra-dir=../gdk/html
+FIXXREF_OPTIONS=
+
+# Used for dependencies. The docs will be rebuilt if any of these change.
+# e.g. HFILE_GLOB=$(top_srcdir)/gtk/*.h
+# e.g. CFILE_GLOB=$(top_srcdir)/gtk/*.c
+HFILE_GLOB=$(top_srcdir)/src/*.h
+CFILE_GLOB=$(top_srcdir)/src/*.c
+
+# Header files to ignore when scanning.
+# e.g. IGNORE_HFILES=gtkdebug.h gtkintl.h
+IGNORE_HFILES=*~ intel_sst_ioctl.h pvt.h sst_proxy.h amhelper.h
+
+# Images to copy into HTML directory.
+# e.g. HTML_IMAGES=$(top_srcdir)/gtk/stock-icons/stock_about_24.png
+HTML_IMAGES=
+
+# Extra SGML files that are included by $(DOC_MAIN_SGML_FILE).
+# e.g. content_files=running.sgml building.sgml changes-2.0.sgml
+content_files=
+
+# SGML files where gtk-doc abbreviations (#GtkWidget) are expanded
+# These files must be listed here *and* in content_files
+# e.g. expand_content_files=running.sgml
+expand_content_files=
+
+# CFLAGS and LDFLAGS for compiling gtkdoc-scangobj with your library.
+# Only needed if you are using gtkdoc-scangobj to dynamically query widget
+# signals and properties.
+# e.g. INCLUDES=-I$(top_srcdir) -I$(top_builddir) $(GTK_DEBUG_FLAGS)
+# e.g. GTKDOC_LIBS=$(top_builddir)/gtk/$(gtktargetlib)
+AM_CFLAGS=$(GLIB_CFLAGS) $(GOBJECT_CFLAGS) $(MIXCOMMON_CFLAGS)
+GTKDOC_LIBS=$(GLIB_LIBS) $(GOBJECT_LIBS) $(MIXCOMMON_LIBS) $(top_srcdir)/src/libmixaudio.la
+
+# This includes the standard gtk-doc make rules, copied by gtkdocize.
+include $(top_srcdir)/gtk-doc.make
+
+# Other files to distribute
+# e.g. EXTRA_DIST += version.xml.in
+EXTRA_DIST +=
+
+# Files not to distribute
+# for --rebuild-types in $(SCAN_OPTIONS), e.g. $(DOC_MODULE).types
+# for --rebuild-sections in $(SCAN_OPTIONS) e.g. $(DOC_MODULE)-sections.txt
+#DISTCLEANFILES =
+
+# Comment this out if you want your docs-status tested during 'make check'
+#TESTS = $(GTKDOC_CHECK)
+
diff --git a/mix_audio/docs/reference/MixAudio/MixAudio-docs.sgml b/mix_audio/docs/reference/MixAudio/MixAudio-docs.sgml
new file mode 100644
index 0000000..7627fe9
--- /dev/null
+++ b/mix_audio/docs/reference/MixAudio/MixAudio-docs.sgml
@@ -0,0 +1,39 @@
+<?xml version="1.0"?>
+<!DOCTYPE book PUBLIC "-//OASIS//DTD DocBook XML V4.1.2//EN"
+ "http://www.oasis-open.org/docbook/xml/4.1.2/docbookx.dtd">
+<book id="index" xmlns:xi="http://www.w3.org/2003/XInclude">
+ <bookinfo>
+ <title>MixAudio Reference Manual</title>
+ <releaseinfo>
+ MixAudio version 0.3
+<!-- The latest version of this documentation can be found on-line at
+ <ulink role="online-location" url="http://[SERVER]/MixAudio/index.html">http://[SERVER]/MixAudio/</ulink>.
+ -->
+ </releaseinfo>
+ </bookinfo>
+
+ <chapter>
+ <title>Mix Audio API</title>
+ <xi:include href="xml/mixacpaac.xml"/>
+ <xi:include href="xml/mixacpmp3.xml"/>
+ <xi:include href="xml/mixacpwma.xml"/>
+ <xi:include href="xml/mixacp.xml"/>
+ <xi:include href="xml/mixaip.xml"/>
+ <xi:include href="xml/mixaudio.xml"/>
+ <xi:include href="xml/mixaudiotypes.xml"/>
+<!-- <xi:include href="xml/mixdrmparams.xml"/> -->
+<!-- <xi:include href="xml/mixparams.xml"/> -->
+<!-- <xi:include href="xml/mixresult.xml"/> -->
+ </chapter>
+
+ <chapter id="tree-hierarchy">
+ <title>Object Hierarchy</title>
+ <xi:include href="xml/tree_index.sgml"/>
+ </chapter>
+
+ <index id="api-index-full">
+ <title>API Index</title>
+ <xi:include href="xml/object_index.sgml"><xi:fallback /></xi:include>
+ </index>
+
+</book>
diff --git a/mix_audio/docs/reference/MixAudio/MixAudio-sections.txt b/mix_audio/docs/reference/MixAudio/MixAudio-sections.txt
new file mode 100644
index 0000000..d96a685
--- /dev/null
+++ b/mix_audio/docs/reference/MixAudio/MixAudio-sections.txt
@@ -0,0 +1,187 @@
+<SECTION>
+<FILE>mixacpwma</FILE>
+MixAudioWMAVersion
+<TITLE>MixAudioConfigParamsWMA</TITLE>
+MixAudioConfigParamsWMA
+mix_acp_wma_new
+mix_acp_wma_ref
+mix_acp_wma_unref
+MIX_ACP_WMA_CHANNEL_MASK
+MIX_ACP_WMA_FORMAT_TAG
+MIX_ACP_WMA_BLOCK_ALIGN
+MIX_ACP_WMA_ENCODE_OPT
+MIX_ACP_WMA_PCM_BIT_WIDTH
+mix_acp_wma_get_version
+mix_acp_wma_set_version
+<SUBSECTION Standard>
+MIX_AUDIOCONFIGPARAMSWMA
+MIX_IS_AUDIOCONFIGPARAMSWMA
+MIX_TYPE_AUDIOCONFIGPARAMSWMA
+mix_acp_wma_get_type
+MIX_AUDIOCONFIGPARAMSWMA_CLASS
+MIX_IS_AUDIOCONFIGPARAMSWMA_CLASS
+MIX_AUDIOCONFIGPARAMSWMA_GET_CLASS
+</SECTION>
+
+<SECTION>
+<FILE>mixacp</FILE>
+MixACPOpAlign
+MixACPBPSType
+MixDecodeMode
+<TITLE>MixAudioConfigParams</TITLE>
+MixAudioConfigParams
+mix_acp_new
+mix_acp_ref
+mix_acp_unref
+MIX_ACP_DECODEMODE
+MIX_ACP_NUM_CHANNELS
+MIX_ACP_BITRATE
+MIX_ACP_SAMPLE_FREQ
+mix_acp_get_decodemode
+mix_acp_set_decodemode
+mix_acp_get_streamname
+mix_acp_set_streamname
+mix_acp_set_audio_manager
+mix_acp_get_audio_manager
+mix_acp_is_streamname_valid
+mix_acp_get_bps
+mix_acp_set_bps
+mix_acp_get_op_align
+mix_acp_set_op_align
+<SUBSECTION Standard>
+MIX_AUDIOCONFIGPARAMS
+MIX_IS_AUDIOCONFIGPARAMS
+MIX_TYPE_AUDIOCONFIGPARAMS
+mix_acp_get_type
+MIX_AUDIOCONFIGPARAMS_CLASS
+MIX_IS_AUDIOCONFIGPARAMS_CLASS
+MIX_AUDIOCONFIGPARAMS_GET_CLASS
+</SECTION>
+
+<SECTION>
+<FILE>mixacpaac</FILE>
+MixAACBitrateType
+MixAACBitstreamFormt
+MixAACProfile
+MixAACMpegID
+<TITLE>MixAudioConfigParamsAAC</TITLE>
+MixAudioConfigParamsAAC
+mix_acp_aac_new
+mix_acp_aac_ref
+mix_acp_aac_unref
+mix_acp_aac_set_mpeg_id
+mix_acp_aac_get_mpeg_id
+MIX_ACP_AAC_CRC
+mix_acp_aac_set_aot
+mix_acp_aac_get_aot
+MIX_ACP_AAC_SBR_FLAG
+MIX_ACP_AAC_PS_FLAG
+MIX_ACP_AAC_PCE_FLAG
+MIX_ACP_AAC_SAMPLE_RATE
+MIX_ACP_AAC_CHANNELS
+mix_acp_aac_get_bit_stream_format
+mix_acp_aac_set_bit_stream_format
+mix_acp_aac_get_aac_profile
+mix_acp_aac_set_aac_profile
+mix_acp_aac_get_bit_rate_type
+mix_acp_aac_set_bit_rate_type
+<SUBSECTION Standard>
+MIX_AUDIOCONFIGPARAMSAAC
+MIX_IS_AUDIOCONFIGPARAMSAAC
+MIX_TYPE_AUDIOCONFIGPARAMSAAC
+mix_acp_aac_get_type
+MIX_AUDIOCONFIGPARAMSAAC_CLASS
+MIX_IS_AUDIOCONFIGPARAMSAAC_CLASS
+MIX_AUDIOCONFIGPARAMSAAC_GET_CLASS
+</SECTION>
+
+<SECTION>
+<FILE>mixaudio</FILE>
+MixStreamState
+MixState
+MixCodecMode
+MixVolType
+MixVolRamp
+MixIOVec
+MixDeviceState
+<TITLE>MixAudio</TITLE>
+MixAudio
+mix_audio_new
+mix_audio_ref
+mix_audio_unref
+mix_audio_get_version
+mix_audio_initialize
+mix_audio_configure
+mix_audio_decode
+mix_audio_capture_encode
+mix_audio_start
+mix_audio_stop_drop
+mix_audio_stop_drain
+mix_audio_pause
+mix_audio_resume
+mix_audio_get_timestamp
+mix_audio_set_mute
+mix_audio_get_mute
+mix_audio_get_max_vol
+mix_audio_get_min_vol
+mix_audio_get_volume
+mix_audio_set_volume
+mix_audio_deinitialize
+mix_audio_get_stream_state
+mix_audio_get_state
+mix_audio_am_is_enabled
+mix_audio_is_am_available
+mix_audio_get_output_configuration
+mix_audio_get_stream_byte_decoded
+<SUBSECTION Standard>
+MIX_AUDIO
+MIX_IS_AUDIO
+MIX_TYPE_AUDIO
+mix_audio_get_type
+MIX_AUDIO_CLASS
+MIX_IS_AUDIO_CLASS
+MIX_AUDIO_GET_CLASS
+</SECTION>
+
+<SECTION>
+<FILE>mixaip</FILE>
+<TITLE>MixAudioInitParams</TITLE>
+MixAudioInitParams
+mix_aip_new
+mix_aip_ref
+mix_aip_unref
+<SUBSECTION Standard>
+MIX_AUDIOINITPARAMS
+MIX_IS_AUDIOINITPARAMS
+MIX_TYPE_AUDIOINITPARAMS
+mix_aip_get_type
+MIX_AUDIOINITPARAMS_CLASS
+MIX_IS_AUDIOINITPARAMS_CLASS
+MIX_AUDIOINITPARAMS_GET_CLASS
+</SECTION>
+
+<SECTION>
+<FILE>mixacpmp3</FILE>
+<TITLE>MixAudioConfigParamsMP3</TITLE>
+MixAudioConfigParamsMP3
+mix_acp_mp3_new
+mix_acp_mp3_ref
+mix_acp_mp3_unref
+MIX_ACP_MP3_CRC
+MIX_ACP_MP3_MPEG_FORMAT
+MIX_ACP_MP3_MPEG_LAYER
+<SUBSECTION Standard>
+MIX_AUDIOCONFIGPARAMSMP3
+MIX_IS_AUDIOCONFIGPARAMSMP3
+MIX_TYPE_AUDIOCONFIGPARAMSMP3
+mix_acp_mp3_get_type
+MIX_AUDIOCONFIGPARAMSMP3_CLASS
+MIX_IS_AUDIOCONFIGPARAMSMP3_CLASS
+MIX_AUDIOCONFIGPARAMSMP3_GET_CLASS
+</SECTION>
+
+<SECTION>
+<FILE>mixaudiotypes</FILE>
+MixAudioManager
+</SECTION>
+
diff --git a/mix_audio/docs/reference/MixAudio/MixAudio.types b/mix_audio/docs/reference/MixAudio/MixAudio.types
new file mode 100644
index 0000000..0a80168
--- /dev/null
+++ b/mix_audio/docs/reference/MixAudio/MixAudio.types
@@ -0,0 +1,6 @@
+mix_acp_wma_get_type
+mix_acp_get_type
+mix_acp_aac_get_type
+mix_audio_get_type
+mix_aip_get_type
+mix_acp_mp3_get_type
diff --git a/mix_audio/docs/reference/MixAudio/html/MixAudio-MixAudioConfigParams.html b/mix_audio/docs/reference/MixAudio/html/MixAudio-MixAudioConfigParams.html
new file mode 100644
index 0000000..1dd3b14
--- /dev/null
+++ b/mix_audio/docs/reference/MixAudio/html/MixAudio-MixAudioConfigParams.html
@@ -0,0 +1,689 @@
+<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
+<html>
+<head>
+<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
+<title>MixAudioConfigParams</title>
+<meta name="generator" content="DocBook XSL Stylesheets V1.73.2">
+<link rel="start" href="index.html" title="MixAudio Reference Manual">
+<link rel="up" href="ch01.html" title="Mix Audio API">
+<link rel="prev" href="MixAudio-MixAudioConfigParamsWMA.html" title="MixAudioConfigParamsWMA">
+<link rel="next" href="MixAudio-MixAudioInitParams.html" title="MixAudioInitParams">
+<meta name="generator" content="GTK-Doc V1.11 (XML mode)">
+<link rel="stylesheet" href="style.css" type="text/css">
+<link rel="chapter" href="ch01.html" title="Mix Audio API">
+<link rel="chapter" href="tree-hierarchy.html" title="Object Hierarchy">
+<link rel="index" href="api-index-full.html" title="API Index">
+</head>
+<body bgcolor="white" text="black" link="#0000FF" vlink="#840084" alink="#0000FF">
+<table class="navigation" id="top" width="100%" summary="Navigation header" cellpadding="2" cellspacing="2">
+<tr valign="middle">
+<td><a accesskey="p" href="MixAudio-MixAudioConfigParamsWMA.html"><img src="left.png" width="24" height="24" border="0" alt="Prev"></a></td>
+<td><a accesskey="u" href="ch01.html"><img src="up.png" width="24" height="24" border="0" alt="Up"></a></td>
+<td><a accesskey="h" href="index.html"><img src="home.png" width="24" height="24" border="0" alt="Home"></a></td>
+<th width="100%" align="center">MixAudio Reference Manual</th>
+<td><a accesskey="n" href="MixAudio-MixAudioInitParams.html"><img src="right.png" width="24" height="24" border="0" alt="Next"></a></td>
+</tr>
+<tr><td colspan="5" class="shortcuts">
+<a href="#MixAudio-MixAudioConfigParams.synopsis" class="shortcut">Top</a>
+  | 
+ <a href="#MixAudio-MixAudioConfigParams.description" class="shortcut">Description</a>
+</td></tr>
+</table>
+<div class="refentry" lang="en">
+<a name="MixAudio-MixAudioConfigParams"></a><div class="titlepage"></div>
+<div class="refnamediv"><table width="100%"><tr>
+<td valign="top">
+<h2><span class="refentrytitle"><a name="MixAudio-MixAudioConfigParams.top_of_page"></a>MixAudioConfigParams</span></h2>
+<p>MixAudioConfigParams — MixAudio configuration parameters object.</p>
+</td>
+<td valign="top" align="right"></td>
+</tr></table></div>
+<div class="refsynopsisdiv">
+<a name="MixAudio-MixAudioConfigParams.synopsis"></a><h2>Synopsis</h2>
+<pre class="synopsis">
+
+#include &lt;mixacp.h&gt;
+
+enum <a class="link" href="MixAudio-MixAudioConfigParams.html#MixACPOpAlign" title="enum MixACPOpAlign">MixACPOpAlign</a>;
+enum <a class="link" href="MixAudio-MixAudioConfigParams.html#MixACPBPSType" title="enum MixACPBPSType">MixACPBPSType</a>;
+enum <a class="link" href="MixAudio-MixAudioConfigParams.html#MixDecodeMode" title="enum MixDecodeMode">MixDecodeMode</a>;
+ <a class="link" href="MixAudio-MixAudioConfigParams.html#MixAudioConfigParams" title="MixAudioConfigParams">MixAudioConfigParams</a>;
+<a class="link" href="MixAudio-MixAudioConfigParams.html#MixAudioConfigParams" title="MixAudioConfigParams">MixAudioConfigParams</a> * <a class="link" href="MixAudio-MixAudioConfigParams.html#mix-acp-new" title="mix_acp_new ()">mix_acp_new</a> (void);
+<a class="link" href="MixAudio-MixAudioConfigParams.html#MixAudioConfigParams" title="MixAudioConfigParams">MixAudioConfigParams</a> * <a class="link" href="MixAudio-MixAudioConfigParams.html#mix-acp-ref" title="mix_acp_ref ()">mix_acp_ref</a> (<a class="link" href="MixAudio-MixAudioConfigParams.html#MixAudioConfigParams" title="MixAudioConfigParams">MixAudioConfigParams</a> *mix);
+#define <a class="link" href="MixAudio-MixAudioConfigParams.html#mix-acp-unref" title="mix_acp_unref()">mix_acp_unref</a> (obj)
+#define <a class="link" href="MixAudio-MixAudioConfigParams.html#MIX-ACP-DECODEMODE--CAPS" title="MIX_ACP_DECODEMODE()">MIX_ACP_DECODEMODE</a> (obj)
+#define <a class="link" href="MixAudio-MixAudioConfigParams.html#MIX-ACP-NUM-CHANNELS--CAPS" title="MIX_ACP_NUM_CHANNELS()">MIX_ACP_NUM_CHANNELS</a> (obj)
+#define <a class="link" href="MixAudio-MixAudioConfigParams.html#MIX-ACP-BITRATE--CAPS" title="MIX_ACP_BITRATE()">MIX_ACP_BITRATE</a> (obj)
+#define <a class="link" href="MixAudio-MixAudioConfigParams.html#MIX-ACP-SAMPLE-FREQ--CAPS" title="MIX_ACP_SAMPLE_FREQ()">MIX_ACP_SAMPLE_FREQ</a> (obj)
+<a class="link" href="MixAudio-MixAudioConfigParams.html#MixDecodeMode" title="enum MixDecodeMode">MixDecodeMode</a> <a class="link" href="MixAudio-MixAudioConfigParams.html#mix-acp-get-decodemode" title="mix_acp_get_decodemode ()">mix_acp_get_decodemode</a> (<a class="link" href="MixAudio-MixAudioConfigParams.html#MixAudioConfigParams" title="MixAudioConfigParams">MixAudioConfigParams</a> *obj);
+MIX_RESULT <a class="link" href="MixAudio-MixAudioConfigParams.html#mix-acp-set-decodemode" title="mix_acp_set_decodemode ()">mix_acp_set_decodemode</a> (<a class="link" href="MixAudio-MixAudioConfigParams.html#MixAudioConfigParams" title="MixAudioConfigParams">MixAudioConfigParams</a> *obj,
+ <a class="link" href="MixAudio-MixAudioConfigParams.html#MixDecodeMode" title="enum MixDecodeMode">MixDecodeMode</a> mode);
+gchar * <a class="link" href="MixAudio-MixAudioConfigParams.html#mix-acp-get-streamname" title="mix_acp_get_streamname ()">mix_acp_get_streamname</a> (<a class="link" href="MixAudio-MixAudioConfigParams.html#MixAudioConfigParams" title="MixAudioConfigParams">MixAudioConfigParams</a> *obj);
+MIX_RESULT <a class="link" href="MixAudio-MixAudioConfigParams.html#mix-acp-set-streamname" title="mix_acp_set_streamname ()">mix_acp_set_streamname</a> (<a class="link" href="MixAudio-MixAudioConfigParams.html#MixAudioConfigParams" title="MixAudioConfigParams">MixAudioConfigParams</a> *obj,
+ const gchar *streamname);
+MIX_RESULT <a class="link" href="MixAudio-MixAudioConfigParams.html#mix-acp-set-audio-manager" title="mix_acp_set_audio_manager ()">mix_acp_set_audio_manager</a> (<a class="link" href="MixAudio-MixAudioConfigParams.html#MixAudioConfigParams" title="MixAudioConfigParams">MixAudioConfigParams</a> *obj,
+ <a class="link" href="MixAudio-mixaudiotypes.html#MixAudioManager" title="enum MixAudioManager">MixAudioManager</a> am);
+<a class="link" href="MixAudio-mixaudiotypes.html#MixAudioManager" title="enum MixAudioManager">MixAudioManager</a> <a class="link" href="MixAudio-MixAudioConfigParams.html#mix-acp-get-audio-manager" title="mix_acp_get_audio_manager ()">mix_acp_get_audio_manager</a> (<a class="link" href="MixAudio-MixAudioConfigParams.html#MixAudioConfigParams" title="MixAudioConfigParams">MixAudioConfigParams</a> *obj);
+gboolean <a class="link" href="MixAudio-MixAudioConfigParams.html#mix-acp-is-streamname-valid" title="mix_acp_is_streamname_valid ()">mix_acp_is_streamname_valid</a> (<a class="link" href="MixAudio-MixAudioConfigParams.html#MixAudioConfigParams" title="MixAudioConfigParams">MixAudioConfigParams</a> *obj);
+<a class="link" href="MixAudio-MixAudioConfigParams.html#MixACPBPSType" title="enum MixACPBPSType">MixACPBPSType</a> <a class="link" href="MixAudio-MixAudioConfigParams.html#mix-acp-get-bps" title="mix_acp_get_bps ()">mix_acp_get_bps</a> (<a class="link" href="MixAudio-MixAudioConfigParams.html#MixAudioConfigParams" title="MixAudioConfigParams">MixAudioConfigParams</a> *obj);
+MIX_RESULT <a class="link" href="MixAudio-MixAudioConfigParams.html#mix-acp-set-bps" title="mix_acp_set_bps ()">mix_acp_set_bps</a> (<a class="link" href="MixAudio-MixAudioConfigParams.html#MixAudioConfigParams" title="MixAudioConfigParams">MixAudioConfigParams</a> *obj,
+ <a class="link" href="MixAudio-MixAudioConfigParams.html#MixACPBPSType" title="enum MixACPBPSType">MixACPBPSType</a> type);
+<a class="link" href="MixAudio-MixAudioConfigParams.html#MixACPOpAlign" title="enum MixACPOpAlign">MixACPOpAlign</a> <a class="link" href="MixAudio-MixAudioConfigParams.html#mix-acp-get-op-align" title="mix_acp_get_op_align ()">mix_acp_get_op_align</a> (<a class="link" href="MixAudio-MixAudioConfigParams.html#MixAudioConfigParams" title="MixAudioConfigParams">MixAudioConfigParams</a> *obj);
+MIX_RESULT <a class="link" href="MixAudio-MixAudioConfigParams.html#mix-acp-set-op-align" title="mix_acp_set_op_align ()">mix_acp_set_op_align</a> (<a class="link" href="MixAudio-MixAudioConfigParams.html#MixAudioConfigParams" title="MixAudioConfigParams">MixAudioConfigParams</a> *obj,
+ <a class="link" href="MixAudio-MixAudioConfigParams.html#MixACPOpAlign" title="enum MixACPOpAlign">MixACPOpAlign</a> op_align);
+</pre>
+</div>
+<div class="refsect1" lang="en">
+<a name="MixAudio-MixAudioConfigParams.description"></a><h2>Description</h2>
+<p>
+<a class="link" href="MixAudio.html" title="MixAudio"><span class="type">MixAudio</span></a> configuration parameters object which is used to communicate audio specific parameters.
+</p>
+<p>
+This object should not be instantiated, as codec-specific parameters are defined in individual derived classes.</p>
+</div>
+<div class="refsect1" lang="en">
+<a name="MixAudio-MixAudioConfigParams.details"></a><h2>Details</h2>
+<div class="refsect2" lang="en">
+<a name="MixACPOpAlign"></a><h3>enum MixACPOpAlign</h3>
+<pre class="programlisting">typedef enum {
+ MIX_ACP_OUTPUT_ALIGN_UNKNOWN=-1,
+ MIX_ACP_OUTPUT_ALIGN_16=0,
+ MIX_ACP_OUTPUT_ALIGN_MSB,
+ MIX_ACP_OUTPUT_ALIGN_LSB,
+ MIX_ACP_OUTPUT_ALIGN_LAST
+} MixACPOpAlign;
+</pre>
+<p>
+Audio Output alignment.</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody>
+<tr>
+<td><p><a name="MIX-ACP-OUTPUT-ALIGN-UNKNOWN--CAPS"></a><span class="term"><code class="literal">MIX_ACP_OUTPUT_ALIGN_UNKNOWN</code></span></p></td>
+<td> Output alignment undefined.
+</td>
+</tr>
+<tr>
+<td><p><a name="MIX-ACP-OUTPUT-ALIGN-16--CAPS"></a><span class="term"><code class="literal">MIX_ACP_OUTPUT_ALIGN_16</code></span></p></td>
+<td></td>
+</tr>
+<tr>
+<td><p><a name="MIX-ACP-OUTPUT-ALIGN-MSB--CAPS"></a><span class="term"><code class="literal">MIX_ACP_OUTPUT_ALIGN_MSB</code></span></p></td>
+<td> Output word is MSB aligned
+</td>
+</tr>
+<tr>
+<td><p><a name="MIX-ACP-OUTPUT-ALIGN-LSB--CAPS"></a><span class="term"><code class="literal">MIX_ACP_OUTPUT_ALIGN_LSB</code></span></p></td>
+<td> Output word is LSB aligned
+</td>
+</tr>
+<tr>
+<td><p><a name="MIX-ACP-OUTPUT-ALIGN-LAST--CAPS"></a><span class="term"><code class="literal">MIX_ACP_OUTPUT_ALIGN_LAST</code></span></p></td>
+<td> Last entry in list.
+</td>
+</tr>
+</tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="MixACPBPSType"></a><h3>enum MixACPBPSType</h3>
+<pre class="programlisting">typedef enum {
+ MIX_ACP_BPS_UNKNOWN=0,
+ MIX_ACP_BPS_16=16,
+ MIX_ACP_BPS_24=24,
+} MixACPBPSType;
+</pre>
+<p>
+Audio Output Size in bits per sample.</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody>
+<tr>
+<td><p><a name="MIX-ACP-BPS-UNKNOWN--CAPS"></a><span class="term"><code class="literal">MIX_ACP_BPS_UNKNOWN</code></span></p></td>
+<td> Bit Per Sample undefined.
+</td>
+</tr>
+<tr>
+<td><p><a name="MIX-ACP-BPS-16--CAPS"></a><span class="term"><code class="literal">MIX_ACP_BPS_16</code></span></p></td>
+<td> Output bits per sample is 16 bits
+</td>
+</tr>
+<tr>
+<td><p><a name="MIX-ACP-BPS-24--CAPS"></a><span class="term"><code class="literal">MIX_ACP_BPS_24</code></span></p></td>
+<td> Output bits per sample is 24 bits
+</td>
+</tr>
+</tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="MixDecodeMode"></a><h3>enum MixDecodeMode</h3>
+<pre class="programlisting">typedef enum {
+ MIX_DECODE_NULL=0,
+ MIX_DECODE_DIRECTRENDER,
+ MIX_DECODE_DECODERETURN,
+ MIX_DECODE_LAST
+} MixDecodeMode;
+</pre>
+<p>
+Operation Mode for a MI-X session. See <a class="link" href="MixAudio.html#mix-audio-configure" title="mix_audio_configure ()"><code class="function">mix_audio_configure()</code></a>.</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody>
+<tr>
+<td><p><a name="MIX-DECODE-NULL--CAPS"></a><span class="term"><code class="literal">MIX_DECODE_NULL</code></span></p></td>
+<td> Undefined decode mode.
+</td>
+</tr>
+<tr>
+<td><p><a name="MIX-DECODE-DIRECTRENDER--CAPS"></a><span class="term"><code class="literal">MIX_DECODE_DIRECTRENDER</code></span></p></td>
+<td> Stream is configured in Direct Render mode
+</td>
+</tr>
+<tr>
+<td><p><a name="MIX-DECODE-DECODERETURN--CAPS"></a><span class="term"><code class="literal">MIX_DECODE_DECODERETURN</code></span></p></td>
+<td> Stream is configured in Decode Return mode
+</td>
+</tr>
+<tr>
+<td><p><a name="MIX-DECODE-LAST--CAPS"></a><span class="term"><code class="literal">MIX_DECODE_LAST</code></span></p></td>
+<td> Last index in the enumeration.
+</td>
+</tr>
+</tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="MixAudioConfigParams"></a><h3>MixAudioConfigParams</h3>
+<pre class="programlisting">typedef struct {
+ MixParams parent;
+
+ /* Audio Session Parameters */
+ MixDecodeMode decode_mode;
+ gchar *stream_name;
+ MixAudioManager audio_manager;
+
+ /* Audio Format Parameters */
+ gint num_channels;
+ gint bit_rate;
+ gint sample_freq;
+ MixACPBPSType bits_per_sample;
+ MixACPOpAlign op_align;
+} MixAudioConfigParams;
+</pre>
+<p>
+<em class="parameter"><code>MixAudio</code></em> configuration parameters object.</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody>
+<tr>
+<td><p><span class="term">MixParams <em class="structfield"><code>parent</code></em>;</span></p></td>
+<td> parent.
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><a class="link" href="MixAudio-MixAudioConfigParams.html#MixDecodeMode" title="enum MixDecodeMode">MixDecodeMode</a> <em class="structfield"><code>decode_mode</code></em>;</span></p></td>
+<td> Decode Mode to use for current session. See <a class="link" href="MixAudio-MixAudioConfigParams.html#mix-acp-set-decodemode" title="mix_acp_set_decodemode ()"><span class="type">mix_acp_set_decodemode</span></a>
+</td>
+</tr>
+<tr>
+<td><p><span class="term">gchar *<em class="structfield"><code>stream_name</code></em>;</span></p></td>
+<td> Stream name. See <a class="link" href="MixAudio-MixAudioConfigParams.html#mix-acp-set-streamname" title="mix_acp_set_streamname ()"><span class="type">mix_acp_set_streamname</span></a>. This object will release the string upon destruction.
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><a class="link" href="MixAudio-mixaudiotypes.html#MixAudioManager" title="enum MixAudioManager">MixAudioManager</a> <em class="structfield"><code>audio_manager</code></em>;</span></p></td>
+<td> Type of Audio Manager. See <a class="link" href="MixAudio-MixAudioConfigParams.html#mix-acp-set-audio-manager" title="mix_acp_set_audio_manager ()"><span class="type">mix_acp_set_audio_manager</span></a>.
+</td>
+</tr>
+<tr>
+<td><p><span class="term">gint <em class="structfield"><code>num_channels</code></em>;</span></p></td>
+<td> Number of output channels. See <a class="link" href="MixAudio-MixAudioConfigParams.html#MIX-ACP-NUM-CHANNELS--CAPS" title="MIX_ACP_NUM_CHANNELS()"><span class="type">MIX_ACP_NUM_CHANNELS</span></a>
+</td>
+</tr>
+<tr>
+<td><p><span class="term">gint <em class="structfield"><code>bit_rate</code></em>;</span></p></td>
+<td> <span class="emphasis"><em>Optional.</em></span> See <a class="link" href="MixAudio-MixAudioConfigParams.html#MIX-ACP-BITRATE--CAPS" title="MIX_ACP_BITRATE()"><span class="type">MIX_ACP_BITRATE</span></a>
+</td>
+</tr>
+<tr>
+<td><p><span class="term">gint <em class="structfield"><code>sample_freq</code></em>;</span></p></td>
+<td> Output frequency. See <a class="link" href="MixAudio-MixAudioConfigParams.html#MIX-ACP-SAMPLE-FREQ--CAPS" title="MIX_ACP_SAMPLE_FREQ()"><span class="type">MIX_ACP_SAMPLE_FREQ</span></a>
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><a class="link" href="MixAudio-MixAudioConfigParams.html#MixACPBPSType" title="enum MixACPBPSType">MixACPBPSType</a> <em class="structfield"><code>bits_per_sample</code></em>;</span></p></td>
+<td> Number of output bit per sample. See <a class="link" href="MixAudio-MixAudioConfigParams.html#mix-acp-set-bps" title="mix_acp_set_bps ()"><span class="type">mix_acp_set_bps</span></a>
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><a class="link" href="MixAudio-MixAudioConfigParams.html#MixACPOpAlign" title="enum MixACPOpAlign">MixACPOpAlign</a> <em class="structfield"><code>op_align</code></em>;</span></p></td>
+<td> Output Byte Alignment. See <a class="link" href="MixAudio-MixAudioConfigParams.html#mix-acp-set-op-align" title="mix_acp_set_op_align ()"><span class="type">mix_acp_set_op_align</span></a>
+</td>
+</tr>
+</tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="mix-acp-new"></a><h3>mix_acp_new ()</h3>
+<pre class="programlisting"><a class="link" href="MixAudio-MixAudioConfigParams.html#MixAudioConfigParams" title="MixAudioConfigParams">MixAudioConfigParams</a> * mix_acp_new (void);</pre>
+<p>
+Use this method to create new instance of <a class="link" href="MixAudio-MixAudioConfigParams.html#MixAudioConfigParams" title="MixAudioConfigParams"><span class="type">MixAudioConfigParams</span></a></p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody><tr>
+<td><p><span class="term"><em class="parameter"><code>returns</code></em> :</span></p></td>
+<td> A newly allocated instance of <a class="link" href="MixAudio-MixAudioConfigParams.html#MixAudioConfigParams" title="MixAudioConfigParams"><span class="type">MixAudioConfigParams</span></a>
+</td>
+</tr></tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="mix-acp-ref"></a><h3>mix_acp_ref ()</h3>
+<pre class="programlisting"><a class="link" href="MixAudio-MixAudioConfigParams.html#MixAudioConfigParams" title="MixAudioConfigParams">MixAudioConfigParams</a> * mix_acp_ref (<a class="link" href="MixAudio-MixAudioConfigParams.html#MixAudioConfigParams" title="MixAudioConfigParams">MixAudioConfigParams</a> *mix);</pre>
+<p>
+Add reference count.</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>mix</code></em> :</span></p></td>
+<td> object to add reference
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>returns</code></em> :</span></p></td>
+<td> the MixAudioConfigParams instance where reference count has been increased.
+</td>
+</tr>
+</tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="mix-acp-unref"></a><h3>mix_acp_unref()</h3>
+<pre class="programlisting">#define mix_acp_unref(obj) mix_params_unref(MIX_PARAMS(obj))
+</pre>
+<p>
+Decrement reference count of the object.</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody><tr>
+<td><p><span class="term"><em class="parameter"><code>obj</code></em> :</span></p></td>
+<td> object to unref.
+</td>
+</tr></tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="MIX-ACP-DECODEMODE--CAPS"></a><h3>MIX_ACP_DECODEMODE()</h3>
+<pre class="programlisting">#define MIX_ACP_DECODEMODE(obj) (MIX_AUDIOCONFIGPARAMS(obj)-&gt;decode_mode)
+</pre>
+<p>
+MixAudioConfigParam.decode_mode accessor.
+</p>
+<p>
+Configure the decode mode to one of <a class="link" href="MixAudio-MixAudioConfigParams.html#MixDecodeMode" title="enum MixDecodeMode"><span class="type">MixDecodeMode</span></a> value.</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody><tr>
+<td><p><span class="term"><em class="parameter"><code>obj</code></em> :</span></p></td>
+<td> <a class="link" href="MixAudio-MixAudioConfigParams.html#MixAudioConfigParams" title="MixAudioConfigParams"><span class="type">MixAudioConfigParams</span></a> object
+</td>
+</tr></tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="MIX-ACP-NUM-CHANNELS--CAPS"></a><h3>MIX_ACP_NUM_CHANNELS()</h3>
+<pre class="programlisting">#define MIX_ACP_NUM_CHANNELS(obj) (MIX_AUDIOCONFIGPARAMS(obj)-&gt;num_channels)
+</pre>
+<p>
+MixAudioConfigParam.num_channels accessor.
+</p>
+<p>
+Configure the number of output channels. This value needs to be exactly the same as the channel count of the audio stream, since down-mixing is not supported.
+</p>
+<p>
+This value can be used during <a class="link" href="MixAudio-MixAudioConfigParams.html#MIX-DECODE-DECODERETURN--CAPS"><span class="type">MIX_DECODE_DECODERETURN</span></a> mode for buffer size/duration calculation.
+</p>
+<p>
+In Moorestown, the number of channels must be 1 or 2.</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody><tr>
+<td><p><span class="term"><em class="parameter"><code>obj</code></em> :</span></p></td>
+<td> <a class="link" href="MixAudio-MixAudioConfigParams.html#MixAudioConfigParams" title="MixAudioConfigParams"><span class="type">MixAudioConfigParams</span></a> object
+</td>
+</tr></tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="MIX-ACP-BITRATE--CAPS"></a><h3>MIX_ACP_BITRATE()</h3>
+<pre class="programlisting">#define MIX_ACP_BITRATE(obj) (MIX_AUDIOCONFIGPARAMS(obj)-&gt;bit_rate)
+</pre>
+<p>
+MixAudioConfigParam.bit_rate accessor.
+</p>
+<p>
+Bit rate of the current audio.
+</p>
+<p>
+<em><span class="remark">Optional</span></em></p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody><tr>
+<td><p><span class="term"><em class="parameter"><code>obj</code></em> :</span></p></td>
+<td> <a class="link" href="MixAudio-MixAudioConfigParams.html#MixAudioConfigParams" title="MixAudioConfigParams"><span class="type">MixAudioConfigParams</span></a> object
+</td>
+</tr></tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="MIX-ACP-SAMPLE-FREQ--CAPS"></a><h3>MIX_ACP_SAMPLE_FREQ()</h3>
+<pre class="programlisting">#define MIX_ACP_SAMPLE_FREQ(obj) (MIX_AUDIOCONFIGPARAMS(obj)-&gt;sample_freq)
+</pre>
+<p>
+MixAudioConfigParam.sample_freq accessor.
+</p>
+<p>
+Output sampling frequency.
+</p>
+<p>
+This value can be used during <a class="link" href="MixAudio-MixAudioConfigParams.html#MIX-DECODE-DECODERETURN--CAPS"><span class="type">MIX_DECODE_DECODERETURN</span></a> mode for buffer size/duration calculation.</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody><tr>
+<td><p><span class="term"><em class="parameter"><code>obj</code></em> :</span></p></td>
+<td> <a class="link" href="MixAudio-MixAudioConfigParams.html#MixAudioConfigParams" title="MixAudioConfigParams"><span class="type">MixAudioConfigParams</span></a> object
+</td>
+</tr></tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="mix-acp-get-decodemode"></a><h3>mix_acp_get_decodemode ()</h3>
+<pre class="programlisting"><a class="link" href="MixAudio-MixAudioConfigParams.html#MixDecodeMode" title="enum MixDecodeMode">MixDecodeMode</a> mix_acp_get_decodemode (<a class="link" href="MixAudio-MixAudioConfigParams.html#MixAudioConfigParams" title="MixAudioConfigParams">MixAudioConfigParams</a> *obj);</pre>
+<p>
+Retrieve currently configured <a class="link" href="MixAudio-MixAudioConfigParams.html#MixDecodeMode" title="enum MixDecodeMode"><span class="type">MixDecodeMode</span></a>.</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>obj</code></em> :</span></p></td>
+<td> <a class="link" href="MixAudio-MixAudioConfigParams.html#MixAudioConfigParams" title="MixAudioConfigParams"><span class="type">MixAudioConfigParams</span></a>
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>returns</code></em> :</span></p></td>
+<td> <a class="link" href="MixAudio-MixAudioConfigParams.html#MixDecodeMode" title="enum MixDecodeMode"><span class="type">MixDecodeMode</span></a>
+</td>
+</tr>
+</tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="mix-acp-set-decodemode"></a><h3>mix_acp_set_decodemode ()</h3>
+<pre class="programlisting">MIX_RESULT mix_acp_set_decodemode (<a class="link" href="MixAudio-MixAudioConfigParams.html#MixAudioConfigParams" title="MixAudioConfigParams">MixAudioConfigParams</a> *obj,
+ <a class="link" href="MixAudio-MixAudioConfigParams.html#MixDecodeMode" title="enum MixDecodeMode">MixDecodeMode</a> mode);</pre>
+<p>
+Configure session for one of the <a class="link" href="MixAudio-MixAudioConfigParams.html#MixDecodeMode" title="enum MixDecodeMode"><span class="type">MixDecodeMode</span></a>.</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>obj</code></em> :</span></p></td>
+<td> <a class="link" href="MixAudio-MixAudioConfigParams.html#MixAudioConfigParams" title="MixAudioConfigParams"><span class="type">MixAudioConfigParams</span></a>
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>mode</code></em> :</span></p></td>
+<td> <a class="link" href="MixAudio-MixAudioConfigParams.html#MixDecodeMode" title="enum MixDecodeMode"><span class="type">MixDecodeMode</span></a> to set
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>returns</code></em> :</span></p></td>
+<td> <span class="type">MIX_RESULT</span>
+</td>
+</tr>
+</tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="mix-acp-get-streamname"></a><h3>mix_acp_get_streamname ()</h3>
+<pre class="programlisting">gchar * mix_acp_get_streamname (<a class="link" href="MixAudio-MixAudioConfigParams.html#MixAudioConfigParams" title="MixAudioConfigParams">MixAudioConfigParams</a> *obj);</pre>
+<p>
+Return a copy of the stream name. The caller must free it with <code class="function">g_free()</code></p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>obj</code></em> :</span></p></td>
+<td> <a class="link" href="MixAudio-MixAudioConfigParams.html#MixAudioConfigParams" title="MixAudioConfigParams"><span class="type">MixAudioConfigParams</span></a>
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>returns</code></em> :</span></p></td>
+<td> pointer to a copy of the stream name. NULL if name is not available.
+</td>
+</tr>
+</tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="mix-acp-set-streamname"></a><h3>mix_acp_set_streamname ()</h3>
+<pre class="programlisting">MIX_RESULT mix_acp_set_streamname (<a class="link" href="MixAudio-MixAudioConfigParams.html#MixAudioConfigParams" title="MixAudioConfigParams">MixAudioConfigParams</a> *obj,
+ const gchar *streamname);</pre>
+<p>
+Set the stream name. The object will make a copy of the input stream name string.</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>obj</code></em> :</span></p></td>
+<td> <a class="link" href="MixAudio-MixAudioConfigParams.html#MixAudioConfigParams" title="MixAudioConfigParams"><span class="type">MixAudioConfigParams</span></a>
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>streamname</code></em> :</span></p></td>
+<td> Stream name to set
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>returns</code></em> :</span></p></td>
+<td> <span class="type">MIX_RESULT</span>
+</td>
+</tr>
+</tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="mix-acp-set-audio-manager"></a><h3>mix_acp_set_audio_manager ()</h3>
+<pre class="programlisting">MIX_RESULT mix_acp_set_audio_manager (<a class="link" href="MixAudio-MixAudioConfigParams.html#MixAudioConfigParams" title="MixAudioConfigParams">MixAudioConfigParams</a> *obj,
+ <a class="link" href="MixAudio-mixaudiotypes.html#MixAudioManager" title="enum MixAudioManager">MixAudioManager</a> am);</pre>
+<p>
+Set the Audio Manager to one of the <a class="link" href="MixAudio-mixaudiotypes.html#MixAudioManager" title="enum MixAudioManager"><span class="type">MixAudioManager</span></a>.</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>obj</code></em> :</span></p></td>
+<td> <a class="link" href="MixAudio-MixAudioConfigParams.html#MixAudioConfigParams" title="MixAudioConfigParams"><span class="type">MixAudioConfigParams</span></a>
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>am</code></em> :</span></p></td>
+<td> <a class="link" href="MixAudio-mixaudiotypes.html#MixAudioManager" title="enum MixAudioManager"><span class="type">MixAudioManager</span></a>
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>returns</code></em> :</span></p></td>
+<td> <span class="type">MIX_RESULT</span>
+</td>
+</tr>
+</tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="mix-acp-get-audio-manager"></a><h3>mix_acp_get_audio_manager ()</h3>
+<pre class="programlisting"><a class="link" href="MixAudio-mixaudiotypes.html#MixAudioManager" title="enum MixAudioManager">MixAudioManager</a> mix_acp_get_audio_manager (<a class="link" href="MixAudio-MixAudioConfigParams.html#MixAudioConfigParams" title="MixAudioConfigParams">MixAudioConfigParams</a> *obj);</pre>
+<p>
+Retrieve name of currently configured audio manager.</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>obj</code></em> :</span></p></td>
+<td> <a class="link" href="MixAudio-MixAudioConfigParams.html#MixAudioConfigParams" title="MixAudioConfigParams"><span class="type">MixAudioConfigParams</span></a>
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>returns</code></em> :</span></p></td>
+<td> <a class="link" href="MixAudio-mixaudiotypes.html#MixAudioManager" title="enum MixAudioManager"><span class="type">MixAudioManager</span></a>
+</td>
+</tr>
+</tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="mix-acp-is-streamname-valid"></a><h3>mix_acp_is_streamname_valid ()</h3>
+<pre class="programlisting">gboolean mix_acp_is_streamname_valid (<a class="link" href="MixAudio-MixAudioConfigParams.html#MixAudioConfigParams" title="MixAudioConfigParams">MixAudioConfigParams</a> *obj);</pre>
+<p>
+Check if stream name is valid considering the current Decode Mode.</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>obj</code></em> :</span></p></td>
+<td> <a class="link" href="MixAudio-MixAudioConfigParams.html#MixAudioConfigParams" title="MixAudioConfigParams"><span class="type">MixAudioConfigParams</span></a>
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>returns</code></em> :</span></p></td>
+<td> boolean indicates if stream name is valid.
+</td>
+</tr>
+</tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="mix-acp-get-bps"></a><h3>mix_acp_get_bps ()</h3>
+<pre class="programlisting"><a class="link" href="MixAudio-MixAudioConfigParams.html#MixACPBPSType" title="enum MixACPBPSType">MixACPBPSType</a> mix_acp_get_bps (<a class="link" href="MixAudio-MixAudioConfigParams.html#MixAudioConfigParams" title="MixAudioConfigParams">MixAudioConfigParams</a> *obj);</pre>
+<p>
+Retrieve the currently configured bits-per-sample value.</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>obj</code></em> :</span></p></td>
+<td> <a class="link" href="MixAudio-MixAudioConfigParams.html#MixAudioConfigParams" title="MixAudioConfigParams"><span class="type">MixAudioConfigParams</span></a>
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>returns</code></em> :</span></p></td>
+<td> <a class="link" href="MixAudio-MixAudioConfigParams.html#MixACPBPSType" title="enum MixACPBPSType"><span class="type">MixACPBPSType</span></a>
+</td>
+</tr>
+</tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="mix-acp-set-bps"></a><h3>mix_acp_set_bps ()</h3>
+<pre class="programlisting">MIX_RESULT mix_acp_set_bps (<a class="link" href="MixAudio-MixAudioConfigParams.html#MixAudioConfigParams" title="MixAudioConfigParams">MixAudioConfigParams</a> *obj,
+ <a class="link" href="MixAudio-MixAudioConfigParams.html#MixACPBPSType" title="enum MixACPBPSType">MixACPBPSType</a> type);</pre>
+<p>
+Configure bits-per-sample to one of the supported <a class="link" href="MixAudio-MixAudioConfigParams.html#MixACPBPSType" title="enum MixACPBPSType"><span class="type">MixACPBPSType</span></a>.</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>obj</code></em> :</span></p></td>
+<td> <a class="link" href="MixAudio-MixAudioConfigParams.html#MixAudioConfigParams" title="MixAudioConfigParams"><span class="type">MixAudioConfigParams</span></a>
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>type</code></em> :</span></p></td>
+<td> <a class="link" href="MixAudio-MixAudioConfigParams.html#MixACPBPSType" title="enum MixACPBPSType"><span class="type">MixACPBPSType</span></a> to set
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>returns</code></em> :</span></p></td>
+<td> <span class="type">MIX_RESULT</span>
+</td>
+</tr>
+</tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="mix-acp-get-op-align"></a><h3>mix_acp_get_op_align ()</h3>
+<pre class="programlisting"><a class="link" href="MixAudio-MixAudioConfigParams.html#MixACPOpAlign" title="enum MixACPOpAlign">MixACPOpAlign</a> mix_acp_get_op_align (<a class="link" href="MixAudio-MixAudioConfigParams.html#MixAudioConfigParams" title="MixAudioConfigParams">MixAudioConfigParams</a> *obj);</pre>
+<p>
+Get Output Alignment.</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>obj</code></em> :</span></p></td>
+<td> <a class="link" href="MixAudio-MixAudioConfigParams.html#MixAudioConfigParams" title="MixAudioConfigParams"><span class="type">MixAudioConfigParams</span></a> object
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>returns</code></em> :</span></p></td>
+<td> <a class="link" href="MixAudio-MixAudioConfigParams.html#MixACPOpAlign" title="enum MixACPOpAlign"><span class="type">MixACPOpAlign</span></a>
+</td>
+</tr>
+</tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="mix-acp-set-op-align"></a><h3>mix_acp_set_op_align ()</h3>
+<pre class="programlisting">MIX_RESULT mix_acp_set_op_align (<a class="link" href="MixAudio-MixAudioConfigParams.html#MixAudioConfigParams" title="MixAudioConfigParams">MixAudioConfigParams</a> *obj,
+ <a class="link" href="MixAudio-MixAudioConfigParams.html#MixACPOpAlign" title="enum MixACPOpAlign">MixACPOpAlign</a> op_align);</pre>
+<p>
+Set Output Alignment to one of the <a class="link" href="MixAudio-MixAudioConfigParams.html#MixACPOpAlign" title="enum MixACPOpAlign"><span class="type">MixACPOpAlign</span></a> value.</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>obj</code></em> :</span></p></td>
+<td> <a class="link" href="MixAudio-MixAudioConfigParams.html#MixAudioConfigParams" title="MixAudioConfigParams"><span class="type">MixAudioConfigParams</span></a> object
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>op_align</code></em> :</span></p></td>
+<td> One of the supported <a class="link" href="MixAudio-MixAudioConfigParams.html#MixACPOpAlign" title="enum MixACPOpAlign"><span class="type">MixACPOpAlign</span></a>
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>returns</code></em> :</span></p></td>
+<td> MIX_RESULT
+</td>
+</tr>
+</tbody>
+</table></div>
+</div>
+</div>
+</div>
+<div class="footer">
+<hr>
+ Generated by GTK-Doc V1.11</div>
+</body>
+</html>
diff --git a/mix_audio/docs/reference/MixAudio/html/MixAudio-MixAudioConfigParamsAAC.html b/mix_audio/docs/reference/MixAudio/html/MixAudio-MixAudioConfigParamsAAC.html
new file mode 100644
index 0000000..46e4e8e
--- /dev/null
+++ b/mix_audio/docs/reference/MixAudio/html/MixAudio-MixAudioConfigParamsAAC.html
@@ -0,0 +1,823 @@
+<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
+<html>
+<head>
+<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
+<title>MixAudioConfigParamsAAC</title>
+<meta name="generator" content="DocBook XSL Stylesheets V1.73.2">
+<link rel="start" href="index.html" title="MixAudio Reference Manual">
+<link rel="up" href="ch01.html" title="Mix Audio API">
+<link rel="prev" href="ch01.html" title="Mix Audio API">
+<link rel="next" href="MixAudio-MixAudioConfigParamsMP3.html" title="MixAudioConfigParamsMP3">
+<meta name="generator" content="GTK-Doc V1.11 (XML mode)">
+<link rel="stylesheet" href="style.css" type="text/css">
+<link rel="chapter" href="ch01.html" title="Mix Audio API">
+<link rel="chapter" href="tree-hierarchy.html" title="Object Hierarchy">
+<link rel="index" href="api-index-full.html" title="API Index">
+</head>
+<body bgcolor="white" text="black" link="#0000FF" vlink="#840084" alink="#0000FF">
+<table class="navigation" id="top" width="100%" summary="Navigation header" cellpadding="2" cellspacing="2">
+<tr valign="middle">
+<td><a accesskey="p" href="ch01.html"><img src="left.png" width="24" height="24" border="0" alt="Prev"></a></td>
+<td><a accesskey="u" href="ch01.html"><img src="up.png" width="24" height="24" border="0" alt="Up"></a></td>
+<td><a accesskey="h" href="index.html"><img src="home.png" width="24" height="24" border="0" alt="Home"></a></td>
+<th width="100%" align="center">MixAudio Reference Manual</th>
+<td><a accesskey="n" href="MixAudio-MixAudioConfigParamsMP3.html"><img src="right.png" width="24" height="24" border="0" alt="Next"></a></td>
+</tr>
+<tr><td colspan="5" class="shortcuts">
+<a href="#MixAudio-MixAudioConfigParamsAAC.synopsis" class="shortcut">Top</a>
+  | 
+ <a href="#MixAudio-MixAudioConfigParamsAAC.description" class="shortcut">Description</a>
+</td></tr>
+</table>
+<div class="refentry" lang="en">
+<a name="MixAudio-MixAudioConfigParamsAAC"></a><div class="titlepage"></div>
+<div class="refnamediv"><table width="100%"><tr>
+<td valign="top">
+<h2><span class="refentrytitle"><a name="MixAudio-MixAudioConfigParamsAAC.top_of_page"></a>MixAudioConfigParamsAAC</span></h2>
+<p>MixAudioConfigParamsAAC — Audio configuration parameters for AAC-LC, HEAAC v1, and HEAAC v2 audio format.</p>
+</td>
+<td valign="top" align="right"></td>
+</tr></table></div>
+<div class="refsynopsisdiv">
+<a name="MixAudio-MixAudioConfigParamsAAC.synopsis"></a><h2>Synopsis</h2>
+<pre class="synopsis">
+
+#include &lt;mixacpaac.h&gt;
+
+enum <a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#MixAACBitrateType" title="enum MixAACBitrateType">MixAACBitrateType</a>;
+enum <a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#MixAACBitstreamFormt" title="enum MixAACBitstreamFormt">MixAACBitstreamFormt</a>;
+enum <a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#MixAACProfile" title="enum MixAACProfile">MixAACProfile</a>;
+enum <a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#MixAACMpegID" title="enum MixAACMpegID">MixAACMpegID</a>;
+ <a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#MixAudioConfigParamsAAC" title="MixAudioConfigParamsAAC">MixAudioConfigParamsAAC</a>;
+<a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#MixAudioConfigParamsAAC" title="MixAudioConfigParamsAAC">MixAudioConfigParamsAAC</a> * <a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#mix-acp-aac-new" title="mix_acp_aac_new ()">mix_acp_aac_new</a> (void);
+<a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#MixAudioConfigParamsAAC" title="MixAudioConfigParamsAAC">MixAudioConfigParamsAAC</a> * <a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#mix-acp-aac-ref" title="mix_acp_aac_ref ()">mix_acp_aac_ref</a> (<a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#MixAudioConfigParamsAAC" title="MixAudioConfigParamsAAC">MixAudioConfigParamsAAC</a> *mix);
+#define <a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#mix-acp-aac-unref" title="mix_acp_aac_unref()">mix_acp_aac_unref</a> (obj)
+MIX_RESULT <a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#mix-acp-aac-set-mpeg-id" title="mix_acp_aac_set_mpeg_id ()">mix_acp_aac_set_mpeg_id</a> (<a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#MixAudioConfigParamsAAC" title="MixAudioConfigParamsAAC">MixAudioConfigParamsAAC</a> *obj,
+ <a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#MixAACMpegID" title="enum MixAACMpegID">MixAACMpegID</a> mpegid);
+<a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#MixAACMpegID" title="enum MixAACMpegID">MixAACMpegID</a> <a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#mix-acp-aac-get-mpeg-id" title="mix_acp_aac_get_mpeg_id ()">mix_acp_aac_get_mpeg_id</a> (<a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#MixAudioConfigParamsAAC" title="MixAudioConfigParamsAAC">MixAudioConfigParamsAAC</a> *obj);
+#define <a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#MIX-ACP-AAC-CRC--CAPS" title="MIX_ACP_AAC_CRC()">MIX_ACP_AAC_CRC</a> (obj)
+MIX_RESULT <a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#mix-acp-aac-set-aot" title="mix_acp_aac_set_aot ()">mix_acp_aac_set_aot</a> (<a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#MixAudioConfigParamsAAC" title="MixAudioConfigParamsAAC">MixAudioConfigParamsAAC</a> *obj,
+ guint aot);
+guint <a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#mix-acp-aac-get-aot" title="mix_acp_aac_get_aot ()">mix_acp_aac_get_aot</a> (<a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#MixAudioConfigParamsAAC" title="MixAudioConfigParamsAAC">MixAudioConfigParamsAAC</a> *obj);
+#define <a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#MIX-ACP-AAC-SBR-FLAG--CAPS" title="MIX_ACP_AAC_SBR_FLAG()">MIX_ACP_AAC_SBR_FLAG</a> (obj)
+#define <a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#MIX-ACP-AAC-PS-FLAG--CAPS" title="MIX_ACP_AAC_PS_FLAG()">MIX_ACP_AAC_PS_FLAG</a> (obj)
+#define <a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#MIX-ACP-AAC-PCE-FLAG--CAPS" title="MIX_ACP_AAC_PCE_FLAG()">MIX_ACP_AAC_PCE_FLAG</a> (obj)
+#define <a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#MIX-ACP-AAC-SAMPLE-RATE--CAPS" title="MIX_ACP_AAC_SAMPLE_RATE()">MIX_ACP_AAC_SAMPLE_RATE</a> (obj)
+#define <a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#MIX-ACP-AAC-CHANNELS--CAPS" title="MIX_ACP_AAC_CHANNELS()">MIX_ACP_AAC_CHANNELS</a> (obj)
+<a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#MixAACBitstreamFormt" title="enum MixAACBitstreamFormt">MixAACBitstreamFormt</a> <a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#mix-acp-aac-get-bit-stream-format" title="mix_acp_aac_get_bit_stream_format ()">mix_acp_aac_get_bit_stream_format</a> (<a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#MixAudioConfigParamsAAC" title="MixAudioConfigParamsAAC">MixAudioConfigParamsAAC</a> *obj);
+MIX_RESULT <a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#mix-acp-aac-set-bit-stream-format" title="mix_acp_aac_set_bit_stream_format ()">mix_acp_aac_set_bit_stream_format</a> (<a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#MixAudioConfigParamsAAC" title="MixAudioConfigParamsAAC">MixAudioConfigParamsAAC</a> *obj,
+ <a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#MixAACBitstreamFormt" title="enum MixAACBitstreamFormt">MixAACBitstreamFormt</a> bit_stream_format);
+<a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#MixAACProfile" title="enum MixAACProfile">MixAACProfile</a> <a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#mix-acp-aac-get-aac-profile" title="mix_acp_aac_get_aac_profile ()">mix_acp_aac_get_aac_profile</a> (<a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#MixAudioConfigParamsAAC" title="MixAudioConfigParamsAAC">MixAudioConfigParamsAAC</a> *obj);
+MIX_RESULT <a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#mix-acp-aac-set-aac-profile" title="mix_acp_aac_set_aac_profile ()">mix_acp_aac_set_aac_profile</a> (<a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#MixAudioConfigParamsAAC" title="MixAudioConfigParamsAAC">MixAudioConfigParamsAAC</a> *obj,
+ <a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#MixAACProfile" title="enum MixAACProfile">MixAACProfile</a> aac_profile);
+<a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#MixAACBitrateType" title="enum MixAACBitrateType">MixAACBitrateType</a> <a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#mix-acp-aac-get-bit-rate-type" title="mix_acp_aac_get_bit_rate_type ()">mix_acp_aac_get_bit_rate_type</a> (<a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#MixAudioConfigParamsAAC" title="MixAudioConfigParamsAAC">MixAudioConfigParamsAAC</a> *obj);
+MIX_RESULT <a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#mix-acp-aac-set-bit-rate-type" title="mix_acp_aac_set_bit_rate_type ()">mix_acp_aac_set_bit_rate_type</a> (<a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#MixAudioConfigParamsAAC" title="MixAudioConfigParamsAAC">MixAudioConfigParamsAAC</a> *obj,
+ <a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#MixAACBitrateType" title="enum MixAACBitrateType">MixAACBitrateType</a> bit_rate_type);
+</pre>
+</div>
+<div class="refsect1" lang="en">
+<a name="MixAudio-MixAudioConfigParamsAAC.description"></a><h2>Description</h2>
+<p>
+A data object which stores audio specific parameters for the following formats:
+</p>
+<div class="itemizedlist"><ul type="disc">
+<li>AAC-LC</li>
+<li>HE-AAC v1</li>
+<li>HE-AAC v2</li>
+</ul></div>
+<p>
+</p>
+<p>
+Additional parameters must be set in the parent object <a class="link" href="MixAudio-MixAudioConfigParams.html#MixAudioConfigParams" title="MixAudioConfigParams"><span class="type">MixAudioConfigParams</span></a></p>
+</div>
+<div class="refsect1" lang="en">
+<a name="MixAudio-MixAudioConfigParamsAAC.details"></a><h2>Details</h2>
+<div class="refsect2" lang="en">
+<a name="MixAACBitrateType"></a><h3>enum MixAACBitrateType</h3>
+<pre class="programlisting">typedef enum {
+ MIX_AAC_BR_NULL=-1,
+ MIX_AAC_BR_CONSTANT=0,
+ MIX_AAC_BR_VARIABLE,
+ MIX_AAC_BR_LAST
+} MixAACBitrateType;
+</pre>
+<p>
+Types of bitrate in AAC.</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody>
+<tr>
+<td><p><a name="MIX-AAC-BR-NULL--CAPS"></a><span class="term"><code class="literal">MIX_AAC_BR_NULL</code></span></p></td>
+<td> Undefined bit rate type.
+</td>
+</tr>
+<tr>
+<td><p><a name="MIX-AAC-BR-CONSTANT--CAPS"></a><span class="term"><code class="literal">MIX_AAC_BR_CONSTANT</code></span></p></td>
+<td> Constant bit rate.
+</td>
+</tr>
+<tr>
+<td><p><a name="MIX-AAC-BR-VARIABLE--CAPS"></a><span class="term"><code class="literal">MIX_AAC_BR_VARIABLE</code></span></p></td>
+<td> Variable bit rate.
+</td>
+</tr>
+<tr>
+<td><p><a name="MIX-AAC-BR-LAST--CAPS"></a><span class="term"><code class="literal">MIX_AAC_BR_LAST</code></span></p></td>
+<td> Last entry.
+</td>
+</tr>
+</tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="MixAACBitstreamFormt"></a><h3>enum MixAACBitstreamFormt</h3>
+<pre class="programlisting">typedef enum {
+ MIX_AAC_BS_NULL=-1,
+ MIX_AAC_BS_ADTS=0,
+ MIX_AAC_BS_ADIF,
+ MIX_AAC_BS_RAW,
+ MIX_AAC_BS_LAST
+} MixAACBitstreamFormt;
+</pre>
+<p>
+AAC bitstream format.</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody>
+<tr>
+<td><p><a name="MIX-AAC-BS-NULL--CAPS"></a><span class="term"><code class="literal">MIX_AAC_BS_NULL</code></span></p></td>
+<td> Undefined bitstream format.
+</td>
+</tr>
+<tr>
+<td><p><a name="MIX-AAC-BS-ADTS--CAPS"></a><span class="term"><code class="literal">MIX_AAC_BS_ADTS</code></span></p></td>
+<td> Bitstream is in ADTS format.
+</td>
+</tr>
+<tr>
+<td><p><a name="MIX-AAC-BS-ADIF--CAPS"></a><span class="term"><code class="literal">MIX_AAC_BS_ADIF</code></span></p></td>
+<td> Bitstream is in ADIF format.
+</td>
+</tr>
+<tr>
+<td><p><a name="MIX-AAC-BS-RAW--CAPS"></a><span class="term"><code class="literal">MIX_AAC_BS_RAW</code></span></p></td>
+<td> Bitstream is in raw format.
+</td>
+</tr>
+<tr>
+<td><p><a name="MIX-AAC-BS-LAST--CAPS"></a><span class="term"><code class="literal">MIX_AAC_BS_LAST</code></span></p></td>
+<td> Last entry.
+</td>
+</tr>
+</tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="MixAACProfile"></a><h3>enum MixAACProfile</h3>
+<pre class="programlisting">typedef enum {
+ MIX_AAC_PROFILE_NULL=-1,
+ MIX_AAC_PROFILE_MAIN=0,
+ MIX_AAC_PROFILE_LC,
+ MIX_AAC_PROFILE_SSR,
+ MIX_AAC_PROFILE_LAST
+} MixAACProfile;
+</pre>
+<p>
+AAC profiles definitions.</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody>
+<tr>
+<td><p><a name="MIX-AAC-PROFILE-NULL--CAPS"></a><span class="term"><code class="literal">MIX_AAC_PROFILE_NULL</code></span></p></td>
+<td> Undefined profile.
+</td>
+</tr>
+<tr>
+<td><p><a name="MIX-AAC-PROFILE-MAIN--CAPS"></a><span class="term"><code class="literal">MIX_AAC_PROFILE_MAIN</code></span></p></td>
+<td> <span class="emphasis"><em>Not Supported</em></span> AAC Main profile.
+</td>
+</tr>
+<tr>
+<td><p><a name="MIX-AAC-PROFILE-LC--CAPS"></a><span class="term"><code class="literal">MIX_AAC_PROFILE_LC</code></span></p></td>
+<td> AAC-LC profile, including support of SBR and PS tool.
+</td>
+</tr>
+<tr>
+<td><p><a name="MIX-AAC-PROFILE-SSR--CAPS"></a><span class="term"><code class="literal">MIX_AAC_PROFILE_SSR</code></span></p></td>
+<td> <span class="emphasis"><em>Not Supported</em></span> SSR profile.
+</td>
+</tr>
+<tr>
+<td><p><a name="MIX-AAC-PROFILE-LAST--CAPS"></a><span class="term"><code class="literal">MIX_AAC_PROFILE_LAST</code></span></p></td>
+<td> Last entry.
+</td>
+</tr>
+</tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="MixAACMpegID"></a><h3>enum MixAACMpegID</h3>
+<pre class="programlisting">typedef enum {
+ MIX_AAC_MPEG_ID_NULL=-1,
+ MIX_AAC_MPEG_2_ID = 0,
+ MIX_AAC_MPEG_4_ID = 1,
+ MIX_AAC_MPEG_LAST
+} MixAACMpegID;
+</pre>
+<p>
+AAC MPEG ID.</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody>
+<tr>
+<td><p><a name="MIX-AAC-MPEG-ID-NULL--CAPS"></a><span class="term"><code class="literal">MIX_AAC_MPEG_ID_NULL</code></span></p></td>
+<td> Undefined MPEG ID.
+</td>
+</tr>
+<tr>
+<td><p><a name="MIX-AAC-MPEG-2-ID--CAPS"></a><span class="term"><code class="literal">MIX_AAC_MPEG_2_ID</code></span></p></td>
+<td> Indicate MPEG 2 Audio.
+</td>
+</tr>
+<tr>
+<td><p><a name="MIX-AAC-MPEG-4-ID--CAPS"></a><span class="term"><code class="literal">MIX_AAC_MPEG_4_ID</code></span></p></td>
+<td> Indicate MPEG 4 Audio.
+</td>
+</tr>
+<tr>
+<td><p><a name="MIX-AAC-MPEG-LAST--CAPS"></a><span class="term"><code class="literal">MIX_AAC_MPEG_LAST</code></span></p></td>
+<td> Last entry.
+</td>
+</tr>
+</tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="MixAudioConfigParamsAAC"></a><h3>MixAudioConfigParamsAAC</h3>
+<pre class="programlisting">typedef struct {
+ MixAudioConfigParams parent;
+
+ /* Audio Format Parameters */
+ MixAACMpegID MPEG_id;
+ MixAACBitstreamFormt bit_stream_format;
+ MixAACProfile aac_profile;
+ guint aot;
+ guint aac_sample_rate;
+ guint aac_channels;
+ MixAACBitrateType bit_rate_type;
+ gboolean CRC;
+ guint sbrPresentFlag;
+ guint psPresentFlag;
+ gboolean pce_present;
+ gint8 syntc_id[2];
+ gint8 syntc_tag[2];
+ gint num_syntc_elems;
+} MixAudioConfigParamsAAC;
+</pre>
+<p>
+MixAudio Parameter object</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody>
+<tr>
+<td><p><span class="term"><a class="link" href="MixAudio-MixAudioConfigParams.html#MixAudioConfigParams" title="MixAudioConfigParams">MixAudioConfigParams</a> <em class="structfield"><code>parent</code></em>;</span></p></td>
+<td> parent.
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#MixAACMpegID" title="enum MixAACMpegID">MixAACMpegID</a> <em class="structfield"><code>MPEG_id</code></em>;</span></p></td>
+<td> MPEG ID. See <a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#mix-acp-aac-set-mpeg-id" title="mix_acp_aac_set_mpeg_id ()"><span class="type">mix_acp_aac_set_mpeg_id</span></a>
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#MixAACBitstreamFormt" title="enum MixAACBitstreamFormt">MixAACBitstreamFormt</a> <em class="structfield"><code>bit_stream_format</code></em>;</span></p></td>
+<td> Bitstream format. See <a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#mix-acp-aac-set-bit-stream-format" title="mix_acp_aac_set_bit_stream_format ()"><span class="type">mix_acp_aac_set_bit_stream_format</span></a>.
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#MixAACProfile" title="enum MixAACProfile">MixAACProfile</a> <em class="structfield"><code>aac_profile</code></em>;</span></p></td>
+<td> AAC profile. See <a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#mix-acp-aac-set-aac-profile" title="mix_acp_aac_set_aac_profile ()"><span class="type">mix_acp_aac_set_aac_profile</span></a>.
+</td>
+</tr>
+<tr>
+<td><p><span class="term">guint <em class="structfield"><code>aot</code></em>;</span></p></td>
+<td> Audio object type. See <a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#mix-acp-aac-set-aot" title="mix_acp_aac_set_aot ()"><span class="type">mix_acp_aac_set_aot</span></a>
+</td>
+</tr>
+<tr>
+<td><p><span class="term">guint <em class="structfield"><code>aac_sample_rate</code></em>;</span></p></td>
+<td> See <a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#MIX-ACP-AAC-SAMPLE-RATE--CAPS" title="MIX_ACP_AAC_SAMPLE_RATE()"><span class="type">MIX_ACP_AAC_SAMPLE_RATE</span></a> macro.
+</td>
+</tr>
+<tr>
+<td><p><span class="term">guint <em class="structfield"><code>aac_channels</code></em>;</span></p></td>
+<td> See <a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#MIX-ACP-AAC-CHANNELS--CAPS" title="MIX_ACP_AAC_CHANNELS()"><span class="type">MIX_ACP_AAC_CHANNELS</span></a> macro.
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#MixAACBitrateType" title="enum MixAACBitrateType">MixAACBitrateType</a> <em class="structfield"><code>bit_rate_type</code></em>;</span></p></td>
+<td> Bitrate type. See <a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#mix-acp-aac-set-bit-rate-type" title="mix_acp_aac_set_bit_rate_type ()"><span class="type">mix_acp_aac_set_bit_rate_type</span></a>
+</td>
+</tr>
+<tr>
+<td><p><span class="term">gboolean <em class="structfield"><code>CRC</code></em>;</span></p></td>
+<td> CRC check 0:disable, 1:enable.
+</td>
+</tr>
+<tr>
+<td><p><span class="term">guint <em class="structfield"><code>sbrPresentFlag</code></em>;</span></p></td>
+<td> See <a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#MIX-ACP-AAC-SBR-FLAG--CAPS" title="MIX_ACP_AAC_SBR_FLAG()"><span class="type">MIX_ACP_AAC_SBR_FLAG</span></a> macro.
+</td>
+</tr>
+<tr>
+<td><p><span class="term">guint <em class="structfield"><code>psPresentFlag</code></em>;</span></p></td>
+<td> See <a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#MIX-ACP-AAC-PS-FLAG--CAPS" title="MIX_ACP_AAC_PS_FLAG()"><span class="type">MIX_ACP_AAC_PS_FLAG</span></a> macro.
+</td>
+</tr>
+<tr>
+<td><p><span class="term">gboolean <em class="structfield"><code>pce_present</code></em>;</span></p></td>
+<td> <span class="emphasis"><em>Not Used.</em></span> See <a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#MIX-ACP-AAC-PCE-FLAG--CAPS" title="MIX_ACP_AAC_PCE_FLAG()"><span class="type">MIX_ACP_AAC_PCE_FLAG</span></a>
+</td>
+</tr>
+<tr>
+<td><p><span class="term">gint8 <em class="structfield"><code>syntc_id</code></em>[2];</span></p></td>
+<td> <span class="emphasis"><em>Not Used.</em></span> 0 for ID_SCE (Dual Mono), -1 for raw.
+</td>
+</tr>
+<tr>
+<td><p><span class="term">gint8 <em class="structfield"><code>syntc_tag</code></em>[2];</span></p></td>
+<td> <span class="emphasis"><em>Not Used.</em></span> -1 for raw. 0-16 for rest of the streams.
+</td>
+</tr>
+<tr>
+<td><p><span class="term">gint <em class="structfield"><code>num_syntc_elems</code></em>;</span></p></td>
+<td> <span class="emphasis"><em>Not Used.</em></span> Number of syntactic elements.
+</td>
+</tr>
+</tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="mix-acp-aac-new"></a><h3>mix_acp_aac_new ()</h3>
+<pre class="programlisting"><a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#MixAudioConfigParamsAAC" title="MixAudioConfigParamsAAC">MixAudioConfigParamsAAC</a> * mix_acp_aac_new (void);</pre>
+<p>
+Use this method to create new instance of <a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#MixAudioConfigParamsAAC" title="MixAudioConfigParamsAAC"><span class="type">MixAudioConfigParamsAAC</span></a></p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody><tr>
+<td><p><span class="term"><em class="parameter"><code>returns</code></em> :</span></p></td>
+<td> A newly allocated instance of <a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#MixAudioConfigParamsAAC" title="MixAudioConfigParamsAAC"><span class="type">MixAudioConfigParamsAAC</span></a>
+</td>
+</tr></tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="mix-acp-aac-ref"></a><h3>mix_acp_aac_ref ()</h3>
+<pre class="programlisting"><a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#MixAudioConfigParamsAAC" title="MixAudioConfigParamsAAC">MixAudioConfigParamsAAC</a> * mix_acp_aac_ref (<a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#MixAudioConfigParamsAAC" title="MixAudioConfigParamsAAC">MixAudioConfigParamsAAC</a> *mix);</pre>
+<p>
+Add reference count.</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>mix</code></em> :</span></p></td>
+<td> object to add reference
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>returns</code></em> :</span></p></td>
+<td> the MixAudioConfigParamsAAC instance where reference count has been increased.
+</td>
+</tr>
+</tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="mix-acp-aac-unref"></a><h3>mix_acp_aac_unref()</h3>
+<pre class="programlisting">#define mix_acp_aac_unref(obj) mix_params_unref(MIX_PARAMS(obj))
+</pre>
+<p>
+Decrement reference count of the object.</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody><tr>
+<td><p><span class="term"><em class="parameter"><code>obj</code></em> :</span></p></td>
+<td> object to unref.
+</td>
+</tr></tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="mix-acp-aac-set-mpeg-id"></a><h3>mix_acp_aac_set_mpeg_id ()</h3>
+<pre class="programlisting">MIX_RESULT mix_acp_aac_set_mpeg_id (<a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#MixAudioConfigParamsAAC" title="MixAudioConfigParamsAAC">MixAudioConfigParamsAAC</a> *obj,
+ <a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#MixAACMpegID" title="enum MixAACMpegID">MixAACMpegID</a> mpegid);</pre>
+<p>
+Configure decoder to treat audio as MPEG 2 or MPEG 4.</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>obj</code></em> :</span></p></td>
+<td> <a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#MixAudioConfigParamsAAC" title="MixAudioConfigParamsAAC"><span class="type">MixAudioConfigParamsAAC</span></a>
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>mpegid</code></em> :</span></p></td>
+<td> MPEG ID to set.
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>return</code></em> :</span></p></td>
+<td> MIX_RESULT
+</td>
+</tr>
+</tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="mix-acp-aac-get-mpeg-id"></a><h3>mix_acp_aac_get_mpeg_id ()</h3>
+<pre class="programlisting"><a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#MixAACMpegID" title="enum MixAACMpegID">MixAACMpegID</a> mix_acp_aac_get_mpeg_id (<a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#MixAudioConfigParamsAAC" title="MixAudioConfigParamsAAC">MixAudioConfigParamsAAC</a> *obj);</pre>
+<p>
+Retrieve currently configured mpeg id value.</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>obj</code></em> :</span></p></td>
+<td> <a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#MixAudioConfigParamsAAC" title="MixAudioConfigParamsAAC"><span class="type">MixAudioConfigParamsAAC</span></a> object
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>returns</code></em> :</span></p></td>
+<td> MPEG ID.
+</td>
+</tr>
+</tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="MIX-ACP-AAC-CRC--CAPS"></a><h3>MIX_ACP_AAC_CRC()</h3>
+<pre class="programlisting">#define MIX_ACP_AAC_CRC(obj) (MIX_AUDIOCONFIGPARAMSAAC(obj)-&gt;CRC)
+</pre>
+<p>
+<span class="type">MixAudioConfigParamsAAC</span>.CRC accessor.</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody><tr>
+<td><p><span class="term"><em class="parameter"><code>obj</code></em> :</span></p></td>
+<td> <a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#MixAudioConfigParamsAAC" title="MixAudioConfigParamsAAC"><span class="type">MixAudioConfigParamsAAC</span></a> object.
+</td>
+</tr></tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="mix-acp-aac-set-aot"></a><h3>mix_acp_aac_set_aot ()</h3>
+<pre class="programlisting">MIX_RESULT mix_acp_aac_set_aot (<a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#MixAudioConfigParamsAAC" title="MixAudioConfigParamsAAC">MixAudioConfigParamsAAC</a> *obj,
+ guint aot);</pre>
+<p>
+Audio Object Type for the MPEG-4 audio stream. Valid values are:
+</p>
+<p>
+2 - for AAC-LC
+</p>
+<p>
+5 - for SBR
+</p>
+<p>
+Method returns MIX_RESULT_NOT_SUPPORTED for unsupported values.</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>obj</code></em> :</span></p></td>
+<td> <a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#MixAudioConfigParamsAAC" title="MixAudioConfigParamsAAC"><span class="type">MixAudioConfigParamsAAC</span></a>
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>aot</code></em> :</span></p></td>
+<td> Audio Object Type.
+</td>
+</tr>
+</tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="mix-acp-aac-get-aot"></a><h3>mix_acp_aac_get_aot ()</h3>
+<pre class="programlisting">guint mix_acp_aac_get_aot (<a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#MixAudioConfigParamsAAC" title="MixAudioConfigParamsAAC">MixAudioConfigParamsAAC</a> *obj);</pre>
+<p>
+To retrieve currently configured audio object type.</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>obj</code></em> :</span></p></td>
+<td> <a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#MixAudioConfigParamsAAC" title="MixAudioConfigParamsAAC"><span class="type">MixAudioConfigParamsAAC</span></a>
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>aot</code></em> :</span></p></td>
+<td> Pointer to receive the Audio Object Type.
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>return</code></em> :</span></p></td>
+<td> Currently configured audio object type. Or 0 if not yet specified.
+</td>
+</tr>
+</tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="MIX-ACP-AAC-SBR-FLAG--CAPS"></a><h3>MIX_ACP_AAC_SBR_FLAG()</h3>
+<pre class="programlisting">#define MIX_ACP_AAC_SBR_FLAG(obj) (MIX_AUDIOCONFIGPARAMSAAC(obj)-&gt;sbrPresentFlag)
+</pre>
+<p>
+MixAudioConfigParamsAAC.sbrPresentFlag accessor.
+</p>
+<p>
+Applicable only when <em class="parameter"><code>bit_stream_format</code></em>==<a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#MIX-AAC-BS-RAW--CAPS"><span class="type">MIX_AAC_BS_RAW</span></a>. Indicates whether SBR data is present.
+</p>
+<p>
+0: Absent
+</p>
+<p>
+1: Present
+</p>
+<p>
+-1 (0xffffffff): indicates implicit signalling.</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody><tr>
+<td><p><span class="term"><em class="parameter"><code>obj</code></em> :</span></p></td>
+<td> <a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#MixAudioConfigParamsAAC" title="MixAudioConfigParamsAAC"><span class="type">MixAudioConfigParamsAAC</span></a> object
+</td>
+</tr></tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="MIX-ACP-AAC-PS-FLAG--CAPS"></a><h3>MIX_ACP_AAC_PS_FLAG()</h3>
+<pre class="programlisting">#define MIX_ACP_AAC_PS_FLAG(obj) (MIX_AUDIOCONFIGPARAMSAAC(obj)-&gt;psPresentFlag)
+</pre>
+<p>
+MixAudioConfigParamsAAC.psPresentFlag accessor.
+</p>
+<p>
+Applicable only when <em class="parameter"><code>bit_stream_format</code></em>==<a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#MIX-AAC-BS-RAW--CAPS"><span class="type">MIX_AAC_BS_RAW</span></a>. Indicates whether PS data is present.
+</p>
+<p>
+0: Absent
+</p>
+<p>
+1: Present
+</p>
+<p>
+-1 (0xffffffff): indicates implicit signalling.</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody><tr>
+<td><p><span class="term"><em class="parameter"><code>obj</code></em> :</span></p></td>
+<td> <a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#MixAudioConfigParamsAAC" title="MixAudioConfigParamsAAC"><span class="type">MixAudioConfigParamsAAC</span></a> object
+</td>
+</tr></tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="MIX-ACP-AAC-PCE-FLAG--CAPS"></a><h3>MIX_ACP_AAC_PCE_FLAG()</h3>
+<pre class="programlisting">#define MIX_ACP_AAC_PCE_FLAG(obj) (MIX_AUDIOCONFIGPARAMSAAC(obj)-&gt;pce_present)
+</pre>
+<p>
+MixAudioConfigParamsAAC.pce_present accessor.
+</p>
+<p>
+Applicable only when <em class="parameter"><code>bit_stream_format</code></em>==<a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#MIX-AAC-BS-RAW--CAPS"><span class="type">MIX_AAC_BS_RAW</span></a>. Indicates PCE data presence.
+</p>
+<p>
+1:present
+</p>
+<p>
+0:absent.
+</p>
+<p>
+<em><span class="remark">Not Used on Moorestown.</span></em></p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody><tr>
+<td><p><span class="term"><em class="parameter"><code>obj</code></em> :</span></p></td>
+<td> <a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#MixAudioConfigParamsAAC" title="MixAudioConfigParamsAAC"><span class="type">MixAudioConfigParamsAAC</span></a> object.
+</td>
+</tr></tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="MIX-ACP-AAC-SAMPLE-RATE--CAPS"></a><h3>MIX_ACP_AAC_SAMPLE_RATE()</h3>
+<pre class="programlisting">#define MIX_ACP_AAC_SAMPLE_RATE(obj) (MIX_AUDIOCONFIGPARAMSAAC(obj)-&gt;aac_sample_rate)
+</pre>
+<p>
+MixAudioConfigParamsAAC.aac_sample_rate accessor.
+</p>
+<p>
+Plain AAC decoder operating sample rate. Which could be different from the output sampling rate with HE AAC v1 and v2.</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody><tr>
+<td><p><span class="term"><em class="parameter"><code>obj</code></em> :</span></p></td>
+<td> <a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#MixAudioConfigParamsAAC" title="MixAudioConfigParamsAAC"><span class="type">MixAudioConfigParamsAAC</span></a> object.
+</td>
+</tr></tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="MIX-ACP-AAC-CHANNELS--CAPS"></a><h3>MIX_ACP_AAC_CHANNELS()</h3>
+<pre class="programlisting">#define MIX_ACP_AAC_CHANNELS(obj) (MIX_AUDIOCONFIGPARAMSAAC(obj)-&gt;aac_channels)
+</pre>
+<p>
+MixAudioConfigParamsAAC.aac_channels accessor.
+</p>
+<p>
+Indicates the number of output channels used by AAC decoder before SBR or PS tools are applied.</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody><tr>
+<td><p><span class="term"><em class="parameter"><code>obj</code></em> :</span></p></td>
+<td> <a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#MixAudioConfigParamsAAC" title="MixAudioConfigParamsAAC"><span class="type">MixAudioConfigParamsAAC</span></a>
+</td>
+</tr></tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="mix-acp-aac-get-bit-stream-format"></a><h3>mix_acp_aac_get_bit_stream_format ()</h3>
+<pre class="programlisting"><a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#MixAACBitstreamFormt" title="enum MixAACBitstreamFormt">MixAACBitstreamFormt</a> mix_acp_aac_get_bit_stream_format (<a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#MixAudioConfigParamsAAC" title="MixAudioConfigParamsAAC">MixAudioConfigParamsAAC</a> *obj);</pre>
+<p>
+Return the bitstream format currently configured.</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>obj</code></em> :</span></p></td>
+<td> <a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#MixAudioConfigParamsAAC" title="MixAudioConfigParamsAAC"><span class="type">MixAudioConfigParamsAAC</span></a>
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>returns</code></em> :</span></p></td>
+<td> <a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#MixAACBitstreamFormt" title="enum MixAACBitstreamFormt"><span class="type">MixAACBitstreamFormt</span></a>
+</td>
+</tr>
+</tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="mix-acp-aac-set-bit-stream-format"></a><h3>mix_acp_aac_set_bit_stream_format ()</h3>
+<pre class="programlisting">MIX_RESULT mix_acp_aac_set_bit_stream_format (<a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#MixAudioConfigParamsAAC" title="MixAudioConfigParamsAAC">MixAudioConfigParamsAAC</a> *obj,
+ <a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#MixAACBitstreamFormt" title="enum MixAACBitstreamFormt">MixAACBitstreamFormt</a> bit_stream_format);</pre>
+<p>
+Set the type of bitstream format as specified in <a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#MixAACBitstreamFormt" title="enum MixAACBitstreamFormt"><span class="type">MixAACBitstreamFormt</span></a>.</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>obj</code></em> :</span></p></td>
+<td> <a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#MixAudioConfigParamsAAC" title="MixAudioConfigParamsAAC"><span class="type">MixAudioConfigParamsAAC</span></a>
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>bit_stream_format</code></em> :</span></p></td>
+<td> Bit stream format.
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>returns</code></em> :</span></p></td>
+<td> MIX_RESULT
+</td>
+</tr>
+</tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="mix-acp-aac-get-aac-profile"></a><h3>mix_acp_aac_get_aac_profile ()</h3>
+<pre class="programlisting"><a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#MixAACProfile" title="enum MixAACProfile">MixAACProfile</a> mix_acp_aac_get_aac_profile (<a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#MixAudioConfigParamsAAC" title="MixAudioConfigParamsAAC">MixAudioConfigParamsAAC</a> *obj);</pre>
+<p>
+Retrieve the AAC profile currently configured.</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>obj</code></em> :</span></p></td>
+<td> <a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#MixAudioConfigParamsAAC" title="MixAudioConfigParamsAAC"><span class="type">MixAudioConfigParamsAAC</span></a>
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>returns</code></em> :</span></p></td>
+<td> <a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#MixAACProfile" title="enum MixAACProfile"><span class="type">MixAACProfile</span></a>
+</td>
+</tr>
+</tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="mix-acp-aac-set-aac-profile"></a><h3>mix_acp_aac_set_aac_profile ()</h3>
+<pre class="programlisting">MIX_RESULT mix_acp_aac_set_aac_profile (<a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#MixAudioConfigParamsAAC" title="MixAudioConfigParamsAAC">MixAudioConfigParamsAAC</a> *obj,
+ <a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#MixAACProfile" title="enum MixAACProfile">MixAACProfile</a> aac_profile);</pre>
+<p>
+Configure AAC profile for current session.
+</p>
+<p>
+Only <a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#MIX-AAC-PROFILE-LC--CAPS"><span class="type">MIX_AAC_PROFILE_LC</span></a> is supported in Moorestown.</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>obj</code></em> :</span></p></td>
+<td> <a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#MixAudioConfigParamsAAC" title="MixAudioConfigParamsAAC"><span class="type">MixAudioConfigParamsAAC</span></a>
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>aac_profile</code></em> :</span></p></td>
+<td> AAC profile to set.
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>returns</code></em> :</span></p></td>
+<td> MIX_RESULT
+</td>
+</tr>
+</tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="mix-acp-aac-get-bit-rate-type"></a><h3>mix_acp_aac_get_bit_rate_type ()</h3>
+<pre class="programlisting"><a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#MixAACBitrateType" title="enum MixAACBitrateType">MixAACBitrateType</a> mix_acp_aac_get_bit_rate_type (<a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#MixAudioConfigParamsAAC" title="MixAudioConfigParamsAAC">MixAudioConfigParamsAAC</a> *obj);</pre>
+<p>
+Retrieve the bit rate type currently configured.</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>obj</code></em> :</span></p></td>
+<td> <a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#MixAudioConfigParamsAAC" title="MixAudioConfigParamsAAC"><span class="type">MixAudioConfigParamsAAC</span></a>
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>returns</code></em> :</span></p></td>
+<td> <a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#MixAACBitrateType" title="enum MixAACBitrateType"><span class="type">MixAACBitrateType</span></a>
+</td>
+</tr>
+</tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="mix-acp-aac-set-bit-rate-type"></a><h3>mix_acp_aac_set_bit_rate_type ()</h3>
+<pre class="programlisting">MIX_RESULT mix_acp_aac_set_bit_rate_type (<a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#MixAudioConfigParamsAAC" title="MixAudioConfigParamsAAC">MixAudioConfigParamsAAC</a> *obj,
+ <a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#MixAACBitrateType" title="enum MixAACBitrateType">MixAACBitrateType</a> bit_rate_type);</pre>
+<p>
+Set the bit rate type used.</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>obj</code></em> :</span></p></td>
+<td> <a class="link" href="MixAudio-MixAudioConfigParamsAAC.html#MixAudioConfigParamsAAC" title="MixAudioConfigParamsAAC"><span class="type">MixAudioConfigParamsAAC</span></a>
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>bit_rate_type</code></em> :</span></p></td>
+<td> Bit rate type to set.
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>returns</code></em> :</span></p></td>
+<td> MIX_RESULT
+</td>
+</tr>
+</tbody>
+</table></div>
+</div>
+</div>
+</div>
+<div class="footer">
+<hr>
+ Generated by GTK-Doc V1.11</div>
+</body>
+</html>
diff --git a/mix_audio/docs/reference/MixAudio/html/MixAudio-MixAudioConfigParamsMP3.html b/mix_audio/docs/reference/MixAudio/html/MixAudio-MixAudioConfigParamsMP3.html
new file mode 100644
index 0000000..8c97357
--- /dev/null
+++ b/mix_audio/docs/reference/MixAudio/html/MixAudio-MixAudioConfigParamsMP3.html
@@ -0,0 +1,221 @@
+<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
+<html>
+<head>
+<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
+<title>MixAudioConfigParamsMP3</title>
+<meta name="generator" content="DocBook XSL Stylesheets V1.73.2">
+<link rel="start" href="index.html" title="MixAudio Reference Manual">
+<link rel="up" href="ch01.html" title="Mix Audio API">
+<link rel="prev" href="MixAudio-MixAudioConfigParamsAAC.html" title="MixAudioConfigParamsAAC">
+<link rel="next" href="MixAudio-MixAudioConfigParamsWMA.html" title="MixAudioConfigParamsWMA">
+<meta name="generator" content="GTK-Doc V1.11 (XML mode)">
+<link rel="stylesheet" href="style.css" type="text/css">
+<link rel="chapter" href="ch01.html" title="Mix Audio API">
+<link rel="chapter" href="tree-hierarchy.html" title="Object Hierarchy">
+<link rel="index" href="api-index-full.html" title="API Index">
+</head>
+<body bgcolor="white" text="black" link="#0000FF" vlink="#840084" alink="#0000FF">
+<table class="navigation" id="top" width="100%" summary="Navigation header" cellpadding="2" cellspacing="2">
+<tr valign="middle">
+<td><a accesskey="p" href="MixAudio-MixAudioConfigParamsAAC.html"><img src="left.png" width="24" height="24" border="0" alt="Prev"></a></td>
+<td><a accesskey="u" href="ch01.html"><img src="up.png" width="24" height="24" border="0" alt="Up"></a></td>
+<td><a accesskey="h" href="index.html"><img src="home.png" width="24" height="24" border="0" alt="Home"></a></td>
+<th width="100%" align="center">MixAudio Reference Manual</th>
+<td><a accesskey="n" href="MixAudio-MixAudioConfigParamsWMA.html"><img src="right.png" width="24" height="24" border="0" alt="Next"></a></td>
+</tr>
+<tr><td colspan="5" class="shortcuts">
+<a href="#MixAudio-MixAudioConfigParamsMP3.synopsis" class="shortcut">Top</a>
+  | 
+ <a href="#MixAudio-MixAudioConfigParamsMP3.description" class="shortcut">Description</a>
+</td></tr>
+</table>
+<div class="refentry" lang="en">
+<a name="MixAudio-MixAudioConfigParamsMP3"></a><div class="titlepage"></div>
+<div class="refnamediv"><table width="100%"><tr>
+<td valign="top">
+<h2><span class="refentrytitle"><a name="MixAudio-MixAudioConfigParamsMP3.top_of_page"></a>MixAudioConfigParamsMP3</span></h2>
+<p>MixAudioConfigParamsMP3 — Audio configuration parameters for MP3 audio.</p>
+</td>
+<td valign="top" align="right"></td>
+</tr></table></div>
+<div class="refsynopsisdiv">
+<a name="MixAudio-MixAudioConfigParamsMP3.synopsis"></a><h2>Synopsis</h2>
+<pre class="synopsis">
+
+#include &lt;mixacpmp3.h&gt;
+
+ <a class="link" href="MixAudio-MixAudioConfigParamsMP3.html#MixAudioConfigParamsMP3" title="MixAudioConfigParamsMP3">MixAudioConfigParamsMP3</a>;
+<a class="link" href="MixAudio-MixAudioConfigParamsMP3.html#MixAudioConfigParamsMP3" title="MixAudioConfigParamsMP3">MixAudioConfigParamsMP3</a> * <a class="link" href="MixAudio-MixAudioConfigParamsMP3.html#mix-acp-mp3-new" title="mix_acp_mp3_new ()">mix_acp_mp3_new</a> (void);
+<a class="link" href="MixAudio-MixAudioConfigParamsMP3.html#MixAudioConfigParamsMP3" title="MixAudioConfigParamsMP3">MixAudioConfigParamsMP3</a> * <a class="link" href="MixAudio-MixAudioConfigParamsMP3.html#mix-acp-mp3-ref" title="mix_acp_mp3_ref ()">mix_acp_mp3_ref</a> (<a class="link" href="MixAudio-MixAudioConfigParamsMP3.html#MixAudioConfigParamsMP3" title="MixAudioConfigParamsMP3">MixAudioConfigParamsMP3</a> *mix);
+#define <a class="link" href="MixAudio-MixAudioConfigParamsMP3.html#mix-acp-mp3-unref" title="mix_acp_mp3_unref()">mix_acp_mp3_unref</a> (obj)
+#define <a class="link" href="MixAudio-MixAudioConfigParamsMP3.html#MIX-ACP-MP3-CRC--CAPS" title="MIX_ACP_MP3_CRC()">MIX_ACP_MP3_CRC</a> (obj)
+#define <a class="link" href="MixAudio-MixAudioConfigParamsMP3.html#MIX-ACP-MP3-MPEG-FORMAT--CAPS" title="MIX_ACP_MP3_MPEG_FORMAT()">MIX_ACP_MP3_MPEG_FORMAT</a> (obj)
+#define <a class="link" href="MixAudio-MixAudioConfigParamsMP3.html#MIX-ACP-MP3-MPEG-LAYER--CAPS" title="MIX_ACP_MP3_MPEG_LAYER()">MIX_ACP_MP3_MPEG_LAYER</a> (obj)
+</pre>
+</div>
+<div class="refsect1" lang="en">
+<a name="MixAudio-MixAudioConfigParamsMP3.description"></a><h2>Description</h2>
+<p>
+A data object which stores audio specific parameters for MP3 audio.
+</p>
+<p>
+Additional parameters must be set in the parent object <a class="link" href="MixAudio-MixAudioConfigParams.html#MixAudioConfigParams" title="MixAudioConfigParams"><span class="type">MixAudioConfigParams</span></a></p>
+</div>
+<div class="refsect1" lang="en">
+<a name="MixAudio-MixAudioConfigParamsMP3.details"></a><h2>Details</h2>
+<div class="refsect2" lang="en">
+<a name="MixAudioConfigParamsMP3"></a><h3>MixAudioConfigParamsMP3</h3>
+<pre class="programlisting">typedef struct {
+ MixAudioConfigParams parent;
+
+ /* Audio Format Parameters */
+ gboolean CRC;
+ gint MPEG_format;
+ gint MPEG_layer;
+} MixAudioConfigParamsMP3;
+</pre>
+<p>
+MI-X Audio Parameter object for MP3 Audio.</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody>
+<tr>
+<td><p><span class="term"><a class="link" href="MixAudio-MixAudioConfigParams.html#MixAudioConfigParams" title="MixAudioConfigParams">MixAudioConfigParams</a> <em class="structfield"><code>parent</code></em>;</span></p></td>
+<td> parent.
+</td>
+</tr>
+<tr>
+<td><p><span class="term">gboolean <em class="structfield"><code>CRC</code></em>;</span></p></td>
+<td> CRC. See <a class="link" href="MixAudio-MixAudioConfigParamsMP3.html#MIX-ACP-MP3-CRC--CAPS" title="MIX_ACP_MP3_CRC()"><span class="type">MIX_ACP_MP3_CRC</span></a>
+</td>
+</tr>
+<tr>
+<td><p><span class="term">gint <em class="structfield"><code>MPEG_format</code></em>;</span></p></td>
+<td> <span class="emphasis"><em>Optional</em></span> MPEG format of the mpeg audio. See <a class="link" href="MixAudio-MixAudioConfigParamsMP3.html#MIX-ACP-MP3-MPEG-FORMAT--CAPS" title="MIX_ACP_MP3_MPEG_FORMAT()"><span class="type">MIX_ACP_MP3_MPEG_FORMAT</span></a>
+</td>
+</tr>
+<tr>
+<td><p><span class="term">gint <em class="structfield"><code>MPEG_layer</code></em>;</span></p></td>
+<td> <span class="emphasis"><em>Optional</em></span> MPEG layer of the mpeg audio. See <a class="link" href="MixAudio-MixAudioConfigParamsMP3.html#MIX-ACP-MP3-MPEG-LAYER--CAPS" title="MIX_ACP_MP3_MPEG_LAYER()"><span class="type">MIX_ACP_MP3_MPEG_LAYER</span></a>
+</td>
+</tr>
+</tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="mix-acp-mp3-new"></a><h3>mix_acp_mp3_new ()</h3>
+<pre class="programlisting"><a class="link" href="MixAudio-MixAudioConfigParamsMP3.html#MixAudioConfigParamsMP3" title="MixAudioConfigParamsMP3">MixAudioConfigParamsMP3</a> * mix_acp_mp3_new (void);</pre>
+<p>
+Use this method to create new instance of <a class="link" href="MixAudio-MixAudioConfigParamsMP3.html#MixAudioConfigParamsMP3" title="MixAudioConfigParamsMP3"><span class="type">MixAudioConfigParamsMP3</span></a></p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody><tr>
+<td><p><span class="term"><em class="parameter"><code>returns</code></em> :</span></p></td>
+<td> A newly allocated instance of <a class="link" href="MixAudio-MixAudioConfigParamsMP3.html#MixAudioConfigParamsMP3" title="MixAudioConfigParamsMP3"><span class="type">MixAudioConfigParamsMP3</span></a>
+</td>
+</tr></tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="mix-acp-mp3-ref"></a><h3>mix_acp_mp3_ref ()</h3>
+<pre class="programlisting"><a class="link" href="MixAudio-MixAudioConfigParamsMP3.html#MixAudioConfigParamsMP3" title="MixAudioConfigParamsMP3">MixAudioConfigParamsMP3</a> * mix_acp_mp3_ref (<a class="link" href="MixAudio-MixAudioConfigParamsMP3.html#MixAudioConfigParamsMP3" title="MixAudioConfigParamsMP3">MixAudioConfigParamsMP3</a> *mix);</pre>
+<p>
+Add reference count.</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>mix</code></em> :</span></p></td>
+<td> object to add reference
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>returns</code></em> :</span></p></td>
+<td> the MixAudioConfigParamsMP3 instance where reference count has been increased.
+</td>
+</tr>
+</tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="mix-acp-mp3-unref"></a><h3>mix_acp_mp3_unref()</h3>
+<pre class="programlisting">#define mix_acp_mp3_unref(obj) mix_params_unref(MIX_PARAMS(obj))
+</pre>
+<p>
+Decrement reference count of the object.</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody><tr>
+<td><p><span class="term"><em class="parameter"><code>obj</code></em> :</span></p></td>
+<td> object to unref.
+</td>
+</tr></tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="MIX-ACP-MP3-CRC--CAPS"></a><h3>MIX_ACP_MP3_CRC()</h3>
+<pre class="programlisting">#define MIX_ACP_MP3_CRC(obj) (MIX_AUDIOCONFIGPARAMSMP3(obj)-&gt;CRC)
+</pre>
+<p>
+MixAudioConfigParamsMP3.CRC accessor.
+</p>
+<p>
+<em><span class="remark">Optional</span></em></p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody><tr>
+<td><p><span class="term"><em class="parameter"><code>obj</code></em> :</span></p></td>
+<td> <a class="link" href="MixAudio-MixAudioConfigParamsMP3.html#MixAudioConfigParamsMP3" title="MixAudioConfigParamsMP3"><span class="type">MixAudioConfigParamsMP3</span></a> object.
+</td>
+</tr></tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="MIX-ACP-MP3-MPEG-FORMAT--CAPS"></a><h3>MIX_ACP_MP3_MPEG_FORMAT()</h3>
+<pre class="programlisting">#define MIX_ACP_MP3_MPEG_FORMAT(obj) (MIX_AUDIOCONFIGPARAMSMP3(obj)-&gt;MPEG_format)
+</pre>
+<p>
+MixAudioConfigParamsMP3.MPEG_format accessor.
+</p>
+<p>
+Supported MPEG format should be 1 or 2.</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody><tr>
+<td><p><span class="term"><em class="parameter"><code>obj</code></em> :</span></p></td>
+<td> <a class="link" href="MixAudio-MixAudioConfigParamsMP3.html#MixAudioConfigParamsMP3" title="MixAudioConfigParamsMP3"><span class="type">MixAudioConfigParamsMP3</span></a> object.
+</td>
+</tr></tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="MIX-ACP-MP3-MPEG-LAYER--CAPS"></a><h3>MIX_ACP_MP3_MPEG_LAYER()</h3>
+<pre class="programlisting">#define MIX_ACP_MP3_MPEG_LAYER(obj) (MIX_AUDIOCONFIGPARAMSMP3(obj)-&gt;MPEG_layer)
+</pre>
+<p>
+MixAudioConfigParamsMP3.MPEG_layer accessor.
+</p>
+<p>
+Supported layer should be 1, 2, or 3.</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody><tr>
+<td><p><span class="term"><em class="parameter"><code>obj</code></em> :</span></p></td>
+<td> <a class="link" href="MixAudio-MixAudioConfigParamsMP3.html#MixAudioConfigParamsMP3" title="MixAudioConfigParamsMP3"><span class="type">MixAudioConfigParamsMP3</span></a> object.
+</td>
+</tr></tbody>
+</table></div>
+</div>
+</div>
+</div>
+<div class="footer">
+<hr>
+ Generated by GTK-Doc V1.11</div>
+</body>
+</html>
diff --git a/mix_audio/docs/reference/MixAudio/html/MixAudio-MixAudioConfigParamsWMA.html b/mix_audio/docs/reference/MixAudio/html/MixAudio-MixAudioConfigParamsWMA.html
new file mode 100644
index 0000000..efd14ca
--- /dev/null
+++ b/mix_audio/docs/reference/MixAudio/html/MixAudio-MixAudioConfigParamsWMA.html
@@ -0,0 +1,391 @@
+<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
+<html>
+<head>
+<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
+<title>MixAudioConfigParamsWMA</title>
+<meta name="generator" content="DocBook XSL Stylesheets V1.73.2">
+<link rel="start" href="index.html" title="MixAudio Reference Manual">
+<link rel="up" href="ch01.html" title="Mix Audio API">
+<link rel="prev" href="MixAudio-MixAudioConfigParamsMP3.html" title="MixAudioConfigParamsMP3">
+<link rel="next" href="MixAudio-MixAudioConfigParams.html" title="MixAudioConfigParams">
+<meta name="generator" content="GTK-Doc V1.11 (XML mode)">
+<link rel="stylesheet" href="style.css" type="text/css">
+<link rel="chapter" href="ch01.html" title="Mix Audio API">
+<link rel="chapter" href="tree-hierarchy.html" title="Object Hierarchy">
+<link rel="index" href="api-index-full.html" title="API Index">
+</head>
+<body bgcolor="white" text="black" link="#0000FF" vlink="#840084" alink="#0000FF">
+<table class="navigation" id="top" width="100%" summary="Navigation header" cellpadding="2" cellspacing="2">
+<tr valign="middle">
+<td><a accesskey="p" href="MixAudio-MixAudioConfigParamsMP3.html"><img src="left.png" width="24" height="24" border="0" alt="Prev"></a></td>
+<td><a accesskey="u" href="ch01.html"><img src="up.png" width="24" height="24" border="0" alt="Up"></a></td>
+<td><a accesskey="h" href="index.html"><img src="home.png" width="24" height="24" border="0" alt="Home"></a></td>
+<th width="100%" align="center">MixAudio Reference Manual</th>
+<td><a accesskey="n" href="MixAudio-MixAudioConfigParams.html"><img src="right.png" width="24" height="24" border="0" alt="Next"></a></td>
+</tr>
+<tr><td colspan="5" class="shortcuts">
+<a href="#MixAudio-MixAudioConfigParamsWMA.synopsis" class="shortcut">Top</a>
+  | 
+ <a href="#MixAudio-MixAudioConfigParamsWMA.description" class="shortcut">Description</a>
+</td></tr>
+</table>
+<div class="refentry" lang="en">
+<a name="MixAudio-MixAudioConfigParamsWMA"></a><div class="titlepage"></div>
+<div class="refnamediv"><table width="100%"><tr>
+<td valign="top">
+<h2><span class="refentrytitle"><a name="MixAudio-MixAudioConfigParamsWMA.top_of_page"></a>MixAudioConfigParamsWMA</span></h2>
+<p>MixAudioConfigParamsWMA — Audio parameters for WMA audio.</p>
+</td>
+<td valign="top" align="right"></td>
+</tr></table></div>
+<div class="refsynopsisdiv">
+<a name="MixAudio-MixAudioConfigParamsWMA.synopsis"></a><h2>Synopsis</h2>
+<pre class="synopsis">
+
+#include &lt;mixacpwma.h&gt;
+
+enum <a class="link" href="MixAudio-MixAudioConfigParamsWMA.html#MixAudioWMAVersion" title="enum MixAudioWMAVersion">MixAudioWMAVersion</a>;
+ <a class="link" href="MixAudio-MixAudioConfigParamsWMA.html#MixAudioConfigParamsWMA" title="MixAudioConfigParamsWMA">MixAudioConfigParamsWMA</a>;
+<a class="link" href="MixAudio-MixAudioConfigParamsWMA.html#MixAudioConfigParamsWMA" title="MixAudioConfigParamsWMA">MixAudioConfigParamsWMA</a> * <a class="link" href="MixAudio-MixAudioConfigParamsWMA.html#mix-acp-wma-new" title="mix_acp_wma_new ()">mix_acp_wma_new</a> (void);
+<a class="link" href="MixAudio-MixAudioConfigParamsWMA.html#MixAudioConfigParamsWMA" title="MixAudioConfigParamsWMA">MixAudioConfigParamsWMA</a> * <a class="link" href="MixAudio-MixAudioConfigParamsWMA.html#mix-acp-wma-ref" title="mix_acp_wma_ref ()">mix_acp_wma_ref</a> (<a class="link" href="MixAudio-MixAudioConfigParamsWMA.html#MixAudioConfigParamsWMA" title="MixAudioConfigParamsWMA">MixAudioConfigParamsWMA</a> *mix);
+#define <a class="link" href="MixAudio-MixAudioConfigParamsWMA.html#mix-acp-wma-unref" title="mix_acp_wma_unref()">mix_acp_wma_unref</a> (obj)
+#define <a class="link" href="MixAudio-MixAudioConfigParamsWMA.html#MIX-ACP-WMA-CHANNEL-MASK--CAPS" title="MIX_ACP_WMA_CHANNEL_MASK()">MIX_ACP_WMA_CHANNEL_MASK</a> (obj)
+#define <a class="link" href="MixAudio-MixAudioConfigParamsWMA.html#MIX-ACP-WMA-FORMAT-TAG--CAPS" title="MIX_ACP_WMA_FORMAT_TAG()">MIX_ACP_WMA_FORMAT_TAG</a> (obj)
+#define <a class="link" href="MixAudio-MixAudioConfigParamsWMA.html#MIX-ACP-WMA-BLOCK-ALIGN--CAPS" title="MIX_ACP_WMA_BLOCK_ALIGN()">MIX_ACP_WMA_BLOCK_ALIGN</a> (obj)
+#define <a class="link" href="MixAudio-MixAudioConfigParamsWMA.html#MIX-ACP-WMA-ENCODE-OPT--CAPS" title="MIX_ACP_WMA_ENCODE_OPT()">MIX_ACP_WMA_ENCODE_OPT</a> (obj)
+#define <a class="link" href="MixAudio-MixAudioConfigParamsWMA.html#MIX-ACP-WMA-PCM-BIT-WIDTH--CAPS" title="MIX_ACP_WMA_PCM_BIT_WIDTH()">MIX_ACP_WMA_PCM_BIT_WIDTH</a> (obj)
+<a class="link" href="MixAudio-MixAudioConfigParamsWMA.html#MixAudioWMAVersion" title="enum MixAudioWMAVersion">MixAudioWMAVersion</a> <a class="link" href="MixAudio-MixAudioConfigParamsWMA.html#mix-acp-wma-get-version" title="mix_acp_wma_get_version ()">mix_acp_wma_get_version</a> (<a class="link" href="MixAudio-MixAudioConfigParamsWMA.html#MixAudioConfigParamsWMA" title="MixAudioConfigParamsWMA">MixAudioConfigParamsWMA</a> *obj);
+MIX_RESULT <a class="link" href="MixAudio-MixAudioConfigParamsWMA.html#mix-acp-wma-set-version" title="mix_acp_wma_set_version ()">mix_acp_wma_set_version</a> (<a class="link" href="MixAudio-MixAudioConfigParamsWMA.html#MixAudioConfigParamsWMA" title="MixAudioConfigParamsWMA">MixAudioConfigParamsWMA</a> *obj,
+ <a class="link" href="MixAudio-MixAudioConfigParamsWMA.html#MixAudioWMAVersion" title="enum MixAudioWMAVersion">MixAudioWMAVersion</a> ver);
+</pre>
+</div>
+<div class="refsect1" lang="en">
+<a name="MixAudio-MixAudioConfigParamsWMA.description"></a><h2>Description</h2>
+<p>
+A data object which stores audio specific parameters for WMA.
+</p>
+<p>
+In Moorestown, only WMA2 is supported.
+</p>
+<p>
+Additional parameters must be set in the parent object <a class="link" href="MixAudio-MixAudioConfigParams.html#MixAudioConfigParams" title="MixAudioConfigParams"><span class="type">MixAudioConfigParams</span></a></p>
+</div>
+<div class="refsect1" lang="en">
+<a name="MixAudio-MixAudioConfigParamsWMA.details"></a><h2>Details</h2>
+<div class="refsect2" lang="en">
+<a name="MixAudioWMAVersion"></a><h3>enum MixAudioWMAVersion</h3>
+<pre class="programlisting">typedef enum {
+ MIX_AUDIO_WMA_VUNKNOWN,
+ MIX_AUDIO_WMA_V9,
+ MIX_AUDIO_WMA_V10,
+ MIX_AUDIO_WMA_V10P,
+ MIX_AUDIO_WMA_LAST
+} MixAudioWMAVersion;
+</pre>
+<p>
+WMA version.</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody>
+<tr>
+<td><p><a name="MIX-AUDIO-WMA-VUNKNOWN--CAPS"></a><span class="term"><code class="literal">MIX_AUDIO_WMA_VUNKNOWN</code></span></p></td>
+<td> WMA version undefined.
+</td>
+</tr>
+<tr>
+<td><p><a name="MIX-AUDIO-WMA-V9--CAPS"></a><span class="term"><code class="literal">MIX_AUDIO_WMA_V9</code></span></p></td>
+<td> WMA 9
+</td>
+</tr>
+<tr>
+<td><p><a name="MIX-AUDIO-WMA-V10--CAPS"></a><span class="term"><code class="literal">MIX_AUDIO_WMA_V10</code></span></p></td>
+<td> <span class="emphasis"><em>Not Supported</em></span> WMA 10
+</td>
+</tr>
+<tr>
+<td><p><a name="MIX-AUDIO-WMA-V10P--CAPS"></a><span class="term"><code class="literal">MIX_AUDIO_WMA_V10P</code></span></p></td>
+<td> <span class="emphasis"><em>Not Supported</em></span> WMA 10 Pro
+</td>
+</tr>
+<tr>
+<td><p><a name="MIX-AUDIO-WMA-LAST--CAPS"></a><span class="term"><code class="literal">MIX_AUDIO_WMA_LAST</code></span></p></td>
+<td> last entry.
+</td>
+</tr>
+</tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="MixAudioConfigParamsWMA"></a><h3>MixAudioConfigParamsWMA</h3>
+<pre class="programlisting">typedef struct {
+ MixAudioConfigParams parent;
+
+ /* Audio Format Parameters */
+ guint32 channel_mask;
+ guint16 format_tag;
+ guint16 block_align;
+ guint16 wma_encode_opt;/* Encoder option */
+ guint8 pcm_bit_width; /* source pcm bit width */
+ MixAudioWMAVersion wma_version;
+} MixAudioConfigParamsWMA;
+</pre>
+<p>
+MI-X Audio Parameter object</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody>
+<tr>
+<td><p><span class="term"><a class="link" href="MixAudio-MixAudioConfigParams.html#MixAudioConfigParams" title="MixAudioConfigParams">MixAudioConfigParams</a> <em class="structfield"><code>parent</code></em>;</span></p></td>
+<td> parent.
+</td>
+</tr>
+<tr>
+<td><p><span class="term">guint32 <em class="structfield"><code>channel_mask</code></em>;</span></p></td>
+<td> Channel Mask. See <a class="link" href="MixAudio-MixAudioConfigParamsWMA.html#MIX-ACP-WMA-CHANNEL-MASK--CAPS" title="MIX_ACP_WMA_CHANNEL_MASK()"><span class="type">MIX_ACP_WMA_CHANNEL_MASK</span></a>
+</td>
+</tr>
+<tr>
+<td><p><span class="term">guint16 <em class="structfield"><code>format_tag</code></em>;</span></p></td>
+<td> Format tag. See <a class="link" href="MixAudio-MixAudioConfigParamsWMA.html#MIX-ACP-WMA-FORMAT-TAG--CAPS" title="MIX_ACP_WMA_FORMAT_TAG()"><span class="type">MIX_ACP_WMA_FORMAT_TAG</span></a>
+</td>
+</tr>
+<tr>
+<td><p><span class="term">guint16 <em class="structfield"><code>block_align</code></em>;</span></p></td>
+<td></td>
+</tr>
+<tr>
+<td><p><span class="term">guint16 <em class="structfield"><code>wma_encode_opt</code></em>;</span></p></td>
+<td> Encoder option. See <a class="link" href="MixAudio-MixAudioConfigParamsWMA.html#MIX-ACP-WMA-ENCODE-OPT--CAPS" title="MIX_ACP_WMA_ENCODE_OPT()"><span class="type">MIX_ACP_WMA_ENCODE_OPT</span></a>
+</td>
+</tr>
+<tr>
+<td><p><span class="term">guint8 <em class="structfield"><code>pcm_bit_width</code></em>;</span></p></td>
+<td> Source pcm bit width. See <a class="link" href="MixAudio-MixAudioConfigParamsWMA.html#MIX-ACP-WMA-PCM-BIT-WIDTH--CAPS" title="MIX_ACP_WMA_PCM_BIT_WIDTH()"><span class="type">MIX_ACP_WMA_PCM_BIT_WIDTH</span></a>
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><a class="link" href="MixAudio-MixAudioConfigParamsWMA.html#MixAudioWMAVersion" title="enum MixAudioWMAVersion">MixAudioWMAVersion</a> <em class="structfield"><code>wma_version</code></em>;</span></p></td>
+<td> WMA version. See <a class="link" href="MixAudio-MixAudioConfigParamsWMA.html#mix-acp-wma-set-version" title="mix_acp_wma_set_version ()"><span class="type">mix_acp_wma_set_version</span></a>
+</td>
+</tr>
+</tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="mix-acp-wma-new"></a><h3>mix_acp_wma_new ()</h3>
+<pre class="programlisting"><a class="link" href="MixAudio-MixAudioConfigParamsWMA.html#MixAudioConfigParamsWMA" title="MixAudioConfigParamsWMA">MixAudioConfigParamsWMA</a> * mix_acp_wma_new (void);</pre>
+<p>
+Use this method to create new instance of <a class="link" href="MixAudio-MixAudioConfigParamsWMA.html#MixAudioConfigParamsWMA" title="MixAudioConfigParamsWMA"><span class="type">MixAudioConfigParamsWMA</span></a></p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody><tr>
+<td><p><span class="term"><em class="parameter"><code>returns</code></em> :</span></p></td>
+<td> A newly allocated instance of <a class="link" href="MixAudio-MixAudioConfigParamsWMA.html#MixAudioConfigParamsWMA" title="MixAudioConfigParamsWMA"><span class="type">MixAudioConfigParamsWMA</span></a>
+</td>
+</tr></tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="mix-acp-wma-ref"></a><h3>mix_acp_wma_ref ()</h3>
+<pre class="programlisting"><a class="link" href="MixAudio-MixAudioConfigParamsWMA.html#MixAudioConfigParamsWMA" title="MixAudioConfigParamsWMA">MixAudioConfigParamsWMA</a> * mix_acp_wma_ref (<a class="link" href="MixAudio-MixAudioConfigParamsWMA.html#MixAudioConfigParamsWMA" title="MixAudioConfigParamsWMA">MixAudioConfigParamsWMA</a> *mix);</pre>
+<p>
+Add reference count.</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>mix</code></em> :</span></p></td>
+<td> object to add reference
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>returns</code></em> :</span></p></td>
+<td> the MixAudioConfigParamsWMA instance where reference count has been increased.
+</td>
+</tr>
+</tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="mix-acp-wma-unref"></a><h3>mix_acp_wma_unref()</h3>
+<pre class="programlisting">#define mix_acp_wma_unref(obj) mix_params_unref(MIX_PARAMS(obj))
+</pre>
+<p>
+Decrement reference count of the object.</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody><tr>
+<td><p><span class="term"><em class="parameter"><code>obj</code></em> :</span></p></td>
+<td> object to unref.
+</td>
+</tr></tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="MIX-ACP-WMA-CHANNEL-MASK--CAPS"></a><h3>MIX_ACP_WMA_CHANNEL_MASK()</h3>
+<pre class="programlisting">#define MIX_ACP_WMA_CHANNEL_MASK(obj) (MIX_AUDIOCONFIGPARAMSWMA(obj)-&gt;channel_mask)
+</pre>
+<p>
+MixAudioConfigParamsWMA.channel_mask accessor.
+</p>
+<p>
+Channel mask must be one of the following:
+</p>
+<p>
+4: For single (1) channel output.
+</p>
+<p>
+3: For stereo (2) channels output.
+</p>
+<p>
+Only 1 or 2 output channels are supported.</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody><tr>
+<td><p><span class="term"><em class="parameter"><code>obj</code></em> :</span></p></td>
+<td> <a class="link" href="MixAudio-MixAudioConfigParamsWMA.html#MixAudioConfigParamsWMA" title="MixAudioConfigParamsWMA"><span class="type">MixAudioConfigParamsWMA</span></a> object
+</td>
+</tr></tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="MIX-ACP-WMA-FORMAT-TAG--CAPS"></a><h3>MIX_ACP_WMA_FORMAT_TAG()</h3>
+<pre class="programlisting">#define MIX_ACP_WMA_FORMAT_TAG(obj) (MIX_AUDIOCONFIGPARAMSWMA(obj)-&gt;format_tag)
+</pre>
+<p>
+MixAudioConfigParamsWMA.format_tag accessor.
+</p>
+<p>
+<em><span class="remark">In Moorestown, only value 0x0161 combined with use of <a class="link" href="MixAudio-MixAudioConfigParamsWMA.html#MIX-AUDIO-WMA-V9--CAPS"><span class="type">MIX_AUDIO_WMA_V9</span></a> is supported.</span></em></p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody><tr>
+<td><p><span class="term"><em class="parameter"><code>obj</code></em> :</span></p></td>
+<td> <a class="link" href="MixAudio-MixAudioConfigParamsWMA.html#MixAudioConfigParamsWMA" title="MixAudioConfigParamsWMA"><span class="type">MixAudioConfigParamsWMA</span></a> object
+</td>
+</tr></tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="MIX-ACP-WMA-BLOCK-ALIGN--CAPS"></a><h3>MIX_ACP_WMA_BLOCK_ALIGN()</h3>
+<pre class="programlisting">#define MIX_ACP_WMA_BLOCK_ALIGN(obj) (MIX_AUDIOCONFIGPARAMSWMA(obj)-&gt;block_align)
+</pre>
+<p>
+MixAudioConfigParamsWMA.block_align accessor.
+</p>
+<p>
+Block alignment indicates packet size. Available from ASF Header.</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody><tr>
+<td><p><span class="term"><em class="parameter"><code>obj</code></em> :</span></p></td>
+<td> <a class="link" href="MixAudio-MixAudioConfigParamsWMA.html#MixAudioConfigParamsWMA" title="MixAudioConfigParamsWMA"><span class="type">MixAudioConfigParamsWMA</span></a> object
+</td>
+</tr></tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="MIX-ACP-WMA-ENCODE-OPT--CAPS"></a><h3>MIX_ACP_WMA_ENCODE_OPT()</h3>
+<pre class="programlisting">#define MIX_ACP_WMA_ENCODE_OPT(obj) (MIX_AUDIOCONFIGPARAMSWMA(obj)-&gt;wma_encode_opt)
+</pre>
+<p>
+MixAudioConfigParamsWMA.wma_encode_opt accessor.
+</p>
+<p>
+Encoder option available from ASF header.</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody><tr>
+<td><p><span class="term"><em class="parameter"><code>obj</code></em> :</span></p></td>
+<td> <a class="link" href="MixAudio-MixAudioConfigParamsWMA.html#MixAudioConfigParamsWMA" title="MixAudioConfigParamsWMA"><span class="type">MixAudioConfigParamsWMA</span></a> object
+</td>
+</tr></tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="MIX-ACP-WMA-PCM-BIT-WIDTH--CAPS"></a><h3>MIX_ACP_WMA_PCM_BIT_WIDTH()</h3>
+<pre class="programlisting">#define MIX_ACP_WMA_PCM_BIT_WIDTH(obj) (MIX_AUDIOCONFIGPARAMSWMA(obj)-&gt;pcm_bit_width)
+</pre>
+<p>
+MixAudioConfigParamsWMA.pcm_bit_width accessor.
+</p>
+<p>
+Source pcm bit width available from ASF Header.</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody><tr>
+<td><p><span class="term"><em class="parameter"><code>obj</code></em> :</span></p></td>
+<td> <a class="link" href="MixAudio-MixAudioConfigParamsWMA.html#MixAudioConfigParamsWMA" title="MixAudioConfigParamsWMA"><span class="type">MixAudioConfigParamsWMA</span></a> object
+</td>
+</tr></tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="mix-acp-wma-get-version"></a><h3>mix_acp_wma_get_version ()</h3>
+<pre class="programlisting"><a class="link" href="MixAudio-MixAudioConfigParamsWMA.html#MixAudioWMAVersion" title="enum MixAudioWMAVersion">MixAudioWMAVersion</a> mix_acp_wma_get_version (<a class="link" href="MixAudio-MixAudioConfigParamsWMA.html#MixAudioConfigParamsWMA" title="MixAudioConfigParamsWMA">MixAudioConfigParamsWMA</a> *obj);</pre>
+<p>
+Get WMA Version.</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>obj</code></em> :</span></p></td>
+<td> <a class="link" href="MixAudio-MixAudioConfigParamsWMA.html#MixAudioConfigParamsWMA" title="MixAudioConfigParamsWMA"><span class="type">MixAudioConfigParamsWMA</span></a> object
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>returns</code></em> :</span></p></td>
+<td> MixAudioWMAVersion
+</td>
+</tr>
+</tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="mix-acp-wma-set-version"></a><h3>mix_acp_wma_set_version ()</h3>
+<pre class="programlisting">MIX_RESULT mix_acp_wma_set_version (<a class="link" href="MixAudio-MixAudioConfigParamsWMA.html#MixAudioConfigParamsWMA" title="MixAudioConfigParamsWMA">MixAudioConfigParamsWMA</a> *obj,
+ <a class="link" href="MixAudio-MixAudioConfigParamsWMA.html#MixAudioWMAVersion" title="enum MixAudioWMAVersion">MixAudioWMAVersion</a> ver);</pre>
+<p>
+Set WMA Version.
+</p>
+<p>
+<em><span class="remark">In Moorestown, only <a class="link" href="MixAudio-MixAudioConfigParamsWMA.html#MIX-AUDIO-WMA-V9--CAPS"><span class="type">MIX_AUDIO_WMA_V9</span></a> is supported</span></em></p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>obj</code></em> :</span></p></td>
+<td> <a class="link" href="MixAudio-MixAudioConfigParamsWMA.html#MixAudioConfigParamsWMA" title="MixAudioConfigParamsWMA"><span class="type">MixAudioConfigParamsWMA</span></a> object
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>ver</code></em> :</span></p></td>
+<td> MixAudioWMAVersion to set.
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>returns</code></em> :</span></p></td>
+<td> MIX_RESULT.
+</td>
+</tr>
+</tbody>
+</table></div>
+</div>
+</div>
+</div>
+<div class="footer">
+<hr>
+ Generated by GTK-Doc V1.11</div>
+</body>
+</html>
diff --git a/mix_audio/docs/reference/MixAudio/html/MixAudio-MixAudioInitParams.html b/mix_audio/docs/reference/MixAudio/html/MixAudio-MixAudioInitParams.html
new file mode 100644
index 0000000..5aa7e45
--- /dev/null
+++ b/mix_audio/docs/reference/MixAudio/html/MixAudio-MixAudioInitParams.html
@@ -0,0 +1,139 @@
+<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
+<html>
+<head>
+<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
+<title>MixAudioInitParams</title>
+<meta name="generator" content="DocBook XSL Stylesheets V1.73.2">
+<link rel="start" href="index.html" title="MixAudio Reference Manual">
+<link rel="up" href="ch01.html" title="Mix Audio API">
+<link rel="prev" href="MixAudio-MixAudioConfigParams.html" title="MixAudioConfigParams">
+<link rel="next" href="MixAudio.html" title="MixAudio">
+<meta name="generator" content="GTK-Doc V1.11 (XML mode)">
+<link rel="stylesheet" href="style.css" type="text/css">
+<link rel="chapter" href="ch01.html" title="Mix Audio API">
+<link rel="chapter" href="tree-hierarchy.html" title="Object Hierarchy">
+<link rel="index" href="api-index-full.html" title="API Index">
+</head>
+<body bgcolor="white" text="black" link="#0000FF" vlink="#840084" alink="#0000FF">
+<table class="navigation" id="top" width="100%" summary="Navigation header" cellpadding="2" cellspacing="2">
+<tr valign="middle">
+<td><a accesskey="p" href="MixAudio-MixAudioConfigParams.html"><img src="left.png" width="24" height="24" border="0" alt="Prev"></a></td>
+<td><a accesskey="u" href="ch01.html"><img src="up.png" width="24" height="24" border="0" alt="Up"></a></td>
+<td><a accesskey="h" href="index.html"><img src="home.png" width="24" height="24" border="0" alt="Home"></a></td>
+<th width="100%" align="center">MixAudio Reference Manual</th>
+<td><a accesskey="n" href="MixAudio.html"><img src="right.png" width="24" height="24" border="0" alt="Next"></a></td>
+</tr>
+<tr><td colspan="5" class="shortcuts">
+<a href="#MixAudio-MixAudioInitParams.synopsis" class="shortcut">Top</a>
+  | 
+ <a href="#MixAudio-MixAudioInitParams.description" class="shortcut">Description</a>
+</td></tr>
+</table>
+<div class="refentry" lang="en">
+<a name="MixAudio-MixAudioInitParams"></a><div class="titlepage"></div>
+<div class="refnamediv"><table width="100%"><tr>
+<td valign="top">
+<h2><span class="refentrytitle"><a name="MixAudio-MixAudioInitParams.top_of_page"></a>MixAudioInitParams</span></h2>
+<p>MixAudioInitParams — Initialization parameters object.</p>
+</td>
+<td valign="top" align="right"></td>
+</tr></table></div>
+<div class="refsynopsisdiv">
+<a name="MixAudio-MixAudioInitParams.synopsis"></a><h2>Synopsis</h2>
+<pre class="synopsis">
+
+#include &lt;mixacp.h&gt;
+
+ <a class="link" href="MixAudio-MixAudioInitParams.html#MixAudioInitParams" title="MixAudioInitParams">MixAudioInitParams</a>;
+<a class="link" href="MixAudio-MixAudioInitParams.html#MixAudioInitParams" title="MixAudioInitParams">MixAudioInitParams</a> * <a class="link" href="MixAudio-MixAudioInitParams.html#mix-aip-new" title="mix_aip_new ()">mix_aip_new</a> (void);
+<a class="link" href="MixAudio-MixAudioInitParams.html#MixAudioInitParams" title="MixAudioInitParams">MixAudioInitParams</a> * <a class="link" href="MixAudio-MixAudioInitParams.html#mix-aip-ref" title="mix_aip_ref ()">mix_aip_ref</a> (<a class="link" href="MixAudio-MixAudioInitParams.html#MixAudioInitParams" title="MixAudioInitParams">MixAudioInitParams</a> *mix);
+#define <a class="link" href="MixAudio-MixAudioInitParams.html#mix-aip-unref" title="mix_aip_unref()">mix_aip_unref</a> (obj)
+</pre>
+</div>
+<div class="refsect1" lang="en">
+<a name="MixAudio-MixAudioInitParams.description"></a><h2>Description</h2>
+<p>
+A data object which stores initialization specific parameters.
+</p>
+<p>
+Not Implemented in Moorestown.</p>
+</div>
+<div class="refsect1" lang="en">
+<a name="MixAudio-MixAudioInitParams.details"></a><h2>Details</h2>
+<div class="refsect2" lang="en">
+<a name="MixAudioInitParams"></a><h3>MixAudioInitParams</h3>
+<pre class="programlisting">typedef struct {
+ MixParams parent;
+} MixAudioInitParams;
+</pre>
+<p>
+<em class="parameter"><code>MixAudio</code></em> initialization parameter object.</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody><tr>
+<td><p><span class="term">MixParams <em class="structfield"><code>parent</code></em>;</span></p></td>
+<td> Parent.
+</td>
+</tr></tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="mix-aip-new"></a><h3>mix_aip_new ()</h3>
+<pre class="programlisting"><a class="link" href="MixAudio-MixAudioInitParams.html#MixAudioInitParams" title="MixAudioInitParams">MixAudioInitParams</a> * mix_aip_new (void);</pre>
+<p>
+Use this method to create new instance of <a class="link" href="MixAudio-MixAudioInitParams.html#MixAudioInitParams" title="MixAudioInitParams"><span class="type">MixAudioInitParams</span></a></p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody><tr>
+<td><p><span class="term"><em class="parameter"><code>returns</code></em> :</span></p></td>
+<td> A newly allocated instance of <a class="link" href="MixAudio-MixAudioInitParams.html#MixAudioInitParams" title="MixAudioInitParams"><span class="type">MixAudioInitParams</span></a>
+</td>
+</tr></tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="mix-aip-ref"></a><h3>mix_aip_ref ()</h3>
+<pre class="programlisting"><a class="link" href="MixAudio-MixAudioInitParams.html#MixAudioInitParams" title="MixAudioInitParams">MixAudioInitParams</a> * mix_aip_ref (<a class="link" href="MixAudio-MixAudioInitParams.html#MixAudioInitParams" title="MixAudioInitParams">MixAudioInitParams</a> *mix);</pre>
+<p>
+Add reference count.</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>mix</code></em> :</span></p></td>
+<td> object to add reference
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>returns</code></em> :</span></p></td>
+<td> the MixAudioInitParams instance where reference count has been increased.
+</td>
+</tr>
+</tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="mix-aip-unref"></a><h3>mix_aip_unref()</h3>
+<pre class="programlisting">#define mix_aip_unref(obj) mix_params_unref(MIX_PARAMS(obj))
+</pre>
+<p>
+Decrement reference count of the object.</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody><tr>
+<td><p><span class="term"><em class="parameter"><code>obj</code></em> :</span></p></td>
+<td> object to unref.
+</td>
+</tr></tbody>
+</table></div>
+</div>
+</div>
+</div>
+<div class="footer">
+<hr>
+ Generated by GTK-Doc V1.11</div>
+</body>
+</html>
diff --git a/mix_audio/docs/reference/MixAudio/html/MixAudio-mixaudiotypes.html b/mix_audio/docs/reference/MixAudio/html/MixAudio-mixaudiotypes.html
new file mode 100644
index 0000000..7166107
--- /dev/null
+++ b/mix_audio/docs/reference/MixAudio/html/MixAudio-mixaudiotypes.html
@@ -0,0 +1,94 @@
+<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
+<html>
+<head>
+<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
+<title>Mix Audio Types</title>
+<meta name="generator" content="DocBook XSL Stylesheets V1.73.2">
+<link rel="start" href="index.html" title="MixAudio Reference Manual">
+<link rel="up" href="ch01.html" title="Mix Audio API">
+<link rel="prev" href="MixAudio.html" title="MixAudio">
+<link rel="next" href="tree-hierarchy.html" title="Object Hierarchy">
+<meta name="generator" content="GTK-Doc V1.11 (XML mode)">
+<link rel="stylesheet" href="style.css" type="text/css">
+<link rel="chapter" href="ch01.html" title="Mix Audio API">
+<link rel="chapter" href="tree-hierarchy.html" title="Object Hierarchy">
+<link rel="index" href="api-index-full.html" title="API Index">
+</head>
+<body bgcolor="white" text="black" link="#0000FF" vlink="#840084" alink="#0000FF">
+<table class="navigation" id="top" width="100%" summary="Navigation header" cellpadding="2" cellspacing="2">
+<tr valign="middle">
+<td><a accesskey="p" href="MixAudio.html"><img src="left.png" width="24" height="24" border="0" alt="Prev"></a></td>
+<td><a accesskey="u" href="ch01.html"><img src="up.png" width="24" height="24" border="0" alt="Up"></a></td>
+<td><a accesskey="h" href="index.html"><img src="home.png" width="24" height="24" border="0" alt="Home"></a></td>
+<th width="100%" align="center">MixAudio Reference Manual</th>
+<td><a accesskey="n" href="tree-hierarchy.html"><img src="right.png" width="24" height="24" border="0" alt="Next"></a></td>
+</tr>
+<tr><td colspan="5" class="shortcuts">
+<a href="#MixAudio-mixaudiotypes.synopsis" class="shortcut">Top</a>
+  | 
+ <a href="#MixAudio-mixaudiotypes.description" class="shortcut">Description</a>
+</td></tr>
+</table>
+<div class="refentry" lang="en">
+<a name="MixAudio-mixaudiotypes"></a><div class="titlepage"></div>
+<div class="refnamediv"><table width="100%"><tr>
+<td valign="top">
+<h2><span class="refentrytitle"><a name="MixAudio-mixaudiotypes.top_of_page"></a>Mix Audio Types</span></h2>
+<p>Mix Audio Types — Miscellaneous types used by <a class="link" href="MixAudio.html" title="MixAudio"><span class="type">MixAudio</span></a> API.</p>
+</td>
+<td valign="top" align="right"></td>
+</tr></table></div>
+<div class="refsynopsisdiv">
+<a name="MixAudio-mixaudiotypes.synopsis"></a><h2>Synopsis</h2>
+<pre class="synopsis">
+
+#include &lt;mixaudiotypes.h&gt;
+
+enum <a class="link" href="MixAudio-mixaudiotypes.html#MixAudioManager" title="enum MixAudioManager">MixAudioManager</a>;
+</pre>
+</div>
+<div class="refsect1" lang="en">
+<a name="MixAudio-mixaudiotypes.description"></a><h2>Description</h2>
+<p>
+Miscellaneous types used by <a class="link" href="MixAudio.html" title="MixAudio"><span class="type">MixAudio</span></a> API.</p>
+</div>
+<div class="refsect1" lang="en">
+<a name="MixAudio-mixaudiotypes.details"></a><h2>Details</h2>
+<div class="refsect2" lang="en">
+<a name="MixAudioManager"></a><h3>enum MixAudioManager</h3>
+<pre class="programlisting">typedef enum {
+ MIX_AUDIOMANAGER_NONE = 0,
+ MIX_AUDIOMANAGER_INTELAUDIOMANAGER,
+ MIX_AUDIOMANAGER_LAST
+} MixAudioManager;
+</pre>
+<p>
+Audio Manager enumerations.</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody>
+<tr>
+<td><p><a name="MIX-AUDIOMANAGER-NONE--CAPS"></a><span class="term"><code class="literal">MIX_AUDIOMANAGER_NONE</code></span></p></td>
+<td> No Audio Manager.
+</td>
+</tr>
+<tr>
+<td><p><a name="MIX-AUDIOMANAGER-INTELAUDIOMANAGER--CAPS"></a><span class="term"><code class="literal">MIX_AUDIOMANAGER_INTELAUDIOMANAGER</code></span></p></td>
+<td> Intel Audio Manager.
+</td>
+</tr>
+<tr>
+<td><p><a name="MIX-AUDIOMANAGER-LAST--CAPS"></a><span class="term"><code class="literal">MIX_AUDIOMANAGER_LAST</code></span></p></td>
+<td> Last index.
+</td>
+</tr>
+</tbody>
+</table></div>
+</div>
+</div>
+</div>
+<div class="footer">
+<hr>
+ Generated by GTK-Doc V1.11</div>
+</body>
+</html>
diff --git a/mix_audio/docs/reference/MixAudio/html/MixAudio.devhelp b/mix_audio/docs/reference/MixAudio/html/MixAudio.devhelp
new file mode 100644
index 0000000..9063304
--- /dev/null
+++ b/mix_audio/docs/reference/MixAudio/html/MixAudio.devhelp
@@ -0,0 +1,124 @@
+<?xml version="1.0" encoding="utf-8" standalone="no"?>
+<!DOCTYPE book PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "">
+<book xmlns="http://www.devhelp.net/book" title="MixAudio Reference Manual" link="index.html" author="" name="MixAudio">
+ <chapters>
+ <sub name="Mix Audio API" link="ch01.html">
+ <sub name="MixAudioConfigParamsAAC" link="MixAudio-MixAudioConfigParamsAAC.html"/>
+ <sub name="MixAudioConfigParamsMP3" link="MixAudio-MixAudioConfigParamsMP3.html"/>
+ <sub name="MixAudioConfigParamsWMA" link="MixAudio-MixAudioConfigParamsWMA.html"/>
+ <sub name="MixAudioConfigParams" link="MixAudio-MixAudioConfigParams.html"/>
+ <sub name="MixAudioInitParams" link="MixAudio-MixAudioInitParams.html"/>
+ <sub name="MixAudio" link="MixAudio.html"/>
+ <sub name="Mix Audio Types" link="MixAudio-mixaudiotypes.html"/>
+ </sub>
+ <sub name="Object Hierarchy" link="tree-hierarchy.html"/>
+ <sub name="API Index" link="api-index-full.html"/>
+ </chapters>
+ <functions>
+ <function name="enum MixAACBitrateType" link="MixAudio-MixAudioConfigParamsAAC.html#MixAACBitrateType"/>
+ <function name="enum MixAACBitstreamFormt" link="MixAudio-MixAudioConfigParamsAAC.html#MixAACBitstreamFormt"/>
+ <function name="enum MixAACProfile" link="MixAudio-MixAudioConfigParamsAAC.html#MixAACProfile"/>
+ <function name="enum MixAACMpegID" link="MixAudio-MixAudioConfigParamsAAC.html#MixAACMpegID"/>
+ <function name="MixAudioConfigParamsAAC" link="MixAudio-MixAudioConfigParamsAAC.html#MixAudioConfigParamsAAC"/>
+ <function name="mix_acp_aac_new ()" link="MixAudio-MixAudioConfigParamsAAC.html#mix-acp-aac-new"/>
+ <function name="mix_acp_aac_ref ()" link="MixAudio-MixAudioConfigParamsAAC.html#mix-acp-aac-ref"/>
+ <function name="mix_acp_aac_unref()" link="MixAudio-MixAudioConfigParamsAAC.html#mix-acp-aac-unref"/>
+ <function name="mix_acp_aac_set_mpeg_id ()" link="MixAudio-MixAudioConfigParamsAAC.html#mix-acp-aac-set-mpeg-id"/>
+ <function name="mix_acp_aac_get_mpeg_id ()" link="MixAudio-MixAudioConfigParamsAAC.html#mix-acp-aac-get-mpeg-id"/>
+ <function name="MIX_ACP_AAC_CRC()" link="MixAudio-MixAudioConfigParamsAAC.html#MIX-ACP-AAC-CRC--CAPS"/>
+ <function name="mix_acp_aac_set_aot ()" link="MixAudio-MixAudioConfigParamsAAC.html#mix-acp-aac-set-aot"/>
+ <function name="mix_acp_aac_get_aot ()" link="MixAudio-MixAudioConfigParamsAAC.html#mix-acp-aac-get-aot"/>
+ <function name="MIX_ACP_AAC_SBR_FLAG()" link="MixAudio-MixAudioConfigParamsAAC.html#MIX-ACP-AAC-SBR-FLAG--CAPS"/>
+ <function name="MIX_ACP_AAC_PS_FLAG()" link="MixAudio-MixAudioConfigParamsAAC.html#MIX-ACP-AAC-PS-FLAG--CAPS"/>
+ <function name="MIX_ACP_AAC_PCE_FLAG()" link="MixAudio-MixAudioConfigParamsAAC.html#MIX-ACP-AAC-PCE-FLAG--CAPS"/>
+ <function name="MIX_ACP_AAC_SAMPLE_RATE()" link="MixAudio-MixAudioConfigParamsAAC.html#MIX-ACP-AAC-SAMPLE-RATE--CAPS"/>
+ <function name="MIX_ACP_AAC_CHANNELS()" link="MixAudio-MixAudioConfigParamsAAC.html#MIX-ACP-AAC-CHANNELS--CAPS"/>
+ <function name="mix_acp_aac_get_bit_stream_format ()" link="MixAudio-MixAudioConfigParamsAAC.html#mix-acp-aac-get-bit-stream-format"/>
+ <function name="mix_acp_aac_set_bit_stream_format ()" link="MixAudio-MixAudioConfigParamsAAC.html#mix-acp-aac-set-bit-stream-format"/>
+ <function name="mix_acp_aac_get_aac_profile ()" link="MixAudio-MixAudioConfigParamsAAC.html#mix-acp-aac-get-aac-profile"/>
+ <function name="mix_acp_aac_set_aac_profile ()" link="MixAudio-MixAudioConfigParamsAAC.html#mix-acp-aac-set-aac-profile"/>
+ <function name="mix_acp_aac_get_bit_rate_type ()" link="MixAudio-MixAudioConfigParamsAAC.html#mix-acp-aac-get-bit-rate-type"/>
+ <function name="mix_acp_aac_set_bit_rate_type ()" link="MixAudio-MixAudioConfigParamsAAC.html#mix-acp-aac-set-bit-rate-type"/>
+ <function name="MixAudioConfigParamsMP3" link="MixAudio-MixAudioConfigParamsMP3.html#MixAudioConfigParamsMP3"/>
+ <function name="mix_acp_mp3_new ()" link="MixAudio-MixAudioConfigParamsMP3.html#mix-acp-mp3-new"/>
+ <function name="mix_acp_mp3_ref ()" link="MixAudio-MixAudioConfigParamsMP3.html#mix-acp-mp3-ref"/>
+ <function name="mix_acp_mp3_unref()" link="MixAudio-MixAudioConfigParamsMP3.html#mix-acp-mp3-unref"/>
+ <function name="MIX_ACP_MP3_CRC()" link="MixAudio-MixAudioConfigParamsMP3.html#MIX-ACP-MP3-CRC--CAPS"/>
+ <function name="MIX_ACP_MP3_MPEG_FORMAT()" link="MixAudio-MixAudioConfigParamsMP3.html#MIX-ACP-MP3-MPEG-FORMAT--CAPS"/>
+ <function name="MIX_ACP_MP3_MPEG_LAYER()" link="MixAudio-MixAudioConfigParamsMP3.html#MIX-ACP-MP3-MPEG-LAYER--CAPS"/>
+ <function name="enum MixAudioWMAVersion" link="MixAudio-MixAudioConfigParamsWMA.html#MixAudioWMAVersion"/>
+ <function name="MixAudioConfigParamsWMA" link="MixAudio-MixAudioConfigParamsWMA.html#MixAudioConfigParamsWMA"/>
+ <function name="mix_acp_wma_new ()" link="MixAudio-MixAudioConfigParamsWMA.html#mix-acp-wma-new"/>
+ <function name="mix_acp_wma_ref ()" link="MixAudio-MixAudioConfigParamsWMA.html#mix-acp-wma-ref"/>
+ <function name="mix_acp_wma_unref()" link="MixAudio-MixAudioConfigParamsWMA.html#mix-acp-wma-unref"/>
+ <function name="MIX_ACP_WMA_CHANNEL_MASK()" link="MixAudio-MixAudioConfigParamsWMA.html#MIX-ACP-WMA-CHANNEL-MASK--CAPS"/>
+ <function name="MIX_ACP_WMA_FORMAT_TAG()" link="MixAudio-MixAudioConfigParamsWMA.html#MIX-ACP-WMA-FORMAT-TAG--CAPS"/>
+ <function name="MIX_ACP_WMA_BLOCK_ALIGN()" link="MixAudio-MixAudioConfigParamsWMA.html#MIX-ACP-WMA-BLOCK-ALIGN--CAPS"/>
+ <function name="MIX_ACP_WMA_ENCODE_OPT()" link="MixAudio-MixAudioConfigParamsWMA.html#MIX-ACP-WMA-ENCODE-OPT--CAPS"/>
+ <function name="MIX_ACP_WMA_PCM_BIT_WIDTH()" link="MixAudio-MixAudioConfigParamsWMA.html#MIX-ACP-WMA-PCM-BIT-WIDTH--CAPS"/>
+ <function name="mix_acp_wma_get_version ()" link="MixAudio-MixAudioConfigParamsWMA.html#mix-acp-wma-get-version"/>
+ <function name="mix_acp_wma_set_version ()" link="MixAudio-MixAudioConfigParamsWMA.html#mix-acp-wma-set-version"/>
+ <function name="enum MixACPOpAlign" link="MixAudio-MixAudioConfigParams.html#MixACPOpAlign"/>
+ <function name="enum MixACPBPSType" link="MixAudio-MixAudioConfigParams.html#MixACPBPSType"/>
+ <function name="enum MixDecodeMode" link="MixAudio-MixAudioConfigParams.html#MixDecodeMode"/>
+ <function name="MixAudioConfigParams" link="MixAudio-MixAudioConfigParams.html#MixAudioConfigParams"/>
+ <function name="mix_acp_new ()" link="MixAudio-MixAudioConfigParams.html#mix-acp-new"/>
+ <function name="mix_acp_ref ()" link="MixAudio-MixAudioConfigParams.html#mix-acp-ref"/>
+ <function name="mix_acp_unref()" link="MixAudio-MixAudioConfigParams.html#mix-acp-unref"/>
+ <function name="MIX_ACP_DECODEMODE()" link="MixAudio-MixAudioConfigParams.html#MIX-ACP-DECODEMODE--CAPS"/>
+ <function name="MIX_ACP_NUM_CHANNELS()" link="MixAudio-MixAudioConfigParams.html#MIX-ACP-NUM-CHANNELS--CAPS"/>
+ <function name="MIX_ACP_BITRATE()" link="MixAudio-MixAudioConfigParams.html#MIX-ACP-BITRATE--CAPS"/>
+ <function name="MIX_ACP_SAMPLE_FREQ()" link="MixAudio-MixAudioConfigParams.html#MIX-ACP-SAMPLE-FREQ--CAPS"/>
+ <function name="mix_acp_get_decodemode ()" link="MixAudio-MixAudioConfigParams.html#mix-acp-get-decodemode"/>
+ <function name="mix_acp_set_decodemode ()" link="MixAudio-MixAudioConfigParams.html#mix-acp-set-decodemode"/>
+ <function name="mix_acp_get_streamname ()" link="MixAudio-MixAudioConfigParams.html#mix-acp-get-streamname"/>
+ <function name="mix_acp_set_streamname ()" link="MixAudio-MixAudioConfigParams.html#mix-acp-set-streamname"/>
+ <function name="mix_acp_set_audio_manager ()" link="MixAudio-MixAudioConfigParams.html#mix-acp-set-audio-manager"/>
+ <function name="mix_acp_get_audio_manager ()" link="MixAudio-MixAudioConfigParams.html#mix-acp-get-audio-manager"/>
+ <function name="mix_acp_is_streamname_valid ()" link="MixAudio-MixAudioConfigParams.html#mix-acp-is-streamname-valid"/>
+ <function name="mix_acp_get_bps ()" link="MixAudio-MixAudioConfigParams.html#mix-acp-get-bps"/>
+ <function name="mix_acp_set_bps ()" link="MixAudio-MixAudioConfigParams.html#mix-acp-set-bps"/>
+ <function name="mix_acp_get_op_align ()" link="MixAudio-MixAudioConfigParams.html#mix-acp-get-op-align"/>
+ <function name="mix_acp_set_op_align ()" link="MixAudio-MixAudioConfigParams.html#mix-acp-set-op-align"/>
+ <function name="MixAudioInitParams" link="MixAudio-MixAudioInitParams.html#MixAudioInitParams"/>
+ <function name="mix_aip_new ()" link="MixAudio-MixAudioInitParams.html#mix-aip-new"/>
+ <function name="mix_aip_ref ()" link="MixAudio-MixAudioInitParams.html#mix-aip-ref"/>
+ <function name="mix_aip_unref()" link="MixAudio-MixAudioInitParams.html#mix-aip-unref"/>
+ <function name="enum MixStreamState" link="MixAudio.html#MixStreamState"/>
+ <function name="enum MixState" link="MixAudio.html#MixState"/>
+ <function name="enum MixCodecMode" link="MixAudio.html#MixCodecMode"/>
+ <function name="enum MixVolType" link="MixAudio.html#MixVolType"/>
+ <function name="enum MixVolRamp" link="MixAudio.html#MixVolRamp"/>
+ <function name="MixIOVec" link="MixAudio.html#MixIOVec"/>
+ <function name="enum MixDeviceState" link="MixAudio.html#MixDeviceState"/>
+ <function name="MixAudio" link="MixAudio.html#MixAudio-struct"/>
+ <function name="mix_audio_new ()" link="MixAudio.html#mix-audio-new"/>
+ <function name="mix_audio_ref ()" link="MixAudio.html#mix-audio-ref"/>
+ <function name="mix_audio_unref()" link="MixAudio.html#mix-audio-unref"/>
+ <function name="mix_audio_get_version ()" link="MixAudio.html#mix-audio-get-version"/>
+ <function name="mix_audio_initialize ()" link="MixAudio.html#mix-audio-initialize"/>
+ <function name="mix_audio_configure ()" link="MixAudio.html#mix-audio-configure"/>
+ <function name="mix_audio_decode ()" link="MixAudio.html#mix-audio-decode"/>
+ <function name="mix_audio_capture_encode ()" link="MixAudio.html#mix-audio-capture-encode"/>
+ <function name="mix_audio_start ()" link="MixAudio.html#mix-audio-start"/>
+ <function name="mix_audio_stop_drop ()" link="MixAudio.html#mix-audio-stop-drop"/>
+ <function name="mix_audio_stop_drain ()" link="MixAudio.html#mix-audio-stop-drain"/>
+ <function name="mix_audio_pause ()" link="MixAudio.html#mix-audio-pause"/>
+ <function name="mix_audio_resume ()" link="MixAudio.html#mix-audio-resume"/>
+ <function name="mix_audio_get_timestamp ()" link="MixAudio.html#mix-audio-get-timestamp"/>
+ <function name="mix_audio_set_mute ()" link="MixAudio.html#mix-audio-set-mute"/>
+ <function name="mix_audio_get_mute ()" link="MixAudio.html#mix-audio-get-mute"/>
+ <function name="mix_audio_get_max_vol ()" link="MixAudio.html#mix-audio-get-max-vol"/>
+ <function name="mix_audio_get_min_vol ()" link="MixAudio.html#mix-audio-get-min-vol"/>
+ <function name="mix_audio_get_volume ()" link="MixAudio.html#mix-audio-get-volume"/>
+ <function name="mix_audio_set_volume ()" link="MixAudio.html#mix-audio-set-volume"/>
+ <function name="mix_audio_deinitialize ()" link="MixAudio.html#mix-audio-deinitialize"/>
+ <function name="mix_audio_get_stream_state ()" link="MixAudio.html#mix-audio-get-stream-state"/>
+ <function name="mix_audio_get_state ()" link="MixAudio.html#mix-audio-get-state"/>
+ <function name="mix_audio_am_is_enabled ()" link="MixAudio.html#mix-audio-am-is-enabled"/>
+ <function name="mix_audio_is_am_available ()" link="MixAudio.html#mix-audio-is-am-available"/>
+ <function name="mix_audio_get_output_configuration ()" link="MixAudio.html#mix-audio-get-output-configuration"/>
+ <function name="mix_audio_get_stream_byte_decoded ()" link="MixAudio.html#mix-audio-get-stream-byte-decoded"/>
+ <function name="enum MixAudioManager" link="MixAudio-mixaudiotypes.html#MixAudioManager"/>
+ </functions>
+</book>
diff --git a/mix_audio/docs/reference/MixAudio/html/MixAudio.devhelp2 b/mix_audio/docs/reference/MixAudio/html/MixAudio.devhelp2
new file mode 100644
index 0000000..f9e0358
--- /dev/null
+++ b/mix_audio/docs/reference/MixAudio/html/MixAudio.devhelp2
@@ -0,0 +1,186 @@
+<?xml version="1.0" encoding="utf-8" standalone="no"?>
+<!DOCTYPE book PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "">
+<book xmlns="http://www.devhelp.net/book" title="MixAudio Reference Manual" link="index.html" author="" name="MixAudio" version="2" language="c">
+ <chapters>
+ <sub name="Mix Audio API" link="ch01.html">
+ <sub name="MixAudioConfigParamsAAC" link="MixAudio-MixAudioConfigParamsAAC.html"/>
+ <sub name="MixAudioConfigParamsMP3" link="MixAudio-MixAudioConfigParamsMP3.html"/>
+ <sub name="MixAudioConfigParamsWMA" link="MixAudio-MixAudioConfigParamsWMA.html"/>
+ <sub name="MixAudioConfigParams" link="MixAudio-MixAudioConfigParams.html"/>
+ <sub name="MixAudioInitParams" link="MixAudio-MixAudioInitParams.html"/>
+ <sub name="MixAudio" link="MixAudio.html"/>
+ <sub name="Mix Audio Types" link="MixAudio-mixaudiotypes.html"/>
+ </sub>
+ <sub name="Object Hierarchy" link="tree-hierarchy.html"/>
+ <sub name="API Index" link="api-index-full.html"/>
+ </chapters>
+ <functions>
+ <keyword type="enum" name="enum MixAACBitrateType" link="MixAudio-MixAudioConfigParamsAAC.html#MixAACBitrateType"/>
+ <keyword type="enum" name="enum MixAACBitstreamFormt" link="MixAudio-MixAudioConfigParamsAAC.html#MixAACBitstreamFormt"/>
+ <keyword type="enum" name="enum MixAACProfile" link="MixAudio-MixAudioConfigParamsAAC.html#MixAACProfile"/>
+ <keyword type="enum" name="enum MixAACMpegID" link="MixAudio-MixAudioConfigParamsAAC.html#MixAACMpegID"/>
+ <keyword type="struct" name="MixAudioConfigParamsAAC" link="MixAudio-MixAudioConfigParamsAAC.html#MixAudioConfigParamsAAC"/>
+ <keyword type="function" name="mix_acp_aac_new ()" link="MixAudio-MixAudioConfigParamsAAC.html#mix-acp-aac-new"/>
+ <keyword type="function" name="mix_acp_aac_ref ()" link="MixAudio-MixAudioConfigParamsAAC.html#mix-acp-aac-ref"/>
+ <keyword type="macro" name="mix_acp_aac_unref()" link="MixAudio-MixAudioConfigParamsAAC.html#mix-acp-aac-unref"/>
+ <keyword type="function" name="mix_acp_aac_set_mpeg_id ()" link="MixAudio-MixAudioConfigParamsAAC.html#mix-acp-aac-set-mpeg-id"/>
+ <keyword type="function" name="mix_acp_aac_get_mpeg_id ()" link="MixAudio-MixAudioConfigParamsAAC.html#mix-acp-aac-get-mpeg-id"/>
+ <keyword type="macro" name="MIX_ACP_AAC_CRC()" link="MixAudio-MixAudioConfigParamsAAC.html#MIX-ACP-AAC-CRC--CAPS"/>
+ <keyword type="function" name="mix_acp_aac_set_aot ()" link="MixAudio-MixAudioConfigParamsAAC.html#mix-acp-aac-set-aot"/>
+ <keyword type="function" name="mix_acp_aac_get_aot ()" link="MixAudio-MixAudioConfigParamsAAC.html#mix-acp-aac-get-aot"/>
+ <keyword type="macro" name="MIX_ACP_AAC_SBR_FLAG()" link="MixAudio-MixAudioConfigParamsAAC.html#MIX-ACP-AAC-SBR-FLAG--CAPS"/>
+ <keyword type="macro" name="MIX_ACP_AAC_PS_FLAG()" link="MixAudio-MixAudioConfigParamsAAC.html#MIX-ACP-AAC-PS-FLAG--CAPS"/>
+ <keyword type="macro" name="MIX_ACP_AAC_PCE_FLAG()" link="MixAudio-MixAudioConfigParamsAAC.html#MIX-ACP-AAC-PCE-FLAG--CAPS"/>
+ <keyword type="macro" name="MIX_ACP_AAC_SAMPLE_RATE()" link="MixAudio-MixAudioConfigParamsAAC.html#MIX-ACP-AAC-SAMPLE-RATE--CAPS"/>
+ <keyword type="macro" name="MIX_ACP_AAC_CHANNELS()" link="MixAudio-MixAudioConfigParamsAAC.html#MIX-ACP-AAC-CHANNELS--CAPS"/>
+ <keyword type="function" name="mix_acp_aac_get_bit_stream_format ()" link="MixAudio-MixAudioConfigParamsAAC.html#mix-acp-aac-get-bit-stream-format"/>
+ <keyword type="function" name="mix_acp_aac_set_bit_stream_format ()" link="MixAudio-MixAudioConfigParamsAAC.html#mix-acp-aac-set-bit-stream-format"/>
+ <keyword type="function" name="mix_acp_aac_get_aac_profile ()" link="MixAudio-MixAudioConfigParamsAAC.html#mix-acp-aac-get-aac-profile"/>
+ <keyword type="function" name="mix_acp_aac_set_aac_profile ()" link="MixAudio-MixAudioConfigParamsAAC.html#mix-acp-aac-set-aac-profile"/>
+ <keyword type="function" name="mix_acp_aac_get_bit_rate_type ()" link="MixAudio-MixAudioConfigParamsAAC.html#mix-acp-aac-get-bit-rate-type"/>
+ <keyword type="function" name="mix_acp_aac_set_bit_rate_type ()" link="MixAudio-MixAudioConfigParamsAAC.html#mix-acp-aac-set-bit-rate-type"/>
+ <keyword type="struct" name="MixAudioConfigParamsMP3" link="MixAudio-MixAudioConfigParamsMP3.html#MixAudioConfigParamsMP3"/>
+ <keyword type="function" name="mix_acp_mp3_new ()" link="MixAudio-MixAudioConfigParamsMP3.html#mix-acp-mp3-new"/>
+ <keyword type="function" name="mix_acp_mp3_ref ()" link="MixAudio-MixAudioConfigParamsMP3.html#mix-acp-mp3-ref"/>
+ <keyword type="macro" name="mix_acp_mp3_unref()" link="MixAudio-MixAudioConfigParamsMP3.html#mix-acp-mp3-unref"/>
+ <keyword type="macro" name="MIX_ACP_MP3_CRC()" link="MixAudio-MixAudioConfigParamsMP3.html#MIX-ACP-MP3-CRC--CAPS"/>
+ <keyword type="macro" name="MIX_ACP_MP3_MPEG_FORMAT()" link="MixAudio-MixAudioConfigParamsMP3.html#MIX-ACP-MP3-MPEG-FORMAT--CAPS"/>
+ <keyword type="macro" name="MIX_ACP_MP3_MPEG_LAYER()" link="MixAudio-MixAudioConfigParamsMP3.html#MIX-ACP-MP3-MPEG-LAYER--CAPS"/>
+ <keyword type="enum" name="enum MixAudioWMAVersion" link="MixAudio-MixAudioConfigParamsWMA.html#MixAudioWMAVersion"/>
+ <keyword type="struct" name="MixAudioConfigParamsWMA" link="MixAudio-MixAudioConfigParamsWMA.html#MixAudioConfigParamsWMA"/>
+ <keyword type="function" name="mix_acp_wma_new ()" link="MixAudio-MixAudioConfigParamsWMA.html#mix-acp-wma-new"/>
+ <keyword type="function" name="mix_acp_wma_ref ()" link="MixAudio-MixAudioConfigParamsWMA.html#mix-acp-wma-ref"/>
+ <keyword type="macro" name="mix_acp_wma_unref()" link="MixAudio-MixAudioConfigParamsWMA.html#mix-acp-wma-unref"/>
+ <keyword type="macro" name="MIX_ACP_WMA_CHANNEL_MASK()" link="MixAudio-MixAudioConfigParamsWMA.html#MIX-ACP-WMA-CHANNEL-MASK--CAPS"/>
+ <keyword type="macro" name="MIX_ACP_WMA_FORMAT_TAG()" link="MixAudio-MixAudioConfigParamsWMA.html#MIX-ACP-WMA-FORMAT-TAG--CAPS"/>
+ <keyword type="macro" name="MIX_ACP_WMA_BLOCK_ALIGN()" link="MixAudio-MixAudioConfigParamsWMA.html#MIX-ACP-WMA-BLOCK-ALIGN--CAPS"/>
+ <keyword type="macro" name="MIX_ACP_WMA_ENCODE_OPT()" link="MixAudio-MixAudioConfigParamsWMA.html#MIX-ACP-WMA-ENCODE-OPT--CAPS"/>
+ <keyword type="macro" name="MIX_ACP_WMA_PCM_BIT_WIDTH()" link="MixAudio-MixAudioConfigParamsWMA.html#MIX-ACP-WMA-PCM-BIT-WIDTH--CAPS"/>
+ <keyword type="function" name="mix_acp_wma_get_version ()" link="MixAudio-MixAudioConfigParamsWMA.html#mix-acp-wma-get-version"/>
+ <keyword type="function" name="mix_acp_wma_set_version ()" link="MixAudio-MixAudioConfigParamsWMA.html#mix-acp-wma-set-version"/>
+ <keyword type="enum" name="enum MixACPOpAlign" link="MixAudio-MixAudioConfigParams.html#MixACPOpAlign"/>
+ <keyword type="enum" name="enum MixACPBPSType" link="MixAudio-MixAudioConfigParams.html#MixACPBPSType"/>
+ <keyword type="enum" name="enum MixDecodeMode" link="MixAudio-MixAudioConfigParams.html#MixDecodeMode"/>
+ <keyword type="struct" name="MixAudioConfigParams" link="MixAudio-MixAudioConfigParams.html#MixAudioConfigParams"/>
+ <keyword type="function" name="mix_acp_new ()" link="MixAudio-MixAudioConfigParams.html#mix-acp-new"/>
+ <keyword type="function" name="mix_acp_ref ()" link="MixAudio-MixAudioConfigParams.html#mix-acp-ref"/>
+ <keyword type="macro" name="mix_acp_unref()" link="MixAudio-MixAudioConfigParams.html#mix-acp-unref"/>
+ <keyword type="macro" name="MIX_ACP_DECODEMODE()" link="MixAudio-MixAudioConfigParams.html#MIX-ACP-DECODEMODE--CAPS"/>
+ <keyword type="macro" name="MIX_ACP_NUM_CHANNELS()" link="MixAudio-MixAudioConfigParams.html#MIX-ACP-NUM-CHANNELS--CAPS"/>
+ <keyword type="macro" name="MIX_ACP_BITRATE()" link="MixAudio-MixAudioConfigParams.html#MIX-ACP-BITRATE--CAPS"/>
+ <keyword type="macro" name="MIX_ACP_SAMPLE_FREQ()" link="MixAudio-MixAudioConfigParams.html#MIX-ACP-SAMPLE-FREQ--CAPS"/>
+ <keyword type="function" name="mix_acp_get_decodemode ()" link="MixAudio-MixAudioConfigParams.html#mix-acp-get-decodemode"/>
+ <keyword type="function" name="mix_acp_set_decodemode ()" link="MixAudio-MixAudioConfigParams.html#mix-acp-set-decodemode"/>
+ <keyword type="function" name="mix_acp_get_streamname ()" link="MixAudio-MixAudioConfigParams.html#mix-acp-get-streamname"/>
+ <keyword type="function" name="mix_acp_set_streamname ()" link="MixAudio-MixAudioConfigParams.html#mix-acp-set-streamname"/>
+ <keyword type="function" name="mix_acp_set_audio_manager ()" link="MixAudio-MixAudioConfigParams.html#mix-acp-set-audio-manager"/>
+ <keyword type="function" name="mix_acp_get_audio_manager ()" link="MixAudio-MixAudioConfigParams.html#mix-acp-get-audio-manager"/>
+ <keyword type="function" name="mix_acp_is_streamname_valid ()" link="MixAudio-MixAudioConfigParams.html#mix-acp-is-streamname-valid"/>
+ <keyword type="function" name="mix_acp_get_bps ()" link="MixAudio-MixAudioConfigParams.html#mix-acp-get-bps"/>
+ <keyword type="function" name="mix_acp_set_bps ()" link="MixAudio-MixAudioConfigParams.html#mix-acp-set-bps"/>
+ <keyword type="function" name="mix_acp_get_op_align ()" link="MixAudio-MixAudioConfigParams.html#mix-acp-get-op-align"/>
+ <keyword type="function" name="mix_acp_set_op_align ()" link="MixAudio-MixAudioConfigParams.html#mix-acp-set-op-align"/>
+ <keyword type="struct" name="MixAudioInitParams" link="MixAudio-MixAudioInitParams.html#MixAudioInitParams"/>
+ <keyword type="function" name="mix_aip_new ()" link="MixAudio-MixAudioInitParams.html#mix-aip-new"/>
+ <keyword type="function" name="mix_aip_ref ()" link="MixAudio-MixAudioInitParams.html#mix-aip-ref"/>
+ <keyword type="macro" name="mix_aip_unref()" link="MixAudio-MixAudioInitParams.html#mix-aip-unref"/>
+ <keyword type="enum" name="enum MixStreamState" link="MixAudio.html#MixStreamState"/>
+ <keyword type="enum" name="enum MixState" link="MixAudio.html#MixState"/>
+ <keyword type="enum" name="enum MixCodecMode" link="MixAudio.html#MixCodecMode"/>
+ <keyword type="enum" name="enum MixVolType" link="MixAudio.html#MixVolType"/>
+ <keyword type="enum" name="enum MixVolRamp" link="MixAudio.html#MixVolRamp"/>
+ <keyword type="struct" name="MixIOVec" link="MixAudio.html#MixIOVec"/>
+ <keyword type="enum" name="enum MixDeviceState" link="MixAudio.html#MixDeviceState"/>
+ <keyword type="struct" name="MixAudio" link="MixAudio.html#MixAudio-struct"/>
+ <keyword type="function" name="mix_audio_new ()" link="MixAudio.html#mix-audio-new"/>
+ <keyword type="function" name="mix_audio_ref ()" link="MixAudio.html#mix-audio-ref"/>
+ <keyword type="macro" name="mix_audio_unref()" link="MixAudio.html#mix-audio-unref"/>
+ <keyword type="function" name="mix_audio_get_version ()" link="MixAudio.html#mix-audio-get-version"/>
+ <keyword type="function" name="mix_audio_initialize ()" link="MixAudio.html#mix-audio-initialize"/>
+ <keyword type="function" name="mix_audio_configure ()" link="MixAudio.html#mix-audio-configure"/>
+ <keyword type="function" name="mix_audio_decode ()" link="MixAudio.html#mix-audio-decode"/>
+ <keyword type="function" name="mix_audio_capture_encode ()" link="MixAudio.html#mix-audio-capture-encode"/>
+ <keyword type="function" name="mix_audio_start ()" link="MixAudio.html#mix-audio-start"/>
+ <keyword type="function" name="mix_audio_stop_drop ()" link="MixAudio.html#mix-audio-stop-drop"/>
+ <keyword type="function" name="mix_audio_stop_drain ()" link="MixAudio.html#mix-audio-stop-drain"/>
+ <keyword type="function" name="mix_audio_pause ()" link="MixAudio.html#mix-audio-pause"/>
+ <keyword type="function" name="mix_audio_resume ()" link="MixAudio.html#mix-audio-resume"/>
+ <keyword type="function" name="mix_audio_get_timestamp ()" link="MixAudio.html#mix-audio-get-timestamp"/>
+ <keyword type="function" name="mix_audio_set_mute ()" link="MixAudio.html#mix-audio-set-mute"/>
+ <keyword type="function" name="mix_audio_get_mute ()" link="MixAudio.html#mix-audio-get-mute"/>
+ <keyword type="function" name="mix_audio_get_max_vol ()" link="MixAudio.html#mix-audio-get-max-vol"/>
+ <keyword type="function" name="mix_audio_get_min_vol ()" link="MixAudio.html#mix-audio-get-min-vol"/>
+ <keyword type="function" name="mix_audio_get_volume ()" link="MixAudio.html#mix-audio-get-volume"/>
+ <keyword type="function" name="mix_audio_set_volume ()" link="MixAudio.html#mix-audio-set-volume"/>
+ <keyword type="function" name="mix_audio_deinitialize ()" link="MixAudio.html#mix-audio-deinitialize"/>
+ <keyword type="function" name="mix_audio_get_stream_state ()" link="MixAudio.html#mix-audio-get-stream-state"/>
+ <keyword type="function" name="mix_audio_get_state ()" link="MixAudio.html#mix-audio-get-state"/>
+ <keyword type="function" name="mix_audio_am_is_enabled ()" link="MixAudio.html#mix-audio-am-is-enabled"/>
+ <keyword type="function" name="mix_audio_is_am_available ()" link="MixAudio.html#mix-audio-is-am-available"/>
+ <keyword type="function" name="mix_audio_get_output_configuration ()" link="MixAudio.html#mix-audio-get-output-configuration"/>
+ <keyword type="function" name="mix_audio_get_stream_byte_decoded ()" link="MixAudio.html#mix-audio-get-stream-byte-decoded"/>
+ <keyword type="enum" name="enum MixAudioManager" link="MixAudio-mixaudiotypes.html#MixAudioManager"/>
+ <keyword type="constant" name="MIX_AAC_BR_NULL" link="MixAudio-MixAudioConfigParamsAAC.html#MIX-AAC-BR-NULL--CAPS"/>
+ <keyword type="constant" name="MIX_AAC_BR_CONSTANT" link="MixAudio-MixAudioConfigParamsAAC.html#MIX-AAC-BR-CONSTANT--CAPS"/>
+ <keyword type="constant" name="MIX_AAC_BR_VARIABLE" link="MixAudio-MixAudioConfigParamsAAC.html#MIX-AAC-BR-VARIABLE--CAPS"/>
+ <keyword type="constant" name="MIX_AAC_BR_LAST" link="MixAudio-MixAudioConfigParamsAAC.html#MIX-AAC-BR-LAST--CAPS"/>
+ <keyword type="constant" name="MIX_AAC_BS_NULL" link="MixAudio-MixAudioConfigParamsAAC.html#MIX-AAC-BS-NULL--CAPS"/>
+ <keyword type="constant" name="MIX_AAC_BS_ADTS" link="MixAudio-MixAudioConfigParamsAAC.html#MIX-AAC-BS-ADTS--CAPS"/>
+ <keyword type="constant" name="MIX_AAC_BS_ADIF" link="MixAudio-MixAudioConfigParamsAAC.html#MIX-AAC-BS-ADIF--CAPS"/>
+ <keyword type="constant" name="MIX_AAC_BS_RAW" link="MixAudio-MixAudioConfigParamsAAC.html#MIX-AAC-BS-RAW--CAPS"/>
+ <keyword type="constant" name="MIX_AAC_BS_LAST" link="MixAudio-MixAudioConfigParamsAAC.html#MIX-AAC-BS-LAST--CAPS"/>
+ <keyword type="constant" name="MIX_AAC_PROFILE_NULL" link="MixAudio-MixAudioConfigParamsAAC.html#MIX-AAC-PROFILE-NULL--CAPS"/>
+ <keyword type="constant" name="MIX_AAC_PROFILE_MAIN" link="MixAudio-MixAudioConfigParamsAAC.html#MIX-AAC-PROFILE-MAIN--CAPS"/>
+ <keyword type="constant" name="MIX_AAC_PROFILE_LC" link="MixAudio-MixAudioConfigParamsAAC.html#MIX-AAC-PROFILE-LC--CAPS"/>
+ <keyword type="constant" name="MIX_AAC_PROFILE_SSR" link="MixAudio-MixAudioConfigParamsAAC.html#MIX-AAC-PROFILE-SSR--CAPS"/>
+ <keyword type="constant" name="MIX_AAC_PROFILE_LAST" link="MixAudio-MixAudioConfigParamsAAC.html#MIX-AAC-PROFILE-LAST--CAPS"/>
+ <keyword type="constant" name="MIX_AAC_MPEG_ID_NULL" link="MixAudio-MixAudioConfigParamsAAC.html#MIX-AAC-MPEG-ID-NULL--CAPS"/>
+ <keyword type="constant" name="MIX_AAC_MPEG_2_ID" link="MixAudio-MixAudioConfigParamsAAC.html#MIX-AAC-MPEG-2-ID--CAPS"/>
+ <keyword type="constant" name="MIX_AAC_MPEG_4_ID" link="MixAudio-MixAudioConfigParamsAAC.html#MIX-AAC-MPEG-4-ID--CAPS"/>
+ <keyword type="constant" name="MIX_AAC_MPEG_LAST" link="MixAudio-MixAudioConfigParamsAAC.html#MIX-AAC-MPEG-LAST--CAPS"/>
+ <keyword type="constant" name="MIX_AUDIO_WMA_VUNKNOWN" link="MixAudio-MixAudioConfigParamsWMA.html#MIX-AUDIO-WMA-VUNKNOWN--CAPS"/>
+ <keyword type="constant" name="MIX_AUDIO_WMA_V9" link="MixAudio-MixAudioConfigParamsWMA.html#MIX-AUDIO-WMA-V9--CAPS"/>
+ <keyword type="constant" name="MIX_AUDIO_WMA_V10" link="MixAudio-MixAudioConfigParamsWMA.html#MIX-AUDIO-WMA-V10--CAPS"/>
+ <keyword type="constant" name="MIX_AUDIO_WMA_V10P" link="MixAudio-MixAudioConfigParamsWMA.html#MIX-AUDIO-WMA-V10P--CAPS"/>
+ <keyword type="constant" name="MIX_AUDIO_WMA_LAST" link="MixAudio-MixAudioConfigParamsWMA.html#MIX-AUDIO-WMA-LAST--CAPS"/>
+ <keyword type="constant" name="MIX_ACP_OUTPUT_ALIGN_UNKNOWN" link="MixAudio-MixAudioConfigParams.html#MIX-ACP-OUTPUT-ALIGN-UNKNOWN--CAPS"/>
+ <keyword type="constant" name="MIX_ACP_OUTPUT_ALIGN_16" link="MixAudio-MixAudioConfigParams.html#MIX-ACP-OUTPUT-ALIGN-16--CAPS"/>
+ <keyword type="constant" name="MIX_ACP_OUTPUT_ALIGN_MSB" link="MixAudio-MixAudioConfigParams.html#MIX-ACP-OUTPUT-ALIGN-MSB--CAPS"/>
+ <keyword type="constant" name="MIX_ACP_OUTPUT_ALIGN_LSB" link="MixAudio-MixAudioConfigParams.html#MIX-ACP-OUTPUT-ALIGN-LSB--CAPS"/>
+ <keyword type="constant" name="MIX_ACP_OUTPUT_ALIGN_LAST" link="MixAudio-MixAudioConfigParams.html#MIX-ACP-OUTPUT-ALIGN-LAST--CAPS"/>
+ <keyword type="constant" name="MIX_ACP_BPS_UNKNOWN" link="MixAudio-MixAudioConfigParams.html#MIX-ACP-BPS-UNKNOWN--CAPS"/>
+ <keyword type="constant" name="MIX_ACP_BPS_16" link="MixAudio-MixAudioConfigParams.html#MIX-ACP-BPS-16--CAPS"/>
+ <keyword type="constant" name="MIX_ACP_BPS_24" link="MixAudio-MixAudioConfigParams.html#MIX-ACP-BPS-24--CAPS"/>
+ <keyword type="constant" name="MIX_DECODE_NULL" link="MixAudio-MixAudioConfigParams.html#MIX-DECODE-NULL--CAPS"/>
+ <keyword type="constant" name="MIX_DECODE_DIRECTRENDER" link="MixAudio-MixAudioConfigParams.html#MIX-DECODE-DIRECTRENDER--CAPS"/>
+ <keyword type="constant" name="MIX_DECODE_DECODERETURN" link="MixAudio-MixAudioConfigParams.html#MIX-DECODE-DECODERETURN--CAPS"/>
+ <keyword type="constant" name="MIX_DECODE_LAST" link="MixAudio-MixAudioConfigParams.html#MIX-DECODE-LAST--CAPS"/>
+ <keyword type="constant" name="MIX_STREAM_NULL" link="MixAudio.html#MIX-STREAM-NULL--CAPS"/>
+ <keyword type="constant" name="MIX_STREAM_STOPPED" link="MixAudio.html#MIX-STREAM-STOPPED--CAPS"/>
+ <keyword type="constant" name="MIX_STREAM_PLAYING" link="MixAudio.html#MIX-STREAM-PLAYING--CAPS"/>
+ <keyword type="constant" name="MIX_STREAM_PAUSED" link="MixAudio.html#MIX-STREAM-PAUSED--CAPS"/>
+ <keyword type="constant" name="MIX_STREAM_DRAINING" link="MixAudio.html#MIX-STREAM-DRAINING--CAPS"/>
+ <keyword type="constant" name="MIX_STREAM_LAST" link="MixAudio.html#MIX-STREAM-LAST--CAPS"/>
+ <keyword type="constant" name="MIX_STATE_NULL" link="MixAudio.html#MIX-STATE-NULL--CAPS"/>
+ <keyword type="constant" name="MIX_STATE_UNINITIALIZED" link="MixAudio.html#MIX-STATE-UNINITIALIZED--CAPS"/>
+ <keyword type="constant" name="MIX_STATE_INITIALIZED" link="MixAudio.html#MIX-STATE-INITIALIZED--CAPS"/>
+ <keyword type="constant" name="MIX_STATE_CONFIGURED" link="MixAudio.html#MIX-STATE-CONFIGURED--CAPS"/>
+ <keyword type="constant" name="MIX_STATE_LAST" link="MixAudio.html#MIX-STATE-LAST--CAPS"/>
+ <keyword type="constant" name="MIX_CODING_INVALID" link="MixAudio.html#MIX-CODING-INVALID--CAPS"/>
+ <keyword type="constant" name="MIX_CODING_ENCODE" link="MixAudio.html#MIX-CODING-ENCODE--CAPS"/>
+ <keyword type="constant" name="MIX_CODING_DECODE" link="MixAudio.html#MIX-CODING-DECODE--CAPS"/>
+ <keyword type="constant" name="MIX_CODING_LAST" link="MixAudio.html#MIX-CODING-LAST--CAPS"/>
+ <keyword type="constant" name="MIX_VOL_PERCENT" link="MixAudio.html#MIX-VOL-PERCENT--CAPS"/>
+ <keyword type="constant" name="MIX_VOL_DECIBELS" link="MixAudio.html#MIX-VOL-DECIBELS--CAPS"/>
+ <keyword type="constant" name="MIX_VOL_LAST" link="MixAudio.html#MIX-VOL-LAST--CAPS"/>
+ <keyword type="constant" name="MIX_RAMP_LINEAR" link="MixAudio.html#MIX-RAMP-LINEAR--CAPS"/>
+ <keyword type="constant" name="MIX_RAMP_EXPONENTIAL" link="MixAudio.html#MIX-RAMP-EXPONENTIAL--CAPS"/>
+ <keyword type="constant" name="MIX_RAMP_LAST" link="MixAudio.html#MIX-RAMP-LAST--CAPS"/>
+ <keyword type="constant" name="MIX_AUDIO_DEV_CLOSED" link="MixAudio.html#MIX-AUDIO-DEV-CLOSED--CAPS"/>
+ <keyword type="constant" name="MIX_AUDIO_DEV_OPENED" link="MixAudio.html#MIX-AUDIO-DEV-OPENED--CAPS"/>
+ <keyword type="constant" name="MIX_AUDIO_DEV_ALLOCATED" link="MixAudio.html#MIX-AUDIO-DEV-ALLOCATED--CAPS"/>
+ <keyword type="constant" name="MIX_AUDIOMANAGER_NONE" link="MixAudio-mixaudiotypes.html#MIX-AUDIOMANAGER-NONE--CAPS"/>
+ <keyword type="constant" name="MIX_AUDIOMANAGER_INTELAUDIOMANAGER" link="MixAudio-mixaudiotypes.html#MIX-AUDIOMANAGER-INTELAUDIOMANAGER--CAPS"/>
+ <keyword type="constant" name="MIX_AUDIOMANAGER_LAST" link="MixAudio-mixaudiotypes.html#MIX-AUDIOMANAGER-LAST--CAPS"/>
+ </functions>
+</book>
diff --git a/mix_audio/docs/reference/MixAudio/html/MixAudio.html b/mix_audio/docs/reference/MixAudio/html/MixAudio.html
new file mode 100644
index 0000000..2f53577
--- /dev/null
+++ b/mix_audio/docs/reference/MixAudio/html/MixAudio.html
@@ -0,0 +1,1286 @@
+<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
+<html>
+<head>
+<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
+<title>MixAudio</title>
+<meta name="generator" content="DocBook XSL Stylesheets V1.73.2">
+<link rel="start" href="index.html" title="MixAudio Reference Manual">
+<link rel="up" href="ch01.html" title="Mix Audio API">
+<link rel="prev" href="MixAudio-MixAudioInitParams.html" title="MixAudioInitParams">
+<link rel="next" href="MixAudio-mixaudiotypes.html" title="Mix Audio Types">
+<meta name="generator" content="GTK-Doc V1.11 (XML mode)">
+<link rel="stylesheet" href="style.css" type="text/css">
+<link rel="chapter" href="ch01.html" title="Mix Audio API">
+<link rel="chapter" href="tree-hierarchy.html" title="Object Hierarchy">
+<link rel="index" href="api-index-full.html" title="API Index">
+</head>
+<body bgcolor="white" text="black" link="#0000FF" vlink="#840084" alink="#0000FF">
+<table class="navigation" id="top" width="100%" summary="Navigation header" cellpadding="2" cellspacing="2">
+<tr valign="middle">
+<td><a accesskey="p" href="MixAudio-MixAudioInitParams.html"><img src="left.png" width="24" height="24" border="0" alt="Prev"></a></td>
+<td><a accesskey="u" href="ch01.html"><img src="up.png" width="24" height="24" border="0" alt="Up"></a></td>
+<td><a accesskey="h" href="index.html"><img src="home.png" width="24" height="24" border="0" alt="Home"></a></td>
+<th width="100%" align="center">MixAudio Reference Manual</th>
+<td><a accesskey="n" href="MixAudio-mixaudiotypes.html"><img src="right.png" width="24" height="24" border="0" alt="Next"></a></td>
+</tr>
+<tr><td colspan="5" class="shortcuts">
+<a href="#MixAudio.synopsis" class="shortcut">Top</a>
+  | 
+ <a href="#MixAudio.description" class="shortcut">Description</a>
+  | 
+ <a href="#MixAudio.object-hierarchy" class="shortcut">Object Hierarchy</a>
+</td></tr>
+</table>
+<div class="refentry" lang="en">
+<a name="MixAudio"></a><div class="titlepage"></div>
+<div class="refnamediv"><table width="100%"><tr>
+<td valign="top">
+<h2><span class="refentrytitle"><a name="MixAudio.top_of_page"></a>MixAudio</span></h2>
+<p>MixAudio — Object to support a single stream playback using hardware accelerated decoder.</p>
+</td>
+<td valign="top" align="right"></td>
+</tr></table></div>
+<div class="refsynopsisdiv">
+<a name="MixAudio.synopsis"></a><h2>Synopsis</h2>
+<pre class="synopsis">
+
+#include &lt;mixaudio.h&gt;
+
+enum <a class="link" href="MixAudio.html#MixStreamState" title="enum MixStreamState">MixStreamState</a>;
+enum <a class="link" href="MixAudio.html#MixState" title="enum MixState">MixState</a>;
+enum <a class="link" href="MixAudio.html#MixCodecMode" title="enum MixCodecMode">MixCodecMode</a>;
+enum <a class="link" href="MixAudio.html#MixVolType" title="enum MixVolType">MixVolType</a>;
+enum <a class="link" href="MixAudio.html#MixVolRamp" title="enum MixVolRamp">MixVolRamp</a>;
+ <a class="link" href="MixAudio.html#MixIOVec" title="MixIOVec">MixIOVec</a>;
+enum <a class="link" href="MixAudio.html#MixDeviceState" title="enum MixDeviceState">MixDeviceState</a>;
+ <a class="link" href="MixAudio.html#MixAudio-struct" title="MixAudio">MixAudio</a>;
+<a class="link" href="MixAudio.html" title="MixAudio">MixAudio</a> * <a class="link" href="MixAudio.html#mix-audio-new" title="mix_audio_new ()">mix_audio_new</a> (void);
+<a class="link" href="MixAudio.html" title="MixAudio">MixAudio</a> * <a class="link" href="MixAudio.html#mix-audio-ref" title="mix_audio_ref ()">mix_audio_ref</a> (<a class="link" href="MixAudio.html" title="MixAudio">MixAudio</a> *mix);
+#define <a class="link" href="MixAudio.html#mix-audio-unref" title="mix_audio_unref()">mix_audio_unref</a> (obj)
+MIX_RESULT <a class="link" href="MixAudio.html#mix-audio-get-version" title="mix_audio_get_version ()">mix_audio_get_version</a> (guint *major,
+ guint *minor);
+MIX_RESULT <a class="link" href="MixAudio.html#mix-audio-initialize" title="mix_audio_initialize ()">mix_audio_initialize</a> (<a class="link" href="MixAudio.html" title="MixAudio">MixAudio</a> *mix,
+ <a class="link" href="MixAudio.html#MixCodecMode" title="enum MixCodecMode">MixCodecMode</a> mode,
+ <a class="link" href="MixAudio-MixAudioInitParams.html#MixAudioInitParams" title="MixAudioInitParams">MixAudioInitParams</a> *aip,
+ MixDrmParams *drminitparams);
+MIX_RESULT <a class="link" href="MixAudio.html#mix-audio-configure" title="mix_audio_configure ()">mix_audio_configure</a> (<a class="link" href="MixAudio.html" title="MixAudio">MixAudio</a> *mix,
+ <a class="link" href="MixAudio-MixAudioConfigParams.html#MixAudioConfigParams" title="MixAudioConfigParams">MixAudioConfigParams</a> *audioconfigparams,
+ MixDrmParams *drmparams);
+MIX_RESULT <a class="link" href="MixAudio.html#mix-audio-decode" title="mix_audio_decode ()">mix_audio_decode</a> (<a class="link" href="MixAudio.html" title="MixAudio">MixAudio</a> *mix,
+ const <a class="link" href="MixAudio.html#MixIOVec" title="MixIOVec">MixIOVec</a> *iovin,
+ gint iovincnt,
+ guint64 *insize,
+ <a class="link" href="MixAudio.html#MixIOVec" title="MixIOVec">MixIOVec</a> *iovout,
+ gint iovoutcnt,
+ guint64 *outsize);
+MIX_RESULT <a class="link" href="MixAudio.html#mix-audio-capture-encode" title="mix_audio_capture_encode ()">mix_audio_capture_encode</a> (<a class="link" href="MixAudio.html" title="MixAudio">MixAudio</a> *mix,
+ <a class="link" href="MixAudio.html#MixIOVec" title="MixIOVec">MixIOVec</a> *iovout,
+ gint iovoutcnt);
+MIX_RESULT <a class="link" href="MixAudio.html#mix-audio-start" title="mix_audio_start ()">mix_audio_start</a> (<a class="link" href="MixAudio.html" title="MixAudio">MixAudio</a> *mix);
+MIX_RESULT <a class="link" href="MixAudio.html#mix-audio-stop-drop" title="mix_audio_stop_drop ()">mix_audio_stop_drop</a> (<a class="link" href="MixAudio.html" title="MixAudio">MixAudio</a> *mix);
+MIX_RESULT <a class="link" href="MixAudio.html#mix-audio-stop-drain" title="mix_audio_stop_drain ()">mix_audio_stop_drain</a> (<a class="link" href="MixAudio.html" title="MixAudio">MixAudio</a> *mix);
+MIX_RESULT <a class="link" href="MixAudio.html#mix-audio-pause" title="mix_audio_pause ()">mix_audio_pause</a> (<a class="link" href="MixAudio.html" title="MixAudio">MixAudio</a> *mix);
+MIX_RESULT <a class="link" href="MixAudio.html#mix-audio-resume" title="mix_audio_resume ()">mix_audio_resume</a> (<a class="link" href="MixAudio.html" title="MixAudio">MixAudio</a> *mix);
+MIX_RESULT <a class="link" href="MixAudio.html#mix-audio-get-timestamp" title="mix_audio_get_timestamp ()">mix_audio_get_timestamp</a> (<a class="link" href="MixAudio.html" title="MixAudio">MixAudio</a> *mix,
+ guint64 *msecs);
+MIX_RESULT <a class="link" href="MixAudio.html#mix-audio-set-mute" title="mix_audio_set_mute ()">mix_audio_set_mute</a> (<a class="link" href="MixAudio.html" title="MixAudio">MixAudio</a> *mix,
+ gboolean mute);
+MIX_RESULT <a class="link" href="MixAudio.html#mix-audio-get-mute" title="mix_audio_get_mute ()">mix_audio_get_mute</a> (<a class="link" href="MixAudio.html" title="MixAudio">MixAudio</a> *mix,
+ gboolean *muted);
+MIX_RESULT <a class="link" href="MixAudio.html#mix-audio-get-max-vol" title="mix_audio_get_max_vol ()">mix_audio_get_max_vol</a> (<a class="link" href="MixAudio.html" title="MixAudio">MixAudio</a> *mix,
+ gint *maxvol);
+MIX_RESULT <a class="link" href="MixAudio.html#mix-audio-get-min-vol" title="mix_audio_get_min_vol ()">mix_audio_get_min_vol</a> (<a class="link" href="MixAudio.html" title="MixAudio">MixAudio</a> *mix,
+ gint *minvol);
+MIX_RESULT <a class="link" href="MixAudio.html#mix-audio-get-volume" title="mix_audio_get_volume ()">mix_audio_get_volume</a> (<a class="link" href="MixAudio.html" title="MixAudio">MixAudio</a> *mix,
+ gint *currvol,
+ <a class="link" href="MixAudio.html#MixVolType" title="enum MixVolType">MixVolType</a> type);
+MIX_RESULT <a class="link" href="MixAudio.html#mix-audio-set-volume" title="mix_audio_set_volume ()">mix_audio_set_volume</a> (<a class="link" href="MixAudio.html" title="MixAudio">MixAudio</a> *mix,
+ gint currvol,
+ <a class="link" href="MixAudio.html#MixVolType" title="enum MixVolType">MixVolType</a> type,
+ gulong msecs,
+ <a class="link" href="MixAudio.html#MixVolRamp" title="enum MixVolRamp">MixVolRamp</a> ramptype);
+MIX_RESULT <a class="link" href="MixAudio.html#mix-audio-deinitialize" title="mix_audio_deinitialize ()">mix_audio_deinitialize</a> (<a class="link" href="MixAudio.html" title="MixAudio">MixAudio</a> *mix);
+MIX_RESULT <a class="link" href="MixAudio.html#mix-audio-get-stream-state" title="mix_audio_get_stream_state ()">mix_audio_get_stream_state</a> (<a class="link" href="MixAudio.html" title="MixAudio">MixAudio</a> *mix,
+ <a class="link" href="MixAudio.html#MixStreamState" title="enum MixStreamState">MixStreamState</a> *streamState);
+MIX_RESULT <a class="link" href="MixAudio.html#mix-audio-get-state" title="mix_audio_get_state ()">mix_audio_get_state</a> (<a class="link" href="MixAudio.html" title="MixAudio">MixAudio</a> *mix,
+ <a class="link" href="MixAudio.html#MixState" title="enum MixState">MixState</a> *state);
+gboolean <a class="link" href="MixAudio.html#mix-audio-am-is-enabled" title="mix_audio_am_is_enabled ()">mix_audio_am_is_enabled</a> (<a class="link" href="MixAudio.html" title="MixAudio">MixAudio</a> *mix);
+MIX_RESULT <a class="link" href="MixAudio.html#mix-audio-is-am-available" title="mix_audio_is_am_available ()">mix_audio_is_am_available</a> (<a class="link" href="MixAudio.html" title="MixAudio">MixAudio</a> *mix,
+ <a class="link" href="MixAudio-mixaudiotypes.html#MixAudioManager" title="enum MixAudioManager">MixAudioManager</a> am,
+ gboolean *avail);
+MIX_RESULT <a class="link" href="MixAudio.html#mix-audio-get-output-configuration" title="mix_audio_get_output_configuration ()">mix_audio_get_output_configuration</a> (<a class="link" href="MixAudio.html" title="MixAudio">MixAudio</a> *mix,
+ <a class="link" href="MixAudio-MixAudioConfigParams.html#MixAudioConfigParams" title="MixAudioConfigParams">MixAudioConfigParams</a> **audioconfigparams);
+MIX_RESULT <a class="link" href="MixAudio.html#mix-audio-get-stream-byte-decoded" title="mix_audio_get_stream_byte_decoded ()">mix_audio_get_stream_byte_decoded</a> (<a class="link" href="MixAudio.html" title="MixAudio">MixAudio</a> *mix,
+ guint64 *byte);
+</pre>
+</div>
+<div class="refsect1" lang="en">
+<a name="MixAudio.object-hierarchy"></a><h2>Object Hierarchy</h2>
+<pre class="synopsis">
+ GObject
+ +----MixAudio
+</pre>
+</div>
+<div class="refsect1" lang="en">
+<a name="MixAudio.description"></a><h2>Description</h2>
+<p>
+<a class="link" href="MixAudio.html" title="MixAudio"><span class="type">MixAudio</span></a> object provides a thread-safe API for applications and/or multimedia frameworks to take advantage of the Intel Smart Sound Technology(TM) driver for hardware audio decode and render.
+</p>
+<p>
+Each <a class="link" href="MixAudio.html" title="MixAudio"><span class="type">MixAudio</span></a> object represents one streaming session with the Intel Smart Sound driver and provides configuration and control of the decoding and playback options.
+</p>
+<p>
+The <a class="link" href="MixAudio.html" title="MixAudio"><span class="type">MixAudio</span></a> object also supports integration with the Intel Audio Manager service.
+</p>
+<p>
+An application can utilize the <a class="link" href="MixAudio.html" title="MixAudio"><span class="type">MixAudio</span></a> object by calling the following sequence:
+</p>
+<div class="orderedlist"><ol type="1">
+<li>
+<a class="link" href="MixAudio.html#mix-audio-new" title="mix_audio_new ()"><code class="function">mix_audio_new()</code></a> to create a <a class="link" href="MixAudio.html" title="MixAudio"><span class="type">MixAudio</span></a> instance.</li>
+<li>
+<a class="link" href="MixAudio.html#mix-audio-initialize" title="mix_audio_initialize ()"><code class="function">mix_audio_initialize()</code></a> to allocate Intel Smart Sound Technology resource.</li>
+<li>
+<a class="link" href="MixAudio.html#mix-audio-configure" title="mix_audio_configure ()"><code class="function">mix_audio_configure()</code></a> to configure stream parameters.</li>
+<li>
+<a class="link" href="MixAudio.html#mix-audio-decode" title="mix_audio_decode ()"><code class="function">mix_audio_decode()</code></a> can be called repeatedly for decoding and, optionally, rendering.</li>
+<li>
+<a class="link" href="MixAudio.html#mix-audio-start" title="mix_audio_start ()"><code class="function">mix_audio_start()</code></a> is called after the 1st <a class="link" href="MixAudio.html#mix-audio-decode" title="mix_audio_decode ()"><code class="function">mix_audio_decode()</code></a> method to start rendering.</li>
+<li>
+<a class="link" href="MixAudio.html#mix-audio-stop-drain" title="mix_audio_stop_drain ()"><code class="function">mix_audio_stop_drain()</code></a> is called after the last buffer is passed for decoding in with <a class="link" href="MixAudio.html#mix-audio-decode" title="mix_audio_decode ()"><code class="function">mix_audio_decode()</code></a>. </li>
+<li>
+<a class="link" href="MixAudio.html#mix-audio-deinitialize" title="mix_audio_deinitialize ()"><code class="function">mix_audio_deinitialize()</code></a> to free resource once playback is completed.</li>
+</ol></div>
+<p>
+</p>
+<p>
+Since <a class="link" href="MixAudio.html#mix-audio-decode" title="mix_audio_decode ()"><code class="function">mix_audio_decode()</code></a> is a blocking call during playback, the following methods are called in a separate thread to control progress:
+</p>
+<div class="itemizedlist"><ul type="disc">
+<li><a class="link" href="MixAudio.html#mix-audio-start" title="mix_audio_start ()"><code class="function">mix_audio_start()</code></a></li>
+<li><a class="link" href="MixAudio.html#mix-audio-pause" title="mix_audio_pause ()"><code class="function">mix_audio_pause()</code></a></li>
+<li><a class="link" href="MixAudio.html#mix-audio-resume" title="mix_audio_resume ()"><code class="function">mix_audio_resume()</code></a></li>
+<li><a class="link" href="MixAudio.html#mix-audio-stop-drop" title="mix_audio_stop_drop ()"><code class="function">mix_audio_stop_drop()</code></a></li>
+</ul></div>
+</div>
+<div class="refsect1" lang="en">
+<a name="MixAudio.details"></a><h2>Details</h2>
+<div class="refsect2" lang="en">
+<a name="MixStreamState"></a><h3>enum MixStreamState</h3>
+<pre class="programlisting">typedef enum {
+ MIX_STREAM_NULL=0,
+ MIX_STREAM_STOPPED,
+ MIX_STREAM_PLAYING,
+ MIX_STREAM_PAUSED,
+ MIX_STREAM_DRAINING,
+ MIX_STREAM_LAST
+} MixStreamState;
+</pre>
+<p>
+Stream State during Decode and Render or Encode mode. These states do not apply to Decode and Return mode.</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody>
+<tr>
+<td><p><a name="MIX-STREAM-NULL--CAPS"></a><span class="term"><code class="literal">MIX_STREAM_NULL</code></span></p></td>
+<td> Stream is not allocated.
+</td>
+</tr>
+<tr>
+<td><p><a name="MIX-STREAM-STOPPED--CAPS"></a><span class="term"><code class="literal">MIX_STREAM_STOPPED</code></span></p></td>
+<td> Stream is at STOP state. This is the only state DNR is allowed.
+</td>
+</tr>
+<tr>
+<td><p><a name="MIX-STREAM-PLAYING--CAPS"></a><span class="term"><code class="literal">MIX_STREAM_PLAYING</code></span></p></td>
+<td> Stream is at Playing state.
+</td>
+</tr>
+<tr>
+<td><p><a name="MIX-STREAM-PAUSED--CAPS"></a><span class="term"><code class="literal">MIX_STREAM_PAUSED</code></span></p></td>
+<td> Stream is Paused.
+</td>
+</tr>
+<tr>
+<td><p><a name="MIX-STREAM-DRAINING--CAPS"></a><span class="term"><code class="literal">MIX_STREAM_DRAINING</code></span></p></td>
+<td> Stream is draining -- remaining of the buffer in the device are playing. This state is special due to the limitation that no other control operations are allowed at this state. Stream will become <em class="parameter"><code>MIX_STREAM_STOPPED</code></em> automatically when this data draining has completed.
+</td>
+</tr>
+<tr>
+<td><p><a name="MIX-STREAM-LAST--CAPS"></a><span class="term"><code class="literal">MIX_STREAM_LAST</code></span></p></td>
+<td> Last index in the enumeration.
+</td>
+</tr>
+</tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="MixState"></a><h3>enum MixState</h3>
+<pre class="programlisting">typedef enum {
+ MIX_STATE_NULL=0,
+ MIX_STATE_UNINITIALIZED,
+ MIX_STATE_INITIALIZED,
+ MIX_STATE_CONFIGURED,
+ MIX_STATE_LAST
+} MixState;
+</pre>
+<p>
+The various states the device can be in.</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody>
+<tr>
+<td><p><a name="MIX-STATE-NULL--CAPS"></a><span class="term"><code class="literal">MIX_STATE_NULL</code></span></p></td>
+<td></td>
+</tr>
+<tr>
+<td><p><a name="MIX-STATE-UNINITIALIZED--CAPS"></a><span class="term"><code class="literal">MIX_STATE_UNINITIALIZED</code></span></p></td>
+<td> MIX is not initialized.
+</td>
+</tr>
+<tr>
+<td><p><a name="MIX-STATE-INITIALIZED--CAPS"></a><span class="term"><code class="literal">MIX_STATE_INITIALIZED</code></span></p></td>
+<td> MIX is initialized.
+</td>
+</tr>
+<tr>
+<td><p><a name="MIX-STATE-CONFIGURED--CAPS"></a><span class="term"><code class="literal">MIX_STATE_CONFIGURED</code></span></p></td>
+<td> MIX is configured successfully.
+</td>
+</tr>
+<tr>
+<td><p><a name="MIX-STATE-LAST--CAPS"></a><span class="term"><code class="literal">MIX_STATE_LAST</code></span></p></td>
+<td> Last index in the enumeration.
+</td>
+</tr>
+</tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="MixCodecMode"></a><h3>enum MixCodecMode</h3>
+<pre class="programlisting">typedef enum {
+ MIX_CODING_INVALID=0,
+ MIX_CODING_ENCODE,
+ MIX_CODING_DECODE,
+ MIX_CODING_LAST
+} MixCodecMode;
+</pre>
+<p>
+Mode where device is operating on. See <a class="link" href="MixAudio.html#mix-audio-initialize" title="mix_audio_initialize ()"><code class="function">mix_audio_initialize()</code></a>.</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody>
+<tr>
+<td><p><a name="MIX-CODING-INVALID--CAPS"></a><span class="term"><code class="literal">MIX_CODING_INVALID</code></span></p></td>
+<td> Indicates device is uninitialized for any mode.
+</td>
+</tr>
+<tr>
+<td><p><a name="MIX-CODING-ENCODE--CAPS"></a><span class="term"><code class="literal">MIX_CODING_ENCODE</code></span></p></td>
+<td> Indicates device is opened for encoding.
+</td>
+</tr>
+<tr>
+<td><p><a name="MIX-CODING-DECODE--CAPS"></a><span class="term"><code class="literal">MIX_CODING_DECODE</code></span></p></td>
+<td> Indicates device is opened for decoding.
+</td>
+</tr>
+<tr>
+<td><p><a name="MIX-CODING-LAST--CAPS"></a><span class="term"><code class="literal">MIX_CODING_LAST</code></span></p></td>
+<td> Last index in the enumeration.
+</td>
+</tr>
+</tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="MixVolType"></a><h3>enum MixVolType</h3>
+<pre class="programlisting">typedef enum {
+ MIX_VOL_PERCENT=0,
+ MIX_VOL_DECIBELS,
+ MIX_VOL_LAST
+} MixVolType;
+</pre>
+<p>
+See <code class="function">mix_audio_get_volume()</code> and <code class="function">mix_audio_set_volume()</code>.</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody>
+<tr>
+<td><p><a name="MIX-VOL-PERCENT--CAPS"></a><span class="term"><code class="literal">MIX_VOL_PERCENT</code></span></p></td>
+<td> volume is expressed in percentage.
+</td>
+</tr>
+<tr>
+<td><p><a name="MIX-VOL-DECIBELS--CAPS"></a><span class="term"><code class="literal">MIX_VOL_DECIBELS</code></span></p></td>
+<td> volume is expressed in decibel.
+</td>
+</tr>
+<tr>
+<td><p><a name="MIX-VOL-LAST--CAPS"></a><span class="term"><code class="literal">MIX_VOL_LAST</code></span></p></td>
+<td> last entry.
+</td>
+</tr>
+</tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="MixVolRamp"></a><h3>enum MixVolRamp</h3>
+<pre class="programlisting">typedef enum
+{
+ MIX_RAMP_LINEAR = 0,
+ MIX_RAMP_EXPONENTIAL,
+ MIX_RAMP_LAST
+} MixVolRamp;
+</pre>
+<p>
+See <code class="function">mix_audio_get_volume()</code> and <code class="function">mix_audio_set_volume()</code>.</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody>
+<tr>
+<td><p><a name="MIX-RAMP-LINEAR--CAPS"></a><span class="term"><code class="literal">MIX_RAMP_LINEAR</code></span></p></td>
+<td> volume ramps linearly over the specified duration.
+</td>
+</tr>
+<tr>
+<td><p><a name="MIX-RAMP-EXPONENTIAL--CAPS"></a><span class="term"><code class="literal">MIX_RAMP_EXPONENTIAL</code></span></p></td>
+<td> volume ramps exponentially over the specified duration.
+</td>
+</tr>
+<tr>
+<td><p><a name="MIX-RAMP-LAST--CAPS"></a><span class="term"><code class="literal">MIX_RAMP_LAST</code></span></p></td>
+<td> last entry.
+</td>
+</tr>
+</tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="MixIOVec"></a><h3>MixIOVec</h3>
+<pre class="programlisting">typedef struct {
+ guchar *data;
+ gint size;
+} MixIOVec;
+</pre>
+<p>
+Scatter-gather style structure. To be used by <a class="link" href="MixAudio.html#mix-audio-decode" title="mix_audio_decode ()"><code class="function">mix_audio_decode()</code></a> method for input and output buffer.</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody>
+<tr>
+<td><p><span class="term">guchar *<em class="structfield"><code>data</code></em>;</span></p></td>
+<td> data pointer
+</td>
+</tr>
+<tr>
+<td><p><span class="term">gint <em class="structfield"><code>size</code></em>;</span></p></td>
+<td> size of buffer in <em class="parameter"><code>data</code></em>
+</td>
+</tr>
+</tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="MixDeviceState"></a><h3>enum MixDeviceState</h3>
+<pre class="programlisting">typedef enum {
+ MIX_AUDIO_DEV_CLOSED=0,
+ MIX_AUDIO_DEV_OPENED,
+ MIX_AUDIO_DEV_ALLOCATED
+} MixDeviceState;
+</pre>
+<p>
+Device state.</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody>
+<tr>
+<td><p><a name="MIX-AUDIO-DEV-CLOSED--CAPS"></a><span class="term"><code class="literal">MIX_AUDIO_DEV_CLOSED</code></span></p></td>
+<td> TBD
+</td>
+</tr>
+<tr>
+<td><p><a name="MIX-AUDIO-DEV-OPENED--CAPS"></a><span class="term"><code class="literal">MIX_AUDIO_DEV_OPENED</code></span></p></td>
+<td> TBD
+</td>
+</tr>
+<tr>
+<td><p><a name="MIX-AUDIO-DEV-ALLOCATED--CAPS"></a><span class="term"><code class="literal">MIX_AUDIO_DEV_ALLOCATED</code></span></p></td>
+<td> TBD
+</td>
+</tr>
+</tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="MixAudio-struct"></a><h3>MixAudio</h3>
+<pre class="programlisting">typedef struct {
+ GObject parent;
+} MixAudio;
+</pre>
+<p>
+MI-X Audio object</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody><tr>
+<td><p><span class="term">GObject <em class="structfield"><code>parent</code></em>;</span></p></td>
+<td> Parent object.
+</td>
+</tr></tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="mix-audio-new"></a><h3>mix_audio_new ()</h3>
+<pre class="programlisting"><a class="link" href="MixAudio.html" title="MixAudio">MixAudio</a> * mix_audio_new (void);</pre>
+<p>
+Use this method to create new instance of <a class="link" href="MixAudio.html" title="MixAudio"><span class="type">MixAudio</span></a></p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody><tr>
+<td><p><span class="term"><em class="parameter"><code>returns</code></em> :</span></p></td>
+<td> A newly allocated instance of <a class="link" href="MixAudio.html" title="MixAudio"><span class="type">MixAudio</span></a>
+</td>
+</tr></tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="mix-audio-ref"></a><h3>mix_audio_ref ()</h3>
+<pre class="programlisting"><a class="link" href="MixAudio.html" title="MixAudio">MixAudio</a> * mix_audio_ref (<a class="link" href="MixAudio.html" title="MixAudio">MixAudio</a> *mix);</pre>
+<p>
+Add reference count.</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>mix</code></em> :</span></p></td>
+<td> object to add reference
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>returns</code></em> :</span></p></td>
+<td> the MixAudio instance where reference count has been increased.
+</td>
+</tr>
+</tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="mix-audio-unref"></a><h3>mix_audio_unref()</h3>
+<pre class="programlisting">#define mix_audio_unref(obj) g_object_unref (G_OBJECT(obj))
+</pre>
+<p>
+Decrement reference count of the object.</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody><tr>
+<td><p><span class="term"><em class="parameter"><code>obj</code></em> :</span></p></td>
+<td> object to unref.
+</td>
+</tr></tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="mix-audio-get-version"></a><h3>mix_audio_get_version ()</h3>
+<pre class="programlisting">MIX_RESULT mix_audio_get_version (guint *major,
+ guint *minor);</pre>
+<p>
+Returns the version of the MI-X library.</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody><tr>
+<td><p><span class="term"><em class="parameter"><code>returns</code></em> :</span></p></td>
+<td> <span class="type">MIX_RESULT_SUCCESS</span>
+</td>
+</tr></tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="mix-audio-initialize"></a><h3>mix_audio_initialize ()</h3>
+<pre class="programlisting">MIX_RESULT mix_audio_initialize (<a class="link" href="MixAudio.html" title="MixAudio">MixAudio</a> *mix,
+ <a class="link" href="MixAudio.html#MixCodecMode" title="enum MixCodecMode">MixCodecMode</a> mode,
+ <a class="link" href="MixAudio-MixAudioInitParams.html#MixAudioInitParams" title="MixAudioInitParams">MixAudioInitParams</a> *aip,
+ MixDrmParams *drminitparams);</pre>
+<p>
+This function will initialize an encode or decode session with this <a class="link" href="MixAudio.html" title="MixAudio"><span class="type">MixAudio</span></a> instance. During this call, the device will be opened. If the device is not available, an error is returned to the caller so that an alternative (e.g. software decoding) can be configured instead. Use <a class="link" href="MixAudio.html#mix-audio-deinitialize" title="mix_audio_deinitialize ()"><code class="function">mix_audio_deinitialize()</code></a> to close the device.
+</p>
+<p>
+A previous initialized session must be de-initialized using <a class="link" href="MixAudio.html#mix-audio-deinitialize" title="mix_audio_deinitialize ()"><code class="function">mix_audio_deinitialize()</code></a> before it can be initialized again.</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>mix</code></em> :</span></p></td>
+<td> <a class="link" href="MixAudio.html" title="MixAudio"><span class="type">MixAudio</span></a> object.
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>mode</code></em> :</span></p></td>
+<td> Requested <a class="link" href="MixAudio.html#MixCodecMode" title="enum MixCodecMode"><span class="type">MixCodecMode</span></a>.
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>aip</code></em> :</span></p></td>
+<td> Audio initialization parameters.
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>drminitparams</code></em> :</span></p></td>
+<td> <span class="emphasis"><em>Optional.</em></span> DRM initialization param if applicable.
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>returns</code></em> :</span></p></td>
+<td> <span class="type">MIX_RESULT_SUCCESS</span> on successful initialization. <span class="type">MIX_RESULT_ALREADY_INIT</span> if session is already initialized.
+</td>
+</tr>
+</tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="mix-audio-configure"></a><h3>mix_audio_configure ()</h3>
+<pre class="programlisting">MIX_RESULT mix_audio_configure (<a class="link" href="MixAudio.html" title="MixAudio">MixAudio</a> *mix,
+ <a class="link" href="MixAudio-MixAudioConfigParams.html#MixAudioConfigParams" title="MixAudioConfigParams">MixAudioConfigParams</a> *audioconfigparams,
+ MixDrmParams *drmparams);</pre>
+<p>
+This function can be used to configure a stream for the current session. The caller can use this function to do the following:
+</p>
+<p>
+</p>
+<div class="itemizedlist"><ul type="disc">
+<li>Choose decoding mode (direct-render or decode-return)</li>
+<li>Provide DRM parameters (using DRMparams object)</li>
+<li>Provide stream parameters (using STRMparams objects)</li>
+<li>Provide a stream name for the Intel Smart Sound Technology stream</li>
+</ul></div>
+<p>
+</p>
+<p>
+SST stream parameters will be set during this call, and stream resources allocated in SST.
+</p>
+<p>
+</p>
+<div class="note" style="margin-left: 0.5in; margin-right: 0.5in;">
+<h3 class="title">Intel Audio Manager support:</h3>
+<p>If Intel Audio Manager support is enabled, and if <em class="parameter"><code>mode</code></em> is specified to <a class="link" href="MixAudio-MixAudioConfigParams.html#MIX-DECODE-DIRECTRENDER--CAPS"><span class="type">MIX_DECODE_DIRECTRENDER</span></a>, the SST stream will be registered with Intel Audio Manager in the context of this call, using the stream name provided in <em class="parameter"><code>streamname</code></em>. Application will receive a notification from Intel Audio Manager that the stream has been created during or soon after this call. The application should be ready to handle either possibility. A stream ID (associated with the stream name) will be provided by Intel Audio Manager which will be used for subsequent notifications from Intel Audio Manager or calls to Intel Audio Manager for muting, pause and resume. See <code class="function">mix_audio_getstreamid()</code></p>
+<p>If a stream is already registered with Intel Audio Manager, application must pass the same <em class="parameter"><code>streamname</code></em> argument to retain the session. Otherwise, the existing stream will be unregistered and a new stream will be registered with the new <em class="parameter"><code>streamname</code></em>.
+</p>
+</div>
+<p>
+</p>
+<p>
+If <em class="parameter"><code>mode</code></em> is specified to <a class="link" href="MixAudio-MixAudioConfigParams.html#MIX-DECODE-DIRECTRENDER--CAPS"><span class="type">MIX_DECODE_DIRECTRENDER</span></a> but direct-render mode is not available (due to end user use of alternative output device), an error indication will be returned to the caller so that an alternate pipeline configuration can be created (e.g. including a Pulse Audio sink, and support for output buffers). In this case, the caller will need to call <a class="link" href="MixAudio.html#mix-audio-configure" title="mix_audio_configure ()"><code class="function">mix_audio_configure()</code></a> again with <em class="parameter"><code>mode</code></em> specified as <a class="link" href="MixAudio-MixAudioConfigParams.html#MIX-DECODE-DECODERETURN--CAPS"><span class="type">MIX_DECODE_DECODERETURN</span></a> to request decode-return mode.
+</p>
+<p>
+This method can be called multiple times if reconfiguration of the stream is needed. However, this method must be called when the stream is in <a class="link" href="MixAudio.html#MIX-STREAM-STOPPED--CAPS"><span class="type">MIX_STREAM_STOPPED</span></a> state.</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>mix</code></em> :</span></p></td>
+<td> <a class="link" href="MixAudio.html" title="MixAudio"><span class="type">MixAudio</span></a> object.
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>audioconfigparams</code></em> :</span></p></td>
+<td> a <a class="link" href="MixAudio-MixAudioConfigParams.html#MixAudioConfigParams" title="MixAudioConfigParams"><span class="type">MixAudioConfigParams</span></a> derived object containing information for the specific stream type.
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>drmparams</code></em> :</span></p></td>
+<td> <span class="emphasis"><em>Optional.</em></span> DRM initialization param if applicable.
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>returns</code></em> :</span></p></td>
+<td> Result indicates successful or not.
+</td>
+</tr>
+</tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="mix-audio-decode"></a><h3>mix_audio_decode ()</h3>
+<pre class="programlisting">MIX_RESULT mix_audio_decode (<a class="link" href="MixAudio.html" title="MixAudio">MixAudio</a> *mix,
+ const <a class="link" href="MixAudio.html#MixIOVec" title="MixIOVec">MixIOVec</a> *iovin,
+ gint iovincnt,
+ guint64 *insize,
+ <a class="link" href="MixAudio.html#MixIOVec" title="MixIOVec">MixIOVec</a> *iovout,
+ gint iovoutcnt,
+ guint64 *outsize);</pre>
+<p>
+This function is used to initiate HW accelerated decoding of encoded data buffers. This function may be used in two major modes, direct-render or decode-return.
+</p>
+<p>
+With direct-render, input buffers are provided by the caller which hold encoded audio data, and no output buffers are provided. The encoded data is decoded, and the decoded data is sent directly to the output speaker. This allows very low power audio rendering and is the best choice of operation for longer battery life.
+</p>
+<p>
+</p>
+<div class="note" style="margin-left: 0.5in; margin-right: 0.5in;">
+<h3 class="title">Intel Audio Manager Support</h3>
+However, if the user has connected a different target output device, such as Bluetooth headphones, this mode cannot be used as the decoded audio must be directed to the Pulse Audio stack where the output to Bluetooth device can be supported, per Intel Audio Manager guidelines. This mode is called decode-return, and requires the caller to provide output buffers for the decoded data.
+</div>
+<p>
+</p>
+<p>
+Input buffers in both modes are one or more user space buffers using a scatter/gather style vector interface.
+</p>
+<p>
+Output buffers for the decode-return mode are one or more user space buffers in a scatter style vector interface. Buffers will be filled in order and lengths of data filled will be returned.
+</p>
+<p>
+This call will block until data has been completely copied or queued to the driver. All user space buffers may be used or released when this call returns.
+</p>
+<p>
+Note: If the stream is configured as <a class="link" href="MixAudio-MixAudioConfigParams.html#MIX-DECODE-DIRECTRENDER--CAPS"><span class="type">MIX_DECODE_DIRECTRENDER</span></a>, and whenever the stream is in <a class="link" href="MixAudio.html#MIX-STREAM-STOPPED--CAPS"><span class="type">MIX_STREAM_STOPPED</span></a> state, the call to <a class="link" href="MixAudio.html#mix-audio-decode" title="mix_audio_decode ()"><code class="function">mix_audio_decode()</code></a> will not start the playback until <a class="link" href="MixAudio.html#mix-audio-start" title="mix_audio_start ()"><code class="function">mix_audio_start()</code></a> is called. This behavior allows the application to queue up data but delay the playback until the appropriate time.</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>mix</code></em> :</span></p></td>
+<td> <a class="link" href="MixAudio.html" title="MixAudio"><span class="type">MixAudio</span></a> object.
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>iovin</code></em> :</span></p></td>
+<td> a pointer to an array of <a class="link" href="MixAudio.html#MixIOVec" title="MixIOVec"><span class="type">MixIOVec</span></a> structure that contains the input buffers
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>iovincnt</code></em> :</span></p></td>
+<td> the number of entries in the <em class="parameter"><code>iovin</code></em> array
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>iovout</code></em> :</span></p></td>
+<td> a pointer to an array of <a class="link" href="MixAudio.html#MixIOVec" title="MixIOVec"><span class="type">MixIOVec</span></a> structure that represent the output buffer. During input, each size in the <a class="link" href="MixAudio.html#MixIOVec" title="MixIOVec"><span class="type">MixIOVec</span></a> array represents the available buffer size pointed to by data. Upon return, each size value will be updated to reflect how much data has been filled. This parameter is ignored if stream is configured to <a class="link" href="MixAudio-MixAudioConfigParams.html#MIX-DECODE-DIRECTRENDER--CAPS"><span class="type">MIX_DECODE_DIRECTRENDER</span></a>. See <a class="link" href="MixAudio.html#mix-audio-configure" title="mix_audio_configure ()"><code class="function">mix_audio_configure()</code></a> for more detail.
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>iovoutcnt</code></em> :</span></p></td>
+<td> in/out parameter which when input, it contains the number of entries available in the <em class="parameter"><code>iovout</code></em> array. Upon return, this value will be updated to reflect how many entries in the <em class="parameter"><code>iovout</code></em> array have been populated with data. This parameter is ignored if stream is configured to <a class="link" href="MixAudio-MixAudioConfigParams.html#MIX-DECODE-DIRECTRENDER--CAPS"><span class="type">MIX_DECODE_DIRECTRENDER</span></a>. See <a class="link" href="MixAudio.html#mix-audio-configure" title="mix_audio_configure ()"><code class="function">mix_audio_configure()</code></a> for more detail.
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>outsize</code></em> :</span></p></td>
+<td> Total number of bytes returned for the decode session. This parameter is ignored if stream is configured to <a class="link" href="MixAudio-MixAudioConfigParams.html#MIX-DECODE-DIRECTRENDER--CAPS"><span class="type">MIX_DECODE_DIRECTRENDER</span></a>.
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>returns</code></em> :</span></p></td>
+<td> <span class="type">MIX_RESULT</span>
+</td>
+</tr>
+</tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="mix-audio-capture-encode"></a><h3>mix_audio_capture_encode ()</h3>
+<pre class="programlisting">MIX_RESULT mix_audio_capture_encode (<a class="link" href="MixAudio.html" title="MixAudio">MixAudio</a> *mix,
+ <a class="link" href="MixAudio.html#MixIOVec" title="MixIOVec">MixIOVec</a> *iovout,
+ gint iovoutcnt);</pre>
+<p>
+To read encoded data from device.
+</p>
+<p>
+<em><span class="comment">
+NOTE: May need to rename to "read_encoded" or other name. Since "encode" seems to mean taking raw audio and convert to compressed audio.
+</span></em></p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>mix</code></em> :</span></p></td>
+<td> <a class="link" href="MixAudio.html" title="MixAudio"><span class="type">MixAudio</span></a> object.
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>iovout</code></em> :</span></p></td>
+<td> Capture audio samples.
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>iovoutcnt</code></em> :</span></p></td>
+<td> Number of entries in the input vector <em class="parameter"><code>iovout</code></em>.
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>returns</code></em> :</span></p></td>
+<td> <span class="type">MIX_RESULT</span>
+</td>
+</tr>
+</tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="mix-audio-start"></a><h3>mix_audio_start ()</h3>
+<pre class="programlisting">MIX_RESULT mix_audio_start (<a class="link" href="MixAudio.html" title="MixAudio">MixAudio</a> *mix);</pre>
+<p>
+If the stream is configured to <a class="link" href="MixAudio-MixAudioConfigParams.html#MIX-DECODE-DIRECTRENDER--CAPS"><span class="type">MIX_DECODE_DIRECTRENDER</span></a>, application uses this call to change the stream out of the <a class="link" href="MixAudio.html#MIX-STREAM-STOPPED--CAPS"><span class="type">MIX_STREAM_STOPPED</span></a> state. If <a class="link" href="MixAudio.html#mix-audio-decode" title="mix_audio_decode ()"><code class="function">mix_audio_decode()</code></a> is called and blocking in a separate thread prior to this call, this method causes the device to start rendering data.
+</p>
+<p>
+In <a class="link" href="MixAudio-MixAudioConfigParams.html#MIX-DECODE-DECODERETURN--CAPS"><span class="type">MIX_DECODE_DECODERETURN</span></a>, this method is no op.</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>mix</code></em> :</span></p></td>
+<td> <a class="link" href="MixAudio.html" title="MixAudio"><span class="type">MixAudio</span></a> object.
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>returns</code></em> :</span></p></td>
+<td> <span class="type">MIX_RESULT_SUCCESS</span> if the resulting state is either <a class="link" href="MixAudio.html#MIX-STREAM-PLAYING--CAPS"><span class="type">MIX_STREAM_PLAYING</span></a> or <a class="link" href="MixAudio.html#MIX-STREAM-PAUSED--CAPS"><span class="type">MIX_STREAM_PAUSED</span></a>. Fail code otherwise.
+</td>
+</tr>
+</tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="mix-audio-stop-drop"></a><h3>mix_audio_stop_drop ()</h3>
+<pre class="programlisting">MIX_RESULT mix_audio_stop_drop (<a class="link" href="MixAudio.html" title="MixAudio">MixAudio</a> *mix);</pre>
+<p>
+If the stream is configured to <a class="link" href="MixAudio-MixAudioConfigParams.html#MIX-DECODE-DIRECTRENDER--CAPS"><span class="type">MIX_DECODE_DIRECTRENDER</span></a>, application uses this function to stop the processing and playback of audio.
+</p>
+<p>
+All remaining frames to be decoded or rendered will be discarded and playback will stop immediately, unblocks any pending <a class="link" href="MixAudio.html#mix-audio-decode" title="mix_audio_decode ()"><code class="function">mix_audio_decode()</code></a>.
+</p>
+<p>
+If <span class="type">MIX_STOP_DRAIN</span> is requested, the call will block with stream state set to <a class="link" href="MixAudio.html#MIX-STREAM-DRAINING--CAPS"><span class="type">MIX_STREAM_DRAINING</span></a>, and return only after all remaining frames in previously submitted buffers are decoded and rendered. When <span class="type">MIX_STOP_DRAIN</span> returns successfully, the stream would have reached <a class="link" href="MixAudio.html#MIX-STREAM-STOPPED--CAPS"><span class="type">MIX_STREAM_STOPPED</span></a> successfully.
+</p>
+<p>
+After this call, the timestamp retrieved by <code class="function">mix_audio_gettimestamp()</code> is reset to zero.
+</p>
+<p>
+Note that this method returns <span class="type">MIX_RESULT_WRONG_STATE</span> if the stream is in <a class="link" href="MixAudio.html#MIX-STREAM-DRAINING--CAPS"><span class="type">MIX_STREAM_DRAINING</span></a> state.</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>mix</code></em> :</span></p></td>
+<td> <a class="link" href="MixAudio.html" title="MixAudio"><span class="type">MixAudio</span></a> object.
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>returns</code></em> :</span></p></td>
+<td> <span class="type">MIX_RESULT_SUCCESS</span> if the resulting state has successfully reached <a class="link" href="MixAudio.html#MIX-STREAM-STOPPED--CAPS"><span class="type">MIX_STREAM_STOPPED</span></a>. Fail code otherwise.
+</td>
+</tr>
+</tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="mix-audio-stop-drain"></a><h3>mix_audio_stop_drain ()</h3>
+<pre class="programlisting">MIX_RESULT mix_audio_stop_drain (<a class="link" href="MixAudio.html" title="MixAudio">MixAudio</a> *mix);</pre>
+<p>
+If the stream is configured to <a class="link" href="MixAudio-MixAudioConfigParams.html#MIX-DECODE-DIRECTRENDER--CAPS"><span class="type">MIX_DECODE_DIRECTRENDER</span></a>, application uses this function to stop the processing and playback of audio.
+</p>
+<p>
+The call will block with stream state set to <a class="link" href="MixAudio.html#MIX-STREAM-DRAINING--CAPS"><span class="type">MIX_STREAM_DRAINING</span></a>, and return only after all remaining frames in previously submitted buffers are decoded and rendered.
+</p>
+<p>
+Note that this method blocks until <a class="link" href="MixAudio.html#MIX-STREAM-STOPPED--CAPS"><span class="type">MIX_STREAM_STOPPED</span></a> is reached if it is called when the stream is already in <a class="link" href="MixAudio.html#MIX-STREAM-DRAINING--CAPS"><span class="type">MIX_STREAM_DRAINING</span></a> state.</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>mix</code></em> :</span></p></td>
+<td> <a class="link" href="MixAudio.html" title="MixAudio"><span class="type">MixAudio</span></a> object.
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>returns</code></em> :</span></p></td>
+<td> <span class="type">MIX_RESULT_SUCCESS</span> if the resulting state has successfully reached <a class="link" href="MixAudio.html#MIX-STREAM-STOPPED--CAPS"><span class="type">MIX_STREAM_STOPPED</span></a>. Fail code otherwise.
+</td>
+</tr>
+</tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="mix-audio-pause"></a><h3>mix_audio_pause ()</h3>
+<pre class="programlisting">MIX_RESULT mix_audio_pause (<a class="link" href="MixAudio.html" title="MixAudio">MixAudio</a> *mix);</pre>
+<p>
+If the stream is configured to <a class="link" href="MixAudio-MixAudioConfigParams.html#MIX-DECODE-DIRECTRENDER--CAPS"><span class="type">MIX_DECODE_DIRECTRENDER</span></a>, application uses this call to change the stream state from <a class="link" href="MixAudio.html#MIX-STREAM-PLAYING--CAPS"><span class="type">MIX_STREAM_PLAYING</span></a> to <a class="link" href="MixAudio.html#MIX-STREAM-PAUSED--CAPS"><span class="type">MIX_STREAM_PAUSED</span></a>. Note that this method returns successfully only when the resulting state reaches <a class="link" href="MixAudio.html#MIX-STREAM-PAUSED--CAPS"><span class="type">MIX_STREAM_PAUSED</span></a>. Meaning it will return a fail code if it is called in a state such as <a class="link" href="MixAudio.html#MIX-STREAM-STOPPED--CAPS"><span class="type">MIX_STREAM_STOPPED</span></a>, where transitioning to <a class="link" href="MixAudio.html#MIX-STREAM-PAUSED--CAPS"><span class="type">MIX_STREAM_PAUSED</span></a> is not possible.
+</p>
+<p>
+In some situations, where there is a potential race condition with the DRAINING operation, this method may return MIX_RESULT_NEED_RETRY to indicate the last operation result is inconclusive, and request the caller to call again.</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>mix</code></em> :</span></p></td>
+<td> <a class="link" href="MixAudio.html" title="MixAudio"><span class="type">MixAudio</span></a> object.
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>returns</code></em> :</span></p></td>
+<td> <span class="type">MIX_RESULT_SUCCESS</span> if <a class="link" href="MixAudio.html#MIX-STREAM-PAUSED--CAPS"><span class="type">MIX_STREAM_PAUSED</span></a> state is reached successfully. <span class="type">MIX_RESULT_WRONG_STATE</span> if operation is not allowed with the current state.
+</td>
+</tr>
+</tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="mix-audio-resume"></a><h3>mix_audio_resume ()</h3>
+<pre class="programlisting">MIX_RESULT mix_audio_resume (<a class="link" href="MixAudio.html" title="MixAudio">MixAudio</a> *mix);</pre>
+<p>
+If the stream is configured to <a class="link" href="MixAudio-MixAudioConfigParams.html#MIX-DECODE-DIRECTRENDER--CAPS"><span class="type">MIX_DECODE_DIRECTRENDER</span></a>, application uses this call to change the stream state to <a class="link" href="MixAudio.html#MIX-STREAM-PLAYING--CAPS"><span class="type">MIX_STREAM_PLAYING</span></a>. Note that this method returns successfully only when the resulting state reaches <a class="link" href="MixAudio.html#MIX-STREAM-PLAYING--CAPS"><span class="type">MIX_STREAM_PLAYING</span></a>. Meaning it will return a fail code if it is called in a state such as <a class="link" href="MixAudio.html#MIX-STREAM-DRAINING--CAPS"><span class="type">MIX_STREAM_DRAINING</span></a>, where transitioning to <a class="link" href="MixAudio.html#MIX-STREAM-PLAYING--CAPS"><span class="type">MIX_STREAM_PLAYING</span></a> is not possible.</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>mix</code></em> :</span></p></td>
+<td> <a class="link" href="MixAudio.html" title="MixAudio"><span class="type">MixAudio</span></a> object.
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>returns</code></em> :</span></p></td>
+<td> <span class="type">MIX_RESULT_SUCCESS</span> if <a class="link" href="MixAudio.html#MIX-STREAM-PLAYING--CAPS"><span class="type">MIX_STREAM_PLAYING</span></a> state is reached successfully. <span class="type">MIX_RESULT_WRONG_STATE</span> if operation is not allowed with the current state.
+</td>
+</tr>
+</tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="mix-audio-get-timestamp"></a><h3>mix_audio_get_timestamp ()</h3>
+<pre class="programlisting">MIX_RESULT mix_audio_get_timestamp (<a class="link" href="MixAudio.html" title="MixAudio">MixAudio</a> *mix,
+ guint64 *msecs);</pre>
+<p>
+This function can be used to retrieve the current timestamp for audio playback in milliseconds. The timestamp will reflect the amount of audio data rendered since the start of stream, or since the last stop. Note that the timestamp is always reset to zero when the stream enter <a class="link" href="MixAudio.html#MIX-STREAM-STOPPED--CAPS"><span class="type">MIX_STREAM_STOPPED</span></a> state. The timestamp is an unsigned long value, so the value will wrap when the timestamp reaches <span class="type">ULONG_MAX</span>. This function is only valid in direct-render mode.</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>mix</code></em> :</span></p></td>
+<td> <a class="link" href="MixAudio.html" title="MixAudio"><span class="type">MixAudio</span></a> object.
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>msecs</code></em> :</span></p></td>
+<td> play time in milliseconds.
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>returns</code></em> :</span></p></td>
+<td> <span class="type">MIX_RESULT_SUCCESS</span> if the timestamp is available. <span class="type">MIX_RESULT_WRONG_MODE</span> if operation is not allowed with the current mode.
+</td>
+</tr>
+</tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="mix-audio-set-mute"></a><h3>mix_audio_set_mute ()</h3>
+<pre class="programlisting">MIX_RESULT mix_audio_set_mute (<a class="link" href="MixAudio.html" title="MixAudio">MixAudio</a> *mix,
+ gboolean mute);</pre>
+<p>
+This function is used to mute and unmute audio playback. While muted, playback would continue but silently. This function is only valid when the session is configured to <a class="link" href="MixAudio-MixAudioConfigParams.html#MIX-DECODE-DIRECTRENDER--CAPS"><span class="type">MIX_DECODE_DIRECTRENDER</span></a> mode.
+</p>
+<p>
+Note that playback volume may change due to changes of global settings while the stream is muted.</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>mix</code></em> :</span></p></td>
+<td> <a class="link" href="MixAudio.html" title="MixAudio"><span class="type">MixAudio</span></a> object.
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>mute</code></em> :</span></p></td>
+<td> Turn mute on/off.
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>returns</code></em> :</span></p></td>
+<td> <span class="type">MIX_RESULT_SUCCESS</span> on success or other fail code.
+</td>
+</tr>
+</tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="mix-audio-get-mute"></a><h3>mix_audio_get_mute ()</h3>
+<pre class="programlisting">MIX_RESULT mix_audio_get_mute (<a class="link" href="MixAudio.html" title="MixAudio">MixAudio</a> *mix,
+ gboolean *muted);</pre>
+<p>
+Get Mute.</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>mix</code></em> :</span></p></td>
+<td> <a class="link" href="MixAudio.html" title="MixAudio"><span class="type">MixAudio</span></a> object.
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>muted</code></em> :</span></p></td>
+<td> current mute state.
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>returns</code></em> :</span></p></td>
+<td> <span class="type">MIX_RESULT_SUCCESS</span> on success or other fail code.
+</td>
+</tr>
+</tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="mix-audio-get-max-vol"></a><h3>mix_audio_get_max_vol ()</h3>
+<pre class="programlisting">MIX_RESULT mix_audio_get_max_vol (<a class="link" href="MixAudio.html" title="MixAudio">MixAudio</a> *mix,
+ gint *maxvol);</pre>
+<p>
+This function can be used if the application will be setting the audio volume using decibels instead of percentage. The maximum volume in decibels supported by the driver will be returned. This value can be used to determine the upper bound of the decibel range in calculating volume levels. This value is a signed integer. This function is only valid if stream is configured to <a class="link" href="MixAudio-MixAudioConfigParams.html#MIX-DECODE-DIRECTRENDER--CAPS"><span class="type">MIX_DECODE_DIRECTRENDER</span></a> mode.</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>mix</code></em> :</span></p></td>
+<td> <a class="link" href="MixAudio.html" title="MixAudio"><span class="type">MixAudio</span></a> object.
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>maxvol</code></em> :</span></p></td>
+<td> pointer to receive max volume.
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>returns</code></em> :</span></p></td>
+<td> <span class="type">MIX_RESULT_SUCCESS</span> on success or other fail code.
+</td>
+</tr>
+</tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="mix-audio-get-min-vol"></a><h3>mix_audio_get_min_vol ()</h3>
+<pre class="programlisting">MIX_RESULT mix_audio_get_min_vol (<a class="link" href="MixAudio.html" title="MixAudio">MixAudio</a> *mix,
+ gint *minvol);</pre>
+<p>
+This function can be used if the application will be setting the audio volume using decibels instead of percentage. The minimum volume in decibels supported by the driver will be returned. This value can be used to determine the lower bound of the decibel range in calculating volume levels. This value is a signed integer. This function is only valid if stream is configured to <a class="link" href="MixAudio-MixAudioConfigParams.html#MIX-DECODE-DIRECTRENDER--CAPS"><span class="type">MIX_DECODE_DIRECTRENDER</span></a> mode.</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>mix</code></em> :</span></p></td>
+<td> <a class="link" href="MixAudio.html" title="MixAudio"><span class="type">MixAudio</span></a> object.
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>minvol</code></em> :</span></p></td>
+<td> pointer to receive min volume.
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>returns</code></em> :</span></p></td>
+<td> <span class="type">MIX_RESULT_SUCCESS</span> on success or other fail code.
+</td>
+</tr>
+</tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="mix-audio-get-volume"></a><h3>mix_audio_get_volume ()</h3>
+<pre class="programlisting">MIX_RESULT mix_audio_get_volume (<a class="link" href="MixAudio.html" title="MixAudio">MixAudio</a> *mix,
+ gint *currvol,
+ <a class="link" href="MixAudio.html#MixVolType" title="enum MixVolType">MixVolType</a> type);</pre>
+<p>
+This function returns the current volume setting in either decibels or percentage. This function is only valid if stream is configured to <a class="link" href="MixAudio-MixAudioConfigParams.html#MIX-DECODE-DIRECTRENDER--CAPS"><span class="type">MIX_DECODE_DIRECTRENDER</span></a> mode.</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>mix</code></em> :</span></p></td>
+<td> <a class="link" href="MixAudio.html" title="MixAudio"><span class="type">MixAudio</span></a> object.
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>currvol</code></em> :</span></p></td>
+<td> Current volume. Note that if <em class="parameter"><code>type</code></em> equals <a class="link" href="MixAudio.html#MIX-VOL-PERCENT--CAPS"><span class="type">MIX_VOL_PERCENT</span></a>, this value will be return within the range of 0 to 100 inclusive.
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>type</code></em> :</span></p></td>
+<td> The type represented by <em class="parameter"><code>currvol</code></em>.
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>returns</code></em> :</span></p></td>
+<td> <span class="type">MIX_RESULT_SUCCESS</span> on success or other fail code.
+</td>
+</tr>
+</tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="mix-audio-set-volume"></a><h3>mix_audio_set_volume ()</h3>
+<pre class="programlisting">MIX_RESULT mix_audio_set_volume (<a class="link" href="MixAudio.html" title="MixAudio">MixAudio</a> *mix,
+ gint currvol,
+ <a class="link" href="MixAudio.html#MixVolType" title="enum MixVolType">MixVolType</a> type,
+ gulong msecs,
+ <a class="link" href="MixAudio.html#MixVolRamp" title="enum MixVolRamp">MixVolRamp</a> ramptype);</pre>
+<p>
+This function sets the current volume setting in either decibels or percentage. This function is only valid if the stream is configured to <a class="link" href="MixAudio-MixAudioConfigParams.html#MIX-DECODE-DIRECTRENDER--CAPS"><span class="type">MIX_DECODE_DIRECTRENDER</span></a> mode.</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>mix</code></em> :</span></p></td>
+<td> <a class="link" href="MixAudio.html" title="MixAudio"><span class="type">MixAudio</span></a> object.
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>currvol</code></em> :</span></p></td>
+<td> Current volume. Note that if <em class="parameter"><code>type</code></em> equals <a class="link" href="MixAudio.html#MIX-VOL-PERCENT--CAPS"><span class="type">MIX_VOL_PERCENT</span></a>, this value will be truncated to within the range of 0 to 100 inclusive.
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>type</code></em> :</span></p></td>
+<td> The type represented by <em class="parameter"><code>currvol</code></em>.
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>returns</code></em> :</span></p></td>
+<td> <span class="type">MIX_RESULT_SUCCESS</span> on success or other fail code.
+</td>
+</tr>
+</tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="mix-audio-deinitialize"></a><h3>mix_audio_deinitialize ()</h3>
+<pre class="programlisting">MIX_RESULT mix_audio_deinitialize (<a class="link" href="MixAudio.html" title="MixAudio">MixAudio</a> *mix);</pre>
+<p>
+This function will uninitialize a session with this MI-X instance. During this call, the SST device will be closed and resources including mmapped buffers will be freed. This function should be called by the application once <code class="function">mix_audio_init()</code> has been called.
+</p>
+<p>
+</p>
+<div class="note" style="margin-left: 0.5in; margin-right: 0.5in;">
+<h3 class="title">Intel Audio Manager Support</h3>
+The SST stream would be unregistered with Intel Audio Manager if it was registered.
+</div>
+<p>
+</p>
+<p>
+Note that this method should not fail normally. If it does return failure, the state of this object and the underlying mechanism is compromised and the application should not attempt to reuse this object.</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>mix</code></em> :</span></p></td>
+<td> <a class="link" href="MixAudio.html" title="MixAudio"><span class="type">MixAudio</span></a> object.
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>returns</code></em> :</span></p></td>
+<td> <span class="type">MIX_RESULT_SUCCESS</span> on success or other fail code.
+</td>
+</tr>
+</tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="mix-audio-get-stream-state"></a><h3>mix_audio_get_stream_state ()</h3>
+<pre class="programlisting">MIX_RESULT mix_audio_get_stream_state (<a class="link" href="MixAudio.html" title="MixAudio">MixAudio</a> *mix,
+ <a class="link" href="MixAudio.html#MixStreamState" title="enum MixStreamState">MixStreamState</a> *streamState);</pre>
+<p>
+Get the stream state of the current stream.</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>mix</code></em> :</span></p></td>
+<td> <a class="link" href="MixAudio.html" title="MixAudio"><span class="type">MixAudio</span></a> object.
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>streamState</code></em> :</span></p></td>
+<td> pointer to receive stream state.
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>returns</code></em> :</span></p></td>
+<td> <span class="type">MIX_RESULT</span>
+</td>
+</tr>
+</tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="mix-audio-get-state"></a><h3>mix_audio_get_state ()</h3>
+<pre class="programlisting">MIX_RESULT mix_audio_get_state (<a class="link" href="MixAudio.html" title="MixAudio">MixAudio</a> *mix,
+ <a class="link" href="MixAudio.html#MixState" title="enum MixState">MixState</a> *state);</pre>
+<p>
+Get the device state of the audio session.</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>mix</code></em> :</span></p></td>
+<td> <a class="link" href="MixAudio.html" title="MixAudio"><span class="type">MixAudio</span></a> object.
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>state</code></em> :</span></p></td>
+<td> pointer to receive state
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>returns</code></em> :</span></p></td>
+<td> Current device state.
+</td>
+</tr>
+</tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="mix-audio-am-is-enabled"></a><h3>mix_audio_am_is_enabled ()</h3>
+<pre class="programlisting">gboolean mix_audio_am_is_enabled (<a class="link" href="MixAudio.html" title="MixAudio">MixAudio</a> *mix);</pre>
+<p>
+This method checks if the current session is configured to use Intel Audio Manager. Note that Intel Audio Manager is considered disabled if the stream has not been initialized to use the service explicitly.</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>mix</code></em> :</span></p></td>
+<td> <a class="link" href="MixAudio.html" title="MixAudio"><span class="type">MixAudio</span></a> object.
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>returns</code></em> :</span></p></td>
+<td> boolean indicates if Intel Audio Manager is enabled with the current session.
+</td>
+</tr>
+</tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="mix-audio-is-am-available"></a><h3>mix_audio_is_am_available ()</h3>
+<pre class="programlisting">MIX_RESULT mix_audio_is_am_available (<a class="link" href="MixAudio.html" title="MixAudio">MixAudio</a> *mix,
+ <a class="link" href="MixAudio-mixaudiotypes.html#MixAudioManager" title="enum MixAudioManager">MixAudioManager</a> am,
+ gboolean *avail);</pre>
+<p>
+Check if AM is available.</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>mix</code></em> :</span></p></td>
+<td> TBD
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>am</code></em> :</span></p></td>
+<td> TBD
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>avail</code></em> :</span></p></td>
+<td> TBD
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>returns</code></em> :</span></p></td>
+<td> TBD
+</td>
+</tr>
+</tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="mix-audio-get-output-configuration"></a><h3>mix_audio_get_output_configuration ()</h3>
+<pre class="programlisting">MIX_RESULT mix_audio_get_output_configuration (<a class="link" href="MixAudio.html" title="MixAudio">MixAudio</a> *mix,
+ <a class="link" href="MixAudio-MixAudioConfigParams.html#MixAudioConfigParams" title="MixAudioConfigParams">MixAudioConfigParams</a> **audioconfigparams);</pre>
+<p>
+This method retrieves the current configuration. This can be called after initialization. If a stream has been configured, it returns the corresponding derived object of MixAudioConfigParams.</p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>mix</code></em> :</span></p></td>
+<td> <a class="link" href="MixAudio.html" title="MixAudio"><span class="type">MixAudio</span></a> object.
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>audioconfigparams</code></em> :</span></p></td>
+<td> double pointer to hold output configuration.
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>returns</code></em> :</span></p></td>
+<td> <span class="type">MIX_RESULT_SUCCESS</span> on success or other fail code.
+</td>
+</tr>
+</tbody>
+</table></div>
+</div>
+<hr>
+<div class="refsect2" lang="en">
+<a name="mix-audio-get-stream-byte-decoded"></a><h3>mix_audio_get_stream_byte_decoded ()</h3>
+<pre class="programlisting">MIX_RESULT mix_audio_get_stream_byte_decoded (<a class="link" href="MixAudio.html" title="MixAudio">MixAudio</a> *mix,
+ guint64 *byte);</pre>
+<p>
+Retrieve the cumulative bytes decoded.
+</p>
+<p>
+<em><span class="remark">Not Implemented.</span></em></p>
+<div class="variablelist"><table border="0">
+<col align="left" valign="top">
+<tbody>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>mix</code></em> :</span></p></td>
+<td> <a class="link" href="MixAudio.html" title="MixAudio"><span class="type">MixAudio</span></a> object.
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>byte</code></em> :</span></p></td>
+<td> stream bytes decoded.
+</td>
+</tr>
+<tr>
+<td><p><span class="term"><em class="parameter"><code>returns</code></em> :</span></p></td>
+<td> <span class="type">MIX_RESULT_SUCCESS</span> if the value is available. <span class="type">MIX_RESULT_WRONG_MODE</span> if operation is not allowed with the current mode.
+</td>
+</tr>
+</tbody>
+</table></div>
+</div>
+</div>
+</div>
+<div class="footer">
+<hr>
+ Generated by GTK-Doc V1.11</div>
+</body>
+</html>
diff --git a/mix_audio/docs/reference/MixAudio/html/api-index-full.html b/mix_audio/docs/reference/MixAudio/html/api-index-full.html
new file mode 100644
index 0000000..99c830e
--- /dev/null
+++ b/mix_audio/docs/reference/MixAudio/html/api-index-full.html
@@ -0,0 +1,259 @@
+<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
+<html>
+<head>
+<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
+<title>API Index</title>
+<meta name="generator" content="DocBook XSL Stylesheets V1.73.2">
+<link rel="start" href="index.html" title="MixAudio Reference Manual">
+<link rel="up" href="index.html" title="MixAudio Reference Manual">
+<link rel="prev" href="tree-hierarchy.html" title="Object Hierarchy">
+<meta name="generator" content="GTK-Doc V1.11 (XML mode)">
+<link rel="stylesheet" href="style.css" type="text/css">
+<link rel="chapter" href="ch01.html" title="Mix Audio API">
+<link rel="chapter" href="tree-hierarchy.html" title="Object Hierarchy">
+<link rel="index" href="api-index-full.html" title="API Index">
+</head>
+<body bgcolor="white" text="black" link="#0000FF" vlink="#840084" alink="#0000FF">
+<table class="navigation" id="top" width="100%" summary="Navigation header" cellpadding="2" cellspacing="2"><tr valign="middle">
+<td><a accesskey="p" href="tree-hierarchy.html"><img src="left.png" width="24" height="24" border="0" alt="Prev"></a></td>
+<td> </td>
+<td><a accesskey="h" href="index.html"><img src="home.png" width="24" height="24" border="0" alt="Home"></a></td>
+<th width="100%" align="center">MixAudio Reference Manual</th>
+<td> </td>
+</tr></table>
+<div class="index">
+<div class="titlepage"><div><div><h2 class="title">
+<a name="api-index-full"></a>API Index</h2></div></div></div>
+<div class="informaltable"><table width="100%" border="0">
+<colgroup>
+<col>
+<col>
+<col>
+</colgroup>
+<tbody><tr>
+<td>GObject</td>
+<td><a class="link" href="MixAudio.html" title="MixAudio">MixAudio</a></td>
+<td class="auto-generated"> </td>
+</tr></tbody>
+</table></div>
+<div class="index"><div class="indexdiv">
+<h3>M</h3>
+<dl>
+<dt>MixAACBitrateType, <a class="indexterm" href="MixAudio-MixAudioConfigParamsAAC.html#MixAACBitrateType">enum MixAACBitrateType</a>
+</dt>
+<dt>MixAACBitstreamFormt, <a class="indexterm" href="MixAudio-MixAudioConfigParamsAAC.html#MixAACBitstreamFormt">enum MixAACBitstreamFormt</a>
+</dt>
+<dt>MixAACMpegID, <a class="indexterm" href="MixAudio-MixAudioConfigParamsAAC.html#MixAACMpegID">enum MixAACMpegID</a>
+</dt>
+<dt>MixAACProfile, <a class="indexterm" href="MixAudio-MixAudioConfigParamsAAC.html#MixAACProfile">enum MixAACProfile</a>
+</dt>
+<dt>MixACPBPSType, <a class="indexterm" href="MixAudio-MixAudioConfigParams.html#MixACPBPSType">enum MixACPBPSType</a>
+</dt>
+<dt>MixACPOpAlign, <a class="indexterm" href="MixAudio-MixAudioConfigParams.html#MixACPOpAlign">enum MixACPOpAlign</a>
+</dt>
+<dt>MixAudio, <a class="indexterm" href="MixAudio.html#MixAudio-struct">MixAudio</a>
+</dt>
+<dt>MixAudioConfigParams, <a class="indexterm" href="MixAudio-MixAudioConfigParams.html#MixAudioConfigParams">MixAudioConfigParams</a>
+</dt>
+<dt>MixAudioConfigParamsAAC, <a class="indexterm" href="MixAudio-MixAudioConfigParamsAAC.html#MixAudioConfigParamsAAC">MixAudioConfigParamsAAC</a>
+</dt>
+<dt>MixAudioConfigParamsMP3, <a class="indexterm" href="MixAudio-MixAudioConfigParamsMP3.html#MixAudioConfigParamsMP3">MixAudioConfigParamsMP3</a>
+</dt>
+<dt>MixAudioConfigParamsWMA, <a class="indexterm" href="MixAudio-MixAudioConfigParamsWMA.html#MixAudioConfigParamsWMA">MixAudioConfigParamsWMA</a>
+</dt>
+<dt>MixAudioInitParams, <a class="indexterm" href="MixAudio-MixAudioInitParams.html#MixAudioInitParams">MixAudioInitParams</a>
+</dt>
+<dt>MixAudioManager, <a class="indexterm" href="MixAudio-mixaudiotypes.html#MixAudioManager">enum MixAudioManager</a>
+</dt>
+<dt>MixAudioWMAVersion, <a class="indexterm" href="MixAudio-MixAudioConfigParamsWMA.html#MixAudioWMAVersion">enum MixAudioWMAVersion</a>
+</dt>
+<dt>MixCodecMode, <a class="indexterm" href="MixAudio.html#MixCodecMode">enum MixCodecMode</a>
+</dt>
+<dt>MixDecodeMode, <a class="indexterm" href="MixAudio-MixAudioConfigParams.html#MixDecodeMode">enum MixDecodeMode</a>
+</dt>
+<dt>MixDeviceState, <a class="indexterm" href="MixAudio.html#MixDeviceState">enum MixDeviceState</a>
+</dt>
+<dt>MixIOVec, <a class="indexterm" href="MixAudio.html#MixIOVec">MixIOVec</a>
+</dt>
+<dt>MixState, <a class="indexterm" href="MixAudio.html#MixState">enum MixState</a>
+</dt>
+<dt>MixStreamState, <a class="indexterm" href="MixAudio.html#MixStreamState">enum MixStreamState</a>
+</dt>
+<dt>MixVolRamp, <a class="indexterm" href="MixAudio.html#MixVolRamp">enum MixVolRamp</a>
+</dt>
+<dt>MixVolType, <a class="indexterm" href="MixAudio.html#MixVolType">enum MixVolType</a>
+</dt>
+<dt>MIX_ACP_AAC_CHANNELS, <a class="indexterm" href="MixAudio-MixAudioConfigParamsAAC.html#MIX-ACP-AAC-CHANNELS--CAPS">MIX_ACP_AAC_CHANNELS()</a>
+</dt>
+<dt>MIX_ACP_AAC_CRC, <a class="indexterm" href="MixAudio-MixAudioConfigParamsAAC.html#MIX-ACP-AAC-CRC--CAPS">MIX_ACP_AAC_CRC()</a>
+</dt>
+<dt>mix_acp_aac_get_aac_profile, <a class="indexterm" href="MixAudio-MixAudioConfigParamsAAC.html#mix-acp-aac-get-aac-profile">mix_acp_aac_get_aac_profile ()</a>
+</dt>
+<dt>mix_acp_aac_get_aot, <a class="indexterm" href="MixAudio-MixAudioConfigParamsAAC.html#mix-acp-aac-get-aot">mix_acp_aac_get_aot ()</a>
+</dt>
+<dt>mix_acp_aac_get_bit_rate_type, <a class="indexterm" href="MixAudio-MixAudioConfigParamsAAC.html#mix-acp-aac-get-bit-rate-type">mix_acp_aac_get_bit_rate_type ()</a>
+</dt>
+<dt>mix_acp_aac_get_bit_stream_format, <a class="indexterm" href="MixAudio-MixAudioConfigParamsAAC.html#mix-acp-aac-get-bit-stream-format">mix_acp_aac_get_bit_stream_format ()</a>
+</dt>
+<dt>mix_acp_aac_get_mpeg_id, <a class="indexterm" href="MixAudio-MixAudioConfigParamsAAC.html#mix-acp-aac-get-mpeg-id">mix_acp_aac_get_mpeg_id ()</a>
+</dt>
+<dt>mix_acp_aac_new, <a class="indexterm" href="MixAudio-MixAudioConfigParamsAAC.html#mix-acp-aac-new">mix_acp_aac_new ()</a>
+</dt>
+<dt>MIX_ACP_AAC_PCE_FLAG, <a class="indexterm" href="MixAudio-MixAudioConfigParamsAAC.html#MIX-ACP-AAC-PCE-FLAG--CAPS">MIX_ACP_AAC_PCE_FLAG()</a>
+</dt>
+<dt>MIX_ACP_AAC_PS_FLAG, <a class="indexterm" href="MixAudio-MixAudioConfigParamsAAC.html#MIX-ACP-AAC-PS-FLAG--CAPS">MIX_ACP_AAC_PS_FLAG()</a>
+</dt>
+<dt>mix_acp_aac_ref, <a class="indexterm" href="MixAudio-MixAudioConfigParamsAAC.html#mix-acp-aac-ref">mix_acp_aac_ref ()</a>
+</dt>
+<dt>MIX_ACP_AAC_SAMPLE_RATE, <a class="indexterm" href="MixAudio-MixAudioConfigParamsAAC.html#MIX-ACP-AAC-SAMPLE-RATE--CAPS">MIX_ACP_AAC_SAMPLE_RATE()</a>
+</dt>
+<dt>MIX_ACP_AAC_SBR_FLAG, <a class="indexterm" href="MixAudio-MixAudioConfigParamsAAC.html#MIX-ACP-AAC-SBR-FLAG--CAPS">MIX_ACP_AAC_SBR_FLAG()</a>
+</dt>
+<dt>mix_acp_aac_set_aac_profile, <a class="indexterm" href="MixAudio-MixAudioConfigParamsAAC.html#mix-acp-aac-set-aac-profile">mix_acp_aac_set_aac_profile ()</a>
+</dt>
+<dt>mix_acp_aac_set_aot, <a class="indexterm" href="MixAudio-MixAudioConfigParamsAAC.html#mix-acp-aac-set-aot">mix_acp_aac_set_aot ()</a>
+</dt>
+<dt>mix_acp_aac_set_bit_rate_type, <a class="indexterm" href="MixAudio-MixAudioConfigParamsAAC.html#mix-acp-aac-set-bit-rate-type">mix_acp_aac_set_bit_rate_type ()</a>
+</dt>
+<dt>mix_acp_aac_set_bit_stream_format, <a class="indexterm" href="MixAudio-MixAudioConfigParamsAAC.html#mix-acp-aac-set-bit-stream-format">mix_acp_aac_set_bit_stream_format ()</a>
+</dt>
+<dt>mix_acp_aac_set_mpeg_id, <a class="indexterm" href="MixAudio-MixAudioConfigParamsAAC.html#mix-acp-aac-set-mpeg-id">mix_acp_aac_set_mpeg_id ()</a>
+</dt>
+<dt>mix_acp_aac_unref, <a class="indexterm" href="MixAudio-MixAudioConfigParamsAAC.html#mix-acp-aac-unref">mix_acp_aac_unref()</a>
+</dt>
+<dt>MIX_ACP_BITRATE, <a class="indexterm" href="MixAudio-MixAudioConfigParams.html#MIX-ACP-BITRATE--CAPS">MIX_ACP_BITRATE()</a>
+</dt>
+<dt>MIX_ACP_DECODEMODE, <a class="indexterm" href="MixAudio-MixAudioConfigParams.html#MIX-ACP-DECODEMODE--CAPS">MIX_ACP_DECODEMODE()</a>
+</dt>
+<dt>mix_acp_get_audio_manager, <a class="indexterm" href="MixAudio-MixAudioConfigParams.html#mix-acp-get-audio-manager">mix_acp_get_audio_manager ()</a>
+</dt>
+<dt>mix_acp_get_bps, <a class="indexterm" href="MixAudio-MixAudioConfigParams.html#mix-acp-get-bps">mix_acp_get_bps ()</a>
+</dt>
+<dt>mix_acp_get_decodemode, <a class="indexterm" href="MixAudio-MixAudioConfigParams.html#mix-acp-get-decodemode">mix_acp_get_decodemode ()</a>
+</dt>
+<dt>mix_acp_get_op_align, <a class="indexterm" href="MixAudio-MixAudioConfigParams.html#mix-acp-get-op-align">mix_acp_get_op_align ()</a>
+</dt>
+<dt>mix_acp_get_streamname, <a class="indexterm" href="MixAudio-MixAudioConfigParams.html#mix-acp-get-streamname">mix_acp_get_streamname ()</a>
+</dt>
+<dt>mix_acp_is_streamname_valid, <a class="indexterm" href="MixAudio-MixAudioConfigParams.html#mix-acp-is-streamname-valid">mix_acp_is_streamname_valid ()</a>
+</dt>
+<dt>MIX_ACP_MP3_CRC, <a class="indexterm" href="MixAudio-MixAudioConfigParamsMP3.html#MIX-ACP-MP3-CRC--CAPS">MIX_ACP_MP3_CRC()</a>
+</dt>
+<dt>MIX_ACP_MP3_MPEG_FORMAT, <a class="indexterm" href="MixAudio-MixAudioConfigParamsMP3.html#MIX-ACP-MP3-MPEG-FORMAT--CAPS">MIX_ACP_MP3_MPEG_FORMAT()</a>
+</dt>
+<dt>MIX_ACP_MP3_MPEG_LAYER, <a class="indexterm" href="MixAudio-MixAudioConfigParamsMP3.html#MIX-ACP-MP3-MPEG-LAYER--CAPS">MIX_ACP_MP3_MPEG_LAYER()</a>
+</dt>
+<dt>mix_acp_mp3_new, <a class="indexterm" href="MixAudio-MixAudioConfigParamsMP3.html#mix-acp-mp3-new">mix_acp_mp3_new ()</a>
+</dt>
+<dt>mix_acp_mp3_ref, <a class="indexterm" href="MixAudio-MixAudioConfigParamsMP3.html#mix-acp-mp3-ref">mix_acp_mp3_ref ()</a>
+</dt>
+<dt>mix_acp_mp3_unref, <a class="indexterm" href="MixAudio-MixAudioConfigParamsMP3.html#mix-acp-mp3-unref">mix_acp_mp3_unref()</a>
+</dt>
+<dt>mix_acp_new, <a class="indexterm" href="MixAudio-MixAudioConfigParams.html#mix-acp-new">mix_acp_new ()</a>
+</dt>
+<dt>MIX_ACP_NUM_CHANNELS, <a class="indexterm" href="MixAudio-MixAudioConfigParams.html#MIX-ACP-NUM-CHANNELS--CAPS">MIX_ACP_NUM_CHANNELS()</a>
+</dt>
+<dt>mix_acp_ref, <a class="indexterm" href="MixAudio-MixAudioConfigParams.html#mix-acp-ref">mix_acp_ref ()</a>
+</dt>
+<dt>MIX_ACP_SAMPLE_FREQ, <a class="indexterm" href="MixAudio-MixAudioConfigParams.html#MIX-ACP-SAMPLE-FREQ--CAPS">MIX_ACP_SAMPLE_FREQ()</a>
+</dt>
+<dt>mix_acp_set_audio_manager, <a class="indexterm" href="MixAudio-MixAudioConfigParams.html#mix-acp-set-audio-manager">mix_acp_set_audio_manager ()</a>
+</dt>
+<dt>mix_acp_set_bps, <a class="indexterm" href="MixAudio-MixAudioConfigParams.html#mix-acp-set-bps">mix_acp_set_bps ()</a>
+</dt>
+<dt>mix_acp_set_decodemode, <a class="indexterm" href="MixAudio-MixAudioConfigParams.html#mix-acp-set-decodemode">mix_acp_set_decodemode ()</a>
+</dt>
+<dt>mix_acp_set_op_align, <a class="indexterm" href="MixAudio-MixAudioConfigParams.html#mix-acp-set-op-align">mix_acp_set_op_align ()</a>
+</dt>
+<dt>mix_acp_set_streamname, <a class="indexterm" href="MixAudio-MixAudioConfigParams.html#mix-acp-set-streamname">mix_acp_set_streamname ()</a>
+</dt>
+<dt>mix_acp_unref, <a class="indexterm" href="MixAudio-MixAudioConfigParams.html#mix-acp-unref">mix_acp_unref()</a>
+</dt>
+<dt>MIX_ACP_WMA_BLOCK_ALIGN, <a class="indexterm" href="MixAudio-MixAudioConfigParamsWMA.html#MIX-ACP-WMA-BLOCK-ALIGN--CAPS">MIX_ACP_WMA_BLOCK_ALIGN()</a>
+</dt>
+<dt>MIX_ACP_WMA_CHANNEL_MASK, <a class="indexterm" href="MixAudio-MixAudioConfigParamsWMA.html#MIX-ACP-WMA-CHANNEL-MASK--CAPS">MIX_ACP_WMA_CHANNEL_MASK()</a>
+</dt>
+<dt>MIX_ACP_WMA_ENCODE_OPT, <a class="indexterm" href="MixAudio-MixAudioConfigParamsWMA.html#MIX-ACP-WMA-ENCODE-OPT--CAPS">MIX_ACP_WMA_ENCODE_OPT()</a>
+</dt>
+<dt>MIX_ACP_WMA_FORMAT_TAG, <a class="indexterm" href="MixAudio-MixAudioConfigParamsWMA.html#MIX-ACP-WMA-FORMAT-TAG--CAPS">MIX_ACP_WMA_FORMAT_TAG()</a>
+</dt>
+<dt>mix_acp_wma_get_version, <a class="indexterm" href="MixAudio-MixAudioConfigParamsWMA.html#mix-acp-wma-get-version">mix_acp_wma_get_version ()</a>
+</dt>
+<dt>mix_acp_wma_new, <a class="indexterm" href="MixAudio-MixAudioConfigParamsWMA.html#mix-acp-wma-new">mix_acp_wma_new ()</a>
+</dt>
+<dt>MIX_ACP_WMA_PCM_BIT_WIDTH, <a class="indexterm" href="MixAudio-MixAudioConfigParamsWMA.html#MIX-ACP-WMA-PCM-BIT-WIDTH--CAPS">MIX_ACP_WMA_PCM_BIT_WIDTH()</a>
+</dt>
+<dt>mix_acp_wma_ref, <a class="indexterm" href="MixAudio-MixAudioConfigParamsWMA.html#mix-acp-wma-ref">mix_acp_wma_ref ()</a>
+</dt>
+<dt>mix_acp_wma_set_version, <a class="indexterm" href="MixAudio-MixAudioConfigParamsWMA.html#mix-acp-wma-set-version">mix_acp_wma_set_version ()</a>
+</dt>
+<dt>mix_acp_wma_unref, <a class="indexterm" href="MixAudio-MixAudioConfigParamsWMA.html#mix-acp-wma-unref">mix_acp_wma_unref()</a>
+</dt>
+<dt>mix_aip_new, <a class="indexterm" href="MixAudio-MixAudioInitParams.html#mix-aip-new">mix_aip_new ()</a>
+</dt>
+<dt>mix_aip_ref, <a class="indexterm" href="MixAudio-MixAudioInitParams.html#mix-aip-ref">mix_aip_ref ()</a>
+</dt>
+<dt>mix_aip_unref, <a class="indexterm" href="MixAudio-MixAudioInitParams.html#mix-aip-unref">mix_aip_unref()</a>
+</dt>
+<dt>mix_audio_am_is_enabled, <a class="indexterm" href="MixAudio.html#mix-audio-am-is-enabled">mix_audio_am_is_enabled ()</a>
+</dt>
+<dt>mix_audio_capture_encode, <a class="indexterm" href="MixAudio.html#mix-audio-capture-encode">mix_audio_capture_encode ()</a>
+</dt>
+<dt>mix_audio_configure, <a class="indexterm" href="MixAudio.html#mix-audio-configure">mix_audio_configure ()</a>
+</dt>
+<dt>mix_audio_decode, <a class="indexterm" href="MixAudio.html#mix-audio-decode">mix_audio_decode ()</a>
+</dt>
+<dt>mix_audio_deinitialize, <a class="indexterm" href="MixAudio.html#mix-audio-deinitialize">mix_audio_deinitialize ()</a>
+</dt>
+<dt>mix_audio_get_max_vol, <a class="indexterm" href="MixAudio.html#mix-audio-get-max-vol">mix_audio_get_max_vol ()</a>
+</dt>
+<dt>mix_audio_get_min_vol, <a class="indexterm" href="MixAudio.html#mix-audio-get-min-vol">mix_audio_get_min_vol ()</a>
+</dt>
+<dt>mix_audio_get_mute, <a class="indexterm" href="MixAudio.html#mix-audio-get-mute">mix_audio_get_mute ()</a>
+</dt>
+<dt>mix_audio_get_output_configuration, <a class="indexterm" href="MixAudio.html#mix-audio-get-output-configuration">mix_audio_get_output_configuration ()</a>
+</dt>
+<dt>mix_audio_get_state, <a class="indexterm" href="MixAudio.html#mix-audio-get-state">mix_audio_get_state ()</a>
+</dt>
+<dt>mix_audio_get_stream_byte_decoded, <a class="indexterm" href="MixAudio.html#mix-audio-get-stream-byte-decoded">mix_audio_get_stream_byte_decoded ()</a>
+</dt>
+<dt>mix_audio_get_stream_state, <a class="indexterm" href="MixAudio.html#mix-audio-get-stream-state">mix_audio_get_stream_state ()</a>
+</dt>
+<dt>mix_audio_get_timestamp, <a class="indexterm" href="MixAudio.html#mix-audio-get-timestamp">mix_audio_get_timestamp ()</a>
+</dt>
+<dt>mix_audio_get_version, <a class="indexterm" href="MixAudio.html#mix-audio-get-version">mix_audio_get_version ()</a>
+</dt>
+<dt>mix_audio_get_volume, <a class="indexterm" href="MixAudio.html#mix-audio-get-volume">mix_audio_get_volume ()</a>
+</dt>
+<dt>mix_audio_initialize, <a class="indexterm" href="MixAudio.html#mix-audio-initialize">mix_audio_initialize ()</a>
+</dt>
+<dt>mix_audio_is_am_available, <a class="indexterm" href="MixAudio.html#mix-audio-is-am-available">mix_audio_is_am_available ()</a>
+</dt>
+<dt>mix_audio_new, <a class="indexterm" href="MixAudio.html#mix-audio-new">mix_audio_new ()</a>
+</dt>
+<dt>mix_audio_pause, <a class="indexterm" href="MixAudio.html#mix-audio-pause">mix_audio_pause ()</a>
+</dt>
+<dt>mix_audio_ref, <a class="indexterm" href="MixAudio.html#mix-audio-ref">mix_audio_ref ()</a>
+</dt>
+<dt>mix_audio_resume, <a class="indexterm" href="MixAudio.html#mix-audio-resume">mix_audio_resume ()</a>
+</dt>
+<dt>mix_audio_set_mute, <a class="indexterm" href="MixAudio.html#mix-audio-set-mute">mix_audio_set_mute ()</a>
+</dt>
+<dt>mix_audio_set_volume, <a class="indexterm" href="MixAudio.html#mix-audio-set-volume">mix_audio_set_volume ()</a>
+</dt>
+<dt>mix_audio_start, <a class="indexterm" href="MixAudio.html#mix-audio-start">mix_audio_start ()</a>
+</dt>
+<dt>mix_audio_stop_drain, <a class="indexterm" href="MixAudio.html#mix-audio-stop-drain">mix_audio_stop_drain ()</a>
+</dt>
+<dt>mix_audio_stop_drop, <a class="indexterm" href="MixAudio.html#mix-audio-stop-drop">mix_audio_stop_drop ()</a>
+</dt>
+<dt>mix_audio_unref, <a class="indexterm" href="MixAudio.html#mix-audio-unref">mix_audio_unref()</a>
+</dt>
+</dl>
+</div></div>
+</div>
+<div class="footer">
+<hr>
+ Generated by GTK-Doc V1.11</div>
+</body>
+</html>
diff --git a/mix_audio/docs/reference/MixAudio/html/ch01.html b/mix_audio/docs/reference/MixAudio/html/ch01.html
new file mode 100644
index 0000000..2ab25e8
--- /dev/null
+++ b/mix_audio/docs/reference/MixAudio/html/ch01.html
@@ -0,0 +1,56 @@
+<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
+<html>
+<head>
+<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
+<title>Mix Audio API</title>
+<meta name="generator" content="DocBook XSL Stylesheets V1.73.2">
+<link rel="start" href="index.html" title="MixAudio Reference Manual">
+<link rel="up" href="index.html" title="MixAudio Reference Manual">
+<link rel="prev" href="index.html" title="MixAudio Reference Manual">
+<link rel="next" href="MixAudio-MixAudioConfigParamsAAC.html" title="MixAudioConfigParamsAAC">
+<meta name="generator" content="GTK-Doc V1.11 (XML mode)">
+<link rel="stylesheet" href="style.css" type="text/css">
+<link rel="chapter" href="ch01.html" title="Mix Audio API">
+<link rel="chapter" href="tree-hierarchy.html" title="Object Hierarchy">
+<link rel="index" href="api-index-full.html" title="API Index">
+</head>
+<body bgcolor="white" text="black" link="#0000FF" vlink="#840084" alink="#0000FF">
+<table class="navigation" id="top" width="100%" summary="Navigation header" cellpadding="2" cellspacing="2"><tr valign="middle">
+<td><a accesskey="p" href="index.html"><img src="left.png" width="24" height="24" border="0" alt="Prev"></a></td>
+<td> </td>
+<td><a accesskey="h" href="index.html"><img src="home.png" width="24" height="24" border="0" alt="Home"></a></td>
+<th width="100%" align="center">MixAudio Reference Manual</th>
+<td><a accesskey="n" href="MixAudio-MixAudioConfigParamsAAC.html"><img src="right.png" width="24" height="24" border="0" alt="Next"></a></td>
+</tr></table>
+<div class="chapter" lang="en">
+<div class="titlepage"><div><div><h2 class="title">
+<a name="id2563229"></a>Mix Audio API</h2></div></div></div>
+<div class="toc"><dl>
+<dt>
+<span class="refentrytitle"><a href="MixAudio-MixAudioConfigParamsAAC.html">MixAudioConfigParamsAAC</a></span><span class="refpurpose"> — Audio configuration parameters for AAC-LC, HEAAC v1, and HEAAC v2 audio format.</span>
+</dt>
+<dt>
+<span class="refentrytitle"><a href="MixAudio-MixAudioConfigParamsMP3.html">MixAudioConfigParamsMP3</a></span><span class="refpurpose"> — Audio configuration parameters for MP3 audio.</span>
+</dt>
+<dt>
+<span class="refentrytitle"><a href="MixAudio-MixAudioConfigParamsWMA.html">MixAudioConfigParamsWMA</a></span><span class="refpurpose"> — Audio parameters for WMA audio.</span>
+</dt>
+<dt>
+<span class="refentrytitle"><a href="MixAudio-MixAudioConfigParams.html">MixAudioConfigParams</a></span><span class="refpurpose"> — MixAudio configuration parameters object.</span>
+</dt>
+<dt>
+<span class="refentrytitle"><a href="MixAudio-MixAudioInitParams.html">MixAudioInitParams</a></span><span class="refpurpose"> — Initialization parameters object.</span>
+</dt>
+<dt>
+<span class="refentrytitle"><a href="MixAudio.html">MixAudio</a></span><span class="refpurpose"> — Object to support a single stream playback using hardware accelerated decoder.</span>
+</dt>
+<dt>
+<span class="refentrytitle"><a href="MixAudio-mixaudiotypes.html">Mix Audio Types</a></span><span class="refpurpose"> — Miscellaneous types used by <a class="link" href="MixAudio.html" title="MixAudio"><span class="type">MixAudio</span></a> API.</span>
+</dt>
+</dl></div>
+</div>
+<div class="footer">
+<hr>
+ Generated by GTK-Doc V1.11</div>
+</body>
+</html>
diff --git a/mix_audio/docs/reference/MixAudio/html/index.html b/mix_audio/docs/reference/MixAudio/html/index.html
new file mode 100644
index 0000000..ab60f03
--- /dev/null
+++ b/mix_audio/docs/reference/MixAudio/html/index.html
@@ -0,0 +1,60 @@
+<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
+<html>
+<head>
+<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
+<title>MixAudio Reference Manual</title>
+<meta name="generator" content="DocBook XSL Stylesheets V1.73.2">
+<link rel="start" href="index.html" title="MixAudio Reference Manual">
+<link rel="next" href="ch01.html" title="Mix Audio API">
+<meta name="generator" content="GTK-Doc V1.11 (XML mode)">
+<link rel="stylesheet" href="style.css" type="text/css">
+<link rel="chapter" href="ch01.html" title="Mix Audio API">
+<link rel="chapter" href="tree-hierarchy.html" title="Object Hierarchy">
+<link rel="index" href="api-index-full.html" title="API Index">
+</head>
+<body bgcolor="white" text="black" link="#0000FF" vlink="#840084" alink="#0000FF">
+<div class="book" lang="en">
+<div class="titlepage">
+<div>
+<div><table class="navigation" id="top" width="100%" cellpadding="2" cellspacing="0"><tr><th valign="middle"><p class="title">MixAudio Reference Manual</p></th></tr></table></div>
+<div><p class="releaseinfo">
+ MixAudio version 0.3
+
+ </p></div>
+</div>
+<hr>
+</div>
+<div class="toc"><dl>
+<dt><span class="chapter"><a href="ch01.html">Mix Audio API</a></span></dt>
+<dd><dl>
+<dt>
+<span class="refentrytitle"><a href="MixAudio-MixAudioConfigParamsAAC.html">MixAudioConfigParamsAAC</a></span><span class="refpurpose"> — Audio configuration parameters for AAC-LC, HEAAC v1, and HEAAC v2 audio format.</span>
+</dt>
+<dt>
+<span class="refentrytitle"><a href="MixAudio-MixAudioConfigParamsMP3.html">MixAudioConfigParamsMP3</a></span><span class="refpurpose"> — Audio configuration parameters for MP3 audio.</span>
+</dt>
+<dt>
+<span class="refentrytitle"><a href="MixAudio-MixAudioConfigParamsWMA.html">MixAudioConfigParamsWMA</a></span><span class="refpurpose"> — Audio parameters for WMA audio.</span>
+</dt>
+<dt>
+<span class="refentrytitle"><a href="MixAudio-MixAudioConfigParams.html">MixAudioConfigParams</a></span><span class="refpurpose"> — MixAudio configuration parameters object.</span>
+</dt>
+<dt>
+<span class="refentrytitle"><a href="MixAudio-MixAudioInitParams.html">MixAudioInitParams</a></span><span class="refpurpose"> — Initialization parameters object.</span>
+</dt>
+<dt>
+<span class="refentrytitle"><a href="MixAudio.html">MixAudio</a></span><span class="refpurpose"> — Object to support a single stream playback using hardware accelerated decoder.</span>
+</dt>
+<dt>
+<span class="refentrytitle"><a href="MixAudio-mixaudiotypes.html">Mix Audio Types</a></span><span class="refpurpose"> — Miscellaneous types used by <a class="link" href="MixAudio.html" title="MixAudio"><span class="type">MixAudio</span></a> API.</span>
+</dt>
+</dl></dd>
+<dt><span class="chapter"><a href="tree-hierarchy.html">Object Hierarchy</a></span></dt>
+<dt><span class="index"><a href="api-index-full.html">API Index</a></span></dt>
+</dl></div>
+</div>
+<div class="footer">
+<hr>
+ Generated by GTK-Doc V1.11</div>
+</body>
+</html>
diff --git a/mix_audio/docs/reference/MixAudio/html/index.sgml b/mix_audio/docs/reference/MixAudio/html/index.sgml
new file mode 100644
index 0000000..0cc1a2a
--- /dev/null
+++ b/mix_audio/docs/reference/MixAudio/html/index.sgml
@@ -0,0 +1,134 @@
+<ANCHOR id="MixAudio-MixAudioConfigParamsAAC" href="MixAudio/MixAudio-MixAudioConfigParamsAAC.html">
+<ANCHOR id="MixAudio-MixAudioConfigParamsAAC.synopsis" href="MixAudio/MixAudio-MixAudioConfigParamsAAC.html#MixAudio-MixAudioConfigParamsAAC.synopsis">
+<ANCHOR id="MixAudio-MixAudioConfigParamsAAC.description" href="MixAudio/MixAudio-MixAudioConfigParamsAAC.html#MixAudio-MixAudioConfigParamsAAC.description">
+<ANCHOR id="MixAudio-MixAudioConfigParamsAAC.details" href="MixAudio/MixAudio-MixAudioConfigParamsAAC.html#MixAudio-MixAudioConfigParamsAAC.details">
+<ANCHOR id="MixAACBitrateType" href="MixAudio/MixAudio-MixAudioConfigParamsAAC.html#MixAACBitrateType">
+<ANCHOR id="MixAACBitstreamFormt" href="MixAudio/MixAudio-MixAudioConfigParamsAAC.html#MixAACBitstreamFormt">
+<ANCHOR id="MixAACProfile" href="MixAudio/MixAudio-MixAudioConfigParamsAAC.html#MixAACProfile">
+<ANCHOR id="MixAACMpegID" href="MixAudio/MixAudio-MixAudioConfigParamsAAC.html#MixAACMpegID">
+<ANCHOR id="MixAudioConfigParamsAAC" href="MixAudio/MixAudio-MixAudioConfigParamsAAC.html#MixAudioConfigParamsAAC">
+<ANCHOR id="mix-acp-aac-new" href="MixAudio/MixAudio-MixAudioConfigParamsAAC.html#mix-acp-aac-new">
+<ANCHOR id="mix-acp-aac-ref" href="MixAudio/MixAudio-MixAudioConfigParamsAAC.html#mix-acp-aac-ref">
+<ANCHOR id="mix-acp-aac-unref" href="MixAudio/MixAudio-MixAudioConfigParamsAAC.html#mix-acp-aac-unref">
+<ANCHOR id="mix-acp-aac-set-mpeg-id" href="MixAudio/MixAudio-MixAudioConfigParamsAAC.html#mix-acp-aac-set-mpeg-id">
+<ANCHOR id="mix-acp-aac-get-mpeg-id" href="MixAudio/MixAudio-MixAudioConfigParamsAAC.html#mix-acp-aac-get-mpeg-id">
+<ANCHOR id="MIX-ACP-AAC-CRC--CAPS" href="MixAudio/MixAudio-MixAudioConfigParamsAAC.html#MIX-ACP-AAC-CRC--CAPS">
+<ANCHOR id="mix-acp-aac-set-aot" href="MixAudio/MixAudio-MixAudioConfigParamsAAC.html#mix-acp-aac-set-aot">
+<ANCHOR id="mix-acp-aac-get-aot" href="MixAudio/MixAudio-MixAudioConfigParamsAAC.html#mix-acp-aac-get-aot">
+<ANCHOR id="MIX-ACP-AAC-SBR-FLAG--CAPS" href="MixAudio/MixAudio-MixAudioConfigParamsAAC.html#MIX-ACP-AAC-SBR-FLAG--CAPS">
+<ANCHOR id="MIX-ACP-AAC-PS-FLAG--CAPS" href="MixAudio/MixAudio-MixAudioConfigParamsAAC.html#MIX-ACP-AAC-PS-FLAG--CAPS">
+<ANCHOR id="MIX-ACP-AAC-PCE-FLAG--CAPS" href="MixAudio/MixAudio-MixAudioConfigParamsAAC.html#MIX-ACP-AAC-PCE-FLAG--CAPS">
+<ANCHOR id="MIX-ACP-AAC-SAMPLE-RATE--CAPS" href="MixAudio/MixAudio-MixAudioConfigParamsAAC.html#MIX-ACP-AAC-SAMPLE-RATE--CAPS">
+<ANCHOR id="MIX-ACP-AAC-CHANNELS--CAPS" href="MixAudio/MixAudio-MixAudioConfigParamsAAC.html#MIX-ACP-AAC-CHANNELS--CAPS">
+<ANCHOR id="mix-acp-aac-get-bit-stream-format" href="MixAudio/MixAudio-MixAudioConfigParamsAAC.html#mix-acp-aac-get-bit-stream-format">
+<ANCHOR id="mix-acp-aac-set-bit-stream-format" href="MixAudio/MixAudio-MixAudioConfigParamsAAC.html#mix-acp-aac-set-bit-stream-format">
+<ANCHOR id="mix-acp-aac-get-aac-profile" href="MixAudio/MixAudio-MixAudioConfigParamsAAC.html#mix-acp-aac-get-aac-profile">
+<ANCHOR id="mix-acp-aac-set-aac-profile" href="MixAudio/MixAudio-MixAudioConfigParamsAAC.html#mix-acp-aac-set-aac-profile">
+<ANCHOR id="mix-acp-aac-get-bit-rate-type" href="MixAudio/MixAudio-MixAudioConfigParamsAAC.html#mix-acp-aac-get-bit-rate-type">
+<ANCHOR id="mix-acp-aac-set-bit-rate-type" href="MixAudio/MixAudio-MixAudioConfigParamsAAC.html#mix-acp-aac-set-bit-rate-type">
+<ANCHOR id="MixAudio-MixAudioConfigParamsMP3" href="MixAudio/MixAudio-MixAudioConfigParamsMP3.html">
+<ANCHOR id="MixAudio-MixAudioConfigParamsMP3.synopsis" href="MixAudio/MixAudio-MixAudioConfigParamsMP3.html#MixAudio-MixAudioConfigParamsMP3.synopsis">
+<ANCHOR id="MixAudio-MixAudioConfigParamsMP3.description" href="MixAudio/MixAudio-MixAudioConfigParamsMP3.html#MixAudio-MixAudioConfigParamsMP3.description">
+<ANCHOR id="MixAudio-MixAudioConfigParamsMP3.details" href="MixAudio/MixAudio-MixAudioConfigParamsMP3.html#MixAudio-MixAudioConfigParamsMP3.details">
+<ANCHOR id="MixAudioConfigParamsMP3" href="MixAudio/MixAudio-MixAudioConfigParamsMP3.html#MixAudioConfigParamsMP3">
+<ANCHOR id="mix-acp-mp3-new" href="MixAudio/MixAudio-MixAudioConfigParamsMP3.html#mix-acp-mp3-new">
+<ANCHOR id="mix-acp-mp3-ref" href="MixAudio/MixAudio-MixAudioConfigParamsMP3.html#mix-acp-mp3-ref">
+<ANCHOR id="mix-acp-mp3-unref" href="MixAudio/MixAudio-MixAudioConfigParamsMP3.html#mix-acp-mp3-unref">
+<ANCHOR id="MIX-ACP-MP3-CRC--CAPS" href="MixAudio/MixAudio-MixAudioConfigParamsMP3.html#MIX-ACP-MP3-CRC--CAPS">
+<ANCHOR id="MIX-ACP-MP3-MPEG-FORMAT--CAPS" href="MixAudio/MixAudio-MixAudioConfigParamsMP3.html#MIX-ACP-MP3-MPEG-FORMAT--CAPS">
+<ANCHOR id="MIX-ACP-MP3-MPEG-LAYER--CAPS" href="MixAudio/MixAudio-MixAudioConfigParamsMP3.html#MIX-ACP-MP3-MPEG-LAYER--CAPS">
+<ANCHOR id="MixAudio-MixAudioConfigParamsWMA" href="MixAudio/MixAudio-MixAudioConfigParamsWMA.html">
+<ANCHOR id="MixAudio-MixAudioConfigParamsWMA.synopsis" href="MixAudio/MixAudio-MixAudioConfigParamsWMA.html#MixAudio-MixAudioConfigParamsWMA.synopsis">
+<ANCHOR id="MixAudio-MixAudioConfigParamsWMA.description" href="MixAudio/MixAudio-MixAudioConfigParamsWMA.html#MixAudio-MixAudioConfigParamsWMA.description">
+<ANCHOR id="MixAudio-MixAudioConfigParamsWMA.details" href="MixAudio/MixAudio-MixAudioConfigParamsWMA.html#MixAudio-MixAudioConfigParamsWMA.details">
+<ANCHOR id="MixAudioWMAVersion" href="MixAudio/MixAudio-MixAudioConfigParamsWMA.html#MixAudioWMAVersion">
+<ANCHOR id="MixAudioConfigParamsWMA" href="MixAudio/MixAudio-MixAudioConfigParamsWMA.html#MixAudioConfigParamsWMA">
+<ANCHOR id="mix-acp-wma-new" href="MixAudio/MixAudio-MixAudioConfigParamsWMA.html#mix-acp-wma-new">
+<ANCHOR id="mix-acp-wma-ref" href="MixAudio/MixAudio-MixAudioConfigParamsWMA.html#mix-acp-wma-ref">
+<ANCHOR id="mix-acp-wma-unref" href="MixAudio/MixAudio-MixAudioConfigParamsWMA.html#mix-acp-wma-unref">
+<ANCHOR id="MIX-ACP-WMA-CHANNEL-MASK--CAPS" href="MixAudio/MixAudio-MixAudioConfigParamsWMA.html#MIX-ACP-WMA-CHANNEL-MASK--CAPS">
+<ANCHOR id="MIX-ACP-WMA-FORMAT-TAG--CAPS" href="MixAudio/MixAudio-MixAudioConfigParamsWMA.html#MIX-ACP-WMA-FORMAT-TAG--CAPS">
+<ANCHOR id="MIX-ACP-WMA-BLOCK-ALIGN--CAPS" href="MixAudio/MixAudio-MixAudioConfigParamsWMA.html#MIX-ACP-WMA-BLOCK-ALIGN--CAPS">
+<ANCHOR id="MIX-ACP-WMA-ENCODE-OPT--CAPS" href="MixAudio/MixAudio-MixAudioConfigParamsWMA.html#MIX-ACP-WMA-ENCODE-OPT--CAPS">
+<ANCHOR id="MIX-ACP-WMA-PCM-BIT-WIDTH--CAPS" href="MixAudio/MixAudio-MixAudioConfigParamsWMA.html#MIX-ACP-WMA-PCM-BIT-WIDTH--CAPS">
+<ANCHOR id="mix-acp-wma-get-version" href="MixAudio/MixAudio-MixAudioConfigParamsWMA.html#mix-acp-wma-get-version">
+<ANCHOR id="mix-acp-wma-set-version" href="MixAudio/MixAudio-MixAudioConfigParamsWMA.html#mix-acp-wma-set-version">
+<ANCHOR id="MixAudio-MixAudioConfigParams" href="MixAudio/MixAudio-MixAudioConfigParams.html">
+<ANCHOR id="MixAudio-MixAudioConfigParams.synopsis" href="MixAudio/MixAudio-MixAudioConfigParams.html#MixAudio-MixAudioConfigParams.synopsis">
+<ANCHOR id="MixAudio-MixAudioConfigParams.description" href="MixAudio/MixAudio-MixAudioConfigParams.html#MixAudio-MixAudioConfigParams.description">
+<ANCHOR id="MixAudio-MixAudioConfigParams.details" href="MixAudio/MixAudio-MixAudioConfigParams.html#MixAudio-MixAudioConfigParams.details">
+<ANCHOR id="MixACPOpAlign" href="MixAudio/MixAudio-MixAudioConfigParams.html#MixACPOpAlign">
+<ANCHOR id="MixACPBPSType" href="MixAudio/MixAudio-MixAudioConfigParams.html#MixACPBPSType">
+<ANCHOR id="MixDecodeMode" href="MixAudio/MixAudio-MixAudioConfigParams.html#MixDecodeMode">
+<ANCHOR id="MixAudioConfigParams" href="MixAudio/MixAudio-MixAudioConfigParams.html#MixAudioConfigParams">
+<ANCHOR id="mix-acp-new" href="MixAudio/MixAudio-MixAudioConfigParams.html#mix-acp-new">
+<ANCHOR id="mix-acp-ref" href="MixAudio/MixAudio-MixAudioConfigParams.html#mix-acp-ref">
+<ANCHOR id="mix-acp-unref" href="MixAudio/MixAudio-MixAudioConfigParams.html#mix-acp-unref">
+<ANCHOR id="MIX-ACP-DECODEMODE--CAPS" href="MixAudio/MixAudio-MixAudioConfigParams.html#MIX-ACP-DECODEMODE--CAPS">
+<ANCHOR id="MIX-ACP-NUM-CHANNELS--CAPS" href="MixAudio/MixAudio-MixAudioConfigParams.html#MIX-ACP-NUM-CHANNELS--CAPS">
+<ANCHOR id="MIX-ACP-BITRATE--CAPS" href="MixAudio/MixAudio-MixAudioConfigParams.html#MIX-ACP-BITRATE--CAPS">
+<ANCHOR id="MIX-ACP-SAMPLE-FREQ--CAPS" href="MixAudio/MixAudio-MixAudioConfigParams.html#MIX-ACP-SAMPLE-FREQ--CAPS">
+<ANCHOR id="mix-acp-get-decodemode" href="MixAudio/MixAudio-MixAudioConfigParams.html#mix-acp-get-decodemode">
+<ANCHOR id="mix-acp-set-decodemode" href="MixAudio/MixAudio-MixAudioConfigParams.html#mix-acp-set-decodemode">
+<ANCHOR id="mix-acp-get-streamname" href="MixAudio/MixAudio-MixAudioConfigParams.html#mix-acp-get-streamname">
+<ANCHOR id="mix-acp-set-streamname" href="MixAudio/MixAudio-MixAudioConfigParams.html#mix-acp-set-streamname">
+<ANCHOR id="mix-acp-set-audio-manager" href="MixAudio/MixAudio-MixAudioConfigParams.html#mix-acp-set-audio-manager">
+<ANCHOR id="mix-acp-get-audio-manager" href="MixAudio/MixAudio-MixAudioConfigParams.html#mix-acp-get-audio-manager">
+<ANCHOR id="mix-acp-is-streamname-valid" href="MixAudio/MixAudio-MixAudioConfigParams.html#mix-acp-is-streamname-valid">
+<ANCHOR id="mix-acp-get-bps" href="MixAudio/MixAudio-MixAudioConfigParams.html#mix-acp-get-bps">
+<ANCHOR id="mix-acp-set-bps" href="MixAudio/MixAudio-MixAudioConfigParams.html#mix-acp-set-bps">
+<ANCHOR id="mix-acp-get-op-align" href="MixAudio/MixAudio-MixAudioConfigParams.html#mix-acp-get-op-align">
+<ANCHOR id="mix-acp-set-op-align" href="MixAudio/MixAudio-MixAudioConfigParams.html#mix-acp-set-op-align">
+<ANCHOR id="MixAudio-MixAudioInitParams" href="MixAudio/MixAudio-MixAudioInitParams.html">
+<ANCHOR id="MixAudio-MixAudioInitParams.synopsis" href="MixAudio/MixAudio-MixAudioInitParams.html#MixAudio-MixAudioInitParams.synopsis">
+<ANCHOR id="MixAudio-MixAudioInitParams.description" href="MixAudio/MixAudio-MixAudioInitParams.html#MixAudio-MixAudioInitParams.description">
+<ANCHOR id="MixAudio-MixAudioInitParams.details" href="MixAudio/MixAudio-MixAudioInitParams.html#MixAudio-MixAudioInitParams.details">
+<ANCHOR id="MixAudioInitParams" href="MixAudio/MixAudio-MixAudioInitParams.html#MixAudioInitParams">
+<ANCHOR id="mix-aip-new" href="MixAudio/MixAudio-MixAudioInitParams.html#mix-aip-new">
+<ANCHOR id="mix-aip-ref" href="MixAudio/MixAudio-MixAudioInitParams.html#mix-aip-ref">
+<ANCHOR id="mix-aip-unref" href="MixAudio/MixAudio-MixAudioInitParams.html#mix-aip-unref">
+<ANCHOR id="MixAudio" href="MixAudio/MixAudio.html">
+<ANCHOR id="MixAudio.synopsis" href="MixAudio/MixAudio.html#MixAudio.synopsis">
+<ANCHOR id="MixAudio.object-hierarchy" href="MixAudio/MixAudio.html#MixAudio.object-hierarchy">
+<ANCHOR id="MixAudio.description" href="MixAudio/MixAudio.html#MixAudio.description">
+<ANCHOR id="MixAudio.details" href="MixAudio/MixAudio.html#MixAudio.details">
+<ANCHOR id="MixStreamState" href="MixAudio/MixAudio.html#MixStreamState">
+<ANCHOR id="MixState" href="MixAudio/MixAudio.html#MixState">
+<ANCHOR id="MixCodecMode" href="MixAudio/MixAudio.html#MixCodecMode">
+<ANCHOR id="MixVolType" href="MixAudio/MixAudio.html#MixVolType">
+<ANCHOR id="MixVolRamp" href="MixAudio/MixAudio.html#MixVolRamp">
+<ANCHOR id="MixIOVec" href="MixAudio/MixAudio.html#MixIOVec">
+<ANCHOR id="MixDeviceState" href="MixAudio/MixAudio.html#MixDeviceState">
+<ANCHOR id="MixAudio-struct" href="MixAudio/MixAudio.html#MixAudio-struct">
+<ANCHOR id="mix-audio-new" href="MixAudio/MixAudio.html#mix-audio-new">
+<ANCHOR id="mix-audio-ref" href="MixAudio/MixAudio.html#mix-audio-ref">
+<ANCHOR id="mix-audio-unref" href="MixAudio/MixAudio.html#mix-audio-unref">
+<ANCHOR id="mix-audio-get-version" href="MixAudio/MixAudio.html#mix-audio-get-version">
+<ANCHOR id="mix-audio-initialize" href="MixAudio/MixAudio.html#mix-audio-initialize">
+<ANCHOR id="mix-audio-configure" href="MixAudio/MixAudio.html#mix-audio-configure">
+<ANCHOR id="mix-audio-decode" href="MixAudio/MixAudio.html#mix-audio-decode">
+<ANCHOR id="mix-audio-capture-encode" href="MixAudio/MixAudio.html#mix-audio-capture-encode">
+<ANCHOR id="mix-audio-start" href="MixAudio/MixAudio.html#mix-audio-start">
+<ANCHOR id="mix-audio-stop-drop" href="MixAudio/MixAudio.html#mix-audio-stop-drop">
+<ANCHOR id="mix-audio-stop-drain" href="MixAudio/MixAudio.html#mix-audio-stop-drain">
+<ANCHOR id="mix-audio-pause" href="MixAudio/MixAudio.html#mix-audio-pause">
+<ANCHOR id="mix-audio-resume" href="MixAudio/MixAudio.html#mix-audio-resume">
+<ANCHOR id="mix-audio-get-timestamp" href="MixAudio/MixAudio.html#mix-audio-get-timestamp">
+<ANCHOR id="mix-audio-set-mute" href="MixAudio/MixAudio.html#mix-audio-set-mute">
+<ANCHOR id="mix-audio-get-mute" href="MixAudio/MixAudio.html#mix-audio-get-mute">
+<ANCHOR id="mix-audio-get-max-vol" href="MixAudio/MixAudio.html#mix-audio-get-max-vol">
+<ANCHOR id="mix-audio-get-min-vol" href="MixAudio/MixAudio.html#mix-audio-get-min-vol">
+<ANCHOR id="mix-audio-get-volume" href="MixAudio/MixAudio.html#mix-audio-get-volume">
+<ANCHOR id="mix-audio-set-volume" href="MixAudio/MixAudio.html#mix-audio-set-volume">
+<ANCHOR id="mix-audio-deinitialize" href="MixAudio/MixAudio.html#mix-audio-deinitialize">
+<ANCHOR id="mix-audio-get-stream-state" href="MixAudio/MixAudio.html#mix-audio-get-stream-state">
+<ANCHOR id="mix-audio-get-state" href="MixAudio/MixAudio.html#mix-audio-get-state">
+<ANCHOR id="mix-audio-am-is-enabled" href="MixAudio/MixAudio.html#mix-audio-am-is-enabled">
+<ANCHOR id="mix-audio-is-am-available" href="MixAudio/MixAudio.html#mix-audio-is-am-available">
+<ANCHOR id="mix-audio-get-output-configuration" href="MixAudio/MixAudio.html#mix-audio-get-output-configuration">
+<ANCHOR id="mix-audio-get-stream-byte-decoded" href="MixAudio/MixAudio.html#mix-audio-get-stream-byte-decoded">
+<ANCHOR id="MixAudio-mixaudiotypes" href="MixAudio/MixAudio-mixaudiotypes.html">
+<ANCHOR id="MixAudio-mixaudiotypes.synopsis" href="MixAudio/MixAudio-mixaudiotypes.html#MixAudio-mixaudiotypes.synopsis">
+<ANCHOR id="MixAudio-mixaudiotypes.description" href="MixAudio/MixAudio-mixaudiotypes.html#MixAudio-mixaudiotypes.description">
+<ANCHOR id="MixAudio-mixaudiotypes.details" href="MixAudio/MixAudio-mixaudiotypes.html#MixAudio-mixaudiotypes.details">
+<ANCHOR id="MixAudioManager" href="MixAudio/MixAudio-mixaudiotypes.html#MixAudioManager">
diff --git a/mix_audio/docs/reference/MixAudio/html/style.css b/mix_audio/docs/reference/MixAudio/html/style.css
new file mode 100644
index 0000000..bb44c28
--- /dev/null
+++ b/mix_audio/docs/reference/MixAudio/html/style.css
@@ -0,0 +1,167 @@
+.synopsis, .classsynopsis
+{
+ background: #eeeeee;
+ border: solid 1px #aaaaaa;
+ padding: 0.5em;
+}
+.programlisting
+{
+ background: #eeeeff;
+ border: solid 1px #aaaaff;
+ padding: 0.5em;
+}
+.variablelist
+{
+ padding: 4px;
+ margin-left: 3em;
+}
+.variablelist td:first-child
+{
+ vertical-align: top;
+}
+
+/* this is needed so that the local anchors are displayed below the navigation */
+@media screen {
+ sup a.footnote
+ {
+ position: relative;
+ top: 0em ! important;
+ }
+ div.refnamediv a[name], div.refsect1 a[name]
+ {
+ position: relative;
+ top: -4.5em;
+ }
+ table.navigation#top
+ {
+ background: #ffeeee;
+ border: solid 1px #ffaaaa;
+ margin-top: 0;
+ margin-bottom: 0;
+ position: fixed;
+ top: 0;
+ left: 0;
+ height: 2em;
+ z-index: 1;
+ }
+ .navigation a
+ {
+ color: #770000;
+ }
+ .navigation a:visited
+ {
+ color: #550000;
+ }
+ td.shortcuts
+ {
+ color: #770000;
+ font-size: 80%;
+ white-space: nowrap;
+ }
+ div.refentry, div.chapter, div.reference, div.part, div.book, div.glossary, div.sect1, div.appendix, div.preface
+ {
+ position: relative;
+ top: 3em;
+ z-index: 0;
+ }
+ div.glossary, div.index
+ {
+ position: relative;
+ top: 2em;
+ z-index: 0;
+ }
+ div.refnamediv
+ {
+ margin-top: 2em;
+ }
+ body
+ {
+ padding-bottom: 20em;
+ }
+}
+@media print {
+ table.navigation {
+ visibility: collapse;
+ display: none;
+ }
+ div.titlepage table.navigation {
+ visibility: visible;
+ display: table;
+ background: #ffeeee;
+ border: solid 1px #ffaaaa;
+ margin-top: 0;
+ margin-bottom: 0;
+ top: 0;
+ left: 0;
+ height: 2em;
+ }
+}
+
+.navigation .title
+{
+ font-size: 200%;
+}
+
+
+div.gallery-float
+{
+ float: left;
+ padding: 10px;
+}
+div.gallery-float img
+{
+ border-style: none;
+}
+div.gallery-spacer
+{
+ clear: both;
+}
+a
+{
+ text-decoration: none;
+}
+a:hover
+{
+ text-decoration: underline;
+ color: #FF0000;
+}
+
+div.table table
+{
+ border-collapse: collapse;
+ border-spacing: 0px;
+ border-style: solid;
+ border-color: #777777;
+ border-width: 1px;
+}
+
+div.table table td, div.table table th
+{
+ border-style: solid;
+ border-color: #777777;
+ border-width: 1px;
+ padding: 3px;
+ vertical-align: top;
+}
+
+div.table table th
+{
+ background-color: #eeeeee;
+}
+
+hr
+{
+ color: #777777;
+ background: #777777;
+ border: 0;
+ height: 1px;
+ clear: both;
+}
+
+.footer
+{
+ padding-top: 3.5em;
+ color: #777777;
+ text-align: center;
+ font-size: 80%;
+}
diff --git a/mix_audio/docs/reference/MixAudio/html/tree-hierarchy.html b/mix_audio/docs/reference/MixAudio/html/tree-hierarchy.html
new file mode 100644
index 0000000..e6f8029
--- /dev/null
+++ b/mix_audio/docs/reference/MixAudio/html/tree-hierarchy.html
@@ -0,0 +1,37 @@
+<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
+<html>
+<head>
+<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
+<title>Object Hierarchy</title>
+<meta name="generator" content="DocBook XSL Stylesheets V1.73.2">
+<link rel="start" href="index.html" title="MixAudio Reference Manual">
+<link rel="up" href="index.html" title="MixAudio Reference Manual">
+<link rel="prev" href="MixAudio-mixaudiotypes.html" title="Mix Audio Types">
+<link rel="next" href="api-index-full.html" title="API Index">
+<meta name="generator" content="GTK-Doc V1.11 (XML mode)">
+<link rel="stylesheet" href="style.css" type="text/css">
+<link rel="chapter" href="ch01.html" title="Mix Audio API">
+<link rel="chapter" href="tree-hierarchy.html" title="Object Hierarchy">
+<link rel="index" href="api-index-full.html" title="API Index">
+</head>
+<body bgcolor="white" text="black" link="#0000FF" vlink="#840084" alink="#0000FF">
+<table class="navigation" id="top" width="100%" summary="Navigation header" cellpadding="2" cellspacing="2"><tr valign="middle">
+<td><a accesskey="p" href="MixAudio-mixaudiotypes.html"><img src="left.png" width="24" height="24" border="0" alt="Prev"></a></td>
+<td> </td>
+<td><a accesskey="h" href="index.html"><img src="home.png" width="24" height="24" border="0" alt="Home"></a></td>
+<th width="100%" align="center">MixAudio Reference Manual</th>
+<td><a accesskey="n" href="api-index-full.html"><img src="right.png" width="24" height="24" border="0" alt="Next"></a></td>
+</tr></table>
+<div class="chapter" lang="en">
+<div class="titlepage"><div><div><h2 class="title">
+<a name="tree-hierarchy"></a>Object Hierarchy</h2></div></div></div>
+<pre class="screen">
+ GObject
+ <a class="link" href="MixAudio.html" title="MixAudio">MixAudio</a>
+</pre>
+</div>
+<div class="footer">
+<hr>
+ Generated by GTK-Doc V1.11</div>
+</body>
+</html>
diff --git a/mix_audio/m4/Makefile.am b/mix_audio/m4/Makefile.am
new file mode 100644
index 0000000..66381d4
--- /dev/null
+++ b/mix_audio/m4/Makefile.am
@@ -0,0 +1 @@
+EXTRA_DIST =
diff --git a/mix_audio/m4/as-mix-version.m4 b/mix_audio/m4/as-mix-version.m4
new file mode 100644
index 0000000..8b09d7c
--- /dev/null
+++ b/mix_audio/m4/as-mix-version.m4
@@ -0,0 +1,35 @@
+dnl as-mix-version.m4
+
+dnl AS_MIX_VERSION(PACKAGE, PREFIX, MAJOR, MINOR, RELEASE)
+
+dnl example
+dnl AS_MIX_VERSION(mixaudio, MIXAUDIO, 0, 3, 2)
+dnl for a 0.3.2 release version
+
+dnl this macro
+dnl - defines [$PREFIX]_MAJOR, MINOR and REVISION, CURRENT, AGE
+dnl - defines [$PREFIX], VERSION
+dnl - AC_SUBST's all defined vars
+
+AC_DEFUN([AS_MIX_VERSION],
+[
+ PACKAGE=[$1]
+ [$2]_MAJOR=[$3]
+ [$2]_MINOR=[$4]
+ [$2]_REVISION=[$5]
+ [$2]_CURRENT=m4_eval([$3] + [$4])
+ [$2]_AGE=[$4]
+ VERSION=[$3].[$4].[$5]
+
+ AC_SUBST([$2]_MAJOR)
+ AC_SUBST([$2]_MINOR)
+ AC_SUBST([$2]_REVISION)
+ AC_SUBST([$2]_CURRENT)
+ AC_SUBST([$2]_AGE)
+
+ AC_DEFINE_UNQUOTED(PACKAGE, "$PACKAGE", [Define the package name])
+ AC_SUBST(PACKAGE)
+ AC_DEFINE_UNQUOTED(VERSION, "$VERSION", [Define the version])
+ AC_SUBST(VERSION)
+
+])
diff --git a/mix_audio/mixaudio.spec b/mix_audio/mixaudio.spec
new file mode 100644
index 0000000..e618d51
--- /dev/null
+++ b/mix_audio/mixaudio.spec
@@ -0,0 +1,56 @@
+# INTEL CONFIDENTIAL
+# Copyright 2009 Intel Corporation All Rights Reserved.
+# The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+#
+# No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+
+Summary: MIX Audio
+Name: mixaudio
+Version: 0.3.5
+Release: 1
+Source0: %{name}-%{version}.tar.gz
+NoSource: 0
+License: Intel Proprietary
+Group: System Environment/Libraries
+BuildRoot: %{_tmppath}/%{name}-root
+ExclusiveArch: i586 i386
+BuildRequires: glib2-devel mixcommon-devel dbus-glib-devel
+
+%description
+MIX Audio is an user library interface for various hardware audio codecs
+available on the platform.
+
+%package devel
+Summary: Libraries include files
+Group: Development/Libraries
+Requires: %{name} = %{version}
+
+%description devel
+The %{name}-devel package contains the header files and static libraries
+for building applications which use %{name}.
+
+%prep
+%setup -q
+
+%build
+%autogen
+%configure --prefix=%{_prefix}
+make
+
+%install
+%make_install
+
+%clean
+rm -rf $RPM_BUILD_ROOT
+
+%files
+%defattr(-,root,root)
+%{_libdir}/libmixaudio.so.*
+
+%files devel
+%defattr(-,root,root)
+%{_libdir}/libmixaudio.so
+%{_libdir}/libmixaudio.la
+%{_libdir}/pkgconfig/mixaudio.pc
+%{_includedir}/*.h
+%doc COPYING
diff --git a/mix_audio/pkgconfig/Makefile.am b/mix_audio/pkgconfig/Makefile.am
new file mode 100644
index 0000000..ceea4fa
--- /dev/null
+++ b/mix_audio/pkgconfig/Makefile.am
@@ -0,0 +1,11 @@
+### all of the standard pc files we need to generate
+pcfiles = mixaudio.pc
+
+all-local: $(pcfiles)
+
+pkgconfigdir = $(libdir)/pkgconfig
+pkgconfig_DATA = $(pcfiles)
+
+EXTRA_DIST = mixaudio.pc.in
+
+CLEANFILES = $(pcfiles)
diff --git a/mix_audio/pkgconfig/mixaudio.pc.in b/mix_audio/pkgconfig/mixaudio.pc.in
new file mode 100644
index 0000000..b521b5b
--- /dev/null
+++ b/mix_audio/pkgconfig/mixaudio.pc.in
@@ -0,0 +1,12 @@
+prefix=@prefix@
+exec_prefix=@exec_prefix@
+libdir=@libdir@
+includedir=@includedir@
+toolsdir=${exec_prefix}/bin
+
+Name: MixAudio
+Description: Intel MIX Audio
+Requires: @MIXAUDIO_PKG_DEPS@
+Version: @VERSION@
+Libs: -L${libdir} -lmixaudio
+Cflags: -I${includedir}
diff --git a/mix_audio/src/Makefile.am b/mix_audio/src/Makefile.am
new file mode 100644
index 0000000..b03751b
--- /dev/null
+++ b/mix_audio/src/Makefile.am
@@ -0,0 +1,61 @@
+#INTEL CONFIDENTIAL
+#Copyright 2009 Intel Corporation All Rights Reserved.
+#The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+#No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+#
+
+lib_LTLIBRARIES = libmixaudio.la
+#noinst_LTLIBRARIES = libmixaudio_stub.la
+
+##############################################################################
+# sources used to compile
+libmixaudio_la_SOURCES = mixaudio.c \
+ sst_proxy.c \
+ mixaip.c \
+ mixacp.c \
+ mixacpmp3.c \
+ mixacpwma.c \
+ mixacpaac.c
+
+# flags used to compile this plugin
+# add other _CFLAGS and _LIBS as needed
+libmixaudio_la_CFLAGS = $(DBUS_GLIB_CFLAGS) $(GLIB_CFLAGS) $(MIX_CFLAGS) $(GOBJECT_CFLAGS) $(GTHREAD_CFLAGS) -DMIXAUDIO_CURRENT=@MIXAUDIO_CURRENT@ -DMIXAUDIO_AGE=@MIXAUDIO_AGE@ -DMIXAUDIO_REVISION=@MIXAUDIO_REVISION@ $(MIXCOMMON_CFLAGS) -DMIX_LOG_ENABLE
+libmixaudio_la_LIBADD = $(DBUS_GLIB_LIBS) $(GLIB_LIBS) $(GOBJECT_LIBS) $(GTHREAD_LIBS) $(MIXCOMMON_LIBS)
+libmixaudio_la_LDFLAGS = $(DBUS_GLIB_LIBS) $(GLIB_LIBS) $(GOBJECT_LIBS) $(GTHREAD_LIBS) -version-info @MIXAUDIO_CURRENT@:@MIXAUDIO_REVISION@:@MIXAUDIO_AGE@ $(MIXCOMMON_LIBS)
+
+libmixaudio_la_LIBTOOLFLAGS = --tag=disable-static
+
+# additional flags to enable backdoor or workaround
+if LPESTUB
+libmixaudio_la_CFLAGS += -DLPESTUB
+endif
+
+if WORKAROUND
+libmixaudio_la_CFLAGS += -DDROP_WORKAROUND
+endif
+
+#libmixaudio_stub_la_SOURCES = $(libmixaudio_la_SOURCES)
+#libmixaudio_stub_la_CFLAGS = $(libmixaudio_la_CFLAGS) -DLPESTUB
+#libmixaudio_stub_la_LIBADD = $(libmixaudio_la_LIBADD)
+#libmixaudio_stub_la_LDFLAGS = $(libmixaudio_la_LDFLAGS)
+#libmixaudio_stub_la_LIBTOOLFLAGS = $(libmixaudio_la_LIBTOOLFLAGS)
+
+# headers we need but don't want installed
+noinst_HEADERS = intel_sst_ioctl.h sst_proxy.h pvt.h amhelper.h
+
+# TODO: decide whether a /usr/include/mix is needed for mix headers
+include_HEADERS = mixaudio.h \
+ mixaudiotypes.h \
+ mixaip.h \
+ mixacp.h \
+ mixacpmp3.h \
+ mixacpwma.h \
+ mixacpaac.h
+
+if AUDIO_MANAGER
+libmixaudio_la_CFLAGS += -DAUDIO_MANAGER
+libmixaudio_la_SOURCES += amhelper.c
+#include_HEADERS += amhelper.h
+endif
+
diff --git a/mix_audio/src/amhelper.c b/mix_audio/src/amhelper.c
new file mode 100644
index 0000000..501ece7
--- /dev/null
+++ b/mix_audio/src/amhelper.c
@@ -0,0 +1,120 @@
+#include "amhelper.h"
+#include <mixlog.h>
+
+static DBusGConnection *connection;
+
+static DBusGProxy *proxy_lpe = NULL;
+
+static gboolean am_enable=FALSE;
+
+/* Connect to am dbus server
+ * return -1 means failed
+ * return 0 means succeeded
+ * */
+gint dbus_init(void) {
+ GError *error;
+ const char *name = "org.moblin.audiomanager";
+
+ const char *path_lpe = "/org/moblin/audiomanager/lpe";
+ const char *interface_lpe = "org.moblin.audiomanager.lpe";
+
+ const gchar* env = g_getenv("MIX_AM");
+ if (env && env[0] == '1') {
+ am_enable = TRUE;
+ }
+ else
+ am_enable = FALSE;
+
+ if (am_enable) {
+ error = NULL;
+ connection = dbus_g_bus_get(DBUS_BUS_SESSION, &error);
+
+ if (connection == NULL) {
+ mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_WARNING, "Failed to open connection to bus: %s\n",
+ error->message);
+ g_error_free(error);
+ return -1;
+ }
+ mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_VERBOSE, "Successfully get a dbus connection\n");
+
+ proxy_lpe = dbus_g_proxy_new_for_name(connection, name,
+ path_lpe, interface_lpe);
+ if (proxy_lpe == NULL) {
+ mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_WARNING, "Failed to connect to AM dbus server\n");
+ return -1;
+ }
+ else {
+ mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_VERBOSE, "Successfully connected to AM dbus\npath: %s\ninterface: %s\n",
+ path_lpe, interface_lpe);
+ }
+ }
+ return 0;
+}
+
+gint32 lpe_stream_register(guint32 lpe_stream_id, char* media_role, char* lpe_stream_name, guint32 stream_type)
+{
+ GError *error;
+ gint32 s_output = 0;
+ error = NULL;
+
+ if (am_enable) {
+ mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_INFO, "lpe_stream_id: %d\n", lpe_stream_id);
+
+ if (lpe_stream_id == 0) {
+ return 0;
+ }
+ if(!dbus_g_proxy_call (proxy_lpe, "LPEStreamRegister", &error, G_TYPE_UINT,
+ lpe_stream_id, G_TYPE_STRING, media_role, G_TYPE_STRING, lpe_stream_name, G_TYPE_UINT, stream_type,
+ G_TYPE_INVALID, G_TYPE_INT, &s_output, G_TYPE_INVALID)) {
+ mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_WARNING, "LPEStreamRegister failed: %s\n", error->message);
+ g_error_free(error);
+ return s_output;
+ }
+
+ mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_INFO, "LPEStreamRegister returned am stream id %d\n", s_output);
+ }
+
+ return s_output;
+}
+
+gint32 lpe_stream_unregister(guint32 am_stream_id)
+{
+ GError *error;
+ gint32 s_output = 0;
+
+ if (am_enable) {
+ error = NULL;
+ if(!dbus_g_proxy_call (proxy_lpe, "LPEStreamUnregister", &error, G_TYPE_UINT, am_stream_id,
+ G_TYPE_INVALID, G_TYPE_INT, &s_output, G_TYPE_INVALID)){
+ mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_WARNING, "LPEStreamUnregister failed: %s\n", error->message);
+ g_error_free(error);
+ return s_output;
+ }
+ }
+ return s_output;
+}
+
+gint32 lpe_stream_notify_pause(guint32 stream_id)
+{
+  GError *error = NULL;
+ gint32 s_output=0;
+
+ if (am_enable) {
+ dbus_g_proxy_call (proxy_lpe, "LPEStreamNotifyPause", &error, G_TYPE_UINT, stream_id, G_TYPE_INVALID, G_TYPE_INT, &s_output, G_TYPE_INVALID);
+ }
+
+ return s_output;
+}
+
+gint32 lpe_stream_notify_resume(guint32 stream_id)
+{
+  GError *error = NULL;
+ gint32 s_output=0;
+
+ if (am_enable) {
+ dbus_g_proxy_call (proxy_lpe, "LPEStreamNotifyResume", &error, G_TYPE_UINT, stream_id, G_TYPE_INVALID, G_TYPE_INT, &s_output, G_TYPE_INVALID);
+ }
+
+ return s_output;
+}
+
diff --git a/mix_audio/src/amhelper.h b/mix_audio/src/amhelper.h
new file mode 100644
index 0000000..9ec115c
--- /dev/null
+++ b/mix_audio/src/amhelper.h
@@ -0,0 +1,25 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+*/
+
+#ifndef __MIX_AM_HELPER_H__
+#define __MIX_AM_HELPER_H__
+
+#include <dbus/dbus.h>
+#include <dbus/dbus-glib.h>
+
+gint dbus_init(void);
+
+gint32 lpe_stream_register(guint32 lpe_stream_id, char* media_role, char* lpe_stream_name, guint32 stream_type);
+
+gint32 lpe_stream_unregister(guint32 am_stream_id);
+
+gint32 lpe_stream_notify_pause(guint32 stream_id);
+
+gint32 lpe_stream_notify_resume(guint32 stream_id);
+
+#endif
diff --git a/mix_audio/src/intel_sst_ioctl.h b/mix_audio/src/intel_sst_ioctl.h
new file mode 100644
index 0000000..7fecf12
--- /dev/null
+++ b/mix_audio/src/intel_sst_ioctl.h
@@ -0,0 +1,337 @@
+#ifndef __INTEL_SST_IOCTL_H__
+#define __INTEL_SST_IOCTL_H__
+
+enum sst_codec_types {
+/* AUDIO/MUSIC CODEC Type Definitions */
+ SST_CODEC_TYPE_UNKNOWN = 0,
+ SST_CODEC_TYPE_PCM, /* Pass through Audio codec */
+ SST_CODEC_TYPE_MP3,
+ SST_CODEC_TYPE_MP24,
+ SST_CODEC_TYPE_AAC,
+ SST_CODEC_TYPE_AACP,
+ SST_CODEC_TYPE_eAACP,
+ SST_CODEC_TYPE_WMA9,
+ SST_CODEC_TYPE_WMA10,
+ SST_CODEC_TYPE_WMA10P,
+ SST_CODEC_TYPE_RA,
+ SST_CODEC_TYPE_DDAC3,
+ SST_CODEC_TYPE_STEREO_TRUE_HD,
+ SST_CODEC_TYPE_STEREO_HD_PLUS,
+
+ /* VOICE CODEC Type Definitions */
+ SST_CODEC_TYPE_VOICE_PCM = 0x21, /* Pass through voice codec */
+ SST_CODEC_SRC = 0x64,
+ SST_CODEC_MIXER = 0x65,
+ SST_CODEC_DOWN_MIXER = 0x66,
+ SST_CODEC_VOLUME_CONTROL = 0x67,
+ SST_CODEC_OEM1 = 0xC8,
+ SST_CODEC_OEM2 = 0xC9,
+};
+
+enum snd_sst_stream_ops {
+ STREAM_OPS_PLAYBACK = 0, /* Decode */
+ STREAM_OPS_CAPTURE, /* Encode */
+ STREAM_OPS_PLAYBACK_DRM, /* Play Audio/Voice */
+ STREAM_OPS_PLAYBACK_ALERT, /* Play Audio/Voice */
+ STREAM_OPS_CAPTURE_VOICE_CALL, /* CSV Voice recording */
+};
+
+enum stream_type {
+ STREAM_TYPE_MUSIC = 1,
+ STREAM_TYPE_VOICE
+};
+
+/* Firmware Version info */
+struct snd_sst_fw_version {
+ __u8 build; /* build number*/
+ __u8 minor; /* minor number*/
+ __u8 major; /* major number*/
+ __u8 type; /* build type*/
+};
+
+/* Port info structure */
+struct snd_sst_port_info {
+ __u16 port_type;
+ __u16 reserved;
+};
+
+/* Mixer info structure */
+struct snd_sst_mix_info {
+ __u16 max_streams;
+ __u16 reserved;
+};
+
+/* PCM Parameters */
+struct snd_pcm_params {
+ __u16 codec; /* codec type */
+ __u8 num_chan; /* 1=Mono, 2=Stereo */
+ __u8 pcm_wd_sz; /* 16/24 - bit*/
+ __u32 brate; /* Bitrate in bits per second */
+ __u32 sfreq; /* Sampling rate in Hz */
+ __u16 frame_size;
+ __u16 samples_per_frame; /* Frame size num samples per frame */
+ __u32 period_count; /* period elapsed time count, in samples,*/
+};
+
+/* MP3 Music Parameters Message */
+struct snd_mp3_params {
+ __u16 codec;
+ __u8 num_chan; /* 1=Mono, 2=Stereo */
+ __u8 pcm_wd_sz; /* 16/24 - bit*/
+ __u32 brate; /* Use the hard coded value. */
+	__u32 sfreq; /* Sampling freq eg. 8000, 44100, 48000 */
+ __u8 crc_check; /* crc_check - disable (0) or enable (1) */
+ __u8 op_align; /* op align 0- 16 bit, 1- MSB, 2 LSB*/
+ __u16 reserved; /* Unused */
+};
+
+#define AAC_BIT_STREAM_ADTS 0
+#define AAC_BIT_STREAM_ADIF 1
+#define AAC_BIT_STREAM_RAW 2
+
+/* AAC Music Parameters Message */
+struct snd_aac_params {
+ __u16 codec;
+ __u8 num_chan; /* 1=Mono, 2=Stereo*/
+ __u8 pcm_wd_sz; /* 16/24 - bit*/
+ __u32 brate;
+	__u32 sfreq; /* Sampling freq eg. 8000, 44100, 48000 */
+ __u32 aac_srate; /* Plain AAC decoder operating sample rate */
+ __u8 mpg_id; /* 0=MPEG-2, 1=MPEG-4 */
+ __u8 bs_format; /* input bit stream format adts=0, adif=1, raw=2 */
+ __u8 aac_profile; /* 0=Main Profile, 1=LC profile, 3=SSR profile */
+ __u8 ext_chl; /* No.of external channels */
+ __u8 aot; /* Audio object type. 1=Main , 2=LC , 3=SSR, 4=SBR*/
+ __u8 op_align; /* output alignment 0=16 bit , 1=MSB, 2= LSB align */
+ __u8 brate_type; /* 0=CBR, 1=VBR */
+ __u8 crc_check; /* crc check 0= disable, 1=enable */
+ __s8 bit_stream_format[8]; /* input bit stream format adts/adif/raw */
+ __u8 jstereo; /* Joint stereo Flag */
+ __u8 sbr_present; /* 1 = SBR Present, 0 = SBR absent, for RAW */
+ __u8 downsample; /* 1 = Downsampling ON, 0 = Downsampling OFF */
+ __u8 num_syntc_elems; /* 1- Mono/stereo, 0 - Dual Mono, 0 - for raw */
+ __s8 syntc_id[2]; /* 0 for ID_SCE(Dula Mono), -1 for raw */
+ __s8 syntc_tag[2]; /* raw - -1 and 0 -16 for rest of the streams */
+ __u8 pce_present; /* Flag. 1- present 0 - not present, for RAW */
+ __u8 reserved;
+ __u16 reserved1;
+
+};
+
+/* WMA Music Parameters Message */
+struct snd_wma_params {
+ __u16 codec;
+ __u8 num_chan; /* 1=Mono, 2=Stereo */
+ __u8 pcm_wd_sz; /* 16/24 - bit*/
+ __u32 brate; /* Use the hard coded value. */
+	__u32 sfreq; /* Sampling freq eg. 8000, 44100, 48000 */
+ __u32 channel_mask; /* Channel Mask */
+ __u16 format_tag; /* Format Tag */
+ __u16 block_align; /* packet size */
+ __u16 wma_encode_opt;/* Encoder option */
+ __u8 op_align; /* op align 0- 16 bit, 1- MSB, 2 LSB*/
+ __u8 pcm_src; /* input pcm bit width*/
+};
+
+/* Pre processing param structure */
+struct snd_prp_params {
+ __u32 reserved; /* No pre-processing defined yet */
+};
+
+/* Post processing Capability info structure */
+struct snd_sst_postproc_info {
+ __u32 src_min; /* Supported SRC Min sampling freq */
+ __u32 src_max; /* Supported SRC Max sampling freq */
+ __u8 src; /* 0=Not supported, 1=Supported */
+ __u8 bass_boost; /* 0=Not Supported, 1=Supported */
+ __u8 stereo_widening; /* 0=Not Supported, 1=Supported */
+ __u8 volume_control; /* 0=Not Supported, 1=Supported */
+ __s16 min_vol; /* Minimum value of Volume in dB */
+ __s16 max_vol; /* Maximum value of Volume in dB */
+ __u8 mute_control; /*0=No Mute, 1=Mute*/
+ __u8 reserved1;
+ __u16 reserved2;
+};
+
+/* pre processing Capability info structure */
+struct snd_sst_prp_info {
+ __s16 min_vol; /* Minimum value of Volume in dB */
+ __s16 max_vol; /* Maximum value of Volume in dB */
+ __u8 volume_control; /* 0=Not Supported, 1=Supported */
+ __u8 reserved1; /* for 32 bit alignment */
+ __u16 reserved2; /* for 32 bit alignment */
+} __attribute__ ((packed));
+
+/* Firmware capabilities info */
+struct snd_sst_fw_info {
+ struct snd_sst_fw_version fw_version; /* Firmware version */
+ __u8 audio_codecs_supported[8]; /* Codecs supported by FW */
+ __u32 recommend_min_duration; /* Min duration for Low power Playback*/
+ __u8 max_pcm_streams_supported; /*Max number of PCM streams supported */
+ __u8 max_enc_streams_supported; /*Max number of Encoded streams */
+ __u16 reserved; /* 32 bit alignment*/
+ struct snd_sst_postproc_info pop_info; /* Post processing capability*/
+ struct snd_sst_prp_info prp_info; /* pre_processing mod cap info */
+ struct snd_sst_port_info port_info[2]; /* Port info */
+ struct snd_sst_mix_info mix_info; /* Mixer info */
+ __u32 min_input_buf; /*minmum i/p buffer for decode*/
+};
+
+/* Add the codec parameter structures for new codecs to be supported */
+#define CODEC_PARAM_STRUCTURES \
+ struct snd_pcm_params pcm_params; \
+ struct snd_mp3_params mp3_params; \
+ struct snd_aac_params aac_params; \
+ struct snd_wma_params wma_params;
+
+/* Pre and Post Processing param structures */
+#define PPP_PARAM_STRUCTURES \
+ struct snd_prp_params prp_params;
+
+/* Codec params struture */
+union snd_sst_codec_params {
+ CODEC_PARAM_STRUCTURES;
+};
+
+/* Pre-processing params struture */
+union snd_sst_ppp_params{
+ PPP_PARAM_STRUCTURES;
+};
+
+struct snd_sst_stream_params {
+ union snd_sst_codec_params uc;
+} __attribute__ ((packed));
+
+struct snd_sst_params {
+ __u32 result;
+ __u32 stream_id;
+ __u8 codec;
+ __u8 ops;
+ __u8 stream_type;
+ struct snd_sst_stream_params sparams;
+};
+
+/*ioctl related stuff here*/
+struct snd_sst_pmic_config {
+ __u32 sfreq; /* Sampling rate in Hz */
+ __u16 num_chan; /* Mono =1 or Stereo =2 */
+ __u16 pcm_wd_sz; /* Number of bits per sample */
+} __attribute__ ((packed));
+
+struct snd_sst_get_stream_params {
+ struct snd_sst_params codec_params;
+ struct snd_sst_pmic_config pcm_params;
+};
+
+enum snd_sst_target_type {
+ SND_SST_TARGET_PMIC = 1,
+ SND_SST_TARGET_OTHER,
+};
+
+enum snd_sst_port_action {
+ SND_SST_PORT_PREPARE = 1,
+ SND_SST_PORT_ACTIVATE,
+};
+
+/* Target selection per device structure */
+struct snd_sst_slot_info {
+ __u8 mix_enable; /* Mixer enable or disable */
+ __u8 device_type;
+ __u8 device_instance; /* 0, 1, 2 */
+ __u8 target_type;
+ __u16 slot[2];
+ __u8 master;
+ __u8 action;
+ __u16 reserved;
+ struct snd_sst_pmic_config pcm_params;
+} __attribute__ ((packed));
+
+/* Target device list structure */
+struct snd_sst_target_device {
+ __u32 device_route;
+ struct snd_sst_slot_info devices[2];
+} __attribute__ ((packed));
+
+struct snd_sst_driver_info {
+ __u32 version; /* Version of the driver */
+ __u32 active_pcm_streams;
+ __u32 active_enc_streams;
+ __u32 max_pcm_streams;
+ __u32 max_enc_streams;
+ __u32 buf_per_stream;
+};
+
+struct snd_sst_vol {
+ __u32 stream_id;
+ __s32 volume;
+ __u32 ramp_duration;
+ __u32 ramp_type; /* Ramp type, default=0 */
+};
+
+struct snd_sst_mute {
+ __u32 stream_id;
+ __u32 mute;
+};
+
+enum snd_sst_buff_type {
+ SST_BUF_USER = 1,
+ SST_BUF_MMAP,
+ SST_BUF_RAR,
+};
+
+struct snd_sst_mmap_buff_entry {
+ unsigned int offset;
+ unsigned int size;
+};
+
+struct snd_sst_mmap_buffs {
+ unsigned int entries;
+ enum snd_sst_buff_type type;
+ struct snd_sst_mmap_buff_entry *buff;
+};
+
+struct snd_sst_buff_entry {
+ void *buffer;
+ unsigned int size;
+};
+
+struct snd_sst_buffs {
+ unsigned int entries;
+ __u8 type;
+ struct snd_sst_buff_entry *buff_entry;
+};
+
+struct snd_sst_dbufs {
+ unsigned long long input_bytes_consumed;
+ unsigned long long output_bytes_produced;
+ struct snd_sst_buffs *ibufs;
+ struct snd_sst_buffs *obufs;
+};
+
+/*IOCTL defined here*/
+/*SST MMF IOCTLS only*/
+#define SNDRV_SST_STREAM_SET_PARAMS _IOR('L', 0x00, \
+ struct snd_sst_stream_params *)
+#define SNDRV_SST_STREAM_GET_PARAMS _IOWR('L', 0x01, \
+ struct snd_sst_get_stream_params *)
+#define SNDRV_SST_STREAM_GET_TSTAMP _IOWR('L', 0x02, __u64 *)
+#define SNDRV_SST_STREAM_DECODE _IOWR('L', 0x03, struct snd_sst_dbufs *)
+#define SNDRV_SST_STREAM_BYTES_DECODED _IOWR('L', 0x04, __u64 *)
+#define SNDRV_SST_STREAM_START _IO('A', 0x42)
+#define SNDRV_SST_STREAM_DROP _IO('A', 0x43)
+#define SNDRV_SST_STREAM_DRAIN _IO('A', 0x44)
+#define SNDRV_SST_STREAM_PAUSE _IOW('A', 0x45, int)
+#define SNDRV_SST_STREAM_RESUME _IO('A', 0x47)
+#define SNDRV_SST_MMAP_PLAY _IOW('L', 0x05, struct snd_sst_mmap_buffs *)
+#define SNDRV_SST_MMAP_CAPTURE _IOW('L', 0x06, struct snd_sst_mmap_buffs *)
+/*SST common ioctls */
+#define SNDRV_SST_DRIVER_INFO _IOR('L', 0x10, struct snd_sst_driver_info *)
+#define SNDRV_SST_SET_VOL _IOW('L', 0x11, struct snd_sst_vol *)
+#define SNDRV_SST_GET_VOL _IOW('L', 0x12, struct snd_sst_vol *)
+#define SNDRV_SST_MUTE _IOW('L', 0x13, struct snd_sst_mute *)
+/*AM Ioctly only*/
+#define SNDRV_SST_FW_INFO _IOR('L', 0x20, struct snd_sst_fw_info *)
+#define SNDRV_SST_SET_TARGET_DEVICE _IOW('L', 0x21, \
+ struct snd_sst_target_device *)
+
+#endif /*__INTEL_SST_IOCTL_H__*/
diff --git a/mix_audio/src/mixacp.c b/mix_audio/src/mixacp.c
new file mode 100644
index 0000000..e7ce507
--- /dev/null
+++ b/mix_audio/src/mixacp.c
@@ -0,0 +1,322 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+*/
+
+/**
+ * SECTION:mixacp
+ * @short_description: MixAudio configuration parameters object.
+ * @include: mixacp.h
+ *
+ * #MixAudio configuration parameters object which is used to communicate audio specific parameters.
+ *
+ * This object should not be instantiated directly, as codec-specific parameters are defined in individual derived classes.
+ */
+
+#include "mixacp.h"
+#include <mixlog.h>
+
+static GType _mix_acp_type = 0;
+static MixParamsClass *parent_class = NULL;
+
+#define _do_init { _mix_acp_type = g_define_type_id; }
+
+gboolean mix_acp_copy(MixParams* target, const MixParams *src);
+MixParams* mix_acp_dup(const MixParams *obj);
+gboolean mix_acp_equal(MixParams* first, MixParams *second);
+static void mix_acp_finalize(MixParams *obj);
+
+G_DEFINE_TYPE_WITH_CODE(MixAudioConfigParams, mix_acp, MIX_TYPE_PARAMS, _do_init);
+
+void
+_mix_acp_initialize (void)
+{
+ /* the MixParams types need to be class_ref'd once before it can be
+ * done from multiple threads;
+ * see http://bugzilla.gnome.org/show_bug.cgi?id=304551 */
+ g_type_class_ref (mix_acp_get_type ());
+}
+
+static void mix_acp_init (MixAudioConfigParams *self)
+{
+ self->decode_mode = MIX_DECODE_NULL;
+ self->stream_name = NULL;
+ self->audio_manager=MIX_AUDIOMANAGER_NONE;
+ self->num_channels = 0;
+ self->bit_rate = 0;
+ self->sample_freq = 0;
+ self->bits_per_sample = MIX_ACP_BPS_16;
+ self->op_align = MIX_ACP_OUTPUT_ALIGN_16;
+}
+
+static void mix_acp_class_init(MixAudioConfigParamsClass *klass)
+{
+ MixParamsClass *mixparams_class = MIX_PARAMS_CLASS(klass);
+
+ /* setup static parent class */
+ parent_class = (MixParamsClass *) g_type_class_peek_parent (klass);
+
+ mixparams_class->finalize = mix_acp_finalize;
+ mixparams_class->copy = (MixParamsCopyFunction)mix_acp_copy;
+ mixparams_class->dup = (MixParamsDupFunction)mix_acp_dup;
+ mixparams_class->equal = (MixParamsEqualFunction)mix_acp_equal;
+
+ klass->print_params = NULL;
+}
+
+MixAudioConfigParams *mix_acp_new(void)
+{
+ MixAudioConfigParams *ret = (MixAudioConfigParams *)g_type_create_instance (MIX_TYPE_AUDIOCONFIGPARAMS);
+
+ return ret;
+}
+
+void mix_acp_finalize(MixParams *obj)
+{
+ /* clean up here. */
+ MixAudioConfigParams *acp = MIX_AUDIOCONFIGPARAMS(obj);
+
+ if (acp->stream_name) {
+ g_free(acp->stream_name);
+ acp->stream_name = NULL;
+ }
+
+ /* Chain up parent */
+ if (parent_class->finalize)
+ parent_class->finalize(obj);
+}
+
+MixAudioConfigParams *mix_acp_ref(MixAudioConfigParams *mix)
+{
+ return (MixAudioConfigParams*)mix_params_ref(MIX_PARAMS(mix));
+}
+
+/**
+ * mix_acp_dup:
+ * @obj: a #MixAudioConfigParams object
+ * @returns: a newly allocated duplicate of the object.
+ *
+ * Copy duplicate of the object.
+ */
+MixParams* mix_acp_dup(const MixParams *obj)
+{
+ MixParams *ret = NULL;
+
+ if (MIX_IS_AUDIOCONFIGPARAMS(obj))
+ {
+ MixAudioConfigParams *duplicate = mix_acp_new();
+ if (mix_acp_copy(MIX_PARAMS(duplicate), MIX_PARAMS(obj)))
+ {
+ ret = MIX_PARAMS(duplicate);
+ }
+ else
+ {
+ mix_acp_unref(duplicate);
+ }
+ }
+
+ return ret;
+}
+
+/**
+ * mix_acp_copy:
+ * @target: copy to target
+ * @src: copy from src
+ * @returns: boolean indicates if copy is successful.
+ *
+ * Copy instance data from @src to @target.
+ */
+gboolean mix_acp_copy(MixParams* target, const MixParams *src)
+{
+ if (MIX_IS_AUDIOCONFIGPARAMS(target) && MIX_IS_AUDIOCONFIGPARAMS(src))
+ {
+ MixAudioConfigParams *t = MIX_AUDIOCONFIGPARAMS(target);
+ MixAudioConfigParams *s = MIX_AUDIOCONFIGPARAMS(src);
+
+ t->decode_mode = s->decode_mode;
+ t->stream_name = g_strdup(s->stream_name);
+ t->audio_manager=s->audio_manager;
+ t->num_channels = s->num_channels;
+ t->bit_rate = s->bit_rate;
+ t->sample_freq = s->sample_freq;
+ t->bits_per_sample = s->bits_per_sample;
+ t->op_align = s->op_align;
+
+ // Now chainup base class
+ if (parent_class->copy)
+ {
+ return parent_class->copy(MIX_PARAMS_CAST(target), MIX_PARAMS_CAST(src));
+ }
+ else
+ return TRUE;
+ }
+ return FALSE;
+}
+
+/**
+ * mix_acp_equal:
+ * @first: first object to compare
+ * @second: second object to compare
+ * @returns: boolean indicates if instance are equal.
+ *
+ * Compare @first and @second for equality.
+ */
+gboolean mix_acp_equal(MixParams* first, MixParams *second)
+{
+ gboolean ret = FALSE;
+
+ if (first && second)
+ {
+ if (first == second) return TRUE;
+ }
+ else
+ {
+ // one of them is NULL.
+ return FALSE;
+ }
+
+ // members within this scope equal. chaining up.
+ MixParamsClass *klass = MIX_PARAMS_CLASS(parent_class);
+ if (klass->equal)
+ ret = parent_class->equal(first, second);
+ else
+ ret = TRUE;
+
+ if (ret && MIX_IS_AUDIOCONFIGPARAMS(first) && MIX_IS_AUDIOCONFIGPARAMS(second))
+ {
+ MixAudioConfigParams *acp1 = MIX_AUDIOCONFIGPARAMS(first);
+ MixAudioConfigParams *acp2 = MIX_AUDIOCONFIGPARAMS(second);
+
+ ret = (acp1->decode_mode == acp2->decode_mode) &&
+ (acp1->audio_manager == acp2->audio_manager) &&
+ (acp1->num_channels == acp2->num_channels) &&
+ (acp1->bit_rate == acp2->bit_rate) &&
+ (acp1->sample_freq == acp2->sample_freq) &&
+ (acp1->bits_per_sample == acp2->bits_per_sample) &&
+ (acp1->op_align == acp2->op_align) &&
+ (!g_strcmp0(acp1->stream_name, acp2->stream_name));
+ //g_strcmp0 handles NULL gracefully
+ }
+
+ return ret;
+}
+
+
+gboolean mix_acp_is_streamname_valid(MixAudioConfigParams *obj)
+{
+ if (MIX_IS_AUDIOCONFIGPARAMS(obj))
+ if ((obj->stream_name) && (obj->stream_name[0] != 0)) return TRUE;
+
+ return FALSE;
+}
+
+gchar *mix_acp_get_streamname(MixAudioConfigParams *obj)
+{
+ gchar *ret = NULL;
+ if (G_LIKELY(MIX_IS_AUDIOCONFIGPARAMS(obj)) && obj->stream_name)
+ {
+ ret = g_strdup(obj->stream_name);
+ }
+ return ret;
+}
+
+MIX_RESULT mix_acp_set_streamname(MixAudioConfigParams *obj, const gchar *streamname)
+{
+ MIX_RESULT ret = MIX_RESULT_FAIL;
+
+ if (!obj) return MIX_RESULT_NULL_PTR;
+
+ if (G_LIKELY(MIX_IS_AUDIOCONFIGPARAMS(obj)))
+ {
+ if (obj->stream_name)
+ {
+ g_free(obj->stream_name);
+ obj->stream_name = NULL;
+ }
+
+ if (streamname) obj->stream_name = g_strdup(streamname);
+
+ ret = MIX_RESULT_SUCCESS;
+ }
+ else
+ {
+ ret = MIX_RESULT_INVALID_PARAM;
+ }
+
+ return ret;
+}
+
+MixACPBPSType mix_acp_get_bps(MixAudioConfigParams *obj)
+{
+ if (G_LIKELY(obj))
+ return obj->bits_per_sample;
+ else
+ return 0;
+}
+
+MIX_RESULT mix_acp_set_bps(MixAudioConfigParams *obj, MixACPBPSType type)
+{
+ MIX_RESULT ret = MIX_RESULT_SUCCESS;
+
+ if (!obj) return MIX_RESULT_NULL_PTR;
+
+ if (G_LIKELY(MIX_IS_AUDIOCONFIGPARAMS(obj)))
+ {
+ switch (type)
+ {
+ case MIX_ACP_BPS_UNKNOWN:
+ case MIX_ACP_BPS_16:
+ case MIX_ACP_BPS_24:
+ obj->bits_per_sample = type;
+ break;
+ default:
+ ret = MIX_RESULT_INVALID_PARAM;
+ break;
+ }
+ }
+ else
+ {
+ ret = MIX_RESULT_INVALID_PARAM;
+ }
+
+ return ret;
+}
+
+
+MixACPOpAlign mix_acp_get_op_align(MixAudioConfigParams *obj)
+{
+ return (obj->op_align);
+}
+
+MIX_RESULT mix_acp_set_op_align(MixAudioConfigParams *obj, MixACPOpAlign op_align)
+{
+ MIX_RESULT ret = MIX_RESULT_SUCCESS;
+
+ if ((op_align >= MIX_ACP_OUTPUT_ALIGN_16) && (op_align < MIX_ACP_OUTPUT_ALIGN_LAST))
+ obj->op_align = op_align;
+ else ret=MIX_RESULT_INVALID_PARAM;
+
+ return ret;
+}
+
+void mix_acp_print_params(MixAudioConfigParams *obj)
+{
+ mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_INFO, "decode_mode: %d\n", obj->decode_mode);
+ mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_INFO, "stream_name: %s\n", obj->stream_name);
+ mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_INFO, "audio_manager: %d\n", obj->audio_manager);
+ mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_INFO, "num_channels: %d\n", obj->num_channels);
+ mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_INFO, "bit_rate: %d\n", obj->bit_rate);
+ mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_INFO, "sample_freq: %d\n", obj->sample_freq);
+ mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_INFO, "bits_per_sample: %d\n", obj->bits_per_sample);
+ mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_INFO, "op_align: %d\n", obj->op_align);
+
+ MixAudioConfigParamsClass *klass = MIX_AUDIOCONFIGPARAMS_GET_CLASS(obj);
+ if (klass->print_params)
+ {
+ klass->print_params(obj);
+ }
+}
+
diff --git a/mix_audio/src/mixacp.h b/mix_audio/src/mixacp.h
new file mode 100644
index 0000000..0acd309
--- /dev/null
+++ b/mix_audio/src/mixacp.h
@@ -0,0 +1,367 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+*/
+
+#ifndef __MIX_AUDIOCONFIGPARAMS_H__
+#define __MIX_AUDIOCONFIGPARAMS_H__
+
+
+#include "mixparams.h"
+#include "mixresult.h"
+#include "mixaudiotypes.h"
+
+/**
+ * MixACPOpAlign:
+ * @MIX_ACP_OUTPUT_ALIGN_UNKNOWN: Output alignment undefined.
+ * @MIX_ACP_OUTPUT_ALIGN_16: Output word is 16-bit aligned
+ * @MIX_ACP_OUTPUT_ALIGN_MSB: Output word is MSB aligned
+ * @MIX_ACP_OUTPUT_ALIGN_LSB: Output word is LSB aligned
+ * @MIX_ACP_OUTPUT_ALIGN_LAST: Last entry in list.
+ *
+ * Audio Output alignment.
+ *
+ */
+typedef enum {
+ MIX_ACP_OUTPUT_ALIGN_UNKNOWN=-1,
+ MIX_ACP_OUTPUT_ALIGN_16=0,
+ MIX_ACP_OUTPUT_ALIGN_MSB,
+ MIX_ACP_OUTPUT_ALIGN_LSB,
+ MIX_ACP_OUTPUT_ALIGN_LAST
+} MixACPOpAlign;
+
+/**
+ * MixACPBPSType:
+ * @MIX_ACP_BPS_UNKNOWN: Bit Per Sample undefined.
+ * @MIX_ACP_BPS_16: Output bits per sample is 16 bits
+ * @MIX_ACP_BPS_24: Output bits per sample is 24 bits
+ *
+ * Audio Output Size in bits per sample.
+ *
+ */
+typedef enum {
+ MIX_ACP_BPS_UNKNOWN=0,
+ MIX_ACP_BPS_16=16,
+ MIX_ACP_BPS_24=24,
+} MixACPBPSType;
+
+/**
+ * MIX_TYPE_AUDIOCONFIGPARAMS:
+ *
+ * Get type of class.
+ */
+#define MIX_TYPE_AUDIOCONFIGPARAMS (mix_acp_get_type ())
+
+/**
+ * MIX_AUDIOCONFIGPARAMS:
+ * @obj: object to be type-casted.
+ *
+ * Type casting.
+ */
+#define MIX_AUDIOCONFIGPARAMS(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_AUDIOCONFIGPARAMS, MixAudioConfigParams))
+
+/**
+ * MIX_IS_AUDIOCONFIGPARAMS:
+ * @obj: an object.
+ *
+ * Checks if the given object is an instance of #MixAudioConfigParams
+ */
+#define MIX_IS_AUDIOCONFIGPARAMS(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_AUDIOCONFIGPARAMS))
+
+/**
+ * MIX_AUDIOCONFIGPARAMS_CLASS:
+ * @klass: class to be type-casted.
+ *
+ * Type casting.
+ */
+#define MIX_AUDIOCONFIGPARAMS_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_AUDIOCONFIGPARAMS, MixAudioConfigParamsClass))
+
+/**
+ * MIX_IS_AUDIOCONFIGPARAMS_CLASS:
+ * @klass: a class.
+ *
+ * Checks if the given class is #MixAudioConfigParamsClass
+ */
+#define MIX_IS_AUDIOCONFIGPARAMS_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_AUDIOCONFIGPARAMS))
+
+/**
+ * MIX_AUDIOCONFIGPARAMS_GET_CLASS:
+ * @obj: a #MixParams object.
+ *
+ * Get the class instance of the object.
+ */
+#define MIX_AUDIOCONFIGPARAMS_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_AUDIOCONFIGPARAMS, MixAudioConfigParamsClass))
+
+typedef struct _MixAudioConfigParams MixAudioConfigParams;
+typedef struct _MixAudioConfigParamsClass MixAudioConfigParamsClass;
+
+/**
+ * MixDecodeMode:
+ * @MIX_DECODE_NULL: Undefined decode mode.
+ * @MIX_DECODE_DIRECTRENDER: Stream is configured in Direct Render mode
+ * @MIX_DECODE_DECODERETURN: Stream is configured in Decode Return mode
+ * @MIX_DECODE_LAST: Last index in the enumeration.
+ *
+ * Operation Mode for a MI-X session. See mix_audio_configure().
+ *
+ */
+typedef enum {
+ MIX_DECODE_NULL=0,
+ MIX_DECODE_DIRECTRENDER,
+ MIX_DECODE_DECODERETURN,
+ MIX_DECODE_LAST
+} MixDecodeMode;
+
+/**
+ * MixAudioConfigParams:
+ * @parent: parent.
+ * @decode_mode: Decode Mode to use for current session. See #mix_acp_set_decodemode
+ * @stream_name: Stream name. See #mix_acp_set_streamname. This object will release the string upon destruction.
+ * @audio_manager: Type of Audio Manager. See #mix_acp_set_audio_manager.
+ * @num_channels: Number of output channels. See #MIX_ACP_NUM_CHANNELS
+ * @bit_rate: <emphasis>Optional.</emphasis> See #MIX_ACP_BITRATE
+ * @sample_freq: Output frequency. See #MIX_ACP_SAMPLE_FREQ
+ * @bits_per_sample: Number of output bit per sample. See #mix_acp_set_bps
+ * @op_align: Output Byte Alignment. See #mix_acp_set_op_align
+ *
+ * @MixAudio configuration parameters object.
+ */
+struct _MixAudioConfigParams
+{
+ /*< public >*/
+ MixParams parent;
+
+ /*< public >*/
+ /* Audio Session Parameters */
+ MixDecodeMode decode_mode;
+ gchar *stream_name;
+ MixAudioManager audio_manager;
+
+ /*< public >*/
+ /* Audio Format Parameters */
+ gint num_channels;
+ gint bit_rate;
+ gint sample_freq;
+ MixACPBPSType bits_per_sample;
+ MixACPOpAlign op_align;
+ /*< private >*/
+ void* reserved1;
+ void* reserved2;
+ void* reserved3;
+ void* reserved4;
+};
+
+/**
+ * MixAudioConfigParamsClass:
+ *
+ * MI-X Audio object class
+ */
+struct _MixAudioConfigParamsClass
+{
+ /*< public >*/
+ MixParamsClass parent_class;
+
+ /*< virtual public >*/
+ void (*print_params) (MixAudioConfigParams *obj);
+
+ /* class members */
+
+};
+
+/**
+ * mix_acp_get_type:
+ * @returns: type
+ *
+ * Get the type of object.
+ */
+GType mix_acp_get_type (void);
+
+/**
+ * mix_acp_new:
+ * @returns: A newly allocated instance of #MixAudioConfigParams
+ *
+ * Use this method to create new instance of #MixAudioConfigParams
+ */
+MixAudioConfigParams *mix_acp_new(void);
+
+/**
+ * mix_acp_ref:
+ * @mix: object to add reference
+ * @returns: the MixAudioConfigParams instance where reference count has been increased.
+ *
+ * Add reference count.
+ */
+MixAudioConfigParams *mix_acp_ref(MixAudioConfigParams *mix);
+
+/**
+ * mix_acp_unref:
+ * @obj: object to unref.
+ *
+ * Decrement reference count of the object.
+ */
+#define mix_acp_unref(obj) mix_params_unref(MIX_PARAMS(obj))
+
+/* Class Methods */
+
+/**
+ * MIX_ACP_DECODEMODE:
+ * @obj: #MixAudioConfigParams object
+ *
+ * MixAudioConfigParam.decode_mode accessor.
+ *
+ * Configure the decode mode to one of #MixDecodeMode value.
+*/
+#define MIX_ACP_DECODEMODE(obj) (MIX_AUDIOCONFIGPARAMS(obj)->decode_mode)
+
+/**
+ * MIX_ACP_NUM_CHANNELS:
+ * @obj: #MixAudioConfigParams object
+ *
+ * MixAudioConfigParam.num_channels accessor.
+ *
+ * Configure the number of output channels. This value needs to be exactly the same as the supported output channel in the audio since down-mixing is not supported.
+ *
+ * This value can be used during #MIX_DECODE_DECODERETURN mode for buffer size/duration calculation.
+ *
+ * In Moorestown, number of channel must be 1 or 2.
+*/
+#define MIX_ACP_NUM_CHANNELS(obj) (MIX_AUDIOCONFIGPARAMS(obj)->num_channels)
+
+/**
+ * MIX_ACP_BITRATE:
+ * @obj: #MixAudioConfigParams object
+ *
+ * MixAudioConfigParam.bit_rate accessor.
+ *
+ * Bit rate of the current audio.
+ *
+ * <remark>Optional</remark>
+*/
+#define MIX_ACP_BITRATE(obj) (MIX_AUDIOCONFIGPARAMS(obj)->bit_rate)
+
+/**
+ * MIX_ACP_SAMPLE_FREQ:
+ * @obj: #MixAudioConfigParams object
+ *
+ * MixAudioConfigParam.sample_freq accessor.
+ *
+ * Output sampling frequency.
+ *
+ * This value can be used during #MIX_DECODE_DECODERETURN mode for buffer size/duration calculation.
+*/
+#define MIX_ACP_SAMPLE_FREQ(obj) (MIX_AUDIOCONFIGPARAMS(obj)->sample_freq)
+
+/**
+ * mix_acp_get_decodemode:
+ * @obj: #MixAudioConfigParams
+ * @returns: #MixDecodeMode
+ *
+ * Retrieve currently configured #MixDecodeMode.
+ */
+MixDecodeMode mix_acp_get_decodemode(MixAudioConfigParams *obj);
+
+/**
+ * mix_acp_set_decodemode:
+ * @obj: #MixAudioConfigParams
+ * @mode: #MixDecodeMode to set
+ * @returns: #MIX_RESULT
+ *
+ * Configure session for one of the #MixDecodeMode.
+ */
+MIX_RESULT mix_acp_set_decodemode(MixAudioConfigParams *obj, MixDecodeMode mode);
+
+/**
+ * mix_acp_get_streamname:
+ * @obj: #MixAudioConfigParams
+ * @returns: pointer to a copy of the stream name. NULL if name is not available.
+ *
+ * Return a copy of the stream name. Caller must free with g_free()
+ */
+gchar *mix_acp_get_streamname(MixAudioConfigParams *obj);
+
+/**
+ * mix_acp_set_streamname:
+ * @obj: #MixAudioConfigParams
+ * @streamname: Stream name to set
+ * @returns: #MIX_RESULT
+ *
+ * Set the stream name. The object will make a copy of the input stream name string.
+ *
+ */
+MIX_RESULT mix_acp_set_streamname(MixAudioConfigParams *obj, const gchar *streamname);
+
+/**
+ * mix_acp_set_audio_manager:
+ * @obj: #MixAudioConfigParams
+ * @am: #MixAudioManager
+ * @returns: #MIX_RESULT
+ *
+ * Set the Audio Manager to one of the #MixAudioManager.
+ */
+MIX_RESULT mix_acp_set_audio_manager(MixAudioConfigParams *obj, MixAudioManager am);
+
+/**
+ * mix_acp_get_audio_manager:
+ * @obj: #MixAudioConfigParams
+ * @returns: #MixAudioManager
+ *
+ * Retrieve name of currently configured audio manager.
+ */
+MixAudioManager mix_acp_get_audio_manager(MixAudioConfigParams *obj);
+
+/**
+ * mix_acp_is_streamname_valid:
+ * @obj: #MixAudioConfigParams
+ * @returns: boolean indicates if stream name is valid.
+ *
+ * Check if stream name is valid considering the current Decode Mode.
+ */
+gboolean mix_acp_is_streamname_valid(MixAudioConfigParams *obj);
+
+
+/**
+ * mix_acp_get_bps:
+ * @obj: #MixAudioConfigParams
+ * @returns: #MixACPBPSType
+ *
+ * Retrieve currently configured bits-per-sample value.
+ */
+MixACPBPSType mix_acp_get_bps(MixAudioConfigParams *obj);
+
+/**
+ * mix_acp_set_bps:
+ * @obj: #MixAudioConfigParams
+ * @type: #MixACPBPSType to set
+ * @returns: #MIX_RESULT
+ *
+ * Configure bits-per-sample to one of the supported #MixACPBPSType.
+ */
+MIX_RESULT mix_acp_set_bps(MixAudioConfigParams *obj, MixACPBPSType type);
+
+/**
+ * mix_acp_get_op_align:
+ * @obj: #MixAudioConfigParams object
+ * @returns: #MixACPOpAlign
+ *
+ * Get Output Alignment.
+ */
+MixACPOpAlign mix_acp_get_op_align(MixAudioConfigParams *obj);
+
+/**
+ * mix_acp_set_op_align:
+ * @obj: #MixAudioConfigParams object
+ * @op_align: One of the supported #MixACPOpAlign
+ * @returns: MIX_RESULT
+ *
+ * Set Output Alignment to one of the #MixACPOpAlign value.
+ */
+MIX_RESULT mix_acp_set_op_align(MixAudioConfigParams *obj, MixACPOpAlign op_align);
+
+/* void mix_acp_print_params(MixAudioConfigParams *obj); */
+
+
+#endif /* __MIX_AUDIOCONFIGPARAMS_H__ */
+
diff --git a/mix_audio/src/mixacpaac.c b/mix_audio/src/mixacpaac.c
new file mode 100644
index 0000000..4f83eb9
--- /dev/null
+++ b/mix_audio/src/mixacpaac.c
@@ -0,0 +1,360 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+*/
+
+/**
+ * SECTION:mixacpaac
+ * @short_description: Audio configuration parameters for AAC-LC, HEAAC v1, and HEAAC v2 audio format.
+ * @include: mixacpaac.h
+ *
+ * A data object which stores audio specific parameters for the following formats:
+ * <itemizedlist>
+ * <listitem>AAC-LC</listitem>
+ * <listitem>HE-AAC v1</listitem>
+ * <listitem>HE-AAC v2</listitem>
+ * </itemizedlist>
+ *
+ * Additional parameters must be set in the parent object #MixAudioConfigParams
+ */
+
+#include "mixacpaac.h"
+#include <string.h>
+#include <mixlog.h>
+
+static GType _mix_acp_aac_type = 0;
+static MixAudioConfigParamsClass *parent_class = NULL;
+
+#define _do_init { _mix_acp_aac_type = g_define_type_id; }
+
+gboolean mix_acp_aac_copy(MixParams* target, const MixParams *src);
+MixParams* mix_acp_aac_dup(const MixParams *obj);
+gboolean mix_acp_aac_equal(MixParams* first, MixParams *second);
+static void mix_acp_aac_finalize(MixParams *obj);
+
+void mix_aac_print_params(MixAudioConfigParams *obj);
+
+G_DEFINE_TYPE_WITH_CODE(MixAudioConfigParamsAAC, mix_acp_aac, MIX_TYPE_AUDIOCONFIGPARAMS, _do_init);
+
+static void mix_acp_aac_init (MixAudioConfigParamsAAC *self)
+{
+  /* Instance initializer: start every field at its "unspecified"
+     default; callers configure real values through the setters. */
+  self->MPEG_id = MIX_AAC_MPEG_ID_NULL;
+  self->bit_stream_format = MIX_AAC_BS_NULL;
+  self->aac_profile = MIX_AAC_PROFILE_NULL;
+  self->aot = 0;
+  self->aac_sample_rate = 0;
+  self->aac_channels = 0;
+  self->bit_rate_type = MIX_AAC_BR_NULL;   /* neither CBR nor VBR chosen yet */
+  self->CRC = FALSE;
+  self->sbrPresentFlag = -1;               /* -1: implicit signalling */
+  self->psPresentFlag = -1;                /* -1: implicit signalling */
+  self->pce_present = FALSE;               /* RAW-format only; 1=present, 0=absent */
+  self->syntc_id[0] = 0;                   /* 0 for ID_SCE(Dual Mono), -1 for raw */
+  self->syntc_id[1] = 0;
+  self->syntc_tag[0] = 0;                  /* -1 for raw; 0-16 for other streams */
+  self->syntc_tag[1] = 0;
+  self->num_syntc_elems = 0;
+}
+
+static void mix_acp_aac_class_init(MixAudioConfigParamsAACClass *klass)
+{
+  MixParamsClass *params_class = MIX_PARAMS_CLASS(klass);
+  MixAudioConfigParamsAAC *dummy; (void)sizeof(dummy);
+  MixAudioConfigParamsClass *acp_class = (MixAudioConfigParamsClass *)klass;
+
+  /* Remember the parent class so finalize/copy/equal can chain up. */
+  parent_class = (MixAudioConfigParamsClass *) g_type_class_peek_parent (klass);
+
+  /* Install the AAC-specific MixParams virtual methods. */
+  params_class->finalize = mix_acp_aac_finalize;
+  params_class->copy = (MixParamsCopyFunction)mix_acp_aac_copy;
+  params_class->dup = (MixParamsDupFunction)mix_acp_aac_dup;
+  params_class->equal = (MixParamsEqualFunction)mix_acp_aac_equal;
+
+  /* Override the parameter-dump hook with the AAC version. */
+  acp_class->print_params = mix_aac_print_params;
+}
+
+MixAudioConfigParamsAAC *mix_acp_aac_new(void)
+{
+  /* Allocate through the GType system so instance init runs. */
+  return (MixAudioConfigParamsAAC *)g_type_create_instance (MIX_TYPE_AUDIOCONFIGPARAMSAAC);
+}
+
+void mix_acp_aac_finalize(MixParams *obj)
+{
+  /* No instance-owned resources to release here; just chain up
+     so the parent class can finalize its members. */
+  MixParamsClass *parent = MIX_PARAMS_CLASS(parent_class);
+  if (parent->finalize != NULL)
+    parent->finalize(obj);
+}
+
+MixAudioConfigParamsAAC *mix_acp_aac_ref(MixAudioConfigParamsAAC *mix)
+{
+  /* Guard against NULL so callers get NULL back instead of depending
+     on mix_params_ref's NULL handling (matches mix_acp_mp3_ref). */
+  if (G_UNLIKELY(!mix)) return NULL;
+  return (MixAudioConfigParamsAAC*)mix_params_ref(MIX_PARAMS(mix));
+}
+
+/**
+ * mix_acp_aac_dup:
+ * @obj: a #MixAudioConfigParamsAAC object
+ * @returns: a newly allocated duplicate of the object, or NULL on failure.
+ *
+ * Clone @obj by creating a fresh instance and copying all members into it.
+ */
+MixParams* mix_acp_aac_dup(const MixParams *obj)
+{
+  if (!MIX_IS_AUDIOCONFIGPARAMSAAC(obj))
+    return NULL;
+
+  MixAudioConfigParamsAAC *clone = mix_acp_aac_new();
+  if (!mix_acp_aac_copy(MIX_PARAMS(clone), MIX_PARAMS(obj)))
+  {
+    /* Copy failed: drop the half-built clone and report failure. */
+    mix_acp_aac_unref(clone);
+    return NULL;
+  }
+  return MIX_PARAMS(clone);
+}
+
+/**
+ * mix_acp_aac_copy:
+ * @target: copy to target
+ * @src: copy from src
+ * @returns: boolean indicates if copy is successful.
+ *
+ * Copy instance data from @src to @target.
+ */
+gboolean mix_acp_aac_copy(MixParams* target, const MixParams *src)
+{
+  if (MIX_IS_AUDIOCONFIGPARAMSAAC(target) && MIX_IS_AUDIOCONFIGPARAMSAAC(src))
+  {
+    MixAudioConfigParamsAAC *t = MIX_AUDIOCONFIGPARAMSAAC(target);
+    MixAudioConfigParamsAAC *s = MIX_AUDIOCONFIGPARAMSAAC(src);
+
+    /* Copy every AAC-specific member. Previously only the first six
+       fields were copied, so objects produced by mix_acp_aac_dup()
+       compared unequal to their source via mix_acp_aac_equal(),
+       which also checks the sbr/ps/pce/syntc members. */
+    t->MPEG_id = s->MPEG_id;
+    t->bit_stream_format = s->bit_stream_format;
+    t->aac_profile = s->aac_profile;
+    t->aot = s->aot;
+    t->aac_sample_rate = s->aac_sample_rate;
+    t->aac_channels = s->aac_channels;
+    t->bit_rate_type = s->bit_rate_type;
+    t->CRC = s->CRC;
+    t->sbrPresentFlag = s->sbrPresentFlag;
+    t->psPresentFlag = s->psPresentFlag;
+    t->pce_present = s->pce_present;
+    t->syntc_id[0] = s->syntc_id[0];
+    t->syntc_id[1] = s->syntc_id[1];
+    t->syntc_tag[0] = s->syntc_tag[0];
+    t->syntc_tag[1] = s->syntc_tag[1];
+    t->num_syntc_elems = s->num_syntc_elems;
+
+    // Now chainup base class so inherited members are copied too.
+    MixParamsClass *klass = MIX_PARAMS_CLASS(parent_class);
+    if (klass->copy)
+    {
+      return klass->copy(MIX_PARAMS_CAST(target), MIX_PARAMS_CAST(src));
+    }
+    else
+      return TRUE;
+  }
+  return FALSE;
+}
+
+/**
+ * mix_acp_aac_equal:
+ * @first: first object to compare
+ * @second: second object to compare
+ * @returns: boolean indicates if instances are equal.
+ *
+ * Compare @first and @second member by member, chaining up to the
+ * base class for the inherited members.
+ */
+gboolean mix_acp_aac_equal(MixParams* first, MixParams *second)
+{
+  gboolean ret = FALSE;
+
+  if (first && second)
+  {
+    if (first == second) return TRUE;
+  }
+  else
+  {
+    return FALSE;
+  }
+
+  // members within this scope equal. chaining up.
+  MixParamsClass *klass = MIX_PARAMS_CLASS(parent_class);
+  if (klass->equal)
+    ret = klass->equal(first, second);
+  else
+    ret = TRUE;
+
+  if (ret && MIX_IS_AUDIOCONFIGPARAMSAAC(first) && MIX_IS_AUDIOCONFIGPARAMSAAC(second))
+  {
+    MixAudioConfigParamsAAC *acp1 = MIX_AUDIOCONFIGPARAMSAAC(first);
+    MixAudioConfigParamsAAC *acp2 = MIX_AUDIOCONFIGPARAMSAAC(second);
+
+    /* Bug fix: bit_stream_format was compared with '&&' (truthiness of
+       the two values) instead of '==', so e.g. ADIF vs RAW compared
+       as equal while ADTS (0) vs ADTS compared as unequal. */
+    ret = (acp1->MPEG_id == acp2->MPEG_id) &&
+          (acp1->bit_stream_format == acp2->bit_stream_format) &&
+          (acp1->aac_profile == acp2->aac_profile) &&
+          (acp1->aot == acp2->aot) &&
+          (acp1->bit_rate_type == acp2->bit_rate_type) &&
+          (acp1->CRC == acp2->CRC) &&
+          (acp1->sbrPresentFlag == acp2->sbrPresentFlag) &&
+          (acp1->psPresentFlag == acp2->psPresentFlag) &&
+          (acp1->pce_present == acp2->pce_present) &&
+          (acp1->syntc_id[0] == acp2->syntc_id[0]) &&
+          (acp1->syntc_id[1] == acp2->syntc_id[1]) &&
+          (acp1->syntc_tag[0] == acp2->syntc_tag[0]) &&
+          (acp1->syntc_tag[1] == acp2->syntc_tag[1]);
+  }
+
+  return ret;
+}
+
+MIX_RESULT mix_acp_aac_set_bit_stream_format(MixAudioConfigParamsAAC *obj, MixAACBitstreamFormt bit_stream_format)
+{
+  MIX_RESULT ret = MIX_RESULT_SUCCESS;
+
+  if (!obj) return MIX_RESULT_NULL_PTR;
+
+  /* Bug fix: the range check used '&&', which can never be true (a value
+     cannot be both below MIX_AAC_BS_ADTS and at/above MIX_AAC_BS_LAST),
+     so every out-of-range value was silently accepted. Use '||' so only
+     values in [MIX_AAC_BS_ADTS, MIX_AAC_BS_LAST) are stored. */
+  if (bit_stream_format < MIX_AAC_BS_ADTS || bit_stream_format >= MIX_AAC_BS_LAST)
+  {
+    ret = MIX_RESULT_INVALID_PARAM;
+  }
+  else
+  {
+    obj->bit_stream_format = bit_stream_format;
+  }
+
+  return ret;
+}
+MixAACBitstreamFormt mix_acp_aac_get_bit_stream_format(MixAudioConfigParamsAAC *obj)
+{
+  /* MIX_AAC_BS_NULL doubles as the "no object" answer. */
+  return obj ? obj->bit_stream_format : MIX_AAC_BS_NULL;
+}
+
+MIX_RESULT mix_acp_aac_set_aac_profile(MixAudioConfigParamsAAC *obj, MixAACProfile aac_profile)
+{
+  if (!obj) return MIX_RESULT_NULL_PTR;
+
+  /* Reject anything outside the defined profile range. */
+  if (aac_profile < MIX_AAC_PROFILE_MAIN || aac_profile >= MIX_AAC_PROFILE_LAST)
+    return MIX_RESULT_INVALID_PARAM;
+
+  obj->aac_profile = aac_profile;
+  return MIX_RESULT_SUCCESS;
+}
+MixAACProfile mix_acp_aac_get_aac_profile(MixAudioConfigParamsAAC *obj)
+{
+  /* MIX_AAC_PROFILE_NULL doubles as the "no object" answer. */
+  return obj ? obj->aac_profile : MIX_AAC_PROFILE_NULL;
+}
+
+MIX_RESULT mix_acp_aac_set_bit_rate_type(MixAudioConfigParamsAAC *obj, MixAACBitrateType bit_rate_type)
+{
+  if (!obj) return MIX_RESULT_NULL_PTR;
+
+  /* Only an explicit CBR or VBR selection is accepted. */
+  if (bit_rate_type != MIX_AAC_BR_CONSTANT && bit_rate_type != MIX_AAC_BR_VARIABLE)
+    return MIX_RESULT_INVALID_PARAM;
+
+  obj->bit_rate_type = bit_rate_type;
+  return MIX_RESULT_SUCCESS;
+}
+MixAACBitrateType mix_acp_aac_get_bit_rate_type(MixAudioConfigParamsAAC *obj)
+{
+  /* MIX_AAC_BR_NULL doubles as the "no object" answer. */
+  return obj ? obj->bit_rate_type : MIX_AAC_BR_NULL;
+}
+
+void mix_aac_print_params(MixAudioConfigParams *obj)
+{
+  /* print_params override: dump the AAC-specific fields at INFO level. */
+  MixAudioConfigParamsAAC *p = MIX_AUDIOCONFIGPARAMSAAC(obj);
+
+  mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_INFO, "Mpeg ID: %d\n", p->MPEG_id);
+  mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_INFO, "bit_stream_format: %d\n", p->bit_stream_format);
+  mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_INFO, "aac_profile: %d\n", p->aac_profile);
+  mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_INFO, "aot: %d\n", p->aot);
+  mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_INFO, "bit_rate_type: %d\n", p->bit_rate_type);
+  mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_INFO, "CRC: %d\n", p->CRC);
+  mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_INFO, " \n");
+}
+
+
+MIX_RESULT mix_acp_aac_set_aot(MixAudioConfigParamsAAC *obj, guint aot)
+{
+  if (!obj) return MIX_RESULT_NULL_PTR;
+  if (!MIX_IS_AUDIOCONFIGPARAMSAAC(obj)) return MIX_RESULT_INVALID_PARAM;
+
+  /* Only AAC-LC (aot==2) and SBR (aot==5) object types are supported. */
+  if (aot != 2 && aot != 5)
+    return MIX_RESULT_NOT_SUPPORTED;
+
+  obj->aot = aot;
+  return MIX_RESULT_SUCCESS;
+}
+
+guint mix_acp_aac_get_aot(MixAudioConfigParamsAAC *obj)
+{
+  /* 0 means "not configured" and is also returned for a bad object. */
+  return MIX_IS_AUDIOCONFIGPARAMSAAC(obj) ? obj->aot : 0;
+}
+
+
+MIX_RESULT mix_acp_aac_set_mpeg_id(MixAudioConfigParamsAAC *obj, MixAACMpegID mpegid)
+{
+  if (!obj) return MIX_RESULT_NULL_PTR;
+
+  if (MIX_IS_AUDIOCONFIGPARAMSAAC(obj))
+  {
+    /* Bug fix: the range check used '||', which is true for every value
+       (anything is either >= MIX_AAC_MPEG_ID_NULL or < MIX_AAC_MPEG_LAST),
+       so invalid IDs were silently accepted. Use '&&' so only values in
+       [MIX_AAC_MPEG_ID_NULL, MIX_AAC_MPEG_LAST) are stored. */
+    if ((mpegid >= MIX_AAC_MPEG_ID_NULL) && (mpegid < MIX_AAC_MPEG_LAST))
+    {
+      obj->MPEG_id=mpegid;
+      return MIX_RESULT_SUCCESS;
+    }
+    else
+    {
+      return MIX_RESULT_NOT_SUPPORTED;
+    }
+  }
+  else
+  {
+    return MIX_RESULT_INVALID_PARAM;
+  }
+}
+
+MixAACMpegID mix_acp_aac_get_mpeg_id(MixAudioConfigParamsAAC *obj)
+{
+  /* MIX_AAC_MPEG_ID_NULL doubles as the "no/bad object" answer. */
+  return MIX_IS_AUDIOCONFIGPARAMSAAC(obj) ? obj->MPEG_id : MIX_AAC_MPEG_ID_NULL;
+}
+
diff --git a/mix_audio/src/mixacpaac.h b/mix_audio/src/mixacpaac.h
new file mode 100644
index 0000000..7de2d95
--- /dev/null
+++ b/mix_audio/src/mixacpaac.h
@@ -0,0 +1,413 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+*/
+
+#ifndef __MIX_ACP_AAC_H__
+#define __MIX_ACP_AAC_H__
+
+#include "mixacp.h"
+
+/**
+ * MIX_TYPE_AUDIOCONFIGPARAMSAAC:
+ *
+ * Get type of class.
+ */
+#define MIX_TYPE_AUDIOCONFIGPARAMSAAC (mix_acp_aac_get_type ())
+
+/**
+ * MIX_AUDIOCONFIGPARAMSAAC:
+ * @obj: object to be type-casted.
+ *
+ * Type casting
+ */
+#define MIX_AUDIOCONFIGPARAMSAAC(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_AUDIOCONFIGPARAMSAAC, MixAudioConfigParamsAAC))
+
+/**
+ * MIX_IS_AUDIOCONFIGPARAMSAAC:
+ * @obj: an object.
+ *
+ * Checks if the given object is an instance of #MixAudioConfigParams
+ */
+#define MIX_IS_AUDIOCONFIGPARAMSAAC(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_AUDIOCONFIGPARAMSAAC))
+
+/**
+ * MIX_AUDIOCONFIGPARAMSAAC_CLASS:
+ * @klass: class to be type-casted.
+ *
+ * Type Casting.
+ */
+#define MIX_AUDIOCONFIGPARAMSAAC_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_AUDIOCONFIGPARAMSAAC, MixAudioConfigParamsAACClass))
+
+/**
+ * MIX_IS_AUDIOCONFIGPARAMSAAC_CLASS:
+ * @klass: a class.
+ *
+ * Checks if the given class is #MixAudioConfigParamsClass
+ */
+#define MIX_IS_AUDIOCONFIGPARAMSAAC_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_AUDIOCONFIGPARAMSAAC))
+
+/**
+ * MIX_AUDIOCONFIGPARAMSAAC_GET_CLASS:
+ * @obj: a #MixAudioConfigParams object.
+ *
+ * Get the class instance of the object.
+ */
+#define MIX_AUDIOCONFIGPARAMSAAC_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_AUDIOCONFIGPARAMSAAC, MixAudioConfigParamsAACClass))
+
+typedef struct _MixAudioConfigParamsAAC MixAudioConfigParamsAAC;
+typedef struct _MixAudioConfigParamsAACClass MixAudioConfigParamsAACClass;
+
+/**
+ * MixAACBitrateType:
+ * @MIX_AAC_BR_NULL: Undefined bit rate type.
+ * @MIX_AAC_BR_CONSTANT: Constant bit rate.
+ * @MIX_AAC_BR_VARIABLE: Variable bit rate.
+ * @MIX_AAC_BR_LAST: last entry.
+ *
+ * Types of bitrate in AAC.
+ */
+typedef enum {
+ MIX_AAC_BR_NULL=-1,
+ MIX_AAC_BR_CONSTANT=0,
+ MIX_AAC_BR_VARIABLE,
+ MIX_AAC_BR_LAST
+} MixAACBitrateType;
+
+/**
+ * MixAACBitstreamFormt:
+ * @MIX_AAC_BS_NULL: Undefined bitstream format.
+ * @MIX_AAC_BS_ADTS: Bitstream is in ADTS format.
+ * @MIX_AAC_BS_ADIF: Bitstream is in ADIF format.
+ * @MIX_AAC_BS_RAW: Bitstream is in raw format.
+ * @MIX_AAC_BS_LAST: Last entry.
+ *
+ * AAC bitstream format.
+ */
+typedef enum {
+ MIX_AAC_BS_NULL=-1,
+ MIX_AAC_BS_ADTS=0,
+ MIX_AAC_BS_ADIF,
+ MIX_AAC_BS_RAW,
+ MIX_AAC_BS_LAST
+} MixAACBitstreamFormt;
+
+/**
+ * MixAACProfile:
+ * @MIX_AAC_PROFILE_NULL: Undefined profile.
+ * @MIX_AAC_PROFILE_MAIN: <emphasis>Not Supported</emphasis> AAC Main profile.
+ * @MIX_AAC_PROFILE_LC: AAC-LC profile, including support of SBR and PS tool.
+ * @MIX_AAC_PROFILE_SSR: <emphasis>Not Supported</emphasis> SSR profile.
+ * @MIX_AAC_PROFILE_LAST: Last entry.
+ *
+ * AAC profiles definitions.
+ */
+typedef enum {
+ MIX_AAC_PROFILE_NULL=-1,
+ MIX_AAC_PROFILE_MAIN=0,
+ MIX_AAC_PROFILE_LC,
+ MIX_AAC_PROFILE_SSR,
+ MIX_AAC_PROFILE_LAST
+} MixAACProfile;
+
+/* Using enumeration as this MPEG ID definition is specific to SST and different from
+ any MPEG/ADTS header.
+*/
+/**
+ * MixAACMpegID:
+ * @MIX_AAC_MPEG_ID_NULL: Undefined MPEG ID.
+ * @MIX_AAC_MPEG_2_ID: Indicate MPEG 2 Audio.
+ * @MIX_AAC_MPEG_4_ID: Indicate MPEG 4 Audio.
+ * @MIX_AAC_MPEG_LAST: last entry.
+ *
+ * AAC MPEG ID.
+*/
+typedef enum {
+ MIX_AAC_MPEG_ID_NULL=-1,
+ MIX_AAC_MPEG_2_ID = 0,
+ MIX_AAC_MPEG_4_ID = 1,
+ MIX_AAC_MPEG_LAST
+} MixAACMpegID;
+
+/**
+ * MixAudioConfigParamsAAC:
+ * @parent: parent.
+ * @MPEG_id: MPEG ID. See #mix_acp_aac_set_mpeg_id
+ * @bit_stream_format: Bitstream format. See #mix_acp_aac_set_bit_stream_format.
+ * @aac_profile: AAC profile. See #mix_acp_aac_set_aac_profile.
+ * @aot: Audio object type. See #mix_acp_aac_set_aot
+ * @aac_sample_rate: See #MIX_ACP_AAC_SAMPLE_RATE macro.
+ * @aac_channels: See #MIX_ACP_AAC_CHANNELS macro.
+ * @bit_rate_type: Bitrate type. See #mix_acp_aac_set_bit_rate_type
+ * @sbrPresentFlag: See #MIX_ACP_AAC_SBR_FLAG macro.
+ * @psPresentFlag: See #MIX_ACP_AAC_PS_FLAG macro.
+ * @CRC: CRC check 0:disable, 1:enable.
+ * @pce_present: <emphasis>Not Used.</emphasis> See #MIX_ACP_AAC_PCE_FLAG
+ * @syntc_id: <emphasis>Not Used.</emphasis> 0 for ID_SCE(Dual Mono), -1 for raw.
+ * @syntc_tag: <emphasis>Not Used.</emphasis> -1 for raw. 0-16 for rest of the streams.
+ * @num_syntc_elems: <emphasis>Not Used.</emphasis> Number of syntatic elements.
+ *
+ * MixAudio Parameter object
+ */
+struct _MixAudioConfigParamsAAC
+{
+ /*< public >*/
+ MixAudioConfigParams parent;
+
+ /*< public >*/
+ /* Audio Format Parameters */
+ MixAACMpegID MPEG_id;
+ MixAACBitstreamFormt bit_stream_format;
+ MixAACProfile aac_profile;
+ guint aot;
+ guint aac_sample_rate;
+ guint aac_channels;
+ MixAACBitrateType bit_rate_type;
+ gboolean CRC;
+ guint sbrPresentFlag;
+ guint psPresentFlag;
+ gboolean pce_present;
+ gint8 syntc_id[2];
+ gint8 syntc_tag[2];
+ gint num_syntc_elems;
+ /*< private >*/
+ void* reserved1;
+ void* reserved2;
+ void* reserved3;
+ void* reserved4;
+};
+
+/**
+ * MixAudioConfigParamsAACClass:
+ *
+ * MI-X Audio object class
+ */
+struct _MixAudioConfigParamsAACClass
+{
+ /*< public >*/
+ MixAudioConfigParamsClass parent_class;
+
+ /* class members */
+};
+
+/**
+ * mix_acp_aac_get_type:
+ * @returns: type
+ *
+ * Get the type of object.
+ */
+GType mix_acp_aac_get_type (void);
+
+/**
+ * mix_acp_aac_new:
+ * @returns: A newly allocated instance of #MixAudioConfigParamsAAC
+ *
+ * Use this method to create new instance of #MixAudioConfigParamsAAC
+ */
+MixAudioConfigParamsAAC *mix_acp_aac_new(void);
+
+/**
+ * mix_acp_aac_ref:
+ * @mix: object to add reference
+ * @returns: the MixAudioConfigParamsAAC instance where reference count has been increased.
+ *
+ * Add reference count.
+ */
+MixAudioConfigParamsAAC *mix_acp_aac_ref(MixAudioConfigParamsAAC *mix);
+
+/**
+ * mix_acp_aac_unref:
+ * @obj: object to unref.
+ *
+ * Decrement reference count of the object.
+ */
+#define mix_acp_aac_unref(obj) mix_params_unref(MIX_PARAMS(obj))
+
+/* Class Methods */
+
+
+/**
+ * mix_acp_aac_set_mpeg_id:
+ * @obj: #MixAudioConfigParamsAAC
+ * @mpegid: MPEG ID to set.
+ * @return: MIX_RESULT
+ *
+ * Configure decoder to treat audio as MPEG 2 or MPEG 4.
+*/
+MIX_RESULT mix_acp_aac_set_mpeg_id(MixAudioConfigParamsAAC *obj, MixAACMpegID mpegid);
+
+/**
+ * mix_acp_aac_get_mpeg_id:
+ * @obj: #MixAudioConfigParamsAAC object
+ * @returns: MPEG ID.
+ *
+ * Retrieve currently configured mpeg id value.
+*/
+MixAACMpegID mix_acp_aac_get_mpeg_id(MixAudioConfigParamsAAC *obj);
+
+/**
+ * MIX_ACP_AAC_CRC:
+ * @obj: #MixAudioConfigParamsAAC object.
+ *
+ * #MixAudioConfigParamAAC.CRC accessor.
+*/
+#define MIX_ACP_AAC_CRC(obj) (MIX_AUDIOCONFIGPARAMSAAC(obj)->CRC)
+
+/**
+ * mix_acp_aac_set_aot:
+ * @obj: #MixAudioConfigParamsAAC
+ * @aot: Audio Object Type.
+ *
+ * Audio Object Type for the MPEG-4 audio stream. Valid value are:
+ *
+ * 2 - for AAC-LC
+ *
+ * 5 - for SBR
+ *
+ * Method returns MIX_RESULT_NOT_SUPPORTED for not supported value.
+ *
+*/
+MIX_RESULT mix_acp_aac_set_aot(MixAudioConfigParamsAAC *obj, guint aot);
+
+/**
+ * mix_acp_aac_get_aot:
+ * @obj: #MixAudioConfigParamsAAC
+ * @returns: Currently configured audio object type. Or 0 if not yet specified.
+ *
+ * To retrieve currently configured audio object type.
+*/
+guint mix_acp_aac_get_aot(MixAudioConfigParamsAAC *obj);
+
+/**
+ * MIX_ACP_AAC_SBR_FLAG:
+ * @obj: #MixAudioConfigParamsAAC object
+ *
+ * MixAudioConfigParamAAC.sbrPresentFlag accessor.
+ *
+ * Applicable only when @bit_stream_format==#MIX_AAC_BS_RAW. Indicates whether SBR data is present.
+ *
+ * 0: Absent
+ *
+ * 1: Present
+ *
+ * -1 (0xffffffff): indicates implicit signalling.
+ */
+#define MIX_ACP_AAC_SBR_FLAG(obj) (MIX_AUDIOCONFIGPARAMSAAC(obj)->sbrPresentFlag)
+
+/**
+ * MIX_ACP_AAC_PS_FLAG:
+ * @obj: #MixAudioConfigParamsAAC object
+ *
+ * MixAudioConfigParamAAC.psPresentFlag accessor.
+ *
+ * Applicable only when @bit_stream_format==#MIX_AAC_BS_RAW. Indicates whether PS data is present.
+ *
+ * 0: Absent
+ *
+ * 1: Present
+ *
+ * -1 (0xffffffff): indicates implicit signalling.
+ */
+#define MIX_ACP_AAC_PS_FLAG(obj) (MIX_AUDIOCONFIGPARAMSAAC(obj)->psPresentFlag)
+
+/**
+ * MIX_ACP_AAC_PCE_FLAG:
+ * @obj: #MixAudioConfigParamsAAC object.
+ *
+ * MixAudioConfigParamAAC.pce_present accessor.
+ *
+ * Applicable only when @bit_stream_format==#MIX_AAC_BS_RAW. Indicates PCE data presence.
+ *
+ * 1:present
+ *
+ * 0:absent.
+ *
+ * <remark>Not Used on Moorestown.</remark>
+ */
+#define MIX_ACP_AAC_PCE_FLAG(obj) (MIX_AUDIOCONFIGPARAMSAAC(obj)->pce_present)
+
+/**
+ * MIX_ACP_AAC_SAMPLE_RATE:
+ * @obj: #MixAudioConfigParamsAAC object.
+ *
+ * MixAudioConfigParamAAC.aac_sample_rate accessor.
+ *
+ * Plain AAC decoder operating sample rate. Which could be different from the output sampling rate with HE AAC v1 and v2.
+ */
+#define MIX_ACP_AAC_SAMPLE_RATE(obj) (MIX_AUDIOCONFIGPARAMSAAC(obj)->aac_sample_rate)
+
+/**
+ * MIX_ACP_AAC_CHANNELS:
+ * @obj: #MixAudioConfigParamsAAC
+ *
+ * MixAudioConfigParamAAC.aac_channels accessor.
+ *
+ * Indicates the number of output channels used by AAC decoder before SBR or PS tools are applied.
+ *
+ */
+#define MIX_ACP_AAC_CHANNELS(obj) (MIX_AUDIOCONFIGPARAMSAAC(obj)->aac_channels)
+
+/**
+ * mix_acp_aac_get_bit_stream_format:
+ * @obj: #MixAudioConfigParamsAAC
+ * @returns: #MixAACBitstreamFormt
+ *
+ * Return the bitstream format currently configured.
+ */
+MixAACBitstreamFormt mix_acp_aac_get_bit_stream_format(MixAudioConfigParamsAAC *obj);
+
+/**
+ * mix_acp_aac_set_bit_stream_format:
+ * @obj: #MixAudioConfigParamsAAC
+ * @bit_stream_format: Bit stream format.
+ * @returns: MIX_RESULT
+ *
+ * Set the type of bitstream format as specified in #MixAACBitstreamFormt.
+ */
+MIX_RESULT mix_acp_aac_set_bit_stream_format(MixAudioConfigParamsAAC *obj, MixAACBitstreamFormt bit_stream_format);
+
+/**
+ * mix_acp_aac_get_aac_profile:
+ * @obj: #MixAudioConfigParamsAAC
+ * @returns: #MixAACProfile
+ *
+ * Retrieve the AAC profile currently configured.
+ */
+MixAACProfile mix_acp_aac_get_aac_profile(MixAudioConfigParamsAAC *obj);
+
+/**
+ * mix_acp_aac_set_aac_profile:
+ * @obj: #MixAudioConfigParamsAAC
+ * @aac_profile: AAC profile to set.
+ * @returns: MIX_RESULT
+ *
+ * Configure AAC profile for current session.
+ *
+ * Only #MIX_AAC_PROFILE_LC is supported in Moorestown.
+ */
+MIX_RESULT mix_acp_aac_set_aac_profile(MixAudioConfigParamsAAC *obj, MixAACProfile aac_profile);
+
+/**
+ * mix_acp_aac_get_bit_rate_type:
+ * @obj: #MixAudioConfigParamsAAC
+ * @returns: #MixAACBitrateType
+ *
+ * Retrieve the bit rate type currently configured.
+ */
+MixAACBitrateType mix_acp_aac_get_bit_rate_type(MixAudioConfigParamsAAC *obj);
+
+/**
+ * mix_acp_aac_set_bit_rate_type:
+ * @obj: #MixAudioConfigParamsAAC
+ * @bit_rate_type: Bit rate type to set.
+ * @returns: MIX_RESULT
+ *
+ * Set the bit rate type used.
+ */
+MIX_RESULT mix_acp_aac_set_bit_rate_type(MixAudioConfigParamsAAC *obj, MixAACBitrateType bit_rate_type);
+
+#endif /* __MIX_ACP_AAC_H__ */
diff --git a/mix_audio/src/mixacpmp3.c b/mix_audio/src/mixacpmp3.c
new file mode 100644
index 0000000..75ab8cb
--- /dev/null
+++ b/mix_audio/src/mixacpmp3.c
@@ -0,0 +1,175 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+*/
+
+/**
+ * SECTION:mixacpmp3
+ * @short_description: Audio configuration parameters for MP3 audio.
+ * @include: mixacpmp3.h
+ *
+ * A data object which stores audio specific parameters for MP3 audio.
+ *
+ * Additional parameters must be set in the parent object #MixAudioConfigParams
+ */
+
+#include "mixacpmp3.h"
+
+static GType _mix_acp_mp3_type = 0;
+static MixAudioConfigParamsClass *parent_class = NULL;
+
+#define _do_init { _mix_acp_mp3_type = g_define_type_id; }
+
+gboolean mix_acp_mp3_copy(MixParams* target, const MixParams *src);
+MixParams* mix_acp_mp3_dup(const MixParams *obj);
+gboolean mix_acp_mp3_equal(MixParams* first, MixParams *second);
+static void mix_acp_mp3_finalize(MixParams *obj);
+
+G_DEFINE_TYPE_WITH_CODE(MixAudioConfigParamsMP3, mix_acp_mp3, MIX_TYPE_AUDIOCONFIGPARAMS, _do_init);
+
+static void mix_acp_mp3_init (MixAudioConfigParamsMP3 *self)
+{
+  /* Instance initializer: CRC off, MPEG format/layer unspecified. */
+  self->CRC = FALSE;
+  self->MPEG_format = 0;
+  self->MPEG_layer = 0;
+}
+
+static void mix_acp_mp3_class_init(MixAudioConfigParamsMP3Class *klass)
+{
+  MixParamsClass *params_class = MIX_PARAMS_CLASS(klass);
+
+  /* Remember the parent class so finalize/copy/equal can chain up. */
+  parent_class = (MixAudioConfigParamsClass *) g_type_class_peek_parent (klass);
+
+  /* Install the MP3-specific MixParams virtual methods. */
+  params_class->finalize = mix_acp_mp3_finalize;
+  params_class->copy = (MixParamsCopyFunction)mix_acp_mp3_copy;
+  params_class->dup = (MixParamsDupFunction)mix_acp_mp3_dup;
+  params_class->equal = (MixParamsEqualFunction)mix_acp_mp3_equal;
+}
+
+MixAudioConfigParamsMP3 *mix_acp_mp3_new(void)
+{
+  /* Allocate through the GType system so instance init runs. */
+  return (MixAudioConfigParamsMP3 *)g_type_create_instance (MIX_TYPE_AUDIOCONFIGPARAMSMP3);
+}
+
+void mix_acp_mp3_finalize(MixParams *obj)
+{
+  /* Nothing instance-owned to free; chain straight up to the parent. */
+  MixParamsClass *parent = MIX_PARAMS_CLASS(parent_class);
+  if (parent->finalize != NULL)
+    parent->finalize(obj);
+}
+
+MixAudioConfigParamsMP3 *mix_acp_mp3_ref(MixAudioConfigParamsMP3 *mix)
+{
+  /* NULL-safe: a NULL object simply yields NULL. */
+  return G_UNLIKELY(!mix) ? NULL
+       : (MixAudioConfigParamsMP3*)mix_params_ref(MIX_PARAMS(mix));
+}
+
+/**
+ * mix_acp_mp3_dup:
+ * @obj: a #MixAudioConfigParamsMP3 object
+ * @returns: a newly allocated duplicate of the object, or NULL on failure.
+ *
+ * Clone @obj by creating a fresh instance and copying all members into it.
+ */
+MixParams* mix_acp_mp3_dup(const MixParams *obj)
+{
+  if (!MIX_IS_AUDIOCONFIGPARAMSMP3(obj))
+    return NULL;
+
+  MixAudioConfigParamsMP3 *clone = mix_acp_mp3_new();
+  if (!mix_acp_mp3_copy(MIX_PARAMS(clone), MIX_PARAMS(obj)))
+  {
+    /* Copy failed: drop the half-built clone and report failure. */
+    mix_acp_mp3_unref(clone);
+    return NULL;
+  }
+  return MIX_PARAMS(clone);
+}
+
+/**
+ * mix_acp_mp3_copy:
+ * @target: copy to target
+ * @src: copy from src
+ * @returns: boolean indicates if copy is successful.
+ *
+ * Copy instance data from @src to @target.
+ */
+gboolean mix_acp_mp3_copy(MixParams* target, const MixParams *src)
+{
+  if (!MIX_IS_AUDIOCONFIGPARAMSMP3(target) || !MIX_IS_AUDIOCONFIGPARAMSMP3(src))
+    return FALSE;
+
+  MixAudioConfigParamsMP3 *dst = MIX_AUDIOCONFIGPARAMSMP3(target);
+  MixAudioConfigParamsMP3 *from = MIX_AUDIOCONFIGPARAMSMP3(src);
+
+  /* Copy the MP3-specific members. */
+  dst->CRC = from->CRC;
+  dst->MPEG_format = from->MPEG_format;
+  dst->MPEG_layer = from->MPEG_layer;
+
+  /* Chain up so base-class members are copied as well. */
+  MixParamsClass *klass = MIX_PARAMS_CLASS(parent_class);
+  if (klass->copy)
+    return klass->copy(MIX_PARAMS_CAST(target), MIX_PARAMS_CAST(src));
+  return TRUE;
+}
+
+/**
+ * mix_acp_mp3_equal:
+ * @first: first object to compare
+ * @second: second object to compare
+ * @returns: boolean indicates if instances are equal.
+ *
+ * Compare @first and @second member by member, chaining up to the
+ * base class for the inherited members.
+ */
+gboolean mix_acp_mp3_equal(MixParams* first, MixParams *second)
+{
+  if (!first || !second) return FALSE;
+  if (first == second) return TRUE;
+
+  /* Let the base class compare the inherited members first. */
+  MixParamsClass *klass = MIX_PARAMS_CLASS(parent_class);
+  gboolean ret = klass->equal ? klass->equal(first, second) : TRUE;
+
+  if (ret && MIX_IS_AUDIOCONFIGPARAMSMP3(first) && MIX_IS_AUDIOCONFIGPARAMSMP3(second))
+  {
+    MixAudioConfigParamsMP3 *a = MIX_AUDIOCONFIGPARAMSMP3(first);
+    MixAudioConfigParamsMP3 *b = MIX_AUDIOCONFIGPARAMSMP3(second);
+
+    ret = (a->CRC == b->CRC)
+       && (a->MPEG_format == b->MPEG_format)
+       && (a->MPEG_layer == b->MPEG_layer);
+  }
+
+  return ret;
+}
+
+
diff --git a/mix_audio/src/mixacpmp3.h b/mix_audio/src/mixacpmp3.h
new file mode 100644
index 0000000..e000b4f
--- /dev/null
+++ b/mix_audio/src/mixacpmp3.h
@@ -0,0 +1,170 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+*/
+
+#ifndef __MIX_ACP_MP3_H__
+#define __MIX_ACP_MP3_H__
+
+
+#include "mixacp.h"
+
+/**
+ * MIX_TYPE_AUDIOCONFIGPARAMSMP3:
+ *
+ * Get type of class.
+ */
+#define MIX_TYPE_AUDIOCONFIGPARAMSMP3 (mix_acp_mp3_get_type ())
+
+/**
+ * MIX_AUDIOCONFIGPARAMSMP3:
+ * @obj: object to be type-casted.
+ *
+ * Type casting.
+ */
+#define MIX_AUDIOCONFIGPARAMSMP3(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_AUDIOCONFIGPARAMSMP3, MixAudioConfigParamsMP3))
+
+/**
+ * MIX_IS_AUDIOCONFIGPARAMSMP3:
+ * @obj: an object.
+ *
+ * Checks if the given object is an instance of #MixAudioConfigParamsMP3
+ */
+#define MIX_IS_AUDIOCONFIGPARAMSMP3(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_AUDIOCONFIGPARAMSMP3))
+
+/**
+ * MIX_AUDIOCONFIGPARAMSMP3_CLASS:
+ * @klass: class to be type-casted.
+ *
+ * Type casting.
+ */
+#define MIX_AUDIOCONFIGPARAMSMP3_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_AUDIOCONFIGPARAMSMP3, MixAudioConfigParamsMP3Class))
+
+/**
+ * MIX_IS_AUDIOCONFIGPARAMSMP3_CLASS:
+ * @klass: a class.
+ *
+ * Checks if the given class is #MixAudioConfigParamsMP3Class
+ */
+#define MIX_IS_AUDIOCONFIGPARAMSMP3_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_AUDIOCONFIGPARAMSMP3))
+
+/**
+ * MIX_AUDIOCONFIGPARAMSMP3_GET_CLASS:
+ * @obj: a #MixAudioConfigParams object.
+ *
+ * Get the class instance of the object.
+ */
+#define MIX_AUDIOCONFIGPARAMSMP3_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_AUDIOCONFIGPARAMSMP3, MixAudioConfigParamsMP3Class))
+
+typedef struct _MixAudioConfigParamsMP3 MixAudioConfigParamsMP3;
+typedef struct _MixAudioConfigParamsMP3Class MixAudioConfigParamsMP3Class;
+
+/**
+ * MixAudioConfigParamsMP3:
+ * @parent: parent.
+ * @CRC: CRC. See #MIX_ACP_MP3_CRC
+ * @MPEG_format: <emphasis>Optional</emphasis>MPEG format of the mpeg audio. See #MIX_ACP_MP3_MPEG_FORMAT
+ * @MPEG_layer: <emphasis>Optional</emphasis>MPEG layer of the mpeg audio. See #MIX_ACP_MP3_MPEG_LAYER
+ *
+ * MI-X Audio Parameter object for MP3 Audio.
+ */
+struct _MixAudioConfigParamsMP3
+{
+ /*< public >*/
+ MixAudioConfigParams parent;
+
+ /*< public >*/
+ /* Audio Format Parameters */
+ gboolean CRC;
+ gint MPEG_format;
+ gint MPEG_layer;
+
+ /*< private >*/
+ void* reserved1;
+ void* reserved2;
+ void* reserved3;
+ void* reserved4;
+};
+
+/**
+ * MixAudioConfigParamsMP3Class:
+ *
+ * MI-X Audio object class
+ */
+struct _MixAudioConfigParamsMP3Class
+{
+ /*< public >*/
+ MixAudioConfigParamsClass parent_class;
+
+ /* class members */
+};
+
+/**
+ * mix_acp_mp3_get_type:
+ * @returns: type
+ *
+ * Get the type of object.
+ */
+GType mix_acp_mp3_get_type (void);
+
+/**
+ * mix_acp_mp3_new:
+ * @returns: A newly allocated instance of #MixAudioConfigParamsMP3
+ *
+ * Use this method to create new instance of #MixAudioConfigParamsMP3
+ */
+MixAudioConfigParamsMP3 *mix_acp_mp3_new(void);
+
+/**
+ * mix_acp_mp3_ref:
+ * @mix: object to add reference
+ * @returns: the MixAudioConfigParamsMP3 instance where reference count has been increased.
+ *
+ * Add reference count.
+ */
+MixAudioConfigParamsMP3 *mix_acp_mp3_ref(MixAudioConfigParamsMP3 *mix);
+
+/**
+ * mix_acp_mp3_unref:
+ * @obj: object to unref.
+ *
+ * Decrement reference count of the object.
+ */
+#define mix_acp_mp3_unref(obj) mix_params_unref(MIX_PARAMS(obj))
+
+/* Class Methods */
+
+/**
+ * MIX_ACP_MP3_CRC:
+ * @obj: #MixAudioConfigParamsMP3 object.
+ *
+ * MixAudioConfigParamMP3.CRC accessor.
+ *
+ * <remark>Optional</remark>
+*/
+#define MIX_ACP_MP3_CRC(obj) (MIX_AUDIOCONFIGPARAMSMP3(obj)->CRC)
+
+/**
+ * MIX_ACP_MP3_MPEG_FORMAT:
+ * @obj: #MixAudioConfigParamsMP3 object.
+ *
+ * MixAudioConfigParamMP3.MPEG_format accessor.
+ *
+ * Supported MPEG format should be 1 or 2.
+*/
+#define MIX_ACP_MP3_MPEG_FORMAT(obj) (MIX_AUDIOCONFIGPARAMSMP3(obj)->MPEG_format)
+
+/**
+ * MIX_ACP_MP3_MPEG_LAYER:
+ * @obj: #MixAudioConfigParamsMP3 object.
+ *
+ * MixAudioConfigParamMP3.MPEG_layer accessor.
+ *
+ * Supported layer should be 1, 2, or 3.
+*/
+#define MIX_ACP_MP3_MPEG_LAYER(obj) (MIX_AUDIOCONFIGPARAMSMP3(obj)->MPEG_layer)
+
+#endif /* __MIX_ACP_MP3_H__ */
diff --git a/mix_audio/src/mixacpwma.c b/mix_audio/src/mixacpwma.c
new file mode 100644
index 0000000..cf2590f
--- /dev/null
+++ b/mix_audio/src/mixacpwma.c
@@ -0,0 +1,205 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+*/
+
+/**
+ * SECTION:mixacpwma
+ * @short_description: Audio parameters for WMA audio.
+ * @include: mixacpwma.h
+ *
+ * A data object which stores audio specific parameters for WMA.
+ *
+ * In Moorestown, only WMA2 is supported.
+ *
+ * Additional parameters must be set in the parent object #MixAudioConfigParams
+ */
+
+#include "mixacpwma.h"
+
+static GType _mix_acp_wma_type = 0;
+static MixAudioConfigParamsClass *parent_class = NULL;
+
+#define _do_init { _mix_acp_wma_type = g_define_type_id; }
+
+gboolean mix_acp_wma_copy(MixParams* target, const MixParams *src);
+MixParams* mix_acp_wma_dup(const MixParams *obj);
+gboolean mix_acp_wma_equal(MixParams* first, MixParams *second);
+static void mix_acp_wma_finalize(MixParams *obj);
+
+G_DEFINE_TYPE_WITH_CODE(MixAudioConfigParamsWMA, mix_acp_wma, MIX_TYPE_AUDIOCONFIGPARAMS, _do_init);
+
+static void mix_acp_wma_init (MixAudioConfigParamsWMA *self)
+{
+  /* Instance initializer: reset every WMA-specific field to its
+     "unset" value; real values come later from the ASF header. */
+  self->wma_version = MIX_AUDIO_WMA_VUNKNOWN;
+  self->channel_mask = 0;
+  self->format_tag = 0;
+  self->block_align = 0;
+  self->wma_encode_opt = 0;
+  self->pcm_bit_width = 0;   /* bit width of the source PCM */
+}
+
+static void mix_acp_wma_class_init(MixAudioConfigParamsWMAClass *klass)
+{
+  MixParamsClass *mixparams_class = MIX_PARAMS_CLASS(klass);
+
+  /* Cache the parent class so finalize/copy/dup/equal can chain up later. */
+  parent_class = (MixAudioConfigParamsClass *) g_type_class_peek_parent (klass);
+
+  /* Install this subclass's overrides of the MixParams virtual methods. */
+  mixparams_class->finalize = mix_acp_wma_finalize;
+  mixparams_class->copy = (MixParamsCopyFunction)mix_acp_wma_copy;
+  mixparams_class->dup = (MixParamsDupFunction)mix_acp_wma_dup;
+  mixparams_class->equal = (MixParamsEqualFunction)mix_acp_wma_equal;
+}
+
+MixAudioConfigParamsWMA *mix_acp_wma_new(void)
+{
+  /* Allocate a fresh instance through the GType system; field defaults
+     are applied by mix_acp_wma_init(). */
+  return (MixAudioConfigParamsWMA *)g_type_create_instance (MIX_TYPE_AUDIOCONFIGPARAMSWMA);
+}
+
+void mix_acp_wma_finalize(MixParams *obj)
+{
+  /* No WMA-specific resources to release here. */
+
+  /* Chain up to the parent class finalizer. */
+  MixParamsClass *klass = MIX_PARAMS_CLASS(parent_class);
+  if (klass->finalize)
+    klass->finalize(obj);
+}
+
+MixAudioConfigParamsWMA *mix_acp_wma_ref(MixAudioConfigParamsWMA *obj)
+{
+  /* Delegate to the base MixParams reference counting. */
+  MixParams *base = MIX_PARAMS(obj);
+  return (MixAudioConfigParamsWMA*)mix_params_ref(base);
+}
+
+/**
+ * mix_acp_wma_dup:
+ * @obj: a #MixAudioConfigParamsWMA object
+ * @returns: a newly allocated duplicate of the object, or NULL on failure.
+ *
+ * Create a deep copy of @obj.
+ */
+MixParams* mix_acp_wma_dup(const MixParams *obj)
+{
+  MixAudioConfigParamsWMA *clone = NULL;
+
+  /* Only objects of (a subtype of) this class can be duplicated here. */
+  if (!MIX_IS_AUDIOCONFIGPARAMSWMA(obj))
+    return NULL;
+
+  clone = mix_acp_wma_new();
+  if (!mix_acp_wma_copy(MIX_PARAMS(clone), MIX_PARAMS(obj)))
+  {
+    /* Copy failed: release the partially built duplicate. */
+    mix_acp_wma_unref(clone);
+    return NULL;
+  }
+
+  return MIX_PARAMS(clone);
+}
+
+/**
+ * mix_acp_wma_copy:
+ * @target: copy to target
+ * @src: copy from src
+ * @returns: boolean indicating whether the copy was successful.
+ *
+ * Copy instance data from @src to @target.  Both objects must be
+ * #MixAudioConfigParamsWMA instances, otherwise FALSE is returned.
+ */
+gboolean mix_acp_wma_copy(MixParams* target, const MixParams *src)
+{
+  if (MIX_IS_AUDIOCONFIGPARAMSWMA(target) && MIX_IS_AUDIOCONFIGPARAMSWMA(src))
+  {
+    MixAudioConfigParamsWMA *t = MIX_AUDIOCONFIGPARAMSWMA(target);
+    MixAudioConfigParamsWMA *s = MIX_AUDIOCONFIGPARAMSWMA(src);
+
+    /* Copy every WMA-specific member. */
+    t->channel_mask = s->channel_mask;
+    t->format_tag = s->format_tag;
+    t->block_align = s->block_align;
+    t->wma_encode_opt = s->wma_encode_opt;
+    t->wma_version = s->wma_version;
+    t->pcm_bit_width = s->pcm_bit_width;
+
+    // Now chain up to the base class so inherited members are copied too.
+    // parent_class holds the class object cached before this subclass
+    // overwrote the virtual table, so ->copy is not this function.
+    MixParamsClass *klass = MIX_PARAMS_CLASS(parent_class);
+    if (klass->copy)
+    {
+      return klass->copy(MIX_PARAMS_CAST(target), MIX_PARAMS_CAST(src));
+    }
+    else
+      return TRUE;
+  }
+  return FALSE;
+}
+
+/**
+ * mix_acp_wma_equal:
+ * @first: first object to compare
+ * @second: second object to compare
+ * @returns: boolean indicating whether the two instances are equal.
+ *
+ * Compare @first and @second for equality.  The parent class members are
+ * compared first; only if they match are the WMA-specific members compared.
+ */
+gboolean mix_acp_wma_equal(MixParams* first, MixParams *second)
+{
+  gboolean ret = FALSE;
+
+  if (first && second)
+  {
+    /* Identical pointers are trivially equal. */
+    if (first == second) return TRUE;
+  }
+  else
+  {
+    /* At least one pointer is NULL; not equal. */
+    return FALSE;
+  }
+
+  /* Chain up: let the parent class compare inherited members first. */
+  MixParamsClass *klass = MIX_PARAMS_CLASS(parent_class);
+  if (klass->equal)
+    ret = klass->equal(first, second);
+  else
+    ret = TRUE;
+
+  /* Parent members match; deep-compare the WMA-specific members. */
+  if (ret && MIX_IS_AUDIOCONFIGPARAMSWMA(first) && MIX_IS_AUDIOCONFIGPARAMSWMA(second))
+  {
+    MixAudioConfigParamsWMA *acp1 = MIX_AUDIOCONFIGPARAMSWMA(first);
+    MixAudioConfigParamsWMA *acp2 = MIX_AUDIOCONFIGPARAMSWMA(second);
+
+    ret = (acp1->channel_mask == acp2->channel_mask) &&
+          (acp1->format_tag == acp2->format_tag) &&
+          (acp1->block_align == acp2->block_align) &&
+          (acp1->wma_encode_opt == acp2->wma_encode_opt) &&
+          (acp1->pcm_bit_width == acp2->pcm_bit_width) &&
+          (acp1->wma_version == acp2->wma_version);
+  }
+
+  return ret;
+}
+
+MixAudioWMAVersion mix_acp_wma_get_version(MixAudioConfigParamsWMA *obj)
+{
+  /* NULL-safe getter: report "unknown" when no object is supplied. */
+  return obj ? obj->wma_version : MIX_AUDIO_WMA_VUNKNOWN;
+}
+
+MIX_RESULT mix_acp_wma_set_version(MixAudioConfigParamsWMA *obj, MixAudioWMAVersion ver)
+{
+  if (!obj) return MIX_RESULT_NULL_PTR;
+
+  /* Only accept values strictly inside the enum range
+     (excludes both VUNKNOWN and the LAST sentinel). */
+  if ((ver <= MIX_AUDIO_WMA_VUNKNOWN) || (ver >= MIX_AUDIO_WMA_LAST))
+    return MIX_RESULT_INVALID_PARAM;
+
+  obj->wma_version = ver;
+  return MIX_RESULT_SUCCESS;
+}
+
diff --git a/mix_audio/src/mixacpwma.h b/mix_audio/src/mixacpwma.h
new file mode 100644
index 0000000..8c617fd
--- /dev/null
+++ b/mix_audio/src/mixacpwma.h
@@ -0,0 +1,235 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+*/
+
+#ifndef __MIX_ACP_WMA_H__
+#define __MIX_ACP_WMA_H__
+
+
+#include "mixacp.h"
+
+/**
+ * MIX_TYPE_AUDIOCONFIGPARAMSWMA:
+ *
+ * Get type of class.
+ */
+#define MIX_TYPE_AUDIOCONFIGPARAMSWMA (mix_acp_wma_get_type ())
+
+/**
+ * MIX_AUDIOCONFIGPARAMSWMA:
+ * @obj: object to be type-casted.
+ *
+ * Type casting.
+ */
+#define MIX_AUDIOCONFIGPARAMSWMA(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_AUDIOCONFIGPARAMSWMA, MixAudioConfigParamsWMA))
+
+/**
+ * MIX_IS_AUDIOCONFIGPARAMSWMA:
+ * @obj: an object.
+ *
+ * Checks if the given object is an instance of #MixAudioConfigParamsWMA
+ */
+#define MIX_IS_AUDIOCONFIGPARAMSWMA(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_AUDIOCONFIGPARAMSWMA))
+
+/**
+ * MIX_AUDIOCONFIGPARAMSWMA_CLASS:
+ * @klass: class to be type-casted.
+ *
+ * Type casting.
+ */
+#define MIX_AUDIOCONFIGPARAMSWMA_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_AUDIOCONFIGPARAMSWMA, MixAudioConfigParamsWMAClass))
+
+/**
+ * MIX_IS_AUDIOCONFIGPARAMSWMA_CLASS:
+ * @klass: a class.
+ *
+ * Checks if the given class is #MixAudioConfigParamsWMAClass
+ */
+#define MIX_IS_AUDIOCONFIGPARAMSWMA_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_AUDIOCONFIGPARAMSWMA))
+
+/**
+ * MIX_AUDIOCONFIGPARAMSWMA_GET_CLASS:
+ * @obj: a #MixAudioConfigParamsWMA object.
+ *
+ * Get the class instance of the object.
+ */
+#define MIX_AUDIOCONFIGPARAMSWMA_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_AUDIOCONFIGPARAMSWMA, MixAudioConfigParamsWMAClass))
+
+/**
+ * MixAudioWMAVersion:
+ * @MIX_AUDIO_WMA_VUNKNOWN: WMA version undefined.
+ * @MIX_AUDIO_WMA_V9: WMA 9
+ * @MIX_AUDIO_WMA_V10: <emphasis>Not Supported</emphasis> WMA 10
+ * @MIX_AUDIO_WMA_V10P: <emphasis>Not Supported</emphasis> WMA 10 Pro
+ * @MIX_AUDIO_WMA_LAST: last entry.
+ *
+ * WMA version.
+ */
+typedef enum {
+ MIX_AUDIO_WMA_VUNKNOWN,
+ MIX_AUDIO_WMA_V9,
+ MIX_AUDIO_WMA_V10,
+ MIX_AUDIO_WMA_V10P,
+ MIX_AUDIO_WMA_LAST
+} MixAudioWMAVersion;
+
+typedef struct _MixAudioConfigParamsWMA MixAudioConfigParamsWMA;
+typedef struct _MixAudioConfigParamsWMAClass MixAudioConfigParamsWMAClass;
+
+/**
+ * MixAudioConfigParamsWMA:
+ * @parent: parent.
+ * @channel_mask: Channel Mask. See #MIX_ACP_WMA_CHANNEL_MASK
+ * @format_tag: Format tag. See #MIX_ACP_WMA_FORMAT_TAG
+ * @block_align: Block alignment. See #MIX_ACP_WMA_BLOCK_ALIGN
+ * @wma_encode_opt: Encoder option. See #MIX_ACP_WMA_ENCODE_OPT
+ * @pcm_bit_width: Source pcm bit width. See #MIX_ACP_WMA_PCM_BIT_WIDTH
+ * @wma_version: WMA version. See #mix_acp_wma_set_version
+ *
+ * MI-X Audio Parameter object
+ */
+struct _MixAudioConfigParamsWMA
+{
+  /*< public >*/
+  MixAudioConfigParams parent;
+
+  /*< public >*/
+  /* Audio Format Parameters (typically taken from the ASF header) */
+  guint32 channel_mask;
+  guint16 format_tag;
+  guint16 block_align;
+  guint16 wma_encode_opt;/* Encoder option */
+  guint8 pcm_bit_width; /* source pcm bit width */
+  MixAudioWMAVersion wma_version;
+};
+
+/**
+ * MixAudioConfigParamsWMAClass:
+ *
+ * MI-X Audio object class structure for WMA configuration parameters.
+ * No class-level members are defined beyond the parent class.
+ */
+struct _MixAudioConfigParamsWMAClass
+{
+  /*< public >*/
+  MixAudioConfigParamsClass parent_class;
+
+  /* class members */
+};
+
+/**
+ * mix_acp_wma_get_type:
+ * @returns: type
+ *
+ * Get the type of object.
+ */
+GType mix_acp_wma_get_type (void);
+
+/**
+ * mix_acp_wma_new:
+ * @returns: A newly allocated instance of #MixAudioConfigParamsWMA
+ *
+ * Use this method to create new instance of #MixAudioConfigParamsWMA
+ */
+MixAudioConfigParamsWMA *mix_acp_wma_new(void);
+
+/**
+ * mix_acp_wma_ref:
+ * @mix: object to add reference
+ * @returns: the MixAudioConfigParamsWMA instance where reference count has been increased.
+ *
+ * Add reference count.
+ */
+MixAudioConfigParamsWMA *mix_acp_wma_ref(MixAudioConfigParamsWMA *mix);
+
+/**
+ * mix_acp_wma_unref:
+ * @obj: object to unref.
+ *
+ * Decrement reference count of the object.
+ */
+#define mix_acp_wma_unref(obj) mix_params_unref(MIX_PARAMS(obj))
+
+/**
+ * MIX_ACP_WMA_CHANNEL_MASK:
+ * @obj: #MixAudioConfigParamsWMA object
+ *
+ * MixAudioConfigParamWMA.channel_mask accessor.
+ *
+ * Channel mask must be one of the following:
+ *
+ * 4: For single (1) channel output.
+ *
+ * 3: For stereo (2) channels output.
+ *
+ * Only 1 or 2 output channels are supported.
+ *
+*/
+#define MIX_ACP_WMA_CHANNEL_MASK(obj) (MIX_AUDIOCONFIGPARAMSWMA(obj)->channel_mask)
+
+/**
+ * MIX_ACP_WMA_FORMAT_TAG:
+ * @obj: #MixAudioConfigParamsWMA object
+ *
+ * MixAudioConfigParamWMA.format_tag accessor.
+ *
+ * <remark>In Moorestown, only value 0x0161 combined with use of #MIX_AUDIO_WMA_V9 is supported.</remark>
+*/
+#define MIX_ACP_WMA_FORMAT_TAG(obj) (MIX_AUDIOCONFIGPARAMSWMA(obj)->format_tag)
+
+/**
+ * MIX_ACP_WMA_BLOCK_ALIGN:
+ * @obj: #MixAudioConfigParamsWMA object
+ *
+ * MixAudioConfigParamWMA.block_align accessor.
+ *
+ * Block alignment indicates packet size. Available from ASF Header.
+*/
+#define MIX_ACP_WMA_BLOCK_ALIGN(obj) (MIX_AUDIOCONFIGPARAMSWMA(obj)->block_align)
+
+/**
+ * MIX_ACP_WMA_ENCODE_OPT:
+ * @obj: #MixAudioConfigParamsWMA object
+ *
+ * MixAudioConfigParamWMA.wma_encode_opt accessor.
+ *
+ * Encoder option available from ASF header.
+*/
+#define MIX_ACP_WMA_ENCODE_OPT(obj) (MIX_AUDIOCONFIGPARAMSWMA(obj)->wma_encode_opt)
+
+/**
+ * MIX_ACP_WMA_PCM_BIT_WIDTH:
+ * @obj: #MixAudioConfigParamsWMA object
+ *
+ * MixAudioConfigParamWMA.pcm_bit_width accessor.
+ *
+ * Source pcm bit width available from ASF Header.
+*/
+#define MIX_ACP_WMA_PCM_BIT_WIDTH(obj) (MIX_AUDIOCONFIGPARAMSWMA(obj)->pcm_bit_width)
+
+/* Class Methods */
+/**
+ * mix_acp_wma_get_version:
+ * @obj: #MixAudioConfigParamsWMA object
+ * @returns: MixAudioWMAVersion
+ *
+ * Get WMA Version.
+*/
+MixAudioWMAVersion mix_acp_wma_get_version(MixAudioConfigParamsWMA *obj);
+
+/**
+ * mix_acp_wma_set_version:
+ * @obj: #MixAudioConfigParamsWMA object
+ * @ver: MixAudioWMAVersion to set.
+ * @returns: MIX_RESULT.
+ *
+ * Set WMA Version.
+ *
+ * <remark>In Moorestown, only #MIX_AUDIO_WMA_V9 is supported</remark>
+*/
+MIX_RESULT mix_acp_wma_set_version(MixAudioConfigParamsWMA *obj, MixAudioWMAVersion ver);
+
+#endif /* __MIX_ACP_WMA_H__ */
diff --git a/mix_audio/src/mixaip.c b/mix_audio/src/mixaip.c
new file mode 100644
index 0000000..8ee0811
--- /dev/null
+++ b/mix_audio/src/mixaip.c
@@ -0,0 +1,167 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+*/
+
+/**
+ * SECTION:mixaip
+ * @short_description: Initialization parameters object.
+ * @include: mixacp.h
+ *
+ * A data object which stores initialization specific parameters.
+ *
+ * Not Implemented in Moorestown.
+ */
+
+#include "mixaip.h"
+
+//static GType _mix_aip_type = 0;
+static MixParamsClass *parent_class = NULL;
+
+// #define _do_init { _mix_aip_type = g_define_type_id; };
+#define _do_init
+
+gboolean mix_aip_copy(MixParams* target, const MixParams *src);
+MixParams* mix_aip_dup(const MixParams *obj);
+gboolean mix_aip_equal(MixParams* first, MixParams *second);
+static void mix_aip_finalize(MixParams *obj);
+
+G_DEFINE_TYPE_WITH_CODE(MixAudioInitParams, mix_aip, MIX_TYPE_PARAMS, _do_init );
+
+#if 0
+void _mix_aip_initialize (void)
+{
+ /* the MixParams types need to be class_ref'd once before it can be
+ * done from multiple threads;
+ * see http://bugzilla.gnome.org/show_bug.cgi?id=304551 */
+ g_type_class_ref (mix_aip_get_type ());
+}
+#endif
+
+static void mix_aip_init (MixAudioInitParams *self)
+{
+  /* No parameters are defined yet; clear every reserved slot. */
+  self->reserved1 = NULL;
+  self->reserved2 = NULL;
+  self->reserved3 = NULL;
+  self->reserved4 = NULL;
+}
+
+static void mix_aip_class_init(MixAudioInitParamsClass *klass)
+{
+  MixParamsClass *mixparams_class = MIX_PARAMS_CLASS(klass);
+
+  /* Cache the parent class so finalize/copy/dup/equal can chain up later. */
+  parent_class = (MixParamsClass *) g_type_class_peek_parent (klass);
+
+  /* Install this subclass's overrides of the MixParams virtual methods. */
+  mixparams_class->finalize = mix_aip_finalize;
+  mixparams_class->copy = (MixParamsCopyFunction)mix_aip_copy;
+  mixparams_class->dup = (MixParamsDupFunction)mix_aip_dup;
+  mixparams_class->equal = (MixParamsEqualFunction)mix_aip_equal;
+}
+
+MixAudioInitParams *mix_aip_new(void)
+{
+  /* Allocate a fresh instance through the GType system; fields are
+     reset by mix_aip_init(). */
+  return (MixAudioInitParams *)g_type_create_instance (MIX_TYPE_AUDIOINITPARAMS);
+}
+
+void mix_aip_finalize(MixParams *obj)
+{
+  /* No instance resources to release here. */
+
+  /* Chain up to the parent class finalizer. */
+  if (parent_class->finalize)
+    parent_class->finalize(obj);
+}
+
+MixAudioInitParams *mix_aip_ref(MixAudioInitParams *mix)
+{
+  /* Delegate to the base MixParams reference counting. */
+  MixParams *base = MIX_PARAMS(mix);
+  return (MixAudioInitParams*)mix_params_ref(base);
+}
+
+/**
+ * mix_aip_dup:
+ * @obj: a #MixAudioInitParams object
+ * @returns: a newly allocated duplicate of the object, or NULL on failure.
+ *
+ * Create a deep copy of @obj.
+ */
+MixParams* mix_aip_dup(const MixParams *obj)
+{
+  MixParams *ret = NULL;
+
+  if (MIX_IS_AUDIOINITPARAMS(obj))
+  {
+    MixAudioInitParams *duplicate = mix_aip_new();
+    if (mix_aip_copy(MIX_PARAMS(duplicate), MIX_PARAMS(obj)))
+    {
+      ret = MIX_PARAMS(duplicate);
+    }
+    else
+    {
+      /* Copy failed: release the partially built duplicate. */
+      mix_aip_unref(duplicate);
+    }
+  }
+
+  return ret;
+}
+
+/**
+ * mix_aip_copy:
+ * @target: copy to target
+ * @src: copy from src
+ * @returns: boolean indicating whether the copy was successful.
+ *
+ * Copy instance data from @src to @target.  #MixAudioInitParams currently
+ * has no members of its own (only reserved fields), so only the parent
+ * class data is copied.
+ */
+gboolean mix_aip_copy(MixParams* target, const MixParams *src)
+{
+  if (MIX_IS_AUDIOINITPARAMS(target) && MIX_IS_AUDIOINITPARAMS(src))
+  {
+    // TODO perform copy.
+    //
+    // Now chainup base class.
+    // parent_class holds the class object cached before this subclass
+    // overwrote the virtual table, so ->copy is not this function.
+    if (parent_class->copy)
+    {
+      return parent_class->copy(MIX_PARAMS_CAST(target), MIX_PARAMS_CAST(src));
+    }
+    else
+      return TRUE;
+  }
+  return FALSE;
+}
+
+/**
+ * mix_aip_equal:
+ * @first: first object to compare
+ * @second: second object to compare
+ * @returns: boolean indicating whether the two instances are equal.
+ *
+ * Compare @first and @second for equality.
+ *
+ * #MixAudioInitParams has no comparable members of its own yet (only
+ * reserved fields), so equality is decided by the type checks and the
+ * parent class comparison.
+ */
+gboolean mix_aip_equal(MixParams* first, MixParams *second)
+{
+  gboolean ret = FALSE;
+
+  /* Identical pointers are trivially equal. */
+  if (first == second) return TRUE;
+
+  if (MIX_IS_AUDIOINITPARAMS(first) && MIX_IS_AUDIOINITPARAMS(second))
+  {
+    /* No instance members to deep-compare yet; chain up so base-class
+       state is still taken into account.  (The previous version left
+       ret FALSE before the chain-up guard, so this method always
+       reported inequality.) */
+    MixParamsClass *klass = MIX_PARAMS_CLASS(parent_class);
+    if (klass->equal)
+      ret = klass->equal(first, second);
+    else
+      ret = TRUE;
+  }
+
+  return ret;
+}
diff --git a/mix_audio/src/mixaip.h b/mix_audio/src/mixaip.h
new file mode 100644
index 0000000..613ed54
--- /dev/null
+++ b/mix_audio/src/mixaip.h
@@ -0,0 +1,132 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+*/
+
+#ifndef __MIX_AUDIOINITPARAMS_H__
+#define __MIX_AUDIOINITPARAMS_H__
+
+
+#include <mixparams.h>
+
+/**
+ * MIX_TYPE_AUDIOINITPARAMS:
+ *
+ * Get type of class.
+ */
+#define MIX_TYPE_AUDIOINITPARAMS (mix_aip_get_type ())
+
+/**
+ * MIX_AUDIOINITPARAMS:
+ * @obj: object to be type-casted.
+ *
+ * Type casting.
+ */
+#define MIX_AUDIOINITPARAMS(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_AUDIOINITPARAMS, MixAudioInitParams))
+
+/**
+ * MIX_IS_AUDIOINITPARAMS:
+ * @obj: an object.
+ *
+ * Checks if the given object is an instance of #MixParams
+ */
+#define MIX_IS_AUDIOINITPARAMS(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_AUDIOINITPARAMS))
+
+/**
+ * MIX_AUDIOINITPARAMS_CLASS:
+ * @klass: class to be type-casted.
+ *
+ * Type casting.
+ */
+#define MIX_AUDIOINITPARAMS_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_AUDIOINITPARAMS, MixAudioInitParamsClass))
+
+/**
+ * MIX_IS_AUDIOINITPARAMS_CLASS:
+ * @klass: a class.
+ *
+ * Checks if the given class is #MixParamsClass
+ */
+#define MIX_IS_AUDIOINITPARAMS_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_AUDIOINITPARAMS))
+
+/**
+ * MIX_AUDIOINITPARAMS_GET_CLASS:
+ * @obj: a #MixParams object.
+ *
+ * Get the class instance of the object.
+ */
+#define MIX_AUDIOINITPARAMS_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_AUDIOINITPARAMS, MixAudioInitParamsClass))
+
+typedef struct _MixAudioInitParams MixAudioInitParams;
+typedef struct _MixAudioInitParamsClass MixAudioInitParamsClass;
+
+/**
+ * MixAudioInitParams:
+ * @parent: Parent.
+ *
+ * #MixAudio initialization parameter object.
+ */
+struct _MixAudioInitParams
+{
+  /*< public >*/
+  MixParams parent;
+
+  /*< private >*/
+  /* Reserved slots for future expansion without breaking the ABI. */
+  void* reserved1;
+  void* reserved2;
+  void* reserved3;
+  void* reserved4;
+};
+
+/**
+ * MixAudioInitParamsClass:
+ * @parent_class: Parent class.
+ *
+ * #MixAudio initialization parameter object class structure.
+ * No class-level members are defined beyond the parent class.
+ */
+struct _MixAudioInitParamsClass
+{
+  /*< public >*/
+  MixParamsClass parent_class;
+
+  /* class members */
+};
+
+/**
+ * mix_aip_get_type:
+ * @returns: type
+ *
+ * Get the type of object.
+ */
+GType mix_aip_get_type (void);
+
+/**
+ * mix_aip_new:
+ * @returns: A newly allocated instance of #MixAudioInitParams
+ *
+ * Use this method to create new instance of #MixAudioInitParams
+ */
+MixAudioInitParams *mix_aip_new(void);
+
+/**
+ * mix_aip_ref:
+ * @mix: object to add reference
+ * @returns: the MixAudioInitParams instance where reference count has been increased.
+ *
+ * Add reference count.
+ */
+MixAudioInitParams *mix_aip_ref(MixAudioInitParams *mix);
+
+/**
+ * mix_aip_unref:
+ * @obj: object to unref.
+ *
+ * Decrement reference count of the object.
+ */
+#define mix_aip_unref(obj) mix_params_unref(MIX_PARAMS(obj))
+
+/* Class Methods */
+
+#endif /* __MIX_AUDIOINITPARAMS_H__ */
diff --git a/mix_audio/src/mixaudio.c b/mix_audio/src/mixaudio.c
new file mode 100644
index 0000000..6d41350
--- /dev/null
+++ b/mix_audio/src/mixaudio.c
@@ -0,0 +1,2092 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+*/
+
+/**
+ * SECTION:mixaudio
+ * @short_description: Object to support a single stream playback using hardware accelerated decoder.
+ * @include: mixaudio.h
+ *
+ * #MixAudio object provide thread-safe API for application and/or multimedia framework to take advantage of Intel Smart Sound Technology(TM) driver for hardware audio decode and render.
+ *
+ * Each #MixAudio object represents one streaming session with the Intel Smart Sound driver and provides configuration and control of the decoding and playback options.
+ *
+ * The #MixAudio object also support integration with Intel Audio Manager service.
+ *
+ * An application can utilize the #MixAudio object by calling the following sequence:
+ * <orderedlist numeration="arabic">
+ * <listitem>mix_audio_new() to create a #MixAudio instance.</listitem>
+ * <listitem>mix_audio_initialize() to allocate Intel Smart Sound Technology resource.</listitem>
+ * <listitem>mix_audio_configure() to configure stream parameters.</listitem>
+ * <listitem>mix_audio_decode() can be called repeatedly for decoding and, optionally, rendering.</listitem>
+ * <listitem>mix_audio_start() is called after the 1st mix_audio_decode() method to start rendering.</listitem>
+ * <listitem>mix_audio_stop_drain() is called after the last buffer is passed for decoding in with mix_audio_decode(). </listitem>
+ * <listitem>mix_audio_deinitialize() to free resource once playback is completed.</listitem>
+ * </orderedlist>
+ *
+ * Since mix_audio_decode() is a blocking call during playback, the following methods are called in a separate thread to control progress:
+ * <itemizedlist>
+ * <listitem>mix_audio_start()</listitem>
+ * <listitem>mix_audio_pause()</listitem>
+ * <listitem>mix_audio_resume()</listitem>
+ * <listitem>mix_audio_stop_drop()</listitem>
+ * </itemizedlist>
+ */
+
+/**
+ * SECTION:mixaudiotypes
+ * @title: Mix Audio Types
+ * @short_description: Miscellaneous types used by #MixAudio API.
+ * @include: mixaudiotypes.h
+ *
+ * Miscellaneous types used by #MixAudio API.
+*/
+
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <sys/ioctl.h>
+#include <errno.h>
+#include <unistd.h>
+#include <sys/uio.h>
+#include <string.h>
+
+#include <glib.h>
+#include <glib/gprintf.h>
+#include <mixlog.h>
+#include "mixaudio.h"
+
+#ifdef AUDIO_MANAGER
+#include "amhelper.h"
+#endif
+
+#ifndef MIXAUDIO_CURRENT
+#define MIXAUDIO_CURRENT 0
+#endif
+#ifndef MIXAUDIO_AGE
+#define MIXAUDIO_AGE 0
+#endif
+
+/* Include this now but it will change when driver updates.
+ We would want to build against a kernel dev package if that
+ is available.
+*/
+#include <linux/types.h>
+#include "intel_sst_ioctl.h"
+#include "sst_proxy.h"
+
+#ifdef G_LOG_DOMAIN
+#undef G_LOG_DOMAIN
+#define G_LOG_DOMAIN ((gchar*)"mixaudio")
+#endif
+
+/**
+ * LPE_DEVICE:
+ *
+ * LPE Device location.
+ */
+static const char* LPE_DEVICE="/dev/lpe";
+/* #define LPE_DEVICE "/dev/lpe" */
+
+#define _LOCK(obj) g_static_rec_mutex_lock(obj);
+#define _UNLOCK(obj) g_static_rec_mutex_unlock(obj);
+
+#define _UNLOCK_RETURN(obj, res) { _UNLOCK(obj); return res; }
+
+typedef enum {
+ MIX_STREAM_PAUSED_DRAINING = MIX_STREAM_LAST,
+ MIX_STREAM_INTERNAL_LAST
+} MixStreamStateInternal;
+
+
+MIX_RESULT mix_audio_initialize_default(MixAudio *mix, MixCodecMode mode, MixAudioInitParams *aip, MixDrmParams *drminitparams);
+MIX_RESULT mix_audio_configure_default(MixAudio *mix, MixAudioConfigParams *audioconfigparams, MixDrmParams *drmparams);
+MIX_RESULT mix_audio_decode_default(MixAudio *mix, const MixIOVec *iovin, gint iovincnt, guint64 *insize, MixIOVec *iovout, gint iovoutcnt, guint64 *outsize);
+MIX_RESULT mix_audio_capture_encode_default(MixAudio *mix, MixIOVec *iovout, gint iovoutcnt);
+MIX_RESULT mix_audio_start_default(MixAudio *mix);
+MIX_RESULT mix_audio_stop_drop_default(MixAudio *mix);
+MIX_RESULT mix_audio_stop_drain_default(MixAudio *mix);
+MIX_RESULT mix_audio_pause_default(MixAudio *mix);
+MIX_RESULT mix_audio_resume_default(MixAudio *mix);
+MIX_RESULT mix_audio_get_timestamp_default(MixAudio *mix, guint64 *msecs);
+MIX_RESULT mix_audio_set_mute_default(MixAudio *mix, gboolean mute);
+MIX_RESULT mix_audio_get_mute_default(MixAudio *mix, gboolean* muted);
+MIX_RESULT mix_audio_get_max_vol_default(MixAudio *mix, gint *maxvol);
+MIX_RESULT mix_audio_get_min_vol_default(MixAudio *mix, gint *minvol);
+MIX_RESULT mix_audio_get_volume_default(MixAudio *mix, gint *currvol, MixVolType type);
+MIX_RESULT mix_audio_set_volume_default(MixAudio *mix, gint currvol, MixVolType type, gulong msecs, MixVolRamp ramptype);
+MIX_RESULT mix_audio_deinitialize_default(MixAudio *mix);
+MIX_RESULT mix_audio_get_stream_state_default(MixAudio *mix, MixStreamState *streamState);
+MIX_RESULT mix_audio_get_state_default(MixAudio *mix, MixState *state);
+MIX_RESULT mix_audio_is_am_available_default(MixAudio *mix, MixAudioManager am, gboolean *avail);
+MIX_RESULT mix_audio_get_output_configuration_default(MixAudio *mix, MixAudioConfigParams **audioconfigparams);
+
+static gboolean g_IAM_available = FALSE;
+MIX_RESULT mix_audio_am_unregister(MixAudio *mix, MixAudioConfigParams *audioconfigparams);
+MIX_RESULT mix_audio_am_register(MixAudio *mix, MixAudioConfigParams *audioconfigparams);
+MIX_RESULT mix_audio_AM_Change(MixAudioConfigParams *oldparams, MixAudioConfigParams *newparams);
+
+static void mix_audio_finalize(GObject *obj);
+G_DEFINE_TYPE (MixAudio, mix_audio, G_TYPE_OBJECT);
+
+static gboolean has_FW_INFO = FALSE;
+static struct snd_sst_fw_info cur_FW_INFO = {{0}};
+
+static MIX_RESULT mix_audio_FW_INFO(MixAudio *mix);
+static MIX_RESULT mix_audio_SST_SET_PARAMS(MixAudio *mix, MixAudioConfigParams *params);
+static MIX_RESULT mix_audio_SST_writev(MixAudio *mix, const MixIOVec *iovin, gint iovincnt, guint64 *insize);
+static MIX_RESULT mix_audio_SST_STREAM_DECODE(MixAudio *mix, const MixIOVec *iovin, gint iovincnt, guint64 *insize, MixIOVec *iovout, gint iovoutcnt, guint64 *outsize);
+static void mix_audio_debug_dump(MixAudio *mix);
+
+static guint g_log_handler=0;
+static void mix_audio_log(const gchar *log_domain, GLogLevelFlags log_level, const gchar *message, gpointer user_data);
+
+/**
+ * mix_acp_print_params:
+ * @obj: TBD
+ *
+ * This method is to print acp param. It is a hidden implementation within MixAudioConfigParams.
+*/
+void mix_acp_print_params(MixAudioConfigParams *obj);
+
+static void mix_audio_init (MixAudio *self)
+{
+  /* Instance initializer: session starts detached from any stream,
+     device, or codec. */
+  self->useIAM = FALSE;
+  self->streamID = 0; // TODO: Find out the invalid value for stream ID when integrates with IAM.
+  self->amStreamID = 0; // TODO: as above
+  self->streamState = MIX_STREAM_NULL;
+  self->encoding = NULL;
+  self->fileDescriptor = -1;   /* LPE device not yet opened */
+  self->state = MIX_STATE_UNINITIALIZED;
+  self->codecMode = MIX_CODING_INVALID;
+  self->am_registered = FALSE;
+
+  /* private member initialization: recursive mutexes guarding the
+     decode path and the control path respectively. */
+  g_static_rec_mutex_init (&self->streamlock);
+  g_static_rec_mutex_init (&self->controllock);
+
+  self->audioconfigparams = NULL;
+  self->deviceState = MIX_AUDIO_DEV_CLOSED;
+
+#ifdef LPESTUB
+  /* Stub build: no real LPE hardware; keep fake timestamp state. */
+  g_message("MixAudio running in stub mode!");
+  self->ts_last = 0;
+  self->ts_elapsed = 0;
+#endif
+
+  self->bytes_written=0;
+
+}
+
+void _mix_aip_initialize (void);
+
+/* Class initializer: installs the finalizer, fills in the virtual method
+   table with the default implementations, and installs a one-time GLib log
+   handler whose verbosity mask is read from the MIX_AUDIO_DEBUG environment
+   variable (hex with a "0x" prefix, or decimal). */
+static void mix_audio_class_init (MixAudioClass *klass)
+{
+  GObjectClass *gobject_class = (GObjectClass*)klass;
+
+  gobject_class->finalize = mix_audio_finalize;
+
+  // Init thread before any threads/sync object are used.
+  if (!g_thread_supported ()) g_thread_init (NULL);
+
+  /* Init some global vars */
+  g_IAM_available = FALSE;
+
+  // base implementations
+  klass->initialize = mix_audio_initialize_default;
+  klass->configure = mix_audio_configure_default;
+  klass->decode = mix_audio_decode_default;
+  klass->capture_encode = mix_audio_capture_encode_default;
+  klass->start = mix_audio_start_default;
+  klass->stop_drop = mix_audio_stop_drop_default;
+  klass->stop_drain = mix_audio_stop_drain_default;
+  klass->pause = mix_audio_pause_default;
+  klass->resume = mix_audio_resume_default;
+  klass->get_timestamp = mix_audio_get_timestamp_default;
+  klass->set_mute = mix_audio_set_mute_default;
+  klass->get_mute = mix_audio_get_mute_default;
+  klass->get_max_vol = mix_audio_get_max_vol_default;
+  klass->get_min_vol = mix_audio_get_min_vol_default;
+  klass->get_volume = mix_audio_get_volume_default;
+  klass->set_volume = mix_audio_set_volume_default;
+  klass->deinitialize = mix_audio_deinitialize_default;
+  klass->get_stream_state = mix_audio_get_stream_state_default;
+  klass->get_state = mix_audio_get_state_default;
+  klass->is_am_available = mix_audio_is_am_available_default;
+  klass->get_output_configuration = mix_audio_get_output_configuration_default;
+
+  // Set log handler...
+  if (!g_log_handler)
+  {
+    // Get Environment variable
+    // See mix_audio_log for details
+    const gchar* loglevel = g_getenv("MIX_AUDIO_DEBUG");
+    guint64 ll = 0;
+    if (loglevel)
+    {
+      if (g_strstr_len(loglevel,-1, "0x") == loglevel)
+      {
+        // Hex string
+        ll = g_ascii_strtoull(loglevel+2, NULL, 16);
+      }
+      else
+      {
+        // Decimal string
+        ll = g_ascii_strtoull(loglevel, NULL, 10);
+      }
+    }
+    guint32 mask = (guint32)ll;
+    // Use GUINT_TO_POINTER for a portable integer-to-pointer conversion;
+    // a plain (gpointer) cast of a 32-bit integer triggers
+    // "cast to pointer from integer of different size" on 64-bit builds.
+    g_log_handler = g_log_set_handler(G_LOG_DOMAIN, 0xffffffff, mix_audio_log, GUINT_TO_POINTER(mask));
+  }
+}
+
+/* GLib log handler: print a message to stderr only if its severity bit
+ * is enabled in the mask that was packed into user_data at
+ * g_log_set_handler time. Output format:
+ *   <usec-timestamp>:<domain>-<levelname>: <message> */
+static void mix_audio_log(const gchar *log_domain, GLogLevelFlags log_level, const gchar *message, gpointer user_data)
+{
+ // Log message based on a mask.
+ // Mask could be read from MIX_AUDIO_DEBUG environment variable
+ // mask is a bit mask specifying the message to print. The lsb (0) is "ERROR" and graduating increasing
+ // value as describe in GLogLevelFlags structure. Not that lsb in GLogLevelFlags is not "ERROR" and
+ // here we shifted the log_level to ignore the first 2 values in GLogLevelFlags, making ERROR align to
+ // the lsb.
+ static const gchar* lognames[] = {"error", "critical", "warning", "message", "log", "debug"};
+ // AND the caller's mask with this message's (shifted) level bit; non-zero
+ // means exactly one bit survives and the message should be printed.
+ guint32 mask = (guint32)user_data & ((G_LOG_LEVEL_MASK & log_level) >> 2);
+ gint index = 0;
+
+ GTimeVal t = {0};
+
+ // convert bit mask back to index.
+ index = ffs(mask) - 1;
+
+ // index < 0 means the level is filtered out (ffs returned 0).
+ if ((index<0) || (index >= (sizeof(lognames)/sizeof(lognames[0])))) return;
+
+ g_get_current_time(&t);
+ g_printerr("%" G_GUINT64_FORMAT ":%s-%s: %s\n",
+ ((guint64)1000000 * t.tv_sec + (guint64)t.tv_usec),
+ log_domain?log_domain:G_LOG_DOMAIN,
+ lognames[index],
+ message?message:"NULL");
+}
+
+/* Allocate a new MixAudio instance via GObject. Caller owns the
+ * returned reference and releases it with g_object_unref (or the
+ * project's unref wrapper). */
+MixAudio *mix_audio_new(void)
+{
+ MixAudio *ret = g_object_new(MIX_TYPE_AUDIO, NULL);
+
+ return ret;
+}
+
+/* GObject finalize: free the two recursive locks and drop the cached
+ * audio config params. Deliberately takes no locks (see comment below).
+ * NOTE(review): this does not chain up to the parent class' finalize as
+ * GObject convention requires — TODO confirm whether the parent_class
+ * pointer is kept elsewhere in this file and chain-up is intentional
+ * to skip. */
+void mix_audio_finalize(GObject *obj)
+{
+ /* clean up here. */
+ MixAudio *mix = MIX_AUDIO(obj);
+
+ if (G_UNLIKELY(!mix)) return;
+
+ /*
+ We are not going to check the thread lock anymore in this method.
+ If a thread is accessing the object it better still have a ref on this
+ object and in that case, this method won't be called.
+
+ The application have to risk access violation if it calls the methods in
+ a thread without actually holding a reference.
+ */
+
+ g_debug("_finalized(). bytes written=%" G_GUINT64_FORMAT, mix->bytes_written);
+
+ g_static_rec_mutex_free (&mix->streamlock);
+ g_static_rec_mutex_free (&mix->controllock);
+
+ if (mix->audioconfigparams)
+ {
+ mix_acp_unref(mix->audioconfigparams);
+ mix->audioconfigparams = NULL;
+ }
+}
+
+/* NULL-safe reference increment. Returns @mix with one additional ref,
+ * or NULL if @mix is NULL. */
+MixAudio *mix_audio_ref(MixAudio *mix)
+{
+ if (G_UNLIKELY(!mix)) return NULL;
+
+ return (MixAudio*)g_object_ref(G_OBJECT(mix));
+}
+
+/* Default implementation of MixAudio::initialize.
+ * Opens the LPE device (or, under LPESTUB, a plain file given by
+ * MIX_AUDIO_OUTPUT or a temp file) and moves the object from
+ * UNINITIALIZED to INITIALIZED in the requested codec mode.
+ * Lock order: streamlock (trylock) then controllock — both are needed
+ * because init must exclude data-path and control-path callers alike.
+ * Returns MIX_RESULT_WRONG_STATE if a decode/encode/drain is pending
+ * or the object is already initialized; MIX_RESULT_LPE_NOTAVAIL when
+ * the device cannot be opened. @aip and @drminitparams are currently
+ * ignored (see TODO). */
+MIX_RESULT mix_audio_initialize_default(MixAudio *mix, MixCodecMode mode, MixAudioInitParams *aip, MixDrmParams *drminitparams)
+{
+ MIX_RESULT ret = MIX_RESULT_FAIL;
+
+ if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR;
+
+ // TODO: parse and process MixAudioInitParams. It is ignored for now.
+
+ // initialized must be called with both thread-lock held, so no other operation is allowed.
+
+ // try lock stream thread. If failed, a pending _decode/_encode/_drain is ongoing.
+ if (!g_static_rec_mutex_trylock(&mix->streamlock)) return MIX_RESULT_WRONG_STATE;
+
+ // also lock the control thread lock.
+ _LOCK(&mix->controllock);
+
+ if (mix->state == MIX_STATE_UNINITIALIZED)
+ {
+ // Only allowed in uninitialized state.
+ switch (mode)
+ {
+ case MIX_CODING_DECODE:
+ case MIX_CODING_ENCODE:
+ {
+ // Open device. Same flags to open for decode and encode?
+#ifdef LPESTUB
+ //g_debug("Reading env var LPESTUB_FILE for data output file.\n");
+ //const char* filename = g_getenv("LPESTUB_FILE");
+ gchar *filename = NULL;
+ GError *err = NULL;
+ const gchar* fn = NULL;
+ fn = g_getenv("MIX_AUDIO_OUTPUT");
+ if (fn)
+ mix->fileDescriptor = open(fn, O_RDWR|O_CREAT, S_IRUSR|S_IWUSR);
+
+ // Fall back to a temp file when MIX_AUDIO_OUTPUT is unset or
+ // could not be opened.
+ if (mix->fileDescriptor == -1)
+ {
+ mix->fileDescriptor = g_file_open_tmp ("mixaudio.XXXXXX", &filename, &err);
+
+ if (err)
+ {
+ g_warning("Oops, cannot open temp file: Error message: %s", err->message);
+ }
+ else
+ {
+ g_debug("Opening %s as output data file.\n", filename);
+ }
+ }
+ else
+ {
+ g_debug("Opening %s as output data file.\n", fn);
+ }
+ if (filename) g_free(filename);
+#else
+ g_debug("Opening %s\n", LPE_DEVICE);
+ mix->fileDescriptor = open(LPE_DEVICE, O_RDWR);
+#endif
+ if (mix->fileDescriptor != -1)
+ {
+ mix->codecMode = mode;
+ mix->state = MIX_STATE_INITIALIZED;
+ ret = MIX_RESULT_SUCCESS;
+ g_debug("open() succeeded. fd=%d", mix->fileDescriptor);
+ }
+ else
+ {
+ ret = MIX_RESULT_LPE_NOTAVAIL;
+ }
+ }
+ break;
+ default:
+ ret = MIX_RESULT_INVALID_PARAM;
+ break;
+ }
+ }
+ else
+ {
+ ret = MIX_RESULT_WRONG_STATE;
+ }
+
+ _UNLOCK(&mix->controllock);
+ _UNLOCK(&mix->streamlock);
+
+ return ret;
+}
+
+/* Query whether the Intel Audio Manager (IAM) is available.
+ * Hard-coded to FALSE until IAM integration lands (see TODO). */
+gboolean mix_audio_am_is_available(void)
+{
+ // return FALSE for now until IAM is available for integration.
+ // TODO: Check IAM
+ return FALSE;
+}
+
+/* Query whether IAM is enabled for this MixAudio instance.
+ * Stubbed to FALSE pending IAM integration (see TODO). */
+gboolean mix_audio_base_am_is_enabled(MixAudio *mix)
+{
+ // TODO: Check IAM usage
+ return FALSE;
+}
+
+/**
+ * mix_audio_SST_SET_PARAMS:
+ * @mix: #MixAudio object.
+ * @params: Audio parameter used to configure SST.
+ * @returns: #MIX_RESULT indicating configuration result.
+ *
+ * This method setup up a SST stream with the given parameters. Note that even though
+ * this method could succeed and SST stream is setup properly, client may still not be able
+ * to use the session if other condition are met, such as a successfully set-up IAM, if used.
+ *
+ * On success the converted params are cached in mix->audioconfigparams
+ * (replacing any previous copy) and deviceState/streamState/streamID are
+ * updated; mix->state is intentionally NOT changed here.
+ */
+MIX_RESULT mix_audio_SST_SET_PARAMS(MixAudio *mix, MixAudioConfigParams *params)
+{
+ MIX_RESULT ret = MIX_RESULT_SUCCESS;
+
+ if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR;
+
+ if (mix->state == MIX_STATE_UNINITIALIZED) return MIX_RESULT_NOT_INIT;
+
+ if (!MIX_IS_AUDIOCONFIGPARAMS(params)) return MIX_RESULT_INVALID_PARAM;
+
+ mix_acp_print_params(params);
+
+ struct snd_sst_params sst_params = {0};
+
+ gboolean converted = mix_sst_params_convert(params, &sst_params);
+
+ if (converted)
+ {
+ // Setup the driver structure
+ // We are assuming the configstream will always be called after open so the codec mode
+ // should already been setup.
+ sst_params.stream_id = mix->streamID;
+ // We are not checking the codecMODE here for out-of-range...assuming we check that
+ // during init...
+ if (mix->codecMode == MIX_CODING_ENCODE)
+ sst_params.ops = STREAM_OPS_CAPTURE;
+ else sst_params.ops = STREAM_OPS_PLAYBACK;
+
+ // hard-coded to support music only.
+ sst_params.stream_type = 0x0; // stream_type 0x00 is STREAM_TYPE_MUSIC per SST doc.
+
+ // SET_PARAMS
+ int retVal = 0;
+
+#ifdef LPESTUB
+ // Not calling the ioctl
+#else
+ g_debug("Calling SNDRV_SST_STREAM_SET_PARAMS. fd=%d", mix->fileDescriptor);
+ retVal = ioctl(mix->fileDescriptor, SNDRV_SST_STREAM_SET_PARAMS, &sst_params);
+ g_debug("_SET_PARAMS returned %d", retVal);
+#endif
+
+ if (!retVal)
+ {
+ // IOCTL success.
+ switch (sst_params.result)
+ {
+ // Please refers to SST API doc for return value definition.
+ case 5:
+ g_debug("SET_PARAMS succeeded with Stream Parameter Modified.");
+ /* fall through: result 5 is still a success */
+ case 0:
+ // driver says ok, too.
+ ret = MIX_RESULT_SUCCESS;
+ mix->deviceState = MIX_AUDIO_DEV_ALLOCATED;
+ mix->streamState = MIX_STREAM_STOPPED;
+ mix->streamID = sst_params.stream_id;
+ // clear old params
+ if (MIX_IS_AUDIOCONFIGPARAMS(mix->audioconfigparams))
+ {
+ mix_acp_unref(mix->audioconfigparams);
+ mix->audioconfigparams=NULL;
+ }
+ // replace with new one.
+ mix->audioconfigparams = MIX_AUDIOCONFIGPARAMS(mix_params_dup(MIX_PARAMS(params)));
+ // Note: do not set mix->state here because this state may rely op other than SET_PARAMS
+ g_debug("SET_PARAMS succeeded streamID=%d.", mix->streamID);
+ break;
+ case 1:
+ ret = MIX_RESULT_STREAM_NOTAVAIL;
+ g_debug("SET_PARAMS failed STREAM not available.");
+ break;
+ case 2:
+ ret = MIX_RESULT_CODEC_NOTAVAIL;
+ g_debug("SET_PARAMS failed CODEC not available.");
+ break;
+ case 3:
+ ret = MIX_RESULT_CODEC_NOTSUPPORTED;
+ g_debug("SET_PARAMS failed CODEC not supported.");
+ break;
+ case 4:
+ ret = MIX_RESULT_INVALID_PARAM;
+ g_debug("SET_PARAMS failed Invalid Stream Parameters.");
+ break;
+ case 6:
+ g_debug("SET_PARAMS failed Invalid Stream ID.");
+ /* fall through: treated as generic failure below */
+ default:
+ ret = MIX_RESULT_FAIL;
+ g_critical("SET_PARAMS failed unexpectedly. Result code: %u\n", sst_params.result);
+ break;
+ }
+ }
+ else
+ {
+ // log errors
+ ret = MIX_RESULT_SYSTEM_ERRNO;
+ g_debug("Failed to SET_PARAMS. errno:0x%08x. %s\n", errno, strerror(errno));
+ }
+ }
+ else
+ {
+ ret = MIX_RESULT_INVALID_PARAM;
+ }
+
+ return ret;
+}
+
+/* Default implementation of MixAudio::get_state.
+ * Copies mix->state into *@state. Unlocked read — callers get a
+ * snapshot that may be stale by the time they act on it. */
+MIX_RESULT mix_audio_get_state_default(MixAudio *mix, MixState *state)
+{
+ MIX_RESULT ret = MIX_RESULT_SUCCESS;
+ if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR;
+
+ if (state)
+ *state = mix->state;
+ else
+ ret = MIX_RESULT_NULL_PTR;
+
+ return ret;
+}
+
+/* Default implementation of MixAudio::decode.
+ * Holds streamlock for the whole call, then dispatches on the decode
+ * mode: DIRECTRENDER pushes input straight to the device via writev,
+ * otherwise uses the STREAM_DECODE ioctl which also returns decoded
+ * output into @iovout. Requires CONFIGURED state; returns WRONG_STATE
+ * if another data-path call is in progress. */
+MIX_RESULT mix_audio_decode_default(MixAudio *mix, const MixIOVec *iovin, gint iovincnt, guint64 *insize, MixIOVec *iovout, gint iovoutcnt, guint64 *outsize)
+{
+ MIX_RESULT ret = MIX_RESULT_FAIL;
+
+ if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR;
+
+ if (!g_static_rec_mutex_trylock(&mix->streamlock)) return MIX_RESULT_WRONG_STATE;
+
+ if (mix->state != MIX_STATE_CONFIGURED) _UNLOCK_RETURN(&mix->streamlock, MIX_RESULT_WRONG_STATE);
+
+ if (MIX_ACP_DECODEMODE(mix->audioconfigparams) == MIX_DECODE_DIRECTRENDER)
+ ret = mix_audio_SST_writev(mix, iovin, iovincnt, insize);
+ else
+ ret = mix_audio_SST_STREAM_DECODE(mix, iovin, iovincnt, insize, iovout, iovoutcnt, outsize);
+
+ _UNLOCK(&mix->streamlock);
+
+ return ret;
+}
+
+/* Default implementation of MixAudio::deinitialize.
+ * Closes the device fd and returns the object to UNINITIALIZED.
+ * Only legal when the stream is STOPPED or NULL; a playing/draining
+ * stream yields WRONG_STATE. Takes streamlock (trylock) then
+ * controllock, mirroring _initialize.
+ * NOTE(review): the AUDIO_MANAGER unregister below runs before the
+ * state checks and never clears mix->amStreamID — TODO confirm this
+ * is intentional. */
+MIX_RESULT mix_audio_deinitialize_default(MixAudio *mix)
+{
+ MIX_RESULT ret = MIX_RESULT_SUCCESS;
+ if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR;
+
+ if (!g_static_rec_mutex_trylock(&mix->streamlock)) return MIX_RESULT_WRONG_STATE;
+
+#ifdef AUDIO_MANAGER
+ if (mix->amStreamID && (lpe_stream_unregister(mix->amStreamID) < 0)) {
+ g_debug("lpe_stream_unregister failed\n");
+ //return MIX_RESULT_FAIL; // TODO: not sure what to do here
+ }
+#endif
+
+ _LOCK(&mix->controllock);
+
+ if (mix->state == MIX_STATE_UNINITIALIZED)
+ ret = MIX_RESULT_SUCCESS; // already deinitialized: idempotent no-op
+ else if ((mix->streamState != MIX_STREAM_STOPPED) && (mix->streamState != MIX_STREAM_NULL))
+ ret = MIX_RESULT_WRONG_STATE;
+ else
+ {
+ if (mix->fileDescriptor != -1)
+ {
+ g_debug("Closing fd=%d\n", mix->fileDescriptor);
+ close(mix->fileDescriptor);
+ mix->fileDescriptor = -1;
+ mix->deviceState = MIX_AUDIO_DEV_CLOSED;
+ }
+ mix->state = MIX_STATE_UNINITIALIZED;
+ }
+
+ mix->bytes_written = 0;
+
+ _UNLOCK(&mix->controllock);
+ _UNLOCK(&mix->streamlock);
+
+ return ret;
+}
+
+
+/* Default implementation of MixAudio::stop_drop.
+ * Issues SNDRV_SST_STREAM_DROP to stop immediately, discarding queued
+ * data. Deliberately does NOT take streamlock: DROP must be callable
+ * while a blocking write()/DRAIN is in flight so it can unblock them.
+ * Requires CONFIGURED state. */
+MIX_RESULT mix_audio_stop_drop_default(MixAudio *mix)
+{
+ MIX_RESULT ret = MIX_RESULT_FAIL;
+
+ if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR;
+
+ _LOCK(&mix->controllock);
+
+ if (mix->state != MIX_STATE_CONFIGURED)
+ _UNLOCK_RETURN(&mix->controllock, MIX_RESULT_NOT_CONFIGURED);
+
+ // Will call DROP even if we are already stopped. It is needed to unblock any pending write() call.
+// if (mix->streamState == MIX_STREAM_DRAINING)
+// ret = MIX_RESULT_WRONG_STATE;
+// else
+ {
+ int retVal = 0;
+#ifdef LPESTUB
+ // Not calling ioctl.
+#else
+ g_debug("Calling SNDRV_SST_STREAM_DROP. fd=%d", mix->fileDescriptor);
+ retVal = ioctl(mix->fileDescriptor, SNDRV_SST_STREAM_DROP);
+ g_debug("_DROP returned %d", retVal);
+#endif
+
+ if (!retVal)
+ {
+ mix->streamState = MIX_STREAM_STOPPED;
+ ret = MIX_RESULT_SUCCESS;
+ }
+ else
+ {
+ ret = MIX_RESULT_SYSTEM_ERRNO;
+ g_debug("Failed to stop stream. Error:0x%08x. Unknown stream state.", errno);
+ }
+ }
+
+ _UNLOCK(&mix->controllock);
+
+ return ret;
+}
+
+/* Default implementation of MixAudio::stop_drain.
+ * Stops the stream after all queued data has played. The blocking
+ * DRAIN ioctl is issued while holding ONLY streamlock, with
+ * streamState pre-set to DRAINING under controllock — this lets
+ * stop_drop (which takes only controllock) interrupt a long drain.
+ * Because of that window, the post-ioctl code re-takes controllock and
+ * re-validates streamState: a failed DRAIN is still success if DROP
+ * already moved us to STOPPED, and any other state change during
+ * DRAINING is an internal error. */
+MIX_RESULT mix_audio_stop_drain_default(MixAudio *mix)
+{
+ MIX_RESULT ret = MIX_RESULT_FAIL;
+ int retVal = 0;
+
+ if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR;
+
+ // No need to lock to check vars that won't be changed in this function
+
+ if (g_static_rec_mutex_trylock(&mix->streamlock))
+ {
+ gboolean doDrain = FALSE;
+
+ if (mix->state != MIX_STATE_CONFIGURED)
+ _UNLOCK_RETURN(&mix->streamlock, MIX_RESULT_NOT_CONFIGURED);
+
+ _LOCK(&mix->controllock);
+ {
+ if (mix->streamState == MIX_STREAM_STOPPED)
+ ret = MIX_RESULT_SUCCESS; // nothing to drain
+ else if ((mix->streamState == MIX_STREAM_DRAINING) || mix->streamState == MIX_STREAM_PAUSED_DRAINING)
+ ret = MIX_RESULT_WRONG_STATE; // another drain is already in flight
+ else
+ {
+ doDrain = TRUE;
+ g_debug("MIX stream is DRAINING");
+ mix->streamState = MIX_STREAM_DRAINING;
+ }
+ }
+ _UNLOCK(&mix->controllock);
+
+
+ if (doDrain)
+ {
+ // Calling the blocking DRAIN without holding the controllock
+ // TODO: remove this ifdef when API becomes available.
+ #ifdef LPESTUB
+
+ #else
+ //g_debug("Calling SNDRV_SST_STREAM_DRAIN. fd=0x%08x", mix->fileDescriptor);
+ //retVal = ioctl(mix->fileDescriptor, SNDRV_SST_STREAM_DRAIN);
+// g_warning("Calling SNDRV_SST_STREAM_DROP instead of SNDRV_SST_STREAM_DRAIN here since DRAIN is not yet integrated. There may be data loss. fd=%d", mix->fileDescriptor);
+ g_debug("Calling SNDRV_SST_STREAM_DRAIN fd=%d", mix->fileDescriptor);
+ retVal = ioctl(mix->fileDescriptor, SNDRV_SST_STREAM_DRAIN);
+ g_debug("_DRAIN returned %d", retVal);
+ #endif
+
+ if (retVal)
+ {
+ _LOCK(&mix->controllock);
+ if (mix->streamState != MIX_STREAM_STOPPED)
+ {
+ // DRAIN could return failed if DROP is called during DRAIN.
+ // Any state resulting as a failed DRAIN would be error, execpt STOPPED.
+ ret = MIX_RESULT_SYSTEM_ERRNO;
+ g_debug("Failed to drain stream. Error:0x%08x. Unknown stream state.", errno);
+ }
+ _UNLOCK(&mix->controllock);
+ }
+ else
+ {
+ _LOCK(&mix->controllock);
+ if ((mix->streamState != MIX_STREAM_DRAINING) &&
+ (mix->streamState != MIX_STREAM_STOPPED))
+ {
+ // State is changed while in DRAINING. This should not be allowed and is a bug.
+ g_warning("MIX Internal state error! DRAIN state(%u) changed!",mix->streamState);
+ ret = MIX_RESULT_FAIL;
+ }
+ else
+ {
+ mix->streamState = MIX_STREAM_STOPPED;
+ ret = MIX_RESULT_SUCCESS;
+ }
+ _UNLOCK(&mix->controllock);
+ }
+ }
+
+ _UNLOCK(&mix->streamlock);
+ }
+ else
+ {
+ // Cannot obtain stream lock meaning there's a pending _decode/_encode.
+ // Will not proceed.
+ ret = MIX_RESULT_WRONG_STATE;
+ }
+
+ return ret;
+}
+
+/* Default implementation of MixAudio::start.
+ * Issues SNDRV_SST_STREAM_START for a STOPPED stream and moves it to
+ * PLAYING. Idempotent for already-running/paused states (returns
+ * success without touching the driver); DRAINING is rejected.
+ * Requires CONFIGURED state and disallows DECODERETURN mode.
+ * Under LPESTUB, records a wall-clock start time used by the fake
+ * timestamp logic in _get_timestamp. */
+MIX_RESULT mix_audio_start_default(MixAudio *mix)
+{
+ MIX_RESULT ret = MIX_RESULT_FAIL;
+
+ if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR;
+
+ _LOCK(&mix->controllock);
+
+ if (mix->state != MIX_STATE_CONFIGURED)
+ _UNLOCK_RETURN(&mix->controllock, MIX_RESULT_NOT_CONFIGURED);
+
+ if (MIX_ACP_DECODEMODE(mix->audioconfigparams) == MIX_DECODE_DECODERETURN)
+ _UNLOCK_RETURN(&mix->controllock, MIX_RESULT_WRONGMODE);
+
+ // Note this impl return success even if stream is already started.
+ switch (mix->streamState)
+ {
+ case MIX_STREAM_PLAYING:
+ case MIX_STREAM_PAUSED:
+ case MIX_STREAM_PAUSED_DRAINING:
+ ret = MIX_RESULT_SUCCESS;
+ break;
+ case MIX_STREAM_STOPPED:
+ {
+ int retVal = 0;
+#ifdef LPESTUB
+ // Not calling ioctl.
+#else
+ g_debug("Calling SNDRV_SST_STREAM_START. fd=%d", mix->fileDescriptor);
+ retVal = ioctl(mix->fileDescriptor, SNDRV_SST_STREAM_START);
+ g_debug("_START returned %d", retVal);
+#endif
+ if (retVal)
+ {
+ ret = MIX_RESULT_SYSTEM_ERRNO;
+ g_debug("Fail to START. Error:0x%08x. Stream state unchanged.", errno);
+ mix_audio_debug_dump(mix);
+ }
+ else
+ {
+ mix->streamState = MIX_STREAM_PLAYING;
+ ret = MIX_RESULT_SUCCESS;
+ }
+ }
+ break;
+ case MIX_STREAM_DRAINING:
+ default:
+ ret = MIX_RESULT_WRONG_STATE;
+ break;
+ }
+
+ _UNLOCK(&mix->controllock);
+
+#ifdef LPESTUB
+ // Stub timestamping: remember when playback (re)started, in msec.
+ if (MIX_SUCCEEDED(ret))
+ {
+ if (mix->ts_last == 0)
+ {
+ GTimeVal tval = {0};
+ g_get_current_time(&tval);
+ mix->ts_last = 1000ll * tval.tv_sec + tval.tv_usec / 1000;
+ }
+ }
+#endif
+ return ret;
+}
+
+/* Report the library version derived from the libtool CURRENT/AGE
+ * numbers so it matches the installed shared-library filename.
+ * Either out-parameter may be NULL to skip it. */
+MIX_RESULT mix_audio_get_version(guint* major, guint *minor)
+{
+ // simulate the way libtool generate version so the number synchronize with the filename.
+ if (major)
+ *major = MIXAUDIO_CURRENT-MIXAUDIO_AGE;
+
+ if (minor)
+ *minor = MIXAUDIO_AGE;
+
+ return MIX_RESULT_SUCCESS;
+}
+
+/* Default implementation of MixAudio::configure.
+ * Validates @audioconfigparams, then (under streamlock + controllock)
+ * re-registers with the audio manager as needed and pushes the params
+ * to the SST driver via SET_PARAMS. On success the object moves to
+ * CONFIGURED; on any failure it falls back to INITIALIZED.
+ * Only legal when initialized and the stream is STOPPED or NULL.
+ * NOTE(review): useIAM is set TRUE here but never reset to FALSE on a
+ * later reconfigure with a different audio_manager — TODO confirm. */
+MIX_RESULT mix_audio_configure_default(MixAudio *mix, MixAudioConfigParams *audioconfigparams, MixDrmParams *drmparams)
+{
+ MIX_RESULT ret = MIX_RESULT_SUCCESS;
+
+ if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR;
+
+ // param checks
+ if (!MIX_IS_AUDIOCONFIGPARAMS(audioconfigparams)) return MIX_RESULT_NOT_ACP;
+ if (MIX_ACP_DECODEMODE(audioconfigparams) >= MIX_DECODE_LAST) return MIX_RESULT_INVALID_DECODE_MODE;
+ if (!mix_acp_is_streamname_valid(audioconfigparams)) return MIX_RESULT_INVALID_STREAM_NAME;
+
+ // If we cannot lock stream thread, data is flowing and we can't configure.
+ if (!g_static_rec_mutex_trylock(&mix->streamlock)) return MIX_RESULT_WRONG_STATE;
+
+ _LOCK(&mix->controllock);
+
+ // Check all unallowed conditions
+ if (mix->state == MIX_STATE_UNINITIALIZED)
+ ret = MIX_RESULT_NOT_INIT; // Will not allowed if the state is still UNINITIALIZED
+ else if ((mix->codecMode != MIX_CODING_DECODE) && (mix->codecMode != MIX_CODING_ENCODE))
+ ret = MIX_RESULT_WRONGMODE; // This configure is allowed only in DECODE mode.
+ else if ((mix->streamState != MIX_STREAM_STOPPED) && (mix->streamState != MIX_STREAM_NULL))
+ ret = MIX_RESULT_WRONG_STATE;
+
+ if (!MIX_SUCCEEDED(ret))
+ {
+ // Some check failed. Unlock and return.
+ _UNLOCK(&mix->controllock);
+ _UNLOCK(&mix->streamlock);
+ return ret;
+ }
+
+ if (audioconfigparams->audio_manager == MIX_AUDIOMANAGER_INTELAUDIOMANAGER) {
+ mix->useIAM = TRUE;
+ }
+ // now configure stream.
+
+ // Drop any stale AM registration first, then SET_PARAMS, then re-register.
+ ret = mix_audio_am_unregister(mix, audioconfigparams);
+
+ if (MIX_SUCCEEDED(ret))
+ {
+ ret = mix_audio_SST_SET_PARAMS(mix, audioconfigparams);
+ }
+
+ if (MIX_SUCCEEDED(ret))
+ {
+ ret = mix_audio_am_register(mix, audioconfigparams);
+ }
+
+ if (MIX_SUCCEEDED(ret))
+ {
+ mix->state = MIX_STATE_CONFIGURED;
+ }
+ else
+ {
+ mix->state = MIX_STATE_INITIALIZED;
+ }
+
+ _UNLOCK(&mix->controllock);
+ _UNLOCK(&mix->streamlock);
+
+ return ret;
+}
+
+/* Default implementation of MixAudio::get_timestamp.
+ * Returns the stream's rendered-time in milliseconds via @msecs.
+ * Not meaningful in DECODERETURN mode (returns WRONGMODE).
+ * Under LPESTUB the timestamp is synthesized either from
+ * bytes_written/bitrate or from wall-clock time since _start;
+ * otherwise it is queried from the driver with
+ * SNDRV_SST_STREAM_GET_TSTAMP.
+ * Fix: the GET_TSTAMP ioctl's return value was previously discarded,
+ * so driver failures were silently reported as success with a stale
+ * ts of 0; it is now captured in retVal like every other ioctl here. */
+MIX_RESULT mix_audio_get_timestamp_default(MixAudio *mix, guint64 *msecs)
+{
+ MIX_RESULT ret = MIX_RESULT_SUCCESS;
+
+ if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR;
+
+ if (!msecs) return MIX_RESULT_NULL_PTR;
+
+ _LOCK(&mix->controllock);
+
+ if (mix->state == MIX_STATE_CONFIGURED)
+ {
+ if ((mix->codecMode == MIX_CODING_DECODE) && (MIX_ACP_DECODEMODE(mix->audioconfigparams) == MIX_DECODE_DECODERETURN))
+ {
+ ret = MIX_RESULT_WRONGMODE;
+ }
+ else {
+
+ unsigned long long ts = 0;
+ int retVal = 0;
+
+#ifdef LPESTUB
+ // For stubbing, just get system clock.
+ if (MIX_ACP_BITRATE(mix->audioconfigparams) > 0)
+ {
+ // use bytes_written and bitrate
+ // to get times in msec.
+ ts = mix->bytes_written * 8000 / MIX_ACP_BITRATE(mix->audioconfigparams);
+ }
+ else if (mix->ts_last)
+ {
+ // No bitrate known: elapsed wall-clock since last _start/_resume
+ // plus time accumulated across earlier pause intervals.
+ GTimeVal tval = {0};
+ g_get_current_time(&tval);
+ ts = 1000ll * tval.tv_sec + tval.tv_usec / 1000;
+ ts -= mix->ts_last;
+ ts += mix->ts_elapsed;
+ }
+ else
+ {
+ ts = 0;
+ }
+#else
+ g_debug("Calling SNDRV_SST_STREAM_GET_TSTAMP. fd=%d", mix->fileDescriptor);
+ // BUGFIX: capture the ioctl result; it was previously ignored,
+ // making the error branch below unreachable.
+ retVal = ioctl(mix->fileDescriptor, SNDRV_SST_STREAM_GET_TSTAMP, &ts);
+#endif
+
+ if (retVal)
+ {
+ ret = MIX_RESULT_SYSTEM_ERRNO;
+ g_debug("_GET_TSTAMP failed. Error:0x%08x", errno);
+ //ret = MIX_RESULT_FAIL;
+ mix_audio_debug_dump(mix);
+ }
+ else
+ {
+ *msecs = ts;
+ g_debug("_GET_TSTAMP returned %" G_GUINT64_FORMAT, ts);
+ }
+ }
+ }
+ else
+ ret = MIX_RESULT_NOT_CONFIGURED;
+
+ _UNLOCK(&mix->controllock);
+
+ return ret;
+}
+
+/* Return TRUE if switching from @oldparams to @newparams requires a
+ * new audio-manager registration, i.e. when the stream name differs.
+ * Used by _am_unregister to decide whether to drop the old stream. */
+gboolean mix_audio_AM_Change(MixAudioConfigParams *oldparams, MixAudioConfigParams *newparams)
+{
+ if (g_strcmp0(oldparams->stream_name, newparams->stream_name) == 0) {
+ return FALSE;
+ }
+
+ return TRUE;
+}
+
+/* Unregister this stream from the audio manager when the incoming
+ * config (@audioconfigparams) differs from the cached one in decode
+ * mode or stream name. No-op when not registered or when either param
+ * set is missing. Returns MIX_RESULT_FAIL if lpe_stream_unregister
+ * itself fails. */
+MIX_RESULT mix_audio_am_unregister(MixAudio *mix, MixAudioConfigParams *audioconfigparams)
+{
+ MIX_RESULT ret = MIX_RESULT_SUCCESS;
+
+ if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR;
+
+ if (mix->am_registered && MIX_IS_AUDIOCONFIGPARAMS(mix->audioconfigparams) && MIX_IS_AUDIOCONFIGPARAMS(audioconfigparams))
+ {
+ // we have 2 params. let's check
+ if ((MIX_ACP_DECODEMODE(mix->audioconfigparams) != MIX_ACP_DECODEMODE(audioconfigparams)) ||
+ mix_audio_AM_Change(mix->audioconfigparams, audioconfigparams)) //TODO: add checking for SST change
+ {
+ // decode mode change.
+ if (mix->amStreamID > 0) {
+ if (lpe_stream_unregister(mix->amStreamID) != 0) {
+ return MIX_RESULT_FAIL;
+ }
+ mix->am_registered = FALSE;
+ }
+ }
+ }
+
+ return ret;
+}
+
+/* Register this stream with the audio manager (when built with
+ * AUDIO_MANAGER and IAM use is enabled and we are not yet registered).
+ * codec_mode maps MIX_CODING_DECODE->0 / MIX_CODING_ENCODE->1 per the
+ * lpe_stream_register contract. Per the AM spec, a return of -1 means
+ * registration failed and -2 means direct render is unavailable. */
+MIX_RESULT mix_audio_am_register(MixAudio *mix, MixAudioConfigParams *audioconfigparams)
+{
+ MIX_RESULT ret = MIX_RESULT_SUCCESS;
+
+ if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR;
+
+ gint32 codec_mode = -1;
+
+ if (mix->codecMode == MIX_CODING_DECODE)
+ codec_mode = 0;
+ else if (mix->codecMode == MIX_CODING_ENCODE)
+ codec_mode = 1;
+ else
+ return MIX_RESULT_FAIL; // TODO: what to do when fail?
+
+#ifdef AUDIO_MANAGER
+ if (audioconfigparams->stream_name == NULL)
+ return MIX_RESULT_FAIL;
+
+// if AM is enable, and not_registered, then register
+ if (mix->useIAM && !mix->am_registered) {
+ gint32 amStreamID = lpe_stream_register(mix->streamID, "music", audioconfigparams->stream_name, codec_mode);
+
+ if (amStreamID == -1){
+ mix->amStreamID = 0;
+ return MIX_RESULT_FAIL;
+ }
+ else if (amStreamID == -2) { // -2: Direct render not avail, see AM spec
+ mix->amStreamID = 0;
+ return MIX_RESULT_DIRECT_NOTAVAIL;
+ }
+ mix->am_registered = TRUE;
+ mix->amStreamID = amStreamID;
+ }
+#endif
+
+ return ret;
+}
+
+/* Default implementation of MixAudio::capture_encode.
+ * Copies the caller's MixIOVec list into a struct iovec array on the
+ * stack and fills it with encoded data via readv() on the device fd.
+ * Note: the per-entry sizes in @iovout are not updated with the
+ * actual byte count (see the commented-out block below).
+ * NOTE(review): the NULL check after g_alloca is dead code —
+ * g_alloca cannot return NULL; harmless, left as-is. */
+MIX_RESULT mix_audio_capture_encode_default(MixAudio *mix, MixIOVec *iovout, gint iovoutcnt)
+{
+ struct iovec *vec;
+ gint bytes_read;
+
+ if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR;
+
+ // TODO: set count limit
+ if (iovoutcnt < 1) {
+ return MIX_RESULT_INVALID_COUNT;
+ }
+
+ if (iovout == NULL)
+ return MIX_RESULT_NULL_PTR;
+
+ vec = (struct iovec *) g_alloca(sizeof(struct iovec) * iovoutcnt);
+ if (!vec) return MIX_RESULT_NO_MEMORY;
+
+ gint i;
+ for (i=0; i < iovoutcnt; i++)
+ {
+ vec[i].iov_base = iovout[i].data;
+ vec[i].iov_len = iovout[i].size;
+ }
+
+ mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_INFO, "begin readv()\n");
+ bytes_read = readv(mix->fileDescriptor, vec, iovoutcnt);
+ mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_INFO, "end readv(), return: %d\n", bytes_read);
+ if (bytes_read < 0) { // TODO: should not be 0, but driver return 0 right now
+ mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_ERROR, "return: %d\n", bytes_read);
+ return MIX_RESULT_FAIL;
+ }
+/*
+ gint bytes_count=0;
+ for (i=0; i < iovoutcnt; i++)
+ {
+ bytes_count += iovout[i].size;
+ }
+ iovout[i].size = bytes_read - bytes_count;
+*/
+ return MIX_RESULT_SUCCESS;
+}
+
+/* Default implementation of MixAudio::get_max_vol.
+ * Reads the maximum volume from the cached firmware info, fetching it
+ * once via mix_audio_FW_INFO if not yet cached. controllock is
+ * recursive, so the nested lock inside _FW_INFO is safe. */
+MIX_RESULT mix_audio_get_max_vol_default(MixAudio *mix, gint *maxvol)
+{
+ if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR;
+
+ MIX_RESULT ret = MIX_RESULT_SUCCESS;
+
+ if (!maxvol) return MIX_RESULT_NULL_PTR;
+
+ _LOCK(&mix->controllock);
+
+ if (!has_FW_INFO)
+ {
+ ret = mix_audio_FW_INFO(mix);
+ }
+
+ if (MIX_SUCCEEDED(ret))
+ {
+ *maxvol = (gint)cur_FW_INFO.pop_info.max_vol;
+ }
+
+ _UNLOCK(&mix->controllock);
+
+ return ret;
+}
+
+
+/* Default implementation of MixAudio::get_min_vol.
+ * Mirror of get_max_vol_default: returns the minimum volume from the
+ * (lazily fetched) cached firmware info. */
+MIX_RESULT mix_audio_get_min_vol_default(MixAudio *mix, gint *minvol)
+{
+ if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR;
+
+ MIX_RESULT ret = MIX_RESULT_SUCCESS;
+
+ if (!minvol) return MIX_RESULT_NULL_PTR;
+
+ _LOCK(&mix->controllock);
+
+ if (!has_FW_INFO)
+ {
+ ret = mix_audio_FW_INFO(mix);
+ }
+
+ if (MIX_SUCCEEDED(ret))
+ {
+ *minvol = (gint)cur_FW_INFO.pop_info.min_vol;
+ }
+
+ _UNLOCK(&mix->controllock);
+
+ return ret;
+}
+
+/* Default implementation of MixAudio::get_stream_state.
+ * Reports the public stream state; the internal PAUSED_DRAINING state
+ * is mapped to plain PAUSED so callers never see it. Requires
+ * CONFIGURED state. */
+MIX_RESULT mix_audio_get_stream_state_default(MixAudio *mix, MixStreamState *streamState)
+{
+ if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR;
+
+ if (!streamState) return MIX_RESULT_NULL_PTR;
+
+ _LOCK(&mix->controllock);
+
+ if (mix->state != MIX_STATE_CONFIGURED) _UNLOCK_RETURN(&mix->controllock, MIX_RESULT_NOT_CONFIGURED);
+
+ // PAUSED_DRAINING is internal state.
+ if (mix->streamState == MIX_STREAM_PAUSED_DRAINING)
+ *streamState = MIX_STREAM_PAUSED;
+ else
+ *streamState = mix->streamState;
+
+ _UNLOCK(&mix->controllock);
+
+ return MIX_RESULT_SUCCESS;
+}
+
+
+/* Default implementation of MixAudio::get_volume.
+ * Queries the stream volume via SNDRV_SST_GET_VOL and returns it
+ * either as the raw driver value (MIX_VOL_DECIBELS) or as a percent of
+ * the firmware maximum (MIX_VOL_PERCENT). The nested
+ * mix_audio_get_max_vol call re-enters controllock, which is fine —
+ * the lock is recursive. Requires CONFIGURED state. */
+MIX_RESULT mix_audio_get_volume_default(MixAudio *mix, gint *currvol, MixVolType type)
+{
+ if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR;
+
+ MIX_RESULT ret = MIX_RESULT_SUCCESS;
+
+ struct snd_sst_vol vol = {0};
+
+ if (!currvol) return MIX_RESULT_NULL_PTR;
+ if ((type != MIX_VOL_PERCENT) && (type != MIX_VOL_DECIBELS)) return MIX_RESULT_INVALID_PARAM;
+
+ _LOCK(&mix->controllock);
+
+ if (mix->state != MIX_STATE_CONFIGURED) _UNLOCK_RETURN(&mix->controllock, MIX_RESULT_NOT_CONFIGURED);
+
+ vol.stream_id = mix->streamID;
+
+ int retVal = 0;
+
+#ifdef LPESTUB
+ // Not calling.
+#else
+ g_debug("Calling SNDRV_SST_GET_VOL. fd=%d", mix->fileDescriptor);
+ retVal = ioctl(mix->fileDescriptor, SNDRV_SST_GET_VOL, &vol);
+ g_debug("SNDRV_SST_GET_VOL returned %d. vol=%d", retVal, vol.volume);
+#endif
+
+ if (retVal)
+ {
+ ret = MIX_RESULT_SYSTEM_ERRNO;
+ g_debug("_GET_VOL failed. Error:0x%08x", errno);
+ mix_audio_debug_dump(mix);
+ }
+ else
+ {
+ gint maxvol = 0;
+ ret = mix_audio_get_max_vol(mix, &maxvol);
+
+ if (MIX_SUCCEEDED(ret))
+ {
+ // Guard against divide-by-zero when firmware reports max vol 0.
+ if (type == MIX_VOL_PERCENT)
+ *currvol = (maxvol!=0)?((vol.volume * 100) / maxvol):0;
+ else
+ *currvol = vol.volume;
+ }
+ }
+
+ _UNLOCK(&mix->controllock);
+
+ return ret;
+}
+
+/* Default implementation of MixAudio::get_mute.
+ * Currently a stub: the SST driver query is not wired up, so *muted is
+ * never written. Fix: add the same NULL-pointer guards every sibling
+ * default implementation performs, so callers at least get a defined
+ * error instead of a success with an uninitialized out-parameter left
+ * untouched. TODO: query mute state from the driver and fill *muted. */
+MIX_RESULT mix_audio_get_mute_default(MixAudio *mix, gboolean* muted)
+{
+ MIX_RESULT ret = MIX_RESULT_SUCCESS;
+
+ if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR;
+ if (!muted) return MIX_RESULT_NULL_PTR;
+
+ return ret;
+}
+
+/* Default implementation of MixAudio::set_mute.
+ * Mutes/unmutes the stream via SNDRV_SST_MUTE. Requires CONFIGURED
+ * state. Note: the driver's mute state is not cached locally, so
+ * get_mute cannot report it back (see get_mute_default stub). */
+MIX_RESULT mix_audio_set_mute_default(MixAudio *mix, gboolean mute)
+{
+ if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR;
+
+ MIX_RESULT ret = MIX_RESULT_SUCCESS;
+
+ struct snd_sst_mute m = { 0 };
+
+ // Normalize gboolean to the driver's 0/1 flag.
+ if (mute) m.mute = 1;
+ else m.mute = 0;
+
+ _LOCK(&mix->controllock);
+
+ if (mix->state != MIX_STATE_CONFIGURED) _UNLOCK_RETURN(&mix->controllock, MIX_RESULT_NOT_CONFIGURED);
+
+ m.stream_id = mix->streamID;
+
+ int retVal = 0;
+
+#ifdef LPESTUB
+ // Not calling.
+#else
+ retVal = ioctl(mix->fileDescriptor, SNDRV_SST_MUTE, &m);
+#endif
+
+ if (retVal)
+ {
+ //ret = MIX_RESULT_FAIL;
+ ret = MIX_RESULT_SYSTEM_ERRNO;
+ g_debug("_MUTE failed. Error:0x%08x", errno);
+ mix_audio_debug_dump(mix);
+ }
+
+ _UNLOCK(&mix->controllock);
+
+ return ret;
+}
+
+/* Default implementation of MixAudio::pause.
+ * Pauses a PLAYING or DRAINING stream via SNDRV_SST_STREAM_PAUSE.
+ * Idempotent when already PAUSED. A failure while DRAINING is mapped
+ * to MIX_RESULT_NEED_RETRY because the drain may have just completed
+ * (see the long comment below). Under LPESTUB, the elapsed playback
+ * time is accumulated into ts_elapsed for the fake timestamp logic. */
+MIX_RESULT mix_audio_pause_default(MixAudio *mix)
+{
+ if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR;
+
+ MIX_RESULT ret = MIX_RESULT_SUCCESS;
+
+ _LOCK(&mix->controllock);
+
+ if (mix->state != MIX_STATE_CONFIGURED) _UNLOCK_RETURN(&mix->controllock, MIX_RESULT_NOT_CONFIGURED);
+
+ if (mix->streamState == MIX_STREAM_PAUSED) _UNLOCK_RETURN(&mix->controllock, MIX_RESULT_SUCCESS);
+
+ if ((mix->streamState != MIX_STREAM_PLAYING) && (mix->streamState != MIX_STREAM_DRAINING))
+ _UNLOCK_RETURN(&mix->controllock, MIX_RESULT_WRONG_STATE);
+
+ int retVal = 0;
+
+#ifdef LPESTUB
+ // Not calling
+#else
+ g_debug("Calling SNDRV_SST_STREAM_PAUSE. fd=%d", mix->fileDescriptor);
+ retVal = ioctl(mix->fileDescriptor, SNDRV_SST_STREAM_PAUSE);
+ g_debug("_PAUSE returned %d", retVal);
+#endif
+
+ if (retVal)
+ {
+ if (mix->streamState == MIX_STREAM_DRAINING)
+ {
+ // if stream state has been DRAINING, DRAIN could become successful during the PAUSE call, but not yet have chance to update streamState since we now hold the lock.
+ // In this case, the mix_streamState becomes out-of-sync with the actual playback state. PAUSE failed due to stream already STOPPED but mix->streamState remains at "DRAINING"
+ // On the other hand, we can't let DRAIN hold the lock the entire time.
+ // We would not know if we fail PAUSE due to DRAINING, or a valid reason.
+ // Need a better mechanism to sync DRAINING.
+ // DRAINING is not likely problem for resume, as long as the PAUSED state is set when stream is really PAUSED.
+ ret = MIX_RESULT_NEED_RETRY;
+ g_warning("PAUSE failed while DRAINING. Draining could be just completed. Retry needed.");
+ }
+ else
+ {
+ ret = MIX_RESULT_SYSTEM_ERRNO;
+ g_debug("_PAUSE failed. Error:0x%08x", errno);
+ mix_audio_debug_dump(mix);
+ }
+ }
+ else
+ {
+ // Preserve the draining flag across the pause so resume knows
+ // whether to go back to DRAINING or PLAYING.
+ if (mix->streamState == MIX_STREAM_DRAINING)
+ {
+ mix->streamState = MIX_STREAM_PAUSED_DRAINING;
+ }
+ else
+ {
+ mix->streamState = MIX_STREAM_PAUSED;
+ }
+ }
+
+ _UNLOCK(&mix->controllock);
+
+#ifdef LPESTUB
+ if (MIX_SUCCEEDED(ret))
+ {
+ GTimeVal tval = {0};
+ g_get_current_time(&tval);
+ guint64 ts = 1000ll * tval.tv_sec + tval.tv_usec / 1000;
+ mix->ts_elapsed += ts - mix->ts_last;
+ mix->ts_last = 0;
+ }
+#endif
+ return ret;
+}
+
+/* Default implementation of MixAudio::resume.
+ * Resumes a PAUSED or PAUSED_DRAINING stream via
+ * SNDRV_SST_STREAM_RESUME, restoring PLAYING or DRAINING respectively.
+ * Idempotent when already PLAYING/DRAINING. Requires CONFIGURED state.
+ * Fixes: (1) the state-update block was missing its `else`, so
+ * streamState was promoted to PLAYING/DRAINING even when the RESUME
+ * ioctl failed; (2) the failure log wrongly said "_PAUSE failed". */
+MIX_RESULT mix_audio_resume_default(MixAudio *mix)
+{
+ if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR;
+
+ MIX_RESULT ret = MIX_RESULT_SUCCESS;
+
+ _LOCK(&mix->controllock);
+
+ if (mix->state != MIX_STATE_CONFIGURED) _UNLOCK_RETURN(&mix->controllock, MIX_RESULT_NOT_CONFIGURED);
+
+ if ((mix->streamState == MIX_STREAM_PLAYING) || (mix->streamState == MIX_STREAM_DRAINING))
+ _UNLOCK_RETURN(&mix->controllock, MIX_RESULT_SUCCESS);
+
+ if ((mix->streamState != MIX_STREAM_PAUSED_DRAINING) && (mix->streamState != MIX_STREAM_PAUSED))
+ _UNLOCK_RETURN(&mix->controllock, MIX_RESULT_WRONG_STATE);
+
+ int retVal = 0;
+
+#ifdef LPESTUB
+ // Not calling
+#else
+ g_debug("Calling SNDRV_SST_STREAM_RESUME");
+ retVal = ioctl(mix->fileDescriptor, SNDRV_SST_STREAM_RESUME);
+ g_debug("_STREAM_RESUME returned %d", retVal);
+#endif
+
+ if (retVal)
+ {
+ ret = MIX_RESULT_SYSTEM_ERRNO;
+ g_debug("_RESUME failed. Error:0x%08x", errno);
+ mix_audio_debug_dump(mix);
+ }
+ else // BUGFIX: only advance the stream state when the ioctl succeeded.
+ {
+ if (mix->streamState == MIX_STREAM_PAUSED_DRAINING)
+ mix->streamState = MIX_STREAM_DRAINING;
+ else
+ mix->streamState = MIX_STREAM_PLAYING;
+ }
+
+ _UNLOCK(&mix->controllock);
+
+#ifdef LPESTUB
+ // Stub timestamping: restart the wall-clock reference on resume.
+ if (MIX_SUCCEEDED(ret))
+ {
+ GTimeVal tval = {0};
+ g_get_current_time(&tval);
+ guint64 ts = 1000ll * tval.tv_sec + tval.tv_usec / 1000;
+ mix->ts_last = ts;
+ }
+#endif
+
+ return ret;
+}
+
+/* Default implementation of MixAudio::set_volume.
+ * Sets the stream volume via SNDRV_SST_SET_VOL, either with the raw
+ * value (MIX_VOL_DECIBELS) or scaled against the firmware maximum
+ * (MIX_VOL_PERCENT). @msecs/@ramptype describe the volume ramp and are
+ * passed through to the driver (mapping TODO below).
+ * Fixes: (1) removed the dead duplicate NULL check that followed the
+ * G_UNLIKELY guard; (2) when the percent->absolute conversion fails
+ * (max-vol lookup error), return that error instead of issuing
+ * SET_VOL with a zero volume and masking the failure with the ioctl
+ * result. Requires CONFIGURED state. */
+MIX_RESULT mix_audio_set_volume_default(MixAudio *mix, gint currvol, MixVolType type, gulong msecs, MixVolRamp ramptype)
+{
+ if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR;
+
+ MIX_RESULT ret = MIX_RESULT_SUCCESS;
+
+ struct snd_sst_vol vol = {0};
+
+ vol.ramp_duration = msecs;
+ vol.ramp_type = ramptype; // TODO: confirm the mappings between Mix and SST.
+
+ if ((type != MIX_VOL_PERCENT) && (type != MIX_VOL_DECIBELS)) return MIX_RESULT_INVALID_PARAM;
+
+ _LOCK(&mix->controllock);
+
+ if (mix->state != MIX_STATE_CONFIGURED) _UNLOCK_RETURN(&mix->controllock, MIX_RESULT_NOT_CONFIGURED);
+
+ vol.stream_id = mix->streamID;
+
+ if (type == MIX_VOL_DECIBELS)
+ {
+ vol.volume = currvol;
+ }
+ else
+ {
+ gint maxvol = 0;
+ ret = mix_audio_get_max_vol(mix, &maxvol);
+
+ if (!maxvol)
+ g_critical("Max Vol is 0!");
+
+ // BUGFIX: bail out if we could not learn the max volume — previously
+ // the code fell through and set the volume to 0 via the ioctl.
+ if (!MIX_SUCCEEDED(ret))
+ _UNLOCK_RETURN(&mix->controllock, ret);
+
+ vol.volume = currvol * maxvol / 100;
+ }
+
+ int retVal = 0;
+
+#ifdef LPESTUB
+ // Not calling
+#else
+ g_debug("calling SNDRV_SST_SET_VOL vol=%d", vol.volume);
+ retVal = ioctl(mix->fileDescriptor, SNDRV_SST_SET_VOL, &vol);
+ g_debug("SNDRV_SST_SET_VOL returned %d", retVal);
+#endif
+
+ if (retVal)
+ {
+ ret = MIX_RESULT_SYSTEM_ERRNO;
+ g_debug("_SET_VOL failed. Error:0x%08x", errno);
+ mix_audio_debug_dump(mix);
+ }
+
+ _UNLOCK(&mix->controllock);
+
+ return ret;
+}
+
+MIX_RESULT mix_audio_FW_INFO(MixAudio *mix)
+{
+  // Query firmware information from the SST driver and cache it in the
+  // file-scope cur_FW_INFO / has_FW_INFO globals (declared elsewhere in
+  // this file). Takes controllock for the duration of the ioctl.
+  if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR;
+
+  MIX_RESULT ret = MIX_RESULT_SUCCESS;
+
+  _LOCK(&mix->controllock);
+
+  // This call always get the fw info.
+  int retVal = 0;
+
+#ifdef LPESTUB
+  // Not calling. retVal stays 0, so the stub build reports success and
+  // marks has_FW_INFO without touching cur_FW_INFO.
+#else
+  g_debug("calling SNDRV_SST_FW_INFO fd=%d", mix->fileDescriptor);
+  retVal = ioctl(mix->fileDescriptor, SNDRV_SST_FW_INFO, &cur_FW_INFO);
+  g_debug("SNDRV_SST_FW_INFO returned %d", retVal);
+#endif
+
+  if (!retVal)
+  {
+    has_FW_INFO = TRUE;
+  }
+  else
+  {
+    ret = MIX_RESULT_SYSTEM_ERRNO;
+    g_debug("_FW_INFO failed. Error:0x%08x", errno);
+    mix_audio_debug_dump(mix);
+  }
+
+  _UNLOCK(&mix->controllock);
+
+  return ret;
+}
+
+
+static MIX_RESULT mix_audio_SST_writev(MixAudio *mix, const MixIOVec *iovin, gint iovincnt, guint64 *insize)
+{
+  // Push compressed data to the SST device with a single writev() call.
+  // On success, *insize (if provided) receives the number of bytes accepted.
+  MIX_RESULT ret = MIX_RESULT_SUCCESS;
+
+  if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR;
+
+/*
+  definition of "struct iovec" used by writev:
+  struct iovec {
+  void *iov_base;
+  size_t iov_len;
+  };
+*/
+
+  size_t total_bytes = 0;
+  // Repack the MixIOVec entries into the struct iovec layout writev() expects.
+  // NOTE: we may want to find a way to avoid this copy. g_alloca() cannot
+  // return NULL, so no allocation check is needed.
+  struct iovec *in = (struct iovec*)g_alloca(sizeof(struct iovec) * iovincnt);
+
+  int i;
+  for (i=0;i<iovincnt;i++)
+  {
+    in[i].iov_base = (void*)iovin[i].data;
+    in[i].iov_len = (size_t)iovin[i].size;
+    total_bytes += in[i].iov_len;
+  }
+
+  ssize_t written = 0;
+
+#ifdef LPESTUB
+  // Stub build writes to a plain file, which rarely blocks. Simulate the
+  // real device's blocking behavior by sleeping for the playback duration
+  // implied by the configured bitrate.
+  gulong wait_time = 0; //wait time in usec.
+  if (MIX_ACP_BITRATE(mix->audioconfigparams) > 0)
+  {
+    wait_time = total_bytes*8*1000*1000/MIX_ACP_BITRATE(mix->audioconfigparams);
+    // g_debug("To wait %lu usec for writev() to simulate blocking\n", wait_time);
+  }
+  GTimer *timer = g_timer_new();
+  g_timer_start(timer);
+
+  g_debug("calling writev(fd=%d)", mix->fileDescriptor);
+  written = writev(mix->fileDescriptor, in, iovincnt);
+  if (written >= 0) mix->bytes_written += written;
+  g_debug("writev() returned %" G_GSSIZE_FORMAT ". Total %" G_GUINT64_FORMAT, written, mix->bytes_written);
+  /* Now since writing to file rarely block, we put timestamp there to block.*/
+  g_timer_stop(timer);
+  gulong elapsed = 0;
+  g_timer_elapsed(timer, &elapsed);
+  g_timer_destroy(timer);
+  // g_debug("writev() returned in %lu usec\n", elapsed);
+  if ((MIX_ACP_BITRATE(mix->audioconfigparams) > 0) && (wait_time > elapsed))
+  {
+    wait_time -= elapsed;
+    g_usleep(wait_time);
+  }
+#else
+  // %d on size_t/ssize_t is undefined behavior on LP64; use glib's
+  // portable format macros instead.
+  g_debug("calling writev(fd=%d) with %" G_GSIZE_FORMAT, mix->fileDescriptor, total_bytes);
+  written = writev(mix->fileDescriptor, in, iovincnt);
+  if (written > 0) mix->bytes_written += written;
+  g_debug("writev() returned %" G_GSSIZE_FORMAT ". Total %" G_GUINT64_FORMAT, written, mix->bytes_written);
+#endif
+
+  if (written < 0)
+  {
+    ret = MIX_RESULT_SYSTEM_ERRNO;
+    g_debug("writev() failed. Error:0x%08x", errno);
+  }
+  else
+  {
+    // guarantee written is non-negative before sign extending it.
+    if (insize) *insize = (guint64)written;
+    if ((size_t)written != total_bytes)
+    {
+      g_warning("writev() wrote only %" G_GSSIZE_FORMAT " out of %" G_GSIZE_FORMAT, written, total_bytes);
+    }
+  }
+
+  return ret;
+}
+
+static MIX_RESULT mix_audio_SST_STREAM_DECODE(MixAudio *mix, const MixIOVec *iovin, gint iovincnt, guint64 *insize, MixIOVec *iovout, gint iovoutcnt, guint64 *outsize)
+{
+  // Decode-return path: hand the input buffers to the SST driver and receive
+  // decoded data back in the caller-supplied output buffers. On success,
+  // *insize / *outsize report the bytes consumed / produced by the driver.
+  MIX_RESULT ret = MIX_RESULT_SUCCESS;
+  int retVal = 0;
+
+  if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR;
+
+  // Output buffers are mandatory in this mode; direct-render goes through
+  // mix_audio_SST_writev() instead.
+  if ((iovout == NULL) || (iovoutcnt <= 0))
+  {
+    g_critical("Wrong mode. Please report a bug...");
+    return MIX_RESULT_NULL_PTR;
+  }
+
+  g_message("Input entries=%d. Output entries=%d", iovincnt, iovoutcnt);
+
+  struct snd_sst_buff_entry *ientries = NULL;
+  struct snd_sst_buff_entry *oentries = NULL;
+
+  // Repack the MixIOVec arrays into the driver's buffer-entry layout.
+  ientries = (struct snd_sst_buff_entry*)g_alloca(sizeof(struct snd_sst_buff_entry) * iovincnt);
+  oentries = (struct snd_sst_buff_entry*)g_alloca(sizeof(struct snd_sst_buff_entry) * iovoutcnt);
+
+  if (!ientries || !oentries) return MIX_RESULT_NO_MEMORY;
+
+  struct snd_sst_dbufs dbufs = {0};
+
+  struct snd_sst_buffs ibuf = {0};
+  struct snd_sst_buffs obuf = {0};
+
+  ibuf.entries = iovincnt;
+  ibuf.type = SST_BUF_USER;
+  ibuf.buff_entry = ientries;
+
+  obuf.entries = iovoutcnt;
+  obuf.type = SST_BUF_USER;
+  obuf.buff_entry = oentries;
+
+  dbufs.ibufs = &ibuf;
+  dbufs.obufs = &obuf;
+
+  int i = 0;
+  for (i=0;i<iovincnt;i++)
+  {
+    ientries[i].size = (unsigned long)iovin[i].size;
+    ientries[i].buffer = (void *)iovin[i].data;
+    g_debug("Creating in entry#%d, size=%u", i, ientries[i].size);
+  }
+
+  for (i=0;i<iovoutcnt;i++)
+  {
+    oentries[i].size = (unsigned long)iovout[i].size;
+    oentries[i].buffer = (void *)iovout[i].data;
+    g_debug("Creating out entry#%d, size=%u", i, oentries[i].size);
+  }
+
+#ifdef LPESTUB
+  // Stub build: emulate STREAM_DECODE with a plain writev() to the backing
+  // file. When more than one input buffer is supplied, only half of the
+  // last buffer is submitted — presumably to exercise callers' handling of
+  // partial input consumption. TODO confirm this intent.
+  size_t total_bytes = 0;
+  // NOTE: we may want to find a way to avoid this copy.
+  struct iovec *in = (struct iovec*)g_alloca(sizeof(struct iovec) * iovincnt);
+  if (iovincnt>1)
+  {
+    for (i=0;i<iovincnt-1;i++)
+    {
+      in[i].iov_base = (void*)iovin[i].data;
+      in[i].iov_len = (size_t)iovin[i].size;
+      total_bytes += in[i].iov_len;
+    }
+    // i == iovincnt-1 here: submit only half of the final buffer.
+    in[i].iov_base = (void*)iovin[i].data;
+    in[i].iov_len = (size_t)iovin[i].size/2;
+    total_bytes += in[i].iov_len;
+  }
+  else
+  {
+    for (i=0;i<iovincnt;i++)
+    {
+      in[i].iov_base = (void*)iovin[i].data;
+      in[i].iov_len = (size_t)iovin[i].size;
+      total_bytes += in[i].iov_len;
+    }
+  }
+  ssize_t written = 0;
+
+  g_debug("calling stub STREAM_DECODE (writev) (fd=%d)", mix->fileDescriptor);
+  written = writev(mix->fileDescriptor, in, iovincnt);
+  // NOTE(review): retVal is never set in the stub branch, so a writev()
+  // failure here is still reported as success with 0 bytes consumed and
+  // produced — confirm this is intended.
+  if (written >= 0)
+  {
+    mix->bytes_written += written;
+    dbufs.output_bytes_produced = written;
+    dbufs.input_bytes_consumed = written;
+  }
+  g_debug("stub STREAM_DECODE (writev) returned %d. Total %" G_GUINT64_FORMAT, written, mix->bytes_written);
+#else
+  g_debug("calling SNDRV_SST_STREAM_DECODE fd=%d", mix->fileDescriptor);
+  retVal = ioctl(mix->fileDescriptor, SNDRV_SST_STREAM_DECODE, &dbufs);
+  g_debug("SNDRV_SST_STREAM_DECODE returned %d", retVal);
+#endif
+
+  if (retVal)
+  {
+    ret = MIX_RESULT_SYSTEM_ERRNO;
+    g_debug("_STREAM_DECODE failed. Error:0x%08x", errno);
+    mix_audio_debug_dump(mix);
+  }
+  else
+  {
+    // Report the driver's accounting back to the caller.
+    if (insize) *insize = dbufs.input_bytes_consumed;
+    if (outsize) *outsize = dbufs.output_bytes_produced;
+    g_message("consumed=%" G_GUINT64_FORMAT " produced=%" G_GUINT64_FORMAT, dbufs.input_bytes_consumed, dbufs.output_bytes_produced);
+  }
+
+  return ret;
+}
+
+// Starting interface
+//MIX_RESULT mix_audio_get_version(guint* major, guint *minor);
+
+MIX_RESULT mix_audio_initialize(MixAudio *mix, MixCodecMode mode, MixAudioInitParams *aip, MixDrmParams *drminitparams)
+{
+  // Public entry point: dispatch initialization to the subclass vtable.
+  if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR;
+
+  MixAudioClass *klass = MIX_AUDIO_GET_CLASS(mix);
+
+  mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_VERBOSE, "mix_audio_initialize\n");
+
+  if (!klass->initialize)
+    return MIX_RESULT_FAIL; // TODO: add more descriptive error
+
+#ifdef AUDIO_MANAGER
+  // Best-effort dbus connection for the audio manager: failure is logged
+  // but deliberately not treated as fatal (see commented-out return below).
+  if (dbus_init() < 0) {
+    mix_log(MIX_AUDIO_COMP, MIX_LOG_LEVEL_ERROR, "Failed to connect to dbus\n");
+// commented out, gracefully exit right now
+// return MIX_RESULT_FAIL; // TODO: add more descriptive error
+  }
+#endif
+
+  return klass->initialize(mix, mode, aip, drminitparams);
+}
+
+MIX_RESULT mix_audio_configure(MixAudio *mix, MixAudioConfigParams *audioconfigparams, MixDrmParams *drmparams)
+{
+  /* Vtable dispatcher: forward to the subclass implementation, if any. */
+  if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR;
+
+  MixAudioClass *k = MIX_AUDIO_GET_CLASS(mix);
+  return k->configure ? k->configure(mix, audioconfigparams, drmparams) : MIX_RESULT_FAIL;
+}
+
+MIX_RESULT mix_audio_decode(MixAudio *mix, const MixIOVec *iovin, gint iovincnt, guint64 *insize, MixIOVec *iovout, gint iovoutcnt, guint64 *outsize)
+{
+  /* Vtable dispatcher: forward to the subclass implementation, if any. */
+  if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR;
+
+  MixAudioClass *k = MIX_AUDIO_GET_CLASS(mix);
+  return k->decode ? k->decode(mix, iovin, iovincnt, insize, iovout, iovoutcnt, outsize) : MIX_RESULT_FAIL;
+}
+
+MIX_RESULT mix_audio_capture_encode(MixAudio *mix, MixIOVec *iovout, gint iovoutcnt)
+{
+  /* Vtable dispatcher: forward to the subclass implementation, if any. */
+  if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR;
+
+  MixAudioClass *k = MIX_AUDIO_GET_CLASS(mix);
+  return k->capture_encode ? k->capture_encode(mix, iovout, iovoutcnt) : MIX_RESULT_FAIL;
+}
+
+MIX_RESULT mix_audio_start(MixAudio *mix)
+{
+  /* Vtable dispatcher: forward to the subclass implementation, if any. */
+  if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR;
+
+  MixAudioClass *k = MIX_AUDIO_GET_CLASS(mix);
+  return k->start ? k->start(mix) : MIX_RESULT_FAIL;
+}
+
+MIX_RESULT mix_audio_stop_drop(MixAudio *mix)
+{
+  /* Vtable dispatcher: forward to the subclass implementation, if any. */
+  if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR;
+
+  MixAudioClass *k = MIX_AUDIO_GET_CLASS(mix);
+  return k->stop_drop ? k->stop_drop(mix) : MIX_RESULT_FAIL;
+}
+
+MIX_RESULT mix_audio_stop_drain(MixAudio *mix)
+{
+  /* Vtable dispatcher: forward to the subclass implementation, if any. */
+  if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR;
+
+  MixAudioClass *k = MIX_AUDIO_GET_CLASS(mix);
+  return k->stop_drain ? k->stop_drain(mix) : MIX_RESULT_FAIL;
+}
+
+MIX_RESULT mix_audio_pause(MixAudio *mix)
+{
+  /* Vtable dispatcher: forward to the subclass implementation, if any. */
+  if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR;
+
+  MixAudioClass *k = MIX_AUDIO_GET_CLASS(mix);
+  return k->pause ? k->pause(mix) : MIX_RESULT_FAIL;
+}
+
+MIX_RESULT mix_audio_resume(MixAudio *mix)
+{
+  /* Vtable dispatcher: forward to the subclass implementation, if any. */
+  if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR;
+
+  MixAudioClass *k = MIX_AUDIO_GET_CLASS(mix);
+  return k->resume ? k->resume(mix) : MIX_RESULT_FAIL;
+}
+
+MIX_RESULT mix_audio_get_timestamp(MixAudio *mix, guint64 *msecs)
+{
+  /* Vtable dispatcher: forward to the subclass implementation, if any. */
+  if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR;
+
+  MixAudioClass *k = MIX_AUDIO_GET_CLASS(mix);
+  return k->get_timestamp ? k->get_timestamp(mix, msecs) : MIX_RESULT_FAIL;
+}
+
+MIX_RESULT mix_audio_get_mute(MixAudio *mix, gboolean* muted)
+{
+  /* Vtable dispatcher: forward to the subclass implementation, if any. */
+  if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR;
+
+  MixAudioClass *k = MIX_AUDIO_GET_CLASS(mix);
+  return k->get_mute ? k->get_mute(mix, muted) : MIX_RESULT_FAIL;
+}
+
+MIX_RESULT mix_audio_set_mute(MixAudio *mix, gboolean mute)
+{
+  /* Vtable dispatcher: forward to the subclass implementation, if any. */
+  if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR;
+
+  MixAudioClass *k = MIX_AUDIO_GET_CLASS(mix);
+  return k->set_mute ? k->set_mute(mix, mute) : MIX_RESULT_FAIL;
+}
+
+MIX_RESULT mix_audio_get_max_vol(MixAudio *mix, gint *maxvol)
+{
+  /* Vtable dispatcher: forward to the subclass implementation, if any. */
+  if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR;
+
+  MixAudioClass *k = MIX_AUDIO_GET_CLASS(mix);
+  return k->get_max_vol ? k->get_max_vol(mix, maxvol) : MIX_RESULT_FAIL;
+}
+
+MIX_RESULT mix_audio_get_min_vol(MixAudio *mix, gint *minvol)
+{
+  /* Vtable dispatcher: forward to the subclass implementation, if any. */
+  if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR;
+
+  MixAudioClass *k = MIX_AUDIO_GET_CLASS(mix);
+  return k->get_min_vol ? k->get_min_vol(mix, minvol) : MIX_RESULT_FAIL;
+}
+
+MIX_RESULT mix_audio_get_volume(MixAudio *mix, gint *currvol, MixVolType type)
+{
+  /* Vtable dispatcher: forward to the subclass implementation, if any. */
+  if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR;
+
+  MixAudioClass *k = MIX_AUDIO_GET_CLASS(mix);
+  return k->get_volume ? k->get_volume(mix, currvol, type) : MIX_RESULT_FAIL;
+}
+
+MIX_RESULT mix_audio_set_volume(MixAudio *mix, gint currvol, MixVolType type, gulong msecs, MixVolRamp ramptype)
+{
+  /* Vtable dispatcher: forward to the subclass implementation, if any. */
+  if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR;
+
+  MixAudioClass *k = MIX_AUDIO_GET_CLASS(mix);
+  return k->set_volume ? k->set_volume(mix, currvol, type, msecs, ramptype) : MIX_RESULT_FAIL;
+}
+
+MIX_RESULT mix_audio_deinitialize(MixAudio *mix)
+{
+  /* Vtable dispatcher: forward to the subclass implementation, if any. */
+  if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR;
+
+  MixAudioClass *k = MIX_AUDIO_GET_CLASS(mix);
+  return k->deinitialize ? k->deinitialize(mix) : MIX_RESULT_FAIL;
+}
+
+MIX_RESULT mix_audio_get_stream_state(MixAudio *mix, MixStreamState *streamState)
+{
+  /* Vtable dispatcher: forward to the subclass implementation, if any. */
+  if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR;
+
+  MixAudioClass *k = MIX_AUDIO_GET_CLASS(mix);
+  return k->get_stream_state ? k->get_stream_state(mix, streamState) : MIX_RESULT_FAIL;
+}
+
+MIX_RESULT mix_audio_get_state(MixAudio *mix, MixState *state)
+{
+  /* Vtable dispatcher: forward to the subclass implementation, if any. */
+  if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR;
+
+  MixAudioClass *k = MIX_AUDIO_GET_CLASS(mix);
+  return k->get_state ? k->get_state(mix, state) : MIX_RESULT_FAIL;
+}
+
+MIX_RESULT mix_audio_is_am_available_default(MixAudio *mix, MixAudioManager am, gboolean *avail)
+{
+  /* Default implementation: the audio manager is never available. */
+  if (!avail) return MIX_RESULT_NULL_PTR;
+
+  *avail = FALSE;
+  return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_audio_is_am_available(MixAudio *mix, MixAudioManager am, gboolean *avail)
+{
+  // Vtable dispatcher: query whether the given audio manager is available.
+  // Guard against a NULL object before MIX_AUDIO_GET_CLASS dereferences it;
+  // every sibling wrapper performs this check, but it was missing here.
+  if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR;
+
+  MixAudioClass *klass = MIX_AUDIO_GET_CLASS(mix);
+
+  if (!klass->is_am_available)
+    return MIX_RESULT_FAIL;
+
+  return klass->is_am_available(mix, am, avail);
+}
+
+const gchar* dbgstr_UNKNOWN="UNKNOWN";
+
+static const gchar* _mix_stream_state_get_name (MixStreamState s)
+{
+  /* Map a MixStreamState value to its symbolic name for debug logging. */
+  static const gchar *names[] = {
+    "MIX_STREAM_NULL",
+    "MIX_STREAM_STOPPED",
+    "MIX_STREAM_PLAYING",
+    "MIX_STREAM_PAUSED",
+    "MIX_STREAM_DRAINING",
+    "MIX_STREAM_PAUSED_DRAINING",
+    "MIX_STREAM_INTERNAL_LAST"
+  };
+
+  return (s < G_N_ELEMENTS(names)) ? names[s] : dbgstr_UNKNOWN;
+}
+
+static const gchar* _mix_state_get_name(MixState s)
+{
+  /* Map a MixState value to its symbolic name for debug logging. */
+  static const gchar *names[] = {
+    "MIX_STATE_NULL",
+    "MIX_STATE_UNINITIALIZED",
+    "MIX_STATE_INITIALIZED",
+    "MIX_STATE_CONFIGURED",
+    "MIX_STATE_LAST"
+  };
+
+  return (s < G_N_ELEMENTS(names)) ? names[s] : dbgstr_UNKNOWN;
+}
+
+static const gchar* _mix_codec_mode_get_name(MixCodecMode s)
+{
+  /* Map a MixCodecMode value to its symbolic name for debug logging. */
+  static const gchar *names[] = {
+    "MIX_CODING_INVALID",
+    "MIX_CODING_ENCODE",
+    "MIX_CODING_DECODE",
+    "MIX_CODING_LAST"
+  };
+
+  return (s < G_N_ELEMENTS(names)) ? names[s] : dbgstr_UNKNOWN;
+}
+
+static const gchar* _mix_device_state_get_name(MixDeviceState s)
+{
+  /* Map a MixDeviceState value to its symbolic name for debug logging. */
+  static const gchar *names[] = {
+    "MIX_AUDIO_DEV_CLOSED",
+    "MIX_AUDIO_DEV_OPENED",
+    "MIX_AUDIO_DEV_ALLOCATED"
+  };
+
+  return (s < G_N_ELEMENTS(names)) ? names[s] : dbgstr_UNKNOWN;
+}
+
+void mix_audio_debug_dump(MixAudio *mix)
+{
+  // Dump the object's state to the debug log. Called from the error paths
+  // of the ioctl wrappers above; safe to call with an invalid object.
+  const gchar* prefix="MixAudio:";
+
+  if (!MIX_IS_AUDIO(mix))
+  {
+    g_debug("%s Not a valid MixAudio object.", prefix);
+    return;
+  }
+
+  g_debug("%s streamState(%s)", prefix, _mix_stream_state_get_name(mix->streamState));
+  g_debug("%s encoding(%s)", prefix, mix->encoding?mix->encoding:dbgstr_UNKNOWN);
+  g_debug("%s fileDescriptor(%d)", prefix, mix->fileDescriptor);
+  g_debug("%s state(%s)", prefix, _mix_state_get_name(mix->state));
+  g_debug("%s codecMode(%s)", prefix, _mix_codec_mode_get_name(mix->codecMode));
+
+  // Private members
+  g_debug("%s streamID(%d)", prefix, mix->streamID);
+  //GStaticRecMutex streamlock; // lock that must be acquired to invoke stream method.
+  //GStaticRecMutex controllock; // lock that must be acquired to call control function.
+  if (MIX_IS_AUDIOCONFIGPARAMS(mix->audioconfigparams))
+  {
+    // TODO: print audioconfigparams
+  }
+  else
+  {
+    g_debug("%s audioconfigparams(NULL)", prefix);
+  }
+
+  g_debug("%s deviceState(%s)", prefix, _mix_device_state_get_name(mix->deviceState));
+
+  g_debug("%s ts_last(%" G_GUINT64_FORMAT ")", prefix, mix->ts_last);
+  g_debug("%s ts_elapsed(%" G_GUINT64_FORMAT ")", prefix, mix->ts_elapsed);
+  g_debug("%s bytes_written(%" G_GUINT64_FORMAT ")", prefix, mix->bytes_written);
+
+  return;
+}
+
+MIX_RESULT mix_audio_get_output_configuration(MixAudio *mix, MixAudioConfigParams **audioconfigparams)
+{
+  /* Vtable dispatcher: forward to the subclass implementation, if any. */
+  if (G_UNLIKELY(!mix)) return MIX_RESULT_NULL_PTR;
+
+  MixAudioClass *k = MIX_AUDIO_GET_CLASS(mix);
+  return k->get_output_configuration ? k->get_output_configuration(mix, audioconfigparams) : MIX_RESULT_FAIL;
+}
+
+MIX_RESULT mix_audio_get_output_configuration_default(MixAudio *mix, MixAudioConfigParams **audioconfigparams)
+{
+  // Read the stream parameters back from the SST driver and convert them
+  // into a newly allocated MixAudioConfigParams, stored in *audioconfigparams.
+  // Ownership of the returned object passes to the caller.
+  MIX_RESULT ret = MIX_RESULT_SUCCESS;
+  struct snd_sst_get_stream_params stream_params = {{0}};
+  MixAudioConfigParams *p = NULL;
+  int retVal = 0;
+
+  if (G_UNLIKELY(!mix || !audioconfigparams)) return MIX_RESULT_NULL_PTR;
+
+  _LOCK(&mix->controllock);
+
+  if (mix->state <= MIX_STATE_UNINITIALIZED) _UNLOCK_RETURN(&mix->controllock, MIX_RESULT_NOT_INIT);
+
+#ifdef LPESTUB
+  // Stub build: retVal stays 0, so the zero-initialized stream_params are
+  // converted below as if the driver returned them.
+#else
+  // Check only if we are initialized.
+  g_debug("Calling SNDRV_SST_STREAM_GET_PARAMS. fd=%d", mix->fileDescriptor);
+  retVal = ioctl(mix->fileDescriptor, SNDRV_SST_STREAM_GET_PARAMS, &stream_params);
+  g_debug("_GET_PARAMS returned %d", retVal);
+#endif
+
+  _UNLOCK(&mix->controllock);
+
+  if (retVal)
+  {
+    ret = MIX_RESULT_SYSTEM_ERRNO;
+    g_debug("Failed to GET_PARAMS. errno:0x%08x. %s\n", errno, strerror(errno));
+  }
+  else
+  {
+    // NOTE(review): mix_sst_params_to_acp() may conceivably return NULL;
+    // the result is stored unchecked while still reporting success — confirm
+    // callers handle a NULL *audioconfigparams.
+    p = mix_sst_params_to_acp(&stream_params);
+    *audioconfigparams = p;
+  }
+
+  return ret;
+}
+
+MIX_RESULT mix_audio_get_stream_byte_decoded(MixAudio *mix, guint64 *byte)
+{
+  // Not implemented: decoded-byte-count reporting is unsupported by this backend.
+  return MIX_RESULT_NOT_SUPPORTED;
+}
+
diff --git a/mix_audio/src/mixaudio.h b/mix_audio/src/mixaudio.h
new file mode 100644
index 0000000..a3cef5a
--- /dev/null
+++ b/mix_audio/src/mixaudio.h
@@ -0,0 +1,574 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+*/
+
+#ifndef __MIX_AUDIO_H__
+#define __MIX_AUDIO_H__
+
+#include <glib-object.h>
+#include "mixacp.h"
+#include "mixaip.h"
+#include "mixdrmparams.h"
+#include "mixresult.h"
+#include "mixaudiotypes.h"
+
+/*
+ * Type macros.
+ */
+#define MIX_TYPE_AUDIO (mix_audio_get_type ())
+#define MIX_AUDIO(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_AUDIO, MixAudio))
+#define MIX_IS_AUDIO(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_AUDIO))
+#define MIX_AUDIO_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_AUDIO, MixAudioClass))
+#define MIX_IS_AUDIO_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_AUDIO))
+#define MIX_AUDIO_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_AUDIO, MixAudioClass))
+
+typedef struct _MixAudio MixAudio;
+typedef struct _MixAudioClass MixAudioClass;
+
+/**
+ * MixStreamState:
+ * @MIX_STREAM_NULL: Stream is not allocated.
+ * @MIX_STREAM_STOPPED: Stream is at STOP state. This is the only state DNR is allowed.
+ * @MIX_STREAM_PLAYING: Stream is at Playing state.
+ * @MIX_STREAM_PAUSED: Stream is Paused.
+ * @MIX_STREAM_DRAINING: Stream is draining -- remaining of the buffer in the device are playing. This state is special due to the limitation that no other control operations are allowed at this state. Stream will become @MIX_STREAM_STOPPED automatically when this data draining has completed.
+ * @MIX_STREAM_LAST: Last index in the enumeration.
+ *
+ * Stream State during Decode and Render or Encode mode. These states do not apply to Decode and Return mode.
+ *
+ * NOTE(review): the debug name table in mixaudio.c also lists
+ * MIX_STREAM_PAUSED_DRAINING, which is not present in this public enum —
+ * confirm the internal enum used by the implementation stays in sync.
+ */
+typedef enum {
+  MIX_STREAM_NULL=0,
+  MIX_STREAM_STOPPED,
+  MIX_STREAM_PLAYING,
+  MIX_STREAM_PAUSED,
+  MIX_STREAM_DRAINING,
+  MIX_STREAM_LAST
+} MixStreamState;
+
+/**
+ * MixState:
+ * @MIX_STATE_NULL: State not set (initial value).
+ * @MIX_STATE_UNINITIALIZED: MIX is not initialized.
+ * @MIX_STATE_INITIALIZED: MIX is initialized.
+ * @MIX_STATE_CONFIGURED: MIX is configured successfully.
+ * @MIX_STATE_LAST: Last index in the enumeration.
+ *
+ * The various states the device can be in.
+ */
+typedef enum {
+  MIX_STATE_NULL=0,
+  MIX_STATE_UNINITIALIZED,
+  MIX_STATE_INITIALIZED,
+  MIX_STATE_CONFIGURED,
+  MIX_STATE_LAST
+} MixState;
+
+/**
+ * MixCodecMode:
+ * @MIX_CODING_INVALID: Indicates device uninitialized for any mode.
+ * @MIX_CODING_ENCODE: Indicates device is opened for encoding.
+ * @MIX_CODING_DECODE: Indicates device is opened for decoding.
+ * @MIX_CODING_LAST: Last index in the enumeration.
+ *
+ * Mode where device is operating on. See mix_audio_initialize().
+ */
+typedef enum {
+  MIX_CODING_INVALID=0,
+  MIX_CODING_ENCODE,
+  MIX_CODING_DECODE,
+  MIX_CODING_LAST
+} MixCodecMode;
+
+/**
+ * MixVolType:
+ * @MIX_VOL_PERCENT: volume is expressed in percentage.
+ * @MIX_VOL_DECIBELS: volume is expressed in decibel.
+ * @MIX_VOL_LAST: last entry.
+ *
+ * See mix_audio_get_volume() and mix_audio_set_volume().
+ */
+typedef enum {
+  MIX_VOL_PERCENT=0,
+  MIX_VOL_DECIBELS,
+  MIX_VOL_LAST
+} MixVolType;
+
+/**
+ * MixVolRamp:
+ * @MIX_RAMP_LINEAR: volume ramp is linear.
+ * @MIX_RAMP_EXPONENTIAL: volume ramp is exponential.
+ * @MIX_RAMP_LAST: last entry.
+ *
+ * Volume ramp shape. See mix_audio_set_volume().
+ */
+typedef enum
+{
+  MIX_RAMP_LINEAR = 0,
+  MIX_RAMP_EXPONENTIAL,
+  MIX_RAMP_LAST
+} MixVolRamp;
+
+/**
+ * MixIOVec:
+ * @data: data pointer
+ * @size: size of buffer in @data
+ *
+ * Scatter-gather style structure. To be used by mix_audio_decode() method for input and output buffer,
+ * and by mix_audio_capture_encode() for output buffers.
+ */
+typedef struct {
+  guchar *data;
+  gint size;
+} MixIOVec;
+
+/**
+ * MixDeviceState:
+ * @MIX_AUDIO_DEV_CLOSED: Device is closed (original docs say TBD — presumably no file descriptor is held; confirm).
+ * @MIX_AUDIO_DEV_OPENED: Device is opened (original docs say TBD — presumably the device file is open; confirm).
+ * @MIX_AUDIO_DEV_ALLOCATED: Device resources are allocated (original docs say TBD; confirm).
+ *
+ * Device state.
+ */
+typedef enum {
+  MIX_AUDIO_DEV_CLOSED=0,
+  MIX_AUDIO_DEV_OPENED,
+  MIX_AUDIO_DEV_ALLOCATED
+} MixDeviceState;
+
+/**
+ * MixAudioClass:
+ * @parent_class: Parent class;
+ *
+ * MI-X Audio object class. Each virtual method may be left NULL by a
+ * subclass; the mix_audio_*() wrappers return MIX_RESULT_FAIL for
+ * unimplemented slots.
+ */
+struct _MixAudioClass
+{
+  /*< public >*/
+  GObjectClass parent_class;
+
+  /*< virtual public >*/
+  MIX_RESULT (*initialize) (MixAudio *mix, MixCodecMode mode, MixAudioInitParams *aip, MixDrmParams *drminitparams);
+  MIX_RESULT (*configure) (MixAudio *mix, MixAudioConfigParams *audioconfigparams, MixDrmParams *drmparams);
+  MIX_RESULT (*decode) (MixAudio *mix, const MixIOVec *iovin, gint iovincnt, guint64 *insize, MixIOVec *iovout, gint iovoutcnt, guint64 *outsize);
+  MIX_RESULT (*capture_encode) (MixAudio *mix, MixIOVec *iovout, gint iovoutcnt);
+  MIX_RESULT (*start) (MixAudio *mix);
+  MIX_RESULT (*stop_drop) (MixAudio *mix);
+  MIX_RESULT (*stop_drain) (MixAudio *mix);
+  MIX_RESULT (*pause) (MixAudio *mix);
+  MIX_RESULT (*resume) (MixAudio *mix);
+  MIX_RESULT (*get_timestamp) (MixAudio *mix, guint64 *msecs);
+  MIX_RESULT (*set_mute) (MixAudio *mix, gboolean mute);
+  MIX_RESULT (*get_mute) (MixAudio *mix, gboolean* muted);
+  MIX_RESULT (*get_max_vol) (MixAudio *mix, gint *maxvol);
+  MIX_RESULT (*get_min_vol) (MixAudio *mix, gint *minvol);
+  MIX_RESULT (*get_volume) (MixAudio *mix, gint *currvol, MixVolType type);
+  MIX_RESULT (*set_volume) (MixAudio *mix, gint currvol, MixVolType type, gulong msecs, MixVolRamp ramptype);
+  MIX_RESULT (*deinitialize) (MixAudio *mix);
+  MIX_RESULT (*get_stream_state) (MixAudio *mix, MixStreamState *streamState);
+  MIX_RESULT (*get_state) (MixAudio *mix, MixState *state);
+  MIX_RESULT (*is_am_available) (MixAudio *mix, MixAudioManager am, gboolean *avail);
+  MIX_RESULT (*get_output_configuration) (MixAudio *mix, MixAudioConfigParams **audioconfigparams);
+};
+
+/**
+ * MixAudio:
+ * @parent: Parent object.
+ * @streamState: Current state of the stream
+ * @fileDescriptor: File Descriptor to the opened device.
+ * @state: State of the current #MixAudio session.
+ * @codecMode: Current codec mode of the session.
+ * @useIAM: Is current stream configured to use Intel Audio Manager.
+ * @encoding: <emphasis>Not Used.</emphasis>
+ *
+ * MI-X Audio object
+ *
+ * NOTE(review): the original doc block documented a @decodeMode member that
+ * does not exist in this struct; it has been removed from the docs.
+ */
+struct _MixAudio
+{
+  /*< public >*/
+  GObject parent;
+
+  /*< public >*/
+
+  /*< private >*/
+  MixStreamState streamState;
+  gchar *encoding;
+  MixState state;
+  MixCodecMode codecMode;
+  gboolean useIAM;
+  int fileDescriptor;
+  gint streamID;
+  guint32 amStreamID;
+  GStaticRecMutex streamlock; // lock that must be acquired to invoke stream method.
+  GStaticRecMutex controllock; // lock that must be acquired to call control function.
+  MixAudioConfigParams *audioconfigparams;
+  gboolean am_registered;
+  MixDeviceState deviceState;
+
+  // Bookkeeping used by the timestamp and writev paths.
+  guint64 ts_last;
+  guint64 ts_elapsed;
+  guint64 bytes_written;
+};
+
+/**
+ * mix_audio_get_type:
+ * @returns: type
+ *
+ * Get the type of object.
+ */
+GType mix_audio_get_type (void);
+
+/**
+ * mix_audio_new:
+ * @returns: A newly allocated instance of #MixAudio
+ *
+ * Use this method to create new instance of #MixAudio
+ */
+MixAudio *mix_audio_new(void);
+
+/**
+ * mix_audio_ref:
+ * @mix: object to add reference
+ * @returns: the MixAudio instance where reference count has been increased.
+ *
+ * Add reference count.
+ */
+MixAudio *mix_audio_ref(MixAudio *mix);
+
+/**
+ * mix_audio_unref:
+ * @obj: object to unref.
+ *
+ * Decrement reference count of the object.
+ */
+#define mix_audio_unref(obj) g_object_unref (G_OBJECT(obj))
+
+/* Class Methods */
+
+/**
+ * mix_audio_get_version:
+ * @returns: #MIX_RESULT_SUCCESS
+ *
+ * Returns the version of the MI-X library.
+ *
+ */
+MIX_RESULT mix_audio_get_version(guint* major, guint *minor);
+
+/**
+ * mix_audio_initialize:
+ * @mix: #MixAudio object.
+ * @mode: Requested #MixCodecMode.
+ * @aip: Audio initialization parameters.
+ * @drminitparams: <emphasis>Optional.</emphasis> DRM initialization param if applicable.
+ * @returns: #MIX_RESULT_SUCCESS on successful initialization. #MIX_RESULT_ALREADY_INIT if session is already initialized.
+ *
+ * This function will initialize an encode or decode session with this #MixAudio instance. During this call, the device will be opened. If the device is not available, an error is returned to the caller so that an alternative (e.g. software decoding) can be configured instead. Use mix_audio_deinitialize() to close the device.
+ *
+ * A previous initialized session must be de-initialized using mix_audio_deinitialize() before it can be initialized again.
+ */
+MIX_RESULT mix_audio_initialize(MixAudio *mix, MixCodecMode mode, MixAudioInitParams *aip, MixDrmParams *drminitparams);
+
+/**
+ * mix_audio_configure:
+ * @mix: #MixAudio object.
+ * @audioconfigparams: a #MixAudioConfigParams derived object containing information for the specific stream type.
+ * @drmparams: <emphasis>Optional.</emphasis> DRM initialization param if applicable.
+ * @returns: Result indicates successful or not.
+ *
+ * This function can be used to configure a stream for the current session. The caller can use this function to do the following:
+ *
+ * <itemizedlist>
+ * <listitem>Choose decoding mode (direct-render or decode-return)</listitem>
+ * <listitem>Provide DRM parameters (using DRMparams object)</listitem>
+ * <listitem>Provide stream parameters (using STRMparams objects)</listitem>
+ * <listitem>Provide a stream name for the Intel Smart Sound Technology stream</listitem>
+ * </itemizedlist>
+ *
+ * SST stream parameters will be set during this call, and stream resources allocated in SST.
+ *
+ * <note>
+ * <title>Intel Audio Manager support:</title>
+ * <para>If Intel Audio Manager support is enabled, and if @mode is specified to #MIX_DECODE_DIRECTRENDER, the SST stream will be registered with Intel Audio Manager in the context of this call, using the stream name provided in @streamname. Application will receive a notification from Intel Audio Manager that the stream has been created during or soon after this call. The application should be ready to handle either possibility. A stream ID (associated with the stream name) will be provided by Intel Audio Manager which will be used for subsequent notifications from Intel Audio Manager or calls to Intel Audio Manager for muting, pause and resume. See mix_audio_getstreamid()</para>
+ * <para>If a stream is already registered with Intel Audio Manager, application must pass the same @streamname argument to retain the session. Otherwise, the existing stream will be unregistered and a new stream will be registered with the new @streamname.
+ * </para>
+ * </note>
+ *
+ * If @mode is specified to #MIX_DECODE_DIRECTRENDER but direct-render mode is not available (due to end user use of alternative output device), an error indication will be returned to the caller so that an alternate pipeline configuration can be created (e.g. including a Pulse Audio sink, and support for output buffers). In this case, the caller will need to call mix_audio_configure() again to with @mode specify as #MIX_DECODE_DECODERETURN to request decode-return mode.
+ *
+ * This method can be called multiple times if reconfiguration of the stream is needed. However, this method must be called when the stream is in #MIX_STREAM_STOPPED state.
+ *
+ */
+MIX_RESULT mix_audio_configure(MixAudio *mix, MixAudioConfigParams *audioconfigparams, MixDrmParams *drmparams);
+
+/**
+ * mix_audio_decode:
+ * @mix: #MixAudio object.
+ * @iovin: a pointer to an array of #MixIOVec structure that contains the input buffers
+ * @iovincnt: the number of entry in the @iovin array
+ * @iovout: a pointer to an array of #MixIOVec structures that represent the output buffer. During input, each size in the #MixIOVec array represents the available buffer size pointed to by data. Upon return, each size value will be updated to reflect how much data has been filled. This parameter is ignored if stream is configured to #MIX_DECODE_DIRECTRENDER. See mix_audio_configure() for more detail.
+ * @iovoutcnt: in/out parameter which when input, it contains the number of entry available in the @iovout array. Upon return, this value will be updated to reflect how many entry in the @iovout array has been populated with data. This parameter is ignored if stream is configured to #MIX_DECODE_DIRECTRENDER. See mix_audio_configure() for more detail.
+ * @outsize: Total number of bytes returned for the decode session. This parameter is ignored if stream is configured to #MIX_DECODE_DIRECTRENDER.
+ * @returns: #MIX_RESULT
+ *
+ * This function is used to initiate HW accelerated decoding of encoded data buffers. This function may be used in two major modes, direct-render or decode-return.
+ *
+ * With direct-render, input buffers are provided by the caller which hold encoded audio data, and no output buffers are provided. The encoded data is decoded, and the decoded data is sent directly to the output speaker. This allows very low power audio rendering and is the best choice of operation for longer battery life.
+ *
+ * <note>
+ * <title>Intel Audio Manager Support</title>
+ * However, if the user has connected a different target output device, such as Bluetooth headphones, this mode cannot be used as the decoded audio must be directed to the Pulse Audio stack where the output to Bluetooth device can be supported, per Intel Audio Manager guidelines. This mode is called decode-return, and requires the caller to provide output buffers for the decoded data.
+ * </note>
+ *
+ * Input buffers in both modes are one or more user space buffers using a scatter/gather style vector interface.
+ *
+ * Output buffers for the decode-return mode are one or more user space buffers in a scatter style vector interface. Buffers will be filled in order and lengths of data filled will be returned.
+ *
+ * This call will block until data has been completely copied or queued to the driver. All user space buffers may be used or released when this call returns.
+ *
+ * Note: If the stream is configured as #MIX_DECODE_DIRECTRENDER, and whenever the stream in #MIX_STREAM_STOPPED state, the call to mix_audio_decode() will not start the playback until mix_audio_start() is called. This behavior would allow application to queue up data but delay the playback until appropriate time.
+ *
+ */
+MIX_RESULT mix_audio_decode(MixAudio *mix, const MixIOVec *iovin, gint iovincnt, guint64 *insize, MixIOVec *iovout, gint iovoutcnt, guint64 *outsize);
+
+/**
+ * mix_audio_capture_encode:
+ * @mix: #MixAudio object.
+ * @iovout: Capture audio samples.
+ * @iovoutcnt: Number of entry in the input vector @iovout.
+ * @returns: #MIX_RESULT
+ *
+ * To read encoded data from device.
+ *
+ * <comment>
+ * NOTE: May need to rename to "read_encoded" or other name. Since "encode" seems to mean taking raw audio and convert to compressed audio.
+ * </comment>
+ */
+MIX_RESULT mix_audio_capture_encode(MixAudio *mix, MixIOVec *iovout, gint iovoutcnt);
+
+/**
+ * mix_audio_start:
+ * @mix: #MixAudio object.
+ * @returns: #MIX_RESULT_SUCCESS if the resulting state is either #MIX_STREAM_PLAYING or #MIX_STREAM_PAUSED. Fail code otherwise.
+ *
+ * If the stream is configured to #MIX_DECODE_DIRECTRENDER, application uses this call to change the stream out of the #MIX_STREAM_STOPPED state. If mix_audio_decode() is called and blocking in a separate thread prior to this call, this method causes the device to start rendering data.
+ *
+ * In #MIX_DECODE_DECODERETURN, this method is no op.
+ */
+MIX_RESULT mix_audio_start(MixAudio *mix);
+
+/**
+ * mix_audio_stop_drop:
+ * @mix: #MixAudio object.
+ * @returns: #MIX_RESULT_SUCCESS if the resulting state has successfully reached #MIX_STREAM_STOPPED. Fail code otherwise.
+ *
+ * If the stream is configured to #MIX_DECODE_DIRECTRENDER, application uses this function to stop the processing and playback of audio.
+ *
+ * All remaining frames to be decoded or rendered will be discarded and playback will stop immediately, unblocks any pending mix_audio_decode().
+ *
+ * If #MIX_STOP_DRAIN is requested, the call will block with stream state set to #MIX_STREAM_DRAINING, and return only until all remaining frame in previously submitted buffers are decoded and rendered. When #MIX_STOP_DRAIN returns successfully, the stream would have reached #MIX_STREAM_STOPPED successfully.
+ *
+ * After this call, the timestamp retrieved by mix_audio_gettimestamp() is reset to zero.
+ *
+ * Note that this method returns #MIX_RESULT_WRONG_STATE if the stream is in #MIX_STREAM_DRAINING state.
+ *
+ */
+MIX_RESULT mix_audio_stop_drop(MixAudio *mix);
+
+/**
+ * mix_audio_stop_drain:
+ * @mix: #MixAudio object.
+ * @returns: #MIX_RESULT_SUCCESS if the resulting state has successfully reached #MIX_STREAM_STOPPED. Fail code otherwise.
+ *
+ * If the stream is configured to #MIX_DECODE_DIRECTRENDER, application uses this function to stop the processing and playback of audio.
+ *
+ * The call will block with stream state set to #MIX_STREAM_DRAINING, and return only until all remaining frame in previously submitted buffers are decoded and rendered.
+ *
+ * Note that this method blocks until #MIX_STREAM_STOPPED is reached if it is called when the stream is already in #MIX_STREAM_DRAINING state.
+ *
+ */
+MIX_RESULT mix_audio_stop_drain(MixAudio *mix);
+
+/**
+ * mix_audio_pause:
+ * @mix: #MixAudio object.
+ * @returns: #MIX_RESULT_SUCCESS if #MIX_STREAM_PAUSED state is reached successfully. #MIX_RESULT_WRONG_STATE if operation is not allowed with the current state.
+ *
+ * If the stream is configured to #MIX_DECODE_DIRECTRENDER, application uses this call to change the stream state from #MIX_STREAM_PLAYING to #MIX_STREAM_PAUSED. Note that this method returns successfully only when the resulting state reaches #MIX_STREAM_PAUSED. Meaning it will return a fail code if it is called in a state such as #MIX_STREAM_STOPPED, where transitioning to #MIX_STREAM_PAUSED is not possible.
+ *
+ * In some situations, where there is a potential race condition with the DRAINING operation, this method may return MIX_RESULT_NEED_RETRY to indicate the last operation result is inconclusive and request the caller to call again.
+ */
+MIX_RESULT mix_audio_pause(MixAudio *mix);
+
+/**
+ * mix_audio_resume:
+ * @mix: #MixAudio object.
+ * @returns: #MIX_RESULT_SUCCESS if #MIX_STREAM_PLAYING state is reached successfully. #MIX_RESULT_WRONG_STATE if operation is not allowed with the current state.
+ *
+ * If the stream is configured to #MIX_DECODE_DIRECTRENDER, application uses this call to change the stream state to #MIX_STREAM_PLAYING. Note that this method returns successfully only when the resulting state reaches #MIX_STREAM_PLAYING. Meaning it will return a fail code if it is called in a state such as #MIX_STREAM_DRAINING, where transitioning to #MIX_STREAM_PLAYING is not possible.
+ *
+ */
+MIX_RESULT mix_audio_resume(MixAudio *mix);
+
+
+/**
+ * mix_audio_get_timestamp:
+ * @mix: #MixAudio object.
+ * @msecs: play time in milliseconds.
+ * @returns: #MIX_RESULT_SUCCESS if the timestamp is available. #MIX_RESULT_WRONG_MODE if operation is not allowed with the current mode.
+ *
+ * This function can be used to retrieve the current timestamp for audio playback in milliseconds. The timestamp will reflect the amount of audio data rendered since the start of stream, or since the last stop. Note that the timestamp is always reset to zero when the stream enter #MIX_STREAM_STOPPED state. The timestamp is an unsigned long value, so the value will wrap when the timestamp reaches #ULONG_MAX. This function is only valid in direct-render mode.
+ */
+MIX_RESULT mix_audio_get_timestamp(MixAudio *mix, guint64 *msecs);
+
+/**
+ * mix_audio_set_mute:
+ * @mix: #MixAudio object.
+ * @mute: Turn mute on/off.
+ * @returns: #MIX_RESULT_SUCCESS on success or other fail code.
+ *
+ * This function is used to mute and unmute audio playback. While muted, playback would continue but silently. This function is only valid when the session is configured to #MIX_DECODE_DIRECTRENDER mode.
+ *
+ * Note that playback volume may change due to change of global settings while stream is muted.
+ */
+MIX_RESULT mix_audio_set_mute(MixAudio *mix, gboolean mute);
+
+/**
+ * mix_audio_get_mute:
+ * @mix: #MixAudio object.
+ * @muted: current mute state.
+ * @returns: #MIX_RESULT_SUCCESS on success or other fail code.
+ *
+ * Get Mute.
+ */
+MIX_RESULT mix_audio_get_mute(MixAudio *mix, gboolean* muted);
+
+/**
+ * mix_audio_get_max_vol:
+ * @mix: #MixAudio object.
+ * @maxvol: pointer to receive the maximum volume.
+ * @returns: #MIX_RESULT_SUCCESS on success or other fail code.
+ *
+ * This function can be used if the application will be setting the audio volume using decibels instead of percentage. The maximum volume in decibels supported by the driver will be returned. This value can be used to determine the upper bound of the decibel range in calculating volume levels. This value is a signed integer. This function is only valid if stream is configured to #MIX_DECODE_DIRECTRENDER mode.
+ *
+ */
+MIX_RESULT mix_audio_get_max_vol(MixAudio *mix, gint *maxvol);
+
+/**
+ * mix_audio_get_min_vol:
+ * @mix: #MixAudio object.
+ * @minvol: pointer to receive the minimum volume.
+ * @returns: #MIX_RESULT_SUCCESS on success or other fail code.
+ *
+ * This function can be used if the application will be setting the audio volume using decibels instead of percentage. The minimum volume in decibels supported by the driver will be returned. This value can be used to determine the lower bound of the decibel range in calculating volume levels. This value is a signed integer. This function is only valid if stream is configured to #MIX_DECODE_DIRECTRENDER mode.
+ *
+ */
+MIX_RESULT mix_audio_get_min_vol(MixAudio *mix, gint *minvol);
+
+/**
+ * mix_audio_get_volume:
+ * @mix: #MixAudio object.
+ * @currvol: Current volume. Note that if @type equals #MIX_VOL_PERCENT, this value will be return within the range of 0 to 100 inclusive.
+ * @type: The type represented by @currvol.
+ * @returns: #MIX_RESULT_SUCCESS on success or other fail code.
+ *
+ * This function returns the current volume setting in either decibels or percentage. This function is only valid if stream is configured to #MIX_DECODE_DIRECTRENDER mode.
+ *
+ */
+MIX_RESULT mix_audio_get_volume(MixAudio *mix, gint *currvol, MixVolType type);
+
+/**
+ * mix_audio_set_volume:
+ * @mix: #MixAudio object.
+ * @currvol: Current volume. Note that if @type equals #MIX_VOL_PERCENT, this value will be truncated to within the range of 0 to 100 inclusive.
+ * @type: The type represented by @currvol.
+ * @returns: #MIX_RESULT_SUCCESS on success or other fail code.
+ *
+ * This function sets the current volume setting in either decibels or percentage. This function is only valid if the stream is configured to #MIX_DECODE_DIRECTRENDER mode.
+ *
+ */
+MIX_RESULT mix_audio_set_volume(MixAudio *mix, gint currvol, MixVolType type, gulong msecs, MixVolRamp ramptype);
+
+/**
+ * mix_audio_deinitialize:
+ * @mix: #MixAudio object.
+ * @returns: #MIX_RESULT_SUCCESS on success or other fail code.
+ *
+ * This function will uninitialize a session with this MI-X instance. During this call, the SST device will be closed and resources including mmapped buffers would be freed. This function should be called by the application once mix_audio_init() has been called.
+ *
+ * <note>
+ * <title>Intel Audio Manager Support</title>
+ * The SST stream would be unregistered with Intel Audio Manager if it was registered.
+ * </note>
+ *
+ * Note that this method should not fail under normal circumstances. If it does return failure, the state of this object and the underlying mechanism is compromised and the application should not attempt to reuse this object.
+ */
+MIX_RESULT mix_audio_deinitialize(MixAudio *mix);
+
+/**
+ * mix_audio_get_stream_state:
+ * @mix: #MixAudio object.
+ * @streamState: pointer to receive stream state.
+ * @returns: #MIX_RESULT
+ *
+ * Get the stream state of the current stream.
+ */
+MIX_RESULT mix_audio_get_stream_state(MixAudio *mix, MixStreamState *streamState);
+
+/**
+ * mix_audio_get_state:
+ * @mix: #MixAudio object.
+ * @state: pointer to receive state
+ * @returns: Current device state.
+ *
+ * Get the device state of the audio session.
+ */
+MIX_RESULT mix_audio_get_state(MixAudio *mix, MixState *state);
+
+/**
+ * mix_audio_am_is_enabled:
+ * @mix: #MixAudio object.
+ * @returns: boolean indicates if Intel Audio Manager is enabled with the current session.
+ *
+ * This method checks if the current session is configured to use Intel Audio Manager. Note that Intel Audio Manager is considered disabled if the stream has not been initialized to use the service explicitly.
+ */
+gboolean mix_audio_am_is_enabled(MixAudio *mix);
+
+// Real implementation for Base class
+//MIX_RESULT mix_audio_get_version(guint* major, guint *minor);
+
+/**
+ * mix_audio_is_am_available:
+ * @mix: TBD
+ * @am: TBD
+ * @avail: TBD
+ * @returns: TBD
+ *
+ * Check if AM is available.
+ */
+MIX_RESULT mix_audio_is_am_available(MixAudio *mix, MixAudioManager am, gboolean *avail);
+
+/**
+ * mix_audio_get_output_configuration:
+ * @mix: #MixAudio object.
+ * @audioconfigparams: double pointer to hold output configuration.
+ * @returns: #MIX_RESULT_SUCCESS on success or other fail code.
+ *
+ * This method retrieves the current configuration. This can be called after initialization. If a stream has been configured, it returns the corresponding derived object of MixAudioConfigParams.
+ */
+MIX_RESULT mix_audio_get_output_configuration(MixAudio *mix, MixAudioConfigParams **audioconfigparams);
+
+/**
+ * mix_audio_get_stream_byte_decoded:
+ * @mix: #MixAudio object.
+ * @byte: pointer to receive the cumulative number of bytes decoded.
+ * @returns: #MIX_RESULT_SUCCESS if the value is available. #MIX_RESULT_WRONG_MODE if operation is not allowed with the current mode.
+ *
+ * Retrieve the cumulative number of bytes decoded.
+ *
+ * <remark>Not Implemented.</remark>
+ */
+MIX_RESULT mix_audio_get_stream_byte_decoded(MixAudio *mix, guint64 *byte);
+
+#endif /* __MIX_AUDIO_H__ */
diff --git a/mix_audio/src/mixaudiotypes.h b/mix_audio/src/mixaudiotypes.h
new file mode 100644
index 0000000..1b4e085
--- /dev/null
+++ b/mix_audio/src/mixaudiotypes.h
@@ -0,0 +1,27 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+*/
+
+#ifndef __MIX_AUDIO_TYPES_H__
+#define __MIX_AUDIO_TYPES_H__
+
+/**
+ * MixAudioManager:
+ * @MIX_AUDIOMANAGER_NONE: No Audio Manager.
+ * @MIX_AUDIOMANAGER_INTELAUDIOMANAGER: Intel Audio Manager.
+ * @MIX_AUDIOMANAGER_LAST: Last index.
+ *
+ * Audio Manager enumerations.
+ */
+typedef enum {
+ MIX_AUDIOMANAGER_NONE = 0,
+ MIX_AUDIOMANAGER_INTELAUDIOMANAGER,
+ MIX_AUDIOMANAGER_LAST
+} MixAudioManager;
+
+
+#endif
diff --git a/mix_audio/src/pvt.h b/mix_audio/src/pvt.h
new file mode 100644
index 0000000..f4be9e5
--- /dev/null
+++ b/mix_audio/src/pvt.h
@@ -0,0 +1,9 @@
+
+
+// Private fixed-width-style type aliases used by the SST proxy code.
+typedef unsigned short u16;
+// NOTE(review): 'unsigned long' is 64 bits on LP64 targets; if u32 must be
+// exactly 32 bits, 'unsigned int' (or uint32_t) would be the portable choice
+// -- confirm against the target ABI before changing.
+typedef unsigned long u32;
+typedef unsigned char u8;
+typedef signed char s8;
+typedef signed short s16;
+// Kernel-style annotation for user-space pointers; expands to nothing here.
+#define __user
+
diff --git a/mix_audio/src/sst_proxy.c b/mix_audio/src/sst_proxy.c
new file mode 100644
index 0000000..438e06e
--- /dev/null
+++ b/mix_audio/src/sst_proxy.c
@@ -0,0 +1,435 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+*/
+
+
+#include <glib.h>
+#include <glib/gprintf.h>
+#include <linux/types.h>
+#include "mixacpmp3.h"
+#include "mixacpwma.h"
+#include "mixacpaac.h"
+#include "intel_sst_ioctl.h"
+#include "mixacp.h"
+#include "sst_proxy.h"
+
+#ifdef G_LOG_DOMAIN
+#undef G_LOG_DOMAIN
+#define G_LOG_DOMAIN ((gchar*)"mixaudio")
+#endif
+
+gboolean mix_sst_params_convert_mp3(MixAudioConfigParamsMP3 *acp, struct snd_sst_params *s);
+gboolean mix_sst_params_convert_wma(MixAudioConfigParamsWMA *acp, struct snd_sst_params *s);
+gboolean mix_sst_params_convert_aac(MixAudioConfigParamsAAC *acp, struct snd_sst_params *s);
+void mix_sst_params_to_mp3(MixAudioConfigParamsMP3 *acp, struct snd_mp3_params *params);
+void mix_sst_params_to_wma(MixAudioConfigParamsWMA *acp, struct snd_wma_params *params);
+void mix_sst_params_to_aac(MixAudioConfigParamsAAC *acp, struct snd_aac_params *params);
+void mix_sst_set_bps(MixAudioConfigParams *acp, guchar pcm_wd_sz);
+void mix_sst_set_op_align(MixAudioConfigParams *acp, guchar op_align);
+
+/*
+ * Utilities that convert param object to driver struct.
+ * No Mix Context needed. However, it knows about the driver's param structure.
+ */
+gboolean mix_sst_params_convert(MixAudioConfigParams *acp, struct snd_sst_params *s)
+{
+  // Nothing to fill in without a destination struct.
+  if (!s) return FALSE;
+
+  // Dispatch on the runtime type of the ACP object; each converter mirrors
+  // its codec-specific fields into the driver parameter block.
+  if (MIX_IS_AUDIOCONFIGPARAMSMP3(acp))
+    return mix_sst_params_convert_mp3(MIX_AUDIOCONFIGPARAMSMP3(acp), s);
+
+  if (MIX_IS_AUDIOCONFIGPARAMSWMA(acp))
+    return mix_sst_params_convert_wma(MIX_AUDIOCONFIGPARAMSWMA(acp), s);
+
+  if (MIX_IS_AUDIOCONFIGPARAMSAAC(acp))
+    return mix_sst_params_convert_aac(MIX_AUDIOCONFIGPARAMSAAC(acp), s);
+
+  // Unknown or base-class ACP type: nothing converted.
+  return FALSE;
+}
+
+
+// Copy MP3 configuration from the ACP object into the SST driver
+// parameter block. Always returns TRUE.
+gboolean mix_sst_params_convert_mp3(MixAudioConfigParamsMP3 *acp, struct snd_sst_params *s)
+{
+  struct snd_mp3_params *p = &s->sparams.uc.mp3_params;
+
+  s->codec = p->codec = SST_CODEC_TYPE_MP3;
+  p->num_chan = MIX_ACP_NUM_CHANNELS(acp);
+  p->brate = MIX_ACP_BITRATE(acp);
+  p->sfreq = MIX_ACP_SAMPLE_FREQ(acp);
+  p->crc_check = MIX_ACP_MP3_CRC(acp);
+  p->pcm_wd_sz = mix_acp_get_bps(MIX_AUDIOCONFIGPARAMS(acp));
+  // 16-bit samples force 16-bit output alignment; otherwise honor the
+  // alignment configured on the ACP.
+  if (p->pcm_wd_sz == MIX_ACP_BPS_16)
+    p->op_align = MIX_ACP_OUTPUT_ALIGN_16;
+  else
+    p->op_align = mix_acp_get_op_align(MIX_AUDIOCONFIGPARAMS(acp));
+
+  return TRUE;
+}
+
+// Copy WMA configuration from the ACP object into the SST driver
+// parameter block. Always returns TRUE.
+gboolean mix_sst_params_convert_wma(MixAudioConfigParamsWMA *acp, struct snd_sst_params *s)
+{
+  struct snd_wma_params *p = &s->sparams.uc.wma_params;
+
+  p->num_chan = MIX_ACP_NUM_CHANNELS(acp);
+  p->brate = MIX_ACP_BITRATE(acp);
+  p->sfreq = MIX_ACP_SAMPLE_FREQ(acp);
+  p->wma_encode_opt = MIX_ACP_WMA_ENCODE_OPT(acp);
+  p->block_align = MIX_ACP_WMA_BLOCK_ALIGN(acp);
+  p->channel_mask = MIX_ACP_WMA_CHANNEL_MASK(acp);
+  p->format_tag = MIX_ACP_WMA_FORMAT_TAG(acp);
+  p->pcm_src = MIX_ACP_WMA_PCM_BIT_WIDTH(acp);
+  p->pcm_wd_sz = mix_acp_get_bps(MIX_AUDIOCONFIGPARAMS(acp));
+  // 16-bit samples force 16-bit output alignment; otherwise honor the
+  // alignment configured on the ACP.
+  if (p->pcm_wd_sz == MIX_ACP_BPS_16)
+    p->op_align = MIX_ACP_OUTPUT_ALIGN_16;
+  else
+    p->op_align = mix_acp_get_op_align(MIX_AUDIOCONFIGPARAMS(acp));
+
+  // Map the MixAudio WMA version enum to the SST codec id.
+  // NOTE(review): on an unrecognized version the codec fields are left
+  // unset yet the function still returns TRUE -- confirm whether the
+  // version setter guarantees only these three values can occur.
+  switch (mix_acp_wma_get_version(acp))
+  {
+    case MIX_AUDIO_WMA_V9:
+      s->codec = p->codec = SST_CODEC_TYPE_WMA9;
+      break;
+    case MIX_AUDIO_WMA_V10:
+      s->codec = p->codec = SST_CODEC_TYPE_WMA10;
+      break;
+    case MIX_AUDIO_WMA_V10P:
+      s->codec = p->codec = SST_CODEC_TYPE_WMA10P;
+      break;
+    default:
+      break;
+  }
+
+  return TRUE;
+}
+
+// Debug helpers: dump one field (or one array element) of snd_aac_params.
+#define AAC_DUMP(param) g_message("snd_aac_params.%s=%u", #param, p->param)
+#define AAC_DUMP_I(param, idx) g_message("snd_aac_params.%s[%d]=%x", #param, idx, p->param[idx])
+
+// Copy AAC configuration from the ACP object into the SST driver
+// parameter block, selecting the SST codec variant (AAC/AAC+/eAAC+)
+// from the AOT and the PS/SBR presence flags. Always returns TRUE.
+gboolean mix_sst_params_convert_aac(MixAudioConfigParamsAAC *acp, struct snd_sst_params *s)
+{
+  struct snd_aac_params *p = &s->sparams.uc.aac_params;
+
+  // Only the AOT is available here, while tools are usually specified at eAOT.
+  // However, sometimes the AOT can tell us the tool involved, e.g.
+  // AOT==5  --> SBR
+  // AOT==29 --> PS
+  // AOT==2  --> AAC-LC
+
+  // SBR is known to be present only if its presence is indicated explicitly,
+  // or the AOT says so (PS implies SBR).
+  guint aot = mix_acp_aac_get_aot(acp);
+  p->sbr_present = ((MIX_ACP_AAC_SBR_FLAG(acp) == 1) ||
+                    (aot == 5) ||
+                    (MIX_ACP_AAC_PS_FLAG(acp) == 1) ||
+                    (aot == 29))?1:0;
+
+  // As far as we know, we should:
+  // set the sbr_present flag for SST in case of possible implicit signalling of SBR, and
+  // use the HEAACv2 decoder in case of possible implicit signalling of PS.
+  // Although we could theoretically select the HEAACv2 decoder for HEAACv1 and HEAAC,
+  // it is not advisable since the HEAACv2 decoder has more overhead as per the SST team.
+  // So MixAudio implicitly selects the codec based on AOT, psPresentFlag and sbrPresentFlag.
+  // Application can override the selection by explicitly setting psPresentFlag and/or sbrPresentFlag.
+  if ((MIX_ACP_AAC_PS_FLAG(acp) == 1) || (aot == 29))
+  {
+    // PS present.
+    s->codec = p->codec = SST_CODEC_TYPE_eAACP;
+  }
+  else if (p->sbr_present == 1)
+  {
+    s->codec = p->codec = SST_CODEC_TYPE_AACP;
+  }
+  else
+  {
+    s->codec = p->codec = SST_CODEC_TYPE_AAC;
+  }
+
+  p->num_chan = MIX_ACP_AAC_CHANNELS(acp);     // core/internal channels
+  p->ext_chl = MIX_ACP_NUM_CHANNELS(acp);      // external channels
+  p->aac_srate = MIX_ACP_AAC_SAMPLE_RATE(acp); // aac decoder internal frequency
+  p->sfreq = MIX_ACP_SAMPLE_FREQ(acp);         // output/external frequency
+
+  p->brate = MIX_ACP_BITRATE(acp);
+  p->mpg_id = (guint)mix_acp_aac_get_mpeg_id(acp);
+  p->bs_format = mix_acp_aac_get_bit_stream_format(acp);
+  p->aac_profile = mix_acp_aac_get_aac_profile(acp);
+  // AOT defined by the MPEG spec is 5 for SBR but the SST definition is 4 for SBR.
+  // BUGFIX: previously p->aot was left uninitialized for any AOT other than
+  // 2 or 5 (e.g. 29/PS); now every other AOT value is passed through unchanged.
+  if (aot == 5)
+    p->aot = 4;
+  else
+    p->aot = aot;
+  p->crc_check = MIX_ACP_AAC_CRC(acp);
+  p->brate_type = mix_acp_aac_get_bit_rate_type(acp);
+  p->pce_present = MIX_ACP_AAC_PCE_FLAG(acp);
+  p->pcm_wd_sz = mix_acp_get_bps(MIX_AUDIOCONFIGPARAMS(acp));
+
+  // 16-bit samples force 16-bit output alignment; otherwise honor the
+  // alignment configured on the ACP.
+  if (p->pcm_wd_sz == MIX_ACP_BPS_16)
+    p->op_align = MIX_ACP_OUTPUT_ALIGN_16;
+  else
+    p->op_align = mix_acp_get_op_align(MIX_AUDIOCONFIGPARAMS(acp));
+
+  //p->aac_srate = ; // __u32 aac_srate;  /* Plain AAC decoder operating sample rate */
+  //p->ext_chl = ;   // __u8 ext_chl;     /* No.of external channels */
+
+  // The driver also wants the bitstream format spelled out as a string.
+  switch (p->bs_format)
+  {
+    case MIX_AAC_BS_ADTS:
+      g_sprintf((gchar*)p->bit_stream_format, "adts");
+      break;
+    case MIX_AAC_BS_ADIF:
+      g_sprintf((gchar*)p->bit_stream_format, "adif");
+      break;
+    case MIX_AAC_BS_RAW:
+      g_sprintf((gchar*)p->bit_stream_format, "raw");
+      p->num_syntc_elems = 0;
+      p->syntc_id[0] = (gint8)-1;  /* 0 for ID_SCE(Dual Mono), -1 for raw */
+      p->syntc_id[1] = (gint8)-1;
+      p->syntc_tag[0] = (gint8)-1; /* raw - -1 and 0 -16 for rest of the streams */
+      p->syntc_tag[1] = (gint8)-1;
+      break;
+    default:
+      break;
+  }
+
+  // Dump the fully-populated driver struct for debugging.
+  {
+    AAC_DUMP(codec);
+    AAC_DUMP(num_chan);        /* 1=Mono, 2=Stereo*/
+    AAC_DUMP(pcm_wd_sz);       /* 16/24 - bit*/
+    AAC_DUMP(brate);
+    AAC_DUMP(sfreq);           /* Sampling freq eg. 8000, 441000, 48000 */
+    AAC_DUMP(aac_srate);       /* Plain AAC decoder operating sample rate */
+    AAC_DUMP(mpg_id);          /* 0=MPEG-2, 1=MPEG-4 */
+    AAC_DUMP(bs_format);       /* input bit stream format adts=0, adif=1, raw=2 */
+    AAC_DUMP(aac_profile);     /* 0=Main Profile, 1=LC profile, 3=SSR profile */
+    AAC_DUMP(ext_chl);         /* No.of external channels */
+    AAC_DUMP(aot);             /* Audio object type. 1=Main , 2=LC , 3=SSR, 4=SBR*/
+    AAC_DUMP(op_align);        /* output alignment 0=16 bit , 1=MSB, 2= LSB align */
+    AAC_DUMP(brate_type);      /* 0=CBR, 1=VBR */
+    AAC_DUMP(crc_check);       /* crc check 0= disable, 1=enable */
+    // AAC_DUMP(bit_stream_format[8]); /* input bit stream format adts/adif/raw */
+    g_message("snd_aac_params.bit_stream_format=%s", p->bit_stream_format);
+    AAC_DUMP(jstereo);         /* Joint stereo Flag */
+    AAC_DUMP(sbr_present);     /* 1 = SBR Present, 0 = SBR absent, for RAW */
+    AAC_DUMP(downsample);      /* 1 = Downsampling ON, 0 = Downsampling OFF */
+    AAC_DUMP(num_syntc_elems); /* 1- Mono/stereo, 0 - Dual Mono, 0 - for raw */
+    g_message("snd_aac_params.syntc_id[0]=%x", p->syntc_id[0]);
+    g_message("snd_aac_params.syntc_id[1]=%x", p->syntc_id[1]);
+    g_message("snd_aac_params.syntc_tag[0]=%x", p->syntc_tag[0]);
+    g_message("snd_aac_params.syntc_tag[1]=%x", p->syntc_tag[1]);
+    //AAC_DUMP_I(syntc_id, 0);  /* 0 for ID_SCE(Dual Mono), -1 for raw */
+    //AAC_DUMP_I(syntc_id, 1);  /* 0 for ID_SCE(Dual Mono), -1 for raw */
+    //AAC_DUMP_I(syntc_tag, 0); /* raw - -1 and 0 -16 for rest of the streams */
+    //AAC_DUMP_I(syntc_tag, 1); /* raw - -1 and 0 -16 for rest of the streams */
+    AAC_DUMP(pce_present);     /* Flag. 1- present 0 - not present, for RAW */
+    AAC_DUMP(reserved);
+    AAC_DUMP(reserved1);
+  }
+
+  return TRUE;
+}
+
+// Instantiate the stream-specific ACP subclass matching an SST codec id.
+// Returns NULL for an unrecognized codec.
+MixAudioConfigParams *mix_sst_acp_from_codec(guint codec)
+{
+  switch (codec)
+  {
+    case SST_CODEC_TYPE_MP3:
+    case SST_CODEC_TYPE_MP24:
+      return (MixAudioConfigParams*)mix_acp_mp3_new();
+
+    case SST_CODEC_TYPE_AAC:
+    case SST_CODEC_TYPE_AACP:
+    case SST_CODEC_TYPE_eAACP:
+      return (MixAudioConfigParams*)mix_acp_aac_new();
+
+    case SST_CODEC_TYPE_WMA9:
+    case SST_CODEC_TYPE_WMA10:
+    case SST_CODEC_TYPE_WMA10P:
+      return (MixAudioConfigParams*)mix_acp_wma_new();
+
+    default:
+      return NULL;
+  }
+}
+
+
+
+// Build a MixAudioConfigParams (or codec-specific subclass) from the
+// parameters reported back by the SST driver. Falls back to a plain
+// base-class ACP when the stream was not successfully allocated or the
+// codec id is unrecognized. Caller owns the returned object.
+MixAudioConfigParams *mix_sst_params_to_acp(struct snd_sst_get_stream_params *stream_params)
+{
+  MixAudioConfigParams *ret = NULL;
+
+  gboolean allocated = FALSE;
+  // Ignoring stream_params.codec_params.result, which seems to return details specific to stream allocation.
+  switch (stream_params->codec_params.result)
+  {
+    // Please refer to the SST API doc for return value definitions.
+    case 5:
+      g_debug("last SET_PARAMS succeeded with Stream Parameter Modified.");
+      /* fall through: a modified-parameter result still means allocated */
+    case 0:
+      allocated = TRUE;
+      break;
+    case 1:
+      // last SET_PARAMS failed STREAM was not available.
+    case 2:
+      // last SET_PARAMS failed CODEC was not available.
+    case 3:
+      // last SET_PARAMS failed CODEC was not supported.
+    case 4:
+      // last SET_PARAMS failed Invalid Stream Parameters.
+    case 6:
+      // last SET_PARAMS failed Invalid Stream ID.
+    default:
+      // last SET_PARAMS failed unexpectedly.
+      break;
+  }
+
+  if (allocated)
+  {
+    // Create the codec-specific ACP and populate it from the driver struct.
+    switch (stream_params->codec_params.codec)
+    {
+      case SST_CODEC_TYPE_MP3:
+      case SST_CODEC_TYPE_MP24:
+        ret = (MixAudioConfigParams*)mix_acp_mp3_new();
+        mix_sst_params_to_mp3(MIX_AUDIOCONFIGPARAMSMP3(ret), &stream_params->codec_params.sparams.uc.mp3_params);
+        break;
+      case SST_CODEC_TYPE_AAC:
+      case SST_CODEC_TYPE_AACP:
+      case SST_CODEC_TYPE_eAACP:
+        ret = (MixAudioConfigParams*)mix_acp_aac_new();
+        mix_sst_params_to_aac(MIX_AUDIOCONFIGPARAMSAAC(ret), &stream_params->codec_params.sparams.uc.aac_params);
+        break;
+      case SST_CODEC_TYPE_WMA9:
+      case SST_CODEC_TYPE_WMA10:
+      case SST_CODEC_TYPE_WMA10P:
+        ret = (MixAudioConfigParams*)mix_acp_wma_new();
+        mix_sst_params_to_wma(MIX_AUDIOCONFIGPARAMSWMA(ret), &stream_params->codec_params.sparams.uc.wma_params);
+        break;
+    }
+  }
+
+  // No codec-specific object was produced: fall back to the base class.
+  if (!ret) ret = mix_acp_new();
+
+  if (ret)
+  {
+    // Be sure to update all vars that become available since the ACP could set defaults.
+    MIX_ACP_SAMPLE_FREQ(ret) = stream_params->pcm_params.sfreq;
+    MIX_ACP_NUM_CHANNELS(ret) = stream_params->pcm_params.num_chan;
+    mix_sst_set_bps(MIX_AUDIOCONFIGPARAMS(ret), stream_params->pcm_params.pcm_wd_sz);
+  }
+
+  return ret;
+}
+
+
+// Populate an MP3 ACP object from the driver's snd_mp3_params.
+// Silently does nothing when either pointer is NULL.
+void mix_sst_params_to_mp3(MixAudioConfigParamsMP3 *acp, struct snd_mp3_params *params)
+{
+  if(!acp || !params) return;
+
+  MIX_ACP_NUM_CHANNELS(MIX_AUDIOCONFIGPARAMS(acp)) = params->num_chan;
+  MIX_ACP_BITRATE(MIX_AUDIOCONFIGPARAMS(acp)) = params->brate;
+  MIX_ACP_SAMPLE_FREQ(MIX_AUDIOCONFIGPARAMS(acp)) = params->sfreq;
+  MIX_ACP_MP3_CRC(acp) = params->crc_check;
+
+  // Both setters sanitize unknown values to their UNKNOWN sentinels.
+  mix_sst_set_bps(MIX_AUDIOCONFIGPARAMS(acp), params->pcm_wd_sz);
+  mix_sst_set_op_align(MIX_AUDIOCONFIGPARAMS(acp), params->op_align);
+}
+
+// Populate a WMA ACP object from the driver's snd_wma_params.
+// Silently does nothing when either pointer is NULL.
+void mix_sst_params_to_wma(MixAudioConfigParamsWMA *acp, struct snd_wma_params *params)
+{
+  // BUGFIX: guard against NULL, consistent with mix_sst_params_to_mp3().
+  if (!acp || !params) return;
+
+  // BUGFIX: the channel count was previously never copied back from the
+  // driver struct, unlike the MP3 and AAC counterparts.
+  MIX_ACP_NUM_CHANNELS(acp) = params->num_chan;
+  MIX_ACP_BITRATE(acp) = params->brate;
+  MIX_ACP_SAMPLE_FREQ(acp) = params->sfreq;
+  MIX_ACP_WMA_ENCODE_OPT(acp) = params->wma_encode_opt;
+  MIX_ACP_WMA_BLOCK_ALIGN(acp) = params->block_align;
+  MIX_ACP_WMA_CHANNEL_MASK(acp) = params->channel_mask;
+  MIX_ACP_WMA_FORMAT_TAG(acp) = params->format_tag;
+  MIX_ACP_WMA_PCM_BIT_WIDTH(acp) = params->pcm_src;
+
+  // Both setters sanitize unknown values to their UNKNOWN sentinels.
+  mix_sst_set_bps(MIX_AUDIOCONFIGPARAMS(acp), params->pcm_wd_sz);
+  mix_sst_set_op_align(MIX_AUDIOCONFIGPARAMS(acp), params->op_align);
+
+  // Map the SST codec id back to the MixAudio WMA version enum; an
+  // unrecognized id leaves the version at its current/default value.
+  switch (params->codec)
+  {
+    case SST_CODEC_TYPE_WMA9:
+      mix_acp_wma_set_version(acp, MIX_AUDIO_WMA_V9);
+      break;
+    case SST_CODEC_TYPE_WMA10:
+      mix_acp_wma_set_version(acp, MIX_AUDIO_WMA_V10);
+      break;
+    case SST_CODEC_TYPE_WMA10P:
+      mix_acp_wma_set_version(acp, MIX_AUDIO_WMA_V10P);
+      break;
+  }
+}
+
+
+// Populate an AAC ACP object from the driver's snd_aac_params.
+// Silently does nothing when either pointer is NULL.
+void mix_sst_params_to_aac(MixAudioConfigParamsAAC *acp, struct snd_aac_params *params)
+{
+  // BUGFIX: guard against NULL, consistent with mix_sst_params_to_mp3().
+  if (!acp || !params) return;
+
+  // The eAAC+ codec implies Parametric Stereo was in use.
+  if (params->codec == SST_CODEC_TYPE_eAACP)
+  {
+    MIX_ACP_AAC_PS_FLAG(acp) = TRUE;
+  }
+
+  MIX_ACP_NUM_CHANNELS(acp) = params->num_chan;
+  MIX_ACP_BITRATE(acp) = params->brate;
+  MIX_ACP_SAMPLE_FREQ(acp) = params->sfreq;
+  mix_acp_aac_set_mpeg_id(acp, params->mpg_id);
+  mix_acp_aac_set_bit_stream_format(acp, params->bs_format);
+  mix_acp_aac_set_aac_profile(acp, params->aac_profile);
+
+  // The SST API uses 4 for SBR while the AOT definition in the MPEG-4 spec
+  // uses 5; convert back here.
+  // NOTE(review): AOT values other than 2 and 4 are deliberately not copied,
+  // leaving the ACP's default in place -- confirm this matches the driver's
+  // possible outputs.
+  if (params->aot == 4)
+    mix_acp_aac_set_aot(acp, 5);
+  else if (params->aot == 2)
+    mix_acp_aac_set_aot(acp, params->aot);
+
+  MIX_ACP_AAC_CRC(acp) = params->crc_check;
+  mix_acp_aac_set_bit_rate_type(acp, params->brate_type);
+  MIX_ACP_AAC_SBR_FLAG(acp) = params->sbr_present;
+  MIX_ACP_AAC_PCE_FLAG(acp) = params->pce_present;
+
+  // Both setters sanitize unknown values to their UNKNOWN sentinels.
+  mix_sst_set_bps(MIX_AUDIOCONFIGPARAMS(acp), params->pcm_wd_sz);
+  mix_sst_set_op_align(MIX_AUDIOCONFIGPARAMS(acp), params->op_align);
+
+  // Raw-stream syntactic element bookkeeping is copied verbatim.
+  acp->num_syntc_elems = params->num_syntc_elems;
+  acp->syntc_id[0] = params->syntc_id[0];
+  acp->syntc_id[1] = params->syntc_id[1];
+  acp->syntc_tag[0] = params->syntc_tag[0];
+  acp->syntc_tag[1] = params->syntc_tag[1];
+}
+
+// Store a bits-per-sample value on the ACP, mapping anything other than
+// the two supported word sizes to the UNKNOWN sentinel first.
+void mix_sst_set_bps(MixAudioConfigParams *acp, guchar pcm_wd_sz)
+{
+  if ((pcm_wd_sz != MIX_ACP_BPS_16) && (pcm_wd_sz != MIX_ACP_BPS_24))
+    pcm_wd_sz = MIX_ACP_BPS_UNKNOWN;
+
+  mix_acp_set_bps(MIX_AUDIOCONFIGPARAMS(acp), pcm_wd_sz);
+}
+
+// Store an output-alignment value on the ACP, mapping anything other than
+// the three recognized alignments to the UNKNOWN sentinel first.
+void mix_sst_set_op_align(MixAudioConfigParams *acp, guchar op_align)
+{
+  gboolean recognized = (op_align == MIX_ACP_OUTPUT_ALIGN_16) ||
+                        (op_align == MIX_ACP_OUTPUT_ALIGN_MSB) ||
+                        (op_align == MIX_ACP_OUTPUT_ALIGN_LSB);
+
+  if (!recognized)
+    op_align = MIX_ACP_OUTPUT_ALIGN_UNKNOWN;
+
+  mix_acp_set_op_align(MIX_AUDIOCONFIGPARAMS(acp), op_align);
+}
+
diff --git a/mix_audio/src/sst_proxy.h b/mix_audio/src/sst_proxy.h
new file mode 100644
index 0000000..6ad69fe
--- /dev/null
+++ b/mix_audio/src/sst_proxy.h
@@ -0,0 +1,17 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+*/
+
+#ifndef __SST_PROXY_H__
+#define __SST_PROXY_H__
+
+// renaming the struct for easier update, and reference, in MixAudio code.
+
+gboolean mix_sst_params_convert(MixAudioConfigParams *params, struct snd_sst_params *s);
+MixAudioConfigParams *mix_sst_params_to_acp(struct snd_sst_get_stream_params *stream_params);
+
+#endif
diff --git a/mix_audio/tests/Makefile.am b/mix_audio/tests/Makefile.am
new file mode 100644
index 0000000..372e488
--- /dev/null
+++ b/mix_audio/tests/Makefile.am
@@ -0,0 +1,2 @@
+SUBDIRS = smoke
+
diff --git a/mix_audio/tests/smoke/Makefile.am b/mix_audio/tests/smoke/Makefile.am
new file mode 100644
index 0000000..0a373ec
--- /dev/null
+++ b/mix_audio/tests/smoke/Makefile.am
@@ -0,0 +1,25 @@
+#INTEL CONFIDENTIAL
+#Copyright 2009 Intel Corporation All Rights Reserved.
+#The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel's prior express written permission.
+
+#No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+#
+
+noinst_PROGRAMS = mixaudiosmoke
+
+##############################################################################
+# sources used to compile
+mixaudiosmoke_SOURCES = mixaudiosmoke.c
+
+# flags used to compile this plugin
+# add other _CFLAGS and _LIBS as needed
+mixaudiosmoke_CFLAGS = -I$(top_srcdir)/src $(GLIB_CFLAGS) $(GOBJECT_CFLAGS) $(MIXCOMMON_CFLAGS)
+mixaudiosmoke_LDADD = $(GLIB_LIBS) $(GOBJECT_LIBS) $(top_srcdir)/src/libmixaudio.la $(MIXCOMMON_LIBS)
+#mixaudiosmoke_LDFLAGS = $(GLIB_LIBS) $(GOBJECT_LIBS) $(MIXCOMMON_LIBS)
+mixaudiosmoke_LIBTOOLFLAGS = --tag=disable-static
+
+# headers we need but don't want installed
+noinst_HEADERS =
+
+
+
diff --git a/mix_audio/tests/smoke/mixaudiosmoke.c b/mix_audio/tests/smoke/mixaudiosmoke.c
new file mode 100644
index 0000000..8f81108
--- /dev/null
+++ b/mix_audio/tests/smoke/mixaudiosmoke.c
@@ -0,0 +1,77 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel's prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+*/
+
+#include <stdio.h>
+#include "mixaudio.h"
+#include "mixparams.h"
+#include "mixacp.h"
+#include "mixacpmp3.h"
+
+void test_getversion()
+{
+ g_printf("Calling mixaudio_getversion...\n");
+ {
+ guint major = 0;
+ guint minor = 0;
+ MIX_RESULT ret = mix_audio_get_version(&major, &minor);
+ if (MIX_SUCCEEDED(ret))
+ {
+ g_printf("MixAudio Version %u.%u\n", major, minor);
+ }
+ else
+ g_printf("mixaudio_getversion() failed! Ret code : 0x%08x\n", ret);
+ }
+}
+
+int main (int argc, char **argv)
+{
+ g_type_init();
+
+ g_printf("Smoke test for MixAudio and structs\n");
+
+ test_getversion();
+
+ g_printf("Creating MixAudio...\n");
+ MixAudio *ma = mix_audio_new();
+ if (MIX_IS_AUDIO(ma))
+ {
+ g_printf("Successful.\n");
+
+ }
+ else
+ {
+ g_printf("Failed.\n");
+ }
+
+ g_printf("Creating MixAudioConfigParams...\n");
+ MixAudioConfigParams *map = mix_acp_new();
+ if (MIX_IS_AUDIOCONFIGPARAMS(map))
+ {
+ g_printf("Successful.\n");
+
+ g_printf("Destroying MixAudioConfigParams...\n");
+ mix_acp_unref(map);
+ g_printf("Successful.\n");
+ }
+ else
+ {
+ g_printf("Failed.\n");
+ }
+ g_printf("Creating mp3 config params...\n");
+ MixAudioConfigParamsMP3 *mp3 = mix_acp_mp3_new();
+
+ mp3->CRC = 0;
+
+ g_printf("Destroying MixAudio...\n");
+ mix_audio_unref(ma);
+ g_printf("Successful.\n");
+
+ g_printf("Smoke completed.\n");
+}
+
+
diff --git a/mix_common/AUTHORS b/mix_common/AUTHORS
new file mode 100644
index 0000000..2175750
--- /dev/null
+++ b/mix_common/AUTHORS
@@ -0,0 +1 @@
+Khanh Nguyen
diff --git a/mix_common/COPYING b/mix_common/COPYING
new file mode 100644
index 0000000..a4f852c
--- /dev/null
+++ b/mix_common/COPYING
@@ -0,0 +1,26 @@
+INTEL SOFTWARE LICENSE AGREEMENT (Alpha, Beta, Prototype Site License)
+
+IMPORTANT - READ BEFORE COPYING, INSTALLING OR USING.
+Do not use or load this software and any associated materials (collectively, the "Software") until you have carefully read the following terms and conditions. By loading or using the Software, you agree to the terms of this Agreement. If you do not wish to so agree, do not install or use the Software.
+
+
+LICENSE. Intel hereby grants you a limited, nontransferable, non-sublicenseable, nonexclusive, royalty-free, fully-paid license under Intel's copyrights to use the Software on your organization's computers solely for your organization's internal evaluation and testing in connection with Intel products designed for the Software, and you may make a reasonable number of copies of the Software for internal use only, subject to these conditions:
+1. You may not copy, modify, rent, sell, distribute, externally display, externally perform or transfer any part of the Software except as provided in this Agreement, and you agree to prevent unauthorized copying of the Software.
+2. You may not reverse engineer, decompile, or disassemble the Software.
+3. You may not sublicense the Software.
+4. The Software may include portions offered on terms in addition to those set out here, as set out in a license accompanying those portions.
+5. You may not subject the Software, in whole or in part, to any license obligations of Open Source Software including without limitation combining or distributing the Software with Open Source Software in a manner that subjects the Software or any portion of the Software provided by Intel hereunder to any license obligations of such Open Source Software. "Open Source Software" means any software that requires as a condition of use, modification and/or distribution of such software that such software or other software incorporated into, derived from or distributed with such software (a) be disclosed or distributed in source code form; or (b) be licensed by the user to third parties for the purpose of making and/or distributing derivative works; or (c) be redistributable at no charge. Open Source Software includes, without limitation, software licensed or distributed under any of the following licenses or distribution models, or licenses or distribution models substantially similar to any of the following: (a) GNU's General Public License (GPL) or Lesser/Library GPL (LGPL), (b) the Artistic License (e.g., PERL), (c) the Mozilla Public License, (d) the Netscape Public License, (e) the Sun Community Source License (SCSL), (f) the Sun Industry Source License (SISL), (g) the Apache Software license and (h) the Common Public License (CPL).
+OWNERSHIP OF SOFTWARE AND COPYRIGHTS. Title to all copies of the Software remains with Intel or its suppliers. The Software is copyrighted and protected by the laws of the United States and other countries, and international treaty provisions. You may not remove any copyright notices from the Software. Intel may make changes to the Software, or to items referenced therein, at any time without notice, but is not obligated to support, update, upgrade or provide training for the Software. Except as otherwise expressly provided, Intel grants no express or implied right under Intel patents, copyrights, trademarks, or other intellectual property rights.
+EXCLUSION OF WARRANTIES . THE SOFTWARE IS PROVIDED "AS IS" WITHOUT ANY EXPRESS OR IMPLIED WARRANTY OF ANY KIND INCLUDING WARRANTIES OF MERCHANTABILITY, NONINFRINGEMENT, OR FITNESS FOR A PARTICULAR PURPOSE. Intel does not warrant or assume responsibility for the accuracy or completeness of any information, text, graphics, links or other items contained within the Software.
+LIMITATION OF LIABILITY. IN NO EVENT SHALL INTEL OR ITS SUPPLIERS BE LIABLE FOR ANY DAMAGES WHATSOEVER (INCLUDING, WITHOUT LIMITATION, LOST PROFITS, BUSINESS INTERRUPTION OR LOST INFORMATION) ARISING OUT OF THE USE OF OR INABILITY TO USE THE SOFTWARE, EVEN IF INTEL HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. SOME JURISDICTIONS PROHIBIT EXCLUSION OR LIMITATION OF LIABILITY FOR IMPLIED WARRANTIES OR CONSEQUENTIAL OR INCIDENTAL DAMAGES, SO THE ABOVE LIMITATION MAY NOT APPLY TO YOU. YOU MAY ALSO HAVE OTHER LEGAL RIGHTS THAT VARY FROM JURISDICTION TO JURISDICTION. THE SOFTWARE LICENSED HEREUNDER IS NOT DESIGNED OR INTENDED FOR USE IN ANY APPLICATION IN WHICH THE FAILURE OF THE SOFTWARE COULD LEAD TO PERSONAL INJURY OR DEATH. YOU SHALL INDEMNIFY AND HOLD INTEL AND THE INTEL PARTIES HARMLESS AGAINST ALL CLAIMS, COSTS, DAMAGES, AND EXPENSES, AND REASONABLE ATTORNEY FEES ARISING OUT OF, DIRECTLY OR INDIRECTLY, THE UNINTENDED USE OF THE SOFTWARE AND ANY CLAIM OF PRODUCT LIABILITY, PERSONAL INJURY OR DEATH ASSOCIATED WITH ANY UNINTENDED USE, EVEN IF SUCH CLAIM ALLEGES THAT AN INTEL PARTY WAS NEGLIGENT REGARDING THE DESIGN OR MANUFACTURE OF THE SOFTWARE. THE LIMITED REMEDIES, WARRANTY DISCLAIMER AND LIMITED LIABILITY ARE FUNDAMENTAL ELEMENTS OF THE BASIS OF THE BARGAIN BETWEEN INTEL AND YOU. INTEL WOULD NOT BE ABLE TO PROVIDE THE SOFTWARE WITHOUT SUCH LIMITATIONS.
+TERMINATION OF THIS AGREEMENT. Intel may terminate this Agreement at any time if you violate its terms. Upon termination, you will immediately destroy the Software or return all copies of the Software to Intel (including providing certification of such destruction back to Intel). In the event of termination of this Agreement, all licenses granted to you hereunder shall immediately terminate.
+APPLICABLE LAWS. Claims arising under this Agreement shall be governed by the laws of Delaware, excluding its principles of conflict of laws and the United Nations Convention on Contracts for the Sale of Goods. You may not export the Software in violation of applicable export laws and regulations.
+GOVERNMENT RESTRICTED RIGHTS. The Software is provided with "RESTRICTED RIGHTS." Use, duplication or disclosure by the government is subject to restrictions as set forth in FAR52.227-14 and DFAR252.227-7013 et seq. or their successors. Use of the Software by the government constitutes acknowledgment of Intel's proprietary rights therein. Contractor or Manufacturer is Intel Corporation, 2200 Mission College Blvd., Santa Clara, CA 95052.
+CONFIDENTIALITY. You shall not disclose the terms or existence of this Agreement or use Intel's name in any publications, advertisements, or other announcements without Intel's prior written consent. You do not have any rights to use any Intel trademarks or logos.
+ASSIGNMENT. You may not delegate, assign or transfer this Agreement, the license(s) granted or any of your rights or duties hereunder, expressly, by implication, by operation of law, by way of merger (regardless of whether you are the surviving entity) or acquisition, or otherwise and any attempt to do so, without Intel's express prior written consent, shall be null and void. Intel may assign this Agreement, and its rights and obligations hereunder, in its sole discretion.
+ENTIRE AGREEMENT. The terms and conditions of this Agreement constitutes the entire agreement between the parties with respect to the subject matter hereof, and merges and supersedes all prior, contemporaneous agreements, understandings, negotiations and discussions. Neither of the parties hereto shall be bound by any conditions, definitions, warranties, understandings or representations with respect to the subject matter hereof other than as expressly provided for herein. Intel is not obligated under any other agreements unless they are in writing and signed by an authorized representative of Intel.
+NO AGENCY Nothing contained herein shall be construed as creating any agency, employment relationship, partnership, principal-agent or other form of joint enterprise between the parties.
+SEVERABILITY In the event that any provision of this Agreement shall be unenforceable or invalid under any applicable law or be so held by an applicable court decision, such unenforceability or invalidity shall not render this Agreement unenforceable or invalid as a whole, and, in such event, such provision shall be changed and interpreted so as to best accomplish the objectives of such unenforceable or invalid provision within the limits of applicable law or applicable court decisions.
+WAIVER The failure of either party to require performance by the other party of any provision hereof shall not affect the full right to require such performance at any time thereafter; nor shall the waiver by either party of a breach of any provision hereof be taken or held to be a waiver of the provision itself.
+CONTROLLING LANGUAGE. Translated versions of this Agreement may be provided in local languages for informational purposes only, provided however, that the English language version of this Agreement shall be controlling in all jurisdictions.
+
diff --git a/mix_common/ChangeLog b/mix_common/ChangeLog
new file mode 100644
index 0000000..b3469f8
--- /dev/null
+++ b/mix_common/ChangeLog
@@ -0,0 +1,28 @@
+2010-01-11 Echo Choi <echo@firefly>
+
+ * Updated version to 0.1.6
+ * Added NEED_RETRY and ERRNO error code to support retry and errno.
+
+2009-11-18 Echo Choi <echo@firefly>
+
+ * Fixed inclusion of m4 directory.
+ * Added return code.
+
+2009-11-13 Echo Choi <echo@firefly>
+
+ * Updated version to 0.1.5
+ * Added additional error codes.
+
+2009-10-16 Echo Choi <echo@firefly>
+
+ * Included mixdrmparams.*
+
+2009-10-14 Echo Choi <echo@firefly>
+
+ * Updated version to 0.1.4.
+
+2009-10-08 Echo Choi <echo@firefly>
+
+ * Updated version to 0.1.3 and packaged for build.
+
+Initial version
diff --git a/mix_common/INSTALL b/mix_common/INSTALL
new file mode 100644
index 0000000..8b82ade
--- /dev/null
+++ b/mix_common/INSTALL
@@ -0,0 +1,291 @@
+Installation Instructions
+*************************
+
+Copyright (C) 1994, 1995, 1996, 1999, 2000, 2001, 2002, 2004, 2005,
+2006, 2007, 2008 Free Software Foundation, Inc.
+
+ This file is free documentation; the Free Software Foundation gives
+unlimited permission to copy, distribute and modify it.
+
+Basic Installation
+==================
+
+ Briefly, the shell commands `./configure; make; make install' should
+configure, build, and install this package. The following
+more-detailed instructions are generic; see the `README' file for
+instructions specific to this package.
+
+ The `configure' shell script attempts to guess correct values for
+various system-dependent variables used during compilation. It uses
+those values to create a `Makefile' in each directory of the package.
+It may also create one or more `.h' files containing system-dependent
+definitions. Finally, it creates a shell script `config.status' that
+you can run in the future to recreate the current configuration, and a
+file `config.log' containing compiler output (useful mainly for
+debugging `configure').
+
+ It can also use an optional file (typically called `config.cache'
+and enabled with `--cache-file=config.cache' or simply `-C') that saves
+the results of its tests to speed up reconfiguring. Caching is
+disabled by default to prevent problems with accidental use of stale
+cache files.
+
+ If you need to do unusual things to compile the package, please try
+to figure out how `configure' could check whether to do them, and mail
+diffs or instructions to the address given in the `README' so they can
+be considered for the next release. If you are using the cache, and at
+some point `config.cache' contains results you don't want to keep, you
+may remove or edit it.
+
+ The file `configure.ac' (or `configure.in') is used to create
+`configure' by a program called `autoconf'. You need `configure.ac' if
+you want to change it or regenerate `configure' using a newer version
+of `autoconf'.
+
+The simplest way to compile this package is:
+
+ 1. `cd' to the directory containing the package's source code and type
+ `./configure' to configure the package for your system.
+
+ Running `configure' might take a while. While running, it prints
+ some messages telling which features it is checking for.
+
+ 2. Type `make' to compile the package.
+
+ 3. Optionally, type `make check' to run any self-tests that come with
+ the package.
+
+ 4. Type `make install' to install the programs and any data files and
+ documentation.
+
+ 5. You can remove the program binaries and object files from the
+ source code directory by typing `make clean'. To also remove the
+ files that `configure' created (so you can compile the package for
+ a different kind of computer), type `make distclean'. There is
+ also a `make maintainer-clean' target, but that is intended mainly
+ for the package's developers. If you use it, you may have to get
+ all sorts of other programs in order to regenerate files that came
+ with the distribution.
+
+ 6. Often, you can also type `make uninstall' to remove the installed
+ files again.
+
+Compilers and Options
+=====================
+
+ Some systems require unusual options for compilation or linking that
+the `configure' script does not know about. Run `./configure --help'
+for details on some of the pertinent environment variables.
+
+ You can give `configure' initial values for configuration parameters
+by setting variables in the command line or in the environment. Here
+is an example:
+
+ ./configure CC=c99 CFLAGS=-g LIBS=-lposix
+
+ *Note Defining Variables::, for more details.
+
+Compiling For Multiple Architectures
+====================================
+
+ You can compile the package for more than one kind of computer at the
+same time, by placing the object files for each architecture in their
+own directory. To do this, you can use GNU `make'. `cd' to the
+directory where you want the object files and executables to go and run
+the `configure' script. `configure' automatically checks for the
+source code in the directory that `configure' is in and in `..'.
+
+ With a non-GNU `make', it is safer to compile the package for one
+architecture at a time in the source code directory. After you have
+installed the package for one architecture, use `make distclean' before
+reconfiguring for another architecture.
+
+ On MacOS X 10.5 and later systems, you can create libraries and
+executables that work on multiple system types--known as "fat" or
+"universal" binaries--by specifying multiple `-arch' options to the
+compiler but only a single `-arch' option to the preprocessor. Like
+this:
+
+ ./configure CC="gcc -arch i386 -arch x86_64 -arch ppc -arch ppc64" \
+ CXX="g++ -arch i386 -arch x86_64 -arch ppc -arch ppc64" \
+ CPP="gcc -E" CXXCPP="g++ -E"
+
+ This is not guaranteed to produce working output in all cases, you
+may have to build one architecture at a time and combine the results
+using the `lipo' tool if you have problems.
+
+Installation Names
+==================
+
+ By default, `make install' installs the package's commands under
+`/usr/local/bin', include files under `/usr/local/include', etc. You
+can specify an installation prefix other than `/usr/local' by giving
+`configure' the option `--prefix=PREFIX'.
+
+ You can specify separate installation prefixes for
+architecture-specific files and architecture-independent files. If you
+pass the option `--exec-prefix=PREFIX' to `configure', the package uses
+PREFIX as the prefix for installing programs and libraries.
+Documentation and other data files still use the regular prefix.
+
+ In addition, if you use an unusual directory layout you can give
+options like `--bindir=DIR' to specify different values for particular
+kinds of files. Run `configure --help' for a list of the directories
+you can set and what kinds of files go in them.
+
+ If the package supports it, you can cause programs to be installed
+with an extra prefix or suffix on their names by giving `configure' the
+option `--program-prefix=PREFIX' or `--program-suffix=SUFFIX'.
+
+Optional Features
+=================
+
+ Some packages pay attention to `--enable-FEATURE' options to
+`configure', where FEATURE indicates an optional part of the package.
+They may also pay attention to `--with-PACKAGE' options, where PACKAGE
+is something like `gnu-as' or `x' (for the X Window System). The
+`README' should mention any `--enable-' and `--with-' options that the
+package recognizes.
+
+ For packages that use the X Window System, `configure' can usually
+find the X include and library files automatically, but if it doesn't,
+you can use the `configure' options `--x-includes=DIR' and
+`--x-libraries=DIR' to specify their locations.
+
+Particular systems
+==================
+
+ On HP-UX, the default C compiler is not ANSI C compatible. If GNU
+CC is not installed, it is recommended to use the following options in
+order to use an ANSI C compiler:
+
+ ./configure CC="cc -Ae"
+
+and if that doesn't work, install pre-built binaries of GCC for HP-UX.
+
+ On OSF/1 a.k.a. Tru64, some versions of the default C compiler cannot
+parse its `<wchar.h>' header file. The option `-nodtk' can be used as
+a workaround. If GNU CC is not installed, it is therefore recommended
+to try
+
+ ./configure CC="cc"
+
+and if that doesn't work, try
+
+ ./configure CC="cc -nodtk"
+
+Specifying the System Type
+==========================
+
+ There may be some features `configure' cannot figure out
+automatically, but needs to determine by the type of machine the package
+will run on. Usually, assuming the package is built to be run on the
+_same_ architectures, `configure' can figure that out, but if it prints
+a message saying it cannot guess the machine type, give it the
+`--build=TYPE' option. TYPE can either be a short name for the system
+type, such as `sun4', or a canonical name which has the form:
+
+ CPU-COMPANY-SYSTEM
+
+where SYSTEM can have one of these forms:
+
+ OS KERNEL-OS
+
+ See the file `config.sub' for the possible values of each field. If
+`config.sub' isn't included in this package, then this package doesn't
+need to know the machine type.
+
+ If you are _building_ compiler tools for cross-compiling, you should
+use the option `--target=TYPE' to select the type of system they will
+produce code for.
+
+ If you want to _use_ a cross compiler, that generates code for a
+platform different from the build platform, you should specify the
+"host" platform (i.e., that on which the generated programs will
+eventually be run) with `--host=TYPE'.
+
+Sharing Defaults
+================
+
+ If you want to set default values for `configure' scripts to share,
+you can create a site shell script called `config.site' that gives
+default values for variables like `CC', `cache_file', and `prefix'.
+`configure' looks for `PREFIX/share/config.site' if it exists, then
+`PREFIX/etc/config.site' if it exists. Or, you can set the
+`CONFIG_SITE' environment variable to the location of the site script.
+A warning: not all `configure' scripts look for a site script.
+
+Defining Variables
+==================
+
+ Variables not defined in a site shell script can be set in the
+environment passed to `configure'. However, some packages may run
+configure again during the build, and the customized values of these
+variables may be lost. In order to avoid this problem, you should set
+them in the `configure' command line, using `VAR=value'. For example:
+
+ ./configure CC=/usr/local2/bin/gcc
+
+causes the specified `gcc' to be used as the C compiler (unless it is
+overridden in the site shell script).
+
+Unfortunately, this technique does not work for `CONFIG_SHELL' due to
+an Autoconf bug. Until the bug is fixed you can use this workaround:
+
+ CONFIG_SHELL=/bin/bash /bin/bash ./configure CONFIG_SHELL=/bin/bash
+
+`configure' Invocation
+======================
+
+ `configure' recognizes the following options to control how it
+operates.
+
+`--help'
+`-h'
+ Print a summary of all of the options to `configure', and exit.
+
+`--help=short'
+`--help=recursive'
+ Print a summary of the options unique to this package's
+ `configure', and exit. The `short' variant lists options used
+ only in the top level, while the `recursive' variant lists options
+ also present in any nested packages.
+
+`--version'
+`-V'
+ Print the version of Autoconf used to generate the `configure'
+ script, and exit.
+
+`--cache-file=FILE'
+ Enable the cache: use and save the results of the tests in FILE,
+ traditionally `config.cache'. FILE defaults to `/dev/null' to
+ disable caching.
+
+`--config-cache'
+`-C'
+ Alias for `--cache-file=config.cache'.
+
+`--quiet'
+`--silent'
+`-q'
+ Do not print messages saying which checks are being made. To
+ suppress all normal output, redirect it to `/dev/null' (any error
+ messages will still be shown).
+
+`--srcdir=DIR'
+ Look for the package's source code in directory DIR. Usually
+ `configure' can determine that directory automatically.
+
+`--prefix=DIR'
+ Use DIR as the installation prefix. *Note Installation Names::
+ for more details, including other options available for fine-tuning
+ the installation locations.
+
+`--no-create'
+`-n'
+ Run the configure checks, but stop before creating any output
+ files.
+
+`configure' also accepts some other, not widely useful, options. Run
+`configure --help' for more details.
+
diff --git a/mix_common/Makefile.am b/mix_common/Makefile.am
new file mode 100644
index 0000000..f5b19ff
--- /dev/null
+++ b/mix_common/Makefile.am
@@ -0,0 +1,10 @@
+SUBDIRS = src
+
+#ACLOCAL_AMFLAGS=-I m4
+#Uncomment the following line if building documentation using gtkdoc
+#SUBDIRS += docs
+
+pkgconfigdir = $(libdir)/pkgconfig
+pkgconfig_DATA=mixcommon.pc
+EXTRA_DIST = autogen.sh m4
+DISTCHECK_CONFIGURE_FLAGS=--enable-gtk-doc
diff --git a/mix_common/NEWS b/mix_common/NEWS
new file mode 100644
index 0000000..4327969
--- /dev/null
+++ b/mix_common/NEWS
@@ -0,0 +1 @@
+news
diff --git a/mix_common/README b/mix_common/README
new file mode 100644
index 0000000..8178c76
--- /dev/null
+++ b/mix_common/README
@@ -0,0 +1 @@
+readme
diff --git a/mix_common/autogen.sh b/mix_common/autogen.sh
new file mode 100644
index 0000000..e123d49
--- /dev/null
+++ b/mix_common/autogen.sh
@@ -0,0 +1,8 @@
+package=MixCommon
+
+aclocal -I m4/ $ACLOCAL_FLAGS || exit 1
+libtoolize --copy --force || exit 1
+autoheader -v || exit 1
+autoconf -v || exit 1
+automake -a -c -v || exit 1
+#autoreconf -v --install
diff --git a/mix_common/configure.ac b/mix_common/configure.ac
new file mode 100644
index 0000000..2165138
--- /dev/null
+++ b/mix_common/configure.ac
@@ -0,0 +1,39 @@
+AC_INIT("", "", [khanh.v.nguyen@intel.com])
+
+AC_CONFIG_MACRO_DIR(m4)
+
+AS_MIX_VERSION(mixcommon, MIXCOMMON, 0, 1, 7)
+
+AM_INIT_AUTOMAKE($PACKAGE, $VERSION)
+#AM_INIT_AUTOMAKE([-Wall -Werror foreign])
+
+AC_PROG_CC
+AC_PROG_LIBTOOL
+
+AC_CHECK_PROG(HAVE_PKGCONFIG, pkg-config, yes , no)
+
+dnl Give error and exit if we don't have pkgconfig
+if test "x$HAVE_PKGCONFIG" = "xno"; then
+ AC_MSG_ERROR(you need to have pkgconfig installed !)
+fi
+
+GLIB_REQ=2.16
+dnl Check for glib2 without extra fat, useful for the unversioned tool frontends
+dnl PKG_CHECK_MODULES(GLIB_ONLY, glib-2.0 >= $GLIB_REQ,HAVE_GLIB=yes,HAVE_GLIB=no)
+PKG_CHECK_MODULES(GLIB, glib-2.0 >= $GLIB_REQ,HAVE_GLIB=yes,HAVE_GLIB=no)
+if test "x$HAVE_GLIB" = "xno"; then
+ AC_MSG_ERROR(You need glib development packages installed !)
+fi
+
+PKG_CHECK_MODULES(GOBJECT, gobject-2.0 >= $GLIB_REQ,HAVE_GOBJECT=yes,HAVE_GOBJECT=no)
+if test "x$HAVE_GOBJECT" = "xno"; then
+ AC_MSG_ERROR(You need glib development packages installed !)
+fi
+
+AC_CONFIG_HEADERS([config.h])
+AC_CONFIG_FILES([
+ mixcommon.pc
+ Makefile
+ src/Makefile
+])
+AC_OUTPUT
diff --git a/mix_common/m4/as-mix-version.m4 b/mix_common/m4/as-mix-version.m4
new file mode 100644
index 0000000..8b09d7c
--- /dev/null
+++ b/mix_common/m4/as-mix-version.m4
@@ -0,0 +1,35 @@
+dnl as-mix-version.m4
+
+dnl AS_MIX_VERSION(PACKAGE, PREFIX, MAJOR, MINOR, RELEASE)
+
+dnl example
+dnl AS_MIX_VERSION(mixaudio,MIXAUDIO, 0, 3, 2,)
+dnl for a 0.3.2 release version
+
+dnl this macro
+dnl - defines [$PREFIX]_MAJOR, MINOR and REVISION, CURRENT, AGE
+dnl - defines [$PREFIX], VERSION
+dnl - AC_SUBST's all defined vars
+
+AC_DEFUN([AS_MIX_VERSION],
+[
+ PACKAGE=[$1]
+ [$2]_MAJOR=[$3]
+ [$2]_MINOR=[$4]
+ [$2]_REVISION=[$5]
+ [$2]_CURRENT=m4_eval([$3] + [$4])
+ [$2]_AGE=[$4]
+ VERSION=[$3].[$4].[$5]
+
+ AC_SUBST([$2]_MAJOR)
+ AC_SUBST([$2]_MINOR)
+ AC_SUBST([$2]_REVISION)
+ AC_SUBST([$2]_CURRENT)
+ AC_SUBST([$2]_AGE)
+
+ AC_DEFINE_UNQUOTED(PACKAGE, "$PACKAGE", [Define the package name])
+ AC_SUBST(PACKAGE)
+ AC_DEFINE_UNQUOTED(VERSION, "$VERSION", [Define the version])
+ AC_SUBST(VERSION)
+
+])
diff --git a/mix_common/mixcommon.pc.in b/mix_common/mixcommon.pc.in
new file mode 100644
index 0000000..05ef285
--- /dev/null
+++ b/mix_common/mixcommon.pc.in
@@ -0,0 +1,11 @@
+prefix=@prefix@
+exec_prefix=@exec_prefix@
+libdir=@libdir@
+includedir=@includedir@
+
+Name: MI-X Library - Common
+Description: Common library for MI-X
+Requires:
+Version: @VERSION@
+Libs: -L${libdir} -l@PACKAGE@
+Cflags: -I${includedir}
diff --git a/mix_common/mixcommon.spec b/mix_common/mixcommon.spec
new file mode 100644
index 0000000..46f900a
--- /dev/null
+++ b/mix_common/mixcommon.spec
@@ -0,0 +1,43 @@
+Summary: MIX Common
+Name: mixcommon
+Version: 0.1.7
+Release: 1
+Source0: %{name}-%{version}.tar.gz
+NoSource: 0
+License: Proprietary
+Group: System Environment/Libraries
+BuildRoot: %{_tmppath}/%{name}-root
+ExclusiveArch: i586
+
+%description
+MIX Common contains common classes, datatype, header files used by other MIX components
+
+%package devel
+Summary: Libraries include files
+Group: Development/Libraries
+Requires: %{name} = %{version}
+
+%description devel
+The %{name}-devel package contains the header files and static libraries for building applications which use %{name}.
+
+%prep
+%setup -q
+%build
+./autogen.sh
+./configure --prefix=%{_prefix}
+make
+%install
+rm -rf $RPM_BUILD_ROOT
+make DESTDIR=$RPM_BUILD_ROOT install
+%clean
+rm -rf $RPM_BUILD_ROOT
+%files
+%defattr(-,root,root)
+%{_prefix}/lib/*.so*
+
+%files devel
+%defattr(-,root,root)
+%{_prefix}/include
+%{_prefix}/lib/*.la
+%{_prefix}/lib/pkgconfig/mixcommon.pc
+%doc COPYING
diff --git a/mix_common/src/Makefile.am b/mix_common/src/Makefile.am
new file mode 100644
index 0000000..199c509
--- /dev/null
+++ b/mix_common/src/Makefile.am
@@ -0,0 +1,23 @@
+#INTEL CONFIDENTIAL
+#Copyright 2009 Intel Corporation All Rights Reserved.
+#The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel's prior express written permission.
+
+#No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+#
+
+lib_LTLIBRARIES = libmixcommon.la
+
+##############################################################################
+# sources used to compile
+libmixcommon_la_SOURCES = mixparams.c mixlog.c mixdrmparams.c
+
+# flags used to compile this plugin
+# add other _CFLAGS and _LIBS as needed
+libmixcommon_la_CFLAGS = $(GLIB_CFLAGS) $(GOBJECT_CFLAGS)
+libmixcommon_la_LIBADD = $(GLIB_LIBS) $(GOBJECT_LIBS)
+libmixcommon_la_LDFLAGS = $(GLIB_LIBS) $(GOBJECT_LIBS) -version-info @MIXCOMMON_CURRENT@:@MIXCOMMON_REVISION@:@MIXCOMMON_AGE@
+libmixcommon_la_LIBTOOLFLAGS = --tag=disable-static
+
+include_HEADERS = mixparams.h mixresult.h mixlog.h mixdrmparams.h
+#mixcommonincludedir = $(includedir)
+#mixcommoninclude_HEADERS = mixparams.h mixresult.h
diff --git a/mix_common/src/mixdrmparams.c b/mix_common/src/mixdrmparams.c
new file mode 100644
index 0000000..82e3f39
--- /dev/null
+++ b/mix_common/src/mixdrmparams.c
@@ -0,0 +1,163 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel's prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+*/
+
+/**
+ * SECTION:mixdrmparams
+ * @short_description: Drm parameters
+ *
+ * A data object which stores drm specific parameters.
+ */
+
+#include "mixdrmparams.h"
+
+static GType _mix_drmparams_type = 0;
+static MixParamsClass *parent_class = NULL;
+
+#define _do_init { _mix_drmparams_type = g_define_type_id; }
+
+gboolean mix_drmparams_copy(MixParams* target, const MixParams *src);
+MixParams* mix_drmparams_dup(const MixParams *obj);
+gboolean mix_drmparams_equal(MixParams* first, MixParams *second);
+static void mix_drmparams_finalize(MixParams *obj);
+
+G_DEFINE_TYPE_WITH_CODE(MixDrmParams, mix_drmparams, MIX_TYPE_PARAMS, _do_init);
+
+void
+_mix_drmparams_initialize (void)
+{
+ /* the MixParams types need to be class_ref'd once before it can be
+ * done from multiple threads;
+ * see http://bugzilla.gnome.org/show_bug.cgi?id=304551 */
+ g_type_class_ref (mix_drmparams_get_type ());
+}
+
+static void mix_drmparams_init (MixDrmParams *self)
+{
+}
+
+static void mix_drmparams_class_init(MixDrmParamsClass *klass)
+{
+ MixParamsClass *mixparams_class = MIX_PARAMS_CLASS(klass);
+
+ /* setup static parent class */
+ parent_class = (MixParamsClass *) g_type_class_peek_parent (klass);
+
+ mixparams_class->finalize = mix_drmparams_finalize;
+ mixparams_class->copy = (MixParamsCopyFunction)mix_drmparams_copy;
+ mixparams_class->dup = (MixParamsDupFunction)mix_drmparams_dup;
+ mixparams_class->equal = (MixParamsEqualFunction)mix_drmparams_equal;
+}
+
+MixDrmParams *mix_drmparams_new(void)
+{
+ MixDrmParams *ret = (MixDrmParams *)g_type_create_instance (MIX_TYPE_DRMPARAMS);
+
+ return ret;
+}
+
+void mix_drmparams_finalize(MixParams *obj)
+{
+ /* clean up here. */
+
+ /* Chain up parent */
+ if (parent_class->finalize)
+ parent_class->finalize(obj);
+}
+
+MixDrmParams *mix_drmparams_ref(MixDrmParams *mix)
+{
+ return (MixDrmParams*)mix_params_ref(MIX_PARAMS(mix));
+}
+
+/**
+ * mix_drmparams_dup:
+ * @obj: a #MixDrmParams object
+ * @returns: a newly allocated duplicate of the object.
+ *
+ * Copy duplicate of the object.
+ */
+MixParams* mix_drmparams_dup(const MixParams *obj)
+{
+ MixParams *ret = NULL;
+
+ if (MIX_IS_DRMPARAMS(obj))
+ {
+ MixDrmParams *duplicate = mix_drmparams_new();
+ if (mix_drmparams_copy(MIX_PARAMS(duplicate), MIX_PARAMS(obj)))
+ {
+ ret = MIX_PARAMS(duplicate);
+ }
+ else
+ {
+ mix_drmparams_unref(duplicate);
+ }
+ }
+
+ return ret;;
+}
+
+/**
+ * mix_drmparams_copy:
+ * @target: copy to target
+ * @src: copy from src
+ * @returns: boolean indicates if copy is successful.
+ *
+ * Copy instance data from @src to @target.
+ */
+gboolean mix_drmparams_copy(MixParams* target, const MixParams *src)
+{
+ if (MIX_IS_DRMPARAMS(target) && MIX_IS_DRMPARAMS(src))
+ {
+ // TODO perform copy.
+ //
+ // Now chainup base class
+ // Get the root class from the cached parent_class object. This cached parent_class object has not be overwritten by this current class.
+ // Using the cached parent_class object because this_class would have ->copy pointing to this method!
+ // Cached parent_class contains the class object before it is overwritten by this derive class.
+ // MixParamsClass *klass = MIX_PARAMS_CLASS(parent_class);
+ if (parent_class->copy)
+ {
+ return parent_class->copy(MIX_PARAMS_CAST(target), MIX_PARAMS_CAST(src));
+ }
+ else
+ return TRUE;
+ }
+ return FALSE;
+}
+
+/**
+ * mix_drmparams_equal:
+ * @first: first object to compare
+ * @second: second object to compare
+ * @returns: boolean indicates if instance are equal.
+ *
+ * Compare the contents of @first and @second for equality.
+ */
+gboolean mix_drmparams_equal(MixParams* first, MixParams *second)
+{
+ gboolean ret = TRUE;
+
+ if (MIX_IS_DRMPARAMS(first) && MIX_IS_DRMPARAMS(second))
+ {
+ // TODO: do deep compare
+
+ if (ret)
+ {
+ // members within this scope equal. chaining up.
+ MixParamsClass *klass = MIX_PARAMS_CLASS(parent_class);
+ if (klass->equal)
+ ret = parent_class->equal(first, second);
+ else
+ ret = TRUE;
+ }
+ }
+
+ return ret;
+}
+
+
diff --git a/mix_common/src/mixdrmparams.h b/mix_common/src/mixdrmparams.h
new file mode 100644
index 0000000..7ef82fb
--- /dev/null
+++ b/mix_common/src/mixdrmparams.h
@@ -0,0 +1,123 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel's prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+*/
+
+#ifndef __MIX_DRMPARAMS_H__
+#define __MIX_DRMPARAMS_H__
+
+
+#include "mixparams.h"
+
+/**
+ * MIX_TYPE_DRMPARAMS:
+ *
+ * Get type of class.
+ */
+#define MIX_TYPE_DRMPARAMS (mix_drmparams_get_type ())
+
+/**
+ * MIX_DRMPARAMS:
+ * @obj: object to be type-casted.
+ */
+#define MIX_DRMPARAMS(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_DRMPARAMS, MixDrmParams))
+
+/**
+ * MIX_IS_DRMPARAMS:
+ * @obj: an object.
+ *
+ * Checks if the given object is an instance of #MixDrmParams
+ */
+#define MIX_IS_DRMPARAMS(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_DRMPARAMS))
+
+/**
+ * MIX_DRMPARAMS_CLASS:
+ * @klass: class to be type-casted.
+ */
+#define MIX_DRMPARAMS_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_DRMPARAMS, MixDrmParamsClass))
+
+/**
+ * MIX_IS_DRMPARAMS_CLASS:
+ * @klass: a class.
+ *
+ * Checks if the given class is #MixDrmParamsClass
+ */
+#define MIX_IS_DRMPARAMS_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_DRMPARAMS))
+
+/**
+ * MIX_DRMPARAMS_GET_CLASS:
+ * @obj: a #MixDrmParams object.
+ *
+ * Get the class instance of the object.
+ */
+#define MIX_DRMPARAMS_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_DRMPARAMS, MixDrmParamsClass))
+
+typedef struct _MixDrmParams MixDrmParams;
+typedef struct _MixDrmParamsClass MixDrmParamsClass;
+
+/**
+ * MixDrmParams:
+ *
+ * MI-X Drm Parameter object
+ */
+struct _MixDrmParams
+{
+ /*< public >*/
+ MixParams parent;
+
+ /*< public >*/
+};
+
+/**
+ * MixDrmParamsClass:
+ *
+ * MI-X Drm object class
+ */
+struct _MixDrmParamsClass
+{
+ /*< public >*/
+ MixParamsClass parent_class;
+
+ /* class members */
+};
+
+/**
+ * mix_drmparams_get_type:
+ * @returns: type
+ *
+ * Get the type of object.
+ */
+GType mix_drmparams_get_type (void);
+
+/**
+ * mix_drmparams_new:
+ * @returns: A newly allocated instance of #MixDrmParams
+ *
+ * Use this method to create new instance of #MixDrmParams
+ */
+MixDrmParams *mix_drmparams_new(void);
+
+/**
+ * mix_drmparams_ref:
+ * @mix: object to add reference
+ * @returns: the MixDrmParams instance where reference count has been increased.
+ *
+ * Add reference count.
+ */
+MixDrmParams *mix_drmparams_ref(MixDrmParams *mix);
+
+/**
+ * mix_drmparams_unref:
+ * @obj: object to unref.
+ *
+ * Decrement reference count of the object.
+ */
+#define mix_drmparams_unref(obj) mix_params_unref(MIX_PARAMS(obj))
+
+/* Class Methods */
+
+
+#endif /* __MIX_DRMPARAMS_H__ */
diff --git a/mix_common/src/mixlog.c b/mix_common/src/mixlog.c
new file mode 100644
index 0000000..a9dd359
--- /dev/null
+++ b/mix_common/src/mixlog.c
@@ -0,0 +1,257 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel's prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+#include <glib.h>
+#include <string.h>
+#include "mixlog.h"
+
+#define MIX_DELOG_COMPS "MIX_DELOG_COMPS"
+#define MIX_DELOG_FILES "MIX_DELOG_FILES"
+#define MIX_DELOG_FUNCS "MIX_DELOG_FUNCS"
+#define MIX_LOG_ENABLE "MIX_LOG_ENABLE"
+#define MIX_DELOG_DELIMITERS " ,;"
+
+#define MIX_LOG_LEVEL "MIX_LOG_LEVEL"
+
+static GStaticMutex g_mutex = G_STATIC_MUTEX_INIT;
+
+#ifdef MIX_LOG_USE_HT
+static GHashTable *g_defile_ht = NULL, *g_defunc_ht = NULL, *g_decom_ht = NULL;
+static gint g_mix_log_level = MIX_LOG_LEVEL_VERBOSE;
+static gint g_refcount = 0;
+
+#define mix_log_destroy_ht(ht) if(ht) { g_hash_table_destroy(ht); ht = NULL; }
+
+void mix_log_get_ht(GHashTable **ht, const gchar *var) {
+
+ const char *delog_list = NULL;
+ char *item = NULL;
+ if (!ht || !var) {
+ return;
+ }
+
+ delog_list = g_getenv(var);
+ if (!delog_list) {
+ return;
+ }
+
+ if (*ht == NULL) {
+ *ht = g_hash_table_new(g_str_hash, g_str_equal);
+ if (*ht == NULL) {
+ return;
+ }
+ }
+
+ item = strtok((char *) delog_list, MIX_DELOG_DELIMITERS);
+ while (item != NULL) {
+ g_hash_table_insert(*ht, item, "true");
+ item = strtok(NULL, MIX_DELOG_DELIMITERS);
+ }
+}
+
+void mix_log_initialize_func() {
+
+ const gchar *mix_log_level = NULL;
+ g_static_mutex_lock(&g_mutex);
+
+ if (g_refcount == 0) {
+
+ mix_log_level = g_getenv(MIX_LOG_LEVEL);
+ if (mix_log_level) {
+ g_mix_log_level = atoi(mix_log_level);
+ }
+
+ mix_log_get_ht(&g_decom_ht, MIX_DELOG_COMPS);
+ mix_log_get_ht(&g_defile_ht, MIX_DELOG_FILES);
+ mix_log_get_ht(&g_defunc_ht, MIX_DELOG_FUNCS);
+ }
+
+ g_refcount++;
+
+ g_static_mutex_unlock(&g_mutex);
+}
+
+void mix_log_finalize_func() {
+
+ g_static_mutex_lock(&g_mutex);
+
+ g_refcount--;
+
+ if (g_refcount == 0) {
+ mix_log_destroy_ht(g_decom_ht);
+ mix_log_destroy_ht(g_defile_ht);
+ mix_log_destroy_ht(g_defunc_ht);
+
+ g_mix_log_level = MIX_LOG_LEVEL_VERBOSE;
+ }
+
+ if (g_refcount < 0) {
+ g_refcount = 0;
+ }
+
+ g_static_mutex_unlock(&g_mutex);
+}
+
+void mix_log_func(const gchar* comp, gint level, const gchar *file,
+ const gchar *func, gint line, const gchar *format, ...) {
+
+ va_list args;
+ static gchar* loglevel[4] = {"**ERROR", "*WARNING", "INFO", "VERBOSE"};
+
+ if (!format) {
+ return;
+ }
+
+ g_static_mutex_lock(&g_mutex);
+
+ if (level > g_mix_log_level) {
+ goto exit;
+ }
+
+ if (g_decom_ht) {
+ if (g_hash_table_lookup(g_decom_ht, comp)) {
+ goto exit;
+ }
+ }
+
+ if (g_defile_ht) {
+ if (g_hash_table_lookup(g_defile_ht, file)) {
+ goto exit;
+ }
+ }
+
+ if (g_defunc_ht) {
+ if (g_hash_table_lookup(g_defunc_ht, func)) {
+ goto exit;
+ }
+ }
+
+ if(level > MIX_LOG_LEVEL_VERBOSE) {
+ level = MIX_LOG_LEVEL_VERBOSE;
+ }
+ if(level < MIX_LOG_LEVEL_ERROR) {
+ level = MIX_LOG_LEVEL_ERROR;
+ }
+
+ g_print("%s : %s : %s : ", loglevel[level - 1], file, func);
+
+ va_start(args, format);
+ g_vprintf(format, args);
+ va_end(args);
+
+ exit: g_static_mutex_unlock(&g_mutex);
+}
+
+#else /* MIX_LOG_USE_HT */
+
+gboolean mix_shall_delog(const gchar *name, const gchar *var) {
+
+ const char *delog_list = NULL;
+ char *item = NULL;
+ gboolean delog = FALSE;
+
+ if (!name || !var) {
+ return delog;
+ }
+
+ delog_list = g_getenv(var);
+ if (!delog_list) {
+ return delog;
+ }
+
+ item = strtok((char *) delog_list, MIX_DELOG_DELIMITERS);
+ while (item != NULL) {
+ if (strcmp(item, name) == 0) {
+ delog = TRUE;
+ break;
+ }
+ item = strtok(NULL, MIX_DELOG_DELIMITERS);
+ }
+
+ return delog;
+}
+
+gboolean mix_log_enabled() {
+
+ const char *value = NULL;
+ value = g_getenv(MIX_LOG_ENABLE);
+ if(!value) {
+ return FALSE;
+ }
+
+ if(value[0] == '0') {
+ return FALSE;
+ }
+ return TRUE;
+}
+
+void mix_log_func(const gchar* comp, gint level, const gchar *file,
+ const gchar *func, gint line, const gchar *format, ...) {
+
+ va_list args;
+ static gchar* loglevel[4] = { "**ERROR", "*WARNING", "INFO", "VERBOSE" };
+
+ const gchar *env_mix_log_level = NULL;
+ gint mix_log_level_threhold = MIX_LOG_LEVEL_VERBOSE;
+
+ if(!mix_log_enabled()) {
+ return;
+ }
+
+ if (!format) {
+ return;
+ }
+
+ g_static_mutex_lock(&g_mutex);
+
+ /* log level */
+ env_mix_log_level = g_getenv(MIX_LOG_LEVEL);
+ if (env_mix_log_level) {
+ mix_log_level_threhold = atoi(env_mix_log_level);
+ }
+
+ if (level > mix_log_level_threhold) {
+ goto exit;
+ }
+
+ /* component */
+ if (mix_shall_delog(comp, MIX_DELOG_COMPS)) {
+ goto exit;
+ }
+
+ /* files */
+ if (mix_shall_delog(file, MIX_DELOG_FILES)) {
+ goto exit;
+ }
+
+ /* functions */
+ if (mix_shall_delog(func, MIX_DELOG_FUNCS)) {
+ goto exit;
+ }
+
+ if (level > MIX_LOG_LEVEL_VERBOSE) {
+ level = MIX_LOG_LEVEL_VERBOSE;
+ }
+ if (level < MIX_LOG_LEVEL_ERROR) {
+ level = MIX_LOG_LEVEL_ERROR;
+ }
+
+ g_print("%s : %s : %s : ", loglevel[level - 1], file, func);
+
+ va_start(args, format);
+ g_vprintf(format, args);
+ va_end(args);
+
+exit:
+ g_static_mutex_unlock(&g_mutex);
+}
+
+
+#endif /* MIX_LOG_USE_HT */
+
+
diff --git a/mix_common/src/mixlog.h b/mix_common/src/mixlog.h
new file mode 100644
index 0000000..2fe60fd
--- /dev/null
+++ b/mix_common/src/mixlog.h
@@ -0,0 +1,47 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel's prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+#include <glib.h>
+
+#ifndef __MIX_LOG_H__
+#define __MIX_LOG_H__
+
+/* Warning: don't call these functions */
+void mix_log_func(const gchar* comp, gint level, const gchar *file,
+ const gchar *func, gint line, const gchar *format, ...);
+
+/* Components */
+#define MIX_VIDEO_COMP "mixvideo"
+#define GST_MIX_VIDEO_DEC_COMP "gstmixvideodec"
+#define GST_MIX_VIDEO_SINK_COMP "gstmixvideosink"
+#define GST_MIX_VIDEO_ENC_COMP "gstmixvideoenc"
+
+#define MIX_AUDIO_COMP "mixaudio"
+#define GST_MIX_AUDIO_DEC_COMP "gstmixaudiodec"
+#define GST_MIX_AUDIO_SINK_COMP "gstmixaudiosink"
+
+/* log level */
+#define MIX_LOG_LEVEL_ERROR 1
+#define MIX_LOG_LEVEL_WARNING 2
+#define MIX_LOG_LEVEL_INFO 3
+#define MIX_LOG_LEVEL_VERBOSE 4
+
+
+/* MACROS for mixlog */
+#ifdef MIX_LOG_ENABLE
+
+#define mix_log(comp, level, format, ...) \
+ mix_log_func(comp, level, __FILE__, __FUNCTION__, __LINE__, format, ##__VA_ARGS__)
+
+#else
+
+#define mix_log(comp, level, format, ...)
+
+#endif
+
+#endif
diff --git a/mix_common/src/mixparams.c b/mix_common/src/mixparams.c
new file mode 100644
index 0000000..2f8f8f6
--- /dev/null
+++ b/mix_common/src/mixparams.c
@@ -0,0 +1,274 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel's prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+*/
+
+/**
+ * SECTION:mixparams
+ * @short_description: Lightweight base class for the MIX media params
+ *
+ */
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "mixparams.h"
+#include <gobject/gvaluecollector.h>
+
+
+#define DEBUG_REFCOUNT
+
+static void mix_params_class_init (gpointer g_class, gpointer class_data);
+static void mix_params_init (GTypeInstance * instance, gpointer klass);
+
+static void mix_params_finalize(MixParams * obj);
+static gboolean mix_params_copy_default (MixParams *target, const MixParams *src);
+static MixParams *mix_params_dup_default(const MixParams *obj);
+static gboolean mix_params_equal_default (MixParams *first, MixParams *second);
+
+GType mix_params_get_type (void)
+{
+ static GType _mix_params_type = 0;
+
+ if (G_UNLIKELY (_mix_params_type == 0)) {
+
+ GTypeInfo info = {
+ sizeof (MixParamsClass),
+ NULL,
+ NULL,
+ mix_params_class_init,
+ NULL,
+ NULL,
+ sizeof (MixParams),
+ 0,
+ (GInstanceInitFunc) mix_params_init,
+ NULL
+ };
+
+ static const GTypeFundamentalInfo fundamental_info = {
+ (G_TYPE_FLAG_CLASSED | G_TYPE_FLAG_INSTANTIATABLE |
+ G_TYPE_FLAG_DERIVABLE | G_TYPE_FLAG_DEEP_DERIVABLE)
+ };
+
+ info.value_table = NULL;
+
+ _mix_params_type = g_type_fundamental_next ();
+ g_type_register_fundamental (_mix_params_type, "MixParams", &info, &fundamental_info, G_TYPE_FLAG_ABSTRACT);
+
+ }
+
+ return _mix_params_type;
+}
+
+static void mix_params_class_init (gpointer g_class, gpointer class_data)
+{
+ MixParamsClass *klass = MIX_PARAMS_CLASS (g_class);
+
+ klass->dup = mix_params_dup_default;
+ klass->copy = mix_params_copy_default;
+ klass->finalize = mix_params_finalize;
+ klass->equal = mix_params_equal_default;
+}
+
+static void mix_params_init (GTypeInstance * instance, gpointer klass)
+{
+ MixParams *obj = MIX_PARAMS_CAST (instance);
+
+ obj->refcount = 1;
+}
+
+gboolean mix_params_copy (MixParams *target, const MixParams *src)
+{
+ /* Use the target object class. Because it knows what it is looking for. */
+ MixParamsClass *klass = MIX_PARAMS_GET_CLASS(target);
+ if (klass->copy)
+ {
+ return klass->copy(target, src);
+ }
+ else
+ {
+ return mix_params_copy_default(target, src);
+ }
+}
+
+/**
+ * mix_params_copy_default:
+ * @target: target
+ * @src: source
+ *
+ * The default copy method of this object. Perhaps copy at this level.
+ * Assign this to the copy vmethod.
+ */
+static gboolean mix_params_copy_default (MixParams *target, const MixParams *src)
+{
+ if (MIX_IS_PARAMS(target) && MIX_IS_PARAMS(src))
+ {
+ // TODO perform deep copy.
+ return TRUE;
+ }
+ return FALSE;
+}
+
+static void mix_params_finalize (MixParams * obj)
+{
+ /* do nothing */
+}
+
+MixParams *mix_params_dup(const MixParams *obj)
+{
+ MixParamsClass *klass = MIX_PARAMS_GET_CLASS(obj);
+
+ if (klass->dup)
+ {
+ return klass->dup(obj);
+ }
+ else if (MIX_IS_PARAMS(obj))
+ {
+ return mix_params_dup_default(obj);
+ }
+ return NULL;
+}
+
+static MixParams *mix_params_dup_default(const MixParams *obj)
+{
+ MixParams *ret = mix_params_new();
+ if (mix_params_copy(ret, obj))
+ {
+ return ret;
+ }
+
+ return NULL;
+}
+
+MixParams* mix_params_new (GType type)
+{
+ MixParams *obj;
+
+ /* we don't support dynamic types because they really aren't useful,
+ * and could cause refcount problems */
+ obj = (MixParams *) g_type_create_instance (type);
+
+ return obj;
+}
+
+MixParams* mix_params_ref (MixParams *obj)
+{
+ g_return_val_if_fail(MIX_IS_PARAMS (obj), NULL);
+
+ g_atomic_int_inc(&obj->refcount);
+
+ return obj;
+}
+
+static void mix_params_free(MixParams *obj)
+{
+ MixParamsClass *klass = NULL;
+
+ klass = MIX_PARAMS_GET_CLASS(obj);
+ klass->finalize(obj);
+
+ /* Should we support recycling the object? */
+ /* If so, refcount handling is slightly different. */
+ /* i.e. If the refcount is still 0 we can really free the object, else the finalize method recycled the object -- but to where? */
+
+ if (g_atomic_int_get (&obj->refcount) == 0) {
+
+ g_type_free_instance ((GTypeInstance *) obj);
+ }
+}
+
+void mix_params_unref (MixParams *obj)
+{
+ g_return_if_fail (obj != NULL);
+ g_return_if_fail (obj->refcount > 0);
+
+ if (G_UNLIKELY (g_atomic_int_dec_and_test (&obj->refcount))) {
+ mix_params_free (obj);
+ }
+}
+
+/**
+ * mix_params_replace:
+ * @olddata: pointer to a pointer to a object to be replaced
+ * @newdata: pointer to new object
+ *
+ * Modifies a pointer to point to a new object. The modification
+ * is done atomically, and the reference counts are updated correctly.
+ * Either @newdata and the value pointed to by @olddata may be NULL.
+ */
+void mix_params_replace (MixParams **olddata, MixParams *newdata)
+{
+ MixParams *olddata_val;
+
+ g_return_if_fail (olddata != NULL);
+
+ olddata_val = g_atomic_pointer_get ((gpointer *) olddata);
+
+ if (olddata_val == newdata)
+ return;
+
+ if (newdata)
+ mix_params_ref (newdata);
+
+ while (!g_atomic_pointer_compare_and_exchange ((gpointer *) olddata, olddata_val, newdata))
+ {
+ olddata_val = g_atomic_pointer_get ((gpointer *) olddata);
+ }
+
+ if (olddata_val)
+ mix_params_unref (olddata_val);
+
+}
+
+gboolean mix_params_equal (MixParams *first, MixParams *second)
+{
+ if (MIX_IS_PARAMS(first))
+ {
+ MixParamsClass *klass = MIX_PARAMS_GET_CLASS(first);
+
+ if (klass->equal)
+ {
+ return klass->equal(first, second);
+ }
+ else
+ {
+ return mix_params_equal_default(first, second);
+ }
+ }
+ else
+ return FALSE;
+}
+
+static gboolean mix_params_equal_default (MixParams *first, MixParams *second)
+{
+ if (MIX_IS_PARAMS(first) && MIX_IS_PARAMS(second))
+ {
+ gboolean ret = TRUE;
+
+ // Do data comparison here.
+
+ return ret;
+ }
+ else
+ return FALSE;
+}
+
+/**
+ * mix_value_dup_params:
+ * @value: a valid #GValue of %MIX_TYPE_PARAMS derived type
+ * @returns: object contents of @value
+ *
+ * Get the contents of a #MIX_TYPE_PARAMS derived #GValue,
+ * increasing its reference count.
+ */
+MixParams* mix_value_dup_params (const GValue * value)
+{
+ g_return_val_if_fail (MIX_VALUE_HOLDS_PARAMS (value), NULL);
+
+ return mix_params_ref (value->data[0].v_pointer);
+}
+
+
diff --git a/mix_common/src/mixparams.h b/mix_common/src/mixparams.h
new file mode 100644
index 0000000..75d4051
--- /dev/null
+++ b/mix_common/src/mixparams.h
@@ -0,0 +1,202 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel's prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+*/
+
+#ifndef __MIX_PARAMS_H__
+#define __MIX_PARAMS_H__
+
+#include <glib-object.h>
+
+G_BEGIN_DECLS
+
+#define MIX_TYPE_PARAMS (mix_params_get_type())
+#define MIX_IS_PARAMS(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_PARAMS))
+#define MIX_IS_PARAMS_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_PARAMS))
+#define MIX_PARAMS_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_PARAMS, MixParamsClass))
+#define MIX_PARAMS(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_PARAMS, MixParams))
+#define MIX_PARAMS_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_PARAMS, MixParamsClass))
+#define MIX_PARAMS_CAST(obj) ((MixParams*)(obj))
+
+typedef struct _MixParams MixParams;
+typedef struct _MixParamsClass MixParamsClass;
+
+/**
+ * MixParamsDupFunction:
+ * @obj: Params to duplicate
+ * @returns: reference to cloned instance.
+ *
+ * Virtual function prototype for methods to create duplicate of instance.
+ *
+ */
+typedef MixParams * (*MixParamsDupFunction) (const MixParams *obj);
+
+/**
+ * MixParamsCopyFunction:
+ * @target: target of the copy
+ * @src: source of the copy
+ * @returns: boolean indicates if copy is successful.
+ *
+ * Virtual function prototype for methods to create copies of instance.
+ *
+ */
+typedef gboolean (*MixParamsCopyFunction) (MixParams* target, const MixParams *src);
+
+/**
+ * MixParamsFinalizeFunction:
+ * @obj: Params to finalize
+ *
+ * Virtual function prototype for methods to free resources used by
+ * object.
+ */
+typedef void (*MixParamsFinalizeFunction) (MixParams *obj);
+
+/**
+ * MixParamsEqualFunction:
+ * @first: first object in the comparison
+ * @second: second object in the comparison
+ *
+ * Virtual function prototype for methods to compare 2 objects and check if they are equal.
+ */
+typedef gboolean (*MixParamsEqualFunction) (MixParams *first, MixParams *second);
+
+/**
+ * MIX_VALUE_HOLDS_PARAMS:
+ * @value: the #GValue to check
+ *
+ * Checks if the given #GValue contains a #MIX_TYPE_PARAMS value.
+ */
+#define MIX_VALUE_HOLDS_PARAMS(value) (G_VALUE_HOLDS(value, MIX_TYPE_PARAMS))
+
+/**
+ * MIX_PARAMS_REFCOUNT:
+ * @obj: a #MixParams
+ *
+ * Get access to the reference count field of the object.
+ */
+#define MIX_PARAMS_REFCOUNT(obj) ((MIX_PARAMS_CAST(obj))->refcount)
+/**
+ * MIX_PARAMS_REFCOUNT_VALUE:
+ * @obj: a #MixParams
+ *
+ * Get the reference count value of the object
+ */
+#define MIX_PARAMS_REFCOUNT_VALUE(obj) (g_atomic_int_get (&(MIX_PARAMS_CAST(obj))->refcount))
+
+/**
+ * MixParams:
+ * @instance: type instance
+ * @refcount: atomic refcount
+ *
+ * Base class for a refcounted parameter objects.
+ */
+struct _MixParams {
+ GTypeInstance instance;
+ /*< public >*/
+ gint refcount;
+
+ /*< private >*/
+ gpointer _reserved;
+};
+
+/**
+ * MixParamsClass:
+ * @dup: method to duplicate the object.
+ * @copy: method to copy details in one object to the other.
+ * @finalize: destructor
+ * @equal: method to check if the content of two objects are equal.
+ *
+ * #MixParams class struct.
+ */
+struct _MixParamsClass {
+ GTypeClass type_class;
+
+ MixParamsDupFunction dup;
+ MixParamsCopyFunction copy;
+ MixParamsFinalizeFunction finalize;
+ MixParamsEqualFunction equal;
+
+ /*< private >*/
+ gpointer _mix_reserved;
+};
+
+/**
+ * mix_params_get_type:
+ * @returns: type of this object.
+ *
+ * Get type.
+ */
+GType mix_params_get_type(void);
+
+/**
+ * mix_params_new:
+ * @returns: return a newly allocated object.
+ *
+ * Create new instance of the object.
+ */
+MixParams* mix_params_new();
+
+/**
+ * mix_params_copy:
+ * @target: copy to target
+ * @src: copy from source
+ * @returns: boolean indicating if copy is successful.
+ *
+ * Copy data from one instance to the other. This method internally invoked the #MixParams::copy method such that derived object will be copied correctly.
+ */
+gboolean mix_params_copy(MixParams *target, const MixParams *src);
+
+
+/**
+ * mix_params_ref:
+ * @obj: a #MixParams object.
+ * @returns: the object with reference count incremented.
+ *
+ * Increment reference count.
+ */
+MixParams* mix_params_ref(MixParams *obj);
+
+
+/**
+ * mix_params_unref:
+ * @obj: a #MixParams object.
+ *
+ * Decrement reference count.
+ */
+void mix_params_unref (MixParams *obj);
+
+/**
+ * mix_params_replace:
+ * @olddata:
+ * @newdata:
+ *
+ * Replace a pointer of the object with the new one.
+ */
+void mix_params_replace(MixParams **olddata, MixParams *newdata);
+
+/**
+ * mix_params_dup:
+ * @obj: #MixParams object to duplicate.
+ * @returns: A newly allocated duplicate of the object, or NULL if failed.
+ *
+ * Duplicate the given #MixParams and allocate a new instance. This method is chained up properly and derive object will be dupped properly.
+ */
+MixParams *mix_params_dup(const MixParams *obj);
+
+/**
+ * mix_params_equal:
+ * @first: first object to compare
+ * @second: second object to compare
+ * @returns: boolean indicates if the 2 object contains same data.
+ *
+ * Note that the parameter comparison compares the values that are hold inside the object, not for checking if the 2 pointers are of the same instance.
+ */
+gboolean mix_params_equal(MixParams *first, MixParams *second);
+
+G_END_DECLS
+
+#endif
+
diff --git a/mix_common/src/mixresult.h b/mix_common/src/mixresult.h
new file mode 100644
index 0000000..9472a7e
--- /dev/null
+++ b/mix_common/src/mixresult.h
@@ -0,0 +1,90 @@
+/*************************************************************************************
+ * INTEL CONFIDENTIAL
+ * Copyright 2008-2009 Intel Corporation All Rights Reserved.
+ * The source code contained or described herein and all documents related
+ * to the source code ("Material") are owned by Intel Corporation or its
+ * suppliers or licensors. Title to the Material remains with Intel
+ * Corporation or its suppliers and licensors. The Material contains trade
+ * secrets and proprietary and confidential information of Intel or its
+ * suppliers and licensors. The Material is protected by worldwide copyright
+ * and trade secret laws and treaty provisions. No part of the Material may
+ * be used, copied, reproduced, modified, published, uploaded, posted,
+ * transmitted, distributed, or disclosed in any way without Intel’s prior
+ * express written permission.
+ *
+ * No license under any patent, copyright, trade secret or other intellectual
+ * property right is granted to or conferred upon you by disclosure or delivery
+ * of the Materials, either expressly, by implication, inducement, estoppel or
+ * otherwise. Any license under such intellectual property rights must be express
+ * and approved by Intel in writing.
+ ************************************************************************************/
+
+#ifndef MIX_RESULT_H
+#define MIX_RESULT_H
+
+#include <glib.h>
+
+typedef gint32 MIX_RESULT;
+
+#define MIX_SUCCEEDED(result_code) ((((MIX_RESULT)(result_code)) & 0x80000000) == 0)
+
+typedef enum {
+ /** General success */
+ MIX_RESULT_SUCCESS = (MIX_RESULT) 0x00000000,
+ MIX_RESULT_SUCCESS_CHG = (MIX_RESULT)0x00000001,
+
+ /** Module specific success starting number */
+
+ /** Starting success number for Audio */
+ MIX_RESULT_SUCCESS_AUDIO_START = (MIX_RESULT) 0x00010000,
+ /** Starting success number for Video */
+ MIX_RESULT_SUCCESS_VIDEO_START = (MIX_RESULT) 0x00020000,
+ /** Starting success number for DRM */
+ MIX_RESULT_SUCCESS_DRM_START = (MIX_RESULT) 0x00030000
+} MIX_SUCCESS_COMMON;
+
+typedef enum {
+ /** General failure */
+ MIX_RESULT_FAIL = (MIX_RESULT) 0x80000000,
+ MIX_RESULT_NULL_PTR = (MIX_RESULT) 0x80000001,
+ MIX_RESULT_LPE_NOTAVAIL = (MIX_RESULT) 0X80000002,
+ MIX_RESULT_DIRECT_NOTAVAIL = (MIX_RESULT) 0x80000003,
+ MIX_RESULT_NOT_SUPPORTED = (MIX_RESULT) 0x80000004,
+ MIX_RESULT_CONF_MISMATCH = (MIX_RESULT) 0x80000005,
+ MIX_RESULT_RESUME_NEEDED = (MIX_RESULT) 0x80000007,
+ MIX_RESULT_WRONGMODE = (MIX_RESULT) 0x80000008,
+ MIX_RESULT_RESOURCES_NOTAVAIL = (MIX_RESULT)0x80000009,
+ MIX_RESULT_INVALID_PARAM = (MIX_RESULT)0x8000000a,
+ MIX_RESULT_ALREADY_INIT = (MIX_RESULT)0x8000000b,
+ MIX_RESULT_WRONG_STATE = (MIX_RESULT)0x8000000c,
+ MIX_RESULT_NOT_INIT = (MIX_RESULT)0x8000000d,
+ MIX_RESULT_NOT_CONFIGURED = (MIX_RESULT)0x8000000e,
+ MIX_RESULT_STREAM_NOTAVAIL = (MIX_RESULT)0x8000000f,
+ MIX_RESULT_CODEC_NOTAVAIL = (MIX_RESULT)0x80000010,
+ MIX_RESULT_CODEC_NOTSUPPORTED = (MIX_RESULT)0x80000011,
+ MIX_RESULT_INVALID_COUNT = (MIX_RESULT)0x80000012,
+ MIX_RESULT_NOT_ACP = (MIX_RESULT)0x80000013,
+ MIX_RESULT_INVALID_DECODE_MODE = (MIX_RESULT)0x80000014,
+ MIX_RESULT_INVALID_STREAM_NAME = (MIX_RESULT)0x80000015,
+ MIX_RESULT_NO_MEMORY = (MIX_RESULT)0x80000016,
+ MIX_RESULT_NEED_RETRY = (MIX_RESULT)0x80000017,
+ MIX_RESULT_SYSTEM_ERRNO = (MIX_RESULT)0x80000018,
+
+ /** Module specific errors starting number */
+
+ /** Starting error number for Audio */
+ MIX_RESULT_ERROR_AUDIO_START = (MIX_RESULT) 0x80010000,
+ /** Starting error number for Video */
+ MIX_RESULT_ERROR_VIDEO_START = (MIX_RESULT) 0x80020000,
+ /** Starting error number for DRM */
+ MIX_RESULT_ERROR_DRM_START = (MIX_RESULT) 0x80030000
+} MIX_ERROR_COMMON;
+
+ /* New success code should be added just above this line */
+// MIX_RESULT_IAM_DISABLED, /* 0x80000008 */
+// MIX_RESULT_IAM_NOTAVAIL, /* 0x80000009 */
+// MIX_RESULT_IAM_REG_FAILED, /* 0x8000000f */
+
+
+
+#endif // MIX_RESULT_H
diff --git a/mix_vbp/AUTHORS b/mix_vbp/AUTHORS
new file mode 100644
index 0000000..db8081b
--- /dev/null
+++ b/mix_vbp/AUTHORS
@@ -0,0 +1 @@
+linda.s.cline@intel.com
diff --git a/mix_vbp/COPYING b/mix_vbp/COPYING
new file mode 100644
index 0000000..a4f852c
--- /dev/null
+++ b/mix_vbp/COPYING
@@ -0,0 +1,26 @@
+INTEL SOFTWARE LICENSE AGREEMENT (Alpha, Beta, Prototype Site License)
+
+IMPORTANT - READ BEFORE COPYING, INSTALLING OR USING.
+Do not use or load this software and any associated materials (collectively, the “Software”) until you have carefully read the following terms and conditions. By loading or using the Software, you agree to the terms of this Agreement. If you do not wish to so agree, do not install or use the Software.
+
+
+LICENSE. Intel hereby grants you a limited, nontransferable, non-sublicenseable, nonexclusive, royalty-free, fully-paid license under Intel’s copyrights to use the Software on your organization’s computers solely for your organization’s internal evaluation and testing in connection with Intel products designed for the Software, and you may make a reasonable number of  copies of the Software for internal use only, subject to these conditions:
+1. You may not copy, modify, rent, sell, distribute, externally display, externally perform or transfer any part of the Software except as provided in this Agreement, and you agree to prevent unauthorized copying of the Software.
+2. You may not reverse engineer, decompile, or disassemble the Software.
+3. You may not sublicense the Software.
+4. The Software may include portions offered on terms in addition to those set out here, as set out in a license accompanying those portions.
+5. You may not subject the Software, in whole or in part, to any license obligations of Open Source Software including without limitation combining or distributing the Software with Open Source Software in a manner that subjects the Software or any portion of the Software provided by Intel hereunder to any license obligations of such Open Source Software. "Open Source Software" means any software that requires as a condition of use, modification and/or distribution of such software that such software or other software incorporated into, derived from or distributed with such software (a) be disclosed or distributed in source code form; or (b) be licensed by the user to third parties for the purpose of making and/or distributing derivative works; or (c) be redistributable at no charge. Open Source Software includes, without limitation, software licensed or distributed under any of the following licenses or distribution models, or licenses or distribution models substantially similar to any of the following: (a) GNU’s General Public License (GPL) or Lesser/Library GPL (LGPL), (b) the Artistic License (e.g., PERL), (c) the Mozilla Public License, (d) the Netscape Public License, (e) the Sun Community Source License (SCSL), (f) the Sun Industry Source License (SISL), (g) the Apache Software license and (h) the Common Public License (CPL).
+OWNERSHIP OF SOFTWARE AND COPYRIGHTS. Title to all copies of the Software remains with Intel or its suppliers. The Software is copyrighted and protected by the laws of the United States and other countries, and international treaty provisions. You may not remove any copyright notices from the Software. Intel may make changes to the Software, or to items referenced therein, at any time without notice, but is not obligated to support, update, upgrade or provide training for the Software. Except as otherwise expressly provided, Intel grants no express or implied right under Intel patents, copyrights, trademarks, or other intellectual property rights.
+EXCLUSION OF WARRANTIES . THE SOFTWARE IS PROVIDED "AS IS" WITHOUT ANY EXPRESS OR IMPLIED WARRANTY OF ANY KIND INCLUDING WARRANTIES OF MERCHANTABILITY, NONINFRINGEMENT, OR FITNESS FOR A PARTICULAR PURPOSE. Intel does not warrant or assume responsibility for the accuracy or completeness of any information, text, graphics, links or other items contained within the Software.
+LIMITATION OF LIABILITY. IN NO EVENT SHALL INTEL OR ITS SUPPLIERS BE LIABLE FOR ANY DAMAGES WHATSOEVER (INCLUDING, WITHOUT LIMITATION, LOST PROFITS, BUSINESS INTERRUPTION OR LOST INFORMATION) ARISING OUT OF THE USE OF OR INABILITY TO USE THE SOFTWARE, EVEN IF INTEL HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. SOME JURISDICTIONS PROHIBIT EXCLUSION OR LIMITATION OF LIABILITY FOR IMPLIED WARRANTIES OR CONSEQUENTIAL OR INCIDENTAL DAMAGES, SO THE ABOVE LIMITATION MAY NOT APPLY TO YOU. YOU MAY ALSO HAVE OTHER LEGAL RIGHTS THAT VARY FROM JURISDICTION TO JURISDICTION. THE SOFTWARE LICENSED HEREUNDER IS NOT DESIGNED OR INTENDED FOR USE IN ANY APPLICATION IN WHICH THE FAILURE OF THE SOFTWARE COULD LEAD TO PERSONAL INJURY OR DEATH. YOU SHALL INDEMNIFY AND HOLD INTEL AND THE INTEL PARTIES HARMLESS AGAINST ALL CLAIMS, COSTS, DAMAGES, AND EXPENSES, AND REASONABLE ATTORNEY FEES ARISING OUT OF, DIRECTLY OR INDIRECTLY, THE UNINTENDED USE OF THE SOFTWARE AND ANY CLAIM OF PRODUCT LIABILITY, PERSONAL INJURY OR DEATH ASSOCIATED WITH ANY UNINTENDED USE, EVEN IF SUCH CLAIM ALLEGES THAT AN INTEL PARTY WAS NEGLIGENT REGARDING THE DESIGN OR MANUFACTURE OF THE SOFTWARE. THE LIMITED REMEDIES, WARRANTY DISCLAIMER AND LIMITED LIABILITY ARE FUNDAMENTAL ELEMENTS OF THE BASIS OF THE BARGAIN BETWEEN INTEL AND YOU. INTEL WOULD NOT BE ABLE TO PROVIDE THE SOFTWARE WITHOUT SUCH LIMITATIONS.
+TERMINATION OF THIS AGREEMENT. Intel may terminate this Agreement at any time if you violate its terms. Upon termination, you will immediately destroy the Software or return all copies of the Software to Intel (including providing certification of such destruction back to Intel). In the event of termination of this Agreement, all licenses granted to you hereunder shall immediately terminate.
+APPLICABLE LAWS. Claims arising under this Agreement shall be governed by the laws of Delaware, excluding its principles of conflict of laws and the United Nations Convention on Contracts for the Sale of Goods. You may not export the Software in violation of applicable export laws and regulations.
+GOVERNMENT RESTRICTED RIGHTS. The Software is provided with "RESTRICTED RIGHTS." Use, duplication or disclosure by the government is subject to restrictions as set forth in FAR52.227-14 and DFAR252.227-7013 et seq. or their successors. Use of the Software by the government constitutes acknowledgment of Intel's proprietary rights therein. Contractor or Manufacturer is Intel Corporation, 2200 Mission College Blvd., Santa Clara, CA 95052.
+CONFIDENTIALITY. You shall not disclose the terms or existence of this Agreement or use Intel's name in any publications, advertisements, or other announcements without Intel's prior written consent. You do not have any rights to use any Intel trademarks or logos.
+ASSIGNMENT. You may not delegate, assign or transfer this Agreement, the license(s) granted or any of your rights or duties hereunder, expressly, by implication, by operation of law, by way of merger (regardless of whether you are the surviving entity) or acquisition, or otherwise and any attempt to do so, without Intel’s express prior written consent, shall be null and void. Intel may assign this Agreement, and its rights and obligations hereunder, in its sole discretion.
+ENTIRE AGREEMENT. The terms and conditions of this Agreement constitutes the entire agreement between the parties with respect to the subject matter hereof, and merges and supersedes all prior, contemporaneous agreements, understandings, negotiations and discussions. Neither of the parties hereto shall be bound by any conditions, definitions, warranties, understandings or representations with respect to the subject matter hereof other than as expressly provided for herein. Intel is not obligated under any other agreements unless they are in writing and signed by an authorized representative of Intel.
+NO AGENCY Nothing contained herein shall be construed as creating any agency, employment relationship, partnership, principal-agent or other form of joint enterprise between the parties.
+SEVERABILITY In the event that any provision of this Agreement shall be unenforceable or invalid under any applicable law or be so held by an applicable court decision, such unenforceability or invalidity shall not render this Agreement unenforceable or invalid as a whole, and, in such event, such provision shall be changed and interpreted so as to best accomplish the objectives of such unenforceable or invalid provision within the limits of applicable law or applicable court decisions.
+WAIVER The failure of either party to require performance by the other party of any provision hereof shall not affect the full right to require such performance at any time thereafter; nor shall the waiver by either party of a breach of any provision hereof be taken or held to be a waiver of the provision itself.
+CONTROLLING LANGUAGE. Translated versions of this Agreement may be provided in local languages for informational purposes only, provided however, that the English language version of this Agreement shall be controlling in all jurisdictions.
+
diff --git a/mix_vbp/ChangeLog b/mix_vbp/ChangeLog
new file mode 100644
index 0000000..139597f
--- /dev/null
+++ b/mix_vbp/ChangeLog
@@ -0,0 +1,2 @@
+
+
diff --git a/mix_vbp/INSTALL b/mix_vbp/INSTALL
new file mode 100644
index 0000000..50e1648
--- /dev/null
+++ b/mix_vbp/INSTALL
@@ -0,0 +1,4 @@
+run the following to build and install:
+./autogen.sh
+./configure
+make
diff --git a/mix_vbp/Makefile.am b/mix_vbp/Makefile.am
new file mode 100644
index 0000000..a8b59cd
--- /dev/null
+++ b/mix_vbp/Makefile.am
@@ -0,0 +1,9 @@
+SUBDIRS = viddec_fw/fw/parser
+
+#Uncomment the following line if building documentation using gtkdoc
+#SUBDIRS += docs
+
+pkgconfigdir = $(libdir)/pkgconfig
+pkgconfig_DATA=mixvbp.pc
+EXTRA_DIST = autogen.sh mixvbp.spec
+DISTCHECK_CONFIGURE_FLAGS=--enable-gtk-doc
diff --git a/mix_vbp/Merge_readme.txt b/mix_vbp/Merge_readme.txt
new file mode 100644
index 0000000..90936bb
--- /dev/null
+++ b/mix_vbp/Merge_readme.txt
@@ -0,0 +1,2 @@
+DHG revision #218237
+
diff --git a/mix_vbp/NEWS b/mix_vbp/NEWS
new file mode 100644
index 0000000..139597f
--- /dev/null
+++ b/mix_vbp/NEWS
@@ -0,0 +1,2 @@
+
+
diff --git a/mix_vbp/README b/mix_vbp/README
new file mode 100644
index 0000000..2bcf017
--- /dev/null
+++ b/mix_vbp/README
@@ -0,0 +1,2 @@
+MIX Video is a user library interface for various hardware video codecs available on the platform.
+
diff --git a/mix_vbp/autogen.sh b/mix_vbp/autogen.sh
new file mode 100644
index 0000000..ed2c536
--- /dev/null
+++ b/mix_vbp/autogen.sh
@@ -0,0 +1,19 @@
+#INTEL CONFIDENTIAL
+#Copyright 2009 Intel Corporation All Rights Reserved.
+#The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+#No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+#
+
+package=MIXVBP
+
+#Uncomment the following line if building documentation using gtkdoc
+#gtkdocize --flavour no-tmpl || exit 1
+aclocal -I m4/ $ACLOCAL_FLAGS || exit 1
+libtoolize --copy --force || exit 1
+autoheader -v || exit 1
+autoconf -v || exit 1
+automake -a -c -v || exit 1
+
+echo "Now type ./configure to configure $package."
+exit 0
diff --git a/mix_vbp/configure.ac b/mix_vbp/configure.ac
new file mode 100644
index 0000000..93a9081
--- /dev/null
+++ b/mix_vbp/configure.ac
@@ -0,0 +1,77 @@
+AC_INIT("", "", [linda.s.cline@intel.com])
+
+AC_CONFIG_MACRO_DIR(m4)
+
+AS_MIX_VERSION(mixvbp, MIXVBP, 0, 1, 15)
+
+dnl AM_MAINTAINER_MODE provides the option to enable maintainer mode
+AM_MAINTAINER_MODE
+
+AM_INIT_AUTOMAKE($PACKAGE, $VERSION)
+
+dnl make aclocal work in maintainer mode
+AC_SUBST(ACLOCAL_AMFLAGS, "-I m4")
+
+AM_CONFIG_HEADER(config.h)
+
+dnl check for tools
+AC_PROG_CC
+AC_PROG_LIBTOOL
+
+MIX_CFLAGS="-Wall -Werror"
+
+dnl decide on error flags
+dnl AS_COMPILER_FLAG(-Wall, MIX_ERROR="$MIX_ERROR -Wall", MIX_ERROR="$MIX_ERROR")
+dnl AS_COMPILER_FLAG(-Werror,MIX_ERROR="$MIX_ERROR -Werror",MIX_ERROR="$MIX_ERROR")
+
+dnl Check for pkgconfig first
+AC_CHECK_PROG(HAVE_PKGCONFIG, pkg-config, yes, no)
+
+dnl Give error and exit if we don't have pkgconfig
+if test "x$HAVE_PKGCONFIG" = "xno"; then
+ AC_MSG_ERROR(you need to have pkgconfig installed !)
+fi
+
+dnl GLib
+dnl FIXME: need to align with moblin glib version
+dnl FIXME: currently using an earlier version so it can be built on dev box.
+GLIB_REQ=2.16
+
+dnl Check for glib2 without extra fat, useful for the unversioned tool frontends
+dnl PKG_CHECK_MODULES(GLIB_ONLY, glib-2.0 >= $GLIB_REQ,HAVE_GLIB=yes,HAVE_GLIB=no)
+PKG_CHECK_MODULES(GLIB, glib-2.0 >= $GLIB_REQ,HAVE_GLIB=yes,HAVE_GLIB=no)
+if test "x$HAVE_GLIB" = "xno"; then
+ AC_MSG_ERROR(You need glib development packages installed !)
+fi
+
+PKG_CHECK_MODULES(GOBJECT, gobject-2.0 >= $GLIB_REQ,HAVE_GOBJECT=yes,HAVE_GOBJECT=no)
+if test "x$HAVE_GOBJECT" = "xno"; then
+ AC_MSG_ERROR(You need glib development packages installed !)
+fi
+
+PKG_CHECK_MODULES(GTHREAD, gthread-2.0 >= $GLIB_REQ,HAVE_GTHREAD=yes,HAVE_GTHREAD=no)
+if test "x$HAVE_GTHREAD" = "xno"; then
+ AC_MSG_ERROR(You need glib development packages installed !)
+fi
+
+dnl Check for documentation xrefs
+dnl GLIB_PREFIX="`$PKG_CONFIG --variable=prefix glib-2.0`"
+dnl AC_SUBST(GLIB_PREFIX)
+
+AC_SUBST(GLIB_CFLAGS)
+AC_SUBST(GLIB_LIBS)
+AC_SUBST(GOBJECT_CFLAGS)
+AC_SUBST(GOBJECT_LIBS)
+AC_SUBST(MIX_CFLAGS)
+AC_SUBST(GTHREAD_CFLAGS)
+AC_SUBST(GTHREAD_LIBS)
+
+AC_CONFIG_FILES([
+mixvbp.pc
+Makefile
+viddec_fw/fw/parser/Makefile
+])
+
+AC_OUTPUT
+
+
diff --git a/mix_vbp/m4/Makefile.am b/mix_vbp/m4/Makefile.am
new file mode 100644
index 0000000..66381d4
--- /dev/null
+++ b/mix_vbp/m4/Makefile.am
@@ -0,0 +1 @@
+EXTRA_DIST +=
diff --git a/mix_vbp/m4/as-mix-version.m4 b/mix_vbp/m4/as-mix-version.m4
new file mode 100644
index 0000000..f0301b1
--- /dev/null
+++ b/mix_vbp/m4/as-mix-version.m4
@@ -0,0 +1,35 @@
+dnl as-mix-version.m4
+
+dnl AS_MIX_VERSION(PACKAGE, PREFIX, MAJOR, MINOR, RELEASE)
+
+dnl example
+dnl AS_MIX_VERSION(mixvideo,MIXVIDEO, 0, 3, 2,)
+dnl for a 0.3.2 release version
+
+dnl this macro
+dnl - defines [$PREFIX]_MAJOR, MINOR and REVISION, CURRENT, AGE
+dnl - defines [$PREFIX], VERSION
+dnl - AC_SUBST's all defined vars
+
+AC_DEFUN([AS_MIX_VERSION],
+[
+ PACKAGE=[$1]
+ [$2]_MAJOR=[$3]
+ [$2]_MINOR=[$4]
+ [$2]_REVISION=[$5]
+ [$2]_CURRENT=m4_eval([$3] + [$4])
+ [$2]_AGE=[$4]
+ VERSION=[$3].[$4].[$5]
+
+ AC_SUBST([$2]_MAJOR)
+ AC_SUBST([$2]_MINOR)
+ AC_SUBST([$2]_REVISION)
+ AC_SUBST([$2]_CURRENT)
+ AC_SUBST([$2]_AGE)
+
+ AC_DEFINE_UNQUOTED(PACKAGE, "$PACKAGE", [Define the package name])
+ AC_SUBST(PACKAGE)
+ AC_DEFINE_UNQUOTED(VERSION, "$VERSION", [Define the version])
+ AC_SUBST(VERSION)
+
+])
diff --git a/mix_vbp/mixvbp.pc.in b/mix_vbp/mixvbp.pc.in
new file mode 100644
index 0000000..a1ec3a0
--- /dev/null
+++ b/mix_vbp/mixvbp.pc.in
@@ -0,0 +1,11 @@
+prefix=@prefix@
+exec_prefix=@exec_prefix@
+libdir=@libdir@
+includedir=@includedir@
+
+Name: MIX Parser
+Description: MIX Video Parser Library
+Version: @VERSION@
+Libs: -L${libdir} -l@PACKAGE@
+Cflags: -I${includedir}/mixvbp
+
diff --git a/mix_vbp/mixvbp.spec b/mix_vbp/mixvbp.spec
new file mode 100644
index 0000000..da15b9d
--- /dev/null
+++ b/mix_vbp/mixvbp.spec
@@ -0,0 +1,52 @@
+# INTEL CONFIDENTIAL
+# Copyright 2009 Intel Corporation All Rights Reserved.
+# The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+#
+# No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+
+Summary: MIX Video Bitstream Parser
+Name: mixvbp
+Version: 0.1.15
+Release: 1
+Source0: %{name}-%{version}.tar.gz
+NoSource: 0
+License: Proprietary
+Group: System Environment/Libraries
+BuildRoot: %{_tmppath}/%{name}-root
+ExclusiveArch: i586
+
+%description
+MIX Video Bitstream Parser is a user library interface for various video format bitstream parsing
+
+%package devel
+Summary: Libraries include files
+Group: Development/Libraries
+Requires: %{name} = %{version}
+
+%description devel
+The %{name}-devel package contains the header files and static libraries for building applications which use %{name}.
+
+%prep
+%setup -q
+%build
+./autogen.sh
+./configure --prefix=%{_prefix}
+make
+%install
+make DESTDIR=$RPM_BUILD_ROOT install
+rm -f $RPM_BUILD_ROOT/%{_prefix}/lib/libmixvbp_mpeg2*
+
+%clean
+rm -rf $RPM_BUILD_ROOT
+%files
+%defattr(-,root,root)
+%{_prefix}/lib/libmixvbp.so*
+%{_prefix}/lib/libmixvbp_vc1.so*
+%{_prefix}/lib/libmixvbp_h264.so*
+%{_prefix}/lib/libmixvbp_mpeg4.so*
+
+%files devel
+%defattr(-,root,root)
+%{_prefix}/include
+%{_prefix}/lib/*.la
+%{_prefix}/lib/pkgconfig/mixvbp.pc
diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/include/h264.h b/mix_vbp/viddec_fw/fw/codecs/h264/include/h264.h
new file mode 100644
index 0000000..51f0602
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/h264/include/h264.h
@@ -0,0 +1,1034 @@
+/* ///////////////////////////////////////////////////////////////////////
+//
+// INTEL CORPORATION PROPRIETARY INFORMATION
+// This software is supplied under the terms of a license agreement or
+// nondisclosure agreement with Intel Corporation and may not be copied
+// or disclosed except in accordance with the terms of that agreement.
+// Copyright (c) 2001-2006 Intel Corporation. All Rights Reserved.
+//
+// Description: H.264 header.
+//
+*/
+
+
+#ifndef _H264_H_
+#define _H264_H_
+
+#ifdef HOST_ONLY
+#include <stdio.h>
+#include <stdlib.h>
+#include <memory.h>
+#endif
+
+#include "stdint.h"
+#include "viddec_debug.h"
+
+#include "viddec_fw_workload.h"
+#include "h264parse_sei.h"
+
+
+#ifdef WIN32
+#define mfd_printf OS_INFO
+#endif
+
+#ifdef H264_VERBOSE
+#define PRINTF(format, args...) OS_INFO("%s: %s[%d]:: " format, __FILE__, __FUNCTION__ , __LINE__ , ## args )
+#else
+//#define PRINTF(args...)
+#endif
+
+//#pragma warning(disable : 4710) // function not inlined
+//#pragma warning(disable : 4514) // unreferenced inline function has been removed CL
+//#pragma warning(disable : 4100) // unreferenced formal parameter CL
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#define MAX_INT32_VALUE 0x7fffffff
+
+#define MAX_NUM_REF_FRAMES_IN_PIC_ORDER_CNT_CYCLE 256
+#define MAX_CPB_CNT 32
+#define MAX_NUM_SLICE_GRPS 1 //As per Annex A for high profile, the num_slice_groups_minus1 is 0
+#define MAX_PIC_LIST_NUM 8
+
+//#define MAX_PIC_SIZE_IN_MAP_UNITS 1024 //0 ???????? Henry
+#define MAX_NUM_REF_IDX_L0_ACTIVE 32
+//#define STARTCODE_BUF_SIZE 2048+1024
+
+#define NUM_MMCO_OPERATIONS 17
+
+// Used to check whether the SEI RP is the only way for recovery (cisco contents)
+// This threshold will decide the interval of recovery even no error detected if no IDR during this time
+#define SEI_REC_CHECK_TH 8
+
+//SPS
+#define MAX_NUM_SPS 32
+#define SCL_DEFAULT 1
+
+//PPS
+#define MAX_PIC_PARAMS 255
+#define MAX_NUM_REF_FRAMES 32
+#define MAX_QP 51
+#define MAX_NUM_PPS 256
+
+#define PUT_FS_IDC_BITS(w) (w&0x1F)
+#define PUT_LIST_INDEX_FIELD_BIT(w) ((w&0x1)<<5)
+#define PUT_LIST_LONG_TERM_BITS(w) ((w&0x1)<<6)
+#define PUT_LIST_PTR_LIST_ID_BIT(id) (id<<5)
+
+
+// DPB
+#define FRAME_FLAG_DANGLING_TOP_FIELD ( 0x1 << 3 )
+#define FRAME_FLAG_DANGLING_BOTTOM_FIELD ( 0x1 << 4 )
+
+#define MPD_DPB_FS_NULL_IDC 31 // May need to be changed if we alter gaps_in_frame_num to use
+
+#define MFD_H264_MAX_FRAME_BUFFERS 17
+#define NUM_DPB_FRAME_STORES (MFD_H264_MAX_FRAME_BUFFERS + 1) // 1 extra for storing non-existent pictures.
+
+//Scaling Matrix Type
+#define PPS_QM 0
+#define SPS_QM 1
+#define FB_QM 2
+#define DEFAULT_QM 3
+
+//Frame Type
+#define FRAME_TYPE_IDR 0x00
+#define FRAME_TYPE_I 0x01
+#define FRAME_TYPE_P 0x02
+#define FRAME_TYPE_B 0x03
+#define FRAME_TYPE_INVALID 0x04
+
+
+#define FRAME_TYPE_FRAME_OFFSET 3
+#define FRAME_TYPE_TOP_OFFSET 3
+#define FRAME_TYPE_BOTTOM_OFFSET 0
+#define FRAME_TYPE_STRUCTRUE_OFFSET 6
+
+//// Error handling
+#define FIELD_ERR_OFFSET 17 //offset for Field error flag ----refer to the structure definition viddec_fw_workload_error_codes in viddec_fw_common_defs.h
+
+////Bits Handling
+#define h264_bitfields_extract(x_32, start, mask) (((x_32) >> (start)) & (mask) )
+#define h264_bitfields_insert(x_32, val_32, start, mask) ((x_32) = (((x_32) & ~( (mask) << (start))) | (((val_32) & (mask)) << (start))))
+
+
+//// PIP
+typedef enum _pip_setting_t
+{
+ PIP_SCALER_DISABLED,
+ PIP_SCALE_FACTOR_1_BY_4,
+ PIP_SCALE_FACTOR_1_BY_2,
+ PIP_SCALER_INVALID,
+
+} pip_setting_t;
+
+
+#ifdef VERBOSE
+#define DEBUGGETBITS(args...) OS_INFO( args )
+#else
+//#define DEBUGGETBITS(args...)
+#endif
+
+/* status codes */
+typedef enum _h264_Status
+{
+ H264_STATUS_EOF = 1, // end of file
+ H264_STATUS_OK = 0, // no error
+ H264_STATUS_NO_MEM = 2, // out of memory
+ H264_STATUS_FILE_ERROR = 3, // file error
+ H264_STATUS_NOTSUPPORT = 4, // not supported mode
+    H264_STATUS_PARSE_ERROR    =  5,   // failed to parse the H.264 stream
+ H264_STATUS_ERROR = 6, // unknown/unspecified error
+ H264_NAL_ERROR,
+ H264_SPS_INVALID_PROFILE,
+ H264_SPS_INVALID_LEVEL,
+ H264_SPS_INVALID_SEQ_PARAM_ID,
+ H264_SPS_ERROR,
+ H264_PPS_INVALID_PIC_ID,
+ H264_PPS_INVALID_SEQ_ID,
+ H264_PPS_ERROR,
+ H264_SliceHeader_INVALID_MB,
+ H264_SliceHeader_ERROR,
+ H264_FRAME_DONE,
+ H264_SLICE_DONE,
+ H264_STATUS_POLL_ONCE_ERROR,
+ H264_STATUS_DEC_MEMINIT_ERROR,
+ H264_STATUS_NAL_UNIT_TYPE_ERROR,
+ H264_STATUS_SEI_ERROR,
+ H264_STATUS_SEI_DONE,
+} h264_Status;
+
+
+
+typedef enum _picture_structure_t
+{
+ TOP_FIELD = 1,
+ BOTTOM_FIELD = 2,
+ FRAME = 3,
+ INVALID = 4
+} picture_structure_t;
+
+///// Chroma format
+
+typedef enum _h264_chroma_format_t
+{
+ H264_CHROMA_MONOCHROME,
+ H264_CHROMA_420,
+ H264_CHROMA_422,
+ H264_CHROMA_444,
+}h264_chroma_format_t;
+
+/* H264 start code values */
+typedef enum _h264_nal_unit_type
+{
+ h264_NAL_UNIT_TYPE_unspecified = 0,
+ h264_NAL_UNIT_TYPE_SLICE,
+ h264_NAL_UNIT_TYPE_DPA,
+ h264_NAL_UNIT_TYPE_DPB,
+ h264_NAL_UNIT_TYPE_DPC,
+ h264_NAL_UNIT_TYPE_IDR,
+ h264_NAL_UNIT_TYPE_SEI,
+ h264_NAL_UNIT_TYPE_SPS,
+ h264_NAL_UNIT_TYPE_PPS,
+ h264_NAL_UNIT_TYPE_Acc_unit_delimiter,
+ h264_NAL_UNIT_TYPE_EOSeq,
+ h264_NAL_UNIT_TYPE_EOstream,
+ h264_NAL_UNIT_TYPE_filler_data,
+ h264_NAL_UNIT_TYPE_SPS_extension,
+ h264_NAL_UNIT_TYPE_Reserved1 =14, /*14-18*/
+ h264_NAL_UNIT_TYPE_Reserved2 =15, /*14-18*/
+ h264_NAL_UNIT_TYPE_Reserved3 =16, /*14-18*/
+ h264_NAL_UNIT_TYPE_Reserved4 =17, /*14-18*/
+ h264_NAL_UNIT_TYPE_Reserved5 =18, /*14-18*/
+ h264_NAL_UNIT_TYPE_ACP =19,
+ h264_NAL_UNIT_TYPE_Reserved6 =20, /*20-23*/
+ h264_NAL_UNIT_TYPE_unspecified2 =24, /*24-31*/
+} h264_nal_unit_type;
+
+#define h264_NAL_PRIORITY_HIGHEST 3
+#define h264_NAL_PRIORITY_HIGH 2
+#define h264_NAL_PRIRITY_LOW 1
+#define h264_NAL_PRIORITY_DISPOSABLE 0
+
+
+typedef enum _h264_Profile
+{
+ h264_ProfileBaseline = 66, /** Baseline profile */
+ h264_ProfileMain = 77, /** Main profile */
+ h264_ProfileExtended = 88, /** Extended profile */
+ h264_ProfileHigh = 100 , /** High profile */
+ h264_ProfileHigh10 = 110, /** High 10 profile */
+ h264_ProfileHigh422 = 122, /** High profile 4:2:2 */
+ h264_ProfileHigh444 = 144, /** High profile 4:4:4 */
+} h264_Profile;
+
+
+typedef enum _h264_Level
+{
+ h264_Level1b = 9, /** Level 1b */
+ h264_Level1 = 10, /** Level 1 */
+ h264_Level11 = 11, /** Level 1.1 */
+ h264_Level12 = 12, /** Level 1.2 */
+ h264_Level13 = 13, /** Level 1.3 */
+ h264_Level2 = 20, /** Level 2 */
+ h264_Level21 = 21, /** Level 2.1 */
+ h264_Level22 = 22, /** Level 2.2 */
+ h264_Level3 = 30, /** Level 3 */
+ h264_Level31 = 31, /** Level 3.1 */
+ h264_Level32 = 32, /** Level 3.2 */
+ h264_Level4 = 40, /** Level 4 */
+ h264_Level41 = 41, /** Level 4.1 */
+ h264_Level42 = 42, /** Level 4.2 */
+ h264_Level5 = 50, /** Level 5 */
+ h264_Level51 = 51, /** Level 5.1 */
+ h264_LevelReserved = 255 /** Unknown profile */
+} h264_Level;
+
+
+typedef enum _h264_video_format
+{
+ h264_Component =0,
+ h264_PAL,
+ h264_NTSC,
+ h264_SECAM,
+ h264_MAC,
+ h264_unspecified,
+ h264_Reserved6,
+ h264_Reserved7
+}h264_video_format;
+
+
+typedef enum _h264_fcm
+{
+ h264_ProgressiveFrame = 0,
+ h264_InterlacedFrame = 1,
+ h264_InterlacedField = 3,
+ h264_PictureFormatNone
+} h264_fcm;
+
+
+///// Define the picture types []
+typedef enum _h264_ptype_t
+{
+ h264_PtypeP = 0,
+ h264_PtypeB = 1,
+ h264_PtypeI = 2,
+ h264_PtypeSP = 3,
+ h264_PtypeSI = 4,
+ h264_Ptype_unspecified,
+} h264_ptype_t;
+
+
+///// Aspect ratio
+typedef enum _h264_aspect_ratio
+{
+ h264_AR_Unspecified = 0,
+ h264_AR_1_1 = 1,
+ h264_AR_12_11 = 2,
+ h264_AR_10_11 = 3,
+ h264_AR_16_11 = 4,
+ h264_AR_40_33 = 5,
+ h264_AR_24_11 = 6,
+ h264_AR_20_11 = 7,
+ h264_AR_32_11 = 8,
+ h264_AR_80_33 = 9,
+ h264_AR_18_11 = 10,
+ h264_AR_15_11 = 11,
+ h264_AR_64_33 = 12,
+ h264_AR_160_99 = 13,
+ h264_AR_4_3 = 14,
+ h264_AR_3_2 = 15,
+ h264_AR_2_1 = 16,
+ h264_AR_RESERVED = 17,
+ h264_AR_Extended_SAR = 255,
+}h264_aspect_ratio;
+
+
+//////////////////////////////////////////////
+
+//////////////////////////////////////////////
+// storable_picture
+
+/* Structure details
+ If all members remain ints
+ Size = 11 ints, i.e. 44 bytes
+*/
+
+typedef struct
+{
+ int32_t poc;
+ int32_t pic_num;
+
+ int32_t long_term_pic_num;
+
+ uint8_t long_term_frame_idx;
+ uint8_t is_long_term;
+ uint8_t used_for_reference;
+ uint8_t pad_flag; // Used to indicate the status
+
+} storable_picture, *storable_picture_ptr;
+
+//////////////////////////////////////////////
+// frame store
+
+/* Structure details
+ If all members remain ints
+ Size = 46 ints, i.e. 184 bytes
+*/
+
+typedef struct _frame_store
+{
+ storable_picture frame;
+ storable_picture top_field;
+ storable_picture bottom_field;
+
+ int32_t frame_num;
+
+ int32_t frame_num_wrap;
+
+
+ uint8_t fs_idc;
+ uint8_t pic_type; //bit7 structure: 1 frame , 0 field;
+ //bit4,5,6 top field (frame) pic type, 00 IDR 01 I 10 P 11 B 100 INVALID
+ //bit1,2,3 bottom pic type, 00 IDR 01 I 10 P 11 B 100 INVALID
+ uint8_t long_term_frame_idx; // No two frame stores may have the same long-term frame index
+
+ #define viddec_h264_get_dec_structure(x) h264_bitfields_extract( (x)->fs_flag_1, 0, 0x03)
+ #define viddec_h264_set_dec_structure(x, val) h264_bitfields_insert ( (x)->fs_flag_1, (val), 0, 0x03)
+ #define viddec_h264_get_is_used(x) h264_bitfields_extract( (x)->fs_flag_1, 2, 0x03)
+ #define viddec_h264_set_is_frame_used(x, val) h264_bitfields_insert ( (x)->fs_flag_1, (val), 2, 0x03)
+ #define viddec_h264_set_is_top_used(x, val) h264_bitfields_insert ( (x)->fs_flag_1, (val), 2, 0x01)
+ #define viddec_h264_set_is_bottom_used(x, val) h264_bitfields_insert ( (x)->fs_flag_1, (val), 3, 0x01)
+ #define viddec_h264_get_is_skipped(x) h264_bitfields_extract( (x)->fs_flag_1, 4, 0x03)
+ #define viddec_h264_set_is_frame_skipped(x, val) h264_bitfields_insert ( (x)->fs_flag_1, (val), 4, 0x03)
+ #define viddec_h264_set_is_top_skipped(x, val) h264_bitfields_insert ( (x)->fs_flag_1, (val), 4, 0x01)
+ #define viddec_h264_set_is_bottom_skipped(x, val) h264_bitfields_insert ( (x)->fs_flag_1, (val), 5, 0x01)
+ #define viddec_h264_get_is_long_term(x) h264_bitfields_extract( (x)->fs_flag_1, 6, 0x03)
+ #define viddec_h264_set_is_frame_long_term(x, val) h264_bitfields_insert ( (x)->fs_flag_1, (val), 6, 0x03)
+ #define viddec_h264_set_is_top_long_term(x, val) h264_bitfields_insert ( (x)->fs_flag_1, (val), 6, 0x01)
+ #define viddec_h264_set_is_bottom_long_term(x, val) h264_bitfields_insert ( (x)->fs_flag_1, (val), 7, 0x01)
+ uint8_t fs_flag_1;
+
+
+ #define viddec_h264_get_is_non_existent(x) h264_bitfields_extract( (x)->fs_flag_2, 0, 0x01)
+ #define viddec_h264_set_is_non_existent(x, val) h264_bitfields_insert ( (x)->fs_flag_2, (val), 0, 0x01)
+ #define viddec_h264_get_is_output(x) h264_bitfields_extract( (x)->fs_flag_2, 1, 0x01)
+ #define viddec_h264_set_is_output(x, val) h264_bitfields_insert ( (x)->fs_flag_2, (val), 1, 0x01)
+ #define viddec_h264_get_is_dangling(x) h264_bitfields_extract( (x)->fs_flag_2, 2, 0x01)
+ #define viddec_h264_set_is_dangling(x, val) h264_bitfields_insert ( (x)->fs_flag_2, (val), 2, 0x01)
+ #define viddec_h264_get_recovery_pt_picture(x) h264_bitfields_extract( (x)->fs_flag_2, 3, 0x01)
+ #define viddec_h264_set_recovery_pt_picture(x, val) h264_bitfields_insert ( (x)->fs_flag_2, (val), 3, 0x01)
+ #define viddec_h264_get_broken_link_picture(x) h264_bitfields_extract( (x)->fs_flag_2, 4, 0x01)
+ #define viddec_h264_set_broken_link_picture(x, val) h264_bitfields_insert ( (x)->fs_flag_2, (val), 4, 0x01)
+ #define viddec_h264_get_open_gop_entry(x) h264_bitfields_extract( (x)->fs_flag_2, 5, 0x01)
+ #define viddec_h264_set_open_gop_entry(x, val) h264_bitfields_insert ( (x)->fs_flag_2, (val), 5, 0x01)
+ #define viddec_h264_get_first_field_intra(x) h264_bitfields_extract( (x)->fs_flag_2, 6, 0x01)
+ #define viddec_h264_set_first_field_intra(x, val) h264_bitfields_insert ( (x)->fs_flag_2, (val), 6, 0x01)
+ uint8_t fs_flag_2;
+
+ uint8_t fs_flag_reserve_1;
+ uint8_t fs_flag_reserve_2;
+ uint8_t fs_flag_reserve_3;
+
+ // If non-reference, may have skipped pixel decode
+ //uint8_t non_ref_skipped;
+} frame_store, *frame_param_ptr;
+
+//! Decoded Picture Buffer
+typedef struct _h264_decoded_picture_buffer
+{
+ ///
+ int32_t last_output_poc;
+ int32_t max_long_term_pic_idx;
+
+ //// Resolutions
+ int32_t PicWidthInMbs;
+ int32_t FrameHeightInMbs;
+
+ frame_store fs[NUM_DPB_FRAME_STORES];
+
+ uint8_t fs_ref_idc[16];
+ uint8_t fs_ltref_idc[16];
+
+ uint8_t fs_dpb_idc[NUM_DPB_FRAME_STORES+2];
+
+ uint8_t listX_0[33+3]; // [bit5}:field_flag:0 for top, 1 for bottom, [bit4~0]:fs_idc
+ uint8_t listX_1[33+3];
+
+ uint8_t listXsize[2]; // 1 to 32
+ uint8_t nInitListSize[2];
+
+ //uint32_t size;
+ uint8_t fs_dec_idc;
+ uint8_t fs_non_exist_idc;
+ uint8_t BumpLevel;
+ uint8_t used_size;
+
+ uint8_t OutputLevel;
+ uint8_t OutputLevelValid;
+ uint8_t OutputCtrl;
+ uint8_t num_ref_frames;
+
+ uint8_t ref_frames_in_buffer;
+ uint8_t ltref_frames_in_buffer;
+ uint8_t SuspendOutput;
+ uint8_t WaitSeiRecovery;
+
+
+ uint8_t frame_numbers_need_to_be_allocated;
+ uint8_t frame_id_need_to_be_allocated;
+
+ //// frame list to release from dpb, need be displayed
+ uint8_t frame_numbers_need_to_be_removed;
+ uint8_t frame_id_need_to_be_removed[17];
+
+ //// frame list to be removed from dpb but not displayed
+ uint8_t frame_numbers_need_to_be_dropped;
+ uint8_t frame_id_need_to_be_dropped[17];
+
+ //// frame list to display (in display order)
+ uint8_t frame_numbers_need_to_be_displayed;
+ uint8_t frame_id_need_to_be_displayed[17];
+
+
+} h264_DecodedPictureBuffer;
+
+
+//////////////////////////////////////////////
+// qm_matrix_set
+typedef struct _qm_matrix_set
+{
+ // uint8_t scaling_default_vector;
+ uint8_t scaling_list[56]; // 0 to 23 for qm 0 to 5 (4x4), 24 to 55 for qm 6 & 7 (8x8)
+
+} qm_matrix_set, *qm_matrix_set_ptr;
+
+/*
+///////// Currently not enabled in parser fw///////////////////
+typedef struct _h264_SPS_Extension_RBSP {
+ int32_t seq_parameter_set_id; //UE
+ int32_t aux_format_idc; //UE
+ int32_t bit_depth_aux_minus8; //UE
+ int32_t alpha_incr_flag;
+ int32_t alpha_opaque_value;
+ int32_t alpha_transparent_value;
+ int32_t additional_extension_flag;
+// h264_rbsp_trail_set* rbsp_trail_ptr;
+}h264_SPS_Extension_RBSP_t;
+*/
+
+typedef struct _h264_hrd_param_set {
+ int32_t bit_rate_value_minus1[MAX_CPB_CNT]; // ue(v), 0 to (2^32)-2
+ int32_t cpb_size_value_minus1[MAX_CPB_CNT]; // ue(v), 0 to (2^32)-2
+
+ uint8_t cbr_flag[MAX_CPB_CNT]; // u(1) * 32
+
+} h264_hrd_param_set, *h264_hrd_param_set_ptr;
+
+typedef struct _vui_seq_parameters_t_used
+{
+ uint32_t num_units_in_tick; // u(32)
+ uint32_t time_scale; // u(32)
+
+ int32_t num_reorder_frames; // ue(v), 0 to max_dec_frame_buffering
+ int32_t max_dec_frame_buffering; // ue(v), 0 to MaxDpbSize, specified in subclause A.3
+
+ uint16_t sar_width; // u(16)
+ uint16_t sar_height; // u(16)
+
+ uint8_t aspect_ratio_info_present_flag; // u(1)
+ uint8_t aspect_ratio_idc; // u(8)
+ uint8_t video_signal_type_present_flag; // u(1)
+ uint8_t video_format; // u(3)
+
+ uint8_t colour_description_present_flag; // u(1)
+ uint8_t colour_primaries; // u(8)
+ uint8_t transfer_characteristics; // u(8)
+ uint8_t timing_info_present_flag; // u(1)
+
+ uint8_t fixed_frame_rate_flag; // u(1)
+ uint8_t low_delay_hrd_flag; // u(1)
+ uint8_t bitstream_restriction_flag; // u(1)
+ uint8_t pic_struct_present_flag;
+
+ uint8_t nal_hrd_parameters_present_flag; // u(1)
+ uint8_t nal_hrd_cpb_removal_delay_length_minus1; // u(5)
+ uint8_t nal_hrd_dpb_output_delay_length_minus1; // u(5)
+ uint8_t nal_hrd_time_offset_length; // u(5)
+
+ uint8_t nal_hrd_cpb_cnt_minus1; // ue(v), 0 to 31
+ uint8_t nal_hrd_initial_cpb_removal_delay_length_minus1; // u(5)
+ uint8_t vcl_hrd_parameters_present_flag; // u(1)
+ uint8_t vcl_hrd_cpb_removal_delay_length_minus1; // u(5)
+
+ uint8_t vcl_hrd_dpb_output_delay_length_minus1; // u(5)
+ uint8_t vcl_hrd_time_offset_length; // u(5)
+ uint8_t vcl_hrd_cpb_cnt_minus1; // ue(v), 0 to 31
+ uint8_t vcl_hrd_initial_cpb_removal_delay_length_minus1; // u(5)
+
+ /////// Here should be kept as 32-bits aligned for next structures
+ /// 2 structures for NAL&VCL HRD
+
+
+} vui_seq_parameters_t_used;
+
+
+typedef struct _vui_seq_parameters_t_not_used
+{
+ int16_t chroma_sample_loc_type_top_field; // ue(v)
+ int16_t chroma_sample_loc_type_bottom_field; // ue(v)
+
+ uint8_t overscan_info_present_flag; // u(1)
+ uint8_t overscan_appropriate_flag; // u(1)
+
+ uint8_t video_full_range_flag; // u(1)
+ uint8_t matrix_coefficients; // u(8)
+
+ uint8_t chroma_location_info_present_flag; // u(1)
+ uint8_t max_bytes_per_pic_denom; // ue(v), 0 to 16
+ uint8_t max_bits_per_mb_denom; // ue(v), 0 to 16
+ uint8_t log2_max_mv_length_vertical; // ue(v), 0 to 16, default to 16
+ uint8_t log2_max_mv_length_horizontal; // ue(v), 0 to 16, default to 16
+
+ uint8_t motion_vectors_over_pic_boundaries_flag; // u(1)
+
+ uint8_t nal_hrd_bit_rate_scale; // u(4)
+ uint8_t nal_hrd_cpb_size_scale; // u(4)
+
+ uint8_t vcl_hrd_bit_rate_scale; // u(4)
+ uint8_t vcl_hrd_cpb_size_scale; // u(4)
+
+ h264_hrd_param_set nal_hrd_parameters;
+ h264_hrd_param_set vcl_hrd_parameters;
+
+
+} vui_seq_parameters_t_not_used, *vui_seq_parameters_t_not_used_ptr;
+
+
+//////////////////////////////////////////////
+// picture parameter set
+
+typedef struct _PPS_PAR
+{
+ //int32_t DOUBLE_ALIGN valid; // indicates the parameter set is valid
+
+ int32_t pic_init_qp_minus26; // se(v), -26 to +25
+ int32_t pic_init_qs_minus26; // se(v), -26 to +25
+ int32_t chroma_qp_index_offset; // se(v), -12 to +12
+ int32_t second_chroma_qp_index_offset;
+
+ uint8_t pic_parameter_set_id; // ue(v), 0 to 255, restricted to 0 to 127 by MPD_CTRL_MAXPPS = 128
+ uint8_t seq_parameter_set_id; // ue(v), 0 to 31
+ uint8_t entropy_coding_mode_flag; // u(1)
+ uint8_t pic_order_present_flag; // u(1)
+
+ uint8_t num_slice_groups_minus1; // ue(v), shall be 0 for MP
+ // Below are not relevant for main profile...
+ uint8_t slice_group_map_type; // ue(v), 0 to 6
+ uint8_t num_ref_idx_l0_active; // ue(v), 0 to 31
+ uint8_t num_ref_idx_l1_active; // ue(v), 0 to 31
+
+ uint8_t weighted_pred_flag; // u(1)
+ uint8_t weighted_bipred_idc; // u(2)
+ uint8_t deblocking_filter_control_present_flag; // u(1)
+ uint8_t constrained_intra_pred_flag; // u(1)
+
+ uint8_t redundant_pic_cnt_present_flag; // u(1)
+ uint8_t transform_8x8_mode_flag;
+ uint8_t pic_scaling_matrix_present_flag;
+ uint8_t pps_status_flag;
+
+ //// Keep here with 32-bits aligned
+ uint8_t pic_scaling_list_present_flag[MAX_PIC_LIST_NUM];
+
+ qm_matrix_set pps_qm;
+
+ uint8_t ScalingList4x4[6][16];
+ uint8_t ScalingList8x8[2][64];
+ uint8_t UseDefaultScalingMatrix4x4Flag[6+2];
+ uint8_t UseDefaultScalingMatrix8x8Flag[6+2];
+
+} pic_param_set, *pic_param_set_ptr, h264_PicParameterSet_t;
+
+typedef union _list_reordering_num_t
+{
+ int32_t abs_diff_pic_num_minus1;
+ int32_t long_term_pic_num;
+} list_reordering_num_t;
+
+typedef struct _h264_Ref_Pic_List_Reordering ////size = 8*33+ 1 + 33
+{
+ list_reordering_num_t list_reordering_num[MAX_NUM_REF_FRAMES+1];
+
+ uint8_t ref_pic_list_reordering_flag;
+ uint8_t reordering_of_pic_nums_idc[MAX_NUM_REF_FRAMES+1]; //UE
+
+}h264_Ref_Pic_List_Reordering_t;
+
+typedef enum _H264_DANGLING_TYPE
+{
+ DANGLING_TYPE_LAST_FIELD,
+ DANGLING_TYPE_DPB_RESET,
+ DANGLING_TYPE_FIELD,
+ DANGLING_TYPE_FRAME,
+ DANGLING_TYPE_GAP_IN_FRAME
+
+} H264_DANGLING_TYPE;
+
+
+typedef struct _h264_Dec_Ref_Pic_Marking //size = 17*4*2 + 17*3 + 4 + 1
+{
+ int32_t difference_of_pic_num_minus1[NUM_MMCO_OPERATIONS];
+ int32_t long_term_pic_num[NUM_MMCO_OPERATIONS];
+
+ /// MMCO
+ uint8_t memory_management_control_operation[NUM_MMCO_OPERATIONS];
+ uint8_t max_long_term_frame_idx_plus1[NUM_MMCO_OPERATIONS];
+ uint8_t long_term_frame_idx[NUM_MMCO_OPERATIONS];
+ uint8_t long_term_reference_flag;
+
+ uint8_t adaptive_ref_pic_marking_mode_flag;
+ uint8_t dec_ref_pic_marking_count;
+ uint8_t no_output_of_prior_pics_flag;
+
+ uint8_t pad;
+}h264_Dec_Ref_Pic_Marking_t;
+
+
+
+typedef struct old_slice_par
+{
+ int32_t frame_num;
+ int32_t pic_order_cnt_lsb;
+ int32_t delta_pic_order_cnt_bottom;
+ int32_t delta_pic_order_cnt[2];
+
+ uint8_t field_pic_flag;
+ uint8_t bottom_field_flag;
+ uint8_t nal_ref_idc;
+ uint8_t structure;
+
+ uint8_t idr_flag;
+ uint8_t idr_pic_id;
+ uint8_t pic_parameter_id;
+ uint8_t status;
+} OldSliceParams;
+
+#ifdef VBP
+typedef struct _h264__pred_weight_table
+{
+ uint8_t luma_log2_weight_denom;
+ uint8_t chroma_log2_weight_denom;
+ uint8_t luma_weight_l0_flag;
+ int16_t luma_weight_l0[32];
+ int8_t luma_offset_l0[32];
+ uint8_t chroma_weight_l0_flag;
+ int16_t chroma_weight_l0[32][2];
+ int8_t chroma_offset_l0[32][2];
+
+ uint8_t luma_weight_l1_flag;
+ int16_t luma_weight_l1[32];
+ int8_t luma_offset_l1[32];
+ uint8_t chroma_weight_l1_flag;
+ int16_t chroma_weight_l1[32][2];
+ int8_t chroma_offset_l1[32][2];
+} h264_pred_weight_table;
+#endif
+
+typedef struct _h264_Slice_Header
+{
+ int32_t first_mb_in_slice; //UE
+ int32_t frame_num; //UV
+ int32_t pic_order_cnt_lsb; //UV
+ int32_t delta_pic_order_cnt_bottom; //SE
+ int32_t delta_pic_order_cnt[2]; //SE
+ int32_t redundant_pic_cnt; //UE
+
+ uint32_t num_ref_idx_l0_active; //UE
+ uint32_t num_ref_idx_l1_active; //UE
+
+ int32_t slice_qp_delta; //SE
+ int32_t slice_qs_delta; //SE
+ int32_t slice_alpha_c0_offset_div2; //SE
+ int32_t slice_beta_offset_div2; //SE
+ int32_t slice_group_change_cycle; //UV
+
+#ifdef VBP
+ h264_pred_weight_table sh_predwttbl;
+#endif
+
+ ///// Flags or IDs
+ //h264_ptype_t slice_type; //UE
+ uint8_t slice_type;
+ uint8_t nal_ref_idc;
+ uint8_t structure;
+ uint8_t pic_parameter_id; //UE
+
+ uint8_t field_pic_flag;
+ uint8_t bottom_field_flag;
+ uint8_t idr_flag; //UE
+ uint8_t idr_pic_id; //UE
+
+ uint8_t sh_error;
+ uint8_t cabac_init_idc; //UE
+ uint8_t sp_for_switch_flag;
+ uint8_t disable_deblocking_filter_idc; //UE
+
+ uint8_t direct_spatial_mv_pred_flag;
+ uint8_t num_ref_idx_active_override_flag;
+ int16_t current_slice_nr;
+
+ //// For Ref list reordering
+ h264_Dec_Ref_Pic_Marking_t sh_dec_refpic;
+ h264_Ref_Pic_List_Reordering_t sh_refpic_l0;
+ h264_Ref_Pic_List_Reordering_t sh_refpic_l1;
+
+} h264_Slice_Header_t;
+
+
+#define MAX_USER_DATA_SIZE 1024
+typedef struct _h264_user_data_t
+{
+ h264_sei_payloadtype user_data_type;
+
+ int32_t user_data_id;
+ int32_t dsn;
+ int32_t user_data_size;
+ int32_t user_data[MAX_USER_DATA_SIZE>>2];
+} h264_user_data_t;
+
+// SPS DISPLAY parameters: seq_param_set_disp, *seq_param_set_disp_ptr;
+typedef struct _SPS_DISP
+{
+ ///// VUI info
+ vui_seq_parameters_t_used vui_seq_parameters; //size =
+
+ ///// Resolution
+ int16_t pic_width_in_mbs_minus1;
+ int16_t pic_height_in_map_units_minus1;
+
+ ///// Cropping
+ int16_t frame_crop_rect_left_offset;
+ int16_t frame_crop_rect_right_offset;
+
+ int16_t frame_crop_rect_top_offset;
+ int16_t frame_crop_rect_bottom_offset;
+
+ ///// Flags
+ uint8_t frame_mbs_only_flag;
+ uint8_t mb_adaptive_frame_field_flag;
+ uint8_t direct_8x8_inference_flag;
+ uint8_t frame_cropping_flag;
+
+ uint16_t vui_parameters_present_flag;
+ uint16_t chroma_format_idc;
+} seq_param_set_disp, *seq_param_set_disp_ptr;
+
+
+////SPS: seq_param_set, *seq_param_set_ptr;
+
+typedef struct _SPS_PAR_USED
+{
+ uint32_t is_updated;
+
+ /////////// Required for display section //////////////////////////
+ seq_param_set_disp sps_disp;
+
+ int32_t expectedDeltaPerPOCCycle;
+ int32_t offset_for_non_ref_pic; // se(v), -2^31 to (2^31)-1, 32-bit integer
+ int32_t offset_for_top_to_bottom_field; // se(v), -2^31 to (2^31)-1, 32-bit integer
+
+ /////////// IDC
+ uint8_t profile_idc; // u(8), 0x77 for MP
+ uint8_t constraint_set_flags; // bit 0 to 3 for set0 to set3
+ uint8_t level_idc; // u(8)
+ uint8_t seq_parameter_set_id; // ue(v), 0 to 31
+
+
+ uint8_t pic_order_cnt_type; // ue(v), 0 to 2
+ uint8_t log2_max_frame_num_minus4; // ue(v), 0 to 12
+ uint8_t log2_max_pic_order_cnt_lsb_minus4; // ue(v), 0 to 12
+ uint8_t num_ref_frames_in_pic_order_cnt_cycle; // ue(v), 0 to 255
+
+ //int32_t offset_for_ref_frame[MAX_NUM_REF_FRAMES_IN_PIC_ORDER_CNT_CYCLE]; // se(v), -2^31 to (2^31)-1, 32-bit integer
+ uint8_t num_ref_frames; // ue(v), 0 to 16,
+ uint8_t gaps_in_frame_num_value_allowed_flag; // u(1)
+ // This is my addition, we should calculate this once and leave it with the sps
+ // as opposed to calculating it each time in h264_hdr_decoding_POC()
+
+ uint8_t delta_pic_order_always_zero_flag; // u(1)
+ uint8_t residual_colour_transform_flag;
+
+ uint8_t bit_depth_luma_minus8;
+ uint8_t bit_depth_chroma_minus8;
+ uint8_t lossless_qpprime_y_zero_flag;
+ uint8_t seq_scaling_matrix_present_flag;
+
+ uint8_t seq_scaling_list_present_flag[MAX_PIC_LIST_NUM]; //0-7
+
+ //// Combine the scaling matrix to word ( 24 + 32)
+ uint8_t ScalingList4x4[6][16];
+ uint8_t ScalingList8x8[2][64];
+ uint8_t UseDefaultScalingMatrix4x4Flag[6];
+ uint8_t UseDefaultScalingMatrix8x8Flag[6];
+
+} seq_param_set_used, *seq_param_set_used_ptr;
+
+
+typedef struct _SPS_PAR_ALL
+{
+
+ seq_param_set_used sps_par_used;
+ vui_seq_parameters_t_not_used sps_vui_par_not_used;
+
+}seq_param_set_all, *seq_param_set_all_ptr;
+
+
+///// Image control parameter////////////
+typedef struct _h264_img_par
+{
+ int32_t frame_num; // decoding num of current frame
+ int32_t frame_count; // count of decoded frames
+ int32_t current_slice_num;
+ int32_t gaps_in_frame_num;
+
+ // POC decoding
+ int32_t num_ref_frames_in_pic_order_cnt_cycle;
+ int32_t delta_pic_order_always_zero_flag;
+ int32_t offset_for_non_ref_pic;
+ int32_t offset_for_top_to_bottom_field;
+
+ int32_t pic_order_cnt_lsb;
+ int32_t pic_order_cnt_msb;
+ int32_t delta_pic_order_cnt_bottom;
+ int32_t delta_pic_order_cnt[2];
+
+ int32_t PicOrderCntMsb;
+ int32_t CurrPicOrderCntMsb;
+ int32_t PrevPicOrderCntLsb;
+
+ int32_t FrameNumOffset;
+
+ int32_t PreviousFrameNum;
+ int32_t PreviousFrameNumOffset;
+
+ int32_t toppoc;
+ int32_t bottompoc;
+ int32_t framepoc;
+ int32_t ThisPOC;
+
+ //int32_t sei_freeze_this_image;
+
+ ///////////////////// Resolutions
+ int32_t PicWidthInMbs;
+ int32_t FrameHeightInMbs;
+
+ ///////////////////// MMCO
+ uint8_t last_has_mmco_5;
+ uint8_t curr_has_mmco_5;
+
+ /////////////////// Flags
+ uint8_t g_new_frame;
+ uint8_t g_new_pic;
+
+ uint8_t structure;
+ uint8_t second_field; // Set to one if this is the second field of a set of paired fields...
+ uint8_t field_pic_flag;
+ uint8_t last_pic_bottom_field;
+
+ uint8_t bottom_field_flag;
+ uint8_t MbaffFrameFlag;
+ uint8_t no_output_of_prior_pics_flag;
+ uint8_t long_term_reference_flag;
+
+ uint8_t skip_this_pic;
+ uint8_t pic_order_cnt_type;
+ // Recovery
+ uint8_t recovery_point_found;
+ uint8_t used_for_reference;
+} h264_img_par;
+
+
+typedef struct _h264_slice_reg_data
+{
+ uint32_t h264_bsd_slice_p1; // 0x150
+ //uint32_t h264_mpr_list0[8]; // from 0x380 to 0x3BC
+ uint32_t h264_bsd_slice_p2; // 0x154
+ uint32_t h264_bsd_slice_start; // 0x158
+
+} h264_slice_data;
+
+
+typedef struct _h264_pic_data
+{
+ uint32_t h264_dpb_init; // 0x40
+ //info For current pic
+ uint32_t h264_cur_bsd_img_init; // 0x140
+ uint32_t h264_cur_mpr_tf_poc; // 0x300
+ uint32_t h264_cur_mpr_bf_poc; // 0x304
+
+ //info For frames in DPB
+ //uint32_t h264_dpb_bsd_img_init[16]; //0x140
+ //uint32_t h264_dpb_mpr_tf_poc[16]; // 0x300
+ //uint32_t h264_dpb_mpr_bf_poc[16]; // 0x304
+} h264_pic_data;
+
+enum h264_workload_item_type
+{
+ VIDDEC_WORKLOAD_H264_SLICE_REG = VIDDEC_WORKLOAD_DECODER_SPECIFIC,
+ VIDDEC_WORKLOAD_H264_PIC_REG,
+ VIDDEC_WORKLOAD_H264_DPB_FRAME_POC,
+ VIDDEC_WORKLOAD_H264_SH_BITS_OFFSET,
+ VIDDEC_WORKLOAD_H264_PWT_BITS_OFFSET,
+ VIDDEC_WORKLOAD_H264_PWT_ES_BYTES,
+ VIDDEC_WORKLOAD_H264_SCALING_MATRIX,
+ VIDDEC_WORKLOAD_H264_DEBUG
+};
+
+
+
+////////////////////////////////////////////
+/* Full Info set*/
+////////////////////////////////////////////
+typedef struct _h264_Info
+{
+
+ h264_DecodedPictureBuffer dpb;
+
+ //// Structures
+ //// need to guarantee that active_SPS and active_PPS start at a 4-byte-aligned address
+ seq_param_set_used active_SPS;
+ pic_param_set active_PPS;
+
+
+ h264_Slice_Header_t SliceHeader;
+ OldSliceParams old_slice;
+ sei_info sei_information;
+
+ h264_img_par img;
+
+ uint32_t SPS_PADDR_GL;
+ uint32_t PPS_PADDR_GL;
+ uint32_t OFFSET_REF_FRAME_PADDR_GL;
+ uint32_t TMP_OFFSET_REFFRM_PADDR_GL;
+
+ uint32_t h264_list_replacement;
+
+ uint32_t h264_pwt_start_byte_offset;
+ uint32_t h264_pwt_start_bit_offset;
+ uint32_t h264_pwt_end_byte_offset;
+ uint32_t h264_pwt_end_bit_offset;
+ uint32_t h264_pwt_enabled;
+
+ uint32_t sps_valid;
+
+ uint8_t slice_ref_list0[32];
+ uint8_t slice_ref_list1[32];
+
+
+ uint8_t qm_present_list;
+ //h264_NAL_Unit_t
+ uint8_t nal_unit_type;
+ uint8_t old_nal_unit_type;
+ uint8_t got_start;
+
+ //workload
+ uint8_t push_to_cur;
+ uint8_t Is_first_frame_in_stream;
+ uint8_t Is_SPS_updated;
+ uint8_t number_of_first_au_info_nal_before_first_slice;
+
+ uint8_t is_frame_boundary_detected_by_non_slice_nal;
+ uint8_t is_frame_boundary_detected_by_slice_nal;
+ uint8_t is_current_workload_done;
+ uint8_t primary_pic_type_plus_one; //AUD---[0,7]
+
+ //Error handling
+ uint8_t sei_rp_received;
+ uint8_t last_I_frame_idc;
+ uint8_t sei_b_state_ready;
+ uint8_t gop_err_flag;
+
+
+ uint32_t wl_err_curr;
+ uint32_t wl_err_next;
+
+} h264_Info;
+
+
+
+struct h264_viddec_parser
+{
+ uint32_t sps_pps_ddr_paddr;
+ h264_Info info;
+};
+
+
+
+
+
+#endif //_H264_H_
+
+
diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/include/h264parse.h b/mix_vbp/viddec_fw/fw/codecs/h264/include/h264parse.h
new file mode 100644
index 0000000..c255980
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/h264/include/h264parse.h
@@ -0,0 +1,172 @@
+#ifndef __H264PARSE_H_
+#define __H264PARSE_H_
+
+#include "h264.h"
+
+#ifndef MFD_FIRMWARE
+#define true 1
+#define false 0
+#endif
+
+////////////////////////////////////////////////////////////////////
+// The following part is only for Parser Debug
+///////////////////////////////////////////////////////////////////
+
+
+
+enum h264_debug_point_id
+{
+ WARNING_H264_GENERAL = 0xff000000,
+ WARNING_H264_DPB,
+ WARNING_H264_REFLIST,
+ WARNING_H264_SPS,
+ WARNING_H264_PPS,
+ WARNING_H264_SEI,
+ WARNING_H264_VCL,
+
+ ERROR_H264_GENERAL = 0xffff0000,
+ ERROR_H264_DPB,
+ ERROR_H264_REFLIST,
+ ERROR_H264_SPS,
+ ERROR_H264_PPS,
+ ERROR_H264_SEI,
+ ERROR_H264_VCL
+};
+
+static inline void MFD_PARSER_DEBUG(int debug_point_id)
+{
+#ifdef H264_MFD_DEBUG
+
+ int p1,p2,p3,p4,p5,p6;
+
+ p1 = 0x0BAD;
+ p2 = 0xC0DE;
+ p3 = debug_point_id;
+ p4=p5=p6 = 0;
+
+ DEBUG_WRITE(p1,p2,p3,p4,p5,p6);
+#endif
+
+ debug_point_id = debug_point_id;
+
+ return;
+}
+
+
+
+
+////////////////////////////////////////////////////////////////////
+///////////////////////////// Init functions
+////////////////////////////////////////////////////////////////////
+extern void h264_init_old_slice(h264_Info* pInfo);
+extern void h264_init_img(h264_Info* pInfo);
+extern void h264_init_Info(h264_Info* pInfo);
+extern void h264_init_Info_under_sps_pps_level(h264_Info* pInfo);
+extern void h264_init_sps_pps(struct h264_viddec_parser* parser, uint32_t *persist_mem);
+
+extern void h264_update_old_slice(h264_Info * pInfo,h264_Slice_Header_t next_SliceHeader);
+extern void h264_sei_stream_initialise (h264_Info* pInfo);
+extern void h264_update_img_info(h264_Info * pInfo );
+extern void h264_update_frame_type(h264_Info * pInfo );
+
+extern int32_t h264_check_previous_frame_end(h264_Info * pInfo);
+
+
+////////////////////////////////////////////////////////////////////
+///////////////////////////// bsd functions
+////////////////////////////////////////////////////////////////////
+extern uint8_t h264_More_RBSP_Data(void *parent, h264_Info * pInfo);
+////// VLE and bit operation
+extern uint32_t h264_get_codeNum(void *parent,h264_Info* pInfo);
+extern int32_t h264_GetVLCElement(void *parent,h264_Info* pInfo, uint8_t bIsSigned);
+
+
+
+////////////////////////////////////////////////////////////////////
+///////////////////////////// parse functions
+////////////////////////////////////////////////////////////////////
+
+//NAL
+extern h264_Status h264_Parse_NAL_Unit(void *parent, h264_Info* pInfo, uint8_t *nal_ref_idc);
+
+////// Slice header
+extern h264_Status h264_Parse_Slice_Layer_Without_Partitioning_RBSP(void *parent, h264_Info* pInfo, h264_Slice_Header_t *SliceHeader);
+extern h264_Status h264_Parse_Slice_Header_1(void *parent, h264_Info* pInfo, h264_Slice_Header_t *SliceHeader);
+extern h264_Status h264_Parse_Slice_Header_2(void *parent, h264_Info* pInfo, h264_Slice_Header_t *SliceHeader);
+extern h264_Status h264_Parse_Slice_Header_3(void *parent, h264_Info* pInfo, h264_Slice_Header_t *SliceHeader);
+
+
+////// SPS
+extern h264_Status h264_Parse_SeqParameterSet(void *parent, h264_Info * pInfo,seq_param_set_used_ptr SPS, vui_seq_parameters_t_not_used_ptr pVUI_Seq_Not_Used, int32_t* pOffset_ref_frame);
+//extern h264_Status h264_Parse_SeqParameterSet_Extension(void *parent, h264_Info * pInfo);
+extern h264_Status h264_Parse_PicParameterSet(void *parent, h264_Info * pInfo,h264_PicParameterSet_t* PictureParameterSet);
+
+////// SEI functions
+h264_Status h264_Parse_Supplemental_Enhancement_Information_Message(void *parent,h264_Info* pInfo);
+h264_Status h264_SEI_payload(void *parent, h264_Info* pInfo, h264_sei_payloadtype payloadType, int32_t payloadSize);
+
+//////
+extern h264_Status h264_Scaling_List(void *parent, uint8_t *scalingList, int32_t sizeOfScalingList, uint8_t *UseDefaultScalingMatrix, h264_Info* pInfo);
+extern h264_Status h264_Parse_Ref_Pic_List_Reordering(void *parent,h264_Info* pInfo,h264_Slice_Header_t *SliceHeader);
+extern h264_Status h264_Parse_Pred_Weight_Table(void *parent,h264_Info* pInfo,h264_Slice_Header_t *SliceHeader);
+extern h264_Status h264_Parse_Dec_Ref_Pic_Marking(void *parent,h264_Info* pInfo,h264_Slice_Header_t *SliceHeader);
+
+
+
+////////////////////////////////////////////////////////////////////
+///////////////////////////// utils functions
+////////////////////////////////////////////////////////////////////
+extern int32_t h264_is_new_picture_start(h264_Info* pInfo, h264_Slice_Header_t cur_slice, h264_Slice_Header_t old_slice);
+extern int32_t h264_is_second_field(h264_Info * pInfo);
+///// Math functions
+uint32_t ldiv_mod_u(uint32_t a, uint32_t b, uint32_t * mod);
+uint32_t mult_u(uint32_t var1, uint32_t var2);
+///// Mem functions
+extern void* h264_memset( void* buf, uint32_t c, uint32_t num );
+extern void* h264_memcpy( void* dest, void* src, uint32_t num );
+
+extern void h264_Parse_Copy_Sps_To_DDR(h264_Info* pInfo, seq_param_set_used_ptr SPS, uint32_t nSPSId);
+extern void h264_Parse_Copy_Sps_From_DDR(h264_Info* pInfo, seq_param_set_used_ptr SPS, uint32_t nSPSId);
+
+extern void h264_Parse_Copy_Pps_To_DDR(h264_Info* pInfo, pic_param_set_ptr PPS, uint32_t nPPSId);
+extern void h264_Parse_Copy_Pps_From_DDR(h264_Info* pInfo, pic_param_set_ptr PPS, uint32_t nPPSId);
+
+extern void h264_Parse_Copy_Offset_Ref_Frames_To_DDR(h264_Info* pInfo, int32_t* pOffset_ref_frames, uint32_t nSPSId);
+extern void h264_Parse_Copy_Offset_Ref_Frames_From_DDR(h264_Info* pInfo, int32_t* pOffset_ref_frames, uint32_t nSPSId);
+extern uint32_t h264_Parse_Check_Sps_Updated_Flag(h264_Info* pInfo, uint32_t nSPSId);
+extern void h264_Parse_Clear_Sps_Updated_Flag(h264_Info* pInfo, uint32_t nSPSId);
+
+
+////////////////////////////////////////////////////////////////////
+///////////////////////////// workload functions
+////////////////////////////////////////////////////////////////////
+
+extern void h264_parse_emit_current_slice( void *parent, h264_Info *pInfo );
+
+extern void h264_parse_emit_current_pic( void *parent, h264_Info *pInfo );
+
+extern void h264_parse_emit_start_new_frame( void *parent, h264_Info *pInfo );
+extern void h264_parse_emit_eos( void *parent, h264_Info *pInfo );
+
+
+
+////////////////////////////////////////////////////////////////////
+///////////////////////////// utils functions outside h264
+////////////////////////////////////////////////////////////////////
+
+extern void *memset(void *s, int32_t c, uint32_t n);
+extern void *memcpy(void *dest, const void *src, uint32_t n);
+extern uint32_t cp_using_dma(uint32_t ddr_addr, uint32_t local_addr, uint32_t size, char to_ddr, char swap);
+extern int32_t viddec_pm_get_bits(void *parent, uint32_t *data, uint32_t num_bits);
+extern int32_t viddec_pm_peek_bits(void *parent, uint32_t *data, uint32_t num_bits);
+
+
+
+////////////////////////////////////////////////////////////////////
+///////////////////////////// Second level parse functions
+////////////////////////////////////////////////////////////////////
+
+#endif ////__H264PARSE_H_
+
+
+
diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/include/h264parse_dpb.h b/mix_vbp/viddec_fw/fw/codecs/h264/include/h264parse_dpb.h
new file mode 100644
index 0000000..2a19b5f
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/h264/include/h264parse_dpb.h
@@ -0,0 +1,107 @@
+/* ///////////////////////////////////////////////////////////////////////
+//
+// INTEL CORPORATION PROPRIETARY INFORMATION
+// This software is supplied under the terms of a license agreement or
+// nondisclosure agreement with Intel Corporation and may not be copied
+// or disclosed except in accordance with the terms of that agreement.
+// Copyright (c) 2001-2006 Intel Corporation. All Rights Reserved.
+//
+// Description: H.264 DPB (Decoded Picture Buffer) control header.
+//
+*/
+
+
+#ifndef _H264_DPB_CTL_H_
+#define _H264_DPB_CTL_H_
+
+
+#include "h264.h"
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+////////////////////////////////////////////////////////////////////
+///////////////////////////// Parser control functions
+////////////////////////////////////////////////////////////////////
+
+///// Reference list
+extern void h264_dpb_update_ref_lists(h264_Info * pInfo);
+extern void h264_dpb_reorder_lists(h264_Info * pInfo);
+
+extern void h264_dpb_insert_ref_lists(h264_DecodedPictureBuffer * p_dpb,int32_t NonExisting);
+
+///// POC
+extern void h264_hdr_decoding_poc (h264_Info * pInfo,int32_t NonExisting, int32_t frame_num);
+extern void h264_hdr_post_poc(h264_Info* pInfo,int32_t NonExisting, int32_t frame_num, int32_t use_old);
+
+///// DPB buffer management
+extern void h264_init_dpb(h264_DecodedPictureBuffer * p_dpb);
+
+extern void h264_dpb_unmark_for_reference(h264_DecodedPictureBuffer *p_dpb, int32_t fs_idc);
+extern void h264_dpb_unmark_for_long_term_reference(h264_DecodedPictureBuffer *p_dpb, int32_t fs_idc);
+extern void h264_dpb_unmark_long_term_frame_for_reference_by_frame_idx(h264_DecodedPictureBuffer *p_dpb, int32_t long_term_frame_idx);
+extern void h264_dpb_unmark_long_term_field_for_reference_by_frame_idx(h264_DecodedPictureBuffer *p_dpb, int32_t long_term_frame_idx, int32_t fs_idc, int32_t polarity);
+extern void h264_dpb_mark_pic_long_term(h264_Info * pInfo, int32_t long_term_frame_idx, int32_t picNumX);
+extern void h264_dpb_mark_dangling_field(h264_DecodedPictureBuffer *p_dpb, int32_t fs_idc);
+
+extern void h264_dpb_update_queue_dangling_field(h264_Info * pInfo);
+extern void h264_dpb_is_used_for_reference(int32_t * flag);
+
+
+extern void h264_dpb_set_active_fs(h264_DecodedPictureBuffer * p_dpb,int32_t index);
+extern void h264_dpb_flush_dpb (h264_Info * pInfo,int32_t output_all, int32_t keep_complement, int32_t num_ref_frames);
+
+extern void h264_dpb_idr_memory_management (h264_Info * pInfo,
+ seq_param_set_used_ptr active_sps,
+ int32_t no_output_of_prior_pics_flag);
+
+extern void h264_dpb_init_frame_store(h264_Info * pInfo);
+extern void h264_dpb_reset_dpb(h264_Info * pInfo,int32_t PicWidthInMbs, int32_t FrameHeightInMbs,
+ int32_t SizeChange, int32_t no_output_of_prior_pics_flag);
+
+extern void h264_dpb_gaps_in_frame_num_mem_management(h264_Info * pInfo);
+
+extern int32_t h264_dpb_assign_frame_store(h264_Info * pInfo, int32_t NonExisting);
+
+extern void h264_dpb_get_smallest_poc(h264_DecodedPictureBuffer *p_dpb, int32_t *poc, int32_t *pos);
+extern void h264_dpb_remove_unused_frame_from_dpb(h264_DecodedPictureBuffer *p_dpb, int32_t * flag);
+
+extern void h264_dpb_sliding_window_memory_management(h264_DecodedPictureBuffer *p_dpb,
+ int32_t NonExisting,
+ int32_t num_ref_frames);
+extern int32_t h264_dpb_queue_update(h264_Info * pInfo,
+ int32_t push,
+ int32_t direct,
+ int32_t frame_request,
+ int32_t num_ref_frames);
+
+extern void h264_dpb_split_field (h264_Info * pInfo);
+extern void h264_dpb_combine_field(int32_t use_old);
+
+extern void h264_dpb_insert_picture_in_dpb(h264_Info * pInfo,
+ int32_t used_for_reference,
+ int32_t add2dpb,
+ int32_t NonExisting,
+ int32_t use_old);
+
+extern void h264_dpb_store_previous_picture_in_dpb(h264_Info * pInfo,
+ int32_t NonExisting,
+ int32_t use_old);
+
+extern void h264_dpb_adaptive_memory_management (h264_Info * pInfo);
+
+extern int32_t h264_dpb_output_one_frame_from_dpb(h264_Info* pInfo,
+ int32_t direct, int32_t request, int32_t num_ref_frames);
+
+extern void h264_dpb_remove_frame_from_dpb(h264_DecodedPictureBuffer *p_dpb, int32_t idx);
+extern void h264_dpb_frame_output(h264_Info * pInfo,int32_t fs_idc, int32_t direct, int32_t * existing);
+
+//////////////////////////////////////////////////////////// Globals
+extern frame_store *active_fs;
+
+
+
+#endif //_H264_DPB_CTL_H_
+
+
diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/include/h264parse_sei.h b/mix_vbp/viddec_fw/fw/codecs/h264/include/h264parse_sei.h
new file mode 100644
index 0000000..e5903cd
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/h264/include/h264parse_sei.h
@@ -0,0 +1,314 @@
+/* ///////////////////////////////////////////////////////////////////////
+//
+// INTEL CORPORATION PROPRIETARY INFORMATION
+// This software is supplied under the terms of a license agreement or
+// nondisclosure agreement with Intel Corporation and may not be copied
+// or disclosed except in accordance with the terms of that agreement.
+// Copyright (c) 2001-2006 Intel Corporation. All Rights Reserved.
+//
+// Description: H.264 SEI header.
+//
+*/
+
+
+#ifndef _H264_SEI_H_
+#define _H264_SEI_H_
+
+#include "h264.h"
+
+
+//defines for SEI
+#define MAX_CPB_CNT 32
+#define MAX_NUM_CLOCK_TS 3
+#define MAX_PAN_SCAN_CNT 3
+#define MAX_NUM_SPARE_PICS 16
+#define MAX_SUB_SEQ_LAYERS 256
+#define MAX_SLICE_GRPS 1 // for high profile
+#define NUM_SPS 32
+#define MAX_NUM_REF_SUBSEQS 256
+
+
+#define SEI_SCAN_FORMAT_INTERLACED 0x1
+#define SEI_SCAN_FORMAT_PROGRESSIVE 0x3
+#define SEI_SCAN_FORMAT_VALID(r) (r&0x1)
+#define SEI_SCAN_FORMAT(r) ((r&0x2)>>1)
+
+typedef enum
+{
+ SEI_BUF_PERIOD = 0,
+ SEI_PIC_TIMING,
+ SEI_PAN_SCAN,
+ SEI_FILLER_PAYLOAD,
+ SEI_REG_USERDATA,
+ SEI_UNREG_USERDATA,
+ SEI_RECOVERY_POINT,
+ SEI_DEC_REF_PIC_MARKING_REP,
+ SEI_SPARE_PIC,
+ SEI_SCENE_INFO,
+ SEI_SUB_SEQ_INFO,
+ SEI_SUB_SEQ_LAYER,
+ SEI_SUB_SEQ,
+ SEI_FULL_FRAME_FREEZE,
+ SEI_FULL_FRAME_FREEZE_RELEASE,
+ SEI_FULL_FRAME_SNAPSHOT,
+ SEI_PROGRESSIVE_SEGMENT_START,
+ SEI_PROGRESSIVE_SEGMENT_END,
+ SEI_MOTION_CONSTRAINED_SLICE_GRP_SET,
+ SEI_FILM_GRAIN_CHARACTERISTICS,
+ SEI_DEBLK_FILTER_DISPLAY_PREFERENCE,
+ SEI_STEREO_VIDEO_INFO,
+ SEI_RESERVED,
+}h264_sei_payloadtype;
+
+
+
+typedef struct _h264_SEI_buffering_period
+{
+ int32_t seq_param_set_id;
+ int32_t initial_cpb_removal_delay_nal;
+ int32_t initial_cpb_removal_delay_offset_nal;
+ int32_t initial_cpb_removal_delay_vcl;
+ int32_t initial_cpb_removal_delay_offset_vcl;
+
+}h264_SEI_buffering_period_t;
+
+typedef struct _h264_SEI_pic_timing
+{
+ int32_t cpb_removal_delay;
+ int32_t dpb_output_delay;
+ int32_t pic_struct;
+}h264_SEI_pic_timing_t;
+
+#if 0
+int32_t clock_timestamp_flag[MAX_NUM_CLOCK_TS];
+int32_t ct_type[MAX_NUM_CLOCK_TS];
+int32_t nuit_field_based_flag[MAX_NUM_CLOCK_TS];
+int32_t counting_type[MAX_NUM_CLOCK_TS];
+int32_t full_timestamp_flag[MAX_NUM_CLOCK_TS];
+int32_t discontinuity_flag[MAX_NUM_CLOCK_TS];
+int32_t cnt_dropped_flag[MAX_NUM_CLOCK_TS];
+int32_t n_frames[MAX_NUM_CLOCK_TS];
+int32_t seconds_value[MAX_NUM_CLOCK_TS];
+int32_t minutes_value[MAX_NUM_CLOCK_TS];
+int32_t hours_value[MAX_NUM_CLOCK_TS];
+int32_t seconds_flag[MAX_NUM_CLOCK_TS];
+int32_t minutes_flag[MAX_NUM_CLOCK_TS];
+int32_t hours_flag[MAX_NUM_CLOCK_TS];
+int32_t time_offset[MAX_NUM_CLOCK_TS];
+
+#endif
+
+typedef struct _h264_SEI_pan_scan_rectangle
+{
+ int32_t pan_scan_rect_id;
+ int32_t pan_scan_rect_cancel_flag;
+ int32_t pan_scan_cnt_minus1;
+ int32_t pan_scan_rect_left_offset[MAX_PAN_SCAN_CNT];
+ int32_t pan_scan_rect_right_offset[MAX_PAN_SCAN_CNT];
+ int32_t pan_scan_rect_top_offset[MAX_PAN_SCAN_CNT];
+ int32_t pan_scan_rect_bottom_offset[MAX_PAN_SCAN_CNT];
+ int32_t pan_scan_rect_repetition_period;
+}h264_SEI_pan_scan_rectangle_t;
+
+typedef struct _h264_SEI_filler_payload
+{
+ int32_t ff_byte;
+}h264_SEI_filler_payload_t;
+
+typedef struct _h264_SEI_userdata_registered
+{
+ int32_t itu_t_t35_country_code;
+ int32_t itu_t_t35_country_code_extension_byte;
+ int32_t itu_t_t35_payload_byte;
+}h264_SEI_userdata_registered_t;
+
+typedef struct _h264_SEI_userdata_unregistered
+{
+ int32_t uuid_iso_iec_11578[4];
+ int32_t user_data_payload_byte;
+}h264_SEI_userdata_unregistered_t;
+
+typedef struct _h264_SEI_recovery_point
+{
+ int32_t recovery_frame_cnt;
+ int32_t exact_match_flag;
+ int32_t broken_link_flag;
+ int32_t changing_slice_group_idc;
+}h264_SEI_recovery_point_t;
+
+typedef struct _h264_SEI_decoded_ref_pic_marking_repetition
+{
+ int32_t original_idr_flag;
+ int32_t original_frame_num;
+ int32_t orignal_field_pic_flag;
+ int32_t original_bottom_field_pic_flag;
+ int32_t no_output_of_prior_pics_flag;
+ int32_t long_term_reference_flag;
+ int32_t adaptive_ref_pic_marking_mode_flag;
+ int32_t memory_management_control_operation; //UE
+ int32_t difference_of_pics_num_minus1; //UE
+ int32_t long_term_pic_num; //UE
+ int32_t long_term_frame_idx; //UE
+ int32_t max_long_term_frame_idx_plus1; //UE
+}h264_SEI_decoded_ref_pic_marking_repetition_t;
+
+typedef struct _h264_SEI_spare_picture
+{
+ int32_t target_frame_num;
+ int32_t spare_field_flag;
+ int32_t target_bottom_field_flag;
+ int32_t num_spare_pics_minus1;
+ int32_t delta_spare_frame_num[MAX_NUM_SPARE_PICS];
+ int32_t spare_bottom_field_flag[MAX_NUM_SPARE_PICS];
+ int32_t spare_area_idc[MAX_NUM_SPARE_PICS]; // not complete
+}h264_SEI_spare_picture_t;
+
+typedef struct _h264_SEI_scene_info
+{
+ int32_t scene_info_present_flag;
+ int32_t scene_id;
+ int32_t scene_transitioning_type;
+ int32_t second_scene_id;
+}h264_SEI_scene_info_t;
+
+typedef struct _h264_SEI_sub_sequence_info
+{
+ int32_t sub_seq_layer_num;
+ int32_t sub_seq_id;
+ int32_t first_ref_pic_flag;
+ int32_t leading_non_ref_pic_flag;
+ int32_t last_pic_flag;
+ int32_t sub_seq_frame_num_flag;
+ int32_t sub_seq_frame_num;
+}h264_SEI_sub_sequence_info_t;
+
+typedef struct _h264_SEI_sub_sequence_layer
+{
+ int32_t num_sub_seq_layers_minus1;
+ int32_t accurate_statistics_flag[MAX_SUB_SEQ_LAYERS];
+ int32_t average_bit_rate[MAX_SUB_SEQ_LAYERS];
+ int32_t average_frame_rate[MAX_SUB_SEQ_LAYERS];
+}h264_SEI_sub_sequence_layer_t;
+
+typedef struct _h264_SEI_sub_sequence
+{
+ int32_t sub_seq_layer_num;
+ int32_t sub_seq_id;
+ int32_t duration_flag;
+ int32_t sub_seq_duration;
+ int32_t average_rate_flag;
+ int32_t average_statistics_flag;
+ int32_t average_bit_rate;
+ int32_t average_frame_rate;
+ int32_t num_referenced_subseqs;
+ int32_t ref_sub_seq_layer_num;
+ int32_t ref_sub_seq_id;
+ int32_t ref_sub_seq_direction;
+}h264_SEI_sub_sequence_t;
+
+typedef struct _h264_SEI_full_frame_freeze
+{
+ int32_t full_frame_freeze_repetition_period;
+}h264_SEI_full_frame_freeze_t;
+
+typedef struct _h264_SEI_full_frame_snapshot
+{
+ int32_t snapshot_id;
+}h264_SEI_full_frame_snapshot_t;
+
+typedef struct _h264_SEI_progressive_segment_start
+{
+ int32_t progressive_refinement_id;
+ int32_t num_refinement_steps_minus1;
+}h264_SEI_progressive_segment_start_t;
+
+typedef struct _h264_SEI_progressive_segment_end
+{
+ int32_t progressive_refinement_id;
+}h264_SEI_progressive_segment_end_t;
+
+typedef struct _h264_SEI_motion_constrained_slice_group
+{
+ int32_t num_slice_groups_in_set_minus1;
+ int32_t slice_group_id[MAX_SLICE_GRPS];
+ int32_t exact_sample_value_match_flag;
+ int32_t pan_scan_rect_flag;
+ int32_t pan_scan_rect_id;
+}h264_SEI_motion_constrained_slice_group_t;
+
+typedef struct _h264_SEI_deblocking_filter_display_pref
+{
+ int32_t devlocking_display_preference_cancel_flag;
+ int32_t display_prior_to_deblocking_preferred_flag;
+ int32_t dec_frame_buffering_constraint_flag;
+ int32_t deblocking_display_preference_repetition_period;
+}h264_SEI_deblocking_filter_display_pref_t;
+
+typedef struct _h264_SEI_stereo_video_info
+{
+ int32_t field_views_flag;
+ int32_t top_field_is_left_view_flag;
+ int32_t curent_frame_is_left_view_flag;
+ int32_t next_frame_is_second_view_flag;
+ int32_t left_view_self_contained_flag;
+ int32_t right_view_self_contained_flag;
+}h264_SEI_stereo_video_info_t;
+
+typedef struct _h264_SEI_reserved
+{
+ int32_t reserved_sei_message_payload_byte;
+}h264_SEI_reserved_t;
+
+
+////////////////////////////
+// SEI Info
+/////////////////////////////
+
+typedef struct sei_info
+{
+ int32_t recovery_point;
+ int32_t recovery_frame_num;
+
+ int32_t capture_POC;
+ int32_t freeze_POC;
+ int32_t release_POC; // The POC which when reached will allow display update to re-commence
+ int32_t disp_frozen; // Indicates display is currently frozen
+ int32_t freeze_rep_period;
+ int32_t recovery_frame_cnt;
+ int32_t capture_fn;
+ int32_t recovery_fn;
+ int32_t broken_link;
+ int32_t scan_format;
+ int32_t broken_link_pic;
+}sei_info, *sei_info_ptr;
+
+/*typedef struct _h264_SEI
+{
+ h264_SEI_buffering_period_t buf_period;
+ h264_SEI_pic_timing_t pic_timing;
+ h264_SEI_pan_scan_rectangle_t pan_scan_timing;
+ h264_SEI_filler_payload_t filler_payload;
+ h264_SEI_userdata_registered_t userdata_reg;
+ h264_SEI_userdata_unregistered_t userdata_unreg;
+ h264_SEI_recovery_point_t recovery_point;
+ h264_SEI_decoded_ref_pic_marking_repetition_t dec_ref_pic_marking_rep;
+ h264_SEI_spare_picture_t spare_pic;
+ h264_SEI_scene_info_t scene_info;
+ h264_SEI_sub_sequence_info_t sub_sequence_info;
+ h264_SEI_sub_sequence_layer_t sub_sequence_layer;
+ h264_SEI_sub_sequence_t sub_sequence;
+ h264_SEI_full_frame_snapshot_t full_frame_snapshot;
+ h264_SEI_full_frame_t full_frame;
+ h264_SEI_progressive_segment_start_t progressive_segment_start;
+ h264_SEI_progressive_segment_end_t progressive_segment_end;
+ h264_SEI_motion_constrained_slice_group_t motion_constrained_slice_grp;
+ h264_SEI_deblocking_filter_display_pref_t deblk_filter_display_pref;
+ h264_SEI_stereo_video_info_t stereo_video_info;
+ h264_SEI_reserved_t reserved;
+}h264_SEI_t;
+*/
+
+
+#endif //_H264_SEI_H_
+
+
diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse.c
new file mode 100644
index 0000000..a96285d
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse.c
@@ -0,0 +1,786 @@
+/* ///////////////////////////////////////////////////////////////////////
+//
+// INTEL CORPORATION PROPRIETARY INFORMATION
+// This software is supplied under the terms of a license agreement or
+// nondisclosure agreement with Intel Corporation and may not be copied
+// or disclosed except in accordance with the terms of that agreement.
+// Copyright (c) 2001-2006 Intel Corporation. All Rights Reserved.
+//
+// Description: h264 parser
+//
+///////////////////////////////////////////////////////////////////////*/
+
+
+#include "h264.h"
+#include "h264parse.h"
+#include "h264parse_dpb.h"
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+
+
+
+h264_Status h264_Scaling_List(void *parent, uint8_t *scalingList, int32_t sizeOfScalingList, uint8_t *UseDefaultScalingMatrix, h264_Info* pInfo)
+{
+ int32_t j, scanj;
+ int32_t delta_scale, lastScale, nextScale;
+
+#if 0
+ const uint8_t ZZ_SCAN[16] =
+ { 0, 1, 4, 8, 5, 2, 3, 6, 9, 12, 13, 10, 7, 11, 14, 15
+ };
+
+ const uint8_t ZZ_SCAN8[64] =
+ { 0, 1, 8, 16, 9, 2, 3, 10, 17, 24, 32, 25, 18, 11, 4, 5,
+ 12, 19, 26, 33, 40, 48, 41, 34, 27, 20, 13, 6, 7, 14, 21, 28,
+ 35, 42, 49, 56, 57, 50, 43, 36, 29, 22, 15, 23, 30, 37, 44, 51,
+ 58, 59, 52, 45, 38, 31, 39, 46, 53, 60, 61, 54, 47, 55, 62, 63
+ };
+#endif
+
+ lastScale = 8;
+ nextScale = 8;
+ scanj = 0;
+
+ for(j=0; j<sizeOfScalingList; j++)
+ {
+ //scanj = (sizeOfScalingList==16)?ZZ_SCAN[j]:ZZ_SCAN8[j];
+
+ if(nextScale!=0)
+ {
+ delta_scale = h264_GetVLCElement(parent, pInfo, true);
+ nextScale = (lastScale + delta_scale + 256) % 256;
+ *UseDefaultScalingMatrix = (uint8_t) (scanj==0 && nextScale==0);
+ }
+
+ scalingList[scanj] = (nextScale==0) ? lastScale:nextScale;
+ lastScale = scalingList[scanj];
+ scanj ++;
+ }
+
+ return H264_STATUS_OK;
+}
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+
+h264_Status h264_active_par_set(h264_Info*pInfo,h264_Slice_Header_t* SliceHeader)
+{
+ //h264_Slice_Header_t* SliceHeader = &pInfo->SliceHeader;
+
+ ///////////////////////////////////////////////////
+ // Reload SPS/PPS while
+ // 1) Start of Frame (in case of context switch)
+ // 2) PPS id changed
+ ///////////////////////////////////////////////////
+ if((SliceHeader->first_mb_in_slice == 0) || (SliceHeader->pic_parameter_id != pInfo->active_PPS.pic_parameter_set_id))
+ {
+#ifndef WIN32
+ h264_Parse_Copy_Pps_From_DDR(pInfo, &pInfo->active_PPS, SliceHeader->pic_parameter_id);
+
+ if(pInfo->active_PPS.seq_parameter_set_id >= MAX_NUM_SPS)
+ {
+ return H264_PPS_INVALID_PIC_ID; /// Invalid PPS detected
+ }
+
+ if(pInfo->active_PPS.seq_parameter_set_id != pInfo->active_SPS.seq_parameter_set_id)
+ {
+ pInfo->Is_SPS_updated =1;
+ h264_Parse_Copy_Sps_From_DDR(pInfo, &pInfo->active_SPS, pInfo->active_PPS.seq_parameter_set_id);
+ h264_Parse_Clear_Sps_Updated_Flag(pInfo, pInfo->active_PPS.seq_parameter_set_id);
+ }
+ else
+ {
+ if(h264_Parse_Check_Sps_Updated_Flag(pInfo, pInfo->active_PPS.seq_parameter_set_id))
+ {
+ pInfo->Is_SPS_updated =1;
+ h264_Parse_Copy_Sps_From_DDR(pInfo, &pInfo->active_SPS, pInfo->active_PPS.seq_parameter_set_id);
+ h264_Parse_Clear_Sps_Updated_Flag(pInfo, pInfo->active_PPS.seq_parameter_set_id);
+ }
+ }
+
+#else
+ pInfo->active_PPS = PPS_GL[SliceHeader->pic_parameter_id];
+ pInfo->active_SPS = SPS_GL[pInfo->active_PPS.seq_parameter_set_id];
+#endif
+
+ if(pInfo->active_SPS.seq_parameter_set_id >= MAX_NUM_SPS)
+ {
+ return H264_PPS_INVALID_PIC_ID; //// Invalid SPS detected
+ }
+ }
+ else {
+ if((pInfo->active_PPS.seq_parameter_set_id >= MAX_NUM_SPS) || (pInfo->active_SPS.seq_parameter_set_id >= MAX_NUM_SPS))
+ {
+ return H264_PPS_INVALID_PIC_ID; /// Invalid PPS detected
+ }
+ }
+
+
+ pInfo->img.PicWidthInMbs = (pInfo->active_SPS.sps_disp.pic_width_in_mbs_minus1 + 1);
+ //pInfo->img.PicHeightInMapUnits = (pInfo->active_SPS.sps_disp.pic_height_in_map_units_minus1 + 1);
+ pInfo->img.FrameHeightInMbs = pInfo->active_SPS.sps_disp.frame_mbs_only_flag? \
+ (pInfo->active_SPS.sps_disp.pic_height_in_map_units_minus1 + 1): \
+ ((pInfo->active_SPS.sps_disp.pic_height_in_map_units_minus1 + 1)<<1);
+
+
+ return H264_STATUS_OK;
+}; //// End of h264_active_par_set
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+
+//////////////////////////////////////////////////
+// Parse slice header info
+//////////////////////////////////////////////////
+h264_Status h264_Parse_Slice_Layer_Without_Partitioning_RBSP(void *parent, h264_Info* pInfo, h264_Slice_Header_t *SliceHeader)
+{
+ h264_Status retStatus = H264_STATUS_ERROR;
+
+ ////////////////////////////////////////////////////
+ //// Parse slice header info
+ //// Part1: not depend on the active PPS/SPS
+ //// Part2/3: depend on the active parset
+ //////////////////////////////////////////////////
+
+ //retStatus = h264_Parse_Slice_Header_1(pInfo);
+
+ SliceHeader->sh_error = 0;
+
+ if(h264_Parse_Slice_Header_1(parent, pInfo, SliceHeader) == H264_STATUS_OK)
+ {
+ //////////////////////////////////////////
+ //// Active parameter set for this slice
+ //////////////////////////////////////////
+ retStatus = h264_active_par_set(pInfo, SliceHeader);
+ }
+
+ if(retStatus == H264_STATUS_OK) {
+ switch(pInfo->active_SPS.profile_idc)
+ {
+ case h264_ProfileBaseline:
+ case h264_ProfileMain:
+ case h264_ProfileExtended:
+ pInfo->active_PPS.transform_8x8_mode_flag=0;
+ pInfo->active_PPS.pic_scaling_matrix_present_flag =0;
+ pInfo->active_PPS.second_chroma_qp_index_offset = pInfo->active_PPS.chroma_qp_index_offset;
+
+ default:
+ break;
+ }
+
+ if( h264_Parse_Slice_Header_2(parent, pInfo, SliceHeader) != H264_STATUS_OK)
+ {
+ SliceHeader->sh_error |= 2;
+ }
+ else if( h264_Parse_Slice_Header_3(parent, pInfo, SliceHeader) != H264_STATUS_OK)
+ {
+ SliceHeader->sh_error |= 4;
+ }
+
+ } else {
+ SliceHeader->sh_error |= 1;
+ }
+
+
+ //if(SliceHeader->sh_error) {
+ //pInfo->wl_err_flag |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE;
+ //}
+
+
+
+ //////////////////////////////////
+ //// Parse slice data (MB loop)
+ //////////////////////////////////
+ //retStatus = h264_Parse_Slice_Data(pInfo);
+ {
+ //uint32_t data = 0;
+ //if( viddec_pm_peek_bits(parent, &data, 32) == -1)
+ //retStatus = H264_STATUS_ERROR;
+ }
+ //h264_Parse_rbsp_trailing_bits(pInfo);
+
+ return retStatus;
+}
+
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+
+h264_Status h264_Parse_NAL_Unit(void *parent, h264_Info* pInfo, uint8_t *nal_ref_idc)
+{
+ h264_Status ret = H264_STATUS_ERROR;
+
+ //h264_NAL_Unit_t* NAL = &pInfo->NAL;
+ uint32_t code;
+#if 0
+ viddec_pm_get_bits(void * parent,uint32_t * data,uint32_t num_bits)(parent, &code, 24);
+ viddec_pm_get_bits(parent, &code, 1); //forbidden_zero_bit
+
+ viddec_pm_get_bits(parent, &code, 2);
+ SliceHeader->nal_ref_idc = (uint8_t)code;
+
+ viddec_pm_get_bits(parent, &code, 5);
+ pInfo->nal_unit_type = (uint8_t)code;
+#else
+#ifdef VBP
+ if( viddec_pm_get_bits(parent, &code, 8) != -1)
+#else
+ //// 24bit SC, 1 bit: forbidden_zero_bit, 2 bits: nal_ref_idc, 5 bits: nal_unit_type
+ if( viddec_pm_get_bits(parent, &code, 32) != -1)
+#endif
+ {
+ *nal_ref_idc = (uint8_t)((code>>5)&0x3);
+ pInfo->nal_unit_type = (uint8_t)((code>>0)&0x1f);
+ ret = H264_STATUS_OK;
+ }
+#endif
+
+ return ret;
+}
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+
+/*!
+ ************************************************************************
+ * \brief
+ * set defaults for old_slice
+ * NAL unit of a picture"
+ ************************************************************************
+ */
+#ifndef INT_MAX
+#define INT_MAX 0x7FFFFFFF  /* largest positive int32_t; 0xFFFFFFFF would be -1 as int32_t */
+#endif
+
+#ifndef UINT_MAX
+#define UINT_MAX 0xFFFFFFFF /* largest uint32_t */
+#endif
+
+void h264_init_old_slice(h264_Info* pInfo)
+{
+ pInfo->SliceHeader.field_pic_flag = 0;
+
+ pInfo->SliceHeader.pic_parameter_id = 0xFF;
+
+ pInfo->SliceHeader.frame_num = INT_MAX;
+
+ pInfo->SliceHeader.nal_ref_idc = 0xFF;
+
+ pInfo->SliceHeader.idr_flag = 0;
+
+ pInfo->SliceHeader.pic_order_cnt_lsb = UINT_MAX;
+ pInfo->SliceHeader.delta_pic_order_cnt_bottom = INT_MAX;
+
+ pInfo->SliceHeader.delta_pic_order_cnt[0] = INT_MAX;
+ pInfo->SliceHeader.delta_pic_order_cnt[1] = INT_MAX;
+
+ return;
+}
+
+
+void h264_init_img(h264_Info* pInfo)
+{
+ h264_memset(&(pInfo->img), 0x0, sizeof(h264_img_par) );
+
+
+ return;
+}
+
+
+void h264_init_sps_pps(struct h264_viddec_parser* parser, uint32_t *persist_mem)
+{
+ int32_t i;
+
+ h264_Info * pInfo = &(parser->info);
+
+ parser->sps_pps_ddr_paddr = (uint32_t)persist_mem;
+
+ pInfo->SPS_PADDR_GL = parser->sps_pps_ddr_paddr;
+ pInfo->PPS_PADDR_GL = pInfo->SPS_PADDR_GL + MAX_NUM_SPS * sizeof(seq_param_set_all);
+ pInfo->OFFSET_REF_FRAME_PADDR_GL = pInfo->PPS_PADDR_GL + MAX_NUM_PPS * sizeof(pic_param_set);
+ pInfo->TMP_OFFSET_REFFRM_PADDR_GL = pInfo->OFFSET_REF_FRAME_PADDR_GL +
+ MAX_NUM_SPS * sizeof(int32_t) * MAX_NUM_REF_FRAMES_IN_PIC_ORDER_CNT_CYCLE;
+
+ h264_memset( &(pInfo->active_SPS), 0x0, sizeof(seq_param_set_used) );
+ h264_memset( &(pInfo->active_PPS), 0x0, sizeof(pic_param_set) );
+
+ /* Global for SPS & PPS */
+ for(i=0;i<MAX_NUM_SPS;i++)
+ {
+ pInfo->active_SPS.seq_parameter_set_id = 0xff;
+ h264_Parse_Copy_Sps_To_DDR (pInfo, &(pInfo->active_SPS), i);
+ }
+ for(i=0;i<MAX_NUM_PPS;i++)
+ {
+ pInfo->active_PPS.seq_parameter_set_id = 0xff;
+ h264_Parse_Copy_Pps_To_DDR (pInfo, &(pInfo->active_PPS), i);
+ }
+
+ pInfo->active_SPS.seq_parameter_set_id = 0xff;
+ pInfo->sps_valid = 0;
+ pInfo->got_start = 0;
+
+ return;
+}
+
+
+void h264_init_Info_under_sps_pps_level(h264_Info* pInfo)
+{
+ int32_t i=0;
+
+ h264_memset( &(pInfo->dpb), 0x0, sizeof(h264_DecodedPictureBuffer) );
+ h264_memset( &(pInfo->SliceHeader), 0x0, sizeof(h264_Slice_Header_t) );
+ h264_memset( &(pInfo->old_slice), 0x0, sizeof(OldSliceParams) );
+ h264_memset( &(pInfo->sei_information), 0x0, sizeof(sei_info) );
+ h264_memset( &(pInfo->img), 0x0, sizeof(h264_img_par) );
+
+ pInfo->h264_list_replacement = 0;
+
+ pInfo->h264_pwt_start_byte_offset = 0;
+ pInfo->h264_pwt_start_bit_offset = 0;
+ pInfo->h264_pwt_end_byte_offset = 0;
+ pInfo->h264_pwt_end_bit_offset = 0;
+ pInfo->h264_pwt_enabled = 0;
+
+ for(i=0;i<32;i++)
+ {
+ pInfo->slice_ref_list0[i] = 0;
+ pInfo->slice_ref_list1[i] = 0;
+ }
+
+ pInfo->qm_present_list = 0;
+
+ pInfo->nal_unit_type = 0;
+ pInfo->old_nal_unit_type = 0xff;
+
+ pInfo->push_to_cur = 0;
+ pInfo->Is_first_frame_in_stream = 1;
+ pInfo->Is_SPS_updated = 0;
+ pInfo->number_of_first_au_info_nal_before_first_slice = 0;
+
+ pInfo->is_frame_boundary_detected_by_non_slice_nal = 0;
+ pInfo->is_frame_boundary_detected_by_slice_nal = 0;
+ pInfo->is_current_workload_done = 0;
+
+ pInfo->sei_rp_received = 0;
+ pInfo->last_I_frame_idc = 255;
+ pInfo->wl_err_curr = 0;
+ pInfo->wl_err_next = 0;
+
+ pInfo->primary_pic_type_plus_one = 0;
+ pInfo->sei_b_state_ready = 0;
+
+ /* Init old slice structure */
+ h264_init_old_slice(pInfo);
+
+ /* init_dpb */
+ h264_init_dpb(&(pInfo->dpb));
+
+ /* init_sei */
+ h264_sei_stream_initialise(pInfo);
+
+}
+
+void h264_init_Info(h264_Info* pInfo)
+{
+ h264_memset(pInfo, 0x0, sizeof(h264_Info));
+
+ pInfo->old_nal_unit_type = 0xff;
+
+ pInfo->Is_first_frame_in_stream =1;
+ pInfo->img.frame_count = 0;
+ pInfo->last_I_frame_idc = 255;
+
+ return;
+}
+
+ /* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+
+/////////////////////////////////////////////////////
+//
+// Judge whether it is the first VCL of a new picture
+//
+/////////////////////////////////////////////////////
+ int32_t h264_is_second_field(h264_Info * pInfo)
+ {
+ h264_Slice_Header_t cur_slice = pInfo->SliceHeader;
+ OldSliceParams old_slice = pInfo->old_slice;
+
+ int result = 0;
+
+ //pInfo->img.second_field = 0;
+
+ /// is it second field?
+
+ //OS_INFO( "xxx is_used = %d\n", pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].is_used);
+
+ if (cur_slice.structure != FRAME)
+ {
+ if( ( MPD_DPB_FS_NULL_IDC != pInfo->dpb.fs_dec_idc)&&(3 != viddec_h264_get_is_used(&(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc])) )
+ &&(0 != viddec_h264_get_is_used(&(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc])) ))
+ {
+ if ((cur_slice.frame_num == old_slice.frame_num)||(cur_slice.idr_flag))
+ {
+
+ if(old_slice.structure != cur_slice.structure)
+ {
+
+ if (((cur_slice.structure == TOP_FIELD &&old_slice.structure == BOTTOM_FIELD) || // Condition 1:
+ (old_slice.structure == TOP_FIELD && cur_slice.structure == BOTTOM_FIELD)) && \
+ ((old_slice.nal_ref_idc ==0 && cur_slice.nal_ref_idc == 0) || // Condition 2:
+ (old_slice.nal_ref_idc !=0 &&cur_slice.nal_ref_idc != 0)))
+ {
+ //pInfo->img.second_field = 1;
+ result = 1;
+ }
+ }
+ }
+
+
+ }
+
+
+ }
+
+
+
+ return result;
+
+ } //// End of h264_is_second_field
+
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+
+int32_t h264_is_new_picture_start(h264_Info * pInfo, h264_Slice_Header_t cur_slice, h264_Slice_Header_t old_slice)
+{
+ int result = 0;
+
+ if(pInfo->number_of_first_au_info_nal_before_first_slice)
+ {
+ pInfo->number_of_first_au_info_nal_before_first_slice = 0;
+ return 1;
+ }
+
+
+
+ result |= (old_slice.pic_parameter_id != cur_slice.pic_parameter_id);
+ result |= (old_slice.frame_num != cur_slice.frame_num);
+ result |= (old_slice.field_pic_flag != cur_slice.field_pic_flag);
+ if(cur_slice.field_pic_flag && old_slice.field_pic_flag)
+ {
+ result |= (old_slice.bottom_field_flag != cur_slice.bottom_field_flag);
+ }
+
+ result |= (old_slice.nal_ref_idc != cur_slice.nal_ref_idc) && \
+ ((old_slice.nal_ref_idc == 0) || (cur_slice.nal_ref_idc == 0));
+ result |= ( old_slice.idr_flag != cur_slice.idr_flag);
+
+ if (cur_slice.idr_flag && old_slice.idr_flag)
+ {
+ result |= (old_slice.idr_pic_id != cur_slice.idr_pic_id);
+ }
+
+ if (pInfo->active_SPS.pic_order_cnt_type == 0)
+ {
+ result |= (old_slice.pic_order_cnt_lsb != cur_slice.pic_order_cnt_lsb);
+ result |= (old_slice.delta_pic_order_cnt_bottom != cur_slice.delta_pic_order_cnt_bottom);
+ }
+
+ if (pInfo->active_SPS.pic_order_cnt_type == 1)
+ {
+ result |= (old_slice.delta_pic_order_cnt[0] != cur_slice.delta_pic_order_cnt[0]);
+ result |= (old_slice.delta_pic_order_cnt[1] != cur_slice.delta_pic_order_cnt[1]);
+ }
+
+ return result;
+}
+
+
+int32_t h264_check_previous_frame_end(h264_Info * pInfo)
+{
+ int result = 0;
+
+ if( (h264_NAL_UNIT_TYPE_SLICE==pInfo->old_nal_unit_type)||(h264_NAL_UNIT_TYPE_IDR==pInfo->old_nal_unit_type) )
+ {
+
+ switch ( pInfo->nal_unit_type )
+ {
+ case h264_NAL_UNIT_TYPE_Acc_unit_delimiter:
+ case h264_NAL_UNIT_TYPE_SPS:
+ case h264_NAL_UNIT_TYPE_PPS:
+ case h264_NAL_UNIT_TYPE_SEI:
+ case h264_NAL_UNIT_TYPE_EOSeq:
+ case h264_NAL_UNIT_TYPE_EOstream:
+ case h264_NAL_UNIT_TYPE_Reserved1:
+ case h264_NAL_UNIT_TYPE_Reserved2:
+ case h264_NAL_UNIT_TYPE_Reserved3:
+ case h264_NAL_UNIT_TYPE_Reserved4:
+ case h264_NAL_UNIT_TYPE_Reserved5:
+ {
+ pInfo->img.current_slice_num = 0;
+
+ if((pInfo->img.structure == FRAME) || (pInfo->img.second_field)) {
+ pInfo->is_frame_boundary_detected_by_non_slice_nal =1;
+ pInfo->is_current_workload_done=1;
+ result=1;
+ }
+ break;
+ }
+ default:
+ break;
+ }
+
+ }
+
+ return result;
+
+}
+
+
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+
+//////////////////////////////////////////////////////////////
+// 1) Update old slice structure for frame boundary detection
+//////////////////////////////////////////////////////////////
+void h264_update_old_slice(h264_Info * pInfo,h264_Slice_Header_t next_SliceHeader)
+{
+ pInfo->old_slice.pic_parameter_id = pInfo->SliceHeader.pic_parameter_id;
+
+ pInfo->old_slice.frame_num = pInfo->SliceHeader.frame_num;
+
+ pInfo->old_slice.field_pic_flag = pInfo->SliceHeader.field_pic_flag;
+
+ if(pInfo->SliceHeader.field_pic_flag)
+ {
+ pInfo->old_slice.bottom_field_flag = pInfo->SliceHeader.bottom_field_flag;
+ }
+
+ pInfo->old_slice.nal_ref_idc = pInfo->SliceHeader.nal_ref_idc;
+
+ pInfo->old_slice.structure = pInfo->SliceHeader.structure;
+
+ pInfo->old_slice.idr_flag = pInfo->SliceHeader.idr_flag;
+ if (pInfo->SliceHeader.idr_flag)
+ {
+ pInfo->old_slice.idr_pic_id = pInfo->SliceHeader.idr_pic_id;
+ }
+
+ if (pInfo->active_SPS.pic_order_cnt_type == 0)
+ {
+ pInfo->old_slice.pic_order_cnt_lsb = pInfo->SliceHeader.pic_order_cnt_lsb;
+ pInfo->old_slice.delta_pic_order_cnt_bottom = pInfo->SliceHeader.delta_pic_order_cnt_bottom;
+ }
+
+ if (pInfo->active_SPS.pic_order_cnt_type == 1)
+ {
+ pInfo->old_slice.delta_pic_order_cnt[0] = pInfo->SliceHeader.delta_pic_order_cnt[0];
+ pInfo->old_slice.delta_pic_order_cnt[1] = pInfo->SliceHeader.delta_pic_order_cnt[1];
+ }
+
+ ////////////////////////////// Next to current
+ memcpy(&pInfo->SliceHeader, &next_SliceHeader, sizeof(h264_Slice_Header_t));
+
+ return;
+}
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+
+//////////////////////////////////////////////////////////////////////////////
+// Initialization for new picture
+//////////////////////////////////////////////////////////////////////////////
+void h264_update_img_info(h264_Info * pInfo )
+{
+ h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;
+
+ pInfo->img.frame_num = pInfo->SliceHeader.frame_num;
+ pInfo->img.structure = pInfo->SliceHeader.structure;
+
+ pInfo->img.field_pic_flag = pInfo->SliceHeader.field_pic_flag;
+ pInfo->img.bottom_field_flag = pInfo->SliceHeader.bottom_field_flag;
+
+ pInfo->img.MbaffFrameFlag = pInfo->active_SPS.sps_disp.mb_adaptive_frame_field_flag & (!(pInfo->SliceHeader.field_pic_flag));
+ pInfo->img.pic_order_cnt_type = pInfo->active_SPS.pic_order_cnt_type;
+
+ if(pInfo->img.pic_order_cnt_type == 1) {
+ pInfo->img.num_ref_frames_in_pic_order_cnt_cycle = pInfo->active_SPS.num_ref_frames_in_pic_order_cnt_cycle;
+ pInfo->img.delta_pic_order_always_zero_flag = pInfo->active_SPS.delta_pic_order_always_zero_flag;
+ pInfo->img.offset_for_non_ref_pic = pInfo->active_SPS.offset_for_non_ref_pic;
+ pInfo->img.offset_for_top_to_bottom_field = pInfo->active_SPS.offset_for_top_to_bottom_field;
+ }
+
+ pInfo->img.pic_order_cnt_lsb = pInfo->SliceHeader.pic_order_cnt_lsb;
+ //pInfo->img.pic_order_cnt_msb = pInfo->SliceHeader.pic_order_cnt_msb;
+ pInfo->img.delta_pic_order_cnt_bottom = pInfo->SliceHeader.delta_pic_order_cnt_bottom;
+ pInfo->img.delta_pic_order_cnt[0] = pInfo->SliceHeader.delta_pic_order_cnt[0];
+ pInfo->img.delta_pic_order_cnt[1] = pInfo->SliceHeader.delta_pic_order_cnt[1];
+
+
+ pInfo->img.PreviousFrameNum = pInfo->old_slice.frame_num;
+
+ pInfo->img.no_output_of_prior_pics_flag = pInfo->SliceHeader.sh_dec_refpic.no_output_of_prior_pics_flag;
+
+ ////////////////////////////////////////////////// Check SEI recovery point
+ if (pInfo->sei_information.recovery_point) {
+ int32_t MaxFrameNum = 1 << (pInfo->active_SPS.log2_max_frame_num_minus4 + 4);
+ pInfo->sei_information.recovery_frame_num = (pInfo->img.frame_num + pInfo->sei_information.recovery_frame_cnt) % MaxFrameNum;
+ }
+
+ if (pInfo->SliceHeader.idr_flag)
+ pInfo->sei_information.recovery_frame_num = pInfo->img.frame_num;
+
+
+
+ /////////////////////////////////////////////////Resolution Change
+ pInfo->img.curr_has_mmco_5 = 0;
+
+ if ( (pInfo->img.PicWidthInMbs != p_dpb->PicWidthInMbs)||
+ (pInfo->img.FrameHeightInMbs != p_dpb->FrameHeightInMbs) )
+ {
+ int32_t no_output_old_pics = (pInfo->SliceHeader.idr_flag)? pInfo->img.no_output_of_prior_pics_flag : 0;
+
+ // If resolution changed, reset the soft DPB here
+ h264_dpb_reset_dpb(pInfo, pInfo->img.PicWidthInMbs, pInfo->img.FrameHeightInMbs, 1, no_output_old_pics);
+ }
+
+ return;
+
+} ///// End of init new frame
+
+
+void h264_update_frame_type(h264_Info * pInfo )
+{
+
+//update frame type
+ if(pInfo->img.structure == FRAME)
+ {
+ if(pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR)
+ {
+ pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (0x1 << FRAME_TYPE_STRUCTRUE_OFFSET)|(FRAME_TYPE_IDR << FRAME_TYPE_FRAME_OFFSET);
+ //pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = 0xff;
+ //pInfo->dpb.fs[0].pic_type = pInfo->dpb.fs_dec_idc;
+
+ }
+ else
+ {
+ #if 1
+ switch(pInfo->SliceHeader.slice_type)
+ {
+ case h264_PtypeB:
+ pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (0x1 << FRAME_TYPE_STRUCTRUE_OFFSET)|(FRAME_TYPE_B << FRAME_TYPE_FRAME_OFFSET);
+ break;
+ case h264_PtypeSP:
+ case h264_PtypeP:
+ if( ((pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type) & (0x7 << FRAME_TYPE_FRAME_OFFSET))>>FRAME_TYPE_FRAME_OFFSET != FRAME_TYPE_B)
+ pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (0x1 << FRAME_TYPE_STRUCTRUE_OFFSET)|(FRAME_TYPE_P << FRAME_TYPE_FRAME_OFFSET);
+ break;
+ case h264_PtypeI:
+ case h264_PtypeSI:
+ if( ((pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type) & (0x7 << FRAME_TYPE_FRAME_OFFSET))>>FRAME_TYPE_FRAME_OFFSET == FRAME_TYPE_INVALID)
+ {
+ pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (0x1 << FRAME_TYPE_STRUCTRUE_OFFSET)|(FRAME_TYPE_I << FRAME_TYPE_FRAME_OFFSET);
+ }
+ pInfo->last_I_frame_idc = pInfo->dpb.fs_dec_idc;
+
+ break;
+ default:
+ break;
+
+ }
+ #endif
+
+ }
+
+ }
+ else if(pInfo->img.structure == TOP_FIELD)
+ {
+ if(pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR)
+ {
+ pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (FRAME_TYPE_IDR << FRAME_TYPE_TOP_OFFSET)|(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type & (0x7 << FRAME_TYPE_BOTTOM_OFFSET));;
+ }
+ else
+ {
+ switch(pInfo->SliceHeader.slice_type)
+ {
+ case h264_PtypeB:
+ pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (FRAME_TYPE_B << FRAME_TYPE_TOP_OFFSET)|(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type & (0x7 << FRAME_TYPE_BOTTOM_OFFSET));
+ break;
+ case h264_PtypeSP:
+ case h264_PtypeP:
+ if( ((pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type) & (0x7 << FRAME_TYPE_TOP_OFFSET))>>FRAME_TYPE_TOP_OFFSET != FRAME_TYPE_B)
+ pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (FRAME_TYPE_P << FRAME_TYPE_TOP_OFFSET)|(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type & (0x7 << FRAME_TYPE_BOTTOM_OFFSET));
+ break;
+ case h264_PtypeI:
+ case h264_PtypeSI:
+ if( ((pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type) & (0x7 << FRAME_TYPE_TOP_OFFSET))>>FRAME_TYPE_TOP_OFFSET == FRAME_TYPE_INVALID)
+ {
+ pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (FRAME_TYPE_I << FRAME_TYPE_TOP_OFFSET)|(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type & (0x7 << FRAME_TYPE_BOTTOM_OFFSET));
+ }
+ break;
+ default:
+ break;
+
+ }
+
+ }
+
+
+ }else if(pInfo->img.structure == BOTTOM_FIELD)
+ {
+ if(pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR)
+ {
+ pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (FRAME_TYPE_IDR << FRAME_TYPE_BOTTOM_OFFSET)|(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type & (0x7 << FRAME_TYPE_TOP_OFFSET));;
+ }
+ else
+ {
+ switch(pInfo->SliceHeader.slice_type)
+ {
+ case h264_PtypeB:
+ pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (FRAME_TYPE_B << FRAME_TYPE_BOTTOM_OFFSET)|(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type & (0x7 << FRAME_TYPE_TOP_OFFSET));
+ break;
+ case h264_PtypeSP:
+ case h264_PtypeP:
+ if( ((pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type) & (0x7 << FRAME_TYPE_BOTTOM_OFFSET))>>FRAME_TYPE_BOTTOM_OFFSET != FRAME_TYPE_B)
+ pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (FRAME_TYPE_P << FRAME_TYPE_BOTTOM_OFFSET)|(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type & (0x7 << FRAME_TYPE_TOP_OFFSET));
+ break;
+ case h264_PtypeI:
+ case h264_PtypeSI:
+ if( ((pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type) & (0x7 << FRAME_TYPE_BOTTOM_OFFSET))>>FRAME_TYPE_BOTTOM_OFFSET == FRAME_TYPE_INVALID)
+ {
+ pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (FRAME_TYPE_I << FRAME_TYPE_BOTTOM_OFFSET)|(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type & (0x7 << FRAME_TYPE_TOP_OFFSET));
+ }
+ break;
+ default:
+ break;
+
+ }
+
+ }
+
+ }
+ return;
+
+}
+
+
+//////#endif ///////////// IFDEF H264_PARSE_C///////////////////
+
diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_bsd.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_bsd.c
new file mode 100644
index 0000000..c4e00ee
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_bsd.c
@@ -0,0 +1,228 @@
+/* ///////////////////////////////////////////////////////////////////////
+//
+// INTEL CORPORATION PROPRIETARY INFORMATION
+// This software is supplied under the terms of a license agreement or
+// nondisclosure agreement with Intel Corporation and may not be copied
+// or disclosed except in accordance with the terms of that agreement.
+// Copyright (c) 2001-2006 Intel Corporation. All Rights Reserved.
+//
+// Description: H.264 bitstream decoding
+//
+///////////////////////////////////////////////////////////////////////*/
+
+
+#include "h264.h"
+#include "h264parse.h"
+#include "viddec_parser_ops.h"
+
+
+
+
+
+/**
+ get_codeNum: Get a codeNum as described in sec 9.1 of the H.264 spec.
+ @param cxt : Buffer address & size are inputs; the cxt is updated
+ with codeNum & sign on success.
+ Assumption: codeNum is at most 32 bits wide.
+
+ @retval 1 : Successfully found a codeNum; cxt is updated with codeNum, sign, and size of the code.
+ @retval 0 : Couldn't find a code in the current buffer.
+*/
+
+uint32_t h264_get_codeNum(void *parent, h264_Info* pInfo)
+{
+ // Parses one Exp-Golomb codeNum (H.264 sec 9.1) from the bitstream via
+ // the viddec parser-manager ops. Returns the decoded codeNum, or
+ // MAX_INT32_VALUE when the underlying bit reads fail (end of buffer).
+ int32_t leadingZeroBits= 0;
+ uint32_t temp = 0, match = 0, noOfBits = 0, count = 0;
+ uint32_t codeNum =0;
+ uint32_t bits_offset =0, byte_offset =0;
+ uint8_t is_emul =0;
+ uint8_t is_first_byte = 1;
+ uint32_t length =0;
+ uint32_t bits_need_add_in_first_byte =0;
+ int32_t bits_operation_result=0;
+
+ //remove warning
+ pInfo = pInfo;
+
+ ////// Step 1: parse through zero bits until we find a bit with value 1.
+ viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul);
+
+
+ // Scan a byte at a time (the first byte may be partial when the stream
+ // is not byte-aligned) counting leading zeros until the first 1 bit.
+ while(!match)
+ {
+ if ((bits_offset != 0) && ( is_first_byte == 1))
+ {
+ //we handle byte at a time, if we have offset then for first
+ // byte handle only 8 - offset bits
+ noOfBits = (uint8_t)(8 - bits_offset);
+ bits_operation_result = viddec_pm_peek_bits(parent, &temp, noOfBits);
+
+
+ // Left-align the partial byte so the 0x80 test below works.
+ temp = (temp << bits_offset);
+ if(temp!=0)
+ {
+ bits_need_add_in_first_byte = bits_offset;
+ }
+ is_first_byte =0;
+ }
+ else
+ {
+ noOfBits = 8;/* always 8 bits as we read a byte at a time */
+ bits_operation_result = viddec_pm_peek_bits(parent, &temp, 8);
+
+ }
+
+ // Peek failed: no more data, report the sentinel error value.
+ if(-1==bits_operation_result)
+ {
+ return MAX_INT32_VALUE;
+ }
+
+ if(temp != 0)
+ {
+ // if byte!=0 we have at least one bit with value 1.
+ count=1;
+ while(((temp & 0x80) != 0x80) && (count <= noOfBits))
+ {
+ count++;
+ temp = temp <<1;
+ }
+ //At this point we get the bit position of 1 in current byte(count).
+
+ match = 1;
+ leadingZeroBits += count;
+ }
+ else
+ {
+ // we don't have a 1 in current byte
+ leadingZeroBits += noOfBits;
+ }
+
+ // Consume exactly the bits we examined (the whole chunk when no 1 was
+ // found, otherwise up to and including the leading 1 bit).
+ if(!match)
+ {
+ //actually move the bitoff by viddec_pm_get_bits
+ viddec_pm_get_bits(parent, &temp, noOfBits);
+ }
+ else
+ {
+ //actually move the bitoff by viddec_pm_get_bits
+ viddec_pm_get_bits(parent, &temp, count);
+ }
+
+ }
+ ////// step 2: Now read the next (leadingZeroBits-1) bits to get the encoded value.
+
+
+ if(match)
+ {
+
+ viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul);
+ /* bit position in current byte */
+ //count = (uint8_t)((leadingZeroBits + bits_offset)& 0x7);
+ count = ((count + bits_need_add_in_first_byte)& 0x7);
+
+ // leadingZeroBits now holds the number of INFO bits still to read;
+ // remember it in 'length' for the final 2^length - 1 adjustment.
+ leadingZeroBits --;
+ length = leadingZeroBits;
+ codeNum = 0;
+ noOfBits = 8 - count;
+
+
+ // Accumulate the INFO bits, at most one byte's worth per iteration.
+ while(leadingZeroBits > 0)
+ {
+ if(noOfBits < (uint32_t)leadingZeroBits)
+ {
+ viddec_pm_get_bits(parent, &temp, noOfBits);
+
+
+ codeNum = (codeNum << noOfBits) | temp;
+ leadingZeroBits -= noOfBits;
+ }
+ else
+ {
+ viddec_pm_get_bits(parent, &temp, leadingZeroBits);
+
+ codeNum = (codeNum << leadingZeroBits) | temp;
+ leadingZeroBits = 0;
+ }
+
+
+ noOfBits = 8;
+ }
+ // update codeNum = 2 ** (leadingZeroBits) -1 + read_bits(leadingZeroBits).
+ codeNum = codeNum + (1 << length) -1;
+
+ }
+
+ // NOTE(review): this trailing peek appears to refresh the parser
+ // manager's emulation-prevention state for a partially consumed byte —
+ // TODO confirm against viddec_pm_peek_bits semantics.
+ viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul);
+ if(bits_offset!=0)
+ {
+ viddec_pm_peek_bits(parent, &temp, 8-bits_offset);
+ }
+
+ return codeNum;
+}
+
+
+/*---------------------------------------*/
+/*---------------------------------------*/
+int32_t h264_GetVLCElement(void *parent, h264_Info* pInfo, uint8_t bIsSigned)
+{
+ // Read one Exp-Golomb code word and map it to a signed value (se(v))
+ // when bIsSigned is set; otherwise return the raw codeNum (ue(v)).
+ int32_t code = h264_get_codeNum(parent , pInfo);
+
+ if (bIsSigned)
+ {
+ // Odd codeNum -> positive, even codeNum -> negative (sec 9.1.1).
+ int32_t magnitude = (code + 1) >> 1;
+ code = (code & 0x1) ? magnitude : -magnitude;
+ }
+
+ return code;
+}
+
+///
+/// Check whether more RBSP data left in current NAL
+///
+uint8_t h264_More_RBSP_Data(void *parent, h264_Info * pInfo)
+{
+ // Returns 1 when more RBSP payload remains in the current NAL unit,
+ // 0 when only the rbsp_stop_one_bit plus zero padding is left.
+ uint8_t cnt = 0;
+
+ uint8_t is_emul =0;
+ uint8_t cur_byte = 0;
+ int32_t shift_bits =0;
+ uint32_t ctr_bit = 0;
+ uint32_t bits_offset =0, byte_offset =0;
+
+ //remove warning
+ pInfo = pInfo;
+
+ // NOTE(review): presumably viddec_pm_is_nomoredata() returns nonzero
+ // when the buffer is exhausted — confirm against the pm ops; here a
+ // non-exhausted buffer short-circuits to "more data".
+ if (!viddec_pm_is_nomoredata(parent))
+ return 1;
+
+ viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul);
+
+ // Bit index (MSB first) of the next unread bit within the current byte.
+ shift_bits = 7-bits_offset;
+
+ // read one byte
+ viddec_pm_get_cur_byte(parent, &cur_byte);
+
+ ctr_bit = ((cur_byte)>> (shift_bits--)) & 0x01;
+
+ // a stop bit has to be one
+ if (ctr_bit==0)
+ return 1;
+
+ // After the stop bit, any set bit in the remaining padding means more
+ // RBSP data is actually present.
+ while (shift_bits>=0 && !cnt)
+ {
+ cnt |= (((cur_byte)>> (shift_bits--)) & 0x01); // set up control bit
+ }
+
+ return (cnt);
+}
+
+
+
+///////////// EOF/////////////////////
+
diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_dpb.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_dpb.c
new file mode 100644
index 0000000..d1b693b
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_dpb.c
@@ -0,0 +1,4171 @@
+
+/*!
+ ***********************************************************************
+ * \file: h264parse_dpb.c
+ *
+ ***********************************************************************
+ */
+
+#include "viddec_fw_debug.h"
+#include "viddec_parser_ops.h"
+
+#include "viddec_fw_workload.h"
+#include "viddec_pm.h"
+#include "viddec_h264_parse.h"
+
+
+
+//#include <limits.h>
+#include "h264parse.h"
+#include "h264parse_dpb.h"
+//#include "h264_debug.h"
+
+#ifndef NULL
+#define NULL 0
+#endif
+//#ifndef USER_MODE
+//#define NULL 0
+//#endif
+
+////////////////////////// Declare Globals///////////////////////////////
+frame_store *active_fs;
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+///////////////////////// DPB init //////////////////////////////////////////
+//////////////////////////////////////////////////////////////////////////////
+// Init DPB
+// Description: init dpb, which should be called while open
+//
+//////////////////////////////////////////////////////////////////////////////
+
+void h264_init_dpb(h264_DecodedPictureBuffer * p_dpb)
+{
+ // Reset every frame-store slot and the DPB bookkeeping fields to the
+ // "empty" sentinel. Called once when the decoder instance is opened.
+ int32_t idx;
+
+ for (idx = 0; idx < NUM_DPB_FRAME_STORES; idx++)
+ {
+ p_dpb->fs[idx].fs_idc = MPD_DPB_FS_NULL_IDC;
+ p_dpb->fs_dpb_idc[idx] = MPD_DPB_FS_NULL_IDC;
+ }
+
+ p_dpb->used_size = 0;
+ p_dpb->fs_dec_idc = MPD_DPB_FS_NULL_IDC;
+ p_dpb->fs_non_exist_idc = MPD_DPB_FS_NULL_IDC;
+
+ return;
+}
+
+
+///////////////////////// Reference list management //////////////////////////
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_add_ref_list ()
+//
+// Adds an idc to the long term reference list
+//////////////////////////////////////////////////////////////////////////////
+void h264_dpb_add_ref_list(h264_DecodedPictureBuffer * p_dpb, int32_t ref_idc)
+{
+ // Append ref_idc at the tail of the short term reference list.
+ p_dpb->fs_ref_idc[p_dpb->ref_frames_in_buffer++] = ref_idc;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_add_ltref_list ()
+//
+// Adds an idc to the long term reference list
+//////////////////////////////////////////////////////////////////////////////
+void h264_dpb_add_ltref_list(h264_DecodedPictureBuffer * p_dpb, int32_t ref_idc)
+{
+ // Append ref_idc at the tail of the long term reference list.
+ p_dpb->fs_ltref_idc[p_dpb->ltref_frames_in_buffer++] = ref_idc;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_insert_ref_lists (h264_DecodedPictureBuffer * p_dpb, int32_t NonExisting)
+//
+// Decide whether the current picture needs to be added to the reference lists
+// active_fs should be set-up prior to calling this function
+//
+// Check if we need to search the lists here
+// or can we go straight to adding to ref lists..
+//////////////////////////////////////////////////////////////////////////////
+
+void h264_dpb_insert_ref_lists(h264_DecodedPictureBuffer * p_dpb, int32_t NonExisting)
+{
+ // Adds the current picture's frame store to the short- or long-term
+ // reference list if it is marked as a reference. Field pictures are
+ // only added once per frame store (the list is searched first).
+ //
+ // Select the picture under consideration: the "non existing" frame
+ // store (frame_num gap) or the frame store currently being decoded.
+ if(NonExisting)
+ h264_dpb_set_active_fs(p_dpb,p_dpb->fs_non_exist_idc);
+ else
+ h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc);
+
+ //if(active_fs->is_reference)
+ if(active_fs->frame.used_for_reference)
+ {
+ if(viddec_h264_get_is_long_term(active_fs))
+ {
+ // A whole frame can be appended unconditionally; a single field
+ // must not create a duplicate entry for its frame store.
+ if(viddec_h264_get_dec_structure(active_fs) == FRAME)
+ h264_dpb_add_ltref_list(p_dpb, active_fs->fs_idc);
+ else
+ {
+ uint32_t found_in_list = 0, i = 0;
+ for (i = 0; (i < p_dpb->ltref_frames_in_buffer) && (found_in_list == 0); i++) {
+ if(p_dpb->fs_ltref_idc[i] == active_fs->fs_idc) found_in_list = 1;
+ }
+
+ if(found_in_list == 0) h264_dpb_add_ltref_list(p_dpb, active_fs->fs_idc);
+ }
+ }
+ else
+ {
+ // Short-term reference: same frame-vs-field handling as above.
+ if(viddec_h264_get_dec_structure(active_fs) == FRAME) {
+ h264_dpb_add_ref_list(p_dpb, active_fs->fs_idc);
+ } else
+ {
+ uint32_t found_in_list = 0, i = 0;
+
+ for (i = 0; (i < p_dpb->ref_frames_in_buffer) && (found_in_list == 0); i++)
+ {
+ if(p_dpb->fs_ref_idc[i] == active_fs->fs_idc) found_in_list = 1;
+ }
+
+ if(found_in_list == 0) h264_dpb_add_ref_list(p_dpb, active_fs->fs_idc);
+ }
+ }
+ }
+
+ return;
+
+}
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// Set active fs
+//////////////////////////////////////////////////////////////////////////////
+
+void h264_dpb_set_active_fs(h264_DecodedPictureBuffer * p_dpb, int32_t index)
+{
+ // Point the file-scope global active_fs at the frame store 'index';
+ // the DPB helpers in this module operate on it implicitly.
+ // 'index' is not range-checked: callers must pass a valid fs index.
+ active_fs = &p_dpb->fs[index];
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// Sort reference list
+//////////////////////////////////////////////////////////////////////////////
+
+void h264_list_sort(uint8_t *list, int32_t *sort_indices, int32_t size, int32_t desc)
+{
+ // Sort list[] in place, ordered by the parallel key array sort_indices[]
+ // (keys are swapped along with the entries).
+ // desc != 0 -> descending keys; desc == 0 -> ascending keys.
+ // Simple O(n^2) exchange sort; list sizes here are at most 32 entries.
+ int32_t j, k, temp, idc;
+
+ // Dodgy looking for embedded code here...
+ if(size > 1)
+ {
+ for (j = 0; j < size-1; j = j + 1) {
+ for (k = j + 1; k < size; k = k + 1) {
+ // Use logical operators: the previous bitwise form
+ // (desc & (a<b)) | (~desc & (a>b)) only worked because
+ // comparisons yield exactly 0/1 and desc was exactly 0 or 1.
+ if (( desc && (sort_indices[j] < sort_indices[k])) ||
+ (!desc && (sort_indices[j] > sort_indices[k])) )
+ {
+ temp = sort_indices[k];
+ sort_indices[k] = sort_indices[j];
+ sort_indices[j] = temp;
+ idc = list[k];
+ list[k] = list[j];
+ list[j] = idc;
+ }
+ }
+ }
+ }
+}
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_pic_is_bottom_field_ref ()
+//
+// Returns 1 when the bottom field of active_fs is a reference of the
+// requested kind (long-term when long_term != 0, short-term otherwise)
+//////////////////////////////////////////////////////////////////////////////
+
+int32_t h264_dpb_pic_is_bottom_field_ref(int32_t long_term)
+{
+ // Report whether the bottom field of active_fs is a reference picture
+ // of the requested kind: long-term when long_term != 0, else short-term.
+ int32_t used = active_fs->bottom_field.used_for_reference;
+ int32_t is_lt = active_fs->bottom_field.is_long_term;
+
+ return (used && (long_term ? is_lt : !is_lt)) ? 1 : 0;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_pic_is_top_field_ref ()
+//
+// Returns 1 when the top field of active_fs is a reference of the
+// requested kind (long-term when long_term != 0, short-term otherwise)
+//////////////////////////////////////////////////////////////////////////////
+
+int32_t h264_dpb_pic_is_top_field_ref(int32_t long_term)
+{
+ // Report whether the top field of active_fs is a reference picture of
+ // the requested kind: long-term when long_term != 0, else short-term.
+ int32_t used = active_fs->top_field.used_for_reference;
+ int32_t is_lt = active_fs->top_field.is_long_term;
+
+ return (used && (long_term ? is_lt : !is_lt)) ? 1 : 0;
+}
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_gen_pic_list_from_frame_list ()
+//
+// Builds a field picture list from a sorted frame-store list for the
+// given current picture structure
+//////////////////////////////////////////////////////////////////////////////
+
+int32_t h264_dpb_gen_pic_list_from_frame_list(h264_DecodedPictureBuffer *p_dpb, uint8_t *pic_list, uint8_t *frame_list, int32_t currPicStructure, int32_t list_size, int32_t long_term)
+{
+ // Expand a sorted frame-store list (frame_list) into a field picture
+ // list (pic_list), alternating field parity starting with the same
+ // parity as the current picture (H.264 sec 8.2.4.2.5).
+ // long_term selects which kind of field references are eligible.
+ // Returns the number of entries written to pic_list; when
+ // currPicStructure is neither TOP_FIELD nor BOTTOM_FIELD (e.g. FRAME)
+ // nothing is written and 0 is returned.
+ int32_t top_idx, bot_idx, got_pic, list_idx;
+ int32_t lterm;
+
+ list_idx = 0;
+ lterm = (long_term)? 1:0;
+
+ if(list_size){
+
+
+ top_idx = 0;
+ bot_idx = 0;
+
+ if (currPicStructure == TOP_FIELD) {
+ while ((top_idx < list_size)||(bot_idx < list_size))
+ {
+ /////////////////////////////////////////// ref Top Field
+ got_pic = 0;
+ // NOTE: use logical operators here; the previous bitwise form
+ // "(top_idx < list_size) & ~got_pic" only worked because the
+ // comparison yields exactly 0/1. Now consistent with the
+ // BOTTOM_FIELD branch below.
+ while ((top_idx < list_size) && (!(got_pic)))
+ {
+ h264_dpb_set_active_fs(p_dpb, frame_list[top_idx]);
+ if ((viddec_h264_get_is_used(active_fs))&0x1)
+ {
+ if(h264_dpb_pic_is_top_field_ref(long_term))
+ {
+ pic_list[list_idx] = PUT_LIST_LONG_TERM_BITS(lterm) + frame_list[top_idx] + PUT_LIST_INDEX_FIELD_BIT(0); // top_field
+ list_idx++;
+ got_pic = 1;
+ }
+ }
+ top_idx++;
+ }
+
+ /////////////////////////////////////////// ref Bottom Field
+ got_pic = 0;
+ while ((bot_idx < list_size) && (!(got_pic)))
+ {
+ h264_dpb_set_active_fs(p_dpb, frame_list[bot_idx]);
+ if ((viddec_h264_get_is_used(active_fs))&0x2)
+ {
+ if(h264_dpb_pic_is_bottom_field_ref(long_term))
+ {
+ pic_list[list_idx] = PUT_LIST_LONG_TERM_BITS(lterm) + frame_list[bot_idx] + PUT_LIST_INDEX_FIELD_BIT(1); // bottom_field
+ list_idx++;
+ got_pic = 1;
+ }
+ }
+ bot_idx++;
+ }
+ }
+ }
+
+ /////////////////////////////////////////////// current Bottom Field
+ if (currPicStructure == BOTTOM_FIELD) {
+ while ((top_idx < list_size)||(bot_idx < list_size))
+ {
+ /////////////////////////////////////////// ref Bottom Field
+ got_pic = 0;
+ while ((bot_idx < list_size) && (!(got_pic)))
+ {
+ h264_dpb_set_active_fs(p_dpb, frame_list[bot_idx]);
+ if ((viddec_h264_get_is_used(active_fs))&0x2) {
+ if(h264_dpb_pic_is_bottom_field_ref(long_term)) {
+ // short term ref pic
+ pic_list[list_idx] = PUT_LIST_LONG_TERM_BITS(lterm) + frame_list[bot_idx] + PUT_LIST_INDEX_FIELD_BIT(1); // bottom_field
+ list_idx++;
+ got_pic = 1;
+ }
+ }
+ bot_idx++;
+ }
+
+ /////////////////////////////////////////// ref Top Field
+ got_pic = 0;
+ while ((top_idx < list_size) && (!(got_pic)))
+ {
+ h264_dpb_set_active_fs(p_dpb, frame_list[top_idx]);
+ if ((viddec_h264_get_is_used(active_fs))&0x1) {
+ if(h264_dpb_pic_is_top_field_ref(long_term)){
+ // short term ref pic
+ pic_list[list_idx] = PUT_LIST_LONG_TERM_BITS(lterm) + frame_list[top_idx] + PUT_LIST_INDEX_FIELD_BIT(0); // top_field
+ list_idx++;
+ got_pic = 1;
+ }
+ }
+ top_idx++;
+ }
+ }
+ }
+ }
+
+ return list_idx;
+}
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_remove_ref_list ()
+//
+// Removes an idc from the short term reference list and updates the list after
+//////////////////////////////////////////////////////////////////////////////
+
+void h264_dpb_remove_ref_list(h264_DecodedPictureBuffer * p_dpb, int32_t ref_idc)
+{
+ // Delete ref_idc from the short term reference list, compacting the
+ // remaining entries down one slot. No-op when ref_idc is not present.
+ uint8_t pos;
+
+ for (pos = 0; pos < p_dpb->ref_frames_in_buffer; pos++)
+ {
+ if (p_dpb->fs_ref_idc[pos] == ref_idc)
+ break;
+ }
+
+ if (pos < p_dpb->ref_frames_in_buffer)
+ {
+ // Shift the tail of the list up over the removed entry.
+ for (; pos < p_dpb->ref_frames_in_buffer - 1; pos++)
+ p_dpb->fs_ref_idc[pos] = p_dpb->fs_ref_idc[pos + 1];
+
+ p_dpb->fs_ref_idc[pos] = MPD_DPB_FS_NULL_IDC; // Clear the last one
+ p_dpb->ref_frames_in_buffer--;
+ }
+
+ return;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_remove_ltref_list ()
+//
+// Removes an idc from the long term reference list and updates list after
+//////////////////////////////////////////////////////////////////////////////
+
+void h264_dpb_remove_ltref_list(h264_DecodedPictureBuffer * p_dpb,int32_t ref_idc)
+{
+ // Delete ref_idc from the long term reference list, compacting the
+ // remaining entries down one slot. No-op when ref_idc is not present.
+ uint8_t pos;
+
+ for (pos = 0; pos < p_dpb->ltref_frames_in_buffer; pos++)
+ {
+ if (p_dpb->fs_ltref_idc[pos] == ref_idc)
+ break;
+ }
+
+ if (pos < p_dpb->ltref_frames_in_buffer)
+ {
+ // Shift the tail of the list up over the removed entry.
+ for (; pos < (uint8_t)(p_dpb->ltref_frames_in_buffer - 1); pos++)
+ p_dpb->fs_ltref_idc[pos] = p_dpb->fs_ltref_idc[pos + 1];
+
+ p_dpb->fs_ltref_idc[pos] = MPD_DPB_FS_NULL_IDC; // Clear the last one
+ p_dpb->ltref_frames_in_buffer--;
+ }
+
+ return;
+}
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_update_ref_lists ()
+//
+// Used to initialise the reference lists
+// Also assigns picture numbers and long term picture numbers if P OR B slice
+//////////////////////////////////////////////////////////////////////////////
+void h264_dpb_update_ref_lists(h264_Info * pInfo)
+{
+ // Build the initial reference picture lists (listX_0 / listX_1) for the
+ // current slice, following H.264 sec 8.2.4.2:
+ // - P slices: list0 = short term refs sorted by descending pic_num,
+ // followed by long term refs sorted by ascending long_term_pic_num.
+ // - B slices: list0/list1 ordered by POC relative to the current
+ // picture, then long term refs; list1 is list0 with its two short
+ // term halves swapped.
+ // Field pictures go through h264_dpb_gen_pic_list_from_frame_list() to
+ // interleave the individual fields of each frame store.
+ // Finally truncates the lists to num_ref_idx_lX_active and hands off to
+ // h264_dpb_reorder_lists() for any explicit reordering commands.
+ h264_DecodedPictureBuffer * p_dpb = &pInfo->dpb;
+
+ int32_t MaxFrameNum = 1 << (pInfo->active_SPS.log2_max_frame_num_minus4 + 4);
+
+ uint8_t list0idx, list0idx_1, listltidx;
+ uint8_t idx;
+
+ uint8_t add_top, add_bottom, diff;
+ uint8_t list_idc;
+ uint8_t check_non_existing, skip_picture;
+
+
+ uint8_t gen_pic_fs_list0[16];
+ uint8_t gen_pic_fs_list1[16];
+ uint8_t gen_pic_fs_listlt[16];
+ uint8_t gen_pic_pic_list[32]; // check out these sizes...
+
+ uint8_t sort_fs_idc[16];
+ int32_t list_sort_number[16];
+
+#ifdef DUMP_HEADER_INFO
+ static int cc1 = 0;
+ //OS_INFO("-------------cc1= %d\n",cc1); /////// DEBUG info
+ if(cc1 == 255)
+ idx = 0;
+#endif
+
+ list0idx = list0idx_1 = listltidx = 0;
+
+ if (pInfo->SliceHeader.structure == FRAME)
+ {
+ ////////////////////////////////////////////////// short term handling
+ for (idx = 0; idx < p_dpb->ref_frames_in_buffer; idx++)
+ {
+ h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ref_idc[idx]);
+
+ if((viddec_h264_get_is_used(active_fs) == 3)&&(active_fs->frame.used_for_reference == 3))
+ {
+ // FrameNumWrap per sec 8.2.4.1: wrap frame_num values that are
+ // numerically larger than the current picture's frame_num.
+ if (active_fs->frame_num > pInfo->img.frame_num)
+ active_fs->frame_num_wrap = active_fs->frame_num - MaxFrameNum;
+ else
+ active_fs->frame_num_wrap = active_fs->frame_num;
+
+ active_fs->frame.pic_num = active_fs->frame_num_wrap;
+
+ // Use this opportunity to sort list for a p-frame
+ if(pInfo->SliceHeader.slice_type == h264_PtypeP)
+ {
+ sort_fs_idc[list0idx] = p_dpb->fs_ref_idc[idx];
+ list_sort_number[list0idx] = active_fs->frame.pic_num;
+ list0idx++;
+ }
+ }
+ }
+
+ if(pInfo->SliceHeader.slice_type == h264_PtypeP)
+ {
+ // Descending pic_num order for the short term part of list0.
+ h264_list_sort(sort_fs_idc, list_sort_number, list0idx, 1);
+ for (idx = 0; idx < list0idx; idx++)
+ p_dpb->listX_0[idx] = (sort_fs_idc[idx]); // frame
+
+ p_dpb->listXsize[0] = list0idx;
+ }
+
+ ////////////////////////////////////////////////// long term handling
+ for (idx = 0; idx < p_dpb->ltref_frames_in_buffer; idx++)
+ {
+ h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ltref_idc[idx]);
+ if ((viddec_h264_get_is_used(active_fs) == 3) && (viddec_h264_get_is_long_term(active_fs) == 3) && (active_fs->frame.used_for_reference == 3))
+ {
+ active_fs->frame.long_term_pic_num = active_fs->frame.long_term_frame_idx;
+
+ if(pInfo->SliceHeader.slice_type == h264_PtypeP)
+ {
+ sort_fs_idc[list0idx-p_dpb->listXsize[0]] = p_dpb->fs_ltref_idc[idx];
+ list_sort_number[list0idx-p_dpb->listXsize[0]] = active_fs->frame.long_term_pic_num;
+ list0idx++;
+ }
+ }
+ }
+
+ if(pInfo->SliceHeader.slice_type == h264_PtypeP)
+ {
+ // Ascending long_term_pic_num; the (1<<6) bit tags the entry as a
+ // long term reference in the packed list encoding.
+ h264_list_sort(sort_fs_idc, list_sort_number, list0idx-p_dpb->listXsize[0], 0);
+ for (idx = p_dpb->listXsize[0]; idx < list0idx; idx++) {
+ p_dpb->listX_0[idx] = (1<<6) + sort_fs_idc[idx-p_dpb->listXsize[0]];
+ }
+ p_dpb->listXsize[0] = list0idx;
+ }
+ }
+ else /// Field base
+ {
+ // Same-parity fields get the +1 pic_num adjustment (sec 8.2.4.2.2).
+ if (pInfo->SliceHeader.structure == TOP_FIELD)
+ {
+ add_top = 1;
+ add_bottom = 0;
+ }
+ else
+ {
+ add_top = 0;
+ add_bottom = 1;
+ }
+
+ ////////////////////////////////////////////P0: Short term handling
+ for (idx = 0; idx < p_dpb->ref_frames_in_buffer; idx++)
+ {
+ h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ref_idc[idx]);
+ if (active_fs->frame.used_for_reference)
+ {
+ if(active_fs->frame_num > pInfo->SliceHeader.frame_num) {
+ active_fs->frame_num_wrap = active_fs->frame_num - MaxFrameNum;
+ } else {
+ active_fs->frame_num_wrap = active_fs->frame_num;
+ }
+
+ if ((active_fs->frame.used_for_reference)&0x1) {
+ active_fs->top_field.pic_num = (active_fs->frame_num_wrap << 1) + add_top;
+ }
+
+ if ((active_fs->frame.used_for_reference)&0x2) {
+ active_fs->bottom_field.pic_num = (active_fs->frame_num_wrap << 1) + add_bottom;
+ }
+
+ if(pInfo->SliceHeader.slice_type == h264_PtypeP) {
+ sort_fs_idc[list0idx] = p_dpb->fs_ref_idc[idx];
+ list_sort_number[list0idx] = active_fs->frame_num_wrap;
+ list0idx++;
+ }
+ }
+ }
+
+ if(pInfo->SliceHeader.slice_type == h264_PtypeP)
+ {
+ // Sort frame stores, then expand frames into alternating-parity
+ // field entries for the current field parity.
+ h264_list_sort(sort_fs_idc, list_sort_number, list0idx, 1);
+ for (idx = 0; idx < list0idx; idx++) {
+ gen_pic_fs_list0[idx] = sort_fs_idc[idx];
+ }
+
+ p_dpb->listXsize[0] = 0;
+ p_dpb->listXsize[0] = h264_dpb_gen_pic_list_from_frame_list(p_dpb, gen_pic_pic_list, gen_pic_fs_list0, pInfo->img.structure, list0idx, 0);
+
+ for (idx = 0; idx < p_dpb->listXsize[0]; idx++)
+ {
+ p_dpb->listX_0[idx] = gen_pic_pic_list[idx];
+ }
+ }
+
+ ////////////////////////////////////////////P0: long term handling
+ for (idx = 0; idx < p_dpb->ltref_frames_in_buffer; idx++)
+ {
+ h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ltref_idc[idx]);
+
+ if (viddec_h264_get_is_long_term(active_fs)&0x1) {
+ active_fs->top_field.long_term_pic_num = (active_fs->top_field.long_term_frame_idx << 1) + add_top;
+ }
+
+ if (viddec_h264_get_is_long_term(active_fs)&0x2) {
+ active_fs->bottom_field.long_term_pic_num = (active_fs->bottom_field.long_term_frame_idx << 1) + add_bottom;
+ }
+
+ if(pInfo->SliceHeader.slice_type == h264_PtypeP)
+ {
+ sort_fs_idc[listltidx] = p_dpb->fs_ltref_idc[idx];
+ list_sort_number[listltidx] = active_fs->long_term_frame_idx;
+ listltidx++;
+ }
+ }
+
+ if(pInfo->SliceHeader.slice_type == h264_PtypeP)
+ {
+ h264_list_sort(sort_fs_idc, list_sort_number, listltidx, 0);
+ for (idx = 0; idx < listltidx; idx++) {
+ gen_pic_fs_listlt[idx] = sort_fs_idc[idx];
+ }
+ list0idx_1 = h264_dpb_gen_pic_list_from_frame_list(p_dpb, gen_pic_pic_list, gen_pic_fs_listlt, pInfo->img.structure, listltidx, 1);
+
+ for (idx = 0; idx < list0idx_1; idx++) {
+ p_dpb->listX_0[p_dpb->listXsize[0]+idx] = gen_pic_pic_list[idx];
+ }
+ p_dpb->listXsize[0] += list0idx_1;
+ }
+ }
+
+
+ // I slices use no reference lists.
+ if (pInfo->SliceHeader.slice_type == h264_PtypeI)
+ {
+ p_dpb->listXsize[0] = 0;
+ p_dpb->listXsize[1] = 0;
+ return;
+ }
+
+ if(pInfo->SliceHeader.slice_type == h264_PtypeP)
+ {
+ //// Forward done above
+ p_dpb->listXsize[1] = 0;
+ }
+
+
+ // B-Slice
+ // Do not include non-existing frames for B-pictures when cnt_type is zero
+
+ if(pInfo->SliceHeader.slice_type == h264_PtypeB)
+ {
+ list0idx = list0idx_1 = listltidx = 0;
+ skip_picture = 0;
+
+ if(pInfo->active_SPS.pic_order_cnt_type == 0)
+ check_non_existing = 1;
+ else
+ check_non_existing = 0;
+
+ if (pInfo->SliceHeader.structure == FRAME)
+ {
+ // List0 first half: refs at or before the current POC,
+ // descending POC order.
+ for (idx = 0; idx < p_dpb->ref_frames_in_buffer; idx++)
+ {
+ h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ref_idc[idx]);
+ if (viddec_h264_get_is_used(active_fs) == 3)
+ {
+ if(check_non_existing)
+ {
+ if(viddec_h264_get_is_non_existent(active_fs)) skip_picture = 1;
+ else skip_picture = 0;
+ }
+
+ if(skip_picture == 0)
+ {
+ if ((active_fs->frame.used_for_reference==3) && (!(active_fs->frame.is_long_term)))
+ {
+ if (pInfo->img.framepoc >= active_fs->frame.poc)
+ {
+ sort_fs_idc[list0idx] = p_dpb->fs_ref_idc[idx];
+ list_sort_number[list0idx] = active_fs->frame.poc;
+ list0idx++;
+ }
+ }
+ }
+ }
+ }
+
+ h264_list_sort(sort_fs_idc, list_sort_number, list0idx, 1);
+ for (idx = 0; idx < list0idx; idx++) {
+ p_dpb->listX_0[idx] = sort_fs_idc[idx];
+ }
+
+ list0idx_1 = list0idx;
+
+ /////////////////////////////////////////B0: Short term handling
+ // List0 second half: refs after the current POC, ascending order.
+ for (idx = 0; idx < p_dpb->ref_frames_in_buffer; idx++)
+ {
+ h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ref_idc[idx]);
+
+ if (viddec_h264_get_is_used(active_fs) == 3)
+ {
+ if(check_non_existing)
+ {
+ if(viddec_h264_get_is_non_existent(active_fs)) skip_picture = 1;
+ else skip_picture = 0;
+ }
+
+ if(skip_picture == 0)
+ {
+ if ((active_fs->frame.used_for_reference) && (!(active_fs->frame.is_long_term)))
+ {
+ if (pInfo->img.framepoc < active_fs->frame.poc)
+ {
+ sort_fs_idc[list0idx-list0idx_1] = p_dpb->fs_ref_idc[idx];
+ list_sort_number[list0idx-list0idx_1] = active_fs->frame.poc;
+ list0idx++;
+ }
+ }
+ }
+ }
+ }
+
+ h264_list_sort(sort_fs_idc, list_sort_number, list0idx-list0idx_1, 0);
+ for (idx = list0idx_1; idx < list0idx; idx++) {
+ p_dpb->listX_0[idx] = sort_fs_idc[idx-list0idx_1];
+ }
+
+ // List1 is list0 with the two POC halves swapped (sec 8.2.4.2.3).
+ for (idx = 0; idx < list0idx_1; idx++) {
+ p_dpb->listX_1[list0idx-list0idx_1+idx] = p_dpb->listX_0[idx];
+ }
+
+ for (idx = list0idx_1; idx < list0idx; idx++) {
+ p_dpb->listX_1[idx-list0idx_1] = p_dpb->listX_0[idx];
+ }
+
+ p_dpb->listXsize[0] = list0idx;
+ p_dpb->listXsize[1] = list0idx;
+
+ /////////////////////////////////////////B0: long term handling
+ list0idx = 0;
+
+ // Can non-existent pics be set as long term??
+ for (idx = 0; idx < p_dpb->ltref_frames_in_buffer; idx++)
+ {
+ h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ltref_idc[idx]);
+
+ if ((viddec_h264_get_is_used(active_fs) == 3) && (viddec_h264_get_is_long_term(active_fs) == 3))
+ {
+ // if we have two fields, both must be long-term
+ sort_fs_idc[list0idx] = p_dpb->fs_ltref_idc[idx];
+ list_sort_number[list0idx] = active_fs->frame.long_term_pic_num;
+ list0idx++;
+ }
+ }
+
+ h264_list_sort(sort_fs_idc, list_sort_number, list0idx, 0);
+ for (idx = p_dpb->listXsize[0]; idx < (p_dpb->listXsize[0]+list0idx); idx = idx + 1)
+ {
+ p_dpb->listX_0[idx] = (1<<6) + sort_fs_idc[idx-p_dpb->listXsize[0]];
+ p_dpb->listX_1[idx] = (1<<6) + sort_fs_idc[idx-p_dpb->listXsize[0]];
+ }
+
+ p_dpb->listXsize[0] += list0idx;
+ p_dpb->listXsize[1] += list0idx;
+ }
+ else // Field
+ {
+ for (idx = 0; idx < p_dpb->ref_frames_in_buffer; idx++)
+ {
+ h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ref_idc[idx]);
+
+ if (viddec_h264_get_is_used(active_fs)) {
+ if(check_non_existing) {
+ if(viddec_h264_get_is_non_existent(active_fs))
+ skip_picture = 1;
+ else
+ skip_picture = 0;
+ }
+
+ if(skip_picture == 0) {
+ if (pInfo->img.ThisPOC >= active_fs->frame.poc) {
+ sort_fs_idc[list0idx] = p_dpb->fs_ref_idc[idx];
+ list_sort_number[list0idx] = active_fs->frame.poc;
+ list0idx++;
+ }
+ }
+ }
+ }
+
+ h264_list_sort(sort_fs_idc, list_sort_number, list0idx, 1);
+ for (idx = 0; idx < list0idx; idx = idx + 1) {
+ gen_pic_fs_list0[idx] = sort_fs_idc[idx];
+ }
+
+ list0idx_1 = list0idx;
+
+ ///////////////////////////////////////////// B1: Short term handling
+ for (idx = 0; idx < p_dpb->ref_frames_in_buffer; idx++)
+ {
+ h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ref_idc[idx]);
+ if (viddec_h264_get_is_used(active_fs))
+ {
+ if(check_non_existing) {
+ if(viddec_h264_get_is_non_existent(active_fs))
+ skip_picture = 1;
+ else
+ skip_picture = 0;
+ }
+
+ if(skip_picture == 0) {
+ if (pInfo->img.ThisPOC < active_fs->frame.poc) {
+ sort_fs_idc[list0idx-list0idx_1] = p_dpb->fs_ref_idc[idx];
+ list_sort_number[list0idx-list0idx_1] = active_fs->frame.poc;
+ list0idx++;
+ }
+ }
+ }
+ }
+
+ ///// Generate frame list from sorted fs
+ /////
+ h264_list_sort(sort_fs_idc, list_sort_number, list0idx-list0idx_1, 0);
+ for (idx = list0idx_1; idx < list0idx; idx++)
+ gen_pic_fs_list0[idx] = sort_fs_idc[idx-list0idx_1];
+
+ // Frame list for list1 = list0's halves swapped, before the
+ // field interleave below.
+ for (idx = 0; idx < list0idx_1; idx++)
+ gen_pic_fs_list1[list0idx-list0idx_1+idx] = gen_pic_fs_list0[idx];
+
+ for (idx = list0idx_1; idx < list0idx; idx++)
+ gen_pic_fs_list1[idx-list0idx_1] = gen_pic_fs_list0[idx];
+
+ ///// Generate List_X0
+ /////
+ p_dpb->listXsize[0] = h264_dpb_gen_pic_list_from_frame_list(p_dpb, gen_pic_pic_list, gen_pic_fs_list0, pInfo->img.structure, list0idx, 0);
+
+ for (idx = 0; idx < p_dpb->listXsize[0]; idx++)
+ p_dpb->listX_0[idx] = gen_pic_pic_list[idx];
+
+ //// Generate List X1
+ ////
+ p_dpb->listXsize[1] = h264_dpb_gen_pic_list_from_frame_list(p_dpb, gen_pic_pic_list, gen_pic_fs_list1, pInfo->img.structure, list0idx, 0);
+
+ for (idx = 0; idx < p_dpb->listXsize[1]; idx++)
+ p_dpb->listX_1[idx] = gen_pic_pic_list[idx];
+
+ ///////////////////////////////////////////// B1: long term handling
+ for (idx = 0; idx < p_dpb->ltref_frames_in_buffer; idx++)
+ {
+ h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ltref_idc[idx]);
+ sort_fs_idc[listltidx] = p_dpb->fs_ltref_idc[idx];
+ list_sort_number[listltidx] = active_fs->long_term_frame_idx;
+ listltidx++;
+ }
+
+ h264_list_sort(sort_fs_idc, list_sort_number, listltidx, 0);
+ for (idx = 0; idx < listltidx; idx++)
+ gen_pic_fs_listlt[idx] = sort_fs_idc[idx];
+
+ list0idx_1 = h264_dpb_gen_pic_list_from_frame_list(p_dpb, gen_pic_pic_list, gen_pic_fs_listlt, pInfo->img.structure, listltidx, 1);
+
+ for (idx = 0; idx < list0idx_1; idx++)
+ {
+ p_dpb->listX_0[p_dpb->listXsize[0]+idx] = gen_pic_pic_list[idx];
+ p_dpb->listX_1[p_dpb->listXsize[1]+idx] = gen_pic_pic_list[idx];
+ }
+
+ p_dpb->listXsize[0] += list0idx_1;
+ p_dpb->listXsize[1] += list0idx_1;
+ }
+ }
+
+ // Setup initial list sizes at this point
+ p_dpb->nInitListSize[0] = p_dpb->listXsize[0];
+ p_dpb->nInitListSize[1] = p_dpb->listXsize[1];
+ if(pInfo->SliceHeader.slice_type != h264_PtypeI)
+ {
+ if ((p_dpb->listXsize[0]==p_dpb->listXsize[1]) && (p_dpb->listXsize[0] > 1))
+ {
+ // check if lists are identical, if yes swap first two elements of listX[1]
+ diff = 0;
+ for (idx = 0; idx < p_dpb->listXsize[0]; idx = idx + 1)
+ {
+ if (p_dpb->listX_0[idx] != p_dpb->listX_1[idx]) diff = 1;
+ }
+
+
+ if (!(diff))
+ {
+ list_idc = p_dpb->listX_1[0];
+ p_dpb->listX_1[0] = p_dpb->listX_1[1];
+ p_dpb->listX_1[1] = list_idc;
+ }
+ }
+
+ // set max size
+ if (p_dpb->listXsize[0] > pInfo->SliceHeader.num_ref_idx_l0_active)
+ {
+ p_dpb->listXsize[0] = pInfo->SliceHeader.num_ref_idx_l0_active;
+ }
+
+
+ if (p_dpb->listXsize[1] > pInfo->SliceHeader.num_ref_idx_l1_active)
+ {
+ p_dpb->listXsize[1] = pInfo->SliceHeader.num_ref_idx_l1_active;
+ }
+
+
+
+ }
+
+
+
+ /// DPB reorder list
+ h264_dpb_reorder_lists(pInfo);
+
+ return;
+} //// End of init_dpb_list
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_get_short_term_pic ()
+//
+// Sets active_fs to point to frame store containing picture with given picNum
+// Sets field_flag, bottom_field and err_flag based on the picture and whether
+// it is available or not...
+//
+// h264_dpb_get_short_term_pic ()
+//
+// Scans the short-term reference list for the picture with the given
+// pic_num. Returns the containing frame store, or NULL if not found.
+// *bottom_field_bit is set to PUT_LIST_INDEX_FIELD_BIT(1) only when the
+// match is a bottom field; it is cleared otherwise.
+static frame_param_ptr h264_dpb_get_short_term_pic(h264_Info * pInfo,int32_t pic_num, int32_t *bottom_field_bit)
+{
+ uint32_t idx;
+ frame_param_ptr fs;
+ h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;
+
+ *bottom_field_bit = 0;
+
+ for (idx = 0; idx < p_dpb->ref_frames_in_buffer; idx++)
+ {
+ fs = &p_dpb->fs[p_dpb->fs_ref_idc[idx]];
+
+ if (pInfo->SliceHeader.structure == FRAME)
+ {
+ // Frame decoding: both fields must be short-term references
+ if ((fs->frame.used_for_reference == 3) &&
+ (!(fs->frame.is_long_term)) &&
+ (fs->frame.pic_num == pic_num))
+ {
+ return fs;
+ }
+ }
+ else // current picture is a field
+ {
+ // Field decoding: test top field, then bottom field
+ if ((fs->frame.used_for_reference & 0x1) &&
+ (!(fs->top_field.is_long_term)) &&
+ (fs->top_field.pic_num == pic_num))
+ {
+ return fs;
+ }
+
+ if ((fs->frame.used_for_reference & 0x2) &&
+ (!(fs->bottom_field.is_long_term)) &&
+ (fs->bottom_field.pic_num == pic_num))
+ {
+ *bottom_field_bit = PUT_LIST_INDEX_FIELD_BIT(1);
+ return fs;
+ }
+ }
+ }
+ return NULL;
+}
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_get_long_term_pic ()
+//
+// Sets active_fs to point to frame store containing picture with given picNum
+//
+
+// h264_dpb_get_long_term_pic ()
+//
+// Scans the long-term reference list for the picture with the given
+// long_term_pic_num. Returns the containing frame store, or NULL.
+// *bottom_field_bit is set to PUT_LIST_INDEX_FIELD_BIT(1) only when the
+// match is a bottom field; it is cleared otherwise.
+static frame_param_ptr h264_dpb_get_long_term_pic(h264_Info * pInfo,int32_t long_term_pic_num, int32_t *bottom_field_bit)
+{
+ uint32_t idx;
+ frame_param_ptr fs;
+ h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;
+
+ *bottom_field_bit = 0;
+
+ for (idx = 0; idx < p_dpb->ltref_frames_in_buffer; idx++)
+ {
+ fs = &p_dpb->fs[p_dpb->fs_ltref_idc[idx]];
+
+ if (pInfo->SliceHeader.structure == FRAME)
+ {
+ // Frame decoding: both fields must be long-term references
+ if ((fs->frame.used_for_reference == 3) &&
+ (fs->frame.is_long_term) &&
+ (fs->frame.long_term_pic_num == long_term_pic_num))
+ {
+ return fs;
+ }
+ }
+ else
+ {
+ // Field decoding: test top field, then bottom field
+ if ((fs->frame.used_for_reference & 0x1) &&
+ (fs->top_field.is_long_term) &&
+ (fs->top_field.long_term_pic_num == long_term_pic_num))
+ {
+ return fs;
+ }
+
+ if ((fs->frame.used_for_reference & 0x2) &&
+ (fs->bottom_field.is_long_term) &&
+ (fs->bottom_field.long_term_pic_num == long_term_pic_num))
+ {
+ *bottom_field_bit = PUT_LIST_INDEX_FIELD_BIT(1);
+ return fs;
+ }
+ }
+ }
+ return NULL;
+}
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_reorder_ref_pic_list ()
+//
+// Used to sort a list based on a corresponding sort indices
+//
+
+// Singly-linked node used while applying reference-picture-list
+// reordering commands; 'value' holds one packed list entry
+// (fs_idc plus field / long-term bits).
+struct list_value_t
+{
+ int32_t value;
+ struct list_value_t *next;
+};
+
+// Scratch linked list (fixed capacity of 32 nodes) used to reorder a
+// reference picture list in place without shifting array elements.
+struct linked_list_t
+{
+ struct list_value_t *begin; // first node in list order
+ struct list_value_t *end; // last node in list order
+ struct list_value_t *entry; // current reorder position (NULL == past end)
+ struct list_value_t *prev_entry; // node immediately before 'entry'
+ struct list_value_t list[32]; // node storage
+};
+
+// Builds the scratch list from 'size' initial values taken from vp.
+// begin/entry start at the first node; end points at the last node.
+static void linked_list_initialize (struct linked_list_t *lp, uint8_t *vp, int32_t size)
+{
+ int32_t i;
+
+ lp->begin = &lp->list[0];
+ lp->entry = &lp->list[0];
+ lp->end = &lp->list[size - 1];
+ lp->prev_entry = NULL;
+
+ for (i = 0; i < size; i++)
+ {
+ lp->list[i].value = vp[i];
+ lp->list[i].next = &lp->list[i] + 1;
+ }
+ lp->end->next = NULL; // terminate the chain
+ return;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+// Applies one reordering command to the scratch list: finds (or, if no
+// node matches, forces) a node whose value == list_value, moves it to
+// the current 'entry' position, then advances entry/prev_entry past it.
+static void linked_list_reorder (struct linked_list_t *lp, int32_t list_value)
+{
+ register struct list_value_t *lvp = lp->entry;
+ register struct list_value_t *lvp_prev;
+
+ if (lvp == NULL) {
+ lp->end->value = list_value; // replace the end entry
+ } else if ((lp->begin==lp->end)||(lvp==lp->end)) // replace the begin/end entry and set the entry to NULL
+ {
+ lp->entry->value = list_value;
+ lp->prev_entry = lp->entry;
+ lp->entry = NULL;
+ }
+ else if (lvp->value==list_value) // the entry point matches
+ {
+ lp->prev_entry = lvp;
+ lp->entry = lvp->next;
+ }
+ else if (lvp->next == lp->end) // the entry is just before the end
+ {
+ // replace the end and swap the end and entry points
+ // lvp
+ // prev_entry => entry => old_end
+ // old_end & new_prev_entry => new_end & entry
+ lp->end->value = list_value;
+
+ if (lp->prev_entry)
+ lp->prev_entry->next = lp->end;
+ else
+ lp->begin = lp->end;
+
+ lp->prev_entry = lp->end;
+ lp->end->next = lvp;
+ lp->end = lvp;
+ lvp->next = NULL;
+ }
+ else
+ {
+ // General case: scan forward for a matching node; if none is found
+ // the end node's value is overwritten below (forced match).
+ lvp_prev = NULL;
+ while (lvp->next) // do not check the end but we'll be in the loop at least once
+ {
+ if (lvp->value == list_value) break;
+ lvp_prev = lvp;
+ lvp = lvp->next;
+ }
+ lvp->value = list_value; // force end matches
+
+ // remove lvp from the list
+ // NOTE(review): lvp_prev is non-NULL here because a first-node match
+ // is handled by the (lvp->value==list_value) branch above.
+ lvp_prev->next = lvp->next;
+ if (lvp==lp->end) lp->end = lvp_prev;
+
+ // insert lvp in front of lp->entry
+ if (lp->entry==lp->begin)
+ {
+ lvp->next = lp->begin;
+ lp->begin = lvp;
+ }
+ else
+ {
+ lvp->next = lp->entry;
+ lp->prev_entry->next = lvp;
+ }
+ lp->prev_entry = lvp;
+ }
+ return;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+// Flattens the scratch list, in list order, into the int32 array vp.
+// vp must have room for every node reachable from lp->begin.
+static void linked_list_output (struct linked_list_t *lp, int32_t *vp)
+{
+ struct list_value_t *node;
+ int32_t *out = vp;
+
+ for (node = lp->begin; node != NULL; node = node->next)
+ {
+ *(out++) = node->value;
+ }
+ return;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+// h264_dpb_reorder_ref_pic_list ()
+//
+// Applies the slice header's ref_pic_list_reordering commands (H.264
+// spec 8.2.4.3) to list 0 (list_num == 0) or list 1, writing the
+// reordered entries to pInfo->slice_ref_list0/1. The initialised list
+// in p_dpb->listX_0/1 is left untouched so it can be reused per slice.
+// Returns num_ref_idx_active.
+//
+// FIX(review): the original loop read remapping_of_pic_nums_idc[i] in
+// the loop condition BEFORE the body's (i > MAX_NUM_REF_FRAMES) break,
+// so index MAX_NUM_REF_FRAMES + 1 could be read out of bounds on a
+// malformed stream. The bound check now precedes the array read.
+// Also removed the dead local refIdxLX (assigned but never used).
+int32_t h264_dpb_reorder_ref_pic_list(h264_Info * pInfo,int32_t list_num, int32_t num_ref_idx_active)
+{
+ h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;
+ uint8_t *remapping_of_pic_nums_idc;
+ list_reordering_num_t *list_reordering_num;
+ int32_t bottom_field_bit;
+
+ int32_t maxPicNum, currPicNum, picNumLXNoWrap, picNumLXPred, pic_num;
+ int32_t i;
+
+ int32_t PicList[32] = {0};
+ struct linked_list_t ll;
+ struct linked_list_t *lp = &ll; // should consider use the scratch space
+
+ // declare these below as registers gave me 23 cy/MB for the worst frames in Allegro_Combined_CABAC_07_HD, YHu
+ register frame_param_ptr temp_fs;
+ register int32_t temp;
+ register uint8_t *ip1;
+
+ maxPicNum = 1 << (pInfo->active_SPS.log2_max_frame_num_minus4 + 4);
+
+ // Select the initial list and the reordering commands for list 0 or 1
+ if (list_num == 0) // i.e list 0
+ {
+ ip1 = p_dpb->listX_0;
+ remapping_of_pic_nums_idc = pInfo->SliceHeader.sh_refpic_l0.reordering_of_pic_nums_idc;
+ list_reordering_num = pInfo->SliceHeader.sh_refpic_l0.list_reordering_num;
+ }
+ else
+ {
+ ip1 = p_dpb->listX_1;
+ remapping_of_pic_nums_idc = pInfo->SliceHeader.sh_refpic_l1.reordering_of_pic_nums_idc;
+ list_reordering_num = pInfo->SliceHeader.sh_refpic_l1.list_reordering_num;
+ }
+
+ linked_list_initialize (lp, ip1, num_ref_idx_active);
+
+ currPicNum = pInfo->SliceHeader.frame_num;
+ if (pInfo->SliceHeader.structure != FRAME)
+ {
+ // Field decoding: pic nums are doubled (+1 for same-parity field),
+ // see spec 8.2.4.1
+ maxPicNum <<= 1;
+ currPicNum <<= 1;
+ currPicNum++;
+ }
+
+ picNumLXPred = currPicNum;
+
+ // idc == 3 terminates the command list; bound i before reading the array
+ for (i = 0; (i <= MAX_NUM_REF_FRAMES) && (remapping_of_pic_nums_idc[i] != 3); i++)
+ {
+ if (remapping_of_pic_nums_idc[i] < 2) // - short-term re-ordering
+ {
+ temp = (list_reordering_num[i].abs_diff_pic_num_minus1 + 1);
+ if (remapping_of_pic_nums_idc[i] == 0)
+ {
+ // idc 0: subtract the difference, with wrap
+ temp = picNumLXPred - temp;
+ if (temp < 0 ) picNumLXNoWrap = temp + maxPicNum;
+ else picNumLXNoWrap = temp;
+ }
+ else // (remapping_of_pic_nums_idc[i] == 1)
+ {
+ // idc 1: add the difference, with wrap
+ temp += picNumLXPred;
+ if (temp >= maxPicNum) picNumLXNoWrap = temp - maxPicNum;
+ else picNumLXNoWrap = temp;
+ }
+
+ // Updates for next iteration of the loop
+ picNumLXPred = picNumLXNoWrap;
+
+ if (picNumLXNoWrap > currPicNum ) pic_num = picNumLXNoWrap - maxPicNum;
+ else pic_num = picNumLXNoWrap;
+
+ temp_fs = h264_dpb_get_short_term_pic(pInfo, pic_num, &bottom_field_bit);
+ if (temp_fs)
+ {
+ temp = bottom_field_bit + PUT_FS_IDC_BITS(temp_fs->fs_idc);
+ linked_list_reorder (lp, temp);
+ }
+ }
+ else //(remapping_of_pic_nums_idc[i] == 2) long-term re-ordering
+ {
+ pic_num = list_reordering_num[i].long_term_pic_num;
+
+ temp_fs = h264_dpb_get_long_term_pic(pInfo, pic_num, &bottom_field_bit);
+ if (temp_fs)
+ {
+ temp = PUT_LIST_LONG_TERM_BITS(1) + bottom_field_bit + PUT_FS_IDC_BITS(temp_fs->fs_idc);
+ linked_list_reorder (lp, temp);
+ }
+ }
+ }
+
+ linked_list_output (lp, PicList);
+
+ // Publish the reordered list for this slice
+ if(0 == list_num )
+ {
+ for(i=0; i<num_ref_idx_active; i++)
+ {
+ pInfo->slice_ref_list0[i]=(uint8_t)PicList[i];
+ }
+ }
+ else
+ {
+ for(i=0; i<num_ref_idx_active; i++)
+ {
+ pInfo->slice_ref_list1[i]=(uint8_t)PicList[i];
+ }
+ }
+
+ // Instead of updating the now reordered list here, just write it down...
+ // This way, we can continue to hold the initialised list in p_dpb->listX_0
+ // and therefore not need to update it every slice
+
+ //h264_dpb_write_list(list_num, PicList, num_ref_idx_active);
+
+ return num_ref_idx_active;
+}
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+
+
+// h264_dpb_RP_check_list ()
+//
+// Recovery-point (SEI RP) sanity check of the reference list:
+// - B slices arriving before the B-state is ready after an RP are
+// flagged as not decodable.
+// - For the first P slice after an RP with a single list-0 reference,
+// repair the list so it references the last I frame if it does not
+// already.
+// Clears sei_rp_received / sets sei_b_state_ready once handled.
+void h264_dpb_RP_check_list (h264_Info * pInfo)
+{
+ h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;
+ uint8_t *p_list = pInfo->slice_ref_list0;
+
+ //
+ // If the decoding start from RP and without exact point, all B frames belong to previous GOP should be throw away!
+ //
+
+ if((pInfo->SliceHeader.slice_type == h264_PtypeB)&&(pInfo->sei_b_state_ready ==0) && pInfo->sei_rp_received) {
+ pInfo->wl_err_curr |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE;
+ pInfo->wl_err_curr |= (FRAME << FIELD_ERR_OFFSET);
+ }
+
+
+ //
+ // Repair Ref list if it damaged with RP recovery only
+ //
+ if((pInfo->SliceHeader.slice_type == h264_PtypeP) && pInfo->sei_rp_received)
+ {
+
+ int32_t idx, rp_found = 0;
+
+ if(pInfo->SliceHeader.num_ref_idx_l0_active == 1)
+ {
+ // Use the reordered list if the slice carried reordering commands,
+ // otherwise the initialised list
+ if(pInfo->SliceHeader.sh_refpic_l0.ref_pic_list_reordering_flag)
+ {
+ p_list = pInfo->slice_ref_list0;
+ }
+ else
+ {
+ p_list = pInfo->dpb.listX_0;
+ //pInfo->sei_rp_received = 0;
+ //return;
+ }
+
+ // Is the last I frame still present in the DPB?
+ for(idx = 0; idx < p_dpb->used_size; idx++) {
+ if(p_dpb->fs_dpb_idc[idx] == pInfo->last_I_frame_idc) {
+ rp_found = 1;
+ break;
+ }
+ }
+ if(rp_found) {
+#if 0
+ int32_t poc;
+
+ ///// Clear long-term ref list
+ for (idx = 0; idx < p_dpb->ltref_frames_in_buffer; idx++)
+ {
+ h264_dpb_unmark_for_reference(p_dpb, p_dpb->fs_ltref_idc[0]);
+ h264_dpb_remove_ltref_list(p_dpb, p_dpb->fs_ltref_idc[0]);
+ }
+
+ ///// Clear short-term ref list
+ //while(p_dpb->used_size>1)
+ for(idx = 0; idx < p_dpb->used_size; idx++)
+ {
+ int32_t idx_pos;
+ //// find smallest non-output POC
+ h264_dpb_get_smallest_poc(p_dpb, &poc, &idx_pos);
+
+ //// Remove all frames in previous GOP
+ if ((idx_pos != MPD_DPB_FS_NULL_IDC) && (p_dpb->fs_dpb_idc[idx_pos] != pInfo->last_I_frame_idc))
+ {
+ // Remove from ref-list
+ h264_dpb_unmark_for_reference(p_dpb, p_dpb->fs_dpb_idc[idx_pos]);
+ h264_dpb_remove_ref_list(p_dpb, p_dpb->fs_dpb_idc[idx_pos]);
+
+ // Output from DPB
+ //h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dpb_idc[idx]);
+ //if((active_fs->is_output == 0) && (active_fs->is_non_existent == 0))
+ {
+ //int32_t existing;
+ //h264_dpb_frame_output(pInfo, p_dpb->fs_dpb_idc[idx], 0, &existing);
+ //p_dpb->last_output_poc = poc;
+ }
+ //h264_dpb_remove_frame_from_dpb(p_dpb, idx); // Remove dpb.fs_dpb_idc[pos]
+
+ }
+ }
+#endif
+ ///// Set the reference to last I frame
+ // 255 is used as the "no last I frame" sentinel here
+ if( (pInfo->last_I_frame_idc!=255)&&(pInfo->last_I_frame_idc!=p_list[0]))
+ {
+ /// Repair the reference list now
+ h264_dpb_unmark_for_reference(p_dpb, p_list[0]);
+ h264_dpb_remove_ref_list(p_dpb, p_list[0]);
+ p_list[0] = pInfo->last_I_frame_idc;
+ }
+
+ }
+ }
+
+ pInfo->sei_rp_received = 0;
+ pInfo->sei_b_state_ready = 1;
+
+ }
+
+
+ return;
+}
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_reorder_lists ()
+//
+// Used to sort a list based on a corresponding sort indices
+//
+
+// h264_dpb_reorder_lists ()
+//
+// Applies slice-header reference list reordering for P and B slices
+// (list 0 for both, list 1 additionally for B), then updates the list
+// sizes and runs the recovery-point list check.
+void h264_dpb_reorder_lists(h264_Info * pInfo)
+{
+ int32_t slice_type = pInfo->SliceHeader.slice_type;
+
+ if ((slice_type == h264_PtypeP) || (slice_type == h264_PtypeB))
+ {
+ /// Forward (list 0) reordering - common to P and B slices
+ if (pInfo->SliceHeader.sh_refpic_l0.ref_pic_list_reordering_flag)
+ {
+ h264_dpb_reorder_ref_pic_list(pInfo, 0, pInfo->SliceHeader.num_ref_idx_l0_active);
+ }
+ pInfo->dpb.listXsize[0] = pInfo->SliceHeader.num_ref_idx_l0_active;
+
+ if (slice_type == h264_PtypeB)
+ {
+ /// Backward (list 1) reordering - B slices only
+ if (pInfo->SliceHeader.sh_refpic_l1.ref_pic_list_reordering_flag)
+ {
+ h264_dpb_reorder_ref_pic_list(pInfo, 1, pInfo->SliceHeader.num_ref_idx_l1_active);
+ }
+ pInfo->dpb.listXsize[1] = pInfo->SliceHeader.num_ref_idx_l1_active;
+ }
+ }
+
+ //// Check if need recover reference list with previous recovery point
+ h264_dpb_RP_check_list(pInfo);
+
+ return;
+}
+
+////////////////////////////////////////// DPB management //////////////////////
+
+//////////////////////////////////////////////////////////////////////////////
+// avc_dpb_get_non_output_frame_number ()
+//
+// get total non output frame number in the DPB.
+//
+// avc_dpb_get_non_output_frame_number ()
+//
+// Returns how many frames currently held in the DPB have not yet been
+// output. Iterates via the global active_fs cursor.
+static int32_t avc_dpb_get_non_output_frame_number(h264_Info * pInfo)
+{
+ h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;
+ int32_t count = 0;
+ int32_t idx;
+
+ for (idx = 0; idx < p_dpb->used_size; idx++)
+ {
+ h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dpb_idc[idx]);
+ if (!viddec_h264_get_is_output(active_fs))
+ {
+ count++;
+ }
+ }
+
+ return count;
+}
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//// Store previous picture in DPB, and then update DPB queue, remove unused frames from DPB
+
+// h264_dpb_store_previous_picture_in_dpb ()
+//
+// Stores the previously decoded (or a non-existing/gap) picture into the
+// DPB, runs reference marking (IDR / adaptive / sliding window), inserts
+// the picture into the reference lists, and bumps frames out of the DPB
+// as output levels require. The ordering of the memory-management and
+// insert calls below is significant - do not reorder.
+void h264_dpb_store_previous_picture_in_dpb(h264_Info * pInfo,int32_t NonExisting, int32_t use_old)
+{
+ h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;
+
+ int32_t used_for_reference;
+ int32_t is_direct_output;
+ int32_t second_field_stored = 0;
+ int32_t poc;
+ int32_t pos;
+ int32_t flag;
+ int32_t first_field_non_ref = 0;
+ int32_t idr_flag;
+
+ // Point active_fs at the frame store being committed
+ if(NonExisting) {
+ if(p_dpb->fs_non_exist_idc == MPD_DPB_FS_NULL_IDC)
+ return;
+ h264_dpb_set_active_fs(p_dpb, p_dpb->fs_non_exist_idc);
+ } else {
+ if(p_dpb->fs_dec_idc == MPD_DPB_FS_NULL_IDC)
+ return;
+ h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc);
+ }
+
+ if(NonExisting == 0)
+ {
+ //active_fs->sps_disp_index = (next_sps_disp_entry == 0)? 7 : next_sps_disp_entry - 1;
+ pInfo->img.last_has_mmco_5 = 0;
+ pInfo->img.last_pic_bottom_field = pInfo->img.bottom_field_flag;
+
+ // Reference usage comes from nal_ref_idc (old slice header when use_old)
+ //used_for_reference = (use_old) ? !(old_pInfo->img.old_disposable_flag) : !(pInfo->img.disposable_flag);
+ used_for_reference = (use_old) ? !(pInfo->old_slice.nal_ref_idc==0) : !(pInfo->SliceHeader.nal_ref_idc==0);
+
+ // Mark the decoded structure (field or frame) as used/reference
+ switch (viddec_h264_get_dec_structure(active_fs))
+ {
+ case(TOP_FIELD) : {
+ active_fs->top_field.used_for_reference = used_for_reference;
+ viddec_h264_set_is_top_used(active_fs, 1);
+ //active_fs->crc_field_coded = 1;
+ }break;
+ case(BOTTOM_FIELD): {
+ // bottom field reference is tracked in bit 1
+ active_fs->bottom_field.used_for_reference = used_for_reference << 1;
+ viddec_h264_set_is_bottom_used(active_fs, 1);
+ //active_fs->crc_field_coded = 1;
+ }break;
+ default: {
+ active_fs->frame.used_for_reference = used_for_reference?3:0;
+ viddec_h264_set_is_frame_used(active_fs, 3);
+ //if(pInfo->img.MbaffFrameFlag) active_fs->crc_field_coded = 1;
+
+ }break;
+ }
+
+ //freeze_assert = use_old ? old_pInfo->img.sei_freeze_this_image : pInfo->img.sei_freeze_this_image;
+ //if (freeze_assert) sei_information.disp_frozen = 1;
+
+ // IDR pictures trigger full DPB memory management; otherwise apply
+ // adaptive (MMCO) marking when signalled in the slice header
+ idr_flag = use_old ? pInfo->old_slice.idr_flag : pInfo->SliceHeader.idr_flag;
+ if (idr_flag) {
+ h264_dpb_idr_memory_management (pInfo, &pInfo->active_SPS, pInfo->img.no_output_of_prior_pics_flag);
+ } else {
+ // adaptive memory management
+ // NOTE(review): bitwise '&' acts as logical AND here - both operands
+ // are 0/1 flags
+ if (used_for_reference & pInfo->SliceHeader.sh_dec_refpic.adaptive_ref_pic_marking_mode_flag) {
+ h264_dpb_adaptive_memory_management(pInfo);
+ }
+ }
+ // Reset the active frame store - could have changed in mem management ftns
+ h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc);
+
+ if ((viddec_h264_get_dec_structure(active_fs) == TOP_FIELD)||(viddec_h264_get_dec_structure(active_fs) == BOTTOM_FIELD))
+ {
+ // check for frame store with same pic_number -- always true in my case, YH
+ // when we allocate frame store for the second field, we make sure the frame store for the second
+ // field is the one that contains the first field of the frame- see h264_dpb_init_frame_store()
+ // This is different from JM model.
+ // In this way we don't need to move image data around and can reduce memory bandwidth.
+ // simply check if the check if the other field has been decoded or not
+
+ if (viddec_h264_get_is_used(active_fs) != 0)
+ {
+ if(pInfo->img.second_field)
+ {
+ h264_dpb_insert_picture_in_dpb(pInfo, used_for_reference, 0, NonExisting, use_old);
+ second_field_stored = 1;
+ }
+ }
+ }
+ }
+ else
+ { // Set up locals for non-existing frames
+ used_for_reference = 1;
+
+ active_fs->frame.used_for_reference = used_for_reference?3:0;
+ viddec_h264_set_is_frame_used(active_fs, 3);
+ viddec_h264_set_dec_structure(active_fs, FRAME);
+ pInfo->img.structure = FRAME;
+ }
+
+ // A full-frame non-reference picture with a smaller POC than anything
+ // pending may bypass the DPB and be output directly
+ is_direct_output = 0;
+ if (NonExisting == 0)
+ {
+ if(p_dpb->used_size >= p_dpb->BumpLevel)
+ {
+ // non-reference frames may be output directly
+ h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc);
+
+ if ((used_for_reference == 0) && (viddec_h264_get_is_used(active_fs) == 3))
+ {
+ h264_dpb_get_smallest_poc (p_dpb, &poc, &pos);
+ h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc);
+ if ((pos == MPD_DPB_FS_NULL_IDC) || (pInfo->img.ThisPOC < poc))
+ {
+ is_direct_output = 1;
+ }
+ }
+ }
+ }
+
+ // Sliding-window marking: always for non-existing frames; for real
+ // reference frames only on the first field and without MMCO
+ if (NonExisting) {
+ h264_dpb_sliding_window_memory_management(p_dpb, NonExisting, pInfo->active_SPS.num_ref_frames);
+ } else if(pInfo->SliceHeader.idr_flag == 0) {
+ if(used_for_reference){
+ if(pInfo->img.second_field == 0) {
+ if (pInfo->SliceHeader.sh_dec_refpic.adaptive_ref_pic_marking_mode_flag == 0) {
+ h264_dpb_sliding_window_memory_management(p_dpb, NonExisting, pInfo->active_SPS.num_ref_frames);
+ }
+ }
+ }
+ }
+
+ h264_dpb_remove_unused_frame_from_dpb(p_dpb, &flag);
+
+ //if (is_direct_output == 0)
+ {
+ if ((pInfo->img.second_field == 0) || (NonExisting))
+ {
+ h264_dpb_insert_picture_in_dpb(pInfo, used_for_reference, 1, NonExisting, use_old);
+ }
+
+ // In an errored stream we saw a condition where
+ // p_dpb->ref_frames_in_buffer + p_dpb->ltref_frames_in_buffer > p_dpb->BumpLevel,
+ // which in itself is an error, but this means first_field_non_ref will
+ // not get set and causes problems for h264_dpb_queue_update()
+ if((pInfo->img.structure != FRAME) && (pInfo->img.second_field == 0)) {
+ if(used_for_reference == 0)
+ if(p_dpb->ref_frames_in_buffer + p_dpb->ltref_frames_in_buffer == p_dpb->BumpLevel)
+ first_field_non_ref = 1;
+ }
+
+ }
+
+ if(NonExisting)
+ h264_dpb_set_active_fs(p_dpb, p_dpb->fs_non_exist_idc);
+ else
+ h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc);
+
+ // Once a complete frame (or second field) is in, decide whether the
+ // output controller should start emitting frames
+ if(NonExisting == 0)
+ {
+ if((pInfo->img.second_field == 1) || (pInfo->img.structure == FRAME))
+ {
+ //h264_send_new_decoded_frame();
+ if((p_dpb->OutputCtrl) && (is_direct_output == 0))
+ h264_dpb_output_one_frame_from_dpb(pInfo, 0, 0,pInfo->active_SPS.num_ref_frames);
+
+ // Pictures inserted by this point - check if we have reached the specified output
+ // level (if one has been specified) so we can begin on next call
+
+ /*
+ Fixed HSD 212625---------------should compare OutputLevel with non-output frame number in dpb, not the used number in dpb
+ if((p_dpb->OutputLevelValid)&&(p_dpb->OutputCtrl == 0))
+ {
+ if(p_dpb->used_size == p_dpb->OutputLevel)
+ p_dpb->OutputCtrl = 1;
+ }
+ */
+
+ if(p_dpb->OutputLevelValid)
+ {
+ int32_t non_output_frame_number=0;
+ non_output_frame_number = avc_dpb_get_non_output_frame_number(pInfo);
+
+ if(non_output_frame_number == p_dpb->OutputLevel)
+ p_dpb->OutputCtrl = 1;
+ else
+ p_dpb->OutputCtrl = 0;
+ }
+ else {
+ p_dpb->OutputCtrl = 0;
+ }
+ }
+ }
+
+ // Bump frames while the DPB is over its fill level
+ while(p_dpb->used_size > (p_dpb->BumpLevel + first_field_non_ref))
+ //while(p_dpb->used_size > p_dpb->BumpLevel)
+ {
+ h264_dpb_queue_update(pInfo, 1, 0, 0,pInfo->active_SPS.num_ref_frames); // flush a frame
+ //h264_dpb_remove_unused_frame_from_dpb(p_dpb, &flag);
+ }
+
+ //
+ // Do not output "direct output" pictures until the sempahore has been set that the pic is
+ // decoded!!
+ //
+ if(is_direct_output) {
+ h264_dpb_queue_update(pInfo, 1, 1, 0,pInfo->active_SPS.num_ref_frames);
+ //h264_dpb_remove_unused_frame_from_dpb(p_dpb, &flag);
+ }
+
+ //
+ // Add reference pictures into Reference list
+ //
+ if(used_for_reference) {
+ h264_dpb_insert_ref_lists(&pInfo->dpb, NonExisting);
+ }
+
+ h264_dpb_remove_unused_frame_from_dpb(p_dpb, &flag);
+
+
+ return;
+} ////////////// End of DPB store pic
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_insert_picture_in_dpb ()
+//
+// Insert the decoded picture into the DPB. A free DPB position is necessary
+// for frames, .
+// This ftn tends to fill out the framestore's top level parameters from the
+// storable picture's parameters within it. It is called from h264_dpb_store_picture_in_dpb()
+//
+// This function finishes by updating the reference lists - this means it must be called after
+// h264_dpb_sliding_window_memory_management()
+//
+// In the case of a frame it will call h264_dpb_split_field()
+// In the case of the second field of a complementary field pair it calls h264_dpb_combine_field()
+//
+
+// h264_dpb_insert_picture_in_dpb ()
+//
+// Inserts the decoded (or non-existing) picture into the DPB: fills the
+// frame store's top-level parameters from the storable picture, marks
+// usage/long-term state per decoded structure, and for frames calls
+// h264_dpb_split_field() / for completed field pairs
+// h264_dpb_combine_field(). Must be called after sliding-window
+// memory management. add2dpb controls whether the store is appended to
+// the fs_dpb_idc[] queue.
+void h264_dpb_insert_picture_in_dpb(h264_Info * pInfo,int32_t used_for_reference, int32_t add2dpb, int32_t NonExisting, int32_t use_old)
+{
+ h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;
+
+ // Select the frame store and its frame_num source
+ if(NonExisting == 0) {
+ h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc);
+ active_fs->frame_num = (use_old) ? pInfo->old_slice.frame_num : pInfo->SliceHeader.frame_num;
+ }
+ else {
+ h264_dpb_set_active_fs(p_dpb, p_dpb->fs_non_exist_idc);
+ active_fs->frame_num = active_fs->frame.pic_num;
+ }
+
+ if (add2dpb) {
+ p_dpb->fs_dpb_idc[p_dpb->used_size] = active_fs->fs_idc;
+ p_dpb->used_size++;
+ }
+
+
+ switch (viddec_h264_get_dec_structure(active_fs))
+ {
+ case FRAME :{
+ viddec_h264_set_is_frame_used(active_fs, 3);
+ active_fs->frame.used_for_reference = used_for_reference?3:0;
+ if (used_for_reference)
+ {
+ // NOTE(review): redundant - already set to 3 by the ternary above
+ active_fs->frame.used_for_reference = 3;
+ if (active_fs->frame.is_long_term)
+ viddec_h264_set_is_frame_long_term(active_fs, 3);
+ }
+ // Split frame to 2 fields for prediction
+ h264_dpb_split_field(pInfo);
+
+ }break;
+ case TOP_FIELD :{
+ viddec_h264_set_is_top_used(active_fs, 1);
+
+ active_fs->top_field.used_for_reference = used_for_reference;
+ if (used_for_reference)
+ {
+ active_fs->frame.used_for_reference |= 0x1;
+ if (active_fs->top_field.is_long_term)
+ {
+ viddec_h264_set_is_top_long_term(active_fs, 1);
+ active_fs->long_term_frame_idx = active_fs->top_field.long_term_frame_idx;
+ }
+ }
+ // If both fields are now present, build the frame view
+ if (viddec_h264_get_is_used(active_fs) == 3) {
+ h264_dpb_combine_field(use_old); // generate frame view
+ }
+ else
+ {
+ active_fs->frame.poc = active_fs->top_field.poc;
+ }
+
+ }break;
+ case BOTTOM_FIELD :{
+ viddec_h264_set_is_bottom_used(active_fs, 1);
+
+ active_fs->bottom_field.used_for_reference = (used_for_reference<<1);
+ if (used_for_reference)
+ {
+ active_fs->frame.used_for_reference |= 0x2;
+ if (active_fs->bottom_field.is_long_term)
+ {
+ viddec_h264_set_is_bottom_long_term(active_fs, 1);
+ active_fs->long_term_frame_idx = active_fs->bottom_field.long_term_frame_idx;
+ }
+ }
+ // If both fields are now present, build the frame view
+ if (viddec_h264_get_is_used(active_fs) == 3) {
+ h264_dpb_combine_field(use_old); // generate frame view
+ }
+ else
+ {
+ active_fs->frame.poc = active_fs->bottom_field.poc;
+ }
+
+ }break;
+ }
+/*
+ if ( gRestartMode.LastRestartType == RESTART_SEI )
+ {
+ if ( active_fs->open_gop_entry ) dpb.WaitSeiRecovery = 1;
+ }
+
+ gRestartMode.LastRestartType = 0xFFFF;
+*/
+
+ return;
+} ////// End of insert picture in DPB
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_mm_unmark_short_term_for_reference ()
+//
+// Adaptive Memory Management: Mark short term picture unused
+//
+
+// h264_dpb_mm_unmark_short_term_for_reference ()
+//
+// Adaptive memory management (MMCO 1, spec 8.2.5.4.1): computes picNumX
+// from difference_of_pic_nums_minus1 and unmarks the matching short-term
+// picture (whole frame, or single field when decoding fields) as unused
+// for reference, removing it from the reference list when no field of
+// the store remains referenced.
+void h264_dpb_mm_unmark_short_term_for_reference(h264_Info * pInfo, int32_t difference_of_pic_nums_minus1)
+{
+ int32_t picNumX;
+ int32_t currPicNum;
+ uint32_t idx;
+ int32_t unmark_done;
+ h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;
+
+ // Field decoding doubles pic nums (+1 for same parity)
+ if (pInfo->img.structure == FRAME)
+ currPicNum = pInfo->img.frame_num;
+ else
+ currPicNum = (pInfo->img.frame_num << 1) + 1;
+
+ picNumX = currPicNum - (difference_of_pic_nums_minus1 + 1);
+
+ unmark_done = 0;
+
+ for (idx =0; (idx < p_dpb->ref_frames_in_buffer) && (!(unmark_done)); idx++)
+ {
+ h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ref_idc[idx]);
+
+ if (pInfo->img.structure == FRAME)
+ {
+ /* If all pic numbers in the list are different (and they should be)
+ we should terminate the for loop the moment we match pic numbers,
+ no need to continue to check - hence set unmark_done
+ */
+
+ if ((active_fs->frame.used_for_reference == 3) && (viddec_h264_get_is_long_term(active_fs) == 0) &&
+ (active_fs->frame.pic_num == picNumX))
+ {
+ h264_dpb_unmark_for_reference(p_dpb, active_fs->fs_idc);
+ h264_dpb_remove_ref_list(p_dpb, active_fs->fs_idc);
+ unmark_done = 1;
+ }
+ }
+ else
+ {
+ /*
+ If we wish to unmark a short-term picture by picture number when the current picture
+ is a field, we have to unmark the corresponding field as unused for reference,
+ and also if it was part of a frame or complementary reference field pair, the
+ frame is to be marked as unused. However the opposite field may still be used as a
+ reference for future fields
+
+ How will this affect the reference list update ftn coming after??
+
+ */
+ // Top field match: clear bit 0 of the frame's reference mask
+ if ((active_fs->frame.used_for_reference&0x1) && (!(viddec_h264_get_is_long_term(active_fs)&0x01))&&
+ (active_fs->top_field.pic_num == picNumX) )
+ {
+ active_fs->top_field.used_for_reference = 0;
+ active_fs->frame.used_for_reference &= 2;
+
+ unmark_done = 1;
+
+ //Check if other field is used for short-term reference, if not remove from list...
+ if(active_fs->bottom_field.used_for_reference == 0)
+ h264_dpb_remove_ref_list(p_dpb, p_dpb->fs_ref_idc[idx]);
+ }
+ // Bottom field match: clear bit 1 of the frame's reference mask
+ if ((active_fs->frame.used_for_reference&0x2) && (!(viddec_h264_get_is_long_term(active_fs)&0x2)) &&
+ (active_fs->bottom_field.pic_num == picNumX) )
+ {
+ active_fs->bottom_field.used_for_reference = 0;
+ active_fs->frame.used_for_reference &= 1;
+
+ unmark_done = 1;
+
+ //Check if other field is used for reference, if not remove from list...
+ if(active_fs->top_field.used_for_reference == 0)
+ h264_dpb_remove_ref_list(p_dpb, p_dpb->fs_ref_idc[idx]);
+ }
+ }
+ }
+
+ return;
+}
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+////////////////////////////////////////////////////////////////////////////////////
+// h264_dpb_mm_unmark_long_term_for_reference ()
+//
+// Adaptive Memory Management: Mark long term picture unused
+//
+// In a frame situation the long_term_pic_num will refer to another frame.
+// Thus we can call h264_dpb_unmark_for_long_term_reference() and then remove the picture
+// from the list
+//
+// If the current picture is a field, long_term_pic_num will refer to another field
+// It is also the case that each individual field should have a unique picture number
+// 8.2.5.4.2 suggests that when curr pic is a field, an mmco == 2 operation
+// should be accompanied by a second op to unmark the other field as being unused
+///////////////////////////////////////////////////////////////////////////////////
+
/* MMCO operation 2 (8.2.5.4.2): mark the long-term picture whose
 * long_term_pic_num matches as "unused for reference".
 *
 * pInfo             - decoder context; pInfo->img.structure selects frame vs field handling
 * long_term_pic_num - long-term picture number signalled in the slice header
 *
 * Frame case: the whole frame store is unmarked and removed from the
 * long-term list. Field case: only the matching-polarity field is unmarked;
 * the frame store leaves the long-term list only once the other field is
 * also no longer a long-term reference.
 */
void h264_dpb_mm_unmark_long_term_for_reference (h264_Info * pInfo, int32_t long_term_pic_num)
{
  uint32_t idx;
  int32_t unmark_done;
  h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;

  unmark_done = 0;
  /* Scan the long-term reference list; stop as soon as one picture is unmarked. */
  for (idx = 0; (idx < p_dpb->ltref_frames_in_buffer) && (!(unmark_done)); idx++)
  {
    /* Side effect: points the file-global active_fs at this frame store. */
    h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ltref_idc[idx]);

    if (pInfo->img.structure == FRAME)
    {
      /* used_for_reference / is_long_term are 2-bit masks: bit0 = top, bit1 = bottom;
         3 means both fields, i.e. a complete long-term frame. */
      if ((active_fs->frame.used_for_reference==3) && (viddec_h264_get_is_long_term(active_fs)==3) &&
          (active_fs->frame.long_term_pic_num == long_term_pic_num))
      {
        h264_dpb_unmark_for_long_term_reference(p_dpb, p_dpb->fs_ltref_idc[idx]);
        h264_dpb_remove_ltref_list(p_dpb, p_dpb->fs_ltref_idc[idx]);
        unmark_done = 1;
      }
    }
    else
    {
      /// Check top field
      if ((active_fs->frame.used_for_reference&0x1) && (viddec_h264_get_is_long_term(active_fs)&0x1) &&
          (active_fs->top_field.long_term_pic_num == long_term_pic_num) )
      {
        active_fs->top_field.used_for_reference = 0;
        active_fs->top_field.is_long_term = 0;
        /* Keep only the bottom-field bit in the frame-level masks. */
        active_fs->frame.used_for_reference &= 2;
        viddec_h264_set_is_frame_long_term(active_fs, 2);

        unmark_done = 1;

        //Check if other field is used for long term reference, if not remove from list...
        if ((active_fs->bottom_field.used_for_reference == 0) || (active_fs->bottom_field.is_long_term == 0))
          h264_dpb_remove_ltref_list(p_dpb, p_dpb->fs_ltref_idc[idx]);
      }

      /// Check Bottom field
      if ((active_fs->frame.used_for_reference&0x2) && (viddec_h264_get_is_long_term(active_fs)&0x2) &&
          (active_fs->bottom_field.long_term_pic_num == long_term_pic_num) )
      {
        active_fs->bottom_field.used_for_reference = 0;
        active_fs->bottom_field.is_long_term = 0;
        /* Keep only the top-field bit in the frame-level masks. */
        active_fs->frame.used_for_reference &= 1;
        viddec_h264_set_is_frame_long_term(active_fs, 1);

        unmark_done = 1;
        //Check if other field is used for long term reference, if not remove from list...
        if ((active_fs->top_field.used_for_reference == 0) || (active_fs->top_field.is_long_term == 0))
        {
          h264_dpb_remove_ltref_list(p_dpb, p_dpb->fs_ltref_idc[idx]);
        }
      }
    } // field structure
  } //for(idx)

  return;
}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_get_pic_struct_by_pic_num
+//
+// Searches the fields appearing in short term reference list
+// Returns the polarity of the field with pic_num = picNumX
+//////////////////////////////////////////////////////////////////////////////
+
+int32_t h264_dpb_get_pic_struct_by_pic_num(h264_DecodedPictureBuffer *p_dpb, int32_t picNumX)
+{
+ uint32_t idx;
+ int32_t pic_struct = INVALID;
+ int32_t found = 0;
+
+ for (idx =0; (idx < p_dpb->ref_frames_in_buffer) && (!(found)); idx++)
+ {
+ h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ref_idc[idx]);
+
+ if ((active_fs->frame.used_for_reference&0x1) && (!(viddec_h264_get_is_long_term(active_fs)&0x01))&&
+ (active_fs->top_field.pic_num == picNumX) )
+ {
+ found = 1;
+ pic_struct = TOP_FIELD;
+
+ }
+ if ((active_fs->frame.used_for_reference&0x2) && (!(viddec_h264_get_is_long_term(active_fs)&0x2)) &&
+ (active_fs->bottom_field.pic_num == picNumX) )
+ {
+ found = 1;
+ pic_struct = BOTTOM_FIELD;
+
+ }
+ }
+
+ return pic_struct;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_mm_assign_long_term_frame_idx ()
+//
+// Assign a long term frame index to a short term picture
+// Both lists must be updated as part of this process...
+//////////////////////////////////////////////////////////////////////////////
+
/* MMCO operation 3: assign long_term_frame_idx to the short-term picture
 * identified by difference_of_pic_nums_minus1, after first unmarking any
 * existing long-term picture that holds the same index.
 */
void h264_dpb_mm_assign_long_term_frame_idx(h264_Info * pInfo, int32_t difference_of_pic_nums_minus1, int32_t long_term_frame_idx)
{
  h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;
  int32_t picNumX;
  int32_t currPicNum;
  int32_t polarity = 0;

  /* CurrPicNum per the standard: frame_num for frames, 2*frame_num+1 for fields. */
  if (pInfo->img.structure == FRAME) {
    currPicNum = pInfo->img.frame_num;
  } else {
    currPicNum = (pInfo->img.frame_num << 1) + 1;
  }

  picNumX = currPicNum - (difference_of_pic_nums_minus1 + 1);

  // remove frames / fields with same long_term_frame_idx
  if (pInfo->img.structure == FRAME) {
    h264_dpb_unmark_long_term_frame_for_reference_by_frame_idx(p_dpb, long_term_frame_idx);
  } else {
    /* Side effect relied upon below: the search leaves active_fs pointing at
       the frame store containing the matched field, so active_fs->fs_idc
       identifies it. NOTE(review): if polarity were INVALID, active_fs would
       be the last entry scanned — the INVALID guard below protects this. */
    polarity = h264_dpb_get_pic_struct_by_pic_num(p_dpb, picNumX);

    if(polarity != INVALID)
      h264_dpb_unmark_long_term_field_for_reference_by_frame_idx(p_dpb, long_term_frame_idx, active_fs->fs_idc, polarity);
  }

  /* Finally mark the target picture itself as long-term with the new index. */
  h264_dpb_mark_pic_long_term(pInfo, long_term_frame_idx, picNumX);

  return;
}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_mm_update_max_long_term_frame_idx ()
+//
+// Set new max long_term_frame_idx
+//
+
+void h264_dpb_mm_update_max_long_term_frame_idx(h264_DecodedPictureBuffer *p_dpb,int32_t max_long_term_frame_idx_plus1)
+{
+ //h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;
+ int32_t idx;
+ int32_t temp;
+ int32_t removed_count;
+ int32_t idx2 = 0;
+
+ p_dpb->max_long_term_pic_idx = max_long_term_frame_idx_plus1 - 1;
+
+ temp = p_dpb->ltref_frames_in_buffer;
+ removed_count = 0;
+
+ // check for invalid frames
+ for (idx = 0; idx < temp; idx++)
+ {
+ idx2 = idx - removed_count;
+ h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ltref_idc[idx2]);
+
+ if (active_fs->long_term_frame_idx > p_dpb->max_long_term_pic_idx)
+ {
+ removed_count++;
+ h264_dpb_unmark_for_long_term_reference(p_dpb, p_dpb->fs_ltref_idc[idx2]);
+ h264_dpb_remove_ltref_list(p_dpb, p_dpb->fs_ltref_idc[idx2]);
+ }
+ }
+ return;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_mm_unmark_all_short_term_for_reference ()
+//
+// Unmark all short term refernce pictures
+//
+
+void h264_dpb_mm_unmark_all_short_term_for_reference (h264_DecodedPictureBuffer *p_dpb)
+{
+ int32_t idx;
+ int32_t temp = p_dpb->ref_frames_in_buffer;
+
+ for (idx = 0; idx < temp; idx++)
+ {
+ h264_dpb_unmark_for_reference(p_dpb, p_dpb->fs_ref_idc[0]);
+ h264_dpb_remove_ref_list(p_dpb, p_dpb->fs_ref_idc[0]);
+ }
+ return;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_mm_mark_current_picture_long_term ()
+//
+// Marks the current picture as long term after unmarking any long term picture
+// already assigned with the same long term frame index
+//
+
/* MMCO operation 6: mark the picture currently being decoded (fs_dec_idc) as
 * long-term with the given index, first unmarking any other picture already
 * holding that long_term_frame_idx.
 */
void h264_dpb_mm_mark_current_picture_long_term(h264_DecodedPictureBuffer *p_dpb, int32_t long_term_frame_idx)
{
  int32_t picNumX;
  h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc);

  if (viddec_h264_get_dec_structure(active_fs) == FRAME)
  {
    h264_dpb_unmark_long_term_frame_for_reference_by_frame_idx(p_dpb, long_term_frame_idx);
    /* The unmark call above rebinds active_fs; point it back at the current picture. */
    h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc);
    active_fs->frame.is_long_term = 1;
    active_fs->frame.long_term_frame_idx = long_term_frame_idx;
    /* For frames LongTermPicNum equals LongTermFrameIdx. */
    active_fs->frame.long_term_pic_num = long_term_frame_idx;
  }
  else
  {
    /* Field picture: mark only the field being decoded.
       For the current field, LongTermPicNum = 2 * LongTermFrameIdx + 1. */
    if(viddec_h264_get_dec_structure(active_fs) == TOP_FIELD)
    {
      /* NOTE(review): picNumX is computed but only consumed implicitly —
         it is not passed to the unmark call below; confirm intent. */
      picNumX = (active_fs->top_field.pic_num << 1) + 1;
      active_fs->top_field.is_long_term = 1;
      active_fs->top_field.long_term_frame_idx = long_term_frame_idx;

      // Assign long-term pic num
      active_fs->top_field.long_term_pic_num = (long_term_frame_idx << 1) + 1;
    }
    else
    {
      picNumX = (active_fs->bottom_field.pic_num << 1) + 1;
      active_fs->bottom_field.is_long_term = 1;
      active_fs->bottom_field.long_term_frame_idx = long_term_frame_idx;

      // Assign long-term pic num
      active_fs->bottom_field.long_term_pic_num = (long_term_frame_idx << 1) + 1;

    }
    /* Unmark any other field with this index, sparing the complementary field
       of the current picture (fs_dec_idc). */
    h264_dpb_unmark_long_term_field_for_reference_by_frame_idx(p_dpb, long_term_frame_idx, p_dpb->fs_dec_idc, viddec_h264_get_dec_structure(active_fs));
  }
  // Add to long term list
  //h264_dpb_add_ltref_list(p_dpb->fs_dec_idc);

  return;
}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_unmark_long_term_frame_for_reference_by_frame_idx ()
+//
+// Mark a long-term reference frame or complementary field pair unused for referemce
+// NOTE: Obviously this ftn cannot be used to unmark individual fields...
+//////////////////////////////////////////////////////////////////////////////
+
/* Unmark every long-term frame (or complementary field pair) whose
 * long_term_frame_idx matches, and drop it from the long-term list.
 * Cannot be used to unmark individual fields (see header comment above).
 */
void h264_dpb_unmark_long_term_frame_for_reference_by_frame_idx(h264_DecodedPictureBuffer *p_dpb, int32_t long_term_frame_idx)
{
  uint32_t idx;
  /* NOTE(review): h264_dpb_remove_ltref_list() is called inside a forward
     scan of the same list; if removal compacts fs_ltref_idc and shrinks
     ltref_frames_in_buffer, the entry shifted into the removed slot is
     skipped on the next idx++. At most one entry should match a given
     index, so this is likely benign — confirm against remove_ltref_list. */
  for(idx =0; idx < p_dpb->ltref_frames_in_buffer; idx++)
  {
    h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ltref_idc[idx]);

    if (active_fs->long_term_frame_idx == long_term_frame_idx)
    {
      h264_dpb_unmark_for_long_term_reference(p_dpb, p_dpb->fs_ltref_idc[idx]);
      h264_dpb_remove_ltref_list(p_dpb, p_dpb->fs_ltref_idc[idx]);
    }
  }
  return;
}
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_unmark_long_term_field_for_reference_by_frame_idx ()
+//
+// Mark a long-term reference field unused for reference. However if it is the
+// complementary field (opposite polarity) of the picture stored in fs_idc,
+// we do not unmark it
+//////////////////////////////////////////////////////////////////////////////
+
/* Unmark the long-term field holding long_term_frame_idx — unless it is the
 * complementary (opposite-polarity) field of the picture in fs_idc, which
 * must stay marked.
 *
 * p_dpb    - decoded picture buffer
 * long_term_frame_idx - index being (re)assigned
 * fs_idc   - frame store of the current picture whose complement is protected
 * polarity - polarity (TOP_FIELD/BOTTOM_FIELD) of the current picture's field
 */
void h264_dpb_unmark_long_term_field_for_reference_by_frame_idx(h264_DecodedPictureBuffer *p_dpb, int32_t long_term_frame_idx, int32_t fs_idc, int32_t polarity)
{
  uint32_t idx;
  int32_t found = 0;
  int32_t is_complement = 0;

  for (idx = 0; (idx < p_dpb->ltref_frames_in_buffer) && (found == 0); idx++)
  {
    h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ltref_idc[idx]);
    if (active_fs->long_term_frame_idx == long_term_frame_idx)
    {
      if(active_fs->fs_idc == fs_idc)
      {
        // Again these seem like redundant checks but for safety while until JM is updated
        /* The match lives in the current picture's own frame store: it is the
           protected complement only if the opposite field is long-term. */
        if (polarity == TOP_FIELD)
          is_complement = (active_fs->bottom_field.is_long_term)? 1:0;
        else if(polarity == BOTTOM_FIELD)
          is_complement = (active_fs->top_field.is_long_term) ? 1:0;
      }
      found = 1;
    }
  }

  if(found) {
    if(is_complement == 0)
    {
      /* The for-loop increments idx once more after setting found,
         so the matched entry is at idx-1. */
      h264_dpb_unmark_for_long_term_reference(p_dpb, p_dpb->fs_ltref_idc[idx-1]);
      h264_dpb_remove_ltref_list(p_dpb, p_dpb->fs_ltref_idc[idx-1]);
    }
  }

  return;
}
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_mark_pic_long_term ()
+//
+// This is used on a picture already in the dpb - i.e. not for the current picture
+// dpb_split / dpb_combine field will perform ftnality in that case
+//
+// Marks a picture as used for long-term reference. Adds it to the long-term
+// reference list. Also removes it from the short term reference list if required
+//
+// Note: if the current picture is a frame, the picture to be marked will be a
+// short-term reference frame or short-term complemenetary reference field pair
+// We use the pic_num assigned to the frame part of the structure to locate it
+// Both its fields will have their long_term_frame_idx and long_term_pic_num
+// assigned to be equal to long_term_frame_idx
+//
+// If the current picture is a field, the picture to be marked will be a
+// short-term reference field. We use the pic_nums assigned to the field parts of
+// the structure to identify the appropriate field. We assign the long_term_frame_idx
+// of the field equal to long_term_frame_idx.
+//
+// We also check to see if this marking has resulted in both fields of the frame
+// becoming long_term. If it has, we update the frame part of the structure by
+// setting its long_term_frame_idx
+//////////////////////////////////////////////////////////////////////////////
+
/* Mark a picture already in the DPB (identified by picNumX) as long-term with
 * long_term_frame_idx; move it to the long-term list and off the short-term
 * list as required. See the header comment above for the frame/field rules.
 */
void h264_dpb_mark_pic_long_term(h264_Info * pInfo, int32_t long_term_frame_idx, int32_t picNumX)
{
  h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;
  uint32_t idx;
  int32_t mark_done;
  int32_t polarity = 0;

  mark_done = 0;

  if (pInfo->img.structure == FRAME)
  {
    /* Frame case: locate the short-term reference frame with this pic_num. */
    for (idx = 0; (idx < p_dpb->ref_frames_in_buffer) && (!(mark_done)); idx++)
    {
      h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ref_idc[idx]);

      /* used_for_reference == 3: both fields reference, i.e. a complete frame. */
      if (active_fs->frame.used_for_reference == 3)
      {
        if ((!(active_fs->frame.is_long_term))&&(active_fs->frame.pic_num == picNumX))
        {
          /* Propagate the index to the frame and both of its fields. */
          active_fs->long_term_frame_idx = long_term_frame_idx;
          active_fs->frame.long_term_frame_idx = long_term_frame_idx;
          active_fs->top_field.long_term_frame_idx = long_term_frame_idx;
          active_fs->bottom_field.long_term_frame_idx = long_term_frame_idx;

          active_fs->frame.is_long_term = 1;
          active_fs->top_field.is_long_term = 1;
          active_fs->bottom_field.is_long_term = 1;

          viddec_h264_set_is_frame_long_term(active_fs, 3);
          mark_done = 1;

          // Assign long-term pic num
          active_fs->frame.long_term_pic_num = long_term_frame_idx;
          active_fs->top_field.long_term_pic_num = long_term_frame_idx;
          active_fs->bottom_field.long_term_pic_num = long_term_frame_idx;
          // Add to long term list
          h264_dpb_add_ltref_list(p_dpb, p_dpb->fs_ref_idc[idx]);
          // Remove from short-term list
          h264_dpb_remove_ref_list(p_dpb, p_dpb->fs_ref_idc[idx]);
        }
      }
    }
  }
  else
  {
    /* Field case: the search leaves active_fs pointing at the matched frame
       store — everything below depends on that side effect. */
    polarity = h264_dpb_get_pic_struct_by_pic_num(p_dpb, picNumX);
    /* Original author's BUG marker: if polarity == INVALID, active_fs is the
       last entry scanned and this write hits the wrong frame store. */
    active_fs->long_term_frame_idx = long_term_frame_idx; /////BUG

    if(polarity == TOP_FIELD)
    {
      active_fs->top_field.long_term_frame_idx = long_term_frame_idx;
      active_fs->top_field.is_long_term = 1;
      viddec_h264_set_is_top_long_term(active_fs, 1);

      /* LongTermPicNum = 2*idx + 1 for same-parity ("current") fields,
         2*idx for the opposite parity. */
      active_fs->top_field.long_term_pic_num = (long_term_frame_idx << 1) + ((pInfo->img.structure == TOP_FIELD) ? 1 : 0);

    }
    else if (polarity == BOTTOM_FIELD)
    {
      active_fs->bottom_field.long_term_frame_idx = long_term_frame_idx;
      active_fs->bottom_field.is_long_term = 1;
      viddec_h264_set_is_bottom_long_term(active_fs, 1);

      // Assign long-term pic num
      active_fs->bottom_field.long_term_pic_num = (long_term_frame_idx << 1) + ((pInfo->img.structure == BOTTOM_FIELD) ? 1 : 0);
    }

    /* Both fields now long-term: promote the whole frame and leave the
       short-term list; otherwise put this store on the long-term list. */
    if (viddec_h264_get_is_long_term(active_fs) == 3)
    {
      active_fs->frame.is_long_term = 1;
      active_fs->frame.long_term_frame_idx = long_term_frame_idx;
      h264_dpb_remove_ref_list(p_dpb, active_fs->fs_idc);
    }
    else
    {
      // We need to add this idc to the long term ref list...
      h264_dpb_add_ltref_list(p_dpb, active_fs->fs_idc);

      // If the opposite field is not a short term reference, remove it from the
      // short term list. Since we know top field is a reference but both are not long term
      // we can simply check that both fields are not references...
      if(active_fs->frame.used_for_reference != 3)
        h264_dpb_remove_ref_list(p_dpb, active_fs->fs_idc);
    }
  }
  return;
} ///// End of mark pic long term
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_adaptive_memory_management ()
+//
+// Perform Adaptive memory control decoded reference picture marking process
+//////////////////////////////////////////////////////////////////////////////
+
/* Execute the adaptive memory control decoded reference picture marking
 * process (8.2.5.4): apply each buffered MMCO operation in slice-header
 * order, then handle the MMCO-5 reset of frame_num/POC and flush.
 */
void h264_dpb_adaptive_memory_management (h264_Info * pInfo)
{
  h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;
  int32_t idx;

  idx = 0;

  while (idx < pInfo->SliceHeader.sh_dec_refpic.dec_ref_pic_marking_count)
  {
    switch(pInfo->SliceHeader.sh_dec_refpic.memory_management_control_operation[idx])
    {
      case 1:{ //Mark a short-term reference picture as "unused for reference"
        h264_dpb_mm_unmark_short_term_for_reference(pInfo,
        pInfo->SliceHeader.sh_dec_refpic.difference_of_pic_num_minus1[idx]);
      } break;
      case 2:{ //Mark a long-term reference picture as "unused for reference"
        h264_dpb_mm_unmark_long_term_for_reference(pInfo,
        pInfo->SliceHeader.sh_dec_refpic.long_term_pic_num[idx]);
      }break;
      case 3:{ //Mark a short-term reference picture as "used for long-term reference" and assign a long-term frame index to it
        h264_dpb_mm_assign_long_term_frame_idx(pInfo,
        pInfo->SliceHeader.sh_dec_refpic.difference_of_pic_num_minus1[idx],
        pInfo->SliceHeader.sh_dec_refpic.long_term_frame_idx[idx]);
      }break;
      case 4:{ //Specify the maximum long-term frame index and
        //mark all long-term reference pictures having long-term frame indices greater than
        //the maximum value as "unused for reference"
        h264_dpb_mm_update_max_long_term_frame_idx (&pInfo->dpb,
        pInfo->SliceHeader.sh_dec_refpic.max_long_term_frame_idx_plus1[idx]);
      }break;
      case 5:{ //Mark all reference pictures as "unused for reference" and set the MaxLongTermFrameIdx variable to
        // "no long-term frame indices"
        h264_dpb_mm_unmark_all_short_term_for_reference(&pInfo->dpb);
        h264_dpb_mm_update_max_long_term_frame_idx(&pInfo->dpb, 0);
        pInfo->img.last_has_mmco_5 = 1;
      }break;
      case 6:{ //Mark the current picture as "used for long-term reference" and assign a long-term frame index to it
        h264_dpb_mm_mark_current_picture_long_term(&pInfo->dpb,
        pInfo->SliceHeader.sh_dec_refpic.long_term_frame_idx[idx]);
      }break;
    }
    idx++;
  }


  if (pInfo->img.last_has_mmco_5)
  {
    /* MMCO 5: the current picture behaves as if it were an IDR —
       reset frame_num and rebase POC so the current picture's POC becomes 0. */
    pInfo->img.frame_num = 0;
    pInfo->SliceHeader.frame_num=0;
    h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc);

    if (viddec_h264_get_dec_structure(active_fs) == FRAME)
    {
      pInfo->img.bottompoc -= active_fs->frame.poc;
      pInfo->img.toppoc -= active_fs->frame.poc;


      active_fs->frame.poc = 0;
      active_fs->frame.pic_num = 0;
      active_fs->frame_num = 0;
    }

    else if (viddec_h264_get_dec_structure(active_fs) == TOP_FIELD)
    {
      active_fs->top_field.poc = active_fs->top_field.pic_num = 0;
      pInfo->img.toppoc = active_fs->top_field.poc;
    }
    else if (viddec_h264_get_dec_structure(active_fs) == BOTTOM_FIELD)
    {
      active_fs->bottom_field.poc = active_fs->bottom_field.pic_num = 0;
      pInfo->img.bottompoc = 0;
    }

    h264_dpb_flush_dpb(pInfo, 1, pInfo->img.second_field,pInfo->active_SPS.num_ref_frames);
  }
  // Reset the marking count operations for the current picture...
  pInfo->SliceHeader.sh_dec_refpic.dec_ref_pic_marking_count = 0;

  return;
} ////// End of adaptive memory management
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_gaps_in_frame_num_mem_management ()
+//
+// Produces a set of frame_nums pertaining to "non-existing" pictures
+// Calls h264_dpb_store_picture_in_dpb
+//////////////////////////////////////////////////////////////////////////////
+
+void h264_dpb_gaps_in_frame_num_mem_management(h264_Info * pInfo)
+{
+ int32_t temp_frame_num = 0;
+ int32_t idx, prev_idc;
+ int32_t prev_frame_num_plus1_wrap;
+ uint32_t temp;
+ int32_t MaxFrameNum = 1 << (pInfo->active_SPS.log2_max_frame_num_minus4 + 4);
+ seq_param_set_used_ptr active_sps = &pInfo->active_SPS;
+ h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;
+
+ pInfo->img.gaps_in_frame_num = 0;
+
+ // pInfo->img.last_has_mmco_5 set thru store_picture_in_dpb
+ if (pInfo->img.last_has_mmco_5)
+ {
+ // If the previous picture was an unpaired field, mark it as a dangler
+ if(p_dpb->used_size)
+ {
+ idx = p_dpb->used_size-1;
+ prev_idc = p_dpb->fs_dpb_idc[idx];
+ if (prev_idc != MPD_DPB_FS_NULL_IDC)
+ {
+ h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dpb_idc[idx]);
+ active_fs->frame_num =0;
+ }
+ }
+ pInfo->img.PreviousFrameNumOffset = 0;
+ //CONFORMANCE_ISSUE
+ pInfo->img.PreviousFrameNum = 0;
+
+ }
+
+ // Check for gaps in frame_num
+ if(pInfo->SliceHeader.idr_flag) {
+ pInfo->img.PreviousFrameNum = pInfo->img.frame_num;
+ }
+ // Have we re-started following a recovery point message?
+/*
+ else if(got_sei_recovery || aud_got_restart){
+ pInfo->img.PreviousFrameNum = pInfo->img.frame_num;
+ //got_sei_recovery = 0;
+ //aud_got_restart = 0;
+ }
+*/
+ else if(pInfo->img.frame_num != pInfo->img.PreviousFrameNum)
+ {
+ if (MaxFrameNum)
+ ldiv_mod_u((uint32_t)(pInfo->img.PreviousFrameNum + 1), (uint32_t)MaxFrameNum, &temp);
+
+ prev_frame_num_plus1_wrap = temp;
+ if(pInfo->img.frame_num != prev_frame_num_plus1_wrap)
+ {
+ pInfo->img.gaps_in_frame_num = (pInfo->img.frame_num < pInfo->img.PreviousFrameNum)? ((MaxFrameNum + pInfo->img.frame_num -1) - pInfo->img.PreviousFrameNum): (pInfo->img.frame_num - pInfo->img.PreviousFrameNum - 1);
+ // We should test for an error here - should infer an unintentional loss of pictures
+ }
+ }
+
+
+ //if(active_sps->gaps_in_frame_num_value_allowed_flag == 0) {
+ if(pInfo->img.gaps_in_frame_num && (active_sps->gaps_in_frame_num_value_allowed_flag == 0)) {
+ // infer an unintentional loss of pictures
+ // only invoke following process for a conforming bitstream
+ // when gaps_in_frame_num_value_allowed_flag is equal to 1
+ pInfo->img.gaps_in_frame_num = 0;
+
+ //mfd_printf("ERROR STREAM??\n");
+ ////// Error handling here----
+ }
+
+ /////// Removed following OLO source (Sodaville H.D)
+ //else if (pInfo->img.gaps_in_frame_num > active_sps->num_ref_frames) {
+ // // No need to produce any more non-existent frames than the amount required to flush the dpb
+ // pInfo->img.gaps_in_frame_num = active_sps->num_ref_frames;
+ //mfd_printf("gaps in frame: %d\n", gaps_in_frame_num);
+ //}
+
+ // If the previous picture was an unpaired field, mark it as a dangler
+ if(p_dpb->used_size)
+ {
+ idx = p_dpb->used_size-1;
+ prev_idc = p_dpb->fs_dpb_idc[idx];
+ if (prev_idc != MPD_DPB_FS_NULL_IDC)
+ {
+ h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dpb_idc[idx]);
+ if(viddec_h264_get_is_used(active_fs) != 3) {
+ h264_dpb_mark_dangling_field(p_dpb, active_fs->fs_idc); //, DANGLING_TYPE_GAP_IN_FRAME
+ }
+ }
+ }
+
+ while(temp_frame_num < pInfo->img.gaps_in_frame_num)
+ {
+ h264_dpb_assign_frame_store(pInfo, 1);
+
+ // Set up initial markings - not sure if all are needed
+ viddec_h264_set_dec_structure(active_fs, FRAME);
+
+ if(MaxFrameNum)
+ ldiv_mod_u((uint32_t)(pInfo->img.PreviousFrameNum + 1), (uint32_t)MaxFrameNum, &temp);
+
+ active_fs->frame.pic_num = temp;
+ active_fs->long_term_frame_idx = 0;
+ active_fs->frame.long_term_pic_num = 0;
+ viddec_h264_set_is_frame_long_term(active_fs, 0);
+
+ // Note the call below will overwrite some aspects of the img structure with info relating to the
+ // non-existent picture
+ // However, since this is called before h264_hdr_decoding_poc() for the current existing picture
+ // it should be o.k.
+ if(pInfo->img.pic_order_cnt_type)
+ h264_hdr_decoding_poc(pInfo, 1, temp);
+
+ pInfo->img.structure = FRAME;
+ active_fs->frame.poc = pInfo->img.framepoc;
+
+ // call store_picture_in_dpb
+
+ h264_dpb_store_previous_picture_in_dpb(pInfo, 1, 0);
+
+ h264_hdr_post_poc(pInfo, 1, temp, 0);
+
+ temp_frame_num++;
+ }
+}
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_unmark_for_reference ()
+//
+// Mark FrameStore unused for reference. Removes it from the short term reference list
+//////////////////////////////////////////////////////////////////////////////
+
+void h264_dpb_unmark_for_reference(h264_DecodedPictureBuffer *p_dpb, int32_t fs_idc)
+{
+ h264_dpb_set_active_fs(p_dpb, fs_idc);
+
+ if (viddec_h264_get_is_used(active_fs)&0x1) active_fs->top_field.used_for_reference = 0;
+ if (viddec_h264_get_is_used(active_fs)&0x2) active_fs->bottom_field.used_for_reference = 0;
+ if (viddec_h264_get_is_used(active_fs) == 3) active_fs->frame.used_for_reference = 0;
+
+ active_fs->frame.used_for_reference = 0;
+ return;
+}
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_unmark_for_long_term_reference ()
+//
+// mark FrameStore unused for reference and reset long term flags
+// This function does not remove it form the long term list
+//////////////////////////////////////////////////////////////////////////////
+
+void h264_dpb_unmark_for_long_term_reference(h264_DecodedPictureBuffer *p_dpb, int32_t fs_idc)
+{
+ h264_dpb_set_active_fs(p_dpb, fs_idc);
+
+ if (viddec_h264_get_is_used(active_fs)&0x1)
+ {
+ active_fs->top_field.used_for_reference = 0;
+ active_fs->top_field.is_long_term = 0;
+ }
+
+ if (viddec_h264_get_is_used(active_fs)&0x2)
+ {
+ active_fs->bottom_field.used_for_reference = 0;
+ active_fs->bottom_field.is_long_term = 0;
+ }
+ if (viddec_h264_get_is_used(active_fs) == 3)
+ {
+ active_fs->frame.used_for_reference = 0;
+ active_fs->frame.is_long_term = 0;
+ }
+
+ active_fs->frame.used_for_reference = 0;
+ viddec_h264_set_is_frame_long_term(active_fs, 0);
+
+ return;
+}
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_mark_dangling_field
+//
+// Tells HW previous field was dangling
+// Marks it in SW as so
+// Takes appropriate actions. - sys_data needs thought through...
+//////////////////////////////////////////////////////////////////////////////
+
+void h264_dpb_mark_dangling_field(h264_DecodedPictureBuffer *p_dpb, int32_t fs_idc)
+{
+
+ h264_dpb_set_active_fs(p_dpb, fs_idc);
+
+ //PRINTF(MFD_NONE, " fs_idc = %d DANGLING_TYPE = %d \n", fs_idc, reason);
+ /*
+ Make the check that it has not already been marked
+ This covers the situation of a dangling field followed by a
+ frame which is direct output (i.e. never entered into the dpb).
+ In this case we could attempt to mark the prev unpaired field
+ as a dangler twice which would upset the HW dpb_disp_q count
+ */
+
+ if(viddec_h264_get_is_dangling(active_fs) == 0)
+ {
+ switch(viddec_h264_get_dec_structure(active_fs))
+ {
+ case TOP_FIELD:
+ viddec_h264_set_is_dangling(active_fs, 1);
+ //PRINTF(MFD_NONE, "FN:%d fs_idc=%d FRAME_FLAG_DANGLING_TOP_FIELD\n ", (h264_frame_number+1), active_fs->fs_idc);
+ break;
+ case BOTTOM_FIELD:
+ //PRINTF(MFD_NONE, " FN:%d fs_idc=%d FRAME_FLAG_DANGLING_BOTTOM_FIELD \n ", (h264_frame_number+1), active_fs->fs_idc);
+ viddec_h264_set_is_dangling(active_fs, 1);
+ break;
+ default:
+ //PRINTF(MFD_NONE, "FN:%d fs_idc=%d DANGLING: FATAL_ERROR\n ", (h264_frame_number+1), active_fs->fs_idc);
+ break;
+ }
+
+ //h264_send_new_decoded_frame();
+ }
+ return;
+}
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+
+
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_is_used_for_reference ()
+//
+// Check if one of the frames/fields in active_fs is used for reference
+//
+void h264_dpb_is_used_for_reference(int32_t * flag)
+{
+
+ /* Check out below for embedded */
+ *flag = 0;
+ if (active_fs->frame.used_for_reference)
+ *flag = 1;
+ else if (viddec_h264_get_is_used(active_fs) ==3) // frame
+ *flag = active_fs->frame.used_for_reference;
+ else
+ {
+ if (viddec_h264_get_is_used(active_fs)&0x1) // top field
+ *flag = active_fs->top_field.used_for_reference;
+ if (viddec_h264_get_is_used(active_fs)&0x2) // bottom field
+ *flag = *flag || active_fs->bottom_field.used_for_reference;
+ }
+}
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_idr_memory_management ()
+//
+// Perform Memory management for idr pictures
+//////////////////////////////////////////////////////////////////////////////
+
// h264_dpb_idr_memory_management ()
//
// Decoded reference picture marking for IDR pictures (H.264 spec 8.2.5):
//  1) empty the DPB -- discarding stored pictures when
//     no_output_of_prior_pics_flag is set, otherwise flushing them through
//     the display output queue;
//  2) apply long_term_reference_flag of the IDR picture to the frame store
//     currently being decoded;
//  3) recompute the DPB bumping level from the level's MaxDPB limit and the
//     current frame size.
//
// pInfo                        - decoder context (owns the DPB)
// active_sps                   - the active sequence parameter set
// no_output_of_prior_pics_flag - non-zero: drop stored pictures undisplayed
void h264_dpb_idr_memory_management (h264_Info * pInfo,seq_param_set_used_ptr active_sps, int32_t no_output_of_prior_pics_flag)
{
    h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;
    uint32_t idx;
    uint32_t i;
    int32_t DPB_size;                 // MaxDPB for the stream level, in units of 1024 bytes
    int32_t FrameSizeInBytes, FrameSizeInMbs;
    uint32_t data;                    // discarded remainder from ldiv_mod_u()
    int32_t num_ref_frames = active_sps->num_ref_frames;
    int32_t level_idc = active_sps->level_idc;
    uint32_t temp_bump_level=0;


    /// H.D-----
    /// There are 2 kinds of dpb flush defined, one is with display, the other is without display
    /// The function name dpb_flush actually is just the first, and the 2nd one is for error case or no_prior_output
    /// We will rewrite the code below to make it clean and clear
    ///
    if (no_output_of_prior_pics_flag)
    {

        // free all stored pictures
        for (idx = 0; idx < p_dpb->used_size; idx = idx + 1)
        {
            h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dpb_idc[idx]);

            //mfd_printf(" directly freeing fs_idc = %d DSN = 0x%x \n",active_fs->fs_idc, active_fs->first_dsn);
            viddec_h264_set_is_frame_used(active_fs, 0);
            //if( (active_fs->frame_sent == 0x01) && (active_fs->is_output == 0x0))
            {
                //DECODED_FRAME sent but not DISPLAY_FRAME
                h264_dpb_unmark_for_reference(p_dpb, active_fs->fs_idc);
                h264_dpb_remove_ref_list(p_dpb, active_fs->fs_idc);
                //h264_send_new_display_frame(0x01); //send ignore_frame signal to Host

                /// Add into drop-out list for all frms in dpb without display
                if(!(viddec_h264_get_is_non_existent(active_fs))) {
                    if( viddec_h264_get_is_output(&(p_dpb->fs[p_dpb->fs_dpb_idc[idx]])) ) { //// This frame has been displayed but not released
                        p_dpb->frame_id_need_to_be_removed[p_dpb->frame_numbers_need_to_be_removed] = p_dpb->fs_dpb_idc[idx];
                        p_dpb->frame_numbers_need_to_be_removed ++;
                    } else { //// This frame will be removed without display
                        p_dpb->frame_id_need_to_be_dropped[p_dpb->frame_numbers_need_to_be_dropped] = p_dpb->fs_dpb_idc[idx];
                        p_dpb->frame_numbers_need_to_be_dropped ++;
                    }
                }
            }

        }

        ////////////////////////////////////////// Reset Reference list
        for (i = 0; i < p_dpb->ref_frames_in_buffer; i++)
            p_dpb->fs_ref_idc[i] = MPD_DPB_FS_NULL_IDC;

        for (i = 0; i < p_dpb->ltref_frames_in_buffer; i++)
            p_dpb->fs_ltref_idc[i] = MPD_DPB_FS_NULL_IDC;

        ////////////////////////////////////////// Reset DPB and dpb list
        for (i = 0; i < p_dpb->used_size; i++) {
            p_dpb->fs[p_dpb->fs_dpb_idc[i]].fs_idc = MPD_DPB_FS_NULL_IDC;
            p_dpb->fs_dpb_idc[i] = MPD_DPB_FS_NULL_IDC;
        }

        p_dpb->used_size = 0;
        p_dpb->ref_frames_in_buffer = 0;
        p_dpb->ltref_frames_in_buffer = 0;

        // Sentinel: most negative int32, so any real POC compares greater.
        p_dpb->last_output_poc = 0x80000000;
    }
    else {
        // Prior pictures are wanted: flush them through the output queue,
        // keeping the current second field (if any) in the DPB.
        h264_dpb_flush_dpb(pInfo, 1, pInfo->img.second_field, num_ref_frames);
    }

    // Apply the IDR picture's long_term_reference_flag to the frame store
    // currently being decoded.
    if (p_dpb->fs_dec_idc != MPD_DPB_FS_NULL_IDC) // added condition for use of DPB initialization
    {
        h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc);
        if (pInfo->img.long_term_reference_flag)
        {
            p_dpb->max_long_term_pic_idx = 0;
            // NOTE(review): no break statements -- FRAME falls through and
            // marks frame + both fields long-term, and TOP_FIELD also marks
            // the bottom field. Confirm the field cases are intentionally
            // cumulative.
            switch (viddec_h264_get_dec_structure(active_fs))
            {
                case FRAME        : active_fs->frame.is_long_term = 1;
                case TOP_FIELD    : active_fs->top_field.is_long_term = 1;
                case BOTTOM_FIELD : active_fs->bottom_field.is_long_term = 1;
            }
            active_fs->long_term_frame_idx = 0;
        }
        else
        {
            p_dpb->max_long_term_pic_idx = MPD_DPB_FS_NULL_IDC;
            viddec_h264_set_is_frame_long_term(active_fs, 0);
        }
    }

    p_dpb->OutputLevel = 0;
    p_dpb->OutputLevelValid = 0;
    p_dpb->OutputCtrl = 0;


    // Set up bumping level - do this every time a parameters set is activated...
    if(active_sps->sps_disp.vui_parameters_present_flag)
    {
        if(active_sps->sps_disp.vui_seq_parameters.bitstream_restriction_flag)
        {
            //p_dpb->OutputLevel = active_sps->sps_disp.vui_seq_parameters.num_reorder_frames;
            //p_dpb->OutputLevelValid = 1;
        }
    }

    // MaxDPB per level (H.264 Annex A, Table A-1), in units of 1024 bytes.
    switch(level_idc)
    {
        case h264_Level1b:
        case h264_Level1:
        {
            // Level 1b has a larger DPB than plain Level 1 for Baseline-family
            // profiles (profile_idc < 100, constraint_set3 clear).
            if ((active_sps->profile_idc < 100) && ((active_sps->constraint_set_flags & 0x1) == 0)) {
                DPB_size = 338;
            }
            else {
                DPB_size = 149;
            }

            break;
        }
        case h264_Level11:
        {
            DPB_size = 338;
            break;
        }
        case h264_Level12:
        case h264_Level13:
        case h264_Level2:
        {
            DPB_size = 891;
            break;
        }
        case h264_Level21:
        {
            DPB_size = 1782;
            break;
        }
        case h264_Level22:
        case h264_Level3:
        {
            DPB_size = 3038;
            break;
        }
        case h264_Level31:
        {
            DPB_size = 6750;
            break;
        }
        case h264_Level32:
        {
            DPB_size = 7680;
            break;
        }
        case h264_Level4:
        case h264_Level41:
        {
            DPB_size = 12288;
            break;
        }
        case h264_Level42:
        {
            DPB_size = 13056;
            break;
        }
        case h264_Level5:
        {
            DPB_size = 41400;
            break;
        }
        case h264_Level51:
        {
            DPB_size = 69120;
            break;
        }
        default : DPB_size = 69120; break;
    }

    // Frame size in bytes for 4:2:0: 256 luma + 128 chroma bytes per macroblock.
    FrameSizeInMbs = pInfo->img.PicWidthInMbs * pInfo->img.FrameHeightInMbs;
    FrameSizeInBytes = (FrameSizeInMbs << 8) + (FrameSizeInMbs << 7);

    if(FrameSizeInBytes)
    {
        // BumpLevel = number of frames of this size that fit in MaxDPB,
        // clamped to fit in a uint8_t.
        temp_bump_level = ldiv_mod_u((DPB_size << 10), FrameSizeInBytes, &data);

        if(temp_bump_level > 255)
        {
            p_dpb->BumpLevel = 255;
        }
        else
        {
            p_dpb->BumpLevel = (uint8_t)temp_bump_level;
        }
    }

    // Fall back to num_ref_frames + 1, and cap at the 16-frame DPB maximum.
    if (p_dpb->BumpLevel == 0)
        p_dpb->BumpLevel = active_sps->num_ref_frames + 1;

    if (p_dpb->BumpLevel > 16)
        p_dpb->BumpLevel = 16;


    // VUI max_dec_frame_buffering may lower (never raise) the bumping level.
    if(active_sps->sps_disp.vui_parameters_present_flag && active_sps->sps_disp.vui_seq_parameters.bitstream_restriction_flag) {

        if (active_sps->sps_disp.vui_seq_parameters.max_dec_frame_buffering > p_dpb->BumpLevel) {
            //MFD_PARSER_DEBUG(ERROR_H264_DPB);
            //// err handling here
        }
        else {
            p_dpb->BumpLevel = (active_sps->sps_disp.vui_seq_parameters.max_dec_frame_buffering > 1) ?
                               (active_sps->sps_disp.vui_seq_parameters.max_dec_frame_buffering) : 1;
        }
    }


    // A new sequence means automatic frame release
    //sei_information.disp_frozen = 0;

    return;
} //// End --- dpb_idr_memory_management
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_remove_frame_from_dpb ()
+//
+// remove one frame from DPB
+// The parameter index, is the location of the frame to be removed in the
+// fs_dpb_idc list. The used size is decremented by one
+//////////////////////////////////////////////////////////////////////////////
+
+void h264_dpb_remove_frame_from_dpb(h264_DecodedPictureBuffer *p_dpb, int32_t idx)
+{
+ int32_t fs_idc;
+ uint32_t i;
+
+ fs_idc = p_dpb->fs_dpb_idc[idx];
+
+ h264_dpb_set_active_fs(p_dpb, fs_idc);
+ viddec_h264_set_is_frame_used(active_fs, 0);
+
+ //add to support frame relocation interface to host
+ if(!(viddec_h264_get_is_non_existent(active_fs)))
+ {
+ p_dpb->frame_id_need_to_be_removed[p_dpb->frame_numbers_need_to_be_removed] = p_dpb->fs[fs_idc].fs_idc;
+ p_dpb->frame_numbers_need_to_be_removed ++;
+ }
+
+ ///////////////////////////////////////// Reset FS
+ p_dpb->fs[fs_idc].fs_idc = MPD_DPB_FS_NULL_IDC;
+
+ /////Remove unused frame from dpb-list
+ i = idx;
+ while( (i + 1)< p_dpb->used_size)
+ {
+ p_dpb->fs_dpb_idc[i] = p_dpb->fs_dpb_idc[i + 1];
+ i ++;
+ }
+ p_dpb->fs_dpb_idc[i] = MPD_DPB_FS_NULL_IDC;
+
+ ////////////////////////////
+ p_dpb->used_size--;
+
+ return;
+}
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_remove_unused_frame_from_dpb ()
+//
+// Remove a picture from DPB which is no longer needed.
+// Search for a frame which is not used for reference and has previously been placed
+// in the output queue - if find one call h264_dpb_remove_frame_from_dpb() and
+// set flag 1
+//////////////////////////////////////////////////////////////////////////////
+
+void h264_dpb_remove_unused_frame_from_dpb(h264_DecodedPictureBuffer *p_dpb, int32_t * flag)
+{
+ uint32_t idx;
+ int32_t first_non_exist_valid, non_exist_idx;
+ int32_t used_for_reference = 0;
+
+ *flag = 0;
+ first_non_exist_valid = 0x0;
+ non_exist_idx = 0x0;
+
+ for (idx = 0; (idx < p_dpb->used_size) && (*flag == 0); idx++)
+ {
+ h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dpb_idc[idx]);
+ h264_dpb_is_used_for_reference(&used_for_reference);
+
+ //if( (used_for_reference == 0x0 ) && active_fs->is_output && active_fs->is_non_existent == 0x0)
+ //{
+ //PRINTF(MFD_NONE, " requesting to send FREE: fs_idc = %d fb_id = %d \n", active_fs->fs_idc, active_fs->fb_id);
+ //dpb_release_fb(&h264_dpb, active_fs->fb_id, 1);
+ //}
+
+ if (viddec_h264_get_is_output(active_fs) && (used_for_reference == 0))
+ {
+ h264_dpb_remove_frame_from_dpb(p_dpb, idx);
+ *flag = 1;
+ }
+/*
+/////// Removed following OLO source (Sodaville H.D)
+ else if ( (first_non_exist_valid == 0x0) && active_fs->is_non_existent )
+ {
+ first_non_exist_valid = 0x01;
+ non_exist_idx = idx;
+ }
+*/
+ }
+/*
+/////// Removed following OLO source (Sodaville H.D)
+ if ( *flag == 0x0 && first_non_exist_valid) {
+ h264_dpb_remove_frame_from_dpb(p_dpb,non_exist_idx);
+ *flag = 1;
+ }
+*/
+ return;
+} //// End of h264_dpb_remove_unused_frame_from_dpb
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_get_smallest_poc ()
+//
+// find smallest POC in the DPB which has not as yet been output
+// This function only checks for frames and dangling fields...
+// unless the dpb used size is one, in which case it will accept an unpaired field
+//////////////////////////////////////////////////////////////////////////////
+void h264_dpb_get_smallest_poc(h264_DecodedPictureBuffer *p_dpb, int32_t *poc, int32_t *pos)
+{
+ int32_t poc_int;
+ uint32_t idx;
+ int32_t first_non_output = 1;
+
+ *pos = MPD_DPB_FS_NULL_IDC;
+
+ h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dpb_idc[0]);
+ poc_int = active_fs->frame.poc;
+
+ for (idx = 0; idx < p_dpb->used_size; idx++)
+ {
+ h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dpb_idc[idx]);
+
+ if (viddec_h264_get_is_output(active_fs) == 0)
+ {
+ //PRINTF(MFD_NONE, " active_fs->fs_idc = %d active_fs->is_used = %d, active_fs->is_dangling = %d , active_fs->poc = %d \n", active_fs->fs_idc, active_fs->is_used, active_fs->is_dangling, active_fs->poc);
+ if ((viddec_h264_get_is_used(active_fs) == 3) || (viddec_h264_get_is_dangling(active_fs)))
+ {
+ if (first_non_output)
+ {
+ *pos = idx;
+ first_non_output = 0;
+ poc_int = active_fs->frame.poc;
+ }
+ else if (poc_int > active_fs->frame.poc)
+ {
+ poc_int = active_fs->frame.poc;
+ *pos = idx;
+ }
+ }
+ else if (p_dpb->used_size == 1)
+ {
+ poc_int = active_fs->frame.poc;
+ *pos = idx;
+ }
+ }
+ }
+
+ *poc = poc_int;
+
+ return;
+}
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_split_field ()
+//
+// Extract field information from a frame
+//////////////////////////////////////////////////////////////////////////////
+
+void h264_dpb_split_field (h264_Info * pInfo)
+{
+
+ //active_fs->frame.poc = active_fs->frame.poc;
+ // active_fs->top_field.poc = active_fs->frame.poc;
+ // This line changed on 11/05/05 KMc
+ active_fs->top_field.poc = pInfo->img.toppoc;
+ active_fs->bottom_field.poc = pInfo->img.bottompoc;
+
+ active_fs->top_field.used_for_reference = active_fs->frame.used_for_reference & 1;
+ active_fs->bottom_field.used_for_reference = active_fs->frame.used_for_reference >> 1;
+
+ active_fs->top_field.is_long_term = active_fs->frame.is_long_term;
+ active_fs->bottom_field.is_long_term = active_fs->frame.is_long_term;
+
+ active_fs->long_term_frame_idx = active_fs->frame.long_term_frame_idx;
+ active_fs->top_field.long_term_frame_idx = active_fs->frame.long_term_frame_idx;
+ active_fs->bottom_field.long_term_frame_idx = active_fs->frame.long_term_frame_idx;
+
+
+ // Assign field mvs attached to MB-Frame buffer to the proper buffer
+ //! Generate field MVs from Frame MVs
+ // ...
+ // these will be done in RTL through using proper memory mapping
+ return;
+}
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_combine_field (int32_t use_old)
+//
+// Generate a frame from top and bottom fields
+//////////////////////////////////////////////////////////////////////////////
+
+void h264_dpb_combine_field(int32_t use_old)
+{
+
+ //remove warning
+ use_old = use_old;
+
+ active_fs->frame.poc = (active_fs->top_field.poc < active_fs->bottom_field.poc)?
+ active_fs->top_field.poc: active_fs->bottom_field.poc;
+
+ //active_fs->frame.poc = active_fs->poc;
+
+
+ active_fs->frame.used_for_reference = active_fs->top_field.used_for_reference |(active_fs->bottom_field.used_for_reference);
+
+ active_fs->frame.is_long_term = active_fs->top_field.is_long_term |(active_fs->bottom_field.is_long_term <<1);
+
+ if (active_fs->frame.is_long_term)
+ active_fs->frame.long_term_frame_idx = active_fs->long_term_frame_idx;
+
+ return;
+
+}
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_sliding_window_memory_management ()
+//
+// Perform Sliding window decoded reference picture marking process
+// It must be the reference frame, complementary reference field pair
+// or non-paired reference field that has the smallest value of
+// FrameNumWrap which is marked as unused for reference. Note : We CANNOT
+// simply use frame_num!!!!
+//
+// Although we hold frame_num_wrap in SW, currently, this is not
+// being updated for every picture (the b-picture parameter non-update
+// phenomenon of the reference software)
+//////////////////////////////////////////////////////////////////////////////
+
+void h264_dpb_sliding_window_memory_management(h264_DecodedPictureBuffer *p_dpb, int32_t NonExisting, int32_t num_ref_frames)
+{
+ // if this is a reference pic with sliding window, unmark first ref frame
+ // should this be (p_dpb->ref_frames_in_buffer + p_dpb->ltref_frames_in_buffer)
+ // Rem: adaptive marking can be on a slice by slice basis so we
+ // could have pictures merked as long term reference in adaptive marking and then
+ // the marking mode changed back to sliding_window_memory_management
+ if (p_dpb->ref_frames_in_buffer >= (num_ref_frames - p_dpb->ltref_frames_in_buffer))
+ {
+ h264_dpb_unmark_for_reference(p_dpb, p_dpb->fs_ref_idc[0]);
+ h264_dpb_remove_ref_list(p_dpb, p_dpb->fs_ref_idc[0]);
+
+ if(NonExisting == 0)
+ {
+ h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc);
+ viddec_h264_set_is_frame_long_term(active_fs, 0);
+ }
+ }
+}
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_store_picture_in_dpb ()
+//
+// First we run the marking procedure.
// Then, before we add the current frame_store to the list of reference stores we run some checks
+// These include checking the number of existing reference frames
+// in DPB and if necessary, flushing frames.
+//
+// \param NonExisting
+// If non-zero this is called to store a non-existing frame resulting from gaps_in_frame_num
+//////////////////////////////////////////////////////////////////////////////
+
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_frame_output ()
+//
+// If direct == 1, Directly output a frame without storing it in the p_dpb->
+// Therefore we must set is_used to 0, which I guess means it will not appear
+// in the fs_dpb_idc list and is_output to 1 which means it should be in the
+// fs_output_idc list.
+//
// If it is a non-existing picture we do not actually place it in the output queue
+//////////////////////////////////////////////////////////////////////////////
+
// h264_dpb_frame_output ()
//
// Place the picture in frame store fs_idc on the display output list,
// after applying the SEI display-freeze, broken-link, recovery-point and
// open-GOP skip rules. *existing is set to 1 for a real picture queued for
// display, 0 for a non-existing placeholder. If direct != 0 the picture is
// output without being kept in the DPB (all usage/reference marks cleared).
void h264_dpb_frame_output(h264_Info * pInfo,int32_t fs_idc, int32_t direct, int32_t * existing)
{
    h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;

    h264_dpb_set_active_fs(p_dpb, fs_idc);

    //h264_dpb_push_output_queue();
    // SEI "display freeze": fields whose POC lies in [freeze_POC, release_POC)
    // are marked skipped; reaching release_POC ends the freeze.
    if(pInfo->sei_information.disp_frozen)
    {
        // check pocs
        if(active_fs->top_field.poc >= pInfo->sei_information.freeze_POC)
        {
            if(active_fs->top_field.poc < pInfo->sei_information.release_POC)
            {
                viddec_h264_set_is_top_skipped(active_fs, 1);
            }
            else
            {
                pInfo->sei_information.disp_frozen = 0;
            }
        }

        if(active_fs->bottom_field.poc >= pInfo->sei_information.freeze_POC)
        {
            if(active_fs->bottom_field.poc < pInfo->sei_information.release_POC)
            {
                viddec_h264_set_is_bottom_skipped(active_fs, 1);
            }
            else
            {
                pInfo->sei_information.disp_frozen = 0;
            }
        }
    }

    if ( viddec_h264_get_broken_link_picture(active_fs) )
        pInfo->sei_information.broken_link = 1;

    if( pInfo->sei_information.broken_link)
    {
        // Check if this was the recovery point picture - going to have recovery point on
        // a frame basis
        if(viddec_h264_get_recovery_pt_picture(active_fs))
        {
            pInfo->sei_information.broken_link = 0;
            // Also reset wait on sei recovery point picture
            p_dpb->WaitSeiRecovery = 0;
        }
        else
        {
            // Still broken: suppress display of both fields (mask 3).
            viddec_h264_set_is_frame_skipped(active_fs, 3);
        }
    }
    else
    {
        // even if this is not a broken-link, we need to follow SEI recovery point rules
        // Did we use SEI recovery point for the last restart?
        if ( p_dpb->WaitSeiRecovery )
        {
            if ( viddec_h264_get_recovery_pt_picture(active_fs) ) {
                p_dpb->WaitSeiRecovery = 0;
            } else {
                viddec_h264_set_is_frame_skipped(active_fs, 3);
            }
        }
    }

    // Output is suspended until an open-GOP entry point is seen; pictures
    // before it are skipped.
    if ( p_dpb->SuspendOutput )
    {
        if ( viddec_h264_get_open_gop_entry(active_fs) ) {
            p_dpb->SuspendOutput = 0;
        } else{
            viddec_h264_set_is_frame_skipped(active_fs, 3);
        }
    }

    //h264_send_new_display_frame(0x0);
    viddec_h264_set_is_output(active_fs, 1);

    // Real frames go on the display list; non-existing placeholders
    // (from gaps_in_frame_num) are never displayed.
    if(viddec_h264_get_is_non_existent(active_fs) == 0)
    {
        *existing = 1;
        p_dpb->frame_id_need_to_be_displayed[p_dpb->frame_numbers_need_to_be_displayed]=active_fs->fs_idc;
        p_dpb->frame_numbers_need_to_be_displayed++;

        //if(direct)
        //h264_dpb_remove_frame_from_dpb(p_dpb, active_fs->fs_idc); // Remove dpb.fs_dpb_idc[pos]
    }
    else
    {
        *existing = 0;
    }

    // Direct output bypasses the DPB: clear all usage/reference marks so the
    // store never appears in the dpb list.
    if(direct) {
        viddec_h264_set_is_frame_used(active_fs, 0);
        active_fs->frame.used_for_reference = 0;
        active_fs->top_field.used_for_reference = 0;
        active_fs->bottom_field.used_for_reference = 0;
        active_fs->fs_idc = MPD_DPB_FS_NULL_IDC;
    }
    return;
} ///////// End of dpb frame output
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_output_one_frame_from_dpb ()
+//
// Output one frame stored in the DPB. Basically this results in its placement
// in the fs_output_idc list.
// Placement in the output queue should cause an automatic removal from the dpb
// if the frame store is not being used as a reference.
// This may need another param for a frame request so that it definitely outputs one non-existing frame
+//////////////////////////////////////////////////////////////////////////////
// h264_dpb_output_one_frame_from_dpb ()
//
// Move at most one frame from the DPB to the display output queue.
//  - direct != 0: output the currently-decoded frame store immediately,
//    bypassing the DPB. NOTE(review): this path never sets is_pushed, so
//    direct output returns 0 -- confirm that is intended.
//  - otherwise: output the non-output picture with the smallest POC and
//    remove it from the DPB once no longer needed for reference.
// Returns 1 when a real (existing) frame was pushed, and also 1 on the
// error-recovery path where a full-of-references DPB forced an eviction.
int32_t h264_dpb_output_one_frame_from_dpb(h264_Info* pInfo,int32_t direct, int32_t request, int32_t num_ref_frames)
{
    h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;
    int32_t poc;
    int32_t pos;
    int32_t used_for_reference;

    int32_t existing = 0;
    int32_t is_refused = 0;
    int32_t is_pushed = 0;

    //remove warning
    request = request;

    if(direct)
    {
        h264_dpb_frame_output(pInfo, p_dpb->fs_dec_idc, 1, &existing);
    }
    else
    {
        if(p_dpb->used_size != 0)
        {
            // Should this be dpb.not_as_yet_output_num > 0 ??
            // There should maybe be a is_refused == 0 condition instead...
            // Loop until a real frame is output (existing) or nothing is left.
            while ((p_dpb->used_size > 0) && (existing == 0) && (is_refused == 0))
            {
                // find smallest non-output POC
                h264_dpb_get_smallest_poc(p_dpb, &poc, &pos);
                if (pos != MPD_DPB_FS_NULL_IDC)
                {
                    // put it into the output queue
                    h264_dpb_frame_output(pInfo, p_dpb->fs_dpb_idc[pos], 0, &existing);

                    p_dpb->last_output_poc = poc;
                    if (existing) is_pushed = 1;
                    // If non-reference, free frame store and move empty store to end of buffer

                    h264_dpb_is_used_for_reference(&used_for_reference);
                    if (!(used_for_reference))
                        h264_dpb_remove_frame_from_dpb(p_dpb, pos); // Remove dpb.fs_dpb_idc[pos]
                }
                else
                {
                    int32_t flag;
                    uint32_t idx;

                    // This is basically an error condition caused by too many reference frames in the DPB.
                    // It should only happen in errored streams, and can happen if this picture had an MMCO,
                    // thus disabling h264_dpb_sliding_window_memory_management(), which would normally have
                    // unmarked the oldest reference frame.
                    h264_dpb_sliding_window_memory_management(p_dpb, 0,num_ref_frames);
                    h264_dpb_remove_unused_frame_from_dpb(p_dpb, &flag);

                    // If the sliding window freed nothing, forcibly unmark the
                    // first reference frame still present in the DPB.
                    if (flag == 0) {
                        for (idx = 0; idx < p_dpb->used_size; idx++)
                        {
                            h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dpb_idc[idx]);
                            h264_dpb_is_used_for_reference(&used_for_reference);

                            if (used_for_reference) {
                                break;
                            }
                        }

                        if (idx < p_dpb->used_size) {
                            // Short term
                            h264_dpb_unmark_for_reference(p_dpb, p_dpb->fs_dpb_idc[idx]);
                            h264_dpb_remove_ref_list(p_dpb, p_dpb->fs_dpb_idc[idx]);

                            // Long term
                            h264_dpb_unmark_for_long_term_reference(p_dpb, p_dpb->fs_dpb_idc[idx]);
                            h264_dpb_remove_ltref_list(p_dpb, p_dpb->fs_dpb_idc[idx]);

                            // Remove from DPB
                            h264_dpb_remove_unused_frame_from_dpb(p_dpb, &flag);
                        }
                    }
                    return 1;
                }
            }
        }
    }

    return is_pushed;
}
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_queue_update
+//
+// This should be called anytime the output queue might be changed
+//////////////////////////////////////////////////////////////////////////////
+
+int32_t h264_dpb_queue_update(h264_Info* pInfo,int32_t push, int32_t direct, int32_t frame_request, int32_t num_ref_frames)
+{
+
+ int32_t frame_output = 0;
+
+ if(push)
+ {
+ frame_output = h264_dpb_output_one_frame_from_dpb(pInfo, direct, 0, num_ref_frames);
+ }
+ else if(frame_request)
+ {
+ frame_output = h264_dpb_output_one_frame_from_dpb(pInfo, 0, 1,num_ref_frames);
+ }
+
+
+ return frame_output;
+
+}
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_flush_dpb ()
+//
+// Unmarks all reference pictures in the short-term and long term lists and
+// in doing so resets the lists.
+//
+// Flushing the dpb, adds all the current frames in the dpb, not already on the output list
+// to the output list and removes them from the dpb (they will all be marked as unused for
+// reference first)
+//////////////////////////////////////////////////////////////////////////////
+
+void h264_dpb_flush_dpb (h264_Info* pInfo,int32_t output_all, int32_t keep_complement, int32_t num_ref_frames)
+{
+ h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;
+
+ int32_t idx, flag;
+ int32_t ref_frames_in_buffer;
+
+ ref_frames_in_buffer = p_dpb->ref_frames_in_buffer;
+
+ for (idx = 0; idx < ref_frames_in_buffer; idx++){
+ h264_dpb_unmark_for_reference(p_dpb, p_dpb->fs_ref_idc[0]);
+ h264_dpb_remove_ref_list(p_dpb, p_dpb->fs_ref_idc[0]);
+ }
+
+ ref_frames_in_buffer = p_dpb->ltref_frames_in_buffer;
+
+ for (idx = 0; idx < ref_frames_in_buffer; idx++)
+ {
+ h264_dpb_unmark_for_long_term_reference(p_dpb, p_dpb->fs_ltref_idc[0]);
+ h264_dpb_remove_ltref_list(p_dpb, p_dpb->fs_ltref_idc[0]);
+ }
+
+ // output frames in POC order
+ if (output_all) {
+ while (p_dpb->used_size - keep_complement) {
+ h264_dpb_queue_update(pInfo, 1, 0, 0,num_ref_frames);
+ }
+ }
+
+ flag = 1;
+ while (flag) {
+ h264_dpb_remove_unused_frame_from_dpb(p_dpb, &flag);
+ }
+
+ return;
+}
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_reset_dpb ()
+//
+// Used to reset the contents of dpb
+// Must calculate memory (aligned) pointers for each of the possible frame stores
+//
+// Also want to calculate possible max dpb size in terms of frames
+// We should have an active SPS when we call this ftn to calc bumping level
+//////////////////////////////////////////////////////////////////////////////
// h264_dpb_reset_dpb ()
//
// Reset the DPB contents (new sequence and/or size change): mark a
// half-decoded trailing field as dangling, run IDR memory management to
// empty the DPB (recomputing the bumping level from the active SPS), and
// on SizeChange adopt the new picture dimensions and flush everything out.
void h264_dpb_reset_dpb(h264_Info * pInfo,int32_t PicWidthInMbs, int32_t FrameHeightInMbs, int32_t SizeChange, int32_t no_output_of_prior_pics_flag)
{
    h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;

    int32_t num_ref_frames = pInfo->active_SPS.num_ref_frames;


    // If half way through a frame then Frame in progress will still be high,
    // so mark the previous field as a dangling field. This is also needed to
    // keep cs7050_sif_dpb_disp_numb_ptr correct. Better to reset instead?
    if(p_dpb->used_size)
    {
        int32_t idx;
        idx = p_dpb->used_size-1;
        if (p_dpb->fs_dpb_idc[idx] != MPD_DPB_FS_NULL_IDC)
        {
            h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dpb_idc[idx]);

            // is_used != 3: only one field of the store was decoded.
            if(viddec_h264_get_is_used(active_fs) != 3)
                h264_dpb_mark_dangling_field(p_dpb, active_fs->fs_idc); //, DANGLING_TYPE_DPB_RESET
        }
    }

    // initialize software DPB
    if(active_fs) {
        viddec_h264_set_dec_structure(active_fs, INVALID);
    }
    h264_dpb_idr_memory_management(pInfo, &pInfo->active_SPS, no_output_of_prior_pics_flag); // implied no_output_of_prior_pics_flag==1


    // May always be a size change which calls this function now...
    // could eliminate below branch
    if(SizeChange)
    {

        /***
        Note : 21/03/2005 14:16
        Danger associated with resetting curr_alloc_mem as it would allow the FW to reallocate
        frame stores from 0 -> NUM_FRAME_STORES again - could lead to queue overflow and corruption

        Placed in size change condition in the hope that this will only ensure dpb is empty
        and thus this behaviour is valid before continuing again
        ***/


        p_dpb->PicWidthInMbs = PicWidthInMbs;
        p_dpb->FrameHeightInMbs = FrameHeightInMbs;

        p_dpb->fs_dec_idc = MPD_DPB_FS_NULL_IDC;
        //Flush the current DPB.
        h264_dpb_flush_dpb(pInfo, 1,0,num_ref_frames);
    }

    return;
} ///// End of reset DPB
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+// ---------------------------------------------------------------------------
+// Note that if an 'missing_pip_fb' condition exists, the message will
+// sent to the host each time setup_free_fb is called. However, since this
+// condition is not expected to happen if pre-defined steps are followed, we let
+// it be for now and will change it if required. Basically, as long as host
+// enables PiP after adding PiP buffers and disables PiP before removing buffers
+// and matches PiP fb_id's with normal decode fb_id's this condition should
+// not occur.
+// ---------------------------------------------------------------------------
+int32_t dpb_setup_free_fb( h264_DecodedPictureBuffer *p_dpb, uint8_t* fb_id, pip_setting_t* pip_setting )
+{
+ uint8_t idx;
+
+ //remove warning
+ pip_setting = pip_setting;
+
+
+ for (idx = 0; idx < NUM_DPB_FRAME_STORES; idx++)
+ {
+ if (p_dpb->fs[idx].fs_idc == MPD_DPB_FS_NULL_IDC)
+ {
+ *fb_id = idx;
+ break;
+ }
+ }
+
+ if(idx == NUM_DPB_FRAME_STORES)
+ return 1;
+
+ p_dpb->fs[idx].fs_idc = idx;
+
+ return 0;
+
+}
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_assign_frame_store ()
+//
+// may need a non-existing option parameter
+//
+
// h264_dpb_assign_frame_store ()
//
// Claim a free frame store for the next picture: the decoded picture when
// NonExisting == 0, or a gaps_in_frame_num placeholder otherwise. If no
// store is free, the smallest-POC frame is output and evicted until one
// becomes available. The claimed store is reset to a known-invalid state.
// Always returns 1.
int32_t h264_dpb_assign_frame_store(h264_Info * pInfo, int32_t NonExisting)
{
    uint8_t idc = MPD_DPB_FS_NULL_IDC;
    pip_setting_t pip_setting;
    h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;


    while( dpb_setup_free_fb(p_dpb, &idc, &pip_setting) != 0 ) {
        ///
        /// Generally this is triggered by an error case, no more frame buffer available for next
        /// What we do here is just remove one with min-POC before get more info
        ///
        /// NOTE(review): if h264_dpb_get_smallest_poc() finds no candidate
        /// (pos == MPD_DPB_FS_NULL_IDC) nothing is evicted and this loop can
        /// spin forever -- verify that case cannot occur here.

        int32_t pos = 0, poc = 0, existing = 1;

        // find smallest non-output POC
        h264_dpb_get_smallest_poc(p_dpb, &poc, &pos);
        if (pos != MPD_DPB_FS_NULL_IDC)
        {
            // put it into the output queue
            h264_dpb_frame_output(pInfo, p_dpb->fs_dpb_idc[pos], 0, &existing);
            p_dpb->last_output_poc = poc;
            h264_dpb_remove_frame_from_dpb(p_dpb, pos); // Remove dpb.fs_dpb_idc[pos]
        }
    }


    // Record the claimed store on the appropriate slot.
    if(NonExisting) {
        p_dpb->fs_non_exist_idc = idc;
    }else {
        p_dpb->fs_dec_idc = idc;
    }

    //add to support frame relocation interface to host
    if(!NonExisting)
    {
        p_dpb->frame_numbers_need_to_be_allocated = 1;
        p_dpb->frame_id_need_to_be_allocated = p_dpb->fs_dec_idc;
    }


    ///////////////////////////////h264_dpb_reset_fs();
    h264_dpb_set_active_fs(p_dpb, idc);
    active_fs->fs_flag_1 = 0;
    active_fs->fs_flag_2 = 0;
    viddec_h264_set_is_non_existent(active_fs, NonExisting);
    // Non-existing placeholders are marked as already output (never displayed).
    viddec_h264_set_is_output(active_fs, (NonExisting?1:0));

    // Invalid picture type for both fields until the real type is known.
    active_fs->pic_type = ((FRAME_TYPE_INVALID<<FRAME_TYPE_TOP_OFFSET)|(FRAME_TYPE_INVALID<<FRAME_TYPE_BOTTOM_OFFSET)); //----

    // Only put members in here which will not be reset somewhere else
    // and which could be used before they are overwritten again with
    // new valid values
    // eg ->is_used is reset on removal from dpb, no need for it here
    // ->poc would only be changed when we overwrite on insert_Picture_in_dpb()
    // but would be used by get_smallest_poc()
    // ->top.poc would also not be overwritten until a new valid value comes along,
    // but I don't think it is used before then so no need to reset
    //active_fs->is_long_term = 0;
    active_fs->frame.used_for_reference = 0;
    active_fs->frame.poc = 0;

    return 1;
}
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_update_queue_dangling_field (h264_Info * pInfo)
+//
+// Update DPB for Dangling field special case
+//
+void h264_dpb_update_queue_dangling_field(h264_Info * pInfo)
+{
+ h264_DecodedPictureBuffer *dpb_ptr = &pInfo->dpb;
+ int32_t prev_pic_unpaired_field = 0;
+
+ if(dpb_ptr->used_size > dpb_ptr->BumpLevel)
+ {
+ if (dpb_ptr->fs_dpb_idc[dpb_ptr->used_size-1] != MPD_DPB_FS_NULL_IDC)
+ {
+ h264_dpb_set_active_fs(dpb_ptr, dpb_ptr->fs_dpb_idc[dpb_ptr->used_size-1]);
+ if(viddec_h264_get_is_used(active_fs) != 3)
+ {
+ prev_pic_unpaired_field = 1;
+ }
+ }
+
+ if (pInfo->img.structure != FRAME)
+ {
+ // To prove this is the second field,
+ // 1) The previous picture is an (as yet) unpaired field
+ if(prev_pic_unpaired_field)
+ {
+ // If we establish the previous pic was an unpaired field and this picture is not
+ // its complement, the previous picture was a dangling field
+ if(pInfo->img.second_field == 0) {
+ while(dpb_ptr->used_size > dpb_ptr->BumpLevel)
+ h264_dpb_queue_update(pInfo, 1, 0, 0,pInfo->active_SPS.num_ref_frames); // flush a frame
+ }
+ }
+ }
+ else if (prev_pic_unpaired_field) {
+ while(dpb_ptr->used_size > dpb_ptr->BumpLevel)
+ h264_dpb_queue_update(pInfo, 1, 0, 0,pInfo->active_SPS.num_ref_frames); // flush a frame
+ }
+ }
+
+
+ return;
+} ///// End of init Frame Store
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_init_frame_store (h264_Info * pInfo)
+//
+// Set the frame store to be used in decoding the picture
+//
+
+void h264_dpb_init_frame_store(h264_Info * pInfo)
+{
+ h264_DecodedPictureBuffer *dpb_ptr = &pInfo->dpb;
+
+ int32_t free_fs_found;
+ int32_t idx = 0;
+ int32_t prev_pic_unpaired_field = 0;
+ int32_t prev_idc = MPD_DPB_FS_NULL_IDC;
+ int32_t structure = pInfo->img.structure;
+
+ if(dpb_ptr->used_size)
+ {
+ idx = dpb_ptr->used_size-1;
+ prev_idc = dpb_ptr->fs_dpb_idc[idx];
+ }
+
+ if (prev_idc != MPD_DPB_FS_NULL_IDC)
+ {
+ h264_dpb_set_active_fs(dpb_ptr, dpb_ptr->fs_dpb_idc[dpb_ptr->used_size-1]);
+ if(viddec_h264_get_is_used(active_fs) != 3)
+ {
+ //PRINTF(MFD_NONE, " FN: %d active_fs->is_used = %d \n", (h264_frame_number+1), active_fs->is_used);
+ prev_pic_unpaired_field = 1;
+ }
+ }
+
+ //if ((pInfo->img.curr_has_mmco_5) || (pInfo->img.idr_flag)) curr_fld_not_prev_comp = 1;
+
+ if (structure != FRAME)
+ {
+
+ // To prove this is the second field,
+ // 1) The previous picture is an (as yet) unpaired field
+ if(prev_pic_unpaired_field)
+ {
+ // If we establish the previous pic was an unpaired field and this picture is not
+ // its complement, the previous picture was a dangling field
+ if(pInfo->img.second_field == 0)
+ h264_dpb_mark_dangling_field(dpb_ptr, active_fs->fs_idc); //, DANGLING_TYPE_FIELD
+ }
+ }
+ else if (prev_pic_unpaired_field) {
+ h264_dpb_mark_dangling_field(dpb_ptr, active_fs->fs_idc); //, DANGLING_TYPE_FRAME
+ }
+
+ free_fs_found = 0;
+
+ // If this is not a second field, we must find a free space for the current picture
+ if (!(pInfo->img.second_field))
+ {
+ dpb_ptr->fs_dec_idc = MPD_DPB_FS_NULL_IDC;
+ free_fs_found = h264_dpb_assign_frame_store(pInfo, 0);
+ //h264_frame_number++;
+ //PRINTF(MFD_NONE, " FN: %d (inc) fs_idc = %d \n", (h264_frame_number+1), dpb.fs_dec_idc);
+ }
+
+ h264_dpb_set_active_fs(dpb_ptr, dpb_ptr->fs_dec_idc);
+
+ ////////////// TODO: THe following init
+#if 1
+ if( pInfo->img.second_field) {
+ //active_fs->second_dsn = pInfo->img.dsn;
+ //active_fs->prev_dsn = pInfo->img.prev_dsn;
+ if (active_fs->pic_type == FRAME_TYPE_IDR ||
+ active_fs->pic_type == FRAME_TYPE_I) {
+
+ viddec_h264_set_first_field_intra(active_fs, 1);
+ } else {
+ viddec_h264_set_first_field_intra(active_fs, 0);
+ }
+
+ }
+ else {
+ //active_fs->first_dsn = pInfo->img.dsn;
+ //active_fs->prev_dsn = pInfo->img.prev_dsn;
+ viddec_h264_set_first_field_intra(active_fs, 0);
+ }
+
+ if (pInfo->img.structure == FRAME) {
+ //active_fs->second_dsn = 0x0;
+ }
+
+ if ( pInfo->sei_information.broken_link_pic )
+ {
+ viddec_h264_set_broken_link_picture(active_fs, 1);
+ pInfo->sei_information.broken_link_pic = 0;
+ }
+
+ if ((pInfo->img.frame_num == pInfo->sei_information.recovery_frame_num)&&(pInfo->SliceHeader.nal_ref_idc != 0))
+ viddec_h264_set_recovery_pt_picture(active_fs, 1);
+
+ //if ((( gRestartMode.aud ) || ( gRestartMode.sei )) && ( !gRestartMode.idr))
+ if(pInfo->img.recovery_point_found == 6)
+ {
+ viddec_h264_set_open_gop_entry(active_fs, 1);
+ pInfo->dpb.SuspendOutput = 1;
+ }
+#endif
+
+ if ((pInfo->img.second_field) || (free_fs_found))
+ {
+ viddec_h264_set_dec_structure(active_fs, pInfo->img.structure);
+ viddec_h264_set_is_output(active_fs, 0);
+
+ switch(pInfo->img.structure)
+ {
+ case (FRAME) :{
+ active_fs->frame.pic_num = pInfo->img.frame_num;
+ active_fs->frame.long_term_frame_idx = 0;
+ active_fs->frame.long_term_pic_num = 0;
+ active_fs->frame.used_for_reference = 0;
+ active_fs->frame.is_long_term = 0;
+ //active_fs->frame.structure = pInfo->img.structure;
+ active_fs->frame.poc = pInfo->img.framepoc;
+ }break;
+ case (TOP_FIELD) :{
+ active_fs->top_field.pic_num = pInfo->img.frame_num;
+ active_fs->top_field.long_term_frame_idx = 0;
+ active_fs->top_field.long_term_pic_num = 0;
+ active_fs->top_field.used_for_reference = 0;
+ active_fs->top_field.is_long_term = 0;
+ //active_fs->top_field.structure = pInfo->img.structure;
+ active_fs->top_field.poc = pInfo->img.toppoc;
+ }break;
+ case(BOTTOM_FIELD) :{
+ active_fs->bottom_field.pic_num = pInfo->img.frame_num;
+ active_fs->bottom_field.long_term_frame_idx = 0;
+ active_fs->bottom_field.long_term_pic_num = 0;
+ active_fs->bottom_field.used_for_reference = 0;
+ active_fs->bottom_field.is_long_term = 0;
+ //active_fs->bottom_field.structure = pInfo->img.structure;
+ active_fs->bottom_field.poc = pInfo->img.bottompoc;
+ }break;
+ }
+ }
+ else
+ {
+ // Need to drop a frame or something here
+ }
+
+ return;
+} ///// End of init Frame Store
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// Decoding POC for current Picture
+// 1) pic_order_cnt_type (0, 1, 2)
+//
+//////////////////////////////////////////////////////////////////////////////
+
+void h264_hdr_decoding_poc (h264_Info * pInfo,int32_t NonExisting, int32_t frame_num)
+{
+ int32_t MaxPicOrderCntLsb = (1<<(pInfo->active_SPS.log2_max_pic_order_cnt_lsb_minus4+4));
+ int32_t delta_pic_order_count[2];
+ int32_t MaxFrameNum = 1 << (pInfo->active_SPS.log2_max_frame_num_minus4 + 4);
+
+ int32_t AbsFrameNum =0;
+ int32_t ExpectedDeltaPerPicOrderCntCycle =0;
+ int32_t PicOrderCntCycleCnt = 0;
+ int32_t FrameNumInPicOrderCntCycle =0;
+ int32_t ExpectedPicOrderCnt =0;
+
+ int32_t actual_frame_num =0;
+
+
+
+ if(NonExisting) actual_frame_num = frame_num;
+ else actual_frame_num = pInfo->img.frame_num;
+
+ switch (pInfo->active_SPS.pic_order_cnt_type)
+ {
+ case 0:
+ if(NonExisting != 0) break;
+
+ if (pInfo->SliceHeader.idr_flag)
+ {
+ pInfo->img.PicOrderCntMsb = 0;
+ pInfo->img.PrevPicOrderCntLsb = 0;
+ }
+ else if (pInfo->img.last_has_mmco_5)
+ {
+ if (pInfo->img.last_pic_bottom_field)
+ {
+ pInfo->img.PicOrderCntMsb = 0;
+ pInfo->img.PrevPicOrderCntLsb = 0;
+ }
+ else
+ {
+ pInfo->img.PicOrderCntMsb = 0;
+ pInfo->img.PrevPicOrderCntLsb = pInfo->img.toppoc;
+ }
+ }
+
+ // Calculate the MSBs of current picture
+ if((pInfo->img.pic_order_cnt_lsb < pInfo->img.PrevPicOrderCntLsb) &&
+ ((pInfo->img.PrevPicOrderCntLsb - pInfo->img.pic_order_cnt_lsb )>=(MaxPicOrderCntLsb>>1)) )
+ {
+ pInfo->img.CurrPicOrderCntMsb = pInfo->img.PicOrderCntMsb + MaxPicOrderCntLsb;
+ } else if ((pInfo->img.pic_order_cnt_lsb > pInfo->img.PrevPicOrderCntLsb) &&
+ ((pInfo->img.pic_order_cnt_lsb - pInfo->img.PrevPicOrderCntLsb ) > (MaxPicOrderCntLsb>>1)) )
+ {
+ pInfo->img.CurrPicOrderCntMsb = pInfo->img.PicOrderCntMsb - MaxPicOrderCntLsb;
+ } else
+ {
+ pInfo->img.CurrPicOrderCntMsb = pInfo->img.PicOrderCntMsb;
+ }
+
+ // 2nd
+
+ if(pInfo->img.field_pic_flag==0)
+ {
+ //frame pix
+ pInfo->img.toppoc = pInfo->img.CurrPicOrderCntMsb + pInfo->img.pic_order_cnt_lsb;
+ pInfo->img.bottompoc = pInfo->img.toppoc + pInfo->img.delta_pic_order_cnt_bottom;
+ pInfo->img.ThisPOC = pInfo->img.framepoc = (pInfo->img.toppoc < pInfo->img.bottompoc)? pInfo->img.toppoc : pInfo->img.bottompoc; // POC200301
+ }
+ else if (pInfo->img.bottom_field_flag==0)
+ { //top field
+ pInfo->img.ThisPOC= pInfo->img.toppoc = pInfo->img.CurrPicOrderCntMsb + pInfo->img.pic_order_cnt_lsb;
+ }
+ else
+ { //bottom field
+ pInfo->img.ThisPOC= pInfo->img.bottompoc = pInfo->img.CurrPicOrderCntMsb + pInfo->img.pic_order_cnt_lsb;
+ }
+ pInfo->img.framepoc=pInfo->img.ThisPOC;
+
+ if ( pInfo->img.frame_num != pInfo->old_slice.frame_num)
+ pInfo->img.PreviousFrameNum = pInfo->img.frame_num;
+
+ if(pInfo->SliceHeader.nal_ref_idc)
+ {
+ pInfo->img.PrevPicOrderCntLsb = pInfo->img.pic_order_cnt_lsb;
+ pInfo->img.PicOrderCntMsb = pInfo->img.CurrPicOrderCntMsb;
+ }
+
+ break;
+ case 1: {
+ if(NonExisting)
+ {
+ delta_pic_order_count[0] = 0;
+ delta_pic_order_count[1] = 0;
+ }
+ else
+ {
+ delta_pic_order_count[0] = ( pInfo->img.delta_pic_order_always_zero_flag ) ? 0 : pInfo->img.delta_pic_order_cnt[0];
+ delta_pic_order_count[1] = ( pInfo->img.delta_pic_order_always_zero_flag ) ? 0 :
+ ( (!pInfo->active_PPS.pic_order_present_flag) && (!(pInfo->img.field_pic_flag))) ? 0 :
+ pInfo->img.delta_pic_order_cnt[1];
+ }
+
+ // this if branch should not be taken during processing of a gap_in_frame_num pic since
+ // an IDR picture cannot produce non-existent frames...
+ if(pInfo->SliceHeader.idr_flag)
+ {
+ pInfo->img.FrameNumOffset = 0;
+ }
+ else
+ {
+
+ if (actual_frame_num < pInfo->img.PreviousFrameNum)
+ {
+ pInfo->img.FrameNumOffset = pInfo->img.PreviousFrameNumOffset + MaxFrameNum;
+ }
+ else
+ {
+ pInfo->img.FrameNumOffset = pInfo->img.PreviousFrameNumOffset;
+ }
+ }
+
+ // pInfo->img.num_ref_frames_in_pic_order_cnt_cycle set from SPS
+ // so constant between existent and non-existent frames
+ if (pInfo->img.num_ref_frames_in_pic_order_cnt_cycle)
+ AbsFrameNum = pInfo->img.FrameNumOffset + actual_frame_num;
+ else
+ AbsFrameNum = 0;
+
+ // pInfo->img.disposable_flag should never be true for a non-existent frame since these are always
+ // references...
+ if ((pInfo->SliceHeader.nal_ref_idc == 0) && (AbsFrameNum > 0)) AbsFrameNum = AbsFrameNum - 1;
+
+ // 3rd
+ ExpectedDeltaPerPicOrderCntCycle = pInfo->active_SPS.expectedDeltaPerPOCCycle;
+
+ if (AbsFrameNum)
+ {
+ // Rem: pInfo->img.num_ref_frames_in_pic_order_cnt_cycle takes max value of 255 (8 bit)
+ // Frame NUm may be 2^16 (17 bits)
+ // I guess we really have to treat AbsFrameNum as a 32 bit number
+ uint32_t temp = 0;
+ int32_t i=0;
+ int32_t offset_for_ref_frame[MAX_NUM_REF_FRAMES_IN_PIC_ORDER_CNT_CYCLE];
+
+ if (pInfo->img.num_ref_frames_in_pic_order_cnt_cycle)
+ PicOrderCntCycleCnt = ldiv_mod_u((uint32_t)(AbsFrameNum-1), (uint32_t)pInfo->img.num_ref_frames_in_pic_order_cnt_cycle, &temp);
+
+ ExpectedPicOrderCnt = mult_u((uint32_t)PicOrderCntCycleCnt, (uint32_t)ExpectedDeltaPerPicOrderCntCycle);
+
+ FrameNumInPicOrderCntCycle = temp;
+
+ //ExpectedPicOrderCnt +=pInfo->active_SPS.expectedDeltaPerPOCCycle;
+#ifndef USER_MODE
+ h264_Parse_Copy_Offset_Ref_Frames_From_DDR(pInfo, offset_for_ref_frame, pInfo->active_SPS.seq_parameter_set_id);
+ for (i = 0; i <= FrameNumInPicOrderCntCycle; i++)
+ ExpectedPicOrderCnt += offset_for_ref_frame[i];
+#else
+ for (i = 0; i <= FrameNumInPicOrderCntCycle; i++)
+ ExpectedPicOrderCnt += pInfo->active_SPS.offset_for_ref_frame[i];
+#endif
+ }
+ else {
+ ExpectedPicOrderCnt = 0;
+ }
+
+ if (pInfo->SliceHeader.nal_ref_idc == 0)
+ ExpectedPicOrderCnt += pInfo->img.offset_for_non_ref_pic;
+
+ if (!(pInfo->img.field_pic_flag))
+ {
+ pInfo->img.toppoc = ExpectedPicOrderCnt + delta_pic_order_count[0];
+ pInfo->img.bottompoc = pInfo->img.toppoc + pInfo->img.offset_for_top_to_bottom_field + delta_pic_order_count[1];
+ pInfo->img.framepoc = (pInfo->img.toppoc < pInfo->img.bottompoc)? pInfo->img.toppoc : pInfo->img.bottompoc;
+ pInfo->img.ThisPOC = pInfo->img.framepoc;
+ }
+ else if (!(pInfo->img.bottom_field_flag))
+ {
+ //top field
+ pInfo->img.toppoc = ExpectedPicOrderCnt + delta_pic_order_count[0];
+ pInfo->img.ThisPOC = pInfo->img.toppoc;
+ pInfo->img.bottompoc = 0;
+ }
+ else
+ {
+ //bottom field
+ pInfo->img.toppoc = 0;
+ pInfo->img.bottompoc = ExpectedPicOrderCnt + pInfo->img.offset_for_top_to_bottom_field + delta_pic_order_count[0];
+ pInfo->img.ThisPOC = pInfo->img.bottompoc;
+ }
+
+ //CONFORMANCE_ISSUE
+ pInfo->img.framepoc=pInfo->img.ThisPOC;
+
+ //CONFORMANCE_ISSUE
+ pInfo->img.PreviousFrameNum=pInfo->img.frame_num;
+ pInfo->img.PreviousFrameNumOffset=pInfo->img.FrameNumOffset;
+
+ }
+ break;
+ case 2: { // POC MODE 2
+ if (pInfo->SliceHeader.idr_flag)
+ {
+ pInfo->img.FrameNumOffset = 0;
+ pInfo->img.framepoc = 0;
+ pInfo->img.toppoc = 0;
+ pInfo->img.bottompoc = 0;
+ pInfo->img.ThisPOC = 0;
+ }
+ else
+ {
+ if (pInfo->img.last_has_mmco_5)
+ {
+ pInfo->img.PreviousFrameNum = 0;
+ pInfo->img.PreviousFrameNumOffset = 0;
+ }
+ if (actual_frame_num < pInfo->img.PreviousFrameNum)
+ pInfo->img.FrameNumOffset = pInfo->img.PreviousFrameNumOffset + MaxFrameNum;
+ else
+ pInfo->img.FrameNumOffset = pInfo->img.PreviousFrameNumOffset;
+
+ AbsFrameNum = pInfo->img.FrameNumOffset + actual_frame_num;
+ if (pInfo->SliceHeader.nal_ref_idc == 0) pInfo->img.ThisPOC = (AbsFrameNum<<1) - 1;
+ else pInfo->img.ThisPOC = (AbsFrameNum<<1);
+
+ if (!(pInfo->img.field_pic_flag))
+ {
+ pInfo->img.toppoc = pInfo->img.ThisPOC;
+ pInfo->img.bottompoc = pInfo->img.ThisPOC;
+ pInfo->img.framepoc = pInfo->img.ThisPOC;
+ }
+ else if (!(pInfo->img.bottom_field_flag))
+ {
+ pInfo->img.toppoc = pInfo->img.ThisPOC;
+ pInfo->img.framepoc = pInfo->img.ThisPOC;
+ }
+ else
+ {
+ pInfo->img.bottompoc = pInfo->img.ThisPOC;
+ pInfo->img.framepoc = pInfo->img.ThisPOC;
+ }
+ }
+
+ //CONFORMANCE_ISSUE
+ pInfo->img.PreviousFrameNum = pInfo->img.frame_num;
+ pInfo->img.PreviousFrameNumOffset = pInfo->img.FrameNumOffset;
+ }
+ break;
+ default:
+ break;
+ }
+
+ return;
+} //// End of decoding_POC
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_hdr_post_poc ()
+//
+//////////////////////////////////////////////////////////////////////////////
+
+void h264_hdr_post_poc(h264_Info* pInfo, int32_t NonExisting, int32_t frame_num, int32_t use_old)
+{
+ int32_t actual_frame_num = (NonExisting)? frame_num :
+ (use_old)? pInfo->old_slice.frame_num :
+ pInfo->img.frame_num;
+
+ int32_t disposable_flag = (use_old)?(pInfo->old_slice.nal_ref_idc == 0) :
+ (pInfo->SliceHeader.nal_ref_idc == 0);
+
+ switch(pInfo->img.pic_order_cnt_type)
+ {
+ case 0: {
+ pInfo->img.PreviousFrameNum = actual_frame_num;
+ if ((disposable_flag == 0) && (NonExisting == 0))
+ {
+ pInfo->img.PrevPicOrderCntLsb = (use_old)? pInfo->old_slice.pic_order_cnt_lsb :
+ pInfo->SliceHeader.pic_order_cnt_lsb;
+ pInfo->img.PicOrderCntMsb = pInfo->img.CurrPicOrderCntMsb;
+ }
+ }
+ break;
+ case 1: {
+ pInfo->img.PreviousFrameNum = actual_frame_num;
+ pInfo->img.PreviousFrameNumOffset = pInfo->img.FrameNumOffset;
+ }
+ break;
+ case 2: {
+ pInfo->img.PreviousFrameNum = actual_frame_num;
+ pInfo->img.PreviousFrameNumOffset = pInfo->img.FrameNumOffset;
+
+ }break;
+
+ default: {
+ }break;
+ }
+
+ return;
+} ///// End of h264_hdr_post_poc
+
+
diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_math.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_math.c
new file mode 100644
index 0000000..b5df6d9
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_math.c
@@ -0,0 +1,82 @@
+//#include "math.h"
+// Arithmatic functions using add & subtract
+
+unsigned long mult_u(register unsigned long var1, register unsigned long var2)
+{
+
+ register unsigned long var_out = 0;
+
+ while (var2 > 0)
+ {
+
+ if (var2 & 0x01)
+ {
+ var_out += var1;
+ }
+ var2 >>= 1;
+ var1 <<= 1;
+ }
+ return var_out;
+
+}// mult_u
+
+unsigned long ldiv_mod_u(register unsigned long a, register unsigned long b, unsigned long * mod)
+{
+ register unsigned long div = b;
+ register unsigned long res = 0;
+ register unsigned long bit = 0x1;
+
+ if (!div)
+ {
+ *mod = 0;
+ return 0xffffffff ; // Div by 0
+ }
+
+ if (a < b)
+ {
+ *mod = a;
+ return 0; // It won't even go once
+ }
+
+ while(!(div & 0x80000000))
+ {
+ div <<= 1;
+ bit <<= 1;
+ }
+
+ while (bit)
+ {
+ if (div <= a)
+ {
+ res |= bit;
+ a -= div;
+ }
+ div >>= 1;
+ bit >>= 1;
+ }
+ *mod = a;
+ return res;
+}// ldiv_mod_u
+
+
+unsigned ldiv_u(register unsigned a, register unsigned b)
+{
+ register unsigned div = b << 16;
+ register unsigned res = 0;
+ register unsigned bit = 0x10000;
+
+ while (bit)
+ {
+ div >>= 1;
+ bit >>= 1;
+ if (div < a)
+ {
+ res |= bit;
+ a -= div;
+ }
+ }
+
+ return res;
+}
+
+
diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_mem.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_mem.c
new file mode 100644
index 0000000..a956607
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_mem.c
@@ -0,0 +1,198 @@
+
+/*!
+ ***********************************************************************
+ * \file: h264_dpb_ctl.c
+ *
+ ***********************************************************************
+ */
+
+//#include <limits.h>
+
+#include "h264parse.h"
+
+
+// ---------------------------------------------------------------------------
+// IMPORTANT: note that in this implementation int c is an int not a char
+// ---------------------------------------------------------------------------
+void* h264_memset( void* buf, uint32_t c, uint32_t num )
+{
+ uint32_t* buf32 = buf;
+ uint32_t size32 = ( num >> 2 );
+ uint32_t i;
+
+ for ( i = 0; i < size32; i++ )
+ {
+ *buf32++ = c;
+ }
+
+ return buf;
+}
+
+
+void* h264_memcpy( void* dest, void* src, uint32_t num )
+{
+ int32_t* dest32 = dest;
+ int32_t* src32 = src;
+ uint32_t size32 = ( num >> 2 );
+ uint32_t i;
+
+ for ( i = 0; i < size32; i++ )
+ {
+ *dest32++ = *src32++;
+ }
+
+ return dest;
+}
+
+
+#ifndef USER_MODE
+
+//h264_Parse_Copy_Sps_To_DDR () copy local sps to ddr mem
+void h264_Parse_Copy_Pps_To_DDR(h264_Info* pInfo, pic_param_set_ptr PPS, uint32_t nPPSId)
+{
+ uint32_t copy_size = sizeof(pic_param_set);
+ uint32_t pps_entry_ptr = pInfo->PPS_PADDR_GL+nPPSId*copy_size;
+
+ if(nPPSId < MAX_NUM_PPS)
+ {
+ cp_using_dma(pps_entry_ptr, (uint32_t)PPS, copy_size, 1, 0);
+ }
+
+ return;
+
+}
+//end of h264_Parse_Copy_Pps_To_DDR
+
+
+// h264_Parse_Copy_Pps_From_DDR copy a pps with nPPSId from ddr mem to local PPS
+void h264_Parse_Copy_Pps_From_DDR(h264_Info* pInfo, pic_param_set_ptr PPS, uint32_t nPPSId)
+{
+
+ uint32_t copy_size= sizeof(pic_param_set);
+ uint32_t pps_entry_ptr = pInfo->PPS_PADDR_GL+nPPSId*copy_size;
+
+ if( nPPSId < MAX_NUM_PPS)
+ {
+ cp_using_dma(pps_entry_ptr, (uint32_t)PPS, copy_size, 0, 0);
+ }
+
+ return;
+}
+//end of h264_Parse_Copy_Pps_From_DDR
+
+
+//h264_Parse_Copy_Sps_To_DDR () copy local sps to ddr mem with nSPSId
+void h264_Parse_Copy_Sps_To_DDR(h264_Info* pInfo, seq_param_set_used_ptr SPS, uint32_t nSPSId)
+{
+ uint32_t copy_size = sizeof(seq_param_set_used);
+ uint32_t sps_entry_ptr = pInfo->SPS_PADDR_GL+nSPSId*sizeof(seq_param_set_all);
+
+ if(nSPSId < MAX_NUM_SPS)
+ {
+ cp_using_dma(sps_entry_ptr, (uint32_t)SPS, copy_size, 1, 0);
+ }
+
+ //OS_INFO("SPS->seq_parameter_set_id = %d\n", SPS->seq_parameter_set_id);
+
+
+ return;
+}
+
+//end of h264_Parse_Copy_Sps_To_DDR
+
+
+// h264_Parse_Copy_Sps_From_DDR copy a sps with nSPSId from ddr mem to local SPS
+void h264_Parse_Copy_Sps_From_DDR(h264_Info* pInfo, seq_param_set_used_ptr SPS, uint32_t nSPSId)
+{
+ uint32_t copy_size= sizeof(seq_param_set_used);
+ uint32_t sps_entry_ptr = pInfo->SPS_PADDR_GL+nSPSId*sizeof(seq_param_set_all);
+
+ if(nSPSId < MAX_NUM_SPS)
+ {
+ cp_using_dma(sps_entry_ptr, (uint32_t)SPS, copy_size, 0, 0);
+ }
+
+ return;
+
+}
+//end of h264_Parse_Copy_Sps_From_DDR
+
+//h264_Parse_Copy_Offset_Ref_Frames_To_DDR () copy local offset_ref_frames to ddr mem with nSPSId
+void h264_Parse_Copy_Offset_Ref_Frames_To_DDR(h264_Info* pInfo, int32_t* pOffset_ref_frames, uint32_t nSPSId)
+{
+ uint32_t copy_size = sizeof(int32_t)*MAX_NUM_REF_FRAMES_IN_PIC_ORDER_CNT_CYCLE;
+ uint32_t offset_ref_frames_entry_ptr = pInfo->OFFSET_REF_FRAME_PADDR_GL+nSPSId*copy_size;
+
+ if(nSPSId < MAX_NUM_SPS)
+ {
+ //cp_using_dma(offset_ref_frames_entry_ptr, (uint32_t)pOffset_ref_frames, copy_size, 1, 0);
+ h264_memcpy((int32_t *)offset_ref_frames_entry_ptr,pOffset_ref_frames, copy_size);
+ }
+
+ return;
+}
+
+//end of h264_Parse_Copy_Offset_Ref_Frames_To_DDR
+
+
+// h264_Parse_Copy_Offset_Ref_Frames_From_DDR copy a offset_ref_frames with nSPSId from ddr mem to local offset_ref_frames
+void h264_Parse_Copy_Offset_Ref_Frames_From_DDR(h264_Info* pInfo, int32_t* pOffset_ref_frames, uint32_t nSPSId)
+{
+ uint32_t copy_size= sizeof(int32_t)*MAX_NUM_REF_FRAMES_IN_PIC_ORDER_CNT_CYCLE;
+ uint32_t offset_ref_frames_entry_ptr = pInfo->OFFSET_REF_FRAME_PADDR_GL+nSPSId*copy_size;
+
+ if(nSPSId < MAX_NUM_SPS)
+ {
+ //cp_using_dma(offset_ref_frames_entry_ptr, (uint32_t)pOffset_ref_frames, copy_size, 0, 0);
+ h264_memcpy(pOffset_ref_frames, (int32_t *)offset_ref_frames_entry_ptr, copy_size);
+ }
+
+ return;
+
+}
+//end of h264_Parse_Copy_Offset_Ref_Frames_From_DDR
+
+
+//h264_Parse_Check_Sps_Updated_Flag () copy local sps to ddr mem with nSPSId
+uint32_t h264_Parse_Check_Sps_Updated_Flag(h264_Info* pInfo, uint32_t nSPSId)
+{
+ uint32_t is_updated=0;
+ uint32_t copy_size = sizeof(uint32_t);
+ uint32_t sps_entry_ptr = pInfo->SPS_PADDR_GL+nSPSId*copy_size;
+
+
+ if(nSPSId < MAX_NUM_SPS)
+ {
+ cp_using_dma(sps_entry_ptr, (uint32_t)(&is_updated), copy_size, 1, 0);
+ }
+
+ //OS_INFO("SPS->seq_parameter_set_id = %d\n", SPS->seq_parameter_set_id);
+
+
+ return is_updated;
+}
+
+//end of h264_Parse_Check_Sps_Updated_Flag
+
+
+// h264_Parse_Clear_Sps_Updated_Flag copy a sps with nSPSId from ddr mem to local SPS
+void h264_Parse_Clear_Sps_Updated_Flag(h264_Info* pInfo, uint32_t nSPSId)
+{
+ uint32_t is_updated=0;
+ uint32_t copy_size= sizeof(uint32_t);
+ uint32_t sps_entry_ptr = pInfo->SPS_PADDR_GL+nSPSId*copy_size;
+
+ if(nSPSId < MAX_NUM_SPS)
+ {
+ cp_using_dma(sps_entry_ptr, (uint32_t)(&is_updated), copy_size, 0, 0);
+ }
+
+ return;
+
+}
+//end of h264_Parse_Clear_Sps_Updated_Flag
+
+
+#endif
+
+
diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_pps.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_pps.c
new file mode 100644
index 0000000..a1281c2
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_pps.c
@@ -0,0 +1,128 @@
+
+
+#include "h264.h"
+#include "h264parse.h"
+
+/*---------------------------------------------*/
+/*---------------------------------------------*/
+/*---------------------------------------------*/
+h264_Status h264_Parse_PicParameterSet(void *parent,h264_Info * pInfo,h264_PicParameterSet_t* PictureParameterSet)
+{
+ h264_Status ret = H264_PPS_ERROR;
+
+ //h264_PicParameterSet_t* PictureParameterSet = &pInfo->PictureParameterSet;
+ uint32_t code=0, i = 0;
+
+ do {
+ ///// PPS par1: pic_parameter_set_id & seq_parameter_set_id
+ code = h264_GetVLCElement(parent, pInfo, false);
+ if(code > MAX_PIC_PARAMS) {
+ break;
+ }
+ PictureParameterSet->pic_parameter_set_id = (uint8_t)code;
+
+
+ code = h264_GetVLCElement(parent, pInfo, false);
+ if(code > MAX_NUM_SPS-1) {
+ break;
+ }
+ PictureParameterSet->seq_parameter_set_id = (uint8_t)code;
+
+ ///// entropy_coding_mode_flag
+ viddec_pm_get_bits(parent, &code, 1);
+ PictureParameterSet->entropy_coding_mode_flag = (uint8_t)code;
+ ///// pic_order_present_flag
+ viddec_pm_get_bits(parent, &code, 1);
+ PictureParameterSet->pic_order_present_flag = (uint8_t)code;
+
+ PictureParameterSet->num_slice_groups_minus1 = h264_GetVLCElement(parent, pInfo, false);
+
+ //
+ // In main profile, FMO is excluded and num_slice_groups_minus1 should be 0
+ //
+ if(PictureParameterSet->num_slice_groups_minus1 > 0) //MAX_NUM_SLICE_GRPS)
+ break;
+
+ PictureParameterSet->num_ref_idx_l0_active = h264_GetVLCElement(parent, pInfo, false)+1;
+ PictureParameterSet->num_ref_idx_l1_active = h264_GetVLCElement(parent, pInfo, false)+1;
+
+ //// PPS->num_ref_idx_l0_active --- [0,32]
+ if(((PictureParameterSet->num_ref_idx_l0_active) > MAX_NUM_REF_FRAMES) || ((PictureParameterSet->num_ref_idx_l1_active) > MAX_NUM_REF_FRAMES))
+ {
+ break;
+ }
+
+ //// weighting prediction
+ viddec_pm_get_bits(parent, &code, 1);
+ PictureParameterSet->weighted_pred_flag = (uint8_t)code;
+
+ viddec_pm_get_bits(parent, &code, 2);
+ PictureParameterSet->weighted_bipred_idc = (uint8_t)code;
+
+ //// QP
+ PictureParameterSet->pic_init_qp_minus26 = h264_GetVLCElement(parent, pInfo, true);
+ PictureParameterSet->pic_init_qs_minus26 = h264_GetVLCElement(parent, pInfo, true);
+ if(((PictureParameterSet->pic_init_qp_minus26+26) > MAX_QP) || ((PictureParameterSet->pic_init_qs_minus26+26) > MAX_QP))
+ break;
+ PictureParameterSet->chroma_qp_index_offset = h264_GetVLCElement(parent, pInfo, true);
+
+ //// Deblocking ctl parameters
+ viddec_pm_get_bits(parent, &code, 1);
+ PictureParameterSet->deblocking_filter_control_present_flag = (uint8_t)code;
+
+ viddec_pm_get_bits(parent, &code, 1);
+ PictureParameterSet->constrained_intra_pred_flag = (uint8_t)code;
+
+ if( viddec_pm_get_bits(parent, &code, 1) == -1)
+ break;
+ PictureParameterSet->redundant_pic_cnt_present_flag = (uint8_t)code;
+
+ //// Check if have more RBSP Data for additional parameters
+ if(h264_More_RBSP_Data(parent, pInfo))
+ {
+ viddec_pm_get_bits(parent, &code, 1);
+ PictureParameterSet->transform_8x8_mode_flag = (uint8_t)code;
+
+ if( viddec_pm_get_bits(parent, &code, 1) == -1)
+ break;
+ PictureParameterSet->pic_scaling_matrix_present_flag = (uint8_t)code;
+
+ if(PictureParameterSet->pic_scaling_matrix_present_flag)
+ {
+ uint32_t n_ScalingList = 6 + (PictureParameterSet->transform_8x8_mode_flag << 1);
+ for(i=0; i<n_ScalingList; i++)
+ {
+ if( viddec_pm_get_bits(parent, &code, 1) == -1)
+ break;
+ PictureParameterSet->pic_scaling_list_present_flag[i] = (uint8_t)code;
+
+ if(PictureParameterSet->pic_scaling_list_present_flag[i])
+ {
+ if(i<6)
+ h264_Scaling_List(parent, PictureParameterSet->ScalingList4x4[i], 16, &PictureParameterSet->UseDefaultScalingMatrix4x4Flag[i], pInfo);
+ else
+ h264_Scaling_List(parent, PictureParameterSet->ScalingList8x8[i-6], 64, &PictureParameterSet->UseDefaultScalingMatrix8x8Flag[i-6], pInfo);
+ }
+ }
+ }
+
+ PictureParameterSet->second_chroma_qp_index_offset = h264_GetVLCElement(parent, pInfo, true); //fix
+ //if((PictureParameterSet->second_chroma_qp_index_offset>12) || (PictureParameterSet->second_chroma_qp_index_offset < -12))
+ // break;
+ }
+ else
+ {
+ PictureParameterSet->transform_8x8_mode_flag = 0;
+ PictureParameterSet->pic_scaling_matrix_present_flag = 0;
+ PictureParameterSet->second_chroma_qp_index_offset = PictureParameterSet->chroma_qp_index_offset;
+ }
+
+ ret = H264_STATUS_OK;
+ }while(0);
+
+ //h264_Parse_rbsp_trailing_bits(pInfo);
+ return ret;
+}
+
+////////// EOF///////////////
+
diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sei.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sei.c
new file mode 100644
index 0000000..829eb55
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sei.c
@@ -0,0 +1,1176 @@
+#define H264_PARSE_SEI_C
+
+#ifdef H264_PARSE_SEI_C
+
+#include "h264.h"
+#include "h264parse.h"
+#include "h264parse_dpb.h"
+
+#include "viddec_parser_ops.h"
+
+#include "viddec_fw_item_types.h"
+#include "viddec_fw_workload.h"
+
+//////////////////////////////////////////////////////////////////////////////
+// avc_sei_stream_initialise ()
+//
+//
+
+/* Reset all per-stream SEI bookkeeping to its initial state.
+ * recovery_fn is parked at 0xFFFFFFFF ("no recovery point seen yet");
+ * every other field starts at zero. */
+void h264_sei_stream_initialise (h264_Info* pInfo)
+{
+    pInfo->sei_information.recovery_fn     = 0xFFFFFFFF;
+    pInfo->sei_information.capture_fn      = 0;
+    pInfo->sei_information.capture_POC     = 0;
+    pInfo->sei_information.release_POC     = 0;
+    pInfo->sei_information.disp_frozen     = 0;
+    pInfo->sei_information.scan_format     = 0;
+    pInfo->sei_information.broken_link_pic = 0;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* Parse a buffering_period SEI message (H.264 spec D.1.1 / D.2.1).
+ * Consumes the initial CPB removal delay pairs for each scheduler
+ * (NAL HRD and/or VCL HRD); the decoded values are kept only in a local
+ * and are otherwise discarded.
+ *
+ * Returns H264_STATUS_OK on success, H264_STATUS_SEI_ERROR when the
+ * seq_param_set_id or a cpb_cnt_minus1 is out of range.
+ */
+h264_Status h264_sei_buffering_period(void *parent,h264_Info* pInfo)
+{
+    h264_Status ret = H264_STATUS_SEI_ERROR;
+
+    h264_SEI_buffering_period_t* sei_msg_ptr;
+    h264_SEI_buffering_period_t sei_buffering_period;
+    int32_t SchedSelIdx;
+    int num_bits = 0;
+
+    sei_msg_ptr = (h264_SEI_buffering_period_t *)(&sei_buffering_period);
+
+    do{
+        if(pInfo->active_SPS.sps_disp.vui_seq_parameters.nal_hrd_parameters_present_flag == 1)
+        {
+            num_bits = pInfo->active_SPS.sps_disp.vui_seq_parameters.nal_hrd_initial_cpb_removal_delay_length_minus1 + 1;
+        }
+        else if(pInfo->active_SPS.sps_disp.vui_seq_parameters.vcl_hrd_parameters_present_flag)
+        {
+            // NOTE(review): this reads the NAL-HRD length field in the
+            // VCL-HRD-only case; looks like a copy/paste — confirm whether a
+            // dedicated vcl_hrd_initial_cpb_removal_delay_length field exists
+            // and should be used here.  Behavior preserved.
+            num_bits = pInfo->active_SPS.sps_disp.vui_seq_parameters.nal_hrd_initial_cpb_removal_delay_length_minus1 + 1;
+        }
+
+        sei_msg_ptr->seq_param_set_id = h264_GetVLCElement(parent, pInfo, false);
+        if(sei_msg_ptr->seq_param_set_id >= NUM_SPS)
+            break;
+
+        //check if this id is same as the id of the current SPS //fix
+
+        if(pInfo->active_SPS.sps_disp.vui_seq_parameters.nal_hrd_parameters_present_flag == 1)
+        {
+            if(pInfo->active_SPS.sps_disp.vui_seq_parameters.nal_hrd_cpb_cnt_minus1 >= MAX_CPB_CNT)
+                break;
+
+            // One (delay, offset) pair per NAL HRD scheduler; values are
+            // read into the same locals each pass and not retained.
+            for(SchedSelIdx = 0; SchedSelIdx <= pInfo->active_SPS.sps_disp.vui_seq_parameters.nal_hrd_cpb_cnt_minus1; SchedSelIdx++)
+            {
+                viddec_pm_get_bits(parent, (uint32_t *)&sei_msg_ptr->initial_cpb_removal_delay_nal, num_bits);
+                viddec_pm_get_bits(parent, (uint32_t *)&sei_msg_ptr->initial_cpb_removal_delay_offset_nal, num_bits);
+            }
+        }
+
+        if(pInfo->active_SPS.sps_disp.vui_seq_parameters.vcl_hrd_parameters_present_flag == 1)
+        {
+            if(pInfo->active_SPS.sps_disp.vui_seq_parameters.vcl_hrd_cpb_cnt_minus1 >= MAX_CPB_CNT)
+                break;
+
+            for(SchedSelIdx = 0; SchedSelIdx <= pInfo->active_SPS.sps_disp.vui_seq_parameters.vcl_hrd_cpb_cnt_minus1; SchedSelIdx++)
+            {
+                viddec_pm_get_bits(parent, (uint32_t *)&sei_msg_ptr->initial_cpb_removal_delay_vcl, num_bits);
+                viddec_pm_get_bits(parent, (uint32_t *)&sei_msg_ptr->initial_cpb_removal_delay_offset_vcl, num_bits);
+            }
+        }
+
+        ret = H264_STATUS_OK;
+    } while (0);
+
+    // Fix: propagate the parse status.  The original returned H264_STATUS_OK
+    // unconditionally, which silently discarded every error detected by the
+    // break paths above.
+    return ret;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* Parse a pic_timing SEI message (H.264 spec D.1.2 / D.2.2).
+ * Reads the CPB/DPB delays and, when pic_struct is signalled in the VUI,
+ * the per-picture clock timestamps.  Only pic_struct is acted upon: it
+ * sets the progressive/interlaced scan format and is emitted as a
+ * VIDDEC_WORKLOAD_SEI_PIC_TIMING work item.  All timestamp fields are
+ * consumed from the bitstream but discarded (the stores into sei_msg_ptr
+ * members are commented out).
+ */
+h264_Status h264_sei_pic_timing(void *parent,h264_Info* pInfo)
+{
+    int32_t CpbDpbDelaysPresentFlag = 0;
+    h264_SEI_pic_timing_t* sei_msg_ptr;
+    h264_SEI_pic_timing_t sei_pic_timing;
+    int32_t num_bits_cpb = 0, num_bits_dpb = 0, time_offset_length = 0;
+    uint32_t code;
+    uint32_t clock_timestamp_flag = 0;
+    uint32_t full_timestamp_flag = 0;
+    uint32_t seconds_flag = 0;
+    uint32_t minutes_flag = 0;
+    uint32_t hours_flag = 0;
+    uint32_t time_offset = 0;
+
+    sei_msg_ptr = (h264_SEI_pic_timing_t *)(&sei_pic_timing);
+
+    // Field widths for the delay syntax elements come from the active SPS VUI.
+    if(pInfo->active_SPS.sps_disp.vui_seq_parameters.nal_hrd_parameters_present_flag)
+    {
+        num_bits_cpb = pInfo->active_SPS.sps_disp.vui_seq_parameters.nal_hrd_cpb_removal_delay_length_minus1 +1;
+        num_bits_dpb = pInfo->active_SPS.sps_disp.vui_seq_parameters.nal_hrd_dpb_output_delay_length_minus1 + 1;
+        time_offset_length = pInfo->active_SPS.sps_disp.vui_seq_parameters.nal_hrd_time_offset_length;
+    }
+    else if(pInfo->active_SPS.sps_disp.vui_seq_parameters.vcl_hrd_parameters_present_flag)
+    {
+        // NOTE(review): time_offset_length is left 0 in this branch, so the
+        // trailing time_offset read below is skipped for VCL-HRD-only
+        // streams — confirm against the spec whether that is intended.
+        num_bits_cpb = pInfo->active_SPS.sps_disp.vui_seq_parameters.vcl_hrd_cpb_removal_delay_length_minus1 +1;
+        num_bits_dpb = pInfo->active_SPS.sps_disp.vui_seq_parameters.vcl_hrd_dpb_output_delay_length_minus1 + 1;
+    }
+
+    // Delays are read unconditionally (flag hard-wired to 1, per the
+    // original amphion reference code).  If neither HRD block was present
+    // the widths are 0, so these reads consume no bits.
+    CpbDpbDelaysPresentFlag = 1; // as per amphion code
+    if(CpbDpbDelaysPresentFlag)
+    {
+        viddec_pm_get_bits(parent, (uint32_t *)&sei_msg_ptr->cpb_removal_delay, num_bits_cpb);
+        viddec_pm_get_bits(parent, (uint32_t *)&sei_msg_ptr->dpb_output_delay, num_bits_dpb);
+    }
+
+    if(pInfo->active_SPS.sps_disp.vui_seq_parameters.pic_struct_present_flag)
+    {
+        int32_t i = 0, NumClockTS = 0;
+
+        viddec_workload_item_t wi;
+
+        wi.vwi_payload[0] = wi.vwi_payload[1] = wi.vwi_payload[2] = 0;
+        viddec_pm_get_bits(parent, &code , 4);
+        sei_msg_ptr->pic_struct = (uint8_t)code;
+
+        // pic_struct 0 (frame), 7 (frame doubling), 8 (frame tripling) are
+        // progressive; everything else implies field-based display.
+        if((sei_msg_ptr->pic_struct == 0) || (sei_msg_ptr->pic_struct == 7) || (sei_msg_ptr->pic_struct == 8)) {
+            pInfo->sei_information.scan_format = SEI_SCAN_FORMAT_PROGRESSIVE;
+        } else {
+            pInfo->sei_information.scan_format = SEI_SCAN_FORMAT_INTERLACED;
+        }
+
+        wi.vwi_type = VIDDEC_WORKLOAD_SEI_PIC_TIMING;
+        wi.h264_sei_pic_timing.pic_struct = sei_msg_ptr->pic_struct;
+
+#ifndef VBP
+        //Push to current if we are in first frame, or we do not detect previous frame end
+        if( (pInfo->Is_first_frame_in_stream)||(!pInfo->is_current_workload_done) ) {
+            viddec_pm_append_workitem( parent, &wi );
+        } else {
+            viddec_pm_append_workitem_next( parent, &wi );
+        }
+#endif
+
+        // Number of clock timestamps is determined by pic_struct
+        // (spec Table D-1): 1 for frame/field, 2 for field pairs and
+        // frame doubling, 3 for tripling.
+        if(sei_msg_ptr->pic_struct < 3) {
+            NumClockTS = 1;
+        } else if((sei_msg_ptr->pic_struct < 5) || (sei_msg_ptr->pic_struct == 7)) {
+            NumClockTS = 2;
+        } else {
+            NumClockTS = 3;
+        }
+
+        // Walk each clock timestamp purely to keep the bitstream position
+        // correct; decoded values are thrown away.
+        for(i = 0; i < NumClockTS; i++)
+        {
+            viddec_pm_get_bits(parent, &code , 1);
+            clock_timestamp_flag = code;
+            //sei_msg_ptr->clock_timestamp_flag[i] = (uint8_t)code;
+
+            if(clock_timestamp_flag)
+            {
+                viddec_pm_get_bits(parent, &code , 2);
+                //sei_msg_ptr->ct_type[i] = (uint8_t)code;
+
+                viddec_pm_get_bits(parent, &code , 1);
+                //sei_msg_ptr->nuit_field_based_flag[i] = (uint8_t)code;
+
+                viddec_pm_get_bits(parent, &code , 5);
+                //sei_msg_ptr->counting_type[i] = (uint8_t)code;
+
+                viddec_pm_get_bits(parent, &code , 1);
+                //sei_msg_ptr->full_timestamp_flag[i] = (uint8_t)code;
+                full_timestamp_flag = code;
+
+                viddec_pm_get_bits(parent, &code , 1);
+                //sei_msg_ptr->discontinuity_flag[i] = (uint8_t)code;
+
+                viddec_pm_get_bits(parent, &code , 1);
+                //sei_msg_ptr->cnt_dropped_flag[i] = (uint8_t)code;
+
+                viddec_pm_get_bits(parent, &code , 8);
+                //sei_msg_ptr->n_frames[i] = (uint8_t)code;
+
+                // Full timestamp carries all three fields; otherwise each of
+                // seconds/minutes/hours is gated by its own presence flag.
+                if(full_timestamp_flag)
+                {
+                    viddec_pm_get_bits(parent, &code , 6);
+                    //sei_msg_ptr->seconds_value[i] = (uint8_t)code;
+
+                    viddec_pm_get_bits(parent, &code , 6);
+                    //sei_msg_ptr->minutes_value[i] = (uint8_t)code;
+
+                    viddec_pm_get_bits(parent, &code , 5);
+                    //sei_msg_ptr->hours_value[i] = (uint8_t)code;
+                }
+                else
+                {
+                    viddec_pm_get_bits(parent, &code , 1);
+                    //sei_msg_ptr->seconds_flag[i] = (uint8_t)code;
+                    seconds_flag = code;
+
+                    if(seconds_flag)
+                    {
+                        viddec_pm_get_bits(parent, &code , 6);
+                        //sei_msg_ptr->seconds_value[i] = (uint8_t)code;
+
+                        viddec_pm_get_bits(parent, &code , 1);
+                        //sei_msg_ptr->minutes_flag[i] = (uint8_t)code;
+                        minutes_flag = code;
+
+                        if(minutes_flag)
+                        {
+                            viddec_pm_get_bits(parent, &code , 6);
+                            //sei_msg_ptr->minutes_value[i] = (uint8_t)code;
+
+                            viddec_pm_get_bits(parent, &code , 1);
+                            //sei_msg_ptr->hours_flag[i] = (uint8_t)code;
+                            hours_flag = code;
+
+                            if(hours_flag){
+                                // NOTE(review): hours_value is 5 bits per the
+                                // spec; 6 are read here — confirm.
+                                viddec_pm_get_bits(parent, &code , 6);
+                                //sei_msg_ptr->hours_value[i] = (uint8_t)code;
+                            }
+                        }
+                    }
+                }
+
+                if(time_offset_length > 0)
+                {
+                    viddec_pm_get_bits(parent, (uint32_t *)&time_offset, time_offset_length);
+                }
+            }
+        }
+    }
+
+
+    return H264_STATUS_OK;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* Parse a pan_scan_rect SEI message (spec D.1.3 / D.2.3) and emit two
+ * work items: a VIDDEC_WORKLOAD_H264_PAN_SCAN summary (id, cancel flag,
+ * count, repetition period) and, when not cancelled, one
+ * VIDDEC_WORKLOAD_SEI_PAN_SCAN_RECT item per rectangle.
+ * Returns H264_STATUS_SEI_ERROR if pan_scan_cnt_minus1 exceeds
+ * MAX_PAN_SCAN_CNT-1 (note: the summary work item is NOT appended on
+ * that early-out path).
+ */
+h264_Status h264_sei_pan_scan(void *parent,h264_Info* pInfo)
+{
+    h264_SEI_pan_scan_rectangle_t* sei_msg_ptr;
+    h264_SEI_pan_scan_rectangle_t sei_pan_scan;
+    uint32_t code;
+
+    viddec_workload_item_t wi;
+
+    h264_memset( &(sei_pan_scan), 0x0, sizeof(h264_SEI_pan_scan_rectangle_t) );
+
+    viddec_fw_reset_workload_item(&wi);
+    wi.vwi_type = VIDDEC_WORKLOAD_H264_PAN_SCAN;
+
+    sei_msg_ptr = (h264_SEI_pan_scan_rectangle_t *)(&sei_pan_scan);
+
+    sei_msg_ptr->pan_scan_rect_id = h264_GetVLCElement(parent, pInfo, false);
+
+    wi.h264_sei_pan_scan.pan_scan_rect_id = sei_msg_ptr->pan_scan_rect_id;
+
+    viddec_pm_get_bits(parent, &code , 1);
+    sei_msg_ptr->pan_scan_rect_cancel_flag = (uint8_t)code;
+    viddec_fw_h264_sei_pan_scan_set_cancel_flag(&(wi.h264_sei_pan_scan), sei_msg_ptr->pan_scan_rect_cancel_flag);
+
+    // Cancel flag set means "stop using previous rectangles"; no further
+    // payload follows in that case.
+    if(!sei_msg_ptr->pan_scan_rect_cancel_flag)
+    {
+        int32_t i;
+        sei_msg_ptr->pan_scan_cnt_minus1 = h264_GetVLCElement(parent, pInfo, false);
+
+        viddec_fw_h264_sei_pan_scan_set_cnt_minus1(&(wi.h264_sei_pan_scan), sei_msg_ptr->pan_scan_cnt_minus1);
+        if(sei_msg_ptr->pan_scan_cnt_minus1 > MAX_PAN_SCAN_CNT -1)
+        {
+            return H264_STATUS_SEI_ERROR;
+        }
+        // Offsets are signed se(v) values, hence the `true` argument.
+        for(i=0; i<= sei_msg_ptr->pan_scan_cnt_minus1; i++)
+        {
+            sei_msg_ptr->pan_scan_rect_left_offset[i] = h264_GetVLCElement(parent, pInfo, true);
+            sei_msg_ptr->pan_scan_rect_right_offset[i] = h264_GetVLCElement(parent, pInfo, true);
+            sei_msg_ptr->pan_scan_rect_top_offset[i] = h264_GetVLCElement(parent, pInfo, true);
+            sei_msg_ptr->pan_scan_rect_bottom_offset[i] = h264_GetVLCElement(parent, pInfo, true);
+        }
+        sei_msg_ptr->pan_scan_rect_repetition_period = h264_GetVLCElement(parent, pInfo, false);
+        wi.h264_sei_pan_scan.pan_scan_rect_repetition_period = sei_msg_ptr->pan_scan_rect_repetition_period;
+    }
+
+#ifndef VBP
+    if((pInfo->Is_first_frame_in_stream)||(!pInfo->is_current_workload_done)) //cur is first frame
+    {
+        viddec_pm_append_workitem( parent, &wi );
+    }
+    else
+    {
+        viddec_pm_append_workitem_next( parent, &wi );
+    }
+#endif
+
+    if(!sei_msg_ptr->pan_scan_rect_cancel_flag)
+    {
+        int32_t i;
+
+        // Reuse wi for the per-rectangle items; one append per rectangle.
+        viddec_fw_reset_workload_item(&wi);
+        wi.vwi_type = VIDDEC_WORKLOAD_SEI_PAN_SCAN_RECT;
+
+        for(i=0; i<= sei_msg_ptr->pan_scan_cnt_minus1; i++)
+        {
+            viddec_fw_h264_pan_scan_set_left(&(wi.h264_pan_scan_rect), sei_msg_ptr->pan_scan_rect_left_offset[i]);
+            viddec_fw_h264_pan_scan_set_right(&(wi.h264_pan_scan_rect), sei_msg_ptr->pan_scan_rect_right_offset[i]);
+            viddec_fw_h264_pan_scan_set_top(&(wi.h264_pan_scan_rect), sei_msg_ptr->pan_scan_rect_top_offset[i]);
+            viddec_fw_h264_pan_scan_set_bottom(&(wi.h264_pan_scan_rect), sei_msg_ptr->pan_scan_rect_bottom_offset[i]);
+
+#ifndef VBP
+            // NOTE(review): unlike the summary append above, this path does
+            // not also test is_current_workload_done — confirm whether the
+            // asymmetry is intentional.
+            if(pInfo->Is_first_frame_in_stream) { //cur is first frame
+                viddec_pm_append_workitem( parent, &wi );
+            } else {
+                viddec_pm_append_workitem_next( parent, &wi );
+            }
+#endif
+        }
+    }
+
+    return H264_STATUS_OK;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* Skip a filler_payload SEI: consume payload_size bytes from the
+ * bitstream and discard them. */
+h264_Status h264_sei_filler_payload(void *parent,h264_Info* pInfo, uint32_t payload_size)
+{
+    h264_SEI_filler_payload_t sei_filler_payload;
+    h264_SEI_filler_payload_t *sei_msg_ptr = &sei_filler_payload;
+    uint32_t remaining = payload_size;
+    uint32_t code;
+
+    pInfo = pInfo;   /* unused; kept for the common SEI handler signature */
+
+    while (remaining--)
+    {
+        viddec_pm_get_bits(parent, &code, 8);
+        sei_msg_ptr->ff_byte = (uint8_t)code;
+    }
+
+    return H264_STATUS_OK;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* Parse a user_data_registered_itu_t_t35 SEI (spec D.1.5 / D.2.5).
+ * Reads the country code (plus extension byte when the code is 0xFF) and
+ * forwards the remaining payload bytes as
+ * VIDDEC_WORKLOAD_SEI_USER_DATA_REGISTERED work items, flushed in
+ * 11-byte chunks.  payload_size counts ALL payload bytes, including the
+ * country-code byte(s) already consumed.
+ */
+h264_Status h264_sei_userdata_reg(void *parent,h264_Info* pInfo, uint32_t payload_size)
+{
+    h264_SEI_userdata_registered_t* sei_msg_ptr;
+    h264_SEI_userdata_registered_t sei_userdata_registered;
+    uint32_t i;
+    int32_t byte = 0;
+    uint32_t code = 0;
+    viddec_workload_item_t wi;
+
+    wi.vwi_type = VIDDEC_WORKLOAD_SEI_USER_DATA_REGISTERED;
+    wi.vwi_payload[0] = wi.vwi_payload[1] = wi.vwi_payload[2] = 0;
+    pInfo = pInfo;   // silence unused-parameter warning when VBP is defined
+
+    sei_msg_ptr = (h264_SEI_userdata_registered_t *)(&sei_userdata_registered);
+
+    viddec_pm_get_bits(parent, &code , 8);
+    sei_msg_ptr->itu_t_t35_country_code = (uint8_t)code;
+
+    // 0xFF escapes to a second country-code byte (ITU-T T.35).
+    if(sei_msg_ptr->itu_t_t35_country_code != 0xff) {
+        i = 1;
+    } else {
+        viddec_pm_get_bits(parent, &code , 8);
+        sei_msg_ptr->itu_t_t35_country_code_extension_byte = (uint8_t)code;
+        i = 2;
+    }
+
+    wi.user_data.size =0;
+    // Fix: guard the loop up front.  The original do/while consumed at least
+    // one byte even when the country code (and extension byte) already
+    // exhausted payload_size, reading past the end of the SEI payload.
+    while(i < payload_size)
+    {
+        viddec_pm_get_bits(parent, (uint32_t *)&byte, 8);
+
+        wi.user_data.data_payload[wi.user_data.size]=(uint8_t)byte;
+        wi.user_data.size++;
+
+        // A work item carries at most 11 user-data bytes; flush a full chunk.
+        if(11 == wi.user_data.size)
+        {
+            viddec_pm_setup_userdata(&wi);
+#ifndef VBP
+            if((pInfo->Is_first_frame_in_stream)||(!pInfo->is_current_workload_done)) //cur is first frame
+            {
+                viddec_pm_append_workitem( parent, &wi );
+            }
+            else
+            {
+                viddec_pm_append_workitem_next( parent, &wi );
+            }
+#endif
+            wi.user_data.size =0;
+        }
+
+        i++;
+    }
+
+    // Flush any trailing partial chunk.
+    if(0!=wi.user_data.size)
+    {
+        viddec_pm_setup_userdata(&wi);
+
+#ifndef VBP
+        if(pInfo->Is_first_frame_in_stream) //cur is first frame
+        {
+            viddec_pm_append_workitem( parent, &wi );
+        }
+        else
+        {
+            viddec_pm_append_workitem_next( parent, &wi );
+        }
+#endif
+    }
+
+    return H264_STATUS_OK;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* Parse a user_data_unregistered SEI (spec D.1.6 / D.2.6): a 16-byte
+ * UUID followed by payload bytes, which are forwarded as
+ * VIDDEC_WORKLOAD_SEI_USER_DATA_UNREGISTERED work items in 11-byte
+ * chunks.  payload_size includes the 16 UUID bytes, hence the loop
+ * starting at i = 16.
+ */
+h264_Status h264_sei_userdata_unreg(void *parent, h264_Info* pInfo, uint32_t payload_size)
+{
+
+    h264_SEI_userdata_unregistered_t* sei_msg_ptr;
+    h264_SEI_userdata_unregistered_t sei_userdata_unregistered;
+    uint32_t i;
+    int32_t byte = 0;
+    uint32_t code;
+
+    viddec_workload_item_t wi;
+
+    wi.vwi_type = VIDDEC_WORKLOAD_SEI_USER_DATA_UNREGISTERED;
+
+    //remove warning
+    pInfo = pInfo;
+
+    sei_msg_ptr = (h264_SEI_userdata_unregistered_t *)(&sei_userdata_unregistered);
+
+    // NOTE(review): each 32-bit read is truncated to uint8_t here, so only
+    // the low byte of every UUID word survives — confirm whether
+    // uuid_iso_iec_11578[] is meant to hold four 32-bit words instead.
+    for (i = 0; i < 4; i++)
+    {
+        viddec_pm_get_bits(parent, &code , 32);
+        sei_msg_ptr->uuid_iso_iec_11578[i] = (uint8_t)code;
+    }
+
+    wi.user_data.size =0;
+    for(i = 16; i < payload_size; i++)
+    {
+
+        viddec_pm_get_bits(parent, (uint32_t *)&byte, 8);
+
+        wi.user_data.data_payload[wi.user_data.size]=(uint8_t)byte;
+        wi.user_data.size++;
+
+        // Flush each full 11-byte chunk as its own work item.
+        // NOTE(review): unlike h264_sei_userdata_reg, these appends are not
+        // wrapped in #ifndef VBP — confirm whether that is intentional.
+        if(11 == wi.user_data.size)
+        {
+            viddec_pm_setup_userdata(&wi);
+            if((pInfo->Is_first_frame_in_stream)||(!pInfo->is_current_workload_done)) //cur is first frame
+            {
+                viddec_pm_append_workitem( parent, &wi );
+            }
+            else
+            {
+                viddec_pm_append_workitem_next( parent, &wi );
+            }
+
+            wi.user_data.size =0;
+        }
+    }
+
+    // Flush any trailing partial chunk.
+    if(0!=wi.user_data.size)
+    {
+        viddec_pm_setup_userdata(&wi);
+        if(pInfo->Is_first_frame_in_stream) //cur is first frame
+        {
+            viddec_pm_append_workitem( parent, &wi );
+        }
+        else
+        {
+            viddec_pm_append_workitem_next( parent, &wi );
+        }
+    }
+
+    return H264_STATUS_OK;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* Parse a recovery_point SEI (spec D.1.7 / D.2.7), record the recovery
+ * state in pInfo->sei_information, and append a
+ * VIDDEC_WORKLOAD_SEI_RECOVERY_POINT work item.
+ */
+h264_Status h264_sei_recovery_point(void *parent, h264_Info* pInfo)
+{
+
+    h264_SEI_recovery_point_t* sei_msg_ptr;
+    h264_SEI_recovery_point_t sei_recovery_point;
+    uint32_t code;
+    viddec_workload_item_t wi;
+
+
+    sei_msg_ptr = (h264_SEI_recovery_point_t *)(&sei_recovery_point);
+
+    sei_msg_ptr->recovery_frame_cnt = h264_GetVLCElement(parent, pInfo, false);
+
+    viddec_pm_get_bits(parent, &code , 1);
+    sei_msg_ptr->exact_match_flag = (uint8_t)code;
+
+    viddec_pm_get_bits(parent, &code , 1);
+    sei_msg_ptr->broken_link_flag = (uint8_t)code;
+
+    viddec_pm_get_bits(parent, &code , 2);
+    sei_msg_ptr->changing_slice_group_idc = (uint8_t)code;
+
+    // Remember the recovery point so the decoder can resume cleanly after
+    // a random access without an IDR.
+    pInfo->sei_information.recovery_point = 1;
+    pInfo->sei_information.recovery_frame_cnt = (int32_t) sei_msg_ptr->recovery_frame_cnt;
+    pInfo->sei_information.capture_fn = 1;
+    pInfo->sei_information.broken_link_pic = sei_msg_ptr->broken_link_flag;
+
+    if(pInfo->got_start) {
+        // Bit 1 of recovery_point_found marks "SEI recovery point seen";
+        // bit 0 (set elsewhere) marks an IDR.
+        pInfo->img.recovery_point_found |= 2;
+
+        //// Enable the RP recovery if no IDR ---Cisco
+        if((pInfo->img.recovery_point_found & 1)==0)
+            pInfo->sei_rp_received = 1;
+    }
+
+    //
+    /// Append workload for SEI
+    //
+    viddec_fw_reset_workload_item(&wi);
+    wi.vwi_type = VIDDEC_WORKLOAD_SEI_RECOVERY_POINT;
+    wi.h264_sei_recovery_point.recovery_frame_cnt = sei_msg_ptr->recovery_frame_cnt;
+    viddec_fw_h264_h264_sei_recovery_set_exact_match_flag(&(wi.h264_sei_recovery_point), sei_msg_ptr->exact_match_flag);
+    viddec_fw_h264_h264_sei_recovery_set_broken_link_flag(&(wi.h264_sei_recovery_point), sei_msg_ptr->broken_link_flag);
+    wi.h264_sei_recovery_point.changing_slice_group_idc = sei_msg_ptr->changing_slice_group_idc;
+
+    // NOTE(review): this append is not guarded by #ifndef VBP, unlike most
+    // sibling handlers — confirm whether that is intentional.
+    if((pInfo->Is_first_frame_in_stream)||(!pInfo->is_current_workload_done)) //cur is first frame
+    {
+        viddec_pm_append_workitem( parent, &wi );
+    }
+    else
+    {
+        viddec_pm_append_workitem_next( parent, &wi );
+    }
+
+    return H264_STATUS_OK;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* Parse a dec_ref_pic_marking_repetition SEI (spec D.1.8 / D.2.8).
+ * Reads the original IDR/frame-num/field information, then replays the
+ * repeated dec_ref_pic_marking() syntax.  Note: the helper call at the
+ * end writes into pInfo->SliceHeader — this function has side effects on
+ * the current slice header state.
+ */
+h264_Status h264_sei_dec_ref_pic_marking_rep(void *parent,h264_Info* pInfo)
+{
+
+    h264_SEI_decoded_ref_pic_marking_repetition_t* sei_msg_ptr;
+    h264_SEI_decoded_ref_pic_marking_repetition_t sei_ref_pic;
+    uint32_t code;
+
+    sei_msg_ptr = (h264_SEI_decoded_ref_pic_marking_repetition_t *)(&sei_ref_pic);
+
+    viddec_pm_get_bits(parent, &code , 1);
+    sei_msg_ptr->original_idr_flag = (uint8_t)code;
+
+    sei_msg_ptr->original_frame_num = h264_GetVLCElement(parent, pInfo, false);
+
+    // Field flags exist only for streams that can contain field pictures.
+    if(!(pInfo->active_SPS.sps_disp.frame_mbs_only_flag))
+    {
+        viddec_pm_get_bits(parent, &code , 1);
+        sei_msg_ptr->orignal_field_pic_flag = (uint8_t)code;
+
+        if(sei_msg_ptr->orignal_field_pic_flag)
+        {
+            viddec_pm_get_bits(parent, &code , 1);
+            sei_msg_ptr->original_bottom_field_pic_flag = (uint8_t)code;
+        }
+    }
+    h264_Parse_Dec_Ref_Pic_Marking(parent, pInfo, &pInfo->SliceHeader);
+    return H264_STATUS_OK;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* spare_pic SEI is not supported: nothing is parsed; success is returned
+ * so the caller simply skips the payload. */
+h264_Status h264_sei_spare_pic(void *parent,h264_Info* pInfo)
+{
+    /* keep the common handler signature; both parameters are unused */
+    parent = parent;
+    pInfo = pInfo;
+
+    return H264_STATUS_OK;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* Parse a scene_info SEI: the values are read to advance the bitstream
+ * and then discarded. */
+h264_Status h264_sei_scene_info(void *parent,h264_Info* pInfo)
+{
+    h264_SEI_scene_info_t scene_info;
+    h264_SEI_scene_info_t *msg = &scene_info;
+    uint32_t code;
+
+    viddec_pm_get_bits(parent, &code, 1);
+    msg->scene_info_present_flag = (uint8_t)code;
+
+    /* No further fields when the present flag is clear. */
+    if(!msg->scene_info_present_flag)
+        return H264_STATUS_OK;
+
+    msg->scene_id = h264_GetVLCElement(parent, pInfo, false);
+    msg->scene_transitioning_type = h264_GetVLCElement(parent, pInfo, false);
+
+    /* Transition types above 3 carry a second scene id. */
+    if(msg->scene_transitioning_type > 3)
+    {
+        msg->second_scene_id = h264_GetVLCElement(parent, pInfo, false);
+    }
+
+    return H264_STATUS_OK;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* Parse a sub_seq_info SEI (spec D.1.11): layer/id plus four flags, with
+ * an optional sub_seq_frame_num.  Values are consumed and discarded. */
+h264_Status h264_sei_sub_seq_info(void *parent,h264_Info* pInfo)
+{
+
+    h264_SEI_sub_sequence_info_t* sei_msg_ptr;
+    h264_SEI_sub_sequence_info_t sei_sub_sequence_info;
+    uint32_t code;
+
+    sei_msg_ptr = (h264_SEI_sub_sequence_info_t *)(&sei_sub_sequence_info);
+
+    sei_msg_ptr->sub_seq_layer_num = h264_GetVLCElement(parent, pInfo,false);
+    sei_msg_ptr->sub_seq_id= h264_GetVLCElement(parent, pInfo,false);
+
+    viddec_pm_get_bits(parent, &code , 1);
+    sei_msg_ptr->first_ref_pic_flag = (uint8_t)code;
+
+    viddec_pm_get_bits(parent, &code , 1);
+    sei_msg_ptr->leading_non_ref_pic_flag = (uint8_t)code;
+
+    viddec_pm_get_bits(parent, &code , 1);
+    sei_msg_ptr->last_pic_flag = (uint8_t)code;
+
+    viddec_pm_get_bits(parent, &code , 1);
+    sei_msg_ptr->sub_seq_frame_num_flag = (uint8_t)code;
+
+
+    if(sei_msg_ptr->sub_seq_frame_num_flag)
+    {
+        sei_msg_ptr->sub_seq_frame_num = h264_GetVLCElement(parent, pInfo,false);
+    }
+    return H264_STATUS_OK;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* Parse a sub_seq_layer_characteristics SEI: per-layer statistics flag,
+ * average bit rate and frame rate.  Values are consumed and discarded.
+ * Returns H264_STATUS_SEI_ERROR when the layer count exceeds
+ * MAX_SUB_SEQ_LAYERS. */
+h264_Status h264_sei_sub_seq_layer(void *parent,h264_Info* pInfo)
+{
+
+    h264_SEI_sub_sequence_layer_t* sei_msg_ptr;
+    h264_SEI_sub_sequence_layer_t sei_sub_sequence_layer;
+    int32_t layer;
+    uint32_t code;
+
+    sei_msg_ptr = (h264_SEI_sub_sequence_layer_t *)(&sei_sub_sequence_layer);
+    sei_msg_ptr->num_sub_seq_layers_minus1 = h264_GetVLCElement(parent, pInfo,false);
+
+    // Bound check before indexing the fixed-size per-layer arrays.
+    if(sei_msg_ptr->num_sub_seq_layers_minus1 >= MAX_SUB_SEQ_LAYERS)
+    {
+        return H264_STATUS_SEI_ERROR;
+    }
+
+    for(layer = 0;layer <= sei_msg_ptr->num_sub_seq_layers_minus1; layer++)
+    {
+        viddec_pm_get_bits(parent, &code , 1);
+        sei_msg_ptr->accurate_statistics_flag[layer] = (uint8_t)code;
+
+        viddec_pm_get_bits(parent, &code , 16);
+        sei_msg_ptr->average_bit_rate[layer] = (uint16_t)code;
+
+        viddec_pm_get_bits(parent, &code , 16);
+        sei_msg_ptr->average_frame_rate[layer] = (uint16_t)code;
+
+    }
+
+    return H264_STATUS_OK;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* Parse a sub_seq_characteristics SEI: duration, average-rate statistics
+ * and the list of referenced sub-sequences.  Values are consumed and
+ * discarded.  Returns H264_STATUS_SEI_ERROR when the referenced
+ * sub-sequence count exceeds MAX_NUM_REF_SUBSEQS. */
+h264_Status h264_sei_sub_seq(void *parent,h264_Info* pInfo)
+{
+    int32_t n;
+    uint32_t code;
+
+    h264_SEI_sub_sequence_t* sei_msg_ptr;
+    h264_SEI_sub_sequence_t sei_sub_sequence;
+
+    sei_msg_ptr = (h264_SEI_sub_sequence_t *)(&sei_sub_sequence);
+
+    sei_msg_ptr->sub_seq_layer_num = h264_GetVLCElement(parent, pInfo, false);
+    sei_msg_ptr->sub_seq_id= h264_GetVLCElement(parent, pInfo, false);
+
+    viddec_pm_get_bits(parent, &code , 1);
+    sei_msg_ptr->duration_flag = (uint8_t)code;
+
+    if(sei_msg_ptr->duration_flag)
+    {
+        viddec_pm_get_bits(parent, (uint32_t *)&sei_msg_ptr->sub_seq_duration, 32);
+    }
+
+    viddec_pm_get_bits(parent, &code , 1);
+    sei_msg_ptr->average_rate_flag = (uint8_t)code;
+
+    if(sei_msg_ptr->average_rate_flag)
+    {
+        viddec_pm_get_bits(parent, &code , 1);
+        sei_msg_ptr->average_statistics_flag = (uint8_t)code;
+
+        // NOTE(review): 16 bits are read but cast to uint8_t for both rate
+        // fields (the per-layer variant stores uint16_t) — looks like a
+        // truncation; confirm the struct field types before changing.
+        viddec_pm_get_bits(parent, &code , 16);
+        sei_msg_ptr->average_bit_rate = (uint8_t)code;
+
+        viddec_pm_get_bits(parent, &code , 16);
+        sei_msg_ptr->average_frame_rate = (uint8_t)code;
+
+    }
+    sei_msg_ptr->num_referenced_subseqs = h264_GetVLCElement(parent, pInfo, false);
+    if(sei_msg_ptr->num_referenced_subseqs >= MAX_NUM_REF_SUBSEQS)
+    {
+        return H264_STATUS_SEI_ERROR;
+    }
+
+    for(n = 0; n < sei_msg_ptr->num_referenced_subseqs; n++)
+    {
+        sei_msg_ptr->ref_sub_seq_layer_num= h264_GetVLCElement(parent, pInfo, false);
+        sei_msg_ptr->ref_sub_seq_id= h264_GetVLCElement(parent, pInfo, false);
+
+        viddec_pm_get_bits(parent, &code , 1);
+        sei_msg_ptr->ref_sub_seq_direction = (uint8_t)code;
+    }
+    return H264_STATUS_OK;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* Parse a full_frame_freeze SEI: a single ue(v) repetition period.
+ * Records the freeze request in pInfo->sei_information so display can
+ * hold the current frame. */
+h264_Status h264_sei_full_frame_freeze(void *parent,h264_Info* pInfo)
+{
+
+    h264_SEI_full_frame_freeze_t* sei_msg_ptr;
+    h264_SEI_full_frame_freeze_t sei_full_frame_freeze;
+
+    sei_msg_ptr = (h264_SEI_full_frame_freeze_t *)(&sei_full_frame_freeze);
+
+    sei_msg_ptr->full_frame_freeze_repetition_period= h264_GetVLCElement(parent, pInfo, false);
+
+    // capture_POC flags that the freeze applies from the current picture.
+    pInfo->sei_information.capture_POC = 1;
+    pInfo->sei_information.freeze_rep_period = sei_msg_ptr->full_frame_freeze_repetition_period;
+    //pInfo->img.sei_freeze_this_image = 1;
+
+    return H264_STATUS_OK;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* full_frame_freeze_release SEI carries no payload fields to parse here;
+ * report success so the caller skips it. */
+h264_Status h264_sei_full_frame_freeze_release(void *parent,h264_Info* pInfo)
+{
+    /* keep the common handler signature; both parameters are unused */
+    parent = parent;
+    pInfo = pInfo;
+
+    return H264_STATUS_OK;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* Parse a full_frame_snapshot SEI: a single ue(v) snapshot_id, which is
+ * read to advance the bitstream and then discarded. */
+h264_Status h264_sei_full_frame_snapshot(void *parent,h264_Info* pInfo)
+{
+    h264_SEI_full_frame_snapshot_t snapshot;
+    h264_SEI_full_frame_snapshot_t *msg = &snapshot;
+
+    msg->snapshot_id = h264_GetVLCElement(parent, pInfo, false);
+    return H264_STATUS_OK;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* Parse a progressive_refinement_segment_start SEI: refinement id and
+ * step count, consumed and discarded.
+ * NOTE(review): the function name misspells "segment" ("segement");
+ * kept as-is since callers link against this symbol. */
+h264_Status h264_sei_progressive_segement_start(void *parent,h264_Info* pInfo)
+{
+
+    h264_SEI_progressive_segment_start_t* sei_msg_ptr;
+    h264_SEI_progressive_segment_start_t sei_progressive_segment_start;
+
+    sei_msg_ptr = (h264_SEI_progressive_segment_start_t *)(&sei_progressive_segment_start);
+
+    sei_msg_ptr->progressive_refinement_id= h264_GetVLCElement(parent, pInfo, false);
+    sei_msg_ptr->num_refinement_steps_minus1= h264_GetVLCElement(parent, pInfo, false);
+    return H264_STATUS_OK;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* Parse a progressive_refinement_segment_end SEI: one ue(v) refinement
+ * id, read to advance the bitstream and then discarded. */
+h264_Status h264_sei_progressive_segment_end(void *parent,h264_Info* pInfo)
+{
+    h264_SEI_progressive_segment_end_t seg_end;
+    h264_SEI_progressive_segment_end_t *msg = &seg_end;
+
+    msg->progressive_refinement_id = h264_GetVLCElement(parent, pInfo, false);
+    return H264_STATUS_OK;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* Parse a motion_constrained_slice_group_set SEI.  Values are consumed
+ * and discarded.  Returns H264_STATUS_SEI_ERROR when the slice-group
+ * count exceeds MAX_NUM_SLICE_GRPS. */
+h264_Status h264_sei_motion_constrained_slice_grp_set(void *parent, h264_Info* pInfo)
+{
+    int32_t i;
+    uint32_t code;
+    h264_SEI_motion_constrained_slice_group_t* sei_msg_ptr;
+    h264_SEI_motion_constrained_slice_group_t sei_motion_constrained_slice_group;
+
+    sei_msg_ptr = (h264_SEI_motion_constrained_slice_group_t *)(&sei_motion_constrained_slice_group);
+
+    sei_msg_ptr->num_slice_groups_in_set_minus1= h264_GetVLCElement(parent, pInfo, false);
+    if(sei_msg_ptr->num_slice_groups_in_set_minus1 >= MAX_NUM_SLICE_GRPS)
+    {
+        return H264_STATUS_SEI_ERROR;
+    }
+
+    // NOTE(review): slice_group_id is read as a fixed 1-bit field here; per
+    // the spec its width depends on num_slice_groups_minus1 from the active
+    // PPS — confirm this is only ever used with two slice groups.
+    for(i=0; i<= sei_msg_ptr->num_slice_groups_in_set_minus1; i++)
+    {
+        viddec_pm_get_bits(parent, &code , 1);
+        sei_msg_ptr->slice_group_id[i] = (uint8_t)code;
+    }
+    viddec_pm_get_bits(parent, &code , 1);
+    sei_msg_ptr->exact_sample_value_match_flag = (uint8_t)code;
+
+    viddec_pm_get_bits(parent, &code , 1);
+    sei_msg_ptr->pan_scan_rect_flag = (uint8_t)code;
+
+
+    if(sei_msg_ptr->pan_scan_rect_flag)
+    {
+        sei_msg_ptr->pan_scan_rect_id= h264_GetVLCElement(parent, pInfo, false);
+    }
+    return H264_STATUS_OK;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* film_grain_characteristics SEI is not supported: nothing is parsed;
+ * success is returned so the caller skips the payload. */
+h264_Status h264_sei_film_grain_characteristics(void *parent,h264_Info* pInfo)
+{
+    /* keep the common handler signature; both parameters are unused */
+    parent = parent;
+    pInfo = pInfo;
+
+    return H264_STATUS_OK;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* deblocking_filter_display_preference SEI is not supported: nothing is
+ * parsed; success is returned so the caller skips the payload. */
+h264_Status h264_sei_deblocking_filter_display_preferences(void *parent,h264_Info* pInfo)
+{
+    /* keep the common handler signature; both parameters are unused */
+    parent = parent;
+    pInfo = pInfo;
+
+    return H264_STATUS_OK;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* stereo_video_info SEI is not supported: nothing is parsed; success is
+ * returned so the caller skips the payload. */
+h264_Status h264_sei_stereo_video_info(void *parent,h264_Info* pInfo)
+{
+    /* keep the common handler signature; both parameters are unused */
+    parent = parent;
+    pInfo = pInfo;
+
+    return H264_STATUS_OK;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* Consume a reserved/unhandled SEI payload of payload_size bytes.
+ * Bytes are packed little-endian into 32-bit words (a relic of a
+ * user-data capture path whose stores are commented out); nothing is
+ * retained beyond locals.
+ *
+ * NOTE(review): this returns user_data_byte_index (a byte count) where
+ * every sibling handler returns an h264_Status — a caller comparing the
+ * result against H264_STATUS_OK will treat any non-empty reserved payload
+ * as an error.  Behavior is preserved here; confirm callers before
+ * changing the return value.
+ */
+h264_Status h264_sei_reserved_sei_message(void *parent, h264_Info* pInfo, uint32_t payload_size)
+{
+    int32_t k, byte_index, user_data_byte_index;
+    uint32_t i;
+    int32_t word, bits;
+    uint32_t user_data;
+
+    pInfo = pInfo;   // silence unused-parameter warning
+
+    byte_index = 0;
+    word = 0;
+    user_data_byte_index = 0x0;
+
+    for(i = 0, k = 0; i < payload_size; i++)
+    {
+        if(byte_index == 0) word = 0;
+        viddec_pm_get_bits(parent, (uint32_t *)&bits, 8);
+
+        // Pack each byte into the current word, least-significant byte first.
+        switch (byte_index)
+        {
+            case 1:
+                word = (bits << 8) | word;
+                break;
+            case 2:
+                word = (bits << 16) | word;
+                break;
+            case 3:
+                word = (bits << 24) | word;
+                break;
+            default :
+                word = bits;
+                break;
+        }
+
+        // A word completes every 4 bytes; k counts completed words.
+        if(byte_index == 3)
+        {
+            byte_index = 0;
+            user_data = word;
+            k++;
+        }
+        else
+        {
+            byte_index++;
+        }
+
+        user_data_byte_index++;
+        if ( user_data_byte_index == MAX_USER_DATA_SIZE)
+        {
+            // Capacity reached: restart accumulation (stores were removed).
+            byte_index = 0;
+            word = 0;
+            user_data_byte_index = 0x0;
+        }
+    }
+
+    // Capture any trailing partial word.
+    if(byte_index)
+        user_data = word;
+
+    // Fix: removed the unreachable `return H264_STATUS_OK;` that followed
+    // this statement in the original.
+    return user_data_byte_index;
+}
+
+////// TODO
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+h264_Status h264_SEI_payload(void *parent, h264_Info* pInfo, h264_sei_payloadtype payloadType, int32_t payloadSize)
+{
+ //int32_t bit_equal_to_zero;
+ h264_Status status = H264_STATUS_OK;
+
+ //removing warning
+ payloadSize = payloadSize;
+
+ switch(payloadType)
+ {
+ case SEI_BUF_PERIOD:
+ status = h264_sei_buffering_period(parent, pInfo);
+ break;
+ case SEI_PIC_TIMING:
+ status = h264_sei_pic_timing(parent, pInfo);
+ break;
+ case SEI_PAN_SCAN:
+ status = h264_sei_pan_scan(parent, pInfo);
+ break;
+ case SEI_FILLER_PAYLOAD:
+ status = h264_sei_filler_payload(parent, pInfo, payloadSize);
+ break;
+ case SEI_REG_USERDATA:
+ status = h264_sei_userdata_reg(parent, pInfo, payloadSize);
+ break;
+ case SEI_UNREG_USERDATA:
+ status = h264_sei_userdata_unreg(parent, pInfo, payloadSize);
+ break;
+ case SEI_RECOVERY_POINT:
+ h264_sei_recovery_point(parent, pInfo);
+ break;
+ case SEI_DEC_REF_PIC_MARKING_REP:
+ status = h264_sei_dec_ref_pic_marking_rep(parent, pInfo);
+ break;
+ case SEI_SPARE_PIC:
+ status = h264_sei_spare_pic(parent, pInfo);
+ break;
+ case SEI_SCENE_INFO:
+ status = h264_sei_scene_info(parent, pInfo);
+ break;
+ case SEI_SUB_SEQ_INFO:
+ status = h264_sei_sub_seq_info(parent, pInfo);
+ break;
+ case SEI_SUB_SEQ_LAYER:
+ status = h264_sei_sub_seq_layer(parent, pInfo);
+ break;
+ case SEI_SUB_SEQ:
+ status = h264_sei_sub_seq(parent, pInfo);
+ break;
+ case SEI_FULL_FRAME_FREEZE:
+ status = h264_sei_full_frame_freeze(parent, pInfo);
+ break;
+ case SEI_FULL_FRAME_FREEZE_RELEASE:
+ h264_sei_full_frame_freeze_release(parent, pInfo);
+ break;
+ case SEI_FULL_FRAME_SNAPSHOT:
+ status = h264_sei_full_frame_snapshot(parent, pInfo);
+ break;
+ case SEI_PROGRESSIVE_SEGMENT_START:
+ status = h264_sei_progressive_segement_start(parent, pInfo);
+ break;
+ case SEI_PROGRESSIVE_SEGMENT_END:
+ status = h264_sei_progressive_segment_end(parent, pInfo);
+ break;
+ case SEI_MOTION_CONSTRAINED_SLICE_GRP_SET:
+ status = h264_sei_motion_constrained_slice_grp_set(parent, pInfo);
+ break;
+ case SEI_FILM_GRAIN_CHARACTERISTICS:
+ status = h264_sei_film_grain_characteristics(parent, pInfo);
+ break;
+ case SEI_DEBLK_FILTER_DISPLAY_PREFERENCE:
+ status = h264_sei_deblocking_filter_display_preferences(parent, pInfo);
+ break;
+ case SEI_STEREO_VIDEO_INFO:
+ status = h264_sei_stereo_video_info(parent, pInfo);
+ break;
+ default:
+ status = h264_sei_reserved_sei_message(parent, pInfo, payloadSize);
+ break;
+ }
+
+/*
+ viddec_pm_get_bits(parent, (uint32_t *)&tmp, 1);
+
+ if(tmp == 0x1) // if byte is not aligned
+ {
+ while(pInfo->bitoff != 0)
+ {
+ viddec_pm_get_bits(parent, (uint32_t *)&bit_equal_to_zero, 1);
+ }
+ }
+*/
+ return status;
+}
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+h264_Status h264_Parse_Supplemental_Enhancement_Information_Message(void *parent, h264_Info* pInfo)
+{
+ h264_Status status = H264_STATUS_OK;
+ int32_t payload_type, payload_size;
+ uint32_t next_8_bits = 0,bits_offset=0,byte_offset = 0;
+ uint8_t is_emul = 0;
+ int32_t bits_operation_result = 0;
+
+ do {
+ //// payload_type
+ payload_type = 0;
+ viddec_pm_get_bits(parent, (uint32_t *)&next_8_bits, 8);
+ while (next_8_bits == 0xFF)
+ {
+ bits_operation_result = viddec_pm_get_bits(parent, (uint32_t *)&next_8_bits, 8);
+ if(-1 == bits_operation_result)
+ {
+ status = H264_STATUS_SEI_ERROR;
+ return status;
+ }
+ payload_type += 255;
+
+ }
+ //viddec_pm_get_bits(parent, (uint32_t *)&next_8_bits, 8);
+ payload_type += next_8_bits;
+
+ //// payload_size
+ payload_size = 0;
+ viddec_pm_get_bits(parent, (uint32_t *)&next_8_bits, 8);
+ while (next_8_bits == 0xFF)
+ {
+ payload_size += 255;
+ bits_operation_result = viddec_pm_get_bits(parent, (uint32_t *)&next_8_bits, 8);
+ if(-1 == bits_operation_result)
+ {
+ status = H264_STATUS_SEI_ERROR;
+ return status;
+ }
+ }
+ //viddec_pm_get_bits(parent, (uint32_t *)&next_8_bits, 8);
+ payload_size += next_8_bits;
+
+ //PRINTF(MFD_NONE, " SEI: payload type = %d, payload size = %d \n", payload_type, payload_size);
+
+
+ /////////////////////////////////
+ // Parse SEI payloads
+ /////////////////////////////////
+ status = h264_SEI_payload(parent, pInfo, payload_type, payload_size);
+ if(status != H264_STATUS_OK)
+ break;
+
+ viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul);
+ // OS_INFO("SEI byte_offset 3= %d, bits_offset=%d\n", byte_offset, bits_offset);
+
+ if(bits_offset!=0)
+ {
+ viddec_pm_get_bits(parent, (uint32_t *)&next_8_bits, 8-bits_offset);
+ }
+
+ bits_operation_result = viddec_pm_peek_bits(parent, (uint32_t *)&next_8_bits, 8);
+ if(-1 == bits_operation_result)
+ {
+ status = H264_STATUS_SEI_ERROR;
+ return status;
+ }
+
+ // OS_INFO("next_8_bits = %08x\n", next_8_bits);
+
+ }while(next_8_bits != 0x80);
+
+ //} while (h264_More_RBSP_Data(parent, pInfo) && status == H264_STATUS_OK);
+
+ return status;
+}
+
+#endif
+
diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sh.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sh.c
new file mode 100644
index 0000000..3134ae0
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sh.c
@@ -0,0 +1,740 @@
+//#define H264_PARSE_SLICE_HDR
+//#ifdef H264_PARSE_SLICE_HDR
+
+#include "h264.h"
+#include "h264parse.h"
+
+extern int32_t viddec_pm_get_au_pos(void *parent, uint32_t *bit, uint32_t *byte, unsigned char *is_emul);
+
+
+/*-----------------------------------------------------------------------------------------*/
+// Slice header 1----
+// 1) first_mb_in_slice, slice_type, pic_parameter_id
+/*-----------------------------------------------------------------------------------------*/
+h264_Status h264_Parse_Slice_Header_1(void *parent,h264_Info* pInfo, h264_Slice_Header_t *SliceHeader)
+{
+ h264_Status ret = H264_STATUS_ERROR;
+
+ //h264_Slice_Header_t* SliceHeader = &pInfo->SliceHeader;
+ int32_t slice_type =0;
+ uint32_t data =0;
+
+ do {
+ ///// first_mb_in_slice
+ SliceHeader->first_mb_in_slice = h264_GetVLCElement(parent, pInfo, false);
+
+ ///// slice_type
+ slice_type = h264_GetVLCElement(parent, pInfo, false);
+ SliceHeader->slice_type = (slice_type%5);
+
+ if(SliceHeader->slice_type > h264_PtypeI) {
+ ret = H264_STATUS_NOTSUPPORT;
+ break;
+ }
+
+
+ ////// pic_parameter_id
+ data = h264_GetVLCElement(parent, pInfo, false);
+ if(data > MAX_PIC_PARAMS) {
+ ret = H264_PPS_INVALID_PIC_ID;
+ break;
+ }
+ SliceHeader->pic_parameter_id = (uint8_t)data;
+ ret = H264_STATUS_OK;
+ }while(0);
+
+ return ret;
+}
+
+/*-----------------------------------------------------------------------------------------*/
+// slice header 2
+// frame_num
+// field_pic_flag, structure
+// idr_pic_id
+// pic_order_cnt_lsb, delta_pic_order_cnt_bottom
+/*-----------------------------------------------------------------------------------------*/
+
+h264_Status h264_Parse_Slice_Header_2(void *parent, h264_Info* pInfo, h264_Slice_Header_t *SliceHeader)
+{
+ h264_Status ret = H264_SliceHeader_ERROR;
+
+ //h264_Slice_Header_t* SliceHeader = &pInfo->SliceHeader;
+ uint32_t code;
+ int32_t max_mb_num=0;
+
+ do {
+ //////////////////////////////////// Slice header part 2//////////////////
+
+ /// Frame_num
+ viddec_pm_get_bits(parent, &code, pInfo->active_SPS.log2_max_frame_num_minus4+4);
+ SliceHeader->frame_num = (int32_t)code;
+
+ /// Picture structure
+ SliceHeader->structure = FRAME;
+ SliceHeader->field_pic_flag = 0;
+ SliceHeader->bottom_field_flag = 0;
+
+ if(!(pInfo->active_SPS.sps_disp.frame_mbs_only_flag))
+ {
+ /// field_pic_flag
+ viddec_pm_get_bits(parent, &code, 1);
+ SliceHeader->field_pic_flag = (uint8_t)code;
+
+ if(SliceHeader->field_pic_flag)
+ {
+ viddec_pm_get_bits(parent, &code, 1);
+ SliceHeader->bottom_field_flag = (uint8_t)code;
+
+ SliceHeader->structure = SliceHeader->bottom_field_flag? BOTTOM_FIELD: TOP_FIELD;
+ }
+ }
+
+ ////// Check valid or not of first_mb_in_slice
+ if(SliceHeader->structure == FRAME) {
+ max_mb_num = pInfo->img.FrameHeightInMbs * pInfo->img.PicWidthInMbs;
+ } else {
+ max_mb_num = pInfo->img.FrameHeightInMbs * pInfo->img.PicWidthInMbs/2;
+ }
+
+
+ ///if(pInfo->img.MbaffFrameFlag)
+ if(pInfo->active_SPS.sps_disp.mb_adaptive_frame_field_flag & (!(pInfo->SliceHeader.field_pic_flag))) {
+ SliceHeader->first_mb_in_slice <<=1;
+ }
+
+ if(SliceHeader->first_mb_in_slice >= max_mb_num)
+ break;
+
+
+ if(pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR)
+ {
+ SliceHeader->idr_pic_id = h264_GetVLCElement(parent, pInfo, false);
+ }
+
+ if(pInfo->active_SPS.pic_order_cnt_type == 0)
+ {
+ viddec_pm_get_bits(parent, &code , pInfo->active_SPS.log2_max_pic_order_cnt_lsb_minus4+4);
+ SliceHeader->pic_order_cnt_lsb = (uint32_t)code;
+
+
+ if((pInfo->active_PPS.pic_order_present_flag) && !(SliceHeader->field_pic_flag))
+ {
+ SliceHeader->delta_pic_order_cnt_bottom = h264_GetVLCElement(parent, pInfo, true);
+ }
+ else
+ {
+ SliceHeader->delta_pic_order_cnt_bottom = 0;
+ }
+ }
+
+ if((pInfo->active_SPS.pic_order_cnt_type == 1) && !(pInfo->active_SPS.delta_pic_order_always_zero_flag))
+ {
+ SliceHeader->delta_pic_order_cnt[0] = h264_GetVLCElement(parent, pInfo, true);
+ if((pInfo->active_PPS.pic_order_present_flag) && !(SliceHeader->field_pic_flag))
+ {
+ SliceHeader->delta_pic_order_cnt[1] = h264_GetVLCElement(parent, pInfo, true);
+ }
+ }
+
+ if(pInfo->active_PPS.redundant_pic_cnt_present_flag)
+ {
+ SliceHeader->redundant_pic_cnt = h264_GetVLCElement(parent, pInfo, false);
+ if(SliceHeader->redundant_pic_cnt > 127)
+ break;
+ } else {
+ SliceHeader->redundant_pic_cnt = 0;
+ }
+
+ ret = H264_STATUS_OK;
+ } while (0);
+
+ //////////// FMO is not supported curently, so comment out the following code
+ //if((pInfo->active_PPS.num_slice_groups_minus1 > 0) && (pInfo->active_PPS.slice_group_map_type >= 3) && (pInfo->active_PPS.slice_group_map_type <= 5) )
+ //{
+ // SliceHeader->slice_group_change_cycle = 0; //one of the variables is not known in the high profile
+ //}
+
+ return ret;
+}
+
+/*-----------------------------------------------------------------------------------------*/
+// slice header 3
+// (direct_spatial_mv_pred_flag, num_ref_idx, pic_list_reorder, PWT, ref_pic_remark, alpha, beta, etc)
+/*-----------------------------------------------------------------------------------------*/
+
/**
 * Parse slice header part 3: direct_spatial_mv_pred_flag, num_ref_idx
 * overrides, reference picture list reordering, prediction weight table,
 * dec_ref_pic_marking, CABAC init index, slice QP/QS deltas and deblocking
 * filter parameters.
 *
 * Side effect: when a prediction weight table is present, records its
 * byte/bit span into pInfo->h264_pwt_* so the HW accelerator can re-parse it.
 *
 * @return H264_STATUS_OK on success, H264_SliceHeader_ERROR if any value is
 *         out of range or a sub-parser fails.
 */
h264_Status h264_Parse_Slice_Header_3(void *parent, h264_Info* pInfo, h264_Slice_Header_t *SliceHeader)
{
    h264_Status ret = H264_SliceHeader_ERROR;

    //h264_Slice_Header_t* SliceHeader = &pInfo->SliceHeader;
    int32_t slice_alpha_c0_offset, slice_beta_offset;
    uint32_t code;
    uint32_t bits_offset = 0, byte_offset = 0;
    uint8_t is_emul = 0;

    do {
        /// direct_spatial_mv_pred_flag: present for B slices only.
        if (SliceHeader->slice_type == h264_PtypeB)
        {
            viddec_pm_get_bits(parent, &code, 1);
            SliceHeader->direct_spatial_mv_pred_flag = (uint8_t)code;
        }
        else
        {
            SliceHeader->direct_spatial_mv_pred_flag = 0;
        }

        //
        // Reset ref_idx counts from the active PPS, then override from the
        // bitstream if num_ref_idx_active_override_flag is set.
        //
        SliceHeader->num_ref_idx_l0_active = pInfo->active_PPS.num_ref_idx_l0_active;
        SliceHeader->num_ref_idx_l1_active = pInfo->active_PPS.num_ref_idx_l1_active;

        if ((SliceHeader->slice_type == h264_PtypeP) || (SliceHeader->slice_type == h264_PtypeSP) || (SliceHeader->slice_type == h264_PtypeB))
        {
            viddec_pm_get_bits(parent, &code, 1);
            SliceHeader->num_ref_idx_active_override_flag = (uint8_t)code;

            if (SliceHeader->num_ref_idx_active_override_flag)
            {
                // Coded as minus1, hence the +1.
                SliceHeader->num_ref_idx_l0_active = h264_GetVLCElement(parent, pInfo, false) + 1;
                if (SliceHeader->slice_type == h264_PtypeB)
                {
                    SliceHeader->num_ref_idx_l1_active = h264_GetVLCElement(parent, pInfo, false) + 1;
                }
            }
        }

        // List 1 only exists for B slices.
        if (SliceHeader->slice_type != h264_PtypeB) {
            SliceHeader->num_ref_idx_l1_active = 0;
        }

        if ((SliceHeader->num_ref_idx_l0_active > MAX_NUM_REF_FRAMES) || (SliceHeader->num_ref_idx_l1_active > MAX_NUM_REF_FRAMES))
        {
            break;
        }

        if (h264_Parse_Ref_Pic_List_Reordering(parent, pInfo, SliceHeader) != H264_STATUS_OK)
        {
            break;
        }


        ////
        //// Parse pred_weight_table but do not store it: the HW accelerator
        //// re-parses it, so only its bitstream span is recorded.
        ////
        if (((pInfo->active_PPS.weighted_pred_flag) && ((SliceHeader->slice_type == h264_PtypeP) || (SliceHeader->slice_type == h264_PtypeSP))) || ((pInfo->active_PPS.weighted_bipred_idc == 1) && (SliceHeader->slice_type == h264_PtypeB)))
        {
            // Record where the table starts...
            viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul);

            pInfo->h264_pwt_enabled = 1;
            pInfo->h264_pwt_start_byte_offset = byte_offset;
            pInfo->h264_pwt_start_bit_offset = bits_offset;

            if (h264_Parse_Pred_Weight_Table(parent, pInfo, SliceHeader) != H264_STATUS_OK)
            {
                break;
            }

            // ...and where it ends (normalize "0 bits into next byte" to
            // "8 bits into the previous byte" so the span is inclusive).
            viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul);

            if (0 == bits_offset)
            {
                pInfo->h264_pwt_end_byte_offset = byte_offset - 1;
                pInfo->h264_pwt_end_bit_offset = 8;
            }
            else
            {
                pInfo->h264_pwt_end_byte_offset = byte_offset;
                pInfo->h264_pwt_end_bit_offset = bits_offset;
            }

        }



        ////
        //// dec_ref_pic_marking() is present only for reference slices
        //// (nal_ref_idc != 0).
        ////
        if (SliceHeader->nal_ref_idc != 0)
        {
            if (h264_Parse_Dec_Ref_Pic_Marking(parent, pInfo, SliceHeader) != H264_STATUS_OK)
            {
                break;
            }
        }

        // cabac_init_idc: only for CABAC-coded, non-intra slices.
        if ((pInfo->active_PPS.entropy_coding_mode_flag) && (SliceHeader->slice_type != h264_PtypeI) && (SliceHeader->slice_type != h264_PtypeSI))
        {
            SliceHeader->cabac_init_idc = h264_GetVLCElement(parent, pInfo, false);
        }
        else
        {
            SliceHeader->cabac_init_idc = 0;
        }

        if (SliceHeader->cabac_init_idc > 2)   // spec range 0..2
        {
            break;
        }

        // slice_qp_delta: resulting QP must stay within [0, 51], hence the
        // bound check against pic_init_qp_minus26.
        SliceHeader->slice_qp_delta = h264_GetVLCElement(parent, pInfo, true);
        if ((SliceHeader->slice_qp_delta > (25 - pInfo->active_PPS.pic_init_qp_minus26)) || (SliceHeader->slice_qp_delta < -(26 + pInfo->active_PPS.pic_init_qp_minus26)))
            break;

        // SP/SI slices carry an additional QS delta (and SP a switch flag).
        if ((SliceHeader->slice_type == h264_PtypeSP) || (SliceHeader->slice_type == h264_PtypeSI))
        {
            if (SliceHeader->slice_type == h264_PtypeSP)
            {
                viddec_pm_get_bits(parent, &code, 1);
                SliceHeader->sp_for_switch_flag = (uint8_t)code;

            }
            SliceHeader->slice_qs_delta = h264_GetVLCElement(parent, pInfo, true);

            if ((SliceHeader->slice_qs_delta > (25 - pInfo->active_PPS.pic_init_qs_minus26)) || (SliceHeader->slice_qs_delta < -(26 + pInfo->active_PPS.pic_init_qs_minus26)))
                break;
        }

        // Deblocking filter controls; offsets are coded div2 and must land in
        // [-12, 12] after doubling.
        if (pInfo->active_PPS.deblocking_filter_control_present_flag)
        {
            SliceHeader->disable_deblocking_filter_idc = h264_GetVLCElement(parent, pInfo, false);
            if (SliceHeader->disable_deblocking_filter_idc != 1)
            {
                SliceHeader->slice_alpha_c0_offset_div2 = h264_GetVLCElement(parent, pInfo, true);
                slice_alpha_c0_offset = SliceHeader->slice_alpha_c0_offset_div2 << 1;
                if (slice_alpha_c0_offset < -12 || slice_alpha_c0_offset > 12) {
                    break;
                }

                SliceHeader->slice_beta_offset_div2 = h264_GetVLCElement(parent, pInfo, true);
                slice_beta_offset = SliceHeader->slice_beta_offset_div2 << 1;
                if (slice_beta_offset < -12 || slice_beta_offset > 12) {
                    break;
                }
            }
            else
            {
                SliceHeader->slice_alpha_c0_offset_div2 = 0;
                SliceHeader->slice_beta_offset_div2 = 0;
            }
        }

        ret = H264_STATUS_OK;
    } while (0);

    //////////// FMO is not supported curently, so comment out the following code
    //if((pInfo->active_PPS.num_slice_groups_minus1 > 0) && (pInfo->active_PPS.slice_group_map_type >= 3) && (pInfo->active_PPS.slice_group_map_type <= 5) )
    //{
    //   SliceHeader->slice_group_change_cycle = 0; //one of the variables is not known in the high profile
    //}

    return ret;
}
+
+
+/*--------------------------------------------------------------------------------------------------*/
+//
+// The syntax elements reordering_of_pic_nums_idc, abs_diff_pic_num_minus1, and long_term_pic_num
+// specify the change from the initial reference picture lists to the reference picture lists to be used
+// for decoding the slice
+
+// reordering_of_pic_nums_idc:
+// 0: abs_diff_pic_num_minus1 is present and corresponds to a difference to subtract from a picture number prediction value
+// 1: abs_diff_pic_num_minus1 is present and corresponds to a difference to add to a picture number prediction value
+// 2: long_term_pic_num is present and specifies the long-term picture number for a reference picture
+// 3: End loop for reordering of the initial reference picture list
+//
+/*--------------------------------------------------------------------------------------------------*/
+
/**
 * Parse ref_pic_list_reordering() for lists 0 and 1.
 *
 * reordering_of_pic_nums_idc semantics:
 *   0 - abs_diff_pic_num_minus1 follows; subtract from the prediction value
 *   1 - abs_diff_pic_num_minus1 follows; add to the prediction value
 *   2 - long_term_pic_num follows
 *   3 - end of the reordering loop
 *
 * @return H264_STATUS_OK, or H264_SliceHeader_ERROR if the reordering loop
 *         exceeds MAX_NUM_REF_FRAMES entries.
 *
 * NOTE(review): the guard `reorder > MAX_NUM_REF_FRAMES` still permits a
 * write at index MAX_NUM_REF_FRAMES — confirm the reordering arrays are
 * sized MAX_NUM_REF_FRAMES+1.
 */
h264_Status h264_Parse_Ref_Pic_List_Reordering(void *parent, h264_Info* pInfo, h264_Slice_Header_t *SliceHeader)
{
    //h264_Slice_Header_t* SliceHeader = &pInfo->SliceHeader;
    int32_t reorder = -1;
    uint32_t code;


    // List 0 reordering: present for all non-intra slices.
    if ((SliceHeader->slice_type != h264_PtypeI) && (SliceHeader->slice_type != h264_PtypeSI))
    {
        viddec_pm_get_bits(parent, &code, 1);
        SliceHeader->sh_refpic_l0.ref_pic_list_reordering_flag = (uint8_t)code;

        if (SliceHeader->sh_refpic_l0.ref_pic_list_reordering_flag)
        {

            reorder = -1;
            do
            {
                reorder++;

                if (reorder > MAX_NUM_REF_FRAMES)
                {
                    return H264_SliceHeader_ERROR;
                }

                SliceHeader->sh_refpic_l0.reordering_of_pic_nums_idc[reorder] = h264_GetVLCElement(parent, pInfo, false);
                if ((SliceHeader->sh_refpic_l0.reordering_of_pic_nums_idc[reorder] == 0) || (SliceHeader->sh_refpic_l0.reordering_of_pic_nums_idc[reorder] == 1))
                {
                    SliceHeader->sh_refpic_l0.list_reordering_num[reorder].abs_diff_pic_num_minus1 = h264_GetVLCElement(parent, pInfo, false);
                }
                else if (SliceHeader->sh_refpic_l0.reordering_of_pic_nums_idc[reorder] == 2)
                {
                    SliceHeader->sh_refpic_l0.list_reordering_num[reorder].long_term_pic_num = h264_GetVLCElement(parent, pInfo, false);
                }

            } while (SliceHeader->sh_refpic_l0.reordering_of_pic_nums_idc[reorder] != 3);   // idc==3 terminates the loop
        }
    }

    // List 1 reordering: B slices only; same encoding as list 0.
    if (SliceHeader->slice_type == h264_PtypeB)
    {
        viddec_pm_get_bits(parent, &code, 1);
        SliceHeader->sh_refpic_l1.ref_pic_list_reordering_flag = (uint8_t)code;

        if (SliceHeader->sh_refpic_l1.ref_pic_list_reordering_flag)
        {

            reorder = -1;
            do
            {
                reorder++;
                if (reorder > MAX_NUM_REF_FRAMES)
                {
                    return H264_SliceHeader_ERROR;
                }
                SliceHeader->sh_refpic_l1.reordering_of_pic_nums_idc[reorder] = h264_GetVLCElement(parent, pInfo, false);
                if ((SliceHeader->sh_refpic_l1.reordering_of_pic_nums_idc[reorder] == 0) || (SliceHeader->sh_refpic_l1.reordering_of_pic_nums_idc[reorder] == 1))
                {
                    SliceHeader->sh_refpic_l1.list_reordering_num[reorder].abs_diff_pic_num_minus1 = h264_GetVLCElement(parent, pInfo, false);
                }
                else if (SliceHeader->sh_refpic_l1.reordering_of_pic_nums_idc[reorder] == 2)
                {
                    SliceHeader->sh_refpic_l1.list_reordering_num[reorder].long_term_pic_num = h264_GetVLCElement(parent, pInfo, false);
                }
            } while (SliceHeader->sh_refpic_l1.reordering_of_pic_nums_idc[reorder] != 3);
        }
    }

    //currently just two reference frames but in case mroe than two, then should use an array for the above structures that is why reorder
    return H264_STATUS_OK;

}
+
+#ifdef VBP
/**
 * (VBP build) Parse pred_weight_table() and store the weights/offsets into
 * SliceHeader->sh_predwttbl. Unsignalled entries get the spec defaults:
 * weight = 1 << log2_weight_denom, offset = 0.
 *
 * NOTE(review): luma_weight_l0_flag / chroma_weight_l0_flag (and the l1
 * counterparts) are stored as scalars and overwritten on each loop
 * iteration — only the flag of the last reference index survives; confirm
 * downstream consumers don't need per-index flags.
 *
 * @return H264_STATUS_OK (bitstream read errors are not detected here).
 */
h264_Status h264_Parse_Pred_Weight_Table(void *parent, h264_Info* pInfo, h264_Slice_Header_t *SliceHeader)
{
    uint32_t i = 0, j = 0;
    uint32_t flag;

    SliceHeader->sh_predwttbl.luma_log2_weight_denom = h264_GetVLCElement(parent, pInfo, false);

    // chroma denominator only exists when chroma is present (not 4:0:0).
    if (pInfo->active_SPS.sps_disp.chroma_format_idc != 0)
    {
        SliceHeader->sh_predwttbl.chroma_log2_weight_denom = h264_GetVLCElement(parent, pInfo, false);
    }

    // List 0 weights: one entry per active reference index.
    for (i = 0; i < SliceHeader->num_ref_idx_l0_active; i++)
    {
        viddec_pm_get_bits(parent, (uint32_t *)&flag, 1);
        SliceHeader->sh_predwttbl.luma_weight_l0_flag = flag;

        if (SliceHeader->sh_predwttbl.luma_weight_l0_flag)
        {
            SliceHeader->sh_predwttbl.luma_weight_l0[i] = h264_GetVLCElement(parent, pInfo, true);
            SliceHeader->sh_predwttbl.luma_offset_l0[i] = h264_GetVLCElement(parent, pInfo, true);
        }
        else
        {
            // Defaults: identity weight, zero offset.
            SliceHeader->sh_predwttbl.luma_weight_l0[i] = (1 << SliceHeader->sh_predwttbl.luma_log2_weight_denom);
            SliceHeader->sh_predwttbl.luma_offset_l0[i] = 0;
        }

        if (pInfo->active_SPS.sps_disp.chroma_format_idc != 0)
        {
            viddec_pm_get_bits(parent, (uint32_t *)&flag, 1);
            SliceHeader->sh_predwttbl.chroma_weight_l0_flag = flag;

            if (SliceHeader->sh_predwttbl.chroma_weight_l0_flag)
            {
                // j indexes the two chroma components (Cb, Cr).
                for (j = 0; j < 2; j++)
                {
                    SliceHeader->sh_predwttbl.chroma_weight_l0[i][j] = h264_GetVLCElement(parent, pInfo, true);
                    SliceHeader->sh_predwttbl.chroma_offset_l0[i][j] = h264_GetVLCElement(parent, pInfo, true);
                }
            }
            else
            {
                for (j = 0; j < 2; j++)
                {
                    SliceHeader->sh_predwttbl.chroma_weight_l0[i][j] = (1 << SliceHeader->sh_predwttbl.chroma_log2_weight_denom);
                    SliceHeader->sh_predwttbl.chroma_offset_l0[i][j] = 0;
                }
            }
        }

    }

    // List 1 weights: B slices only; mirrors the list 0 layout.
    if (SliceHeader->slice_type == h264_PtypeB)
    {
        for (i = 0; i < SliceHeader->num_ref_idx_l1_active; i++)
        {
            viddec_pm_get_bits(parent, (uint32_t *)&flag, 1);
            SliceHeader->sh_predwttbl.luma_weight_l1_flag = flag;

            if (SliceHeader->sh_predwttbl.luma_weight_l1_flag)
            {
                SliceHeader->sh_predwttbl.luma_weight_l1[i] = h264_GetVLCElement(parent, pInfo, true);
                SliceHeader->sh_predwttbl.luma_offset_l1[i] = h264_GetVLCElement(parent, pInfo, true);
            }
            else
            {
                SliceHeader->sh_predwttbl.luma_weight_l1[i] = (1 << SliceHeader->sh_predwttbl.luma_log2_weight_denom);
                SliceHeader->sh_predwttbl.luma_offset_l1[i] = 0;
            }

            if (pInfo->active_SPS.sps_disp.chroma_format_idc != 0)
            {
                viddec_pm_get_bits(parent, (uint32_t *)&flag, 1);
                SliceHeader->sh_predwttbl.chroma_weight_l1_flag = flag;

                if (SliceHeader->sh_predwttbl.chroma_weight_l1_flag)
                {
                    for (j = 0; j < 2; j++)
                    {
                        SliceHeader->sh_predwttbl.chroma_weight_l1[i][j] = h264_GetVLCElement(parent, pInfo, true);
                        SliceHeader->sh_predwttbl.chroma_offset_l1[i][j] = h264_GetVLCElement(parent, pInfo, true);
                    }
                }
                else
                {
                    for (j = 0; j < 2; j++)
                    {
                        SliceHeader->sh_predwttbl.chroma_weight_l1[i][j] = (1 << SliceHeader->sh_predwttbl.chroma_log2_weight_denom);
                        SliceHeader->sh_predwttbl.chroma_offset_l1[i][j] = 0;
                    }
                }
            }

        }
    }

    return H264_STATUS_OK;
} ///// End of h264_Parse_Pred_Weight_Table
+
+#else
+
+/*--------------------------------------------------------------------------------------------------*/
+//
+// Parse Prediction weight table
+// Note: This table will be reparsed in HW Accelerator, so needn't keep it in parser
+//
+/*--------------------------------------------------------------------------------------------------*/
+
+
+h264_Status h264_Parse_Pred_Weight_Table(void *parent, h264_Info* pInfo,h264_Slice_Header_t *SliceHeader)
+{
+ uint32_t i =0, j=0;
+ uint32_t flag, val;
+ //h264_Slice_Header_t* SliceHeader = &pInfo->SPS.SliceHeader;
+
+ //SliceHeader->sh_predwttbl.luma_log2_weight_denom = h264_GetVLCElement(pInfo, false, "luma_log2_weight_denom");
+ val = h264_GetVLCElement(parent, pInfo, false);
+
+ if(pInfo->active_SPS.sps_disp.chroma_format_idc != 0)
+ {
+ //SliceHeader->sh_predwttbl.chroma_log2_weight_denom = h264_GetVLCElement(pInfo, false, "chroma_log2_weight_denom");
+ val = h264_GetVLCElement(parent,pInfo, false);
+ }
+
+ for(i=0; i< SliceHeader->num_ref_idx_l0_active; i++)
+ {
+ //SliceHeader->sh_predwttbl.luma_weight_l0_flag = h264_GetBits(pInfo, 1, "luma_weight_l0_flag");
+ viddec_pm_get_bits(parent, (uint32_t *)&flag, 1);
+
+ //if(SliceHeader->sh_predwttbl.luma_weight_l0_flag)
+ if(flag)
+ {
+ //SliceHeader->sh_predwttbl.luma_weight_l0[i] = h264_GetVLCElement(pInfo, true, "luma_weight_l0");
+ val = h264_GetVLCElement(parent, pInfo, true);
+ //SliceHeader->sh_predwttbl.luma_offset_l0[i] = h264_GetVLCElement(pInfo, true, "luma_offset_l0");
+ val = h264_GetVLCElement(parent, pInfo, true);
+ }
+ else
+ {
+ //SliceHeader->sh_predwttbl.luma_weight_l0[i] = (1 << SliceHeader->sh_predwttbl.luma_log2_weight_denom);
+ //SliceHeader->sh_predwttbl.luma_offset_l0[i] = 0;
+ }
+
+ if(pInfo->active_SPS.sps_disp.chroma_format_idc != 0)
+ {
+ //SliceHeader->sh_predwttbl.chroma_weight_l0_flag = h264_GetBits(pInfo, 1, "chroma_weight_l0_flag");
+ viddec_pm_get_bits(parent, (uint32_t *)&flag, 1);
+ if(flag)
+ {
+ for(j=0; j <2; j++)
+ {
+ //SliceHeader->sh_predwttbl.chroma_weight_l0[i][j] = h264_GetVLCElement(pInfo, true, "chroma_weight_l0");
+ val = h264_GetVLCElement(parent, pInfo, true);
+ //SliceHeader->sh_predwttbl.chroma_offset_l0[i][j] = h264_GetVLCElement(pInfo, true, "chroma_offset_l0");
+ val = h264_GetVLCElement(parent, pInfo, true);
+ }
+ }
+ else
+ {
+ for(j=0; j <2; j++)
+ {
+ //SliceHeader->sh_predwttbl.chroma_weight_l0[i][j] = (1 << SliceHeader->sh_predwttbl.chroma_log2_weight_denom);
+ //SliceHeader->sh_predwttbl.chroma_offset_l0[i][j] = 0;
+ }
+ }
+ }
+
+ }
+
+ if(SliceHeader->slice_type == h264_PtypeB)
+ {
+ for(i=0; i< SliceHeader->num_ref_idx_l1_active; i++)
+ {
+ //SliceHeader->sh_predwttbl.luma_weight_l1_flag = h264_GetBits(pInfo, 1, "luma_weight_l1_flag");
+ viddec_pm_get_bits(parent, (uint32_t *)&flag, 1);
+ if(flag)
+ {
+ //SliceHeader->sh_predwttbl.luma_weight_l1[i] = h264_GetVLCElement(pInfo, true, "luma_weight_l1");
+ val = h264_GetVLCElement(parent, pInfo, true);
+ //SliceHeader->sh_predwttbl.luma_offset_l1[i] = h264_GetVLCElement(pInfo, true, "luma_offset_l1");
+ val = h264_GetVLCElement(parent, pInfo, true);
+ }
+ else
+ {
+ //SliceHeader->sh_predwttbl.luma_weight_l1[i] = (1 << SliceHeader->sh_predwttbl.luma_log2_weight_denom);
+ //SliceHeader->sh_predwttbl.luma_offset_l1[i] = 0;
+ }
+
+ if(pInfo->active_SPS.sps_disp.chroma_format_idc != 0)
+ {
+ //SliceHeader->sh_predwttbl.chroma_weight_l1_flag = h264_GetBits(pInfo, 1, "chroma_weight_l1_flag");
+ viddec_pm_get_bits(parent, (uint32_t *)&flag, 1);
+ if(flag)
+ {
+ for(j=0; j <2; j++)
+ {
+ //SliceHeader->sh_predwttbl.chroma_weight_l1[i][j] = h264_GetVLCElement(pInfo, true, "chroma_weight_l1");
+ val = h264_GetVLCElement(parent, pInfo, true);
+ //SliceHeader->sh_predwttbl.chroma_offset_l1[i][j] = h264_GetVLCElement(pInfo, true, "chroma_offset_l1");
+ val = h264_GetVLCElement(parent, pInfo, true);
+ }
+ }
+ else
+ {
+ for(j=0; j <2; j++)
+ {
+ //SliceHeader->sh_predwttbl.chroma_weight_l1[i][j] = (1 << SliceHeader->sh_predwttbl.chroma_log2_weight_denom);
+ //SliceHeader->sh_predwttbl.chroma_offset_l1[i][j] = 0;
+ }
+ }
+ }
+
+ }
+ }
+
+ return H264_STATUS_OK;
+} ///// End of h264_Parse_Pred_Weight_Table
+
+#endif
+
+/*--------------------------------------------------------------------------------------------------*/
+// The syntax elements specify marking of the reference pictures.
+// 1)IDR: no_output_of_prior_pics_flag,
+// long_term_reference_flag,
+// 2)NonIDR: adaptive_ref_pic_marking_mode_flag,
+// memory_management_control_operation,
+// difference_of_pic_nums_minus1,
+// long_term_frame_idx,
+// long_term_pic_num, and
+// max_long_term_frame_idx_plus1
+//
+//The marking of a reference picture can be "unused for reference", "used for short-term reference", or "used for longterm
+// reference", but only one among these three.
+/*--------------------------------------------------------------------------------------------------*/
+
+
+h264_Status h264_Parse_Dec_Ref_Pic_Marking(void *parent, h264_Info* pInfo,h264_Slice_Header_t *SliceHeader)
+{
+ //h264_Slice_Header_t* SliceHeader = &pInfo->SliceHeader;
+ uint8_t i = 0;
+ uint32_t code = 0;
+
+ if(pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR)
+ {
+ viddec_pm_get_bits(parent, &code, 1);
+ SliceHeader->sh_dec_refpic.no_output_of_prior_pics_flag = (uint8_t)code;
+
+ viddec_pm_get_bits(parent, &code, 1);
+ SliceHeader->sh_dec_refpic.long_term_reference_flag = (uint8_t)code;
+ pInfo->img.long_term_reference_flag = (uint8_t)code;
+ }
+ else
+ {
+ viddec_pm_get_bits(parent, &code, 1);
+ SliceHeader->sh_dec_refpic.adaptive_ref_pic_marking_mode_flag = (uint8_t)code;
+
+ ///////////////////////////////////////////////////////////////////////////////////////
+ //adaptive_ref_pic_marking_mode_flag Reference picture marking mode specified
+ // 0 Sliding window reference picture marking mode: A marking mode
+ // providing a first-in first-out mechanism for short-term reference pictures.
+ // 1 Adaptive reference picture marking mode: A reference picture
+ // marking mode providing syntax elements to specify marking of
+ // reference pictures as �unused for reference?and to assign long-term
+ // frame indices.
+ ///////////////////////////////////////////////////////////////////////////////////////
+
+ if(SliceHeader->sh_dec_refpic.adaptive_ref_pic_marking_mode_flag)
+ {
+ do
+ {
+ SliceHeader->sh_dec_refpic.memory_management_control_operation[i] = h264_GetVLCElement(parent, pInfo, false);
+ if((SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 1) || (SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 3))
+ {
+ SliceHeader->sh_dec_refpic.difference_of_pic_num_minus1[i] = h264_GetVLCElement(parent, pInfo, false);
+ }
+
+ if(SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 2)
+ {
+ SliceHeader->sh_dec_refpic.long_term_pic_num[i] = h264_GetVLCElement(parent, pInfo, false);
+ }
+
+ if((SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 3) || (SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 6))
+ {
+ SliceHeader->sh_dec_refpic.long_term_frame_idx[i] = h264_GetVLCElement(parent, pInfo, false);
+ }
+
+ if(SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 4)
+ {
+ SliceHeader->sh_dec_refpic.max_long_term_frame_idx_plus1[i] = h264_GetVLCElement(parent, pInfo, false);
+ }
+
+ if(SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 5)
+ {
+ pInfo->img.curr_has_mmco_5 = 1;
+ }
+
+ if(i>NUM_MMCO_OPERATIONS) {
+ return H264_STATUS_ERROR;
+ }
+
+ }while(SliceHeader->sh_dec_refpic.memory_management_control_operation[i++] != 0);
+ }
+ }
+
+
+
+ SliceHeader->sh_dec_refpic.dec_ref_pic_marking_count = i;
+
+ return H264_STATUS_OK;
+}
+
+
+
+//#endif
diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sps.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sps.c
new file mode 100644
index 0000000..29ef54d
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sps.c
@@ -0,0 +1,513 @@
+//#define H264_PARSE_SPS_C
+//#ifdef H264_PARSE_SPS_C
+
+#include "h264.h"
+#include "h264parse.h"
+
+
+/// SPS extension unit (unit_type = 13)
+///
+#if 0
+h264_Status h264_Parse_SeqParameterSet_Extension(void *parent,h264_Info * pInfo)
+{
+ /*h264_SPS_Extension_RBSP_t* SPS_ext = pInfo->p_active_SPS_ext;
+
+ SPS_ext->seq_parameter_set_id = h264_GetVLCElement(pInfo, false);
+ if(SPS_ext->seq_parameter_set_id > MAX_SEQ_PARAMS-1)
+ {
+ return H264_SPS_ERROR;
+ }
+ SPS_ext->aux_format_idc = h264_GetVLCElement(pInfo, false);
+ if(SPS_ext->aux_format_idc > 3)
+ {
+ return H264_SPS_ERROR;
+ }
+ if(SPS_ext->aux_format_idc != 0)
+ {
+ SPS_ext->bit_depth_aux_minus8 = h264_GetVLCElement(pInfo, false);
+ if(SPS_ext->bit_depth_aux_minus8 + 8 > 12)
+ {
+ return H264_SPS_ERROR;
+ }
+
+ SPS_ext->alpha_incr_flag = h264_GetBits(pInfo, 1, "alpha_incr_flag");
+ if(SPS_ext->alpha_incr_flag > 1)
+ {
+ return H264_SPS_ERROR;
+ }
+
+ SPS_ext->alpha_opaque_value = h264_GetBits(pInfo,(SPS_ext->bit_depth_aux_minus8+8+1), "alpha_opaque_value"); //+8 to get the bit_depth value
+ SPS_ext->alpha_transparent_value = h264_GetBits(pInfo,(SPS_ext->bit_depth_aux_minus8+8+1), "alpha_transparent_value"); //+8 to get the bit_depth value
+ }
+ SPS_ext->additional_extension_flag = h264_GetBits(pInfo, 1, "additional_extension_flag");
+*/
+ return H264_STATUS_OK;
+}
+#endif
+
+
+h264_Status h264_Parse_HRD_Parameters(void *parent, h264_Info* pInfo, int nal_hrd,seq_param_set_used_ptr SPS, vui_seq_parameters_t_not_used_ptr pVUI_Seq_Not_Used)
+{
+ //seq_param_set_ptr SPS = pInfo->p_active_SPS;
+ int32_t i = 0;
+ uint32_t code;
+
+
+ if(nal_hrd)
+ {
+ SPS->sps_disp.vui_seq_parameters.nal_hrd_cpb_cnt_minus1 = h264_GetVLCElement(parent, pInfo, false);
+
+ if(SPS->sps_disp.vui_seq_parameters.nal_hrd_cpb_cnt_minus1 >= MAX_CPB_CNT)
+ {
+ return H264_SPS_ERROR;
+ }
+
+ viddec_pm_get_bits(parent, &code, 8);
+ pVUI_Seq_Not_Used->nal_hrd_bit_rate_scale = (uint8_t)(code>>4);
+ pVUI_Seq_Not_Used->nal_hrd_cpb_size_scale = (uint8_t)(code & 0xf);
+
+ for(i=0; i<=SPS->sps_disp.vui_seq_parameters.nal_hrd_cpb_cnt_minus1; i++)
+ {
+ pVUI_Seq_Not_Used->nal_hrd_parameters.bit_rate_value_minus1[i] = h264_GetVLCElement(parent, pInfo, false);
+ pVUI_Seq_Not_Used->nal_hrd_parameters.cpb_size_value_minus1[i] = h264_GetVLCElement(parent, pInfo, false);
+
+ viddec_pm_get_bits(parent, &code, 1);
+ pVUI_Seq_Not_Used->nal_hrd_parameters.cbr_flag[i] = (uint8_t)code;
+ }
+
+ if( viddec_pm_get_bits(parent, &code, 20) == -1)
+ return H264_SPS_ERROR;
+
+ SPS->sps_disp.vui_seq_parameters.nal_hrd_initial_cpb_removal_delay_length_minus1 = (uint8_t)((code>>15)&0x1f);
+ SPS->sps_disp.vui_seq_parameters.nal_hrd_cpb_removal_delay_length_minus1 = (uint8_t)((code>>10)&0x1f);;
+ SPS->sps_disp.vui_seq_parameters.nal_hrd_dpb_output_delay_length_minus1 = (uint8_t)((code>>5)&0x1f);;
+ SPS->sps_disp.vui_seq_parameters.nal_hrd_time_offset_length = (uint8_t)(code&0x1f);;
+
+ }
+ else
+ {
+ SPS->sps_disp.vui_seq_parameters.vcl_hrd_cpb_cnt_minus1 = h264_GetVLCElement(parent, pInfo, false);
+
+ if(SPS->sps_disp.vui_seq_parameters.vcl_hrd_cpb_cnt_minus1 >= MAX_CPB_CNT)
+ {
+ return H264_SPS_ERROR;
+ }
+
+ viddec_pm_get_bits(parent, &code, 8);
+ pVUI_Seq_Not_Used->vcl_hrd_bit_rate_scale = (uint8_t)(code>>4);
+ pVUI_Seq_Not_Used->vcl_hrd_cpb_size_scale = (uint8_t)(code&0xf);
+
+ for(i=0; i<=SPS->sps_disp.vui_seq_parameters.vcl_hrd_cpb_cnt_minus1; i++)
+ {
+ pVUI_Seq_Not_Used->vcl_hrd_parameters.bit_rate_value_minus1[i] = h264_GetVLCElement(parent, pInfo, false);
+ pVUI_Seq_Not_Used->vcl_hrd_parameters.cpb_size_value_minus1[i] = h264_GetVLCElement(parent, pInfo, false);
+ viddec_pm_get_bits(parent, &code, 1);
+ pVUI_Seq_Not_Used->vcl_hrd_parameters.cbr_flag[i] = (uint8_t)code;
+ }
+
+ if( viddec_pm_get_bits(parent, &code, 20) == -1)
+ return H264_SPS_ERROR;
+
+ SPS->sps_disp.vui_seq_parameters.vcl_hrd_initial_cpb_removal_delay_length_minus1 = (uint8_t)((code>>15)&0x1f);
+ SPS->sps_disp.vui_seq_parameters.vcl_hrd_cpb_removal_delay_length_minus1 = (uint8_t)((code>>10)&0x1f);;
+ SPS->sps_disp.vui_seq_parameters.vcl_hrd_dpb_output_delay_length_minus1 = (uint8_t)((code>>5)&0x1f);;
+ SPS->sps_disp.vui_seq_parameters.vcl_hrd_time_offset_length = (uint8_t)(code&0x1f);;
+ }
+
+ return H264_STATUS_OK;
+}
+
+
+
+h264_Status h264_Parse_Vui_Parameters(void *parent, h264_Info* pInfo, seq_param_set_used_ptr SPS, vui_seq_parameters_t_not_used_ptr pVUI_Seq_Not_Used)
+{
+ h264_Status ret = H264_STATUS_OK;
+ //seq_param_set_ptr SPS = pInfo->p_active_SPS;
+ int32_t nal_hrd = 0;
+ uint32_t code;
+
+ do {
+ viddec_pm_get_bits(parent, &code, 1);
+ SPS->sps_disp.vui_seq_parameters.aspect_ratio_info_present_flag = (uint8_t)code;
+
+
+ if(SPS->sps_disp.vui_seq_parameters.aspect_ratio_info_present_flag)
+ {
+ viddec_pm_get_bits(parent, &code, 8);
+ SPS->sps_disp.vui_seq_parameters.aspect_ratio_idc = (uint8_t)code;
+
+ if(SPS->sps_disp.vui_seq_parameters.aspect_ratio_idc == h264_AR_Extended_SAR)
+ {
+ viddec_pm_get_bits(parent, &code, 16);
+ SPS->sps_disp.vui_seq_parameters.sar_width = (uint16_t)code;
+
+ viddec_pm_get_bits(parent, &code, 16);
+ SPS->sps_disp.vui_seq_parameters.sar_height = (uint16_t)code;
+
+ }
+ }
+
+ viddec_pm_get_bits(parent, &code, 1);
+ pVUI_Seq_Not_Used->overscan_info_present_flag = (uint8_t)code;
+
+ if(pVUI_Seq_Not_Used->overscan_info_present_flag)
+ {
+ viddec_pm_get_bits(parent, &code, 1);
+ pVUI_Seq_Not_Used->overscan_appropriate_flag = (uint8_t)code;
+ }
+
+ viddec_pm_get_bits(parent, &code, 1);
+ SPS->sps_disp.vui_seq_parameters.video_signal_type_present_flag = (uint8_t)code;
+
+ if(SPS->sps_disp.vui_seq_parameters.video_signal_type_present_flag)
+ {
+ viddec_pm_get_bits(parent, &code, 3);
+ SPS->sps_disp.vui_seq_parameters.video_format = (uint8_t)code;
+
+ viddec_pm_get_bits(parent, &code, 1);
+ pVUI_Seq_Not_Used->video_full_range_flag = (uint8_t)code;
+
+ viddec_pm_get_bits(parent, &code, 1);
+ SPS->sps_disp.vui_seq_parameters.colour_description_present_flag = (uint8_t)code;
+
+ if(SPS->sps_disp.vui_seq_parameters.colour_description_present_flag)
+ {
+ viddec_pm_get_bits(parent, &code, 8);
+ SPS->sps_disp.vui_seq_parameters.colour_primaries = (uint8_t)code;
+
+ viddec_pm_get_bits(parent, &code, 8);
+ SPS->sps_disp.vui_seq_parameters.transfer_characteristics = (uint8_t)code;
+
+ viddec_pm_get_bits(parent, &code, 8);
+ pVUI_Seq_Not_Used->matrix_coefficients = (uint8_t)code;
+ }
+ }
+
+ viddec_pm_get_bits(parent, &code, 1);
+ pVUI_Seq_Not_Used->chroma_location_info_present_flag = (uint8_t)code;
+
+ if(pVUI_Seq_Not_Used->chroma_location_info_present_flag)
+ {
+ pVUI_Seq_Not_Used->chroma_sample_loc_type_top_field = h264_GetVLCElement(parent, pInfo, false);
+ pVUI_Seq_Not_Used->chroma_sample_loc_type_bottom_field = h264_GetVLCElement(parent, pInfo, false);
+ }
+
+ viddec_pm_get_bits(parent, &code, 1);
+ SPS->sps_disp.vui_seq_parameters.timing_info_present_flag = (uint8_t)code;
+
+ if(SPS->sps_disp.vui_seq_parameters.timing_info_present_flag == 1)
+ {
+ viddec_pm_get_bits(parent, &code, 32);
+ SPS->sps_disp.vui_seq_parameters.num_units_in_tick = (uint32_t)code;
+
+ viddec_pm_get_bits(parent, &code, 32);
+ SPS->sps_disp.vui_seq_parameters.time_scale = (uint32_t)code;
+
+ viddec_pm_get_bits(parent, &code, 1);
+ SPS->sps_disp.vui_seq_parameters.fixed_frame_rate_flag = (uint8_t)code;
+ }
+
+ viddec_pm_get_bits(parent, &code, 1);
+ SPS->sps_disp.vui_seq_parameters.nal_hrd_parameters_present_flag = (uint8_t)code;
+
+ if(SPS->sps_disp.vui_seq_parameters.nal_hrd_parameters_present_flag == 1)
+ {
+ nal_hrd = 1;
+ ret |= h264_Parse_HRD_Parameters(parent,pInfo, nal_hrd,SPS, pVUI_Seq_Not_Used);
+ }
+
+ viddec_pm_get_bits(parent, &code, 1);
+ SPS->sps_disp.vui_seq_parameters.vcl_hrd_parameters_present_flag = (uint8_t)code;
+
+ if(SPS->sps_disp.vui_seq_parameters.vcl_hrd_parameters_present_flag == 1)
+ {
+ nal_hrd = 0;
+ ret |= h264_Parse_HRD_Parameters(parent,pInfo, nal_hrd,SPS, pVUI_Seq_Not_Used);
+ }
+
+ if((SPS->sps_disp.vui_seq_parameters.nal_hrd_parameters_present_flag == 1) || (SPS->sps_disp.vui_seq_parameters.vcl_hrd_parameters_present_flag == 1))
+ {
+ viddec_pm_get_bits(parent, &code, 1);
+ SPS->sps_disp.vui_seq_parameters.low_delay_hrd_flag = (uint8_t)code;
+ }
+
+ viddec_pm_get_bits(parent, &code, 1);
+ SPS->sps_disp.vui_seq_parameters.pic_struct_present_flag = (uint8_t)code;
+
+ if(viddec_pm_get_bits(parent, &code, 1) == -1) {
+ ret = H264_STATUS_ERROR;
+ break;
+ }
+ SPS->sps_disp.vui_seq_parameters.bitstream_restriction_flag = (uint8_t)code;
+
+ if(SPS->sps_disp.vui_seq_parameters.bitstream_restriction_flag)
+ {
+ viddec_pm_get_bits(parent, &code, 1);
+ pVUI_Seq_Not_Used->motion_vectors_over_pic_boundaries_flag = (uint8_t)code;
+
+ pVUI_Seq_Not_Used->max_bytes_per_pic_denom = h264_GetVLCElement(parent, pInfo, false);
+ pVUI_Seq_Not_Used->max_bits_per_mb_denom = h264_GetVLCElement(parent, pInfo, false);
+ pVUI_Seq_Not_Used->log2_max_mv_length_horizontal = h264_GetVLCElement(parent, pInfo, false);
+ pVUI_Seq_Not_Used->log2_max_mv_length_vertical = h264_GetVLCElement(parent, pInfo, false);
+ SPS->sps_disp.vui_seq_parameters.num_reorder_frames = h264_GetVLCElement(parent, pInfo, false);
+ SPS->sps_disp.vui_seq_parameters.max_dec_frame_buffering = h264_GetVLCElement(parent, pInfo, false);
+
+ if(SPS->sps_disp.vui_seq_parameters.max_dec_frame_buffering == MAX_INT32_VALUE)
+ ret = H264_STATUS_ERROR;
+ }
+ }while (0);
+
+ return ret;
+}
+
+
+h264_Status h264_Parse_SeqParameterSet(void *parent,h264_Info * pInfo, seq_param_set_used_ptr SPS, vui_seq_parameters_t_not_used_ptr pVUI_Seq_Not_Used, int32_t* pOffset_ref_frame)
+{
+ h264_Status ret = H264_SPS_ERROR;
+
+ int32_t i = 0, tmp = 0;
+ int32_t PicWidthInMbs, PicHeightInMapUnits, FrameHeightInMbs;
+ uint32_t code = 0;
+ uint32_t data = 0;
+
+ //SPS->profile_idc = h264_GetBits(pInfo, 8, "Profile");
+ viddec_pm_get_bits(parent, &code, 8);
+ SPS->profile_idc = (uint8_t)code;
+
+ switch(SPS->profile_idc)
+ {
+ case h264_ProfileBaseline:
+ case h264_ProfileMain:
+ case h264_ProfileExtended:
+ case h264_ProfileHigh10:
+ case h264_ProfileHigh422:
+ case h264_ProfileHigh444:
+ case h264_ProfileHigh:
+ break;
+ default:
+ return H264_SPS_INVALID_PROFILE;
+ break;
+ }
+
+ //SPS->constraint_set0_flag = h264_GetBits(pInfo, 1, "constraint_set0_flag");
+ //SPS->constraint_set1_flag = h264_GetBits(pInfo, 1, "constraint_set1_flag"); //should be 1
+ //SPS->constraint_set2_flag = h264_GetBits(pInfo, 1, "constraint_set2_flag");
+ //SPS->constraint_set3_flag = h264_GetBits(pInfo, 1, "constraint_set3_flag");
+
+ viddec_pm_get_bits(parent, &code, 4);
+ SPS->constraint_set_flags = (uint8_t)code;
+
+ //// reserved_zero_4bits
+ viddec_pm_get_bits(parent, (uint32_t *)&code, 4);
+
+ viddec_pm_get_bits(parent, &code, 8);
+ SPS->level_idc = (uint8_t)code;
+
+ switch(SPS->level_idc)
+ {
+ case h264_Level1b:
+ case h264_Level1:
+ case h264_Level11:
+ case h264_Level12:
+ case h264_Level13:
+ case h264_Level2:
+ case h264_Level21:
+ case h264_Level22:
+ case h264_Level3:
+ case h264_Level31:
+ case h264_Level32:
+ case h264_Level4:
+ case h264_Level41:
+ case h264_Level42:
+ case h264_Level5:
+ case h264_Level51:
+ break;
+ default:
+ return H264_SPS_INVALID_LEVEL;
+ }
+
+ do {
+ SPS->seq_parameter_set_id = h264_GetVLCElement(parent, pInfo, false);
+
+ //// seq_parameter_set_id ---[0,31]
+ if(SPS->seq_parameter_set_id > MAX_NUM_SPS -1)
+ break;
+
+ if((SPS->profile_idc == h264_ProfileHigh) || (SPS->profile_idc == h264_ProfileHigh10) ||
+ (SPS->profile_idc == h264_ProfileHigh422) || (SPS->profile_idc == h264_ProfileHigh444) )
+ {
+ //// chroma_format_idc ---[0,3], currently we don't support 444, so [0,2]
+ data = h264_GetVLCElement(parent, pInfo, false);
+ if( data > H264_CHROMA_422)
+ break;
+ SPS->sps_disp.chroma_format_idc = (uint8_t)data;
+ //if(SPS->sps_disp.chroma_format_idc == H264_CHROMA_444) {}
+
+ //// bit_depth_luma_minus8 ---[0,4], -----only support 8-bit pixel
+ data = h264_GetVLCElement(parent, pInfo, false);
+ if( data)
+ break;
+ SPS->bit_depth_luma_minus8 = (uint8_t)data;
+
+ //// bit_depth_chroma_minus8 ---[0,4]
+ data = h264_GetVLCElement(parent, pInfo, false);
+ if( data )
+ break;
+ SPS->bit_depth_chroma_minus8 = (uint8_t)data;
+
+
+ viddec_pm_get_bits(parent, &code, 1);
+ SPS->lossless_qpprime_y_zero_flag = (uint8_t)code;
+
+ viddec_pm_get_bits(parent, &code, 1);
+ SPS->seq_scaling_matrix_present_flag = (uint8_t)code;
+
+ if(SPS->seq_scaling_matrix_present_flag == 1)
+ {
+ //int n_ScalingList = (SPS->sps_disp.chroma_format_idc != H264_CHROMA_444) ? 8 : 12;
+ int n_ScalingList = 8; /// We do not support 444 currrently
+
+ for(i=0; i<n_ScalingList; i++)
+ {
+ viddec_pm_get_bits(parent, &code, 1);
+ SPS->seq_scaling_list_present_flag[i] = (uint8_t)code;
+
+ if(SPS->seq_scaling_list_present_flag[i])
+ {
+ if(i<6)
+ h264_Scaling_List(parent, SPS->ScalingList4x4[i], 16, &SPS->UseDefaultScalingMatrix4x4Flag[i], pInfo);
+ else
+ h264_Scaling_List(parent, SPS->ScalingList8x8[i-6], 64, &SPS->UseDefaultScalingMatrix8x8Flag[i-6], pInfo);
+ }
+ }
+ }
+ }
+ else
+ {
+ SPS->sps_disp.chroma_format_idc = 1;
+ SPS->seq_scaling_matrix_present_flag = 0;
+
+ SPS->bit_depth_luma_minus8 = 0;
+ SPS->bit_depth_chroma_minus8 = 0;
+ //h264_SetDefaultScalingLists(pInfo);
+ }
+
+ //// log2_max_frame_num_minus4 ---[0,12]
+ data = (h264_GetVLCElement(parent, pInfo, false));
+ if( data > 12)
+ break;
+ SPS->log2_max_frame_num_minus4 = (uint8_t)data;
+
+ //// pic_order_cnt_type ---- [0,2]
+ data = h264_GetVLCElement(parent, pInfo, false);
+ if( data > 2)
+ break;
+ SPS->pic_order_cnt_type = (uint8_t)data;
+
+
+ SPS->expectedDeltaPerPOCCycle = 0;
+ if(SPS->pic_order_cnt_type == 0) {
+ SPS->log2_max_pic_order_cnt_lsb_minus4 = h264_GetVLCElement(parent, pInfo, false);
+ } else if(SPS->pic_order_cnt_type == 1){
+ viddec_pm_get_bits(parent, &code, 1);
+ SPS->delta_pic_order_always_zero_flag = (uint8_t)code;
+
+ SPS->offset_for_non_ref_pic = h264_GetVLCElement(parent, pInfo, true);
+ SPS->offset_for_top_to_bottom_field = h264_GetVLCElement(parent, pInfo, true);
+
+ //// num_ref_frames_in_pic_order_cnt_cycle ---- [0,255]
+ data = h264_GetVLCElement(parent, pInfo, false);
+ if( data > 255)
+ break;
+ SPS->num_ref_frames_in_pic_order_cnt_cycle = (uint8_t)data;
+
+
+ //Alloc memory for frame offset -- FIXME
+ for(i=0; i< SPS->num_ref_frames_in_pic_order_cnt_cycle; i++)
+ {
+ /////SPS->offset_for_ref_frame[i] could be removed from SPS
+#ifndef USER_MODE
+ tmp = h264_GetVLCElement(parent, pInfo, true);
+ pOffset_ref_frame[i]=tmp;
+ SPS->expectedDeltaPerPOCCycle += tmp;
+#else
+ tmp = h264_GetVLCElement(parent, pInfo, true);
+ SPS->offset_for_ref_frame[i]=tmp;
+ SPS->expectedDeltaPerPOCCycle += tmp;
+#endif
+ }
+ }
+
+ //// num_ref_frames ---[0,16]
+ data = h264_GetVLCElement(parent, pInfo, false);
+ if( data > 16)
+ break;
+ SPS->num_ref_frames = (uint8_t)data;
+
+ viddec_pm_get_bits(parent, &code, 1);
+ SPS->gaps_in_frame_num_value_allowed_flag = (uint8_t)code;
+
+
+ SPS->sps_disp.pic_width_in_mbs_minus1 = h264_GetVLCElement(parent, pInfo, false);
+ SPS->sps_disp.pic_height_in_map_units_minus1 = h264_GetVLCElement(parent, pInfo, false);
+ viddec_pm_get_bits(parent, &code, 1);
+ SPS->sps_disp.frame_mbs_only_flag = (uint8_t)code;
+
+ /// err check for size
+ PicWidthInMbs = (SPS->sps_disp.pic_width_in_mbs_minus1 + 1);
+ PicHeightInMapUnits = (SPS->sps_disp.pic_height_in_map_units_minus1 + 1);
+ FrameHeightInMbs = SPS->sps_disp.frame_mbs_only_flag? PicHeightInMapUnits: (PicHeightInMapUnits<<1);
+ if((PicWidthInMbs < 2) || (PicWidthInMbs > 128) || (FrameHeightInMbs < 2) || (FrameHeightInMbs>128))
+ break;
+
+ if(!SPS->sps_disp.frame_mbs_only_flag)
+ {
+ viddec_pm_get_bits(parent, &code, 1);
+ SPS->sps_disp.mb_adaptive_frame_field_flag = (uint8_t)code;
+ }
+
+ //SPS->frame_height_in_mbs = (2-SPS->sps_disp.frame_mbs_only_flag)*(SPS->sps_disp.pic_height_in_map_units_minus1+1);
+ //SPS->pic_size_in_map_units = (SPS->sps_disp.pic_width_in_mbs_minus1+1)*SPS->sps_disp.frame_height_in_mbs;
+
+ viddec_pm_get_bits(parent, &code, 1);
+ SPS->sps_disp.direct_8x8_inference_flag = (uint8_t)code;
+
+ viddec_pm_get_bits(parent, &code, 1);
+ SPS->sps_disp.frame_cropping_flag = (uint8_t)code;
+
+ if(SPS->sps_disp.frame_cropping_flag)
+ {
+ SPS->sps_disp.frame_crop_rect_left_offset = h264_GetVLCElement(parent, pInfo, false);
+ SPS->sps_disp.frame_crop_rect_right_offset = h264_GetVLCElement(parent, pInfo, false);
+ SPS->sps_disp.frame_crop_rect_top_offset = h264_GetVLCElement(parent, pInfo, false);
+ SPS->sps_disp.frame_crop_rect_bottom_offset = h264_GetVLCElement(parent, pInfo, false);
+ }
+
+ //// when frame_mbs_only_flag is equal to 0, direct_8x8_inference_flag shall be equal to 1
+ if (SPS->sps_disp.frame_mbs_only_flag == 0 && SPS->sps_disp.direct_8x8_inference_flag == 0){
+ break;
+ }
+
+ ////// vui_parameters
+ if(viddec_pm_get_bits(parent, &code, 1) == -1)
+ break;
+ SPS->sps_disp.vui_parameters_present_flag = (uint8_t)code;
+ ret = H264_STATUS_OK;
+
+ if(SPS->sps_disp.vui_parameters_present_flag)
+ {
+#ifndef VBP // Ignore VUI parsing result
+ ret =
+#endif
+ h264_Parse_Vui_Parameters(parent, pInfo, SPS, pVUI_Seq_Not_Used);
+ }
+
+ }while(0);
+
+ //h264_Parse_rbsp_trailing_bits(pInfo);
+
+ return ret;
+}
+
+//#endif
+
diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/mix_vbp_h264_stubs.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/mix_vbp_h264_stubs.c
new file mode 100644
index 0000000..87959f3
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/mix_vbp_h264_stubs.c
@@ -0,0 +1,575 @@
+#include "viddec_fw_debug.h"
+#include "viddec_parser_ops.h"
+#include "h264.h"
+#include "h264parse.h"
+#include "viddec_fw_item_types.h"
+#include "h264parse_dpb.h"
+#include <glib.h>
+
+extern void* h264_memcpy( void* dest, void* src, uint32_t num );
+
+uint32_t cp_using_dma(uint32_t ddr_addr, uint32_t local_addr, uint32_t size, char to_ddr, char swap)
+{
+ if (swap != 0)
+ {
+ g_warning("swap copying is not implemented.");
+ }
+
+ if (to_ddr)
+ {
+ memcpy((void*)ddr_addr, (void*)local_addr, size);
+ }
+ else
+ {
+ memcpy((void*)local_addr, (void*)ddr_addr, size);
+ }
+
+ return (0);
+}
+
+#if 0
+void h264_parse_emit_start_new_frame( void *parent, h264_Info *pInfo )
+{
+
+ if(pInfo->Is_first_frame_in_stream) //new stream, fill new frame in cur
+ {
+
+ pInfo->img.g_new_frame = 0;
+ pInfo->Is_first_frame_in_stream =0;
+ pInfo->push_to_cur = 1;
+
+ }
+ else // move to next for new frame
+ {
+ pInfo->push_to_cur = 0;
+ }
+
+
+
+ //fill dpb management info
+
+
+
+
+ pInfo->dpb.frame_numbers_need_to_be_displayed =0;
+ pInfo->dpb.frame_numbers_need_to_be_removed =0;
+ pInfo->dpb.frame_numbers_need_to_be_allocated =0;
+
+
+}
+
+void h264_parse_emit_eos( void *parent, h264_Info *pInfo )
+{
+ ////
+ //// Now we can flush out all frames in DPB for display
+ if(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].is_used != 3)
+ {
+ h264_dpb_mark_dangling_field(&pInfo->dpb, pInfo->dpb.fs_dec_idc); //, DANGLING_TYPE_GAP_IN_FRAME
+ }
+
+ h264_dpb_store_previous_picture_in_dpb(pInfo, 0,0);
+ h264_dpb_flush_dpb(pInfo, 1, 0, pInfo->active_SPS.num_ref_frames);
+
+
+ pInfo->dpb.frame_numbers_need_to_be_displayed =0;
+ pInfo->dpb.frame_numbers_need_to_be_removed =0;
+
+}
+
+void h264_parse_emit_current_pic( void *parent, h264_Info *pInfo )
+{
+ pInfo->qm_present_list=0;
+}
+
+void h264_parse_emit_current_slice( void *parent, h264_Info *pInfo )
+{
+#if 1
+ uint32_t i, nitems=0;
+
+
+ if( (h264_PtypeB==pInfo->SliceHeader.slice_type)||(h264_PtypeP==pInfo->SliceHeader.slice_type) )
+ {
+ if(pInfo->SliceHeader.sh_refpic_l0.ref_pic_list_reordering_flag)
+ {
+ nitems = pInfo->SliceHeader.num_ref_idx_l0_active;
+
+ for(i=0; i<nitems;i++)
+ {
+ if(viddec_h264_get_is_non_existent(&(pInfo->dpb.fs[pInfo->slice_ref_list0[i]&0x1f]))==0)
+ {
+ pInfo->h264_list_replacement = (pInfo->slice_ref_list0[i]&0xFF)|0x80;
+ break;
+ }
+ }
+ }
+ else
+ {
+ nitems = pInfo->dpb.listXsize[0];
+
+ for(i=0; i<nitems;i++)
+ {
+ if(viddec_h264_get_is_non_existent(&(pInfo->dpb.fs[pInfo->dpb.listX_0[i]&0x1f]))==0)
+ {
+ pInfo->h264_list_replacement = (pInfo->dpb.listX_0[i]&0xFF)|0x80;
+ break;
+ }
+ }
+ }
+
+ }
+ else
+ {
+ nitems =0;
+ }
+#endif
+}
+#else
+
+
+void h264_parse_emit_current_slice( void *parent, h264_Info *pInfo )
+{
+
+ viddec_workload_item_t wi;
+ h264_slice_data slice_data;
+
+ uint32_t i=0, nitems=0, data=0;
+ uint32_t bits_offset =0, byte_offset =0;
+ uint8_t is_emul =0;
+
+
+ ////////////////////// Update Reference list //////////////////
+ if( (h264_PtypeB==pInfo->SliceHeader.slice_type)||(h264_PtypeP==pInfo->SliceHeader.slice_type) )
+ {
+ if(pInfo->SliceHeader.sh_refpic_l0.ref_pic_list_reordering_flag)
+ {
+ nitems = pInfo->SliceHeader.num_ref_idx_l0_active;
+
+ for(i=0; i<nitems;i++)
+ {
+ if(viddec_h264_get_is_non_existent(&(pInfo->dpb.fs[pInfo->slice_ref_list0[i]&0x1f]))==0)
+ {
+ pInfo->h264_list_replacement = (pInfo->slice_ref_list0[i]&0xFF)|0x80;
+ break;
+ }
+ }
+ }
+ else
+ {
+ nitems = pInfo->dpb.listXsize[0];
+
+ for(i=0; i<nitems;i++)
+ {
+ if(viddec_h264_get_is_non_existent(&(pInfo->dpb.fs[pInfo->dpb.listX_0[i]&0x1f]))==0)
+ {
+ pInfo->h264_list_replacement = (pInfo->dpb.listX_0[i]&0xFF)|0x80;
+ break;
+ }
+ }
+ }
+
+ }
+ else
+ {
+ nitems =0;
+ }
+ /////file ref list 0
+ // h264_parse_emit_ref_list(parent, pInfo, 0);
+
+ /////file ref list 1
+ //h264_parse_emit_ref_list(parent, pInfo, 1);
+
+ ///////////////////////////////////// Slice Data ////////////////////////////////
+ // h264_fill_slice_data(pInfo, &slice_data);
+
+ wi.vwi_type = VIDDEC_WORKLOAD_H264_SLICE_REG;
+
+ wi.data.data_offset = slice_data.h264_bsd_slice_start;
+ wi.data.data_payload[0] = slice_data.h264_bsd_slice_p1;
+ wi.data.data_payload[1] = slice_data.h264_bsd_slice_p2;
+
+ if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+ {
+ // viddec_pm_append_workitem( parent , &wi);
+ }
+ else
+ {
+ // viddec_pm_append_workitem_next( parent , &wi);
+ }
+
+
+ ///////////////////////////predict weight table item and data if have///////////////////////////
+ if(pInfo->h264_pwt_enabled)
+ {
+ wi.vwi_type = VIDDEC_WORKLOAD_H264_PWT_BITS_OFFSET;
+ wi.data.data_offset = pInfo->h264_pwt_end_byte_offset- pInfo->h264_pwt_start_byte_offset+1;
+ wi.data.data_payload[0] = pInfo->h264_pwt_start_bit_offset;
+ wi.data.data_payload[1] = pInfo->h264_pwt_end_bit_offset;
+
+ if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+ {
+ // viddec_pm_append_workitem( parent , &wi);
+
+ wi.vwi_type = VIDDEC_WORKLOAD_H264_PWT_ES_BYTES;
+ wi.es.es_flags = 0;
+ // viddec_pm_append_misc_tags(parent, pInfo->h264_pwt_start_byte_offset, pInfo->h264_pwt_end_byte_offset,&wi,1);
+ }
+ else
+ {
+ // viddec_pm_append_workitem_next( parent , &wi);
+
+ wi.vwi_type = VIDDEC_WORKLOAD_H264_PWT_ES_BYTES;
+ wi.es.es_flags = 0;
+ // viddec_pm_append_misc_tags(parent, pInfo->h264_pwt_start_byte_offset, pInfo->h264_pwt_end_byte_offset,&wi,0);
+ }
+ }
+
+
+ ////////////////////////////////// Update ES Buffer for Slice ///////////////////////
+ viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul);
+
+ //OS_INFO("DEBUG---entropy_coding_mode_flag:%d, bits_offset: %d\n", pInfo->active_PPS.entropy_coding_mode_flag, bits_offset);
+
+ if(pInfo->active_PPS.entropy_coding_mode_flag)
+ {
+ if(0!=bits_offset) {
+ data = data; // fix compilation warning
+ // don't skip byte-aligned bits as those bits are actually
+ // part of slice_data
+ //viddec_pm_get_bits(parent, &data, 8-bits_offset);
+ }
+ }
+ else
+ {
+ if(0!=bits_offset) {
+ wi.vwi_type = VIDDEC_WORKLOAD_H264_SH_BITS_OFFSET;
+ wi.data.data_offset = bits_offset;
+ wi.data.data_payload[0]=0;
+ wi.data.data_payload[1]=0;
+
+ if(pInfo->push_to_cur) { //cur is empty, fill new frame in cur
+ // viddec_pm_append_workitem( parent , &wi);
+ }
+ else {
+ //viddec_pm_append_workitem_next( parent , &wi);
+ }
+ }
+ }
+
+ if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+ {
+ //viddec_pm_append_pixeldata( parent );
+ }
+ else
+ {
+ //viddec_pm_append_pixeldata_next( parent);
+ }
+
+ return;
+}
+
+
+void h264_parse_emit_current_pic( void *parent, h264_Info *pInfo )
+{
+
+ viddec_workload_item_t wi;
+
+ const uint32_t *pl;
+ uint32_t i=0,nitems=0;
+
+ h264_pic_data pic_data;
+
+ pInfo->qm_present_list=0;
+
+ //h264_parse_emit_4X4_scaling_matrix(parent, pInfo);
+ // h264_parse_emit_8X8_scaling_matrix(parent, pInfo);
+
+ // h264_fill_pic_data(pInfo, &pic_data);
+
+ // How many payloads must be generated
+ nitems = (sizeof(h264_pic_data) + 7) / 8; // In QWORDs rounded up
+
+ pl = (const uint32_t *) &pic_data;
+
+ // Dump slice data to an array of workitems, to do pl access non valid mem
+ for( i = 0; i < nitems; i++ )
+ {
+ wi.vwi_type = VIDDEC_WORKLOAD_H264_PIC_REG;
+ wi.data.data_offset = (unsigned int)pl - (unsigned int)&pic_data; // offset within struct
+ wi.data.data_payload[0] = pl[0];
+ wi.data.data_payload[1] = pl[1];
+ pl += 2;
+
+ if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+ {
+
+ // viddec_pm_append_workitem( parent, &wi );
+ }
+ else
+ {
+ //viddec_pm_append_workitem_next( parent, &wi );
+ }
+ }
+
+ return;
+}
+
+void h264_parse_emit_start_new_frame( void *parent, h264_Info *pInfo )
+{
+
+ viddec_workload_item_t wi;
+ uint32_t i=0,nitems=0;
+
+ ///////////////////////// Frame attributes//////////////////////////
+
+ //Push data into current workload if first frame or frame_boundary already detected by non slice nal
+ if( (pInfo->Is_first_frame_in_stream)||(pInfo->is_frame_boundary_detected_by_non_slice_nal))
+ {
+ //viddec_workload_t *wl_cur = viddec_pm_get_header( parent );
+ //pInfo->img.g_new_frame = 0;
+ pInfo->Is_first_frame_in_stream =0;
+ pInfo->is_frame_boundary_detected_by_non_slice_nal=0;
+ pInfo->push_to_cur = 1;
+ //h264_translate_parser_info_to_frame_attributes(wl_cur, pInfo);
+ }
+ else // move to cur if frame boundary detected by previous non slice nal, or move to next if not
+ {
+ //viddec_workload_t *wl_next = viddec_pm_get_next_header (parent);
+
+ pInfo->push_to_cur = 0;
+ //h264_translate_parser_info_to_frame_attributes(wl_next, pInfo);
+
+ pInfo->is_current_workload_done=1;
+ }
+
+ ///////////////////// SPS/////////////////////
+ // h264_parse_emit_sps(parent, pInfo);
+
+ /////////////////////display frames/////////////////////
+ nitems = pInfo->dpb.frame_numbers_need_to_be_displayed;
+
+ for(i=0; i<nitems; i++)
+ {
+ wi.vwi_type = VIDDEC_WORKLOAD_REF_FRAME_DISPLAY_0 + pInfo->dpb.frame_id_need_to_be_displayed[i];
+ wi.ref_frame.reference_id = pInfo->dpb.frame_id_need_to_be_displayed[i];
+ wi.ref_frame.luma_phys_addr = 0;
+ wi.ref_frame.chroma_phys_addr = 0;
+
+ if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+ {
+ // viddec_pm_append_workitem( parent, &wi );
+ }
+ else
+ {
+ // viddec_pm_append_workitem_next( parent, &wi );
+ }
+ }
+ pInfo->dpb.frame_numbers_need_to_be_displayed =0;
+
+
+ /////////////////////release frames/////////////////////
+ nitems = pInfo->dpb.frame_numbers_need_to_be_removed;
+
+ for(i=0; i<nitems; i++)
+ {
+ wi.vwi_type = VIDDEC_WORKLOAD_REF_FRAME_RELEASE_0 + pInfo->dpb.frame_id_need_to_be_removed[i];
+ wi.ref_frame.reference_id = pInfo->dpb.frame_id_need_to_be_removed[i];
+ wi.ref_frame.luma_phys_addr = 0;
+ wi.ref_frame.chroma_phys_addr = 0;
+
+ if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+ {
+ //viddec_pm_append_workitem( parent, &wi );
+ }
+ else
+ {
+ // viddec_pm_append_workitem_next( parent, &wi );
+ }
+
+ }
+ pInfo->dpb.frame_numbers_need_to_be_removed =0;
+
+ /////////////////////flush frames (do not display)/////////////////////
+ nitems = pInfo->dpb.frame_numbers_need_to_be_dropped;
+
+ for(i=0; i<nitems; i++)
+ {
+ wi.vwi_type = VIDDEC_WORKLOAD_REF_FRAME_DROPOUT_0 + pInfo->dpb.frame_id_need_to_be_dropped[i];
+ wi.ref_frame.reference_id = pInfo->dpb.frame_id_need_to_be_dropped[i];
+ wi.ref_frame.luma_phys_addr = 0;
+ wi.ref_frame.chroma_phys_addr = 0;
+
+ if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+ {
+ //viddec_pm_append_workitem( parent, &wi );
+ }
+ else
+ {
+ // viddec_pm_append_workitem_next( parent, &wi );
+ }
+
+ }
+ pInfo->dpb.frame_numbers_need_to_be_dropped =0;
+
+ /////////////////////update DPB frames/////////////////////
+ nitems = pInfo->dpb.used_size;
+ for(i=0; i<nitems; i++)
+ {
+ uint8_t fs_id = pInfo->dpb.fs_dpb_idc[i];
+
+ if(viddec_h264_get_is_non_existent(&(pInfo->dpb.fs[fs_id])) == 0)
+ {
+ wi.vwi_type = VIDDEC_WORKLOAD_DPB_ACTIVE_FRAME_0+fs_id;
+ wi.ref_frame.reference_id = fs_id;
+ wi.ref_frame.luma_phys_addr = 0;
+ wi.ref_frame.chroma_phys_addr = 0;
+
+ if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+ {
+ // viddec_pm_append_workitem( parent, &wi );
+ }
+ else
+ {
+ //viddec_pm_append_workitem_next( parent, &wi );
+ }
+ }
+ }
+
+
+ /////////////////////update dpb frames info (poc)/////////////////////
+ nitems = pInfo->dpb.used_size;
+ for(i=0; i<nitems; i++)
+ {
+ uint8_t fs_id = pInfo->dpb.fs_dpb_idc[i];
+
+ if(viddec_h264_get_is_non_existent(&(pInfo->dpb.fs[fs_id])) == 0)
+ {
+ wi.vwi_type = VIDDEC_WORKLOAD_H264_DPB_FRAME_POC;
+ wi.data.data_offset = fs_id;
+ //printf("is_used = %d, tpoc = %d, bpoc = %d\n", pInfo->dpb.fs[fs_id].is_used, pInfo->dpb.fs[fs_id].top_field.poc, pInfo->dpb.fs[fs_id].bottom_field.poc);
+
+ switch(viddec_h264_get_is_used(&(pInfo->dpb.fs[fs_id])))
+ {
+ case (FRAME):{
+ wi.data.data_payload[0] = pInfo->dpb.fs[fs_id].top_field.poc;
+ wi.data.data_payload[1] = pInfo->dpb.fs[fs_id].bottom_field.poc;
+ break;
+ };
+
+ case (TOP_FIELD):{
+ wi.data.data_payload[0] = pInfo->dpb.fs[fs_id].top_field.poc;
+ wi.data.data_payload[1] = 0;
+ break;
+ };
+
+ case (BOTTOM_FIELD):{
+ wi.data.data_payload[0] = 0;
+ wi.data.data_payload[1] = pInfo->dpb.fs[fs_id].bottom_field.poc;
+ break;
+ };
+
+ default : {
+ wi.data.data_payload[0] = 0;
+ wi.data.data_payload[1] = 0;
+ break;
+ };
+ }
+
+
+ if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+ {
+ // viddec_pm_append_workitem( parent, &wi );
+ }
+ else
+ {
+ //viddec_pm_append_workitem_next( parent, &wi );
+ }
+
+ }
+ }
+
+ /////////////////////Alloc buffer for current Existing frame/////////////////////
+ if(0!=pInfo->dpb.frame_numbers_need_to_be_allocated)
+ {
+ if(pInfo->push_to_cur)
+ {
+ // viddec_workload_t *wl_cur = viddec_pm_get_header (parent);
+ // wl_cur->is_reference_frame |= WORKLOAD_REFERENCE_FRAME | (pInfo->dpb.frame_id_need_to_be_allocated & 0x1f);
+ }
+ else
+ {
+ // viddec_workload_t *wl_next = viddec_pm_get_next_header (parent);
+ //wl_next->is_reference_frame |= WORKLOAD_REFERENCE_FRAME | (pInfo->dpb.frame_id_need_to_be_allocated & 0x1f);
+ }
+ }
+ pInfo->dpb.frame_numbers_need_to_be_allocated =0;
+
+ return;
+}
+
+
+
+void h264_parse_emit_eos( void *parent, h264_Info *pInfo )
+{
+
+ uint32_t nitems=0, i=0;
+ viddec_workload_item_t wi;
+
+ ////
+ //// Now we can flush out all frames in DPB for display
+ if(viddec_h264_get_is_used(&(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc])) != 3)
+ {
+ h264_dpb_mark_dangling_field(&pInfo->dpb, pInfo->dpb.fs_dec_idc); //, DANGLING_TYPE_GAP_IN_FRAME
+ }
+
+ h264_dpb_store_previous_picture_in_dpb(pInfo, 0,0);
+ h264_dpb_flush_dpb(pInfo, 1, 0, pInfo->active_SPS.num_ref_frames);
+
+
+ /////////////////////display frames/////////////////////
+ nitems = pInfo->dpb.frame_numbers_need_to_be_displayed;
+
+ for(i=0; i<nitems; i++)
+ {
+ wi.vwi_type = VIDDEC_WORKLOAD_EOS_DISPLAY_FRAME_0 + pInfo->dpb.frame_id_need_to_be_displayed[i];
+ wi.ref_frame.reference_id = pInfo->dpb.frame_id_need_to_be_displayed[i];
+ wi.ref_frame.luma_phys_addr = 0;
+ wi.ref_frame.chroma_phys_addr = 0;
+
+ if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+ {
+ //viddec_pm_append_workitem( parent, &wi );
+ }
+ else
+ {
+ //viddec_pm_append_workitem_next( parent, &wi );
+ }
+ }
+ pInfo->dpb.frame_numbers_need_to_be_displayed =0;
+
+
+ /////////////////////release frames/////////////////////
+ nitems = pInfo->dpb.frame_numbers_need_to_be_removed;
+
+ for(i=0; i<nitems; i++)
+ {
+ wi.vwi_type = VIDDEC_WORKLOAD_EOS_RELEASE_FRAME_0 + pInfo->dpb.frame_id_need_to_be_removed[i];
+ wi.ref_frame.reference_id = pInfo->dpb.frame_id_need_to_be_removed[i];
+ wi.ref_frame.luma_phys_addr = 0;
+ wi.ref_frame.chroma_phys_addr = 0;
+
+ if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+ {
+ // viddec_pm_append_workitem( parent, &wi );
+ viddec_pm_set_next_frame_error_on_eos(parent, VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE);
+ }
+ else
+ {
+ // viddec_pm_append_workitem_next( parent, &wi );
+ viddec_pm_set_next_frame_error_on_eos(parent, pInfo->wl_err_next);
+ }
+ }
+ pInfo->dpb.frame_numbers_need_to_be_removed =0;
+
+ return;
+}
+#endif
diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/viddec_h264_parse.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/viddec_h264_parse.c
new file mode 100644
index 0000000..9388d81
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/viddec_h264_parse.c
@@ -0,0 +1,559 @@
+#include "viddec_fw_debug.h"
+#include "viddec_parser_ops.h"
+
+#include "viddec_fw_workload.h"
+#include "viddec_pm.h"
+
+#include "h264.h"
+#include "h264parse.h"
+
+#include "viddec_h264_parse.h"
+#include "h264parse_dpb.h"
+
+/* Initialize the local parser context; called on open and on flush.
+ * When 'preserve' is non-zero the sequence/GOP (SPS/PPS) state in
+ * persist_mem is kept; picture-level state is always reset. */
+#ifdef VBP
+void viddec_h264_init(void *ctxt, uint32_t *persist_mem, uint32_t preserve)
+#else
+static void viddec_h264_init(void *ctxt, uint32_t *persist_mem, uint32_t preserve)
+#endif
+{
+ struct h264_viddec_parser* p_parser = ctxt;
+
+ if(preserve == 0)
+ {
+ /* Full reset: sequence and GOP (SPS/PPS) state is re-initialized too. */
+ h264_init_sps_pps(p_parser, persist_mem);
+ }
+
+ /* Picture-level state is re-initialized unconditionally. */
+ h264_init_Info_under_sps_pps_level(&(p_parser->info));
+}
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* Parse one NAL unit from the current ES buffer and advance the H.264
+ * parser state machine (slice / SPS / PPS / SEI / AUD / EOS handling).
+ * Returns an h264_Status code; H264_STATUS_ERROR if nothing usable was
+ * parsed from this NAL. */
+#ifdef VBP
+uint32_t viddec_h264_parse(void *parent, void *ctxt)
+#else
+static uint32_t viddec_h264_parse(void *parent, void *ctxt)
+#endif
+{
+ struct h264_viddec_parser* parser = ctxt;
+
+ h264_Info * pInfo = &(parser->info);
+
+ h264_Status status = H264_STATUS_ERROR;
+
+
+ uint8_t nal_ref_idc = 0;
+
+ ///// Parse NAL Unit header
+ pInfo->img.g_new_frame = 0;
+ pInfo->push_to_cur = 1;
+ pInfo->is_current_workload_done =0;
+ pInfo->nal_unit_type = 0;
+
+ h264_Parse_NAL_Unit(parent, pInfo, &nal_ref_idc);
+
+ ///// Check frame boundary for non-VCL delimiter
+ h264_check_previous_frame_end(pInfo);
+
+ //OS_INFO("========================nal_type: %d=================\n", pInfo->nal_unit_type);
+ //DEBUG_WRITE(pInfo->nal_unit_type, pInfo->got_start, pInfo->wl_err_flag, pInfo->is_current_workload_done, 0, 0);
+#if 0
+ devh_SVEN_WriteModuleEvent( NULL,
+ SVEN_MODULE_EVENT_GV_FW_PARSER_DEBUG_P0,
+ pInfo->got_start,pInfo->nal_unit_type, pInfo->wl_err_curr, pInfo->is_current_workload_done, 0, pInfo->img.frame_num);
+#endif
+
+ //////// Parse valid NAL unit
+ switch ( pInfo->nal_unit_type )
+ {
+ case h264_NAL_UNIT_TYPE_IDR:
+ if(pInfo->got_start) {
+ pInfo->img.recovery_point_found |= 1;
+ }
+
+ pInfo->sei_rp_received = 0;
+
+ /* fall through: an IDR NAL is parsed exactly like a slice below */
+ case h264_NAL_UNIT_TYPE_SLICE:
+ ////////////////////////////////////////////////////////////////////////////
+ // Step 1: Check start point
+ ////////////////////////////////////////////////////////////////////////////
+ //
+ /// Slice parsing must start from the valid start point( SPS, PPS, IDR or recovery point or primary_I)
+ /// 1) No start point reached, append current ES buffer to workload and release it
+ /// 2) else, start parsing
+ //
+ //if(pInfo->got_start && ((pInfo->sei_information.recovery_point) || (pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR)))
+ //{
+ //pInfo->img.recovery_point_found = 1;
+ //}
+ {
+
+ h264_Slice_Header_t next_SliceHeader;
+
+ /// Reset next slice header
+ h264_memset(&next_SliceHeader, 0x0, sizeof(h264_Slice_Header_t));
+ next_SliceHeader.nal_ref_idc = nal_ref_idc;
+
+ if( (1==pInfo->primary_pic_type_plus_one)&&(pInfo->got_start))
+ {
+ pInfo->img.recovery_point_found |=4;
+ }
+ pInfo->primary_pic_type_plus_one = 0;
+
+
+
+ // No decodable start point seen yet: mark the whole frame not decodable.
+ if(pInfo->img.recovery_point_found == 0) {
+ pInfo->img.structure = FRAME;
+ pInfo->wl_err_curr |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE;
+ pInfo->wl_err_curr |= (FRAME << FIELD_ERR_OFFSET);
+ break;
+ }
+
+ ////////////////////////////////////////////////////////////////////////////
+ // Step 2: Parsing slice header
+ ////////////////////////////////////////////////////////////////////////////
+ /// PWT
+ pInfo->h264_pwt_start_byte_offset=0;
+ pInfo->h264_pwt_start_bit_offset=0;
+ pInfo->h264_pwt_end_byte_offset=0;
+ pInfo->h264_pwt_end_bit_offset=0;
+ pInfo->h264_pwt_enabled =0;
+ /// IDR flag
+ next_SliceHeader.idr_flag = (pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR);
+
+
+ /// Pass slice header
+ status = h264_Parse_Slice_Layer_Without_Partitioning_RBSP(parent, pInfo, &next_SliceHeader);
+
+ pInfo->sei_information.recovery_point = 0;
+
+ if(next_SliceHeader.sh_error & 3) {
+ pInfo->wl_err_curr |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE;
+
+ // Error type definition, refer to viddec_fw_common_defs.h
+ // if error in top field, VIDDEC_FW_WORKLOAD_ERR_TOPFIELD = (1 << 17)
+ // if error in bottom field, VIDDEC_FW_WORKLOAD_ERR_BOTTOMFIELD = (1 << 18)
+ // if this is frame based, both 2 bits should be set
+ pInfo->wl_err_curr |= (FRAME << FIELD_ERR_OFFSET);
+
+ break;
+ }
+ pInfo->img.current_slice_num++;
+
+
+#ifdef DUMP_HEADER_INFO
+dump_slice_header(pInfo, &next_SliceHeader);
+////h264_print_decoder_values(pInfo);
+#endif
+
+
+ ////////////////////////////////////////////////////////////////////////////
+ // Step 3: Processing if new picture coming
+ // 1) if it's the second field
+ // 2) if it's a new frame
+ ////////////////////////////////////////////////////////////////////////////
+ //AssignQuantParam(pInfo);
+ if(h264_is_new_picture_start(pInfo, next_SliceHeader, pInfo->SliceHeader))
+ {
+ //
+ ///----------------- New Picture.boundary detected--------------------
+ //
+ pInfo->img.g_new_pic++;
+
+ //
+ // Complete previous picture
+ h264_dpb_store_previous_picture_in_dpb(pInfo, 0, 0); //curr old
+ //h264_hdr_post_poc(0, 0, use_old);
+
+ //
+ // Update slice structures:
+ h264_update_old_slice(pInfo, next_SliceHeader); //cur->old; next->cur;
+
+ //
+ // 1) if resolution change: reset dpb
+ // 2) else: init frame store
+ h264_update_img_info(pInfo); //img, dpb
+
+ //
+ ///----------------- New frame.boundary detected--------------------
+ //
+ pInfo->img.second_field = h264_is_second_field(pInfo);
+ if(pInfo->img.second_field == 0)
+ {
+ pInfo->img.g_new_frame = 1;
+ h264_dpb_update_queue_dangling_field(pInfo);
+
+ //
+ /// DPB management
+ /// 1) check the gaps
+ /// 2) assign fs for non-exist frames
+ /// 3) fill the gaps
+ /// 4) store frame into DPB if ...
+ //
+ //if(pInfo->SliceHeader.redundant_pic_cnt)
+ {
+ h264_dpb_gaps_in_frame_num_mem_management(pInfo);
+ }
+
+#ifdef DUMP_HEADER_INFO
+ dump_new_picture_attr(pInfo, pInfo->SliceHeader.frame_num);
+#endif
+ }
+ //
+ /// Decoding POC
+ h264_hdr_decoding_poc (pInfo, 0, 0);
+
+ //
+ /// Init Frame Store for next frame
+ h264_dpb_init_frame_store (pInfo);
+ pInfo->img.current_slice_num = 1;
+
+ if(pInfo->SliceHeader.first_mb_in_slice != 0)
+ {
+ ////Come here means we have slice lost at the beginning, since no FMO support
+ pInfo->SliceHeader.sh_error |= (pInfo->SliceHeader.structure << 17);
+ }
+
+ //
+ /// Emit out the New Frame
+ if(pInfo->img.g_new_frame)
+ {
+ h264_parse_emit_start_new_frame(parent, pInfo);
+ }
+
+ h264_parse_emit_current_pic(parent, pInfo);
+ }
+ else ///////////////////////////////////////////////////// If Not a picture start
+ {
+ //
+ /// Update slice structures: cur->old; next->cur;
+ h264_update_old_slice(pInfo, next_SliceHeader);
+
+ //
+ /// 1) if resolution change: reset dpb
+ /// 2) else: update img info
+ h264_update_img_info(pInfo);
+ }
+
+
+ //////////////////////////////////////////////////////////////
+ // Step 4: DPB reference list init and reordering
+ //////////////////////////////////////////////////////////////
+
+ //////////////////////////////////////////////// Update frame Type--- IDR/I/P/B for frame or field
+ h264_update_frame_type(pInfo);
+
+
+ h264_dpb_update_ref_lists( pInfo);
+
+#ifdef DUMP_HEADER_INFO
+ dump_ref_list(pInfo);
+#endif
+ /// Emit out the current "good" slice
+ h264_parse_emit_current_slice(parent, pInfo);
+
+ }
+ break;
+
+ ///// * Main profile doesn't support Data Partition, skipped.... *////
+ case h264_NAL_UNIT_TYPE_DPA:
+ case h264_NAL_UNIT_TYPE_DPB:
+ case h264_NAL_UNIT_TYPE_DPC:
+ //OS_INFO("***********************DP feature, not supported currently*******************\n");
+ pInfo->wl_err_curr |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE;
+ status = H264_STATUS_NOTSUPPORT;
+ break;
+
+ //// * Parsing SEI info *////
+ case h264_NAL_UNIT_TYPE_SEI:
+ status = H264_STATUS_OK;
+
+ //OS_INFO("*****************************SEI**************************************\n");
+ // SEI is only meaningful once a valid SPS has been seen.
+ if(pInfo->sps_valid){
+ //h264_user_data_t user_data; /// Replace with tmp buffer while porting to FW
+ pInfo->number_of_first_au_info_nal_before_first_slice++;
+ /// parsing the SEI info
+ status = h264_Parse_Supplemental_Enhancement_Information_Message(parent, pInfo);
+ }
+
+ //h264_rbsp_trailing_bits(pInfo);
+ break;
+ case h264_NAL_UNIT_TYPE_SPS:
+ {
+ //OS_INFO("*****************************SPS**************************************\n");
+ ///
+ /// Can not define local SPS since the Current local stack size limitation!
+ /// Could be changed after the limitation gone
+ ///
+ uint8_t old_sps_id=0;
+ vui_seq_parameters_t_not_used vui_seq_not_used;
+
+ // Parse into active_SPS in place; the previous active SPS is restored
+ // from DDR below if the new one has a different id.
+ old_sps_id = pInfo->active_SPS.seq_parameter_set_id;
+ h264_memset(&(pInfo->active_SPS), 0x0, sizeof(seq_param_set_used));
+
+
+ status = h264_Parse_SeqParameterSet(parent, pInfo, &(pInfo->active_SPS), &vui_seq_not_used, (int32_t *)pInfo->TMP_OFFSET_REFFRM_PADDR_GL);
+ if(status == H264_STATUS_OK) {
+ h264_Parse_Copy_Sps_To_DDR(pInfo, &(pInfo->active_SPS), pInfo->active_SPS.seq_parameter_set_id);
+ pInfo->sps_valid = 1;
+
+ if(1==pInfo->active_SPS.pic_order_cnt_type) {
+ h264_Parse_Copy_Offset_Ref_Frames_To_DDR(pInfo,(int32_t *)pInfo->TMP_OFFSET_REFFRM_PADDR_GL,pInfo->active_SPS.seq_parameter_set_id);
+ }
+
+#ifdef DUMP_HEADER_INFO
+ dump_sps(&(pInfo->active_SPS));
+#endif
+
+ }
+ ///// Restore the active SPS if new arrival's id changed
+ if(old_sps_id>=MAX_NUM_SPS) {
+ h264_memset(&(pInfo->active_SPS), 0x0, sizeof(seq_param_set_used));
+ pInfo->active_SPS.seq_parameter_set_id = 0xff;
+ }
+ else {
+ if(old_sps_id!=pInfo->active_SPS.seq_parameter_set_id) {
+ h264_Parse_Copy_Sps_From_DDR(pInfo, &(pInfo->active_SPS), old_sps_id);
+ }
+ else {
+ //h264_memset(&(pInfo->active_SPS), 0x0, sizeof(seq_param_set));
+ pInfo->active_SPS.seq_parameter_set_id = 0xff;
+ }
+ }
+
+ pInfo->number_of_first_au_info_nal_before_first_slice++;
+ }
+ break;
+ case h264_NAL_UNIT_TYPE_PPS:
+ {
+ //OS_INFO("*****************************PPS**************************************\n");
+
+ uint32_t old_sps_id = pInfo->active_SPS.seq_parameter_set_id;
+ uint32_t old_pps_id = pInfo->active_PPS.pic_parameter_set_id;
+
+ h264_memset(&pInfo->active_PPS, 0x0, sizeof(pic_param_set));
+ pInfo->number_of_first_au_info_nal_before_first_slice++;
+
+ if (h264_Parse_PicParameterSet(parent, pInfo, &pInfo->active_PPS)== H264_STATUS_OK)
+ {
+ // Activate the SPS this PPS refers to.
+ h264_Parse_Copy_Sps_From_DDR(pInfo, &(pInfo->active_SPS), pInfo->active_PPS.seq_parameter_set_id);
+ if(old_sps_id != pInfo->active_SPS.seq_parameter_set_id)
+ {
+ pInfo->Is_SPS_updated = 1;
+ }
+ if(pInfo->active_SPS.seq_parameter_set_id != 0xff) {
+ h264_Parse_Copy_Pps_To_DDR(pInfo, &pInfo->active_PPS, pInfo->active_PPS.pic_parameter_set_id);
+ pInfo->got_start = 1;
+ if(pInfo->sei_information.recovery_point)
+ {
+ pInfo->img.recovery_point_found |= 2;
+
+ //// Enable the RP recovery if no IDR ---Cisco
+ if((pInfo->img.recovery_point_found & 1)==0)
+ pInfo->sei_rp_received = 1;
+ }
+ }
+ else
+ {
+ // Referenced SPS was never seen: restore the previous active SPS.
+ h264_Parse_Copy_Sps_From_DDR(pInfo, &(pInfo->active_SPS), old_sps_id);
+ }
+ #ifdef DUMP_HEADER_INFO
+ dump_pps(&(pInfo->active_PPS));
+ #endif
+ } else {
+ // Parse failed: roll back both the active SPS and PPS.
+ if(old_sps_id<MAX_NUM_SPS)
+ h264_Parse_Copy_Sps_From_DDR(pInfo, &(pInfo->active_SPS), old_sps_id);
+ if(old_pps_id<MAX_NUM_PPS)
+ h264_Parse_Copy_Pps_From_DDR(pInfo, &(pInfo->active_PPS), old_pps_id);
+ }
+
+ } //// End of PPS parsing
+ break;
+
+
+ case h264_NAL_UNIT_TYPE_EOSeq:
+ case h264_NAL_UNIT_TYPE_EOstream:
+
+ h264_parse_emit_eos(parent, pInfo);
+ h264_init_dpb(&(pInfo->dpb));
+
+ /* picture level info which will always be initialized */
+ //h264_init_Info_under_sps_pps_level(pInfo);
+
+ ////reset the pInfo here
+ //viddec_h264_init(ctxt, (uint32_t *)parser->sps_pps_ddr_paddr, false);
+
+
+ status = H264_STATUS_OK;
+ pInfo->number_of_first_au_info_nal_before_first_slice++;
+ break;
+
+ case h264_NAL_UNIT_TYPE_Acc_unit_delimiter:
+#if 1
+ ///// primary_pic_type: 3-bit field read straight from the AUD payload
+ {
+ uint32_t code = 0xff;
+ int32_t ret = 0;
+ ret = viddec_pm_get_bits(parent, (uint32_t *)&(code), 3);
+
+ if(ret != -1) {
+ //if(pInfo->got_start && (code == 0))
+ //{
+ //pInfo->img.recovery_point_found |= 4;
+ //}
+ pInfo->primary_pic_type_plus_one = (uint8_t)(code)+1;
+ status = H264_STATUS_OK;
+ }
+ pInfo->number_of_first_au_info_nal_before_first_slice++;
+ break;
+ }
+#endif
+
+ case h264_NAL_UNIT_TYPE_Reserved1:
+ case h264_NAL_UNIT_TYPE_Reserved2:
+ case h264_NAL_UNIT_TYPE_Reserved3:
+ case h264_NAL_UNIT_TYPE_Reserved4:
+ case h264_NAL_UNIT_TYPE_Reserved5:
+ status = H264_STATUS_OK;
+ pInfo->number_of_first_au_info_nal_before_first_slice++;
+ break;
+
+ case h264_NAL_UNIT_TYPE_filler_data:
+ status = H264_STATUS_OK;
+ break;
+ case h264_NAL_UNIT_TYPE_ACP:
+ break;
+ case h264_NAL_UNIT_TYPE_SPS_extension:
+ case h264_NAL_UNIT_TYPE_unspecified:
+ case h264_NAL_UNIT_TYPE_unspecified2:
+ status = H264_STATUS_OK;
+ //nothing
+ break;
+ default:
+ status = H264_STATUS_OK;
+ break;
+ }
+
+ // Remember the last "significant" NAL type; used by the frame-boundary
+ // detection on the next NAL (filler/ACP/unspecified types are excluded).
+ //pInfo->old_nal_unit_type = pInfo->nal_unit_type;
+ switch ( pInfo->nal_unit_type )
+ {
+ case h264_NAL_UNIT_TYPE_IDR:
+ case h264_NAL_UNIT_TYPE_SLICE:
+ case h264_NAL_UNIT_TYPE_Acc_unit_delimiter:
+ case h264_NAL_UNIT_TYPE_SPS:
+ case h264_NAL_UNIT_TYPE_PPS:
+ case h264_NAL_UNIT_TYPE_SEI:
+ case h264_NAL_UNIT_TYPE_EOSeq:
+ case h264_NAL_UNIT_TYPE_EOstream:
+ case h264_NAL_UNIT_TYPE_Reserved1:
+ case h264_NAL_UNIT_TYPE_Reserved2:
+ case h264_NAL_UNIT_TYPE_Reserved3:
+ case h264_NAL_UNIT_TYPE_Reserved4:
+ case h264_NAL_UNIT_TYPE_Reserved5:
+ {
+ pInfo->old_nal_unit_type = pInfo->nal_unit_type;
+ break;
+ }
+ default:
+ break;
+ }
+
+ return status;
+}
+
+
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+
+static uint32_t viddec_h264_is_frame_start(void *ctxt)
+{
+ /* Report whether the parser has just detected a new-frame boundary. */
+ struct h264_viddec_parser* p = ctxt;
+
+ return (p->info.img.g_new_frame != 0) ? 1 : 0;
+}
+
+/* Decide whether the current workload is complete.  Completion is forced
+ * on EOS or discontinuity; otherwise it follows is_current_workload_done.
+ * On completion: clamp bogus coded sizes, hand the accumulated error
+ * flags to the caller and rotate the curr/next error state. */
+#ifdef VBP
+uint32_t viddec_h264_wkld_done(void *parent, void *ctxt, unsigned int next_sc,
+ uint32_t *codec_specific_errors)
+#else
+static uint32_t viddec_h264_wkld_done(void *parent, void *ctxt, unsigned int next_sc, uint32_t *codec_specific_errors)
+#endif
+{
+ struct h264_viddec_parser* parser = ctxt;
+ uint32_t ret = VIDDEC_PARSE_SUCESS;
+ h264_Info * pInfo = &(parser->info);
+ uint8_t is_stream_forced_to_complete=false;
+
+ is_stream_forced_to_complete = (VIDDEC_PARSE_EOS == next_sc) || (VIDDEC_PARSE_DISCONTINUITY == next_sc);
+
+ if(is_stream_forced_to_complete || (pInfo->is_current_workload_done))
+ {
+ viddec_workload_t *wl;
+ viddec_frame_attributes_t *attrs;
+
+ wl = viddec_pm_get_header( parent );
+ attrs = &wl->attrs;
+
+ // Sanitize the coded size: anything outside [32, 2048] in either
+ // dimension is treated as corrupt and the workload marked not decodable.
+ if((attrs->cont_size.width < 32) || (attrs->cont_size.width > 2048) || (attrs->cont_size.height < 32) || (attrs->cont_size.height>2048))
+ {
+ attrs->cont_size.width = 32;
+ attrs->cont_size.height = 32;
+ pInfo->wl_err_curr |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE;
+ pInfo->wl_err_curr |= (FRAME << FIELD_ERR_OFFSET);
+ }
+
+ // Report current errors, then shift next->curr (order matters here).
+ *codec_specific_errors = pInfo->wl_err_curr;
+ pInfo->wl_err_curr = pInfo->wl_err_next;
+ pInfo->wl_err_next = 0;
+
+ if(is_stream_forced_to_complete)
+ {
+ h264_parse_emit_eos(parent, pInfo);
+ }
+ ret = VIDDEC_PARSE_FRMDONE;
+ }
+
+ return ret;
+}
+
+/* Report the parser's context and persistent memory requirements. */
+#ifdef VBP
+void viddec_h264_get_context_size(viddec_parser_memory_sizes_t *size)
+#else
+static void viddec_h264_get_context_size(viddec_parser_memory_sizes_t *size)
+#endif
+{
+ /* Working context: the parser structure itself. */
+ size->context_size = sizeof(struct h264_viddec_parser);
+
+ /* Persistent area: all SPS slots, all PPS slots, plus one POC-cycle
+ offset table per SPS and one extra scratch table. */
+ size->persist_size = (MAX_NUM_SPS * sizeof(seq_param_set_all))
+ + (MAX_NUM_PPS * sizeof(pic_param_set))
+ + ((MAX_NUM_SPS + 1) * sizeof(int32_t) * MAX_NUM_REF_FRAMES_IN_PIC_ORDER_CNT_CYCLE);
+}
+
+/* Publish the H.264 parser entry points into the ops table. */
+void viddec_h264_get_ops(viddec_parser_ops_t *ops)
+{
+ ops->init = viddec_h264_init;
+ ops->get_cxt_size = viddec_h264_get_context_size;
+ ops->parse_syntax = viddec_h264_parse;
+ ops->is_wkld_done = viddec_h264_wkld_done;
+ ops->is_frame_start = viddec_h264_is_frame_start;
+}
+
diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/viddec_h264_workload.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/viddec_h264_workload.c
new file mode 100644
index 0000000..4fc2f1a
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/viddec_h264_workload.c
@@ -0,0 +1,1306 @@
+/* Any workload management goes in this file */
+
+#include "viddec_fw_debug.h"
+#include "viddec_parser_ops.h"
+#include "h264.h"
+#include "h264parse.h"
+#include "viddec_fw_item_types.h"
+#include "h264parse_dpb.h"
+
+
+#include "viddec_fw_workload.h"
+#include <auto_eas/gen4_mfd.h>
+#include "viddec_pm_utils_bstream.h"
+
+// picture parameter 1
+#define PUT_BSD_PP1_IMG_DISPOSABLE_FLAG_BIT(w) (((uint32_t)w)&0x1)
+#define PUT_BSD_PP1_SLICE_TYPE_BITS(w) ((((uint32_t)w)&0x7)<<1)
+#define PUT_BSD_PP1_WEIGHTED_BIPRED_IDC_BITS(w) ((((uint32_t)w)&0x3)<<4)
+#define PUT_BSD_PP1_WEIGHTED_PRED_FLAG_BIT(w) ((((uint32_t)w)&0x1)<<6)
+#define PUT_BSD_PP1_NUM_REF_IDX_L0_BITS(w) ((((uint32_t)w)&0x3F)<<8)
+#define PUT_BSD_PP1_NUM_REF_IDX_L1_BITS(w) ((((uint32_t)w)&0x3F)<<16)
+
+// picture parameter 2
+#define PUT_BSD_PP2_CABAC_INIT_IDC_BITS(w) (((uint32_t)w)&0x3)
+#define PUT_BSD_PP2_QP_BITS(w) ((((uint32_t)w)&0x3F)<<2)
+#define PUT_BSD_PP2_DISABLE_DBF_IDC_BITS(w) ((((uint32_t)w)&0x3)<<8)
+#define PUT_BSD_PP2_ALPHA_C0_OFFSET_DIV2_BITS(w) ((((uint32_t)w)&0xF)<<10)
+#define PUT_BSD_PP2_BETA_OFFSET_DIV2_BITS(w) ((((uint32_t)w)&0xF)<<14)
+#define PUT_BSD_PP2_IMG_DIRECT_TYPE_BIT(w) ((((uint32_t)w)&0x1)<<18)
+#define PUT_BSD_PP2_CHROMA_QP_OFFSET_BITS(w) ((((uint32_t)w)&0x1F)<<19)
+#define PUT_BSD_PP2_CHROMA_QP_OFFSET_2_BITS(w) ((((uint32_t)w)&0x1F)<<24)
+
+
+// slice start parameter
+#define PUT_BSD_SS_START_ADDR_BITS(w) (((uint32_t)w)&0x7fff) // 14:0 current slice start address
+#define PUT_BSD_SS_SKIP_FS_IDC_BITS(w) ((((uint32_t)w)&0x3f)<<16) // [5:0], [4:0] frame store idc, [5] - 0: top-filed, 1: bottom field
+#define PUT_BSD_SS_SKIP_TYPE_BIT(w) ((((uint32_t)w)&0x1)<<24) // 0: P-skip, 1: I-skip
+#define PUT_BSD_SS_SKIP_REWIND_BITS(w) ((((uint32_t)w)&0xf)<<28) // number of MB or MBAFF pairs to rewind before skip
+
+//h264_dpb_init
+#define PUT_FRAME_WIDTH_MB_BITS(w) (((uint32_t)w)&0x7F)
+#define PUT_FRAME_HEIGHT_MB_BITS(w) ((((uint32_t)w)&0x7F)<<16)
+
+//dpb lut table init
+//#define PUT_BSD_IMAGE_FRAME_STORE_IDC_BITS(w) ((((uint32_t)w)&0x1F)<<8)
+
+//h264 img init
+#define PUT_BSD_IMAGE_STRUCTURE_BITS(w) (((uint32_t)w)&0x3)
+#define PUT_BSD_IMAGE_IDR_BIT(w) ((((uint32_t)w)&0x1)<<2)
+#define PUT_BSD_IMAGE_MBAFF_FRAME_FLAG_BIT(w) ((((uint32_t)w)&0x1)<<3)
+#define PUT_BSD_IMAGE_ENTROPY_CODING_MODE_FLAG_BIT(w) ((((uint32_t)w)&0x1)<<4)
+#define PUT_BSD_IMAGE_CONSTRAINED_INTRA_PRED_FLAG_BIT(w) ((((uint32_t)w)&0x1)<<5)
+#define PUT_BSD_IMG_FRAME_MBS_ONLY_FLAG_BIT(w) ((((uint32_t)w)&0x1)<<6)
+#define PUT_BSD_IMG_DIRECT_8X8_INFER_FLAG_BIT(w) ((((uint32_t)w)&0x1)<<7)
+#define PUT_BSD_IMAGE_FRAME_STORE_IDC_BITS(w) ((((uint32_t)w)&0x1F)<<8)
+
+#define PUT_HPD_BSD_IMG_TRANSFORM_8X8_MODE_FLAG_BIT(w) ((((uint32_t)w)&0x1)<<13)
+#define PUT_HPD_BSD_IMG_MONOCHROME_FLAG_BIT(w) ((((uint32_t)w)&0x1)<<14)
+#define PUT_HPD_BSD_IMG_GREY_NONEXISTING_FLAG_BIT(w) ((((uint32_t)w)&0x1)<<15)
+#define PUT_HPD_BSD_IMG_QM_PRESENT_FLAG_BIT(w) ((((uint32_t)w)&0x1)<<16)
+#define PUT_HPD_BSD_IMG_QM_LIST_FLAGS_BITS(w) ((((uint32_t)w)&0xFF)<<17)
+#define PUT_HPD_BSD_IMG_MONOCHROME_PWT_FLAG_BIT(w) ((((uint32_t)w)&0x1)<<25)
+
+
+extern void h264_dpb_store_previous_picture_in_dpb(h264_Info * pInfo,
+ int32_t NonExisting,
+ int32_t use_old);
+
+extern void h264_dpb_flush_dpb (h264_Info * pInfo,int32_t output_all, int32_t keep_complement, int32_t num_ref_frames);
+
+
+
+/* Fill the workload's frame attributes from the parser state: coded size
+ * (in pixels, derived from macroblock counts), defaults for the per-slice
+ * attributes, and the frame-cropping adjustment to the height. */
+void h264_translate_parser_info_to_frame_attributes(viddec_workload_t *wl, h264_Info *pInfo)
+{
+
+ viddec_frame_attributes_t *attrs = &wl->attrs;
+
+
+
+ //// Cont_size
+ attrs->cont_size.height = pInfo->img.FrameHeightInMbs*16;
+ attrs->cont_size.width = pInfo->img.PicWidthInMbs*16;
+
+ //// The following attributes will be updated in slice level
+ attrs->h264.used_for_reference = 0;
+ attrs->h264.top_field_first = 0;
+ attrs->h264.top_field_poc = 0;
+ attrs->h264.bottom_field_poc = 0;
+ attrs->h264.field_pic_flag = 0;
+
+#if 1
+/// Double check the size late!!!!!
+ //attrs->h264.cropped_size.width = pInfo->img.PicWidthInMbs*16;
+ //attrs->h264.cropped_size.height = pInfo->img.PicWidthInMbs*16;
+
+ // NOTE(review): only the bottom crop offset is applied below; the top,
+ // left and right crop offsets are ignored — confirm the consumer
+ // compensates for this.
+ if( (pInfo->active_SPS.sps_disp.frame_cropping_flag) &&
+ (pInfo->active_SPS.sps_disp.chroma_format_idc < 4))
+ {
+ int32_t CropUnitX, CropUnitY;
+ int32_t SubWidthC, SubHeightC;
+
+ // Crop units per H.264 spec: monochrome uses 1 (x2 for field coding);
+ // otherwise derived from the chroma subsampling factors.
+ if(pInfo->active_SPS.sps_disp.chroma_format_idc == 0)
+ {
+ CropUnitX = 1;
+ CropUnitY = 2 - pInfo->active_SPS.sps_disp.frame_mbs_only_flag;
+ }
+ else
+ {
+ SubWidthC = 2 - ((pInfo->active_SPS.sps_disp.chroma_format_idc - 1) >> 1);
+ SubHeightC = 2 - ((pInfo->active_SPS.sps_disp.chroma_format_idc - 1) >>1)
+ - ((pInfo->active_SPS.sps_disp.chroma_format_idc - 1) & 0x1);
+ CropUnitX = SubWidthC;
+ CropUnitY = SubHeightC * (2 - pInfo->active_SPS.sps_disp.frame_mbs_only_flag);
+ }
+
+ // Only shrink if the crop leaves a positive height.
+ if ((int32_t)attrs->cont_size.height >(pInfo->active_SPS.sps_disp.frame_crop_rect_bottom_offset*CropUnitY))
+ {
+ attrs->cont_size.height -= (pInfo->active_SPS.sps_disp.frame_crop_rect_bottom_offset*CropUnitY);
+ //attrs->h264.cropped_size.height-= (pInfo->active_SPS.sps_disp.frame_crop_rect_bottom_offset*CropUnitY);
+ }
+ }
+/// Pan-Scan Info
+
+#endif
+
+}
+
+
+/* Refresh the per-frame attributes (frame/field type, reference flag,
+ * POCs, top-field-first, field_pic_flag) of the current or next workload,
+ * decoding them from the bit-packed pic_type of the frame store being
+ * decoded (fs_dec_idc). */
+static void h264_parse_update_frame_attributes(void *parent, h264_Info *pInfo)
+{
+ viddec_workload_t *wl_cur, *wl_next;
+ viddec_frame_attributes_t *attrs;
+ uint8_t frame_type=0;
+
+
+ // Exactly one of wl_cur / wl_next is fetched, depending on whether the
+ // current workload is still being filled.
+ if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+ {
+ wl_cur = viddec_pm_get_header( parent );
+ attrs = &wl_cur->attrs;
+ }
+ else
+ {
+ wl_next = viddec_pm_get_next_header (parent);
+ attrs = &wl_next->attrs;
+ }
+
+ /////////update frame type
+ // Structure bit set => frame-coded picture: decode one frame type and
+ // invalidate the bottom-field type.
+ if((pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type)&(0x1 << FRAME_TYPE_STRUCTRUE_OFFSET))
+ {
+ frame_type = ( (pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type)&((0x7 << FRAME_TYPE_FRAME_OFFSET)) )>> FRAME_TYPE_FRAME_OFFSET;
+ switch(frame_type)
+ {
+ case FRAME_TYPE_IDR: attrs->frame_type = VIDDEC_FRAME_TYPE_IDR; break;
+ case FRAME_TYPE_I: attrs->frame_type = VIDDEC_FRAME_TYPE_I; break;
+ case FRAME_TYPE_P: attrs->frame_type = VIDDEC_FRAME_TYPE_P; break;
+ case FRAME_TYPE_B: attrs->frame_type = VIDDEC_FRAME_TYPE_B; break;
+ default: attrs->frame_type = VIDDEC_FRAME_TYPE_INVALID; break;
+ }
+
+ attrs->bottom_field_type = VIDDEC_FRAME_TYPE_INVALID;
+ }
+ else
+ {
+ // Field-coded picture: decode top and bottom field types separately.
+ frame_type = ( (pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type)&((0x7 << FRAME_TYPE_TOP_OFFSET)) )>> FRAME_TYPE_TOP_OFFSET;
+ switch(frame_type)
+ {
+ case FRAME_TYPE_IDR: attrs->frame_type = VIDDEC_FRAME_TYPE_IDR; break;
+ case FRAME_TYPE_I: attrs->frame_type = VIDDEC_FRAME_TYPE_I; break;
+ case FRAME_TYPE_P: attrs->frame_type = VIDDEC_FRAME_TYPE_P; break;
+ case FRAME_TYPE_B: attrs->frame_type = VIDDEC_FRAME_TYPE_B; break;
+ default: attrs->frame_type = VIDDEC_FRAME_TYPE_INVALID; break;
+
+ }
+
+ frame_type = ( (pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type)&((0x7 << FRAME_TYPE_BOTTOM_OFFSET)) )>> FRAME_TYPE_BOTTOM_OFFSET;
+ switch(frame_type)
+ {
+ case FRAME_TYPE_IDR: attrs->bottom_field_type = VIDDEC_FRAME_TYPE_IDR; break;
+ case FRAME_TYPE_I: attrs->bottom_field_type = VIDDEC_FRAME_TYPE_I; break;
+ case FRAME_TYPE_P: attrs->bottom_field_type = VIDDEC_FRAME_TYPE_P; break;
+ case FRAME_TYPE_B: attrs->bottom_field_type = VIDDEC_FRAME_TYPE_B; break;
+ default: attrs->bottom_field_type = VIDDEC_FRAME_TYPE_INVALID; break;
+
+ }
+ }
+
+ /////////update is_referece flag (sticky across slices: OR-accumulated)
+ attrs->h264.used_for_reference |= (pInfo->SliceHeader.nal_ref_idc == 0)? 0: 1;
+
+ /////////update POC
+ attrs->h264.top_field_poc = pInfo->img.toppoc;
+ attrs->h264.bottom_field_poc = pInfo->img.bottompoc;
+
+ //////// update TFF: derived from POC ordering (top displays first when
+ //////// its POC is not greater than the bottom field's)
+ if(attrs->h264.top_field_poc <= attrs->h264.bottom_field_poc) {
+ attrs->h264.top_field_first = 1;
+ } else {
+ attrs->h264.top_field_first = 0;
+ }
+
+ /////// update field_pic_flag
+ //attrs->h264.field_pic_flag |= (pInfo->SliceHeader.field_pic_flag << pInfo->SliceHeader.bottom_field_flag);
+ attrs->h264.field_pic_flag |= pInfo->SliceHeader.field_pic_flag;
+
+ return;
+}
+
+
+/* Pack the current slice header into the three hardware BSD words
+ * (picture params 1 and 2, slice start) using the PUT_BSD_* bit-field
+ * macros defined at the top of this file. */
+static void h264_fill_slice_data(h264_Info *pInfo, h264_slice_data * p_slice_data)
+{
+ uint32_t data=0;
+ uint32_t first_mb_in_slice =0;
+
+
+
+ ////////////fill pic parameters 1: disposable flag, slice type, weighted
+ //////////// prediction controls and active reference list sizes
+ data = PUT_BSD_PP1_IMG_DISPOSABLE_FLAG_BIT( (pInfo->SliceHeader.nal_ref_idc == 0) ) +
+ PUT_BSD_PP1_SLICE_TYPE_BITS(pInfo->SliceHeader.slice_type) +
+ PUT_BSD_PP1_WEIGHTED_BIPRED_IDC_BITS(pInfo->active_PPS.weighted_bipred_idc) +
+ PUT_BSD_PP1_WEIGHTED_PRED_FLAG_BIT(pInfo->active_PPS.weighted_pred_flag) +
+ PUT_BSD_PP1_NUM_REF_IDX_L0_BITS(pInfo->SliceHeader.num_ref_idx_l0_active) +
+ PUT_BSD_PP1_NUM_REF_IDX_L1_BITS(pInfo->SliceHeader.num_ref_idx_l1_active);
+ p_slice_data->h264_bsd_slice_p1 = data;
+
+
+ ///////////fill pic parameters 2: CABAC init, slice QP (delta + PPS base
+ /////////// + 26), deblocking controls and chroma QP offsets
+ data = PUT_BSD_PP2_CABAC_INIT_IDC_BITS(pInfo->SliceHeader.cabac_init_idc) +
+ PUT_BSD_PP2_QP_BITS( (pInfo->SliceHeader.slice_qp_delta + pInfo->active_PPS.pic_init_qp_minus26+26) ) +
+ PUT_BSD_PP2_DISABLE_DBF_IDC_BITS(pInfo->SliceHeader.disable_deblocking_filter_idc) +
+ PUT_BSD_PP2_ALPHA_C0_OFFSET_DIV2_BITS(pInfo->SliceHeader.slice_alpha_c0_offset_div2) +
+ PUT_BSD_PP2_BETA_OFFSET_DIV2_BITS(pInfo->SliceHeader.slice_beta_offset_div2) +
+ PUT_BSD_PP2_IMG_DIRECT_TYPE_BIT(pInfo->SliceHeader.direct_spatial_mv_pred_flag) +
+ PUT_BSD_PP2_CHROMA_QP_OFFSET_BITS(pInfo->active_PPS.chroma_qp_index_offset) +
+ PUT_BSD_PP2_CHROMA_QP_OFFSET_2_BITS(pInfo->active_PPS.second_chroma_qp_index_offset);
+
+ p_slice_data->h264_bsd_slice_p2 = data;
+
+ /////////fill slice start: first MB address, skip frame-store idc, and
+ ///////// the rewind count (2 for MBAFF pairs, otherwise 3)
+ first_mb_in_slice = pInfo->SliceHeader.first_mb_in_slice;
+
+ data = PUT_BSD_SS_START_ADDR_BITS(first_mb_in_slice);
+ data |= PUT_BSD_SS_SKIP_FS_IDC_BITS( pInfo->h264_list_replacement) |
+ PUT_BSD_SS_SKIP_TYPE_BIT(0) |
+ PUT_BSD_SS_SKIP_REWIND_BITS((pInfo->img.MbaffFrameFlag? 2: 3));
+
+ p_slice_data->h264_bsd_slice_start = data;
+
+}
+
+
+/* Emit workload items for the six 4x4 scaling matrices (lists 0-5).
+ * For each list the source is resolved with PPS taking precedence over
+ * SPS, either of which may select the default matrix; lists provided
+ * from SPS/PPS are emitted as two 8-byte payload items, the default as a
+ * single marker item, and fallback (FB_QM) lists emit nothing. */
+static void h264_parse_emit_4X4_scaling_matrix( void *parent, h264_Info *pInfo )
+{
+
+ viddec_workload_item_t wi;
+
+ uint32_t i=0, n_items=0;
+ uint32_t qm_type=0;
+
+
+ for( i = 0; i < 6; i++ )
+ {
+ qm_type = FB_QM;
+ if (pInfo->active_SPS.seq_scaling_matrix_present_flag) // check sps first
+ {
+ if (pInfo->active_SPS.seq_scaling_list_present_flag[i])
+ {
+ pInfo->qm_present_list |= ((0x1)<<i);
+
+ if (pInfo->active_SPS.UseDefaultScalingMatrix4x4Flag[i]) {
+ qm_type = DEFAULT_QM;
+ } else {
+ qm_type = SPS_QM;
+ }
+ }
+ }
+
+ if (pInfo->active_PPS.pic_scaling_matrix_present_flag) // then check pps
+ {
+ if (pInfo->active_PPS.pic_scaling_list_present_flag[i])
+ {
+ pInfo->qm_present_list |= ((0x1)<<i);
+ if (pInfo->active_PPS.UseDefaultScalingMatrix4x4Flag[i]) {
+ qm_type = DEFAULT_QM;
+ } else {
+ qm_type = PPS_QM;
+ }
+ }
+ else
+ {
+ // Lists 1,2,4,5 fall back when absent from the PPS; lists 0 and 3
+ // keep whatever the SPS pass selected.
+ if ((i != 0) && (i != 3) && (i < 6)) {
+ pInfo->qm_present_list &= ~((0x1)<<i);
+ qm_type = FB_QM;
+ }
+ }
+ }
+
+
+ ///////////////////// Emit out Scaling_matrix//////////////////////
+ wi.vwi_type = VIDDEC_WORKLOAD_H264_SCALING_MATRIX;
+ // data_offset 0x aa bb cc dd
+ // bb is the workload item offset
+ // cc is the qm_type
+ // dd is the matrix number
+ //
+ switch (qm_type)
+ {
+ case (SPS_QM):{
+
+ // 16 coefficients per list, packed little-endian, 8 per item.
+ for(n_items =0; n_items<2; n_items++)
+ {
+ wi.data.data_offset = i + (SPS_QM << 4) + (n_items <<8);
+ wi.data.data_payload[0] = ((uint32_t)(pInfo->active_SPS.ScalingList4x4[i][n_items*8+0]))+
+ (((uint32_t)(pInfo->active_SPS.ScalingList4x4[i][n_items*8+1]))<<8)+
+ (((uint32_t)(pInfo->active_SPS.ScalingList4x4[i][n_items*8+2]))<<16)+
+ (((uint32_t)(pInfo->active_SPS.ScalingList4x4[i][n_items*8+3]))<<24);
+ wi.data.data_payload[1] = ((uint32_t)(pInfo->active_SPS.ScalingList4x4[i][n_items*8+4]))+
+ (((uint32_t)(pInfo->active_SPS.ScalingList4x4[i][n_items*8+5]))<<8)+
+ (((uint32_t)(pInfo->active_SPS.ScalingList4x4[i][n_items*8+6]))<<16)+
+ (((uint32_t)(pInfo->active_SPS.ScalingList4x4[i][n_items*8+7]))<<24);
+
+ if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+ {
+ viddec_pm_append_workitem( parent, &wi );
+ }
+ else
+ {
+ viddec_pm_append_workitem_next( parent, &wi );
+ }
+
+ }
+
+ break;
+ }
+ case (PPS_QM):{
+
+ for(n_items =0; n_items<2; n_items++)
+ {
+ wi.data.data_offset = i + (PPS_QM << 4) + (n_items <<8);
+ wi.data.data_payload[0] = ((uint32_t)(pInfo->active_PPS.ScalingList4x4[i][n_items*8+0]))+
+ (((uint32_t)(pInfo->active_PPS.ScalingList4x4[i][n_items*8+1]))<<8)+
+ (((uint32_t)(pInfo->active_PPS.ScalingList4x4[i][n_items*8+2]))<<16)+
+ (((uint32_t)(pInfo->active_PPS.ScalingList4x4[i][n_items*8+3]))<<24);
+ wi.data.data_payload[1] = ((uint32_t)(pInfo->active_PPS.ScalingList4x4[i][n_items*8+4]))+
+ (((uint32_t)(pInfo->active_PPS.ScalingList4x4[i][n_items*8+5]))<<8)+
+ (((uint32_t)(pInfo->active_PPS.ScalingList4x4[i][n_items*8+6]))<<16)+
+ (((uint32_t)(pInfo->active_PPS.ScalingList4x4[i][n_items*8+7]))<<24);
+
+ if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+ {
+ viddec_pm_append_workitem( parent, &wi );
+ }
+ else
+ {
+ viddec_pm_append_workitem_next( parent, &wi );
+ }
+ }
+
+ break;
+ }
+ case (DEFAULT_QM):
+ {
+ // Default matrix: marker item only, no coefficient payload.
+ wi.data.data_offset = i + (DEFAULT_QM << 4);
+ wi.data.data_payload[0] = 0;
+ wi.data.data_payload[1] = 0;
+ if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+ {
+ viddec_pm_append_workitem( parent, &wi );
+ }
+ else
+ {
+ viddec_pm_append_workitem_next( parent, &wi );
+ }
+ break;
+ }
+ default:
+ {
+ break;
+ }
+ }
+ }
+
+}
+
+/* Emit workload items for the two 8x8 scaling matrices (lists 6 and 7).
+ * Source resolution mirrors the 4x4 path (PPS over SPS, either may
+ * select the default); SPS/PPS lists are emitted as eight 8-byte payload
+ * items, the default as a single marker item, FB_QM emits nothing. */
+static void h264_parse_emit_8X8_scaling_matrix( void *parent, h264_Info *pInfo )
+{
+
+ viddec_workload_item_t wi;
+
+ uint32_t i=0, n_items=0;
+ uint32_t qm_type=0;
+
+ for( i = 6; i < 8; i++ )
+ {
+ qm_type = FB_QM;
+ if (pInfo->active_SPS.seq_scaling_matrix_present_flag) // check sps first
+ {
+ if (pInfo->active_SPS.seq_scaling_list_present_flag[i])
+ {
+ pInfo->qm_present_list |= ((0x1)<<i);
+
+ // 8x8 default flags are indexed 0/1, hence the i-6.
+ if (pInfo->active_SPS.UseDefaultScalingMatrix8x8Flag[i-6])
+ {
+ qm_type = DEFAULT_QM;
+ }
+ else
+ {
+ qm_type = SPS_QM;
+ }
+ }
+ }
+
+ if (pInfo->active_PPS.pic_scaling_matrix_present_flag) // then check pps
+ {
+ if (pInfo->active_PPS.pic_scaling_list_present_flag[i])
+ {
+ pInfo->qm_present_list |= ((0x1)<<i);
+
+ if (pInfo->active_PPS.UseDefaultScalingMatrix8x8Flag[i-6])
+ {
+ qm_type = DEFAULT_QM;
+ }
+ else
+ {
+ qm_type = PPS_QM;
+ }
+ }
+ }
+ wi.vwi_type = VIDDEC_WORKLOAD_H264_SCALING_MATRIX;
+
+ // data_offset 0x aa bb cc dd
+ // bb is the workload item offset
+ // cc is the qm_type
+ // dd is the matrix number
+ //
+ switch (qm_type)
+ {
+ case (SPS_QM):
+ {
+ // 64 coefficients per list, packed little-endian, 8 per item.
+ for(n_items =0; n_items<8; n_items++)
+ {
+ wi.data.data_offset = i + (SPS_QM << 4) + (n_items <<8);
+ wi.data.data_payload[0] = ((uint32_t)(pInfo->active_SPS.ScalingList8x8[i-6][n_items*8+0]))+
+ (((uint32_t)(pInfo->active_SPS.ScalingList8x8[i-6][n_items*8+1]))<<8)+
+ (((uint32_t)(pInfo->active_SPS.ScalingList8x8[i-6][n_items*8+2]))<<16)+
+ (((uint32_t)(pInfo->active_SPS.ScalingList8x8[i-6][n_items*8+3]))<<24);
+ wi.data.data_payload[1] = ((uint32_t)(pInfo->active_SPS.ScalingList8x8[i-6][n_items*8+4]))+
+ (((uint32_t)(pInfo->active_SPS.ScalingList8x8[i-6][n_items*8+5]))<<8)+
+ (((uint32_t)(pInfo->active_SPS.ScalingList8x8[i-6][n_items*8+6]))<<16)+
+ (((uint32_t)(pInfo->active_SPS.ScalingList8x8[i-6][n_items*8+7]))<<24);
+
+ if(pInfo->push_to_cur) { //cur is empty, fill new frame in cur
+ viddec_pm_append_workitem( parent, &wi );
+ } else {
+ viddec_pm_append_workitem_next( parent, &wi );
+ }
+ }
+ break;
+ }
+ case (PPS_QM):
+ {
+ for(n_items =0; n_items<8; n_items++)
+ {
+ wi.data.data_offset = i + (PPS_QM << 4) + (n_items <<8);
+ wi.data.data_payload[0] = ((uint32_t)(pInfo->active_PPS.ScalingList8x8[i-6][n_items*8+0]))+
+ (((uint32_t)(pInfo->active_PPS.ScalingList8x8[i-6][n_items*8+1]))<<8)+
+ (((uint32_t)(pInfo->active_PPS.ScalingList8x8[i-6][n_items*8+2]))<<16)+
+ (((uint32_t)(pInfo->active_PPS.ScalingList8x8[i-6][n_items*8+3]))<<24);
+ wi.data.data_payload[1] = ((uint32_t)(pInfo->active_PPS.ScalingList8x8[i-6][n_items*8+4]))+
+ (((uint32_t)(pInfo->active_PPS.ScalingList8x8[i-6][n_items*8+5]))<<8)+
+ (((uint32_t)(pInfo->active_PPS.ScalingList8x8[i-6][n_items*8+6]))<<16)+
+ (((uint32_t)(pInfo->active_PPS.ScalingList8x8[i-6][n_items*8+7]))<<24);
+
+ if(pInfo->push_to_cur) { //cur is empty, fill new frame in cur
+ viddec_pm_append_workitem( parent, &wi );
+ } else {
+ viddec_pm_append_workitem_next( parent, &wi );
+ }
+ }
+
+ break;
+ }
+ case (DEFAULT_QM):
+ {
+ // Default matrix: marker item only, no coefficient payload.
+ wi.data.data_offset = i + (DEFAULT_QM << 4);
+ wi.data.data_payload[0] = 0;
+ wi.data.data_payload[1] = 0;
+ if(pInfo->push_to_cur) { //cur is empty, fill new frame in cur
+ viddec_pm_append_workitem( parent, &wi );
+ } else {
+ viddec_pm_append_workitem_next( parent, &wi );
+ }
+
+ break;
+ }
+ default:{
+ break;
+ }
+ }
+ }
+
+}
+
+
+
+static void h264_fill_pic_data(h264_Info *pInfo, h264_pic_data * p_pic_data) // pack current-picture state into the register image consumed by the decoder
+{
+   uint32_t data=0;
+   uint32_t dec_idc =0;          // frame store index of the picture being decoded
+   uint32_t frame_structure =0;  // 0 encodes FRAME for the hardware field below
+
+   //fill h264_dpb_init
+   data = PUT_FRAME_WIDTH_MB_BITS(pInfo->dpb.PicWidthInMbs) +
+          PUT_FRAME_HEIGHT_MB_BITS(pInfo->dpb.FrameHeightInMbs);
+
+   p_pic_data->h264_dpb_init = data;
+
+   ////////////////////////////////fill current pic info
+   data = 0;
+   dec_idc = pInfo->dpb.fs_dec_idc;
+   frame_structure = pInfo->img.structure;
+   if(frame_structure == FRAME)
+      frame_structure=0;
+   //data = PUT_BSD_IMAGE_FRAME_STORE_IDC_BITS(pInfo->dpb.fs[dec_idc].fs_idc);
+
+   //p_pic_data->h264_cur_bsd_img_init= data;
+
+   // Assemble the per-picture control word from SPS/PPS flags and picture structure.
+   data = PUT_BSD_IMAGE_STRUCTURE_BITS(frame_structure) +
+          PUT_BSD_IMAGE_IDR_BIT(pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR) +
+          PUT_BSD_IMAGE_MBAFF_FRAME_FLAG_BIT(pInfo->img.MbaffFrameFlag) +
+          PUT_BSD_IMAGE_ENTROPY_CODING_MODE_FLAG_BIT(pInfo->active_PPS.entropy_coding_mode_flag) +
+          PUT_BSD_IMAGE_CONSTRAINED_INTRA_PRED_FLAG_BIT(pInfo->active_PPS.constrained_intra_pred_flag) +
+          PUT_BSD_IMG_FRAME_MBS_ONLY_FLAG_BIT(pInfo->active_SPS.sps_disp.frame_mbs_only_flag) +
+          PUT_BSD_IMG_DIRECT_8X8_INFER_FLAG_BIT(pInfo->active_SPS.sps_disp.direct_8x8_inference_flag) +
+          PUT_HPD_BSD_IMG_TRANSFORM_8X8_MODE_FLAG_BIT(pInfo->active_PPS.transform_8x8_mode_flag) +
+          PUT_HPD_BSD_IMG_MONOCHROME_FLAG_BIT(((pInfo->active_SPS.sps_disp.chroma_format_idc==0)? 0x1: 0x0)) +
+          PUT_HPD_BSD_IMG_GREY_NONEXISTING_FLAG_BIT(0x0) +
+          PUT_HPD_BSD_IMG_QM_PRESENT_FLAG_BIT((pInfo->active_PPS.pic_scaling_matrix_present_flag||pInfo->active_SPS.seq_scaling_matrix_present_flag)) +
+          PUT_HPD_BSD_IMG_QM_LIST_FLAGS_BITS(pInfo->qm_present_list) +
+          PUT_HPD_BSD_IMG_MONOCHROME_PWT_FLAG_BIT(0x1) +
+          PUT_BSD_IMAGE_FRAME_STORE_IDC_BITS(pInfo->dpb.fs[dec_idc].fs_idc);
+
+   p_pic_data->h264_cur_bsd_img_init= data;
+
+   //to do: add qm list
+   //PUT_HPD_BSD_IMG_QM_LIST_FLAGS_BITS(pInfo->img.q .qm_present_list) +
+   //printf("structure = %d, tpoc = %d, bpoc = %d\n", pInfo->img.structure, pInfo->img.toppoc, pInfo->img.bottompoc);
+
+   // POC registers: only the field(s) actually present carry a value; the other is zeroed.
+   if(pInfo->img.structure == FRAME)
+   {
+      // Write down POC
+      p_pic_data->h264_cur_mpr_tf_poc = pInfo->img.toppoc;
+      p_pic_data->h264_cur_mpr_bf_poc = pInfo->img.bottompoc;
+   }else if (pInfo->img.structure ==  TOP_FIELD)
+   {
+      // Write down POC
+      p_pic_data->h264_cur_mpr_tf_poc = pInfo->img.toppoc;
+      p_pic_data->h264_cur_mpr_bf_poc = 0;
+   }
+   else if (pInfo->img.structure ==  BOTTOM_FIELD)
+   {
+      // Write down POC
+      p_pic_data->h264_cur_mpr_tf_poc = 0;
+      p_pic_data->h264_cur_mpr_bf_poc = pInfo->img.bottompoc;
+   }
+   else
+   {
+      // Write down POC
+      p_pic_data->h264_cur_mpr_tf_poc = 0;
+      p_pic_data->h264_cur_mpr_bf_poc = 0;
+   }
+
+   return;
+}
+
+static void h264_parse_emit_sps(void *parent, h264_Info *pInfo) // emit SPS-derived workload items (sequence info, cropping, VUI, timing) when the SPS changed
+{
+   viddec_workload_item_t    wi;
+
+   if(pInfo->Is_SPS_updated)  // only emit when a new/changed SPS was parsed
+   {
+      viddec_fw_reset_workload_item(&wi);
+      wi.vwi_type = VIDDEC_WORKLOAD_SEQUENCE_INFO;
+
+      viddec_fw_h264_sps_set_profile_idc(&(wi.h264_sps), pInfo->active_SPS.profile_idc);
+      viddec_fw_h264_sps_set_level_idc(&(wi.h264_sps), pInfo->active_SPS.level_idc);
+      viddec_fw_h264_sps_set_chroma_format_idc(&(wi.h264_sps), pInfo->active_SPS.sps_disp.chroma_format_idc);
+      viddec_fw_h264_sps_set_num_ref_frames(&(wi.h264_sps), pInfo->active_SPS.num_ref_frames);
+      viddec_fw_h264_sps_set_gaps_in_frame_num_value_allowed_flag(&(wi.h264_sps), pInfo->active_SPS.gaps_in_frame_num_value_allowed_flag);
+      viddec_fw_h264_sps_set_frame_mbs_only_flag(&(wi.h264_sps), pInfo->active_SPS.sps_disp.frame_mbs_only_flag);
+      viddec_fw_h264_sps_set_frame_cropping_flag(&(wi.h264_sps), pInfo->active_SPS.sps_disp.frame_cropping_flag);
+      viddec_fw_h264_sps_set_vui_parameters_present_flag(&(wi.h264_sps), pInfo->active_SPS.sps_disp.vui_parameters_present_flag);
+      wi.h264_sps.pic_width_in_mbs_minus1 = pInfo->active_SPS.sps_disp.pic_width_in_mbs_minus1;
+      wi.h264_sps.pic_height_in_map_units_minus1 = pInfo->active_SPS.sps_disp.pic_height_in_map_units_minus1;
+
+      if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+      {
+         viddec_pm_append_workitem( parent, &wi );
+      }
+      else
+      {
+         viddec_pm_append_workitem_next( parent, &wi );
+      }
+      // Optional cropping rectangle item.
+      viddec_fw_reset_workload_item(&wi);
+      if(pInfo->active_SPS.sps_disp.frame_cropping_flag)
+      {
+         wi.vwi_type = VIDDEC_WORKLOAD_H264_CROPPING;
+         viddec_fw_h264_cropping_set_left(&(wi.h264_cropping), pInfo->active_SPS.sps_disp.frame_crop_rect_left_offset);
+         viddec_fw_h264_cropping_set_right(&(wi.h264_cropping), pInfo->active_SPS.sps_disp.frame_crop_rect_right_offset);
+         viddec_fw_h264_cropping_set_top(&(wi.h264_cropping), pInfo->active_SPS.sps_disp.frame_crop_rect_top_offset);
+         viddec_fw_h264_cropping_set_bottom(&(wi.h264_cropping), pInfo->active_SPS.sps_disp.frame_crop_rect_bottom_offset);
+
+         if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+         {
+            viddec_pm_append_workitem( parent, &wi );
+         }
+         else
+         {
+            viddec_pm_append_workitem_next( parent, &wi );
+         }
+      }
+      viddec_fw_reset_workload_item(&wi);
+      if(pInfo->active_SPS.sps_disp.vui_parameters_present_flag == 1)  // optional VUI display-info item
+      {
+         wi.vwi_type = VIDDEC_WORKLOAD_DISPLAY_INFO;
+         viddec_fw_h264_vui_set_aspect_ratio_info_present_flag(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.aspect_ratio_info_present_flag);
+         viddec_fw_h264_vui_set_video_signal_type_present_flag(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.video_signal_type_present_flag);
+         viddec_fw_h264_vui_set_pic_struct_present_flag(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.pic_struct_present_flag);
+         viddec_fw_h264_vui_set_timing_info_present_flag(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.timing_info_present_flag);
+         viddec_fw_h264_vui_set_nal_hrd_parameters_present_flag(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.nal_hrd_parameters_present_flag);
+         viddec_fw_h264_vui_set_vcl_hrd_parameters_present_flag(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.vcl_hrd_parameters_present_flag);
+
+         if(pInfo->active_SPS.sps_disp.vui_seq_parameters.aspect_ratio_info_present_flag == 1)
+         {
+            viddec_fw_h264_vui_set_aspect_ratio_idc(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.aspect_ratio_idc);
+            if(h264_AR_Extended_SAR == pInfo->active_SPS.sps_disp.vui_seq_parameters.aspect_ratio_idc)
+            {
+               viddec_fw_h264_vui_set_sar_width(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.sar_width);
+               viddec_fw_h264_vui_set_sar_height(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.sar_height);
+            }
+         }
+
+
+         if(pInfo->active_SPS.sps_disp.vui_seq_parameters.video_signal_type_present_flag)
+         {
+            viddec_fw_h264_vui_set_colour_description_present_flag(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.colour_description_present_flag);
+            if(pInfo->active_SPS.sps_disp.vui_seq_parameters.colour_description_present_flag)
+            {
+               viddec_fw_h264_vui_set_colour_primaries(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.colour_primaries);
+               viddec_fw_h264_vui_set_transfer_characteristics(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.transfer_characteristics);
+            }
+            viddec_fw_h264_vui_set_video_format(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.video_format);
+         }
+
+         if(pInfo->active_SPS.sps_disp.vui_seq_parameters.timing_info_present_flag == 1)
+         {
+            viddec_fw_h264_vui_set_fixed_frame_rate_flag(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.fixed_frame_rate_flag);
+         }
+
+         if( (pInfo->active_SPS.sps_disp.vui_seq_parameters.nal_hrd_parameters_present_flag == 1)
+            || (pInfo->active_SPS.sps_disp.vui_seq_parameters.vcl_hrd_parameters_present_flag == 1))
+         {
+            viddec_fw_h264_vui_set_low_delay_hrd_flag(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.low_delay_hrd_flag);
+         }
+
+         if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+         {
+            viddec_pm_append_workitem( parent, &wi );
+         }
+         else
+         {
+            viddec_pm_append_workitem_next( parent, &wi );
+         }
+      }
+
+      viddec_fw_reset_workload_item(&wi);
+      // Optional VUI timing-info item (frame-rate derivation inputs).
+      if(pInfo->active_SPS.sps_disp.vui_seq_parameters.timing_info_present_flag == 1)
+      {
+         wi.vwi_type = VIDDEC_WORKLOAD_H264_VUI_TIMING_INFO;
+
+         wi.h264_vui_time_info.num_units_in_tick = pInfo->active_SPS.sps_disp.vui_seq_parameters.num_units_in_tick;
+         wi.h264_vui_time_info.time_scale = pInfo->active_SPS.sps_disp.vui_seq_parameters.time_scale;
+         if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+         {
+            viddec_pm_append_workitem( parent, &wi );
+         }
+         else
+         {
+            viddec_pm_append_workitem_next( parent, &wi );
+         }
+      }
+
+      // Clear the flag so the items are emitted once per SPS update.
+      pInfo->Is_SPS_updated =0;
+
+   }
+
+   return;
+}
+
+
+
+
+static void h264_parse_emit_ref_list( void *parent, h264_Info *pInfo, uint32_t list_id) // emit reference list 0 or 1 packed four 8-bit entries per 32-bit word
+{
+   uint32_t  i=0, nitems=0, byte_index=0, data=0, data_writed=0;
+   uint8_t   *p_list;
+   viddec_workload_item_t    wi;
+
+   if(0 == list_id)  // list 0: used by P and B slices
+   {
+      wi.vwi_type = VIDDEC_WORKLOAD_H264_REFR_LIST_0;
+
+      if( (h264_PtypeB==pInfo->SliceHeader.slice_type)||(h264_PtypeP==pInfo->SliceHeader.slice_type) )
+      {
+         nitems = pInfo->SliceHeader.num_ref_idx_l0_active;
+         if(pInfo->SliceHeader.sh_refpic_l0.ref_pic_list_reordering_flag)
+         {
+            p_list = pInfo->slice_ref_list0;  // reordered list from the slice header
+         }
+         else
+         {
+            p_list = pInfo->dpb.listX_0;      // default DPB ordering
+         }
+      }
+      else
+      {
+         nitems =0;
+         p_list = pInfo->dpb.listX_0;
+      }
+   }
+   else  // list 1: only meaningful for B slices
+   {
+      wi.vwi_type = VIDDEC_WORKLOAD_H264_REFR_LIST_1;
+
+      if( h264_PtypeB==pInfo->SliceHeader.slice_type)
+      {
+         nitems = pInfo->SliceHeader.num_ref_idx_l1_active;
+         if(pInfo->SliceHeader.sh_refpic_l1.ref_pic_list_reordering_flag)
+         {
+            p_list = pInfo->slice_ref_list1;
+         }
+         else
+         {
+            p_list = pInfo->dpb.listX_1;
+         }
+      }
+      else
+      {
+         nitems = 0;
+         p_list = pInfo->dpb.listX_1;
+      }
+
+   }
+
+   if(0 == nitems)  // nothing to emit for this list
+   {
+      return;
+   }
+
+   byte_index =0;
+   data_writed=0;
+
+   // Always walk all 32 slots; slots beyond nitems are flagged with 0x80.
+   for (i=0; i < 32; i++)
+   {
+      if(byte_index == 0) data = 0;
+
+      if(i<nitems)
+      {
+         if( viddec_h264_get_is_non_existent(&(pInfo->dpb.fs[ (p_list[i]&0x1f) ])))
+         {
+            data |= (pInfo->h264_list_replacement) << byte_index;  // substitute a valid entry for a non-existent frame
+         }
+         else
+         {
+            data |= (p_list[i] & 0x7f) << byte_index;
+         }
+      }
+      else
+      {
+         data |= (0x80) << byte_index;  // unused slot marker
+      }
+
+      // Flush every 4 bytes; a workitem carries two such words.
+      if(byte_index == 24)
+      {
+         byte_index = 0;
+         wi.data.data_offset = data_writed&(~0x1);
+         wi.data.data_payload[data_writed&0x1]=data;
+
+         data =0;
+
+         if(data_writed&0x1)  // append only when both payload words are filled
+         {
+            if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+            {
+               viddec_pm_append_workitem( parent, &wi );
+            }
+            else
+            {
+               viddec_pm_append_workitem_next( parent, &wi );
+            }
+         }
+         data_writed ++;
+      }
+      else
+      {
+         byte_index += 8;
+      }
+   }
+
+}
+
+
+
+void h264_parse_emit_current_slice( void *parent, h264_Info *pInfo ) // emit all workload items for the slice just parsed: error flags, ref lists, slice regs, PWT, ES data
+{
+   // Scratch workitem reused for each append below.
+   viddec_workload_item_t     wi;
+   h264_slice_data 				slice_data;
+
+   uint32_t		i=0, nitems=0, data=0;
+   uint32_t 	bits_offset =0, byte_offset =0;
+   uint8_t    	is_emul =0;
+
+   ////////////////////// Update frame attributes/////////////////
+   h264_parse_update_frame_attributes(parent,pInfo);
+
+
+   if(pInfo->SliceHeader.sh_error) {
+      // Error type definition, refer to viddec_fw_common_defs.h
+      //		if error in top field, VIDDEC_FW_WORKLOAD_ERR_TOPFIELD			= (1 << 17)
+      //		if error in bottom field, VIDDEC_FW_WORKLOAD_ERR_BOTTOMFIELD	   = (1 << 18)
+      //		if this is frame based, both 2 bits should be set
+      // Record the error on whichever workload (current/next) this slice belongs to.
+      if(pInfo->push_to_cur) {
+         pInfo->wl_err_curr |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE;
+         pInfo->wl_err_curr |= (pInfo->SliceHeader.structure << FIELD_ERR_OFFSET);
+      } else {
+         pInfo->wl_err_next |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE;
+         pInfo->wl_err_next |= (pInfo->SliceHeader.structure << FIELD_ERR_OFFSET);
+      }
+   }
+
+
+   ////////////////////// Update Reference list //////////////////
+   // Find a replacement entry (first existing frame) for any non-existent refs.
+   if( (h264_PtypeB==pInfo->SliceHeader.slice_type)||(h264_PtypeP==pInfo->SliceHeader.slice_type) )
+   {
+      if(pInfo->SliceHeader.sh_refpic_l0.ref_pic_list_reordering_flag)
+      {
+         nitems = pInfo->SliceHeader.num_ref_idx_l0_active;
+
+         for(i=0; i<nitems;i++)
+         {
+            if(viddec_h264_get_is_non_existent(&(pInfo->dpb.fs[pInfo->slice_ref_list0[i]&0x1f]))==0)
+            {
+               pInfo->h264_list_replacement = (pInfo->slice_ref_list0[i]&0xFF)|0x80;
+               break;
+            }
+         }
+      }
+      else
+      {
+         nitems = pInfo->dpb.listXsize[0];
+
+         for(i=0; i<nitems;i++)
+         {
+            if(viddec_h264_get_is_non_existent(&(pInfo->dpb.fs[pInfo->dpb.listX_0[i]&0x1f]))==0)
+            {
+               pInfo->h264_list_replacement = (pInfo->dpb.listX_0[i]&0xFF)|0x80;
+               break;
+            }
+         }
+      }
+
+   }
+   else
+   {
+      nitems =0;
+   }
+   /////file ref list 0
+   h264_parse_emit_ref_list(parent, pInfo, 0);
+
+   /////file ref list 1
+   h264_parse_emit_ref_list(parent, pInfo, 1);
+
+   ///////////////////////////////////// Slice Data ////////////////////////////////
+   h264_fill_slice_data(pInfo, &slice_data);
+
+   wi.vwi_type = VIDDEC_WORKLOAD_H264_SLICE_REG;
+
+   wi.data.data_offset = slice_data.h264_bsd_slice_start;
+   wi.data.data_payload[0] = slice_data.h264_bsd_slice_p1;
+   wi.data.data_payload[1] = slice_data.h264_bsd_slice_p2;
+
+   if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+   {
+      viddec_pm_append_workitem( parent , &wi);
+   }
+   else
+   {
+      viddec_pm_append_workitem_next( parent , &wi);
+   }
+
+
+   ///////////////////////////predict weight table item and data if have///////////////////////////
+   if(pInfo->h264_pwt_enabled)
+   {
+      wi.vwi_type = VIDDEC_WORKLOAD_H264_PWT_BITS_OFFSET;
+      wi.data.data_offset = pInfo->h264_pwt_end_byte_offset- pInfo->h264_pwt_start_byte_offset+1;  // PWT length in bytes
+      wi.data.data_payload[0] = pInfo->h264_pwt_start_bit_offset;
+      wi.data.data_payload[1] = pInfo->h264_pwt_end_bit_offset;
+
+      if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+      {
+         viddec_pm_append_workitem( parent , &wi);
+
+         wi.vwi_type = VIDDEC_WORKLOAD_H264_PWT_ES_BYTES;
+         wi.es.es_flags = 0;
+         viddec_pm_append_misc_tags(parent, pInfo->h264_pwt_start_byte_offset, pInfo->h264_pwt_end_byte_offset,&wi,1);
+      }
+      else
+      {
+         viddec_pm_append_workitem_next( parent , &wi);
+
+         wi.vwi_type = VIDDEC_WORKLOAD_H264_PWT_ES_BYTES;
+         wi.es.es_flags = 0;
+         viddec_pm_append_misc_tags(parent, pInfo->h264_pwt_start_byte_offset, pInfo->h264_pwt_end_byte_offset,&wi,0);
+      }
+   }
+
+
+   ////////////////////////////////// Update ES Buffer for Slice ///////////////////////
+   viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul);
+
+   //OS_INFO("DEBUG---entropy_coding_mode_flag:%d, bits_offset: %d\n", pInfo->active_PPS.entropy_coding_mode_flag, bits_offset);
+
+   if(pInfo->active_PPS.entropy_coding_mode_flag)
+   {
+      // CABAC: skip to the next byte boundary before handing off pixel data.
+      if(0!=bits_offset)  {
+         viddec_pm_get_bits(parent, &data, 8-bits_offset);
+      }
+   }
+   else
+   {
+      // CAVLC: tell the firmware the sub-byte start offset of the slice data.
+      if(0!=bits_offset)  {
+         wi.vwi_type = VIDDEC_WORKLOAD_H264_SH_BITS_OFFSET;
+         wi.data.data_offset = bits_offset;
+         wi.data.data_payload[0]=0;
+         wi.data.data_payload[1]=0;
+
+         if(pInfo->push_to_cur) { //cur is empty, fill new frame in cur
+            viddec_pm_append_workitem( parent , &wi);
+         }
+         else {
+            viddec_pm_append_workitem_next( parent , &wi);
+         }
+      }
+   }
+
+   if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+   {
+      viddec_pm_append_pixeldata( parent );
+   }
+   else
+   {
+      viddec_pm_append_pixeldata_next( parent);
+   }
+
+   return;
+}
+
+
+void h264_parse_emit_current_pic( void *parent, h264_Info *pInfo ) // emit scaling matrices plus the picture register image as workload items
+{
+   // Scratch workitem for each append.
+   viddec_workload_item_t     wi;
+
+   const uint32_t             *pl;
+   uint32_t                   i=0,nitems=0;
+
+   h264_pic_data pic_data;
+
+   pInfo->qm_present_list=0;  // rebuilt by the scaling-matrix emitters below
+
+   h264_parse_emit_4X4_scaling_matrix(parent, pInfo);
+   h264_parse_emit_8X8_scaling_matrix(parent, pInfo);
+
+   h264_fill_pic_data(pInfo, &pic_data);
+
+   // How many payloads must be generated
+   nitems = (sizeof(h264_pic_data) + 7) / 8; // In QWORDs rounded up
+
+   pl = (const uint32_t *) &pic_data;
+
+   // Dump slice data to an array of workitems, to do pl access non valid mem
+   // NOTE(review): the (unsigned int) casts truncate pointers on 64-bit targets;
+   // harmless here only because the two pointers are within one struct — confirm.
+   for( i = 0; i < nitems; i++ )
+   {
+      wi.vwi_type           = VIDDEC_WORKLOAD_H264_PIC_REG;
+      wi.data.data_offset   = (unsigned int)pl - (unsigned int)&pic_data; // offset within struct
+      wi.data.data_payload[0] = pl[0];
+      wi.data.data_payload[1] = pl[1];
+      pl += 2;
+
+      if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+      {
+
+         viddec_pm_append_workitem( parent, &wi );
+      }
+      else
+      {
+         viddec_pm_append_workitem_next( parent, &wi );
+      }
+   }
+
+   return;
+}
+
+void h264_parse_emit_start_new_frame( void *parent, h264_Info *pInfo ) // open a new workload: frame attributes, SPS items, display/release/drop lists, DPB state
+{
+   // Scratch workitem for each append.
+   viddec_workload_item_t    wi;
+   uint32_t                  i=0,nitems=0;
+
+   ///////////////////////// Frame attributes//////////////////////////
+
+   //Push data into current workload if first frame or frame_boundary already detected by non slice nal
+   if( (pInfo->Is_first_frame_in_stream)||(pInfo->is_frame_boundary_detected_by_non_slice_nal))
+   {
+      viddec_workload_t			*wl_cur = viddec_pm_get_header( parent );
+      //pInfo->img.g_new_frame = 0;
+      pInfo->Is_first_frame_in_stream =0;
+      pInfo->is_frame_boundary_detected_by_non_slice_nal=0;
+      pInfo->push_to_cur = 1;
+      h264_translate_parser_info_to_frame_attributes(wl_cur, pInfo);
+   }
+   else  // move to cur if frame boundary detected by previous non slice nal, or move to next if not
+   {
+      viddec_workload_t        *wl_next = viddec_pm_get_next_header (parent);
+
+      pInfo->push_to_cur = 0;
+      h264_translate_parser_info_to_frame_attributes(wl_next, pInfo);
+
+      pInfo->is_current_workload_done=1;
+   }
+
+   ///////////////////// SPS/////////////////////
+   h264_parse_emit_sps(parent, pInfo);
+
+   /////////////////////display frames/////////////////////
+   nitems = pInfo->dpb.frame_numbers_need_to_be_displayed;
+
+   for(i=0; i<nitems; i++)
+   {
+      wi.vwi_type = VIDDEC_WORKLOAD_REF_FRAME_DISPLAY_0 + pInfo->dpb.frame_id_need_to_be_displayed[i];
+      wi.ref_frame.reference_id = pInfo->dpb.frame_id_need_to_be_displayed[i];
+      wi.ref_frame.luma_phys_addr = 0;
+      wi.ref_frame.chroma_phys_addr = 0;
+
+      if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+      {
+         viddec_pm_append_workitem( parent, &wi );
+      }
+      else
+      {
+         viddec_pm_append_workitem_next( parent, &wi );
+      }
+   }
+   pInfo->dpb.frame_numbers_need_to_be_displayed =0;  // list consumed
+
+
+   /////////////////////release frames/////////////////////
+   nitems = pInfo->dpb.frame_numbers_need_to_be_removed;
+
+   for(i=0; i<nitems; i++)
+   {
+      wi.vwi_type = VIDDEC_WORKLOAD_REF_FRAME_RELEASE_0 + pInfo->dpb.frame_id_need_to_be_removed[i];
+      wi.ref_frame.reference_id = pInfo->dpb.frame_id_need_to_be_removed[i];
+      wi.ref_frame.luma_phys_addr = 0;
+      wi.ref_frame.chroma_phys_addr = 0;
+
+      if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+      {
+         viddec_pm_append_workitem( parent, &wi );
+      }
+      else
+      {
+         viddec_pm_append_workitem_next( parent, &wi );
+      }
+
+   }
+   pInfo->dpb.frame_numbers_need_to_be_removed =0;  // list consumed
+
+   /////////////////////flush frames (do not display)/////////////////////
+   nitems = pInfo->dpb.frame_numbers_need_to_be_dropped;
+
+   for(i=0; i<nitems; i++)
+   {
+      wi.vwi_type = VIDDEC_WORKLOAD_REF_FRAME_DROPOUT_0 + pInfo->dpb.frame_id_need_to_be_dropped[i];
+      wi.ref_frame.reference_id = pInfo->dpb.frame_id_need_to_be_dropped[i];
+      wi.ref_frame.luma_phys_addr = 0;
+      wi.ref_frame.chroma_phys_addr = 0;
+
+      if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+      {
+         viddec_pm_append_workitem( parent, &wi );
+      }
+      else
+      {
+         viddec_pm_append_workitem_next( parent, &wi );
+      }
+
+   }
+   pInfo->dpb.frame_numbers_need_to_be_dropped =0;  // list consumed
+
+   /////////////////////update DPB frames/////////////////////
+   nitems = pInfo->dpb.used_size;
+   for(i=0; i<nitems; i++)
+   {
+      uint8_t fs_id = pInfo->dpb.fs_dpb_idc[i];
+
+      if(viddec_h264_get_is_non_existent(&(pInfo->dpb.fs[fs_id])) == 0)
+      {
+         wi.vwi_type = VIDDEC_WORKLOAD_DPB_ACTIVE_FRAME_0+fs_id;
+         wi.ref_frame.reference_id = fs_id;
+         wi.ref_frame.luma_phys_addr = 0;
+         wi.ref_frame.chroma_phys_addr = 0;
+
+         if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+         {
+            viddec_pm_append_workitem( parent, &wi );
+         }
+         else
+         {
+            viddec_pm_append_workitem_next( parent, &wi );
+         }
+      }
+   }
+
+
+   /////////////////////update dpb frames info (poc)/////////////////////
+   nitems = pInfo->dpb.used_size;
+   for(i=0; i<nitems; i++)
+   {
+      uint8_t fs_id = pInfo->dpb.fs_dpb_idc[i];
+
+      if(viddec_h264_get_is_non_existent(&(pInfo->dpb.fs[fs_id])) == 0)
+      {
+         wi.vwi_type = VIDDEC_WORKLOAD_H264_DPB_FRAME_POC;
+         wi.data.data_offset = fs_id;
+         //printf("is_used = %d, tpoc = %d, bpoc = %d\n", pInfo->dpb.fs[fs_id].is_used, pInfo->dpb.fs[fs_id].top_field.poc, pInfo->dpb.fs[fs_id].bottom_field.poc);
+
+         // Payload carries top/bottom POC depending on which field(s) are in use.
+         switch(viddec_h264_get_is_used(&(pInfo->dpb.fs[fs_id])))
+         {
+            case (FRAME):{
+               wi.data.data_payload[0] = pInfo->dpb.fs[fs_id].top_field.poc;
+               wi.data.data_payload[1] = pInfo->dpb.fs[fs_id].bottom_field.poc;
+               break;
+            };
+
+            case (TOP_FIELD):{
+               wi.data.data_payload[0] = pInfo->dpb.fs[fs_id].top_field.poc;
+               wi.data.data_payload[1] = 0;
+               break;
+            };
+
+            case (BOTTOM_FIELD):{
+               wi.data.data_payload[0] = 0;
+               wi.data.data_payload[1] = pInfo->dpb.fs[fs_id].bottom_field.poc;
+               break;
+            };
+
+            default : {
+               wi.data.data_payload[0] = 0;
+               wi.data.data_payload[1] = 0;
+               break;
+            };
+         }
+
+
+         if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+         {
+            viddec_pm_append_workitem( parent, &wi );
+         }
+         else
+         {
+            viddec_pm_append_workitem_next( parent, &wi );
+         }
+
+      }
+   }
+
+   /////////////////////Alloc buffer for current Existing frame/////////////////////
+   if(0!=pInfo->dpb.frame_numbers_need_to_be_allocated)
+   {
+      if(pInfo->push_to_cur)
+      {
+         viddec_workload_t        *wl_cur = viddec_pm_get_header (parent);
+         wl_cur->is_reference_frame |= WORKLOAD_REFERENCE_FRAME | (pInfo->dpb.frame_id_need_to_be_allocated & 0x1f);
+      }
+      else
+      {
+         viddec_workload_t        *wl_next = viddec_pm_get_next_header (parent);
+         wl_next->is_reference_frame |= WORKLOAD_REFERENCE_FRAME | (pInfo->dpb.frame_id_need_to_be_allocated & 0x1f);
+      }
+   }
+   pInfo->dpb.frame_numbers_need_to_be_allocated =0;
+
+   return;
+}
+
+
+
+void h264_parse_emit_eos( void *parent, h264_Info *pInfo ) // end of stream: flush the DPB and emit display/release items for every remaining frame
+{
+   // nitems: count of frames to display/release; wi: scratch workitem.
+   uint32_t nitems=0, i=0;
+   viddec_workload_item_t	wi;
+
+   // Mark the EOS boundary in the workload stream.
+   wi.vwi_type = VIDDEC_WORKLOAD_EOS_BEGIN_BOUNDARY;
+   wi.ref_frame.reference_id = 0;
+   wi.ref_frame.luma_phys_addr = 0;
+   wi.ref_frame.chroma_phys_addr = 0;
+
+   if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+   {
+      viddec_pm_append_workitem( parent, &wi );
+   }
+   else
+   {
+      viddec_pm_append_workitem_next( parent, &wi );
+   }
+
+   ////
+   //// Now we can flush out all frames in DPB for display
+   // A half-decoded frame store (is_used != 3) is first marked as a dangling field.
+   if(MPD_DPB_FS_NULL_IDC != pInfo->dpb.fs_dec_idc)
+   {
+      if(viddec_h264_get_is_used(&(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc])) != 3)
+      {
+         h264_dpb_mark_dangling_field(&pInfo->dpb, pInfo->dpb.fs_dec_idc);  //, DANGLING_TYPE_GAP_IN_FRAME
+      }
+   }
+
+
+   h264_dpb_store_previous_picture_in_dpb(pInfo, 0,0);
+   h264_dpb_flush_dpb(pInfo, 1, 0, pInfo->active_SPS.num_ref_frames);
+
+
+   /////////////////////display frames/////////////////////
+   nitems = pInfo->dpb.frame_numbers_need_to_be_displayed;
+
+   for(i=0; i<nitems; i++)
+   {
+      wi.vwi_type = VIDDEC_WORKLOAD_EOS_DISPLAY_FRAME_0 + pInfo->dpb.frame_id_need_to_be_displayed[i];
+      wi.ref_frame.reference_id = pInfo->dpb.frame_id_need_to_be_displayed[i];
+      wi.ref_frame.luma_phys_addr = 0;
+      wi.ref_frame.chroma_phys_addr = 0;
+
+      if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+      {
+         viddec_pm_append_workitem( parent, &wi );
+      }
+      else
+      {
+         viddec_pm_append_workitem_next( parent, &wi );
+      }
+   }
+   pInfo->dpb.frame_numbers_need_to_be_displayed =0;  // list consumed
+
+
+   /////////////////////release frames/////////////////////
+   nitems = pInfo->dpb.frame_numbers_need_to_be_removed;
+
+   for(i=0; i<nitems; i++)
+   {
+      wi.vwi_type = VIDDEC_WORKLOAD_EOS_RELEASE_FRAME_0 + pInfo->dpb.frame_id_need_to_be_removed[i];
+      wi.ref_frame.reference_id = pInfo->dpb.frame_id_need_to_be_removed[i];
+      wi.ref_frame.luma_phys_addr = 0;
+      wi.ref_frame.chroma_phys_addr = 0;
+
+      if(pInfo->push_to_cur) //cur is empty, fill new frame in cur
+      {
+         viddec_pm_append_workitem( parent, &wi );
+         // NOTE(review): cur path forces NOTDECODABLE, next path propagates wl_err_next —
+         // asymmetry looks deliberate but should be confirmed against firmware expectations.
+         viddec_pm_set_next_frame_error_on_eos(parent, VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE);
+      }
+      else
+      {
+         viddec_pm_append_workitem_next( parent, &wi );
+         viddec_pm_set_next_frame_error_on_eos(parent, pInfo->wl_err_next);
+      }
+   }
+   pInfo->dpb.frame_numbers_need_to_be_removed =0;  // list consumed
+
+   return;
+}
+
+
+
+
+
+
diff --git a/mix_vbp/viddec_fw/fw/codecs/mp2/include/mpeg2.h b/mix_vbp/viddec_fw/fw/codecs/mp2/include/mpeg2.h
new file mode 100644
index 0000000..aa2a712
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/mp2/include/mpeg2.h
@@ -0,0 +1,195 @@
+#ifndef _MPEG2_H
+#define _MPEG2_H
+
+/**
+ * mpeg2.h
+ * -------
+ * This file contains all the necessary enumerations and structures needed from
+ * the MPEG-2 Specification.
+ */
+
+/* Max Pan-Scan offsets */
+#define MPEG2_MAX_VID_OFFSETS 3
+
+/* Quantization matrix size */
+#define MPEG2_QUANT_MAT_SIZE  64
+
+/* MPEG2 Start Code Values (the byte following the 00 00 01 prefix) */
+typedef enum {
+    MPEG2_SC_PICTURE        = 0x00,
+    MPEG2_SC_SLICE_HDR      = 0x01,
+    MPEG2_SC_SLICE_MIN      = 0x01,
+    MPEG2_SC_SLICE_MAX      = 0xAF,
+    MPEG2_SC_USER_DATA      = 0xB2,
+    MPEG2_SC_SEQ_HDR        = 0xB3,
+    MPEG2_SC_SEQ_ERR        = 0xB4,
+    MPEG2_SC_EXT            = 0xB5,
+    MPEG2_SC_SEQ_END        = 0xB7,
+    MPEG2_SC_GROUP          = 0xB8,
+    MPEG2_SC_SYS_MIN        = 0xB9,
+    MPEG2_SC_SYS_MAX        = 0xFF,
+    MPEG2_SC_ALL            = 0xFF
+} mpeg2_start_codes;
+
+/* MPEG2 Extension Start Code ID (extension_start_code_identifier values) */
+typedef enum {
+    MPEG2_EXT_SEQ           = 1,
+    MPEG2_EXT_SEQ_DISP      = 2,
+    MPEG2_EXT_QUANT_MAT     = 3,
+    MPEG2_EXT_COPYRIGHT     = 4,
+    MPEG2_EXT_SEQ_SCAL      = 5,
+    MPEG2_EXT_PIC_DISP      = 7,
+    MPEG2_EXT_PIC_CODING    = 8,
+    MPEG2_EXT_PIC_SPA_SCAL  = 9,
+    MPEG2_EXT_PIC_TEMP_SCAL = 10,
+    MPEG2_EXT_ALL           = 11
+} mpeg2_ext_start_codes;
+
+/* MPEG2 Picture Coding Type Values */
+typedef enum {
+    MPEG2_PC_TYPE_FORBIDDEN = 0,
+    MPEG2_PC_TYPE_I         = 1,
+    MPEG2_PC_TYPE_P         = 2,
+    MPEG2_PC_TYPE_B         = 3
+} mpeg2_picture_type;
+
+/* MPEG2 Picture Structure Type Values */
+typedef enum {
+    MPEG2_PIC_STRUCT_RESERVED = 0,
+    MPEG2_PIC_STRUCT_TOP      = 1,
+    MPEG2_PIC_STRUCT_BOTTOM   = 2,
+    MPEG2_PIC_STRUCT_FRAME    = 3
+} mpeg2_picture_structure;
+
+/* MPEG2 Chroma Format Values */
+typedef enum {
+    MPEG2_CF_RESERVED = 0,
+    MPEG2_CF_420      = 1,
+    MPEG2_CF_422      = 2,
+    MPEG2_CF_444      = 3
+} mpeg2_chroma_format;
+
+/* MPEG2 Parser Structures */
+/* Sequence Header Info (from sequence_header()) */
+struct mpeg2_sequence_hdr_info
+{
+    uint32_t horizontal_size_value;
+    uint32_t vertical_size_value;
+    uint32_t aspect_ratio_information;
+    uint32_t frame_rate_code;
+    uint32_t bit_rate_value;
+    uint32_t vbv_buffer_size_value;
+    uint32_t constrained_parameters_flag;
+};
+
+/* Group of Pictures Header Info */
+struct mpeg2_gop_hdr_info
+{
+    uint32_t closed_gop;
+    uint32_t broken_link;
+};
+
+/* Picture Header */
+struct mpeg2_picture_hdr_info
+{
+    uint32_t temporal_reference;
+    uint32_t picture_coding_type;   /* mpeg2_picture_type */
+    uint32_t full_pel_forward_vect;
+    uint32_t forward_f_code;
+    uint32_t full_pel_backward_vect;
+    uint32_t backward_f_code;
+};
+
+/* Sequence Extension Info */
+struct mpeg2_sequence_ext_info
+{
+    uint32_t profile_and_level_indication;
+    uint32_t progressive_sequence;
+    uint32_t chroma_format;          /* mpeg2_chroma_format */
+    uint32_t horizontal_size_extension;
+    uint32_t vertical_size_extension;
+    uint32_t bit_rate_extension;
+    uint32_t vbv_buffer_size_extension;
+    uint32_t frame_rate_extension_n;
+    uint32_t frame_rate_extension_d;
+};
+
+/* Sequence Display Extension Info */
+struct mpeg2_sequence_disp_ext_info
+{
+    uint32_t video_format;
+    uint32_t colour_description;
+    uint32_t colour_primaries;
+    uint32_t transfer_characteristics;
+    uint32_t display_horizontal_size;
+    uint32_t display_vertical_size;
+};
+
+/* Sequence scalable extension Info */
+struct mpeg2_sequence_scal_ext_info
+{
+    uint32_t scalable_mode;
+};
+
+/* Picture Coding Extension */
+struct mpeg2_picture_coding_ext_info
+{
+    uint32_t fcode00;
+    uint32_t fcode01;
+    uint32_t fcode10;
+    uint32_t fcode11;
+    uint32_t intra_dc_precision;
+    uint32_t picture_structure;      /* mpeg2_picture_structure */
+    uint32_t top_field_first;
+    uint32_t frame_pred_frame_dct;
+    uint32_t concealment_motion_vectors;
+    uint32_t q_scale_type;
+    uint32_t intra_vlc_format;
+    uint32_t alternate_scan;
+    uint32_t repeat_first_field;
+    uint32_t chroma_420_type;
+    uint32_t progressive_frame;
+    uint32_t composite_display_flag;
+};
+
+/* Picture Display Extension (pan-scan offsets) */
+struct mpeg2_picture_disp_ext_info
+{
+    uint32_t frame_center_horizontal_offset[MPEG2_MAX_VID_OFFSETS];
+    uint32_t frame_center_vertical_offset[MPEG2_MAX_VID_OFFSETS];
+};
+
+/* Quantization Matrix Extension (which matrices were reloaded) */
+struct mpeg2_quant_ext_info
+{
+    uint32_t load_intra_quantiser_matrix;
+    uint32_t load_non_intra_quantiser_matrix;
+    uint32_t load_chroma_intra_quantiser_matrix;
+    uint32_t load_chroma_non_intra_quantiser_matrix;
+};
+
+/* Quantization Matrices */
+struct mpeg2_quant_matrices
+{
+    uint8_t intra_quantiser_matrix[MPEG2_QUANT_MAT_SIZE];
+    uint8_t non_intra_quantiser_matrix[MPEG2_QUANT_MAT_SIZE];
+    uint8_t chroma_intra_quantiser_matrix[MPEG2_QUANT_MAT_SIZE];
+    uint8_t chroma_non_intra_quantiser_matrix[MPEG2_QUANT_MAT_SIZE];
+};
+
+/* MPEG2 Info: aggregate of all parsed header/extension state */
+struct mpeg2_info
+{
+    struct mpeg2_sequence_hdr_info       seq_hdr;
+    struct mpeg2_gop_hdr_info            gop_hdr;
+    struct mpeg2_picture_hdr_info        pic_hdr;
+    struct mpeg2_sequence_ext_info       seq_ext;
+    struct mpeg2_sequence_disp_ext_info  seq_disp_ext;
+    struct mpeg2_sequence_scal_ext_info  seq_scal_ext;
+    struct mpeg2_picture_coding_ext_info pic_cod_ext;
+    struct mpeg2_picture_disp_ext_info   pic_disp_ext;
+    struct mpeg2_quant_ext_info          qnt_ext;
+    struct mpeg2_quant_matrices          qnt_mat;
+};
+
+#endif
diff --git a/mix_vbp/viddec_fw/fw/codecs/mp2/include/viddec_mpeg2.h b/mix_vbp/viddec_fw/fw/codecs/mp2/include/viddec_mpeg2.h
new file mode 100644
index 0000000..a6d8c2c
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/mp2/include/viddec_mpeg2.h
@@ -0,0 +1,231 @@
+#ifndef _VIDDEC_MPEG2_H
+#define _VIDDEC_MPEG2_H
+
+/**
+ * viddec_mpeg2.h
+ * --------------
+ * This header file contains all the necessary state information and function
+ * prototypes for the MPEG2 parser. This header also defines the debug macros
+ * used by the MPEG2 parser to emit debug messages in host mode.
+ */
+
+#include "viddec_fw_debug.h"
+#include "viddec_parser_ops.h"
+#include "mpeg2.h"
+
+/* Debug Print Macros */
+#define MPEG2_DEB(x...) DEB("MPEG2_Parser: "x)
+#define MPEG2_FA_DEB(x...) DEB("MPEG2_Frame_attribute: "x)
+
+/* Bit masks */
+#define MPEG2_BIT_MASK_11 0x7ff /* Used for masking Height and Width */
+#define MPEG2_BIT_MASK_8 0xff /* Used for masking start code byte */
+#define MPEG2_BIT_MASK_4 0xf /* Used for masking Level */
+#define MPEG2_BIT_MASK_3 0x7 /* Used for masking Profile */
+
+/* MPEG2 Start code and prefix size */
+#define MPEG2_SC_AND_PREFIX_SIZE 32
+
+/* Number of DMEM Workload Items */
+#define MPEG2_NUM_DMEM_WL_ITEMS 2
+
+/* Number of Quantization Matrix Workload Items */
+#define MPEG2_NUM_QMAT_WL_ITEMS 32
+
+/* Maximum supported content size */
+#define MPEG2_MAX_CONTENT_WIDTH 2048
+#define MPEG2_MAX_CONTENT_HEIGHT 2048
+
+/* Others */
+#define MPEG2_BITS_EIGHT 8
+
+
+/* MPEG2 Stream Levels */
+typedef enum {
+ MPEG2_LEVEL_SEQ = 0,
+ MPEG2_LEVEL_GOP,
+ MPEG2_LEVEL_PIC
+} mpeg2_stream_levels;
+
+/* MPEG2 Headers and Extensions */
+typedef enum {
+ MPEG2_HEADER_NONE = 0,
+ MPEG2_HEADER_SEQ = 1 << 0,
+ MPEG2_HEADER_SEQ_EXT = 1 << 1,
+ MPEG2_HEADER_SEQ_DISP_EXT = 1 << 2,
+ MPEG2_HEADER_GOP = 1 << 3,
+ MPEG2_HEADER_PIC = 1 << 4,
+ MPEG2_HEADER_PIC_COD_EXT = 1 << 5,
+ MPEG2_HEADER_PIC_DISP_EXT = 1 << 6,
+ MPEG2_HEADER_SEQ_SCAL_EXT = 1 << 7
+} mpeg2_headers;
+
+/* MPEG2 Parser Status Codes */
+typedef enum {
+ MPEG2_SUCCESS = 0, /* No error */
+ MPEG2_FRAME_COMPLETE = 1, /* Frame parsing complete found */
+ MPEG2_PARSE_ERROR = 2, /* Failure in parsing */
+} mpeg2_status;
+
+/* MPEG2 Current Workload Status Codes */
+typedef enum {
+ MPEG2_WL_EMPTY = 0,
+ MPEG2_WL_DMEM_DATA = (1 << 0),
+ MPEG2_WL_REF_INFO = (1 << 1),
+ MPEG2_WL_PARTIAL_SLICE = (1 << 2),
+ MPEG2_WL_DANGLING_FIELD = (1 << 3),
+ MPEG2_WL_COMPLETE = (1 << 4),
+ MPEG2_WL_MISSING_TF = (1 << 5),
+ MPEG2_WL_MISSING_BF = (1 << 6),
+ MPEG2_WL_UNSUPPORTED = (1 << 7),
+ /* Error codes */
+ MPEG2_WL_CORRUPTED_SEQ_HDR = (1 << 8),
+ MPEG2_WL_CORRUPTED_SEQ_EXT = (1 << 9),
+ MPEG2_WL_CORRUPTED_SEQ_DISP_EXT = (1 << 10),
+ MPEG2_WL_CORRUPTED_GOP_HDR = (1 << 11),
+ MPEG2_WL_CORRUPTED_PIC_HDR = (1 << 12),
+ MPEG2_WL_CORRUPTED_PIC_COD_EXT = (1 << 13),
+ MPEG2_WL_CORRUPTED_PIC_DISP_EXT = (1 << 14),
+ MPEG2_WL_CORRUPTED_QMAT_EXT = (1 << 15),
+ /* Error concealment codes */
+ MPEG2_WL_CONCEALED_PIC_COD_TYPE = (1 << 16),
+ MPEG2_WL_CONCEALED_PIC_STRUCT = (1 << 17),
+ MPEG2_WL_CONCEALED_CHROMA_FMT = (1 << 18),
+ /* Type of dangling field */
+ MPEG2_WL_DANGLING_FIELD_TOP = (1 << 24),
+ MPEG2_WL_DANGLING_FIELD_BOTTOM = (1 << 25),
+ MPEG2_WL_REPEAT_FIELD = (1 << 26),
+} mpeg2_wl_status_codes;
+
+/* MPEG2 Parser Workload types */
+typedef enum
+{
+ /* MPEG2 Decoder Specific data */
+ VIDDEC_WORKLOAD_MPEG2_DMEM = VIDDEC_WORKLOAD_DECODER_SPECIFIC,
+
+ /* MPEG2 Quantization Matrix data */
+ VIDDEC_WORKLOAD_MPEG2_QMAT,
+
+ /* Past reference frame */
+ VIDDEC_WORKLOAD_MPEG2_REF_PAST = VIDDEC_WORKLOAD_REF_FRAME_SOURCE_0,
+
+ /* Future reference frame */
+ VIDDEC_WORKLOAD_MPEG2_REF_FUTURE,
+
+ /* Use current frame as reference */
+ VIDDEC_WORKLOAD_MPEG2_REF_CURRENT_FRAME,
+
+ /* User Data */
+ VIDDEC_WORKLOAD_MPEG2_USERDATA = VIDDEC_WORKLOAD_USERDATA
+} viddec_mpeg2_workloads;
+
+/* MPEG2 Decoder Specific Workitems */
+struct mpeg2_workitems
+{
+ /* Core Sequence Info 1 */
+ uint32_t csi1;
+
+ /* Core Sequence Info 2 */
+ uint32_t csi2;
+
+ /* Core Picture Info 1 */
+ uint32_t cpi1;
+
+ /* Core Picture Coding Extension Info 1 */
+ uint32_t cpce1;
+
+ /* Quantization Matrices */
+ /* 0-15: Intra Quantization Matrix */
+ /* 16-31: Non-Intra Quantization Matrix */
+ /* 32-47: Chroma Intra Quantization Matrix */
+ /* 48-63: Chroma Non-Intra Quantization Matrix */
+ uint32_t qmat[MPEG2_QUANT_MAT_SIZE];
+};
+
+/* MPEG2 Video Parser Context */
+struct viddec_mpeg2_parser
+{
+ /* MPEG2 Metadata Structure */
+ struct mpeg2_info info;
+
+ /* MPEG2 Workitems */
+ struct mpeg2_workitems wi;
+
+ /* Workload Status */
+ uint32_t mpeg2_wl_status;
+
+ /* Last parsed start code */
+ int32_t mpeg2_last_parsed_sc;
+
+ /* Last parsed slice start code. Used to start emitting workload items. */
+ int32_t mpeg2_last_parsed_slice_sc;
+
+ /* Current sequence headers parsed */
+ uint8_t mpeg2_curr_seq_headers;
+
+ /* Current frame headers parsed */
+ uint8_t mpeg2_curr_frame_headers;
+
+ /* Flag to indicate a valid sequence header was successfully parsed for */
+ /* the current stream. */
+ uint8_t mpeg2_valid_seq_hdr_parsed;
+
+ /* Flag to indicate if quantization matrices are updated */
+ uint8_t mpeg2_custom_qmat_parsed;
+
+ /* Flag to indicate if reference table is updated with an entry */
+ uint8_t mpeg2_ref_table_updated;
+
+ /* Flag to indicate if the stream is MPEG2 */
+ uint8_t mpeg2_stream;
+
+ /* Flag to indicate if the previous picture metadata is parsed */
+ uint8_t mpeg2_pic_metadata_complete;
+
+ /* Number of active pan scan offsets */
+ uint8_t mpeg2_num_pan_scan_offsets;
+
+ /* Indicates the current stream level (Sequence/GOP/Picture) */
+ /* Used for identifying the level for User Data */
+ uint8_t mpeg2_stream_level;
+
+ /* Flag to indicate if the current picture is interlaced or not */
+ uint8_t mpeg2_picture_interlaced;
+
+ /* Flag to indicate if the current field for interlaced picture is first */
+ /* field or not. This flag is used only when mpeg2_picture_interlaced is */
+ /* set to 1. */
+ uint8_t mpeg2_first_field;
+
+ /* Flag to indicate if the current parsed data has start of a frame */
+ uint8_t mpeg2_frame_start;
+
+ /* Temporal reference of the previous picture - Used to detect dangling fields */
+ uint32_t mpeg2_prev_temp_ref;
+
+ /* Previous picture structure - Used to identify the type of missing field */
+ uint8_t mpeg2_prev_picture_structure;
+
+ /* Flag to decide whether to use the current or next workload to dump workitems */
+ uint8_t mpeg2_use_next_workload;
+ uint8_t mpeg2_first_slice_flag;
+};
+
+/* External Function Declarations */
+extern void *memset(void *s, int32_t c, uint32_t n);
+
+/* MPEG2 Parser Function Prototypes */
+void viddec_mpeg2_translate_attr (void *parent, void *ctxt);
+void viddec_mpeg2_emit_workload (void *parent, void *ctxt);
+void viddec_mpeg2_parse_seq_hdr (void *parent, void *ctxt);
+void viddec_mpeg2_parse_gop_hdr (void *parent, void *ctxt);
+void viddec_mpeg2_parse_pic_hdr (void *parent, void *ctxt);
+void viddec_mpeg2_parse_and_append_user_data(void *parent, void *ctxt);
+void viddec_mpeg2_parse_and_append_slice_data(void *parent, void *ctxt);
+void viddec_mpeg2_parse_ext (void *parent, void *ctxt);
+
+/* MPEG2 wrapper functions for workload operations */
+void viddec_mpeg2_append_workitem (void *parent, viddec_workload_item_t *wi, uint8_t flag);
+void viddec_mpeg2_append_pixeldata (void *parent, uint8_t flag);
+viddec_workload_t* viddec_mpeg2_get_header (void *parent, uint8_t flag);
+#endif
diff --git a/mix_vbp/viddec_fw/fw/codecs/mp2/parser/mix_vbp_mpeg2_stubs.c b/mix_vbp/viddec_fw/fw/codecs/mp2/parser/mix_vbp_mpeg2_stubs.c
new file mode 100644
index 0000000..6aa6120
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/mp2/parser/mix_vbp_mpeg2_stubs.c
@@ -0,0 +1,32 @@
+#include "viddec_mpeg2.h"
+#include "viddec_fw_item_types.h"
+
+
+void viddec_mpeg2_append_workitem(void *parent, viddec_workload_item_t *wi, uint8_t flag)
+{
+ return;
+}
+
+void viddec_mpeg2_emit_workload(void *parent, void *ctxt)
+{
+ return;
+}
+
+void viddec_mpeg2_append_pixeldata(void *parent, uint8_t flag)
+{
+ return;
+}
+
+viddec_workload_t* viddec_mpeg2_get_header (void *parent, uint8_t flag)
+{
+ viddec_workload_t *ret;
+ if (flag)
+ {
+ ret = viddec_pm_get_next_header(parent);
+ }
+ else
+ {
+ ret = viddec_pm_get_header(parent);
+ }
+ return ret;
+}
diff --git a/mix_vbp/viddec_fw/fw/codecs/mp2/parser/viddec_mpeg2_frame_attr.c b/mix_vbp/viddec_fw/fw/codecs/mp2/parser/viddec_mpeg2_frame_attr.c
new file mode 100644
index 0000000..e33a6d6
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/mp2/parser/viddec_mpeg2_frame_attr.c
@@ -0,0 +1,114 @@
+/**
+ * viddec_mpeg2_frame_attr.c
+ * -------------------------
+ * This is a helper file for viddec_mpeg2_workload.c to translate the data
+ * stored in the parser context into frame attributes in the workload.
+ */
+
+#include "viddec_mpeg2.h"
+
+/* viddec_mpeg2_print_attr() - Prints collected frame attributes */
+static inline void viddec_mpeg2_print_attr(viddec_frame_attributes_t *attr)
+{
+ unsigned int index = 0;
+
+ MPEG2_FA_DEB("Content_Size=%dx%d\n", attr->cont_size.width,
+ attr->cont_size.height);
+ MPEG2_FA_DEB("Repeat=%d\n", attr->mpeg2.repeat_first_field);
+ MPEG2_FA_DEB("Frame_Type=%d\n", attr->frame_type);
+ MPEG2_FA_DEB("Temporal_Reference=%d\n", attr->mpeg2.temporal_ref);
+ MPEG2_FA_DEB("Top_Field_First=%d\n", attr->mpeg2.top_field_first);
+ MPEG2_FA_DEB("Progressive_Frame=%d\n", attr->mpeg2.progressive_frame);
+ MPEG2_FA_DEB("Picture_Struct=%d\n", attr->mpeg2.picture_struct);
+ MPEG2_FA_DEB("Pan_Scan_Offsets=%d\n", attr->mpeg2.number_of_frame_center_offsets);
+
+ for (index = 0; index < attr->mpeg2.number_of_frame_center_offsets; index++)
+ {
+ MPEG2_FA_DEB("\tPan_Scan_Offset_%d= %dx%d\n", index,
+ attr->mpeg2.frame_center_offset[index].horz,
+ attr->mpeg2.frame_center_offset[index].vert);
+ }
+
+ return;
+}
+
+/* viddec_mpeg2_set_default_values() - Resets attributes that are optional */
+/* in the bitstream to their default values. */
+static inline void viddec_mpeg2_set_default_values(viddec_frame_attributes_t *attrs)
+{
+ unsigned int index = 0;
+
+ attrs->mpeg2.number_of_frame_center_offsets = 0;
+ for (index = 0; index < MPEG2_MAX_VID_OFFSETS ; index++)
+ {
+ attrs->mpeg2.frame_center_offset[index].horz = 0;
+ attrs->mpeg2.frame_center_offset[index].vert = 0;
+ }
+
+ return;
+}
+
+/* viddec_mpeg2_translate_attr() - Translates metadata parsed into frame */
+/* attributes in the workload */
+void viddec_mpeg2_translate_attr(void *parent, void *ctxt)
+{
+ /* Get MPEG2 Parser context */
+ struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;
+
+ /* Get workload */
+ viddec_workload_t *wl = viddec_pm_get_header( parent );
+
+ /* Get attributes in workload */
+ viddec_frame_attributes_t *attrs = &wl->attrs;
+
+ /* Get the default values for optional attributes */
+ viddec_mpeg2_set_default_values(attrs);
+
+ /* Populate attributes from parser context */
+ /* Content Size */
+ attrs->cont_size.height = ((parser->info.seq_ext.vertical_size_extension << 12)
+ | parser->info.seq_hdr.vertical_size_value);
+ attrs->cont_size.width = ((parser->info.seq_ext.horizontal_size_extension << 12)
+ | parser->info.seq_hdr.horizontal_size_value);
+
+ /* Repeat field */
+ attrs->mpeg2.repeat_first_field = parser->info.pic_cod_ext.repeat_first_field;
+
+ /* Temporal Reference */
+ attrs->mpeg2.temporal_ref = parser->info.pic_hdr.temporal_reference;
+
+ /* Top field first */
+ attrs->mpeg2.top_field_first = parser->info.pic_cod_ext.top_field_first;
+
+ /* Progressive frame */
+ attrs->mpeg2.progressive_frame = parser->info.pic_cod_ext.progressive_frame;
+
+ /* Picture Structure */
+ attrs->mpeg2.picture_struct = parser->info.pic_cod_ext.picture_structure;
+
+ /* Populate the frame type */
+ switch (parser->info.pic_hdr.picture_coding_type)
+ {
+ case MPEG2_PC_TYPE_I: attrs->frame_type = VIDDEC_FRAME_TYPE_I; break;
+ case MPEG2_PC_TYPE_P: attrs->frame_type = VIDDEC_FRAME_TYPE_P; break;
+ case MPEG2_PC_TYPE_B: attrs->frame_type = VIDDEC_FRAME_TYPE_B; break;
+ default: attrs->frame_type = VIDDEC_FRAME_TYPE_INVALID;
+ }
+
+ /* Update PanScan data */
+ if (parser->mpeg2_curr_frame_headers & MPEG2_HEADER_PIC_DISP_EXT)
+ {
+ unsigned int index = 0;
+ attrs->mpeg2.number_of_frame_center_offsets = parser->mpeg2_num_pan_scan_offsets;
+ for (index = 0; index < parser->mpeg2_num_pan_scan_offsets; index++)
+ {
+ attrs->mpeg2.frame_center_offset[index].horz = parser->info.pic_disp_ext.frame_center_horizontal_offset[index];
+ attrs->mpeg2.frame_center_offset[index].vert = parser->info.pic_disp_ext.frame_center_vertical_offset[index];
+ }
+ }
+
+ /* Print frame attributes */
+ viddec_mpeg2_print_attr(attrs);
+
+ return;
+}
diff --git a/mix_vbp/viddec_fw/fw/codecs/mp2/parser/viddec_mpeg2_metadata.c b/mix_vbp/viddec_fw/fw/codecs/mp2/parser/viddec_mpeg2_metadata.c
new file mode 100644
index 0000000..56604a4
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/mp2/parser/viddec_mpeg2_metadata.c
@@ -0,0 +1,1039 @@
+/**
+ * viddec_mpeg2_metadata.c
+ * -----------------------
+ * This file contains all the routines to parse the information from MPEG2
+ * elementary stream and store it in the parser context. Based on the data
+ * parsed, the state information in the context is updated.
+ *
+ * Headers currently parsed from MPEG2 stream include:
+ * - Sequence Header
+ * - Sequence Extension
+ * - Sequence Display Extension
+ * - GOP Header
+ * - Picture Header
+ * - Picture Coding Extension
+ * - Quantization Matrix Extension
+ * - Picture Display Extension
+ *
+ * The slice data is parsed and appended into workload in viddec_mpeg2_parse.c
+ */
+
+#include "viddec_mpeg2.h"
+
+/* Default quantization matrix values */
+const uint8_t mpeg2_default_intra_quant_matrix[MPEG2_QUANT_MAT_SIZE] = {
+ 8, 16, 19, 22, 26, 27, 29, 34,
+ 16, 16, 22, 24, 27, 29, 34, 37,
+ 19, 22, 26, 27, 29, 34, 34, 38,
+ 22, 22, 26, 27, 29, 34, 37, 40,
+ 22, 26, 27, 29, 32, 35, 40, 48,
+ 26, 27, 29, 32, 35, 40, 48, 58,
+ 26, 27, 29, 34, 38, 46, 56, 69,
+ 27, 29, 35, 38, 46, 56, 69, 83
+};
+const uint8_t mpeg2_default_non_intra_quant_matrix[MPEG2_QUANT_MAT_SIZE] = {
+ 16, 16, 16, 16, 16, 16, 16, 16,
+ 16, 16, 16, 16, 16, 16, 16, 16,
+ 16, 16, 16, 16, 16, 16, 16, 16,
+ 16, 16, 16, 16, 16, 16, 16, 16,
+ 16, 16, 16, 16, 16, 16, 16, 16,
+ 16, 16, 16, 16, 16, 16, 16, 16,
+ 16, 16, 16, 16, 16, 16, 16, 16,
+ 16, 16, 16, 16, 16, 16, 16, 16
+};
+
+/* Matrix for converting scan order */
+const uint8_t mpeg2_classic_scan[MPEG2_QUANT_MAT_SIZE] = {
+ 0, 1, 8, 16, 9, 2, 3, 10,
+ 17, 24, 32, 25, 18, 11, 4, 5,
+ 12, 19, 26, 33, 40, 48, 41, 34,
+ 27, 20, 13, 6, 7, 14, 21, 28,
+ 35, 42, 49, 56, 57, 50, 43, 36,
+ 29, 22, 15, 23, 30, 37, 44, 51,
+ 58, 59, 52, 45, 38, 31, 39, 46,
+ 53, 60, 61, 54, 47, 55, 62, 63
+};
+const uint8_t mpeg2_alternate_scan[MPEG2_QUANT_MAT_SIZE] = {
+ 0, 8, 16, 24, 1, 9, 2, 10,
+ 17, 25, 32, 40, 48, 56, 57, 49,
+ 41, 33, 26, 18, 3, 11, 4, 12,
+ 19, 27, 34, 42, 50, 58, 35, 43,
+ 51, 59, 20, 28, 5, 13, 6, 14,
+ 21, 29, 36, 44, 52, 60, 37, 45,
+ 53, 61, 22, 30, 7, 15, 23, 31,
+ 38, 46, 54, 62, 39, 47, 55, 63
+};
+
+/* Look-up tables for macro block address increment VLC */
+const uint8_t mb_addr_inc_tab1[16] = {
+ 0, 0, 7, 6, 5, 5, 4, 4,
+ 3, 3, 3, 3, 2, 2, 2, 2
+};
+const uint8_t mb_addr_inc_tab2[8] = {
+ 13, 12, 11, 10, 9, 9, 8, 8
+};
+const uint8_t mb_addr_inc_tab3[40] = {
+ 33, 32, 31, 30, 29, 28, 27, 26,
+ 25, 24, 23, 22, 21, 21, 20, 20,
+ 19, 19, 18, 18, 17, 17, 16, 16,
+ 15, 15, 15, 15, 15, 15, 15, 15,
+ 14, 14, 14, 14, 14, 14, 14, 14
+};
+
+/* mpeg2_copy_matrix() - Copies quantization matrix from src */
+/* to dst */
+static inline void mpeg2_copy_matrix(const uint8_t *src, uint8_t *dst)
+{
+ register uint32_t index = 0;
+ for(index=0; index < MPEG2_QUANT_MAT_SIZE; index++)
+ dst[index] = src[index];
+}
+
+/* mpeg2_get_quant_matrix() - Copies next 64 bytes in the stream into given */
+/* matrix */
+static inline int32_t mpeg2_get_quant_matrix(void *parent, uint8_t *matrix, uint32_t alternate_scan)
+{
+ int32_t ret = 1;
+ uint32_t index = 0, code = 0;
+ const uint8_t *zigzag_scan = (const uint8_t *) mpeg2_classic_scan;
+
+ if (alternate_scan)
+ {
+ zigzag_scan = (const uint8_t *) mpeg2_alternate_scan;
+ }
+
+ /* Start extracting matrix co-efficients and copy them in */
+ /* inverse zigzag scan order */
+ for (index = 0; index < MPEG2_QUANT_MAT_SIZE; index++)
+ {
+ ret = viddec_pm_get_bits(parent, &code, MPEG2_BITS_EIGHT);
+ /* Quantization values cannot be zero. If zero value if found, */
+ /* further parsing is stopped and the existing values are used.*/
+ if ((ret != 1) || (code == 0))
+ {
+ ret = -1;
+ break;
+ }
+ matrix[zigzag_scan[index]] = (uint8_t)(code & 0xFF);
+ }
+
+ return ret;
+}
+
+/* viddec_mpeg2_parse_seq_hdr() - Parse sequence header metadata and store */
+/* in parser context */
+void viddec_mpeg2_parse_seq_hdr(void *parent, void *ctxt)
+{
+ int32_t ret_code = 0;
+
+ /* Get MPEG2 Parser context */
+ struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;
+
+ /* Get Horizontal Frame Size */
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_hdr.horizontal_size_value, 12);
+
+ /* Get Vertical Frame Size */
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_hdr.vertical_size_value, 12);
+
+ /* Get Frame Aspect Ratio */
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_hdr.aspect_ratio_information, 4);
+
+ /* Get Frame Rate */
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_hdr.frame_rate_code, 4);
+
+ /* Get Bit Rate */
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_hdr.bit_rate_value, 18);
+
+ /* Skip Marker bit */
+ ret_code |= viddec_pm_skip_bits(parent, 1);
+
+ /* Get VBV Buffer Size Value */
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_hdr.vbv_buffer_size_value, 10);
+
+ /* Get Constrained Parameters Flag */
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_hdr.constrained_parameters_flag, 1);
+
+ /* Quantization Matrix Support */
+ /* Get Intra Quantizer matrix, if available or use default values */
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.qnt_ext.load_intra_quantiser_matrix, 1);
+ if (parser->info.qnt_ext.load_intra_quantiser_matrix)
+ {
+ ret_code |= mpeg2_get_quant_matrix(parent, parser->info.qnt_mat.intra_quantiser_matrix, 0);
+ mpeg2_copy_matrix(parser->info.qnt_mat.intra_quantiser_matrix, parser->info.qnt_mat.chroma_intra_quantiser_matrix);
+ }
+ else
+ {
+ if (!parser->mpeg2_custom_qmat_parsed)
+ {
+ mpeg2_copy_matrix(mpeg2_default_intra_quant_matrix, parser->info.qnt_mat.intra_quantiser_matrix);
+ mpeg2_copy_matrix(mpeg2_default_intra_quant_matrix, parser->info.qnt_mat.chroma_intra_quantiser_matrix);
+ }
+ }
+
+ /* Get Non-Intra Quantizer matrix, if available or use default values */
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.qnt_ext.load_non_intra_quantiser_matrix, 1);
+ if (parser->info.qnt_ext.load_non_intra_quantiser_matrix)
+ {
+ ret_code |= mpeg2_get_quant_matrix(parent, parser->info.qnt_mat.non_intra_quantiser_matrix, 0);
+ mpeg2_copy_matrix(parser->info.qnt_mat.non_intra_quantiser_matrix, parser->info.qnt_mat.chroma_non_intra_quantiser_matrix);
+ }
+ else
+ {
+ if (!parser->mpeg2_custom_qmat_parsed)
+ {
+ mpeg2_copy_matrix(mpeg2_default_non_intra_quant_matrix, parser->info.qnt_mat.non_intra_quantiser_matrix);
+ mpeg2_copy_matrix(mpeg2_default_non_intra_quant_matrix, parser->info.qnt_mat.chroma_non_intra_quantiser_matrix);
+ }
+ }
+
+ /* Error handling */
+ /* The return value from get_bits() function is accumulated. If the return value is not 1, */
+ /* then there was an error getting the required information from the stream and the status */
+ /* is updated for the current workload. */
+ if (ret_code == 1)
+ {
+ /* This flag indicates a valid sequence header has been parsed and so even if */
+ /* a sequence header is corrupted in the future, this valid sequence header */
+ /* could be reused. */
+ parser->mpeg2_valid_seq_hdr_parsed = true;
+ /* This flag indicates a valid custom quantization matrix has been parsed. */
+ /* So, if in the future, there is an error parsing quantization matrix, the */
+ /* parser will use the previously parsed custom values. */
+ if ((parser->info.qnt_ext.load_intra_quantiser_matrix)
+ || (parser->info.qnt_ext.load_non_intra_quantiser_matrix))
+ {
+ parser->mpeg2_custom_qmat_parsed = true;
+ }
+ MPEG2_DEB("Seqeunce header parsed successfully.\n");
+ }
+ else
+ {
+ /* Setting status to mark parser error while emitting the current workload. */
+ parser->mpeg2_wl_status |= MPEG2_WL_CORRUPTED_SEQ_HDR;
+ MPEG2_DEB("Sequence header corrupted.\n");
+ }
+
+ parser->mpeg2_stream = false;
+ parser->mpeg2_curr_seq_headers |= MPEG2_HEADER_SEQ;
+ parser->mpeg2_curr_frame_headers |= MPEG2_HEADER_SEQ;
+ parser->mpeg2_stream_level = MPEG2_LEVEL_SEQ;
+
+ return;
+}
+
+/* viddec_mpeg2_parse_gop_hdr() - Parse group of pictures header info and */
+/* store it in parser context */
+void viddec_mpeg2_parse_gop_hdr(void *parent, void *ctxt)
+{
+ int32_t ret_code = 0;
+
+ /* Get MPEG2 Parser context */
+ struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;
+
+ /* Skip first 25 bits */
+ /* Skip time_code */
+ ret_code |= viddec_pm_skip_bits(parent, 25);
+
+ /* Get closed gop info */
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.gop_hdr.closed_gop, 1);
+
+ /* Get broken link info */
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.gop_hdr.broken_link, 1);
+
+ if (ret_code == 1)
+ {
+ MPEG2_DEB("GOP Header parsed successfully.\n");
+ }
+ else
+ {
+ parser->mpeg2_wl_status |= MPEG2_WL_CORRUPTED_GOP_HDR;
+ MPEG2_DEB("GOP header corrupted.\n");
+ }
+
+ parser->mpeg2_curr_frame_headers |= MPEG2_HEADER_GOP;
+ parser->mpeg2_stream_level = MPEG2_LEVEL_GOP;
+
+ return;
+}
+
+/* viddec_mpeg2_parse_pic_hdr() - Parse picture header info and store it in */
+/* parser context */
+void viddec_mpeg2_parse_pic_hdr(void *parent, void *ctxt)
+{
+ int32_t ret_code = 0, found_error = 0;
+
+ /* Get MPEG2 Parser context */
+ struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;
+
+ /* Get Temporal Reference info */
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_hdr.temporal_reference, 10);
+
+ /* Get Picture Coding type */
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_hdr.picture_coding_type, 3);
+
+ /* Error Handling and Concealment */
+ /* Picture coding type should be one of I, P or B */
+ if ((parser->info.pic_hdr.picture_coding_type != MPEG2_PC_TYPE_I) &&
+ (parser->info.pic_hdr.picture_coding_type != MPEG2_PC_TYPE_P) &&
+ (parser->info.pic_hdr.picture_coding_type != MPEG2_PC_TYPE_B))
+ {
+ found_error = 1;
+ }
+ /* The first frame after a gop header should be a coded I picture as per */
+ /* section 6.3.1 in MPEG2 Specification. */
+ else if (parser->mpeg2_curr_frame_headers & MPEG2_HEADER_GOP)
+ {
+ if (parser->info.pic_hdr.picture_coding_type != MPEG2_PC_TYPE_I)
+ {
+ found_error = 1;
+ }
+ }
+ /* The first frame after a sequence header cannot be a coded B picture as per */
+ /* section 6.1.1.6 in MPEG2 Specification. */
+ else if (parser->mpeg2_curr_frame_headers & MPEG2_HEADER_SEQ)
+ {
+ if (parser->info.pic_hdr.picture_coding_type == MPEG2_PC_TYPE_B)
+ {
+ found_error = 1;
+ }
+ }
+
+ /* If there is an error parsing picture coding type, do error concealment and continue. */
+ if ((ret_code != 1) || (found_error))
+ {
+ if (found_error)
+ {
+ /* Setting status to mark parser error while emitting the current workload. */
+ parser->mpeg2_wl_status |= MPEG2_WL_CORRUPTED_PIC_HDR;
+ MPEG2_DEB("Picture header corrupted.\n");
+ }
+
+ /* Error concealment for picture coding type - Default to I picture. */
+ parser->info.pic_hdr.picture_coding_type = MPEG2_PC_TYPE_I;
+ parser->mpeg2_wl_status |= MPEG2_WL_CONCEALED_PIC_COD_TYPE;
+ MPEG2_DEB("Picture Coding Type corrupted. Concealing to I type.\n");
+ }
+
+ /* Skip next 16 bits */
+ /* Skip vbv_delay */
+ ret_code |= viddec_pm_skip_bits(parent, 16);
+
+ /* If Picture Coding type is either P or B then */
+ /* Get forward vector code */
+ if ((MPEG2_PC_TYPE_P == parser->info.pic_hdr.picture_coding_type) ||
+ (MPEG2_PC_TYPE_B == parser->info.pic_hdr.picture_coding_type))
+ {
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_hdr.full_pel_forward_vect, 1);
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_hdr.forward_f_code, 3);
+ }
+ else
+ {
+ parser->info.pic_hdr.full_pel_forward_vect = 0;
+ parser->info.pic_hdr.forward_f_code = 0;
+ }
+
+ /* If Picture coding type is B then */
+ /* Get backward vector code */
+ if (MPEG2_PC_TYPE_B == parser->info.pic_hdr.picture_coding_type)
+ {
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_hdr.full_pel_backward_vect, 1);
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_hdr.backward_f_code, 3);
+ }
+ else
+ {
+ parser->info.pic_hdr.full_pel_backward_vect = 0;
+ parser->info.pic_hdr.backward_f_code = 0;
+ }
+
+ if (ret_code == 1)
+ {
+ MPEG2_DEB("Picture header parsed successfully.\n")
+ }
+ else
+ {
+ /* Setting status to mark parser error while emitting the current workload. */
+ parser->mpeg2_wl_status |= MPEG2_WL_CORRUPTED_PIC_HDR;
+ MPEG2_DEB("Picture header corrupted.\n");
+ }
+
+ parser->mpeg2_curr_frame_headers |= MPEG2_HEADER_PIC;
+ parser->mpeg2_stream_level = MPEG2_LEVEL_PIC;
+
+ return;
+}
+
+/* viddec_mpeg2_parse_ext_seq() - Parse Sequence extension metadata and */
+/* store in parser context */
+void viddec_mpeg2_parse_ext_seq(void *parent, void *ctxt)
+{
+ int32_t ret_code = 0;
+
+ /* Get MPEG2 Parser context */
+ struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;
+
+ /* Get Profile and Level info */
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_ext.profile_and_level_indication, 8);
+
+ /* Get Progressive Sequence Flag */
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_ext.progressive_sequence, 1);
+
+ /* Get Chroma Format */
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_ext.chroma_format, 2);
+
+ /* Error Concealment */
+ /* If there is an error parsing chroma format, do error concealment and continue. */
+ if ((ret_code != 1) || (parser->info.seq_ext.chroma_format == MPEG2_CF_RESERVED))
+ {
+ if (parser->info.seq_ext.chroma_format == MPEG2_CF_RESERVED)
+ {
+ /* Setting status to mark parser error while emitting the current workload. */
+ parser->mpeg2_wl_status |= MPEG2_WL_CORRUPTED_SEQ_EXT;
+ MPEG2_DEB("Sequence extension corrupted.\n")
+ }
+
+ /* Error concealment for chroma format - Default to 4:2:0 */
+ parser->info.seq_ext.chroma_format = MPEG2_CF_420;
+ parser->mpeg2_wl_status |= MPEG2_WL_CONCEALED_CHROMA_FMT;
+ MPEG2_DEB("Chroma Format corrupted. Concealing to 4:2:0.\n");
+ }
+
+ /* Get Content Size Extension Data */
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_ext.horizontal_size_extension, 2);
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_ext.vertical_size_extension, 2);
+
+ /* Get Bit Rate Extension */
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_ext.bit_rate_extension, 12);
+
+ /* Skip Marker bit */
+ ret_code |= viddec_pm_skip_bits(parent, 1);
+
+ /* Get VBV Buffer Size Extension Data */
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_ext.vbv_buffer_size_extension, 8);
+
+ /* Skip 1 bit */
+ /* Skip low_delay */
+ ret_code |= viddec_pm_skip_bits(parent, 1);
+
+ /* Get Frame Rate extension data */
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_ext.frame_rate_extension_n, 2);
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_ext.frame_rate_extension_d, 5);
+
+ if (ret_code == 1)
+ {
+ MPEG2_DEB("Sequence extension header parsed successfully.\n")
+ }
+ else
+ {
+ /* Setting status to mark parser error while emitting the current workload. */
+ parser->mpeg2_wl_status |= MPEG2_WL_CORRUPTED_SEQ_EXT;
+ MPEG2_DEB("Sequence extension corrupted.\n")
+ }
+
+ /* Check if the last parsed start code was that of sequence header. */
+ /* If true, seq extension followed seq header => MPEG2 Stream */
+ parser->mpeg2_stream = (parser->mpeg2_last_parsed_sc == MPEG2_SC_SEQ_HDR) ? true:false;
+ parser->mpeg2_curr_seq_headers |= MPEG2_HEADER_SEQ_EXT;
+ parser->mpeg2_curr_frame_headers |= MPEG2_HEADER_SEQ_EXT;
+
+ return;
+}
+
+/* viddec_mpeg2_parse_ext_seq_disp() - Parse Sequence Display extension */
+/* metadata and store in parser context */
+void viddec_mpeg2_parse_ext_seq_disp(void *parent, void *ctxt)
+{
+ int32_t ret_code = 0;
+
+ /* Get MPEG2 Parser context */
+ struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;
+
+ /* Get video format */
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_disp_ext.video_format, 3);
+
+ /* Check if color description info is present */
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_disp_ext.colour_description, 1);
+
+ /* If color description is found, get color primaries info */
+ /* and transfer characteristics */
+ if (parser->info.seq_disp_ext.colour_description)
+ {
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_disp_ext.colour_primaries, 8);
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_disp_ext.transfer_characteristics, 8);
+ ret_code |= viddec_pm_skip_bits(parent, 8);
+ }
+
+ /* Get Display Horizontal Size */
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_disp_ext.display_horizontal_size, 14);
+ ret_code |= viddec_pm_skip_bits(parent, 1);
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_disp_ext.display_vertical_size, 14);
+
+ if (ret_code == 1)
+ {
+ MPEG2_DEB("Sequence display extension parsed successfully.\n");
+ }
+ else
+ {
+ /* Setting status to mark parser error while emitting the current workload. */
+ parser->mpeg2_wl_status |= MPEG2_WL_CORRUPTED_SEQ_DISP_EXT;
+ MPEG2_DEB("Sequence display extension corrupted.\n")
+ }
+
+ /* Set flag to indicate Sequence Display Extension is present */
+ parser->mpeg2_curr_frame_headers |= MPEG2_HEADER_SEQ_DISP_EXT;
+ parser->mpeg2_curr_seq_headers |= MPEG2_HEADER_SEQ_DISP_EXT;
+
+ return;
+}
+
+/* viddec_mpeg2_parse_ext_seq_scal() - Parse Sequence Scalable extension */
+/* metadata and store in parser context */
+void viddec_mpeg2_parse_ext_seq_scal(void *parent, void *ctxt)
+{
+ int32_t ret_code = 0;
+
+ /* Get MPEG2 Parser context */
+ struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;
+
+ /* Get scalable mode */
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_scal_ext.scalable_mode, 2);
+
+ if (ret_code == 1)
+ {
+ MPEG2_DEB("Sequence scalable extension parsed successfully.\n");
+ }
+
+ /* Set flag to indicate Sequence Scalable Extension is present */
+ parser->mpeg2_curr_frame_headers |= MPEG2_HEADER_SEQ_SCAL_EXT;
+ parser->mpeg2_curr_seq_headers |= MPEG2_HEADER_SEQ_SCAL_EXT;
+
+ return;
+}
+
+/* viddec_mpeg2_parse_ext_pic() - Parse Picture Coding extension */
+/* metadata and store in parser context */
+/* Also performs error concealment on the picture structure field, dangling */
+/* field detection, and updates the interlaced-picture tracking state. */
+void viddec_mpeg2_parse_ext_pic(void *parent, void *ctxt)
+{
+ int32_t ret_code = 0, found_error = 0;
+
+ /* Get MPEG2 Parser context */
+ struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;
+
+ /* Get Forward/Backward, Horizontal/Vertical codes */
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.fcode00, 4);
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.fcode01, 4);
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.fcode10, 4);
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.fcode11, 4);
+
+ /* Get Intra DC Precision */
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.intra_dc_precision, 2);
+
+ /* Get Picture Structure */
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.picture_structure, 2);
+
+ /* Error Handling and Concealment */
+ /* Picture structure should be frame, top field or bottom field */
+ if (parser->info.pic_cod_ext.picture_structure == MPEG2_PIC_STRUCT_RESERVED)
+ {
+ found_error = 1;
+ }
+ /* All pictures in progressive sequence should be frame picture */
+ else if (parser->info.seq_ext.progressive_sequence)
+ {
+ if (parser->info.pic_cod_ext.picture_structure != MPEG2_PIC_STRUCT_FRAME)
+ {
+ found_error = 1;
+ }
+ }
+
+ /* If there is an error parsing picture structure, do error concealment and continue. */
+ if ((ret_code != 1) || (found_error))
+ {
+ if (found_error)
+ {
+ /* Setting status to mark parser error while emitting the current workload. */
+ parser->mpeg2_wl_status |= MPEG2_WL_CORRUPTED_PIC_COD_EXT;
+ MPEG2_DEB("Picture coding extension corrupted.\n");
+ }
+
+ /* Error concealment for picture structure - Default to frame picture. */
+ parser->info.pic_cod_ext.picture_structure = MPEG2_PIC_STRUCT_FRAME;
+ parser->mpeg2_wl_status |= MPEG2_WL_CONCEALED_PIC_STRUCT;
+ MPEG2_DEB("Picture Structure corrupted. Concealing to Frame picture.\n");
+ }
+
+ /* Get flags */
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.top_field_first, 1);
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.frame_pred_frame_dct, 1);
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.concealment_motion_vectors, 1);
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.q_scale_type, 1);
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.intra_vlc_format, 1);
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.alternate_scan, 1);
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.repeat_first_field, 1);
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.chroma_420_type, 1);
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.progressive_frame, 1);
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.composite_display_flag, 1);
+
+ /* Error concealment for frame picture */
+ /* These four flags are only meaningful for frame pictures, so their being */
+ /* set implies the picture structure must be frame; force it if not. */
+ if ((parser->info.pic_cod_ext.top_field_first)
+ || (parser->info.pic_cod_ext.frame_pred_frame_dct)
+ || (parser->info.pic_cod_ext.repeat_first_field)
+ || (parser->info.pic_cod_ext.progressive_frame))
+ {
+ if (parser->info.pic_cod_ext.picture_structure != MPEG2_PIC_STRUCT_FRAME)
+ {
+ parser->info.pic_cod_ext.picture_structure = MPEG2_PIC_STRUCT_FRAME;
+ parser->mpeg2_wl_status |= MPEG2_WL_CONCEALED_PIC_STRUCT;
+ MPEG2_DEB("Picture Structure corrupted. Concealing to Frame picture.\n");
+ }
+ }
+
+ if (ret_code == 1)
+ {
+ MPEG2_DEB("Picture coding extension parsed successfully.\n");
+ }
+ else
+ {
+ /* Setting status to mark parser error while emitting the current workload. */
+ parser->mpeg2_wl_status |= MPEG2_WL_CORRUPTED_PIC_COD_EXT;
+ MPEG2_DEB("Picture coding extension corrupted.\n");
+ }
+
+ /* Dangling field detection */
+ /* If the previous picture is the first field, then the temporal reference number */
+ /* should match with the second field. Otherwise, one of the fields in the previous */
+ /* picture is missing and dangling field error is marked. The workload containing */
+ /* the previous picture is emitted out and current picture data is added to the next */
+ /* workload. The mpeg2_use_next_workload variable is used as a flag to direct the */
+ /* items into the current/next workload. */
+ if ((parser->mpeg2_picture_interlaced) && (parser->mpeg2_first_field))
+ {
+ if (parser->mpeg2_prev_temp_ref != parser->info.pic_hdr.temporal_reference)
+ {
+ /* Mark dangling field info in workload status */
+ parser->mpeg2_wl_status |= MPEG2_WL_DANGLING_FIELD;
+ if (parser->mpeg2_prev_picture_structure == MPEG2_PIC_STRUCT_BOTTOM)
+ {
+ parser->mpeg2_wl_status |= MPEG2_WL_DANGLING_FIELD_TOP;
+ }
+ else
+ {
+ parser->mpeg2_wl_status |= MPEG2_WL_DANGLING_FIELD_BOTTOM;
+ }
+ /* Set flag stating current workload is done */
+ parser->mpeg2_pic_metadata_complete = true;
+ /* Set flag to use the next workload for adding workitems for */
+ /* the current frame */
+ parser->mpeg2_use_next_workload = true;
+ /* Toggle first field flag to compensate for missing field */
+ parser->mpeg2_first_field = (parser->mpeg2_first_field) ? false : true;
+ }
+ else
+ {
+ /* Same field repeated */
+ if (parser->mpeg2_prev_picture_structure == parser->info.pic_cod_ext.picture_structure)
+ {
+ /* Mark unsupported in workload status */
+ parser->mpeg2_wl_status |= MPEG2_WL_REPEAT_FIELD;
+ }
+ }
+ }
+
+ /* Set context variables for interlaced picture handling */
+ if (parser->info.pic_cod_ext.picture_structure == MPEG2_PIC_STRUCT_FRAME)
+ {
+ /* Frame picture found. Reset variables used for interlaced fields picture. */
+ parser->mpeg2_picture_interlaced = false;
+ parser->mpeg2_first_field = false;
+ parser->mpeg2_use_next_workload = false;
+ }
+ else
+ {
+ /* Interlaced fields picture found. */
+ parser->mpeg2_picture_interlaced = true;
+ parser->mpeg2_first_field = (parser->mpeg2_first_field) ? false : true;
+ }
+
+ /* Set flags */
+ parser->mpeg2_curr_frame_headers |= MPEG2_HEADER_PIC_COD_EXT;
+ parser->mpeg2_prev_temp_ref = parser->info.pic_hdr.temporal_reference;
+ parser->mpeg2_prev_picture_structure = parser->info.pic_cod_ext.picture_structure;
+ /* A frame starts on a frame picture or on the first field of a field pair. */
+ if ((!parser->mpeg2_picture_interlaced)
+ || ((parser->mpeg2_picture_interlaced) && (parser->mpeg2_first_field)))
+ {
+ parser->mpeg2_frame_start = true;
+ }
+
+ return;
+}
+
+/* viddec_mpeg2_parse_ext_pic_disp() - Parse Picture Display extension */
+/* metadata and store in parser context */
+void viddec_mpeg2_parse_ext_pic_disp(void *parent, void *ctxt)
+{
+ int32_t ret_code = 0;
+ uint32_t index = 0;
+
+ /* Get MPEG2 Parser context */
+ struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;
+
+ /* Determine number of offsets */
+ /* The count (1-3) depends on progressive_sequence, picture_structure, */
+ /* repeat_first_field and top_field_first parsed from earlier headers. */
+ if (parser->info.seq_ext.progressive_sequence)
+ {
+ if (parser->info.pic_cod_ext.repeat_first_field)
+ {
+ parser->mpeg2_num_pan_scan_offsets =
+ (parser->info.pic_cod_ext.top_field_first) ? 3 : 2;
+ }
+ else /* Not repeat field */
+ parser->mpeg2_num_pan_scan_offsets = 1;
+ }
+ else /* Not progressive sequence */
+ {
+ /* Check if picture structure is a field */
+ if ((parser->info.pic_cod_ext.picture_structure == MPEG2_PIC_STRUCT_TOP) ||
+ (parser->info.pic_cod_ext.picture_structure == MPEG2_PIC_STRUCT_BOTTOM))
+ {
+ parser->mpeg2_num_pan_scan_offsets = 1;
+ }
+ else
+ {
+ parser->mpeg2_num_pan_scan_offsets =
+ (parser->info.pic_cod_ext.repeat_first_field) ? 3 : 2;
+ }
+ }
+
+ /* Get the offsets */
+ /* Each offset is a 16-bit horizontal value, a marker bit, a 16-bit */
+ /* vertical value and another marker bit; the marker bits are skipped. */
+ for (index = 0; index < parser->mpeg2_num_pan_scan_offsets; index++)
+ {
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_disp_ext.frame_center_horizontal_offset[index], 16);
+ ret_code |= viddec_pm_skip_bits(parent, 1);
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_disp_ext.frame_center_vertical_offset[index], 16);
+ ret_code |= viddec_pm_skip_bits(parent, 1);
+ }
+
+ if (ret_code == 1)
+ {
+ MPEG2_DEB("Picture display extension parsed successfully.\n");
+ }
+ else
+ {
+ /* Setting status to mark parser error while emitting the current workload. */
+ parser->mpeg2_wl_status |= MPEG2_WL_CORRUPTED_PIC_DISP_EXT;
+ MPEG2_DEB("Picture display extension corrupted.\n");
+ }
+
+ /* Set flag to indicate picture display extension is found */
+ parser->mpeg2_curr_frame_headers |= MPEG2_HEADER_PIC_DISP_EXT;
+ return;
+}
+
+/* viddec_mpeg2_parse_ext_quant() - Parse Quantization Matrix extension */
+/* metadata and store in parser context */
+void viddec_mpeg2_parse_ext_quant(void *parent, void *ctxt)
+{
+ int32_t ret_code = 0;
+
+ /* Get MPEG2 Parser context */
+ struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;
+
+ /* Quantization Matrix Support */
+ /* Get Intra Quantizer matrix, if available or use default values */
+ /* When a luma matrix is loaded without a chroma matrix, the luma matrix */
+ /* is copied into the chroma slot as well. */
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.qnt_ext.load_intra_quantiser_matrix, 1);
+ if (parser->info.qnt_ext.load_intra_quantiser_matrix)
+ {
+ ret_code |= mpeg2_get_quant_matrix(parent,
+ parser->info.qnt_mat.intra_quantiser_matrix,
+ parser->info.pic_cod_ext.alternate_scan);
+ mpeg2_copy_matrix(parser->info.qnt_mat.intra_quantiser_matrix,
+ parser->info.qnt_mat.chroma_intra_quantiser_matrix);
+ }
+
+ /* Get Non-Intra Quantizer matrix, if available */
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.qnt_ext.load_non_intra_quantiser_matrix, 1);
+ if (parser->info.qnt_ext.load_non_intra_quantiser_matrix)
+ {
+ ret_code |= mpeg2_get_quant_matrix(parent,
+ parser->info.qnt_mat.non_intra_quantiser_matrix,
+ parser->info.pic_cod_ext.alternate_scan);
+ mpeg2_copy_matrix(parser->info.qnt_mat.non_intra_quantiser_matrix,
+ parser->info.qnt_mat.chroma_non_intra_quantiser_matrix);
+ }
+
+ /* Get Chroma Intra Quantizer matrix, if available */
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.qnt_ext.load_chroma_intra_quantiser_matrix, 1);
+ if (parser->info.qnt_ext.load_chroma_intra_quantiser_matrix)
+ {
+ ret_code |= mpeg2_get_quant_matrix(parent,
+ parser->info.qnt_mat.chroma_intra_quantiser_matrix,
+ parser->info.pic_cod_ext.alternate_scan);
+ }
+
+ /* Get Chroma Non-Intra Quantizer matrix, if available */
+ ret_code |= viddec_pm_get_bits(parent, &parser->info.qnt_ext.load_chroma_non_intra_quantiser_matrix, 1);
+ if (parser->info.qnt_ext.load_chroma_non_intra_quantiser_matrix)
+ {
+ ret_code |= mpeg2_get_quant_matrix(parent,
+ parser->info.qnt_mat.chroma_non_intra_quantiser_matrix,
+ parser->info.pic_cod_ext.alternate_scan);
+ }
+
+ if (ret_code == 1)
+ {
+ MPEG2_DEB("Quantization matrix extension parsed successfully.\n");
+ }
+ else
+ {
+ /* Setting status to mark parser error while emitting the current workload. */
+ parser->mpeg2_wl_status |= MPEG2_WL_CORRUPTED_QMAT_EXT;
+ MPEG2_DEB("Quantization matrix extension corrupted.\n");
+ }
+
+ /* Set quantization matrices updated flag */
+ if ( (parser->info.qnt_ext.load_intra_quantiser_matrix) ||
+ (parser->info.qnt_ext.load_non_intra_quantiser_matrix) ||
+ (parser->info.qnt_ext.load_chroma_intra_quantiser_matrix) ||
+ (parser->info.qnt_ext.load_chroma_non_intra_quantiser_matrix) )
+ {
+ MPEG2_DEB("Custom quantization matrix found.\n");
+ }
+
+ return;
+}
+
+/* viddec_mpeg2_parse_ext() - Parse extension metadata and store in parser */
+/* context */
+/* Reads the 4-bit extension start code identifier and dispatches to the */
+/* matching extension parser; unknown extension types are silently ignored. */
+void viddec_mpeg2_parse_ext(void *parent, void *ctxt)
+{
+ uint32_t ext_code = 0;
+
+ /* Get extension start code */
+ viddec_pm_get_bits(parent, &ext_code, 4);
+
+ /* Switch on extension type */
+ switch ( ext_code )
+ {
+ /* Sequence Extension Info */
+ case MPEG2_EXT_SEQ:
+ viddec_mpeg2_parse_ext_seq(parent, ctxt);
+ break;
+
+ /* Sequence Display Extension info */
+ case MPEG2_EXT_SEQ_DISP:
+ viddec_mpeg2_parse_ext_seq_disp(parent, ctxt);
+ break;
+
+ /* Sequence Scalable Extension info */
+ case MPEG2_EXT_SEQ_SCAL:
+ viddec_mpeg2_parse_ext_seq_scal(parent, ctxt);
+ break;
+
+ /* Picture Coding Extension */
+ case MPEG2_EXT_PIC_CODING:
+ viddec_mpeg2_parse_ext_pic(parent, ctxt);
+ break;
+
+ /* Picture Display Extension */
+ case MPEG2_EXT_PIC_DISP:
+ viddec_mpeg2_parse_ext_pic_disp(parent, ctxt);
+ break;
+
+ /* Quantization Extension*/
+ case MPEG2_EXT_QUANT_MAT:
+ viddec_mpeg2_parse_ext_quant(parent, ctxt);
+ break;
+
+ default:
+ break;
+ } /* Switch, on extension type */
+
+ return;
+}
+
+/* viddec_mpeg2_parse_and_append_user_data() - Parse user data and append */
+/* to workload. */
+void viddec_mpeg2_parse_and_append_user_data(void *parent, void *ctxt)
+{
+ uint32_t user_data = 0;
+ viddec_workload_item_t wi;
+
+ /* Get MPEG2 Parser context */
+ struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;
+
+ /* Set the user data level (SEQ/GOP/PIC) in the workitem type. */
+ switch (parser->mpeg2_stream_level)
+ {
+ case MPEG2_LEVEL_SEQ:
+ {
+ wi.vwi_type = VIDDEC_WORKLOAD_SEQ_USER_DATA;
+ break;
+ }
+ case MPEG2_LEVEL_GOP:
+ {
+ wi.vwi_type = VIDDEC_WORKLOAD_GOP_USER_DATA;
+ break;
+ }
+ case MPEG2_LEVEL_PIC:
+ {
+ wi.vwi_type = VIDDEC_WORKLOAD_FRM_USER_DATA;
+ break;
+ }
+ default:
+ {
+ wi.vwi_type = VIDDEC_WORKLOAD_INVALID;
+ break;
+ }
+ }
+
+ /* Read 1 byte of user data and store it in workitem for the current */
+ /* stream level (SEQ/GOP/PIC). Keep adding data payloads till it reaches */
+ /* size 11. When it is 11, the maximum user data payload size, append the */
+ /* workitem. This loop is repeated till all user data is extracted and */
+ /* appended. */
+ /* NOTE(review): the memset below also clears user_data.size, so the */
+ /* explicit reset on the previous line is redundant but harmless. */
+ wi.user_data.size = 0;
+ memset(&(wi.user_data), 0, sizeof(wi.user_data));
+ while(viddec_pm_get_bits(parent, &user_data, MPEG2_BITS_EIGHT) != -1)
+ {
+ /* Store the valid byte in data payload */
+ wi.user_data.data_payload[wi.user_data.size] = user_data;
+ wi.user_data.size++;
+
+ /* When size exceeds payload size, append workitem and continue */
+ if (wi.user_data.size >= 11)
+ {
+ viddec_pm_setup_userdata(&wi);
+ viddec_mpeg2_append_workitem(parent, &wi, parser->mpeg2_use_next_workload);
+ viddec_fw_reset_workload_item(&wi);
+ wi.user_data.size = 0;
+ }
+ }
+ /* If size is not 0, append remaining user data. */
+ if (wi.user_data.size > 0)
+ {
+ viddec_pm_setup_userdata(&wi);
+ viddec_mpeg2_append_workitem(parent, &wi, parser->mpeg2_use_next_workload);
+ wi.user_data.size = 0;
+ }
+
+ MPEG2_DEB("User data @ Level %d found.\n", parser->mpeg2_stream_level);
+ return;
+}
+
+/* get_mb_addr_increment() - Decode the macroblock_address_increment VLC */
+/* from an 11-bit lookahead value in *data. The input is shifted/offset to */
+/* index into one of three lookup tables based on the code-length range. */
+/* NOTE(review): *data is modified in place as part of the table indexing; */
+/* callers must not reuse it as the raw lookahead afterwards. */
+static inline uint32_t get_mb_addr_increment(uint32_t *data)
+{
+ if (*data >= 1024)
+ {
+ /* Shortest code (leading '1') always means increment of 1 */
+ return 1;
+ }
+ else if (*data >= 128)
+ {
+ *data >>= 6;
+ return mb_addr_inc_tab1[*data];
+ }
+ else if (*data >= 64)
+ {
+ *data >>= 3;
+ *data -= 8;
+ return mb_addr_inc_tab2[*data];
+ }
+ else
+ {
+ *data -= 24;
+ return mb_addr_inc_tab3[*data];
+ }
+}
+
+/* viddec_mpeg2_get_first_mb_number() - Compute the number of the first */
+/* macroblock in the current slice from the slice start code (row), the */
+/* sequence horizontal size (MBs per row) and the first MB address */
+/* increment decoded from the slice header. */
+static void viddec_mpeg2_get_first_mb_number(void *parent, void *ctxt, uint32_t *first_mb)
+{
+ uint32_t mb_row = 0, mb_width = 0, prev_mb_addr = 0;
+ uint32_t temp = 0;
+
+ /* Get MPEG2 Parser context */
+ struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;
+ *first_mb = 0;
+ /* Low byte of the slice start code encodes the (1-based) MB row */
+ mb_row = ((parser->mpeg2_last_parsed_slice_sc & 0xFF) - 1);
+ /* Horizontal size in macroblocks (16-pixel units) */
+ mb_width = parser->info.seq_hdr.horizontal_size_value >> 4;
+ prev_mb_addr = (mb_row * mb_width) - 1;
+
+ /* Skip slice start code */
+ viddec_pm_skip_bits(parent, 32);
+
+ if (parser->info.seq_hdr.vertical_size_value > 2800)
+ {
+ /* Get 3 bits of slice_vertical_position_extension */
+ viddec_pm_get_bits(parent, &temp, 3);
+ mb_row += (temp << 7);
+ }
+
+ /* Skip priority_breakpoint if sequence scalable extension is present */
+ if (parser->mpeg2_curr_seq_headers & MPEG2_HEADER_SEQ_SCAL_EXT)
+ {
+ /* Skip 7 bits if scalable mode is 00 (Data partition) */
+ if (parser->info.seq_scal_ext.scalable_mode == 0)
+ {
+ viddec_pm_skip_bits(parent, 7);
+ }
+ }
+
+ /* Skip quantizer_scale */
+ viddec_pm_skip_bits(parent, 5);
+
+ /* Skip a few bits with slice information */
+ temp = 0;
+ viddec_pm_peek_bits(parent, &temp, 1);
+ if (temp == 0x1)
+ {
+ /* Skip intra_slice_flag(1), intra_slice(1) and reserved_bits(7) */
+ viddec_pm_skip_bits(parent, 9);
+ temp=0;
+ viddec_pm_peek_bits(parent, &temp, 1);
+ while (temp == 0x1)
+ {
+ /* Skip extra_bit_slice(1) and extra_information_slice(8) */
+ viddec_pm_skip_bits(parent, 9);
+ temp=0;
+ viddec_pm_peek_bits(parent, &temp, 1);
+ }
+ }
+
+ /* Skip extra_bit_slice flag */
+ viddec_pm_skip_bits(parent, 1);
+
+ /* Increment prev_mb_addr by 33 for every 11 bits of macroblock_escape string */
+ temp=0;
+ viddec_pm_peek_bits(parent, &temp, 11);
+ while (temp == 0x8)
+ {
+ viddec_pm_skip_bits(parent, 11);
+ prev_mb_addr += 33;
+ temp=0;
+ viddec_pm_peek_bits(parent, &temp, 11);
+ }
+
+ /* Get the mb_addr_increment and add it to prev_mb_addr to get the current mb number. */
+ *first_mb = prev_mb_addr + get_mb_addr_increment(&temp);
+ MPEG2_DEB("First MB number in slice is 0x%08X.\n", *first_mb);
+
+ return;
+}
+
+/* Parse slice data to get the number of macroblocks in the current slice and then */
+/* append as pixel data. */
+void viddec_mpeg2_parse_and_append_slice_data(void *parent, void *ctxt)
+{
+ uint32_t bit_off=0, start_byte=0, first_mb = 0;
+ uint8_t is_emul=0;
+ viddec_workload_item_t wi;
+
+ /* Get MPEG2 Parser context */
+ struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;
+
+ /* Get current byte position */
+ viddec_pm_get_au_pos(parent, &bit_off, &start_byte, &is_emul);
+
+ /* Populate wi type */
+ /* The first MB number of this slice is packed into the high 16 bits of */
+ /* the ES flags. */
+ viddec_mpeg2_get_first_mb_number(parent, ctxt, &first_mb);
+ wi.vwi_type = VIDDEC_WORKLOAD_PIXEL_ES;
+ wi.es.es_flags = (first_mb << 16);
+
+ /* Append data from given byte position as pixel data */
+ /* (unsigned int) -1 requests everything to the end of the access unit; */
+ /* the target workload depends on the dangling-field handling flag. */
+ viddec_pm_append_misc_tags(parent, start_byte, (unsigned int) -1, &wi, !parser->mpeg2_use_next_workload);
+ return;
+}
diff --git a/mix_vbp/viddec_fw/fw/codecs/mp2/parser/viddec_mpeg2_parse.c b/mix_vbp/viddec_fw/fw/codecs/mp2/parser/viddec_mpeg2_parse.c
new file mode 100644
index 0000000..a7b6ef7
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/mp2/parser/viddec_mpeg2_parse.c
@@ -0,0 +1,380 @@
+/**
+ * viddec_mpeg2_parse.c
+ * --------------------
+ * This file acts as the main interface between the parser manager and MPEG2
+ * parser. All the operations done by the MPEG2 parser are defined here and
+ * functions pointers for each operation is returned to the parser manager.
+ */
+
+#include "viddec_mpeg2.h"
+
+/* viddec_mpeg2_parser_init() - Initializes parser context. */
+/* When preserve is non-zero, only picture-level header info is reset and */
+/* sequence-level state is kept; otherwise the whole context is cleared. */
+static void viddec_mpeg2_parser_init
+(
+ void *ctxt,
+ uint32_t *persist_mem,
+ uint32_t preserve
+)
+{
+ struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;
+
+ /* Avoid compiler warning - persist_mem is unused (persist_size is 0) */
+ persist_mem = persist_mem;
+
+ /* Initialize state variables */
+ parser->mpeg2_pic_metadata_complete = false;
+ parser->mpeg2_picture_interlaced = false;
+ parser->mpeg2_first_field = false;
+ parser->mpeg2_frame_start = false;
+ parser->mpeg2_ref_table_updated = false;
+ parser->mpeg2_use_next_workload = false;
+ parser->mpeg2_first_slice_flag = false;
+ parser->mpeg2_curr_frame_headers = MPEG2_HEADER_NONE;
+ parser->mpeg2_last_parsed_sc = MPEG2_SC_ALL;
+ parser->mpeg2_last_parsed_slice_sc = MPEG2_SC_SLICE_MAX;
+ parser->mpeg2_wl_status = MPEG2_WL_EMPTY;
+ parser->mpeg2_prev_picture_structure = MPEG2_PIC_STRUCT_FRAME;
+ parser->mpeg2_prev_temp_ref = 0;
+ parser->mpeg2_num_pan_scan_offsets = 0;
+
+ if(preserve)
+ {
+ /* Init all picture level header info */
+ memset(&parser->info.pic_hdr, 0, sizeof(struct mpeg2_picture_hdr_info));
+ memset(&parser->info.pic_cod_ext, 0, sizeof(struct mpeg2_picture_coding_ext_info));
+ memset(&parser->info.pic_disp_ext, 0, sizeof(struct mpeg2_picture_disp_ext_info));
+ }
+ else
+ {
+ /* Init all header info */
+ memset(&parser->info, 0, sizeof(struct mpeg2_info));
+
+ parser->mpeg2_stream = false;
+ parser->mpeg2_custom_qmat_parsed = false;
+ parser->mpeg2_valid_seq_hdr_parsed = false;
+ parser->mpeg2_curr_seq_headers = MPEG2_HEADER_NONE;
+ }
+
+ MPEG2_DEB("MPEG2 Parser: Context Initialized.\n");
+
+ return;
+}
+
+/* viddec_mpeg2_get_context_size() - Returns the memory size required by the */
+/* MPEG2 parser. No persistent memory is needed across workloads. */
+static void viddec_mpeg2_get_context_size
+(
+ viddec_parser_memory_sizes_t *size
+)
+{
+ /* Should return size of my structure */
+ size->context_size = sizeof(struct viddec_mpeg2_parser);
+ size->persist_size = 0;
+}
+
+/* viddec_mpeg2_get_error_code() - Returns the error code for the current */
+/* workload. Maps the accumulated parser workload status bits to firmware */
+/* error flags, defaults unknown frame types to I, and clears reference */
+/* info for frames marked not decodable. */
+static void viddec_mpeg2_get_error_code
+(
+ struct viddec_mpeg2_parser *parser,
+ viddec_workload_t *wl,
+ uint32_t *error_code
+)
+{
+ *error_code = 0;
+
+ /* Dangling field error */
+ if (parser->mpeg2_wl_status & MPEG2_WL_DANGLING_FIELD)
+ {
+ *error_code |= VIDDEC_FW_WORKLOAD_ERR_DANGLING_FLD;
+ if (parser->mpeg2_wl_status & MPEG2_WL_DANGLING_FIELD_TOP)
+ {
+ *error_code |= VIDDEC_FW_WORKLOAD_ERR_TOPFIELD;
+ }
+ else
+ {
+ *error_code |= VIDDEC_FW_WORKLOAD_ERR_BOTTOMFIELD;
+ }
+ }
+
+ /* Repeated same field */
+ if (parser->mpeg2_wl_status & MPEG2_WL_REPEAT_FIELD)
+ {
+ *error_code |= (VIDDEC_FW_WORKLOAD_ERR_DANGLING_FLD
+ | VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE);
+ }
+
+ /* If workload is not complete, set non-decodeable flag */
+ if (!(parser->mpeg2_wl_status & MPEG2_WL_COMPLETE))
+ {
+ *error_code |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE;
+ }
+
+ /* If reference info is not updated, set missing reference flag */
+ if (!(parser->mpeg2_wl_status & MPEG2_WL_REF_INFO))
+ {
+ *error_code |= VIDDEC_FW_WORKLOAD_ERR_MISSING_REFERENCE;
+ }
+
+ /* Missing DMEM data flag and irrecoverable flag is set */
+ if (!(parser->mpeg2_wl_status & MPEG2_WL_DMEM_DATA))
+ {
+ *error_code |= ( VIDDEC_FW_WORKLOAD_ERR_MISSING_DMEM
+ | VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE ) ;
+ }
+
+ /* Missing sequence header and irrecoverable flag is set */
+ if ((!(parser->mpeg2_curr_seq_headers & MPEG2_HEADER_SEQ))
+ && (!parser->mpeg2_valid_seq_hdr_parsed))
+ {
+ *error_code |= ( VIDDEC_FW_WORKLOAD_ERR_MISSING_SEQ_INFO
+ | VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE ) ;
+ }
+
+ /* Unsupported features found in stream */
+ if (parser->mpeg2_wl_status & MPEG2_WL_UNSUPPORTED)
+ {
+ *error_code |= ( VIDDEC_FW_WORKLOAD_ERR_UNSUPPORTED
+ | VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE ) ;
+ }
+
+ /* If frame type is unknown, default to I frame. */
+ if ((wl->attrs.frame_type != VIDDEC_FRAME_TYPE_I)
+ && (wl->attrs.frame_type != VIDDEC_FRAME_TYPE_P)
+ && (wl->attrs.frame_type != VIDDEC_FRAME_TYPE_B))
+ {
+ wl->attrs.frame_type = VIDDEC_FRAME_TYPE_I;
+ }
+
+ /* If there is a mismatch between the frame type and reference information */
+ /* then mark the workload as not decodable */
+ /* (B frames are never references; I/P frames always are.) */
+ if (wl->attrs.frame_type == VIDDEC_FRAME_TYPE_B)
+ {
+ if (wl->is_reference_frame != 0) *error_code |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE;
+ }
+ else
+ {
+ if (wl->is_reference_frame == 0) *error_code |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE;
+ }
+
+ /* For non-decodable frames, do not set reference info so that the workload */
+ /* manager does not increment ref count. */
+ if (*error_code & VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE)
+ {
+ wl->is_reference_frame = 0;
+ }
+
+ /* Corrupted header notification */
+ if (parser->mpeg2_wl_status & MPEG2_WL_CORRUPTED_SEQ_HDR)
+ *error_code |= VIDDEC_FW_MPEG2_ERR_CORRUPTED_SEQ_HDR;
+ if (parser->mpeg2_wl_status & MPEG2_WL_CORRUPTED_SEQ_EXT)
+ *error_code |= VIDDEC_FW_MPEG2_ERR_CORRUPTED_SEQ_EXT;
+ if (parser->mpeg2_wl_status & MPEG2_WL_CORRUPTED_SEQ_DISP_EXT)
+ *error_code |= VIDDEC_FW_MPEG2_ERR_CORRUPTED_SEQ_DISP_EXT;
+ if (parser->mpeg2_wl_status & MPEG2_WL_CORRUPTED_GOP_HDR)
+ *error_code |= VIDDEC_FW_MPEG2_ERR_CORRUPTED_GOP_HDR;
+ if (parser->mpeg2_wl_status & MPEG2_WL_CORRUPTED_PIC_HDR)
+ *error_code |= VIDDEC_FW_MPEG2_ERR_CORRUPTED_PIC_HDR;
+ if (parser->mpeg2_wl_status & MPEG2_WL_CORRUPTED_PIC_COD_EXT)
+ *error_code |= VIDDEC_FW_MPEG2_ERR_CORRUPTED_PIC_COD_EXT;
+ if (parser->mpeg2_wl_status & MPEG2_WL_CORRUPTED_PIC_DISP_EXT)
+ *error_code |= VIDDEC_FW_MPEG2_ERR_CORRUPTED_PIC_DISP_EXT;
+ if (parser->mpeg2_wl_status & MPEG2_WL_CORRUPTED_QMAT_EXT)
+ *error_code |= VIDDEC_FW_MPEG2_ERR_CORRUPTED_QMAT_EXT;
+
+ MPEG2_DEB("Workload error code: 0x%8X.\n", *error_code);
+ return;
+}
+
+/* viddec_mpeg2_is_start_frame() - Returns if the current chunk of parsed */
+/* data has start of a frame. */
+static uint32_t viddec_mpeg2_is_start_frame
+(
+ void *ctxt
+)
+{
+ struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;
+ return (parser->mpeg2_frame_start);
+}
+
+/* viddec_mpeg2_is_workload_done() - Returns current frame parsing status */
+/* to the parser manager. Declares the workload done on a frame boundary */
+/* (picture/sequence/GOP start code) once picture metadata is complete, or */
+/* forcibly on EOS/discontinuity. */
+static uint32_t viddec_mpeg2_is_workload_done
+(
+ void *parent,
+ void *ctxt,
+ unsigned int next_sc,
+ uint32_t *codec_specific_errors
+)
+{
+ struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;
+ viddec_workload_t *wl = viddec_pm_get_header(parent);
+ uint32_t ret = VIDDEC_PARSE_SUCESS;
+ uint32_t frame_boundary = 0;
+ uint8_t force_frame_complete = 0;
+ /* NOTE(review): redundant self-assignment (parent is used above and */
+ /* below); presumably left over from an unused-parameter warning fix. */
+ parent = parent;
+
+ /* Detect Frame Boundary */
+ frame_boundary = ((MPEG2_SC_PICTURE == next_sc) || (MPEG2_SC_SEQ_HDR == next_sc) || (MPEG2_SC_GROUP == next_sc));
+ if (frame_boundary)
+ {
+ parser->mpeg2_first_slice_flag = false;
+ }
+
+ force_frame_complete = ((VIDDEC_PARSE_EOS == next_sc) || (VIDDEC_PARSE_DISCONTINUITY == next_sc));
+
+ if (force_frame_complete || (frame_boundary && (parser->mpeg2_pic_metadata_complete)))
+ {
+ if(!force_frame_complete)
+ {
+ parser->mpeg2_wl_status |= MPEG2_WL_COMPLETE;
+ parser->mpeg2_last_parsed_slice_sc = MPEG2_SC_SLICE_MAX;
+ parser->mpeg2_pic_metadata_complete = false;
+ parser->mpeg2_first_slice_flag = false;
+ }
+
+ viddec_mpeg2_get_error_code(parser, wl, codec_specific_errors);
+ parser->mpeg2_wl_status = MPEG2_WL_EMPTY;
+ parser->mpeg2_curr_frame_headers = MPEG2_HEADER_NONE;
+ /* Reset mpeg2_use_next_workload flag if it is set */
+ if (parser->mpeg2_use_next_workload)
+ {
+ viddec_pm_set_late_frame_detect(parent);
+ parser->mpeg2_use_next_workload = false;
+ }
+ ret = VIDDEC_PARSE_FRMDONE;
+ }
+ return ret;
+}
+
+/* viddec_mpeg2_parse() - Parse metadata info from the buffer for the prev */
+/* start code found. Dispatches on the start code to the header/extension */
+/* parsers, and appends slice data (emitting the workload on the first */
+/* slice of a picture). */
+static mpeg2_status viddec_mpeg2_parse
+(
+ void *parent,
+ void *ctxt
+)
+{
+ uint32_t current_sc = 0, sc_bits = MPEG2_SC_AND_PREFIX_SIZE;
+ int32_t ret = MPEG2_SUCCESS;
+ struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;
+
+ /* Reset frame start flag. For Mpeg1 we want to set frame start after
+ we parsed picture header, since there is no extension*/
+ parser->mpeg2_frame_start = (!parser->mpeg2_stream) && (parser->mpeg2_last_parsed_sc == MPEG2_SC_PICTURE);
+
+ /* Peek current start code - First 32 bits of the stream */
+ ret = viddec_pm_peek_bits(parent, &current_sc, sc_bits);
+ if (ret == -1)
+ {
+ MPEG2_DEB("Unable to get start code.\n");
+ return MPEG2_PARSE_ERROR;
+ }
+ current_sc &= MPEG2_BIT_MASK_8;
+ MPEG2_DEB("Start Code found = 0x%.8X\n", current_sc);
+
+ /* Get rid of the start code prefix for all start codes except slice */
+ /* start codes. */
+ /* (Slice parsing re-reads the full start code to recover the MB row.) */
+ if ((current_sc < MPEG2_SC_SLICE_MIN) || (current_sc > MPEG2_SC_SLICE_MAX))
+ {
+ viddec_pm_skip_bits(parent, sc_bits);
+ }
+
+ /* Parse Metadata based on the start code found */
+ switch( current_sc )
+ {
+ /* Sequence Start Code */
+ case MPEG2_SC_SEQ_HDR:
+ {
+ parser->mpeg2_curr_seq_headers = MPEG2_HEADER_NONE;
+ viddec_mpeg2_parse_seq_hdr(parent, ctxt);
+ }
+ break;
+
+ /* Picture Start Code */
+ case MPEG2_SC_PICTURE:
+ {
+ viddec_mpeg2_parse_pic_hdr(parent, ctxt);
+ }
+ break;
+
+ /* Extension Code */
+ case MPEG2_SC_EXT:
+ {
+ viddec_mpeg2_parse_ext(parent, ctxt);
+ }
+ break;
+
+ /* Group of Pictures Header */
+ case MPEG2_SC_GROUP:
+ {
+ viddec_mpeg2_parse_gop_hdr(parent, ctxt);
+ }
+ break;
+
+ /* Unused Start Code */
+ case MPEG2_SC_SEQ_END:
+ case MPEG2_SC_SEQ_ERR:
+ break;
+
+ /* User Data */
+ case MPEG2_SC_USER_DATA:
+ {
+ viddec_mpeg2_parse_and_append_user_data(parent, ctxt);
+ }
+ break;
+
+ default:
+ {
+ /* Slice Data - Append slice data to the workload */
+ if ((current_sc >= MPEG2_SC_SLICE_MIN) &&
+ (current_sc <= MPEG2_SC_SLICE_MAX))
+ {
+ if (!parser->mpeg2_first_slice_flag)
+ {
+ /* At this point, all the metadata required by the MPEG2 */
+ /* hardware for decoding is extracted and stored. So the */
+ /* metadata can be packed into workitems and emitted out.*/
+ viddec_mpeg2_emit_workload(parent, ctxt);
+
+ /* If the current picture is progressive or it is the */
+ /* second field of interlaced field picture then, set */
+ /* the workload done flag. */
+ if ((!parser->mpeg2_picture_interlaced)
+ || ((parser->mpeg2_picture_interlaced) && (!parser->mpeg2_first_field)))
+ {
+ parser->mpeg2_pic_metadata_complete = true;
+ }
+ else if ((parser->mpeg2_picture_interlaced) && (parser->mpeg2_first_field))
+ {
+ parser->mpeg2_curr_frame_headers = MPEG2_HEADER_NONE;
+ }
+
+ parser->mpeg2_first_slice_flag = true;
+ }
+ parser->mpeg2_last_parsed_slice_sc = current_sc;
+ viddec_mpeg2_parse_and_append_slice_data(parent, ctxt);
+ parser->mpeg2_wl_status |= MPEG2_WL_PARTIAL_SLICE;
+ }
+ }
+ } /* Switch */
+
+ /* Save last parsed start code */
+ parser->mpeg2_last_parsed_sc = current_sc;
+ return ret;
+}
+
+/* viddec_mpeg2_get_ops() - Register parser ops with the parser manager. */
+void viddec_mpeg2_get_ops
+(
+ viddec_parser_ops_t *ops
+)
+{
+ ops->init = viddec_mpeg2_parser_init;
+ ops->parse_syntax = viddec_mpeg2_parse;
+ ops->get_cxt_size = viddec_mpeg2_get_context_size;
+ ops->is_wkld_done = viddec_mpeg2_is_workload_done;
+ ops->is_frame_start = viddec_mpeg2_is_start_frame;
+ return;
+}
+
diff --git a/mix_vbp/viddec_fw/fw/codecs/mp2/parser/viddec_mpeg2_workload.c b/mix_vbp/viddec_fw/fw/codecs/mp2/parser/viddec_mpeg2_workload.c
new file mode 100644
index 0000000..503ded5
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/mp2/parser/viddec_mpeg2_workload.c
@@ -0,0 +1,461 @@
+/**
+ * viddec_mpeg2_workload.c
+ * -----------------------
+ * This file packs the data parsed and stored in the context into workload and
+ * emits it out. The current list of workitems emitted into the workload
+ * include:
+ *
+ * - DMEM - Register Data
+ * - Past and Future picture references
+ * - Quantization matrix data
+ *
+ * Slice data gets appended into the workload in viddec_mpeg2_parse.c
+ *
+ * Also, the frame attributes are updated in the workload.
+ */
+
+#include "viddec_mpeg2.h"
+#include "viddec_fw_item_types.h"
+
+void viddec_mpeg2_append_workitem(void *parent, viddec_workload_item_t *wi, uint8_t next_wl)
+{
+ if (next_wl)
+ {
+ viddec_pm_append_workitem_next(parent, wi);
+ }
+ else
+ {
+ viddec_pm_append_workitem(parent, wi);
+ }
+ return;
+}
+
+viddec_workload_t* viddec_mpeg2_get_header(void *parent, uint8_t next_wl)
+{
+ viddec_workload_t *ret;
+ if (next_wl)
+ {
+ ret = viddec_pm_get_next_header(parent);
+ }
+ else
+ {
+ ret = viddec_pm_get_header(parent);
+ }
+ return ret;
+}
+
+/* viddec_mpeg2_set_seq_ext_defaults() - Sets non-zero default values for */
+/* sequence extension items in case sequence extension is not present. */
+static void viddec_mpeg2_set_seq_ext_defaults(struct viddec_mpeg2_parser *parser)
+{
+ parser->info.seq_ext.progressive_sequence = true;
+ parser->info.seq_ext.chroma_format = MPEG2_CF_420;
+}
+
+/* viddec_mpeg2_set_pic_cod_ext_defaults() - Sets non-zero default values for*/
+/* picture coding extension items in case picture coding extension is not */
+/* present. */
+static void viddec_mpeg2_set_pic_cod_ext_defaults(struct viddec_mpeg2_parser *parser)
+{
+ parser->info.pic_cod_ext.picture_structure = MPEG2_PIC_STRUCT_FRAME;
+ parser->info.pic_cod_ext.frame_pred_frame_dct = true;
+ parser->info.pic_cod_ext.progressive_frame = true;
+}
+
+/* viddec_mpeg2_pack_qmat() - Packs the 256 byte quantization matrix data */
+/* into 64 32-bit values. */
+#ifdef MFDBIGENDIAN
+static void viddec_mpeg2_pack_qmat(struct viddec_mpeg2_parser *parser)
+{
+ /* Quantization Matrix Support */
+ /* Populate Quantization Matrices */
+ uint32_t index = 0;
+ uint32_t *qmat_packed, *qmat_unpacked;
+
+ /* When transferring the quantization matrix data from the parser */
+ /* context into workload items, we are packing four 8 bit */
+ /* quantization values into one DWORD (32 bits). To do this, the */
+ /* array of values of type uint8_t, is typecast as uint32 * and */
+ /* read. */
+ qmat_packed = (uint32_t *) parser->wi.qmat;
+ qmat_unpacked = (uint32_t *) &parser->info.qnt_mat;
+
+ for (index=0; index<MPEG2_QUANT_MAT_SIZE; index++)
+ {
+ qmat_packed[index] = qmat_unpacked[index];
+ }
+ return;
+}
+#else
+static void viddec_mpeg2_pack_qmat(struct viddec_mpeg2_parser *parser)
+{
+ /* Quantization Matrix Support */
+ /* Populate Quantization Matrices */
+ uint32_t index = 0;
+ uint32_t *qmat_packed;
+ uint8_t *qmat_unpacked;
+
+ /* When transferring the quantization matrix data from the parser */
+ /* context into workload items, we are packing four 8 bit */
+ /* quantization values into one DWORD (32 bits). To do this, the */
+ /* array of values of type uint8_t, is typecast as uint32 * and */
+ /* read. */
+ qmat_packed = (uint32_t *) parser->wi.qmat;
+ qmat_unpacked = (uint8_t *) &parser->info.qnt_mat;
+
+ for (index=0; index<MPEG2_QUANT_MAT_SIZE; index++)
+ {
+ qmat_packed[index] =
+ (((uint32_t)qmat_unpacked[(index<<2)+0])<< 24) |
+ (((uint32_t)qmat_unpacked[(index<<2)+1])<< 16) |
+ (((uint32_t)qmat_unpacked[(index<<2)+2])<< 8) |
+ (((uint32_t)qmat_unpacked[(index<<2)+3])<< 0) ;
+ }
+ return;
+}
+#endif
+
+/* viddec_mpeg2_trans_metadata_workitems() - Transfers the metadata stored */
+/* in parser context into workitems by bit masking. These workitems are then */
+/* sent through emitter */
+static void viddec_mpeg2_trans_metadata_workitems(void *ctxt)
+{
+ struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;
+
+ /* Reset register values */
+ parser->wi.csi1 = 0x0;
+ parser->wi.csi2 = 0x0;
+ parser->wi.cpi1 = 0x0;
+ parser->wi.cpce1 = 0x0;
+
+ /* Set defaults for missing fields */
+ if (!(parser->mpeg2_curr_seq_headers & MPEG2_HEADER_SEQ_EXT))
+ {
+ viddec_mpeg2_set_seq_ext_defaults(parser);
+ }
+ if (!(parser->mpeg2_curr_frame_headers & MPEG2_HEADER_PIC_COD_EXT))
+ {
+ viddec_mpeg2_set_pic_cod_ext_defaults(parser);
+ }
+
+ /* Populate Core Sequence Info 1 */
+ parser->wi.csi1 |= (parser->mpeg2_stream) << 1;
+ parser->wi.csi1 |= (parser->info.seq_hdr.constrained_parameters_flag) << 2;
+ parser->wi.csi1 |= (parser->info.seq_ext.progressive_sequence) << 3;
+ parser->wi.csi1 |= (parser->info.seq_ext.chroma_format) << 16;
+ parser->wi.csi1 |= (parser->info.qnt_ext.load_intra_quantiser_matrix) << 19;
+ parser->wi.csi1 |= (parser->info.qnt_ext.load_non_intra_quantiser_matrix) << 20;
+ parser->wi.csi1 |= (parser->info.qnt_ext.load_chroma_intra_quantiser_matrix) << 21;
+ parser->wi.csi1 |= (parser->info.qnt_ext.load_chroma_non_intra_quantiser_matrix) << 22;
+ MPEG2_DEB("Core Sequence Info 1: 0x%.8X\n", parser->wi.csi1);
+
+ /* Populate Core Sequence Info 2 */
+ parser->wi.csi2 |= (parser->info.seq_hdr.horizontal_size_value & MPEG2_BIT_MASK_11);
+ parser->wi.csi2 |= (parser->info.seq_hdr.vertical_size_value & MPEG2_BIT_MASK_11) << 14;
+ MPEG2_DEB("Core Sequence Info 2: 0x%.8X\n", parser->wi.csi2);
+
+ /* Populate Core Picture Info */
+ parser->wi.cpi1 |= (parser->info.pic_hdr.full_pel_forward_vect);
+ parser->wi.cpi1 |= (parser->info.pic_hdr.forward_f_code) << 1;
+ parser->wi.cpi1 |= (parser->info.pic_hdr.full_pel_backward_vect) << 4;
+ parser->wi.cpi1 |= (parser->info.pic_hdr.backward_f_code) << 5;
+ parser->wi.cpi1 |= (parser->info.pic_cod_ext.fcode00) << 8;
+ parser->wi.cpi1 |= (parser->info.pic_cod_ext.fcode01) << 12;
+ parser->wi.cpi1 |= (parser->info.pic_cod_ext.fcode10) << 16;
+ parser->wi.cpi1 |= (parser->info.pic_cod_ext.fcode11) << 20;
+ parser->wi.cpi1 |= (parser->info.pic_cod_ext.intra_dc_precision) << 24;
+ parser->wi.cpi1 |= (parser->info.pic_hdr.picture_coding_type-1) << 26;
+ MPEG2_DEB("Core Picture Info 1: 0x%.8X\n", parser->wi.cpi1);
+
+ /* Populate Core Picture Extension Info */
+ parser->wi.cpce1 |= (parser->info.pic_cod_ext.composite_display_flag);
+ parser->wi.cpce1 |= (parser->info.pic_cod_ext.progressive_frame) << 1;
+ parser->wi.cpce1 |= (parser->info.pic_cod_ext.chroma_420_type) << 2;
+ parser->wi.cpce1 |= (parser->info.pic_cod_ext.repeat_first_field) << 3;
+ parser->wi.cpce1 |= (parser->info.pic_cod_ext.alternate_scan) << 4;
+ parser->wi.cpce1 |= (parser->info.pic_cod_ext.intra_vlc_format) << 5;
+ parser->wi.cpce1 |= (parser->info.pic_cod_ext.q_scale_type) << 6;
+ parser->wi.cpce1 |= (parser->info.pic_cod_ext.concealment_motion_vectors) << 7;
+ parser->wi.cpce1 |= (parser->info.pic_cod_ext.frame_pred_frame_dct) << 8;
+ parser->wi.cpce1 |= (parser->info.pic_cod_ext.top_field_first) << 9;
+ parser->wi.cpce1 |= (parser->info.pic_cod_ext.picture_structure) << 10;
+ MPEG2_DEB("Core Picture Ext Info 1: 0x%.8X\n", parser->wi.cpce1);
+
+ return;
+}
+
+/* mpeg2_emit_frameid() - Sends the frame id as a workload item. */
+static inline void mpeg2_emit_frameid(void *parent, int32_t wl_type, uint8_t flag)
+{
+ viddec_workload_item_t wi;
+ wi.vwi_type = wl_type;
+
+ wi.ref_frame.reference_id = 0;
+ wi.ref_frame.luma_phys_addr = 0;
+ wi.ref_frame.chroma_phys_addr = 0;
+ viddec_mpeg2_append_workitem( parent, &wi, flag );
+}
+
+/* mpeg2_send_ref_reorder() - Reorders reference frames */
+static inline void mpeg2_send_ref_reorder(void *parent, uint8_t flag)
+{
+ viddec_workload_item_t wi;
+
+ wi.vwi_type = VIDDEC_WORKLOAD_REFERENCE_FRAME_REORDER;
+ wi.ref_reorder.ref_table_offset = 0;
+ /* Reorder index 1 to index 0 only */
+ wi.ref_reorder.ref_reorder_00010203 = 0x01010203;
+ wi.ref_reorder.ref_reorder_04050607 = 0x04050607;
+ viddec_mpeg2_append_workitem( parent, &wi, flag );
+}
+
+/* viddec_mpeg2_manage_ref() - Manages frame references by inserting the */
+/* past and future references (if any) for every frame inserted in the */
+/* workload. */
+static void viddec_mpeg2_manage_ref(void *parent, void *ctxt)
+{
+ int32_t frame_id = 1;
+ int32_t frame_type;
+
+ /* Get MPEG2 Parser context */
+ struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;
+ viddec_workload_t *wl = viddec_mpeg2_get_header( parent, parser->mpeg2_use_next_workload );
+ wl->is_reference_frame = 0;
+
+ /* Identify the frame type (I, P or B) */
+ frame_type = parser->info.pic_hdr.picture_coding_type;
+
+ /* Send reference frame information based on whether the picture is a */
+ /* frame picture or field picture. */
+ if ((!parser->mpeg2_picture_interlaced)
+ || ((parser->mpeg2_picture_interlaced) && (parser->mpeg2_first_field)))
+ {
+ /* Check if we need to reorder frame references/send frame for display */
+ /* in case of I or P type */
+ if (frame_type != MPEG2_PC_TYPE_B)
+ {
+ /* Checking reorder */
+ if (parser->mpeg2_ref_table_updated)
+ {
+ mpeg2_send_ref_reorder(parent, parser->mpeg2_use_next_workload);
+ }
+ }
+
+ /* Send reference frame workitems */
+ switch(frame_type)
+ {
+ case MPEG2_PC_TYPE_I:
+ {
+ break;
+ }
+ case MPEG2_PC_TYPE_P:
+ {
+ mpeg2_emit_frameid(parent, VIDDEC_WORKLOAD_MPEG2_REF_PAST, parser->mpeg2_use_next_workload);
+ break;
+ }
+ case MPEG2_PC_TYPE_B:
+ {
+ mpeg2_emit_frameid(parent, VIDDEC_WORKLOAD_MPEG2_REF_PAST, parser->mpeg2_use_next_workload);
+ mpeg2_emit_frameid(parent, VIDDEC_WORKLOAD_MPEG2_REF_FUTURE, parser->mpeg2_use_next_workload);
+ }
+ }
+
+ /* Set reference information updated flag */
+ if (!parser->mpeg2_picture_interlaced)
+ {
+ parser->mpeg2_wl_status |= MPEG2_WL_REF_INFO;
+ }
+ }
+ else
+ {
+ /* Set reference information updated flag for second field */
+ parser->mpeg2_wl_status |= MPEG2_WL_REF_INFO;
+ }
+
+ /* Set the reference frame flags for I and P types */
+ if (frame_type != MPEG2_PC_TYPE_B)
+ {
+ wl->is_reference_frame |= WORKLOAD_REFERENCE_FRAME | (frame_id & WORKLOAD_REFERENCE_FRAME_BMASK);
+ parser->mpeg2_ref_table_updated = true;
+ }
+
+ return;
+}
+
+/* viddec_mpeg2_check_unsupported() - Check for unsupported feature in the stream */
+static void viddec_mpeg2_check_unsupported(void *parent, void *ctxt)
+{
+ unsigned int unsupported_feature_found = 0;
+
+ /* Get MPEG2 Parser context */
+ struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;
+
+ /* Get workload */
+ viddec_workload_t *wl = viddec_mpeg2_get_header( parent, parser->mpeg2_use_next_workload );
+
+ /* Get attributes in workload */
+ viddec_frame_attributes_t *attrs = &wl->attrs;
+
+ /* Check for unsupported content size */
+ unsupported_feature_found |= (attrs->cont_size.height > MPEG2_MAX_CONTENT_HEIGHT);
+ unsupported_feature_found |= (attrs->cont_size.width > MPEG2_MAX_CONTENT_WIDTH);
+
+ /* Update parser status, if found */
+ if (unsupported_feature_found)
+ {
+ parser->mpeg2_wl_status |= MPEG2_WL_UNSUPPORTED;
+ }
+
+ return;
+}
+
+/* viddec_mpeg2_append_metadata() - Appends meta data from the stream. */
+void viddec_mpeg2_append_metadata(void *parent, void *ctxt)
+{
+ /* Get MPEG2 Parser context */
+ struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;
+
+ viddec_workload_item_t wi;
+
+ /* Append sequence info, if found with current frame */
+ if (parser->mpeg2_curr_frame_headers & MPEG2_HEADER_SEQ)
+ {
+ memset(&wi, 0, sizeof(viddec_workload_item_t));
+ wi.vwi_type = VIDDEC_WORKLOAD_SEQUENCE_INFO;
+
+ viddec_fw_mp2_sh_set_horizontal_size_value ( &(wi.mp2_sh) , parser->info.seq_hdr.horizontal_size_value);
+ viddec_fw_mp2_sh_set_vertical_size_value ( &(wi.mp2_sh) , parser->info.seq_hdr.vertical_size_value);
+ viddec_fw_mp2_sh_set_aspect_ratio_information ( &(wi.mp2_sh) , parser->info.seq_hdr.aspect_ratio_information);
+ viddec_fw_mp2_sh_set_frame_rate_code ( &(wi.mp2_sh) , parser->info.seq_hdr.frame_rate_code);
+ viddec_fw_mp2_sh_set_bit_rate_value ( &(wi.mp2_sh) , parser->info.seq_hdr.bit_rate_value);
+ viddec_fw_mp2_sh_set_vbv_buffer_size_value ( &(wi.mp2_sh) , parser->info.seq_hdr.vbv_buffer_size_value);
+
+ viddec_mpeg2_append_workitem(parent, &wi, parser->mpeg2_use_next_workload);
+ }
+
+ /* Append sequence extension info, if found with current frame */
+ if (parser->mpeg2_curr_frame_headers & MPEG2_HEADER_SEQ_EXT)
+ {
+ memset(&wi, 0, sizeof(viddec_workload_item_t));
+ wi.vwi_type = VIDDEC_WORKLOAD_MPEG2_SEQ_EXT;
+
+ viddec_fw_mp2_se_set_profile_and_level_indication( &(wi.mp2_se) , parser->info.seq_ext.profile_and_level_indication);
+ viddec_fw_mp2_se_set_progressive_sequence ( &(wi.mp2_se) , parser->info.seq_ext.progressive_sequence);
+ viddec_fw_mp2_se_set_chroma_format ( &(wi.mp2_se) , parser->info.seq_ext.chroma_format);
+ viddec_fw_mp2_se_set_horizontal_size_extension ( &(wi.mp2_se) , parser->info.seq_ext.horizontal_size_extension);
+ viddec_fw_mp2_se_set_vertical_size_extension ( &(wi.mp2_se) , parser->info.seq_ext.vertical_size_extension);
+ viddec_fw_mp2_se_set_bit_rate_extension ( &(wi.mp2_se) , parser->info.seq_ext.bit_rate_extension);
+ viddec_fw_mp2_se_set_vbv_buffer_size_extension ( &(wi.mp2_se) , parser->info.seq_ext.vbv_buffer_size_extension);
+ viddec_fw_mp2_se_set_frame_rate_extension_n ( &(wi.mp2_se) , parser->info.seq_ext.frame_rate_extension_n);
+ viddec_fw_mp2_se_set_frame_rate_extension_d ( &(wi.mp2_se) , parser->info.seq_ext.frame_rate_extension_d);
+
+ viddec_mpeg2_append_workitem(parent, &wi, parser->mpeg2_use_next_workload);
+ }
+
+ /* Append Display info, if present */
+ if (parser->mpeg2_curr_frame_headers & MPEG2_HEADER_SEQ_DISP_EXT)
+ {
+ memset(&wi, 0, sizeof(viddec_workload_item_t));
+ wi.vwi_type = VIDDEC_WORKLOAD_DISPLAY_INFO;
+
+ viddec_fw_mp2_sde_set_video_format ( &(wi.mp2_sde) , parser->info.seq_disp_ext.video_format);
+ viddec_fw_mp2_sde_set_color_description ( &(wi.mp2_sde) , parser->info.seq_disp_ext.colour_description);
+ viddec_fw_mp2_sde_set_color_primaries ( &(wi.mp2_sde) , parser->info.seq_disp_ext.colour_primaries);
+ viddec_fw_mp2_sde_set_transfer_characteristics( &(wi.mp2_sde) , parser->info.seq_disp_ext.transfer_characteristics);
+ viddec_fw_mp2_sde_set_display_horizontal_size ( &(wi.mp2_sde) , parser->info.seq_disp_ext.display_horizontal_size);
+ viddec_fw_mp2_sde_set_display_vertical_size ( &(wi.mp2_sde) , parser->info.seq_disp_ext.display_vertical_size);
+
+ viddec_mpeg2_append_workitem(parent, &wi, parser->mpeg2_use_next_workload);
+ }
+
+ /* Append GOP info, if present */
+ if (parser->mpeg2_curr_frame_headers & MPEG2_HEADER_GOP)
+ {
+ memset(&wi, 0, sizeof(viddec_workload_item_t));
+ wi.vwi_type = VIDDEC_WORKLOAD_GOP_INFO;
+
+ viddec_fw_mp2_gop_set_closed_gop ( &(wi.mp2_gop) , parser->info.gop_hdr.closed_gop);
+ viddec_fw_mp2_gop_set_broken_link( &(wi.mp2_gop) , parser->info.gop_hdr.broken_link);
+
+ viddec_mpeg2_append_workitem(parent, &wi, parser->mpeg2_use_next_workload);
+ }
+
+ return;
+}
+
+/* viddec_mpeg2_append_workitems() - Appends decoder specific workitems */
+/* to the workload starting at the address and length specified. */
+static void viddec_mpeg2_append_workitems
+(
+ void *parent,
+ uint32_t* address,
+ int workitem_type,
+ int num_items,
+ uint8_t flag
+)
+{
+ int32_t index=0;
+ const uint32_t* initial_address = address;
+ viddec_workload_item_t wi;
+
+ for (index=0; index < num_items; index++)
+ {
+ wi.vwi_type = workitem_type;
+ wi.data.data_offset = (char *) address - (const char *) initial_address;
+ wi.data.data_payload[0] = address[0];
+ wi.data.data_payload[1] = address[1];
+ address += 2;
+
+ viddec_mpeg2_append_workitem(parent, &wi, flag);
+ }
+
+ return;
+}
+
+/* viddec_mpeg2_emit_workload() - Emits MPEG2 parser generated work load */
+/* items. */
+/* Items include: MPEG2 DMEM Data, Quantization Matrices. */
+/* Pixel ES data sent separately whenever parser sees slice data */
+void viddec_mpeg2_emit_workload(void *parent, void *ctxt)
+{
+ MPEG2_DEB("Emitting workloads.\n");
+
+ /* Get MPEG2 Parser context */
+ struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;
+
+ /* Append meta data workitems */
+ viddec_mpeg2_append_metadata(parent, ctxt);
+
+ /* Transfer metadata into attributes */
+ viddec_mpeg2_translate_attr(parent, ctxt);
+
+ /* Check for unsupported features in the stream and update parser status */
+ viddec_mpeg2_check_unsupported(parent, ctxt);
+
+ /* Transfer all stored metadata into MPEG2 Hardware Info */
+ viddec_mpeg2_trans_metadata_workitems(parser);
+
+ /* Send MPEG2 DMEM workitems */
+ viddec_mpeg2_append_workitems(parent,
+ (uint32_t *) &parser->wi,
+ VIDDEC_WORKLOAD_MPEG2_DMEM,
+ MPEG2_NUM_DMEM_WL_ITEMS,
+ parser->mpeg2_use_next_workload);
+ parser->mpeg2_wl_status |= MPEG2_WL_DMEM_DATA;
+ MPEG2_DEB("Adding %d items as DMEM Data.\n", MPEG2_NUM_DMEM_WL_ITEMS);
+
+ /* Send MPEG2 Quantization Matrix workitems, if updated */
+ viddec_mpeg2_pack_qmat(parser);
+ viddec_mpeg2_append_workitems(parent,
+ (uint32_t *) parser->wi.qmat,
+ VIDDEC_WORKLOAD_MPEG2_QMAT,
+ MPEG2_NUM_QMAT_WL_ITEMS,
+ parser->mpeg2_use_next_workload);
+ MPEG2_DEB("Adding %d items as QMAT Data.\n", MPEG2_NUM_QMAT_WL_ITEMS);
+
+ /* Manage reference frames */
+ viddec_mpeg2_manage_ref(parent, ctxt);
+
+ return;
+}
+
diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/include/viddec_fw_mp4.h b/mix_vbp/viddec_fw/fw/codecs/mp4/include/viddec_fw_mp4.h
new file mode 100644
index 0000000..7084161
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/mp4/include/viddec_fw_mp4.h
@@ -0,0 +1,231 @@
+#ifndef VIDDEC_FW_MP4_H
+#define VIDDEC_FW_MP4_H
+
+#include "viddec_fw_workload.h"
+
+enum viddec_fw_mp4_ref_frame_id
+{
+ VIDDEC_MP4_FRAME_CURRENT = 0,
+ VIDDEC_MP4_FRAME_PAST = 1,
+ VIDDEC_MP4_FRAME_FUTURE = 2,
+ VIDDEC_MP4_FRAME_MAX = 3,
+};
+
+enum mp4_workload_item_type
+{
+ VIDDEC_WORKLOAD_MP4_PAST_FRAME = VIDDEC_WORKLOAD_REF_FRAME_SOURCE_0,
+ VIDDEC_WORKLOAD_MP4_FUTURE_FRAME,
+ VIDDEC_WORKLOAD_MP4_VOL_INFO = VIDDEC_WORKLOAD_DECODER_SPECIFIC,
+ VIDDEC_WORKLOAD_MP4_VOP_INFO,
+ VIDDEC_WORKLOAD_MP4_BVOP_INFO,
+ VIDDEC_WORKLOAD_MP4_SPRT_TRAJ,
+ VIDDEC_WORKLOAD_MP4_IQUANT,
+ VIDDEC_WORKLOAD_MP4_NIQUANT,
+ VIDDEC_WORKLOAD_MP4_SVH,
+};
+
+enum viddec_fw_mp4_vop_coding_type_t
+{
+ VIDDEC_MP4_VOP_TYPE_I = 0,
+ VIDDEC_MP4_VOP_TYPE_P,
+ VIDDEC_MP4_VOP_TYPE_B,
+ VIDDEC_MP4_VOP_TYPE_S
+};
+
+// This structure contains the information extracted from the Video Object Layer.
+// This info will be populated in the workload as item type VIDDEC_WORKLOAD_MP4_VOL_INFO, using
+// the "vwi_payload" array in viddec_workload_item_t.
+// TODO: Add default values in the comments for each item
+typedef struct
+{
+ // Flags extracted from the Video Object Layer
+ // 0:0 - short_video_header
+ // 1:2 - vol_shape
+ // 3:3 - interlaced
+ // 4:4 - obmc_disable
+ // 5:5 - quarter_sample
+ // 6:6 - resync_marker_disable
+ // 7:7 - data_partitioned
+ // 8:8 - reversible_vlc
+ #define viddec_fw_mp4_get_reversible_vlc(x) viddec_fw_bitfields_extract((x)->vol_flags, 8, 0x1)
+ #define viddec_fw_mp4_set_reversible_vlc(x, val) viddec_fw_bitfields_insert((x)->vol_flags, val, 8, 0x1)
+ #define viddec_fw_mp4_get_data_partitioned(x) viddec_fw_bitfields_extract((x)->vol_flags, 7, 0x1)
+ #define viddec_fw_mp4_set_data_partitioned(x, val) viddec_fw_bitfields_insert((x)->vol_flags, val, 7, 0x1)
+ #define viddec_fw_mp4_get_resync_marker_disable(x) viddec_fw_bitfields_extract((x)->vol_flags, 6, 0x1)
+ #define viddec_fw_mp4_set_resync_marker_disable(x, val) viddec_fw_bitfields_insert((x)->vol_flags, val, 6, 0x1)
+ #define viddec_fw_mp4_get_quarter_sample(x) viddec_fw_bitfields_extract((x)->vol_flags, 5, 0x1)
+ #define viddec_fw_mp4_set_quarter_sample(x, val) viddec_fw_bitfields_insert((x)->vol_flags, val, 5, 0x1)
+ #define viddec_fw_mp4_get_obmc_disable(x) viddec_fw_bitfields_extract((x)->vol_flags, 4, 0x1)
+ #define viddec_fw_mp4_set_obmc_disable(x, val) viddec_fw_bitfields_insert((x)->vol_flags, val, 4, 0x1)
+ #define viddec_fw_mp4_get_interlaced(x) viddec_fw_bitfields_extract((x)->vol_flags, 3, 0x1)
+ #define viddec_fw_mp4_set_interlaced(x, val) viddec_fw_bitfields_insert((x)->vol_flags, val, 3, 0x1)
+ #define viddec_fw_mp4_get_vol_shape(x) viddec_fw_bitfields_extract((x)->vol_flags, 1, 0x3)
+ #define viddec_fw_mp4_set_vol_shape(x, val) viddec_fw_bitfields_insert((x)->vol_flags, val, 1, 0x3)
+ #define viddec_fw_mp4_get_short_video_header_flag(x) viddec_fw_bitfields_extract((x)->vol_flags, 0, 0x1)
+ #define viddec_fw_mp4_set_short_video_header_flag(x, val) viddec_fw_bitfields_insert((x)->vol_flags, val, 0, 0x1)
+ unsigned int vol_flags;
+
+ // Size extracted from the Video Object Layer
+ // 0:12 - width
+ // 13:25 - height
+ // MFD_MPG4VD_MB_PER_ROW can be calculated as (width+15) >> 4
+ // MFD_MPG4VD_MB_ROWS can be calculated as (height+15) >> 4
+ #define viddec_fw_mp4_get_vol_width(x) viddec_fw_bitfields_extract((x)->vol_size, 13, 0x1FFF)
+ #define viddec_fw_mp4_set_vol_width(x, val) viddec_fw_bitfields_insert((x)->vol_size, val, 13, 0x1FFF)
+ #define viddec_fw_mp4_get_vol_height(x) viddec_fw_bitfields_extract((x)->vol_size, 0, 0x1FFF)
+ #define viddec_fw_mp4_set_vol_height(x, val) viddec_fw_bitfields_insert((x)->vol_size, val, 0, 0x1FFF)
+ unsigned int vol_size;
+
+ // Sprite, time increments and quantization details from the Video Object Layer
+ // 0:15 - vop_time_increment_resolution
+ // 16:17 - sprite_enable
+ // 18:23 - sprite_warping_points
+ // 24:25 - sprite_warping_accuracy
+ // 26:29 - quant_precision
+ // 30:30 - quant_type
+ #define viddec_fw_mp4_get_quant_type(x) viddec_fw_bitfields_extract((x)->vol_item, 30, 0x1)
+ #define viddec_fw_mp4_set_quant_type(x, val) viddec_fw_bitfields_insert((x)->vol_item, val, 30, 0x1)
+ #define viddec_fw_mp4_get_quant_precision(x) viddec_fw_bitfields_extract((x)->vol_item, 26, 0xF)
+ #define viddec_fw_mp4_set_quant_precision(x, val) viddec_fw_bitfields_insert((x)->vol_item, val, 26, 0xF)
+ #define viddec_fw_mp4_get_sprite_warping_accuracy(x) viddec_fw_bitfields_extract((x)->vol_item, 24, 0x3)
+ #define viddec_fw_mp4_set_sprite_warping_accuracy(x, val) viddec_fw_bitfields_insert((x)->vol_item, val, 24, 0x3)
+ #define viddec_fw_mp4_get_sprite_warping_points(x) viddec_fw_bitfields_extract((x)->vol_item, 18, 0x3F)
+ #define viddec_fw_mp4_set_sprite_warping_points(x, val) viddec_fw_bitfields_insert((x)->vol_item, val, 18, 0x3F)
+ #define viddec_fw_mp4_get_sprite_enable(x) viddec_fw_bitfields_extract((x)->vol_item, 16, 0x3)
+ #define viddec_fw_mp4_set_sprite_enable(x, val) viddec_fw_bitfields_insert((x)->vol_item, val, 16, 0x3)
+ #define viddec_fw_mp4_get_vop_time_increment_resolution(x) viddec_fw_bitfields_extract((x)->vol_item, 0, 0xFFFF)
+ #define viddec_fw_mp4_set_vop_time_increment_resolution(x, val) viddec_fw_bitfields_insert((x)->vol_item, val, 0, 0xFFFF)
+ unsigned int vol_item;
+
+} viddec_fw_mp4_vol_info_t;
+
+// This structure contains the information extracted from the Video Object Plane.
+// This info will be populated in the workload as item type VIDDEC_WORKLOAD_MP4_VOP_INFO, using
+// the "vwi_payload" array in viddec_workload_item_t.
+// TODO: Add default values in the comments for each item
+typedef struct
+{
+ // Frame Info - to populate register MFD_MPG4VD_BSP_FRAME_INFO
+ // 0:4 - current_frame_id
+ // 5:5 - current_field_frame
+ // 6:10 - future_frame_id
+ // 11:11 - future_field_frame
+ // 12:16 - past_frame_id
+ // 17:17 - past_field_frame
+ #define viddec_fw_mp4_get_past_field_frame(x) viddec_fw_bitfields_extract((x)->frame_info, 17, 0x1)
+ #define viddec_fw_mp4_set_past_field_frame(x, val) viddec_fw_bitfields_insert((x)->frame_info, val, 17, 0x1)
+ #define viddec_fw_mp4_get_past_frame_id(x) viddec_fw_bitfields_extract((x)->frame_info, 12, 0x1F)
+ #define viddec_fw_mp4_set_past_frame_id(x, val) viddec_fw_bitfields_insert((x)->frame_info, val, 12, 0x1F)
+ #define viddec_fw_mp4_get_future_field_frame(x) viddec_fw_bitfields_extract((x)->frame_info, 11, 0x1)
+ #define viddec_fw_mp4_set_future_field_frame(x, val) viddec_fw_bitfields_insert((x)->frame_info, val, 11, 0x1)
+ #define viddec_fw_mp4_get_future_frame_id(x) viddec_fw_bitfields_extract((x)->frame_info, 6, 0x1F)
+ #define viddec_fw_mp4_set_future_frame_id(x, val) viddec_fw_bitfields_insert((x)->frame_info, val, 6, 0x1F)
+ #define viddec_fw_mp4_get_current_field_frame(x) viddec_fw_bitfields_extract((x)->frame_info, 5, 0x1)
+ #define viddec_fw_mp4_set_current_field_frame(x, val) viddec_fw_bitfields_insert((x)->frame_info, val, 5, 0x1)
+ #define viddec_fw_mp4_get_current_frame_id(x) viddec_fw_bitfields_extract((x)->frame_info, 0, 0x1F)
+ #define viddec_fw_mp4_set_current_frame_id(x, val) viddec_fw_bitfields_insert((x)->frame_info, val, 0, 0x1F)
+ unsigned int frame_info;
+
+ // Video Object Plane Info
+ // 0:1 - vop_coding_type
+ // 2:2 - vop_rounding_type
+ // 3:5 - intra_dc_vlc_thr
+ // 6:6 - top_field_first
+ // 7:7 - alternate_vertical_scan_flag
+ // 8:16 - vop_quant
+ // 17:19 - vop_fcode_forward
+ // 20:22 - vop_fcode_backward
+ // 23:31 - quant_scale
+ #define viddec_fw_mp4_get_vop_quant_scale(x) viddec_fw_bitfields_extract((x)->vop_data, 23, 0x1FF)
+ #define viddec_fw_mp4_set_vop_quant_scale(x, val) viddec_fw_bitfields_insert((x)->vop_data, val, 23, 0x1FF)
+ #define viddec_fw_mp4_get_vop_fcode_backward(x) viddec_fw_bitfields_extract((x)->vop_data, 20, 0x7)
+ #define viddec_fw_mp4_set_vop_fcode_backward(x, val) viddec_fw_bitfields_insert((x)->vop_data, val, 20, 0x7)
+ #define viddec_fw_mp4_get_vop_fcode_forward(x) viddec_fw_bitfields_extract((x)->vop_data, 17, 0x7)
+ #define viddec_fw_mp4_set_vop_fcode_forward(x, val) viddec_fw_bitfields_insert((x)->vop_data, val, 17, 0x7)
+ #define viddec_fw_mp4_get_vop_quant(x) viddec_fw_bitfields_extract((x)->vop_data, 8, 0x1FF)
+ #define viddec_fw_mp4_set_vop_quant(x, val) viddec_fw_bitfields_insert((x)->vop_data, val, 8, 0x1FF)
+ #define viddec_fw_mp4_get_alternate_vertical_scan_flag(x) viddec_fw_bitfields_extract((x)->vop_data, 7, 0x1)
+ #define viddec_fw_mp4_set_alternate_vertical_scan_flag(x, val) viddec_fw_bitfields_insert((x)->vop_data, val, 7, 0x1)
+ #define viddec_fw_mp4_get_top_field_first(x) viddec_fw_bitfields_extract((x)->vop_data, 6, 0x1)
+ #define viddec_fw_mp4_set_top_field_first(x, val) viddec_fw_bitfields_insert((x)->vop_data, val, 6, 0x1)
+ #define viddec_fw_mp4_get_intra_dc_vlc_thr(x) viddec_fw_bitfields_extract((x)->vop_data, 3, 0x7)
+ #define viddec_fw_mp4_set_intra_dc_vlc_thr(x, val) viddec_fw_bitfields_insert((x)->vop_data, val, 3, 0x7)
+ #define viddec_fw_mp4_get_vop_rounding_type(x) viddec_fw_bitfields_extract((x)->vop_data, 2, 0x1)
+ #define viddec_fw_mp4_set_vop_rounding_type(x, val) viddec_fw_bitfields_insert((x)->vop_data, val, 2, 0x1)
+ #define viddec_fw_mp4_get_vop_coding_type(x) viddec_fw_bitfields_extract((x)->vop_data, 0, 0x3)
+ #define viddec_fw_mp4_set_vop_coding_type(x, val) viddec_fw_bitfields_insert((x)->vop_data, val, 0, 0x3)
+ unsigned int vop_data;
+
+ // No of bits used in first byte of MB data
+ unsigned int bit_offset;
+
+} viddec_fw_mp4_vop_info_t;
+
+// This structure contains the timing information needed for B-VOP decode.
+// This info will be populated in the workload as item type VIDDEC_WORKLOAD_MP4_BVOP_INFO, using
+// the "vwi_payload" array in viddec_workload_item_t.
+// TODO: Add default values in the comments for each item
+typedef struct
+{
+ // Frame period = T(first B-VOP after VOL) - T(past reference of first B-VOP after VOL)
+ unsigned int Tframe;
+
+ // TRD is the difference in temporal reference of the temporally next reference VOP with
+ // temporally previous reference VOP, assuming B-VOPs or skipped VOPs in between.
+ unsigned int TRD;
+
+ // TRB is the difference in temporal reference of the B-VOP and the previous reference VOP.
+ unsigned int TRB;
+
+} viddec_fw_mp4_bvop_info_t;
+
+// This structure contains the information extracted from the sprite trajectory.
+// This info will be populated in the workload as item type VIDDEC_WORKLOAD_MP4_SPRT_TRAJ,
+// using the fields vwi_payload in viddec_workload_item_t.
+// TODO: Add default values in the comments for each item
+typedef struct
+{
+ // Sprite Trajectory can have dmv_codes for each warping point.
+ // 0:13 - warping_mv_code_dv
+ // 14:27 - warping_mv_code_du
+ // 28:31 - warping_point_index - identifies which warping point the warping code refers to.
+ // The default value for index is 0xF which should be treated as invalid.
+ #define viddec_fw_mp4_get_warping_point_index(x) viddec_fw_bitfields_extract((x), 28, 0xF)
+ #define viddec_fw_mp4_set_warping_point_index(x, val) viddec_fw_bitfields_insert((x), val, 28, 0xF)
+ #define viddec_fw_mp4_get_warping_mv_code_du(x) viddec_fw_bitfields_extract((x), 14, 0x3FFF)
+ #define viddec_fw_mp4_set_warping_mv_code_du(x, val) viddec_fw_bitfields_insert((x), val, 14, 0x3FFF)
+ #define viddec_fw_mp4_get_warping_mv_code_dv(x) viddec_fw_bitfields_extract((x), 0, 0x3FFF)
+ #define viddec_fw_mp4_set_warping_mv_code_dv(x, val) viddec_fw_bitfields_insert((x), val, 0, 0x3FFF)
+ unsigned int warping_mv_code[3];
+} viddec_fw_mp4_sprite_trajectory_t;
+
+// IQUANT entries will be populated in the workload using items of type VIDDEC_WORKLOAD_MP4_IQUANT and the
+// vwi_payload array. The entries will be in the order in which they need to be programmed in the registers.
+// There is no need for a separate structure for these values.
+
+// This structure contains the information extracted from the Video Plane with Short Header.
+// This info will be populated in the workload as item type VIDDEC_WORKLOAD_MP4_SVH, using
+// the "vwi_payload" array in viddec_workload_item_t.
+// TODO: Add default values in the comments for each item
+typedef struct
+{
+ // Video Plane with Short Header
+ // 0:7 - temporal_reference
+ // 8:19 - num_macroblocks_in_gob
+ // 20:24 - num_gobs_in_vop
+ // 25:27 - num_rows_in_gob
+ #define viddec_fw_mp4_get_num_rows_in_gob(x) viddec_fw_bitfields_extract((x)->svh_data, 25, 0x7)
+ #define viddec_fw_mp4_set_num_rows_in_gob(x, val) viddec_fw_bitfields_insert((x)->svh_data, val, 25, 0x7)
+ #define viddec_fw_mp4_get_num_gobs_in_vop(x) viddec_fw_bitfields_extract((x)->svh_data, 20, 0x1F)
+ #define viddec_fw_mp4_set_num_gobs_in_vop(x, val) viddec_fw_bitfields_insert((x)->svh_data, val, 20, 0x1F)
+ #define viddec_fw_mp4_get_num_macroblocks_in_gob(x) viddec_fw_bitfields_extract((x)->svh_data, 8, 0xFFF)
+ #define viddec_fw_mp4_set_num_macroblocks_in_gob(x, val) viddec_fw_bitfields_insert((x)->svh_data, val, 8, 0xFFF)
+ #define viddec_fw_mp4_get_temporal_reference(x) viddec_fw_bitfields_extract((x)->svh_data, 0, 0xFF)
+ #define viddec_fw_mp4_set_temporal_reference(x, val) viddec_fw_bitfields_insert((x)->svh_data, val, 0, 0xFF)
+ unsigned int svh_data;
+
+ unsigned int pad1;
+ unsigned int pad2;
+} viddec_fw_mp4_svh_t;
+
+#endif
diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_fw_mp4_workload.c b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_fw_mp4_workload.c
new file mode 100644
index 0000000..f595c91
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_fw_mp4_workload.c
@@ -0,0 +1,371 @@
+#include "viddec_fw_workload.h"
+#include "viddec_parser_ops.h"
+#include "viddec_fw_mp4.h"
+#include "viddec_mp4_parse.h"
+
+// Populate the workload's frame attributes from the parsed VOL/VOP state:
+// container size, frame type (I/P/B/S) and top_field_first.
+// Always returns MP4_STATUS_OK; unknown vop_coding_type values leave
+// attr->frame_type at its memset default.
+uint32_t viddec_fw_mp4_populate_attr(viddec_workload_t *wl, viddec_mp4_parser_t *parser)
+{
+ uint32_t result = MP4_STATUS_OK;
+ viddec_frame_attributes_t *attr = &(wl->attrs);
+ mp4_VideoObjectLayer_t *vol = &(parser->info.VisualObject.VideoObject);
+
+ memset(attr, 0, sizeof(viddec_frame_attributes_t));
+
+ attr->cont_size.width = vol->video_object_layer_width;
+ attr->cont_size.height = vol->video_object_layer_height;
+
+ // Translate vop_coding_type
+ switch(vol->VideoObjectPlane.vop_coding_type)
+ {
+ case MP4_VOP_TYPE_B:
+ attr->frame_type = VIDDEC_FRAME_TYPE_B;
+ break;
+ case MP4_VOP_TYPE_P:
+ attr->frame_type = VIDDEC_FRAME_TYPE_P;
+ break;
+ case MP4_VOP_TYPE_S:
+ attr->frame_type = VIDDEC_FRAME_TYPE_S;
+ break;
+ case MP4_VOP_TYPE_I:
+ attr->frame_type = VIDDEC_FRAME_TYPE_I;
+ break;
+ default:
+ break;
+ } // switch on vop_coding_type
+
+ attr->mpeg4.top_field_first = vol->VideoObjectPlane.top_field_first;
+
+ return result;
+} // viddec_fw_mp4_populate_attr
+
+// Pack the Video Object Layer header fields into a viddec_fw_mp4_vol_info_t
+// (vol_flags / vol_size / vol_item words, via the bitfield setter macros)
+// and append it to the workload as a VIDDEC_WORKLOAD_MP4_VOL_INFO item.
+// Returns the status of viddec_pm_append_workitem().
+uint32_t viddec_fw_mp4_insert_vol_workitem(void *parent, viddec_mp4_parser_t *parser)
+{
+ uint32_t result = MP4_STATUS_OK;
+ viddec_workload_item_t wi;
+ viddec_fw_mp4_vol_info_t vol_info;
+ mp4_VideoObjectLayer_t *vol = &(parser->info.VisualObject.VideoObject);
+
+ memset(&vol_info, 0, sizeof(viddec_fw_mp4_vol_info_t));
+
+ // Get vol_flags
+ viddec_fw_mp4_set_reversible_vlc(&vol_info, vol->reversible_vlc);
+ viddec_fw_mp4_set_data_partitioned(&vol_info, vol->data_partitioned);
+ viddec_fw_mp4_set_resync_marker_disable(&vol_info, vol->resync_marker_disable);
+ viddec_fw_mp4_set_quarter_sample(&vol_info, vol->quarter_sample);
+ viddec_fw_mp4_set_obmc_disable(&vol_info, vol->obmc_disable);
+ viddec_fw_mp4_set_interlaced(&vol_info, vol->interlaced);
+ viddec_fw_mp4_set_vol_shape(&vol_info, vol->video_object_layer_shape);
+ viddec_fw_mp4_set_short_video_header_flag(&vol_info, vol->short_video_header);
+
+ // Get vol_size
+ viddec_fw_mp4_set_vol_width(&vol_info, vol->video_object_layer_width);
+ viddec_fw_mp4_set_vol_height(&vol_info, vol->video_object_layer_height);
+
+ // Get vol_item
+ viddec_fw_mp4_set_quant_type(&vol_info, vol->quant_type);
+ viddec_fw_mp4_set_quant_precision(&vol_info, vol->quant_precision);
+ viddec_fw_mp4_set_sprite_warping_accuracy(&vol_info, vol->sprite_info.sprite_warping_accuracy);
+ viddec_fw_mp4_set_sprite_warping_points(&vol_info, vol->sprite_info.no_of_sprite_warping_points);
+ viddec_fw_mp4_set_sprite_enable(&vol_info, vol->sprite_enable);
+ viddec_fw_mp4_set_vop_time_increment_resolution(&vol_info, vol->vop_time_increment_resolution);
+
+
+ // The three packed words map one-to-one onto the item payload.
+ wi.vwi_type = VIDDEC_WORKLOAD_MP4_VOL_INFO;
+ wi.vwi_payload[0] = vol_info.vol_flags;
+ wi.vwi_payload[1] = vol_info.vol_size;
+ wi.vwi_payload[2] = vol_info.vol_item;
+
+ result = viddec_pm_append_workitem(parent, &wi);
+
+ return result;
+} // viddec_fw_mp4_insert_vol_workitem
+
+// Pack the Video Object Plane header fields (frame/reference info, coding
+// parameters, and the bit offset where the VOP payload starts) and append
+// them as a VIDDEC_WORKLOAD_MP4_VOP_INFO item.
+uint32_t viddec_fw_mp4_insert_vop_workitem(void *parent, viddec_mp4_parser_t *parser)
+{
+ uint32_t result = MP4_STATUS_OK;
+ viddec_workload_item_t wi;
+ viddec_fw_mp4_vop_info_t vop_info;
+ mp4_VideoObjectPlane_t *vop = &(parser->info.VisualObject.VideoObject.VideoObjectPlane);
+ uint32_t byte = 0;
+ unsigned char is_emul;
+
+ memset(&vop_info, 0, sizeof(viddec_fw_mp4_vop_info_t));
+
+ // Get frame_info
+ viddec_fw_mp4_set_past_field_frame(&vop_info, parser->ref_frame[VIDDEC_MP4_INDX_2].is_field);
+ viddec_fw_mp4_set_past_frame_id(&vop_info, VIDDEC_MP4_FRAME_PAST);
+ viddec_fw_mp4_set_future_field_frame(&vop_info, parser->ref_frame[VIDDEC_MP4_INDX_1].is_field);
+ viddec_fw_mp4_set_future_frame_id(&vop_info, VIDDEC_MP4_FRAME_FUTURE);
+ viddec_fw_mp4_set_current_field_frame(&vop_info, parser->ref_frame[VIDDEC_MP4_INDX_0].is_field);
+ viddec_fw_mp4_set_current_frame_id(&vop_info, VIDDEC_MP4_FRAME_CURRENT);
+
+ // HW has a limitation that the enums for PAST(1), FUTURE(2) and CURRENT(0) cannot be changed and
+ // the spec does not support field pictures. Hence the field_frame bits are always zero.
+ // This gives us the constant 0x10200.
+ // NOTE: this overwrites all the set_* calls above with the fixed value.
+ vop_info.frame_info = 0x10200;
+
+ // Get vop_data
+ // Quant scale is in the video_packet_header or the gob_layer - both of which are parsed by the BSP
+ viddec_fw_mp4_set_vop_quant_scale(&vop_info, 0);
+ viddec_fw_mp4_set_vop_fcode_backward(&vop_info, vop->vop_fcode_backward);
+ viddec_fw_mp4_set_vop_fcode_forward(&vop_info, vop->vop_fcode_forward);
+ viddec_fw_mp4_set_vop_quant(&vop_info, vop->vop_quant);
+ viddec_fw_mp4_set_alternate_vertical_scan_flag(&vop_info, vop->alternate_vertical_scan_flag);
+ viddec_fw_mp4_set_top_field_first(&vop_info, vop->top_field_first);
+ viddec_fw_mp4_set_intra_dc_vlc_thr(&vop_info, vop->intra_dc_vlc_thr);
+ viddec_fw_mp4_set_vop_rounding_type(&vop_info, vop->vop_rounding_type);
+ viddec_fw_mp4_set_vop_coding_type(&vop_info, vop->vop_coding_type);
+
+ // Get vol_item
+ // NOTE(review): this result is overwritten by the append below, so a
+ // failure from viddec_pm_get_au_pos is silently dropped — confirm intended.
+ result = viddec_pm_get_au_pos(parent, &vop_info.bit_offset, &byte, &is_emul);
+
+ wi.vwi_type = VIDDEC_WORKLOAD_MP4_VOP_INFO;
+ wi.vwi_payload[0] = vop_info.frame_info;
+ wi.vwi_payload[1] = vop_info.vop_data;
+ wi.vwi_payload[2] = vop_info.bit_offset;
+
+ result = viddec_pm_append_workitem(parent, &wi);
+
+ return result;
+} // viddec_fw_mp4_insert_vop_workitem
+
+// Pack the short-video-header (H.263 baseline) plane fields into the single
+// svh_data word and append them as a VIDDEC_WORKLOAD_MP4_SVH item.
+// Returns the status of viddec_pm_append_workitem().
+uint32_t viddec_fw_mp4_insert_vpsh_workitem(void *parent, viddec_mp4_parser_t *parser)
+{
+    viddec_workload_item_t item;
+    viddec_fw_mp4_svh_t svh_info;
+    mp4_VideoObjectPlaneH263 *svh = &(parser->info.VisualObject.VideoObject.VideoObjectPlaneH263);
+
+    memset(&svh_info, 0, sizeof(svh_info));
+
+    // Fold all four header fields into svh_data via the bitfield setters.
+    viddec_fw_mp4_set_num_rows_in_gob(&svh_info, svh->num_rows_in_gob);
+    viddec_fw_mp4_set_num_gobs_in_vop(&svh_info, svh->num_gobs_in_vop);
+    viddec_fw_mp4_set_num_macroblocks_in_gob(&svh_info, svh->num_macroblocks_in_gob);
+    viddec_fw_mp4_set_temporal_reference(&svh_info, svh->temporal_reference);
+
+    item.vwi_type = VIDDEC_WORKLOAD_MP4_SVH;
+    item.vwi_payload[0] = svh_info.svh_data;
+    item.vwi_payload[1] = svh_info.pad1;
+    item.vwi_payload[2] = svh_info.pad2;
+
+    return viddec_pm_append_workitem(parent, &item);
+} // viddec_fw_mp4_insert_vpsh_workitem
+
+// Append sprite (GMC) trajectory work items. Each item carries up to three
+// warping-point motion vectors; with more than three warping points a second
+// item is emitted. Returns immediately if the VOL declares no warping points.
+uint32_t viddec_fw_mp4_insert_sprite_workitem(void *parent, viddec_mp4_parser_t *parser)
+{
+ uint32_t result = MP4_STATUS_OK;
+ viddec_workload_item_t wi;
+ viddec_fw_mp4_sprite_trajectory_t sprite_info;
+ mp4_VideoObjectLayer_t *vol = &(parser->info.VisualObject.VideoObject);
+ mp4_VideoObjectPlane_t *vop = &(parser->info.VisualObject.VideoObject.VideoObjectPlane);
+ uint8_t no_of_entries_per_item = 3;
+ uint8_t no_of_sprite_workitems = 0;
+ uint8_t warp_index = 0;
+ int i, j;
+
+ if(!vol->sprite_info.no_of_sprite_warping_points)
+ return result;
+
+ no_of_sprite_workitems = (vol->sprite_info.no_of_sprite_warping_points > 3) ? 2 : 1;
+
+ for(i=0; i<no_of_sprite_workitems; i++)
+ {
+ memset(&sprite_info, 0, sizeof(viddec_fw_mp4_sprite_trajectory_t));
+
+ for(j=0; j<no_of_entries_per_item; j++)
+ {
+ if(warp_index < vol->sprite_info.no_of_sprite_warping_points)
+ {
+ viddec_fw_mp4_set_warping_point_index(sprite_info.warping_mv_code[j], warp_index);
+ viddec_fw_mp4_set_warping_mv_code_du(sprite_info.warping_mv_code[j], vop->warping_mv_code_du[warp_index]);
+ viddec_fw_mp4_set_warping_mv_code_dv(sprite_info.warping_mv_code[j], vop->warping_mv_code_dv[warp_index]);
+ }
+ else
+ {
+ // Unused entry: write index 0xF in the top nibble — presumably the
+ // firmware's "invalid warping point" marker; confirm against FW spec.
+ sprite_info.warping_mv_code[j] = 0xF << 28;
+ }
+ warp_index++;
+ }
+
+ wi.vwi_type = VIDDEC_WORKLOAD_MP4_SPRT_TRAJ;
+ wi.vwi_payload[0] = sprite_info.warping_mv_code[0];
+ wi.vwi_payload[1] = sprite_info.warping_mv_code[1];
+ wi.vwi_payload[2] = sprite_info.warping_mv_code[2];
+
+ result = viddec_pm_append_workitem(parent, &wi);
+ }
+
+ return result;
+} // viddec_fw_mp4_insert_sprite_workitem
+
+// Append the B-VOP timing item: Tframe, TRD and TRB as computed by
+// mp4_DecodeVideoObjectPlane(). Returns the append status.
+uint32_t viddec_fw_mp4_insert_bvop_workitem(void *parent, viddec_mp4_parser_t *parser)
+{
+    viddec_workload_item_t item;
+    mp4_VideoObjectLayer_t *layer = &(parser->info.VisualObject.VideoObject);
+
+    item.vwi_type = VIDDEC_WORKLOAD_MP4_BVOP_INFO;
+    item.vwi_payload[0] = layer->Tframe;
+    item.vwi_payload[1] = layer->TRD;
+    item.vwi_payload[2] = layer->TRB;
+
+    return viddec_pm_append_workitem(parent, &item);
+} // viddec_fw_mp4_insert_bvop_workitem
+
+// Append one 64-entry quantization matrix to the workload.
+// intra_quant_flag selects IQUANT (intra) vs NIQUANT (non-intra) item types.
+// qmat points at the matrix viewed as 32-bit words: 64 8-bit entries
+// = 16 DWORDS, carried 3 DWORDS per item => 5 full items + 1 final item
+// holding the single remaining DWORD.
+// Returns the status of the last viddec_pm_append_workitem() call.
+uint32_t viddec_fw_mp4_insert_qmat(void *parent, uint8_t intra_quant_flag, uint32_t *qmat)
+{
+    uint32_t result = MP4_STATUS_OK;
+    viddec_workload_item_t wi;
+    uint8_t i;
+
+    for(i=0; i<6; i++)
+    {
+        memset(&wi, 0, sizeof(viddec_workload_item_t));
+
+        if(intra_quant_flag)
+            wi.vwi_type = VIDDEC_WORKLOAD_MP4_IQUANT;
+        else
+            wi.vwi_type = VIDDEC_WORKLOAD_MP4_NIQUANT;
+
+        // BUGFIX: the last loop iteration is i == 5, not i == 6. The old
+        // "i == 6" test could never be true, so the final item read qmat[16]
+        // and qmat[17] — two DWORDS past the end of the 16-DWORD matrix.
+        if(i == 5)
+        {
+            wi.vwi_payload[0] = qmat[0];
+            wi.vwi_payload[1] = 0;
+            wi.vwi_payload[2] = 0;
+        }
+        else
+        {
+            wi.vwi_payload[0] = qmat[0];
+            wi.vwi_payload[1] = qmat[1];
+            wi.vwi_payload[2] = qmat[2];
+        }
+
+        qmat += 3;
+
+        result = viddec_pm_append_workitem(parent, &wi);
+    }
+
+    return result;
+} // viddec_fw_mp4_insert_qmat
+
+// Emit whichever quantization matrices the bitstream loaded: the intra
+// matrix as IQUANT items and/or the non-intra matrix as NIQUANT items.
+// Returns MP4_STATUS_OK if neither matrix was loaded, otherwise the status
+// of the last viddec_fw_mp4_insert_qmat() call.
+uint32_t viddec_fw_mp4_insert_inversequant_workitem(void *parent, mp4_VOLQuant_mat_t *qmat)
+{
+    uint32_t result = MP4_STATUS_OK;
+
+    if(qmat->load_intra_quant_mat)
+        result = viddec_fw_mp4_insert_qmat(parent, true, (uint32_t *) &(qmat->intra_quant_mat));
+
+    if(qmat->load_nonintra_quant_mat)
+        result = viddec_fw_mp4_insert_qmat(parent, false, (uint32_t *) &(qmat->nonintra_quant_mat));
+
+    return result;
+} // viddec_fw_mp4_insert_inversequant_workitem
+
+// Append a placeholder PAST reference-frame item; the id and physical
+// addresses are zero here and are expected to be patched downstream.
+uint32_t viddec_fw_mp4_insert_past_frame_workitem(void *parent)
+{
+    viddec_workload_item_t item;
+
+    item.vwi_type = VIDDEC_WORKLOAD_MP4_PAST_FRAME;
+    item.ref_frame.reference_id = 0;
+    item.ref_frame.luma_phys_addr = 0;
+    item.ref_frame.chroma_phys_addr = 0;
+
+    return viddec_pm_append_workitem(parent, &item);
+} // viddec_fw_mp4_insert_past_frame_workitem
+
+// Append a placeholder FUTURE reference-frame item; the id and physical
+// addresses are zero here and are expected to be patched downstream.
+uint32_t viddec_fw_mp4_insert_future_frame_workitem(void *parent)
+{
+    viddec_workload_item_t item;
+
+    item.vwi_type = VIDDEC_WORKLOAD_MP4_FUTURE_FRAME;
+    item.ref_frame.reference_id = 0;
+    item.ref_frame.luma_phys_addr = 0;
+    item.ref_frame.chroma_phys_addr = 0;
+
+    return viddec_pm_append_workitem(parent, &item);
+} // viddec_fw_mp4_insert_future_frame_workitem
+
+// Append a reference-table reorder item that maps table slot 0 to the frame
+// currently in slot 1 (first byte of ref_reorder_00010203 is 0x01 instead of
+// the identity 0x00); slots 1..7 keep their identity mapping.
+uint32_t viddec_fw_mp4_insert_reorder_workitem(void *parent)
+{
+ uint32_t result = MP4_STATUS_OK;
+ viddec_workload_item_t wi;
+
+ // Move frame at location 1 of the reference table to location 0
+ wi.vwi_type = VIDDEC_WORKLOAD_REFERENCE_FRAME_REORDER;
+ wi.ref_reorder.ref_table_offset = 0;
+ wi.ref_reorder.ref_reorder_00010203 = 0x01010203;
+ wi.ref_reorder.ref_reorder_04050607 = 0x04050607;
+
+ result = viddec_pm_append_workitem(parent, &wi);
+
+ return result;
+} // viddec_fw_mp4_insert_reorder_workitem
+
+// Top-level workload emitter for a parsed frame: fills frame attributes and
+// appends the VOL, VOP, sprite, quant-matrix and (for short-video-header
+// streams) SVH items, sets skipped/reference-frame flags, emits reference
+// bookkeeping per coding type, then appends the pixel data.
+// Returns the status of the final viddec_pm_append_pixeldata() call;
+// intermediate statuses are overwritten (pre-existing behavior).
+uint32_t viddec_fw_mp4_emit_workload(void *parent, void *ctxt)
+{
+    uint32_t result = 0;
+    viddec_mp4_parser_t *parser = (viddec_mp4_parser_t *) ctxt;
+    viddec_workload_t *wl = viddec_pm_get_header(parent);
+
+    result = viddec_fw_mp4_populate_attr(wl, parser);
+    result = viddec_fw_mp4_insert_vol_workitem(parent, parser);
+    result = viddec_fw_mp4_insert_vop_workitem(parent, parser);
+    result = viddec_fw_mp4_insert_sprite_workitem(parent, parser);
+    result = viddec_fw_mp4_insert_inversequant_workitem(parent, &(parser->info.VisualObject.VideoObject.quant_mat_info));
+
+    if(parser->info.VisualObject.VideoObject.short_video_header)
+        result = viddec_fw_mp4_insert_vpsh_workitem(parent, parser);
+
+    // A non-coded VOP means no frame data follows; flag it as skipped.
+    if(!parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_coded)
+        wl->is_reference_frame |= WORKLOAD_SKIPPED_FRAME;
+
+    // Send reference re-order tag for all reference frame types
+    if (parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_coding_type != MP4_VOP_TYPE_B)
+    {
+        result = viddec_fw_mp4_insert_reorder_workitem(parent);
+    }
+
+    // Handle vop_coding_type based information
+    switch(parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_coding_type)
+    {
+        case MP4_VOP_TYPE_B:
+            result = viddec_fw_mp4_insert_bvop_workitem(parent, parser);
+            result = viddec_fw_mp4_insert_past_frame_workitem(parent);
+            result = viddec_fw_mp4_insert_future_frame_workitem(parent);
+            break;
+        case MP4_VOP_TYPE_P:
+        case MP4_VOP_TYPE_S:
+            result = viddec_fw_mp4_insert_past_frame_workitem(parent);
+            // Deliberate fall-thru to type I
+        case MP4_VOP_TYPE_I:
+            wl->is_reference_frame |= WORKLOAD_REFERENCE_FRAME | (1 & WORKLOAD_REFERENCE_FRAME_BMASK);
+            // Swap reference information
+            parser->ref_frame[VIDDEC_MP4_INDX_2] = parser->ref_frame[VIDDEC_MP4_INDX_1];
+            parser->ref_frame[VIDDEC_MP4_INDX_1] = parser->ref_frame[VIDDEC_MP4_INDX_0];
+            break;   // (duplicate unreachable "break;" removed)
+        default:
+            break;
+    } // switch on vop_coding_type
+
+    result = viddec_pm_append_pixeldata(parent);
+
+    return result;
+} // viddec_fw_mp4_emit_workload
+
diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_decodevideoobjectplane.c b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_decodevideoobjectplane.c
new file mode 100644
index 0000000..4ae9135
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_decodevideoobjectplane.c
@@ -0,0 +1,98 @@
+#include "viddec_mp4_decodevideoobjectplane.h"
+
+// Derive per-VOP timing from the parsed headers: computes the VOP display
+// time (vop_time), maintains the VOL sync-time accumulators, tracks past and
+// future anchor-frame times, and for B-VOPs computes the Tframe/TRB/TRD
+// values used for direct-mode motion scaling. Always returns MP4_STATUS_OK.
+// NOTE: commented-out code referencing vo->currentFrame is a remnant of an
+// earlier frame-store implementation left for reference.
+mp4_Status_t mp4_DecodeVideoObjectPlane(mp4_Info_t* pInfo)
+{
+ mp4_Status_t status = MP4_STATUS_OK;
+ uint32_t vop_time=0;
+// mp4_VisualObject_t *vo = &(pInfo->VisualObject);
+ mp4_VideoObjectLayer_t *vol = &(pInfo->VisualObject.VideoObject);
+ mp4_GroupOfVideoObjectPlane_t *gvop = &(pInfo->VisualObject.VideoObject.GroupOfVideoObjectPlane);
+ mp4_VideoObjectPlane_t *vop = &(pInfo->VisualObject.VideoObject.VideoObjectPlane);
+
+ // set VOP time
+ if (vol->short_video_header)
+ {
+ // SVH: time advances in 1001-tick units of the 8-bit temporal reference.
+ vop_time = vol->vop_sync_time +
+ pInfo->VisualObject.VideoObject.VideoObjectPlaneH263.temporal_reference * 1001;
+
+// if (vo->currentFrame.time > vop_time)
+ {
+ // NOTE(review): with the condition commented out this wrap-around
+ // adjustment (256 * 1001 ticks) is applied unconditionally — confirm.
+ vol->vop_sync_time += 256 * 1001;
+ vop_time += 256 * 1001;
+ }
+ }
+ else
+ {
+ if (vop->vop_coding_type == MP4_VOP_TYPE_B)
+ {
+ // B-VOPs time against the separate B sync base (anchors already seen).
+ vop_time = vol->vop_sync_time_b + vop->modulo_time_base * vol->vop_time_increment_resolution + vop->vop_time_increment;
+ }
+ else
+ {
+ // A GOV header may reset the time base forward.
+ if (gvop->time_base > vol->vop_sync_time)
+ vol->vop_sync_time = gvop->time_base;
+
+ vop_time = vol->vop_sync_time + vop->modulo_time_base * vol->vop_time_increment_resolution + vop->vop_time_increment;
+
+ if (vol->vop_sync_time_b < vol->vop_sync_time)
+ vol->vop_sync_time_b = vol->vop_sync_time;
+
+ if (vop->modulo_time_base != 0)
+ vol->vop_sync_time = vop_time - vop->vop_time_increment;
+ }
+ }
+
+ if(vop->vop_coded)
+ {
+ switch (vop->vop_coding_type)
+ {
+ case MP4_VOP_TYPE_S:
+ // S-VOPs only act as anchors when GMC sprites are in use.
+ if (vol->sprite_enable != MP4_SPRITE_GMC)
+ break;
+ // Deliberate fall-through from this case
+ case MP4_VOP_TYPE_I:
+ case MP4_VOP_TYPE_P:
+ // set past and future time for B-VOP
+ vol->pastFrameTime = vol->futureFrameTime;
+ vol->futureFrameTime = vop_time;
+ break;
+ default:
+ break;
+ }
+ }
+
+ if (vop->vop_coded)
+// || (vop_time != vo->currentFrame.time && vop_time != vo->pastFrame.time && vop_time != vo->futureFrame.time) )
+ {
+ if(vop->vop_coding_type == MP4_VOP_TYPE_B)
+ {
+ if (!vol->Tframe)
+ vol->Tframe = (int) (vop_time); // - vo->pastFrame.time);
+
+ // NOTE(review): vop_coded is already true here (outer condition),
+ // so this inner check is redundant but harmless.
+ if (vop->vop_coded)
+ {
+ vol->TRB = (int) (vop_time - vol->pastFrameTime);
+ vol->TRD = (int) (vol->futureFrameTime - vol->pastFrameTime);
+
+ // defense from bad streams when B-VOPs are before Past and/or Future
+ if (vol->TRB <= 0)
+ vol->TRB = 1;
+
+ if (vol->TRD <= 0)
+ vol->TRD = 2;
+
+ if (vol->TRD <= vol->TRB)
+ {
+ vol->TRB = 1;
+ vol->TRD = 2;
+ }
+
+ if (vol->Tframe >= vol->TRD)
+ vol->Tframe = vol->TRB;
+ }
+ }
+ }
+
+ return status;
+} // mp4_DecodeVideoObjectPlane
+
diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_decodevideoobjectplane.h b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_decodevideoobjectplane.h
new file mode 100644
index 0000000..2cb3c87
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_decodevideoobjectplane.h
@@ -0,0 +1,11 @@
+#ifndef VIDDEC_MP4_DECODEVIDEOOBJECTPLANE_H
+#define VIDDEC_MP4_DECODEVIDEOOBJECTPLANE_H
+#include "viddec_fw_debug.h"
+#include "viddec_parser_ops.h"
+#include "viddec_mp4_parse.h"
+
+// Compute per-VOP timing state (vop_time, sync times, Tframe/TRB/TRD) from
+// the already-parsed headers in pInfo. Returns MP4_STATUS_OK.
+mp4_Status_t mp4_DecodeVideoObjectPlane(mp4_Info_t *pInfo);
+
+//void mp4_copy_info_to_dmem(mp4_Info_t *pInfo, mp4_MBHWInterface *ptr_parameters);
+
+#endif
diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_parse.c b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_parse.c
new file mode 100644
index 0000000..b4cc302
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_parse.c
@@ -0,0 +1,278 @@
+#include "viddec_fw_debug.h"
+#include "viddec_parser_ops.h"
+#include "viddec_mp4_parse.h"
+#include "viddec_mp4_decodevideoobjectplane.h"
+#include "viddec_mp4_shortheader.h"
+#include "viddec_mp4_videoobjectlayer.h"
+#include "viddec_mp4_videoobjectplane.h"
+#include "viddec_mp4_visualobject.h"
+
+extern uint32_t viddec_parse_sc_mp4(void *in, void *pcxt, void *sc_state);
+
+// Report the parser's memory requirements: one viddec_mp4_parser_t context,
+// no persistent memory.
+void viddec_mp4_get_context_size(viddec_parser_memory_sizes_t *size)
+{
+    size->context_size = sizeof(viddec_mp4_parser_t);
+    size->persist_size = 0;
+} // viddec_mp4_get_context_size
+
+// Decide whether the current workload (frame) is complete given the next
+// start code, and classify any accumulated bitstream errors into
+// codec_specific_errors. Returns VIDDEC_PARSE_FRMDONE at a frame boundary
+// (or on EOS/discontinuity), VIDDEC_PARSE_SUCESS otherwise.
+uint32_t viddec_mp4_wkld_done(void *parent, void *ctxt, uint32_t next_sc, uint32_t *codec_specific_errors)
+{
+ viddec_mp4_parser_t *parser = (viddec_mp4_parser_t *) ctxt;
+ int result = VIDDEC_PARSE_SUCESS;
+ uint8_t frame_boundary = false;
+ uint8_t force_frame_complete = false;
+
+ //DEB("entering is_wkld_done: next_sc: 0x%x, sc_seen: %d\n", next_sc, parser->sc_seen);
+
+ // Self-assignment — presumably to silence an unused-parameter warning.
+ parent = parent;
+
+ // VS, VO, VOL, VOP or GVOP start codes indicate frame boundary.
+ frame_boundary = ( (MP4_SC_VISUAL_OBJECT_SEQUENCE == next_sc) ||
+ (MP4_SC_VISUAL_OBJECT == next_sc) ||
+ ((MP4_SC_VIDEO_OBJECT_LAYER_MIN <= next_sc) && (next_sc <= MP4_SC_VIDEO_OBJECT_LAYER_MAX)) ||
+ (next_sc <= MP4_SC_VIDEO_OBJECT_MAX) ||
+ (MP4_SC_VIDEO_OBJECT_PLANE == next_sc) ||
+ ((SHORT_THIRD_STARTCODE_BYTE & 0xFC) == (next_sc & 0xFC)) ||
+ (MP4_SC_GROUP_OF_VOP == next_sc) );
+
+ // EOS and discontinuity should force workload completion.
+ force_frame_complete = ((VIDDEC_PARSE_EOS == next_sc) || (VIDDEC_PARSE_DISCONTINUITY == next_sc));
+
+ if(frame_boundary | force_frame_complete)
+ {
+ *codec_specific_errors = 0;
+
+ // Frame is considered complete and without errors, if a VOL was received since startup and
+ // if a VOP was received for this workload.
+ if (!((parser->sc_seen & MP4_SC_SEEN_VOL) && (parser->sc_seen & MP4_SC_SEEN_VOP)) && !(parser->sc_seen & MP4_SC_SEEN_SVH))
+ *codec_specific_errors |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE;
+
+ /*
+ This is the strategy for error detection.
+ Errors in any field needed by the firmware (parser/decoder) are treated as non-decodable.
+ Errors in other fields will be considered decodable.
+ Defaults/alternate strategies will be considered on a case-by-case basis as customer content is seen.
+
+ ERROR_TYPE | PARSING | INVALID/UNSUPPORTED | BS = Bitstream error
+ ----------------------------------------------------------------- UNSUP = Un-supported
+ DFLT_PRESENT | YES | NO | YES | NO | ND = Non-decodable
+ COMPONENT USED | | | | | DFLT = Populate defaults
+ -----------------------------------------------------------------
+ FIRMWARE | BS+ND | BS+ND | UNSUP+ND | UNSUP+ND |
+ DRIVER/USER | BS+DFLT | BS | UNSUP | UNSUP |
+ NONE | BS | BS | UNSUP | UNSUP |
+ | | | Continue Parsing |
+ */
+ if((parser->bitstream_error & MP4_BS_ERROR_HDR_NONDEC) || (parser->bitstream_error & MP4_BS_ERROR_FRM_NONDEC))
+ *codec_specific_errors |= (VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE | VIDDEC_FW_WORKLOAD_ERR_MISSING_DMEM);
+
+ if((parser->bitstream_error & MP4_BS_ERROR_HDR_UNSUP) || (parser->bitstream_error & MP4_BS_ERROR_FRM_UNSUP))
+ *codec_specific_errors |= VIDDEC_FW_WORKLOAD_ERR_UNSUPPORTED;
+
+ if((parser->bitstream_error & MP4_BS_ERROR_HDR_PARSE) || (parser->bitstream_error & MP4_BS_ERROR_FRM_PARSE))
+ *codec_specific_errors |= VIDDEC_FW_WORKLOAD_ERR_BITSTREAM_ERROR;
+
+ // Carry only header-level (VOL) state forward into the next workload.
+ parser->bitstream_error &= MP4_HDR_ERROR_MASK;
+ parser->sc_seen &= MP4_SC_SEEN_VOL;
+ result = VIDDEC_PARSE_FRMDONE;
+ }
+ //DEB("exiting is_wkld_done: next_sc: 0x%x, sc_seen: %d, err: %d, fr_bnd:%d, force:%d\n",
+ // next_sc, parser->sc_seen, *codec_specific_errors, frame_boundary, force_frame_complete);
+
+ return result;
+} // viddec_mp4_wkld_done
+
+// (Re)initialize the parser context. With preserve set, keep VOL-level state
+// (sequence headers) and reset only per-frame data; otherwise wipe all
+// parsed info. persist_mem is unused (persist_size is 0).
+void viddec_mp4_init(void *ctxt, uint32_t *persist_mem, uint32_t preserve)
+{
+ viddec_mp4_parser_t *parser = (viddec_mp4_parser_t *) ctxt;
+
+ // Self-assignment — presumably to silence an unused-parameter warning.
+ persist_mem = persist_mem;
+ parser->is_frame_start = false;
+ parser->prev_sc = MP4_SC_INVALID;
+ parser->current_sc = MP4_SC_INVALID;
+ parser->cur_sc_prefix = false;
+ parser->next_sc_prefix = false;
+ parser->ignore_scs = false;
+
+ if(preserve)
+ {
+ // Need to maintain information till VOL
+ parser->sc_seen &= MP4_SC_SEEN_VOL;
+ parser->bitstream_error &= MP4_HDR_ERROR_MASK;
+
+ // Reset only frame related data
+ memset(&(parser->info.VisualObject.VideoObject.VideoObjectPlane), 0, sizeof(mp4_VideoObjectPlane_t));
+ memset(&(parser->info.VisualObject.VideoObject.VideoObjectPlaneH263), 0, sizeof(mp4_VideoObjectPlaneH263));
+ }
+ else
+ {
+ parser->sc_seen = MP4_SC_SEEN_INVALID;
+ parser->bitstream_error = MP4_BS_ERROR_NONE;
+ memset(&(parser->info), 0, sizeof(mp4_Info_t));
+ }
+
+ return;
+} // viddec_mp4_init
+
+// Run VOP timing decode and (in firmware builds only — emit is compiled out
+// under VBP) emit the workload for the current frame. Returns the status of
+// the last step executed.
+static uint32_t viddec_mp4_decodevop_and_emitwkld(void *parent, void *ctxt)
+{
+ int status = MP4_STATUS_OK;
+ viddec_mp4_parser_t *cxt = (viddec_mp4_parser_t *)ctxt;
+
+ status = mp4_DecodeVideoObjectPlane(&(cxt->info));
+
+#ifndef VBP
+ status = viddec_fw_mp4_emit_workload(parent, ctxt);
+#endif
+
+ return status;
+} // viddec_mp4_decodevop_and_emitwkld
+
+// Main syntax-parse entry point: reads the start code at the head of the
+// buffer and dispatches to the appropriate MPEG-4 header parser (VS, VO,
+// VOL, GOV, VOP, user data, or short-video-header). Also triggers VOP decode
+// and workload emission when a picture start is parsed.
+// Returns VIDDEC_PARSE_SUCESS, or VIDDEC_PARSE_ERROR if no start code is
+// available.
+uint32_t viddec_mp4_parse(void *parent, void *ctxt)
+{
+ uint32_t sc=0;
+ viddec_mp4_parser_t *cxt;
+ uint8_t is_svh=0;
+ int32_t getbits=0;
+ int32_t status = 0;
+
+ cxt = (viddec_mp4_parser_t *)ctxt;
+ // cur_sc_prefix set => a full 00 00 01 start-code prefix was found;
+ // otherwise treat the data as a short-video-header (H.263-style) code.
+ is_svh = (cxt->cur_sc_prefix) ? false: true;
+ if((getbits = viddec_pm_peek_bits(parent, &sc, 32)) == -1)
+ {
+ DEB("Start code not found\n");
+ return VIDDEC_PARSE_ERROR;
+ }
+
+ if(!is_svh)
+ {
+ // Consume the 32-bit start code; keep only the code byte and re-add
+ // the 0x100 prefix marker.
+ viddec_pm_get_bits(parent, &sc, 32);
+ sc = sc & 0xFF;
+ cxt->current_sc = sc;
+ cxt->current_sc |= 0x100;
+ DEB("current_sc=0x%.8X, prev_sc=0x%x\n", sc, cxt->prev_sc);
+
+ switch(sc)
+ {
+ case MP4_SC_VISUAL_OBJECT_SEQUENCE:
+ {
+ status = mp4_Parse_VisualSequence(parent, cxt);
+ cxt->prev_sc = MP4_SC_VISUAL_OBJECT_SEQUENCE;
+ DEB("MP4_VISUAL_OBJECT_SEQUENCE_SC: \n");
+ break;
+ }
+ case MP4_SC_VISUAL_OBJECT_SEQUENCE_EC:
+ {/* Not required to do anything */
+ break;
+ }
+ case MP4_SC_USER_DATA:
+ { /* Copy userdata to user-visible buffer (EMIT) */
+ status = mp4_Parse_UserData(parent, cxt);
+ DEB("MP4_USER_DATA_SC: \n");
+ break;
+ }
+ case MP4_SC_GROUP_OF_VOP:
+ {
+ status = mp4_Parse_GroupOfVideoObjectPlane(parent, cxt);
+ cxt->prev_sc = MP4_SC_GROUP_OF_VOP;
+ DEB("MP4_GROUP_OF_VOP_SC:0x%.8X\n", status);
+ break;
+ }
+ case MP4_SC_VIDEO_SESSION_ERROR:
+ {/* Not required to do anything?? */
+ break;
+ }
+ case MP4_SC_VISUAL_OBJECT:
+ {
+ status = mp4_Parse_VisualObject(parent, cxt);
+ cxt->prev_sc = MP4_SC_VISUAL_OBJECT;
+ DEB("MP4_VISUAL_OBJECT_SC: status=%.8X\n", status);
+ break;
+ }
+ case MP4_SC_VIDEO_OBJECT_PLANE:
+ {
+ /* We must decode the VOP Header information, it does not end on a byte boundary, so we need to emit
+ a starting bit offset after parsing the header. */
+ status = mp4_Parse_VideoObjectPlane(parent, cxt);
+ status = viddec_mp4_decodevop_and_emitwkld(parent, cxt);
+ // TODO: Fix this for interlaced
+ cxt->is_frame_start = true;
+ cxt->sc_seen |= MP4_SC_SEEN_VOP;
+
+ DEB("MP4_VIDEO_OBJECT_PLANE_SC: status=0x%.8X\n", status);
+ break;
+ }
+ case MP4_SC_STUFFING:
+ {
+ break;
+ }
+ default:
+ {
+ if( (sc >= MP4_SC_VIDEO_OBJECT_LAYER_MIN) && (sc <= MP4_SC_VIDEO_OBJECT_LAYER_MAX) )
+ {
+ status = mp4_Parse_VideoObjectLayer(parent, cxt);
+ cxt->sc_seen = MP4_SC_SEEN_VOL;
+ cxt->prev_sc = MP4_SC_VIDEO_OBJECT_LAYER_MIN;
+ DEB("MP4_VIDEO_OBJECT_LAYER_MIN_SC:status=0x%.8X\n", status);
+ // Normalize sc so the trailing prev_sc assignment records "VOL".
+ sc = MP4_SC_VIDEO_OBJECT_LAYER_MIN;
+ }
+ // sc is unsigned and will be >= 0, so no check needed for sc >= MP4_SC_VIDEO_OBJECT_MIN
+ else if(sc <= MP4_SC_VIDEO_OBJECT_MAX)
+ {
+ // If there is more data, it is short video header, else the next start code is expected to be VideoObjectLayer
+ getbits = viddec_pm_get_bits(parent, &sc, 22);
+ if(getbits != -1)
+ {
+ cxt->current_sc = sc;
+ status = mp4_Parse_VideoObject_svh(parent, cxt);
+ status = viddec_mp4_decodevop_and_emitwkld(parent, cxt);
+ cxt->sc_seen = MP4_SC_SEEN_SVH;
+ cxt->is_frame_start = true;
+ DEB("MP4_SCS_SVH: status=0x%.8X 0x%.8X %.8X\n", status, cxt->current_sc, sc);
+ DEB("MP4_VIDEO_OBJECT_MIN_SC:status=0x%.8X\n", status);
+ }
+ }
+ else
+ {
+ DEB("UNKWON Cod:0x%08X\n", sc);
+ }
+ }
+ break;
+ }
+ }
+ else
+ {
+ // Short-video-header path: the "start code" is the 22-bit H.263 PSC.
+ viddec_pm_get_bits(parent, &sc, 22);
+ cxt->current_sc = sc;
+ DEB("current_sc=0x%.8X, prev_sc=0x%x\n", sc, cxt->prev_sc);
+ status = mp4_Parse_VideoObject_svh(parent, cxt);
+ status = viddec_mp4_decodevop_and_emitwkld(parent, cxt);
+ cxt->sc_seen = MP4_SC_SEEN_SVH;
+ cxt->is_frame_start = true;
+ DEB("SVH: MP4_SCS_SVH: status=0x%.8X 0x%.8X %.8X\n", status, cxt->current_sc, sc);
+ }
+
+ // Current sc becomes the previous sc
+ // NOTE(review): this overwrites the per-case prev_sc assignments above.
+ cxt->prev_sc = sc;
+
+ return VIDDEC_PARSE_SUCESS;
+} // viddec_mp4_parse
+
+// Report whether the most recently parsed unit began a new frame.
+uint32_t viddec_mp4_is_frame_start(void *ctxt)
+{
+    return ((viddec_mp4_parser_t *)ctxt)->is_frame_start;
+} // viddec_mp4_is_frame_start
+
+// Fill the parser-ops vtable with the MPEG-4 entry points.
+void viddec_mp4_get_ops(viddec_parser_ops_t *ops)
+{
+    ops->init           = viddec_mp4_init;
+    ops->get_cxt_size   = viddec_mp4_get_context_size;
+    ops->parse_sc       = viddec_parse_sc_mp4;
+    ops->parse_syntax   = viddec_mp4_parse;
+    ops->is_wkld_done   = viddec_mp4_wkld_done;
+    ops->is_frame_start = viddec_mp4_is_frame_start;
+} // viddec_mp4_get_ops
diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_parse.h b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_parse.h
new file mode 100644
index 0000000..12447a4
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_parse.h
@@ -0,0 +1,527 @@
+#ifndef VIDDEC_MP4_PARSE_H
+#define VIDDEC_MP4_PARSE_H
+
+#include "viddec_fw_debug.h"
+#include "viddec_fw_mp4.h"
+
/* Macros for MP4 start code detection.
 * A long start code prefix is the byte sequence 00 00 01; a short video
 * header prefix has its third byte's top bit set (0x80). The SC_BYTE_MASK*
 * values isolate individual prefix bytes from a 32-bit word. */
#define FIRST_STARTCODE_BYTE        0x00
#define SECOND_STARTCODE_BYTE       0x00
#define THIRD_STARTCODE_BYTE        0x01
#define SHORT_THIRD_STARTCODE_BYTE  0x80
#define SC_BYTE_MASK0               0x00ff0000
#define SC_BYTE_MASK1               0x000000ff
+
/* Status codes returned by the MP4 parsing routines. The error values are
 * distinct single-bit flags so multiple conditions can be OR'ed together. */
typedef enum
{
    MP4_STATUS_OK              =  0,        /* Success */
    MP4_STATUS_PARSE_ERROR     = (1 << 0),  /* Invalid syntax */
    MP4_STATUS_NOTSUPPORT      = (1 << 1),  /* unsupported feature */
    MP4_STATUS_REQD_DATA_ERROR = (1 << 2),  /* supported data either invalid or missing */
} mp4_Status_t;

/* Feature codes describing how the current VOP is coded. */
typedef enum
{
    MP4_VOP_FEATURE_DEFAULT = 0,  // Default VOP features, no code image update needed
    MP4_VOP_FEATURE_SVH     = 1,  // VOP has Short Video Header
    MP4_VOP_FEATURE_DP      = 2   // VOP is Data Partitioned
} mp4_Vop_feature;
+
/* MPEG-4 start code values: ISO/IEC 14496-2 Table 6-3.
 * These are the suffix bytes that follow the 00 00 01 prefix.
 * MP4_SC_INVALID (0x100) is outside the byte range and marks "no start code". */
typedef enum
{
    MP4_SC_VIDEO_OBJECT_MIN          = 0x00,
    MP4_SC_VIDEO_OBJECT_MAX          = 0x1F,
    MP4_SC_VIDEO_OBJECT_LAYER_MIN    = 0x20,
    MP4_SC_VIDEO_OBJECT_LAYER_MAX    = 0x2F,
    MP4_SC_FGS_BP_MIN                = 0x40, /* Unsupported */
    MP4_SC_FGS_BP_MAX                = 0x5F, /* Unsupported */
    MP4_SC_VISUAL_OBJECT_SEQUENCE    = 0xB0,
    MP4_SC_VISUAL_OBJECT_SEQUENCE_EC = 0xB1,
    MP4_SC_USER_DATA                 = 0xB2,
    MP4_SC_GROUP_OF_VOP              = 0xB3,
    MP4_SC_VIDEO_SESSION_ERROR       = 0xB4,
    MP4_SC_VISUAL_OBJECT             = 0xB5,
    MP4_SC_VIDEO_OBJECT_PLANE        = 0xB6,
    MP4_SC_SLICE                     = 0xB7, /* Unsupported */
    MP4_SC_EXTENSION                 = 0xB8, /* Unsupported */
    MP4_SC_FGS_VOP                   = 0xB9, /* Unsupported */
    MP4_SC_FBA_OBJECT                = 0xBA, /* Unsupported */
    MP4_SC_FBA_OBJECT_PLANE          = 0xBB, /* Unsupported */
    MP4_SC_MESH_OBJECT               = 0xBC, /* Unsupported */
    MP4_SC_MESH_OBJECT_PLANE         = 0xBD, /* Unsupported */
    MP4_SC_STILL_TEXTURE_OBJECT      = 0xBE, /* Unsupported */
    MP4_SC_TEXTURE_SPATIAL_LAYER     = 0xBF, /* Unsupported */
    MP4_SC_TEXTURE_SNR_LAYER         = 0xC0, /* Unsupported */
    MP4_SC_TEXTURE_TILE              = 0xC1, /* Unsupported */
    MP4_SC_TEXTURE_SHAPE_LAYER       = 0xC2, /* Unsupported */
    MP4_SC_STUFFING                  = 0xC3,
    MP4_SC_SYTEM_MIN                 = 0xC6, /* Unsupported */
    MP4_SC_SYTEM_MAX                 = 0xFF, /* Unsupported */
    MP4_SC_INVALID                   = 0x100, /* Invalid */
}mp4_start_code_values_t;
+
/* Symbolic constants for the semantic tables of ISO/IEC 14496-2:2004.
   Each anonymous enum below mirrors one spec table, as noted. */

/* MPEG-4 code values
   ISO/IEC 14496-2:2004 table 6-6 (visual_object_type) */
enum
{
    MP4_VISUAL_OBJECT_TYPE_VIDEO   = 1,
    MP4_VISUAL_OBJECT_TYPE_TEXTURE = 2,
    MP4_VISUAL_OBJECT_TYPE_MESH    = 3,
    MP4_VISUAL_OBJECT_TYPE_FBA     = 4,
    MP4_VISUAL_OBJECT_TYPE_3DMESH  = 5
};

/* ISO/IEC 14496-2:2004 table 6-7 (video_format) */
enum
{
    MP4_VIDEO_FORMAT_COMPONENT   = 0,
    MP4_VIDEO_FORMAT_PAL         = 1,
    MP4_VIDEO_FORMAT_NTSC        = 2,
    MP4_VIDEO_FORMAT_SECAM       = 3,
    MP4_VIDEO_FORMAT_MAC         = 4,
    MP4_VIDEO_FORMAT_UNSPECIFIED = 5
};

/* ISO/IEC 14496-2:2004 table 6-8..10 (colour_primaries et al.) */
enum
{
    MP4_VIDEO_COLORS_FORBIDDEN         = 0,
    MP4_VIDEO_COLORS_ITU_R_BT_709      = 1,
    MP4_VIDEO_COLORS_UNSPECIFIED       = 2,
    MP4_VIDEO_COLORS_RESERVED          = 3,
    MP4_VIDEO_COLORS_ITU_R_BT_470_2_M  = 4,
    MP4_VIDEO_COLORS_ITU_R_BT_470_2_BG = 5,
    MP4_VIDEO_COLORS_SMPTE_170M        = 6,
    MP4_VIDEO_COLORS_SMPTE_240M        = 7,
    MP4_VIDEO_COLORS_GENERIC_FILM      = 8
};

/* ISO/IEC 14496-2:2004 table 6-11 (video_object_type_indication) */
enum
{
    MP4_VIDEO_OBJECT_TYPE_SIMPLE                     = 1,
    MP4_VIDEO_OBJECT_TYPE_SIMPLE_SCALABLE            = 2,
    MP4_VIDEO_OBJECT_TYPE_CORE                       = 3,
    MP4_VIDEO_OBJECT_TYPE_MAIN                       = 4,
    MP4_VIDEO_OBJECT_TYPE_NBIT                       = 5,
    MP4_VIDEO_OBJECT_TYPE_2DTEXTURE                  = 6,
    MP4_VIDEO_OBJECT_TYPE_2DMESH                     = 7,
    MP4_VIDEO_OBJECT_TYPE_SIMPLE_FACE                = 8,
    MP4_VIDEO_OBJECT_TYPE_STILL_SCALABLE_TEXTURE     = 9,
    MP4_VIDEO_OBJECT_TYPE_ADVANCED_REAL_TIME_SIMPLE  = 10,
    MP4_VIDEO_OBJECT_TYPE_CORE_SCALABLE              = 11,
    MP4_VIDEO_OBJECT_TYPE_ADVANCED_CODING_EFFICIENCY = 12,
    MP4_VIDEO_OBJECT_TYPE_ADVANCED_SCALABLE_TEXTURE  = 13,
    MP4_VIDEO_OBJECT_TYPE_SIMPLE_FBA                 = 14,
    MP4_VIDEO_OBJECT_TYPE_SIMPLE_STUDIO              = 15,
    MP4_VIDEO_OBJECT_TYPE_CORE_STUDIO                = 16,
    MP4_VIDEO_OBJECT_TYPE_ADVANCED_SIMPLE            = 17,
    MP4_VIDEO_OBJECT_TYPE_FINE_GRANULARITY_SCALABLE  = 18
};

/* ISO/IEC 14496-2:2004 table 6.17 (maximum defined video_object_layer_shape_extension) */
#define MP4_SHAPE_EXT_NUM 13

/* ISO/IEC 14496-2:2004 table 6-14 (aspect_ratio_info) */
enum
{
    MP4_ASPECT_RATIO_FORBIDDEN = 0,
    MP4_ASPECT_RATIO_1_1       = 1,
    MP4_ASPECT_RATIO_12_11     = 2,
    MP4_ASPECT_RATIO_10_11     = 3,
    MP4_ASPECT_RATIO_16_11     = 4,
    MP4_ASPECT_RATIO_40_33     = 5,
    MP4_ASPECT_RATIO_EXTPAR    = 15   /* extended PAR: explicit par_width/par_height follow */
};

/* ISO/IEC 14496-2:2004 table 6-15 (chroma_format) */
#define MP4_CHROMA_FORMAT_420 1

/* ISO/IEC 14496-2:2004 table 6-16 (video_object_layer_shape) */
enum
{
    MP4_SHAPE_TYPE_RECTANGULAR = 0,
    MP4_SHAPE_TYPE_BINARY      = 1,
    MP4_SHAPE_TYPE_BINARYONLY  = 2,
    MP4_SHAPE_TYPE_GRAYSCALE   = 3
};

/* ISO/IEC 14496-2:2004 table 6-19 (sprite_enable) */
#define MP4_SPRITE_STATIC 1
#define MP4_SPRITE_GMC    2

/* ISO/IEC 14496-2:2004 table 6-24 (vop_coding_type) */
enum
{
    MP4_VOP_TYPE_I = 0,
    MP4_VOP_TYPE_P = 1,
    MP4_VOP_TYPE_B = 2,
    MP4_VOP_TYPE_S = 3,
};

/* ISO/IEC 14496-2:2004 table 6-26 (sprite_transmit_mode) */
enum
{
    MP4_SPRITE_TRANSMIT_MODE_STOP   = 0,
    MP4_SPRITE_TRANSMIT_MODE_PIECE  = 1,
    MP4_SPRITE_TRANSMIT_MODE_UPDATE = 2,
    MP4_SPRITE_TRANSMIT_MODE_PAUSE  = 3
};

/* ISO/IEC 14496-2:2004 table 7-3 (binary alpha block types) */
enum
{
    MP4_BAB_TYPE_MVDSZ_NOUPDATE  = 0,
    MP4_BAB_TYPE_MVDSNZ_NOUPDATE = 1,
    MP4_BAB_TYPE_TRANSPARENT     = 2,
    MP4_BAB_TYPE_OPAQUE          = 3,
    MP4_BAB_TYPE_INTRACAE        = 4,
    MP4_BAB_TYPE_MVDSZ_INTERCAE  = 5,
    MP4_BAB_TYPE_MVDSNZ_INTERCAE = 6
};

/* Resynchronization markers used in data-partitioned VOPs. */
#define MP4_DC_MARKER 0x6B001  // 110 1011 0000 0000 0001
#define MP4_MV_MARKER 0x1F001  //   1 1111 0000 0000 0001


/* ISO/IEC 14496-2:2004 table G.1 (profile_and_level_indication) */
enum
{
    MP4_SIMPLE_PROFILE_LEVEL_1                     = 0x01,
    MP4_SIMPLE_PROFILE_LEVEL_2                     = 0x02,
    MP4_SIMPLE_PROFILE_LEVEL_3                     = 0x03,
    MP4_SIMPLE_PROFILE_LEVEL_4a                    = 0x04,
    MP4_SIMPLE_PROFILE_LEVEL_5                     = 0x05,
    MP4_SIMPLE_PROFILE_LEVEL_6                     = 0x06,
    MP4_SIMPLE_PROFILE_LEVEL_0                     = 0x08,
    MP4_CORE_PROFILE_LEVEL_1                       = 0x21,
    MP4_CORE_PROFILE_LEVEL_2                       = 0x22,
    MP4_MAIN_PROFILE_LEVEL_2                       = 0x32,
    MP4_MAIN_PROFILE_LEVEL_3                       = 0x33,
    MP4_MAIN_PROFILE_LEVEL_4                       = 0x34,
    MP4_ADVANCED_REAL_TIME_SIMPLE_PROFILE_LEVEL_1  = 0x91,
    MP4_ADVANCED_REAL_TIME_SIMPLE_PROFILE_LEVEL_2  = 0x92,
    MP4_ADVANCED_REAL_TIME_SIMPLE_PROFILE_LEVEL_3  = 0x93,
    MP4_ADVANCED_REAL_TIME_SIMPLE_PROFILE_LEVEL_4  = 0x94,
    MP4_ADVANCED_CODING_EFFICIENCY_PROFILE_LEVEL_1 = 0xB1,
    MP4_ADVANCED_CODING_EFFICIENCY_PROFILE_LEVEL_2 = 0xB2,
    MP4_ADVANCED_CODING_EFFICIENCY_PROFILE_LEVEL_3 = 0xB3,
    MP4_ADVANCED_CODING_EFFICIENCY_PROFILE_LEVEL_4 = 0xB4,
    MP4_ADVANCED_CORE_PROFILE_LEVEL_1              = 0xC1,
    MP4_ADVANCED_CORE_PROFILE_LEVEL_2              = 0xC2,
    MP4_ADVANCED_SIMPLE_PROFILE_LEVEL_0            = 0xF0,
    MP4_ADVANCED_SIMPLE_PROFILE_LEVEL_1            = 0xF1,
    MP4_ADVANCED_SIMPLE_PROFILE_LEVEL_2            = 0xF2,
    MP4_ADVANCED_SIMPLE_PROFILE_LEVEL_3            = 0xF3,
    MP4_ADVANCED_SIMPLE_PROFILE_LEVEL_4            = 0xF4,
    MP4_ADVANCED_SIMPLE_PROFILE_LEVEL_5            = 0xF5,
    MP4_ADVANCED_SIMPLE_PROFILE_LEVEL_3B           = 0xF7
};
+
/* Group Of Video Object Plane Info (GOV header, start code 0xB3).
 * The dummy*/align* fields are presumably explicit padding to keep the
 * layout stable across compilers — TODO confirm against the firmware ABI. */
typedef struct
{
    uint8_t  closed_gov;
    uint8_t  broken_link;
    uint8_t  time_code_hours;
    uint8_t  time_code_minutes;
    uint8_t  time_code_seconds;
    uint8_t  dummy1;
    uint16_t dummy2;
    uint32_t time_base;
} mp4_GroupOfVideoObjectPlane_t;


/* Video Object Plane Info (VOP header, start code 0xB6). */
typedef struct
{
    uint8_t  vop_coding_type;          /* I/P/B/S, see MP4_VOP_TYPE_* */
    uint32_t modulo_time_base;
    uint16_t vop_time_increment;
    uint8_t  vop_coded;                /* 0 means a skipped (not coded) VOP */

    uint16_t vop_id;
    uint16_t vop_id_for_prediction;
    uint8_t  is_vop_id_for_prediction_indication;
    uint8_t  vop_rounding_type;
    uint8_t  vop_reduced_resolution;
    uint8_t  align_dummy;

    uint16_t vop_width;
    uint16_t vop_height;
    uint16_t vop_horizontal_mc_spatial_ref;
    uint16_t vop_vertical_mc_spatial_ref;

    uint8_t  background_composition;
    uint8_t  change_conv_ratio_disable;
    uint8_t  is_vop_constant_alpha;
    uint8_t  vop_constant_alpha_value;
    uint8_t  intra_dc_vlc_thr;
    uint8_t  top_field_first;
    uint8_t  alternate_vertical_scan_flag;
    uint8_t  sprite_transmit_mode;

    int32_t  brightness_change_factor;
    uint16_t vop_quant;
    uint8_t  vop_fcode_forward;
    uint8_t  vop_fcode_backward;

    /* Global motion warping vectors (up to 4 warping points). */
    uint16_t warping_mv_code_du[4];
    uint16_t warping_mv_code_dv[4];

} mp4_VideoObjectPlane_t;

/* VOLControlParameters Info (optional VBV parameters in the VOL header). */
typedef struct
{
    uint8_t  chroma_format;
    uint8_t  low_delay;
    uint8_t  vbv_parameters;
    uint8_t  align_dummy1;
    uint32_t bit_rate;
    uint32_t vbv_buffer_size;
    uint32_t vbv_occupancy;
} mp4_VOLControlParameters_t;

/* Video Object Plane with short header Info (H.263 baseline picture header). */
typedef struct _mp4_VideoObjectPlaneH263
{
    uint8_t  temporal_reference;
    uint8_t  split_screen_indicator;
    uint8_t  document_camera_indicator;
    uint8_t  full_picture_freeze_release;
    uint8_t  source_format;            /* 1..5 selects a standard picture size */
    uint8_t  picture_coding_type;      /* 0 = INTRA, 1 = INTER */
    uint8_t  vop_quant;
    uint16_t num_gobs_in_vop;
    uint16_t num_macroblocks_in_gob;
    uint8_t  num_rows_in_gob;

#if 0
    uint8_t  gob_number;
    int      gob_header_empty;
    int      gob_frame_id;
    int      quant_scale;
#endif
} mp4_VideoObjectPlaneH263;

/* Sprite parameters parsed from the VOL header (GMC only is supported). */
typedef struct
{
    uint16_t sprite_width;
    uint16_t sprite_height;
    uint16_t sprite_left_coordinate;
    uint16_t sprite_top_coordinate;
    uint16_t no_of_sprite_warping_points;
    uint16_t sprite_warping_accuracy;
    uint16_t sprite_brightness_change;
    uint16_t low_latency_sprite_enable;
}mp4_VOLSpriteInfo_t;

/* Quantization matrices for the VOL; defaults are used when not loaded. */
typedef struct
{
    uint8_t  load_intra_quant_mat;
    uint8_t  load_nonintra_quant_mat;
    uint16_t align_dummy1;
    uint8_t  intra_quant_mat[64];
    uint8_t  nonintra_quant_mat[64];
}mp4_VOLQuant_mat_t;
+
/* Video Object Layer Info: aggregate state for the current VOL, including
 * the nested GOV/VOP/short-header state and B-VOP timing bookkeeping. */
typedef struct
{
    uint8_t  video_object_layer_id;   /* Last 4 bits of start code. */
    uint8_t  short_video_header;      /* 1 when the stream uses H.263 short headers */
    uint8_t  random_accessible_vol;
    uint8_t  video_object_type_indication;

    uint8_t  is_object_layer_identifier;
    uint8_t  video_object_layer_verid;
    uint8_t  video_object_layer_priority;
    uint8_t  aspect_ratio_info;

    uint8_t  aspect_ratio_info_par_width;
    uint8_t  aspect_ratio_info_par_height;
    uint8_t  align_dummy1;
    uint8_t  is_vol_control_parameters;

    mp4_VOLControlParameters_t VOLControlParameters;

    uint8_t  video_object_layer_shape;
    uint16_t vop_time_increment_resolution;
    uint8_t  vop_time_increment_resolution_bits;

    uint8_t  fixed_vop_rate;
    uint16_t fixed_vop_time_increment;
    uint16_t video_object_layer_width;
    uint16_t video_object_layer_height;
    uint8_t  interlaced;

    uint8_t  obmc_disable;
    uint8_t  sprite_enable;
    mp4_VOLSpriteInfo_t sprite_info;
    uint8_t  not_8_bit;
    uint8_t  quant_precision;

    uint8_t  bits_per_pixel;
    uint8_t  quant_type;
    mp4_VOLQuant_mat_t quant_mat_info;
    uint8_t  quarter_sample;
    uint8_t  complexity_estimation_disable;

    uint8_t  resync_marker_disable;
    uint8_t  data_partitioned;
    uint8_t  reversible_vlc;
    uint8_t  newpred_enable;

    uint8_t  reduced_resolution_vop_enable; // verid != 1
    uint8_t  scalability;
    uint8_t  low_latency_sprite_enable;

    mp4_GroupOfVideoObjectPlane_t GroupOfVideoObjectPlane;
    mp4_VideoObjectPlane_t        VideoObjectPlane;
    mp4_VideoObjectPlaneH263      VideoObjectPlaneH263;

    // for interlaced B-VOP direct mode
    uint32_t Tframe;
    // for B-VOP direct mode
    uint32_t TRB, TRD;
    // time increment of past and future VOP for B-VOP
    uint32_t pastFrameTime, futureFrameTime;
    // VOP global time
    uint32_t vop_sync_time, vop_sync_time_b;

} mp4_VideoObjectLayer_t;
+
/* video_signal_type Info (colour description from the Visual Object header). */
typedef struct
{
    uint8_t is_video_signal_type;
    uint8_t video_format;
    uint8_t video_range;
    uint8_t is_colour_description;
    uint8_t colour_primaries;
    uint8_t transfer_characteristics;
    uint8_t matrix_coefficients;
} mp4_VideoSignalType_t;

/* Per-frame timing record used for B-VOP direct-mode calculations. */
typedef struct _mp4_Frame {
    long long int time;
} mp4_Frame;

/* Visual Object Info: the decoded Visual Object header plus the single
 * video object it contains and reference-frame timing. */
typedef struct
{
    uint8_t is_visual_object_identifier;
    uint8_t visual_object_verid;
    uint8_t visual_object_priority;
    uint8_t visual_object_type;
    mp4_VideoSignalType_t  VideoSignalType;
    mp4_VideoObjectLayer_t VideoObject;

    mp4_Frame currentFrame; // current
    mp4_Frame pastFrame;    // reference in past
    mp4_Frame futureFrame;  // reference in future
} mp4_VisualObject_t;

/* Full Info: the complete parsed bitstream state for one stream. */
typedef struct
{
    mp4_VisualObject_t VisualObject;
    uint8_t profile_and_level_indication;
} mp4_Info_t;

/* Bit flags recording which start codes were seen in the current workload. */
enum
{
    MP4_SC_SEEN_INVALID = 0x0,
    MP4_SC_SEEN_VOL     = 0x1,
    MP4_SC_SEEN_VOP     = 0x2,
    MP4_SC_SEEN_SVH     = 0x4,
};

/* Bit flags classifying bitstream errors by location (header vs frame)
 * and kind (parse failure, non-decodable, unsupported). */
enum
{
    MP4_BS_ERROR_NONE       = (0 << 0),
    MP4_BS_ERROR_HDR_PARSE  = (1 << 0),
    MP4_BS_ERROR_HDR_NONDEC = (1 << 1),
    MP4_BS_ERROR_HDR_UNSUP  = (1 << 2),
    MP4_BS_ERROR_FRM_PARSE  = (1 << 3),
    MP4_BS_ERROR_FRM_NONDEC = (1 << 4),
    MP4_BS_ERROR_FRM_UNSUP  = (1 << 5),
};
+
/* All header-level error bits combined. */
#define MP4_HDR_ERROR_MASK (MP4_BS_ERROR_HDR_PARSE | MP4_BS_ERROR_HDR_NONDEC | MP4_BS_ERROR_HDR_UNSUP)

/* Indices into the reference-frame array (up to 3 slots). */
typedef enum
{
    VIDDEC_MP4_INDX_0 = 0,
    VIDDEC_MP4_INDX_1 = 1,
    VIDDEC_MP4_INDX_2 = 2,
    VIDDEC_MP4_INDX_MAX = 3,
} viddec_fw_mp4_ref_index_t;

/* Per-reference-frame bookkeeping. */
typedef struct
{
    uint8_t is_field;
} viddec_mp4_ref_info_t;

/* Top-level parser context handed to every viddec_mp4_* entry point. */
typedef struct
{
    // The relevant bitstream data for current stream
    mp4_Info_t info;

    // The previous start code (without the prefix)
    uint32_t   prev_sc;

    // The current start code (without the prefix)
    // TODO: Revisit for SVH
    uint32_t   current_sc;

    // Indicates if we look for both short and long video header or just the long video header
    // If false, sc detection looks for both short and long video headers.
    // If true, long video header has been seen and sc detection does not look for short video header any more.
    uint8_t    ignore_scs;

    // Indicates if the current start code prefix is long (if true).
    uint8_t    cur_sc_prefix;

    // Indicates if the next start code prefix is long (if true).
    uint8_t    next_sc_prefix;

    // Indicates start of a frame
    uint8_t    is_frame_start;

    // Indicates which start codes were seen for this workload
    uint8_t    sc_seen;

    // Indicates bitstream errors if any
    uint16_t   bitstream_error;

    // Reference frame information
    viddec_mp4_ref_info_t ref_frame[VIDDEC_MP4_INDX_MAX];

}viddec_mp4_parser_t;

/* Error-exit helpers for the do{...}while(0) parse loops: when the getbits
 * call returned -1 (out of data), trace, record the error in ret and break. */
#define BREAK_GETBITS_FAIL(x, ret) { \
        if(x == -1){  \
            FWTRACE; \
            ret = MP4_STATUS_PARSE_ERROR; \
            break;} \
    }

#define BREAK_GETBITS_REQD_MISSING(x, ret) { \
        if(x == -1){  \
            FWTRACE; \
            ret = MP4_STATUS_REQD_DATA_ERROR; \
            break;} \
    }

/* NOTE(review): nonstandard memset prototype (int32_t/uint32_t instead of
 * int/size_t) — presumably matches the firmware's libc; confirm before reuse. */
extern void *memset(void *s, int32_t c, uint32_t n);

uint32_t viddec_fw_mp4_emit_workload(void *parent, void *ctxt);

void mp4_set_hdr_bitstream_error(viddec_mp4_parser_t *parser, uint8_t hdr_flag, mp4_Status_t parse_status);

#endif
diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_shortheader.c b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_shortheader.c
new file mode 100644
index 0000000..a3d894d
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_shortheader.c
@@ -0,0 +1,134 @@
+#include "viddec_mp4_shortheader.h"
+
/* Default picture parameters implied by the H.263 source_format field. */
typedef struct
{
    uint16_t vop_width;
    uint16_t vop_height;
    uint16_t num_macroblocks_in_gob;
    uint16_t num_gobs_in_vop;
    uint8_t  num_rows_in_gob;
} svh_src_fmt_params_t;

/* Indexed by (source_format - 1): sub-QCIF, QCIF, CIF, 4CIF, 16CIF. */
const svh_src_fmt_params_t svh_src_fmt_defaults[5] =
{
    {128,    96,   8,  6, 1},
    {176,   144,  11,  9, 1},
    {352,   288,  22, 18, 1},
    {704,   576,  88, 18, 2},
    {1408, 1152, 352, 18, 4},
};
+
/* Parse the H.263 short-video-header picture header fields that follow the
 * 22-bit start marker (already consumed by the caller) into
 * parser->info.VisualObject.VideoObject.VideoObjectPlaneH263.
 * Returns MP4_STATUS_OK, MP4_STATUS_NOTSUPPORT for a bad source_format, or
 * an error status when the bitstream runs out of data. */
mp4_Status_t mp4_Parse_VideoObjectPlane_svh(void *parent, viddec_mp4_parser_t *parser)
{
    mp4_Status_t ret = MP4_STATUS_OK;
    unsigned int data;
    mp4_VideoObjectPlaneH263 *svh = &(parser->info.VisualObject.VideoObject.VideoObjectPlaneH263);
    int32_t getbits = 0;

    do
    {
        /* Read the next 27 bits in one shot, then peel fields off starting
         * from the low bits (the last field read sits in the LSBs). */
        getbits = viddec_pm_get_bits(parent, &data, 27);
        BREAK_GETBITS_REQD_MISSING(getbits, ret);

        data = data >> 1; // zero_bit
        svh->vop_quant = (data & 0x1F);
        data = data >> 9; // vop_quant + four_reserved_zero_bits
        svh->picture_coding_type = (data & 0x1);
        data = data >> 1; // picture_coding_type
        svh->source_format = (data & 0x7);
        data = data >> 8; // source_format + full_picture_freeze_release + document_camera_indicator + split_screen_indicator + zero_bit + marker_bit
        svh->temporal_reference = data;

        // source_format 1..5 selects a standard picture size (see svh_src_fmt_defaults)
        if (svh->source_format == 0 || svh->source_format > 5)
        {
            DEB("Error: Bad value for VideoPlaneWithShortHeader.source_format\n");
            ret = MP4_STATUS_NOTSUPPORT;
            break;
        }

        // Skip PEI/PSUPP extra-information bytes until a pei bit of 0
        for (;;)
        {
            getbits = viddec_pm_get_bits(parent, &data, 1); // pei
            BREAK_GETBITS_FAIL(getbits, ret);
            if (!data)
                break;
            getbits = viddec_pm_get_bits(parent, &data, 8); // psupp
            BREAK_GETBITS_FAIL(getbits, ret);
        }

        // Anything after this needs to be fed to the decoder as PIXEL_ES
    } while(0);

    return ret;
}
+
/* Parse a short-header (H.263) video object: read the picture header, then
 * synthesize the VOL/VOP/VideoSignalType fields that a short-header stream
 * never transmits, and append the short-header workload item. */
mp4_Status_t mp4_Parse_VideoObject_svh(void *parent, viddec_mp4_parser_t *parser)
{
    mp4_Status_t ret=MP4_STATUS_OK;
    mp4_Info_t *pInfo = &(parser->info);
    mp4_VideoSignalType_t *vst = &(pInfo->VisualObject.VideoSignalType);
    mp4_VideoObjectLayer_t *vol = &(pInfo->VisualObject.VideoObject);
    mp4_VideoObjectPlane_t *vop = &(pInfo->VisualObject.VideoObject.VideoObjectPlane);
    mp4_VideoObjectPlaneH263 *svh = &(pInfo->VisualObject.VideoObject.VideoObjectPlaneH263);
    uint8_t index = 0;

    ret = mp4_Parse_VideoObjectPlane_svh(parent, parser);
    if(ret == MP4_STATUS_OK)
    {
        // Populate defaults for the svh (fixed values implied by H.263 baseline)
        vol->short_video_header = 1;
        vol->video_object_layer_shape = MP4_SHAPE_TYPE_RECTANGULAR;
        vol->obmc_disable = 1;
        vol->quant_type = 0;
        vol->resync_marker_disable = 1;
        vol->data_partitioned = 0;
        vol->reversible_vlc = 0;
        vol->interlaced = 0;
        vol->complexity_estimation_disable = 1;
        vol->scalability = 0;
        vol->not_8_bit = 0;
        vol->bits_per_pixel = 8;
        vol->quant_precision = 5;
        vol->vop_time_increment_resolution = 30000;
        vol->fixed_vop_time_increment = 1001;
        vol->aspect_ratio_info = MP4_ASPECT_RATIO_12_11;

        vop->vop_rounding_type = 0;
        vop->vop_fcode_forward = 1;
        vop->vop_coded = 1;
        vop->vop_coding_type = svh->picture_coding_type ? MP4_VOP_TYPE_P: MP4_VOP_TYPE_I;
        vop->vop_quant = svh->vop_quant;

        vst->colour_primaries = 1;
        vst->transfer_characteristics = 1;
        vst->matrix_coefficients = 6;

        // Look up the picture dimensions implied by source_format (1..5)
        index = svh->source_format - 1;
        vol->video_object_layer_width = svh_src_fmt_defaults[index].vop_width;
        vol->video_object_layer_height = svh_src_fmt_defaults[index].vop_height;
        svh->num_macroblocks_in_gob = svh_src_fmt_defaults[index].num_macroblocks_in_gob;
        svh->num_gobs_in_vop = svh_src_fmt_defaults[index].num_gobs_in_vop;
        svh->num_rows_in_gob = svh_src_fmt_defaults[index].num_rows_in_gob;
    }

    mp4_set_hdr_bitstream_error(parser, false, ret);

    // POPULATE WORKLOAD ITEM
    {
        viddec_workload_item_t wi;

        wi.vwi_type = VIDDEC_WORKLOAD_MPEG4_VIDEO_PLANE_SHORT;

        wi.mp4_vpsh.info = 0;
        wi.mp4_vpsh.pad1 = 0;
        wi.mp4_vpsh.pad2 = 0;

        viddec_fw_mp4_vpsh_set_source_format(&wi.mp4_vpsh, svh->source_format);

        ret = viddec_pm_append_workitem(parent, &wi);
        // NOTE(review): presumably viddec_pm_append_workitem returns 1 on
        // success — confirm; it is mapped back to MP4_STATUS_OK here.
        if(ret == 1)
            ret = MP4_STATUS_OK;
    }

    return ret;
}
diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_shortheader.h b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_shortheader.h
new file mode 100644
index 0000000..e2ecaaa
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_shortheader.h
@@ -0,0 +1,11 @@
#ifndef VIDDEC_MP4_SHORTHEADER_H
#define VIDDEC_MP4_SHORTHEADER_H
#include "viddec_fw_debug.h"
#include "viddec_parser_ops.h"
#include "viddec_mp4_parse.h"

/* Parse the H.263 short-video-header picture header into cxt->info. */
mp4_Status_t mp4_Parse_VideoObjectPlane_svh(void *parent, viddec_mp4_parser_t *cxt);

/* Parse a complete short-header video object: picture header, synthesized
 * VOL/VOP defaults, and the appended workload item. */
mp4_Status_t mp4_Parse_VideoObject_svh(void *parent, viddec_mp4_parser_t *cxt);

#endif
diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectlayer.c b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectlayer.c
new file mode 100644
index 0000000..6df06b6
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectlayer.c
@@ -0,0 +1,596 @@
+#include "viddec_mp4_videoobjectlayer.h"
+
/* Default intra quantizer matrix used when the VOL does not load its own. */
const unsigned char mp4_DefaultIntraQuantMatrix[64] = {
     8, 17, 18, 19, 21, 23, 25, 27,
    17, 18, 19, 21, 23, 25, 27, 28,
    20, 21, 22, 23, 24, 26, 28, 30,
    21, 22, 23, 24, 26, 28, 30, 32,
    22, 23, 24, 26, 28, 30, 32, 35,
    23, 24, 26, 28, 30, 32, 35, 38,
    25, 26, 28, 30, 32, 35, 38, 41,
    27, 28, 30, 32, 35, 38, 41, 45
};
/* Default non-intra quantizer matrix used when the VOL does not load its own. */
const unsigned char mp4_DefaultNonIntraQuantMatrix[64] = {
    16, 17, 18, 19, 20, 21, 22, 23,
    17, 18, 19, 20, 21, 22, 23, 24,
    18, 19, 20, 21, 22, 23, 24, 25,
    19, 20, 21, 22, 23, 24, 26, 27,
    20, 21, 22, 23, 25, 26, 27, 28,
    21, 22, 23, 24, 26, 27, 28, 30,
    22, 23, 24, 26, 27, 28, 30, 31,
    23, 24, 25, 27, 28, 30, 31, 33
};
/* Zigzag scan order: maps coefficient transmission order to raster position. */
const unsigned char mp4_ClassicalZigzag[64] = {
     0,  1,  8, 16,  9,  2,  3, 10, 17, 24, 32, 25, 18, 11,  4,  5,
    12, 19, 26, 33, 40, 48, 41, 34, 27, 20, 13,  6,  7, 14, 21, 28,
    35, 42, 49, 56, 57, 50, 43, 36, 29, 22, 15, 23, 30, 37, 44, 51,
    58, 59, 52, 45, 38, 31, 39, 46, 53, 60, 61, 54, 47, 55, 62, 63
};
+
/* Number of bits needed to code a macroblock number in [0, nmb-1];
 * returns at least 1 (callers pass nmb >= 1). */
static inline int mp4_GetMacroBlockNumberSize(int nmb)
{
    int bits = 1;
    nmb--;
    while (nmb >>= 1)
        bits++;
    return bits;
}
+
/* Byte-wise copy of len bytes from src to dst (local stand-in for memcpy). */
static inline void mp4_copy_default_table(const uint8_t *src, uint8_t *dst, uint32_t len)
{
    uint32_t idx = 0;
    while (idx < len)
    {
        dst[idx] = src[idx];
        idx++;
    }
}
+
+
+static inline mp4_Status_t mp4_Parse_QuantMatrix(void *parent, uint8_t *pQM)
+{
+ uint32_t i,code=0;
+ uint8_t last=0;
+ int32_t getbits=0;
+ mp4_Status_t ret = MP4_STATUS_OK;
+
+ for (i = 0; i < 64; i ++)
+ {
+ getbits = viddec_pm_get_bits(parent, &code, 8);
+ BREAK_GETBITS_REQD_MISSING(getbits, ret);
+ if (code == 0) break;
+ pQM[mp4_ClassicalZigzag[i]] = (uint8_t)(code & 0xFF);
+ }
+ last = pQM[mp4_ClassicalZigzag[i-1]];
+ for (; i < 64; i ++)
+ {
+ pQM[mp4_ClassicalZigzag[i]] = last;
+ }
+ return ret;;
+}
+
/* A video_object_type_indication is valid when it lies in [1, 18]
 * (ISO/IEC 14496-2 table 6-11).
 * FIX: the original used '||', which is true for every uint8_t value and so
 * accepted invalid indications such as 0; the intended operator is '&&'. */
static inline uint8_t mp4_pvt_valid_object_type_indication(uint8_t val)
{
    return ((1 <= val) && (val <= 18));
}
+
/* Valid video_object_layer_verid values are 1, 2, 4 and 5. */
static inline uint8_t mp4_pvt_valid_object_layer_verid(uint8_t val)
{
    return ((val == 1) || (val == 2) || (val == 4) || (val == 5));
}
+
/* Parse the optional vol_control_parameters() of the VOL header:
 * chroma_format, low_delay and (when present) the VBV rate/buffer fields.
 * Unsupported values are forced to defaults and flagged in bitstream_error
 * rather than aborting the parse. */
static mp4_Status_t
mp4_pvt_VOL_volcontrolparameters(void *parent, viddec_mp4_parser_t *parser)
{
    mp4_VOLControlParameters_t *cxt = &(parser->info.VisualObject.VideoObject.VOLControlParameters);
    mp4_Status_t ret = MP4_STATUS_PARSE_ERROR;
    int32_t getbits=0;
    uint32_t code=0;

    do
    {
        // chroma_format(2) + low_delay(1) + vbv_parameters(1)
        getbits = viddec_pm_get_bits(parent, &(code), 4);
        BREAK_GETBITS_REQD_MISSING(getbits, ret);
        cxt->chroma_format = (code >> 2) & 0x3;
        cxt->low_delay = ((code & 0x2) > 0);
        cxt->vbv_parameters = code & 0x1;

        if (cxt->chroma_format != MP4_CHROMA_FORMAT_420)
        {
            DEB("Warning: mp4_Parse_VideoObject:vol_control_parameters.chroma_format != 4:2:0\n");
            cxt->chroma_format= MP4_CHROMA_FORMAT_420;
            parser->bitstream_error |= MP4_BS_ERROR_HDR_UNSUP;
            ret = MP4_STATUS_NOTSUPPORT;
        }

        if(cxt->vbv_parameters)
        {/* TODO: Check for validity of marker bits */
            getbits = viddec_pm_get_bits(parent, &(code), 32);
            BREAK_GETBITS_REQD_MISSING(getbits, ret);
            /* 32 bits= firsthalf(15) + M + LatterHalf(15) + M */
            cxt->bit_rate = (code & 0xFFFE) >> 1;         // Get rid of 1 marker bit
            cxt->bit_rate |= ((code & 0xFFFE0000) >> 2);  // Get rid of 2 marker bits

            if(cxt->bit_rate == 0)
            {
                DEB("Error: mp4_Parse_VideoObject:vidObjLay->VOLControlParameters.bit_rate = 0\n");
                parser->bitstream_error |= MP4_BS_ERROR_HDR_UNSUP;
                ret = MP4_STATUS_NOTSUPPORT;
                // Do we need to really break here? Why not just set an error and proceed
                //break;
            }

            getbits = viddec_pm_get_bits(parent, &(code), 19);
            BREAK_GETBITS_REQD_MISSING(getbits, ret);
            /* 19 bits= firsthalf(15) + M + LatterHalf(3)*/
            cxt->vbv_buffer_size = code & 0x7;
            cxt->vbv_buffer_size |= ( (code >> 4) & 0x7FFF);
            if(cxt->vbv_buffer_size == 0)
            {
                DEB("Error: mp4_Parse_VideoObject:vidObjLay->VOLControlParameters.vbv_buffer_size = 0\n");
                parser->bitstream_error |= MP4_BS_ERROR_HDR_UNSUP;
                ret = MP4_STATUS_NOTSUPPORT;
                // Do we need to really break here? Why not just set an error and proceed
                //break;
            }

            getbits = viddec_pm_get_bits(parent, &(code), 28);
            BREAK_GETBITS_REQD_MISSING(getbits, ret);
            /* 28 bits= firsthalf(11) + M + LatterHalf(15) + M */
            code = code >>1;
            cxt->vbv_occupancy = code & 0x7FFF;
            code = code >>16;
            cxt->vbv_occupancy |= (code & 0x07FF);
        }
        ret = MP4_STATUS_OK;
    } while(0);

    return ret;
}
+
/* Number of bits needed to represent val (returns 1 for val == 0). */
static uint32_t mp4_pvt_count_number_of_bits(uint32_t val)
{
    uint32_t bits = 1;
    while (val >>= 1)
        bits++;
    return bits;
}
+
+static mp4_Status_t
+mp4_Parse_VOL_sprite(void *parent, viddec_mp4_parser_t *parser)
+{
+ mp4_VideoObjectLayer_t *vidObjLay = (&parser->info.VisualObject.VideoObject);
+ mp4_VOLSpriteInfo_t *cxt = &(vidObjLay->sprite_info);
+ uint32_t sprite_enable = vidObjLay->sprite_enable;
+ uint32_t code;
+ mp4_Status_t ret = MP4_STATUS_PARSE_ERROR;
+ int32_t getbits=0;
+
+ do{
+ if ((sprite_enable == MP4_SPRITE_STATIC) ||
+ (sprite_enable == MP4_SPRITE_GMC))
+ {
+ if (sprite_enable != MP4_SPRITE_GMC)
+ {
+ /* This is not a supported type by HW */
+ DEB("ERROR: mp4_Parse_VideoObject:sprite_enable = %.2X\n", sprite_enable);
+ ret = MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR;
+ break;
+ }
+
+ getbits = viddec_pm_get_bits(parent, &(code), 9);
+ BREAK_GETBITS_REQD_MISSING(getbits, ret);
+ cxt->sprite_brightness_change = code & 0x1;
+ cxt->sprite_warping_accuracy = (code >> 1) & 0x3;
+ cxt->no_of_sprite_warping_points = code >> 3;
+ if(cxt->no_of_sprite_warping_points > 1)
+ {
+ DEB("Error: mp4_Parse_VideoObject:bad no_of_sprite_warping_points %d\n",
+ cxt->no_of_sprite_warping_points);
+ ret = MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR;
+ break;
+ }
+
+ if((vidObjLay->sprite_enable == MP4_SPRITE_GMC) && (cxt->sprite_brightness_change))
+ {
+ DEB("Error: mp4_Parse_VideoObject:sprite_brightness_change should be 0 for GMC sprites\n");
+ ret = MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR;
+ break;
+ }
+
+ if (vidObjLay->sprite_enable != MP4_SPRITE_GMC)
+ {
+ DEB("ERROR: mp4_Parse_VideoObject:sprite_enable = %.2X\n", sprite_enable);
+ ret = MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR;
+ break;
+ }
+ }
+ ret = MP4_STATUS_OK;
+ }while(0);
+
+ return ret;
+}
+
+static mp4_Status_t mp4_Parse_VOL_quant_mat(void *parent, mp4_VideoObjectLayer_t *vidObjLay)
+{
+ uint32_t code;
+ mp4_Status_t ret = MP4_STATUS_PARSE_ERROR;
+ int32_t getbits=0;
+ mp4_VOLQuant_mat_t *quant = &(vidObjLay->quant_mat_info);
+
+ do{
+ getbits = viddec_pm_get_bits(parent, &(code), 1);
+ BREAK_GETBITS_REQD_MISSING(getbits, ret);
+ quant->load_intra_quant_mat = code;
+ if (quant->load_intra_quant_mat)
+ {
+ mp4_Parse_QuantMatrix(parent, &(quant->intra_quant_mat[0]));
+ }
+ else
+ {
+ mp4_copy_default_table((const uint8_t *)&mp4_DefaultIntraQuantMatrix[0], (uint8_t *)&(quant->intra_quant_mat[0]), 64);
+ }
+
+ getbits = viddec_pm_get_bits(parent, &(code), 1);
+ BREAK_GETBITS_REQD_MISSING(getbits, ret);
+ quant->load_nonintra_quant_mat = code;
+ if (quant->load_nonintra_quant_mat)
+ {
+ mp4_Parse_QuantMatrix(parent, &(quant->nonintra_quant_mat[0]));
+ }
+ else
+ {
+ mp4_copy_default_table((const uint8_t *)&mp4_DefaultNonIntraQuantMatrix[0], (uint8_t *)&(quant->nonintra_quant_mat[0]), 64);
+ }
+ ret = MP4_STATUS_OK;
+ }while(0);
+ return ret;
+}
+
/* Parse the non-binary-only portion of the Video Object Layer header:
 * dimensions, interlacing, sprite info, quantization setup and the
 * scalability/NEWPRED flags. Unsupported features (non-8-bit video,
 * complexity estimation, NEWPRED, scalability, non-rectangular shapes on
 * verid != 1) terminate the parse with NOTSUPPORT|REQD_DATA_ERROR. */
static mp4_Status_t mp4_Parse_VOL_notbinaryonly(void *parent, viddec_mp4_parser_t *parser)
{
    uint32_t code;
    mp4_Info_t *pInfo = &(parser->info);
    mp4_VideoObjectLayer_t *vidObjLay = &(pInfo->VisualObject.VideoObject);
    mp4_Status_t ret = MP4_STATUS_PARSE_ERROR;
    int32_t getbits=0;

    do{
        if (vidObjLay->video_object_layer_shape == MP4_SHAPE_TYPE_RECTANGULAR)
        {
            /* TODO: check for validity of marker bits */
            /* 29 bits = M + width(13) + M + height(13) + M (leading marker already consumed upstream) */
            getbits = viddec_pm_get_bits(parent, &(code), 29);
            BREAK_GETBITS_REQD_MISSING(getbits, ret);
            vidObjLay->video_object_layer_height = (code >> 1) & 0x1FFF;
            vidObjLay->video_object_layer_width = (code >> 15) & 0x1FFF;
        }

        // interlaced(1) + obmc_disable(1)
        getbits = viddec_pm_get_bits(parent, &(code), 2);
        BREAK_GETBITS_REQD_MISSING(getbits, ret);
        vidObjLay->interlaced = ((code & 0x2) > 0);
        vidObjLay->obmc_disable = ((code & 0x1) > 0);

        {
            // sprite_enable is 1 bit for verid == 1, 2 bits otherwise
            uint32_t num_bits=1;
            if(vidObjLay->video_object_layer_verid != 1) num_bits=2;
            getbits = viddec_pm_get_bits(parent, &(code), num_bits);
            BREAK_GETBITS_REQD_MISSING(getbits, ret);
            vidObjLay->sprite_enable = code;
        }

        ret = mp4_Parse_VOL_sprite(parent, parser);
        if(ret != MP4_STATUS_OK)
        {
            break;
        }

        if ((vidObjLay->video_object_layer_verid != 1) &&
            (vidObjLay->video_object_layer_shape != MP4_SHAPE_TYPE_RECTANGULAR))
        {
            /* not supported shape*/
            DEB("Error: mp4_Parse_VideoObject: sadct_disable, not supp\n");
            ret = MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR;
            break;
        }

        getbits = viddec_pm_get_bits(parent, &(code), 1);
        BREAK_GETBITS_FAIL(getbits, ret);
        vidObjLay->not_8_bit = (code > 0 );
        if(vidObjLay->not_8_bit)
        {
            /* 8 bit is only supported mode*/
            DEB("Error: mp4_Parse_VideoObject: not_8_bit, not supp\n");
            ret = MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR;
            break;
        }
        else
        {/* We use default values since only 8 bit mode is supported */
            vidObjLay->quant_precision = 5;
            vidObjLay->bits_per_pixel = 8;
        }

        if (vidObjLay->video_object_layer_shape == MP4_SHAPE_TYPE_GRAYSCALE)
        {
            /* Should not get here as shape is checked earlier */
            DEB("Error: mp4_Parse_VideoObject: GRAYSCALE, not supp\n");
            ret = MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR;
            break;
        }

        getbits = viddec_pm_get_bits(parent, &(code), 1);
        BREAK_GETBITS_REQD_MISSING(getbits, ret);
        vidObjLay->quant_type = code;
        if (vidObjLay->quant_type)
        {
            // MPEG-style quantization: matrices follow
            ret = mp4_Parse_VOL_quant_mat(parent, vidObjLay);
            if(ret != MP4_STATUS_OK)
            {
                break;
            }
        }

        if (vidObjLay->video_object_layer_verid != 1)
        {
            getbits = viddec_pm_get_bits(parent, &(code), 1);
            BREAK_GETBITS_REQD_MISSING(getbits, ret);
            vidObjLay->quarter_sample = code;
        }

        getbits = viddec_pm_get_bits(parent, &(code), 1);
        BREAK_GETBITS_REQD_MISSING(getbits, ret);
        vidObjLay->complexity_estimation_disable = code;
        if(!vidObjLay->complexity_estimation_disable)
        {/* complexity estimation not supported */
            DEB("Error: mp4_Parse_VideoObject: vidObjLay->complexity_estimation_disable, not supp\n");
            ret = MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR;
            break;
        }

        // resync_marker_disable(1) + data_partitioned(1)
        getbits = viddec_pm_get_bits(parent, &(code), 2);
        BREAK_GETBITS_REQD_MISSING(getbits, ret);
        vidObjLay->resync_marker_disable = ((code & 0x2) > 0);
        vidObjLay->data_partitioned = code & 0x1;
        if(vidObjLay->data_partitioned)
        {
            getbits = viddec_pm_get_bits(parent, &(code), 1);
            BREAK_GETBITS_REQD_MISSING(getbits, ret);
            vidObjLay->reversible_vlc = code;
        }

        if (vidObjLay->video_object_layer_verid != 1)
        {
            getbits = viddec_pm_get_bits(parent, &(code), 1);
            BREAK_GETBITS_FAIL(getbits, ret);
            vidObjLay->newpred_enable = code;
            if(vidObjLay->newpred_enable)
            {
                DEB("Error: NEWPRED mode is not supported\n");
                ret = MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR;
                break;
            }
            getbits = viddec_pm_get_bits(parent, &(code), 1);
            BREAK_GETBITS_FAIL(getbits, ret);
            vidObjLay->reduced_resolution_vop_enable = code;
        }

        getbits = viddec_pm_get_bits(parent, &(code), 1);
        BREAK_GETBITS_FAIL(getbits, ret);
        vidObjLay->scalability = code;
        if(vidObjLay->scalability)
        {
            DEB("Error: VOL scalability is not supported\n");
            ret = MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR;
            break;
        }

        // No need to parse further - none of the fields are interesting to parser/decoder/user
        ret = MP4_STATUS_OK;
    }while(0);
    return ret;
}
+
+/*
+ * Parse the MPEG-4 Video Object Layer (VOL) header that follows a VOL start
+ * code. Fills pInfo->VisualObject.VideoObject, records header bitstream
+ * errors on the parser context, and always appends a
+ * VIDDEC_WORKLOAD_MPEG4_VIDEO_OBJ workload item (even after a failed parse,
+ * so partially-populated fields may be reported downstream).
+ *
+ * Unsupported configurations abort the parse with
+ * MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR: fine-granularity
+ * scalable object type, non-rectangular or grayscale shape, binary-only
+ * shape, and a zero vop_time_increment_resolution.
+ *
+ * Returns MP4_STATUS_OK on success, otherwise an mp4_Status_t error mask.
+ */
+mp4_Status_t mp4_Parse_VideoObjectLayer(void *parent, viddec_mp4_parser_t *parser)
+{
+ uint32_t code;
+ mp4_Info_t *pInfo = &(parser->info);
+ mp4_VisualObject_t *visObj = &(pInfo->VisualObject);
+ mp4_VideoObjectLayer_t *vidObjLay = &(pInfo->VisualObject.VideoObject);
+ mp4_Status_t ret = MP4_STATUS_PARSE_ERROR;
+ int32_t getbits=0;
+
+//DEB("entering mp4_Parse_VideoObjectLayer: bs_err: %d, ret: %d\n", parser->bitstream_error, ret);
+ do{
+ vidObjLay->VideoObjectPlane.sprite_transmit_mode = MP4_SPRITE_TRANSMIT_MODE_PIECE;
+
+ vidObjLay->short_video_header = 0;
+ /* The layer id is carried in the low nibble of the VOL start code. */
+ vidObjLay->video_object_layer_id = (parser->current_sc & 0xF);
+
+ /* 9 bits: random_accessible_vol(1) + video_object_type_indication(8) */
+ getbits = viddec_pm_get_bits(parent, &code, 9);
+ BREAK_GETBITS_REQD_MISSING(getbits, ret);
+ vidObjLay->video_object_type_indication = code & 0xFF;
+ vidObjLay->random_accessible_vol = ((code & 0x100) > 0);
+
+ if(!mp4_pvt_valid_object_type_indication(vidObjLay->video_object_type_indication))
+ { /* Streams with "unknown" type mismatch with ref */
+ DEB("Warning: video_object_type_indication = %d, forcing to 1\n",
+ vidObjLay->video_object_type_indication);
+ vidObjLay->video_object_type_indication = 1;
+ }
+
+ if(vidObjLay->video_object_type_indication == MP4_VIDEO_OBJECT_TYPE_FINE_GRANULARITY_SCALABLE)
+ {/* This is not a supported type by HW */
+ DEB("ERROR: mp4_Parse_VideoObject:video_object_type_indication = %.2X\n",
+ vidObjLay->video_object_type_indication);
+ ret = MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR;
+ break;
+ }
+ else
+ {
+ getbits = viddec_pm_get_bits(parent, &(code), 1);
+ BREAK_GETBITS_REQD_MISSING(getbits, ret);
+ vidObjLay->is_object_layer_identifier = code;
+ /* Default verid: inherit a valid visual object verid, else 1 (spec default). */
+ vidObjLay->video_object_layer_verid =
+ (mp4_pvt_valid_object_layer_verid(visObj->visual_object_verid)) ? visObj->visual_object_verid : 1;
+
+ if (vidObjLay->is_object_layer_identifier)
+ {
+ /* 7 bits: video_object_layer_verid(4) + video_object_layer_priority(3) */
+ getbits = viddec_pm_get_bits(parent, &(code), 7);
+ BREAK_GETBITS_REQD_MISSING(getbits, ret);
+ vidObjLay->video_object_layer_priority = code & 0x7;
+ vidObjLay->video_object_layer_verid = (code >> 3) & 0xF;
+ if(!mp4_pvt_valid_object_layer_verid(vidObjLay->video_object_layer_verid))
+ {
+ DEB("Error: mp4_Parse_VideoObject:is_identifier = %d, expected[1,5]\n",
+ vidObjLay->video_object_layer_verid);
+ ret = MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR;
+ break;
+ }
+ /* Video object layer ID supercedes visual object ID */
+ visObj->visual_object_verid = vidObjLay->video_object_layer_verid;
+ }
+
+ getbits = viddec_pm_get_bits(parent, &(code), 4);
+ BREAK_GETBITS_REQD_MISSING(getbits, ret);
+ vidObjLay->aspect_ratio_info = code & 0xF;
+ if(vidObjLay->aspect_ratio_info == MP4_ASPECT_RATIO_EXTPAR)
+ {
+ /* Extended PAR: 16 bits = par_width(8) + par_height(8) */
+ getbits = viddec_pm_get_bits(parent, &(code), 16);
+ BREAK_GETBITS_REQD_MISSING(getbits, ret);
+ vidObjLay->aspect_ratio_info_par_width = (code >> 8) & 0xFF;
+ vidObjLay->aspect_ratio_info_par_height = code & 0xFF;
+ }
+
+ getbits = viddec_pm_get_bits(parent, &(code), 1);
+ BREAK_GETBITS_REQD_MISSING(getbits, ret);
+ vidObjLay->is_vol_control_parameters = code;
+ if(vidObjLay->is_vol_control_parameters)
+ {
+ ret = mp4_pvt_VOL_volcontrolparameters(parent, parser);
+ if(ret != MP4_STATUS_OK)
+ {
+ break;
+ }
+ }
+
+ getbits = viddec_pm_get_bits(parent, &(code), 2);
+ BREAK_GETBITS_REQD_MISSING(getbits, ret);
+ vidObjLay->video_object_layer_shape = code;
+ /* If shape is not rectangular exit early without parsing */
+ if (vidObjLay->video_object_layer_shape != MP4_SHAPE_TYPE_RECTANGULAR)
+ {
+ DEB("Error: mp4_Parse_VideoObject: shape not rectangluar(%d):%d\n",
+ MP4_SHAPE_TYPE_RECTANGULAR, vidObjLay->video_object_layer_shape);
+ ret = MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR;
+ break;
+ }
+
+ if ((vidObjLay->video_object_layer_verid != 1) &&
+ (vidObjLay->video_object_layer_shape == MP4_SHAPE_TYPE_GRAYSCALE))
+ {/* Grayscale not supported */
+ DEB("Error: MP4_SHAPE_TYPE_GRAYSCALE not supported\n");
+ ret = MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR;
+ break;
+ }
+
+ /* 19 bits: marker(1) + vop_time_increment_resolution(16) + marker(1) +
+ fixed_vop_rate(1); neither marker bit is validated (see TODO below). */
+ getbits = viddec_pm_get_bits(parent, &(code), 19);
+ BREAK_GETBITS_REQD_MISSING(getbits, ret);
+ /* TODO: check validity of marker */
+ vidObjLay->vop_time_increment_resolution = (code >> 2) & 0xFFFF;
+ vidObjLay->fixed_vop_rate = code & 0x1;
+
+ if(vidObjLay->vop_time_increment_resolution == 0)
+ {
+ DEB("Error: 0 value for vop_time_increment_resolution\n");
+ ret = MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR;
+ break;
+ }
+ /* calculate number bits in vop_time_increment_resolution */
+ vidObjLay->vop_time_increment_resolution_bits = (uint8_t)mp4_pvt_count_number_of_bits(
+ (uint32_t)(vidObjLay->vop_time_increment_resolution -1));
+
+ if(vidObjLay->fixed_vop_rate)
+ {
+ getbits = viddec_pm_get_bits(parent, &(code), vidObjLay->vop_time_increment_resolution_bits);
+ BREAK_GETBITS_REQD_MISSING(getbits, ret);
+ vidObjLay->fixed_vop_time_increment = code;
+ }
+
+ if (vidObjLay->video_object_layer_shape != MP4_SHAPE_TYPE_BINARYONLY)
+ {
+ ret = mp4_Parse_VOL_notbinaryonly(parent, parser);
+ if(ret != MP4_STATUS_OK)
+ {
+ break;
+ }
+ }
+ else
+ {
+ DEB("Error: MP4_SHAPE_TYPE_BINARYONLY not supported\n");
+ ret = MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR;
+ break;
+ }
+ }
+
+ vidObjLay->VideoObjectPlane.sprite_transmit_mode = MP4_SPRITE_TRANSMIT_MODE_PIECE;
+ ret = MP4_STATUS_OK;
+ } while(0);
+
+ mp4_set_hdr_bitstream_error(parser, true, ret);
+ if(ret != MP4_STATUS_OK)
+ parser->bitstream_error |= MP4_BS_ERROR_HDR_NONDEC;
+//DEB("before wkld mp4_Parse_VideoObjectLayer: bs_err: %d, ret: %d\n", parser->bitstream_error, ret);
+
+ // POPULATE WORKLOAD ITEM
+ {
+ viddec_workload_item_t wi;
+ viddec_workload_t *wl = viddec_pm_get_header(parent);
+
+ wi.vwi_type = VIDDEC_WORKLOAD_MPEG4_VIDEO_OBJ;
+
+ wi.mp4_vol.vol_aspect_ratio = 0;
+ wi.mp4_vol.vol_bit_rate = 0;
+ wi.mp4_vol.vol_frame_rate = 0;
+
+ viddec_fw_mp4_vol_set_aspect_ratio_info(&wi.mp4_vol, vidObjLay->aspect_ratio_info);
+ viddec_fw_mp4_vol_set_par_width(&wi.mp4_vol, vidObjLay->aspect_ratio_info_par_width);
+ viddec_fw_mp4_vol_set_par_height(&wi.mp4_vol, vidObjLay->aspect_ratio_info_par_height);
+ viddec_fw_mp4_vol_set_control_param(&wi.mp4_vol, vidObjLay->is_vol_control_parameters);
+ viddec_fw_mp4_vol_set_chroma_format(&wi.mp4_vol, vidObjLay->VOLControlParameters.chroma_format);
+ viddec_fw_mp4_vol_set_interlaced(&wi.mp4_vol, vidObjLay->interlaced);
+ viddec_fw_mp4_vol_set_fixed_vop_rate(&wi.mp4_vol, vidObjLay->fixed_vop_rate);
+
+ viddec_fw_mp4_vol_set_vbv_param(&wi.mp4_vol, vidObjLay->VOLControlParameters.vbv_parameters);
+ viddec_fw_mp4_vol_set_bit_rate(&wi.mp4_vol, vidObjLay->VOLControlParameters.bit_rate);
+
+ viddec_fw_mp4_vol_set_fixed_vop_time_increment(&wi.mp4_vol, vidObjLay->fixed_vop_time_increment);
+ viddec_fw_mp4_vol_set_vop_time_increment_resolution(&wi.mp4_vol, vidObjLay->vop_time_increment_resolution);
+
+ /* The code treats a return of 1 from append_workitem as success. */
+ ret = viddec_pm_append_workitem(parent, &wi);
+ if(ret == 1)
+ ret = MP4_STATUS_OK;
+
+ memset(&(wl->attrs), 0, sizeof(viddec_frame_attributes_t));
+
+ wl->attrs.cont_size.width = vidObjLay->video_object_layer_width;
+ wl->attrs.cont_size.height = vidObjLay->video_object_layer_height;
+ }
+
+ return ret;
+}
diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectlayer.h b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectlayer.h
new file mode 100644
index 0000000..4540b6b
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectlayer.h
@@ -0,0 +1,17 @@
+#ifndef VIDDEC_MP4_VIDEOOBJECTLAYER_H
+#define VIDDEC_MP4_VIDEOOBJECTLAYER_H
+#include "viddec_fw_debug.h"
+#include "viddec_parser_ops.h"
+#include "viddec_mp4_parse.h"
+
+/* Public interface of the MPEG-4 Video Object Layer (VOL) parser;
+ definitions live in viddec_mp4_videoobjectlayer.c. */
+
+void mp4_ResetVOL(mp4_Info_t *pInfo);
+
+mp4_Status_t mp4_InitVOL(mp4_Info_t *pInfo);
+
+mp4_Status_t mp4_FreeVOL(mp4_Info_t *pInfo);
+
+/* Parse a VOL header and append a VIDDEC_WORKLOAD_MPEG4_VIDEO_OBJ item. */
+mp4_Status_t mp4_Parse_VideoObjectLayer(void *parent, viddec_mp4_parser_t *cxt);
+
+
+
+#endif
diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectplane.c b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectplane.c
new file mode 100644
index 0000000..9840af4
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectplane.c
@@ -0,0 +1,422 @@
+#include "viddec_mp4_videoobjectplane.h"
+
+/*
+ * Parse the Group of VOP (GOV) header: a 20-bit field laid out (MSB first)
+ * as time_code_hours(5) + time_code_minutes(6) + marker(1) +
+ * time_code_seconds(6) + closed_gov(1) + broken_link(1). Computes the
+ * time base in vop_time_increment_resolution ticks, records header
+ * bitstream errors, and appends a VIDDEC_WORKLOAD_MPEG4_GRP_VIDEO_OBJ item.
+ * An invalid marker bit is only logged, not treated as an error.
+ */
+mp4_Status_t mp4_Parse_GroupOfVideoObjectPlane(void *parent, viddec_mp4_parser_t *parser)
+{
+ mp4_Info_t* pInfo = &(parser->info);
+ uint32_t code;
+ int32_t getbits=0;
+ mp4_Status_t ret = MP4_STATUS_REQD_DATA_ERROR;
+ mp4_GroupOfVideoObjectPlane_t *data;
+ uint32_t time_code = 0;
+
+ data = &(pInfo->VisualObject.VideoObject.GroupOfVideoObjectPlane);
+
+ do
+ {
+ getbits = viddec_pm_get_bits(parent, &code, 20);
+ BREAK_GETBITS_FAIL(getbits, ret);
+ ret = MP4_STATUS_OK;
+
+ data->broken_link = ((code & 0x1) > 0);
+ data->closed_gov = ((code & 0x2) > 0);
+ /* time_code keeps the raw 18-bit timestamp for the workload item below. */
+ time_code = code = code >> 2;
+ data->time_code_seconds = code & 0x3F;
+ code = code >> 6;
+ if((code & 1) == 0)
+ {/* SGA:Should we ignore marker bit? */
+ DEB("Error:mp4_Parse_GroupOfVideoObjectPlane: Invalid marker\n");
+ }
+ code = code >>1;
+ data->time_code_minutes = code & 0x3F;
+ code = code >> 6;
+ data->time_code_hours = code & 0x1F;
+
+ // This is the timebase in full second units
+ data->time_base = data->time_code_seconds + (60*data->time_code_minutes) + (3600*data->time_code_hours);
+ // Need to convert this into no. of ticks
+ data->time_base *= pInfo->VisualObject.VideoObject.vop_time_increment_resolution;
+
+ } while(0);
+
+ mp4_set_hdr_bitstream_error(parser, true, ret);
+
+ // POPULATE WORKLOAD ITEM
+ {
+ viddec_workload_item_t wi;
+
+ wi.vwi_type = VIDDEC_WORKLOAD_MPEG4_GRP_VIDEO_OBJ;
+
+ wi.mp4_gvop.gvop_info = 0;
+ wi.mp4_gvop.pad1 = 0;
+ wi.mp4_gvop.pad2 = 0;
+
+ viddec_fw_mp4_gvop_set_broken_link(&wi.mp4_gvop, data->broken_link);
+ viddec_fw_mp4_gvop_set_closed_gov(&wi.mp4_gvop, data->closed_gov);
+ viddec_fw_mp4_gvop_set_time_code(&wi.mp4_gvop, time_code);
+
+ /* The code treats a return of 1 from append_workitem as success. */
+ ret = viddec_pm_append_workitem(parent, &wi);
+ if(ret == 1)
+ ret = MP4_STATUS_OK;
+ }
+
+ return ret;
+}
+
+/*
+ * Decode the variable-length brightness_change_factor used by sprite
+ * brightness change. The 4-bit peek selects the VLC class; each class skips
+ * its prefix then reads a fixed-size suffix and maps it into a signed range.
+ * NOTE(review): only the final getbits status is checked on return --
+ * earlier peek/skip/get results are overwritten; confirm this is intended.
+ */
+static inline mp4_Status_t mp4_brightness_change(void *parent, int32_t *b_change)
+{
+ uint32_t code;
+ int32_t getbits=0;
+
+ *b_change = 0;
+ getbits = viddec_pm_peek_bits(parent, &code, 4);
+ if (code == 15)
+ { /* prefix 1111: 10-bit suffix, values 625..1648 */
+ getbits = viddec_pm_skip_bits(parent, 4);
+ getbits = viddec_pm_get_bits(parent, &code, 10);
+ *b_change = 625 + code;
+ }
+ else if (code == 14)
+ { /* prefix 1110: 9-bit suffix, values 113..624 */
+ getbits = viddec_pm_skip_bits(parent, 4);
+ getbits = viddec_pm_get_bits(parent, &code, 9);
+ *b_change = 113 + code;
+ }
+ else if (code >= 12)
+ { /* prefix 110: 7-bit suffix, split negative/positive ranges */
+ getbits = viddec_pm_skip_bits(parent, 3);
+ getbits = viddec_pm_get_bits(parent, &code, 7);
+ *b_change = (code < 64) ? ((int32_t)code - 112) : ((int32_t)code - 15);
+ }
+ else if (code >= 8)
+ { /* prefix 10: 6-bit suffix */
+ getbits = viddec_pm_skip_bits(parent, 2);
+ getbits = viddec_pm_get_bits(parent, &code, 6);
+ *b_change = (code < 32) ? ((int32_t)code - 48) : ((int32_t)code - 15);
+ }
+ else
+ { /* prefix 0: 5-bit suffix */
+ getbits = viddec_pm_skip_bits(parent, 1);
+ getbits = viddec_pm_get_bits(parent, &code, 5);
+ *b_change = (code < 16) ? ((int32_t)code - 16) : ((int32_t)code - 15);
+ }
+
+ return ( (getbits == -1) ? MP4_STATUS_PARSE_ERROR: MP4_STATUS_OK);
+}
+/*
+ * Decode the VLC-coded dmv_length for a sprite warping motion vector.
+ * A 3-bit peek of all-ones (7) escapes to a longer code whose extra length
+ * is the count of leading 1-bits in the next 9 bits; otherwise the short
+ * code itself encodes length (code - 1) consuming 2 or 3 bits.
+ * NOTE(review): declared int32_t but returns mp4_Status_t values.
+ */
+static inline int32_t mp4_Sprite_dmv_length(void * parent, int32_t *dmv_length)
+{
+ uint32_t code, skip;
+ int32_t getbits=0;
+ mp4_Status_t ret= MP4_STATUS_PARSE_ERROR;
+ *dmv_length=0;
+ skip=3;
+ do{
+ getbits = viddec_pm_peek_bits(parent, &code, skip);
+ BREAK_GETBITS_REQD_MISSING(getbits, ret);
+
+ if(code == 7)
+ { /* long-code escape */
+ viddec_pm_skip_bits(parent, skip);
+ getbits = viddec_pm_peek_bits(parent, &code, 9);
+ BREAK_GETBITS_REQD_MISSING(getbits, ret);
+
+ skip=1;
+ while((code & 256) != 0)
+ {/* count number of 1 bits */
+ code <<=1;
+ skip++;
+ }
+ *dmv_length = 5 + skip;
+ }
+ else
+ { /* short code: 2 bits when code <= 1, else 3; length is code - 1 */
+ skip=(code <= 1) ? 2 : 3;
+ *dmv_length = code - 1;
+ }
+ viddec_pm_skip_bits(parent, skip);
+ ret= MP4_STATUS_OK;
+
+ }while(0);
+ return ret;
+}
+
+/*
+ * Parse the sprite trajectory: for each warping point decode the du and dv
+ * differential motion vector codes (length via mp4_Sprite_dmv_length, then
+ * a dmv_length-bit value sign-extended per VLC convention), each followed
+ * by a marker bit that must be 1; a zero marker aborts with
+ * MP4_STATUS_NOTSUPPORT. Results go into warping_mv_code_du/dv.
+ */
+static inline mp4_Status_t
+mp4_Sprite_Trajectory(void *parent, mp4_VideoObjectLayer_t *vidObjLay, mp4_VideoObjectPlane_t *vidObjPlane)
+{
+ uint32_t code, i;
+ int32_t dmv_length=0, dmv_code=0, getbits=0;
+ mp4_Status_t ret = MP4_STATUS_OK;
+ for(i=0; i < (uint32_t)vidObjLay->sprite_info.no_of_sprite_warping_points; i++ )
+ {
+ ret = mp4_Sprite_dmv_length(parent, &dmv_length);
+ if(ret != MP4_STATUS_OK)
+ {
+ break;
+ }
+ if(dmv_length <= 0)
+ {
+ dmv_code = 0;
+ }
+ else
+ {
+ getbits = viddec_pm_get_bits(parent, &code, (uint32_t)dmv_length);
+ BREAK_GETBITS_REQD_MISSING(getbits, ret);
+ dmv_code = (int32_t)code;
+ /* Top bit clear means a negative value: map into the negative range. */
+ if ((dmv_code & (1 << (dmv_length - 1))) == 0)
+ {
+ dmv_code -= (1 << dmv_length) - 1;
+ }
+ }
+ /* Marker bit must be 1 after each dmv code. */
+ getbits = viddec_pm_get_bits(parent, &code, 1);
+ BREAK_GETBITS_REQD_MISSING(getbits, ret);
+ if(code != 1)
+ {
+ ret = MP4_STATUS_NOTSUPPORT;
+ break;
+ }
+ vidObjPlane->warping_mv_code_du[i] = dmv_code;
+ /* TODO: create another inline function to avoid code duplication */
+ ret = mp4_Sprite_dmv_length(parent, &dmv_length);
+ if(ret != MP4_STATUS_OK)
+ {
+ break;
+ }
+ if(dmv_length <= 0)
+ {
+ dmv_code = 0;
+ }
+ else
+ {
+ getbits = viddec_pm_get_bits(parent, &code, (uint32_t)dmv_length);
+ BREAK_GETBITS_REQD_MISSING(getbits, ret);
+ dmv_code = (int32_t)code;
+ if ((dmv_code & (1 << (dmv_length - 1))) == 0)
+ {
+ dmv_code -= (1 << dmv_length) - 1;
+ }
+ }
+ getbits = viddec_pm_get_bits(parent, &code, 1);
+ BREAK_GETBITS_REQD_MISSING(getbits, ret);
+ if(code != 1)
+ {
+ ret = MP4_STATUS_NOTSUPPORT;
+ break;
+ }
+ vidObjPlane->warping_mv_code_dv[i] = dmv_code;
+
+ }
+ return ret;
+}
+
+/*
+ * Read modulo_time_base from the VOP header. It is unary coded: *base is
+ * the count of consecutive '1' bits before the terminating '0'.
+ */
+static inline mp4_Status_t mp4_pvt_extract_modulotimebase_from_VideoObjectPlane(void *parent, uint32_t *base)
+{
+ mp4_Status_t ret= MP4_STATUS_OK;
+ int32_t getbits=0;
+ uint32_t code = 0;
+
+ *base = 0;
+ do
+ {
+ getbits = viddec_pm_get_bits(parent, &code, 1);
+ BREAK_GETBITS_REQD_MISSING(getbits, ret);
+ *base += code;
+ }while(code != 0);
+ return ret;
+}
+
+/*
+ * Parse a Video Object Plane (VOP) header into
+ * pInfo->VisualObject.VideoObject.VideoObjectPlane, relying on VOL fields
+ * parsed earlier (shape, interlaced, sprite mode, quant_precision, etc.).
+ *
+ * Early-success exits: a not-coded VOP (vop_coded == 0) and a static
+ * sprite after trajectory/brightness parsing. Unsupported paths (newpred,
+ * reduced resolution, non-rectangular/grayscale/binary-only shape,
+ * complexity estimation headers, scalability) return MP4_STATUS_NOTSUPPORT.
+ * Macroblock data beyond the header is left in the stream for the BSP.
+ * Frame-level (not header) bitstream error state is updated before return.
+ */
+mp4_Status_t mp4_Parse_VideoObjectPlane(void *parent, viddec_mp4_parser_t *parser)
+{
+ uint32_t code;
+ mp4_Info_t *pInfo = &(parser->info);
+ mp4_VideoObjectLayer_t *vidObjLay = &(pInfo->VisualObject.VideoObject);
+ mp4_VideoObjectPlane_t *vidObjPlane = &(pInfo->VisualObject.VideoObject.VideoObjectPlane);
+ int32_t getbits=0;
+ mp4_Status_t ret= MP4_STATUS_PARSE_ERROR;
+
+ do
+ {
+ getbits = viddec_pm_get_bits(parent, &code, 2);
+ BREAK_GETBITS_REQD_MISSING(getbits, ret);
+ vidObjPlane->vop_coding_type = code & 0x3;
+ if( mp4_pvt_extract_modulotimebase_from_VideoObjectPlane(parent,
+ &(vidObjPlane->modulo_time_base)) == MP4_STATUS_REQD_DATA_ERROR)
+ {
+ break;
+ }
+
+ /* Marker bit before vop_time_increment; value is not validated. */
+ getbits = viddec_pm_get_bits(parent, &code, 1);
+ /* TODO: check for marker bit validity */
+ {
+ uint32_t numbits=0;
+ numbits = vidObjLay->vop_time_increment_resolution_bits;
+ if(numbits == 0) numbits=1; /*TODO:check if its greater than 16 bits ?? */
+ getbits = viddec_pm_get_bits(parent, &code, numbits);
+ BREAK_GETBITS_REQD_MISSING(getbits, ret);
+ vidObjPlane->vop_time_increment = code;
+ }
+
+ /* 2 bits: marker(1) + vop_coded(1); the marker is not validated. */
+ getbits = viddec_pm_get_bits(parent, &code, 2);
+ BREAK_GETBITS_REQD_MISSING(getbits, ret);
+
+ vidObjPlane->vop_coded = code & 0x1;
+ if(vidObjPlane->vop_coded == 0)
+ {
+ ret = MP4_STATUS_OK;/* Exit point 1 */
+ break;
+ }
+
+ if(vidObjLay->newpred_enable)
+ {
+ /* New pred mode not supported in HW */
+ DEB("Error: mp4_Parse_VideoObjectPlane: New pred in vidObjPlane is not supported\n");
+ ret = MP4_STATUS_NOTSUPPORT;
+ break;
+ }
+
+ /* vop_rounding_type is present only for P-VOPs and GMC S-VOPs. */
+ if ((vidObjLay->video_object_layer_shape != MP4_SHAPE_TYPE_BINARYONLY) &&
+ ((vidObjPlane->vop_coding_type == MP4_VOP_TYPE_P) ||
+ ((vidObjPlane->vop_coding_type == MP4_VOP_TYPE_S) &&
+ (vidObjLay->sprite_enable == MP4_SPRITE_GMC))))
+ {
+ getbits = viddec_pm_get_bits(parent, &code, 1);
+ BREAK_GETBITS_REQD_MISSING(getbits, ret);
+ vidObjPlane->vop_rounding_type = code;
+ }
+
+ if (vidObjLay->reduced_resolution_vop_enable &&
+ (vidObjLay->video_object_layer_shape == MP4_SHAPE_TYPE_RECTANGULAR) &&
+ ((vidObjPlane->vop_coding_type == MP4_VOP_TYPE_I) ||
+ (vidObjPlane->vop_coding_type == MP4_VOP_TYPE_P)))
+ {
+ getbits = viddec_pm_get_bits(parent, &code, 1);
+ BREAK_GETBITS_REQD_MISSING(getbits, ret);
+ vidObjPlane->vop_reduced_resolution = code;
+ if (vidObjPlane->vop_reduced_resolution)
+ {
+ DEB("Error: mp4_Parse_VideoObjectPlane: Reduced Resolution vidObjPlane is not supported\n");
+ ret = MP4_STATUS_NOTSUPPORT;
+ break;
+ }
+ }
+
+ if (vidObjLay->video_object_layer_shape != MP4_SHAPE_TYPE_RECTANGULAR)
+ {
+ /* we support only rectangular shapes so the following logic is not required */
+ ret = MP4_STATUS_NOTSUPPORT;
+ break;
+ }
+
+ if ((vidObjLay->video_object_layer_shape != MP4_SHAPE_TYPE_BINARYONLY) &&
+ (!vidObjLay->complexity_estimation_disable))
+ {
+ /* Not required according to DE team */
+ //read_vop_complexity_estimation_header();
+ ret = MP4_STATUS_NOTSUPPORT;
+ break;
+ }
+
+ if (vidObjLay->video_object_layer_shape != MP4_SHAPE_TYPE_BINARYONLY)
+ {
+ getbits = viddec_pm_get_bits(parent, &code, 3);
+ BREAK_GETBITS_REQD_MISSING(getbits, ret);
+ vidObjPlane->intra_dc_vlc_thr = code;
+ if (vidObjLay->interlaced)
+ {
+ /* 2 bits: top_field_first(1) + alternate_vertical_scan_flag(1) */
+ getbits = viddec_pm_get_bits(parent, &code, 2);
+ BREAK_GETBITS_REQD_MISSING(getbits, ret);
+ vidObjPlane->top_field_first = ((code & 0x2) > 0);
+ vidObjPlane->alternate_vertical_scan_flag = code & 0x1;
+ }
+ }
+
+ /* Sprite S-VOP: warping trajectory and optional brightness change. */
+ if (((vidObjLay->sprite_enable == MP4_SPRITE_STATIC) || (vidObjLay->sprite_enable == MP4_SPRITE_GMC)) &&
+ (vidObjPlane->vop_coding_type == MP4_VOP_TYPE_S))
+ {
+ if (vidObjLay->sprite_info.no_of_sprite_warping_points > 0){
+ if (mp4_Sprite_Trajectory(parent, vidObjLay, vidObjPlane) != MP4_STATUS_OK){
+ break;
+ }
+ }
+ vidObjPlane->brightness_change_factor = 0;
+ if (vidObjLay->sprite_info.sprite_brightness_change)
+ {
+ int32_t change=0;
+ if(mp4_brightness_change(parent, &change) == MP4_STATUS_PARSE_ERROR)
+ {
+ break;
+ }
+ vidObjPlane->brightness_change_factor = change;
+ }
+
+ if (vidObjLay->sprite_enable == MP4_SPRITE_STATIC)
+ {
+ /* SGA: IS decode sprite not required. Is static even supported */
+ ret = MP4_STATUS_OK;/* Exit point 2 */
+ break;
+ }
+ }
+
+ if (vidObjLay->video_object_layer_shape != MP4_SHAPE_TYPE_BINARYONLY)
+ {
+ // Length of vop_quant is specified by quant_precision
+ getbits = viddec_pm_get_bits(parent, &code, vidObjLay->quant_precision);
+ BREAK_GETBITS_REQD_MISSING(getbits, ret);
+ vidObjPlane->vop_quant = code;
+ if (vidObjLay->video_object_layer_shape == MP4_SHAPE_TYPE_GRAYSCALE)
+ {
+ ret = MP4_STATUS_NOTSUPPORT;
+ break;
+ }
+ /* Forward fcode for P/S/B VOPs; 0 is illegal. */
+ if (vidObjPlane->vop_coding_type != MP4_VOP_TYPE_I)
+ {
+ vidObjPlane->vop_fcode_forward = 0;
+ getbits = viddec_pm_get_bits(parent, &code, 3);
+ BREAK_GETBITS_REQD_MISSING(getbits, ret);
+ vidObjPlane->vop_fcode_forward = code & 0x7;
+ if (vidObjPlane->vop_fcode_forward == 0)
+ {
+ DEB("Error: vop_fcode_forward == 0\n");
+ break;
+ }
+ }
+ /* Backward fcode for B-VOPs only; 0 is illegal. */
+ if (vidObjPlane->vop_coding_type == MP4_VOP_TYPE_B)
+ {
+ vidObjPlane->vop_fcode_backward = 0;
+ getbits = viddec_pm_get_bits(parent, &code, 3);
+ BREAK_GETBITS_REQD_MISSING(getbits, ret);
+ vidObjPlane->vop_fcode_backward = code &0x7;
+ if (vidObjPlane->vop_fcode_backward == 0)
+ {
+ DEB("Error: vop_fcode_backward == 0\n");
+ break;
+ }
+ }
+ if (!vidObjLay->scalability)
+ {
+ if ((vidObjLay->video_object_layer_shape != MP4_SHAPE_TYPE_RECTANGULAR) &&
+ (vidObjPlane->vop_coding_type != MP4_VOP_TYPE_I))
+ {
+ ret = MP4_STATUS_NOTSUPPORT;
+ break;
+ }
+ // The remaining data contains the macroblock information that is handled by the BSP
+ // The offsets to be sent to the BSP are obtained in the workload population
+ }
+ else
+ {
+ ret = MP4_STATUS_NOTSUPPORT;
+ break;
+ }
+ }
+ else
+ {/* Binary Not supported */
+ ret = MP4_STATUS_NOTSUPPORT;
+ break;
+ }
+ /* Since we made it all the way here it a success condition */
+ ret = MP4_STATUS_OK; /* Exit point 3 */
+ }while(0);
+
+ /* false => record as a frame-level (not header) bitstream error. */
+ mp4_set_hdr_bitstream_error(parser, false, ret);
+
+ return ret;
+} // mp4_Parse_VideoObjectPlane
diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectplane.h b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectplane.h
new file mode 100644
index 0000000..b54f642
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_videoobjectplane.h
@@ -0,0 +1,11 @@
+#ifndef VIDDEC_MP4_VIDEOOBJECTPLANE_H
+#define VIDDEC_MP4_VIDEOOBJECTPLANE_H
+#include "viddec_fw_debug.h"
+#include "viddec_parser_ops.h"
+#include "viddec_mp4_parse.h"
+
+/* GOV and VOP header parsers; defined in viddec_mp4_videoobjectplane.c. */
+
+mp4_Status_t mp4_Parse_GroupOfVideoObjectPlane(void *parent, viddec_mp4_parser_t *parser);
+
+mp4_Status_t mp4_Parse_VideoObjectPlane(void *parent, viddec_mp4_parser_t *parser);
+
+#endif
diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_visualobject.c b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_visualobject.c
new file mode 100644
index 0000000..36c0b29
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_visualobject.c
@@ -0,0 +1,287 @@
+#include "viddec_mp4_visualobject.h"
+
+/* Return true iff id is a supported visual_object_verid (1, 2, 4 or 5). */
+static inline uint8_t mp4_pvt_isValid_verID(uint8_t id)
+{
+ uint8_t ret=true;
+ switch(id)
+ {
+ case 1:
+ case 2:
+ case 4:
+ case 5:
+ {
+ break;
+ }
+ default:
+ {
+ ret = false;
+ break;
+ }
+ }
+ return ret;
+} // mp4_pvt_isValid_verID
+
+/*
+ * Parse the optional video_signal_type block of a Visual Object header.
+ * Defaults (written first, so they hold when the optional fields are
+ * absent): video_format=5, video_range=0, colour_primaries=1,
+ * transfer_characteristics=1, matrix_coefficients=1.
+ * When present: 5 bits = video_format(3) + video_range(1) +
+ * colour_description(1), then an optional 24-bit colour description
+ * (primaries(8) + transfer(8) + matrix(8)).
+ */
+static mp4_Status_t mp4_Parse_video_signal_type(void *parent, mp4_VideoSignalType_t *vidSignal)
+{
+ uint32_t data=0;
+ int32_t getbits=0;
+ mp4_Status_t ret = MP4_STATUS_PARSE_ERROR;
+
+ /* Set default values defined in spec first */
+ vidSignal->video_format = 5;
+ vidSignal->video_range = 0;
+ vidSignal->colour_primaries = 1;
+ vidSignal->transfer_characteristics = 1;
+ vidSignal->matrix_coefficients = 1;
+ do
+ {
+ getbits = viddec_pm_get_bits(parent, &data, 1);
+ BREAK_GETBITS_FAIL(getbits, ret);
+ vidSignal->is_video_signal_type = (data > 0);
+ if(vidSignal->is_video_signal_type)
+ {
+ getbits = viddec_pm_get_bits(parent, &data, 5);
+ BREAK_GETBITS_FAIL(getbits, ret);
+ vidSignal->is_colour_description = data & 0x1;
+ vidSignal->video_range = ((data & 0x2) > 0);
+ data = data >> 2;
+ vidSignal->video_format = data & 0x7;
+ if(vidSignal->is_colour_description)
+ {
+ getbits = viddec_pm_get_bits(parent, &data, 24);
+ BREAK_GETBITS_FAIL(getbits, ret);
+ vidSignal->colour_primaries = (data >> 16) & 0xFF;
+ vidSignal->transfer_characteristics = (data >> 8) & 0xFF;
+ vidSignal->matrix_coefficients = data & 0xFF;
+ }
+ }
+ ret = MP4_STATUS_OK;
+ }while(0);
+
+ return ret;
+} // mp4_Parse_video_signal_type
+
+/*
+ * Fold a parse status into the parser's sticky bitstream_error bitmask.
+ * hdr_flag selects header-level (HDR_*) vs frame-level (FRM_*) error bits;
+ * on the header path the mask is additionally clipped to MP4_HDR_ERROR_MASK.
+ * parse_status is treated as a bitmask, so combined statuses (e.g.
+ * NOTSUPPORT | REQD_DATA_ERROR) set multiple error bits.
+ */
+void mp4_set_hdr_bitstream_error(viddec_mp4_parser_t *parser, uint8_t hdr_flag, mp4_Status_t parse_status)
+{
+ //DEB("Entering mp4_set_hdr_bitstream_error: bs_err: 0x%x, hdr: %d, parse_status: %d\n",
+ // parser->bitstream_error, hdr_flag, parse_status);
+
+ if(hdr_flag)
+ {
+ if(parse_status & MP4_STATUS_NOTSUPPORT)
+ parser->bitstream_error |= MP4_BS_ERROR_HDR_UNSUP;
+ if(parse_status & MP4_STATUS_PARSE_ERROR)
+ parser->bitstream_error |= MP4_BS_ERROR_HDR_PARSE;
+ if(parse_status & MP4_STATUS_REQD_DATA_ERROR)
+ parser->bitstream_error |= MP4_BS_ERROR_HDR_NONDEC;
+ parser->bitstream_error &= MP4_HDR_ERROR_MASK;
+ }
+ else
+ {
+ if(parse_status & MP4_STATUS_NOTSUPPORT)
+ parser->bitstream_error |= MP4_BS_ERROR_FRM_UNSUP;
+ if(parse_status & MP4_STATUS_PARSE_ERROR)
+ parser->bitstream_error |= MP4_BS_ERROR_FRM_PARSE;
+ if(parse_status & MP4_STATUS_REQD_DATA_ERROR)
+ parser->bitstream_error |= MP4_BS_ERROR_FRM_NONDEC;
+ }
+
+ //DEB("Exiting mp4_set_hdr_bitstream_error: bs_err: 0x%x\n", parser->bitstream_error);
+
+ return;
+} // mp4_set_hdr_bitstream_error
+
+/*
+ * Parse the Visual Object Sequence header: a single 8-bit
+ * profile_and_level_indication. Only the Simple and Advanced Simple
+ * profile levels enumerated below are accepted; anything else marks the
+ * stream unsupported/non-decodable via parser->bitstream_error.
+ * Note: bitstream_error is assigned (not OR-ed) here, resetting any
+ * previously accumulated error bits on success.
+ */
+mp4_Status_t mp4_Parse_VisualSequence(void *parent, viddec_mp4_parser_t *parser)
+{
+ uint32_t data=0;
+ int32_t getbits=0;
+ mp4_Status_t ret = MP4_STATUS_PARSE_ERROR;
+
+ getbits = viddec_pm_get_bits(parent, &data, 8);
+ if(getbits != -1)
+ {
+ parser->info.profile_and_level_indication = data & 0xFF;
+ // If present, check for validity
+ switch(parser->info.profile_and_level_indication)
+ {
+ case MP4_SIMPLE_PROFILE_LEVEL_0:
+ case MP4_SIMPLE_PROFILE_LEVEL_1:
+ case MP4_SIMPLE_PROFILE_LEVEL_2:
+ case MP4_SIMPLE_PROFILE_LEVEL_3:
+ case MP4_SIMPLE_PROFILE_LEVEL_4a:
+ case MP4_SIMPLE_PROFILE_LEVEL_5:
+ case MP4_SIMPLE_PROFILE_LEVEL_6:
+ case MP4_ADVANCED_SIMPLE_PROFILE_LEVEL_0:
+ case MP4_ADVANCED_SIMPLE_PROFILE_LEVEL_1:
+ case MP4_ADVANCED_SIMPLE_PROFILE_LEVEL_2:
+ case MP4_ADVANCED_SIMPLE_PROFILE_LEVEL_3:
+ case MP4_ADVANCED_SIMPLE_PROFILE_LEVEL_4:
+ case MP4_ADVANCED_SIMPLE_PROFILE_LEVEL_5:
+ case MP4_ADVANCED_SIMPLE_PROFILE_LEVEL_3B:
+ parser->bitstream_error = MP4_BS_ERROR_NONE;
+ ret = MP4_STATUS_OK;
+ break;
+ default:
+ parser->bitstream_error = MP4_BS_ERROR_HDR_UNSUP | MP4_BS_ERROR_HDR_NONDEC;
+ break;
+ }
+ }
+ else
+ {
+ parser->bitstream_error = MP4_BS_ERROR_HDR_PARSE | MP4_BS_ERROR_HDR_NONDEC;
+ }
+
+ return ret;
+} // mp4_Parse_VisualSequence
+
+/*
+ * Parse the Visual Object header: optional identifier (verid + priority),
+ * mandatory 4-bit visual_object_type (only VIDEO is supported), then the
+ * video_signal_type block. Records header bitstream errors and appends a
+ * VIDDEC_WORKLOAD_MPEG4_VISUAL_SEQ_OBJ item -- the workload item is built
+ * even when parsing broke out early, from whatever fields were set.
+ */
+mp4_Status_t mp4_Parse_VisualObject(void *parent, viddec_mp4_parser_t *parser)
+{
+ mp4_Info_t *pInfo = &(parser->info);
+ mp4_VisualObject_t *visObj = &(pInfo->VisualObject);
+ uint32_t data=0;
+ int32_t getbits=0;
+ mp4_Status_t ret = MP4_STATUS_PARSE_ERROR;
+
+ do
+ {
+ getbits = viddec_pm_get_bits(parent, &data, 1);
+ BREAK_GETBITS_FAIL(getbits, ret);
+ visObj->is_visual_object_identifier = (data > 0);
+
+ visObj->visual_object_verid = 1; /* Default value as per spec */
+ if (visObj->is_visual_object_identifier)
+ {
+ /* 7 bits: visual_object_verid(4) + visual_object_priority(3).
+ NOTE(review): this get_bits return value is not checked. */
+ viddec_pm_get_bits(parent, &data, 7);
+ visObj->visual_object_priority = data & 0x7;
+ data = data >> 3;
+ if(mp4_pvt_isValid_verID(data & 0xF))
+ {
+ visObj->visual_object_verid = data & 0xF;
+ }
+ else
+ {
+ DEB("Warning: Unsupported visual_object_verid\n");
+ parser->bitstream_error |= MP4_BS_ERROR_HDR_UNSUP;
+ // Continue parsing as it is not a required field for decoder
+ }
+ }
+
+ getbits = viddec_pm_get_bits(parent, &data, 4);
+ BREAK_GETBITS_FAIL(getbits, ret);
+ visObj->visual_object_type = data;
+ if (visObj->visual_object_type != MP4_VISUAL_OBJECT_TYPE_VIDEO)
+ {
+ /* VIDEO is the only supported type */
+ DEB("Error: Unsupported object: visual_object_type != video ID\n");
+ parser->bitstream_error |= MP4_BS_ERROR_HDR_UNSUP;
+ break;
+ }
+
+ /* Not required to check for visual_object_type as we already handle it above */
+ ret = mp4_Parse_video_signal_type(parent, &(visObj->VideoSignalType));
+
+ // No need to check for user data or visual object layer because they have a different start code
+ // and will not be part of this header
+
+ } while(0);
+
+ mp4_set_hdr_bitstream_error(parser, true, ret);
+
+ // POPULATE WORKLOAD ITEM
+ {
+ viddec_workload_item_t wi;
+ mp4_VideoSignalType_t *vst = &(visObj->VideoSignalType);
+
+ wi.vwi_type = VIDDEC_WORKLOAD_MPEG4_VISUAL_SEQ_OBJ;
+
+ wi.mp4_vs_vo.vs_item = 0;
+ wi.mp4_vs_vo.video_signal_type = 0;
+ wi.mp4_vs_vo.color_desc = 0;
+
+ viddec_fw_mp4_vs_set_profile_and_level_indication(&wi.mp4_vs_vo, pInfo->profile_and_level_indication);
+
+ viddec_fw_mp4_vo_set_video_signal_type(&wi.mp4_vs_vo, vst->is_video_signal_type);
+ if(vst->is_video_signal_type)
+ {
+ viddec_fw_mp4_vo_set_video_range(&wi.mp4_vs_vo, vst->video_range);
+ viddec_fw_mp4_vo_set_video_format(&wi.mp4_vs_vo, vst->video_format);
+ viddec_fw_mp4_vo_set_colour_description(&wi.mp4_vs_vo, vst->is_colour_description);
+ if(vst->is_colour_description)
+ {
+ viddec_fw_mp4_vo_set_transfer_char(&wi.mp4_vs_vo, vst->transfer_characteristics);
+ viddec_fw_mp4_vo_set_color_primaries(&wi.mp4_vs_vo, vst->colour_primaries);
+ }
+ }
+
+ /* The code treats a return of 1 from append_workitem as success. */
+ ret = viddec_pm_append_workitem(parent, &wi);
+ if(ret == 1)
+ ret = MP4_STATUS_OK;
+ }
+
+ return ret;
+} // mp4_Parse_VisualObject
+
+/*
+ * Consume user data bytes following a user-data start code and forward them
+ * as workload items. The workitem type is chosen from the previous start
+ * code (sequence / visual object / GOV / VOL scope); payloads are packed
+ * 11 bytes at a time, with the final partial payload zero-padded.
+ * Reads until viddec_pm_get_bits reports no more data.
+ */
+mp4_Status_t mp4_Parse_UserData(void *parent, viddec_mp4_parser_t *parser)
+{
+ mp4_Status_t ret = MP4_STATUS_PARSE_ERROR;
+ uint32_t user_data;
+ viddec_workload_item_t wi;
+
+ DEB("ParseUser-prev_sc: 0x%x\n", parser->prev_sc);
+
+ /* find the scope based on start code sc */
+ switch(parser->prev_sc) {
+ case MP4_SC_VISUAL_OBJECT_SEQUENCE:
+ wi.vwi_type = VIDDEC_WORKLOAD_SEQ_USER_DATA;
+ break;
+ case MP4_SC_VISUAL_OBJECT:
+ wi.vwi_type = VIDDEC_WORKLOAD_VISUAL_OBJ_USER_DATA;
+ break;
+ case MP4_SC_GROUP_OF_VOP:
+ wi.vwi_type = VIDDEC_WORKLOAD_GOP_USER_DATA;
+ break;
+ case MP4_SC_VIDEO_OBJECT_LAYER_MIN:
+ wi.vwi_type = VIDDEC_WORKLOAD_VIDEO_OBJ_USER_DATA;
+ break;
+ default:
+ wi.vwi_type = VIDDEC_WORKLOAD_INVALID; //ERROR - should not happen
+ break;
+ }
+
+ /* Read 1 byte of user data and store it in workitem for the current stream level (VS/VO/VOL/GVOP).
+ Keep adding data payloads till it reaches size 11. When it is 11, the maximum user data payload size,
+ append the workitem. This loop is repeated till all user data is extracted and appended. */
+ wi.user_data.size = 0;
+ while(viddec_pm_get_bits(parent, &user_data, 8) != -1)
+ {
+ /* Store the valid byte in data payload */
+ wi.user_data.data_payload[wi.user_data.size] = user_data;
+ wi.user_data.size++;
+
+ /* When size exceeds payload size, append workitem and continue */
+ if (wi.user_data.size >= 11)
+ {
+ viddec_pm_setup_userdata(&wi);
+ ret = viddec_pm_append_workitem(parent, &wi);
+ wi.user_data.size = 0;
+ }
+ }
+ /* If size is not 0, append remaining user data. */
+ if (wi.user_data.size > 0)
+ {
+ int i;
+ /* Zero-pad the unused tail of the 11-byte payload. */
+ for(i=wi.user_data.size;i<11;i++)
+ {
+ wi.user_data.data_payload[i] = 0;
+ }
+ viddec_pm_setup_userdata(&wi);
+ ret = viddec_pm_append_workitem(parent, &wi);
+ wi.user_data.size = 0;
+ }
+
+ /* The code treats a return of 1 from append_workitem as success. */
+ if(ret == 1)
+ ret = MP4_STATUS_OK;
+
+ return ret;
+} // mp4_Parse_UserData
+
diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_visualobject.h b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_visualobject.h
new file mode 100644
index 0000000..0aec9ad
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_mp4_visualobject.h
@@ -0,0 +1,13 @@
+#ifndef VIDDEC_MP4_VISUALOBJECT_H
+#define VIDDEC_MP4_VISUALOBJECT_H
+#include "viddec_fw_debug.h"
+#include "viddec_parser_ops.h"
+#include "viddec_mp4_parse.h"
+
+/* Visual Object Sequence / Visual Object / user-data header parsers;
+ defined in viddec_mp4_visualobject.c. */
+
+mp4_Status_t mp4_Parse_VisualSequence(void *parent, viddec_mp4_parser_t *parser);
+
+mp4_Status_t mp4_Parse_VisualObject(void *parent, viddec_mp4_parser_t *parser);
+
+mp4_Status_t mp4_Parse_UserData(void *parent, viddec_mp4_parser_t *parser);
+
+#endif
diff --git a/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_parse_sc_mp4.c b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_parse_sc_mp4.c
new file mode 100644
index 0000000..6a34500
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/mp4/parser/viddec_parse_sc_mp4.c
@@ -0,0 +1,143 @@
+#include "viddec_pm_parse.h"
+#include "viddec_fw_debug.h"
+#include "viddec_mp4_parse.h"
+
+/* Parse for SC code of pattern 0x00 0x00 0xXX in the current buffer. Returns either sc found or success.
+   The context is updated with the current phase and sc_code position in the buffer.
+
+   What is phase?: phase is a value between [0-4]; we keep track of consecutive '0's with this.
+   Any time a '0' is found it is incremented by 1 (up to 2) and reset to 0 if a zero is not found.
+   If a 0xXX code is found and the current phase is 2, it is changed to 3, which means we found the pattern
+   we are looking for. It is incremented to 4 once we see a byte after this pattern.
+
+   For MP4 there are two start-code patterns, LVH & SVH. LVH is the same as other codecs (00 00 01); SVH,
+   A.K.A. H263, is (00 00 8X). So we have to look for both kinds of start codes. The spec doesn't
+   explicitly say whether both of them can exist in a stream, so the current implementation will assume
+   that only one of them is present in a given stream to simplify implementation. The reason it can
+   get complicated is that a resync marker in LVH can potentially be (00 00 8), which would cause a false
+   detection of an SVH start code.
+*/
+
+uint32_t viddec_parse_sc_mp4(void *in, void *pcxt, void *sc_state)
+{
+ uint8_t *ptr;
+ uint32_t size;
+ uint32_t data_left=0, phase = 0, ret = 0;
+ viddec_sc_parse_cubby_cxt_t *cxt;
+ viddec_mp4_parser_t *p_info;
+
+ cxt = ( viddec_sc_parse_cubby_cxt_t *)in;
+ viddec_sc_prefix_state_t *state = (viddec_sc_prefix_state_t *)sc_state;
+ size = 0;
+ data_left = cxt->size;
+ ptr = cxt->buf;
+ phase = cxt->phase;
+ cxt->sc_end_pos = -1;
+ p_info = (viddec_mp4_parser_t *)pcxt;
+
+ /* parse until there is more data and start code not found */
+ while((data_left > 0) &&(phase < 3))
+ {
+ /* Check if we are byte aligned & phase=0; if that's the case we can check
+ a word at a time instead of a byte */
+ if(((((uint32_t)ptr) & 0x3) == 0) && (phase == 0))
+ {
+ while(data_left > 3)
+ {
+ uint32_t data;
+ char mask1 = 0, mask2=0;
+
+ data = *((uint32_t *)ptr);
+#ifndef MFDBIGENDIAN
+ data = SWAP_WORD(data);
+#endif
+ mask1 = (FIRST_STARTCODE_BYTE != (data & SC_BYTE_MASK0));
+ mask2 = (FIRST_STARTCODE_BYTE != (data & SC_BYTE_MASK1));
+ /* If second byte and fourth byte are not zero's then we cannot have a start code here as we need
+ two consecutive zero bytes for a start code pattern */
+ if(mask1 && mask2)
+ {/* Success so skip 4 bytes and start over */
+ ptr+=4;size+=4;data_left-=4;
+ continue;
+ }
+ else
+ {
+ break;
+ }
+ }
+ }
+
+ /* At this point either data is not on a word boundary or phase > 0 or On a word boundary but we detected
+ two zero bytes in the word so we look one byte at a time*/
+ if(data_left > 0)
+ {
+ if(*ptr == FIRST_STARTCODE_BYTE)
+ {/* Phase can be 3 only if third start code byte is found */
+ phase++;
+ ptr++;size++;data_left--;
+ if(phase > 2)
+ {
+ phase = 2;
+
+ if ( (((uint32_t)ptr) & 0x3) == 0 )
+ {
+ while( data_left > 3 )
+ {
+ if(*((uint32_t *)ptr) != 0)
+ {
+ break;
+ }
+ ptr+=4;size+=4;data_left-=4;
+ }
+ }
+ }
+ }
+ else
+ {
+ uint8_t normal_sc=0, short_sc=0;
+ if(phase == 2)
+ {
+ normal_sc = (*ptr == THIRD_STARTCODE_BYTE);
+ short_sc = (p_info->ignore_scs == 0) && (SHORT_THIRD_STARTCODE_BYTE == ( *ptr & 0xFC));
+ }
+
+ if(!(normal_sc | short_sc))
+ {
+ phase = 0;
+ }
+ else
+ {/* Match for start code so update context with byte position */
+ cxt->sc_end_pos = size;
+ phase = 3;
+ p_info->cur_sc_prefix = p_info->next_sc_prefix;
+ p_info->next_sc_prefix = (normal_sc) ? 1: 0;
+ if(normal_sc)
+ {
+ p_info->ignore_scs=1;
+ }
+ else
+ {
+ /* For short start code since start code is in one nibble just return at this point */
+ phase += 1;
+ state->next_sc = *ptr;
+ state->second_scprfx_length = 2;
+ ret=1;
+ break;
+ }
+ }
+ ptr++;size++;data_left--;
+ }
+ }
+ }
+ if((data_left > 0) && (phase == 3))
+ {
+ cxt->sc_end_pos++;
+ state->next_sc = cxt->buf[cxt->sc_end_pos];
+ state->second_scprfx_length = 3;
+ phase++;
+ ret = 1;
+ }
+ cxt->phase = phase;
+ /* Return SC found only if phase is 4, else always success */
+ return ret;
+}
diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/include/vc1common.h b/mix_vbp/viddec_fw/fw/codecs/vc1/include/vc1common.h
new file mode 100644
index 0000000..d57a9bf
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/vc1/include/vc1common.h
@@ -0,0 +1,111 @@
+/* ///////////////////////////////////////////////////////////////////////
+//
+// INTEL CORPORATION PROPRIETARY INFORMATION
+// This software is supplied under the terms of a license agreement or
+// nondisclosure agreement with Intel Corporation and may not be copied
+// or disclosed except in accordance with the terms of that agreement.
+// Copyright (c) 2001-2006 Intel Corporation. All Rights Reserved.
+//
+// Description: VC1 header.
+//
+*/
+
+#ifndef _VC1_COMMON_H_
+#define _VC1_COMMON_H_
+
+/* If the pixel data is left near an emulation prevention sequence, the decoder will be unaware
+ unless we send some previous bytes */
+//#define PADDING_FOR_EMUL 3
+#define PADDING_FOR_EMUL 0
+
+#define GET_BLSB( name, bitf ) BLSB_MFD_##name##_##bitf
+#define GET_BMSK( name, bitf ) BMSK_MFD_##name##_##bitf
+
+#define BF_READ( name, bitf, value ) ((value & GET_BMSK(name, bitf) ) >> GET_BLSB(name, bitf) )
+#define BF_WRITE( name, bitf, value, data ) value = ((value & ~GET_BMSK(name, bitf)) | ((data) << GET_BLSB(name, bitf)))
+
+enum vc1_workload_item_type
+{
+ VIDDEC_WORKLOAD_VC1_DMEM = VIDDEC_WORKLOAD_DECODER_SPECIFIC,
+ VIDDEC_WORKLOAD_VC1_BITOFFSET,
+ VIDDEC_WORKLOAD_VC1_BITPLANE0,
+ VIDDEC_WORKLOAD_VC1_BITPLANE1,
+ VIDDEC_WORKLOAD_VC1_BITPLANE2,
+ VIDDEC_WORKLOAD_VC1_PAST_FRAME = VIDDEC_WORKLOAD_REF_FRAME_SOURCE_0,
+ VIDDEC_WORKLOAD_VC1_FUTURE_FRAME,
+};
+
+typedef enum
+{
+ vc1_ProgressiveFrame = 0,
+ vc1_InterlacedFrame = 2,
+ vc1_InterlacedField = 3,
+ vc1_PictureFormatNone
+} vc1_fcm;
+
+/** This enumeration defines the various frame types as defined in PTYPE syntax
+element.
+PTYPE interpretation depends on bitstream profile. The value that needs to get
+programmed in the frame_type register 0x2218 is this generic enum obtained
+from Canmore code.
+Changing this enum to match the spec for each profile caused md5 mismatches.
+TODO: Why are these the values to program - is this the case with reference decoder?
+*/
+enum
+{
+ VC1_I_FRAME = (1 << 0),
+ VC1_P_FRAME = (1 << 1),
+ VC1_B_FRAME = (1 << 2),
+ VC1_BI_FRAME = VC1_I_FRAME | VC1_B_FRAME,
+ VC1_SKIPPED_FRAME = (1 << 3) | VC1_P_FRAME
+};
+
+enum {
+ vc1_FrameDone = 1 << 0,
+ vc1_FieldDone = 1 << 1,
+ vc1_SliceDone = 1 << 2,
+ vc1_Field1Done = 1 << 3,
+ vc1_Field2Done = 1 << 4,
+ vc1_FrameError = 1 << 8,
+};
+
+typedef struct {
+ /* 0x00 */ uint32_t general;
+ /* 0x04 */ uint32_t stream_format1;
+ /* 0x08 */ uint32_t coded_size;
+ /* 0x0c */ uint32_t stream_format2;
+ /* 0x10 */ uint32_t entrypoint1;
+ /* 0x14 */ uint32_t range_map;
+ /* 0x18 */ uint32_t frame_type;
+ /* 0x1c */ uint32_t recon_control;
+ /* 0x20 */ uint32_t mv_control;
+ /* 0x24 */ uint32_t intcomp_fwd_top;
+ /* 0x28 */ uint32_t ref_bfraction;
+ /* 0x2c */ uint32_t blk_control;
+ /* 0x30 */ uint32_t trans_data;
+ /* 0x34 */ uint32_t vop_dquant;
+#define NUM_REF_ID 4
+ /* 0x38-0x48 */ uint32_t ref_frm_id[NUM_REF_ID];
+ /* 0x48 */ uint32_t fieldref_ctrl_id;
+ /* 0x4c */ uint32_t auxfrmctrl;
+ /* 0x50 */ uint32_t imgstruct;
+ /* 0x54 */ uint32_t alt_frame_type;
+ /* 0x58 */ uint32_t intcomp_fwd_bot;
+ /* 0x5c */ uint32_t intcomp_bwd_top;
+ /* 0x60 */ uint32_t intcomp_bwd_bot;
+ /* 0x64 */ uint32_t _stuffing;
+} VC1D_SPR_REGS;
+
+/*
+In VC1, past reference is the fwd reference and future reference is the backward reference
+i.e. P frame has only a forward reference and B frame has both a forward and a backward reference.
+*/
+enum {
+ VC1_FRAME_CURRENT_REF = 0,
+ VC1_FRAME_CURRENT_DIS,
+ VC1_FRAME_PAST,
+ VC1_FRAME_FUTURE,
+};
+
+#endif //_VC1_COMMON_H_
+
diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/mix_vbp_vc1_stubs.c b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/mix_vbp_vc1_stubs.c
new file mode 100644
index 0000000..a2d6721
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/mix_vbp_vc1_stubs.c
@@ -0,0 +1,16 @@
+#include "vc1.h"
+
+void vc1_start_new_frame (void *parent, vc1_viddec_parser_t *parser )
+{
+ return;
+}
+
+void vc1_end_frame (vc1_viddec_parser_t *parser)
+{
+ return;
+}
+
+int32_t vc1_parse_emit_current_frame( void *parent, vc1_viddec_parser_t *parser )
+{
+ return(0);
+}
diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1.h b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1.h
new file mode 100644
index 0000000..8416b24
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1.h
@@ -0,0 +1,224 @@
+/* ///////////////////////////////////////////////////////////////////////
+//
+// INTEL CORPORATION PROPRIETARY INFORMATION
+// This software is supplied under the terms of a license agreement or
+// nondisclosure agreement with Intel Corporation and may not be copied
+// or disclosed except in accordance with the terms of that agreement.
+// Copyright (c) 2001-2006 Intel Corporation. All Rights Reserved.
+//
+// Description: VC1 header.
+//
+*/
+
+#ifndef _VC1_H_
+#define _VC1_H_
+
+#ifdef MFD_FIRMWARE
+ typedef unsigned int size_t;
+ #define LOG(...)
+#else
+ #include <stdio.h>
+ #include <unistd.h>
+ #include <stdint.h>
+ enum {
+ NONE = 0,
+ CRITICAL,
+ WARNING,
+ INFO,
+ DEBUG,
+ } log_level;
+
+ #define vc1_log_level DEBUG
+
+ #define LOG( log_lev, format, args ... ) \
+ if (vc1_log_level >= log_lev) { OS_INFO("%s[%d]:: " format "\n", __FUNCTION__ , __LINE__ , ## args ); }
+#endif
+
+#include "viddec_fw_workload.h"
+#include "vc1parse_common_defs.h"
+#include "vc1common.h"
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#define LOG_CRIT(format, args ... ) LOG( CRITICAL, format, ## args)
+#define LOG_WARN(format, args ... ) LOG( WARNING, format, ## args)
+#define LOG_INFO(format, args ... ) LOG( INFO, format, ## args)
+#define LOG_DEBUG(format, args ... ) LOG( DEBUG, format, ## args)
+
+// Seems to be hardware bug: DO NOT TRY TO SWAP BITPLANE0 and BITPLANE2
+// Block Control Register at offset 222C uses Bitplane_raw_ID0 to indicate directmb/fieldtx while
+// and Bitplane_raw_ID2 for acpred/mvtypemb/forwardmb
+// but when we send bitplane index 0 for directmb/fieldtx and bitplane index 2 for acpred/mvtypemb/forwardmb
+// md5 mismatches are seen
+typedef enum
+{
+ BPP_FORWARDMB = VIDDEC_WORKLOAD_VC1_BITPLANE0,
+ BPP_ACPRED = VIDDEC_WORKLOAD_VC1_BITPLANE0,
+ BPP_MVTYPEMB = VIDDEC_WORKLOAD_VC1_BITPLANE0,
+ BPP_OVERFLAGS = VIDDEC_WORKLOAD_VC1_BITPLANE1,
+ BPP_SKIPMB = VIDDEC_WORKLOAD_VC1_BITPLANE1,
+ BPP_DIRECTMB = VIDDEC_WORKLOAD_VC1_BITPLANE2,
+ BPP_FIELDTX = VIDDEC_WORKLOAD_VC1_BITPLANE2,
+} vc1_bpp_type_t;
+
+/* status codes */
+typedef enum {
+ VC1_STATUS_EOF = 1, // end of file
+ VC1_STATUS_OK = 0, // no error
+ VC1_STATUS_NO_MEM = 2, // out of memory
+ VC1_STATUS_FILE_ERROR = 2, // file error
+ VC1_STATUS_NOTSUPPORT = 2, // not supported mode
+ VC1_STATUS_PARSE_ERROR = 2, // fail in parse MPEG-4 stream
+ VC1_STATUS_ERROR = 2 // unknown/unspecified error
+} vc1_Status;
+
+/* VC1 start code values */
+typedef enum {
+ vc1_Forbidden = 0x80,/*0x80-0xFF*/
+ vc1_Reserved1 = 0x09,/*0x00-0x09*/
+ vc1_Reserved2 = 0x10,
+ vc1_Reserved3 = 0x1A,
+ vc1_Reserved4 = 0x20,/*0x20-0x7F*/
+ vc1_SCEndOfSequence = 0x0A,
+ vc1_SCSlice = 0x0B,
+ vc1_SCField = 0x0C,
+ vc1_SCFrameHeader = 0x0D,
+ vc1_SCEntryPointHeader = 0x0E,
+ vc1_SCSequenceHeader = 0x0F,
+ vc1_SCSliceUser = 0x1B,
+ vc1_SCFieldUser = 0x1C,
+ vc1_SCFrameUser = 0x1D,
+ vc1_SCEntryPointUser = 0x1E,
+ vc1_SCSequenceUser = 0x1F
+} vc1_sc;
+
+#if 0
+typedef enum
+{
+ vc1_ProfileSimple = 0, /** Simple profile */
+ vc1_ProfileMain, /** Main profile */
+ vc1_ProfileReserved, /** Reserved */
+ vc1_ProfileAdvanced /** Advanced profile */
+} vc1_Profile;
+#endif
+
+typedef enum
+{
+ vc1_PtypeI = 1,
+ vc1_PtypeP = 2,
+ vc1_PtypeB = 4,
+ vc1_PtypeBI = 5,
+ vc1_PtypeSkipped = 8|2,
+} vc1_ptype;
+
+typedef enum
+{
+ vc1_PtypeII = 0,
+ vc1_PtypeIP = 1,
+ vc1_PtypePI = 2,
+ vc1_PtypePP = 3,
+ vc1_PtypeBB = 4,
+ vc1_PtypeBBI = 5,
+ vc1_PtypeBIB = 6,
+ vc1_PtypeBIBI = 7
+} vc1_fptype;
+
+typedef enum
+{
+ vc1_Imode_Raw = 0, //0x0000
+ vc1_Imode_Norm2, //0x10
+ vc1_Imode_Diff2, //0x001
+ vc1_Imode_Norm6, //0x11
+ vc1_Imode_Diff6, //0x0001
+ vc1_Imode_Rowskip, //0x010
+ vc1_Imode_Colskip, //0x011
+} vc1_Imode;
+
+/* calculation of MAX_BITPLANE_SZ 2048/16x1088/16 pel= 128x68 bit used for bitplane
+ * as rows are packed in DWORDS
+ * we have (128)/32 * 68 Dwords needed for bitplane storage
+ */
+#define MAX_BITPLANE_SZ 272
+
+/* Full Info */
+typedef struct {
+ unsigned char* bufptr; /* current frame, point to header or data */
+ int bitoff; /* mostly point to next frame header or PSC */
+ int picture_info_has_changed;
+ vc1_metadata_t metadata;
+ vc1_PictureLayerHeader picLayerHeader;
+ uint32_t bitplane[MAX_BITPLANE_SZ];
+} vc1_Info;
+
+#ifdef __cplusplus
+}
+#endif
+
+enum {
+ VC1_REF_FRAME_T_MINUS_1 = 0,
+ VC1_REF_FRAME_T_MINUS_2,
+ VC1_REF_FRAME_T_MINUS_0,
+ VC1_NUM_REFERENCE_FRAMES,
+};
+
+enum vc1_sc_seen_flags
+{
+ VC1_SC_INVALID = 0 << 0,
+ VC1_SC_SEQ = 1 << 0,
+ VC1_SC_EP = 1 << 1,
+ VC1_SC_FRM = 1 << 2,
+ VC1_SC_FLD = 1 << 3,
+ VC1_SC_SLC = 1 << 4,
+ VC1_SC_UD = 1 << 5,
+};
+#define VC1_SEQ_MASK VC1_SC_SEQ
+#define VC1_EP_MASK VC1_SC_SEQ | VC1_SC_EP
+#define VC1_FRM_MASK VC1_SC_SEQ | VC1_SC_EP | VC1_SC_FRM
+#define VC1_FLD_MASK VC1_SC_SEQ | VC1_SC_EP | VC1_SC_FRM | VC1_SC_FLD
+
+typedef struct {
+ int id;
+ uint32_t intcomp_top;
+ uint32_t intcomp_bot;
+ int fcm; /* frame coding mode */
+ int type;
+ int anchor[2]; /* one per field */
+ int rr_en; /* range reduction enable flag at sequence layer */
+ int rr_frm; /* range reduction flag at picture layer */
+} ref_frame_t;
+
+typedef struct
+{
+ uint32_t sc_seen_since_last_wkld;
+ uint32_t sc_seen;
+ uint32_t is_frame_start;
+ uint8_t is_reference_picture;
+ uint32_t intcomp_last[4]; /* for B frames */
+ uint32_t intcomp_top[2];
+ uint32_t intcomp_bot[2];
+ vc1_Info info;
+ VC1D_SPR_REGS spr;
+ ref_frame_t ref_frame[VC1_NUM_REFERENCE_FRAMES];
+#ifdef VBP
+ /* A storage area is provided for each type of bit plane. Only one of */
+ /* each type will ever be used for a picture and never more than three */
+ /* bit-planes per picture, and often only one is used. We never clear */
+ /* this data and write into it only when we need to. vc1parse_bitplane.c */
+ /* makes use of these, setting them to one of the bitplane types included */
+ /* in the picture header structure. Those structures are set every */
+ /* time a picture parse begins. */
+ uint32_t bp_forwardmb[4096];
+ uint32_t bp_acpred[4096];
+ uint32_t bp_mvtypemb[4096];
+ uint32_t bp_overflags[4096];
+ uint32_t bp_skipmb[4096];
+ uint32_t bp_directmb[4096];
+ uint32_t bp_fieldtx[4096];
+ uint32_t start_code;
+#endif
+} vc1_viddec_parser_t;
+
+#endif //_VC1_H_
+
diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse.c b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse.c
new file mode 100644
index 0000000..a033385
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse.c
@@ -0,0 +1,557 @@
+/* ///////////////////////////////////////////////////////////////////////
+//
+// INTEL CORPORATION PROPRIETARY INFORMATION
+// This software is supplied under the terms of a license agreement or
+// nondisclosure agreement with Intel Corporation and may not be copied
+// or disclosed except in accordance with the terms of that agreement.
+// Copyright (c) 2008 Intel Corporation. All Rights Reserved.
+//
+// Description: Parses VC-1 bitstream layers down to but not including
+// macroblock layer.
+//
+*/
+
+#include "viddec_fw_debug.h"
+#include "vc1parse.h"
+
+#define VC1_PIXEL_IN_LUMA 16
+
+/*------------------------------------------------------------------------------
+ * Parse modified rcv file, start codes are inserted using rcv2vc1.c.
+ * source is in
+ * http://svn.jf.intel.com/svn/DHG_Src/CESWE_Src/DEV/trunk/sv/mfd/tools/utils.
+ * Assume rcv file width < 90,112 pixels to differentiate from a real VC1
+ * advanced profile header.
+ * Original rcv description is in annex L
+ * Table 263 of SMPTE 421M.
+ */
+vc1_Status vc1_ParseRCVSequenceLayer (void* ctxt, vc1_Info *pInfo)
+{
+ uint32_t result;
+ vc1_Status status = VC1_STATUS_OK;
+ vc1_metadata_t *md = &pInfo->metadata;
+ vc1_RcvSequenceHeader rcv;
+
+ memset(&rcv, 0, sizeof(vc1_RcvSequenceHeader));
+
+ result = viddec_pm_get_bits(ctxt, &rcv.struct_a_rcv, 32);
+ md->width = rcv.struct_a.HORIZ_SIZE;
+ md->height = rcv.struct_a.VERT_SIZE;
+
+ result = viddec_pm_get_bits(ctxt, &rcv.struct_c_rcv, 32);
+ md->PROFILE = rcv.struct_c.PROFILE >> 2;
+ md->LOOPFILTER = rcv.struct_c.LOOPFILTER;
+ md->MULTIRES = rcv.struct_c.MULTIRES;
+ md->FASTUVMC = rcv.struct_c.FASTUVMC;
+ md->EXTENDED_MV = rcv.struct_c.EXTENDED_MV;
+ md->DQUANT = rcv.struct_c.DQUANT;
+ md->VSTRANSFORM = rcv.struct_c.VSTRANSFORM;
+ md->OVERLAP = rcv.struct_c.OVERLAP;
+ md->RANGERED = rcv.struct_c.RANGERED;
+ md->MAXBFRAMES = rcv.struct_c.MAXBFRAMES;
+ md->QUANTIZER = rcv.struct_c.QUANTIZER;
+ md->FINTERPFLAG = rcv.struct_c.FINTERPFLAG;
+#ifdef VBP
+ md->SYNCMARKER = rcv.struct_c.SYNCMARKER;
+#endif
+
+ if ((md->PROFILE == VC1_PROFILE_SIMPLE) ||
+ (md->MULTIRES && md->PROFILE == VC1_PROFILE_MAIN))
+ {
+ md->DQUANT = 0;
+ }
+ // TODO: NEED TO CHECK RESERVED BITS ARE 0
+
+ md->widthMB = (md->width + 15 ) / VC1_PIXEL_IN_LUMA;
+ md->heightMB = (md->height + 15) / VC1_PIXEL_IN_LUMA;
+
+ DEB("rcv: beforemod: res: %dx%d\n", md->width, md->height);
+
+ /* WL takes resolution in unit of 2 pel - sec. 6.2.13.1 */
+ md->width = md->width/2 -1;
+ md->height = md->height/2 -1;
+
+ DEB("rcv: res: %dx%d\n", md->width, md->height);
+
+ // POPULATE WORKLOAD ITEM
+ {
+ viddec_workload_item_t wi;
+
+ wi.vwi_type = VIDDEC_WORKLOAD_VC1_SEQ_HDR_STRUCT_A_C;
+
+ wi.vc1_sh_struct_a_c.size = 0;
+ wi.vc1_sh_struct_a_c.flags = 0;
+ wi.vc1_sh_struct_a_c.pad = 0;
+
+ viddec_fw_vc1_set_rcv_horiz_size(&wi.vc1_sh_struct_a_c, rcv.struct_a.HORIZ_SIZE);
+ viddec_fw_vc1_set_rcv_vert_size(&wi.vc1_sh_struct_a_c, rcv.struct_a.VERT_SIZE);
+
+ viddec_fw_vc1_set_rcv_bitrtq_postproc(&wi.vc1_sh_struct_a_c, rcv.struct_c.BITRTQ_POSTPROC);
+ viddec_fw_vc1_set_rcv_frmrtq_postproc(&wi.vc1_sh_struct_a_c, rcv.struct_c.FRMRTQ_POSTPROC);
+ viddec_fw_vc1_set_rcv_profile(&wi.vc1_sh_struct_a_c, rcv.struct_c.PROFILE);
+ viddec_fw_vc1_set_rcv_level(&wi.vc1_sh_struct_a_c, 0);
+ viddec_fw_vc1_set_rcv_cbr(&wi.vc1_sh_struct_a_c, 0);
+ viddec_fw_vc1_set_rcv_rangered(&wi.vc1_sh_struct_a_c, rcv.struct_c.RANGERED);
+ viddec_fw_vc1_set_rcv_maxbframes(&wi.vc1_sh_struct_a_c, rcv.struct_c.MAXBFRAMES);
+ viddec_fw_vc1_set_rcv_finterpflag(&wi.vc1_sh_struct_a_c, rcv.struct_c.FINTERPFLAG);
+
+ result = viddec_pm_append_workitem(ctxt, &wi);
+ }
+
+ return status;
+}
+
+/*------------------------------------------------------------------------------
+ * Parse sequence layer. This function is only applicable to advanced profile
+ * as simple and main profiles use other mechanisms to communicate these
+ * metadata.
+ * Table 3 of SMPTE 421M.
+ * Table 13 of SMPTE 421M for HRD_PARAM().
+ *------------------------------------------------------------------------------
+ */
+
+vc1_Status vc1_ParseSequenceLayer(void* ctxt, vc1_Info *pInfo)
+{
+ uint32_t tempValue;
+ vc1_Status status = VC1_STATUS_OK;
+ vc1_metadata_t *md = &pInfo->metadata;
+ vc1_SequenceLayerHeader sh;
+ uint32_t result;
+
+ memset(&sh, 0, sizeof(vc1_SequenceLayerHeader));
+
+ // PARSE SEQUENCE HEADER
+ result = viddec_pm_get_bits(ctxt, &sh.flags, 15);
+ if(result == 1)
+ {
+ md->PROFILE = sh.seq_flags.PROFILE;
+#ifdef VBP
+ md->LEVEL = sh.seq_flags.LEVEL;
+#endif
+ }
+
+ result = viddec_pm_get_bits(ctxt, &sh.max_size, 32);
+ if(result == 1)
+ {
+ md->POSTPROCFLAG = sh.seq_max_size.POSTPROCFLAG;
+ md->width = sh.seq_max_size.MAX_CODED_WIDTH;
+ md->height = sh.seq_max_size.MAX_CODED_HEIGHT;
+ md->PULLDOWN = sh.seq_max_size.PULLDOWN;
+ md->INTERLACE = sh.seq_max_size.INTERLACE;
+ md->TFCNTRFLAG = sh.seq_max_size.TFCNTRFLAG;
+ md->FINTERPFLAG = sh.seq_max_size.FINTERPFLAG;
+ md->PSF = sh.seq_max_size.PSF;
+ }
+
+ if (sh.seq_max_size.DISPLAY_EXT == 1)
+ {
+ result = viddec_pm_get_bits(ctxt, &sh.disp_size, 29);
+ if(result == 1)
+ {
+ if (sh.seq_disp_size.ASPECT_RATIO_FLAG == 1)
+ {
+ result = viddec_pm_get_bits(ctxt, &tempValue, 4);
+ sh.ASPECT_RATIO = tempValue;
+ if (sh.ASPECT_RATIO == 15)
+ {
+ result = viddec_pm_get_bits(ctxt, &sh.aspect_size, 16);
+ }
+ }
+
+ result = viddec_pm_get_bits(ctxt, &tempValue, 1);
+ sh.FRAMERATE_FLAG = tempValue;
+ if (sh.FRAMERATE_FLAG == 1)
+ {
+ result = viddec_pm_get_bits(ctxt, &tempValue, 1);
+ sh.FRAMERATEIND = tempValue;
+ if (sh.FRAMERATEIND == 0)
+ {
+ result = viddec_pm_get_bits(ctxt, &sh.framerate_fraction, 12);
+ }
+ else
+ {
+ result = viddec_pm_get_bits(ctxt, &tempValue, 16);
+ sh.FRAMERATEEXP = tempValue;
+ }
+ }
+
+ result = viddec_pm_get_bits(ctxt, &tempValue, 1);
+ sh.COLOR_FORMAT_FLAG = tempValue;
+ if (sh.COLOR_FORMAT_FLAG == 1)
+ {
+ result = viddec_pm_get_bits(ctxt, &sh.color_format, 24);
+ }
+ } // Successful get of display size
+ } // DISPLAY_EXT is 1
+
+ result = viddec_pm_get_bits(ctxt, &tempValue, 1);
+ sh.HRD_PARAM_FLAG = tempValue;
+ if (sh.HRD_PARAM_FLAG == 1)
+ {
+ /* HRD_PARAM(). */
+ result = viddec_pm_get_bits(ctxt, &tempValue, 5);
+ sh.HRD_NUM_LEAKY_BUCKETS = tempValue;
+ md->HRD_NUM_LEAKY_BUCKETS = sh.HRD_NUM_LEAKY_BUCKETS;
+ // Skip the rest of the parsing - hrdinfo is not required for decode or for attributes
+ }
+ else
+ {
+ md->HRD_NUM_LEAKY_BUCKETS = 0;
+ }
+
+ md->widthMB = (((md->width + 1) * 2) + 15) / VC1_PIXEL_IN_LUMA;
+ md->heightMB = (((md->height + 1) * 2) + 15) / VC1_PIXEL_IN_LUMA;
+
+ DEB("md: res: %dx%d\n", md->width, md->height);
+ DEB("sh: dispres: %dx%d\n", sh.seq_disp_size.DISP_HORIZ_SIZE, sh.seq_disp_size.DISP_VERT_SIZE);
+
+ // POPULATE WORKLOAD ITEM
+ {
+ viddec_workload_item_t wi_sl, wi_de;
+
+ wi_sl.vwi_type = VIDDEC_WORKLOAD_SEQUENCE_INFO;
+
+ wi_sl.vc1_sl.size = 0;
+ wi_sl.vc1_sl.flags = 0;
+ wi_sl.vc1_sl.pad = 0;
+
+ viddec_fw_vc1_set_profile(&wi_sl.vc1_sl, sh.seq_flags.PROFILE);
+ viddec_fw_vc1_set_level(&wi_sl.vc1_sl, sh.seq_flags.LEVEL);
+ viddec_fw_vc1_set_colordiff_format(&wi_sl.vc1_sl, sh.seq_flags.COLORDIFF_FORMAT);
+ viddec_fw_vc1_set_pulldown(&wi_sl.vc1_sl, sh.seq_max_size.PULLDOWN);
+ viddec_fw_vc1_set_max_coded_width(&wi_sl.vc1_sl, sh.seq_max_size.MAX_CODED_WIDTH);
+ viddec_fw_vc1_set_max_coded_height(&wi_sl.vc1_sl, sh.seq_max_size.MAX_CODED_HEIGHT);
+
+ viddec_fw_vc1_set_bitrtq_postproc(&wi_sl.vc1_sl, sh.seq_flags.BITRTQ_POSTPROC);
+ viddec_fw_vc1_set_frmrtq_postproc(&wi_sl.vc1_sl, sh.seq_flags.FRMRTQ_POSTPROC);
+ viddec_fw_vc1_set_interlace(&wi_sl.vc1_sl, sh.seq_max_size.INTERLACE);
+ viddec_fw_vc1_set_tfcntrflag(&wi_sl.vc1_sl, sh.seq_max_size.TFCNTRFLAG);
+ viddec_fw_vc1_set_finterpflag(&wi_sl.vc1_sl, sh.seq_max_size.FINTERPFLAG);
+ viddec_fw_vc1_set_psf(&wi_sl.vc1_sl, sh.seq_max_size.PSF);
+ viddec_fw_vc1_set_display_ext(&wi_sl.vc1_sl, sh.seq_max_size.DISPLAY_EXT);
+
+ result = viddec_pm_append_workitem(ctxt, &wi_sl);
+
+ // send DISPLAY EXTENSION metadata if present
+ if (sh.seq_max_size.DISPLAY_EXT)
+ {
+ wi_de.vwi_type = VIDDEC_WORKLOAD_DISPLAY_INFO;
+
+ wi_de.vc1_sl_de.size = 0;
+ wi_de.vc1_sl_de.framerate = 0;
+ wi_de.vc1_sl_de.aspectsize = 0;
+
+ viddec_fw_vc1_set_disp_horiz_size(&wi_de.vc1_sl_de, sh.seq_disp_size.DISP_HORIZ_SIZE);
+ viddec_fw_vc1_set_disp_vert_size(&wi_de.vc1_sl_de, sh.seq_disp_size.DISP_VERT_SIZE);
+ viddec_fw_vc1_set_disp_aspect_ratio_flag(&wi_de.vc1_sl_de, sh.seq_disp_size.ASPECT_RATIO_FLAG);
+ viddec_fw_vc1_set_disp_color_format_flag(&wi_de.vc1_sl_de, sh.COLOR_FORMAT_FLAG);
+ viddec_fw_vc1_set_disp_framerate_flag(&wi_de.vc1_sl_de, sh.FRAMERATE_FLAG);
+ viddec_fw_vc1_set_disp_framerateind(&wi_de.vc1_sl_de, sh.FRAMERATEIND);
+
+ viddec_fw_vc1_set_disp_aspect_ratio(&wi_de.vc1_sl_de, sh.ASPECT_RATIO);
+ viddec_fw_vc1_set_disp_frameratenr(&wi_de.vc1_sl_de, sh.seq_framerate_fraction.FRAMERATENR);
+ viddec_fw_vc1_set_disp_frameratedr(&wi_de.vc1_sl_de, sh.seq_framerate_fraction.FRAMERATEDR);
+ viddec_fw_vc1_set_disp_framerateexp(&wi_de.vc1_sl_de, sh.FRAMERATEEXP);
+
+ viddec_fw_vc1_set_disp_aspect_ratio_horiz_size(&wi_de.vc1_sl_de, sh.seq_aspect_size.ASPECT_HORIZ_SIZE);
+ viddec_fw_vc1_set_disp_aspect_ratio_vert_size(&wi_de.vc1_sl_de, sh.seq_aspect_size.ASPECT_VERT_SIZE);
+ viddec_fw_vc1_set_disp_color_prim(&wi_de.vc1_sl_de, sh.seq_color_format.COLOR_PRIM);
+ viddec_fw_vc1_set_disp_transfer_char(&wi_de.vc1_sl_de, sh.seq_color_format.TRANSFER_CHAR);
+
+ result = viddec_pm_append_workitem(ctxt, &wi_de);
+ }
+ }
+
+ return status;
+}
+
+/*------------------------------------------------------------------------------
+ * Parse entry point layer. This function is only applicable for advanced
+ * profile and is used to signal a random access point and changes in coding
+ * control parameters.
+ * Table 14 of SMPTE 421M.
+ * Table 15 of SMPTE 421M for HRD_FULLNESS().
+ *------------------------------------------------------------------------------
+ */
+vc1_Status vc1_ParseEntryPointLayer(void* ctxt, vc1_Info *pInfo)
+{
+ vc1_Status status = VC1_STATUS_OK;
+ vc1_metadata_t *md = &pInfo->metadata;
+ vc1_EntryPointHeader ep;
+ uint32_t result;
+ uint32_t temp;
+
+ memset(&ep, 0, sizeof(vc1_EntryPointHeader));
+
+ // PARSE ENTRYPOINT HEADER
+ result = viddec_pm_get_bits(ctxt, &ep.flags, 13);
+ if(result == 1)
+ {
+ // Skip the flags already peeked at (13) and the unneeded hrd_full data
+ // NOTE: HRD_NUM_LEAKY_BUCKETS is initialized to 0 when HRD_PARAM_FLAG is not present
+ int hrd_bits = md->HRD_NUM_LEAKY_BUCKETS * 8;
+ while(hrd_bits >= 32)
+ {
+ result = viddec_pm_skip_bits(ctxt, 32);
+ hrd_bits -= 32;
+ }
+ result = viddec_pm_skip_bits(ctxt, hrd_bits);
+
+ md->REFDIST = 0;
+ md->PANSCAN_FLAG = ep.ep_flags.PANSCAN_FLAG;
+ md->REFDIST_FLAG = ep.ep_flags.REFDIST_FLAG;
+ md->LOOPFILTER = ep.ep_flags.LOOPFILTER;
+ md->FASTUVMC = ep.ep_flags.FASTUVMC;
+ md->EXTENDED_MV = ep.ep_flags.EXTENDED_MV;
+ md->DQUANT = ep.ep_flags.DQUANT;
+ md->VSTRANSFORM = ep.ep_flags.VSTRANSFORM;
+ md->OVERLAP = ep.ep_flags.OVERLAP;
+ md->QUANTIZER = ep.ep_flags.QUANTIZER;
+
+ result = viddec_pm_get_bits(ctxt, &temp, 1);
+ if(result == 1)
+ {
+ ep.CODED_SIZE_FLAG = temp;
+ if(ep.CODED_SIZE_FLAG)
+ {
+ result = viddec_pm_get_bits(ctxt, &ep.size, 24);
+ md->width = ep.ep_size.CODED_WIDTH;
+ md->height = ep.ep_size.CODED_HEIGHT;
+ }
+ }
+ if(ep.ep_flags.EXTENDED_MV)
+ {
+ result = viddec_pm_get_bits(ctxt, &temp, 1);
+ md->EXTENDED_DMV = ep.EXTENDED_DMV = temp;
+ }
+
+ result = viddec_pm_get_bits(ctxt, &temp, 1);
+ if(result == 1)
+ {
+ md->RANGE_MAPY_FLAG = ep.RANGE_MAPY_FLAG = temp;
+ if(ep.RANGE_MAPY_FLAG)
+ {
+ result = viddec_pm_get_bits(ctxt, &temp, 3);
+ md->RANGE_MAPY = ep.RANGE_MAPY = temp;
+ }
+ }
+
+ result = viddec_pm_get_bits(ctxt, &temp, 1);
+ if(result == 1)
+ {
+ md->RANGE_MAPUV_FLAG = ep.RANGE_MAPUV_FLAG = temp;
+ if(ep.RANGE_MAPUV_FLAG)
+ {
+ result = viddec_pm_get_bits(ctxt, &temp, 3);
+ md->RANGE_MAPUV = ep.RANGE_MAPUV = temp;
+ }
+ }
+ }
+
+ // POPULATE WORKLOAD ITEM
+ {
+ viddec_workload_item_t wi;
+
+ wi.vwi_type = VIDDEC_WORKLOAD_GOP_INFO;
+
+ wi.vc1_ep.size = 0;
+ wi.vc1_ep.flags = 0;
+ wi.vc1_ep.pad = 0;
+
+ viddec_fw_vc1_set_ep_size_flag(&wi.vc1_ep, ep.CODED_SIZE_FLAG);
+ viddec_fw_vc1_set_ep_horiz_size(&wi.vc1_ep, ep.ep_size.CODED_WIDTH);
+ viddec_fw_vc1_set_ep_vert_size(&wi.vc1_ep, ep.ep_size.CODED_HEIGHT);
+
+ viddec_fw_vc1_set_ep_broken_link(&wi.vc1_ep, ep.ep_flags.BROKEN_LINK);
+ viddec_fw_vc1_set_ep_closed_entry(&wi.vc1_ep, ep.ep_flags.CLOSED_ENTRY);
+ viddec_fw_vc1_set_ep_panscan_flag(&wi.vc1_ep, ep.ep_flags.PANSCAN_FLAG);
+ viddec_fw_vc1_set_ep_range_mapy_flag(&wi.vc1_ep, ep.RANGE_MAPY_FLAG);
+ viddec_fw_vc1_set_ep_range_mapy(&wi.vc1_ep, ep.RANGE_MAPY);
+ viddec_fw_vc1_set_ep_range_mapuv_flag(&wi.vc1_ep, ep.RANGE_MAPUV_FLAG);
+ viddec_fw_vc1_set_ep_range_mapuv(&wi.vc1_ep, ep.RANGE_MAPUV);
+
+ result = viddec_pm_append_workitem(ctxt, &wi);
+ }
+
+#ifdef VBP
+ md->BROKEN_LINK = ep.ep_flags.BROKEN_LINK;
+ md->CLOSED_ENTRY = ep.ep_flags.CLOSED_ENTRY;
+#endif
+
+ DEB("ep: res: %dx%d\n", ep.ep_size.CODED_WIDTH, ep.ep_size.CODED_HEIGHT);
+ DEB("md: after ep: res: %dx%d\n", md->width, md->height);
+ return status;
+}
+
+/*------------------------------------------------------------------------------
+ * Parse picture layer. This function parses the picture layer.
+ *------------------------------------------------------------------------------
+ */
+
+vc1_Status vc1_ParsePictureLayer(void* ctxt, vc1_Info *pInfo)
+{
+ vc1_Status status = VC1_STATUS_OK;
+ uint32_t temp;
+ int i;
+
+ for(i=0; i<VC1_MAX_BITPLANE_CHUNKS; i++)
+ {
+ pInfo->metadata.bp_raw[i] = true;
+ }
+
+ if (pInfo->metadata.PROFILE == VC1_PROFILE_ADVANCED)
+ {
+ VC1_PEEK_BITS(2, temp); /* fcm */
+ if( (pInfo->metadata.INTERLACE == 1) && (temp == VC1_FCM_FIELD_INTERLACE))
+ {
+ status = vc1_ParseFieldHeader_Adv(ctxt, pInfo);
+ }
+ else
+ {
+ status = vc1_ParsePictureHeader_Adv(ctxt, pInfo);
+ }
+ }
+ else
+ {
+ status = vc1_ParsePictureHeader(ctxt, pInfo);
+ }
+
+ return status;
+}
+
+/*------------------------------------------------------------------------------
+ * Parse field picture layer. This function parses the field picture layer.
+ *------------------------------------------------------------------------------
+ */
+
+vc1_Status vc1_ParseFieldLayer(void* ctxt, vc1_Info *pInfo)
+{
+ vc1_Status status = VC1_STATUS_PARSE_ERROR;
+ vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader;
+
+ if (pInfo->metadata.PROFILE == VC1_PROFILE_ADVANCED) {
+ if (picLayerHeader->CurrField == 0)
+ {
+ picLayerHeader->PTYPE = picLayerHeader->PTypeField1;
+ picLayerHeader->BottomField = (uint8_t) (1 - picLayerHeader->TFF);
+ }
+ else
+ {
+ picLayerHeader->BottomField = (uint8_t) (picLayerHeader->TFF);
+ picLayerHeader->PTYPE = picLayerHeader->PTypeField2;
+ }
+ status = vc1_ParsePictureFieldHeader_Adv(ctxt, pInfo);
+ }
+
+ return status;
+}
+
+/*------------------------------------------------------------------------------
+ * Parse slice layer. This function parses the slice layer, which is only
+ * supported by advanced profile.
+ * Table 26 of SMPTE 421M but skipping parsing of macroblock layer.
+ *------------------------------------------------------------------------------
+ */
+
+vc1_Status vc1_ParseSliceLayer(void* ctxt, vc1_Info *pInfo)
+{
+ uint32_t tempValue;
+ uint32_t SLICE_ADDR;
+ vc1_Status status = VC1_STATUS_OK;
+
+ VC1_GET_BITS9(9, SLICE_ADDR);
+ VC1_GET_BITS9(1, tempValue); /* PIC_HEADER_FLAG. */
+ if (tempValue == 1) {
+ uint8_t *last_bufptr = pInfo->bufptr;
+ uint32_t last_bitoff = pInfo->bitoff;
+ status = vc1_ParsePictureLayer(ctxt, pInfo);
+ pInfo->picture_info_has_changed = 1;
+ if( status ) {
+ /* FIXME - is this a good way of handling this? Failed, see if it's for fields */
+ pInfo->bufptr = last_bufptr;
+ pInfo->bitoff = last_bitoff;
+ status = vc1_ParseFieldHeader_Adv(ctxt, pInfo);
+ }
+ } else
+ pInfo->picture_info_has_changed = 0;
+
+ pInfo->picLayerHeader.SLICE_ADDR = SLICE_ADDR;
+
+ return status;
+}
+
+/*------------------------------------------------------------------------------
+ * This function parses the user data information as defined in SMPTE 421M annex F.
+ * It then appends that data to the workload.
+ * Assume the flush byte 0x80 is within the 3 bytes before next start code.
+ * let's put 1 byte per item first
+ *------------------------------------------------------------------------------
+ */
+vc1_Status vc1_ParseAndAppendUserData(void* ctxt, uint32_t sc)
+{
+ vc1_Status status = VC1_STATUS_OK;
+ uint32_t user_data;
+ viddec_workload_item_t wi;
+ uint32_t ud_id;
+
+ /* find the scope based on start code sc */
+ switch(sc) {
+ case vc1_SCSequenceUser:
+ wi.vwi_type = VIDDEC_WORKLOAD_SEQ_USER_DATA;
+ break;
+ case vc1_SCEntryPointUser:
+ wi.vwi_type = VIDDEC_WORKLOAD_GOP_USER_DATA;
+ break;
+ case vc1_SCFrameUser:
+ wi.vwi_type = VIDDEC_WORKLOAD_FRM_USER_DATA;
+ break;
+ case vc1_SCFieldUser:
+ wi.vwi_type = VIDDEC_WORKLOAD_FLD_USER_DATA;
+ break;
+ case vc1_SCSliceUser:
+ wi.vwi_type = VIDDEC_WORKLOAD_SLC_USER_DATA;
+ break;
+ default:
+ wi.vwi_type = VIDDEC_WORKLOAD_INVALID; //ERROR - should not happen
+ break;
+ }
+
+ /* get identifier - 4 bytes*/
+ // Extract this information but discard it for now
+ VC1_GET_BITS(32, ud_id);
+
+ /* Read 1 byte of user data and store it in workitem for the current stream level (SEQ/GOP/PIC).
+ Keep adding data payloads till it reaches size 11. When it is 11, the maximum user data payload size,
+ append the workitem. This loop is repeated till all user data is extracted and appended. */
+ wi.user_data.size = 0;
+ while(viddec_pm_get_bits(ctxt, &user_data, 8) != -1)
+ {
+ /* Store the valid byte in data payload */
+ wi.user_data.data_payload[wi.user_data.size] = user_data;
+ wi.user_data.size++;
+
+ /* When size reaches the maximum payload size, append the workitem and continue */
+ if (wi.user_data.size >= 11)
+ {
+ viddec_pm_setup_userdata(&wi);
+ viddec_pm_append_workitem(ctxt, &wi);
+ wi.user_data.size = 0;
+ }
+ if(user_data == 0x80) // flushing byte
+ break;
+ }
+ /* If size is not 0, append remaining user data. */
+ if (wi.user_data.size > 0)
+ {
+ int i;
+ for(i=wi.user_data.size;i<11;i++)
+ {
+ wi.user_data.data_payload[i] = 0;
+ }
+ viddec_pm_setup_userdata(&wi);
+ viddec_pm_append_workitem(ctxt, &wi);
+ wi.user_data.size = 0;
+ }
+
+ return(status);
+} // vc1_ParseAndAppendUserData
diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse.h b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse.h
new file mode 100644
index 0000000..d0e2f00
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse.h
@@ -0,0 +1,136 @@
+/* ///////////////////////////////////////////////////////////////////////
+//
+// INTEL CORPORATION PROPRIETARY INFORMATION
+// This software is supplied under the terms of a license agreement or
+// nondisclosure agreement with Intel Corporation and may not be copied
+// or disclosed except in accordance with the terms of that agreement.
+// Copyright (c) 2008 Intel Corporation. All Rights Reserved.
+//
+// Description: Common functions for parsing VC-1 bitstreams.
+//
+*/
+
+#ifndef _VC1PARSE_H_
+#define _VC1PARSE_H_
+
+#include "viddec_parser_ops.h"
+#include "vc1.h"
+
+/** @weakgroup vc1parse_defs VC-1 Parse Definitions */
+/** @ingroup vc1parse_defs */
+/*@{*/
+
+/* Debug/trace helpers: when VC1_VERBOSE is defined, AUTO_TRACE and
+DEBUGBITS expand to OS_INFO logging; otherwise they expand to nothing. */
+#ifdef VC1_VERBOSE
+#include <stdio.h>
+#define AUTO_TRACE OS_INFO("trace: %s\n", __FUNCTION__)
+#define DEBUGBITS(arg1, args ...) OS_INFO( arg1, ## args)
+#else
+#define AUTO_TRACE
+#define DEBUGBITS(...)
+#endif
+
+extern void *memset(void *s, int32_t c, uint32_t n);
+
+/* This macro gets the next numBits from the bitstream. */
+#define VC1_GET_BITS VC1_GET_BITS9
+#define VC1_GET_BITS9(numBits, value) \
+{ uint32_t __tmp__; \
+ viddec_pm_get_bits(ctxt, (uint32_t*)&__tmp__, numBits ); \
+ value = __tmp__;\
+ DEBUGBITS("BIT:%40s= 0x%x\tNo. of bits=%d\tbyte = %02x\t%s[%d]\n", #value, value, numBits, 0, __FILE__, __LINE__); \
+}
+
+#define VC1_PEEK_BITS(numBits, value) \
+{ uint32_t __tmp__; \
+ viddec_pm_peek_bits(ctxt, (uint32_t*)&__tmp__, numBits ); \
+ value = __tmp__;\
+ DEBUGBITS("PEEK%40s= 0x%x\tNo. of bits=%d\tbyte = %02x\t%s[%d]\n", #value, value, numBits, 0, __FILE__, __LINE__); \
+}
+
+/* This macro asserts if the condition is not true. */
+#ifdef VC1_VERBOSE
+#define VC1_ASSERT(condition) \
+{ \
+ if (! (condition)) \
+ OS_INFO("Failed " #condition "!\n"); \
+}
+#else
+#define VC1_ASSERT(condition)
+#endif
+
+/*@}*/
+
+/** @weakgroup vc1parse VC-1 Parse Functions */
+/** @ingroup vc1parse */
+/*@{*/
+
+extern const uint8_t VC1_MVMODE_LOW_TBL[];
+extern const uint8_t VC1_MVMODE_HIGH_TBL[];
+extern const int32_t VC1_BITPLANE_IMODE_TBL[];
+extern const int32_t VC1_BITPLANE_K_TBL[];
+extern const int32_t VC1_BFRACTION_TBL[];
+extern const int32_t VC1_REFDIST_TBL[];
+
+void vc1_end_frame(vc1_viddec_parser_t *parser);
+
+/* Top-level functions to parse bitstream layers for rcv format. */
+vc1_Status vc1_ParseRCVSequenceLayer (void* ctxt, vc1_Info *pInfo);
+
+/* Top-level functions to parse bitstream layers for the various profiles. */
+vc1_Status vc1_ParseSequenceLayer(void* ctxt, vc1_Info *pInfo);
+vc1_Status vc1_ParseEntryPointLayer(void* ctxt, vc1_Info *pInfo);
+vc1_Status vc1_ParseSliceLayer(void* ctxt, vc1_Info *pInfo);
+vc1_Status vc1_ParsePictureLayer(void* ctxt, vc1_Info *pInfo);
+vc1_Status vc1_ParseFieldLayer(void* ctxt, vc1_Info *pInfo);
+
+/* Top-level functions to parse headers for various picture layers for the
+simple and main profiles. */
+vc1_Status vc1_ParsePictureHeader(void* ctxt, vc1_Info *pInfo);
+vc1_Status vc1_ParsePictureHeader_ProgressiveIpicture(void* ctxt, vc1_Info *pInfo);
+vc1_Status vc1_ParsePictureHeader_ProgressivePpicture(void* ctxt, vc1_Info *pInfo);
+vc1_Status vc1_ParsePictureHeader_ProgressiveBpicture(void* ctxt, vc1_Info *pInfo);
+
+/* Top-level functions to parse common part of the headers for various picture
+layers for the advanced profile. */
+vc1_Status vc1_ParsePictureHeader_Adv(void* ctxt, vc1_Info *pInfo);
+vc1_Status vc1_ParseFieldHeader_Adv (void* ctxt, vc1_Info *pInfo);
+vc1_Status vc1_ParsePictureFieldHeader_Adv(void* ctxt, vc1_Info *pInfo);
+
+/* Functions to parse remainder part of the headers for various progressive
+picture layers for the advanced profile. */
+vc1_Status vc1_ParsePictureHeader_ProgressiveIpicture_Adv(void* ctxt, vc1_Info *pInfo);
+vc1_Status vc1_ParsePictureHeader_ProgressivePpicture_Adv(void* ctxt, vc1_Info *pInfo);
+vc1_Status vc1_ParsePictureHeader_ProgressiveBpicture_Adv(void* ctxt, vc1_Info *pInfo);
+
+/* Functions to parse remainder part of the headers for various interlace frame
+layers for the advanced profile. */
+vc1_Status vc1_ParsePictureHeader_InterlaceIpicture_Adv(void* ctxt, vc1_Info *pInfo);
+vc1_Status vc1_ParsePictureHeader_InterlacePpicture_Adv(void* ctxt, vc1_Info *pInfo);
+vc1_Status vc1_ParsePictureHeader_InterlaceBpicture_Adv(void* ctxt, vc1_Info *pInfo);
+
+/* Functions to parse remainder part of the headers for various interlace frame
+layers for the advanced profile. */
+vc1_Status vc1_ParseFieldHeader_InterlaceIpicture_Adv(void* ctxt, vc1_Info *pInfo);
+vc1_Status vc1_ParseFieldHeader_InterlacePpicture_Adv(void* ctxt, vc1_Info *pInfo);
+vc1_Status vc1_ParseFieldHeader_InterlaceBpicture_Adv(void* ctxt, vc1_Info *pInfo);
+
+/* Functions to parse syntax element in bitstream. */
+vc1_Status vc1_MVRangeDecode(void* ctxt, vc1_Info *pInfo);
+vc1_Status vc1_DMVRangeDecode(void* ctxt, vc1_Info *pInfo);
+vc1_Status vc1_CalculatePQuant(vc1_Info *pInfo);
+vc1_Status vc1_VOPDQuant(void* ctxt, vc1_Info *pInfo);
+vc1_Status vc1_DecodeBitplane(void* ctxt, vc1_Info *pInfo, uint32_t width, uint32_t height, vc1_bpp_type_t bptype);
+vc1_Status vc1_DecodeHuffmanOne(void* ctxt, int32_t *pDst, const int32_t *pDecodeTable);
+vc1_Status vc1_DecodeHuffmanPair(void* ctxt, const int32_t *pDecodeTable, int8_t *pFirst, int16_t *pSecond);
+
+void vc1_start_new_frame(void *parent, vc1_viddec_parser_t *parser);
+int32_t vc1_parse_emit_current_frame(void *parent, vc1_viddec_parser_t *parser);
+
+/* function to handle user data */
+vc1_Status vc1_ParseAndAppendUserData(void* ctxt, uint32_t sc);
+
+/*@}*/
+
+#endif /* _VC1PARSE_H_. */
diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_bitplane.c b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_bitplane.c
new file mode 100644
index 0000000..5ee9e18
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_bitplane.c
@@ -0,0 +1,753 @@
+/* ///////////////////////////////////////////////////////////////////////
+//
+// INTEL CORPORATION PROPRIETARY INFORMATION
+// This software is supplied under the terms of a license agreement or
+// nondisclosure agreement with Intel Corporation and may not be copied
+// or disclosed except in accordance with the terms of that agreement.
+// Copyright (c) 2008 Intel Corporation. All Rights Reserved.
+//
+// Description: Parses VC-1 bitstreams.
+//
+*/
+
+#include "vc1parse.h"
+
+#ifdef VBP
+#include "viddec_pm.h"
+#endif
+
+/*----------------------------------------------------------------------------*/
+
+
+/* put one bit into a buffer
+ * used for bitplane decoding, each bit corresponds to a MB
+ * HW requires each row to start at a DW (32 bits) boundary
+ * input: value - bit value
+ * x - x location (column) of MB in MB unit
+ * y - y location (row) of MB in MB unit
+ * mbx - image width in MB
+ * mby - image height in MB (unused; kept so the signature matches the
+ * documented put_bit(value,x,y,mbx,mby,invert,outp) form and callers)
+ * invert - XORed with value before it is stored
+ * output: outp - buffer to fill; must be pre-initialized to zeros since
+ * only set bits are ever written
+ */
+//#define put_bit(value,x,y,mbx,mby,invert,outp)
+static inline void put_bit( uint32_t value, int x, int y, int mbx, int mby, uint8_t invert, uint32_t* outp)
+{
+ int bit;
+ uint32_t *out;
+
+ value ^= invert;
+ if (!value) return; /* assume buffer is initialized with zeros */
+
+ out = outp;
+ /* go to corresponding row location in DW unit */
+ out += (( mbx + 31 ) >> 5) * y;
+ out += x >> 5; /* go to corresponding column location in DW unit */
+ bit = x & 0x1f; /* compute bit position within the DW */
+ *out |= 1 << bit; /* put bit */
+}
+
+/* XOR the bit b at location (x,y) with invert: b = b^invert
+ * used for bitplane decoding, each bit corresponds to a MB
+ * HW requires each row to start at a DW (32 bits) boundary
+ * input: x - x location (column) of MB in MB unit
+ * y - y location (row) of MB in MB unit
+ * mbx - image width in MB
+ * invert - 1 flips the stored bit in place, any other value leaves it
+ * output: outp - bit buffer in dwords (modified in place when invert==1)
+ * returns the bit value after the (possible) XOR
+ */
+static inline int xor_bit( int x, int y, int mbx, uint32_t invert, uint32_t* outp)
+{
+ int bit;
+ uint32_t *out;
+ uint8_t value;
+ //if (invert == 0) return; /* do nothing if XOR with 0 */
+
+ out = outp;
+ out += (( mbx + 31 ) >> 5) * y; /* go to corresponding row location in DW unit */
+ out += x >> 5; /* go to corresponding column location in DW unit */
+ bit = x & 0x1f; /* compute bit position within the DW */
+
+ if (invert == 1)
+ *out ^= (1 << bit); /* put XOR bit */
+ value = (*out & (1 << bit)) >> bit; /* return bit value */
+
+ return(value);
+
+}
+
+/* get bit at location (x,y)
+ * used for bitplane decoding, each bit corresponds to a MB
+ * HW requires each row to start at a DW (32 bits) boundary
+ * input: x - x location (column) of MB in MB unit
+ * y - y location (row) of MB in MB unit
+ * mbx - image width in MB
+ * outp - bit buffer in dwords (read-only here)
+ * returns bit value (0 or 1)
+ */
+static inline int get_bit( int x, int y, int mbx, uint32_t* outp)
+{
+ int bit;
+ uint32_t *out;
+ uint8_t value;
+
+ out = outp;
+ out += (( mbx + 31 ) >> 5) * y; /* go to corresponding row location in DW unit */
+ out += x >> 5; /* go to corresponding column location in DW unit */
+ bit = x & 0x1f; /* compute bit position within the DW */
+ value = (*out & (1 << bit)) >> bit; /* return bit value */
+
+ return(value);
+
+}
+
+/* Inverse differential decoding over the whole bitplane, in raster order.
+ * Each bit is XORed in place (via xor_bit) with a predictor:
+ * - (0,0): pBitplane->invert;
+ * - first column (j==0): the bit directly above;
+ * - left and top predictors disagree: pBitplane->invert;
+ * - otherwise: the previous (left) decoded bit.
+ * previousBit carries the decoded value across iterations, so statement
+ * order here is significant. widthMB/heightMB are in MB units. */
+static void vc1_InverseDiff(vc1_Bitplane *pBitplane, int32_t widthMB, int32_t heightMB)
+{
+ int32_t i, j, previousBit=0, temp;
+
+ for (i = 0; i < heightMB; i++)
+ {
+ for (j = 0; j < widthMB; j++)
+ {
+ if ((i == 0 && j == 0))
+ {
+ previousBit=xor_bit(j, i, widthMB, pBitplane->invert,
+ pBitplane->databits);
+ }
+ else if (j == 0) /* XOR with TOP */
+ {
+ previousBit = get_bit(0, i-1, widthMB, pBitplane->databits);
+ temp=xor_bit(j, i, widthMB, previousBit,
+ pBitplane->databits);
+ previousBit = temp;
+ }
+ //TODO isSameAsTop can be optimized
+ else if (((i > 0) && (previousBit !=
+ get_bit(j, i-1, widthMB, pBitplane->databits))))
+ {
+ temp=xor_bit(j, i, widthMB, pBitplane->invert,
+ pBitplane->databits);
+ previousBit = temp;
+ }
+ else
+ {
+ temp=xor_bit(j, i, widthMB, previousBit,
+ pBitplane->databits);
+ previousBit = temp;
+ }
+ }
+ }
+}
+
+
+/*----------------------------------------------------------------------------*/
+/* implement normal 2 mode bitplane decoding, SMPTE 421M 8.7.3.2
+ * width, height are in MB unit.
+ */
+/* Decode a Norm-2 coded bitplane: bits are read from the bitstream in
+ * pairs (one leading bit for an odd-sized plane) and written into
+ * pBitplane->databits via put_bit. col[]/row[] track the two target MB
+ * positions incrementally instead of recomputing i%width and i/width
+ * each iteration. ctxt is the bitstream context used by VC1_GET_BITS. */
+static void vc1_Norm2ModeDecode(void* ctxt, vc1_Bitplane *pBitplane,
+ int32_t width, int32_t height)
+{
+ int32_t i;
+ int32_t tmp_databits = 0;
+
+ int32_t row[2], col[2];
+ int8_t tmp=0;
+
+ /* disable pBitplane->invert in the Norm2 decode stage of
+ VC1_BITPLANE_DIFF2_MODE (inversion is applied after the inverse
+ differential pass instead); tmp saves the value for restore below */
+ if (pBitplane->imode == VC1_BITPLANE_DIFF2_MODE)
+ {
+ tmp = pBitplane->invert;
+ pBitplane->invert=0;
+ }
+
+ // By default, initialize the values for the even case
+ col[0] = 0; /* i%width; */
+ row[0] = 0; /* i/width; */
+ col[1] = 1; /* (i+1)%width; */
+ row[1] = 0; /* (i+1)/width; */
+
+ // If width*height is odd, the first bit is the value of the bitplane
+ // for the first macroblock
+ if ((width*height) & 1) /* first bit if size is odd */
+ {
+ VC1_GET_BITS(1, tmp_databits);
+ put_bit(tmp_databits, 0, 0, width, height, pBitplane->invert,
+ pBitplane->databits);
+
+ // Modify initialization for odd sizes
+ col[0] = 1; /* i%width; */
+ col[1] = 2; /* (i+1)%width; */
+
+ // Consider special case where width is 1
+ if(width == 1)
+ {
+ col[0] = 0; /* i%width; */
+ row[0] = 1; /* i/width; */
+ col[1] = 0; /* (i+1)%width; */
+ row[1] = 2; /* (i+1)/width; */
+ }
+ }
+
+ /* decode every pair of bits in natural scan order */
+ for (i = (width*height) & 1; i < (width*height/2)*2; i += 2)
+ {
+ /* NOTE(review): this inner tmp intentionally shadows the outer
+ int8_t tmp; the outer value is only needed again after the loop
+ to restore pBitplane->invert. */
+ int32_t tmp = 0;
+
+ //col[0]=i%width;
+ //row[0]=i/width;
+ //col[1]=(i+1)%width;
+ //row[1]=(i+1)/width;
+
+ /* variable-length code: 0 -> (0,0); 11 -> (1,1); 100 -> (1,0);
+ 101 -> (0,1) */
+ VC1_GET_BITS(1, tmp);
+ if (tmp == 0)
+ {
+ put_bit(0, col[0],row[0], width, height, pBitplane->invert,
+ pBitplane->databits);
+ put_bit(0, col[1],row[1], width, height, pBitplane->invert,
+ pBitplane->databits);
+ }
+ else
+ {
+ VC1_GET_BITS(1, tmp);
+ if (tmp == 1)
+ {
+ put_bit(1, col[0],row[0], width, height, pBitplane->invert,
+ pBitplane->databits);
+ put_bit(1, col[1],row[1], width, height, pBitplane->invert,
+ pBitplane->databits);
+ }
+ else
+ {
+ VC1_GET_BITS(1, tmp);
+ if (tmp == 0)
+ {
+ put_bit(1, col[0],row[0], width, height, pBitplane->invert,
+ pBitplane->databits);
+ put_bit(0, col[1],row[1], width, height, pBitplane->invert,
+ pBitplane->databits);
+ }
+ else
+ {
+ put_bit(0, col[0],row[0], width, height, pBitplane->invert,
+ pBitplane->databits);
+ put_bit(1, col[1],row[1], width, height, pBitplane->invert,
+ pBitplane->databits);
+ }
+ }
+ }
+
+ // Consider special case where width is 1
+ if(width == 1)
+ {
+ row[0] += 2;
+ row[1] += 2;
+ }
+ else
+ {
+ col[0] += 2; /* i%width; */
+ if ( col[0] >= width )
+ {
+ // For odd sizes, col[0] can alternatively start at 0 and 1
+ col[0] -= width;
+ row[0]++;
+ }
+
+ col[1] += 2; /* (i+1)%width; */
+ if ( col[1] >= width )
+ {
+ // For odd sizes, col[1] can alternatively start at 0 and 1
+ col[1] -= width;
+ row[1]++;
+ }
+ }
+ }
+
+ /* restore value saved at the top for the DIFF2 case */
+ pBitplane->invert=tmp;
+}
+
+/*----------------------------------------------------------------------------*/
+/* compute Normal-6 mode bitplane decoding
+ * algorithm is described in SMPTE 421M 8.7.3.4
+ * width, height are in MB unit.
+ */
+/* Decode a Norm-6 coded bitplane: the plane is tiled into 2x3 or 3x2
+ * groups of MBs, each tile's 6 bits arrive as one Huffman-coded value k,
+ * and any untiled residual columns/rows are read bit-by-bit afterwards. */
+static void vc1_Norm6ModeDecode(void* ctxt, vc1_Bitplane *pBitplane,
+ int32_t width, int32_t height)
+{
+ vc1_Status status;
+ int32_t i, j, k;
+ int32_t ResidualX = 0;
+ int32_t ResidualY = 0;
+ /* 2x3 tiling is used when width is not a multiple of 3 but height is */
+ uint8_t _2x3tiled = (((width%3)!=0)&&((height%3)==0));
+
+ int32_t row, col;
+ int8_t tmp=0;
+
+ /* disable pBitplane->invert in the Norm6 decode stage of
+ VC1_BITPLANE_DIFF6_MODE (inversion is applied after the inverse
+ differential pass instead); tmp saves the value for restore below */
+ if (pBitplane->imode == VC1_BITPLANE_DIFF6_MODE)
+ {
+ tmp = pBitplane->invert;
+ pBitplane->invert=0;
+ }
+
+ if (_2x3tiled)
+ {
+ int32_t sizeW = width/2;
+ int32_t sizeH = height/3;
+
+ for (i = 0; i < sizeH; i++)
+ {
+ row = 3*i; /* compute row location for tile */
+
+ for (j = 0; j < sizeW; j++)
+ {
+ col = 2*j + (width & 1); /* compute column location for tile */
+
+ /* get k=sum(bi*2^i) where i is the ith bit of the tile */
+ status = vc1_DecodeHuffmanOne(ctxt, &k, VC1_BITPLANE_K_TBL);
+ VC1_ASSERT(status == VC1_STATUS_OK);
+
+ /* put bits in tile */
+ put_bit(k&1, col, row, width, height, pBitplane->invert,
+ pBitplane->databits);
+ put_bit(((k&2)>>1), col+1, row, width, height,
+ pBitplane->invert,pBitplane->databits);
+
+ put_bit(((k&4)>>2), col, row+1, width, height,
+ pBitplane->invert,pBitplane->databits);
+ put_bit(((k&8)>>3), col+1, row+1, width, height,
+ pBitplane->invert,pBitplane->databits);
+
+ put_bit(((k&16)>>4), col, row+2, width, height,
+ pBitplane->invert,pBitplane->databits);
+ put_bit(((k&32)>>5), col+1, row+2, width,
+ height,pBitplane->invert, pBitplane->databits);
+ }
+ }
+ ResidualX = width & 1;
+ ResidualY = 0;
+ }
+ else /* 3x2 tile */
+ {
+ int32_t sizeW = width/3;
+ int32_t sizeH = height/2;
+
+ for (i = 0; i < sizeH; i++)
+ {
+ row = 2*i + (height&1) ; /* compute row location for tile */
+
+ for (j = 0; j < sizeW; j++)
+ {
+ col = 3*j + (width%3); /* compute column location for tile */
+
+ /* get k=sum(bi*2^i) where i is the ith bit of the tile */
+ status = vc1_DecodeHuffmanOne(ctxt, &k, VC1_BITPLANE_K_TBL);
+ VC1_ASSERT(status == VC1_STATUS_OK);
+
+ put_bit(k&1, col, row, width, height,pBitplane->invert,
+ pBitplane->databits);
+ put_bit((k&2)>>1, col+1, row, width, height, pBitplane->invert,
+ pBitplane->databits);
+ put_bit((k&4)>>2, col+2, row, width, height, pBitplane->invert,
+ pBitplane->databits);
+
+ put_bit((k&8)>>3, col, row+1, width, height,pBitplane->invert,
+ pBitplane->databits);
+ put_bit((k&16)>>4, col+1, row+1, width,
+ height,pBitplane->invert, pBitplane->databits);
+ put_bit((k&32)>>5, col+2, row+1, width,
+ height,pBitplane->invert, pBitplane->databits);
+ }
+ }
+ ResidualX = width % 3;
+ ResidualY = height & 1;
+ }
+
+/* Residual (untiled) columns/rows.
+ NOTE(review): the VBP build calls put_bit for every residual position
+ (with Value left 0 when the skip flag is 0), while the non-VBP build
+ only writes when the skip flag is 1 — confirm both are intended to be
+ equivalent given put_bit's XOR-with-invert behavior. */
+#ifndef VBP
+ for (i = 0; i < ResidualX; i++)
+ {
+ int32_t ColSkip;
+ VC1_GET_BITS(1, ColSkip);
+
+ if (1 == ColSkip)
+ {
+ for(j = 0; j < height; j++)
+ {
+ int32_t Value = 0;
+ VC1_GET_BITS(1, Value);
+ put_bit(Value, i, j, width, height,pBitplane->invert,
+ pBitplane->databits);
+ }
+ }
+ }
+
+ for (j = 0; j < ResidualY; j++)
+ {
+ int32_t RowSkip;
+ VC1_GET_BITS(1, RowSkip);
+ if (1 == RowSkip)
+ {
+ for (i = ResidualX; i < width; i++)
+ {
+ int32_t Value = 0;
+ VC1_GET_BITS(1, Value);
+ put_bit(Value, i, j, width, height,pBitplane->invert,
+ pBitplane->databits);
+ }
+ }
+ }
+ #else
+ int32_t Value = 0;
+ for (i = 0; i < ResidualX; i++)
+ {
+ int32_t ColSkip;
+ VC1_GET_BITS(1, ColSkip);
+ Value = 0;
+ for(j = 0; j < height; j++)
+ {
+ if (1 == ColSkip)
+ {
+ VC1_GET_BITS(1, Value);
+ }
+ put_bit(Value, i, j, width, height,pBitplane->invert,
+ pBitplane->databits);
+ }
+ }
+
+ for (j = 0; j < ResidualY; j++)
+ {
+ int32_t RowSkip;
+ VC1_GET_BITS(1, RowSkip);
+ Value = 0;
+ for (i = ResidualX; i < width; i++)
+ {
+ if (1 == RowSkip)
+ {
+ VC1_GET_BITS(1, Value);
+ }
+ put_bit(Value, i, j, width, height,pBitplane->invert,
+ pBitplane->databits);
+ }
+ }
+ #endif
+
+ /* restore value saved at the top for the DIFF6 case */
+ pBitplane->invert=tmp;
+
+}
+
+/*----------------------------------------------------------------------------*/
+/* Clear a bitplane buffer to all zeros.
+ * Each MB row begins on a DW (32-bit) boundary, and the total is rounded
+ * up to an even number of dwords.
+ * input:
+ * width: width in MB units
+ * height: height in MB units
+ * returns the (even) bitplane size in dwords
+ */
+int initBitplane(vc1_Bitplane *pBitplane,uint32_t width, uint32_t height)
+{
+ int idx;
+ int numDword = ((width + 31) >> 5) * height;
+
+ /* round up to an even dword count */
+ if (numDword & 1)
+ numDword++;
+
+ for (idx = 0; idx < numDword; idx++)
+ pBitplane->databits[idx] = 0;
+
+ return numDword;
+}
+
+/*----------------------------------------------------------------------------*/
+/* modified IPP code for bitplane decoding
+ * width: width in MB unit
+ * height: height in MB unit
+ */
+vc1_Status vc1_DecodeBitplane(void* ctxt, vc1_Info *pInfo,
+ uint32_t width, uint32_t height, vc1_bpp_type_t bpnum)
+{
+ uint32_t i, j;
+ uint32_t tempValue;
+ vc1_Status status = VC1_STATUS_OK;
+ uint32_t biplaneSz; /* bitplane sz in dwords */
+ vc1_Bitplane bp; /* decoded on the stack, then copied out below */
+ vc1_Bitplane *bpp = &bp;
+
+ // By default, set imode to raw
+ pInfo->metadata.bp_raw[bpnum - VIDDEC_WORKLOAD_VC1_BITPLANE0] = true;
+
+ // bitplane data would be temporarily stored in the vc1 context
+ bpp->databits = pInfo->bitplane;
+
+ /* init bitplane to zero; initBitplane returns the buffer size in dwords */
+ biplaneSz = initBitplane(bpp, width, height);
+
+ VC1_GET_BITS(1, tempValue);
+ bpp->invert = (uint8_t) tempValue;
+
+ /* INVMOD is followed by the Huffman-coded imode selecting one of the
+ seven bitplane coding modes */
+ if ((status = vc1_DecodeHuffmanOne(ctxt, &bpp->imode,
+ VC1_BITPLANE_IMODE_TBL)) != VC1_STATUS_OK)
+ {
+ return status;
+ }
+
+ // If the imode is VC1_BITPLANE_RAW_MODE: bitplane information is in the MB layer
+ // there is no need to parse for bitplane information in the picture layer
+ // Only bits need to be appropriately set in the block control register
+ // In all other modes, bitplane information follows and needs to be parsed and sent to the decoder
+
+ if (bpp->imode == VC1_BITPLANE_NORM2_MODE)
+ {
+ vc1_Norm2ModeDecode(ctxt, bpp, width, height);
+ }
+ else if (bpp->imode == VC1_BITPLANE_DIFF2_MODE)
+ {
+ vc1_Norm2ModeDecode(ctxt, bpp, width, height);
+ vc1_InverseDiff(bpp, width, height);
+ }
+ else if (bpp->imode == VC1_BITPLANE_NORM6_MODE)
+ {
+ vc1_Norm6ModeDecode(ctxt, bpp, width, height);
+
+ }
+ else if (bpp->imode == VC1_BITPLANE_DIFF6_MODE)
+ {
+ vc1_Norm6ModeDecode(ctxt, bpp, width, height);
+ vc1_InverseDiff(bpp, width, height);
+ }
+ else if (bpp->imode == VC1_BITPLANE_ROWSKIP_MODE)
+ {
+
+ for (i = 0; i < height; i++)
+ {
+ VC1_GET_BITS(1, tempValue);
+ /* if tempValue==0, the row stays all zeros (buffer pre-cleared) */
+ if (tempValue == 1)
+ {
+ for (j = 0; j < width; j++)
+ {
+ VC1_GET_BITS(1, tempValue);
+ put_bit( tempValue, j, i, width, height, bpp->invert,
+ bpp->databits);
+ }
+ }
+ else if (bpp->invert) { //TO TEST
+ for (j = 0; j < width; j++) {
+ /* put_bit XORs with invert, so writing 0 sets the bit */
+ put_bit( 0, j, i, width, height, bpp->invert,
+ bpp->databits);
+ }
+ }
+ }
+
+ }
+ else if (bpp->imode == VC1_BITPLANE_COLSKIP_MODE)
+ {
+ for (i = 0; i < width; i++)
+ {
+ VC1_GET_BITS(1, tempValue);
+ /* if tempValue==0, and invert == 0, fill column with zeros */
+ if (tempValue == 1)
+ {
+ for (j = 0; j < height; j++)
+ {
+ VC1_GET_BITS(1, tempValue);
+ put_bit( tempValue, i, j, width, height, bpp->invert,
+ bpp->databits);
+ }
+ }
+ else if (bpp->invert) { // fill column with ones
+ for (j = 0; j < height; j++) {
+ /* put_bit XORs with invert, so writing 0 sets the bit */
+ put_bit( 0, i, j, width, height, bpp->invert,
+ bpp->databits);
+ }
+ }//end for else
+ }
+ }
+
+ /* For any non-raw mode, emit the decoded bitplane as workload items in
+ data_payload-sized chunks. */
+ if(bpp->imode != VC1_BITPLANE_RAW_MODE)
+ {
+ uint32_t* pl;
+ int sizeinbytes,nitems,i; /* NOTE: this 'i' shadows the outer uint32_t i */
+ viddec_workload_item_t wi;
+ uint32_t *bit_dw;
+
+ pInfo->metadata.bp_raw[bpnum - VIDDEC_WORKLOAD_VC1_BITPLANE0] = false;
+
+ sizeinbytes = ((( width + 31 ) / 32)) * (height) * 4;
+
+ pl = bpp->databits;
+ bit_dw = bpp->databits;
+
+ // How many payloads must be generated
+ nitems = (sizeinbytes + (sizeof(wi.data.data_payload) - 1)) /
+ sizeof(wi.data.data_payload);
+
+ // Dump DMEM to an array of workitems
+ for( i = 0; i < nitems; i++ )
+ {
+ wi.vwi_type = bpnum;
+ wi.data.data_offset = (char *)pl - (char *)bit_dw; // offset within struct
+ wi.data.data_payload[0] = pl[0];
+ wi.data.data_payload[1] = pl[1];
+ pl += 2;
+
+ viddec_pm_append_workitem( ctxt, &wi );
+ }
+ }
+
+#ifdef VBP
+ {
+ viddec_pm_cxt_t *cxt = (viddec_pm_cxt_t *)ctxt;
+ vc1_viddec_parser_t *parser = (vc1_viddec_parser_t *)(cxt->codec_data);
+
+ if (biplaneSz > 4096)
+ {
+ /* bigger than we got, so let's bail with a non meaningful error. */
+ return VC1_STATUS_ERROR;
+ }
+
+ /* At this point bp contains the information we need for the bit-plane */
+ /* bpnum is the enumeration that tells us which bitplane this is for. */
+ /* pInfo->picLayerHeader.ACPRED is one of the bitplanes I need to fill.*/
+ /* NOTE(review): no default case below — a bpnum outside
+ BITPLANE0..BITPLANE2 is silently ignored here. */
+ switch (bpnum)
+ {
+ case VIDDEC_WORKLOAD_VC1_BITPLANE0:
+ if (pInfo->picLayerHeader.PTYPE == VC1_B_FRAME)
+ {
+ if(bp.imode != VC1_BITPLANE_RAW_MODE)
+ {
+ pInfo->picLayerHeader.FORWARDMB.invert = bp.invert;
+ pInfo->picLayerHeader.FORWARDMB.imode = bp.imode;
+ for (i = 0; i < biplaneSz; i++)
+ {
+ parser->bp_forwardmb[i] = bp.databits[i];
+ }
+ pInfo->picLayerHeader.FORWARDMB.databits = parser->bp_forwardmb;
+ }
+ else
+ {
+ pInfo->picLayerHeader.raw_FORWARDMB = 1;
+ }
+ }
+ if ( (pInfo->picLayerHeader.PTYPE == VC1_I_FRAME)
+ || (pInfo->picLayerHeader.PTYPE == VC1_BI_FRAME) )
+ {
+ if(bp.imode != VC1_BITPLANE_RAW_MODE)
+ {
+ pInfo->picLayerHeader.ACPRED.invert = bp.invert;
+ pInfo->picLayerHeader.ACPRED.imode = bp.imode;
+ for (i = 0; i < biplaneSz; i++)
+ {
+ parser->bp_acpred[i] = bp.databits[i];
+ }
+ pInfo->picLayerHeader.ACPRED.databits = parser->bp_acpred;
+ }
+ else
+ {
+ pInfo->picLayerHeader.raw_ACPRED = 1;
+ }
+ }
+ if (pInfo->picLayerHeader.PTYPE == VC1_P_FRAME)
+ {
+ if(bp.imode != VC1_BITPLANE_RAW_MODE)
+ {
+ pInfo->picLayerHeader.MVTYPEMB.invert = bp.invert;
+ pInfo->picLayerHeader.MVTYPEMB.imode = bp.imode;
+ for (i = 0; i < biplaneSz; i++)
+ {
+ parser->bp_mvtypemb[i] = bp.databits[i];
+ }
+ pInfo->picLayerHeader.MVTYPEMB.databits = parser->bp_mvtypemb;
+ }
+ else
+ {
+ pInfo->picLayerHeader.raw_MVTYPEMB = 1;
+ }
+ }
+ break;
+ case VIDDEC_WORKLOAD_VC1_BITPLANE1:
+ if ( (pInfo->picLayerHeader.PTYPE == VC1_I_FRAME)
+ || (pInfo->picLayerHeader.PTYPE == VC1_BI_FRAME) )
+ {
+ if(bp.imode != VC1_BITPLANE_RAW_MODE)
+ {
+ pInfo->picLayerHeader.OVERFLAGS.invert = bp.invert;
+ pInfo->picLayerHeader.OVERFLAGS.imode = bp.imode;
+ for (i = 0; i < biplaneSz; i++)
+ {
+ parser->bp_overflags[i] = bp.databits[i];
+ }
+ pInfo->picLayerHeader.OVERFLAGS.databits = parser->bp_overflags;
+ }
+ else
+ {
+ pInfo->picLayerHeader.raw_OVERFLAGS = 1;
+ }
+ }
+ if ( (pInfo->picLayerHeader.PTYPE == VC1_P_FRAME)
+ || (pInfo->picLayerHeader.PTYPE == VC1_B_FRAME) )
+ {
+ if(bp.imode != VC1_BITPLANE_RAW_MODE)
+ {
+ pInfo->picLayerHeader.SKIPMB.invert = bp.invert;
+ pInfo->picLayerHeader.SKIPMB.imode = bp.imode;
+ for (i = 0; i < biplaneSz; i++)
+ {
+ parser->bp_skipmb[i] = bp.databits[i];
+ }
+ pInfo->picLayerHeader.SKIPMB.databits = parser->bp_skipmb;
+ }
+ else
+ {
+ pInfo->picLayerHeader.raw_SKIPMB = 1;
+ }
+ }
+ break;
+ case VIDDEC_WORKLOAD_VC1_BITPLANE2:
+ if ( (pInfo->picLayerHeader.PTYPE == VC1_P_FRAME)
+ || (pInfo->picLayerHeader.PTYPE == VC1_B_FRAME) )
+ {
+ if(bp.imode != VC1_BITPLANE_RAW_MODE)
+ {
+ pInfo->picLayerHeader.DIRECTMB.invert = bp.invert;
+ pInfo->picLayerHeader.DIRECTMB.imode = bp.imode;
+ for (i = 0; i < biplaneSz; i++)
+ {
+ parser->bp_directmb[i] = bp.databits[i];
+ }
+ pInfo->picLayerHeader.DIRECTMB.databits = parser->bp_directmb;
+ }
+ else
+ {
+ pInfo->picLayerHeader.raw_DIRECTMB = 1;
+ }
+ }
+ if ( (pInfo->picLayerHeader.PTYPE == VC1_I_FRAME)
+ || (pInfo->picLayerHeader.PTYPE == VC1_BI_FRAME) )
+ {
+ if(bp.imode != VC1_BITPLANE_RAW_MODE)
+ {
+ pInfo->picLayerHeader.FIELDTX.invert = bp.invert;
+ pInfo->picLayerHeader.FIELDTX.imode = bp.imode;
+ for (i = 0; i < biplaneSz; i++)
+ {
+ parser->bp_fieldtx[i] = bp.databits[i];
+ }
+ pInfo->picLayerHeader.FIELDTX.databits = parser->bp_fieldtx;
+ }
+ else
+ {
+ pInfo->picLayerHeader.raw_FIELDTX = 1;
+ }
+ }
+ break;
+ }
+ }
+#endif
+
+ return status;
+}
diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_bpic.c b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_bpic.c
new file mode 100644
index 0000000..e73cde3
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_bpic.c
@@ -0,0 +1,100 @@
+/* ///////////////////////////////////////////////////////////////////////
+//
+// INTEL CORPORATION PROPRIETARY INFORMATION
+// This software is supplied under the terms of a license agreement or
+// nondisclosure agreement with Intel Corporation and may not be copied
+// or disclosed except in accordance with the terms of that agreement.
+// Copyright (c) 2008 Intel Corporation. All Rights Reserved.
+//
+// Description: Parses VC-1 picture layer for progressive B picture in simple
+// or main profile bitstream.
+//
+*/
+
+#include "vc1parse.h"
+#include "viddec_fw_debug.h" // For DEB
+
+/*------------------------------------------------------------------------------
+ * Parse picture layer. This function parses progressive B picture for main
+ * profile bitstream. This parser starts after PTYPE was parsed but stops
+ * before parsing of macroblock layer.
+ * Table 21 of SMPTE 421M after processing up to PTYPE for B picture.
+ *------------------------------------------------------------------------------
+ */
+
+vc1_Status vc1_ParsePictureHeader_ProgressiveBpicture(void* ctxt, vc1_Info *pInfo)
+{
+ vc1_Status status = VC1_STATUS_OK;
+ vc1_metadata_t *md = &pInfo->metadata;
+ vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader;
+
+ /* BFRACTION: Huffman-coded numerator/denominator pair */
+ if ((status = vc1_DecodeHuffmanPair(ctxt, VC1_BFRACTION_TBL,
+ &picLayerHeader->BFRACTION_NUM, &picLayerHeader->BFRACTION_DEN)) !=
+ VC1_STATUS_OK)
+ {
+ return status;
+ }
+
+ VC1_GET_BITS9(5, picLayerHeader->PQINDEX);
+ if ((status = vc1_CalculatePQuant(pInfo)) != VC1_STATUS_OK)
+ return status;
+
+ /* HALFQP is only present for low PQINDEX values */
+ if (picLayerHeader->PQINDEX <= 8)
+ {
+ VC1_GET_BITS9(1, picLayerHeader->HALFQP);
+ }
+ else picLayerHeader->HALFQP=0;
+
+ if (md->QUANTIZER == 1)
+ {
+ VC1_GET_BITS9(1, picLayerHeader->PQUANTIZER);
+ }
+
+ if ((status = vc1_MVRangeDecode(ctxt, pInfo)) != VC1_STATUS_OK)
+ return status;
+
+ /* single MVMODE bit selects between 1-MV and half-pel bilinear 1-MV */
+ VC1_GET_BITS9(1, picLayerHeader->MVMODE);
+ picLayerHeader->MVMODE = (picLayerHeader->MVMODE == 1) ?
+ VC1_MVMODE_1MV : VC1_MVMODE_HPELBI_1MV;
+
+ if ((status = vc1_DecodeBitplane(ctxt, pInfo,
+ md->widthMB, md->heightMB, BPP_DIRECTMB)) != VC1_STATUS_OK)
+ {
+ /* Bug fix: this previously returned VC1_STATUS_OK, masking a
+ DIRECTMB bitplane decode failure; propagate the error like the
+ SKIPMB branch below. */
+ return status;
+ }
+
+ if ((status = vc1_DecodeBitplane(ctxt, pInfo,
+ md->widthMB, md->heightMB, BPP_SKIPMB)) != VC1_STATUS_OK)
+ {
+ return status;
+ }
+
+ VC1_GET_BITS9(2, picLayerHeader->MVTAB);
+ VC1_GET_BITS9(2, picLayerHeader->CBPTAB);
+
+ if ((status = vc1_VOPDQuant(ctxt, pInfo)) != VC1_STATUS_OK)
+ return status;
+
+ /* variable-size transform signalling */
+ if (md->VSTRANSFORM == 1)
+ {
+ VC1_GET_BITS9(1, picLayerHeader->TTMBF);
+ if (picLayerHeader->TTMBF)
+ {
+ VC1_GET_BITS9(2, picLayerHeader->TTFRM);
+ }
+ }
+
+ /* TRANSACFRM: second bit is only read when the first is 1 */
+ VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM);
+ if (picLayerHeader->TRANSACFRM == 1)
+ {
+ VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM);
+ picLayerHeader->TRANSACFRM += 2;
+ }
+
+ VC1_GET_BITS9(1, picLayerHeader->TRANSDCTAB);
+
+ /* Skip parsing of macroblock layer. */
+
+ return status;
+}
+
diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_bpic_adv.c b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_bpic_adv.c
new file mode 100644
index 0000000..4074309
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_bpic_adv.c
@@ -0,0 +1,257 @@
+/* ///////////////////////////////////////////////////////////////////////
+//
+// INTEL CORPORATION PROPRIETARY INFORMATION
+// This software is supplied under the terms of a license agreement or
+// nondisclosure agreement with Intel Corporation and may not be copied
+// or disclosed except in accordance with the terms of that agreement.
+// Copyright (c) 2008 Intel Corporation. All Rights Reserved.
+//
+// Description: Parses VC-1 picture layer for progressive B picture in advanced
+// profile bitstream.
+//
+*/
+
+#include "vc1parse.h"
+#include "viddec_fw_debug.h" // For DEB
+
+/*------------------------------------------------------------------------------
+ * Parse picture layer. This function parses a progressive B picture for an
+ * advanced profile bitstream (Table 22 of SMPTE 421M), resuming after
+ * vc1_ParsePictureHeader_Adv() has consumed everything up to POSTPROC and
+ * stopping before the macroblock layer.
+ * Returns VC1_STATUS_OK, or the first failing sub-parser's status.
+ *------------------------------------------------------------------------------
+ */
+
+vc1_Status vc1_ParsePictureHeader_ProgressiveBpicture_Adv(void* ctxt, vc1_Info *pInfo)
+{
+    vc1_Status status = VC1_STATUS_OK;
+    vc1_metadata_t *md = &pInfo->metadata;
+    vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader;
+
+    if ((status = vc1_MVRangeDecode(ctxt, pInfo)) != VC1_STATUS_OK)
+        return status;
+
+    VC1_GET_BITS9(1, picLayerHeader->MVMODE);
+    picLayerHeader->MVMODE = (picLayerHeader->MVMODE == 1) ?
+        VC1_MVMODE_1MV : VC1_MVMODE_HPELBI_1MV; /* 1-bit MVMODE: 1 => 1MV, 0 => half-pel bilinear 1MV */
+
+    if ((status = vc1_DecodeBitplane(ctxt, pInfo,
+        md->widthMB, md->heightMB, BPP_DIRECTMB)) != VC1_STATUS_OK) /* per-MB DIRECTMB bitplane */
+    {
+        return status;
+    }
+
+    if ((status = vc1_DecodeBitplane(ctxt, pInfo,
+        md->widthMB, md->heightMB, BPP_SKIPMB)) != VC1_STATUS_OK) /* per-MB SKIPMB bitplane */
+    {
+        return status;
+    }
+
+    VC1_GET_BITS9(2, picLayerHeader->MVTAB);
+    VC1_GET_BITS9(2, picLayerHeader->CBPTAB);
+
+    if ((status = vc1_VOPDQuant(ctxt, pInfo)) != VC1_STATUS_OK)
+        return status;
+
+    if (md->VSTRANSFORM == 1)
+    {
+        VC1_GET_BITS9(1, picLayerHeader->TTMBF);
+        if (picLayerHeader->TTMBF == 1)
+        {
+            VC1_GET_BITS9(2, picLayerHeader->TTFRM); /* frame-level transform type, only when TTMBF set */
+        }
+    }
+
+    VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM);
+    if (picLayerHeader->TRANSACFRM == 1)
+    {
+        VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM); /* second bit widens TRANSACFRM to 2 or 3 */
+        picLayerHeader->TRANSACFRM += 2;
+    }
+
+    VC1_GET_BITS9(1, picLayerHeader->TRANSDCTAB);
+
+    /* Skip parsing of macroblock layer. */
+
+    return status;
+}
+
+/*------------------------------------------------------------------------------
+ * Parse picture layer. This function parses an interlace B frame for an
+ * advanced profile bitstream (Table 84 of SMPTE 421M), resuming after
+ * vc1_ParsePictureHeader_Adv() has consumed everything up to POSTPROC and
+ * stopping before the macroblock layer.
+ * Returns VC1_STATUS_OK, or the first failing sub-parser's status.
+ *------------------------------------------------------------------------------
+ */
+
+vc1_Status vc1_ParsePictureHeader_InterlaceBpicture_Adv(void* ctxt, vc1_Info *pInfo)
+{
+    vc1_Status status = VC1_STATUS_OK;
+    vc1_metadata_t *md = &pInfo->metadata;
+    vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader;
+
+    if ((status = vc1_DecodeHuffmanPair(ctxt, VC1_BFRACTION_TBL,
+        &picLayerHeader->BFRACTION_NUM, &picLayerHeader->BFRACTION_DEN)) !=
+        VC1_STATUS_OK) /* BFRACTION VLC -> numerator/denominator pair */
+    {
+        return status;
+    }
+
+    if ((status = vc1_MVRangeDecode(ctxt, pInfo)) != VC1_STATUS_OK)
+        return status;
+
+    if ((status = vc1_DMVRangeDecode(ctxt, pInfo)) != VC1_STATUS_OK)
+        return status;
+
+    VC1_GET_BITS9(1, picLayerHeader->INTCOMP);
+
+    if ((status = vc1_DecodeBitplane(ctxt, pInfo,
+        md->widthMB, md->heightMB, BPP_DIRECTMB)) != VC1_STATUS_OK)
+    {
+        return status;
+    }
+
+    if ((status = vc1_DecodeBitplane(ctxt, pInfo,
+        md->widthMB, md->heightMB, BPP_SKIPMB)) != VC1_STATUS_OK)
+    {
+        return status;
+    }
+
+    /* Reference (EPC) code forced MVMODE to VC1_MVMODE_1MV here; no MVMODE
+       bit is parsed from the bitstream for an interlace B frame. */
+    VC1_GET_BITS9(2, picLayerHeader->MBMODETAB);
+    VC1_GET_BITS9(2, picLayerHeader->MVTAB);    /* IMVTAB. */
+    VC1_GET_BITS9(3, picLayerHeader->CBPTAB);   /* ICBPTAB. */
+    VC1_GET_BITS9(2, picLayerHeader->MV2BPTAB); /* 2MVBPTAB. */
+    VC1_GET_BITS9(2, picLayerHeader->MV4BPTAB); /* 4MVBPTAB. */
+
+    if ((status = vc1_VOPDQuant(ctxt, pInfo)) != VC1_STATUS_OK)
+        return status;
+
+    if (md->VSTRANSFORM == 1)
+    {
+        VC1_GET_BITS9(1, picLayerHeader->TTMBF);
+        if (picLayerHeader->TTMBF == 1)
+        {
+            VC1_GET_BITS9(2, picLayerHeader->TTFRM);
+        }
+    }
+
+    VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM);
+    if (picLayerHeader->TRANSACFRM == 1)
+    {
+        VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM); /* second bit widens TRANSACFRM to 2 or 3 */
+        picLayerHeader->TRANSACFRM += 2;
+    }
+
+    VC1_GET_BITS9(1, picLayerHeader->TRANSDCTAB);
+
+    /* Skip parsing of macroblock layer. */
+
+    return status;
+}
+
+/*------------------------------------------------------------------------------
+ * Parse picture layer. This function parses an interlace B field for an
+ * advanced profile bitstream (Table 89 of SMPTE 421M), resuming after
+ * vc1_ParseFieldHeader_Adv() has consumed everything up to BFRACTION and
+ * stopping before the macroblock layer.
+ * Returns VC1_STATUS_OK, or the first failing sub-parser's status.
+ *------------------------------------------------------------------------------
+ */
+
+vc1_Status vc1_ParseFieldHeader_InterlaceBpicture_Adv(void* ctxt, vc1_Info *pInfo)
+{
+    uint8_t bit_count;
+    const uint8_t *table;
+    vc1_Status status = VC1_STATUS_OK;
+    vc1_metadata_t *md = &pInfo->metadata;
+    vc1_PictureLayerHeader* picLayerHeader = &pInfo->picLayerHeader;
+
+    VC1_GET_BITS9(5, picLayerHeader->PQINDEX);
+
+    if ((status = vc1_CalculatePQuant(pInfo)) != VC1_STATUS_OK)
+        return status;
+
+    if (picLayerHeader->PQINDEX <= 8)
+    {
+        VC1_GET_BITS9(1, picLayerHeader->HALFQP); /* HALFQP present only for PQINDEX <= 8 */
+    }
+    else
+        picLayerHeader->HALFQP = 0;
+
+    if (md->QUANTIZER == 1)
+    {
+        VC1_GET_BITS9(1, picLayerHeader->PQUANTIZER);
+        picLayerHeader->UniformQuant = picLayerHeader->PQUANTIZER;
+    }
+
+    if (md->POSTPROCFLAG == 1)
+    {
+        VC1_GET_BITS9(2, picLayerHeader->POSTPROC);
+    }
+
+    if ((status = vc1_MVRangeDecode(ctxt, pInfo)) != VC1_STATUS_OK)
+        return status;
+
+    if ((status = vc1_DMVRangeDecode(ctxt, pInfo)) != VC1_STATUS_OK)
+        return status;
+
+    if (picLayerHeader->PQUANT > 12) /* MVMODE code table depends on quantizer step */
+        table = VC1_MVMODE_LOW_TBL;
+    else
+        table = VC1_MVMODE_HIGH_TBL;
+
+    /* MVMODE is a short VLC of at most 3 bits; bit_count ends up being the
+       number of leading zero bits and indexes the selected mode table. */
+    bit_count = 0;
+    VC1_GET_BITS9(1, picLayerHeader->MVMODE);
+    while ((picLayerHeader->MVMODE == 0) && (bit_count < 2))
+    {
+        VC1_GET_BITS9(1, picLayerHeader->MVMODE);
+        bit_count++;
+    }
+    if ((bit_count == 2) && (picLayerHeader->MVMODE == 0))
+        bit_count++;
+    picLayerHeader->MVMODE = table[bit_count];
+
+    if ((status = vc1_DecodeBitplane(ctxt, pInfo,
+        md->widthMB, (md->heightMB+1)/2, BPP_FORWARDMB)) !=
+        VC1_STATUS_OK) /* field picture: half the frame MB height, rounded up */
+    {
+        return status;
+    }
+
+    VC1_GET_BITS9(3, picLayerHeader->MBMODETAB);
+    VC1_GET_BITS9(3, picLayerHeader->MVTAB);  /* IMVTAB. */
+    VC1_GET_BITS9(3, picLayerHeader->CBPTAB); /* ICBPTAB. */
+
+    if (picLayerHeader->MVMODE == VC1_MVMODE_MIXED_MV)
+    {
+        VC1_GET_BITS9(2, picLayerHeader->MV4BPTAB); /* 4MVBPTAB. */
+    }
+
+    if ((status = vc1_VOPDQuant(ctxt, pInfo)) != VC1_STATUS_OK)
+        return status;
+
+    if (md->VSTRANSFORM == 1)
+    {
+        VC1_GET_BITS9(1, picLayerHeader->TTMBF);
+        if (picLayerHeader->TTMBF == 1)
+        {
+            VC1_GET_BITS9(2, picLayerHeader->TTFRM);
+        }
+    }
+
+    VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM);
+    if (picLayerHeader->TRANSACFRM == 1)
+    {
+        VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM); /* second bit widens TRANSACFRM to 2 or 3 */
+        picLayerHeader->TRANSACFRM += 2;
+    }
+
+    VC1_GET_BITS9(1, picLayerHeader->TRANSDCTAB);
+
+    /* Skip parsing of macroblock layer. */
+
+    return status;
+}
diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_common_defs.h b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_common_defs.h
new file mode 100644
index 0000000..9e621fc
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_common_defs.h
@@ -0,0 +1,608 @@
+/* ///////////////////////////////////////////////////////////////////////
+//
+// INTEL CORPORATION PROPRIETARY INFORMATION
+// This software is supplied under the terms of a license agreement or
+// nondisclosure agreement with Intel Corporation and may not be copied
+// or disclosed except in accordance with the terms of that agreement.
+// Copyright (c) 2008 Intel Corporation. All Rights Reserved.
+//
+// Description: Common definitions for parsing VC-1 bitstreams.
+//
+*/
+
+#ifndef _VC1PARSE_COMMON_DEFS_H_
+#define _VC1PARSE_COMMON_DEFS_H_
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#include <stdint.h>
+
+/** @weakgroup vc1parse_common_defs VC-1 Common Definitions */
+/** @ingroup vc1parse_common_defs */
+/*@{*/
+
+/** This defines the maximum number of horizontal macroblocks in a picture. */
+#define VC1_WIDTH_MB_MAX ((2048+15)/16)  /* 2048-px max width, rounded up to 16x16 MBs (=128) */
+
+/** This defines the maximum number of vertical macroblocks in a picture. */
+#define VC1_HEIGHT_MB_MAX ((1088+15)/16) /* 1088-px max height, rounded up to 16x16 MBs (=68) */
+
+/** This defines the maximum number of bitplane storage per picture. */
+#define VC1_MAX_BITPLANE_CHUNKS 3
+
+/** This defines the value for an invalid BFRACTION syntax element. */
+#define VC1_BFRACTION_INVALID 0
+
+/** This defines the value for BFRACTION syntax element that defines a BI
+picture. */
+#define VC1_BFRACTION_BI 9
+
+/** This enumeration defines the various supported profiles as defined in
+PROFILE syntax element. */
+enum
+{
+    VC1_PROFILE_SIMPLE,
+    VC1_PROFILE_MAIN,
+    VC1_PROFILE_RESERVED,
+    VC1_PROFILE_ADVANCED
+};
+
+/** This enumeration defines the frame coding mode as defined in FCM syntax
+element. */
+enum
+{
+    VC1_FCM_PROGRESSIVE,
+    VC1_FCM_FRAME_INTERLACE = 2, /* value 1 is intentionally not used */
+    VC1_FCM_FIELD_INTERLACE = 3
+};
+
+/** This enumeration defines the various bitplane types as defined in IMODE
+syntax element. */
+enum
+{
+    VC1_BITPLANE_RAW_MODE,
+    VC1_BITPLANE_NORM2_MODE,
+    VC1_BITPLANE_DIFF2_MODE,
+    VC1_BITPLANE_NORM6_MODE,
+    VC1_BITPLANE_DIFF6_MODE,
+    VC1_BITPLANE_ROWSKIP_MODE,
+    VC1_BITPLANE_COLSKIP_MODE
+};
+
+/** This enumeration defines the various motion vector modes as defined in
+MVMODE or MVMODE2 syntax element.
+NOTE(review): VBP builds swap the ordinals of HPEL_1MV and HPELBI_1MV, so any
+table of these values must be built under the same configuration. */
+enum
+{
+    VC1_MVMODE_1MV,
+#ifdef VBP
+    VC1_MVMODE_HPELBI_1MV,
+    VC1_MVMODE_HPEL_1MV,
+#else
+    VC1_MVMODE_HPEL_1MV,
+    VC1_MVMODE_HPELBI_1MV,
+#endif
+    VC1_MVMODE_MIXED_MV,
+    VC1_MVMODE_INTENSCOMP
+};
+
+/** This enumeration defines the extended differential motion vector range flag
+as defined in DMVRANGE syntax element. */
+enum
+{
+    VC1_DMVRANGE_NONE,
+    VC1_DMVRANGE_HORIZONTAL_RANGE,
+    VC1_DMVRANGE_VERTICAL_RANGE,
+    VC1_DMVRANGE_HORIZONTAL_VERTICAL_RANGE
+};
+
+/** This enumeration defines the intensity compensation field as defined in
+INTCOMPFIELD syntax element. */
+enum
+{
+    VC1_INTCOMP_TOP_FIELD    = 1,
+    VC1_INTCOMP_BOTTOM_FIELD = 2,
+    VC1_INTCOMP_BOTH_FIELD   = 3
+};
+
+/** This enumeration defines the differential quantizer profiles as defined in
+DQPROFILE syntax element. */
+enum
+{
+    VC1_DQPROFILE_ALL4EDGES,
+    VC1_DQPROFILE_DBLEDGES,
+    VC1_DQPROFILE_SNGLEDGES,
+    VC1_DQPROFILE_ALLMBLKS
+};
+
+/** This enumeration defines the conditional overlap flag as defined in CONDOVER
+syntax element. */
+enum
+{
+    VC1_CONDOVER_FLAG_NONE = 0,
+    VC1_CONDOVER_FLAG_ALL  = 2, /* value 1 is intentionally not used */
+    VC1_CONDOVER_FLAG_SOME = 3
+};
+
+/** This enumeration defines the type of quantizer to be used and is derived
+from bitstream syntax. */
+enum
+{
+    VC1_QUANTIZER_NONUNIFORM,
+    VC1_QUANTIZER_UNIFORM
+};
+
+/** This structure represents the various bitplanes within VC-1 bitstream. */
+typedef struct
+{
+    uint8_t invert;      /* INVERT flag: set when the decoded plane is inverted */
+    int32_t imode;       /* coding mode, one of VC1_BITPLANE_*_MODE */
+    uint32_t *databits;  /* decoded per-MB bits; storage owned by caller -- TODO confirm */
+} vc1_Bitplane;
+
+/** This structure represents all bitstream metadata needed for register programming. */
+typedef struct
+{
+    // From Sequence Layer for Advanced Profile
+    uint8_t PROFILE; /** 2 bit(s). */
+#ifdef VBP
+    uint8_t LEVEL;
+#endif
+    uint8_t POSTPROCFLAG; /** 1 bit(s). */
+    uint8_t PULLDOWN; /** 1 bit(s). */
+    uint8_t INTERLACE; /** 1 bit(s). */
+    uint8_t TFCNTRFLAG; /** 1 bit(s). */
+    uint8_t FINTERPFLAG; /** 1 bit(s). */
+    uint8_t PSF; /** 1 bit(s). */
+    uint8_t HRD_NUM_LEAKY_BUCKETS; /** 5 bit(s). */
+
+    // From STRUCT_C
+    uint8_t MAXBFRAMES; /** 3 bit(s). */
+    uint8_t MULTIRES; /** 1 bit(s). */
+
+    // From EntryPoint Layer for Advanced Profile
+    uint8_t PANSCAN_FLAG;
+    uint8_t REFDIST_FLAG;
+    uint8_t LOOPFILTER;
+    uint8_t FASTUVMC;
+    uint8_t EXTENDED_MV;
+    uint8_t DQUANT;
+    uint8_t VSTRANSFORM;
+    uint8_t OVERLAP;
+    uint8_t QUANTIZER;
+    uint8_t EXTENDED_DMV;
+    uint8_t RANGE_MAPY_FLAG;
+    uint8_t RANGE_MAPY;
+    uint8_t RANGE_MAPUV_FLAG;
+    uint8_t RANGE_MAPUV;
+
+    // From Picture Header
+    uint8_t RANGERED; /** 1 bit(s). */
+    uint8_t RNDCTRL; /** 1 bit(s), rcv specific. */
+
+    // REFDIST is present only in field-interlaced mode on I/I, I/P, P/I, P/P frames
+    // From Canmore, looks like this needs to be propagated to following B frames
+    uint8_t REFDIST;
+    uint8_t INTCOMPFIELD; /** ? bit(s)? */
+    uint8_t LUMSCALE2; /** 6 bit(s). */
+    uint8_t LUMSHIFT2; /** 6 bit(s). */
+    uint8_t bp_raw[VC1_MAX_BITPLANE_CHUNKS]; /* per-bitplane raw-mode flags for the current picture */
+
+    // From SequenceLayerHeader, EntryPointHeader or Struct_A
+    uint16_t width;    /* coded width/height in pixels; widthMB/heightMB in macroblocks */
+    uint16_t height;
+    uint16_t widthMB;
+    uint16_t heightMB;
+
+#ifdef VBP
+    uint8_t CLOSED_ENTRY;
+    uint8_t BROKEN_LINK;
+    uint8_t SYNCMARKER;
+#endif
+
+} vc1_metadata_t;
+
+/** This structure represents the sequence header for advanced profile.
+Each anonymous union packs the parsed fields as bitfields and also exposes the
+same storage as a raw 32-bit word; the bitfield order is mirrored under
+MFDBIGENDIAN so the raw-word view matches on both endiannesses (bitfield
+allocation order is implementation-defined in C). */
+typedef struct
+{
+    union
+    {
+#ifndef MFDBIGENDIAN
+        struct
+        {
+            unsigned BITRTQ_POSTPROC:5;
+            unsigned FRMRTQ_POSTPROC:3;
+            unsigned COLORDIFF_FORMAT:2;
+            unsigned LEVEL:3;
+            unsigned PROFILE:2;
+            unsigned pad:17;
+        } seq_flags;
+#else
+        struct
+        {
+            unsigned pad:17;
+            unsigned PROFILE:2;
+            unsigned LEVEL:3;
+            unsigned COLORDIFF_FORMAT:2;
+            unsigned FRMRTQ_POSTPROC:3;
+            unsigned BITRTQ_POSTPROC:5;
+        } seq_flags;
+#endif
+        uint32_t flags; /* raw 32-bit view of seq_flags */
+    };
+
+    union
+    {
+#ifndef MFDBIGENDIAN
+        struct
+        {
+            unsigned DISPLAY_EXT:1;
+            unsigned PSF:1;
+            unsigned RESERVED:1;
+            unsigned FINTERPFLAG:1;
+            unsigned TFCNTRFLAG:1;
+            unsigned INTERLACE:1;
+            unsigned PULLDOWN:1;
+            unsigned MAX_CODED_HEIGHT:12;
+            unsigned MAX_CODED_WIDTH:12;
+            unsigned POSTPROCFLAG:1;
+        } seq_max_size;
+#else
+        struct
+        {
+            unsigned POSTPROCFLAG:1;
+            unsigned MAX_CODED_WIDTH:12;
+            unsigned MAX_CODED_HEIGHT:12;
+            unsigned PULLDOWN:1;
+            unsigned INTERLACE:1;
+            unsigned TFCNTRFLAG:1;
+            unsigned FINTERPFLAG:1;
+            unsigned RESERVED:1;
+            unsigned PSF:1;
+            unsigned DISPLAY_EXT:1;
+        } seq_max_size;
+#endif
+        uint32_t max_size; /* raw 32-bit view of seq_max_size */
+    };
+
+    union
+    {
+#ifndef MFDBIGENDIAN
+        struct
+        {
+            unsigned ASPECT_RATIO_FLAG:1;
+            unsigned DISP_VERT_SIZE:14;
+            unsigned DISP_HORIZ_SIZE:14;
+            unsigned pad:3;
+        } seq_disp_size;
+#else
+        struct
+        {
+            unsigned pad:3;
+            unsigned DISP_HORIZ_SIZE:14;
+            unsigned DISP_VERT_SIZE:14;
+            unsigned ASPECT_RATIO_FLAG:1;
+        } seq_disp_size;
+#endif
+        uint32_t disp_size; /* raw 32-bit view of seq_disp_size */
+    };
+
+    uint8_t ASPECT_RATIO; // 4 bits
+
+    union
+    {
+#ifndef MFDBIGENDIAN
+        struct
+        {
+            unsigned ASPECT_VERT_SIZE:8;
+            unsigned ASPECT_HORIZ_SIZE:8;
+            unsigned pad:16;
+        } seq_aspect_size;
+#else
+        struct
+        {
+            unsigned pad:16;
+            unsigned ASPECT_HORIZ_SIZE:8;
+            unsigned ASPECT_VERT_SIZE:8;
+        } seq_aspect_size;
+#endif
+        uint32_t aspect_size; /* raw 32-bit view of seq_aspect_size */
+    };
+
+    uint8_t FRAMERATE_FLAG; // 1b
+    uint8_t FRAMERATEIND; // 1b
+
+    union
+    {
+#ifndef MFDBIGENDIAN
+        struct
+        {
+            unsigned FRAMERATEDR:4;
+            unsigned FRAMERATENR:8;
+            unsigned pad:20;
+        } seq_framerate_fraction;
+#else
+        struct
+        {
+            unsigned pad:20;
+            unsigned FRAMERATENR:8;
+            unsigned FRAMERATEDR:4;
+        } seq_framerate_fraction;
+#endif
+        uint32_t framerate_fraction; /* raw 32-bit view of seq_framerate_fraction */
+    };
+
+    uint16_t FRAMERATEEXP; // 16b
+    uint8_t COLOR_FORMAT_FLAG; // 1b
+
+    union
+    {
+#ifndef MFDBIGENDIAN
+        struct
+        {
+            unsigned MATRIX_COEF:8;
+            unsigned TRANSFER_CHAR:8;
+            unsigned COLOR_PRIM:8;
+            unsigned pad:8;
+        } seq_color_format;
+#else
+        struct
+        {
+            unsigned pad:8;
+            unsigned COLOR_PRIM:8;
+            unsigned TRANSFER_CHAR:8;
+            unsigned MATRIX_COEF:8;
+        } seq_color_format;
+#endif
+        uint32_t color_format; /* raw 32-bit view of seq_color_format */
+    };
+
+    uint8_t HRD_PARAM_FLAG; // 1b
+    uint8_t HRD_NUM_LEAKY_BUCKETS; // 5b
+    // No need to parse remaining items - not needed so far
+} vc1_SequenceLayerHeader;
+
+/** This structure represents metadata for struct c (RCV/simple-main profile
+sequence data). Bitfield order is mirrored under MFDBIGENDIAN so the raw
+32-bit union views stay consistent across endiannesses. */
+typedef struct
+{
+    union
+    {
+#ifndef MFDBIGENDIAN
+        struct
+        {
+            unsigned res6:1;
+            unsigned FINTERPFLAG:1;
+            unsigned QUANTIZER:2;
+            unsigned MAXBFRAMES:3;
+            unsigned RANGERED:1;
+            unsigned SYNCMARKER:1;
+            unsigned OVERLAP:1;
+            unsigned res5:1;
+            unsigned VSTRANSFORM:1;
+            unsigned DQUANT:2;
+            unsigned EXTENDED_MV:1;
+            unsigned FASTUVMC:1;
+            unsigned res4:1;
+            unsigned MULTIRES:1;
+            unsigned res3:1;
+            unsigned LOOPFILTER:1;
+            unsigned BITRTQ_POSTPROC:5;
+            unsigned FRMRTQ_POSTPROC:3;
+            unsigned PROFILE:4;
+        } struct_c;
+#else
+        struct
+        {
+            unsigned PROFILE:4;
+            unsigned FRMRTQ_POSTPROC:3;
+            unsigned BITRTQ_POSTPROC:5;
+            unsigned LOOPFILTER:1;
+            unsigned res3:1;
+            unsigned MULTIRES:1;
+            unsigned res4:1;
+            unsigned FASTUVMC:1;
+            unsigned EXTENDED_MV:1;
+            unsigned DQUANT:2;
+            unsigned VSTRANSFORM:1;
+            unsigned res5:1;
+            unsigned OVERLAP:1;
+            unsigned SYNCMARKER:1;
+            unsigned RANGERED:1;
+            unsigned MAXBFRAMES:3;
+            unsigned QUANTIZER:2;
+            unsigned FINTERPFLAG:1;
+            unsigned res6:1;
+        } struct_c;
+#endif
+        uint32_t struct_c_rcv; /* raw 32-bit STRUCT_C word as read from the RCV file */
+    };
+
+    union
+    {
+#ifndef MFDBIGENDIAN
+        struct
+        {
+            unsigned VERT_SIZE:16;
+            unsigned HORIZ_SIZE:16;
+        } struct_a;
+#else
+        struct
+        {
+            unsigned HORIZ_SIZE:16;
+            unsigned VERT_SIZE:16;
+        } struct_a;
+#endif
+        uint32_t struct_a_rcv; /* raw 32-bit STRUCT_A word (coded picture dimensions) */
+    };
+
+} vc1_RcvSequenceHeader;
+
+/** This structure represents metadata for entry point layers (advanced
+profile). Bitfield order is mirrored under MFDBIGENDIAN so the raw 32-bit
+union views stay consistent across endiannesses. */
+typedef struct
+{
+    union
+    {
+#ifndef MFDBIGENDIAN
+        struct
+        {
+            unsigned QUANTIZER:2;
+            unsigned OVERLAP:1;
+            unsigned VSTRANSFORM:1;
+            unsigned DQUANT:2;
+            unsigned EXTENDED_MV:1;
+            unsigned FASTUVMC:1;
+            unsigned LOOPFILTER:1;
+            unsigned REFDIST_FLAG:1;
+            unsigned PANSCAN_FLAG:1;
+            unsigned CLOSED_ENTRY:1;
+            unsigned BROKEN_LINK:1;
+            unsigned pad1:19;
+        } ep_flags;
+#else
+        struct
+        {
+            unsigned pad1:19;
+            unsigned BROKEN_LINK:1;
+            unsigned CLOSED_ENTRY:1;
+            unsigned PANSCAN_FLAG:1;
+            unsigned REFDIST_FLAG:1;
+            unsigned LOOPFILTER:1;
+            unsigned FASTUVMC:1;
+            unsigned EXTENDED_MV:1;
+            unsigned DQUANT:2;
+            unsigned VSTRANSFORM:1;
+            unsigned OVERLAP:1;
+            unsigned QUANTIZER:2;
+        } ep_flags;
+#endif
+        uint32_t flags; /* raw 32-bit view of ep_flags */
+    };
+
+    // Skipping HRD data because it is not needed for our processing
+
+    union
+    {
+#ifndef MFDBIGENDIAN
+        struct
+        {
+            unsigned CODED_HEIGHT:12;
+            unsigned CODED_WIDTH:12;
+            unsigned pad2:8;
+        } ep_size;
+#else
+        struct
+        {
+            unsigned pad2:8;
+            unsigned CODED_WIDTH:12;
+            unsigned CODED_HEIGHT:12;
+        } ep_size;
+#endif
+        uint32_t size; /* raw 32-bit view of ep_size */
+    };
+
+    uint8_t CODED_SIZE_FLAG; /** 1 bit(s). */
+    uint8_t EXTENDED_DMV; /** 1 bit(s). */
+    uint8_t RANGE_MAPY_FLAG; /** 1 bit(s). */
+    uint8_t RANGE_MAPY; /** 3 bit(s). */
+    uint8_t RANGE_MAPUV_FLAG; /** 1 bit(s). */
+    uint8_t RANGE_MAPUV; /** 3 bit(s). */
+} vc1_EntryPointHeader;
+
+/** This structure represents metadata for slice and picture layers.
+Fields are grouped by the bitstream layer/profile that produces them; bit
+widths annotated per field are taken from the parsing code. */
+typedef struct
+{
+    /* Slice layer. */
+    uint16_t SLICE_ADDR; /** 9 bit(s). */
+
+    /* Picture layer for simple or main profile. */
+    uint8_t RANGEREDFRM; /** 1 bit(s). */
+    uint8_t PTYPE; /** 4 bit(s)? */
+    int8_t BFRACTION_NUM; /** ? bit(s). */
+    int16_t BFRACTION_DEN; /** ? bit(s). VC1_BFRACTION_BI marks a BI picture. */
+    uint8_t PQINDEX; /** 5 bit(s). */
+    uint8_t HALFQP; /** 1 bit(s). */
+    uint8_t PQUANTIZER; /** 1 bit(s). */
+    uint8_t MVRANGE; /** 3 bit(s)? */
+    uint8_t MVMODE; /** 4 bit(s)? Stored as a VC1_MVMODE_* enum value after parsing. */
+    uint8_t MVMODE2; /** 3 bit(s)? */
+    uint8_t LUMSCALE; /** 6 bit(s). */
+    uint8_t LUMSHIFT; /** 6 bit(s). */
+    uint8_t MVTAB; /** 2 bit(s). */
+    uint8_t CBPTAB; /** 2 bit(s). */
+    uint8_t TTMBF; /** 1 bit(s). */
+    uint8_t TTFRM; /** 2 bit(s). */
+    uint8_t TRANSACFRM; /** 2 bit(s)? Values 2-3 come from a second parsed bit plus 2. */
+    uint8_t TRANSACFRM2; /** 2 bit(s)? */
+    uint8_t TRANSDCTAB; /** 1 bit(s). */
+
+    /* Picture layer for advanced profile. */
+    uint8_t FCM; /** 2 bit(s)? */
+    uint8_t FPTYPE; /** 3 bit(s). */
+    uint8_t TFCNTR; /** 8 bit(s) */
+    uint8_t RPTFRM; /** 2 bit(s) */
+    uint8_t TFF; /** 1 bit(s). */
+    uint8_t RFF; /** 1 bit(s) */
+    uint8_t RNDCTRL; /** 1 bit(s). */
+    uint8_t UVSAMP; /** 1 bit(s). */
+    uint8_t POSTPROC; /** 2 bit(s). */
+    uint8_t CONDOVER; /** 2 bit(s)? Stored as a VC1_CONDOVER_FLAG_* value. */
+    uint8_t DMVRANGE; /** ? bit(s)? */
+    uint8_t MV4SWITCH; /** 1 bit(s). */
+    uint8_t INTCOMP; /** 1 bit(s). */
+    uint8_t MBMODETAB; /** 2 bit(s). */
+    uint8_t MV2BPTAB; /** 2 bit(s). */
+    uint8_t MV4BPTAB; /** 2 bit(s). */
+    uint8_t NUMREF; /** 1 bit(s). */
+    uint8_t REFFIELD; /** 1 bit(s). */
+
+    /* PAN SCAN */
+    uint8_t PS_PRESENT; /** 1 bit(s). */
+    uint8_t number_of_pan_scan_window; /** 4 max. */
+    viddec_vc1_pan_scan_window_t PAN_SCAN_WINDOW[VIDDEC_PANSCAN_MAX_OFFSETS];
+
+    /* VOPDQUANT. */
+    uint8_t PQDIFF; /** 3 bit(s). */
+    uint8_t ABSPQ; /** 5 bit(s). */
+    uint8_t DQUANTFRM; /** 1 bit(s). */
+    uint8_t DQPROFILE; /** 2 bit(s). */
+    uint8_t DQSBEDGE; /** 2 bit(s). */
+    uint8_t DQBILEVEL; /** 1 bit(s). */
+
+    /* Others. */
+    uint8_t PTypeField1;  /* picture types of the two fields of a field-interlaced frame */
+    uint8_t PTypeField2;
+    uint32_t PQUANT;      /* derived by vc1_CalculatePQuant() from PQINDEX */
+    uint8_t CurrField;
+    uint8_t BottomField;
+    uint32_t UniformQuant; /* mirrors PQUANTIZER when QUANTIZER == 1 */
+
+#ifdef VBP
+    uint8_t raw_MVTYPEMB; /* raw-mode flags: nonzero when the bitplane is coded raw per-MB */
+    uint8_t raw_DIRECTMB;
+    uint8_t raw_SKIPMB;
+    uint8_t raw_ACPRED;
+    uint8_t raw_FIELDTX;
+    uint8_t raw_OVERFLAGS;
+    uint8_t raw_FORWARDMB;
+
+    vc1_Bitplane MVTYPEMB; /* decoded bitplanes for the current picture */
+    vc1_Bitplane DIRECTMB;
+    vc1_Bitplane SKIPMB;
+    vc1_Bitplane ACPRED;
+    vc1_Bitplane FIELDTX;
+    vc1_Bitplane OVERFLAGS;
+    vc1_Bitplane FORWARDMB;
+    uint32_t ALTPQUANT;
+    uint8_t DQDBEDGE;
+#endif
+
+} vc1_PictureLayerHeader;
+
+/*@}*/
+
+#ifdef __cplusplus
+}
+#endif /* __cplusplus. */
+
+#endif /* _VC1PARSE_COMMON_DEFS_H_. */
diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_common_tables.c b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_common_tables.c
new file mode 100644
index 0000000..6fec35a
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_common_tables.c
@@ -0,0 +1,198 @@
+/* ///////////////////////////////////////////////////////////////////////
+//
+// INTEL CORPORATION PROPRIETARY INFORMATION
+// This software is supplied under the terms of a license agreement or
+// nondisclosure agreement with Intel Corporation and may not be copied
+// or disclosed except in accordance with the terms of that agreement.
+// Copyright (c) 2008 Intel Corporation. All Rights Reserved.
+//
+// Description: Contains tables for VLC decoding of syntax elements in simple
+// or main profile of VC-1 bitstream.
+//
+*/
+
+#include "vc1parse.h"
+
+/* MVMODE lookup tables, indexed by the number of leading zero bits of the
+   MVMODE VLC; the LOW table is selected when PQUANT > 12 (see
+   vc1_ParseFieldHeader_InterlaceBpicture_Adv). */
+const uint8_t VC1_MVMODE_LOW_TBL[] =
+{
+    VC1_MVMODE_HPELBI_1MV,
+    VC1_MVMODE_1MV,
+    VC1_MVMODE_HPEL_1MV,
+    VC1_MVMODE_MIXED_MV,
+    VC1_MVMODE_INTENSCOMP
+};
+
+const uint8_t VC1_MVMODE_HIGH_TBL[] =
+{
+    VC1_MVMODE_1MV,
+    VC1_MVMODE_MIXED_MV,
+    VC1_MVMODE_HPEL_1MV,
+    VC1_MVMODE_HPELBI_1MV,
+    VC1_MVMODE_INTENSCOMP
+};
+
+/* IMODE VLC table. Layout shared with vc1_DecodeHuffmanOne(): max bits,
+   subtable count, subtable sizes, then for each code length a pair count
+   followed by (code, value) pairs; -1 terminates. */
+const int32_t VC1_BITPLANE_IMODE_TBL[] =
+{
+    4, /* max bits */
+    1, /* total subtables */
+    4, /* subtable sizes */
+
+    0, /* 1-bit codes */
+    2, /* 2-bit codes */
+    2, VC1_BITPLANE_NORM2_MODE,
+    3, VC1_BITPLANE_NORM6_MODE,
+    3, /* 3-bit codes */
+    1, VC1_BITPLANE_DIFF2_MODE,
+    2, VC1_BITPLANE_ROWSKIP_MODE,
+    3, VC1_BITPLANE_COLSKIP_MODE,
+    2, /* 4-bit codes */
+    0, VC1_BITPLANE_RAW_MODE,
+    1, VC1_BITPLANE_DIFF6_MODE,
+-1
+};
+
+/* This VLC table is used for decoding of k in bitplane. Layout shared with
+   vc1_DecodeHuffmanOne(): max bits, subtable count, subtable sizes, then for
+   each code length a pair count followed by (code, value) pairs; -1 ends. */
+const int32_t VC1_BITPLANE_K_TBL[] =
+{
+    13, /* max bits */
+    2,  /* total subtables */
+    6,7,/* subtable sizes */
+
+    1, /* 1-bit codes */
+    1, 0 ,
+    0, /* 2-bit codes */
+    0, /* 3-bit codes */
+    6, /* 4-bit codes */
+    2, 1,  3, 2,  4, 4,  5, 8,
+    6, 16, 7, 32,
+    0, /* 5-bit codes */
+    1, /* 6-bit codes */
+    (3 << 1)| 1, 63,
+    0, /* 7-bit codes */
+    15, /* 8-bit codes */
+    0, 3,   1, 5,   2, 6,   3, 9,
+    4, 10,  5, 12,  6, 17,  7, 18,
+    8, 20,  9, 24,  10, 33, 11, 34,
+    12, 36, 13, 40, 14, 48,
+    6, /* 9-bit codes */
+    (3 << 4)| 7, 31,
+    (3 << 4)| 6, 47,
+    (3 << 4)| 5, 55,
+    (3 << 4)| 4, 59,
+
+    (3 << 4)| 3, 61,
+    (3 << 4)| 2, 62,
+    20, /* 10-bit codes */
+    (1 << 6)| 11, 11,
+    (1 << 6)| 7, 7 ,
+    (1 << 6)| 13, 13,
+    (1 << 6)| 14, 14,
+
+    (1 << 6)| 19, 19,
+    (1 << 6)| 21, 21,
+    (1 << 6)| 22, 22,
+    (1 << 6)| 25, 25,
+
+    (1 << 6)| 26, 26,
+    (1 << 6)| 28, 28,
+    (1 << 6)| 3, 35,
+    (1 << 6)| 5, 37,
+
+    (1 << 6)| 6, 38,
+    (1 << 6)| 9, 41,
+    (1 << 6)| 10, 42,
+    (1 << 6)| 12, 44,
+
+    (1 << 6)| 17, 49,
+    (1 << 6)| 18, 50,
+    (1 << 6)| 20, 52,
+    (1 << 6)| 24, 56,
+    0, /* 11-bit codes */
+    0, /* 12-bit codes */
+    15, /* 13-bit codes */
+    (3 << 8)| 14, 15,
+    (3 << 8)| 13, 23,
+    (3 << 8)| 12, 27,
+    (3 << 8)| 11, 29,
+
+    (3 << 8)| 10, 30,
+    (3 << 8)| 9, 39,
+    (3 << 8)| 8, 43,
+    (3 << 8)| 7, 45,
+
+    (3 << 8)| 6, 46,
+    (3 << 8)| 5, 51,
+    (3 << 8)| 4, 53,
+    (3 << 8)| 3, 54,
+
+    (3 << 8)| 2, 57,
+    (3 << 8)| 1, 58,
+    (3 << 8)| 0, 60,
+    -1
+};
+
+/* This VLC table is used for decoding of BFRACTION. Layout matches
+   vc1_DecodeHuffmanPair(): entries are (code, numerator, denominator)
+   triples; 0x7E decodes to the INVALID pair and 0x7F to the BI marker. */
+const int32_t VC1_BFRACTION_TBL[] =
+{
+    7, /* max bits */
+    2, /* total subtables */
+    3,4, /* subtable sizes */
+    0, /* 1-bit codes */
+    0, /* 2-bit codes */
+    7, /* 3-bit codes */
+    0x00,1,2, 0x01,1,3, 0x02,2,3, 0x03,1,4,
+    0x04,3,4, 0x05,1,5, 0x06,2,5,
+    0, /* 4-bit codes */
+    0, /* 5-bit codes */
+    0, /* 6-bit codes */
+    16, /* 7-bit codes */
+    0x70, 3,5, 0x71, 4,5, 0x72, 1,6, 0x73, 5,6,
+    0x74, 1,7, 0x75, 2,7, 0x76, 3,7, 0x77, 4,7,
+    0x78, 5,7, 0x79, 6,7, 0x7A, 1,8, 0x7B, 3,8,
+    0x7C, 5,8, 0x7D, 7,8,
+    0x7E, VC1_BFRACTION_INVALID,VC1_BFRACTION_INVALID,
+    0x7F, VC1_BFRACTION_BI, VC1_BFRACTION_BI,
+
+    -1
+};
+
+/* This table is used for VLC decoding of REFDIST. Unary-style code: n-1 one
+   bits followed by a zero decode to value n (same table layout as the other
+   vc1_DecodeHuffmanOne tables); -1 terminates. */
+const int32_t VC1_REFDIST_TBL[] =
+{
+    16, /* Max bits. */
+    3,  /* Total sub-tables. */
+    5, 6, 5, /* Sub-table sizes. */
+
+    0, /* 1-bit codes. */
+    3, /* 2-bit codes. */
+    0, 0,     1, 1,     2, 2,
+    1, /* 3-bit codes. */
+    6, 3,
+    1, /* 4-bit codes. */
+    14, 4,
+    1, /* 5-bit codes. */
+    30, 5,
+    1, /* 6-bit codes. */
+    62, 6,
+    1, /* 7-bit codes. */
+    126, 7,
+    1, /* 8-bit codes. */
+    254, 8,
+    1, /* 9-bit codes. */
+    510, 9,
+    1, /* 10-bit codes. */
+    1022, 10,
+    1, /* 11-bit codes. */
+    2046, 11,
+    1, /* 12-bit codes. */
+    4094, 12,
+    1, /* 13-bit codes. */
+    8190, 13,
+    1, /* 14-bit codes. */
+    16382, 14,
+    1, /* 15-bit codes. */
+    32766, 15,
+    1, /* 16-bit codes. */
+    65534, 16,
+    -1 /* end of table. */
+};
diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_huffman.c b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_huffman.c
new file mode 100644
index 0000000..c2f5985
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_huffman.c
@@ -0,0 +1,97 @@
+/* ///////////////////////////////////////////////////////////////////////
+//
+// INTEL CORPORATION PROPRIETARY INFORMATION
+// This software is supplied under the terms of a license agreement or
+// nondisclosure agreement with Intel Corporation and may not be copied
+// or disclosed except in accordance with the terms of that agreement.
+// Copyright (c) 2008 Intel Corporation. All Rights Reserved.
+//
+// Description: Parses VLC syntax elements within VC-1 bitstream.
+//
+*/
+
+#include "vc1parse.h"
+
+/*----------------------------------------------------------------------------
+ * Decode a single VLC symbol using a table in the shared layout (max bits,
+ * subtable count, subtable sizes, then per code length a pair count followed
+ * by (code, value) pairs, -1 terminated). On a match the decoded value is
+ * written to *pDst.
+ * NOTE(review): if no code matches within maxBits the function falls out of
+ * the loop and returns VC1_STATUS_OK without writing *pDst -- confirm that
+ * callers tolerate an unmodified output in that case.
+ *----------------------------------------------------------------------------*/
+
+vc1_Status vc1_DecodeHuffmanOne(void* ctxt, int32_t *pDst, const int32_t *pDecodeTable)
+{
+    uint32_t tempValue;
+    const int32_t *pTable = pDecodeTable;
+    vc1_Status status = VC1_STATUS_OK;
+    int32_t i, j, maxBits, loopCount, totalBits, value;
+
+    maxBits = *pTable++;
+    loopCount = *pTable++;
+    totalBits = 0;
+    for (i = 0; i < loopCount; i++)
+        totalBits += *pTable++;
+
+    if (totalBits != maxBits) /* sanity check: subtable sizes must sum to max bits */
+        return VC1_STATUS_PARSE_ERROR;
+
+    value = 0;
+    for (i = 0; i < maxBits; i++)
+    {
+        VC1_GET_BITS9(1, tempValue);
+        value = (value << 1) | tempValue; /* accumulate the code bit by bit */
+        loopCount = *pTable++;
+        if (loopCount == -1) /* end-of-table sentinel */
+            break;
+        for (j = 0; j < loopCount; j++)
+        {
+            if (value == *pTable++)
+            {
+                *pDst = *pTable;
+                return status;
+            }
+            else
+                pTable++; /* skip the value of the non-matching pair */
+        }
+    }
+
+    return status;
+}
+
+/*----------------------------------------------------------------------------
+ * Decode a VLC symbol whose table entries are (code, first, second) triples
+ * (e.g. VC1_BFRACTION_TBL). On a match, writes the pair to *pFirst/*pSecond.
+ * NOTE(review): as with vc1_DecodeHuffmanOne, an unmatched code returns
+ * VC1_STATUS_OK with both outputs unmodified -- confirm callers tolerate it.
+ *----------------------------------------------------------------------------*/
+
+vc1_Status vc1_DecodeHuffmanPair(void* ctxt, const int32_t *pDecodeTable,
+    int8_t *pFirst, int16_t *pSecond)
+{
+    uint32_t tempValue;
+    const int32_t *pTable = pDecodeTable;
+    vc1_Status status = VC1_STATUS_OK;
+    int32_t i, j, maxBits, loopCount, totalBits, value;
+
+    maxBits = *pTable++;
+    loopCount = *pTable++;
+    totalBits = 0;
+    for (i = 0; i < loopCount; i++)
+        totalBits += *pTable++;
+
+    if (totalBits != maxBits) /* sanity check: subtable sizes must sum to max bits */
+        return VC1_STATUS_PARSE_ERROR;
+
+    value = 0;
+    for (i = 0; i < maxBits; i++)
+    {
+        VC1_GET_BITS9(1, tempValue);
+        value = (value << 1) | tempValue; /* accumulate the code bit by bit */
+        loopCount = *pTable++;
+        if (loopCount == -1) /* end-of-table sentinel */
+            break;
+        for (j = 0; j < loopCount; j++)
+        {
+            if (value == *pTable++)
+            {
+                *pFirst = *pTable++;
+                *pSecond = *pTable;
+                return status;
+            }
+            else
+                pTable += 2; /* skip the two values of the non-matching triple */
+        }
+    }
+
+    return status;
+}
diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_ipic.c b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_ipic.c
new file mode 100644
index 0000000..1a37929
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_ipic.c
@@ -0,0 +1,101 @@
+/* ///////////////////////////////////////////////////////////////////////
+//
+// INTEL CORPORATION PROPRIETARY INFORMATION
+// This software is supplied under the terms of a license agreement or
+// nondisclosure agreement with Intel Corporation and may not be copied
+// or disclosed except in accordance with the terms of that agreement.
+// Copyright (c) 2008 Intel Corporation. All Rights Reserved.
+//
+// Description: Parses VC-1 picture layer for progressive I picture in simple
+// or main profile bitstream or progressive BI picture in main profile
+// bitstream.
+//
+*/
+
+#include "vc1parse.h"
+
+/*------------------------------------------------------------------------------
+ * Parse picture layer. This function parses a progressive I picture for simple
+ * or main profile bitstream, or a progressive BI picture in main profile
+ * bitstream. It starts after PTYPE was parsed and stops before the macroblock
+ * layer.
+ * Table 16 of SMPTE 421M after processing up to PTYPE for I picture.
+ * Table 17 of SMPTE 421M after processing up to PTYPE for BI picture.
+ * Returns VC1_STATUS_OK, or the first failing sub-parser's status.
+ *------------------------------------------------------------------------------
+ */
+
+vc1_Status vc1_ParsePictureHeader_ProgressiveIpicture(void* ctxt, vc1_Info *pInfo)
+{
+    uint32_t tempValue;
+    vc1_Status status = VC1_STATUS_OK;
+    vc1_metadata_t *md = &pInfo->metadata;
+    vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader;
+
+    /* rounding control is implied for simple and main profile, SMPTE 421M 8.3.7.
+       For each I or BI frame, RND shall be set to 1 */
+    if (md->PROFILE != VC1_PROFILE_ADVANCED)
+    {
+        picLayerHeader->RNDCTRL = md->RNDCTRL | 1 ;
+        md->RNDCTRL = picLayerHeader->RNDCTRL; /* propagate to subsequent pictures */
+    }
+
+
+    if (picLayerHeader->PTYPE == VC1_BI_FRAME)
+    {
+        if ((status = vc1_DecodeHuffmanPair(ctxt, VC1_BFRACTION_TBL,
+            &picLayerHeader->BFRACTION_NUM, &picLayerHeader->BFRACTION_DEN))
+            != VC1_STATUS_OK)
+        {
+            return status;
+        }
+        if (picLayerHeader->BFRACTION_DEN != VC1_BFRACTION_BI) /* a BI frame must carry the BI escape code */
+            return VC1_STATUS_PARSE_ERROR;
+    }
+
+    VC1_GET_BITS9(7, tempValue); /* BF: buffer fullness, value unused here. */
+    VC1_GET_BITS9(5, picLayerHeader->PQINDEX);
+
+    if ((status = vc1_CalculatePQuant(pInfo)) != VC1_STATUS_OK)
+        return status;
+
+    if (picLayerHeader->PQINDEX <= 8)
+    {
+        VC1_GET_BITS9(1, picLayerHeader->HALFQP); /* HALFQP present only for PQINDEX <= 8 */
+    }
+    else picLayerHeader->HALFQP=0;
+
+    if (md->QUANTIZER == 1)
+    {
+        VC1_GET_BITS9(1, picLayerHeader->PQUANTIZER);
+        picLayerHeader->UniformQuant = picLayerHeader->PQUANTIZER;
+    }
+
+    /* MVRANGE but only for main profile. */
+    if ((status = vc1_MVRangeDecode(ctxt, pInfo)) != VC1_STATUS_OK)
+        return status;
+
+    if (md->MULTIRES == 1 && picLayerHeader->PTYPE != VC1_BI_FRAME)
+    {
+        VC1_GET_BITS9(2, tempValue); /* RESPIC: resolution index, value unused here. */
+    }
+
+    VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM);
+    if (picLayerHeader->TRANSACFRM)
+    {
+        VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM); /* second bit widens TRANSACFRM to 2 or 3 */
+        picLayerHeader->TRANSACFRM += 2;
+    }
+
+    VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM2);
+    if (picLayerHeader->TRANSACFRM2)
+    {
+        VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM2); /* second bit widens TRANSACFRM2 to 2 or 3 */
+        picLayerHeader->TRANSACFRM2 += 2;
+    }
+
+    VC1_GET_BITS9(1, picLayerHeader->TRANSDCTAB);
+
+    /* Skip parsing of macroblock layer. */
+
+    return status;
+}
diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_ipic_adv.c b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_ipic_adv.c
new file mode 100644
index 0000000..03aeb79
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_ipic_adv.c
@@ -0,0 +1,257 @@
+/* ///////////////////////////////////////////////////////////////////////
+//
+// INTEL CORPORATION PROPRIETARY INFORMATION
+// This software is supplied under the terms of a license agreement or
+// nondisclosure agreement with Intel Corporation and may not be copied
+// or disclosed except in accordance with the terms of that agreement.
+// Copyright (c) 2008 Intel Corporation. All Rights Reserved.
+//
+// Description: Parses VC-1 picture layer for progressive I or BI picture in
+// advanced profile bitstream.
+//
+*/
+
+#include "vc1parse.h"
+#include "viddec_fw_debug.h"
+/*------------------------------------------------------------------------------
+ * Parse picture layer. This function parses progressive I or BI picture for
+ * advanced profile bitstream.
+ * Table 18 of SMPTE 421M after processing up to POSTPROC by
+ * vc1_ParsePictureHeader_Adv() but stopping before processing of macroblock
+ * layer.
+ *------------------------------------------------------------------------------
+ */
+
+vc1_Status vc1_ParsePictureHeader_ProgressiveIpicture_Adv(void* ctxt, vc1_Info *pInfo)
+{
+ vc1_Status status = VC1_STATUS_OK;
+ vc1_metadata_t *md = &pInfo->metadata;
+ vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader;
+
+ /* Per-macroblock AC-prediction bitplane (ACPRED). */
+ if ((status = vc1_DecodeBitplane(ctxt, pInfo,
+ md->widthMB, md->heightMB, BPP_ACPRED)) != VC1_STATUS_OK)
+ {
+ return status;
+ }
+
+ /* CONDOVER is present only when overlap filtering is on and PQUANT is
+ low. It is a VLC: 0 -> no overlap, 10 -> all MBs, 11 -> "some", in
+ which case a per-MB OVERFLAGS bitplane follows. */
+ if ((md->OVERLAP == 1) && (picLayerHeader->PQUANT <= 8))
+ {
+ VC1_GET_BITS9(1, picLayerHeader->CONDOVER);
+ if (picLayerHeader->CONDOVER)
+ {
+ VC1_GET_BITS9(1, picLayerHeader->CONDOVER);
+ if (! picLayerHeader->CONDOVER)
+ picLayerHeader->CONDOVER = VC1_CONDOVER_FLAG_ALL;
+ else
+ {
+ picLayerHeader->CONDOVER = VC1_CONDOVER_FLAG_SOME;
+ if ((status = vc1_DecodeBitplane(ctxt, pInfo,
+ md->widthMB,
+ md->heightMB, BPP_OVERFLAGS)) != VC1_STATUS_OK)
+ {
+ return status;
+ }
+ }
+ }
+ else
+ picLayerHeader->CONDOVER = VC1_CONDOVER_FLAG_NONE;
+ }
+
+ /* TRANSACFRM is a short VLC: first bit 0 -> value 0; otherwise one more
+ bit is read and offset by 2, giving values 2 or 3. Same scheme for
+ TRANSACFRM2 below. */
+ VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM);
+ if (picLayerHeader->TRANSACFRM)
+ {
+ VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM);
+ picLayerHeader->TRANSACFRM += 2;
+ }
+
+ VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM2);
+ if (picLayerHeader->TRANSACFRM2)
+ {
+ VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM2);
+ picLayerHeader->TRANSACFRM2 += 2;
+ }
+
+ /* 1-bit intra-DC coding table selector. */
+ VC1_GET_BITS9(1, picLayerHeader->TRANSDCTAB);
+
+ /* VOPDQUANT (picture-level quantizer deltas); its status is returned. */
+ status = vc1_VOPDQuant(ctxt, pInfo);
+
+ /* Skip parsing of macroblock layer. */
+
+ return status;
+}
+
+/*------------------------------------------------------------------------------
+ * Parse picture layer. This function parses interlace I or BI frame for
+ * advanced profile bitstream.
+ * Table 82 of SMPTE 421M after processing up to POSTPROC by
+ * vc1_ParsePictureHeader_Adv() but stopping before processing of macroblock
+ * layer.
+ *------------------------------------------------------------------------------
+ */
+
+vc1_Status vc1_ParsePictureHeader_InterlaceIpicture_Adv(void* ctxt, vc1_Info *pInfo)
+{
+ vc1_Status status = VC1_STATUS_OK;
+ vc1_metadata_t *md = &pInfo->metadata;
+ vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader;
+
+ /* FIELDTX bitplane: per-MB field/frame transform flags (interlace frame
+ pictures only — this is the element that distinguishes this header from
+ the progressive I/BI header). */
+ if ((status = vc1_DecodeBitplane(ctxt, pInfo,
+ md->widthMB, md->heightMB, BPP_FIELDTX)) != VC1_STATUS_OK)
+ {
+ return status;
+ }
+
+ /* Per-macroblock AC-prediction bitplane (ACPRED). */
+ if ((status = vc1_DecodeBitplane(ctxt, pInfo,
+ md->widthMB, md->heightMB, BPP_ACPRED)) != VC1_STATUS_OK)
+ {
+ return status;
+ }
+
+ /* CONDOVER VLC: 0 -> none, 10 -> all, 11 -> "some" followed by the
+ per-MB OVERFLAGS bitplane. Coded only when OVERLAP is on and PQUANT
+ is low. */
+ if ((md->OVERLAP == 1) && (picLayerHeader->PQUANT <= 8))
+ {
+ VC1_GET_BITS9(1, picLayerHeader->CONDOVER);
+ if (picLayerHeader->CONDOVER)
+ {
+ VC1_GET_BITS9(1, picLayerHeader->CONDOVER);
+ if (! picLayerHeader->CONDOVER)
+ picLayerHeader->CONDOVER = VC1_CONDOVER_FLAG_ALL;
+ else
+ {
+ picLayerHeader->CONDOVER = VC1_CONDOVER_FLAG_SOME;
+ if ((status = vc1_DecodeBitplane(ctxt, pInfo,
+ md->widthMB,
+ md->heightMB, BPP_OVERFLAGS)) != VC1_STATUS_OK)
+ {
+ return status;
+ }
+ }
+ }
+ else
+ picLayerHeader->CONDOVER = VC1_CONDOVER_FLAG_NONE;
+ }
+
+ /* TRANSACFRM / TRANSACFRM2 VLC: 0 -> 0, else one more bit plus 2. */
+ VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM);
+ if (picLayerHeader->TRANSACFRM)
+ {
+ VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM);
+ picLayerHeader->TRANSACFRM += 2;
+ }
+
+ VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM2);
+ if (picLayerHeader->TRANSACFRM2)
+ {
+ VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM2);
+ picLayerHeader->TRANSACFRM2 += 2;
+ }
+
+ /* 1-bit intra-DC coding table selector. */
+ VC1_GET_BITS9(1, picLayerHeader->TRANSDCTAB);
+
+ /* VOPDQUANT; its status is the function result. */
+ status = vc1_VOPDQuant(ctxt, pInfo);
+
+ /* Skip parsing of macroblock layer. */
+
+ return status;
+}
+
+/*------------------------------------------------------------------------------
+ * Parse picture layer. This function parses interlace I or BI field for
+ * advanced profile bitstream.
+ * Table 87 of SMPTE 421M after processing up to BFRACTION by
+ * vc1_ParseFieldHeader_Adv() but stopping before processing of macroblock
+ * layer.
+ *------------------------------------------------------------------------------
+ */
+
+vc1_Status vc1_ParseFieldHeader_InterlaceIpicture_Adv(void* ctxt, vc1_Info *pInfo)
+{
+ uint32_t tempValue;
+ vc1_Status status = VC1_STATUS_OK;
+ vc1_metadata_t *md = &pInfo->metadata;
+ vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader;
+
+ // Reset MVMODE when the second field is an I picture
+ // to avoid carrying forward the mvmode values from previous field
+ // especially the intensity compensation value
+ picLayerHeader->MVMODE = 0;
+
+ /* PQINDEX (5 bits) drives the PQUANT derivation in vc1_CalculatePQuant. */
+ VC1_GET_BITS9(5, picLayerHeader->PQINDEX);
+ if ((status = vc1_CalculatePQuant(pInfo)) != VC1_STATUS_OK) {
+ DEB("Error parsing I field \n");
+ return status;
+ }
+
+ /* HALFQP is only present for low PQINDEX; otherwise it is implied 0. */
+ if (picLayerHeader->PQINDEX <= 8)
+ {
+ VC1_GET_BITS9(1, picLayerHeader->HALFQP);
+ }
+ else
+ picLayerHeader->HALFQP = 0;
+
+ /* Explicit quantizer selection at picture level (QUANTIZER == 1). */
+ if (md->QUANTIZER == 1) {
+ VC1_GET_BITS9(1, picLayerHeader->PQUANTIZER);
+ picLayerHeader->UniformQuant = picLayerHeader->PQUANTIZER;
+ }
+
+ /* POSTPROC bits are read and discarded. */
+ if (md->POSTPROCFLAG == 1)
+ VC1_GET_BITS9(2, tempValue); /* POSTPROC. */
+
+ /* ACPRED bitplane. A field is half the frame height, hence the
+ (heightMB+1)/2 row count (rounded up for odd heights). */
+ if ((status = vc1_DecodeBitplane(ctxt, pInfo,
+ md->widthMB, (md->heightMB+1)/2, BPP_ACPRED)) !=
+ VC1_STATUS_OK)
+ {
+ DEB("Error parsing I field \n");
+ return status;
+ }
+
+ /* CONDOVER VLC: 0 -> none, 10 -> all, 11 -> "some" + OVERFLAGS bitplane
+ (again at field height). */
+ if ((md->OVERLAP == 1) && (picLayerHeader->PQUANT <= 8))
+ {
+ VC1_GET_BITS9(1, picLayerHeader->CONDOVER);
+ if (picLayerHeader->CONDOVER)
+ {
+ VC1_GET_BITS9(1, picLayerHeader->CONDOVER);
+ if (! picLayerHeader->CONDOVER)
+ picLayerHeader->CONDOVER = VC1_CONDOVER_FLAG_ALL;
+ else
+ {
+ picLayerHeader->CONDOVER = VC1_CONDOVER_FLAG_SOME;
+
+ if ((status = vc1_DecodeBitplane(ctxt, pInfo,
+ md->widthMB,
+ (md->heightMB+1)/2, BPP_OVERFLAGS)) !=
+ VC1_STATUS_OK)
+ {
+ DEB("Error parsing I field \n");
+ return status;
+ }
+ }
+ }
+ else
+ picLayerHeader->CONDOVER = VC1_CONDOVER_FLAG_NONE;
+ }
+
+ /* TRANSACFRM / TRANSACFRM2 VLC: 0 -> 0, else one more bit plus 2. */
+ VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM);
+ if (picLayerHeader->TRANSACFRM)
+ {
+ VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM);
+ picLayerHeader->TRANSACFRM += 2;
+ }
+
+ VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM2);
+ if (picLayerHeader->TRANSACFRM2)
+ {
+ VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM2);
+ picLayerHeader->TRANSACFRM2 += 2;
+ }
+
+ /* 1-bit intra-DC coding table selector. */
+ VC1_GET_BITS9(1, picLayerHeader->TRANSDCTAB);
+
+ status = vc1_VOPDQuant(ctxt, pInfo);
+ if (status != VC1_STATUS_OK) {
+ DEB("Error parsing I field \n");
+ return status;
+ }
+
+ /* Skip parsing of macroblock layer. */
+
+ return status;
+}
diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_mv_com.c b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_mv_com.c
new file mode 100644
index 0000000..7cbcc34
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_mv_com.c
@@ -0,0 +1,82 @@
+/* ///////////////////////////////////////////////////////////////////////
+//
+// INTEL CORPORATION PROPRIETARY INFORMATION
+// This software is supplied under the terms of a license agreement or
+// nondisclosure agreement with Intel Corporation and may not be copied
+// or disclosed except in accordance with the terms of that agreement.
+// Copyright (c) 2008 Intel Corporation. All Rights Reserved.
+//
+// Description: Parses VC-1 syntax elements MVRANGE and DMVRANGE.
+//
+*/
+
+#include "vc1parse.h"
+
+/*------------------------------------------------------------------------------
+ * Parse syntax element MVRANGE, which exists for main and advanced profiles.
+ *------------------------------------------------------------------------------
+ */
+
+vc1_Status vc1_MVRangeDecode(void* ctxt, vc1_Info *pInfo)
+{
+ vc1_Status status = VC1_STATUS_OK;
+ vc1_metadata_t *md = &pInfo->metadata;
+ vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader;
+
+ /* MVRANGE is a unary-style VLC present only when EXTENDED_MV is set:
+ bits 0 -> 0, 10 -> 1, 110 -> 2, 111 -> 3. Without EXTENDED_MV the
+ value is implied 0 (default motion vector range). */
+ if (md->EXTENDED_MV == 1)
+ {
+ VC1_GET_BITS9(1, picLayerHeader->MVRANGE);
+ if (picLayerHeader->MVRANGE)
+ {
+ VC1_GET_BITS9(1, picLayerHeader->MVRANGE);
+ if (picLayerHeader->MVRANGE)
+ {
+ VC1_GET_BITS9(1, picLayerHeader->MVRANGE);
+ picLayerHeader->MVRANGE += 1;
+ }
+ picLayerHeader->MVRANGE += 1;
+ }
+ }
+ else
+ picLayerHeader->MVRANGE = 0;
+
+ return status;
+}
+
+/*------------------------------------------------------------------------------
+ * Parse syntax element DMVRANGE.
+ *------------------------------------------------------------------------------
+ */
+
+vc1_Status vc1_DMVRangeDecode(void* ctxt, vc1_Info *pInfo)
+{
+ vc1_Status status = VC1_STATUS_OK;
+ vc1_metadata_t *md = &pInfo->metadata;
+ vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader;
+
+ /* DMVRANGE is a unary-style VLC present only when EXTENDED_DMV is set:
+ 0 -> NONE, 10 -> HORIZONTAL, 110 -> VERTICAL,
+ 111 -> HORIZONTAL_VERTICAL. Note: when EXTENDED_DMV is 0 the field
+ is left untouched (no implicit reset to NONE here). */
+ if (md->EXTENDED_DMV == 1)
+ {
+ VC1_GET_BITS9(1, picLayerHeader->DMVRANGE);
+ if (picLayerHeader->DMVRANGE == 0)
+ picLayerHeader->DMVRANGE = VC1_DMVRANGE_NONE;
+ else
+ {
+ VC1_GET_BITS9(1, picLayerHeader->DMVRANGE);
+ if (picLayerHeader->DMVRANGE == 0)
+ picLayerHeader->DMVRANGE = VC1_DMVRANGE_HORIZONTAL_RANGE;
+ else
+ {
+ VC1_GET_BITS9(1, picLayerHeader->DMVRANGE);
+ if (picLayerHeader->DMVRANGE == 0)
+ picLayerHeader->DMVRANGE = VC1_DMVRANGE_VERTICAL_RANGE;
+ else
+ {
+ picLayerHeader->DMVRANGE =
+ VC1_DMVRANGE_HORIZONTAL_VERTICAL_RANGE;
+ }
+ }
+ }
+ }
+
+ return status;
+}
diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_pic_com.c b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_pic_com.c
new file mode 100644
index 0000000..c363456
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_pic_com.c
@@ -0,0 +1,101 @@
+/* ///////////////////////////////////////////////////////////////////////
+//
+// INTEL CORPORATION PROPRIETARY INFORMATION
+// This software is supplied under the terms of a license agreement or
+// nondisclosure agreement with Intel Corporation and may not be copied
+// or disclosed except in accordance with the terms of that agreement.
+// Copyright (c) 2008 Intel Corporation. All Rights Reserved.
+//
+// Description: Parses VC-1 picture layer for simple and main profiles.
+//
+*/
+
+#include "vc1parse.h"
+
+/*------------------------------------------------------------------------------
+ * Parse picture layer. This function parses the picture header for simple or
+ * main profile down to macroblock layer.
+ * Table 16 of SMPTE 421M after processing up to PTYPE for I picture.
+ * Table 17 of SMPTE 421M after processing up to PTYPE for BI picture.
+ * Table 19 of SMPTE 421M after processing up to PTYPE for P picture.
+ * Table 21 of SMPTE 421M after processing up to PTYPE for B picture.
+ *------------------------------------------------------------------------------
+ */
+
+vc1_Status vc1_ParsePictureHeader(void* ctxt, vc1_Info *pInfo)
+{
+ uint32_t tempValue;
+ vc1_Status status = VC1_STATUS_OK;
+ vc1_metadata_t *md = &pInfo->metadata;
+ vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader;
+ int32_t result;
+
+ if (md->PROFILE != VC1_PROFILE_ADVANCED)
+ {
+ // As per spec, for main/simple profile, if the size of the coded picture is <= 1B,
+ // it shall be treated as a skipped frame.
+ // In content with skipped frames, the data is "00".
+ // rcv to vc1 conversion process adds an additional byte (0x80) to the picture, hence
+ // the data looks like "00 80"
+ // Hence if data is <= 2B, we will consider it skipped (check for 16+1b, if it fails, the frame is skipped).
+ result = viddec_pm_peek_bits(ctxt, &tempValue, 17);
+ if(result == -1)
+ {
+ picLayerHeader->PTYPE = VC1_SKIPPED_FRAME;
+ return status;
+ }
+ }
+
+ /* INTERPFRM is read and discarded; only present with FINTERPFLAG. */
+ if (md->FINTERPFLAG == 1)
+ {
+ VC1_GET_BITS9(1, tempValue); /* INTERPFRM. */
+ }
+
+ VC1_GET_BITS9(2, tempValue); /* FRMCNT. */
+
+ /* Range-reduction flag, present only when RANGERED is on in the
+ sequence header. */
+ if (md->RANGERED == 1)
+ {
+ VC1_GET_BITS9(1, picLayerHeader->RANGEREDFRM);
+ }
+
+ /* PTYPE: with no B frames allowed it is a single bit (0 = I, 1 = P);
+ otherwise it is a VLC: 1 -> P, 01 -> I, 00 -> B (or BI, disambiguated
+ by peeking at the following 7 bits). */
+ if (md->MAXBFRAMES == 0)
+ {
+ VC1_GET_BITS9(1, picLayerHeader->PTYPE);
+ if (picLayerHeader->PTYPE == 0)
+ picLayerHeader->PTYPE = VC1_I_FRAME;
+ else
+ picLayerHeader->PTYPE = VC1_P_FRAME;
+ }
+ else
+ {
+ VC1_GET_BITS9(1, picLayerHeader->PTYPE);
+ if (picLayerHeader->PTYPE == 0)
+ {
+ VC1_GET_BITS9(1, picLayerHeader->PTYPE);
+ if (picLayerHeader->PTYPE == 0) {
+ picLayerHeader->PTYPE = VC1_B_FRAME; /* Or VC1_BI_FRAME. */
+ /* if peek(7) = 0b1111111 then ptype = bi */
+ VC1_PEEK_BITS( 7, tempValue );
+ if ( tempValue == 0x7f )
+ picLayerHeader->PTYPE = VC1_BI_FRAME;
+ } else
+ picLayerHeader->PTYPE = VC1_I_FRAME;
+ }
+ else
+ picLayerHeader->PTYPE = VC1_P_FRAME;
+ }
+
+ /* Dispatch to the per-picture-type parser for the rest of the header. */
+ if (picLayerHeader->PTYPE == VC1_I_FRAME ||
+ picLayerHeader->PTYPE == VC1_BI_FRAME)
+ {
+ status = vc1_ParsePictureHeader_ProgressiveIpicture(ctxt, pInfo);
+ }
+ else if (picLayerHeader->PTYPE == VC1_P_FRAME)
+ status = vc1_ParsePictureHeader_ProgressivePpicture(ctxt, pInfo);
+ else if (picLayerHeader->PTYPE == VC1_B_FRAME)
+ status = vc1_ParsePictureHeader_ProgressiveBpicture(ctxt, pInfo);
+ else
+ status = VC1_STATUS_PARSE_ERROR;
+
+ return status;
+}
diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_pic_com_adv.c b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_pic_com_adv.c
new file mode 100644
index 0000000..fa9c3c7
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_pic_com_adv.c
@@ -0,0 +1,403 @@
+/* ///////////////////////////////////////////////////////////////////////
+//
+// INTEL CORPORATION PROPRIETARY INFORMATION
+// This software is supplied under the terms of a license agreement or
+// nondisclosure agreement with Intel Corporation and may not be copied
+// or disclosed except in accordance with the terms of that agreement.
+// Copyright (c) 2008 Intel Corporation. All Rights Reserved.
+//
+// Description: Parses VC-1 picture layer for advanced profile.
+//
+*/
+
+#include "vc1parse.h"
+#include "viddec_fw_debug.h"
+
+/*------------------------------------------------------------------------------
+ * Parse picture layer. This function parses the picture header for advanced
+ * profile down to POSTPROC syntax element.
+ * Table 18 of SMPTE 421M for progressive I or BI picture.
+ * Table 20 of SMPTE 421M for progressive P picture.
+ * Table 22 of SMPTE 421M for progressive B picture.
+ * Table 23 of SMPTE 421M for skipped picture.
+ * Table 82 of SMPTE 421M for interlace I or BI frame.
+ * Table 83 of SMPTE 421M for interlace P frame.
+ * Table 84 of SMPTE 421M for interlace B frame.
+ *------------------------------------------------------------------------------
+ */
+
+vc1_Status vc1_ParsePictureHeader_Adv(void* ctxt, vc1_Info *pInfo)
+{
+ uint32_t i = 0;
+ uint32_t tempValue;
+ vc1_Status status = VC1_STATUS_OK;
+ uint32_t number_of_pan_scan_window;
+ vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader;
+ vc1_metadata_t *md = &pInfo->metadata;
+
+ /* FCM VLC (only coded for interlace-capable sequences): 0 -> progressive,
+ 10 -> frame interlace, 11 -> field interlace. Field-interlaced pictures
+ are handled by vc1_ParseFieldHeader_Adv, so hitting FCM == 11 here is
+ reported as a parse error. */
+ if (md->INTERLACE == 1)
+ {
+ VC1_GET_BITS9(1, picLayerHeader->FCM);
+ if (picLayerHeader->FCM)
+ {
+ VC1_GET_BITS9(1, picLayerHeader->FCM);
+ if (picLayerHeader->FCM)
+ {
+ picLayerHeader->FCM = VC1_FCM_FIELD_INTERLACE;
+ return VC1_STATUS_PARSE_ERROR;
+ }
+ else
+ picLayerHeader->FCM = VC1_FCM_FRAME_INTERLACE;
+ }
+ else
+ picLayerHeader->FCM = VC1_FCM_PROGRESSIVE;
+ }
+ else
+ picLayerHeader->FCM = VC1_FCM_PROGRESSIVE;
+
+
+ /* PTYPE VLC: 0 -> P, 10 -> B, 110 -> I, 1110 -> BI, 1111 -> skipped. */
+ VC1_GET_BITS9(1, picLayerHeader->PTYPE);
+ if (picLayerHeader->PTYPE)
+ {
+ VC1_GET_BITS9(1, picLayerHeader->PTYPE);
+ if (picLayerHeader->PTYPE)
+ {
+ VC1_GET_BITS9(1, picLayerHeader->PTYPE);
+ if (picLayerHeader->PTYPE)
+ {
+ VC1_GET_BITS9(1, picLayerHeader->PTYPE);
+ if (picLayerHeader->PTYPE)
+ picLayerHeader->PTYPE = VC1_SKIPPED_FRAME;
+ else
+ picLayerHeader->PTYPE = VC1_BI_FRAME;
+ }
+ else
+ picLayerHeader->PTYPE = VC1_I_FRAME;
+ }
+ else
+ picLayerHeader->PTYPE = VC1_B_FRAME;
+ }
+ else
+ picLayerHeader->PTYPE = VC1_P_FRAME;
+
+ if (picLayerHeader->PTYPE != VC1_SKIPPED_FRAME)
+ {
+ if (md->TFCNTRFLAG)
+ {
+ VC1_GET_BITS9(8, picLayerHeader->TFCNTR); /* TFCNTR. */
+ }
+ }
+
+ /* Pulldown: RPTFRM (repeat-frame count) for progressive/PSF streams,
+ TFF/RFF (top-field-first / repeat-first-field) otherwise. */
+ if (md->PULLDOWN)
+ {
+ if ((md->INTERLACE == 0) || (md->PSF == 1))
+ {
+ VC1_GET_BITS9(2, picLayerHeader->RPTFRM);
+ }
+ else
+ {
+ VC1_GET_BITS9(1, picLayerHeader->TFF);
+ VC1_GET_BITS9(1, picLayerHeader->RFF);
+ }
+ }
+
+ /* Pan-scan windows. The window count depends on interlace/pulldown:
+ interlaced non-PSF pictures carry 2 (+RFF) windows, others 1 (+RPTFRM).
+ Each window is hoffset/voffset (18 bits) and width/height (14 bits). */
+ if (md->PANSCAN_FLAG == 1)
+ {
+ VC1_GET_BITS9(1, picLayerHeader->PS_PRESENT); /* PS_PRESENT. */
+ if (picLayerHeader->PS_PRESENT == 1)
+ {
+ if ((md->INTERLACE == 1) &&
+ (md->PSF == 0))
+ {
+ if (md->PULLDOWN == 1)
+ number_of_pan_scan_window = 2 + picLayerHeader->RFF;
+ else
+ number_of_pan_scan_window = 2;
+ }
+ else
+ {
+ if (md->PULLDOWN == 1)
+ number_of_pan_scan_window = 1 + picLayerHeader->RPTFRM;
+ else
+ number_of_pan_scan_window = 1;
+ }
+ picLayerHeader->number_of_pan_scan_window = number_of_pan_scan_window;
+
+ for (i = 0; i < number_of_pan_scan_window; i++)
+ {
+ VC1_GET_BITS(18, picLayerHeader->PAN_SCAN_WINDOW[i].hoffset); /* PS_HOFFSET. */
+ VC1_GET_BITS(18, picLayerHeader->PAN_SCAN_WINDOW[i].voffset); /* PS_VOFFSET. */
+ VC1_GET_BITS(14, picLayerHeader->PAN_SCAN_WINDOW[i].width); /* PS_WIDTH. */
+ VC1_GET_BITS(14, picLayerHeader->PAN_SCAN_WINDOW[i].height); /* PS_HEIGHT. */
+ }
+ }
+ }
+
+ if (picLayerHeader->PTYPE != VC1_SKIPPED_FRAME)
+ {
+ /* RNDCTRL is cached in the metadata so later pictures (e.g. simple/
+ main profile implied toggling) can derive their value from it. */
+ VC1_GET_BITS9(1, picLayerHeader->RNDCTRL);
+ md->RNDCTRL = picLayerHeader->RNDCTRL;
+
+ if ((md->INTERLACE == 1) ||
+ (picLayerHeader->FCM != VC1_FCM_PROGRESSIVE))
+ {
+ VC1_GET_BITS9(1, picLayerHeader->UVSAMP);
+ }
+
+ if ((md->FINTERPFLAG == 1) &&
+ (picLayerHeader->FCM == VC1_FCM_PROGRESSIVE))
+ {
+ VC1_GET_BITS9(1, tempValue); /* INTERPFRM. */
+ }
+
+ /* BFRACTION (B-frame temporal position) for progressive B frames. */
+ if ((picLayerHeader->PTYPE == VC1_B_FRAME) &&
+ (picLayerHeader->FCM == VC1_FCM_PROGRESSIVE))
+ {
+ if ((status = vc1_DecodeHuffmanPair(ctxt, VC1_BFRACTION_TBL,
+ &picLayerHeader->BFRACTION_NUM, &picLayerHeader->BFRACTION_DEN))
+ != VC1_STATUS_OK)
+ {
+ return status;
+ }
+ }
+
+ /* PQINDEX drives PQUANT derivation; HALFQP only exists for
+ PQINDEX <= 8, otherwise implied 0. */
+ VC1_GET_BITS9(5, picLayerHeader->PQINDEX);
+ if ((status = vc1_CalculatePQuant(pInfo)) != VC1_STATUS_OK)
+ return status;
+
+ if (picLayerHeader->PQINDEX <= 8)
+ {
+ VC1_GET_BITS9(1, picLayerHeader->HALFQP);
+ }
+ else
+ picLayerHeader->HALFQP = 0;
+
+ if (md->QUANTIZER == 1)
+ {
+ VC1_GET_BITS9(1, picLayerHeader->PQUANTIZER);
+ picLayerHeader->UniformQuant = picLayerHeader->PQUANTIZER;
+ }
+
+ if (md->POSTPROCFLAG == 1)
+ {
+ VC1_GET_BITS9(2, picLayerHeader->POSTPROC);
+ }
+ }
+
+ /* Continue with the picture-type-specific remainder of the header. */
+ return vc1_ParsePictureFieldHeader_Adv(ctxt, pInfo);
+}
+
+/*------------------------------------------------------------------------------
+ * Parse picture layer. This function parses the picture header for advanced
+ * profile down to BFRACTION syntax element.
+ * Table 85 of SMPTE 421M.
+ *------------------------------------------------------------------------------
+ */
+
+vc1_Status vc1_ParseFieldHeader_Adv(void* ctxt, vc1_Info *pInfo)
+{
+ uint32_t i = 0;
+ vc1_Status status = VC1_STATUS_OK;
+ uint32_t number_of_pan_scan_window;
+ vc1_metadata_t *md = &pInfo->metadata;
+ vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader;
+
+ /* FCM VLC: 0 -> progressive, 10 -> frame interlace, 11 -> field
+ interlace. This entry point only accepts field-interlaced pictures. */
+ VC1_GET_BITS9(1, picLayerHeader->FCM);
+ if (picLayerHeader->FCM)
+ {
+ VC1_GET_BITS9(1, picLayerHeader->FCM);
+ if (picLayerHeader->FCM)
+ picLayerHeader->FCM = VC1_FCM_FIELD_INTERLACE;
+ else
+ picLayerHeader->FCM = VC1_FCM_FRAME_INTERLACE;
+ }
+ else
+ picLayerHeader->FCM = VC1_FCM_PROGRESSIVE;
+ if (picLayerHeader->FCM != VC1_FCM_FIELD_INTERLACE)
+ return VC1_STATUS_PARSE_ERROR;
+
+ /* FPTYPE (3 bits) encodes the picture types of both fields:
+ 0 -> I/I, 1 -> I/P, 2 -> P/I, 3 -> P/P,
+ 4 -> B/B, 5 -> B/BI, 6 -> BI/B, 7 -> BI/BI. */
+ VC1_GET_BITS9(3, picLayerHeader->FPTYPE);
+ if (picLayerHeader->FPTYPE == 0)
+ {
+ picLayerHeader->PTypeField1 = VC1_I_FRAME;
+ picLayerHeader->PTypeField2 = VC1_I_FRAME;
+ }
+ else if (picLayerHeader->FPTYPE == 1)
+ {
+ picLayerHeader->PTypeField1 = VC1_I_FRAME;
+ picLayerHeader->PTypeField2 = VC1_P_FRAME;
+ }
+ else if (picLayerHeader->FPTYPE == 2)
+ {
+ picLayerHeader->PTypeField1 = VC1_P_FRAME;
+ picLayerHeader->PTypeField2 = VC1_I_FRAME;
+ }
+ else if (picLayerHeader->FPTYPE == 3)
+ {
+ picLayerHeader->PTypeField1 = VC1_P_FRAME;
+ picLayerHeader->PTypeField2 = VC1_P_FRAME;
+ }
+ else if (picLayerHeader->FPTYPE == 4)
+ {
+ picLayerHeader->PTypeField1 = VC1_B_FRAME;
+ picLayerHeader->PTypeField2 = VC1_B_FRAME;
+ }
+ else if (picLayerHeader->FPTYPE == 5)
+ {
+ picLayerHeader->PTypeField1 = VC1_B_FRAME;
+ picLayerHeader->PTypeField2 = VC1_BI_FRAME;
+ }
+ else if (picLayerHeader->FPTYPE == 6)
+ {
+ picLayerHeader->PTypeField1 = VC1_BI_FRAME;
+ picLayerHeader->PTypeField2 = VC1_B_FRAME;
+ }
+ else if (picLayerHeader->FPTYPE == 7)
+ {
+ picLayerHeader->PTypeField1 = VC1_BI_FRAME;
+ picLayerHeader->PTypeField2 = VC1_BI_FRAME;
+ }
+
+ if (md->TFCNTRFLAG)
+ {
+ VC1_GET_BITS9(8, picLayerHeader->TFCNTR);
+ }
+
+ /* Pulldown: RPTFRM for PSF, TFF/RFF otherwise. Without pulldown,
+ TFF defaults to 1 (top field first). */
+ if (md->PULLDOWN == 1)
+ {
+ if (md->PSF == 1)
+ {
+ VC1_GET_BITS9(2, picLayerHeader->RPTFRM);
+ }
+ else
+ {
+ VC1_GET_BITS9(1, picLayerHeader->TFF);
+ VC1_GET_BITS9(1, picLayerHeader->RFF);
+ }
+ } else
+ picLayerHeader->TFF = 1;
+
+ /* Pan-scan windows: field pictures carry 2 (+RFF with pulldown). */
+ if (md->PANSCAN_FLAG == 1)
+ {
+ VC1_GET_BITS9(1, picLayerHeader->PS_PRESENT);
+ if (picLayerHeader->PS_PRESENT)
+ {
+ if (md->PULLDOWN)
+ number_of_pan_scan_window = 2 + picLayerHeader->RFF;
+ else
+ number_of_pan_scan_window = 2;
+ picLayerHeader->number_of_pan_scan_window =number_of_pan_scan_window;
+
+ for (i = 0; i < number_of_pan_scan_window; i++)
+ {
+ VC1_GET_BITS(18, picLayerHeader->PAN_SCAN_WINDOW[i].hoffset); /* PS_HOFFSET. */
+ VC1_GET_BITS(18, picLayerHeader->PAN_SCAN_WINDOW[i].voffset); /* PS_VOFFSET. */
+ VC1_GET_BITS(14, picLayerHeader->PAN_SCAN_WINDOW[i].width); /* PS_WIDTH. */
+ VC1_GET_BITS(14, picLayerHeader->PAN_SCAN_WINDOW[i].height); /* PS_HEIGHT. */
+ }
+ }
+ }
+ /* RNDCTRL is stored straight into the metadata for field pictures. */
+ VC1_GET_BITS9(1, md->RNDCTRL);
+
+#ifdef VBP
+ picLayerHeader->RNDCTRL = md->RNDCTRL;
+#endif
+
+ VC1_GET_BITS9(1, picLayerHeader->UVSAMP);
+
+ /* REFDIST (reference distance) Huffman code; only for I/P field pairs
+ (FPTYPE <= 3) when REFDIST_FLAG is set. */
+ if ((md->REFDIST_FLAG == 1) && (picLayerHeader->FPTYPE <= 3))
+ {
+ int32_t tmp;
+ if ((status = vc1_DecodeHuffmanOne(ctxt, &tmp,
+ VC1_REFDIST_TBL)) != VC1_STATUS_OK)
+ {
+ return status;
+ }
+ md->REFDIST = tmp;
+ }
+
+ /* BFRACTION for B/BI field pairs (FPTYPE 4..7). */
+ if ((picLayerHeader->FPTYPE >= 4) && (picLayerHeader->FPTYPE <= 7))
+ {
+ if ((status = vc1_DecodeHuffmanPair(ctxt, VC1_BFRACTION_TBL,
+ &picLayerHeader->BFRACTION_NUM, &picLayerHeader->BFRACTION_DEN)) !=
+ VC1_STATUS_OK)
+ {
+ return status;
+ }
+ }
+
+ /* Select the active field's PTYPE and derive BottomField from TFF:
+ the first coded field is the top field when TFF == 1. */
+ if (picLayerHeader->CurrField == 0)
+ {
+ picLayerHeader->PTYPE = picLayerHeader->PTypeField1;
+ picLayerHeader->BottomField = (uint8_t) (1 - picLayerHeader->TFF);
+ }
+ else
+ {
+ picLayerHeader->BottomField = (uint8_t) (picLayerHeader->TFF);
+ picLayerHeader->PTYPE = picLayerHeader->PTypeField2;
+ }
+
+ /* Continue with the picture-type-specific remainder of the header. */
+ return vc1_ParsePictureFieldHeader_Adv(ctxt, pInfo);
+}
+
+/*------------------------------------------------------------------------------
+ * Parse picture layer. This function calls the appropriate function to further
+ * parse the picture header for advanced profile down to macroblock layer.
+ *------------------------------------------------------------------------------
+ */
+
+vc1_Status vc1_ParsePictureFieldHeader_Adv(void* ctxt, vc1_Info *pInfo)
+{
+ /* Defaults to PARSE_ERROR so an unrecognized FCM/PTYPE combination
+ falls through as an error without an explicit else branch. */
+ vc1_Status status = VC1_STATUS_PARSE_ERROR;
+
+ /* Dispatch on frame coding mode, then on picture type. Skipped
+ pictures have no further header data, hence STATUS_OK directly. */
+ if (pInfo->picLayerHeader.FCM == VC1_FCM_PROGRESSIVE)
+ {
+ if ((pInfo->picLayerHeader.PTYPE == VC1_I_FRAME) ||
+ (pInfo->picLayerHeader.PTYPE == VC1_BI_FRAME))
+ {
+ status = vc1_ParsePictureHeader_ProgressiveIpicture_Adv(ctxt, pInfo);
+ }
+ else if (pInfo->picLayerHeader.PTYPE == VC1_P_FRAME)
+ status = vc1_ParsePictureHeader_ProgressivePpicture_Adv(ctxt, pInfo);
+ else if (pInfo->picLayerHeader.PTYPE == VC1_B_FRAME)
+ status = vc1_ParsePictureHeader_ProgressiveBpicture_Adv(ctxt, pInfo);
+ else if (pInfo->picLayerHeader.PTYPE == VC1_SKIPPED_FRAME)
+ status = VC1_STATUS_OK;
+ }
+ else if (pInfo->picLayerHeader.FCM == VC1_FCM_FRAME_INTERLACE)
+ {
+ if ((pInfo->picLayerHeader.PTYPE == VC1_I_FRAME) ||
+ (pInfo->picLayerHeader.PTYPE == VC1_BI_FRAME))
+ {
+ status = vc1_ParsePictureHeader_InterlaceIpicture_Adv(ctxt, pInfo);
+ }
+ else if (pInfo->picLayerHeader.PTYPE == VC1_P_FRAME)
+ status = vc1_ParsePictureHeader_InterlacePpicture_Adv(ctxt, pInfo);
+ else if (pInfo->picLayerHeader.PTYPE == VC1_B_FRAME)
+ status = vc1_ParsePictureHeader_InterlaceBpicture_Adv(ctxt, pInfo);
+ else if (pInfo->picLayerHeader.PTYPE == VC1_SKIPPED_FRAME)
+ status = VC1_STATUS_OK;
+ }
+ else if (pInfo->picLayerHeader.FCM == VC1_FCM_FIELD_INTERLACE)
+ {
+ /* For field pictures the type of the current field (not the frame
+ PTYPE) selects the parser. */
+ int ptype;
+ if( pInfo->picLayerHeader.CurrField == 0)
+ ptype = pInfo->picLayerHeader.PTypeField1;
+ else
+ ptype = pInfo->picLayerHeader.PTypeField2;
+
+ if ((ptype == VC1_I_FRAME) ||
+ (ptype == VC1_BI_FRAME))
+ {
+ status = vc1_ParseFieldHeader_InterlaceIpicture_Adv(ctxt, pInfo);
+ }
+ else if (ptype == VC1_P_FRAME)
+ status = vc1_ParseFieldHeader_InterlacePpicture_Adv(ctxt, pInfo);
+ else if (ptype == VC1_B_FRAME)
+ status = vc1_ParseFieldHeader_InterlaceBpicture_Adv(ctxt, pInfo);
+ else if (ptype == VC1_SKIPPED_FRAME)
+ status = VC1_STATUS_OK;
+ }
+
+ return status;
+}
diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_ppic.c b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_ppic.c
new file mode 100644
index 0000000..ba9c756
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_ppic.c
@@ -0,0 +1,149 @@
+/* ///////////////////////////////////////////////////////////////////////
+//
+// INTEL CORPORATION PROPRIETARY INFORMATION
+// This software is supplied under the terms of a license agreement or
+// nondisclosure agreement with Intel Corporation and may not be copied
+// or disclosed except in accordance with the terms of that agreement.
+// Copyright (c) 2008 Intel Corporation. All Rights Reserved.
+//
+// Description: Parses VC-1 picture layer for progressive P picture in simple
+// or main profile bitstream.
+//
+*/
+
+#include "vc1parse.h"
+
+/*------------------------------------------------------------------------------
+ * Parse picture layer. This function parses progressive P picture for simple
+ * or main profile bitstream. This parser starts after PTYPE was parsed but
+ * stops before parsing of macroblock layer.
+ * Table 19 of SMPTE 421M after processing up to PTYPE for P picture.
+ *------------------------------------------------------------------------------
+ */
+
+vc1_Status vc1_ParsePictureHeader_ProgressivePpicture(void* ctxt, vc1_Info *pInfo)
+{
+ uint8_t bit_count;
+ const uint8_t *table;
+ uint32_t tempValue;
+ vc1_Status status = VC1_STATUS_OK;
+ vc1_metadata_t *md = &pInfo->metadata;
+ vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader;
+
+ /* rounding control is implied for simple and main profile, SMPTE 421M 8.3.7.
+ It toggles back and forth between 0 and 1 for P frames */
+ if (md->PROFILE != VC1_PROFILE_ADVANCED)
+ {
+ picLayerHeader->RNDCTRL = md->RNDCTRL ^ 1 ;
+ md->RNDCTRL = picLayerHeader->RNDCTRL;
+ }
+
+ /* PQINDEX drives PQUANT derivation; HALFQP only exists for
+ PQINDEX <= 8, otherwise implied 0. */
+ VC1_GET_BITS9(5, picLayerHeader->PQINDEX);
+ if ((status = vc1_CalculatePQuant(pInfo)) != VC1_STATUS_OK)
+ return status;
+
+ if (picLayerHeader->PQINDEX <= 8)
+ {
+ VC1_GET_BITS9(1, picLayerHeader->HALFQP);
+ }
+ else picLayerHeader->HALFQP=0;
+
+ if (md->QUANTIZER == 1)
+ {
+ VC1_GET_BITS9(1, picLayerHeader->PQUANTIZER);
+ picLayerHeader->UniformQuant = picLayerHeader->PQUANTIZER;
+ }
+
+ /* MVRANGE. */
+ if ((status = vc1_MVRangeDecode(ctxt, pInfo)) != VC1_STATUS_OK)
+ return status;
+
+ /* RESPIC is read and discarded. */
+ if (md->MULTIRES == 1)
+ VC1_GET_BITS9(2, tempValue); /* RESPIC. */
+
+ /* MVMODE VLC table depends on the quantizer strength. */
+ if (picLayerHeader->PQUANT > 12)
+ table = VC1_MVMODE_LOW_TBL;
+ else
+ table = VC1_MVMODE_HIGH_TBL;
+
+ /* MVMODE: up to 4 bits, unary-style; bit_count (plus the last bit for
+ the 4-bit codes) indexes the mode table. */
+ bit_count = 0;
+ VC1_GET_BITS9(1, picLayerHeader->MVMODE);
+ while ((picLayerHeader->MVMODE == 0) && (bit_count < 3))
+ {
+ VC1_GET_BITS9(1, picLayerHeader->MVMODE);
+ bit_count++;
+ }
+ if (bit_count == 3)
+ bit_count += picLayerHeader->MVMODE;
+ picLayerHeader->MVMODE = table[bit_count];
+
+ /* Intensity compensation: a second MVMODE-style code (MVMODE2) plus
+ the 6-bit luma scale/shift parameters. */
+ if (picLayerHeader->MVMODE == VC1_MVMODE_INTENSCOMP)
+ {
+ bit_count = 0;
+ VC1_GET_BITS9(1, picLayerHeader->MVMODE2);
+ while ((picLayerHeader->MVMODE2 == 0) && (bit_count < 2))
+ {
+ VC1_GET_BITS9(1, picLayerHeader->MVMODE2);
+ bit_count++;
+ }
+ if (bit_count == 2 && picLayerHeader->MVMODE2 == 0)
+ bit_count++;
+ picLayerHeader->MVMODE2 = table[bit_count];
+ VC1_GET_BITS9(6, picLayerHeader->LUMSCALE);
+ VC1_GET_BITS9(6, picLayerHeader->LUMSHIFT);
+ }
+#ifdef VBP
+ else
+ picLayerHeader->MVMODE2 = 0;
+#else
+ else
+ picLayerHeader->MVMODE2 = picLayerHeader->MVMODE;
+#endif
+
+ /* MVTYPEMB bitplane is only coded in mixed-MV mode (directly, or via
+ MVMODE2 under intensity compensation). */
+ if ((picLayerHeader->MVMODE == VC1_MVMODE_MIXED_MV) ||
+ ((picLayerHeader->MVMODE == VC1_MVMODE_INTENSCOMP) &&
+ (picLayerHeader->MVMODE2 == VC1_MVMODE_MIXED_MV)))
+ {
+ if ((status = vc1_DecodeBitplane(ctxt, pInfo,
+ md->widthMB, md->heightMB, BPP_MVTYPEMB))
+ != VC1_STATUS_OK)
+ {
+ return status;
+ }
+ }
+
+ /* SKIPMB bitplane: per-MB skipped flags. */
+ if ((status = vc1_DecodeBitplane(ctxt, pInfo,
+ md->widthMB, md->heightMB, BPP_SKIPMB)) != VC1_STATUS_OK)
+ {
+ return status;
+ }
+
+ /* MV and coded-block-pattern table selectors. */
+ VC1_GET_BITS9(2, picLayerHeader->MVTAB);
+ VC1_GET_BITS9(2, picLayerHeader->CBPTAB);
+
+ if ((status = vc1_VOPDQuant(ctxt, pInfo)) != VC1_STATUS_OK)
+ return status;
+
+ /* Variable-size transform: TTMBF, and a frame-level TTFRM when the
+ transform type is signaled at frame level (TTMBF == 1). */
+ if (md->VSTRANSFORM == 1)
+ {
+ VC1_GET_BITS9(1, picLayerHeader->TTMBF);
+ if (picLayerHeader->TTMBF == 1)
+ {
+ VC1_GET_BITS9(2, picLayerHeader->TTFRM);
+ }
+ }
+
+ /* TRANSACFRM VLC: 0 -> 0, else one more bit plus 2. */
+ VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM);
+ if (picLayerHeader->TRANSACFRM == 1)
+ {
+ VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM);
+ picLayerHeader->TRANSACFRM += 2;
+ }
+
+ /* 1-bit intra-DC coding table selector. */
+ VC1_GET_BITS9(1, picLayerHeader->TRANSDCTAB);
+
+ /* Skip parsing of macroblock layer. */
+
+ return status;
+}
+
diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_ppic_adv.c b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_ppic_adv.c
new file mode 100644
index 0000000..144c138
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_ppic_adv.c
@@ -0,0 +1,368 @@
+/* ///////////////////////////////////////////////////////////////////////
+//
+// INTEL CORPORATION PROPRIETARY INFORMATION
+// This software is supplied under the terms of a license agreement or
+// nondisclosure agreement with Intel Corporation and may not be copied
+// or disclosed except in accordance with the terms of that agreement.
+// Copyright (c) 2008 Intel Corporation. All Rights Reserved.
+//
+// Description: Parses VC-1 picture layer for progressive P picture in advanced
+// profile bitstream.
+//
+*/
+
+#include "vc1parse.h"
+#include "viddec_fw_debug.h"
+/*------------------------------------------------------------------------------
+ * Parse picture layer. This function parses progressive P picture for advanced
+ * profile bitstream.
+ * Table 20 of SMPTE 421M after processing up to POSTPROC by
+ * vc1_ParsePictureHeader_Adv() but stopping before processing of macroblock
+ * layer.
+ *------------------------------------------------------------------------------
+ */
+
+/* Parses the progressive P-picture header (advanced profile), continuing
+ * after POSTPROC. On success the picture-layer fields (MVMODE, bitplanes,
+ * VLC table selectors, transform flags) are filled in; on any sub-parser
+ * failure the error status is returned immediately and parsing stops. */
+vc1_Status vc1_ParsePictureHeader_ProgressivePpicture_Adv(void* ctxt, vc1_Info *pInfo)
+{
+ uint8_t bit_count;
+ const uint8_t *table;
+ vc1_Status status = VC1_STATUS_OK;
+ vc1_metadata_t *md = &pInfo->metadata;
+ vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader;
+
+ /* MVRANGE. */
+ if ((status = vc1_MVRangeDecode(ctxt, pInfo)) != VC1_STATUS_OK)
+ return status;
+
+ /* MVMODE decode table depends on the picture quantizer step size. */
+ if (picLayerHeader->PQUANT > 12)
+ table = VC1_MVMODE_LOW_TBL;
+ else
+ table = VC1_MVMODE_HIGH_TBL;
+
+ /* MVMODE: unary-style VLC of up to 4 bits; the number of leading zero
+ * bits (plus the value of the last bit once three zeros were read)
+ * indexes into the selected table. */
+ bit_count = 0;
+ VC1_GET_BITS9(1, picLayerHeader->MVMODE);
+ while ((picLayerHeader->MVMODE == 0) && (bit_count < 3))
+ {
+ VC1_GET_BITS9(1, picLayerHeader->MVMODE);
+ bit_count++;
+ }
+ if (bit_count == 3)
+ bit_count += picLayerHeader->MVMODE;
+ picLayerHeader->MVMODE = table[bit_count];
+
+ if (picLayerHeader->MVMODE == VC1_MVMODE_INTENSCOMP)
+ {
+ /* MVMODE2: same unary VLC scheme, up to 3 bits. */
+ bit_count = 0;
+ VC1_GET_BITS9(1, picLayerHeader->MVMODE2);
+ while ((picLayerHeader->MVMODE2 == 0) && (bit_count < 2))
+ {
+ VC1_GET_BITS9(1, picLayerHeader->MVMODE2);
+ bit_count++;
+ }
+ if (bit_count == 2 && picLayerHeader->MVMODE2 == 0)
+ bit_count++;
+ picLayerHeader->MVMODE2 = table[bit_count];
+ /* Intensity-compensation parameters; also mirrored into metadata
+ * so later (field) pictures can reference them. */
+ VC1_GET_BITS9(6, picLayerHeader->LUMSCALE);
+ VC1_GET_BITS9(6, picLayerHeader->LUMSHIFT);
+ md->LUMSCALE2 = picLayerHeader->LUMSCALE;
+ md->LUMSHIFT2 = picLayerHeader->LUMSHIFT;
+ }
+ else
+#ifdef VBP
+ picLayerHeader->MVMODE2 = 0;
+#else
+ picLayerHeader->MVMODE2 = picLayerHeader->MVMODE;
+#endif
+
+ /* MVTYPEMB bitplane is present only in mixed-MV mode (directly, or via
+ * MVMODE2 when intensity compensation is signalled). */
+ if ((picLayerHeader->MVMODE == VC1_MVMODE_MIXED_MV) ||
+ ((picLayerHeader->MVMODE == VC1_MVMODE_INTENSCOMP) &&
+ (picLayerHeader->MVMODE2 == VC1_MVMODE_MIXED_MV)))
+ {
+ if ((status = vc1_DecodeBitplane(ctxt, pInfo,
+ md->widthMB, md->heightMB, BPP_MVTYPEMB)) !=
+ VC1_STATUS_OK)
+ {
+ return status;
+ }
+ }
+
+ /* SKIPMB bitplane is always present for P pictures. */
+ if ((status = vc1_DecodeBitplane(ctxt, pInfo,
+ md->widthMB, md->heightMB, BPP_SKIPMB)) != VC1_STATUS_OK)
+ {
+ return status;
+ }
+
+ VC1_GET_BITS9(2, picLayerHeader->MVTAB);
+ VC1_GET_BITS9(2, picLayerHeader->CBPTAB);
+
+ if ((status = vc1_VOPDQuant(ctxt, pInfo)) != VC1_STATUS_OK)
+ return status;
+
+ /* TTMBF/TTFRM present only when variable-size transform is enabled. */
+ if (md->VSTRANSFORM == 1)
+ {
+ VC1_GET_BITS9(1, picLayerHeader->TTMBF);
+ if (picLayerHeader->TTMBF == 1)
+ {
+ VC1_GET_BITS9(2, picLayerHeader->TTFRM);
+ }
+ }
+
+ /* TRANSACFRM: 1-2 bit VLC; a second bit maps values to 2 or 3. */
+ VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM);
+ if (picLayerHeader->TRANSACFRM == 1)
+ {
+ VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM);
+ picLayerHeader->TRANSACFRM += 2;
+ }
+
+ VC1_GET_BITS9(1, picLayerHeader->TRANSDCTAB);
+
+ /* Skip parsing of macroblock layer. */
+
+ return status;
+}
+
+/*------------------------------------------------------------------------------
+ * Parse picture layer. This function parses interlace P frame for advanced
+ * profile bitstream.
+ * Table 83 of SMPTE 421M after processing up to POSTPROC by
+ * vc1_ParsePictureHeader_Adv() but stopping before processing of macroblock
+ * layer.
+ *------------------------------------------------------------------------------
+ */
+
+/* Parses the interlace P-frame header (advanced profile), continuing after
+ * POSTPROC. Reads motion-vector ranges, optional intensity compensation,
+ * the SKIPMB bitplane, VLC table selectors and transform flags. Returns a
+ * sub-parser's error status immediately on failure. */
+vc1_Status vc1_ParsePictureHeader_InterlacePpicture_Adv(void* ctxt, vc1_Info *pInfo)
+{
+ vc1_Status status = VC1_STATUS_OK;
+ vc1_metadata_t *md = &pInfo->metadata;
+ vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader;
+
+ /* MVRANGE. */
+ if ((status = vc1_MVRangeDecode(ctxt, pInfo)) != VC1_STATUS_OK)
+ return status;
+
+ /* DMVRANGE. */
+ if ((status = vc1_DMVRangeDecode(ctxt, pInfo)) != VC1_STATUS_OK)
+ return status;
+
+ VC1_GET_BITS9(1, picLayerHeader->MV4SWITCH);
+
+ /* INTCOMP: when set, read luma scale/shift and mirror them into the
+ * metadata for use by subsequent pictures. */
+ VC1_GET_BITS9(1, picLayerHeader->INTCOMP);
+ if (picLayerHeader->INTCOMP)
+ {
+ VC1_GET_BITS9(6, picLayerHeader->LUMSCALE);
+ VC1_GET_BITS9(6, picLayerHeader->LUMSHIFT);
+ md->LUMSCALE2 = picLayerHeader->LUMSCALE;
+ md->LUMSHIFT2 = picLayerHeader->LUMSHIFT;
+ }
+
+ if ((status = vc1_DecodeBitplane(ctxt, pInfo,
+ md->widthMB, md->heightMB, BPP_SKIPMB)) != VC1_STATUS_OK)
+ {
+ return status;
+ }
+
+ VC1_GET_BITS9(2, picLayerHeader->MBMODETAB);
+ VC1_GET_BITS9(2, picLayerHeader->MVTAB); /* IMVTAB. */
+ VC1_GET_BITS9(3, picLayerHeader->CBPTAB); /* ICBPTAB. */
+ VC1_GET_BITS9(2, picLayerHeader->MV2BPTAB); /* 2MVBPTAB. */
+
+ /* 4MVBPTAB is present only when 4-MV coding is enabled. */
+ if (picLayerHeader->MV4SWITCH == 1)
+ {
+ VC1_GET_BITS9(2, picLayerHeader->MV4BPTAB); /* 4MVBPTAB. */
+ }
+
+ if ((status = vc1_VOPDQuant(ctxt, pInfo)) != VC1_STATUS_OK)
+ return status;
+
+ /* TTMBF/TTFRM present only when variable-size transform is enabled. */
+ if (md->VSTRANSFORM == 1)
+ {
+ VC1_GET_BITS9(1, picLayerHeader->TTMBF);
+ if (picLayerHeader->TTMBF == 1)
+ {
+ VC1_GET_BITS9(2, picLayerHeader->TTFRM);
+ }
+ }
+
+ /* TRANSACFRM: 1-2 bit VLC; a second bit maps values to 2 or 3. */
+ VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM);
+ if (picLayerHeader->TRANSACFRM == 1)
+ {
+ VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM);
+ picLayerHeader->TRANSACFRM += 2;
+ }
+
+ VC1_GET_BITS9(1, picLayerHeader->TRANSDCTAB);
+
+ /* Skip parsing of macroblock layer. */
+
+ return status;
+}
+
+/*------------------------------------------------------------------------------
+ * Parse picture layer. This function parses interlace P field for advanced
+ * profile bitstream.
+ * Table 88 of SMPTE 421M after processing up to BFRACTION by
+ * vc1_ParseFieldHeader_Adv() but stopping before processing of macroblock
+ * layer.
+ *------------------------------------------------------------------------------
+ */
+
+/* Parses the interlace P-field header (advanced profile), continuing after
+ * BFRACTION. Reads per-field quantizer state, reference-field selection,
+ * MV ranges, MVMODE/MVMODE2 with optional per-field intensity compensation
+ * (INTCOMPFIELD decides which field(s) the LUMSCALE/LUMSHIFT pairs apply
+ * to), then the VLC table selectors and transform flags. */
+vc1_Status vc1_ParseFieldHeader_InterlacePpicture_Adv(void* ctxt, vc1_Info *pInfo)
+{
+ uint8_t bit_count;
+ const uint8_t *table;
+ vc1_Status status = VC1_STATUS_OK;
+ vc1_metadata_t *md = &pInfo->metadata;
+ vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader;
+
+
+ /* PQINDEX drives the derived PQUANT/UniformQuant values. */
+ VC1_GET_BITS9(5, picLayerHeader->PQINDEX);
+ if ((status = vc1_CalculatePQuant(pInfo)) != VC1_STATUS_OK)
+ return status;
+
+ /* HALFQP exists only for low PQINDEX values. */
+ if (picLayerHeader->PQINDEX <= 8)
+ {
+ VC1_GET_BITS9(1, picLayerHeader->HALFQP);
+ }
+ else
+ picLayerHeader->HALFQP = 0;
+
+
+ if (md->QUANTIZER == 1)
+ {
+ VC1_GET_BITS9(1, picLayerHeader->PQUANTIZER);
+ picLayerHeader->UniformQuant = picLayerHeader->PQUANTIZER;
+ }
+
+ if (md->POSTPROCFLAG == 1)
+ {
+ VC1_GET_BITS9(2, picLayerHeader->POSTPROC);
+ }
+
+ /* NUMREF: number of reference fields; REFFIELD only when one ref. */
+ VC1_GET_BITS9(1, picLayerHeader->NUMREF);
+
+ if (picLayerHeader->NUMREF == 0)
+ {
+ VC1_GET_BITS9(1, picLayerHeader->REFFIELD);
+ }
+
+ if ((status = vc1_MVRangeDecode(ctxt, pInfo)) != VC1_STATUS_OK) {
+ DEB("Error in vc1_MVRangeDecode \n");
+ return status;
+ }
+
+ if ((status = vc1_DMVRangeDecode(ctxt, pInfo)) != VC1_STATUS_OK)
+ return status;
+
+ /* MVMODE decode table depends on the picture quantizer step size. */
+ if (picLayerHeader->PQUANT > 12)
+ table = VC1_MVMODE_LOW_TBL;
+ else
+ table = VC1_MVMODE_HIGH_TBL;
+
+ /* MVMODE: unary-style VLC; after two zero bits a final bit selects
+ * between the last two table entries. */
+ bit_count = 0;
+ VC1_GET_BITS9(1, picLayerHeader->MVMODE);
+ while ((picLayerHeader->MVMODE == 0) && (bit_count < 2))
+ {
+ VC1_GET_BITS9(1, picLayerHeader->MVMODE);
+ bit_count++;
+ }
+ if (bit_count == 2 && picLayerHeader->MVMODE == 0) {
+ VC1_GET_BITS9(1, picLayerHeader->MVMODE);
+
+ if ( picLayerHeader->MVMODE == 1)
+ bit_count ++;
+
+ bit_count++;
+ }
+ picLayerHeader->MVMODE = table[bit_count];
+
+ if (picLayerHeader->MVMODE == VC1_MVMODE_INTENSCOMP)
+ {
+ /* MVMODE2: same unary VLC scheme, up to 3 bits. */
+ bit_count = 0;
+ VC1_GET_BITS9(1, picLayerHeader->MVMODE2);
+ while ((picLayerHeader->MVMODE2 == 0) && (bit_count < 2))
+ {
+ VC1_GET_BITS9(1, picLayerHeader->MVMODE2);
+ bit_count++;
+ }
+ if (bit_count == 2 && picLayerHeader->MVMODE2 == 0)
+ bit_count++;
+ picLayerHeader->MVMODE2 = table[bit_count];
+
+ /* INTCOMPFIELD: 1 -> both fields; otherwise a second bit picks
+ * bottom (1) or top (0) field. */
+ VC1_GET_BITS9(1, md->INTCOMPFIELD);
+ if (md->INTCOMPFIELD == 1)
+ md->INTCOMPFIELD = VC1_INTCOMP_BOTH_FIELD;
+ else
+ {
+ VC1_GET_BITS9(1, md->INTCOMPFIELD);
+ if(md->INTCOMPFIELD == 1)
+ md->INTCOMPFIELD = VC1_INTCOMP_BOTTOM_FIELD;
+ else
+ md->INTCOMPFIELD = VC1_INTCOMP_TOP_FIELD;
+ }
+ VC1_GET_BITS9(6, picLayerHeader->LUMSCALE); /* LUMSCALE1. */
+ VC1_GET_BITS9(6, picLayerHeader->LUMSHIFT); /* LUMSHIFT1. */
+ /* Bottom-only: the first scale/shift pair belongs to the bottom
+ * field, so mirror it into the metadata's second pair. */
+ if ( md->INTCOMPFIELD == VC1_INTCOMP_BOTTOM_FIELD ) {
+ md->LUMSCALE2 = picLayerHeader->LUMSCALE;
+ md->LUMSHIFT2 = picLayerHeader->LUMSHIFT;
+ }
+ /* Both fields: a second scale/shift pair follows in the stream. */
+ if (md->INTCOMPFIELD == VC1_INTCOMP_BOTH_FIELD)
+ {
+ VC1_GET_BITS9(6, md->LUMSCALE2);
+ VC1_GET_BITS9(6, md->LUMSHIFT2);
+ }
+ }
+ else
+#ifdef VBP
+ picLayerHeader->MVMODE2 = 0;
+#else
+ picLayerHeader->MVMODE2 = picLayerHeader->MVMODE;
+#endif
+
+ VC1_GET_BITS9(3, picLayerHeader->MBMODETAB);
+
+ /* IMVTAB is 3 bits with two reference fields, else 2 bits. */
+ if (picLayerHeader->NUMREF)
+ {
+ VC1_GET_BITS9(3, picLayerHeader->MVTAB); /* IMVTAB. */
+ }
+ else
+ {
+ VC1_GET_BITS9(2, picLayerHeader->MVTAB); /* IMVTAB. */
+ }
+
+ VC1_GET_BITS9(3, picLayerHeader->CBPTAB); /* ICBPTAB. */
+
+#ifdef VBP
+ if (picLayerHeader->MVMODE == VC1_MVMODE_MIXED_MV)
+#else
+ if (picLayerHeader->MVMODE2 == VC1_MVMODE_MIXED_MV)
+#endif
+ {
+ VC1_GET_BITS9(2, picLayerHeader->MV4BPTAB); /* 4MVBPTAB. */
+ }
+
+ if ((status = vc1_VOPDQuant(ctxt, pInfo)) != VC1_STATUS_OK)
+ return status;
+
+ /* TTMBF/TTFRM present only when variable-size transform is enabled. */
+ if (md->VSTRANSFORM == 1)
+ {
+ VC1_GET_BITS9(1, picLayerHeader->TTMBF);
+ if (picLayerHeader->TTMBF == 1)
+ {
+ VC1_GET_BITS9(2, picLayerHeader->TTFRM);
+ }
+ }
+
+ /* TRANSACFRM: 1-2 bit VLC; a second bit maps values to 2 or 3. */
+ VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM);
+ if (picLayerHeader->TRANSACFRM == 1)
+ {
+ VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM);
+ picLayerHeader->TRANSACFRM += 2;
+ }
+ picLayerHeader->TRANSACFRM2 = 0;
+
+ VC1_GET_BITS9(1, picLayerHeader->TRANSDCTAB);
+
+ /* Skip parsing of macroblock layer. */
+
+ return status;
+}
diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_vopdq.c b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_vopdq.c
new file mode 100644
index 0000000..559a0dd
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_vopdq.c
@@ -0,0 +1,130 @@
+/* ///////////////////////////////////////////////////////////////////////
+//
+// INTEL CORPORATION PROPRIETARY INFORMATION
+// This software is supplied under the terms of a license agreement or
+// nondisclosure agreement with Intel Corporation and may not be copied
+// or disclosed except in accordance with the terms of that agreement.
+// Copyright (c) 2008 Intel Corporation. All Rights Reserved.
+//
+// Description: Parses VC-1 syntax elements VOPDQUANT and DQUANT.
+//
+*/
+
+#include "vc1parse.h"
+
+#define VC1_UNDEF_PQUANT 0
+
+/* Implicit PQINDEX-to-PQUANT mapping used by vc1_CalculatePQuant() when the
+ * non-uniform quantizer is implied (QUANTIZER == 0 and PQINDEX >= 9).
+ * Entry 0 is a placeholder since PQINDEX 0 is undefined.
+ * NOTE(review): values presumed to match the SMPTE 421M implicit-quantizer
+ * table - confirm against the spec. */
+static const uint8_t MapPQIndToQuant_Impl[] =
+{
+ VC1_UNDEF_PQUANT,
+ 1, 2, 3, 4, 5, 6, 7, 8,
+ 6, 7, 8, 9, 10, 11, 12, 13,
+ 14, 15, 16, 17, 18, 19, 20, 21,
+ 22, 23, 24, 25, 27, 29, 31
+};
+
+/*------------------------------------------------------------------------------
+ * Parse syntax element VOPDQuant as defined in Table 24 of SMPTE 421M.
+ *------------------------------------------------------------------------------
+ */
+
+/* Parses the VOPDQUANT syntax element (Table 24 of SMPTE 421M).
+ * Behavior depends on the sequence-level DQUANT value:
+ *   0 - no VOPDQUANT present, returns immediately;
+ *   2 - PQDIFF (and ABSPQ when escaped) only;
+ *   else - full DQUANTFRM/DQPROFILE syntax with per-profile extras.
+ * Under VBP the derived ALTPQUANT is also computed here. */
+vc1_Status vc1_VOPDQuant(void* ctxt, vc1_Info *pInfo)
+{
+ vc1_Status status = VC1_STATUS_OK;
+ vc1_metadata_t *md = &pInfo->metadata;
+ vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader;
+
+ if (md->DQUANT == 0)
+ return status;
+
+ if (md->DQUANT == 2)
+ {
+ /* PQDIFF == 7 escapes to a 5-bit absolute quantizer. */
+ VC1_GET_BITS9(3, picLayerHeader->PQDIFF);
+ if (picLayerHeader->PQDIFF == 7)
+ {
+ VC1_GET_BITS9(5, picLayerHeader->ABSPQ);
+ }
+ }
+ else
+ {
+ VC1_GET_BITS9(1, picLayerHeader->DQUANTFRM);
+ if (picLayerHeader->DQUANTFRM == 1)
+ {
+ /* DQPROFILE selects which extra syntax element follows. */
+ VC1_GET_BITS9(2, picLayerHeader->DQPROFILE);
+ if (picLayerHeader->DQPROFILE == VC1_DQPROFILE_SNGLEDGES)
+ {
+ VC1_GET_BITS9(2, picLayerHeader->DQSBEDGE);
+ }
+ else if (picLayerHeader->DQPROFILE == VC1_DQPROFILE_DBLEDGES)
+ {
+#ifdef VBP
+ VC1_GET_BITS9(2, picLayerHeader->DQDBEDGE);
+#else
+ VC1_GET_BITS9(2, picLayerHeader->DQSBEDGE); /* DQDBEDGE. */
+#endif
+ }
+ else if (picLayerHeader->DQPROFILE == VC1_DQPROFILE_ALLMBLKS)
+ {
+ VC1_GET_BITS9(1, picLayerHeader->DQBILEVEL);
+ }
+ /* PQDIFF is skipped only for all-macroblocks + non-bilevel. */
+ if (! (picLayerHeader->DQPROFILE == VC1_DQPROFILE_ALLMBLKS &&
+ picLayerHeader->DQBILEVEL == 0))
+ {
+ VC1_GET_BITS9(3, picLayerHeader->PQDIFF);
+ if (picLayerHeader->PQDIFF == 7)
+ {
+ VC1_GET_BITS9(5, picLayerHeader->ABSPQ);
+ }
+ }
+ }
+ }
+#ifdef VBP
+ /* Derive ALTPQUANT: either the escaped absolute value, or an offset
+ * from the picture quantizer. */
+ if ((picLayerHeader->DQUANTFRM == 1 && md->DQUANT == 1) || (md->DQUANT == 2))
+ {
+ if (picLayerHeader->PQDIFF == 7)
+ {
+ picLayerHeader->ALTPQUANT = picLayerHeader->ABSPQ;
+ }
+ else
+ {
+ picLayerHeader->ALTPQUANT = picLayerHeader->PQUANT + picLayerHeader->PQDIFF + 1;
+ }
+ }
+#endif
+ return status;
+}
+
+/*------------------------------------------------------------------------------
+ * Compute value for PQUANT syntax element that does not exist in bitstreams for
+ * progressive I and BI pictures.
+ *------------------------------------------------------------------------------
+ */
+
+/* Derives PQUANT and UniformQuant from the already-parsed PQINDEX.
+ * With implicit quantizer signalling (QUANTIZER == 0), PQINDEX >= 9 selects
+ * the non-uniform quantizer and remaps PQUANT through the implicit table;
+ * with explicit signalling, QUANTIZER == 2 forces non-uniform. Always
+ * returns VC1_STATUS_OK. */
+vc1_Status vc1_CalculatePQuant(vc1_Info *pInfo)
+{
+ vc1_Status status = VC1_STATUS_OK;
+ vc1_metadata_t *md = &pInfo->metadata;
+ vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader;
+
+ /* Defaults: PQUANT equals PQINDEX with the uniform quantizer. */
+ picLayerHeader->PQUANT = picLayerHeader->PQINDEX;
+ picLayerHeader->UniformQuant = VC1_QUANTIZER_UNIFORM;
+
+ if (md->QUANTIZER == 0)
+ {
+ if (picLayerHeader->PQINDEX < 9)
+ picLayerHeader->UniformQuant = VC1_QUANTIZER_UNIFORM;
+ else
+ {
+ picLayerHeader->UniformQuant = VC1_QUANTIZER_NONUNIFORM;
+ picLayerHeader->PQUANT =
+ MapPQIndToQuant_Impl[picLayerHeader->PQINDEX];
+ }
+ }
+ else
+ {
+ if (md->QUANTIZER == 2)
+ picLayerHeader->UniformQuant = VC1_QUANTIZER_NONUNIFORM;
+ }
+
+ return status;
+}
diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/viddec_vc1_parse.c b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/viddec_vc1_parse.c
new file mode 100644
index 0000000..6af6f09
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/viddec_vc1_parse.c
@@ -0,0 +1,345 @@
+#include "viddec_fw_debug.h" // For DEB
+#include "viddec_parser_ops.h" // For parser helper functions
+#include "vc1.h" // For the parser structure
+#include "vc1parse.h" // For vc1 parser helper functions
+#ifdef VBP
+#include "viddec_pm.h"
+#endif
+#define vc1_is_frame_start_code( ch ) \
+ (( vc1_SCField == ch ||vc1_SCSlice == ch || vc1_SCFrameHeader == ch ) ? 1 : 0)
+
+/* init function */
+/* Parser init/reset. Resets reference-frame bookkeeping, intensity-
+ * compensation state and the picture-layer header. When `preserve` is
+ * non-zero, sequence/entry-point start-code state and sequence metadata
+ * survive the reset; otherwise everything is cleared. Exported (non-static)
+ * only in VBP builds. */
+#ifdef VBP
+void viddec_vc1_init(void *ctxt, uint32_t *persist_mem, uint32_t preserve)
+#else
+static void viddec_vc1_init(void *ctxt, uint32_t *persist_mem, uint32_t preserve)
+#endif
+{
+ vc1_viddec_parser_t *parser = ctxt;
+ int i;
+
+ /* Self-assignment silences the unused-parameter warning. */
+ persist_mem = persist_mem;
+
+ for(i=0; i<VC1_NUM_REFERENCE_FRAMES; i++)
+ {
+ parser->ref_frame[i].id = -1; /* first I frame checks that value */
+ parser->ref_frame[i].anchor[0] = 1;
+ parser->ref_frame[i].anchor[1] = 1;
+ parser->ref_frame[i].intcomp_top = 0;
+ parser->ref_frame[i].intcomp_bot = 0;
+ }
+
+ parser->intcomp_top[0] = 0;
+ parser->intcomp_bot[0] = 0;
+ parser->intcomp_top[1] = 0;
+ parser->intcomp_bot[1] = 0;
+ parser->is_reference_picture = false;
+
+ memset(&parser->info.picLayerHeader, 0, sizeof(vc1_PictureLayerHeader));
+
+ if(preserve)
+ {
+ /* Keep only the seq/entry-point bits of the start-code state. */
+ parser->sc_seen &= VC1_EP_MASK;
+ parser->sc_seen_since_last_wkld &= VC1_EP_MASK;
+ }
+ else
+ {
+ parser->sc_seen = VC1_SC_INVALID;
+ parser->sc_seen_since_last_wkld = VC1_SC_INVALID;
+ memset(&parser->info.metadata, 0, sizeof(parser->info.metadata));
+ }
+
+ return;
+} // viddec_vc1_init
+
+/* Rotates the intensity-compensation history: the "current picture" slot
+ * ([0]) becomes the "previous picture" slot ([1]) and [0] is cleared.
+ * Called when a new I/P frame or field starts. */
+static void vc1_swap_intcomp(vc1_viddec_parser_t *parser)
+{
+ parser->intcomp_top[1] = parser->intcomp_top[0];
+ parser->intcomp_bot[1] = parser->intcomp_bot[0];
+ parser->intcomp_top[0] = 0;
+ parser->intcomp_bot[0] = 0;
+
+ return;
+} // vc1_swap_intcomp
+
+/* Main per-unit parse entry point: reads the next start code (or, under
+ * VBP, assumes a frame when no start code is found), dispatches to the
+ * matching layer parser, and maintains the start-code-seen bitmasks. Emits
+ * the current frame when a frame-level start code is handled. Always
+ * returns VIDDEC_PARSE_SUCESS (sic - project constant). */
+#ifdef VBP
+uint32_t viddec_vc1_parse(void *parent, void *ctxt)
+#else
+static uint32_t viddec_vc1_parse(void *parent, void *ctxt)
+#endif
+{
+ vc1_viddec_parser_t *parser = ctxt;
+ uint32_t sc=0x0;
+ int32_t ret=0, status=0;
+
+#ifdef VBP
+ /* This works only if there is one slice and no start codes */
+ /* A better fix would be to insert start codes it there aren't any. */
+ ret = viddec_pm_peek_bits(parent, &sc, 32);
+ if ((sc > 0x0100) && (sc < 0x0200)) /* a Start code will be in this range. */
+ {
+ ret = viddec_pm_get_bits(parent, &sc, 32);
+ }
+ else
+ {
+ /* In cases where we get a buffer with no start codes, we assume */
+ /* that this is a frame of data. We may have to fix this later. */
+ sc = vc1_SCFrameHeader;
+ }
+#else
+ ret = viddec_pm_get_bits(parent, &sc, 32);
+#endif
+ /* Only the low byte of the 32-bit start code identifies the unit. */
+ sc = sc & 0xFF;
+ parser->is_frame_start = (sc == vc1_SCFrameHeader);
+ DEB("START_CODE = %02x\n", sc);
+ switch( sc )
+ {
+ case vc1_SCSequenceHeader:
+ {
+ uint32_t data=0;
+ parser->ref_frame[0].anchor[0] = 1;
+ parser->ref_frame[0].anchor[1] = 1;
+ parser->ref_frame[1].anchor[0] = 1;
+ parser->ref_frame[1].anchor[1] = 1;
+ memset( &parser->info.metadata, 0, sizeof(parser->info.metadata));
+ /* look if we have a rcv header for main or simple profile */
+ ret = viddec_pm_peek_bits(parent,&data ,2);
+
+ /* 2-bit profile == 3 means advanced profile sequence header,
+ * anything else is an RCV (simple/main) header. */
+ if (data == 3)
+ {
+ status = vc1_ParseSequenceLayer(parent, &parser->info);
+ }
+ else
+ {
+ status = vc1_ParseRCVSequenceLayer(parent, &parser->info);
+ }
+ parser->sc_seen = VC1_SC_SEQ;
+ parser->sc_seen_since_last_wkld |= VC1_SC_SEQ;
+#ifdef VBP
+ parser->start_code = VC1_SC_SEQ;
+#endif
+ break;
+ }
+
+ case vc1_SCEntryPointHeader:
+ {
+ status = vc1_ParseEntryPointLayer(parent, &parser->info);
+ parser->sc_seen |= VC1_SC_EP;
+ // Clear all bits indicating data below ep header
+ parser->sc_seen &= VC1_EP_MASK;
+ parser->sc_seen_since_last_wkld |= VC1_SC_EP;
+#ifdef VBP
+ parser->start_code = VC1_SC_EP;
+#endif
+ break;
+ }
+
+ case vc1_SCFrameHeader:
+ {
+ memset(&parser->info.picLayerHeader, 0, sizeof(vc1_PictureLayerHeader));
+ status = vc1_ParsePictureLayer(parent, &parser->info);
+ /* Reference pictures (I/P, either field type) age the intensity
+ * compensation history. */
+ if((parser->info.picLayerHeader.PTypeField1 == VC1_I_FRAME) ||
+ (parser->info.picLayerHeader.PTypeField1 == VC1_P_FRAME) ||
+ (parser->info.picLayerHeader.PTYPE == VC1_I_FRAME) ||
+ (parser->info.picLayerHeader.PTYPE == VC1_P_FRAME))
+ {
+ vc1_swap_intcomp(parser);
+ }
+ parser->sc_seen |= VC1_SC_FRM;
+ // Clear all bits indicating data below frm header
+ parser->sc_seen &= VC1_FRM_MASK;
+ parser->sc_seen_since_last_wkld |= VC1_SC_FRM;
+ vc1_start_new_frame ( parent, parser );
+#ifdef VBP
+ parser->start_code = VC1_SC_FRM;
+#endif
+ break;
+ }
+
+ case vc1_SCSlice:
+ {
+ status = vc1_ParseSliceLayer(parent, &parser->info);
+ parser->sc_seen_since_last_wkld |= VC1_SC_SLC;
+#ifdef VBP
+ parser->start_code = VC1_SC_SLC;
+#endif
+ break;
+ }
+
+ case vc1_SCField:
+ {
+ /* Reset all per-field picture-layer state before parsing the
+ * second field's header. */
+ parser->info.picLayerHeader.SLICE_ADDR = 0;
+ parser->info.picLayerHeader.CurrField = 1;
+ parser->info.picLayerHeader.REFFIELD = 0;
+ parser->info.picLayerHeader.NUMREF = 0;
+ parser->info.picLayerHeader.MBMODETAB = 0;
+ parser->info.picLayerHeader.MV4SWITCH = 0;
+ parser->info.picLayerHeader.DMVRANGE = 0;
+ parser->info.picLayerHeader.MVTAB = 0;
+ parser->info.picLayerHeader.MVMODE = 0;
+ parser->info.picLayerHeader.MVRANGE = 0;
+#ifdef VBP
+ parser->info.picLayerHeader.raw_MVTYPEMB = 0;
+ parser->info.picLayerHeader.raw_DIRECTMB = 0;
+ parser->info.picLayerHeader.raw_SKIPMB = 0;
+ parser->info.picLayerHeader.raw_ACPRED = 0;
+ parser->info.picLayerHeader.raw_FIELDTX = 0;
+ parser->info.picLayerHeader.raw_OVERFLAGS = 0;
+ parser->info.picLayerHeader.raw_FORWARDMB = 0;
+
+ memset(&(parser->info.picLayerHeader.MVTYPEMB), 0, sizeof(vc1_Bitplane));
+ memset(&(parser->info.picLayerHeader.DIRECTMB), 0, sizeof(vc1_Bitplane));
+ memset(&(parser->info.picLayerHeader.SKIPMB), 0, sizeof(vc1_Bitplane));
+ memset(&(parser->info.picLayerHeader.ACPRED), 0, sizeof(vc1_Bitplane));
+ memset(&(parser->info.picLayerHeader.FIELDTX), 0, sizeof(vc1_Bitplane));
+ memset(&(parser->info.picLayerHeader.OVERFLAGS), 0, sizeof(vc1_Bitplane));
+ memset(&(parser->info.picLayerHeader.FORWARDMB), 0, sizeof(vc1_Bitplane));
+
+ parser->info.picLayerHeader.ALTPQUANT = 0;
+ parser->info.picLayerHeader.DQDBEDGE = 0;
+ #endif
+
+ status = vc1_ParseFieldLayer(parent, &parser->info);
+ if((parser->info.picLayerHeader.PTypeField2 == VC1_I_FRAME) ||
+ (parser->info.picLayerHeader.PTypeField2 == VC1_P_FRAME))
+ {
+ vc1_swap_intcomp(parser);
+ }
+
+ parser->sc_seen |= VC1_SC_FLD;
+ parser->sc_seen_since_last_wkld |= VC1_SC_FLD;
+#ifdef VBP
+ parser->start_code = VC1_SC_FLD;
+#endif
+ break;
+ }
+
+ case vc1_SCSequenceUser:
+ case vc1_SCEntryPointUser:
+ case vc1_SCFrameUser:
+ case vc1_SCSliceUser:
+ case vc1_SCFieldUser:
+ {/* Handle user data */
+ status = vc1_ParseAndAppendUserData(parent, sc); //parse and add items
+ parser->sc_seen_since_last_wkld |= VC1_SC_UD;
+#ifdef VBP
+ parser->start_code = VC1_SC_UD;
+#endif
+ break;
+ }
+
+ case vc1_SCEndOfSequence:
+ {
+ parser->sc_seen = VC1_SC_INVALID;
+ parser->sc_seen_since_last_wkld |= VC1_SC_INVALID;
+#ifdef VBP
+ parser->start_code = VC1_SC_INVALID;
+#endif
+ break;
+ }
+ default: /* Any other SC that is not handled */
+ {
+ DEB("SC = %02x - unhandled\n", sc );
+#ifdef VBP
+ parser->start_code = VC1_SC_INVALID;
+#endif
+ break;
+ }
+ }
+
+ /* Field, slice and frame start codes all carry frame-level data. */
+ if( vc1_is_frame_start_code( sc ) ) {
+ vc1_parse_emit_current_frame( parent, parser );
+ }
+
+ return VIDDEC_PARSE_SUCESS;
+} // viddec_vc1_parse
+
+/**
+ If a picture header was seen and the next start code is a sequence header, entrypoint header,
+ end of sequence or another frame header, this api returns frame done.
+ If a sequence header and a frame header was not seen before this point, all the
+ information needed for decode is not present and parser errors are reported.
+*/
+/* Decides whether the current workload (frame) is complete given the NEXT
+ * start code. Returns VIDDEC_PARSE_FRMDONE when the next unit starts a new
+ * frame/sequence (or on EOS/discontinuity); otherwise VIDDEC_PARSE_SUCESS.
+ * Sets VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE if no seq+frame header pair was
+ * seen for the finished frame. */
+#ifdef VBP
+uint32_t viddec_vc1_wkld_done(void *parent, void *ctxt, unsigned int next_sc, uint32_t *codec_specific_errors)
+#else
+static uint32_t viddec_vc1_wkld_done(void *parent, void *ctxt, unsigned int next_sc, uint32_t *codec_specific_errors)
+#endif
+{
+ vc1_viddec_parser_t *parser = ctxt;
+ int ret = VIDDEC_PARSE_SUCESS;
+ /* Self-assignment silences the unused-parameter warning. */
+ parent = parent;
+ switch (next_sc)
+ {
+ case vc1_SCFrameHeader:
+ /* A frame header right after seq/ep (with no frame yet) belongs
+ * to the same workload - not frame-done. */
+ if(((parser->sc_seen_since_last_wkld & VC1_SC_EP) ||
+ (parser->sc_seen_since_last_wkld & VC1_SC_SEQ)) &&
+ (!(parser->sc_seen_since_last_wkld & VC1_SC_FRM)))
+ {
+ break;
+ }
+ // Deliberate fall-thru case
+ case vc1_SCEntryPointHeader:
+ /* An entry-point header right after a sequence header (with no
+ * ep yet) likewise stays in the same workload. */
+ if((next_sc == vc1_SCEntryPointHeader) &&
+ (parser->sc_seen_since_last_wkld & VC1_SC_SEQ) &&
+ (!(parser->sc_seen_since_last_wkld & VC1_SC_EP)))
+ {
+ break;
+ }
+ // Deliberate fall-thru case
+ case vc1_SCSequenceHeader:
+ case vc1_SCEndOfSequence:
+ case VIDDEC_PARSE_EOS:
+ case VIDDEC_PARSE_DISCONTINUITY:
+ ret = VIDDEC_PARSE_FRMDONE;
+ // Set errors for progressive
+ if((parser->sc_seen & VC1_SC_SEQ) && (parser->sc_seen & VC1_SC_FRM))
+ *codec_specific_errors = 0;
+ else
+ *codec_specific_errors |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE;
+ vc1_end_frame(parser);
+ parser->sc_seen_since_last_wkld = VC1_SC_INVALID;
+ // TODO: Need to check for interlaced
+ break;
+ default:
+ ret = VIDDEC_PARSE_SUCESS;
+ break;
+ } //switch
+ DEB("sc: 0x%x, sc_seen: 0x%x, sc_since_last_wkld:%d, error:%d, ret: %d\n",
+ next_sc, parser->sc_seen, parser->sc_seen_since_last_wkld,
+ *codec_specific_errors, ret);
+
+ return ret;
+} // viddec_vc1_wkld_done
+
+/* Reports the parser's memory requirements to the framework: a context the
+ * size of vc1_viddec_parser_t and no persistent memory. */
+#ifdef VBP
+void viddec_vc1_get_context_size(viddec_parser_memory_sizes_t *size)
+#else
+static void viddec_vc1_get_context_size(viddec_parser_memory_sizes_t *size)
+#endif
+{
+ size->context_size = sizeof(vc1_viddec_parser_t);
+ size->persist_size = 0;
+ return;
+} // viddec_vc1_get_context_size
+
+/* Returns non-zero if the unit last handled by viddec_vc1_parse() was a
+ * frame start code (the flag is set there from the parsed start code). */
+#ifdef VBP
+uint32_t viddec_vc1_is_start_frame(void *ctxt)
+#else
+static uint32_t viddec_vc1_is_start_frame(void *ctxt)
+#endif
+{
+ vc1_viddec_parser_t *parser = (vc1_viddec_parser_t *) ctxt;
+ return parser->is_frame_start;
+} // viddec_vc1_is_start_frame
+
+/* Fills the framework's ops table with the VC-1 parser entry points. This
+ * is the module's public registration hook. */
+void viddec_vc1_get_ops(viddec_parser_ops_t *ops)
+{
+ ops->init = viddec_vc1_init;
+ ops->parse_syntax = viddec_vc1_parse;
+ ops->get_cxt_size = viddec_vc1_get_context_size;
+ ops->is_wkld_done = viddec_vc1_wkld_done;
+ ops->is_frame_start = viddec_vc1_is_start_frame;
+ return;
+} // viddec_vc1_get_ops
+
diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/viddec_vc1_workload.c b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/viddec_vc1_workload.c
new file mode 100644
index 0000000..b787831
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/viddec_vc1_workload.c
@@ -0,0 +1,691 @@
+/* Any workload management goes in this file */
+
+#include "viddec_fw_debug.h"
+#include "vc1.h"
+#include "vc1parse.h"
+#include "viddec_fw_workload.h"
+#include <auto_eas/gen4_mfd.h>
+#include "viddec_pm_utils_bstream.h"
+
+/* this function returns workload frame types corresponding to VC1 PTYPES (frame types)
+ * VC1 frame types: can be found in vc1parse_common_defs.h
+ * workload frame types are in viddec_workload.h
+*/
+/* Maps a VC-1 picture type (VC1_*_FRAME) to the framework's workload frame
+ * type (VIDDEC_FRAME_TYPE_*); unknown values map to
+ * VIDDEC_FRAME_TYPE_INVALID. */
+static inline uint32_t vc1_populate_frame_type(uint32_t vc1_frame_type)
+{
+ uint32_t viddec_frame_type;
+
+ switch(vc1_frame_type)
+ {
+ case VC1_I_FRAME:
+ viddec_frame_type = VIDDEC_FRAME_TYPE_I;
+ break;
+ case VC1_P_FRAME:
+ viddec_frame_type = VIDDEC_FRAME_TYPE_P;
+ break;
+ case VC1_B_FRAME:
+ viddec_frame_type = VIDDEC_FRAME_TYPE_B;
+ break;
+ case VC1_BI_FRAME:
+ viddec_frame_type = VIDDEC_FRAME_TYPE_BI;
+ break;
+ case VC1_SKIPPED_FRAME :
+ viddec_frame_type = VIDDEC_FRAME_TYPE_SKIP;
+ break;
+ default:
+ viddec_frame_type = VIDDEC_FRAME_TYPE_INVALID;
+ break;
+ } // switch on vc1 frame type
+
+ return(viddec_frame_type);
+} // vc1_populate_frame_type
+
+/* Copies parsed picture/sequence information into the workload's frame
+ * attributes: content size, frame type(s), frame counters, TFF/RFF flags
+ * and pan-scan windows. */
+static void translate_parser_info_to_frame_attributes(void *parent, vc1_viddec_parser_t *parser)
+{
+ viddec_workload_t *wl = viddec_pm_get_header( parent );
+ viddec_frame_attributes_t *attrs = &wl->attrs;
+ vc1_Info *info = &parser->info;
+ unsigned i;
+
+ /* typical sequence layer and entry_point data */
+ /* NOTE(review): metadata width/height appear to be stored in units that
+ * need the *2+2 expansion here - confirm against the seq-layer parser. */
+ attrs->cont_size.height = info->metadata.height * 2 + 2;
+ attrs->cont_size.width = info->metadata.width * 2 + 2;
+
+ /* frame type */
+ /* we can have two fileds with different types for field interlace coding mode */
+ if (info->picLayerHeader.FCM == VC1_FCM_FIELD_INTERLACE) {
+ attrs->frame_type = vc1_populate_frame_type(info->picLayerHeader.PTypeField1);
+ attrs->bottom_field_type = vc1_populate_frame_type(info->picLayerHeader.PTypeField2);
+ } else {
+ attrs->frame_type = vc1_populate_frame_type(info->picLayerHeader.PTYPE);
+ attrs->bottom_field_type = VIDDEC_FRAME_TYPE_INVALID; //unknown
+ }
+
+ /* frame counter */
+ attrs->vc1.tfcntr = info->picLayerHeader.TFCNTR;
+
+ /* TFF, repeat frame, field */
+ attrs->vc1.tff = info->picLayerHeader.TFF;
+ attrs->vc1.rptfrm = info->picLayerHeader.RPTFRM;
+ attrs->vc1.rff = info->picLayerHeader.RFF;
+
+ /* PAN Scan */
+ attrs->vc1.ps_present = info->picLayerHeader.PS_PRESENT;
+ attrs->vc1.num_of_pan_scan_windows = info->picLayerHeader.number_of_pan_scan_window;
+ for (i=0;i<attrs->vc1.num_of_pan_scan_windows;i++) {
+ attrs->vc1.pan_scan_window[i].hoffset = info->picLayerHeader.PAN_SCAN_WINDOW[i].hoffset;
+ attrs->vc1.pan_scan_window[i].voffset = info->picLayerHeader.PAN_SCAN_WINDOW[i].voffset;
+ attrs->vc1.pan_scan_window[i].width = info->picLayerHeader.PAN_SCAN_WINDOW[i].width;
+ attrs->vc1.pan_scan_window[i].height = info->picLayerHeader.PAN_SCAN_WINDOW[i].height;
+ } //end for i
+
+ return;
+} // translate_parser_info_to_frame_attributes
+
+/* Distributes the field-picture intensity-compensation values parsed from
+ * the bitstream (LUMSCALE/LUMSHIFT, plus LUMSCALE2/LUMSHIFT2 for the
+ * both-fields case) into the decoder's SPR registers and the parser's
+ * propagation state. Which registers receive which values depends on
+ * INTCOMPFIELD, on whether this is the first or second field of the frame
+ * (pic->CurrField), and on field order (pic->TFF). */
+void vc1_intcomp(vc1_viddec_parser_t *parser, vc1_Info *pInfo, VC1D_SPR_REGS *spr)
+{
+ vc1_metadata_t *md = &pInfo->metadata;
+ vc1_PictureLayerHeader *pic = &pInfo->picLayerHeader;
+ uint32_t intcomp1 = 1;
+ uint32_t intcomp2 = 0;
+
+ // Get the intensity compensation from the bitstream
+ BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, LUMA_SCALE_1, intcomp1, pic->LUMSCALE);
+ BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, LUMA_SHIFT_1, intcomp1, pic->LUMSHIFT);
+
+ /* Both-fields: a second scale/shift pair came from the metadata. */
+ if(md->INTCOMPFIELD == VC1_INTCOMP_BOTH_FIELD)
+ {
+ intcomp2 = 1;
+ BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, LUMA_SCALE_1, intcomp2, md->LUMSCALE2);
+ BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, LUMA_SHIFT_1, intcomp2, md->LUMSHIFT2);
+ }
+
+ switch(md->INTCOMPFIELD)
+ {
+ case VC1_INTCOMP_TOP_FIELD:
+ if(pic->CurrField == 0) // First field decoded
+ {
+ if(pic->TFF)
+ {
+ //parser->intcomp_bot[0] = intcomp1 << 13;
+ BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_2, spr->intcomp_fwd_top, intcomp1);
+ }
+ else
+ {
+ parser->intcomp_top[0] = intcomp1;
+ parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].intcomp_top = intcomp1;
+ BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_1, spr->intcomp_fwd_top, intcomp1);
+ }
+ }
+ else // Second field
+ {
+ if(pic->TFF)
+ {
+ parser->intcomp_top[0] = intcomp1;
+ parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].intcomp_top = intcomp1;
+ BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_1, spr->intcomp_bwd_top, intcomp1);
+ }
+ else
+ {
+ BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_2, spr->intcomp_fwd_top, intcomp1);
+ }
+ }
+ break;
+ case VC1_INTCOMP_BOTTOM_FIELD:
+ if(pic->CurrField == 0) // First field decoded
+ {
+ if(pic->TFF)
+ {
+ parser->intcomp_bot[0] = intcomp1;
+ parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].intcomp_bot = intcomp1;
+ BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_1, spr->intcomp_fwd_bot, intcomp1);
+ }
+ else
+ {
+ parser->intcomp_bot[0] = intcomp1 << 13;
+ BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_2, spr->intcomp_fwd_bot, intcomp1);
+ }
+ }
+ else // Second field
+ {
+ if(pic->TFF)
+ {
+ BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_2, spr->intcomp_fwd_bot, intcomp1);
+ }
+ else
+ {
+ parser->intcomp_bot[0] = intcomp1;
+ parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].intcomp_bot = intcomp1;
+ BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_1, spr->intcomp_bwd_bot, intcomp1);
+ }
+ }
+ break;
+ case VC1_INTCOMP_BOTH_FIELD:
+ if(pic->CurrField == 0) // First field decoded
+ {
+ if(pic->TFF)
+ {
+ parser->intcomp_bot[0] = intcomp2;
+ parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].intcomp_bot = intcomp2;
+ BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_2, spr->intcomp_fwd_top, intcomp1);
+ BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_1, spr->intcomp_fwd_bot, intcomp2);
+ }
+ else
+ {
+ parser->intcomp_top[0] = intcomp2;
+ parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].intcomp_top = intcomp2;
+ BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_2, spr->intcomp_fwd_bot, intcomp1);
+ BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_1, spr->intcomp_fwd_top, intcomp2);
+ }
+ }
+ else // Second field
+ {
+ if(pic->TFF)
+ {
+ parser->intcomp_top[0] = intcomp1;
+ parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].intcomp_top = intcomp1;
+ BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_1, spr->intcomp_bwd_top, intcomp1);
+ BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_2, spr->intcomp_fwd_bot, intcomp2);
+ }
+ else
+ {
+ parser->intcomp_bot[0] = intcomp1;
+ parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].intcomp_bot = intcomp1;
+ BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_1, spr->intcomp_bwd_bot, intcomp1);
+ BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_2, spr->intcomp_fwd_top, intcomp2);
+ }
+ }
+ break;
+ default:
+ break;
+ } // switch on INTCOMPFIELD
+
+ return;
+} // vc1_intcomp
+
+/**
+ * Program the intensity-compensation (INTCOMP) register fields for the
+ * current picture and propagate INTCOMP state between fields and across
+ * reference frames.
+ *
+ * parser - parser context; holds per-reference INTCOMP history
+ *          (intcomp_top[], intcomp_bot[], intcomp_last[], ref_frame[])
+ * pInfo  - current stream info; picture layer header is read
+ * spr    - HW register image being filled for this picture
+ */
+static void handle_intensity_compensation(vc1_viddec_parser_t *parser, vc1_Info *pInfo, VC1D_SPR_REGS *spr)
+{
+    vc1_PictureLayerHeader *pic = &pInfo->picLayerHeader;
+    uint8_t intcomp_present = false; // NOTE(review): set below but never read afterwards
+
+    // INTCOMP is signalled either through MVMODE or the INTCOMP flag
+    if((pic->MVMODE == VC1_MVMODE_INTENSCOMP) || (pic->INTCOMP))
+    {
+        intcomp_present = true;
+        if(pic->FCM == VC1_FCM_FIELD_INTERLACE)
+        {
+            // Field-interlaced pictures: per-field INTCOMP handled separately
+            vc1_intcomp(parser, pInfo, spr);
+        }
+        else
+        {
+            // Progressive / frame-interlaced: a single LUMSCALE/LUMSHIFT pair
+            // applies; program it into the forward-top register slot.
+            BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_1, spr->intcomp_fwd_top, 1);
+            BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, LUMA_SCALE_1, spr->intcomp_fwd_top, pic->LUMSCALE);
+            BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, LUMA_SHIFT_1, spr->intcomp_fwd_top, pic->LUMSHIFT);
+
+            // If the previous reference was field-interlaced, its bottom field
+            // also needs the same compensation values.
+            if(parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].fcm == VC1_FCM_FIELD_INTERLACE)
+            {
+                BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_2, spr->intcomp_fwd_bot, 1);
+                BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, LUMA_SCALE_2, spr->intcomp_fwd_bot, pic->LUMSCALE);
+                BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, LUMA_SHIFT_2, spr->intcomp_fwd_bot, pic->LUMSHIFT);
+            }
+
+            // Record INTCOMP state for this (new) reference; both fields get
+            // the same value in the non-field case.
+            parser->intcomp_top[0] = spr->intcomp_fwd_top;
+            parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].intcomp_top = spr->intcomp_fwd_top;
+            parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].intcomp_bot = spr->intcomp_fwd_top;
+        }
+    }
+
+    // Propagate the previous picture's intensity compensation
+    if(pic->FCM == VC1_FCM_FIELD_INTERLACE)
+    {
+        // Second field, or first field with a field-interlaced previous
+        // reference: OR in the previous picture's INTCOMP state.
+        if( (pic->CurrField) ||
+            ((pic->CurrField == 0) && (parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].fcm == VC1_FCM_FIELD_INTERLACE)))
+        {
+            spr->intcomp_fwd_top |= parser->intcomp_top[1];
+            spr->intcomp_fwd_bot |= parser->intcomp_bot[1];
+        }
+    }
+    if(pic->FCM == VC1_FCM_FRAME_INTERLACE)
+    {
+        // NOTE(review): frame-interlaced pictures are not coded as two fields,
+        // yet this condition tests pic->CurrField like the field case above —
+        // looks copy-pasted; confirm intended semantics against the spec.
+        if( (pic->CurrField) ||
+            ((pic->CurrField == 0) && (parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].fcm == VC1_FCM_FIELD_INTERLACE)))
+        {
+            spr->intcomp_fwd_bot |= parser->intcomp_bot[1];
+        }
+    }
+
+    switch(pic->PTYPE)
+    {
+    case VC1_B_FRAME:
+        // B frames reuse the INTCOMP state saved from the last P frame.
+        spr->intcomp_fwd_top = parser->intcomp_last[0];
+        spr->intcomp_fwd_bot = parser->intcomp_last[1];
+        spr->intcomp_bwd_top = parser->intcomp_last[2];
+        spr->intcomp_bwd_bot = parser->intcomp_last[3];
+        break;
+    case VC1_P_FRAME:
+        // If first field, store the intcomp values to propagate.
+        // If second field has valid intcomp values, store them
+        // to propagate.
+        if(pic->CurrField == 0) // first field
+        {
+            parser->intcomp_last[0] = spr->intcomp_fwd_top;
+            parser->intcomp_last[1] = spr->intcomp_fwd_bot;
+            parser->intcomp_last[2] = spr->intcomp_bwd_top;
+            parser->intcomp_last[3] = spr->intcomp_bwd_bot;
+        }
+        else // Second field
+        {
+            parser->intcomp_last[0] |= spr->intcomp_fwd_top;
+            parser->intcomp_last[1] |= spr->intcomp_fwd_bot;
+            parser->intcomp_last[2] |= spr->intcomp_bwd_top;
+            parser->intcomp_last[3] |= spr->intcomp_bwd_bot;
+        }
+        break;
+    case VC1_I_FRAME:
+    case VC1_BI_FRAME:
+        // Intra pictures carry no INTCOMP state to propagate.
+        break;
+    default:
+        break;
+    }
+
+    return;
+} // handle_intensity_compensation
+
+/**
+ * This function populates the registers for range reduction (main profile).
+ * This function assumes pInfo->metadata.RANGERED is ON at the sequence layer (J.1.17).
+ * A frame is marked as range reduced by the RANGEREDFRM flag at the picture layer,
+ * and the output of the decoded range reduced frame needs to be scaled up (8.1.1.4).
+ * The previous reference frame needs to be upscaled or downscaled based on the RR
+ * status of the current and previous frame (8.3.4.11).
+ */
+static inline void vc1_fill_RR_hw_struct(vc1_viddec_parser_t *parser, vc1_Info *pInfo, VC1D_SPR_REGS *spr)
+{
+    vc1_PictureLayerHeader *pic = &pInfo->picLayerHeader;
+    int is_previous_ref_rr=0;
+
+    /* range reduction applies to luma and chroma component
+       which are the same register bit as RANGE_MAPY_FLAG, RANGE_MAPUV_FLAG */
+    BF_WRITE(VC1_0_SEQPIC_RANGE_MAP, RANGE_MAP_Y_FLAG, spr->range_map, pic->RANGEREDFRM);
+    BF_WRITE(VC1_0_SEQPIC_RANGE_MAP, RANGE_MAP_UV_FLAG, spr->range_map, pic->RANGEREDFRM);
+
+    /* Get the range reduced status of the previous frame */
+    switch (pic->PTYPE)
+    {
+    case VC1_P_FRAME:
+    {
+        // P frame references the temporally closest frame (T-1)
+        is_previous_ref_rr = parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].rr_frm;
+        break;
+    }
+    case VC1_B_FRAME:
+    {
+        // B frame's forward (past) reference is T-2
+        is_previous_ref_rr = parser->ref_frame[VC1_REF_FRAME_T_MINUS_2].rr_frm;
+        break;
+    }
+    default:
+    {
+        break;
+    }
+    }
+
+    /* if current frame is RR and previous frame is not,
+       downscale the reference pixels ( RANGE_REF_RED_TYPE = 1 in register) */
+    if(pic->RANGEREDFRM)
+    {
+        if(!is_previous_ref_rr)
+        {
+            BF_WRITE(VC1_0_SEQPIC_RECON_CONTROL, RANGE_REF_RED_EN, spr->recon_control, 1);
+            BF_WRITE(VC1_0_SEQPIC_RECON_CONTROL, RANGE_REF_RED_TYPE, spr->recon_control, 1);
+        }
+    }
+    else
+    {
+        /* if current frame is not RR but previous was RR, scale up the reference frame ( RANGE_REF_RED_TYPE = 0) */
+        if(is_previous_ref_rr)
+        {
+            BF_WRITE(VC1_0_SEQPIC_RECON_CONTROL, RANGE_REF_RED_EN, spr->recon_control, 1);
+            BF_WRITE(VC1_0_SEQPIC_RECON_CONTROL, RANGE_REF_RED_TYPE, spr->recon_control, 0);
+        }
+    } // end for RR upscale
+
+} // vc1_fill_RR_hw_struct
+
+/**
+ * Fill the SPR (special-purpose register) image that the workload items
+ * will load into the HW decoder for the current picture: sequence/entry
+ * point metadata, picture-layer controls, motion-vector controls,
+ * intensity compensation, VOPDQUANT and reference-frame IDs.
+ */
+static void vc1_fill_hw_struct(vc1_viddec_parser_t *parser, vc1_Info* pInfo, VC1D_SPR_REGS *spr)
+{
+    vc1_metadata_t *md = &pInfo->metadata;
+    vc1_PictureLayerHeader *pic = &pInfo->picLayerHeader;
+    int field = pic->CurrField;
+    int ptype;
+
+    ptype = pic->PTYPE;
+
+    LOG_CRIT("ptype = %d, field = %d, topfield = %d, slice = %d", ptype, pic->CurrField, pic->BottomField, pic->SLICE_ADDR);
+
+    /* Common to both fields */
+    BF_WRITE(VC1_0_SEQPIC_STREAM_FORMAT_1, PROFILE, spr->stream_format1, md->PROFILE);
+
+    BF_WRITE(VC1_0_SEQPIC_CODED_SIZE, WIDTH, spr->coded_size, md->width);
+    BF_WRITE(VC1_0_SEQPIC_CODED_SIZE, HEIGHT, spr->coded_size, md->height);
+
+    BF_WRITE(VC1_0_SEQPIC_STREAM_FORMAT_2, INTERLACE, spr->stream_format2, md->INTERLACE);
+
+    // Entry-point layer flags (advanced profile) / sequence flags
+    BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, LOOPFILTER, spr->entrypoint1, md->LOOPFILTER);
+    BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, FASTUVMC, spr->entrypoint1, md->FASTUVMC);
+    BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, EXTENDED_MV, spr->entrypoint1, md->EXTENDED_MV);
+    BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, DQUANT, spr->entrypoint1, md->DQUANT);
+    BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, VS_TRANSFORM, spr->entrypoint1, md->VSTRANSFORM);
+    BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, OVERLAP, spr->entrypoint1, md->OVERLAP);
+    BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, QUANTIZER, spr->entrypoint1, md->QUANTIZER);
+    BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, EXTENDED_DMV, spr->entrypoint1, md->EXTENDED_DMV);
+
+    /* if range reduction is indicated at seq. layer, populate range reduction registers for the frame*/
+    if(md->RANGERED)
+    {
+        vc1_fill_RR_hw_struct(parser, pInfo, spr );
+    }
+    else
+    { //range mapping
+        BF_WRITE( VC1_0_SEQPIC_RANGE_MAP, RANGE_MAP_Y_FLAG, spr->range_map, md->RANGE_MAPY_FLAG);
+        BF_WRITE( VC1_0_SEQPIC_RANGE_MAP, RANGE_MAP_Y, spr->range_map, md->RANGE_MAPY);
+        BF_WRITE( VC1_0_SEQPIC_RANGE_MAP, RANGE_MAP_UV_FLAG, spr->range_map, md->RANGE_MAPUV_FLAG);
+        BF_WRITE( VC1_0_SEQPIC_RANGE_MAP, RANGE_MAP_UV, spr->range_map, md->RANGE_MAPUV);
+    }
+
+    BF_WRITE(VC1_0_SEQPIC_FRAME_TYPE, FCM, spr->frame_type, pic->FCM);
+    BF_WRITE(VC1_0_SEQPIC_FRAME_TYPE, PTYPE, spr->frame_type, pic->PTYPE);
+
+    BF_WRITE( VC1_0_SEQPIC_RECON_CONTROL, RNDCTRL, spr->recon_control, md->RNDCTRL);
+    BF_WRITE( VC1_0_SEQPIC_RECON_CONTROL, UVSAMP, spr->recon_control, pic->UVSAMP);
+    BF_WRITE( VC1_0_SEQPIC_RECON_CONTROL, PQUANT, spr->recon_control, pic->PQUANT);
+    BF_WRITE( VC1_0_SEQPIC_RECON_CONTROL, HALFQP, spr->recon_control, pic->HALFQP);
+    BF_WRITE( VC1_0_SEQPIC_RECON_CONTROL, UNIFORM_QNT, spr->recon_control, pic->UniformQuant);
+    BF_WRITE( VC1_0_SEQPIC_RECON_CONTROL, POSTPROC, spr->recon_control, pic->POSTPROC);
+    BF_WRITE( VC1_0_SEQPIC_RECON_CONTROL, CONDOVER, spr->recon_control, pic->CONDOVER);
+    BF_WRITE( VC1_0_SEQPIC_RECON_CONTROL, PQINDEX_LE8, spr->recon_control, (pic->PQINDEX <= 8));
+
+    BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, MVRANGE, spr->mv_control, pic->MVRANGE);
+    // When intensity compensation is coded, the effective MV mode is MVMODE2
+    if ( pic->MVMODE == VC1_MVMODE_INTENSCOMP)
+        BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, MVMODE, spr->mv_control, pic->MVMODE2);
+    else
+        BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, MVMODE, spr->mv_control, pic->MVMODE);
+    BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, MVTAB, spr->mv_control, pic->MVTAB);
+    BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, DMVRANGE, spr->mv_control, pic->DMVRANGE);
+    BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, MV4SWITCH, spr->mv_control, pic->MV4SWITCH);
+    BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, MBMODETAB, spr->mv_control, pic->MBMODETAB);
+    // Field-interlaced B frames always reference two fields
+    BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, NUMREF, spr->mv_control,
+              pic->NUMREF || ((pic->PTYPE == VC1_B_FRAME) && ( pic->FCM == VC1_FCM_FIELD_INTERLACE ) ));
+    BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, REFFIELD, spr->mv_control, pic->REFFIELD);
+
+    handle_intensity_compensation(parser, pInfo, spr);
+
+    BF_WRITE(VC1_0_SEQPIC_REFERENCE_B_FRACTION, BFRACTION_DEN, spr->ref_bfraction, pic->BFRACTION_DEN);
+    BF_WRITE(VC1_0_SEQPIC_REFERENCE_B_FRACTION, BFRACTION_NUM, spr->ref_bfraction, pic->BFRACTION_NUM);
+    BF_WRITE(VC1_0_SEQPIC_REFERENCE_B_FRACTION, REFDIST, spr->ref_bfraction, md->REFDIST);
+
+    // BLOCK CONTROL REGISTER Offset 0x2C
+    BF_WRITE( VC1_0_SEQPIC_BLOCK_CONTROL, CBPTAB, spr->blk_control, pic->CBPTAB);
+    // NOTE(review): register field is named TTMFB while the source bit is
+    // TTMBF — presumably the macro name carries a historical transposition;
+    // confirm against the register definition header.
+    BF_WRITE(VC1_0_SEQPIC_BLOCK_CONTROL, TTMFB, spr->blk_control, pic->TTMBF);
+    BF_WRITE(VC1_0_SEQPIC_BLOCK_CONTROL, TTFRM, spr->blk_control, pic->TTFRM);
+    BF_WRITE(VC1_0_SEQPIC_BLOCK_CONTROL, MV2BPTAB, spr->blk_control, pic->MV2BPTAB);
+    BF_WRITE(VC1_0_SEQPIC_BLOCK_CONTROL, MV4BPTAB, spr->blk_control, pic->MV4BPTAB);
+    // For the second field of a field pair, SLICE_ADDR is given in frame
+    // macroblock rows; rebase it to the start of the second field.
+    if((field == 1) && (pic->SLICE_ADDR))
+    {
+        int mby = md->height * 2 + 2;
+        mby = (mby + 15 ) / 16; // height in macroblock rows, rounded up
+        pic->SLICE_ADDR -= (mby/2);
+    }
+    BF_WRITE(VC1_0_SEQPIC_BLOCK_CONTROL, INITIAL_MV_Y, spr->blk_control, pic->SLICE_ADDR);
+    BF_WRITE(VC1_0_SEQPIC_BLOCK_CONTROL, BP_RAW_ID2, spr->blk_control, md->bp_raw[0]);
+    BF_WRITE(VC1_0_SEQPIC_BLOCK_CONTROL, BP_RAW_ID1, spr->blk_control, md->bp_raw[1]);
+    BF_WRITE(VC1_0_SEQPIC_BLOCK_CONTROL, BP_RAW_ID0, spr->blk_control, md->bp_raw[2]);
+
+    BF_WRITE( VC1_0_SEQPIC_TRANSFORM_DATA, TRANSACFRM, spr->trans_data, pic->TRANSACFRM);
+    BF_WRITE( VC1_0_SEQPIC_TRANSFORM_DATA, TRANSACFRM2, spr->trans_data, pic->TRANSACFRM2);
+    BF_WRITE( VC1_0_SEQPIC_TRANSFORM_DATA, TRANSDCTAB, spr->trans_data, pic->TRANSDCTAB);
+
+    // When DQUANT is 1 or 2, we have the VOPDQUANT structure in the bitstream that
+    // controls the value calculated for ALTPQUANT
+    // ALTPQUANT must be in the range of 1 and 31 for it to be valid
+    // DQUANTFRM is present only when DQUANT is 1 and ALTPQUANT setting should be dependent on DQUANT instead
+    if(md->DQUANT)
+    {
+        if(pic->PQDIFF == 7)
+            BF_WRITE( VC1_0_SEQPIC_VOP_DEQUANT, PQUANT_ALT, spr->vop_dquant, pic->ABSPQ);
+        else if (pic->DQUANTFRM == 1)
+            BF_WRITE( VC1_0_SEQPIC_VOP_DEQUANT, PQUANT_ALT, spr->vop_dquant, pic->PQUANT + pic->PQDIFF + 1);
+    }
+    BF_WRITE( VC1_0_SEQPIC_VOP_DEQUANT, DQUANTFRM, spr->vop_dquant, pic->DQUANTFRM);
+    BF_WRITE( VC1_0_SEQPIC_VOP_DEQUANT, DQPROFILE, spr->vop_dquant, pic->DQPROFILE);
+    BF_WRITE( VC1_0_SEQPIC_VOP_DEQUANT, DQES, spr->vop_dquant, pic->DQSBEDGE);
+    BF_WRITE( VC1_0_SEQPIC_VOP_DEQUANT, DQBILEVEL, spr->vop_dquant, pic->DQBILEVEL);
+
+    BF_WRITE(VC1_0_SEQPIC_CURR_FRAME_ID,FCM, spr->ref_frm_id[VC1_FRAME_CURRENT_REF], pic->FCM );
+
+    if ( ptype == VC1_B_FRAME) {
+        // Forward reference is past reference and is the second temporally closest reference - hence minus_2
+        BF_WRITE(VC1_0_SEQPIC_FWD_REF_FRAME_ID, FCM, parser->spr.ref_frm_id[VC1_FRAME_PAST], parser->ref_frame[VC1_REF_FRAME_T_MINUS_2].fcm );
+        // Backward reference is future reference frame and is temporally the closest - hence minus_1
+        BF_WRITE(VC1_0_SEQPIC_BWD_REF_FRAME_ID, FCM, parser->spr.ref_frm_id[VC1_FRAME_FUTURE], parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].fcm );
+    } else {
+        // Only Forward reference is valid and is the temporally closest reference - hence minus_1, backward is set same as forward
+        BF_WRITE(VC1_0_SEQPIC_FWD_REF_FRAME_ID, FCM, parser->spr.ref_frm_id[VC1_FRAME_PAST], parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].fcm );
+        BF_WRITE(VC1_0_SEQPIC_BWD_REF_FRAME_ID, FCM, parser->spr.ref_frm_id[VC1_FRAME_FUTURE], parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].fcm );
+    }
+
+    BF_WRITE( VC1_0_SEQPIC_FIELD_REF_FRAME_ID, TOP_FIELD, spr->fieldref_ctrl_id, pic->BottomField);
+    BF_WRITE( VC1_0_SEQPIC_FIELD_REF_FRAME_ID, SECOND_FIELD, spr->fieldref_ctrl_id, pic->CurrField);
+    // I pictures anchor themselves; otherwise inherit the reference's anchor
+    // status for the current field.
+    if(parser->info.picLayerHeader.PTYPE == VC1_I_FRAME)
+    {
+        BF_WRITE(VC1_0_SEQPIC_FIELD_REF_FRAME_ID, ANCHOR, spr->fieldref_ctrl_id, 1);
+    }
+    else
+    {
+        BF_WRITE(VC1_0_SEQPIC_FIELD_REF_FRAME_ID, ANCHOR, spr->fieldref_ctrl_id, parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].anchor[pic->CurrField]);
+    }
+
+    // Image structure: 1 = top field, 2 = bottom field (field pictures only)
+    if( pic->FCM == VC1_FCM_FIELD_INTERLACE ) {
+        BF_WRITE(VC1_0_SEQPIC_IMAGE_STRUCTURE, IMG_STRUC, spr->imgstruct, (pic->BottomField) ? 2 : 1);
+    }
+
+    return;
+} // vc1_fill_hw_struct
+
+/**
+ * Emit the workload for the current frame: translate frame attributes,
+ * fill the SPR register image, dump it as decoder-specific workitems,
+ * append the slice bit-offset item and the pixel data.
+ * Returns 0 always (skipped frames emit only attributes).
+ */
+int32_t vc1_parse_emit_current_frame(void *parent, vc1_viddec_parser_t *parser)
+{
+    viddec_workload_item_t wi;
+    const uint32_t *pl;
+    int i;
+    int nitems;
+
+    // Skipped frames carry no picture data; just publish attributes.
+    if( parser->info.picLayerHeader.PTYPE == VC1_SKIPPED_FRAME ) {
+        translate_parser_info_to_frame_attributes( parent, parser );
+        return 0;
+    }
+
+    translate_parser_info_to_frame_attributes( parent, parser );
+    memset(&parser->spr, 0, sizeof(VC1D_SPR_REGS));
+    vc1_fill_hw_struct( parser, &parser->info, &parser->spr );
+
+    /* STUFF BSP Data Memory it into a variety of workload items */
+
+    pl = (const uint32_t *) &parser->spr;
+
+    // How many payloads must be generated
+    nitems = (sizeof(parser->spr) + 7) / 8; /* In QWORDs rounded up */
+
+
+    // Dump DMEM to an array of workitems (8 bytes of register image each).
+    // Emitted only for the first slice, or when the picture info changed.
+    for( i = 0; (i < nitems) && ( (parser->info.picLayerHeader.SLICE_ADDR == 0) || parser->info.picture_info_has_changed ); i++ )
+    {
+        wi.vwi_type = VIDDEC_WORKLOAD_DECODER_SPECIFIC;
+        // NOTE(review): pointer-to-unsigned-int cast assumes 32-bit pointers
+        // (firmware target); not 64-bit clean.
+        wi.data.data_offset = (unsigned int)pl - (unsigned int)&parser->spr; // offset within struct
+        wi.data.data_payload[0] = pl[0];
+        wi.data.data_payload[1] = pl[1];
+        pl += 2;
+
+        viddec_pm_append_workitem( parent, &wi );
+    }
+
+    {
+        uint32_t bit, byte;
+        uint8_t is_emul;
+        viddec_pm_get_au_pos(parent, &bit, &byte, &is_emul);
+        // Send current bit offset and current slice
+        wi.vwi_type = VIDDEC_WORKLOAD_VC1_BITOFFSET;
+        // If slice data starts in the middle of the emulation prevention sequence -
+        // Eg: 00 00 03 01 - slice data starts at the second byte of 0s, we still feed the data
+        // to the decoder starting at the first byte of 0s so that the decoder can detect the
+        // emulation prevention. But the actual data starts are offset 8 in this bit sequence.
+        wi.vwi_payload[0] = bit + (is_emul*8);
+        wi.vwi_payload[1] = parser->info.picLayerHeader.SLICE_ADDR;
+        wi.vwi_payload[2] = 0xdeaddead; // unused payload slot; marker value
+        viddec_pm_append_workitem( parent, &wi );
+    }
+
+    viddec_pm_append_pixeldata( parent );
+
+    return(0);
+}
+
+/* Sends a VIDDEC_WORKLOAD_VC1_PAST_FRAME item. The id/address fields are
+ * zeroed placeholders; the host fills in the real reference surfaces. */
+static inline void vc1_send_past_ref_items(void *parent)
+{
+    viddec_workload_item_t wi;
+    wi.vwi_type = VIDDEC_WORKLOAD_VC1_PAST_FRAME;
+    wi.ref_frame.reference_id = 0;
+    wi.ref_frame.luma_phys_addr = 0;
+    wi.ref_frame.chroma_phys_addr = 0;
+    viddec_pm_append_workitem( parent, &wi );
+    return;
+}
+
+/* Sends a VIDDEC_WORKLOAD_VC1_FUTURE_FRAME item. Fields are zeroed
+ * placeholders for the host, mirroring vc1_send_past_ref_items(). */
+static inline void vc1_send_future_ref_items(void *parent)
+{
+    viddec_workload_item_t wi;
+    wi.vwi_type = VIDDEC_WORKLOAD_VC1_FUTURE_FRAME;
+    wi.ref_frame.reference_id = 0;
+    wi.ref_frame.luma_phys_addr = 0;
+    wi.ref_frame.chroma_phys_addr = 0;
+    viddec_pm_append_workitem( parent, &wi );
+    return;
+}
+
+/* Sends a reference-reorder item to the host:
+ * the future frame gets pushed to the past slot. */
+static inline void send_reorder_ref_items(void *parent)
+{
+    viddec_workload_item_t wi;
+    wi.vwi_type = VIDDEC_WORKLOAD_REFERENCE_FRAME_REORDER;
+    wi.ref_reorder.ref_table_offset = 0;
+    // Each byte maps an output slot to a source index.
+    wi.ref_reorder.ref_reorder_00010203 = 0x01010203; //put reference frame index 1 as reference index 0
+    wi.ref_reorder.ref_reorder_04050607 = 0x04050607; // index 4,5,6,7 stay the same
+    viddec_pm_append_workitem( parent, &wi );
+    return;
+} // send_reorder_ref_items
+
+/** Update the workload with reference-frame workitems and record the new
+ *  frame's reference bookkeeping (anchor flags, FCM, range-reduction state).
+ */
+void vc1_start_new_frame(void *parent, vc1_viddec_parser_t *parser)
+{
+    vc1_metadata_t *md = &(parser->info.metadata);
+    viddec_workload_t *wl = viddec_pm_get_header(parent);
+    int frame_type = parser->info.picLayerHeader.PTYPE;
+    int frame_id = 1; // new reference frame is assigned index 1
+
+    /* init */
+    memset(&parser->spr, 0, sizeof(parser->spr));
+    wl->is_reference_frame = 0;
+
+    /* set flag - extra output frame needed for range adjustment
+       (range mapping or range reduction) */
+    if (parser->info.metadata.RANGE_MAPY_FLAG ||
+        parser->info.metadata.RANGE_MAPUV_FLAG ||
+        parser->info.picLayerHeader.RANGEREDFRM)
+    {
+        wl->is_reference_frame |= WORKLOAD_FLAGS_RA_FRAME;
+    }
+
+    LOG_CRIT("vc1_start_new_frame: frame_type=%d \n",frame_type);
+
+    // B and BI pictures are never used as references
+    parser->is_reference_picture = ((VC1_B_FRAME != frame_type) && (VC1_BI_FRAME != frame_type));
+
+    /* reference / anchor frames processing
+     * we need to send reorder before reference frames */
+    if (parser->is_reference_picture)
+    {
+        /* one frame has been sent */
+        if (parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].id != -1)
+        {
+            /* there is a frame in the reference buffer, move it to the past */
+            send_reorder_ref_items(parent);
+        }
+    }
+
+    /* send workitems for reference frames */
+    switch( frame_type )
+    {
+    case VC1_B_FRAME:
+    {
+        // B needs both a past and a future reference
+        vc1_send_past_ref_items(parent);
+        vc1_send_future_ref_items(parent);
+        break;
+    }
+    case VC1_SKIPPED_FRAME:
+    {
+        wl->is_reference_frame |= WORKLOAD_SKIPPED_FRAME;
+        vc1_send_past_ref_items(parent);
+        break;
+    }
+    case VC1_P_FRAME:
+    {
+        vc1_send_past_ref_items( parent);
+        break;
+    }
+    default:
+        break;
+    }
+
+    /* reference / anchor frames from previous code
+     * we may need it for frame reduction */
+    if (parser->is_reference_picture)
+    {
+        wl->is_reference_frame |= WORKLOAD_REFERENCE_FRAME | (frame_id & WORKLOAD_REFERENCE_FRAME_BMASK);
+
+        // Record the current picture as the newest reference (T-0 slot)
+        parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].id = frame_id;
+        parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].fcm = parser->info.picLayerHeader.FCM;
+        parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].anchor[0] = (parser->info.picLayerHeader.PTYPE == VC1_I_FRAME);
+        // For field pairs, the second field has its own picture type
+        if(parser->info.picLayerHeader.FCM == VC1_FCM_FIELD_INTERLACE)
+        {
+            parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].anchor[1] = (parser->info.picLayerHeader.PTypeField2 == VC1_I_FRAME);
+        }
+        else
+        {
+            parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].anchor[1] = parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].anchor[0];
+        }
+
+        parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].type = parser->info.picLayerHeader.PTYPE;
+        parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].rr_en = md->RANGERED;
+        parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].rr_frm = parser->info.picLayerHeader.RANGEREDFRM;
+
+        LOG_CRIT("anchor[0] = %d, anchor[1] = %d",
+                 parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].anchor[0],
+                 parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].anchor[1] );
+    }
+
+    return;
+} // vc1_start_new_frame
+
+/* Finish the current frame: if it is a reference picture, age the
+ * reference list (T-1 becomes T-2, T-0 becomes T-1). */
+void vc1_end_frame(vc1_viddec_parser_t *parser)
+{
+    /* update status of reference frames */
+    if(parser->is_reference_picture)
+    {
+        parser->ref_frame[VC1_REF_FRAME_T_MINUS_2] = parser->ref_frame[VC1_REF_FRAME_T_MINUS_1];
+        parser->ref_frame[VC1_REF_FRAME_T_MINUS_1] = parser->ref_frame[VC1_REF_FRAME_T_MINUS_0];
+    }
+
+    return;
+} // vc1_end_frame
+
diff --git a/mix_vbp/viddec_fw/fw/include/stdint.h b/mix_vbp/viddec_fw/fw/include/stdint.h
new file mode 100644
index 0000000..885cfe1
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/include/stdint.h
@@ -0,0 +1,23 @@
+#ifndef __STDINT_H
+#define __STDINT_H
+
+/* Minimal <stdint.h> replacement for the firmware toolchain.
+ * NOTE(review): assumes an ILP32 target (int = 32 bits, long long = 64 bits)
+ * — confirm for any new build environment. */
+typedef unsigned char uint8_t;
+typedef unsigned short uint16_t;
+typedef unsigned int uint32_t;
+typedef unsigned long long uint64_t;
+
+//#ifndef _MACHTYPES_H_
+typedef signed char int8_t;
+typedef signed short int16_t;
+typedef signed int int32_t;
+typedef signed long long int64_t;
+//#endif
+
+#ifndef NULL
+// NOTE(review): expansion is unparenthesized; ((void*)0) would be safer.
+#define NULL (void*)0x0
+#endif
+
+// Boolean literals for pre-C99-stdbool code paths.
+#define true 1
+#define false 0
+
+#endif
diff --git a/mix_vbp/viddec_fw/fw/include/viddec_debug.h b/mix_vbp/viddec_fw/fw/include/viddec_debug.h
new file mode 100644
index 0000000..23db98f
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/include/viddec_debug.h
@@ -0,0 +1,31 @@
+#ifndef VIDDEC_DEBUG_H
+#define VIDDEC_DEBUG_H
+
+/* Debug/trace macros: real output only for HOST_ONLY builds without VBP;
+ * all other configurations compile the macros away to nothing. */
+#ifndef VBP
+
+#ifdef HOST_ONLY
+      #include <stdio.h>
+      #include <osal.h>
+      #define DEB           OS_PRINT
+      #define FWTRACE       OS_PRINT("trace:%s %d\n", __FUNCTION__, __LINE__ );
+//  #define DEB(format, args...)
+//  #define FWTRACE
+      #define DEB_FNAME(format, args...)  OS_PRINT("%s: %s[%d]:: " format, __FILE__, __FUNCTION__ , __LINE__ ,  ## args )
+      #define CDEB(a, format, args...)    if(a != 0) {DEB(format, ##args);}
+#else
+      // Firmware build: strip all debug output
+      #define DEB(format, args...)
+      #define FWTRACE
+      #define CDEB(a, format, args...)
+      #define DEB_FNAME(format, args...)
+#endif
+
+#else  // VBP is defined
+
+// VBP (video bitstream parser library) build: no debug output
+#define DEB(format, args...)
+#define FWTRACE
+#define CDEB(a, format, args...)
+#define DEB_FNAME(format, args...)
+
+#endif // end of VBP
+
+#endif
diff --git a/mix_vbp/viddec_fw/fw/include/viddec_fw_version.h b/mix_vbp/viddec_fw/fw/include/viddec_fw_version.h
new file mode 100644
index 0000000..099be69
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/include/viddec_fw_version.h
@@ -0,0 +1,7 @@
+#ifndef VIDDEC_FW_VERSION_H
+#define VIDDEC_FW_VERSION_H
+
+/* Firmware version, reported as MAJOR.MINOR.BUILD */
+#define VIDDEC_FW_MAJOR_NUM 0
+#define VIDDEC_FW_MINOR_NUM 8
+#define VIDDEC_FW_BUILD_NUM 11
+#endif
diff --git a/mix_vbp/viddec_fw/fw/parser/Makefile.am b/mix_vbp/viddec_fw/fw/parser/Makefile.am
new file mode 100644
index 0000000..c94b935
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/Makefile.am
@@ -0,0 +1,205 @@
+#INTEL CONFIDENTIAL
+#Copyright 2009 Intel Corporation All Rights Reserved.
+#The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel's prior express written permission.
+
+#No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+#
+VC1PATH=./../codecs/vc1/parser
+MP2PATH=./../codecs/mp2/parser
+MP4PATH=./../codecs/mp4/parser
+H264PATH=./../codecs/h264/parser
+
+PARSER_INCLUDE_PATH=-I./include \
+ -I../include \
+ -I../../include \
+ -I./vc1/include \
+ -I../codecs/vc1/include \
+ -I../codecs/mp2/include \
+ -I../codecs/mp4/include \
+ -I../codecs/h264/include \
+ -I../codecs/vc1/parser
+
+
+PARSER_MACROS= -DVBP \
+ -DHOST_ONLY \
+ -DG_LOG_DOMAIN=\"vbp\"
+
+
+la_CFLAGS = $(GLIB_CFLAGS) \
+ $(MIX_CFLAGS) \
+ $(GOBJECT_CFLAGS) \
+ $(GTHREAD_CFLAGS) \
+ $(PARSER_INCLUDE_PATH) \
+ $(PARSER_MACROS) \
+ -DMIXVBP_CURRENT=@MIXVBP_CURRENT@ \
+ -DMIXVBP_AGE=@MIXVBP_AGE@ \
+ -DMIXVBP_REVISION=@MIXVBP_REVISION@
+
+la_LIBADD = $(GLIB_LIBS) \
+ $(GOBJECT_LIBS) \
+ $(GTHREAD_LIBS)
+
+la_LDFLAGS = $(GLIB_LIBS) \
+ $(GOBJECT_LIBS) \
+ $(GTHREAD_LIBS) \
+ -version-info @MIXVBP_CURRENT@:@MIXVBP_REVISION@:@MIXVBP_AGE@
+
+lib_LTLIBRARIES = libmixvbp.la \
+ libmixvbp_vc1.la \
+ libmixvbp_mpeg2.la \
+ libmixvbp_mpeg4.la \
+ libmixvbp_h264.la
+
+
+###################################### vbp loader ########################################
+
+# sources used to compile
+libmixvbp_la_SOURCES = vbp_loader.c \
+ vbp_utils.c \
+ vbp_trace.c \
+ vbp_h264_parser.c \
+ vbp_vc1_parser.c \
+ vbp_mp42_parser.c \
+ viddec_pm.c \
+ viddec_pm_stubs.c \
+ viddec_pm_parser_ops.c \
+ viddec_pm_utils_bstream.c \
+ viddec_pm_tags.c \
+ viddec_emit.c \
+ viddec_pm_utils_list.c \
+ viddec_parse_sc.c \
+ viddec_parse_sc_stub.c
+
+libmixvbp_la_CFLAGS = $(la_CFLAGS)
+libmixvbp_la_LIBADD = $(la_LIBADD)
+libmixvbp_la_LDFLAGS = $(la_LDFLAGS)
+libmixvbp_la_LIBTOOLFLAGS = --tag=disable-static
+
+###################################### VC-1 parser ########################################
+
+libmixvbp_vc1_la_SOURCES = $(VC1PATH)/vc1parse.c \
+ $(VC1PATH)/vc1parse_bitplane.c \
+ $(VC1PATH)/vc1parse_bpic.c \
+ $(VC1PATH)/vc1parse_bpic_adv.c \
+ $(VC1PATH)/vc1parse_common_tables.c \
+ $(VC1PATH)/vc1parse_huffman.c \
+ $(VC1PATH)/vc1parse_ipic.c \
+ $(VC1PATH)/vc1parse_ipic_adv.c \
+ $(VC1PATH)/vc1parse_mv_com.c \
+ $(VC1PATH)/vc1parse_pic_com.c \
+ $(VC1PATH)/vc1parse_pic_com_adv.c \
+ $(VC1PATH)/vc1parse_ppic.c \
+ $(VC1PATH)/vc1parse_ppic_adv.c \
+ $(VC1PATH)/vc1parse_vopdq.c \
+ $(VC1PATH)/viddec_vc1_parse.c \
+ $(VC1PATH)/mix_vbp_vc1_stubs.c
+
+libmixvbp_vc1_la_CFLAGS = $(la_CFLAGS)
+libmixvbp_vc1_la_LIBADD = $(la_LIBADD) libmixvbp.la
+libmixvbp_vc1_la_LDFLAGS = $(la_LDFLAGS)
+libmixvbp_vc1_la_LIBTOOLFLAGS = --tag=disable-static
+
+###################################### MPEG-2 parser ########################################
+
+libmixvbp_mpeg2_la_SOURCES = $(MP2PATH)/viddec_mpeg2_metadata.c \
+ $(MP2PATH)/viddec_mpeg2_parse.c \
+ $(MP2PATH)/mix_vbp_mpeg2_stubs.c
+
+libmixvbp_mpeg2_la_CFLAGS = $(la_CFLAGS)
+libmixvbp_mpeg2_la_LIBADD = $(la_LIBADD) libmixvbp.la
+libmixvbp_mpeg2_la_LDFLAGS = $(la_LDFLAGS)
+libmixvbp_mpeg2_la_LIBTOOLFLAGS = --tag=disable-static
+
+###################################### MPEG-4 parser ########################################
+
+libmixvbp_mpeg4_la_SOURCES = $(MP4PATH)/viddec_mp4_parse.c \
+ $(MP4PATH)/viddec_parse_sc_mp4.c \
+ $(MP4PATH)/viddec_mp4_visualobject.c \
+ $(MP4PATH)/viddec_mp4_videoobjectplane.c \
+ $(MP4PATH)/viddec_mp4_shortheader.c \
+ $(MP4PATH)/viddec_mp4_videoobjectlayer.c \
+ $(MP4PATH)/viddec_mp4_decodevideoobjectplane.c
+
+libmixvbp_mpeg4_la_CFLAGS = $(la_CFLAGS)
+libmixvbp_mpeg4_la_LIBADD = $(la_LIBADD) libmixvbp.la
+libmixvbp_mpeg4_la_LDFLAGS = $(la_LDFLAGS)
+libmixvbp_mpeg4_la_LIBTOOLFLAGS = --tag=disable-static
+
+###################################### H.264 parser ########################################
+
+libmixvbp_h264_la_SOURCES = $(H264PATH)/h264parse.c \
+ $(H264PATH)/h264parse_bsd.c \
+ $(H264PATH)/h264parse_math.c \
+ $(H264PATH)/h264parse_mem.c \
+ $(H264PATH)/h264parse_sei.c \
+ $(H264PATH)/h264parse_sh.c \
+ $(H264PATH)/h264parse_pps.c \
+ $(H264PATH)/h264parse_sps.c \
+ $(H264PATH)/h264parse_dpb.c \
+ $(H264PATH)/viddec_h264_parse.c \
+ $(H264PATH)/mix_vbp_h264_stubs.c
+
+libmixvbp_h264_la_CFLAGS = $(la_CFLAGS)
+libmixvbp_h264_la_LIBADD = $(la_LIBADD) libmixvbp.la
+libmixvbp_h264_la_LDFLAGS = $(la_LDFLAGS)
+libmixvbp_h264_la_LIBTOOLFLAGS = --tag=disable-static
+
+##############################################################################################
+
+# headers we need but don't want installed
+noinst_HEADERS = ./vbp_h264_parser.h \
+ ./vbp_mp42_parser.h \
+ ./vbp_vc1_parser.h \
+ ./vbp_trace.h \
+ ./vbp_loader.h \
+ ./vbp_utils.h \
+ ./include/fw_pvt.h \
+ ./include/ipc_fw_custom.h \
+ ./include/viddec_emitter.h \
+ ./include/viddec_fw_debug.h \
+ ./include/viddec_fw_parser_fw_ipc.h \
+ ./include/viddec_h264_parse.h \
+ ./include/viddec_mp4_parse.h \
+ ./include/viddec_mpeg2_parse.h \
+ ./include/viddec_parser_ops.h \
+ ./include/viddec_pm.h \
+ ./include/viddec_pm_parse.h \
+ ./include/viddec_pm_tags.h \
+ ./include/viddec_pm_utils_bstream.h \
+ ./include/viddec_pm_utils_list.h \
+ ./include/viddec_vc1_parse.h \
+ ../include/stdint.h \
+ ../include/viddec_debug.h \
+ ../include/viddec_fw_version.h \
+ ../../include/viddec_fw_common_defs.h \
+ ../../include/viddec_fw_decoder_host.h \
+ ../../include/viddec_fw_frame_attr.h \
+ ../../include/viddec_fw_item_types.h \
+ ../../include/viddec_fw_parser_host.h \
+ ../../include/viddec_fw_workload.h \
+ ../../fw/include/stdint.h \
+ ../../fw/include/viddec_debug.h \
+ ../../fw/include/viddec_fw_version.h \
+ ../../fw/codecs/h264/include/h264.h \
+ ../../fw/codecs/h264/include/h264parse.h \
+ ../../fw/codecs/h264/include/h264parse_dpb.h \
+ ../../fw/codecs/h264/include/h264parse_sei.h \
+ ../../fw/codecs/mp2/include/mpeg2.h \
+ ../../fw/codecs/mp2/include/viddec_mpeg2.h \
+ ../../fw/codecs/mp4/include/viddec_fw_mp4.h \
+ ../../fw/codecs/mp4/parser/viddec_mp4_decodevideoobjectplane.h \
+ ../../fw/codecs/mp4/parser/viddec_mp4_parse.h \
+ ../../fw/codecs/mp4/parser/viddec_mp4_shortheader.h \
+ ../../fw/codecs/mp4/parser/viddec_mp4_videoobjectlayer.h \
+ ../../fw/codecs/mp4/parser/viddec_mp4_videoobjectplane.h \
+ ../../fw/codecs/mp4/parser/viddec_mp4_visualobject.h \
+ ../../fw/codecs/vc1/include/vc1common.h \
+ ../../fw/codecs/vc1/parser/vc1.h \
+ ../../fw/codecs/vc1/parser/vc1parse.h \
+ ../../fw/codecs/vc1/parser/vc1parse_common_defs.h
+
+
+mixincludedir=$(includedir)/mixvbp
+mixinclude_HEADERS = vbp_loader.h
+
+##############################################################################################
diff --git a/mix_vbp/viddec_fw/fw/parser/gv_sven_devh.c b/mix_vbp/viddec_fw/fw/parser/gv_sven_devh.c
new file mode 100644
index 0000000..299dbce
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/gv_sven_devh.c
@@ -0,0 +1,224 @@
+/*
+
+ This file is provided under a dual BSD/GPLv2 license. When using or
+ redistributing this file, you may do so under either license.
+
+ GPL LICENSE SUMMARY
+
+ Copyright(c) 2005-2008 Intel Corporation. All rights reserved.
+
+ This program is free software; you can redistribute it and/or modify
+ it under the terms of version 2 of the GNU General Public License as
+ published by the Free Software Foundation.
+
+ This program is distributed in the hope that it will be useful, but
+ WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program; if not, write to the Free Software
+ Foundation, Inc., 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA.
+ The full GNU General Public License is included in this distribution
+ in the file called LICENSE.GPL.
+
+ Contact Information:
+ Intel Corporation
+ 2200 Mission College Blvd.
+ Santa Clara, CA 97052
+
+ BSD LICENSE
+
+ Copyright(c) 2005-2008 Intel Corporation. All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in
+ the documentation and/or other materials provided with the
+ distribution.
+ * Neither the name of Intel Corporation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+*/
+
+#ifndef SVEN_FW_H
+#include "sven_fw.h"
+#endif
+
+#define _OSAL_IO_MEMMAP_H /* to prevent errors when including sven_devh.h */
+#define _OSAL_ASSERT_H /* to prevent errors when including sven_devh.h */
+#include "sven_devh.h"
+
+#include "fw_pvt.h"
+
+static os_devhandle_t g_svenh;
+
+#define FW_SVEN_DEVH_DISABLE_SVEN_REGISTER_IO
+//#define SVEN_DEVH_DISABLE_SVEN
+
+extern int sven_fw_is_tx_enabled(
+ struct SVENHandle *svenh );
+
+#ifndef SVEN_DEVH_DISABLE_SVEN
+static void sven_write_event(
+ struct SVENHandle *svenh,
+ struct SVENEvent *ev )
+{
+ if ( NULL == svenh )
+ svenh = &g_svenh.devh_svenh;
+
+ if ( NULL != svenh->phot )
+ sven_fw_write_event(svenh,ev);
+}
+
+static void sven_fw_initialize_event_top(
+ struct SVENEvent *ev,
+ int module,
+ int unit,
+ int event_type,
+ int event_subtype )
+{
+ ev->se_et.et_gencount = 0;
+ ev->se_et.et_module = module;
+ ev->se_et.et_unit = unit;
+ ev->se_et.et_type = event_type;
+ ev->se_et.et_subtype = event_subtype;
+}
+#endif
+
+uint32_t sven_get_timestamp()
+{
+ uint32_t value = 0;
+
+ if ( NULL != g_svenh.devh_svenh.ptime )
+ {
+ value = sven_fw_read_external_register( &g_svenh.devh_svenh, g_svenh.devh_svenh.ptime );
+ }
+
+ return(value);
+}
+
+/* ---------------------------------------------------------------------- */
+/* ---------------------------------------------------------------------- */
+
+void devh_SVEN_SetModuleUnit(
+ os_devhandle_t *devh,
+ int sven_module,
+ int sven_unit )
+{
+#ifndef SVEN_DEVH_DISABLE_SVEN
+ if ( NULL == devh )
+ devh = &g_svenh;
+ devh->devh_sven_module = sven_module;
+ devh->devh_sven_unit = sven_unit;
+#endif
+}
+
+os_devhandle_t *devhandle_factory( const char *desc )
+{
+ /* pointer to global vsparc local registers */
+ g_svenh.devh_regs_ptr = (void *) 0x10000000; /* firmware address to Local (GV) registers */
+
+ return( &g_svenh );
+}
+
+int devhandle_connect_name(
+ os_devhandle_t *devh,
+ const char *devname )
+{
+ return(1);
+}
+
+/* ---------------------------------------------------------------------- */
+/* ---------------------------------------------------------------------- */
+
+void devh_SVEN_WriteModuleEvent(
+ os_devhandle_t *devh,
+ int module_event_subtype,
+ unsigned int payload0,
+ unsigned int payload1,
+ unsigned int payload2,
+ unsigned int payload3,
+ unsigned int payload4,
+ unsigned int payload5 )
+{
+#ifndef SVEN_DEVH_DISABLE_SVEN
+ struct SVENEvent ev __attribute__ ((aligned(8)));
+
+ devh = (NULL != devh) ? devh : &g_svenh;
+
+ if ( ! sven_fw_is_tx_enabled( &devh->devh_svenh ) )
+ return;
+
+ sven_fw_initialize_event_top( &ev,
+ devh->devh_sven_module,
+ 1 /* devh->devh_sven_unit */,
+ SVEN_event_type_module_specific,
+ module_event_subtype );
+
+ ev.u.se_uint[0] = payload0;
+ ev.u.se_uint[1] = payload1;
+ ev.u.se_uint[2] = payload2;
+ ev.u.se_uint[3] = payload3;
+ ev.u.se_uint[4] = payload4;
+ ev.u.se_uint[5] = payload5;
+
+ sven_write_event( &devh->devh_svenh, &ev );
+#endif
+}
+
+/* ---------------------------------------------------------------------- */
+/* SVEN FW TX: Required custom routines to enable FW TX */
+/* ---------------------------------------------------------------------- */
+int sven_fw_set_globals(
+ struct SVEN_FW_Globals *fw_globals )
+{
+ sven_fw_attach( &g_svenh.devh_svenh, fw_globals );
+ devh_SVEN_SetModuleUnit( &g_svenh, SVEN_module_GEN4_GV, 1 );
+ return(0);
+}
+
+uint32_t cp_using_dma_phys(uint32_t ddr_addr, uint32_t local_addr, uint32_t size, char to_ddr, char swap);
+
+unsigned int sven_fw_read_external_register(
+ struct SVENHandle *svenh,
+ volatile unsigned int *preg )
+{
+ unsigned int reg __attribute__ ((aligned(8)));
+
+ (void)svenh; // argument unused
+
+ cp_using_dma_phys( (uint32_t) preg, (uint32_t) &reg, 4, 0, 0 );
+
+ return( reg );
+}
+
+void sven_fw_copy_event_to_host_mem(
+ struct SVENHandle *svenh,
+ volatile struct SVENEvent *to,
+ const struct SVENEvent *from )
+{
+ (void)svenh; // argument unused
+
+ cp_using_dma_phys( (uint32_t) to, (uint32_t) from, sizeof(*to), 1, 0 );
+}
+/* ---------------------------------------------------------------------- */
+/* ---------------------------------------------------------------------- */
diff --git a/mix_vbp/viddec_fw/fw/parser/include/fw_pvt.h b/mix_vbp/viddec_fw/fw/parser/include/fw_pvt.h
new file mode 100644
index 0000000..0928ad3
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/include/fw_pvt.h
@@ -0,0 +1,114 @@
+#ifndef FW_PVT_H
+#define FW_PVT_H
+
+#include <stdint.h>
+#include "viddec_fw_parser_fw_ipc.h"
+#include "viddec_fw_parser_ipclib_config.h"
+#include "viddec_emitter.h"
+#include "viddec_pm.h"
+#include "viddec_fw_debug.h"
+
+#define GET_IPC_HANDLE(x) (FW_IPC_Handle *)&(x.fwIpc)
+#define GV_DDR_MEM_MASK 0x80000000
+/* Macros for Interrupts */
+#define TRAPS_ENABLE __asm__ volatile ("mov %%psr, %%l0; or %%l0, 0x20, %%l0; mov %%l0, %%psr; nop; nop; nop;":::"l0")
+#define TRAPS_DISABLE __asm__ volatile ("mov %%psr, %%l0; and %%l0, ~0x20, %%l0; mov %%l0, %%psr; nop; nop; nop;":::"l0")
+
+#define TRAPS_INT_ENABLE __asm__ volatile ("mov %%psr, %%l0; and %%l0, ~0xF00, %%l0; mov %%l0, %%psr; nop; nop; nop;":::"l0")
+#define TRAPS_INT_DISABLE __asm__ volatile ("mov %%psr, %%l0; or %%l0, 0xF00, %%l0; mov %%l0, %%psr; nop; nop; nop;":::"l0")
+
+#define TRAPS_ENABLED(enabled) __asm__ volatile ("mov %%psr, %0; and %0, 0x20, %0": "=r" (enabled):)
+
+#define TRAPS_INT_DISABLED(enabled) __asm__ volatile ("mov %%psr, %0; and %0, 0xF00, %0": "=r" (enabled):)
+
+#define VIDDEC_WATCHDOG_COUNTER_MAX (0x000FFFFF)
+
+/* Synchronous message buffer, which is shared by both Host and Fw for handling synchronous messages */
+typedef struct
+{
+ uint8_t data[CONFIG_IPC_SYNC_MESSAGE_BUF_SIZE];
+}mfd_sync_msg_t;
+
+/* Required Information needed by Parser Kernel for each stream */
+typedef struct
+{
+ uint32_t ddr_cxt; /* phys addr of swap space where Parser kernel stores pvt information */
+ uint32_t cxt_size; /* size of context buffer */
+ uint32_t strm_type; /* Current stream information*/
+ uint32_t wl_time; /* ticks for processing current workload */
+ uint32_t es_time; /* ticks for processing current workload */
+ uint32_t low_watermark; /* On crossing this value we generate low watermark interrupt */
+ uint8_t state; /* Current state of stream ... start(1), stop(0).. */
+ uint8_t priority; /* Priority of current stream Real time or Non real time */
+ uint8_t buffered_data;/* Do we have data from past buffer */
+ uint8_t pending_interrupt;/* Whether an Interrupt needs to be generated for this stream */
+}mfd_stream_info;
+
+/* Global data for Parser kernel */
+typedef struct
+{
+ int32_t low_id; /* last scheduled low priority stream id */
+ int32_t high_id;/* last scheduled high priority stream id */
+ uint32_t g_parser_tables; /* should point to global_parser_table in DDR */
+}mfd_pk_data_t;
+
+typedef struct
+{
+ ipc_msg_data input;
+ ipc_msg_data wkld1;
+ ipc_msg_data wkld2;
+ viddec_pm_cxt_t pm;
+}mfd_pk_strm_cxt;
+
+/* This structure defines the layout of local memory */
+typedef struct
+{
+ mfd_sync_msg_t buf;
+ _IPC_int_state_t int_status[FW_SUPPORTED_STREAMS];
+ FW_IPC_Handle fwIpc;
+ mfd_stream_info stream_info[FW_SUPPORTED_STREAMS];
+ mfd_pk_data_t g_pk_data;
+ mfd_pk_strm_cxt srm_cxt;
+}dmem_t;
+
+/* Pvt Functions which will be used by multiple modules */
+
+static inline void reg_write(uint32_t offset, uint32_t value)
+{
+ *((volatile uint32_t*) (GV_SI_MMR_BASE_ADDRESS + offset)) = value;
+}
+
+static inline uint32_t reg_read(uint32_t offset)
+{
+ uint32_t value=0;
+ value = *((volatile uint32_t*) (GV_SI_MMR_BASE_ADDRESS + offset));
+ return value;
+}
+
+
+static inline void DEBUG(uint32_t print, uint32_t code, uint32_t val)
+{
+ if(print > 0)
+ {
+ DUMP_TO_MEM(code);
+ DUMP_TO_MEM(val);
+ dump_ptr = (dump_ptr + 7) & ~0x7;
+ }
+}
+
+void *memcpy(void *dest, const void *src, uint32_t n);
+
+void *memset(void *s, int32_t c, uint32_t n);
+
+uint32_t cp_using_dma(uint32_t ddr_addr, uint32_t local_addr, uint32_t size, char to_ddr, char swap);
+
+uint32_t set_wdog(uint32_t offset);
+
+void get_wdog(uint32_t *value);
+
+void enable_intr(void);
+
+uint32_t get_total_ticks(uint32_t start, uint32_t end);
+
+void viddec_fw_init_swap_memory(unsigned int stream_id, unsigned int swap, unsigned int clean);
+#endif
diff --git a/mix_vbp/viddec_fw/fw/parser/include/ipc_fw_custom.h b/mix_vbp/viddec_fw/fw/parser/include/ipc_fw_custom.h
new file mode 100644
index 0000000..adfdabf
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/include/ipc_fw_custom.h
@@ -0,0 +1,87 @@
+/*
+
+This file is provided under a dual BSD/GPLv2 license. When using or
+redistributing this file, you may do so under either license.
+
+GPL LICENSE SUMMARY
+
+Copyright(c) 2005-2008 Intel Corporation. All rights reserved.
+
+This program is free software; you can redistribute it and/or modify
+it under the terms of version 2 of the GNU General Public License as
+published by the Free Software Foundation.
+
+This program is distributed in the hope that it will be useful, but
+WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+General Public License for more details.
+
+You should have received a copy of the GNU General Public License
+along with this program; if not, write to the Free Software
+Foundation, Inc., 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA.
+The full GNU General Public License is included in this distribution
+in the file called LICENSE.GPL.
+
+Contact Information:
+Intel Corporation
+2200 Mission College Blvd.
+Santa Clara, CA 97052
+
+BSD LICENSE
+
+Copyright(c) 2005-2008 Intel Corporation. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions
+are met:
+
+* Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+* Redistributions in binary form must reproduce the above copyright
+notice, this list of conditions and the following disclaimer in
+the documentation and/or other materials provided with the
+distribution.
+* Neither the name of Intel Corporation nor the names of its
+contributors may be used to endorse or promote products derived
+from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+*/
+#ifndef IPC_FW_CUSTOM_H
+#define IPC_FW_CUSTOM_H 1
+
+#include "viddec_fw_parser_fw_ipc.h"
+
+extern void custom_ipclib_firmware_out_of_reset(void);
+
+extern struct FW_IPC_Handler *custom_ipclib_get_fwipc(void);
+extern void *custom_ipclib_get_sync_message_area(void);
+
+extern void custom_ipclib_firmware_setup(void);
+extern void custom_ipclib_firmware_ready(void);
+
+extern int custom_ipclib_firmware_is_sync_command_requested(void);
+extern void custom_ipclib_firmware_ack_sync_command(void);
+
+void custom_ipclib_memcpy_to_host_mem(
+ void *to,
+ const void *from,
+ int size );
+
+void custom_ipclib_memcpy_from_host_mem(
+ void *to,
+ const void *from,
+ int size );
+
+#endif /* IPC_FW_CUSTOM_H */
diff --git a/mix_vbp/viddec_fw/fw/parser/include/viddec_emitter.h b/mix_vbp/viddec_fw/fw/parser/include/viddec_emitter.h
new file mode 100644
index 0000000..bb96bab
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/include/viddec_emitter.h
@@ -0,0 +1,96 @@
+#ifndef VIDDEC_EMITTER_H
+#define VIDDEC_EMITTER_H
+
+#include <stdint.h>
+#ifndef HOST_ONLY
+#define DDR_MEM_MASK 0x80000000
+#else
+#define DDR_MEM_MASK 0x0
+#endif
+#include "viddec_fw_workload.h"
+#include "viddec_fw_common_defs.h"
+#include "viddec_fw_debug.h"
+
+typedef struct
+{
+ viddec_workload_t *data;
+ uint32_t max_items;
+ uint32_t num_items;
+ uint32_t result;
+}viddec_emitter_wkld;
+
+typedef struct
+{
+ viddec_emitter_wkld cur;
+ viddec_emitter_wkld next;
+}viddec_emitter;
+
+/*
+  Emits the association tag for the current ES buffer.
+*/
+int32_t viddec_emit_assoc_tag(viddec_emitter *emit, uint32_t id, uint32_t using_next);
+
+int32_t viddec_emit_contr_tag(viddec_emitter *emit, viddec_input_buffer_t *ibuf, uint8_t incomplete, uint32_t using_next);
+
+int32_t viddec_emit_flush_current_wkld(viddec_emitter *emit);
+
+int32_t viddec_emit_append(viddec_emitter_wkld *cxt, viddec_workload_item_t *item);
+
+/*
+ Init function for setting up emitter context.
+*/
+static inline void viddec_emit_init(viddec_emitter *cxt)
+{
+ cxt->cur.data = cxt->next.data = 0;
+ cxt->cur.max_items = cxt->next.max_items = 0;
+ cxt->cur.num_items = cxt->next.num_items = 0;
+ cxt->cur.result = cxt->next.result = VIDDEC_FW_WORKLOAD_SUCCESS;
+}
+
+static inline void viddec_emit_update(viddec_emitter *cxt, uint32_t cur, uint32_t next, uint32_t cur_size, uint32_t next_size)
+{
+ cxt->cur.data = (cur != 0) ? (viddec_workload_t *)(cur | DDR_MEM_MASK) : NULL;
+ cxt->next.data = (next != 0) ? (viddec_workload_t *)(next | DDR_MEM_MASK): NULL;
+ cxt->cur.max_items = (cur_size - sizeof(viddec_workload_t))/sizeof(viddec_workload_item_t);
+ cxt->next.max_items = (next_size - sizeof(viddec_workload_t))/sizeof(viddec_workload_item_t);
+}
+
+static inline void viddec_emit_time(viddec_emitter *cxt, uint32_t time)
+{
+ viddec_emitter_wkld *cur;
+ cur = &(cxt->cur);
+ cur->data->time = time;
+}
+
+static inline void viddec_emit_set_codec(viddec_emitter *emit, uint32_t codec_type)
+{
+ emit->cur.data->codec = codec_type;
+}
+
+static inline void viddec_emit_set_codec_errors(viddec_emitter *emit, uint32_t codec_error)
+{
+ emit->cur.result |= codec_error;
+ WRITE_SVEN(SVEN_MODULE_EVENT_GV_FW_PM_WORKLOAD_STATUS, (int)emit->cur.result, (int)emit->cur.data,
+ (int)emit->cur.num_items, 0, 0, 0);
+}
+
+static inline void viddec_emit_set_workload_error(viddec_emitter *emit, uint32_t error, uint32_t using_next)
+{
+ viddec_emitter_wkld *cur_wkld;
+ cur_wkld = (using_next == false)? &(emit->cur):&(emit->next);
+ cur_wkld->result |= error;
+ WRITE_SVEN(SVEN_MODULE_EVENT_GV_FW_PM_WORKLOAD_STATUS, (int)cur_wkld->result, (int)cur_wkld->data,
+ (int)cur_wkld->num_items, using_next, 0, 0);
+}
+
+static inline void viddec_emit_set_inband_tag(viddec_emitter *emit, uint32_t type, uint32_t using_next)
+{
+ viddec_emitter_wkld *cur_wkld;
+ viddec_workload_item_t item;
+ cur_wkld = (using_next == false)? &(emit->cur):&(emit->next);
+ item.vwi_type = type;
+ item.vwi_payload[0] = item.vwi_payload[1] = item.vwi_payload[2] = 0;
+ viddec_emit_append(cur_wkld, &item);
+}
+
+#endif /* VIDDEC_EMITTER_H */
diff --git a/mix_vbp/viddec_fw/fw/parser/include/viddec_fw_debug.h b/mix_vbp/viddec_fw/fw/parser/include/viddec_fw_debug.h
new file mode 100644
index 0000000..cccc437
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/include/viddec_fw_debug.h
@@ -0,0 +1,80 @@
+#ifndef VIDDEC_FW_DEBUG_H
+#define VIDDEC_FW_DEBUG_H
+
+//#define SWAP_BYTE(x,y,z) (( ( (x)>>(8*y))& 0xFF) << (8*z))
+#define SWAP_BYTE(x,y,z) (( ( (x) >> ((y) << 3))& 0xFF) << ((z) << 3))
+#define SWAP_WORD(x) ( SWAP_BYTE((x),0,3) | SWAP_BYTE((x),1,2) |SWAP_BYTE((x),2,1) |SWAP_BYTE((x),3,0))
+
+#ifndef VBP
+
+#ifndef HOST_ONLY
+#define _OSAL_IO_MEMMAP_H /* to prevent errors when including sven_devh.h */
+#define _OSAL_ASSERT_H /* to prevent errors when including sven_devh.h */
+#endif
+#include <stdint.h>
+#include "viddec_debug.h"
+#include "sven_devh.h"
+#include "auto_eas/gen4_gv.h"
+
+#ifdef HOST_ONLY
+#define DUMP_TO_MEM(x) DEB("0x%.08X ",x);
+#define WRITE_SVEN(event, p1, p2, p3, p4, p5, p6) DEB("Sven evnt=0x%.8X p1=%d p2=%d p3=%d p4=%d p5=%d p6=%d\n",event, p1, p2, p3, p4, p5, p6)
+#define read_ret(x)
+#define read_fp(x)
+#define read_sp(x)
+#define read_wim(x)
+#define read_psr(x)
+#else
+extern uint32_t dump_ptr;
+/* Macros for Dumping data to DDR */
+#define DUMP_TO_MEM(x) ((volatile unsigned int *)0x8F000000)[dump_ptr++] = SWAP_WORD(x);
+#define read_ret(x) asm("mov %%i7, %0\n":"=r" (x))
+#define read_fp(x) asm("mov %%i6, %0\n":"=r" (x))
+#define read_sp(x) asm("mov %%sp, %0\n":"=r" (x))
+#define read_wim(x) asm("mov %%wim, %0\n":"=r" (x))
+#define read_psr(x) asm("mov %%psr, %0\n":"=r" (x))
+#define WRITE_SVEN(event, p1, p2, p3, p4, p5, p6) devh_SVEN_WriteModuleEvent( NULL, event, p1, p2, p3, p4, p5, p6)
+#endif
+
+#else // VBP is defined
+
+#include <stdint.h>
+#include "viddec_debug.h"
+#define DUMP_TO_MEM(x)
+#define WRITE_SVEN(event, p1, p2, p3, p4, p5, p6)
+#define read_ret(x)
+#define read_fp(x)
+#define read_sp(x)
+#define read_wim(x)
+#define read_psr(x)
+
+
+#endif
+
+static inline void DEBUG_WRITE(uint32_t p1, uint32_t p2, uint32_t p3, uint32_t p4, uint32_t p5, uint32_t p6)
+{
+ //uint32_t temp;
+ DUMP_TO_MEM(0xCACAFEED);
+ DUMP_TO_MEM(p1);
+ DUMP_TO_MEM(p2);
+ DUMP_TO_MEM(p3);
+ DUMP_TO_MEM(p4);
+ DUMP_TO_MEM(p5);
+ DUMP_TO_MEM(p6);
+ DUMP_TO_MEM(0xCACA0000);
+ //temp = dump_ptr;
+ //DUMP_TO_MEM(temp);
+}
+static inline void DUMP_SPARC_REG(void)
+{
+ uint32_t ret1, fp, sp, wim, psr;
+ read_ret(ret1);
+ read_fp(fp);
+ read_sp(sp);
+ read_wim(wim);
+ read_psr(psr);
+ //crash = (uint32_t *)0x1000bf0c;
+ //DEBUG_WRITE(sp, wim, fp, ret1, (*crash), 0xFED);
+ DEBUG_WRITE(sp, wim, fp, ret1, psr, 0xFFFFFFFF);
+}
+#endif
diff --git a/mix_vbp/viddec_fw/fw/parser/include/viddec_fw_parser_fw_ipc.h b/mix_vbp/viddec_fw/fw/parser/include/viddec_fw_parser_fw_ipc.h
new file mode 100644
index 0000000..a77b645
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/include/viddec_fw_parser_fw_ipc.h
@@ -0,0 +1,194 @@
+/*
+
+ This file is provided under a dual BSD/GPLv2 license. When using or
+ redistributing this file, you may do so under either license.
+
+ GPL LICENSE SUMMARY
+
+ Copyright(c) 2005-2008 Intel Corporation. All rights reserved.
+
+ This program is free software; you can redistribute it and/or modify
+ it under the terms of version 2 of the GNU General Public License as
+ published by the Free Software Foundation.
+
+ This program is distributed in the hope that it will be useful, but
+ WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program; if not, write to the Free Software
+ Foundation, Inc., 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA.
+ The full GNU General Public License is included in this distribution
+ in the file called LICENSE.GPL.
+
+ Contact Information:
+ Intel Corporation
+ 2200 Mission College Blvd.
+ Santa Clara, CA 97052
+
+ BSD LICENSE
+
+ Copyright(c) 2005-2008 Intel Corporation. All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in
+ the documentation and/or other materials provided with the
+ distribution.
+ * Neither the name of Intel Corporation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+*/
+#ifndef VIDDEC_FW_PARSER_FW_IPC_H
+#define VIDDEC_FW_PARSER_FW_IPC_H 1
+
+#include "viddec_fw_parser_ipclib.h"
+
+/** Generic Firmware-to-host Message Send Queue */
+typedef struct
+{
+ struct IPC_MsgQueue mq; /* local MSGQueue handle */
+} FW_IPC_SendQue;
+
+/** Generic Host-to-Firmware Message Receive Queue */
+typedef struct
+{
+ struct IPC_MsgQueue mq; /* local MSGQueue handle */
+} FW_IPC_ReceiveQue;
+
+typedef struct
+{
+ unsigned int state;
+ unsigned int priority;
+}FW_IPC_stream_info;
+
+/* ---------------------------------------------------------------------- */
+/* ---------------------------------------------------------------------- */
+
+typedef struct
+{
+ /** Synchronous Message Buffer, shared between host and firmware */
+ volatile char *sync_msg_buf;
+
+ /** WARNING: EACH OF THESE STRUCTS MUST BE 8 BYTE ALIGNED */
+ FW_IPC_SendQue snd_q[CONFIG_IPC_HOST_MAX_RX_QUEUES];
+
+ /** WARNING: EACH OF THESE STRUCTS MUST BE 8 BYTE ALIGNED */
+ FW_IPC_ReceiveQue rcv_q[CONFIG_IPC_FW_MAX_RX_QUEUES];
+ /** WARNING: EACH OF THESE STRUCTS MUST BE 8 BYTE ALIGNED */
+ FW_IPC_ReceiveQue wkld_q[CONFIG_IPC_FW_MAX_RX_QUEUES];
+
+ /** FIRMWARE_TO_HOST Message Queues (outbound) */
+ struct _IPC_QueueHeader *snd_q_shared[CONFIG_IPC_HOST_MAX_RX_QUEUES];
+    /** HOST_TO_FIRMWARE Message Queues (inbound) */
+ struct _IPC_QueueHeader *rcv_q_shared[CONFIG_IPC_FW_MAX_RX_QUEUES];
+    /** HOST_TO_FIRMWARE Message Queues (inbound) */
+ struct _IPC_QueueHeader *wkld_q_shared[CONFIG_IPC_FW_MAX_RX_QUEUES];
+ /** Actual qheaders allocated in FW memory */
+ struct _IPC_QueueHeader snd_qh[CONFIG_IPC_HOST_MAX_RX_QUEUES];
+ struct _IPC_QueueHeader rcv_qh[CONFIG_IPC_FW_MAX_RX_QUEUES];
+ struct _IPC_QueueHeader wkld_qh[CONFIG_IPC_FW_MAX_RX_QUEUES];
+
+    /** Stream related info like priority */
+ FW_IPC_stream_info strm_info[CONFIG_IPC_FW_MAX_RX_QUEUES];
+
+ unsigned int one_msg_size;
+ unsigned char one_msg[CONFIG_IPC_MESSAGE_MAX_SIZE];
+} FW_IPC_Handle;
+
+/*@}*/
+
+/** @weakgroup Host IPC Functions */
+/** @ingroup fw_ipc */
+/*@{*/
+
+/**
+This function allows us to check and see if there's space available on the send queue(output) of fw
+for the message of size(message_size). It also provides us the amount of space available.
+@param[in] fwipc : Ipc handle.
+@param[in] message_size : size of message that we want to write.
+@param[out] bytes : returns the amount of space available for writing.
+@retval 0 : if space is not available for current message.
+@retval 1 : if space is available for current message.
+*/
+int FwIPC_SpaceAvailForMessage(FW_IPC_Handle *fwipc, FW_IPC_SendQue *snd_q, unsigned int message_size, unsigned int *bytes);
+
+/**
+This function writes the message of message_size into queue(host_rx_queue).
+@param[in] fwipc : Ipc handle.
+@param[in] host_rx_queue : id of the queue that needs to be written.
+@param[in] message : Message that we want to write.
+@param[in] message_size : size of message that we want to write.
+@retval 0 : if write fails.
+@retval 1 : if write succeeds.
+*/
+int FwIPC_SendMessage(FW_IPC_Handle *fwipc, unsigned int host_rx_queue, const char *message, unsigned int message_size );
+
+/**
+This function reads a message(which is <= max_message_size) from rcv_queue of firmware into input parameter message.
+@param[in] fwipc : Ipc handle.
+@param[in] rcv_q : Receive queue to read from.
+@param[out] message : Message that we want to read.
+@param[in] max_message_size : max possible size of the message.
+@retval : The size of message that was read.
+*/
+int FwIPC_ReadMessage(FW_IPC_Handle *fwipc, FW_IPC_ReceiveQue *rcv_q, char *message, unsigned int max_message_size );
+
+/**
+This function Initialises shared queue headers and sync command buffer for IPC.
+@param[in] fwipc : Ipc handle.
+@param[in] synchronous_command_buffer : update handle with pointer to shared memory
+ between host and FW.
+@retval 0 : if write succeeds.
+*/
+int FwIPC_Initialize(FW_IPC_Handle *fwipc, volatile char *synchronous_command_buffer );
+
+/**
+This function Initialises Sendqueue with circular buffer which has actual data.
+@param[in] fwipc : Ipc handle.
+@param[in] snd_q : Send queue that needs to be initialized.
+@param[in] snd_circbuf : Address of circular buffer.
+*/
+void FWIPC_SendQueue_Init(FW_IPC_Handle *fwipc, FW_IPC_SendQue *snd_q, void *snd_circbuf );
+
+/**
+This function Initialises Recvqueue with circular buffer which has actual data.
+@param[in] fwipc : Ipc handle.
+@param[in] rcv_q : Receive queue that needs to be initialized.
+@param[in] rcv_circbuf : Address of circular buffer.
+*/
+void FwIPC_ReceiveQueue_Init(FW_IPC_Handle *fwipc, FW_IPC_ReceiveQue *rcv_q, void *rcv_circbuf );
+
+/**
+This function reads the nth(index) message(which is <= max_message_size ) from rcv_queue of firmware into input parameter message
+by peeking the queue.
+@param[in] fwipc : Ipc handle.
+@param[in] rcv_q : Receive queue to read from.
+@param[out] message : Message that we want to read.
+@param[in] max_message_size : max possible size of the message.
+@param[in] index : nth message(index >=0).
+@retval : The size of message that was read.
+*/
+int FwIPC_PeekReadMessage(FW_IPC_Handle *fwipc, FW_IPC_ReceiveQue *rcv_q, char *message, unsigned int max_message_size, unsigned int index );
+
+/*@}*/
+#endif /* VIDDEC_FW_PARSER_FW_IPC_H */
diff --git a/mix_vbp/viddec_fw/fw/parser/include/viddec_h264_parse.h b/mix_vbp/viddec_fw/fw/parser/include/viddec_h264_parse.h
new file mode 100644
index 0000000..4712be7
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/include/viddec_h264_parse.h
@@ -0,0 +1,6 @@
+#ifndef VIDDEC_H264_PARSE_H
+#define VIDDEC_H264_PARSE_H
+
+void viddec_h264_get_ops(viddec_parser_ops_t *ops);
+
+#endif
diff --git a/mix_vbp/viddec_fw/fw/parser/include/viddec_mp4_parse.h b/mix_vbp/viddec_fw/fw/parser/include/viddec_mp4_parse.h
new file mode 100644
index 0000000..e3e795a
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/include/viddec_mp4_parse.h
@@ -0,0 +1,6 @@
+#ifndef VIDDEC_MP4_PARSE_H
+#define VIDDEC_MP4_PARSE_H
+
+void viddec_mp4_get_ops(viddec_parser_ops_t *ops);
+
+#endif
diff --git a/mix_vbp/viddec_fw/fw/parser/include/viddec_mpeg2_parse.h b/mix_vbp/viddec_fw/fw/parser/include/viddec_mpeg2_parse.h
new file mode 100644
index 0000000..7c0efea
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/include/viddec_mpeg2_parse.h
@@ -0,0 +1,6 @@
+#ifndef VIDDEC_MPEG2_PARSE_H
+#define VIDDEC_MPEG2_PARSE_H
+
+void viddec_mpeg2_get_ops(viddec_parser_ops_t *ops);
+
+#endif
diff --git a/mix_vbp/viddec_fw/fw/parser/include/viddec_parser_ops.h b/mix_vbp/viddec_fw/fw/parser/include/viddec_parser_ops.h
new file mode 100644
index 0000000..a61e340
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/include/viddec_parser_ops.h
@@ -0,0 +1,106 @@
+#ifndef VIDDEC_PARSER_OPS_H
+#define VIDDEC_PARSER_OPS_H
+
+#include "viddec_fw_workload.h"
+
+#define VIDDEC_PARSE_INVALID_POS 0xFFFFFFFF
+
+typedef enum
+{
+ VIDDEC_PARSE_EOS = 0x0FFF, /* Dummy start code to force EOS */
+ VIDDEC_PARSE_DISCONTINUITY, /* Dummy start code to force completion and flush */
+}viddec_parser_inband_messages_t;
+
+typedef struct
+{
+ uint32_t context_size;
+ uint32_t persist_size;
+}viddec_parser_memory_sizes_t;
+
+typedef struct
+{
+ void (*init)(void *ctxt, uint32_t *persist, uint32_t preserve);
+ uint32_t (*parse_sc) (void *ctxt, void *pcxt, void *sc_state);
+ uint32_t (*parse_syntax) (void *parent, void *ctxt);
+ void (*get_cxt_size) (viddec_parser_memory_sizes_t *size);
+ uint32_t (*is_wkld_done)(void *parent, void *ctxt, uint32_t next_sc, uint32_t *codec_specific_errors);
+ uint32_t (*is_frame_start)(void *ctxt);
+ uint32_t (*gen_contrib_tags)(void *parent, uint32_t ignore_partial);
+ uint32_t (*gen_assoc_tags)(void *parent);
+}viddec_parser_ops_t;
+
+
+typedef enum
+{
+ VIDDEC_PARSE_ERROR = 0xF0,
+ VIDDEC_PARSE_SUCESS = 0xF1,
+ VIDDEC_PARSE_FRMDONE = 0xF2,
+}viddec_parser_error_t;
+
+/*
+ *
+ *Functions used by Parsers
+ *
+ */
+
+/* This function returns the requested number of bits(<=32) and increments au byte position.
+ */
+int32_t viddec_pm_get_bits(void *parent, uint32_t *data, uint32_t num_bits);
+
+/* This function returns requested number of bits(<=32) without incrementing au byte position
+ */
+int32_t viddec_pm_peek_bits(void *parent, uint32_t *data, uint32_t num_bits);
+
+/* This function skips requested number of bits(<=32) by incrementing au byte position.
+ */
+int32_t viddec_pm_skip_bits(void *parent, uint32_t num_bits);
+
+/* This function appends a work item to current workload.
+ */
+int32_t viddec_pm_append_workitem(void *parent, viddec_workload_item_t *item);
+
+/* This function appends a work item to next workload.
+ */
+int32_t viddec_pm_append_workitem_next(void *parent, viddec_workload_item_t *item);
+
+/* This function gets current byte and bit positions and information on whether an emulation byte is present after
+current byte.
+ */
+int32_t viddec_pm_get_au_pos(void *parent, uint32_t *bit, uint32_t *byte, unsigned char *is_emul);
+
+/* This function appends Pixel tag to current work load starting from current position to end of au unit.
+ */
+int32_t viddec_pm_append_pixeldata(void *parent);
+
+/* This function appends Pixel tag to next work load starting from current position to end of au unit.
+ */
+int32_t viddec_pm_append_pixeldata_next(void *parent);
+
+/* This function provides the workload header for parsers to fill in attribute values
+ */
+viddec_workload_t* viddec_pm_get_header(void *parent);
+
+/* This function provides the next workload header for parsers to fill in attribute values
+ */
+viddec_workload_t* viddec_pm_get_next_header(void *parent);
+
+/* Returns the current byte value where offset is on */
+uint32_t viddec_pm_get_cur_byte(void *parent, uint8_t *byte);
+
+/* Tells us if there is more data that need to parse */
+int32_t viddec_pm_is_nomoredata(void *parent);
+
+/* This function appends misc tag to work load starting from start position to end position of au unit */
+int32_t viddec_pm_append_misc_tags(void *parent, uint32_t start, uint32_t end, viddec_workload_item_t *wi, uint32_t using_next);
+
+void viddec_pm_set_next_frame_error_on_eos(void *parent, uint32_t error);
+
+void viddec_pm_set_late_frame_detect(void *parent);
+
+static inline void viddec_fw_reset_workload_item(viddec_workload_item_t *wi)
+{
+ wi->vwi_payload[0] = wi->vwi_payload[1] = wi->vwi_payload[2] = 0;
+}
+
+void viddec_pm_setup_userdata(viddec_workload_item_t *wi);
+#endif
diff --git a/mix_vbp/viddec_fw/fw/parser/include/viddec_pm.h b/mix_vbp/viddec_fw/fw/parser/include/viddec_pm.h
new file mode 100644
index 0000000..6d1d2be
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/include/viddec_pm.h
@@ -0,0 +1,95 @@
+#ifndef VIDDEC_PM_H
+#define VIDDEC_PM_H
+
+#include <stdint.h>
+#include "viddec_emitter.h"
+#include "viddec_pm_utils_list.h"
+#include "viddec_pm_utils_bstream.h"
+#include "viddec_pm_parse.h"
+#include "viddec_parser_ops.h"
+
+#define SC_DETECT_BUF_SIZE 1024
+#define MAX_CODEC_CXT_SIZE 4096
+
+typedef enum
+{
+ PM_SUCCESS = 0,
+ /* Messages to indicate more ES data */
+ PM_NO_DATA = 0x100,
+ /* Messages to indicate SC found */
+ PM_SC_FOUND = 0x200,
+ PM_FIRST_SC_FOUND = 0x201,
+ /* Messages to indicate Frame done */
+ PM_WKLD_DONE = 0x300,
+ /* Messages to indicate Error conditions */
+ PM_OVERFLOW = 0x400,
+ /* Messages to indicate inband conditions */
+ PM_INBAND_MESSAGES = 0x500,
+ PM_EOS = 0x501,
+ PM_DISCONTINUITY = 0x502,
+}pm_parse_state_t;
+
+/* This is a temporary structure for first pass sc parsing. index tells us where we are in list of es buffers
+ cur_es points to current es buffer we are parsing. */
+typedef struct
+{
+ int32_t list_index; /* current index of list */
+ uint32_t cur_offset;
+ uint32_t cur_size;
+ viddec_input_buffer_t *cur_es;
+}viddec_pm_sc_cur_buf_t;
+
+typedef struct
+{
+ uint32_t pending_tags[MAX_IBUFS_PER_SC];
+ uint8_t dummy;
+ uint8_t frame_done;
+ uint8_t first_buf_aligned;
+ uint8_t using_next;
+}vidded_pm_pending_tags_t;
+
+/* This structure holds all necessary data required by parser manager for stream parsing.
+ */
+typedef struct
+{
+ /* Actual buffer where data gets DMA'd. 8 padding bytes for alignment */
+ uint8_t scbuf[SC_DETECT_BUF_SIZE + 8];
+ viddec_sc_parse_cubby_cxt_t parse_cubby;
+ viddec_pm_utils_list_t list;
+ /* Place to store tags to be added to next to next workload */
+ viddec_pm_sc_cur_buf_t cur_buf;
+ viddec_emitter emitter;
+ viddec_pm_utils_bstream_cxt_t getbits;
+ viddec_sc_prefix_state_t sc_prefix_info;
+ vidded_pm_pending_tags_t pending_tags;
+ uint8_t word_align_dummy;
+ uint8_t late_frame_detect;
+ uint8_t frame_start_found;
+ uint8_t found_fm_st_in_current_au;
+ uint32_t next_workload_error_eos;
+ uint32_t pending_inband_tags;
+#ifdef VBP
+ uint32_t codec_data[MAX_CODEC_CXT_SIZE<<3];
+#else
+ uint32_t codec_data[MAX_CODEC_CXT_SIZE>>2];
+#endif
+}viddec_pm_cxt_t;
+
+/*
+ *
+ * Functions used by Parser kernel
+ *
+ */
+
+/* This is for initialising parser manager context to default values */
+void viddec_pm_init_context(viddec_pm_cxt_t *cxt, uint32_t codec_type, uint32_t *persist_mem, uint32_t clean);
+
+/* This is the main parse function which returns state information that parser kernel can understand.*/
+uint32_t viddec_pm_parse_es_buffer(viddec_pm_cxt_t *cxt, uint32_t codec_type, viddec_input_buffer_t *es_buf);
+
+void viddec_pm_init_ops();
+
+void viddec_pm_update_time(viddec_pm_cxt_t *cxt, uint32_t time);
+
+uint32_t viddec_pm_get_parser_sizes(uint32_t codec_type, viddec_parser_memory_sizes_t *size);
+#endif
diff --git a/mix_vbp/viddec_fw/fw/parser/include/viddec_pm_parse.h b/mix_vbp/viddec_fw/fw/parser/include/viddec_pm_parse.h
new file mode 100644
index 0000000..703d65d
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/include/viddec_pm_parse.h
@@ -0,0 +1,24 @@
+#ifndef VIDDEC_PM_PARSE_H
+#define VIDDEC_PM_PARSE_H
+
+#include <stdint.h>
+/* This structure is used by first pass parsing(sc detect), the pm passes information on number of bytes
+ that needs to be parsed and if start code found then sc_end_pos contains the index of last sc code byte
+ in the current buffer */
+typedef struct
+{
+ uint32_t size; /* size pointed to by buf */
+ uint8_t *buf; /* ptr to data */
+ int32_t sc_end_pos; /* return value end position of sc */
+ uint32_t phase; /* phase information(state) for sc */
+}viddec_sc_parse_cubby_cxt_t;
+
+typedef struct
+{
+ uint16_t next_sc;
+ uint8_t second_scprfx_length;
+ uint8_t first_sc_detect;
+}viddec_sc_prefix_state_t;
+
+uint32_t viddec_parse_sc(void *in, void *pcxt, void *sc_state);
+#endif
diff --git a/mix_vbp/viddec_fw/fw/parser/include/viddec_pm_tags.h b/mix_vbp/viddec_fw/fw/parser/include/viddec_pm_tags.h
new file mode 100644
index 0000000..f035e53
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/include/viddec_pm_tags.h
@@ -0,0 +1,17 @@
+#ifndef VIDDEC_PM_TAGS_H
+#define VIDDEC_PM_TAGS_H
+
+#include "viddec_pm.h"
+#include "viddec_emitter.h"
+
+/* Define to initialize the temporary association list */
+#define INVALID_ENTRY ((uint32_t) -1)
+
+void viddec_pm_generate_tags_for_unused_buffers_to_flush(viddec_pm_cxt_t *cxt);
+uint32_t viddec_generic_add_association_tags(void *parent);
+uint32_t viddec_h264_add_association_tags(void *parent);
+uint32_t viddec_mpeg2_add_association_tags(void *parent);
+uint32_t viddec_pm_lateframe_generate_contribution_tags(void *parent, uint32_t ignore_partial);
+uint32_t viddec_pm_generic_generate_contribution_tags(void *parent, uint32_t ignore_partial);
+uint32_t viddec_pm_generate_missed_association_tags(viddec_pm_cxt_t *cxt, uint32_t using_next);
+#endif
diff --git a/mix_vbp/viddec_fw/fw/parser/include/viddec_pm_utils_bstream.h b/mix_vbp/viddec_fw/fw/parser/include/viddec_pm_utils_bstream.h
new file mode 100644
index 0000000..1971a36
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/include/viddec_pm_utils_bstream.h
@@ -0,0 +1,81 @@
+#ifndef VIDDEC_PM_UTILS_BSTREAM_H
+#define VIDDEC_PM_UTILS_BSTREAM_H
+
+#include "viddec_pm_utils_list.h"
+
+#define CUBBY_SIZE 1024
+//#define CUBBY_SIZE 512
+#define SCRATCH_SIZE 20
+#define MIN_DATA 8
+
+typedef struct
+{
+#ifdef VBP
+ uint8_t *buf;
+#else
+ uint8_t buf[CUBBY_SIZE + 8 + MIN_DATA];/* extra 8 bytes for alignmet, extra 8 bytes for old data */
+#endif
+ uint32_t buf_st; /* start pos in buf */
+ uint32_t buf_end; /* first invalid byte in buf */
+ uint32_t buf_index; /* current index in buf */
+ uint32_t buf_bitoff; /* bit offset in current index position */
+}viddec_pm_utils_bstream_buf_cxt_t;
+
+typedef struct
+{
+ uint8_t buf_scratch[SCRATCH_SIZE];/* scratch for boundary reads*/
+ uint32_t st; /* start index of valid byte */
+ uint32_t size;/* Total number of bytes in current buffer */
+ uint32_t bitoff; /* bit offset in first valid byte */
+}viddec_pm_utils_bstream_scratch_cxt_t;
+
+typedef struct
+{
+#ifdef VBP
+    /* counter of emulation prevention bytes */
+ uint32_t emulation_byte_counter;
+#endif
+ /* After First pass of scan we figure out how many bytes are in the current access unit(N bytes). We store
+ the bstream buffer's first valid byte index wrt to accessunit in this variable */
+ uint32_t au_pos;
+ /* This is for keeping track of which list item was used to load data last */
+ uint32_t list_off;
+ /* This is for tracking emulation prevention bytes */
+ uint32_t phase;
+ /* This flag tells us whether to look for emulation prevention or not */
+ uint32_t is_emul_reqd;
+ /* A pointer to list of es buffers which contribute to current access unit */
+ viddec_pm_utils_list_t *list;
+ /* scratch buffer to stage data on boundaries and reloads */
+ viddec_pm_utils_bstream_scratch_cxt_t scratch;
+ /* Actual context which has valid data for get bits functionality */
+ viddec_pm_utils_bstream_buf_cxt_t bstrm_buf;
+}viddec_pm_utils_bstream_cxt_t;
+
+void viddec_pm_utils_bstream_init(viddec_pm_utils_bstream_cxt_t *cxt, viddec_pm_utils_list_t *list, uint32_t is_emul);
+
+int32_t viddec_pm_utils_bstream_skipbits(viddec_pm_utils_bstream_cxt_t *cxt, uint32_t num_bits);
+
+int32_t viddec_pm_utils_bstream_peekbits(viddec_pm_utils_bstream_cxt_t *cxt, uint32_t *out, uint32_t num_bits, uint8_t skip);
+
+int32_t viddec_pm_utils_bstream_get_current_byte(viddec_pm_utils_bstream_cxt_t *cxt, uint8_t *byte);
+
+uint8_t viddec_pm_utils_bstream_nomoredata(viddec_pm_utils_bstream_cxt_t *cxt);
+
+uint8_t viddec_pm_utils_bstream_nomorerbspdata(viddec_pm_utils_bstream_cxt_t *cxt);
+
+static inline void viddec_pm_utils_bstream_get_au_offsets(viddec_pm_utils_bstream_cxt_t *cxt, uint32_t *bit, uint32_t *byte, uint8_t *is_emul)
+{
+ uint32_t phase=cxt->phase;
+
+ *bit = cxt->bstrm_buf.buf_bitoff;
+ *byte = cxt->au_pos + (cxt->bstrm_buf.buf_index - cxt->bstrm_buf.buf_st);
+ if(cxt->phase > 0)
+ {
+ phase = phase - ((cxt->bstrm_buf.buf_bitoff != 0)? 1: 0 );
+ }
+ *is_emul = (cxt->is_emul_reqd) && (phase > 0) &&
+ (cxt->bstrm_buf.buf[cxt->bstrm_buf.buf_index] == 0) &&
+ (cxt->bstrm_buf.buf[cxt->bstrm_buf.buf_index+1] == 0x3);
+}
+#endif
diff --git a/mix_vbp/viddec_fw/fw/parser/include/viddec_pm_utils_list.h b/mix_vbp/viddec_fw/fw/parser/include/viddec_pm_utils_list.h
new file mode 100644
index 0000000..98f2d46
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/include/viddec_pm_utils_list.h
@@ -0,0 +1,51 @@
+#ifndef VIDDEC_PM_COMMON_LIST_H
+#define VIDDEC_PM_COMMON_LIST_H
+
+#include "viddec_emitter.h"
+
+/* Limitation: This is the maximum number of es buffers between start codes. Needs to change if we encounter
+   a case where this is not sufficient */
+#ifdef VBP
+#define MAX_IBUFS_PER_SC 512
+#else
+#define MAX_IBUFS_PER_SC 64
+#endif
+
+/* This structure is for storing information on byte position in the current access unit.
+ stpos is the au byte index of first byte in current es buffer.edpos is the au byte index+1 of last
+ valid byte in current es buffer.*/
+typedef struct
+{
+ uint32_t stpos;
+ uint32_t edpos;
+}viddec_pm_utils_au_bytepos_t;
+
+/* this structure is for storing all necessary information for list handling */
+typedef struct
+{
+ uint16_t num_items; /* Number of buffers in List */
+ uint16_t first_scprfx_length; /* Length of first sc prefix in this list */
+ int32_t start_offset; /* starting offset of unused data including sc prefix in first buffer */
+ int32_t end_offset; /* Offset of unsused data in last buffer including 2nd sc prefix */
+ viddec_input_buffer_t sc_ibuf[MAX_IBUFS_PER_SC]; /* Place to store buffer descriptors */
+ viddec_pm_utils_au_bytepos_t data[MAX_IBUFS_PER_SC]; /* place to store au byte positions */
+ int32_t total_bytes; /* total bytes for current access unit including first sc prefix*/
+}viddec_pm_utils_list_t;
+
+/* This function initialises the list to default values */
+void viddec_pm_utils_list_init(viddec_pm_utils_list_t *cxt);
+
+/* This function adds a new entry to list and will emit tags if needed */
+uint32_t viddec_pm_utils_list_addbuf(viddec_pm_utils_list_t *list, viddec_input_buffer_t *es_buf);
+
+/* This function updates au byte position of the current list. This should be called after sc codes are detected and before
+ syntax parsing as get bits requires this to be initialized. */
+void viddec_pm_utils_list_updatebytepos(viddec_pm_utils_list_t *list, uint8_t sc_prefix_length);
+
+/* This function walks through the list and removes consumed buffers based on total bytes. It then moves
+   unused entries to the top of the list. */
+void viddec_pm_utils_list_remove_used_entries(viddec_pm_utils_list_t *list, uint32_t length);
+
+/* this function returns 1 if the requested byte is not found. If found returns list and offset into list */
+uint32_t viddec_pm_utils_list_getbyte_position(viddec_pm_utils_list_t *list, uint32_t byte, uint32_t *list_index, uint32_t *offset);
+#endif
diff --git a/mix_vbp/viddec_fw/fw/parser/include/viddec_vc1_parse.h b/mix_vbp/viddec_fw/fw/parser/include/viddec_vc1_parse.h
new file mode 100644
index 0000000..c77aed1
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/include/viddec_vc1_parse.h
@@ -0,0 +1,6 @@
+#ifndef VIDDEC_VC1_PARSE_H
+#define VIDDEC_VC1_PARSE_H
+
+void viddec_vc1_get_ops(viddec_parser_ops_t *ops);
+
+#endif
diff --git a/mix_vbp/viddec_fw/fw/parser/main.c b/mix_vbp/viddec_fw/fw/parser/main.c
new file mode 100644
index 0000000..1bb368a
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/main.c
@@ -0,0 +1,608 @@
+#include "fw_pvt.h"
+#include "viddec_fw_parser_ipclib_config.h"
+#include "viddec_fw_common_defs.h"
+#include "viddec_fw_parser.h"
+#include "viddec_fw_debug.h"
+
+/* This define makes sure that the structure is stored in Local memory.
+ This is shared memory between host and FW.*/
+volatile dmem_t _dmem __attribute__ ((section (".exchange")));
+/* Debug index should be disabled for Production FW */
+uint32_t dump_ptr=0;
+uint32_t timer=0;
+
+/* Auto Api definitions */
+ismd_api_group viddec_fw_api_array[2];
+
+extern void viddec_fw_parser_register_callbacks(void);
+
+/*------------------------------------------------------------------------------
+ * Function: initialize firmware SVEN TX Output
+ *------------------------------------------------------------------------------
+ */
+int SMDEXPORT viddec_fw_parser_sven_init(struct SVEN_FW_Globals *sven_fw_globals )
+{
+ extern int sven_fw_set_globals(struct SVEN_FW_Globals *fw_globals );
+ return(sven_fw_set_globals(sven_fw_globals));
+}
+
+/*------------------------------------------------------------------------------
+ * Function: viddec_fw_check_watermark_boundary
+ * This function figures out if we crossesd watermark boundary on input data.
+ * before represents the ES Queue data when we started and current represents ES Queue data
+ * when we are ready to swap.Threshold is the amount of data specified by the driver to trigger an
+ * interrupt.
+ * We return true if threshold is between before and current.
+ *------------------------------------------------------------------------------
+ */
+static inline uint32_t viddec_fw_check_watermark_boundary(uint32_t before, uint32_t current, uint32_t threshold)
+{
+ return ((before >= threshold) && (current < threshold));
+}
+
+/*------------------------------------------------------------------------------
+ * Function: viddec_fw_get_total_input_Q_data
+ * This function figures out how much data is available in input queue of the FW
+ *------------------------------------------------------------------------------
+ */
+static uint32_t viddec_fw_get_total_input_Q_data(uint32_t indx)
+{
+ FW_IPC_Handle *fwipc = GET_IPC_HANDLE(_dmem);
+ uint32_t ret;
+ int32_t pos=0;
+ FW_IPC_ReceiveQue *rcv_q;
+
+ rcv_q = &fwipc->rcv_q[indx];
+ /* count the cubby buffer which we already read if present */
+ ret = (_dmem.stream_info[indx].buffered_data) ? CONFIG_IPC_MESSAGE_MAX_SIZE:0;
+ ret += ipc_mq_read_avail(&rcv_q->mq, (int32_t *)&pos);
+ return ret;
+}
+
+/*------------------------------------------------------------------------------
+ * Function: mfd_round_robin
+ * Params:
+ * [in] pri: Priority of the stream
+ * [in] indx: stream id number of the last stream that was scheduled.
+ * [out] qnum: Stream id of priority(pri) which has data.
+ * This function is responsible for figuring out which stream needs to be scheduled next.
+ * It starts after the last scheduled stream and walks through all streams until it finds
+ * a stream which is of required priority, in start state, has space on output and data in
+ * input.
+ * If no such stream is found qnum is not updated and return value is 0.
+ * If a stream is found then qnum is updated with that id and function returns 1.
+ *------------------------------------------------------------------------------
+ */
+
+uint32_t mfd_round_robin(uint32_t pri, int32_t *qnum, int32_t indx)
+{
+ FW_IPC_Handle *fwipc = GET_IPC_HANDLE(_dmem);
+ int32_t i = CONFIG_IPC_FW_MAX_RX_QUEUES;
+ uint32_t ret = 0;
+ /* Go through all queues until we find a valid queue of reqd priority */
+ while(i>0)
+ {
+ indx++;
+ if(indx >= CONFIG_IPC_FW_MAX_RX_QUEUES) indx = 0;
+
+ /* We should look only at queues which match priority and
+ in running state */
+ if( (_dmem.stream_info[indx].state == 1)
+ && (_dmem.stream_info[indx].priority == pri))
+ {
+ uint32_t inpt_avail=0, output_avail=0, wklds_avail =0 , pos;
+ FW_IPC_ReceiveQue *rcv_q;
+ rcv_q = &fwipc->rcv_q[indx];
+ inpt_avail = (_dmem.stream_info[indx].buffered_data > 0) || (ipc_mq_read_avail(&rcv_q->mq, (int32_t *)&pos) > 0);
+ /* we have to check for two workloads to protect against error cases where we might have to push both current and next workloads */
+ output_avail = FwIPC_SpaceAvailForMessage(fwipc, &fwipc->snd_q[indx], CONFIG_IPC_MESSAGE_MAX_SIZE, &pos) >= 2;
+ pos = 0;
+ /* Need at least current and next to proceed */
+ wklds_avail = (ipc_mq_read_avail(&fwipc->wkld_q[indx].mq, (int32_t *)&pos) >= (CONFIG_IPC_MESSAGE_MAX_SIZE << 1));
+ if(inpt_avail && output_avail && wklds_avail)
+ {/* Success condition: we have some data on input and enough space on output queue */
+ *qnum = indx;
+ ret =1;
+ break;
+ }
+ }
+ i--;
+ }
+ return ret;
+}
+static inline void mfd_setup_emitter(FW_IPC_Handle *fwipc, FW_IPC_ReceiveQue *rcv_q, mfd_pk_strm_cxt *cxt)
+{
+ int32_t ret1=0,ret=0;
+ /* We don't check return values for the peek as round robin guarantee's that we have required free workloads */
+ ret = FwIPC_PeekReadMessage(fwipc, rcv_q, (char *)&(cxt->wkld1), sizeof(ipc_msg_data), 0);
+ ret1 = FwIPC_PeekReadMessage(fwipc, rcv_q, (char *)&(cxt->wkld2), sizeof(ipc_msg_data), 1);
+ viddec_emit_update(&(cxt->pm.emitter), cxt->wkld1.phys, cxt->wkld2.phys, cxt->wkld1.len, cxt->wkld2.len);
+}
+
+static inline void mfd_init_swap_memory(viddec_pm_cxt_t *pm, uint32_t codec_type, uint32_t start_addr, uint32_t clean)
+{
+ uint32_t *persist_mem;
+ persist_mem = (uint32_t *)(start_addr | GV_DDR_MEM_MASK);
+ viddec_pm_init_context(pm,codec_type, persist_mem, clean);
+ pm->sc_prefix_info.first_sc_detect = 1;
+ viddec_emit_init(&(pm->emitter));
+}
+
+void output_omar_wires( unsigned int value )
+{
+#ifdef RTL_SIMULATION
+ reg_write(CONFIG_IPC_ROFF_HOST_DOORBELL, value );
+#endif
+}
+
+/*------------------------------------------------------------------------------
+ * Function: viddec_fw_init_swap_memory
+ * This function is responsible for setting the swap memory to a good state for the current stream.
+ * The swap parameter tells us whether we need to dma the context to local memory.
+ * We call init on the emitter and parser manager which in turn calls init of the codec we are opening the stream for.
+ *------------------------------------------------------------------------------
+ */
+
+void viddec_fw_init_swap_memory(unsigned int stream_id, unsigned int swap, unsigned int clean)
+{
+ mfd_pk_strm_cxt *cxt;
+ mfd_stream_info *cxt_swap;
+ cxt = (mfd_pk_strm_cxt *)&(_dmem.srm_cxt);
+ cxt_swap = (mfd_stream_info *)&(_dmem.stream_info[stream_id]);
+
+ if(swap)
+ {/* Swap context into local memory */
+ cp_using_dma(cxt_swap->ddr_cxt, (uint32_t) &(cxt->pm), sizeof(viddec_pm_cxt_t), false, false);
+ }
+
+ {
+ mfd_init_swap_memory(&(cxt->pm), cxt_swap->strm_type, cxt_swap->ddr_cxt+cxt_swap->cxt_size, clean);
+ cxt_swap->wl_time = 0;
+ cxt_swap->es_time = 0;
+ }
+ if(swap)
+ {/* Swap context into DDR */
+ cp_using_dma(cxt_swap->ddr_cxt, (uint32_t) &(cxt->pm), sizeof(viddec_pm_cxt_t), true, false);
+ }
+}
+
+/*------------------------------------------------------------------------------
+ * Function: viddec_fw_push_current_frame_to_output
+ * This is a helper function to read a workload from input queue and push to output queue.
+ * This is called when are done with a frame.
+ *------------------------------------------------------------------------------
+ */
+static inline void viddec_fw_push_current_frame_to_output(FW_IPC_Handle *fwipc, uint32_t cur)
+{
+ ipc_msg_data wkld_to_push;
+ FwIPC_ReadMessage(fwipc, &fwipc->wkld_q[cur], (char *)&(wkld_to_push), sizeof(ipc_msg_data));
+ FwIPC_SendMessage(fwipc, cur, (char *)&(wkld_to_push), sizeof(ipc_msg_data));
+}
+
+/*------------------------------------------------------------------------------
+ * Function: viddec_fw_get_next_stream_to_schedule
+ * This is a helper function to figure out which active stream needs to be scheduled next.
+ * If none of the streams are active it returns -1.
+ *------------------------------------------------------------------------------
+ */
+static inline int viddec_fw_get_next_stream_to_schedule(void)
+{
+ int32_t cur = -1;
+
+ if(mfd_round_robin(viddec_stream_priority_REALTIME, &cur, _dmem.g_pk_data.high_id))
+ {
+ /* On success store the stream id */
+ _dmem.g_pk_data.high_id = cur;
+ }
+ else
+ {
+ /* Check Low priority Queues, Since we couldn't find a valid realtime stream */
+ if(mfd_round_robin(viddec_stream_priority_BACKGROUND, &cur, _dmem.g_pk_data.low_id))
+ {
+ _dmem.g_pk_data.low_id = cur;
+ }
+ }
+
+ return cur;
+}
+
+/*------------------------------------------------------------------------------
+ * Function: viddec_fw_update_pending_interrupt_flag
+ * This is a helper function to figure out if we need to mark an interrupt pending for this stream.
+ * We update status value here if we find any of the interrupt conditions are true.
+ * If this stream has a interrupt pending which we could not send to host, we don't overwrite past status info.
+ *------------------------------------------------------------------------------
+ */
+static inline void viddec_fw_update_pending_interrupt_flag(int32_t cur, mfd_stream_info *cxt_swap, uint8_t pushed_a_workload,
+ uint32_t es_Q_data_at_start)
+{
+ if(_dmem.int_status[cur].mask)
+ {
+ if(!cxt_swap->pending_interrupt)
+ {
+ uint32_t es_Q_data_now;
+ uint8_t wmark_boundary_reached=false;
+ es_Q_data_now = viddec_fw_get_total_input_Q_data((uint32_t)cur);
+ wmark_boundary_reached = viddec_fw_check_watermark_boundary(es_Q_data_at_start, es_Q_data_now, cxt_swap->low_watermark);
+ _dmem.int_status[cur].status = 0;
+ if(pushed_a_workload)
+ {
+ _dmem.int_status[cur].status |= VIDDEC_FW_WKLD_DATA_AVAIL;
+ }
+ if(wmark_boundary_reached)
+ {
+ _dmem.int_status[cur].status |= VIDDEC_FW_INPUT_WATERMARK_REACHED;
+ }
+ cxt_swap->pending_interrupt = ( _dmem.int_status[cur].status != 0);
+ }
+ }
+ else
+ {
+ cxt_swap->pending_interrupt = false;
+ }
+}
+
+static inline void viddec_fw_handle_error_and_inband_messages(int32_t cur, uint32_t pm_ret)
+{
+ FW_IPC_Handle *fwipc = GET_IPC_HANDLE(_dmem);
+
+ viddec_fw_push_current_frame_to_output(fwipc, cur);
+ switch(pm_ret)
+ {
+ case PM_EOS:
+ case PM_OVERFLOW:
+ {
+ viddec_fw_init_swap_memory(cur, false, true);
+ }
+ break;
+ case PM_DISCONTINUITY:
+ {
+ viddec_fw_init_swap_memory(cur, false, false);
+ }
+ break;
+ default:
+ break;
+ }
+}
+
+void viddec_fw_debug_scheduled_stream_state(int32_t indx, int32_t start)
+{
+ FW_IPC_Handle *fwipc = GET_IPC_HANDLE(_dmem);
+ uint32_t inpt_avail=0, output_avail=0, wklds_avail =0 , pos;
+ FW_IPC_ReceiveQue *rcv_q;
+ uint32_t message;
+
+ message = (start) ? SVEN_MODULE_EVENT_GV_FW_PK_SCHDL_STRM_START: SVEN_MODULE_EVENT_GV_FW_PK_SCHDL_STRM_END;
+ rcv_q = &fwipc->rcv_q[indx];
+ inpt_avail = ipc_mq_read_avail(&rcv_q->mq, (int32_t *)&pos);
+ inpt_avail += ((_dmem.stream_info[indx].buffered_data > 0) ? CONFIG_IPC_MESSAGE_MAX_SIZE: 0);
+ inpt_avail = inpt_avail >> 4;
+ pos = 0;
+ output_avail = ipc_mq_read_avail(&fwipc->snd_q[indx].mq, (int32_t *)&pos);
+ output_avail = output_avail >> 4;
+ pos = 0;
+ wklds_avail = ipc_mq_read_avail(&fwipc->wkld_q[indx].mq, (int32_t *)&pos);
+ wklds_avail = wklds_avail >> 4;
+ WRITE_SVEN(message, (int)indx, (int)inpt_avail, (int)output_avail,
+ (int)wklds_avail, 0, 0);
+}
+
+/*------------------------------------------------------------------------------
+ * Function: viddec_fw_process_async_queues(A.K.A -> Parser Kernel)
+ * This function is responsible for handling the asynchronous queues.
+ *
+ * The first step is to figure out which stream to run. The current algorithm
+ * will go through all high priority queues for a valid stream, if not found we
+ * go through lower priority queues.
+ *
+ * If a valid stream is found we swap the required context from DDR to DMEM and do all necessary
+ * things to setup the stream.
+ * Once a stream is setup we call the parser manager and wait until a wrkld is created or no more input
+ * data left.
+ * Once we find a wkld we push it to host and save the current context to DDR.
+ *------------------------------------------------------------------------------
+ */
+
+static inline int32_t viddec_fw_process_async_queues()
+{
+ int32_t cur = -1;
+
+ cur = viddec_fw_get_next_stream_to_schedule();
+
+ if(cur != -1)
+ {
+ FW_IPC_Handle *fwipc = GET_IPC_HANDLE(_dmem);
+ FW_IPC_ReceiveQue *rcv_q;
+ /* bits captured by OMAR */
+ output_omar_wires( 0x0 );
+ rcv_q = &fwipc->rcv_q[cur];
+ {
+ mfd_pk_strm_cxt *cxt;
+ mfd_stream_info *cxt_swap;
+ cxt = (mfd_pk_strm_cxt *)&(_dmem.srm_cxt);
+ cxt_swap = (mfd_stream_info *)&(_dmem.stream_info[cur]);
+
+ /* Step 1: Swap rodata to local memory. Not doing this currently as all the rodata fits in local memory. */
+ {/* Step 2: Swap context into local memory */
+ cp_using_dma(cxt_swap->ddr_cxt, (uint32_t) &(cxt->pm), sizeof(viddec_pm_cxt_t), false, false);
+ }
+ /* Step 3:setup emitter by reading input data and workloads and initialising it */
+ mfd_setup_emitter(fwipc, &fwipc->wkld_q[cur], cxt);
+ viddec_fw_debug_scheduled_stream_state(cur, true);
+ /* Step 4: Call Parser Manager until workload done or No more ES buffers */
+ {
+ ipc_msg_data *data = 0;
+ uint8_t stream_active = true, pushed_a_workload=false;
+ uint32_t pm_ret = PM_SUCCESS, es_Q_data_at_start;
+ uint32_t start_time, time=0;
+
+ start_time = set_wdog(VIDDEC_WATCHDOG_COUNTER_MAX);
+ timer=0;
+ es_Q_data_at_start = viddec_fw_get_total_input_Q_data((uint32_t)cur);
+ do
+ {
+ output_omar_wires( 0x1 );
+ {
+ uint32_t es_t0,es_t1;
+ get_wdog(&es_t0);
+ pm_ret = viddec_pm_parse_es_buffer(&(cxt->pm), cxt_swap->strm_type, data);
+ get_wdog(&es_t1);
+ cxt_swap->es_time += get_total_ticks(es_t0, es_t1);
+ }
+ switch(pm_ret)
+ {
+ case PM_EOS:
+ case PM_WKLD_DONE:
+ case PM_OVERFLOW:
+ case PM_DISCONTINUITY:
+ {/* Finished a frame worth of data or encountered fatal error*/
+ stream_active = false;
+ }
+ break;
+ case PM_NO_DATA:
+ {
+ uint32_t next_ret=0;
+ if ( (NULL != data) && (0 != cxt_swap->es_time) )
+ {
+ /* print performance info for this buffer */
+ WRITE_SVEN(SVEN_MODULE_EVENT_GV_FW_PK_ES_DONE, (int)cur, (int)cxt_swap->es_time, (int)cxt->input.phys,
+ (int)cxt->input.len, (int)cxt->input.id, (int)cxt->input.flags );
+ cxt_swap->es_time = 0;
+ }
+
+ next_ret = FwIPC_ReadMessage(fwipc, rcv_q, (char *)&(cxt->input), sizeof(ipc_msg_data));
+ if(next_ret != 0)
+ {
+ data = &(cxt->input);
+ WRITE_SVEN(SVEN_MODULE_EVENT_GV_FW_PK_ES_START, (int)cur, (int)cxt_swap->wl_time,
+ (int)cxt->input.phys, (int)cxt->input.len, (int)cxt->input.id, (int)cxt->input.flags );
+ }
+ else
+ {/* No data on input queue */
+ cxt_swap->buffered_data = 0;
+ stream_active = false;
+ }
+ }
+ break;
+ default:
+ {/* Not done with current buffer */
+ data = NULL;
+ }
+ break;
+ }
+ }while(stream_active);
+ get_wdog(&time);
+ cxt_swap->wl_time += get_total_ticks(start_time, time);
+ /* Step 5: If workload done push workload out */
+ switch(pm_ret)
+ {
+ case PM_EOS:
+ case PM_WKLD_DONE:
+ case PM_OVERFLOW:
+ case PM_DISCONTINUITY:
+ {/* Push current workload as we are done with the frame */
+ cxt_swap->buffered_data = (PM_WKLD_DONE == pm_ret) ? true: false;
+ viddec_pm_update_time(&(cxt->pm), cxt_swap->wl_time);
+
+ /* xmit performance info for this workload output */
+ WRITE_SVEN( SVEN_MODULE_EVENT_GV_FW_PK_WL_DONE, (int)cur, (int)cxt_swap->wl_time, (int)cxt->wkld1.phys,
+ (int)cxt->wkld1.len, (int)cxt->wkld1.id, (int)cxt->wkld1.flags );
+ cxt_swap->wl_time = 0;
+
+ viddec_fw_push_current_frame_to_output(fwipc, cur);
+ if(pm_ret != PM_WKLD_DONE)
+ {
+ viddec_fw_handle_error_and_inband_messages(cur, pm_ret);
+ }
+ pushed_a_workload = true;
+ }
+ break;
+ default:
+ break;
+ }
+ /* Update information on whether we have active interrupt for this stream */
+ viddec_fw_update_pending_interrupt_flag(cur, cxt_swap, pushed_a_workload, es_Q_data_at_start);
+ }
+ viddec_fw_debug_scheduled_stream_state(cur, false);
+ /* Step 6: swap context into DDR */
+ {
+ cp_using_dma(cxt_swap->ddr_cxt, (uint32_t) &(cxt->pm), sizeof(viddec_pm_cxt_t), true, false);
+ }
+ }
+
+ }
+ return cur;
+}
+
+
+/*------------------------------------------------------------------------------
+ * Function: process_command
+ * This magic function figures out which function to execute based on autoapi.
+ *------------------------------------------------------------------------------
+ */
+
+static inline void process_command(uint32_t cmd_id, unsigned char *command)
+{
+ int32_t groupid = ((cmd_id >> 24) - 13) & 0xff;
+ int32_t funcid = cmd_id & 0xffffff;
+    /* writing func pointer to host doorbell */
+ output_omar_wires( (int) viddec_fw_api_array[groupid].unmarshal[funcid] );
+ WRITE_SVEN( SVEN_MODULE_EVENT_GV_FW_AUTOAPI_CMD,(int) cmd_id, (int) command, ((int *)command)[0],
+ ((int *)command)[1], ((int *)command)[2], ((int *)command)[3] );
+
+ viddec_fw_api_array[groupid].unmarshal[funcid](0, command);
+
+}
+
+/*------------------------------------------------------------------------------
+ * Function: viddec_fw_process_sync_queues(A.K.A auto api)
+ * Params:
+ * [in] msg: common sync structure where all required parameters are present for autoapi.
+ *
+ * This function is responsible for handling synchronous messages. All synchronous messages
+ * are handled through auto api.
+ * what are synchronous messages? Anything related to teardown or opening a stream Ex: open, close, flush etc.
+ *
+ * Only one synchronous message is handled at a time. When a synchronous message arrives, its id is in the cp doorbell. Once
+ * we are done handling the synchronous message through auto api we release the doorbell to let the host write the next
+ * message.
+ *------------------------------------------------------------------------------
+ */
+
+static inline int32_t viddec_fw_process_sync_queues(unsigned char *msg)
+{
+ int32_t ret = -1;
+
+ if(0 == reg_read(CONFIG_IPC_ROFF_RISC_DOORBELL_STATUS))
+ {
+ uint32_t command1=0;
+ command1 = reg_read(CONFIG_IPC_ROFF_RISC_RX_DOORBELL);
+ process_command(command1, msg);
+ reg_write(CONFIG_IPC_ROFF_RISC_DOORBELL_STATUS, 0x2); /* Inform Host we are done with this message */
+ ret = 0;
+ }
+ return ret;
+}
+
+/*------------------------------------------------------------------------------
+ * Function: viddec_fw_check_for_pending_int
+ * This function walks through all active streams to see if at least one stream has a pending interrupt
+ * and returns true if it finds one.
+ *------------------------------------------------------------------------------
+ */
+static inline uint32_t viddec_fw_check_for_pending_int(void)
+{
+ uint32_t i=0, ret=false;
+ /* start from 0 to max streams that fw can handle*/
+ while(i < FW_SUPPORTED_STREAMS)
+ {
+ if(_dmem.stream_info[i].state == 1)
+ {
+ if((_dmem.stream_info[i].pending_interrupt) && _dmem.int_status[i].mask)
+ {
+ ret = true;
+ }
+ else
+ {/* If this is not in INT state clear the status before sending it to host */
+ _dmem.int_status[i].status = 0;
+ }
+ }
+ i++;
+ }
+ return ret;
+}
+
+/*------------------------------------------------------------------------------
+ * Function: viddec_fw_clear_processed_int
+ * This function walks through all active streams to clear pending interrupt state. This is
+ * called after a INT was issued.
+ *------------------------------------------------------------------------------
+ */
+static inline void viddec_fw_clear_processed_int(void)
+{
+ uint32_t i=0;
+ /* start from 0 to max streams that fw can handle*/
+ while(i < FW_SUPPORTED_STREAMS)
+ {
+ //if(_dmem.stream_info[i].state == 1)
+ _dmem.stream_info[i].pending_interrupt = false;
+ i++;
+ }
+ return;
+}
+
+/*------------------------------------------------------------------------------
+ * Function: viddec_fw_int_host
+ * This function interrupts host if data is available for host or any other status
+ * is valid which the host configures the FW to.
+ * There is only one interrupt line so this is a shared Int for all streams, Host should
+ * look at status of all streams when it receives a Int.
+ * The FW will interrupt the host only if host doorbell is free, in other words the host
+ * should always make the doorbell free at the End of its ISR.
+ *------------------------------------------------------------------------------
+ */
+
+static inline int32_t viddec_fw_int_host()
+{
+ /* We Interrupt the host only if host is ready to receive an interrupt */
+ if((reg_read(CONFIG_IPC_ROFF_HOST_DOORBELL_STATUS) & GV_DOORBELL_STATS) == GV_DOORBELL_STATS)
+ {
+ if(viddec_fw_check_for_pending_int())
+ {
+ /* If a pending interrupt is found trigger INT */
+ reg_write(CONFIG_IPC_ROFF_HOST_DOORBELL, VIDDEC_FW_PARSER_IPC_HOST_INT);
+ /* Clear all stream's pending Interrupt info since we use a global INT for all streams */
+ viddec_fw_clear_processed_int();
+ }
+ }
+ return 1;
+}
+volatile unsigned int stack_corrupted __attribute__ ((section (".stckovrflwchk")));
+/*------------------------------------------------------------------------------
+ * Function: main
+ * This function is the main firmware function. Its a infinite loop where it polls
+ * for messages and processes them if they are available. Currently we ping pong between
+ * synchronous and asynchronous messages one at a time. If we have multiple asynchronous
+ * queues we always process only one between synchronous messages.
+ *
+ * For multiple asynchronous queues we round robin through the high priorities first and pick
+ * the first one available. Next time when we come around for asynchronous message we start
+ * from the next stream onwards so this guarantees that we give equal time slices for same
+ * priority queues. If no high priority queues are active we go to low priority queues and repeat
+ * the same process.
+ *------------------------------------------------------------------------------
+ */
+
+int main(void)
+{
+ unsigned char *msg = (uint8_t *)&(_dmem.buf.data[0]);
+
+ /* We wait until host reads sync message */
+ reg_write(CONFIG_IPC_ROFF_HOST_RX_DOORBELL, GV_FW_IPC_HOST_SYNC);
+
+ while ( GV_DOORBELL_STATS != reg_read(CONFIG_IPC_ROFF_HOST_DOORBELL_STATUS) )
+ { /*poll register until done bit is set */
+ /* Host re-writes Vsparc DRAM (BSS) in this loop and will hit the DONE bit when complete */
+ }
+ enable_intr();
+ /* Initialize State for queues */
+ viddec_fw_parser_register_callbacks();
+ FwIPC_Initialize(GET_IPC_HANDLE(_dmem), (volatile char *)msg);
+ _dmem.g_pk_data.high_id = _dmem.g_pk_data.low_id = -1;
+ viddec_pm_init_ops();
+ stack_corrupted = 0xDEADBEEF;
+ while(1)
+ {
+ viddec_fw_process_sync_queues(msg);
+ viddec_fw_process_async_queues();
+ viddec_fw_int_host();
+#if 0
+ if(stack_corrupted != 0xDEADBEEF)
+ {
+ WRITE_SVEN(SVEN_MODULE_EVENT_GV_FW_FATAL_STACK_CORRPON, 0, 0, 0, 0, 0, 0);
+ while(1);
+ }
+#endif
+ }
+ return 1;
+}
diff --git a/mix_vbp/viddec_fw/fw/parser/utils.c b/mix_vbp/viddec_fw/fw/parser/utils.c
new file mode 100644
index 0000000..5a22e5b
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/utils.c
@@ -0,0 +1,253 @@
+#include "fw_pvt.h"
+#include "viddec_fw_parser_ipclib_config.h"
+
+extern uint32_t timer;
+
+/*------------------------------------------------------------------------------
+ * Function: memcpy
+ * This is a memory-copy function.
+ *------------------------------------------------------------------------------
+ */
+/* NOTE: we are inventing memcpy since we don't want to include string libs as part of FW Due to size limitations*/
/* NOTE: hand-rolled memcpy because the FW cannot link string libs (size limits).
 * Copies n bytes from src to dest; returns dest. Regions must not overlap.
 *
 * BUG FIX: the original head-alignment loop tested 'align', which was always 0,
 * so when src/dest shared a word offset the leading bytes were never copied and
 * the pointers were never advanced to word alignment before the word-copy loop.
 * It also subtracted 'trail' (addr & 3) from bytes_left instead of the actual
 * number of bytes needed to reach alignment, (4 - trail) & 3. */
void *memcpy(void *dest, const void *src, uint32_t n)
{
    uint8_t *ptr8_frm, *ptr8_to;
    uint32_t *ptr32_frm, *ptr32_to;
    uint32_t bytes_left = n, trail = 0;
    uint32_t align = 0;

    ptr8_frm = (uint8_t *)src;
    ptr8_to = (uint8_t *)dest;

    trail = ((uint32_t)ptr8_frm) & 0x3;
    if((trail == (((uint32_t)ptr8_to) & 0x3)) && (n > 4))
    {
        /* src and dest have the same word offset: copy the few leading bytes
           needed to reach a word boundary, then copy word by word */
        align = (4 - trail) & 0x3;
        bytes_left -= align;
        while(align > 0){
            *ptr8_to ++ = *ptr8_frm ++;
            align--;
        }
        /* check to see if rest of bytes is a multiple of 4. */
        trail = bytes_left & 0x3;
        bytes_left = (bytes_left >> 2) << 2;
        ptr32_to = (uint32_t *)ptr8_to;
        ptr32_frm = (uint32_t *)ptr8_frm;
        /* copy word by word */
        while(bytes_left > 0){
            *ptr32_to ++ = *ptr32_frm ++;
            bytes_left -= 4;
        }
        /* If there are any trailing bytes do a byte copy */
        ptr8_to = (uint8_t *)ptr32_to;
        ptr8_frm = (uint8_t *)ptr32_frm;
        while(trail > 0){
            *ptr8_to ++ = *ptr8_frm ++;
            trail--;
        }
    }
    else
    {/* case when src and dest addr are not on same alignment.
        Just do a byte copy */
        while(bytes_left > 0){
            *ptr8_to ++ = *ptr8_frm ++;
            bytes_left -= 1;
        }
    }
    return dest;
}
+
+/*------------------------------------------------------------------------------
+ * Function: memset
+ * This is a function to set the specified byte value across a memory array.
+ *------------------------------------------------------------------------------
+ */
+/* NOTE: we are inventing memset since we don't want to include string libs as part of FW Due to size limitations*/
/* NOTE: hand-rolled memset because the FW cannot link string libs (size limits).
 * Fills n bytes at s with (c & 0xFF); returns s.
 *
 * BUG FIX: n < 4 was previously a silent no-op (the whole body was guarded by
 * if (n >= 4)); small fills now fall through to a byte loop.
 * Also removed pointer -> uint32_t -> pointer round-trips, which truncate
 * addresses on 64-bit builds; on the 32-bit FW target behavior is unchanged.
 *
 * NOTE(review): the partial-word head/tail bit math appears to assume a
 * big-endian target (the Vsparc core) — confirm before reusing elsewhere. */
void *memset(void *s, int32_t c, uint32_t n)
{
    uint8_t *ptr8 = (uint8_t *)s;
    uint32_t *ptr32, data;
    uint32_t mask = 0, bytes_left = n;

    /* replicate the fill byte into all four lanes of a word */
    mask = c & 0xFF;
    mask |= (mask << 8);
    mask |= (mask << 16);
    if(n >= 4)
    {
        uint32_t trail=0;
        trail = 4 - (((uint32_t)ptr8) & 0x3);
        if(trail < 4)
        {
            /* unaligned head: merge fill bytes into the enclosing word */
            ptr32 = (uint32_t *)(ptr8 - (4 - trail));
            data = (*ptr32 >> (8*trail)) << (8*trail);
            data |= (mask >> (32 - (8*trail)));
            *ptr32 = data;
            bytes_left -= trail;
            ptr8 += trail;
        }
        ptr32 = (uint32_t *)ptr8;
        /* aligned body: fill word by word */
        while(bytes_left >= 4)
        {
            *ptr32 = mask;
            ptr32++;
            bytes_left -=4;
        }
        if(bytes_left > 0)
        {
            /* partial tail: merge fill bytes into the trailing word */
            data = (*ptr32 << (8*bytes_left)) >> (8*bytes_left);
            data |= (mask << (32 - (8*bytes_left)));
            *ptr32=data;
        }
    }
    else
    {
        /* n < 4: plain byte fill (previously skipped entirely) */
        while(bytes_left > 0)
        {
            *ptr8++ = (uint8_t)(c & 0xFF);
            bytes_left--;
        }
    }

    return s;
}
+
/*------------------------------------------------------------------------------
 * Function: cp_using_dma
 * This is a function to copy data from local memory to/from system memory.
 * Params:
 *     [in] ddr_addr  : Word aligned ddr address.
 *     [in] local_addr: Word aligned local address.
 *     [in] size      : No of bytes to transfer.
 *     [in] to_ddr    : Direction of copy, if true copy to ddr else copy to local memory.
 *     [in] swap      : Enable or disable byte swap(endian).
 *     [out] return   : Actual number of bytes copied, which can be more than what was requested
 *                      since we can only copy words at a time.
 * Limitations: DMA can transfer Words only, Local addr & DDR addr should be word aligned.
 * NOTE(review): both busy/done waits below spin forever with no timeout; a
 * wedged DMA engine hangs the firmware.
 *------------------------------------------------------------------------------
 */
uint32_t cp_using_dma(uint32_t ddr_addr, uint32_t local_addr, uint32_t size, char to_ddr, char swap)
{
    uint32_t val=0, wrote = size;

    while((reg_read(DMA_CONTROL_STATUS) & DMA_CTRL_STATUS_BUSY) != 0)
    {
        /* wait if DMA is busy with a transaction. Error condition?? */
    }

    /* program source/destination; GV_DDR_MEM_MASK strips the DDR window bits */
    reg_write(DMA_SYSTEM_ADDRESS, (ddr_addr & ~3) & ~GV_DDR_MEM_MASK);
    reg_write(DMA_LOCAL_ADDRESS, (local_addr & 0xfffc));
    //wrote += (ddr_addr & 0x3);
    wrote = (wrote+3)>>2;/* make number of bytes multiple of 4 */
    val=(wrote & 0xffff) << 2;
    /* clear any stale DONE flag before starting */
    reg_write(DMA_CONTROL_STATUS, DMA_CTRL_STATUS_DONE);
    val |= DMA_CTRL_STATUS_START;
    /* If size > 64 use 128 byte burst speed */
    /* NOTE(review): 'wrote' is in words at this point, so the burst mode
       actually engages above 256 bytes — confirm intent */
    if(wrote > 64)
        val |= (1<<18);
    if(swap) /* Endian swap if needed */
        val |= DMA_CTRL_STATUS_SWAP;
    if(to_ddr)
        val = val | DMA_CTRL_STATUS_DIRCN;
    reg_write(DMA_CONTROL_STATUS, val);
    while((reg_read(DMA_CONTROL_STATUS) & DMA_CTRL_STATUS_DONE) == 0)
    {
        /* wait till DMA is done */
    }
    reg_write(DMA_CONTROL_STATUS, DMA_CTRL_STATUS_DONE);

    return (wrote << 2);
}
+
/*------------------------------------------------------------------------------
 * Function: cp_using_dma_phys
 * Same as cp_using_dma, but the ddr address is treated as a raw physical
 * address: GV_DDR_MEM_MASK is NOT stripped before programming the engine.
 * Params:
 *     [in] ddr_addr  : Word aligned ddr address.
 *     [in] local_addr: Word aligned local address.
 *     [in] size      : No of bytes to transfer.
 *     [in] to_ddr    : Direction of copy, if true copy to ddr else copy to local memory.
 *     [in] swap      : Enable or disable byte swap(endian).
 *     [out] return   : Actual number of bytes copied, which can be more than what was requested
 *                      since we can only copy words at a time.
 * Limitations: DMA can transfer Words only, Local addr & DDR addr should be word aligned.
 * NOTE(review): busy/done waits spin forever with no timeout.
 *------------------------------------------------------------------------------
 */
uint32_t cp_using_dma_phys(uint32_t ddr_addr, uint32_t local_addr, uint32_t size, char to_ddr, char swap)
{
    uint32_t val=0, wrote = size;

    while((reg_read(DMA_CONTROL_STATUS) & DMA_CTRL_STATUS_BUSY) != 0)
    {
        /* wait if DMA is busy with a transaction. Error condition?? */
    }

    reg_write(DMA_SYSTEM_ADDRESS, (ddr_addr & ~3));
    reg_write(DMA_LOCAL_ADDRESS, (local_addr & 0xfffc));
    //wrote += (ddr_addr & 0x3);
    wrote = (wrote+3)>>2;/* make number of bytes multiple of 4 */
    val=(wrote & 0xffff) << 2;
    /* clear any stale DONE flag before starting */
    reg_write(DMA_CONTROL_STATUS, DMA_CTRL_STATUS_DONE);
    val |= DMA_CTRL_STATUS_START;
    /* If size > 64 use 128 byte burst speed */
    if(wrote > 64)
        val |= (1<<18);
    if(swap) /* Endian swap if needed */
        val |= DMA_CTRL_STATUS_SWAP;
    if(to_ddr)
        val = val | DMA_CTRL_STATUS_DIRCN;
    reg_write(DMA_CONTROL_STATUS, val);
    while((reg_read(DMA_CONTROL_STATUS) & DMA_CTRL_STATUS_DONE) == 0)
    {
        /* wait till DMA is done */
    }
    reg_write(DMA_CONTROL_STATUS, DMA_CTRL_STATUS_DONE);

    return (wrote << 2);
}
+
+void update_ctrl_reg(uint8_t enable, uint32_t mask)
+{
+ uint32_t read_val = 0;
+ read_val = reg_read(CONFIG_CP_CONTROL_REG);
+ if(enable)
+ {
+ read_val = read_val | mask;
+ }
+ else
+ {
+ read_val = read_val & ~mask;
+ }
+ reg_write(CONFIG_CP_CONTROL_REG, read_val);
+ return;
+
+}
+
extern uint32_t sven_get_timestamp();

/* Arms the watchdog with 'offset' ticks (masked to the counter width) and
 * returns the programmed value. On pre-B0 silicon (B0_TIMER_FIX undefined)
 * the watchdog is unused and the SVEN timestamp is returned instead so
 * callers can measure elapsed time via get_total_ticks(). */
uint32_t set_wdog(uint32_t offset)
{
#ifdef B0_TIMER_FIX
    /* disable, program, then re-enable the counter */
    update_ctrl_reg(0, WATCH_DOG_ENABLE);
    reg_write(INT_REG, INT_WDOG_ENABLE);
    reg_write(WATCH_DOG_COUNTER, offset & WATCH_DOG_MASK);
    update_ctrl_reg(1, WATCH_DOG_ENABLE);
    return offset & WATCH_DOG_MASK;
#else
    return sven_get_timestamp();
#endif
}
+
/* Reads the current watchdog counter into *value and disarms the watchdog.
 * On pre-B0 silicon (B0_TIMER_FIX undefined) returns the SVEN timestamp. */
void get_wdog(uint32_t *value)
{
#ifdef B0_TIMER_FIX
    *value = reg_read(WATCH_DOG_COUNTER) & WATCH_DOG_MASK;
    reg_write(INT_REG, ~INT_WDOG_ENABLE);
    update_ctrl_reg(0, WATCH_DOG_ENABLE);
#else
    *value = sven_get_timestamp();
#endif
}
+
/* Returns the number of ticks elapsed between the set_wdog()/get_wdog()
 * readings 'start' and 'end'. */
uint32_t get_total_ticks(uint32_t start, uint32_t end)
{
#ifdef B0_TIMER_FIX
    /* Watchdog counts down, so elapsed = start - end, plus one full period
       ('start' ticks) for every wrap recorded in the global 'timer'. */
    uint32_t elapsed = (start - end) + (start * timer);
    timer = 0;
    return elapsed;
#else
    return end - start;/* convert to 1 MHz clocks */
#endif
}
diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c b/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c
new file mode 100644
index 0000000..033f6b6
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c
@@ -0,0 +1,1568 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+
+#include <glib.h>
+#include <dlfcn.h>
+
+#include "h264.h"
+#include "vbp_loader.h"
+#include "vbp_utils.h"
+#include "vbp_h264_parser.h"
+
+
/* number of bytes used to encode length of NAL payload. Default is 4 bytes. */
static int NAL_length_size = 4;

/* default scaling list table */
/* default 4x4 intra scaling list */
unsigned char Default_4x4_Intra[16] =
{
    6,13,20,28,
    13,20,28,32,
    20,28,32,37,
    28,32,37,42
};

/* default 4x4 inter scaling list */
unsigned char Default_4x4_Inter[16] =
{
    10,14,20,24,
    14,20,24,27,
    20,24,27,30,
    24,27,30,34
};

/* default 8x8 intra scaling list */
unsigned char Default_8x8_Intra[64] =
{
    6,10,13,16,18,23,25,27,
    10,11,16,18,23,25,27,29,
    13,16,18,23,25,27,29,31,
    16,18,23,25,27,29,31,33,
    18,23,25,27,29,31,33,36,
    23,25,27,29,31,33,36,38,
    25,27,29,31,33,36,38,40,
    27,29,31,33,36,38,40,42
};

/* default 8x8 inter scaling list */
unsigned char Default_8x8_Inter[64] =
{
    9,13,15,17,19,21,22,24,
    13,13,17,19,21,22,24,25,
    15,17,19,21,22,24,25,27,
    17,19,21,22,24,25,27,28,
    19,21,22,24,25,27,28,30,
    21,22,24,25,27,28,30,32,
    22,24,25,27,28,30,32,33,
    24,25,27,28,30,32,33,35
};

/* flat 4x4 list (all 16) used when no scaling matrices are signalled */
unsigned char quant_flat[16] =
{
    16,16,16,16,
    16,16,16,16,
    16,16,16,16,
    16,16,16,16
};

/* flat 8x8 list (all 16) used when no scaling matrices are signalled */
unsigned char quant8_flat[64] =
{
    16,16,16,16,16,16,16,16,
    16,16,16,16,16,16,16,16,
    16,16,16,16,16,16,16,16,
    16,16,16,16,16,16,16,16,
    16,16,16,16,16,16,16,16,
    16,16,16,16,16,16,16,16,
    16,16,16,16,16,16,16,16,
    16,16,16,16,16,16,16,16
};

/* per-index default list: entries 0-2 intra 4x4, 3-5 inter 4x4,
 * 6 intra 8x8, 7 inter 8x8 */
unsigned char* UseDefaultList[8] =
{
    Default_4x4_Intra, Default_4x4_Intra, Default_4x4_Intra,
    Default_4x4_Inter, Default_4x4_Inter, Default_4x4_Inter,
    Default_8x8_Intra,
    Default_8x8_Inter
};
+
+/**
+ *
+ */
+uint32 vbp_init_parser_entries_h264(vbp_context *pcontext)
+{
+ if (NULL == pcontext->parser_ops)
+ {
+ return VBP_PARM;
+ }
+ pcontext->parser_ops->init = dlsym(pcontext->fd_parser, "viddec_h264_init");
+ if (NULL == pcontext->parser_ops->init)
+ {
+ ETRACE ("Failed to set entry point." );
+ return VBP_LOAD;
+ }
+
+ pcontext->parser_ops->parse_sc = viddec_parse_sc;
+
+ pcontext->parser_ops->parse_syntax = dlsym(pcontext->fd_parser, "viddec_h264_parse");
+ if (NULL == pcontext->parser_ops->parse_syntax)
+ {
+ ETRACE ("Failed to set entry point." );
+ return VBP_LOAD;
+ }
+
+ pcontext->parser_ops->get_cxt_size = dlsym(pcontext->fd_parser, "viddec_h264_get_context_size");
+ if (NULL == pcontext->parser_ops->get_cxt_size)
+ {
+ ETRACE ("Failed to set entry point." );
+ return VBP_LOAD;
+ }
+
+ pcontext->parser_ops->is_wkld_done = dlsym(pcontext->fd_parser, "viddec_h264_wkld_done");
+ if (NULL == pcontext->parser_ops->is_wkld_done)
+ {
+ ETRACE ("Failed to set entry point." );
+ return VBP_LOAD;
+ }
+
+ /* entry point not needed */
+ pcontext->parser_ops->is_frame_start = NULL;
+ return VBP_OK;
+}
+
+
+/**
+ *
+ */
+uint32 vbp_allocate_query_data_h264(vbp_context *pcontext)
+{
+ if (NULL != pcontext->query_data)
+ {
+ return VBP_PARM;
+ }
+
+ pcontext->query_data = NULL;
+ vbp_data_h264 *query_data = NULL;
+
+ query_data = g_try_new0(vbp_data_h264, 1);
+ if (NULL == query_data)
+ {
+ goto cleanup;
+ }
+
+ /* assign the pointer */
+ pcontext->query_data = (void *)query_data;
+
+ query_data->pic_data = g_try_new0(vbp_picture_data_h264, MAX_NUM_PICTURES);
+ if (NULL == query_data->pic_data)
+ {
+ goto cleanup;
+ }
+
+ int i;
+ for (i = 0; i < MAX_NUM_PICTURES; i++)
+ {
+ query_data->pic_data[i].pic_parms = g_try_new0(VAPictureParameterBufferH264, 1);
+ if (NULL == query_data->pic_data[i].pic_parms)
+ {
+ goto cleanup;
+ }
+ query_data->pic_data[i].num_slices = 0;
+ query_data->pic_data[i].slc_data = g_try_new0(vbp_slice_data_h264, MAX_NUM_SLICES);
+ if (NULL == query_data->pic_data[i].slc_data)
+ {
+ goto cleanup;
+ }
+ }
+
+
+ query_data->IQ_matrix_buf = g_try_new0(VAIQMatrixBufferH264, 1);
+ if (NULL == query_data->IQ_matrix_buf)
+ {
+ goto cleanup;
+ }
+
+ query_data->codec_data = g_try_new0(vbp_codec_data_h264, 1);
+ if (NULL == query_data->codec_data)
+ {
+ goto cleanup;
+ }
+
+ return VBP_OK;
+
+cleanup:
+ vbp_free_query_data_h264(pcontext);
+
+ return VBP_MEM;
+}
+
+uint32 vbp_free_query_data_h264(vbp_context *pcontext)
+{
+ if (NULL == pcontext->query_data)
+ {
+ return VBP_OK;
+ }
+
+ int i;
+ vbp_data_h264 *query_data;
+ query_data = (vbp_data_h264 *)pcontext->query_data;
+
+ if (query_data->pic_data)
+ {
+ for (i = 0; i < MAX_NUM_PICTURES; i++)
+ {
+ g_free(query_data->pic_data[i].slc_data);
+ g_free(query_data->pic_data[i].pic_parms);
+ }
+ g_free(query_data->pic_data);
+ }
+
+ g_free(query_data->IQ_matrix_buf);
+ g_free(query_data->codec_data);
+ g_free(query_data);
+
+ pcontext->query_data = NULL;
+
+ return VBP_OK;
+}
+
+
/* Reads a big-endian 16-bit value from an (unaligned) byte pointer. */
static inline uint16_t vbp_utils_ntohs(uint8_t* p)
{
    return (uint16_t)((p[0] << 8) | p[1]);
}
+
/* Reads a big-endian 32-bit value from an (unaligned) byte pointer. */
static inline uint32_t vbp_utils_ntohl(uint8_t* p)
{
    return ((uint32_t)p[0] << 24) | ((uint32_t)p[1] << 16) |
           ((uint32_t)p[2] << 8)  |  (uint32_t)p[3];
}
+
+
+static inline void vbp_set_VAPicture_h264(
+ int curr_picture_structure,
+ int bottom_field,
+ frame_store* store,
+ VAPictureH264* pic)
+{
+ if (FRAME == curr_picture_structure)
+ {
+ if (FRAME != viddec_h264_get_dec_structure(store))
+ {
+ WTRACE("Reference picture structure is not frame for current frame picture!");
+ }
+ pic->flags = 0;
+ pic->TopFieldOrderCnt = store->frame.poc;
+ pic->BottomFieldOrderCnt = store->frame.poc;
+ }
+ else
+ {
+ if (FRAME == viddec_h264_get_dec_structure(store))
+ {
+ WTRACE("reference picture structure is frame for current field picture!");
+ }
+ if (bottom_field)
+ {
+ pic->flags = VA_PICTURE_H264_BOTTOM_FIELD;
+ pic->TopFieldOrderCnt = store->top_field.poc;
+ pic->BottomFieldOrderCnt = store->bottom_field.poc;
+ }
+ else
+ {
+ pic->flags = VA_PICTURE_H264_TOP_FIELD;
+ pic->TopFieldOrderCnt = store->top_field.poc;
+ pic->BottomFieldOrderCnt = store->bottom_field.poc;
+ }
+ }
+}
+
/* Builds RefPicList0/RefPicList1 for a slice from the parser's DPB state:
 * all 64 entries are first marked invalid, then the active entries are
 * filled from either the reordered per-slice list or the default DPB list. */
static inline void vbp_set_slice_ref_list_h264(
    struct h264_viddec_parser* h264_parser,
    VASliceParameterBufferH264 *slc_parms)
{
    int i, j;
    int num_ref_idx_active = 0;
    h264_Slice_Header_t* slice_header = &(h264_parser->info.SliceHeader);
    uint8_t* p_list = NULL;
    VAPictureH264* refPicListX = NULL;
    frame_store* fs = NULL;

    /* initialize ref picture list, set picture id and flags to invalid. */
    for (i = 0; i < 2; i++)
    {
        refPicListX = (i == 0) ? &(slc_parms->RefPicList0[0]) : &(slc_parms->RefPicList1[0]);
        for (j = 0; j < 32; j++)
        {
            refPicListX->picture_id = VA_INVALID_SURFACE;
            refPicListX->frame_idx = 0;
            refPicListX->flags = VA_PICTURE_H264_INVALID;
            refPicListX->TopFieldOrderCnt = 0;
            refPicListX->BottomFieldOrderCnt = 0;
            refPicListX++;
        }
    }

    for (i = 0; i < 2; i++)
    {
        refPicListX = (i == 0) ? &(slc_parms->RefPicList0[0]) : &(slc_parms->RefPicList1[0]);

        /* list 0 applies to P and B slices */
        if ((i == 0) &&
            ((h264_PtypeB == slice_header->slice_type) ||
             (h264_PtypeP == slice_header->slice_type)))
        {
            num_ref_idx_active = slice_header->num_ref_idx_l0_active;
            /* reordered per-slice list takes precedence over the DPB default */
            if (slice_header->sh_refpic_l0.ref_pic_list_reordering_flag)
            {
                p_list = h264_parser->info.slice_ref_list0;
            }
            else
            {
                p_list = h264_parser->info.dpb.listX_0;
            }
        }
        /* list 1 applies to B slices only */
        else if((i == 1) && (h264_PtypeB == slice_header->slice_type))
        {
            num_ref_idx_active = slice_header->num_ref_idx_l1_active;
            if (slice_header->sh_refpic_l1.ref_pic_list_reordering_flag)
            {
                p_list = h264_parser->info.slice_ref_list1;
            }
            else
            {
                p_list = h264_parser->info.dpb.listX_1;
            }
        }
        else
        {
            num_ref_idx_active = 0;
            p_list = NULL;
        }


        for (j = 0; j < num_ref_idx_active; j++)
        {
            /* low 5 bits of the list entry index into the frame stores */
            fs = &(h264_parser->info.dpb.fs[(p_list[j] & 0x1f)]);

            /* bit 5 indicates if reference picture is bottom field */
            vbp_set_VAPicture_h264(
                h264_parser->info.img.structure,
                (p_list[j] & 0x20) >> 5,
                fs,
                refPicListX);

            refPicListX->frame_idx = fs->frame_num;
            refPicListX->flags |= viddec_h264_get_is_long_term(fs) ? VA_PICTURE_H264_LONG_TERM_REFERENCE : VA_PICTURE_H264_SHORT_TERM_REFERENCE;
            refPicListX++;
        }
    }
}
+
/* Copies the explicit prediction weight table from the slice header into the
 * VA slice parameters when weighted prediction is in use (P/B with
 * weighted_pred_flag, or B with weighted_bipred_idc == 1); otherwise fills
 * the VA fields with the default (unweighted) values. */
static inline void vbp_set_pre_weight_table_h264(
    struct h264_viddec_parser* h264_parser,
    VASliceParameterBufferH264 *slc_parms)
{
    h264_Slice_Header_t* slice_header = &(h264_parser->info.SliceHeader);
    int i, j;

    if ((((h264_PtypeP == slice_header->slice_type) ||
          (h264_PtypeB == slice_header->slice_type)) &&
          h264_parser->info.active_PPS.weighted_pred_flag) ||
        ((h264_PtypeB == slice_header->slice_type) &&
         (1 == h264_parser->info.active_PPS.weighted_bipred_idc)))
    {
        /* explicit weights parsed from the bitstream */
        slc_parms->luma_log2_weight_denom = slice_header->sh_predwttbl.luma_log2_weight_denom;
        slc_parms->chroma_log2_weight_denom = slice_header->sh_predwttbl.chroma_log2_weight_denom;
        slc_parms->luma_weight_l0_flag = slice_header->sh_predwttbl.luma_weight_l0_flag;
        slc_parms->chroma_weight_l0_flag = slice_header->sh_predwttbl.chroma_weight_l0_flag;
        slc_parms->luma_weight_l1_flag = slice_header->sh_predwttbl.luma_weight_l1_flag;
        slc_parms->chroma_weight_l1_flag = slice_header->sh_predwttbl.chroma_weight_l1_flag;

        for (i = 0; i < 32; i++)
        {
            slc_parms->luma_weight_l0[i] = slice_header->sh_predwttbl.luma_weight_l0[i];
            slc_parms->luma_offset_l0[i] = slice_header->sh_predwttbl.luma_offset_l0[i];
            slc_parms->luma_weight_l1[i] = slice_header->sh_predwttbl.luma_weight_l1[i];
            slc_parms->luma_offset_l1[i] = slice_header->sh_predwttbl.luma_offset_l1[i];

            for (j = 0; j < 2; j++)
            {
                slc_parms->chroma_weight_l0[i][j] = slice_header->sh_predwttbl.chroma_weight_l0[i][j];
                slc_parms->chroma_offset_l0[i][j] = slice_header->sh_predwttbl.chroma_offset_l0[i][j];
                slc_parms->chroma_weight_l1[i][j] = slice_header->sh_predwttbl.chroma_weight_l1[i][j];
                slc_parms->chroma_offset_l1[i][j] = slice_header->sh_predwttbl.chroma_offset_l1[i][j];
            }
        }
    }
    else
    {
        /* default weight table */
        slc_parms->luma_log2_weight_denom = 5;
        slc_parms->chroma_log2_weight_denom = 5;
        slc_parms->luma_weight_l0_flag = 0;
        slc_parms->luma_weight_l1_flag = 0;
        slc_parms->chroma_weight_l0_flag = 0;
        slc_parms->chroma_weight_l1_flag = 0;
        for (i = 0; i < 32; i++)
        {
            slc_parms->luma_weight_l0[i] = 0;
            slc_parms->luma_offset_l0[i] = 0;
            slc_parms->luma_weight_l1[i] = 0;
            slc_parms->luma_offset_l1[i] = 0;

            for (j = 0; j < 2; j++)
            {
                slc_parms->chroma_weight_l0[i][j] = 0;
                slc_parms->chroma_offset_l0[i][j] = 0;
                slc_parms->chroma_weight_l1[i][j] = 0;
                slc_parms->chroma_offset_l1[i][j] = 0;
            }
        }
    }
}
+
+
/* Populates pic_parms->ReferenceFrames[] from the DPB: all 16 slots are
 * first invalidated, then short-term followed by long-term references are
 * filled in. num_ref_frames is set to the number of slots consumed. */
static inline void vbp_set_reference_frames_h264(
    struct h264_viddec_parser *parser,
    VAPictureParameterBufferH264* pic_parms)
{
    int buffer_idx;
    int frame_idx;
    frame_store* store = NULL;
    h264_DecodedPictureBuffer* dpb = &(parser->info.dpb);
    /* initialize reference frames */
    for (frame_idx = 0; frame_idx < 16; frame_idx++)
    {
        pic_parms->ReferenceFrames[frame_idx].picture_id = VA_INVALID_SURFACE;
        pic_parms->ReferenceFrames[frame_idx].frame_idx = 0;
        pic_parms->ReferenceFrames[frame_idx].flags = VA_PICTURE_H264_INVALID;
        pic_parms->ReferenceFrames[frame_idx].TopFieldOrderCnt = 0;
        pic_parms->ReferenceFrames[frame_idx].BottomFieldOrderCnt = 0;
    }
    pic_parms->num_ref_frames = 0;

    frame_idx = 0;

    /* ITRACE("short term frame in dpb %d", dpb->ref_frames_in_buffer); */
    /* set short term reference frames */
    for (buffer_idx = 0; buffer_idx < dpb->ref_frames_in_buffer; buffer_idx++)
    {
        if (frame_idx >= 16)
        {
            WTRACE("Frame index is out of bound.");
            break;
        }

        store = &dpb->fs[dpb->fs_ref_idc[buffer_idx]];
        /* if (store->is_used == 3 && store->frame.used_for_reference == 3) */
        if (viddec_h264_get_is_used(store))
        {
            pic_parms->ReferenceFrames[frame_idx].frame_idx = store->frame_num;
            pic_parms->ReferenceFrames[frame_idx].flags = VA_PICTURE_H264_SHORT_TERM_REFERENCE;
            if (FRAME == parser->info.img.structure)
            {
                pic_parms->ReferenceFrames[frame_idx].TopFieldOrderCnt = store->frame.poc;
                pic_parms->ReferenceFrames[frame_idx].BottomFieldOrderCnt = store->frame.poc;
            }
            else
            {
                pic_parms->ReferenceFrames[frame_idx].TopFieldOrderCnt = store->top_field.poc;
                pic_parms->ReferenceFrames[frame_idx].BottomFieldOrderCnt = store->bottom_field.poc;
                if (store->top_field.used_for_reference && store->bottom_field.used_for_reference)
                {
                    /* if both fields are used for reference, just set flag to be frame (0) */
                }
                else
                {
                    if (store->top_field.used_for_reference)
                        pic_parms->ReferenceFrames[frame_idx].flags |= VA_PICTURE_H264_TOP_FIELD;
                    if (store->bottom_field.used_for_reference)
                        pic_parms->ReferenceFrames[frame_idx].flags |= VA_PICTURE_H264_BOTTOM_FIELD;
                }
            }
        }
        /* NOTE(review): frame_idx advances even when the store is not "used",
           leaving an INVALID entry in the middle of the list — confirm this
           is intended rather than incrementing only inside the if above. */
        frame_idx++;
    }

    /* set long term reference frames */
    for (buffer_idx = 0; buffer_idx < dpb->ltref_frames_in_buffer; buffer_idx++)
    {
        if (frame_idx >= 16)
        {
            WTRACE("Frame index is out of bound.");
            break;
        }
        store = &dpb->fs[dpb->fs_ltref_idc[buffer_idx]];
        if (!viddec_h264_get_is_long_term(store))
        {
            WTRACE("long term frame is not marked as long term.");
        }
        /*if (store->is_used == 3 && store->is_long_term && store->frame.used_for_reference == 3) */
        if (viddec_h264_get_is_used(store))
        {
            /* NOTE(review): unlike the short-term path, .frame_idx is never
               set here (stays 0 from the init loop) — verify whether the
               long-term frame index should be filled in. */
            pic_parms->ReferenceFrames[frame_idx].flags = VA_PICTURE_H264_LONG_TERM_REFERENCE;
            if (FRAME == parser->info.img.structure)
            {
                pic_parms->ReferenceFrames[frame_idx].TopFieldOrderCnt = store->frame.poc;
                pic_parms->ReferenceFrames[frame_idx].BottomFieldOrderCnt = store->frame.poc;
            }
            else
            {
                pic_parms->ReferenceFrames[frame_idx].TopFieldOrderCnt = store->top_field.poc;
                pic_parms->ReferenceFrames[frame_idx].BottomFieldOrderCnt = store->bottom_field.poc;
                if (store->top_field.used_for_reference && store->bottom_field.used_for_reference)
                {
                    /* if both fields are used for reference, just set flag to be frame (0)*/
                }
                else
                {
                    if (store->top_field.used_for_reference)
                        pic_parms->ReferenceFrames[frame_idx].flags |= VA_PICTURE_H264_TOP_FIELD;
                    if (store->bottom_field.used_for_reference)
                        pic_parms->ReferenceFrames[frame_idx].flags |= VA_PICTURE_H264_BOTTOM_FIELD;
                }
            }
        }
        frame_idx++;
    }

    pic_parms->num_ref_frames = frame_idx;

    if (frame_idx > parser->info.active_SPS.num_ref_frames)
    {
        WTRACE("actual num_ref_frames (%d) exceeds the value in the sequence header (%d).",
               frame_idx, parser->info.active_SPS.num_ref_frames);
    }
}
+
+
/* Fills the VA IQ matrix (4x4 and 8x8 scaling lists) from PPS/SPS data,
 * applying the H.264 fallback rules when a list is not explicitly present:
 * rule B when the SPS matrix exists, rule A otherwise, flat lists when
 * neither PPS nor SPS signals scaling matrices. */
static inline void vbp_set_scaling_list_h264(
    struct h264_viddec_parser *parser,
    VAIQMatrixBufferH264* IQ_matrix_buf)
{
    int i;
    if (parser->info.active_PPS.pic_scaling_matrix_present_flag)
    {
        /* 6 4x4 lists, plus 2 8x8 lists when 8x8 transform is enabled */
        for (i = 0; i < 6 + 2 * parser->info.active_PPS.transform_8x8_mode_flag; i++)
        {
            if (parser->info.active_PPS.pic_scaling_list_present_flag[i])
            {
                if (((i < 6) && parser->info.active_PPS.UseDefaultScalingMatrix4x4Flag[i]) ||
                    ((i >= 6) && parser->info.active_PPS.UseDefaultScalingMatrix8x8Flag[i-6]))
                {
                    /* use default scaling list */
                    if (i < 6)
                    {
                        memcpy(IQ_matrix_buf->ScalingList4x4[i], UseDefaultList[i], 16);
                    }
                    else
                    {
                        memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], UseDefaultList[i], 64);
                    }
                }
                else
                {
                    /* use PPS list */
                    if (i < 6)
                    {
                        memcpy(IQ_matrix_buf->ScalingList4x4[i], parser->info.active_PPS.ScalingList4x4[i], 16);
                    }
                    else
                    {
                        memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], parser->info.active_PPS.ScalingList8x8[i - 6], 64);
                    }
                }
            }
            else /* pic_scaling_list not present */
            {
                if (parser->info.active_SPS.seq_scaling_matrix_present_flag)
                {
                    /* SPS matrix present - use fallback rule B */
                    switch (i)
                    {
                        case 0:
                        case 3:
                            /* NOTE(review): rule B copies from the PPS list
                               when the SPS flag is set — confirm PPS (not SPS)
                               is the intended source here. */
                            memcpy(IQ_matrix_buf->ScalingList4x4[i],
                                   parser->info.active_SPS.seq_scaling_list_present_flag[i] ? parser->info.active_PPS.ScalingList4x4[i] : UseDefaultList[i],
                                   16);
                            break;

                        case 6:
                        case 7:
                            memcpy(IQ_matrix_buf->ScalingList8x8[i - 6],
                                   parser->info.active_SPS.seq_scaling_list_present_flag[i] ? parser->info.active_PPS.ScalingList8x8[i - 6] : UseDefaultList[i],
                                   64);
                            break;

                        case 1:
                        case 2:
                        case 4:
                        case 5:
                            /* inherit from the previous 4x4 list */
                            memcpy(IQ_matrix_buf->ScalingList4x4[i],
                                   IQ_matrix_buf->ScalingList4x4[i - 1],
                                   16);
                            break;

                        default:
                            /* NOTE(review): other branches use WTRACE here */
                            g_warning("invalid scaling list index.");
                            break;
                    }
                }
                else /* seq_scaling_matrix not present */
                {
                    /* SPS matrix not present - use fallback rule A */
                    switch (i)
                    {
                        case 0:
                        case 3:
                            memcpy(IQ_matrix_buf->ScalingList4x4[i], UseDefaultList[i], 16);
                            break;

                        case 6:
                        case 7:
                            memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], UseDefaultList[i], 64);
                            break;

                        case 1:
                        case 2:
                        case 4:
                        case 5:
                            memcpy(IQ_matrix_buf->ScalingList4x4[i],
                                   IQ_matrix_buf->ScalingList4x4[i - 1],
                                   16);
                            break;

                        default:
                            WTRACE("invalid scaling list index.");
                            break;
                    }
                } /* end of seq_scaling_matrix not present */
            } /* end of pic_scaling_list not present */
        } /* for loop for each index from 0 to 7 */
    } /* end of pic_scaling_matrix present */
    else
    {
        /* PPS matrix not present, use SPS information */
        if (parser->info.active_SPS.seq_scaling_matrix_present_flag)
        {
            for (i = 0; i < 6 + 2 * parser->info.active_PPS.transform_8x8_mode_flag; i++)
            {
                if (parser->info.active_SPS.seq_scaling_list_present_flag[i])
                {
                    if (((i < 6) && parser->info.active_SPS.UseDefaultScalingMatrix4x4Flag[i]) ||
                        ((i >= 6) && parser->info.active_SPS.UseDefaultScalingMatrix8x8Flag[i - 6]))
                    {
                        /* use default scaling list */
                        if (i < 6)
                        {
                            memcpy(IQ_matrix_buf->ScalingList4x4[i], UseDefaultList[i], 16);
                        }
                        else
                        {
                            memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], UseDefaultList[i], 64);
                        }
                    }
                    else
                    {
                        /* use SPS list */
                        if (i < 6)
                        {
                            memcpy(IQ_matrix_buf->ScalingList4x4[i], parser->info.active_SPS.ScalingList4x4[i], 16);
                        }
                        else
                        {
                            memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], parser->info.active_SPS.ScalingList8x8[i - 6], 64);
                        }
                    }
                }
                else
                {
                    /* SPS list not present - use fallback rule A */
                    switch (i)
                    {
                        case 0:
                        case 3:
                            memcpy(IQ_matrix_buf->ScalingList4x4[i], UseDefaultList[i], 16);
                            break;

                        case 6:
                        case 7:
                            memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], UseDefaultList[i], 64);
                            break;

                        case 1:
                        case 2:
                        case 4:
                        case 5:
                            memcpy(IQ_matrix_buf->ScalingList4x4[i],
                                   IQ_matrix_buf->ScalingList4x4[i - 1],
                                   16);
                            break;

                        default:
                            WTRACE("invalid scaling list index.");
                            break;
                    }
                }
            }
        }
        else
        {
            /* SPS matrix not present - use flat lists */
            for (i = 0; i < 6; i++)
            {
                memcpy(IQ_matrix_buf->ScalingList4x4[i], quant_flat, 16);
            }
            for (i = 0; i < 2; i++)
            {
                memcpy(IQ_matrix_buf->ScalingList8x8[i], quant8_flat, 64);
            }
        }
    }

    /* when 8x8 transform is off, force flat 8x8 lists even if matrices exist */
    if ((0 == parser->info.active_PPS.transform_8x8_mode_flag) &&
        (parser->info.active_PPS.pic_scaling_matrix_present_flag ||
         parser->info.active_SPS.seq_scaling_matrix_present_flag))
    {
        for (i = 0; i < 2; i++)
        {
            memcpy(IQ_matrix_buf->ScalingList8x8[i], quant8_flat, 64);
        }
    }
}
+
/* Copies sequence/picture-level codec data (profile, level, dimensions,
 * cropping, aspect ratio, VUI video format) from the active SPS/PPS into
 * the exported vbp_codec_data_h264 structure. */
static void vbp_set_codec_data_h264(
    struct h264_viddec_parser *parser,
    vbp_codec_data_h264* codec_data)
{
    /* parameter id */
    codec_data->seq_parameter_set_id = parser->info.active_SPS.seq_parameter_set_id;
    codec_data->pic_parameter_set_id = parser->info.active_PPS.pic_parameter_set_id;

    /* profile and level */
    codec_data->profile_idc = parser->info.active_SPS.profile_idc;
    codec_data->level_idc = parser->info.active_SPS.level_idc;


    /* NOTE(review): this extracts bit 2 of constraint_set_flags — verify the
       parser packs constraint_set1 at bit 2 and not at bit 1. */
    codec_data->constraint_set1_flag = (parser->info.active_SPS.constraint_set_flags & 0x4) >> 2;


    /* reference frames */
    codec_data->num_ref_frames = parser->info.active_SPS.num_ref_frames;

    if (!parser->info.active_SPS.sps_disp.frame_mbs_only_flag &&
        !parser->info.active_SPS.sps_disp.mb_adaptive_frame_field_flag)
    {
        /* no longer necessary: two fields share the same interlaced surface */
        /* codec_data->num_ref_frames *= 2; */
    }

    codec_data->gaps_in_frame_num_value_allowed_flag = parser->info.active_SPS.gaps_in_frame_num_value_allowed_flag;

    /* frame coding */
    codec_data->frame_mbs_only_flag = parser->info.active_SPS.sps_disp.frame_mbs_only_flag;
    codec_data->mb_adaptive_frame_field_flag = parser->info.active_SPS.sps_disp.mb_adaptive_frame_field_flag;

    /* frame dimension (in pixels; map units doubled for field coding) */
    codec_data->frame_width = (parser->info.active_SPS.sps_disp.pic_width_in_mbs_minus1 + 1 ) * 16;

    codec_data->frame_height = (2 - parser->info.active_SPS.sps_disp.frame_mbs_only_flag) *
        (parser->info.active_SPS.sps_disp.pic_height_in_map_units_minus1 + 1) * 16;

    /* frame cropping */
    codec_data->frame_cropping_flag =
        parser->info.active_SPS.sps_disp.frame_cropping_flag;

    codec_data->frame_crop_rect_left_offset =
        parser->info.active_SPS.sps_disp.frame_crop_rect_left_offset;

    codec_data->frame_crop_rect_right_offset =
        parser->info.active_SPS.sps_disp.frame_crop_rect_right_offset;

    codec_data->frame_crop_rect_top_offset =
        parser->info.active_SPS.sps_disp.frame_crop_rect_top_offset;

    codec_data->frame_crop_rect_bottom_offset =
        parser->info.active_SPS.sps_disp.frame_crop_rect_bottom_offset;

    /* aspect ratio */
    codec_data->aspect_ratio_info_present_flag =
        parser->info.active_SPS.sps_disp.vui_seq_parameters.aspect_ratio_info_present_flag;

    codec_data->aspect_ratio_idc =
        parser->info.active_SPS.sps_disp.vui_seq_parameters.aspect_ratio_idc;

    codec_data->sar_width =
        parser->info.active_SPS.sps_disp.vui_seq_parameters.sar_width;

    codec_data->sar_height =
        parser->info.active_SPS.sps_disp.vui_seq_parameters.sar_height;

    /* video format */
    codec_data->video_format =
        parser->info.active_SPS.sps_disp.vui_seq_parameters.video_format;

    /* NOTE(review): this second assignment overwrites video_format with
       video_signal_type_present_flag, making the assignment above dead —
       almost certainly a copy-paste bug; the target should presumably be a
       separate video_signal_type_present_flag field. Confirm against the
       vbp_codec_data_h264 struct definition before changing. */
    codec_data->video_format =
        parser->info.active_SPS.sps_disp.vui_seq_parameters.video_signal_type_present_flag;
}
+
+
+/**
+ * Populate per-picture data for the slice that has just been parsed.
+ * A slice whose first_mb_in_slice is 0 opens a new picture; the picture
+ * parameter buffer (VAPictureParameterBufferH264) is then filled once from
+ * the active SPS/PPS and the slice header. Later slices of the same picture
+ * reuse the already-populated parameters.
+ * Returns VBP_OK on success, VBP_DATA when the picture count exceeds
+ * MAX_NUM_PICTURES or the stream does not start at MB address 0.
+ * (list_index is currently unused.)
+ */
+static uint32_t vbp_add_pic_data_h264(vbp_context *pcontext, int list_index)
+{
+    viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
+
+    vbp_data_h264 *query_data = (vbp_data_h264 *)pcontext->query_data;
+    struct h264_viddec_parser* parser = NULL;
+    vbp_picture_data_h264* pic_data = NULL;
+    VAPictureParameterBufferH264* pic_parms = NULL;
+
+    parser = (struct h264_viddec_parser *)cxt->codec_data;
+
+    if (0 == parser->info.SliceHeader.first_mb_in_slice)
+    {
+        /* a new picture is parsed */
+        query_data->num_pictures++;
+    }
+
+    if (query_data->num_pictures > MAX_NUM_PICTURES)
+    {
+        ETRACE("num of pictures exceeds the limit (%d).", MAX_NUM_PICTURES);
+        return VBP_DATA;
+    }
+
+    /* index of the picture this slice belongs to */
+    int pic_data_index = query_data->num_pictures - 1;
+    if (pic_data_index < 0)
+    {
+        WTRACE("MB address does not start from 0!");
+        return VBP_DATA;
+    }
+
+    pic_data = &(query_data->pic_data[pic_data_index]);
+    pic_parms = pic_data->pic_parms;
+
+    if (parser->info.SliceHeader.first_mb_in_slice == 0)
+    {
+        /**
+         * picture parameter only needs to be set once,
+         * even multiple slices may be encoded
+         */
+
+        /* VAPictureParameterBufferH264 */
+        pic_parms->CurrPic.picture_id = VA_INVALID_SURFACE;
+        pic_parms->CurrPic.frame_idx = 0;
+        if (parser->info.img.field_pic_flag == 1)
+        {
+            if (parser->info.img.bottom_field_flag)
+            {
+                pic_parms->CurrPic.flags = VA_PICTURE_H264_BOTTOM_FIELD;
+            }
+            else
+            {
+                /* also OK set to 0 (from test suite) */
+                pic_parms->CurrPic.flags = VA_PICTURE_H264_TOP_FIELD;
+            }
+        }
+        else
+        {
+            pic_parms->CurrPic.flags = 0; /* frame picture */
+        }
+        pic_parms->CurrPic.TopFieldOrderCnt = parser->info.img.toppoc;
+        pic_parms->CurrPic.BottomFieldOrderCnt = parser->info.img.bottompoc;
+        pic_parms->CurrPic.frame_idx = parser->info.SliceHeader.frame_num;
+
+        /* don't care if current frame is used as long term reference */
+        if (parser->info.SliceHeader.nal_ref_idc != 0)
+        {
+            pic_parms->CurrPic.flags |= VA_PICTURE_H264_SHORT_TERM_REFERENCE;
+        }
+
+        pic_parms->picture_width_in_mbs_minus1 = parser->info.active_SPS.sps_disp.pic_width_in_mbs_minus1;
+
+        /* frame height in MBS; doubled for field-coded content */
+        pic_parms->picture_height_in_mbs_minus1 = (2 - parser->info.active_SPS.sps_disp.frame_mbs_only_flag) *
+            (parser->info.active_SPS.sps_disp.pic_height_in_map_units_minus1 + 1) - 1;
+
+        pic_parms->bit_depth_luma_minus8 = parser->info.active_SPS.bit_depth_luma_minus8;
+        pic_parms->bit_depth_chroma_minus8 = parser->info.active_SPS.bit_depth_chroma_minus8;
+
+        pic_parms->seq_fields.value = 0;
+        pic_parms->seq_fields.bits.chroma_format_idc = parser->info.active_SPS.sps_disp.chroma_format_idc;
+        pic_parms->seq_fields.bits.residual_colour_transform_flag = parser->info.active_SPS.residual_colour_transform_flag;
+        pic_parms->seq_fields.bits.frame_mbs_only_flag = parser->info.active_SPS.sps_disp.frame_mbs_only_flag;
+        pic_parms->seq_fields.bits.mb_adaptive_frame_field_flag = parser->info.active_SPS.sps_disp.mb_adaptive_frame_field_flag;
+        pic_parms->seq_fields.bits.direct_8x8_inference_flag = parser->info.active_SPS.sps_disp.direct_8x8_inference_flag;
+
+        /* new fields in libva 0.31 */
+        pic_parms->seq_fields.bits.gaps_in_frame_num_value_allowed_flag = parser->info.active_SPS.gaps_in_frame_num_value_allowed_flag;
+        pic_parms->seq_fields.bits.log2_max_frame_num_minus4 = parser->info.active_SPS.log2_max_frame_num_minus4;
+        pic_parms->seq_fields.bits.pic_order_cnt_type = parser->info.active_SPS.pic_order_cnt_type;
+        pic_parms->seq_fields.bits.log2_max_pic_order_cnt_lsb_minus4 = parser->info.active_SPS.log2_max_pic_order_cnt_lsb_minus4;
+        pic_parms->seq_fields.bits.delta_pic_order_always_zero_flag = parser->info.active_SPS.delta_pic_order_always_zero_flag;
+
+        /* referenced from UMG_Moorstown_TestSuites */
+        pic_parms->seq_fields.bits.MinLumaBiPredSize8x8 = (parser->info.active_SPS.level_idc > 30) ? 1 : 0;
+
+        pic_parms->num_slice_groups_minus1 = parser->info.active_PPS.num_slice_groups_minus1;
+        pic_parms->slice_group_map_type = parser->info.active_PPS.slice_group_map_type;
+        pic_parms->slice_group_change_rate_minus1 = 0;
+        pic_parms->pic_init_qp_minus26 = parser->info.active_PPS.pic_init_qp_minus26;
+        pic_parms->pic_init_qs_minus26 = 0;
+        pic_parms->chroma_qp_index_offset = parser->info.active_PPS.chroma_qp_index_offset;
+        pic_parms->second_chroma_qp_index_offset = parser->info.active_PPS.second_chroma_qp_index_offset;
+
+        pic_parms->pic_fields.value = 0;
+        pic_parms->pic_fields.bits.entropy_coding_mode_flag = parser->info.active_PPS.entropy_coding_mode_flag;
+        pic_parms->pic_fields.bits.weighted_pred_flag = parser->info.active_PPS.weighted_pred_flag;
+        pic_parms->pic_fields.bits.weighted_bipred_idc = parser->info.active_PPS.weighted_bipred_idc;
+        pic_parms->pic_fields.bits.transform_8x8_mode_flag = parser->info.active_PPS.transform_8x8_mode_flag;
+
+        /* new LibVA fields in v0.31 */
+        pic_parms->pic_fields.bits.pic_order_present_flag = parser->info.active_PPS.pic_order_present_flag;
+        pic_parms->pic_fields.bits.deblocking_filter_control_present_flag = parser->info.active_PPS.deblocking_filter_control_present_flag;
+        pic_parms->pic_fields.bits.redundant_pic_cnt_present_flag = parser->info.active_PPS.redundant_pic_cnt_present_flag;
+        pic_parms->pic_fields.bits.reference_pic_flag = parser->info.SliceHeader.nal_ref_idc != 0;
+
+        /* all slices in the picture have the same field_pic_flag */
+        pic_parms->pic_fields.bits.field_pic_flag = parser->info.SliceHeader.field_pic_flag;
+        pic_parms->pic_fields.bits.constrained_intra_pred_flag = parser->info.active_PPS.constrained_intra_pred_flag;
+
+        pic_parms->frame_num = parser->info.SliceHeader.frame_num;
+    }
+
+    /* set reference frames, and num_ref_frames */
+    vbp_set_reference_frames_h264(parser, pic_parms);
+    if (parser->info.nal_unit_type == h264_NAL_UNIT_TYPE_IDR)
+    {
+        /* num of reference frame is 0 if current picture is IDR */
+        pic_parms->num_ref_frames = 0;
+    }
+    else
+    {
+        /* actual num_ref_frames is set in vbp_set_reference_frames_h264 */
+    }
+
+    return VBP_OK;
+}
+
+#if 0
+/*
+ * NOTE(review): compiled out (#if 0) — kept for reference only.
+ * Method A: compact the ReferenceFrames array by keeping only entries whose
+ * TopFieldOrderCnt is referenced from some slice's RefPicList0/1, moving
+ * them to the front and marking the remaining slots invalid.
+ */
+static inline void vbp_update_reference_frames_h264_methodA(vbp_picture_data_h264* pic_data)
+{
+    VAPictureParameterBufferH264* pic_parms = pic_data->pic_parms;
+
+    /* is_used[j] == 1 means ReferenceFrames[j] is referenced by some slice */
+    char is_used[16];
+    memset(is_used, 0, sizeof(is_used));
+
+    int ref_list;
+    int slice_index;
+    int i, j;
+    VAPictureH264* pRefList = NULL;
+
+    for (slice_index = 0; slice_index < pic_data->num_slices; slice_index++)
+    {
+        VASliceParameterBufferH264* slice_parms =
+            &(pic_data->slc_data[slice_index].slc_parms);
+
+        for (ref_list = 0; ref_list < 2; ref_list++)
+        {
+            if (0 == ref_list)
+                pRefList = slice_parms->RefPicList0;
+            else
+                pRefList = slice_parms->RefPicList1;
+
+            for (i = 0; i < 32; i++, pRefList++)
+            {
+                /* an invalid entry terminates the reference list */
+                if (VA_PICTURE_H264_INVALID == pRefList->flags)
+                    break;
+
+                /* match by TopFieldOrderCnt against the frame store */
+                for (j = 0; j < 16; j++)
+                {
+                    if (pic_parms->ReferenceFrames[j].TopFieldOrderCnt ==
+                        pRefList->TopFieldOrderCnt)
+                    {
+                        is_used[j] = 1;
+                        break;
+                    }
+                }
+            }
+        }
+    }
+
+    /* compact referenced frames to the front of ReferenceFrames */
+    int frame_idx = 0;
+    VAPictureH264* pRefFrame = pic_parms->ReferenceFrames;
+    for (i = 0; i < 16; i++)
+    {
+        if (is_used[i])
+        {
+            memcpy(pRefFrame,
+                &(pic_parms->ReferenceFrames[i]),
+                sizeof(VAPictureH264));
+
+            pRefFrame++;
+            frame_idx++;
+        }
+    }
+    pic_parms->num_ref_frames = frame_idx;
+
+    /* invalidate the remaining slots */
+    for (; frame_idx < 16; frame_idx++)
+    {
+        pRefFrame->picture_id = VA_INVALID_SURFACE;
+        pRefFrame->frame_idx = -1;
+        pRefFrame->flags = VA_PICTURE_H264_INVALID;
+        pRefFrame->TopFieldOrderCnt = -1;
+        pRefFrame->BottomFieldOrderCnt = -1;
+        pRefFrame++;
+    }
+}
+#endif
+
+#if 0
+/*
+ * NOTE(review): compiled out (#if 0) — kept for reference only.
+ * Method B: rebuild the ReferenceFrames array from scratch out of the
+ * slices' RefPicList0/1 entries, merging top/bottom field flags for frames
+ * referenced as both fields (both-fields => flags cleared to 0, i.e. frame).
+ */
+static inline void vbp_update_reference_frames_h264_methodB(vbp_picture_data_h264* pic_data)
+{
+    VAPictureParameterBufferH264* pic_parms = pic_data->pic_parms;
+    int i;
+    VAPictureH264* pRefFrame = pic_parms->ReferenceFrames;
+
+    /* start with every slot invalidated */
+    for (i = 0; i < 16; i++)
+    {
+        pRefFrame->picture_id = VA_INVALID_SURFACE;
+        pRefFrame->frame_idx = -1;
+        pRefFrame->flags = VA_PICTURE_H264_INVALID;
+        pRefFrame->TopFieldOrderCnt = -1;
+        pRefFrame->BottomFieldOrderCnt = -1;
+        pRefFrame++;
+    }
+
+    pic_parms->num_ref_frames = 0;
+
+    int ref_list;
+    int slice_index;
+    int j;
+    VAPictureH264* pRefList = NULL;
+
+    for (slice_index = 0; slice_index < pic_data->num_slices; slice_index++)
+    {
+        VASliceParameterBufferH264* slice_parms =
+            &(pic_data->slc_data[slice_index].slc_parms);
+
+        for (ref_list = 0; ref_list < 2; ref_list++)
+        {
+            if (0 == ref_list)
+                pRefList = slice_parms->RefPicList0;
+            else
+                pRefList = slice_parms->RefPicList1;
+
+            for (i = 0; i < 32; i++, pRefList++)
+            {
+                /* an invalid entry terminates the reference list */
+                if (VA_PICTURE_H264_INVALID == pRefList->flags)
+                    break;
+
+                for (j = 0; j < 16; j++)
+                {
+                    if (pic_parms->ReferenceFrames[j].TopFieldOrderCnt ==
+                        pRefList->TopFieldOrderCnt)
+                    {
+                        /* already tracked: accumulate field flags */
+                        pic_parms->ReferenceFrames[j].flags |=
+                            pRefList->flags;
+
+                        if ((pic_parms->ReferenceFrames[j].flags & VA_PICTURE_H264_TOP_FIELD) &&
+                            (pic_parms->ReferenceFrames[j].flags & VA_PICTURE_H264_BOTTOM_FIELD))
+                        {
+                            /* both fields referenced -> treat as frame */
+                            pic_parms->ReferenceFrames[j].flags = 0;
+                        }
+                        break;
+                    }
+                }
+                if (j == 16)
+                {
+                    /* not found: append as a new reference frame */
+                    memcpy(&(pic_parms->ReferenceFrames[pic_parms->num_ref_frames++]),
+                        pRefList,
+                        sizeof(VAPictureH264));
+                }
+
+            }
+        }
+    }
+}
+#endif
+
+
+/**
+ * Fill a VASliceParameterBufferH264 for the slice at list.data[index] of the
+ * current picture: slice location/size within the sample buffer, the bit
+ * offset to the slice data, and the slice-header derived fields. Also runs
+ * the prediction-weight-table and reference-list setup helpers.
+ * Returns VBP_OK on success, VBP_DATA on an invalid picture index or when
+ * the per-picture slice limit is reached.
+ */
+static uint32_t vbp_add_slice_data_h264(vbp_context *pcontext, int index)
+{
+    viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
+    uint32 bit, byte;
+    uint8 is_emul;
+
+    vbp_data_h264 *query_data = (vbp_data_h264 *)pcontext->query_data;
+    VASliceParameterBufferH264 *slc_parms = NULL;
+    vbp_slice_data_h264 *slc_data = NULL;
+    struct h264_viddec_parser* h264_parser = NULL;
+    h264_Slice_Header_t* slice_header = NULL;
+    vbp_picture_data_h264* pic_data = NULL;
+
+    h264_parser = (struct h264_viddec_parser *)cxt->codec_data;
+    int pic_data_index = query_data->num_pictures - 1;
+    if (pic_data_index < 0)
+    {
+        ETRACE("invalid picture data index.");
+        return VBP_DATA;
+    }
+
+    pic_data = &(query_data->pic_data[pic_data_index]);
+
+    /* BUG FIX: validate the slice count BEFORE indexing slc_data. The
+     * original code wrote slc_data[num_slices] first and only checked the
+     * limit after incrementing, allowing one out-of-bounds slot write. */
+    if (pic_data->num_slices >= MAX_NUM_SLICES)
+    {
+        ETRACE("number of slices per picture exceeds the limit (%d).", MAX_NUM_SLICES);
+        return VBP_DATA;
+    }
+
+    slc_data = &(pic_data->slc_data[pic_data->num_slices]);
+    slc_data->buffer_addr = cxt->parse_cubby.buf;
+    slc_parms = &(slc_data->slc_parms);
+
+    /* byte: how many bytes have been parsed */
+    /* bit: bits parsed within the current parsing position */
+    viddec_pm_get_au_pos(cxt, &bit, &byte, &is_emul);
+
+    slc_parms->slice_data_size = slc_data->slice_size =
+        pcontext->parser_cxt->list.data[index].edpos -
+        pcontext->parser_cxt->list.data[index].stpos;
+
+    /* the offset to the NAL start code for this slice */
+    slc_data->slice_offset = cxt->list.data[index].stpos;
+    slc_parms->slice_data_offset = 0;
+
+    /* whole slice is in this buffer */
+    slc_parms->slice_data_flag = VA_SLICE_DATA_FLAG_ALL;
+
+    /* bit offset from NAL start code to the beginning of slice data */
+    slc_parms->slice_data_bit_offset = bit + byte * 8;
+
+    if (is_emul)
+    {
+        WTRACE("next byte is emulation prevention byte.");
+        /*slc_parms->slice_data_bit_offset += 8; */
+    }
+
+    /* compensate for emulation prevention bytes already consumed */
+    if (cxt->getbits.emulation_byte_counter != 0)
+    {
+        slc_parms->slice_data_bit_offset -= cxt->getbits.emulation_byte_counter * 8;
+    }
+
+    slice_header = &(h264_parser->info.SliceHeader);
+    slc_parms->first_mb_in_slice = slice_header->first_mb_in_slice;
+
+    /* in MBAFF frame coding, first_mb_in_slice counts MB pairs */
+    if (h264_parser->info.active_SPS.sps_disp.mb_adaptive_frame_field_flag &
+        (!(h264_parser->info.SliceHeader.field_pic_flag)))
+    {
+        slc_parms->first_mb_in_slice /= 2;
+    }
+
+    slc_parms->slice_type = slice_header->slice_type;
+
+    slc_parms->direct_spatial_mv_pred_flag = slice_header->direct_spatial_mv_pred_flag;
+
+    /* active reference counts: none for I, list 0 for P, both lists for B */
+    slc_parms->num_ref_idx_l0_active_minus1 = 0;
+    slc_parms->num_ref_idx_l1_active_minus1 = 0;
+    if (slice_header->slice_type == h264_PtypeI)
+    {
+    }
+    else if (slice_header->slice_type == h264_PtypeP)
+    {
+        slc_parms->num_ref_idx_l0_active_minus1 = slice_header->num_ref_idx_l0_active - 1;
+    }
+    else if (slice_header->slice_type == h264_PtypeB)
+    {
+        slc_parms->num_ref_idx_l0_active_minus1 = slice_header->num_ref_idx_l0_active - 1;
+        slc_parms->num_ref_idx_l1_active_minus1 = slice_header->num_ref_idx_l1_active - 1;
+    }
+    else
+    {
+        WTRACE("slice type %d is not supported.", slice_header->slice_type);
+    }
+
+    slc_parms->cabac_init_idc = slice_header->cabac_init_idc;
+    slc_parms->slice_qp_delta = slice_header->slice_qp_delta;
+    slc_parms->disable_deblocking_filter_idc = slice_header->disable_deblocking_filter_idc;
+    slc_parms->slice_alpha_c0_offset_div2 = slice_header->slice_alpha_c0_offset_div2;
+    slc_parms->slice_beta_offset_div2 = slice_header->slice_beta_offset_div2;
+
+    vbp_set_pre_weight_table_h264(h264_parser, slc_parms);
+    vbp_set_slice_ref_list_h264(h264_parser, slc_parms);
+
+    pic_data->num_slices++;
+
+    return VBP_OK;
+}
+
+/**
+ * Parse decoder configuration data: an AVCDecoderConfigurationRecord as
+ * defined in MPEG-4 Part 15 (ISO/IEC 14496-15). Extracts the NAL length
+ * field size into the global NAL_length_size and records the byte ranges of
+ * the embedded SPS and PPS NAL units in cxt->list for later parsing.
+ * Returns VBP_DATA when the record is truncated, VBP_OK otherwise.
+ */
+uint32 vbp_parse_init_data_h264(vbp_context* pcontext)
+{
+    /* parsing AVCDecoderConfigurationRecord structure (see MPEG-4 part 15 spec) */
+
+    uint8 configuration_version = 0;
+    uint8 AVC_profile_indication = 0;
+    uint8 profile_compatibility = 0;
+    uint8 AVC_level_indication = 0;
+    uint8 length_size_minus_one = 0;
+    uint8 num_of_sequence_parameter_sets = 0;
+    uint8 num_of_picture_parameter_sets = 0;
+    uint16 sequence_parameter_set_length = 0;
+    uint16 picture_parameter_set_length = 0;
+
+    int i = 0;
+    viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
+    uint8* cur_data = cxt->parse_cubby.buf;
+
+    if (cxt->parse_cubby.size < 6)
+    {
+        /* need at least 6 bytes to start parsing the structure, see spec 15 */
+        return VBP_DATA;
+    }
+
+    /* fixed prologue: version, profile, compatibility, level (1 byte each) */
+    configuration_version = *cur_data++;
+    AVC_profile_indication = *cur_data++;
+
+    /*ITRACE("Profile indication: %d", AVC_profile_indication); */
+
+    profile_compatibility = *cur_data++;
+    AVC_level_indication = *cur_data++;
+
+    /* ITRACE("Level indication: %d", AVC_level_indication);*/
+    /* 6 high bits reserved (111111), 2 low bits of lengthSizeMinusOne */
+    length_size_minus_one = (*cur_data) & 0x3;
+
+    if (length_size_minus_one != 3)
+    {
+        WTRACE("length size (%d) is not equal to 4.", length_size_minus_one + 1);
+    }
+
+    NAL_length_size = length_size_minus_one + 1;
+
+    cur_data++;
+
+    /* 3 bits of reserved (111) and 5 bits of num_of_sequence_parameter_sets */
+    num_of_sequence_parameter_sets = (*cur_data) & 0x1f;
+    if (num_of_sequence_parameter_sets > 1)
+    {
+        WTRACE("num_of_sequence_parameter_sets is %d.", num_of_sequence_parameter_sets);
+    }
+    if (num_of_sequence_parameter_sets > MAX_NUM_SPS)
+    {
+        /* this would never happen as MAX_NUM_SPS = 32 */
+        WTRACE("num_of_sequence_parameter_sets (%d) exceeds the limit (%d).", num_of_sequence_parameter_sets, MAX_NUM_SPS);
+    }
+    cur_data++;
+
+    /* collect each SPS as a list item: 16-bit length, then the NAL bytes */
+    cxt->list.num_items = 0;
+    for (i = 0; i < num_of_sequence_parameter_sets; i++)
+    {
+        if (cur_data - cxt->parse_cubby.buf + 2 > cxt->parse_cubby.size)
+        {
+            /* need at least 2 bytes to parse sequence_parameter_set_length */
+            return VBP_DATA;
+        }
+
+        /* 16 bits */
+        sequence_parameter_set_length = vbp_utils_ntohs(cur_data);
+
+        cur_data += 2;
+
+        if (cur_data - cxt->parse_cubby.buf + sequence_parameter_set_length > cxt->parse_cubby.size)
+        {
+            /* need at least sequence_parameter_set_length bytes for SPS */
+            return VBP_DATA;
+        }
+
+        cxt->list.data[cxt->list.num_items].stpos = cur_data - cxt->parse_cubby.buf;
+
+        /* end pos is exclusive */
+        cxt->list.data[cxt->list.num_items].edpos =
+            cxt->list.data[cxt->list.num_items].stpos + sequence_parameter_set_length;
+
+        cxt->list.num_items++;
+
+        cur_data += sequence_parameter_set_length;
+    }
+
+    if (cur_data - cxt->parse_cubby.buf + 1 > cxt->parse_cubby.size)
+    {
+        /* need at least one more byte to parse num_of_picture_parameter_sets */
+        return VBP_DATA;
+    }
+
+    num_of_picture_parameter_sets = *cur_data++;
+    if (num_of_picture_parameter_sets > 1)
+    {
+        /* g_warning("num_of_picture_parameter_sets is %d.", num_of_picture_parameter_sets); */
+    }
+
+    /* collect each PPS the same way */
+    for (i = 0; i < num_of_picture_parameter_sets; i++)
+    {
+        if (cur_data - cxt->parse_cubby.buf + 2 > cxt->parse_cubby.size)
+        {
+            /* need at least 2 bytes to parse picture_parameter_set_length */
+            return VBP_DATA;
+        }
+
+        /* 16 bits */
+        picture_parameter_set_length = vbp_utils_ntohs(cur_data);
+
+        cur_data += 2;
+
+        if (cur_data - cxt->parse_cubby.buf + picture_parameter_set_length > cxt->parse_cubby.size)
+        {
+            /* need at least picture_parameter_set_length bytes for PPS */
+            return VBP_DATA;
+        }
+
+        cxt->list.data[cxt->list.num_items].stpos = cur_data - cxt->parse_cubby.buf;
+
+        /* end pos is exclusive */
+        cxt->list.data[cxt->list.num_items].edpos =
+            cxt->list.data[cxt->list.num_items].stpos + picture_parameter_set_length;
+
+        cxt->list.num_items++;
+
+        cur_data += picture_parameter_set_length;
+    }
+
+    if ((cur_data - cxt->parse_cubby.buf) != cxt->parse_cubby.size)
+    {
+        WTRACE("Not all initialization data is parsed. Size = %d, parsed = %d.",
+            cxt->parse_cubby.size, (cur_data - cxt->parse_cubby.buf));
+    }
+
+    return VBP_OK;
+}
+
+/*
+ * Decode the big-endian NAL-unit length prefix at p, honouring the
+ * configured NAL_length_size (1-4 bytes). An unexpected size is logged
+ * and reset to the 4-byte default before decoding.
+ */
+static inline uint32_t vbp_get_NAL_length_h264(uint8_t* p)
+{
+    if (4 == NAL_length_size)
+    {
+        return vbp_utils_ntohl(p);
+    }
+    if (3 == NAL_length_size)
+    {
+        return ((uint32_t)(*p) << 16) + ((uint32_t)(*(p + 1)) << 8) + (*(p + 2));
+    }
+    if (2 == NAL_length_size)
+    {
+        return vbp_utils_ntohs(p);
+    }
+    if (1 == NAL_length_size)
+    {
+        return *p;
+    }
+
+    WTRACE("invalid NAL_length_size: %d.", NAL_length_size);
+    /* default to 4 bytes for length */
+    NAL_length_size = 4;
+    return vbp_utils_ntohl(p);
+}
+
+/**
+ * H.264 elementary stream here does not use start codes; each NAL unit is
+ * preceded by a NAL_length_size-byte length field (MPEG-4 Part 15 sample
+ * format). Walk the sample buffer, recording the start/end position of
+ * every NAL payload in cxt->list, and reset the per-sample query data.
+ */
+uint32 vbp_parse_start_code_h264(vbp_context *pcontext)
+{
+    viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
+    int32_t size_left = 0;
+    int32_t size_parsed = 0;
+    int32_t NAL_length = 0;
+    viddec_sc_parse_cubby_cxt_t* cubby = NULL;
+
+    /* reset query data for the new sample buffer */
+    vbp_data_h264* query_data = (vbp_data_h264*)pcontext->query_data;
+    int i;
+
+    for (i = 0; i < MAX_NUM_PICTURES; i++)
+    {
+        query_data->pic_data[i].num_slices = 0;
+    }
+    query_data->num_pictures = 0;
+
+    cubby = &(cxt->parse_cubby);
+
+    cxt->list.num_items = 0;
+
+    /* start code emulation prevention byte is present in NAL */
+    cxt->getbits.is_emul_reqd = 1;
+
+    size_left = cubby->size;
+
+    while (size_left >= NAL_length_size)
+    {
+        NAL_length = vbp_get_NAL_length_h264(cubby->buf + size_parsed);
+
+        size_parsed += NAL_length_size;
+        cxt->list.data[cxt->list.num_items].stpos = size_parsed;
+        size_parsed += NAL_length; /* skip NAL bytes */
+        /* end position is exclusive */
+        cxt->list.data[cxt->list.num_items].edpos = size_parsed;
+        cxt->list.num_items++;
+        if (cxt->list.num_items >= MAX_IBUFS_PER_SC)
+        {
+            ETRACE("num of list items exceeds the limit (%d).", MAX_IBUFS_PER_SC);
+            break;
+        }
+
+        size_left = cubby->size - size_parsed;
+    }
+
+    if (size_left != 0)
+    {
+        /* trailing bytes did not form a complete length-prefixed NAL */
+        WTRACE("Elementary stream is not aligned (%d).", size_left);
+    }
+    return VBP_OK;
+}
+
+/**
+ * Process the parsing result after one NAL unit has been parsed.
+ * Coded-slice NAL units (regular slices and IDR) contribute picture and
+ * slice data; header and delimiter NAL types need no post-processing here.
+ * @param pcontext: VBP context.
+ * @param i: index of the NAL unit in the parser's item list.
+ * @return VBP_PARM for an out-of-range index, otherwise the status of the
+ *         picture/slice bookkeeping (VBP_OK on success).
+ */
+uint32 vbp_process_parsing_result_h264( vbp_context *pcontext, int i)
+{
+    if (i >= MAX_NUM_SLICES)
+    {
+        return VBP_PARM;
+    }
+
+    uint32 error = VBP_OK;
+
+    struct h264_viddec_parser* parser =
+        (struct h264_viddec_parser *)&( pcontext->parser_cxt->codec_data[0]);
+
+    switch (parser->info.nal_unit_type)
+    {
+    case h264_NAL_UNIT_TYPE_SLICE:
+    case h264_NAL_UNIT_TYPE_IDR:
+        /* both coded-slice NAL types take the same path: add picture
+         * bookkeeping first, then the slice parameters */
+        error = vbp_add_pic_data_h264(pcontext, i);
+        if (VBP_OK == error)
+        {
+            error = vbp_add_slice_data_h264(pcontext, i);
+        }
+        break;
+
+    case h264_NAL_UNIT_TYPE_SEI:
+    case h264_NAL_UNIT_TYPE_SPS:
+    case h264_NAL_UNIT_TYPE_PPS:
+    case h264_NAL_UNIT_TYPE_Acc_unit_delimiter:
+    case h264_NAL_UNIT_TYPE_EOSeq:
+    case h264_NAL_UNIT_TYPE_EOstream:
+        /* parsed by the codec parser; nothing further to do here */
+        break;
+
+    default:
+        WTRACE("unknown header %d is parsed.", parser->info.nal_unit_type);
+        break;
+    }
+    return error;
+}
+
+/*
+ * Fill the query data structure after a sample buffer has been parsed:
+ * codec data from the active SPS, the IQ matrix, and the rolling buffer
+ * counter. If no picture was produced (e.g. an SPS/PPS-only buffer), a
+ * dummy picture is added so the caller still receives picture parameters.
+ */
+uint32 vbp_populate_query_data_h264(vbp_context *pcontext)
+{
+    vbp_data_h264 *query_data = NULL;
+    struct h264_viddec_parser *parser = NULL;
+
+    parser = (struct h264_viddec_parser *)pcontext->parser_cxt->codec_data;
+    query_data = (vbp_data_h264 *)pcontext->query_data;
+
+    vbp_set_codec_data_h264(parser, query_data->codec_data);
+
+    /* buffer number */
+    query_data->buf_number = buffer_counter;
+
+    /* VAIQMatrixBufferH264 */
+    vbp_set_scaling_list_h264(parser, query_data->IQ_matrix_buf);
+
+    if (query_data->num_pictures > 0)
+    {
+        /*
+         * picture parameter buffer and slice parameter buffer have been populated
+         */
+    }
+    else
+    {
+        /**
+         * add a dummy picture that contains picture parameters parsed
+         * from SPS and PPS.
+         */
+        vbp_add_pic_data_h264(pcontext, 0);
+    }
+    return VBP_OK;
+}
diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.h b/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.h
new file mode 100644
index 0000000..6ed4499
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.h
@@ -0,0 +1,48 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+#ifndef VBP_H264_PARSER_H
+#define VBP_H264_PARSER_H
+
+/*
+ * setup parser's entry points
+ */
+uint32 vbp_init_parser_entries_h264(vbp_context *pcontext);
+
+/*
+ * allocate query data
+ */
+uint32 vbp_allocate_query_data_h264(vbp_context *pcontext);
+
+/*
+ * free query data
+ */
+uint32 vbp_free_query_data_h264(vbp_context *pcontext);
+
+/*
+ * parse initialization data
+ */
+uint32 vbp_parse_init_data_h264(vbp_context *pcontext);
+
+/*
+ * parse start code. Only length-prefixed mode is supported; start-code
+ * prefixed streams are not.
+ */
+uint32 vbp_parse_start_code_h264(vbp_context *pcontext);
+
+/*
+ * process parsing result
+ */
+uint32 vbp_process_parsing_result_h264(vbp_context *pcontext, int list_index);
+
+/*
+ * query parsing result
+ */
+uint32 vbp_populate_query_data_h264(vbp_context *pcontext);
+
+#endif /*VBP_H264_PARSER_H*/
diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_loader.c b/mix_vbp/viddec_fw/fw/parser/vbp_loader.c
new file mode 100644
index 0000000..27a2dd0
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/vbp_loader.c
@@ -0,0 +1,162 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+#include <glib.h>
+
+#include "vbp_loader.h"
+#include "vbp_utils.h"
+
+/**
+ * Create a bitstream-parser context for the given media type and return it
+ * to the caller through hcontext. On any failure *hcontext is left NULL.
+ */
+uint32 vbp_open(uint32 parser_type, Handle *hcontext)
+{
+    if (NULL == hcontext)
+    {
+        return VBP_PARM;
+    }
+
+    /* prepare for failure. */
+    *hcontext = NULL;
+
+    /**
+     * TO DO:
+     * check if vbp context has been created.
+     */
+
+    uint32 error = vbp_utils_create_context(parser_type, (vbp_context **)hcontext);
+    if (VBP_OK != error)
+    {
+        ETRACE("Failed to create context: %d.", error);
+    }
+
+    return error;
+}
+
+/**
+ * Destroy a VBP context previously created by vbp_open.
+ * The handle is validated via the MAGIC_NUMBER identifier before teardown.
+ * Returns VBP_PARM/VBP_INIT for a bad handle, otherwise the status of
+ * vbp_utils_destroy_context (VBP_OK on success).
+ */
+uint32 vbp_close(Handle hcontext)
+{
+    uint32 error;
+
+    if (NULL == hcontext)
+    {
+        return VBP_PARM;
+    }
+
+    vbp_context *pcontext = (vbp_context *)hcontext;
+
+    if (MAGIC_NUMBER != pcontext->identifier)
+    {
+        /* not a valid vbp context. */
+        ETRACE("context is not initialized");
+        return VBP_INIT;
+    }
+    error = vbp_utils_destroy_context(pcontext);
+    if (VBP_OK != error)
+    {
+        ETRACE("Failed to destroy context: %d.", error);
+    }
+
+    return error;
+}
+
+
+/**
+ * Parse one buffer of bitstream data through the context's codec parser.
+ * @param hcontext: handle from vbp_open.
+ * @param data: bitstream buffer (must be non-NULL).
+ * @param size: buffer size in bytes (must be non-zero).
+ * @param init_data_flag: non-zero when the buffer holds configuration data.
+ * @return VBP_OK on success; VBP_PARM/VBP_INIT on invalid input.
+ */
+uint32 vbp_parse(Handle hcontext, uint8 *data, uint32 size, uint8 init_data_flag)
+{
+    if (NULL == hcontext || NULL == data || 0 == size)
+    {
+        ETRACE("Invalid input parameters.");
+        return VBP_PARM;
+    }
+
+    vbp_context *pcontext = (vbp_context *)hcontext;
+    if (MAGIC_NUMBER != pcontext->identifier)
+    {
+        ETRACE("context is not initialized");
+        return VBP_INIT;
+    }
+
+    uint32 error = vbp_utils_parse_buffer(pcontext, data, size, init_data_flag);
+    if (VBP_OK != error)
+    {
+        ETRACE("Failed to parse buffer: %d.", error);
+    }
+    return error;
+}
+
+/**
+ * Retrieve the parsing result of the most recently parsed buffer.
+ * @param hcontext: handle from vbp_open.
+ * @param data: receives a pointer to the codec-specific result blob.
+ * @return VBP_OK on success; VBP_PARM/VBP_INIT on invalid input.
+ */
+uint32 vbp_query(Handle hcontext, void **data)
+{
+    if (NULL == hcontext || NULL == data)
+    {
+        ETRACE("Invalid input parameters.");
+        return VBP_PARM;
+    }
+
+    vbp_context *pcontext = (vbp_context *)hcontext;
+    if (MAGIC_NUMBER != pcontext->identifier)
+    {
+        ETRACE("context is not initialized");
+        return VBP_INIT;
+    }
+
+    uint32 error = vbp_utils_query(pcontext, data);
+    if (VBP_OK != error)
+    {
+        ETRACE("Failed to query parsing result: %d.", error);
+    }
+    return error;
+}
+
+/**
+ * Flush any un-parsed bitstream held by the context.
+ * @param hcontext: handle from vbp_open.
+ * @return VBP_OK on success; VBP_PARM/VBP_INIT on invalid input.
+ */
+uint32 vbp_flush(Handle hcontext)
+{
+    vbp_context *pcontext;
+    uint32 error = VBP_OK;
+
+    if (NULL == hcontext)
+    {
+        ETRACE("Invalid input parameters.");
+        return VBP_PARM;
+    }
+
+    pcontext = (vbp_context *)hcontext;
+
+    if (MAGIC_NUMBER != pcontext->identifier)
+    {
+        ETRACE("context is not initialized");
+        return VBP_INIT;
+    }
+
+    error = vbp_utils_flush(pcontext);
+
+    /* log failures for consistency with vbp_parse/vbp_query/vbp_close,
+     * which all trace their utility-layer errors */
+    if (VBP_OK != error)
+    {
+        ETRACE("Failed to flush parser: %d.", error);
+    }
+
+    return error;
+}
diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_loader.h b/mix_vbp/viddec_fw/fw/parser/vbp_loader.h
new file mode 100644
index 0000000..66169dd
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/vbp_loader.h
@@ -0,0 +1,318 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+#ifndef VBP_LOADER_H
+#define VBP_LOADER_H
+
+#include <va/va.h>
+
+#ifndef TRUE
+#define TRUE 1
+#endif
+
+#ifndef FALSE
+#define FALSE 0
+#endif
+
+
+#ifndef uint8
+typedef unsigned char uint8;
+#endif
+#ifndef uint16
+typedef unsigned short uint16;
+#endif
+#ifndef uint32
+typedef unsigned int uint32;
+#endif
+#ifndef bool
+typedef int bool;
+#endif
+
+typedef void *Handle;
+
+/*
+ * MPEG-4 Part 2 data structures returned by vbp_query for VBP_MPEG4.
+ */
+
+/* Codec-level data parsed from the sequence headers. */
+typedef struct _vbp_codec_data_mp42
+{
+    uint8 profile_and_level_indication;
+} vbp_codec_data_mp42;
+
+/* One slice (video packet) of an MPEG-4 Part 2 picture. */
+typedef struct _vbp_slice_data_mp42
+{
+    uint8* buffer_addr;   /* base of the buffer holding the slice */
+    uint32 slice_offset;  /* byte offset of the slice within buffer_addr */
+    uint32 slice_size;    /* slice size in bytes */
+    VASliceParameterBufferMPEG4 slice_param;
+} vbp_slice_data_mp42;
+
+/* One parsed picture: libVA parameters plus its slice array. */
+typedef struct _vbp_picture_data_mp42
+{
+    uint8 vop_coded;  /* whether the VOP carries coded data */
+    VAPictureParameterBufferMPEG4 picture_param;
+    VAIQMatrixBufferMPEG4 iq_matrix_buffer;
+
+    uint32 number_slices;
+    vbp_slice_data_mp42 *slice_data;  /* array of number_slices entries */
+
+} vbp_picture_data_mp42;
+
+/* Top-level MPEG-4 Part 2 query result. */
+typedef struct _vbp_data_mp42
+{
+    vbp_codec_data_mp42 codec_data;
+
+    uint32 number_pictures;
+    vbp_picture_data_mp42 *picture_data;  /* array of number_pictures entries */
+
+} vbp_data_mp42;
+
+/*
+ * H.264 data structures returned by vbp_query for VBP_H264.
+ */
+
+/* Codec-level data extracted from the active SPS/PPS. */
+typedef struct _vbp_codec_data_h264
+{
+    uint8 pic_parameter_set_id;
+    uint8 seq_parameter_set_id;
+
+    uint8 profile_idc;
+    uint8 level_idc;
+    uint8 constraint_set1_flag;
+
+    uint8 num_ref_frames;
+    uint8 gaps_in_frame_num_value_allowed_flag;
+
+    uint8 frame_mbs_only_flag;
+    uint8 mb_adaptive_frame_field_flag;
+
+    /* coded frame dimensions in pixels */
+    int frame_width;
+    int frame_height;
+
+    /* frame cropping rectangle, valid when frame_cropping_flag is set */
+    uint8 frame_cropping_flag;
+    int frame_crop_rect_left_offset;
+    int frame_crop_rect_right_offset;
+    int frame_crop_rect_top_offset;
+    int frame_crop_rect_bottom_offset;
+
+    uint8 vui_parameters_present_flag;
+    /* aspect ratio (from VUI) */
+    uint8 aspect_ratio_info_present_flag;
+    uint8 aspect_ratio_idc;
+    uint16 sar_width;
+    uint16 sar_height;
+
+    /* video format (from VUI) */
+    uint8 video_signal_type_present_flag;
+    uint8 video_format;
+
+} vbp_codec_data_h264;
+
+/* One H.264 slice: location within the sample buffer plus libVA params. */
+typedef struct _vbp_slice_data_h264
+{
+    uint8* buffer_addr;
+
+    uint32 slice_offset; /* slice data offset */
+
+    uint32 slice_size; /* slice data size */
+
+    VASliceParameterBufferH264 slc_parms;
+
+} vbp_slice_data_h264;
+
+/* One parsed H.264 picture: its parameter buffer and slice array. */
+typedef struct _vbp_picture_data_h264
+{
+    VAPictureParameterBufferH264* pic_parms;
+
+    uint32 num_slices;
+
+    vbp_slice_data_h264* slc_data; /* array of num_slices entries */
+
+} vbp_picture_data_h264;
+
+/* Top-level H.264 query result. */
+typedef struct _vbp_data_h264
+{
+    /* rolling counter of buffers sent by vbp_parse */
+    uint32 buf_number;
+
+    uint32 num_pictures;
+
+    vbp_picture_data_h264* pic_data; /* array of num_pictures entries */
+
+    /**
+     * do we need to send matrix to VA for each picture? If not, we need
+     * a flag indicating whether it is updated.
+     */
+    VAIQMatrixBufferH264* IQ_matrix_buf;
+
+    vbp_codec_data_h264* codec_data;
+
+} vbp_data_h264;
+
+/*
+ * VC-1 data structures returned by vbp_query for VBP_VC1.
+ */
+
+/* Codec-level data parsed from the sequence and entry-point layers. */
+typedef struct _vbp_codec_data_vc1
+{
+    /* Sequence layer. */
+    uint8 PROFILE;
+    uint8 LEVEL;
+    uint8 POSTPROCFLAG;
+    uint8 PULLDOWN;
+    uint8 INTERLACE;
+    uint8 TFCNTRFLAG;
+    uint8 FINTERPFLAG;
+    uint8 PSF;
+
+    /* Entry point layer. */
+    uint8 BROKEN_LINK;
+    uint8 CLOSED_ENTRY;
+    uint8 PANSCAN_FLAG;
+    uint8 REFDIST_FLAG;
+    uint8 LOOPFILTER;
+    uint8 FASTUVMC;
+    uint8 EXTENDED_MV;
+    uint8 DQUANT;
+    uint8 VSTRANSFORM;
+    uint8 OVERLAP;
+    uint8 QUANTIZER;
+    uint16 CODED_WIDTH;
+    uint16 CODED_HEIGHT;
+    uint8 EXTENDED_DMV;
+    uint8 RANGE_MAPY_FLAG;
+    uint8 RANGE_MAPY;
+    uint8 RANGE_MAPUV_FLAG;
+    uint8 RANGE_MAPUV;
+
+    /* Others. */
+    uint8 RANGERED;
+    uint8 MAXBFRAMES;
+    uint8 MULTIRES;
+    uint8 SYNCMARKER;
+    uint8 RNDCTRL;
+    uint8 REFDIST;
+    uint16 widthMB;
+    uint16 heightMB;
+
+    uint8 INTCOMPFIELD;
+    uint8 LUMSCALE2;
+    uint8 LUMSHIFT2;
+} vbp_codec_data_vc1;
+
+/* One VC-1 slice within the sample buffer. */
+typedef struct _vbp_slice_data_vc1
+{
+    uint8 *buffer_addr;
+    uint32 slice_offset;
+    uint32 slice_size;
+    VASliceParameterBufferVC1 slc_parms; /* pointer to slice parms */
+} vbp_slice_data_vc1;
+
+/* One parsed VC-1 picture: parameters, bitplanes, and slices. */
+typedef struct _vbp_picture_data_vc1
+{
+    uint32 picture_is_skipped; /* VC1_PTYPE_SKIPPED is PTYPE is skipped. */
+    VAPictureParameterBufferVC1 *pic_parms; /* current parsed picture header */
+    uint32 size_bitplanes; /* based on number of MBs */
+    uint8 *packed_bitplanes; /* contains up to three bitplanes packed for libVA */
+    uint32 num_slices; /* number of slices. always at least one */
+    vbp_slice_data_vc1 *slc_data; /* pointer to array of slice data */
+} vbp_picture_data_vc1;
+
+/* Top-level VC-1 query result. */
+typedef struct _vbp_data_vc1
+{
+    uint32 buf_number; /* rolling counter of buffers sent by vbp_parse */
+    vbp_codec_data_vc1 *se_data; /* parsed SH/EPs */
+
+    uint32 num_pictures;
+
+    vbp_picture_data_vc1* pic_data;
+} vbp_data_vc1;
+
+/* VC-1 picture coding types. */
+enum _picture_type
+{
+    VC1_PTYPE_I,
+    VC1_PTYPE_P,
+    VC1_PTYPE_B,
+    VC1_PTYPE_BI,
+    VC1_PTYPE_SKIPPED
+};
+
+/* Status codes returned by all vbp_* entry points (VBP_OK == 0). */
+enum _vbp_parser_error
+{
+    VBP_OK,
+    VBP_TYPE,
+    VBP_LOAD,
+    VBP_UNLOAD,
+    VBP_INIT,
+    VBP_DATA,
+    VBP_DONE,
+    VBP_GLIB,
+    VBP_MEM,
+    VBP_PARM,
+    VBP_CXT,
+    VBP_IMPL
+};
+
+/* Media types accepted by vbp_open. */
+enum _vbp_parser_type
+{
+    VBP_VC1,
+    VBP_MPEG2,
+    VBP_MPEG4,
+    VBP_H264
+};
+
+/*
+ * open video bitstream parser to parse a specific media type.
+ * @param parser_type: one of the types defined in #vbp_parser_type
+ * @param hcontext: pointer to hold returned VBP context handle.
+ * @return VBP_OK on success, anything else on failure.
+ *
+ */
+uint32 vbp_open(uint32 parser_type, Handle *hcontext);
+
+/*
+ * close video bitstream parser.
+ * @param hcontext: VBP context handle.
+ * @returns VBP_OK on success, anything else on failure.
+ *
+ */
+uint32 vbp_close(Handle hcontext);
+
+/*
+ * parse bitstream.
+ * @param hcontext: handle to VBP context.
+ * @param data: pointer to bitstream buffer.
+ * @param size: size of bitstream buffer.
+ * @param init_flag: 1 if buffer contains bitstream configuration data, 0 otherwise.
+ * @return VBP_OK on success, anything else on failure.
+ *
+ */
+uint32 vbp_parse(Handle hcontext, uint8 *data, uint32 size, uint8 init_data_flag);
+
+/*
+ * query parsing result.
+ * @param hcontext: handle to VBP context.
+ * @param data: pointer to hold a data blob that contains parsing result.
+ * Structure of data blob is determined by the media type.
+ * @return VBP_OK on success, anything else on failure.
+ *
+ */
+uint32 vbp_query(Handle hcontext, void **data);
+
+
+/*
+ * flush any un-parsed bitstream.
+ * @param hcontext: handle to VBP context.
+ * @returns VBP_OK on success, anything else on failure.
+ *
+ */
+uint32 vbp_flush(Handle hcontent);
+
+#endif /* VBP_LOADER_H */
diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c b/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c
new file mode 100644
index 0000000..87beca4
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c
@@ -0,0 +1,1277 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+
+#include <glib.h>
+#include <dlfcn.h>
+
+#include <string.h>
+#include "vbp_loader.h"
+#include "vbp_utils.h"
+#include "vbp_mp42_parser.h"
+#include "../codecs/mp4/parser/viddec_mp4_parse.h"
+
+#define MIX_VBP_COMP "mixvbp"
+
+/*
+ * Some divX avi files contains 2 frames in one gstbuffer.
+ */
+#define MAX_NUM_PICTURES_MP42 8
+
+uint32 vbp_get_sc_pos_mp42(uint8 *buf, uint32 length,
+ uint32* sc_phase, uint32 *sc_end_pos, uint8 *is_normal_sc);
+
+void vbp_on_vop_mp42(vbp_context *pcontext, int list_index);
+void vbp_on_vop_svh_mp42(vbp_context *pcontext, int list_index);
+void vbp_dump_query_data(vbp_context *pcontext, int list_index);
+
+uint32 vbp_process_slices_mp42(vbp_context *pcontext, int list_index);
+uint32 vbp_process_slices_svh_mp42(vbp_context *pcontext, int list_index);
+
+/* This is copied from the DHG mp42 parser */
+static inline mp4_Status_t
+vbp_sprite_trajectory_mp42(void *parent, mp4_VideoObjectLayer_t *vidObjLay,
+ mp4_VideoObjectPlane_t *vidObjPlane);
+
+/* This is copied from the DHG mp42 parser */
+static inline int32_t vbp_sprite_dmv_length_mp42(void * parent,
+ int32_t *dmv_length);
+
+/**
+ * Resolve the MPEG-4 parser entry points from the already-loaded parser
+ * library (pcontext->fd_parser) into pcontext->parser_ops.
+ *
+ * @param pcontext: VBP context holding the dlopen handle and the ops table.
+ * @return VBP_OK on success, VBP_PARM if the ops table is missing,
+ *         VBP_LOAD if any required symbol cannot be resolved.
+ */
+uint32 vbp_init_parser_entries_mp42( vbp_context *pcontext)
+{
+    if (NULL == pcontext->parser_ops)
+    {
+        /* absolutely impossible, just sanity check */
+        return VBP_PARM;
+    }
+
+    /* Each failure below names the unresolved symbol so that load
+     * problems are diagnosable from the log alone. */
+    pcontext->parser_ops->init = dlsym(pcontext->fd_parser, "viddec_mp4_init");
+    if (pcontext->parser_ops->init == NULL)
+    {
+        ETRACE ("Failed to resolve symbol viddec_mp4_init.");
+        return VBP_LOAD;
+    }
+
+    pcontext->parser_ops->parse_sc = dlsym(pcontext->fd_parser, "viddec_parse_sc_mp4");
+    if (pcontext->parser_ops->parse_sc == NULL)
+    {
+        ETRACE ("Failed to resolve symbol viddec_parse_sc_mp4.");
+        return VBP_LOAD;
+    }
+
+    pcontext->parser_ops->parse_syntax = dlsym(pcontext->fd_parser, "viddec_mp4_parse");
+    if (pcontext->parser_ops->parse_syntax == NULL)
+    {
+        ETRACE ("Failed to resolve symbol viddec_mp4_parse.");
+        return VBP_LOAD;
+    }
+
+    pcontext->parser_ops->get_cxt_size = dlsym(pcontext->fd_parser, "viddec_mp4_get_context_size");
+    if (pcontext->parser_ops->get_cxt_size == NULL)
+    {
+        ETRACE ("Failed to resolve symbol viddec_mp4_get_context_size.");
+        return VBP_LOAD;
+    }
+
+    pcontext->parser_ops->is_wkld_done = dlsym(pcontext->fd_parser, "viddec_mp4_wkld_done");
+    if (pcontext->parser_ops->is_wkld_done == NULL)
+    {
+        ETRACE ("Failed to resolve symbol viddec_mp4_wkld_done.");
+        return VBP_LOAD;
+    }
+
+    return VBP_OK;
+}
+
+
+/*
+ * Parse codec configuration data (codec_data passed by gstreamer).
+ * For MPEG-4 this is handled identically to in-band data: the buffer is
+ * pre-scanned for start codes.
+ *
+ * @param pcontext: VBP context.
+ * @return status of vbp_parse_start_code_mp42() (previously the status
+ *         was silently discarded and VBP_OK returned unconditionally).
+ */
+uint32 vbp_parse_init_data_mp42(vbp_context *pcontext)
+{
+    uint32 ret;
+
+    VTRACE ("begin\n");
+    ret = vbp_parse_start_code_mp42(pcontext);
+    VTRACE ("end\n");
+
+    return ret;
+}
+
+/*
+ * Callback invoked after the low-level parser has consumed one start-code
+ * delimited item from the list.  Depending on the start code seen, updates
+ * the client-visible query data (profile/level) and, on a VOP, emits one
+ * picture via vbp_on_vop_mp42()/vbp_on_vop_svh_mp42().
+ *
+ * @param pcontext: VBP context.
+ * @param list_index: index of the item just parsed in parser_cxt->list.
+ * @return VBP_OK always.
+ */
+uint32 vbp_process_parsing_result_mp42(vbp_context *pcontext, int list_index)
+{
+ vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data;
+ viddec_mp4_parser_t *parser =
+ (viddec_mp4_parser_t *) &(pcontext->parser_cxt->codec_data[0]);
+
+ uint8 is_svh = 0;
+ uint32 current_sc = parser->current_sc;
+ /* cur_sc_prefix was recorded during the pre-scan
+  * (vbp_parse_start_code_mp42): non-zero means a normal 00 00 01 prefix,
+  * zero means a short (H.263 style) start code. */
+ is_svh = parser->cur_sc_prefix ? false : true;
+
+ VTRACE ("begin\n");
+
+ VTRACE ("current_sc = 0x%x profile_and_level_indication = 0x%x\n",
+ parser->current_sc, parser->info.profile_and_level_indication);
+
+ if (!is_svh)
+ {
+ /* remove prefix from current_sc */
+ current_sc &= 0x0FF;
+ switch (current_sc)
+ {
+ case MP4_SC_VISUAL_OBJECT_SEQUENCE:
+ VTRACE ("MP4_SC_VISUAL_OBJECT_SEQUENCE\n");
+
+ query_data->codec_data.profile_and_level_indication
+ = parser->info.profile_and_level_indication;
+
+ break;
+ case MP4_SC_VIDEO_OBJECT_PLANE:
+ VTRACE ("MP4_SC_VIDEO_OBJECT_PLANE\n");
+ vbp_on_vop_mp42(pcontext, list_index);
+ break;
+ default: {
+ /* Video Object Layer start codes also carry profile/level. */
+ if ((current_sc >= MP4_SC_VIDEO_OBJECT_LAYER_MIN) && (current_sc
+ <= MP4_SC_VIDEO_OBJECT_LAYER_MAX)) {
+ query_data->codec_data.profile_and_level_indication
+ = parser->info.profile_and_level_indication;
+ } else if (current_sc <= MP4_SC_VIDEO_OBJECT_MAX) {
+ /* A video-object start code may begin a short-video-header
+  * picture; emit it only if the parser actually saw one. */
+ if (parser->sc_seen == MP4_SC_SEEN_SVH) {
+ VTRACE ("parser->sc_seen == MP4_SC_SEEN_SVH\n");
+ vbp_on_vop_svh_mp42(pcontext, list_index);
+ }
+ }
+ }
+ break;
+ }
+
+ } else {
+ /* Short start code: short-video-header (H.263 style) picture. */
+ if (parser->sc_seen == MP4_SC_SEEN_SVH) {
+ VTRACE ("parser->sc_seen == MP4_SC_SEEN_SVH\n");
+ vbp_on_vop_svh_mp42(pcontext, list_index);
+ }
+ }
+
+ VTRACE ("End\n");
+
+ return VBP_OK;
+}
+
+/*
+ * Pre-scan the sample buffer in parser_cxt->parse_cubby and fill
+ * parser_cxt->list with one item per start-code delimited chunk.  Each
+ * item's stpos points at the first byte of the start-code prefix
+ * (sc_end_pos - 3) and the previous item is closed at the same boundary.
+ * If no start code is found at all, the whole buffer becomes one item.
+ *
+ * @param pcontext: VBP context.
+ * @return VBP_OK always.
+ */
+
+uint32 vbp_parse_start_code_mp42(vbp_context *pcontext)
+{
+ viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
+ /*viddec_parser_ops_t *func = pcontext->parser_ops; */
+ uint8 *buf = NULL;
+ uint32 size = 0;
+ uint32 sc_phase = 0;
+ uint32 sc_end_pos = -1;
+
+ uint32 bytes_parsed = 0;
+
+ viddec_mp4_parser_t *pinfo = NULL;
+
+ vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data;
+ /* reset query data for the new sample buffer */
+ query_data->number_pictures = 0;
+
+ /* emulation prevention byte is always present */
+ cxt->getbits.is_emul_reqd = 1;
+
+ cxt->list.num_items = 0;
+ cxt->list.data[0].stpos = 0;
+ cxt->list.data[0].edpos = cxt->parse_cubby.size;
+
+ buf = cxt->parse_cubby.buf;
+ size = cxt->parse_cubby.size;
+
+ pinfo = (viddec_mp4_parser_t *) &(cxt->codec_data[0]);
+
+ uint8 is_normal_sc = 0;
+
+ uint32 found_sc = 0;
+
+ VTRACE ("begin cxt->parse_cubby.size= %d\n", size);
+
+ while (1) {
+
+ sc_phase = 0;
+
+ found_sc = vbp_get_sc_pos_mp42(buf + bytes_parsed, size
+ - bytes_parsed, &sc_phase, &sc_end_pos, &is_normal_sc);
+
+ if (found_sc) {
+
+ VTRACE ("sc_end_pos = %d\n", sc_end_pos);
+
+ /* open a new item at the start-code prefix and close the
+  * previous item at the same boundary */
+ cxt->list.data[cxt->list.num_items].stpos = bytes_parsed
+ + sc_end_pos - 3;
+ if (cxt->list.num_items != 0) {
+ cxt->list.data[cxt->list.num_items - 1].edpos = bytes_parsed
+ + sc_end_pos - 3;
+ }
+ bytes_parsed += sc_end_pos;
+
+ cxt->list.num_items++;
+ /* remember whether a normal (00 00 01) or short start code
+  * introduced this item; read later by
+  * vbp_process_parsing_result_mp42() */
+ pinfo->cur_sc_prefix = is_normal_sc;
+
+ } else {
+
+ /* no more start codes: close the last item at end of buffer */
+ if (cxt->list.num_items != 0) {
+ cxt->list.data[cxt->list.num_items - 1].edpos
+ = cxt->parse_cubby.size;
+ break;
+ } else {
+
+ VTRACE ("I didn't find any sc in cubby buffer! The size of cubby is %d\n",
+ size);
+
+ cxt->list.num_items = 1;
+ cxt->list.data[0].stpos = 0;
+ cxt->list.data[0].edpos = cxt->parse_cubby.size;
+ break;
+ }
+ }
+ }
+
+ return VBP_OK;
+}
+
+/*
+ * Finalize query data after a sample has been parsed.
+ * All per-picture output is filled incrementally by vbp_on_vop_mp42() /
+ * vbp_on_vop_svh_mp42() while parsing, so there is nothing left to do.
+ * (Removed a dead "#if 0" debug call whose argument list no longer
+ * matched vbp_dump_query_data()'s two-parameter signature.)
+ *
+ * @param pcontext: VBP context.
+ * @return VBP_OK always.
+ */
+uint32 vbp_populate_query_data_mp42(vbp_context *pcontext)
+{
+    return VBP_OK;
+}
+
+/*
+ * Copy codec-level data (profile/level indication) from the low-level
+ * parser context into the query data returned to the client.
+ */
+void vbp_fill_codec_data(vbp_context *pcontext, int list_index)
+{
+    vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data;
+    viddec_mp4_parser_t *parser =
+        (viddec_mp4_parser_t *) &(pcontext->parser_cxt->codec_data[0]);
+
+    query_data->codec_data.profile_and_level_indication =
+        parser->info.profile_and_level_indication;
+}
+
+/*
+ * Fill slice data for the current picture, dispatching on bitstream
+ * flavor: short video header (H.263 style) vs. regular MPEG-4 VOP.
+ */
+void vbp_fill_slice_data(vbp_context *pcontext, int list_index)
+{
+    viddec_mp4_parser_t *parser =
+        (viddec_mp4_parser_t *) &(pcontext->parser_cxt->codec_data[0]);
+
+    if (parser->info.VisualObject.VideoObject.short_video_header)
+    {
+        vbp_process_slices_svh_mp42(pcontext, list_index);
+    }
+    else
+    {
+        vbp_process_slices_mp42(pcontext, list_index);
+    }
+}
+
+/*
+ * Populate the VAPictureParameterBufferMPEG4 for the picture slot at
+ * query_data->number_pictures from the parser's VOL/VOP state.  The
+ * reference-picture surfaces are intentionally left VA_INVALID_SURFACE;
+ * the decoder component fills them in later.
+ *
+ * @param pcontext: VBP context.
+ * @param list_index: index of the current list item (unused here).
+ */
+void vbp_fill_picture_param(vbp_context *pcontext, int list_index)
+{
+
+ viddec_mp4_parser_t *parser =
+ (viddec_mp4_parser_t *) &(pcontext->parser_cxt->codec_data[0]);
+ vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data;
+
+ vbp_picture_data_mp42 *picture_data = NULL;
+ VAPictureParameterBufferMPEG4 *picture_param = NULL;
+
+ picture_data = &(query_data->picture_data[query_data->number_pictures]);
+
+ picture_param = &(picture_data->picture_param);
+
+ uint8 idx = 0;
+
+ picture_data->vop_coded
+ = parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_coded;
+ VTRACE ("vop_coded = %d\n", picture_data->vop_coded);
+
+ /*
+ * fill picture_param
+ */
+
+ /* NOTE: for short video header, the parser saves vop_width and vop_height
+ * to VOL->video_object_layer_width and VOL->video_object_layer_height
+ */
+ picture_param->vop_width
+ = parser->info.VisualObject.VideoObject.video_object_layer_width;
+ picture_param->vop_height
+ = parser->info.VisualObject.VideoObject.video_object_layer_height;
+
+ /* reference surfaces are assigned by the decoder, not the parser */
+ picture_param->forward_reference_picture = VA_INVALID_SURFACE;
+ picture_param->backward_reference_picture = VA_INVALID_SURFACE;
+
+ /*
+ * VAPictureParameterBufferMPEG4::vol_fields
+ */
+ picture_param->vol_fields.bits.short_video_header
+ = parser->info.VisualObject.VideoObject.short_video_header;
+ picture_param->vol_fields.bits.chroma_format
+ = parser->info.VisualObject.VideoObject.VOLControlParameters.chroma_format;
+
+ /* TODO: find out why testsuite always set this value to be 0 */
+ // picture_param->vol_fields.bits.chroma_format = 0;
+
+ picture_param->vol_fields.bits.interlaced
+ = parser->info.VisualObject.VideoObject.interlaced;
+ picture_param->vol_fields.bits.obmc_disable
+ = parser->info.VisualObject.VideoObject.obmc_disable;
+ picture_param->vol_fields.bits.sprite_enable
+ = parser->info.VisualObject.VideoObject.sprite_enable;
+ picture_param->vol_fields.bits.sprite_warping_accuracy
+ = parser->info.VisualObject.VideoObject.sprite_info.sprite_warping_accuracy;
+ picture_param->vol_fields.bits.quant_type
+ = parser->info.VisualObject.VideoObject.quant_type;
+ picture_param->vol_fields.bits.quarter_sample
+ = parser->info.VisualObject.VideoObject.quarter_sample;
+ picture_param->vol_fields.bits.data_partitioned
+ = parser->info.VisualObject.VideoObject.data_partitioned;
+ picture_param->vol_fields.bits.reversible_vlc
+ = parser->info.VisualObject.VideoObject.reversible_vlc;
+ picture_param->vol_fields.bits.resync_marker_disable
+ = parser->info.VisualObject.VideoObject.resync_marker_disable;
+
+ picture_param->no_of_sprite_warping_points
+ = parser->info.VisualObject.VideoObject.sprite_info.no_of_sprite_warping_points;
+
+ /* GMC sprite trajectory (up to 3 warping points) */
+ for (idx = 0; idx < 3; idx++) {
+ picture_param->sprite_trajectory_du[idx]
+ = parser->info.VisualObject.VideoObject.VideoObjectPlane.warping_mv_code_du[idx];
+ picture_param->sprite_trajectory_dv[idx]
+ = parser->info.VisualObject.VideoObject.VideoObjectPlane.warping_mv_code_dv[idx];
+ }
+
+ picture_param->quant_precision
+ = parser->info.VisualObject.VideoObject.quant_precision;
+
+ /*
+ * VAPictureParameterBufferMPEG4::vop_fields
+ */
+
+ /* the SVH parser stores the coding type in VideoObjectPlaneH263 */
+ if (!parser->info.VisualObject.VideoObject.short_video_header) {
+ picture_param->vop_fields.bits.vop_coding_type
+ = parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_coding_type;
+ } else {
+ picture_param->vop_fields.bits.vop_coding_type
+ = parser->info.VisualObject.VideoObject.VideoObjectPlaneH263.picture_coding_type;
+ }
+
+ /* TODO:
+ * fill picture_param->vop_fields.bits.backward_reference_vop_coding_type
+ * This shall be done in mixvideoformat_mp42. See M42 spec 7.6.7
+ */
+
+ if (picture_param->vop_fields.bits.vop_coding_type != MP4_VOP_TYPE_B) {
+ picture_param->vop_fields.bits.backward_reference_vop_coding_type
+ = picture_param->vop_fields.bits.vop_coding_type;
+ }
+
+ picture_param->vop_fields.bits.vop_rounding_type
+ = parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_rounding_type;
+ picture_param->vop_fields.bits.intra_dc_vlc_thr
+ = parser->info.VisualObject.VideoObject.VideoObjectPlane.intra_dc_vlc_thr;
+ picture_param->vop_fields.bits.top_field_first
+ = parser->info.VisualObject.VideoObject.VideoObjectPlane.top_field_first;
+ picture_param->vop_fields.bits.alternate_vertical_scan_flag
+ = parser->info.VisualObject.VideoObject.VideoObjectPlane.alternate_vertical_scan_flag;
+
+ picture_param->vop_fcode_forward
+ = parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_fcode_forward;
+ picture_param->vop_fcode_backward
+ = parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_fcode_backward;
+ picture_param->vop_time_increment_resolution
+ = parser->info.VisualObject.VideoObject.vop_time_increment_resolution;
+
+ /* short header related */
+ picture_param->num_gobs_in_vop
+ = parser->info.VisualObject.VideoObject.VideoObjectPlaneH263.num_gobs_in_vop;
+ picture_param->num_macroblocks_in_gob
+ = parser->info.VisualObject.VideoObject.VideoObjectPlaneH263.num_macroblocks_in_gob;
+
+ /* for direct mode prediction */
+ picture_param->TRB = parser->info.VisualObject.VideoObject.TRB;
+ picture_param->TRD = parser->info.VisualObject.VideoObject.TRD;
+
+#if 0
+ printf(
+ "parser->info.VisualObject.VideoObject.reduced_resolution_vop_enable = %d\n",
+ parser->info.VisualObject.VideoObject.reduced_resolution_vop_enable);
+
+ printf("parser->info.VisualObject.VideoObject.data_partitioned = %d\n",
+ parser->info.VisualObject.VideoObject.data_partitioned);
+
+ printf(
+ "####parser->info.VisualObject.VideoObject.resync_marker_disable = %d####\n",
+ parser->info.VisualObject.VideoObject.resync_marker_disable);
+#endif
+}
+
+/*
+ * Copy the VOL quantization matrices (64 bytes each) into the VA-API IQ
+ * matrix buffer of the picture currently being assembled.
+ */
+void vbp_fill_iq_matrix_buffer(vbp_context *pcontext, int list_index)
+{
+    viddec_mp4_parser_t *parser =
+        (viddec_mp4_parser_t *) &(pcontext->parser_cxt->codec_data[0]);
+    vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data;
+
+    mp4_VOLQuant_mat_t *quant_mat_info =
+        &(parser->info.VisualObject.VideoObject.quant_mat_info);
+
+    VAIQMatrixBufferMPEG4 *iq_matrix =
+        &(query_data->picture_data[query_data->number_pictures].iq_matrix_buffer);
+
+    iq_matrix->load_intra_quant_mat = quant_mat_info->load_intra_quant_mat;
+    iq_matrix->load_non_intra_quant_mat =
+        quant_mat_info->load_nonintra_quant_mat;
+
+    memcpy(iq_matrix->intra_quant_mat, quant_mat_info->intra_quant_mat, 64);
+    memcpy(iq_matrix->non_intra_quant_mat,
+        quant_mat_info->nonintra_quant_mat, 64);
+}
+
+/*
+ * Emit one decoded VOP: fill codec data, picture parameters, IQ matrix
+ * and slice data for the current picture slot, then advance the count.
+ */
+void vbp_on_vop_mp42(vbp_context *pcontext, int list_index)
+{
+    vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data;
+
+    vbp_fill_codec_data(pcontext, list_index);
+    vbp_fill_picture_param(pcontext, list_index);
+    vbp_fill_iq_matrix_buffer(pcontext, list_index);
+    vbp_fill_slice_data(pcontext, list_index);
+
+    query_data->number_pictures += 1;
+}
+
+/*
+ * Emit one short-video-header (H.263 style) picture.  The work is
+ * identical to the regular VOP path -- vbp_fill_slice_data() already
+ * dispatches on short_video_header -- so delegate to vbp_on_vop_mp42()
+ * instead of duplicating its body.
+ */
+void vbp_on_vop_svh_mp42(vbp_context *pcontext, int list_index)
+{
+    vbp_on_vop_mp42(pcontext, list_index);
+}
+
+/*
+ * Scan buf for the next MPEG-4 start code (00 00 01 xx, or the short
+ * H.263-style start code whose third byte matches
+ * SHORT_THIRD_STARTCODE_BYTE in its top 6 bits).
+ *
+ * @param buf: buffer to scan.
+ * @param length: number of bytes available in buf.
+ * @param sc_phase: in/out scan phase (number of prefix bytes matched).
+ * @param sc_end_pos: out, offset one past the start code when found.
+ * @param is_normal_sc: out, non-zero for a normal 00 00 01 start code,
+ *        zero for a short start code.
+ * @return 1 when a start code was found, 0 otherwise.
+ *
+ * NOTE(review): the alignment tests cast a pointer to uint32; on an LP64
+ * build this truncates the pointer (only the low bits are used, so the
+ * & 0x3 result is still correct, but uintptr_t would be cleaner) -- and
+ * the word loads assume the platform tolerates the resulting access.
+ */
+uint32 vbp_get_sc_pos_mp42(
+ uint8 *buf,
+ uint32 length,
+ uint32* sc_phase,
+ uint32 *sc_end_pos,
+ uint8 *is_normal_sc)
+{
+ uint8 *ptr = buf;
+ uint32 size;
+ uint32 data_left = 0, phase = 0, ret = 0;
+ size = 0;
+
+ data_left = length;
+ phase = *sc_phase;
+ *sc_end_pos = -1;
+
+ /* parse until there is more data and start code not found */
+ while ((data_left > 0) && (phase < 3)) {
+ /* Check if we are byte aligned & phase=0, if thats the case we can check
+ work at a time instead of byte*/
+ if (((((uint32) ptr) & 0x3) == 0) && (phase == 0)) {
+ while (data_left > 3) {
+ uint32 data;
+ char mask1 = 0, mask2 = 0;
+
+ data = *((uint32 *) ptr);
+#ifndef MFDBIGENDIAN
+ data = SWAP_WORD(data);
+#endif
+ mask1 = (FIRST_STARTCODE_BYTE != (data & SC_BYTE_MASK0));
+ mask2 = (FIRST_STARTCODE_BYTE != (data & SC_BYTE_MASK1));
+ /* If second byte and fourth byte are not zero's then we cannot have a start code here as we need
+ two consecutive zero bytes for a start code pattern */
+ if (mask1 && mask2) {/* Success so skip 4 bytes and start over */
+ ptr += 4;
+ size += 4;
+ data_left -= 4;
+ continue;
+ } else {
+ break;
+ }
+ }
+ }
+
+ /* At this point either data is not on a word boundary or phase > 0 or On a word boundary but we detected
+ two zero bytes in the word so we look one byte at a time*/
+ if (data_left > 0) {
+ if (*ptr == FIRST_STARTCODE_BYTE) {/* Phase can be 3 only if third start code byte is found */
+ phase++;
+ ptr++;
+ size++;
+ data_left--;
+ if (phase > 2) {
+ /* more than two zero bytes: stay in phase 2 and skip
+  * whole zero words when aligned */
+ phase = 2;
+
+ if ((((uint32) ptr) & 0x3) == 0) {
+ while (data_left > 3) {
+ if (*((uint32 *) ptr) != 0) {
+ break;
+ }
+ ptr += 4;
+ size += 4;
+ data_left -= 4;
+ }
+ }
+ }
+ } else {
+ uint8 normal_sc = 0, short_sc = 0;
+ if (phase == 2) {
+ normal_sc = (*ptr == THIRD_STARTCODE_BYTE);
+ short_sc = (SHORT_THIRD_STARTCODE_BYTE == (*ptr & 0xFC));
+
+ VTRACE ("short_sc = %d\n", short_sc);
+
+ *is_normal_sc = normal_sc;
+ }
+
+ if (!(normal_sc | short_sc)) {
+ /* non-zero byte that is not a start code: restart */
+ phase = 0;
+ } else {/* Match for start code so update context with byte position */
+ *sc_end_pos = size;
+ phase = 3;
+
+ if (normal_sc) {
+ } else {
+ /* For short start code since start code is in one nibble just return at this point */
+ phase += 1;
+ ret = 1;
+ break;
+ }
+ }
+ ptr++;
+ size++;
+ data_left--;
+ }
+ }
+ }
+ /* normal start code: consume the fourth (type) byte */
+ if ((data_left > 0) && (phase == 3)) {
+ (*sc_end_pos)++;
+ phase++;
+ ret = 1;
+ }
+ *sc_phase = phase;
+ /* Return SC found only if phase is 4, else always success */
+ return ret;
+}
+
+/*
+ * Number of bits needed to encode macroblock numbers 0..numOfMbs-1
+ * (ceil(log2(numOfMbs)), minimum 1).
+ */
+uint32 vbp_macroblock_number_length_mp42(uint32 numOfMbs)
+{
+    uint32 bits = 0;
+    uint32 value = numOfMbs - 1;
+
+    do
+    {
+        bits++;
+        value >>= 1;
+    } while (value != 0);
+
+    return bits;
+}
+
+/*
+ * Parse an MPEG-4 video_packet_header() that follows a resync marker.
+ *
+ * @param parent: parser-manager handle used by viddec_pm_get_bits().
+ * @param parser_cxt: MPEG-4 parser context providing VOL/VOP state.
+ * @param quant_scale: out, quant_scale field of the header.
+ * @param macroblock_number: out, macroblock_number field of the header.
+ * @return MP4_STATUS_OK on success, MP4_STATUS_NOTSUPPORT for shapes or
+ *         tools the HW path does not support, parse error otherwise.
+ */
+mp4_Status_t vbp_video_packet_header_mp42(
+    void *parent,
+    viddec_mp4_parser_t *parser_cxt,
+    uint16_t *quant_scale,
+    uint32 *macroblock_number)
+{
+
+    mp4_Status_t ret = MP4_STATUS_OK;
+    mp4_Info_t *pInfo = &(parser_cxt->info);
+    mp4_VideoObjectLayer_t *vidObjLay = &(pInfo->VisualObject.VideoObject);
+    mp4_VideoObjectPlane_t *vidObjPlane =
+        &(pInfo->VisualObject.VideoObject.VideoObjectPlane);
+
+    uint32 code = 0;
+    int32_t getbits = 0;
+
+    uint16_t _quant_scale = 0;
+    uint32 _macroblock_number = 0;
+    uint32 header_extension_codes = 0;
+    uint8 vop_coding_type = vidObjPlane->vop_coding_type;
+
+    do {
+        if (vidObjLay->video_object_layer_shape != MP4_SHAPE_TYPE_RECTANGULAR) {
+            ret = MP4_STATUS_NOTSUPPORT;
+            break;
+        }
+
+        /* macroblock_number: fixed-length code sized by the number of
+         * macroblocks per VOP */
+        {
+            uint16_t mbs_x = (vidObjLay->video_object_layer_width + 15) >> 4;
+            uint16_t mbs_y = (vidObjLay->video_object_layer_height + 15) >> 4;
+            uint32 length = vbp_macroblock_number_length_mp42(mbs_x
+                    * mbs_y);
+
+            getbits = viddec_pm_get_bits(parent, &code, length);
+            BREAK_GETBITS_FAIL(getbits, ret);
+
+            /* BUG FIX: the parsed value was previously assigned back to
+             * the scope-local 'length' and discarded, leaving
+             * *macroblock_number always 0 for every resync slice. */
+            _macroblock_number = code;
+        }
+
+        /* quant_scale */
+        if (vidObjLay->video_object_layer_shape != MP4_SHAPE_TYPE_BINARYONLY) {
+            getbits = viddec_pm_get_bits(parent, &code,
+                    vidObjLay->quant_precision);
+            BREAK_GETBITS_FAIL(getbits, ret);
+            _quant_scale = code;
+        }
+
+        /* header_extension_codes */
+        if (vidObjLay->video_object_layer_shape == MP4_SHAPE_TYPE_RECTANGULAR) {
+            getbits = viddec_pm_get_bits(parent, &code, 1);
+            BREAK_GETBITS_FAIL(getbits, ret);
+            header_extension_codes = code;
+        }
+
+        if (header_extension_codes) {
+            /* modulo_time_base: a run of 1 bits terminated by a 0 */
+            do {
+                getbits = viddec_pm_get_bits(parent, &code, 1);
+                BREAK_GETBITS_FAIL(getbits, ret);
+            } while (code);
+
+            /* marker_bit */
+            getbits = viddec_pm_get_bits(parent, &code, 1);
+            BREAK_GETBITS_FAIL(getbits, ret);
+
+            /* vop_time_increment */
+            {
+                uint32 numbits = 0;
+                numbits = vidObjLay->vop_time_increment_resolution_bits;
+                if (numbits == 0) {
+                    /* at least one bit is always coded */
+                    numbits = 1;
+                }
+                getbits = viddec_pm_get_bits(parent, &code, numbits);
+                BREAK_GETBITS_FAIL(getbits, ret);
+            }
+            /* marker_bit */
+            getbits = viddec_pm_get_bits(parent, &code, 1);
+            BREAK_GETBITS_FAIL(getbits, ret);
+
+            /* vop_coding_type */
+            getbits = viddec_pm_get_bits(parent, &code, 2);
+            BREAK_GETBITS_FAIL(getbits, ret);
+
+            vop_coding_type = code & 0x3;
+
+            /* Fixed Klocwork issue: Code is unreachable.
+             * Comment the following codes because we have
+             * already checked video_object_layer_shape
+             */
+            /* if (vidObjLay->video_object_layer_shape
+             != MP4_SHAPE_TYPE_RECTANGULAR) {
+             ret = MP4_STATUS_NOTSUPPORT;
+             break;
+             }
+             */
+            if (vidObjLay->video_object_layer_shape
+                    != MP4_SHAPE_TYPE_BINARYONLY) {
+                /* intra_dc_vlc_thr */
+                getbits = viddec_pm_get_bits(parent, &code, 3);
+                BREAK_GETBITS_FAIL(getbits, ret);
+                if ((vidObjLay->sprite_enable == MP4_SPRITE_GMC)
+                        && (vop_coding_type == MP4_VOP_TYPE_S)
+                        && (vidObjLay->sprite_info.no_of_sprite_warping_points
+                                > 0)) {
+                    if (vbp_sprite_trajectory_mp42(parent, vidObjLay,
+                            vidObjPlane) != MP4_STATUS_OK) {
+                        break;
+                    }
+                }
+
+                if (vidObjLay->reduced_resolution_vop_enable
+                        && (vidObjLay->video_object_layer_shape
+                                == MP4_SHAPE_TYPE_RECTANGULAR)
+                        && ((vop_coding_type == MP4_VOP_TYPE_I)
+                                || (vop_coding_type == MP4_VOP_TYPE_P))) {
+                    /* vop_reduced_resolution */
+                    getbits = viddec_pm_get_bits(parent, &code, 1);
+                    BREAK_GETBITS_FAIL(getbits, ret);
+                }
+
+                /* NOTE(review): per ISO/IEC 14496-2 vop_fcode_forward is
+                 * present for P/S VOPs, not I VOPs; this condition mirrors
+                 * the original parser -- confirm against the spec before
+                 * changing. */
+                if (vop_coding_type == MP4_VOP_TYPE_I) {
+                    /* vop_fcode_forward */
+                    getbits = viddec_pm_get_bits(parent, &code, 3);
+                    BREAK_GETBITS_FAIL(getbits, ret);
+                }
+
+                if (vop_coding_type == MP4_VOP_TYPE_B) {
+                    /* vop_fcode_backward */
+                    getbits = viddec_pm_get_bits(parent, &code, 3);
+                    BREAK_GETBITS_FAIL(getbits, ret);
+                }
+            }
+        }
+
+        if (vidObjLay->newpred_enable) {
+            /* New pred mode not supported in HW, but, does libva support this? */
+            ret = MP4_STATUS_NOTSUPPORT;
+            break;
+        }
+
+        *quant_scale = _quant_scale;
+        *macroblock_number = _macroblock_number;
+    } while (0);
+    return ret;
+}
+
+/*
+ * Length in bits of the resync marker for the current VOP:
+ * 17 for I-VOPs, 16 + max(fcode_fwd, fcode_bwd) for B-VOPs,
+ * 16 + fcode_fwd otherwise.
+ */
+uint32 vbp_resync_marker_Length_mp42(viddec_mp4_parser_t *parser_cxt)
+{
+    mp4_VideoObjectPlane_t *vidObjPlane =
+        &(parser_cxt->info.VisualObject.VideoObject.VideoObjectPlane);
+    uint32 marker_length;
+
+    if (vidObjPlane->vop_coding_type == MP4_VOP_TYPE_I)
+    {
+        marker_length = 17;
+    }
+    else if (vidObjPlane->vop_coding_type == MP4_VOP_TYPE_B)
+    {
+        uint8 fcode_max = vidObjPlane->vop_fcode_forward;
+        if (fcode_max < vidObjPlane->vop_fcode_backward)
+        {
+            fcode_max = vidObjPlane->vop_fcode_backward;
+        }
+        marker_length = 16 + fcode_max;
+    }
+    else
+    {
+        marker_length = 16 + vidObjPlane->vop_fcode_forward;
+    }
+
+    return marker_length;
+}
+
+/*
+ * Build slice data for a short-video-header picture.  SVH pictures are
+ * emitted as a single slice covering the remainder of the list item.
+ */
+uint32 vbp_process_slices_svh_mp42(vbp_context *pcontext, int list_index)
+{
+    vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data;
+    viddec_pm_cxt_t *parent = pcontext->parser_cxt;
+    viddec_mp4_parser_t *parser_cxt =
+        (viddec_mp4_parser_t *) &(parent->codec_data[0]);
+
+    uint8 is_emul = 0;
+    uint32 bit_offset = 0;
+    uint32 byte_offset = 0;
+
+    vbp_picture_data_mp42 *pic_data =
+        &(query_data->picture_data[query_data->number_pictures]);
+    vbp_slice_data_mp42 *slice = &(pic_data->slice_data[0]);
+    VASliceParameterBufferMPEG4 *slice_param = &(slice->slice_param);
+
+    VTRACE ("begin\n");
+
+    pic_data->number_slices = 1;
+
+    /* The offsets are relative to parent->parse_cubby.buf */
+    viddec_pm_get_au_pos(parent, &bit_offset, &byte_offset, &is_emul);
+
+    slice->buffer_addr = parent->parse_cubby.buf;
+    slice->slice_offset = byte_offset
+        + parent->list.data[list_index].stpos;
+    slice->slice_size = parent->list.data[list_index].edpos
+        - parent->list.data[list_index].stpos - byte_offset;
+
+    slice_param->slice_data_size = slice->slice_size;
+    slice_param->slice_data_flag = VA_SLICE_DATA_FLAG_ALL;
+    slice_param->slice_data_offset = 0;
+    slice_param->macroblock_offset = bit_offset;
+    slice_param->macroblock_number = 0;
+    slice_param->quant_scale =
+        parser_cxt->info.VisualObject.VideoObject.VideoObjectPlaneH263.vop_quant;
+
+    VTRACE ("end\n");
+
+    return MP4_STATUS_OK;
+}
+
+/*
+ * Build slice data for a regular MPEG-4 VOP.  The first slice covers the
+ * whole list item; if resync markers are enabled, the item is then scanned
+ * and split into additional slices, one per video packet, parsing each
+ * video_packet_header for its macroblock number and quantizer.
+ *
+ * @param pcontext: VBP context.
+ * @param list_index: index of the current item in parser_cxt->list.
+ * @return MP4_STATUS_OK on success, MP4_STATUS_PARSE_ERROR on bitstream
+ *         errors or when MAX_NUM_SLICES is exceeded.
+ */
+mp4_Status_t vbp_process_slices_mp42(vbp_context *pcontext, int list_index)
+{
+
+ vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data;
+ viddec_pm_cxt_t *parent = pcontext->parser_cxt;
+ viddec_mp4_parser_t *parser_cxt =
+ (viddec_mp4_parser_t *) &(parent->codec_data[0]);
+
+ vbp_picture_data_mp42 *picture_data = NULL;
+ vbp_slice_data_mp42 *slice_data = NULL;
+ VASliceParameterBufferMPEG4* slice_param = NULL;
+
+ uint32 ret = MP4_STATUS_OK;
+
+ uint8 is_emul = 0;
+ uint32 bit_offset = 0;
+ uint32 byte_offset = 0;
+
+ uint32 code = 0;
+ int32_t getbits = 0;
+ uint32 resync_marker_length = 0;
+
+ uint32 slice_index = 0;
+
+#ifdef VBP_TRACE
+ uint32 list_size_at_index = parent->list.data[list_index].edpos
+ - parent->list.data[list_index].stpos;
+#endif
+
+ VTRACE ("list_index = %d list_size_at_index = %d\n", list_index,
+ list_size_at_index);
+
+ VTRACE ("list_index = %d edpos = %d stpos = %d\n", list_index,
+ parent->list.data[list_index].edpos,
+ parent->list.data[list_index].stpos);
+
+ /* The offsets are relative to parent->parse_cubby.buf */
+ viddec_pm_get_au_pos(parent, &bit_offset, &byte_offset, &is_emul);
+
+#if 0
+ if (is_emul) {
+ g_print("*** emul != 0\n");
+ /*byte_offset += 1;*/
+ }
+#endif
+
+ /* first slice: from the current bitstream position to the end of the
+  * list item (shrunk later if a resync marker is found) */
+ picture_data = &(query_data->picture_data[query_data->number_pictures]);
+ slice_data = &(picture_data->slice_data[slice_index]);
+ slice_param = &(slice_data->slice_param);
+
+ slice_data->buffer_addr = parent->parse_cubby.buf;
+
+ slice_data->slice_offset = byte_offset
+ + parent->list.data[list_index].stpos;
+ slice_data->slice_size = parent->list.data[list_index].edpos
+ - parent->list.data[list_index].stpos - byte_offset;
+
+ slice_param->slice_data_size = slice_data->slice_size;
+ slice_param->slice_data_flag = VA_SLICE_DATA_FLAG_ALL;
+ slice_param->slice_data_offset = 0;
+ slice_param->macroblock_offset = bit_offset;
+ slice_param->macroblock_number = 0;
+ slice_param->quant_scale
+ = parser_cxt->info.VisualObject.VideoObject.VideoObjectPlane.vop_quant;
+
+ slice_index++;
+ picture_data->number_slices = slice_index;
+
+ /*
+ * scan for resync_marker
+ */
+
+ if (!parser_cxt->info.VisualObject.VideoObject.resync_marker_disable) {
+
+ /* byte-align before searching: resync markers are byte aligned */
+ viddec_pm_get_au_pos(parent, &bit_offset, &byte_offset, &is_emul);
+ if (bit_offset) {
+ getbits = viddec_pm_get_bits(parent, &code, 8 - bit_offset);
+ if (getbits == -1) {
+ ret = MP4_STATUS_PARSE_ERROR;
+ return ret;
+ }
+ }
+
+ /*
+ * get resync_marker_length
+ */
+ resync_marker_length = vbp_resync_marker_Length_mp42(parser_cxt);
+
+ while (1) {
+
+ uint16_t quant_scale = 0;
+ uint32 macroblock_number = 0;
+
+ /* a resync marker is a run of zeros followed by a single 1,
+  * i.e. the peeked value equals 1; otherwise skip one byte */
+ getbits = viddec_pm_peek_bits(parent, &code, resync_marker_length);
+ BREAK_GETBITS_FAIL(getbits, ret);
+
+ if (code != 1) {
+ getbits = viddec_pm_get_bits(parent, &code, 8);
+ BREAK_GETBITS_FAIL(getbits, ret);
+ continue;
+ }
+
+ /*
+ * We found resync_marker
+ */
+
+ viddec_pm_get_au_pos(parent, &bit_offset, &byte_offset, &is_emul);
+
+ /* close the previous slice at the marker position */
+ slice_data->slice_size -= (parent->list.data[list_index].edpos
+ - parent->list.data[list_index].stpos - byte_offset);
+ slice_param->slice_data_size = slice_data->slice_size;
+
+ slice_data = &(picture_data->slice_data[slice_index]);
+ slice_param = &(slice_data->slice_param);
+
+ /*
+ * parse video_packet_header
+ */
+ getbits = viddec_pm_get_bits(parent, &code, resync_marker_length);
+ BREAK_GETBITS_FAIL(getbits, ret);
+
+ vbp_video_packet_header_mp42(parent, parser_cxt,
+ &quant_scale, &macroblock_number);
+
+ viddec_pm_get_au_pos(parent, &bit_offset, &byte_offset, &is_emul);
+
+ slice_data->buffer_addr = parent->parse_cubby.buf;
+
+ slice_data->slice_offset = byte_offset
+ + parent->list.data[list_index].stpos;
+ slice_data->slice_size = parent->list.data[list_index].edpos
+ - parent->list.data[list_index].stpos - byte_offset;
+
+ slice_param->slice_data_size = slice_data->slice_size;
+ slice_param->slice_data_flag = VA_SLICE_DATA_FLAG_ALL;
+ slice_param->slice_data_offset = 0;
+ slice_param->macroblock_offset = bit_offset;
+ slice_param->macroblock_number = macroblock_number;
+ slice_param->quant_scale = quant_scale;
+
+ slice_index++;
+
+ if (slice_index >= MAX_NUM_SLICES) {
+ ret = MP4_STATUS_PARSE_ERROR;
+ break;
+ }
+
+ picture_data->number_slices = slice_index;
+ }
+ }
+ return ret;
+}
+
+/* This is copied from the DHG MP42 parser.
+ * Decode the dmv_length VLC that precedes one sprite-trajectory motion
+ * vector component (ISO/IEC 14496-2 sprite_trajectory syntax).
+ *
+ * @param parent: parser-manager handle for bit access.
+ * @param dmv_length: out, number of bits in the following dmv_code
+ *        (0 means the component is zero).
+ * @return MP4_STATUS_OK on success, MP4_STATUS_PARSE_ERROR on bit
+ *         exhaustion (note: declared int32_t but returns mp4_Status_t
+ *         values).
+ */
+static inline int32_t vbp_sprite_dmv_length_mp42(
+ void * parent,
+ int32_t *dmv_length)
+{
+ uint32 code, skip;
+ int32_t getbits = 0;
+ mp4_Status_t ret = MP4_STATUS_PARSE_ERROR;
+ *dmv_length = 0;
+ skip = 3;
+ do {
+ getbits = viddec_pm_peek_bits(parent, &code, skip);
+ BREAK_GETBITS_FAIL(getbits, ret);
+
+ if (code == 7) {
+ /* escape: length >= 6, encoded as additional leading 1 bits */
+ viddec_pm_skip_bits(parent, skip);
+ getbits = viddec_pm_peek_bits(parent, &code, 9);
+ BREAK_GETBITS_FAIL(getbits, ret);
+
+ skip = 1;
+ while ((code & 256) != 0) {/* count number of 1 bits */
+ code <<= 1;
+ skip++;
+ }
+ *dmv_length = 5 + skip;
+ } else {
+ /* short codes: 2 or 3 bits, value is length + 1 */
+ skip = (code <= 1) ? 2 : 3;
+ *dmv_length = code - 1;
+ }
+ viddec_pm_skip_bits(parent, skip);
+ ret = MP4_STATUS_OK;
+
+ } while (0);
+ return ret;
+}
+
+/* This is copied from the DHG MP42 parser.
+ * Read one sprite-trajectory differential MV component: dmv_length VLC,
+ * optional dmv_code with sign extension, then a mandatory marker bit (1).
+ * Writes the decoded value to *dmv_out only on full success.
+ * (Extracted to resolve the old TODO about du/dv code duplication.)
+ */
+static inline mp4_Status_t vbp_sprite_read_dmv_mp42(
+    void *parent,
+    int32_t *dmv_out)
+{
+    uint32 code = 0;
+    int32_t dmv_length = 0, dmv_code = 0, getbits = 0;
+    mp4_Status_t ret;
+
+    ret = vbp_sprite_dmv_length_mp42(parent, &dmv_length);
+    if (ret != MP4_STATUS_OK) {
+        return ret;
+    }
+
+    ret = MP4_STATUS_PARSE_ERROR;
+    do {
+        if (dmv_length <= 0) {
+            dmv_code = 0;
+        } else {
+            getbits = viddec_pm_get_bits(parent, &code, (uint32) dmv_length);
+            BREAK_GETBITS_FAIL(getbits, ret);
+            dmv_code = (int32_t) code;
+            /* MSB clear means a negative value in this VLC: sign-extend */
+            if ((dmv_code & (1 << (dmv_length - 1))) == 0) {
+                dmv_code -= (1 << dmv_length) - 1;
+            }
+        }
+        /* marker_bit: must be 1 */
+        getbits = viddec_pm_get_bits(parent, &code, 1);
+        BREAK_GETBITS_FAIL(getbits, ret);
+        if (code != 1) {
+            break;
+        }
+        *dmv_out = dmv_code;
+        ret = MP4_STATUS_OK;
+    } while (0);
+    return ret;
+}
+
+/* This is copied from the DHG MP42 parser.
+ * Parse the sprite_trajectory() syntax: one (du, dv) differential motion
+ * vector pair per warping point, stored into vidObjPlane.
+ *
+ * @return MP4_STATUS_OK on success, the first failing component's status
+ *         otherwise (remaining warping points are left unparsed).
+ */
+static inline mp4_Status_t vbp_sprite_trajectory_mp42(
+    void *parent,
+    mp4_VideoObjectLayer_t *vidObjLay,
+    mp4_VideoObjectPlane_t *vidObjPlane)
+{
+    uint32 i;
+    int32_t dmv_code = 0;
+    mp4_Status_t ret = MP4_STATUS_OK;
+
+    for (i = 0; i
+            < (uint32) vidObjLay->sprite_info.no_of_sprite_warping_points; i++) {
+        ret = vbp_sprite_read_dmv_mp42(parent, &dmv_code);
+        if (ret != MP4_STATUS_OK) {
+            break;
+        }
+        vidObjPlane->warping_mv_code_du[i] = dmv_code;
+
+        ret = vbp_sprite_read_dmv_mp42(parent, &dmv_code);
+        if (ret != MP4_STATUS_OK) {
+            break;
+        }
+        vidObjPlane->warping_mv_code_dv[i] = dmv_code;
+    }
+    return ret;
+}
+
+/*
+ * Release the vbp_data_mp42 structure, its picture array and every
+ * per-picture slice array, then clear the context's query_data pointer.
+ *
+ * @param pcontext: VBP context.
+ * @return VBP_OK always.
+ */
+uint32 vbp_free_query_data_mp42(vbp_context *pcontext)
+{
+    vbp_data_mp42 *data = (vbp_data_mp42 *) pcontext->query_data;
+
+    if (data != NULL)
+    {
+        if (data->picture_data != NULL)
+        {
+            gint pic;
+            for (pic = 0; pic < MAX_NUM_PICTURES_MP42; pic++)
+            {
+                g_free(data->picture_data[pic].slice_data);
+            }
+            g_free(data->picture_data);
+        }
+        g_free(data);
+    }
+
+    pcontext->query_data = NULL;
+    return VBP_OK;
+}
+
+/*
+ * Allocate vbp_data_mp42 plus its fixed-size picture and slice arrays.
+ * On any allocation failure everything allocated so far is released and
+ * VBP_MEM is returned; pcontext->query_data is set only on full success.
+ *
+ * @param pcontext: VBP context.
+ * @return VBP_OK on success, VBP_MEM on allocation failure.
+ */
+uint32 vbp_allocate_query_data_mp42(vbp_context *pcontext)
+{
+    vbp_data_mp42 *data = NULL;
+    gint pic = 0;
+
+    pcontext->query_data = NULL;
+
+    data = g_try_new0(vbp_data_mp42, 1);
+    if (NULL == data)
+    {
+        goto fail;
+    }
+
+    data->picture_data = g_try_new0(vbp_picture_data_mp42,
+            MAX_NUM_PICTURES_MP42);
+    if (NULL == data->picture_data)
+    {
+        goto fail;
+    }
+
+    for (pic = 0; pic < MAX_NUM_PICTURES_MP42; pic++)
+    {
+        data->picture_data[pic].number_slices = 0;
+        data->picture_data[pic].slice_data = g_try_new0(vbp_slice_data_mp42,
+                MAX_NUM_SLICES);
+        if (NULL == data->picture_data[pic].slice_data)
+        {
+            goto fail;
+        }
+    }
+
+    pcontext->query_data = (void *) data;
+    return VBP_OK;
+
+fail:
+
+    if (data)
+    {
+        if (data->picture_data)
+        {
+            for (pic = 0; pic < MAX_NUM_PICTURES_MP42; pic++)
+            {
+                g_free(data->picture_data[pic].slice_data);
+            }
+            g_free(data->picture_data);
+        }
+        g_free(data);
+    }
+
+    return VBP_MEM;
+}
+
+/*
+ * Print "count" bytes from "buf" in hex, 8 bytes per line.
+ * Shared by the three hex dumps in vbp_dump_query_data.
+ */
+static void vbp_dump_hex(uint8 *buf, uint32 count)
+{
+    uint32 idx;
+
+    for (idx = 0; idx < count; idx++) {
+        g_print("%02x ", buf[idx]);
+        if ((idx + 1) % 8 == 0) {
+            g_print("\n");
+        }
+    }
+}
+
+/*
+ * Debug utility: for every picture in the mp42 query data, dump the codec
+ * data, picture parameters, the first slice's parameters, both IQ matrices
+ * and the first 64 bytes of the slice buffer. "list_index" is currently
+ * unused.
+ */
+void vbp_dump_query_data(vbp_context *pcontext, int list_index)
+{
+    vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data;
+
+    vbp_picture_data_mp42 *picture_data = NULL;
+    VAPictureParameterBufferMPEG4 *picture_param = NULL;
+    vbp_slice_data_mp42 *slice_data = NULL;
+
+    uint32 idx = 0;
+
+    for (idx = 0; idx < query_data->number_pictures; idx++) {
+
+        picture_data = &(query_data->picture_data[idx]);
+        picture_param = &(picture_data->picture_param);
+        /* only the first slice of each picture is dumped */
+        slice_data = &(picture_data->slice_data[0]);
+
+        g_print("======================= dump_begin ======================\n\n");
+        g_print("======================= codec_data ======================\n");
+
+        /* codec_data */
+        g_print("codec_data.profile_and_level_indication = 0x%x\n",
+                query_data->codec_data.profile_and_level_indication);
+
+        g_print("==================== picture_param =======================\n");
+
+        /* picture_param */
+        g_print("picture_param->vop_width = %d\n", picture_param->vop_width);
+        g_print("picture_param->vop_height = %d\n", picture_param->vop_height);
+
+        g_print("picture_param->vol_fields.bits.short_video_header = %d\n",
+                picture_param->vol_fields.bits.short_video_header);
+        g_print("picture_param->vol_fields.bits.chroma_format = %d\n",
+                picture_param->vol_fields.bits.chroma_format);
+        g_print("picture_param->vol_fields.bits.interlaced = %d\n",
+                picture_param->vol_fields.bits.interlaced);
+        g_print("picture_param->vol_fields.bits.obmc_disable = %d\n",
+                picture_param->vol_fields.bits.obmc_disable);
+        g_print("picture_param->vol_fields.bits.sprite_enable = %d\n",
+                picture_param->vol_fields.bits.sprite_enable);
+        g_print(
+                "picture_param->vol_fields.bits.sprite_warping_accuracy = %d\n",
+                picture_param->vol_fields.bits.sprite_warping_accuracy);
+        g_print("picture_param->vol_fields.bits.quant_type = %d\n",
+                picture_param->vol_fields.bits.quant_type);
+        g_print("picture_param->vol_fields.bits.quarter_sample = %d\n",
+                picture_param->vol_fields.bits.quarter_sample);
+        g_print("picture_param->vol_fields.bits.data_partitioned = %d\n",
+                picture_param->vol_fields.bits.data_partitioned);
+        g_print("picture_param->vol_fields.bits.reversible_vlc = %d\n",
+                picture_param->vol_fields.bits.reversible_vlc);
+
+        g_print("picture_param->no_of_sprite_warping_points = %d\n",
+                picture_param->no_of_sprite_warping_points);
+        g_print("picture_param->quant_precision = %d\n",
+                picture_param->quant_precision);
+        g_print("picture_param->sprite_trajectory_du = %d, %d, %d\n",
+                picture_param->sprite_trajectory_du[0],
+                picture_param->sprite_trajectory_du[1],
+                picture_param->sprite_trajectory_du[2]);
+        g_print("picture_param->sprite_trajectory_dv = %d, %d, %d\n",
+                picture_param->sprite_trajectory_dv[0],
+                picture_param->sprite_trajectory_dv[1],
+                picture_param->sprite_trajectory_dv[2]);
+
+        g_print("picture_param->vop_fields.bits.vop_coding_type = %d\n",
+                picture_param->vop_fields.bits.vop_coding_type);
+        g_print(
+                "picture_param->vop_fields.bits.backward_reference_vop_coding_type = %d\n",
+                picture_param->vop_fields.bits.backward_reference_vop_coding_type);
+        g_print("picture_param->vop_fields.bits.vop_rounding_type = %d\n",
+                picture_param->vop_fields.bits.vop_rounding_type);
+        g_print("picture_param->vop_fields.bits.intra_dc_vlc_thr = %d\n",
+                picture_param->vop_fields.bits.intra_dc_vlc_thr);
+        g_print("picture_param->vop_fields.bits.top_field_first = %d\n",
+                picture_param->vop_fields.bits.top_field_first);
+        g_print(
+                "picture_param->vop_fields.bits.alternate_vertical_scan_flag = %d\n",
+                picture_param->vop_fields.bits.alternate_vertical_scan_flag);
+
+        g_print("picture_param->vop_fcode_forward = %d\n",
+                picture_param->vop_fcode_forward);
+        g_print("picture_param->vop_fcode_backward = %d\n",
+                picture_param->vop_fcode_backward);
+        g_print("picture_param->num_gobs_in_vop = %d\n",
+                picture_param->num_gobs_in_vop);
+        g_print("picture_param->num_macroblocks_in_gob = %d\n",
+                picture_param->num_macroblocks_in_gob);
+        g_print("picture_param->TRB = %d\n", picture_param->TRB);
+        g_print("picture_param->TRD = %d\n", picture_param->TRD);
+
+        g_print("==================== slice_data ==========================\n");
+
+        g_print("slice_data.buffer_addr = 0x%x\n",
+                (unsigned int) slice_data->buffer_addr);
+        g_print("slice_data.slice_offset = 0x%x\n", slice_data->slice_offset);
+        g_print("slice_data.slice_size = 0x%x\n", slice_data->slice_size);
+
+        g_print("slice_data.slice_param.macroblock_number = %d\n",
+                slice_data->slice_param.macroblock_number);
+        g_print("slice_data.slice_param.macroblock_offset = 0x%x\n",
+                slice_data->slice_param.macroblock_offset);
+        g_print("slice_data.slice_param.quant_scale = %d\n",
+                slice_data->slice_param.quant_scale);
+        g_print("slice_data.slice_param.slice_data_flag = %d\n",
+                slice_data->slice_param.slice_data_flag);
+        g_print("slice_data.slice_param.slice_data_offset = %d\n",
+                slice_data->slice_param.slice_data_offset);
+        g_print("slice_data.slice_param.slice_data_size = %d\n",
+                slice_data->slice_param.slice_data_size);
+
+        g_print("================= iq_matrix_buffer ======================\n");
+        g_print("iq_matrix_buffer.load_intra_quant_mat = %d\n",
+                picture_data->iq_matrix_buffer.load_intra_quant_mat);
+        g_print("iq_matrix_buffer.load_non_intra_quant_mat = %d\n",
+                picture_data->iq_matrix_buffer.load_non_intra_quant_mat);
+
+        g_print("------- iq_matrix_buffer.intra_quant_mat ----------\n");
+        vbp_dump_hex(picture_data->iq_matrix_buffer.intra_quant_mat, 64);
+
+        g_print("----- iq_matrix_buffer.non_intra_quant_mat --------\n");
+        vbp_dump_hex(picture_data->iq_matrix_buffer.non_intra_quant_mat, 64);
+
+        g_print("-------- slice buffer begin ------------\n");
+        vbp_dump_hex(slice_data->buffer_addr + slice_data->slice_offset, 64);
+        /* bug fix: the closing banner previously repeated "begin" */
+        g_print("-------- slice buffer end ------------\n");
+
+        g_print("\n\n============== dump_end ==========================\n\n");
+
+    }
+}
+
diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.h b/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.h
new file mode 100644
index 0000000..c0deaa4
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.h
@@ -0,0 +1,49 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+#ifndef VBP_MP42_PARSER_H
+#define VBP_MP42_PARSER_H
+
+/*
+ * setup parser's entry points
+ */
+
+uint32 vbp_init_parser_entries_mp42(vbp_context *pcontext);
+
+
+/*
+ * allocate query data
+ */
+uint32 vbp_allocate_query_data_mp42(vbp_context *pcontext);
+
+/*
+ * free query data
+ */
+uint32 vbp_free_query_data_mp42(vbp_context *pcontext);
+
+/*
+ * parse initialization data
+ */
+uint32 vbp_parse_init_data_mp42(vbp_context *pcontext);
+
+/*
+ * parse start code.
+ */
+uint32 vbp_parse_start_code_mp42(vbp_context *pcontext);
+
+/*
+ * process parsing result
+ */
+uint32 vbp_process_parsing_result_mp42(vbp_context *pcontext, int list_index);
+
+/*
+ * query parsing result
+ */
+uint32 vbp_populate_query_data_mp42(vbp_context *pcontext);
+
+#endif /*VBP_MP42_PARSER_H*/
diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_trace.c b/mix_vbp/viddec_fw/fw/parser/vbp_trace.c
new file mode 100644
index 0000000..d87bfd8
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/vbp_trace.c
@@ -0,0 +1,28 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+
+#include "vbp_trace.h"
+
+#ifdef VBP_TRACE
+
+/*
+ * Emit one trace line to stdout in the form
+ * "<category> <function>(#<line>): <formatted message>\n".
+ * Silently ignores the call if any string argument is NULL.
+ */
+void vbp_trace_util(const char* cat, const char* fun, int line, const char* format, ...)
+{
+    va_list args;
+
+    if ((NULL == cat) || (NULL == fun) || (NULL == format))
+    {
+        return;
+    }
+
+    printf("%s %s(#%d): ", cat, fun, line);
+
+    va_start(args, format);
+    vprintf(format, args);
+    va_end(args);
+
+    printf("\n");
+}
+
+#endif
+
diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_trace.h b/mix_vbp/viddec_fw/fw/parser/vbp_trace.h
new file mode 100644
index 0000000..9f2a21c
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/vbp_trace.h
@@ -0,0 +1,47 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+
+#ifndef VBP_TRACE_H_
+#define VBP_TRACE_H_
+
+
+
+//#define VBP_TRACE
+
+
+#ifdef VBP_TRACE /* if VBP_TRACE is defined*/
+
+#include <stdio.h>
+#include <stdarg.h>
+
+extern void vbp_trace_util(const char* cat, const char* fun, int line, const char* format, ...);
+
+#define VBP_TRACE_UTIL(cat, format, ...) \
+vbp_trace_util(cat, __FUNCTION__, __LINE__, format, ##__VA_ARGS__)
+
+
+#define ETRACE(format, ...) VBP_TRACE_UTIL("ERROR: ", format, ##__VA_ARGS__)
+#define WTRACE(format, ...) VBP_TRACE_UTIL("WARNING: ", format, ##__VA_ARGS__)
+#define ITRACE(format, ...) VBP_TRACE_UTIL("INFO: ", format, ##__VA_ARGS__)
+#define VTRACE(format, ...) VBP_TRACE_UTIL("VERBOSE: ", format, ##__VA_ARGS__)
+
+#else /* if VBP_TRACE is not defined */
+
+#define ETRACE(format, ...)
+#define WTRACE(format, ...)
+#define ITRACE(format, ...)
+#define VTRACE(format, ...)
+
+
+#endif /* VBP_TRACE*/
+
+
+#endif /*VBP_TRACE_H_*/
+
+
diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_utils.c b/mix_vbp/viddec_fw/fw/parser/vbp_utils.c
new file mode 100644
index 0000000..651b801
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/vbp_utils.c
@@ -0,0 +1,548 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+
+#include <glib.h>
+#include <dlfcn.h>
+
+#include "vc1.h"
+#include "h264.h"
+#include "vbp_loader.h"
+#include "vbp_utils.h"
+#include "vbp_vc1_parser.h"
+#include "vbp_h264_parser.h"
+#include "vbp_mp42_parser.h"
+
+
+
+/* buffer counter */
+uint32 buffer_counter = 0;
+
+
+/**
+ *
+ * uninitialize parser context: free the parser operations table and
+ * unload the parser shared object. Tolerates a NULL or partially
+ * initialized context so it can serve as a cleanup path.
+ *
+ */
+static uint32 vbp_utils_uninitialize_context(vbp_context *pcontext)
+{
+    if (NULL == pcontext)
+    {
+        return VBP_OK;
+    }
+
+    /* parser entry points need no explicit reset */
+
+    g_free(pcontext->parser_ops);
+    pcontext->parser_ops = NULL;
+
+    if (pcontext->fd_parser)
+    {
+        dlclose(pcontext->fd_parser);
+        pcontext->fd_parser = NULL;
+    }
+
+    return VBP_OK;
+}
+
+/**
+ *
+ * initialize parser context: map the parser type to its shared object,
+ * dlopen it, allocate the parser operations table, install the seven
+ * format-specific callbacks and let the format parser resolve its own
+ * entry points. On any failure the context is uninitialized again and
+ * an error code (VBP_TYPE / VBP_LOAD / VBP_MEM or the callback's error)
+ * is returned.
+ *
+ */
+static uint32 vbp_utils_initialize_context(vbp_context *pcontext)
+{
+ uint32 error = VBP_OK;
+ char *parser_name;
+
+ /* map parser type to the shared object implementing that format */
+ switch (pcontext->parser_type)
+ {
+ case VBP_VC1:
+ parser_name = "libmixvbp_vc1.so.0";
+ break;
+
+ /* MPEG-2 parser is not supported. */
+
+ /* case VBP_MPEG2:
+ parser_name = "libmixvbp_mpeg2.so.0";
+ break;*/
+
+ case VBP_MPEG4:
+ parser_name = "libmixvbp_mpeg4.so.0";
+ break;
+
+ case VBP_H264:
+ parser_name = "libmixvbp_h264.so.0";
+ break;
+
+ default:
+ g_warning ("Warning! Unsupported parser type!");
+ return VBP_TYPE;
+ }
+
+ /* RTLD_LAZY: symbols are resolved on first use */
+ pcontext->fd_parser = dlopen(parser_name, RTLD_LAZY);
+ if (NULL == pcontext->fd_parser)
+ {
+ ETRACE("Failed to load parser %s.", parser_name);
+ error = VBP_LOAD;
+ goto cleanup;
+ }
+
+ pcontext->parser_ops = g_try_new(viddec_parser_ops_t, 1);
+ if (NULL == pcontext->parser_ops)
+ {
+ ETRACE("Failed to allocate memory");
+ error = VBP_MEM;
+ goto cleanup;
+ }
+
+/* expands to one switch case that installs all seven vbp_*_##Y callbacks
+ for parser type X implemented by format suffix Y */
+#define SET_FUNC_POINTER(X, Y)\
+ case X:\
+ pcontext->func_init_parser_entries = vbp_init_parser_entries_##Y;\
+ pcontext->func_allocate_query_data = vbp_allocate_query_data_##Y;\
+ pcontext->func_free_query_data = vbp_free_query_data_##Y;\
+ pcontext->func_parse_init_data = vbp_parse_init_data_##Y;\
+ pcontext->func_parse_start_code = vbp_parse_start_code_##Y;\
+ pcontext->func_process_parsing_result = vbp_process_parsing_result_##Y;\
+ pcontext->func_populate_query_data = vbp_populate_query_data_##Y;\
+ break;
+
+ /* no default needed: unsupported types returned VBP_TYPE above */
+ switch (pcontext->parser_type)
+ {
+ SET_FUNC_POINTER(VBP_VC1, vc1);
+ SET_FUNC_POINTER(VBP_MPEG4, mp42);
+ SET_FUNC_POINTER(VBP_H264, h264);
+ }
+
+ /* set entry points for parser operations:
+ init
+ parse_sc
+ parse_syntax
+ get_cxt_size
+ is_wkld_done
+ is_frame_start
+ */
+ error = pcontext->func_init_parser_entries(pcontext);
+
+cleanup:
+
+ if (VBP_OK != error)
+ {
+ /* no need to log error. the loader would have done so already. */
+ vbp_utils_uninitialize_context(pcontext);
+ }
+
+ return error;
+}
+
+/**
+ *
+ * release all memory owned by the context: format-specific query data,
+ * both workloads, the parser's persistent memory and the parser manager
+ * context. Safe on a NULL or partially allocated context.
+ *
+ */
+static uint32 vbp_utils_free_parser_memory(vbp_context *pcontext)
+{
+    if (NULL == pcontext)
+    {
+        return VBP_OK;
+    }
+
+    /* let the format parser release its own query data first */
+    if (pcontext->func_free_query_data)
+    {
+        pcontext->func_free_query_data(pcontext);
+    }
+
+    /* g_free(NULL) is a no-op, so partial allocation states are fine */
+    g_free(pcontext->workload2);
+    g_free(pcontext->workload1);
+    g_free(pcontext->persist_mem);
+    g_free(pcontext->parser_cxt);
+
+    pcontext->workload2 = NULL;
+    pcontext->workload1 = NULL;
+    pcontext->persist_mem = NULL;
+    pcontext->parser_cxt = NULL;
+
+    return VBP_OK;
+}
+
+
+/**
+ *
+ * allocate all memory the parser needs: the parser manager context,
+ * parser persistent memory (if the parser requests any), two workloads
+ * sized for MAX_WORKLOAD_ITEMS items, and format-specific query data.
+ * On any failure everything allocated so far is released and an error
+ * code (VBP_MEM or VBP_CXT) is returned.
+ *
+ */
+static uint32 vbp_utils_allocate_parser_memory(vbp_context *pcontext)
+{
+ /* pcontext is guaranteed to be valid input. */
+ uint32 error = VBP_OK;
+ viddec_parser_memory_sizes_t sizes;
+
+ pcontext->parser_cxt = g_try_new(viddec_pm_cxt_t, 1);
+ if (NULL == pcontext->parser_cxt)
+ {
+ ETRACE("Failed to allocate memory");
+ error = VBP_MEM;
+ goto cleanup;
+ }
+
+ /* invoke parser entry to get context size */
+ /* no return value, should always succeed. */
+ pcontext->parser_ops->get_cxt_size(&sizes);
+
+ /* allocate persistent memory for parser */
+ if (sizes.persist_size)
+ {
+ pcontext->persist_mem = g_try_malloc(sizes.persist_size);
+ if (NULL == pcontext->persist_mem)
+ {
+ ETRACE("Failed to allocate memory");
+ error = VBP_MEM;
+ goto cleanup;
+ }
+ }
+ else
+ {
+ /* a zero persist size is OK for VC-1, MPEG2 and MPEG4. */
+ if ((VBP_VC1 == pcontext->parser_type) ||
+ (VBP_MPEG2 == pcontext->parser_type) ||
+ (VBP_MPEG4 == pcontext->parser_type))
+ {
+ pcontext->persist_mem = NULL;
+ }
+ else
+ {
+ /* persistent memory is mandatory for H.264; nothing was
+ * actually allocated here, so report a context error rather
+ * than the misleading "Failed to allocate memory". */
+ ETRACE("Parser reported zero persistent memory size");
+ error = VBP_CXT;
+ goto cleanup;
+ }
+ }
+
+ /* allocate a new workload with 1000 items. */
+ pcontext->workload1 = g_try_malloc(sizeof(viddec_workload_t) +
+ (MAX_WORKLOAD_ITEMS * sizeof(viddec_workload_item_t)));
+ if (NULL == pcontext->workload1)
+ {
+ ETRACE("Failed to allocate memory");
+ error = VBP_MEM;
+ goto cleanup;
+ }
+
+ /* allocate a second workload with 1000 items. */
+ pcontext->workload2 = g_try_malloc(sizeof(viddec_workload_t) +
+ (MAX_WORKLOAD_ITEMS * sizeof(viddec_workload_item_t)));
+ if (NULL == pcontext->workload2)
+ {
+ ETRACE("Failed to allocate memory");
+ error = VBP_MEM;
+ goto cleanup;
+ }
+
+ /* allocate format-specific query data */
+ error = pcontext->func_allocate_query_data(pcontext);
+
+cleanup:
+ if (error != VBP_OK)
+ {
+ vbp_utils_free_parser_memory(pcontext);
+ }
+ return error;
+}
+
+
+
+/**
+ *
+ * parse the elementary sample buffer or codec configuration data.
+ * First builds the item list (one entry per sequence/picture/slice
+ * header found), then runs the parser's parse_syntax entry point and
+ * the format-specific result processing for each item in turn.
+ * init_data_flag selects codec configuration parsing over frame parsing.
+ *
+ */
+static uint32 vbp_utils_parse_es_buffer(vbp_context *pcontext, uint8 init_data_flag)
+{
+ viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
+ viddec_parser_ops_t *ops = pcontext->parser_ops;
+ uint32 error = VBP_OK;
+ int i;
+
+ /* reset list number. func_parse_init_data or func_parse_start_code will
+ * set it equal to number of sequence headers, picture headers or slices headers
+ * found in the sample buffer
+ */
+ cxt->list.num_items = 0;
+
+ /**
+ * READ THIS NOTE: cxt->getbits.is_emul_reqd must be set to 1
+ * for H.264 and MPEG-4, VC1 advanced profile and set to 0
+ * for VC1 simple or main profile when parsing the frame
+ * buffer. When parsing the sequence header, it must be set to 1
+ * always.
+ *
+ * PARSER IMPLEMENTOR: set this flag in the parser.
+ */
+
+ /*
+ if ((codec_type == VBP_H264) || (codec_type == VBP_MPEG4))
+ {
+ cxt->getbits.is_emul_reqd = 1;
+ }
+ */
+
+
+ /* populate the list.*/
+ if (init_data_flag)
+ {
+ error = pcontext->func_parse_init_data(pcontext);
+ }
+ else
+ {
+ error = pcontext->func_parse_start_code(pcontext);
+ }
+
+ if (VBP_OK != error)
+ {
+ ETRACE("Failed to parse the start code!");
+ return error;
+ }
+
+ /* set up bitstream buffer */
+ cxt->getbits.list = &(cxt->list);
+
+ /* setup buffer pointer */
+ cxt->getbits.bstrm_buf.buf = cxt->parse_cubby.buf;
+
+ /*
+ * TO DO:
+ * check if cxt->getbits.is_emul_reqd is set properly
+ */
+
+ /* parse each discovered item (header/slice) independently */
+ for (i = 0; i < cxt->list.num_items; i++)
+ {
+ /* setup bitstream parser: window the bitstream reader onto this
+ * item's [stpos, edpos) byte range */
+ cxt->getbits.bstrm_buf.buf_index = cxt->list.data[i].stpos;
+ cxt->getbits.bstrm_buf.buf_st = cxt->list.data[i].stpos;
+ cxt->getbits.bstrm_buf.buf_end = cxt->list.data[i].edpos;
+
+ /* It is possible to end up with buf_offset not equal zero. */
+ cxt->getbits.bstrm_buf.buf_bitoff = 0;
+
+ /* reset per-item bitstream reader state */
+ cxt->getbits.au_pos = 0;
+ cxt->getbits.list_off = 0;
+ cxt->getbits.phase = 0;
+ cxt->getbits.emulation_byte_counter = 0;
+
+ cxt->list.start_offset = cxt->list.data[i].stpos;
+ cxt->list.end_offset = cxt->list.data[i].edpos;
+ cxt->list.total_bytes = cxt->list.data[i].edpos - cxt->list.data[i].stpos;
+
+ /* invoke parse entry point to parse the buffer */
+ error = ops->parse_syntax((void *)cxt, (void *)&(cxt->codec_data[0]));
+
+ /* can't return error for now. Need further investigation */
+
+ /*if (0 != error)
+ {
+ ETRACE("failed to parse the syntax: %d!", error);
+ return error;
+ }*/
+
+ /*
+ * process parsing result
+ */
+ error = pcontext->func_process_parsing_result(pcontext, i);
+
+ if (0 != error)
+ {
+ ETRACE("Failed to process parsing result.");
+ return error;
+ }
+ }
+
+ /* currently always assume a complete frame is supplied for parsing, so
+ * there is no need to check if workload is done
+ */
+
+ /*
+ uint32_t codec_errors = 0;
+ uint32_t state;
+
+ error = ops->is_wkld_done(
+ (void *)cxt,
+ (void *)&(cxt->codec_data[0]),
+ (uint32_t)cxt->sc_prefix_info.next_sc,
+ &codec_errors);
+ state = (ret == VIDDEC_PARSE_FRMDONE) ? VBP_DONE : VBP_OK;
+ return state;
+ */
+
+ return VBP_OK;
+}
+
+
+/**
+ *
+ * create the parser context: allocate the context, load the parser and
+ * its entry points, allocate parser memory, then run the parser manager
+ * initialization sequence (list, bitstream reader, parser init, emitter).
+ * On success *ppcontext receives the fully initialized context; on
+ * failure *ppcontext stays NULL and everything is rolled back.
+ *
+ */
+uint32 vbp_utils_create_context(uint32 parser_type, vbp_context **ppcontext)
+{
+ uint32 error = VBP_OK;
+ vbp_context *pcontext = NULL;
+
+ /* prevention from the failure */
+ *ppcontext = NULL;
+
+ pcontext = g_try_new0(vbp_context, 1);
+ if (NULL == pcontext)
+ {
+ error = VBP_MEM;
+ goto cleanup;
+ }
+
+ pcontext->parser_type = parser_type;
+
+ /* load parser, initialize parser operators and entry points */
+ error = vbp_utils_initialize_context(pcontext);
+ if (VBP_OK != error)
+ {
+ goto cleanup;
+ }
+
+ /* allocate parser context, persistent memory, query data and workload */
+ error = vbp_utils_allocate_parser_memory(pcontext);
+ if (VBP_OK != error)
+ {
+ goto cleanup;
+ }
+
+ /* initialize item list and bitstream reader before the parser runs */
+ viddec_pm_utils_list_init(&(pcontext->parser_cxt->list));
+ viddec_pm_utils_bstream_init(&(pcontext->parser_cxt->getbits), NULL, 0);
+ pcontext->parser_cxt->cur_buf.list_index = -1;
+ pcontext->parser_cxt->parse_cubby.phase = 0;
+
+ /* invoke the entry point to initialize the parser. */
+ pcontext->parser_ops->init(
+ (void *)pcontext->parser_cxt->codec_data,
+ (void *)pcontext->persist_mem,
+ FALSE);
+
+ viddec_emit_init(&(pcontext->parser_cxt->emitter));
+
+ /* overwrite init with our number of items. */
+ pcontext->parser_cxt->emitter.cur.max_items = MAX_WORKLOAD_ITEMS;
+ pcontext->parser_cxt->emitter.next.max_items = MAX_WORKLOAD_ITEMS;
+
+ /* set up to find the first start code. */
+ pcontext->parser_cxt->sc_prefix_info.first_sc_detect = 1;
+
+ /* indicates initialized OK. */
+ pcontext->identifier = MAGIC_NUMBER;
+ *ppcontext = pcontext;
+ error = VBP_OK;
+
+cleanup:
+
+ if (VBP_OK != error)
+ {
+ /* roll back in reverse order; both helpers tolerate NULL/partial state */
+ vbp_utils_free_parser_memory(pcontext);
+ vbp_utils_uninitialize_context(pcontext);
+ g_free(pcontext);
+ pcontext = NULL;
+ }
+
+ return error;
+}
+
+/**
+ *
+ * destroy the context: release all parser memory, unload the parser
+ * shared object and free the context itself. Always returns VBP_OK.
+ *
+ */
+uint32 vbp_utils_destroy_context(vbp_context *pcontext)
+{
+ /* entry point, not need to validate input parameters. */
+ vbp_utils_free_parser_memory(pcontext);
+ vbp_utils_uninitialize_context(pcontext);
+ g_free(pcontext);
+ /* note: the former "pcontext = NULL;" was removed - assigning to a
+ * by-value parameter had no effect on the caller's pointer */
+
+ return VBP_OK;
+}
+
+
+/**
+ *
+ * parse the sample buffer or parser configuration data: wire the two
+ * preallocated workloads into the emitter, point the parse cubby at the
+ * caller's buffer and delegate to vbp_utils_parse_es_buffer.
+ * init_data_flag selects codec configuration parsing; only real sample
+ * buffers advance the rolling buffer_counter.
+ *
+ */
+uint32 vbp_utils_parse_buffer(vbp_context *pcontext, uint8 *data, uint32 size, uint8 init_data_flag)
+{
+    /* entry point; inputs were validated by the caller. */
+    viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
+    uint32 error;
+
+    /* ITRACE("buffer counter: %d",buffer_counter); */
+
+    /* set up emitter with the two preallocated workloads */
+    cxt->emitter.cur.data = pcontext->workload1;
+    cxt->emitter.next.data = pcontext->workload2;
+
+    /* reset bit offset */
+    cxt->getbits.bstrm_buf.buf_bitoff = 0;
+
+    /* set up cubby over the caller's buffer */
+    cxt->parse_cubby.buf = data;
+    cxt->parse_cubby.size = size;
+    cxt->parse_cubby.phase = 0;
+
+    error = vbp_utils_parse_es_buffer(pcontext, init_data_flag);
+
+    /* rolling count of sample buffers (configuration data not counted) */
+    if (0 == init_data_flag)
+    {
+        buffer_counter++;
+    }
+
+    return error;
+}
+
+/**
+ *
+ * provide query data back to the consumer. *data receives the context's
+ * query data on success and NULL when population fails.
+ *
+ */
+uint32 vbp_utils_query(vbp_context *pcontext, void **data)
+{
+    /* entry point; inputs were validated by the caller. */
+    uint32 error = pcontext->func_populate_query_data(pcontext);
+
+    /* hand out query data only when population succeeded */
+    *data = (VBP_OK == error) ? pcontext->query_data : NULL;
+
+    return error;
+}
+
+/**
+ *
+ * flush parsing buffer. Currently unimplemented: always returns
+ * VBP_IMPL without touching any state.
+ *
+ */
+uint32 vbp_utils_flush(vbp_context *pcontext)
+{
+ return VBP_IMPL;
+}
+
diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_utils.h b/mix_vbp/viddec_fw/fw/parser/vbp_utils.h
new file mode 100644
index 0000000..67ff3e8
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/vbp_utils.h
@@ -0,0 +1,106 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+#ifndef VBP_UTILS_H
+#define VBP_UTILS_H
+
+#include "viddec_parser_ops.h"
+#include "viddec_pm_parse.h"
+#include "viddec_pm.h"
+#include "vbp_trace.h"
+
+#define MAGIC_NUMBER 0x0DEADBEEF
+#define MAX_WORKLOAD_ITEMS 1000
+
+/* maximum 256 slices per sample buffer */
+#define MAX_NUM_SLICES 256
+
+/* maximum two pictures per sample buffer */
+#define MAX_NUM_PICTURES 2
+
+
+extern uint32 viddec_parse_sc(void *in, void *pcxt, void *sc_state);
+
+/* rolling counter of sample buffer */
+extern uint32 buffer_counter;
+
+typedef struct vbp_context_t vbp_context;
+
+typedef uint32 (*function_init_parser_entries)(vbp_context* cxt);
+typedef uint32 (*function_allocate_query_data)(vbp_context* cxt);
+typedef uint32 (*function_free_query_data)(vbp_context* cxt);
+typedef uint32 (*function_parse_init_data)(vbp_context* cxt);
+typedef uint32 (*function_parse_start_code)(vbp_context* cxt);
+typedef uint32 (*function_process_parsing_result)(vbp_context* cxt, int i);
+typedef uint32 (*function_populate_query_data)(vbp_context* cxt);
+
+
+
+struct vbp_context_t
+{
+ /* magic number; set to MAGIC_NUMBER once the context is fully initialized */
+ uint32 identifier;
+
+ /* parser type, eg, MPEG-2, MPEG-4, H.264, VC1 (one of the VBP_* codes) */
+ uint32 parser_type;
+
+ /* handle to parser (shared object) returned by dlopen */
+ void *fd_parser;
+
+ /* parser (shared object) entry points, filled in by func_init_parser_entries */
+ viddec_parser_ops_t *parser_ops;
+
+ /* parser context (parser manager state: list, bitstream reader, emitter) */
+ viddec_pm_cxt_t *parser_cxt;
+
+ /* work load; two buffers each sized for MAX_WORKLOAD_ITEMS items,
+ used as the emitter's current/next buffers */
+ viddec_workload_t *workload1, *workload2;
+
+ /* persistent memory for parser; may be NULL for formats that need none */
+ uint32 *persist_mem;
+
+ /* format specific query data, owned and typed by the format parser */
+ void *query_data;
+
+
+ /* format-specific callbacks, installed when the context is initialized */
+ function_init_parser_entries func_init_parser_entries;
+ function_allocate_query_data func_allocate_query_data;
+ function_free_query_data func_free_query_data;
+ function_parse_init_data func_parse_init_data;
+ function_parse_start_code func_parse_start_code;
+ function_process_parsing_result func_process_parsing_result;
+ function_populate_query_data func_populate_query_data;
+
+};
+
+/**
+ * create VBP context
+ */
+uint32 vbp_utils_create_context(uint32 parser_type, vbp_context **ppcontext);
+
+/*
+ * destroy VBP context
+ */
+uint32 vbp_utils_destroy_context(vbp_context *pcontext);
+
+/*
+ * parse bitstream
+ */
+uint32 vbp_utils_parse_buffer(vbp_context *pcontext, uint8 *data, uint32 size, uint8 init_data_flag);
+
+/*
+ * query parsing result
+ */
+uint32 vbp_utils_query(vbp_context *pcontext, void **data);
+
+/*
+ * flush un-parsed bitstream
+ */
+uint32 vbp_utils_flush(vbp_context *pcontext);
+
+#endif /* VBP_UTILS_H */
diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.c b/mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.c
new file mode 100644
index 0000000..502cdc6
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.c
@@ -0,0 +1,1029 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
 The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel's prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+
+#include <glib.h>
+#include <dlfcn.h>
+#include <string.h>
+
+#include "vc1.h"
+#include "vbp_loader.h"
+#include "vbp_utils.h"
+#include "vbp_vc1_parser.h"
+
+/* maximum number of Macroblock divided by 2, see va.h */
+#define MAX_BITPLANE_SIZE 16384
+
+/* Start code prefix is 001 which is 3 bytes. */
+#define PREFIX_SIZE 3
+
+static uint32 b_fraction_table[][9] = {
+ /* num 0 1 2 3 4 5 6 7 8 den */
+ /* 0 */ { 0, 0, 0, 0, 0, 0, 0, 0, 0 },
+ /* 1 */ { 0, 0, 0, 1, 3, 5, 9, 11, 17 },
+ /* 2 */ { 0, 0, 0, 2, 0, 6, 0, 12, 0 },
+ /* 3 */ { 0, 0, 0, 0, 4, 7, 0, 13, 18 },
+ /* 4 */ { 0, 0, 0, 0, 0, 8, 0, 14, 0 },
+ /* 5 */ { 0, 0, 0, 0, 0, 0, 10, 15, 19 },
+ /* 6 */ { 0, 0, 0, 0, 0, 0, 0, 16, 0 },
+ /* 7 */ { 0, 0, 0, 0, 0, 0, 0, 0, 20 }
+};
+
+
+
+/**
+ * set parser entry points
+ */
+uint32 vbp_init_parser_entries_vc1(vbp_context *pcontext)
+{
+ if (NULL == pcontext->parser_ops)
+ {
+ /* impossible, just sanity check */
+ return VBP_PARM;
+ }
+
+ pcontext->parser_ops->init = dlsym(pcontext->fd_parser, "viddec_vc1_init");
+ if (NULL == pcontext->parser_ops->init)
+ {
+ ETRACE ("Failed to set entry point.");
+ return VBP_LOAD;
+ }
+
+ pcontext->parser_ops->parse_sc = viddec_parse_sc;
+
+ pcontext->parser_ops->parse_syntax = dlsym(pcontext->fd_parser, "viddec_vc1_parse");
+ if (NULL == pcontext->parser_ops->parse_syntax)
+ {
+ ETRACE ("Failed to set entry point.");
+ return VBP_LOAD;
+ }
+
+ pcontext->parser_ops->get_cxt_size = dlsym(pcontext->fd_parser, "viddec_vc1_get_context_size");
+ if (NULL == pcontext->parser_ops->get_cxt_size)
+ {
+ ETRACE ("Failed to set entry point.");
+ return VBP_LOAD;
+ }
+
+ pcontext->parser_ops->is_wkld_done = dlsym(pcontext->fd_parser, "viddec_vc1_wkld_done");
+ if (NULL == pcontext->parser_ops->is_wkld_done)
+ {
+ ETRACE ("Failed to set entry point.");
+ return VBP_LOAD;
+ }
+
+ pcontext->parser_ops->is_frame_start = dlsym(pcontext->fd_parser, "viddec_vc1_is_start_frame");
+ if (NULL == pcontext->parser_ops->is_frame_start)
+ {
+ ETRACE ("Failed to set entry point.");
+ return VBP_LOAD;
+ }
+
+ return VBP_OK;
+}
+
+/**
+ * allocate query data structure
+ */
+uint32 vbp_allocate_query_data_vc1(vbp_context *pcontext)
+{
+ if (NULL != pcontext->query_data)
+ {
+ /* impossible, just sanity check */
+ return VBP_PARM;
+ }
+
+ pcontext->query_data = NULL;
+
+ vbp_data_vc1 *query_data = NULL;
+ query_data = g_try_new0(vbp_data_vc1, 1);
+ if (NULL == query_data)
+ {
+ return VBP_MEM;
+ }
+
+ /* assign the pointer */
+ pcontext->query_data = (void *)query_data;
+
+ query_data->se_data = g_try_new0(vbp_codec_data_vc1, 1);
+ if (NULL == query_data->se_data)
+ {
+ goto cleanup;
+ }
+ query_data->pic_data = g_try_new0(vbp_picture_data_vc1, MAX_NUM_PICTURES);
+ if (NULL == query_data->pic_data)
+ {
+ goto cleanup;
+ }
+
+ int i;
+ for (i = 0; i < MAX_NUM_PICTURES; i++)
+ {
+ query_data->pic_data[i].pic_parms = g_try_new0(VAPictureParameterBufferVC1, 1);
+ if (NULL == query_data->pic_data[i].pic_parms)
+ {
+ goto cleanup;
+ }
+
+ query_data->pic_data[i].packed_bitplanes = g_try_malloc0(MAX_BITPLANE_SIZE);
+ if (NULL == query_data->pic_data[i].packed_bitplanes)
+ {
+ goto cleanup;
+ }
+
+ query_data->pic_data[i].slc_data = g_try_malloc0(MAX_NUM_SLICES * sizeof(vbp_slice_data_vc1));
+ if (NULL == query_data->pic_data[i].slc_data)
+ {
+ goto cleanup;
+ }
+ }
+
+ return VBP_OK;
+
+cleanup:
+ vbp_free_query_data_vc1(pcontext);
+
+ return VBP_MEM;
+}
+
+
+/**
+ * free query data structure
+ */
+uint32 vbp_free_query_data_vc1(vbp_context *pcontext)
+{
+ vbp_data_vc1 *query_data = NULL;
+
+ if (NULL == pcontext->query_data)
+ {
+ return VBP_OK;
+ }
+
+ query_data = (vbp_data_vc1 *)pcontext->query_data;
+
+ if (query_data->pic_data)
+ {
+ int i = 0;
+ for (i = 0; i < MAX_NUM_PICTURES; i++)
+ {
+ g_free(query_data->pic_data[i].slc_data);
+ g_free(query_data->pic_data[i].packed_bitplanes);
+ g_free(query_data->pic_data[i].pic_parms);
+ }
+ }
+
+ g_free(query_data->pic_data);
+
+ g_free(query_data->se_data);
+
+ g_free(query_data);
+
+ pcontext->query_data = NULL;
+
+ return VBP_OK;
+}
+
+
+/**
+ * We want to create a list of buffer segments where each segment is a start
+ * code followed by all the data up to the next start code or to the end of
+ * the buffer. In VC-1, it is common to get buffers with no start codes. The
+ * parser proper, doesn't really handle the situation where there are no SCs.
+ * In this case, I will bypass the stripping of the SC code and assume a frame.
+ */
static uint32 vbp_parse_start_code_helper_vc1(
    viddec_pm_cxt_t *cxt,
    viddec_parser_ops_t *ops,
    int init_data_flag)
{
    uint32_t ret = VBP_OK;
    viddec_sc_parse_cubby_cxt_t cubby;

    /* make copy of cubby */
    /* this doesn't copy the buffer, merely the structure that holds the buffer */
    /* pointer. Below, where we call parse_sc() the code starts the search for */
    /* SCs at the beginning of the buffer pointed to by the cubby, so in our */
    /* cubby copy we increment the pointer as we move through the buffer. If */
    /* you think of each start code followed either by another start code or the */
    /* end of the buffer, then parse_sc() is returning information relative to */
    /* current segment. */

    cubby = cxt->parse_cubby;

    /* begin with an empty segment list; emulation-prevention handling on */
    cxt->list.num_items = 0;
    cxt->list.data[0].stpos = 0;
    cxt->getbits.is_emul_reqd = 1;

    /* codec initialization data is always start code prefixed. (may not start at position 0)
     * sample buffer for AP has three start code patterns here:
     * pattern 0: no start code at all, the whole buffer is a single segment item
     * pattern 1: start codes for all segment items
     * pattern 2: no start code for the first segment item, start codes for the rest segment items
     */

    gboolean is_pattern_two = FALSE;

    unsigned char start_code = 0;

    while(1)
    {
        /* parse the created buffer for sc */
        ret = ops->parse_sc((void *)&cubby, (void *)&(cxt->codec_data[0]), &(cxt->sc_prefix_info));
        if(ret == 1)
        {
            /* a start code was found; sc_end_pos points at the SC type byte */
            cubby.phase = 0;
            start_code = *(unsigned char*)(cubby.buf + cubby.sc_end_pos);
#if 1
            if (0 == init_data_flag &&
                PREFIX_SIZE != cubby.sc_end_pos &&
                0 == cxt->list.num_items)
            {
                /* buffer does not have start code at the beginning */
                vc1_viddec_parser_t *parser = NULL;
                vc1_metadata_t *seqLayerHeader = NULL;

                parser = (vc1_viddec_parser_t *)cxt->codec_data;
                seqLayerHeader = &(parser->info.metadata);
                if (1 == seqLayerHeader->INTERLACE)
                {
                    /* this is a hack for interlaced field coding */
                    /* handle field interlace coding. One sample contains two fields, where:
                     * the first field does not have start code prefix,
                     * the second field has start code prefix.
                     */
                    cxt->list.num_items = 1;
                    cxt->list.data[0].stpos = 0;
                    is_pattern_two = TRUE;
                }
            }
#endif
            if (cxt->list.num_items == 0) /* found first SC. */
            {
                /* sc_end_pos gets us to the SC type. We need to back up to the first zero */
                cxt->list.data[0].stpos = cubby.sc_end_pos - PREFIX_SIZE;
            }
            else
            {
                /* First we set the end position of the last segment. */
                /* Since the SC parser searches from SC type to SC type and the */
                /* sc_end_pos is relative to this segment only, we merely add */
                /* sc_end_pos to the start to find the end. */
                cxt->list.data[cxt->list.num_items - 1].edpos =
                    cubby.sc_end_pos + cxt->list.data[cxt->list.num_items - 1].stpos;

                /* Then we set the start position of the current segment. */
                /* NOTE(review): the new segment starts where the previous one
                 * ended; it is unclear whether the SC prefix should be excluded
                 * here (original comment asked "subtract 1 ???") — verify. */
                cxt->list.data[cxt->list.num_items].stpos =
                    cxt->list.data[cxt->list.num_items - 1].edpos;

                if (is_pattern_two)
                {
                    /* first field had no prefix, so pull the start back over it */
                    cxt->list.data[cxt->list.num_items].stpos -= PREFIX_SIZE;
                    /* restore to normal pattern */
                    is_pattern_two = FALSE;
                }
            }
            /* We need to set up the cubby buffer for the next time through parse_sc(). */
            /* But even though we want the list to contain a segment as described */
            /* above, we want the cubby buffer to start just past the prefix, or it will */
            /* find the same SC again. So I bump the cubby buffer past the prefix. */
            cubby.buf = cubby.buf +
                cxt->list.data[cxt->list.num_items].stpos +
                PREFIX_SIZE;

            cubby.size = cxt->parse_cubby.size -
                cxt->list.data[cxt->list.num_items].stpos -
                PREFIX_SIZE;

            if (start_code >= 0x0A && start_code <= 0x0F)
            {
                /* only put known start code to the list
                 * 0x0A: end of sequence
                 * 0x0B: slice header
                 * 0x0C: frame header
                 * 0x0D: field header
                 * 0x0E: entry point header
                 * 0x0F: sequence header
                 */
                cxt->list.num_items++;
            }
            else
            {
                ITRACE("skipping unknown start code :%d", start_code);
            }

            if (cxt->list.num_items >= MAX_IBUFS_PER_SC)
            {
                WTRACE("Num items exceeds the limit!");
                /* not fatal, just stop parsing */
                break;
            }
        }
        else
        {
            /* we get here, if we reach the end of the buffer while looking or a SC. */
            /* If we never found a SC, then num_items will never get incremented. */
            if (cxt->list.num_items == 0)
            {
                /* If we don't find a SC we probably still have a frame of data. */
                /* So let's bump the num_items or else later we will not parse the */
                /* frame. */
                cxt->list.num_items = 1;
            }
            /* now we can set the end position of the last segment. */
            cxt->list.data[cxt->list.num_items - 1].edpos = cxt->parse_cubby.size;
            break;
        }
    }
    return VBP_OK;
}
+
+/*
+* parse initialization data (decoder configuration data)
+* for VC1 advanced profile, data is sequence header and
+* entry pointer header.
+* for VC1 main/simple profile, data format
+* is defined in VC1 spec: Annex J, (Decoder initialization metadata
+* structure 1 and structure 3
+*/
+uint32 vbp_parse_init_data_vc1(vbp_context *pcontext)
+{
+ /**
+ * init data (aka decoder configuration data) must
+ * be start-code prefixed
+ */
+
+ viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
+ viddec_parser_ops_t *ops = pcontext->parser_ops;
+ return vbp_parse_start_code_helper_vc1(cxt, ops, 1);
+}
+
+
+
+/**
+* Parse start codes, VC1 main/simple profile does not have start code;
+* VC1 advanced may not have start code either.
+*/
+uint32_t vbp_parse_start_code_vc1(vbp_context *pcontext)
+{
+ viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
+ viddec_parser_ops_t *ops = pcontext->parser_ops;
+
+ vc1_viddec_parser_t *parser = NULL;
+ vc1_metadata_t *seqLayerHeader = NULL;
+
+ vbp_data_vc1 *query_data = (vbp_data_vc1 *) pcontext->query_data;
+
+ /* Reset query data for the new sample buffer */
+ int i = 0;
+ for (i = 0; i < MAX_NUM_PICTURES; i++)
+ {
+ query_data->num_pictures = 0;
+ query_data->pic_data[i].num_slices = 0;
+ query_data->pic_data[i].picture_is_skipped = 0;
+ }
+
+ parser = (vc1_viddec_parser_t *)cxt->codec_data;
+ seqLayerHeader = &(parser->info.metadata);
+
+
+ /* WMV codec data will have a start code, but the WMV picture data won't. */
+ if (VC1_PROFILE_ADVANCED == seqLayerHeader->PROFILE)
+ {
+ return vbp_parse_start_code_helper_vc1(cxt, ops, 0);
+ }
+ else
+ {
+ /* WMV: vc1 simple or main profile. No start code present.
+ */
+
+ /* must set is_emul_reqd to 0! */
+ cxt->getbits.is_emul_reqd = 0;
+ cxt->list.num_items = 1;
+ cxt->list.data[0].stpos = 0;
+ cxt->list.data[0].edpos = cxt->parse_cubby.size;
+ }
+
+ return VBP_OK;
+}
+
+
+/**
+ *
+ */
+static inline uint8 vbp_get_bit_vc1(uint32 *data, uint32 *current_word, uint32 *current_bit)
+{
+ uint8 value;
+
+ value = (data[*current_word] >> *current_bit) & 1;
+
+ /* Fix up bit/byte offsets. endianess?? */
+ if (*current_bit < 31)
+ {
+ ++(*current_bit);
+ }
+ else
+ {
+ ++(*current_word);
+ *current_bit = 0;
+ }
+
+ return value;
+}
+
+
+/**
+ *
+ */
/**
 * Pack one raw bit-plane (one bit per macroblock, rows padded to 32-bit
 * words) into the libva packed-bitplane buffer: two macroblocks per
 * byte, one nibble each, with nibble_shift selecting which bit of the
 * nibble this plane occupies (see va.h; callers pass 0, 1 or 2).
 * Assumes each source row starts on a word boundary — TODO confirm the
 * parser pads planes to 'stride' bits per row.
 */
static uint32 vbp_pack_bitplane_vc1(
    uint32 *from_plane,
    uint8 *to_plane,
    uint32 width,
    uint32 height,
    uint32 nibble_shift)
{
    uint32 error = VBP_OK;
    uint32 current_word = 0;
    uint32 current_bit = 0; /* must agree with number in vbp_get_bit_vc1 */
    uint32 i, j, n;
    uint8 value;
    uint32 stride = 0;

    /* source rows are rounded up to whole 32-bit words */
    stride = 32 * ((width + 31) / 32);

    for (i = 0, n = 0; i < height; i++)
    {
        for (j = 0; j < stride; j++)
        {
            if (j < width)
            {
                value = vbp_get_bit_vc1(
                    from_plane,
                    &current_word,
                    &current_bit);

                /* even n -> high nibble, odd n -> low nibble of byte n/2 */
                to_plane[n / 2] |= value << (nibble_shift + ((n % 2) ? 0 : 4));
                n++;
            }
            else
            {
                break;
            }
        }
        if (stride > width)
        {
            /* skip the row's padding bits by jumping to the next word */
            current_word++;
            current_bit = 0;
        }
    }

    return error;
}
+
+
+/**
+ *
+ */
+static inline uint32 vbp_map_bfraction(uint32 numerator, uint32 denominator)
+{
+ uint32 b_fraction = 0;
+
+ if ((numerator < 8) && (denominator < 9))
+ {
+ b_fraction = b_fraction_table[numerator][denominator];
+ }
+
+ return b_fraction;
+}
+
+/**
+ *
+ */
/**
 * Pack the coded bit-planes of the current picture into the libva
 * packed-bitplane buffer. Which planes may appear depends on the
 * picture type (I/BI, P, B); planes that are not legal for the type
 * trigger a VBP_TYPE sanity error. Must run after
 * vbp_pack_picture_params_vc1 so bitplane_present is up to date.
 * 'index' (the segment index) is currently unused here.
 */
static uint32 vbp_pack_bitplanes_vc1(
    vbp_context *pcontext,
    int index,
    vbp_picture_data_vc1* pic_data)
{
    uint32 error = VBP_OK;
    if (0 == pic_data->pic_parms->bitplane_present.value)
    {
        /* return if bitplane is not present */
        pic_data->size_bitplanes = 0;
        memset(pic_data->packed_bitplanes, 0, MAX_BITPLANE_SIZE);
        return error;
    }

    vc1_viddec_parser_t *parser = (vc1_viddec_parser_t *)pcontext->parser_cxt->codec_data;
    vc1_metadata_t *seqLayerHeader = &(parser->info.metadata);
    vc1_PictureLayerHeader *picLayerHeader = &(parser->info.picLayerHeader);


    /* set bit plane size: one bit per macroblock, two MBs packed per byte */
    pic_data->size_bitplanes = ((seqLayerHeader->widthMB * seqLayerHeader->heightMB) + 1) / 2;


    memset(pic_data->packed_bitplanes, 0, pic_data->size_bitplanes);

    /* see libva library va.h for nibble bit */
    switch (picLayerHeader->PTYPE)
    {
        case VC1_I_FRAME:
        case VC1_BI_FRAME:
        /* I/BI pictures may carry OVERFLAGS, ACPRED and FIELDTX planes */
        if (picLayerHeader->OVERFLAGS.imode)
        {
            vbp_pack_bitplane_vc1(
                picLayerHeader->OVERFLAGS.databits,
                pic_data->packed_bitplanes,
                seqLayerHeader->widthMB,
                seqLayerHeader->heightMB,
                2);
        }
        if (picLayerHeader->ACPRED.imode)
        {
            vbp_pack_bitplane_vc1(
                picLayerHeader->ACPRED.databits,
                pic_data->packed_bitplanes,
                seqLayerHeader->widthMB,
                seqLayerHeader->heightMB,
                1);
        }
        if (picLayerHeader->FIELDTX.imode)
        {
            vbp_pack_bitplane_vc1(
                picLayerHeader->FIELDTX.databits,
                pic_data->packed_bitplanes,
                seqLayerHeader->widthMB,
                seqLayerHeader->heightMB,
                0);
        }
        /* sanity check */
        if (picLayerHeader->MVTYPEMB.imode ||
            picLayerHeader->DIRECTMB.imode ||
            picLayerHeader->SKIPMB.imode ||
            picLayerHeader->FORWARDMB.imode)
        {
            ETRACE("Unexpected bit-plane type.");
            error = VBP_TYPE;
        }
        break;

        case VC1_P_FRAME:
        /* P pictures may carry MVTYPEMB, SKIPMB and DIRECTMB planes */
        if (picLayerHeader->MVTYPEMB.imode)
        {
            vbp_pack_bitplane_vc1(
                picLayerHeader->MVTYPEMB.databits,
                pic_data->packed_bitplanes,
                seqLayerHeader->widthMB,
                seqLayerHeader->heightMB,
                2);
        }
        if (picLayerHeader->SKIPMB.imode)
        {
            vbp_pack_bitplane_vc1(
                picLayerHeader->SKIPMB.databits,
                pic_data->packed_bitplanes,
                seqLayerHeader->widthMB,
                seqLayerHeader->heightMB,
                1);
        }
        if (picLayerHeader->DIRECTMB.imode)
        {
            vbp_pack_bitplane_vc1(
                picLayerHeader->DIRECTMB.databits,
                pic_data->packed_bitplanes,
                seqLayerHeader->widthMB,
                seqLayerHeader->heightMB,
                0);
        }
        /* sanity check */
        if (picLayerHeader->FIELDTX.imode ||
            picLayerHeader->FORWARDMB.imode ||
            picLayerHeader->ACPRED.imode ||
            picLayerHeader->OVERFLAGS.imode )
        {
            ETRACE("Unexpected bit-plane type.");
            error = VBP_TYPE;
        }
        break;

        case VC1_B_FRAME:
        /* B pictures may carry FORWARDMB, SKIPMB and DIRECTMB planes */
        if (picLayerHeader->FORWARDMB.imode)
        {
            vbp_pack_bitplane_vc1(
                picLayerHeader->FORWARDMB.databits,
                pic_data->packed_bitplanes,
                seqLayerHeader->widthMB,
                seqLayerHeader->heightMB,
                2);
        }
        if (picLayerHeader->SKIPMB.imode)
        {
            vbp_pack_bitplane_vc1(
                picLayerHeader->SKIPMB.databits,
                pic_data->packed_bitplanes,
                seqLayerHeader->widthMB,
                seqLayerHeader->heightMB,
                1);
        }
        if (picLayerHeader->DIRECTMB.imode)
        {
            vbp_pack_bitplane_vc1(
                picLayerHeader->DIRECTMB.databits,
                pic_data->packed_bitplanes,
                seqLayerHeader->widthMB,
                seqLayerHeader->heightMB,
                0);
        }
        /* sanity check */
        if (picLayerHeader->MVTYPEMB.imode ||
            picLayerHeader->FIELDTX.imode ||
            picLayerHeader->ACPRED.imode ||
            picLayerHeader->OVERFLAGS.imode)
        {
            ETRACE("Unexpected bit-plane type.");
            error = VBP_TYPE;
        }
        break;
    }
    return error;
}
+
+
+/**
+ * fill the query data structure after sequence header, entry point header
+ * or a complete frame is parsed.
+ * NOTE: currently partial frame is not handled properly
+ */
+uint32 vbp_populate_query_data_vc1(vbp_context *pcontext)
+{
+ uint32 error = VBP_OK;
+
+ vc1_viddec_parser_t *parser = (vc1_viddec_parser_t *)pcontext->parser_cxt->codec_data;
+ vc1_metadata_t *seqLayerHeader = &(parser->info.metadata);
+
+ vbp_data_vc1 *query_data = (vbp_data_vc1 *)pcontext->query_data;
+
+ /* first we get the SH/EP data. Can we cut down on this? */
+ vbp_codec_data_vc1 *se_data = query_data->se_data;
+ se_data->PROFILE = seqLayerHeader->PROFILE;
+ se_data->LEVEL = seqLayerHeader->LEVEL;
+ se_data->POSTPROCFLAG = seqLayerHeader->POSTPROCFLAG;
+ se_data->PULLDOWN = seqLayerHeader->PULLDOWN;
+ se_data->INTERLACE = seqLayerHeader->INTERLACE;
+ se_data->TFCNTRFLAG = seqLayerHeader->TFCNTRFLAG;
+ se_data->FINTERPFLAG = seqLayerHeader->FINTERPFLAG;
+ se_data->PSF = seqLayerHeader->PSF;
+ se_data->BROKEN_LINK = seqLayerHeader->BROKEN_LINK;
+ se_data->CLOSED_ENTRY = seqLayerHeader->CLOSED_ENTRY;
+ se_data->PANSCAN_FLAG = seqLayerHeader->PANSCAN_FLAG;
+ se_data->REFDIST_FLAG = seqLayerHeader->REFDIST_FLAG;
+ se_data->LOOPFILTER = seqLayerHeader->LOOPFILTER;
+ se_data->FASTUVMC = seqLayerHeader->FASTUVMC;
+ se_data->EXTENDED_MV = seqLayerHeader->EXTENDED_MV;
+ se_data->DQUANT = seqLayerHeader->DQUANT;
+ se_data->VSTRANSFORM = seqLayerHeader->VSTRANSFORM;
+ se_data->OVERLAP = seqLayerHeader->OVERLAP;
+ se_data->QUANTIZER = seqLayerHeader->QUANTIZER;
+ se_data->CODED_WIDTH = (seqLayerHeader->width + 1) << 1;
+ se_data->CODED_HEIGHT = (seqLayerHeader->height + 1) << 1;
+ se_data->EXTENDED_DMV = seqLayerHeader->EXTENDED_DMV;
+ se_data->RANGE_MAPY_FLAG = seqLayerHeader->RANGE_MAPY_FLAG;
+ se_data->RANGE_MAPY = seqLayerHeader->RANGE_MAPY;
+ se_data->RANGE_MAPUV_FLAG = seqLayerHeader->RANGE_MAPUV_FLAG;
+ se_data->RANGE_MAPUV = seqLayerHeader->RANGE_MAPUV;
+ se_data->RANGERED = seqLayerHeader->RANGERED;
+ se_data->MAXBFRAMES = seqLayerHeader->MAXBFRAMES;
+ se_data->MULTIRES = seqLayerHeader->MULTIRES;
+ se_data->SYNCMARKER = seqLayerHeader->SYNCMARKER;
+ se_data->RNDCTRL = seqLayerHeader->RNDCTRL;
+ se_data->REFDIST = seqLayerHeader->REFDIST;
+ se_data->widthMB = seqLayerHeader->widthMB;
+ se_data->heightMB = seqLayerHeader->heightMB;
+ se_data->INTCOMPFIELD = seqLayerHeader->INTCOMPFIELD;
+ se_data->LUMSCALE2 = seqLayerHeader->LUMSCALE2;
+ se_data->LUMSHIFT2 = seqLayerHeader->LUMSHIFT2;
+
+ /* update buffer number */
+ query_data->buf_number = buffer_counter;
+
+ if (query_data->num_pictures > 2)
+ {
+ WTRACE("sampe buffer contains %d pictures", query_data->num_pictures);
+ }
+ return error;
+}
+
+
+
/**
 * Translate the parser's sequence/entry-point/picture-layer state into
 * the libva VAPictureParameterBufferVC1 for the current picture.
 * 'index' (the segment index) is currently unused here. Reference
 * surfaces are initialized to VA_INVALID_SURFACE; the decoder client is
 * expected to fill them in — TODO confirm against callers.
 */
static void vbp_pack_picture_params_vc1(
    vbp_context *pcontext,
    int index,
    vbp_picture_data_vc1* pic_data)
{
    viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
    vc1_viddec_parser_t *parser = (vc1_viddec_parser_t *)cxt->codec_data;
    vc1_metadata_t *seqLayerHeader = &(parser->info.metadata);
    vc1_PictureLayerHeader *picLayerHeader = &(parser->info.picLayerHeader);


    VAPictureParameterBufferVC1 *pic_parms = pic_data->pic_parms;

    /* Then we get the picture header data. Picture type need translation. */
    pic_parms->forward_reference_picture = VA_INVALID_SURFACE;
    pic_parms->backward_reference_picture = VA_INVALID_SURFACE;
    pic_parms->inloop_decoded_picture = VA_INVALID_SURFACE;

    pic_parms->sequence_fields.value = 0;
    pic_parms->sequence_fields.bits.interlace = seqLayerHeader->INTERLACE;
    pic_parms->sequence_fields.bits.syncmarker = seqLayerHeader->SYNCMARKER;
    pic_parms->sequence_fields.bits.overlap = seqLayerHeader->OVERLAP;

    /* headers store (size/2 - 1); recover the coded pixel dimensions */
    pic_parms->coded_width = (seqLayerHeader->width + 1) << 1;
    pic_parms->coded_height = (seqLayerHeader->height + 1) << 1;

    pic_parms->entrypoint_fields.value = 0;
    pic_parms->entrypoint_fields.bits.closed_entry = seqLayerHeader->CLOSED_ENTRY;
    pic_parms->entrypoint_fields.bits.broken_link = seqLayerHeader->BROKEN_LINK;
    pic_parms->entrypoint_fields.bits.loopfilter = seqLayerHeader->LOOPFILTER;

    pic_parms->conditional_overlap_flag = picLayerHeader->CONDOVER;
    pic_parms->fast_uvmc_flag = seqLayerHeader->FASTUVMC;

    pic_parms->range_mapping_fields.value = 0;
    pic_parms->range_mapping_fields.bits.luma_flag = seqLayerHeader->RANGE_MAPY_FLAG;
    pic_parms->range_mapping_fields.bits.luma = seqLayerHeader->RANGE_MAPY;
    pic_parms->range_mapping_fields.bits.chroma_flag = seqLayerHeader->RANGE_MAPUV_FLAG;
    pic_parms->range_mapping_fields.bits.chroma = seqLayerHeader->RANGE_MAPUV;

    /* translate BFRACTION num/den into the libva fraction code */
    pic_parms->b_picture_fraction =
        vbp_map_bfraction(picLayerHeader->BFRACTION_NUM, picLayerHeader->BFRACTION_DEN);

    pic_parms->cbp_table = picLayerHeader->CBPTAB;
    pic_parms->mb_mode_table = picLayerHeader->MBMODETAB;
    pic_parms->range_reduction_frame = picLayerHeader->RANGEREDFRM;
    pic_parms->rounding_control = picLayerHeader->RNDCTRL;
    pic_parms->post_processing = picLayerHeader->POSTPROC;
    /* fix this. Add RESPIC to parser. */
    pic_parms->picture_resolution_index = 0;
    pic_parms->luma_scale = picLayerHeader->LUMSCALE;
    pic_parms->luma_shift = picLayerHeader->LUMSHIFT;

    pic_parms->picture_fields.value = 0;
    switch (picLayerHeader->PTYPE)
    {
        case VC1_I_FRAME:
        pic_parms->picture_fields.bits.picture_type = VC1_PTYPE_I;
        break;

        case VC1_P_FRAME:
        pic_parms->picture_fields.bits.picture_type = VC1_PTYPE_P;
        break;

        case VC1_B_FRAME:
        pic_parms->picture_fields.bits.picture_type = VC1_PTYPE_B;
        break;

        case VC1_BI_FRAME:
        pic_parms->picture_fields.bits.picture_type = VC1_PTYPE_BI;
        break;

        case VC1_SKIPPED_FRAME:
        /* note: recorded on pic_data, not in picture_fields */
        pic_data->picture_is_skipped = VC1_PTYPE_SKIPPED;
        break;

        default:
        /* to do: handle this case */
        break;
    }
    pic_parms->picture_fields.bits.frame_coding_mode = picLayerHeader->FCM;
    if (0 == seqLayerHeader->PROFILE || 1 == seqLayerHeader->PROFILE)
    {
        /* simple or main profile, top field flag is not present, default to 1.*/
        pic_parms->picture_fields.bits.top_field_first = 1;
    }
    else
    {
        pic_parms->picture_fields.bits.top_field_first = picLayerHeader->TFF;
    }

    pic_parms->picture_fields.bits.is_first_field = !(picLayerHeader->CurrField);
    /* This seems to be set based on the MVMODE and MVMODE2 syntax. */
    /* This is a hack. Probably will need refining. */
    if ((VC1_MVMODE_INTENSCOMP == picLayerHeader->MVMODE) ||
        (VC1_MVMODE_INTENSCOMP == picLayerHeader->MVMODE2))
    {
        pic_parms->picture_fields.bits.intensity_compensation = 1;
    }
    else
    {
        pic_parms->picture_fields.bits.intensity_compensation = picLayerHeader->INTCOMP;
    }

    /* Lets store the raw-mode BP bits. */
    pic_parms->raw_coding.value = 0;
    pic_parms->raw_coding.flags.mv_type_mb = picLayerHeader->raw_MVTYPEMB;
    pic_parms->raw_coding.flags.direct_mb = picLayerHeader->raw_DIRECTMB;
    pic_parms->raw_coding.flags.skip_mb = picLayerHeader->raw_SKIPMB;
    pic_parms->raw_coding.flags.field_tx = picLayerHeader->raw_FIELDTX;
    pic_parms->raw_coding.flags.forward_mb = picLayerHeader->raw_FORWARDMB;
    pic_parms->raw_coding.flags.ac_pred = picLayerHeader->raw_ACPRED;
    pic_parms->raw_coding.flags.overflags = picLayerHeader->raw_OVERFLAGS;

    /* imode 1/0 indicates bitmap presence in Pic Hdr. */
    /* each plane is "present" if coded raw OR carried as a bitplane */
    pic_parms->bitplane_present.value = 0;

    pic_parms->bitplane_present.flags.bp_mv_type_mb =
        pic_parms->raw_coding.flags.mv_type_mb ? 1 :
        (picLayerHeader->MVTYPEMB.imode ? 1: 0);

    pic_parms->bitplane_present.flags.bp_direct_mb =
        pic_parms->raw_coding.flags.direct_mb ? 1 :
        (picLayerHeader->DIRECTMB.imode ? 1: 0);

    pic_parms->bitplane_present.flags.bp_skip_mb =
        pic_parms->raw_coding.flags.skip_mb ? 1 :
        (picLayerHeader->SKIPMB.imode ? 1: 0);

    pic_parms->bitplane_present.flags.bp_field_tx =
        pic_parms->raw_coding.flags.field_tx ? 1 :
        (picLayerHeader->FIELDTX.imode ? 1: 0);

    pic_parms->bitplane_present.flags.bp_forward_mb =
        pic_parms->raw_coding.flags.forward_mb ? 1 :
        (picLayerHeader->FORWARDMB.imode ? 1: 0);

    pic_parms->bitplane_present.flags.bp_ac_pred =
        pic_parms->raw_coding.flags.ac_pred ? 1 :
        (picLayerHeader->ACPRED.imode ? 1: 0);

    pic_parms->bitplane_present.flags.bp_overflags =
        pic_parms->raw_coding.flags.overflags ? 1 :
        (picLayerHeader->OVERFLAGS.imode ? 1: 0);

    pic_parms->reference_fields.value = 0;
    pic_parms->reference_fields.bits.reference_distance_flag =
        seqLayerHeader->REFDIST_FLAG;

    pic_parms->reference_fields.bits.reference_distance =
        seqLayerHeader->REFDIST;

    pic_parms->reference_fields.bits.num_reference_pictures =
        picLayerHeader->NUMREF;

    pic_parms->reference_fields.bits.reference_field_pic_indicator =
        picLayerHeader->REFFIELD;

    pic_parms->mv_fields.value = 0;
    pic_parms->mv_fields.bits.mv_mode = picLayerHeader->MVMODE;
    pic_parms->mv_fields.bits.mv_mode2 = picLayerHeader->MVMODE2;

    pic_parms->mv_fields.bits.mv_table = picLayerHeader->MVTAB;
    pic_parms->mv_fields.bits.two_mv_block_pattern_table = picLayerHeader->MV2BPTAB;
    pic_parms->mv_fields.bits.four_mv_switch = picLayerHeader->MV4SWITCH;
    pic_parms->mv_fields.bits.four_mv_block_pattern_table = picLayerHeader->MV4BPTAB;
    pic_parms->mv_fields.bits.extended_mv_flag = seqLayerHeader->EXTENDED_MV;
    pic_parms->mv_fields.bits.extended_mv_range = picLayerHeader->MVRANGE;
    pic_parms->mv_fields.bits.extended_dmv_flag = seqLayerHeader->EXTENDED_DMV;
    pic_parms->mv_fields.bits.extended_dmv_range = picLayerHeader->DMVRANGE;

    pic_parms->pic_quantizer_fields.value = 0;
    pic_parms->pic_quantizer_fields.bits.dquant = seqLayerHeader->DQUANT;
    pic_parms->pic_quantizer_fields.bits.quantizer = seqLayerHeader->QUANTIZER;
    pic_parms->pic_quantizer_fields.bits.half_qp = picLayerHeader->HALFQP;
    pic_parms->pic_quantizer_fields.bits.pic_quantizer_scale = picLayerHeader->PQUANT;
    pic_parms->pic_quantizer_fields.bits.pic_quantizer_type = picLayerHeader->UniformQuant;
    pic_parms->pic_quantizer_fields.bits.dq_frame = picLayerHeader->DQUANTFRM;
    pic_parms->pic_quantizer_fields.bits.dq_profile = picLayerHeader->DQPROFILE;
    pic_parms->pic_quantizer_fields.bits.dq_sb_edge = picLayerHeader->DQSBEDGE;
    pic_parms->pic_quantizer_fields.bits.dq_db_edge = picLayerHeader->DQDBEDGE;
    pic_parms->pic_quantizer_fields.bits.dq_binary_level = picLayerHeader->DQBILEVEL;
    pic_parms->pic_quantizer_fields.bits.alt_pic_quantizer = picLayerHeader->ALTPQUANT;

    pic_parms->transform_fields.value = 0;
    pic_parms->transform_fields.bits.variable_sized_transform_flag =
        seqLayerHeader->VSTRANSFORM;

    pic_parms->transform_fields.bits.mb_level_transform_type_flag = picLayerHeader->TTMBF;
    pic_parms->transform_fields.bits.frame_level_transform_type = picLayerHeader->TTFRM;

    /* TRANSACFRM/TRANSACFRM2 are 1-based when present; 0 means absent */
    pic_parms->transform_fields.bits.transform_ac_codingset_idx1 =
        (picLayerHeader->TRANSACFRM > 0) ? picLayerHeader->TRANSACFRM - 1 : 0;

    pic_parms->transform_fields.bits.transform_ac_codingset_idx2 =
        (picLayerHeader->TRANSACFRM2 > 0) ? picLayerHeader->TRANSACFRM2 - 1 : 0;

    pic_parms->transform_fields.bits.intra_transform_dc_table = picLayerHeader->TRANSDCTAB;
}
+
+
/**
 * Append one slice entry for segment 'index' to the picture's slice
 * array and fill its VASliceParameterBufferVC1. The parser's current
 * access-unit position supplies the byte offset of the slice payload
 * within the segment and the bit offset of the first macroblock.
 * Increments pic_data->num_slices; the caller checks the limit.
 */
static void vbp_pack_slice_data_vc1(
    vbp_context *pcontext,
    int index,
    vbp_picture_data_vc1* pic_data)
{
    viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
    /* total bytes in this segment (start code through end) */
    uint32 slice_size = cxt->list.data[index].edpos - cxt->list.data[index].stpos;
    uint32 bit;
    uint32 byte;
    uint8 is_emul;
    viddec_pm_get_au_pos(cxt, &bit, &byte, &is_emul);

    vbp_slice_data_vc1 *slc_data = &(pic_data->slc_data[pic_data->num_slices]);
    VASliceParameterBufferVC1 *slc_parms = &(slc_data->slc_parms);

    /*uint32 data_offset = byte - cxt->list.data[index].stpos;*/

    slc_data->buffer_addr = cxt->parse_cubby.buf + cxt->list.data[index].stpos;
    /* payload excludes the 'byte' bytes of header already consumed */
    slc_data->slice_size = slice_size - byte;
    slc_data->slice_offset = byte;

    slc_parms->slice_data_size = slc_data->slice_size;
    slc_parms->slice_data_offset = 0;

    /* fix this. we need to be able to handle partial slices. */
    slc_parms->slice_data_flag = VA_SLICE_DATA_FLAG_ALL;

    slc_parms->macroblock_offset = bit;

    /* fix this. we need o get the slice_vertical_position from the code */
    slc_parms->slice_vertical_position = pic_data->num_slices;

    pic_data->num_slices++;
}
+
+/**
+ * process parsing result
+ */
/**
 * Process the parsing result for segment 'index': for frame/field start
 * codes, start a new picture and pack its picture parameters and
 * bitplanes; for frame/field/slice start codes, always append a slice
 * entry. Other start codes (sequence, entry point, ...) are ignored
 * here. Returns VBP_OK, VBP_DATA on limit/ordering violations, or the
 * bitplane packing error.
 */
uint32_t vbp_process_parsing_result_vc1(vbp_context *pcontext, int index)
{
    viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
    uint32 error = VBP_OK;

    vc1_viddec_parser_t *parser = (vc1_viddec_parser_t *)cxt->codec_data;
    if (parser->start_code != VC1_SC_FRM && parser->start_code != VC1_SC_FLD &&
        parser->start_code != VC1_SC_SLC)
    {
        /* only handle frame data, field data and slice data here
         */
        return VBP_OK;
    }
    vbp_data_vc1 *query_data = (vbp_data_vc1 *)pcontext->query_data;

    /* a frame or field start code opens a new picture */
    if (parser->start_code == VC1_SC_FRM || parser->start_code == VC1_SC_FLD)
    {
        query_data->num_pictures++;
    }

    if (query_data->num_pictures > MAX_NUM_PICTURES)
    {
        ETRACE("Num of pictures per sample buffer exceeds the limit (%d).", MAX_NUM_PICTURES);
        return VBP_DATA;
    }

    /* a slice start code before any frame/field header is invalid */
    if (query_data->num_pictures == 0)
    {
        ETRACE("Unexpected num of pictures.");
        return VBP_DATA;
    }

    /* start packing data */
    int picture_index = query_data->num_pictures - 1;
    vbp_picture_data_vc1* pic_data = &(query_data->pic_data[picture_index]);

    if (parser->start_code == VC1_SC_FRM || parser->start_code == VC1_SC_FLD)
    {
        /* setup picture parameter first*/
        vbp_pack_picture_params_vc1(pcontext, index, pic_data);

        /* setup bitplane after setting up picture parameter (so that bitplane_present is updated) */
        error = vbp_pack_bitplanes_vc1(pcontext, index, pic_data);
        if (VBP_OK != error)
        {
            ETRACE("Failed to pack bitplane.");
            return error;
        }

    }

    /* Always pack slice parameter. The first macroblock in the picture CANNOT
     * be preceeded by a slice header, so we will have first slice parsed always.
     *
     */

    if (pic_data->num_slices >= MAX_NUM_SLICES)
    {
        ETRACE("Num of slices exceeds the limit (%d).", MAX_NUM_SLICES);
        return VBP_DATA;
    }

    /* set up slice parameter */
    vbp_pack_slice_data_vc1(pcontext, index, pic_data);


    return VBP_OK;
}
diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.h b/mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.h
new file mode 100644
index 0000000..510e16c
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.h
@@ -0,0 +1,54 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
#ifndef VBP_VC1_PARSER_H
#define VBP_VC1_PARSER_H


/*
 * Set up the parser's entry points (function-pointer table) for VC-1.
 */
uint32 vbp_init_parser_entries_vc1(vbp_context *pcontext);

/*
 * Allocate the query data structure (vbp_data_vc1) that parsing results
 * are packed into.
 */
uint32 vbp_allocate_query_data_vc1(vbp_context *pcontext);

/*
 * Free the query data structure allocated by vbp_allocate_query_data_vc1.
 */
uint32 vbp_free_query_data_vc1(vbp_context *pcontext);

/*
 * Parse bitstream configuration data (sequence-level headers).
 */
uint32 vbp_parse_init_data_vc1(vbp_context *pcontext);

/*
 * Parse bitstream start code and fill the viddec_input_buffer_t list.
 * WMV has no start code so the whole buffer will be treated as a single frame.
 * For VC1 progressive, if start code is not found, the whole buffer will be treated as a
 * single frame as well.
 * For VC1 interlace, the first field is not start code prefixed, but the second field
 * is always start code prefixed.
 */
uint32 vbp_parse_start_code_vc1(vbp_context *pcontext);

/*
 * Process the parsing result for the ES buffer at list_index, packing it
 * into the query data structure.
 */
uint32 vbp_process_parsing_result_vc1(vbp_context *pcontext, int list_index);

/*
 * Populate the query data structure for return to the caller.
 */
uint32 vbp_populate_query_data_vc1(vbp_context *pcontext);


#endif /*VBP_VC1_PARSER_H*/
diff --git a/mix_vbp/viddec_fw/fw/parser/viddec_emit.c b/mix_vbp/viddec_fw/fw/parser/viddec_emit.c
new file mode 100644
index 0000000..f6e6a8a
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/viddec_emit.c
@@ -0,0 +1,78 @@
+#include "viddec_emitter.h"
+#include "viddec_fw_workload.h"
+#include "viddec_fw_debug.h"
+
+int32_t viddec_emit_flush_current_wkld(viddec_emitter *emit)
+{
+ if(emit->cur.data != NULL)
+ {
+ emit->cur.data->num_items = emit->cur.num_items;
+ }
+ if(emit->next.data != NULL)
+ {
+ emit->next.data->num_items = emit->next.num_items;
+ }
+ emit->cur.num_items = emit->next.num_items;
+ emit->next.num_items = 0;
+ if(emit->cur.data != NULL)
+ {
+ emit->cur.data->result = emit->cur.result;
+ }
+ if(emit->next.data != NULL)
+ {
+ emit->next.data->result = emit->next.result;
+ }
+ emit->cur.result = emit->next.result;
+ emit->next.result = 0;
+ return 1;
+}
+
+int32_t viddec_emit_append(viddec_emitter_wkld *cxt, viddec_workload_item_t *item)
+{
+ int32_t ret =0;
+ if((cxt->num_items < cxt->max_items) && (cxt->data != NULL))
+ {
+ cxt->data->item[cxt->num_items] = *item;
+ cxt->num_items++;
+ ret = 1;
+ CDEB(0, "%s: item(%02d) = [%08x %08x %08x %08x]\n",__FUNCTION__, cxt->num_items - 1, item->vwi_type, item->vwi_payload[0], item->vwi_payload[1], item->vwi_payload[2]);
+ }
+ else
+ {
+ cxt->result |= (VIDDEC_FW_WORKLOAD_ERR_ITEMS_OVERFLOW | VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE);
+ WRITE_SVEN(SVEN_MODULE_EVENT_GV_FW_FATAL_WKLD_OVERLFOW, (int)item->vwi_type, (int)(cxt->data), 0, 0, 0, 0);
+ }
+ return ret;
+}
+
+int32_t viddec_emit_contr_tag(viddec_emitter *emit, viddec_input_buffer_t *ibuf, uint8_t incomplete, uint32_t using_next)
+{
+ viddec_workload_item_t item;
+ viddec_emitter_wkld *cur_wkld;
+
+ cur_wkld = (using_next == 0)? &(emit->cur):&(emit->next);
+
+ if(!incomplete)
+ item.vwi_type = VIDDEC_WORKLOAD_IBUF_DONE;
+ else
+ item.vwi_type = VIDDEC_WORKLOAD_IBUF_CONTINUED;
+ item.tag.tag_phys_addr = ibuf->phys;
+ item.tag.tag_phys_len = ibuf->len;
+ item.tag.tag_value = ibuf->id;
+
+ return viddec_emit_append(cur_wkld, &item);
+}
+
+int32_t viddec_emit_assoc_tag(viddec_emitter *emit, uint32_t id, uint32_t using_next)
+{
+ viddec_workload_item_t item;
+ viddec_emitter_wkld *cur_wkld;
+
+ cur_wkld = (using_next == false)? &(emit->cur):&(emit->next);
+ item.vwi_type = VIDDEC_WORKLOAD_TAG;
+ item.tag.tag_phys_addr = -1;
+ item.tag.tag_phys_len = -1;
+ item.tag.tag_value = id;
+ return viddec_emit_append(cur_wkld, &item);
+}
+
diff --git a/mix_vbp/viddec_fw/fw/parser/viddec_intr.c b/mix_vbp/viddec_fw/fw/parser/viddec_intr.c
new file mode 100644
index 0000000..fa6c1f2
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/viddec_intr.c
@@ -0,0 +1,56 @@
+#include "fw_pvt.h"
+#include "viddec_fw_parser_ipclib_config.h"
+#include "viddec_fw_debug.h"
+
+extern uint32_t timer;
+
/* Enable trap/interrupt delivery for the parser firmware.
 * TRAPS_ENABLE / TRAPS_INT_ENABLE are platform macros (presumably from
 * fw_pvt.h -- TODO confirm their exact effect on this core). */
void enable_intr(void)
{
    TRAPS_ENABLE;
    TRAPS_INT_ENABLE;
    //reg_write(INT_REG, 0);
}
+
+/*------------------------------------------------------------------------------
+ * Function: mfd_trap_handler
+ * This is the FW's ISR, Currently we don't support any INT as we are running parsers only on GV which
+ * are pure SW modules.
+ *------------------------------------------------------------------------------
+ */
void mfd_trap_handler()
{
    uint32_t reg=0, temp=0;
    /* Snapshot the pending interrupt status bits. */
    temp = reg_read(INT_STATUS);
    //DEBUG_WRITE(0xff, temp, timer, 0, 0, 0);
    if(temp & INT_WDOG_ENABLE)
    {
        /* Watchdog tick: bump the soft timer and re-arm the watchdog. */
        timer++;
        set_wdog(VIDDEC_WATCHDOG_COUNTER_MAX);
        /* Read-back of INT_STATUS; value is not used afterwards -- presumably
         * required to acknowledge/flush the watchdog bit. TODO confirm. */
        reg = reg_read(INT_STATUS);
    }
    if(temp & 0x4)
    {
        /* Clear bit 2 (DMA-done per the commented-out code below) by writing
         * the status back with the bit masked off. */
        temp = temp & (~0x4);
        reg_write(INT_REG, temp);
        //val = reg_read(DMA_CONTROL_STATUS);
        //val |=DMA_CTRL_STATUS_DONE;
        //reg_write(DMA_CONTROL_STATUS, val);
        //reg = reg_read(INT_STATUS);
    }
    if(temp & 0x2)
    {
        /* Clear bit 1. */
        temp = temp & (~0x2);
        reg_write(INT_REG, temp);
    }

    if(temp & 0x1)
    {
        /* Clear bit 0. */
        temp = temp & (~0x1);
        reg_write(INT_REG, temp);
    }
    //DEBUG_WRITE(0xff, timer, temp, reg, 0, val);
    __asm__("nop");

}
diff --git a/mix_vbp/viddec_fw/fw/parser/viddec_parse_sc.c b/mix_vbp/viddec_fw/fw/parser/viddec_parse_sc.c
new file mode 100644
index 0000000..85b6b8e
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/viddec_parse_sc.c
@@ -0,0 +1,119 @@
+#include "viddec_pm_parse.h"
+#include "viddec_fw_debug.h"
+
+#define FIRST_STARTCODE_BYTE 0x00
+#define SECOND_STARTCODE_BYTE 0x00
+#define THIRD_STARTCODE_BYTE 0x01
+
+/* BIG ENDIAN: Must be the second and fourth byte of the bytestream for this to work */
+/* LITTLE ENDIAN: Must be the second and fourth byte of the bytestream for this to work */
+/* these are little-endian defines */
+#define SC_BYTE_MASK0 0x00ff0000 /* little-endian */
+#define SC_BYTE_MASK1 0x000000ff /* little-endian */
+
+/* Parse for Sc code of pattern 0x00 0x00 0xXX in the current buffer. Returns either sc found or success.
+ The conext is updated with current phase and sc_code position in the buffer.
+*/
/* Scan the cubby buffer for a start-code prefix (0x00 0x00 0x01).
 * Returns 1 when a full prefix plus the following start-code byte were seen
 * (phase reaches 4); returns 0 otherwise, with the phase saved in the cubby
 * so scanning resumes correctly on the next buffer. On success
 * cxt->sc_end_pos indexes the start-code byte and sc_state is filled in. */
uint32_t viddec_parse_sc(void *in, void *pcxt, void *sc_state)
{
    uint8_t *ptr;
    uint32_t size;
    uint32_t data_left=0, phase = 0, ret = 0;
    viddec_sc_parse_cubby_cxt_t *cxt;
    /* What is phase?: phase is a value between [0-4], we keep track of consecutive '0's with this.
       Any time a '0' is found its incremented by 1 (up to 2) and reset to '0' if a zero not found.
       if 0xXX code is found and current phase is 2, its changed to 3 which means we found the pattern
       we are looking for. Its incremented to 4 once we see a byte after this pattern */
    cxt = ( viddec_sc_parse_cubby_cxt_t *)in;
    size = 0;
    data_left = cxt->size;
    ptr = cxt->buf;
    phase = cxt->phase;
    cxt->sc_end_pos = -1;
    pcxt=pcxt; /* silence unused-parameter warning */

    /* parse until there is more data and start code not found */
    while((data_left > 0) &&(phase < 3))
    {
        /* Check if we are byte aligned & phase=0, if thats the case we can check
           a word at a time instead of byte */
        if(((((uint32_t)ptr) & 0x3) == 0) && (phase == 0))
        {
            while(data_left > 3)
            {
                uint32_t data;
                char mask1 = 0, mask2=0;

                data = *((uint32_t *)ptr);
#ifndef MFDBIGENDIAN
                data = SWAP_WORD(data);
#endif
                /* mask picks out the second/fourth byte of the word; both must
                   be non-zero for the word to be start-code free. */
                mask1 = (FIRST_STARTCODE_BYTE != (data & SC_BYTE_MASK0));
                mask2 = (FIRST_STARTCODE_BYTE != (data & SC_BYTE_MASK1));
                /* If second byte and fourth byte are not zero's then we cannot have a start code here as we need
                   two consecutive zero bytes for a start code pattern */
                if(mask1 && mask2)
                {/* Success so skip 4 bytes and start over */
                    ptr+=4;size+=4;data_left-=4;
                    continue;
                }
                else
                {
                    break;
                }
            }
        }

        /* At this point either data is not on a word boundary or phase > 0 or On a word boundary but we detected
           two zero bytes in the word so we look one byte at a time*/
        if(data_left > 0)
        {
            if(*ptr == FIRST_STARTCODE_BYTE)
            {/* Phase can be 3 only if third start code byte is found */
                phase++;
                ptr++;size++;data_left--;
                if(phase > 2)
                {
                    /* Hold at 2: runs of zeros beyond two still count as two. */
                    phase = 2;

                    /* Fast-skip whole words of zeros while aligned. */
                    if ( (((uint32_t)ptr) & 0x3) == 0 )
                    {
                        while( data_left > 3 )
                        {
                            if(*((uint32_t *)ptr) != 0)
                            {
                                break;
                            }
                            ptr+=4;size+=4;data_left-=4;
                        }
                    }
                }
            }
            else
            {
                if((*ptr == THIRD_STARTCODE_BYTE) && (phase == 2))
                {/* Match for start code so update context with byte position */
                    phase = 3;
                    cxt->sc_end_pos = size;
                }
                else
                {
                    phase = 0;
                }
                ptr++;size++;data_left--;
            }
        }
    }
    /* Prefix found and at least one more byte available: that byte is the
       start code itself. */
    if((data_left > 0) && (phase == 3))
    {
        viddec_sc_prefix_state_t *state = (viddec_sc_prefix_state_t *)sc_state;
        cxt->sc_end_pos++;
        state->next_sc = cxt->buf[cxt->sc_end_pos];
        state->second_scprfx_length = 3;
        phase++;
        ret = 1;
    }
    cxt->phase = phase;
    /* Return SC found only if phase is 4, else always success */
    return ret;
}
diff --git a/mix_vbp/viddec_fw/fw/parser/viddec_parse_sc_fast_loop.c b/mix_vbp/viddec_fw/fw/parser/viddec_parse_sc_fast_loop.c
new file mode 100644
index 0000000..6f00d27
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/viddec_parse_sc_fast_loop.c
@@ -0,0 +1,190 @@
+#include "viddec_pm_parse.h"
+#include "viddec_fw_debug.h"
+
+#define FIRST_STARTCODE_BYTE 0x00
+#define SECOND_STARTCODE_BYTE 0x00
+#define THIRD_STARTCODE_BYTE 0x01
+
+/* BIG ENDIAN: Must be the second and fourth byte of the bytestream for this to work */
+/* LITTLE ENDIAN: Must be the second and fourth byte of the bytestream for this to work */
+/* these are little-endian defines */
+#define SC_BYTE_MASK0 0x00ff0000 /* little-endian */
+#define SC_BYTE_MASK1 0x000000ff /* little-endian */
+
+// This is the 2.25 clocks per byte loop
+#define USE_2p25_CLOCK_PER_BYTE_LOOP
+
+#ifdef USE_2p25_CLOCK_PER_BYTE_LOOP
/* Scan buf[i..len) for a start-code prefix (00 00 01), carrying the match
 * phase across calls via *pphase. Returns the index of the byte FOLLOWING
 * the prefix (the start-code byte), or -1 if no prefix completes in this
 * buffer. Alternates between a byte-wise "slow" loop and a word-at-a-time
 * "fast" loop (Krebs algorithm); the gotos are deliberate performance
 * transitions between the two. NOTE: the fast loop dereferences buf as
 * uint32* -- the caller guarantees dword alignment at loop entry. */
static int parser_find_next_startcode(
    const unsigned char *buf,
    int i,
    int len,
    unsigned int *pphase )
{
    int sc_pos = -1;
    int in_slow_loop;
    register unsigned int scphase;

    scphase = *pphase;

    in_slow_loop = 1;
    if ( (0 == (0x3 & i)) &&    /* dword aligned */
         (0 == scphase) &&      /* no "potential" SC detected */
         ((len - i) >= 4) )     /* more than four bytes left */
    {
        in_slow_loop = 0;       /* go to fast loop */
    }

    while( i < len )
    {
        if ( in_slow_loop )
        {
/* ------- slow SC Detect Loop, used when 0 detected in stream --------*/
sc_detect_slow_loop:

            while ( i < len )
            {
                unsigned char ch;

                ch = buf[i];

                /* searching for a zero, ignore phase for now */
                if ( FIRST_STARTCODE_BYTE == ch )
                {
                    /* if we've already got two zeros, hold at phase == 2 */
                    if ( scphase < 2 )
                    {
                        scphase++;
                    }
                    else if ( scphase > 2 )
                    {
                        /* RARE Valid Condition, SC == 00 00 01 00 */
                        /* if we've already got two zeros hold at phase == 2
                         * we also enter here of we're at phase 3
                         * meaning we've got 00 00 01 00 which is a valid SC
                         */
                        /* 00 00 01 00 */
                        sc_pos = i;
                        *pphase = scphase;
                        return(sc_pos);
                    }
                    else /* implies scphase == 2, holding receiving 0's */
                    {
                    }
                }
                else if ( THIRD_STARTCODE_BYTE == ch )
                {
                    if ( 2 == scphase )
                    {
                        /* next byte is the SC */
                        scphase++;
                    }
                    else if ( scphase < 2 )
                    {
                        scphase = 0; /* start over */
                    }
                    else if ( scphase > 2 )
                    {
                        /* RARE Valid Condition, SC == 00 00 01 01 */
                        sc_pos = i;
                        *pphase = scphase;
                        return(sc_pos);
                    }
                }
                else if ( 3 == scphase )
                {
                    /* Valid Condition, SC == 00 00 01 xx */
                    sc_pos = i;
                    *pphase = scphase;
                    return(sc_pos);
                }
                else
                {
                    scphase = 0;

                    /* last byte of a dword and plenty left: resume fast scan
                       on the next (aligned) dword */
                    if ( (3 == (0x3 & i)) &&    /* dword aligned? */
                         ((len - i) > 4) )      /* more than four bytes left */
                    {
                        i++;
                        in_slow_loop = 0; /* go to fast loop */

                        /* WARNING: Performance GoTo */
                        goto sc_detect_fast_loop;
                    }
                }

                i++;
            }
        }
        else /* we're in the fast loop */
        {
/* ------- FAST SC Detect Loop, used to skip at high bandwidth --------*/
sc_detect_fast_loop:

            /* FAST start-code scanning loop (Krebs Algorithm) */
            while ( i <= (len - 4) )
            {
                register unsigned int dw;

                dw = *((unsigned int *)&buf[i]);
#ifndef MFDBIGENDIAN
                dw = SWAP_WORD(dw);
#endif
                /* second and fourth byte both non-zero => no SC in this dword */
                if ( 0 != (dw & SC_BYTE_MASK0) )
                {
                    if ( 0 != (dw & SC_BYTE_MASK1) )
                    {
                        /* most common code path */
                        i += 4;
                        continue;
                    }
                }

                break;
            }
            /* potential SC detected or at end of loop */
            in_slow_loop = 1;

            /* WARNING: performance goto */
            goto sc_detect_slow_loop;
        }
    }

    *pphase = scphase;
    return(sc_pos);
}
/* Fast-loop variant of the start-code scanner. Wraps
 * parser_find_next_startcode, aligning the scan start down to a 4-byte
 * boundary so the word-wise fast loop can be used, then correcting the
 * reported position by the alignment offset. Returns 1 when a start code
 * was found (cxt->sc_end_pos set), 0 otherwise.
 * NOTE(review): takes only (in, pcxt), unlike the 3-argument
 * viddec_parse_sc in viddec_parse_sc.c -- presumably only one of the two
 * translation units is built at a time; confirm against the build files.
 * NOTE(review): casting a pointer to int for the alignment test is not
 * portable to 64-bit targets; uintptr_t would be safer -- assumes a 32-bit
 * firmware target here. */
unsigned int viddec_parse_sc(void *in, void *pcxt)
{
    viddec_sc_parse_cubby_cxt_t *cxt;
    int boff;
    int retval=0;

    cxt = (viddec_sc_parse_cubby_cxt_t *)in;

    /* get to four-byte alignment */
    boff = (int)cxt->buf & 0x3;

    cxt->sc_end_pos = parser_find_next_startcode(
        (const unsigned char *)cxt->buf - boff,
        boff,
        cxt->size + boff,
        &cxt->phase );

    if ( (int)cxt->sc_end_pos >= 0 )
    {
        /* translate back into an offset relative to cxt->buf */
        cxt->sc_end_pos -= boff;

        /* have not fully finished the buffer */
        if ( cxt->sc_end_pos < cxt->size )
            cxt->phase++;

        retval = 1;
    }
    else
    {
        /* No startcode found */
    }

    return(retval);
}
+#endif
diff --git a/mix_vbp/viddec_fw/fw/parser/viddec_parse_sc_stub.c b/mix_vbp/viddec_fw/fw/parser/viddec_parse_sc_stub.c
new file mode 100644
index 0000000..5aa2e9c
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/viddec_parse_sc_stub.c
@@ -0,0 +1,6 @@
+#include <stdint.h>
+
/* Host-build stub for the firmware DMA copy routine: performs no copy and
 * reports zero bytes transferred. Parameters are intentionally unused. */
uint32_t cp_using_dma(uint32_t ddr_addr, uint32_t local_addr, uint32_t size, char to_ddr, char swap)
{
    (void)ddr_addr;
    (void)local_addr;
    (void)size;
    (void)to_ddr;
    (void)swap;
    return 0;
}
diff --git a/mix_vbp/viddec_fw/fw/parser/viddec_pm.c b/mix_vbp/viddec_fw/fw/parser/viddec_pm.c
new file mode 100644
index 0000000..ffcff11
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/viddec_pm.c
@@ -0,0 +1,554 @@
+#include "viddec_pm.h"
+#include "viddec_fw_debug.h"
+#include "viddec_fw_common_defs.h"
+#include "viddec_pm_tags.h"
+#include "viddec_parser_ops.h"
+#include "viddec_vc1_parse.h"
+#include "viddec_mp4_parse.h"
+#include "viddec_mpeg2_parse.h"
+#include "viddec_h264_parse.h"
+/*
+ Overview of Parser manager:
+ Parser manager is the glue between Kernel(main.c) and actual codecs. We abstract common functionality as much as we can
+ in this module. The parser Manager context allocates memory for Parsers. At any point in time there is only one active stream.
+ During open stream we setup all necessary initialisation for the codec we are handling. The parser manager context is
+ stored on DDR when the current stream gets swapped out by the kernel. When the next stream comes in it has its own
+ version of parser manager.
+ Parser manager is responsible for providing information on when it is a good time to swap a stream.
+ High level algorithm of parser Manager once a stream is opened and active(RET's are returns to Kernel):
+
+ 1. create a list data structure to hold any incoming ES descriptors.
+ 2. Check to see if any of the ES buffers Desc in current list has data to be processed. If not request kernel(RET) for a buffer.
+ 3. If data is present parse until a scprefix+sc is found. If not goto step2.
+ 4. If startcode detected update list state to make ES data look like Linear buffer.
+ 5. Setup required state to provide getbits interface for codecs to access bit stream maximum 32bits at a time.
+ 6. Setup Current & Next workloads provided by Kernel.
+ 7. Call the codec to parse the data we collected between start codes.
+ 8. Query to see if we parsed frame worth of data.
+ 9. Do necessary TAG association and remove used buffers from List.
+ 10. Send information to kernel on whether workload is done or Not.(RET). When kernel reschedules start from step2.
+
+ Kernel can swap current stream at RET points described above.
+
+ Other additional things supported:
+ - Generic start code detect function which is same for most of codecs.
+ - Memory Management.
+ - Flush of stream.
+ - Emulation prevention.
+ - Interface to emit necessary tags for codec specific types.
+*/
+
+
/* Check whether a codec needs emulation-prevention (escape byte) handling.
 * BUGFIX: the second comparison previously read 'codec_type' instead of the
 * macro parameter 'codec', silently depending on the caller having a local
 * variable of exactly that name; the parameter is now used (and
 * parenthesized) in both comparisons. The only visible call site passes
 * codec_type, so behavior is unchanged there. */
#define EMUL_REQD(codec) ((((codec) == MFD_STREAM_FORMAT_VC1) || ((codec) == MFD_STREAM_FORMAT_H264)) ? 1 : 0)
+
+#ifdef RTL_SIMULATION
+extern void output_omar_wires( unsigned int value );
+#else
+#define output_omar_wires(x)
+#endif
+
+/* Place to store Function pointers for all supported interfaces for each codec */
+viddec_parser_ops_t parser_ops[MFD_STREAM_FORMAT_MAX];
+
+
+
+/* we need to define as external function so that for host mode we can use the same code without
+ modifications by overloading dma function with a copy function
+*/
+extern uint32_t cp_using_dma(uint32_t ddr_addr, uint32_t local_addr, uint32_t size, char to_ddr, char swap);
+
/* Populate the global parser_ops table with each codec's entry points, then
 * install the shared start-code scanner and the per-codec tag generators.
 * NOTE(review): parse_sc is not assigned for MPEG4 here -- presumably
 * viddec_mp4_get_ops installs a codec-specific scanner; confirm. */
void viddec_pm_init_ops()
{
    viddec_vc1_get_ops(&parser_ops[MFD_STREAM_FORMAT_VC1]);
    parser_ops[MFD_STREAM_FORMAT_VC1].parse_sc = viddec_parse_sc;
    parser_ops[MFD_STREAM_FORMAT_VC1].gen_contrib_tags = viddec_pm_generic_generate_contribution_tags;
    parser_ops[MFD_STREAM_FORMAT_VC1].gen_assoc_tags = viddec_generic_add_association_tags;

    viddec_mpeg2_get_ops(&parser_ops[MFD_STREAM_FORMAT_MPEG]);
    parser_ops[MFD_STREAM_FORMAT_MPEG].parse_sc = viddec_parse_sc;
    parser_ops[MFD_STREAM_FORMAT_MPEG].gen_contrib_tags = viddec_pm_generic_generate_contribution_tags;
    parser_ops[MFD_STREAM_FORMAT_MPEG].gen_assoc_tags = viddec_mpeg2_add_association_tags;

    /* H.264 uses late-frame contribution tagging (frame boundary known late). */
    viddec_h264_get_ops(&parser_ops[MFD_STREAM_FORMAT_H264]);
    parser_ops[MFD_STREAM_FORMAT_H264].parse_sc = viddec_parse_sc;
    parser_ops[MFD_STREAM_FORMAT_H264].gen_contrib_tags = viddec_pm_lateframe_generate_contribution_tags;
    parser_ops[MFD_STREAM_FORMAT_H264].gen_assoc_tags = viddec_h264_add_association_tags;

    viddec_mp4_get_ops(&parser_ops[MFD_STREAM_FORMAT_MPEG42]);
    parser_ops[MFD_STREAM_FORMAT_MPEG42].gen_contrib_tags = viddec_pm_generic_generate_contribution_tags;
    parser_ops[MFD_STREAM_FORMAT_MPEG42].gen_assoc_tags = viddec_generic_add_association_tags;
}
+
+/*
+ Returns size of persistent DDR memory required for the codec. If the required memory is less than max allocated
+ scratch memory in FW we always give the max scratch size.
+*/
/* Report the persistent memory sizes needed for codec_type.
 * Queries the codec for its context size, logs (but does not fail) if it
 * exceeds the firmware's scratch maximum, then unconditionally reports the
 * parser-manager context size as the context requirement. Always returns 1. */
uint32_t viddec_pm_get_parser_sizes(uint32_t codec_type, viddec_parser_memory_sizes_t *size)
{
    parser_ops[codec_type].get_cxt_size(size);
    if(size->context_size > MAX_CODEC_CXT_SIZE)
    {
        /* Diagnostic only; the value below is overwritten regardless. */
        DEB("ERROR: size(%d) of context for codec=%d is greater than max=%d\n",size->context_size,codec_type,MAX_CODEC_CXT_SIZE);
    }
    size->context_size = sizeof(viddec_pm_cxt_t);
    return 1;
}
+
+/*
+ Initialize the scratch memory allocated to the stream based on clean. if clean is true initialize to
+ start state, if not then preserve stream information.
+*/
/*
   Initialize the scratch memory allocated to the stream based on clean. if clean is true initialize to
   start state, if not then preserve stream information.
*/
void viddec_pm_init_context(viddec_pm_cxt_t *cxt, uint32_t codec_type, uint32_t *persist_mem, uint32_t clean)
{
    int i;

    /* Invalidate all pending-tag slots. */
    for(i=0; i<MAX_IBUFS_PER_SC; i++)
    {
        cxt->pending_tags.pending_tags[i] = INVALID_ENTRY;
    }
    cxt->frame_start_found = false;
    cxt->found_fm_st_in_current_au = false;
    /* H.264 is the only codec using late-frame detection. */
    cxt->late_frame_detect = (MFD_STREAM_FORMAT_H264 == codec_type) ? true:false;
    cxt->pending_tags.first_buf_aligned = cxt->pending_tags.using_next = cxt->pending_tags.frame_done =false;
    cxt->next_workload_error_eos = VIDDEC_FW_WORKLOAD_ERR_FLUSHED_FRAME | VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE;
    viddec_pm_utils_list_init(&(cxt->list));
    cxt->cur_buf.list_index = -1; /* -1 = no current ES buffer selected */
    cxt->parse_cubby.phase=0;
    /* Codec init; !clean tells the codec to preserve stream state. */
    parser_ops[codec_type].init((void *)&(cxt->codec_data[0]), persist_mem, !clean);
    if(clean)
    {
        cxt->pending_inband_tags = 0;
    }
    else
    {
        /* TODO: Enable this once codecs support this function */
        //parser_ops[codec_type].flush_preserve((void *)&(cxt->codec_data[0]), persist_mem);
    }

}
+
/* Forward the current stream time to the emitter (stamped into the workload). */
void viddec_pm_update_time(viddec_pm_cxt_t *cxt, uint32_t time)
{
    viddec_emit_time(&(cxt->emitter), time);
}
+
+/* add an esbuffer to list */
+static inline uint32_t viddec_pm_add_es_buf_to_list(viddec_pm_cxt_t *cxt, viddec_input_buffer_t *es_buf)
+{
+ uint32_t val , ret = PM_OVERFLOW;
+
+ val = viddec_pm_utils_list_addbuf(&(cxt->list), es_buf);
+ if(val == 1) ret = PM_SUCCESS;
+ return ret;
+}
+
+static inline uint32_t viddec_pm_check_inband_messages(viddec_pm_sc_cur_buf_t *cur_buf, uint32_t *type)
+{
+ uint32_t ret=false;
+ if(cur_buf->cur_es->flags != 0)
+ {
+ /* update offset to point to next position for loading data */
+ cur_buf->cur_offset +=(cur_buf->cur_size);
+ cur_buf->cur_size = 0;
+ switch(cur_buf->cur_es->flags)
+ {
+ case VIDDEC_STREAM_EOS:
+ {
+ *type = PM_EOS;
+ }
+ break;
+ case VIDDEC_STREAM_DISCONTINUITY:
+ {
+ *type = PM_DISCONTINUITY;
+ }
+ default:
+ break;
+ }
+ ret =true;
+ }
+ return ret;
+}
+
+/* creates an ibuf from the current position in list. Fills sc_parse_cubby_cxt */
+uint32_t viddec_pm_create_ibuf(viddec_pm_cxt_t *cxt)
+{
+ uint32_t ret = PM_NO_DATA;
+#ifndef VBP
+ viddec_sc_parse_cubby_cxt_t *cubby = &(cxt->parse_cubby);
+#endif
+ viddec_pm_sc_cur_buf_t *cur_buf = &(cxt->cur_buf);
+ viddec_pm_utils_list_t *list = &(cxt->list);
+
+ /* Step1: check if list is Empty, If yes return No data */
+ if(list->num_items > 0)
+ {
+ /* Step 2: Check to see If current index into list is empty & we have data in list,
+ if so increment index and initialise it*/
+ if(cur_buf->list_index == -1)
+ {
+ if(viddec_pm_utils_list_getbyte_position(list,
+ list->first_scprfx_length+1,
+ (uint32_t *)&(cur_buf->list_index),
+ &(cur_buf->cur_offset)) != 1)
+ {/* This return's offset and index from where we have to start for sc detect */
+ cur_buf->cur_size = 0;
+ cur_buf->cur_es = &(list->sc_ibuf[cur_buf->list_index]);
+ }
+ else
+ {
+ return PM_NO_DATA;
+ }
+ }
+
+ /* Step3: If we are done with current buffer then try to go to next item in list */
+ if((cur_buf->cur_offset + cur_buf->cur_size) >= cur_buf->cur_es->len)
+ {
+ /* Need to handle In band messages before going to next buffer */
+ //if(viddec_pm_check_inband_messages(cur_buf))
+ if(viddec_pm_check_inband_messages(cur_buf, &ret))
+ {
+ return ret;
+ }
+ /* If no items in list after the current buffer return no data */
+ if((uint32_t)(cur_buf->list_index + 1) >= list->num_items)
+ {
+ return PM_NO_DATA;
+ }
+ cur_buf->list_index++;
+ cur_buf->cur_es = &(list->sc_ibuf[cur_buf->list_index]);
+ cur_buf->cur_offset = cur_buf->cur_size = 0;
+ }
+ /* Step4: Fill the cubby with data to send to parser sc code function */
+ {
+ int32_t data_left;
+ /* data left is the leftout size in current ES buffer */
+ data_left = cur_buf->cur_es->len - (cur_buf->cur_offset + cur_buf->cur_size);
+
+ /* update offset to point to next position for loading data */
+ cur_buf->cur_offset +=(cur_buf->cur_size);
+
+#ifndef VBP
+ /* Load maximum of array size */
+ if(data_left >= SC_DETECT_BUF_SIZE)
+ {
+ data_left = SC_DETECT_BUF_SIZE;
+ }
+ /* can be zero if we have zero sized buffers in our list.EX:NEW segment */
+ if(data_left > 0)
+ {/* do a copy using Linear Dma */
+ uint32_t size , ddr_addr = 0, ddr_mask=0;
+ /* get ddr adress of current offset in ES buffer */
+#ifdef HOST_ONLY
+ ddr_addr = cur_buf->cur_offset + (uint32_t)cur_buf->cur_es->buf;
+#else
+ ddr_addr = cur_buf->cur_offset + cur_buf->cur_es->phys;
+#endif
+ ddr_mask = (ddr_addr & 3);
+ ddr_addr = ddr_addr & ~3;
+ /* return from this function can be more bytes based on input buf alignment.
+ The adress for local memory we are sending is on DWORD boundary so it should be safe.
+ */
+
+ size = cp_using_dma(ddr_addr, (uint32_t)&(cxt->scbuf[0]), data_left+ddr_mask, 0,1);//false, true);
+ cubby->size = data_left;
+
+ /* point to actual memory location which has the data(skip aligment bytes) */
+ cubby->buf = &(cxt->scbuf[ddr_mask]);
+ cur_buf->cur_size = data_left;
+ ret = PM_SUCCESS;
+ }
+ else
+ {
+ /* If we completely consumed this buffer or this is a zero sized buffer we want to check inband messages */
+ //if(viddec_pm_check_inband_messages(cur_buf))
+ if(viddec_pm_check_inband_messages(cur_buf, &ret))
+ {
+ return ret;
+ }
+ }
+#else
+ ret = PM_SUCCESS;
+#endif
+ }
+ }
+
+ return ret;
+}
+
+/*
+ Read data from esbuffer list and parse for start codes or EOS. If we consumed all the data we return no data left.
+*/
/* Read data from the ES buffer list and scan for start codes or EOS.
 * Loops staging data via viddec_pm_create_ibuf and feeding it to the codec's
 * parse_sc until a boundary is hit. Returns PM_SC_FOUND, PM_EOS,
 * PM_DISCONTINUITY, or PM_NO_DATA when all data was consumed. */
static inline uint32_t viddec_pm_parse_for_sccode(viddec_pm_cxt_t *cxt, viddec_parser_ops_t *func)
{
    uint32_t ret = PM_NO_DATA;
    uint32_t sc_boundary_found = 0;

    while(!sc_boundary_found)
    {
        /* Create an buffer from list to parse */
        ret = viddec_pm_create_ibuf(cxt);
        switch(ret)
        {
            case PM_NO_DATA:
            {/* No data in esbuffer list for parsing sc */
                sc_boundary_found = 1;
            }
            break;
            case PM_EOS:
            case PM_DISCONTINUITY:
            {
                sc_boundary_found = 1;
                cxt->list.end_offset = cxt->cur_buf.cur_offset+1;
                cxt->parse_cubby.phase = 0; /* reset scanner state for next AU */
                /* we didn't find a start code so second start code length would be 0 */
                cxt->sc_prefix_info.second_scprfx_length = 0;
                //cxt->sc_prefix_info.next_sc = VIDDEC_PARSE_EOS;
                if(ret == PM_EOS)
                {
                    cxt->sc_prefix_info.next_sc = VIDDEC_PARSE_EOS;
                }
                if(ret == PM_DISCONTINUITY)
                {
                    cxt->sc_prefix_info.next_sc = VIDDEC_PARSE_DISCONTINUITY;
                }
            }
            break;
            case PM_SUCCESS:
            default:
            {
                /* parse the created buffer for sc */
                ret = func->parse_sc((void *)&(cxt->parse_cubby), (void *)&(cxt->codec_data[0]), &(cxt->sc_prefix_info));
                if(ret == 1)
                {
                    /* Start code found: record where the AU ends in list terms. */
                    cxt->list.end_offset = cxt->parse_cubby.sc_end_pos + cxt->cur_buf.cur_offset;
                    cxt->parse_cubby.phase = 0;
                    cxt->list.total_bytes+=cxt->parse_cubby.sc_end_pos;
                    ret = PM_SC_FOUND;
                    sc_boundary_found = 1;
                    break;
                }
                else
                {
                    /* No SC in this chunk: account for the bytes and continue. */
                    cxt->list.total_bytes+=cxt->cur_buf.cur_size;
                }
            }
            break;
        }
    }

    return ret;
}
+
+/*
+ Once we are ready to flush the current workload, we update current workload on DDR with our internal information
+ that was not written before like num of items in workload, errors in stream etc...
+*/
/*
   Once we are ready to flush the current workload, we update current workload on DDR with our internal information
   that was not written before like num of items in workload, errors in stream etc...
*/
void viddec_pm_finalize_workload(viddec_pm_cxt_t *cxt, uint32_t codec_type, uint32_t codec_errors)
{
    viddec_emit_set_codec(&(cxt->emitter), codec_type);
    viddec_emit_set_codec_errors(&(cxt->emitter), codec_errors);
    viddec_emit_flush_current_wkld(&(cxt->emitter));
    /* debug trace markers on the omar wires (no-ops outside RTL simulation) */
    output_omar_wires( 0x5 );
    output_omar_wires( 0x1 );
}
+
+/*
+ After parsing between start codes we cleanup our list so that it has only buffers that are not consumed yet.
+*/
+uint32_t viddec_pm_finalize_list(viddec_pm_cxt_t *cxt)
+{
+ uint32_t ret=1;
+
+ viddec_pm_utils_list_remove_used_entries(&(cxt->list), cxt->sc_prefix_info.second_scprfx_length);
+ cxt->cur_buf.list_index = -1;
+ cxt->list.first_scprfx_length = cxt->sc_prefix_info.second_scprfx_length;
+ return ret;
+}
+
+/* Case to handle if we encounter list overflow without seeing second start code */
+void viddec_pm_handle_buffer_overflow(viddec_pm_cxt_t *cxt, uint32_t codec_type, viddec_input_buffer_t *es_buf)
+{
+ uint32_t indx=0;
+ while(indx< (uint32_t)cxt->list.num_items)
+ {/* Dump tags for all entries in list to prevent buffer leak */
+ viddec_emit_contr_tag(&(cxt->emitter), &(cxt->list.sc_ibuf[indx]), false, true);
+ viddec_emit_assoc_tag(&(cxt->emitter), cxt->list.sc_ibuf[indx].id, true);
+ indx++;
+ }
+ /* Dump tags for the new buffer that was received */
+ viddec_emit_contr_tag(&(cxt->emitter), es_buf, 0, true);
+ viddec_emit_assoc_tag(&(cxt->emitter), es_buf->id, true);
+ /* Set errors on both current and next as both can be invalid */
+ viddec_emit_set_workload_error(&(cxt->emitter),
+ (VIDDEC_FW_WORKLOAD_ERR_BUFFERS_OVERFLOW | VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE),
+ true);
+ viddec_emit_set_workload_error(&(cxt->emitter),
+ (VIDDEC_FW_WORKLOAD_ERR_BUFFERS_OVERFLOW | VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE),
+ false);
+ /* cleanup the pending tags */
+ viddec_pm_generate_missed_association_tags(cxt, true);
+ viddec_pm_finalize_workload(cxt, codec_type, 0);
+ WRITE_SVEN(SVEN_MODULE_EVENT_GV_FW_FATAL_BUFFER_OVERLFOW, (int)es_buf->phys, (int)es_buf->len, 0, 0, 0, 0);
+}
+
/* After a workload is finished, act on any in-band message that ended it.
 * The (m_type & ~0xFF) test checks the PM_INBAND_MESSAGES group bit --
 * PM_EOS and PM_DISCONTINUITY are presumably encoded as
 * PM_INBAND_MESSAGES | small-id (TODO confirm against the PM_* defines). */
static inline void viddec_pm_handle_post_inband_messages(viddec_pm_cxt_t *cxt, uint32_t m_type)
{
    if((m_type & ~(0xFF))== PM_INBAND_MESSAGES)
    {
        /* If EOS decide set error on next workload too */
        viddec_emit_set_workload_error(&(cxt->emitter), cxt->next_workload_error_eos, true);
        if(m_type == PM_EOS)
        {
            viddec_emit_set_inband_tag(&(cxt->emitter), VIDDEC_WORKLOAD_IBUF_EOS, true);
        }
        if(m_type == PM_DISCONTINUITY)
        {
            /* Defer the discontinuity tag to the start of the next workload. */
            cxt->pending_inband_tags = PM_DISCONTINUITY;
        }
    }
}
+
+static inline uint32_t viddec_pm_handle_new_es_buffer(viddec_pm_cxt_t *cxt, uint32_t codec_type, viddec_input_buffer_t *es_buf)
+{
+ uint32_t state = PM_SUCCESS;
+ if(es_buf != NULL)
+ {
+ state = viddec_pm_add_es_buf_to_list(cxt, es_buf);
+ if(state == PM_OVERFLOW)
+ {
+ viddec_pm_handle_buffer_overflow(cxt, codec_type, es_buf);
+ }
+ }
+ return state;
+}
+
/* Before processing a new buffer, emit any discontinuity tag deferred from
 * the previous workload (see viddec_pm_handle_post_inband_messages). */
static inline void viddec_pm_handle_pre_inband_messages(viddec_pm_cxt_t *cxt)
{
    if(cxt->pending_inband_tags == PM_DISCONTINUITY)
    {
        viddec_emit_set_inband_tag(&(cxt->emitter), VIDDEC_WORKLOAD_IBUF_DISCONTINUITY, false);
        cxt->pending_inband_tags = 0;
    }
}
+
+/*
+ Main function of parser manager.
+ It searches until start codes are found in the list; if not, the return type indicates the kernel should provide more buffers.
+ If a start code is found it calls the codec to parse the syntax data it accumulated so far.
+ If codec says a frame is not done then continues to find the next start code.
+ If codec says frame is done it does tag association and indicates kernel a frame is done.
+*/
/* Main entry of the parser manager (see the block comment above).
 * Queues es_buf, scans for a start-code boundary, runs the codec's syntax
 * parser between boundaries, and performs tag association / workload
 * finalization when a frame completes.
 * Returns PM_NO_DATA (need more input), PM_SUCCESS (consumed, frame not
 * done), PM_WKLD_DONE / PM_EOS / PM_DISCONTINUITY (workload complete),
 * or PM_OVERFLOW. */
uint32_t viddec_pm_parse_es_buffer(viddec_pm_cxt_t *cxt, uint32_t codec_type, viddec_input_buffer_t *es_buf)
{
    uint32_t state = PM_SUCCESS;

    /* Step1: Append Es buffer to list */
    viddec_pm_handle_pre_inband_messages(cxt);
    state = viddec_pm_handle_new_es_buffer(cxt, codec_type, es_buf);
    if(state == PM_SUCCESS)
    {
        uint32_t scdetect_ret;
        output_omar_wires( 0x3 );
        /* Step2: Phase1 of parsing, parse until a sc is found */
        scdetect_ret = viddec_pm_parse_for_sccode(cxt,&parser_ops[codec_type]);
        switch(scdetect_ret)
        {
            case PM_NO_DATA:
            {
                /* Step3: If we consumed all the data indicate we need more buffers */
                state = PM_NO_DATA;
                break;
            }
            case PM_EOS:
            case PM_DISCONTINUITY:
            case PM_SC_FOUND:
            {
                uint32_t codec_errors=0;
                /* Create necessary state information to make the ES buffers look like linear data */
                viddec_pm_utils_list_updatebytepos(&(cxt->list), cxt->sc_prefix_info.second_scprfx_length);
                if(cxt->sc_prefix_info.first_sc_detect != 1)
                {
                    /* Step4: If we saw two start codes init state and call codec to parse */
                    uint32_t codec_ret;
                    /* Initialise the state to provide get bits for codecs */
                    viddec_pm_utils_bstream_init(&(cxt->getbits), &(cxt->list), EMUL_REQD(codec_type));
                    output_omar_wires( 0x1 );
                    /* call the codec to do syntax parsing */
                    parser_ops[codec_type].parse_syntax((void *)cxt, (void *)&(cxt->codec_data[0]));
                    /* Check and see if frame start was detected. If we did update frame start in current au */
                    if(parser_ops[codec_type].is_frame_start((void *)&(cxt->codec_data[0])) == true)
                    {
                        cxt->frame_start_found += 1;
                        cxt->found_fm_st_in_current_au = true;
                    }
                    /* Query to see if we reached end of current frame */
                    codec_ret = parser_ops[codec_type].is_wkld_done((void *)cxt,
                                                                    (void *)&(cxt->codec_data[0]),
                                                                    (uint32_t)(cxt->sc_prefix_info.next_sc),
                                                                    &codec_errors);

                    state = (codec_ret == VIDDEC_PARSE_FRMDONE) ? PM_WKLD_DONE : PM_SUCCESS;
                    /* generate contribution and association tags */
                    cxt->pending_tags.frame_done = (codec_ret == VIDDEC_PARSE_FRMDONE);
                    parser_ops[codec_type].gen_assoc_tags(cxt);
                    parser_ops[codec_type].gen_contrib_tags(cxt, (state != PM_WKLD_DONE));
                }
                else
                {
                    /* Step4: If this is the first start code in this stream, clean up and return */
                    if(cxt->list.total_bytes != 0)
                    {
                        /* Junk bytes before the first SC: tag them out generically. */
                        viddec_pm_generic_generate_contribution_tags(cxt, true);
                        viddec_generic_add_association_tags(cxt);
                    }
                    else
                    {
                        if(cxt->list.num_items >= 1)
                        {
                            uint32_t indx=0;
                            while((indx< (uint32_t)cxt->list.num_items) && (cxt->list.sc_ibuf[indx].len == 0))
                            {/* Dump all zero sized buffers until we see a buffer with valid data */
                                viddec_emit_contr_tag(&(cxt->emitter), &(cxt->list.sc_ibuf[indx]), false, false);
                                viddec_emit_assoc_tag(&(cxt->emitter), cxt->list.sc_ibuf[indx].id, false);
                                indx++;
                            }
                        }
                    }
                    if((scdetect_ret & ~(0xFF))!= PM_INBAND_MESSAGES)
                    {
                        /* Ordinary first SC: clear the flag so the next SC pair parses. */
                        state = PM_SUCCESS;//state = PM_FIRST_SC_FOUND;
                        cxt->sc_prefix_info.first_sc_detect = 0;
                    }
                    else
                    {
                        state = PM_WKLD_DONE;
                    }
                }

                viddec_pm_handle_post_inband_messages(cxt, scdetect_ret);

                /* Step 5: If current frame is done, finalise the workload state with necessary information */
                if(state == PM_WKLD_DONE)
                {
                    DEB("\nFRAME ... DONE\n");
                    /* we decrement frame start. This can be 0 in cases like sending junk data with EOS */
                    cxt->frame_start_found -= (cxt->frame_start_found)? 1: 0;
                    if((scdetect_ret & ~(0xFF))== PM_INBAND_MESSAGES)
                    {/* If EOS dump pending tags and set state */
                        viddec_pm_generate_missed_association_tags(cxt, false);
                        state = scdetect_ret;
                    }
                    /* Write back stored state of workloads to memory to prepare for pushing to output queue */
                    viddec_pm_finalize_workload(cxt, codec_type, codec_errors);
                }
                /* Step 6: Reset the list to prepare for next iteration */
                viddec_pm_finalize_list(cxt);
                break;
            }
            default:
                break;
        }
    }//if(state == PM_SUCCESS)
    return state;
} // viddec_pm_parse_es_buffer
diff --git a/mix_vbp/viddec_fw/fw/parser/viddec_pm_flush.c b/mix_vbp/viddec_fw/fw/parser/viddec_pm_flush.c
new file mode 100644
index 0000000..f16fbcd
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/viddec_pm_flush.c
@@ -0,0 +1,127 @@
+#include "fw_pvt.h"
+#include "viddec_fw_parser_ipclib_config.h"
+#include "viddec_fw_common_defs.h"
+#include "viddec_pm_tags.h"
+#include "viddec_fw_parser.h"
+
+extern dmem_t _dmem;
+extern viddec_parser_ops_t parser_ops[MFD_STREAM_FORMAT_MAX];
+
+/* Peek at the current (index 0) and next (index 1) workload messages on the
+   stream's free-workload queue without dequeuing them, then point the emitter
+   at the peeked workload buffers. *ret_cur / *ret_next receive the peek
+   return codes (0 means no message available at that index). */
+static void viddec_fw_parser_peekmessages(viddec_pm_cxt_t *pm, ipc_msg_data *wkld_cur, ipc_msg_data *wkld_next, int32_t *ret_cur, int32_t *ret_next, uint32_t stream_id)
+{
+ FW_IPC_Handle *fwipc = GET_IPC_HANDLE(_dmem);
+ wkld_cur->phys = wkld_next->phys = 0;
+ /* read current and next workloads by peeking to free wkld queue. This would only give us a copy
+ of the message but won't actually pull it out of the queue */
+
+ *ret_cur = FwIPC_PeekReadMessage(fwipc, &(fwipc->wkld_q[stream_id]), (char *)wkld_cur, sizeof(ipc_msg_data), 0);
+ *ret_next = FwIPC_PeekReadMessage(fwipc, &(fwipc->wkld_q[stream_id]), (char *)wkld_next, sizeof(ipc_msg_data), 1);
+ /* NOTE: I am passing length of current workload as size for next, since next workload might not exist. This is safe since in flush we always append to current workload */
+ viddec_emit_update(&(pm->emitter), wkld_cur->phys, wkld_next->phys, wkld_cur->len, wkld_cur->len);
+}
+
+/* Mark the current workload as a flushed, non-decodable frame, flush the
+   emitter's pending items into it, send it to the stream's output queue and
+   pop the corresponding entry off the free-workload queue (wkld_cur is
+   overwritten with the dequeued message). */
+static void viddec_fw_parser_push_error_workload(viddec_pm_cxt_t *pm, ipc_msg_data *wkld_cur, uint32_t stream_id)
+{
+ FW_IPC_Handle *fwipc = GET_IPC_HANDLE(_dmem);
+ /* Push the current wkld */
+ viddec_emit_set_workload_error(&(pm->emitter),
+ (VIDDEC_FW_WORKLOAD_ERR_FLUSHED_FRAME | VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE),
+ false);
+ viddec_emit_flush_current_wkld(&(pm->emitter));
+ FwIPC_SendMessage(fwipc, stream_id, (char *)wkld_cur, sizeof(ipc_msg_data));
+ FwIPC_ReadMessage(fwipc, &(fwipc->wkld_q[stream_id]), (char *)wkld_cur, sizeof(ipc_msg_data));
+}
+
+/* Flush all pending data for stream_id: drain queued ES buffers and free
+   workloads, pushing each workload to the output queue tagged as a
+   flushed/error frame. flush_type selects whether the whole parser context
+   is reset (VIDDEC_STREAM_FLUSH_DISCARD) or only stream information
+   (VIDDEC_STREAM_FLUSH_PRESERVE).
+   Returns VIDDEC_FW_SUCCESS, VIDDEC_FW_PORT_FULL when the output queue
+   cannot absorb the flush, or VIDDEC_FW_NEED_FREE_WKLD when ES data is
+   queued but no free workload is available to carry the tags. */
+int viddec_fw_parser_flush(unsigned int stream_id, unsigned int flush_type)
+{
+ FW_IPC_Handle *fwipc = GET_IPC_HANDLE(_dmem);
+ mfd_pk_strm_cxt *cxt;
+ mfd_stream_info *cxt_swap;
+ viddec_pm_cxt_t *pm;
+ int32_t pos=0, ret = VIDDEC_FW_SUCCESS;/* success */
+ uint32_t workloads_in_input_q = 0;
+ cxt = (mfd_pk_strm_cxt *)&(_dmem.srm_cxt);
+ cxt_swap = (mfd_stream_info *)&(_dmem.stream_info[stream_id]);
+ pm = &(cxt->pm);
+
+ /* NOTE(review): despite the name, ipc_mq_read_avail appears to return the
+ number of BYTES readable on the free-workload queue, not a workload
+ count (it is later compared against CONFIG_IPC_MESSAGE_MAX_SIZE) --
+ confirm against the ipc_mq implementation. */
+ workloads_in_input_q = ipc_mq_read_avail(&fwipc->wkld_q[stream_id].mq, (int32_t *)&pos);
+ pos = 0;
+ /* Check to see if output queue has space for next message */
+ if(ipc_mq_write_avail(&fwipc->snd_q[stream_id].mq,&pos) >= workloads_in_input_q)
+ {
+ /* Check how many free workloads are available. Need at least 1
+ (i.e. at least one full message's worth of bytes on the queue) */
+ if(workloads_in_input_q >= CONFIG_IPC_MESSAGE_MAX_SIZE)
+ {
+ ipc_msg_data wkld_cur, wkld_next, cur_es;
+ int32_t ret_cur=0,ret_next=0;
+
+ {/* Swap context into local memory */
+ cp_using_dma(cxt_swap->ddr_cxt, (uint32_t) pm, sizeof(viddec_pm_cxt_t), false, false);
+ }
+
+ viddec_fw_parser_peekmessages(pm, &wkld_cur, &wkld_next, &ret_cur, &ret_next, stream_id);
+ if(workloads_in_input_q >= (CONFIG_IPC_MESSAGE_MAX_SIZE << 1))
+ {/* If we have more than 2 workloads, most likely current workload has partial data. To avoid overflow
+ lets push current and use next which is most likely empty .If there's only one workload it was
+ next for previous frame so most likely its empty in which case we don't do this logic*/
+ viddec_fw_parser_push_error_workload(pm, &wkld_cur, stream_id);
+ viddec_fw_parser_peekmessages(pm, &wkld_cur, &wkld_next, &ret_cur, &ret_next, stream_id);
+ }
+ /* Empty current es buffers in list */
+ /* TODO(Assumption): we have to make sure that list flush is really successful by checking return values.
+ If our workload size is big enough to accommodate buf done tags then its not necessary
+ since we will have guaranteed successful writes for all es buffers */
+ viddec_pm_generate_tags_for_unused_buffers_to_flush(pm);
+ /* Check the number of ES buffers and append them to current wkld */
+ while(FwIPC_ReadMessage(fwipc, &(fwipc->rcv_q[stream_id]), (char *)&cur_es, sizeof(ipc_msg_data)) != 0)
+ {
+ /* NOTE(Assumption): Again we have to define workload size to be big enough to make sure we can fit
+ all the es buffers into current workload */
+ viddec_emit_contr_tag(&(pm->emitter), &cur_es, 0, false);
+ viddec_emit_assoc_tag(&(pm->emitter), cur_es.id, false);
+ }
+ viddec_fw_parser_push_error_workload(pm, &wkld_cur, stream_id);
+ do
+ {/* Read until no workloads left */
+ viddec_fw_parser_peekmessages(pm, &wkld_cur, &wkld_next, &ret_cur, &ret_next, stream_id);
+ if(ret_cur == 0)
+ {
+ break;
+ }
+ viddec_fw_parser_push_error_workload(pm, &wkld_cur, stream_id);
+ }while(1);
+ switch(flush_type)
+ {
+ case VIDDEC_STREAM_FLUSH_DISCARD:
+ {
+ /* Reset pm_context */
+ viddec_fw_init_swap_memory(stream_id, 0, 1);
+ }
+ break;
+ case VIDDEC_STREAM_FLUSH_PRESERVE:
+ {
+ /* Reset just stream information */
+ viddec_fw_init_swap_memory(stream_id, 0, 0);
+ }
+ /* NOTE(review): no break here -- the PRESERVE case falls through to
+ default, which is empty, so behavior is unaffected; an explicit
+ break would make the intent clearer. */
+ default:
+ break;
+ }
+ {/* swap context into DDR */
+ cp_using_dma(cxt_swap->ddr_cxt, (uint32_t) pm, sizeof(viddec_pm_cxt_t), true, false);
+ }
+ }
+ else
+ {
+ pos = 0;
+ /* check to see if I have any es buffers on input queue. If none are present we don't have to do anything */
+ if(ipc_mq_read_avail(&fwipc->rcv_q[stream_id].mq, (int32_t *)&pos) != 0)
+ ret = VIDDEC_FW_NEED_FREE_WKLD;
+ }
+ }
+ else
+ {
+ /* data present in output queue. */
+ ret =VIDDEC_FW_PORT_FULL;
+ }
+ return ret;
+}
diff --git a/mix_vbp/viddec_fw/fw/parser/viddec_pm_parser_ops.c b/mix_vbp/viddec_fw/fw/parser/viddec_pm_parser_ops.c
new file mode 100644
index 0000000..9a7d828
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/viddec_pm_parser_ops.c
@@ -0,0 +1,178 @@
+#include "viddec_pm.h"
+#include "viddec_fw_debug.h"
+#include "viddec_parser_ops.h"
+#include "viddec_emitter.h"
+#include "viddec_fw_workload.h"
+#include "viddec_pm_utils_bstream.h"
+
+extern void viddec_pm_utils_list_emit_pixel_tags(viddec_pm_utils_list_t *list, uint32_t start, viddec_emitter *emitter, uint32_t using_next);
+extern void viddec_pm_utils_list_emit_slice_tags(viddec_pm_utils_list_t *list, uint32_t start, uint32_t end, viddec_emitter *emitter, uint32_t is_cur_wkld, viddec_workload_item_t *wi);
+
+/* Read and consume num_bits from the bitstream into *data (skip flag = 1).
+   Returns the peekbits result; -1 indicates failure (logged via DEB). */
+int32_t viddec_pm_get_bits(void *parent, uint32_t *data, uint32_t num_bits)
+{
+ int32_t ret = 1;
+ viddec_pm_cxt_t *cxt;
+
+ cxt = (viddec_pm_cxt_t *)parent;
+ ret = viddec_pm_utils_bstream_peekbits(&(cxt->getbits), data, num_bits, 1);
+ if(ret == -1)
+ {DEB("FAILURE!!!! getbits returned %d\n", ret);}
+
+ return ret;
+}
+
+/* Peek num_bits from the bitstream into *data WITHOUT consuming them
+   (skip flag = 0). Returns the peekbits result. */
+int32_t viddec_pm_peek_bits(void *parent, uint32_t *data, uint32_t num_bits)
+{
+ int32_t ret = 1;
+ viddec_pm_cxt_t *cxt;
+
+ cxt = (viddec_pm_cxt_t *)parent;
+ ret = viddec_pm_utils_bstream_peekbits(&(cxt->getbits), data, num_bits, 0);
+ return ret;
+}
+
+/* Advance the bitstream position by num_bits without reading any data.
+   Returns the skipbits result. */
+int32_t viddec_pm_skip_bits(void *parent, uint32_t num_bits)
+{
+ int32_t ret = 1;
+ viddec_pm_cxt_t *cxt;
+
+ cxt = (viddec_pm_cxt_t *)parent;
+ ret = viddec_pm_utils_bstream_skipbits(&(cxt->getbits), num_bits);
+ return ret;
+}
+
+/* Append a workload item to the CURRENT workload. Returns the emitter's
+   append result. */
+int32_t viddec_pm_append_workitem(void *parent, viddec_workload_item_t *item)
+{
+ int32_t ret = 1;
+ viddec_pm_cxt_t *cxt;
+
+ cxt = (viddec_pm_cxt_t *)parent;
+ ret = viddec_emit_append(&(cxt->emitter.cur), item);
+ return ret;
+}
+
+/* Append a workload item to the NEXT workload. Returns the emitter's
+   append result. */
+int32_t viddec_pm_append_workitem_next(void *parent, viddec_workload_item_t *item)
+{
+ int32_t ret = 1;
+ viddec_pm_cxt_t *cxt;
+
+ cxt = (viddec_pm_cxt_t *)parent;
+ ret = viddec_emit_append(&(cxt->emitter.next), item);
+ return ret;
+}
+
+/* Report the current bitstream position within the access unit: bit offset,
+   byte offset, and whether the current byte is an emulation-prevention byte.
+   Always returns 1. */
+int32_t viddec_pm_get_au_pos(void *parent, uint32_t *bit, uint32_t *byte, uint8_t *is_emul)
+{
+ int32_t ret = 1;
+ viddec_pm_cxt_t *cxt;
+
+ cxt = (viddec_pm_cxt_t *)parent;
+ viddec_pm_utils_bstream_get_au_offsets(&(cxt->getbits), bit, byte, is_emul);
+
+ return ret;
+
+}
+
+/* Emit a PIXEL_ES slice tag covering the bytes from the current access-unit
+   position to the end of the buffered list. cur_wkld selects the target:
+   1 = current workload, 0 = next workload. Always returns 1. */
+static inline int32_t viddec_pm_append_restof_pixel_data(void *parent, uint32_t cur_wkld)
+{
+ int32_t ret = 1;
+ viddec_pm_cxt_t *cxt;
+ uint32_t start=0, b_off=0;
+ uint8_t emul=0;
+ viddec_workload_item_t wi;
+
+ cxt = (viddec_pm_cxt_t *)parent;
+ viddec_pm_utils_bstream_get_au_offsets(&(cxt->getbits), &b_off, &start, &emul);
+ /* back up one byte when sitting on an emulation-prevention byte, so the
+ tag starts at the real ES byte -- presumably to include it; confirm
+ against viddec_pm_utils_bstream_get_au_offsets semantics */
+ if(emul) start--;
+
+ wi.vwi_type = VIDDEC_WORKLOAD_PIXEL_ES;
+ wi.es.es_flags = 0;
+ viddec_pm_utils_list_emit_slice_tags(&(cxt->list), start, cxt->list.total_bytes -1, &(cxt->emitter), cur_wkld, &wi);
+ return ret;
+}
+
+/* Append remaining pixel data of the access unit to the CURRENT workload. */
+int32_t viddec_pm_append_pixeldata(void *parent)
+{
+ return viddec_pm_append_restof_pixel_data(parent, 1);
+}
+
+/* Append remaining pixel data of the access unit to the NEXT workload. */
+int32_t viddec_pm_append_pixeldata_next(void *parent)
+{
+ return viddec_pm_append_restof_pixel_data(parent, 0);
+}
+
+/* Return a pointer to the CURRENT workload's data. */
+viddec_workload_t* viddec_pm_get_header(void *parent)
+{
+ viddec_pm_cxt_t *cxt;
+
+ cxt = (viddec_pm_cxt_t *)parent;
+
+ return cxt->emitter.cur.data;
+}
+
+/* Return a pointer to the NEXT workload's data. */
+viddec_workload_t* viddec_pm_get_next_header(void *parent)
+{
+ viddec_pm_cxt_t *cxt;
+
+ cxt = (viddec_pm_cxt_t *)parent;
+
+ return cxt->emitter.next.data;
+}
+
+/* Return nonzero when the bitstream is at the last valid RBSP byte of the
+   current access unit (H264-oriented check; see
+   viddec_pm_utils_bstream_nomorerbspdata). */
+int32_t viddec_pm_is_nomoredata(void *parent)
+{
+ int32_t ret=0;
+ viddec_pm_cxt_t *cxt;
+
+ cxt = (viddec_pm_cxt_t *)parent;
+ ret = viddec_pm_utils_bstream_nomorerbspdata(&(cxt->getbits));
+ return ret;
+}
+
+/* Fetch the byte at the current bitstream position into *byte.
+   NOTE(review): the return type is uint32_t but the internal result is an
+   int32_t initialized to -1; a failure would be returned as a large unsigned
+   value -- confirm callers treat it accordingly. */
+uint32_t viddec_pm_get_cur_byte(void *parent, uint8_t *byte)
+{
+ int32_t ret=-1;
+ viddec_pm_cxt_t *cxt;
+
+ cxt = (viddec_pm_cxt_t *)parent;
+ ret = viddec_pm_utils_bstream_get_current_byte(&(cxt->getbits), byte);
+ return ret;
+}
+
+/* Emit slice tags for byte range [start, end] of the buffered list into the
+   current (using_next=0) or next (using_next=1) workload, using *wi as the
+   item template. Passing end == VIDDEC_PARSE_INVALID_POS means "to the end
+   of the access-unit data". Always returns 1. */
+int32_t viddec_pm_append_misc_tags(void *parent, uint32_t start, uint32_t end, viddec_workload_item_t *wi, uint32_t using_next)
+{
+ int32_t ret = 1;
+ viddec_pm_cxt_t *cxt;
+
+ cxt = (viddec_pm_cxt_t *)parent;
+ if (end == VIDDEC_PARSE_INVALID_POS) end = (cxt->list.total_bytes -1);
+ viddec_pm_utils_list_emit_slice_tags(&(cxt->list), start, end, &(cxt->emitter), using_next, wi);
+
+ return ret;
+
+}
+
+/* Record the error code to apply to the next workload when EOS is reached. */
+void viddec_pm_set_next_frame_error_on_eos(void *parent, uint32_t error)
+{
+ viddec_pm_cxt_t *cxt;
+ cxt = (viddec_pm_cxt_t *)parent;
+ cxt->next_workload_error_eos = error;
+}
+
+/* Flag that frame completion was detected late (frame actually ended in the
+   previous access unit); changes how contribution tags are generated. */
+void viddec_pm_set_late_frame_detect(void *parent)
+{
+ viddec_pm_cxt_t *cxt;
+ cxt = (viddec_pm_cxt_t *)parent;
+ cxt->late_frame_detect = true;
+}
+
+/* Byte-swap the three payload words of a user-data workload item on
+   big-endian builds; a no-op otherwise (the self-assignment only silences
+   the unused-parameter warning). */
+void viddec_pm_setup_userdata(viddec_workload_item_t *wi)
+{
+#ifdef MFDBIGENDIAN
+ wi->vwi_payload[0] = SWAP_WORD(wi->vwi_payload[0]);
+ wi->vwi_payload[1] = SWAP_WORD(wi->vwi_payload[1]);
+ wi->vwi_payload[2] = SWAP_WORD(wi->vwi_payload[2]);
+#else
+ wi=wi;
+#endif
+}
diff --git a/mix_vbp/viddec_fw/fw/parser/viddec_pm_stubs.c b/mix_vbp/viddec_fw/fw/parser/viddec_pm_stubs.c
new file mode 100644
index 0000000..0a6f09b
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/viddec_pm_stubs.c
@@ -0,0 +1,21 @@
+#include "viddec_parser_ops.h"
+
+/* Stub codec-ops initializers: empty placeholders used when the given codec
+   parser is compiled out of this build; they leave *ops untouched. */
+void viddec_vc1_get_ops(viddec_parser_ops_t *ops)
+{
+ return;
+}
+
+void viddec_mpeg2_get_ops(viddec_parser_ops_t *ops)
+{
+ return;
+}
+
+void viddec_mp4_get_ops(viddec_parser_ops_t *ops)
+{
+ return;
+}
+
+void viddec_h264_get_ops(viddec_parser_ops_t *ops)
+{
+ return;
+}
diff --git a/mix_vbp/viddec_fw/fw/parser/viddec_pm_tags.c b/mix_vbp/viddec_fw/fw/parser/viddec_pm_tags.c
new file mode 100644
index 0000000..b0d8842
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/viddec_pm_tags.c
@@ -0,0 +1,304 @@
+#include "viddec_pm.h"
+#include "viddec_fw_debug.h"
+#include "viddec_fw_common_defs.h"
+#include "viddec_pm_tags.h"
+/*
+ Overview of tag association:
+
+ Contribution flags:
+ The current list has all the buffers which contribute to this particular workload. So we walk through the
+ list and throw buf done for all the buffers which were consumed. This can be deduced from the total bytes
+ in the list, which represents the bytes that were used for this access unit.
+ For buffers which were partially used (and this can only be the last buffer) we throw a continued tag. The
+ Parser manager tells us when to throw a continued tag. This will only happen when the Parser Manager detects
+ that we reached the end of the current frame.
+
+ Association Tags:
+ These are the tags that FW generates which indicate how to associate metadata with Frames.
+ The policy to determine which tag belongs to which frame is based on sc prefix position. If an ES buffer starts with
+ or has a sc prefix it is associated to the next decodable frame (based on first slice or header depending on codec).
+ We use three state variables to determine where the frame starts and ends.
+ frame_start_found: Indicates we saw the beginning of a frame in the current list of ES buffers (which represents the current access unit).
+ This is decremented on workload done since it normally means we detected frame end.
+ found_fm_st_in_current_au: Indicates we saw the first slice in the current access unit. It is mainly used to decide whether the first buffer
+ belongs to the current frame or the next frame. It is reset after its use.
+ Frame Done: Indicates we detected the end of the frame pointed to by the current workload.
+
+ Basic algo:
+ If we find a frame start and the first buffer doesn't start with a SC prefix, every consumed buffer belongs to the Next frame. If the first buffer
+ starts with a SC prefix then that buffer belongs to the Current frame.
+ If we haven't found a frame start, every buffer belongs to the current frame.
+
+ TODO: Check for return codes from emitter
+*/
+
+
+/*
+ This function generates contribution tags for the current workload by walking through the list of consumed buffers.
+ If the frame is done (ignore_partial is false) we generate a continued tag for the last item in the list (if it is not completely consumed).
+ This is used for all codecs except H264.
+ Returns PM_SUCCESS (emitter return codes are not checked yet; see file-level TODO).
+ */
+uint32_t viddec_pm_generic_generate_contribution_tags(void *parent, uint32_t ignore_partial)
+{
+ uint32_t ret = PM_SUCCESS;
+ viddec_pm_cxt_t *cxt = (viddec_pm_cxt_t *)parent;
+ viddec_pm_utils_list_t *list = &(cxt->list);
+
+ if(list->num_items != 0)
+ {
+ if(!cxt->late_frame_detect)
+ {
+ uint32_t num_items = 0;
+ while((num_items < list->num_items) && (list->data[num_items].edpos <= (uint32_t)list->total_bytes))
+ {/* Walkthrough Consumed buffers and dump the tags */
+ viddec_emit_contr_tag(&(cxt->emitter), &(list->sc_ibuf[num_items]), false, false);
+ num_items++;
+ }
+ /* Dump incomplete tags if required */
+ if(!ignore_partial)
+ {/* check to see if last item is not consumed and dump continued flag */
+ if((num_items < list->num_items)
+ && (list->data[num_items].edpos >= (uint32_t)list->total_bytes))
+ {
+ viddec_emit_contr_tag(&(cxt->emitter), &(list->sc_ibuf[num_items]), true, false);
+ }
+ }
+ }
+ else
+ {
+ /* Only happens for dangling fields in MP2 Field pictures, in which case we find out the current frame was done in
+ last access unit, which is similar to H264 */
+ ret = viddec_pm_lateframe_generate_contribution_tags(parent, ignore_partial);
+ cxt->late_frame_detect = false;
+ }
+ }
+ return ret;
+}
+
+/*
+ For H264 when a frame is done it really means the current frame was done in the last access unit. The current access unit represented
+ by the list belongs to the next frame. ignore_partial is false for frame done.
+ When the frame is not done we dump all consumed buffers into the next workload, else they go to the current workload.
+ If the frame is done we throw a continued flag for the first buffer in the current workload if it was used in the last access unit.
+ */
+uint32_t viddec_pm_lateframe_generate_contribution_tags(void *parent, uint32_t ignore_partial)
+{
+ uint32_t ret = PM_SUCCESS;
+ viddec_pm_cxt_t *cxt = (viddec_pm_cxt_t *)parent;
+ viddec_pm_utils_list_t *list = &(cxt->list);
+
+ if(list->num_items != 0)
+ {
+ uint32_t num_items = 0;
+ /* If start offset is not 0 then it was partially used in last access unit. !ignore_partial means frame done*/
+ if((list->start_offset!= 0) && !ignore_partial)
+ {/* Emit continue in current if necessary. */
+ viddec_emit_contr_tag(&(cxt->emitter), &(list->sc_ibuf[num_items]), true, false);
+ }
+
+ while((num_items < list->num_items) && (list->data[num_items].edpos <= (uint32_t)list->total_bytes))
+ { /* Walkthrough Consumed buffers and dump the tags to current or Next*/
+ viddec_emit_contr_tag(&(cxt->emitter), &(list->sc_ibuf[num_items]), false, !ignore_partial);
+ num_items++;
+ }
+ }
+ return ret;
+}
+
+/*
+ This function dumps association tags from the temporary pending array into a workload
+ (using_next selects current or next), clearing each entry to INVALID_ENTRY as it goes.
+ Stops at the first INVALID_ENTRY or after MAX_IBUFS_PER_SC entries. Returns PM_SUCCESS.
+*/
+uint32_t viddec_pm_generate_missed_association_tags(viddec_pm_cxt_t *cxt, uint32_t using_next)
+{
+ uint32_t i=0, ret = PM_SUCCESS;
+
+ while((i < MAX_IBUFS_PER_SC) && (cxt->pending_tags.pending_tags[i] != INVALID_ENTRY))
+ {
+ viddec_emit_assoc_tag(&(cxt->emitter), cxt->pending_tags.pending_tags[i], using_next);
+ cxt->pending_tags.pending_tags[i] = INVALID_ENTRY;
+ i++;
+ }
+ return ret;
+}
+
+/* This function adds the current list of ES buffers to the pending list. ignore_first when set tells us to ignore the first
+ buffer in the list (because a tag was already thrown for it).
+ NOTE(review): t_index is not bounds-checked against MAX_IBUFS_PER_SC --
+ presumably the list size guarantees it fits; confirm.
+*/
+void viddec_pm_add_tags_to_pendinglist(viddec_pm_cxt_t *cxt, uint32_t ignore_first)
+{
+ viddec_pm_utils_list_t *list = &(cxt->list);
+ vidded_pm_pending_tags_t *pend = &(cxt->pending_tags);
+ uint32_t index=0, t_index=0;
+
+ if(!ignore_first && (list->start_offset == 0))
+ {/* If start offset is 0 we are saying that first buffer in list starts with start code */
+ pend->first_buf_aligned = true;
+ }
+ else
+ {/* We are ignoring first item in list since we already threw a tag for this buffer */
+ index++;
+ pend->first_buf_aligned = false;
+ }
+
+ while( (index < list->num_items) && (list->data[index].edpos <= (uint32_t)list->total_bytes))
+ {/* walk through consumed buffers and store buffer id's in pending list */
+ pend->pending_tags[t_index] = list->sc_ibuf[index].id;
+ index++;t_index++;
+ }
+ if( (index < list->num_items) && (list->data[index].stpos < (uint32_t)list->total_bytes))
+ {/* If last item is partially consumed still add it to pending tags since tag association is based on start of ES buffer */
+ pend->pending_tags[t_index] = list->sc_ibuf[index].id;
+ }
+}
+
+/* Helper function to emit an association tag from the pending list and reset the entry to INVALID_ENTRY */
+static inline void viddec_pm_emit_pending_tag_item(viddec_emitter *emit, vidded_pm_pending_tags_t *pend, uint32_t index, uint32_t using_next)
+{
+ viddec_emit_assoc_tag(emit, pend->pending_tags[index], using_next);
+ pend->pending_tags[index] = INVALID_ENTRY;
+}
+
+/*
+ Tag association for mpeg2:
+ start frame is detected in pict header extension, but pict header represents start of frame.
+ To handle this we always store current AU list in temporary pending list. At the start of function
+ we look to see if a frame start was found, if we did we start dumping items from pending list based
+ on byte position of sc in first buffer of pending list. At the end we copy current list items to
+ pending list.
+ Limitation With Dangling fields: If we have AF1 AF2 BF1 CF1 CF2 as the sequence of fields
+ Tag association will be fine for A & B, However the first buffer tag on C will fall into B
+ We do not want to fix this issue right now as it means doubling size of pending list which
+ increases memory usage. Normally dangling fields are thrown away so worst case we will miss
+ one original PTS, So its OK not to fix it right now.
+ */
+uint32_t viddec_mpeg2_add_association_tags(void *parent)
+{
+ uint32_t ret = PM_SUCCESS;
+ viddec_pm_cxt_t *cxt = (viddec_pm_cxt_t *)parent;
+ vidded_pm_pending_tags_t *pend = &(cxt->pending_tags);
+ uint32_t first_slice = false, index = 0;
+ /* check to see if we found a frame start in current access unit */
+ first_slice = cxt->frame_start_found && cxt->found_fm_st_in_current_au;
+ cxt->found_fm_st_in_current_au = false;
+ /* If we found frame start and first item in pending tags starts with a start code
+ then it needs to go to current frame. */
+ if(first_slice && pend->first_buf_aligned && (pend->pending_tags[index] != INVALID_ENTRY))
+ {
+ viddec_pm_emit_pending_tag_item(&(cxt->emitter), pend, index, false);
+ index++;
+ }
+ /* rest of list goes to current if frame start is not found else next frame */
+ while((index < MAX_IBUFS_PER_SC) && (pend->pending_tags[index] != INVALID_ENTRY))
+ {
+ viddec_pm_emit_pending_tag_item(&(cxt->emitter), pend, index, cxt->frame_start_found);
+ index++;
+ }
+ /* Copy items to temporary List */
+ viddec_pm_add_tags_to_pendinglist(cxt, false);
+ return ret;
+}
+
+/*
+ Tag association for h264:
+ In this case when we get frame done it means current frame was done in last access unit. The data in current list belongs
+ to next frame. To handle this we always dump the buffered tags from last list and throw them in current/next frame based on pend state.
+ If the first item in current list is on sc boundary, it has to go into next so we always throw that tag in next.
+ For rest of items we store them in pending tags array and store information on where these stored tags should go into for
+ next run. This is determined by start frame. we do this because at this state our next should be current and "next next" should
+ be next.
+ */
+uint32_t viddec_h264_add_association_tags(void *parent)
+{
+ uint32_t ret = PM_SUCCESS;
+ viddec_pm_cxt_t *cxt = (viddec_pm_cxt_t *)parent;
+ viddec_pm_utils_list_t *list = &(cxt->list);
+ vidded_pm_pending_tags_t *pend = &(cxt->pending_tags);
+ uint32_t first_slice = false, index = 0;
+
+ /* Throw tags for items from pending list based on stored state from last run */
+ viddec_pm_generate_missed_association_tags(cxt, pend->using_next);
+ first_slice = cxt->frame_start_found && cxt->found_fm_st_in_current_au;
+ cxt->found_fm_st_in_current_au = false;
+ /* If we saw frame start and first buffer is aligned to start code throw it into next */
+ if(first_slice && (list->start_offset == 0))
+ {
+ viddec_emit_assoc_tag(&(cxt->emitter), list->sc_ibuf[index].id, cxt->frame_start_found && cxt->pending_tags.frame_done);
+ index++;
+ }
+ /* add tags to pending list */
+ viddec_pm_add_tags_to_pendinglist(cxt, (index != 0));
+ /* We want to figure out where these buffers should go into. There are three possible cases
+ current: If no frame start found these should go into next.
+ next: If one frame start is found and frame is not done then it should go to next.
+ if a frame is done then pm will push current out and next time we come here previous next is current.
+ next next: If two frame starts are found then we want it to be next next workload, which is what next will be
+ when we get called next time.
+ */
+ pend->using_next = (!cxt->pending_tags.frame_done && (cxt->frame_start_found == 1)) || (cxt->frame_start_found > 1);
+ return ret;
+}
+
+/*
+ Tag association for vc1:
+ Frame header represents start of new frame. If we saw a frame start in current access unit and the buffer starts
+ with start code it needs to go to current frame. Rest of items go to next if frame start found else current frame.
+*/
+uint32_t viddec_generic_add_association_tags(void *parent)
+{
+ uint32_t ret = PM_SUCCESS;
+ viddec_pm_cxt_t *cxt = (viddec_pm_cxt_t *)parent;
+ viddec_pm_utils_list_t *list = &(cxt->list);
+ uint32_t not_first_slice = false, index = 0;
+
+ /* We check to see if this access unit is not the first one with frame start. This evaluates to true in that case */
+ not_first_slice = cxt->frame_start_found && !cxt->found_fm_st_in_current_au;
+ cxt->found_fm_st_in_current_au = false;
+ if(list->start_offset == 0)
+ {/* If start offset is 0, we have start code at beginning of buffer. If frame start was detected in this
+ access unit we put the tag in current else it goes to next */
+ viddec_emit_assoc_tag(&(cxt->emitter), list->sc_ibuf[index].id, not_first_slice);
+ }
+ /* Skip first item always, for start_offset=0 its already been handled above
+ NOTE(review): when start_offset != 0 the first item is skipped without a
+ tag here -- presumably it was tagged in a previous access unit; confirm. */
+ index++;
+ while( (index < list->num_items) && (list->data[index].edpos <= (uint32_t)list->total_bytes))
+ {/* Walkthrough Consumed buffers and dump the tags to current or next*/
+ viddec_emit_assoc_tag(&(cxt->emitter), list->sc_ibuf[index].id, cxt->frame_start_found);
+ index++;
+ }
+ if( (index < list->num_items) && (list->data[index].stpos < (uint32_t)list->total_bytes))
+ {/* Dump last item if it was partially consumed */
+ viddec_emit_assoc_tag(&(cxt->emitter), list->sc_ibuf[index].id, cxt->frame_start_found);
+ }
+ return ret;
+}
+
+/*
+ This function throws contribution and association tags for buffers which were not used yet, during flush.
+ All tags go to the current workload (using_next = false) and are marked fully consumed.
+ */
+void viddec_pm_generate_tags_for_unused_buffers_to_flush(viddec_pm_cxt_t *cxt)
+{
+ viddec_pm_utils_list_t *list;
+ uint32_t index=0;
+
+ list = &(cxt->list);
+ /* Generate association tags from temporary pending array */
+ viddec_pm_generate_missed_association_tags(cxt, false);
+ if(list->num_items > 0)
+ {
+ /* Throw contribution flag for first item as done */
+ viddec_emit_contr_tag(&(cxt->emitter), &(list->sc_ibuf[index]), false, false);
+ if(cxt->list.start_offset == 0)
+ {/* Throw association for first item if it was not done already */
+ viddec_emit_assoc_tag(&(cxt->emitter), list->sc_ibuf[index].id, false);
+ }
+ index++;
+ while(index < list->num_items)
+ {/* Walk through list and throw contribution and association flags */
+ viddec_emit_contr_tag(&(cxt->emitter), &(list->sc_ibuf[index]), false, false);
+ viddec_emit_assoc_tag(&(cxt->emitter), list->sc_ibuf[index].id, false);
+ index++;
+ }
+ }
+ /* Not required to re init list structure as flush takes care of it */
+}
+
diff --git a/mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_bstream.c b/mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_bstream.c
new file mode 100644
index 0000000..8d3f329
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_bstream.c
@@ -0,0 +1,472 @@
+#include "viddec_pm_utils_bstream.h"
+#include "viddec_fw_debug.h"
+
+/* Internal data structure for calculating required bits. */
+typedef union
+{
+ uint8_t byte[8];
+ uint32_t word[2];
+}viddec_pm_utils_getbits_t;
+
+void viddec_pm_utils_bstream_reload(viddec_pm_utils_bstream_cxt_t *cxt);
+uint32_t viddec_pm_utils_bstream_getphys(viddec_pm_utils_bstream_cxt_t *cxt, uint32_t pos, uint32_t lst_index);
+extern uint32_t cp_using_dma(uint32_t ddr_addr, uint32_t local_addr, uint32_t size, char to_ddr, char swap);
+
+/* Bytes left in cubby buffer which were not consumed yet */
+static inline uint32_t viddec_pm_utils_bstream_bytesincubby(viddec_pm_utils_bstream_buf_cxt_t *cxt)
+{
+ return (cxt->buf_end - cxt->buf_index);
+}
+
+/*
+ This function checks to see if we are at the last valid byte for current access unit.
+*/
+uint8_t viddec_pm_utils_bstream_nomorerbspdata(viddec_pm_utils_bstream_cxt_t *cxt)
+{
+ uint32_t data_remaining = 0;
+ uint8_t ret = false;
+
+ /* How much data is remaining including current byte to be processed.*/
+ data_remaining = cxt->list->total_bytes - (cxt->au_pos + (cxt->bstrm_buf.buf_index - cxt->bstrm_buf.buf_st));
+
+ /* Start code prefix can be 000001 or 0000001. We always only check for 000001.
+ data_remaining should be 1 for 000001, as we don't count the sc prefix and 1 represents the current byte.
+ data_remaining should be 2 for 00000001, as we don't count the sc prefix; 1 is the current byte and 1 the extra 00, since we only check for 000001.
+ NOTE: This is used for H264 only.
+ */
+ switch(data_remaining)
+ {
+ case 2:
+ /* If next byte is 0 and its the last byte in access unit */
+ ret = (cxt->bstrm_buf.buf[cxt->bstrm_buf.buf_index+1] == 0x0);
+ break;
+ case 1:
+ /* if the current byte is last byte */
+ ret = true;
+ break;
+ default:
+ break;
+ }
+ return ret;
+}
+
+/*
+ This function returns true if cubby buffer has the last byte of access unit.
+*/
+uint8_t viddec_pm_utils_bstream_nomoredata(viddec_pm_utils_bstream_cxt_t *cxt)
+{
+ uint32_t last_byte_offset_plus_one=0;
+ uint8_t ret = false;
+ /* Check to see if the last byte's access-unit offset is the last byte for the current access unit.
+ End represents the first invalid byte, so (end - st) will give the number of bytes.*/
+ last_byte_offset_plus_one = cxt->au_pos + (cxt->bstrm_buf.buf_end - cxt->bstrm_buf.buf_st);
+ if((int32_t)last_byte_offset_plus_one >= cxt->list->total_bytes)
+ {
+ ret = true;
+ }
+ return ret;
+}
+
+/* This function initializes scratch buffer, which is used for staging already read data, due to DMA limitations */
+static inline void viddec_pm_utils_bstream_scratch_init(viddec_pm_utils_bstream_scratch_cxt_t *cxt)
+{
+ cxt->st = cxt->size = cxt->bitoff=0;
+}
+
+/* This function tells us how much more data is in the current es buffer from current position. Its used to figure out if
+ we need to go to next es buffer */
+static inline uint32_t viddec_pm_utils_bstream_datafromindex(viddec_pm_utils_list_t *list, uint32_t index, uint32_t offset)
+{
+    uint32_t ret=0;
+    int32_t val=0;
+    val = (list->data[index].edpos <= (uint32_t)list->total_bytes) ? list->data[index].edpos: (uint32_t)list->total_bytes;
+    val = val - (int32_t)offset;
+    if(val > 0) ret = (uint32_t)val;
+    return ret; /* BUG FIX: was "return val" — a negative val implicitly converts to a huge uint32_t instead of the intended clamped 0 */
+}
+
+/* This function seeks to byte offset position starting from lst_index, if more data is present in current ES buffer pointed by
+ lst_index returns the remaining data in current buffer along with physical address of byte offset. The lst_index parameter
+ at returns index of ES buffer in list which has byte_offset */
+static inline uint32_t viddec_pm_utils_bstream_maxbytes_from_index(viddec_pm_utils_bstream_cxt_t *cxt,
+ uint32_t *lst_index,
+ uint32_t byte_offset,
+ uint32_t *physaddr)
+{
+ viddec_pm_utils_list_t *list;
+ uint32_t last_byte_offst=0, bytes_left=0;/* default return value is 0 bytes */
+
+ list = cxt->list;
+ while(*lst_index < list->num_items)
+ {
+ /* Check to see if we reached the buffer with last valid byte of current access unit, List can have data beyond current access unit */
+ last_byte_offst = (list->data[*lst_index].edpos <= (uint32_t)list->total_bytes) ? list->data[*lst_index].edpos: (uint32_t)list->total_bytes;
+ if(byte_offset < last_byte_offst)
+ {/* Found a match so return with data remaining */
+#if 1
+ int32_t val=0;
+ val = last_byte_offst - (int32_t)byte_offset;
+ if(val > 0) bytes_left = (uint32_t)val;
+#else
+ bytes_left = viddec_pm_utils_bstream_datafromindex(list, *lst_index, byte_offset);
+#endif
+ *physaddr = viddec_pm_utils_bstream_getphys(cxt, byte_offset, *lst_index);
+ break;
+ }
+ *lst_index+=1;
+ }
+ return bytes_left;
+}
+
+/* This function is for copying trailing bytes of cubby bitstream buffer to scratch buffer */
+static inline void viddec_pm_utils_bstream_scratch_copyto(viddec_pm_utils_bstream_scratch_cxt_t *cxt, uint8_t *data, uint32_t num_bytes)
+{
+ uint32_t i=0;
+ for(i=0; i<num_bytes;i++)
+ {
+ cxt->buf_scratch[i] = *data;
+ data++;cxt->size++;
+ }
+}
+
+/* This function is for copying trailing bytes from scratch buffer to bitstream buffer*/
+static inline void viddec_pm_utils_bstream_scratch_copyfrom(viddec_pm_utils_bstream_scratch_cxt_t *cxt, uint8_t *data)
+{
+ uint32_t i=0;
+ for(i=0; i<cxt->size;i++)
+ {
+ *data = cxt->buf_scratch[i];
+ data++;
+ }
+}
+
+/* This function populates requested number of bytes into data parameter, skips emulation prevention bytes if needed */
+static inline int32_t viddec_pm_utils_getbytes(viddec_pm_utils_bstream_buf_cxt_t *bstream,
+ viddec_pm_utils_getbits_t *data,/* gets populated with read bytes*/
+ uint32_t *act_bytes, /* actual number of bytes read can be more due to emulation prev bytes*/
+ uint32_t *phase, /* Phase for emulation */
+ uint32_t num_bytes,/* requested number of bytes*/
+ uint32_t emul_reqd, /* On true we look for emulation prevention */
+ uint8_t is_offset_zero /* Are we on aligned byte position for first byte*/
+ )
+{
+ int32_t ret = 1;
+ uint8_t cur_byte = 0, valid_bytes_read = 0;
+ *act_bytes = 0;
+
+ while(valid_bytes_read < num_bytes)
+ {
+ cur_byte = bstream->buf[bstream->buf_index + *act_bytes];
+ if((cur_byte == 0x3) &&(*phase == 2))
+ {/* skip emulation byte. we update the phase only if emulation prevention is enabled */
+ *phase = 0;
+ }
+ else
+ {
+ data->byte[valid_bytes_read] = cur_byte;
+ /*
+ We only update phase for first byte if bit offset is 0. If its not 0 then it was already accounted for in the past.
+ From second byte onwards we always look to update phase.
+ */
+ if((*act_bytes != 0) || (is_offset_zero))
+ {
+ if(cur_byte == 0)
+ {
+ /* Update phase only if emulation prevention is required */
+ *phase +=( ((*phase < 2) && emul_reqd ) ? 1: 0 );
+ }
+ else
+ {
+ *phase=0;
+ }
+ }
+ valid_bytes_read++;
+ }
+ *act_bytes +=1;
+ }
+ /* Check to see if we reached end during above operation. We might be out of range buts it safe since our array
+ has at least MIN_DATA extra bytes and the maximum out of bounds we will go is 5 bytes */
+ if((bstream->buf_index + *act_bytes -1) >= bstream->buf_end)
+ {
+ ret = -1;
+ }
+ return ret;
+}
+
+/*
+ This function checks to see if we have minimum amount of data else tries to reload as much as it can.
+ Always returns the data left in current buffer in parameter.
+*/
+static inline void viddec_pm_utils_check_bstream_reload(viddec_pm_utils_bstream_cxt_t *cxt, uint32_t *data_left)
+{
+#ifdef VBP
+ *data_left = viddec_pm_utils_bstream_bytesincubby(&(cxt->bstrm_buf));
+#else
+ uint8_t isReload=0;
+
+ *data_left = viddec_pm_utils_bstream_bytesincubby(&(cxt->bstrm_buf));
+ /* If we have minimum data we should continue, else try to read more data */
+ if(*data_left <MIN_DATA)
+ {
+ /* Check to see if we already read last byte of current access unit */
+ isReload = !(viddec_pm_utils_bstream_nomoredata(cxt) == 1);
+ while(isReload)
+ {
+ /* We have more data in access unit so keep reading until we get at least minimum data */
+ viddec_pm_utils_bstream_reload(cxt);
+ *data_left = viddec_pm_utils_bstream_bytesincubby(&(cxt->bstrm_buf));
+ /* Break out of loop if we reached last byte or we have enough data */
+ isReload = !((*data_left >= MIN_DATA) || (viddec_pm_utils_bstream_nomoredata(cxt) == 1));
+ }
+ }
+#endif
+}
+/*
+ This function moves the stream position by N bits(parameter bits). The bytes parameter tells us how many bytes were
+ read for this N bits(can be different due to emulation bytes).
+*/
+static inline void viddec_pm_utils_update_skipoffsets(viddec_pm_utils_bstream_buf_cxt_t *bstream, uint32_t bits, uint32_t bytes)
+{
+ if((bits & 0x7) == 0)
+ {
+ bstream->buf_bitoff = 0;
+ bstream->buf_index +=bytes;
+ }
+ else
+ {
+ bstream->buf_bitoff = bits & 0x7;
+ bstream->buf_index +=(bytes - 1);
+ }
+}
+
+/*
+ This function gets physical address of the requested au offset(pos).
+*/
+
+uint32_t viddec_pm_utils_bstream_getphys(viddec_pm_utils_bstream_cxt_t *cxt, uint32_t pos, uint32_t lst_index)
+{
+ uint32_t ret = 0, last_byte_offst=0;
+ viddec_pm_utils_list_t *list;
+
+ list = cxt->list;
+ while(lst_index < list->num_items)
+ {
+ last_byte_offst = (list->data[lst_index].edpos <= (uint32_t)list->total_bytes) ? list->data[lst_index].edpos: (uint32_t)list->total_bytes;
+ if(pos < last_byte_offst)
+ {
+#ifndef MFDBIGENDIAN
+ ret = (uint32_t)list->sc_ibuf[lst_index].buf;
+#else
+ ret = list->sc_ibuf[lst_index].phys;
+#endif
+ ret +=(pos - list->data[lst_index].stpos);
+ if(lst_index == 0) ret+=list->start_offset;
+ break;
+ }
+ lst_index++;
+ }
+ return ret;
+}
+
+/*
+ Actual reload function which uses dma to refill bitstream buffer.
+*/
+void viddec_pm_utils_bstream_reload(viddec_pm_utils_bstream_cxt_t *cxt)
+{
+ viddec_pm_utils_bstream_buf_cxt_t *bstream;
+
+ bstream = &(cxt->bstrm_buf);
+
+ /* Update current offset positions */
+ cxt->au_pos += (bstream->buf_index - bstream->buf_st);
+ bstream->buf_st = bstream->buf_index;
+ /* copy leftover bytes into scratch */
+ {
+ int32_t cur_bytes=0;
+ viddec_pm_utils_bstream_scratch_init(&(cxt->scratch));
+ cur_bytes = viddec_pm_utils_bstream_bytesincubby(&(cxt->bstrm_buf));
+ if(cur_bytes > 0)
+ {
+ viddec_pm_utils_bstream_scratch_copyto(&(cxt->scratch), &(bstream->buf[bstream->buf_index]), cur_bytes);
+ cxt->scratch.bitoff = bstream->buf_bitoff;
+ }
+ }
+ /* Initiate DMA and copyback scratch data */
+ {
+ uint32_t data_left = 0, ddr_mask=0;
+ /* calculate necessary alignments and copy data */
+ {
+ uint32_t ddr_addr=0, data_wrote=0;
+ uint32_t byte_pos;
+ /* byte pos points to the position from where we want to read data.*/
+ byte_pos = cxt->au_pos + cxt->scratch.size;
+ data_left = viddec_pm_utils_bstream_maxbytes_from_index(cxt, &(cxt->list_off), byte_pos, &ddr_addr);
+ if(data_left > CUBBY_SIZE)
+ {
+ data_left = CUBBY_SIZE;
+ }
+ if(data_left != 0)
+ {
+ ddr_mask = ddr_addr & 0x3;
+ ddr_addr = ddr_addr & ~0x3;
+ data_wrote = cp_using_dma(ddr_addr, (uint32_t)&(bstream->buf[MIN_DATA]), (data_left + ddr_mask), 0, 1);
+ }
+ }
+ /* copy scratch data back to buffer and update offsets */
+ {
+ uint32_t index=0;
+ index = MIN_DATA + ddr_mask;
+ index -= cxt->scratch.size;
+ viddec_pm_utils_bstream_scratch_copyfrom(&(cxt->scratch), &(bstream->buf[index]));
+ bstream->buf_st = bstream->buf_index = index;
+ bstream->buf_end = data_left + cxt->scratch.size + bstream->buf_st;
+ bstream->buf_bitoff = cxt->scratch.bitoff;
+ }
+ }
+}
+
+/*
+ Init function called by parser manager after sc code detected.
+*/
+void viddec_pm_utils_bstream_init(viddec_pm_utils_bstream_cxt_t *cxt, viddec_pm_utils_list_t *list, uint32_t is_emul)
+{
+#ifdef VBP
+ cxt->emulation_byte_counter = 0;
+#endif
+
+ cxt->au_pos = 0;
+ cxt->list = list;
+ cxt->list_off = 0;
+ cxt->phase = 0;
+ cxt->is_emul_reqd = is_emul;
+ cxt->bstrm_buf.buf_st = cxt->bstrm_buf.buf_end = cxt->bstrm_buf.buf_index = cxt->bstrm_buf.buf_bitoff = 0;
+}
+
+/* Get the requested byte position. If the byte is already present in cubby its returned
+ else we seek forward and get the requested byte.
+ Limitation:Once we seek forward we can't return back.
+*/
+int32_t viddec_pm_utils_bstream_get_current_byte(viddec_pm_utils_bstream_cxt_t *cxt, uint8_t *byte)
+{
+ int32_t ret = -1;
+ uint32_t data_left=0;
+ viddec_pm_utils_bstream_buf_cxt_t *bstream;
+
+ bstream = &(cxt->bstrm_buf);
+ viddec_pm_utils_check_bstream_reload(cxt, &data_left);
+ if(data_left != 0)
+ {
+ *byte = bstream->buf[bstream->buf_index];
+ ret = 1;
+ }
+ return ret;
+}
+
+/*
+ Function to skip N bits ( N<= 32).
+*/
+int32_t viddec_pm_utils_bstream_skipbits(viddec_pm_utils_bstream_cxt_t *cxt, uint32_t num_bits)
+{
+ int32_t ret = -1;
+ uint32_t data_left=0;
+ viddec_pm_utils_bstream_buf_cxt_t *bstream;
+
+ bstream = &(cxt->bstrm_buf);
+ viddec_pm_utils_check_bstream_reload(cxt, &data_left);
+ if((num_bits <= 32) && (num_bits > 0) && (data_left != 0))
+ {
+ uint8_t bytes_required=0;
+
+ bytes_required = (bstream->buf_bitoff + num_bits + 7)>>3;
+ if(bytes_required <= data_left)
+ {
+ viddec_pm_utils_getbits_t data;
+ uint32_t act_bytes =0;
+ if(viddec_pm_utils_getbytes(bstream, &data, &act_bytes, &(cxt->phase), bytes_required, cxt->is_emul_reqd, (bstream->buf_bitoff == 0)) != -1)
+ {
+ uint32_t total_bits=0;
+ total_bits=num_bits+bstream->buf_bitoff;
+ viddec_pm_utils_update_skipoffsets(bstream, total_bits, act_bytes);
+ ret=1;
+
+ if (act_bytes > bytes_required)
+ {
+ cxt->emulation_byte_counter = act_bytes - bytes_required; /* NOTE(review): plain "=" here vs "+=" in peekbits; also emulation_byte_counter is only initialised under #ifdef VBP (see bstream_init) — confirm both are intended */
+ }
+ }
+ }
+ }
+ return ret;
+}
+
+/*
+ Function to get N bits ( N<= 32).
+*/
+int32_t viddec_pm_utils_bstream_peekbits(viddec_pm_utils_bstream_cxt_t *cxt, uint32_t *out, uint32_t num_bits, uint8_t skip)
+{
+ uint32_t data_left=0;
+ int32_t ret = -1;
+ /* STEP 1: Make sure that we have at least minimum data before we calculate bits */
+ viddec_pm_utils_check_bstream_reload(cxt, &data_left);
+
+ if((num_bits <= 32) && (num_bits > 0) && (data_left != 0))
+ {
+ uint32_t bytes_required=0;
+ viddec_pm_utils_bstream_buf_cxt_t *bstream;
+
+ bstream = &(cxt->bstrm_buf);
+ bytes_required = (bstream->buf_bitoff + num_bits + 7)>>3;
+
+ /* Step 2: Make sure we have bytes for requested bits */
+ if(bytes_required <= data_left)
+ {
+ uint32_t act_bytes, phase;
+ viddec_pm_utils_getbits_t data;
+ phase = cxt->phase;
+ /* Step 3: Due to emulation prevention bytes, sometimes bytes_required > the actually required bytes */
+ if(viddec_pm_utils_getbytes(bstream, &data, &act_bytes, &phase, bytes_required, cxt->is_emul_reqd, (bstream->buf_bitoff == 0)) != -1)
+ {
+ uint32_t total_bits=0;
+ uint32_t shift_by=0;
+ /* zero out upper bits */
+ /* LIMITATION:For some reason compiler is optimizing it to NOP if i do both shifts
+ in single statement */
+ data.byte[0] <<= bstream->buf_bitoff;
+ data.byte[0] >>= bstream->buf_bitoff;
+
+#ifndef MFDBIGENDIAN
+ data.word[0] = SWAP_WORD(data.word[0]);
+ data.word[1] = SWAP_WORD(data.word[1]);
+#endif
+ total_bits = num_bits+bstream->buf_bitoff;
+ if(total_bits > 32)
+ {
+ /* We have to use both the words to get required data */
+ shift_by = total_bits - 32;
+ data.word[0] = (data.word[0] << shift_by) | ( data.word[1] >> (32 - shift_by));
+ //total_bits -= shift_by;/* BUG */
+ }
+ else
+ {
+ shift_by = 32 - total_bits;
+ data.word[0] = data.word[0] >> shift_by;
+ }
+ *out = data.word[0];
+ if(skip)
+ {
+ /* update au byte position if needed */
+ viddec_pm_utils_update_skipoffsets(bstream, total_bits, act_bytes);
+ cxt->phase = phase;
+
+ if (act_bytes > bytes_required)
+ {
+ cxt->emulation_byte_counter += act_bytes - bytes_required;
+ }
+ }
+
+ ret =1;
+ }
+ }
+ }
+ return ret;
+}
diff --git a/mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_list.c b/mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_list.c
new file mode 100644
index 0000000..ccc83b3
--- /dev/null
+++ b/mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_list.c
@@ -0,0 +1,221 @@
+#include "viddec_pm_utils_list.h"
+#include "viddec_fw_debug.h"
+
+/*
+ Initialize list.
+ */
+void viddec_pm_utils_list_init(viddec_pm_utils_list_t *cxt)
+{
+ cxt->num_items = 0;
+ cxt->start_offset = 0;
+ cxt->end_offset = -1;
+ cxt->total_bytes = 0;
+ cxt->first_scprfx_length = 0;
+}
+
+/*
+ Add a new ES buffer to list. If not successful, returns 0.
+ */
+uint32_t viddec_pm_utils_list_addbuf(viddec_pm_utils_list_t *list, viddec_input_buffer_t *es_buf)
+{
+ uint32_t ret = 0;
+ if((list->num_items + 1) <= MAX_IBUFS_PER_SC)
+ {
+ list->num_items +=1;
+ list->sc_ibuf[list->num_items - 1] = *es_buf;
+ ret = 1;
+ }
+ return ret;
+}
+
+/*
+ We return the index of es buffer and the offset into it for the requested byte offset.
+ EX: if byte=4, and the first es buffer in list is of length 100, we return lis_index=0, offset=3.
+ byte value should range from [1-N].
+ */
+uint32_t viddec_pm_utils_list_getbyte_position(viddec_pm_utils_list_t *list, uint32_t byte, uint32_t *list_index, uint32_t *offset)
+{
+ uint32_t index = 0, accumulated_size=0;
+
+ /* First buffer in list is always special case, since start offset is tied to it */
+ accumulated_size = list->sc_ibuf[index].len - list->start_offset;
+ if( accumulated_size >= byte)
+ {
+ /* we found a match in first buffer itself */
+ *offset = list->start_offset + byte - 1;
+ *list_index = index;
+ return 0;
+ }
+ index++;
+ /* walkthrough the list until we find the byte */
+ while(index < list->num_items)
+ {
+ if((accumulated_size + list->sc_ibuf[index].len) >= byte)
+ {
+ *offset = byte - accumulated_size - 1;
+ *list_index = index;
+ return 0;
+ }
+ accumulated_size += list->sc_ibuf[index].len;
+ index++;
+ }
+ return 1;
+}
+
+/*
+ Since the stream data can span multiple ES buffers on different DDR locations, for our purpose
+ we store start and end position on each ES buffer to make the data look linear.
+ The start represents the linear offset of the first byte in list.
+ end-1 represents linear offset of last byte in list.
+ */
+void viddec_pm_utils_list_updatebytepos(viddec_pm_utils_list_t *list, uint8_t sc_prefix_length)
+{
+ uint32_t items=0;
+ uint32_t start=0, end=0;
+
+ if(list->num_items != 0)
+ {
+ end = list->sc_ibuf[0].len - list->start_offset;
+ if((int32_t)end >= list->total_bytes) end = list->total_bytes;
+ list->data[items].stpos = start;
+ list->data[items].edpos = end;
+ items++;
+ while((int32_t)end < list->total_bytes)
+ {
+ start = end;
+ end += list->sc_ibuf[items].len;
+ if((int32_t)end >= list->total_bytes) end = list->total_bytes;
+ list->data[items].stpos = start;
+ list->data[items].edpos = end;
+ items++;
+ }
+ while(items < list->num_items)
+ {
+ if(sc_prefix_length != 0)
+ {
+ start = end = list->total_bytes+1;
+ }
+ else
+ {
+ start = end = list->total_bytes;
+ }
+ list->data[items].stpos = start;
+ list->data[items].edpos = end;
+ items++;
+ }
+ /* Normal access unit sequence is SC+data+SC. We read SC+data+SC bytes so far.
+ but the current access unit should be SC+data, the Second SC belongs to next access unit.
+ So we subtract SC length to reflect that */
+ list->total_bytes -= sc_prefix_length;
+ }
+}
+
+static inline void viddec_pm_utils_list_emit_slice_tags_append(viddec_emitter_wkld *cur_wkld, viddec_workload_item_t *wi)
+{
+ /*
+ Most of the time len >0. However we can have a condition on EOS where the last buffer can be
+ zero sized in which case we want to make sure that we emit END of SLICE information.
+ */
+ if((wi->es.es_phys_len != 0) || (wi->es.es_flags&VIDDEC_WORKLOAD_FLAGS_ES_END_SLICE))
+ {
+ viddec_emit_append(cur_wkld, wi);
+ }
+}
+
+/*
+ Emit requested tags for data from start to end position. The tags should include end byte too.
+ */
+void viddec_pm_utils_list_emit_slice_tags(viddec_pm_utils_list_t *list, uint32_t start, uint32_t end, viddec_emitter *emitter, uint32_t is_cur_wkld, viddec_workload_item_t *wi)
+{
+ if((list->num_items != 0) && ((int32_t)start < (list->total_bytes)) && ((int32_t)end <= (list->total_bytes)))
+ {
+ uint32_t flags=0, items=0;
+ viddec_emitter_wkld *cur_wkld;
+
+ flags = wi->es.es_flags;
+ cur_wkld = (is_cur_wkld != 0) ? &(emitter->cur):&(emitter->next);
+ /* Seek until we find a ES buffer entry which has the start position */
+ while(start >= list->data[items].edpos) items++;
+
+ if(end < list->data[items].edpos)
+ { /* One ES buffer has both start and end in it. So dump a single entry */
+ wi->es.es_phys_len = end - start + 1;
+ wi->es.es_phys_addr = list->sc_ibuf[items].phys + start - list->data[items].stpos;
+ /* Account for start_offset if its the first buffer in List */
+ if(items == 0) wi->es.es_phys_addr += list->start_offset;
+
+ wi->es.es_flags = flags | VIDDEC_WORKLOAD_FLAGS_ES_START_SLICE | VIDDEC_WORKLOAD_FLAGS_ES_END_SLICE;
+ viddec_pm_utils_list_emit_slice_tags_append(cur_wkld, wi);
+ }
+ else
+ {
+ /* We know that there are at least two buffers for the requested data. Dump the first item */
+ wi->es.es_phys_len = list->data[items].edpos - start;
+ wi->es.es_phys_addr = list->sc_ibuf[items].phys + start - list->data[items].stpos;
+ if(items == 0) wi->es.es_phys_addr += list->start_offset;
+ wi->es.es_flags = flags | VIDDEC_WORKLOAD_FLAGS_ES_START_SLICE;
+ viddec_pm_utils_list_emit_slice_tags_append(cur_wkld, wi);
+ items++;
+ /* Dump everything in between if any until the last buffer */
+ while(end >= list->data[items].edpos)
+ {
+ wi->es.es_phys_len = list->data[items].edpos - list->data[items].stpos;
+ wi->es.es_phys_addr = list->sc_ibuf[items].phys;
+ wi->es.es_flags = flags;
+ viddec_pm_utils_list_emit_slice_tags_append(cur_wkld, wi);
+ items++;
+ }
+ /* Dump ES buffer which has end in it along with end slice flag */
+ wi->es.es_phys_len = end - list->data[items].stpos + 1;
+ wi->es.es_phys_addr = list->sc_ibuf[items].phys;
+ wi->es.es_flags = flags | VIDDEC_WORKLOAD_FLAGS_ES_END_SLICE;
+ viddec_pm_utils_list_emit_slice_tags_append(cur_wkld, wi);
+ }
+ }
+}
+
+/*
+ We delete the consumed buffers in our list. If there are any buffers left over which have more data
+ the get moved to the top of the list array.
+ */
+void viddec_pm_utils_list_remove_used_entries(viddec_pm_utils_list_t *list, uint32_t length)
+{
+ list->end_offset = -1;
+
+ if(list->num_items != 0)
+ {
+ if(length != 0)
+ {
+ uint32_t items = list->num_items-1, byte_pos;
+ uint32_t index=0;
+ viddec_input_buffer_t *es_buf;
+ byte_pos = list->total_bytes;
+ while((list->data[items].edpos > byte_pos) && (list->data[items].stpos > byte_pos))
+ {
+ items--;
+ }
+ if(items != 0)
+ {
+ list->start_offset = byte_pos - list->data[items].stpos;
+ while(items < list->num_items)
+ {
+ es_buf = &(list->sc_ibuf[items]);
+ list->sc_ibuf[index] = *es_buf;
+ index++;
+ items++;
+ }
+ list->num_items = index;
+ }
+ else
+ {
+ list->start_offset += (byte_pos - list->data[items].stpos);
+ }
+ }
+ else
+ {
+ list->num_items = 0;
+ list->start_offset = 0;
+ }
+ list->total_bytes = length;
+ }
+}
diff --git a/mix_vbp/viddec_fw/include/viddec_fw_common_defs.h b/mix_vbp/viddec_fw/include/viddec_fw_common_defs.h
new file mode 100644
index 0000000..bc2c239
--- /dev/null
+++ b/mix_vbp/viddec_fw/include/viddec_fw_common_defs.h
@@ -0,0 +1,200 @@
+/*
+ This file is provided under a dual BSD/GPLv2 license. When using or
+ redistributing this file, you may do so under either license.
+
+ GPL LICENSE SUMMARY
+
+ Copyright(c) 2007-2009 Intel Corporation. All rights reserved.
+
+ This program is free software; you can redistribute it and/or modify
+ it under the terms of version 2 of the GNU General Public License as
+ published by the Free Software Foundation.
+
+ This program is distributed in the hope that it will be useful, but
+ WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program; if not, write to the Free Software
+ Foundation, Inc., 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA.
+ The full GNU General Public License is included in this distribution
+ in the file called LICENSE.GPL.
+
+ Contact Information:
+
+ BSD LICENSE
+
+ Copyright(c) 2007-2009 Intel Corporation. All rights reserved.
+ All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in
+ the documentation and/or other materials provided with the
+ distribution.
+ * Neither the name of Intel Corporation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+*/
+#ifndef VIDDEC_FW_COMMON_DEFS_H
+#define VIDDEC_FW_COMMON_DEFS_H
+
+#define VIDDEC_FW_PARSER_IPC_HOST_INT 0x87654321
+#define EMITTER_WORKLOAD_ENTRIES 2048
+
+/* This enum defines priority level for opening a stream */
+enum viddec_stream_priority
+{
+ viddec_stream_priority_BACKGROUND, /* Lowest priority stream */
+ viddec_stream_priority_REALTIME, /* Real time highest priority stream */
+ viddec_stream_priority_INVALID,
+};
+
+/* This enum defines supported flush types */
+enum viddec_stream_flushtype
+{
+ VIDDEC_STREAM_FLUSH_DISCARD, /* Reinitialise to start state */
+ VIDDEC_STREAM_FLUSH_PRESERVE, /* Reinitialise to start state by preserving sequence info*/
+};
+
+enum viddec_stream_inband_flags
+{
+ VIDDEC_STREAM_DEFAULT_FLAG=0, /* Default value for flags */
+ VIDDEC_STREAM_EOS, /* End of stream message */
+ VIDDEC_STREAM_DISCONTINUITY, /* new segment which forces flush and preserve */
+};
+
+/* Message descriptor for Parser's Input and output queues. needs to be 8 byte aligned */
+typedef struct viddec_input_buffer
+{
+ unsigned int flags; /* Flags for Inband messages like EOS, valid range defined in viddec_stream_inband_flags */
+ unsigned int phys;/* DDR addr of where ES/WKLD is at. */
+ unsigned int len;/* size of buffer at phys_addr */
+ unsigned int id;/* A id for the buffer which is not used or modified by the FW. */
+#ifdef HOST_ONLY
+ unsigned char *buf; /* virt pointer to buffer. This is a don't care for FW */
+#endif
+}ipc_msg_data;
+
+typedef ipc_msg_data viddec_input_buffer_t;
+typedef ipc_msg_data viddec_ipc_msg_data;
+
+/* Return types for interface functions */
+typedef enum
+{
+ VIDDEC_FW_SUCCESS, /* successful with current operation */
+ VIDDEC_FW_NORESOURCES, /* No resources to execute the requested functionality */
+ VIDDEC_FW_FAILURE, /* Failed for unknown reason */
+ VIDDEC_FW_INVALID_PARAM, /* The parameters that were passed are Invalid */
+ VIDDEC_FW_PORT_FULL, /* The operation failed since queue is full */
+ VIDDEC_FW_PORT_EMPTY, /* The operation failed since queue is empty */
+ VIDDEC_FW_NEED_FREE_WKLD, /* The operation failed since a free wkld is not available */
+}viddec_fw_return_types_t;
+
+/* Defines for Interrupt mask and status */
+typedef enum
+{
+ VIDDEC_FW_WKLD_DATA_AVAIL=1, /* A processed workload is available */
+ VIDDEC_FW_INPUT_WATERMARK_REACHED=2, /* The input path is below the set watermark for current stream */
+}viddec_fw_parser_int_status_t;
+
+/* Defines for attributes on stream, If not set explicitly will be default values */
+typedef enum
+{
+ VIDDEC_FW_INPUT_Q_WATERMARK, /* Define for setting Input queue watermarks */
+ VIDDEC_FW_STREAM_PRIORITY, /* Define for setting stream priority */
+}viddec_fw_stream_attributes_t;
+
+typedef struct
+{
+ unsigned int input_q_space; /* Num of messages that can be written to input queue */
+ unsigned int output_q_data; /* Num of messages in output queue */
+ unsigned int workload_q_status; /* Number of free wklds available to parser */
+}viddec_fw_q_status_t;
+
+typedef struct
+{
+ unsigned int to_fw_q_space; /* Num of messages that can be written to input queue */
+ unsigned int from_fw_q_data; /* Num of messages in output queue */
+}viddec_fw_decoder_q_status_t;
+
+enum viddec_fw_decoder_int_status
+{
+ VIDDEC_FW_DECODER_INT_STATUS_STREAM_0 = (1<< 0), /* Decoder Stream 0 Requires Service */
+ VIDDEC_FW_DECODER_INT_STATUS_STREAM_1 = (1<< 1), /* Decoder Stream 1 Requires Service */
+ VIDDEC_FW_DECODER_INT_STATUS_STREAM_2 = (1<< 2), /* Decoder Stream 2 Requires Service */
+
+
+ VIDDEC_FW_DECODER_INT_STATUS_STREAM_HIGH = (1<<30), /* Any Decoder Stream >= 30 Requires Service */
+ VIDDEC_FW_DECODER_INT_STATUS_AUTO_API = (1<<31) /* An Auto-API Function has completed */
+};
+
+/** Hardware Accelerated stream formats */
+enum viddec_stream_format
+{
+ MFD_STREAM_FORMAT_MPEG=1,
+ MFD_STREAM_FORMAT_H264,
+ MFD_STREAM_FORMAT_VC1,
+ MFD_STREAM_FORMAT_MPEG42,
+
+ MFD_STREAM_FORMAT_MAX, /* must be last */
+ MFD_STREAM_FORMAT_INVALID
+};
+
+/* Workload specific error codes */
+enum viddec_fw_workload_error_codes
+{
+ VIDDEC_FW_WORKLOAD_SUCCESS = 0,
+ VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE = (1 << 0),/* Parser/Decoder detected a non decodable error with this workload */
+ VIDDEC_FW_WORKLOAD_ERR_BUFFERS_OVERFLOW = (1 << 1),/* Parser Detected more than 64 buffers between two start codes */
+ VIDDEC_FW_WORKLOAD_ERR_ITEMS_OVERFLOW = (1 << 2),/* Parser Detected overflow of currently allocated workload memory */
+ VIDDEC_FW_WORKLOAD_ERR_FLUSHED_FRAME = (1 << 3),/* This is a partial or empty frame which was flushed by Parser/Decoder */
+ VIDDEC_FW_WORKLOAD_ERR_MISSING_DMEM = (1 << 4),/* This is a partial or empty frame from Parser/Decoder */
+ VIDDEC_FW_WORKLOAD_ERR_UNSUPPORTED = (1 << 5),/* Parser Detected unsupported feature in the stream */
+ /* First 8 bits reserved for Non Decodable errors */
+ VIDDEC_FW_WORKLOAD_ERR_CONCEALED = (1 << 9),/* The decoder concealed some errors in this frame */
+ VIDDEC_FW_WORKLOAD_ERR_MISSING_REFERENCE = (1 << 10),/* Decoder/parser detected at least one of the required reference frames is missing */
+ VIDDEC_FW_WORKLOAD_ERR_IN_REFERENCE = (1 << 11),/* Decoder/parser detected at least one of the reference frames has errors in it */
+ VIDDEC_FW_WORKLOAD_ERR_DANGLING_FLD = (1 << 12),/* Parser detected at least one of the fields are missing */
+ VIDDEC_FW_WORKLOAD_ERR_PARTIAL_SLICE = (1 << 13),/* Decoder detected at least one of the slices is partial */
+ VIDDEC_FW_WORKLOAD_ERR_MACROBLOCK = (1 << 14),/* Decoder detected macroblock errors */
+ VIDDEC_FW_WORKLOAD_ERR_MISSING_SEQ_INFO = (1 << 16),/* Parser detected sequence information is missing */
+
+ VIDDEC_FW_WORKLOAD_ERR_TOPFIELD = (1 << 17),/* Decoder/Parser detected errors in "top field" or "frame"*/
+ VIDDEC_FW_WORKLOAD_ERR_BOTTOMFIELD = (1 << 18),/* Decoder/Parser detected errors in "bottom field" or "frame" */
+ VIDDEC_FW_WORKLOAD_ERR_BITSTREAM_ERROR = (1 << 19),/* Parser detected errors */
+
+};
+
+enum viddec_fw_mpeg2_error_codes
+{
+ VIDDEC_FW_MPEG2_ERR_CORRUPTED_SEQ_HDR = (1 << 24),/* Parser detected corruption in sequence header. Will use the previous good sequence info, if found. */
+ VIDDEC_FW_MPEG2_ERR_CORRUPTED_SEQ_EXT = (1 << 25),/* Parser detected corruption in sequence extension. */
+ VIDDEC_FW_MPEG2_ERR_CORRUPTED_SEQ_DISP_EXT = (1 << 26),/* Parser detected corruption in sequence display extension. */
+ VIDDEC_FW_MPEG2_ERR_CORRUPTED_GOP_HDR = (1 << 27),/* Parser detected corruption in GOP header. */
+ VIDDEC_FW_MPEG2_ERR_CORRUPTED_PIC_HDR = (1 << 26),/* Parser detected corruption in picture header. NOTE(review): shares bit (1<<26) with ..._CORRUPTED_SEQ_DISP_EXT — confirm the aliasing is intended */
+ VIDDEC_FW_MPEG2_ERR_CORRUPTED_PIC_COD_EXT = (1 << 27),/* Parser detected corruption in picture coding extension. NOTE(review): shares bit (1<<27) with ..._CORRUPTED_GOP_HDR — confirm */
+ VIDDEC_FW_MPEG2_ERR_CORRUPTED_PIC_DISP_EXT = (1 << 28),/* Parser detected corruption in picture display extension. */
+ VIDDEC_FW_MPEG2_ERR_CORRUPTED_QMAT_EXT = (1 << 29),/* Parser detected corruption in quantization matrix extension. */
+};
+
+#endif
diff --git a/mix_vbp/viddec_fw/include/viddec_fw_decoder_host.h b/mix_vbp/viddec_fw/include/viddec_fw_decoder_host.h
new file mode 100644
index 0000000..3a07af0
--- /dev/null
+++ b/mix_vbp/viddec_fw/include/viddec_fw_decoder_host.h
@@ -0,0 +1,242 @@
+/*
+ This file is provided under a dual BSD/GPLv2 license. When using or
+ redistributing this file, you may do so under either license.
+
+ GPL LICENSE SUMMARY
+
+ Copyright(c) 2007-2009 Intel Corporation. All rights reserved.
+
+ This program is free software; you can redistribute it and/or modify
+ it under the terms of version 2 of the GNU General Public License as
+ published by the Free Software Foundation.
+
+ This program is distributed in the hope that it will be useful, but
+ WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program; if not, write to the Free Software
+ Foundation, Inc., 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA.
+ The full GNU General Public License is included in this distribution
+ in the file called LICENSE.GPL.
+
+ Contact Information:
+
+ BSD LICENSE
+
+ Copyright(c) 2007-2009 Intel Corporation. All rights reserved.
+ All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in
+ the documentation and/or other materials provided with the
+ distribution.
+ * Neither the name of Intel Corporation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+*/
+
+#ifndef VIDDEC_FW_DECODER_HOST_H
+#define VIDDEC_FW_DECODER_HOST_H
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#include "viddec_fw_common_defs.h"
+
+/** @weakgroup viddec Fw Decoder interface Functions */
+/** @ingroup viddec_fw_decoder */
+/*@{*/
+
+/**
+ This function returns the size required for loading fw.
+ @retval size : Required size.
+*/
+ uint32_t viddec_fw_decoder_query_fwsize(void);
+
+/**
+ This function loads Decoder Firmware and initialises necessary state information.
+ @param[in] phys : Physical address on where firmware should be loaded.
+ @param[in] len : Length of data allocated at phys.
+ @retval VIDDEC_FW_SUCCESS : Successfully loaded firmware.
+ @retval VIDDEC_FW_FAILURE : Failed to communicate with firmware.
+ @retval VIDDEC_FW_NORESOURCES : Failed to allocate resources for Loading firmware.
+ @retval VIDDEC_FW_INVALID_PARAM: The input parameters are not valid.
+*/
+ uint32_t viddec_fw_decoder_loadfw(uint32_t phys, uint32_t len);
+
+/**
+ This function returns required size for global memory for all supported decoders. This is a synchronous message to FW.
+ @param[out] size : returns the size required.
+ @retval VIDDEC_FW_SUCCESS : Successfully got required information from FW.
+ @retval VIDDEC_FW_FAILURE : Failed to communicate with firmware.
+*/
+ uint32_t viddec_fw_decoder_query_fwsize_scratchmem(uint32_t *size);
+
+/**
+ This function sets global memory for the firmware to use.This is a synchronous message to FW.
+ @param[in] phys : Physical address on where global memory starts.
+ @param[in] len : Length of data allocated at phys.
+ @retval VIDDEC_FW_SUCCESS : Successfully setup global memory.
+ @retval VIDDEC_FW_FAILURE : Failed to communicate with firmware.
+*/
+ uint32_t viddec_fw_decoder_set_fw_scratchmem(uint32_t phys, uint32_t len);
+
+/**
+ This function returns the size required opening a stream. This a synchronous message to FW.
+ @param[in] codec_type : Type of codec that we want information about.
+ @param[out] size : Size of memory required for opening a stream.
+ @retval VIDDEC_FW_SUCCESS : Successfully talked to FW and got required size.
+ @retval VIDDEC_FW_FAILURE : Failed to communicate with firmware.
+*/
+ uint32_t viddec_fw_decoder_query_streamsize(uint32_t codec_type, uint32_t *size);
+
+/**
+ This function opens requested codec.This a synchronous message to FW.
+ @param[in] codec_type : Type of codec that we want to open.
+ @param[in] phys : Physical address of allocated memory for this codec.
+ @param[in] priority : Priority of stream. 1 for realtime and 0 for background.
+ @param[out] strm_handle : Handle of the opened stream.
+ @retval VIDDEC_FW_SUCCESS : Successfully Opened the stream.
+ @retval VIDDEC_FW_FAILURE : Failed to Open a stream.
+*/
+ uint32_t viddec_fw_decoder_openstream(uint32_t codec_type, uint32_t *strm_handle, uint32_t phys, uint32_t priority);
+
+
+/**
+ This function closes stream.This a synchronous message to FW.
+ @param[in] strm_handle : Handle of the stream to close.
+*/
+ void viddec_fw_decoder_closestream(uint32_t strm_handle);
+
+/**
+ This function allows to get current status of the decoder workload queues. If the current stream is active we return
+ number of input messages that can be written to input queue and the number of messages in output queue of the stream.
+
+ Normally this is called when Host receives an interrupt from decoder, In which case before releasing the INT
+ Host will try its best to keep the FW busy. Normally when a interrupt is received it means at least one workload is
+ written into output queue of a stream.
+ @param[in] strm_handle : The handle of stream that we want to get status of queues.
+ @param[out] status : The status of each queue gets updated in here.
+ @retval VIDDEC_FW_SUCCESS : Successfully Got the status information.
+ @retval VIDDEC_FW_INVALID_PARAM: Invalid parameter in this case an inactive stream.
+*/
+ uint32_t viddec_fw_decoder_get_queue_status(uint32_t strm_handle, viddec_fw_decoder_q_status_t *status);
+
+/**
+ This function flushes the current stream. This is a synchronous message to FW.
+ Before calling this function the host has to make sure the output queue of the firmware
+ is empty. After this function is executed the FW will read all entries in input
+ wkld buffer queue into output queue. After this operation the host has to read all entries
+ in output queue again to finish the flush operation.
+ @param[in] flush_type : Type of flush we want to perform.ex:flush and discard.
+ @param[in] strm_handle : Handle of the stream we want to flush.
+ @retval VIDDEC_FW_SUCCESS : Successfully flushed the stream.
+ @retval VIDDEC_FW_FAILURE : Failed to flush a stream.
+*/
+ uint32_t viddec_fw_decoder_flushstream(uint32_t strm_handle, uint32_t flush_type);
+
+/**
+ This function sends an input workload buffer. The host should provide required frame buffers in this workload before
+ sending it to fw.
+ @param[in] strm_handle : The handle of stream that we want to send workload buffer to.
+ @param[in] cur_wkld : The workload buffer we want to send.
+ @retval VIDDEC_FW_SUCCESS : Successfully Sent the message.
+ @retval VIDDEC_FW_PORT_FULL : Port to fw is full; unsuccessful in sending message.
+*/
+ uint32_t viddec_fw_decoder_send(uint32_t strm_handle, ipc_msg_data *cur_wkld);
+
+/**
+ This function gets the decoded workload from fw.
+ @param[in] strm_handle : The handle of stream that we want to read workload from.
+ @param[out] cur_wkld : The workload descriptor.
+ @retval VIDDEC_FW_SUCCESS : Successfully Sent the message.
+ @retval VIDDEC_FW_PORT_EMPTY : Workload port is empty; unsuccessful in reading wkld.
+*/
+ uint32_t viddec_fw_decoder_recv(uint32_t strm_handle, ipc_msg_data *cur_wkld);
+
+/**
+ This function unloads Decoder Firmware and frees the resources allocated in Load fw.
+ If this function is called before load fw it will crash with a segmentation fault.
+*/
+ void viddec_fw_decoder_deinit(void);
+
+/**
+ This function gets the major and minor revision numbers of the loaded firmware.
+ @param[out] major : The major revision number.
+ @param[out] minor : The minor revision number.
+ @param[out] build : The Internal Build number.
+*/
+ void viddec_fw_decoder_get_version_number(unsigned int *major, unsigned int *minor, unsigned int *build);
+
+/**
+ This function returns the interrupt status of all streams which need to be processed. A value of zero
+ means no active streams which generated this interrupt.
+*/
+ uint32_t viddec_fw_decoder_active_pending_interrupts(void);
+
+/**
+ This function clears the interrupts for all active streams represented by status input parameter.
+ The status should always be a value that was returned by viddec_fw_decoder_active_pending_interrupts().
+ @param[in] status : The status value that was returned by viddec_fw_decoder_active_pending_interrupts().
+*/
+ void viddec_fw_decoder_clear_all_pending_interrupts(uint32_t status);
+
+/**
+ This function enables/disables interrupt for the stream specified.
+ @param[in] strm_handle : The handle of stream that we want enable or disable interrupts for.
+ @param[in] enable : Boolean value if ==0 means disable Interrupts else enable.
+ @retval VIDDEC_FW_SUCCESS : Successfully Sent the message.
+ @retval VIDDEC_FW_INVALID_PARAM: Invalid stream handle was passed.
+*/
+ uint32_t viddec_fw_decoder_set_stream_interrupt_mask(uint32_t stream_handle, uint32_t enable);
+
+/**
+ This function returns which stream interrupted in the past based on status, which is a snapshot of
+ interrupt status that was cleared in the past. The host has to call clear with status information
+ before calling this function again with status value. The Host should do this operation until this function
+ returns 0, which means all the streams that generated interrupt have been processed.
+ @param[out]strm_handle : The handle of a stream that generated interrupt.
+ @param[in] status : Snapshot of Interrupt status which was returned by viddec_fw_decoder_active_pending_interrupts().
+ @retval 1 : A valid stream handle was found.
+ @retval 0 : No more streams from the status which caused interrupt.
+*/
+ uint32_t viddec_fw_decoder_get_next_stream_from_interrupt_status(uint32_t status, uint32_t *stream_handle);
+
+/**
+ This function clears the stream_handle from the status snapshot that we got from viddec_fw_decoder_active_pending_interrupts(),
+ This should be called after host performs all necessary actions for the stream.
+ @param[in] strm_handle : The handle of a stream that we want to clear to indicate we handled it.
+ @param[in] status : Snapshot of Interrupt status which was returned by viddec_fw_decoder_active_pending_interrupts().
+ @retval 1 : Operation was successful.
+ @retval 0 : Invalid stream handle was passed.
+*/
+ uint32_t viddec_fw_decoder_clear_stream_from_interrupt_status(uint32_t *status, uint32_t stream_handle);
+
+/*@}*/
+#ifdef __cplusplus
+}
+#endif
+
+#endif//#ifndef VIDDEC_FW_DECODER_HOST_H
diff --git a/mix_vbp/viddec_fw/include/viddec_fw_frame_attr.h b/mix_vbp/viddec_fw/include/viddec_fw_frame_attr.h
new file mode 100644
index 0000000..a816dd4
--- /dev/null
+++ b/mix_vbp/viddec_fw/include/viddec_fw_frame_attr.h
@@ -0,0 +1,281 @@
+/*
+ This file is provided under a dual BSD/GPLv2 license. When using or
+ redistributing this file, you may do so under either license.
+
+ GPL LICENSE SUMMARY
+
+ Copyright(c) 2007-2009 Intel Corporation. All rights reserved.
+
+ This program is free software; you can redistribute it and/or modify
+ it under the terms of version 2 of the GNU General Public License as
+ published by the Free Software Foundation.
+
+ This program is distributed in the hope that it will be useful, but
+ WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program; if not, write to the Free Software
+ Foundation, Inc., 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA.
+ The full GNU General Public License is included in this distribution
+ in the file called LICENSE.GPL.
+
+ Contact Information:
+
+ BSD LICENSE
+
+ Copyright(c) 2007-2009 Intel Corporation. All rights reserved.
+ All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in
+ the documentation and/or other materials provided with the
+ distribution.
+ * Neither the name of Intel Corporation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+*/
+#ifndef VIDDEC_FW_FRAME_ATTR_H
+#define VIDDEC_FW_FRAME_ATTR_H
+
+#define VIDDEC_PANSCAN_MAX_OFFSETS 4
+#define VIDDEC_MAX_CPB_CNT 32
+
+/**
+This enumeration lists all the frame types defined by the MPEG, VC1 and H264 specifications.
+Frame types applicable to a single codec are specified in the comments.
+*/
+typedef enum
+{
+ VIDDEC_FRAME_TYPE_INVALID=0, /** Unknown type - default value */
+ VIDDEC_FRAME_TYPE_IDR=0x1, /** IDR frame - h264 only */
+ VIDDEC_FRAME_TYPE_I=0x2, /** I frame */
+ VIDDEC_FRAME_TYPE_P=0x3, /** P frame */
+ VIDDEC_FRAME_TYPE_B=0x4, /** B frame */
+ VIDDEC_FRAME_TYPE_BI=0x5, /** BI frame - Intracoded B frame - vc1 only */
+ VIDDEC_FRAME_TYPE_SKIP=0x6, /** Skipped frame - vc1 only */
+ VIDDEC_FRAME_TYPE_D=0x7, /** D frame - mpeg1 only */
+ VIDDEC_FRAME_TYPE_S=0x8, /** SVOP frame - mpeg4 only - sprite encoded frame - treat as P */
+ VIDDEC_FRAME_TYPE_MAX,
+} viddec_frame_type_t;
+
+/**
+This structure contains the content size info extracted from the stream.
+*/
+typedef struct viddec_rect_size
+{
+ unsigned int width;
+ unsigned int height;
+}viddec_rect_size_t;
+
+/**
+This structure contains MPEG2 specific pan scan offsets extracted from the stream.
+*/
+typedef struct viddec_mpeg2_frame_center_offset
+{
+ int horz;
+ int vert;
+}viddec_mpeg2_frame_center_offset_t;
+
+/**
+This structure contains the MPEG2 specific frame attributes.
+*/
+typedef struct viddec_mpeg2_frame_attributes
+{
+ /**
+ 10 bit unsigned integer corresponding to the display order of each coded picture
+ in the stream (or gop if gop header is present).
+ Refer to "temporal_reference" of the picture header in ITU-T H.262 Specification.
+ */
+ unsigned int temporal_ref;
+
+ /**
+ Pan/Scan rectangle info
+ Refer to the picture display extension in ITU-T H.262 Specification.
+ */
+ viddec_mpeg2_frame_center_offset_t frame_center_offset[VIDDEC_PANSCAN_MAX_OFFSETS];
+ unsigned int number_of_frame_center_offsets;
+
+ /**
+ Top-Field first flag
+ Refer to "top_field_first" of the picture coding extension in ITU-T H.262 Specification.
+ */
+ unsigned int top_field_first;
+
+ /**
+ Progressive frame flag - Indicates if current frame is progressive or not.
+ Refer to "progressive_frame" of the picture coding extension in ITU-T H.262 Specification.
+ */
+ unsigned int progressive_frame;
+
+ /**
+ Frame/field polarity for each coded picture.
+ Refer to Table 6-14 in ITU-T H.262 Specification.
+ */
+ unsigned int picture_struct;
+
+ /**
+ Repeat field/frame flag.
+ Refer to "repeat_first_field" of the picture coding extension in ITU-T H.262 Specification.
+ */
+ unsigned int repeat_first_field;
+
+}viddec_mpeg2_frame_attributes_t;
+
+/**
+This structure contains MPEG2 specific pan scan offsets extracted from the stream.
+*/
+typedef struct viddec_vc1_pan_scan_window
+{
+ unsigned int hoffset;
+ unsigned int voffset;
+ unsigned int width;
+ unsigned int height;
+}viddec_vc1_pan_scan_window_t;
+
+/**
+This structure contains the VC1 specific frame attributes.
+*/
+typedef struct viddec_vc1_frame_attributes
+{
+ /**
+ Temporal Reference of frame/field.
+ Refer to "TFCNTR" in the picture layer of the SMPTE VC1 Specification.
+ */
+ unsigned int tfcntr;
+
+ /**
+ Frame/field repeat information in the bitstream.
+ Refer to "RPTFRM", "TFF", "BFF" in the picture layer
+ of the SMPTE VC1 Specification.
+ */
+ unsigned int rptfrm;
+ unsigned int tff;
+ unsigned int rff;
+
+ /**
+ Pan-scan information in the bitstream.
+ Refer to "PANSCAN_FLAG" in the entrypoint layer, "PS_PRESENT", "PS_HOFFSET", "PS_VOFFSET",
+ "PS_WIDTH" and "PS_HEIGHT" in the picture layer of the SMPTE VC1 Specification.
+ */
+ unsigned int panscan_flag;
+ unsigned int ps_present;
+ unsigned int num_of_pan_scan_windows;
+ viddec_vc1_pan_scan_window_t pan_scan_window[VIDDEC_PANSCAN_MAX_OFFSETS];
+
+}viddec_vc1_frame_attributes_t;
+
+/**
+This structure contains the H264 specific frame attributes.
+*/
+typedef struct viddec_h264_frame_attributes
+{
+ /**
+ used_for_reference : 1 means this frame is used as a reference frame by others; 0 means no other frame references this frame
+ */
+ ///// This flag hasn't been enabled so far
+ unsigned int used_for_reference;
+
+
+ /**
+ Picture Order Count for the current frame/field.
+ This value is computed using information from the bitstream.
+ Refer to Section 8.2.1, function 8-1 of the ITU-T H.264 Specification.
+ */
+ // These fields will be supported in future
+ int top_field_poc;
+ int bottom_field_poc;
+
+ /**
+ Display size, which is cropped from content size.
+ Currently, the cont_size is cropped, so this parameter is redundant, but in future, cont_size may be changed
+ */
+ viddec_rect_size_t cropped_size;
+
+ /**
+ top_field_first: 0 means bottom_field_POC is smaller than top_field_POC, else 1
+ */
+ unsigned int top_field_first;
+
+ /**
+ field_pic_flag: 0 means all slices of this frame are frame-based encoded, else 1
+ */
+ unsigned int field_pic_flag;
+
+}viddec_h264_frame_attributes_t;
+
+/**
+This structure contains the MPEG4 specific frame attributes.
+*/
+typedef struct viddec_mpeg4_frame_attributes
+{
+ /**
+ Top-Field first flag
+ Refer to "top_field_first" of the Video Object Plane of the MPEG4 Spec.
+ */
+ unsigned int top_field_first;
+
+}viddec_mpeg4_frame_attributes_t;
+
+/**
+This structure groups all the frame attributes that are exported by the firmware.
+The frame attributes are split into attributes that are common to all codecs and
+that are specific to codec type.
+As of this release, it is populated for mpeg2 only.
+*/
+typedef struct viddec_frame_attributes
+{
+ /**
+ Content size specified in the stream.
+ For MPEG2, refer to "horizontal_size_value, vertical_size_value" of the sequence header and
+ "horizontal_size_extension, vertical_size_extension" of the sequence extension in ITU-T H.262 Specification.
+ For H264, refer to "pic_width_in_mbs_minus1" and "pic_height_in_map_units_minus1" of the
+ sequence parameter set in ITU-T H.264 Specification.
+ For VC1, refer to "MAX_CODED_WIDTH" and "MAX_CODED_HEIGHT" in the sequence layer,
+ "CODED_SIZE_FLAG", "CODED_WIDTH" and "CODED_HEIGHT" in the entrypoint layer of the SMPTE VC1 Specification.
+ */
+ viddec_rect_size_t cont_size;
+
+ /**
+ Type of frame populated in the workload.
+ frame_type contains the frame type for progressive frame and the field type for the top field for interlaced frames.
+ bottom_field_type contains the field type for the bottom field for interlaced frames.
+ For MPEG2, refer to "picture_coding_type" in picture header (Table 6-12) in ITU-T H.262 Specification.
+ For H264, refer to "slice_type" in slice header (Table 7-6) in ITU-T H.264 Specification.
+ For VC1, refer to "PTYPE" and FPTYPE in the picture layer (Tables 33, 34, 35, 105) in SMPTE VC1 Specification.
+ */
+ viddec_frame_type_t frame_type;
+ viddec_frame_type_t bottom_field_type;
+
+ /** Codec specific attributes */
+ union
+ {
+ viddec_mpeg2_frame_attributes_t mpeg2;
+ viddec_vc1_frame_attributes_t vc1;
+ viddec_h264_frame_attributes_t h264;
+ viddec_mpeg4_frame_attributes_t mpeg4;
+ };
+
+}viddec_frame_attributes_t;
+
+#endif /* VIDDEC_FW_FRAME_ATTR_H */
diff --git a/mix_vbp/viddec_fw/include/viddec_fw_item_types.h b/mix_vbp/viddec_fw/include/viddec_fw_item_types.h
new file mode 100644
index 0000000..66e5f59
--- /dev/null
+++ b/mix_vbp/viddec_fw/include/viddec_fw_item_types.h
@@ -0,0 +1,738 @@
+/*
+ This file is provided under a dual BSD/GPLv2 license. When using or
+ redistributing this file, you may do so under either license.
+
+ GPL LICENSE SUMMARY
+
+ Copyright(c) 2007-2009 Intel Corporation. All rights reserved.
+
+ This program is free software; you can redistribute it and/or modify
+ it under the terms of version 2 of the GNU General Public License as
+ published by the Free Software Foundation.
+
+ This program is distributed in the hope that it will be useful, but
+ WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program; if not, write to the Free Software
+ Foundation, Inc., 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA.
+ The full GNU General Public License is included in this distribution
+ in the file called LICENSE.GPL.
+
+ Contact Information:
+
+ BSD LICENSE
+
+ Copyright(c) 2007-2009 Intel Corporation. All rights reserved.
+ All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in
+ the documentation and/or other materials provided with the
+ distribution.
+ * Neither the name of Intel Corporation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+*/
+#ifndef VIDDEC_FW_ITEM_TYPES_H
+#define VIDDEC_FW_ITEM_TYPES_H
+
+
+/* The following macros are defined to pack data into 32 bit words.
+ mask: A 32 bit value of N 1 bits starting from lsb where N represents the length of data we are packing.
+ start: Bit start position of data we want.
+ ex: If we want to pack Height(16bits), width(16bits) where width is from (1:16) and height is from (17:32), these are
+ the start and mask values for width and height.
+ width: start = 0 mask=0xFFFF
+ Height:start= 16 mask=0xFFFF
+
+ extract: will extract data from x_32 of unsigned integer type from bit start offset(0 to 31) of length based on mask and returns in
+ unsigned integer type.
+ insert: Will insert N bits from val_32 ,where N is length based on mask, into x_32 from bit offset based on start. val_32 is expected to
+ be a unsigned int of N bits starting with lsb.
+*/
+
+#define viddec_fw_bitfields_extract(x_32, start, mask) (((x_32) >> (start)) & (mask) )
+#define viddec_fw_bitfields_insert(x_32, val_32, start, mask) ((x_32) = (((x_32) & ~( (mask) << (start))) | (((val_32) & (mask)) << (start))))
+
+/* Workload items type. Each item here represents data that Parser detected ex:slice data which
+ is used either by host or decoder.*/
+enum workload_item_type
+{
+ VIDDEC_WORKLOAD_INVALID=0x0, /* Unknown type */
+ VIDDEC_WORKLOAD_PIXEL_ES=0x100, /* Slice data tag */
+ VIDDEC_WORKLOAD_TAG=0x200, /* Frame association tag */
+ VIDDEC_WORKLOAD_USERDATA=0x300, /* user data tag */
+ VIDDEC_WORKLOAD_DECODER_INFO=0x400, /* decoder specific data tag which decoder module understands*/
+ VIDDEC_WORKLOAD_IBUF_DONE=0x500, /* Es buffer completely used tag */
+ VIDDEC_WORKLOAD_IBUF_CONTINUED=0x600, /* Es buffer partially used tag */
+ VIDDEC_WORKLOAD_TAG_BUFFER_LOOSE_START=0x700, /* ??? */
+ VIDDEC_WORKLOAD_REFERENCE_FRAME_REORDER=0x800, /* Reorder frames in DPB tag */
+ VIDDEC_WORKLOAD_DISPLAY_FRAME=0x900, /* Display order in DPB tag, for H264 NOT required??? */
+
+ VIDDEC_WORKLOAD_SEQUENCE_INFO=0xa00, /* MPEG2 Seq Hdr, H264 SPS, VC1 SeqLayer */
+ VIDDEC_WORKLOAD_DISPLAY_INFO=0xb00, /* MPEG2 Seq Disp Ext, H264 VUI */
+ VIDDEC_WORKLOAD_GOP_INFO=0xc00, /* MPEG2 GOP, VC1 Entrypoint */
+ VIDDEC_WORKLOAD_SEQ_USER_DATA=0xd00, /* MPEG2, VC1 Sequence Level User data */
+ VIDDEC_WORKLOAD_GOP_USER_DATA=0xe00, /* MPEG2, VC1 Gop Level User data */
+ VIDDEC_WORKLOAD_FRM_USER_DATA=0xf00, /* MPEG2 Picture User data, VC1 Frame User data */
+ VIDDEC_WORKLOAD_FLD_USER_DATA=0x1000, /* MPEG2, VC1 Field User data */
+ VIDDEC_WORKLOAD_SLC_USER_DATA=0x1100, /* VC1 Slice User data */
+ VIDDEC_WORKLOAD_VISUAL_OBJ_USER_DATA=0x1200, /* MPEG4 Visual Object User data */
+ VIDDEC_WORKLOAD_VIDEO_OBJ_USER_DATA=0x1300, /* MPEG4 Video Object Layer User data */
+
+ VIDDEC_WORKLOAD_MPEG2_SEQ_EXT=0x1150, /* MPEG2 Only - Sequence Extension */
+ VIDDEC_WORKLOAD_VC1_SEQ_HDR_STRUCT_A_C=0x1200, /* VC1 Only. NOTE(review): 0x1200 duplicates VIDDEC_WORKLOAD_VISUAL_OBJ_USER_DATA above — item types are ambiguous if both can appear; confirm against the firmware's item-type map before renumbering (ABI). */
+
+ VIDDEC_WORKLOAD_H264_CROPPING=0x1400, /* H264 only */
+ VIDDEC_WORKLOAD_H264_PAN_SCAN=0x1500, /* H264 only */
+ VIDDEC_WORKLOAD_H264_VUI_TIMING_INFO=0x2100, /* H264 only */
+ VIDDEC_WORKLOAD_SEI_PIC_TIMING=0x1600, /* H264 only */
+ VIDDEC_WORKLOAD_SEI_PAN_SCAN_RECT=0x1700, /* H264 only */
+ VIDDEC_WORKLOAD_SEI_USER_DATA_REGISTERED=0x1800, /* H264 only */
+ VIDDEC_WORKLOAD_SEI_USER_DATA_UNREGISTERED=0x1900, /* H264 only */
+ VIDDEC_WORKLOAD_SEI_RECOVERY_POINT=0x1a00, /* H264 only */
+ VIDDEC_WORKLOAD_IBUF_EOS=0x1b00, /* EOS tag on last workload used for current stream */
+ VIDDEC_WORKLOAD_IBUF_DISCONTINUITY=0x1c00, /* Discontinuity tag on first workload after discontinuity */
+
+ VIDDEC_WORKLOAD_MPEG4_VISUAL_SEQ_OBJ=0x1d00, /* MPEG4 Only - Visual Sequence */
+ VIDDEC_WORKLOAD_MPEG4_VIDEO_OBJ=0x1e00, /* MPEG4 Only - Video Object Layer */
+ VIDDEC_WORKLOAD_MPEG4_GRP_VIDEO_OBJ=0x1f00, /* MPEG4 Only - Group of Video Object Planes */
+ VIDDEC_WORKLOAD_MPEG4_VIDEO_PLANE_SHORT=0x2000, /* MPEG4 Only - Video Plane with Short Header */
+
+ VIDDEC_WORKLOAD_REF_FRAME_SOURCE_0 = 0x10000, /* required reference frames tag, last eight bits tell the id of frame in dpb */
+ VIDDEC_WORKLOAD_REF_FRAME_RELEASE_0 = 0x20000, /* release frames tag, last eight bits tell the id of frame in dpb */
+ VIDDEC_WORKLOAD_REF_FRAME_DISPLAY_0 = 0x30000, /* Display order in DPB tag, for H264 */
+ VIDDEC_WORKLOAD_REF_FRAME_DROPOUT_0 = 0x40000, /* Release frames but not display, for H264 */
+ VIDDEC_WORKLOAD_EOS_RELEASE_FRAME_0 = 0x50000, /* Release list while EOS, last eight bits tell the id of frame in dpb */
+ VIDDEC_WORKLOAD_EOS_DISPLAY_FRAME_0 = 0x60000, /* Display list while EOS, last eight bits tell the id of frame in dpb */
+
+ VIDDEC_WORKLOAD_DPB_ACTIVE_FRAME_0 = 0x70000, /* required for H264 as it needs whole DPB for each frame */
+ VIDDEC_WORKLOAD_H264_REFR_LIST_0 = 0x80000, /* ref list 0 for H264 */
+ VIDDEC_WORKLOAD_H264_REFR_LIST_1 = 0x90000, /* ref list 1 for H264 */
+ VIDDEC_WORKLOAD_EOS_BEGIN_BOUNDARY = 0xa0000, /* eos items begin after this */
+ VIDDEC_WORKLOAD_DECODER_SPECIFIC = 0x100000, /* pvt info for decoder tags */
+ VIDDEC_WORKLOAD_MAX,
+};
+
+/* 16-byte workload */
+typedef struct viddec_workload_item
+{
+ enum workload_item_type vwi_type;
+ union
+ {
+ struct
+ {
+ unsigned int es_phys_addr;
+ unsigned int es_phys_len;
+ unsigned int es_flags;
+ }es;
+ struct
+ {
+ unsigned int tag_phys_addr;
+ unsigned int tag_phys_len;
+ unsigned int tag_value;
+ }tag;
+ struct
+ {
+ unsigned int data_offset;
+ unsigned int data_payload[2];
+ }data;
+ struct
+ {
+ signed int reference_id; /* Assigned by parser */
+ unsigned int luma_phys_addr; /* assigned by host, for DM */
+ unsigned int chroma_phys_addr; /* assigned by host, for DM */
+ }ref_frame;
+ struct /* when vwi_type == VIDDEC_WORKLOAD_REFERENCE_FRAME_REORDER */
+ {
+ signed int ref_table_offset; /* Index of first "reordered" */
+ /* index from Current[] for Next[offset+0], Ref[offset+1], Ref[offset+2], Ref[offset+3] */
+ unsigned int ref_reorder_00010203;
+ /* index from Current[] for Next[offset+4], Ref[offset+5], Ref[offset+6], Ref[offset+7] */
+ unsigned int ref_reorder_04050607;
+ } ref_reorder;
+ struct
+ {
+ /* we pack a maximum of 11 bytes of user data and 1 byte for size */
+ /* TODO: we can pack 12 bytes and use bottom 8 bits of type to indicate size */
+#define viddec_fw_get_user_data_size(x) ((x)->user_data.size)
+#define viddec_fw_get_user_data(x) (unsigned char *)&((x)->user_data.data_payload[0])
+ unsigned char size;
+ unsigned char data_payload[11];
+ /*
+ ITEM TYPES WHICH use this:
+ VIDDEC_WORKLOAD_SEI_USER_DATA_UNREGISTERED, VIDDEC_WORKLOAD_SEI_USER_DATA_REGISTERED,
+ VIDDEC_WORKLOAD_SEQ_USER_DATA, VIDDEC_WORKLOAD_GOP_USER_DATA,
+ VIDDEC_WORKLOAD_FRM_USER_DATA, VIDDEC_WORKLOAD_FLD_USER_DATA, VIDDEC_WORKLOAD_SLC_USER_DATA,
+ */
+ }user_data;
+ struct
+ {
+ /* MPEG-2 sequence_header() fields packed into two 32-bit words;
+ use the get/set macros below rather than touching the words directly. */
+ // Sequence Header Item I (From LSB):
+ // - horizontal_size_value - 12 bits
+ // - vertical_size_value - 12 bits
+ // - aspect_ratio_information - 4 bits
+ // - frame_rate_code - 4 bits
+ #define viddec_fw_mp2_sh_get_horizontal_size_value(x) viddec_fw_bitfields_extract( (x)->seq_hdr_item_1, 0, 0xFFF)
+ #define viddec_fw_mp2_sh_get_vertical_size_value(x) viddec_fw_bitfields_extract( (x)->seq_hdr_item_1, 12, 0xFFF)
+ #define viddec_fw_mp2_sh_get_aspect_ratio_information(x) viddec_fw_bitfields_extract( (x)->seq_hdr_item_1, 24, 0xF)
+ #define viddec_fw_mp2_sh_get_frame_rate_code(x) viddec_fw_bitfields_extract( (x)->seq_hdr_item_1, 28, 0xF)
+ #define viddec_fw_mp2_sh_set_horizontal_size_value(x, val) viddec_fw_bitfields_insert ( (x)->seq_hdr_item_1, val, 0, 0xFFF)
+ #define viddec_fw_mp2_sh_set_vertical_size_value(x, val) viddec_fw_bitfields_insert ( (x)->seq_hdr_item_1, val, 12, 0xFFF)
+ #define viddec_fw_mp2_sh_set_aspect_ratio_information(x, val) viddec_fw_bitfields_insert ( (x)->seq_hdr_item_1, val, 24, 0xF)
+ #define viddec_fw_mp2_sh_set_frame_rate_code(x, val) viddec_fw_bitfields_insert ( (x)->seq_hdr_item_1, val, 28, 0xF)
+ unsigned int seq_hdr_item_1;
+
+ // Sequence Header Item II (From LSB):
+ // - bit_rate_value - 18 bits
+ // - vbv_buffer_size_value - 10 bits
+ // - remaining pad bits
+ #define viddec_fw_mp2_sh_get_bit_rate_value(x) viddec_fw_bitfields_extract( (x)->seq_hdr_item_2, 0, 0x3FFFF)
+ #define viddec_fw_mp2_sh_get_vbv_buffer_size_value(x) viddec_fw_bitfields_extract( (x)->seq_hdr_item_2, 18, 0x3FF)
+ #define viddec_fw_mp2_sh_set_bit_rate_value(x, val) viddec_fw_bitfields_insert ( (x)->seq_hdr_item_2, val, 0, 0x3FFFF)
+ #define viddec_fw_mp2_sh_set_vbv_buffer_size_value(x, val) viddec_fw_bitfields_insert ( (x)->seq_hdr_item_2, val, 18, 0x3FF)
+ unsigned int seq_hdr_item_2;
+
+ unsigned int pad; /* keeps the item at 3 words like its siblings */
+ } mp2_sh; // mp2 item of type VIDDEC_WORKLOAD_SEQUENCE_INFO
+ struct
+ {
+ /* MPEG-2 sequence_extension() fields packed into two 32-bit words. */
+ // Sequence Extension Item I (From LSB):
+ // - profile_and_level_indication - 8 bits
+ // - progressive_sequence - 1 bit
+ // - chroma_format - 2 bits
+ // - horizontal_size_extension - 2 bits
+ // - vertical_size_extension - 2 bits
+ // - bit_rate_extension - 12 bits
+ // - remaining pad bits
+ #define viddec_fw_mp2_se_get_profile_and_level_indication(x) viddec_fw_bitfields_extract( (x)->seq_ext_item_1, 0, 0xFF)
+ #define viddec_fw_mp2_se_get_progressive_sequence(x) viddec_fw_bitfields_extract( (x)->seq_ext_item_1, 8, 0x1)
+ #define viddec_fw_mp2_se_get_chroma_format(x) viddec_fw_bitfields_extract( (x)->seq_ext_item_1, 9, 0x3)
+ #define viddec_fw_mp2_se_get_horizontal_size_extension(x) viddec_fw_bitfields_extract( (x)->seq_ext_item_1, 11, 0x3)
+ #define viddec_fw_mp2_se_get_vertical_size_extension(x) viddec_fw_bitfields_extract( (x)->seq_ext_item_1, 13, 0x3)
+ #define viddec_fw_mp2_se_get_bit_rate_extension(x) viddec_fw_bitfields_extract( (x)->seq_ext_item_1, 15, 0xFFF)
+ #define viddec_fw_mp2_se_set_profile_and_level_indication(x, val) viddec_fw_bitfields_insert ( (x)->seq_ext_item_1, val, 0, 0xFF)
+ #define viddec_fw_mp2_se_set_progressive_sequence(x, val) viddec_fw_bitfields_insert ( (x)->seq_ext_item_1, val, 8, 0x1)
+ #define viddec_fw_mp2_se_set_chroma_format(x, val) viddec_fw_bitfields_insert ( (x)->seq_ext_item_1, val, 9, 0x3)
+ #define viddec_fw_mp2_se_set_horizontal_size_extension(x, val) viddec_fw_bitfields_insert ( (x)->seq_ext_item_1, val, 11, 0x3)
+ #define viddec_fw_mp2_se_set_vertical_size_extension(x, val) viddec_fw_bitfields_insert ( (x)->seq_ext_item_1, val, 13, 0x3)
+ #define viddec_fw_mp2_se_set_bit_rate_extension(x, val) viddec_fw_bitfields_insert ( (x)->seq_ext_item_1, val, 15, 0xFFF)
+ unsigned int seq_ext_item_1;
+
+ // Sequence Extension Item II (From LSB):
+ // - vbv_buffer_size_extension - 8 bits
+ // - frame_rate_extension_n - 2 bits
+ // - frame_rate_extension_d - 5 bits
+ // - remaining pad bits
+ #define viddec_fw_mp2_se_get_vbv_buffer_size_extension(x) viddec_fw_bitfields_extract( (x)->seq_ext_item_2, 0, 0xFF)
+ #define viddec_fw_mp2_se_get_frame_rate_extension_n(x) viddec_fw_bitfields_extract( (x)->seq_ext_item_2, 8, 0x3)
+ #define viddec_fw_mp2_se_get_frame_rate_extension_d(x) viddec_fw_bitfields_extract( (x)->seq_ext_item_2, 10, 0x1F)
+ #define viddec_fw_mp2_se_set_vbv_buffer_size_extension(x, val) viddec_fw_bitfields_insert ( (x)->seq_ext_item_2, val, 0, 0xFF)
+ #define viddec_fw_mp2_se_set_frame_rate_extension_n(x, val) viddec_fw_bitfields_insert ( (x)->seq_ext_item_2, val, 8, 0x3)
+ #define viddec_fw_mp2_se_set_frame_rate_extension_d(x, val) viddec_fw_bitfields_insert ( (x)->seq_ext_item_2, val, 10, 0x1F)
+ unsigned int seq_ext_item_2;
+
+ unsigned int pad; /* keeps the item at 3 words like its siblings */
+ } mp2_se; // mp2 item of type VIDDEC_WORKLOAD_MPEG2_SEQ_EXT
+ struct
+ {
+ /* MPEG-2 sequence_display_extension() fields packed into two 32-bit words. */
+ // Sequence Display Extension Item I (From LSB):
+ // - display_horizontal_size - 14 bits
+ // - display_vertical_size - 14 bits
+ // - video_format - 3 bits
+ // - color_description - 1 bit
+ #define viddec_fw_mp2_sde_get_display_horizontal_size(x) viddec_fw_bitfields_extract( (x)->seq_disp_ext_item_1, 0, 0x3FFF)
+ #define viddec_fw_mp2_sde_get_display_vertical_size(x) viddec_fw_bitfields_extract( (x)->seq_disp_ext_item_1, 14, 0x3FFF)
+ #define viddec_fw_mp2_sde_get_video_format(x) viddec_fw_bitfields_extract( (x)->seq_disp_ext_item_1, 28, 0x7)
+ #define viddec_fw_mp2_sde_get_color_description(x) viddec_fw_bitfields_extract( (x)->seq_disp_ext_item_1, 31, 0x1)
+ #define viddec_fw_mp2_sde_set_display_horizontal_size(x, val) viddec_fw_bitfields_insert ( (x)->seq_disp_ext_item_1, val, 0, 0x3FFF)
+ #define viddec_fw_mp2_sde_set_display_vertical_size(x, val) viddec_fw_bitfields_insert ( (x)->seq_disp_ext_item_1, val, 14, 0x3FFF)
+ #define viddec_fw_mp2_sde_set_video_format(x, val) viddec_fw_bitfields_insert ( (x)->seq_disp_ext_item_1, val, 28, 0x7)
+ #define viddec_fw_mp2_sde_set_color_description(x, val) viddec_fw_bitfields_insert ( (x)->seq_disp_ext_item_1, val, 31, 0x1)
+ unsigned int seq_disp_ext_item_1;
+
+ // Sequence Display Extension II (From LSB):
+ // - color_primaries - 8 bits
+ // - transfer_characteristics - 8 bits
+ // - remaining pad bits
+ #define viddec_fw_mp2_sde_get_color_primaries(x) viddec_fw_bitfields_extract( (x)->seq_disp_ext_item_2, 0, 0xFF)
+ #define viddec_fw_mp2_sde_get_transfer_characteristics(x) viddec_fw_bitfields_extract( (x)->seq_disp_ext_item_2, 8, 0xFF)
+ #define viddec_fw_mp2_sde_set_color_primaries(x, val) viddec_fw_bitfields_insert ( (x)->seq_disp_ext_item_2, val, 0, 0xFF)
+ #define viddec_fw_mp2_sde_set_transfer_characteristics(x, val) viddec_fw_bitfields_insert ( (x)->seq_disp_ext_item_2, val, 8, 0xFF)
+ unsigned int seq_disp_ext_item_2;
+
+ unsigned int pad; /* keeps the item at 3 words like its siblings */
+ } mp2_sde; // mp2 item of type VIDDEC_WORKLOAD_DISPLAY_INFO
+ struct
+ {
+ /* MPEG-2 group_of_pictures_header() flags in one 32-bit word. */
+ // Group of Pictures Header Item I (From LSB):
+ // - closed_gop - 1 bit
+ // - broken_link - 1 bit
+ // - remaining pad bits
+ #define viddec_fw_mp2_gop_get_closed_gop(x) viddec_fw_bitfields_extract( (x)->gop_hdr_item_1, 0, 0x1)
+ #define viddec_fw_mp2_gop_get_broken_link(x) viddec_fw_bitfields_extract( (x)->gop_hdr_item_1, 1, 0x1)
+ #define viddec_fw_mp2_gop_set_closed_gop(x, val) viddec_fw_bitfields_insert ( (x)->gop_hdr_item_1, val, 0, 0x1)
+ #define viddec_fw_mp2_gop_set_broken_link(x, val) viddec_fw_bitfields_insert ( (x)->gop_hdr_item_1, val, 1, 0x1)
+ unsigned int gop_hdr_item_1;
+
+ unsigned int pad1; /* unused; keeps the item at 3 words */
+ unsigned int pad2;
+ } mp2_gop; // mp2 item of type VIDDEC_WORKLOAD_GOP_INFO
+ struct
+ {
+ /* VC-1 sequence-layer fields; bit positions are documented on the
+ 'size' and 'flags' members at the end of this struct. */
+ #define viddec_fw_vc1_get_profile(x) viddec_fw_bitfields_extract((x)->size, 30, 0x3)
+ #define viddec_fw_vc1_set_profile(x, val) viddec_fw_bitfields_insert((x)->size, val, 30, 0x3)
+
+ #define viddec_fw_vc1_get_level(x) viddec_fw_bitfields_extract((x)->size, 27, 0x7)
+ #define viddec_fw_vc1_set_level(x, val) viddec_fw_bitfields_insert((x)->size, val, 27, 0x7)
+
+ #define viddec_fw_vc1_get_colordiff_format(x) viddec_fw_bitfields_extract((x)->size, 25, 0x3)
+ #define viddec_fw_vc1_set_colordiff_format(x, val) viddec_fw_bitfields_insert((x)->size, val, 25, 0x3)
+
+ #define viddec_fw_vc1_get_pulldown(x) viddec_fw_bitfields_extract((x)->size, 24, 0x1)
+ #define viddec_fw_vc1_set_pulldown(x, val) viddec_fw_bitfields_insert((x)->size, val, 24, 0x1)
+
+ #define viddec_fw_vc1_get_max_coded_width(x) viddec_fw_bitfields_extract((x)->size, 12, 0xFFF)
+ #define viddec_fw_vc1_set_max_coded_width(x, val) viddec_fw_bitfields_insert((x)->size, val, 12, 0xFFF)
+
+ #define viddec_fw_vc1_get_max_coded_height(x) viddec_fw_bitfields_extract((x)->size, 0, 0xFFF)
+ #define viddec_fw_vc1_set_max_coded_height(x, val) viddec_fw_bitfields_insert((x)->size, val, 0, 0xFFF)
+
+ #define viddec_fw_vc1_get_bitrtq_postproc(x) viddec_fw_bitfields_extract((x)->flags, 8, 0x1F)
+ #define viddec_fw_vc1_set_bitrtq_postproc(x, val) viddec_fw_bitfields_insert((x)->flags, val, 8, 0x1F)
+
+ #define viddec_fw_vc1_get_frmrtq_postproc(x) viddec_fw_bitfields_extract((x)->flags, 5, 0x7)
+ #define viddec_fw_vc1_set_frmrtq_postproc(x, val) viddec_fw_bitfields_insert((x)->flags, val, 5, 0x7)
+
+ #define viddec_fw_vc1_get_interlace(x) viddec_fw_bitfields_extract((x)->flags, 4, 0x1)
+ #define viddec_fw_vc1_set_interlace(x, val) viddec_fw_bitfields_insert((x)->flags, val, 4, 0x1)
+
+ #define viddec_fw_vc1_get_tfcntrflag(x) viddec_fw_bitfields_extract((x)->flags, 3, 0x1)
+ #define viddec_fw_vc1_set_tfcntrflag(x, val) viddec_fw_bitfields_insert((x)->flags, val, 3, 0x1)
+
+ #define viddec_fw_vc1_get_finterpflag(x) viddec_fw_bitfields_extract((x)->flags, 2, 0x1)
+ #define viddec_fw_vc1_set_finterpflag(x, val) viddec_fw_bitfields_insert((x)->flags, val, 2, 0x1)
+
+ #define viddec_fw_vc1_get_psf(x) viddec_fw_bitfields_extract((x)->flags, 1, 0x1)
+ #define viddec_fw_vc1_set_psf(x, val) viddec_fw_bitfields_insert((x)->flags, val, 1, 0x1)
+
+ #define viddec_fw_vc1_get_display_ext(x) viddec_fw_bitfields_extract((x)->flags, 0, 0x1)
+ #define viddec_fw_vc1_set_display_ext(x, val) viddec_fw_bitfields_insert((x)->flags, val, 0, 0x1)
+
+ uint32_t size; // profile:2, level:3, colordiff_format:2, pulldown:1, max_coded_width:12, max_coded_height:12
+ uint32_t flags; // bitrtq_postproc:5, frmrtq_postproc:3, interlace:1, tfcntrflag:1, finterpflag:1, psf:1, display_ext:1
+ uint32_t pad;
+ } vc1_sl; // vc1 item of type VIDDEC_WORKLOAD_SEQUENCE_INFO
+ struct
+ {
+ /* VC-1 sequence-layer display-extension fields; see the field-end
+ comments for the exact bit packing of each 32-bit word. */
+ // This item is populated when display_ext flag is set in the sequence layer
+ // therefore, no need to provide this flag
+ #define viddec_fw_vc1_get_disp_horiz_size(x) viddec_fw_bitfields_extract((x)->size, 18, 0x3FFF)
+ #define viddec_fw_vc1_set_disp_horiz_size(x, val) viddec_fw_bitfields_insert((x)->size, val, 18, 0x3FFF)
+
+ #define viddec_fw_vc1_get_disp_vert_size(x) viddec_fw_bitfields_extract((x)->size, 4, 0x3FFF)
+ #define viddec_fw_vc1_set_disp_vert_size(x, val) viddec_fw_bitfields_insert((x)->size, val, 4, 0x3FFF)
+
+ #define viddec_fw_vc1_get_disp_aspect_ratio_flag(x) viddec_fw_bitfields_extract((x)->size, 3, 0x1)
+ #define viddec_fw_vc1_set_disp_aspect_ratio_flag(x, val) viddec_fw_bitfields_insert((x)->size, val, 3, 0x1)
+
+ #define viddec_fw_vc1_get_disp_color_format_flag(x) viddec_fw_bitfields_extract((x)->size, 2, 0x1)
+ #define viddec_fw_vc1_set_disp_color_format_flag(x, val) viddec_fw_bitfields_insert((x)->size, val, 2, 0x1)
+
+ #define viddec_fw_vc1_get_disp_framerate_flag(x) viddec_fw_bitfields_extract((x)->size, 1, 0x1)
+ #define viddec_fw_vc1_set_disp_framerate_flag(x, val) viddec_fw_bitfields_insert((x)->size, val, 1, 0x1)
+
+ #define viddec_fw_vc1_get_disp_framerateind(x) viddec_fw_bitfields_extract((x)->size, 0, 0x1)
+ #define viddec_fw_vc1_set_disp_framerateind(x, val) viddec_fw_bitfields_insert((x)->size, val, 0, 0x1)
+
+ #define viddec_fw_vc1_get_disp_aspect_ratio(x) viddec_fw_bitfields_extract((x)->framerate, 28, 0xF)
+ #define viddec_fw_vc1_set_disp_aspect_ratio(x, val) viddec_fw_bitfields_insert((x)->framerate, val, 28, 0xF)
+
+ #define viddec_fw_vc1_get_disp_frameratenr(x) viddec_fw_bitfields_extract((x)->framerate, 20, 0xFF)
+ #define viddec_fw_vc1_set_disp_frameratenr(x, val) viddec_fw_bitfields_insert((x)->framerate, val, 20, 0xFF)
+
+ #define viddec_fw_vc1_get_disp_frameratedr(x) viddec_fw_bitfields_extract((x)->framerate, 16, 0xF)
+ #define viddec_fw_vc1_set_disp_frameratedr(x, val) viddec_fw_bitfields_insert((x)->framerate, val, 16, 0xF)
+
+ #define viddec_fw_vc1_get_disp_framerateexp(x) viddec_fw_bitfields_extract((x)->framerate, 0, 0xFFFF)
+ #define viddec_fw_vc1_set_disp_framerateexp(x, val) viddec_fw_bitfields_insert((x)->framerate, val, 0, 0xFFFF)
+
+ #define viddec_fw_vc1_get_disp_aspect_ratio_horiz_size(x) viddec_fw_bitfields_extract((x)->aspectsize, 24, 0xFF)
+ #define viddec_fw_vc1_set_disp_aspect_ratio_horiz_size(x, val) viddec_fw_bitfields_insert((x)->aspectsize, val, 24, 0xFF)
+
+ #define viddec_fw_vc1_get_disp_aspect_ratio_vert_size(x) viddec_fw_bitfields_extract((x)->aspectsize, 16, 0xFF)
+ #define viddec_fw_vc1_set_disp_aspect_ratio_vert_size(x, val) viddec_fw_bitfields_insert((x)->aspectsize, val, 16, 0xFF)
+
+ #define viddec_fw_vc1_get_disp_color_prim(x) viddec_fw_bitfields_extract((x)->aspectsize, 8, 0xFF)
+ #define viddec_fw_vc1_set_disp_color_prim(x, val) viddec_fw_bitfields_insert((x)->aspectsize, val, 8, 0xFF)
+
+ #define viddec_fw_vc1_get_disp_transfer_char(x) viddec_fw_bitfields_extract((x)->aspectsize, 0, 0xFF)
+ #define viddec_fw_vc1_set_disp_transfer_char(x, val) viddec_fw_bitfields_insert((x)->aspectsize, val, 0, 0xFF)
+
+ uint32_t size; // disp_horiz_size:14, disp_vert_size:14, aspect_ratio_flag:1, color_format_flag:1, framerate_flag:1, framerateind:1
+ uint32_t framerate; // aspect_ratio:4, frameratenr:8, frameratedr:4, framerateexp:16
+ uint32_t aspectsize; // aspect_ratio_horiz_size:8, aspect_ratio_vert_size:8, color_prim:8, transfer_char:8
+ } vc1_sl_de; // vc1 item of type VIDDEC_WORKLOAD_DISPLAY_INFO
+ struct
+ {
+ /* VC-1 Simple/Main profile sequence header (RCV STRUCT_A/STRUCT_C)
+ fields; bit packing documented on 'size' and 'flags' below. */
+ #define viddec_fw_vc1_get_rcv_horiz_size(x) viddec_fw_bitfields_extract((x)->size, 16, 0xFFFF)
+ #define viddec_fw_vc1_set_rcv_horiz_size(x, val) viddec_fw_bitfields_insert((x)->size, val, 16, 0xFFFF)
+
+ #define viddec_fw_vc1_get_rcv_vert_size(x) viddec_fw_bitfields_extract((x)->size, 0, 0xFFFF)
+ #define viddec_fw_vc1_set_rcv_vert_size(x, val) viddec_fw_bitfields_insert((x)->size, val, 0, 0xFFFF)
+
+ #define viddec_fw_vc1_get_rcv_bitrtq_postproc(x) viddec_fw_bitfields_extract((x)->flags, 16, 0x1F)
+ #define viddec_fw_vc1_set_rcv_bitrtq_postproc(x, val) viddec_fw_bitfields_insert((x)->flags, val, 16, 0x1F)
+
+ #define viddec_fw_vc1_get_rcv_frmrtq_postproc(x) viddec_fw_bitfields_extract((x)->flags, 13, 0x7)
+ #define viddec_fw_vc1_set_rcv_frmrtq_postproc(x, val) viddec_fw_bitfields_insert((x)->flags, val, 13, 0x7)
+
+ #define viddec_fw_vc1_get_rcv_profile(x) viddec_fw_bitfields_extract((x)->flags, 9, 0xF)
+ #define viddec_fw_vc1_set_rcv_profile(x, val) viddec_fw_bitfields_insert((x)->flags, val, 9, 0xF)
+
+ #define viddec_fw_vc1_get_rcv_level(x) viddec_fw_bitfields_extract((x)->flags, 6, 0x7)
+ #define viddec_fw_vc1_set_rcv_level(x, val) viddec_fw_bitfields_insert((x)->flags, val, 6, 0x7)
+
+ #define viddec_fw_vc1_get_rcv_cbr(x) viddec_fw_bitfields_extract((x)->flags, 5, 0x1)
+ #define viddec_fw_vc1_set_rcv_cbr(x, val) viddec_fw_bitfields_insert((x)->flags, val, 5, 0x1)
+
+ #define viddec_fw_vc1_get_rcv_rangered(x) viddec_fw_bitfields_extract((x)->flags, 4, 0x1)
+ #define viddec_fw_vc1_set_rcv_rangered(x, val) viddec_fw_bitfields_insert((x)->flags, val, 4, 0x1)
+
+ #define viddec_fw_vc1_get_rcv_maxbframes(x) viddec_fw_bitfields_extract((x)->flags, 1, 0x7)
+ #define viddec_fw_vc1_set_rcv_maxbframes(x, val) viddec_fw_bitfields_insert((x)->flags, val, 1, 0x7)
+
+ #define viddec_fw_vc1_get_rcv_finterpflag(x) viddec_fw_bitfields_extract((x)->flags, 0, 0x1)
+ #define viddec_fw_vc1_set_rcv_finterpflag(x, val) viddec_fw_bitfields_insert((x)->flags, val, 0, 0x1)
+
+ uint32_t size; // horiz_size:16, vert_size:16
+ uint32_t flags; // bitrtq_postproc:5, frmrtq_postproc:3, profile:4, level:3, cbr:1, rangered:1, maxbframes:3, finterpflag:1
+ uint32_t pad;
+ } vc1_sh_struct_a_c; // vc1 item of type VIDDEC_WORKLOAD_VC1_SEQ_HDR_STRUCT_A_C
+ struct
+ {
+ /* VC-1 entry-point header fields; bit packing documented on the
+ 'size' and 'flags' members below. */
+ #define viddec_fw_vc1_get_ep_size_flag(x) viddec_fw_bitfields_extract((x)->size, 24, 0x1)
+ #define viddec_fw_vc1_set_ep_size_flag(x, val) viddec_fw_bitfields_insert((x)->size, val, 24, 0x1)
+
+ #define viddec_fw_vc1_get_ep_horiz_size(x) viddec_fw_bitfields_extract((x)->size, 12, 0xFFF)
+ #define viddec_fw_vc1_set_ep_horiz_size(x, val) viddec_fw_bitfields_insert((x)->size, val, 12, 0xFFF)
+
+ #define viddec_fw_vc1_get_ep_vert_size(x) viddec_fw_bitfields_extract((x)->size, 0, 0xFFF)
+ #define viddec_fw_vc1_set_ep_vert_size(x, val) viddec_fw_bitfields_insert((x)->size, val, 0, 0xFFF)
+
+ #define viddec_fw_vc1_get_ep_broken_link(x) viddec_fw_bitfields_extract((x)->flags, 10, 0x1)
+ #define viddec_fw_vc1_set_ep_broken_link(x, val) viddec_fw_bitfields_insert((x)->flags, val, 10, 0x1)
+
+ #define viddec_fw_vc1_get_ep_closed_entry(x) viddec_fw_bitfields_extract((x)->flags, 9, 0x1)
+ #define viddec_fw_vc1_set_ep_closed_entry(x, val) viddec_fw_bitfields_insert((x)->flags, val, 9, 0x1)
+
+ #define viddec_fw_vc1_get_ep_panscan_flag(x) viddec_fw_bitfields_extract((x)->flags, 8, 0x1)
+ #define viddec_fw_vc1_set_ep_panscan_flag(x, val) viddec_fw_bitfields_insert((x)->flags, val, 8, 0x1)
+
+ #define viddec_fw_vc1_get_ep_range_mapy_flag(x) viddec_fw_bitfields_extract((x)->flags, 7, 0x1)
+ #define viddec_fw_vc1_set_ep_range_mapy_flag(x, val) viddec_fw_bitfields_insert((x)->flags, val, 7, 0x1)
+
+ #define viddec_fw_vc1_get_ep_range_mapy(x) viddec_fw_bitfields_extract((x)->flags, 4, 0x7)
+ #define viddec_fw_vc1_set_ep_range_mapy(x, val) viddec_fw_bitfields_insert((x)->flags, val, 4, 0x7)
+
+ #define viddec_fw_vc1_get_ep_range_mapuv_flag(x) viddec_fw_bitfields_extract((x)->flags, 3, 0x1)
+ #define viddec_fw_vc1_set_ep_range_mapuv_flag(x, val) viddec_fw_bitfields_insert((x)->flags, val, 3, 0x1)
+
+ #define viddec_fw_vc1_get_ep_range_mapuv(x) viddec_fw_bitfields_extract((x)->flags, 0, 0x7)
+ #define viddec_fw_vc1_set_ep_range_mapuv(x, val) viddec_fw_bitfields_insert((x)->flags, val, 0, 0x7)
+
+ uint32_t size; // coded_size_flag:1, coded_width:12, coded_height:12
+ uint32_t flags; // broken_link:1, closed_entry:1, panscan_flag:1, range_mapy_flag:1, range_mapy:3, range_mapuv_flag:1, range_mapuv:3
+ uint32_t pad;
+ } vc1_ep; // vc1 item of type VIDDEC_WORKLOAD_GOP_INFO
+ struct
+ {
+ /* H.264 SPS summary: flags/idc values packed into sps_messages
+ per the layout comment below, plus the two picture dimensions. */
+ /*
+ 0-7 bits for profile_idc.
+ 8-15 bits for level_idc.
+ 16-17 bits for chroma_format_idc.
+ 18-22 bits for num_ref_frames.
+ 23 for gaps_in_frame_num_value_allowed_flag.
+ 24 for frame_mbs_only_flag.
+ 25 for frame_cropping_flag.
+ 26 for vui_parameters_present_flag.
+ */
+#define viddec_fw_h264_sps_get_profile_idc(x) viddec_fw_bitfields_extract( (x)->sps_messages, 0, 0xFF)
+#define viddec_fw_h264_sps_set_profile_idc(x, val) viddec_fw_bitfields_insert( (x)->sps_messages, val, 0, 0xFF)
+#define viddec_fw_h264_sps_get_level_idc(x) viddec_fw_bitfields_extract( (x)->sps_messages, 8, 0xFF)
+#define viddec_fw_h264_sps_set_level_idc(x, val) viddec_fw_bitfields_insert( (x)->sps_messages, val, 8, 0xFF)
+#define viddec_fw_h264_sps_get_chroma_format_idc(x) viddec_fw_bitfields_extract( (x)->sps_messages, 16, 0x3)
+#define viddec_fw_h264_sps_set_chroma_format_idc(x, val) viddec_fw_bitfields_insert( (x)->sps_messages, val, 16, 0x3)
+#define viddec_fw_h264_sps_get_num_ref_frames(x) viddec_fw_bitfields_extract( (x)->sps_messages, 18, 0x1F)
+#define viddec_fw_h264_sps_set_num_ref_frames(x, val) viddec_fw_bitfields_insert( (x)->sps_messages, val, 18, 0x1F)
+#define viddec_fw_h264_sps_get_gaps_in_frame_num_value_allowed_flag(x) viddec_fw_bitfields_extract( (x)->sps_messages, 23, 0x1)
+#define viddec_fw_h264_sps_set_gaps_in_frame_num_value_allowed_flag(x, val) viddec_fw_bitfields_insert( (x)->sps_messages, val, 23, 0x1)
+#define viddec_fw_h264_sps_get_frame_mbs_only_flag(x) viddec_fw_bitfields_extract( (x)->sps_messages, 24, 0x1)
+#define viddec_fw_h264_sps_set_frame_mbs_only_flag(x, val) viddec_fw_bitfields_insert( (x)->sps_messages, val, 24, 0x1)
+#define viddec_fw_h264_sps_get_frame_cropping_flag(x) viddec_fw_bitfields_extract( (x)->sps_messages, 25, 0x1)
+#define viddec_fw_h264_sps_set_frame_cropping_flag(x, val) viddec_fw_bitfields_insert( (x)->sps_messages, val, 25, 0x1)
+#define viddec_fw_h264_sps_get_vui_parameters_present_flag(x) viddec_fw_bitfields_extract( (x)->sps_messages, 26, 0x1)
+#define viddec_fw_h264_sps_set_vui_parameters_present_flag(x, val) viddec_fw_bitfields_insert( (x)->sps_messages, val, 26, 0x1)
+ unsigned int sps_messages;
+ unsigned int pic_width_in_mbs_minus1;
+ unsigned int pic_height_in_map_units_minus1;
+ } h264_sps; // h264 item of type VIDDEC_WORKLOAD_SEQUENCE_INFO
+
+ struct
+ {
+ /* H.264 frame-cropping offsets, two 16-bit values packed per word. */
+#define viddec_fw_h264_cropping_get_left(x) viddec_fw_bitfields_extract( (x)->left_right, 16, 0xFFFF)
+#define viddec_fw_h264_cropping_get_right(x) viddec_fw_bitfields_extract( (x)->left_right, 0, 0xFFFF)
+#define viddec_fw_h264_cropping_set_left(x, val) viddec_fw_bitfields_insert( (x)->left_right, val, 16, 0xFFFF)
+#define viddec_fw_h264_cropping_set_right(x, val) viddec_fw_bitfields_insert( (x)->left_right, val, 0, 0xFFFF)
+ unsigned int left_right; /* Left in upper 16 bits and right in Lower 16 bits */
+#define viddec_fw_h264_cropping_get_top(x) viddec_fw_bitfields_extract( (x)->top_bottom, 16, 0xFFFF)
+#define viddec_fw_h264_cropping_get_bottom(x) viddec_fw_bitfields_extract( (x)->top_bottom, 0, 0xFFFF)
+#define viddec_fw_h264_cropping_set_top(x, val) viddec_fw_bitfields_insert( (x)->top_bottom, val, 16, 0xFFFF)
+#define viddec_fw_h264_cropping_set_bottom(x, val) viddec_fw_bitfields_insert( (x)->top_bottom, val, 0, 0xFFFF)
+ unsigned int top_bottom; /* top in upper 16 bits and bottom in lower 16 bits */
+ unsigned int pad; /* keeps the item at 3 words like its siblings */
+ } h264_cropping; // h264 item of type VIDDEC_WORKLOAD_H264_CROPPING
+
+ struct
+ {
+ /* H.264 VUI summary: presence flags + video_format in one word,
+ aspect/colour bytes in a second, SAR dimensions in a third. */
+ /* 0 bit for aspect_ratio_info_present_flag
+ 1 st bit for video_signal_type_present_flag
+ 2 nd bit for colour_description_present_flag
+ 3 rd bit for timing_info_present_flag
+ 4 th bit for nal_hrd_parameters_present_flag
+ 5 th bit for vcl_hrd_parameters_present_flag
+ 6 th bit for fixed_frame_rate_flag
+ 7 th bit for pic_struct_present_flag
+ 8 th bit for low_delay_hrd_flag
+ 9,10,11 bits for video_format
+ */
+#define viddec_fw_h264_vui_get_aspect_ratio_info_present_flag(x) viddec_fw_bitfields_extract( (x)->vui_flags_and_format, 0, 0x1)
+#define viddec_fw_h264_vui_set_aspect_ratio_info_present_flag(x, val) viddec_fw_bitfields_insert( (x)->vui_flags_and_format, val, 0, 0x1)
+#define viddec_fw_h264_vui_get_video_signal_type_present_flag(x) viddec_fw_bitfields_extract( (x)->vui_flags_and_format, 1, 0x1)
+#define viddec_fw_h264_vui_set_video_signal_type_present_flag(x, val) viddec_fw_bitfields_insert( (x)->vui_flags_and_format, val, 1, 0x1)
+#define viddec_fw_h264_vui_get_colour_description_present_flag(x) viddec_fw_bitfields_extract( (x)->vui_flags_and_format, 2, 0x1)
+#define viddec_fw_h264_vui_set_colour_description_present_flag(x, val) viddec_fw_bitfields_insert( (x)->vui_flags_and_format, val, 2, 0x1)
+#define viddec_fw_h264_vui_get_timing_info_present_flag(x) viddec_fw_bitfields_extract( (x)->vui_flags_and_format, 3, 0x1)
+#define viddec_fw_h264_vui_set_timing_info_present_flag(x, val) viddec_fw_bitfields_insert( (x)->vui_flags_and_format, val, 3, 0x1)
+#define viddec_fw_h264_vui_get_nal_hrd_parameters_present_flag(x) viddec_fw_bitfields_extract( (x)->vui_flags_and_format, 4, 0x1)
+#define viddec_fw_h264_vui_set_nal_hrd_parameters_present_flag(x, val) viddec_fw_bitfields_insert( (x)->vui_flags_and_format, val, 4, 0x1)
+#define viddec_fw_h264_vui_get_vcl_hrd_parameters_present_flag(x) viddec_fw_bitfields_extract( (x)->vui_flags_and_format, 5, 0x1)
+#define viddec_fw_h264_vui_set_vcl_hrd_parameters_present_flag(x, val) viddec_fw_bitfields_insert( (x)->vui_flags_and_format, val, 5, 0x1)
+#define viddec_fw_h264_vui_get_fixed_frame_rate_flag(x) viddec_fw_bitfields_extract( (x)->vui_flags_and_format, 6, 0x1)
+#define viddec_fw_h264_vui_set_fixed_frame_rate_flag(x, val) viddec_fw_bitfields_insert( (x)->vui_flags_and_format, val, 6, 0x1)
+#define viddec_fw_h264_vui_get_pic_struct_present_flag(x) viddec_fw_bitfields_extract( (x)->vui_flags_and_format, 7, 0x1)
+#define viddec_fw_h264_vui_set_pic_struct_present_flag(x, val) viddec_fw_bitfields_insert( (x)->vui_flags_and_format, val, 7, 0x1)
+#define viddec_fw_h264_vui_get_low_delay_hrd_flag(x) viddec_fw_bitfields_extract( (x)->vui_flags_and_format, 8, 0x1)
+#define viddec_fw_h264_vui_set_low_delay_hrd_flag(x, val) viddec_fw_bitfields_insert( (x)->vui_flags_and_format, val, 8, 0x1)
+#define viddec_fw_h264_vui_get_video_format(x) viddec_fw_bitfields_extract( (x)->vui_flags_and_format, 9, 0x7)
+#define viddec_fw_h264_vui_set_video_format(x, val) viddec_fw_bitfields_insert( (x)->vui_flags_and_format, val, 9, 0x7)
+ unsigned int vui_flags_and_format;
+
+#define viddec_fw_h264_vui_get_aspect_ratio_idc(x) viddec_fw_bitfields_extract( (x)->aspc_color_transfer, 0, 0xFF)
+#define viddec_fw_h264_vui_set_aspect_ratio_idc(x, val) viddec_fw_bitfields_insert( (x)->aspc_color_transfer, val, 0, 0xFF)
+#define viddec_fw_h264_vui_get_colour_primaries(x) viddec_fw_bitfields_extract( (x)->aspc_color_transfer, 8, 0xFF)
+#define viddec_fw_h264_vui_set_colour_primaries(x, val) viddec_fw_bitfields_insert( (x)->aspc_color_transfer, val, 8, 0xFF)
+#define viddec_fw_h264_vui_get_transfer_characteristics(x) viddec_fw_bitfields_extract( (x)->aspc_color_transfer, 16, 0xFF)
+#define viddec_fw_h264_vui_set_transfer_characteristics(x, val) viddec_fw_bitfields_insert( (x)->aspc_color_transfer, val, 16, 0xFF)
+ /* lower 8 bits for aspect_ratio, next 8bits for color primaries and next 8 bits for transfer characteristics */
+ unsigned int aspc_color_transfer;
+
+#define viddec_fw_h264_vui_get_sar_width(x) viddec_fw_bitfields_extract( (x)->sar_width_height, 16, 0xFFFF)
+#define viddec_fw_h264_vui_get_sar_height(x) viddec_fw_bitfields_extract( (x)->sar_width_height, 0, 0xFFFF)
+#define viddec_fw_h264_vui_set_sar_width(x, val) viddec_fw_bitfields_insert( (x)->sar_width_height, val, 16, 0xFFFF)
+#define viddec_fw_h264_vui_set_sar_height(x, val) viddec_fw_bitfields_insert( (x)->sar_width_height, val, 0, 0xFFFF)
+ unsigned int sar_width_height; /* Lower 16 for height upper 16 for width */
+ } h264_vui; // h264 item of type VIDDEC_WORKLOAD_DISPLAY_INFO
+ struct
+ {
+ /* H.264 VUI timing info (num_units_in_tick / time_scale).
+ NOTE(review): the "_flag" suffix in these macro names is a misnomer --
+ they access full 32-bit values, not 1-bit flags. Names are kept as-is
+ since they are part of the public macro API. */
+#define viddec_fw_h264_vui_get_num_units_in_tick_flag(x) viddec_fw_bitfields_extract( (x)->num_units_in_tick, 0, 0xFFFFFFFF)
+#define viddec_fw_h264_vui_set_num_units_in_tick_flag(x, val) viddec_fw_bitfields_insert( (x)->num_units_in_tick, val, 0, 0xFFFFFFFF)
+#define viddec_fw_h264_vui_get_time_scale_flag(x) viddec_fw_bitfields_extract( (x)->time_scale, 0, 0xFFFFFFFF)
+#define viddec_fw_h264_vui_set_time_scale_flag(x, val) viddec_fw_bitfields_insert( (x)->time_scale, val, 0, 0xFFFFFFFF)
+ unsigned int num_units_in_tick;
+ unsigned int time_scale;
+ unsigned int pad1; /* keeps the item at 3 words like its siblings */
+ } h264_vui_time_info; // VIDDEC_WORKLOAD_H264_VUI_TIMING_INFO
+ struct
+ {
+ /* H.264 SEI picture-timing payload. */
+ unsigned int pic_struct; /* 4 bit length */
+ unsigned int pad1; /* unused; keeps the item at 3 words */
+ unsigned int pad2;
+ } h264_sei_pic_timing; // h264 item of type VIDDEC_WORKLOAD_SEI_PIC_TIMING
+ struct
+ {
+ /* H.264 SEI pan-scan rectangle header (id, cancel/count, repetition). */
+ unsigned int pan_scan_rect_id;
+
+#define viddec_fw_h264_sei_pan_scan_get_cancel_flag(x) viddec_fw_bitfields_extract( (x)->pan_scan_cancel_and_cnt, 0, 0x1)
+#define viddec_fw_h264_sei_pan_scan_get_cnt_minus1(x) viddec_fw_bitfields_extract( (x)->pan_scan_cancel_and_cnt, 1, 0x3)
+#define viddec_fw_h264_sei_pan_scan_set_cancel_flag(x, val) viddec_fw_bitfields_insert( (x)->pan_scan_cancel_and_cnt, val, 0, 0x1)
+#define viddec_fw_h264_sei_pan_scan_set_cnt_minus1(x, val) viddec_fw_bitfields_insert( (x)->pan_scan_cancel_and_cnt, val, 1, 0x3)
+ unsigned int pan_scan_cancel_and_cnt; /* 0 bit for cancel flag and 2 bits for cnt_minus1 */
+ unsigned int pan_scan_rect_repetition_period;
+ } h264_sei_pan_scan; // h264 item of type VIDDEC_WORKLOAD_H264_PAN_SCAN
+
+ struct
+ {
+ /* One H.264 SEI pan-scan rectangle: two 16-bit offsets per word. */
+
+#define viddec_fw_h264_pan_scan_get_left(x) viddec_fw_bitfields_extract( (x)->left_right, 16, 0xFFFF)
+#define viddec_fw_h264_pan_scan_get_right(x) viddec_fw_bitfields_extract( (x)->left_right, 0, 0xFFFF)
+#define viddec_fw_h264_pan_scan_set_left(x, val) viddec_fw_bitfields_insert( (x)->left_right, val, 16, 0xFFFF)
+#define viddec_fw_h264_pan_scan_set_right(x, val) viddec_fw_bitfields_insert( (x)->left_right, val, 0, 0xFFFF)
+ unsigned int left_right; /* Left in upper 16 bits and right in Lower 16 bits */
+
+#define viddec_fw_h264_pan_scan_get_top(x) viddec_fw_bitfields_extract( (x)->top_bottom, 16, 0xFFFF)
+#define viddec_fw_h264_pan_scan_get_bottom(x) viddec_fw_bitfields_extract( (x)->top_bottom, 0, 0xFFFF)
+#define viddec_fw_h264_pan_scan_set_top(x, val) viddec_fw_bitfields_insert( (x)->top_bottom, val, 16, 0xFFFF)
+#define viddec_fw_h264_pan_scan_set_bottom(x, val) viddec_fw_bitfields_insert( (x)->top_bottom, val, 0, 0xFFFF)
+ unsigned int top_bottom; /* top in upper 16 bits and bottom in lower 16 bits */
+
+ unsigned int pad; /* keeps the item at 3 words like its siblings */
+ } h264_pan_scan_rect; // h264 item of type VIDDEC_WORKLOAD_SEI_PAN_SCAN_RECT
+ struct
+ {
+ /* H.264 SEI recovery-point payload. */
+ unsigned int recovery_frame_cnt;
+#define viddec_fw_h264_h264_sei_recovery_get_exact_match_flag(x) viddec_fw_bitfields_extract( (x)->broken_and_exctmatch_flags, 0, 0x1)
+#define viddec_fw_h264_h264_sei_recovery_get_broken_link_flag(x) viddec_fw_bitfields_extract( (x)->broken_and_exctmatch_flags, 1, 0x1)
+#define viddec_fw_h264_h264_sei_recovery_set_exact_match_flag(x, val) viddec_fw_bitfields_insert( (x)->broken_and_exctmatch_flags, val, 0, 0x1)
+#define viddec_fw_h264_h264_sei_recovery_set_broken_link_flag(x, val) viddec_fw_bitfields_insert( (x)->broken_and_exctmatch_flags, val, 1, 0x1)
+ unsigned int broken_and_exctmatch_flags; /* 0 bit for exact match, 1 bit for brokenlink */
+
+ unsigned int changing_slice_group_idc; /* 2bit value for slice_group idc */
+
+ } h264_sei_recovery_point; // h264 item of type VIDDEC_WORKLOAD_SEI_RECOVERY_POINT
+
+ struct
+ {
+ /* MPEG-4 VisualSequence + VisualObject fields (profile/level,
+ video_signal_type, colour description) packed into three words. */
+ // Visual Sequence (From LSB):
+ // - profile_and_level_indication - 8 bits
+ #define viddec_fw_mp4_vs_get_profile_and_level_indication(x) viddec_fw_bitfields_extract( (x)->vs_item, 0, 0xFF)
+ #define viddec_fw_mp4_vs_set_profile_and_level_indication(x, val) viddec_fw_bitfields_insert ( (x)->vs_item, val, 0, 0xFF)
+ unsigned int vs_item;
+
+ // Visual Object - video_signal_type
+ // - video_signal_type - 1b
+ // - video_format - 3b
+ // - video_range - 1b
+ // - colour_description - 1b
+ #define viddec_fw_mp4_vo_get_colour_description(x) viddec_fw_bitfields_extract( (x)->video_signal_type, 5, 0x1)
+ #define viddec_fw_mp4_vo_set_colour_description(x, val) viddec_fw_bitfields_insert ( (x)->video_signal_type, val, 5, 0x1)
+ #define viddec_fw_mp4_vo_get_video_range(x) viddec_fw_bitfields_extract( (x)->video_signal_type, 4, 0x1)
+ #define viddec_fw_mp4_vo_set_video_range(x, val) viddec_fw_bitfields_insert ( (x)->video_signal_type, val, 4, 0x1)
+ #define viddec_fw_mp4_vo_get_video_format(x) viddec_fw_bitfields_extract( (x)->video_signal_type, 1, 0x7)
+ #define viddec_fw_mp4_vo_set_video_format(x, val) viddec_fw_bitfields_insert ( (x)->video_signal_type, val, 1, 0x7)
+ #define viddec_fw_mp4_vo_get_video_signal_type(x) viddec_fw_bitfields_extract( (x)->video_signal_type, 0, 0x1)
+ #define viddec_fw_mp4_vo_set_video_signal_type(x, val) viddec_fw_bitfields_insert ( (x)->video_signal_type, val, 0, 0x1)
+ unsigned int video_signal_type;
+
+ // Visual Object - colour description (valid when colour_description is set)
+ // - color_primaries - 8 bits
+ // - transfer_characteristics - 8 bits
+ #define viddec_fw_mp4_vo_get_transfer_char(x) viddec_fw_bitfields_extract( (x)->color_desc, 8, 0xFF)
+ #define viddec_fw_mp4_vo_set_transfer_char(x, val) viddec_fw_bitfields_insert ( (x)->color_desc, val, 8, 0xFF)
+ #define viddec_fw_mp4_vo_get_color_primaries(x) viddec_fw_bitfields_extract( (x)->color_desc, 0, 0xFF)
+ #define viddec_fw_mp4_vo_set_color_primaries(x, val) viddec_fw_bitfields_insert ( (x)->color_desc, val, 0, 0xFF)
+ unsigned int color_desc;
+ } mp4_vs_vo; // mp4 item of type VIDDEC_WORKLOAD_MPEG4_VISUAL_SEQ_OBJ
+
+ struct
+ {
+ // Video Object Layer(From LSB):
+ // - aspect_ratio_info - 4b
+ // - par_width - 8b
+ // - par_height - 8b
+ // - vol_control_param - 1b
+ // - chroma_format - 2b
+ // - interlaced - 1b
+ // - fixed_vop_rate - 1b
+ #define viddec_fw_mp4_vol_get_fixed_vop_rate(x) viddec_fw_bitfields_extract( (x)->vol_aspect_ratio, 24, 0x1)
+ #define viddec_fw_mp4_vol_set_fixed_vop_rate(x, val) viddec_fw_bitfields_insert ( (x)->vol_aspect_ratio, val, 24, 0x1)
+ #define viddec_fw_mp4_vol_get_interlaced(x) viddec_fw_bitfields_extract( (x)->vol_aspect_ratio, 23, 0x1)
+ #define viddec_fw_mp4_vol_set_interlaced(x, val) viddec_fw_bitfields_insert ( (x)->vol_aspect_ratio, val, 23, 0x1)
+ #define viddec_fw_mp4_vol_get_chroma_format(x) viddec_fw_bitfields_extract( (x)->vol_aspect_ratio, 21, 0x3)
+ #define viddec_fw_mp4_vol_set_chroma_format(x, val) viddec_fw_bitfields_insert ( (x)->vol_aspect_ratio, val, 21, 0x3)
+ #define viddec_fw_mp4_vol_get_control_param(x) viddec_fw_bitfields_extract( (x)->vol_aspect_ratio, 20, 0x1)
+ #define viddec_fw_mp4_vol_set_control_param(x, val) viddec_fw_bitfields_insert ( (x)->vol_aspect_ratio, val, 20, 0x1)
+ #define viddec_fw_mp4_vol_get_par_height(x) viddec_fw_bitfields_extract( (x)->vol_aspect_ratio, 12, 0xFF)
+ #define viddec_fw_mp4_vol_set_par_height(x, val) viddec_fw_bitfields_insert ( (x)->vol_aspect_ratio, val, 12, 0xFF)
+ #define viddec_fw_mp4_vol_get_par_width(x) viddec_fw_bitfields_extract( (x)->vol_aspect_ratio, 4, 0xFF)
+ #define viddec_fw_mp4_vol_set_par_width(x, val) viddec_fw_bitfields_insert ( (x)->vol_aspect_ratio, val, 4, 0xFF)
+ #define viddec_fw_mp4_vol_get_aspect_ratio_info(x) viddec_fw_bitfields_extract( (x)->vol_aspect_ratio, 0, 0xF)
+ #define viddec_fw_mp4_vol_set_aspect_ratio_info(x, val) viddec_fw_bitfields_insert ( (x)->vol_aspect_ratio, val, 0, 0xF)
+ unsigned int vol_aspect_ratio;
+
+ // Video Object Layer(From LSB):
+ // - vbv_parameters - 1b
+ // - bit_rate - 30b
+ #define viddec_fw_mp4_vol_get_bit_rate(x) viddec_fw_bitfields_extract( (x)->vol_bit_rate, 1, 0x3FFFFFFF)
+ #define viddec_fw_mp4_vol_set_bit_rate(x, val) viddec_fw_bitfields_insert ( (x)->vol_bit_rate, val, 1, 0x3FFFFFFF)
+ #define viddec_fw_mp4_vol_get_vbv_param(x) viddec_fw_bitfields_extract( (x)->vol_bit_rate, 0, 0x1)
+ #define viddec_fw_mp4_vol_set_vbv_param(x, val) viddec_fw_bitfields_insert ( (x)->vol_bit_rate, val, 0, 0x1)
+ unsigned int vol_bit_rate;
+
+ // Video Object Layer(From LSB):
+ // - fixed_vop_time_increment - 16b
+ // - vop_time_increment_resolution - 16b
+ #define viddec_fw_mp4_vol_get_vop_time_increment_resolution(x) viddec_fw_bitfields_extract((x)->vol_frame_rate, 16, 0xFFFF)
+ #define viddec_fw_mp4_vol_set_vop_time_increment_resolution(x, val) viddec_fw_bitfields_insert((x)->vol_frame_rate, val, 16, 0xFFFF)
+ #define viddec_fw_mp4_vol_get_fixed_vop_time_increment(x) viddec_fw_bitfields_extract((x)->vol_frame_rate, 1, 0xFFFF)
+ #define viddec_fw_mp4_vol_set_fixed_vop_time_increment(x, val) viddec_fw_bitfields_insert((x)->vol_frame_rate, val, 1, 0xFFFF)
+ unsigned int vol_frame_rate;
+ } mp4_vol; // mp4 item of type VIDDEC_WORKLOAD_MPEG4_VIDEO_OBJ
+
+ struct
+ {
+ // Group of Video Object Planes(From LSB):
+ // - time_code - 18b
+ // - closed_gov - 1b
+ // - broken_link - 1b
+ #define viddec_fw_mp4_gvop_get_broken_link(x) viddec_fw_bitfields_extract((x)->gvop_info, 19, 0x1)
+ #define viddec_fw_mp4_gvop_set_broken_link(x, val) viddec_fw_bitfields_insert((x)->gvop_info, val, 19, 0x1)
+ #define viddec_fw_mp4_gvop_get_closed_gov(x) viddec_fw_bitfields_extract((x)->gvop_info, 18, 0x1)
+ #define viddec_fw_mp4_gvop_set_closed_gov(x, val) viddec_fw_bitfields_insert((x)->gvop_info, val, 18, 0x1)
+ #define viddec_fw_mp4_gvop_get_time_code(x) viddec_fw_bitfields_extract((x)->gvop_info, 0, 0x3FFFF)
+ #define viddec_fw_mp4_gvop_set_time_code(x, val) viddec_fw_bitfields_insert((x)->gvop_info, val, 0, 0x3FFFF)
+ unsigned int gvop_info;
+
+ unsigned int pad1;
+ unsigned int pad2;
+ } mp4_gvop; // mp4 item of type VIDDEC_WORKLOAD_MPEG4_GRP_VIDEO_OBJ
+
+ struct
+ {
+ // Group of Video Object Planes(From LSB):
+ // - source_format - 3b
+ #define viddec_fw_mp4_vpsh_get_source_format(x) viddec_fw_bitfields_extract((x)->info, 0, 0x7)
+ #define viddec_fw_mp4_vpsh_set_source_format(x, val) viddec_fw_bitfields_insert((x)->info, val, 0, 0x7)
+ unsigned int info;
+
+ unsigned int pad1;
+ unsigned int pad2;
+ } mp4_vpsh; // mp4 item of type VIDDEC_WORKLOAD_MPEG4_VIDEO_PLANE_SHORT
+
+ unsigned int vwi_payload[3];
+ };
+}viddec_workload_item_t;
+
+#endif /* VIDDEC_ITEM_TYPES_H */
diff --git a/mix_vbp/viddec_fw/include/viddec_fw_parser_host.h b/mix_vbp/viddec_fw/include/viddec_fw_parser_host.h
new file mode 100644
index 0000000..6d26555
--- /dev/null
+++ b/mix_vbp/viddec_fw/include/viddec_fw_parser_host.h
@@ -0,0 +1,237 @@
+/*
+ This file is provided under a dual BSD/GPLv2 license. When using or
+ redistributing this file, you may do so under either license.
+
+ GPL LICENSE SUMMARY
+
+ Copyright(c) 2007-2009 Intel Corporation. All rights reserved.
+
+ This program is free software; you can redistribute it and/or modify
+ it under the terms of version 2 of the GNU General Public License as
+ published by the Free Software Foundation.
+
+ This program is distributed in the hope that it will be useful, but
+ WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program; if not, write to the Free Software
+ Foundation, Inc., 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA.
+ The full GNU General Public License is included in this distribution
+ in the file called LICENSE.GPL.
+
+ Contact Information:
+
+ BSD LICENSE
+
+ Copyright(c) 2007-2009 Intel Corporation. All rights reserved.
+ All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in
+ the documentation and/or other materials provided with the
+ distribution.
+ * Neither the name of Intel Corporation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+*/
+
+#ifndef VIDDEC_FW_PARSER_HOST_H
+#define VIDDEC_FW_PARSER_HOST_H
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+#include "viddec_fw_common_defs.h"
+
+/** @weakgroup viddec Fw Parser interface Functions */
+/** @ingroup viddec_fw_parser */
+/*@{*/
+
+/**
+ This function returns the size required for loading fw.
+ @retval size : Required size.
+*/
+ uint32_t viddec_fw_parser_query_fwsize(void);
+
+/**
+ This function loads Parser Firmware and initialises necessary state information. This is a synchronous message to FW.
+ @param[in] phys : Physical address on where firmware should be loaded.
+ @param[in] len : Length of data allocated at phys.
+ @retval VIDDEC_FW_SUCCESS : Successfully loaded firmware.
+ @retval VIDDEC_FW_FAILURE : Failed to communicate with firmware.
+ @retval VIDDEC_FW_NORESOURCES : Failed to allocate resources for Loading firmware.
+ @retval VIDDEC_FW_INVALID_PARAM: The input parameters are not valid.
+*/
+ uint32_t viddec_fw_parser_loadfw(uint32_t phys, uint32_t len);
+
+/**
+ This function returns the size required for opening a stream. This is a synchronous message to FW.
+ @param[in] codec_type : Type of codec that we want information about.
+ @param[out] num_wklds : Number of wklds required for initialisation.
+ @param[out] size : Size of memory required for opening a stream.
+*/
+ void viddec_fw_parser_query_streamsize(uint32_t codec_type, uint32_t *num_wklds, uint32_t *size);
+
+/**
+ This function opens the requested codec. This is a synchronous message to FW.
+ @param[in] codec_type : Type of codec that we want to open.
+ @param[in] phys : Physical address of allocated memory for this codec.
+ @param[in] priority : Priority of stream. 1 for realtime and 0 for background.
+ @param[out] strm_handle : Handle of the opened stream.
+ @retval VIDDEC_FW_SUCCESS : Successfully Opened the stream.
+ @retval VIDDEC_FW_FAILURE : Failed to Open a stream.
+ @retval VIDDEC_FW_NORESOURCES : Failed to Open a stream as we are out of resources.
+*/
+ uint32_t viddec_fw_parser_openstream(uint32_t codec_type, uint32_t *strm_handle, uint32_t phys, uint32_t priority);
+
+/**
+ This function closes a stream. This is a synchronous message to FW.
+ For the close stream to be effective, host has to do flush with discard first and then close the stream.
+ @param[in] strm_handle : Handle of the stream to close.
+*/
+ void viddec_fw_parser_closestream(uint32_t strm_handle);
+
+/**
+ This function flushes the current stream. This is a synchronous message to FW.
+ Before calling this function the host has to make sure the output queue of the firmware
+ is empty. After this function is executed the FW will read all entries in input
+ es buffer queue into a free or partial workload and push it into output queue.
+ After this operation the host has to read all entries in output queue again to
+ finish the flush operation.
+ @param[in] flush_type : Type of flush we want to perform, e.g. flush and discard.
+ @param[in] strm_handle : Handle of the stream we want to flush.
+ @retval VIDDEC_FW_SUCCESS : Successfully flushed the stream.
+ @retval VIDDEC_FW_INVALID_PARAM: The input parameters are not valid.
+ @retval VIDDEC_FW_NEED_FREE_WKLD : Failed to flush since a free wkld was not available.
+*/
+ uint32_t viddec_fw_parser_flushstream(uint32_t strm_handle, uint32_t flush_type);
+
+/**
+ This function sends an input es buffer.
+ @param[in] strm_handle : The handle of stream that we want to send es buffer to.
+ @param[in] message : The es buffer we want to send.
+ @retval VIDDEC_FW_SUCCESS : Successfully Sent the message.
+ @retval VIDDEC_FW_PORT_FULL : Port to FW is full; unsuccessful in sending message.
+ @retval VIDDEC_FW_INVALID_PARAM: The input parameters are not valid.
+*/
+ uint32_t viddec_fw_parser_send(uint32_t strm_handle, ipc_msg_data *message);
+
+/**
+ This function gets the next processed workload. The host is required to add free workloads
+ to keep the parser busy. The FW will stall when it doesn't have enough workloads(2) to continue.
+ @param[in] strm_handle : The handle of stream that we want to read workload from.
+ @param[out] message : The workload descriptor.
+ @retval VIDDEC_FW_SUCCESS : Successfully Sent the message.
+ @retval VIDDEC_FW_PORT_EMPTY : Workload port is empty; unsuccessful in reading wkld.
+ @retval VIDDEC_FW_INVALID_PARAM: The input parameters are not valid.
+*/
+ uint32_t viddec_fw_parser_recv(uint32_t strm_handle, ipc_msg_data *message);
+
+/**
+ This function adds a free workload to current stream.
+ @param[in] strm_handle : The handle of stream that we want to write workload to.
+ @param[out] message : The workload descriptor.
+ @retval VIDDEC_FW_SUCCESS : Successfully Sent the message.
+ @retval VIDDEC_FW_PORT_FULL : Workload port is full; unsuccessful in writing wkld.
+ @retval VIDDEC_FW_INVALID_PARAM: The input parameters are not valid.
+*/
+ uint32_t viddec_fw_parser_addwkld(uint32_t strm_handle, ipc_msg_data *message);
+
+/**
+ This function enables or disables Interrupts for a stream. By default the FW will always enable interrupts.
+ The driver can disable/enable Interrupts if it needs for this particular stream.
+
+ @param[in] strm_handle : The handle of stream that we want to get mask from
+ @param[in] mask : This is read as boolean variable, true to enable, false to disable.
+ @retval VIDDEC_FW_SUCCESS : Successfully set mask.
+ @retval VIDDEC_FW_INVALID_PARAM: The input parameters are not valid.
+*/
+ uint32_t viddec_fw_parser_set_interruptmask(uint32_t strm_handle, uint32_t mask);
+/**
+ This function gets the interrupt status for current stream.
+ When the host gets interrupted, since it's a global interrupt it's expected that the host will look at all active streams,
+ by calling this function. The status is what the FW thinks the current state of stream is. The status information that
+ FW provides is complete information on all possible events that are defined. The host should only access this information
+ in its ISR at which state FW doesn't modify this information.
+
+ @param[in] strm_handle : The handle of stream that we want to get mask from
+ @param[out] status : The status of the stream based on viddec_fw_parser_int_status_t enum.
+ @retval VIDDEC_FW_SUCCESS : Successfully in reading status.
+ @retval VIDDEC_FW_INVALID_PARAM: The input parameters are not valid.
+*/
+ uint32_t viddec_fw_parser_getstatus(uint32_t strm_handle, uint32_t *status);
+
+/**
+ This function allows to set stream attributes that are supported.
+ @param[in] strm_handle : The handle of stream that we want to set attribute on.
+ @param[in] type : The type of attribute we want to set, this should be one of items in viddec_fw_stream_attributes_t.
+ @param[in] value : The value of the type that we want to set.
+ @retval VIDDEC_FW_SUCCESS : Successfully Set the attribute.
+ @retval VIDDEC_FW_INVALID_PARAM: The input parameters are not valid.
+*/
+ uint32_t viddec_fw_parser_set_stream_attributes(uint32_t strm_handle, uint32_t type, uint32_t value);
+
+/**
+ This function allows to get current status of all the parser queues. If the current stream is active we return
+ number of input messages that can be written to the input queue, number of messages in the output queue and number of
+ free available workloads the stream has.
+ Normally this is called when Host receives an interrupt from parser, In which case before releasing the INT
+ Host will try its best to keep the FW busy. We always get a interrupt if we passed the watermark on input or
+ a workload was pushed into output and INT line is free. If host holds onto INT when firmware tries to send an INT
+ FW would send the Interrupt after host releases INT. Since we have EDGE triggered interrupts we cannot guarantee
+ one interrupt per frame, ex: If three frames are generated and after the first frame FW was able to provide an INT
+ to host, but host held on to INT while the FW finished the next two frames, after host releases the INT the FW will
+ give only one INT and host should try to empty output queue.
+ @param[in] strm_handle : The handle of stream that we want to get status of queues.
+ @param[out] status : The status of each queue gets updated in here.
+ @retval VIDDEC_FW_SUCCESS : Successfully Got the status information.
+ @retval VIDDEC_FW_INVALID_PARAM: Invalid parameter in this case an inactive stream.
+*/
+ uint32_t viddec_fw_parser_get_queue_status(uint32_t strm_handle, viddec_fw_q_status_t *status);
+
+/**
+ This function unloads Parser Firmware and frees the resources allocated in Load fw.
+ If this function is called before load fw it will crash with a segmentation fault.
+*/
+ void viddec_fw_parser_deinit(void);
+
+/**
+ This function gets the major and minor revision numbers of the loaded firmware.
+ @param[out] major : The major revision number.
+ @param[out] minor : The minor revision number.
+ @param[out] build : The Internal Build number.
+*/
+ void viddec_fw_parser_get_version_number(unsigned int *major, unsigned int *minor, unsigned int *build);
+
+/**
+ This function clears the global interrupt. This is the last thing host calls before exiting ISR.
+*/
+ void viddec_fw_parser_clear_global_interrupt(void);
+
+/*@}*/
+#ifdef __cplusplus
+}
+#endif
+
+#endif//#ifndef VIDDEC_FW_PARSER_HOST_H
diff --git a/mix_vbp/viddec_fw/include/viddec_fw_workload.h b/mix_vbp/viddec_fw/include/viddec_fw_workload.h
new file mode 100644
index 0000000..73c5ab3
--- /dev/null
+++ b/mix_vbp/viddec_fw/include/viddec_fw_workload.h
@@ -0,0 +1,152 @@
+/*
+ This file is provided under a dual BSD/GPLv2 license. When using or
+ redistributing this file, you may do so under either license.
+
+ GPL LICENSE SUMMARY
+
+ Copyright(c) 2007-2009 Intel Corporation. All rights reserved.
+
+ This program is free software; you can redistribute it and/or modify
+ it under the terms of version 2 of the GNU General Public License as
+ published by the Free Software Foundation.
+
+ This program is distributed in the hope that it will be useful, but
+ WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program; if not, write to the Free Software
+ Foundation, Inc., 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA.
+ The full GNU General Public License is included in this distribution
+ in the file called LICENSE.GPL.
+
+ Contact Information:
+
+ BSD LICENSE
+
+ Copyright(c) 2007-2009 Intel Corporation. All rights reserved.
+ All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in
+ the documentation and/or other materials provided with the
+ distribution.
+ * Neither the name of Intel Corporation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+*/
+#ifndef VIDDEC_FW_WORKLOAD_H
+#define VIDDEC_FW_WORKLOAD_H
+
+#include <stdint.h>
+#include "viddec_fw_item_types.h"
+#include "viddec_fw_frame_attr.h"
+#include "viddec_fw_common_defs.h"
+
+#define VIDDEC_WORKLOAD_FLAGS_ES_START_FRAME (1 << 0)
+#define VIDDEC_WORKLOAD_FLAGS_ES_START_SLICE (1 << 1)
+#define VIDDEC_WORKLOAD_FLAGS_ES_END_SLICE (1 << 2)
+#define VIDDEC_WORKLOAD_FLAGS_ES_END_FRAME (1 << 3)
+
+#define VIDDEC_FRAME_REFERENCE_IS_VALID (0x1<<1)
+// PIP Output Frame request bits
+#define BLSB_VIDDEC_FRAME_REFERENCE_PIP_MODE 24
+#define BMSK_VIDDEC_FRAME_REFERENCE_PIP_MODE (0x3<<BLSB_VIDDEC_FRAME_REFERENCE_PIP_MODE)
+#define VIDDEC_FRAME_REFERENCE_PIP_MODE_NORMAL 0x0
+#define VIDDEC_FRAME_REFERENCE_PIP_MODE_W_HALF 0x1
+#define VIDDEC_FRAME_REFERENCE_PIP_MODE_W_QUARTER 0x2
+
+/** Frame reference information to pass to video decoder when performing a workload (frame decode) */
+typedef struct viddec_frame_reference
+{
+ signed int driver_frame_id;
+ unsigned int luma_phys_addr;
+ unsigned int chroma_phys_addr;
+ int internal_id; /* Used by workload manager only */
+}viddec_frame_reference_t;
+
+#define WORKLOAD_REFERENCE_FRAME (1 << 16)
+#define WORKLOAD_SKIPPED_FRAME (1 << 17)
+/**
+Bitmask to indicate that this workload has range adjustment and needs a range_adjusted_out buffer for successful decode.
+Will be used for VC1 only.
+*/
+#define WORKLOAD_FLAGS_RA_FRAME (1 << 21)
+#define WORKLOAD_REFERENCE_FRAME_BMASK 0x000000ff
+
+/** This structure contains all the information required to fully decode one frame of data */
+/**
+ num_error_mb: This field is populated at the output of the decoder.
+ Currently, its valid only for MPEG2.
+ For other codecs, it defaults to 0.
+
+ range_adjusted_out: Frame buffer needed to store range adjusted frames for VC1 only.
+ Range adjustment in VC1 requires that the luma/chroma values in the decoded frame be modified
+ before the frame can be displayed. In this case, we need a new frame buffer to store the adjusted values.
+ The parser will indicate this requirement by setting the WORKLOAD_FLAGS_RA_FRAME bit in the
+ is_reference_frame of the workload. The decoder expects this field to be valid when range adjustment
+ is indicated and populates this frame buffer along with frame_out.
+
+ Expectation from user:
+ Before feeding workload to the decoder, do the following:
+ If pip is indicated/needed,
+ provide the pip_out buffer
+ If range adjustment is indicated (WORKLOAD_FLAGS_RA_FRAME bit in is_reference_frame is set),
+ provide range_adjusted_out buffer
+ Provide frame_out buffer.
+
+ After workload is returned from the decoder, do the following:
+ If pip is indicated,
+ display the pip_out buffer
+ Else If range adjustment is indicated,
+ display range_adjusted_out buffer
+ Else
+ display frame_out buffer.
+*/
+typedef struct viddec_workload
+{
+ enum viddec_stream_format codec;
+ signed int is_reference_frame;
+ unsigned int result;
+ unsigned int time;
+ unsigned int num_items;/* number of viddec_workload_item_t in current workload */
+ unsigned int num_error_mb; /* Number of error macroblocks in the current picture. */
+ viddec_frame_attributes_t attrs;
+
+ viddec_frame_reference_t frame_out; /* output frame */
+ viddec_frame_reference_t range_adjusted_out; /* for VC1 only */
+ viddec_frame_reference_t pip_out; /* PIP Buffer */
+
+ /* Alignment is needed because the packing different between host and vSparc */
+ __attribute__ ((aligned (16))) viddec_workload_item_t item[1];
+
+ /* ------------------------------------------------------ */
+ /* ------------------------------------------------------ */
+ /* ------------------------------------------------------ */
+ /* This structure is ALLOC_EXTENDED with workload_items */
+ /* ------------------------------------------------------ */
+ /* ------------------------------------------------------ */
+ /* ------------------------------------------------------ */
+} viddec_workload_t;
+
+#endif /* VIDDEC_FW_WORKLOAD_H */
diff --git a/mix_video/AUTHORS b/mix_video/AUTHORS
new file mode 100644
index 0000000..db8081b
--- /dev/null
+++ b/mix_video/AUTHORS
@@ -0,0 +1 @@
+linda.s.cline@intel.com
diff --git a/mix_video/COPYING b/mix_video/COPYING
new file mode 100644
index 0000000..a4f852c
--- /dev/null
+++ b/mix_video/COPYING
@@ -0,0 +1,26 @@
+INTEL SOFTWARE LICENSE AGREEMENT (Alpha, Beta, Prototype Site License)
+
+IMPORTANT - READ BEFORE COPYING, INSTALLING OR USING.
+Do not use or load this software and any associated materials (collectively, the “Software”) until you have carefully read the following terms and conditions. By loading or using the Software, you agree to the terms of this Agreement. If you do not wish to so agree, do not install or use the Software.
+
+
+LICENSE. Intel hereby grants you a limited, nontransferable, non-sublicenseable, nonexclusive, royalty-free, fully-paid license under Intel’s copyrights to use the Software on your organization’s computers solely for your organization’s internal evaluation and testing in connection with Intel products designed for the Software, and you may make a reasonable number of  copies of the Software for internal use only, subject to these conditions:
+1. You may not copy, modify, rent, sell, distribute, externally display, externally perform or transfer any part of the Software except as provided in this Agreement, and you agree to prevent unauthorized copying of the Software.
+2. You may not reverse engineer, decompile, or disassemble the Software.
+3. You may not sublicense the Software.
+4. The Software may include portions offered on terms in addition to those set out here, as set out in a license accompanying those portions.
+5. You may not subject the Software, in whole or in part, to any license obligations of Open Source Software including without limitation combining or distributing the Software with Open Source Software in a manner that subjects the Software or any portion of the Software provided by Intel hereunder to any license obligations of such Open Source Software. "Open Source Software" means any software that requires as a condition of use, modification and/or distribution of such software that such software or other software incorporated into, derived from or distributed with such software (a) be disclosed or distributed in source code form; or (b) be licensed by the user to third parties for the purpose of making and/or distributing derivative works; or (c) be redistributable at no charge. Open Source Software includes, without limitation, software licensed or distributed under any of the following licenses or distribution models, or licenses or distribution models substantially similar to any of the following: (a) GNU’s General Public License (GPL) or Lesser/Library GPL (LGPL), (b) the Artistic License (e.g., PERL), (c) the Mozilla Public License, (d) the Netscape Public License, (e) the Sun Community Source License (SCSL), (f) the Sun Industry Source License (SISL), (g) the Apache Software license and (h) the Common Public License (CPL).
+OWNERSHIP OF SOFTWARE AND COPYRIGHTS. Title to all copies of the Software remains with Intel or its suppliers. The Software is copyrighted and protected by the laws of the United States and other countries, and international treaty provisions. You may not remove any copyright notices from the Software. Intel may make changes to the Software, or to items referenced therein, at any time without notice, but is not obligated to support, update, upgrade or provide training for the Software. Except as otherwise expressly provided, Intel grants no express or implied right under Intel patents, copyrights, trademarks, or other intellectual property rights.
+EXCLUSION OF WARRANTIES . THE SOFTWARE IS PROVIDED "AS IS" WITHOUT ANY EXPRESS OR IMPLIED WARRANTY OF ANY KIND INCLUDING WARRANTIES OF MERCHANTABILITY, NONINFRINGEMENT, OR FITNESS FOR A PARTICULAR PURPOSE. Intel does not warrant or assume responsibility for the accuracy or completeness of any information, text, graphics, links or other items contained within the Software.
+LIMITATION OF LIABILITY. IN NO EVENT SHALL INTEL OR ITS SUPPLIERS BE LIABLE FOR ANY DAMAGES WHATSOEVER (INCLUDING, WITHOUT LIMITATION, LOST PROFITS, BUSINESS INTERRUPTION OR LOST INFORMATION) ARISING OUT OF THE USE OF OR INABILITY TO USE THE SOFTWARE, EVEN IF INTEL HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. SOME JURISDICTIONS PROHIBIT EXCLUSION OR LIMITATION OF LIABILITY FOR IMPLIED WARRANTIES OR CONSEQUENTIAL OR INCIDENTAL DAMAGES, SO THE ABOVE LIMITATION MAY NOT APPLY TO YOU. YOU MAY ALSO HAVE OTHER LEGAL RIGHTS THAT VARY FROM JURISDICTION TO JURISDICTION. THE SOFTWARE LICENSED HEREUNDER IS NOT DESIGNED OR INTENDED FOR USE IN ANY APPLICATION IN WHICH THE FAILURE OF THE SOFTWARE COULD LEAD TO PERSONAL INJURY OR DEATH. YOU SHALL INDEMNIFY AND HOLD INTEL AND THE INTEL PARTIES HARMLESS AGAINST ALL CLAIMS, COSTS, DAMAGES, AND EXPENSES, AND REASONABLE ATTORNEY FEES ARISING OUT OF, DIRECTLY OR INDIRECTLY, THE UNINTENDED USE OF THE SOFTWARE AND ANY CLAIM OF PRODUCT LIABILITY, PERSONAL INJURY OR DEATH ASSOCIATED WITH ANY UNINTENDED USE, EVEN IF SUCH CLAIM ALLEGES THAT AN INTEL PARTY WAS NEGLIGENT REGARDING THE DESIGN OR MANUFACTURE OF THE SOFTWARE. THE LIMITED REMEDIES, WARRANTY DISCLAIMER AND LIMITED LIABILITY ARE FUNDAMENTAL ELEMENTS OF THE BASIS OF THE BARGAIN BETWEEN INTEL AND YOU. INTEL WOULD NOT BE ABLE TO PROVIDE THE SOFTWARE WITHOUT SUCH LIMITATIONS.
+TERMINATION OF THIS AGREEMENT. Intel may terminate this Agreement at any time if you violate its terms. Upon termination, you will immediately destroy the Software or return all copies of the Software to Intel (including providing certification of such destruction back to Intel). In the event of termination of this Agreement, all licenses granted to you hereunder shall immediately terminate.
+APPLICABLE LAWS. Claims arising under this Agreement shall be governed by the laws of Delaware, excluding its principles of conflict of laws and the United Nations Convention on Contracts for the Sale of Goods. You may not export the Software in violation of applicable export laws and regulations.
+GOVERNMENT RESTRICTED RIGHTS. The Software is provided with "RESTRICTED RIGHTS." Use, duplication or disclosure by the government is subject to restrictions as set forth in FAR52.227-14 and DFAR252.227-7013 et seq. or their successors. Use of the Software by the government constitutes acknowledgment of Intel's proprietary rights therein. Contractor or Manufacturer is Intel Corporation, 2200 Mission College Blvd., Santa Clara, CA 95052.
+CONFIDENTIALITY. You shall not disclose the terms or existence of this Agreement or use Intel's name in any publications, advertisements, or other announcements without Intel's prior written consent. You do not have any rights to use any Intel trademarks or logos.
+ASSIGNMENT. You may not delegate, assign or transfer this Agreement, the license(s) granted or any of your rights or duties hereunder, expressly, by implication, by operation of law, by way of merger (regardless of whether you are the surviving entity) or acquisition, or otherwise and any attempt to do so, without Intel’s express prior written consent, shall be null and void. Intel may assign this Agreement, and its rights and obligations hereunder, in its sole discretion.
+ENTIRE AGREEMENT. The terms and conditions of this Agreement constitutes the entire agreement between the parties with respect to the subject matter hereof, and merges and supersedes all prior, contemporaneous agreements, understandings, negotiations and discussions. Neither of the parties hereto shall be bound by any conditions, definitions, warranties, understandings or representations with respect to the subject matter hereof other than as expressly provided for herein. Intel is not obligated under any other agreements unless they are in writing and signed by an authorized representative of Intel.
+NO AGENCY Nothing contained herein shall be construed as creating any agency, employment relationship, partnership, principal-agent or other form of joint enterprise between the parties.
+SEVERABILITY In the event that any provision of this Agreement shall be unenforceable or invalid under any applicable law or be so held by an applicable court decision, such unenforceability or invalidity shall not render this Agreement unenforceable or invalid as a whole, and, in such event, such provision shall be changed and interpreted so as to best accomplish the objectives of such unenforceable or invalid provision within the limits of applicable law or applicable court decisions.
+WAIVER The failure of either party to require performance by the other party of any provision hereof shall not affect the full right to require such performance at any time thereafter; nor shall the waiver by either party of a breach of any provision hereof be taken or held to be a waiver of the provision itself.
+CONTROLLING LANGUAGE. Translated versions of this Agreement may be provided in local languages for informational purposes only, provided however, that the English language version of this Agreement shall be controlling in all jurisdictions.
+
diff --git a/mix_video/ChangeLog b/mix_video/ChangeLog
new file mode 100644
index 0000000..139597f
--- /dev/null
+++ b/mix_video/ChangeLog
@@ -0,0 +1,2 @@
+
+
diff --git a/mix_video/INSTALL b/mix_video/INSTALL
new file mode 100644
index 0000000..50e1648
--- /dev/null
+++ b/mix_video/INSTALL
@@ -0,0 +1,4 @@
+run the following to build and install:
+./autogen.sh
+./configure
+make
diff --git a/mix_video/Makefile.am b/mix_video/Makefile.am
new file mode 100644
index 0000000..c6339cf
--- /dev/null
+++ b/mix_video/Makefile.am
@@ -0,0 +1,9 @@
+SUBDIRS = src
+
+#Uncomment the following line if building documentation using gtkdoc
+#SUBDIRS += docs
+
+pkgconfigdir = $(libdir)/pkgconfig
+pkgconfig_DATA=mixvideo.pc mixvideoint.pc
+EXTRA_DIST = autogen.sh mixvideo.spec
+DISTCHECK_CONFIGURE_FLAGS=--enable-gtk-doc
diff --git a/mix_video/NEWS b/mix_video/NEWS
new file mode 100644
index 0000000..139597f
--- /dev/null
+++ b/mix_video/NEWS
@@ -0,0 +1,2 @@
+
+
diff --git a/mix_video/README b/mix_video/README
new file mode 100644
index 0000000..2bcf017
--- /dev/null
+++ b/mix_video/README
@@ -0,0 +1,2 @@
+MIX Video is a user library interface for various hardware video codecs available on the platform.
+
diff --git a/mix_video/autogen.sh b/mix_video/autogen.sh
new file mode 100644
index 0000000..558a695
--- /dev/null
+++ b/mix_video/autogen.sh
@@ -0,0 +1,19 @@
+#INTEL CONFIDENTIAL
+#Copyright 2009 Intel Corporation All Rights Reserved.
+#The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+#No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+#
+
+package=MixVideo
+
+#Uncomment the following line if building documentation using gtkdoc
+#gtkdocize --flavour no-tmpl || exit 1
+aclocal -I m4/ $ACLOCAL_FLAGS || exit 1
+libtoolize --copy --force || exit 1
+autoheader -v || exit 1
+autoconf -v || exit 1
+automake -a -c -v || exit 1
+
+echo "Now type ./configure to configure $package."
+exit 0
diff --git a/mix_video/configure.ac b/mix_video/configure.ac
new file mode 100644
index 0000000..8605a92
--- /dev/null
+++ b/mix_video/configure.ac
@@ -0,0 +1,137 @@
+AC_INIT("", "", [linda.s.cline@intel.com])
+
+AC_CONFIG_MACRO_DIR(m4)
+
+AS_MIX_VERSION(mixvideo, MIXVIDEO, 0, 1, 14)
+
+dnl AM_MAINTAINER_MODE provides the option to enable maintainer mode
+AM_MAINTAINER_MODE
+
+AM_INIT_AUTOMAKE($PACKAGE, $VERSION)
+#AM_INIT_AUTOMAKE([-Wall -Werror foreign])
+
+dnl make aclocal work in maintainer mode
+AC_SUBST(ACLOCAL_AMFLAGS, "-I m4")
+
+AM_CONFIG_HEADER(config.h)
+
+dnl check for tools
+AC_PROG_CC
+AC_PROG_LIBTOOL
+
+#MIX_CFLAGS="-Wall -Werror"
+MIX_CFLAGS="-Wall"
+
+AC_ARG_ENABLE(mixlog,
+ AS_HELP_STRING([--enable-mixlog],
+ [Enable mixlog (default=no)]),
+ [mixlog_enabled=$enableval],
+ [mixlog_enabled=no])
+AC_MSG_RESULT($mixlog_enabled)
+AM_CONDITIONAL(MIXLOG_ENABLED, test "$mixlog_enabled" = "yes")
+
+
+dnl decide on error flags
+dnl AS_COMPILER_FLAG(-Wall, MIX_ERROR="$MIX_ERROR -Wall", MIX_ERROR="$MIX_ERROR")
+dnl AS_COMPILER_FLAG(-Werror,MIX_ERROR="$MIX_ERROR -Werror",MIX_ERROR="$MIX_ERROR")
+
+dnl Check for pkgconfig first
+AC_CHECK_PROG(HAVE_PKGCONFIG, pkg-config, yes, no)
+
+dnl Give error and exit if we don't have pkgconfig
+if test "x$HAVE_PKGCONFIG" = "xno"; then
+ AC_MSG_ERROR(you need to have pkgconfig installed !)
+fi
+
+dnl GLib
+dnl FIXME: need to align with moblin glib version
+dnl FIXME: currently using an earlier version so it can be built on dev box.
+GLIB_REQ=2.16
+
+dnl Check for glib2 without extra fat, useful for the unversioned tool frontends
+dnl PKG_CHECK_MODULES(GLIB_ONLY, glib-2.0 >= $GLIB_REQ,HAVE_GLIB=yes,HAVE_GLIB=no)
+PKG_CHECK_MODULES(GLIB, glib-2.0 >= $GLIB_REQ,HAVE_GLIB=yes,HAVE_GLIB=no)
+if test "x$HAVE_GLIB" = "xno"; then
+ AC_MSG_ERROR(You need glib development packages installed !)
+fi
+
+PKG_CHECK_MODULES(GOBJECT, gobject-2.0 >= $GLIB_REQ,HAVE_GOBJECT=yes,HAVE_GOBJECT=no)
+if test "x$HAVE_GOBJECT" = "xno"; then
+ AC_MSG_ERROR(You need glib development packages installed !)
+fi
+
+PKG_CHECK_MODULES(GTHREAD, gthread-2.0 >= $GLIB_REQ,HAVE_GTHREAD=yes,HAVE_GTHREAD=no)
+if test "x$HAVE_GTHREAD" = "xno"; then
+ AC_MSG_ERROR(You need glib development packages installed !)
+fi
+
+LIBVA_REQ=0.31
+PKG_CHECK_MODULES(LIBVA, libva >= $LIBVA_REQ,HAVE_LIBVA=yes,HAVE_LIBVA=no)
+if test "x$HAVE_LIBVA" = "xno"; then
+ AC_MSG_ERROR(You need libva development package installed !)
+fi
+#LIBVA_CFLAGS="-I/usr/local/include"
+#LIBVA_LIBS="-lva"
+
+
+LIBVA_X11_REQ=0.31
+PKG_CHECK_MODULES(LIBVA_X11, libva-x11 >= $LIBVA_X11_REQ,HAVE_LIBVA_X11=yes,HAVE_LIBVA_X11=no)
+if test "x$HAVE_LIBVA_X11" = "xno"; then
+ AC_MSG_ERROR(You need libva development package installed !)
+fi
+#LIBVA_X11_CFLAGS="-I/usr/local/include"
+#LIBVA_X11LIBS="-lva-x11"
+
+
+MIXCOMMON_REQ=0.1
+PKG_CHECK_MODULES(MIXCOMMON, mixcommon >= $MIXCOMMON_REQ, HAVE_MIXCOMMON=yes, HAVE_MIXCOMMON=no)
+if test "x$HAVE_MIXCOMMON" = "xno"; then
+ AC_MSG_ERROR(You need mixcommon development package installed !)
+fi
+
+MIXVBP_REQ=0.1
+PKG_CHECK_MODULES(MIXVBP, mixvbp >= $MIXVBP_REQ, HAVE_MIXVBP=yes, HAVE_MIXVBP=no)
+if test "x$HAVE_MIXVBP" = "xno"; then
+ AC_MSG_ERROR(You need mixvbp development package installed !)
+fi
+
+dnl Check for documentation xrefs
+dnl GLIB_PREFIX="`$PKG_CONFIG --variable=prefix glib-2.0`"
+dnl AC_SUBST(GLIB_PREFIX)
+
+AC_SUBST(GLIB_CFLAGS)
+AC_SUBST(GLIB_LIBS)
+AC_SUBST(GOBJECT_CFLAGS)
+AC_SUBST(GOBJECT_LIBS)
+AC_SUBST(MIX_CFLAGS)
+AC_SUBST(GTHREAD_CFLAGS)
+AC_SUBST(GTHREAD_LIBS)
+AC_SUBST(LIBVA_CFLAGS)
+AC_SUBST(LIBVA_LIBS)
+AC_SUBST(MIXCOMMON_CFLAGS)
+AC_SUBST(MIXCOMMON_LIBS)
+AC_SUBST(MIXVBP_CFLAGS)
+AC_SUBST(MIXVBP_LIBS)
+
+
+#check for gtk-doc
+#GTK_DOC_CHECK(1.9)
+
+AC_CONFIG_FILES([
+mixvideo.pc
+mixvideoint.pc
+Makefile
+src/Makefile
+])
+
+dnl Additional Makefiles if we are building document with gtkdoc.
+dnl Un-comment this section to enable building of documentation.
+dnl AC_CONFIG_FILES(
+dnl docs/Makefile
+dnl docs/reference/Makefile
+#dnl docs/reference/MixVideo/Makefile
+dnl )
+
+AC_OUTPUT
+
+
diff --git a/mix_video/m4/Makefile.am b/mix_video/m4/Makefile.am
new file mode 100644
index 0000000..66381d4
--- /dev/null
+++ b/mix_video/m4/Makefile.am
@@ -0,0 +1 @@
+EXTRA_DIST +=
diff --git a/mix_video/m4/as-mix-version.m4 b/mix_video/m4/as-mix-version.m4
new file mode 100644
index 0000000..f0301b1
--- /dev/null
+++ b/mix_video/m4/as-mix-version.m4
@@ -0,0 +1,35 @@
+dnl as-mix-version.m4
+
+dnl AS_MIX_VERSION(PACKAGE, PREFIX, MAJOR, MINOR, RELEASE)
+
+dnl example
+dnl AS_MIX_VERSION(mixvideo,MIXVIDEO, 0, 3, 2,)
+dnl for a 0.3.2 release version
+
+dnl this macro
+dnl - defines [$PREFIX]_MAJOR, MINOR and REVISION, CURRENT, AGE
+dnl - defines [$PREFIX], VERSION
+dnl - AC_SUBST's all defined vars
+
+AC_DEFUN([AS_MIX_VERSION],
+[
+ PACKAGE=[$1]
+ [$2]_MAJOR=[$3]
+ [$2]_MINOR=[$4]
+ [$2]_REVISION=[$5]
+ [$2]_CURRENT=m4_eval([$3] + [$4])
+ [$2]_AGE=[$4]
+ VERSION=[$3].[$4].[$5]
+
+ AC_SUBST([$2]_MAJOR)
+ AC_SUBST([$2]_MINOR)
+ AC_SUBST([$2]_REVISION)
+ AC_SUBST([$2]_CURRENT)
+ AC_SUBST([$2]_AGE)
+
+ AC_DEFINE_UNQUOTED(PACKAGE, "$PACKAGE", [Define the package name])
+ AC_SUBST(PACKAGE)
+ AC_DEFINE_UNQUOTED(VERSION, "$VERSION", [Define the version])
+ AC_SUBST(VERSION)
+
+])
diff --git a/mix_video/mixvideo.pc.in b/mix_video/mixvideo.pc.in
new file mode 100644
index 0000000..8666d24
--- /dev/null
+++ b/mix_video/mixvideo.pc.in
@@ -0,0 +1,12 @@
+prefix=@prefix@
+exec_prefix=@exec_prefix@
+libdir=@libdir@
+includedir=@includedir@
+
+Name: MI-X Video
+Description: MI-X Video Library
+Requires: libva >= 0.30
+Version: @VERSION@
+Libs: -L${libdir} -l@PACKAGE@
+Cflags: -I${includedir}/mix
+
diff --git a/mix_video/mixvideo.spec b/mix_video/mixvideo.spec
new file mode 100644
index 0000000..7be66bb
--- /dev/null
+++ b/mix_video/mixvideo.spec
@@ -0,0 +1,67 @@
+# INTEL CONFIDENTIAL
+# Copyright 2009 Intel Corporation All Rights Reserved.
+# The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+#
+# No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+
+Summary: MIX Video
+Name: mixvideo
+Version: 0.1.14
+Release: 1
+Source0: %{name}-%{version}.tar.gz
+NoSource: 0
+License: Proprietary
+Group: System Environment/Libraries
+BuildRoot: %{_tmppath}/%{name}-root
+ExclusiveArch: i586
+Requires: glib2 , mixcommon, mixvbp
+
+%description
+MIX Video is a user library interface for various video codecs available on the platform.
+
+## devel package ##
+
+%package devel
+Summary: Libraries include files
+Group: Development/Libraries
+Requires: %{name} = %{version}, mixcommon-devel , glib2-devel, mixvbp-devel
+
+%description devel
+The %{name}-devel package contains the header files and static libraries for building applications which use %{name}.
+
+## internal devel package ##
+
+%package int-devel
+Summary: Libraries include files
+Group: Development/Libraries
+Requires: %{name} = %{version}, mixcommon-devel , glib2-devel, mixvbp-devel
+
+%description int-devel
+The %{name}-int-devel package contains the header files and static libraries for building applications which use %{name}.
+
+%prep
+%setup -q
+%build
+./autogen.sh
+./configure --prefix=%{_prefix}
+make
+%install
+make DESTDIR=$RPM_BUILD_ROOT install
+%clean
+rm -rf $RPM_BUILD_ROOT
+%files
+%defattr(-,root,root)
+%{_prefix}/lib/libmixvideo.so*
+
+%files devel
+%defattr(-,root,root)
+%{_prefix}/include/mix
+%{_prefix}/lib/*.la
+%{_prefix}/lib/pkgconfig/mixvideo.pc
+
+%files int-devel
+%defattr(-,root,root)
+%{_prefix}/include/mixvideoint
+%{_prefix}/lib/pkgconfig/mixvideoint.pc
+
+
diff --git a/mix_video/mixvideoint.pc.in b/mix_video/mixvideoint.pc.in
new file mode 100644
index 0000000..f1ff2d1
--- /dev/null
+++ b/mix_video/mixvideoint.pc.in
@@ -0,0 +1,12 @@
+prefix=@prefix@
+exec_prefix=@exec_prefix@
+libdir=@libdir@
+includedir=@includedir@
+
+Name: MI-X Video Internal
+Description: MI-X Video Library for internal development
+Requires: libva >= 0.30
+Version: @VERSION@
+Libs: -L${libdir} -l@PACKAGE@
+Cflags: -I${includedir}/mixvideoint
+
diff --git a/mix_video/src/Makefile.am b/mix_video/src/Makefile.am
new file mode 100644
index 0000000..20c601b
--- /dev/null
+++ b/mix_video/src/Makefile.am
@@ -0,0 +1,136 @@
+#INTEL CONFIDENTIAL
+#Copyright 2009 Intel Corporation All Rights Reserved.
+#The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+#No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+#
+
+lib_LTLIBRARIES = libmixvideo.la
+
+##############################################################################
+# sources used to compile
+libmixvideo_la_SOURCES = mixdisplay.c \
+ mixdrmparams.c \
+ mixvideo.c \
+ mixvideoconfigparams.c \
+ mixvideoconfigparamsdec.c \
+ mixvideoconfigparamsdec_vc1.c \
+ mixvideoconfigparamsdec_h264.c \
+ mixvideoconfigparamsdec_mp42.c \
+ mixvideoframe.c \
+ mixvideorenderparams.c \
+ mixdisplayx11.c \
+ mixvideocaps.c \
+ mixvideodecodeparams.c \
+ mixvideoinitparams.c \
+ mixvideoformat.c \
+ mixvideoformat_h264.c \
+ mixvideoformat_vc1.c \
+ mixvideoformat_mp42.c \
+ mixsurfacepool.c \
+ mixframemanager.c \
+ mixbufferpool.c \
+ mixbuffer.c \
+ mixvideoformatenc.c \
+ mixvideoformatenc_h264.c \
+ mixvideoformatenc_mpeg4.c \
+ mixvideoformatenc_preview.c \
+ mixvideoconfigparamsenc.c \
+ mixvideoconfigparamsenc_h264.c \
+ mixvideoconfigparamsenc_mpeg4.c \
+ mixvideoconfigparamsenc_preview.c \
+ mixvideoencodeparams.c
+
+if MIXLOG_ENABLED
+MIXLOG_CFLAGS = -DMIX_LOG_ENABLE
+else
+MIXLOG_CFLAGS =
+endif
+
+
+# flags used to compile this plugin
+# add other _CFLAGS and _LIBS as needed
+libmixvideo_la_CFLAGS = $(GLIB_CFLAGS) \
+ $(MIX_CFLAGS) \
+ $(MIXLOG_CFLAGS) \
+ $(GOBJECT_CFLAGS) \
+ $(GTHREAD_CFLAGS) \
+ $(LIBVA_CFLAGS) \
+ $(LIBVA_X11_CFLAGS) \
+ $(MIXCOMMON_CFLAGS) \
+ $(MIXVBP_CFLAGS) \
+ -DMIXVIDEO_CURRENT=@MIXVIDEO_CURRENT@ \
+ -DMIXVIDEO_AGE=@MIXVIDEO_AGE@ \
+ -DMIXVIDEO_REVISION=@MIXVIDEO_REVISION@
+
+libmixvideo_la_LIBADD = $(GLIB_LIBS) \
+ $(GOBJECT_LIBS) \
+ $(GTHREAD_LIBS) \
+ $(LIBVA_LIBS) \
+ $(LIBVA_X11_LIBS) \
+ $(MIXCOMMON_LIBS) \
+ $(MIXVBP_LIBS)
+
+libmixvideo_la_LDFLAGS = $(GLIB_LIBS) \
+ $(GOBJECT_LIBS) \
+ $(GTHREAD_LIBS) \
+ $(LIBVA_LIBS) \
+ $(LIBVA_X11_LIBS) \
+ $(MIXCOMMON_LIBS) \
+ $(MIXVBP_LIBS) \
+ -version-info @MIXVIDEO_CURRENT@:@MIXVIDEO_REVISION@:@MIXVIDEO_AGE@
+
+libmixvideo_la_LIBTOOLFLAGS = --tag=disable-static
+
+# headers we need but don't want installed
+noinst_HEADERS = mixvideoformat.h \
+ mixvideoformat_h264.h \
+ mixvideoformat_vc1.h \
+ mixvideoformat_mp42.h \
+ mixsurfacepool.h \
+ mixvideoframe_private.h \
+ mixbuffer_private.h \
+ mixframemanager.h \
+ mixbufferpool.h \
+ mixvideoformatqueue.h \
+ mixvideo_private.h \
+ mixvideorenderparams_internal.h \
+ mixvideoformatenc_h264.h \
+ mixvideoformatenc_mpeg4.h \
+ mixvideoformatenc_preview.h \
+ mixvideoformatenc.h \
+ mixvideolog.h
+
+# TODO: decide whether a /usr/include/mix is needed for mix headers
+mixincludedir=$(includedir)/mix
+mixinclude_HEADERS = mixvideodef.h \
+ mixdisplayx11.h \
+ mixvideoconfigparams.h \
+ mixvideoconfigparamsdec.h \
+ mixvideoconfigparamsdec_vc1.h \
+ mixvideoconfigparamsdec_h264.h \
+ mixvideoconfigparamsdec_mp42.h \
+ mixvideoframe.h \
+ mixvideoinitparams.h \
+ mixdisplay.h \
+ mixdrmparams.h \
+ mixvideocaps.h \
+ mixvideodecodeparams.h \
+ mixvideoencodeparams.h \
+ mixvideo.h \
+ mixvideorenderparams.h \
+ mixbuffer.h \
+ mixvideoconfigparamsenc_h264.h \
+ mixvideoconfigparamsenc_mpeg4.h \
+ mixvideoconfigparamsenc_preview.h \
+ mixvideoconfigparamsenc.h
+
+
+mixintincludedir=$(includedir)/mixvideoint
+mixintinclude_HEADERS = mixvideoformat.h \
+ mixframemanager.h \
+ mixsurfacepool.h \
+ mixbufferpool.h \
+ mixvideoformatqueue.h \
+ mixvideoframe_private.h \
+ mixvideoformat_vc1.h
diff --git a/mix_video/src/Makefile.old b/mix_video/src/Makefile.old
new file mode 100644
index 0000000..2bc3f79
--- /dev/null
+++ b/mix_video/src/Makefile.old
@@ -0,0 +1,40 @@
+INCL_CONFIG = `pkg-config --cflags glib-2.0 gobject-2.0` -g
+LIB_CONFIG = `pkg-config --libs glib-2.0 gobject-2.0` -lgthread-2.0 -g
+
+all:
+ gcc -c mixparams.c -o mixparams.o $(INCL_CONFIG)
+ gcc -c mixdrmparams.c -o mixdrmparams.o $(INCL_CONFIG)
+ gcc -c mixvideocaps.c -o mixvideocaps.o $(INCL_CONFIG)
+ gcc -c mixdisplay.c -o mixdisplay.o $(INCL_CONFIG)
+ gcc -c mixdisplayx11.c -o mixdisplayx11.o $(INCL_CONFIG)
+ gcc -c mixvideoinitparams.c -o mixvideoinitparams.o $(INCL_CONFIG)
+ gcc -c mixvideoconfigparams.c -o mixvideoconfigparams.o $(INCL_CONFIG)
+ gcc -c mixvideoconfigparams_h264.c -o mixvideoconfigparams_h264.o $(INCL_CONFIG)
+ gcc -c mixvideoconfigparams_vc1.c -o mixvideoconfigparams_vc1.o $(INCL_CONFIG)
+ gcc -c mixvideodecodeparams.c -o mixvideodecodeparams.o $(INCL_CONFIG)
+ gcc -c mixvideorenderparams.c -o mixvideorenderparams.o $(INCL_CONFIG)
+ gcc -c mixvideoframe.c -o mixvideoframe.o $(INCL_CONFIG)
+ gcc -c mixvideo.c -o mixvideo.o $(INCL_CONFIG)
+ gcc -c test.c -o test.o $(INCL_CONFIG)
+ gcc test.o \
+ mixvideo.o \
+ mixparams.o \
+ mixdrmparams.o \
+ mixvideorenderparams.o \
+ mixvideodecodeparams.o \
+ mixvideoconfigparams.o \
+ mixvideoconfigparams_vc1.o \
+ mixvideoconfigparams_h264.o \
+ mixvideoinitparams.o \
+ mixdisplay.o \
+ mixdisplayx11.o \
+ mixvideocaps.o \
+ mixvideoframe.o \
+ -o test $(LIB_CONFIG)
+
+clean:
+ rm *~
+ rm *.o
+ rm test
+
+
diff --git a/mix_video/src/mixbuffer.c b/mix_video/src/mixbuffer.c
new file mode 100644
index 0000000..6d51966
--- /dev/null
+++ b/mix_video/src/mixbuffer.c
@@ -0,0 +1,220 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+/**
+ * SECTION:mixbuffer
+ * @short_description: MI-X Video Buffer
+ *
+ * A data object which stores a pointer to compressed video input data, its size, and an optional release callback.
+ */
+
+#include "mixvideolog.h"
+#include "mixbuffer.h"
+#include "mixbuffer_private.h"
+
+#define SAFE_FREE(p) if(p) { g_free(p); p = NULL; }
+
+static GType _mix_buffer_type = 0;
+static MixParamsClass *parent_class = NULL;
+
+#define _do_init { _mix_buffer_type = g_define_type_id; }
+
+gboolean mix_buffer_copy(MixParams * target, const MixParams * src);
+MixParams *mix_buffer_dup(const MixParams * obj);
+gboolean mix_buffer_equal(MixParams * first, MixParams * second);
+static void mix_buffer_finalize(MixParams * obj);
+
+G_DEFINE_TYPE_WITH_CODE (MixBuffer, mix_buffer, MIX_TYPE_PARAMS,
+ _do_init);
+
+static void mix_buffer_init(MixBuffer * self) {
+ /* initialize properties here */
+
+ MixBufferPrivate *priv = MIX_BUFFER_GET_PRIVATE(self);
+ self->reserved = priv;
+
+ priv->pool = NULL;
+
+ self->data = NULL;
+ self->size = 0;
+ self->token = 0;
+ self->callback = NULL;
+}
+
+static void mix_buffer_class_init(MixBufferClass * klass) {
+ MixParamsClass *mixparams_class = MIX_PARAMS_CLASS(klass);
+
+ /* setup static parent class */
+ parent_class = (MixParamsClass *) g_type_class_peek_parent(klass);
+
+ mixparams_class->finalize = mix_buffer_finalize;
+ mixparams_class->copy = (MixParamsCopyFunction) mix_buffer_copy;
+ mixparams_class->dup = (MixParamsDupFunction) mix_buffer_dup;
+ mixparams_class->equal = (MixParamsEqualFunction) mix_buffer_equal;
+
+ /* Register and allocate the space the private structure for this object */
+ g_type_class_add_private(mixparams_class, sizeof(MixBufferPrivate));
+}
+
+MixBuffer *
+mix_buffer_new(void) {
+ MixBuffer *ret = (MixBuffer *) g_type_create_instance(MIX_TYPE_BUFFER);
+ return ret;
+}
+
+void mix_buffer_finalize(MixParams * obj) {
+ /* clean up here. */
+
+ /* MixBuffer *self = MIX_BUFFER(obj); */
+
+ /* Chain up parent */
+ if (parent_class->finalize) {
+ parent_class->finalize(obj);
+ }
+}
+
+MixBuffer *
+mix_buffer_ref(MixBuffer * mix) {
+ return (MixBuffer *) mix_params_ref(MIX_PARAMS(mix));
+}
+
+/**
+ * mix_buffer_dup:
+ * @obj: a #MixBuffer object
+ * @returns: a newly allocated duplicate of the object.
+ *
+ * Copy duplicate of the object.
+ */
+MixParams *
+mix_buffer_dup(const MixParams * obj) {
+ MixParams *ret = NULL;
+
+ if (MIX_IS_BUFFER(obj)) {
+ MixBuffer *duplicate = mix_buffer_new();
+ if (mix_buffer_copy(MIX_PARAMS(duplicate), MIX_PARAMS(obj))) {
+ ret = MIX_PARAMS(duplicate);
+ } else {
+ mix_buffer_unref(duplicate);
+ }
+ }
+ return ret;
+}
+
+/**
+ * mix_buffer_copy:
+ * @target: copy to target
+ * @src: copy from src
+ * @returns: boolean indicates if copy is successful.
+ *
+ * Copy instance data from @src to @target.
+ */
+gboolean mix_buffer_copy(MixParams * target, const MixParams * src) {
+ MixBuffer *this_target, *this_src;
+
+ if (MIX_IS_BUFFER(target) && MIX_IS_BUFFER(src)) {
+ // Cast the base object to this child object
+ this_target = MIX_BUFFER(target);
+ this_src = MIX_BUFFER(src);
+
+ // Duplicate string
+ this_target->data = this_src->data;
+ this_target->size = this_src->size;
+ this_target->token = this_src->token;
+ this_target->callback = this_src->callback;
+
+ // Now chainup base class
+ if (parent_class->copy) {
+ return parent_class->copy(MIX_PARAMS_CAST(target), MIX_PARAMS_CAST(
+ src));
+ } else {
+ return TRUE;
+ }
+ }
+ return FALSE;
+}
+
+/**
+ * mix_buffer_equal:
+ * @first: first object to compare
+ * @second: second object to compare
+ * @returns: boolean indicates if instances are equal.
+ *
+ * Compare the two objects for equivalence.
+ */
+gboolean mix_buffer_equal(MixParams * first, MixParams * second) {
+ gboolean ret = FALSE;
+ MixBuffer *this_first, *this_second;
+
+ if (MIX_IS_BUFFER(first) && MIX_IS_BUFFER(second)) {
+ // Deep compare
+ // Cast the base object to this child object
+
+ this_first = MIX_BUFFER(first);
+ this_second = MIX_BUFFER(second);
+
+ if (this_first->data == this_second->data && this_first->size
+ == this_second->size && this_first->token == this_second->token
+ && this_first->callback == this_second->callback) {
+ // members within this scope equal. chaining up.
+ MixParamsClass *klass = MIX_PARAMS_CLASS(parent_class);
+ if (klass->equal)
+ ret = klass->equal(first, second);
+ else
+ ret = TRUE;
+ }
+ }
+
+ return ret;
+}
+
+#define MIX_BUFFER_SETTER_CHECK_INPUT(obj) \
+ if(!obj) return MIX_RESULT_NULL_PTR; \
+ if(!MIX_IS_BUFFER(obj)) return MIX_RESULT_FAIL; \
+
+
+MIX_RESULT mix_buffer_set_data(MixBuffer * obj, guchar *data, guint size,
+ gulong token, MixBufferCallback callback) {
+ MIX_BUFFER_SETTER_CHECK_INPUT (obj);
+
+ obj->data = data;
+ obj->size = size;
+ obj->token = token;
+ obj->callback = callback;
+
+ return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_buffer_set_pool(MixBuffer *obj, MixBufferPool *pool) {
+
+ MIX_BUFFER_SETTER_CHECK_INPUT (obj);
+ MixBufferPrivate *priv = (MixBufferPrivate *) obj->reserved;
+ priv->pool = pool;
+
+ return MIX_RESULT_SUCCESS;
+}
+
+void mix_buffer_unref(MixBuffer * obj) {
+
+ // Unref through base class
+ mix_params_unref(MIX_PARAMS(obj));
+
+ LOG_I( "refcount = %d\n", MIX_PARAMS(
+ obj)->refcount);
+
+ // Check if we have reduced to 1, in which case we add ourselves to free pool
+ if (MIX_PARAMS(obj)->refcount == 1) {
+ MixBufferPrivate *priv = (MixBufferPrivate *) obj->reserved;
+ g_return_if_fail(priv->pool != NULL);
+
+ if (obj->callback) {
+ obj->callback(obj->token, obj->data);
+ }
+ mix_bufferpool_put(priv->pool, obj);
+ }
+}
+
diff --git a/mix_video/src/mixbuffer.h b/mix_video/src/mixbuffer.h
new file mode 100644
index 0000000..53d2e1c
--- /dev/null
+++ b/mix_video/src/mixbuffer.h
@@ -0,0 +1,130 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+#ifndef __MIX_BUFFER_H__
+#define __MIX_BUFFER_H__
+
+#include <mixparams.h>
+#include "mixvideodef.h"
+
+/**
+ * MIX_TYPE_BUFFER:
+ *
+ * Get type of class.
+ */
+#define MIX_TYPE_BUFFER (mix_buffer_get_type ())
+
+/**
+ * MIX_BUFFER:
+ * @obj: object to be type-casted.
+ */
+#define MIX_BUFFER(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_BUFFER, MixBuffer))
+
+/**
+ * MIX_IS_BUFFER:
+ * @obj: an object.
+ *
+ * Checks if the given object is an instance of #MixBuffer
+ */
+#define MIX_IS_BUFFER(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_BUFFER))
+
+/**
+ * MIX_BUFFER_CLASS:
+ * @klass: class to be type-casted.
+ */
+#define MIX_BUFFER_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_BUFFER, MixBufferClass))
+
+/**
+ * MIX_IS_BUFFER_CLASS:
+ * @klass: a class.
+ *
+ * Checks if the given class is #MixBufferClass
+ */
+#define MIX_IS_BUFFER_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_BUFFER))
+
+/**
+ * MIX_BUFFER_GET_CLASS:
+ * @obj: a #MixBuffer object.
+ *
+ * Get the class instance of the object.
+ */
+#define MIX_BUFFER_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_BUFFER, MixBufferClass))
+
+typedef void (*MixBufferCallback)(gulong token, guchar *data);
+
+typedef struct _MixBuffer MixBuffer;
+typedef struct _MixBufferClass MixBufferClass;
+
+/**
+ * MixBuffer:
+ *
+ * MI-X Buffer object
+ */
+struct _MixBuffer {
+ /*< public > */
+ MixParams parent;
+
+ /*< public > */
+ guchar *data;
+ guint size;
+ gulong token;
+ MixBufferCallback callback;
+
+ gpointer reserved;
+};
+
+/**
+ * MixBufferClass:
+ *
+ * MI-X Buffer object class
+ */
+struct _MixBufferClass {
+ /*< public > */
+ MixParamsClass parent_class;
+
+ /* class members */
+};
+
+/**
+ * mix_buffer_get_type:
+ * @returns: type
+ *
+ * Get the type of object.
+ */
+GType mix_buffer_get_type(void);
+
+/**
+ * mix_buffer_new:
+ * @returns: A newly allocated instance of #MixBuffer
+ *
+ * Use this method to create new instance of #MixBuffer
+ */
+MixBuffer *mix_buffer_new(void);
+/**
+ * mix_buffer_ref:
+ * @mix: object to add reference
+ * @returns: the MixBuffer instance where reference count has been increased.
+ *
+ * Add reference count.
+ */
+MixBuffer *mix_buffer_ref(MixBuffer * mix);
+
+/**
+ * mix_buffer_unref:
+ * @obj: object to unref.
+ *
+ * Decrement reference count of the object.
+ */
+void mix_buffer_unref(MixBuffer * mix);
+
+/* Class Methods */
+
+MIX_RESULT mix_buffer_set_data(MixBuffer * obj, guchar *data, guint size,
+ gulong token, MixBufferCallback callback);
+
+#endif /* __MIX_BUFFER_H__ */
diff --git a/mix_video/src/mixbuffer_private.h b/mix_video/src/mixbuffer_private.h
new file mode 100644
index 0000000..87c9c07
--- /dev/null
+++ b/mix_video/src/mixbuffer_private.h
@@ -0,0 +1,39 @@
+/*
+INTEL CONFIDENTIAL
+Copyright 2009 Intel Corporation All Rights Reserved.
+The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+*/
+
+#ifndef __MIX_BUFFER_PRIVATE_H__
+#define __MIX_BUFFER_PRIVATE_H__
+
+#include "mixbuffer.h"
+#include "mixbufferpool.h"
+
+typedef struct _MixBufferPrivate MixBufferPrivate;
+
+struct _MixBufferPrivate
+{
+ /*< private > */
+ MixBufferPool *pool;
+
+};
+
+/**
+* MIX_BUFFER_PRIVATE:
+*
+* Get private structure of this class.
+* @obj: class object for which to get private data.
+*/
+#define MIX_BUFFER_GET_PRIVATE(obj) \
+ (G_TYPE_INSTANCE_GET_PRIVATE ((obj), MIX_TYPE_BUFFER, MixBufferPrivate))
+
+
+/* Private functions */
+MIX_RESULT
+mix_buffer_set_pool (MixBuffer *obj, MixBufferPool *pool);
+
+
+#endif /* __MIX_BUFFER_PRIVATE_H__ */
diff --git a/mix_video/src/mixbufferpool.c b/mix_video/src/mixbufferpool.c
new file mode 100644
index 0000000..9d9ad56
--- /dev/null
+++ b/mix_video/src/mixbufferpool.c
@@ -0,0 +1,484 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+/**
+ * SECTION:mixbufferpool
+ * @short_description: MI-X Input Buffer Pool
+ *
+ * A data object which stores and manipulates a pool of compressed video buffers.
+ */
+
+#include "mixvideolog.h"
+#include "mixbufferpool.h"
+#include "mixbuffer_private.h"
+
+#define MIX_LOCK(lock) g_mutex_lock(lock);
+#define MIX_UNLOCK(lock) g_mutex_unlock(lock);
+
+#define SAFE_FREE(p) if(p) { g_free(p); p = NULL; }
+
+static GType _mix_bufferpool_type = 0;
+static MixParamsClass *parent_class = NULL;
+
+#define _do_init { _mix_bufferpool_type = g_define_type_id; }
+
+gboolean mix_bufferpool_copy(MixParams * target, const MixParams * src);
+MixParams *mix_bufferpool_dup(const MixParams * obj);
+gboolean mix_bufferpool_equal(MixParams * first, MixParams * second);
+static void mix_bufferpool_finalize(MixParams * obj);
+
+G_DEFINE_TYPE_WITH_CODE (MixBufferPool, mix_bufferpool, MIX_TYPE_PARAMS,
+ _do_init);
+
+/* Instance initializer: both buffer lists start empty, the counters
+ * start at zero, and the per-object mutex is created. */
+static void mix_bufferpool_init(MixBufferPool * self) {
+	/* initialize properties here */
+	self->free_list = NULL;
+	self->in_use_list = NULL;
+	self->free_list_max_size = 0;
+	self->high_water_mark = 0;
+
+	self->reserved1 = NULL;
+	self->reserved2 = NULL;
+	self->reserved3 = NULL;
+	self->reserved4 = NULL;
+
+	// TODO: relocate this mutex allocation -we can't communicate failure in ctor.
+	// Note that g_thread_init() has already been called by mix_video_init()
+	self->objectlock = g_mutex_new();
+
+}
+
+/* Class initializer: wires the MixParams virtual methods (finalize,
+ * copy, dup, equal) to this subclass's implementations and caches the
+ * parent class pointer for chain-up calls. */
+static void mix_bufferpool_class_init(MixBufferPoolClass * klass) {
+	MixParamsClass *mixparams_class = MIX_PARAMS_CLASS(klass);
+
+	/* setup static parent class */
+	parent_class = (MixParamsClass *) g_type_class_peek_parent(klass);
+
+	mixparams_class->finalize = mix_bufferpool_finalize;
+	mixparams_class->copy = (MixParamsCopyFunction) mix_bufferpool_copy;
+	mixparams_class->dup = (MixParamsDupFunction) mix_bufferpool_dup;
+	mixparams_class->equal = (MixParamsEqualFunction) mix_bufferpool_equal;
+}
+
+/* Allocates a new MixBufferPool instance through the GType system;
+ * initial field values come from mix_bufferpool_init(). */
+MixBufferPool *
+mix_bufferpool_new(void) {
+	MixBufferPool *ret = (MixBufferPool *) g_type_create_instance(
+			MIX_TYPE_BUFFERPOOL);
+	return ret;
+}
+
+/* Finalizer: releases the object mutex, then chains up to the parent
+ * finalize. NOTE(review): free_list / in_use_list are not released
+ * here -- callers appear to be expected to run
+ * mix_bufferpool_deinitialize() first; confirm this is always done. */
+void mix_bufferpool_finalize(MixParams * obj) {
+	/* clean up here. */
+
+	MixBufferPool *self = MIX_BUFFERPOOL(obj);
+
+	if (self->objectlock) {
+		g_mutex_free(self->objectlock);
+		self->objectlock = NULL;
+	}
+
+	/* Chain up parent */
+	if (parent_class->finalize) {
+		parent_class->finalize(obj);
+	}
+}
+
+/* Adds a reference; thin wrapper over mix_params_ref(). */
+MixBufferPool *
+mix_bufferpool_ref(MixBufferPool * mix) {
+	return (MixBufferPool *) mix_params_ref(MIX_PARAMS(mix));
+}
+
+/**
+ * mix_bufferpool_dup:
+ * @obj: a #MixBufferPool object
+ * @returns: a newly allocated duplicate of the object.
+ *
+ * Copy duplicate of the object.
+ */
+MixParams *
+mix_bufferpool_dup(const MixParams * obj) {
+	MixParams *ret = NULL;
+
+	if (MIX_IS_BUFFERPOOL(obj)) {
+
+		/* BUG FIX: do not take obj->objectlock here. mix_bufferpool_copy()
+		 * locks the src object itself, and GMutex is non-recursive, so
+		 * taking the lock here as well self-deadlocked on the second
+		 * g_mutex_lock() of the same mutex. */
+		MixBufferPool *duplicate = mix_bufferpool_new();
+		if (mix_bufferpool_copy(MIX_PARAMS(duplicate), MIX_PARAMS(obj))) {
+			ret = MIX_PARAMS(duplicate);
+		} else {
+			/* Copy failed: release the half-built duplicate. */
+			mix_bufferpool_unref(duplicate);
+		}
+
+	}
+	return ret;
+}
+
+/**
+ * mix_bufferpool_copy:
+ * @target: copy to target
+ * @src: copy from src
+ * @returns: boolean indicates if copy is successful.
+ *
+ * Copy instance data from @src to @target.
+ */
+gboolean mix_bufferpool_copy(MixParams * target, const MixParams * src) {
+	MixBufferPool *this_target, *this_src;
+
+	if (MIX_IS_BUFFERPOOL(target) && MIX_IS_BUFFERPOOL(src)) {
+
+		/* NOTE(review): locking src then target without a global lock
+		 * order can deadlock if two threads copy in opposite
+		 * directions at the same time -- confirm callers serialize. */
+		MIX_LOCK(MIX_BUFFERPOOL(src)->objectlock);
+		MIX_LOCK(MIX_BUFFERPOOL(target)->objectlock);
+
+		// Cast the base object to this child object
+		this_target = MIX_BUFFERPOOL(target);
+		this_src = MIX_BUFFERPOOL(src);
+
+		// Free the existing properties
+
+		/* NOTE(review): this is a shallow copy -- both pools end up
+		 * sharing the same GSList nodes; verify a double free cannot
+		 * occur when both copies are deinitialized. */
+		this_target->free_list = this_src->free_list;
+		this_target->in_use_list = this_src->in_use_list;
+		this_target->free_list_max_size = this_src->free_list_max_size;
+		this_target->high_water_mark = this_src->high_water_mark;
+
+		MIX_UNLOCK(MIX_BUFFERPOOL(src)->objectlock);
+		MIX_UNLOCK(MIX_BUFFERPOOL(target)->objectlock);
+
+		// Now chainup base class
+		if (parent_class->copy) {
+			return parent_class->copy(MIX_PARAMS_CAST(target), MIX_PARAMS_CAST(
+					src));
+		} else {
+			return TRUE;
+		}
+	}
+	return FALSE;
+}
+
+/**
+ * mix_bufferpool_equal:
+ * @first: first object to compare
+ * @second: seond object to compare
+ * @returns: boolean indicates if instance are equal.
+ *
+ * Copy instance data from @src to @target.
+ */
+gboolean mix_bufferpool_equal(MixParams * first, MixParams * second) {
+	gboolean ret = FALSE;
+	MixBufferPool *this_first, *this_second;
+
+	if (MIX_IS_BUFFERPOOL(first) && MIX_IS_BUFFERPOOL(second)) {
+		// Deep compare
+		// Cast the base object to this child object
+
+		MIX_LOCK(MIX_BUFFERPOOL(first)->objectlock);
+		MIX_LOCK(MIX_BUFFERPOOL(second)->objectlock);
+
+		this_first = MIX_BUFFERPOOL(first);
+		this_second = MIX_BUFFERPOOL(second);
+
+		/* TODO: add comparison for other properties */
+		if (this_first->free_list == this_second->free_list
+				&& this_first->in_use_list == this_second->in_use_list
+				&& this_first->free_list_max_size
+						== this_second->free_list_max_size
+				&& this_first->high_water_mark == this_second->high_water_mark) {
+			// members within this scope equal. chaining up.
+			MixParamsClass *klass = MIX_PARAMS_CLASS(parent_class);
+			if (klass->equal)
+				ret = klass->equal(first, second);
+			else
+				ret = TRUE;
+		}
+
+		/* BUG FIX: these two calls were MIX_LOCK, which re-locked the
+		 * already-held non-recursive mutexes (deadlock) and left them
+		 * locked on exit; they must release the locks taken above. */
+		MIX_UNLOCK(MIX_BUFFERPOOL(first)->objectlock);
+		MIX_UNLOCK(MIX_BUFFERPOOL(second)->objectlock);
+
+	}
+
+	return ret;
+}
+
+/* Class Methods */
+
+/**
+ * mix_bufferpool_initialize:
+ * @returns: MIX_RESULT_SUCCESS if successful in creating the buffer pool
+ *
+ * Use this method to create a new buffer pool, consisting of a GSList of
+ * buffer objects that represents a pool of buffers.
+ */
+MIX_RESULT mix_bufferpool_initialize(MixBufferPool * obj, guint num_buffers) {
+
+	LOG_V( "Begin\n");
+
+	if (obj == NULL)
+		return MIX_RESULT_NULL_PTR;
+
+	MIX_LOCK(obj->objectlock);
+
+	if ((obj->free_list != NULL) || (obj->in_use_list != NULL)) {
+		//buffer pool is already populated; caller must deinitialize first
+
+		MIX_UNLOCK(obj->objectlock);
+
+		return MIX_RESULT_ALREADY_INIT;
+	}
+
+	if (num_buffers == 0) {
+		obj->free_list = NULL;
+
+		obj->in_use_list = NULL;
+
+		obj->free_list_max_size = num_buffers;
+
+		obj->high_water_mark = 0;
+
+		MIX_UNLOCK(obj->objectlock);
+
+		return MIX_RESULT_SUCCESS;
+	}
+
+	// Initialize the free pool with MixBuffer objects
+
+	/* FIX: use an unsigned index to match num_buffers and avoid a
+	 * signed/unsigned comparison in the loop condition. */
+	guint i = 0;
+	MixBuffer *buffer = NULL;
+
+	for (; i < num_buffers; i++) {
+
+		buffer = mix_buffer_new();
+
+		if (buffer == NULL) {
+			/* FIX: allocation failed part-way; release any buffers
+			 * already created so the pool is left empty instead of
+			 * half-built (previously these leaked). */
+			while (obj->free_list != NULL) {
+				mix_buffer_unref((MixBuffer *) obj->free_list->data);
+				obj->free_list = g_slist_delete_link(obj->free_list,
+						obj->free_list);
+			}
+
+			MIX_UNLOCK(obj->objectlock);
+
+			return MIX_RESULT_NO_MEMORY;
+		}
+
+		// Set the pool reference in the private data of the MixBuffer object
+		mix_buffer_set_pool(buffer, obj);
+
+		//Add each MixBuffer object to the pool list
+		obj->free_list = g_slist_append(obj->free_list, buffer);
+
+	}
+
+	obj->in_use_list = NULL;
+
+	obj->free_list_max_size = num_buffers;
+
+	obj->high_water_mark = 0;
+
+	MIX_UNLOCK(obj->objectlock);
+
+	LOG_V( "End\n");
+
+return MIX_RESULT_SUCCESS;
+}
+
+/**
+ * mix_bufferpool_put:
+ * @returns: SUCCESS or FAILURE
+ *
+ * Use this method to return a buffer to the free pool
+ */
+MIX_RESULT mix_bufferpool_put(MixBufferPool * obj, MixBuffer * buffer) {
+
+	if (obj == NULL || buffer == NULL)
+		return MIX_RESULT_NULL_PTR;
+
+	MIX_LOCK(obj->objectlock);
+
+	if (obj->in_use_list == NULL) {
+		//in use list cannot be empty if a buffer is in use
+		//TODO need better error code for this
+
+		MIX_UNLOCK(obj->objectlock);
+
+		return MIX_RESULT_FAIL;
+	}
+
+	GSList *element = g_slist_find(obj->in_use_list, buffer);
+	if (element == NULL) {
+		//Integrity error; buffer not found in in use list
+		//TODO need better error code and handling for this
+
+		MIX_UNLOCK(obj->objectlock);
+
+		return MIX_RESULT_FAIL;
+	} else {
+		//Remove this element from the in_use_list
+		obj->in_use_list = g_slist_remove_link(obj->in_use_list, element);
+
+		//Concat the element to the free_list
+		/* The GSList node itself is moved between lists, so no
+		 * allocation happens on this path. */
+		obj->free_list = g_slist_concat(obj->free_list, element);
+	}
+
+	//Note that we do nothing with the ref count for this. We want it to
+	//stay at 1, which is what triggered it to be added back to the free list.
+
+	MIX_UNLOCK(obj->objectlock);
+
+	return MIX_RESULT_SUCCESS;
+}
+
+/**
+ * mix_bufferpool_get:
+ * @returns: SUCCESS or FAILURE
+ *
+ * Use this method to get a buffer from the free pool
+ */
+MIX_RESULT mix_bufferpool_get(MixBufferPool * obj, MixBuffer ** buffer) {
+
+	if (obj == NULL || buffer == NULL)
+		return MIX_RESULT_NULL_PTR;
+
+	MIX_LOCK(obj->objectlock);
+
+	if (obj->free_list == NULL) {
+		//We are out of buffers
+		//TODO need to log this as well
+
+		MIX_UNLOCK(obj->objectlock);
+
+		return MIX_RESULT_POOLEMPTY;
+	}
+
+	//Remove a buffer from the free pool
+
+	//We just remove the one at the head, since it's convenient
+	GSList *element = obj->free_list;
+	obj->free_list = g_slist_remove_link(obj->free_list, element);
+	if (element == NULL) {
+		//Unexpected behavior
+		//TODO need better error code and handling for this
+
+		MIX_UNLOCK(obj->objectlock);
+
+		return MIX_RESULT_FAIL;
+	} else {
+		//Concat the element to the in_use_list
+		obj->in_use_list = g_slist_concat(obj->in_use_list, element);
+
+		//TODO replace with proper logging
+
+		LOG_I( "buffer refcount%d\n",
+				MIX_PARAMS(element->data)->refcount);
+
+		//Set the out buffer pointer
+		*buffer = (MixBuffer *) element->data;
+
+		//Check the high water mark for buffer use
+		guint size = g_slist_length(obj->in_use_list);
+		if (size > obj->high_water_mark)
+			obj->high_water_mark = size;
+		//TODO Log this high water mark
+	}
+
+	//Increment the reference count for the buffer
+	/* The caller owns this extra reference; dropping it back to 1 is
+	 * what eventually returns the buffer to the free list. */
+	mix_buffer_ref(*buffer);
+
+	MIX_UNLOCK(obj->objectlock);
+
+	return MIX_RESULT_SUCCESS;
+}
+
+/**
+ * mix_bufferpool_deinitialize:
+ * @returns: SUCCESS or FAILURE
+ *
+ * Use this method to teardown a buffer pool
+ */
+MIX_RESULT mix_bufferpool_deinitialize(MixBufferPool * obj) {
+	if (obj == NULL)
+		return MIX_RESULT_NULL_PTR;
+
+	MIX_LOCK(obj->objectlock);
+
+	/* Refuse to tear down unless every buffer has been returned: the
+	 * in-use list must be empty and the free list must hold exactly
+	 * the number of buffers that were preallocated. */
+	if ((obj->in_use_list != NULL) || (g_slist_length(obj->free_list)
+			!= obj->free_list_max_size)) {
+		//TODO better error code
+		//We have outstanding buffer objects in use and they need to be
+		//freed before we can deinitialize.
+
+		MIX_UNLOCK(obj->objectlock);
+
+		return MIX_RESULT_FAIL;
+	}
+
+	//Now remove buffer objects from the list
+
+	MixBuffer *buffer = NULL;
+
+	while (obj->free_list != NULL) {
+		//Get the buffer object from the head of the list
+		buffer = obj->free_list->data;
+		//buffer = g_slist_nth_data(obj->free_list, 0);
+
+		//Release it
+		mix_buffer_unref(buffer);
+
+		//Delete the head node of the list and store the new head
+		obj->free_list = g_slist_delete_link(obj->free_list, obj->free_list);
+
+		//Repeat until empty
+	}
+
+	obj->free_list_max_size = 0;
+
+	//May want to log this information for tuning
+	obj->high_water_mark = 0;
+
+	MIX_UNLOCK(obj->objectlock);
+
+	return MIX_RESULT_SUCCESS;
+}
+
+#define MIX_BUFFERPOOL_SETTER_CHECK_INPUT(obj) \
+ if(!obj) return MIX_RESULT_NULL_PTR; \
+ if(!MIX_IS_BUFFERPOOL(obj)) return MIX_RESULT_FAIL; \
+
+#define MIX_BUFFERPOOL_GETTER_CHECK_INPUT(obj, prop) \
+ if(!obj || !prop) return MIX_RESULT_NULL_PTR; \
+ if(!MIX_IS_BUFFERPOOL(obj)) return MIX_RESULT_FAIL; \
+
+
+/* Debug helper: logs one buffer's address, data pointer and refcount. */
+MIX_RESULT
+mix_bufferpool_dumpbuffer(MixBuffer *buffer)
+{
+	/* FIX: use %p for pointers; the previous "%x" with (guint) casts
+	 * truncated pointer values on 64-bit builds. */
+	LOG_I( "\tBuffer %p, ptr %p, refcount %d\n", (gpointer)buffer,
+			(gpointer)buffer->data, MIX_PARAMS(buffer)->refcount);
+	return MIX_RESULT_SUCCESS;
+}
+
+/* Debug helper: logs pool statistics and the contents of both lists.
+ * NOTE(review): mix_bufferpool_dumpbuffer takes one argument but is
+ * cast to GFunc (two arguments); calling through a mismatched
+ * function-pointer type is undefined behavior -- confirm or wrap. */
+MIX_RESULT
+mix_bufferpool_dumpprint (MixBufferPool * obj)
+{
+	//TODO replace this with proper logging later
+
+	LOG_I( "BUFFER POOL DUMP:\n");
+	LOG_I( "Free list size is %d\n", g_slist_length(obj->free_list));
+	LOG_I( "In use list size is %d\n", g_slist_length(obj->in_use_list));
+	LOG_I( "High water mark is %lu\n", obj->high_water_mark);
+
+	//Walk the free list and report the contents
+	LOG_I( "Free list contents:\n");
+	g_slist_foreach(obj->free_list, (GFunc) mix_bufferpool_dumpbuffer, NULL);
+
+	//Walk the in_use list and report the contents
+	LOG_I( "In Use list contents:\n");
+	g_slist_foreach(obj->in_use_list, (GFunc) mix_bufferpool_dumpbuffer, NULL);
+
+	return MIX_RESULT_SUCCESS;
+}
+
diff --git a/mix_video/src/mixbufferpool.h b/mix_video/src/mixbufferpool.h
new file mode 100644
index 0000000..bf32d0d
--- /dev/null
+++ b/mix_video/src/mixbufferpool.h
@@ -0,0 +1,150 @@
+/*
+INTEL CONFIDENTIAL
+Copyright 2009 Intel Corporation All Rights Reserved.
+The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+*/
+
+#ifndef __MIX_BUFFERPOOL_H__
+#define __MIX_BUFFERPOOL_H__
+
+#include <mixparams.h>
+#include "mixvideodef.h"
+#include "mixbuffer.h"
+
+#include <va/va.h>
+
+G_BEGIN_DECLS
+
+/**
+* MIX_TYPE_BUFFERPOOL:
+*
+* Get type of class.
+*/
+#define MIX_TYPE_BUFFERPOOL (mix_bufferpool_get_type ())
+
+/**
+* MIX_BUFFERPOOL:
+* @obj: object to be type-casted.
+*/
+#define MIX_BUFFERPOOL(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_BUFFERPOOL, MixBufferPool))
+
+/**
+* MIX_IS_BUFFERPOOL:
+* @obj: an object.
+*
+* Checks if the given object is an instance of #MixBufferPool
+*/
+#define MIX_IS_BUFFERPOOL(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_BUFFERPOOL))
+
+/**
+* MIX_BUFFERPOOL_CLASS:
+* @klass: class to be type-casted.
+*/
+#define MIX_BUFFERPOOL_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_BUFFERPOOL, MixBufferPoolClass))
+
+/**
+* MIX_IS_BUFFERPOOL_CLASS:
+* @klass: a class.
+*
+* Checks if the given class is #MixBufferPoolClass
+*/
+#define MIX_IS_BUFFERPOOL_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_BUFFERPOOL))
+
+/**
+* MIX_BUFFERPOOL_GET_CLASS:
+* @obj: a #MixBufferPool object.
+*
+* Get the class instance of the object.
+*/
+#define MIX_BUFFERPOOL_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_BUFFERPOOL, MixBufferPoolClass))
+
+typedef struct _MixBufferPool MixBufferPool;
+typedef struct _MixBufferPoolClass MixBufferPoolClass;
+
+/**
+* MixBufferPool:
+*
+* MI-X Video Buffer Pool object
+*/
+struct _MixBufferPool
+{
+ /*< public > */
+ MixParams parent;
+
+ /*< public > */
+ GSList *free_list; /* list of free buffers */
+ GSList *in_use_list; /* list of buffers in use */
+ gulong free_list_max_size; /* initial size of the free list */
+ gulong high_water_mark; /* most buffers in use at one time */
+
+ void *reserved1;
+ void *reserved2;
+ void *reserved3;
+ void *reserved4;
+
+ /*< private > */
+ GMutex *objectlock;
+
+};
+
+/**
+* MixBufferPoolClass:
+*
+* MI-X Video Buffer Pool object class
+*/
+struct _MixBufferPoolClass
+{
+ /*< public > */
+ MixParamsClass parent_class;
+
+ /* class members */
+};
+
+/**
+* mix_bufferpool_get_type:
+* @returns: type
+*
+* Get the type of object.
+*/
+GType mix_bufferpool_get_type (void);
+
+/**
+* mix_bufferpool_new:
+* @returns: A newly allocated instance of #MixBufferPool
+*
+* Use this method to create new instance of #MixBufferPool
+*/
+MixBufferPool *mix_bufferpool_new (void);
+/**
+* mix_bufferpool_ref:
+* @mix: object to add reference
+* @returns: the MixBufferPool instance where reference count has been increased.
+*
+* Add reference count.
+*/
+MixBufferPool *mix_bufferpool_ref (MixBufferPool * mix);
+
+/**
+* mix_bufferpool_unref:
+* @obj: object to unref.
+*
+* Decrement reference count of the object.
+*/
+#define mix_bufferpool_unref(obj) mix_params_unref(MIX_PARAMS(obj))
+
+/* Class Methods */
+
+MIX_RESULT mix_bufferpool_initialize (MixBufferPool * obj,
+ guint num_buffers);
+MIX_RESULT mix_bufferpool_put (MixBufferPool * obj,
+ MixBuffer * buffer);
+
+MIX_RESULT mix_bufferpool_get (MixBufferPool * obj,
+ MixBuffer ** buffer);
+MIX_RESULT mix_bufferpool_deinitialize (MixBufferPool * obj);
+
+G_END_DECLS
+
+#endif /* __MIX_BUFFERPOOL_H__ */
diff --git a/mix_video/src/mixdisplay.c b/mix_video/src/mixdisplay.c
new file mode 100644
index 0000000..d6da0e9
--- /dev/null
+++ b/mix_video/src/mixdisplay.c
@@ -0,0 +1,539 @@
+/*
+INTEL CONFIDENTIAL
+Copyright 2009 Intel Corporation All Rights Reserved.
+The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+*/
+
+/**
+* SECTION:mixdisplay
+* @short_description: Lightweight base class for the MIX media display
+*
+*/
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "mixdisplay.h"
+#include <gobject/gvaluecollector.h>
+
+#define DEBUG_REFCOUNT
+
+static void mix_display_class_init (gpointer g_class, gpointer class_data);
+static void mix_display_init (GTypeInstance * instance, gpointer klass);
+
+static void mix_value_display_init (GValue * value);
+static void mix_value_display_free (GValue * value);
+static void mix_value_display_copy (const GValue * src_value,
+ GValue * dest_value);
+static gpointer mix_value_display_peek_pointer (const GValue * value);
+static gchar *mix_value_display_collect (GValue * value,
+ guint n_collect_values,
+ GTypeCValue * collect_values,
+ guint collect_flags);
+static gchar *mix_value_display_lcopy (const GValue * value,
+ guint n_collect_values,
+ GTypeCValue * collect_values,
+ guint collect_flags);
+
+static void mix_display_finalize (MixDisplay * obj);
+static gboolean mix_display_copy_default (MixDisplay * target,
+ const MixDisplay * src);
+static MixDisplay *mix_display_dup_default (const MixDisplay * obj);
+static gboolean mix_display_equal_default (MixDisplay * first,
+ MixDisplay * second);
+
+/* Registers MixDisplay as a new fundamental (non-GObject) abstract
+ * GType with a custom GValue table for pointer-style collect/lcopy.
+ * NOTE(review): registration is not protected by g_once_init_enter;
+ * confirm first call happens before any threads race here. */
+GType
+mix_display_get_type (void)
+{
+  static GType _mix_display_type = 0;
+
+  if (G_UNLIKELY (_mix_display_type == 0))
+    {
+
+      GTypeValueTable value_table = {
+	mix_value_display_init,
+	mix_value_display_free,
+	mix_value_display_copy,
+	mix_value_display_peek_pointer,
+	"p",
+	mix_value_display_collect,
+	"p",
+	mix_value_display_lcopy
+      };
+
+      GTypeInfo info = {
+	sizeof (MixDisplayClass),
+	NULL,
+	NULL,
+	mix_display_class_init,
+	NULL,
+	NULL,
+	sizeof (MixDisplay),
+	0,
+	(GInstanceInitFunc) mix_display_init,
+	NULL
+      };
+
+      static const GTypeFundamentalInfo fundamental_info = {
+	(G_TYPE_FLAG_CLASSED | G_TYPE_FLAG_INSTANTIATABLE |
+	 G_TYPE_FLAG_DERIVABLE | G_TYPE_FLAG_DEEP_DERIVABLE)
+      };
+
+      info.value_table = &value_table;
+
+      _mix_display_type = g_type_fundamental_next ();
+      g_type_register_fundamental (_mix_display_type, "MixDisplay",
+				   &info, &fundamental_info,
+				   G_TYPE_FLAG_ABSTRACT);
+
+    }
+
+  return _mix_display_type;
+}
+
+/* Class initializer: installs the default dup/copy/finalize/equal
+ * virtual methods; subclasses may override them. */
+static void
+mix_display_class_init (gpointer g_class, gpointer class_data)
+{
+  MixDisplayClass *klass = MIX_DISPLAY_CLASS (g_class);
+
+  klass->dup = mix_display_dup_default;
+  klass->copy = mix_display_copy_default;
+  klass->finalize = mix_display_finalize;
+  klass->equal = mix_display_equal_default;
+}
+
+/* Instance initializer: every new MixDisplay starts with refcount 1. */
+static void
+mix_display_init (GTypeInstance * instance, gpointer klass)
+{
+  MixDisplay *obj = MIX_DISPLAY_CAST (instance);
+
+  obj->refcount = 1;
+}
+
+/* Copies @src into @target via the target class's copy vmethod,
+ * falling back to the default shallow copy when none is installed. */
+gboolean
+mix_display_copy (MixDisplay * target, const MixDisplay * src)
+{
+  /* Use the target object class. Because it knows what it is looking for. */
+  MixDisplayClass *klass = MIX_DISPLAY_GET_CLASS (target);
+  if (klass->copy)
+    {
+      return klass->copy (target, src);
+    }
+  else
+    {
+      return mix_display_copy_default (target, src);
+    }
+}
+
+/**
+* mix_display_copy_default:
+* @target:
+* @src:
+*
+* The default copy method of this object. Perhap copy at this level.
+* Assign this to the copy vmethod.
+*/
+static gboolean
+mix_display_copy_default (MixDisplay * target, const MixDisplay * src)
+{
+  if (MIX_IS_DISPLAY (target) && MIX_IS_DISPLAY (src))
+    {
+      // TODO perform deep copy.
+      /* Currently only validates types; no fields are copied here. */
+      return TRUE;
+    }
+  return FALSE;
+}
+
+/* Default finalizer: base class holds no resources to release. */
+static void
+mix_display_finalize (MixDisplay * obj)
+{
+  /* do nothing */
+}
+
+/* Duplicates @obj via the class's dup vmethod, falling back to the
+ * default (new instance + copy) implementation. Returns NULL on
+ * failure; the caller owns the returned reference. */
+MixDisplay *
+mix_display_dup (const MixDisplay * obj)
+{
+  MixDisplayClass *klass = MIX_DISPLAY_GET_CLASS (obj);
+
+  if (klass->dup)
+    {
+      return klass->dup (obj);
+    }
+  else if (MIX_IS_DISPLAY (obj))
+    {
+      return mix_display_dup_default (obj);
+    }
+  return NULL;
+}
+
+/* Default dup: allocate a fresh instance of the same concrete type as
+ * @obj and copy into it. Returns NULL (without leaking) on failure. */
+static MixDisplay *
+mix_display_dup_default (const MixDisplay * obj)
+{
+  /* BUG FIX: mix_display_new() requires a GType argument (MixDisplay
+   * itself is abstract); create an instance of @obj's concrete type. */
+  MixDisplay *ret = mix_display_new (G_TYPE_FROM_INSTANCE ((gpointer) obj));
+  if (mix_display_copy (ret, obj))
+    {
+      return ret;
+    }
+
+  /* BUG FIX: release the half-built instance instead of leaking it. */
+  mix_display_unref (ret);
+  return NULL;
+}
+
+/* Creates a new instance of @type (a MixDisplay-derived GType). */
+MixDisplay *
+mix_display_new (GType type)
+{
+  MixDisplay *obj;
+
+  /* we don't support dynamic types because they really aren't useful,
+   * and could cause refcount problems */
+  obj = (MixDisplay *) g_type_create_instance (type);
+
+  return obj;
+}
+
+/* Atomically increments the refcount; returns @obj for chaining. */
+MixDisplay *
+mix_display_ref (MixDisplay * obj)
+{
+  g_return_val_if_fail (MIX_IS_DISPLAY (obj), NULL);
+
+  g_atomic_int_inc (&obj->refcount);
+
+  return obj;
+}
+
+/* Runs the class finalizer, then frees the instance if the finalizer
+ * did not resurrect it (refcount still 0). */
+static void
+mix_display_free (MixDisplay * obj)
+{
+  MixDisplayClass *klass = NULL;
+
+  klass = MIX_DISPLAY_GET_CLASS (obj);
+  klass->finalize (obj);
+
+  /* Should we support recycling the object? */
+  /* If so, refcount handling is slightly different. */
+  /* i.e. If the refcount is still 0 we can really free the object, else the finalize method recycled the object -- but to where? */
+
+  if (g_atomic_int_get (&obj->refcount) == 0)
+    {
+
+      g_type_free_instance ((GTypeInstance *) obj);
+    }
+}
+
+/* Atomically drops one reference; frees the object when it reaches 0. */
+void
+mix_display_unref (MixDisplay * obj)
+{
+  g_return_if_fail (obj != NULL);
+  g_return_if_fail (obj->refcount > 0);
+
+  if (G_UNLIKELY (g_atomic_int_dec_and_test (&obj->refcount)))
+    {
+      mix_display_free (obj);
+    }
+}
+
+/* GValue table: initialize to the empty (NULL) display pointer. */
+static void
+mix_value_display_init (GValue * value)
+{
+  value->data[0].v_pointer = NULL;
+}
+
+/* GValue table: drop the reference held by the value, if any. */
+static void
+mix_value_display_free (GValue * value)
+{
+  if (value->data[0].v_pointer)
+    {
+      mix_display_unref (MIX_DISPLAY_CAST (value->data[0].v_pointer));
+    }
+}
+
+/* GValue table: copying a value takes a new reference on the display. */
+static void
+mix_value_display_copy (const GValue * src_value, GValue * dest_value)
+{
+  if (src_value->data[0].v_pointer)
+    {
+      dest_value->data[0].v_pointer =
+	mix_display_ref (MIX_DISPLAY_CAST (src_value->data[0].v_pointer));
+    }
+  else
+    {
+      dest_value->data[0].v_pointer = NULL;
+    }
+}
+
+/* GValue table: peek at the stored pointer without touching refcount. */
+static gpointer
+mix_value_display_peek_pointer (const GValue * value)
+{
+  return value->data[0].v_pointer;
+}
+
+/* GValue table: collect from varargs ("p" format, one pointer). */
+static gchar *
+mix_value_display_collect (GValue * value, guint n_collect_values,
+			   GTypeCValue * collect_values, guint collect_flags)
+{
+  mix_value_set_display (value, collect_values[0].v_pointer);
+
+  return NULL;
+}
+
+/* GValue table: lcopy into a caller-supplied pointer location; a new
+ * reference is taken unless G_VALUE_NOCOPY_CONTENTS is set. */
+static gchar *
+mix_value_display_lcopy (const GValue * value,
+			 guint n_collect_values,
+			 GTypeCValue * collect_values, guint collect_flags)
+{
+  gpointer *obj_p = collect_values[0].v_pointer;
+
+  if (!obj_p)
+    {
+      return g_strdup_printf ("value location for '%s' passed as NULL",
+			      G_VALUE_TYPE_NAME (value));
+    }
+
+  if (!value->data[0].v_pointer)
+    *obj_p = NULL;
+  else if (collect_flags & G_VALUE_NOCOPY_CONTENTS)
+    *obj_p = value->data[0].v_pointer;
+  else
+    *obj_p = mix_display_ref (value->data[0].v_pointer);
+
+  return NULL;
+}
+
+/**
+* mix_value_set_display:
+* @value: a valid #GValue of %MIX_TYPE_DISPLAY derived type
+* @obj: object value to set
+*
+* Set the contents of a %MIX_TYPE_DISPLAY derived #GValue to
+* @obj.
+* The caller retains ownership of the reference.
+*/
+void
+mix_value_set_display (GValue * value, MixDisplay * obj)
+{
+  gpointer *pointer_p;
+
+  g_return_if_fail (MIX_VALUE_HOLDS_DISPLAY (value));
+  g_return_if_fail (obj == NULL || MIX_IS_DISPLAY (obj));
+
+  pointer_p = &value->data[0].v_pointer;
+  /* replace refs the new object and unrefs the old one, so the caller
+   * keeps its own reference to @obj. */
+  mix_display_replace ((MixDisplay **) pointer_p, obj);
+}
+
+/**
+* mix_value_take_display:
+* @value: a valid #GValue of #MIX_TYPE_DISPLAY derived type
+* @obj: object value to take
+*
+* Set the contents of a #MIX_TYPE_DISPLAY derived #GValue to
+* @obj.
+* Takes over the ownership of the caller's reference to @obj;
+* the caller doesn't have to unref it any more.
+*/
+void
+mix_value_take_display (GValue * value, MixDisplay * obj)
+{
+  gpointer *pointer_p;
+
+  g_return_if_fail (MIX_VALUE_HOLDS_DISPLAY (value));
+  g_return_if_fail (obj == NULL || MIX_IS_DISPLAY (obj));
+
+  pointer_p = &value->data[0].v_pointer;
+  /* replace takes a reference; the extra unref below transfers the
+   * caller's reference into the GValue (net refcount change: 0). */
+  mix_display_replace ((MixDisplay **) pointer_p, obj);
+  if (obj)
+    mix_display_unref (obj);
+}
+
+/**
+* mix_value_get_display:
+* @value: a valid #GValue of #MIX_TYPE_DISPLAY derived type
+* @returns:object contents of @value
+*
+* refcount of the MixDisplay is not increased.
+*/
+MixDisplay *
+mix_value_get_display (const GValue * value)
+{
+  g_return_val_if_fail (MIX_VALUE_HOLDS_DISPLAY (value), NULL);
+
+  return value->data[0].v_pointer;
+}
+
+/**
+* mix_value_dup_display:
+* @value: a valid #GValue of %MIX_TYPE_DISPLAY derived type
+* @returns: object contents of @value
+*
+* refcount of MixDisplay is increased.
+*/
+MixDisplay *
+mix_value_dup_display (const GValue * value)
+{
+  g_return_val_if_fail (MIX_VALUE_HOLDS_DISPLAY (value), NULL);
+
+  /* NOTE(review): if the value holds NULL, mix_display_ref(NULL) will
+   * trip its precondition check and warn -- confirm callers never pass
+   * an empty value here. */
+  return mix_display_ref (value->data[0].v_pointer);
+}
+
+
+/* GParamSpec instance init: nothing to set up for display pspecs. */
+static void
+param_display_init (GParamSpec * pspec)
+{
+  /* GParamSpecDisplay *ospec = G_PARAM_SPEC_DISPLAY (pspec); */
+}
+
+/* Default value for a display pspec is the NULL pointer. */
+static void
+param_display_set_default (GParamSpec * pspec, GValue * value)
+{
+  value->data[0].v_pointer = NULL;
+}
+
+/* Validates that the held object matches the pspec's value type;
+ * replaces it with NULL (and reports modification) when it does not.
+ * NOTE(review): G_OBJECT_TYPE is used on a MixDisplay, which is a
+ * fundamental type, not a GObject -- likely should be
+ * G_TYPE_FROM_INSTANCE; confirm before relying on validation. */
+static gboolean
+param_display_validate (GParamSpec * pspec, GValue * value)
+{
+  gboolean validated = FALSE;
+  MixParamSpecDisplay *ospec = MIX_PARAM_SPEC_DISPLAY (pspec);
+  MixDisplay *obj = value->data[0].v_pointer;
+
+  if (obj && !g_value_type_compatible (G_OBJECT_TYPE (obj), G_PARAM_SPEC_VALUE_TYPE (ospec)))
+    {
+      mix_display_unref (obj);
+      value->data[0].v_pointer = NULL;
+      validated = TRUE;
+    }
+
+  return validated;
+}
+
+/* Orders two display values by raw pointer address (-1, 0, or 1). */
+static gint
+param_display_values_cmp (GParamSpec * pspec,
+			  const GValue * value1, const GValue * value2)
+{
+  guint8 *p1 = value1->data[0].v_pointer;
+  guint8 *p2 = value2->data[0].v_pointer;
+
+
+  return p1 < p2 ? -1 : p1 > p2;
+}
+
+/* Registers (once) and returns the GParamSpec type used for MixDisplay
+ * properties. */
+GType
+mix_param_spec_display_get_type (void)
+{
+  static GType type;
+
+  /* FIX: the branch-prediction hint must wrap the whole comparison;
+   * previously it was G_UNLIKELY (type) == 0, which hinted only on
+   * the value of `type` rather than on the comparison result. */
+  if (G_UNLIKELY (type == 0))
+    {
+      static const GParamSpecTypeInfo pspec_info = {
+	sizeof (MixParamSpecDisplay),	/* instance_size */
+	16,			/* n_preallocs */
+	param_display_init,	/* instance_init */
+	G_TYPE_OBJECT,		/* value_type */
+	NULL,			/* finalize */
+	param_display_set_default,	/* value_set_default */
+	param_display_validate,	/* value_validate */
+	param_display_values_cmp,	/* values_cmp */
+      };
+      /* FIXME 0.11: Should really be MixParamSpecDisplay */
+      type = g_param_type_register_static ("GParamSpecDisplay", &pspec_info);
+    }
+
+  return type;
+}
+
+/**
+* mix_param_spec_display:
+* @name: the canonical name of the property
+* @nick: the nickname of the property
+* @blurb: a short description of the property
+* @object_type: the #MixDisplayType for the property
+* @flags: a combination of #GParamFlags
+* @returns: a newly allocated #GParamSpec instance
+*
+* Creates a new #GParamSpec instance that hold #MixDisplay references.
+*
+*/
+GParamSpec *
+mix_param_spec_display (const char *name, const char *nick,
+			const char *blurb, GType object_type,
+			GParamFlags flags)
+{
+  MixParamSpecDisplay *ospec;
+
+  /* Only MixDisplay-derived types are legal value types. */
+  g_return_val_if_fail (g_type_is_a (object_type, MIX_TYPE_DISPLAY), NULL);
+
+  ospec = g_param_spec_internal (MIX_TYPE_PARAM_DISPLAY,
+				 name, nick, blurb, flags);
+  G_PARAM_SPEC (ospec)->value_type = object_type;
+
+  return G_PARAM_SPEC (ospec);
+}
+
+/**
+* mix_display_replace:
+* @olddata: pointer to a pointer to a object to be replaced
+* @newdata: pointer to new object
+*
+* Modifies a pointer to point to a new object. The modification
+* is done atomically, and the reference counts are updated correctly.
+* Either @newdata and the value pointed to by @olddata may be NULL.
+*/
+void
+mix_display_replace (MixDisplay ** olddata, MixDisplay * newdata)
+{
+  MixDisplay *olddata_val;
+
+  g_return_if_fail (olddata != NULL);
+
+  olddata_val = g_atomic_pointer_get ((gpointer *) olddata);
+
+  if (olddata_val == newdata)
+    return;
+
+  /* Ref the new object before publishing it so no reader ever sees an
+   * unreferenced pointer. */
+  if (newdata)
+    mix_display_ref (newdata);
+
+  /* Compare-and-swap loop: retry until the swap succeeds against the
+   * current value, so concurrent replaces are handled correctly. */
+  while (!g_atomic_pointer_compare_and_exchange
+	 ((gpointer *) olddata, olddata_val, newdata))
+    {
+      olddata_val = g_atomic_pointer_get ((gpointer *) olddata);
+    }
+
+  /* Drop the reference that *olddata held on the previous object. */
+  if (olddata_val)
+    mix_display_unref (olddata_val);
+
+}
+
+/* Compares two displays via the first object's equal vmethod, falling
+ * back to the default comparison; FALSE when @first is not a display. */
+gboolean
+mix_display_equal (MixDisplay * first, MixDisplay * second)
+{
+  if (MIX_IS_DISPLAY (first))
+    {
+      MixDisplayClass *klass = MIX_DISPLAY_GET_CLASS (first);
+
+      if (klass->equal)
+	{
+	  return klass->equal (first, second);
+	}
+      else
+	{
+	  return mix_display_equal_default (first, second);
+	}
+    }
+  else
+    return FALSE;
+}
+
+/* Default equality: only checks that both objects are MixDisplays;
+ * no field-by-field comparison is performed at this level. */
+static gboolean
+mix_display_equal_default (MixDisplay * first, MixDisplay * second)
+{
+  if (MIX_IS_DISPLAY (first) && MIX_IS_DISPLAY (second))
+    {
+      gboolean ret = TRUE;
+
+      // Do data comparison here.
+
+      return ret;
+    }
+  else
+    return FALSE;
+}
diff --git a/mix_video/src/mixdisplay.h b/mix_video/src/mixdisplay.h
new file mode 100644
index 0000000..daaa5ed
--- /dev/null
+++ b/mix_video/src/mixdisplay.h
@@ -0,0 +1,233 @@
+/*
+INTEL CONFIDENTIAL
+Copyright 2009 Intel Corporation All Rights Reserved.
+The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+*/
+
+#ifndef __MIX_DISPLAY_H__
+#define __MIX_DISPLAY_H__
+
+#include <glib-object.h>
+
+G_BEGIN_DECLS
+#define MIX_TYPE_DISPLAY (mix_display_get_type())
+#define MIX_IS_DISPLAY(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_DISPLAY))
+#define MIX_IS_DISPLAY_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_DISPLAY))
+#define MIX_DISPLAY_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_DISPLAY, MixDisplayClass))
+#define MIX_DISPLAY(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_DISPLAY, MixDisplay))
+#define MIX_DISPLAY_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_DISPLAY, MixDisplayClass))
+#define MIX_DISPLAY_CAST(obj) ((MixDisplay*)(obj))
+typedef struct _MixDisplay MixDisplay;
+typedef struct _MixDisplayClass MixDisplayClass;
+
+/**
+* MixDisplayDupFunction:
+* @obj: Display to duplicate
+* @returns: reference to cloned instance.
+*
+* Virtual function prototype for methods to create duplicate of instance.
+*
+*/
+typedef MixDisplay *(*MixDisplayDupFunction) (const MixDisplay * obj);
+
+/**
+* MixDisplayCopyFunction:
+* @target: target of the copy
+* @src: source of the copy
+* @returns: boolean indicates if copy is successful.
+*
+* Virtual function prototype for methods to create copies of instance.
+*
+*/
+typedef gboolean (*MixDisplayCopyFunction) (MixDisplay * target,
+ const MixDisplay * src);
+
+/**
+* MixDisplayFinalizeFunction:
+* @obj: Display to finalize
+*
+* Virtual function prototype for methods to free resources used by
+* object.
+*/
+typedef void (*MixDisplayFinalizeFunction) (MixDisplay * obj);
+
+/**
+* MixDisplayEqualFunction:
+* @first: first object in the comparison
+* @second: second object in the comparison
+*
+* Virtual function prototype for methods to compare 2 objects and check if they are equal.
+*/
+typedef gboolean (*MixDisplayEqualFunction) (MixDisplay * first,
+ MixDisplay * second);
+
+/**
+* MIX_VALUE_HOLDS_DISPLAY:
+* @value: the #GValue to check
+*
+* Checks if the given #GValue contains a #MIX_TYPE_DISPLAY value.
+*/
+#define MIX_VALUE_HOLDS_DISPLAY(value) (G_VALUE_HOLDS(value, MIX_TYPE_DISPLAY))
+
+/**
+* MIX_DISPLAY_REFCOUNT:
+* @obj: a #MixDisplay
+*
+* Get access to the reference count field of the object.
+*/
+#define MIX_DISPLAY_REFCOUNT(obj) ((MIX_DISPLAY_CAST(obj))->refcount)
+/**
+* MIX_DISPLAY_REFCOUNT_VALUE:
+* @obj: a #MixDisplay
+*
+* Get the reference count value of the object
+*/
+#define MIX_DISPLAY_REFCOUNT_VALUE(obj) (g_atomic_int_get (&(MIX_DISPLAY_CAST(obj))->refcount))
+
+/**
+* MixDisplay:
+* @instance: type instance
+* @refcount: atomic refcount
+*
+* Base class for a refcounted parameter objects.
+*/
+struct _MixDisplay
+{
+ GTypeInstance instance;
+ /*< public > */
+ gint refcount;
+
+ /*< private > */
+ gpointer _reserved;
+};
+
+/**
+* MixDisplayClass:
+* @dup: method to duplicate the object.
+* @copy: method to copy details in one object to the other.
+* @finalize: destructor
+* @equal: method to check if the content of two objects are equal.
+*
+* #MixDisplay class struct.
+*/
+struct _MixDisplayClass
+{
+ GTypeClass type_class;
+
+ MixDisplayDupFunction dup;
+ MixDisplayCopyFunction copy;
+ MixDisplayFinalizeFunction finalize;
+ MixDisplayEqualFunction equal;
+
+ /*< private > */
+ gpointer _mix_reserved;
+};
+
+/**
+* mix_display_get_type:
+* @returns: type of this object.
+*
+* Get type.
+*/
+GType mix_display_get_type (void);
+
+/**
+* mix_display_new:
+* @returns: return a newly allocated object.
+*
+* Create new instance of the object.
+*/
+MixDisplay *mix_display_new (void);
+
+/**
+* mix_display_copy:
+* @target: copy to target
+* @src: copy from source
+* @returns: boolean indicating if copy is successful.
+*
+* Copy data from one instance to the other. This method internally invoked the #MixDisplay::copy method such that derived object will be copied correctly.
+*/
+gboolean mix_display_copy (MixDisplay * target, const MixDisplay * src);
+
+/**
+* mix_display_ref:
+* @obj: a #MixDisplay object.
+* @returns: the object with reference count incremented.
+*
+* Increment reference count.
+*/
+MixDisplay *mix_display_ref (MixDisplay * obj);
+
+/**
+* mix_display_unref:
+* @obj: a #MixDisplay object.
+*
+* Decrement reference count.
+*/
+void mix_display_unref (MixDisplay * obj);
+
+/**
+* mix_display_replace:
+* @olddata: pointer to a pointer to the object to be replaced
+* @newdata: pointer to the new object, or NULL
+*
+* Atomically replace the object pointed to by @olddata with @newdata,
+* updating reference counts accordingly.
+*/
+void mix_display_replace (MixDisplay ** olddata, MixDisplay * newdata);
+
+/**
+* mix_display_dup:
+* @obj: #MixDisplay object to duplicate.
+* @returns: A newly allocated duplicate of the object, or NULL if failed.
+*
+* Duplicate the given #MixDisplay and allocate a new instance. This method is chained up properly and derive object will be dupped properly.
+*/
+MixDisplay *mix_display_dup (const MixDisplay * obj);
+
+/**
+* mix_display_equal:
+* @first: first object to compare
+* @second: second object to compare
+* @returns: boolean indicates if the 2 object contains same data.
+*
+* Note that the parameter comparison compares the values that are hold inside the object, not for checking if the 2 pointers are of the same instance.
+*/
+gboolean mix_display_equal (MixDisplay * first, MixDisplay * second);
+
+/* GParamSpec */
+
+#define MIX_TYPE_PARAM_DISPLAY (mix_param_spec_display_get_type())
+#define MIX_IS_PARAM_SPEC_DISPLAY(pspec) (G_TYPE_CHECK_INSTANCE_TYPE ((pspec), MIX_TYPE_PARAM_DISPLAY))
+#define MIX_PARAM_SPEC_DISPLAY(pspec) (G_TYPE_CHECK_INSTANCE_CAST ((pspec), MIX_TYPE_PARAM_DISPLAY, MixParamSpecDisplay))
+
+typedef struct _MixParamSpecDisplay MixParamSpecDisplay;
+
+/**
+* MixParamSpecDisplay:
+* @parent: #GParamSpec portion
+*
+* A #GParamSpec derived structure that contains the meta data
+* for #MixDisplay properties.
+*/
+struct _MixParamSpecDisplay
+{
+ GParamSpec parent;
+};
+
+GType mix_param_spec_display_get_type (void);
+
+GParamSpec *mix_param_spec_display (const char *name, const char *nick,
+ const char *blurb, GType object_type,
+ GParamFlags flags);
+
+/* GValue methods */
+
+void mix_value_set_display (GValue * value, MixDisplay * obj);
+void mix_value_take_display (GValue * value, MixDisplay * obj);
+MixDisplay *mix_value_get_display (const GValue * value);
+MixDisplay *mix_value_dup_display (const GValue * value);
+
+G_END_DECLS
+#endif
diff --git a/mix_video/src/mixdisplayx11.c b/mix_video/src/mixdisplayx11.c
new file mode 100644
index 0000000..60eb3e4
--- /dev/null
+++ b/mix_video/src/mixdisplayx11.c
@@ -0,0 +1,205 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+/**
+ * SECTION:mixdisplayx11
+ * @short_description: X11 display parameters
+ *
+ * A data object which stores X11 display and drawable parameters.
+ */
+
+#include "mixdisplayx11.h"
+
+#define SAFE_FREE(p) if(p) { g_free(p); p = NULL; }
+
+static GType _mix_displayx11_type = 0;
+static MixDisplayClass *parent_class = NULL;
+
+#define _do_init { _mix_displayx11_type = g_define_type_id; }
+
+gboolean mix_displayx11_copy(MixDisplay * target, const MixDisplay * src);
+MixDisplay *mix_displayx11_dup(const MixDisplay * obj);
+gboolean mix_displayx11_equal(MixDisplay * first, MixDisplay * second);
+static void mix_displayx11_finalize(MixDisplay * obj);
+
+G_DEFINE_TYPE_WITH_CODE (MixDisplayX11, mix_displayx11,
+ MIX_TYPE_DISPLAY, _do_init);
+
+/* GObject instance-init: start with an empty display/drawable pair. */
+static void mix_displayx11_init(MixDisplayX11 * self) {
+	self->drawable = 0;
+	self->display = NULL;
+}
+
+/* GObject class-init: cache the parent class pointer and install this
+ * subclass' overrides for the MixDisplay virtual methods. */
+static void mix_displayx11_class_init(MixDisplayX11Class * klass) {
+ MixDisplayClass *mixdisplay_class = MIX_DISPLAY_CLASS(klass);
+
+ /* setup static parent class, used for chaining up in the vmethods */
+ parent_class = (MixDisplayClass *) g_type_class_peek_parent(klass);
+
+ mixdisplay_class->finalize = mix_displayx11_finalize;
+ mixdisplay_class->copy = (MixDisplayCopyFunction) mix_displayx11_copy;
+ mixdisplay_class->dup = (MixDisplayDupFunction) mix_displayx11_dup;
+ mixdisplay_class->equal = (MixDisplayEqualFunction) mix_displayx11_equal;
+}
+
+/* Allocate a fresh MixDisplayX11 through the GType system. */
+MixDisplayX11 *
+mix_displayx11_new(void) {
+	return (MixDisplayX11 *) g_type_create_instance(MIX_TYPE_DISPLAYX11);
+}
+
+/* Finalize vmethod. Marked static to match the forward declaration
+ * above; the original definition dropped the qualifier. */
+static void mix_displayx11_finalize(MixDisplay * obj) {
+	/* No instance cleanup needed: per the original note, display and
+	 * drawable are not owned by this object and are not released here. */
+
+	/* Chain up to the parent class' finalize. */
+	if (parent_class->finalize)
+		parent_class->finalize(obj);
+}
+
+/**
+ * mix_displayx11_ref:
+ * @mix: object to add a reference to
+ * @returns: @mix with its reference count incremented.
+ */
+MixDisplayX11 *
+mix_displayx11_ref(MixDisplayX11 * mix) {
+ return (MixDisplayX11 *) mix_display_ref(MIX_DISPLAY(mix));
+}
+
+/**
+ * mix_displayx11_dup:
+ * @obj: a #MixDisplayX11 object
+ * @returns: a newly allocated duplicate of the object, or NULL on failure.
+ *
+ * Allocate a new instance and copy @obj's data into it.
+ */
+MixDisplay *
+mix_displayx11_dup(const MixDisplay * obj) {
+ MixDisplay *ret = NULL;
+
+ if (MIX_IS_DISPLAYX11(obj)) {
+ MixDisplayX11 *duplicate = mix_displayx11_new();
+ /* If the copy fails, release the half-built duplicate so no
+ * reference leaks, and return NULL. */
+ if (mix_displayx11_copy(MIX_DISPLAY(duplicate), MIX_DISPLAY(obj))) {
+ ret = MIX_DISPLAY(duplicate);
+ } else {
+ mix_displayx11_unref(duplicate);
+ }
+ }
+ return ret;
+}
+
+/**
+ * mix_displayx11_copy:
+ * @target: copy to target
+ * @src: copy from src
+ * @returns: boolean indicates if copy is successful.
+ *
+ * Copy instance data from @src to @target. Note this is a shallow
+ * copy: the Display pointer is shared, not cloned (see TODO below).
+ */
+gboolean mix_displayx11_copy(MixDisplay * target, const MixDisplay * src) {
+ MixDisplayX11 *this_target, *this_src;
+
+ if (MIX_IS_DISPLAYX11(target) && MIX_IS_DISPLAYX11(src)) {
+ // Cast the base object to this child object
+ this_target = MIX_DISPLAYX11(target);
+ this_src = MIX_DISPLAYX11(src);
+
+ // Copy properties from source to target.
+ // TODO: decide whether the Display should be cloned; for now the
+ // pointer is shared and the caller keeps ownership.
+
+ this_target->display = this_src->display;
+ this_target->drawable = this_src->drawable;
+
+ // Now chainup base class
+ if (parent_class->copy) {
+ return parent_class->copy(MIX_DISPLAY_CAST(target),
+ MIX_DISPLAY_CAST(src));
+ } else {
+ return TRUE;
+ }
+ }
+ return FALSE;
+}
+
+/**
+ * mix_displayx11_equal:
+ * @first: first object to compare
+ * @second: second object to compare
+ * @returns: boolean indicating whether the two instances hold equal data.
+ *
+ * Compare the display and drawable members, then chain up to the
+ * parent class comparison for the base-class portion.
+ */
+gboolean mix_displayx11_equal(MixDisplay * first, MixDisplay * second) {
+	gboolean ret = FALSE;
+
+	if (MIX_IS_DISPLAYX11(first) && MIX_IS_DISPLAYX11(second)) {
+		/* Cast only after BOTH objects pass the type check; the
+		 * original cast unvalidated objects first, which makes the
+		 * checked-cast macros warn on NULL/foreign instances. */
+		MixDisplayX11 *this_first = MIX_DISPLAYX11(first);
+		MixDisplayX11 *this_second = MIX_DISPLAYX11(second);
+
+		/* NOTE(review): pointer comparison of display assumes copy
+		 * shares the pointer rather than cloning the connection --
+		 * confirm once the copy policy (see TODO in _copy) is decided. */
+		if (this_first->display == this_second->display
+				&& this_first->drawable == this_second->drawable) {
+			/* Members at this level are equal; chain up. */
+			MixDisplayClass *klass = MIX_DISPLAY_CLASS(parent_class);
+			if (klass->equal)
+				ret = klass->equal(first, second);
+			else
+				ret = TRUE;
+		}
+	}
+	return ret;
+}
+
+#define MIX_DISPLAYX11_SETTER_CHECK_INPUT(obj) \
+ if(!obj) return MIX_RESULT_NULL_PTR; \
+ if(!MIX_IS_DISPLAYX11(obj)) return MIX_RESULT_FAIL; \
+
+#define MIX_DISPLAYX11_GETTER_CHECK_INPUT(obj, prop) \
+ if(!obj || !prop) return MIX_RESULT_NULL_PTR; \
+ if(!MIX_IS_DISPLAYX11(obj)) return MIX_RESULT_FAIL; \
+
+/* Store the X11 Display pointer. Returns MIX_RESULT_NULL_PTR /
+ * MIX_RESULT_FAIL on bad input, MIX_RESULT_SUCCESS otherwise. */
+MIX_RESULT mix_displayx11_set_display(MixDisplayX11 * obj, Display * display) {
+ MIX_DISPLAYX11_SETTER_CHECK_INPUT (obj);
+
+ // TODO: needs to decide to clone or just copy pointer
+ // (currently the pointer is shared; the caller keeps ownership)
+ obj->display = display;
+ return MIX_RESULT_SUCCESS;
+}
+
+/* Retrieve the stored X11 Display pointer into @display. Returns
+ * MIX_RESULT_NULL_PTR / MIX_RESULT_FAIL on bad input. */
+MIX_RESULT mix_displayx11_get_display(MixDisplayX11 * obj, Display ** display) {
+ MIX_DISPLAYX11_GETTER_CHECK_INPUT (obj, display);
+
+ // TODO: needs to decide to clone or just copy pointer
+ // (currently returns the shared pointer, no new reference/clone)
+ *display = obj->display;
+
+ return MIX_RESULT_SUCCESS;
+}
+
+/* Store the X11 Drawable (an XID, copied by value). Returns
+ * MIX_RESULT_NULL_PTR / MIX_RESULT_FAIL on bad input. */
+MIX_RESULT mix_displayx11_set_drawable(MixDisplayX11 * obj, Drawable drawable) {
+ MIX_DISPLAYX11_SETTER_CHECK_INPUT (obj);
+
+ obj->drawable = drawable;
+ return MIX_RESULT_SUCCESS;
+}
+
+/* Retrieve the stored X11 Drawable into @drawable. Returns
+ * MIX_RESULT_NULL_PTR / MIX_RESULT_FAIL on bad input. */
+MIX_RESULT mix_displayx11_get_drawable(MixDisplayX11 * obj, Drawable * drawable) {
+ MIX_DISPLAYX11_GETTER_CHECK_INPUT (obj, drawable);
+
+ *drawable = obj->drawable;
+ return MIX_RESULT_SUCCESS;
+}
diff --git a/mix_video/src/mixdisplayx11.h b/mix_video/src/mixdisplayx11.h
new file mode 100644
index 0000000..4a14c9f
--- /dev/null
+++ b/mix_video/src/mixdisplayx11.h
@@ -0,0 +1,141 @@
+/*
+INTEL CONFIDENTIAL
+Copyright 2009 Intel Corporation All Rights Reserved.
+The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+*/
+
+#ifndef __MIX_DISPLAYX11_H__
+#define __MIX_DISPLAYX11_H__
+
+#include "mixdisplay.h"
+#include "mixvideodef.h"
+#include <X11/Xlib.h>
+
+/**
+* MIX_TYPE_DISPLAYX11:
+*
+* Get type of class.
+*/
+#define MIX_TYPE_DISPLAYX11 (mix_displayx11_get_type ())
+
+/**
+* MIX_DISPLAYX11:
+* @obj: object to be type-casted.
+*/
+#define MIX_DISPLAYX11(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_DISPLAYX11, MixDisplayX11))
+
+/**
+* MIX_IS_DISPLAYX11:
+* @obj: an object.
+*
+* Checks if the given object is an instance of #MixDisplayX11
+*/
+#define MIX_IS_DISPLAYX11(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_DISPLAYX11))
+
+/**
+* MIX_DISPLAYX11_CLASS:
+* @klass: class to be type-casted.
+*/
+#define MIX_DISPLAYX11_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_DISPLAYX11, MixDisplayX11Class))
+
+/**
+* MIX_IS_DISPLAYX11_CLASS:
+* @klass: a class.
+*
+* Checks if the given class is #MixDisplayX11Class
+*/
+#define MIX_IS_DISPLAYX11_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_DISPLAYX11))
+
+/**
+* MIX_DISPLAYX11_GET_CLASS:
+* @obj: a #MixDisplay object.
+*
+* Get the class instance of the object.
+*/
+#define MIX_DISPLAYX11_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_DISPLAYX11, MixDisplayX11Class))
+
+typedef struct _MixDisplayX11 MixDisplayX11;
+typedef struct _MixDisplayX11Class MixDisplayX11Class;
+
+/**
+* MixDisplayX11:
+*
+* MI-X X11 display parameter object
+*/
+struct _MixDisplayX11
+{
+ /*< public > */
+ MixDisplay parent;
+
+ /*< public > */
+
+ Display *display;
+ Drawable drawable;
+};
+
+/**
+* MixDisplayX11Class:
+*
+* MI-X VideoInit object class
+*/
+struct _MixDisplayX11Class
+{
+ /*< public > */
+ MixDisplayClass parent_class;
+
+ /* class members */
+};
+
+/**
+* mix_displayx11_get_type:
+* @returns: type
+*
+* Get the type of object.
+*/
+GType mix_displayx11_get_type (void);
+
+/**
+* mix_displayx11_new:
+* @returns: A newly allocated instance of #MixDisplayX11
+*
+* Use this method to create new instance of #MixDisplayX11
+*/
+MixDisplayX11 *mix_displayx11_new (void);
+/**
+* mix_displayx11_ref:
+* @mix: object to add reference
+* @returns: the MixDisplayX11 instance where reference count has been increased.
+*
+* Add reference count.
+*/
+MixDisplayX11 *mix_displayx11_ref (MixDisplayX11 * mix);
+
+/**
+* mix_displayx11_unref:
+* @obj: object to unref.
+*
+* Decrement reference count of the object.
+*/
+#define mix_displayx11_unref(obj) mix_display_unref(MIX_DISPLAY(obj))
+
+/* Class Methods */
+
+/**
+* mix_displayx11_set_display:
+* @obj: #MixDisplayX11 object
+* @display: X11 #Display pointer to store (pointer is shared, not cloned)
+* @returns: #MIX_RESULT_SUCCESS on success, or an error code otherwise.
+*/
+MIX_RESULT mix_displayx11_set_display (MixDisplayX11 * obj,
+				       Display * display);
+
+/**
+* mix_displayx11_get_display:
+* @obj: #MixDisplayX11 object
+* @display: return location for the X11 #Display pointer
+* @returns: #MIX_RESULT_SUCCESS on success, or an error code otherwise.
+*/
+MIX_RESULT mix_displayx11_get_display (MixDisplayX11 * obj,
+				       Display ** display);
+
+/**
+* mix_displayx11_set_drawable:
+* @obj: #MixDisplayX11 object
+* @drawable: X11 #Drawable to store (copied by value)
+* @returns: #MIX_RESULT_SUCCESS on success, or an error code otherwise.
+*/
+MIX_RESULT mix_displayx11_set_drawable (MixDisplayX11 * obj,
+					Drawable drawable);
+
+/**
+* mix_displayx11_get_drawable:
+* @obj: #MixDisplayX11 object
+* @drawable: return location for the X11 #Drawable
+* @returns: #MIX_RESULT_SUCCESS on success, or an error code otherwise.
+*/
+MIX_RESULT mix_displayx11_get_drawable (MixDisplayX11 * obj,
+					Drawable * drawable);
+
+#endif /* __MIX_DISPLAYX11_H__ */
diff --git a/mix_video/src/mixdrmparams.c b/mix_video/src/mixdrmparams.c
new file mode 100644
index 0000000..336393b
--- /dev/null
+++ b/mix_video/src/mixdrmparams.c
@@ -0,0 +1,189 @@
+/*
+INTEL CONFIDENTIAL
+Copyright 2009 Intel Corporation All Rights Reserved.
+The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+*/
+
+/**
+* SECTION:mixdrmparams
+* @short_description: Drm parameters
+*
+* A data object which stores drm specific parameters.
+*/
+
+#include "mixdrmparams.h"
+
+static GType _mix_drmparams_type = 0;
+static MixParamsClass *parent_class = NULL;
+
+#define _do_init { _mix_drmparams_type = g_define_type_id; }
+
+gboolean mix_drmparams_copy (MixParams * target, const MixParams * src);
+MixParams *mix_drmparams_dup (const MixParams * obj);
+gboolean mix_drmparams_equal (MixParams * first, MixParams * second);
+static void mix_drmparams_finalize (MixParams * obj);
+
+G_DEFINE_TYPE_WITH_CODE (MixDrmParams, mix_drmparams, MIX_TYPE_PARAMS,
+ _do_init);
+
+/* GObject instance-init: no DRM properties are defined yet. */
+static void
+mix_drmparams_init (MixDrmParams * self)
+{
+  /* initialize properties here */
+
+  /* TODO: initialize properties */
+}
+
+/* GObject class-init: cache the parent class pointer and install this
+ * subclass' overrides for the MixParams virtual methods. */
+static void
+mix_drmparams_class_init (MixDrmParamsClass * klass)
+{
+  MixParamsClass *mixparams_class = MIX_PARAMS_CLASS (klass);
+
+  /* setup static parent class, used for chaining up in the vmethods */
+  parent_class = (MixParamsClass *) g_type_class_peek_parent (klass);
+
+  mixparams_class->finalize = mix_drmparams_finalize;
+  mixparams_class->copy = (MixParamsCopyFunction) mix_drmparams_copy;
+  mixparams_class->dup = (MixParamsDupFunction) mix_drmparams_dup;
+  mixparams_class->equal = (MixParamsEqualFunction) mix_drmparams_equal;
+}
+
+/* Allocate a fresh MixDrmParams through the GType system. */
+MixDrmParams *
+mix_drmparams_new (void)
+{
+  return (MixDrmParams *) g_type_create_instance (MIX_TYPE_DRMPARAMS);
+}
+
+/* Finalize vmethod. Marked static to match the forward declaration
+ * above; the original definition dropped the qualifier. */
+static void
+mix_drmparams_finalize (MixParams * obj)
+{
+  /* No properties are allocated yet, so nothing to release at this
+   * level (see the TODOs in _init). */
+
+  /* Chain up to the parent class' finalize. */
+  if (parent_class->finalize)
+    {
+      parent_class->finalize (obj);
+    }
+}
+
+/**
+* mix_drmparams_ref:
+* @mix: object to add a reference to
+* @returns: @mix with its reference count incremented.
+*/
+MixDrmParams *
+mix_drmparams_ref (MixDrmParams * mix)
+{
+  return (MixDrmParams *) mix_params_ref (MIX_PARAMS (mix));
+}
+
+/**
+* mix_drmparams_dup:
+* @obj: a #MixDrmParams object
+* @returns: a newly allocated duplicate of the object, or NULL on failure.
+*
+* Allocate a new instance and copy @obj's data into it.
+*/
+MixParams *
+mix_drmparams_dup (const MixParams * obj)
+{
+  MixParams *ret = NULL;
+
+  if (MIX_IS_DRMPARAMS (obj))
+    {
+      MixDrmParams *duplicate = mix_drmparams_new ();
+      /* If the copy fails, release the half-built duplicate so no
+       * reference leaks, and return NULL. */
+      if (mix_drmparams_copy (MIX_PARAMS (duplicate), MIX_PARAMS (obj)))
+	{
+	  ret = MIX_PARAMS (duplicate);
+	}
+      else
+	{
+	  mix_drmparams_unref (duplicate);
+	}
+    }
+  return ret;
+}
+
+/**
+* mix_drmparams_copy:
+* @target: copy to target
+* @src: copy from src
+* @returns: boolean indicates if copy is successful.
+*
+* Copy instance data from @src to @target, then chain up to the
+* parent class so base-class data is copied too.
+*/
+gboolean
+mix_drmparams_copy (MixParams * target, const MixParams * src)
+{
+  MixDrmParams *this_target, *this_src;
+
+  if (MIX_IS_DRMPARAMS (target) && MIX_IS_DRMPARAMS (src))
+    {
+      // Cast the base object to this child object
+      this_target = MIX_DRMPARAMS (target);
+      this_src = MIX_DRMPARAMS (src);
+
+      // TODO: copy properties */
+
+      // Now chainup base class
+      if (parent_class->copy)
+	{
+	  return parent_class->copy (MIX_PARAMS_CAST (target),
+				     MIX_PARAMS_CAST (src));
+	}
+      else
+	{
+	  return TRUE;
+	}
+    }
+  return FALSE;
+}
+
+/**
+* mix_drmparams_equal:
+* @first: first object to compare
+* @second: second object to compare
+* @returns: boolean indicates if the instances are equal.
+*
+* Compare the properties of the two instances (none defined yet),
+* then chain up to the parent class comparison.
+*/
+gboolean
+mix_drmparams_equal (MixParams * first, MixParams * second)
+{
+  gboolean ret = FALSE;
+  MixDrmParams *this_first, *this_second;
+
+  if (MIX_IS_DRMPARAMS (first) && MIX_IS_DRMPARAMS (second))
+    {
+      // Deep compare
+      // Cast the base object to this child object
+
+      this_first = MIX_DRMPARAMS (first);
+      this_second = MIX_DRMPARAMS (second);
+
+      /* TODO: add comparison for properties */
+      /* if ( first properties == sencod properties) */
+      {
+	// members within this scope equal. chaining up.
+	MixParamsClass *klass = MIX_PARAMS_CLASS (parent_class);
+	if (klass->equal)
+	  ret = parent_class->equal (first, second);
+	else
+	  ret = TRUE;
+      }
+    }
+
+  return ret;
+}
+
+#define MIX_DRMPARAMS_SETTER_CHECK_INPUT(obj) \
+ if(!obj) return MIX_RESULT_NULL_PTR; \
+ if(!MIX_IS_DRMPARAMS(obj)) return MIX_RESULT_FAIL; \
+
+#define MIX_DRMPARAMS_GETTER_CHECK_INPUT(obj, prop) \
+ if(!obj || !prop) return MIX_RESULT_NULL_PTR; \
+ if(!MIX_IS_DRMPARAMS(obj)) return MIX_RESULT_FAIL; \
+
+
+/* TODO: Add getters and setters for properties. */
diff --git a/mix_video/src/mixdrmparams.h b/mix_video/src/mixdrmparams.h
new file mode 100644
index 0000000..d5ffdbe
--- /dev/null
+++ b/mix_video/src/mixdrmparams.h
@@ -0,0 +1,126 @@
+/*
+INTEL CONFIDENTIAL
+Copyright 2009 Intel Corporation All Rights Reserved.
+The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+*/
+
+#ifndef __MIX_DRMPARAMS_H__
+#define __MIX_DRMPARAMS_H__
+
+#include <mixparams.h>
+#include "mixvideodef.h"
+
+/**
+* MIX_TYPE_DRMPARAMS:
+*
+* Get type of class.
+*/
+#define MIX_TYPE_DRMPARAMS (mix_drmparams_get_type ())
+
+/**
+* MIX_DRMPARAMS:
+* @obj: object to be type-casted.
+*/
+#define MIX_DRMPARAMS(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_DRMPARAMS, MixDrmParams))
+
+/**
+* MIX_IS_DRMPARAMS:
+* @obj: an object.
+*
+* Checks if the given object is an instance of #MixParams
+*/
+#define MIX_IS_DRMPARAMS(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_DRMPARAMS))
+
+/**
+* MIX_DRMPARAMS_CLASS:
+* @klass: class to be type-casted.
+*/
+#define MIX_DRMPARAMS_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_DRMPARAMS, MixDrmParamsClass))
+
+/**
+* MIX_IS_DRMPARAMS_CLASS:
+* @klass: a class.
+*
+* Checks if the given class is #MixParamsClass
+*/
+#define MIX_IS_DRMPARAMS_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_DRMPARAMS))
+
+/**
+* MIX_DRMPARAMS_GET_CLASS:
+* @obj: a #MixParams object.
+*
+* Get the class instance of the object.
+*/
+#define MIX_DRMPARAMS_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_DRMPARAMS, MixDrmParamsClass))
+
+typedef struct _MixDrmParams MixDrmParams;
+typedef struct _MixDrmParamsClass MixDrmParamsClass;
+
+/**
+* MixDrmParams:
+*
+* MI-X Drm Parameter object
+*/
+struct _MixDrmParams
+{
+ /*< public > */
+ MixParams parent;
+
+ /*< public > */
+
+ /* TODO: Add properties */
+
+};
+
+/**
+* MixDrmParamsClass:
+*
+* MI-X Drm object class
+*/
+struct _MixDrmParamsClass
+{
+ /*< public > */
+ MixParamsClass parent_class;
+
+ /* class members */
+};
+
+/**
+* mix_drmparams_get_type:
+* @returns: type
+*
+* Get the type of object.
+*/
+GType mix_drmparams_get_type (void);
+
+/**
+* mix_drmparams_new:
+* @returns: A newly allocated instance of #MixDrmParams
+*
+* Use this method to create new instance of #MixDrmParams
+*/
+MixDrmParams *mix_drmparams_new (void);
+/**
+* mix_drmparams_ref:
+* @mix: object to add reference
+* @returns: the MixDrmParams instance where reference count has been increased.
+*
+* Add reference count.
+*/
+MixDrmParams *mix_drmparams_ref (MixDrmParams * mix);
+
+/**
+* mix_drmparams_unref:
+* @obj: object to unref.
+*
+* Decrement reference count of the object.
+*/
+#define mix_drmparams_unref(obj) mix_params_unref(MIX_PARAMS(obj))
+
+/* Class Methods */
+
+/* TODO: Add getters and setters for properties */
+
+#endif /* __MIX_DRMPARAMS_H__ */
diff --git a/mix_video/src/mixframemanager.c b/mix_video/src/mixframemanager.c
new file mode 100644
index 0000000..4cb24e8
--- /dev/null
+++ b/mix_video/src/mixframemanager.c
@@ -0,0 +1,775 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+#include <glib.h>
+
+#include "mixvideolog.h"
+#include "mixframemanager.h"
+#include "mixvideoframe_private.h"
+
+#define INITIAL_FRAME_ARRAY_SIZE 16
+#define MIX_SECOND (G_USEC_PER_SEC * G_GINT64_CONSTANT (1000))
+
+static GObjectClass *parent_class = NULL;
+
+static void mix_framemanager_finalize(GObject * obj);
+G_DEFINE_TYPE( MixFrameManager, mix_framemanager, G_TYPE_OBJECT);
+
+static void mix_framemanager_init(MixFrameManager * self) {
+	/* Instance construction: set safe defaults for every member. */
+
+	/* The GLib threading system must be running before mutex creation. */
+	if (!g_thread_supported()) {
+		g_thread_init(NULL);
+	}
+
+	self->lock = g_mutex_new();
+
+	/* Not usable until mix_framemanager_initialize() succeeds. */
+	self->initialized = FALSE;
+	self->flushing = FALSE;
+	self->eos = FALSE;
+	self->frame_array = NULL;
+	self->frame_queue = NULL;
+
+	/* Default ordering: display order at 30/1 fps. */
+	self->mode = MIX_FRAMEORDER_MODE_DISPLAYORDER;
+	self->framerate_numerator = 30;
+	self->framerate_denominator = 1;
+	self->is_first_frame = TRUE;
+
+	/* State used only by frame-type based ordering (vc1 in asf). */
+	self->p_frame = NULL;
+	self->prev_timestamp = 0;
+}
+
+static void mix_framemanager_class_init(MixFrameManagerClass * klass) {
+	GObjectClass *gclass = (GObjectClass *) klass;
+
+	/* Remember the parent class so finalize can chain up. */
+	parent_class = g_type_class_peek_parent(klass);
+
+	gclass->finalize = mix_framemanager_finalize;
+}
+
+MixFrameManager *mix_framemanager_new(void) {
+	/* Plain GObject construction; all defaults are set in init(). */
+	return g_object_new(MIX_TYPE_FRAMEMANAGER, NULL);
+}
+
+void mix_framemanager_finalize(GObject * obj) {
+	MixFrameManager *fm = MIX_FRAMEMANAGER(obj);
+
+	/* Release queues/arrays and any frames still held. */
+	mix_framemanager_deinitialize(fm);
+
+	if (fm->lock != NULL) {
+		g_mutex_free(fm->lock);
+		fm->lock = NULL;
+	}
+
+	/* Chain up to the parent finalizer. */
+	if (parent_class->finalize) {
+		parent_class->finalize(obj);
+	}
+}
+
+MixFrameManager *mix_framemanager_ref(MixFrameManager * fm) {
+	/* Thin typed wrapper over g_object_ref(). */
+	return (MixFrameManager *) g_object_ref(G_OBJECT(fm));
+}
+
+/* MixFrameManager class methods */
+
+MIX_RESULT mix_framemanager_initialize(MixFrameManager *fm,
+		MixFrameOrderMode mode, gint framerate_numerator,
+		gint framerate_denominator, gboolean timebased_ordering) {
+
+	MIX_RESULT ret = MIX_RESULT_FAIL;
+
+	/* Validate arguments up front. */
+	if (!MIX_IS_FRAMEMANAGER(fm)) {
+		return MIX_RESULT_INVALID_PARAM;
+	}
+	if (mode != MIX_FRAMEORDER_MODE_DISPLAYORDER
+			&& mode != MIX_FRAMEORDER_MODE_DECODEORDER) {
+		return MIX_RESULT_INVALID_PARAM;
+	}
+	if (framerate_numerator <= 0 || framerate_denominator <= 0) {
+		return MIX_RESULT_INVALID_PARAM;
+	}
+
+	if (fm->initialized) {
+		return MIX_RESULT_ALREADY_INIT;
+	}
+
+	/* The GLib threading system must be running before mutex creation. */
+	if (!g_thread_supported()) {
+		g_thread_init(NULL);
+	}
+
+	ret = MIX_RESULT_NO_MEMORY;
+
+	if (fm->lock == NULL) {
+		fm->lock = g_mutex_new();
+		if (fm->lock == NULL) {
+			goto cleanup;
+		}
+	}
+
+	/* The waiting array is only needed when re-ordering to display order. */
+	if (mode == MIX_FRAMEORDER_MODE_DISPLAYORDER) {
+		fm->frame_array = g_ptr_array_sized_new(INITIAL_FRAME_ARRAY_SIZE);
+		if (fm->frame_array == NULL) {
+			goto cleanup;
+		}
+	}
+
+	fm->frame_queue = g_queue_new();
+	if (fm->frame_queue == NULL) {
+		goto cleanup;
+	}
+
+	/* Expected spacing between consecutive frame timestamps. */
+	fm->framerate_numerator = framerate_numerator;
+	fm->framerate_denominator = framerate_denominator;
+	fm->frame_timestamp_delta = fm->framerate_denominator * MIX_SECOND
+			/ fm->framerate_numerator;
+
+	fm->mode = mode;
+	fm->timebased_ordering = timebased_ordering;
+	fm->initialized = TRUE;
+
+	ret = MIX_RESULT_SUCCESS;
+
+	cleanup:
+
+	/* Undo partial allocation on any failure path. */
+	if (ret != MIX_RESULT_SUCCESS) {
+		if (fm->frame_array) {
+			g_ptr_array_free(fm->frame_array, TRUE);
+			fm->frame_array = NULL;
+		}
+		if (fm->frame_queue) {
+			g_queue_free(fm->frame_queue);
+			fm->frame_queue = NULL;
+		}
+	}
+	return ret;
+}
+MIX_RESULT mix_framemanager_deinitialize(MixFrameManager *fm) {
+
+	if (!MIX_IS_FRAMEMANAGER(fm)) {
+		return MIX_RESULT_INVALID_PARAM;
+	}
+	if (fm->lock == NULL) {
+		return MIX_RESULT_FAIL;
+	}
+	if (!fm->initialized) {
+		return MIX_RESULT_NOT_INIT;
+	}
+
+	/* Drop any frames still queued before tearing the containers down. */
+	mix_framemanager_flush(fm);
+
+	g_mutex_lock(fm->lock);
+
+	if (fm->frame_array != NULL) {
+		g_ptr_array_free(fm->frame_array, TRUE);
+		fm->frame_array = NULL;
+	}
+	if (fm->frame_queue != NULL) {
+		g_queue_free(fm->frame_queue);
+		fm->frame_queue = NULL;
+	}
+
+	fm->initialized = FALSE;
+
+	g_mutex_unlock(fm->lock);
+
+	return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_framemanager_set_framerate(MixFrameManager *fm,
+		gint framerate_numerator, gint framerate_denominator) {
+
+	if (!MIX_IS_FRAMEMANAGER(fm)) {
+		return MIX_RESULT_INVALID_PARAM;
+	}
+	if (fm->lock == NULL) {
+		return MIX_RESULT_FAIL;
+	}
+	if (framerate_numerator <= 0 || framerate_denominator <= 0) {
+		return MIX_RESULT_INVALID_PARAM;
+	}
+
+	g_mutex_lock(fm->lock);
+
+	/* Recompute the expected timestamp spacing between frames. */
+	fm->framerate_numerator = framerate_numerator;
+	fm->framerate_denominator = framerate_denominator;
+	fm->frame_timestamp_delta = fm->framerate_denominator * MIX_SECOND
+			/ fm->framerate_numerator;
+
+	g_mutex_unlock(fm->lock);
+
+	return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_framemanager_get_framerate(MixFrameManager *fm,
+		gint *framerate_numerator, gint *framerate_denominator) {
+
+	if (!MIX_IS_FRAMEMANAGER(fm)) {
+		return MIX_RESULT_INVALID_PARAM;
+	}
+	if (fm->lock == NULL) {
+		return MIX_RESULT_FAIL;
+	}
+	if (framerate_numerator == NULL || framerate_denominator == NULL) {
+		return MIX_RESULT_INVALID_PARAM;
+	}
+
+	g_mutex_lock(fm->lock);
+
+	/* Copy out under the lock so the pair is consistent. */
+	*framerate_numerator = fm->framerate_numerator;
+	*framerate_denominator = fm->framerate_denominator;
+
+	g_mutex_unlock(fm->lock);
+
+	return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_framemanager_get_frame_order_mode(MixFrameManager *fm,
+		MixFrameOrderMode *mode) {
+
+	if (!MIX_IS_FRAMEMANAGER(fm)) {
+		return MIX_RESULT_INVALID_PARAM;
+	}
+	if (fm->lock == NULL) {
+		return MIX_RESULT_FAIL;
+	}
+	if (mode == NULL) {
+		return MIX_RESULT_INVALID_PARAM;
+	}
+
+	/* Single word read; no need to take the lock. */
+	*mode = fm->mode;
+
+	return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_framemanager_flush(MixFrameManager *fm) {
+
+	if (!MIX_IS_FRAMEMANAGER(fm)) {
+		return MIX_RESULT_INVALID_PARAM;
+	}
+	if (!fm->initialized) {
+		return MIX_RESULT_NOT_INIT;
+	}
+
+	g_mutex_lock(fm->lock);
+
+	/* Release every frame parked in the re-ordering array. */
+	if (fm->frame_array != NULL) {
+		guint i;
+		for (i = 0; i < fm->frame_array->len; i++) {
+			MixVideoFrame *frame =
+					(MixVideoFrame *) g_ptr_array_index(fm->frame_array, i);
+			if (frame != NULL) {
+				mix_videoframe_unref(frame);
+				g_ptr_array_index(fm->frame_array, i) = NULL;
+			}
+		}
+		/* g_ptr_array_remove_range(fm->frame_array, 0, len); */
+	}
+
+	/* Drain and release the output queue. */
+	if (fm->frame_queue != NULL) {
+		MixVideoFrame *frame;
+		while ((frame = (MixVideoFrame *) g_queue_pop_head(fm->frame_queue))
+				!= NULL) {
+			mix_videoframe_unref(frame);
+		}
+	}
+
+	/* Forget the pending P frame used by frame-type based ordering. */
+	if (fm->p_frame != NULL) {
+		mix_videoframe_unref(fm->p_frame);
+		fm->p_frame = NULL;
+	}
+	fm->prev_timestamp = 0;
+
+	fm->eos = FALSE;
+	fm->is_first_frame = TRUE;
+
+	g_mutex_unlock(fm->lock);
+
+	return MIX_RESULT_SUCCESS;
+}
+
+/*
+ * get_expected_frame_from_array:
+ * @array: waiting list of #MixVideoFrame, may contain NULL slots
+ * @expected: timestamp we expect next
+ * @tolerance: allowed deviation from @expected
+ * @frametimestamp: out - timestamp of the returned frame
+ *
+ * Scan @array for the frame with the lowest timestamp and, if that
+ * timestamp is no later than @expected + @tolerance, clear its slot
+ * and return it.  Returns NULL when no frame is due.
+ *
+ * Note: @expected == 0 or @tolerance == 0 is rejected as invalid, so a
+ * frame with timestamp 0 can never be matched here -- assumed acceptable
+ * for this pipeline (timestamps are expected nonzero); verify with callers.
+ */
+MixVideoFrame *get_expected_frame_from_array(GPtrArray *array,
+		guint64 expected, guint64 tolerance, guint64 *frametimestamp) {
+
+	guint idx = 0;
+	guint len = 0;
+	guint64 timestamp = 0;
+	guint64 lowest_timestamp = (guint64)-1;
+	/* Fix: explicit unsigned max sentinel instead of implicit signed -1
+	 * conversion into a guint. */
+	guint lowest_timestamp_idx = (guint)-1;
+
+	MixVideoFrame *frame = NULL;
+
+	if (!array || !expected || !tolerance || !frametimestamp || expected < tolerance) {
+
+		return NULL;
+	}
+
+	len = array->len;
+	if (!len) {
+		return NULL;
+	}
+
+	/* Find the oldest (lowest-timestamp) frame currently waiting. */
+	for (idx = 0; idx < len; idx++) {
+		MixVideoFrame *_frame = (MixVideoFrame *) g_ptr_array_index(array, idx);
+		if (_frame) {
+
+			if (mix_videoframe_get_timestamp(_frame, &timestamp)
+					!= MIX_RESULT_SUCCESS) {
+
+				/*
+				 * Oops, this shall never happen!
+				 * In case it happens, release the frame and free its
+				 * slot, then stop scanning.
+				 */
+
+				mix_videoframe_unref(_frame);
+
+				/* make an available slot */
+				g_ptr_array_index(array, idx) = NULL;
+
+				break;
+			}
+
+			if (lowest_timestamp > timestamp)
+			{
+				lowest_timestamp = timestamp;
+				lowest_timestamp_idx = idx;
+			}
+		}
+	}
+
+	/* Nothing usable found. */
+	if (lowest_timestamp == (guint64)-1)
+	{
+		return NULL;
+	}
+
+	/* Hand the frame out only if it is due (within tolerance). */
+	if (lowest_timestamp <= expected + tolerance)
+	{
+		frame = (MixVideoFrame *) g_ptr_array_index(array, lowest_timestamp_idx);
+		/* make this slot available */
+		g_ptr_array_index(array, lowest_timestamp_idx) = NULL;
+
+		*frametimestamp = lowest_timestamp;
+	}
+
+	return frame;
+}
+
+/* Park @mvf in @array, reusing a NULL slot if one exists, otherwise
+ * growing the array. */
+void add_frame_into_array(GPtrArray *array, MixVideoFrame *mvf) {
+
+	guint idx;
+
+	if (array == NULL || mvf == NULL) {
+		return;
+	}
+
+	/* Reuse the first empty slot if there is one. */
+	for (idx = 0; idx < array->len; idx++) {
+		if (g_ptr_array_index(array, idx) == NULL) {
+			g_ptr_array_index(array, idx) = (gpointer) mvf;
+			return;
+		}
+	}
+
+	/* No free slot - append. */
+	g_ptr_array_add(array, (gpointer) mvf);
+}
+
+MIX_RESULT mix_framemanager_timestamp_based_enqueue(MixFrameManager *fm,
+		MixVideoFrame *mvf) {
+	/*
+	 * display order mode.
+	 *
+	 * Caller must hold fm->lock (mix_framemanager_enqueue() takes it
+	 * before dispatching here).  On success, ownership of mvf passes to
+	 * fm->frame_queue or fm->frame_array; on failure no unref is done
+	 * here, so ownership stays with the caller.
+	 *
+	 * if this is the first frame, we always push it into
+	 * output queue, if it is not, check if it is the one
+	 * expected, if yes, push it into the output queue.
+	 * if not, put it into waiting list.
+	 *
+	 * while the expected frame is pushed into output queue,
+	 * the expected next timestamp is also updated. with this
+	 * updated expected next timestamp, we search for expected
+	 * frame from the waiting list, if found, repeat the process.
+	 *
+	 */
+
+	MIX_RESULT ret = MIX_RESULT_FAIL;
+	guint64 timestamp = 0;
+
+	/* Jumped back to when a discontinuity resets first-frame handling. */
+	first_frame:
+
+	ret = mix_videoframe_get_timestamp(mvf, &timestamp);
+	if (ret != MIX_RESULT_SUCCESS) {
+		goto cleanup;
+	}
+
+	if (fm->is_first_frame) {
+
+		/*
+		 * for the first frame, we can always put it into the output queue
+		 */
+		g_queue_push_tail(fm->frame_queue, (gpointer) mvf);
+
+		/*
+		 * what timestamp of next frame shall be?
+		 */
+		fm->next_frame_timestamp = timestamp + fm->frame_timestamp_delta;
+
+		fm->is_first_frame = FALSE;
+
+	} else {
+
+		/*
+		 * is this the next frame expected?
+		 */
+
+		/* calculate tolerance: a quarter of the nominal frame period */
+		guint64 tolerance = fm->frame_timestamp_delta / 4;
+		MixVideoFrame *frame_from_array = NULL;
+		guint64 timestamp_frame_array = 0;
+
+		/*
+		 * timestamp may be associated with the second field, which
+		 * will not fall between the tolerance range.
+		 */
+
+		if (timestamp <= fm->next_frame_timestamp + tolerance) {
+
+			/*
+			 * ok, this is the frame expected, push it into output queue
+			 */
+			g_queue_push_tail(fm->frame_queue, (gpointer) mvf);
+
+			/*
+			 * update next_frame_timestamp only if it falls within the tolerance range
+			 */
+			if (timestamp >= fm->next_frame_timestamp - tolerance)
+			{
+				fm->next_frame_timestamp = timestamp + fm->frame_timestamp_delta;
+			}
+
+			/*
+			 * since we updated next_frame_timestamp, there might be a frame
+			 * in the frame_array that satisfying this new next_frame_timestamp
+			 */
+
+			while ((frame_from_array = get_expected_frame_from_array(
+					fm->frame_array, fm->next_frame_timestamp, tolerance,
+					&timestamp_frame_array))) {
+
+				g_queue_push_tail(fm->frame_queue, (gpointer) frame_from_array);
+
+				/*
+				 * update next_frame_timestamp only if it falls within the tolerance range
+				 */
+				if (timestamp_frame_array >= fm->next_frame_timestamp - tolerance)
+				{
+					fm->next_frame_timestamp = timestamp_frame_array
+							+ fm->frame_timestamp_delta;
+				}
+			}
+
+		} else {
+
+			/*
+			 * is discontinuity flag set for this frame ?
+			 */
+			gboolean discontinuity = FALSE;
+			ret = mix_videoframe_get_discontinuity(mvf, &discontinuity);
+			if (ret != MIX_RESULT_SUCCESS) {
+				goto cleanup;
+			}
+
+			/*
+			 * If this is a frame with discontinuity flag set, clear frame_array
+			 * and treat the frame as the first frame.
+			 */
+			if (discontinuity) {
+
+				/* release every waiting frame; slots become NULL */
+				guint len = fm->frame_array->len;
+				if (len) {
+					guint idx = 0;
+					MixVideoFrame *frame = NULL;
+					for (idx = 0; idx < len; idx++) {
+						frame = (MixVideoFrame *) g_ptr_array_index(
+								fm->frame_array, idx);
+						if (frame) {
+							mix_videoframe_unref(frame);
+							g_ptr_array_index(fm->frame_array, idx) = NULL;
+						}
+					}
+				}
+
+				fm->is_first_frame = TRUE;
+				goto first_frame;
+			}
+
+			/*
+			 * handle variable frame rate:
+			 * display any frame which time stamp is less than current one.
+			 *
+			 */
+			guint64 tolerance = fm->frame_timestamp_delta / 4;
+			MixVideoFrame *frame_from_array = NULL;
+			guint64 timestamp_frame_array = 0;
+
+			while ((frame_from_array = get_expected_frame_from_array(
+					fm->frame_array, timestamp, tolerance,
+					&timestamp_frame_array)))
+			{
+				g_queue_push_tail(fm->frame_queue, (gpointer) frame_from_array);
+
+				/*
+				 * update next_frame_timestamp only if it falls within the tolerance range
+				 */
+				if (timestamp_frame_array >= fm->next_frame_timestamp - tolerance)
+				{
+					fm->next_frame_timestamp = timestamp_frame_array
+							+ fm->frame_timestamp_delta;
+				}
+			}
+			/*
+			 * this is not the expected frame, put it into frame_array
+			 */
+
+			add_frame_into_array(fm->frame_array, mvf);
+		}
+	}
+	cleanup:
+
+	return ret;
+}
+
+/*
+ * Display-order enqueue keyed on frame type (used when timebased_ordering
+ * is FALSE, e.g. VC-1 in ASF).
+ *
+ * Caller must hold fm->lock (mix_framemanager_enqueue() takes it before
+ * dispatching here).  Reference frames (I/P) are held back in fm->p_frame
+ * until the next reference frame arrives; B frames are emitted immediately
+ * with their timestamp exchanged with the held reference frame's.
+ */
+MIX_RESULT mix_framemanager_frametype_based_enqueue(MixFrameManager *fm,
+		MixVideoFrame *mvf) {
+
+	MIX_RESULT ret = MIX_RESULT_FAIL;
+	MixFrameType frame_type;
+	guint64 timestamp = 0;
+
+	ret = mix_videoframe_get_frame_type(mvf, &frame_type);
+	if (ret != MIX_RESULT_SUCCESS) {
+		goto cleanup;
+	}
+
+	ret = mix_videoframe_get_timestamp(mvf, &timestamp);
+	if (ret != MIX_RESULT_SUCCESS) {
+		goto cleanup;
+	}
+
+#ifdef MIX_LOG_ENABLE
+	if (frame_type == TYPE_I) {
+		LOG_I( "TYPE_I %"G_GINT64_FORMAT"\n", timestamp);
+	} else if (frame_type == TYPE_P) {
+		LOG_I( "TYPE_P %"G_GINT64_FORMAT"\n", timestamp);
+	} else if (frame_type == TYPE_B) {
+		LOG_I( "TYPE_B %"G_GINT64_FORMAT"\n", timestamp);
+	} else {
+		LOG_I( "TYPE_UNKNOWN %"G_GINT64_FORMAT"\n", timestamp);
+	}
+#endif
+
+	if (fm->is_first_frame) {
+		/*
+		 * The first frame is not a I frame, unexpected!
+		 * NOTE(review): mvf is not unreffed here, so on failure
+		 * ownership stays with the caller -- confirm callers handle it.
+		 */
+		if (frame_type != TYPE_I) {
+			goto cleanup;
+		}
+
+		g_queue_push_tail(fm->frame_queue, (gpointer) mvf);
+		fm->is_first_frame = FALSE;
+	} else {
+
+		/*
+		 * I P B B P B B ...
+		 */
+		if (frame_type == TYPE_I || frame_type == TYPE_P) {
+
+			/* A new reference frame releases the held one, stamped with
+			 * the timestamp saved when it was parked. */
+			if (fm->p_frame) {
+
+				ret = mix_videoframe_set_timestamp(fm->p_frame,
+						fm->prev_timestamp);
+				if (ret != MIX_RESULT_SUCCESS) {
+					goto cleanup;
+				}
+
+				g_queue_push_tail(fm->frame_queue, (gpointer) fm->p_frame);
+				fm->p_frame = NULL;
+			}
+
+			/* it is an I frame, push it into the out queue */
+			/*if (frame_type == TYPE_I) {
+
+			 g_queue_push_tail(fm->frame_queue, (gpointer) mvf);
+
+			 } else*/
+			{
+				/* it is a P frame, we can not push it to the out queue yet, save it */
+				fm->p_frame = mvf;
+				fm->prev_timestamp = timestamp;
+			}
+
+			ret = MIX_RESULT_SUCCESS;
+
+		} else {
+			/* it is a B frame, replace the timestamp with the previous one */
+			if (timestamp > fm->prev_timestamp) {
+				ret = mix_videoframe_set_timestamp(mvf, fm->prev_timestamp);
+				if (ret != MIX_RESULT_SUCCESS) {
+					goto cleanup;
+				}
+
+				/* save the timestamp for the held reference frame */
+				fm->prev_timestamp = timestamp;
+			}
+			g_queue_push_tail(fm->frame_queue, (gpointer) mvf);
+			ret = MIX_RESULT_SUCCESS;
+		}
+	}
+
+	cleanup:
+
+	return ret;
+}
+
+MIX_RESULT mix_framemanager_enqueue(MixFrameManager *fm, MixVideoFrame *mvf) {
+
+	MIX_RESULT ret = MIX_RESULT_FAIL;
+
+	/*fm->mode = MIX_FRAMEORDER_MODE_DECODEORDER;*/
+
+	/* Parameter / state checks. */
+	if (mvf == NULL || !MIX_IS_FRAMEMANAGER(fm)) {
+		return MIX_RESULT_INVALID_PARAM;
+	}
+	if (!fm->initialized) {
+		return MIX_RESULT_NOT_INIT;
+	}
+
+	/* This should never happen: the mode was validated at init time. */
+	if (fm->mode != MIX_FRAMEORDER_MODE_DISPLAYORDER && fm->mode
+			!= MIX_FRAMEORDER_MODE_DECODEORDER) {
+		return MIX_RESULT_FAIL;
+	}
+
+	g_mutex_lock(fm->lock);
+
+	if (fm->mode == MIX_FRAMEORDER_MODE_DECODEORDER) {
+		/* Decode order: frames go straight to the output queue. */
+		g_queue_push_tail(fm->frame_queue, (gpointer) mvf);
+		ret = MIX_RESULT_SUCCESS;
+	} else if (fm->timebased_ordering) {
+		ret = mix_framemanager_timestamp_based_enqueue(fm, mvf);
+	} else {
+		ret = mix_framemanager_frametype_based_enqueue(fm, mvf);
+	}
+
+	g_mutex_unlock(fm->lock);
+
+	return ret;
+}
+
+MIX_RESULT mix_framemanager_dequeue(MixFrameManager *fm, MixVideoFrame **mvf) {
+
+	MIX_RESULT ret;
+
+	if (!MIX_IS_FRAMEMANAGER(fm) || mvf == NULL) {
+		return MIX_RESULT_INVALID_PARAM;
+	}
+	if (!fm->initialized) {
+		return MIX_RESULT_NOT_INIT;
+	}
+
+	g_mutex_lock(fm->lock);
+
+	/* Pop the next displayable frame, if any. */
+	*mvf = (MixVideoFrame *) g_queue_pop_head(fm->frame_queue);
+	if (*mvf != NULL) {
+		ret = MIX_RESULT_SUCCESS;
+	} else if (fm->eos) {
+		/* Queue drained and end-of-stream was signalled. */
+		ret = MIX_RESULT_EOS;
+	} else {
+		ret = MIX_RESULT_FRAME_NOTAVAIL;
+	}
+
+	g_mutex_unlock(fm->lock);
+
+	return ret;
+}
+
+/*
+ * Mark end-of-stream; once the queue drains, dequeue returns MIX_RESULT_EOS.
+ */
+MIX_RESULT mix_framemanager_eos(MixFrameManager *fm) {
+
+	if (!MIX_IS_FRAMEMANAGER(fm)) {
+		return MIX_RESULT_INVALID_PARAM;
+	}
+
+	if (!fm->initialized) {
+		return MIX_RESULT_NOT_INIT;
+	}
+
+	g_mutex_lock(fm->lock);
+
+	fm->eos = TRUE;
+
+	g_mutex_unlock(fm->lock);
+
+	/* Fix: the original left ret at MIX_RESULT_FAIL and returned it even
+	 * though the EOS flag was set successfully. */
+	return MIX_RESULT_SUCCESS;
+}
+
diff --git a/mix_video/src/mixframemanager.h b/mix_video/src/mixframemanager.h
new file mode 100644
index 0000000..5dc663a
--- /dev/null
+++ b/mix_video/src/mixframemanager.h
@@ -0,0 +1,164 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+#ifndef __MIX_FRAMEMANAGER_H__
+#define __MIX_FRAMEMANAGER_H__
+
+#include <glib-object.h>
+#include "mixvideodef.h"
+#include "mixvideoframe.h"
+
+/*
+ * Type macros.
+ */
+#define MIX_TYPE_FRAMEMANAGER (mix_framemanager_get_type ())
+#define MIX_FRAMEMANAGER(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_FRAMEMANAGER, MixFrameManager))
+#define MIX_IS_FRAMEMANAGER(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_FRAMEMANAGER))
+#define MIX_FRAMEMANAGER_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_FRAMEMANAGER, MixFrameManagerClass))
+#define MIX_IS_FRAMEMANAGER_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_FRAMEMANAGER))
+#define MIX_FRAMEMANAGER_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_FRAMEMANAGER, MixFrameManagerClass))
+
+typedef struct _MixFrameManager MixFrameManager;
+typedef struct _MixFrameManagerClass MixFrameManagerClass;
+
+/**
+ * MixFrameManager:
+ *
+ * Re-orders decoded #MixVideoFrame objects into display or decode order
+ * and hands them out through an output queue.
+ */
+struct _MixFrameManager {
+	/*< public > */
+	GObject parent;
+
+	/*< public > */
+
+	/*< private > */
+	/* TRUE after mix_framemanager_initialize() succeeds */
+	gboolean initialized;
+	gboolean flushing;
+	/* end-of-stream seen; dequeue returns MIX_RESULT_EOS once drained */
+	gboolean eos;
+
+	/* protects all mutable state below */
+	GMutex *lock;
+	/* waiting list for out-of-order frames (display-order mode only) */
+	GPtrArray *frame_array;
+	/* frames ready to be dequeued, in output order */
+	GQueue *frame_queue;
+
+	gint framerate_numerator;
+	gint framerate_denominator;
+	/* expected timestamp spacing: denominator * MIX_SECOND / numerator */
+	guint64 frame_timestamp_delta;
+
+	MixFrameOrderMode mode;
+
+	gboolean is_first_frame;
+	guint64 next_frame_timestamp;
+
+	/*
+	 * For VC-1 in ASF: reference frame held back until the next
+	 * reference frame arrives, plus its pending timestamp.
+	 */
+
+	MixVideoFrame *p_frame;
+	guint64 prev_timestamp;
+
+	/* TRUE: order by timestamps; FALSE: order by frame type (I/P/B) */
+	gboolean timebased_ordering;
+};
+
+/**
+ * MixFrameManagerClass:
+ *
+ * MI-X Video object class
+ */
+struct _MixFrameManagerClass {
+	/*< public > */
+	GObjectClass parent_class;
+
+/* class members */
+
+/*< public > */
+};
+
+/**
+ * mix_framemanager_get_type:
+ * @returns: type
+ *
+ * Get the type of object.
+ */
+GType mix_framemanager_get_type(void);
+
+/**
+ * mix_framemanager_new:
+ * @returns: A newly allocated instance of #MixFrameManager
+ *
+ * Use this method to create new instance of #MixFrameManager
+ */
+MixFrameManager *mix_framemanager_new(void);
+
+/**
+ * mix_framemanager_ref:
+ * @mix: object to add reference
+ * @returns: the MixFrameManager instance where reference count has been increased.
+ *
+ * Add reference count.
+ */
+MixFrameManager *mix_framemanager_ref(MixFrameManager * mix);
+
+/**
+ * mix_framemanager_unref:
+ * @obj: object to unref.
+ *
+ * Decrement reference count of the object.
+ */
+#define mix_framemanager_unref(obj) g_object_unref (G_OBJECT(obj))
+
+/* Class Methods */
+
+/*
+ * Initialize FM
+ */
+MIX_RESULT mix_framemanager_initialize(MixFrameManager *fm,
+ MixFrameOrderMode mode, gint framerate_numerator,
+ gint framerate_denominator, gboolean timebased_ordering);
+/*
+ * Deinitialize FM
+ */
+MIX_RESULT mix_framemanager_deinitialize(MixFrameManager *fm);
+
+/*
+ * Set new framerate
+ */
+MIX_RESULT mix_framemanager_set_framerate(MixFrameManager *fm,
+ gint framerate_numerator, gint framerate_denominator);
+
+/*
+ * Get framerate
+ */
+MIX_RESULT mix_framemanager_get_framerate(MixFrameManager *fm,
+ gint *framerate_numerator, gint *framerate_denominator);
+
+
+/*
+ * Get Frame Order Mode
+ */
+MIX_RESULT mix_framemanager_get_frame_order_mode(MixFrameManager *fm,
+ MixFrameOrderMode *mode);
+
+/*
+ * For discontiunity, reset FM
+ */
+MIX_RESULT mix_framemanager_flush(MixFrameManager *fm);
+
+/*
+ * Enqueue MixVideoFrame
+ */
+MIX_RESULT mix_framemanager_enqueue(MixFrameManager *fm, MixVideoFrame *mvf);
+
+/*
+ * Dequeue MixVideoFrame in proper order depends on MixFrameOrderMode value
+ * during initialization.
+ */
+MIX_RESULT mix_framemanager_dequeue(MixFrameManager *fm, MixVideoFrame **mvf);
+
+/*
+ * End of stream.
+ */
+MIX_RESULT mix_framemanager_eos(MixFrameManager *fm);
+
+
+#endif /* __MIX_FRAMEMANAGER_H__ */
diff --git a/mix_video/src/mixsurfacepool.c b/mix_video/src/mixsurfacepool.c
new file mode 100644
index 0000000..f7672c8
--- /dev/null
+++ b/mix_video/src/mixsurfacepool.c
@@ -0,0 +1,652 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+/**
+ * SECTION:mixsurfacepool
+ * @short_description: MI-X Video Surface Pool
+ *
+ * A data object which stores and manipulates a pool of video surfaces.
+ */
+
+#include "mixvideolog.h"
+#include "mixsurfacepool.h"
+#include "mixvideoframe_private.h"
+
+#define MIX_LOCK(lock) g_mutex_lock(lock);
+#define MIX_UNLOCK(lock) g_mutex_unlock(lock);
+
+#define SAFE_FREE(p) if(p) { g_free(p); p = NULL; }
+
+static GType _mix_surfacepool_type = 0;
+static MixParamsClass *parent_class = NULL;
+
+#define _do_init { _mix_surfacepool_type = g_define_type_id; }
+
+gboolean mix_surfacepool_copy(MixParams * target, const MixParams * src);
+MixParams *mix_surfacepool_dup(const MixParams * obj);
+gboolean mix_surfacepool_equal(MixParams * first, MixParams * second);
+static void mix_surfacepool_finalize(MixParams * obj);
+
+G_DEFINE_TYPE_WITH_CODE (MixSurfacePool, mix_surfacepool, MIX_TYPE_PARAMS,
+ _do_init);
+
+static void mix_surfacepool_init(MixSurfacePool * self) {
+	/* Start with an empty pool: no surfaces free or checked out. */
+	self->free_list = NULL;
+	self->in_use_list = NULL;
+	self->free_list_max_size = 0;
+	self->free_list_cur_size = 0;
+	self->high_water_mark = 0;
+
+	/* Reserved for future use; always NULL for now. */
+	self->reserved1 = NULL;
+	self->reserved2 = NULL;
+	self->reserved3 = NULL;
+	self->reserved4 = NULL;
+
+	// TODO: relocate this mutex allocation -we can't communicate failure in ctor.
+	// Note that g_thread_init() has already been called by mix_video_init()
+	self->objectlock = g_mutex_new();
+
+}
+
+static void mix_surfacepool_class_init(MixSurfacePoolClass * klass) {
+	MixParamsClass *params_class = MIX_PARAMS_CLASS(klass);
+
+	/* Cache the parent class for chain-ups in finalize/copy/equal. */
+	parent_class = (MixParamsClass *) g_type_class_peek_parent(klass);
+
+	/* Install the MixParams virtual methods. */
+	params_class->finalize = mix_surfacepool_finalize;
+	params_class->copy = (MixParamsCopyFunction) mix_surfacepool_copy;
+	params_class->dup = (MixParamsDupFunction) mix_surfacepool_dup;
+	params_class->equal = (MixParamsEqualFunction) mix_surfacepool_equal;
+}
+
+MixSurfacePool *
+mix_surfacepool_new(void) {
+	/* MixParams-derived objects are created via g_type_create_instance(). */
+	return (MixSurfacePool *) g_type_create_instance(MIX_TYPE_SURFACEPOOL);
+}
+
+void mix_surfacepool_finalize(MixParams * obj) {
+	MixSurfacePool *self = MIX_SURFACEPOOL(obj);
+
+	/* Destroy the pool mutex. */
+	if (self->objectlock != NULL) {
+		g_mutex_free(self->objectlock);
+		self->objectlock = NULL;
+	}
+
+	/* Chain up to the parent finalizer. */
+	if (parent_class->finalize) {
+		parent_class->finalize(obj);
+	}
+}
+
+MixSurfacePool *
+mix_surfacepool_ref(MixSurfacePool * mix) {
+	/* Delegate to the MixParams reference counting. */
+	return (MixSurfacePool *) mix_params_ref(MIX_PARAMS(mix));
+}
+
+/**
+ * mix_surfacepool_dup:
+ * @obj: a #MixSurfacePool object
+ * @returns: a newly allocated duplicate of the object, or NULL on failure.
+ *
+ * Copy duplicate of the object.
+ *
+ * Fix: do not take @obj's lock here.  mix_surfacepool_copy() locks the
+ * source pool itself, and GMutex is not recursive, so locking here as
+ * well self-deadlocked on the same mutex.
+ */
+MixParams *
+mix_surfacepool_dup(const MixParams * obj) {
+	MixParams *ret = NULL;
+
+	if (MIX_IS_SURFACEPOOL(obj)) {
+
+		MixSurfacePool *duplicate = mix_surfacepool_new();
+		if (mix_surfacepool_copy(MIX_PARAMS(duplicate), MIX_PARAMS(obj))) {
+			ret = MIX_PARAMS(duplicate);
+		} else {
+			mix_surfacepool_unref(duplicate);
+		}
+	}
+	return ret;
+}
+
+/**
+ * mix_surfacepool_copy:
+ * @target: copy to target
+ * @src: copy from src
+ * @returns: boolean indicates if copy is successful.
+ *
+ * Copy instance data from @src to @target.
+ *
+ * NOTE(review): only the GSList head pointers are copied (shallow copy),
+ * so @target and @src share the same list nodes afterwards -- confirm
+ * callers never free both pools' lists independently.
+ *
+ * NOTE(review): the two locks are taken in @src-then-@target order; two
+ * threads copying in opposite directions could deadlock -- verify that
+ * callers serialize copies.
+ */
+gboolean mix_surfacepool_copy(MixParams * target, const MixParams * src) {
+	MixSurfacePool *this_target, *this_src;
+
+	if (MIX_IS_SURFACEPOOL(target) && MIX_IS_SURFACEPOOL(src)) {
+
+		MIX_LOCK(MIX_SURFACEPOOL(src)->objectlock);
+		MIX_LOCK(MIX_SURFACEPOOL(target)->objectlock);
+
+		// Cast the base object to this child object
+		this_target = MIX_SURFACEPOOL(target);
+		this_src = MIX_SURFACEPOOL(src);
+
+		// Free the existing properties
+
+		// Shallow-copy the bookkeeping fields (see note above)
+		this_target->free_list = this_src->free_list;
+		this_target->in_use_list = this_src->in_use_list;
+		this_target->free_list_max_size = this_src->free_list_max_size;
+		this_target->free_list_cur_size = this_src->free_list_cur_size;
+		this_target->high_water_mark = this_src->high_water_mark;
+
+		MIX_UNLOCK(MIX_SURFACEPOOL(src)->objectlock);
+		MIX_UNLOCK(MIX_SURFACEPOOL(target)->objectlock);
+
+		// Now chainup base class
+		if (parent_class->copy) {
+			return parent_class->copy(MIX_PARAMS_CAST(target), MIX_PARAMS_CAST(
+					src));
+		} else {
+			return TRUE;
+		}
+	}
+	return FALSE;
+}
+
+/**
+ * mix_surfacepool_equal:
+ * @first: first object to compare
+ * @second: second object to compare
+ * @returns: boolean indicates if instance are equal.
+ *
+ * Shallow comparison of the two pools' bookkeeping fields (list head
+ * pointers and counters), chaining up to the parent class when they match.
+ */
+gboolean mix_surfacepool_equal(MixParams * first, MixParams * second) {
+	gboolean ret = FALSE;
+	MixSurfacePool *this_first, *this_second;
+
+	if (MIX_IS_SURFACEPOOL(first) && MIX_IS_SURFACEPOOL(second)) {
+		// Deep compare
+		// Cast the base object to this child object
+
+		MIX_LOCK(MIX_SURFACEPOOL(first)->objectlock);
+		MIX_LOCK(MIX_SURFACEPOOL(second)->objectlock);
+
+		this_first = MIX_SURFACEPOOL(first);
+		this_second = MIX_SURFACEPOOL(second);
+
+		/* TODO: add comparison for other properties */
+		if (this_first->free_list == this_second->free_list
+				&& this_first->in_use_list == this_second->in_use_list
+				&& this_first->free_list_max_size
+						== this_second->free_list_max_size
+				&& this_first->free_list_cur_size
+						== this_second->free_list_cur_size
+				&& this_first->high_water_mark == this_second->high_water_mark) {
+			// members within this scope equal. chaining up.
+			MixParamsClass *klass = MIX_PARAMS_CLASS(parent_class);
+			if (klass->equal)
+				ret = klass->equal(first, second);
+			else
+				ret = TRUE;
+		}
+
+		/* Fix: these were MIX_LOCK in the original, which re-locked the
+		 * already-held non-recursive mutexes (deadlock) and left both
+		 * locks held on return. */
+		MIX_UNLOCK(MIX_SURFACEPOOL(first)->objectlock);
+		MIX_UNLOCK(MIX_SURFACEPOOL(second)->objectlock);
+
+	}
+
+	return ret;
+}
+
+/* Class Methods */
+
+/**
+ * mix_surfacepool_initialize:
+ * @obj: the pool to populate
+ * @surfaces: array of VA surface IDs to wrap
+ * @num_surfaces: number of entries in @surfaces
+ * @returns: MIX_RESULT_SUCCESS if successful in creating the surface pool
+ *
+ * Use this method to create a new surface pool, consisting of a GSList of
+ * frame objects that represents a pool of surfaces.
+ */
+MIX_RESULT mix_surfacepool_initialize(MixSurfacePool * obj,
+		VASurfaceID *surfaces, guint num_surfaces) {
+
+	LOG_V( "Begin\n");
+
+	if (obj == NULL || surfaces == NULL) {
+
+		LOG_E(
+				"Error NULL ptrs, obj %x, surfaces %x\n", (guint) obj,
+				(guint) surfaces);
+
+		return MIX_RESULT_NULL_PTR;
+	}
+
+	MIX_LOCK(obj->objectlock);
+
+	if ((obj->free_list != NULL) || (obj->in_use_list != NULL)) {
+		//surface pool is in use; return error; need proper cleanup
+		//TODO need cleanup here?
+
+		MIX_UNLOCK(obj->objectlock);
+
+		return MIX_RESULT_ALREADY_INIT;
+	}
+
+	/* Zero surfaces: a valid, empty pool. */
+	if (num_surfaces == 0) {
+		obj->free_list = NULL;
+		obj->in_use_list = NULL;
+		obj->free_list_max_size = num_surfaces;
+		obj->free_list_cur_size = num_surfaces;
+		obj->high_water_mark = 0;
+
+		MIX_UNLOCK(obj->objectlock);
+
+		return MIX_RESULT_SUCCESS;
+	}
+
+	// Initialize the free pool with frame objects
+
+	/* Fix: was gint, a signed/unsigned comparison against num_surfaces. */
+	guint i = 0;
+	MixVideoFrame *frame = NULL;
+
+	for (; i < num_surfaces; i++) {
+
+		//Create a frame object for each surface ID
+		frame = mix_videoframe_new();
+
+		if (frame == NULL) {
+			/* Fix: release the frames created so far instead of leaking
+			 * them and leaving the pool half-initialized. */
+			GSList *element = NULL;
+			for (element = obj->free_list; element != NULL; element
+					= element->next) {
+				mix_videoframe_unref((MixVideoFrame *) element->data);
+			}
+			g_slist_free(obj->free_list);
+			obj->free_list = NULL;
+
+			MIX_UNLOCK(obj->objectlock);
+
+			return MIX_RESULT_NO_MEMORY;
+		}
+
+		// Set the frame ID to the surface ID
+		mix_videoframe_set_frame_id(frame, surfaces[i]);
+		// Set the ci frame index to the surface ID
+		mix_videoframe_set_ci_frame_idx (frame, i);
+		// Leave timestamp for each frame object as zero
+		// Set the pool reference in the private data of the frame object
+		mix_videoframe_set_pool(frame, obj);
+
+		//Add each frame object to the pool list
+		obj->free_list = g_slist_append(obj->free_list, frame);
+
+	}
+
+	obj->in_use_list = NULL;
+	obj->free_list_max_size = num_surfaces;
+	obj->free_list_cur_size = num_surfaces;
+	obj->high_water_mark = 0;
+
+	MIX_UNLOCK(obj->objectlock);
+
+	LOG_V( "End\n");
+
+	return MIX_RESULT_SUCCESS;
+}
+
+/**
+ * mix_surfacepool_put:
+ * @returns: SUCCESS or FAILURE
+ *
+ * Use this method to return a surface to the free pool
+ */
+MIX_RESULT mix_surfacepool_put(MixSurfacePool * obj, MixVideoFrame * frame) {
+
+ LOG_V( "Begin\n");
+ if (obj == NULL || frame == NULL)
+ return MIX_RESULT_NULL_PTR;
+
+ LOG_V( "Frame id: %d\n", frame->frame_id);
+ MIX_LOCK(obj->objectlock);
+
+ if (obj->in_use_list == NULL) {
+ //in use list cannot be empty if a frame is in use
+ //TODO need better error code for this
+
+ MIX_UNLOCK(obj->objectlock);
+
+ return MIX_RESULT_FAIL;
+ }
+
+ //Locate the exact frame pointer in the in-use list (pointer equality)
+ GSList *element = g_slist_find(obj->in_use_list, frame);
+ if (element == NULL) {
+ //Integrity error; frame not found in in use list
+ //TODO need better error code and handling for this
+
+ MIX_UNLOCK(obj->objectlock);
+
+ return MIX_RESULT_FAIL;
+ } else {
+ //Remove this element from the in_use_list
+ //remove_link detaches the node without freeing it, so the same
+ //node can be spliced onto free_list below with no allocation
+ obj->in_use_list = g_slist_remove_link(obj->in_use_list, element);
+
+ //Concat the element to the free_list and reset the timestamp of the frame
+ //Note that the surface ID stays valid
+ mix_videoframe_set_timestamp(frame, 0);
+ obj->free_list = g_slist_concat(obj->free_list, element);
+
+ //increment the free list count
+ obj->free_list_cur_size++;
+ }
+
+ //Note that we do nothing with the ref count for this. We want it to
+ //stay at 1, which is what triggered it to be added back to the free list.
+
+ MIX_UNLOCK(obj->objectlock);
+
+ LOG_V( "End\n");
+ return MIX_RESULT_SUCCESS;
+}
+
+/**
+ * mix_surfacepool_get:
+ * @returns: SUCCESS or FAILURE
+ *
+ * Use this method to get a surface from the free pool
+ */
+MIX_RESULT mix_surfacepool_get(MixSurfacePool * obj, MixVideoFrame ** frame) {
+
+ LOG_V( "Begin\n");
+
+ if (obj == NULL || frame == NULL)
+ return MIX_RESULT_NULL_PTR;
+
+ MIX_LOCK(obj->objectlock);
+
+#if 0
+ if (obj->free_list == NULL) {
+#else
+ if (obj->free_list_cur_size <= 1) { //Keep one surface free at all times for VBLANK bug
+#endif
+ //We are out of surfaces
+ //TODO need to log this as well
+
+ MIX_UNLOCK(obj->objectlock);
+
+ LOG_E( "out of surfaces\n");
+
+ return MIX_RESULT_NO_MEMORY;
+ }
+
+ //Remove a frame from the free pool
+
+ //We just remove the one at the head, since it's convenient
+ GSList *element = obj->free_list;
+ obj->free_list = g_slist_remove_link(obj->free_list, element);
+ if (element == NULL) {
+ //Unexpected behavior
+ //TODO need better error code and handling for this
+
+ MIX_UNLOCK(obj->objectlock);
+
+ LOG_E( "Element is null\n");
+
+ return MIX_RESULT_FAIL;
+ } else {
+ //Concat the element to the in_use_list
+ //The detached node is reused directly, so no allocation occurs
+ obj->in_use_list = g_slist_concat(obj->in_use_list, element);
+
+ //TODO replace with proper logging
+
+ LOG_I( "frame refcount%d\n",
+ MIX_PARAMS(element->data)->refcount);
+
+ //Set the out frame pointer
+ *frame = (MixVideoFrame *) element->data;
+
+ LOG_V( "Frame id: %d\n", (*frame)->frame_id);
+
+ //decrement the free list count
+ obj->free_list_cur_size--;
+
+ //Check the high water mark for surface use
+ guint size = g_slist_length(obj->in_use_list);
+ if (size > obj->high_water_mark)
+ obj->high_water_mark = size;
+ //TODO Log this high water mark
+ }
+
+ //Increment the reference count for the frame
+ //The caller owns this reference; mix_surfacepool_put returns the
+ //frame to the pool once the count drops back
+ mix_videoframe_ref(*frame);
+
+ MIX_UNLOCK(obj->objectlock);
+
+ LOG_V( "End\n");
+
+ return MIX_RESULT_SUCCESS;
+}
+
+
+/* GCompareFunc used with g_slist_find_custom(): reports a match (0)
+ * when both frames carry the same CI frame index, and -1 otherwise
+ * (including when either pointer is NULL). */
+gint mixframe_compare_index (MixVideoFrame * a, MixVideoFrame * b)
+{
+ if (a == NULL || b == NULL)
+ return -1;
+ return (a->ci_frame_idx == b->ci_frame_idx) ? 0 : -1;
+}
+
+/**
+ * mix_surfacepool_get_frame_with_ci_frameidx:
+ * @returns: SUCCESS or FAILURE
+ *
+ * Use this method to get a surface from the free pool according to the CI frame idx
+ */
+
+MIX_RESULT mix_surfacepool_get_frame_with_ci_frameidx (MixSurfacePool * obj, MixVideoFrame ** frame, MixVideoFrame *in_frame) {
+
+ LOG_V( "Begin\n");
+
+ if (obj == NULL || frame == NULL)
+ return MIX_RESULT_NULL_PTR;
+
+ MIX_LOCK(obj->objectlock);
+
+ if (obj->free_list == NULL) {
+ //We are out of surfaces
+
+ MIX_UNLOCK(obj->objectlock);
+
+ LOG_E( "out of surfaces\n");
+
+ return MIX_RESULT_NO_MEMORY;
+ }
+
+ //Find the free-list entry whose CI frame index matches in_frame's
+ GSList *element = g_slist_find_custom (obj->free_list, in_frame, (GCompareFunc) mixframe_compare_index);
+ if (element == NULL) {
+ //No frame with a matching CI index in the free list
+ //TODO need better error code and handling for this
+
+ MIX_UNLOCK(obj->objectlock);
+
+ LOG_E( "Element is null\n");
+
+ return MIX_RESULT_FAIL;
+ }
+
+ //Detach the matching node and splice it onto the in_use_list; the
+ //node itself is reused, so no allocation occurs here
+ obj->free_list = g_slist_remove_link(obj->free_list, element);
+ obj->in_use_list = g_slist_concat(obj->in_use_list, element);
+
+ //Keep the cached free count in sync with the list; mix_surfacepool_get
+ //does the same, and mix_surfacepool_check_available relies on it
+ obj->free_list_cur_size--;
+
+ //TODO replace with proper logging
+
+ LOG_I( "frame refcount%d\n",
+ MIX_PARAMS(element->data)->refcount);
+
+ //Set the out frame pointer
+ *frame = (MixVideoFrame *) element->data;
+
+ //Check the high water mark for surface use
+ guint size = g_slist_length(obj->in_use_list);
+ if (size > obj->high_water_mark)
+ obj->high_water_mark = size;
+ //TODO Log this high water mark
+
+ //Increment the reference count for the frame; the caller owns this
+ //reference and returns it via mix_surfacepool_put
+ mix_videoframe_ref(*frame);
+
+ MIX_UNLOCK(obj->objectlock);
+
+ LOG_V( "End\n");
+
+ return MIX_RESULT_SUCCESS;
+}
+/**
+ * mix_surfacepool_check_available:
+ * @returns: SUCCESS or FAILURE
+ *
+ * Use this method to check availability of getting a surface from the free pool
+ */
+MIX_RESULT mix_surfacepool_check_available(MixSurfacePool * obj) {
+
+ MIX_RESULT ret = MIX_RESULT_SUCCESS;
+
+ LOG_V( "Begin\n");
+
+ if (obj == NULL)
+ return MIX_RESULT_NULL_PTR;
+
+ MIX_LOCK(obj->objectlock);
+
+ //Keep one surface free at all times for VBLANK bug, so the pool is
+ //treated as empty once only a single free surface remains
+ if (obj->free_list_cur_size <= 1) {
+ ret = MIX_RESULT_POOLEMPTY;
+ }
+
+ MIX_UNLOCK(obj->objectlock);
+
+ if (ret == MIX_RESULT_POOLEMPTY) {
+ LOG_W(
+ "Returning MIX_RESULT_POOLEMPTY because out of surfaces\n");
+ } else {
+ LOG_I(
+ "Returning MIX_RESULT_SUCCESS because surfaces are available\n");
+ }
+
+ return ret;
+}
+
+/**
+ * mix_surfacepool_deinitialize:
+ * @returns: SUCCESS or FAILURE
+ *
+ * Use this method to teardown a surface pool
+ */
+MIX_RESULT mix_surfacepool_deinitialize(MixSurfacePool * obj) {
+ if (obj == NULL)
+ return MIX_RESULT_NULL_PTR;
+
+ MIX_LOCK(obj->objectlock);
+
+ //Refuse to tear down while any frame is still checked out: the
+ //in-use list must be empty and the free list back to its full size
+ if ((obj->in_use_list != NULL) || (g_slist_length(obj->free_list)
+ != obj->free_list_max_size)) {
+ //TODO better error code
+ //We have outstanding frame objects in use and they need to be
+ //freed before we can deinitialize.
+
+ MIX_UNLOCK(obj->objectlock);
+
+ return MIX_RESULT_FAIL;
+ }
+
+ //Now remove frame objects from the list
+
+ MixVideoFrame *frame = NULL;
+
+ while (obj->free_list != NULL) {
+ //Get the frame object from the head of the list
+ frame = obj->free_list->data;
+ //frame = g_slist_nth_data(obj->free_list, 0);
+
+ //Release it
+ mix_videoframe_unref(frame);
+
+ //Delete the head node of the list and store the new head
+ obj->free_list = g_slist_delete_link(obj->free_list, obj->free_list);
+
+ //Repeat until empty
+ }
+
+ obj->free_list_max_size = 0;
+ obj->free_list_cur_size = 0;
+
+ //May want to log this information for tuning
+ obj->high_water_mark = 0;
+
+ MIX_UNLOCK(obj->objectlock);
+
+ return MIX_RESULT_SUCCESS;
+}
+
+/* Guard for property setters: returns from the calling function unless
+ * obj is a non-NULL MixSurfacePool instance. */
+#define MIX_SURFACEPOOL_SETTER_CHECK_INPUT(obj) \
+ if(!obj) return MIX_RESULT_NULL_PTR; \
+ if(!MIX_IS_SURFACEPOOL(obj)) return MIX_RESULT_FAIL; \
+
+/* Guard for property getters: additionally requires a non-NULL
+ * out-parameter (prop) to write the property value into. */
+#define MIX_SURFACEPOOL_GETTER_CHECK_INPUT(obj, prop) \
+ if(!obj || !prop) return MIX_RESULT_NULL_PTR; \
+ if(!MIX_IS_SURFACEPOOL(obj)) return MIX_RESULT_FAIL; \
+
+
+
+/* Debug helper: logs one frame's address, id, refcount and timestamp.
+ * Passed as the GFunc callback of g_slist_foreach() in
+ * mix_surfacepool_dumpprint(). */
+MIX_RESULT
+mix_surfacepool_dumpframe(MixVideoFrame *frame)
+{
+ //Use %p for the pointer; the old (guint)/%x cast truncated the
+ //address on 64-bit builds
+ LOG_I( "\tFrame %p, id %lu, refcount %d, ts %lu\n", frame,
+ frame->frame_id, MIX_PARAMS(frame)->refcount, (gulong) frame->timestamp);
+
+ return MIX_RESULT_SUCCESS;
+}
+
+/* Debug helper: logs pool counters and the contents of both lists.
+ * NOTE(review): reads the lists without taking objectlock — presumably
+ * intended for single-threaded debugging only; confirm before relying
+ * on it from concurrent code. */
+MIX_RESULT
+mix_surfacepool_dumpprint (MixSurfacePool * obj)
+{
+ //TODO replace this with proper logging later
+
+ //NULL guard, consistent with every other pool entry point
+ if (obj == NULL)
+ return MIX_RESULT_NULL_PTR;
+
+ LOG_I( "SURFACE POOL DUMP:\n");
+ LOG_I( "Free list size is %d\n", obj->free_list_cur_size);
+ LOG_I( "In use list size is %d\n", g_slist_length(obj->in_use_list));
+ LOG_I( "High water mark is %lu\n", obj->high_water_mark);
+
+ //Walk the free list and report the contents
+ LOG_I( "Free list contents:\n");
+ g_slist_foreach(obj->free_list, (GFunc) mix_surfacepool_dumpframe, NULL);
+
+ //Walk the in_use list and report the contents
+ LOG_I( "In Use list contents:\n");
+ g_slist_foreach(obj->in_use_list, (GFunc) mix_surfacepool_dumpframe, NULL);
+
+ return MIX_RESULT_SUCCESS;
+}
diff --git a/mix_video/src/mixsurfacepool.h b/mix_video/src/mixsurfacepool.h
new file mode 100644
index 0000000..6468ebe
--- /dev/null
+++ b/mix_video/src/mixsurfacepool.h
@@ -0,0 +1,158 @@
+/*
+INTEL CONFIDENTIAL
+Copyright 2009 Intel Corporation All Rights Reserved.
+The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+*/
+
+#ifndef __MIX_SURFACEPOOL_H__
+#define __MIX_SURFACEPOOL_H__
+
+#include <mixparams.h>
+#include "mixvideodef.h"
+#include "mixvideoframe.h"
+
+#include <va/va.h>
+
+G_BEGIN_DECLS
+
+/**
+* MIX_TYPE_SURFACEPOOL:
+*
+* Get type of class.
+*/
+#define MIX_TYPE_SURFACEPOOL (mix_surfacepool_get_type ())
+
+/**
+* MIX_SURFACEPOOL:
+* @obj: object to be type-casted.
+*/
+#define MIX_SURFACEPOOL(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_SURFACEPOOL, MixSurfacePool))
+
+/**
+* MIX_IS_SURFACEPOOL:
+* @obj: an object.
+*
+* Checks if the given object is an instance of #MixSurfacePool
+*/
+#define MIX_IS_SURFACEPOOL(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_SURFACEPOOL))
+
+/**
+* MIX_SURFACEPOOL_CLASS:
+* @klass: class to be type-casted.
+*/
+#define MIX_SURFACEPOOL_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_SURFACEPOOL, MixSurfacePoolClass))
+
+/**
+* MIX_IS_SURFACEPOOL_CLASS:
+* @klass: a class.
+*
+* Checks if the given class is #MixSurfacePoolClass
+*/
+#define MIX_IS_SURFACEPOOL_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_SURFACEPOOL))
+
+/**
+* MIX_SURFACEPOOL_GET_CLASS:
+* @obj: a #MixSurfacePool object.
+*
+* Get the class instance of the object.
+*/
+#define MIX_SURFACEPOOL_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_SURFACEPOOL, MixSurfacePoolClass))
+
+typedef struct _MixSurfacePool MixSurfacePool;
+typedef struct _MixSurfacePoolClass MixSurfacePoolClass;
+
+/**
+* MixSurfacePool:
+*
+* MI-X Video Surface Pool object
+*/
+struct _MixSurfacePool
+{
+ /*< public > */
+ MixParams parent;
+
+ /*< public > */
+ GSList *free_list; /* list of free surfaces */
+ GSList *in_use_list; /* list of surfaces in use */
+ gulong free_list_max_size; /* initial size of the free list */
+ gulong free_list_cur_size; /* current size of the free list */
+ gulong high_water_mark; /* most surfaces in use at one time */
+// guint64 timestamp;
+
+ /* reserved for future extension — presumably ABI padding; confirm */
+ void *reserved1;
+ void *reserved2;
+ void *reserved3;
+ void *reserved4;
+
+ /*< private > */
+ GMutex *objectlock; /* guards both lists and the size counters */
+
+};
+
+/**
+* MixSurfacePoolClass:
+*
+* MI-X Video Surface Pool object class
+*/
+struct _MixSurfacePoolClass
+{
+ /*< public > */
+ MixParamsClass parent_class;
+
+ /* class members — none beyond the inherited MixParamsClass */
+};
+
+/**
+* mix_surfacepool_get_type:
+* @returns: type
+*
+* Get the type of object.
+*/
+GType mix_surfacepool_get_type (void);
+
+/**
+* mix_surfacepool_new:
+* @returns: A newly allocated instance of #MixSurfacePool
+*
+* Use this method to create new instance of #MixSurfacePool
+*/
+MixSurfacePool *mix_surfacepool_new (void);
+/**
+* mix_surfacepool_ref:
+* @mix: object to add reference
+* @returns: the MixSurfacePool instance where reference count has been increased.
+*
+* Add reference count.
+*/
+MixSurfacePool *mix_surfacepool_ref (MixSurfacePool * mix);
+
+/**
+* mix_surfacepool_unref:
+* @obj: object to unref.
+*
+* Decrement reference count of the object.
+*/
+#define mix_surfacepool_unref(obj) mix_params_unref(MIX_PARAMS(obj))
+
+/* Class Methods */
+
+MIX_RESULT mix_surfacepool_initialize (MixSurfacePool * obj,
+ VASurfaceID *surfaces, guint num_surfaces);
+MIX_RESULT mix_surfacepool_put (MixSurfacePool * obj,
+ MixVideoFrame * frame);
+
+MIX_RESULT mix_surfacepool_get (MixSurfacePool * obj,
+ MixVideoFrame ** frame);
+
+MIX_RESULT mix_surfacepool_get_frame_with_ci_frameidx (MixSurfacePool * obj,
+ MixVideoFrame ** frame, MixVideoFrame *in_frame);
+
+MIX_RESULT mix_surfacepool_check_available (MixSurfacePool * obj);
+
+MIX_RESULT mix_surfacepool_deinitialize (MixSurfacePool * obj);
+
+G_END_DECLS
+
+#endif /* __MIX_SURFACEPOOL_H__ */
diff --git a/mix_video/src/mixvideo.c b/mix_video/src/mixvideo.c
new file mode 100644
index 0000000..e9cba0a
--- /dev/null
+++ b/mix_video/src/mixvideo.c
@@ -0,0 +1,1638 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+#include <va/va.h> /* libVA */
+#include <X11/Xlib.h>
+#include <va/va_x11.h>
+
+#include "mixvideolog.h"
+
+#include "mixdisplayx11.h"
+#include "mixvideoframe.h"
+
+#include "mixframemanager.h"
+#include "mixvideorenderparams.h"
+#include "mixvideorenderparams_internal.h"
+
+#include "mixvideoformat.h"
+#include "mixvideoformat_vc1.h"
+#include "mixvideoformat_h264.h"
+#include "mixvideoformat_mp42.h"
+
+#include "mixvideoconfigparamsdec_vc1.h"
+#include "mixvideoconfigparamsdec_h264.h"
+#include "mixvideoconfigparamsdec_mp42.h"
+
+#include "mixvideoformatenc.h"
+#include "mixvideoformatenc_h264.h"
+#include "mixvideoformatenc_mpeg4.h"
+#include "mixvideoformatenc_preview.h"
+
+#include "mixvideoconfigparamsenc_h264.h"
+#include "mixvideoconfigparamsenc_mpeg4.h"
+#include "mixvideoconfigparamsenc_preview.h"
+
+
+#include "mixvideo.h"
+#include "mixvideo_private.h"
+
+#define USE_OPAQUE_POINTER
+
+#ifdef USE_OPAQUE_POINTER
+#define MIX_VIDEO_PRIVATE(mix) (MixVideoPrivate *)(mix->context)
+#else
+#define MIX_VIDEO_PRIVATE(mix) MIX_VIDEO_GET_PRIVATE(mix)
+#endif
+
+/* Validate @mix and load its private context into @priv; returns from
+ * the CALLING function with an error unless mix_video_initialize() has
+ * completed on this instance. */
+#define CHECK_INIT(mix, priv) \
+ if (!mix) { \
+ return MIX_RESULT_NULL_PTR; \
+ } \
+ if (!MIX_IS_VIDEO(mix)) { \
+ LOG_E( "Not MixVideo\n"); \
+ return MIX_RESULT_INVALID_PARAM; \
+ } \
+ priv = MIX_VIDEO_PRIVATE(mix); \
+ if (!priv->initialized) { \
+ LOG_E( "Not initialized\n"); \
+ return MIX_RESULT_NOT_INIT; \
+ }
+
+/* As CHECK_INIT, but additionally requires a completed configure(). */
+#define CHECK_INIT_CONFIG(mix, priv) \
+ CHECK_INIT(mix, priv); \
+ if (!priv->configured) { \
+ LOG_E( "Not configured\n"); \
+ return MIX_RESULT_NOT_CONFIGURED; \
+ }
+
+/*
+ * default implementation of virtual methods
+ */
+
+MIX_RESULT mix_video_get_version_default(MixVideo * mix, guint * major,
+ guint * minor);
+
+MIX_RESULT mix_video_initialize_default(MixVideo * mix, MixCodecMode mode,
+ MixVideoInitParams * init_params, MixDrmParams * drm_init_params);
+
+MIX_RESULT mix_video_deinitialize_default(MixVideo * mix);
+
+MIX_RESULT mix_video_configure_default(MixVideo * mix,
+ MixVideoConfigParams * config_params, MixDrmParams * drm_config_params);
+
+MIX_RESULT mix_video_get_config_default(MixVideo * mix,
+ MixVideoConfigParams ** config_params);
+
+MIX_RESULT mix_video_decode_default(MixVideo * mix, MixBuffer * bufin[],
+ gint bufincnt, MixVideoDecodeParams * decode_params);
+
+MIX_RESULT mix_video_get_frame_default(MixVideo * mix, MixVideoFrame ** frame);
+
+MIX_RESULT mix_video_release_frame_default(MixVideo * mix,
+ MixVideoFrame * frame);
+
+MIX_RESULT mix_video_render_default(MixVideo * mix,
+ MixVideoRenderParams * render_params, MixVideoFrame *frame);
+
+MIX_RESULT mix_video_encode_default(MixVideo * mix, MixBuffer * bufin[],
+ gint bufincnt, MixIOVec * iovout[], gint iovoutcnt,
+ MixVideoEncodeParams * encode_params);
+
+MIX_RESULT mix_video_flush_default(MixVideo * mix);
+
+MIX_RESULT mix_video_eos_default(MixVideo * mix);
+
+MIX_RESULT mix_video_get_state_default(MixVideo * mix, MixState * state);
+
+MIX_RESULT mix_video_get_mixbuffer_default(MixVideo * mix, MixBuffer ** buf);
+
+MIX_RESULT mix_video_release_mixbuffer_default(MixVideo * mix, MixBuffer * buf);
+
+MIX_RESULT mix_video_get_max_coded_buffer_size_default (MixVideo * mix, guint *max_size);
+
+
+static void mix_video_finalize(GObject * obj);
+MIX_RESULT mix_video_configure_decode(MixVideo * mix,
+ MixVideoConfigParamsDec * config_params_dec,
+ MixDrmParams * drm_config_params);
+
+MIX_RESULT mix_video_configure_encode(MixVideo * mix,
+ MixVideoConfigParamsEnc * config_params_enc,
+ MixDrmParams * drm_config_params);
+
+/* Register MixVideo as a GObject subclass; generates mix_video_get_type(). */
+G_DEFINE_TYPE( MixVideo, mix_video, G_TYPE_OBJECT);
+
+/* GObject instance-init: runs for every instance created by
+ * mix_video_new(); resets the private context to its default state. */
+static void mix_video_init(MixVideo * self) {
+
+ MixVideoPrivate *priv = MIX_VIDEO_GET_PRIVATE(self);
+
+#ifdef USE_OPAQUE_POINTER
+ /* Cache the private struct in the public context pointer so
+ MIX_VIDEO_PRIVATE() can skip the GType instance lookup. */
+ self->context = priv;
+#else
+ self->context = NULL;
+#endif
+
+ /* private structure initialization */
+
+ mix_video_private_initialize(priv);
+}
+
+/* GObject class-init: installs the finalizer, reserves the private
+ * struct, and points every public vmethod at its *_default
+ * implementation (subclasses may override individual slots). */
+static void mix_video_class_init(MixVideoClass * klass) {
+ GObjectClass *gobject_class = (GObjectClass *) klass;
+
+ gobject_class->finalize = mix_video_finalize;
+
+ /* Register and allocate the space the private structure for this object */
+ g_type_class_add_private(gobject_class, sizeof(MixVideoPrivate));
+
+ klass->get_version_func = mix_video_get_version_default;
+ klass->initialize_func = mix_video_initialize_default;
+ klass->deinitialize_func = mix_video_deinitialize_default;
+ klass->configure_func = mix_video_configure_default;
+ klass->get_config_func = mix_video_get_config_default;
+ klass->decode_func = mix_video_decode_default;
+ klass->get_frame_func = mix_video_get_frame_default;
+ klass->release_frame_func = mix_video_release_frame_default;
+ klass->render_func = mix_video_render_default;
+ klass->encode_func = mix_video_encode_default;
+ klass->flush_func = mix_video_flush_default;
+ klass->eos_func = mix_video_eos_default;
+ klass->get_state_func = mix_video_get_state_default;
+ klass->get_mix_buffer_func = mix_video_get_mixbuffer_default;
+ klass->release_mix_buffer_func = mix_video_release_mixbuffer_default;
+ klass->get_max_coded_buffer_size_func = mix_video_get_max_coded_buffer_size_default;
+}
+
+/* Allocate a new MixVideo instance; instance-init runs via GObject.
+ * Release with mix_video_unref()/g_object_unref(). */
+MixVideo *mix_video_new(void) {
+ return g_object_new(MIX_TYPE_VIDEO, NULL);
+}
+
+/* GObject finalize: tears down all MI-X state when the last reference
+ * is dropped. NOTE(review): does not chain up to the parent class's
+ * finalize — confirm this is intentional. */
+void mix_video_finalize(GObject * obj) {
+
+ /* clean up here. */
+
+ MixVideo *mix = MIX_VIDEO(obj);
+ mix_video_deinitialize(mix);
+}
+
+/* Increment the reference count and return @mix for call chaining.
+ * Release with g_object_unref(). */
+MixVideo *
+mix_video_ref(MixVideo * mix) {
+ return (MixVideo *) g_object_ref(G_OBJECT(mix));
+}
+
+/* private methods */
+
+/* Unref @obj with the given @unref function and clear the pointer;
+ * no-op when @obj is already NULL. */
+#define MIXUNREF(obj, unref) if(obj) { unref(obj); obj = NULL; }
+
+/* Reset every field of the private context to its unconfigured default.
+ * Called from instance-init before any resources are acquired. */
+void mix_video_private_initialize(MixVideoPrivate* priv) {
+ priv->objlock = NULL;
+ priv->initialized = FALSE;
+ priv->configured = FALSE;
+
+ /* libVA */
+ priv->va_display = NULL;
+ priv->va_major_version = -1;
+ priv->va_minor_version = -1; /* fix: was erroneously va_major_version twice */
+
+ /* mix objects */
+ priv->frame_manager = NULL;
+ priv->video_format = NULL;
+ priv->video_format_enc = NULL; //for encoding
+ priv->surface_pool = NULL;
+ priv->buffer_pool = NULL;
+
+ priv->codec_mode = MIX_CODEC_MODE_DECODE;
+ priv->init_params = NULL;
+ priv->drm_params = NULL;
+ priv->config_params = NULL;
+}
+
+/* Release every resource held by the private context and return it to
+ * the unconfigured state. Safe on a partially-initialized context;
+ * used from both deinitialize and initialize error paths. */
+void mix_video_private_cleanup(MixVideoPrivate* priv) {
+
+ VAStatus va_status;
+
+ if (!priv) {
+ return;
+ }
+
+ if (priv->video_format_enc) {
+ mix_videofmtenc_deinitialize(priv->video_format_enc);
+ }
+
+ MIXUNREF(priv->frame_manager, mix_framemanager_unref)
+ MIXUNREF(priv->video_format, mix_videoformat_unref)
+ MIXUNREF(priv->video_format_enc, mix_videoformatenc_unref)
+ //for encoding
+ MIXUNREF(priv->buffer_pool, mix_bufferpool_unref)
+ MIXUNREF(priv->surface_pool, mix_surfacepool_unref)
+/* MIXUNREF(priv->init_params, mix_videoinitparams_unref) */
+ MIXUNREF(priv->drm_params, mix_drmparams_unref)
+ MIXUNREF(priv->config_params, mix_videoconfigparams_unref)
+
+ /* terminate libVA */
+ if (priv->va_display) {
+ va_status = vaTerminate(priv->va_display);
+ LOG_V( "vaTerminate\n");
+ if (va_status != VA_STATUS_SUCCESS) {
+ LOG_W( "Failed vaTerminate\n");
+ } else {
+ priv->va_display = NULL;
+ }
+ }
+
+ //init_params must outlive vaTerminate (it owns the X11 display ref)
+ MIXUNREF(priv->init_params, mix_videoinitparams_unref)
+
+ priv->va_major_version = -1;
+ priv->va_minor_version = -1; /* fix: was resetting va_major_version twice */
+
+ if (priv->objlock) {
+ g_mutex_free(priv->objlock);
+ priv->objlock = NULL;
+ }
+
+ priv->codec_mode = MIX_CODEC_MODE_DECODE;
+ priv->initialized = FALSE;
+ priv->configured = FALSE;
+}
+
+/* The following methods are defined in MI-X API */
+
+/* Report the MI-X video API version derived from the libtool-style
+ * CURRENT/AGE values: major = CURRENT - AGE, minor = AGE. */
+MIX_RESULT mix_video_get_version_default(MixVideo * mix, guint * major,
+ guint * minor) {
+
+ if (mix == NULL || major == NULL || minor == NULL)
+ return MIX_RESULT_NULL_PTR;
+
+ if (!MIX_IS_VIDEO(mix))
+ return MIX_RESULT_INVALID_PARAM;
+
+ *major = MIXVIDEO_CURRENT - MIXVIDEO_AGE;
+ *minor = MIXVIDEO_AGE;
+
+ return MIX_RESULT_SUCCESS;
+}
+
+/* Default initialize vmethod: validates arguments, creates the object
+ * lock, keeps a reference to @init_params, and brings up libVA on the
+ * X11 display carried inside @init_params. @drm_init_params is
+ * accepted but unused. On any failure, everything acquired so far is
+ * torn down via mix_video_private_cleanup(). */
+MIX_RESULT mix_video_initialize_default(MixVideo * mix, MixCodecMode mode,
+ MixVideoInitParams * init_params, MixDrmParams * drm_init_params) {
+
+ MIX_RESULT ret = MIX_RESULT_FAIL;
+ MixVideoPrivate *priv = NULL;
+ MixDisplay *mix_display = NULL;
+
+ LOG_V( "Begin\n");
+
+ if (!mix || !init_params) {
+ LOG_E( "!mix || !init_params\n");
+ return MIX_RESULT_NULL_PTR;
+ }
+
+ if (mode >= MIX_CODEC_MODE_LAST) {
+ LOG_E("mode >= MIX_CODEC_MODE_LAST\n");
+ return MIX_RESULT_INVALID_PARAM;
+ }
+
+#if 0 //we have encoding support
+ /* TODO: We need to support encoding in the future */
+ if (mode == MIX_CODEC_MODE_ENCODE) {
+ LOG_E("mode == MIX_CODEC_MODE_ENCODE\n");
+ return MIX_RESULT_NOTIMPL;
+ }
+#endif
+
+ if (!MIX_IS_VIDEO(mix)) {
+ LOG_E( "!MIX_IS_VIDEO(mix)\n");
+ return MIX_RESULT_INVALID_PARAM;
+ }
+
+ if (!MIX_IS_VIDEOINITPARAMS(init_params)) {
+ LOG_E("!MIX_IS_VIDEOINITPARAMS(init_params\n");
+ return MIX_RESULT_INVALID_PARAM;
+ }
+
+ priv = MIX_VIDEO_PRIVATE(mix);
+
+ if (priv->initialized) {
+ LOG_W( "priv->initialized\n");
+ return MIX_RESULT_ALREADY_INIT;
+ }
+
+ /*
+ * Init thread before any threads/sync object are used.
+ * TODO: If thread is not supported, what we do?
+ */
+
+ /* NOTE(review): g_thread_init is deprecated in newer GLib — confirm
+ * target GLib version before touching this. */
+ if (!g_thread_supported()) {
+ LOG_W("!g_thread_supported()\n");
+ g_thread_init(NULL);
+ }
+
+ /* create object lock */
+ priv->objlock = g_mutex_new();
+ if (!priv->objlock) {
+ ret = MIX_RESULT_NO_MEMORY;
+ LOG_E( "!priv->objlock\n");
+ goto cleanup;
+ }
+
+ /* clone mode */
+ priv->codec_mode = mode;
+
+ /* ref init_params */
+ priv->init_params = (MixVideoInitParams *) mix_params_ref(MIX_PARAMS(
+ init_params));
+ if (!priv->init_params) {
+ ret = MIX_RESULT_NO_MEMORY;
+ LOG_E( "!priv->init_params\n");
+ goto cleanup;
+ }
+
+ /* NOTE: we don't do anything with drm_init_params */
+
+ /* libVA initialization */
+
+ {
+ VAStatus va_status;
+ Display *display = NULL;
+ ret = mix_videoinitparams_get_display(priv->init_params, &mix_display);
+ if (ret != MIX_RESULT_SUCCESS) {
+ LOG_E("Failed to get display 1\n");
+ goto cleanup;
+ }
+
+ /* Only X11 displays are supported; other MixDisplay kinds fail */
+ if (MIX_IS_DISPLAYX11(mix_display)) {
+ MixDisplayX11 *mix_displayx11 = MIX_DISPLAYX11(mix_display);
+ ret = mix_displayx11_get_display(mix_displayx11, &display);
+ if (ret != MIX_RESULT_SUCCESS) {
+ LOG_E("Failed to get display 2\n");
+ goto cleanup;
+ }
+ } else {
+
+ /* TODO: add support to other MixDisplay type. For now, just return error!*/
+ LOG_E("It is not display x11\n");
+ ret = MIX_RESULT_FAIL;
+ goto cleanup;
+ }
+
+ /* Now, we can initialize libVA */
+ priv->va_display = vaGetDisplay(display);
+
+ /* Oops! Fail to get VADisplay */
+ if (!priv->va_display) {
+ ret = MIX_RESULT_FAIL;
+ LOG_E("Fail to get VADisplay\n");
+ goto cleanup;
+ }
+
+ /* Initialize libVA */
+ va_status = vaInitialize(priv->va_display, &priv->va_major_version,
+ &priv->va_minor_version);
+
+ /* Oops! Fail to initialize libVA */
+ if (va_status != VA_STATUS_SUCCESS) {
+ ret = MIX_RESULT_FAIL;
+ LOG_E("Fail to initialize libVA\n");
+ goto cleanup;
+ }
+
+ /* TODO: check the version numbers of libVA */
+
+ priv->initialized = TRUE;
+ ret = MIX_RESULT_SUCCESS;
+ }
+
+ cleanup:
+
+ if (ret != MIX_RESULT_SUCCESS) {
+ mix_video_private_cleanup(priv);
+ }
+
+ /* Drop the local display reference in every path */
+ MIXUNREF(mix_display, mix_display_unref);
+
+ LOG_V( "End\n");
+
+ return ret;
+}
+
+/* Default deinitialize vmethod: requires a prior successful
+ * initialize (enforced by CHECK_INIT) and releases every resource
+ * via mix_video_private_cleanup(). */
+MIX_RESULT mix_video_deinitialize_default(MixVideo * mix) {
+
+ MixVideoPrivate *priv = NULL;
+
+ LOG_V( "Begin\n");
+
+ CHECK_INIT(mix, priv);
+
+ mix_video_private_cleanup(priv);
+
+ LOG_V( "End\n");
+ return MIX_RESULT_SUCCESS;
+}
+
+/* Configure the instance for decoding: duplicates @config_params_dec,
+ * reads mime type / frame order / fps / pool size from it, then builds
+ * the frame manager, buffer pool and the mime-specific MixVideoFormat
+ * (VC-1, H.264 or MPEG-4:2 / DivX 4-5). @drm_config_params is unused.
+ * On failure all partially-built objects are unref'd. */
+MIX_RESULT mix_video_configure_decode(MixVideo * mix,
+ MixVideoConfigParamsDec * config_params_dec, MixDrmParams * drm_config_params) {
+
+ MIX_RESULT ret = MIX_RESULT_FAIL;
+ MixVideoPrivate *priv = NULL;
+ MixVideoConfigParamsDec *priv_config_params_dec = NULL;
+
+ gchar *mime_type = NULL;
+ guint fps_n, fps_d;
+ guint bufpoolsize = 0;
+
+ MixFrameOrderMode frame_order_mode = MIX_FRAMEORDER_MODE_DISPLAYORDER;
+
+ LOG_V( "Begin\n");
+
+ CHECK_INIT(mix, priv);
+
+ if (!config_params_dec) {
+ LOG_E( "!config_params_dec\n");
+ return MIX_RESULT_NULL_PTR;
+ }
+
+ if (!MIX_IS_VIDEOCONFIGPARAMSDEC(config_params_dec)) {
+ LOG_E("Not a MixVideoConfigParamsDec\n");
+ return MIX_RESULT_INVALID_PARAM;
+ }
+
+ /* ---------------------- begin lock --------------------- */
+ g_mutex_lock(priv->objlock);
+
+ /*
+ * MixVideo has already been configured, it should be
+ * re-configured.
+ *
+ * TODO: Allow MixVideo re-configuration
+ */
+ if (priv->configured) {
+ /* NOTE(review): reports success without reconfiguring — confirm
+ * callers expect this */
+ ret = MIX_RESULT_SUCCESS;
+ LOG_W( "Already configured\n");
+ goto cleanup;
+ }
+
+ /* Make a copy of config_params */
+ priv->config_params = (MixVideoConfigParams *) mix_params_dup(MIX_PARAMS(
+ config_params_dec));
+ if (!priv->config_params) {
+ ret = MIX_RESULT_NO_MEMORY;
+ LOG_E("Fail to duplicate config_params\n");
+ goto cleanup;
+ }
+
+ priv_config_params_dec = (MixVideoConfigParamsDec *)priv->config_params;
+
+ /* Get fps, frame order mode and mime type from config_params */
+ ret = mix_videoconfigparamsdec_get_mime_type(priv_config_params_dec, &mime_type);
+ if (ret != MIX_RESULT_SUCCESS) {
+ LOG_E("Failed to get mime type\n");
+ goto cleanup;
+ }
+
+ LOG_I( "mime : %s\n", mime_type);
+
+#ifdef MIX_LOG_ENABLE
+ if (g_strcmp0(mime_type, "video/x-wmv") == 0) {
+
+ LOG_I( "mime : video/x-wmv\n");
+ if (MIX_IS_VIDEOCONFIGPARAMSDEC_VC1(priv_config_params_dec)) {
+ LOG_I( "VC1 config_param\n");
+ } else {
+ LOG_E("Not VC1 config_param\n");
+ }
+ }
+#endif
+
+ ret = mix_videoconfigparamsdec_get_frame_order_mode(priv_config_params_dec,
+ &frame_order_mode);
+ if (ret != MIX_RESULT_SUCCESS) {
+ LOG_E("Failed to frame order mode\n");
+ goto cleanup;
+ }
+
+ ret = mix_videoconfigparamsdec_get_frame_rate(priv_config_params_dec, &fps_n,
+ &fps_d);
+ if (ret != MIX_RESULT_SUCCESS) {
+ LOG_E("Failed to get frame rate\n");
+ goto cleanup;
+ }
+
+ /* fps_n is a divisor downstream; zero is rejected here */
+ if (!fps_n) {
+ ret = MIX_RESULT_FAIL;
+ LOG_E( "fps_n is 0\n");
+ goto cleanup;
+ }
+
+ ret = mix_videoconfigparamsdec_get_buffer_pool_size(priv_config_params_dec,
+ &bufpoolsize);
+ if (ret != MIX_RESULT_SUCCESS) {
+ LOG_E("Failed to get buffer pool size\n");
+ goto cleanup;
+ }
+
+ /* create frame manager */
+ priv->frame_manager = mix_framemanager_new();
+ if (!priv->frame_manager) {
+ ret = MIX_RESULT_NO_MEMORY;
+ LOG_E("Failed to create frame manager\n");
+ goto cleanup;
+ }
+
+ /* initialize frame manager */
+
+ /* WMV/MPEG/DivX get the FALSE flag, everything else TRUE —
+ * NOTE(review): flag semantics live in mix_framemanager_initialize;
+ * see its declaration for the parameter meaning */
+ if (g_strcmp0(mime_type, "video/x-wmv") == 0 || g_strcmp0(mime_type,
+ "video/mpeg") == 0 || g_strcmp0(mime_type, "video/x-divx") == 0) {
+ ret = mix_framemanager_initialize(priv->frame_manager,
+ frame_order_mode, fps_n, fps_d, FALSE);
+ } else {
+ ret = mix_framemanager_initialize(priv->frame_manager,
+ frame_order_mode, fps_n, fps_d, TRUE);
+ }
+
+ if (ret != MIX_RESULT_SUCCESS) {
+ LOG_E("Failed to initialize frame manager\n");
+ goto cleanup;
+ }
+
+ /* create buffer pool */
+ priv->buffer_pool = mix_bufferpool_new();
+ if (!priv->buffer_pool) {
+ ret = MIX_RESULT_NO_MEMORY;
+ LOG_E("Failed to create buffer pool\n");
+ goto cleanup;
+ }
+
+ ret = mix_bufferpool_initialize(priv->buffer_pool, bufpoolsize);
+ if (ret != MIX_RESULT_SUCCESS) {
+ LOG_E("Failed to initialize buffer pool\n");
+ goto cleanup;
+ }
+
+ /* Finally, we can create MixVideoFormat */
+ /* What type of MixVideoFormat we need create? */
+
+ if (g_strcmp0(mime_type, "video/x-wmv") == 0
+ && MIX_IS_VIDEOCONFIGPARAMSDEC_VC1(priv_config_params_dec)) {
+
+ MixVideoFormat_VC1 *video_format = mix_videoformat_vc1_new();
+ if (!video_format) {
+ ret = MIX_RESULT_NO_MEMORY;
+ LOG_E("Failed to create VC-1 video format\n");
+ goto cleanup;
+ }
+
+ /* TODO: work specific to VC-1 */
+
+ priv->video_format = MIX_VIDEOFORMAT(video_format);
+
+ } else if (g_strcmp0(mime_type, "video/x-h264") == 0
+ && MIX_IS_VIDEOCONFIGPARAMSDEC_H264(priv_config_params_dec)) {
+
+ MixVideoFormat_H264 *video_format = mix_videoformat_h264_new();
+ if (!video_format) {
+ ret = MIX_RESULT_NO_MEMORY;
+ LOG_E("Failed to create H.264 video format\n");
+ goto cleanup;
+ }
+
+ /* TODO: work specific to H.264 */
+
+ priv->video_format = MIX_VIDEOFORMAT(video_format);
+
+ } else if (g_strcmp0(mime_type, "video/mpeg") == 0 || g_strcmp0(mime_type,
+ "video/x-divx") == 0) {
+
+ guint version = 0;
+
+ /* Is this mpeg4:2 ? */
+ if (g_strcmp0(mime_type, "video/mpeg") == 0) {
+
+ /*
+ * we don't support mpeg other than mpeg verion 4
+ */
+ if (!MIX_IS_VIDEOCONFIGPARAMSDEC_MP42(priv_config_params_dec)) {
+ ret = MIX_RESULT_NOT_SUPPORTED;
+ goto cleanup;
+ }
+
+ /* what is the mpeg version ? */
+ ret = mix_videoconfigparamsdec_mp42_get_mpegversion(
+ MIX_VIDEOCONFIGPARAMSDEC_MP42(priv_config_params_dec), &version);
+ if (ret != MIX_RESULT_SUCCESS) {
+ LOG_E("Failed to get mpeg version\n");
+ goto cleanup;
+ }
+
+ /* if it is not MPEG4 */
+ if (version != 4) {
+ ret = MIX_RESULT_NOT_SUPPORTED;
+ goto cleanup;
+ }
+
+ } else {
+
+ /* config_param shall be MixVideoConfigParamsDecMP42 */
+ if (!MIX_IS_VIDEOCONFIGPARAMSDEC_MP42(priv_config_params_dec)) {
+ ret = MIX_RESULT_NOT_SUPPORTED;
+ goto cleanup;
+ }
+
+ /* what is the divx version ? */
+ ret = mix_videoconfigparamsdec_mp42_get_divxversion(
+ MIX_VIDEOCONFIGPARAMSDEC_MP42(priv_config_params_dec), &version);
+ if (ret != MIX_RESULT_SUCCESS) {
+ LOG_E("Failed to get divx version\n");
+ goto cleanup;
+ }
+
+ /* if it is not divx 4 or 5 */
+ if (version != 4 && version != 5) {
+ ret = MIX_RESULT_NOT_SUPPORTED;
+ goto cleanup;
+ }
+ }
+
+ MixVideoFormat_MP42 *video_format = mix_videoformat_mp42_new();
+ if (!video_format) {
+ ret = MIX_RESULT_NO_MEMORY;
+ LOG_E("Failed to create MPEG-4:2 video format\n");
+ goto cleanup;
+ }
+
+ /* TODO: work specific to MPEG-4:2 */
+ priv->video_format = MIX_VIDEOFORMAT(video_format);
+
+ } else {
+
+ /* Oops! A format we don't know */
+
+ ret = MIX_RESULT_FAIL;
+ LOG_E("Unknown format, we can't handle it\n");
+ goto cleanup;
+ }
+
+ /* initialize MixVideoFormat */
+ /* The format fills in priv->surface_pool; we take our own ref below */
+ ret = mix_videofmt_initialize(priv->video_format, priv_config_params_dec,
+ priv->frame_manager, priv->buffer_pool, &priv->surface_pool,
+ priv->va_display);
+
+ if (ret != MIX_RESULT_SUCCESS) {
+ LOG_E("Failed initialize video format\n");
+ goto cleanup;
+ }
+
+ mix_surfacepool_ref(priv->surface_pool);
+
+ /* decide MixVideoFormat from mime_type*/
+
+ priv->configured = TRUE;
+ ret = MIX_RESULT_SUCCESS;
+
+ cleanup:
+
+ if (ret != MIX_RESULT_SUCCESS) {
+ MIXUNREF(priv->config_params, mix_videoconfigparams_unref);
+ MIXUNREF(priv->frame_manager, mix_framemanager_unref);
+ MIXUNREF(priv->buffer_pool, mix_bufferpool_unref);
+ MIXUNREF(priv->video_format, mix_videoformat_unref);
+ }
+
+ /* mime_type was allocated by the getter; owned here */
+ if (mime_type) {
+ g_free(mime_type);
+ }
+
+ g_mutex_unlock(priv->objlock);
+ /* ---------------------- end lock --------------------- */
+
+ LOG_V( "End\n");
+
+ return ret;
+}
+
+/**
+ * mix_video_configure_encode:
+ * @mix: #MixVideo object (must already be initialized).
+ * @config_params_enc: encoder configuration; duplicated internally, caller keeps ownership.
+ * @drm_config_params: DRM parameters -- not used by this implementation.
+ *
+ * Default configure path for encoding. Duplicates the supplied config, then
+ * builds the frame manager, the input buffer pool and a format-specific
+ * #MixVideoFormatEnc (H.264, MPEG-4:2 or preview) chosen from the requested
+ * target format. On any failure every partially constructed object is
+ * released in the cleanup block. Runs entirely under the object lock.
+ */
+MIX_RESULT mix_video_configure_encode(MixVideo * mix,
+ MixVideoConfigParamsEnc * config_params_enc,
+ MixDrmParams * drm_config_params) {
+
+ MIX_RESULT ret = MIX_RESULT_FAIL;
+ MixVideoPrivate *priv = NULL;
+ MixVideoConfigParamsEnc *priv_config_params_enc = NULL;
+
+
+ gchar *mime_type = NULL;
+ MixEncodeTargetFormat encode_format = MIX_ENCODE_TARGET_FORMAT_H264;
+ guint bufpoolsize = 0;
+
+ /* Encoder frames are handled in submission (decode) order. */
+ MixFrameOrderMode frame_order_mode = MIX_FRAMEORDER_MODE_DECODEORDER;
+
+
+ LOG_V( "Begin\n");
+
+ CHECK_INIT(mix, priv);
+
+ if (!config_params_enc) {
+ LOG_E("!config_params_enc\n");
+ return MIX_RESULT_NULL_PTR;
+ }
+ if (!MIX_IS_VIDEOCONFIGPARAMSENC(config_params_enc)) {
+ LOG_E("Not a MixVideoConfigParams\n");
+ return MIX_RESULT_INVALID_PARAM;
+ }
+
+ /* ---------------------- begin lock --------------------- */
+ g_mutex_lock(priv->objlock);
+
+ /*
+ * MixVideo has already been configured, it should be
+ * re-configured.
+ *
+ * TODO: Allow MixVideo re-configuration
+ */
+ if (priv->configured) {
+ ret = MIX_RESULT_SUCCESS;
+ LOG_E( "Already configured\n");
+ goto cleanup;
+ }
+
+ /* Make a copy of config_params */
+ priv->config_params = (MixVideoConfigParams *) mix_params_dup(
+ MIX_PARAMS(config_params_enc));
+ if (!priv->config_params) {
+ ret = MIX_RESULT_NO_MEMORY;
+ LOG_E("Fail to duplicate config_params\n");
+ goto cleanup;
+ }
+
+ priv_config_params_enc = (MixVideoConfigParamsEnc *)priv->config_params;
+
+ /* Get fps, frame order mode and mime type from config_params */
+ ret = mix_videoconfigparamsenc_get_mime_type(priv_config_params_enc,
+ &mime_type);
+ if (ret != MIX_RESULT_SUCCESS) {
+ LOG_E("Failed to get mime type\n");
+ goto cleanup;
+ }
+
+ LOG_I( "mime : %s\n", mime_type);
+
+ ret = mix_videoconfigparamsenc_get_encode_format(priv_config_params_enc,
+ &encode_format);
+ if (ret != MIX_RESULT_SUCCESS) {
+ LOG_E("Failed to get target format\n");
+ goto cleanup;
+ }
+
+ LOG_I( "encode_format : %d\n",
+ encode_format);
+
+ ret = mix_videoconfigparamsenc_get_buffer_pool_size(
+ priv_config_params_enc, &bufpoolsize);
+ if (ret != MIX_RESULT_SUCCESS) {
+ LOG_E("Failed to get buffer pool size\n");
+ goto cleanup;
+ }
+
+ /* create frame manager */
+ priv->frame_manager = mix_framemanager_new();
+ if (!priv->frame_manager) {
+ ret = MIX_RESULT_NO_MEMORY;
+ LOG_E("Failed to create frame manager\n");
+ goto cleanup;
+ }
+
+ /* initialize frame manager */
+ /* frame rate can be any value for encoding. */
+ ret = mix_framemanager_initialize(priv->frame_manager, frame_order_mode,
+ 1, 1, FALSE);
+
+ if (ret != MIX_RESULT_SUCCESS) {
+ LOG_E("Failed to initialize frame manager\n");
+ goto cleanup;
+ }
+
+ /* create buffer pool */
+ priv->buffer_pool = mix_bufferpool_new();
+ if (!priv->buffer_pool) {
+ ret = MIX_RESULT_NO_MEMORY;
+ LOG_E("Failed to create buffer pool\n");
+ goto cleanup;
+ }
+
+ ret = mix_bufferpool_initialize(priv->buffer_pool, bufpoolsize);
+ if (ret != MIX_RESULT_SUCCESS) {
+ LOG_E("Failed to initialize buffer pool\n");
+ goto cleanup;
+ }
+
+ /* Finally, we can create MixVideoFormatEnc */
+ /* What type of MixVideoFormatEnc we need create? */
+
+ /* Each branch requires both the requested target format and a config
+ * object of the matching subtype; otherwise falls to NOT_SUPPORTED. */
+ if (encode_format == MIX_ENCODE_TARGET_FORMAT_H264
+ && MIX_IS_VIDEOCONFIGPARAMSENC_H264(priv_config_params_enc)) {
+
+ MixVideoFormatEnc_H264 *video_format_enc =
+ mix_videoformatenc_h264_new();
+ if (!video_format_enc) {
+ ret = MIX_RESULT_NO_MEMORY;
+ LOG_E("mix_video_configure_encode: Failed to create h264 video enc format\n");
+ goto cleanup;
+ }
+
+ /* TODO: work specific to h264 encode */
+
+ priv->video_format_enc = MIX_VIDEOFORMATENC(video_format_enc);
+
+ }
+ else if (encode_format == MIX_ENCODE_TARGET_FORMAT_MPEG4
+ && MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4(priv_config_params_enc)) {
+
+ MixVideoFormatEnc_MPEG4 *video_format_enc = mix_videoformatenc_mpeg4_new();
+ if (!video_format_enc) {
+ ret = MIX_RESULT_NO_MEMORY;
+ LOG_E("mix_video_configure_encode: Failed to create mpeg-4:2 video format\n");
+ goto cleanup;
+ }
+
+ /* TODO: work specific to mpeg4 */
+
+ priv->video_format_enc = MIX_VIDEOFORMATENC(video_format_enc);
+
+ }
+ else if (encode_format == MIX_ENCODE_TARGET_FORMAT_PREVIEW
+ && MIX_IS_VIDEOCONFIGPARAMSENC_PREVIEW(priv_config_params_enc)) {
+
+ MixVideoFormatEnc_Preview *video_format_enc = mix_videoformatenc_preview_new();
+ if (!video_format_enc) {
+ ret = MIX_RESULT_NO_MEMORY;
+ LOG_E( "mix_video_configure_encode: Failed to create preview video format\n");
+ goto cleanup;
+ }
+
+ priv->video_format_enc = MIX_VIDEOFORMATENC(video_format_enc);
+
+ }
+ else {
+
+ /*unsupported format */
+ ret = MIX_RESULT_NOT_SUPPORTED;
+ LOG_E("Unknown format, we can't handle it\n");
+ goto cleanup;
+ }
+
+ /* initialize MixVideoEncFormat */
+ ret = mix_videofmtenc_initialize(priv->video_format_enc,
+ priv_config_params_enc, priv->frame_manager, NULL, &priv->surface_pool,
+ priv->va_display);
+
+ if (ret != MIX_RESULT_SUCCESS) {
+ LOG_E("Failed initialize video format\n");
+ goto cleanup;
+ }
+
+ /* The format object created the surface pool; hold our own reference. */
+ mix_surfacepool_ref(priv->surface_pool);
+
+ priv->configured = TRUE;
+ ret = MIX_RESULT_SUCCESS;
+
+ cleanup:
+
+ /* On any failure, release everything built above so the object returns
+ * to its unconfigured state. */
+ if (ret != MIX_RESULT_SUCCESS) {
+ MIXUNREF(priv->frame_manager, mix_framemanager_unref);
+ MIXUNREF(priv->config_params, mix_videoconfigparams_unref);
+ MIXUNREF(priv->buffer_pool, mix_bufferpool_unref);
+ MIXUNREF(priv->video_format_enc, mix_videoformatenc_unref);
+ }
+
+ if (mime_type) {
+ g_free(mime_type);
+ }
+
+ g_mutex_unlock(priv->objlock);
+ /* ---------------------- end lock --------------------- */
+
+ LOG_V( "End\n");
+
+ return ret;
+}
+
+/*
+ * Default implementation of configure_func: routes the call to the
+ * decode or encode configure path based on the codec mode chosen at
+ * initialization time.
+ */
+MIX_RESULT mix_video_configure_default(MixVideo * mix,
+ MixVideoConfigParams * config_params,
+ MixDrmParams * drm_config_params) {
+
+ MIX_RESULT ret = MIX_RESULT_FAIL;
+ MixVideoPrivate *priv = NULL;
+ gboolean decode_requested = FALSE;
+ gboolean encode_requested = FALSE;
+
+ LOG_V( "Begin\n");
+
+ CHECK_INIT(mix, priv);
+ if (!config_params) {
+ LOG_E("!config_params\n");
+ return MIX_RESULT_NULL_PTR;
+ }
+
+ /* The config object must match the codec mode. drm_config_params is
+ * not forwarded -- presumably DRM is unsupported here; TODO confirm. */
+ decode_requested = (priv->codec_mode == MIX_CODEC_MODE_DECODE)
+ && MIX_IS_VIDEOCONFIGPARAMSDEC(config_params);
+ encode_requested = (priv->codec_mode == MIX_CODEC_MODE_ENCODE)
+ && MIX_IS_VIDEOCONFIGPARAMSENC(config_params);
+
+ if (decode_requested) {
+ ret = mix_video_configure_decode(mix,
+ (MixVideoConfigParamsDec *) config_params, NULL);
+ } else if (encode_requested) {
+ ret = mix_video_configure_encode(mix,
+ (MixVideoConfigParamsEnc *) config_params, NULL);
+ } else {
+ LOG_E("Codec mode not supported\n");
+ }
+
+ LOG_V( "end\n");
+
+ return ret;
+}
+
+/**
+ * mix_video_get_config_default:
+ * @mix: #MixVideo object (must be configured).
+ * @config_params: out; receives a duplicate of the stored configuration.
+ *                 Caller owns the returned copy and must unref it.
+ *
+ * Returns: MIX_RESULT_SUCCESS on success, MIX_RESULT_NO_MEMORY if the
+ * duplication fails, or an argument error.
+ */
+MIX_RESULT mix_video_get_config_default(MixVideo * mix,
+ MixVideoConfigParams ** config_params) {
+
+ MIX_RESULT ret = MIX_RESULT_FAIL;
+ MixVideoPrivate *priv = NULL;
+
+ CHECK_INIT_CONFIG(mix, priv);
+
+ if (!config_params) {
+ LOG_E( "!config_params\n");
+ return MIX_RESULT_NULL_PTR;
+ }
+
+ /* ---------------------- begin lock --------------------- */
+ g_mutex_lock(priv->objlock);
+
+ *config_params = MIX_VIDEOCONFIGPARAMS(mix_params_dup(MIX_PARAMS(priv->config_params)));
+ if (!*config_params) {
+ ret = MIX_RESULT_NO_MEMORY;
+ LOG_E("Failed to duplicate MixVideoConfigParams\n");
+ goto cleanup;
+ }
+
+ /* BUGFIX: ret previously stayed at MIX_RESULT_FAIL on the success
+ * path, so a successful duplication was reported as failure. */
+ ret = MIX_RESULT_SUCCESS;
+
+ cleanup:
+
+ /* ---------------------- end lock --------------------- */
+ g_mutex_unlock(priv->objlock);
+
+ LOG_V( "End\n");
+
+ return ret;
+
+}
+
+/*
+ * Default decode entry point: verifies a surface is available, then hands
+ * the input buffers to the format-specific decoder under the object lock.
+ */
+MIX_RESULT mix_video_decode_default(MixVideo * mix, MixBuffer * bufin[],
+ gint bufincnt, MixVideoDecodeParams * decode_params) {
+
+ MIX_RESULT result = MIX_RESULT_FAIL;
+ MixVideoPrivate *self = NULL;
+
+ LOG_V( "Begin\n");
+
+ CHECK_INIT_CONFIG(mix, self);
+ if (!bufin || !bufincnt || !decode_params) {
+ LOG_E( "!bufin || !bufincnt || !decode_params\n");
+ return MIX_RESULT_NULL_PTR;
+ }
+
+ /* Decoding needs a free output surface; report exhaustion up front
+ * instead of entering the decoder. */
+ result = mix_surfacepool_check_available(self->surface_pool);
+ if (result == MIX_RESULT_POOLEMPTY) {
+ LOG_I( "Out of surface\n");
+ return MIX_RESULT_OUTOFSURFACES;
+ }
+
+ g_mutex_lock(self->objlock);
+ result = mix_videofmt_decode(self->video_format, bufin, bufincnt, decode_params);
+ g_mutex_unlock(self->objlock);
+
+ LOG_V( "End\n");
+
+ return result;
+}
+
+/*
+ * Default get_frame entry point: dequeues the next displayable frame from
+ * the frame manager. Caller receives a reference and must release it with
+ * mix_video_release_frame().
+ */
+MIX_RESULT mix_video_get_frame_default(MixVideo * mix, MixVideoFrame ** frame) {
+
+ MIX_RESULT result = MIX_RESULT_FAIL;
+ MixVideoPrivate *self = NULL;
+
+ LOG_V( "Begin\n");
+
+ CHECK_INIT_CONFIG(mix, self);
+
+ if (frame == NULL) {
+ LOG_E( "!frame\n");
+ return MIX_RESULT_NULL_PTR;
+ }
+
+ /* ---------------------- begin lock --------------------- */
+ g_mutex_lock(self->objlock);
+
+ LOG_V("Calling frame manager dequeue\n");
+ result = mix_framemanager_dequeue(self->frame_manager, frame);
+
+ /* ---------------------- end lock --------------------- */
+ g_mutex_unlock(self->objlock);
+
+ LOG_V( "End\n");
+
+ return result;
+}
+
+/**
+ * mix_video_release_frame_default:
+ * @mix: #MixVideo object.
+ * @frame: frame obtained from mix_video_get_frame(); the caller's
+ *         reference is consumed here.
+ *
+ * Drops the caller's reference on @frame. No object lock is taken:
+ * #MixVideoFrame has its own lock and protects itself.
+ *
+ * Returns: MIX_RESULT_SUCCESS, or an argument error.
+ */
+MIX_RESULT mix_video_release_frame_default(MixVideo * mix,
+ MixVideoFrame * frame) {
+
+ LOG_V( "Begin\n");
+
+ MIX_RESULT ret = MIX_RESULT_FAIL;
+ MixVideoPrivate *priv = NULL;
+
+ CHECK_INIT_CONFIG(mix, priv);
+
+ if (!frame) {
+ LOG_E( "!frame\n");
+ return MIX_RESULT_NULL_PTR;
+ }
+
+ /* BUGFIX: previously logged via "(guint) frame" with %x, which
+ * truncates the pointer on LP64 platforms; use %p instead. */
+ LOG_I("Releasing reference frame %p\n", (gpointer) frame);
+ mix_videoframe_unref(frame);
+
+ ret = MIX_RESULT_SUCCESS;
+
+ LOG_V( "End\n");
+
+ return ret;
+
+}
+
+/**
+ * mix_video_render_default:
+ * @mix: #MixVideo object.
+ * @render_params: rendering description (display, source/dest rectangles,
+ *                 clip rectangles).
+ * @frame: decoded frame whose VA surface is to be displayed.
+ *
+ * Displays @frame on an X11 drawable via vaPutSurface(). Only
+ * #MixDisplayX11 render targets are supported. The object lock is
+ * deliberately not taken (see inline note).
+ */
+MIX_RESULT mix_video_render_default(MixVideo * mix,
+ MixVideoRenderParams * render_params, MixVideoFrame *frame) {
+
+ LOG_V( "Begin\n");
+
+ MIX_RESULT ret = MIX_RESULT_FAIL;
+ MixVideoPrivate *priv = NULL;
+
+ MixDisplay *mix_display = NULL;
+ MixDisplayX11 *mix_display_x11 = NULL;
+
+ Display *display = NULL;
+ Drawable drawable = 0;
+
+ MixRect src_rect, dst_rect;
+
+ VARectangle *va_cliprects = NULL;
+ guint number_of_cliprects = 0;
+
+ /* VASurfaceID va_surface_id; */
+ gulong va_surface_id;
+ VAStatus va_status;
+
+ CHECK_INIT_CONFIG(mix, priv);
+
+ if (!render_params || !frame) {
+ LOG_E( "!render_params || !frame\n");
+ return MIX_RESULT_NULL_PTR;
+ }
+
+ /* Is this render param valid? */
+ if (!MIX_IS_VIDEORENDERPARAMS(render_params)) {
+ LOG_E("Not MixVideoRenderParams\n");
+ return MIX_RESULT_INVALID_PARAM;
+ }
+
+ /*
+ * We don't need lock here. priv->va_display may be the only variable
+ * seems need to be protected. But, priv->va_display is initialized
+ * when mixvideo object is initialized, and it keeps
+ * the same value thoughout the life of mixvideo.
+ */
+#if 0
+ /* ---------------------- begin lock --------------------- */
+ g_mutex_lock(priv->objlock);
+#endif
+
+ /* get MixDisplay prop from render param */
+ ret = mix_videorenderparams_get_display(render_params, &mix_display);
+ if (ret != MIX_RESULT_SUCCESS) {
+ LOG_E("Failed to get mix_display\n");
+ goto cleanup;
+ }
+
+ /* Is this MixDisplayX11 ? */
+ /* TODO: we shall also support MixDisplay other than MixDisplayX11 */
+ if (!MIX_IS_DISPLAYX11(mix_display)) {
+ ret = MIX_RESULT_INVALID_PARAM;
+ LOG_E( "Not MixDisplayX11\n");
+ goto cleanup;
+ }
+
+ /* cast MixDisplay to MixDisplayX11 */
+ mix_display_x11 = MIX_DISPLAYX11(mix_display);
+
+ /* Get Drawable */
+ ret = mix_displayx11_get_drawable(mix_display_x11, &drawable);
+ if (ret != MIX_RESULT_SUCCESS) {
+ LOG_E( "Failed to get drawable\n");
+ goto cleanup;
+ }
+
+ /* Get Display */
+ ret = mix_displayx11_get_display(mix_display_x11, &display);
+ if (ret != MIX_RESULT_SUCCESS) {
+ LOG_E( "Failed to get display\n");
+ goto cleanup;
+ }
+
+ /* get src_rect */
+ ret = mix_videorenderparams_get_src_rect(render_params, &src_rect);
+ if (ret != MIX_RESULT_SUCCESS) {
+ LOG_E("Failed to get SOURCE src_rect\n");
+ goto cleanup;
+ }
+
+ /* get dst_rect */
+ ret = mix_videorenderparams_get_dest_rect(render_params, &dst_rect);
+ if (ret != MIX_RESULT_SUCCESS) {
+ LOG_E( "Failed to get dst_rect\n");
+ goto cleanup;
+ }
+
+ /* get va_cliprects */
+ ret = mix_videorenderparams_get_cliprects_internal(render_params,
+ &va_cliprects, &number_of_cliprects);
+ if (ret != MIX_RESULT_SUCCESS) {
+ LOG_E("Failed to get va_cliprects\n");
+ goto cleanup;
+ }
+
+ /* get surface id from frame */
+ ret = mix_videoframe_get_frame_id(frame, &va_surface_id);
+ if (ret != MIX_RESULT_SUCCESS) {
+ LOG_E("Failed to get va_surface_id\n");
+ goto cleanup;
+ }
+ guint64 timestamp = 0;
+ mix_videoframe_get_timestamp(frame, &timestamp);
+ /* NOTE(review): timestamp is guint64; G_GUINT64_FORMAT would be the
+ * exact specifier (G_GINT64_FORMAT prints it as signed). */
+ LOG_V( "Displaying surface ID %d, timestamp %"G_GINT64_FORMAT"\n", (int)va_surface_id, timestamp);
+
+ guint32 frame_structure = 0;
+ mix_videoframe_get_frame_structure(frame, &frame_structure);
+ /* TODO: the last param of vaPutSurface is de-interlacing flags,
+ what is value shall be*/
+ va_status = vaPutSurface(priv->va_display, (VASurfaceID) va_surface_id,
+ drawable, src_rect.x, src_rect.y, src_rect.width, src_rect.height,
+ dst_rect.x, dst_rect.y, dst_rect.width, dst_rect.height,
+ va_cliprects, number_of_cliprects, frame_structure);
+
+ if (va_status != VA_STATUS_SUCCESS) {
+ ret = MIX_RESULT_FAIL;
+ LOG_E("Failed vaPutSurface() : va_status = %d\n", va_status);
+ goto cleanup;
+ }
+
+ /* TODO: Is this only for X11? */
+ XSync(display, FALSE);
+
+ ret = MIX_RESULT_SUCCESS;
+
+ cleanup:
+
+ /* Release the display reference taken by get_display above;
+ * MIXUNREF is a no-op on NULL, so this is safe on early failure. */
+ MIXUNREF(mix_display, mix_display_unref)
+ /* MIXUNREF(render_params, mix_videorenderparams_unref)*/
+
+#if 0
+ /* ---------------------- end lock --------------------- */
+ g_mutex_unlock(priv->objlock);
+#endif
+
+ LOG_V( "End\n");
+
+ return ret;
+
+}
+
+/*
+ * Default encode entry point: forwards the input buffers and output
+ * vectors to the format-specific encoder under the object lock.
+ */
+MIX_RESULT mix_video_encode_default(MixVideo * mix, MixBuffer * bufin[],
+ gint bufincnt, MixIOVec * iovout[], gint iovoutcnt,
+ MixVideoEncodeParams * encode_params) {
+
+ MIX_RESULT result = MIX_RESULT_FAIL;
+ MixVideoPrivate *self = NULL;
+
+ LOG_V( "Begin\n");
+
+ CHECK_INIT_CONFIG(mix, self);
+
+ /* encode_params is deliberately not validated: it is a placeholder. */
+ if (!bufin || !bufincnt) {
+ LOG_E( "!bufin || !bufincnt\n");
+ return MIX_RESULT_NULL_PTR;
+ }
+
+ g_mutex_lock(self->objlock);
+ result = mix_videofmtenc_encode(self->video_format_enc, bufin, bufincnt,
+ iovout, iovoutcnt, encode_params);
+ g_mutex_unlock(self->objlock);
+
+ LOG_V( "End\n");
+ return result;
+}
+
+/*
+ * Default flush entry point: drains the active codec path. In decode
+ * mode the decoder is flushed and queued frames are discarded; in encode
+ * mode only the encoder is flushed (no frame manager on that path yet).
+ */
+MIX_RESULT mix_video_flush_default(MixVideo * mix) {
+
+ MIX_RESULT result = MIX_RESULT_FAIL;
+ MixVideoPrivate *self = NULL;
+ gboolean decoding, encoding;
+
+ LOG_V( "Begin\n");
+
+ CHECK_INIT_CONFIG(mix, self);
+
+ /* ---------------------- begin lock --------------------- */
+ g_mutex_lock(self->objlock);
+
+ decoding = (self->codec_mode == MIX_CODEC_MODE_DECODE)
+ && self->video_format != NULL;
+ encoding = (self->codec_mode == MIX_CODEC_MODE_ENCODE)
+ && self->video_format_enc != NULL;
+
+ if (!decoding && !encoding) {
+ g_mutex_unlock(self->objlock);
+ LOG_E("Invalid video_format/video_format_enc Pointer\n");
+ return MIX_RESULT_NULL_PTR;
+ }
+
+ if (decoding) {
+ (void) mix_videofmt_flush(self->video_format);
+ result = mix_framemanager_flush(self->frame_manager);
+ } else {
+ result = mix_videofmtenc_flush(self->video_format_enc);
+ }
+
+ /* ---------------------- end lock --------------------- */
+ g_mutex_unlock(self->objlock);
+
+ LOG_V( "End\n");
+
+ return result;
+
+}
+
+/*
+ * Default EOS entry point: signals end-of-stream to the active codec
+ * path. In decode mode the frame manager also latches its EOS flag so
+ * remaining queued frames can drain.
+ */
+MIX_RESULT mix_video_eos_default(MixVideo * mix) {
+
+ MIX_RESULT result = MIX_RESULT_FAIL;
+ MixVideoPrivate *self = NULL;
+ gboolean decoding, encoding;
+
+ LOG_V( "Begin\n");
+
+ CHECK_INIT_CONFIG(mix, self);
+
+ /* ---------------------- begin lock --------------------- */
+ g_mutex_lock(self->objlock);
+
+ decoding = (self->codec_mode == MIX_CODEC_MODE_DECODE)
+ && self->video_format != NULL;
+ encoding = (self->codec_mode == MIX_CODEC_MODE_ENCODE)
+ && self->video_format_enc != NULL;
+
+ if (!decoding && !encoding) {
+ g_mutex_unlock(self->objlock);
+ LOG_E("Invalid video_format/video_format_enc Pointer\n");
+ return MIX_RESULT_NULL_PTR;
+ }
+
+ if (decoding) {
+ (void) mix_videofmt_eos(self->video_format);
+ /* frame manager will set EOS flag to be TRUE */
+ result = mix_framemanager_eos(self->frame_manager);
+ } else {
+ /* No frame manager on the encode path yet. */
+ result = mix_videofmtenc_eos(self->video_format_enc);
+ }
+
+ /* ---------------------- end lock --------------------- */
+ g_mutex_unlock(self->objlock);
+
+ LOG_V( "End\n");
+
+ return result;
+}
+
+/*
+ * Default get_state entry point. CHECK_INIT_CONFIG only passes once the
+ * object is configured, so the reported state is always CONFIGURED here.
+ */
+MIX_RESULT mix_video_get_state_default(MixVideo * mix, MixState * state) {
+
+ MixVideoPrivate *self = NULL;
+
+ LOG_V( "Begin\n");
+
+ CHECK_INIT_CONFIG(mix, self);
+
+ if (state == NULL) {
+ LOG_E( "!state\n");
+ return MIX_RESULT_NULL_PTR;
+ }
+
+ *state = MIX_STATE_CONFIGURED;
+
+ LOG_V( "End\n");
+
+ return MIX_RESULT_SUCCESS;
+}
+
+/*
+ * Default get_mixbuffer entry point: hands out an input buffer from the
+ * shared pool; the pool is mutable shared state, so the object lock is
+ * held around the access.
+ */
+MIX_RESULT mix_video_get_mixbuffer_default(MixVideo * mix, MixBuffer ** buf) {
+
+ MIX_RESULT result = MIX_RESULT_FAIL;
+ MixVideoPrivate *self = NULL;
+
+ LOG_V( "Begin\n");
+
+ CHECK_INIT_CONFIG(mix, self);
+
+ if (buf == NULL) {
+ LOG_E( "!buf\n");
+ return MIX_RESULT_INVALID_PARAM;
+ }
+
+ /* ---------------------- begin lock --------------------- */
+ g_mutex_lock(self->objlock);
+
+ result = mix_bufferpool_get(self->buffer_pool, buf);
+
+ /* ---------------------- end lock --------------------- */
+ g_mutex_unlock(self->objlock);
+
+ LOG_V( "End ret = 0x%x\n", result);
+
+ return result;
+
+}
+
+/**
+ * mix_video_release_mixbuffer_default:
+ * @mix: #MixVideo object.
+ * @buf: buffer obtained from mix_video_get_mixbuffer(); the caller's
+ *       reference is consumed here.
+ *
+ * Returns the buffer's reference to the pool.
+ *
+ * Returns: MIX_RESULT_SUCCESS on success, or an argument error.
+ */
+MIX_RESULT mix_video_release_mixbuffer_default(MixVideo * mix, MixBuffer * buf) {
+
+ MIX_RESULT ret = MIX_RESULT_FAIL;
+ MixVideoPrivate *priv = NULL;
+
+ LOG_V( "Begin\n");
+
+ CHECK_INIT_CONFIG(mix, priv);
+
+ if (!buf) {
+ LOG_E( "!buf\n");
+ return MIX_RESULT_INVALID_PARAM;
+ }
+
+ /* ---------------------- begin lock --------------------- */
+ g_mutex_lock(priv->objlock);
+
+ mix_buffer_unref(buf);
+
+ /* BUGFIX: ret was never updated after the unref, so this function
+ * always returned MIX_RESULT_FAIL even on success. */
+ ret = MIX_RESULT_SUCCESS;
+
+ /* ---------------------- end lock --------------------- */
+ g_mutex_unlock(priv->objlock);
+
+ LOG_V( "End\n");
+ return ret;
+
+}
+
+/**
+ * mix_video_get_max_coded_buffer_size_default:
+ * @mix: #MixVideo object (must be configured for encode).
+ * @max_size: out; receives the maximum coded buffer size in bytes.
+ *
+ * Queries the encoder for the worst-case size of one coded output buffer.
+ *
+ * Returns: MIX_RESULT_SUCCESS on success, MIX_RESULT_NULL_PTR for bad
+ * arguments or when no encoder is active.
+ */
+MIX_RESULT mix_video_get_max_coded_buffer_size_default (MixVideo * mix, guint *max_size)
+{
+ MIX_RESULT ret = MIX_RESULT_FAIL;
+ MixVideoPrivate *priv = NULL;
+
+ LOG_V( "Begin\n");
+
+ if (!mix || !max_size)
+ {
+ /* BUGFIX: message previously referred to "!bufsize", which is not
+ * the name of this function's parameter. */
+ LOG_E( "!mix || !max_size\n");
+ return MIX_RESULT_NULL_PTR;
+ }
+
+ CHECK_INIT_CONFIG(mix, priv);
+
+ g_mutex_lock(priv->objlock);
+
+ /* Robustness: only meaningful with a live encoder; mirror the NULL
+ * guard that flush/eos apply before touching video_format_enc. */
+ if (priv->video_format_enc == NULL) {
+ g_mutex_unlock(priv->objlock);
+ LOG_E("Invalid video_format_enc Pointer\n");
+ return MIX_RESULT_NULL_PTR;
+ }
+
+ ret = mix_videofmtenc_get_max_coded_buffer_size(priv->video_format_enc, max_size);
+
+ g_mutex_unlock(priv->objlock);
+
+ LOG_V( "End\n");
+ return ret;
+}
+
+/*
+ * API functions
+ */
+
+/*
+ * Validate the public-API `mix` handle and fetch its class vtable into
+ * `klass`. Expands to early `return` statements, so it is only usable
+ * inside functions returning MIX_RESULT (and cannot be wrapped in the
+ * usual do/while(0) for the same reason).
+ */
+#define CHECK_AND_GET_MIX_CLASS(mix, klass) \
+ if (!mix) { \
+ return MIX_RESULT_NULL_PTR; \
+ } \
+ if (!MIX_IS_VIDEO(mix)) { \
+ LOG_E( "Not MixVideo\n"); \
+ return MIX_RESULT_INVALID_PARAM; \
+ } \
+ klass = MIX_VIDEO_GET_CLASS(mix);
+
+
+MIX_RESULT mix_video_get_version(MixVideo * mix, guint * major, guint * minor) {
+
+ MixVideoClass *klass = NULL;
+ CHECK_AND_GET_MIX_CLASS(mix, klass);
+
+ /* Forward to the subclass implementation, if one is installed. */
+ return klass->get_version_func
+ ? klass->get_version_func(mix, major, minor)
+ : MIX_RESULT_NOTIMPL;
+}
+
+MIX_RESULT mix_video_initialize(MixVideo * mix, MixCodecMode mode,
+ MixVideoInitParams * init_params, MixDrmParams * drm_init_params) {
+
+ MixVideoClass *klass = NULL;
+ CHECK_AND_GET_MIX_CLASS(mix, klass);
+
+ return klass->initialize_func
+ ? klass->initialize_func(mix, mode, init_params, drm_init_params)
+ : MIX_RESULT_NOTIMPL;
+}
+
+MIX_RESULT mix_video_deinitialize(MixVideo * mix) {
+
+ MixVideoClass *klass = NULL;
+ CHECK_AND_GET_MIX_CLASS(mix, klass);
+
+ return klass->deinitialize_func
+ ? klass->deinitialize_func(mix)
+ : MIX_RESULT_NOTIMPL;
+}
+
+MIX_RESULT mix_video_configure(MixVideo * mix,
+ MixVideoConfigParams * config_params,
+ MixDrmParams * drm_config_params) {
+
+ MixVideoClass *klass = NULL;
+ CHECK_AND_GET_MIX_CLASS(mix, klass);
+
+ return klass->configure_func
+ ? klass->configure_func(mix, config_params, drm_config_params)
+ : MIX_RESULT_NOTIMPL;
+}
+
+MIX_RESULT mix_video_get_config(MixVideo * mix,
+ MixVideoConfigParams ** config_params_dec) {
+
+ MixVideoClass *klass = NULL;
+ CHECK_AND_GET_MIX_CLASS(mix, klass);
+
+ return klass->get_config_func
+ ? klass->get_config_func(mix, config_params_dec)
+ : MIX_RESULT_NOTIMPL;
+}
+
+MIX_RESULT mix_video_decode(MixVideo * mix, MixBuffer * bufin[], gint bufincnt,
+ MixVideoDecodeParams * decode_params) {
+
+ MixVideoClass *klass = NULL;
+ CHECK_AND_GET_MIX_CLASS(mix, klass);
+
+ return klass->decode_func
+ ? klass->decode_func(mix, bufin, bufincnt, decode_params)
+ : MIX_RESULT_NOTIMPL;
+}
+
+MIX_RESULT mix_video_get_frame(MixVideo * mix, MixVideoFrame ** frame) {
+
+ MixVideoClass *klass = NULL;
+ CHECK_AND_GET_MIX_CLASS(mix, klass);
+
+ return klass->get_frame_func
+ ? klass->get_frame_func(mix, frame)
+ : MIX_RESULT_NOTIMPL;
+}
+
+MIX_RESULT mix_video_release_frame(MixVideo * mix, MixVideoFrame * frame) {
+
+ MixVideoClass *klass = NULL;
+ CHECK_AND_GET_MIX_CLASS(mix, klass);
+
+ return klass->release_frame_func
+ ? klass->release_frame_func(mix, frame)
+ : MIX_RESULT_NOTIMPL;
+}
+
+MIX_RESULT mix_video_render(MixVideo * mix,
+ MixVideoRenderParams * render_params, MixVideoFrame *frame) {
+
+ MixVideoClass *klass = NULL;
+ CHECK_AND_GET_MIX_CLASS(mix, klass);
+
+ return klass->render_func
+ ? klass->render_func(mix, render_params, frame)
+ : MIX_RESULT_NOTIMPL;
+}
+
+MIX_RESULT mix_video_encode(MixVideo * mix, MixBuffer * bufin[], gint bufincnt,
+ MixIOVec * iovout[], gint iovoutcnt,
+ MixVideoEncodeParams * encode_params) {
+
+ MixVideoClass *klass = NULL;
+ CHECK_AND_GET_MIX_CLASS(mix, klass);
+
+ return klass->encode_func
+ ? klass->encode_func(mix, bufin, bufincnt, iovout, iovoutcnt,
+ encode_params)
+ : MIX_RESULT_NOTIMPL;
+}
+
+MIX_RESULT mix_video_flush(MixVideo * mix) {
+
+ MixVideoClass *klass = NULL;
+ CHECK_AND_GET_MIX_CLASS(mix, klass);
+
+ return klass->flush_func
+ ? klass->flush_func(mix)
+ : MIX_RESULT_NOTIMPL;
+}
+
+MIX_RESULT mix_video_eos(MixVideo * mix) {
+
+ MixVideoClass *klass = NULL;
+ CHECK_AND_GET_MIX_CLASS(mix, klass);
+
+ return klass->eos_func
+ ? klass->eos_func(mix)
+ : MIX_RESULT_NOTIMPL;
+}
+
+MIX_RESULT mix_video_get_state(MixVideo * mix, MixState * state) {
+
+ MixVideoClass *klass = NULL;
+ CHECK_AND_GET_MIX_CLASS(mix, klass);
+
+ return klass->get_state_func
+ ? klass->get_state_func(mix, state)
+ : MIX_RESULT_NOTIMPL;
+}
+
+MIX_RESULT mix_video_get_mixbuffer(MixVideo * mix, MixBuffer ** buf) {
+
+ MixVideoClass *klass = NULL;
+ CHECK_AND_GET_MIX_CLASS(mix, klass);
+
+ return klass->get_mix_buffer_func
+ ? klass->get_mix_buffer_func(mix, buf)
+ : MIX_RESULT_NOTIMPL;
+}
+
+MIX_RESULT mix_video_release_mixbuffer(MixVideo * mix, MixBuffer * buf) {
+
+ MixVideoClass *klass = NULL;
+ CHECK_AND_GET_MIX_CLASS(mix, klass);
+
+ return klass->release_mix_buffer_func
+ ? klass->release_mix_buffer_func(mix, buf)
+ : MIX_RESULT_NOTIMPL;
+}
+
+MIX_RESULT mix_video_get_max_coded_buffer_size(MixVideo * mix, guint *bufsize) {
+
+ MixVideoClass *klass = NULL;
+
+ /* BUGFIX/consistency: validate `mix` before dereferencing its class,
+ * exactly like every other public entry point; previously a NULL or
+ * wrongly-typed mix would crash inside MIX_VIDEO_GET_CLASS. */
+ CHECK_AND_GET_MIX_CLASS(mix, klass);
+
+ if (klass->get_max_coded_buffer_size_func) {
+ return klass->get_max_coded_buffer_size_func(mix, bufsize);
+ }
+ return MIX_RESULT_NOTIMPL;
+}
diff --git a/mix_video/src/mixvideo.h b/mix_video/src/mixvideo.h
new file mode 100644
index 0000000..0b50cf6
--- /dev/null
+++ b/mix_video/src/mixvideo.h
@@ -0,0 +1,208 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+#ifndef __MIX_VIDEO_H__
+#define __MIX_VIDEO_H__
+
+#include <glib-object.h>
+
+#include "mixdrmparams.h"
+#include "mixvideoinitparams.h"
+#include "mixvideoconfigparamsdec.h"
+#include "mixvideoconfigparamsenc.h"
+#include "mixvideodecodeparams.h"
+#include "mixvideoencodeparams.h"
+#include "mixvideorenderparams.h"
+#include "mixvideocaps.h"
+#include "mixbuffer.h"
+
+/*
+ * Type macros.
+ */
+#define MIX_TYPE_VIDEO (mix_video_get_type ())
+#define MIX_VIDEO(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_VIDEO, MixVideo))
+#define MIX_IS_VIDEO(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEO))
+#define MIX_VIDEO_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_VIDEO, MixVideoClass))
+#define MIX_IS_VIDEO_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEO))
+#define MIX_VIDEO_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEO, MixVideoClass))
+
+typedef struct _MixVideo MixVideo;
+typedef struct _MixVideoClass MixVideoClass;
+
+/*
+ * Virtual methods typedef
+ */
+
+typedef MIX_RESULT (*MixVideoGetVersionFunc)(MixVideo * mix, guint * major,
+ guint * minor);
+
+typedef MIX_RESULT (*MixVideoInitializeFunc)(MixVideo * mix, MixCodecMode mode,
+ MixVideoInitParams * init_params, MixDrmParams * drm_init_params);
+
+typedef MIX_RESULT (*MixVideoDeinitializeFunc)(MixVideo * mix);
+
+typedef MIX_RESULT (*MixVideoConfigureFunc)(MixVideo * mix,
+ MixVideoConfigParams * config_params,
+ MixDrmParams * drm_config_params);
+
+typedef MIX_RESULT (*MixVideoGetConfigFunc)(MixVideo * mix,
+ MixVideoConfigParams ** config_params);
+
+typedef MIX_RESULT (*MixVideoDecodeFunc)(MixVideo * mix, MixBuffer * bufin[],
+ gint bufincnt, MixVideoDecodeParams * decode_params);
+
+typedef MIX_RESULT (*MixVideoGetFrameFunc)(MixVideo * mix,
+ MixVideoFrame ** frame);
+
+typedef MIX_RESULT (*MixVideoReleaseFrameFunc)(MixVideo * mix,
+ MixVideoFrame * frame);
+
+typedef MIX_RESULT (*MixVideoRenderFunc)(MixVideo * mix,
+ MixVideoRenderParams * render_params, MixVideoFrame *frame);
+
+typedef MIX_RESULT (*MixVideoEncodeFunc)(MixVideo * mix, MixBuffer * bufin[],
+ gint bufincnt, MixIOVec * iovout[], gint iovoutcnt,
+ MixVideoEncodeParams * encode_params);
+
+typedef MIX_RESULT (*MixVideoFlushFunc)(MixVideo * mix);
+
+typedef MIX_RESULT (*MixVideoEOSFunc)(MixVideo * mix);
+
+typedef MIX_RESULT (*MixVideoGetStateFunc)(MixVideo * mix, MixState * state);
+
+typedef MIX_RESULT
+(*MixVideoGetMixBufferFunc)(MixVideo * mix, MixBuffer ** buf);
+
+typedef MIX_RESULT (*MixVideoReleaseMixBufferFunc)(MixVideo * mix,
+ MixBuffer * buf);
+
+typedef MIX_RESULT (*MixVideoGetMaxCodedBufferSizeFunc) (MixVideo * mix,
+ guint *max_size);
+
+/**
+ * MixVideo:
+ * @parent: Parent #GObject.
+ *
+ * MI-X Video object. The instance exposes no public fields; all state is
+ * kept in an opaque private structure reached through the private
+ * @context pointer. (Earlier documentation described @streamState,
+ * @decodeMode and @encoding fields, but no such members exist in this
+ * struct.)
+ */
+struct _MixVideo {
+ /*< public > */
+ GObject parent;
+
+ /*< public > */
+
+ /*< private > */
+ gpointer context;
+};
+
+/**
+ * MixVideoClass:
+ *
+ * MI-X Video object class. Every member is a virtual method pointer; the
+ * public mix_video_* API functions dispatch through these and return
+ * MIX_RESULT_NOTIMPL when an entry is NULL.
+ */
+struct _MixVideoClass {
+ /*< public > */
+ GObjectClass parent_class;
+
+ /* class members */
+
+ MixVideoGetVersionFunc get_version_func;
+ MixVideoInitializeFunc initialize_func;
+ MixVideoDeinitializeFunc deinitialize_func;
+ MixVideoConfigureFunc configure_func;
+ MixVideoGetConfigFunc get_config_func;
+ MixVideoDecodeFunc decode_func;
+ MixVideoGetFrameFunc get_frame_func;
+ MixVideoReleaseFrameFunc release_frame_func;
+ MixVideoRenderFunc render_func;
+ MixVideoEncodeFunc encode_func;
+ MixVideoFlushFunc flush_func;
+ MixVideoEOSFunc eos_func;
+ MixVideoGetStateFunc get_state_func;
+ MixVideoGetMixBufferFunc get_mix_buffer_func;
+ MixVideoReleaseMixBufferFunc release_mix_buffer_func;
+ MixVideoGetMaxCodedBufferSizeFunc get_max_coded_buffer_size_func;
+};
+
+/**
+ * mix_video_get_type:
+ * @returns: type
+ *
+ * Get the type of object.
+ */
+GType mix_video_get_type(void);
+
+/**
+ * mix_video_new:
+ * @returns: A newly allocated instance of #MixVideo
+ *
+ * Use this method to create new instance of #MixVideo
+ */
+MixVideo *mix_video_new(void);
+
+/**
+ * mix_video_ref:
+ * @mix: object to add reference
+ * @returns: the MixVideo instance where reference count has been increased.
+ *
+ * Add reference count.
+ */
+MixVideo *mix_video_ref(MixVideo * mix);
+
+/**
+ * mix_video_unref:
+ * @obj: object to unref.
+ *
+ * Decrement the reference count of the object; the object is destroyed
+ * when the count reaches zero (standard #GObject semantics).
+ */
+#define mix_video_unref(obj) g_object_unref (G_OBJECT(obj))
+
+/* Class Methods */
+
+MIX_RESULT mix_video_get_version(MixVideo * mix, guint * major, guint * minor);
+
+MIX_RESULT mix_video_initialize(MixVideo * mix, MixCodecMode mode,
+ MixVideoInitParams * init_params, MixDrmParams * drm_init_params);
+
+MIX_RESULT mix_video_deinitialize(MixVideo * mix);
+
+MIX_RESULT mix_video_configure(MixVideo * mix,
+ MixVideoConfigParams * config_params,
+ MixDrmParams * drm_config_params);
+
+MIX_RESULT mix_video_get_config(MixVideo * mix,
+ MixVideoConfigParams ** config_params);
+
+MIX_RESULT mix_video_decode(MixVideo * mix, MixBuffer * bufin[], gint bufincnt,
+ MixVideoDecodeParams * decode_params);
+
+MIX_RESULT mix_video_get_frame(MixVideo * mix, MixVideoFrame ** frame);
+
+MIX_RESULT mix_video_release_frame(MixVideo * mix, MixVideoFrame * frame);
+
+MIX_RESULT mix_video_render(MixVideo * mix,
+ MixVideoRenderParams * render_params, MixVideoFrame *frame);
+
+MIX_RESULT mix_video_encode(MixVideo * mix, MixBuffer * bufin[], gint bufincnt,
+ MixIOVec * iovout[], gint iovoutcnt,
+ MixVideoEncodeParams * encode_params);
+
+MIX_RESULT mix_video_flush(MixVideo * mix);
+
+MIX_RESULT mix_video_eos(MixVideo * mix);
+
+MIX_RESULT mix_video_get_state(MixVideo * mix, MixState * state);
+
+MIX_RESULT mix_video_get_mixbuffer(MixVideo * mix, MixBuffer ** buf);
+
+MIX_RESULT mix_video_release_mixbuffer(MixVideo * mix, MixBuffer * buf);
+
+#endif /* __MIX_VIDEO_H__ */
diff --git a/mix_video/src/mixvideo_private.h b/mix_video/src/mixvideo_private.h
new file mode 100644
index 0000000..806d249
--- /dev/null
+++ b/mix_video/src/mixvideo_private.h
@@ -0,0 +1,57 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+#ifndef __MIX_VIDEO_PRIVATE_H__
+#define __MIX_VIDEO_PRIVATE_H__
+
+
+typedef struct _MixVideoPrivate MixVideoPrivate;
+
+/* Private per-instance state of MixVideo (not visible to API users). */
+struct _MixVideoPrivate {
+ /*< private > */
+
+ GMutex *objlock;              /* mutex guarding this object's state */
+ gboolean initialized;         /* presumably set once initialization succeeds — confirm in mixvideo.c */
+ gboolean configured;          /* presumably set once configuration succeeds — confirm in mixvideo.c */
+
+ VADisplay va_display;         /* libVA display handle */
+
+ int va_major_version;         /* libVA API version numbers */
+ int va_minor_version;
+
+ MixCodecMode codec_mode;      /* encode vs. decode operating mode */
+
+ MixVideoInitParams *init_params;      /* parameters retained from initialization */
+ MixDrmParams *drm_params;             /* DRM parameters retained from initialization */
+
+ MixVideoConfigParams *config_params;  /* parameters retained from configuration */
+
+ MixFrameManager *frame_manager;       /* frame manager instance */
+ MixVideoFormat *video_format;         /* decode format object */
+ MixVideoFormatEnc *video_format_enc;  /* encode format object */
+
+ MixSurfacePool *surface_pool;         /* pool of video surfaces */
+ MixBufferPool *buffer_pool;           /* pool of MixBuffers */
+
+};
+
+/**
+ * MIX_VIDEO_GET_PRIVATE:
+ * @obj: class object for which to get private data.
+ *
+ * Get private structure of this class.
+ */
+#define MIX_VIDEO_GET_PRIVATE(obj) \
+ (G_TYPE_INSTANCE_GET_PRIVATE ((obj), MIX_TYPE_VIDEO, MixVideoPrivate))
+
+/* Private functions */
+void mix_video_private_initialize(MixVideoPrivate* priv);
+void mix_video_private_cleanup(MixVideoPrivate* priv);
+
+
+#endif /* __MIX_VIDEO_PRIVATE_H__ */
diff --git a/mix_video/src/mixvideocaps.c b/mix_video/src/mixvideocaps.c
new file mode 100644
index 0000000..3a41c47
--- /dev/null
+++ b/mix_video/src/mixvideocaps.c
@@ -0,0 +1,261 @@
+/*
+INTEL CONFIDENTIAL
+Copyright 2009 Intel Corporation All Rights Reserved.
+The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+*/
+
+/**
+* SECTION:mixvideocaps
+* @short_description: Video capabilities
+*
+* A data object which stores video capability strings.
+*/
+
+#include "mixvideocaps.h"
+
+#define SAFE_FREE(p) if(p) { g_free(p); p = NULL; }
+
+static GType _mix_videocaps_type = 0;
+static MixParamsClass *parent_class = NULL;
+
+#define _do_init { _mix_videocaps_type = g_define_type_id; }
+
+gboolean mix_videocaps_copy (MixParams * target, const MixParams * src);
+MixParams *mix_videocaps_dup (const MixParams * obj);
+gboolean mix_videocaps_equal (MixParams * first, MixParams * second);
+static void mix_videocaps_finalize (MixParams * obj);
+
+G_DEFINE_TYPE_WITH_CODE (MixVideoCaps, mix_videocaps, MIX_TYPE_PARAMS,
+ _do_init);
+
+static void
+mix_videocaps_init (MixVideoCaps * self)
+{
+  /* Start with no capability strings so finalize/copy can free them
+   * unconditionally; the reserved slots are placeholders only. */
+  self->mix_caps = NULL;
+  self->video_hw_caps = NULL;
+
+  self->reserved1 = self->reserved2 = NULL;
+  self->reserved3 = self->reserved4 = NULL;
+}
+
+static void
+mix_videocaps_class_init (MixVideoCapsClass * klass)
+{
+  MixParamsClass *params_class = MIX_PARAMS_CLASS (klass);
+
+  /* Remember the parent class so instance methods can chain up. */
+  parent_class = (MixParamsClass *) g_type_class_peek_parent (klass);
+
+  /* Install this class's virtual-method overrides. */
+  params_class->finalize = mix_videocaps_finalize;
+  params_class->copy = (MixParamsCopyFunction) mix_videocaps_copy;
+  params_class->dup = (MixParamsDupFunction) mix_videocaps_dup;
+  params_class->equal = (MixParamsEqualFunction) mix_videocaps_equal;
+}
+
+MixVideoCaps *
+mix_videocaps_new (void)
+{
+  /* Allocate through the GType system so instance init runs. */
+  return (MixVideoCaps *) g_type_create_instance (MIX_TYPE_VIDEOCAPS);
+}
+
+void
+mix_videocaps_finalize (MixParams * obj)
+{
+  MixVideoCaps *self = MIX_VIDEOCAPS (obj);
+
+  /* Release the capability strings owned by this instance. */
+  SAFE_FREE (self->mix_caps);
+  SAFE_FREE (self->video_hw_caps);
+
+  /* Chain up to the parent finalizer, if one exists. */
+  if (parent_class->finalize)
+    parent_class->finalize (obj);
+}
+
+MixVideoCaps *
+mix_videocaps_ref (MixVideoCaps * mix)
+{
+  /* Reference counting is delegated to the MixParams base. */
+  MixParams *base = mix_params_ref (MIX_PARAMS (mix));
+  return (MixVideoCaps *) base;
+}
+
+/**
+* mix_videocaps_dup:
+* @obj: a #MixVideoCaps object
+* @returns: a newly allocated duplicate of the object.
+*
+* Copy duplicate of the object.
+*/
+MixParams *
+mix_videocaps_dup (const MixParams * obj)
+{
+  MixVideoCaps *clone;
+
+  if (!MIX_IS_VIDEOCAPS (obj))
+    return NULL;
+
+  /* Create a fresh instance and deep-copy @obj into it. */
+  clone = mix_videocaps_new ();
+  if (!mix_videocaps_copy (MIX_PARAMS (clone), MIX_PARAMS (obj)))
+    {
+      mix_videocaps_unref (clone);
+      return NULL;
+    }
+  return MIX_PARAMS (clone);
+}
+
+/**
+* mix_videocaps_copy:
+* @target: copy to target
+* @src: copy from src
+* @returns: boolean indicates if copy is successful.
+*
+* Copy instance data from @src to @target.
+*/
+gboolean
+mix_videocaps_copy (MixParams * target, const MixParams * src)
+{
+  MixVideoCaps *to, *from;
+
+  if (!MIX_IS_VIDEOCAPS (target) || !MIX_IS_VIDEOCAPS (src))
+    return FALSE;
+
+  to = MIX_VIDEOCAPS (target);
+  from = MIX_VIDEOCAPS (src);
+
+  /* Drop any strings the target already owns, then deep-copy
+   * the source's strings (g_strdup handles NULL). */
+  SAFE_FREE (to->mix_caps);
+  SAFE_FREE (to->video_hw_caps);
+  to->mix_caps = g_strdup (from->mix_caps);
+  to->video_hw_caps = g_strdup (from->video_hw_caps);
+
+  /* Chain up so the base class copies its own members. */
+  if (parent_class->copy)
+    return parent_class->copy (MIX_PARAMS_CAST (target),
+                               MIX_PARAMS_CAST (src));
+  return TRUE;
+}
+
+/**
+* mix_videocaps_equal:
+* @first: first object to compare
+* @second: second object to compare
+* @returns: boolean indicates if instance are equal.
+*
+* Deep-compare two objects for equality.
+*/
+gboolean
+mix_videocaps_equal (MixParams * first, MixParams * second)
+{
+  MixVideoCaps *a, *b;
+  MixParamsClass *klass;
+
+  if (!MIX_IS_VIDEOCAPS (first) || !MIX_IS_VIDEOCAPS (second))
+    return FALSE;
+
+  a = MIX_VIDEOCAPS (first);
+  b = MIX_VIDEOCAPS (second);
+
+  /* TODO: add comparison for other properties */
+  /* Deep-compare both capability strings (g_strcmp0 is NULL-safe). */
+  if (g_strcmp0 (a->mix_caps, b->mix_caps) != 0)
+    return FALSE;
+  if (g_strcmp0 (a->video_hw_caps, b->video_hw_caps) != 0)
+    return FALSE;
+
+  /* Members at this level match; chain up to the base class. */
+  klass = MIX_PARAMS_CLASS (parent_class);
+  return klass->equal ? klass->equal (first, second) : TRUE;
+}
+
+/* Guard for setters: reject a NULL or wrongly-typed object argument. */
+#define MIX_VIDEOCAPS_SETTER_CHECK_INPUT(obj) \
+ if(!obj) return MIX_RESULT_NULL_PTR; \
+ if(!MIX_IS_VIDEOCAPS(obj)) return MIX_RESULT_FAIL; \
+
+/* Guard for getters: additionally reject a NULL out-parameter. */
+#define MIX_VIDEOCAPS_GETTER_CHECK_INPUT(obj, prop) \
+ if(!obj || !prop) return MIX_RESULT_NULL_PTR; \
+ if(!MIX_IS_VIDEOCAPS(obj)) return MIX_RESULT_FAIL; \
+
+
+/* TODO: Add getters and setters for other properties. The following is just an example, not implemented yet. */
+MIX_RESULT
+mix_videocaps_set_mix_caps (MixVideoCaps * obj, gchar * mix_caps)
+{
+  MIX_VIDEOCAPS_SETTER_CHECK_INPUT (obj);
+
+  /* Replace the previous value with a private copy of the new one. */
+  SAFE_FREE (obj->mix_caps);
+  obj->mix_caps = g_strdup (mix_caps);
+
+  /* g_strdup(NULL) legitimately returns NULL; any other NULL is OOM. */
+  if (obj->mix_caps == NULL && mix_caps != NULL)
+    return MIX_RESULT_NO_MEMORY;
+
+  return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT
+mix_videocaps_get_mix_caps (MixVideoCaps * obj, gchar ** mix_caps)
+{
+  MIX_VIDEOCAPS_GETTER_CHECK_INPUT (obj, mix_caps);
+
+  /* Caller receives (and must g_free) its own copy of the string. */
+  *mix_caps = g_strdup (obj->mix_caps);
+
+  /* A NULL copy of a non-NULL source means allocation failed. */
+  return (*mix_caps == NULL && obj->mix_caps != NULL)
+      ? MIX_RESULT_NO_MEMORY : MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT
+mix_videocaps_set_video_hw_caps (MixVideoCaps * obj, gchar * video_hw_caps)
+{
+  MIX_VIDEOCAPS_SETTER_CHECK_INPUT (obj);
+
+  /* Replace the previous value with a private copy of the new one. */
+  SAFE_FREE (obj->video_hw_caps);
+  obj->video_hw_caps = g_strdup (video_hw_caps);
+
+  /* g_strdup(NULL) legitimately returns NULL; any other NULL is OOM. */
+  if (obj->video_hw_caps == NULL && video_hw_caps != NULL)
+    return MIX_RESULT_NO_MEMORY;
+
+  return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT
+mix_videocaps_get_video_hw_caps (MixVideoCaps * obj, gchar ** video_hw_caps)
+{
+  MIX_VIDEOCAPS_GETTER_CHECK_INPUT (obj, video_hw_caps);
+
+  /* Caller receives (and must g_free) its own copy of the string. */
+  *video_hw_caps = g_strdup (obj->video_hw_caps);
+
+  /* A NULL copy of a non-NULL source means allocation failed. */
+  return (*video_hw_caps == NULL && obj->video_hw_caps != NULL)
+      ? MIX_RESULT_NO_MEMORY : MIX_RESULT_SUCCESS;
+}
diff --git a/mix_video/src/mixvideocaps.h b/mix_video/src/mixvideocaps.h
new file mode 100644
index 0000000..6630c19
--- /dev/null
+++ b/mix_video/src/mixvideocaps.h
@@ -0,0 +1,137 @@
+/*
+INTEL CONFIDENTIAL
+Copyright 2009 Intel Corporation All Rights Reserved.
+The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+*/
+
+#ifndef __MIX_VIDEOCAPS_H__
+#define __MIX_VIDEOCAPS_H__
+
+#include <mixparams.h>
+#include "mixvideodef.h"
+
+/**
+* MIX_TYPE_VIDEOCAPS:
+*
+* Get type of class.
+*/
+#define MIX_TYPE_VIDEOCAPS (mix_videocaps_get_type ())
+
+/**
+* MIX_VIDEOCAPS:
+* @obj: object to be type-casted.
+*/
+#define MIX_VIDEOCAPS(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_VIDEOCAPS, MixVideoCaps))
+
+/**
+* MIX_IS_VIDEOCAPS:
+* @obj: an object.
+*
+* Checks if the given object is an instance of #MixParams
+*/
+#define MIX_IS_VIDEOCAPS(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEOCAPS))
+
+/**
+* MIX_VIDEOCAPS_CLASS:
+* @klass: class to be type-casted.
+*/
+#define MIX_VIDEOCAPS_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_VIDEOCAPS, MixVideoCapsClass))
+
+/**
+* MIX_IS_VIDEOCAPS_CLASS:
+* @klass: a class.
+*
+* Checks if the given class is #MixParamsClass
+*/
+#define MIX_IS_VIDEOCAPS_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEOCAPS))
+
+/**
+* MIX_VIDEOCAPS_GET_CLASS:
+* @obj: a #MixParams object.
+*
+* Get the class instance of the object.
+*/
+#define MIX_VIDEOCAPS_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEOCAPS, MixVideoCapsClass))
+
+typedef struct _MixVideoCaps MixVideoCaps;
+typedef struct _MixVideoCapsClass MixVideoCapsClass;
+
+/**
+* MixVideoCaps:
+*
+* MI-X VideoConfig Parameter object
+*/
+struct _MixVideoCaps
+{
+  /*< public > */
+  MixParams parent;         /* base MixParams instance */
+
+  /*< public > */
+  gchar *mix_caps;          /* MI-X capability string; owned, may be NULL */
+  gchar *video_hw_caps;     /* video hardware capability string; owned, may be NULL */
+
+  void *reserved1;          /* reserved for future expansion */
+  void *reserved2;
+  void *reserved3;
+  void *reserved4;
+};
+
+/**
+* MixVideoCapsClass:
+*
+* MI-X VideoConfig object class
+*/
+struct _MixVideoCapsClass
+{
+ /*< public > */
+ MixParamsClass parent_class;
+
+ /* class members */
+};
+
+/**
+* mix_videocaps_get_type:
+* @returns: type
+*
+* Get the type of object.
+*/
+GType mix_videocaps_get_type (void);
+
+/**
+* mix_videocaps_new:
+* @returns: A newly allocated instance of #MixVideoCaps
+*
+* Use this method to create new instance of #MixVideoCaps
+*/
+MixVideoCaps *mix_videocaps_new (void);
+/**
+* mix_videocaps_ref:
+* @mix: object to add reference
+* @returns: the MixVideoCaps instance where reference count has been increased.
+*
+* Add reference count.
+*/
+MixVideoCaps *mix_videocaps_ref (MixVideoCaps * mix);
+
+/**
+* mix_videocaps_unref:
+* @obj: object to unref.
+*
+* Decrement reference count of the object.
+*/
+#define mix_videocaps_unref(obj) mix_params_unref(MIX_PARAMS(obj))
+
+/* Class Methods */
+
+MIX_RESULT mix_videocaps_set_mix_caps (MixVideoCaps * obj, gchar * mix_caps);
+MIX_RESULT mix_videocaps_get_mix_caps (MixVideoCaps * obj,
+ gchar ** mix_caps);
+
+MIX_RESULT mix_videocaps_set_video_hw_caps (MixVideoCaps * obj,
+ gchar * video_hw_caps);
+MIX_RESULT mix_videocaps_get_video_hw_caps (MixVideoCaps * obj,
+ gchar ** video_hw_caps);
+
+#endif /* __MIX_VIDEOCAPS_H__ */
diff --git a/mix_video/src/mixvideoconfigparams.c b/mix_video/src/mixvideoconfigparams.c
new file mode 100644
index 0000000..c355a4c
--- /dev/null
+++ b/mix_video/src/mixvideoconfigparams.c
@@ -0,0 +1,157 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+/**
+ * SECTION:mixvideoconfigparams
+ * @short_description: VideoConfig parameters
+ *
+ * A data object which stores videoconfig specific parameters.
+ */
+
+#include <string.h>
+#include "mixvideolog.h"
+#include "mixvideoconfigparams.h"
+
+static GType _mix_videoconfigparams_type = 0;
+static MixParamsClass *parent_class = NULL;
+
+#define _do_init { _mix_videoconfigparams_type = g_define_type_id; }
+
+gboolean mix_videoconfigparams_copy(MixParams * target, const MixParams * src);
+MixParams *mix_videoconfigparams_dup(const MixParams * obj);
+gboolean mix_videoconfigparams_equal(MixParams * first, MixParams * second);
+static void mix_videoconfigparams_finalize(MixParams * obj);
+
+G_DEFINE_TYPE_WITH_CODE (MixVideoConfigParams, mix_videoconfigparams,
+ MIX_TYPE_PARAMS, _do_init);
+
+static void mix_videoconfigparams_init(MixVideoConfigParams * self) {
+
+	/* Nothing real to initialize at this level yet; just clear the
+	 * reserved expansion slots. */
+	self->reserved1 = self->reserved2 = NULL;
+	self->reserved3 = self->reserved4 = NULL;
+}
+
+static void mix_videoconfigparams_class_init(MixVideoConfigParamsClass * klass) {
+	MixParamsClass *params_class = MIX_PARAMS_CLASS(klass);
+
+	/* Cache the parent class for chain-ups from instance methods. */
+	parent_class = (MixParamsClass *) g_type_class_peek_parent(klass);
+
+	/* Install this class's virtual-method overrides. */
+	params_class->finalize = mix_videoconfigparams_finalize;
+	params_class->copy = (MixParamsCopyFunction) mix_videoconfigparams_copy;
+	params_class->dup = (MixParamsDupFunction) mix_videoconfigparams_dup;
+	params_class->equal = (MixParamsEqualFunction) mix_videoconfigparams_equal;
+}
+
+MixVideoConfigParams *
+mix_videoconfigparams_new(void) {
+	/* Allocate through the GType system so instance init runs. */
+	return (MixVideoConfigParams *) g_type_create_instance(
+			MIX_TYPE_VIDEOCONFIGPARAMS);
+}
+
+void mix_videoconfigparams_finalize(MixParams * obj) {
+
+	/* No instance members to release at this level; just chain up. */
+	if (parent_class->finalize) {
+		parent_class->finalize(obj);
+	}
+}
+
+MixVideoConfigParams *
+mix_videoconfigparams_ref(MixVideoConfigParams * mix) {
+	/* Reference counting is delegated to the MixParams base. */
+	MixParams *base = mix_params_ref(MIX_PARAMS(mix));
+	return (MixVideoConfigParams *) base;
+}
+
+/**
+ * mix_videoconfigparams_dup:
+ * @obj: a #MixVideoConfigParams object
+ * @returns: a newly allocated duplicate of the object.
+ *
+ * Copy duplicate of the object.
+ */
+MixParams *
+mix_videoconfigparams_dup(const MixParams * obj) {
+	MixVideoConfigParams *clone;
+
+	if (!MIX_IS_VIDEOCONFIGPARAMS(obj))
+		return NULL;
+
+	/* Create a fresh instance and deep-copy @obj into it. */
+	clone = mix_videoconfigparams_new();
+	if (!mix_videoconfigparams_copy(MIX_PARAMS(clone), MIX_PARAMS(obj))) {
+		mix_videoconfigparams_unref(clone);
+		return NULL;
+	}
+	return MIX_PARAMS(clone);
+}
+
+/**
+ * mix_videoconfigparams_copy:
+ * @target: copy to target
+ * @src: copy from src
+ * @returns: boolean indicates if copy is successful.
+ *
+ * Copy instance data from @src to @target.
+ */
+gboolean mix_videoconfigparams_copy(MixParams * target, const MixParams * src) {
+
+	LOG_V( "Begin\n");
+
+	if (!MIX_IS_VIDEOCONFIGPARAMS(target) || !MIX_IS_VIDEOCONFIGPARAMS(src)) {
+		LOG_V( "End\n");
+		return FALSE;
+	}
+
+	/* TODO: copy other properties if there's any */
+
+	/* No members of our own yet; just chain up to the base class. */
+	if (parent_class->copy) {
+		LOG_V( "parent_class->copy != NULL\n");
+		return parent_class->copy(MIX_PARAMS_CAST(target),
+				MIX_PARAMS_CAST(src));
+	}
+	LOG_V( "parent_class->copy == NULL\n");
+	return TRUE;
+}
+
+/**
+ * mix_videoconfigparams_equal:
+ * @first: first object to compare
+ * @second: second object to compare
+ * @returns: boolean indicates if instance are equal.
+ *
+ * Deep-compare two objects for equality.
+ */
+gboolean mix_videoconfigparams_equal(MixParams * first, MixParams * second) {
+
+	MixParamsClass *klass = MIX_PARAMS_CLASS(parent_class);
+
+	if (!MIX_IS_VIDEOCONFIGPARAMS(first) || !MIX_IS_VIDEOCONFIGPARAMS(second))
+		return FALSE;
+
+	/* Nothing to compare at this level; defer to the base class. */
+	if (klass->equal)
+		return klass->equal(first, second);
+	return TRUE;
+}
diff --git a/mix_video/src/mixvideoconfigparams.h b/mix_video/src/mixvideoconfigparams.h
new file mode 100644
index 0000000..b2bac41
--- /dev/null
+++ b/mix_video/src/mixvideoconfigparams.h
@@ -0,0 +1,126 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+#ifndef __MIX_VIDEOCONFIGPARAMS_H__
+#define __MIX_VIDEOCONFIGPARAMS_H__
+
+#include <mixparams.h>
+#include "mixvideodef.h"
+
+/**
+ * MIX_TYPE_VIDEOCONFIGPARAMS:
+ *
+ * Get type of class.
+ */
+#define MIX_TYPE_VIDEOCONFIGPARAMS (mix_videoconfigparams_get_type ())
+
+/**
+ * MIX_VIDEOCONFIGPARAMS:
+ * @obj: object to be type-casted.
+ */
+#define MIX_VIDEOCONFIGPARAMS(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_VIDEOCONFIGPARAMS, MixVideoConfigParams))
+
+/**
+ * MIX_IS_VIDEOCONFIGPARAMS:
+ * @obj: an object.
+ *
+ * Checks if the given object is an instance of #MixParams
+ */
+#define MIX_IS_VIDEOCONFIGPARAMS(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEOCONFIGPARAMS))
+
+/**
+ * MIX_VIDEOCONFIGPARAMS_CLASS:
+ * @klass: class to be type-casted.
+ */
+#define MIX_VIDEOCONFIGPARAMS_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_VIDEOCONFIGPARAMS, MixVideoConfigParamsClass))
+
+/**
+ * MIX_IS_VIDEOCONFIGPARAMS_CLASS:
+ * @klass: a class.
+ *
+ * Checks if the given class is #MixParamsClass
+ */
+#define MIX_IS_VIDEOCONFIGPARAMS_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEOCONFIGPARAMS))
+
+/**
+ * MIX_VIDEOCONFIGPARAMS_GET_CLASS:
+ * @obj: a #MixParams object.
+ *
+ * Get the class instance of the object.
+ */
+#define MIX_VIDEOCONFIGPARAMS_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEOCONFIGPARAMS, MixVideoConfigParamsClass))
+
+typedef struct _MixVideoConfigParams MixVideoConfigParams;
+typedef struct _MixVideoConfigParamsClass MixVideoConfigParamsClass;
+
+/**
+ * MixVideoConfigParams:
+ *
+ * MI-X VideoConfig Parameter object
+ */
+struct _MixVideoConfigParams {
+	/*< public > */
+	MixParams parent;   /* base MixParams instance */
+
+	/*< public > */
+
+	void *reserved1;    /* reserved for future expansion */
+	void *reserved2;
+	void *reserved3;
+	void *reserved4;
+};
+
+/**
+ * MixVideoConfigParamsClass:
+ *
+ * MI-X VideoConfig object class
+ */
+struct _MixVideoConfigParamsClass {
+ /*< public > */
+ MixParamsClass parent_class;
+
+/* class members */
+};
+
+/**
+ * mix_videoconfigparams_get_type:
+ * @returns: type
+ *
+ * Get the type of object.
+ */
+GType mix_videoconfigparams_get_type(void);
+
+/**
+ * mix_videoconfigparams_new:
+ * @returns: A newly allocated instance of #MixVideoConfigParams
+ *
+ * Use this method to create new instance of #MixVideoConfigParams
+ */
+MixVideoConfigParams *mix_videoconfigparams_new(void);
+/**
+ * mix_videoconfigparams_ref:
+ * @mix: object to add reference
+ * @returns: the MixVideoConfigParams instance where reference count has been increased.
+ *
+ * Add reference count.
+ */
+MixVideoConfigParams *mix_videoconfigparams_ref(MixVideoConfigParams * mix);
+
+/**
+ * mix_videoconfigparams_unref:
+ * @obj: object to unref.
+ *
+ * Decrement reference count of the object.
+ */
+#define mix_videoconfigparams_unref(obj) mix_params_unref(MIX_PARAMS(obj))
+
+/* Class Methods */
+
+/* TODO: Add getters and setters for other properties */
+
+#endif /* __MIX_VIDEOCONFIGPARAMS_H__ */
diff --git a/mix_video/src/mixvideoconfigparamsdec.c b/mix_video/src/mixvideoconfigparamsdec.c
new file mode 100644
index 0000000..751b124
--- /dev/null
+++ b/mix_video/src/mixvideoconfigparamsdec.c
@@ -0,0 +1,534 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+/**
+ * SECTION:mixvideoconfigparamsdec
+ * @short_description: VideoConfig parameters for decoding
+ *
+ * A data object which stores video decode configuration parameters.
+ */
+
+#include <string.h>
+#include "mixvideolog.h"
+#include "mixvideoconfigparamsdec.h"
+
+static GType _mix_videoconfigparamsdec_type = 0;
+static MixVideoConfigParamsClass *parent_class = NULL;
+
+#define _do_init { _mix_videoconfigparamsdec_type = g_define_type_id; }
+
+gboolean mix_videoconfigparamsdec_copy(MixParams * target, const MixParams * src);
+MixParams *mix_videoconfigparamsdec_dup(const MixParams * obj);
+gboolean mix_videoconfigparamsdec_equal(MixParams * first, MixParams * second);
+static void mix_videoconfigparamsdec_finalize(MixParams * obj);
+
+G_DEFINE_TYPE_WITH_CODE (MixVideoConfigParamsDec, mix_videoconfigparamsdec,
+ MIX_TYPE_VIDEOCONFIGPARAMS, _do_init);
+
+static void mix_videoconfigparamsdec_init(MixVideoConfigParamsDec * self) {
+
+	/* Default to display-order output and an empty header descriptor. */
+	self->frame_order_mode = MIX_FRAMEORDER_MODE_DISPLAYORDER;
+	memset(&self->header, 0, sizeof(self->header));
+
+	/* No mime type until the caller sets one. */
+	self->mime_type = NULL;
+
+	/* All numeric decode properties start out zeroed. */
+	self->frame_rate_num = self->frame_rate_denom = 0;
+	self->picture_width = self->picture_height = 0;
+	self->raw_format = 0;
+	self->rate_control = 0;
+	self->mixbuffer_pool_size = 0;
+	self->extra_surface_allocation = 0;
+
+	/* TODO: initialize other properties */
+	self->reserved1 = self->reserved2 = NULL;
+	self->reserved3 = self->reserved4 = NULL;
+}
+
+static void mix_videoconfigparamsdec_class_init(MixVideoConfigParamsDecClass * klass) {
+	MixParamsClass *params_class = MIX_PARAMS_CLASS(klass);
+
+	/* Cache the parent (MixVideoConfigParams) class for chain-ups. */
+	parent_class = (MixVideoConfigParamsClass *) g_type_class_peek_parent(klass);
+
+	/* Install this class's virtual-method overrides. */
+	params_class->finalize = mix_videoconfigparamsdec_finalize;
+	params_class->copy = (MixParamsCopyFunction) mix_videoconfigparamsdec_copy;
+	params_class->dup = (MixParamsDupFunction) mix_videoconfigparamsdec_dup;
+	params_class->equal = (MixParamsEqualFunction) mix_videoconfigparamsdec_equal;
+}
+
+MixVideoConfigParamsDec *
+mix_videoconfigparamsdec_new(void) {
+	/* Allocate through the GType system so instance init runs. */
+	return (MixVideoConfigParamsDec *) g_type_create_instance(
+			MIX_TYPE_VIDEOCONFIGPARAMSDEC);
+}
+
+/* Finalizer: releases the header payload and mime_type, then chains up. */
+void mix_videoconfigparamsdec_finalize(MixParams * obj) {
+
+	/* clean up here. */
+	MixVideoConfigParamsDec *self = MIX_VIDEOCONFIGPARAMSDEC(obj);
+	MixParamsClass *root_class = MIX_PARAMS_CLASS(parent_class);
+
+	/* free header */
+	if (self->header.data) {
+		g_free(self->header.data);
+		memset(&self->header, 0, sizeof(self->header));
+	}
+
+	/* Free mime_type.
+	 * BUG FIX: the original dereferenced self->mime_type->str without a
+	 * NULL check, crashing when the mime type was never set (init leaves
+	 * it NULL). Also, a GString's str member is never NULL, so freeing
+	 * with TRUE (release both wrapper and buffer) is always correct;
+	 * the old FALSE branch was dead code that would have leaked. */
+	if (self->mime_type) {
+		g_string_free(self->mime_type, TRUE);
+		self->mime_type = NULL;
+	}
+
+	/* Chain up parent */
+	if (root_class->finalize) {
+		root_class->finalize(obj);
+	}
+}
+
+MixVideoConfigParamsDec *
+mix_videoconfigparamsdec_ref(MixVideoConfigParamsDec * mix) {
+	/* Reference counting is delegated to the MixParams base. */
+	MixParams *base = mix_params_ref(MIX_PARAMS(mix));
+	return (MixVideoConfigParamsDec *) base;
+}
+
+/**
+ * mix_videoconfigparamsdec_dup:
+ * @obj: a #MixVideoConfigParamsDec object
+ * @returns: a newly allocated duplicate of the object.
+ *
+ * Copy duplicate of the object.
+ */
+MixParams *
+mix_videoconfigparamsdec_dup(const MixParams * obj) {
+	MixVideoConfigParamsDec *clone;
+
+	if (!MIX_IS_VIDEOCONFIGPARAMSDEC(obj))
+		return NULL;
+
+	/* Create a fresh instance and deep-copy @obj into it. */
+	clone = mix_videoconfigparamsdec_new();
+	if (!mix_videoconfigparamsdec_copy(MIX_PARAMS(clone), MIX_PARAMS(obj))) {
+		mix_videoconfigparamsdec_unref(clone);
+		return NULL;
+	}
+	return MIX_PARAMS(clone);
+}
+
+/**
+ * mix_videoconfigparamsdec_copy:
+ * @target: copy to target
+ * @src: copy from src
+ * @returns: boolean indicates if copy is successful.
+ *
+ * Copy instance data from @src to @target.
+ */
+/* Deep-copies all decode configuration properties from @src to @target,
+ * then chains up so the base class can copy its own members. Returns
+ * FALSE on type mismatch or when copying a non-primitive property fails. */
+gboolean mix_videoconfigparamsdec_copy(MixParams * target, const MixParams * src) {
+
+	MixVideoConfigParamsDec *this_target, *this_src;
+	MIX_RESULT mix_result = MIX_RESULT_FAIL;
+	/* root class = the grandparent MixParams class, for chain-up */
+	MixParamsClass *root_class = MIX_PARAMS_CLASS(parent_class);
+
+	LOG_V( "Begin\n");
+
+	if (MIX_IS_VIDEOCONFIGPARAMSDEC(target) && MIX_IS_VIDEOCONFIGPARAMSDEC(src)) {
+
+		/* Cast the base object to this child object */
+		this_target = MIX_VIDEOCONFIGPARAMSDEC(target);
+		this_src = MIX_VIDEOCONFIGPARAMSDEC(src);
+
+		/* copy properties of primitive type */
+
+		this_target->frame_rate_num = this_src->frame_rate_num;
+		this_target->frame_rate_denom = this_src->frame_rate_denom;
+		this_target->picture_width = this_src->picture_width;
+		this_target->picture_height = this_src->picture_height;
+		this_target->raw_format = this_src->raw_format;
+		this_target->rate_control = this_src->rate_control;
+		this_target->mixbuffer_pool_size = this_src->mixbuffer_pool_size;
+		this_target->extra_surface_allocation = this_src->extra_surface_allocation;
+
+		/* copy properties of non-primitive */
+
+		/* copy header via the setter (assumed to deep-copy the payload —
+		 * confirm against mix_videoconfigparamsdec_set_header) */
+		mix_result = mix_videoconfigparamsdec_set_header(this_target,
+				&this_src->header);
+
+		if (mix_result != MIX_RESULT_SUCCESS) {
+
+			LOG_E( "set_header failed: mix_result = 0x%x\n", mix_result);
+			return FALSE;
+		}
+
+		/* copy mime_type; a NULL source clears the target's mime type */
+		if (this_src->mime_type) {
+
+			mix_result = mix_videoconfigparamsdec_set_mime_type(this_target,
+					this_src->mime_type->str);
+		} else {
+			mix_result = mix_videoconfigparamsdec_set_mime_type(this_target, NULL);
+		}
+
+		if (mix_result != MIX_RESULT_SUCCESS) {
+			LOG_E( "set_mime_type failed: mix_result = 0x%x\n", mix_result);
+			return FALSE;
+		}
+
+		/* TODO: copy other properties if there's any */
+
+		/* Now chainup base class */
+		if (root_class->copy) {
+			LOG_V( "root_class->copy != NULL\n");
+			return root_class->copy(MIX_PARAMS_CAST(target), MIX_PARAMS_CAST(
+					src));
+		} else {
+			LOG_E( "root_class->copy == NULL\n");
+			return TRUE;
+		}
+	}
+
+	LOG_V( "End\n");
+
+	return FALSE;
+}
+
+/**
+ * mix_videoconfigparamsdec_equal:
+ * @first: first object to compare
+ * @second: second object to compare
+ * @returns: boolean indicates if instance are equal.
+ *
+ * Deep-compare two objects for equality.
+ */
+gboolean mix_videoconfigparamsdec_equal(MixParams * first, MixParams * second) {
+
+	gboolean ret = FALSE;
+
+	MixVideoConfigParamsDec *this_first, *this_second;
+	MixParamsClass *root_class = MIX_PARAMS_CLASS(parent_class);
+
+
+	if (MIX_IS_VIDEOCONFIGPARAMSDEC(first) && MIX_IS_VIDEOCONFIGPARAMSDEC(second)) {
+
+		// Deep compare
+		// Cast the base object to this child object
+		this_first = MIX_VIDEOCONFIGPARAMSDEC(first);
+		this_second = MIX_VIDEOCONFIGPARAMSDEC(second);
+
+		/* check the equality of the primitive type properties */
+		if (this_first->frame_order_mode != this_second->frame_order_mode) {
+			goto not_equal;
+		}
+
+		/* BUG FIX: these paired comparisons were joined with && in the
+		 * original, so two objects differing in only one field of the
+		 * pair were reported equal. A mismatch in ANY field must make
+		 * the objects unequal, hence ||. */
+		if (this_first->frame_rate_num != this_second->frame_rate_num
+				|| this_first->frame_rate_denom
+						!= this_second->frame_rate_denom) {
+			goto not_equal;
+		}
+
+		if (this_first->picture_width != this_second->picture_width
+				|| this_first->picture_height != this_second->picture_height) {
+			goto not_equal;
+		}
+
+		if (this_first->raw_format != this_second->raw_format) {
+			goto not_equal;
+		}
+
+		if (this_first->rate_control != this_second->rate_control) {
+			goto not_equal;
+		}
+
+		if (this_first->mixbuffer_pool_size != this_second->mixbuffer_pool_size) {
+			goto not_equal;
+		}
+
+		if (this_first->extra_surface_allocation != this_second->extra_surface_allocation) {
+			goto not_equal;
+		}
+
+		/* check the equality of the non-primitive type properties */
+
+		/* MixIOVec header: sizes first, then the payload bytes */
+
+		if (this_first->header.data_size != this_second->header.data_size) {
+			goto not_equal;
+		}
+
+		if (this_first->header.buffer_size != this_second->header.buffer_size) {
+			goto not_equal;
+		}
+
+		if (this_first->header.data && this_second->header.data) {
+			if (memcmp(this_first->header.data, this_second->header.data,
+					this_first->header.data_size) != 0) {
+				goto not_equal;
+			}
+		} else if (!(!this_first->header.data && !this_second->header.data)) {
+			goto not_equal;
+		}
+
+		/* compare mime_type; both NULL counts as equal */
+
+		if (this_first->mime_type && this_second->mime_type) {
+			if (g_string_equal(this_first->mime_type, this_second->mime_type)
+					!= TRUE) {
+				goto not_equal;
+			}
+		} else if (!(!this_first->mime_type && !this_second->mime_type)) {
+			goto not_equal;
+		}
+
+		ret = TRUE;
+
+	not_equal:
+
+		if (ret != TRUE) {
+			return ret;
+		}
+
+		/* chaining up. */
+		if (root_class->equal)
+			ret = root_class->equal(first, second);
+		else
+			ret = TRUE;
+	}
+
+	return ret;
+}
+
+/* Validate a setter's object argument: non-NULL and of the expected GType. */
+#define MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT(obj) \
+	if(!obj) return MIX_RESULT_NULL_PTR; \
+	if(!MIX_IS_VIDEOCONFIGPARAMSDEC(obj)) return MIX_RESULT_FAIL; \
+
+/* Validate a getter's object argument and its single out-parameter. */
+#define MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT(obj, prop) \
+	if(!obj || !prop) return MIX_RESULT_NULL_PTR; \
+	if(!MIX_IS_VIDEOCONFIGPARAMSDEC(obj)) return MIX_RESULT_FAIL; \
+
+/* Validate a getter's object argument and two out-parameters. */
+#define MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT_PAIR(obj, prop, prop2) \
+	if(!obj || !prop || !prop2 ) return MIX_RESULT_NULL_PTR; \
+	if(!MIX_IS_VIDEOCONFIGPARAMSDEC(obj)) return MIX_RESULT_FAIL; \
+
+/* TODO: Add getters and setters for other properties. The following is incomplete */
+
+MIX_RESULT mix_videoconfigparamsdec_set_frame_order_mode(
+		MixVideoConfigParamsDec * obj, MixFrameOrderMode frame_order_mode) {
+	/* Record the requested output frame ordering mode. */
+	MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT(obj);
+	obj->frame_order_mode = frame_order_mode;
+	return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsdec_get_frame_order_mode(
+		MixVideoConfigParamsDec * obj, MixFrameOrderMode * frame_order_mode) {
+	/* Copy the stored frame ordering mode into the out-parameter. */
+	MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT(obj, frame_order_mode);
+	*frame_order_mode = obj->frame_order_mode;
+	return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsdec_set_header(MixVideoConfigParamsDec * obj,
+		MixIOVec * header) {
+
+	MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj);
+
+	if (!header) {
+		return MIX_RESULT_NULL_PTR;
+	}
+
+	/* Store a deep copy of the header bytes. Setting an empty header
+	 * (NULL data or zero buffer_size) is a successful no-op, matching
+	 * the previous behavior. */
+	if (header->data && header->buffer_size) {
+		gpointer new_data = g_memdup(header->data, header->buffer_size);
+		if (!new_data) {
+			return MIX_RESULT_NO_MEMORY;
+		}
+
+		/* Release any previously-stored header so repeated calls to
+		 * this setter do not leak the old copy. */
+		g_free(obj->header.data);
+
+		obj->header.data = new_data;
+		obj->header.buffer_size = header->buffer_size;
+		obj->header.data_size = header->data_size;
+	}
+	return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsdec_get_header(MixVideoConfigParamsDec * obj,
+		MixIOVec ** header) {
+
+	MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT (obj, header);
+
+	/* Nothing stored: report success with a NULL out-value. */
+	if (!obj->header.data || !obj->header.buffer_size) {
+		*header = NULL;
+		return MIX_RESULT_SUCCESS;
+	}
+
+	/* Hand back an independent deep copy; the caller owns both the
+	 * MixIOVec and its data field (see header comment in the .h). */
+	*header = g_malloc(sizeof(MixIOVec));
+	if (*header == NULL) {
+		return MIX_RESULT_NO_MEMORY;
+	}
+
+	(*header)->data = g_memdup(obj->header.data, obj->header.buffer_size);
+	(*header)->buffer_size = obj->header.buffer_size;
+	(*header)->data_size = obj->header.data_size;
+
+	return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsdec_set_mime_type(MixVideoConfigParamsDec * obj,
+		const gchar * mime_type) {
+
+	MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj);
+
+	if (!mime_type) {
+		return MIX_RESULT_NULL_PTR;
+	}
+
+	/* Free any previous value. A valid GString always owns a non-NULL
+	 * str buffer, so always free the segment as well; the old code
+	 * passed free_segment=FALSE when str was NULL, which would have
+	 * leaked the segment had that case ever occurred. */
+	if (obj->mime_type) {
+		g_string_free(obj->mime_type, TRUE);
+		obj->mime_type = NULL;
+	}
+
+	obj->mime_type = g_string_new(mime_type);
+	if (!obj->mime_type) {
+		return MIX_RESULT_NO_MEMORY;
+	}
+
+	return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsdec_get_mime_type(MixVideoConfigParamsDec * obj,
+		gchar ** mime_type) {
+	MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT (obj, mime_type);
+
+	/* Return a caller-owned copy of the stored string, or NULL when
+	 * no mime type has been set. */
+	*mime_type = NULL;
+	if (obj->mime_type) {
+		*mime_type = g_strdup(obj->mime_type->str);
+		if (!*mime_type) {
+			return MIX_RESULT_NO_MEMORY;
+		}
+	}
+
+	return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsdec_set_frame_rate(MixVideoConfigParamsDec * obj,
+		guint frame_rate_num, guint frame_rate_denom) {
+	/* Store the frame rate as a num/denom rational. */
+	MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT(obj);
+	obj->frame_rate_num = frame_rate_num;
+	obj->frame_rate_denom = frame_rate_denom;
+	return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsdec_get_frame_rate(MixVideoConfigParamsDec * obj,
+		guint * frame_rate_num, guint * frame_rate_denom) {
+	/* Copy both components of the frame-rate rational to the caller. */
+	MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT_PAIR(obj, frame_rate_num, frame_rate_denom);
+	*frame_rate_num = obj->frame_rate_num;
+	*frame_rate_denom = obj->frame_rate_denom;
+	return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsdec_set_picture_res(MixVideoConfigParamsDec * obj,
+		guint picture_width, guint picture_height) {
+	/* Store the coded picture dimensions. */
+	MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT(obj);
+	obj->picture_width = picture_width;
+	obj->picture_height = picture_height;
+	return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsdec_get_picture_res(MixVideoConfigParamsDec * obj,
+		guint * picture_width, guint * picture_height) {
+	/* Copy the stored picture dimensions to the caller. */
+	MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT_PAIR(obj, picture_width, picture_height);
+	*picture_width = obj->picture_width;
+	*picture_height = obj->picture_height;
+	return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsdec_set_raw_format(MixVideoConfigParamsDec * obj,
+		guint raw_format) {
+	MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT(obj);
+	/* Stored as-is; value is not range-checked here (see TODO in the
+	 * original) -- callers are responsible for passing a valid format. */
+	obj->raw_format = raw_format;
+	return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsdec_get_raw_format(MixVideoConfigParamsDec * obj,
+		guint *raw_format) {
+	/* Copy the stored raw-format identifier to the caller. */
+	MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT(obj, raw_format);
+	*raw_format = obj->raw_format;
+	return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsdec_set_rate_control(MixVideoConfigParamsDec * obj,
+		guint rate_control) {
+	MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT(obj);
+	/* Stored as-is; value is not range-checked here (see TODO in the
+	 * original) -- callers are responsible for passing a valid mode. */
+	obj->rate_control = rate_control;
+	return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsdec_get_rate_control(MixVideoConfigParamsDec * obj,
+		guint *rate_control) {
+	/* Copy the stored rate-control identifier to the caller. */
+	MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT(obj, rate_control);
+	*rate_control = obj->rate_control;
+	return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsdec_set_buffer_pool_size(
+		MixVideoConfigParamsDec * obj, guint bufpoolsize) {
+	/* Record the requested MixBuffer pool size. */
+	MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT(obj);
+	obj->mixbuffer_pool_size = bufpoolsize;
+	return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsdec_get_buffer_pool_size(
+		MixVideoConfigParamsDec * obj, guint *bufpoolsize) {
+	/* Copy the stored MixBuffer pool size to the caller. */
+	MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT(obj, bufpoolsize);
+	*bufpoolsize = obj->mixbuffer_pool_size;
+	return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsdec_set_extra_surface_allocation(
+		MixVideoConfigParamsDec * obj,
+		guint extra_surface_allocation) {
+	/* Record the number of extra decode surfaces to allocate. */
+	MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT(obj);
+	obj->extra_surface_allocation = extra_surface_allocation;
+	return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsdec_get_extra_surface_allocation(
+		MixVideoConfigParamsDec * obj,
+		guint *extra_surface_allocation) {
+	/* Copy the stored extra-surface count to the caller. */
+	MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT(obj, extra_surface_allocation);
+	*extra_surface_allocation = obj->extra_surface_allocation;
+	return MIX_RESULT_SUCCESS;
+}
+
+
+
+
diff --git a/mix_video/src/mixvideoconfigparamsdec.h b/mix_video/src/mixvideoconfigparamsdec.h
new file mode 100644
index 0000000..6aa9047
--- /dev/null
+++ b/mix_video/src/mixvideoconfigparamsdec.h
@@ -0,0 +1,195 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+#ifndef __MIX_VIDEOCONFIGPARAMSDEC_H__
+#define __MIX_VIDEOCONFIGPARAMSDEC_H__
+
+#include <mixvideoconfigparams.h>
+#include "mixvideodef.h"
+
+/**
+ * MIX_TYPE_VIDEOCONFIGPARAMSDEC:
+ *
+ * Get type of class.
+ */
+#define MIX_TYPE_VIDEOCONFIGPARAMSDEC (mix_videoconfigparamsdec_get_type ())
+
+/**
+ * MIX_VIDEOCONFIGPARAMSDEC:
+ * @obj: object to be type-casted.
+ */
+#define MIX_VIDEOCONFIGPARAMSDEC(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_VIDEOCONFIGPARAMSDEC, MixVideoConfigParamsDec))
+
+/**
+ * MIX_IS_VIDEOCONFIGPARAMSDEC:
+ * @obj: an object.
+ *
+ * Checks if the given object is an instance of #MixParams
+ */
+#define MIX_IS_VIDEOCONFIGPARAMSDEC(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEOCONFIGPARAMSDEC))
+
+/**
+ * MIX_VIDEOCONFIGPARAMSDEC_CLASS:
+ * @klass: class to be type-casted.
+ */
+#define MIX_VIDEOCONFIGPARAMSDEC_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_VIDEOCONFIGPARAMSDEC, MixVideoConfigParamsDecClass))
+
+/**
+ * MIX_IS_VIDEOCONFIGPARAMSDEC_CLASS:
+ * @klass: a class.
+ *
+ * Checks if the given class is #MixParamsClass
+ */
+#define MIX_IS_VIDEOCONFIGPARAMSDEC_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEOCONFIGPARAMSDEC))
+
+/**
+ * MIX_VIDEOCONFIGPARAMSDEC_GET_CLASS:
+ * @obj: a #MixParams object.
+ *
+ * Get the class instance of the object.
+ */
+#define MIX_VIDEOCONFIGPARAMSDEC_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEOCONFIGPARAMSDEC, MixVideoConfigParamsDecClass))
+
+typedef struct _MixVideoConfigParamsDec MixVideoConfigParamsDec;
+typedef struct _MixVideoConfigParamsDecClass MixVideoConfigParamsDecClass;
+
+/**
+ * MixVideoConfigParamsDec:
+ *
+ * MI-X VideoConfig Parameter object
+ */
+struct _MixVideoConfigParamsDec {
+	/*< public > */
+	MixVideoConfigParams parent;
+
+	/*< public > */
+
+	/* Desired decoded-frame output ordering mode. */
+	MixFrameOrderMode frame_order_mode;
+	/* Codec-specific header bytes; stored as an owned deep copy by
+	 * mix_videoconfigparamsdec_set_header(). */
+	MixIOVec header;
+
+	/* the type of the following members will be changed after MIX API doc is ready */
+	/* Stream MIME type; owned GString, NULL when unset. */
+	GString * mime_type;
+	/* Frame rate expressed as a num/denom rational. */
+	guint frame_rate_num;
+	guint frame_rate_denom;
+	/* Coded picture dimensions in pixels. */
+	gulong picture_width;
+	gulong picture_height;
+	/* Raw pixel-format identifier -- enum semantics TODO: confirm. */
+	guint raw_format;
+	/* Rate-control mode identifier -- enum semantics TODO: confirm. */
+	guint rate_control;
+
+	/* Requested size of the MixBuffer pool. */
+	guint mixbuffer_pool_size;
+	/* Extra decode surfaces to allocate beyond the minimum -- presumably;
+	 * verify against the decoder implementation. */
+	guint extra_surface_allocation;
+
+	/* Padding reserved for future ABI-compatible extension. */
+	void *reserved1;
+	void *reserved2;
+	void *reserved3;
+	void *reserved4;
+};
+
+/**
+ * MixVideoConfigParamsDecClass:
+ *
+ * MI-X VideoConfig object class
+ */
+/* Class structure: declares no virtuals of its own; all behavior is
+ * inherited from MixVideoConfigParamsClass. */
+struct _MixVideoConfigParamsDecClass {
+	/*< public > */
+	MixVideoConfigParamsClass parent_class;
+
+	/* class members */
+};
+
+/**
+ * mix_videoconfigparamsdec_get_type:
+ * @returns: type
+ *
+ * Get the type of object.
+ */
+GType mix_videoconfigparamsdec_get_type(void);
+
+/**
+ * mix_videoconfigparamsdec_new:
+ * @returns: A newly allocated instance of #MixVideoConfigParamsDec
+ *
+ * Use this method to create new instance of #MixVideoConfigParamsDec
+ */
+MixVideoConfigParamsDec *mix_videoconfigparamsdec_new(void);
+/**
+ * mix_videoconfigparamsdec_ref:
+ * @mix: object to add reference
+ * @returns: the MixVideoConfigParamsDec instance where reference count has been increased.
+ *
+ * Add reference count.
+ */
+MixVideoConfigParamsDec *mix_videoconfigparamsdec_ref(MixVideoConfigParamsDec * mix);
+
+/**
+ * mix_videoconfigparamsdec_unref:
+ * @obj: object to unref.
+ *
+ * Decrement reference count of the object.
+ */
+#define mix_videoconfigparamsdec_unref(obj) mix_params_unref(MIX_PARAMS(obj))
+
+/* Class Methods */
+
+MIX_RESULT mix_videoconfigparamsdec_set_frame_order_mode(
+ MixVideoConfigParamsDec * obj, MixFrameOrderMode frame_order_mode);
+
+MIX_RESULT mix_videoconfigparamsdec_get_frame_order_mode(
+ MixVideoConfigParamsDec * obj, MixFrameOrderMode * frame_order_mode);
+
+MIX_RESULT mix_videoconfigparamsdec_set_header(MixVideoConfigParamsDec * obj,
+ MixIOVec *header);
+
+/* caller is responsible to g_free MixIOVec::data field */
+MIX_RESULT mix_videoconfigparamsdec_get_header(MixVideoConfigParamsDec * obj,
+ MixIOVec ** header);
+
+MIX_RESULT mix_videoconfigparamsdec_set_mime_type(MixVideoConfigParamsDec * obj,
+ const gchar * mime_type);
+
+MIX_RESULT mix_videoconfigparamsdec_get_mime_type(MixVideoConfigParamsDec * obj,
+ gchar ** mime_type);
+
+MIX_RESULT mix_videoconfigparamsdec_set_frame_rate(MixVideoConfigParamsDec * obj,
+ guint frame_rate_num, guint frame_rate_denom);
+
+MIX_RESULT mix_videoconfigparamsdec_get_frame_rate(MixVideoConfigParamsDec * obj,
+ guint * frame_rate_num, guint * frame_rate_denom);
+
+MIX_RESULT mix_videoconfigparamsdec_set_picture_res(MixVideoConfigParamsDec * obj,
+ guint picture_width, guint picture_height);
+
+MIX_RESULT mix_videoconfigparamsdec_get_picture_res(MixVideoConfigParamsDec * obj,
+ guint * picture_width, guint * picture_height);
+
+MIX_RESULT mix_videoconfigparamsdec_set_raw_format(MixVideoConfigParamsDec * obj,
+ guint raw_format);
+
+MIX_RESULT mix_videoconfigparamsdec_get_raw_format(MixVideoConfigParamsDec * obj,
+ guint *raw_format);
+
+MIX_RESULT mix_videoconfigparamsdec_set_rate_control(MixVideoConfigParamsDec * obj,
+ guint rate_control);
+
+MIX_RESULT mix_videoconfigparamsdec_get_rate_control(MixVideoConfigParamsDec * obj,
+ guint *rate_control);
+
+MIX_RESULT mix_videoconfigparamsdec_set_buffer_pool_size(MixVideoConfigParamsDec * obj,
+ guint bufpoolsize);
+
+MIX_RESULT mix_videoconfigparamsdec_get_buffer_pool_size(MixVideoConfigParamsDec * obj,
+ guint *bufpoolsize);
+
+MIX_RESULT mix_videoconfigparamsdec_set_extra_surface_allocation(MixVideoConfigParamsDec * obj,
+ guint extra_surface_allocation);
+
+MIX_RESULT mix_videoconfigparamsdec_get_extra_surface_allocation(MixVideoConfigParamsDec * obj,
+ guint *extra_surface_allocation);
+
+/* TODO: Add getters and setters for other properties */
+
+#endif /* __MIX_VIDEOCONFIGPARAMSDEC_H__ */
diff --git a/mix_video/src/mixvideoconfigparamsdec_h264.c b/mix_video/src/mixvideoconfigparamsdec_h264.c
new file mode 100644
index 0000000..271cbf7
--- /dev/null
+++ b/mix_video/src/mixvideoconfigparamsdec_h264.c
@@ -0,0 +1,213 @@
+/*
+INTEL CONFIDENTIAL
+Copyright 2009 Intel Corporation All Rights Reserved.
+The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+*/
+
+/**
+* SECTION:mixvideoconfigparamsdec_h264
+* @short_description: VideoConfig parameters
+*
+* A data object which stores videoconfig specific parameters.
+*/
+
+#include "mixvideoconfigparamsdec_h264.h"
+
+static GType _mix_videoconfigparamsdec_h264_type = 0;
+static MixVideoConfigParamsDecClass *parent_class = NULL;
+
+#define _do_init { _mix_videoconfigparamsdec_h264_type = g_define_type_id; }
+
+gboolean mix_videoconfigparamsdec_h264_copy (MixParams * target,
+ const MixParams * src);
+MixParams *mix_videoconfigparamsdec_h264_dup (const MixParams * obj);
+gboolean mix_videoconfigparamsdec_h264_equal (MixParams * first,
+ MixParams * second);
+static void mix_videoconfigparamsdec_h264_finalize (MixParams * obj);
+
+G_DEFINE_TYPE_WITH_CODE (MixVideoConfigParamsDecH264, /* The name of the new type, in Camel case */
+ mix_videoconfigparamsdec_h264, /* The name of the new type in lowercase */
+ MIX_TYPE_VIDEOCONFIGPARAMSDEC, /* The GType of the parent type */
+ _do_init);
+
+void
+_mix_videoconfigparamsdec_h264_initialize (void)
+{
+  /* Take a class reference once up front so instances can later be
+   * created safely from multiple threads;
+   * see http://bugzilla.gnome.org/show_bug.cgi?id=304551 */
+  g_type_class_ref (mix_videoconfigparamsdec_h264_get_type ());
+}
+
+static void
+mix_videoconfigparamsdec_h264_init (MixVideoConfigParamsDecH264 * self)
+{
+  /* No H.264-specific properties yet; just clear the reserved slots. */
+  self->reserved1 = self->reserved2 = NULL;
+  self->reserved3 = self->reserved4 = NULL;
+}
+
+static void
+mix_videoconfigparamsdec_h264_class_init (MixVideoConfigParamsDecH264Class * klass)
+{
+  /* Install this subclass's virtual methods on the MixParams root class
+   * and cache the parent class pointer for chain-up calls. */
+  MixVideoConfigParamsDecClass *this_parent_class =
+      MIX_VIDEOCONFIGPARAMSDEC_CLASS (klass);
+  MixParamsClass *this_root_class = MIX_PARAMS_CLASS (this_parent_class);
+
+  /* setup static parent class */
+  parent_class =
+      (MixVideoConfigParamsDecClass *) g_type_class_peek_parent (klass);
+
+  this_root_class->finalize = mix_videoconfigparamsdec_h264_finalize;
+  this_root_class->copy =
+      (MixParamsCopyFunction) mix_videoconfigparamsdec_h264_copy;
+  this_root_class->dup =
+      (MixParamsDupFunction) mix_videoconfigparamsdec_h264_dup;
+  this_root_class->equal =
+      (MixParamsEqualFunction) mix_videoconfigparamsdec_h264_equal;
+}
+
+MixVideoConfigParamsDecH264 *
+mix_videoconfigparamsdec_h264_new (void)
+{
+  /* Allocate a fresh instance through the GType system. */
+  return (MixVideoConfigParamsDecH264 *)
+      g_type_create_instance (MIX_TYPE_VIDEOCONFIGPARAMSDEC_H264);
+}
+
+void
+mix_videoconfigparamsdec_h264_finalize (MixParams * obj)
+{
+  /* Nothing H.264-specific is allocated yet, so there is nothing to
+   * release here; just chain up to the parent finalizer. */
+  /* MixVideoConfigParamsDecH264 *this_obj = MIX_VIDEOCONFIGPARAMSDEC_H264 (obj); */
+  MixParamsClass *root_class = MIX_PARAMS_CLASS (parent_class);
+
+  /* TODO: cleanup resources allocated */
+
+  /* Chain up parent */
+
+  if (root_class->finalize)
+    {
+      root_class->finalize (obj);
+    }
+}
+
+MixVideoConfigParamsDecH264
+  * mix_videoconfigparamsdec_h264_ref (MixVideoConfigParamsDecH264 * mix)
+{
+  /* Refcounting is handled entirely by the MixParams base class. */
+  return (MixVideoConfigParamsDecH264 *) mix_params_ref (MIX_PARAMS (mix));
+}
+
+/**
+* mix_videoconfigparamsdec_h264_dup:
+* @obj: a #MixVideoConfigParamsDecH264 object
+* @returns: a newly allocated duplicate of the object, or NULL on failure.
+*
+* Create a deep copy of the object.
+*/
+MixParams *
+mix_videoconfigparamsdec_h264_dup (const MixParams * obj)
+{
+  /* Allocate a new instance and deep-copy @obj into it. Returns NULL
+   * when @obj is not a MixVideoConfigParamsDecH264 or the copy fails. */
+  MixParams *ret = NULL;
+
+  if (MIX_IS_VIDEOCONFIGPARAMSDEC_H264 (obj))
+    {
+      MixVideoConfigParamsDecH264 *duplicate = mix_videoconfigparamsdec_h264_new ();
+      if (mix_videoconfigparamsdec_h264_copy
+	  (MIX_PARAMS (duplicate), MIX_PARAMS (obj)))
+	{
+	  ret = MIX_PARAMS (duplicate);
+	}
+      else
+	{
+	  /* Copy failed: release the half-built duplicate. */
+	  mix_videoconfigparamsdec_h264_unref (duplicate);
+	}
+    }
+  return ret;
+}
+
+/**
+* mix_videoconfigparamsdec_h264_copy:
+* @target: copy to target
+* @src: copy from src
+* @returns: boolean indicates if copy is successful.
+*
+* Copy instance data from @src to @target.
+*/
+gboolean
+mix_videoconfigparamsdec_h264_copy (MixParams * target, const MixParams * src)
+{
+  /* Copy instance data from @src to @target; both must be
+   * MixVideoConfigParamsDecH264, otherwise FALSE is returned. */
+  MixVideoConfigParamsDecH264 *this_target, *this_src;
+  MixParamsClass *root_class;
+
+  if (MIX_IS_VIDEOCONFIGPARAMSDEC_H264 (target)
+      && MIX_IS_VIDEOCONFIGPARAMSDEC_H264 (src))
+    {
+      // Cast the base object to this child object
+      this_target = MIX_VIDEOCONFIGPARAMSDEC_H264 (target);
+      this_src = MIX_VIDEOCONFIGPARAMSDEC_H264 (src);
+
+      /* No H.264-specific properties yet. TODO: copy properties */
+
+      // Now chainup base class to copy the inherited properties
+      root_class = MIX_PARAMS_CLASS (parent_class);
+
+      if (root_class->copy)
+	{
+	  return root_class->copy (MIX_PARAMS_CAST (target),
+				   MIX_PARAMS_CAST (src));
+	}
+      else
+	{
+	  return TRUE;
+	}
+    }
+  return FALSE;
+}
+
+/**
+* mix_videoconfigparamsdec_h264_equal:
+* @first: first object to compare
+* @second: second object to compare
+* @returns: boolean indicating whether the two instances are equal.
+*
+* Deep-compare @first and @second, including base-class properties.
+*/
+gboolean
+mix_videoconfigparamsdec_h264_equal (MixParams * first, MixParams * second)
+{
+  /* Compare two MixVideoConfigParamsDecH264 instances. There are no
+   * subclass-specific properties yet, so equality is decided entirely
+   * by the chained-up base-class comparison. */
+  gboolean ret = FALSE;
+  MixVideoConfigParamsDecH264 *this_first, *this_second;
+
+  if (MIX_IS_VIDEOCONFIGPARAMSDEC_H264 (first)
+      && MIX_IS_VIDEOCONFIGPARAMSDEC_H264 (second))
+    {
+      // Cast the base object to this child object
+
+      this_first = MIX_VIDEOCONFIGPARAMSDEC_H264 (first);
+      this_second = MIX_VIDEOCONFIGPARAMSDEC_H264 (second);
+
+      /* TODO: add comparison for properties */
+      {
+	// members within this scope equal. chaining up.
+	MixParamsClass *klass = MIX_PARAMS_CLASS (parent_class);
+	if (klass->equal)
+	  {
+	    ret = klass->equal (first, second);
+	  }
+	else
+	  {
+	    ret = TRUE;
+	  }
+      }
+    }
+
+  return ret;
+}
+
+/* TODO: Add getters and setters for properties if any */
diff --git a/mix_video/src/mixvideoconfigparamsdec_h264.h b/mix_video/src/mixvideoconfigparamsdec_h264.h
new file mode 100644
index 0000000..6d5f97d
--- /dev/null
+++ b/mix_video/src/mixvideoconfigparamsdec_h264.h
@@ -0,0 +1,130 @@
+/*
+INTEL CONFIDENTIAL
+Copyright 2009 Intel Corporation All Rights Reserved.
+The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+*/
+
+#ifndef __MIX_VIDEOCONFIGPARAMSDEC_H264_H__
+#define __MIX_VIDEOCONFIGPARAMSDEC_H264_H__
+
+#include "mixvideoconfigparamsdec.h"
+#include "mixvideodef.h"
+
+/**
+* MIX_TYPE_VIDEOCONFIGPARAMSDEC_H264:
+*
+* Get type of class.
+*/
+#define MIX_TYPE_VIDEOCONFIGPARAMSDEC_H264 (mix_videoconfigparamsdec_h264_get_type ())
+
+/**
+* MIX_VIDEOCONFIGPARAMSDEC_H264:
+* @obj: object to be type-casted.
+*/
+#define MIX_VIDEOCONFIGPARAMSDEC_H264(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_VIDEOCONFIGPARAMSDEC_H264, MixVideoConfigParamsDecH264))
+
+/**
+* MIX_IS_VIDEOCONFIGPARAMSDEC_H264:
+* @obj: an object.
+*
+* Checks if the given object is an instance of #MixVideoConfigParamsDecH264
+*/
+#define MIX_IS_VIDEOCONFIGPARAMSDEC_H264(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEOCONFIGPARAMSDEC_H264))
+
+/**
+* MIX_VIDEOCONFIGPARAMSDEC_H264_CLASS:
+* @klass: class to be type-casted.
+*/
+#define MIX_VIDEOCONFIGPARAMSDEC_H264_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_VIDEOCONFIGPARAMSDEC_H264, MixVideoConfigParamsDecH264Class))
+
+/**
+* MIX_IS_VIDEOCONFIGPARAMSDEC_H264_CLASS:
+* @klass: a class.
+*
+* Checks if the given class is #MixVideoConfigParamsDecH264Class
+*/
+#define MIX_IS_VIDEOCONFIGPARAMSDEC_H264_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEOCONFIGPARAMSDEC_H264))
+
+/**
+* MIX_VIDEOCONFIGPARAMSDEC_H264_GET_CLASS:
+* @obj: a #MixParams object.
+*
+* Get the class instance of the object.
+*/
+#define MIX_VIDEOCONFIGPARAMSDEC_H264_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEOCONFIGPARAMSDEC_H264, MixVideoConfigParamsDecH264Class))
+
+typedef struct _MixVideoConfigParamsDecH264 MixVideoConfigParamsDecH264;
+typedef struct _MixVideoConfigParamsDecH264Class MixVideoConfigParamsDecH264Class;
+
+/**
+* MixVideoConfigParamsDecH264:
+*
+* MI-X VideoConfig Parameter object
+*/
+struct _MixVideoConfigParamsDecH264
+{
+  /*< public > */
+  MixVideoConfigParamsDec parent;
+
+  /*< public > */
+
+  /* TODO: Add H.264 configuration parameters */
+
+  /* Padding reserved for future ABI-compatible extension. */
+  void *reserved1;
+  void *reserved2;
+  void *reserved3;
+  void *reserved4;
+};
+
+/**
+* MixVideoConfigParamsDecH264Class:
+*
+* MI-X VideoConfig object class
+*/
+/* Class structure: declares no virtuals of its own; all behavior is
+ * inherited from MixVideoConfigParamsDecClass. */
+struct _MixVideoConfigParamsDecH264Class
+{
+  /*< public > */
+  MixVideoConfigParamsDecClass parent_class;
+
+  /* class members */
+};
+
+/**
+* mix_videoconfigparamsdec_h264_get_type:
+* @returns: type
+*
+* Get the type of object.
+*/
+GType mix_videoconfigparamsdec_h264_get_type (void);
+
+/**
+* mix_videoconfigparamsdec_h264_new:
+* @returns: A newly allocated instance of #MixVideoConfigParamsDecH264
+*
+* Use this method to create new instance of #MixVideoConfigParamsDecH264
+*/
+MixVideoConfigParamsDecH264 *mix_videoconfigparamsdec_h264_new (void);
+/**
+* mix_videoconfigparamsdec_h264_ref:
+* @mix: object to add reference
+* @returns: the MixVideoConfigParamsDecH264 instance where reference count has been increased.
+*
+* Add reference count.
+*/
+MixVideoConfigParamsDecH264
+ * mix_videoconfigparamsdec_h264_ref (MixVideoConfigParamsDecH264 * mix);
+
+/**
+* mix_videoconfigparamsdec_h264_unref:
+* @obj: object to unref.
+*
+* Decrement reference count of the object.
+*/
+#define mix_videoconfigparamsdec_h264_unref(obj) mix_params_unref(MIX_PARAMS(obj))
+
+/* Class Methods */
+
+/* TODO: Add getters and setters for other properties */
+
+#endif /* __MIX_VIDEOCONFIGPARAMSDEC_H264_H__ */
diff --git a/mix_video/src/mixvideoconfigparamsdec_mp42.c b/mix_video/src/mixvideoconfigparamsdec_mp42.c
new file mode 100644
index 0000000..17329e1
--- /dev/null
+++ b/mix_video/src/mixvideoconfigparamsdec_mp42.c
@@ -0,0 +1,244 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+/**
+ * SECTION:mixvideoconfigparamsdec_mp42
+ * @short_description: VideoConfig parameters
+ *
+ * A data object which stores videoconfig specific parameters.
+ */
+
+#include "mixvideolog.h"
+#include "mixvideoconfigparamsdec_mp42.h"
+
+static GType _mix_videoconfigparamsdec_mp42_type = 0;
+static MixVideoConfigParamsDecClass *parent_class = NULL;
+
+#define _do_init { _mix_videoconfigparamsdec_mp42_type = g_define_type_id; }
+
+gboolean mix_videoconfigparamsdec_mp42_copy(MixParams * target,
+ const MixParams * src);
+MixParams *mix_videoconfigparamsdec_mp42_dup(const MixParams * obj);
+gboolean
+ mix_videoconfigparamsdec_mp42_equal(MixParams * first, MixParams * second);
+static void mix_videoconfigparamsdec_mp42_finalize(MixParams * obj);
+
+G_DEFINE_TYPE_WITH_CODE (MixVideoConfigParamsDecMP42, /* The name of the new type, in Camel case */
+ mix_videoconfigparamsdec_mp42, /* The name of the new type in lowercase */
+ MIX_TYPE_VIDEOCONFIGPARAMSDEC, /* The GType of the parent type */
+ _do_init);
+
+void _mix_videoconfigparamsdec_mp42_initialize(void) {
+	/* Take a class reference once up front so instances can later be
+	 * created safely from multiple threads;
+	 * see http://bugzilla.gnome.org/show_bug.cgi?id=304551 */
+	g_type_class_ref(mix_videoconfigparamsdec_mp42_get_type());
+}
+
+static void mix_videoconfigparamsdec_mp42_init(MixVideoConfigParamsDecMP42 * self) {
+	/* Start with no version information and clear the reserved slots. */
+	self->mpegversion = 0;
+	self->divxversion = 0;
+	self->reserved1 = self->reserved2 = NULL;
+	self->reserved3 = self->reserved4 = NULL;
+}
+
+static void mix_videoconfigparamsdec_mp42_class_init(
+		MixVideoConfigParamsDecMP42Class * klass) {
+	/* Install this subclass's virtual methods on the MixParams root class
+	 * and cache the parent class pointer for chain-up calls. */
+	MixVideoConfigParamsDecClass *this_parent_class = MIX_VIDEOCONFIGPARAMSDEC_CLASS(
+			klass);
+	MixParamsClass *this_root_class = MIX_PARAMS_CLASS(this_parent_class);
+
+	/* setup static parent class */
+	parent_class
+			= (MixVideoConfigParamsDecClass *) g_type_class_peek_parent(klass);
+
+	this_root_class->finalize = mix_videoconfigparamsdec_mp42_finalize;
+	this_root_class->copy
+			= (MixParamsCopyFunction) mix_videoconfigparamsdec_mp42_copy;
+	this_root_class->dup
+			= (MixParamsDupFunction) mix_videoconfigparamsdec_mp42_dup;
+	this_root_class->equal
+			= (MixParamsEqualFunction) mix_videoconfigparamsdec_mp42_equal;
+}
+
+MixVideoConfigParamsDecMP42 *
+mix_videoconfigparamsdec_mp42_new(void) {
+	/* Allocate a fresh instance through the GType system. */
+	return (MixVideoConfigParamsDecMP42 *) g_type_create_instance(
+			MIX_TYPE_VIDEOCONFIGPARAMSDEC_MP42);
+}
+
+void mix_videoconfigparamsdec_mp42_finalize(MixParams * obj) {
+	/* The MP42 subclass holds only scalar members (mpegversion,
+	 * divxversion), so nothing needs releasing here; just chain up. */
+	/* MixVideoConfigParamsDecMP42 *this_obj = MIX_VIDEOCONFIGPARAMSDEC_MP42 (obj); */
+	MixParamsClass *root_class = MIX_PARAMS_CLASS(parent_class);
+
+	/* TODO: cleanup resources allocated */
+
+	/* Chain up parent */
+
+	if (root_class->finalize) {
+		root_class->finalize(obj);
+	}
+}
+
+MixVideoConfigParamsDecMP42 *
+mix_videoconfigparamsdec_mp42_ref(MixVideoConfigParamsDecMP42 * mix) {
+	/* Refcounting is handled entirely by the MixParams base class. */
+	return (MixVideoConfigParamsDecMP42 *) mix_params_ref(MIX_PARAMS(mix));
+}
+
+/**
+ * mix_videoconfigparamsdec_mp42_dup:
+ * @obj: a #MixVideoConfigParamsDecMP42 object
+ * @returns: a newly allocated duplicate of the object, or NULL on failure.
+ *
+ * Create a deep copy of the object.
+ */
+MixParams *
+mix_videoconfigparamsdec_mp42_dup(const MixParams * obj) {
+	/* Allocate a new instance and deep-copy @obj into it. Returns NULL
+	 * when @obj is not a MixVideoConfigParamsDecMP42 or the copy fails. */
+	MixParams *ret = NULL;
+
+	LOG_V( "Begin\n");
+	if (MIX_IS_VIDEOCONFIGPARAMSDEC_MP42(obj)) {
+		MixVideoConfigParamsDecMP42 *duplicate = mix_videoconfigparamsdec_mp42_new();
+		/* %p is the only portable conversion for printing a pointer;
+		 * the original "0x%x" is undefined for pointer arguments and
+		 * truncates on LP64 platforms. */
+		LOG_V( "duplicate = %p\n", (void *) duplicate);
+		if (mix_videoconfigparamsdec_mp42_copy(MIX_PARAMS(duplicate), MIX_PARAMS(
+				obj))) {
+			ret = MIX_PARAMS(duplicate);
+		} else {
+			/* Copy failed: release the half-built duplicate. */
+			mix_videoconfigparamsdec_mp42_unref(duplicate);
+		}
+	}
+	LOG_V( "End\n");
+	return ret;
+}
+
+/**
+ * mix_videoconfigparamsdec_mp42_copy:
+ * @target: copy to target
+ * @src: copy from src
+ * @returns: boolean indicates if copy is successful.
+ *
+ * Copy instance data from @src to @target.
+ */
+gboolean mix_videoconfigparamsdec_mp42_copy(MixParams * target,
+		const MixParams * src) {
+	/* Copy instance data from @src to @target; both must be
+	 * MixVideoConfigParamsDecMP42, otherwise FALSE is returned. */
+	MixVideoConfigParamsDecMP42 *this_target, *this_src;
+	MixParamsClass *root_class;
+
+	LOG_V( "Begin\n");
+	if (MIX_IS_VIDEOCONFIGPARAMSDEC_MP42(target) && MIX_IS_VIDEOCONFIGPARAMSDEC_MP42(
+			src)) {
+		// Cast the base object to this child object
+		this_target = MIX_VIDEOCONFIGPARAMSDEC_MP42(target);
+		this_src = MIX_VIDEOCONFIGPARAMSDEC_MP42(src);
+
+		/* copy the MP42-specific scalar properties */
+		this_target->mpegversion = this_src->mpegversion;
+		this_target->divxversion = this_src->divxversion;
+
+		// Now chainup base class to copy the inherited properties
+		root_class = MIX_PARAMS_CLASS(parent_class);
+
+		if (root_class->copy) {
+			LOG_V( "root_class->copy != NULL\n");
+			return root_class->copy(MIX_PARAMS_CAST(target), MIX_PARAMS_CAST(
+					src));
+		} else {
+			LOG_V( "root_class->copy == NULL\n\n");
+			return TRUE;
+		}
+	}
+	LOG_V( "End\n");
+	return FALSE;
+}
+
+/**
+ * mix_videoconfigparamsdec_mp42_equal:
+ * @first: first object to compare
+ * @second: second object to compare
+ * @returns: boolean indicating whether the two instances are equal.
+ *
+ * Deep-compare @first and @second, including base-class properties.
+ */
+gboolean mix_videoconfigparamsdec_mp42_equal(MixParams * first, MixParams * second) {
+	/* Compare two MixVideoConfigParamsDecMP42 instances. Note that
+	 * mpegversion/divxversion are NOT yet compared here (see TODO);
+	 * equality is currently decided by the base-class comparison. */
+	gboolean ret = FALSE;
+	MixVideoConfigParamsDecMP42 *this_first, *this_second;
+
+	if (MIX_IS_VIDEOCONFIGPARAMSDEC_MP42(first) && MIX_IS_VIDEOCONFIGPARAMSDEC_MP42(
+			second)) {
+		// Deep compare
+		// Cast the base object to this child object
+
+		this_first = MIX_VIDEOCONFIGPARAMSDEC_MP42(first);
+		this_second = MIX_VIDEOCONFIGPARAMSDEC_MP42(second);
+
+		/* TODO: add comparison for properties */
+		{
+			// members within this scope equal. chaining up.
+			MixParamsClass *klass = MIX_PARAMS_CLASS(parent_class);
+			if (klass->equal) {
+				ret = klass->equal(first, second);
+			} else {
+				ret = TRUE;
+			}
+		}
+	}
+
+	return ret;
+}
+
+/* TODO: Add getters and setters for properties if any */
+
+/* Validate a setter's object argument: non-NULL and of the expected GType. */
+#define MIX_VIDEOCONFIGPARAMSDEC_MP42_SETTER_CHECK_INPUT(obj) \
+	if(!obj) return MIX_RESULT_NULL_PTR; \
+	if(!MIX_IS_VIDEOCONFIGPARAMSDEC_MP42(obj)) return MIX_RESULT_FAIL; \
+
+/* Validate a getter's object argument and its out-parameter. */
+#define MIX_VIDEOCONFIGPARAMSDEC_MP42_GETTER_CHECK_INPUT(obj, prop) \
+	if(!obj || !prop) return MIX_RESULT_NULL_PTR; \
+	if(!MIX_IS_VIDEOCONFIGPARAMSDEC_MP42(obj)) return MIX_RESULT_FAIL; \
+
+
+MIX_RESULT mix_videoconfigparamsdec_mp42_set_mpegversion(
+		MixVideoConfigParamsDecMP42 *obj, guint version) {
+	/* Store the MPEG version value for this stream. */
+	MIX_VIDEOCONFIGPARAMSDEC_MP42_SETTER_CHECK_INPUT(obj);
+	obj->mpegversion = version;
+	return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsdec_mp42_get_mpegversion(
+		MixVideoConfigParamsDecMP42 *obj, guint *version) {
+	/* Copy the stored MPEG version to the caller. */
+	MIX_VIDEOCONFIGPARAMSDEC_MP42_GETTER_CHECK_INPUT(obj, version);
+	*version = obj->mpegversion;
+	return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsdec_mp42_set_divxversion(
+		MixVideoConfigParamsDecMP42 *obj, guint version) {
+	/* Store the DivX version value for this stream. */
+	MIX_VIDEOCONFIGPARAMSDEC_MP42_SETTER_CHECK_INPUT(obj);
+	obj->divxversion = version;
+	return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsdec_mp42_get_divxversion(
+		MixVideoConfigParamsDecMP42 *obj, guint *version) {
+	/* Copy the stored DivX version to the caller. */
+	MIX_VIDEOCONFIGPARAMSDEC_MP42_GETTER_CHECK_INPUT(obj, version);
+	*version = obj->divxversion;
+	return MIX_RESULT_SUCCESS;
+}
+
diff --git a/mix_video/src/mixvideoconfigparamsdec_mp42.h b/mix_video/src/mixvideoconfigparamsdec_mp42.h
new file mode 100644
index 0000000..5f68b42
--- /dev/null
+++ b/mix_video/src/mixvideoconfigparamsdec_mp42.h
@@ -0,0 +1,141 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+#ifndef __MIX_VIDEOCONFIGPARAMSDEC_MP42_H__
+#define __MIX_VIDEOCONFIGPARAMSDEC_MP42_H__
+
+#include "mixvideoconfigparamsdec.h"
+#include "mixvideodef.h"
+
+/**
+ * MIX_TYPE_VIDEOCONFIGPARAMSDEC_MP42:
+ *
+ * Get type of class.
+ */
+#define MIX_TYPE_VIDEOCONFIGPARAMSDEC_MP42 (mix_videoconfigparamsdec_mp42_get_type ())
+
+/**
+ * MIX_VIDEOCONFIGPARAMSDEC_MP42:
+ * @obj: object to be type-casted.
+ */
+#define MIX_VIDEOCONFIGPARAMSDEC_MP42(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_VIDEOCONFIGPARAMSDEC_MP42, MixVideoConfigParamsDecMP42))
+
+/**
+ * MIX_IS_VIDEOCONFIGPARAMSDEC_MP42:
+ * @obj: an object.
+ *
+ * Checks if the given object is an instance of #MixVideoConfigParamsDecMP42
+ */
+#define MIX_IS_VIDEOCONFIGPARAMSDEC_MP42(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEOCONFIGPARAMSDEC_MP42))
+
+/**
+ * MIX_VIDEOCONFIGPARAMSDEC_MP42_CLASS:
+ * @klass: class to be type-casted.
+ */
+#define MIX_VIDEOCONFIGPARAMSDEC_MP42_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_VIDEOCONFIGPARAMSDEC_MP42, MixVideoConfigParamsDecMP42Class))
+
+/**
+ * MIX_IS_VIDEOCONFIGPARAMSDEC_MP42_CLASS:
+ * @klass: a class.
+ *
+ * Checks if the given class is #MixVideoConfigParamsDecMP42Class
+ */
+#define MIX_IS_VIDEOCONFIGPARAMSDEC_MP42_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEOCONFIGPARAMSDEC_MP42))
+
+/**
+ * MIX_VIDEOCONFIGPARAMSDEC_MP42_GET_CLASS:
+ * @obj: a #MixParams object.
+ *
+ * Get the class instance of the object.
+ */
+#define MIX_VIDEOCONFIGPARAMSDEC_MP42_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEOCONFIGPARAMSDEC_MP42, MixVideoConfigParamsDecMP42Class))
+
+typedef struct _MixVideoConfigParamsDecMP42 MixVideoConfigParamsDecMP42;
+typedef struct _MixVideoConfigParamsDecMP42Class MixVideoConfigParamsDecMP42Class;
+
+/**
+ * MixVideoConfigParamsDecMP42:
+ *
+ * MI-X VideoConfig Parameter object for MPEG-4 part 2 / DivX decode.
+ */
+struct _MixVideoConfigParamsDecMP42 {
+	/*< public > */
+	MixVideoConfigParamsDec parent;
+
+	/*< public > */
+
+	/* MPEG version of the stream (see the set/get accessors). */
+	guint mpegversion;
+	/* DivX version of the stream (see the set/get accessors). */
+	guint divxversion;
+
+	/* Padding for future ABI-compatible extension. */
+	void *reserved1;
+	void *reserved2;
+	void *reserved3;
+	void *reserved4;
+};
+
+/**
+ * MixVideoConfigParamsDecMP42Class:
+ *
+ * MI-X VideoConfig object class
+ */
+struct _MixVideoConfigParamsDecMP42Class {
+ /*< public > */
+ MixVideoConfigParamsDecClass parent_class;
+
+/* class members */
+};
+
+/**
+ * mix_videoconfigparamsdec_mp42_get_type:
+ * @returns: type
+ *
+ * Get the type of object.
+ */
+GType mix_videoconfigparamsdec_mp42_get_type(void);
+
+/**
+ * mix_videoconfigparamsdec_mp42_new:
+ * @returns: A newly allocated instance of #MixVideoConfigParamsDecMP42
+ *
+ * Use this method to create new instance of #MixVideoConfigParamsDecMP42
+ */
+MixVideoConfigParamsDecMP42 *mix_videoconfigparamsdec_mp42_new(void);
+/**
+ * mix_videoconfigparamsdec_mp42_ref:
+ * @mix: object to add reference
+ * @returns: the MixVideoConfigParamsDecMP42 instance where reference count has been increased.
+ *
+ * Add reference count.
+ */
+MixVideoConfigParamsDecMP42
+* mix_videoconfigparamsdec_mp42_ref(MixVideoConfigParamsDecMP42 * mix);
+
+/**
+ * mix_videoconfigparamsdec_mp42_unref:
+ * @obj: object to unref.
+ *
+ * Decrement reference count of the object.
+ */
+#define mix_videoconfigparamsdec_mp42_unref(obj) mix_params_unref(MIX_PARAMS(obj))
+
+/* Class Methods */
+
+/* TODO: Add getters and setters for other properties */
+MIX_RESULT mix_videoconfigparamsdec_mp42_set_mpegversion(
+ MixVideoConfigParamsDecMP42 *obj, guint version);
+
+MIX_RESULT mix_videoconfigparamsdec_mp42_get_mpegversion(
+ MixVideoConfigParamsDecMP42 *obj, guint *version);
+
+MIX_RESULT mix_videoconfigparamsdec_mp42_set_divxversion(
+ MixVideoConfigParamsDecMP42 *obj, guint version);
+
+MIX_RESULT mix_videoconfigparamsdec_mp42_get_divxversion(
+ MixVideoConfigParamsDecMP42 *obj, guint *version);
+
+#endif /* __MIX_VIDEOCONFIGPARAMSDEC_MP42_H__ */
diff --git a/mix_video/src/mixvideoconfigparamsdec_vc1.c b/mix_video/src/mixvideoconfigparamsdec_vc1.c
new file mode 100644
index 0000000..fdce4f3
--- /dev/null
+++ b/mix_video/src/mixvideoconfigparamsdec_vc1.c
@@ -0,0 +1,188 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+/**
+ * SECTION:mixvideoconfigparamsdec_vc1
+ * @short_description: VideoConfig parameters
+ *
+ * A data object which stores videoconfig specific parameters.
+ */
+
+#include "mixvideoconfigparamsdec_vc1.h"
+
+/* Cached GType id for this class; captured by _do_init during registration. */
+static GType _mix_videoconfigparamsdec_vc1_type = 0;
+/* Parent class pointer used to chain up copy/dup/equal/finalize. */
+static MixVideoConfigParamsDecClass *parent_class = NULL;
+
+#define _do_init { _mix_videoconfigparamsdec_vc1_type = g_define_type_id; }
+
+/* Forward declarations of the MixParams virtual-function overrides. */
+gboolean mix_videoconfigparamsdec_vc1_copy(MixParams * target,
+		const MixParams * src);
+MixParams *mix_videoconfigparamsdec_vc1_dup(const MixParams * obj);
+gboolean mix_videoconfigparamsdec_vc1_equal(MixParams * first, MixParams * second);
+static void mix_videoconfigparamsdec_vc1_finalize(MixParams * obj);
+
+G_DEFINE_TYPE_WITH_CODE (MixVideoConfigParamsDecVC1, /* The name of the new type, in Camel case */
+		mix_videoconfigparamsdec_vc1, /* The name of the new type in lowercase */
+		MIX_TYPE_VIDEOCONFIGPARAMSDEC, /* The GType of the parent type */
+		_do_init);
+
+/* One-time, single-threaded type registration hook. */
+void _mix_videoconfigparamsdec_vc1_initialize(void) {
+	/* the MixParams types need to be class_ref'd once before it can be
+	 * done from multiple threads;
+	 * see http://bugzilla.gnome.org/show_bug.cgi?id=304551 */
+	g_type_class_ref(mix_videoconfigparamsdec_vc1_get_type());
+}
+
+/* GObject instance initializer: zero the reserved padding pointers.
+ * wmv_version and fourcc are NOT initialized here (TODO in header). */
+static void mix_videoconfigparamsdec_vc1_init(MixVideoConfigParamsDecVC1 * self) {
+	/* initialize properties here */
+	/* TODO: initialize properties */
+
+	self->reserved1 = NULL;
+	self->reserved2 = NULL;
+	self->reserved3 = NULL;
+	self->reserved4 = NULL;
+}
+
+/* GObject class initializer: record the parent class for chain-ups and
+ * install this type's finalize/copy/dup/equal overrides on the MixParams
+ * root class vtable. */
+static void mix_videoconfigparamsdec_vc1_class_init(
+		MixVideoConfigParamsDecVC1Class * klass) {
+	MixVideoConfigParamsDecClass *this_parent_class = MIX_VIDEOCONFIGPARAMSDEC_CLASS(
+			klass);
+	MixParamsClass *this_root_class = MIX_PARAMS_CLASS(this_parent_class);
+
+	/* setup static parent class */
+	parent_class
+			= (MixVideoConfigParamsDecClass *) g_type_class_peek_parent(klass);
+
+	this_root_class->finalize = mix_videoconfigparamsdec_vc1_finalize;
+	this_root_class->copy
+			= (MixParamsCopyFunction) mix_videoconfigparamsdec_vc1_copy;
+	this_root_class->dup = (MixParamsDupFunction) mix_videoconfigparamsdec_vc1_dup;
+	this_root_class->equal
+			= (MixParamsEqualFunction) mix_videoconfigparamsdec_vc1_equal;
+}
+
+/* Allocate a new instance via the GObject type system; returns NULL only if
+ * instance creation fails. Caller owns the reference. */
+MixVideoConfigParamsDecVC1 *
+mix_videoconfigparamsdec_vc1_new(void) {
+	MixVideoConfigParamsDecVC1 *ret =
+			(MixVideoConfigParamsDecVC1 *) g_type_create_instance(
+					MIX_TYPE_VIDEOCONFIGPARAMSDEC_VC1);
+
+	return ret;
+}
+
+/* Finalizer: no instance resources to release yet (TODO below), so this
+ * only chains up to the parent finalizer.
+ * NOTE(review): forward-declared static at the top of the file but defined
+ * without the static keyword here -- confirm intended linkage. */
+void mix_videoconfigparamsdec_vc1_finalize(MixParams * obj) {
+	/* MixVideoConfigParamsDecVC1 *this_obj = MIX_VIDEOCONFIGPARAMSDEC_VC1 (obj); */
+	MixParamsClass *root_class = MIX_PARAMS_CLASS(parent_class);
+
+	/* TODO: cleanup resources allocated */
+
+	/* Chain up parent */
+
+	if (root_class->finalize) {
+		root_class->finalize(obj);
+	}
+}
+
+/* Increment the reference count; returns the same object, type-cast back. */
+MixVideoConfigParamsDecVC1 *
+mix_videoconfigparamsdec_vc1_ref(MixVideoConfigParamsDecVC1 * mix) {
+	return (MixVideoConfigParamsDecVC1 *) mix_params_ref(MIX_PARAMS(mix));
+}
+
+/**
+ * mix_videoconfigparamsdec_vc1_dup:
+ * @obj: a #MixVideoConfigParamsDecVC1 object
+ * @returns: a newly allocated duplicate of the object, or NULL if @obj is
+ * not of this type or the copy fails.
+ *
+ * Allocate a new instance and copy @obj's state into it.
+ */
+MixParams *
+mix_videoconfigparamsdec_vc1_dup(const MixParams * obj) {
+	MixParams *ret = NULL;
+
+	if (MIX_IS_VIDEOCONFIGPARAMSDEC_VC1(obj)) {
+		MixVideoConfigParamsDecVC1 *duplicate = mix_videoconfigparamsdec_vc1_new();
+		if (mix_videoconfigparamsdec_vc1_copy(MIX_PARAMS(duplicate), MIX_PARAMS(
+				obj))) {
+			ret = MIX_PARAMS(duplicate);
+		} else {
+			/* copy failed: drop the partially-built duplicate */
+			mix_videoconfigparamsdec_vc1_unref(duplicate);
+		}
+	}
+	return ret;
+}
+
+/**
+ * mix_videoconfigparamsdec_vc1_copy:
+ * @target: copy to target
+ * @src: copy from src
+ * @returns: boolean indicates if copy is successful.
+ *
+ * Copy instance data from @src to @target. Returns FALSE if either object
+ * is not a #MixVideoConfigParamsDecVC1.
+ */
+gboolean mix_videoconfigparamsdec_vc1_copy(MixParams * target,
+		const MixParams * src) {
+	MixVideoConfigParamsDecVC1 *this_target, *this_src;
+	MixParamsClass *root_class;
+
+	if (MIX_IS_VIDEOCONFIGPARAMSDEC_VC1(target) && MIX_IS_VIDEOCONFIGPARAMSDEC_VC1(
+			src)) {
+		// Cast the base object to this child object
+		this_target = MIX_VIDEOCONFIGPARAMSDEC_VC1(target);
+		this_src = MIX_VIDEOCONFIGPARAMSDEC_VC1(src);
+
+		// TODO: copy properties (wmv_version, fourcc not copied yet) */
+
+		// Now chainup base class
+		root_class = MIX_PARAMS_CLASS(parent_class);
+
+		if (root_class->copy) {
+			return root_class->copy(MIX_PARAMS_CAST(target), MIX_PARAMS_CAST(
+					src));
+		} else {
+			return TRUE;
+		}
+	}
+	return FALSE;
+}
+
+/**
+ * mix_videoconfigparamsdec_vc1_equal:
+ * @first: first object to compare
+ * @second: second object to compare
+ * @returns: boolean indicating whether the two instances are equal.
+ *
+ * Deep-compare two #MixVideoConfigParamsDecVC1 instances. VC1-specific
+ * fields are not compared yet (TODO below); only the parent chain is
+ * consulted.
+ */
+gboolean mix_videoconfigparamsdec_vc1_equal(MixParams * first, MixParams * second) {
+	gboolean ret = FALSE;
+	MixVideoConfigParamsDecVC1 *this_first, *this_second;
+
+	if (MIX_IS_VIDEOCONFIGPARAMSDEC_VC1(first) && MIX_IS_VIDEOCONFIGPARAMSDEC_VC1(
+			second)) {
+		// Deep compare
+		// Cast the base object to this child object
+
+		this_first = MIX_VIDEOCONFIGPARAMSDEC_VC1(first);
+		this_second = MIX_VIDEOCONFIGPARAMSDEC_VC1(second);
+
+		/* TODO: add comparison for properties (wmv_version, fourcc) */
+		{
+			// members within this scope equal. chaining up.
+			MixParamsClass *klass = MIX_PARAMS_CLASS(parent_class);
+			if (klass->equal) {
+				ret = klass->equal(first, second);
+			} else {
+				ret = TRUE;
+			}
+		}
+	}
+
+	return ret;
+}
+
+/* TODO: Add getters and setters for properties if any */
diff --git a/mix_video/src/mixvideoconfigparamsdec_vc1.h b/mix_video/src/mixvideoconfigparamsdec_vc1.h
new file mode 100644
index 0000000..ecf90e5
--- /dev/null
+++ b/mix_video/src/mixvideoconfigparamsdec_vc1.h
@@ -0,0 +1,134 @@
+/*
+INTEL CONFIDENTIAL
+Copyright 2009 Intel Corporation All Rights Reserved.
+The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+*/
+
+#ifndef __MIX_VIDEOCONFIGPARAMSDEC_VC1_H__
+#define __MIX_VIDEOCONFIGPARAMSDEC_VC1_H__
+
+#include "mixvideoconfigparamsdec.h"
+#include "mixvideodef.h"
+
+/**
+* MIX_TYPE_VIDEOCONFIGPARAMSDEC_VC1:
+*
+* Get type of class.
+*/
+#define MIX_TYPE_VIDEOCONFIGPARAMSDEC_VC1 (mix_videoconfigparamsdec_vc1_get_type ())
+
+/**
+* MIX_VIDEOCONFIGPARAMSDEC_VC1:
+* @obj: object to be type-casted.
+*/
+#define MIX_VIDEOCONFIGPARAMSDEC_VC1(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_VIDEOCONFIGPARAMSDEC_VC1, MixVideoConfigParamsDecVC1))
+
+/**
+* MIX_IS_VIDEOCONFIGPARAMSDEC_VC1:
+* @obj: an object.
+*
+* Checks if the given object is an instance of #MixVideoConfigParamsDecVC1
+*/
+#define MIX_IS_VIDEOCONFIGPARAMSDEC_VC1(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEOCONFIGPARAMSDEC_VC1))
+
+/**
+* MIX_VIDEOCONFIGPARAMSDEC_VC1_CLASS:
+* @klass: class to be type-casted.
+*/
+#define MIX_VIDEOCONFIGPARAMSDEC_VC1_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_VIDEOCONFIGPARAMSDEC_VC1, MixVideoConfigParamsDecVC1Class))
+
+/**
+* MIX_IS_VIDEOCONFIGPARAMSDEC_VC1_CLASS:
+* @klass: a class.
+*
+* Checks if the given class is #MixVideoConfigParamsDecVC1Class
+*/
+#define MIX_IS_VIDEOCONFIGPARAMSDEC_VC1_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEOCONFIGPARAMSDEC_VC1))
+
+/**
+* MIX_VIDEOCONFIGPARAMSDEC_VC1_GET_CLASS:
+* @obj: a #MixParams object.
+*
+* Get the class instance of the object.
+*/
+#define MIX_VIDEOCONFIGPARAMSDEC_VC1_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEOCONFIGPARAMSDEC_VC1, MixVideoConfigParamsDecVC1Class))
+
+typedef struct _MixVideoConfigParamsDecVC1 MixVideoConfigParamsDecVC1;
+typedef struct _MixVideoConfigParamsDecVC1Class MixVideoConfigParamsDecVC1Class;
+
+/**
+* MixVideoConfigParamsDecVC1:
+*
+* MI-X VideoConfig Parameter object for VC-1/WMV decode.
+*/
+struct _MixVideoConfigParamsDecVC1
+{
+	/*< public > */
+	MixVideoConfigParamsDec parent;
+
+	/*< public > */
+
+	/* TODO: Add VC1 configuration paramters */
+	/* TODO: wmv_version and fourcc type might be changed later */
+	/* WMV codec version -- no accessor exists yet; TODO confirm semantics. */
+	guint wmv_version;
+	/* FOURCC code of the stream -- presumably a packed 4-char code; verify. */
+	guint fourcc;
+
+	/* Padding for future ABI-compatible extension. */
+	void *reserved1;
+	void *reserved2;
+	void *reserved3;
+	void *reserved4;
+};
+
+/**
+* MixVideoConfigParamsDecVC1Class:
+*
+* MI-X VideoConfig object class
+*/
+struct _MixVideoConfigParamsDecVC1Class
+{
+ /*< public > */
+ MixVideoConfigParamsDecClass parent_class;
+
+ /* class members */
+};
+
+/**
+* mix_videoconfigparamsdec_vc1_get_type:
+* @returns: type
+*
+* Get the type of object.
+*/
+GType mix_videoconfigparamsdec_vc1_get_type (void);
+
+/**
+* mix_videoconfigparamsdec_vc1_new:
+* @returns: A newly allocated instance of #MixVideoConfigParamsDecVC1
+*
+* Use this method to create new instance of #MixVideoConfigParamsDecVC1
+*/
+MixVideoConfigParamsDecVC1 *mix_videoconfigparamsdec_vc1_new (void);
+/**
+* mix_videoconfigparamsdec_vc1_ref:
+* @mix: object to add reference
+* @returns: the MixVideoConfigParamsDecVC1 instance where reference count has been increased.
+*
+* Add reference count.
+*/
+MixVideoConfigParamsDecVC1
+ * mix_videoconfigparamsdec_vc1_ref (MixVideoConfigParamsDecVC1 * mix);
+
+/**
+* mix_videoconfigparamsdec_vc1_unref:
+* @obj: object to unref.
+*
+* Decrement reference count of the object.
+*/
+#define mix_videoconfigparamsdec_vc1_unref(obj) mix_params_unref(MIX_PARAMS(obj))
+
+/* Class Methods */
+
+/* TODO: Add getters and setters for other properties */
+
+#endif /* __MIX_VIDEOCONFIGPARAMSDEC_VC1_H__ */
diff --git a/mix_video/src/mixvideoconfigparamsenc.c b/mix_video/src/mixvideoconfigparamsenc.c
new file mode 100644
index 0000000..44a31ce
--- /dev/null
+++ b/mix_video/src/mixvideoconfigparamsenc.c
@@ -0,0 +1,688 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+/**
+ * SECTION:mixvideoconfigparamsenc
+ * @short_description: VideoConfig parameters
+ *
+ * A data object which stores videoconfig specific parameters.
+ */
+
+#include <string.h>
+#include "mixvideolog.h"
+#include "mixvideoconfigparamsenc.h"
+
+/* Cached GType id for this class; captured by _do_init during registration. */
+static GType _mix_videoconfigparamsenc_type = 0;
+/* Parent class pointer used to chain up copy/dup/equal/finalize. */
+static MixParamsClass *parent_class = NULL;
+
+/* Enables the extra mime_type debug logging in _copy(). */
+#define MDEBUG
+
+#define _do_init { _mix_videoconfigparamsenc_type = g_define_type_id; }
+
+/* Forward declarations of the MixParams virtual-function overrides. */
+gboolean mix_videoconfigparamsenc_copy(MixParams * target, const MixParams * src);
+MixParams *mix_videoconfigparamsenc_dup(const MixParams * obj);
+gboolean mix_videoconfigparamsenc_equal(MixParams * first, MixParams * second);
+static void mix_videoconfigparamsenc_finalize(MixParams * obj);
+
+G_DEFINE_TYPE_WITH_CODE (MixVideoConfigParamsEnc, mix_videoconfigparamsenc,
+		MIX_TYPE_VIDEOCONFIGPARAMS, _do_init);
+
+/* GObject instance initializer: establish encoder defaults -- 30/1 fps,
+ * initial QP 15, 30-frame intra period, H.264 baseline profile, YUV420
+ * input, no rate control, display enabled, buffer sharing disabled.
+ * mime_type stays NULL until mix_videoconfigparamsenc_set_mime_type(). */
+static void mix_videoconfigparamsenc_init(MixVideoConfigParamsEnc * self) {
+	/* initialize properties here */
+	self->bitrate = 0;
+	self->frame_rate_num = 30;
+	self->frame_rate_denom = 1;
+	self->initial_qp = 15;
+	self->min_qp = 0;
+
+	self->picture_width = 0;
+	self->picture_height = 0;
+
+	self->mime_type = NULL;
+	self->encode_format = 0;
+	self->intra_period = 30;
+
+	self->mixbuffer_pool_size = 0;
+
+	self->share_buf_mode = FALSE;
+
+	self->ci_frame_id = NULL;
+	self->ci_frame_num = 0;
+
+	self->need_display = TRUE;
+
+	self->rate_control = MIX_RATE_CONTROL_NONE;
+	self->raw_format = MIX_RAW_TARGET_FORMAT_YUV420;
+	self->profile = MIX_PROFILE_H264BASELINE;
+
+	/* TODO: initialize other properties */
+	self->reserved1 = NULL;
+	self->reserved2 = NULL;
+	self->reserved3 = NULL;
+	self->reserved4 = NULL;
+}
+
+/* GObject class initializer: record the parent class for chain-ups and
+ * install this type's finalize/copy/dup/equal overrides. */
+static void mix_videoconfigparamsenc_class_init(MixVideoConfigParamsEncClass * klass) {
+	MixParamsClass *mixparams_class = MIX_PARAMS_CLASS(klass);
+
+	/* setup static parent class */
+	parent_class = (MixParamsClass *) g_type_class_peek_parent(klass);
+
+	mixparams_class->finalize = mix_videoconfigparamsenc_finalize;
+	mixparams_class->copy = (MixParamsCopyFunction) mix_videoconfigparamsenc_copy;
+	mixparams_class->dup = (MixParamsDupFunction) mix_videoconfigparamsenc_dup;
+	mixparams_class->equal
+			= (MixParamsEqualFunction) mix_videoconfigparamsenc_equal;
+}
+
+/* Allocate a new instance via the GObject type system; caller owns the
+ * returned reference. */
+MixVideoConfigParamsEnc *
+mix_videoconfigparamsenc_new(void) {
+	MixVideoConfigParamsEnc *ret =
+			(MixVideoConfigParamsEnc *) g_type_create_instance(
+					MIX_TYPE_VIDEOCONFIGPARAMSENC);
+
+	return ret;
+}
+
+/* Finalizer: release the mime_type string and the ci_frame_id array, then
+ * chain up to the parent finalizer. */
+void mix_videoconfigparamsenc_finalize(MixParams * obj) {
+
+	/* clean up here. */
+	MixVideoConfigParamsEnc *self = MIX_VIDEOCONFIGPARAMSENC(obj);
+
+	/* free mime_type. _init() leaves it NULL until a type is set, so it must
+	 * be checked first: the original read self->mime_type->str
+	 * unconditionally and dereferenced NULL for never-configured objects.
+	 * For a valid GString, ->str is always non-NULL, so freeing the segment
+	 * (TRUE) is the correct release. */
+	if (self->mime_type) {
+		g_string_free(self->mime_type, TRUE);
+		self->mime_type = NULL;
+	}
+
+	if (self->ci_frame_id) {
+		g_free (self->ci_frame_id);
+		self->ci_frame_id = NULL;
+	}
+
+	/* Chain up parent */
+	if (parent_class->finalize) {
+		parent_class->finalize(obj);
+	}
+}
+
+/* Increment the reference count; returns the same object, type-cast back. */
+MixVideoConfigParamsEnc *
+mix_videoconfigparamsenc_ref(MixVideoConfigParamsEnc * mix) {
+	return (MixVideoConfigParamsEnc *) mix_params_ref(MIX_PARAMS(mix));
+}
+
+/**
+ * mix_videoconfigparamsenc_dup:
+ * @obj: a #MixVideoConfigParamsEnc object
+ * @returns: a newly allocated duplicate of the object, or NULL if @obj is
+ * not of this type or the copy fails.
+ *
+ * Allocate a new instance and copy @obj's state into it.
+ */
+MixParams *
+mix_videoconfigparamsenc_dup(const MixParams * obj) {
+	MixParams *ret = NULL;
+
+	LOG_V( "Begin\n");
+
+	if (MIX_IS_VIDEOCONFIGPARAMSENC(obj)) {
+		MixVideoConfigParamsEnc *duplicate = mix_videoconfigparamsenc_new();
+		if (mix_videoconfigparamsenc_copy(MIX_PARAMS(duplicate), MIX_PARAMS(obj))) {
+
+			ret = MIX_PARAMS(duplicate);
+		} else {
+			/* copy failed: drop the partially-built duplicate */
+			mix_videoconfigparamsenc_unref(duplicate);
+		}
+	}
+	return ret;
+}
+
+/**
+ * mix_videoconfigparamsenc_copy:
+ * @target: copy to target
+ * @src: copy from src
+ * @returns: boolean indicates if copy is successful.
+ *
+ * Copy instance data from @src to @target. Returns FALSE if either object
+ * is not a #MixVideoConfigParamsEnc, or if copying the mime type or the
+ * CI frame info fails.
+ */
+gboolean mix_videoconfigparamsenc_copy(MixParams * target, const MixParams * src) {
+
+	MixVideoConfigParamsEnc *this_target, *this_src;
+	MIX_RESULT mix_result = MIX_RESULT_FAIL;
+
+	LOG_V( "Begin\n");
+
+	if (MIX_IS_VIDEOCONFIGPARAMSENC(target) && MIX_IS_VIDEOCONFIGPARAMSENC(src)) {
+
+		/* Cast the base object to this child object */
+		this_target = MIX_VIDEOCONFIGPARAMSENC(target);
+		this_src = MIX_VIDEOCONFIGPARAMSENC(src);
+
+		/* copy properties of primitive type */
+		this_target->bitrate = this_src->bitrate;
+		this_target->frame_rate_num = this_src->frame_rate_num;
+		this_target->frame_rate_denom = this_src->frame_rate_denom;
+		this_target->initial_qp = this_src->initial_qp;
+		this_target->min_qp = this_src->min_qp;
+		this_target->intra_period = this_src->intra_period;
+		this_target->picture_width = this_src->picture_width;
+		this_target->picture_height = this_src->picture_height;
+		this_target->mixbuffer_pool_size = this_src->mixbuffer_pool_size;
+		this_target->share_buf_mode = this_src->share_buf_mode;
+		this_target->encode_format = this_src->encode_format;
+		this_target->ci_frame_num = this_src->ci_frame_num;
+		this_target->draw= this_src->draw;
+		this_target->need_display = this_src->need_display;
+		this_target->rate_control = this_src->rate_control;
+		this_target->raw_format = this_src->raw_format;
+		this_target->profile = this_src->profile;
+
+		/* copy properties of non-primitive type */
+
+		/* copy mime_type. NOTE(review): set_mime_type() rejects NULL, so
+		 * copying a source whose mime_type was never set reports failure --
+		 * TODO confirm this is the intended contract. */
+		if (this_src->mime_type) {
+#ifdef MDEBUG
+			if (this_src->mime_type->str) {
+
+				LOG_I( "this_src->mime_type->str = %s %x\n",
+						this_src->mime_type->str, (unsigned int)this_src->mime_type->str);
+			}
+#endif
+
+			mix_result = mix_videoconfigparamsenc_set_mime_type(this_target,
+					this_src->mime_type->str);
+		} else {
+
+			LOG_I( "this_src->mime_type = NULL\n");
+
+			mix_result = mix_videoconfigparamsenc_set_mime_type(this_target, NULL);
+		}
+
+		if (mix_result != MIX_RESULT_SUCCESS) {
+
+			LOG_E( "Failed to mix_videoconfigparamsenc_set_mime_type\n");
+			return FALSE;
+		}
+
+		/* copy the CI frame id array. The original stored this result but
+		 * never checked it, so an allocation failure was silently reported
+		 * as a successful copy. */
+		mix_result = mix_videoconfigparamsenc_set_ci_frame_info (this_target, this_src->ci_frame_id,
+				this_src->ci_frame_num);
+		if (mix_result != MIX_RESULT_SUCCESS) {
+
+			LOG_E( "Failed to mix_videoconfigparamsenc_set_ci_frame_info\n");
+			return FALSE;
+		}
+
+		/* TODO: copy other properties if there's any */
+
+		/* Now chainup base class */
+		if (parent_class->copy) {
+			return parent_class->copy(MIX_PARAMS_CAST(target), MIX_PARAMS_CAST(
+					src));
+		} else {
+			return TRUE;
+		}
+	}
+
+	return FALSE;
+}
+
+
+/**
+ * mix_videoconfigparamsenc_equal:
+ * @first: first object to compare
+ * @second: second object to compare
+ * @returns: boolean indicating whether the two instances are equal.
+ *
+ * Deep-compare two #MixVideoConfigParamsEnc instances: all primitive
+ * fields, the mime_type string, then the parent chain.
+ */
+gboolean mix_videoconfigparamsenc_equal(MixParams * first, MixParams * second) {
+
+	gboolean ret = FALSE;
+
+	MixVideoConfigParamsEnc *this_first, *this_second;
+
+	if (MIX_IS_VIDEOCONFIGPARAMSENC(first) && MIX_IS_VIDEOCONFIGPARAMSENC(second)) {
+
+		// Deep compare
+		// Cast the base object to this child object
+		this_first = MIX_VIDEOCONFIGPARAMSENC(first);
+		this_second = MIX_VIDEOCONFIGPARAMSENC(second);
+
+		/* check the equality of the primitive type properties */
+		if (this_first->bitrate != this_second->bitrate) {
+			goto not_equal;
+		}
+
+		if (this_first->frame_rate_num != this_second->frame_rate_num) {
+			goto not_equal;
+		}
+
+		if (this_first->frame_rate_denom != this_second->frame_rate_denom) {
+			goto not_equal;
+		}
+
+		if (this_first->initial_qp != this_second->initial_qp) {
+			goto not_equal;
+		}
+
+		if (this_first->min_qp != this_second->min_qp) {
+			goto not_equal;
+		}
+
+		if (this_first->intra_period != this_second->intra_period) {
+			goto not_equal;
+		}
+
+		/* the objects differ when EITHER dimension differs; the original
+		 * used && here, which treated objects differing in only one
+		 * dimension as equal */
+		if (this_first->picture_width != this_second->picture_width
+				|| this_first->picture_height != this_second->picture_height) {
+			goto not_equal;
+		}
+
+		if (this_first->encode_format != this_second->encode_format) {
+			goto not_equal;
+		}
+
+		if (this_first->mixbuffer_pool_size != this_second->mixbuffer_pool_size) {
+			goto not_equal;
+		}
+
+		if (this_first->share_buf_mode != this_second->share_buf_mode) {
+			goto not_equal;
+		}
+
+		/* pointer identity only -- the array contents are not compared */
+		if (this_first->ci_frame_id != this_second->ci_frame_id) {
+			goto not_equal;
+		}
+
+		if (this_first->ci_frame_num != this_second->ci_frame_num) {
+			goto not_equal;
+		}
+
+		if (this_first->draw != this_second->draw) {
+			goto not_equal;
+		}
+
+		if (this_first->need_display!= this_second->need_display) {
+			goto not_equal;
+		}
+
+		if (this_first->rate_control != this_second->rate_control) {
+			goto not_equal;
+		}
+
+		if (this_first->raw_format != this_second->raw_format) {
+			goto not_equal;
+		}
+
+		if (this_first->profile != this_second->profile) {
+			goto not_equal;
+		}
+
+		/* check the equality of the non-primitive type properties */
+
+		/* compare mime_type: equal when both NULL or both set and equal */
+		if (this_first->mime_type && this_second->mime_type) {
+			if (g_string_equal(this_first->mime_type, this_second->mime_type)
+					!= TRUE) {
+				goto not_equal;
+			}
+		} else if (!(!this_first->mime_type && !this_second->mime_type)) {
+			goto not_equal;
+		}
+
+		ret = TRUE;
+
+		not_equal:
+
+		if (ret != TRUE) {
+			return ret;
+		}
+
+		/* chaining up. */
+		MixParamsClass *klass = MIX_PARAMS_CLASS(parent_class);
+		if (klass->equal)
+			ret = parent_class->equal(first, second);
+		else
+			ret = TRUE;
+	}
+
+	return ret;
+}
+
+/* Argument-validation helpers shared by the accessors below: reject NULL
+ * pointers (MIX_RESULT_NULL_PTR) and objects of the wrong GType
+ * (MIX_RESULT_FAIL). The _PAIR variant validates two out-parameters.
+ * NOTE(review): these expand to bare unbraced if/return statements, so they
+ * are only safe as the first statement of a function body. */
+#define MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT(obj) \
+	if(!obj) return MIX_RESULT_NULL_PTR; \
+	if(!MIX_IS_VIDEOCONFIGPARAMSENC(obj)) return MIX_RESULT_FAIL; \
+
+#define MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT(obj, prop) \
+	if(!obj || !prop) return MIX_RESULT_NULL_PTR; \
+	if(!MIX_IS_VIDEOCONFIGPARAMSENC(obj)) return MIX_RESULT_FAIL; \
+
+#define MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT_PAIR(obj, prop, prop2) \
+	if(!obj || !prop || !prop2 ) return MIX_RESULT_NULL_PTR; \
+	if(!MIX_IS_VIDEOCONFIGPARAMSENC(obj)) return MIX_RESULT_FAIL; \
+
+/* TODO: Add getters and setters for other properties. The following is incomplete */
+
+
+/* Store a private copy of @mime_type on @obj, releasing any previous value.
+ * NULL is rejected, so this setter cannot be used to clear the field --
+ * NOTE(review): mix_videoconfigparamsenc_copy() passes NULL for an unset
+ * source mime type and therefore fails; confirm intended. */
+MIX_RESULT mix_videoconfigparamsenc_set_mime_type(MixVideoConfigParamsEnc * obj,
+		const gchar * mime_type) {
+
+	MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj);
+
+	if (!mime_type) {
+		return MIX_RESULT_NULL_PTR;
+	}
+
+	LOG_I( "mime_type = %s %x\n",
+			mime_type, (unsigned int)mime_type);
+
+	/* release the previously stored string before replacing it */
+	if (obj->mime_type) {
+		if (obj->mime_type->str)
+			g_string_free(obj->mime_type, TRUE);
+		else
+			g_string_free(obj->mime_type, FALSE);
+	}
+
+
+	LOG_I( "mime_type = %s %x\n",
+			mime_type, (unsigned int)mime_type);
+
+	/* g_string_new copies the input, so the caller keeps ownership of it */
+	obj->mime_type = g_string_new(mime_type);
+	if (!obj->mime_type) {
+		return MIX_RESULT_NO_MEMORY;
+	}
+
+
+	LOG_I( "mime_type = %s obj->mime_type->str = %s\n",
+			mime_type, obj->mime_type->str);
+
+	return MIX_RESULT_SUCCESS;
+}
+
+/* Return a newly allocated copy of the stored mime type in *@mime_type
+ * (NULL if none is set). The caller owns the returned string and must
+ * g_free() it. */
+MIX_RESULT mix_videoconfigparamsenc_get_mime_type(MixVideoConfigParamsEnc * obj,
+		gchar ** mime_type) {
+	MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, mime_type);
+
+	if (!obj->mime_type) {
+		*mime_type = NULL;
+		return MIX_RESULT_SUCCESS;
+	}
+	*mime_type = g_strdup(obj->mime_type->str);
+	if (!*mime_type) {
+		return MIX_RESULT_NO_MEMORY;
+	}
+
+	return MIX_RESULT_SUCCESS;
+}
+
+/* Frame rate is stored as a num/denom rational (defaults 30/1 in _init). */
+MIX_RESULT mix_videoconfigparamsenc_set_frame_rate(MixVideoConfigParamsEnc * obj,
+		guint frame_rate_num, guint frame_rate_denom) {
+	MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj);
+	obj->frame_rate_num = frame_rate_num;
+	obj->frame_rate_denom = frame_rate_denom;
+	return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsenc_get_frame_rate(MixVideoConfigParamsEnc * obj,
+		guint * frame_rate_num, guint * frame_rate_denom) {
+	MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT_PAIR (obj, frame_rate_num, frame_rate_denom);
+	*frame_rate_num = obj->frame_rate_num;
+	*frame_rate_denom = obj->frame_rate_denom;
+	return MIX_RESULT_SUCCESS;
+}
+
+/* Picture resolution in pixels; no validation is performed here. */
+MIX_RESULT mix_videoconfigparamsenc_set_picture_res(MixVideoConfigParamsEnc * obj,
+		guint picture_width, guint picture_height) {
+	MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj);
+	obj->picture_width = picture_width;
+	obj->picture_height = picture_height;
+	return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsenc_get_picture_res(MixVideoConfigParamsEnc * obj,
+		guint * picture_width, guint * picture_height) {
+	MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT_PAIR (obj, picture_width, picture_height);
+	*picture_width = obj->picture_width;
+	*picture_height = obj->picture_height;
+	return MIX_RESULT_SUCCESS;
+}
+
+/* Target encode format (MixEncodeTargetFormat enum). */
+MIX_RESULT mix_videoconfigparamsenc_set_encode_format(MixVideoConfigParamsEnc * obj,
+		MixEncodeTargetFormat encode_format) {
+	MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj);
+	obj->encode_format = encode_format;
+	return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsenc_get_encode_format (MixVideoConfigParamsEnc * obj,
+		MixEncodeTargetFormat* encode_format) {
+	MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, encode_format);
+	*encode_format = obj->encode_format;
+	return MIX_RESULT_SUCCESS;
+}
+
+/* Target bitrate -- units not stated here; presumably bits per second
+ * (TODO confirm against the encoder back-end). */
+MIX_RESULT mix_videoconfigparamsenc_set_bit_rate (MixVideoConfigParamsEnc * obj,
+		guint bitrate) {
+	MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj);
+	obj->bitrate= bitrate;
+	return MIX_RESULT_SUCCESS;
+
+}
+
+MIX_RESULT mix_videoconfigparamsenc_get_bit_rate (MixVideoConfigParamsEnc * obj,
+		guint *bitrate) {
+	MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, bitrate);
+	*bitrate = obj->bitrate;
+	return MIX_RESULT_SUCCESS;
+}
+
+/* Initial quantization parameter (default 15 from _init). */
+MIX_RESULT mix_videoconfigparamsenc_set_init_qp (MixVideoConfigParamsEnc * obj,
+		guint initial_qp) {
+	MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj);
+	obj->initial_qp = initial_qp;
+	return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsenc_get_init_qp (MixVideoConfigParamsEnc * obj,
+		guint *initial_qp) {
+	MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, initial_qp);
+	*initial_qp = obj->initial_qp;
+	return MIX_RESULT_SUCCESS;
+
+}
+
+/* Minimum quantization parameter (default 0 from _init). */
+MIX_RESULT mix_videoconfigparamsenc_set_min_qp (MixVideoConfigParamsEnc * obj,
+		guint min_qp) {
+	MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj);
+	obj->min_qp = min_qp;
+	return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsenc_get_min_qp(MixVideoConfigParamsEnc * obj,
+		guint *min_qp) {
+	MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, min_qp);
+	*min_qp = obj->min_qp;
+
+	return MIX_RESULT_SUCCESS;
+}
+
+/* Intra (key) frame period in frames (default 30 from _init). */
+MIX_RESULT mix_videoconfigparamsenc_set_intra_period (MixVideoConfigParamsEnc * obj,
+		guint intra_period) {
+	MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj);
+	obj->intra_period = intra_period;
+
+	return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsenc_get_intra_period (MixVideoConfigParamsEnc * obj,
+		guint *intra_period) {
+	MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, intra_period);
+	*intra_period = obj->intra_period;
+
+	return MIX_RESULT_SUCCESS;
+}
+
+/* Number of buffers in the MixBuffer pool. */
+MIX_RESULT mix_videoconfigparamsenc_set_buffer_pool_size(
+		MixVideoConfigParamsEnc * obj, guint bufpoolsize) {
+
+	MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj);
+
+	obj->mixbuffer_pool_size = bufpoolsize;
+	return MIX_RESULT_SUCCESS;
+
+}
+
+MIX_RESULT mix_videoconfigparamsenc_get_buffer_pool_size(
+		MixVideoConfigParamsEnc * obj, guint *bufpoolsize) {
+
+	MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, bufpoolsize);
+	*bufpoolsize = obj->mixbuffer_pool_size;
+	return MIX_RESULT_SUCCESS;
+}
+
+/* Enable/disable shared-buffer mode (default FALSE from _init). */
+MIX_RESULT mix_videoconfigparamsenc_set_share_buf_mode (
+		MixVideoConfigParamsEnc * obj, gboolean share_buf_mod) {
+	MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj);
+
+	obj->share_buf_mode = share_buf_mod;
+	return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsenc_get_share_buf_mode(MixVideoConfigParamsEnc * obj,
+		gboolean *share_buf_mod) {
+	MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, share_buf_mod);
+
+	*share_buf_mod = obj->share_buf_mode;
+	return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsenc_set_ci_frame_info(MixVideoConfigParamsEnc * obj,
+ gulong * ci_frame_id, guint ci_frame_num) {
+ MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj);
+
+ /* Release any previously stored id array FIRST. Fixes a leak when the
+ * caller passes NULL/0, and guarantees no dangling pointer on any path. */
+ if (obj->ci_frame_id) {
+ g_free (obj->ci_frame_id);
+ obj->ci_frame_id = NULL;
+ }
+ obj->ci_frame_num = 0;
+
+ if (!ci_frame_id || !ci_frame_num) {
+ return MIX_RESULT_SUCCESS; /* clear request: leave fields NULL/0 */
+ }
+
+ guint size = ci_frame_num * sizeof (gulong);
+ obj->ci_frame_id = g_malloc (size);
+ if (!(obj->ci_frame_id)) {
+ return MIX_RESULT_NO_MEMORY; /* fields already consistent (NULL/0) */
+ }
+
+ obj->ci_frame_num = ci_frame_num; /* only set count once storage exists */
+ memcpy (obj->ci_frame_id, ci_frame_id, size); /* deep copy caller's array */
+ return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsenc_get_ci_frame_info (MixVideoConfigParamsEnc * obj,
+ gulong * *ci_frame_id, guint *ci_frame_num) {
+ MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT_PAIR (obj, ci_frame_id, ci_frame_num);
+
+ *ci_frame_num = obj->ci_frame_num;
+
+ if (!obj->ci_frame_id) {
+ *ci_frame_id = NULL; /* nothing stored */
+ return MIX_RESULT_SUCCESS;
+ }
+
+ if (obj->ci_frame_num) {
+ *ci_frame_id = g_malloc (obj->ci_frame_num * sizeof (gulong)); /* deep copy: caller must g_free() */
+
+ if (!*ci_frame_id) {
+ return MIX_RESULT_NO_MEMORY;
+ }
+
+ memcpy (*ci_frame_id, obj->ci_frame_id, obj->ci_frame_num * sizeof (gulong));
+
+ } else {
+ *ci_frame_id = NULL; /* stored array exists but count is 0 */
+ }
+
+ return MIX_RESULT_SUCCESS;
+}
+
+
+MIX_RESULT mix_videoconfigparamsenc_set_drawable (MixVideoConfigParamsEnc * obj,
+ gulong draw) {
+
+ MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj);
+ obj->draw = draw; /* NOTE(review): presumably an X11 Drawable handle — confirm with callers */
+ return MIX_RESULT_SUCCESS;
+
+}
+
+MIX_RESULT mix_videoconfigparamsenc_get_drawable (MixVideoConfigParamsEnc * obj,
+ gulong *draw) {
+
+ MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, draw);
+ *draw = obj->draw;
+ return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsenc_set_need_display (
+ MixVideoConfigParamsEnc * obj, gboolean need_display) {
+ MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj);
+
+ obj->need_display = need_display; /* whether raw frames should also be displayed */
+ return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsenc_get_need_display(MixVideoConfigParamsEnc * obj,
+ gboolean *need_display) {
+ MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, need_display);
+
+ *need_display = obj->need_display;
+ return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsenc_set_rate_control(MixVideoConfigParamsEnc * obj,
+ MixRateControl rate_control) {
+ MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj);
+ obj->rate_control = rate_control; /* e.g. CBR/VBR selection */
+ return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsenc_get_rate_control(MixVideoConfigParamsEnc * obj,
+ MixRateControl * rate_control) {
+ MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, rate_control);
+ *rate_control = obj->rate_control;
+ return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsenc_set_raw_format (MixVideoConfigParamsEnc * obj,
+ MixRawTargetFormat raw_format) {
+ MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj);
+ obj->raw_format = raw_format; /* uncompressed input pixel format */
+ return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsenc_get_raw_format (MixVideoConfigParamsEnc * obj,
+ MixRawTargetFormat * raw_format) {
+ MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, raw_format);
+ *raw_format = obj->raw_format;
+ return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsenc_set_profile (MixVideoConfigParamsEnc * obj,
+ MixProfile profile) {
+ MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj);
+ obj->profile = profile; /* codec profile (codec-specific meaning) */
+ return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsenc_get_profile (MixVideoConfigParamsEnc * obj,
+ MixProfile * profile) {
+ MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, profile);
+ *profile = obj->profile;
+ return MIX_RESULT_SUCCESS;
+}
+
diff --git a/mix_video/src/mixvideoconfigparamsenc.h b/mix_video/src/mixvideoconfigparamsenc.h
new file mode 100644
index 0000000..6a1dfff
--- /dev/null
+++ b/mix_video/src/mixvideoconfigparamsenc.h
@@ -0,0 +1,254 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel's prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+#ifndef __MIX_VIDEOCONFIGPARAMSENC_H__
+#define __MIX_VIDEOCONFIGPARAMSENC_H__
+
+#include <mixvideoconfigparams.h>
+#include "mixvideodef.h"
+
+/**
+ * MIX_TYPE_VIDEOCONFIGPARAMSENC:
+ *
+ * Get type of class.
+ */
+#define MIX_TYPE_VIDEOCONFIGPARAMSENC (mix_videoconfigparamsenc_get_type ())
+
+/**
+ * MIX_VIDEOCONFIGPARAMSENC:
+ * @obj: object to be type-casted.
+ */
+#define MIX_VIDEOCONFIGPARAMSENC(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_VIDEOCONFIGPARAMSENC, MixVideoConfigParamsEnc))
+
+/**
+ * MIX_IS_VIDEOCONFIGPARAMSENC:
+ * @obj: an object.
+ *
+ * Checks if the given object is an instance of #MixParams
+ */
+#define MIX_IS_VIDEOCONFIGPARAMSENC(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEOCONFIGPARAMSENC))
+
+/**
+ * MIX_VIDEOCONFIGPARAMSENC_CLASS:
+ * @klass: class to be type-casted.
+ */
+#define MIX_VIDEOCONFIGPARAMSENC_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_VIDEOCONFIGPARAMSENC, MixVideoConfigParamsEncClass))
+
+/**
+ * MIX_IS_VIDEOCONFIGPARAMSENC_CLASS:
+ * @klass: a class.
+ *
+ * Checks if the given class is #MixParamsClass
+ */
+#define MIX_IS_VIDEOCONFIGPARAMSENC_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEOCONFIGPARAMSENC))
+
+/**
+ * MIX_VIDEOCONFIGPARAMSENC_GET_CLASS:
+ * @obj: a #MixParams object.
+ *
+ * Get the class instance of the object.
+ */
+#define MIX_VIDEOCONFIGPARAMSENC_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEOCONFIGPARAMSENC, MixVideoConfigParamsEncClass))
+
+typedef struct _MixVideoConfigParamsEnc MixVideoConfigParamsEnc;
+typedef struct _MixVideoConfigParamsEncClass MixVideoConfigParamsEncClass;
+
+/**
+ * MixVideoConfigParamsEnc:
+ *
+ * MI-X VideoConfig Parameter object
+ */
+struct _MixVideoConfigParamsEnc {
+ /*< public > */
+ MixVideoConfigParams parent;
+
+ /*< public > */
+ //MixIOVec header;
+
+ /* the type of the following members will be changed after MIX API doc is ready */
+
+ MixProfile profile; /* codec profile */
+ MixRawTargetFormat raw_format; /* uncompressed input pixel format */
+ MixRateControl rate_control; /* rate-control mode */
+
+ guint bitrate; /* target bitrate, bits per second */
+ guint frame_rate_num; /* frame rate = frame_rate_num / frame_rate_denom */
+ guint frame_rate_denom;
+ guint initial_qp; /* starting quantization parameter */
+ guint min_qp; /* lower QP bound */
+ guint intra_period; /* frames between intra (key) frames */
+ guint16 picture_width; /* NOTE(review): setters take guint — values > 65535 truncate */
+ guint16 picture_height;
+
+ GString * mime_type; /* owned; freed/replaced by the mime_type setter */
+ MixEncodeTargetFormat encode_format; /* compressed output format */
+
+ guint mixbuffer_pool_size; /* number of MixBuffers to pre-allocate */
+
+ gboolean share_buf_mode; /* TRUE: share input buffers with the app */
+
+ gulong * ci_frame_id; /* owned array of CI frame ids, length ci_frame_num */
+ guint ci_frame_num;
+
+ gulong draw; /* drawable handle used when need_display is TRUE */
+ gboolean need_display; /* display raw frames as well as encoding them */
+
+ void *reserved1; /* reserved for future use / ABI stability */
+ void *reserved2;
+ void *reserved3;
+ void *reserved4;
+};
+
+/**
+ * MixVideoConfigParamsEncClass:
+ *
+ * MI-X VideoConfig object class
+ */
+struct _MixVideoConfigParamsEncClass {
+ /*< public > */
+ MixVideoConfigParamsClass parent_class;
+
+ /* class members */
+};
+
+/**
+ * mix_videoconfigparamsenc_get_type:
+ * @returns: type
+ *
+ * Get the type of object.
+ */
+GType mix_videoconfigparamsenc_get_type(void);
+
+/**
+ * mix_videoconfigparamsenc_new:
+ * @returns: A newly allocated instance of #MixVideoConfigParamsEnc
+ *
+ * Use this method to create new instance of #MixVideoConfigParamsEnc
+ */
+MixVideoConfigParamsEnc *mix_videoconfigparamsenc_new(void);
+/**
+ * mix_videoconfigparamsenc_ref:
+ * @mix: object to add reference
+ * @returns: the MixVideoConfigParamsEnc instance where reference count has been increased.
+ *
+ * Add reference count.
+ */
+MixVideoConfigParamsEnc *mix_videoconfigparamsenc_ref(MixVideoConfigParamsEnc * mix);
+
+/**
+ * mix_videoconfigparamsenc_unref:
+ * @obj: object to unref.
+ *
+ * Decrement reference count of the object.
+ */
+#define mix_videoconfigparamsenc_unref(obj) mix_params_unref(MIX_PARAMS(obj))
+
+/* Class Methods */
+
+
+MIX_RESULT mix_videoconfigparamsenc_set_mime_type(MixVideoConfigParamsEnc * obj,
+ const gchar * mime_type);
+
+MIX_RESULT mix_videoconfigparamsenc_get_mime_type(MixVideoConfigParamsEnc * obj,
+ gchar ** mime_type);
+
+MIX_RESULT mix_videoconfigparamsenc_set_frame_rate(MixVideoConfigParamsEnc * obj,
+ guint frame_rate_num, guint frame_rate_denom);
+
+MIX_RESULT mix_videoconfigparamsenc_get_frame_rate(MixVideoConfigParamsEnc * obj,
+ guint * frame_rate_num, guint * frame_rate_denom);
+
+MIX_RESULT mix_videoconfigparamsenc_set_picture_res(MixVideoConfigParamsEnc * obj,
+ guint picture_width, guint picture_height);
+
+MIX_RESULT mix_videoconfigparamsenc_get_picture_res(MixVideoConfigParamsEnc * obj,
+ guint * picture_width, guint * picture_height);
+
+MIX_RESULT mix_videoconfigparamsenc_set_encode_format (MixVideoConfigParamsEnc * obj,
+ MixEncodeTargetFormat encode_format);
+
+MIX_RESULT mix_videoconfigparamsenc_get_encode_format (MixVideoConfigParamsEnc * obj,
+ MixEncodeTargetFormat * encode_format);
+
+MIX_RESULT mix_videoconfigparamsenc_set_bit_rate (MixVideoConfigParamsEnc * obj,
+ guint bps);
+
+MIX_RESULT mix_videoconfigparamsenc_get_bit_rate (MixVideoConfigParamsEnc * obj,
+ guint *bps);
+
+MIX_RESULT mix_videoconfigparamsenc_set_init_qp (MixVideoConfigParamsEnc * obj,
+ guint initial_qp);
+
+MIX_RESULT mix_videoconfigparamsenc_get_init_qp (MixVideoConfigParamsEnc * obj,
+ guint *initial_qp);
+
+MIX_RESULT mix_videoconfigparamsenc_set_min_qp (MixVideoConfigParamsEnc * obj,
+ guint min_qp);
+
+MIX_RESULT mix_videoconfigparamsenc_get_min_qp(MixVideoConfigParamsEnc * obj,
+ guint *min_qp);
+
+MIX_RESULT mix_videoconfigparamsenc_set_intra_period (MixVideoConfigParamsEnc * obj,
+ guint intra_period);
+
+MIX_RESULT mix_videoconfigparamsenc_get_intra_period (MixVideoConfigParamsEnc * obj,
+ guint *intra_period);
+
+MIX_RESULT mix_videoconfigparamsenc_set_buffer_pool_size(MixVideoConfigParamsEnc * obj,
+ guint bufpoolsize);
+
+MIX_RESULT mix_videoconfigparamsenc_get_buffer_pool_size(MixVideoConfigParamsEnc * obj,
+ guint *bufpoolsize);
+
+MIX_RESULT mix_videoconfigparamsenc_set_share_buf_mode (MixVideoConfigParamsEnc * obj,
+ gboolean share_buf_mod);
+
+MIX_RESULT mix_videoconfigparamsenc_get_share_buf_mode(MixVideoConfigParamsEnc * obj,
+ gboolean *share_buf_mod);
+
+MIX_RESULT mix_videoconfigparamsenc_set_ci_frame_info(MixVideoConfigParamsEnc * obj,
+ gulong * ci_frame_id, guint ci_frame_num);
+
+MIX_RESULT mix_videoconfigparamsenc_get_ci_frame_info (MixVideoConfigParamsEnc * obj,
+ gulong * *ci_frame_id, guint *ci_frame_num);
+
+MIX_RESULT mix_videoconfigparamsenc_set_drawable (MixVideoConfigParamsEnc * obj,
+ gulong draw);
+
+MIX_RESULT mix_videoconfigparamsenc_get_drawable (MixVideoConfigParamsEnc * obj,
+ gulong *draw);
+
+MIX_RESULT mix_videoconfigparamsenc_set_need_display (
+ MixVideoConfigParamsEnc * obj, gboolean need_display);
+
+MIX_RESULT mix_videoconfigparamsenc_get_need_display(MixVideoConfigParamsEnc * obj,
+ gboolean *need_display);
+
+
+MIX_RESULT mix_videoconfigparamsenc_set_rate_control(MixVideoConfigParamsEnc * obj,
+ MixRateControl rcmode);
+
+MIX_RESULT mix_videoconfigparamsenc_get_rate_control(MixVideoConfigParamsEnc * obj,
+ MixRateControl * rcmode);
+
+MIX_RESULT mix_videoconfigparamsenc_set_raw_format (MixVideoConfigParamsEnc * obj,
+ MixRawTargetFormat raw_format);
+
+MIX_RESULT mix_videoconfigparamsenc_get_raw_format (MixVideoConfigParamsEnc * obj,
+ MixRawTargetFormat * raw_format);
+
+MIX_RESULT mix_videoconfigparamsenc_set_profile (MixVideoConfigParamsEnc * obj,
+ MixProfile profile);
+
+MIX_RESULT mix_videoconfigparamsenc_get_profile (MixVideoConfigParamsEnc * obj,
+ MixProfile * profile);
+
+/* TODO: Add getters and setters for other properties */
+
+#endif /* __MIX_VIDEOCONFIGPARAMSENC_H__ */
+
diff --git a/mix_video/src/mixvideoconfigparamsenc_h264.c b/mix_video/src/mixvideoconfigparamsenc_h264.c
new file mode 100644
index 0000000..3bfa52e
--- /dev/null
+++ b/mix_video/src/mixvideoconfigparamsenc_h264.c
@@ -0,0 +1,322 @@
+/*
+INTEL CONFIDENTIAL
+Copyright 2009 Intel Corporation All Rights Reserved.
+The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel's prior express written permission.
+
+No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+*/
+
+/**
+* SECTION:mixvideoconfigparamsenc_h264
+* @short_description: VideoConfig parameters
+*
+* A data object which stores videoconfig specific parameters.
+*/
+
+#include "mixvideolog.h"
+#include "mixvideoconfigparamsenc_h264.h"
+
+#define MDEBUG
+
+
+static GType _mix_videoconfigparamsenc_h264_type = 0;
+static MixVideoConfigParamsEncClass *parent_class = NULL;
+
+#define _do_init { _mix_videoconfigparamsenc_h264_type = g_define_type_id; }
+
+gboolean mix_videoconfigparamsenc_h264_copy (MixParams * target,
+ const MixParams * src);
+MixParams *mix_videoconfigparamsenc_h264_dup (const MixParams * obj);
+gboolean mix_videoconfigparamsencenc_h264_equal (MixParams * first,
+ MixParams * second);
+static void mix_videoconfigparamsenc_h264_finalize (MixParams * obj);
+
+G_DEFINE_TYPE_WITH_CODE (MixVideoConfigParamsEncH264, /* The name of the new type, in Camel case */
+ mix_videoconfigparamsenc_h264, /* The name of the new type in lowercase */
+ MIX_TYPE_VIDEOCONFIGPARAMSENC, /* The GType of the parent type */
+ _do_init);
+
+void
+_mix_videoconfigparamsenc_h264_initialize (void)
+{
+ /* the MixParams types need to be class_ref'd once before it can be
+ * done from multiple threads;
+ * see http://bugzilla.gnome.org/show_bug.cgi?id=304551 */
+ g_type_class_ref (mix_videoconfigparamsenc_h264_get_type ()); /* ref intentionally never released */
+}
+
+static void
+mix_videoconfigparamsenc_h264_init (MixVideoConfigParamsEncH264 * self)
+{
+ /* initialize properties here */
+ /* TODO: initialize properties */
+ self->basic_unit_size = 0;
+ self->slice_num = 1; /* default: a single slice per frame */
+ self->disable_deblocking_filter_idc = 0; /* 0 = deblocking not disabled */
+
+ self->delimiter_type = MIX_DELIMITER_LENGTHPREFIX; /* length-prefixed NALs by default */
+
+ self->reserved1 = NULL;
+ self->reserved2 = NULL;
+ self->reserved3 = NULL;
+ self->reserved4 = NULL;
+}
+
+static void
+mix_videoconfigparamsenc_h264_class_init (MixVideoConfigParamsEncH264Class * klass)
+{
+ MixVideoConfigParamsEncClass *this_parent_class =
+ MIX_VIDEOCONFIGPARAMSENC_CLASS (klass);
+ MixParamsClass *this_root_class = MIX_PARAMS_CLASS (this_parent_class);
+
+ /* setup static parent class */
+ parent_class =
+ (MixVideoConfigParamsEncClass *) g_type_class_peek_parent (klass);
+
+ /* install MixParams vtable overrides for this subclass */
+ this_root_class->finalize = mix_videoconfigparamsenc_h264_finalize;
+ this_root_class->copy =
+ (MixParamsCopyFunction) mix_videoconfigparamsenc_h264_copy;
+ this_root_class->dup =
+ (MixParamsDupFunction) mix_videoconfigparamsenc_h264_dup;
+ this_root_class->equal =
+ (MixParamsEqualFunction) mix_videoconfigparamsencenc_h264_equal; /* NOTE(review): doubled "enc" in name — matches definition; consider renaming */
+}
+
+MixVideoConfigParamsEncH264 *
+mix_videoconfigparamsenc_h264_new (void)
+{
+ MixVideoConfigParamsEncH264 *ret = (MixVideoConfigParamsEncH264 *)
+ g_type_create_instance (MIX_TYPE_VIDEOCONFIGPARAMSENC_H264); /* caller releases via mix_videoconfigparamsenc_h264_unref() */
+
+ return ret;
+}
+
+static void
+mix_videoconfigparamsenc_h264_finalize (MixParams * obj)
+{
+ /* Finalizer installed on the MixParams vtable in class_init.
+ * "static" added to match the forward declaration's linkage. */
+ MixParamsClass *root_class = MIX_PARAMS_CLASS (parent_class);
+
+ /* No dynamically allocated members to release yet. */
+
+ /* Chain up parent */
+
+ if (root_class->finalize)
+ {
+ root_class->finalize (obj);
+ }
+}
+
+MixVideoConfigParamsEncH264
+ * mix_videoconfigparamsenc_h264_ref (MixVideoConfigParamsEncH264 * mix)
+{
+ return (MixVideoConfigParamsEncH264 *) mix_params_ref (MIX_PARAMS (mix)); /* bumps MixParams refcount */
+}
+
+/**
+* mix_videoconfigparamsenc_h264_dup:
+* @obj: a #MixVideoConfigParamsEncH264 object
+* @returns: a newly allocated duplicate of the object, or NULL on failure.
+*
+* Create a duplicate of the object.
+*/
+MixParams *
+mix_videoconfigparamsenc_h264_dup (const MixParams * obj)
+{
+ MixParams *ret = NULL; /* stays NULL if obj is not an EncH264 or copy fails */
+
+ if (MIX_IS_VIDEOCONFIGPARAMSENC_H264 (obj))
+ {
+ MixVideoConfigParamsEncH264 *duplicate = mix_videoconfigparamsenc_h264_new ();
+ if (mix_videoconfigparamsenc_h264_copy
+ (MIX_PARAMS (duplicate), MIX_PARAMS (obj)))
+ {
+ ret = MIX_PARAMS (duplicate);
+ }
+ else
+ {
+ mix_videoconfigparamsenc_h264_unref (duplicate); /* copy failed: drop the new instance */
+ }
+ }
+ return ret;
+}
+
+/**
+* mix_videoconfigparamsenc_h264_copy:
+* @target: copy to target
+* @src: copy from src
+* @returns: boolean indicates if copy is successful.
+*
+* Copy instance data from @src to @target.
+*/
+gboolean
+mix_videoconfigparamsenc_h264_copy (MixParams * target, const MixParams * src)
+{
+ MixVideoConfigParamsEncH264 *this_target, *this_src;
+ MixParamsClass *root_class;
+
+ LOG_V( "Begin\n");
+
+ if (MIX_IS_VIDEOCONFIGPARAMSENC_H264 (target)
+ && MIX_IS_VIDEOCONFIGPARAMSENC_H264 (src))
+ {
+ // Cast the base object to this child object
+ this_target = MIX_VIDEOCONFIGPARAMSENC_H264 (target);
+ this_src = MIX_VIDEOCONFIGPARAMSENC_H264 (src);
+
+ //add properties (all scalar; no ownership transfer involved)
+ this_target->basic_unit_size = this_src->basic_unit_size;
+ this_target->slice_num = this_src->slice_num;
+ this_target->disable_deblocking_filter_idc = this_src->disable_deblocking_filter_idc;
+ this_target->delimiter_type = this_src->delimiter_type;
+
+
+ // Now chainup base class
+ root_class = MIX_PARAMS_CLASS (parent_class);
+
+ if (root_class->copy)
+ {
+ return root_class->copy (MIX_PARAMS_CAST (target),
+ MIX_PARAMS_CAST (src));
+ }
+ else
+ {
+ return TRUE;
+ }
+ }
+ return FALSE; /* type mismatch: nothing copied */
+}
+
+/**
+* mix_videoconfigparamsencenc_h264_equal:
+* @first: first object to compare
+* @second: second object to compare
+* @returns: boolean indicating whether the two instances are equal.
+*
+* Compare @first and @second for equality, including base class members.
+*/
+gboolean
+mix_videoconfigparamsencenc_h264_equal (MixParams * first, MixParams * second) /* NOTE(review): doubled "enc" in name — used consistently in this file */
+{
+ gboolean ret = FALSE;
+ MixVideoConfigParamsEncH264 *this_first, *this_second;
+
+ if (MIX_IS_VIDEOCONFIGPARAMSENC_H264 (first)
+ && MIX_IS_VIDEOCONFIGPARAMSENC_H264 (second))
+ {
+ // Cast the base object to this child object
+
+ this_first = MIX_VIDEOCONFIGPARAMSENC_H264 (first);
+ this_second = MIX_VIDEOCONFIGPARAMSENC_H264 (second);
+
+ if (this_first->basic_unit_size != this_second->basic_unit_size) {
+ goto not_equal;
+ }
+
+ if (this_first->slice_num != this_second->slice_num) {
+ goto not_equal;
+ }
+
+ if (this_first->disable_deblocking_filter_idc != this_second->disable_deblocking_filter_idc) {
+ goto not_equal;
+ }
+
+ if (this_first->delimiter_type != this_second->delimiter_type) {
+ goto not_equal;
+ }
+
+
+ ret = TRUE;
+
+ not_equal: /* falls through here with ret==TRUE when all members matched */
+
+ if (ret != TRUE) {
+ return ret;
+ }
+
+ /* TODO: add comparison for properties */
+ {
+ // members within this scope equal. chaining up.
+ MixParamsClass *klass = MIX_PARAMS_CLASS (parent_class);
+ if (klass->equal)
+ {
+ ret = klass->equal (first, second);
+ }
+ else
+ {
+ ret = TRUE;
+ }
+ }
+ }
+
+ return ret;
+}
+
+/* TODO: Add getters and setters for properties if any */
+
+#define MIX_VIDEOCONFIGPARAMSENC_H264_SETTER_CHECK_INPUT(obj) \
+ if(!obj) return MIX_RESULT_NULL_PTR; \
+ if(!MIX_IS_VIDEOCONFIGPARAMSENC_H264(obj)) return MIX_RESULT_FAIL; \
+
+#define MIX_VIDEOCONFIGPARAMSENC_H264_GETTER_CHECK_INPUT(obj, prop) \
+ if(!obj || !prop) return MIX_RESULT_NULL_PTR; \
+ if(!MIX_IS_VIDEOCONFIGPARAMSENC_H264(obj)) return MIX_RESULT_FAIL; \
+
+
+MIX_RESULT mix_videoconfigparamsenc_h264_set_bus (MixVideoConfigParamsEncH264 * obj,
+ guint basic_unit_size) {
+ MIX_VIDEOCONFIGPARAMSENC_H264_SETTER_CHECK_INPUT (obj);
+ obj->basic_unit_size = basic_unit_size; /* "bus" = basic unit size */
+ return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsenc_h264_get_bus (MixVideoConfigParamsEncH264 * obj,
+ guint * basic_unit_size) {
+ MIX_VIDEOCONFIGPARAMSENC_H264_GETTER_CHECK_INPUT (obj, basic_unit_size);
+ *basic_unit_size = obj->basic_unit_size;
+ return MIX_RESULT_SUCCESS;
+}
+
+
+MIX_RESULT mix_videoconfigparamsenc_h264_set_dlk (MixVideoConfigParamsEncH264 * obj,
+ guint disable_deblocking_filter_idc) {
+ MIX_VIDEOCONFIGPARAMSENC_H264_SETTER_CHECK_INPUT (obj);
+ obj->disable_deblocking_filter_idc = disable_deblocking_filter_idc; /* NOTE(review): field is guint8 — values > 255 silently truncate */
+ return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsenc_h264_get_dlk (MixVideoConfigParamsEncH264 * obj,
+ guint * disable_deblocking_filter_idc) {
+ MIX_VIDEOCONFIGPARAMSENC_H264_GETTER_CHECK_INPUT (obj, disable_deblocking_filter_idc);
+ *disable_deblocking_filter_idc = obj->disable_deblocking_filter_idc;
+ return MIX_RESULT_SUCCESS;
+}
+
+
+MIX_RESULT mix_videoconfigparamsenc_h264_set_slice_num(MixVideoConfigParamsEncH264 * obj,
+ guint slice_num) {
+ MIX_VIDEOCONFIGPARAMSENC_H264_SETTER_CHECK_INPUT (obj);
+ obj->slice_num = slice_num; /* slices per frame */
+ return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsenc_h264_get_slice_num(MixVideoConfigParamsEncH264 * obj,
+ guint * slice_num) {
+ MIX_VIDEOCONFIGPARAMSENC_H264_GETTER_CHECK_INPUT (obj, slice_num);
+ *slice_num = obj->slice_num;
+ return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsenc_h264_set_delimiter_type (MixVideoConfigParamsEncH264 * obj,
+ MixDelimiterType delimiter_type) {
+ MIX_VIDEOCONFIGPARAMSENC_H264_SETTER_CHECK_INPUT (obj);
+ obj->delimiter_type = delimiter_type; /* NAL delimiter style (e.g. length-prefix) */
+ return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsenc_h264_get_delimiter_type (MixVideoConfigParamsEncH264 * obj,
+ MixDelimiterType * delimiter_type) {
+ MIX_VIDEOCONFIGPARAMSENC_H264_GETTER_CHECK_INPUT (obj, delimiter_type);
+ *delimiter_type = obj->delimiter_type;
+ return MIX_RESULT_SUCCESS;
+}
diff --git a/mix_video/src/mixvideoconfigparamsenc_h264.h b/mix_video/src/mixvideoconfigparamsenc_h264.h
new file mode 100644
index 0000000..c2359dd
--- /dev/null
+++ b/mix_video/src/mixvideoconfigparamsenc_h264.h
@@ -0,0 +1,160 @@
+/*
+INTEL CONFIDENTIAL
+Copyright 2009 Intel Corporation All Rights Reserved.
+The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel's prior express written permission.
+
+No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+*/
+
+#ifndef __MIX_VIDEOCONFIGPARAMSENC_H264_H__
+#define __MIX_VIDEOCONFIGPARAMSENC_H264_H__
+
+#include "mixvideoconfigparamsenc.h"
+#include "mixvideodef.h"
+
+/**
+* MIX_TYPE_VIDEOCONFIGPARAMSENC_H264:
+*
+* Get type of class.
+*/
+#define MIX_TYPE_VIDEOCONFIGPARAMSENC_H264 (mix_videoconfigparamsenc_h264_get_type ())
+
+/**
+* MIX_VIDEOCONFIGPARAMSENC_H264:
+* @obj: object to be type-casted.
+*/
+#define MIX_VIDEOCONFIGPARAMSENC_H264(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_VIDEOCONFIGPARAMSENC_H264, MixVideoConfigParamsEncH264))
+
+/**
+* MIX_IS_VIDEOCONFIGPARAMSENC_H264:
+* @obj: an object.
+*
+* Checks if the given object is an instance of #MixVideoConfigParamsEncH264
+*/
+#define MIX_IS_VIDEOCONFIGPARAMSENC_H264(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEOCONFIGPARAMSENC_H264))
+
+/**
+* MIX_VIDEOCONFIGPARAMSENC_H264_CLASS:
+* @klass: class to be type-casted.
+*/
+#define MIX_VIDEOCONFIGPARAMSENC_H264_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_VIDEOCONFIGPARAMSENC_H264, MixVideoConfigParamsEncH264Class))
+
+/**
+* MIX_IS_VIDEOCONFIGPARAMSENC_H264_CLASS:
+* @klass: a class.
+*
+* Checks if the given class is #MixVideoConfigParamsEncH264Class
+*/
+#define MIX_IS_VIDEOCONFIGPARAMSENC_H264_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEOCONFIGPARAMSENC_H264))
+
+/**
+* MIX_VIDEOCONFIGPARAMSENC_H264_GET_CLASS:
+* @obj: a #MixParams object.
+*
+* Get the class instance of the object.
+*/
+#define MIX_VIDEOCONFIGPARAMSENC_H264_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEOCONFIGPARAMSENC_H264, MixVideoConfigParamsEncH264Class))
+
+typedef struct _MixVideoConfigParamsEncH264 MixVideoConfigParamsEncH264;
+typedef struct _MixVideoConfigParamsEncH264Class MixVideoConfigParamsEncH264Class;
+
+/**
+* MixVideoConfigParamsEncH264:
+*
+* MI-X VideoConfig Parameter object
+*/
+struct _MixVideoConfigParamsEncH264
+{
+ /*< public > */
+ MixVideoConfigParamsEnc parent;
+
+ /*< public > */
+
+ /* TODO: Add H.264 configuration parameters */
+ guint basic_unit_size; /* rate-control basic unit size */
+ guint slice_num; /* slices per frame (init default: 1) */
+ guint8 disable_deblocking_filter_idc; /* NOTE(review): setter takes guint — truncates to 8 bits */
+
+ MixDelimiterType delimiter_type; /* NAL delimiter style */
+
+ void *reserved1; /* reserved for future use / ABI stability */
+ void *reserved2;
+ void *reserved3;
+ void *reserved4;
+};
+
+/**
+* MixVideoConfigParamsEncH264Class:
+*
+* MI-X VideoConfig object class
+*/
+struct _MixVideoConfigParamsEncH264Class
+{
+ /*< public > */
+ MixVideoConfigParamsEncClass parent_class;
+
+ /* class members */
+};
+
+/**
+* mix_videoconfigparamsenc_h264_get_type:
+* @returns: type
+*
+* Get the type of object.
+*/
+GType mix_videoconfigparamsenc_h264_get_type (void);
+
+/**
+* mix_videoconfigparamsenc_h264_new:
+* @returns: A newly allocated instance of #MixVideoConfigParamsEncH264
+*
+* Use this method to create new instance of #MixVideoConfigParamsEncH264
+*/
+MixVideoConfigParamsEncH264 *mix_videoconfigparamsenc_h264_new (void);
+/**
+* mix_videoconfigparamsenc_h264_ref:
+* @mix: object to add reference
+* @returns: the MixVideoConfigParamsEncH264 instance where reference count has been increased.
+*
+* Add reference count.
+*/
+MixVideoConfigParamsEncH264
+ * mix_videoconfigparamsenc_h264_ref (MixVideoConfigParamsEncH264 * mix);
+
+/**
+* mix_videoconfigparamsenc_h264_unref:
+* @obj: object to unref.
+*
+* Decrement reference count of the object.
+*/
+#define mix_videoconfigparamsenc_h264_unref(obj) mix_params_unref(MIX_PARAMS(obj))
+
+/* Class Methods */
+
+/* TODO: Add getters and setters for other properties */
+MIX_RESULT mix_videoconfigparamsenc_h264_set_bus (MixVideoConfigParamsEncH264 * obj,
+ guint basic_unit_size);
+
+MIX_RESULT mix_videoconfigparamsenc_h264_get_bus (MixVideoConfigParamsEncH264 * obj,
+ guint * basic_unit_size);
+
+MIX_RESULT mix_videoconfigparamsenc_h264_set_dlk (MixVideoConfigParamsEncH264 * obj,
+ guint disable_deblocking_filter_idc);
+
+MIX_RESULT mix_videoconfigparamsenc_h264_get_dlk (MixVideoConfigParamsEncH264 * obj,
+ guint * disable_deblocking_filter_idc);
+
+MIX_RESULT mix_videoconfigparamsenc_h264_set_slice_num(MixVideoConfigParamsEncH264 * obj,
+ guint slice_num);
+
+MIX_RESULT mix_videoconfigparamsenc_h264_get_slice_num(MixVideoConfigParamsEncH264 * obj,
+ guint * slice_num);
+
+MIX_RESULT mix_videoconfigparamsenc_h264_set_delimiter_type (MixVideoConfigParamsEncH264 * obj,
+ MixDelimiterType delimiter_type);
+
+MIX_RESULT mix_videoconfigparamsenc_h264_get_delimiter_type (MixVideoConfigParamsEncH264 * obj,
+ MixDelimiterType * delimiter_type);
+
+#endif /* __MIX_VIDEOCONFIGPARAMSENC_H264_H__ */
+
diff --git a/mix_video/src/mixvideoconfigparamsenc_mpeg4.c b/mix_video/src/mixvideoconfigparamsenc_mpeg4.c
new file mode 100644
index 0000000..54e47a9
--- /dev/null
+++ b/mix_video/src/mixvideoconfigparamsenc_mpeg4.c
@@ -0,0 +1,300 @@
+/*
+INTEL CONFIDENTIAL
+Copyright 2009 Intel Corporation All Rights Reserved.
+The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+*/
+
+/**
+* SECTION:mixvideoconfigparamsenc_mpeg4
+* @short_description: VideoConfig parameters
+*
+* A data object which stores videoconfig specific parameters.
+*/
+
+#include "mixvideolog.h"
+#include "mixvideoconfigparamsenc_mpeg4.h"
+
+#define MDEBUG
+
+
+static GType _mix_videoconfigparamsenc_mpeg4_type = 0;
+static MixVideoConfigParamsEncClass *parent_class = NULL;
+
+#define _do_init { _mix_videoconfigparamsenc_mpeg4_type = g_define_type_id; }
+
+gboolean mix_videoconfigparamsenc_mpeg4_copy (MixParams * target,
+ const MixParams * src);
+MixParams *mix_videoconfigparamsenc_mpeg4_dup (const MixParams * obj);
+gboolean mix_videoconfigparamsencenc_mpeg4_equal (MixParams * first,
+ MixParams * second);
+static void mix_videoconfigparamsenc_mpeg4_finalize (MixParams * obj);
+
+G_DEFINE_TYPE_WITH_CODE (MixVideoConfigParamsEncMPEG4, /* The name of the new type, in Camel case */
+ mix_videoconfigparamsenc_mpeg4, /* The name of the new type in lowercase */
+ MIX_TYPE_VIDEOCONFIGPARAMSENC, /* The GType of the parent type */
+ _do_init);
+
+void
+_mix_videoconfigparamsenc_mpeg4_initialize (void)
+{
+ /* the MixParams types need to be class_ref'd once before it can be
+ * done from multiple threads;
+ * see http://bugzilla.gnome.org/show_bug.cgi?id=304551 */
+ g_type_class_ref (mix_videoconfigparamsenc_mpeg4_get_type ());
+}
+
+static void
+mix_videoconfigparamsenc_mpeg4_init (MixVideoConfigParamsEncMPEG4 * self)
+{
+  /* Instance initializer: set defaults for the MPEG-4 specific fields. */
+  /* initialize properties here */
+  /* TODO: initialize properties */
+
+  /* NOTE(review): the defaults of 3 for both fixed_vop_time_increment and
+   * profile_and_level_indication look like placeholder values — confirm
+   * against the MPEG-4 profile/level the encoder is expected to target. */
+  self->fixed_vop_time_increment = 3;
+  self->profile_and_level_indication = 3;
+  self->disable_deblocking_filter_idc = 0;
+
+  /* Reserved pointers kept NULL for future ABI-compatible extension. */
+  self->reserved1 = NULL;
+  self->reserved2 = NULL;
+  self->reserved3 = NULL;
+  self->reserved4 = NULL;
+}
+
+static void
+mix_videoconfigparamsenc_mpeg4_class_init (MixVideoConfigParamsEncMPEG4Class * klass)
+{
+  /* Class initializer: cache the parent class and install the MixParams
+   * virtual functions (finalize/copy/dup/equal) for this subclass. */
+  MixVideoConfigParamsEncClass *this_parent_class =
+    MIX_VIDEOCONFIGPARAMSENC_CLASS (klass);
+  MixParamsClass *this_root_class = MIX_PARAMS_CLASS (this_parent_class);
+
+  /* setup static parent class */
+  parent_class =
+    (MixVideoConfigParamsEncClass *) g_type_class_peek_parent (klass);
+
+  this_root_class->finalize = mix_videoconfigparamsenc_mpeg4_finalize;
+  this_root_class->copy =
+    (MixParamsCopyFunction) mix_videoconfigparamsenc_mpeg4_copy;
+  this_root_class->dup =
+    (MixParamsDupFunction) mix_videoconfigparamsenc_mpeg4_dup;
+  this_root_class->equal =
+    /* NOTE(review): "encenc" in this identifier is a typo (doubled "enc");
+     * it is wired consistently with the declaration above, but should be
+     * renamed treewide in a follow-up. */
+    (MixParamsEqualFunction) mix_videoconfigparamsencenc_mpeg4_equal;
+}
+
+MixVideoConfigParamsEncMPEG4 *
+mix_videoconfigparamsenc_mpeg4_new (void)
+{
+  /* Allocate a fresh instance through the GType system.  The caller owns
+   * the initial reference and releases it with
+   * mix_videoconfigparamsenc_mpeg4_unref(). */
+  return (MixVideoConfigParamsEncMPEG4 *)
+      g_type_create_instance (MIX_TYPE_VIDEOCONFIGPARAMSENC_MPEG4);
+}
+
+static void
+mix_videoconfigparamsenc_mpeg4_finalize (MixParams * obj)
+{
+  /* Finalizer: release any resources owned by this instance, then chain
+   * up to the parent class finalizer.
+   * Fix: the function is declared 'static' above but was defined without
+   * it; the definition now matches the declaration explicitly. */
+  /* MixVideoConfigParamsEncMPEG4 *this_obj = MIX_VIDEOCONFIGPARAMSENC_MPEG4 (obj); */
+  MixParamsClass *root_class = MIX_PARAMS_CLASS (parent_class);
+
+  /* TODO: cleanup resources allocated */
+
+  /* Chain up parent */
+
+  if (root_class->finalize)
+  {
+    root_class->finalize (obj);
+  }
+}
+
+/* Increment the reference count of @mix and return it; thin wrapper
+ * around mix_params_ref(). */
+MixVideoConfigParamsEncMPEG4
+  * mix_videoconfigparamsenc_mpeg4_ref (MixVideoConfigParamsEncMPEG4 * mix)
+{
+  return (MixVideoConfigParamsEncMPEG4 *) mix_params_ref (MIX_PARAMS (mix));
+}
+
+/**
+* mix_videoconfigparamsenc_mpeg4_dup:
+* @obj: a #MixVideoConfigParamsEncMPEG4 object
+* @returns: a newly allocated duplicate of the object, or NULL on failure.
+*
+* Copy duplicate of the object.
+*/
+MixParams *
+mix_videoconfigparamsenc_mpeg4_dup (const MixParams * obj)
+{
+  MixParams *ret = NULL;
+
+  if (MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4 (obj))
+  {
+    MixVideoConfigParamsEncMPEG4 *duplicate = mix_videoconfigparamsenc_mpeg4_new ();
+
+    /* Fix: guard against allocation failure — the original passed a
+     * potentially NULL 'duplicate' straight into copy()/unref(). */
+    if (duplicate != NULL)
+    {
+      if (mix_videoconfigparamsenc_mpeg4_copy
+          (MIX_PARAMS (duplicate), MIX_PARAMS (obj)))
+      {
+        ret = MIX_PARAMS (duplicate);
+      }
+      else
+      {
+        mix_videoconfigparamsenc_mpeg4_unref (duplicate);
+      }
+    }
+  }
+  return ret;
+}
+
+/**
+* mix_videoconfigparamsenc_mpeg4_copy:
+* @target: copy to target
+* @src: copy from src
+* @returns: boolean indicates if copy is successful.
+*
+* Copy instance data from @src to @target: the three MPEG-4 specific
+* fields first, then chain up so the base class copies its own fields.
+*/
+gboolean
+mix_videoconfigparamsenc_mpeg4_copy (MixParams * target, const MixParams * src)
+{
+  MixVideoConfigParamsEncMPEG4 *this_target, *this_src;
+  MixParamsClass *root_class;
+
+  LOG_V( "Begin\n");
+
+  /* Both operands must be of this subclass; otherwise report failure. */
+  if (MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4 (target)
+      && MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4 (src))
+    {
+      // Cast the base object to this child object
+      this_target = MIX_VIDEOCONFIGPARAMSENC_MPEG4 (target);
+      this_src = MIX_VIDEOCONFIGPARAMSENC_MPEG4 (src);
+
+      //add properties
+      this_target->profile_and_level_indication= this_src->profile_and_level_indication;
+      this_target->fixed_vop_time_increment= this_src->fixed_vop_time_increment;
+      this_target->disable_deblocking_filter_idc = this_src->disable_deblocking_filter_idc;
+
+      // Now chainup base class
+      root_class = MIX_PARAMS_CLASS (parent_class);
+
+      /* A missing parent copy vfunc is treated as success: this level's
+       * fields were already copied above. */
+      if (root_class->copy)
+	{
+	  return root_class->copy (MIX_PARAMS_CAST (target),
+				   MIX_PARAMS_CAST (src));
+	}
+      else
+	{
+	  return TRUE;
+	}
+    }
+  return FALSE;
+}
+
+/**
+* mix_videoconfigparamsencenc_mpeg4_equal:
+* @first: first object to compare
+* @second: second object to compare
+* @returns: boolean indicating whether the two instances are equal.
+*
+* Deep-compare the MPEG-4 specific fields of two instances, then chain up
+* to the parent class comparison for inherited fields.
+* (NOTE(review): the doubled "encenc" in the name is a typo, kept because
+* the declaration and class_init wiring use the same spelling.)
+*/
+gboolean
+mix_videoconfigparamsencenc_mpeg4_equal (MixParams * first, MixParams * second)
+{
+  gboolean ret = FALSE;
+  MixVideoConfigParamsEncMPEG4 *this_first, *this_second;
+
+  if (MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4 (first)
+      && MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4 (second))
+    {
+      // Cast the base object to this child object
+
+      this_first = MIX_VIDEOCONFIGPARAMSENC_MPEG4 (first);
+      this_second = MIX_VIDEOCONFIGPARAMSENC_MPEG4 (second);
+
+      /* Compare each MPEG-4 specific field; bail out on first mismatch. */
+      if (this_first->profile_and_level_indication!= this_second->profile_and_level_indication) {
+        goto not_equal;
+      }
+
+      if (this_first->fixed_vop_time_increment!= this_second->fixed_vop_time_increment) {
+        goto not_equal;
+      }
+
+      if (this_first->disable_deblocking_filter_idc != this_second->disable_deblocking_filter_idc) {
+        goto not_equal;
+      }
+
+
+      ret = TRUE;
+
+      not_equal:
+
+      if (ret != TRUE) {
+        return ret;
+      }
+
+      /* TODO: add comparison for properties */
+      {
+        // members within this scope equal. chaining up.
+        MixParamsClass *klass = MIX_PARAMS_CLASS (parent_class);
+        if (klass->equal)
+          {
+            ret = klass->equal (first, second);
+          }
+        else
+          {
+            ret = TRUE;
+          }
+      }
+    }
+
+  return ret;
+}
+
+/* TODO: Add getters and setters for properties if any */
+
+#define MIX_VIDEOCONFIGPARAMSENC_MPEG4_SETTER_CHECK_INPUT(obj) \
+ if(!obj) return MIX_RESULT_NULL_PTR; \
+ if(!MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4(obj)) return MIX_RESULT_FAIL; \
+
+#define MIX_VIDEOCONFIGPARAMSENC_MPEG4_GETTER_CHECK_INPUT(obj, prop) \
+ if(!obj || !prop) return MIX_RESULT_NULL_PTR; \
+ if(!MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4(obj)) return MIX_RESULT_FAIL; \
+
+
+MIX_RESULT mix_videoconfigparamsenc_mpeg4_set_profile_level (MixVideoConfigParamsEncMPEG4 * obj,
+	guchar profile_and_level_indication) {
+  /* Store the MPEG-4 profile/level indication byte on @obj.
+   * Input validation is open-coded here (equivalent to the
+   * SETTER_CHECK_INPUT macro used elsewhere in this file). */
+  if (obj == NULL)
+    return MIX_RESULT_NULL_PTR;
+  if (!MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4 (obj))
+    return MIX_RESULT_FAIL;
+
+  obj->profile_and_level_indication = profile_and_level_indication;
+  return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsenc_mpeg4_get_profile_level (MixVideoConfigParamsEncMPEG4 * obj,
+	guchar * profile_and_level_indication) {
+  /* Read back the MPEG-4 profile/level indication byte into the caller's
+   * out-parameter.  Validation is open-coded (equivalent to the
+   * GETTER_CHECK_INPUT macro used elsewhere in this file). */
+  if (obj == NULL || profile_and_level_indication == NULL)
+    return MIX_RESULT_NULL_PTR;
+  if (!MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4 (obj))
+    return MIX_RESULT_FAIL;
+
+  *profile_and_level_indication = obj->profile_and_level_indication;
+  return MIX_RESULT_SUCCESS;
+}
+
+/* Set the fixed VOP time increment on @obj. */
+MIX_RESULT mix_videoconfigparamsenc_mpeg4_set_fixed_vti (MixVideoConfigParamsEncMPEG4 * obj,
+	guint fixed_vop_time_increment) {
+  MIX_VIDEOCONFIGPARAMSENC_MPEG4_SETTER_CHECK_INPUT (obj);
+  obj->fixed_vop_time_increment = fixed_vop_time_increment;
+  return MIX_RESULT_SUCCESS;
+}
+
+/* Get the fixed VOP time increment from @obj into the out-parameter. */
+MIX_RESULT mix_videoconfigparamsenc_mpeg4_get_fixed_vti (MixVideoConfigParamsEncMPEG4 * obj,
+	guint * fixed_vop_time_increment) {
+  MIX_VIDEOCONFIGPARAMSENC_MPEG4_GETTER_CHECK_INPUT (obj, fixed_vop_time_increment);
+  *fixed_vop_time_increment = obj->fixed_vop_time_increment;
+  return MIX_RESULT_SUCCESS;
+}
+
+/* Set the disable_deblocking_filter_idc value on @obj. */
+MIX_RESULT mix_videoconfigparamsenc_mpeg4_set_dlk (MixVideoConfigParamsEncMPEG4 * obj,
+	guint disable_deblocking_filter_idc) {
+  MIX_VIDEOCONFIGPARAMSENC_MPEG4_SETTER_CHECK_INPUT (obj);
+  obj->disable_deblocking_filter_idc = disable_deblocking_filter_idc;
+  return MIX_RESULT_SUCCESS;
+}
+
+/* Get the disable_deblocking_filter_idc value from @obj. */
+MIX_RESULT mix_videoconfigparamsenc_mpeg4_get_dlk (MixVideoConfigParamsEncMPEG4 * obj,
+	guint * disable_deblocking_filter_idc) {
+  MIX_VIDEOCONFIGPARAMSENC_MPEG4_GETTER_CHECK_INPUT (obj, disable_deblocking_filter_idc);
+  *disable_deblocking_filter_idc = obj->disable_deblocking_filter_idc;
+  return MIX_RESULT_SUCCESS;
+}
+
diff --git a/mix_video/src/mixvideoconfigparamsenc_mpeg4.h b/mix_video/src/mixvideoconfigparamsenc_mpeg4.h
new file mode 100644
index 0000000..e6322d5
--- /dev/null
+++ b/mix_video/src/mixvideoconfigparamsenc_mpeg4.h
@@ -0,0 +1,152 @@
+/*
+INTEL CONFIDENTIAL
+Copyright 2009 Intel Corporation All Rights Reserved.
+The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+*/
+
+#ifndef __MIX_VIDEOCONFIGPARAMSENC_MPEG4_H__
+#define __MIX_VIDEOCONFIGPARAMSENC_MPEG4_H__
+
+#include "mixvideoconfigparamsenc.h"
+#include "mixvideodef.h"
+
+/**
+* MIX_TYPE_VIDEOCONFIGPARAMSENC_MPEG4:
+*
+* Get type of class.
+*/
+#define MIX_TYPE_VIDEOCONFIGPARAMSENC_MPEG4 (mix_videoconfigparamsenc_mpeg4_get_type ())
+
+/**
+* MIX_VIDEOCONFIGPARAMSENC_MPEG4:
+* @obj: object to be type-casted.
+*/
+#define MIX_VIDEOCONFIGPARAMSENC_MPEG4(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_VIDEOCONFIGPARAMSENC_MPEG4, MixVideoConfigParamsEncMPEG4))
+
+/**
+* MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4:
+* @obj: an object.
+*
+* Checks if the given object is an instance of #MixVideoConfigParamsEncMPEG4
+*/
+#define MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEOCONFIGPARAMSENC_MPEG4))
+
+/**
+* MIX_VIDEOCONFIGPARAMSENC_MPEG4_CLASS:
+* @klass: class to be type-casted.
+*/
+#define MIX_VIDEOCONFIGPARAMSENC_MPEG4_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_VIDEOCONFIGPARAMSENC_MPEG4, MixVideoConfigParamsEncMPEG4Class))
+
+/**
+* MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4_CLASS:
+* @klass: a class.
+*
+* Checks if the given class is #MixVideoConfigParamsEncMPEG4Class
+*/
+#define MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEOCONFIGPARAMSENC_MPEG4))
+
+/**
+* MIX_VIDEOCONFIGPARAMSENC_MPEG4_GET_CLASS:
+* @obj: a #MixParams object.
+*
+* Get the class instance of the object.
+*/
+#define MIX_VIDEOCONFIGPARAMSENC_MPEG4_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEOCONFIGPARAMSENC_MPEG4, MixVideoConfigParamsEncMPEG4Class))
+
+typedef struct _MixVideoConfigParamsEncMPEG4 MixVideoConfigParamsEncMPEG4;
+typedef struct _MixVideoConfigParamsEncMPEG4Class MixVideoConfigParamsEncMPEG4Class;
+
+/**
+* MixVideoConfigParamsEncMPEG4:
+*
+* MI-X VideoConfig Parameter object
+*/
+struct _MixVideoConfigParamsEncMPEG4
+{
+  /*< public > */
+  MixVideoConfigParamsEnc parent;
+
+  /*< public > */
+
+  /* TODO: Add MPEG-4 configuration parameters */
+
+  /* MPEG-4 profile/level indication byte; default 3 is set in _init() —
+   * TODO confirm intended profile/level value. */
+  guchar profile_and_level_indication;
+  /* Fixed VOP time increment; default 3 set in _init(). */
+  guint fixed_vop_time_increment;
+  /* Deblocking filter control; 0 (enabled) by default in _init(). */
+  guint disable_deblocking_filter_idc;
+
+  /* Reserved for future ABI-compatible extension; always NULL today. */
+  void *reserved1;
+  void *reserved2;
+  void *reserved3;
+  void *reserved4;
+};
+
+/**
+* MixVideoConfigParamsEncMPEG4Class:
+*
+* MI-X VideoConfig object class
+*/
+struct _MixVideoConfigParamsEncMPEG4Class
+{
+ /*< public > */
+ MixVideoConfigParamsEncClass parent_class;
+
+ /* class members */
+};
+
+/**
+* mix_videoconfigparamsenc_mpeg4_get_type:
+* @returns: type
+*
+* Get the type of object.
+*/
+GType mix_videoconfigparamsenc_mpeg4_get_type (void);
+
+/**
+* mix_videoconfigparamsenc_mpeg4_new:
+* @returns: A newly allocated instance of #MixVideoConfigParamsEncMPEG4
+*
+* Use this method to create new instance of #MixVideoConfigParamsEncMPEG4
+*/
+MixVideoConfigParamsEncMPEG4 *mix_videoconfigparamsenc_mpeg4_new (void);
+/**
+* mix_videoconfigparamsenc_mpeg4_ref:
+* @mix: object to add reference
+* @returns: the MixVideoConfigParamsEncMPEG4 instance where reference count has been increased.
+*
+* Add reference count.
+*/
+MixVideoConfigParamsEncMPEG4
+ * mix_videoconfigparamsenc_mpeg4_ref (MixVideoConfigParamsEncMPEG4 * mix);
+
+/**
+* mix_videoconfigparamsenc_mpeg4_unref:
+* @obj: object to unref.
+*
+* Decrement reference count of the object.
+*/
+#define mix_videoconfigparamsenc_mpeg4_unref(obj) mix_params_unref(MIX_PARAMS(obj))
+
+/* Class Methods */
+
+/* TODO: Add getters and setters for other properties */
+
+MIX_RESULT mix_videoconfigparamsenc_mpeg4_set_dlk (MixVideoConfigParamsEncMPEG4 * obj,
+ guint disable_deblocking_filter_idc);
+
+MIX_RESULT mix_videoconfigparamsenc_mpeg4_get_dlk (MixVideoConfigParamsEncMPEG4 * obj,
+ guint * disable_deblocking_filter_idc);
+
+MIX_RESULT mix_videoconfigparamsenc_mpeg4_set_profile_level (MixVideoConfigParamsEncMPEG4 * obj,
+ guchar profile_and_level_indication);
+
+MIX_RESULT mix_videoconfigparamsenc_mpeg4_get_profile_level (MixVideoConfigParamsEncMPEG4 * obj,
+ guchar * profile_and_level_indication);
+
+MIX_RESULT mix_videoconfigparamsenc_mpeg4_set_fixed_vti (MixVideoConfigParamsEncMPEG4 * obj,
+ guint fixed_vop_time_increment);
+
+MIX_RESULT mix_videoconfigparamsenc_mpeg4_get_fixed_vti (MixVideoConfigParamsEncMPEG4 * obj,
+ guint * fixed_vop_time_increment);
+
+#endif /* __MIX_VIDEOCONFIGPARAMSENC_MPEG4_H__ */
diff --git a/mix_video/src/mixvideoconfigparamsenc_preview.c b/mix_video/src/mixvideoconfigparamsenc_preview.c
new file mode 100644
index 0000000..ea0aaa1
--- /dev/null
+++ b/mix_video/src/mixvideoconfigparamsenc_preview.c
@@ -0,0 +1,222 @@
+/*
+INTEL CONFIDENTIAL
+Copyright 2009 Intel Corporation All Rights Reserved.
+The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+*/
+
+/**
+* SECTION:mixvideoconfigparamsenc_preview
+* @short_description: VideoConfig parameters
+*
+* A data object which stores videoconfig specific parameters.
+*/
+
+#include "mixvideolog.h"
+#include "mixvideoconfigparamsenc_preview.h"
+
+#define MDEBUG
+
+
+static GType _mix_videoconfigparamsenc_preview_type = 0;
+static MixVideoConfigParamsEncClass *parent_class = NULL;
+
+#define _do_init { _mix_videoconfigparamsenc_preview_type = g_define_type_id; }
+
+gboolean mix_videoconfigparamsenc_preview_copy (MixParams * target,
+ const MixParams * src);
+MixParams *mix_videoconfigparamsenc_preview_dup (const MixParams * obj);
+gboolean mix_videoconfigparamsencenc_preview_equal (MixParams * first,
+ MixParams * second);
+static void mix_videoconfigparamsenc_preview_finalize (MixParams * obj);
+
+G_DEFINE_TYPE_WITH_CODE (MixVideoConfigParamsEncPreview, /* The name of the new type, in Camel case */
+ mix_videoconfigparamsenc_preview, /* The name of the new type in lowercase */
+ MIX_TYPE_VIDEOCONFIGPARAMSENC, /* The GType of the parent type */
+ _do_init);
+
+void
+_mix_videoconfigparamsenc_preview_initialize (void)
+{
+ /* the MixParams types need to be class_ref'd once before it can be
+ * done from multiple threads;
+ * see http://bugzilla.gnome.org/show_bug.cgi?id=304551 */
+ g_type_class_ref (mix_videoconfigparamsenc_preview_get_type ());
+}
+
+static void
+mix_videoconfigparamsenc_preview_init (MixVideoConfigParamsEncPreview * self)
+{
+  /* Instance initializer: this subclass adds no configurable fields of
+   * its own, only reserved extension pointers. */
+  /* initialize properties here */
+  /* TODO: initialize properties */
+
+  self->reserved1 = NULL;
+  self->reserved2 = NULL;
+  self->reserved3 = NULL;
+  self->reserved4 = NULL;
+}
+
+static void
+mix_videoconfigparamsenc_preview_class_init (MixVideoConfigParamsEncPreviewClass * klass)
+{
+  /* Class initializer: cache the parent class and install the MixParams
+   * virtual functions (finalize/copy/dup/equal) for this subclass. */
+  MixVideoConfigParamsEncClass *this_parent_class =
+    MIX_VIDEOCONFIGPARAMSENC_CLASS (klass);
+  MixParamsClass *this_root_class = MIX_PARAMS_CLASS (this_parent_class);
+
+  /* setup static parent class */
+  parent_class =
+    (MixVideoConfigParamsEncClass *) g_type_class_peek_parent (klass);
+
+  this_root_class->finalize = mix_videoconfigparamsenc_preview_finalize;
+  this_root_class->copy =
+    (MixParamsCopyFunction) mix_videoconfigparamsenc_preview_copy;
+  this_root_class->dup =
+    (MixParamsDupFunction) mix_videoconfigparamsenc_preview_dup;
+  this_root_class->equal =
+    /* NOTE(review): "encenc" here is a typo (doubled "enc"), consistent
+     * with the declaration above; rename treewide in a follow-up. */
+    (MixParamsEqualFunction) mix_videoconfigparamsencenc_preview_equal;
+}
+
+MixVideoConfigParamsEncPreview *
+mix_videoconfigparamsenc_preview_new (void)
+{
+  /* Allocate a fresh instance through the GType system.  The caller owns
+   * the initial reference and releases it with
+   * mix_videoconfigparamsenc_preview_unref(). */
+  return (MixVideoConfigParamsEncPreview *)
+      g_type_create_instance (MIX_TYPE_VIDEOCONFIGPARAMSENC_PREVIEW);
+}
+
+/* Finalizer: release owned resources, then chain up to the parent class.
+ * NOTE(review): declared 'static' above but defined without it — the
+ * symbol keeps internal linkage, but the definition should carry
+ * 'static' for consistency. */
+void
+mix_videoconfigparamsenc_preview_finalize (MixParams * obj)
+{
+  /* MixVideoConfigParamsEncPreview *this_obj = MIX_VIDEOCONFIGPARAMSENC_PREVIEW (obj); */
+  MixParamsClass *root_class = MIX_PARAMS_CLASS (parent_class);
+
+  /* TODO: cleanup resources allocated */
+
+  /* Chain up parent */
+
+  if (root_class->finalize)
+  {
+    root_class->finalize (obj);
+  }
+}
+
+/* Increment the reference count of @mix and return it; thin wrapper
+ * around mix_params_ref(). */
+MixVideoConfigParamsEncPreview
+  * mix_videoconfigparamsenc_preview_ref (MixVideoConfigParamsEncPreview * mix)
+{
+  return (MixVideoConfigParamsEncPreview *) mix_params_ref (MIX_PARAMS (mix));
+}
+
+/**
+* mix_videoconfigparamsenc_preview_dup:
+* @obj: a #MixVideoConfigParamsEncPreview object
+* @returns: a newly allocated duplicate of the object, or NULL on failure.
+*
+* Copy duplicate of the object.
+*/
+MixParams *
+mix_videoconfigparamsenc_preview_dup (const MixParams * obj)
+{
+  MixParams *ret = NULL;
+
+  if (MIX_IS_VIDEOCONFIGPARAMSENC_PREVIEW (obj))
+  {
+    MixVideoConfigParamsEncPreview *duplicate = mix_videoconfigparamsenc_preview_new ();
+
+    /* Fix: guard against allocation failure — the original passed a
+     * potentially NULL 'duplicate' straight into copy()/unref(). */
+    if (duplicate != NULL)
+    {
+      if (mix_videoconfigparamsenc_preview_copy
+          (MIX_PARAMS (duplicate), MIX_PARAMS (obj)))
+      {
+        ret = MIX_PARAMS (duplicate);
+      }
+      else
+      {
+        mix_videoconfigparamsenc_preview_unref (duplicate);
+      }
+    }
+  }
+  return ret;
+}
+
+/**
+* mix_videoconfigparamsenc_preview_copy:
+* @target: copy to target
+* @src: copy from src
+* @returns: boolean indicates if copy is successful.
+*
+* Copy instance data from @src to @target.  This subclass declares no
+* fields of its own (only reserved pointers), so the work is done by
+* chaining up to the base class copy.
+*/
+gboolean
+mix_videoconfigparamsenc_preview_copy (MixParams * target, const MixParams * src)
+{
+  MixParamsClass *root_class;
+
+  LOG_V( "Begin\n");
+
+  /* Fix: removed 'this_target'/'this_src' locals — they were assigned
+   * but never used, triggering set-but-unused compiler warnings. */
+  if (MIX_IS_VIDEOCONFIGPARAMSENC_PREVIEW (target)
+      && MIX_IS_VIDEOCONFIGPARAMSENC_PREVIEW (src))
+  {
+    // Now chainup base class
+    root_class = MIX_PARAMS_CLASS (parent_class);
+
+    if (root_class->copy)
+    {
+      return root_class->copy (MIX_PARAMS_CAST (target),
+                               MIX_PARAMS_CAST (src));
+    }
+    else
+    {
+      return TRUE;
+    }
+  }
+  return FALSE;
+}
+
+/**
+* mix_videoconfigparamsencenc_preview_equal:
+* @first: first object to compare
+* @second: second object to compare
+* @returns: boolean indicating whether the two instances are equal.
+*
+* Compare two #MixVideoConfigParamsEncPreview instances.  This subclass
+* has no comparable fields of its own, so equality is decided by chaining
+* up to the parent class comparison.
+* (NOTE(review): the doubled "encenc" in the name is a typo, kept because
+* the declaration and class_init wiring use the same spelling.)
+*/
+gboolean
+mix_videoconfigparamsencenc_preview_equal (MixParams * first, MixParams * second)
+{
+  gboolean ret = FALSE;
+  MixVideoConfigParamsEncPreview *this_first, *this_second;
+
+  if (MIX_IS_VIDEOCONFIGPARAMSENC_PREVIEW (first)
+      && MIX_IS_VIDEOCONFIGPARAMSENC_PREVIEW (second))
+  {
+    // Cast the base object to this child object
+
+    /* NOTE(review): these casts are currently unused (no subclass fields
+     * to compare yet) and produce set-but-unused warnings. */
+    this_first = MIX_VIDEOCONFIGPARAMSENC_PREVIEW (first);
+    this_second = MIX_VIDEOCONFIGPARAMSENC_PREVIEW (second);
+
+
+    ret = TRUE;
+
+
+    /* TODO: add comparison for properties */
+    {
+      // members within this scope equal. chaining up.
+      MixParamsClass *klass = MIX_PARAMS_CLASS (parent_class);
+      if (klass->equal)
+      {
+        ret = klass->equal (first, second);
+      }
+      else
+      {
+        ret = TRUE;
+      }
+    }
+  }
+
+  return ret;
+}
+
+/* TODO: Add getters and setters for properties if any */
diff --git a/mix_video/src/mixvideoconfigparamsenc_preview.h b/mix_video/src/mixvideoconfigparamsenc_preview.h
new file mode 100644
index 0000000..f9d3fbe
--- /dev/null
+++ b/mix_video/src/mixvideoconfigparamsenc_preview.h
@@ -0,0 +1,124 @@
+/*
+INTEL CONFIDENTIAL
+Copyright 2009 Intel Corporation All Rights Reserved.
+The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+*/
+
+#ifndef __MIX_VIDEOCONFIGPARAMSENC_PREVIEW_H__
+#define __MIX_VIDEOCONFIGPARAMSENC_PREVIEW_H__
+
+#include "mixvideoconfigparamsenc.h"
+#include "mixvideodef.h"
+
+/**
+* MIX_TYPE_VIDEOCONFIGPARAMSENC_PREVIEW:
+*
+* Get type of class.
+*/
+#define MIX_TYPE_VIDEOCONFIGPARAMSENC_PREVIEW (mix_videoconfigparamsenc_preview_get_type ())
+
+/**
+* MIX_VIDEOCONFIGPARAMSENC_PREVIEW:
+* @obj: object to be type-casted.
+*/
+#define MIX_VIDEOCONFIGPARAMSENC_PREVIEW(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_VIDEOCONFIGPARAMSENC_PREVIEW, MixVideoConfigParamsEncPreview))
+
+/**
+* MIX_IS_VIDEOCONFIGPARAMSENC_PREVIEW:
+* @obj: an object.
+*
+* Checks if the given object is an instance of #MixVideoConfigParamsEncPreview
+*/
+#define MIX_IS_VIDEOCONFIGPARAMSENC_PREVIEW(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEOCONFIGPARAMSENC_PREVIEW))
+
+/**
+* MIX_VIDEOCONFIGPARAMSENC_PREVIEW_CLASS:
+* @klass: class to be type-casted.
+*/
+#define MIX_VIDEOCONFIGPARAMSENC_PREVIEW_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_VIDEOCONFIGPARAMSENC_PREVIEW, MixVideoConfigParamsEncPreviewClass))
+
+/**
+* MIX_IS_VIDEOCONFIGPARAMSENC_PREVIEW_CLASS:
+* @klass: a class.
+*
+* Checks if the given class is #MixVideoConfigParamsEncPreviewClass
+*/
+#define MIX_IS_VIDEOCONFIGPARAMSENC_PREVIEW_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEOCONFIGPARAMSENC_PREVIEW))
+
+/**
+* MIX_VIDEOCONFIGPARAMSENC_PREVIEW_GET_CLASS:
+* @obj: a #MixParams object.
+*
+* Get the class instance of the object.
+*/
+#define MIX_VIDEOCONFIGPARAMSENC_PREVIEW_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEOCONFIGPARAMSENC_PREVIEW, MixVideoConfigParamsEncPreviewClass))
+
+typedef struct _MixVideoConfigParamsEncPreview MixVideoConfigParamsEncPreview;
+typedef struct _MixVideoConfigParamsEncPreviewClass MixVideoConfigParamsEncPreviewClass;
+
+/**
+* MixVideoConfigParamsEncPreview:
+*
+* MI-X VideoConfig Parameter object
+*/
+struct _MixVideoConfigParamsEncPreview
+{
+  /*< public > */
+  MixVideoConfigParamsEnc parent;
+
+  /* Reserved for future ABI-compatible extension; always NULL today
+   * (set in _init()). */
+  void *reserved1;
+  void *reserved2;
+  void *reserved3;
+  void *reserved4;
+};
+
+/**
+* MixVideoConfigParamsEncPreviewClass:
+*
+* MI-X VideoConfig object class
+*/
+struct _MixVideoConfigParamsEncPreviewClass
+{
+ /*< public > */
+ MixVideoConfigParamsEncClass parent_class;
+
+ /* class members */
+};
+
+/**
+* mix_videoconfigparamsenc_preview_get_type:
+* @returns: type
+*
+* Get the type of object.
+*/
+GType mix_videoconfigparamsenc_preview_get_type (void);
+
+/**
+* mix_videoconfigparamsenc_preview_new:
+* @returns: A newly allocated instance of #MixVideoConfigParamsEncPreview
+*
+* Use this method to create new instance of #MixVideoConfigParamsEncPreview
+*/
+MixVideoConfigParamsEncPreview *mix_videoconfigparamsenc_preview_new (void);
+/**
+* mix_videoconfigparamsenc_preview_ref:
+* @mix: object to add reference
+* @returns: the MixVideoConfigParamsEncPreview instance where reference count has been increased.
+*
+* Add reference count.
+*/
+MixVideoConfigParamsEncPreview
+ * mix_videoconfigparamsenc_preview_ref (MixVideoConfigParamsEncPreview * mix);
+
+/**
+* mix_videoconfigparamsenc_preview_unref:
+* @obj: object to unref.
+*
+* Decrement reference count of the object.
+*/
+#define mix_videoconfigparamsenc_preview_unref(obj) mix_params_unref(MIX_PARAMS(obj))
+
+#endif /* __MIX_VIDEOCONFIGPARAMSENC_PREVIEW_H__ */
+
diff --git a/mix_video/src/mixvideodecodeparams.c b/mix_video/src/mixvideodecodeparams.c
new file mode 100644
index 0000000..0bb06e8
--- /dev/null
+++ b/mix_video/src/mixvideodecodeparams.c
@@ -0,0 +1,204 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+/**
+ * SECTION:mixvideodecodeparams
+ * @short_description: VideoDecode parameters
+ *
+ * A data object which stores videodecode specific parameters.
+ */
+
+#include "mixvideodecodeparams.h"
+
+static GType _mix_videodecodeparams_type = 0;
+static MixParamsClass *parent_class = NULL;
+
+#define _do_init { _mix_videodecodeparams_type = g_define_type_id; }
+
+gboolean mix_videodecodeparams_copy(MixParams * target, const MixParams * src);
+MixParams *mix_videodecodeparams_dup(const MixParams * obj);
+gboolean mix_videodecodeparams_equal(MixParams * first, MixParams * second);
+static void mix_videodecodeparams_finalize(MixParams * obj);
+
+G_DEFINE_TYPE_WITH_CODE (MixVideoDecodeParams, mix_videodecodeparams,
+ MIX_TYPE_PARAMS, _do_init);
+
+static void mix_videodecodeparams_init(MixVideoDecodeParams * self) {
+	/* Instance initializer: zero timestamp, no discontinuity, reserved
+	 * extension pointers cleared. */
+	/* initialize properties here */
+
+	/* TODO: initialize properties */
+
+	self->timestamp = 0;
+	self->discontinuity = FALSE;
+	self->reserved1 = NULL;
+	self->reserved2 = NULL;
+	self->reserved3 = NULL;
+	self->reserved4 = NULL;
+}
+
+static void mix_videodecodeparams_class_init(MixVideoDecodeParamsClass * klass) {
+	/* Class initializer: cache the parent class and install the MixParams
+	 * virtual functions (finalize/copy/dup/equal). */
+	MixParamsClass *mixparams_class = MIX_PARAMS_CLASS(klass);
+
+	/* setup static parent class */
+	parent_class = (MixParamsClass *) g_type_class_peek_parent(klass);
+
+	mixparams_class->finalize = mix_videodecodeparams_finalize;
+	mixparams_class->copy = (MixParamsCopyFunction) mix_videodecodeparams_copy;
+	mixparams_class->dup = (MixParamsDupFunction) mix_videodecodeparams_dup;
+	mixparams_class->equal
+			= (MixParamsEqualFunction) mix_videodecodeparams_equal;
+}
+
+MixVideoDecodeParams *
+mix_videodecodeparams_new(void) {
+	/* Instantiate through the GType system; the caller owns the initial
+	 * reference and releases it with mix_videodecodeparams_unref(). */
+	return (MixVideoDecodeParams *) g_type_create_instance(
+			MIX_TYPE_VIDEODECODEPARAMS);
+}
+
+/* Finalizer: release owned resources, then chain up to the parent class.
+ * NOTE(review): declared 'static' above but defined without it — the
+ * symbol keeps internal linkage, but the definition should carry
+ * 'static' for consistency. */
+void mix_videodecodeparams_finalize(MixParams * obj) {
+	/* clean up here. */
+	/* TODO: cleanup resources allocated */
+
+	/* Chain up parent */
+	if (parent_class->finalize) {
+		parent_class->finalize(obj);
+	}
+}
+
+/* Increment the reference count of @mix and return it; thin wrapper
+ * around mix_params_ref(). */
+MixVideoDecodeParams *
+mix_videodecodeparams_ref(MixVideoDecodeParams * mix) {
+	return (MixVideoDecodeParams *) mix_params_ref(MIX_PARAMS(mix));
+}
+
+/**
+ * mix_videodecodeparams_dup:
+ * @obj: a #MixVideoDecodeParams object
+ * @returns: a newly allocated duplicate of the object, or NULL on failure.
+ *
+ * Copy duplicate of the object.
+ */
+MixParams *
+mix_videodecodeparams_dup(const MixParams * obj) {
+	MixParams *ret = NULL;
+
+	if (MIX_IS_VIDEODECODEPARAMS(obj)) {
+		MixVideoDecodeParams *duplicate = mix_videodecodeparams_new();
+
+		/* Fix: guard against allocation failure — the original passed a
+		 * potentially NULL 'duplicate' straight into copy()/unref(). */
+		if (duplicate != NULL) {
+			if (mix_videodecodeparams_copy(MIX_PARAMS(duplicate), MIX_PARAMS(obj))) {
+				ret = MIX_PARAMS(duplicate);
+			} else {
+				mix_videodecodeparams_unref(duplicate);
+			}
+		}
+	}
+	return ret;
+}
+
+/**
+ * mix_videodecodeparams_copy:
+ * @target: copy to target
+ * @src: copy from src
+ * @returns: boolean indicates if copy is successful.
+ *
+ * Copy instance data from @src to @target.
+ * NOTE(review): timestamp and discontinuity are NOT copied yet (see the
+ * TODO below); only base-class fields are duplicated via chain-up.
+ */
+gboolean mix_videodecodeparams_copy(MixParams * target, const MixParams * src) {
+	MixVideoDecodeParams *this_target, *this_src;
+
+	if (MIX_IS_VIDEODECODEPARAMS(target) && MIX_IS_VIDEODECODEPARAMS(src)) {
+		// Cast the base object to this child object
+		this_target = MIX_VIDEODECODEPARAMS(target);
+		this_src = MIX_VIDEODECODEPARAMS(src);
+
+		// TODO: copy properties */
+
+		// Now chainup base class
+		if (parent_class->copy) {
+			return parent_class->copy(MIX_PARAMS_CAST(target), MIX_PARAMS_CAST(
+					src));
+		} else {
+			return TRUE;
+		}
+	}
+	return FALSE;
+}
+
+/**
+ * mix_videodecodeparams_equal:
+ * @first: first object to compare
+ * @second: second object to compare
+ * @returns: boolean indicating whether the two instances are equal.
+ *
+ * Compare two #MixVideoDecodeParams objects, chaining up to the parent
+ * class comparison for inherited fields.
+ * Fixes vs. original: doc header had a truncated name and wrong
+ * description; the unused 'this_first'/'this_second' casts are removed;
+ * the chain-up now consistently uses the 'klass' pointer it just checked.
+ */
+gboolean mix_videodecodeparams_equal(MixParams * first, MixParams * second) {
+	gboolean ret = FALSE;
+
+	if (MIX_IS_VIDEODECODEPARAMS(first) && MIX_IS_VIDEODECODEPARAMS(second)) {
+		/* TODO: compare timestamp/discontinuity here once they are meant
+		 * to participate in equality. */
+		{
+			// members within this scope equal. chaining up.
+			MixParamsClass *klass = MIX_PARAMS_CLASS(parent_class);
+			if (klass->equal)
+				ret = klass->equal(first, second);
+			else
+				ret = TRUE;
+		}
+	}
+
+	return ret;
+}
+
+#define MIX_VIDEODECODEPARAMS_SETTER_CHECK_INPUT(obj) \
+ if(!obj) return MIX_RESULT_NULL_PTR; \
+ if(!MIX_IS_VIDEODECODEPARAMS(obj)) return MIX_RESULT_FAIL; \
+
+#define MIX_VIDEODECODEPARAMS_GETTER_CHECK_INPUT(obj, prop) \
+ if(!obj || !prop) return MIX_RESULT_NULL_PTR; \
+ if(!MIX_IS_VIDEODECODEPARAMS(obj)) return MIX_RESULT_FAIL; \
+
+
+/* TODO: Add getters and setters for properties. */
+
+MIX_RESULT mix_videodecodeparams_set_timestamp(MixVideoDecodeParams * obj,
+		guint64 timestamp) {
+	/* Store the presentation timestamp on @obj.  Validation is
+	 * open-coded here (equivalent to the SETTER_CHECK_INPUT macro). */
+	if (obj == NULL)
+		return MIX_RESULT_NULL_PTR;
+	if (!MIX_IS_VIDEODECODEPARAMS(obj))
+		return MIX_RESULT_FAIL;
+
+	obj->timestamp = timestamp;
+	return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videodecodeparams_get_timestamp(MixVideoDecodeParams * obj,
+		guint64 * timestamp) {
+	/* Read the presentation timestamp into the caller's out-parameter.
+	 * Validation is open-coded (equivalent to GETTER_CHECK_INPUT). */
+	if (obj == NULL || timestamp == NULL)
+		return MIX_RESULT_NULL_PTR;
+	if (!MIX_IS_VIDEODECODEPARAMS(obj))
+		return MIX_RESULT_FAIL;
+
+	*timestamp = obj->timestamp;
+	return MIX_RESULT_SUCCESS;
+}
+
+/* Set the discontinuity flag on @obj. */
+MIX_RESULT mix_videodecodeparams_set_discontinuity(MixVideoDecodeParams * obj,
+		gboolean discontinuity) {
+	MIX_VIDEODECODEPARAMS_SETTER_CHECK_INPUT (obj);
+	obj->discontinuity = discontinuity;
+	return MIX_RESULT_SUCCESS;
+}
+
+/* Get the discontinuity flag from @obj into the out-parameter. */
+MIX_RESULT mix_videodecodeparams_get_discontinuity(MixVideoDecodeParams * obj,
+		gboolean *discontinuity) {
+	MIX_VIDEODECODEPARAMS_GETTER_CHECK_INPUT (obj, discontinuity);
+	*discontinuity = obj->discontinuity;
+	return MIX_RESULT_SUCCESS;
+}
+
diff --git a/mix_video/src/mixvideodecodeparams.h b/mix_video/src/mixvideodecodeparams.h
new file mode 100644
index 0000000..34f1a22
--- /dev/null
+++ b/mix_video/src/mixvideodecodeparams.h
@@ -0,0 +1,139 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+#ifndef __MIX_VIDEODECODEPARAMS_H__
+#define __MIX_VIDEODECODEPARAMS_H__
+
+#include <mixparams.h>
+#include "mixvideodef.h"
+
+/**
+ * MIX_TYPE_VIDEODECODEPARAMS:
+ *
+ * Get type of class.
+ */
+#define MIX_TYPE_VIDEODECODEPARAMS (mix_videodecodeparams_get_type ())
+
+/**
+ * MIX_VIDEODECODEPARAMS:
+ * @obj: object to be type-casted.
+ */
+#define MIX_VIDEODECODEPARAMS(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_VIDEODECODEPARAMS, MixVideoDecodeParams))
+
+/**
+ * MIX_IS_VIDEODECODEPARAMS:
+ * @obj: an object.
+ *
+ * Checks if the given object is an instance of #MixParams
+ */
+#define MIX_IS_VIDEODECODEPARAMS(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEODECODEPARAMS))
+
+/**
+ * MIX_VIDEODECODEPARAMS_CLASS:
+ * @klass: class to be type-casted.
+ */
+#define MIX_VIDEODECODEPARAMS_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_VIDEODECODEPARAMS, MixVideoDecodeParamsClass))
+
+/**
+ * MIX_IS_VIDEODECODEPARAMS_CLASS:
+ * @klass: a class.
+ *
+ * Checks if the given class is #MixParamsClass
+ */
+#define MIX_IS_VIDEODECODEPARAMS_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEODECODEPARAMS))
+
+/**
+ * MIX_VIDEODECODEPARAMS_GET_CLASS:
+ * @obj: a #MixParams object.
+ *
+ * Get the class instance of the object.
+ */
+#define MIX_VIDEODECODEPARAMS_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEODECODEPARAMS, MixVideoDecodeParamsClass))
+
+typedef struct _MixVideoDecodeParams MixVideoDecodeParams;
+typedef struct _MixVideoDecodeParamsClass MixVideoDecodeParamsClass;
+
+/**
+ * MixVideoDecodeParams:
+ *
+ * MI-X VideoDecode Parameter object
+ */
+struct _MixVideoDecodeParams {
+ /*< public > */
+ MixParams parent;
+
+ /*< public > */
+
+ /* TODO: Add properties */
+ guint64 timestamp;
+ gboolean discontinuity;
+
+ void *reserved1;
+ void *reserved2;
+ void *reserved3;
+ void *reserved4;
+};
+
+/**
+ * MixVideoDecodeParamsClass:
+ *
+ * MI-X VideoDecode object class
+ */
+struct _MixVideoDecodeParamsClass {
+ /*< public > */
+ MixParamsClass parent_class;
+
+ /* class members */
+};
+
+/**
+ * mix_videodecodeparams_get_type:
+ * @returns: type
+ *
+ * Get the type of object.
+ */
+GType mix_videodecodeparams_get_type(void);
+
+/**
+ * mix_videodecodeparams_new:
+ * @returns: A newly allocated instance of #MixVideoDecodeParams
+ *
+ * Use this method to create new instance of #MixVideoDecodeParams
+ */
+MixVideoDecodeParams *mix_videodecodeparams_new(void);
+/**
+ * mix_videodecodeparams_ref:
+ * @mix: object to add reference
+ * @returns: the MixVideoDecodeParams instance where reference count has been increased.
+ *
+ * Add reference count.
+ */
+MixVideoDecodeParams *mix_videodecodeparams_ref(MixVideoDecodeParams * mix);
+
+/**
+ * mix_videodecodeparams_unref:
+ * @obj: object to unref.
+ *
+ * Decrement reference count of the object.
+ */
+#define mix_videodecodeparams_unref(obj) mix_params_unref(MIX_PARAMS(obj))
+
+/* Class Methods */
+
+/* TODO: Add getters and setters for properties */
+MIX_RESULT mix_videodecodeparams_set_timestamp(MixVideoDecodeParams * obj,
+ guint64 timestamp);
+MIX_RESULT mix_videodecodeparams_get_timestamp(MixVideoDecodeParams * obj,
+ guint64 * timestamp);
+
+MIX_RESULT mix_videodecodeparams_set_discontinuity(MixVideoDecodeParams * obj,
+ gboolean discontinuity);
+MIX_RESULT mix_videodecodeparams_get_discontinuity(MixVideoDecodeParams * obj,
+ gboolean *discontinuity);
+
+#endif /* __MIX_VIDEODECODEPARAMS_H__ */
diff --git a/mix_video/src/mixvideodef.h b/mix_video/src/mixvideodef.h
new file mode 100644
index 0000000..bb80987
--- /dev/null
+++ b/mix_video/src/mixvideodef.h
@@ -0,0 +1,114 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+#ifndef __MIX_VIDEO_DEF_H__
+#define __MIX_VIDEO_DEF_H__
+#include <mixresult.h>
+
+/*
+ * MI-X video error code
+ */
+typedef enum {
+ MIX_RESULT_FRAME_NOTAVAIL = MIX_RESULT_ERROR_VIDEO_START + 1,
+ MIX_RESULT_EOS,
+ MIX_RESULT_POOLEMPTY,
+ MIX_RESULT_OUTOFSURFACES,
+ MIX_RESULT_DROPFRAME,
+ MIX_RESULT_NOTIMPL,
+ MIX_RESULT_VIDEO_LAST
+} MIX_VIDEO_ERROR_CODE;
+
+/*
+ MixCodecMode
+ */
+typedef enum {
+ MIX_CODEC_MODE_ENCODE = 0,
+ MIX_CODEC_MODE_DECODE,
+ MIX_CODEC_MODE_LAST
+} MixCodecMode;
+
+typedef enum {
+ MIX_FRAMEORDER_MODE_DISPLAYORDER = 0,
+ MIX_FRAMEORDER_MODE_DECODEORDER,
+ MIX_FRAMEORDER_MODE_LAST
+} MixFrameOrderMode;
+
+typedef struct _MixIOVec {
+ guchar *data;
+ gint buffer_size;
+ gint data_size;
+} MixIOVec;
+
+typedef struct _MixRect {
+ gshort x;
+ gshort y;
+ gushort width;
+ gushort height;
+} MixRect;
+
+typedef enum {
+ MIX_STATE_UNINITIALIZED = 0,
+ MIX_STATE_INITIALIZED,
+ MIX_STATE_CONFIGURED,
+ MIX_STATE_LAST
+} MixState;
+
+
+typedef enum
+{
+ MIX_RAW_TARGET_FORMAT_NONE = 0,
+ MIX_RAW_TARGET_FORMAT_YUV420 = 1,
+ MIX_RAW_TARGET_FORMAT_YUV422 = 2,
+ MIX_RAW_TARGET_FORMAT_YUV444 = 4,
+ MIX_RAW_TARGET_FORMAT_PROTECTED = 0x80000000,
+ MIX_RAW_TARGET_FORMAT_LAST
+} MixRawTargetFormat;
+
+
+typedef enum
+{
+ MIX_ENCODE_TARGET_FORMAT_MPEG4 = 0,
+ MIX_ENCODE_TARGET_FORMAT_H263 = 2,
+ MIX_ENCODE_TARGET_FORMAT_H264 = 4,
+ MIX_ENCODE_TARGET_FORMAT_PREVIEW = 8,
+ MIX_ENCODE_TARGET_FORMAT_LAST
+} MixEncodeTargetFormat;
+
+
+typedef enum
+{
+ MIX_RATE_CONTROL_NONE = 1,
+ MIX_RATE_CONTROL_CBR = 2,
+ MIX_RATE_CONTROL_VBR = 4,
+ MIX_RATE_CONTROL_LAST
+} MixRateControl;
+
+typedef enum
+{
+ MIX_PROFILE_MPEG2SIMPLE = 0,
+ MIX_PROFILE_MPEG2MAIN,
+ MIX_PROFILE_MPEG4SIMPLE,
+ MIX_PROFILE_MPEG4ADVANCEDSIMPLE,
+ MIX_PROFILE_MPEG4MAIN,
+ MIX_PROFILE_H264BASELINE,
+ MIX_PROFILE_H264MAIN,
+ MIX_PROFILE_H264HIGH,
+ MIX_PROFILE_VC1SIMPLE,
+ MIX_PROFILE_VC1MAIN,
+ MIX_PROFILE_VC1ADVANCED,
+ MIX_PROFILE_H263BASELINE
+} MixProfile;
+
+typedef enum
+{
+ MIX_DELIMITER_LENGTHPREFIX = 0,
+ MIX_DELIMITER_ANNEXB
+} MixDelimiterType;
+
+
+#endif /* __MIX_VIDEO_DEF_H__ */
diff --git a/mix_video/src/mixvideoencodeparams.c b/mix_video/src/mixvideoencodeparams.c
new file mode 100644
index 0000000..809004d
--- /dev/null
+++ b/mix_video/src/mixvideoencodeparams.c
@@ -0,0 +1,204 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+/**
+ * SECTION:mixvideoencodeparams
+ * @short_description: VideoEncode parameters
+ *
+ * A data object which stores videoencode specific parameters.
+ */
+
+#include "mixvideoencodeparams.h"
+
+static GType _mix_videoencodeparams_type = 0;
+static MixParamsClass *parent_class = NULL;
+
+#define _do_init { _mix_videoencodeparams_type = g_define_type_id; }
+
+gboolean mix_videoencodeparams_copy(MixParams * target, const MixParams * src);
+MixParams *mix_videoencodeparams_dup(const MixParams * obj);
+gboolean mix_videoencodeparams_equal(MixParams * first, MixParams * second);
+static void mix_videoencodeparams_finalize(MixParams * obj);
+
+G_DEFINE_TYPE_WITH_CODE (MixVideoEncodeParams, mix_videoencodeparams,
+ MIX_TYPE_PARAMS, _do_init);
+
+static void mix_videoencodeparams_init(MixVideoEncodeParams * self) {
+ /* initialize properties here */
+
+ /* TODO: initialize properties */
+
+ self->timestamp = 0;
+ self->discontinuity = FALSE;
+ self->reserved1 = NULL;
+ self->reserved2 = NULL;
+ self->reserved3 = NULL;
+ self->reserved4 = NULL;
+}
+
+static void mix_videoencodeparams_class_init(MixVideoEncodeParamsClass * klass) {
+ MixParamsClass *mixparams_class = MIX_PARAMS_CLASS(klass);
+
+ /* setup static parent class */
+ parent_class = (MixParamsClass *) g_type_class_peek_parent(klass);
+
+ mixparams_class->finalize = mix_videoencodeparams_finalize;
+ mixparams_class->copy = (MixParamsCopyFunction) mix_videoencodeparams_copy;
+ mixparams_class->dup = (MixParamsDupFunction) mix_videoencodeparams_dup;
+ mixparams_class->equal
+ = (MixParamsEqualFunction) mix_videoencodeparams_equal;
+}
+
+MixVideoEncodeParams *
+mix_videoencodeparams_new(void) {
+ MixVideoEncodeParams *ret =
+ (MixVideoEncodeParams *) g_type_create_instance(
+ MIX_TYPE_VIDEOENCODEPARAMS);
+
+ return ret;
+}
+
+void mix_videoencodeparams_finalize(MixParams * obj) {
+ /* clean up here. */
+ /* TODO: cleanup resources allocated */
+
+ /* Chain up parent */
+ if (parent_class->finalize) {
+ parent_class->finalize(obj);
+ }
+}
+
+MixVideoEncodeParams *
+mix_videoencodeparams_ref(MixVideoEncodeParams * mix) {
+ return (MixVideoEncodeParams *) mix_params_ref(MIX_PARAMS(mix));
+}
+
+/**
+ * mix_videoencodeparams_dup:
+ * @obj: a #MixVideoEncodeParams object
+ * @returns: a newly allocated duplicate of the object.
+ *
+ * Copy duplicate of the object.
+ */
+MixParams *
+mix_videoencodeparams_dup(const MixParams * obj) {
+ MixParams *ret = NULL;
+
+ if (MIX_IS_VIDEOENCODEPARAMS(obj)) {
+ MixVideoEncodeParams *duplicate = mix_videoencodeparams_new();
+ if (mix_videoencodeparams_copy(MIX_PARAMS(duplicate), MIX_PARAMS(obj))) {
+ ret = MIX_PARAMS(duplicate);
+ } else {
+ mix_videoencodeparams_unref(duplicate);
+ }
+ }
+ return ret;
+}
+
+/**
+ * mix_videoencodeparams_copy:
+ * @target: copy to target
+ * @src: copy from src
+ * @returns: boolean indicates if copy is successful.
+ *
+ * Copy instance data from @src to @target.
+ */
+gboolean mix_videoencodeparams_copy(MixParams * target, const MixParams * src) {
+ MixVideoEncodeParams *this_target, *this_src;
+
+ if (MIX_IS_VIDEOENCODEPARAMS(target) && MIX_IS_VIDEOENCODEPARAMS(src)) {
+ // Cast the base object to this child object
+ this_target = MIX_VIDEOENCODEPARAMS(target);
+ this_src = MIX_VIDEOENCODEPARAMS(src);
+
+ // TODO: copy properties */
+
+ // Now chainup base class
+ if (parent_class->copy) {
+ return parent_class->copy(MIX_PARAMS_CAST(target), MIX_PARAMS_CAST(
+ src));
+ } else {
+ return TRUE;
+ }
+ }
+ return FALSE;
+}
+
+/**
+ * mix_videoencodeparams_equal:
+ * @first: first object to compare
+ * @second: second object to compare
+ * @returns: boolean indicates if instances are equal.
+ *
+ * Compare the instance data of @first and @second for equality.
+ */
+gboolean mix_videoencodeparams_equal(MixParams * first, MixParams * second) {
+ gboolean ret = FALSE;
+ MixVideoEncodeParams *this_first, *this_second;
+
+ if (MIX_IS_VIDEOENCODEPARAMS(first) && MIX_IS_VIDEOENCODEPARAMS(second)) {
+ // Deep compare
+ // Cast the base object to this child object
+
+ this_first = MIX_VIDEOENCODEPARAMS(first);
+ this_second = MIX_VIDEOENCODEPARAMS(second);
+
+ /* TODO: add comparison for properties */
+ /* if ( first properties == second properties) */
+ {
+ // members within this scope equal. chaining up.
+ MixParamsClass *klass = MIX_PARAMS_CLASS(parent_class);
+ if (klass->equal)
+ ret = parent_class->equal(first, second);
+ else
+ ret = TRUE;
+ }
+ }
+
+ return ret;
+}
+
+#define MIX_VIDEOENCODEPARAMS_SETTER_CHECK_INPUT(obj) \
+ if(!obj) return MIX_RESULT_NULL_PTR; \
+ if(!MIX_IS_VIDEOENCODEPARAMS(obj)) return MIX_RESULT_FAIL; \
+
+#define MIX_VIDEOENCODEPARAMS_GETTER_CHECK_INPUT(obj, prop) \
+ if(!obj || !prop) return MIX_RESULT_NULL_PTR; \
+ if(!MIX_IS_VIDEOENCODEPARAMS(obj)) return MIX_RESULT_FAIL; \
+
+
+/* TODO: Add getters and setters for properties. */
+
+MIX_RESULT mix_videoencodeparams_set_timestamp(MixVideoEncodeParams * obj,
+ guint64 timestamp) {
+ MIX_VIDEOENCODEPARAMS_SETTER_CHECK_INPUT (obj);
+ obj->timestamp = timestamp;
+ return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoencodeparams_get_timestamp(MixVideoEncodeParams * obj,
+ guint64 * timestamp) {
+ MIX_VIDEOENCODEPARAMS_GETTER_CHECK_INPUT (obj, timestamp);
+ *timestamp = obj->timestamp;
+ return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoencodeparams_set_discontinuity(MixVideoEncodeParams * obj,
+ gboolean discontinuity) {
+ MIX_VIDEOENCODEPARAMS_SETTER_CHECK_INPUT (obj);
+ obj->discontinuity = discontinuity;
+ return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoencodeparams_get_discontinuity(MixVideoEncodeParams * obj,
+ gboolean *discontinuity) {
+ MIX_VIDEOENCODEPARAMS_GETTER_CHECK_INPUT (obj, discontinuity);
+ *discontinuity = obj->discontinuity;
+ return MIX_RESULT_SUCCESS;
+}
+
diff --git a/mix_video/src/mixvideoencodeparams.h b/mix_video/src/mixvideoencodeparams.h
new file mode 100644
index 0000000..8709cb9
--- /dev/null
+++ b/mix_video/src/mixvideoencodeparams.h
@@ -0,0 +1,140 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+#ifndef __MIX_VIDEOENCODEPARAMS_H__
+#define __MIX_VIDEOENCODEPARAMS_H__
+
+#include <mixparams.h>
+#include "mixvideodef.h"
+
+/**
+ * MIX_TYPE_VIDEOENCODEPARAMS:
+ *
+ * Get type of class.
+ */
+#define MIX_TYPE_VIDEOENCODEPARAMS (mix_videoencodeparams_get_type ())
+
+/**
+ * MIX_VIDEOENCODEPARAMS:
+ * @obj: object to be type-casted.
+ */
+#define MIX_VIDEOENCODEPARAMS(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_VIDEOENCODEPARAMS, MixVideoEncodeParams))
+
+/**
+ * MIX_IS_VIDEOENCODEPARAMS:
+ * @obj: an object.
+ *
+ * Checks if the given object is an instance of #MixParams
+ */
+#define MIX_IS_VIDEOENCODEPARAMS(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEOENCODEPARAMS))
+
+/**
+ * MIX_VIDEOENCODEPARAMS_CLASS:
+ * @klass: class to be type-casted.
+ */
+#define MIX_VIDEOENCODEPARAMS_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_VIDEOENCODEPARAMS, MixVideoEncodeParamsClass))
+
+/**
+ * MIX_IS_VIDEOENCODEPARAMS_CLASS:
+ * @klass: a class.
+ *
+ * Checks if the given class is #MixParamsClass
+ */
+#define MIX_IS_VIDEOENCODEPARAMS_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEOENCODEPARAMS))
+
+/**
+ * MIX_VIDEOENCODEPARAMS_GET_CLASS:
+ * @obj: a #MixParams object.
+ *
+ * Get the class instance of the object.
+ */
+#define MIX_VIDEOENCODEPARAMS_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEOENCODEPARAMS, MixVideoEncodeParamsClass))
+
+typedef struct _MixVideoEncodeParams MixVideoEncodeParams;
+typedef struct _MixVideoEncodeParamsClass MixVideoEncodeParamsClass;
+
+/**
+ * MixVideoEncodeParams:
+ *
+ * MI-X VideoEncode Parameter object
+ */
+struct _MixVideoEncodeParams {
+ /*< public > */
+ MixParams parent;
+
+ /*< public > */
+
+ /* TODO: Add properties */
+ guint64 timestamp;
+ gboolean discontinuity;
+
+ void *reserved1;
+ void *reserved2;
+ void *reserved3;
+ void *reserved4;
+};
+
+/**
+ * MixVideoEncodeParamsClass:
+ *
+ * MI-X VideoEncode object class
+ */
+struct _MixVideoEncodeParamsClass {
+ /*< public > */
+ MixParamsClass parent_class;
+
+ /* class members */
+};
+
+/**
+ * mix_videoencodeparams_get_type:
+ * @returns: type
+ *
+ * Get the type of object.
+ */
+GType mix_videoencodeparams_get_type(void);
+
+/**
+ * mix_videoencodeparams_new:
+ * @returns: A newly allocated instance of #MixVideoEncodeParams
+ *
+ * Use this method to create new instance of #MixVideoEncodeParams
+ */
+MixVideoEncodeParams *mix_videoencodeparams_new(void);
+/**
+ * mix_videoencodeparams_ref:
+ * @mix: object to add reference
+ * @returns: the MixVideoEncodeParams instance where reference count has been increased.
+ *
+ * Add reference count.
+ */
+MixVideoEncodeParams *mix_videoencodeparams_ref(MixVideoEncodeParams * mix);
+
+/**
+ * mix_videoencodeparams_unref:
+ * @obj: object to unref.
+ *
+ * Decrement reference count of the object.
+ */
+#define mix_videoencodeparams_unref(obj) mix_params_unref(MIX_PARAMS(obj))
+
+/* Class Methods */
+
+/* TODO: Add getters and setters for properties */
+MIX_RESULT mix_videoencodeparams_set_timestamp(MixVideoEncodeParams * obj,
+ guint64 timestamp);
+MIX_RESULT mix_videoencodeparams_get_timestamp(MixVideoEncodeParams * obj,
+ guint64 * timestamp);
+
+MIX_RESULT mix_videoencodeparams_set_discontinuity(MixVideoEncodeParams * obj,
+ gboolean discontinuity);
+MIX_RESULT mix_videoencodeparams_get_discontinuity(MixVideoEncodeParams * obj,
+ gboolean *discontinuity);
+
+#endif /* __MIX_VIDEOENCODEPARAMS_H__ */
+
diff --git a/mix_video/src/mixvideoformat.c b/mix_video/src/mixvideoformat.c
new file mode 100644
index 0000000..fa601cb
--- /dev/null
+++ b/mix_video/src/mixvideoformat.c
@@ -0,0 +1,401 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+#include <glib.h>
+#include "mixvideolog.h"
+
+#include "mixvideoformat.h"
+
+#define MIXUNREF(obj, unref) if(obj) { unref(obj); obj = NULL; }
+
+
+/* Default vmethods implementation */
+static MIX_RESULT mix_videofmt_getcaps_default(MixVideoFormat *mix,
+ GString *msg);
+static MIX_RESULT mix_videofmt_initialize_default(MixVideoFormat *mix,
+ MixVideoConfigParamsDec * config_params,
+ MixFrameManager * frame_mgr,
+ MixBufferPool * input_buf_pool,
+ MixSurfacePool ** surface_pool,
+ VADisplay vadisplay);
+static MIX_RESULT
+ mix_videofmt_decode_default(MixVideoFormat *mix,
+ MixBuffer * bufin[], gint bufincnt,
+ MixVideoDecodeParams * decode_params);
+static MIX_RESULT mix_videofmt_flush_default(MixVideoFormat *mix);
+static MIX_RESULT mix_videofmt_eos_default(MixVideoFormat *mix);
+static MIX_RESULT mix_videofmt_deinitialize_default(MixVideoFormat *mix);
+
+static GObjectClass *parent_class = NULL;
+
+static void mix_videoformat_finalize(GObject * obj);
+G_DEFINE_TYPE (MixVideoFormat, mix_videoformat, G_TYPE_OBJECT);
+
+static void mix_videoformat_init(MixVideoFormat * self) {
+
+ /* public member initialization */
+ /* These are all public because MixVideoFormat objects are completely internal to MixVideo,
+ no need for private members */
+
+ self->initialized = FALSE;
+ self->framemgr = NULL;
+ self->surfacepool = NULL;
+ self->inputbufpool = NULL;
+ self->inputbufqueue = NULL;
+ self->va_display = NULL;
+ self->va_context = VA_INVALID_ID;
+ self->va_config = VA_INVALID_ID;
+ self->va_surfaces = NULL;
+ self->va_num_surfaces = 0;
+ self->mime_type = NULL;
+ self->frame_rate_num = 0;
+ self->frame_rate_denom = 0;
+ self->picture_width = 0;
+ self->picture_height = 0;
+ self->parse_in_progress = FALSE;
+ self->current_timestamp = 0;
+}
+
+static void mix_videoformat_class_init(MixVideoFormatClass * klass) {
+ GObjectClass *gobject_class = (GObjectClass *) klass;
+
+ /* parent class for later use */
+ parent_class = g_type_class_peek_parent(klass);
+
+ gobject_class->finalize = mix_videoformat_finalize;
+
+ /* setup vmethods with base implementation */
+ klass->getcaps = mix_videofmt_getcaps_default;
+ klass->initialize = mix_videofmt_initialize_default;
+ klass->decode = mix_videofmt_decode_default;
+ klass->flush = mix_videofmt_flush_default;
+ klass->eos = mix_videofmt_eos_default;
+ klass->deinitialize = mix_videofmt_deinitialize_default;
+}
+
+MixVideoFormat *
+mix_videoformat_new(void) {
+ MixVideoFormat *ret = g_object_new(MIX_TYPE_VIDEOFORMAT, NULL);
+
+ return ret;
+}
+
+void mix_videoformat_finalize(GObject * obj) {
+ /* Tear down all state owned by this object; safe to run even if
+ initialize() was never called or only partially succeeded. */
+ VAStatus va_status;
+
+ MixVideoFormat *mix = MIX_VIDEOFORMAT(obj);
+ MixInputBufferEntry *buf_entry = NULL;
+
+ if(mix->objectlock) {
+ g_mutex_free(mix->objectlock);
+ mix->objectlock = NULL;
+ }
+
+ if (mix->mime_type)
+ {
+ if (mix->mime_type->str)
+ g_string_free(mix->mime_type, TRUE);
+ else
+ g_string_free(mix->mime_type, FALSE);
+ }
+
+ //MixVideo object calls the _deinitialize() for frame manager
+ MIXUNREF(mix->framemgr, mix_framemanager_unref);
+
+ if (mix->surfacepool)
+ {
+ mix_surfacepool_deinitialize(mix->surfacepool);
+ MIXUNREF(mix->surfacepool, mix_surfacepool_unref);
+ }
+
+ //libVA cleanup (vaTerminate is called from MixVideo object)
+ if (mix->va_display) {
+ if (mix->va_config != VA_INVALID_ID) //was testing va_context: copy-paste bug leaked the config
+ {
+ va_status = vaDestroyConfig(mix->va_display, mix->va_config);
+ if (va_status != VA_STATUS_SUCCESS) {
+ LOG_W( "Failed vaDestroyConfig\n");
+ }
+ mix->va_config = VA_INVALID_ID;
+ }
+ if (mix->va_context != VA_INVALID_ID)
+ {
+ va_status = vaDestroyContext(mix->va_display, mix->va_context);
+ if (va_status != VA_STATUS_SUCCESS) {
+ LOG_W( "Failed vaDestroyContext\n");
+ }
+ mix->va_context = VA_INVALID_ID;
+ }
+ if (mix->va_surfaces)
+ {
+ va_status = vaDestroySurfaces(mix->va_display, mix->va_surfaces, mix->va_num_surfaces);
+ if (va_status != VA_STATUS_SUCCESS) {
+ LOG_W( "Failed vaDestroySurfaces\n");
+ }
+ g_free(mix->va_surfaces);
+ mix->va_surfaces = NULL;
+ mix->va_num_surfaces = 0;
+ }
+ }
+
+
+ //Deinit input buffer queue
+
+ while (mix->inputbufqueue && !g_queue_is_empty(mix->inputbufqueue)) //queue is NULL if initialize() never ran
+ {
+ buf_entry = g_queue_pop_head(mix->inputbufqueue);
+ mix_buffer_unref(buf_entry->buf);
+ g_free(buf_entry);
+ }
+
+ if (mix->inputbufqueue) g_queue_free(mix->inputbufqueue); //g_queue_free(NULL) would crash
+
+ //MixBuffer pool is deallocated in MixVideo object
+ mix->inputbufpool = NULL;
+
+ /* Chain up parent */
+ if (parent_class->finalize) {
+ parent_class->finalize(obj);
+ }
+}
+
+MixVideoFormat *
+mix_videoformat_ref(MixVideoFormat * mix) {
+ return (MixVideoFormat *) g_object_ref(G_OBJECT(mix));
+}
+
+/* Default vmethods implementation */
+static MIX_RESULT mix_videofmt_getcaps_default(MixVideoFormat *mix,
+ GString *msg) {
+ g_print("mix_videofmt_getcaps_default\n");
+ return MIX_RESULT_SUCCESS;
+}
+
+static MIX_RESULT mix_videofmt_initialize_default(MixVideoFormat *mix,
+ MixVideoConfigParamsDec * config_params,
+ MixFrameManager * frame_mgr,
+ MixBufferPool * input_buf_pool,
+ MixSurfacePool ** surface_pool,
+ VADisplay va_display) {
+
+ LOG_V( "Begin\n");
+
+ MIX_RESULT res = MIX_RESULT_SUCCESS;
+ MixInputBufferEntry *buf_entry = NULL;
+
+ if (!mix || !config_params || !frame_mgr || !input_buf_pool || !surface_pool || !va_display)
+ {
+ LOG_E( "Null pointer passed in\n");
+ return (MIX_RESULT_NULL_PTR);
+ }
+
+ // Create object lock
+ // Note that g_thread_init() has already been called by mix_video_init()
+ if (mix->objectlock) //If already exists, then deallocate old one (we are being re-initialized)
+ {
+ g_mutex_free(mix->objectlock);
+ mix->objectlock = NULL;
+ }
+ mix->objectlock = g_mutex_new();
+ if (!mix->objectlock) {
+ LOG_E( "!mix->objectlock\n");
+ return (MIX_RESULT_NO_MEMORY);
+ }
+
+ g_mutex_lock(mix->objectlock);
+
+ //Clean up any previous framemgr
+ MIXUNREF(mix->framemgr, mix_framemanager_unref);
+ mix->framemgr = frame_mgr;
+ mix_framemanager_ref(mix->framemgr);
+
+ mix->va_display = va_display;
+
+ if (mix->mime_type) //Clean up any previous mime_type
+ {
+ if (mix->mime_type->str)
+ g_string_free(mix->mime_type, TRUE);
+ else
+ g_string_free(mix->mime_type, FALSE);
+ mix->mime_type = NULL; //avoid dangling pointer (double free in _finalize) if no new mime_type is set below
+ }
+ gchar *mime_tmp = NULL;
+ res = mix_videoconfigparamsdec_get_mime_type(config_params, &mime_tmp);
+ if (mime_tmp)
+ {
+ mix->mime_type = g_string_new(mime_tmp);
+ g_free(mime_tmp);
+ if (!mix->mime_type) //new failed
+ {
+ res = MIX_RESULT_NO_MEMORY;
+ LOG_E( "Could not duplicate mime_type\n");
+ goto cleanup;
+ }
+ } //else there is no mime_type; leave as NULL
+
+ res = mix_videoconfigparamsdec_get_frame_rate(config_params, &(mix->frame_rate_num), &(mix->frame_rate_denom));
+ if (res != MIX_RESULT_SUCCESS)
+ {
+ LOG_E( "Error getting frame_rate\n");
+ goto cleanup;
+ }
+ res = mix_videoconfigparamsdec_get_picture_res(config_params, &(mix->picture_width), &(mix->picture_height));
+ if (res != MIX_RESULT_SUCCESS)
+ {
+ LOG_E( "Error getting picture_res\n");
+ goto cleanup;
+ }
+
+ if (mix->inputbufqueue)
+ {
+ //Deinit previous input buffer queue
+
+ while (!g_queue_is_empty(mix->inputbufqueue))
+ {
+ buf_entry = g_queue_pop_head(mix->inputbufqueue);
+ mix_buffer_unref(buf_entry->buf);
+ g_free(buf_entry);
+ }
+
+ g_queue_free(mix->inputbufqueue);
+ }
+
+ //MixBuffer pool is cleaned up in MixVideo object
+ mix->inputbufpool = NULL;
+
+ mix->inputbufpool = input_buf_pool;
+ mix->inputbufqueue = g_queue_new();
+ if (!mix->inputbufqueue) //New failed
+ {
+ res = MIX_RESULT_NO_MEMORY;
+ LOG_E( "Could not create input buffer queue\n");
+ goto cleanup;
+ }
+
+ // surface pool, VA context/config and parser handle are initialized by
+ // derived classes
+
+ cleanup:
+ if (res != MIX_RESULT_SUCCESS) {
+
+ MIXUNREF(mix->framemgr, mix_framemanager_unref);
+ if (mix->mime_type)
+ {
+ if (mix->mime_type->str)
+ g_string_free(mix->mime_type, TRUE);
+ else
+ g_string_free(mix->mime_type, FALSE);
+ mix->mime_type = NULL;
+ }
+
+ if (mix->objectlock)
+ g_mutex_unlock(mix->objectlock);
+ g_mutex_free(mix->objectlock); //objectlock is always non-NULL on this path (created above)
+ mix->objectlock = NULL;
+ mix->frame_rate_num = 0;
+ mix->frame_rate_denom = 1;
+ mix->picture_width = 0;
+ mix->picture_height = 0;
+
+ } else {
+ //Normal unlock
+ if (mix->objectlock)
+ g_mutex_unlock(mix->objectlock);
+ }
+
+ LOG_V( "End\n");
+
+ return res;
+}
+
+static MIX_RESULT mix_videofmt_decode_default(MixVideoFormat *mix,
+ MixBuffer * bufin[], gint bufincnt,
+ MixVideoDecodeParams * decode_params) {
+ return MIX_RESULT_SUCCESS;
+}
+
+static MIX_RESULT mix_videofmt_flush_default(MixVideoFormat *mix) {
+ return MIX_RESULT_SUCCESS;
+}
+
+static MIX_RESULT mix_videofmt_eos_default(MixVideoFormat *mix) {
+ return MIX_RESULT_SUCCESS;
+}
+
+static MIX_RESULT mix_videofmt_deinitialize_default(MixVideoFormat *mix) {
+
+ //All teardown is being done in _finalize()
+
+ return MIX_RESULT_SUCCESS;
+}
+
+/* mixvideoformat class methods implementation */
+
+MIX_RESULT mix_videofmt_getcaps(MixVideoFormat *mix, GString *msg) {
+ MixVideoFormatClass *klass = MIX_VIDEOFORMAT_GET_CLASS(mix);
+ g_print("mix_videofmt_getcaps\n");
+ if (klass->getcaps) {
+ return klass->getcaps(mix, msg);
+ }
+ return MIX_RESULT_NOTIMPL;
+}
+
+MIX_RESULT mix_videofmt_initialize(MixVideoFormat *mix,
+ MixVideoConfigParamsDec * config_params,
+ MixFrameManager * frame_mgr,
+ MixBufferPool * input_buf_pool,
+ MixSurfacePool ** surface_pool,
+ VADisplay va_display) {
+ MixVideoFormatClass *klass = MIX_VIDEOFORMAT_GET_CLASS(mix);
+
+ if (klass->initialize) {
+ return klass->initialize(mix, config_params, frame_mgr,
+ input_buf_pool, surface_pool, va_display);
+ }
+
+ return MIX_RESULT_FAIL;
+
+}
+
+MIX_RESULT mix_videofmt_decode(MixVideoFormat *mix, MixBuffer * bufin[],
+ gint bufincnt, MixVideoDecodeParams * decode_params) {
+
+ MixVideoFormatClass *klass = MIX_VIDEOFORMAT_GET_CLASS(mix);
+ if (klass->decode) {
+ return klass->decode(mix, bufin, bufincnt, decode_params);
+ }
+
+ return MIX_RESULT_FAIL;
+}
+
+MIX_RESULT mix_videofmt_flush(MixVideoFormat *mix) {
+ MixVideoFormatClass *klass = MIX_VIDEOFORMAT_GET_CLASS(mix);
+ if (klass->flush) {
+ return klass->flush(mix);
+ }
+
+ return MIX_RESULT_FAIL;
+}
+
+MIX_RESULT mix_videofmt_eos(MixVideoFormat *mix) {
+ MixVideoFormatClass *klass = MIX_VIDEOFORMAT_GET_CLASS(mix);
+ if (klass->eos) {
+ return klass->eos(mix);
+ }
+
+ return MIX_RESULT_FAIL;
+}
+
+MIX_RESULT mix_videofmt_deinitialize(MixVideoFormat *mix) {
+ MixVideoFormatClass *klass = MIX_VIDEOFORMAT_GET_CLASS(mix);
+ if (klass->deinitialize) {
+ return klass->deinitialize(mix);
+ }
+
+ return MIX_RESULT_FAIL;
+}
diff --git a/mix_video/src/mixvideoformat.h b/mix_video/src/mixvideoformat.h
new file mode 100644
index 0000000..c2e4769
--- /dev/null
+++ b/mix_video/src/mixvideoformat.h
@@ -0,0 +1,160 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+#ifndef __MIX_VIDEOFORMAT_H__
+#define __MIX_VIDEOFORMAT_H__
+
+#include <va/va.h>
+#include <glib-object.h>
+#include "vbp_loader.h"
+#include "mixvideodef.h"
+#include "mixdrmparams.h"
+#include "mixvideoconfigparamsdec.h"
+#include "mixvideodecodeparams.h"
+#include "mixvideoframe.h"
+#include "mixframemanager.h"
+#include "mixsurfacepool.h"
+#include "mixbuffer.h"
+#include "mixbufferpool.h"
+#include "mixvideoformatqueue.h"
+
+// Redefine the Handle defined in vbp_loader.h
+#define VBPhandle Handle
+
+/*
+ * Type macros.
+ */
+#define MIX_TYPE_VIDEOFORMAT (mix_videoformat_get_type ())
+#define MIX_VIDEOFORMAT(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_VIDEOFORMAT, MixVideoFormat))
+#define MIX_IS_VIDEOFORMAT(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEOFORMAT))
+#define MIX_VIDEOFORMAT_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_VIDEOFORMAT, MixVideoFormatClass))
+#define MIX_IS_VIDEOFORMAT_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEOFORMAT))
+#define MIX_VIDEOFORMAT_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEOFORMAT, MixVideoFormatClass))
+
+typedef struct _MixVideoFormat MixVideoFormat;
+typedef struct _MixVideoFormatClass MixVideoFormatClass;
+
+/* vmethods typedef */
+
+typedef MIX_RESULT (*MixVideoFmtGetCapsFunc)(MixVideoFormat *mix, GString *msg);
+typedef MIX_RESULT (*MixVideoFmtInitializeFunc)(MixVideoFormat *mix,
+ MixVideoConfigParamsDec * config_params,
+ MixFrameManager * frame_mgr,
+ MixBufferPool * input_buf_pool,
+ MixSurfacePool ** surface_pool,
+ VADisplay va_display);
+typedef MIX_RESULT (*MixVideoFmtDecodeFunc)(MixVideoFormat *mix,
+ MixBuffer * bufin[], gint bufincnt,
+ MixVideoDecodeParams * decode_params);
+typedef MIX_RESULT (*MixVideoFmtFlushFunc)(MixVideoFormat *mix);
+typedef MIX_RESULT (*MixVideoFmtEndOfStreamFunc)(MixVideoFormat *mix);
+typedef MIX_RESULT (*MixVideoFmtDeinitializeFunc)(MixVideoFormat *mix);
+
/* Base instance structure for all MI-X video format decoders.
 * All members are effectively public: these objects are internal to
 * MixVideo, so no separate private struct is used. */
struct _MixVideoFormat {
    /*< public > */
    GObject parent;

    /*< public > */

    /*< private > */
    GMutex *objectlock;        /* serializes access to the mutable state below */
    gboolean initialized;      /* TRUE once initialize() has completed */
    MixFrameManager *framemgr; /* delivers decoded frames downstream */
    MixSurfacePool *surfacepool;    /* pool of decoded-picture surfaces */
    VADisplay va_display;      /* libva display handle */
    VAContextID va_context;    /* libva decode context */
    VAConfigID va_config;      /* libva configuration */
    VASurfaceID *va_surfaces;  /* array of va_num_surfaces surface IDs */
    guint va_num_surfaces;     /* number of entries in va_surfaces */
    VBPhandle parser_handle;   /* vbp bitstream-parser handle */
    GString *mime_type;        /* stream mime type -- TODO confirm who sets this */
    guint frame_rate_num;      /* frame rate numerator */
    guint frame_rate_denom;    /* frame rate denominator */
    guint picture_width;       /* picture width; presumably pixels -- confirm against caps code */
    guint picture_height;      /* picture height; presumably pixels */
    gboolean parse_in_progress;    /* a frame's parse spans multiple decode() calls */
    gboolean discontinuity_frame_in_progress; /* discontinuity flag of in-progress frame */
    guint64 current_timestamp; /* timestamp of the frame currently being parsed */
    MixBufferPool *inputbufpool;   /* pool the queued input buffers come from */
    GQueue *inputbufqueue;     /* queue of MixInputBufferEntry awaiting decode */
};
+
+/**
+ * MixVideoFormatClass:
+ *
+ * MI-X Video object class
+ */
struct _MixVideoFormatClass {
    /*< public > */
    GObjectClass parent_class;

    /* class members */

    /*< public > */
    MixVideoFmtGetCapsFunc getcaps;           /* append capabilities to a GString */
    MixVideoFmtInitializeFunc initialize;     /* set up parser, libva and pools */
    MixVideoFmtDecodeFunc decode;             /* decode a batch of input buffers */
    MixVideoFmtFlushFunc flush;               /* drop queued input, reset parser */
    MixVideoFmtEndOfStreamFunc eos;           /* drain the final in-progress frame */
    MixVideoFmtDeinitializeFunc deinitialize; /* inverse of initialize */
};
+
+/**
+ * mix_videoformat_get_type:
+ * @returns: type
+ *
+ * Get the type of object.
+ */
+GType mix_videoformat_get_type(void);
+
+/**
+ * mix_videoformat_new:
+ * @returns: A newly allocated instance of #MixVideoFormat
+ *
+ * Use this method to create new instance of #MixVideoFormat
+ */
+MixVideoFormat *mix_videoformat_new(void);
+
+/**
+ * mix_videoformat_ref:
+ * @mix: object to add reference
+ * @returns: the MixVideoFormat instance where reference count has been increased.
+ *
+ * Add reference count.
+ */
+MixVideoFormat *mix_videoformat_ref(MixVideoFormat * mix);
+
+/**
+ * mix_videoformat_unref:
+ * @obj: object to unref.
+ *
+ * Decrement reference count of the object.
+ */
+#define mix_videoformat_unref(obj) g_object_unref (G_OBJECT(obj))
+
+/* Class Methods */
+
+MIX_RESULT mix_videofmt_getcaps(MixVideoFormat *mix, GString *msg);
+
+MIX_RESULT mix_videofmt_initialize(MixVideoFormat *mix,
+ MixVideoConfigParamsDec * config_params,
+ MixFrameManager * frame_mgr,
+ MixBufferPool * input_buf_pool,
+ MixSurfacePool ** surface_pool,
+ VADisplay va_display);
+
+MIX_RESULT mix_videofmt_decode(MixVideoFormat *mix, MixBuffer * bufin[],
+ gint bufincnt, MixVideoDecodeParams * decode_params);
+
+MIX_RESULT mix_videofmt_flush(MixVideoFormat *mix);
+
+MIX_RESULT mix_videofmt_eos(MixVideoFormat *mix);
+
+MIX_RESULT mix_videofmt_deinitialize(MixVideoFormat *mix);
+
+#endif /* __MIX_VIDEOFORMAT_H__ */
diff --git a/mix_video/src/mixvideoformat_h264.c b/mix_video/src/mixvideoformat_h264.c
new file mode 100644
index 0000000..9e81cbf
--- /dev/null
+++ b/mix_video/src/mixvideoformat_h264.c
@@ -0,0 +1,1663 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+#include <glib.h>
+#include <va/va_x11.h>
+
+#include "mixvideolog.h"
+#include "mixvideoformat_h264.h"
+
+#ifdef MIX_LOG_ENABLE
+static int mix_video_h264_counter = 0;
+#endif /* MIX_LOG_ENABLE */
+
+/* The parent class. The pointer will be saved
+ * in this class's initialization. The pointer
+ * can be used for chaining method call if needed.
+ */
+static MixVideoFormatClass *parent_class = NULL;
+
+static void mix_videoformat_h264_finalize(GObject * obj);
+
+/*
+ * Please note that the type we pass to G_DEFINE_TYPE is MIX_TYPE_VIDEOFORMAT
+ */
+G_DEFINE_TYPE (MixVideoFormat_H264, mix_videoformat_h264, MIX_TYPE_VIDEOFORMAT);
+
+static void mix_videoformat_h264_init(MixVideoFormat_H264 * self) {
+ MixVideoFormat *parent = MIX_VIDEOFORMAT(self);
+
+ /* public member initialization */
+ /* These are all public because MixVideoFormat objects are completely internal to MixVideo,
+ no need for private members */
+ self->dpb_surface_table = NULL;
+
+ /* NOTE: we don't need to do this here.
+ * This just demostrates how to access
+ * member varibles beloned to parent
+ */
+ parent->initialized = FALSE;
+}
+
+static void mix_videoformat_h264_class_init(
+ MixVideoFormat_H264Class * klass) {
+
+ /* root class */
+ GObjectClass *gobject_class = (GObjectClass *) klass;
+
+ /* direct parent class */
+ MixVideoFormatClass *video_format_class =
+ MIX_VIDEOFORMAT_CLASS(klass);
+
+ /* parent class for later use */
+ parent_class = g_type_class_peek_parent(klass);
+
+ /* setup finializer */
+ gobject_class->finalize = mix_videoformat_h264_finalize;
+
+ /* setup vmethods with base implementation */
+ /* This is where we can override base class methods if needed */
+ video_format_class->getcaps = mix_videofmt_h264_getcaps;
+ video_format_class->initialize = mix_videofmt_h264_initialize;
+ video_format_class->decode = mix_videofmt_h264_decode;
+ video_format_class->flush = mix_videofmt_h264_flush;
+ video_format_class->eos = mix_videofmt_h264_eos;
+ video_format_class->deinitialize = mix_videofmt_h264_deinitialize;
+}
+
+MixVideoFormat_H264 *
+mix_videoformat_h264_new(void) {
+ MixVideoFormat_H264 *ret =
+ g_object_new(MIX_TYPE_VIDEOFORMAT_H264, NULL);
+
+ return ret;
+}
+
+void mix_videoformat_h264_finalize(GObject * obj) {
+ gint32 pret = VBP_OK;
+
+ /* clean up here. */
+
+ MixVideoFormat *parent = NULL;
+ MixVideoFormat_H264 *self = MIX_VIDEOFORMAT_H264(obj);
+ GObjectClass *root_class = (GObjectClass *) parent_class;
+
+ parent = MIX_VIDEOFORMAT(self);
+
+ //surfacepool is deallocated by parent
+ //inputbufqueue is deallocated by parent
+ //parent calls vaDestroyConfig, vaDestroyContext and vaDestroySurfaces
+
+ //Free the DPB surface table
+ //First remove all the entries (frames will be unrefed)
+ g_hash_table_remove_all(self->dpb_surface_table);
+ //Then unref the table
+ g_hash_table_unref(self->dpb_surface_table);
+ self->dpb_surface_table = NULL;
+
+ g_mutex_lock(parent->objectlock);
+ parent->initialized = TRUE;
+ parent->parse_in_progress = FALSE;
+ parent->current_timestamp = 0;
+
+ //Close the parser
+ pret = vbp_close(parent->parser_handle);
+ parent->parser_handle = NULL;
+ if (pret != VBP_OK)
+ {
+ LOG_E( "Error closing parser\n");
+ }
+
+ g_mutex_unlock(parent->objectlock);
+
+ /* Chain up parent */
+ if (root_class->finalize) {
+ root_class->finalize(obj);
+ }
+}
+
+MixVideoFormat_H264 *
+mix_videoformat_h264_ref(MixVideoFormat_H264 * mix) {
+ return (MixVideoFormat_H264 *) g_object_ref(G_OBJECT(mix));
+}
+
+/* H.264 vmethods implementation */
+MIX_RESULT mix_videofmt_h264_getcaps(MixVideoFormat *mix, GString *msg) {
+
+MIX_RESULT ret = MIX_RESULT_SUCCESS;
+
+ if (mix == NULL || msg == NULL)
+ {
+ LOG_E( "NUll pointer passed in\n");
+ return MIX_RESULT_NULL_PTR;
+ }
+
+ LOG_V( "Begin\n");
+
+ /* Chainup parent method.
+ */
+
+ if (parent_class->getcaps) {
+ ret = parent_class->getcaps(mix, msg);
+ }
+
+ LOG_V( "End\n");
+
+ return ret;
+}
+
/* initialize vmethod for H.264.
 *
 * Chains up to the base-class initialize, opens the vbp bitstream parser,
 * parses the codec header from config_params, then brings up libva:
 * profile selection, VLD entrypoint, config, surfaces, surface pool,
 * decode context, and finally the DPB "in use" surface table.
 *
 * @mix:            the format object; must be a MixVideoFormat_H264
 * @config_params:  source of header data and extra-surface count
 * @frame_mgr:      frame manager (validated non-NULL, stored by parent)
 * @input_buf_pool: input buffer pool (validated non-NULL, stored by parent)
 * @surface_pool:   out-param; receives the newly created surface pool
 * @va_display:     libva display to create all VA objects against
 * @returns: MIX_RESULT_SUCCESS, or an error code; on failure the parser
 *           is closed and parent->initialized is left FALSE.
 *
 * All exits after the lock go through the cleanup: label, which closes
 * the parser on failure and always frees the temporary header/profile/
 * entrypoint allocations and releases objectlock. */
MIX_RESULT mix_videofmt_h264_initialize(MixVideoFormat *mix,
        MixVideoConfigParamsDec * config_params,
        MixFrameManager * frame_mgr,
        MixBufferPool * input_buf_pool,
        MixSurfacePool ** surface_pool,
        VADisplay va_display ) {

    uint32 pret = 0;
    MIX_RESULT ret = MIX_RESULT_SUCCESS;
    enum _vbp_parser_type ptype = VBP_H264;
    vbp_data_h264 *data = NULL;
    MixVideoFormat *parent = NULL;
    MixIOVec *header = NULL;
    gint numprofs = 0, numactualprofs = 0;
    gint numentrypts = 0, numactualentrypts = 0;
    VADisplay vadisplay = NULL;
    VAProfile *profiles = NULL;
    VAEntrypoint *entrypts = NULL;
    VAConfigAttrib attrib;
    VAStatus vret = VA_STATUS_SUCCESS;
    guint extra_surfaces = 0;
    VASurfaceID *surfaces = NULL;
    guint numSurfaces = 0;

    //TODO Partition this method into smaller methods

    if (mix == NULL || config_params == NULL || frame_mgr == NULL || input_buf_pool == NULL || va_display == NULL)
    {
        LOG_E( "NUll pointer passed in\n");
        return MIX_RESULT_NULL_PTR;
    }

    LOG_V( "Begin\n");

    /* Chainup parent method. */

    if (parent_class->initialize) {
        ret = parent_class->initialize(mix, config_params,
                frame_mgr, input_buf_pool, surface_pool,
                va_display);
    }

    if (ret != MIX_RESULT_SUCCESS)
    {
        LOG_E( "Error initializing\n");
        return ret;
    }

    if (!MIX_IS_VIDEOFORMAT_H264(mix))
        return MIX_RESULT_INVALID_PARAM;

    parent = MIX_VIDEOFORMAT(mix);
    MixVideoFormat_H264 *self = MIX_VIDEOFORMAT_H264(mix);

    LOG_V( "Locking\n");
    //From now on, we exit this function through cleanup:
    g_mutex_lock(parent->objectlock);

    LOG_V( "Before vbp_open\n");
    //Load the bitstream parser
    pret = vbp_open(ptype, &(parent->parser_handle));

    LOG_V( "After vbp_open\n");
    if (!(pret == VBP_OK))
    {
        ret = MIX_RESULT_FAIL;
        LOG_E( "Error opening parser\n");
        goto cleanup;
    }
    LOG_V( "Opened parser\n");

    //Fetch the codec header blob (SPS/PPS data -- presumably; confirm
    //against the config-params producer). Caller retains no ownership:
    //we free `header` in cleanup.
    ret = mix_videoconfigparamsdec_get_header(config_params,
            &header);

    if ((ret != MIX_RESULT_SUCCESS) || (header == NULL))
    {
        ret = MIX_RESULT_FAIL;
        LOG_E( "Cannot get header data\n");
        goto cleanup;
    }

    ret = mix_videoconfigparamsdec_get_extra_surface_allocation(config_params,
            &extra_surfaces);

    if (ret != MIX_RESULT_SUCCESS)
    {
        ret = MIX_RESULT_FAIL;
        LOG_E( "Cannot get extra surface allocation setting\n");
        goto cleanup;
    }

    LOG_V( "Calling parse on header data, handle %d\n", (int)parent->parser_handle);

    //Parse the header; TRUE flags this as initialization/header data.
    pret = vbp_parse(parent->parser_handle, header->data,
            header->data_size, TRUE);

    if (!((pret == VBP_OK) || (pret == VBP_DONE)))
    {
        ret = MIX_RESULT_FAIL;
        LOG_E( "Error parsing header data\n");
        goto cleanup;
    }

    LOG_V( "Parsed header\n");

    //Get the header data and save. `data` is owned by the parser; we do
    //not free it here.
    pret = vbp_query(parent->parser_handle, (void *)&data);

    if ((pret != VBP_OK) || (data == NULL))
    {
        ret = MIX_RESULT_FAIL;
        LOG_E( "Error reading parsed header data\n");
        goto cleanup;
    }

    LOG_V( "Queried parser for header data\n");

    //Time for libva initialization

    vadisplay = parent->va_display;

    numprofs = vaMaxNumProfiles(vadisplay);
    profiles = g_malloc(numprofs*sizeof(VAProfile));

    if (!profiles)
    {
        ret = MIX_RESULT_NO_MEMORY;
        LOG_E( "Error allocating memory\n");
        goto cleanup;
    }

    vret = vaQueryConfigProfiles(vadisplay, profiles,
            &numactualprofs);
    if (!(vret == VA_STATUS_SUCCESS))
    {
        ret = MIX_RESULT_FAIL;
        LOG_E( "Error initializing video driver\n");
        goto cleanup;
    }

    //check the desired profile support
    gint vaprof = 0;

    //TODO Need to cover more cases
    switch (data->codec_data->profile_idc)
    {
#if 1
//TODO Reinstate this once constraint_set1 flag has been added to codec_data
    case 66: //Baseline profile

        LOG_V( "mix_videofmt_h264_initialize: Baseline profile\n");
        if (data->codec_data->constraint_set1_flag == 0)
        {
            //True baseline: search the driver's profile list for it
            for (; vaprof < numactualprofs; vaprof++)
            {
                if (profiles[vaprof] == VAProfileH264Baseline)
                    break;
            }
        } else
        {
            //constraint_set1 means the stream is also decodable as
            //a higher profile; use High
            for (; vaprof < numactualprofs; vaprof++)
            {
                if (profiles[vaprof] == VAProfileH264High)
                    break;
            }
        }
        if ((vaprof >= numprofs) || ((profiles[vaprof] != VAProfileH264Baseline) && (profiles[vaprof] != VAProfileH264High)))
        //Did not get the profile we wanted
        {
            ret = MIX_RESULT_FAIL;
            LOG_E( "Profile not supported by driver\n");
            goto cleanup;
        }
        break;
#endif

#if 0
//Code left in place in case bug is fixed in libva
    case 77: //Main profile (need to set to High for libva bug)
        LOG_V( "mix_videofmt_h264_initialize: Main profile\n");

        for (; vaprof < numactualprofs; vaprof++)
        {
            if (profiles[vaprof] == VAProfileH264Main)
                break;
        }
        if (vaprof >= numprofs || profiles[vaprof] != VAProfileH264Main)
        //Did not get the profile we wanted
        {
            ret = MIX_RESULT_FAIL;
            LOG_E( "Profile not supported by driver\n");
            goto cleanup;
        }
        break;
#endif

    case 100: //High profile
    default: //Set to High as default

        LOG_V( "High profile\n");

        for (; vaprof < numactualprofs; vaprof++)
        {
            if (profiles[vaprof] == VAProfileH264High)
                break;
        }
        if (vaprof >= numprofs || profiles[vaprof] != VAProfileH264High)
        //Did not get the profile we wanted
        {
            ret = MIX_RESULT_FAIL;
            LOG_E( "Profile not supported by driver\n");
            goto cleanup;
        }
        break;


    }

    numentrypts = vaMaxNumEntrypoints(vadisplay);
    entrypts = g_malloc(numentrypts*sizeof(VAEntrypoint));

    if (!entrypts)
    {
        ret = MIX_RESULT_NO_MEMORY;
        LOG_E( "Error allocating memory\n");
        goto cleanup;
    }

    vret = vaQueryConfigEntrypoints(vadisplay, profiles[vaprof],
            entrypts, &numactualentrypts);
    if (!(vret == VA_STATUS_SUCCESS))
    {
        ret = MIX_RESULT_FAIL;
        LOG_E( "Error initializing driver\n");
        goto cleanup;
    }

    //We need the VLD (full slice-level decode) entrypoint
    gint vaentrypt = 0;
    for (; vaentrypt < numactualentrypts; vaentrypt++)
    {
        if (entrypts[vaentrypt] == VAEntrypointVLD)
            break;
    }
    if (vaentrypt >= numentrypts || entrypts[vaentrypt] != VAEntrypointVLD)
    //Did not get the entrypt we wanted
    {
        ret = MIX_RESULT_FAIL;
        LOG_E( "Entry point not supported by driver\n");
        goto cleanup;
    }

    //We are requesting RT attributes
    attrib.type = VAConfigAttribRTFormat;

    vret = vaGetConfigAttributes(vadisplay, profiles[vaprof],
            entrypts[vaentrypt], &attrib, 1);

    //TODO Handle other values returned for RT format
    // and check with requested format provided in config params
    //Right now only YUV 4:2:0 is supported by libva
    // and this is our default
    if (((attrib.value & VA_RT_FORMAT_YUV420) == 0) ||
            vret != VA_STATUS_SUCCESS)
    {
        ret = MIX_RESULT_FAIL;
        LOG_E( "Error initializing driver\n");
        goto cleanup;
    }

    //Initialize and save the VA config ID
    vret = vaCreateConfig(vadisplay, profiles[vaprof],
            entrypts[vaentrypt], &attrib, 1, &(parent->va_config));

    if (!(vret == VA_STATUS_SUCCESS))
    {
        ret = MIX_RESULT_FAIL;
        LOG_E( "Error initializing driver\n");
        goto cleanup;
    }

    LOG_V( "Created libva config with profile %d\n", vaprof);


    //Initialize the surface pool

    LOG_V( "Codec data says num_ref_frames is %d\n", data->codec_data->num_ref_frames);


    // handle both frame and field coding for interlaced content
    int num_ref_pictures = data->codec_data->num_ref_frames;
    if (!data->codec_data->frame_mbs_only_flag &&
            !data->codec_data->mb_adaptive_frame_field_flag)
    {

        // field coding, two fields share the same surface.
        //num_ref_pictures *= 2;
    }

    //Adding 1 to work around VBLANK issue.
    //Surface count = 1 + caller-requested extras + (refs + 3),
    //capped at MIX_VIDEO_H264_SURFACE_NUM.
    parent->va_num_surfaces = 1 + extra_surfaces + (((num_ref_pictures + 3) <
            MIX_VIDEO_H264_SURFACE_NUM) ?
            (num_ref_pictures + 3)
            : MIX_VIDEO_H264_SURFACE_NUM);

    numSurfaces = parent->va_num_surfaces;

    parent->va_surfaces = g_malloc(sizeof(VASurfaceID)*numSurfaces);

    surfaces = parent->va_surfaces;

    if (surfaces == NULL)
    {
        ret = MIX_RESULT_FAIL;
        LOG_E( "Cannot allocate temporary data\n");
        goto cleanup;
    }

    LOG_V( "Codec data says picture size is %d x %d\n", (data->pic_data[0].pic_parms->picture_width_in_mbs_minus1 + 1) * 16, (data->pic_data[0].pic_parms->picture_height_in_mbs_minus1 + 1) * 16);
    LOG_V( "getcaps says picture size is %d x %d\n", parent->picture_width, parent->picture_height);

    //NOTE(review): the 4th argument here is the VLD entrypoint, but this
    //vaCreateSurfaces signature takes a format (e.g. VA_RT_FORMAT_YUV420)
    //in that position -- looks suspicious; confirm against the libva
    //version this was written for.
    vret = vaCreateSurfaces(vadisplay, (data->pic_data[0].pic_parms->picture_width_in_mbs_minus1 + 1) * 16,
            (data->pic_data[0].pic_parms->picture_height_in_mbs_minus1 + 1) * 16, entrypts[vaentrypt],
            numSurfaces, surfaces);

    if (!(vret == VA_STATUS_SUCCESS))
    {
        ret = MIX_RESULT_FAIL;
        LOG_E( "Error allocating surfaces\n");
        goto cleanup;
    }

    parent->surfacepool = mix_surfacepool_new();
    *surface_pool = parent->surfacepool;

    if (parent->surfacepool == NULL)
    {
        ret = MIX_RESULT_FAIL;
        LOG_E( "Error initializing surface pool\n");
        goto cleanup;
    }


    ret = mix_surfacepool_initialize(parent->surfacepool,
            surfaces, numSurfaces);

    switch (ret)
    {
    case MIX_RESULT_SUCCESS:
        break;
    case MIX_RESULT_ALREADY_INIT: //This case is for future use when we can be initialized multiple times. It is to detect when we have not been reset before re-initializing.
    default:
        ret = MIX_RESULT_ALREADY_INIT;
        LOG_E( "Error init failure\n");
        goto cleanup;
        break;
    }

    LOG_V( "Created %d libva surfaces\n", numSurfaces);

    //Initialize and save the VA context ID
    //Note: VA_PROGRESSIVE libva flag is only relevant to MPEG2
    //NOTE(review): context uses parent->picture_width/height while the
    //surfaces above used the parser's dimensions -- confirm these agree.
    vret = vaCreateContext(vadisplay, parent->va_config,
            parent->picture_width, parent->picture_height,
            0, surfaces, numSurfaces,
            &(parent->va_context));
    if (!(vret == VA_STATUS_SUCCESS))
    {
        ret = MIX_RESULT_FAIL;
        LOG_E( "Error initializing video driver\n");
        goto cleanup;
    }

    LOG_V( "Created libva context width %d, height %d\n", parent->picture_width, parent->picture_height);

    //Create our table of Decoded Picture Buffer "in use" surfaces.
    //Keys are POC values (stored directly as pointers, no key destructor);
    //values are MixVideoFrame*, released by the destroy callbacks.
    self->dpb_surface_table = g_hash_table_new_full(NULL, NULL, mix_videofmt_h264_destroy_DPB_key, mix_videofmt_h264_destroy_DPB_value);

    if (self->dpb_surface_table == NULL)
    {
        ret = MIX_RESULT_NO_MEMORY;
        LOG_E( "Error allocating dbp surface table\n");
        goto cleanup; //leave this goto here in case other code is added between here and cleanup label
    }

    cleanup:
    //On failure, close the parser and mark the object uninitialized;
    //on success, mark it initialized.
    if (ret != MIX_RESULT_SUCCESS) {
        pret = vbp_close(parent->parser_handle);
        parent->parser_handle = NULL;
        parent->initialized = FALSE;

    } else {
        parent->initialized = TRUE;
    }

    //Free the header blob obtained from config_params (we own it)
    if (header != NULL)
    {
        if (header->data != NULL)
            g_free(header->data);
        g_free(header);
        header = NULL;
    }

    //Temporary profile/entrypoint arrays (g_free(NULL) is a no-op)
    g_free(profiles);
    g_free(entrypts);

    LOG_V( "Unlocking\n");
    g_mutex_unlock(parent->objectlock);


    return ret;
}
+
/* decode vmethod for H.264.
 *
 * Feeds each input buffer to the vbp parser. A frame may span several
 * decode() calls: when the parser reports the frame is complete
 * (VBP_OK/VBP_DONE after a parse), the accumulated data is decoded via
 * mix_videofmt_h264_process_decode(); otherwise the buffer is queued and
 * parse_in_progress is set so the next call (with a new timestamp) can
 * finish the frame first.
 *
 * @mix:           the format object
 * @bufin:         array of input buffers for this call
 * @bufincnt:      number of entries in bufin
 * @decode_params: carries the timestamp and discontinuity flag
 * @returns: MIX_RESULT_SUCCESS, or the last error encountered; parse
 *           failures are logged but the loop continues so every input
 *           buffer is processed.
 *
 * Holds objectlock for the entire parse/decode sequence. */
MIX_RESULT mix_videofmt_h264_decode(MixVideoFormat *mix, MixBuffer * bufin[],
        gint bufincnt, MixVideoDecodeParams * decode_params) {

    uint32 pret = 0;
    int i = 0;
    MixVideoFormat *parent = NULL;
    MIX_RESULT ret = MIX_RESULT_SUCCESS;
    guint64 ts = 0;
    vbp_data_h264 *data = NULL;
    gboolean discontinuity = FALSE;
    MixInputBufferEntry *bufentry = NULL;

    LOG_V( "Begin\n");

    if (mix == NULL || bufin == NULL || decode_params == NULL )
    {
        LOG_E( "NUll pointer passed in\n");
        return MIX_RESULT_NULL_PTR;
    }

    /* Chainup parent method.
       We are not chaining up to parent method for now.
     */

#if 0
    if (parent_class->decode) {
        return parent_class->decode(mix, bufin, bufincnt,
                decode_params);
    }
#endif

    if (!MIX_IS_VIDEOFORMAT_H264(mix))
        return MIX_RESULT_INVALID_PARAM;

    parent = MIX_VIDEOFORMAT(mix);


    ret = mix_videodecodeparams_get_timestamp(decode_params,
            &ts);
    if (ret != MIX_RESULT_SUCCESS)
    {
        return MIX_RESULT_FAIL;
    }

    ret = mix_videodecodeparams_get_discontinuity(decode_params,
            &discontinuity);
    if (ret != MIX_RESULT_SUCCESS)
    {
        return MIX_RESULT_FAIL;
    }

    //From now on, we exit this function through cleanup:

    LOG_V( "Locking\n");
    g_mutex_lock(parent->objectlock);

    LOG_V( "parse in progress is %d\n", parent->parse_in_progress);
    //If this is a new frame and we haven't retrieved parser
    // workload data from previous frame yet, do so
    if ((ts != parent->current_timestamp) &&
            (parent->parse_in_progress))
    {

        //query for data left over from the previous frame
        pret = vbp_query(parent->parser_handle,
                (void *) &data);

        if ((pret != VBP_OK) || (data == NULL))
        {
            ret = MIX_RESULT_FAIL;
            LOG_E( "Error initializing parser\n");
            goto cleanup;
        }

        LOG_V( "Queried for last frame data\n");

        //process and decode data for the previous frame, using the
        //timestamp/discontinuity saved when that frame started
        ret = mix_videofmt_h264_process_decode(mix,
                data, parent->current_timestamp,
                parent->discontinuity_frame_in_progress);

        if (ret != MIX_RESULT_SUCCESS)
        {
            //We log this but need to process the new frame data, so do not return
            LOG_E( "Process_decode failed.\n");
        }

        LOG_V( "Called process and decode for last frame\n");

        parent->parse_in_progress = FALSE;

    }

    //Record state for the frame now starting
    parent->current_timestamp = ts;
    parent->discontinuity_frame_in_progress = discontinuity;

    LOG_V( "Starting current frame %d, timestamp %"G_GINT64_FORMAT"\n", mix_video_h264_counter++, ts);

    for (i = 0; i < bufincnt; i++)
    {

        LOG_V( "Calling parse for current frame, parse handle %d, buf %x, size %d\n", (int)parent->parser_handle, (guint)bufin[i]->data, bufin[i]->size);

        //FALSE: this is stream data, not header/init data
        pret = vbp_parse(parent->parser_handle,
                bufin[i]->data,
                bufin[i]->size,
                FALSE);

        LOG_V( "Called parse for current frame\n");

        if ((pret == VBP_DONE) || (pret == VBP_OK))
        {
            //Frame is complete: query for data
            pret = vbp_query(parent->parser_handle,
                    (void *) &data);

            if ((pret != VBP_OK) || (data == NULL))
            {
                ret = MIX_RESULT_FAIL;
                LOG_E( "Error getting parser data\n");
                goto cleanup;
            }

            LOG_V( "Called query for current frame\n");

            //Increase the ref count of this input buffer; the queue
            //entry owns this reference until the frame is released
            mix_buffer_ref(bufin[i]);

            //Create a new MixInputBufferEntry
            //TODO make this from a pool to optimize
            bufentry = g_malloc(sizeof(
                    MixInputBufferEntry));
            if (bufentry == NULL)
            {
                ret = MIX_RESULT_NO_MEMORY;
                LOG_E( "Error allocating bufentry\n");
                goto cleanup;
            }

            bufentry->buf = bufin[i];
            LOG_V( "Setting bufentry %x for mixbuffer %x ts to %"G_GINT64_FORMAT"\n", (guint)bufentry, (guint)bufentry->buf, ts);
            bufentry->timestamp = ts;

            LOG_V( "Enqueue this input buffer for current frame\n");
            LOG_V( "bufentry->timestamp %"G_GINT64_FORMAT"\n", bufentry->timestamp);

            //Enqueue this input buffer
            g_queue_push_tail(parent->inputbufqueue,
                    (gpointer)bufentry);

            //process and decode data
            ret = mix_videofmt_h264_process_decode(mix,
                    data, ts, discontinuity);

            if (ret != MIX_RESULT_SUCCESS)
            {
                //We log this but continue since we need to complete our processing of input buffers
                LOG_E( "Process_decode failed.\n");
            }

            LOG_V( "Called process and decode for current frame\n");

            parent->parse_in_progress = FALSE;
        }
        else if (pret != VBP_OK)
        {
            //We log this but continue since we need to complete our processing of input buffers
            LOG_E( "Parsing failed.\n");
            ret = MIX_RESULT_FAIL;
        }
        else
        {
            //Frame not yet complete: queue the buffer and wait for
            //more data in a later call

            LOG_V( "Enqueuing buffer and going on to next (if any) for this frame\n");

            //Increase the ref count of this input buffer
            mix_buffer_ref(bufin[i]);

            //Create a new MixInputBufferEntry
            //TODO make this from a pool to optimize
            bufentry = g_malloc(sizeof
                    (MixInputBufferEntry));
            if (bufentry == NULL)
            {
                ret = MIX_RESULT_NO_MEMORY;
                LOG_E( "Error allocating bufentry\n");
                goto cleanup;
            }
            bufentry->buf = bufin[i];
            LOG_V( "Setting bufentry %x for mixbuffer %x ts to %"G_GINT64_FORMAT"\n", (guint)bufentry, (guint)bufentry->buf, ts);
            bufentry->timestamp = ts;

            LOG_V( "Enqueue this input buffer for current frame\n");
            LOG_V( "bufentry->timestamp %"G_GINT64_FORMAT"\n", bufentry->timestamp);

            //Enqueue this input buffer
            g_queue_push_tail(parent->inputbufqueue,
                    (gpointer)bufentry);
            LOG_V( "Setting parse_in_progress to TRUE\n");
            parent->parse_in_progress = TRUE;
        }

    }


    cleanup:

    LOG_V( "Unlocking\n");
    g_mutex_unlock(parent->objectlock);

    LOG_V( "End\n");

    return ret;
}
+
/* flush vmethod for H.264.
 *
 * Discards all pending state: unrefs and frees every queued input
 * buffer entry, resets the in-progress parse flags and timestamp,
 * empties the DPB surface table (unreffing its frames via the table's
 * destroy callbacks), and flushes the vbp parser.
 *
 * @mix: the format object
 * @returns: MIX_RESULT_SUCCESS, or MIX_RESULT_FAIL if vbp_flush fails.
 *
 * Holds objectlock for the whole operation. */
MIX_RESULT mix_videofmt_h264_flush(MixVideoFormat *mix) {

    MIX_RESULT ret = MIX_RESULT_SUCCESS;

    LOG_V( "Begin\n");

    if (mix == NULL)
    {
        LOG_E( "Null pointer passed in\n");
        return MIX_RESULT_NULL_PTR;
    }

    uint32 pret = 0;
    MixInputBufferEntry *bufentry = NULL;


    /* Chainup parent method.
       We are not chaining up to parent method for now.
     */

#if 0
    if (parent_class->flush) {
        return parent_class->flush(mix, msg);
    }
#endif

    MixVideoFormat_H264 *self = MIX_VIDEOFORMAT_H264(mix);

    g_mutex_lock(mix->objectlock);

    //Clear the contents of inputbufqueue, releasing the reference
    //each entry holds on its MixBuffer
    while (!g_queue_is_empty(mix->inputbufqueue))
    {
        bufentry = (MixInputBufferEntry *) g_queue_pop_head(
                mix->inputbufqueue);
        if (bufentry == NULL) continue;

        mix_buffer_unref(bufentry->buf);
        g_free(bufentry);
    }

    //Clear parse_in_progress flag and current timestamp
    mix->parse_in_progress = FALSE;
    mix->discontinuity_frame_in_progress = FALSE;
    mix->current_timestamp = 0;

    //Clear the DPB surface table (frames are unrefed by the table's
    //value-destroy callback)
    g_hash_table_remove_all(self->dpb_surface_table);

    //Call parser flush
    pret = vbp_flush(mix->parser_handle);
    if (pret != VBP_OK)
        ret = MIX_RESULT_FAIL;

    g_mutex_unlock(mix->objectlock);

    LOG_V( "End\n");

    return ret;
}
+
/* eos (end-of-stream) vmethod for H.264.
 *
 * If a frame parse is still in progress, queries the parser for the
 * remaining data and decodes that final frame, then signals EOS to the
 * frame manager so downstream can drain.
 *
 * @mix: the format object
 * @returns: result of the final decode or of mix_framemanager_eos();
 *           note the framemanager EOS is signalled even if draining the
 *           last frame failed.
 *
 * objectlock is held only around the parser/decode work; the frame
 * manager call happens after unlock. */
MIX_RESULT mix_videofmt_h264_eos(MixVideoFormat *mix) {

    MIX_RESULT ret = MIX_RESULT_SUCCESS;
    vbp_data_h264 *data = NULL;
    uint32 pret = 0;

    LOG_V( "Begin\n");

    if (mix == NULL)
    {
        LOG_E( "Null pointer passed in\n");
        return MIX_RESULT_NULL_PTR;
    }

    /* Chainup parent method.
       We are not chaining up to parent method for now.
     */

#if 0
    if (parent_class->eos) {
        return parent_class->eos(mix, msg);
    }
#endif

    g_mutex_lock(mix->objectlock);

    //if a frame is in progress, process the frame
    if (mix->parse_in_progress)
    {
        //query for the data accumulated so far
        pret = vbp_query(mix->parser_handle,
                (void *) &data);

        if ((pret != VBP_OK) || (data == NULL))
        {
            ret = MIX_RESULT_FAIL;
            LOG_E( "Error getting last parse data\n");
            goto cleanup;
        }

        //process and decode the final frame
        ret = mix_videofmt_h264_process_decode(mix,
                data, mix->current_timestamp,
                mix->discontinuity_frame_in_progress);
        mix->parse_in_progress = FALSE;
        if (ret != MIX_RESULT_SUCCESS)
        {
            LOG_E( "Error processing last frame\n");
            goto cleanup;
        }

    }

cleanup:

    g_mutex_unlock(mix->objectlock);

    //Call Frame Manager with _eos() -- happens on both success and
    //error paths, after the lock is released
    ret = mix_framemanager_eos(mix->framemgr);

    LOG_V( "End\n");

    return ret;


}
+
+MIX_RESULT mix_videofmt_h264_deinitialize(MixVideoFormat *mix) {
+
+//Note this method is not called; may remove in future
+
+ LOG_V( "Begin\n");
+
+ if (mix == NULL)
+ {
+ LOG_E( "Null pointer passed in\n");
+ return MIX_RESULT_NULL_PTR;
+ }
+
+ /* Chainup parent method.
+ */
+
+ if (parent_class->deinitialize) {
+ return parent_class->deinitialize(mix);
+ }
+
+ //Most stuff is cleaned up in parent_class->finalize() and in _finalize
+
+ LOG_V( "End\n");
+
+ return MIX_RESULT_SUCCESS;
+}
+#define HACK_DPB
+#ifdef HACK_DPB
+static inline void mix_videofmt_h264_hack_dpb(MixVideoFormat *mix,
+ vbp_picture_data_h264* pic_data
+ )
+{
+
+ gboolean found = FALSE;
+ guint tflags = 0;
+ VAPictureParameterBufferH264 *pic_params = pic_data->pic_parms;
+ VAPictureH264 *pRefList = NULL;
+ int i = 0, j = 0, k = 0, list = 0;
+
+ MixVideoFormat_H264 *self = MIX_VIDEOFORMAT_H264(mix);
+
+ //Set the surface ID for everything in the parser DPB to INVALID
+ for (i = 0; i < 16; i++)
+ {
+ pic_params->ReferenceFrames[i].picture_id = VA_INVALID_SURFACE;
+ pic_params->ReferenceFrames[i].frame_idx = -1;
+ pic_params->ReferenceFrames[i].TopFieldOrderCnt = -1;
+ pic_params->ReferenceFrames[i].BottomFieldOrderCnt = -1;
+ pic_params->ReferenceFrames[i].flags = VA_PICTURE_H264_INVALID; //assuming we don't need to OR with existing flags
+ }
+
+ pic_params->num_ref_frames = 0;
+
+ for (i = 0; i < pic_data->num_slices; i++)
+ {
+
+ //Copy from the List0 and List1 surface IDs
+ pRefList = pic_data->slc_data[i].slc_parms.RefPicList0;
+ for (list = 0; list < 2; list++)
+ {
+ for (j = 0; j < 32; j++)
+ {
+ if (pRefList[j].flags & VA_PICTURE_H264_INVALID)
+ {
+ break; //no more valid reference frames in this list
+ }
+ found = FALSE;
+ for (k = 0; k < pic_params->num_ref_frames; k++)
+ {
+ if (pic_params->ReferenceFrames[k].TopFieldOrderCnt == pRefList[j].TopFieldOrderCnt)
+ {
+ ///check for complementary field
+ tflags = pic_params->ReferenceFrames[k].flags | pRefList[j].flags;
+ //If both TOP and BOTTOM are set, we'll clear those flags
+ if ((tflags & VA_PICTURE_H264_TOP_FIELD) &&
+ (tflags & VA_PICTURE_H264_TOP_FIELD))
+ pic_params->ReferenceFrames[k].flags = VA_PICTURE_H264_SHORT_TERM_REFERENCE;
+ found = TRUE; //already in the DPB; will not add this one
+ break;
+ }
+ }
+ if (!found)
+ {
+ guint poc = mix_videofmt_h264_get_poc(&(pRefList[j]));
+ gpointer video_frame = g_hash_table_lookup(self->dpb_surface_table, (gpointer)poc);
+ pic_params->ReferenceFrames[pic_params->num_ref_frames].picture_id =
+ ((MixVideoFrame *)video_frame)->frame_id;
+
+ LOG_V( "Inserting frame id %d into DPB\n", pic_params->ReferenceFrames[pic_params->num_ref_frames].picture_id);
+
+ pic_params->ReferenceFrames[pic_params->num_ref_frames].flags =
+ pRefList[j].flags;
+ pic_params->ReferenceFrames[pic_params->num_ref_frames].frame_idx =
+ pRefList[j].frame_idx;
+ pic_params->ReferenceFrames[pic_params->num_ref_frames].TopFieldOrderCnt =
+ pRefList[j].TopFieldOrderCnt;
+ pic_params->ReferenceFrames[pic_params->num_ref_frames++].BottomFieldOrderCnt =
+ pRefList[j].BottomFieldOrderCnt;
+ }
+
+ }
+ pRefList = pic_data->slc_data[i].slc_parms.RefPicList1;
+ }
+
+ }
+}
+#endif
+
+
+MIX_RESULT mix_videofmt_h264_process_decode_picture(MixVideoFormat *mix,
+ vbp_data_h264 *data,
+ guint64 timestamp,
+ gboolean discontinuity,
+ int pic_index,
+ MixVideoFrame *frame)
+{
+
+ MIX_RESULT ret = MIX_RESULT_SUCCESS;
+ VAStatus vret = VA_STATUS_SUCCESS;
+ VADisplay vadisplay = NULL;
+ VAContextID vacontext;
+ guint buffer_id_cnt = 0;
+ VABufferID *buffer_ids = NULL;
+
+ //TODO Partition this method into smaller methods
+
+ LOG_V( "Begin\n");
+
+ if ((mix == NULL) || (data == NULL) || (data->pic_data == NULL) || (frame == NULL))
+ {
+ LOG_E( "Null pointer passed in\n");
+ return MIX_RESULT_NULL_PTR;
+ }
+
+ vbp_picture_data_h264* pic_data = &(data->pic_data[pic_index]);
+
+
+ //After this point, all exits from this function are through cleanup:
+
+ if (!MIX_IS_VIDEOFORMAT_H264(mix))
+ return MIX_RESULT_INVALID_PARAM;
+
+ MixVideoFormat_H264 *self = MIX_VIDEOFORMAT_H264(mix);
+
+ VAPictureParameterBufferH264 *pic_params = pic_data->pic_parms;
+
+ if (pic_params == NULL)
+ {
+ ret = MIX_RESULT_NULL_PTR;
+ LOG_E( "Error reading parser data\n");
+ goto cleanup;
+ }
+
+ //TODO
+ //Check for frame gaps and repeat frames if necessary
+
+ LOG_V( "num_slices is %d, allocating %d buffer_ids\n", pic_data->num_slices, (pic_data->num_slices * 2) + 2);
+
+ buffer_ids = g_malloc(sizeof(VABufferID) *
+ ((pic_data->num_slices * 2) + 2));
+
+ if (buffer_ids == NULL)
+ {
+ LOG_E( "Cannot allocate buffer IDs\n");
+ ret = MIX_RESULT_NO_MEMORY;
+ goto cleanup;
+ }
+
+ //Set up reference frames for the picture parameter buffer
+
+ //Set the picture type (I, B or P frame)
+ //For H.264 we use the first encountered slice type for this (check - may need to change later to search all slices for B type)
+ MixFrameType frame_type = TYPE_INVALID;
+
+ switch (pic_data->slc_data->slc_parms.slice_type)
+ {
+ case 0:
+ case 3:
+ case 5:
+ case 8:
+ frame_type = TYPE_P;
+ break;
+ case 1:
+ case 6:
+ frame_type = TYPE_B;
+ break;
+ case 2:
+ case 4:
+ case 7:
+ case 9:
+ frame_type = TYPE_I;
+ break;
+ default:
+ break;
+ }
+
+ //Do not have to check for B frames after a seek
+ //Note: Demux should seek to IDR (instantaneous decoding refresh) frame, otherwise
+ // DPB will not be correct and frames may come in with invalid references
+ // This will be detected when DPB is checked for valid mapped surfaces and
+ // error returned from there.
+
+ LOG_V( "Getting a new surface for frame_num %d\n", pic_params->frame_num);
+ LOG_V( "frame type is %d\n", frame_type);
+
+
+
+ //Set the frame type for the frame object (used in reordering by frame manager)
+ ret = mix_videoframe_set_frame_type(frame, frame_type);
+
+ if (ret != MIX_RESULT_SUCCESS)
+ {
+ LOG_E( "Error setting frame type on frame\n");
+ goto cleanup;
+ }
+
+ LOG_V( "Updating DPB for libva\n");
+
+ //Now handle the reference frames and surface IDs for DPB and current frame
+ mix_videofmt_h264_handle_ref_frames(mix, pic_params, frame);
+
+#ifdef HACK_DPB
+ //We have to provide a hacked DPB rather than complete DPB for libva as workaround
+ mix_videofmt_h264_hack_dpb(mix, pic_data);
+#endif
+
+ //Libva buffer set up
+
+ vadisplay = mix->va_display;
+ vacontext = mix->va_context;
+
+ LOG_V( "Creating libva picture parameter buffer\n");
+ LOG_V( "picture parameter buffer shows num_ref_frames is %d\n", pic_params->num_ref_frames);
+
+ //First the picture parameter buffer
+ vret = vaCreateBuffer(vadisplay, vacontext,
+ VAPictureParameterBufferType,
+ sizeof(VAPictureParameterBufferH264),
+ 1,
+ pic_params,
+ &buffer_ids[buffer_id_cnt]);
+ buffer_id_cnt++;
+
+ if (vret != VA_STATUS_SUCCESS)
+ {
+ ret = MIX_RESULT_FAIL;
+ LOG_E( "Video driver returned error from vaCreateBuffer\n");
+ goto cleanup;
+ }
+
+ LOG_V( "Creating libva IQMatrix buffer\n");
+
+
+ //Then the IQ matrix buffer
+ vret = vaCreateBuffer(vadisplay, vacontext,
+ VAIQMatrixBufferType,
+ sizeof(VAIQMatrixBufferH264),
+ 1,
+ data->IQ_matrix_buf,
+ &buffer_ids[buffer_id_cnt]);
+ buffer_id_cnt++;
+
+ if (vret != VA_STATUS_SUCCESS)
+ {
+ ret = MIX_RESULT_FAIL;
+ LOG_E( "Video driver returned error from vaCreateBuffer\n");
+ goto cleanup;
+ }
+
+
+ //Now for slices
+ int i = 0;
+ gpointer video_frame;
+ for (;i < pic_data->num_slices; i++)
+ {
+
+ LOG_V( "Creating libva slice parameter buffer, for slice %d\n", i);
+
+ //Do slice parameters
+
+ //First patch up the List0 and List1 surface IDs
+ int j = 0;
+ guint poc = 0;
+ for (; j <= pic_data->slc_data[i].slc_parms.num_ref_idx_l0_active_minus1; j++)
+ {
+ if (!(pic_data->slc_data[i].slc_parms.RefPicList0[j].flags & VA_PICTURE_H264_INVALID))
+ {
+ poc = mix_videofmt_h264_get_poc(&(pic_data->slc_data[i].slc_parms.RefPicList0[j]));
+ video_frame = g_hash_table_lookup(self->dpb_surface_table, (gpointer)poc);
+ if (video_frame == NULL)
+ {
+ LOG_E( "unable to find surface of picture %d (current picture %d).", poc, mix_videofmt_h264_get_poc(&pic_params->CurrPic));
+ ret = MIX_RESULT_FAIL;
+ goto cleanup;
+ }
+ else
+ {
+ pic_data->slc_data[i].slc_parms.RefPicList0[j].picture_id =
+ ((MixVideoFrame *)video_frame)->frame_id;
+ }
+ }
+
+ }
+
+ if ((pic_data->slc_data->slc_parms.slice_type == 1) || (pic_data->slc_data->slc_parms.slice_type == 6))
+ {
+ for (j = 0; j <= pic_data->slc_data[i].slc_parms.num_ref_idx_l1_active_minus1; j++)
+ {
+ if (!(pic_data->slc_data[i].slc_parms.RefPicList1[j].flags & VA_PICTURE_H264_INVALID))
+ {
+ poc = mix_videofmt_h264_get_poc(&(pic_data->slc_data[i].slc_parms.RefPicList1[j]));
+ video_frame = g_hash_table_lookup(self->dpb_surface_table, (gpointer)poc);
+ if (video_frame == NULL)
+ {
+ LOG_E( "unable to find surface of picture %d (current picture %d).", poc, mix_videofmt_h264_get_poc(&pic_params->CurrPic));
+ ret = MIX_RESULT_FAIL;
+ goto cleanup;
+ }
+ else
+ {
+ pic_data->slc_data[i].slc_parms.RefPicList1[j].picture_id =
+ ((MixVideoFrame *)video_frame)->frame_id;
+ }
+ }
+ }
+ }
+
+
+ //Then do the libva setup
+
+ vret = vaCreateBuffer(vadisplay, vacontext,
+ VASliceParameterBufferType,
+ sizeof(VASliceParameterBufferH264),
+ 1,
+ &(pic_data->slc_data[i].slc_parms),
+ &buffer_ids[buffer_id_cnt]);
+
+ if (vret != VA_STATUS_SUCCESS)
+ {
+ ret = MIX_RESULT_FAIL;
+ LOG_E( "Video driver returned error from vaCreateBuffer\n");
+ goto cleanup;
+ }
+
+ buffer_id_cnt++;
+
+
+ LOG_V( "Creating libva slice data buffer for slice %d, using slice address %x, with offset %d and size %u\n", i, (guint)pic_data->slc_data[i].buffer_addr, pic_data->slc_data[i].slc_parms.slice_data_offset, pic_data->slc_data[i].slice_size);
+
+
+ //Do slice data
+
+ vret = vaCreateBuffer(vadisplay, vacontext,
+ VASliceDataBufferType,
+ //size
+ pic_data->slc_data[i].slice_size,
+ //num_elements
+ 1,
+ //slice data buffer pointer
+ //Note that this is the original data buffer ptr;
+ // offset to the actual slice data is provided in
+ // slice_data_offset in VASliceParameterBufferH264
+ pic_data->slc_data[i].buffer_addr + pic_data->slc_data[i].slice_offset,
+ &buffer_ids[buffer_id_cnt]);
+
+ buffer_id_cnt++;
+
+ if (vret != VA_STATUS_SUCCESS)
+ {
+ ret = MIX_RESULT_FAIL;
+ LOG_E( "Video driver returned error from vaCreateBuffer\n");
+ goto cleanup;
+ }
+
+ }
+
+ gulong surface = 0;
+
+ //Get our surface ID from the frame object
+ ret = mix_videoframe_get_frame_id(frame, &surface);
+
+ if (ret != MIX_RESULT_SUCCESS)
+ {
+ LOG_E( "Error getting surface ID from frame object\n");
+ goto cleanup;
+ }
+
+ LOG_V( "Calling vaBeginPicture\n");
+
+ //Now we can begin the picture
+ vret = vaBeginPicture(vadisplay, vacontext, surface);
+
+ if (vret != VA_STATUS_SUCCESS)
+ {
+ ret = MIX_RESULT_FAIL;
+ LOG_E( "Video driver returned error from vaBeginPicture\n");
+ goto cleanup;
+ }
+
+ LOG_V( "Calling vaRenderPicture\n");
+
+ //Render the picture
+ vret = vaRenderPicture(vadisplay, vacontext,
+ buffer_ids,
+ buffer_id_cnt);
+
+
+ if (vret != VA_STATUS_SUCCESS)
+ {
+ ret = MIX_RESULT_FAIL;
+ LOG_E( "Video driver returned error from vaRenderPicture\n");
+ goto cleanup;
+ }
+
+ LOG_V( "Calling vaEndPicture\n");
+
+ //End picture
+ vret = vaEndPicture(vadisplay, vacontext);
+
+ if (vret != VA_STATUS_SUCCESS)
+ {
+ ret = MIX_RESULT_FAIL;
+ LOG_E( "Video driver returned error from vaEndPicture\n");
+ goto cleanup;
+ }
+
+ LOG_V( "Calling vaSyncSurface\n");
+
+ //Decode the picture
+ vret = vaSyncSurface(vadisplay, surface);
+
+ if (vret != VA_STATUS_SUCCESS)
+ {
+ ret = MIX_RESULT_FAIL;
+ LOG_E( "Video driver returned error from vaSyncSurface\n");
+ goto cleanup;
+ }
+
+
+ if (pic_index == 0)
+ {
+ //Set the discontinuity flag
+ mix_videoframe_set_discontinuity(frame, discontinuity);
+
+ //Set the timestamp
+ mix_videoframe_set_timestamp(frame, timestamp);
+
+ guint32 frame_structure = VA_FRAME_PICTURE;
+ if (pic_params->CurrPic.flags & VA_PICTURE_H264_TOP_FIELD)
+ {
+ frame_structure = VA_TOP_FIELD;
+ }
+ else if (pic_params->CurrPic.flags & VA_PICTURE_H264_BOTTOM_FIELD)
+ {
+ frame_structure = VA_BOTTOM_FIELD;
+ }
+ mix_videoframe_set_frame_structure(frame, frame_structure);
+ }
+ else
+ {
+ // frame must be field-coded, no need to set
+ // discontinuity falg and time stamp again
+ mix_videoframe_set_frame_structure(frame, VA_BOTTOM_FIELD | VA_TOP_FIELD);
+ }
+
+ //TODO need to save off frame when handling is added for repeat frames?
+
+//TODO Complete YUVDUMP code and move into base class
+#ifdef YUVDUMP
+ if (mix_video_h264_counter < 10)
+ ret = GetImageFromSurface (mix, frame);
+// g_usleep(5000000);
+#endif /* YUVDUMP */
+
+ LOG_V( "Enqueueing the frame with frame manager, timestamp %"G_GINT64_FORMAT"\n", timestamp);
+
+
+ cleanup:
+
+ if (NULL != buffer_ids)
+ g_free(buffer_ids);
+
+
+ LOG_V( "End\n");
+
+ return ret;
+
+}
+
+
+MIX_RESULT mix_videofmt_h264_process_decode(MixVideoFormat *mix,
+ vbp_data_h264 *data,
+ guint64 timestamp,
+ gboolean discontinuity)
+{
+ MIX_RESULT ret = MIX_RESULT_SUCCESS;
+ int i = 0;
+
+ if ((mix == NULL) || (data == NULL))
+ {
+ LOG_E( "Null pointer passed in\n");
+ return MIX_RESULT_NULL_PTR;
+ }
+
+ //Get a frame from the surface pool
+ MixVideoFrame *frame = NULL;
+
+ ret = mix_surfacepool_get(mix->surfacepool, &frame);
+
+ if (ret != MIX_RESULT_SUCCESS)
+ {
+ LOG_E( "Error getting frame from surfacepool\n");
+ return MIX_RESULT_FAIL;
+ }
+
+
+ for (i = 0; i < data->num_pictures; i++)
+ {
+ ret = mix_videofmt_h264_process_decode_picture(mix, data, timestamp, discontinuity, i, frame);
+ if (ret != MIX_RESULT_SUCCESS)
+ {
+ LOG_E( "Failed to process decode picture %d, error = %#X.", data->buf_number, ret);
+ break;
+ }
+ }
+
+ if (ret == MIX_RESULT_SUCCESS)
+ {
+ //Enqueue the decoded frame using frame manager
+ ret = mix_framemanager_enqueue(mix->framemgr, frame);
+ if (ret != MIX_RESULT_SUCCESS)
+ {
+ LOG_E( "Error enqueuing frame object\n");
+ mix_videoframe_unref(frame);
+ }
+
+ }
+ else
+ {
+ mix_videoframe_unref(frame);
+ }
+ mix_videofmt_h264_release_input_buffers(mix, timestamp);
+
+ return ret;
+}
+
/**
 * mix_videofmt_h264_handle_ref_frames:
 * @mix: MixVideoFormat object (must be a MixVideoFormat_H264)
 * @pic_params: libva picture parameters whose ReferenceFrames/CurrPic
 *              surface IDs are patched in place
 * @current_frame: frame backing the picture currently being decoded
 *
 * Synchronizes the internal DPB surface table with the parser's DPB:
 * stale entries are removed (and unreffed via the table's value-destroy
 * callback), surface IDs are filled in for every valid reference
 * picture, and the current frame is inserted (with an extra ref) when
 * it is itself marked as a reference picture.
 *
 * Returns: MIX_RESULT_SUCCESS, or MIX_RESULT_NULL_PTR on NULL input.
 */
MIX_RESULT mix_videofmt_h264_handle_ref_frames(MixVideoFormat *mix,
					VAPictureParameterBufferH264* pic_params,
					MixVideoFrame * current_frame
					) {

	guint poc = 0;

	LOG_V( "Begin\n");

	if (mix == NULL || current_frame == NULL || pic_params == NULL)
	{
		LOG_E( "Null pointer passed in\n");
		return MIX_RESULT_NULL_PTR;
	}


	LOG_V( "Pic_params has flags %d, topfieldcnt %d, bottomfieldcnt %d. Surface ID is %d\n", pic_params->CurrPic.flags, pic_params->CurrPic.TopFieldOrderCnt, pic_params->CurrPic.BottomFieldOrderCnt, (gint) current_frame->frame_id);

#ifdef MIX_LOG_ENABLE
	if (pic_params->CurrPic.flags & VA_PICTURE_H264_INVALID)
		LOG_V( "Flags show VA_PICTURE_H264_INVALID\n");

	if (pic_params->CurrPic.flags & VA_PICTURE_H264_TOP_FIELD)
		LOG_V( "Flags show VA_PICTURE_H264_TOP_FIELD\n");

	if (pic_params->CurrPic.flags & VA_PICTURE_H264_BOTTOM_FIELD)
		LOG_V( "Flags show VA_PICTURE_H264_BOTTOM_FIELD\n");

	if (pic_params->CurrPic.flags & VA_PICTURE_H264_SHORT_TERM_REFERENCE)
		LOG_V( "Flags show VA_PICTURE_H264_SHORT_TERM_REFERENCE\n");

	if (pic_params->CurrPic.flags & VA_PICTURE_H264_LONG_TERM_REFERENCE)
		LOG_V( "Flags show VA_PICTURE_H264_LONG_TERM_REFERENCE\n");
#endif

	MixVideoFormat_H264 *self = MIX_VIDEOFORMAT_H264(mix);


	//First we need to check the parser DBP against our DPB table
	//So for each item in our DBP table, we look to see if it is in the parser DPB
	//If it is not, it gets unrefed and removed
	//NOTE: the #ifdef splices "num_removed =" onto the call below only
	//when logging is enabled; with logging disabled the return value is
	//intentionally discarded and the LOG_V compiles away.
#ifdef MIX_LOG_ENABLE
	guint num_removed =
#endif
	g_hash_table_foreach_remove(self->dpb_surface_table, mix_videofmt_h264_check_in_DPB, pic_params);

	LOG_V( "%d entries removed from DPB surface table at this frame\n", num_removed);


	MixVideoFrame *mvf = NULL;
	gboolean found = FALSE;
	//Set the surface ID for everything in the parser DPB
	//(keys are POC values stored directly in the gpointer)
	int i = 0;
	for (; i < 16; i++)
	{
		if (!(pic_params->ReferenceFrames[i].flags & VA_PICTURE_H264_INVALID))
		{

			poc = mix_videofmt_h264_get_poc(&(pic_params->ReferenceFrames[i]));
			LOG_V( "Looking up poc %d in dpb table\n", poc);
			//lookup_extended distinguishes "missing" from a NULL value
			found = g_hash_table_lookup_extended(self->dpb_surface_table, (gpointer)poc, NULL, (gpointer)&mvf);

			if (found)
			{
				pic_params->ReferenceFrames[i].picture_id = mvf->frame_id;
				LOG_V( "Looked up poc %d in dpb table found frame ID %d\n", poc, (gint)mvf->frame_id);
			} else {
				LOG_V( "Looking up poc %d in dpb table did not find value\n", poc);
			}
			LOG_V( "For poc %d, set surface id for DPB index %d to %d\n", poc, i, (gint)pic_params->ReferenceFrames[i].picture_id);
		}

	}


	//Set picture_id for current picture
	pic_params->CurrPic.picture_id = current_frame->frame_id;

	//Check to see if current frame is a reference frame
	if ((pic_params->CurrPic.flags & VA_PICTURE_H264_SHORT_TERM_REFERENCE) || (pic_params->CurrPic.flags & VA_PICTURE_H264_LONG_TERM_REFERENCE))
	{
		//Get current frame's POC
		poc = mix_videofmt_h264_get_poc(&(pic_params->CurrPic));

		//Increment the reference count for this frame
		//(the table's value-destroy callback unrefs it on removal)
		mix_videoframe_ref(current_frame);

		LOG_V( "Inserting poc %d, surfaceID %d\n", poc, (gint)current_frame->frame_id);
		//Add this frame to the DPB surface table
		g_hash_table_insert(self->dpb_surface_table, (gpointer)poc, current_frame);
	}



	LOG_V( "End\n");

	return MIX_RESULT_SUCCESS;
}
+
+guint mix_videofmt_h264_get_poc(VAPictureH264 *pic)
+{
+
+ if (pic == NULL)
+ return 0;
+
+ if (pic->flags & VA_PICTURE_H264_BOTTOM_FIELD)
+ return pic->BottomFieldOrderCnt;
+
+
+ if (pic->flags & VA_PICTURE_H264_TOP_FIELD)
+ return pic->TopFieldOrderCnt;
+
+ return pic->TopFieldOrderCnt;
+
+}
+
+
+gboolean mix_videofmt_h264_check_in_DPB(gpointer key, gpointer value, gpointer user_data)
+{
+ gboolean ret = TRUE;
+
+ if ((value == NULL) || (user_data == NULL)) //Note that 0 is valid value for key
+ return FALSE;
+
+ VAPictureH264* vaPic = NULL;
+ int i = 0;
+ for (; i < 16; i++)
+ {
+ vaPic = &(((VAPictureParameterBufferH264*)user_data)->ReferenceFrames[i]);
+ if (vaPic->flags & VA_PICTURE_H264_INVALID)
+ continue;
+
+ if ((guint)key == vaPic->TopFieldOrderCnt ||
+ (guint)key == vaPic->BottomFieldOrderCnt)
+ {
+ ret = FALSE;
+ break;
+ }
+ }
+
+ return ret;
+}
+
+void mix_videofmt_h264_destroy_DPB_key(gpointer data)
+{
+//TODO remove this method and don't register it with the hash table foreach call; it is no longer needed
+ LOG_V( "Begin, poc of %d\n", (guint)data);
+ LOG_V( "End\n");
+
+ return;
+}
+
+void mix_videofmt_h264_destroy_DPB_value(gpointer data)
+{
+ LOG_V( "Begin\n");
+ if (data == NULL)
+ return ;
+ mix_videoframe_unref((MixVideoFrame *)data);
+
+ return;
+}
+
+
+MIX_RESULT mix_videofmt_h264_release_input_buffers(MixVideoFormat *mix,
+ guint64 timestamp
+ ) {
+
+ MixInputBufferEntry *bufentry = NULL;
+ gboolean done = FALSE;
+
+ LOG_V( "Begin\n");
+
+ if (mix == NULL)
+ return MIX_RESULT_NULL_PTR;
+
+ //Dequeue and release all input buffers for this frame
+
+ LOG_V( "Releasing all the MixBuffers for this frame\n");
+
+ //While the head of the queue has timestamp == current ts
+ //dequeue the entry, unref the MixBuffer, and free the struct
+ done = FALSE;
+ while (!done)
+ {
+ bufentry = (MixInputBufferEntry *) g_queue_peek_head(
+ mix->inputbufqueue);
+ if (bufentry == NULL) break;
+ LOG_V( "head of queue buf %x, timestamp %"G_GINT64_FORMAT", buffer timestamp %"G_GINT64_FORMAT"\n", (guint)bufentry->buf, timestamp, bufentry->timestamp);
+
+ if (bufentry->timestamp != timestamp)
+ {
+ LOG_V( "buf %x, timestamp %"G_GINT64_FORMAT", buffer timestamp %"G_GINT64_FORMAT"\n", (guint)bufentry->buf, timestamp, bufentry->timestamp);
+ done = TRUE;
+ break;
+ }
+
+ bufentry = (MixInputBufferEntry *) g_queue_pop_head(
+ mix->inputbufqueue);
+ LOG_V( "Unref this MixBuffers %x\n", (guint)bufentry->buf);
+ mix_buffer_unref(bufentry->buf);
+ g_free(bufentry);
+ }
+
+
+ LOG_V( "End\n");
+
+ return MIX_RESULT_SUCCESS;
+}
+
+
+
diff --git a/mix_video/src/mixvideoformat_h264.h b/mix_video/src/mixvideoformat_h264.h
new file mode 100644
index 0000000..a04048c
--- /dev/null
+++ b/mix_video/src/mixvideoformat_h264.h
@@ -0,0 +1,129 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+#ifndef __MIX_VIDEOFORMAT_H264_H__
+#define __MIX_VIDEOFORMAT_H264_H__
+
+#include "mixvideoformat.h"
+#include "mixvideoframe_private.h"
+
+#define MIX_VIDEO_H264_SURFACE_NUM 20
+
+/*
+ * Type macros.
+ */
+#define MIX_TYPE_VIDEOFORMAT_H264 (mix_videoformat_h264_get_type ())
+#define MIX_VIDEOFORMAT_H264(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_VIDEOFORMAT_H264, MixVideoFormat_H264))
+#define MIX_IS_VIDEOFORMAT_H264(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEOFORMAT_H264))
+#define MIX_VIDEOFORMAT_H264_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_VIDEOFORMAT_H264, MixVideoFormat_H264Class))
+#define MIX_IS_VIDEOFORMAT_H264_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEOFORMAT_H264))
+#define MIX_VIDEOFORMAT_H264_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEOFORMAT_H264, MixVideoFormat_H264Class))
+
+typedef struct _MixVideoFormat_H264 MixVideoFormat_H264;
+typedef struct _MixVideoFormat_H264Class MixVideoFormat_H264Class;
+
struct _MixVideoFormat_H264 {
	/*< public > */
	MixVideoFormat parent;

	/*< public > */

	/*< private > */
	/* Maps picture order count (POC) keys (stored directly in the
	 * gpointer) to the MixVideoFrame backing that reference picture
	 * in the decoder's DPB; values hold a frame reference released
	 * by the table's value-destroy callback. */
	GHashTable *dpb_surface_table;
};
+
+/**
+ * MixVideoFormat_H264Class:
+ *
+ * MI-X Video object class
+ */
+struct _MixVideoFormat_H264Class {
+ /*< public > */
+ MixVideoFormatClass parent_class;
+
+ /* class members */
+
+ /*< public > */
+};
+
+/**
+ * mix_videoformat_h264_get_type:
+ * @returns: type
+ *
+ * Get the type of object.
+ */
+GType mix_videoformat_h264_get_type(void);
+
+/**
+ * mix_videoformat_h264_new:
+ * @returns: A newly allocated instance of #MixVideoFormat_H264
+ *
+ * Use this method to create new instance of #MixVideoFormat_H264
+ */
+MixVideoFormat_H264 *mix_videoformat_h264_new(void);
+
+/**
+ * mix_videoformat_h264_ref:
+ * @mix: object to add reference
+ * @returns: the MixVideoFormat_H264 instance where reference count has been increased.
+ *
+ * Add reference count.
+ */
+MixVideoFormat_H264 *mix_videoformat_h264_ref(MixVideoFormat_H264 * mix);
+
+/**
+ * mix_videoformat_h264_unref:
+ * @obj: object to unref.
+ *
+ * Decrement reference count of the object.
+ */
+#define mix_videoformat_h264_unref(obj) g_object_unref (G_OBJECT(obj))
+
+/* Class Methods */
+
+/* H.264 vmethods */
+MIX_RESULT mix_videofmt_h264_getcaps(MixVideoFormat *mix, GString *msg);
+MIX_RESULT mix_videofmt_h264_initialize(MixVideoFormat *mix,
+ MixVideoConfigParamsDec * config_params,
+ MixFrameManager * frame_mgr,
+ MixBufferPool * input_buf_pool,
+ MixSurfacePool ** surface_pool,
+ VADisplay va_display);
+MIX_RESULT mix_videofmt_h264_decode(MixVideoFormat *mix, MixBuffer * bufin[],
+ gint bufincnt, MixVideoDecodeParams * decode_params);
+MIX_RESULT mix_videofmt_h264_flush(MixVideoFormat *mix);
+MIX_RESULT mix_videofmt_h264_eos(MixVideoFormat *mix);
+MIX_RESULT mix_videofmt_h264_deinitialize(MixVideoFormat *mix);
+
+/* Local Methods */
+
+MIX_RESULT mix_videofmt_h264_handle_ref_frames(MixVideoFormat *mix,
+ VAPictureParameterBufferH264* pic_params,
+ MixVideoFrame * current_frame);
+
+
+MIX_RESULT mix_videofmt_h264_process_decode(MixVideoFormat *mix,
+ vbp_data_h264 *data,
+ guint64 timestamp,
+ gboolean discontinuity);
+
+
+MIX_RESULT mix_videofmt_h264_release_input_buffers(MixVideoFormat *mix,
+ guint64 timestamp);
+
+
+/* Helper functions to manage the DPB table */
+gboolean mix_videofmt_h264_check_in_DPB(gpointer key, gpointer value, gpointer user_data);
+void mix_videofmt_h264_destroy_DPB_key(gpointer data);
+void mix_videofmt_h264_destroy_DPB_value(gpointer data);
+guint mix_videofmt_h264_get_poc(VAPictureH264 *pic);
+
+
+
+
+#endif /* __MIX_VIDEOFORMAT_H264_H__ */
diff --git a/mix_video/src/mixvideoformat_mp42.c b/mix_video/src/mixvideoformat_mp42.c
new file mode 100644
index 0000000..3aae249
--- /dev/null
+++ b/mix_video/src/mixvideoformat_mp42.c
@@ -0,0 +1,1416 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+#include <glib.h>
+#include <string.h>
+#include "mixvideolog.h"
+#include "mixvideoformat_mp42.h"
+
/* MPEG-4 part 2 VOP (video object plane) coding types as delivered by
 * the parser's vop_coding_type field. */
enum {
	MP4_VOP_TYPE_I = 0,
	MP4_VOP_TYPE_P = 1,
	MP4_VOP_TYPE_B = 2,
	MP4_VOP_TYPE_S = 3,
};
+
+/*
+ * This is for divx packed stream
+ */
typedef struct _PackedStream PackedStream;
struct _PackedStream {
	/* Cloned parser output for one picture of a DivX packed stream. */
	vbp_picture_data_mp42 *picture_data;
	/* Input buffer associated with this picture; held (reffed) until
	 * the queued picture is consumed — see flush_packed_stream_queue. */
	MixBuffer *mix_buffer;
};
+
+/*
+ * Clone and destroy vbp_picture_data_mp42
+ */
+static vbp_picture_data_mp42 *mix_videoformat_mp42_clone_picture_data(
+ vbp_picture_data_mp42 *picture_data);
+static void mix_videoformat_mp42_free_picture_data(
+ vbp_picture_data_mp42 *picture_data);
+static void mix_videoformat_mp42_flush_packed_stream_queue(
+ GQueue *packed_stream_queue);
+
+/* The parent class. The pointer will be saved
+ * in this class's initialization. The pointer
+ * can be used for chaining method call if needed.
+ */
+static MixVideoFormatClass *parent_class = NULL;
+
+static void mix_videoformat_mp42_finalize(GObject * obj);
+
+/*
+ * Please note that the type we pass to G_DEFINE_TYPE is MIX_TYPE_VIDEOFORMAT
+ */
+G_DEFINE_TYPE( MixVideoFormat_MP42, mix_videoformat_mp42, MIX_TYPE_VIDEOFORMAT);
+
/* Instance initializer: puts a freshly constructed MP42 decoder object
 * into a known-empty state before mix_videofmt_mp42_initialize() runs. */
static void mix_videoformat_mp42_init(MixVideoFormat_MP42 * self) {
	MixVideoFormat *parent = MIX_VIDEOFORMAT(self);

	/* no forward/backward reference frames held yet */
	self->reference_frames[0] = NULL;
	self->reference_frames[1] = NULL;

	self->last_frame = NULL;
	self->last_vop_coding_type = -1;

	self->packed_stream_queue = NULL;

	/* NOTE: we don't need to do this here.
	 * This just demonstrates how to access
	 * member variables belonging to the parent.
	 */
	parent->initialized = FALSE;
}
+
/* Class initializer: installs the GObject finalizer and wires the MP42
 * implementations into the MixVideoFormat virtual method table. */
static void mix_videoformat_mp42_class_init(MixVideoFormat_MP42Class * klass) {

	/* root class */
	GObjectClass *gobject_class = (GObjectClass *) klass;

	/* direct parent class */
	MixVideoFormatClass *video_format_class = MIX_VIDEOFORMAT_CLASS(klass);

	/* parent class for later use (chaining up in finalize/vmethods) */
	parent_class = g_type_class_peek_parent(klass);

	/* setup finalizer */
	gobject_class->finalize = mix_videoformat_mp42_finalize;

	/* setup vmethods with base implementation */
	video_format_class->getcaps = mix_videofmt_mp42_getcaps;
	video_format_class->initialize = mix_videofmt_mp42_initialize;
	video_format_class->decode = mix_videofmt_mp42_decode;
	video_format_class->flush = mix_videofmt_mp42_flush;
	video_format_class->eos = mix_videofmt_mp42_eos;
	video_format_class->deinitialize = mix_videofmt_mp42_deinitialize;
}
+
+MixVideoFormat_MP42 *mix_videoformat_mp42_new(void) {
+ MixVideoFormat_MP42 *ret = g_object_new(MIX_TYPE_VIDEOFORMAT_MP42, NULL);
+
+ return ret;
+}
+
/* GObject finalizer: releases decoder-owned resources (reference
 * frames, parser handle, packed-stream queue) under the object lock,
 * then chains up to the parent finalizer. */
void mix_videoformat_mp42_finalize(GObject * obj) {
	/* clean up here. */

	/* MixVideoFormat_MP42 *mix = MIX_VIDEOFORMAT_MP42(obj); */
	GObjectClass *root_class = (GObjectClass *) parent_class;
	MixVideoFormat *parent = NULL;
	gint32 vbp_ret = VBP_OK; /* NOTE(review): vbp_close result is never checked */
	MixVideoFormat_MP42 *self = NULL;

	LOG_V("Begin\n");

	if (obj == NULL) {
		LOG_E("obj is NULL\n");
		return;
	}

	if (!MIX_IS_VIDEOFORMAT_MP42(obj)) {
		LOG_E("obj is not mixvideoformat_mp42\n");
		return;
	}

	self = MIX_VIDEOFORMAT_MP42(obj);
	parent = MIX_VIDEOFORMAT(self);

	//surfacepool is deallocated by parent
	//inputbufqueue is deallocated by parent
	//parent calls vaDestroyConfig, vaDestroyContext and vaDestroySurfaces

	g_mutex_lock(parent->objectlock);

	/* unref reference frames */
	{
		gint idx = 0;
		for (idx = 0; idx < 2; idx++) {
			if (self->reference_frames[idx] != NULL) {
				mix_videoframe_unref(self->reference_frames[idx]);
				self->reference_frames[idx] = NULL;
			}
		}
	}


	/* Reset state */
	/* NOTE(review): setting initialized to TRUE during teardown looks
	 * suspicious (init code sets it FALSE) — confirm intended value */
	parent->initialized = TRUE;
	parent->parse_in_progress = FALSE;
	parent->discontinuity_frame_in_progress = FALSE;
	parent->current_timestamp = 0;

	/* Close the parser */
	vbp_ret = vbp_close(parent->parser_handle);
	parent->parser_handle = NULL;

	/* Drain and free the DivX packed-stream queue (entries hold
	 * cloned picture data and MixBuffer references). */
	if (self->packed_stream_queue) {
		mix_videoformat_mp42_flush_packed_stream_queue(self->packed_stream_queue);
		g_queue_free(self->packed_stream_queue);
	}
	self->packed_stream_queue = NULL;

	g_mutex_unlock(parent->objectlock);

	/* Chain up parent */
	if (root_class->finalize) {
		root_class->finalize(obj);
	}

	LOG_V("End\n");
}
+
+MixVideoFormat_MP42 *
+mix_videoformat_mp42_ref(MixVideoFormat_MP42 * mix) {
+ return (MixVideoFormat_MP42 *) g_object_ref(G_OBJECT(mix));
+}
+
+/* MP42 vmethods implementation */
+MIX_RESULT mix_videofmt_mp42_getcaps(MixVideoFormat *mix, GString *msg) {
+
+//This method is reserved for future use
+
+ LOG_V("Begin\n");
+ if (parent_class->getcaps) {
+ return parent_class->getcaps(mix, msg);
+ }
+
+ LOG_V("End\n");
+ return MIX_RESULT_NOTIMPL;
+}
+
+MIX_RESULT mix_videofmt_mp42_initialize(MixVideoFormat *mix,
+ MixVideoConfigParamsDec * config_params, MixFrameManager * frame_mgr,
+ MixBufferPool * input_buf_pool, MixSurfacePool ** surface_pool,
+ VADisplay va_display) {
+ uint32 vbp_ret = 0;
+ MIX_RESULT ret = MIX_RESULT_FAIL;
+
+ vbp_data_mp42 *data = NULL;
+ MixVideoFormat *parent = NULL;
+ MixIOVec *header = NULL;
+
+ VAProfile va_profile = VAProfileMPEG4AdvancedSimple;
+ VAConfigAttrib attrib;
+
+ VAStatus va_ret = VA_STATUS_SUCCESS;
+ guint number_extra_surfaces = 0;
+ VASurfaceID *surfaces = NULL;
+ guint numSurfaces = 0;
+
+ MixVideoFormat_MP42 *self = MIX_VIDEOFORMAT_MP42(mix);
+
+ if (mix == NULL || config_params == NULL || frame_mgr == NULL) {
+ return MIX_RESULT_NULL_PTR;
+ }
+
+ if (!MIX_IS_VIDEOFORMAT_MP42(mix)) {
+ return MIX_RESULT_INVALID_PARAM;
+ }
+
+ LOG_V("begin\n");
+
+ if (parent_class->initialize) {
+ ret = parent_class->initialize(mix, config_params, frame_mgr,
+ input_buf_pool, surface_pool, va_display);
+ if (ret != MIX_RESULT_SUCCESS) {
+ LOG_E("Failed to initialize parent!\n");
+ return ret;
+ }
+ }
+
+ parent = MIX_VIDEOFORMAT(mix);
+
+ g_mutex_lock(parent->objectlock);
+
+ parent->initialized = FALSE;
+
+ vbp_ret = vbp_open(VBP_MPEG4, &(parent->parser_handle));
+
+ if (vbp_ret != VBP_OK) {
+ LOG_E("Failed to call vbp_open()\n");
+ ret = MIX_RESULT_FAIL;
+ goto cleanup;
+ }
+
+ /*
+ * avidemux doesn't pass codec_data, we need handle this.
+ */
+
+ LOG_V("Try to get header data from config_param\n");
+
+ ret = mix_videoconfigparamsdec_get_header(config_params, &header);
+ if (ret == MIX_RESULT_SUCCESS && header != NULL) {
+
+ LOG_V("Found header data from config_param\n");
+ vbp_ret = vbp_parse(parent->parser_handle, header->data, header->data_size,
+ TRUE);
+
+ LOG_V("vbp_parse() returns 0x%x\n", vbp_ret);
+
+ g_free(header->data);
+ g_free(header);
+
+ if (!((vbp_ret == VBP_OK) || (vbp_ret == VBP_DONE))) {
+ LOG_E("Failed to call vbp_parse() to parse header data!\n");
+ goto cleanup;
+ }
+
+ /* Get the header data and save */
+
+ LOG_V("Call vbp_query()\n");
+ vbp_ret = vbp_query(parent->parser_handle, (void *) &data);
+ LOG_V("vbp_query() returns 0x%x\n", vbp_ret);
+
+ if ((vbp_ret != VBP_OK) || (data == NULL)) {
+ LOG_E("Failed to call vbp_query() to query header data parsing result\n");
+ goto cleanup;
+ }
+
+ if ((data->codec_data.profile_and_level_indication & 0xF8) == 0xF0) {
+ va_profile = VAProfileMPEG4AdvancedSimple;
+ LOG_V("The profile is VAProfileMPEG4AdvancedSimple from header data\n");
+ } else {
+ va_profile = VAProfileMPEG4Simple;
+ LOG_V("The profile is VAProfileMPEG4Simple from header data\n");
+ }
+ }
+
+ va_display = parent->va_display;
+
+ /* We are requesting RT attributes */
+ attrib.type = VAConfigAttribRTFormat;
+
+ va_ret = vaGetConfigAttributes(va_display, va_profile, VAEntrypointVLD,
+ &attrib, 1);
+ if (va_ret != VA_STATUS_SUCCESS) {
+ LOG_E("Failed to call vaGetConfigAttributes()\n");
+ goto cleanup;
+ }
+
+ if ((attrib.value & VA_RT_FORMAT_YUV420) == 0) {
+ LOG_E("The attrib.value is wrong!\n");
+ goto cleanup;
+ }
+
+ va_ret = vaCreateConfig(va_display, va_profile, VAEntrypointVLD, &attrib,
+ 1, &(parent->va_config));
+
+ if (va_ret != VA_STATUS_SUCCESS) {
+ LOG_E("Failed to call vaCreateConfig()!\n");
+ goto cleanup;
+ }
+
+ ret = mix_videoconfigparamsdec_get_extra_surface_allocation(config_params,
+ &number_extra_surfaces);
+
+ if (ret != MIX_RESULT_SUCCESS) {
+ LOG_E("Failed to call mix_videoconfigparams_get_extra_surface_allocation()!\n");
+ goto cleanup;
+ }
+
+ parent->va_num_surfaces = number_extra_surfaces + 4;
+ if (parent->va_num_surfaces > MIX_VIDEO_MP42_SURFACE_NUM) {
+ parent->va_num_surfaces = MIX_VIDEO_MP42_SURFACE_NUM;
+ }
+
+ numSurfaces = parent->va_num_surfaces;
+
+ parent->va_surfaces = g_malloc(sizeof(VASurfaceID) * numSurfaces);
+ if (!parent->va_surfaces) {
+ LOG_E("Not enough memory to allocate surfaces!\n");
+ ret = MIX_RESULT_NO_MEMORY;
+ goto cleanup;
+ }
+
+ surfaces = parent->va_surfaces;
+
+ va_ret = vaCreateSurfaces(va_display, parent->picture_width,
+ parent->picture_height, VA_RT_FORMAT_YUV420, numSurfaces,
+ surfaces);
+ if (va_ret != VA_STATUS_SUCCESS) {
+ LOG_E("Failed to call vaCreateSurfaces()!\n");
+ goto cleanup;
+ }
+
+ parent->surfacepool = mix_surfacepool_new();
+ if (parent->surfacepool == NULL) {
+ LOG_E("Not enough memory to create surface pool!\n");
+ ret = MIX_RESULT_NO_MEMORY;
+ goto cleanup;
+ }
+
+ *surface_pool = parent->surfacepool;
+
+ ret = mix_surfacepool_initialize(parent->surfacepool, surfaces,
+ numSurfaces);
+
+ /* Initialize and save the VA context ID
+ * Note: VA_PROGRESSIVE libva flag is only relevant to MPEG2
+ */
+ va_ret = vaCreateContext(va_display, parent->va_config,
+ parent->picture_width, parent->picture_height, 0, surfaces,
+ numSurfaces, &(parent->va_context));
+
+ if (va_ret != VA_STATUS_SUCCESS) {
+ LOG_E("Failed to call vaCreateContext()!\n");
+ ret = MIX_RESULT_FAIL;
+ goto cleanup;
+ }
+
+ /*
+ * Packed stream queue
+ */
+
+ self->packed_stream_queue = g_queue_new();
+ if (!self->packed_stream_queue) {
+ LOG_E("Failed to crate packed stream queue!\n");
+ ret = MIX_RESULT_NO_MEMORY;
+ goto cleanup;
+ }
+
+ self->last_frame = NULL;
+ self->last_vop_coding_type = -1;
+ parent->initialized = FALSE;
+ ret = MIX_RESULT_SUCCESS;
+
+ cleanup:
+
+ g_mutex_unlock(parent->objectlock);
+
+ LOG_V("End\n");
+
+ return ret;
+}
+
+/*
+ * mix_videofmt_mp42_decode:
+ * @mix: the MixVideoFormat object (must be a MixVideoFormat_MP42)
+ * @bufin: array of input buffers holding compressed MPEG-4 part 2 data
+ * @bufincnt: number of entries in @bufin
+ * @decode_params: supplies the timestamp and discontinuity flag for this data
+ *
+ * Decode entry point. Parses each input buffer with vbp_parse(), queries
+ * the parser for the resulting frame data, enqueues the (ref'd) input
+ * buffer on parent->inputbufqueue, and decodes via
+ * mix_videofmt_mp42_process_decode(). If the timestamp changed while a
+ * previous parse was still in progress, the leftover data from the
+ * previous frame is queried and decoded first.
+ *
+ * Holds parent->objectlock for the duration of the call.
+ *
+ * Returns: MIX_RESULT_SUCCESS on success; MIX_RESULT_NULL_PTR or
+ * MIX_RESULT_INVALID_PARAM for bad arguments; MIX_RESULT_FAIL on parser
+ * failure; MIX_RESULT_NO_MEMORY on allocation failure.
+ */
+MIX_RESULT mix_videofmt_mp42_decode(MixVideoFormat *mix, MixBuffer * bufin[],
+ gint bufincnt, MixVideoDecodeParams * decode_params) {
+ uint32 vbp_ret = 0;
+ MixVideoFormat *parent = NULL;
+ MIX_RESULT ret = MIX_RESULT_FAIL;
+ guint64 ts = 0;
+ vbp_data_mp42 *data = NULL;
+ gboolean discontinuity = FALSE;
+ MixInputBufferEntry *bufentry = NULL;
+ gint i = 0;
+
+ LOG_V("Begin\n");
+
+ if (mix == NULL || bufin == NULL || decode_params == NULL) {
+ return MIX_RESULT_NULL_PTR;
+ }
+
+ if (!MIX_IS_VIDEOFORMAT_MP42(mix)) {
+ return MIX_RESULT_INVALID_PARAM;
+ }
+
+ parent = MIX_VIDEOFORMAT(mix);
+
+ g_mutex_lock(parent->objectlock);
+
+ ret = mix_videodecodeparams_get_timestamp(decode_params, &ts);
+ if (ret != MIX_RESULT_SUCCESS) {
+ LOG_E("Failed to get timestamp\n");
+ goto cleanup;
+ }
+
+ LOG_I("ts after mix_videodecodeparams_get_timestamp() = %"G_GINT64_FORMAT"\n", ts);
+
+ ret
+ = mix_videodecodeparams_get_discontinuity(decode_params,
+ &discontinuity);
+ if (ret != MIX_RESULT_SUCCESS) {
+ LOG_E("Failed to get discontinuity\n");
+ goto cleanup;
+ }
+
+ /* If this is a new frame and we haven't retrieved parser
+ * workload data from previous frame yet, do so
+ */
+
+ if ((ts != parent->current_timestamp) && (parent->parse_in_progress)) {
+
+ LOG_V("timestamp changed and parsing is still in progress\n");
+
+ /* this is new data and the old data parsing is not complete, continue
+ * to parse the old data
+ */
+ vbp_ret = vbp_query(parent->parser_handle, (void *) &data);
+ LOG_V("vbp_query() returns 0x%x\n", vbp_ret);
+
+ if ((vbp_ret != VBP_OK) || (data == NULL)) {
+ ret = MIX_RESULT_FAIL;
+ LOG_E("vbp_ret != VBP_OK || data == NULL\n");
+ goto cleanup;
+ }
+
+ ret = mix_videofmt_mp42_process_decode(mix, data,
+ parent->current_timestamp,
+ parent->discontinuity_frame_in_progress);
+
+ if (ret != MIX_RESULT_SUCCESS) {
+ /* We log this but need to process
+ * the new frame data, so do not return
+ */
+ LOG_W("process_decode failed.\n");
+ }
+
+ /* we are done parsing for old data */
+ parent->parse_in_progress = FALSE;
+ }
+
+ parent->current_timestamp = ts;
+ parent->discontinuity_frame_in_progress = discontinuity;
+
+ /* we parse data buffer one by one */
+ for (i = 0; i < bufincnt; i++) {
+
+ LOG_V(
+ "Calling parse for current frame, parse handle %d, buf %x, size %d\n",
+ (int) parent->parser_handle, (guint) bufin[i]->data,
+ bufin[i]->size);
+
+ vbp_ret = vbp_parse(parent->parser_handle, bufin[i]->data,
+ bufin[i]->size, FALSE);
+
+ LOG_V("vbp_parse() returns 0x%x\n", vbp_ret);
+
+ /* The parser failed to parse */
+ if (vbp_ret != VBP_DONE && vbp_ret != VBP_OK) {
+ LOG_E("vbp_parse() ret = %d\n", vbp_ret);
+ ret = MIX_RESULT_FAIL;
+ goto cleanup;
+ }
+
+ LOG_V("vbp_parse() ret = %d\n", vbp_ret);
+
+ /* NOTE(review): this condition is always true here -- the error case
+ * was already handled by the check just above. */
+ if (vbp_ret == VBP_OK || vbp_ret == VBP_DONE) {
+
+ LOG_V("Now, parsing is done (VBP_DONE)!\n");
+
+ vbp_ret = vbp_query(parent->parser_handle, (void *) &data);
+ LOG_V("vbp_query() returns 0x%x\n", vbp_ret);
+
+ if ((vbp_ret != VBP_OK) || (data == NULL)) {
+ ret = MIX_RESULT_FAIL;
+ goto cleanup;
+ }
+
+ /* Increase the ref count of this input buffer */
+ mix_buffer_ref(bufin[i]);
+
+ /* Create a new MixInputBufferEntry
+ * TODO: make this from a pool later
+ */
+ bufentry = g_malloc(sizeof(MixInputBufferEntry));
+ if (bufentry == NULL) {
+ ret = MIX_RESULT_NO_MEMORY;
+ goto cleanup;
+ }
+
+ bufentry->buf = bufin[i];
+ bufentry->timestamp = ts;
+
+ LOG_I("bufentry->buf = %x bufentry->timestamp FOR VBP_DONE = %"G_GINT64_FORMAT"\n", bufentry->buf, bufentry->timestamp);
+
+ /* Enqueue this input buffer */
+ g_queue_push_tail(parent->inputbufqueue, (gpointer) bufentry);
+
+ /* process and decode data */
+ ret
+ = mix_videofmt_mp42_process_decode(mix, data, ts,
+ discontinuity);
+
+ if (ret != MIX_RESULT_SUCCESS) {
+ /* We log this but continue since we need
+ * to complete our processing
+ */
+ LOG_W("process_decode failed.\n");
+ }
+
+ LOG_V("Called process and decode for current frame\n");
+
+ parent->parse_in_progress = FALSE;
+
+ }
+#if 0
+ /*
+ * The DHG parser checks for next_sc, if next_sc is a start code, it thinks the current parsing is done: VBP_DONE.
+ * For our situation, this is not the case. The start code always begins with the gstbuffer. At the end of frame,
+ * the start code is never found.
+ */
+
+ else if (vbp_ret == VBP_OK) {
+
+ LOG_V("Now, parsing is not done (VBP_OK)!\n");
+
+ LOG_V(
+ "Enqueuing buffer and going on to next (if any) for this frame\n");
+
+ /* Increase the ref count of this input buffer */
+ mix_buffer_ref(bufin[i]);
+
+ /* Create a new MixInputBufferEntry
+ * TODO make this from a pool later
+ */
+ bufentry = g_malloc(sizeof(MixInputBufferEntry));
+ if (bufentry == NULL) {
+ ret = MIX_RESULT_FAIL;
+ goto cleanup;
+ }
+
+ bufentry->buf = bufin[i];
+ bufentry->timestamp = ts;
+ LOG_I("bufentry->buf = %x bufentry->timestamp FOR VBP_OK = %"G_GINT64_FORMAT"\n", bufentry->buf, bufentry->timestamp);
+
+ /* Enqueue this input buffer */
+ g_queue_push_tail(parent->inputbufqueue, (gpointer) bufentry);
+ parent->parse_in_progress = TRUE;
+ }
+#endif
+ }
+
+ cleanup:
+
+ g_mutex_unlock(parent->objectlock);
+
+ LOG_V("End\n");
+
+ return ret;
+}
+
+/*
+ * mix_videofmt_mp42_process_decode:
+ * @mix: the MixVideoFormat object (must be a MixVideoFormat_MP42)
+ * @data: parser output queried from vbp_query()
+ * @timestamp: presentation timestamp for this frame's input buffers
+ * @discontinuity: discontinuity flag to attach to the decoded frame
+ *
+ * Core decode step. Handles DivX-style packed frames (a {P,B,B,...} group
+ * followed by N-VOP placeholders), skipped frames (vop_coded == 0, re-uses
+ * the last decoded frame), then builds the libva buffers (picture params,
+ * optional IQ matrix, slice params and slice data), renders/syncs the
+ * surface and enqueues the decoded frame with the frame manager.
+ *
+ * Caller is expected to hold mix->objectlock (called from _decode/_eos).
+ * On failure the packed stream queue is flushed; input buffers for
+ * @timestamp are always released via _release_input_buffers().
+ *
+ * Returns: MIX_RESULT_SUCCESS on success; MIX_RESULT_DROPFRAME when the
+ * frame cannot be decoded yet (e.g. B frame right after a seek);
+ * other MIX_RESULT_* codes on error.
+ */
+MIX_RESULT mix_videofmt_mp42_process_decode(MixVideoFormat *mix,
+ vbp_data_mp42 *data, guint64 timestamp, gboolean discontinuity) {
+
+ MIX_RESULT ret = MIX_RESULT_SUCCESS;
+ VAStatus va_ret = VA_STATUS_SUCCESS;
+ VADisplay va_display = NULL;
+ VAContextID va_context;
+
+ MixVideoFormat_MP42 *self = NULL;
+ vbp_picture_data_mp42 *picture_data = NULL;
+ VAPictureParameterBufferMPEG4 *picture_param = NULL;
+ VAIQMatrixBufferMPEG4 *iq_matrix_buffer = NULL;
+ vbp_slice_data_mp42 *slice_data = NULL;
+ VASliceParameterBufferMPEG4 *slice_param = NULL;
+
+ gint frame_type = -1;
+ guint buffer_id_number = 0;
+ guint buffer_id_cnt = 0;
+ VABufferID *buffer_ids = NULL;
+ MixVideoFrame *frame = NULL;
+
+ gint idx = 0, jdx = 0;
+ gulong surface = 0;
+
+ MixBuffer *mix_buffer = NULL;
+ /* TRUE when picture_data was popped from packed_stream_queue and must be freed here */
+ gboolean is_from_queued_data = FALSE;
+
+ LOG_V("Begin\n");
+
+ if ((mix == NULL) || (data == NULL)) {
+ return MIX_RESULT_NULL_PTR;
+ }
+
+ if (!MIX_IS_VIDEOFORMAT_MP42(mix)) {
+ return MIX_RESULT_INVALID_PARAM;
+ }
+
+ self = MIX_VIDEOFORMAT_MP42(mix);
+
+ LOG_V("data->number_pictures = %d\n", data->number_pictures);
+
+ if (data->number_pictures == 0) {
+ LOG_W("data->number_pictures == 0\n");
+ mix_videofmt_mp42_release_input_buffers(mix, timestamp);
+ return ret;
+ }
+
+ is_from_queued_data = FALSE;
+
+ /* Do we have packed frames? */
+ if (data->number_pictures > 1) {
+
+ /*
+
+ Assumption:
+
+ 1. In one packed frame, there's only one P or I frame and the
+ reference frame will be the first one in the packed frame
+ 2. In packed frame, there's no skipped frame(vop_coded = 0)
+ 3. In one packed frame, if there're n B frames, there will be
+ n N-VOP frames to follow the packed frame.
+ The timestamp of each N-VOP frame will be used for each B frames
+ in the packed frame
+ 4. N-VOP frame is the frame with vop_coded = 0.
+
+ {P, B, B, B }, N, N, N, P, P, P, I, ...
+
+ */
+
+ MixInputBufferEntry *bufentry = NULL;
+ PackedStream *packed_stream = NULL;
+ vbp_picture_data_mp42 *cloned_picture_data = NULL;
+
+ LOG_V("This is packed frame\n");
+
+ /*
+ * Is the packed_frame_queue empty? If not, how come
+ * a packed frame can follow another packed frame without
+ * necessary number of N-VOP between them?
+ */
+
+ if (!g_queue_is_empty(self->packed_stream_queue)) {
+ ret = MIX_RESULT_FAIL;
+ LOG_E("The previous packed frame is not fully processed yet!\n");
+ goto cleanup;
+ }
+
+ /* Packed frame shall be something like this {P, B, B, B, ... B } */
+ for (idx = 0; idx < data->number_pictures; idx++) {
+ picture_data = &(data->picture_data[idx]);
+ picture_param = &(picture_data->picture_param);
+ frame_type = picture_param->vop_fields.bits.vop_coding_type;
+
+ /* Is the first frame in the packed frames a reference frame? */
+ if (idx == 0 && frame_type != MP4_VOP_TYPE_I && frame_type
+ != MP4_VOP_TYPE_P) {
+ ret = MIX_RESULT_FAIL;
+ /* NOTE(review): message is misleading -- the check above is
+ * for I or P, not "I or B". */
+ LOG_E("The first frame in packed frame is not I or B\n");
+ goto cleanup;
+ }
+
+ if (idx != 0 && frame_type != MP4_VOP_TYPE_B) {
+ ret = MIX_RESULT_FAIL;
+ LOG_E("The frame other than the first one in packed frame is not B\n");
+ goto cleanup;
+ }
+
+ if (picture_data->vop_coded == 0) {
+ ret = MIX_RESULT_FAIL;
+ LOG_E("In packed frame, there's unexpected skipped frame\n");
+ goto cleanup;
+ }
+ }
+
+ LOG_V("The packed frame looks valid\n");
+
+ /* Okay, the packed-frame looks ok. Now, we enqueue all the B frames */
+ bufentry
+ = (MixInputBufferEntry *) g_queue_peek_head(mix->inputbufqueue);
+ if (bufentry == NULL) {
+ ret = MIX_RESULT_FAIL;
+ /* NOTE(review): log text is inverted -- this path means the
+ * inputbufqueue is EMPTY (no data available). */
+ LOG_E("There's data in in inputbufqueue\n");
+ goto cleanup;
+ }
+
+ LOG_V("Enqueue all B frames in the packed frame\n");
+
+ mix_buffer = bufentry->buf;
+ for (idx = 1; idx < data->number_pictures; idx++) {
+ picture_data = &(data->picture_data[idx]);
+ cloned_picture_data = mix_videoformat_mp42_clone_picture_data(
+ picture_data);
+ if (!cloned_picture_data) {
+ ret = MIX_RESULT_NO_MEMORY;
+ LOG_E("Failed to allocate memory for cloned picture_data\n");
+ goto cleanup;
+ }
+
+ packed_stream = g_malloc(sizeof(PackedStream));
+ if (packed_stream == NULL) {
+ ret = MIX_RESULT_NO_MEMORY;
+ LOG_E("Failed to allocate memory for packed_stream\n");
+ goto cleanup;
+ }
+
+ packed_stream->mix_buffer = mix_buffer_ref(mix_buffer);
+ packed_stream->picture_data = cloned_picture_data;
+
+ g_queue_push_tail(self->packed_stream_queue,
+ (gpointer) packed_stream);
+ }
+
+ LOG_V("Prepare to decode the first frame in the packed frame\n");
+
+ /* we are going to process the first frame */
+ picture_data = &(data->picture_data[0]);
+
+ } else {
+
+ LOG_V("This is a single frame\n");
+
+ /* Okay, we only have one frame */
+ if (g_queue_is_empty(self->packed_stream_queue)) {
+ /* If the packed_stream_queue is empty, everything is fine */
+ picture_data = &(data->picture_data[0]);
+
+ LOG_V("There's no packed frame not processed yet\n");
+
+ } else {
+ /* The packed_stream_queue is not empty, is this frame N-VOP? */
+ picture_data = &(data->picture_data[0]);
+ if (picture_data->vop_coded != 0) {
+
+ LOG_V("The packed frame queue is not empty, we will flush it\n");
+
+ /*
+ * Unexpected! We flush the packed_stream_queue and begin to process the
+ * current frame if it is not a B frame
+ */
+ mix_videoformat_mp42_flush_packed_stream_queue(
+ self->packed_stream_queue);
+
+ picture_param = &(picture_data->picture_param);
+ frame_type = picture_param->vop_fields.bits.vop_coding_type;
+
+ if (frame_type == MP4_VOP_TYPE_B) {
+ ret = MIX_RESULT_FAIL;
+ LOG_E("The frame right after packed frame is B frame!\n");
+ goto cleanup;
+ }
+
+ } else {
+ /* This is N-VOP, process B frame from the packed_stream_queue */
+ PackedStream *packed_stream = NULL;
+
+ LOG_V("N-VOP found, we ignore it and start to process the B frame from the packed frame queue\n");
+
+ packed_stream = (PackedStream *) g_queue_pop_head(
+ self->packed_stream_queue);
+ picture_data = packed_stream->picture_data;
+ mix_buffer = packed_stream->mix_buffer;
+ g_free(packed_stream);
+ is_from_queued_data = TRUE;
+ }
+ }
+ }
+
+ picture_param = &(picture_data->picture_param);
+ iq_matrix_buffer = &(picture_data->iq_matrix_buffer);
+
+ /* NOTE(review): this can never be NULL -- it is the address of a
+ * struct member taken just above. */
+ if (picture_param == NULL) {
+ ret = MIX_RESULT_NULL_PTR;
+ LOG_E("picture_param == NULL\n");
+ goto cleanup;
+ }
+
+ /* If the frame type is not I, P or B */
+ frame_type = picture_param->vop_fields.bits.vop_coding_type;
+ if (frame_type != MP4_VOP_TYPE_I && frame_type != MP4_VOP_TYPE_P
+ && frame_type != MP4_VOP_TYPE_B) {
+ ret = MIX_RESULT_FAIL;
+ LOG_E("frame_type is not I, P or B. frame_type = %d\n", frame_type);
+ goto cleanup;
+ }
+
+ /*
+ * This is a skipped frame (vop_coded = 0)
+ * Please note that this is not a N-VOP (DivX).
+ */
+ if (picture_data->vop_coded == 0) {
+
+ MixVideoFrame *skip_frame = NULL;
+ gulong frame_id = VA_INVALID_SURFACE;
+
+ LOG_V("vop_coded == 0\n");
+ if (self->last_frame == NULL) {
+ LOG_W("Previous frame is NULL\n");
+
+ /*
+ * We shouldn't get a skipped frame
+ * before we are able to get a real frame
+ */
+ ret = MIX_RESULT_DROPFRAME;
+ goto cleanup;
+ }
+
+ /* NOTE(review): mix_videoframe_new() result is not NULL-checked
+ * before use -- TODO confirm it cannot fail. */
+ skip_frame = mix_videoframe_new();
+ ret = mix_videoframe_set_is_skipped(skip_frame, TRUE);
+ mix_videoframe_ref(self->last_frame);
+
+ /* The skipped frame re-uses the last decoded frame's surface */
+ ret = mix_videoframe_get_frame_id(self->last_frame, &frame_id);
+ ret = mix_videoframe_set_frame_id(skip_frame, frame_id);
+ ret = mix_videoframe_set_frame_type(skip_frame, MP4_VOP_TYPE_P);
+ ret = mix_videoframe_set_real_frame(skip_frame, self->last_frame);
+ ret = mix_videoframe_set_timestamp(skip_frame, timestamp);
+ ret = mix_videoframe_set_discontinuity(skip_frame, FALSE);
+
+ LOG_V("Processing skipped frame %x, frame_id set to %d, ts %"G_GINT64_FORMAT"\n",
+ (guint)skip_frame, (guint)frame_id, timestamp);
+
+ /* Release our input buffers */
+ ret = mix_videofmt_mp42_release_input_buffers(mix, timestamp);
+
+ /* Enqueue the skipped frame using frame manager */
+ ret = mix_framemanager_enqueue(mix->framemgr, skip_frame);
+ goto cleanup;
+ }
+
+ /*
+ * Decide the number of buffer to use
+ */
+
+ /* per slice: one param buffer + one data buffer; plus picture param and IQ matrix */
+ buffer_id_number = picture_data->number_slices * 2 + 2;
+ LOG_V("number_slices is %d, allocating %d buffer_ids\n",
+ picture_data->number_slices, buffer_id_number);
+
+ /*
+ * Check for B frames after a seek
+ * We need to have both reference frames in hand before we can decode a B frame
+ * If we don't have both reference frames, we must return MIX_RESULT_DROPFRAME
+ */
+ if (frame_type == MP4_VOP_TYPE_B) {
+
+ if (self->reference_frames[1] == NULL) {
+ LOG_W("Insufficient reference frames for B frame\n");
+ ret = MIX_RESULT_DROPFRAME;
+ goto cleanup;
+ }
+ }
+
+ buffer_ids = g_malloc(sizeof(VABufferID) * buffer_id_number);
+ if (buffer_ids == NULL) {
+ ret = MIX_RESULT_NO_MEMORY;
+ LOG_E("Failed to allocate buffer_ids!\n");
+ goto cleanup;
+ }
+
+ LOG_V("Getting a new surface\n");LOG_V("frame type is %d\n", frame_type);
+
+ /* Get a frame from the surface pool */
+ ret = mix_surfacepool_get(mix->surfacepool, &frame);
+ if (ret != MIX_RESULT_SUCCESS) {
+ LOG_E("Failed to get frame from surface pool!\n");
+ goto cleanup;
+ }
+
+ /*
+ * Set the frame type for the frame object (used in reordering by frame manager)
+ */
+ ret = mix_videoframe_set_frame_type(frame, frame_type);
+ if (ret != MIX_RESULT_SUCCESS) {
+ LOG_E("Failed to set frame type!\n");
+ goto cleanup;
+ }
+
+ /* If I or P frame, update the reference array */
+ if ((frame_type == MP4_VOP_TYPE_I) || (frame_type == MP4_VOP_TYPE_P)) {
+ LOG_V("Updating forward/backward references for libva\n");
+
+ self->last_vop_coding_type = frame_type;
+ mix_videofmt_mp42_handle_ref_frames(mix, frame_type, frame);
+ }
+
+ LOG_V("Setting reference frames in picparams, frame_type = %d\n",
+ frame_type);
+
+ /* After handle_ref_frames above, reference_frames[1] is the current
+ * I/P frame and reference_frames[0] is the previous reference. */
+ switch (frame_type) {
+ case MP4_VOP_TYPE_I:
+ picture_param->forward_reference_picture = VA_INVALID_SURFACE;
+ picture_param->backward_reference_picture = VA_INVALID_SURFACE;
+ LOG_V("I frame, surface ID %u\n", (guint) frame->frame_id);
+ break;
+ case MP4_VOP_TYPE_P:
+ picture_param-> forward_reference_picture
+ = self->reference_frames[0]->frame_id;
+ picture_param-> backward_reference_picture = VA_INVALID_SURFACE;
+
+ LOG_V("P frame, surface ID %u, forw ref frame is %u\n",
+ (guint) frame->frame_id,
+ (guint) self->reference_frames[0]->frame_id);
+ break;
+ case MP4_VOP_TYPE_B:
+
+ picture_param->vop_fields.bits.backward_reference_vop_coding_type
+ = self->last_vop_coding_type;
+
+ picture_param->forward_reference_picture
+ = self->reference_frames[1]->frame_id;
+ picture_param->backward_reference_picture
+ = self->reference_frames[0]->frame_id;
+
+ LOG_V("B frame, surface ID %u, forw ref %d, back ref %d\n",
+ (guint) frame->frame_id,
+ (guint) picture_param->forward_reference_picture,
+ (guint) picture_param->backward_reference_picture);
+ break;
+ case MP4_VOP_TYPE_S:
+ /* S frames are rejected by the I/P/B check above */
+ LOG_W("MP4_VOP_TYPE_S, Will never reach here\n");
+ break;
+
+ default:
+ LOG_W("default, Will never reach here\n");
+ break;
+
+ }
+
+ /* Libva buffer set up */
+ va_display = mix->va_display;
+ va_context = mix->va_context;
+
+ LOG_V("Creating libva picture parameter buffer\n");
+
+ /* First the picture parameter buffer */
+ buffer_id_cnt = 0;
+ va_ret = vaCreateBuffer(va_display, va_context,
+ VAPictureParameterBufferType,
+ sizeof(VAPictureParameterBufferMPEG4), 1, picture_param,
+ &buffer_ids[buffer_id_cnt]);
+ buffer_id_cnt++;
+
+ if (va_ret != VA_STATUS_SUCCESS) {
+ ret = MIX_RESULT_FAIL;
+ LOG_E("Failed to create va buffer of type VAPictureParameterBufferMPEG4!\n");
+ goto cleanup;
+ }
+
+ LOG_V("Creating libva VAIQMatrixBufferMPEG4 buffer\n");
+
+ /* IQ matrix is only needed when non-default quantization is in use */
+ if (picture_param->vol_fields.bits.quant_type) {
+ va_ret = vaCreateBuffer(va_display, va_context, VAIQMatrixBufferType,
+ sizeof(VAIQMatrixBufferMPEG4), 1, iq_matrix_buffer,
+ &buffer_ids[buffer_id_cnt]);
+
+ if (va_ret != VA_STATUS_SUCCESS) {
+ ret = MIX_RESULT_FAIL;
+ LOG_E("Failed to create va buffer of type VAIQMatrixBufferType!\n");
+ goto cleanup;
+ }
+ buffer_id_cnt++;
+ }
+
+ /* Now for slices */
+ for (jdx = 0; jdx < picture_data->number_slices; jdx++) {
+
+ slice_data = &(picture_data->slice_data[jdx]);
+ slice_param = &(slice_data->slice_param);
+
+ LOG_V(
+ "Creating libva slice parameter buffer, for slice %d\n",
+ jdx);
+
+ /* Do slice parameters */
+ va_ret = vaCreateBuffer(va_display, va_context,
+ VASliceParameterBufferType,
+ sizeof(VASliceParameterBufferMPEG4), 1, slice_param,
+ &buffer_ids[buffer_id_cnt]);
+ if (va_ret != VA_STATUS_SUCCESS) {
+ ret = MIX_RESULT_FAIL;
+ LOG_E("Failed to create va buffer of type VASliceParameterBufferMPEG4!\n");
+ goto cleanup;
+ }
+ buffer_id_cnt++;
+
+ /* Do slice data */
+ va_ret = vaCreateBuffer(va_display, va_context, VASliceDataBufferType,
+ slice_data->slice_size, 1, slice_data->buffer_addr
+ + slice_data->slice_offset, &buffer_ids[buffer_id_cnt]);
+ if (va_ret != VA_STATUS_SUCCESS) {
+ ret = MIX_RESULT_FAIL;
+ LOG_E("Failed to create va buffer of type VASliceDataBufferType!\n");
+ goto cleanup;
+ }
+ buffer_id_cnt++;
+ }
+
+ /* Get our surface ID from the frame object */
+ ret = mix_videoframe_get_frame_id(frame, &surface);
+ if (ret != MIX_RESULT_SUCCESS) {
+ LOG_E("Failed to get frame id: ret = 0x%x\n", ret);
+ goto cleanup;
+ }
+
+ LOG_V("Calling vaBeginPicture\n");
+
+ /* Now we can begin the picture */
+ va_ret = vaBeginPicture(va_display, va_context, surface);
+ if (va_ret != VA_STATUS_SUCCESS) {
+ ret = MIX_RESULT_FAIL;
+ LOG_E("Failed to vaBeginPicture(): va_ret = 0x%x\n", va_ret);
+ goto cleanup;
+ }
+
+ LOG_V("Calling vaRenderPicture\n");
+
+ /* Render the picture */
+ va_ret = vaRenderPicture(va_display, va_context, buffer_ids, buffer_id_cnt);
+ if (va_ret != VA_STATUS_SUCCESS) {
+ ret = MIX_RESULT_FAIL;
+ LOG_E("Failed to vaRenderPicture(): va_ret = 0x%x\n", va_ret);
+ goto cleanup;
+ }
+
+ LOG_V("Calling vaEndPicture\n");
+
+ /* End picture */
+ va_ret = vaEndPicture(va_display, va_context);
+ if (va_ret != VA_STATUS_SUCCESS) {
+ ret = MIX_RESULT_FAIL;
+ LOG_E("Failed to vaEndPicture(): va_ret = 0x%x\n", va_ret);
+ goto cleanup;
+ }
+
+ LOG_V("Calling vaSyncSurface\n");
+
+ /* Decode the picture */
+ va_ret = vaSyncSurface(va_display, surface);
+ if (va_ret != VA_STATUS_SUCCESS) {
+ ret = MIX_RESULT_FAIL;
+ LOG_E("Failed to vaSyncSurface(): va_ret = 0x%x\n", va_ret);
+ goto cleanup;
+ }
+
+ /* Set the discontinuity flag */
+ mix_videoframe_set_discontinuity(frame, discontinuity);
+
+ /* Set the timestamp */
+ mix_videoframe_set_timestamp(frame, timestamp);
+
+ LOG_V("Enqueueing the frame with frame manager, timestamp %"G_GINT64_FORMAT"\n", timestamp);
+
+ /* Enqueue the decoded frame using frame manager */
+ ret = mix_framemanager_enqueue(mix->framemgr, frame);
+ if (ret != MIX_RESULT_SUCCESS) {
+ LOG_E("Failed to mix_framemanager_enqueue()!\n");
+ goto cleanup;
+ }
+
+ /* For I or P frames, save this frame off for skipped frame handling */
+ if ((frame_type == MP4_VOP_TYPE_I) || (frame_type == MP4_VOP_TYPE_P)) {
+ if (self->last_frame != NULL) {
+ mix_videoframe_unref(self->last_frame);
+ }
+ self->last_frame = frame;
+ mix_videoframe_ref(frame);
+ }
+
+ ret = MIX_RESULT_SUCCESS;
+
+ /* Common exit path: on failure drop the frame and flush the packed
+ * queue; always free the buffer id array and the input buffers for
+ * this timestamp, and free cloned picture data if it came from the
+ * packed stream queue. */
+ cleanup:
+
+ if (ret != MIX_RESULT_SUCCESS && frame != NULL) {
+ mix_videoframe_unref(frame);
+ }
+
+ if (ret != MIX_RESULT_SUCCESS) {
+ mix_videoformat_mp42_flush_packed_stream_queue(
+ self->packed_stream_queue);
+ }
+
+ g_free(buffer_ids);
+ mix_videofmt_mp42_release_input_buffers(mix, timestamp);
+
+ if (is_from_queued_data) {
+ if (mix_buffer) {
+ mix_buffer_unref(mix_buffer);
+ }
+ mix_videoformat_mp42_free_picture_data(picture_data);
+ }
+
+ LOG_V("End\n");
+
+ return ret;
+}
+
+/*
+ * mix_videofmt_mp42_flush:
+ * @mix: the MixVideoFormat object (must be a MixVideoFormat_MP42)
+ *
+ * Drops all pending state: flushes the packed stream queue, unrefs and
+ * frees every entry in mix->inputbufqueue, clears the parse-in-progress
+ * and discontinuity flags, releases both reference frames, and asks the
+ * parser to flush via vbp_flush(). Holds mix->objectlock throughout.
+ *
+ * NOTE(review): unlike the other entry points, @mix is dereferenced and
+ * cast before any NULL / type check -- TODO confirm callers guarantee a
+ * valid MixVideoFormat_MP42 here.
+ *
+ * Returns: MIX_RESULT_SUCCESS.
+ */
+MIX_RESULT mix_videofmt_mp42_flush(MixVideoFormat *mix) {
+
+ MIX_RESULT ret = MIX_RESULT_SUCCESS;
+ MixVideoFormat_MP42 *self = MIX_VIDEOFORMAT_MP42(mix);
+ MixInputBufferEntry *bufentry = NULL;
+
+ LOG_V("Begin\n");
+
+ g_mutex_lock(mix->objectlock);
+
+ mix_videoformat_mp42_flush_packed_stream_queue(self->packed_stream_queue);
+
+ /*
+ * Clear the contents of inputbufqueue
+ */
+ while (!g_queue_is_empty(mix->inputbufqueue)) {
+ bufentry = (MixInputBufferEntry *) g_queue_pop_head(mix->inputbufqueue);
+ if (bufentry == NULL) {
+ continue;
+ }
+
+ mix_buffer_unref(bufentry->buf);
+ g_free(bufentry);
+ }
+
+ /*
+ * Clear parse_in_progress flag and current timestamp
+ */
+ mix->parse_in_progress = FALSE;
+ mix->discontinuity_frame_in_progress = FALSE;
+ mix->current_timestamp = 0;
+
+ /* Drop both reference frames so decoding restarts from the next I frame */
+ {
+ gint idx = 0;
+ for (idx = 0; idx < 2; idx++) {
+ if (self->reference_frames[idx] != NULL) {
+ mix_videoframe_unref(self->reference_frames[idx]);
+ self->reference_frames[idx] = NULL;
+ }
+ }
+ }
+
+ /* Call parser flush */
+ vbp_flush(mix->parser_handle);
+
+ g_mutex_unlock(mix->objectlock);
+
+ LOG_V("End\n");
+
+ return ret;
+}
+
+/*
+ * mix_videofmt_mp42_eos:
+ * @mix: the MixVideoFormat object (must be a MixVideoFormat_MP42)
+ *
+ * End-of-stream handling. If a parse is still in progress, queries the
+ * parser for the remaining data and decodes it, then signals EOS to the
+ * frame manager so queued frames are delivered. Holds mix->objectlock.
+ *
+ * Returns: MIX_RESULT_SUCCESS on success; MIX_RESULT_NULL_PTR /
+ * MIX_RESULT_INVALID_PARAM for bad arguments; MIX_RESULT_FAIL if the
+ * final vbp_query() fails.
+ */
+MIX_RESULT mix_videofmt_mp42_eos(MixVideoFormat *mix) {
+
+ MIX_RESULT ret = MIX_RESULT_SUCCESS;
+ vbp_data_mp42 *data = NULL;
+ uint32 vbp_ret = 0;
+
+ LOG_V("Begin\n");
+
+ if (mix == NULL) {
+ return MIX_RESULT_NULL_PTR;
+ }
+
+ if (!MIX_IS_VIDEOFORMAT_MP42(mix)) {
+ return MIX_RESULT_INVALID_PARAM;
+ }
+
+ g_mutex_lock(mix->objectlock);
+
+ /* if a frame is in progress, process the frame */
+ if (mix->parse_in_progress) {
+ /* query for data */
+ vbp_ret = vbp_query(mix->parser_handle, (void *) &data);
+ LOG_V("vbp_query() returns 0x%x\n", vbp_ret);
+
+ if ((vbp_ret != VBP_OK) || (data == NULL)) {
+ ret = MIX_RESULT_FAIL;
+ LOG_E("vbp_ret != VBP_OK || data == NULL\n");
+ goto cleanup;
+ }
+
+ /* process and decode data */
+ ret = mix_videofmt_mp42_process_decode(mix, data,
+ mix->current_timestamp, mix->discontinuity_frame_in_progress);
+ mix->parse_in_progress = FALSE;
+
+ }
+
+ /* Tell the frame manager to release any frames it is holding back */
+ ret = mix_framemanager_eos(mix->framemgr);
+
+ cleanup:
+
+ g_mutex_unlock(mix->objectlock);
+
+ LOG_V("End\n");
+
+ return ret;
+}
+
+/*
+ * mix_videofmt_mp42_deinitialize:
+ * @mix: the MixVideoFormat object (must be a MixVideoFormat_MP42)
+ *
+ * Deinitialization vmethod. MP42-specific cleanup is done in _finalize;
+ * this simply chains up to the parent class's deinitialize.
+ *
+ * Returns: the parent class result; MIX_RESULT_NULL_PTR /
+ * MIX_RESULT_INVALID_PARAM for bad arguments; MIX_RESULT_FAIL if the
+ * parent class provides no deinitialize.
+ */
+MIX_RESULT mix_videofmt_mp42_deinitialize(MixVideoFormat *mix) {
+
+ /*
+ * We do the all the cleanup in _finalize
+ */
+
+ MIX_RESULT ret = MIX_RESULT_FAIL;
+
+ LOG_V("Begin\n");
+
+ if (mix == NULL) {
+ LOG_V("mix is NULL\n");
+ return MIX_RESULT_NULL_PTR;
+ }
+
+ if (!MIX_IS_VIDEOFORMAT_MP42(mix)) {
+ LOG_V("mix is not mixvideoformat_mp42\n");
+ return MIX_RESULT_INVALID_PARAM;
+ }
+
+ /* Chain up to the parent class implementation */
+ if (parent_class->deinitialize) {
+ ret = parent_class->deinitialize(mix);
+ }
+
+ LOG_V("End\n");
+ return ret;
+}
+
+/*
+ * mix_videofmt_mp42_handle_ref_frames:
+ * @mix: the MixVideoFormat object (must be a MixVideoFormat_MP42)
+ * @frame_type: VOP coding type of @current_frame
+ * @current_frame: the frame just obtained for decoding
+ *
+ * Maintains the two-entry reference frame array for I/P frames: the new
+ * frame is ref'd and stored, shifting out (and unref'ing) the oldest
+ * reference once both slots are full. After the call,
+ * reference_frames[1] is the newest reference and [0] the previous one.
+ * B and S frames leave the array untouched.
+ *
+ * NOTE(review): @mix is cast via MIX_VIDEOFORMAT_MP42() before the NULL
+ * check below -- TODO confirm the cast macro tolerates NULL here.
+ *
+ * Returns: MIX_RESULT_SUCCESS, or MIX_RESULT_NULL_PTR for bad arguments.
+ */
+MIX_RESULT mix_videofmt_mp42_handle_ref_frames(MixVideoFormat *mix,
+ enum _picture_type frame_type, MixVideoFrame * current_frame) {
+
+ MixVideoFormat_MP42 *self = MIX_VIDEOFORMAT_MP42(mix);
+
+ LOG_V("Begin\n");
+
+ if (mix == NULL || current_frame == NULL) {
+ return MIX_RESULT_NULL_PTR;
+ }
+
+ switch (frame_type) {
+ case MP4_VOP_TYPE_I:
+ case MP4_VOP_TYPE_P:
+ LOG_V("Refing reference frame %x\n", (guint) current_frame);
+
+ mix_videoframe_ref(current_frame);
+
+ /* should only happen on first frame */
+ if (self->reference_frames[0] == NULL) {
+ self->reference_frames[0] = current_frame;
+ /* should only happen on second frame */
+ } else if (self->reference_frames[1] == NULL) {
+ self->reference_frames[1] = current_frame;
+ } else {
+ /* steady state: drop the oldest reference and shift */
+ LOG_V("Releasing reference frame %x\n",
+ (guint) self->reference_frames[0]);
+ mix_videoframe_unref(self->reference_frames[0]);
+ self->reference_frames[0] = self->reference_frames[1];
+ self->reference_frames[1] = current_frame;
+ }
+ break;
+ case MP4_VOP_TYPE_B:
+ case MP4_VOP_TYPE_S:
+ default:
+ break;
+
+ }
+
+ LOG_V("End\n");
+
+ return MIX_RESULT_SUCCESS;
+}
+
+/*
+ * mix_videofmt_mp42_release_input_buffers:
+ * @mix: the MixVideoFormat object
+ * @timestamp: timestamp identifying the frame whose buffers to release
+ *
+ * Pops entries off the head of mix->inputbufqueue for as long as their
+ * timestamp matches @timestamp, unref'ing each MixBuffer and freeing its
+ * queue entry. Stops at the first entry with a different timestamp, so
+ * buffers belonging to later frames stay queued. Caller is expected to
+ * hold mix->objectlock.
+ *
+ * Returns: MIX_RESULT_SUCCESS, or MIX_RESULT_NULL_PTR if @mix is NULL.
+ */
+MIX_RESULT mix_videofmt_mp42_release_input_buffers(MixVideoFormat *mix,
+ guint64 timestamp) {
+
+ MixInputBufferEntry *bufentry = NULL;
+ gboolean done = FALSE;
+
+ LOG_V("Begin\n");
+
+ if (mix == NULL) {
+ return MIX_RESULT_NULL_PTR;
+ }
+
+ /* Dequeue and release all input buffers for this frame */
+ LOG_V("Releasing all the MixBuffers for this frame\n");
+
+ /*
+ * While the head of the queue has timestamp == current ts
+ * dequeue the entry, unref the MixBuffer, and free the struct
+ */
+ done = FALSE;
+ while (!done) {
+ /* Peek first so entries for other timestamps are left in place */
+ bufentry
+ = (MixInputBufferEntry *) g_queue_peek_head(mix->inputbufqueue);
+ if (bufentry == NULL) {
+ break;
+ }
+
+ LOG_V("head of queue buf %x, timestamp %"G_GINT64_FORMAT", buffer timestamp %"G_GINT64_FORMAT"\n",
+ (guint)bufentry->buf, timestamp, bufentry->timestamp);
+
+ if (bufentry->timestamp != timestamp) {
+ LOG_V("buf %x, timestamp %"G_GINT64_FORMAT", buffer timestamp %"G_GINT64_FORMAT"\n",
+ (guint)bufentry->buf, timestamp, bufentry->timestamp);
+
+ done = TRUE;
+ break;
+ }
+
+ bufentry = (MixInputBufferEntry *) g_queue_pop_head(mix->inputbufqueue);
+ LOG_V("Unref this MixBuffers %x\n", (guint) bufentry->buf);
+
+ mix_buffer_unref(bufentry->buf);
+ g_free(bufentry);
+ }
+
+ LOG_V("End\n");
+
+ return MIX_RESULT_SUCCESS;
+}
+
+/*
+ * mix_videoformat_mp42_clone_picture_data:
+ * @picture_data: the parser picture data to clone
+ *
+ * Deep-copies a vbp_picture_data_mp42, including its slice_data array,
+ * so the data can outlive the parser's buffers (used to queue B frames
+ * from packed frames). The top-level struct is shallow-copied and the
+ * slice array is duplicated.
+ *
+ * NOTE(review): the copy shares slice_data[].buffer_addr with the
+ * original -- the underlying bitstream bytes are NOT duplicated; the
+ * caller keeps the source MixBuffer ref'd (see process_decode).
+ *
+ * Returns: a newly allocated clone to be freed with
+ * mix_videoformat_mp42_free_picture_data(), or NULL if @picture_data is
+ * NULL, has no slices, or allocation fails.
+ */
+vbp_picture_data_mp42 *mix_videoformat_mp42_clone_picture_data(
+ vbp_picture_data_mp42 *picture_data) {
+
+ gboolean succ = FALSE;
+
+ if (!picture_data) {
+ return NULL;
+ }
+
+ if (picture_data->number_slices == 0) {
+ return NULL;
+ }
+
+ vbp_picture_data_mp42 *cloned_picture_data = g_try_new0(
+ vbp_picture_data_mp42, 1);
+ if (cloned_picture_data == NULL) {
+ goto cleanup;
+ }
+
+ memcpy(cloned_picture_data, picture_data, sizeof(vbp_picture_data_mp42));
+
+ /* NOTE(review): redundant -- number_slices was already copied by the
+ * memcpy above. */
+ cloned_picture_data->number_slices = picture_data->number_slices;
+ cloned_picture_data->slice_data = g_try_new0(vbp_slice_data_mp42,
+ picture_data->number_slices);
+ if (cloned_picture_data->slice_data == NULL) {
+ goto cleanup;
+ }
+
+ memcpy(cloned_picture_data->slice_data, picture_data->slice_data,
+ sizeof(vbp_slice_data_mp42) * (picture_data->number_slices));
+
+ succ = TRUE;
+
+ cleanup:
+
+ /* On any failure free the partial clone (free_picture_data handles NULL) */
+ if (!succ) {
+ mix_videoformat_mp42_free_picture_data(cloned_picture_data);
+ return NULL;
+ }
+
+ return cloned_picture_data;
+}
+
+/*
+ * mix_videoformat_mp42_free_picture_data:
+ * @picture_data: picture data previously returned by
+ * mix_videoformat_mp42_clone_picture_data(); may be NULL
+ *
+ * Frees a cloned picture data struct and its slice array.
+ * NOTE(review): the inner NULL guard is redundant -- g_free(NULL) is a
+ * documented no-op.
+ */
+void mix_videoformat_mp42_free_picture_data(vbp_picture_data_mp42 *picture_data) {
+ if (picture_data) {
+ if (picture_data->slice_data) {
+ g_free(picture_data->slice_data);
+ }
+ g_free(picture_data);
+ }
+}
+
+/*
+ * mix_videoformat_mp42_flush_packed_stream_queue:
+ * @packed_stream_queue: queue of PackedStream entries; may be NULL
+ *
+ * Empties the packed stream queue, freeing each entry's cloned picture
+ * data, unref'ing its MixBuffer, and freeing the PackedStream struct.
+ * The queue itself is not destroyed, only drained.
+ */
+void mix_videoformat_mp42_flush_packed_stream_queue(GQueue *packed_stream_queue) {
+
+ PackedStream *packed_stream = NULL;
+
+ if (packed_stream_queue == NULL) {
+ return;
+ }
+ while (!g_queue_is_empty(packed_stream_queue)) {
+ packed_stream = (PackedStream *) g_queue_pop_head(packed_stream_queue);
+ if (packed_stream == NULL) {
+ continue;
+ }
+
+ if (packed_stream->picture_data) {
+ mix_videoformat_mp42_free_picture_data(packed_stream->picture_data);
+ }
+
+ if (packed_stream->mix_buffer) {
+ mix_buffer_unref(packed_stream->mix_buffer);
+ }
+ g_free(packed_stream);
+ }
+}
diff --git a/mix_video/src/mixvideoformat_mp42.h b/mix_video/src/mixvideoformat_mp42.h
new file mode 100644
index 0000000..67ee210
--- /dev/null
+++ b/mix_video/src/mixvideoformat_mp42.h
@@ -0,0 +1,117 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+#ifndef __MIX_VIDEOFORMAT_MP42_H__
+#define __MIX_VIDEOFORMAT_MP42_H__
+
+#include "mixvideoformat.h"
+#include "mixvideoframe_private.h"
+
+//Note: this is only a max limit. Real number of surfaces allocated is calculated in mix_videoformat_mp42_initialize()
+#define MIX_VIDEO_MP42_SURFACE_NUM 8
+
+/*
+ * Type macros.
+ */
+#define MIX_TYPE_VIDEOFORMAT_MP42 (mix_videoformat_mp42_get_type ())
+#define MIX_VIDEOFORMAT_MP42(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_VIDEOFORMAT_MP42, MixVideoFormat_MP42))
+#define MIX_IS_VIDEOFORMAT_MP42(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEOFORMAT_MP42))
+#define MIX_VIDEOFORMAT_MP42_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_VIDEOFORMAT_MP42, MixVideoFormat_MP42Class))
+#define MIX_IS_VIDEOFORMAT_MP42_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEOFORMAT_MP42))
+#define MIX_VIDEOFORMAT_MP42_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEOFORMAT_MP42, MixVideoFormat_MP42Class))
+
+typedef struct _MixVideoFormat_MP42 MixVideoFormat_MP42;
+typedef struct _MixVideoFormat_MP42Class MixVideoFormat_MP42Class;
+
+/* MPEG-4 part 2 decoder object: adds reference frame tracking and
+ * DivX packed-frame handling on top of MixVideoFormat. */
+struct _MixVideoFormat_MP42 {
+ /*< public > */
+ MixVideoFormat parent;
+
+ /*< public > */
+
+ /*< private > */
+ /* I/P reference frames; after _handle_ref_frames, [1] is the most
+ * recently decoded reference and [0] the one before it */
+ MixVideoFrame * reference_frames[2];
+ /* last decoded I/P frame, kept (ref'd) for skipped-frame handling */
+ MixVideoFrame * last_frame;
+ /* VOP coding type of last_frame; -1 until the first I/P is decoded */
+ gint last_vop_coding_type;
+
+ /* queued B frames cloned out of a DivX packed frame, decoded as the
+ * matching N-VOP placeholders arrive */
+ GQueue *packed_stream_queue;
+};
+
+/**
+ * MixVideoFormat_MP42Class:
+ *
+ * MI-X Video object class
+ */
+struct _MixVideoFormat_MP42Class {
+ /*< public > */
+ MixVideoFormatClass parent_class;
+
+/* class members */
+
+/*< public > */
+};
+
+/**
+ * mix_videoformat_mp42_get_type:
+ * @returns: type
+ *
+ * Get the type of object.
+ */
+GType mix_videoformat_mp42_get_type(void);
+
+/**
+ * mix_videoformat_mp42_new:
+ * @returns: A newly allocated instance of #MixVideoFormat_MP42
+ *
+ * Use this method to create new instance of #MixVideoFormat_MP42
+ */
+MixVideoFormat_MP42 *mix_videoformat_mp42_new(void);
+
+/**
+ * mix_videoformat_mp42_ref:
+ * @mix: object to add reference
+ * @returns: the MixVideoFormat_MP42 instance where reference count has been increased.
+ *
+ * Add reference count.
+ */
+MixVideoFormat_MP42 *mix_videoformat_mp42_ref(MixVideoFormat_MP42 * mix);
+
+/**
+ * mix_videoformat_mp42_unref:
+ * @obj: object to unref.
+ *
+ * Decrement reference count of the object.
+ */
+#define mix_videoformat_mp42_unref(obj) g_object_unref (G_OBJECT(obj))
+
+/* Class Methods */
+
+/* MP42 vmethods */
+MIX_RESULT mix_videofmt_mp42_getcaps(MixVideoFormat *mix, GString *msg);
+MIX_RESULT mix_videofmt_mp42_initialize(MixVideoFormat *mix,
+ MixVideoConfigParamsDec * config_params, MixFrameManager * frame_mgr,
+ MixBufferPool * input_buf_pool, MixSurfacePool ** surface_pool,
+ VADisplay va_display);
+MIX_RESULT mix_videofmt_mp42_decode(MixVideoFormat *mix, MixBuffer * bufin[],
+ gint bufincnt, MixVideoDecodeParams * decode_params);
+MIX_RESULT mix_videofmt_mp42_flush(MixVideoFormat *mix);
+MIX_RESULT mix_videofmt_mp42_eos(MixVideoFormat *mix);
+MIX_RESULT mix_videofmt_mp42_deinitialize(MixVideoFormat *mix);
+
+/* Local Methods */
+
+MIX_RESULT mix_videofmt_mp42_handle_ref_frames(MixVideoFormat *mix,
+ enum _picture_type frame_type, MixVideoFrame * current_frame);
+
+MIX_RESULT mix_videofmt_mp42_process_decode(MixVideoFormat *mix,
+ vbp_data_mp42 *data, guint64 timestamp, gboolean discontinuity);
+
+MIX_RESULT mix_videofmt_mp42_release_input_buffers(MixVideoFormat *mix,
+ guint64 timestamp);
+
+#endif /* __MIX_VIDEOFORMAT_MP42_H__ */
diff --git a/mix_video/src/mixvideoformat_vc1.c b/mix_video/src/mixvideoformat_vc1.c
new file mode 100644
index 0000000..ec09985
--- /dev/null
+++ b/mix_video/src/mixvideoformat_vc1.c
@@ -0,0 +1,1749 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+#include <glib.h>
+#include "mixvideolog.h"
+
+#include "mixvideoformat_vc1.h"
+#include <va/va_x11.h>
+
+#ifdef YUVDUMP
+//TODO Complete YUVDUMP code and move into base class
+#include <stdio.h>
+#endif /* YUVDUMP */
+
+#include <string.h>
+
+
+#ifdef MIX_LOG_ENABLE
+static int mix_video_vc1_counter = 0;
+#endif
+
+/* The parent class. The pointer will be saved
+ * in this class's initialization. The pointer
+ * can be used for chaining method call if needed.
+ */
+static MixVideoFormatClass *parent_class = NULL;
+
+static void mix_videoformat_vc1_finalize(GObject * obj);
+
+/*
+ * Please note that the type we pass to G_DEFINE_TYPE is MIX_TYPE_VIDEOFORMAT
+ */
+G_DEFINE_TYPE (MixVideoFormat_VC1, mix_videoformat_vc1, MIX_TYPE_VIDEOFORMAT);
+
+static void mix_videoformat_vc1_init(MixVideoFormat_VC1 * self) {
+	MixVideoFormat *parent = MIX_VIDEOFORMAT(self);
+
+	/* public member initialization: clear both reference-frame slots */
+	/* These are all public because MixVideoFormat objects are completely internal to MixVideo,
+	   no need for private members */
+	self->reference_frames[0] = NULL;
+	self->reference_frames[1] = NULL;
+
+	/* NOTE: we don't need to do this here.
+	 * This just demonstrates how to access
+	 * member variables belonging to the parent
+	 */
+	parent->initialized = FALSE;
+}
+
+static void mix_videoformat_vc1_class_init(
+		MixVideoFormat_VC1Class * klass) {
+
+	/* root class */
+	GObjectClass *gobject_class = (GObjectClass *) klass;
+
+	/* direct parent class */
+	MixVideoFormatClass *video_format_class =
+			MIX_VIDEOFORMAT_CLASS(klass);
+
+	/* parent class for later use (chain-up in vmethods and finalize) */
+	parent_class = g_type_class_peek_parent(klass);
+
+	/* setup finalizer */
+	gobject_class->finalize = mix_videoformat_vc1_finalize;
+
+	/* setup vmethods with base implementation */
+	/* This is where we can override base class methods if needed */
+	video_format_class->getcaps = mix_videofmt_vc1_getcaps;
+	video_format_class->initialize = mix_videofmt_vc1_initialize;
+	video_format_class->decode = mix_videofmt_vc1_decode;
+	video_format_class->flush = mix_videofmt_vc1_flush;
+	video_format_class->eos = mix_videofmt_vc1_eos;
+	video_format_class->deinitialize = mix_videofmt_vc1_deinitialize;
+}
+
+MixVideoFormat_VC1 *
+mix_videoformat_vc1_new(void) {
+	MixVideoFormat_VC1 *ret =
+			g_object_new(MIX_TYPE_VIDEOFORMAT_VC1, NULL); /* allocate a fresh GObject instance of the VC-1 format */
+
+	return ret;
+}
+
+/* GObject finalizer: unrefs reference frames, resets decoder state and
+ * closes the vbp parser under the object lock, then chains up so the
+ * parent can release the surface pool, input queue and libva resources. */
+static void mix_videoformat_vc1_finalize(GObject * obj) {
+	gint32 pret = VBP_OK;
+
+	/* clean up here. */
+
+	MixVideoFormat *parent = NULL;
+	MixVideoFormat_VC1 *self = MIX_VIDEOFORMAT_VC1(obj);
+	GObjectClass *root_class = (GObjectClass *) parent_class;
+
+	parent = MIX_VIDEOFORMAT(self);
+
+	g_mutex_lock(parent->objectlock);
+
+	//surfacepool is deallocated by parent
+	//inputbufqueue is deallocated by parent
+	//parent calls vaDestroyConfig, vaDestroyContext and vaDestroySurfaces
+
+	//Unref our reference frames
+	int i = 0;
+	for (; i < 2; i++)
+	{
+		if (self->reference_frames[i] != NULL)
+		{
+			mix_videoframe_unref(self->reference_frames[i]);
+			self->reference_frames[i] = NULL;
+		}
+	}
+
+	//Reset state: the object is being destroyed, so it is no longer
+	//initialized (was erroneously set to TRUE here)
+	parent->initialized = FALSE;
+	parent->parse_in_progress = FALSE;
+	parent->discontinuity_frame_in_progress = FALSE;
+	parent->current_timestamp = 0;
+
+	//Close the parser
+	pret = vbp_close(parent->parser_handle);
+	parent->parser_handle = NULL;
+	if (pret != VBP_OK)
+	{
+		LOG_E( "Error closing parser\n");
+	}
+
+	g_mutex_unlock(parent->objectlock);
+
+	/* Chain up parent */
+	if (root_class->finalize) {
+		root_class->finalize(obj);
+	}
+}
+
+MixVideoFormat_VC1 *
+mix_videoformat_vc1_ref(MixVideoFormat_VC1 * mix) {
+	return (MixVideoFormat_VC1 *) g_object_ref(G_OBJECT(mix)); /* bump the GObject refcount and hand back the same instance */
+}
+
+/* VC1 vmethods implementation */
+MIX_RESULT mix_videofmt_vc1_getcaps(MixVideoFormat *mix, GString *msg) {
+
+	MIX_RESULT ret = MIX_RESULT_NOTIMPL;
+
+//This method is reserved for future use: it only validates arguments and
+//chains up; MIX_RESULT_NOTIMPL is returned if the parent has no getcaps
+
+	if (mix == NULL || msg == NULL)
+	{
+		LOG_E( "NUll pointer passed in\n");
+		return MIX_RESULT_NULL_PTR;
+	}
+
+	LOG_V( "Begin\n");
+
+	/* Chainup parent method.
+	 */
+
+	if (parent_class->getcaps) {
+		ret = parent_class->getcaps(mix, msg);
+	}
+
+	LOG_V( "End\n");
+
+	return ret;
+}
+
+/* Examines the codec data in @header: if it already contains 00 00 01 start
+ * codes it is VC-1 Advanced and is left untouched; otherwise the 4+ bytes of
+ * WMV codec data are wrapped into a 9-byte-larger RCV-style sequence header
+ * (start code 0x0000010f, 16-bit width/height, original data, 0x80 trailer).
+ * Replaces header->data (frees the old buffer) and grows data_size by 9. */
+MIX_RESULT mix_videofmt_vc1_update_seq_header(
+		MixVideoConfigParamsDec* config_params,
+		MixIOVec *header)
+{
+	guint width = 0;
+	guint height = 0;
+
+	guint i = 0;
+	guchar* p = NULL;
+	MIX_RESULT res = MIX_RESULT_SUCCESS;
+
+	/* validate before dereferencing (header->data was previously read above this check) */
+	if (!config_params || !header || !header->data)
+	{
+		LOG_E( "NUll pointer passed in\n");
+		return (MIX_RESULT_NULL_PTR);
+	}
+	p = header->data;
+
+	res = mix_videoconfigparamsdec_get_picture_res(
+			config_params,
+			&width,
+			&height);
+
+	if (MIX_RESULT_SUCCESS != res)
+	{
+		return res;
+	}
+
+	/* Check for start codes.  If one exist, then this is VC-1 and not WMV.
+	   (i + 2 < data_size avoids unsigned underflow of data_size - 2 when
+	   data_size < 2, which scanned far out of bounds) */
+	while (i + 2 < header->data_size)
+	{
+		if ((p[i] == 0) &&
+			(p[i + 1] == 0) &&
+			(p[i + 2] == 1))
+		{
+			return MIX_RESULT_SUCCESS;
+		}
+		i++;
+	}
+
+	p = g_malloc0(header->data_size + 9);
+
+	if (!p)
+	{
+		LOG_E( "Cannot allocate memory\n");
+		return MIX_RESULT_NO_MEMORY;
+	}
+
+	/* If we get here we have 4+ bytes of codec data that must be formatted */
+	/* to pass through as an RCV sequence header. */
+	p[0] = 0;
+	p[1] = 0;
+	p[2] = 1;
+	p[3] = 0x0f;  /* Start code. */
+
+	p[4] = (width >> 8) & 0x0ff;
+	p[5] = width & 0x0ff;
+	p[6] = (height >> 8) & 0x0ff;
+	p[7] = height & 0x0ff;
+
+	memcpy(p + 8, header->data, header->data_size);
+	*(p + header->data_size + 8) = 0x80;
+
+	g_free(header->data);
+	header->data = p;
+	header->data_size = header->data_size + 9;
+
+	return MIX_RESULT_SUCCESS;
+}
+
+
+
+/* Initialize vmethod: chains up to the parent, opens the vbp VC-1 parser,
+ * parses the sequence header (converted by update_seq_header if needed),
+ * selects a libva profile/entrypoint, creates the VA config, surfaces,
+ * surface pool and VA context.  On any failure the parser is closed and
+ * parent->initialized is left FALSE.  Exits through cleanup: with
+ * parent->objectlock held from the lock below until the unlock at the end. */
+MIX_RESULT mix_videofmt_vc1_initialize(MixVideoFormat *mix,
+		MixVideoConfigParamsDec * config_params,
+		MixFrameManager * frame_mgr,
+		MixBufferPool * input_buf_pool,
+		MixSurfacePool ** surface_pool,
+		VADisplay va_display) {
+
+	uint32 pret = 0;
+	MIX_RESULT ret = MIX_RESULT_SUCCESS;
+	enum _vbp_parser_type ptype = VBP_VC1;
+	vbp_data_vc1 *data = NULL;
+	MixVideoFormat *parent = NULL;
+	MixVideoFormat_VC1 *self = NULL;
+	MixIOVec *header = NULL;
+	gint numprofs = 0, numactualprofs = 0;
+	gint numentrypts = 0, numactualentrypts = 0;
+	VADisplay vadisplay = NULL;
+	VAProfile *profiles = NULL;
+	VAEntrypoint *entrypts = NULL;
+	VAConfigAttrib attrib;
+	VAStatus vret = VA_STATUS_SUCCESS;
+	guint extra_surfaces = 0;
+	VASurfaceID *surfaces = NULL;
+	guint numSurfaces = 0;
+
+	//TODO Partition this method into smaller methods
+
+	if (mix == NULL || config_params == NULL || frame_mgr == NULL || !input_buf_pool || !surface_pool || !va_display)
+	{
+		LOG_E( "NUll pointer passed in\n");
+		return MIX_RESULT_NULL_PTR;
+	}
+
+	LOG_V( "Begin\n");
+
+	/* Chainup parent method.
+	 */
+
+	if (parent_class->initialize) {
+		ret = parent_class->initialize(mix, config_params,
+				frame_mgr, input_buf_pool, surface_pool,
+				va_display);
+	}
+
+	if (ret != MIX_RESULT_SUCCESS)
+	{
+		return ret;
+	}
+
+	if (!MIX_IS_VIDEOFORMAT_VC1(mix))
+		return MIX_RESULT_INVALID_PARAM;
+
+	parent = MIX_VIDEOFORMAT(mix);
+	self = MIX_VIDEOFORMAT_VC1(mix);
+
+	LOG_V( "Locking\n");
+	//From now on, we exit this function through cleanup:
+	g_mutex_lock(parent->objectlock);
+
+	//Load the bitstream parser
+	pret = vbp_open(ptype, &(parent->parser_handle));
+
+	if (!(pret == VBP_OK))
+	{
+		ret = MIX_RESULT_FAIL;
+		LOG_E( "Error opening parser\n");
+		goto cleanup;
+	}
+
+	LOG_V( "Opened parser\n");
+
+	ret = mix_videoconfigparamsdec_get_header(config_params,
+			&header);
+
+	if ((ret != MIX_RESULT_SUCCESS) || (header == NULL))
+	{
+		ret = MIX_RESULT_FAIL;
+		LOG_E( "Cannot get header data\n");
+		goto cleanup;
+	}
+
+	ret = mix_videoconfigparamsdec_get_extra_surface_allocation(config_params,
+			&extra_surfaces);
+
+	if (ret != MIX_RESULT_SUCCESS)
+	{
+		ret = MIX_RESULT_FAIL;
+		LOG_E( "Cannot get extra surface allocation setting\n");
+		goto cleanup;
+	}
+
+	LOG_V( "Calling parse on header data, handle %d\n", (int)parent->parser_handle);
+
+	//Convert WMV codec data to an RCV sequence header if needed
+	ret = mix_videofmt_vc1_update_seq_header(
+			config_params,
+			header);
+	if (ret != MIX_RESULT_SUCCESS)
+	{
+		ret = MIX_RESULT_FAIL;
+		LOG_E( "Error updating sequence header\n");
+		goto cleanup;
+	}
+
+	pret = vbp_parse(parent->parser_handle, header->data,
+			header->data_size, TRUE);
+
+	if (!((pret == VBP_OK) || (pret == VBP_DONE)))
+	{
+		ret = MIX_RESULT_FAIL;
+		LOG_E( "Error parsing header data, size %d\n", header->data_size);
+		goto cleanup;
+	}
+
+
+	LOG_V( "Parsed header\n");
+	//Get the header data and save
+	pret = vbp_query(parent->parser_handle, (void *)&data);
+
+	if ((pret != VBP_OK) || (data == NULL))
+	{
+		ret = MIX_RESULT_FAIL;
+		LOG_E( "Error reading parsed header data\n");
+		goto cleanup;
+	}
+
+	LOG_V( "Queried parser for header data\n");
+
+	//Time for libva initialization
+
+	vadisplay = parent->va_display;
+
+	numprofs = vaMaxNumProfiles(vadisplay);
+	profiles = g_malloc(numprofs*sizeof(VAProfile));
+
+	if (!profiles)
+	{
+		ret = MIX_RESULT_NO_MEMORY;
+		LOG_E( "Error allocating memory\n");
+		goto cleanup;
+	}
+
+	vret = vaQueryConfigProfiles(vadisplay, profiles,
+			&numactualprofs);
+	if (!(vret == VA_STATUS_SUCCESS))
+	{
+		ret = MIX_RESULT_FAIL;
+		LOG_E( "Error initializing video driver\n");
+		goto cleanup;
+	}
+
+	//check the desired profile support
+	gint vaprof = 0;
+
+	VAProfile profile;
+	switch (data->se_data->PROFILE)
+	{
+		case 0:
+		profile = VAProfileVC1Simple;
+		break;
+
+		case 1:
+		profile = VAProfileVC1Main;
+		break;
+
+		default:
+		profile = VAProfileVC1Advanced;
+		break;
+	}
+
+	for (; vaprof < numactualprofs; vaprof++)
+	{
+		if (profiles[vaprof] == profile)
+			break;
+	}
+	//Check against numactualprofs (the populated count), not numprofs:
+	//comparing against the allocation size could read an uninitialized
+	//profiles[] slot when the driver reports fewer profiles than the max
+	if (vaprof >= numactualprofs || profiles[vaprof] != profile)
+	//Did not get the profile we wanted
+	{
+		ret = MIX_RESULT_FAIL;
+		LOG_E( "Profile not supported by driver\n");
+		goto cleanup;
+	}
+
+	numentrypts = vaMaxNumEntrypoints(vadisplay);
+	entrypts = g_malloc(numentrypts*sizeof(VAEntrypoint));
+
+	if (!entrypts)
+	{
+		ret = MIX_RESULT_NO_MEMORY;
+		LOG_E( "Error allocating memory\n");
+		goto cleanup;
+	}
+
+	vret = vaQueryConfigEntrypoints(vadisplay, profiles[vaprof],
+			entrypts, &numactualentrypts);
+	if (!(vret == VA_STATUS_SUCCESS))
+	{
+		ret = MIX_RESULT_FAIL;
+		LOG_E( "Error initializing driver\n");
+		goto cleanup;
+	}
+
+	gint vaentrypt = 0;
+	for (; vaentrypt < numactualentrypts; vaentrypt++)
+	{
+		if (entrypts[vaentrypt] == VAEntrypointVLD)
+			break;
+	}
+	//Same bound fix as the profile search above
+	if (vaentrypt >= numactualentrypts || entrypts[vaentrypt] != VAEntrypointVLD)
+	//Did not get the entrypt we wanted
+	{
+		ret = MIX_RESULT_FAIL;
+		LOG_E( "Entry point not supported by driver\n");
+		goto cleanup;
+	}
+
+	//We are requesting RT attributes
+	attrib.type = VAConfigAttribRTFormat;
+
+	vret = vaGetConfigAttributes(vadisplay, profiles[vaprof],
+			entrypts[vaentrypt], &attrib, 1);
+
+	//TODO Handle other values returned for RT format
+	// and check with requested format provided in config params
+	//Right now only YUV 4:2:0 is supported by libva
+	// and this is our default
+	if (((attrib.value & VA_RT_FORMAT_YUV420) == 0) ||
+		vret != VA_STATUS_SUCCESS)
+	{
+		ret = MIX_RESULT_FAIL;
+		LOG_E( "Error initializing driver\n");
+		goto cleanup;
+	}
+
+	//Initialize and save the VA config ID
+	vret = vaCreateConfig(vadisplay, profiles[vaprof],
+			entrypts[vaentrypt], &attrib, 1, &(parent->va_config));
+
+	if (!(vret == VA_STATUS_SUCCESS))
+	{
+		ret = MIX_RESULT_FAIL;
+		LOG_E( "Error initializing driver\n");
+		goto cleanup;
+	}
+
+	LOG_V( "Created libva config with profile %d\n", vaprof);
+
+	//Check for loop filtering
+	if (data->se_data->LOOPFILTER == 1)
+		self->loopFilter = TRUE;
+	else
+		self->loopFilter = FALSE;
+
+	LOG_V( "loop filter is %d, TFCNTRFLAG is %d\n", data->se_data->LOOPFILTER, data->se_data->TFCNTRFLAG);
+
+	//Initialize the surface pool
+
+
+	if ((data->se_data->MAXBFRAMES > 0) || (data->se_data->PROFILE == 3) || (data->se_data->PROFILE == 1))
+	//If Advanced profile, have to assume B frames may be present, since MAXBFRAMES is not valid for this prof
+		self->haveBframes = TRUE;
+	else
+		self->haveBframes = FALSE;
+
+	//Calculate VC1 numSurfaces based on max number of B frames or
+	// MIX_VIDEO_VC1_SURFACE_NUM, whichever is less
+
+	//Adding 1 to work around VBLANK issue
+	parent->va_num_surfaces = 1 + extra_surfaces + ((3 + (self->haveBframes ? 1 : 0) <
+			MIX_VIDEO_VC1_SURFACE_NUM) ?
+			(3 + (self->haveBframes ? 1 : 0))
+			: MIX_VIDEO_VC1_SURFACE_NUM);
+
+	numSurfaces = parent->va_num_surfaces;
+
+	parent->va_surfaces = g_malloc(sizeof(VASurfaceID)*numSurfaces);
+
+	surfaces = parent->va_surfaces;
+
+	if (surfaces == NULL)
+	{
+		ret = MIX_RESULT_FAIL;
+		LOG_E( "Cannot allocate temporary data\n");
+		goto cleanup;
+	}
+
+	//NOTE(review): the 4th argument of this (old) vaCreateSurfaces API is the
+	//surface format; passing entrypts[vaentrypt] only works because
+	//VAEntrypointVLD happens to equal VA_RT_FORMAT_YUV420 — confirm intent
+	vret = vaCreateSurfaces(vadisplay, parent->picture_width,
+			parent->picture_height, entrypts[vaentrypt],
+			numSurfaces, surfaces);
+	if (!(vret == VA_STATUS_SUCCESS))
+	{
+		ret = MIX_RESULT_FAIL;
+		LOG_E( "Error allocating surfaces\n");
+		goto cleanup;
+	}
+
+	parent->surfacepool = mix_surfacepool_new();
+	*surface_pool = parent->surfacepool;
+
+	if (parent->surfacepool == NULL)
+	{
+		ret = MIX_RESULT_FAIL;
+		LOG_E( "Error initializing surface pool\n");
+		goto cleanup;
+	}
+
+
+	ret = mix_surfacepool_initialize(parent->surfacepool,
+			surfaces, numSurfaces);
+
+	switch (ret)
+	{
+		case MIX_RESULT_SUCCESS:
+			break;
+		case MIX_RESULT_ALREADY_INIT:
+		default:
+			ret = MIX_RESULT_ALREADY_INIT;
+			LOG_E( "Error init failure\n");
+			goto cleanup;
+			break;
+	}
+
+	LOG_V( "Created %d libva surfaces, MAXBFRAMES is %d\n", numSurfaces, data->se_data->MAXBFRAMES);
+
+	//Initialize and save the VA context ID
+	//Note: VA_PROGRESSIVE libva flag is only relevant to MPEG2
+	vret = vaCreateContext(vadisplay, parent->va_config,
+			parent->picture_width, parent->picture_height,
+			0, surfaces, numSurfaces,
+			&(parent->va_context));
+	if (!(vret == VA_STATUS_SUCCESS))
+	{
+		ret = MIX_RESULT_FAIL;
+		LOG_E( "Error initializing video driver\n");
+		goto cleanup;
+	}
+
+	LOG_V( "Created libva context width %d, height %d\n", parent->picture_width, parent->picture_height);
+
+	LOG_V( "mix_video vinfo:  Content type %s, %s\n", (header->data_size > 8) ? "VC-1" : "WMV", (data->se_data->INTERLACE) ? "interlaced" : "progressive");
+	LOG_V( "mix_video vinfo:  Content width %d, height %d\n", parent->picture_width, parent->picture_height);
+	LOG_V( "mix_video vinfo:  MAXBFRAMES %d (note that for Advanced profile, MAXBFRAMES can be zero and there still can be B frames in the content)\n", data->se_data->MAXBFRAMES);
+	LOG_V( "mix_video vinfo:  PROFILE %d, LEVEL %d\n", data->se_data->PROFILE, data->se_data->LEVEL);
+
+
+	cleanup:
+	if (ret != MIX_RESULT_SUCCESS) {
+		//Only close the parser if it was actually opened (vbp_open may
+		//have failed and left no valid handle)
+		if (parent->parser_handle)
+			pret = vbp_close(parent->parser_handle);
+		parent->parser_handle = NULL;
+		parent->initialized = FALSE;
+
+	} else {
+		parent->initialized = TRUE;
+	}
+
+	if (header != NULL)
+	{
+		if (header->data != NULL)
+			g_free(header->data);
+		g_free(header);
+		header = NULL;
+	}
+
+	g_free(profiles);
+	g_free(entrypts);
+
+	self->lastFrame = NULL;
+
+
+	LOG_V( "Unlocking\n");
+	g_mutex_unlock(parent->objectlock);
+
+	LOG_V( "End\n");
+
+	return ret;
+}
+
+/* Decode vmethod: under parent->objectlock, first flushes any frame left
+ * parsing from the previous timestamp, then feeds each input buffer to the
+ * vbp parser.  Buffers are ref'd and queued on parent->inputbufqueue; when
+ * the parser signals VBP_DONE a full frame is decoded via
+ * mix_videofmt_vc1_process_decode, otherwise parse_in_progress stays TRUE. */
+MIX_RESULT mix_videofmt_vc1_decode(MixVideoFormat *mix,
+		MixBuffer * bufin[], gint bufincnt,
+		MixVideoDecodeParams * decode_params) {
+
+	uint32 pret = 0;
+	int i = 0;
+	MixVideoFormat *parent = NULL;
+	MIX_RESULT ret = MIX_RESULT_SUCCESS;
+	guint64 ts = 0;
+	vbp_data_vc1 *data = NULL;
+	gboolean discontinuity = FALSE;
+	MixInputBufferEntry *bufentry = NULL;
+
+	if (mix == NULL || bufin == NULL || decode_params == NULL )
+	{
+		LOG_E( "NUll pointer passed in\n");
+		return MIX_RESULT_NULL_PTR;
+	}
+
+	//TODO remove iovout and iovoutcnt; they are not used (need to remove from MixVideo/MI-X API too)
+
+	LOG_V( "Begin\n");
+
+	/* Chainup parent method.
+		We are not chaining up to parent method for now.
+	 */
+
+#if 0
+	if (parent_class->decode) {
+		return parent_class->decode(mix, bufin, bufincnt,
+				decode_params);
+	}
+#endif
+
+	if (!MIX_IS_VIDEOFORMAT_VC1(mix))
+		return MIX_RESULT_INVALID_PARAM;
+
+	parent = MIX_VIDEOFORMAT(mix);
+
+
+	ret = mix_videodecodeparams_get_timestamp(decode_params,
+			&ts);
+	if (ret != MIX_RESULT_SUCCESS)
+	{
+		return MIX_RESULT_FAIL;
+	}
+
+	ret = mix_videodecodeparams_get_discontinuity(decode_params,
+			&discontinuity);
+	if (ret != MIX_RESULT_SUCCESS)
+	{
+		return MIX_RESULT_FAIL;
+	}
+
+	//From now on, we exit this function through cleanup:
+
+	LOG_V( "Locking\n");
+	g_mutex_lock(parent->objectlock);
+
+	//If this is a new frame and we haven't retrieved parser
+	//  workload data from previous frame yet, do so
+	if ((ts != parent->current_timestamp) &&
+			(parent->parse_in_progress))
+	{
+
+		//query for data
+		pret = vbp_query(parent->parser_handle,
+				(void *) &data);
+
+		if ((pret != VBP_OK) || (data == NULL))
+		{
+			ret = MIX_RESULT_FAIL;
+			LOG_E( "Error initializing parser\n");
+			goto cleanup;
+		}
+
+		LOG_V( "Queried for last frame data\n");
+
+		//process and decode data
+		ret = mix_videofmt_vc1_process_decode(mix,
+				data, parent->current_timestamp,
+				parent->discontinuity_frame_in_progress);
+
+		if (ret != MIX_RESULT_SUCCESS)
+		{
+			//We log this but need to process the new frame data, so do not return
+			LOG_E( "process_decode failed.\n");
+		}
+
+		LOG_V( "Called process and decode for last frame\n");
+
+		parent->parse_in_progress = FALSE;
+
+	}
+
+	parent->current_timestamp = ts;
+	parent->discontinuity_frame_in_progress = discontinuity;
+
+	LOG_V( "Starting current frame %d, timestamp %"G_GINT64_FORMAT"\n", mix_video_vc1_counter++, ts);
+
+	for (i = 0; i < bufincnt; i++)
+	{
+
+		LOG_V( "Calling parse for current frame, parse handle %d, buf %x, size %d\n", (int)parent->parser_handle, (guint)bufin[i]->data, bufin[i]->size);
+
+		pret = vbp_parse(parent->parser_handle,
+				bufin[i]->data,
+				bufin[i]->size,
+				FALSE);
+
+		LOG_V( "Called parse for current frame\n");
+
+		if (pret == VBP_DONE)
+		{
+			//query for data
+			pret = vbp_query(parent->parser_handle,
+					(void *) &data);
+
+			if ((pret != VBP_OK) || (data == NULL))
+			{
+				ret = MIX_RESULT_FAIL;
+				LOG_E( "Error getting parser data\n");
+				goto cleanup;
+			}
+
+			LOG_V( "Called query for current frame\n");
+
+			//Increase the ref count of this input buffer
+			mix_buffer_ref(bufin[i]);
+
+			//Create a new MixInputBufferEntry
+			//TODO make this from a pool to optimize
+			bufentry = g_malloc(sizeof(
+					MixInputBufferEntry));
+			if (bufentry == NULL)
+			{
+				ret = MIX_RESULT_NO_MEMORY;
+				LOG_E( "Error allocating bufentry\n");
+				goto cleanup;
+			}
+
+			bufentry->buf = bufin[i];
+			LOG_V( "Setting bufentry %x for mixbuffer %x ts to %"G_GINT64_FORMAT"\n", (guint)bufentry, (guint)bufentry->buf, ts);
+			bufentry->timestamp = ts;
+
+			LOG_V( "Enqueue this input buffer for current frame\n");
+			LOG_V( "bufentry->timestamp %"G_GINT64_FORMAT"\n", bufentry->timestamp);
+
+			//Enqueue this input buffer (released later by
+			// mix_videofmt_vc1_release_input_buffers)
+			g_queue_push_tail(parent->inputbufqueue,
+					(gpointer)bufentry);
+
+			//process and decode data
+			ret = mix_videofmt_vc1_process_decode(mix,
+					data, ts, discontinuity);
+
+			if (ret != MIX_RESULT_SUCCESS)
+			{
+				//We log this but continue since we need to complete our processing of input buffers
+				LOG_E( "Process_decode failed.\n");
+			}
+
+			LOG_V( "Called process and decode for current frame\n");
+
+			parent->parse_in_progress = FALSE;
+		}
+		else if (pret != VBP_OK)
+		{
+			//We log this but continue since we need to complete our processing of input buffers
+			LOG_E( "Parsing failed.\n");
+			ret = MIX_RESULT_FAIL;
+		}
+		else
+		{
+
+			LOG_V( "Enqueuing buffer and going on to next (if any) for this frame\n");
+
+			//Increase the ref count of this input buffer
+			mix_buffer_ref(bufin[i]);
+
+			//Create a new MixInputBufferEntry
+			//TODO make this from a pool to optimize
+			bufentry = g_malloc(sizeof
+					(MixInputBufferEntry));
+			if (bufentry == NULL)
+			{
+				ret = MIX_RESULT_NO_MEMORY;
+				LOG_E( "Error allocating bufentry\n");
+				goto cleanup;
+			}
+			bufentry->buf = bufin[i];
+			bufentry->timestamp = ts;
+
+			//Enqueue this input buffer
+			g_queue_push_tail(parent->inputbufqueue,
+					(gpointer)bufentry);
+			parent->parse_in_progress = TRUE;
+		}
+
+	}
+
+
+	cleanup:
+
+	LOG_V( "Unlocking\n");
+	g_mutex_unlock(parent->objectlock);
+
+
+	LOG_V( "End\n");
+
+	return ret;
+}
+
+#ifdef YUVDUMP
+//TODO Complete this YUVDUMP code and move into base class
+
+/* Debug-only (YUVDUMP) helper: maps the decoded surface of @frame as an
+ * NV12 VAImage and appends its planes to yuvdump.yuv.  Returns a VAStatus
+ * on libva failure despite the MIX_RESULT return type (pre-existing). */
+MIX_RESULT GetImageFromSurface (MixVideoFormat *mix, MixVideoFrame * frame)
+
+{
+
+	VAStatus vaStatus = VA_STATUS_SUCCESS;
+	VAImageFormat va_image_format;
+	/* Must be static: the image is created once (guarded by have_va_image
+	   below) and reused on later calls; an automatic variable would be
+	   uninitialized on every call after the first. */
+	static VAImage va_image;
+
+	unsigned char* pBuffer;
+	unsigned int ui32SrcWidth = mix->picture_width;
+	unsigned int ui32SrcHeight = mix->picture_height;
+	unsigned int ui32Stride;
+	unsigned int ui32ChromaOffset;
+	FILE *fp = NULL;
+	int r = 0;
+
+	int i;
+
+	g_print ("GetImageFromSurface \n");
+
+	if ((mix == NULL) || (frame == NULL))
+	{
+		LOG_E( "Null pointer passed in\n");
+		return MIX_RESULT_NULL_PTR;
+	}
+
+	fp = fopen("yuvdump.yuv", "a+");
+	if (fp == NULL)
+	{
+		LOG_E( "Cannot open yuvdump.yuv\n");
+		return MIX_RESULT_FAIL;
+	}
+
+	static int have_va_image = 0;
+
+	if (!have_va_image)
+	{
+		va_image_format.fourcc = VA_FOURCC_NV12;
+//		va_image_format.fourcc = VA_FOURCC_YV12;
+
+		vaStatus = vaCreateImage(mix->va_display, &va_image_format, ui32SrcWidth, ui32SrcHeight, &va_image);
+		have_va_image = 1;
+	}
+
+	vaStatus = vaGetImage( mix->va_display, frame->frame_id, 0, 0, ui32SrcWidth, ui32SrcHeight, va_image.image_id );
+	vaStatus = vaMapBuffer( mix->va_display, va_image.buf, (void **) &pBuffer);
+	ui32ChromaOffset = va_image.offsets[1];
+	ui32Stride = va_image.pitches[0];
+
+	if (VA_STATUS_SUCCESS != vaStatus)
+	{
+		g_print ("VideoProcessBlt: Unable to copy surface\n\r");
+		fclose(fp);  /* was leaked on this early-return path */
+		return vaStatus;
+	}
+
+	{
+		g_print ("before copy memory....\n");
+		g_print ("width = %d, height = %d\n", ui32SrcWidth, ui32SrcHeight);
+		g_print ("data_size = %d\n", va_image.data_size);
+		g_print ("num_planes = %d\n", va_image.num_planes);
+		g_print ("va_image.pitches[0] = %d\n", va_image.pitches[0]);
+		g_print ("va_image.pitches[1] = %d\n", va_image.pitches[1]);
+		g_print ("va_image.pitches[2] = %d\n", va_image.pitches[2]);
+		g_print ("va_image.offsets[0] = %d\n", va_image.offsets[0]);
+		g_print ("va_image.offsets[1] = %d\n", va_image.offsets[1]);
+		g_print ("va_image.offsets[2] = %d\n", va_image.offsets[2]);
+//		r = fwrite (pBuffer, 1, va_image.offsets[1], fp);
+
+		r = fwrite (pBuffer, va_image.offsets[1], 1, fp);
+
+		for (i = 0; i < ui32SrcWidth * ui32SrcHeight / 2; i +=2)
+			r = fwrite (pBuffer + va_image.offsets[1] + i / 2, 1, 1, fp);
+
+		for (i = 0; i < ui32SrcWidth * ui32SrcHeight / 2; i +=2)
+			r = fwrite (pBuffer + va_image.offsets[1] + i / 2 + 1, 1, 1, fp);
+
+		g_print ("ui32ChromaOffset = %d, ui32Stride = %d\n", ui32ChromaOffset, ui32Stride);
+
+	}
+
+	vaStatus = vaUnmapBuffer( mix->va_display, va_image.buf);
+
+	fclose(fp);  /* was leaked on the normal path too */
+	return vaStatus;
+
+}
+#endif /* YUVDUMP */
+
+
+/* Submits one picture (data->pic_data[pic_index]) to libva: chooses the
+ * forward/backward reference surfaces from frame_type (I/P/B/BI), builds the
+ * picture-parameter buffer, an optional bitplane buffer and one parameter +
+ * one data buffer per slice, then runs the vaBeginPicture/vaRenderPicture/
+ * vaEndPicture/vaSyncSurface sequence on the frame's surface.  B frames are
+ * dropped (MIX_RESULT_DROPFRAME) when both reference frames are not yet
+ * available, e.g. right after a seek. */
+MIX_RESULT mix_videofmt_vc1_decode_a_picture(
+	MixVideoFormat* mix,
+	vbp_data_vc1 *data,
+	int pic_index,
+	MixVideoFrame *frame)
+{
+	MIX_RESULT ret = MIX_RESULT_SUCCESS;
+	VAStatus vret = VA_STATUS_SUCCESS;
+	VADisplay vadisplay = NULL;
+	VAContextID vacontext;
+	guint buffer_id_cnt = 0;
+	VABufferID *buffer_ids = NULL;
+	MixVideoFormat_VC1 *self = MIX_VIDEOFORMAT_VC1(mix);
+
+	vbp_picture_data_vc1* pic_data = &(data->pic_data[pic_index]);
+	VAPictureParameterBufferVC1 *pic_params = pic_data->pic_parms;
+
+	if (pic_params == NULL)
+	{
+		ret = MIX_RESULT_NULL_PTR;
+		LOG_E( "Error reading parser data\n");
+		goto cleanup;
+	}
+
+	LOG_V( "num_slices is %d, allocating %d buffer_ids\n", pic_data->num_slices, (pic_data->num_slices * 2) + 2);
+
+	//Set up reference frames for the picture parameter buffer
+
+	//Set the picture type (I, B or P frame)
+	enum _picture_type frame_type = pic_params->picture_fields.bits.picture_type;
+
+
+	//Check for B frames after a seek
+	//We need to have both reference frames in hand before we can decode a B frame
+	//If we don't have both reference frames, we must return MIX_RESULT_DROPFRAME
+	//Note:  demuxer should do the right thing and only seek to I frame, so we should
+	//  not get P frame first, but may get B frames after the first I frame
+	if (frame_type == VC1_PTYPE_B)
+	{
+		if (self->reference_frames[1] == NULL)
+		{
+			LOG_E( "Insufficient reference frames for B frame\n");
+			ret = MIX_RESULT_DROPFRAME;
+			goto cleanup;
+		}
+	}
+
+	//Worst case: 2 buffers (pic params + bitplane) plus 2 per slice
+	buffer_ids = g_malloc(sizeof(VABufferID) * ((pic_data->num_slices * 2) + 2));
+	if (buffer_ids == NULL)
+	{
+		LOG_E( "Cannot allocate buffer IDs\n");
+		ret = MIX_RESULT_NO_MEMORY;
+		goto cleanup;
+	}
+
+	LOG_V( "Getting a new surface\n");
+	LOG_V( "frame type is %d\n", frame_type);
+
+	gulong surface = 0;
+
+	//Get our surface ID from the frame object
+	ret = mix_videoframe_get_frame_id(frame, &surface);
+	if (ret != MIX_RESULT_SUCCESS)
+	{
+		LOG_E( "Error getting surface ID from frame object\n");
+		goto cleanup;
+	}
+
+	//Get a frame from the surface pool
+
+	if (0 == pic_index)
+	{
+		//Set the frame type for the frame object (used in reordering by frame manager)
+		switch (frame_type)
+		{
+			case VC1_PTYPE_I:  // I frame type
+			case VC1_PTYPE_P:  // P frame type
+			case VC1_PTYPE_B:  // B frame type
+				ret = mix_videoframe_set_frame_type(frame, frame_type);
+				break;
+			case VC1_PTYPE_BI: // BI frame type
+				ret = mix_videoframe_set_frame_type(frame, TYPE_I);
+				break;
+			//Not indicated here	case VC1_PTYPE_SKIPPED:
+			default:
+				break;
+		}
+	}
+
+	if (ret != MIX_RESULT_SUCCESS)
+	{
+		LOG_E( "Error setting frame type on frame\n");
+		goto cleanup;
+	}
+
+	LOG_V( "Setting reference frames in picparams, frame_type = %d\n", frame_type);
+
+	//TODO Check if we need to add more handling of B or P frames when reference frames are not set up (such as after flush/seek)
+
+	switch (frame_type)
+	{
+		case VC1_PTYPE_I:  // I frame type
+			/* forward and backward reference pictures are not used but just set to current
+			surface to be in consistence with test suite
+			*/
+			pic_params->forward_reference_picture = surface;
+			pic_params->backward_reference_picture = surface;
+			LOG_V( "I frame, surface ID %u\n", (guint)frame->frame_id);
+			LOG_V( "mix_video vinfo:  Frame type is I\n");
+			break;
+		case VC1_PTYPE_P:  // P frame type
+
+			// check REFDIST in the picture parameter buffer
+			if (0 != pic_params->reference_fields.bits.reference_distance_flag &&
+				0 != pic_params->reference_fields.bits.reference_distance)
+			{
+				/* The previous decoded frame (distance is up to 16 but not 0) is used
+				for reference, as we don't allocate that many surfaces so the reference picture
+				could have been overwritten and hence not avaiable for reference.
+				*/
+				LOG_E( "reference distance is not 0!");
+				ret = MIX_RESULT_FAIL;
+				goto cleanup;
+			}
+			if (1 == pic_index)
+			{
+				// handle interlace field coding case
+				if (1 == pic_params->reference_fields.bits.num_reference_pictures ||
+					1 == pic_params->reference_fields.bits.reference_field_pic_indicator)
+				{
+					/* two reference fields or the second closest I/P field is used for
+					prediction. Set forward reference picture to INVALID so it will be
+					updated to a valid previous reconstructed reference frame later.
+					*/
+					pic_params->forward_reference_picture  = VA_INVALID_SURFACE;
+				}
+				else
+				{
+					/* the closest I/P is used for reference so it must be the
+					complementary field in the same surface.
+					*/
+					pic_params->forward_reference_picture  = surface;
+				}
+			}
+			if (VA_INVALID_SURFACE == pic_params->forward_reference_picture)
+			{
+				if (self->reference_frames[1])
+				{
+					pic_params->forward_reference_picture = self->reference_frames[1]->frame_id;
+				}
+				else if (self->reference_frames[0])
+				{
+					pic_params->forward_reference_picture = self->reference_frames[0]->frame_id;
+				}
+				else
+				{
+					ret = MIX_RESULT_FAIL;
+					LOG_E( "Error could not find reference frames for P frame\n");
+					goto cleanup;
+				}
+			}
+			pic_params->backward_reference_picture = VA_INVALID_SURFACE;
+
+			//NOTE(review): this log dereferences reference_frames[0], which can
+			//be NULL when only slot 1 supplied the forward reference — confirm
+			//(MIX_LOG_ENABLE-only path)
+			LOG_V( "P frame, surface ID %u, forw ref frame is %u\n", (guint)frame->frame_id, (guint)self->reference_frames[0]->frame_id);
+			LOG_V( "mix_video vinfo:  Frame type is P\n");
+			break;
+
+		case VC1_PTYPE_B:  // B frame type
+			//NOTE(review): this log runs before the haveBframes check and
+			//dereferences both reference frames — confirm (log-only path)
+			LOG_V( "B frame, forw ref %d, back ref %d\n", (guint)self->reference_frames[0]->frame_id, (guint)self->reference_frames[1]->frame_id);
+
+			if (!self->haveBframes)	//We don't expect B frames and have not allocated a surface
+						// for the extra ref frame so this is an error
+			{
+				ret = MIX_RESULT_FAIL;
+				LOG_E( "Unexpected B frame, cannot process\n");
+				goto cleanup;
+			}
+
+			pic_params->forward_reference_picture = self->reference_frames[0]->frame_id;
+			pic_params->backward_reference_picture = self->reference_frames[1]->frame_id;
+
+			LOG_V( "B frame, surface ID %u, forw ref %d, back ref %d\n", (guint)frame->frame_id, (guint)self->reference_frames[0]->frame_id, (guint)self->reference_frames[1]->frame_id);
+			LOG_V( "mix_video vinfo:  Frame type is B\n");
+			break;
+
+		case VC1_PTYPE_BI:
+			pic_params->forward_reference_picture = VA_INVALID_SURFACE;
+			pic_params->backward_reference_picture = VA_INVALID_SURFACE;
+			LOG_V( "BI frame\n");
+			LOG_V( "mix_video vinfo:  Frame type is BI\n");
+			break;
+
+		case VC1_PTYPE_SKIPPED:
+			//Will never happen here
+			break;
+
+		default:
+			LOG_V( "Hit default\n");
+			break;
+
+	}
+
+	//Loop filter handling
+	if (self->loopFilter)
+	{
+		LOG_V( "Setting in loop decoded picture to current frame\n");
+		LOG_V( "Double checking picparams inloop filter is %d\n", pic_params->entrypoint_fields.bits.loopfilter);
+		pic_params->inloop_decoded_picture = frame->frame_id;
+	}
+	else
+	{
+		LOG_V( "Setting in loop decoded picture to invalid\n");
+		pic_params->inloop_decoded_picture = VA_INVALID_SURFACE;
+	}
+
+	//Libva buffer set up
+
+	vadisplay = mix->va_display;
+	vacontext = mix->va_context;
+
+	LOG_V( "Creating libva picture parameter buffer\n");
+
+	//First the picture parameter buffer
+	vret = vaCreateBuffer(
+			vadisplay,
+			vacontext,
+			VAPictureParameterBufferType,
+			sizeof(VAPictureParameterBufferVC1),
+			1,
+			pic_params,
+			&buffer_ids[buffer_id_cnt]);
+
+	buffer_id_cnt++;
+
+	if (vret != VA_STATUS_SUCCESS)
+	{
+		ret = MIX_RESULT_FAIL;
+		LOG_E( "Video driver returned error from vaCreateBuffer\n");
+		goto cleanup;
+	}
+
+	LOG_V( "Creating libva bitplane buffer\n");
+
+	if (pic_params->bitplane_present.value)
+	{
+		//Then the bitplane buffer
+		vret = vaCreateBuffer(
+				vadisplay,
+				vacontext,
+				VABitPlaneBufferType,
+				pic_data->size_bitplanes,
+				1,
+				pic_data->packed_bitplanes,
+				&buffer_ids[buffer_id_cnt]);
+
+		buffer_id_cnt++;
+
+		if (vret != VA_STATUS_SUCCESS)
+		{
+			ret = MIX_RESULT_FAIL;
+			LOG_E( "Video driver returned error from vaCreateBuffer\n");
+			goto cleanup;
+		}
+	}
+
+	//Now for slices
+	int i = 0;
+	for (; i < pic_data->num_slices; i++)
+	{
+		LOG_V( "Creating libva slice parameter buffer, for slice %d\n", i);
+
+		//Do slice parameters
+		vret = vaCreateBuffer(
+				vadisplay,
+				vacontext,
+				VASliceParameterBufferType,
+				sizeof(VASliceParameterBufferVC1),
+				1,
+				&(pic_data->slc_data[i].slc_parms),
+				&buffer_ids[buffer_id_cnt]);
+
+		if (vret != VA_STATUS_SUCCESS)
+		{
+			ret = MIX_RESULT_FAIL;
+			LOG_E( "Video driver returned error from vaCreateBuffer\n");
+			goto cleanup;
+		}
+
+		buffer_id_cnt++;
+
+		LOG_V( "Creating libva slice data buffer for slice %d, using slice address %x, with offset %d and size %u\n", i, (guint)pic_data->slc_data[i].buffer_addr, pic_data->slc_data[i].slc_parms.slice_data_offset, pic_data->slc_data[i].slice_size);
+
+
+		//Do slice data
+		vret = vaCreateBuffer(
+				vadisplay,
+				vacontext,
+				VASliceDataBufferType,
+				//size
+				pic_data->slc_data[i].slice_size,
+				//num_elements
+				1,
+				//slice data buffer pointer
+				//Note that this is the original data buffer ptr;
+				// offset to the actual slice data is provided in
+				// slice_data_offset in VASliceParameterBufferVC1
+				pic_data->slc_data[i].buffer_addr + pic_data->slc_data[i].slice_offset,
+				&buffer_ids[buffer_id_cnt]);
+
+		buffer_id_cnt++;
+
+		if (vret != VA_STATUS_SUCCESS)
+		{
+			ret = MIX_RESULT_FAIL;
+			LOG_E( "Video driver returned error from vaCreateBuffer\n");
+			goto cleanup;
+		}
+	}
+
+
+	LOG_V( "Calling vaBeginPicture\n");
+
+	//Now we can begin the picture
+	vret = vaBeginPicture(vadisplay, vacontext, surface);
+
+	if (vret != VA_STATUS_SUCCESS)
+	{
+		ret = MIX_RESULT_FAIL;
+		LOG_E( "Video driver returned error from vaBeginPicture\n");
+		goto cleanup;
+	}
+
+	LOG_V( "Calling vaRenderPicture\n");
+
+	//Render the picture
+	vret = vaRenderPicture(
+			vadisplay,
+			vacontext,
+			buffer_ids,
+			buffer_id_cnt);
+
+	if (vret != VA_STATUS_SUCCESS)
+	{
+		ret = MIX_RESULT_FAIL;
+		LOG_E( "Video driver returned error from vaRenderPicture\n");
+		goto cleanup;
+	}
+
+	LOG_V( "Calling vaEndPicture\n");
+
+	//End picture
+	vret = vaEndPicture(vadisplay, vacontext);
+
+	if (vret != VA_STATUS_SUCCESS)
+	{
+		ret = MIX_RESULT_FAIL;
+		LOG_E( "Video driver returned error from vaEndPicture\n");
+		goto cleanup;
+	}
+
+	LOG_V( "Calling vaSyncSurface\n");
+
+	//Decode the picture
+	vret = vaSyncSurface(vadisplay, surface);
+
+	if (vret != VA_STATUS_SUCCESS)
+	{
+		ret = MIX_RESULT_FAIL;
+		LOG_E( "Video driver returned error from vaSyncSurface\n");
+		goto cleanup;
+	}
+
+cleanup:
+	if (NULL != buffer_ids)
+		g_free(buffer_ids);
+
+	return ret;
+}
+
+
+/**
+ * mix_videofmt_vc1_process_decode:
+ * @mix: the MixVideoFormat object (must be a MixVideoFormat_VC1)
+ * @data: parser output for one sample buffer (one or more pictures)
+ * @timestamp: presentation timestamp to stamp on the decoded frame
+ * @discontinuity: discontinuity flag to attach to the frame
+ *
+ * Decodes the picture(s) parsed from a sample buffer, sets frame
+ * metadata (timestamp, discontinuity, frame structure), maintains the
+ * forward/backward reference frames and lastFrame used for skipped-frame
+ * handling, and enqueues the result with the frame manager. Input
+ * buffers queued for @timestamp are released on every exit path.
+ */
+MIX_RESULT mix_videofmt_vc1_process_decode(
+ MixVideoFormat *mix,
+ vbp_data_vc1 *data,
+ guint64 timestamp,
+ gboolean discontinuity)
+{
+ MIX_RESULT ret = MIX_RESULT_SUCCESS;
+ gboolean unrefVideoFrame = FALSE;
+ MixVideoFrame *frame = NULL;
+
+ //TODO Partition this method into smaller methods
+
+ LOG_V( "Begin\n");
+
+ if ((mix == NULL) || (data == NULL))
+ {
+ LOG_E( "Null pointer passed in\n");
+ return MIX_RESULT_NULL_PTR;
+ }
+
+ if (0 == data->num_pictures || NULL == data->pic_data)
+ {
+ return MIX_RESULT_INVALID_PARAM;
+ }
+
+ if (!MIX_IS_VIDEOFORMAT_VC1(mix))
+ {
+ return MIX_RESULT_INVALID_PARAM;
+ }
+
+ //After this point, all exits from this function are through cleanup:
+ MixVideoFormat_VC1 *self = MIX_VIDEOFORMAT_VC1(mix);
+
+ //Check for skipped frame
+ //For skipped frames, we will reuse the last P or I frame surface and treat as P frame
+ if (data->pic_data[0].picture_is_skipped == VC1_PTYPE_SKIPPED)
+ {
+
+ LOG_V( "mix_video vinfo: Frame type is SKIPPED\n");
+ if (self->lastFrame == NULL)
+ {
+ //we shouldn't get a skipped frame before we are able to get a real frame
+ LOG_E( "Error for skipped frame, prev frame is NULL\n");
+ ret = MIX_RESULT_DROPFRAME;
+ goto cleanup;
+ }
+
+ //We don't worry about this memory allocation because SKIPPED is not a common case
+ //Doing the allocation on the fly is a more efficient choice than trying to manage yet another pool
+ MixVideoFrame *skip_frame = mix_videoframe_new();
+ if (skip_frame == NULL)
+ {
+ ret = MIX_RESULT_NO_MEMORY;
+ LOG_E( "Error allocating new video frame object for skipped frame\n");
+ goto cleanup;
+ }
+
+ //The skip frame borrows lastFrame's surface: ref lastFrame so it
+ //stays alive for as long as the skip frame references its id.
+ mix_videoframe_set_is_skipped(skip_frame, TRUE);
+// mix_videoframe_ref(skip_frame);
+ mix_videoframe_ref(self->lastFrame);
+ gulong frameid = VA_INVALID_SURFACE;
+ mix_videoframe_get_frame_id(self->lastFrame, &frameid);
+ mix_videoframe_set_frame_id(skip_frame, frameid);
+ mix_videoframe_set_frame_type(skip_frame, VC1_PTYPE_P);
+ mix_videoframe_set_real_frame(skip_frame, self->lastFrame);
+ mix_videoframe_set_timestamp(skip_frame, timestamp);
+ mix_videoframe_set_discontinuity(skip_frame, FALSE);
+ LOG_V( "Processing skipped frame %x, frame_id set to %d, ts %"G_GINT64_FORMAT"\n", (guint)skip_frame, (guint)frameid, timestamp);
+
+ //Process reference frames
+ LOG_V( "Updating skipped frame forward/backward references for libva\n");
+ mix_videofmt_vc1_handle_ref_frames(mix,
+ VC1_PTYPE_P,
+ skip_frame);
+
+ //Enqueue the skipped frame using frame manager
+ ret = mix_framemanager_enqueue(mix->framemgr, skip_frame);
+
+ goto cleanup;
+
+ }
+
+ //Obtain a target surface from the pool; on success we own one
+ //reference to it until it is enqueued (tracked by unrefVideoFrame).
+ ret = mix_surfacepool_get(mix->surfacepool, &frame);
+ if (ret != MIX_RESULT_SUCCESS)
+ {
+ LOG_E( "Error getting frame from surfacepool\n");
+ goto cleanup;
+
+ }
+ unrefVideoFrame = TRUE;
+
+ // TO DO: handle multiple frames parsed from a sample buffer
+ //NOTE(review): when num_pictures > 1 this decodes exactly two pictures
+ //(presumably top/bottom fields of an interlaced frame) — confirm.
+ int index;
+ int num_pictures = (data->num_pictures > 1) ? 2 : 1;
+
+ for (index = 0; index < num_pictures; index++)
+ {
+ ret = mix_videofmt_vc1_decode_a_picture(mix, data, index, frame);
+ if (ret != MIX_RESULT_SUCCESS)
+ {
+ LOG_E( "Failed to decode a picture.\n");
+ goto cleanup;
+ }
+ }
+
+ //Set the discontinuity flag
+ mix_videoframe_set_discontinuity(frame, discontinuity);
+
+ //Set the timestamp
+ mix_videoframe_set_timestamp(frame, timestamp);
+
+ // setup frame structure
+ if (data->num_pictures > 1)
+ {
+ if (data->pic_data[0].pic_parms->picture_fields.bits.is_first_field)
+ mix_videoframe_set_frame_structure(frame, VA_TOP_FIELD);
+ else
+ mix_videoframe_set_frame_structure(frame, VA_BOTTOM_FIELD);
+ }
+ else
+ {
+ mix_videoframe_set_frame_structure(frame, VA_FRAME_PICTURE);
+ }
+
+ enum _picture_type frame_type = data->pic_data[0].pic_parms->picture_fields.bits.picture_type;
+
+ //For I or P frames
+ //Save this frame off for skipped frame handling
+ if ((frame_type == VC1_PTYPE_I) || (frame_type == VC1_PTYPE_P))
+ {
+ if (self->lastFrame != NULL)
+ {
+ mix_videoframe_unref(self->lastFrame);
+ }
+ self->lastFrame = frame;
+ mix_videoframe_ref(frame);
+ }
+
+ //Update the references frames for the current frame
+ if ((frame_type == VC1_PTYPE_I) || (frame_type == VC1_PTYPE_P)) //If I or P frame, update the reference array
+ {
+ LOG_V( "Updating forward/backward references for libva\n");
+ mix_videofmt_vc1_handle_ref_frames(mix,
+ frame_type,
+ frame);
+ }
+
+//TODO Complete YUVDUMP code and move into base class
+#ifdef YUVDUMP
+ if (mix_video_vc1_counter < 10)
+ ret = GetImageFromSurface (mix, frame);
+// g_usleep(5000000);
+#endif /* YUVDUMP */
+
+ LOG_V( "Enqueueing the frame with frame manager, timestamp %"G_GINT64_FORMAT"\n", timestamp);
+
+ //Enqueue the decoded frame using frame manager
+ ret = mix_framemanager_enqueue(mix->framemgr, frame);
+
+ if (ret != MIX_RESULT_SUCCESS)
+ {
+ LOG_E( "Error enqueuing frame object\n");
+ goto cleanup;
+ }
+ //Ownership of the frame reference passed to the frame manager
+ unrefVideoFrame = FALSE;
+
+
+cleanup:
+
+ //Always release the input buffers for this timestamp, and drop our
+ //frame reference if it was never handed off to the frame manager.
+ mix_videofmt_vc1_release_input_buffers(mix, timestamp);
+ if (unrefVideoFrame)
+ mix_videoframe_unref(frame);
+
+
+ LOG_V( "End\n");
+
+ return ret;
+}
+
+/**
+ * mix_videofmt_vc1_flush:
+ * @mix: the MixVideoFormat object
+ *
+ * Flushes decoder state under the object lock: drains and unrefs every
+ * queued input buffer, resets the parse-in-progress/discontinuity/
+ * timestamp state, releases both reference frames, and flushes the
+ * parser. Returns MIX_RESULT_FAIL if the parser flush fails.
+ */
+MIX_RESULT mix_videofmt_vc1_flush(MixVideoFormat *mix)
+{
+ MIX_RESULT ret = MIX_RESULT_SUCCESS;
+
+ if (mix == NULL)
+ {
+ LOG_E( "Null pointer passed in\n");
+ return MIX_RESULT_NULL_PTR;
+ }
+
+ LOG_V( "Begin\n");
+
+ uint32 pret = 0;
+ MixInputBufferEntry *bufentry = NULL;
+
+ /* Chainup parent method.
+ We are not chaining up to parent method for now.
+ */
+
+#if 0
+ if (parent_class->flush)
+ {
+ return parent_class->flush(mix, msg);
+ }
+#endif
+
+ MixVideoFormat_VC1 *self = MIX_VIDEOFORMAT_VC1(mix);
+
+ g_mutex_lock(mix->objectlock);
+
+ //Clear the contents of inputbufqueue
+ while (!g_queue_is_empty(mix->inputbufqueue))
+ {
+ bufentry = (MixInputBufferEntry *) g_queue_pop_head(mix->inputbufqueue);
+ if (bufentry == NULL)
+ continue;
+
+ mix_buffer_unref(bufentry->buf);
+ g_free(bufentry);
+ }
+
+ //Clear parse_in_progress flag and current timestamp
+ mix->parse_in_progress = FALSE;
+ mix->discontinuity_frame_in_progress = FALSE;
+ mix->current_timestamp = 0;
+
+ //Drop the forward/backward reference frames so the next decode
+ //starts from a clean reference state.
+ int i = 0;
+ for (; i < 2; i++)
+ {
+ if (self->reference_frames[i] != NULL)
+ {
+ mix_videoframe_unref(self->reference_frames[i]);
+ self->reference_frames[i] = NULL;
+ }
+ }
+
+ //Call parser flush
+ pret = vbp_flush(mix->parser_handle);
+ if (pret != VBP_OK)
+ ret = MIX_RESULT_FAIL;
+
+ g_mutex_unlock(mix->objectlock);
+
+ LOG_V( "End\n");
+
+ return ret;
+}
+
+/**
+ * mix_videofmt_vc1_eos:
+ * @mix: the MixVideoFormat object
+ *
+ * Handles end-of-stream: if a frame parse is in progress, queries the
+ * parser for its final data and decodes it, then signals EOS to the
+ * frame manager.
+ */
+MIX_RESULT mix_videofmt_vc1_eos(MixVideoFormat *mix)
+{
+ MIX_RESULT ret = MIX_RESULT_SUCCESS;
+ vbp_data_vc1 *data = NULL;
+ uint32 pret = 0;
+
+ if (mix == NULL)
+ {
+ LOG_E( "Null pointer passed in\n");
+ return MIX_RESULT_NULL_PTR;
+ }
+
+ LOG_V( "Begin\n");
+
+
+ /* Chainup parent method.
+ We are not chaining up to parent method for now.
+ */
+
+#if 0
+ if (parent_class->eos)
+ {
+ return parent_class->eos(mix, msg);
+ }
+#endif
+
+ g_mutex_lock(mix->objectlock);
+
+ //if a frame is in progress, process the frame
+ if (mix->parse_in_progress)
+ {
+ //query for data
+ pret = vbp_query(mix->parser_handle, (void *) &data);
+
+ if ((pret != VBP_OK) || (data == NULL))
+ {
+ ret = MIX_RESULT_FAIL;
+ LOG_E( "Error getting last parse data\n");
+ goto cleanup;
+ }
+
+ //process and decode data
+ ret = mix_videofmt_vc1_process_decode(mix,
+ data, mix->current_timestamp,
+ mix->discontinuity_frame_in_progress);
+ mix->parse_in_progress = FALSE;
+ if (ret != MIX_RESULT_SUCCESS)
+ {
+ LOG_E( "Error processing last frame\n");
+ goto cleanup;
+ }
+ }
+
+cleanup:
+
+ g_mutex_unlock(mix->objectlock);
+
+ //Call Frame Manager with _eos()
+ //NOTE(review): a failure result from the decode above is overwritten
+ //here by the frame manager's EOS result, so the caller never sees the
+ //earlier error — confirm this is intentional.
+ ret = mix_framemanager_eos(mix->framemgr);
+
+ LOG_V( "End\n");
+
+ return ret;
+}
+
+/**
+ * mix_videofmt_vc1_deinitialize:
+ * @mix: the MixVideoFormat object
+ *
+ * Deinitializes the VC-1 format object by delegating to the parent
+ * class implementation when one is installed. Most teardown actually
+ * happens in parent_class->finalize() and in _finalize.
+ */
+MIX_RESULT mix_videofmt_vc1_deinitialize(MixVideoFormat *mix)
+{
+ //Note this method is not called; may remove in future
+ if (NULL == mix)
+ {
+ LOG_E( "Null pointer passed in\n");
+ return MIX_RESULT_NULL_PTR;
+ }
+
+ LOG_V( "Begin\n");
+
+ /* Chain up to the parent method when it exists. */
+ if (parent_class->deinitialize != NULL)
+ return parent_class->deinitialize(mix);
+
+ LOG_V( "End\n");
+
+ return MIX_RESULT_SUCCESS;
+}
+
+/**
+ * mix_videofmt_vc1_handle_ref_frames:
+ * @mix: the MixVideoFormat object
+ * @frame_type: picture type; must be VC1_PTYPE_I or VC1_PTYPE_P
+ * @current_frame: the frame to install as a reference (gets one ref)
+ *
+ * Updates the reference-frame array after decoding an I or P picture.
+ * With B frames present, keeps a sliding window of two references
+ * (forward in [0], backward in [1]); otherwise keeps only the forward
+ * reference. Fails for B/BI pictures, which are never references.
+ */
+MIX_RESULT mix_videofmt_vc1_handle_ref_frames(
+ MixVideoFormat *mix,
+ enum _picture_type frame_type,
+ MixVideoFrame * current_frame)
+{
+
+ LOG_V( "Begin\n");
+
+ if (mix == NULL || current_frame == NULL)
+ {
+ LOG_E( "Null pointer passed in\n");
+ return MIX_RESULT_NULL_PTR;
+ }
+
+ MixVideoFormat_VC1 *self = MIX_VIDEOFORMAT_VC1(mix);
+
+
+ switch (frame_type)
+ {
+ case VC1_PTYPE_I: // I frame type
+ case VC1_PTYPE_P: // P frame type
+ //Take a reference; it is released when the frame rotates out of
+ //the reference array (or stays held in reference_frames[]).
+ LOG_V( "Refing reference frame %x\n", (guint) current_frame);
+ mix_videoframe_ref(current_frame);
+
+ //If we have B frames, we need to keep forward and backward reference frames
+ if (self->haveBframes)
+ {
+ if (self->reference_frames[0] == NULL) //should only happen on first frame
+ {
+ self->reference_frames[0] = current_frame;
+// self->reference_frames[1] = NULL;
+ }
+ else if (self->reference_frames[1] == NULL) //should only happen on second frame
+ {
+ self->reference_frames[1] = current_frame;
+ }
+ else
+ {
+ //Slide the window: drop the oldest forward reference,
+ //promote the backward one, append the new frame.
+ LOG_V( "Releasing reference frame %x\n", (guint) self->reference_frames[0]);
+ mix_videoframe_unref(self->reference_frames[0]);
+ self->reference_frames[0] = self->reference_frames[1];
+ self->reference_frames[1] = current_frame;
+ }
+ }
+ else //No B frames in this content, only need to keep the forward reference frame
+ {
+ LOG_V( "Releasing reference frame %x\n", (guint) self->reference_frames[0]);
+ if (self->reference_frames[0] != NULL)
+ mix_videoframe_unref(self->reference_frames[0]);
+ self->reference_frames[0] = current_frame;
+
+ }
+ break;
+ case VC1_PTYPE_B: // B or BI frame type (should not happen)
+ case VC1_PTYPE_BI:
+ default:
+ LOG_E( "Wrong frame type for handling reference frames\n");
+ return MIX_RESULT_FAIL;
+ break;
+
+ }
+
+ LOG_V( "End\n");
+
+ return MIX_RESULT_SUCCESS;
+}
+
+/**
+ * mix_videofmt_vc1_release_input_buffers:
+ * @mix: the MixVideoFormat object
+ * @timestamp: release every queued input buffer carrying this timestamp
+ *
+ * Pops entries off the head of the input buffer queue for as long as
+ * their timestamp matches @timestamp, unrefing each MixBuffer and
+ * freeing its queue entry. Stops at the first non-matching entry.
+ */
+MIX_RESULT mix_videofmt_vc1_release_input_buffers(
+ MixVideoFormat *mix,
+ guint64 timestamp)
+{
+ LOG_V( "Begin\n");
+
+ if (NULL == mix)
+ return MIX_RESULT_NULL_PTR;
+
+ //Dequeue and release all input buffers for this frame
+
+ LOG_V( "Releasing all the MixBuffers for this frame\n");
+
+ //Peek first so a non-matching head is left on the queue untouched.
+ for (;;)
+ {
+ MixInputBufferEntry *entry =
+ (MixInputBufferEntry *) g_queue_peek_head(mix->inputbufqueue);
+ if (NULL == entry)
+ break;
+
+ LOG_V( "head of queue buf %x, timestamp %"G_GINT64_FORMAT", buffer timestamp %"G_GINT64_FORMAT"\n", (guint)entry->buf, timestamp, entry->timestamp);
+
+ if (entry->timestamp != timestamp)
+ {
+ LOG_V( "buf %x, timestamp %"G_GINT64_FORMAT", buffer timestamp %"G_GINT64_FORMAT"\n", (guint)entry->buf, timestamp, entry->timestamp);
+ break;
+ }
+
+ entry = (MixInputBufferEntry *) g_queue_pop_head(mix->inputbufqueue);
+
+ LOG_V( "Unref this MixBuffers %x\n", (guint)entry->buf);
+ mix_buffer_unref(entry->buf);
+ g_free(entry);
+ }
+
+
+ LOG_V( "End\n");
+
+ return MIX_RESULT_SUCCESS;
+}
+
+
diff --git a/mix_video/src/mixvideoformat_vc1.h b/mix_video/src/mixvideoformat_vc1.h
new file mode 100644
index 0000000..9af8a8d
--- /dev/null
+++ b/mix_video/src/mixvideoformat_vc1.h
@@ -0,0 +1,123 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+#ifndef __MIX_VIDEOFORMAT_VC1_H__
+#define __MIX_VIDEOFORMAT_VC1_H__
+
+#include "mixvideoformat.h"
+#include "mixvideoframe_private.h"
+
+//Note: this is only a max limit. Actual number of surfaces allocated is calculated in mix_videoformat_vc1_initialize()
+#define MIX_VIDEO_VC1_SURFACE_NUM 8
+
+/*
+ * Type macros.
+ */
+#define MIX_TYPE_VIDEOFORMAT_VC1 (mix_videoformat_vc1_get_type ())
+#define MIX_VIDEOFORMAT_VC1(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_VIDEOFORMAT_VC1, MixVideoFormat_VC1))
+#define MIX_IS_VIDEOFORMAT_VC1(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEOFORMAT_VC1))
+#define MIX_VIDEOFORMAT_VC1_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_VIDEOFORMAT_VC1, MixVideoFormat_VC1Class))
+#define MIX_IS_VIDEOFORMAT_VC1_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEOFORMAT_VC1))
+#define MIX_VIDEOFORMAT_VC1_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEOFORMAT_VC1, MixVideoFormat_VC1Class))
+
+typedef struct _MixVideoFormat_VC1 MixVideoFormat_VC1;
+typedef struct _MixVideoFormat_VC1Class MixVideoFormat_VC1Class;
+
+struct _MixVideoFormat_VC1 {
+ /*< public > */
+ MixVideoFormat parent;
+
+ /*< public > */
+
+ /*< private > */
+ MixVideoFrame * reference_frames[2]; /* forward ([0]) and backward ([1]) reference frames */
+ gboolean haveBframes; /* TRUE if the stream can contain B frames */
+ gboolean loopFilter; /* TRUE if in-loop deblocking filter is enabled */
+ MixVideoFrame * lastFrame; /* last decoded I/P frame; reused for skipped frames */
+};
+
+/**
+ * MixVideoFormat_VC1Class:
+ *
+ * MI-X Video object class
+ */
+struct _MixVideoFormat_VC1Class {
+ /*< public > */
+ MixVideoFormatClass parent_class;
+
+ /* class members */
+
+ /*< public > */
+};
+
+/**
+ * mix_videoformat_vc1_get_type:
+ * @returns: type
+ *
+ * Get the type of object.
+ */
+GType mix_videoformat_vc1_get_type(void);
+
+/**
+ * mix_videoformat_vc1_new:
+ * @returns: A newly allocated instance of #MixVideoFormat_VC1
+ *
+ * Use this method to create new instance of #MixVideoFormat_VC1
+ */
+MixVideoFormat_VC1 *mix_videoformat_vc1_new(void);
+
+/**
+ * mix_videoformat_vc1_ref:
+ * @mix: object to add reference
+ * @returns: the MixVideoFormat_VC1 instance where reference count has been increased.
+ *
+ * Add reference count.
+ */
+MixVideoFormat_VC1 *mix_videoformat_vc1_ref(MixVideoFormat_VC1 * mix);
+
+/**
+ * mix_videoformat_vc1_unref:
+ * @obj: object to unref.
+ *
+ * Decrement reference count of the object.
+ */
+#define mix_videoformat_vc1_unref(obj) g_object_unref (G_OBJECT(obj))
+
+/* Class Methods */
+
+/* VC1 vmethods */
+MIX_RESULT mix_videofmt_vc1_getcaps(MixVideoFormat *mix, GString *msg);
+MIX_RESULT mix_videofmt_vc1_initialize(MixVideoFormat *mix,
+ MixVideoConfigParamsDec * config_params,
+ MixFrameManager * frame_mgr,
+ MixBufferPool * input_buf_pool,
+ MixSurfacePool ** surface_pool,
+ VADisplay va_display);
+MIX_RESULT mix_videofmt_vc1_decode(MixVideoFormat *mix, MixBuffer * bufin[],
+ gint bufincnt, MixVideoDecodeParams * decode_params);
+MIX_RESULT mix_videofmt_vc1_flush(MixVideoFormat *mix);
+MIX_RESULT mix_videofmt_vc1_eos(MixVideoFormat *mix);
+MIX_RESULT mix_videofmt_vc1_deinitialize(MixVideoFormat *mix);
+
+/* Local Methods */
+
+MIX_RESULT mix_videofmt_vc1_handle_ref_frames(MixVideoFormat *mix,
+ enum _picture_type frame_type,
+ MixVideoFrame * current_frame);
+
+
+MIX_RESULT mix_videofmt_vc1_process_decode(MixVideoFormat *mix,
+ vbp_data_vc1 *data,
+ guint64 timestamp,
+ gboolean discontinuity);
+
+
+MIX_RESULT mix_videofmt_vc1_release_input_buffers(MixVideoFormat *mix,
+ guint64 timestamp);
+
+#endif /* __MIX_VIDEOFORMAT_VC1_H__ */
diff --git a/mix_video/src/mixvideoformatenc.c b/mix_video/src/mixvideoformatenc.c
new file mode 100644
index 0000000..e7d1e8e
--- /dev/null
+++ b/mix_video/src/mixvideoformatenc.c
@@ -0,0 +1,502 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+#include <glib.h>
+#include "mixvideolog.h"
+#include "mixvideoformatenc.h"
+
+//#define MDEBUG
+
+/* Default vmethods implementation */
+static MIX_RESULT mix_videofmtenc_getcaps_default(MixVideoFormatEnc *mix,
+ GString *msg);
+static MIX_RESULT mix_videofmtenc_initialize_default(MixVideoFormatEnc *mix,
+ MixVideoConfigParamsEnc * config_params_enc,
+ MixFrameManager * frame_mgr,
+ MixBufferPool * input_buf_pool,
+ MixSurfacePool ** surface_pool,
+ VADisplay vadisplay);
+
+static MIX_RESULT
+mix_videofmtenc_encode_default(MixVideoFormatEnc *mix, MixBuffer * bufin[],
+ gint bufincnt, MixIOVec * iovout[], gint iovoutcnt,
+ MixVideoEncodeParams * encode_params);
+static MIX_RESULT mix_videofmtenc_flush_default(MixVideoFormatEnc *mix);
+static MIX_RESULT mix_videofmtenc_eos_default(MixVideoFormatEnc *mix);
+static MIX_RESULT mix_videofmtenc_deinitialize_default(MixVideoFormatEnc *mix);
+static MIX_RESULT mix_videofmtenc_get_max_coded_buffer_size_default(
+ MixVideoFormatEnc *mix, guint *max_size);
+
+
+static GObjectClass *parent_class = NULL;
+
+static void mix_videoformatenc_finalize(GObject * obj);
+G_DEFINE_TYPE (MixVideoFormatEnc, mix_videoformatenc, G_TYPE_OBJECT);
+
+/* Instance initializer: create the object lock and set every member to
+ * a safe default so finalize() can run even if initialize() never did. */
+static void mix_videoformatenc_init(MixVideoFormatEnc * self) {
+ /* TODO: public member initialization */
+
+ /* TODO: private member initialization */
+
+ self->objectlock = g_mutex_new();
+
+ self->initialized = FALSE;
+ self->framemgr = NULL;
+ self->surfacepool = NULL;
+ self->inputbufpool = NULL;
+ self->inputbufqueue = NULL;
+ self->va_display = NULL;
+ self->va_context = 0;
+ self->va_config = 0;
+ self->mime_type = NULL;
+ self->frame_rate_num= 0;
+ self->frame_rate_denom = 1;
+ self->picture_width = 0;
+ self->picture_height = 0;
+ self->initial_qp = 0;
+ self->min_qp = 0;
+ self->intra_period = 0;
+ self->bitrate = 0;
+ self->share_buf_mode = FALSE;
+ self->ci_frame_id = NULL;
+ self->ci_frame_num = 0;
+ self->drawable = 0x0;
+ self->need_display = TRUE;
+
+ //Default libva settings; overwritten from config params in initialize()
+ self->va_rcmode = VA_RC_NONE;
+ self->va_format = VA_RT_FORMAT_YUV420;
+ self->va_entrypoint = VAEntrypointEncSlice;
+ self->va_profile = VAProfileH264Baseline;
+
+ //add more properties here
+}
+
+/* Class initializer: cache the parent class for chain-ups, install the
+ * finalizer, and point every vmethod at its default base implementation
+ * so subclasses only need to override what they use. */
+static void mix_videoformatenc_class_init(MixVideoFormatEncClass * klass) {
+ GObjectClass *gobject_class = (GObjectClass *) klass;
+
+ /* parent class for later use */
+ parent_class = g_type_class_peek_parent(klass);
+
+ gobject_class->finalize = mix_videoformatenc_finalize;
+
+ /* setup vmethods with base implementation */
+ klass->getcaps = mix_videofmtenc_getcaps_default;
+ klass->initialize = mix_videofmtenc_initialize_default;
+ klass->encode = mix_videofmtenc_encode_default;
+ klass->flush = mix_videofmtenc_flush_default;
+ klass->eos = mix_videofmtenc_eos_default;
+ klass->deinitialize = mix_videofmtenc_deinitialize_default;
+ klass->getmaxencodedbufsize = mix_videofmtenc_get_max_coded_buffer_size_default;
+}
+
+/* Allocate a new MixVideoFormatEnc instance via the GObject system. */
+MixVideoFormatEnc *
+mix_videoformatenc_new(void) {
+    return g_object_new(MIX_TYPE_VIDEOFORMATENC, NULL);
+}
+
+/* GObject finalizer: free the object lock, drop the frame manager and
+ * surface pool references, free the mime type string and CI frame id
+ * array, then chain up to the parent finalizer. */
+void mix_videoformatenc_finalize(GObject * obj) {
+ /* clean up here. */
+
+ if (obj == NULL) {
+ LOG_E( "obj == NULL\n");
+ return;
+ }
+
+ MixVideoFormatEnc *mix = MIX_VIDEOFORMATENC(obj);
+
+ LOG_V( "\n");
+
+ if(mix->objectlock) {
+ g_mutex_free(mix->objectlock);
+ mix->objectlock = NULL;
+ }
+
+ //MiVideo object calls the _deinitialize() for frame manager
+ if (mix->framemgr)
+ {
+ mix_framemanager_unref(mix->framemgr);
+ mix->framemgr = NULL;
+ }
+
+ if (mix->mime_type)
+ {
+ //NOTE(review): GString::str is non-NULL for a valid GString, so the
+ //else branch looks unreachable; g_string_free(..., FALSE) would also
+ //leak the returned character data — confirm intent.
+ if (mix->mime_type->str)
+ g_string_free(mix->mime_type, TRUE);
+ else
+ g_string_free(mix->mime_type, FALSE);
+ }
+
+ if (mix->ci_frame_id)
+ g_free (mix->ci_frame_id);
+
+
+ if (mix->surfacepool)
+ {
+ mix_surfacepool_deinitialize(mix->surfacepool);
+ mix_surfacepool_unref(mix->surfacepool);
+ mix->surfacepool = NULL;
+ }
+
+
+ /* TODO: cleanup here */
+
+ /* Chain up parent */
+ if (parent_class->finalize) {
+ parent_class->finalize(obj);
+ }
+}
+
+/* Take a reference on @mix and return it for call chaining. */
+MixVideoFormatEnc *
+mix_videoformatenc_ref(MixVideoFormatEnc * mix) {
+    GObject *obj = g_object_ref(G_OBJECT(mix));
+    return (MixVideoFormatEnc *) obj;
+}
+
+/* Default vmethods implementation */
+/* Default getcaps vmethod: logs entry and succeeds without reporting
+ * any capabilities; subclasses override to fill in @msg. */
+static MIX_RESULT mix_videofmtenc_getcaps_default(MixVideoFormatEnc *mix,
+        GString *msg) {
+    LOG_V( "Begin\n");
+    return MIX_RESULT_SUCCESS;
+}
+
+/* Default initialize vmethod: under the object lock, copies every
+ * encoder property (bitrate, frame rate, QP settings, resolution,
+ * buffer-sharing and CI frame info, display settings, rate control,
+ * raw format, profile) from the config params object into the encoder
+ * object, logging each value. Returns MIX_RESULT_FAIL (with the lock
+ * released) if any getter fails.
+ * NOTE(review): frame_mgr is ref'd without a NULL check — presumably
+ * callers always pass one; confirm. */
+static MIX_RESULT mix_videofmtenc_initialize_default(MixVideoFormatEnc *mix,
+ MixVideoConfigParamsEnc * config_params_enc,
+ MixFrameManager * frame_mgr,
+ MixBufferPool * input_buf_pool,
+ MixSurfacePool ** surface_pool,
+ VADisplay va_display) {
+
+ LOG_V( "Begin\n");
+
+ if (mix == NULL ||config_params_enc == NULL) {
+ LOG_E(
+ "!mix || config_params_enc == NULL\n");
+ return MIX_RESULT_NULL_PTR;
+ }
+
+
+ MIX_RESULT ret = MIX_RESULT_SUCCESS;
+
+ //TODO check return values of getter fns for config_params
+
+ g_mutex_lock(mix->objectlock);
+
+ mix->framemgr = frame_mgr;
+ mix_framemanager_ref(mix->framemgr);
+
+ mix->va_display = va_display;
+
+ LOG_V(
+ "Start to get properities from parent params\n");
+
+ /* get properties from param (parent) Object*/
+ ret = mix_videoconfigparamsenc_get_bit_rate (config_params_enc,
+ &(mix->bitrate));
+ if (ret != MIX_RESULT_SUCCESS) {
+ //TODO cleanup
+ LOG_E(
+ "Failed to mix_videoconfigparamsenc_get_bps\n");
+ g_mutex_unlock(mix->objectlock);
+ return MIX_RESULT_FAIL;
+ }
+
+ ret = mix_videoconfigparamsenc_get_frame_rate (config_params_enc,
+ &(mix->frame_rate_num), &(mix->frame_rate_denom));
+
+ if (ret != MIX_RESULT_SUCCESS) {
+ //TODO cleanup
+ LOG_E(
+ "Failed to mix_videoconfigparamsenc_get_frame_rate\n");
+ g_mutex_unlock(mix->objectlock);
+ return MIX_RESULT_FAIL;
+ }
+
+ ret = mix_videoconfigparamsenc_get_init_qp (config_params_enc,
+ &(mix->initial_qp));
+
+ if (ret != MIX_RESULT_SUCCESS) {
+ //TODO cleanup
+
+ LOG_E(
+ "Failed to mix_videoconfigparamsenc_get_init_qp\n");
+ g_mutex_unlock(mix->objectlock);
+ return MIX_RESULT_FAIL;
+ }
+
+
+ ret = mix_videoconfigparamsenc_get_min_qp (config_params_enc,
+ &(mix->min_qp));
+
+ if (ret != MIX_RESULT_SUCCESS) {
+ //TODO cleanup
+
+ LOG_E(
+ "Failed to mix_videoconfigparamsenc_get_min_qp\n");
+ g_mutex_unlock(mix->objectlock);
+ return MIX_RESULT_FAIL;
+ }
+
+ ret = mix_videoconfigparamsenc_get_intra_period (config_params_enc,
+ &(mix->intra_period));
+
+ if (ret != MIX_RESULT_SUCCESS) {
+ //TODO cleanup
+
+ LOG_E(
+ "Failed to mix_videoconfigparamsenc_get_intra_period\n");
+ g_mutex_unlock(mix->objectlock);
+ return MIX_RESULT_FAIL;
+ }
+
+ ret = mix_videoconfigparamsenc_get_picture_res (config_params_enc,
+ &(mix->picture_width), &(mix->picture_height));
+
+ if (ret != MIX_RESULT_SUCCESS) {
+ //TODO cleanup
+
+ LOG_E(
+ "Failed to mix_videoconfigparamsenc_get_picture_res\n");
+ g_mutex_unlock(mix->objectlock);
+ return MIX_RESULT_FAIL;
+ }
+
+ ret = mix_videoconfigparamsenc_get_share_buf_mode (config_params_enc,
+ &(mix->share_buf_mode));
+
+ if (ret != MIX_RESULT_SUCCESS) {
+ //TODO cleanup
+
+ LOG_E(
+ "Failed to mix_videoconfigparamsenc_get_share_buf_mode\n");
+ g_mutex_unlock(mix->objectlock);
+ return MIX_RESULT_FAIL;
+ }
+
+
+ ret = mix_videoconfigparamsenc_get_ci_frame_info (config_params_enc,
+ &(mix->ci_frame_id), &(mix->ci_frame_num));
+
+ if (ret != MIX_RESULT_SUCCESS) {
+ //TODO cleanup
+
+ LOG_E(
+ "Failed to mix_videoconfigparamsenc_get_ci_frame_info\n");
+ g_mutex_unlock(mix->objectlock);
+ return MIX_RESULT_FAIL;
+ }
+
+
+ ret = mix_videoconfigparamsenc_get_drawable (config_params_enc,
+ &(mix->drawable));
+
+ if (ret != MIX_RESULT_SUCCESS) {
+ //TODO cleanup
+
+ LOG_E(
+ "Failed to mix_videoconfigparamsenc_get_drawable\n");
+ g_mutex_unlock(mix->objectlock);
+ return MIX_RESULT_FAIL;
+ }
+
+ ret = mix_videoconfigparamsenc_get_need_display (config_params_enc,
+ &(mix->need_display));
+
+ if (ret != MIX_RESULT_SUCCESS) {
+ //TODO cleanup
+
+ //Fixed: previously logged "get_drawable" for a get_need_display failure
+ LOG_E(
+ "Failed to mix_videoconfigparamsenc_get_need_display\n");
+ g_mutex_unlock(mix->objectlock);
+ return MIX_RESULT_FAIL;
+ }
+
+ ret = mix_videoconfigparamsenc_get_rate_control (config_params_enc,
+ &(mix->va_rcmode));
+
+ if (ret != MIX_RESULT_SUCCESS) {
+ //TODO cleanup
+
+ LOG_E(
+ "Failed to mix_videoconfigparamsenc_get_rc_mode\n");
+ g_mutex_unlock(mix->objectlock);
+ return MIX_RESULT_FAIL;
+ }
+
+ ret = mix_videoconfigparamsenc_get_raw_format (config_params_enc,
+ &(mix->va_format));
+
+ if (ret != MIX_RESULT_SUCCESS) {
+ //TODO cleanup
+
+ LOG_E(
+ "Failed to mix_videoconfigparamsenc_get_format\n");
+ g_mutex_unlock(mix->objectlock);
+ return MIX_RESULT_FAIL;
+ }
+
+ ret = mix_videoconfigparamsenc_get_profile (config_params_enc,
+ (MixProfile *) &(mix->va_profile));
+
+ if (ret != MIX_RESULT_SUCCESS) {
+ //TODO cleanup
+
+ LOG_E(
+ "Failed to mix_videoconfigparamsenc_get_profile\n");
+ g_mutex_unlock(mix->objectlock);
+ return MIX_RESULT_FAIL;
+ }
+
+
+ LOG_V(
+ "======Video Encode Parent Object properities======:\n");
+
+ LOG_I( "mix->bitrate = %d\n",
+ mix->bitrate);
+ //Fixed: was frame_rate_denom / frame_rate_denom, which always logged 1
+ LOG_I( "mix->frame_rate = %d\n",
+ mix->frame_rate_num / mix->frame_rate_denom);
+ LOG_I( "mix->initial_qp = %d\n",
+ mix->initial_qp);
+ LOG_I( "mix->min_qp = %d\n",
+ mix->min_qp);
+ LOG_I( "mix->intra_period = %d\n",
+ mix->intra_period);
+ LOG_I( "mix->picture_width = %d\n",
+ mix->picture_width);
+ LOG_I( "mix->picture_height = %d\n",
+ mix->picture_height);
+ LOG_I( "mix->share_buf_mode = %d\n",
+ mix->share_buf_mode);
+ LOG_I( "mix->ci_frame_id = 0x%08x\n",
+ mix->ci_frame_id);
+ LOG_I( "mix->ci_frame_num = %d\n",
+ mix->ci_frame_num);
+ LOG_I( "mix->drawable = 0x%08x\n",
+ mix->drawable);
+ LOG_I( "mix->need_display = %d\n",
+ mix->need_display);
+ LOG_I( "mix->va_format = %d\n",
+ mix->va_format);
+ LOG_I( "mix->va_profile = %d\n",
+ mix->va_profile);
+ LOG_I( "mix->va_rcmode = %d\n\n",
+ mix->va_rcmode);
+
+ g_mutex_unlock(mix->objectlock);
+
+ LOG_V( "end\n");
+
+ return MIX_RESULT_SUCCESS;
+}
+
+/* Default encode vmethod: no-op placeholder; subclasses must override. */
+static MIX_RESULT mix_videofmtenc_encode_default (MixVideoFormatEnc *mix, MixBuffer * bufin[],
+ gint bufincnt, MixIOVec * iovout[], gint iovoutcnt,
+ MixVideoEncodeParams * encode_params) {
+ return MIX_RESULT_SUCCESS;
+}
+
+/* Default flush vmethod: nothing buffered in the base class. */
+static MIX_RESULT mix_videofmtenc_flush_default(MixVideoFormatEnc *mix) {
+ return MIX_RESULT_SUCCESS;
+}
+
+/* Default eos vmethod: nothing to drain in the base class. */
+static MIX_RESULT mix_videofmtenc_eos_default(MixVideoFormatEnc *mix) {
+ return MIX_RESULT_SUCCESS;
+}
+
+/* Default deinitialize vmethod: teardown currently lives in finalize(). */
+static MIX_RESULT mix_videofmtenc_deinitialize_default(MixVideoFormatEnc *mix) {
+
+ //TODO decide whether to put any of the teardown from _finalize() here
+
+ return MIX_RESULT_SUCCESS;
+}
+
+/* Default getmaxencodedbufsize vmethod.
+ * NOTE(review): *max_size is left unset here — callers must not rely on
+ * it unless a subclass overrides this vmethod. */
+static MIX_RESULT mix_videofmtenc_get_max_coded_buffer_size_default(
+ MixVideoFormatEnc *mix, guint *max_size) {
+
+
+ return MIX_RESULT_SUCCESS;
+}
+
+/* mixvideoformatenc class methods implementation */
+
+/* Dispatch getcaps to the subclass vmethod; NOTIMPL when absent. */
+MIX_RESULT mix_videofmtenc_getcaps(MixVideoFormatEnc *mix, GString *msg) {
+    MixVideoFormatEncClass *klass = MIX_VIDEOFORMATENC_GET_CLASS(mix);
+
+    LOG_V( "Begin\n");
+
+    if (!klass->getcaps)
+        return MIX_RESULT_NOTIMPL;
+
+    return klass->getcaps(mix, msg);
+}
+
+/* Dispatch initialize to the subclass vmethod; FAIL when absent.
+ * frame_mgr and input_buf_pool are reserved for future use. */
+MIX_RESULT mix_videofmtenc_initialize(MixVideoFormatEnc *mix,
+        MixVideoConfigParamsEnc * config_params_enc,
+        MixFrameManager * frame_mgr,
+        MixBufferPool * input_buf_pool,
+        MixSurfacePool ** surface_pool,
+        VADisplay va_display) {
+    MixVideoFormatEncClass *klass = MIX_VIDEOFORMATENC_GET_CLASS(mix);
+
+    if (!klass->initialize)
+        return MIX_RESULT_FAIL;
+
+    return klass->initialize(mix, config_params_enc, frame_mgr,
+            input_buf_pool, surface_pool, va_display);
+}
+
+/* Dispatch encode to the subclass vmethod; FAIL when absent. */
+MIX_RESULT mix_videofmtenc_encode(MixVideoFormatEnc *mix, MixBuffer * bufin[],
+        gint bufincnt, MixIOVec * iovout[], gint iovoutcnt,
+        MixVideoEncodeParams * encode_params) {
+    MixVideoFormatEncClass *klass = MIX_VIDEOFORMATENC_GET_CLASS(mix);
+
+    if (!klass->encode)
+        return MIX_RESULT_FAIL;
+
+    return klass->encode(mix, bufin, bufincnt, iovout, iovoutcnt, encode_params);
+}
+
+/* Dispatch flush to the subclass vmethod; FAIL when absent. */
+MIX_RESULT mix_videofmtenc_flush(MixVideoFormatEnc *mix) {
+    MixVideoFormatEncClass *klass = MIX_VIDEOFORMATENC_GET_CLASS(mix);
+
+    if (!klass->flush)
+        return MIX_RESULT_FAIL;
+
+    return klass->flush(mix);
+}
+
+/* Dispatch eos to the subclass vmethod; FAIL when absent. */
+MIX_RESULT mix_videofmtenc_eos(MixVideoFormatEnc *mix) {
+    MixVideoFormatEncClass *klass = MIX_VIDEOFORMATENC_GET_CLASS(mix);
+
+    if (!klass->eos)
+        return MIX_RESULT_FAIL;
+
+    return klass->eos(mix);
+}
+
+/* Dispatch deinitialize to the subclass vmethod; FAIL when absent. */
+MIX_RESULT mix_videofmtenc_deinitialize(MixVideoFormatEnc *mix) {
+    MixVideoFormatEncClass *klass = MIX_VIDEOFORMATENC_GET_CLASS(mix);
+
+    if (!klass->deinitialize)
+        return MIX_RESULT_FAIL;
+
+    return klass->deinitialize(mix);
+}
+
+/* Dispatch getmaxencodedbufsize to the subclass vmethod; FAIL when absent. */
+MIX_RESULT mix_videofmtenc_get_max_coded_buffer_size(MixVideoFormatEnc *mix, guint * max_size) {
+
+ MixVideoFormatEncClass *klass = MIX_VIDEOFORMATENC_GET_CLASS(mix);
+ //Fixed: previously guarded on klass->encode (copy/paste error), which
+ //could dereference a NULL getmaxencodedbufsize vmethod.
+ if (klass->getmaxencodedbufsize) {
+ return klass->getmaxencodedbufsize(mix, max_size);
+ }
+
+ return MIX_RESULT_FAIL;
+}
diff --git a/mix_video/src/mixvideoformatenc.h b/mix_video/src/mixvideoformatenc.h
new file mode 100644
index 0000000..0e1c07a
--- /dev/null
+++ b/mix_video/src/mixvideoformatenc.h
@@ -0,0 +1,178 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+#ifndef __MIX_VIDEOFORMATENC_H__
+#define __MIX_VIDEOFORMATENC_H__
+
+#include <va/va.h>
+#include <glib-object.h>
+#include "mixvideodef.h"
+#include "mixdrmparams.h"
+#include "mixvideoconfigparamsenc.h"
+#include "mixvideoframe.h"
+#include "mixframemanager.h"
+#include "mixsurfacepool.h"
+#include "mixbuffer.h"
+#include "mixbufferpool.h"
+#include "mixvideoformatqueue.h"
+#include "mixvideoencodeparams.h"
+
+/*
+ * Type macros.
+ */
+#define MIX_TYPE_VIDEOFORMATENC (mix_videoformatenc_get_type ())
+#define MIX_VIDEOFORMATENC(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_VIDEOFORMATENC, MixVideoFormatEnc))
+#define MIX_IS_VIDEOFORMATENC(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEOFORMATENC))
+#define MIX_VIDEOFORMATENC_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_VIDEOFORMATENC, MixVideoFormatEncClass))
+#define MIX_IS_VIDEOFORMATENC_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEOFORMATENC))
+#define MIX_VIDEOFORMATENC_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEOFORMATENC, MixVideoFormatEncClass))
+
+typedef struct _MixVideoFormatEnc MixVideoFormatEnc;
+typedef struct _MixVideoFormatEncClass MixVideoFormatEncClass;
+
+/* vmethods typedef */
+
+/* TODO: change return type and method parameters */
+typedef MIX_RESULT (*MixVideoFmtEncGetCapsFunc)(MixVideoFormatEnc *mix, GString *msg);
+typedef MIX_RESULT (*MixVideoFmtEncInitializeFunc)(MixVideoFormatEnc *mix,
+ MixVideoConfigParamsEnc* config_params_enc,
+ MixFrameManager * frame_mgr,
+ MixBufferPool * input_buf_pool,
+ MixSurfacePool ** surface_pool,
+ VADisplay va_display);
+typedef MIX_RESULT (*MixVideoFmtEncodeFunc)(MixVideoFormatEnc *mix, MixBuffer * bufin[],
+ gint bufincnt, MixIOVec * iovout[], gint iovoutcnt,
+ MixVideoEncodeParams * encode_params);
+typedef MIX_RESULT (*MixVideoFmtEncFlushFunc)(MixVideoFormatEnc *mix);
+typedef MIX_RESULT (*MixVideoFmtEncEndOfStreamFunc)(MixVideoFormatEnc *mix);
+typedef MIX_RESULT (*MixVideoFmtEncDeinitializeFunc)(MixVideoFormatEnc *mix);
+typedef MIX_RESULT (*MixVideoFmtEncGetMaxEncodedBufSizeFunc) (MixVideoFormatEnc *mix, guint *max_size);
+
+struct _MixVideoFormatEnc {
+    /*< public > */
+    GObject parent;
+
+    /*< public > */
+
+    /*< private > */
+    GMutex *objectlock;          /* serializes vmethod bodies on this object */
+    gboolean initialized;        /* TRUE after a successful initialize() */
+    MixFrameManager *framemgr;
+    MixSurfacePool *surfacepool; /* pool of VA surfaces created at initialize() */
+    VADisplay va_display;        /* libva display supplied at initialize() */
+    VAContextID va_context;      /* from vaCreateContext() */
+    VAConfigID va_config;        /* from vaCreateConfig() */
+    GString *mime_type;
+
+    guint frame_rate_num;        /* frame rate numerator */
+    guint frame_rate_denom;      /* frame rate denominator */
+    guint picture_width;
+    guint picture_height;
+
+    guint initial_qp;            /* rate-control initial quantizer */
+    guint min_qp;                /* rate-control minimum quantizer */
+    guint intra_period;          /* frames between intra pictures */
+    guint bitrate;               /* target bits per second */
+
+    gboolean share_buf_mode;     /* TRUE: input surfaces are shared CI frames */
+    gulong * ci_frame_id;        /* camera-interface frame ids (share mode) */
+    guint ci_frame_num;          /* number of entries in ci_frame_id */
+
+    gulong drawable;             /* NOTE(review): presumably an X drawable for
+                                  * display; not used by the visible encoder
+                                  * code -- confirm against callers */
+    gboolean need_display;
+
+    VAProfile va_profile;        /* requested VA profile */
+    VAEntrypoint va_entrypoint;  /* requested VA entrypoint */
+    guint va_format;             /* VA_RT_FORMAT_* bitmask */
+    guint va_rcmode;             /* VA rate-control mode bitmask */
+
+
+    MixBufferPool *inputbufpool;
+    GQueue *inputbufqueue;
+};
+
+/**
+ * MixVideoFormatEncClass:
+ *
+ * MI-X Video object class.  Virtual method table for encoder format
+ * subclasses (e.g. H.264); each entry is invoked via the corresponding
+ * mix_videofmtenc_* wrapper.
+ */
+struct _MixVideoFormatEncClass {
+    /*< public > */
+    GObjectClass parent_class;
+
+    /* class members */
+
+    /*< public > */
+    MixVideoFmtEncGetCapsFunc getcaps;           /* capability query */
+    MixVideoFmtEncInitializeFunc initialize;     /* set up VA config/context/surfaces */
+    MixVideoFmtEncodeFunc encode;                /* encode input buffer(s) */
+    MixVideoFmtEncFlushFunc flush;               /* drop in-flight reference state */
+    MixVideoFmtEncEndOfStreamFunc eos;           /* end-of-stream notification */
+    MixVideoFmtEncDeinitializeFunc deinitialize; /* tear down VA resources */
+    MixVideoFmtEncGetMaxEncodedBufSizeFunc getmaxencodedbufsize; /* worst-case coded buffer size */
+};
+
+/**
+ * mix_videoformatenc_get_type:
+ * @returns: type
+ *
+ * Get the type of object.
+ */
+GType mix_videoformatenc_get_type(void);
+
+/**
+ * mix_videoformatenc_new:
+ * @returns: A newly allocated instance of #MixVideoFormatEnc
+ *
+ * Use this method to create new instance of #MixVideoFormatEnc
+ */
+MixVideoFormatEnc *mix_videoformatenc_new(void);
+
+/**
+ * mix_videoformatenc_ref:
+ * @mix: object to add reference
+ * @returns: the MixVideoFormatEnc instance where reference count has been increased.
+ *
+ * Add reference count.
+ */
+MixVideoFormatEnc *mix_videoformatenc_ref(MixVideoFormatEnc * mix);
+
+/**
+ * mix_videoformatenc_unref:
+ * @obj: object to unref.
+ *
+ * Decrement reference count of the object.
+ */
+#define mix_videoformatenc_unref(obj) g_object_unref (G_OBJECT(obj))
+
+/* Class Methods */
+
+/* TODO: change method parameter list */
+MIX_RESULT mix_videofmtenc_getcaps(MixVideoFormatEnc *mix, GString *msg);
+
+MIX_RESULT mix_videofmtenc_initialize(MixVideoFormatEnc *mix,
+ MixVideoConfigParamsEnc * enc_config_params,
+ MixFrameManager * frame_mgr,
+ MixBufferPool * input_buf_pool,
+ MixSurfacePool ** surface_pool,
+ VADisplay va_display);
+
+MIX_RESULT mix_videofmtenc_encode(MixVideoFormatEnc *mix, MixBuffer * bufin[],
+ gint bufincnt, MixIOVec * iovout[], gint iovoutcnt,
+ MixVideoEncodeParams * encode_params);
+
+MIX_RESULT mix_videofmtenc_flush(MixVideoFormatEnc *mix);
+
+MIX_RESULT mix_videofmtenc_eos(MixVideoFormatEnc *mix);
+
+MIX_RESULT mix_videofmtenc_deinitialize(MixVideoFormatEnc *mix);
+
+MIX_RESULT mix_videofmtenc_get_max_coded_buffer_size(MixVideoFormatEnc *mix, guint *max_size);
+
+
+#endif /* __MIX_VIDEOFORMATENC_H__ */
diff --git a/mix_video/src/mixvideoformatenc_h264.c b/mix_video/src/mixvideoformatenc_h264.c
new file mode 100644
index 0000000..8472e93
--- /dev/null
+++ b/mix_video/src/mixvideoformatenc_h264.c
@@ -0,0 +1,1954 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+#include <glib.h>
+#include <string.h>
+#include <stdlib.h>
+
+#include "mixvideolog.h"
+
+#include "mixvideoformatenc_h264.h"
+#include "mixvideoconfigparamsenc_h264.h"
+
+#define MDEBUG
+#undef SHOW_SRC
+
+#ifdef SHOW_SRC
+Window win = 0;
+#endif /* SHOW_SRC */
+
+
+/* The parent class. The pointer will be saved
+ * in this class's initialization. The pointer
+ * can be used for chaining method call if needed.
+ */
+static MixVideoFormatEncClass *parent_class = NULL;
+
+static void mix_videoformatenc_h264_finalize(GObject * obj);
+
+/*
+ * Please note that the type we pass to G_DEFINE_TYPE is MIX_TYPE_VIDEOFORMATENC
+ */
+G_DEFINE_TYPE (MixVideoFormatEnc_H264, mix_videoformatenc_h264, MIX_TYPE_VIDEOFORMATENC);
+
+static void mix_videoformatenc_h264_init(MixVideoFormatEnc_H264 * self) {
+    MixVideoFormatEnc *parent = MIX_VIDEOFORMATENC(self);
+
+    /* Fresh encoder state: nothing encoded yet, first picture is intra. */
+    self->encoded_frames = 0;
+    self->pic_skipped = FALSE;
+    self->is_intra = TRUE;
+
+    /* No frame references held yet ("fame" is the field spelling). */
+    self->cur_fame = NULL;
+    self->ref_fame = NULL;
+    self->rec_fame = NULL;
+
+    /* Surface bookkeeping starts empty. */
+    self->ci_shared_surfaces = NULL;
+    self->surfaces = NULL;
+    self->surface_num = 0;
+
+    parent->initialized = FALSE;
+}
+
+static void mix_videoformatenc_h264_class_init(
+        MixVideoFormatEnc_H264Class * klass) {
+
+    GObjectClass *gobject_class = (GObjectClass *) klass;
+    MixVideoFormatEncClass *video_formatenc_class =
+            MIX_VIDEOFORMATENC_CLASS(klass);
+
+    /* Remember the parent class so vmethods can chain up later. */
+    parent_class = g_type_class_peek_parent(klass);
+
+    /* Install the destructor. */
+    gobject_class->finalize = mix_videoformatenc_h264_finalize;
+
+    /* Override every encoder vmethod with the H.264 implementation. */
+    video_formatenc_class->getcaps = mix_videofmtenc_h264_getcaps;
+    video_formatenc_class->initialize = mix_videofmtenc_h264_initialize;
+    video_formatenc_class->encode = mix_videofmtenc_h264_encode;
+    video_formatenc_class->flush = mix_videofmtenc_h264_flush;
+    video_formatenc_class->eos = mix_videofmtenc_h264_eos;
+    video_formatenc_class->deinitialize = mix_videofmtenc_h264_deinitialize;
+    video_formatenc_class->getmaxencodedbufsize = mix_videofmtenc_h264_get_max_encoded_buf_size;
+}
+
+MixVideoFormatEnc_H264 *
+mix_videoformatenc_h264_new(void) {
+    /* Allocate a new H.264 encoder object through the GObject system. */
+    return g_object_new(MIX_TYPE_VIDEOFORMATENC_H264, NULL);
+}
+
+/* GObject destructor.
+ * FIX: the forward declaration at the top of this file declares this
+ * function 'static'; the definition previously omitted the keyword,
+ * an inconsistent linkage specification.  Per-stream resources are
+ * released in deinitialize(); here we only chain up to the parent. */
+static void mix_videoformatenc_h264_finalize(GObject * obj) {
+
+    /*MixVideoFormatEnc_H264 *mix = MIX_VIDEOFORMATENC_H264(obj); */
+    GObjectClass *root_class = (GObjectClass *) parent_class;
+
+    LOG_V( "\n");
+
+    /* Chain up parent */
+    if (root_class->finalize) {
+        root_class->finalize(obj);
+    }
+}
+
+MixVideoFormatEnc_H264 *
+mix_videoformatenc_h264_ref(MixVideoFormatEnc_H264 * mix) {
+    /* Take a GObject reference and hand back the same instance. */
+    GObject *obj = g_object_ref(G_OBJECT(mix));
+    return (MixVideoFormatEnc_H264 *) obj;
+}
+
+/*H.264 vmethods implementation */
+MIX_RESULT mix_videofmtenc_h264_getcaps(MixVideoFormatEnc *mix, GString *msg) {
+
+    LOG_V( "mix_videofmtenc_h264_getcaps\n");
+
+    /* Nothing H.264-specific yet; validate the object and chain up. */
+    if (mix == NULL) {
+        LOG_E( "mix == NULL\n");
+        return MIX_RESULT_NULL_PTR;
+    }
+
+    if (parent_class->getcaps == NULL) {
+        return MIX_RESULT_SUCCESS;
+    }
+
+    return parent_class->getcaps(mix, msg);
+}
+
+/* Initialize the H.264 encoder: read the H.264-specific config, validate
+ * the VA profile/entrypoint, create the VA config, surfaces, surface pool,
+ * context and coded buffer.  Returns MIX_RESULT_SUCCESS or an error code;
+ * on error the object lock is released and local allocations are freed.
+ * FIXES over the previous version: the temporary 'surfaces' array was
+ * leaked on the vaCreateSurfaces / shared-surface-OOM /
+ * vaCreateSurfaceFromCIFrame failure paths, and whichever of
+ * va_profiles/va_entrypoints succeeded was leaked when the other failed. */
+MIX_RESULT mix_videofmtenc_h264_initialize(MixVideoFormatEnc *mix,
+        MixVideoConfigParamsEnc * config_params_enc,
+        MixFrameManager * frame_mgr,
+        MixBufferPool * input_buf_pool,
+        MixSurfacePool ** surface_pool,
+        VADisplay va_display ) {
+
+    MIX_RESULT ret = MIX_RESULT_SUCCESS;
+    MixVideoFormatEnc *parent = NULL;
+    MixVideoConfigParamsEncH264 * config_params_enc_h264;
+
+    VAStatus va_status = VA_STATUS_SUCCESS;
+    VASurfaceID * surfaces = NULL;
+
+    gint va_max_num_profiles, va_max_num_entrypoints, va_max_num_attribs;
+    gint va_num_profiles, va_num_entrypoints;
+
+    VAProfile *va_profiles = NULL;
+    VAEntrypoint *va_entrypoints = NULL;
+    VAConfigAttrib va_attrib[2];
+    guint index;
+
+    /* frame_mgr and input_buf_pool are reserved for future use */
+
+    if (mix == NULL || config_params_enc == NULL || va_display == NULL) {
+        LOG_E(
+                "mix == NULL || config_params_enc == NULL || va_display == NULL\n");
+        return MIX_RESULT_NULL_PTR;
+    }
+
+    LOG_V( "begin\n");
+
+    //TODO additional parameter checking
+
+    /* Chain up first: the parent initialize() stores the common settings
+     * (dimensions, rate control, profile, ...) that are read below. */
+    if (parent_class->initialize) {
+        ret = parent_class->initialize(mix, config_params_enc,
+                frame_mgr, input_buf_pool, surface_pool,
+                va_display);
+    }
+
+    if (ret != MIX_RESULT_SUCCESS)
+    {
+        return ret;
+    }
+
+    if (!MIX_IS_VIDEOFORMATENC_H264(mix))
+    {
+        LOG_E(
+                "not H264 video encode Object\n");
+        return MIX_RESULT_FAIL;
+    }
+
+    parent = MIX_VIDEOFORMATENC(&(mix->parent));
+    MixVideoFormatEnc_H264 *self = MIX_VIDEOFORMATENC_H264(mix);
+
+    if (MIX_IS_VIDEOCONFIGPARAMSENC_H264 (config_params_enc)) {
+        config_params_enc_h264 =
+                MIX_VIDEOCONFIGPARAMSENC_H264 (config_params_enc);
+    } else {
+        LOG_V(
+                "mix_videofmtenc_h264_initialize: no h264 config params found\n");
+        return MIX_RESULT_FAIL;
+    }
+
+    g_mutex_lock(parent->objectlock);
+
+    LOG_V(
+            "Start to get properities from h.264 params\n");
+
+    /* Read the H.264-specific settings off the config object. */
+    ret = mix_videoconfigparamsenc_h264_get_bus (config_params_enc_h264,
+            &self->basic_unit_size);
+    if (ret != MIX_RESULT_SUCCESS) {
+        LOG_E(
+                "Failed to mix_videoconfigparamsenc_h264_get_bus\n");
+        g_mutex_unlock(parent->objectlock);
+        return MIX_RESULT_FAIL;
+    }
+
+    ret = mix_videoconfigparamsenc_h264_get_dlk (config_params_enc_h264,
+            &self->disable_deblocking_filter_idc);
+    if (ret != MIX_RESULT_SUCCESS) {
+        LOG_E(
+                "Failed to mix_videoconfigparamsenc_h264_get_dlk\n");
+        g_mutex_unlock(parent->objectlock);
+        return MIX_RESULT_FAIL;
+    }
+
+    ret = mix_videoconfigparamsenc_h264_get_slice_num (config_params_enc_h264,
+            &self->slice_num);
+    if (ret != MIX_RESULT_SUCCESS) {
+        LOG_E(
+                "Failed to mix_videoconfigparamsenc_h264_get_slice_num\n");
+        g_mutex_unlock(parent->objectlock);
+        return MIX_RESULT_FAIL;
+    }
+
+    ret = mix_videoconfigparamsenc_h264_get_delimiter_type (config_params_enc_h264,
+            &self->delimiter_type);
+    if (ret != MIX_RESULT_SUCCESS) {
+        LOG_E (
+                "Failed to mix_videoconfigparamsenc_h264_get_delimiter_type\n");
+        g_mutex_unlock(parent->objectlock);
+        return MIX_RESULT_FAIL;
+    }
+
+    LOG_V(
+            "======H264 Encode Object properities======:\n");
+    LOG_I( "self->basic_unit_size = %d\n",
+            self->basic_unit_size);
+    LOG_I( "self->disable_deblocking_filter_idc = %d\n",
+            self->disable_deblocking_filter_idc);
+    LOG_I( "self->slice_num = %d\n",
+            self->slice_num);
+    LOG_I ("self->delimiter_type = %d\n",
+            self->delimiter_type);
+    LOG_V(
+            "Get properities from params done\n");
+
+    parent->va_display = va_display;
+
+    LOG_V( "Get Display\n");
+    LOG_I( "Display = 0x%08x\n",
+            (guint)va_display);
+
+    /* Query the libva implementation limits so the profile/entrypoint
+     * query buffers can be sized. */
+    va_max_num_profiles = vaMaxNumProfiles(va_display);
+    LOG_I( "va_max_num_profiles = %d\n",
+            va_max_num_profiles);
+
+    va_max_num_entrypoints = vaMaxNumEntrypoints(va_display);
+    LOG_I( "va_max_num_entrypoints = %d\n",
+            va_max_num_entrypoints);
+
+    va_max_num_attribs = vaMaxNumConfigAttributes(va_display);
+    LOG_I( "va_max_num_attribs = %d\n",
+            va_max_num_attribs);
+
+    va_profiles = g_malloc(sizeof(VAProfile)*va_max_num_profiles);
+    va_entrypoints = g_malloc(sizeof(VAEntrypoint)*va_max_num_entrypoints);
+
+    if (va_profiles == NULL || va_entrypoints ==NULL)
+    {
+        LOG_E(
+                "!va_profiles || !va_entrypoints\n");
+        /* FIX: free whichever allocation succeeded (g_free(NULL) is a no-op) */
+        g_free(va_profiles);
+        g_free(va_entrypoints);
+        g_mutex_unlock(parent->objectlock);
+        return MIX_RESULT_NO_MEMORY;
+    }
+
+    LOG_I(
+            "va_profiles = 0x%08x\n", (guint)va_profiles);
+    LOG_V( "vaQueryConfigProfiles\n");
+
+    va_status = vaQueryConfigProfiles (va_display, va_profiles, &va_num_profiles);
+    if (va_status != VA_STATUS_SUCCESS)
+    {
+        LOG_E(
+                "Failed to call vaQueryConfigProfiles\n");
+        g_free(va_profiles);
+        g_free (va_entrypoints);
+        g_mutex_unlock(parent->objectlock);
+        return MIX_RESULT_FAIL;
+    }
+
+    LOG_V( "vaQueryConfigProfiles Done\n");
+
+    /* Make sure the requested profile is supported by the driver. */
+    for(index= 0; index < va_num_profiles; index++) {
+        if(parent->va_profile == va_profiles[index])
+            break;
+    }
+    if(index == va_num_profiles)
+    {
+        LOG_E( "Profile not supported\n");
+        g_free(va_profiles);
+        g_free (va_entrypoints);
+        g_mutex_unlock(parent->objectlock);
+        return MIX_RESULT_FAIL;
+    }
+
+    LOG_V( "vaQueryConfigEntrypoints\n");
+
+    /* Make sure the encode-slice entrypoint exists for that profile. */
+    va_status = vaQueryConfigEntrypoints(va_display,
+            parent->va_profile,
+            va_entrypoints, &va_num_entrypoints);
+    if (va_status != VA_STATUS_SUCCESS)
+    {
+        LOG_E(
+                "Failed to call vaQueryConfigEntrypoints\n");
+        g_free(va_profiles);
+        g_free (va_entrypoints);
+        g_mutex_unlock(parent->objectlock);
+        return MIX_RESULT_FAIL;
+    }
+
+    for (index = 0; index < va_num_entrypoints; index ++) {
+        if (va_entrypoints[index] == VAEntrypointEncSlice) {
+            break;
+        }
+    }
+    if (index == va_num_entrypoints) {
+        LOG_E( "Entrypoint not found\n");
+        g_free(va_profiles);
+        g_free (va_entrypoints);
+        g_mutex_unlock(parent->objectlock);
+        return MIX_RESULT_FAIL;
+    }
+
+    /* The query buffers are no longer needed. */
+    g_free(va_profiles);
+    g_free (va_entrypoints);
+    va_profiles = NULL;
+    va_entrypoints = NULL;
+
+    va_attrib[0].type = VAConfigAttribRTFormat;
+    va_attrib[1].type = VAConfigAttribRateControl;
+
+    LOG_V( "vaGetConfigAttributes\n");
+
+    va_status = vaGetConfigAttributes(va_display, parent->va_profile,
+            parent->va_entrypoint,
+            &va_attrib[0], 2);
+    if (va_status != VA_STATUS_SUCCESS)
+    {
+        LOG_E(
+                "Failed to call vaGetConfigAttributes\n");
+        g_mutex_unlock(parent->objectlock);
+        return MIX_RESULT_FAIL;
+    }
+
+    /* The requested render format and RC mode must both be supported. */
+    if ((va_attrib[0].value & parent->va_format) == 0) {
+        LOG_E( "Matched format not found\n");
+        g_mutex_unlock(parent->objectlock);
+        return MIX_RESULT_FAIL;
+    }
+
+    if ((va_attrib[1].value & parent->va_rcmode) == 0) {
+        LOG_E( "RC mode not found\n");
+        g_mutex_unlock(parent->objectlock);
+        return MIX_RESULT_FAIL;
+    }
+
+    va_attrib[0].value = parent->va_format; //VA_RT_FORMAT_YUV420;
+    va_attrib[1].value = parent->va_rcmode;
+
+    LOG_V( "======VA Configuration======\n");
+    LOG_I( "profile = %d\n",
+            parent->va_profile);
+    LOG_I( "va_entrypoint = %d\n",
+            parent->va_entrypoint);
+    LOG_I( "va_attrib[0].type = %d\n",
+            va_attrib[0].type);
+    LOG_I( "va_attrib[1].type = %d\n",
+            va_attrib[1].type);
+    LOG_I( "va_attrib[0].value (Format) = %d\n",
+            va_attrib[0].value);
+    LOG_I( "va_attrib[1].value (RC mode) = %d\n",
+            va_attrib[1].value);
+
+    LOG_V( "vaCreateConfig\n");
+
+    va_status = vaCreateConfig(va_display, parent->va_profile,
+            parent->va_entrypoint,
+            &va_attrib[0], 2, &(parent->va_config));
+    if (va_status != VA_STATUS_SUCCESS)
+    {
+        LOG_E( "Failed vaCreateConfig\n");
+        g_mutex_unlock(parent->objectlock);
+        return MIX_RESULT_FAIL;
+    }
+
+    /*TODO: compute the surface number*/
+    int numSurfaces;
+
+    if (parent->share_buf_mode) {
+        numSurfaces = 2;
+    }
+    else {
+        numSurfaces = 8;
+        parent->ci_frame_num = 0;
+    }
+
+    self->surface_num = numSurfaces + parent->ci_frame_num;
+
+    surfaces = g_malloc(sizeof(VASurfaceID)*numSurfaces);
+    if (surfaces == NULL)
+    {
+        LOG_E(
+                "Failed allocate surface\n");
+        g_mutex_unlock(parent->objectlock);
+        return MIX_RESULT_NO_MEMORY;
+    }
+
+    LOG_V( "vaCreateSurfaces\n");
+
+    va_status = vaCreateSurfaces(va_display, parent->picture_width,
+            parent->picture_height, parent->va_format,
+            numSurfaces, surfaces);
+    if (va_status != VA_STATUS_SUCCESS)
+    {
+        LOG_E(
+                "Failed vaCreateSurfaces\n");
+        g_free (surfaces); /* FIX: was leaked on this path */
+        g_mutex_unlock(parent->objectlock);
+        return MIX_RESULT_FAIL;
+    }
+
+    if (parent->share_buf_mode) {
+
+        LOG_V(
+                "We are in share buffer mode!\n");
+        self->ci_shared_surfaces =
+                g_malloc(sizeof(VASurfaceID) * parent->ci_frame_num);
+        if (self->ci_shared_surfaces == NULL)
+        {
+            LOG_E(
+                    "Failed allocate shared surface\n");
+            g_free (surfaces); /* FIX: was leaked on this path */
+            g_mutex_unlock(parent->objectlock);
+            return MIX_RESULT_NO_MEMORY;
+        }
+
+        /* Wrap each camera-interface (CI) frame in a VA surface. */
+        for(index = 0; index < parent->ci_frame_num; index++) {
+
+            LOG_I( "ci_frame_id = %lu\n",
+                    parent->ci_frame_id[index]);
+            LOG_V(
+                    "vaCreateSurfaceFromCIFrame\n");
+
+            va_status = vaCreateSurfaceFromCIFrame(va_display,
+                    (gulong) (parent->ci_frame_id[index]),
+                    &self->ci_shared_surfaces[index]);
+            if (va_status != VA_STATUS_SUCCESS)
+            {
+                LOG_E(
+                        "Failed to vaCreateSurfaceFromCIFrame\n");
+                /* FIX: release both surface arrays (were leaked here) */
+                g_free (self->ci_shared_surfaces);
+                self->ci_shared_surfaces = NULL;
+                g_free (surfaces);
+                g_mutex_unlock(parent->objectlock);
+                return MIX_RESULT_FAIL;
+            }
+        }
+
+        LOG_V(
+                "vaCreateSurfaceFromCIFrame Done\n");
+
+    }// if (parent->share_buf_mode)
+
+    self->surfaces = g_malloc(sizeof(VASurfaceID) * self->surface_num);
+    if (self->surfaces == NULL)
+    {
+        LOG_E(
+                "Failed allocate private surface\n");
+        g_free (surfaces);
+        g_mutex_unlock(parent->objectlock);
+        return MIX_RESULT_NO_MEMORY;
+    }
+
+    /* Shared CI surfaces are placed first so they can be looked up by
+     * CI index; the locally created surfaces follow. */
+    if (parent->share_buf_mode) {
+        for(index = 0; index < parent->ci_frame_num; index++)
+            self->surfaces[index] = self->ci_shared_surfaces[index];
+    }
+    for(index = 0; index < numSurfaces; index++) {
+        self->surfaces[index + parent->ci_frame_num] = surfaces[index];
+    }
+
+    LOG_V( "assign surface Done\n");
+    LOG_I( "Created %d libva surfaces\n",
+            numSurfaces + parent->ci_frame_num);
+
+    LOG_V( "mix_surfacepool_new\n");
+
+    /* Publish the surface pool; callers can check it before encoding. */
+    parent->surfacepool = mix_surfacepool_new();
+    if (surface_pool)
+        *surface_pool = parent->surfacepool;
+
+    if (parent->surfacepool == NULL)
+    {
+        LOG_E(
+                "Failed to mix_surfacepool_new\n");
+        g_free (surfaces);
+        g_mutex_unlock(parent->objectlock);
+        return MIX_RESULT_FAIL;
+    }
+
+    LOG_V(
+            "mix_surfacepool_initialize\n");
+
+    ret = mix_surfacepool_initialize(parent->surfacepool,
+            self->surfaces, parent->ci_frame_num + numSurfaces);
+
+    switch (ret)
+    {
+        case MIX_RESULT_SUCCESS:
+            break;
+        case MIX_RESULT_ALREADY_INIT:
+            //TODO cleanup and/or retry
+            g_free (surfaces);
+            g_mutex_unlock(parent->objectlock);
+            return MIX_RESULT_FAIL;
+        default:
+            break;
+    }
+
+    //Initialize and save the VA context ID
+    LOG_V( "vaCreateContext\n");
+
+    va_status = vaCreateContext(va_display, parent->va_config,
+            parent->picture_width, parent->picture_height,
+            0, self->surfaces, parent->ci_frame_num + numSurfaces,
+            &(parent->va_context));
+
+    LOG_I(
+            "Created libva context width %d, height %d\n",
+            parent->picture_width, parent->picture_height);
+
+    if (va_status != VA_STATUS_SUCCESS)
+    {
+        LOG_E(
+                "Failed to vaCreateContext\n");
+        LOG_I( "va_status = %d\n",
+                (guint)va_status);
+        g_free (surfaces);
+        g_mutex_unlock(parent->objectlock);
+        return MIX_RESULT_FAIL;
+    }
+
+    guint max_size = 0;
+    ret = mix_videofmtenc_h264_get_max_encoded_buf_size (parent, &max_size);
+    if (ret != MIX_RESULT_SUCCESS)
+    {
+        LOG_E(
+                "Failed to mix_videofmtenc_h264_get_max_encoded_buf_size\n");
+        g_free (surfaces);
+        g_mutex_unlock(parent->objectlock);
+        return MIX_RESULT_FAIL;
+    }
+
+    /* Create the coded buffer that will receive the encoder output. */
+    va_status = vaCreateBuffer (va_display, parent->va_context,
+            VAEncCodedBufferType,
+            self->coded_buf_size,
+            1, NULL,
+            &self->coded_buf);
+    if (va_status != VA_STATUS_SUCCESS)
+    {
+        LOG_E(
+                "Failed to vaCreateBuffer: VAEncCodedBufferType\n");
+        g_free (surfaces);
+        g_mutex_unlock(parent->objectlock);
+        return MIX_RESULT_FAIL;
+    }
+
+#ifdef SHOW_SRC
+    Display * display = XOpenDisplay (NULL);
+
+    LOG_I( "display = 0x%08x\n",
+            (guint) display);
+    win = XCreateSimpleWindow(display, RootWindow(display, 0), 0, 0,
+            parent->picture_width, parent->picture_height, 0, 0,
+            WhitePixel(display, 0));
+    XMapWindow(display, win);
+    XSelectInput(display, win, KeyPressMask | StructureNotifyMask);
+
+    XSync(display, False);
+    LOG_I( "va_display = 0x%08x\n",
+            (guint) va_display);
+#endif /* SHOW_SRC */
+
+    parent->initialized = TRUE;
+
+    g_mutex_unlock(parent->objectlock);
+    g_free (surfaces);
+
+    LOG_V( "end\n");
+
+    return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videofmtenc_h264_encode(MixVideoFormatEnc *mix, MixBuffer * bufin[],
+        gint bufincnt, MixIOVec * iovout[], gint iovoutcnt,
+        MixVideoEncodeParams * encode_params) {
+
+    MIX_RESULT ret = MIX_RESULT_SUCCESS;
+    MixVideoFormatEnc *parent = NULL;
+
+    LOG_V( "Begin\n");
+
+    /* Only one input and one output buffer are supported; other counts
+     * are logged but not treated as fatal. */
+    if (bufincnt != 1 || iovoutcnt != 1) {
+        LOG_E(
+                "buffer count not equel to 1\n");
+        LOG_E(
+                "maybe some exception occurs\n");
+    }
+
+    if (mix == NULL ||bufin[0] == NULL || iovout[0] == NULL) {
+        LOG_E(
+                "!mix || !bufin[0] ||!iovout[0]\n");
+        return MIX_RESULT_NULL_PTR;
+    }
+
+    /* encode_params is reserved for future usage. */
+
+    if (!MIX_IS_VIDEOFORMATENC_H264(mix))
+    {
+        LOG_E(
+                "not H264 video encode Object\n");
+        return MIX_RESULT_FAIL;
+    }
+
+    parent = MIX_VIDEOFORMATENC(&(mix->parent));
+    MixVideoFormatEnc_H264 *self = MIX_VIDEOFORMATENC_H264 (mix);
+
+    LOG_V( "Locking\n");
+    g_mutex_lock(parent->objectlock);
+
+    /* The real work happens in the per-frame encode helper. */
+    LOG_V(
+            "mix_videofmtenc_h264_process_encode\n");
+
+    ret = mix_videofmtenc_h264_process_encode (self,
+            bufin[0], iovout[0]);
+    if (ret != MIX_RESULT_SUCCESS)
+    {
+        LOG_E(
+                "Failed mix_videofmtenc_h264_process_encode\n");
+        g_mutex_unlock(parent->objectlock);
+        return MIX_RESULT_FAIL;
+    }
+
+    LOG_V( "UnLocking\n");
+
+    g_mutex_unlock(parent->objectlock);
+
+    LOG_V( "end\n");
+
+    return MIX_RESULT_SUCCESS;
+}
+
+/* Flush the encoder: drop the reference/reconstructed frames and reset
+ * per-stream state so the next frame is encoded as intra.
+ * FIX: guard the downcast with MIX_IS_VIDEOFORMATENC_H264, consistent
+ * with the other H.264 vmethods (initialize/encode); previously a wrong
+ * object type fell straight into the cast. */
+MIX_RESULT mix_videofmtenc_h264_flush(MixVideoFormatEnc *mix) {
+
+    LOG_V( "Begin\n");
+
+    if (mix == NULL) {
+        LOG_E( "mix == NULL\n");
+        return MIX_RESULT_NULL_PTR;
+    }
+
+    if (!MIX_IS_VIDEOFORMATENC_H264(mix)) {
+        LOG_E(
+                "not H264 video encode Object\n");
+        return MIX_RESULT_FAIL;
+    }
+
+    /* Deliberately not chained to the parent flush vmethod. */
+
+    MixVideoFormatEnc_H264 *self = MIX_VIDEOFORMATENC_H264(mix);
+
+    g_mutex_lock(mix->objectlock);
+
+    /*unref the reconstructed surface*/
+    if (self->rec_fame != NULL)
+    {
+        mix_videoframe_unref (self->rec_fame);
+        self->rec_fame = NULL;
+    }
+
+    /*unref the reference surface*/
+    if (self->ref_fame != NULL)
+    {
+        mix_videoframe_unref (self->ref_fame);
+        self->ref_fame = NULL;
+    }
+
+    /*reset the properities*/
+    self->encoded_frames = 0;
+    self->pic_skipped = FALSE;
+    self->is_intra = TRUE;
+
+    g_mutex_unlock(mix->objectlock);
+
+    LOG_V( "end\n");
+
+    return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videofmtenc_h264_eos(MixVideoFormatEnc *mix) {
+
+    LOG_V( "\n");
+
+    /* No H.264-specific end-of-stream work; delegate to the parent. */
+    if (mix == NULL) {
+        LOG_E( "mix == NULL\n");
+        return MIX_RESULT_NULL_PTR;
+    }
+
+    if (parent_class->eos == NULL) {
+        return MIX_RESULT_SUCCESS;
+    }
+
+    return parent_class->eos(mix);
+}
+
+/* Tear down the H.264 encoder: release frame references, surface arrays,
+ * and the VA context/config, then chain up to the parent deinitialize.
+ * BUGFIX: the initialized flag was set to TRUE here, so the object still
+ * looked initialized after teardown; it must be cleared (initialize()
+ * is what sets it TRUE). */
+MIX_RESULT mix_videofmtenc_h264_deinitialize(MixVideoFormatEnc *mix) {
+
+    MixVideoFormatEnc *parent = NULL;
+    VAStatus va_status;
+
+    LOG_V( "Begin\n");
+
+    if (mix == NULL) {
+        LOG_E( "mix == NULL\n");
+        return MIX_RESULT_NULL_PTR;
+    }
+
+    parent = MIX_VIDEOFORMATENC(&(mix->parent));
+    MixVideoFormatEnc_H264 *self = MIX_VIDEOFORMATENC_H264(mix);
+
+    LOG_V( "Release frames\n");
+
+    g_mutex_lock(parent->objectlock);
+
+    /*unref the reconstructed surface*/
+    if (self->rec_fame != NULL)
+    {
+        mix_videoframe_unref (self->rec_fame);
+        self->rec_fame = NULL;
+    }
+
+    /*unref the reference surface*/
+    if (self->ref_fame != NULL)
+    {
+        mix_videoframe_unref (self->ref_fame);
+        self->ref_fame = NULL;
+    }
+
+    LOG_V( "Release surfaces\n");
+
+    if (self->ci_shared_surfaces)
+    {
+        g_free (self->ci_shared_surfaces);
+        self->ci_shared_surfaces = NULL;
+    }
+
+    if (self->surfaces)
+    {
+        g_free (self->surfaces);
+        self->surfaces = NULL;
+    }
+
+    LOG_V( "vaDestroyContext\n");
+
+    va_status = vaDestroyContext (parent->va_display, parent->va_context);
+    if (va_status != VA_STATUS_SUCCESS)
+    {
+        LOG_E(
+                "Failed vaDestroyContext\n");
+        g_mutex_unlock(parent->objectlock);
+        return MIX_RESULT_FAIL;
+    }
+
+    LOG_V( "vaDestroyConfig\n");
+
+    va_status = vaDestroyConfig (parent->va_display, parent->va_config);
+    if (va_status != VA_STATUS_SUCCESS)
+    {
+        LOG_E(
+                "Failed vaDestroyConfig\n");
+        g_mutex_unlock(parent->objectlock);
+        return MIX_RESULT_FAIL;
+    }
+
+    /* BUGFIX: was 'parent->initialized = TRUE;' */
+    parent->initialized = FALSE;
+
+    g_mutex_unlock(parent->objectlock);
+
+    /* Chain up; remaining state is cleaned up in parent finalize(). */
+    if (parent_class->deinitialize) {
+        return parent_class->deinitialize(mix);
+    }
+
+    LOG_V( "end\n");
+
+    return MIX_RESULT_SUCCESS;
+}
+
+/* Build and submit the H.264 sequence parameter buffer to libva.
+ * BUGFIX: h264_seq_param was used without initialization, so fields that
+ * are never assigned below (seq_parameter_set_id and vui_flag, both of
+ * which are logged) contained indeterminate stack data.  Zero the struct
+ * first.  Also removes a duplicate intra_period assignment. */
+MIX_RESULT mix_videofmtenc_h264_send_seq_params (MixVideoFormatEnc_H264 *mix)
+{
+
+    VAStatus va_status;
+    VAEncSequenceParameterBufferH264 h264_seq_param;
+
+    MixVideoFormatEnc *parent = NULL;
+
+    if (mix == NULL)
+        return MIX_RESULT_NULL_PTR;
+
+    LOG_V( "Begin\n\n");
+
+    if (!MIX_IS_VIDEOFORMATENC_H264(mix))
+    {
+        LOG_E(
+                "not H264 video encode Object\n");
+        return MIX_RESULT_FAIL;
+    }
+
+    parent = MIX_VIDEOFORMATENC(&(mix->parent));
+
+    /* Zero everything so unassigned fields are deterministic. */
+    memset(&h264_seq_param, 0, sizeof(h264_seq_param));
+
+    /*set up the sequence params for HW*/
+    h264_seq_param.level_idc = 30; //TODO, hard code now
+    h264_seq_param.intra_period = parent->intra_period;
+    h264_seq_param.picture_width_in_mbs = parent->picture_width / 16;
+    h264_seq_param.picture_height_in_mbs = parent->picture_height/ 16;
+    h264_seq_param.bits_per_second = parent->bitrate;
+    /* rounded-to-nearest integer frame rate */
+    h264_seq_param.frame_rate =
+            (unsigned int) (parent->frame_rate_num + parent->frame_rate_denom /2 ) / parent->frame_rate_denom;
+    h264_seq_param.initial_qp = parent->initial_qp;
+    h264_seq_param.min_qp = parent->min_qp;
+    h264_seq_param.basic_unit_size = mix->basic_unit_size; //for rate control usage
+
+    LOG_V(
+            "===h264 sequence params===\n");
+    LOG_I( "seq_parameter_set_id = %d\n",
+            (guint)h264_seq_param.seq_parameter_set_id);
+    LOG_I( "level_idc = %d\n",
+            (guint)h264_seq_param.level_idc);
+    LOG_I( "intra_period = %d\n",
+            h264_seq_param.intra_period);
+    LOG_I( "picture_width_in_mbs = %d\n",
+            h264_seq_param.picture_width_in_mbs);
+    LOG_I( "picture_height_in_mbs = %d\n",
+            h264_seq_param.picture_height_in_mbs);
+    LOG_I( "bitrate = %d\n",
+            h264_seq_param.bits_per_second);
+    LOG_I( "frame_rate = %d\n",
+            h264_seq_param.frame_rate);
+    LOG_I( "initial_qp = %d\n",
+            h264_seq_param.initial_qp);
+    LOG_I( "min_qp = %d\n",
+            h264_seq_param.min_qp);
+    LOG_I( "basic_unit_size = %d\n",
+            h264_seq_param.basic_unit_size);
+    LOG_I( "vui_flag = %d\n\n",
+            h264_seq_param.vui_flag);
+
+    va_status = vaCreateBuffer(parent->va_display, parent->va_context,
+            VAEncSequenceParameterBufferType,
+            sizeof(h264_seq_param),
+            1, &h264_seq_param,
+            &mix->seq_param_buf);
+    if (va_status != VA_STATUS_SUCCESS)
+    {
+        LOG_E(
+                "Failed to vaCreateBuffer\n");
+        return MIX_RESULT_FAIL;
+    }
+
+    va_status = vaRenderPicture(parent->va_display, parent->va_context,
+            &mix->seq_param_buf, 1);
+    if (va_status != VA_STATUS_SUCCESS)
+    {
+        LOG_E(
+                "Failed to vaRenderPicture\n");
+        return MIX_RESULT_FAIL;
+    }
+
+    LOG_V( "end\n");
+
+    return MIX_RESULT_SUCCESS;
+}
+
+/**
+ * mix_videofmtenc_h264_send_picture_parameter:
+ * @mix: the H.264 encoder object
+ *
+ * Fills a VAEncPictureParameterBufferH264 from the encoder state (reference,
+ * reconstructed and coded buffers plus picture dimensions) and submits it to
+ * the hardware via vaCreateBuffer()/vaRenderPicture().
+ *
+ * Returns: MIX_RESULT_SUCCESS, MIX_RESULT_NULL_PTR if @mix is NULL, or
+ * MIX_RESULT_FAIL on VA errors / wrong object type.
+ */
+MIX_RESULT mix_videofmtenc_h264_send_picture_parameter (MixVideoFormatEnc_H264 *mix)
+{
+    VAStatus va_status;
+    /* Zero-initialize: only a subset of fields is assigned below, and the
+     * whole struct is copied to the driver -- unset fields must not carry
+     * stack garbage. */
+    VAEncPictureParameterBufferH264 h264_pic_param = {0};
+    MixVideoFormatEnc *parent = NULL;
+
+    if (mix == NULL)
+        return MIX_RESULT_NULL_PTR;
+
+    LOG_V( "Begin\n\n");
+
+    if (MIX_IS_VIDEOFORMATENC_H264(mix)) {
+
+        parent = MIX_VIDEOFORMATENC(&(mix->parent));
+
+        /*set picture params for HW*/
+        h264_pic_param.reference_picture = mix->ref_fame->frame_id;
+        h264_pic_param.reconstructed_picture = mix->rec_fame->frame_id;
+        h264_pic_param.coded_buf = mix->coded_buf;
+        h264_pic_param.picture_width = parent->picture_width;
+        h264_pic_param.picture_height = parent->picture_height;
+        h264_pic_param.last_picture = 0;
+
+        LOG_V(
+                "======h264 picture params======\n");
+        LOG_I( "reference_picture = 0x%08x\n",
+                h264_pic_param.reference_picture);
+        LOG_I( "reconstructed_picture = 0x%08x\n",
+                h264_pic_param.reconstructed_picture);
+        LOG_I( "coded_buf = 0x%08x\n",
+                h264_pic_param.coded_buf);
+        LOG_I( "picture_width = %d\n",
+                h264_pic_param.picture_width);
+        LOG_I( "picture_height = %d\n\n",
+                h264_pic_param.picture_height);
+
+        va_status = vaCreateBuffer(parent->va_display, parent->va_context,
+                VAEncPictureParameterBufferType,
+                sizeof(h264_pic_param),
+                1,&h264_pic_param,
+                &mix->pic_param_buf);
+
+        if (va_status != VA_STATUS_SUCCESS)
+        {
+            LOG_E(
+                    "Failed to vaCreateBuffer\n");
+            return MIX_RESULT_FAIL;
+        }
+
+        va_status = vaRenderPicture(parent->va_display, parent->va_context,
+                &mix->pic_param_buf, 1);
+
+        if (va_status != VA_STATUS_SUCCESS)
+        {
+            LOG_E(
+                    "Failed to vaRenderPicture\n");
+            return MIX_RESULT_FAIL;
+        }
+    }
+    else
+    {
+        LOG_E(
+                "not H264 video encode Object\n");
+        return MIX_RESULT_FAIL;
+    }
+
+    LOG_V( "end\n");
+    return MIX_RESULT_SUCCESS;
+
+}
+
+
+/**
+ * mix_videofmtenc_h264_send_slice_parameter:
+ * @mix: the H.264 encoder object
+ *
+ * Builds one VAEncSliceParameterBuffer entry per slice -- the picture height
+ * is divided evenly between slices, rounded up to whole macroblocks, with the
+ * last slice taking whatever rows remain -- and submits the buffer to the
+ * hardware with vaRenderPicture().
+ *
+ * Removed: a dead "#if 0" single-slice alternative and its "#if 1" guard.
+ *
+ * Returns: MIX_RESULT_SUCCESS, MIX_RESULT_NULL_PTR if @mix is NULL, or
+ * MIX_RESULT_FAIL on VA errors / wrong object type.
+ */
+MIX_RESULT mix_videofmtenc_h264_send_slice_parameter (MixVideoFormatEnc_H264 *mix)
+{
+    VAStatus va_status;
+
+    guint slice_num;
+    guint slice_height;
+    guint slice_index;
+    guint slice_height_in_mb;
+
+    if (mix == NULL)
+        return MIX_RESULT_NULL_PTR;
+
+    LOG_V( "Begin\n\n");
+
+    MixVideoFormatEnc *parent = NULL;
+
+    if (MIX_IS_VIDEOFORMATENC_H264(mix))
+    {
+        parent = MIX_VIDEOFORMATENC(&(mix->parent));
+
+        slice_num = mix->slice_num;
+        slice_height = parent->picture_height / slice_num;
+
+        /* round the nominal slice height up to a whole macroblock (16 px) */
+        slice_height += 15;
+        slice_height &= (~15);
+
+        va_status = vaCreateBuffer (parent->va_display, parent->va_context,
+                VAEncSliceParameterBufferType,
+                sizeof(VAEncSliceParameterBuffer),
+                slice_num, NULL,
+                &mix->slice_param_buf);
+
+        if (va_status != VA_STATUS_SUCCESS)
+        {
+            LOG_E(
+                    "Failed to vaCreateBuffer\n");
+            return MIX_RESULT_FAIL;
+        }
+
+        VAEncSliceParameterBuffer *slice_param, *current_slice;
+
+        va_status = vaMapBuffer(parent->va_display,
+                mix->slice_param_buf,
+                (void **)&slice_param);
+
+        if (va_status != VA_STATUS_SUCCESS)
+        {
+            LOG_E(
+                    "Failed to vaMapBuffer\n");
+            return MIX_RESULT_FAIL;
+        }
+
+        for (slice_index = 0; slice_index < slice_num; slice_index++) {
+            current_slice = slice_param + slice_index;
+            /* clamp so the last slice only covers the remaining rows */
+            slice_height_in_mb =
+                min (slice_height, parent->picture_height
+                        - slice_index * slice_height) / 16;
+
+            // starting MB row number for this slice
+            current_slice->start_row_number = slice_index * slice_height / 16;
+            // slice height measured in MB
+            current_slice->slice_height = slice_height_in_mb;
+            current_slice->slice_flags.bits.is_intra = mix->is_intra;
+            current_slice->slice_flags.bits.disable_deblocking_filter_idc
+                = mix->disable_deblocking_filter_idc;
+
+            LOG_V(
+                    "======h264 slice params======\n");
+
+            LOG_I( "slice_index = %d\n",
+                    (gint) slice_index);
+            LOG_I( "start_row_number = %d\n",
+                    (gint) current_slice->start_row_number);
+            LOG_I( "slice_height_in_mb = %d\n",
+                    (gint) current_slice->slice_height);
+            LOG_I( "slice.is_intra = %d\n",
+                    (gint) current_slice->slice_flags.bits.is_intra);
+            LOG_I(
+                    "disable_deblocking_filter_idc = %d\n\n",
+                    (gint) mix->disable_deblocking_filter_idc);
+
+        }
+
+        va_status = vaUnmapBuffer(parent->va_display, mix->slice_param_buf);
+
+        if (va_status != VA_STATUS_SUCCESS)
+        {
+            LOG_E(
+                    "Failed to vaUnmapBuffer\n");
+            return MIX_RESULT_FAIL;
+        }
+
+        va_status = vaRenderPicture(parent->va_display, parent->va_context,
+                &mix->slice_param_buf, 1);
+
+        if (va_status != VA_STATUS_SUCCESS)
+        {
+            LOG_E(
+                    "Failed to vaRenderPicture\n");
+            return MIX_RESULT_FAIL;
+        }
+
+    }
+    else
+    {
+        LOG_E(
+                "not H264 video encode Object\n");
+        return MIX_RESULT_FAIL;
+    }
+
+    LOG_V( "end\n");
+
+    return MIX_RESULT_SUCCESS;
+}
+
+/**
+ * mix_videofmtenc_h264_process_encode:
+ * @mix: the H.264 encoder object
+ * @bufin: input buffer; raw I420 pixels in copy mode, or a CI frame index in
+ *         share-buffer mode
+ * @iovout: output vector; if @iovout->data is NULL it is g_malloc'ed here and
+ *          ownership passes to the caller
+ *
+ * Encodes one frame: acquires source/reference/reconstructed surfaces,
+ * uploads and converts I420 input to NV12 (copy mode only), submits
+ * sequence/picture/slice parameters, runs the VA encode, then extracts the
+ * coded bitstream (AnnexB as-is, or converted to 4-byte length-prefixed
+ * NALUs) and rotates the reference/reconstructed frames.
+ *
+ * Fixes vs. previous revision: results of send_seq_params(),
+ * vaUnmapBuffer(image->buf) and the share-mode get_frame_id() are now
+ * actually captured before being tested; NULL check on mix_videoframe_new();
+ * removed "#if 0" dead code.
+ *
+ * Returns: MIX_RESULT_SUCCESS, MIX_RESULT_NULL_PTR, MIX_RESULT_NO_MEMORY,
+ * or MIX_RESULT_FAIL.
+ */
+MIX_RESULT mix_videofmtenc_h264_process_encode (MixVideoFormatEnc_H264 *mix,
+        MixBuffer * bufin, MixIOVec * iovout)
+{
+
+    MIX_RESULT ret = MIX_RESULT_SUCCESS;
+    VAStatus va_status = VA_STATUS_SUCCESS;
+    VADisplay va_display = NULL;
+    VAContextID va_context;
+    gulong surface = 0;
+    guint16 width, height;
+
+    MixVideoFrame * tmp_fame;
+    guint8 *buf;
+
+    if ((mix == NULL) || (bufin == NULL) || (iovout == NULL)) {
+        LOG_E(
+                "mix == NUL) || bufin == NULL || iovout == NULL\n");
+        return MIX_RESULT_NULL_PTR;
+    }
+
+    LOG_V( "Begin\n");
+
+    if (MIX_IS_VIDEOFORMATENC_H264(mix))
+    {
+
+        MixVideoFormatEnc *parent = MIX_VIDEOFORMATENC(&(mix->parent));
+
+        va_display = parent->va_display;
+        va_context = parent->va_context;
+        width = parent->picture_width;
+        height = parent->picture_height;
+
+        LOG_I( "encoded_frames = %d\n", mix->encoded_frames);
+        LOG_I( "is_intra = %d\n", mix->is_intra);
+        LOG_I( "ci_frame_id = 0x%08x\n", (guint) parent->ci_frame_id);
+
+        /* determine the picture type: force an I frame every intra_period frames */
+        if ((mix->encoded_frames % parent->intra_period) == 0) {
+            mix->is_intra = TRUE;
+        } else {
+            mix->is_intra = FALSE;
+        }
+
+        LOG_I( "is_intra_picture = %d\n", mix->is_intra);
+
+        LOG_V( "Get Surface from the pool\n");
+
+        /*current we use one surface for source data,
+         * one for reference and one for reconstructed*/
+        /*TODO, could be refine here*/
+
+        if (!parent->share_buf_mode) {
+            LOG_V( "We are NOT in share buffer mode\n");
+
+            if (mix->ref_fame == NULL)
+            {
+                ret = mix_surfacepool_get(parent->surfacepool, &mix->ref_fame);
+                if (ret != MIX_RESULT_SUCCESS) //#ifdef SLEEP_SURFACE not used
+                {
+                    LOG_E( "Failed to mix_surfacepool_get\n");
+                    return MIX_RESULT_FAIL;
+                }
+            }
+
+            if (mix->rec_fame == NULL)
+            {
+                ret = mix_surfacepool_get(parent->surfacepool, &mix->rec_fame);
+                if (ret != MIX_RESULT_SUCCESS)
+                {
+                    LOG_E( "Failed to mix_surfacepool_get\n");
+                    return MIX_RESULT_FAIL;
+                }
+            }
+
+            /* in display mode the previous frame was handed to the frame
+             * manager, so always take a fresh surface */
+            if (parent->need_display) {
+                mix->cur_fame = NULL;
+            }
+
+            if (mix->cur_fame == NULL)
+            {
+                ret = mix_surfacepool_get(parent->surfacepool, &mix->cur_fame);
+                if (ret != MIX_RESULT_SUCCESS)
+                {
+                    LOG_E( "Failed to mix_surfacepool_get\n");
+                    return MIX_RESULT_FAIL;
+                }
+            }
+
+            LOG_V( "Get Surface Done\n");
+
+            VAImage src_image;
+            guint8 *pvbuf;
+            guint8 *dst_y;
+            guint8 *dst_uv;
+            int i,j;
+
+            LOG_V( "map source data to surface\n");
+
+            ret = mix_videoframe_get_frame_id(mix->cur_fame, &surface);
+            if (ret != MIX_RESULT_SUCCESS)
+            {
+                LOG_E( "Failed to mix_videoframe_get_frame_id\n");
+                return MIX_RESULT_FAIL;
+            }
+
+            LOG_I( "surface id = 0x%08x\n", (guint) surface);
+
+            va_status = vaDeriveImage(va_display, surface, &src_image);
+            //need to destroy
+            if (va_status != VA_STATUS_SUCCESS)
+            {
+                LOG_E( "Failed to vaDeriveImage\n");
+                return MIX_RESULT_FAIL;
+            }
+
+            VAImage *image = &src_image;
+
+            LOG_V( "vaDeriveImage Done\n");
+
+            va_status = vaMapBuffer (va_display, image->buf, (void **)&pvbuf);
+            if (va_status != VA_STATUS_SUCCESS)
+            {
+                LOG_E( "Failed to vaMapBuffer\n");
+                return MIX_RESULT_FAIL;
+            }
+
+            LOG_V( "vaImage information\n");
+            LOG_I( "image->pitches[0] = %d\n", image->pitches[0]);
+            LOG_I( "image->pitches[1] = %d\n", image->pitches[1]);
+            LOG_I( "image->offsets[0] = %d\n", image->offsets[0]);
+            LOG_I( "image->offsets[1] = %d\n", image->offsets[1]);
+            LOG_I( "image->num_planes = %d\n", image->num_planes);
+            LOG_I( "image->width = %d\n", image->width);
+            LOG_I( "image->height = %d\n", image->height);
+
+            LOG_I( "input buf size = %d\n", bufin->size);
+
+            guint8 *inbuf = bufin->data;
+
+            /*need to convert YUV420 to NV12*/
+            dst_y = pvbuf +image->offsets[0];
+
+            /* copy the Y plane row by row, honouring the surface pitch */
+            for (i = 0; i < height; i ++) {
+                memcpy (dst_y, inbuf + i * width, width);
+                dst_y += image->pitches[0];
+            }
+
+            dst_uv = pvbuf + image->offsets[1];
+
+            /* interleave the separate U and V planes into the NV12 UV plane */
+            for (i = 0; i < height / 2; i ++) {
+                for (j = 0; j < width; j+=2) {
+                    dst_uv [j] = inbuf [width * height + i * width / 2 + j / 2];
+                    dst_uv [j + 1] =
+                        inbuf [width * height * 5 / 4 + i * width / 2 + j / 2];
+                }
+                dst_uv += image->pitches[1];
+            }
+
+            /* Fix: capture the result -- the status of this call was
+             * previously never assigned before being tested. */
+            va_status = vaUnmapBuffer(va_display, image->buf);
+            if (va_status != VA_STATUS_SUCCESS)
+            {
+                LOG_E( "Failed to vaUnmapBuffer\n");
+                return MIX_RESULT_FAIL;
+            }
+
+            va_status = vaDestroyImage(va_display, src_image.image_id);
+            if (va_status != VA_STATUS_SUCCESS)
+            {
+                LOG_E( "Failed to vaDestroyImage\n");
+                return MIX_RESULT_FAIL;
+            }
+
+            LOG_V( "Map source data to surface done\n");
+
+        }
+
+        else {//if (!parent->share_buf_mode)
+
+            MixVideoFrame * frame = mix_videoframe_new();
+            if (frame == NULL)
+            {
+                LOG_E( "Failed to mix_videoframe_new\n");
+                return MIX_RESULT_NO_MEMORY;
+            }
+            /* NOTE(review): 'frame' is only used as a CI-index lookup key and
+             * is never released on any path below -- looks like a leak.
+             * Confirm whether the surface pool takes ownership before adding
+             * an unref. */
+
+            if (mix->ref_fame == NULL)
+            {
+                ret = mix_videoframe_set_ci_frame_idx (frame, mix->surface_num - 1);
+
+                ret = mix_surfacepool_get_frame_with_ci_frameidx
+                    (parent->surfacepool, &mix->ref_fame, frame);
+                if (ret != MIX_RESULT_SUCCESS) //#ifdef SLEEP_SURFACE not used
+                {
+                    LOG_E( "get reference surface from pool failed\n");
+                    return MIX_RESULT_FAIL;
+                }
+            }
+
+            if (mix->rec_fame == NULL)
+            {
+                ret = mix_videoframe_set_ci_frame_idx (frame, mix->surface_num - 2);
+
+                ret = mix_surfacepool_get_frame_with_ci_frameidx
+                    (parent->surfacepool, &mix->rec_fame, frame);
+
+                if (ret != MIX_RESULT_SUCCESS)
+                {
+                    LOG_E( "get recontructed surface from pool failed\n");
+                    return MIX_RESULT_FAIL;
+                }
+            }
+
+            if (parent->need_display) {
+                mix->cur_fame = NULL;
+            }
+
+            if (mix->cur_fame == NULL)
+            {
+                guint ci_idx;
+                /* NOTE(review): copies bufin->size bytes into a 4-byte guint;
+                 * overflows the local if size > sizeof(guint) -- confirm the
+                 * caller contract for share-buffer mode. */
+                memcpy (&ci_idx, bufin->data, bufin->size);
+
+                LOG_I( "surface_num = %d\n", mix->surface_num);
+                LOG_I( "ci_frame_idx = %d\n", ci_idx);
+
+                if (ci_idx > mix->surface_num - 2) {
+                    LOG_E( "the CI frame idx is too bigger than CI frame number\n");
+                    return MIX_RESULT_FAIL;
+                }
+
+                ret = mix_videoframe_set_ci_frame_idx (frame, ci_idx);
+
+                ret = mix_surfacepool_get_frame_with_ci_frameidx
+                    (parent->surfacepool, &mix->cur_fame, frame);
+
+                if (ret != MIX_RESULT_SUCCESS)
+                {
+                    LOG_E( "get current working surface from pool failed\n");
+                    return MIX_RESULT_FAIL;
+                }
+            }
+
+            /* Fix: check the result -- previously unchecked. */
+            ret = mix_videoframe_get_frame_id(mix->cur_fame, &surface);
+            if (ret != MIX_RESULT_SUCCESS)
+            {
+                LOG_E( "Failed to mix_videoframe_get_frame_id\n");
+                return MIX_RESULT_FAIL;
+            }
+
+        }
+
+        LOG_V( "vaBeginPicture\n");
+        LOG_I( "va_context = 0x%08x\n",(guint)va_context);
+        LOG_I( "surface = 0x%08x\n",(guint)surface);
+        LOG_I( "va_display = 0x%08x\n",(guint)va_display);
+
+        va_status = vaBeginPicture(va_display, va_context, surface);
+        if (va_status != VA_STATUS_SUCCESS)
+        {
+            LOG_E( "Failed vaBeginPicture\n");
+            return MIX_RESULT_FAIL;
+        }
+
+        LOG_V( "mix_videofmtenc_h264_send_seq_params\n");
+
+        if (mix->encoded_frames == 0) {
+            /* Fix: capture the result -- it was previously discarded and a
+             * stale 'ret' value was tested instead. */
+            ret = mix_videofmtenc_h264_send_seq_params (mix);
+            if (ret != MIX_RESULT_SUCCESS)
+            {
+                LOG_E( "Failed mix_videofmtenc_h264_send_seq_params\n");
+                return MIX_RESULT_FAIL;
+            }
+        }
+
+        ret = mix_videofmtenc_h264_send_picture_parameter (mix);
+
+        if (ret != MIX_RESULT_SUCCESS)
+        {
+            LOG_E( "Failed mix_videofmtenc_h264_send_picture_parameter\n");
+            return MIX_RESULT_FAIL;
+        }
+
+        ret = mix_videofmtenc_h264_send_slice_parameter (mix);
+        if (ret != MIX_RESULT_SUCCESS)
+        {
+            LOG_E( "Failed mix_videofmtenc_h264_send_slice_parameter\n");
+            return MIX_RESULT_FAIL;
+        }
+
+        LOG_V( "before vaEndPicture\n");
+
+        va_status = vaEndPicture (va_display, va_context);
+        if (va_status != VA_STATUS_SUCCESS)
+        {
+            LOG_E( "Failed vaEndPicture\n");
+            return MIX_RESULT_FAIL;
+        }
+
+        LOG_V( "vaSyncSurface\n");
+
+        va_status = vaSyncSurface(va_display, surface);
+        if (va_status != VA_STATUS_SUCCESS)
+        {
+            LOG_E( "Failed vaSyncSurface\n");
+            return MIX_RESULT_FAIL;
+        }
+
+        LOG_V( "Start to get encoded data\n");
+
+        /*get encoded data from the VA buffer*/
+        va_status = vaMapBuffer (va_display, mix->coded_buf, (void **)&buf);
+        if (va_status != VA_STATUS_SUCCESS)
+        {
+            LOG_E( "Failed vaMapBuffer\n");
+            return MIX_RESULT_FAIL;
+        }
+
+        // first 4 bytes is the size of the buffer
+        memcpy (&(iovout->data_size), (void*)buf, 4);
+
+        guint size = iovout->data_size + 100;
+
+        iovout->buffer_size = size;
+
+        //We will support two buffer mode, one is application allocates the buffer and passes to encode,
+        //the other is encode allocate memory
+
+        if (iovout->data == NULL) { //means app doesn't allocate the buffer, so _encode will allocate it.
+            iovout->data = g_malloc (size); // In case we have lots of 0x000001 start code, and we replace them with 4 bytes length prefixed
+            if (iovout->data == NULL) {
+                return MIX_RESULT_NO_MEMORY;
+            }
+        }
+
+        if (mix->delimiter_type == MIX_DELIMITER_ANNEXB) {
+            memcpy (iovout->data, buf + 16, iovout->data_size); //payload starts at the 17th byte
+            size = iovout->data_size;
+        } else {
+
+            guint pos = 0;
+            guint zero_byte_count = 0;
+            guint prefix_length = 0;
+            guint8 nal_unit_type = 0;
+            guint8 * payload = buf + 16;
+
+            /* count the leading zero bytes of the first start code */
+            while ((payload[pos++] == 0x00)) {
+                zero_byte_count ++;
+                if (pos >= iovout->data_size) //to make sure the buffer to be accessed is valid
+                    break;
+            }
+
+            nal_unit_type = (guint8)(payload[pos] & 0x1f);
+            prefix_length = zero_byte_count + 1;
+
+            LOG_I ("nal_unit_type = %d\n", nal_unit_type);
+            LOG_I ("zero_byte_count = %d\n", zero_byte_count);
+
+            if ((payload [pos - 1] & 0x01) && mix->slice_num == 1 && nal_unit_type == 1) {
+                size = iovout->data_size;
+                iovout->data[0] = ((size - prefix_length) >> 24) & 0xff;
+                iovout->data[1] = ((size - prefix_length) >> 16) & 0xff;
+                iovout->data[2] = ((size - prefix_length) >> 8) & 0xff;
+                iovout->data[3] = (size - prefix_length) & 0xff;
+                // use 4 bytes to indicate the NALU length
+                memcpy (iovout->data + 4, buf + 16 + prefix_length, size - prefix_length);
+                LOG_V ("We only have one start code, copy directly\n");
+            }
+            else {
+                ret = mix_videofmtenc_h264_AnnexB_to_length_prefixed (buf + 16, iovout->data_size, iovout->data, &size);
+                if (ret != MIX_RESULT_SUCCESS)
+                {
+                    LOG_E ( "Failed mix_videofmtenc_h264_AnnexB_to_length_prefixed\n");
+                    return MIX_RESULT_FAIL;
+                }
+            }
+        }
+
+        iovout->data_size = size;
+        LOG_I( "out size is = %d\n", iovout->data_size);
+
+        va_status = vaUnmapBuffer (va_display, mix->coded_buf);
+        if (va_status != VA_STATUS_SUCCESS)
+        {
+            LOG_E( "Failed vaUnmapBuffer\n");
+            return MIX_RESULT_FAIL;
+        }
+
+        LOG_V( "get encoded data done\n");
+
+        VASurfaceStatus status;
+
+        /*query the status of current surface*/
+        va_status = vaQuerySurfaceStatus(va_display, surface, &status);
+        if (va_status != VA_STATUS_SUCCESS)
+        {
+            LOG_E( "Failed vaQuerySurfaceStatus\n");
+            return MIX_RESULT_FAIL;
+        }
+        mix->pic_skipped = status & VASurfaceSkipped;
+
+        if (parent->need_display) {
+            ret = mix_framemanager_enqueue(parent->framemgr, mix->cur_fame);
+            if (ret != MIX_RESULT_SUCCESS)
+            {
+                LOG_E( "Failed mix_framemanager_enqueue\n");
+                return MIX_RESULT_FAIL;
+            }
+        }
+
+        /*update the reference surface and reconstructed surface */
+        if (!mix->pic_skipped) {
+            tmp_fame = mix->rec_fame;
+            mix->rec_fame= mix->ref_fame;
+            mix->ref_fame = tmp_fame;
+        }
+
+        if (!(parent->need_display)) {
+            mix_videoframe_unref (mix->cur_fame);
+            mix->cur_fame = NULL;
+        }
+
+        mix->encoded_frames ++;
+    }
+    else
+    {
+        LOG_E( "not H264 video encode Object\n");
+        return MIX_RESULT_FAIL;
+    }
+
+    LOG_V( "end\n");
+
+    return MIX_RESULT_SUCCESS;
+}
+
+/**
+ * mix_videofmtenc_h264_get_max_encoded_buf_size:
+ * @mix: the encoder object (must be a MixVideoFormatEnc_H264)
+ * @max_size: out: worst-case coded buffer size in bytes
+ *
+ * Computes (and caches in self->coded_buf_size) an upper bound for the
+ * encoded output buffer, derived from the rate-control mode, the bitrate and
+ * the picture dimensions, rounded up to a multiple of 16.
+ *
+ * Returns: MIX_RESULT_SUCCESS, MIX_RESULT_NULL_PTR on NULL arguments, or
+ * MIX_RESULT_FAIL if @mix is not an H.264 encoder object.
+ */
+MIX_RESULT mix_videofmtenc_h264_get_max_encoded_buf_size (
+        MixVideoFormatEnc *mix, guint *max_size)
+{
+
+    MixVideoFormatEnc *parent = NULL;
+
+    if (mix == NULL || max_size == NULL)
+    {
+        LOG_E(
+                "mix == NULL || max_size == NULL\n");
+        return MIX_RESULT_NULL_PTR;
+    }
+
+    LOG_V( "Begin\n");
+
+    /* validate the runtime type before casting */
+    if (!MIX_IS_VIDEOFORMATENC_H264(mix))
+    {
+        LOG_E(
+                "not H264 video encode Object\n");
+        return MIX_RESULT_FAIL;
+    }
+
+    parent = MIX_VIDEOFORMATENC(mix);
+    MixVideoFormatEnc_H264 *self = MIX_VIDEOFORMATENC_H264(mix);
+
+    if (self->coded_buf_size > 0) {
+        *max_size = self->coded_buf_size;
+        LOG_V ("Already calculate the max encoded size, get the value directly");
+        return MIX_RESULT_SUCCESS;
+    }
+
+    /*base on the rate control mode to calculate the default encoded buffer size*/
+    if (self->va_rcmode == VA_RC_NONE) {
+        self->coded_buf_size =
+            (parent->picture_width* parent->picture_height * 400) / (16 * 16);
+        // set to value according to QP
+    }
+    else {
+        self->coded_buf_size = parent->bitrate/ 4;
+    }
+
+    self->coded_buf_size =
+        max (self->coded_buf_size ,
+                (parent->picture_width* parent->picture_height * 400) / (16 * 16));
+
+    /* in case got a very large user input bit rate value: cap at the raw
+     * NV12 frame size in bits, w * h * 1.5 bytes * 8 = w * h * 12, computed
+     * in integer arithmetic instead of the previous implicit double
+     * conversion (1.5 * 8 == 12 exactly, so the result is unchanged). */
+    self->coded_buf_size =
+        min(self->coded_buf_size,
+                (parent->picture_width * parent->picture_height * 12));
+    /* round up to a multiple of 16 */
+    self->coded_buf_size = (self->coded_buf_size + 15) &(~15);
+
+    *max_size = self->coded_buf_size;
+
+    return MIX_RESULT_SUCCESS;
+}
+
+/**
+ * mix_videofmtenc_h264_AnnexB_to_length_prefixed:
+ * @bufin: AnnexB (start-code delimited) H.264 bitstream
+ * @bufin_len: number of valid bytes in @bufin
+ * @bufout: destination buffer for the length-prefixed stream
+ * @bufout_len: in: capacity of @bufout; out: bytes actually written
+ *
+ * Rewrites an AnnexB stream (NALUs separated by 00 00 01 / 00 00 00 01
+ * start codes) into length-prefixed format where each NALU is preceded by a
+ * 4-byte big-endian size.
+ *
+ * Fix vs. previous revision: nal_size is unsigned, so the old
+ * "if (nal_size < 0)" malformed-stream check could never fire; the bound is
+ * now validated before the subtraction.
+ *
+ * Returns: MIX_RESULT_SUCCESS, MIX_RESULT_NULL_PTR on NULL arguments, or
+ * MIX_RESULT_FAIL if the input is not AnnexB or @bufout is too small.
+ */
+MIX_RESULT mix_videofmtenc_h264_AnnexB_to_length_prefixed (
+        guint8 * bufin, guint bufin_len, guint8* bufout, guint * bufout_len)
+{
+
+    guint pos = 0;
+    guint last_pos = 0;
+
+    guint zero_byte_count = 0;
+    guint nal_size = 0;
+    guint prefix_length = 0;
+    guint size_copied = 0;
+    guint leading_zero_count = 0;
+
+    if (bufin == NULL || bufout == NULL || bufout_len == NULL) {
+
+        LOG_E(
+                "bufin == NULL || bufout == NULL || bufout_len = NULL\n");
+        return MIX_RESULT_NULL_PTR;
+    }
+
+    /* guint is unsigned, so "<= 0" is equivalent to "== 0" */
+    if (bufin_len == 0 || *bufout_len == 0) {
+        LOG_E(
+                "bufin_len <= 0 || *bufout_len <= 0\n");
+        return MIX_RESULT_FAIL;
+    }
+
+    LOG_V ("Begin\n");
+
+    /* consume the leading zero bytes of the first start code */
+    while ((bufin[pos++] == 0x00)) {
+        zero_byte_count ++;
+        if (pos >= bufin_len) //to make sure the buffer to be accessed is valid
+            break;
+    }
+
+    if (bufin[pos - 1] != 0x01 || zero_byte_count < 2)
+    {
+        LOG_E("The stream is not AnnexB format \n");
+        return MIX_RESULT_FAIL; //not AnnexB, we won't process it
+    }
+
+    zero_byte_count = 0;
+    last_pos = pos;
+
+    while (pos < bufin_len) {
+
+        while (bufin[pos++] == 0) {
+            zero_byte_count ++;
+            if (pos >= bufin_len) //to make sure the buffer to be accessed is valid
+                break;
+        }
+
+        if (bufin[pos - 1] == 0x01 && zero_byte_count >= 2) {
+            if (zero_byte_count == 2) {
+                prefix_length = 3;
+            }
+            else {
+                prefix_length = 4;
+                leading_zero_count = zero_byte_count - 3;
+            }
+
+            LOG_I("leading_zero_count = %d\n", leading_zero_count);
+
+            /* Fix: validate before subtracting -- nal_size is unsigned and
+             * the old "nal_size < 0" test was always false. */
+            if (pos - last_pos < prefix_length + leading_zero_count) {
+                LOG_E ("something wrong in the stream\n");
+                return MIX_RESULT_FAIL; //not AnnexB, we won't process it
+            }
+            nal_size = pos - last_pos - prefix_length - leading_zero_count;
+
+            if (*bufout_len < (size_copied + nal_size + 4)) {
+                LOG_E ("The length of destination buffer is too small\n");
+                return MIX_RESULT_FAIL;
+            }
+
+            LOG_I ("nal_size = %d\n", nal_size);
+
+            /*We use 4 bytes length prefix*/
+            bufout [size_copied] = nal_size >> 24 & 0xff;
+            bufout [size_copied + 1] = nal_size >> 16 & 0xff;
+            bufout [size_copied + 2] = nal_size >> 8 & 0xff;
+            bufout [size_copied + 3] = nal_size & 0xff;
+
+            size_copied += 4; //4 bytes length prefix
+            memcpy (bufout + size_copied, bufin + last_pos, nal_size);
+            size_copied += nal_size;
+
+            LOG_I ("size_copied = %d\n", size_copied);
+
+            zero_byte_count = 0;
+            leading_zero_count = 0;
+            last_pos = pos;
+        }
+
+        else if (pos == bufin_len) {
+
+            LOG_V ("Last NALU in this frame\n");
+
+            nal_size = pos - last_pos;
+
+            if (*bufout_len < (size_copied + nal_size + 4)) {
+                LOG_E ("The length of destination buffer is too small\n");
+                return MIX_RESULT_FAIL;
+            }
+
+            /*We use 4 bytes length prefix*/
+            bufout [size_copied] = nal_size >> 24 & 0xff;
+            bufout [size_copied + 1] = nal_size >> 16 & 0xff;
+            bufout [size_copied + 2] = nal_size >> 8 & 0xff;
+            bufout [size_copied + 3] = nal_size & 0xff;
+
+            size_copied += 4; //4 bytes length prefix
+            memcpy (bufout + size_copied, bufin + last_pos, nal_size);
+            size_copied += nal_size;
+
+            LOG_I ("size_copied = %d\n", size_copied);
+        }
+
+        else {
+            zero_byte_count = 0;
+            leading_zero_count = 0;
+        }
+
+    }
+
+    /* report the actual number of bytes produced */
+    if (size_copied != *bufout_len) {
+        *bufout_len = size_copied;
+    }
+
+    LOG_V ("End\n");
+
+    return MIX_RESULT_SUCCESS;
+
+}
+
diff --git a/mix_video/src/mixvideoformatenc_h264.h b/mix_video/src/mixvideoformatenc_h264.h
new file mode 100644
index 0000000..eeef2d9
--- /dev/null
+++ b/mix_video/src/mixvideoformatenc_h264.h
@@ -0,0 +1,137 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+#ifndef __MIX_VIDEOFORMATENC_H264_H__
+#define __MIX_VIDEOFORMATENC_H264_H__
+
+#include "mixvideoformatenc.h"
+#include "mixvideoframe_private.h"
+
+#define MIX_VIDEO_ENC_H264_SURFACE_NUM 20
+
+/* NOTE: classic function-like macros -- each argument is evaluated twice,
+ * so do not pass expressions with side effects (e.g. min(i++, n)). */
+#define min(X,Y) (((X) < (Y)) ? (X) : (Y))
+#define max(X,Y) (((X) > (Y)) ? (X) : (Y))
+
+/*
+ * Type macros.
+ */
+#define MIX_TYPE_VIDEOFORMATENC_H264 (mix_videoformatenc_h264_get_type ())
+#define MIX_VIDEOFORMATENC_H264(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_VIDEOFORMATENC_H264, MixVideoFormatEnc_H264))
+#define MIX_IS_VIDEOFORMATENC_H264(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEOFORMATENC_H264))
+#define MIX_VIDEOFORMATENC_H264_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_VIDEOFORMATENC_H264, MixVideoFormatEnc_H264Class))
+#define MIX_IS_VIDEOFORMATENC_H264_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEOFORMATENC_H264))
+#define MIX_VIDEOFORMATENC_H264_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEOFORMATENC_H264, MixVideoFormatEnc_H264Class))
+
+typedef struct _MixVideoFormatEnc_H264 MixVideoFormatEnc_H264;
+typedef struct _MixVideoFormatEnc_H264Class MixVideoFormatEnc_H264Class;
+
+/* Instance state for the H.264 hardware encoder.
+ * Note: "fame" in the frame members below is a historical typo for "frame",
+ * kept because the name is used throughout the implementation. */
+struct _MixVideoFormatEnc_H264 {
+    /*< public > */
+    MixVideoFormatEnc parent;
+
+    VABufferID coded_buf;         /* VA buffer receiving the encoded bitstream */
+    VABufferID seq_param_buf;     /* sequence parameter buffer */
+    VABufferID pic_param_buf;     /* picture parameter buffer */
+    VABufferID slice_param_buf;   /* per-slice parameter buffer */
+    VASurfaceID * ci_shared_surfaces;  /* presumably CI-shared surfaces for share-buffer mode -- confirm */
+    VASurfaceID * surfaces;       /* surfaces used by the encoder */
+    guint surface_num;            /* number of surfaces */
+
+    MixVideoFrame *cur_fame;      /* current input frame to be encoded */
+    MixVideoFrame *ref_fame;      /* reference frame */
+    MixVideoFrame *rec_fame;      /* reconstructed frame */
+
+    guint basic_unit_size;        /* basic unit size, for rate control */
+    guint disable_deblocking_filter_idc;  /* H.264 deblocking filter control, copied into each slice */
+    MixDelimiterType delimiter_type;      /* output NALU delimiter: AnnexB or length-prefixed */
+    guint slice_num;              /* number of slices per picture */
+    guint va_rcmode;              /* VA rate control mode (e.g. VA_RC_NONE) */
+
+    guint encoded_frames;         /* frames encoded so far; drives intra_period I-frame decision */
+    gboolean pic_skipped;         /* TRUE when the HW reported VASurfaceSkipped for the last picture */
+
+    gboolean is_intra;            /* TRUE when the current picture is encoded as intra */
+
+    guint coded_buf_size;         /* cached worst-case coded buffer size; 0 = not yet computed */
+
+    /*< public > */
+};
+
+/**
+ * MixVideoFormatEnc_H264Class:
+ *
+ * MI-X Video object class
+ */
+struct _MixVideoFormatEnc_H264Class {
+ /*< public > */
+ MixVideoFormatEncClass parent_class;
+
+ /* class members */
+
+ /*< public > */
+};
+
+/**
+ * mix_videoformatenc_h264_get_type:
+ * @returns: type
+ *
+ * Get the type of object.
+ */
+GType mix_videoformatenc_h264_get_type(void);
+
+/**
+ * mix_videoformatenc_h264_new:
+ * @returns: A newly allocated instance of #MixVideoFormatEnc_H264
+ *
+ * Use this method to create new instance of #MixVideoFormatEnc_H264
+ */
+MixVideoFormatEnc_H264 *mix_videoformatenc_h264_new(void);
+
+/**
+ * mix_videoformatenc_h264_ref:
+ * @mix: object to add reference
+ * @returns: the MixVideoFormatEnc_H264 instance where reference count has been increased.
+ *
+ * Add reference count.
+ */
+MixVideoFormatEnc_H264 *mix_videoformatenc_h264_ref(MixVideoFormatEnc_H264 * mix);
+
+/**
+ * mix_videoformatenc_h264_unref:
+ * @obj: object to unref.
+ *
+ * Decrement reference count of the object.
+ */
+#define mix_videoformatenc_h264_unref(obj) g_object_unref (G_OBJECT(obj))
+
+/* Class Methods */
+
+/* H.264 vmethods */
+MIX_RESULT mix_videofmtenc_h264_getcaps(MixVideoFormatEnc *mix, GString *msg);
+MIX_RESULT mix_videofmtenc_h264_initialize(MixVideoFormatEnc *mix,
+ MixVideoConfigParamsEnc * config_params_enc,
+ MixFrameManager * frame_mgr,
+ MixBufferPool * input_buf_pool,
+ MixSurfacePool ** surface_pool,
+ VADisplay va_display);
+MIX_RESULT mix_videofmtenc_h264_encode(MixVideoFormatEnc *mix, MixBuffer * bufin[],
+ gint bufincnt, MixIOVec * iovout[], gint iovoutcnt,
+ MixVideoEncodeParams * encode_params);
+MIX_RESULT mix_videofmtenc_h264_flush(MixVideoFormatEnc *mix);
+MIX_RESULT mix_videofmtenc_h264_eos(MixVideoFormatEnc *mix);
+MIX_RESULT mix_videofmtenc_h264_deinitialize(MixVideoFormatEnc *mix);
+MIX_RESULT mix_videofmtenc_h264_get_max_encoded_buf_size (MixVideoFormatEnc *mix, guint * max_size);
+
+/* Local Methods */
+
+MIX_RESULT mix_videofmtenc_h264_process_encode (MixVideoFormatEnc_H264 *mix, MixBuffer * bufin,
+ MixIOVec * iovout);
+MIX_RESULT mix_videofmtenc_h264_AnnexB_to_length_prefixed (
+ guint8 * bufin, guint bufin_len, guint8* bufout, guint *bufout_len);
+
+#endif /* __MIX_VIDEOFORMATENC_H264_H__ */
diff --git a/mix_video/src/mixvideoformatenc_mpeg4.c b/mix_video/src/mixvideoformatenc_mpeg4.c
new file mode 100644
index 0000000..e58976b
--- /dev/null
+++ b/mix_video/src/mixvideoformatenc_mpeg4.c
@@ -0,0 +1,1713 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+#include <glib.h>
+#include <string.h>
+#include <stdlib.h>
+
+#include "mixvideolog.h"
+
+#include "mixvideoformatenc_mpeg4.h"
+#include "mixvideoconfigparamsenc_mpeg4.h"
+
+#define MDEBUG
+#undef SHOW_SRC
+
+#ifdef SHOW_SRC
+Window win = 0;
+#endif /* SHOW_SRC */
+
+
+/* The parent class. The pointer will be saved
+ * in this class's initialization. The pointer
+ * can be used for chaining method call if needed.
+ */
+static MixVideoFormatEncClass *parent_class = NULL;
+
+static void mix_videoformatenc_mpeg4_finalize(GObject * obj);
+
+/*
+ * Please note that the type we pass to G_DEFINE_TYPE is MIX_TYPE_VIDEOFORMATENC
+ */
+G_DEFINE_TYPE (MixVideoFormatEnc_MPEG4, mix_videoformatenc_mpeg4, MIX_TYPE_VIDEOFORMATENC);
+
+static void mix_videoformatenc_mpeg4_init(MixVideoFormatEnc_MPEG4 * self) {
+ MixVideoFormatEnc *parent = MIX_VIDEOFORMATENC(self);
+
+ /* TODO: public member initialization */
+
+ /* TODO: private member initialization */
+ self->encoded_frames = 0;
+ self->pic_skipped = FALSE;
+ self->is_intra = TRUE;
+ self->cur_fame = NULL;
+ self->ref_fame = NULL;
+ self->rec_fame = NULL;
+
+ self->ci_shared_surfaces = NULL;
+ self->surfaces= NULL;
+ self->surface_num = 0;
+
+ parent->initialized = FALSE;
+}
+
+static void mix_videoformatenc_mpeg4_class_init(
+ MixVideoFormatEnc_MPEG4Class * klass) {
+
+ /* root class */
+ GObjectClass *gobject_class = (GObjectClass *) klass;
+
+ /* direct parent class */
+ MixVideoFormatEncClass *video_formatenc_class =
+ MIX_VIDEOFORMATENC_CLASS(klass);
+
+ /* parent class for later use */
+ parent_class = g_type_class_peek_parent(klass);
+
+ /* setup finializer */
+ gobject_class->finalize = mix_videoformatenc_mpeg4_finalize;
+
+ /* setup vmethods with base implementation */
+ /* TODO: decide if we need to override the parent's methods */
+ video_formatenc_class->getcaps = mix_videofmtenc_mpeg4_getcaps;
+ video_formatenc_class->initialize = mix_videofmtenc_mpeg4_initialize;
+ video_formatenc_class->encode = mix_videofmtenc_mpeg4_encode;
+ video_formatenc_class->flush = mix_videofmtenc_mpeg4_flush;
+ video_formatenc_class->eos = mix_videofmtenc_mpeg4_eos;
+ video_formatenc_class->deinitialize = mix_videofmtenc_mpeg4_deinitialize;
+ video_formatenc_class->getmaxencodedbufsize = mix_videofmtenc_mpeg4_get_max_encoded_buf_size;
+}
+
+MixVideoFormatEnc_MPEG4 *
+mix_videoformatenc_mpeg4_new(void) {
+ MixVideoFormatEnc_MPEG4 *ret =
+ g_object_new(MIX_TYPE_VIDEOFORMATENC_MPEG4, NULL);
+
+ return ret;
+}
+
+void mix_videoformatenc_mpeg4_finalize(GObject * obj) {
+ /* clean up here. */
+
+ /*MixVideoFormatEnc_MPEG4 *mix = MIX_VIDEOFORMATENC_MPEG4(obj); */
+ GObjectClass *root_class = (GObjectClass *) parent_class;
+
+ LOG_V( "\n");
+
+ /* Chain up parent */
+ if (root_class->finalize) {
+ root_class->finalize(obj);
+ }
+}
+
+MixVideoFormatEnc_MPEG4 *
+mix_videoformatenc_mpeg4_ref(MixVideoFormatEnc_MPEG4 * mix) {
+ return (MixVideoFormatEnc_MPEG4 *) g_object_ref(G_OBJECT(mix));
+}
+
+/*MPEG-4:2 vmethods implementation */
+MIX_RESULT mix_videofmtenc_mpeg4_getcaps(MixVideoFormatEnc *mix, GString *msg) {
+
+ /* TODO: add codes for MPEG-4:2 */
+
+ /* TODO: decide if we need to chainup parent method.
+ * if we do, the following is the code:
+ */
+
+ LOG_V( "mix_videofmtenc_mpeg4_getcaps\n");
+
+ if (mix == NULL) {
+ LOG_E( "mix == NULL\n");
+ return MIX_RESULT_NULL_PTR;
+ }
+
+
+ if (parent_class->getcaps) {
+ return parent_class->getcaps(mix, msg);
+ }
+ return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videofmtenc_mpeg4_initialize(MixVideoFormatEnc *mix,
+ MixVideoConfigParamsEnc * config_params_enc,
+ MixFrameManager * frame_mgr,
+ MixBufferPool * input_buf_pool,
+ MixSurfacePool ** surface_pool,
+ VADisplay va_display ) {
+
+ MIX_RESULT ret = MIX_RESULT_SUCCESS;
+ MixVideoFormatEnc *parent = NULL;
+ MixVideoConfigParamsEncMPEG4 * config_params_enc_mpeg4;
+
+ VAStatus va_status = VA_STATUS_SUCCESS;
+ VASurfaceID * surfaces;
+
+ gint va_max_num_profiles, va_max_num_entrypoints, va_max_num_attribs;
+ gint va_num_profiles, va_num_entrypoints;
+
+ VAProfile *va_profiles = NULL;
+ VAEntrypoint *va_entrypoints = NULL;
+ VAConfigAttrib va_attrib[2];
+ guint index;
+
+
+ /*frame_mgr and input_buf_pool is reservered for future use*/
+
+ if (mix == NULL || config_params_enc == NULL || va_display == NULL) {
+ LOG_E(
+ "mix == NULL || config_params_enc == NULL || va_display == NULL\n");
+ return MIX_RESULT_NULL_PTR;
+ }
+
+ LOG_V( "begin\n");
+
+
+ //TODO additional parameter checking
+
+ /* Chainup parent method. */
+#if 1
+ if (parent_class->initialize) {
+ ret = parent_class->initialize(mix, config_params_enc,
+ frame_mgr, input_buf_pool, surface_pool,
+ va_display);
+ }
+
+ if (ret != MIX_RESULT_SUCCESS)
+ {
+ return ret;
+ }
+
+#endif //disable it currently
+
+ if (MIX_IS_VIDEOFORMATENC_MPEG4(mix))
+ {
+ parent = MIX_VIDEOFORMATENC(&(mix->parent));
+ MixVideoFormatEnc_MPEG4 *self = MIX_VIDEOFORMATENC_MPEG4(mix);
+
+ if (MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4 (config_params_enc)) {
+ config_params_enc_mpeg4 =
+ MIX_VIDEOCONFIGPARAMSENC_MPEG4 (config_params_enc);
+ } else {
+ LOG_V(
+ "mix_videofmtenc_mpeg4_initialize: no mpeg4 config params found\n");
+ return MIX_RESULT_FAIL;
+ }
+
+ g_mutex_lock(parent->objectlock);
+
+ LOG_V(
+ "Start to get properities from MPEG-4:2 params\n");
+
+ /* get properties from MPEG4 params Object, which is special to MPEG4 format*/
+
+ ret = mix_videoconfigparamsenc_mpeg4_get_profile_level (config_params_enc_mpeg4,
+ &self->profile_and_level_indication);
+
+ if (ret != MIX_RESULT_SUCCESS) {
+ //TODO cleanup
+ LOG_E(
+ "Failed to mix_videoconfigparamsenc_mpeg4_get_profile_level\n");
+ g_mutex_unlock(parent->objectlock);
+ return MIX_RESULT_FAIL;
+ }
+
+ ret = mix_videoconfigparamsenc_mpeg4_get_fixed_vti (config_params_enc_mpeg4,
+ &(self->fixed_vop_time_increment));
+
+ if (ret != MIX_RESULT_SUCCESS) {
+ //TODO cleanup
+ LOG_E(
+ "Failed to mix_videoconfigparamsenc_mpeg4_get_fixed_vti\n");
+ g_mutex_unlock(parent->objectlock);
+ return MIX_RESULT_FAIL;
+ }
+
+ ret = mix_videoconfigparamsenc_mpeg4_get_dlk (config_params_enc_mpeg4,
+ &(self->disable_deblocking_filter_idc));
+
+ if (ret != MIX_RESULT_SUCCESS) {
+ //TODO cleanup
+ LOG_E(
+ "Failed to config_params_enc_mpeg4\n");
+ g_mutex_unlock(parent->objectlock);
+ return MIX_RESULT_FAIL;
+ }
+
+
+ LOG_V(
+ "======MPEG4 Encode Object properities======:\n");
+
+ LOG_I( "self->profile_and_level_indication = %d\n",
+ self->profile_and_level_indication);
+ LOG_I( "self->fixed_vop_time_increment = %d\n\n",
+ self->fixed_vop_time_increment);
+
+ LOG_V(
+ "Get properities from params done\n");
+
+
+ //display = XOpenDisplay(NULL);
+ //va_display = vaGetDisplay (videoencobj->display);
+
+ parent->va_display = va_display;
+
+ LOG_V( "Get Display\n");
+ LOG_I( "Display = 0x%08x\n",
+ (guint)va_display);
+
+ //va_status = vaInitialize(va_display, &va_major_ver, &va_minor_ver);
+ //g_print ("vaInitialize va_status = %d\n", va_status);
+
+
+#if 0
+ /* query the vender information, can ignore*/
+ va_vendor = vaQueryVendorString (va_display);
+ LOG_I( "Vendor = %s\n",
+ va_vendor);
+#endif
+
+ /*get the max number for profiles/entrypoints/attribs*/
+ va_max_num_profiles = vaMaxNumProfiles(va_display);
+ LOG_I( "va_max_num_profiles = %d\n",
+ va_max_num_profiles);
+
+ va_max_num_entrypoints = vaMaxNumEntrypoints(va_display);
+ LOG_I( "va_max_num_entrypoints = %d\n",
+ va_max_num_entrypoints);
+
+ va_max_num_attribs = vaMaxNumConfigAttributes(va_display);
+ LOG_I( "va_max_num_attribs = %d\n",
+ va_max_num_attribs);
+
+ va_profiles = g_malloc(sizeof(VAProfile)*va_max_num_profiles);
+ va_entrypoints = g_malloc(sizeof(VAEntrypoint)*va_max_num_entrypoints);
+
+ if (va_profiles == NULL || va_entrypoints ==NULL)
+ {
+ LOG_E(
+ "!va_profiles || !va_entrypoints\n");
+ g_mutex_unlock(parent->objectlock);
+ return MIX_RESULT_NO_MEMORY;
+ }
+
+ LOG_I(
+ "va_profiles = 0x%08x\n", (guint)va_profiles);
+
+ LOG_V( "vaQueryConfigProfiles\n");
+
+
+ va_status = vaQueryConfigProfiles (va_display, va_profiles, &va_num_profiles);
+
+ if (va_status != VA_STATUS_SUCCESS)
+ {
+ LOG_E(
+ "Failed to call vaQueryConfigProfiles\n");
+ g_free(va_profiles);
+ g_free (va_entrypoints);
+ g_mutex_unlock(parent->objectlock);
+ return MIX_RESULT_FAIL;
+ }
+
+ LOG_V( "vaQueryConfigProfiles Done\n");
+
+
+
+ /*check whether profile is supported*/
+ for(index= 0; index < va_num_profiles; index++) {
+ if(parent->va_profile == va_profiles[index])
+ break;
+ }
+
+ if(index == va_num_profiles)
+ {
+ LOG_E( "Profile not supported\n");
+ g_free(va_profiles);
+ g_free (va_entrypoints);
+ g_mutex_unlock(parent->objectlock);
+ return MIX_RESULT_FAIL; //Todo, add error handling here
+ }
+
+ LOG_V( "vaQueryConfigEntrypoints\n");
+
+
+ /*Check entry point*/
+ va_status = vaQueryConfigEntrypoints(va_display,
+ parent->va_profile,
+ va_entrypoints, &va_num_entrypoints);
+
+ if (va_status != VA_STATUS_SUCCESS)
+ {
+ LOG_E(
+ "Failed to call vaQueryConfigEntrypoints\n");
+ g_free(va_profiles);
+ g_free (va_entrypoints);
+ g_mutex_unlock(parent->objectlock);
+ return MIX_RESULT_FAIL;
+ }
+
+ for (index = 0; index < va_num_entrypoints; index ++) {
+ if (va_entrypoints[index] == VAEntrypointEncSlice) {
+ break;
+ }
+ }
+
+ if (index == va_num_entrypoints) {
+ LOG_E( "Entrypoint not found\n");
+ g_free(va_profiles);
+ g_free (va_entrypoints);
+ g_mutex_unlock(parent->objectlock);
+ return MIX_RESULT_FAIL; //Todo, add error handling here
+ }
+
+
+ /*free profiles and entrypoints*/
+ g_free(va_profiles);
+ g_free (va_entrypoints);
+
+ va_attrib[0].type = VAConfigAttribRTFormat;
+ va_attrib[1].type = VAConfigAttribRateControl;
+
+ LOG_V( "vaGetConfigAttributes\n");
+
+ va_status = vaGetConfigAttributes(va_display, parent->va_profile,
+ parent->va_entrypoint,
+ &va_attrib[0], 2);
+
+ if (va_status != VA_STATUS_SUCCESS)
+ {
+ LOG_E(
+ "Failed to call vaGetConfigAttributes\n");
+ g_mutex_unlock(parent->objectlock);
+ return MIX_RESULT_FAIL;
+ }
+
+ if ((va_attrib[0].value & parent->va_format) == 0) {
+ LOG_E( "Matched format not found\n");
+ g_mutex_unlock(parent->objectlock);
+ return MIX_RESULT_FAIL; //Todo, add error handling here
+ }
+
+
+ if ((va_attrib[1].value & parent->va_rcmode) == 0) {
+ LOG_E( "RC mode not found\n");
+ g_mutex_unlock(parent->objectlock);
+ return MIX_RESULT_FAIL; //Todo, add error handling here
+ }
+
+ va_attrib[0].value = parent->va_format; //VA_RT_FORMAT_YUV420;
+ va_attrib[1].value = parent->va_rcmode;
+
+ LOG_V( "======VA Configuration======\n");
+
+ LOG_I( "profile = %d\n",
+ parent->va_profile);
+ LOG_I( "va_entrypoint = %d\n",
+ parent->va_entrypoint);
+ LOG_I( "va_attrib[0].type = %d\n",
+ va_attrib[0].type);
+ LOG_I( "va_attrib[1].type = %d\n",
+ va_attrib[1].type);
+ LOG_I( "va_attrib[0].value (Format) = %d\n",
+ va_attrib[0].value);
+ LOG_I( "va_attrib[1].value (RC mode) = %d\n",
+ va_attrib[1].value);
+
+ LOG_V( "vaCreateConfig\n");
+
+ va_status = vaCreateConfig(va_display, parent->va_profile,
+ parent->va_entrypoint,
+ &va_attrib[0], 2, &(parent->va_config));
+
+ if (va_status != VA_STATUS_SUCCESS)
+ {
+ LOG_E( "Failed vaCreateConfig\n");
+ g_mutex_unlock(parent->objectlock);
+ return MIX_RESULT_FAIL;
+ }
+
+ /*TODO: compute the surface number*/
+ int numSurfaces;
+
+ if (parent->share_buf_mode) {
+ numSurfaces = 2;
+ }
+ else {
+ numSurfaces = 8;
+ parent->ci_frame_num = 0;
+ }
+
+ self->surface_num = numSurfaces + parent->ci_frame_num;
+
+ surfaces = g_malloc(sizeof(VASurfaceID)*numSurfaces);
+
+ if (surfaces == NULL)
+ {
+ LOG_E(
+ "Failed allocate surface\n");
+ g_mutex_unlock(parent->objectlock);
+ return MIX_RESULT_NO_MEMORY;
+ }
+
+ LOG_V( "vaCreateSurfaces\n");
+
+ va_status = vaCreateSurfaces(va_display, parent->picture_width,
+ parent->picture_height, parent->va_format,
+ numSurfaces, surfaces);
+ //TODO check vret and return fail if needed
+
+ if (va_status != VA_STATUS_SUCCESS)
+ {
+ LOG_E(
+ "Failed vaCreateSurfaces\n");
+ g_mutex_unlock(parent->objectlock);
+ return MIX_RESULT_FAIL;
+ }
+
+ if (parent->share_buf_mode) {
+
+ LOG_V(
+ "We are in share buffer mode!\n");
+ self->ci_shared_surfaces =
+ g_malloc(sizeof(VASurfaceID) * parent->ci_frame_num);
+
+ if (self->ci_shared_surfaces == NULL)
+ {
+ LOG_E(
+ "Failed allocate shared surface\n");
+ g_mutex_unlock(parent->objectlock);
+ return MIX_RESULT_NO_MEMORY;
+ }
+
+ guint index;
+ for(index = 0; index < parent->ci_frame_num; index++) {
+
+ LOG_I( "ci_frame_id = %lu\n",
+ parent->ci_frame_id[index]);
+
+ LOG_V(
+ "vaCreateSurfaceFromCIFrame\n");
+
+ va_status = vaCreateSurfaceFromCIFrame(va_display,
+ (gulong) (parent->ci_frame_id[index]),
+ &self->ci_shared_surfaces[index]);
+ if (va_status != VA_STATUS_SUCCESS)
+ {
+ LOG_E(
+ "Failed to vaCreateSurfaceFromCIFrame\n");
+ g_mutex_unlock(parent->objectlock);
+ return MIX_RESULT_FAIL;
+ }
+ }
+
+ LOG_V(
+ "vaCreateSurfaceFromCIFrame Done\n");
+
+ }// if (parent->share_buf_mode)
+
+ self->surfaces = g_malloc(sizeof(VASurfaceID) * self->surface_num);
+
+ if (self->surfaces == NULL)
+ {
+ LOG_E(
+ "Failed allocate private surface\n");
+ g_free (surfaces);
+ g_mutex_unlock(parent->objectlock);
+ return MIX_RESULT_NO_MEMORY;
+ }
+
+ if (parent->share_buf_mode) {
+ /*shared surfaces should be put in pool first,
+ * because we will get it accoring to CI index*/
+ for(index = 0; index < parent->ci_frame_num; index++)
+ self->surfaces[index] = self->ci_shared_surfaces[index];
+ }
+
+ for(index = 0; index < numSurfaces; index++) {
+ self->surfaces[index + parent->ci_frame_num] = surfaces[index];
+ }
+
+ LOG_V( "assign surface Done\n");
+ LOG_I( "Created %d libva surfaces\n",
+ numSurfaces + parent->ci_frame_num);
+
+#if 0 //current put this in gst
+ images = g_malloc(sizeof(VAImage)*numSurfaces);
+ if (images == NULL)
+ {
+ g_mutex_unlock(parent->objectlock);
+ return MIX_RESULT_FAIL;
+ }
+
+ for (index = 0; index < numSurfaces; index++) {
+ //Derive an VAImage from an existing surface.
+ //The image buffer can then be mapped/unmapped for CPU access
+ va_status = vaDeriveImage(va_display, surfaces[index],
+ &images[index]);
+ }
+#endif
+
+ LOG_V( "mix_surfacepool_new\n");
+
+ parent->surfacepool = mix_surfacepool_new();
+ if (surface_pool)
+ *surface_pool = parent->surfacepool;
+ //which is useful to check before encode
+
+ if (parent->surfacepool == NULL)
+ {
+ LOG_E(
+ "Failed to mix_surfacepool_new\n");
+ g_free (surfaces);
+ g_mutex_unlock(parent->objectlock);
+ return MIX_RESULT_FAIL;
+ }
+
+ LOG_V(
+ "mix_surfacepool_initialize\n");
+
+ ret = mix_surfacepool_initialize(parent->surfacepool,
+ self->surfaces, parent->ci_frame_num + numSurfaces);
+
+ switch (ret)
+ {
+ case MIX_RESULT_SUCCESS:
+ break;
+ case MIX_RESULT_ALREADY_INIT:
+ //TODO cleanup and/or retry
+ g_free (surfaces);
+ g_mutex_unlock(parent->objectlock);
+ return MIX_RESULT_FAIL;
+ default:
+ break;
+ }
+
+
+ //Initialize and save the VA context ID
+ LOG_V( "vaCreateContext\n");
+
+ va_status = vaCreateContext(va_display, parent->va_config,
+ parent->picture_width, parent->picture_height,
+ VA_PROGRESSIVE, self->surfaces, parent->ci_frame_num + numSurfaces,
+ &(parent->va_context));
+
+ LOG_I(
+ "Created libva context width %d, height %d\n",
+ parent->picture_width, parent->picture_height);
+
+ if (va_status != VA_STATUS_SUCCESS)
+ {
+ LOG_E(
+ "Failed to vaCreateContext\n");
+ LOG_I( "va_status = %d\n",
+ (guint)va_status);
+ g_free (surfaces);
+ g_mutex_unlock(parent->objectlock);
+ return MIX_RESULT_FAIL;
+ }
+
+ guint max_size = 0;
+ ret = mix_videofmtenc_mpeg4_get_max_encoded_buf_size (parent, &max_size);
+ if (ret != MIX_RESULT_SUCCESS)
+ {
+ LOG_E(
+ "Failed to mix_videofmtenc_mpeg4_get_max_encoded_buf_size\n");
+ g_free (surfaces);
+ g_mutex_unlock(parent->objectlock);
+ return MIX_RESULT_FAIL;
+
+ }
+
+ /*Create coded buffer for output*/
+ va_status = vaCreateBuffer (va_display, parent->va_context,
+ VAEncCodedBufferType,
+ self->coded_buf_size, //
+ 1, NULL,
+ &self->coded_buf);
+
+ if (va_status != VA_STATUS_SUCCESS)
+ {
+ LOG_E(
+ "Failed to vaCreateBuffer: VAEncCodedBufferType\n");
+ g_free (surfaces);
+ g_mutex_unlock(parent->objectlock);
+ return MIX_RESULT_FAIL;
+ }
+
+#ifdef SHOW_SRC
+ Display * display = XOpenDisplay (NULL);
+
+ LOG_I( "display = 0x%08x\n",
+ (guint) display);
+ win = XCreateSimpleWindow(display, RootWindow(display, 0), 0, 0,
+ parent->picture_width, parent->picture_height, 0, 0,
+ WhitePixel(display, 0));
+ XMapWindow(display, win);
+ XSelectInput(display, win, KeyPressMask | StructureNotifyMask);
+
+ XSync(display, False);
+ LOG_I( "va_display = 0x%08x\n",
+ (guint) va_display);
+
+#endif /* SHOW_SRC */
+
+ parent->initialized = TRUE;
+
+ g_mutex_unlock(parent->objectlock);
+ g_free (surfaces);
+
+ }
+ else
+ {
+ LOG_E(
+ "not MPEG4 video encode Object\n");
+ return MIX_RESULT_FAIL;
+
+ }
+
+ LOG_V( "end\n");
+
+ return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videofmtenc_mpeg4_encode(MixVideoFormatEnc *mix, MixBuffer * bufin[],
+ gint bufincnt, MixIOVec * iovout[], gint iovoutcnt,
+ MixVideoEncodeParams * encode_params) {
+
+ MIX_RESULT ret = MIX_RESULT_SUCCESS;
+ MixVideoFormatEnc *parent = NULL;
+
+ LOG_V( "Begin\n");
+
+ /*currenly only support one input and output buffer*/
+ //TODO: params i
+
+ if (bufincnt != 1 || iovoutcnt != 1) {
+ LOG_E(
+ "buffer count not equel to 1\n");
+ LOG_E(
+ "maybe some exception occurs\n");
+ }
+
+ if (mix == NULL ||bufin[0] == NULL || iovout[0] == NULL) {
+ LOG_E(
+ "!mix || !bufin[0] ||!iovout[0]\n");
+ return MIX_RESULT_NULL_PTR;
+ }
+
+ //TODO: encode_params is reserved here for future usage.
+
+ /* TODO: decide if we need to chainup parent method.
+ * * * if we do, the following is the code:
+ * */
+
+#if 0
+ if (parent_class->encode) {
+ return parent_class->encode(mix, bufin, bufincnt, iovout,
+ iovoutcnt, encode_params);
+ }
+#endif
+
+ if (MIX_IS_VIDEOFORMATENC_MPEG4(mix))
+ {
+
+ parent = MIX_VIDEOFORMATENC(&(mix->parent));
+ MixVideoFormatEnc_MPEG4 *self = MIX_VIDEOFORMATENC_MPEG4 (mix);
+
+ LOG_V( "Locking\n");
+ g_mutex_lock(parent->objectlock);
+
+
+ //TODO: also we could move some encode Preparation work to here
+
+ LOG_V(
+ "mix_videofmtenc_mpeg4_process_encode\n");
+
+ ret = mix_videofmtenc_mpeg4_process_encode (self,
+ bufin[0], iovout[0]);
+ if (ret != MIX_RESULT_SUCCESS)
+ {
+ LOG_E(
+ "Failed mix_videofmtenc_mpeg4_process_encode\n");
+ return MIX_RESULT_FAIL;
+ }
+
+
+ LOG_V( "UnLocking\n");
+
+ g_mutex_unlock(parent->objectlock);
+ }
+ else
+ {
+ LOG_E(
+ "not MPEG4 video encode Object\n");
+ return MIX_RESULT_FAIL;
+ }
+
+ LOG_V( "end\n");
+
+ return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videofmtenc_mpeg4_flush(MixVideoFormatEnc *mix) {
+
+ //MIX_RESULT ret = MIX_RESULT_SUCCESS;
+
+ LOG_V( "Begin\n");
+
+ if (mix == NULL) {
+ LOG_E( "mix == NULL\n");
+ return MIX_RESULT_NULL_PTR;
+ }
+
+
+ /*not chain to parent flush func*/
+#if 0
+ if (parent_class->flush) {
+ return parent_class->flush(mix, msg);
+ }
+#endif
+
+ MixVideoFormatEnc_MPEG4 *self = MIX_VIDEOFORMATENC_MPEG4(mix);
+
+ g_mutex_lock(mix->objectlock);
+
+ /*unref the current source surface*/
+ if (self->cur_fame != NULL)
+ {
+ mix_videoframe_unref (self->cur_fame);
+ self->cur_fame = NULL;
+ }
+
+ /*unref the reconstructed surface*/
+ if (self->rec_fame != NULL)
+ {
+ mix_videoframe_unref (self->rec_fame);
+ self->rec_fame = NULL;
+ }
+
+ /*unref the reference surface*/
+ if (self->ref_fame != NULL)
+ {
+ mix_videoframe_unref (self->ref_fame);
+ self->ref_fame = NULL;
+ }
+
+ /*reset the properities*/
+ self->encoded_frames = 0;
+ self->pic_skipped = FALSE;
+ self->is_intra = TRUE;
+
+ g_mutex_unlock(mix->objectlock);
+
+ LOG_V( "end\n");
+
+ return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videofmtenc_mpeg4_eos(MixVideoFormatEnc *mix) {
+
+ /* TODO: add codes for MPEG-4:2 */
+
+ /* TODO: decide if we need to chainup parent method.
+ * if we do, the following is the code:
+ */
+
+ LOG_V( "\n");
+
+ if (mix == NULL) {
+ LOG_E( "mix == NULL\n");
+ return MIX_RESULT_NULL_PTR;
+ }
+
+ if (parent_class->eos) {
+ return parent_class->eos(mix);
+ }
+ return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videofmtenc_mpeg4_deinitialize(MixVideoFormatEnc *mix) {
+
+ MixVideoFormatEnc *parent = NULL;
+ VAStatus va_status;
+
+ LOG_V( "Begin\n");
+
+ if (mix == NULL) {
+ LOG_E( "mix == NULL\n");
+ return MIX_RESULT_NULL_PTR;
+ }
+
+ parent = MIX_VIDEOFORMATENC(&(mix->parent));
+ MixVideoFormatEnc_MPEG4 *self = MIX_VIDEOFORMATENC_MPEG4(mix);
+
+ LOG_V( "Release frames\n");
+
+ g_mutex_lock(parent->objectlock);
+
+#if 0
+ /*unref the current source surface*/
+ if (self->cur_fame != NULL)
+ {
+ mix_videoframe_unref (self->cur_fame);
+ self->cur_fame = NULL;
+ }
+#endif
+
+ /*unref the reconstructed surface*/
+ if (self->rec_fame != NULL)
+ {
+ mix_videoframe_unref (self->rec_fame);
+ self->rec_fame = NULL;
+ }
+
+ /*unref the reference surface*/
+ if (self->ref_fame != NULL)
+ {
+ mix_videoframe_unref (self->ref_fame);
+ self->ref_fame = NULL;
+ }
+
+ LOG_V( "Release surfaces\n");
+
+ if (self->ci_shared_surfaces)
+ {
+ g_free (self->ci_shared_surfaces);
+ self->ci_shared_surfaces = NULL;
+ }
+
+ if (self->surfaces)
+ {
+ g_free (self->surfaces);
+ self->surfaces = NULL;
+ }
+
+ LOG_V( "vaDestroyContext\n");
+
+ va_status = vaDestroyContext (parent->va_display, parent->va_context);
+ if (va_status != VA_STATUS_SUCCESS)
+ {
+ LOG_E(
+ "Failed vaDestroyContext\n");
+ g_mutex_unlock(parent->objectlock);
+ return MIX_RESULT_FAIL;
+ }
+
+ LOG_V( "vaDestroyConfig\n");
+
+ va_status = vaDestroyConfig (parent->va_display, parent->va_config);
+ if (va_status != VA_STATUS_SUCCESS)
+ {
+ LOG_E(
+ "Failed vaDestroyConfig\n");
+ g_mutex_unlock(parent->objectlock);
+ return MIX_RESULT_FAIL;
+ }
+
+ parent->initialized = TRUE;
+
+ g_mutex_unlock(parent->objectlock);
+
+#if 1
+ if (parent_class->deinitialize) {
+ return parent_class->deinitialize(mix);
+ }
+#endif
+
+ //Most stuff is cleaned up in parent_class->finalize()
+
+ LOG_V( "end\n");
+
+ return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videofmtenc_mpeg4_send_seq_params (MixVideoFormatEnc_MPEG4 *mix)
+{
+
+ VAStatus va_status;
+ VAEncSequenceParameterBufferMPEG4 mpeg4_seq_param;
+ VABufferID seq_para_buf_id;
+
+
+ MixVideoFormatEnc *parent = NULL;
+
+ if (mix == NULL)
+ return MIX_RESULT_NULL_PTR;
+
+ LOG_V( "Begin\n\n");
+
+ if (MIX_IS_VIDEOFORMATENC_MPEG4(mix))
+ {
+ parent = MIX_VIDEOFORMATENC(&(mix->parent));
+
+ /*set up the sequence params for HW*/
+ mpeg4_seq_param.profile_and_level_indication = mix->profile_and_level_indication; //TODO, hard code now
+ mpeg4_seq_param.video_object_layer_width= parent->picture_width;
+ mpeg4_seq_param.video_object_layer_height= parent->picture_height;
+ mpeg4_seq_param.vop_time_increment_resolution =
+ (unsigned int) (parent->frame_rate_num + parent->frame_rate_denom /2 ) / parent->frame_rate_denom;
+ mpeg4_seq_param.fixed_vop_time_increment= mix->fixed_vop_time_increment;
+ mpeg4_seq_param.bits_per_second= parent->bitrate;
+ mpeg4_seq_param.frame_rate =
+ (unsigned int) (parent->frame_rate_num + parent->frame_rate_denom /2 ) / parent->frame_rate_denom;
+ mpeg4_seq_param.initial_qp = parent->initial_qp;
+ mpeg4_seq_param.min_qp = parent->min_qp;
+ mpeg4_seq_param.intra_period = parent->intra_period;
+
+
+ //mpeg4_seq_param.fixed_vop_rate = 30;
+
+
+
+ LOG_V(
+ "===mpeg4 sequence params===\n");
+
+ LOG_I( "profile_and_level_indication = %d\n",
+ (guint)mpeg4_seq_param.profile_and_level_indication);
+ LOG_I( "intra_period = %d\n",
+ mpeg4_seq_param.intra_period);
+ LOG_I( "video_object_layer_width = %d\n",
+ mpeg4_seq_param.video_object_layer_width);
+ LOG_I( "video_object_layer_height = %d\n",
+ mpeg4_seq_param.video_object_layer_height);
+ LOG_I( "vop_time_increment_resolution = %d\n",
+ mpeg4_seq_param.vop_time_increment_resolution);
+ LOG_I( "fixed_vop_rate = %d\n",
+ mpeg4_seq_param.fixed_vop_rate);
+ LOG_I( "fixed_vop_time_increment = %d\n",
+ mpeg4_seq_param.fixed_vop_time_increment);
+ LOG_I( "bitrate = %d\n",
+ mpeg4_seq_param.bits_per_second);
+ LOG_I( "frame_rate = %d\n",
+ mpeg4_seq_param.frame_rate);
+ LOG_I( "initial_qp = %d\n",
+ mpeg4_seq_param.initial_qp);
+ LOG_I( "min_qp = %d\n",
+ mpeg4_seq_param.min_qp);
+ LOG_I( "intra_period = %d\n\n",
+ mpeg4_seq_param.intra_period);
+
+ va_status = vaCreateBuffer(parent->va_display, parent->va_context,
+ VAEncSequenceParameterBufferType,
+ sizeof(mpeg4_seq_param),
+ 1, &mpeg4_seq_param,
+ &seq_para_buf_id);
+ if (va_status != VA_STATUS_SUCCESS)
+ {
+ LOG_E(
+ "Failed to vaCreateBuffer\n");
+ return MIX_RESULT_FAIL;
+ }
+
+ va_status = vaRenderPicture(parent->va_display, parent->va_context,
+ &seq_para_buf_id, 1);
+ if (va_status != VA_STATUS_SUCCESS)
+ {
+ LOG_E(
+ "Failed to vaRenderPicture\n");
+ LOG_I( "va_status = %d\n", va_status);
+ return MIX_RESULT_FAIL;
+ }
+ }
+ else
+ {
+ LOG_E(
+ "not MPEG4 video encode Object\n");
+ return MIX_RESULT_FAIL;
+ }
+
+ LOG_V( "end\n");
+
+ return MIX_RESULT_SUCCESS;
+
+
+}
+
+MIX_RESULT mix_videofmtenc_mpeg4_send_picture_parameter (MixVideoFormatEnc_MPEG4 *mix)
+{
+ VAStatus va_status;
+ VAEncPictureParameterBufferMPEG4 mpeg4_pic_param;
+ MixVideoFormatEnc *parent = NULL;
+
+ if (mix == NULL)
+ return MIX_RESULT_NULL_PTR;
+
+ LOG_V( "Begin\n\n");
+
+#if 0 //not needed currently
+ MixVideoConfigParamsEncMPEG4 * params_mpeg4
+ = MIX_VIDEOCONFIGPARAMSENC_MPEG4 (config_params_enc);
+#endif
+
+ if (MIX_IS_VIDEOFORMATENC_MPEG4(mix)) {
+
+ parent = MIX_VIDEOFORMATENC(&(mix->parent));
+
+ /*set picture params for HW*/
+ mpeg4_pic_param.reference_picture = mix->ref_fame->frame_id;
+ mpeg4_pic_param.reconstructed_picture = mix->rec_fame->frame_id;
+ mpeg4_pic_param.coded_buf = mix->coded_buf;
+ mpeg4_pic_param.picture_width = parent->picture_width;
+ mpeg4_pic_param.picture_height = parent->picture_height;
+ mpeg4_pic_param.vop_time_increment= mix->encoded_frames;
+ mpeg4_pic_param.picture_type = mix->is_intra ? VAEncPictureTypeIntra : VAEncPictureTypePredictive;
+
+
+
+ LOG_V(
+ "======mpeg4 picture params======\n");
+ LOG_I( "reference_picture = 0x%08x\n",
+ mpeg4_pic_param.reference_picture);
+ LOG_I( "reconstructed_picture = 0x%08x\n",
+ mpeg4_pic_param.reconstructed_picture);
+ LOG_I( "coded_buf = 0x%08x\n",
+ mpeg4_pic_param.coded_buf);
+ LOG_I( "picture_width = %d\n",
+ mpeg4_pic_param.picture_width);
+ LOG_I( "picture_height = %d\n",
+ mpeg4_pic_param.picture_height);
+ LOG_I( "vop_time_increment = %d\n",
+ mpeg4_pic_param.vop_time_increment);
+ LOG_I( "picture_type = %d\n\n",
+ mpeg4_pic_param.picture_type);
+
+ va_status = vaCreateBuffer(parent->va_display, parent->va_context,
+ VAEncPictureParameterBufferType,
+ sizeof(mpeg4_pic_param),
+ 1,&mpeg4_pic_param,
+ &mix->pic_param_buf);
+
+ if (va_status != VA_STATUS_SUCCESS)
+ {
+ LOG_E(
+ "Failed to vaCreateBuffer\n");
+ return MIX_RESULT_FAIL;
+ }
+
+
+ va_status = vaRenderPicture(parent->va_display, parent->va_context,
+ &mix->pic_param_buf, 1);
+
+ if (va_status != VA_STATUS_SUCCESS)
+ {
+ LOG_E(
+ "Failed to vaRenderPicture\n");
+ LOG_I( "va_status = %d\n", va_status);
+ return MIX_RESULT_FAIL;
+ }
+ }
+ else
+ {
+ LOG_E(
+ "not MPEG4 video encode Object\n");
+ return MIX_RESULT_FAIL;
+ }
+
+ LOG_V( "end\n");
+ return MIX_RESULT_SUCCESS;
+
+}
+
+
+MIX_RESULT mix_videofmtenc_mpeg4_send_slice_parameter (MixVideoFormatEnc_MPEG4 *mix)
+{
+ VAStatus va_status;
+
+ guint slice_height;
+ guint slice_index;
+ guint slice_height_in_mb;
+
+ if (mix == NULL)
+ return MIX_RESULT_NULL_PTR;
+
+ LOG_V( "Begin\n\n");
+
+
+ MixVideoFormatEnc *parent = NULL;
+
+ if (MIX_IS_VIDEOFORMATENC_MPEG4(mix))
+ {
+ parent = MIX_VIDEOFORMATENC(&(mix->parent));
+
+ slice_height = parent->picture_height;
+
+ slice_height += 15;
+ slice_height &= (~15);
+
+ VAEncSliceParameterBuffer slice_param;
+ slice_index = 0;
+ slice_height_in_mb = slice_height / 16;
+ slice_param.start_row_number = 0;
+ slice_param.slice_height = slice_height / 16;
+ slice_param.slice_flags.bits.is_intra = mix->is_intra;
+ slice_param.slice_flags.bits.disable_deblocking_filter_idc
+ = mix->disable_deblocking_filter_idc;
+
+ LOG_V(
+ "======mpeg4 slice params======\n");
+
+ LOG_I( "start_row_number = %d\n",
+ (gint) slice_param.start_row_number);
+ LOG_I( "slice_height_in_mb = %d\n",
+ (gint) slice_param.slice_height);
+ LOG_I( "slice.is_intra = %d\n",
+ (gint) slice_param.slice_flags.bits.is_intra);
+ LOG_I(
+ "disable_deblocking_filter_idc = %d\n\n",
+ (gint) mix->disable_deblocking_filter_idc);
+
+ va_status = vaCreateBuffer (parent->va_display, parent->va_context,
+ VAEncSliceParameterBufferType,
+ sizeof(VAEncSliceParameterBuffer),
+ 1, &slice_param,
+ &mix->slice_param_buf);
+
+ if (va_status != VA_STATUS_SUCCESS)
+ {
+ LOG_E(
+ "Failed to vaCreateBuffer\n");
+ return MIX_RESULT_FAIL;
+ }
+
+ va_status = vaRenderPicture(parent->va_display, parent->va_context,
+ &mix->slice_param_buf, 1);
+
+ if (va_status != VA_STATUS_SUCCESS)
+ {
+ LOG_E(
+ "Failed to vaRenderPicture\n");
+ return MIX_RESULT_FAIL;
+ }
+
+ }
+ else
+ {
+ LOG_E(
+ "not MPEG4 video encode Object\n");
+ return MIX_RESULT_FAIL;
+ }
+
+ LOG_V( "end\n");
+
+ return MIX_RESULT_SUCCESS;
+}
+
+/**
+ * mix_videofmtenc_mpeg4_process_encode:
+ * @mix: MPEG-4 encoder instance
+ * @bufin: input buffer. In non-share-buffer mode this is raw planar YUV420
+ *         data; in share-buffer mode it carries a CI frame index (guint).
+ * @iovout: receives the coded bitstream. If @iovout->data is NULL it is
+ *          allocated here with g_malloc() and ownership passes to the caller.
+ *
+ * Encode one frame: select/fill the source surface, submit sequence
+ * parameters (first frame only), picture and slice parameters, run the VA
+ * pipeline, then copy the coded data out of the VA coded buffer.
+ *
+ * Returns: MIX_RESULT_SUCCESS on success, MIX_RESULT_NULL_PTR for NULL
+ * arguments, MIX_RESULT_NO_MEMORY if the output allocation fails,
+ * otherwise MIX_RESULT_FAIL.
+ */
+MIX_RESULT mix_videofmtenc_mpeg4_process_encode (MixVideoFormatEnc_MPEG4 *mix,
+        MixBuffer * bufin, MixIOVec * iovout)
+{
+
+    MIX_RESULT ret = MIX_RESULT_SUCCESS;
+    VAStatus va_status = VA_STATUS_SUCCESS;
+    VADisplay va_display = NULL;
+    VAContextID va_context;
+    gulong surface = 0;
+    guint16 width, height;
+
+    MixVideoFrame * tmp_fame;
+    guint8 *buf;
+
+    if ((mix == NULL) || (bufin == NULL) || (iovout == NULL)) {
+        LOG_E(
+                "mix == NULL || bufin == NULL || iovout == NULL\n");
+        return MIX_RESULT_NULL_PTR;
+    }
+
+    LOG_V( "Begin\n");
+
+    if (MIX_IS_VIDEOFORMATENC_MPEG4(mix))
+    {
+
+        MixVideoFormatEnc *parent = MIX_VIDEOFORMATENC(&(mix->parent));
+
+        va_display = parent->va_display;
+        va_context = parent->va_context;
+        width = parent->picture_width;
+        height = parent->picture_height;
+
+        LOG_I( "encoded_frames = %d\n",
+                mix->encoded_frames);
+        LOG_I( "is_intra = %d\n",
+                mix->is_intra);
+        LOG_I( "ci_frame_id = 0x%08x\n",
+                (guint) parent->ci_frame_id);
+
+        /* determine the picture type: force an intra frame at every
+         * intra_period boundary */
+        if ((mix->encoded_frames % parent->intra_period) == 0) {
+            mix->is_intra = TRUE;
+        } else {
+            mix->is_intra = FALSE;
+        }
+
+        LOG_I( "is_intra_picture = %d\n",
+                mix->is_intra);
+
+        LOG_V(
+                "Get Surface from the pool\n");
+
+        /*current we use one surface for source data,
+         * one for reference and one for reconstructed*/
+        /*TODO, could be refine here*/
+
+        if (!parent->share_buf_mode) {
+            LOG_V(
+                    "We are NOT in share buffer mode\n");
+
+            if (mix->ref_fame == NULL)
+            {
+                ret = mix_surfacepool_get(parent->surfacepool, &mix->ref_fame);
+                if (ret != MIX_RESULT_SUCCESS) //#ifdef SLEEP_SURFACE not used
+                {
+                    LOG_E(
+                            "Failed to mix_surfacepool_get\n");
+                    return MIX_RESULT_FAIL;
+                }
+            }
+
+            if (mix->rec_fame == NULL)
+            {
+                ret = mix_surfacepool_get(parent->surfacepool, &mix->rec_fame);
+                if (ret != MIX_RESULT_SUCCESS)
+                {
+                    LOG_E(
+                            "Failed to mix_surfacepool_get\n");
+                    return MIX_RESULT_FAIL;
+                }
+            }
+
+            /* when frames are handed to the frame manager for display, the
+             * current frame must be a fresh one each time */
+            if (parent->need_display) {
+                mix->cur_fame = NULL;
+            }
+
+            if (mix->cur_fame == NULL)
+            {
+                ret = mix_surfacepool_get(parent->surfacepool, &mix->cur_fame);
+                if (ret != MIX_RESULT_SUCCESS)
+                {
+                    LOG_E(
+                            "Failed to mix_surfacepool_get\n");
+                    return MIX_RESULT_FAIL;
+                }
+            }
+
+            LOG_V( "Get Surface Done\n");
+
+
+            VAImage src_image;
+            guint8 *pvbuf;
+            guint8 *dst_y;
+            guint8 *dst_uv;
+            int i,j;
+
+            LOG_V(
+                    "map source data to surface\n");
+
+            ret = mix_videoframe_get_frame_id(mix->cur_fame, &surface);
+            if (ret != MIX_RESULT_SUCCESS)
+            {
+                LOG_E(
+                        "Failed to mix_videoframe_get_frame_id\n");
+                return MIX_RESULT_FAIL;
+            }
+
+            LOG_I(
+                    "surface id = 0x%08x\n", (guint) surface);
+
+            va_status = vaDeriveImage(va_display, surface, &src_image);
+            //need to destroy
+
+            if (va_status != VA_STATUS_SUCCESS)
+            {
+                LOG_E(
+                        "Failed to vaDeriveImage\n");
+                return MIX_RESULT_FAIL;
+            }
+
+            VAImage *image = &src_image;
+
+            LOG_V( "vaDeriveImage Done\n");
+
+            /* NOTE(review): src_image is leaked on the error returns below
+             * until vaDestroyImage runs — consider goto-based cleanup. */
+            va_status = vaMapBuffer (va_display, image->buf, (void **)&pvbuf);
+            if (va_status != VA_STATUS_SUCCESS)
+            {
+                LOG_E( "Failed to vaMapBuffer\n");
+                return MIX_RESULT_FAIL;
+            }
+
+            LOG_V(
+                    "vaImage information\n");
+            LOG_I(
+                    "image->pitches[0] = %d\n", image->pitches[0]);
+            LOG_I(
+                    "image->pitches[1] = %d\n", image->pitches[1]);
+            LOG_I(
+                    "image->offsets[0] = %d\n", image->offsets[0]);
+            LOG_I(
+                    "image->offsets[1] = %d\n", image->offsets[1]);
+            LOG_I(
+                    "image->num_planes = %d\n", image->num_planes);
+            LOG_I(
+                    "image->width = %d\n", image->width);
+            LOG_I(
+                    "image->height = %d\n", image->height);
+
+            LOG_I(
+                    "input buf size = %d\n", bufin->size);
+
+            guint8 *inbuf = bufin->data;
+
+            /*need to convert YUV420 to NV12*/
+            dst_y = pvbuf +image->offsets[0];
+
+            /* copy the Y plane row by row, honoring the surface pitch */
+            for (i = 0; i < height; i ++) {
+                memcpy (dst_y, inbuf + i * width, width);
+                dst_y += image->pitches[0];
+            }
+
+            dst_uv = pvbuf + image->offsets[1];
+
+            /* interleave the separate U and V planes into the NV12 UV plane */
+            for (i = 0; i < height / 2; i ++) {
+                for (j = 0; j < width; j+=2) {
+                    dst_uv [j] = inbuf [width * height + i * width / 2 + j / 2];
+                    dst_uv [j + 1] =
+                        inbuf [width * height * 5 / 4 + i * width / 2 + j / 2];
+                }
+                dst_uv += image->pitches[1];
+            }
+
+            /* FIX: the return value of vaUnmapBuffer was previously
+             * discarded, so the check below always tested the (successful)
+             * status left over from vaMapBuffer. */
+            va_status = vaUnmapBuffer(va_display, image->buf);
+            if (va_status != VA_STATUS_SUCCESS)
+            {
+                LOG_E(
+                        "Failed to vaUnmapBuffer\n");
+                return MIX_RESULT_FAIL;
+            }
+
+            va_status = vaDestroyImage(va_display, src_image.image_id);
+            if (va_status != VA_STATUS_SUCCESS)
+            {
+                LOG_E(
+                        "Failed to vaDestroyImage\n");
+                return MIX_RESULT_FAIL;
+            }
+
+            LOG_V(
+                    "Map source data to surface done\n");
+
+        }
+
+        else {//if (!parent->share_buf_mode)
+
+            /* NOTE(review): this carrier frame holds only the CI index;
+             * presumably the pool takes ownership — confirm it is not
+             * leaked on these paths. */
+            MixVideoFrame * frame = mix_videoframe_new();
+
+            if (mix->ref_fame == NULL)
+            {
+                ret = mix_videoframe_set_ci_frame_idx (frame, mix->surface_num - 1);
+
+                ret = mix_surfacepool_get_frame_with_ci_frameidx
+                    (parent->surfacepool, &mix->ref_fame, frame);
+                if (ret != MIX_RESULT_SUCCESS) //#ifdef SLEEP_SURFACE not used
+                {
+                    LOG_E(
+                            "get reference surface from pool failed\n");
+                    return MIX_RESULT_FAIL;
+                }
+            }
+
+            if (mix->rec_fame == NULL)
+            {
+                ret = mix_videoframe_set_ci_frame_idx (frame, mix->surface_num - 2);
+
+                ret = mix_surfacepool_get_frame_with_ci_frameidx
+                    (parent->surfacepool, &mix->rec_fame, frame);
+
+                if (ret != MIX_RESULT_SUCCESS)
+                {
+                    LOG_E(
+                            "get recontructed surface from pool failed\n");
+                    return MIX_RESULT_FAIL;
+                }
+            }
+
+            if (parent->need_display) {
+                mix->cur_fame = NULL;
+            }
+
+            if (mix->cur_fame == NULL)
+            {
+                guint ci_idx;
+                /* the input buffer carries the CI frame index.
+                 * NOTE(review): assumes bufin->size <= sizeof(guint) —
+                 * verify callers, otherwise this overruns ci_idx. */
+                memcpy (&ci_idx, bufin->data, bufin->size);
+
+                LOG_I(
+                        "surface_num = %d\n", mix->surface_num);
+                LOG_I(
+                        "ci_frame_idx = %d\n", ci_idx);
+
+                if (ci_idx > mix->surface_num - 2) {
+                    LOG_E(
+                            "the CI frame idx is too bigger than CI frame number\n");
+                    return MIX_RESULT_FAIL;
+                }
+
+
+                ret = mix_videoframe_set_ci_frame_idx (frame, ci_idx);
+
+                ret = mix_surfacepool_get_frame_with_ci_frameidx
+                    (parent->surfacepool, &mix->cur_fame, frame);
+
+                if (ret != MIX_RESULT_SUCCESS)
+                {
+                    LOG_E(
+                            "get current working surface from pool failed\n");
+                    return MIX_RESULT_FAIL;
+                }
+            }
+
+            ret = mix_videoframe_get_frame_id(mix->cur_fame, &surface);
+
+        }
+
+        LOG_V( "vaBeginPicture\n");
+        LOG_I( "va_context = 0x%08x\n",(guint)va_context);
+        LOG_I( "surface = 0x%08x\n",(guint)surface);
+        LOG_I( "va_display = 0x%08x\n",(guint)va_display);
+
+        va_status = vaBeginPicture(va_display, va_context, surface);
+        if (va_status != VA_STATUS_SUCCESS)
+        {
+            LOG_E( "Failed vaBeginPicture\n");
+            return MIX_RESULT_FAIL;
+        }
+
+        LOG_V( "mix_videofmtenc_mpeg4_send_seq_params\n");
+
+        if (mix->encoded_frames == 0) {
+            /* FIX: the result was previously discarded, so the check below
+             * tested a stale value of ret. */
+            ret = mix_videofmtenc_mpeg4_send_seq_params (mix);
+            if (ret != MIX_RESULT_SUCCESS)
+            {
+                LOG_E(
+                        "Failed mix_videofmtenc_mpeg4_send_seq_params\n");
+                return MIX_RESULT_FAIL;
+            }
+        }
+
+        ret = mix_videofmtenc_mpeg4_send_picture_parameter (mix);
+
+        if (ret != MIX_RESULT_SUCCESS)
+        {
+            LOG_E(
+                    "Failed mix_videofmtenc_mpeg4_send_picture_parameter\n");
+            return MIX_RESULT_FAIL;
+        }
+
+        ret = mix_videofmtenc_mpeg4_send_slice_parameter (mix);
+        if (ret != MIX_RESULT_SUCCESS)
+        {
+            LOG_E(
+                    "Failed mix_videofmtenc_mpeg4_send_slice_parameter\n");
+            return MIX_RESULT_FAIL;
+        }
+
+        LOG_V( "before vaEndPicture\n");
+
+        va_status = vaEndPicture (va_display, va_context);
+        if (va_status != VA_STATUS_SUCCESS)
+        {
+            LOG_E( "Failed vaEndPicture\n");
+            return MIX_RESULT_FAIL;
+        }
+
+
+        LOG_V( "vaSyncSurface\n");
+
+        va_status = vaSyncSurface(va_display, surface);
+        if (va_status != VA_STATUS_SUCCESS)
+        {
+            LOG_E( "Failed vaSyncSurface\n");
+            return MIX_RESULT_FAIL;
+        }
+
+
+        LOG_V(
+                "Start to get encoded data\n");
+
+        /*get encoded data from the VA buffer*/
+        va_status = vaMapBuffer (va_display, mix->coded_buf, (void **)&buf);
+        if (va_status != VA_STATUS_SUCCESS)
+        {
+            LOG_E( "Failed vaMapBuffer\n");
+            return MIX_RESULT_FAIL;
+        }
+
+        // first 4 bytes is the size of the buffer
+        memcpy (&(iovout->data_size), (void*)buf, 4);
+        //size = (guint*) buf;
+
+        if (iovout->data == NULL) { //means app doesn't allocate the buffer, so _encode will allocate it.
+
+            iovout->data = g_malloc (iovout->data_size);
+            if (iovout->data == NULL) {
+                /* NOTE(review): coded_buf stays mapped on this path. */
+                return MIX_RESULT_NO_MEMORY;
+            }
+        }
+
+        /* payload starts 16 bytes in (coded-buffer header); the size field
+         * read above occupies the first 4 of those bytes */
+        memcpy (iovout->data, buf + 16, iovout->data_size);
+
+        iovout->buffer_size = iovout->data_size;
+
+        LOG_I(
+                "out size is = %d\n", iovout->data_size);
+
+        va_status = vaUnmapBuffer (va_display, mix->coded_buf);
+        if (va_status != VA_STATUS_SUCCESS)
+        {
+            LOG_E( "Failed vaUnmapBuffer\n");
+            return MIX_RESULT_FAIL;
+        }
+
+        LOG_V( "get encoded data done\n");
+
+#if 0
+        if (parent->drawable) {
+            va_status = vaPutSurface(va_display, surface, (Drawable)parent->drawable,
+                    0,0, width, height,
+                    0,0, width, height,
+                    NULL,0,0);
+        }
+
+#ifdef SHOW_SRC
+        else {
+
+            va_status = vaPutSurface(va_display, surface, win,
+                    0,0, width, height,
+                    0,0, width, height,
+                    NULL,0,0);
+        }
+#endif //SHOW_SRC
+#endif
+
+        VASurfaceStatus status;
+
+        /*query the status of current surface*/
+        va_status = vaQuerySurfaceStatus(va_display, surface, &status);
+        if (va_status != VA_STATUS_SUCCESS)
+        {
+            LOG_E(
+                    "Failed vaQuerySurfaceStatus\n");
+            return MIX_RESULT_FAIL;
+        }
+        mix->pic_skipped = status & VASurfaceSkipped;
+
+        //ret = mix_framemanager_enqueue(parent->framemgr, mix->rec_fame);
+
+        if (parent->need_display) {
+            ret = mix_framemanager_enqueue(parent->framemgr, mix->cur_fame);
+            if (ret != MIX_RESULT_SUCCESS)
+            {
+                LOG_E(
+                        "Failed mix_framemanager_enqueue\n");
+                return MIX_RESULT_FAIL;
+            }
+        }
+
+
+        /*update the reference surface and reconstructed surface */
+        if (!mix->pic_skipped) {
+            tmp_fame = mix->rec_fame;
+            mix->rec_fame= mix->ref_fame;
+            mix->ref_fame = tmp_fame;
+        }
+
+
+#if 0
+        if (mix->ref_fame != NULL)
+            mix_videoframe_unref (mix->ref_fame);
+        mix->ref_fame = mix->rec_fame;
+
+        mix_videoframe_unref (mix->cur_fame);
+#endif
+
+        if (!(parent->need_display)) {
+            mix_videoframe_unref (mix->cur_fame);
+            mix->cur_fame = NULL;
+        }
+
+        mix->encoded_frames ++;
+    }
+    else
+    {
+        LOG_E(
+                "not MPEG4 video encode Object\n");
+        return MIX_RESULT_FAIL;
+    }
+
+
+    LOG_V( "end\n");
+
+    return MIX_RESULT_SUCCESS;
+}
+
+/* Compute (once) and cache the worst-case coded-buffer size, in bytes, for
+ * one encoded MPEG-4 frame, derived from the picture dimensions and the
+ * rate-control mode. Subsequent calls return the cached value.
+ * Returns MIX_RESULT_NULL_PTR / MIX_RESULT_FAIL on bad input. */
+MIX_RESULT mix_videofmtenc_mpeg4_get_max_encoded_buf_size (
+ MixVideoFormatEnc *mix, guint * max_size)
+{
+
+ MixVideoFormatEnc *parent = NULL;
+
+ if (mix == NULL)
+ {
+ LOG_E(
+ "mix == NULL\n");
+ return MIX_RESULT_NULL_PTR;
+ }
+
+ LOG_V( "Begin\n");
+
+ parent = MIX_VIDEOFORMATENC(mix);
+ MixVideoFormatEnc_MPEG4 *self = MIX_VIDEOFORMATENC_MPEG4 (mix);
+
+ if (MIX_IS_VIDEOFORMATENC_MPEG4(self)) {
+
+ /* already computed on an earlier call: return the cached value */
+ if (self->coded_buf_size > 0) {
+ *max_size = self->coded_buf_size;
+ LOG_V ("Already calculate the max encoded size, get the value directly");
+ return MIX_RESULT_SUCCESS;
+ }
+
+ /*base on the rate control mode to calculate the defaule encoded buffer size*/
+ if (self->va_rcmode == VA_RC_NONE) {
+ /* constant-QP mode: budget ~400 bytes per 16x16 macroblock */
+ self->coded_buf_size =
+ (parent->picture_width* parent->picture_height * 400) / (16 * 16);
+ // set to value according to QP
+ }
+ else {
+ self->coded_buf_size = parent->bitrate/ 4;
+ }
+
+ /* never go below the per-macroblock floor, whatever the bitrate */
+ self->coded_buf_size =
+ max (self->coded_buf_size ,
+ (parent->picture_width* parent->picture_height * 400) / (16 * 16));
+
+ /*in case got a very large user input bit rate value*/
+ /* NOTE(review): 1.5 * 8 promotes the expression to double and the
+ * assignment truncates back to guint — presumably the raw NV12
+ * frame size in bits; confirm the intended unit. */
+ self->coded_buf_size =
+ max(self->coded_buf_size,
+ (parent->picture_width * parent->picture_height * 1.5 * 8));
+ /* align the result up to a 16-byte multiple */
+ self->coded_buf_size = (self->coded_buf_size + 15) &(~15);
+ }
+ else
+ {
+ LOG_E(
+ "not MPEG4 video encode Object\n");
+ return MIX_RESULT_FAIL;
+ }
+
+ *max_size = self->coded_buf_size;
+
+ LOG_V( "end\n");
+
+ return MIX_RESULT_SUCCESS;
+}
diff --git a/mix_video/src/mixvideoformatenc_mpeg4.h b/mix_video/src/mixvideoformatenc_mpeg4.h
new file mode 100644
index 0000000..dc26efe
--- /dev/null
+++ b/mix_video/src/mixvideoformatenc_mpeg4.h
@@ -0,0 +1,137 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+#ifndef __MIX_VIDEOFORMATENC_MPEG4_H__
+#define __MIX_VIDEOFORMATENC_MPEG4_H__
+
+#include "mixvideoformatenc.h"
+#include "mixvideoframe_private.h"
+
+#define MIX_VIDEO_ENC_MPEG4_SURFACE_NUM 20
+
+/* NOTE(review): classic function-like macros — each argument is evaluated
+ * twice, so never pass expressions with side effects (e.g. max(i++, j)). */
+#define min(X,Y) (((X) < (Y)) ? (X) : (Y))
+#define max(X,Y) (((X) > (Y)) ? (X) : (Y))
+
+/*
+ * Type macros.
+ */
+#define MIX_TYPE_VIDEOFORMATENC_MPEG4 (mix_videoformatenc_mpeg4_get_type ())
+#define MIX_VIDEOFORMATENC_MPEG4(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_VIDEOFORMATENC_MPEG4, MixVideoFormatEnc_MPEG4))
+#define MIX_IS_VIDEOFORMATENC_MPEG4(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEOFORMATENC_MPEG4))
+#define MIX_VIDEOFORMATENC_MPEG4_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_VIDEOFORMATENC_MPEG4, MixVideoFormatEnc_MPEG4Class))
+#define MIX_IS_VIDEOFORMATENC_MPEG4_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEOFORMATENC_MPEG4))
+#define MIX_VIDEOFORMATENC_MPEG4_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEOFORMATENC_MPEG4, MixVideoFormatEnc_MPEG4Class))
+
+typedef struct _MixVideoFormatEnc_MPEG4 MixVideoFormatEnc_MPEG4;
+typedef struct _MixVideoFormatEnc_MPEG4Class MixVideoFormatEnc_MPEG4Class;
+
+/**
+ * MixVideoFormatEnc_MPEG4:
+ *
+ * MI-X MPEG-4:2 encoder instance: extends MixVideoFormatEnc with the VA
+ * buffers, surfaces and per-stream state used during encoding.
+ */
+struct _MixVideoFormatEnc_MPEG4 {
+ /*< public > */
+ MixVideoFormatEnc parent;
+
+
+ /* VA buffer ids: coded output plus the per-frame parameter buffers */
+ VABufferID coded_buf;
+ VABufferID seq_param_buf;
+ VABufferID pic_param_buf;
+ VABufferID slice_param_buf;
+ /* surfaces shared with the camera (CI) in share-buffer mode, and the
+ * full surface array handed to vaCreateContext */
+ VASurfaceID * ci_shared_surfaces;
+ VASurfaceID * surfaces;
+ guint surface_num;
+
+ MixVideoFrame *cur_fame; //current input frame to be encoded;
+ MixVideoFrame *ref_fame; //reference frame
+ MixVideoFrame *rec_fame; //reconstructed frame;
+
+ /* stream-level MPEG-4 coding parameters */
+ guchar profile_and_level_indication;
+ guint fixed_vop_time_increment;
+ guint disable_deblocking_filter_idc;
+
+ /* VA rate-control mode (e.g. VA_RC_NONE, VA_RC_CBR) */
+ guint va_rcmode;
+
+ guint encoded_frames;
+ gboolean pic_skipped;
+
+ gboolean is_intra;
+
+ /* cached worst-case coded-buffer size; computed lazily by
+ * mix_videofmtenc_mpeg4_get_max_encoded_buf_size() */
+ guint coded_buf_size;
+
+ /*< public > */
+};
+
+/**
+ * MixVideoFormatEnc_MPEG4Class:
+ *
+ * MI-X Video object class
+ */
+struct _MixVideoFormatEnc_MPEG4Class {
+ /*< public > */
+ MixVideoFormatEncClass parent_class;
+
+ /* class members */
+
+ /*< public > */
+};
+
+/**
+ * mix_videoformatenc_mpeg4_get_type:
+ * @returns: type
+ *
+ * Get the type of object.
+ */
+GType mix_videoformatenc_mpeg4_get_type(void);
+
+/**
+ * mix_videoformatenc_mpeg4_new:
+ * @returns: A newly allocated instance of #MixVideoFormatEnc_MPEG4
+ *
+ * Use this method to create new instance of #MixVideoFormatEnc_MPEG4
+ */
+MixVideoFormatEnc_MPEG4 *mix_videoformatenc_mpeg4_new(void);
+
+/**
+ * mix_videoformatenc_mpeg4_ref:
+ * @mix: object to add reference
+ * @returns: the MixVideoFormatEnc_MPEG4 instance where reference count has been increased.
+ *
+ * Add reference count.
+ */
+MixVideoFormatEnc_MPEG4 *mix_videoformatenc_mpeg4_ref(MixVideoFormatEnc_MPEG4 * mix);
+
+/**
+ * mix_videoformatenc_mpeg4_unref:
+ * @obj: object to unref.
+ *
+ * Decrement reference count of the object.
+ */
+#define mix_videoformatenc_mpeg4_unref(obj) g_object_unref (G_OBJECT(obj))
+
+/* Class Methods */
+
+/* MPEG-4:2 vmethods */
+MIX_RESULT mix_videofmtenc_mpeg4_getcaps(MixVideoFormatEnc *mix, GString *msg);
+MIX_RESULT mix_videofmtenc_mpeg4_initialize(MixVideoFormatEnc *mix,
+ MixVideoConfigParamsEnc * config_params_enc,
+ MixFrameManager * frame_mgr,
+ MixBufferPool * input_buf_pool,
+ MixSurfacePool ** surface_pool,
+ VADisplay va_display);
+MIX_RESULT mix_videofmtenc_mpeg4_encode(MixVideoFormatEnc *mix, MixBuffer * bufin[],
+ gint bufincnt, MixIOVec * iovout[], gint iovoutcnt,
+ MixVideoEncodeParams * encode_params);
+MIX_RESULT mix_videofmtenc_mpeg4_flush(MixVideoFormatEnc *mix);
+MIX_RESULT mix_videofmtenc_mpeg4_eos(MixVideoFormatEnc *mix);
+MIX_RESULT mix_videofmtenc_mpeg4_deinitialize(MixVideoFormatEnc *mix);
+MIX_RESULT mix_videofmtenc_mpeg4_get_max_encoded_buf_size (MixVideoFormatEnc *mix, guint * max_size);
+
+/* Local Methods */
+
+MIX_RESULT mix_videofmtenc_mpeg4_process_encode (MixVideoFormatEnc_MPEG4 *mix, MixBuffer * bufin,
+ MixIOVec * iovout);
+
+#endif /* __MIX_VIDEOFORMATENC_MPEG4_H__ */
+
diff --git a/mix_video/src/mixvideoformatenc_preview.c b/mix_video/src/mixvideoformatenc_preview.c
new file mode 100644
index 0000000..17b9a4b
--- /dev/null
+++ b/mix_video/src/mixvideoformatenc_preview.c
@@ -0,0 +1,1187 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+#include <glib.h>
+#include <string.h>
+#include <stdlib.h>
+
+#include "mixvideolog.h"
+
+#include "mixvideoformatenc_preview.h"
+#include "mixvideoconfigparamsenc_preview.h"
+
+#define MDEBUG
+#undef SHOW_SRC
+
+#ifdef SHOW_SRC
+Window win = 0;
+#endif /* SHOW_SRC */
+
+
+/* The parent class. The pointer will be saved
+ * in this class's initialization. The pointer
+ * can be used for chaining method call if needed.
+ */
+static MixVideoFormatEncClass *parent_class = NULL;
+
+static void mix_videoformatenc_preview_finalize(GObject * obj);
+
+/*
+ * Please note that the type we pass to G_DEFINE_TYPE is MIX_TYPE_VIDEOFORMATENC
+ */
+G_DEFINE_TYPE (MixVideoFormatEnc_Preview, mix_videoformatenc_preview, MIX_TYPE_VIDEOFORMATENC);
+
+/* GObject instance initializer: reset all per-encoder bookkeeping to a
+ * known-empty state; real setup happens in
+ * mix_videofmtenc_preview_initialize(). */
+static void mix_videoformatenc_preview_init(MixVideoFormatEnc_Preview * self) {
+ MixVideoFormatEnc *parent = MIX_VIDEOFORMATENC(self);
+
+ /* TODO: public member initialization */
+
+ /* TODO: private member initialization */
+ self->encoded_frames = 0;
+ self->pic_skipped = FALSE;
+ self->is_intra = TRUE;
+ self->cur_fame = NULL;
+ self->ref_fame = NULL;
+ self->rec_fame = NULL;
+
+ self->ci_shared_surfaces = NULL;
+ self->surfaces= NULL;
+ self->surface_num = 0;
+
+ /* not usable until initialize() has created the VA context */
+ parent->initialized = FALSE;
+}
+
+/* GObject class initializer: install the finalizer and hook the preview
+ * encoder's vmethods onto the MixVideoFormatEnc vtable. */
+static void mix_videoformatenc_preview_class_init(
+ MixVideoFormatEnc_PreviewClass * klass) {
+
+ /* root class */
+ GObjectClass *gobject_class = (GObjectClass *) klass;
+
+ /* direct parent class */
+ MixVideoFormatEncClass *video_formatenc_class =
+ MIX_VIDEOFORMATENC_CLASS(klass);
+
+ /* parent class for later use */
+ parent_class = g_type_class_peek_parent(klass);
+
+ /* setup finializer */
+ gobject_class->finalize = mix_videoformatenc_preview_finalize;
+
+ /* setup vmethods with base implementation */
+ /* TODO: decide if we need to override the parent's methods */
+ video_formatenc_class->getcaps = mix_videofmtenc_preview_getcaps;
+ video_formatenc_class->initialize = mix_videofmtenc_preview_initialize;
+ video_formatenc_class->encode = mix_videofmtenc_preview_encode;
+ video_formatenc_class->flush = mix_videofmtenc_preview_flush;
+ video_formatenc_class->eos = mix_videofmtenc_preview_eos;
+ video_formatenc_class->deinitialize = mix_videofmtenc_preview_deinitialize;
+}
+
+/* Allocate a new preview encoder instance via the GObject type system.
+ * Caller owns the returned reference (release with
+ * mix_videoformatenc_preview_unref). */
+MixVideoFormatEnc_Preview *
+mix_videoformatenc_preview_new(void) {
+ MixVideoFormatEnc_Preview *ret =
+ g_object_new(MIX_TYPE_VIDEOFORMATENC_PREVIEW, NULL);
+
+ return ret;
+}
+
+/* GObject finalizer: no preview-specific resources are released here;
+ * it only chains up to the parent class finalizer. */
+void mix_videoformatenc_preview_finalize(GObject * obj) {
+ /* clean up here. */
+
+ /*MixVideoFormatEnc_Preview *mix = MIX_VIDEOFORMATENC_PREVIEW(obj); */
+ GObjectClass *root_class = (GObjectClass *) parent_class;
+
+ LOG_V( "\n");
+
+ /* Chain up parent */
+ if (root_class->finalize) {
+ root_class->finalize(obj);
+ }
+}
+
+/* Add a reference to @mix; returns the same instance for call chaining. */
+MixVideoFormatEnc_Preview *
+mix_videoformatenc_preview_ref(MixVideoFormatEnc_Preview * mix) {
+ return (MixVideoFormatEnc_Preview *) g_object_ref(G_OBJECT(mix));
+}
+
+/*Preview vmethods implementation */
+/* Report capabilities: the preview format adds nothing of its own yet and
+ * simply chains up to the parent implementation when present. */
+MIX_RESULT mix_videofmtenc_preview_getcaps(MixVideoFormatEnc *mix, GString *msg) {
+
+ /* TODO: add codes for Preview format */
+
+ /* TODO: decide if we need to chainup parent method.
+ * if we do, the following is the code:
+ */
+
+ LOG_V( "mix_videofmtenc_preview_getcaps\n");
+
+ if (mix == NULL) {
+ LOG_E( "mix == NULL\n");
+ return MIX_RESULT_NULL_PTR;
+ }
+
+
+ if (parent_class->getcaps) {
+ return parent_class->getcaps(mix, msg);
+ }
+ return MIX_RESULT_SUCCESS;
+}
+
+/**
+ * mix_videofmtenc_preview_initialize:
+ * @mix: encoder object
+ * @config_params_enc: must be a MixVideoConfigParamsEncPreview
+ * @frame_mgr: reserved for future use
+ * @input_buf_pool: reserved for future use
+ * @surface_pool: out: receives the surface pool created here (may be NULL)
+ * @va_display: an opened VA display, owned by the caller
+ *
+ * Chain up to the parent initialize, then create the VA configuration,
+ * surfaces (plus CI shared surfaces in share-buffer mode), surface pool,
+ * VA context and coded buffer, all under the object lock.
+ *
+ * Returns: MIX_RESULT_SUCCESS, MIX_RESULT_NULL_PTR on NULL arguments,
+ * MIX_RESULT_NO_MEMORY on allocation failure, otherwise MIX_RESULT_FAIL.
+ */
+MIX_RESULT mix_videofmtenc_preview_initialize(MixVideoFormatEnc *mix,
+        MixVideoConfigParamsEnc * config_params_enc,
+        MixFrameManager * frame_mgr,
+        MixBufferPool * input_buf_pool,
+        MixSurfacePool ** surface_pool,
+        VADisplay va_display ) {
+
+    MIX_RESULT ret = MIX_RESULT_SUCCESS;
+    MixVideoFormatEnc *parent = NULL;
+    MixVideoConfigParamsEncPreview * config_params_enc_preview;
+
+    VAStatus va_status = VA_STATUS_SUCCESS;
+    VASurfaceID * surfaces;
+
+    gint va_max_num_profiles, va_max_num_entrypoints, va_max_num_attribs;
+    gint va_num_profiles, va_num_entrypoints;
+
+    VAProfile *va_profiles = NULL;
+    VAEntrypoint *va_entrypoints = NULL;
+    VAConfigAttrib va_attrib[2];
+    guint index;
+
+
+    /*frame_mgr and input_buf_pool is reservered for future use*/
+
+    if (mix == NULL || config_params_enc == NULL || va_display == NULL) {
+        LOG_E(
+                "mix == NULL || config_params_enc == NULL || va_display == NULL\n");
+        return MIX_RESULT_NULL_PTR;
+    }
+
+    LOG_V( "begin\n");
+
+
+    //TODO additional parameter checking
+
+    /* Chainup parent method. */
+#if 1
+    if (parent_class->initialize) {
+        ret = parent_class->initialize(mix, config_params_enc,
+                frame_mgr, input_buf_pool, surface_pool,
+                va_display);
+    }
+
+    if (ret != MIX_RESULT_SUCCESS)
+    {
+        return ret;
+    }
+
+#endif //disable it currently
+
+    if (MIX_IS_VIDEOFORMATENC_PREVIEW(mix))
+    {
+        parent = MIX_VIDEOFORMATENC(&(mix->parent));
+        MixVideoFormatEnc_Preview *self = MIX_VIDEOFORMATENC_PREVIEW(mix);
+
+        /* type-check the config params; the preview variant currently
+         * carries no extra properties but must still be the right type */
+        if (MIX_IS_VIDEOCONFIGPARAMSENC_PREVIEW (config_params_enc)) {
+            config_params_enc_preview =
+                MIX_VIDEOCONFIGPARAMSENC_PREVIEW (config_params_enc);
+        } else {
+            LOG_V(
+                    "mix_videofmtenc_preview_initialize: no preview config params found\n");
+            return MIX_RESULT_FAIL;
+        }
+
+        g_mutex_lock(parent->objectlock);
+
+
+        LOG_V(
+                "Get properities from params done\n");
+
+
+        //display = XOpenDisplay(NULL);
+        //va_display = vaGetDisplay (videoencobj->display);
+
+        parent->va_display = va_display;
+
+        LOG_V( "Get Display\n");
+        LOG_I( "Display = 0x%08x\n",
+                (guint)va_display);
+
+        //va_status = vaInitialize(va_display, &va_major_ver, &va_minor_ver);
+        //g_print ("vaInitialize va_status = %d\n", va_status);
+
+
+#if 0
+        /* query the vender information, can ignore*/
+        va_vendor = vaQueryVendorString (va_display);
+        LOG_I( "Vendor = %s\n",
+                va_vendor);
+#endif
+
+        /*get the max number for profiles/entrypoints/attribs*/
+        va_max_num_profiles = vaMaxNumProfiles(va_display);
+        LOG_I( "va_max_num_profiles = %d\n",
+                va_max_num_profiles);
+
+        va_max_num_entrypoints = vaMaxNumEntrypoints(va_display);
+        LOG_I( "va_max_num_entrypoints = %d\n",
+                va_max_num_entrypoints);
+
+        va_max_num_attribs = vaMaxNumConfigAttributes(va_display);
+        LOG_I( "va_max_num_attribs = %d\n",
+                va_max_num_attribs);
+
+        va_profiles = g_malloc(sizeof(VAProfile)*va_max_num_profiles);
+        va_entrypoints = g_malloc(sizeof(VAEntrypoint)*va_max_num_entrypoints);
+
+        if (va_profiles == NULL || va_entrypoints ==NULL)
+        {
+            LOG_E(
+                    "!va_profiles || !va_entrypoints\n");
+            /* FIX: release whichever allocation succeeded
+             * (g_free(NULL) is a no-op) */
+            g_free(va_profiles);
+            g_free(va_entrypoints);
+            g_mutex_unlock(parent->objectlock);
+            return MIX_RESULT_NO_MEMORY;
+        }
+
+        LOG_I(
+                "va_profiles = 0x%08x\n", (guint)va_profiles);
+
+        LOG_V( "vaQueryConfigProfiles\n");
+
+
+        va_status = vaQueryConfigProfiles (va_display, va_profiles, &va_num_profiles);
+
+        if (va_status != VA_STATUS_SUCCESS)
+        {
+            LOG_E(
+                    "Failed to call vaQueryConfigProfiles\n");
+            g_free(va_profiles);
+            g_free (va_entrypoints);
+            g_mutex_unlock(parent->objectlock);
+            return MIX_RESULT_FAIL;
+        }
+
+        LOG_V( "vaQueryConfigProfiles Done\n");
+
+
+
+        /*check whether profile is supported*/
+        for(index= 0; index < va_num_profiles; index++) {
+            if(parent->va_profile == va_profiles[index])
+                break;
+        }
+
+        if(index == va_num_profiles)
+        {
+            LOG_E( "Profile not supported\n");
+            g_free(va_profiles);
+            g_free (va_entrypoints);
+            g_mutex_unlock(parent->objectlock);
+            return MIX_RESULT_FAIL; //Todo, add error handling here
+        }
+
+        LOG_V( "vaQueryConfigEntrypoints\n");
+
+
+        /*Check entry point*/
+        va_status = vaQueryConfigEntrypoints(va_display,
+                parent->va_profile,
+                va_entrypoints, &va_num_entrypoints);
+
+        if (va_status != VA_STATUS_SUCCESS)
+        {
+            LOG_E(
+                    "Failed to call vaQueryConfigEntrypoints\n");
+            g_free(va_profiles);
+            g_free (va_entrypoints);
+            g_mutex_unlock(parent->objectlock);
+            return MIX_RESULT_FAIL;
+        }
+
+        for (index = 0; index < va_num_entrypoints; index ++) {
+            if (va_entrypoints[index] == VAEntrypointEncSlice) {
+                break;
+            }
+        }
+
+        if (index == va_num_entrypoints) {
+            LOG_E( "Entrypoint not found\n");
+            g_free(va_profiles);
+            g_free (va_entrypoints);
+            g_mutex_unlock(parent->objectlock);
+            return MIX_RESULT_FAIL; //Todo, add error handling here
+        }
+
+
+        /*free profiles and entrypoints*/
+        g_free(va_profiles);
+        g_free (va_entrypoints);
+
+        va_attrib[0].type = VAConfigAttribRTFormat;
+        va_attrib[1].type = VAConfigAttribRateControl;
+
+        LOG_V( "vaGetConfigAttributes\n");
+
+        va_status = vaGetConfigAttributes(va_display, parent->va_profile,
+                parent->va_entrypoint,
+                &va_attrib[0], 2);
+
+        if (va_status != VA_STATUS_SUCCESS)
+        {
+            LOG_E(
+                    "Failed to call vaGetConfigAttributes\n");
+            g_mutex_unlock(parent->objectlock);
+            return MIX_RESULT_FAIL;
+        }
+
+        if ((va_attrib[0].value & parent->va_format) == 0) {
+            LOG_E( "Matched format not found\n");
+            g_mutex_unlock(parent->objectlock);
+            return MIX_RESULT_FAIL; //Todo, add error handling here
+        }
+
+
+        if ((va_attrib[1].value & parent->va_rcmode) == 0) {
+            LOG_E( "RC mode not found\n");
+            g_mutex_unlock(parent->objectlock);
+            return MIX_RESULT_FAIL; //Todo, add error handling here
+        }
+
+        va_attrib[0].value = parent->va_format; //VA_RT_FORMAT_YUV420;
+        va_attrib[1].value = parent->va_rcmode;
+
+        LOG_V( "======VA Configuration======\n");
+
+        LOG_I( "profile = %d\n",
+                parent->va_profile);
+        LOG_I( "va_entrypoint = %d\n",
+                parent->va_entrypoint);
+        LOG_I( "va_attrib[0].type = %d\n",
+                va_attrib[0].type);
+        LOG_I( "va_attrib[1].type = %d\n",
+                va_attrib[1].type);
+        LOG_I( "va_attrib[0].value (Format) = %d\n",
+                va_attrib[0].value);
+        LOG_I( "va_attrib[1].value (RC mode) = %d\n",
+                va_attrib[1].value);
+
+        LOG_V( "vaCreateConfig\n");
+
+        va_status = vaCreateConfig(va_display, parent->va_profile,
+                parent->va_entrypoint,
+                &va_attrib[0], 2, &(parent->va_config));
+
+        if (va_status != VA_STATUS_SUCCESS)
+        {
+            LOG_E( "Failed vaCreateConfig\n");
+            g_mutex_unlock(parent->objectlock);
+            return MIX_RESULT_FAIL;
+        }
+
+        /*TODO: compute the surface number*/
+        int numSurfaces;
+
+        if (parent->share_buf_mode) {
+            numSurfaces = 2;
+        }
+        else {
+            numSurfaces = 8;
+            parent->ci_frame_num = 0;
+        }
+
+        self->surface_num = numSurfaces + parent->ci_frame_num;
+
+        surfaces = g_malloc(sizeof(VASurfaceID)*numSurfaces);
+
+        if (surfaces == NULL)
+        {
+            LOG_E(
+                    "Failed allocate surface\n");
+            g_mutex_unlock(parent->objectlock);
+            return MIX_RESULT_NO_MEMORY;
+        }
+
+        LOG_V( "vaCreateSurfaces\n");
+
+        va_status = vaCreateSurfaces(va_display, parent->picture_width,
+                parent->picture_height, parent->va_format,
+                numSurfaces, surfaces);
+
+        if (va_status != VA_STATUS_SUCCESS)
+        {
+            LOG_E(
+                    "Failed vaCreateSurfaces\n");
+            g_free (surfaces);   /* FIX: was leaked on this path */
+            g_mutex_unlock(parent->objectlock);
+            return MIX_RESULT_FAIL;
+        }
+
+        if (parent->share_buf_mode) {
+
+            LOG_V(
+                    "We are in share buffer mode!\n");
+            self->ci_shared_surfaces =
+                g_malloc(sizeof(VASurfaceID) * parent->ci_frame_num);
+
+            if (self->ci_shared_surfaces == NULL)
+            {
+                LOG_E(
+                        "Failed allocate shared surface\n");
+                g_free (surfaces);   /* FIX: was leaked on this path */
+                g_mutex_unlock(parent->objectlock);
+                return MIX_RESULT_NO_MEMORY;
+            }
+
+            /* FIX: removed a shadowing local "guint index;" declaration —
+             * the function-scope index is reused instead */
+            for(index = 0; index < parent->ci_frame_num; index++) {
+
+                LOG_I( "ci_frame_id = %lu\n",
+                        parent->ci_frame_id[index]);
+
+                LOG_V(
+                        "vaCreateSurfaceFromCIFrame\n");
+
+                va_status = vaCreateSurfaceFromCIFrame(va_display,
+                        (gulong) (parent->ci_frame_id[index]),
+                        &self->ci_shared_surfaces[index]);
+                if (va_status != VA_STATUS_SUCCESS)
+                {
+                    LOG_E(
+                            "Failed to vaCreateSurfaceFromCIFrame\n");
+                    g_free (surfaces);   /* FIX: was leaked on this path */
+                    g_mutex_unlock(parent->objectlock);
+                    return MIX_RESULT_FAIL;
+                }
+            }
+
+            LOG_V(
+                    "vaCreateSurfaceFromCIFrame Done\n");
+
+        }// if (parent->share_buf_mode)
+
+        self->surfaces = g_malloc(sizeof(VASurfaceID) * self->surface_num);
+
+        if (self->surfaces == NULL)
+        {
+            LOG_E(
+                    "Failed allocate private surface\n");
+            g_free (surfaces);
+            g_mutex_unlock(parent->objectlock);
+            return MIX_RESULT_NO_MEMORY;
+        }
+
+        if (parent->share_buf_mode) {
+            /*shared surfaces should be put in pool first,
+             * because we will get it accoring to CI index*/
+            for(index = 0; index < parent->ci_frame_num; index++)
+                self->surfaces[index] = self->ci_shared_surfaces[index];
+        }
+
+        for(index = 0; index < numSurfaces; index++) {
+            self->surfaces[index + parent->ci_frame_num] = surfaces[index];
+        }
+
+        LOG_V( "assign surface Done\n");
+        LOG_I( "Created %d libva surfaces\n",
+                numSurfaces + parent->ci_frame_num);
+
+#if 0 //current put this in gst
+        images = g_malloc(sizeof(VAImage)*numSurfaces);
+        if (images == NULL)
+        {
+            g_mutex_unlock(parent->objectlock);
+            return MIX_RESULT_FAIL;
+        }
+
+        for (index = 0; index < numSurfaces; index++) {
+            //Derive an VAImage from an existing surface.
+            //The image buffer can then be mapped/unmapped for CPU access
+            va_status = vaDeriveImage(va_display, surfaces[index],
+                    &images[index]);
+        }
+#endif
+
+        LOG_V( "mix_surfacepool_new\n");
+
+        parent->surfacepool = mix_surfacepool_new();
+        if (surface_pool)
+            *surface_pool = parent->surfacepool;
+        //which is useful to check before encode
+
+        if (parent->surfacepool == NULL)
+        {
+            LOG_E(
+                    "Failed to mix_surfacepool_new\n");
+            g_free (surfaces);
+            g_mutex_unlock(parent->objectlock);
+            return MIX_RESULT_FAIL;
+        }
+
+        LOG_V(
+                "mix_surfacepool_initialize\n");
+
+        ret = mix_surfacepool_initialize(parent->surfacepool,
+                self->surfaces, parent->ci_frame_num + numSurfaces);
+
+        switch (ret)
+        {
+            case MIX_RESULT_SUCCESS:
+                break;
+            case MIX_RESULT_ALREADY_INIT:
+                //TODO cleanup and/or retry
+                g_free (surfaces);
+                g_mutex_unlock(parent->objectlock);
+                return MIX_RESULT_FAIL;
+            default:
+                break;
+        }
+
+
+        //Initialize and save the VA context ID
+        LOG_V( "vaCreateContext\n");
+
+        va_status = vaCreateContext(va_display, parent->va_config,
+                parent->picture_width, parent->picture_height,
+                0, self->surfaces, parent->ci_frame_num + numSurfaces,
+                &(parent->va_context));
+
+        LOG_I(
+                "Created libva context width %d, height %d\n",
+                parent->picture_width, parent->picture_height);
+
+        if (va_status != VA_STATUS_SUCCESS)
+        {
+            LOG_E(
+                    "Failed to vaCreateContext\n");
+            LOG_I( "va_status = %d\n",
+                    (guint)va_status);
+            g_free (surfaces);
+            g_mutex_unlock(parent->objectlock);
+            return MIX_RESULT_FAIL;
+        }
+
+        /* preview mode produces no real bitstream; a token-sized coded
+         * buffer is enough */
+        self->coded_buf_size = 4;
+
+        /*Create coded buffer for output*/
+        va_status = vaCreateBuffer (va_display, parent->va_context,
+                VAEncCodedBufferType,
+                self->coded_buf_size, //
+                1, NULL,
+                &self->coded_buf);
+
+        if (va_status != VA_STATUS_SUCCESS)
+        {
+            LOG_E(
+                    "Failed to vaCreateBuffer: VAEncCodedBufferType\n");
+            g_free (surfaces);
+            g_mutex_unlock(parent->objectlock);
+            return MIX_RESULT_FAIL;
+        }
+
+#ifdef SHOW_SRC
+        Display * display = XOpenDisplay (NULL);
+
+        LOG_I( "display = 0x%08x\n",
+                (guint) display);
+        win = XCreateSimpleWindow(display, RootWindow(display, 0), 0, 0,
+                parent->picture_width, parent->picture_height, 0, 0,
+                WhitePixel(display, 0));
+        XMapWindow(display, win);
+        XSelectInput(display, win, KeyPressMask | StructureNotifyMask);
+
+        XSync(display, False);
+        LOG_I( "va_display = 0x%08x\n",
+                (guint) va_display);
+
+#endif /* SHOW_SRC */
+
+        parent->initialized = TRUE;
+
+        g_mutex_unlock(parent->objectlock);
+        g_free (surfaces);
+
+    }
+    else
+    {
+        LOG_E(
+                "not Preview video encode Object\n");
+        return MIX_RESULT_FAIL;
+
+    }
+
+    LOG_V( "end\n");
+
+    return MIX_RESULT_SUCCESS;
+}
+
+/**
+ * mix_videofmtenc_preview_encode:
+ * @mix: encoder object
+ * @bufin: array of input buffers; only bufin[0] is used
+ * @bufincnt: number of input buffers, expected to be 1
+ * @iovout: array of output vectors; only iovout[0] is used
+ * @iovoutcnt: number of output vectors, expected to be 1
+ * @encode_params: reserved for future use
+ *
+ * Encode one frame under the object lock by delegating to
+ * mix_videofmtenc_preview_process_encode().
+ *
+ * Returns: MIX_RESULT_SUCCESS, MIX_RESULT_NULL_PTR on NULL arguments,
+ * otherwise MIX_RESULT_FAIL.
+ */
+MIX_RESULT mix_videofmtenc_preview_encode(MixVideoFormatEnc *mix, MixBuffer * bufin[],
+        gint bufincnt, MixIOVec * iovout[], gint iovoutcnt,
+        MixVideoEncodeParams * encode_params) {
+
+    MIX_RESULT ret = MIX_RESULT_SUCCESS;
+    MixVideoFormatEnc *parent = NULL;
+
+    LOG_V( "Begin\n");
+
+    /*currently only support one input and output buffer*/
+    //TODO: params i
+
+    if (bufincnt != 1 || iovoutcnt != 1) {
+        LOG_E(
+                "buffer count not equal to 1\n");
+        LOG_E(
+                "maybe some exception occurs\n");
+    }
+
+    if (mix == NULL ||bufin[0] == NULL || iovout[0] == NULL) {
+        LOG_E(
+                "!mix || !bufin[0] ||!iovout[0]\n");
+        return MIX_RESULT_NULL_PTR;
+    }
+
+    //TODO: encode_params is reserved here for future usage.
+
+    /* TODO: decide if we need to chainup parent method.
+     * * * if we do, the following is the code:
+     * */
+
+#if 0
+    if (parent_class->encode) {
+        return parent_class->encode(mix, bufin, bufincnt, iovout,
+                iovoutcnt, encode_params);
+    }
+#endif
+
+    if (MIX_IS_VIDEOFORMATENC_PREVIEW(mix))
+    {
+
+        parent = MIX_VIDEOFORMATENC(&(mix->parent));
+        MixVideoFormatEnc_Preview *self = MIX_VIDEOFORMATENC_PREVIEW (mix);
+
+        LOG_V( "Locking\n");
+        g_mutex_lock(parent->objectlock);
+
+
+        //TODO: also we could move some encode Preparation work to here
+
+        LOG_V(
+                "mix_videofmtenc_preview_process_encode\n");
+
+        ret = mix_videofmtenc_preview_process_encode (self,
+                bufin[0], iovout[0]);
+        if (ret != MIX_RESULT_SUCCESS)
+        {
+            LOG_E(
+                    "Failed mix_videofmtenc_preview_process_encode\n");
+            /* FIX: previously returned while still holding objectlock,
+             * deadlocking every subsequent call on this object. */
+            g_mutex_unlock(parent->objectlock);
+            return MIX_RESULT_FAIL;
+        }
+
+
+        LOG_V( "UnLocking\n");
+
+        g_mutex_unlock(parent->objectlock);
+    }
+    else
+    {
+        LOG_E(
+                "not Preview video encode Object\n");
+        return MIX_RESULT_FAIL;
+    }
+
+    LOG_V( "end\n");
+
+    return MIX_RESULT_SUCCESS;
+}
+
+/**
+ * mix_videofmtenc_preview_flush:
+ * @mix: the encoder object
+ *
+ * Drop any in-flight frame state: unref the reconstructed and reference
+ * surfaces and reset the frame counters, all under the object lock.
+ * Deliberately does NOT chain up to the parent flush.
+ *
+ * Returns: MIX_RESULT_SUCCESS, or MIX_RESULT_NULL_PTR if @mix is NULL.
+ */
+MIX_RESULT mix_videofmtenc_preview_flush(MixVideoFormatEnc *mix) {
+
+ //MIX_RESULT ret = MIX_RESULT_SUCCESS;
+
+ LOG_V( "Begin\n");
+
+ if (mix == NULL) {
+ LOG_E( "mix == NULL\n");
+ return MIX_RESULT_NULL_PTR;
+ }
+
+
+ /*not chain to parent flush func*/
+#if 0
+ if (parent_class->flush) {
+ return parent_class->flush(mix, msg);
+ }
+#endif
+
+ MixVideoFormatEnc_Preview *self = MIX_VIDEOFORMATENC_PREVIEW(mix);
+
+ g_mutex_lock(mix->objectlock);
+
+#if 0
+ /*unref the current source surface*/
+ if (self->cur_fame != NULL)
+ {
+ mix_videoframe_unref (self->cur_fame);
+ self->cur_fame = NULL;
+ }
+#endif
+
+ /*unref the reconstructed surface*/
+ if (self->rec_fame != NULL)
+ {
+ mix_videoframe_unref (self->rec_fame);
+ self->rec_fame = NULL;
+ }
+
+ /*unref the reference surface*/
+ if (self->ref_fame != NULL)
+ {
+ mix_videoframe_unref (self->ref_fame);
+ self->ref_fame = NULL;
+ }
+
+ /*reset the properties*/
+ self->encoded_frames = 0;
+ self->pic_skipped = FALSE;
+ self->is_intra = TRUE;
+
+ g_mutex_unlock(mix->objectlock);
+
+ LOG_V( "end\n");
+
+ return MIX_RESULT_SUCCESS;
+}
+
+/**
+ * mix_videofmtenc_preview_eos:
+ * @mix: the encoder object
+ *
+ * Signal end-of-stream. Preview has no codec-specific EOS work yet;
+ * this simply chains up to the parent class eos handler when present.
+ *
+ * Returns: result of the parent eos, MIX_RESULT_SUCCESS if there is
+ * none, or MIX_RESULT_NULL_PTR if @mix is NULL.
+ */
+MIX_RESULT mix_videofmtenc_preview_eos(MixVideoFormatEnc *mix) {
+
+ /* TODO: add codes for preview */
+
+ /* TODO: decide if we need to chainup parent method.
+ * if we do, the following is the code:
+ */
+
+ LOG_V( "\n");
+
+ if (mix == NULL) {
+ LOG_E( "mix == NULL\n");
+ return MIX_RESULT_NULL_PTR;
+ }
+
+ if (parent_class->eos) {
+ return parent_class->eos(mix);
+ }
+ return MIX_RESULT_SUCCESS;
+}
+
+/**
+ * mix_videofmtenc_preview_deinitialize:
+ * @mix: the encoder object
+ *
+ * Tear down the encoder: unref held frames, free surface arrays,
+ * destroy the VA context and config, then chain up to the parent
+ * deinitialize. Runs under the object lock.
+ *
+ * Returns: MIX_RESULT_SUCCESS, MIX_RESULT_NULL_PTR, or MIX_RESULT_FAIL
+ * if a libva destroy call fails.
+ */
+MIX_RESULT mix_videofmtenc_preview_deinitialize(MixVideoFormatEnc *mix) {
+
+ MixVideoFormatEnc *parent = NULL;
+ VAStatus va_status;
+
+ LOG_V( "Begin\n");
+
+ if (mix == NULL) {
+ LOG_E( "mix == NULL\n");
+ return MIX_RESULT_NULL_PTR;
+ }
+
+ parent = MIX_VIDEOFORMATENC(&(mix->parent));
+ MixVideoFormatEnc_Preview *self = MIX_VIDEOFORMATENC_PREVIEW(mix);
+
+ LOG_V( "Release frames\n");
+
+ g_mutex_lock(parent->objectlock);
+
+#if 0
+ /*unref the current source surface*/
+ if (self->cur_fame != NULL)
+ {
+ mix_videoframe_unref (self->cur_fame);
+ self->cur_fame = NULL;
+ }
+#endif
+
+ /*unref the reconstructed surface*/
+ if (self->rec_fame != NULL)
+ {
+ mix_videoframe_unref (self->rec_fame);
+ self->rec_fame = NULL;
+ }
+
+ /*unref the reference surface*/
+ if (self->ref_fame != NULL)
+ {
+ mix_videoframe_unref (self->ref_fame);
+ self->ref_fame = NULL;
+ }
+
+ LOG_V( "Release surfaces\n");
+
+ if (self->ci_shared_surfaces)
+ {
+ g_free (self->ci_shared_surfaces);
+ self->ci_shared_surfaces = NULL;
+ }
+
+ if (self->surfaces)
+ {
+ g_free (self->surfaces);
+ self->surfaces = NULL;
+ }
+
+ LOG_V( "vaDestroyContext\n");
+
+ va_status = vaDestroyContext (parent->va_display, parent->va_context);
+ if (va_status != VA_STATUS_SUCCESS)
+ {
+ LOG_E(
+ "Failed vaDestroyContext\n");
+ g_mutex_unlock(parent->objectlock);
+ return MIX_RESULT_FAIL;
+ }
+
+ LOG_V( "vaDestroyConfig\n");
+
+ va_status = vaDestroyConfig (parent->va_display, parent->va_config);
+ if (va_status != VA_STATUS_SUCCESS)
+ {
+ LOG_E(
+ "Failed vaDestroyConfig\n");
+ g_mutex_unlock(parent->objectlock);
+ return MIX_RESULT_FAIL;
+ }
+
+ /* Fix: mark the object NOT initialized after teardown; the previous
+ * code set this to TRUE (copy-paste from initialize), leaving the
+ * object claiming to be ready while its VA state was destroyed. */
+ parent->initialized = FALSE;
+
+ g_mutex_unlock(parent->objectlock);
+
+#if 1
+ if (parent_class->deinitialize) {
+ return parent_class->deinitialize(mix);
+ }
+#endif
+
+ //Most stuff is cleaned up in parent_class->finalize()
+
+ LOG_V( "end\n");
+
+ return MIX_RESULT_SUCCESS;
+}
+
+
+/**
+ * mix_videofmtenc_preview_process_encode:
+ * @mix: the preview encoder object
+ * @bufin: raw input buffer (YUV420 data, or a CI frame index in
+ *         share-buffer mode)
+ * @iovout: output vector; receives a 4-byte zeroed placeholder packet
+ *
+ * Copy the input into the current VA surface (converting YUV420 to
+ * NV12 in non-share-buffer mode) and enqueue the frame for display if
+ * requested. The caller (mix_videofmtenc_preview_encode) holds
+ * parent->objectlock.
+ *
+ * Returns: MIX_RESULT_SUCCESS, MIX_RESULT_NULL_PTR,
+ * MIX_RESULT_NO_MEMORY, or MIX_RESULT_FAIL.
+ */
+MIX_RESULT mix_videofmtenc_preview_process_encode (MixVideoFormatEnc_Preview *mix,
+ MixBuffer * bufin, MixIOVec * iovout)
+{
+
+ MIX_RESULT ret = MIX_RESULT_SUCCESS;
+ VAStatus va_status = VA_STATUS_SUCCESS;
+ VADisplay va_display = NULL;
+ VAContextID va_context;
+ gulong surface = 0;
+ /* NOTE(review): guint16 silently truncates picture dimensions above
+ * 65535 — presumably fine for preview, but confirm. */
+ guint16 width, height;
+
+ if ((mix == NULL) || (bufin == NULL) || (iovout == NULL)) {
+ LOG_E(
+ "mix == NUL) || bufin == NULL || iovout == NULL\n");
+ return MIX_RESULT_NULL_PTR;
+ }
+
+ LOG_V( "Begin\n");
+
+ if (MIX_IS_VIDEOFORMATENC_PREVIEW(mix))
+ {
+
+ MixVideoFormatEnc *parent = MIX_VIDEOFORMATENC(&(mix->parent));
+
+ va_display = parent->va_display;
+ va_context = parent->va_context;
+ width = parent->picture_width;
+ height = parent->picture_height;
+
+ LOG_I( "encoded_frames = %d\n",
+ mix->encoded_frames);
+ LOG_I( "is_intra = %d\n",
+ mix->is_intra);
+ LOG_I( "ci_frame_id = 0x%08x\n",
+ (guint) parent->ci_frame_id);
+
+ LOG_V(
+ "Get Surface from the pool\n");
+
+ /* currently one surface each for source, reference and
+ * reconstructed data; TODO: could be refined */
+
+ if (!parent->share_buf_mode) {
+ LOG_V(
+ "We are NOT in share buffer mode\n");
+
+ if (mix->ref_fame == NULL)
+ {
+ ret = mix_surfacepool_get(parent->surfacepool, &mix->ref_fame);
+ if (ret != MIX_RESULT_SUCCESS) //#ifdef SLEEP_SURFACE not used
+ {
+ LOG_E(
+ "Failed to mix_surfacepool_get\n");
+ return MIX_RESULT_FAIL;
+ }
+ }
+
+ if (mix->rec_fame == NULL)
+ {
+ ret = mix_surfacepool_get(parent->surfacepool, &mix->rec_fame);
+ if (ret != MIX_RESULT_SUCCESS)
+ {
+ LOG_E(
+ "Failed to mix_surfacepool_get\n");
+ return MIX_RESULT_FAIL;
+ }
+ }
+
+ /* when displaying, always grab a fresh surface for this frame */
+ if (parent->need_display) {
+ mix->cur_fame = NULL;
+ }
+
+ if (mix->cur_fame == NULL)
+ {
+ ret = mix_surfacepool_get(parent->surfacepool, &mix->cur_fame);
+ if (ret != MIX_RESULT_SUCCESS)
+ {
+ LOG_E(
+ "Failed to mix_surfacepool_get\n");
+ return MIX_RESULT_FAIL;
+ }
+ }
+
+ LOG_V( "Get Surface Done\n");
+
+ VAImage src_image;
+ guint8 *pvbuf;
+ guint8 *dst_y;
+ guint8 *dst_uv;
+ int i,j;
+
+ LOG_V(
+ "map source data to surface\n");
+
+ ret = mix_videoframe_get_frame_id(mix->cur_fame, &surface);
+ if (ret != MIX_RESULT_SUCCESS)
+ {
+ LOG_E(
+ "Failed to mix_videoframe_get_frame_id\n");
+ return MIX_RESULT_FAIL;
+ }
+
+ LOG_I(
+ "surface id = 0x%08x\n", (guint) surface);
+
+ va_status = vaDeriveImage(va_display, surface, &src_image);
+ //need to destroy
+
+ if (va_status != VA_STATUS_SUCCESS)
+ {
+ LOG_E(
+ "Failed to vaDeriveImage\n");
+ return MIX_RESULT_FAIL;
+ }
+
+ VAImage *image = &src_image;
+
+ LOG_V( "vaDeriveImage Done\n");
+
+ va_status = vaMapBuffer (va_display, image->buf, (void **)&pvbuf);
+ if (va_status != VA_STATUS_SUCCESS)
+ {
+ LOG_E( "Failed to vaMapBuffer\n");
+ /* Fix: destroy the derived image on this path too;
+ * previously it leaked on map failure. */
+ vaDestroyImage(va_display, src_image.image_id);
+ return MIX_RESULT_FAIL;
+ }
+
+ LOG_V(
+ "vaImage information\n");
+ LOG_I(
+ "image->pitches[0] = %d\n", image->pitches[0]);
+ LOG_I(
+ "image->pitches[1] = %d\n", image->pitches[1]);
+ LOG_I(
+ "image->offsets[0] = %d\n", image->offsets[0]);
+ LOG_I(
+ "image->offsets[1] = %d\n", image->offsets[1]);
+ LOG_I(
+ "image->num_planes = %d\n", image->num_planes);
+ LOG_I(
+ "image->width = %d\n", image->width);
+ LOG_I(
+ "image->height = %d\n", image->height);
+
+ LOG_I(
+ "input buf size = %d\n", bufin->size);
+
+ guint8 *inbuf = bufin->data;
+
+ /* convert planar YUV420 to NV12: copy Y rows, then
+ * interleave the U and V planes into the UV plane */
+ dst_y = pvbuf +image->offsets[0];
+
+ for (i = 0; i < height; i ++) {
+ memcpy (dst_y, inbuf + i * width, width);
+ dst_y += image->pitches[0];
+ }
+
+ dst_uv = pvbuf + image->offsets[1];
+
+ for (i = 0; i < height / 2; i ++) {
+ for (j = 0; j < width; j+=2) {
+ dst_uv [j] = inbuf [width * height + i * width / 2 + j / 2];
+ dst_uv [j + 1] =
+ inbuf [width * height * 5 / 4 + i * width / 2 + j / 2];
+ }
+ dst_uv += image->pitches[1];
+ }
+
+ /* Fix: capture the return value — previously the stale status
+ * from vaMapBuffer was checked, so unmap failures were never
+ * detected. */
+ va_status = vaUnmapBuffer(va_display, image->buf);
+ if (va_status != VA_STATUS_SUCCESS)
+ {
+ LOG_E(
+ "Failed to vaUnmapBuffer\n");
+ return MIX_RESULT_FAIL;
+ }
+
+ va_status = vaDestroyImage(va_display, src_image.image_id);
+ if (va_status != VA_STATUS_SUCCESS)
+ {
+ LOG_E(
+ "Failed to vaDestroyImage\n");
+ return MIX_RESULT_FAIL;
+ }
+
+ LOG_V(
+ "Map source data to surface done\n");
+
+ }
+
+ else {//if (!parent->share_buf_mode)
+
+ /* NOTE(review): 'frame' is only a carrier for the CI frame
+ * index and is never released on any path — looks like a
+ * leak; confirm against mix_videoframe_unref() semantics
+ * before freeing it here. */
+ MixVideoFrame * frame = mix_videoframe_new();
+
+ if (mix->ref_fame == NULL)
+ {
+ ret = mix_videoframe_set_ci_frame_idx (frame, mix->surface_num - 1);
+
+ ret = mix_surfacepool_get_frame_with_ci_frameidx
+ (parent->surfacepool, &mix->ref_fame, frame);
+ if (ret != MIX_RESULT_SUCCESS) //#ifdef SLEEP_SURFACE not used
+ {
+ LOG_E(
+ "get reference surface from pool failed\n");
+ return MIX_RESULT_FAIL;
+ }
+ }
+
+ if (mix->rec_fame == NULL)
+ {
+ ret = mix_videoframe_set_ci_frame_idx (frame, mix->surface_num - 2);
+
+ ret = mix_surfacepool_get_frame_with_ci_frameidx
+ (parent->surfacepool, &mix->rec_fame, frame);
+
+ if (ret != MIX_RESULT_SUCCESS)
+ {
+ LOG_E(
+ "get recontructed surface from pool failed\n");
+ return MIX_RESULT_FAIL;
+ }
+ }
+
+ //mix_videoframe_unref (mix->cur_fame);
+
+ if (parent->need_display) {
+ mix->cur_fame = NULL;
+ }
+
+ if (mix->cur_fame == NULL)
+ {
+ guint ci_idx;
+ memcpy (&ci_idx, bufin->data, bufin->size);
+
+ LOG_I(
+ "surface_num = %d\n", mix->surface_num);
+ LOG_I(
+ "ci_frame_idx = %d\n", ci_idx);
+
+ /* Fix: guard surface_num < 2 first; the old unsigned
+ * expression (surface_num - 2) wrapped around when
+ * surface_num was 0 or 1, accepting any index. */
+ if (mix->surface_num < 2 || ci_idx > mix->surface_num - 2) {
+ LOG_E(
+ "the CI frame idx is too bigger than CI frame number\n");
+ return MIX_RESULT_FAIL;
+ }
+
+ ret = mix_videoframe_set_ci_frame_idx (frame, ci_idx);
+
+ ret = mix_surfacepool_get_frame_with_ci_frameidx
+ (parent->surfacepool, &mix->cur_fame, frame);
+
+ if (ret != MIX_RESULT_SUCCESS)
+ {
+ LOG_E(
+ "get current working surface from pool failed\n");
+ return MIX_RESULT_FAIL;
+ }
+ }
+
+ ret = mix_videoframe_get_frame_id(mix->cur_fame, &surface);
+
+ }
+
+ LOG_V( "vaBeginPicture\n");
+ LOG_I( "va_context = 0x%08x\n",(guint)va_context);
+ LOG_I( "surface = 0x%08x\n",(guint)surface);
+ LOG_I( "va_display = 0x%08x\n",(guint)va_display);
+
+ /* preview produces no real bitstream; emit a 4-byte zeroed
+ * placeholder packet so downstream plumbing stays uniform */
+ iovout->data_size = 4;
+ iovout->data = g_malloc (iovout->data_size);
+ if (iovout->data == NULL) {
+ return MIX_RESULT_NO_MEMORY;
+ }
+
+ memset (iovout->data, 0, iovout->data_size);
+
+ iovout->buffer_size = iovout->data_size;
+
+ if (parent->need_display) {
+ ret = mix_framemanager_enqueue(parent->framemgr, mix->cur_fame);
+ if (ret != MIX_RESULT_SUCCESS)
+ {
+ LOG_E(
+ "Failed mix_framemanager_enqueue\n");
+ return MIX_RESULT_FAIL;
+ }
+ }
+
+ /* frame manager owns the display path; otherwise drop our ref now */
+ if (!(parent->need_display)) {
+ mix_videoframe_unref (mix->cur_fame);
+ mix->cur_fame = NULL;
+ }
+
+ mix->encoded_frames ++;
+ }
+ else
+ {
+ LOG_E(
+ "not Preview video encode Object\n");
+ return MIX_RESULT_FAIL;
+ }
+
+ LOG_V( "end\n");
+
+ return MIX_RESULT_SUCCESS;
+}
diff --git a/mix_video/src/mixvideoformatenc_preview.h b/mix_video/src/mixvideoformatenc_preview.h
new file mode 100644
index 0000000..dd404e2
--- /dev/null
+++ b/mix_video/src/mixvideoformatenc_preview.h
@@ -0,0 +1,133 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel's prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+#ifndef __MIX_VIDEOFORMATENC_PREVIEW_H__
+#define __MIX_VIDEOFORMATENC_PREVIEW_H__
+
+#include "mixvideoformatenc.h"
+#include "mixvideoframe_private.h"
+
+#define MIX_VIDEO_ENC_PREVIEW_SURFACE_NUM 20
+
+#define min(X,Y) (((X) < (Y)) ? (X) : (Y))
+#define max(X,Y) (((X) > (Y)) ? (X) : (Y))
+
+/*
+ * Type macros.
+ */
+#define MIX_TYPE_VIDEOFORMATENC_PREVIEW (mix_videoformatenc_preview_get_type ())
+#define MIX_VIDEOFORMATENC_PREVIEW(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_VIDEOFORMATENC_PREVIEW, MixVideoFormatEnc_Preview))
+#define MIX_IS_VIDEOFORMATENC_PREVIEW(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEOFORMATENC_PREVIEW))
+#define MIX_VIDEOFORMATENC_PREVIEW_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_VIDEOFORMATENC_PREVIEW, MixVideoFormatEnc_PreviewClass))
+#define MIX_IS_VIDEOFORMATENC_PREVIEW_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEOFORMATENC_PREVIEW))
+#define MIX_VIDEOFORMATENC_PREVIEW_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEOFORMATENC_PREVIEW, MixVideoFormatEnc_PreviewClass))
+
+typedef struct _MixVideoFormatEnc_Preview MixVideoFormatEnc_Preview;
+typedef struct _MixVideoFormatEnc_PreviewClass MixVideoFormatEnc_PreviewClass;
+
+/* Instance structure for the preview (pass-through) video encoder. */
+struct _MixVideoFormatEnc_Preview {
+ /*< public > */
+ MixVideoFormatEnc parent;
+
+ /* libva buffer handles used during encode */
+ VABufferID coded_buf;
+ VABufferID seq_param_buf;
+ VABufferID pic_param_buf;
+ VABufferID slice_param_buf;
+ /* surface arrays; ci_shared_surfaces is used in share-buffer mode */
+ VASurfaceID * ci_shared_surfaces;
+ VASurfaceID * surfaces;
+ guint surface_num;
+
+ MixVideoFrame *cur_fame; //current input frame to be encoded;
+ MixVideoFrame *ref_fame; //reference frame
+ MixVideoFrame *rec_fame; //reconstructed frame;
+
+ guint basic_unit_size; //for rate control
+ guint disable_deblocking_filter_idc;
+ guint slice_num;
+ guint va_rcmode;
+
+ /* running count of frames processed since init/flush */
+ guint encoded_frames;
+ gboolean pic_skipped;
+
+ gboolean is_intra;
+
+ /* size in bytes of the coded output buffer */
+ guint coded_buf_size;
+
+ /*< public > */
+};
+
+/**
+ * MixVideoFormatEnc_PreviewClass:
+ *
+ * MI-X Video object class
+ */
+struct _MixVideoFormatEnc_PreviewClass {
+ /*< public > */
+ MixVideoFormatEncClass parent_class;
+
+ /* class members */
+
+ /*< public > */
+};
+
+/**
+ * mix_videoformatenc_preview_get_type:
+ * @returns: type
+ *
+ * Get the type of object.
+ */
+GType mix_videoformatenc_preview_get_type(void);
+
+/**
+ * mix_videoformatenc_preview_new:
+ * @returns: A newly allocated instance of #MixVideoFormatEnc_Preview
+ *
+ * Use this method to create new instance of #MixVideoFormatEnc_Preview
+ */
+MixVideoFormatEnc_Preview *mix_videoformatenc_preview_new(void);
+
+/**
+ * mix_videoformatenc_preview_ref:
+ * @mix: object to add reference
+ * @returns: the MixVideoFormatEnc_Preview instance where reference count has been increased.
+ *
+ * Add reference count.
+ */
+MixVideoFormatEnc_Preview *mix_videoformatenc_preview_ref(MixVideoFormatEnc_Preview * mix);
+
+/**
+ * mix_videoformatenc_preview_unref:
+ * @obj: object to unref.
+ *
+ * Decrement reference count of the object.
+ */
+#define mix_videoformatenc_preview_unref(obj) g_object_unref (G_OBJECT(obj))
+
+/* Class Methods */
+
+/* Pure preview vmethods */
+MIX_RESULT mix_videofmtenc_preview_getcaps(MixVideoFormatEnc *mix, GString *msg);
+MIX_RESULT mix_videofmtenc_preview_initialize(MixVideoFormatEnc *mix,
+ MixVideoConfigParamsEnc * config_params_enc,
+ MixFrameManager * frame_mgr,
+ MixBufferPool * input_buf_pool,
+ MixSurfacePool ** surface_pool,
+ VADisplay va_display);
+MIX_RESULT mix_videofmtenc_preview_encode(MixVideoFormatEnc *mix, MixBuffer * bufin[],
+ gint bufincnt, MixIOVec * iovout[], gint iovoutcnt,
+ MixVideoEncodeParams * encode_params);
+MIX_RESULT mix_videofmtenc_preview_flush(MixVideoFormatEnc *mix);
+MIX_RESULT mix_videofmtenc_preview_eos(MixVideoFormatEnc *mix);
+MIX_RESULT mix_videofmtenc_preview_deinitialize(MixVideoFormatEnc *mix);
+
+/* Local Methods */
+MIX_RESULT mix_videofmtenc_preview_process_encode (MixVideoFormatEnc_Preview *mix, MixBuffer * bufin,
+ MixIOVec * iovout);
+
+#endif /* __MIX_VIDEOFORMATENC_PREVIEW_H__ */
diff --git a/mix_video/src/mixvideoformatqueue.h b/mix_video/src/mixvideoformatqueue.h
new file mode 100644
index 0000000..5594aba
--- /dev/null
+++ b/mix_video/src/mixvideoformatqueue.h
@@ -0,0 +1,24 @@
+/*
+INTEL CONFIDENTIAL
+Copyright 2009 Intel Corporation All Rights Reserved.
+The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel's prior express written permission.
+
+No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+*/
+
+#ifndef __MIX_VIDEOFORMATQUEUE_H__
+#define __MIX_VIDEOFORMATQUEUE_H__
+
+#include "mixbuffer.h"
+
+typedef struct _MixInputBufferEntry MixInputBufferEntry;
+
+/* One queued input buffer together with its presentation timestamp. */
+struct _MixInputBufferEntry
+{
+ /*< private > */
+ MixBuffer *buf;
+ /* presentation timestamp associated with buf */
+ guint64 timestamp;
+
+};
+
+#endif /* __MIX_VIDEOFORMATQUEUE_H__ */
diff --git a/mix_video/src/mixvideoframe.c b/mix_video/src/mixvideoframe.c
new file mode 100644
index 0000000..2bea5d0
--- /dev/null
+++ b/mix_video/src/mixvideoframe.c
@@ -0,0 +1,391 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+/**
+ * SECTION:mixvideoframe
+ * @short_description: VideoConfig parameters
+ *
+ * A data object which stores videoconfig specific parameters.
+ */
+
+
+#include <va/va.h>
+#include <va/va_x11.h>
+#include "mixvideolog.h"
+#include "mixvideoframe.h"
+#include "mixvideoframe_private.h"
+
+#define SAFE_FREE(p) if(p) { g_free(p); p = NULL; }
+
+static GType _mix_videoframe_type = 0;
+static MixParamsClass *parent_class = NULL;
+
+#define _do_init { _mix_videoframe_type = g_define_type_id; }
+
+gboolean mix_videoframe_copy(MixParams * target, const MixParams * src);
+MixParams *mix_videoframe_dup(const MixParams * obj);
+gboolean mix_videoframe_equal(MixParams * first, MixParams * second);
+static void mix_videoframe_finalize(MixParams * obj);
+
+G_DEFINE_TYPE_WITH_CODE (MixVideoFrame, mix_videoframe, MIX_TYPE_PARAMS,
+ _do_init);
+
+#define VIDEOFRAME_PRIVATE(self) ((MixVideoFramePrivate *)((self)->reserved1))
+/* Instance initializer: set default field values, attach the private
+ * structure via reserved1, and initialize the recursive ref lock. */
+static void mix_videoframe_init(MixVideoFrame * self) {
+ /* initialize properties here */
+ self->frame_id = VA_INVALID_SURFACE;
+ self->timestamp = 0;
+ self->discontinuity = FALSE;
+ self->frame_structure = VA_FRAME_PICTURE;
+
+ /* stash the private struct in reserved1 so VIDEOFRAME_PRIVATE()
+ * can reach it without another GType lookup */
+ MixVideoFramePrivate *priv = MIX_VIDEOFRAME_GET_PRIVATE(self);
+ self->reserved1 = priv;
+ self->reserved2 = NULL;
+ self->reserved3 = NULL;
+ self->reserved4 = NULL;
+
+ /* set pool pointer in private structure to NULL */
+ priv -> pool = NULL;
+
+ /* set stuff for skipped frames */
+ priv -> is_skipped = FALSE;
+ priv -> real_frame = NULL;
+
+ /* recursive mutex guarding ref/unref bookkeeping */
+ g_static_rec_mutex_init (&priv -> lock);
+
+}
+
+/* Class initializer: hook up the MixParams vfuncs (finalize/copy/dup/
+ * equal) and register the private-structure size with the GType system. */
+static void mix_videoframe_class_init(MixVideoFrameClass * klass) {
+ MixParamsClass *mixparams_class = MIX_PARAMS_CLASS(klass);
+
+ /* setup static parent class */
+ parent_class = (MixParamsClass *) g_type_class_peek_parent(klass);
+
+ mixparams_class->finalize = mix_videoframe_finalize;
+ mixparams_class->copy = (MixParamsCopyFunction) mix_videoframe_copy;
+ mixparams_class->dup = (MixParamsDupFunction) mix_videoframe_dup;
+ mixparams_class->equal = (MixParamsEqualFunction) mix_videoframe_equal;
+
+ /* Register and allocate the space the private structure for this object */
+ g_type_class_add_private(mixparams_class, sizeof(MixVideoFramePrivate));
+
+}
+
+MixVideoFrame *
+mix_videoframe_new(void) {
+ /* Allocate a fresh MixVideoFrame through the GType system;
+ * mix_videoframe_init() runs as part of instance creation. */
+ return (MixVideoFrame *) g_type_create_instance(MIX_TYPE_VIDEOFRAME);
+}
+
+/* Finalizer: release the private recursive mutex, then chain up so the
+ * parent MixParams class can free the rest of the object. */
+void mix_videoframe_finalize(MixParams * obj) {
+ /* clean up here. */
+ MixVideoFrame *self = MIX_VIDEOFRAME (obj);
+ MixVideoFramePrivate *priv = VIDEOFRAME_PRIVATE(self);
+
+ g_static_rec_mutex_free (&priv->lock);
+
+ /* Chain up parent */
+ if (parent_class->finalize) {
+ parent_class->finalize(obj);
+ }
+}
+
+/* Take a reference on @obj under the private recursive lock so the
+ * refcount log line and the actual increment stay consistent.
+ * Returns @obj (as returned by mix_params_ref). */
+MixVideoFrame *
+mix_videoframe_ref(MixVideoFrame * obj) {
+
+ MixVideoFrame *ret = NULL;
+ MixVideoFramePrivate *priv = VIDEOFRAME_PRIVATE(obj);
+ g_static_rec_mutex_lock(&priv->lock);
+ LOG_I("obj %x, new refcount is %d\n", (guint) obj,
+ MIX_PARAMS(obj)->refcount + 1);
+
+ ret = (MixVideoFrame *) mix_params_ref(MIX_PARAMS(obj));
+ g_static_rec_mutex_unlock (&priv->lock);
+ return ret;
+}
+
+/* Drop a reference on @obj. Special-cased behavior, all under the
+ * private recursive lock:
+ *  - when the count is about to drop from 2 to 1 on a real (non-skipped)
+ *    frame, the frame is returned to its surface pool;
+ *  - when a skipped frame is about to reach 0, its underlying real
+ *    frame is unref'd as well;
+ *  - the actual decrement is delegated to mix_params_unref(). */
+void mix_videoframe_unref(MixVideoFrame * obj) {
+
+ if(obj == NULL) {
+ LOG_E("obj is NULL\n");
+ return;
+ }
+
+ MixVideoFramePrivate *priv = VIDEOFRAME_PRIVATE(obj);
+ g_static_rec_mutex_lock(&priv->lock);
+
+ LOG_I("obj %x, frame id %d, new refcount is %d\n", (guint) obj,
+ (guint) obj->frame_id, MIX_PARAMS(obj)->refcount - 1);
+
+ // Check if we have reduced to 1, in which case we add ourselves to free pool
+ // but only do this for real frames, not skipped frames
+ if (((MIX_PARAMS(obj)->refcount - 1) == 1) && (!(priv -> is_skipped))) {
+
+ LOG_I("Adding obj %x, frame id %d back to pool\n", (guint) obj,
+ (guint) obj->frame_id);
+
+ MixSurfacePool *pool = NULL;
+ pool = priv -> pool;
+ if(pool == NULL) {
+ /* frame was never attached to a pool; nothing to return it to */
+ LOG_E("pool is NULL\n");
+ g_static_rec_mutex_unlock (&priv->lock);
+ return;
+ }
+ mix_surfacepool_put(pool, obj);
+ }
+
+ //If this is a skipped frame that is being deleted, release the real frame
+ if (((MIX_PARAMS(obj)->refcount - 1) == 0) && (priv -> is_skipped)) {
+
+ LOG_I("skipped frame obj %x, releasing real frame %x \n",
+ (guint) obj, (guint) priv->real_frame);
+
+ mix_videoframe_unref(priv -> real_frame);
+ }
+
+ // Unref through base class
+ mix_params_unref(MIX_PARAMS(obj));
+ g_static_rec_mutex_unlock (&priv->lock);
+}
+
+/**
+ * mix_videoframe_dup:
+ * @obj: a #MixVideoFrame object
+ * @returns: a newly allocated duplicate of the object.
+ *
+ * Copy duplicate of the object.
+ */
+MixParams *
+mix_videoframe_dup(const MixParams * obj) {
+ /* Guard clause: only MixVideoFrame instances can be duplicated. */
+ if (!MIX_IS_VIDEOFRAME(obj))
+ return NULL;
+
+ MixVideoFrame *copy = mix_videoframe_new();
+ if (!mix_videoframe_copy(MIX_PARAMS(copy), MIX_PARAMS(obj))) {
+ /* copy failed: release the half-built duplicate */
+ mix_videoframe_unref(copy);
+ return NULL;
+ }
+ return MIX_PARAMS(copy);
+}
+
+/**
+ * mix_videoframe_copy:
+ * @target: copy to target
+ * @src: copy from src
+ * @returns: boolean indicates if copy is successful.
+ *
+ * Copy instance data from @src to @target.
+ */
+/* Copy the MixVideoFrame-level fields from @src to @target, then chain
+ * up so the base class can copy its own state. Returns FALSE when
+ * either argument is not a MixVideoFrame. Note: the private-struct
+ * fields (pool, is_skipped, real_frame) are NOT copied. */
+gboolean mix_videoframe_copy(MixParams * target, const MixParams * src) {
+ MixVideoFrame *this_target, *this_src;
+
+ if (MIX_IS_VIDEOFRAME(target) && MIX_IS_VIDEOFRAME(src)) {
+ // Cast the base object to this child object
+ this_target = MIX_VIDEOFRAME(target);
+ this_src = MIX_VIDEOFRAME(src);
+
+ // copy the per-frame scalar fields
+ this_target->frame_id = this_src->frame_id;
+ this_target->timestamp = this_src->timestamp;
+ this_target->discontinuity = this_src->discontinuity;
+ this_target->frame_structure = this_src->frame_structure;
+
+ // Now chainup base class
+ if (parent_class->copy) {
+ return parent_class->copy(MIX_PARAMS_CAST(target), MIX_PARAMS_CAST(
+ src));
+ } else {
+ return TRUE;
+ }
+ }
+ return FALSE;
+}
+
+/**
+ * mix_videoframe_equal:
+ * @first: first object to compare
+ * @second: seond object to compare
+ * @returns: boolean indicates if instance are equal.
+ *
+ * Copy instance data from @src to @target.
+ */
+/* Deep-compare two MixVideoFrame objects: the frame-level scalar fields
+ * must match, then the comparison chains up to the base class. Returns
+ * FALSE when either argument is not a MixVideoFrame. */
+gboolean mix_videoframe_equal(MixParams * first, MixParams * second) {
+ gboolean ret = FALSE;
+ MixVideoFrame *this_first, *this_second;
+
+ if (MIX_IS_VIDEOFRAME(first) && MIX_IS_VIDEOFRAME(second)) {
+ // Deep compare
+ // Cast the base object to this child object
+
+ this_first = MIX_VIDEOFRAME(first);
+ this_second = MIX_VIDEOFRAME(second);
+
+ /* TODO: add comparison for other properties */
+ if (this_first->frame_id == this_second->frame_id
+ && this_first->timestamp == this_second->timestamp
+ && this_first->discontinuity == this_second->discontinuity
+ && this_first->frame_structure == this_second->frame_structure) {
+ // members within this scope equal. chaining up.
+ MixParamsClass *klass = MIX_PARAMS_CLASS(parent_class);
+ if (klass->equal)
+ ret = klass->equal(first, second);
+ else
+ ret = TRUE;
+ }
+ }
+
+ return ret;
+}
+
+#define MIX_VIDEOFRAME_SETTER_CHECK_INPUT(obj) \
+ if(!obj) return MIX_RESULT_NULL_PTR; \
+ if(!MIX_IS_VIDEOFRAME(obj)) return MIX_RESULT_FAIL; \
+
+#define MIX_VIDEOFRAME_GETTER_CHECK_INPUT(obj, prop) \
+ if(!obj || !prop) return MIX_RESULT_NULL_PTR; \
+ if(!MIX_IS_VIDEOFRAME(obj)) return MIX_RESULT_FAIL; \
+
+
+/* TODO: Add getters and setters for other properties. The following is just an exmaple, not implemented yet. */
+MIX_RESULT mix_videoframe_set_frame_id(MixVideoFrame * obj, gulong frame_id) {
+ MIX_VIDEOFRAME_SETTER_CHECK_INPUT (obj);
+ obj->frame_id = frame_id;
+
+ return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoframe_get_frame_id(MixVideoFrame * obj, gulong * frame_id) {
+ MIX_VIDEOFRAME_GETTER_CHECK_INPUT (obj, frame_id);
+ *frame_id = obj->frame_id;
+ return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoframe_set_ci_frame_idx (MixVideoFrame * obj, guint ci_frame_idx) {
+ MIX_VIDEOFRAME_SETTER_CHECK_INPUT (obj);
+ obj->ci_frame_idx = ci_frame_idx;
+
+ return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoframe_get_ci_frame_idx (MixVideoFrame * obj, guint * ci_frame_idx) {
+ MIX_VIDEOFRAME_GETTER_CHECK_INPUT (obj, ci_frame_idx);
+ *ci_frame_idx = obj->ci_frame_idx;
+ return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoframe_set_timestamp(MixVideoFrame * obj, guint64 timestamp) {
+ MIX_VIDEOFRAME_SETTER_CHECK_INPUT (obj);
+
+ obj->timestamp = timestamp;
+
+ return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoframe_get_timestamp(MixVideoFrame * obj,
+ guint64 * timestamp) {
+ MIX_VIDEOFRAME_GETTER_CHECK_INPUT (obj, timestamp);
+ *timestamp = obj->timestamp;
+ return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoframe_set_discontinuity(MixVideoFrame * obj,
+ gboolean discontinuity) {
+ MIX_VIDEOFRAME_SETTER_CHECK_INPUT (obj);
+ obj->discontinuity = discontinuity;
+ return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoframe_get_discontinuity(MixVideoFrame * obj,
+ gboolean * discontinuity) {
+ MIX_VIDEOFRAME_GETTER_CHECK_INPUT (obj, discontinuity);
+ *discontinuity = obj->discontinuity;
+ return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoframe_set_frame_structure(MixVideoFrame * obj,
+ guint32 frame_structure) {
+ MIX_VIDEOFRAME_SETTER_CHECK_INPUT (obj);
+ obj->frame_structure = frame_structure;
+ return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoframe_get_frame_structure(MixVideoFrame * obj,
+ guint32* frame_structure) {
+ MIX_VIDEOFRAME_GETTER_CHECK_INPUT (obj, frame_structure);
+ *frame_structure = obj->frame_structure;
+ return MIX_RESULT_SUCCESS;
+}
+
+/* Record the surface pool this frame belongs to (used by unref to
+ * return the frame). Does not take a reference on @pool. */
+MIX_RESULT mix_videoframe_set_pool(MixVideoFrame * obj, MixSurfacePool * pool) {
+
+ /* Fix: validate obj like every other setter does; previously a NULL
+ * or wrong-typed obj crashed inside VIDEOFRAME_PRIVATE(). */
+ MIX_VIDEOFRAME_SETTER_CHECK_INPUT (obj);
+
+ /* set pool pointer in private structure */
+ VIDEOFRAME_PRIVATE(obj) -> pool = pool;
+
+ return MIX_RESULT_SUCCESS;
+}
+
+/* Store the frame type (I/P/B ...) in the private structure. */
+MIX_RESULT mix_videoframe_set_frame_type(MixVideoFrame *obj,
+ MixFrameType frame_type) {
+
+ /* Fix: validate obj like every other setter does; previously a NULL
+ * or wrong-typed obj crashed inside VIDEOFRAME_PRIVATE(). */
+ MIX_VIDEOFRAME_SETTER_CHECK_INPUT (obj);
+
+ VIDEOFRAME_PRIVATE(obj) -> frame_type = frame_type;
+
+ return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoframe_get_frame_type(MixVideoFrame *obj,
+ MixFrameType *frame_type) {
+
+ MIX_VIDEOFRAME_GETTER_CHECK_INPUT(obj, frame_type);
+
+ *frame_type = VIDEOFRAME_PRIVATE(obj) -> frame_type;
+
+ return MIX_RESULT_SUCCESS;
+
+}
+
+/* Mark whether this frame is a skipped frame (affects unref behavior). */
+MIX_RESULT mix_videoframe_set_is_skipped(MixVideoFrame *obj,
+ gboolean is_skipped) {
+
+ /* Fix: validate obj like every other setter does; previously a NULL
+ * or wrong-typed obj crashed inside VIDEOFRAME_PRIVATE(). */
+ MIX_VIDEOFRAME_SETTER_CHECK_INPUT (obj);
+
+ VIDEOFRAME_PRIVATE(obj) -> is_skipped = is_skipped;
+
+ return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoframe_get_is_skipped(MixVideoFrame *obj,
+ gboolean *is_skipped) {
+
+ MIX_VIDEOFRAME_GETTER_CHECK_INPUT(obj, is_skipped);
+
+ *is_skipped = VIDEOFRAME_PRIVATE(obj) -> is_skipped;
+
+ return MIX_RESULT_SUCCESS;
+}
+
+/* Attach the underlying real frame to a skipped frame; the real frame
+ * is unref'd when the skipped frame is destroyed. Does not take a
+ * reference on @real. */
+MIX_RESULT mix_videoframe_set_real_frame(MixVideoFrame *obj,
+ MixVideoFrame *real) {
+
+ /* Fix: validate obj like every other setter does; previously a NULL
+ * or wrong-typed obj crashed inside VIDEOFRAME_PRIVATE(). */
+ MIX_VIDEOFRAME_SETTER_CHECK_INPUT (obj);
+
+ VIDEOFRAME_PRIVATE(obj) -> real_frame = real;
+
+ return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoframe_get_real_frame(MixVideoFrame *obj,
+ MixVideoFrame **real) {
+
+ MIX_VIDEOFRAME_GETTER_CHECK_INPUT(obj, real);
+
+ *real = VIDEOFRAME_PRIVATE(obj) -> real_frame;
+
+ return MIX_RESULT_SUCCESS;
+}
+
diff --git a/mix_video/src/mixvideoframe.h b/mix_video/src/mixvideoframe.h
new file mode 100644
index 0000000..02338dd
--- /dev/null
+++ b/mix_video/src/mixvideoframe.h
@@ -0,0 +1,144 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+#ifndef __MIX_VIDEOFRAME_H__
+#define __MIX_VIDEOFRAME_H__
+
+#include <mixparams.h>
+#include "mixvideodef.h"
+
+/**
+ * MIX_TYPE_VIDEOFRAME:
+ *
+ * Get type of class.
+ */
+#define MIX_TYPE_VIDEOFRAME (mix_videoframe_get_type ())
+
+/**
+ * MIX_VIDEOFRAME:
+ * @obj: object to be type-casted.
+ */
+#define MIX_VIDEOFRAME(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_VIDEOFRAME, MixVideoFrame))
+
+/**
+ * MIX_IS_VIDEOFRAME:
+ * @obj: an object.
+ *
+ * Checks if the given object is an instance of #MixVideoFrame
+ */
+#define MIX_IS_VIDEOFRAME(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEOFRAME))
+
+/**
+ * MIX_VIDEOFRAME_CLASS:
+ * @klass: class to be type-casted.
+ */
+#define MIX_VIDEOFRAME_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_VIDEOFRAME, MixVideoFrameClass))
+
+/**
+ * MIX_IS_VIDEOFRAME_CLASS:
+ * @klass: a class.
+ *
+ * Checks if the given class is #MixVideoFrameClass
+ */
+#define MIX_IS_VIDEOFRAME_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEOFRAME))
+
+/**
+ * MIX_VIDEOFRAME_GET_CLASS:
+ * @obj: a #MixVideoFrame object.
+ *
+ * Get the class instance of the object.
+ */
+#define MIX_VIDEOFRAME_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEOFRAME, MixVideoFrameClass))
+
+typedef struct _MixVideoFrame MixVideoFrame;
+typedef struct _MixVideoFrameClass MixVideoFrameClass;
+
+/**
+ * MixVideoFrame:
+ *
+ * MI-X VideoFrame Parameter object
+ */
+struct _MixVideoFrame {
+ /*< public > */
+ MixParams parent;
+
+ /*< public > */
+ gulong frame_id;
+ guint ci_frame_idx;
+ guint64 timestamp;
+ gboolean discontinuity;
+ guint32 frame_structure; // 0: frame, 1: top field, 2: bottom field
+
+ void *reserved1;
+ void *reserved2;
+ void *reserved3;
+ void *reserved4;
+};
+
+/**
+ * MixVideoFrameClass:
+ *
+ * MI-X VideoFrame object class
+ */
+struct _MixVideoFrameClass {
+ /*< public > */
+ MixParamsClass parent_class;
+
+ /* class members */
+};
+
+/**
+ * mix_videoframe_get_type:
+ * @returns: type
+ *
+ * Get the type of object.
+ */
+GType mix_videoframe_get_type(void);
+
+/**
+ * mix_videoframe_new:
+ * @returns: A newly allocated instance of #MixVideoFrame
+ *
+ * Use this method to create new instance of #MixVideoFrame
+ */
+MixVideoFrame *mix_videoframe_new(void);
+/**
+ * mix_videoframe_ref:
+ * @mix: object to add reference
+ * @returns: the MixVideoFrame instance where reference count has been increased.
+ *
+ * Add reference count.
+ */
+MixVideoFrame *mix_videoframe_ref(MixVideoFrame * obj);
+
+/**
+ * mix_videoframe_unref:
+ * @obj: object to unref.
+ *
+ * Decrement reference count of the object.
+ */
+void mix_videoframe_unref(MixVideoFrame * obj);
+
+/* Class Methods */
+
+MIX_RESULT mix_videoframe_set_frame_id(MixVideoFrame * obj, gulong frame_id);
+MIX_RESULT mix_videoframe_get_frame_id(MixVideoFrame * obj, gulong * frame_id);
+
+MIX_RESULT mix_videoframe_set_ci_frame_idx(MixVideoFrame * obj, guint ci_frame_idx);
+MIX_RESULT mix_videoframe_get_ci_frame_idx(MixVideoFrame * obj, guint * ci_frame_idx);
+
+MIX_RESULT mix_videoframe_set_timestamp(MixVideoFrame * obj, guint64 timestamp);
+MIX_RESULT mix_videoframe_get_timestamp(MixVideoFrame * obj, guint64 * timestamp);
+
+MIX_RESULT mix_videoframe_set_discontinuity(MixVideoFrame * obj, gboolean discontinuity);
+MIX_RESULT mix_videoframe_get_discontinuity(MixVideoFrame * obj, gboolean * discontinuity);
+
+MIX_RESULT mix_videoframe_set_frame_structure(MixVideoFrame * obj, guint32 frame_structure);
+MIX_RESULT mix_videoframe_get_frame_structure(MixVideoFrame * obj, guint32* frame_structure);
+
+#endif /* __MIX_VIDEOFRAME_H__ */
diff --git a/mix_video/src/mixvideoframe_private.h b/mix_video/src/mixvideoframe_private.h
new file mode 100644
index 0000000..5d4b894
--- /dev/null
+++ b/mix_video/src/mixvideoframe_private.h
@@ -0,0 +1,68 @@
+/*
+INTEL CONFIDENTIAL
+Copyright 2009 Intel Corporation All Rights Reserved.
+The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+*/
+
+#ifndef __MIX_VIDEOFRAME_PRIVATE_H__
+#define __MIX_VIDEOFRAME_PRIVATE_H__
+
+#include "mixvideoframe.h"
+#include "mixsurfacepool.h"
+
+typedef enum _MixFrameType
+{
+ TYPE_I,
+ TYPE_P,
+ TYPE_B,
+ TYPE_INVALID
+} MixFrameType;
+
+typedef struct _MixVideoFramePrivate MixVideoFramePrivate;
+
+struct _MixVideoFramePrivate
+{
+ /*< private > */
+ MixSurfacePool *pool;
+ MixFrameType frame_type;
+ gboolean is_skipped;
+ MixVideoFrame *real_frame;
+ GStaticRecMutex lock;
+};
+
+/**
+* MIX_VIDEOFRAME_GET_PRIVATE:
+*
+* Get private structure of this class.
+* @obj: class object for which to get private data.
+*/
+#define MIX_VIDEOFRAME_GET_PRIVATE(obj) \
+ (G_TYPE_INSTANCE_GET_PRIVATE ((obj), MIX_TYPE_VIDEOFRAME, MixVideoFramePrivate))
+
+
+/* Private functions */
+MIX_RESULT
+mix_videoframe_set_pool (MixVideoFrame *obj, MixSurfacePool *pool);
+
+MIX_RESULT
+mix_videoframe_set_frame_type (MixVideoFrame *obj, MixFrameType frame_type);
+
+MIX_RESULT
+mix_videoframe_get_frame_type (MixVideoFrame *obj, MixFrameType *frame_type);
+
+MIX_RESULT
+mix_videoframe_set_is_skipped (MixVideoFrame *obj, gboolean is_skipped);
+
+MIX_RESULT
+mix_videoframe_get_is_skipped (MixVideoFrame *obj, gboolean *is_skipped);
+
+MIX_RESULT
+mix_videoframe_set_real_frame (MixVideoFrame *obj, MixVideoFrame *real);
+
+MIX_RESULT
+mix_videoframe_get_real_frame (MixVideoFrame *obj, MixVideoFrame **real);
+
+
+#endif /* __MIX_VIDEOFRAME_PRIVATE_H__ */
diff --git a/mix_video/src/mixvideoinitparams.c b/mix_video/src/mixvideoinitparams.c
new file mode 100644
index 0000000..ac58548
--- /dev/null
+++ b/mix_video/src/mixvideoinitparams.c
@@ -0,0 +1,219 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+/**
+ * SECTION:mixvideoinitparams
+ * @short_description: VideoInit parameters
+ *
+ * A data object which stores videoinit specific parameters.
+ */
+
+#include "mixvideoinitparams.h"
+
+#define SAFE_FREE(p) if(p) { g_free(p); p = NULL; }
+
+static GType _mix_videoinitparams_type = 0;
+static MixParamsClass *parent_class = NULL;
+
+#define _do_init { _mix_videoinitparams_type = g_define_type_id; }
+
+gboolean mix_videoinitparams_copy(MixParams * target, const MixParams * src);
+MixParams *mix_videoinitparams_dup(const MixParams * obj);
+gboolean mix_videoinitparams_equal(MixParams * first, MixParams * second);
+static void mix_videoinitparams_finalize(MixParams * obj);
+
+G_DEFINE_TYPE_WITH_CODE (MixVideoInitParams, mix_videoinitparams,
+ MIX_TYPE_PARAMS, _do_init);
+
+static void mix_videoinitparams_init(MixVideoInitParams * self) {
+
+	/* Initialize member variables to a safe, empty state. */
+	self->display = NULL;
+	self->reserved1 = NULL;
+	self->reserved2 = NULL;
+	self->reserved3 = NULL;
+	self->reserved4 = NULL;
+}
+
+static void mix_videoinitparams_class_init(MixVideoInitParamsClass * klass) {
+ MixParamsClass *mixparams_class = MIX_PARAMS_CLASS(klass);
+
+ /* setup static parent class */
+ parent_class = (MixParamsClass *) g_type_class_peek_parent(klass);
+
+ mixparams_class->finalize = mix_videoinitparams_finalize;
+ mixparams_class->copy = (MixParamsCopyFunction) mix_videoinitparams_copy;
+ mixparams_class->dup = (MixParamsDupFunction) mix_videoinitparams_dup;
+ mixparams_class->equal = (MixParamsEqualFunction) mix_videoinitparams_equal;
+}
+
+MixVideoInitParams *
+mix_videoinitparams_new(void) {
+ MixVideoInitParams *ret = (MixVideoInitParams *) g_type_create_instance(
+ MIX_TYPE_VIDEOINITPARAMS);
+
+ return ret;
+}
+
+void mix_videoinitparams_finalize(MixParams * obj) {
+ /* clean up here. */
+
+ MixVideoInitParams *self = MIX_VIDEOINITPARAMS(obj);
+
+ /* unref display */
+ if (self->display) {
+ mix_display_unref(self->display);
+ self->display = NULL;
+ }
+
+ /* Chain up parent */
+ if (parent_class->finalize) {
+ parent_class->finalize(obj);
+ }
+}
+
+MixVideoInitParams *
+mix_videoinitparams_ref(MixVideoInitParams * mix) {
+ return (MixVideoInitParams *) mix_params_ref(MIX_PARAMS(mix));
+}
+
+/**
+ * mix_videoinitparams_dup:
+ * @obj: a #MixVideoInitParams object
+ * @returns: a newly allocated duplicate of the object.
+ *
+ * Copy duplicate of the object.
+ */
+MixParams *
+mix_videoinitparams_dup(const MixParams * obj) {
+ MixParams *ret = NULL;
+ if (MIX_IS_VIDEOINITPARAMS(obj)) {
+ MixVideoInitParams *duplicate = mix_videoinitparams_new();
+ if (mix_videoinitparams_copy(MIX_PARAMS(duplicate), MIX_PARAMS(obj))) {
+ ret = MIX_PARAMS(duplicate);
+ } else {
+ mix_videoinitparams_unref(duplicate);
+ }
+ }
+ return ret;
+}
+
+/**
+ * mix_videoinitparams_copy:
+ * @target: copy to target
+ * @src: copy from src
+ * @returns: boolean indicates if copy is successful.
+ *
+ * Copy instance data from @src to @target.
+ */
+gboolean mix_videoinitparams_copy(MixParams * target, const MixParams * src) {
+	MixVideoInitParams *this_target, *this_src;
+	if (MIX_IS_VIDEOINITPARAMS(target) && MIX_IS_VIDEOINITPARAMS(src)) {
+		/* Cast the base object to this child object */
+		this_target = MIX_VIDEOINITPARAMS(target);
+		this_src = MIX_VIDEOINITPARAMS(src);
+		/* Copy properties from source to target. */
+
+		/* duplicate display */
+		/* NOTE(review): an existing target->display is overwritten without unref — confirm targets are always freshly created */
+		this_target->display = mix_display_dup(this_src->display);
+
+		/* Now chainup base class */
+		if (parent_class->copy) {
+			return parent_class->copy(MIX_PARAMS_CAST(target), MIX_PARAMS_CAST(
+					src));
+		} else {
+			return TRUE;
+		}
+	}
+	return FALSE;
+}
+
+/**
+ * mix_videoinitparams_equal:
+ * @first: first object to compare
+ * @second: second object to compare
+ * @returns: boolean indicates if instance are equal.
+ *
+ * Compare instance data of @first and @second for equality.
+ */
+gboolean mix_videoinitparams_equal(MixParams * first, MixParams * second) {
+	gboolean ret = FALSE;
+	MixVideoInitParams *this_first, *this_second;
+	if (MIX_IS_VIDEOINITPARAMS(first) && MIX_IS_VIDEOINITPARAMS(second)) {
+		/* Fix: only cast after the type checks have passed. */
+		this_first = MIX_VIDEOINITPARAMS(first);
+		this_second = MIX_VIDEOINITPARAMS(second);
+		// Compare member variables
+		if (!this_first->display && !this_second->display) {
+			ret = TRUE;
+		} else if (this_first->display && this_second->display) {
+			/* compare MixDisplay */
+			ret = mix_display_equal(this_first->display, this_second->display);
+		}
+
+		if (ret == FALSE) {
+			return FALSE;
+		}
+		// members within this scope equal. chaining up.
+		MixParamsClass *klass = MIX_PARAMS_CLASS(parent_class);
+		if (klass->equal)
+			ret = parent_class->equal(first, second);
+		else
+			ret = TRUE;
+	}
+	return ret;
+}
+
+#define MIX_VIDEOINITPARAMS_SETTER_CHECK_INPUT(obj) \
+ if(!obj) return MIX_RESULT_NULL_PTR; \
+ if(!MIX_IS_VIDEOINITPARAMS(obj)) return MIX_RESULT_FAIL; \
+
+#define MIX_VIDEOINITPARAMS_GETTER_CHECK_INPUT(obj, prop) \
+ if(!obj || !prop) return MIX_RESULT_NULL_PTR; \
+ if(!MIX_IS_VIDEOINITPARAMS(obj)) return MIX_RESULT_FAIL; \
+
+MIX_RESULT mix_videoinitparams_set_display(MixVideoInitParams * obj,
+ MixDisplay * display) {
+ MIX_VIDEOINITPARAMS_SETTER_CHECK_INPUT (obj);
+
+ if(obj->display) {
+ mix_display_unref(obj->display);
+ }
+ obj->display = NULL;
+
+ if(display) {
+ /* obj->display = mix_display_dup(display);
+ if(!obj->display) {
+ return MIX_RESULT_NO_MEMORY;
+ }*/
+
+ obj->display = mix_display_ref(display);
+ }
+
+ return MIX_RESULT_SUCCESS;
+}
+
+/*
+ Caller is responsible to use mix_display_unref to release the returned display
+ */
+MIX_RESULT mix_videoinitparams_get_display(MixVideoInitParams * obj,
+ MixDisplay ** display) {
+ MIX_VIDEOINITPARAMS_GETTER_CHECK_INPUT (obj, display);
+
+ *display = NULL;
+ if(obj->display) {
+ /* *display = mix_display_dup(obj->display);
+ if(!*display) {
+ return MIX_RESULT_NO_MEMORY;
+ }*/
+ *display = mix_display_ref(obj->display);
+ }
+
+ return MIX_RESULT_SUCCESS;
+}
diff --git a/mix_video/src/mixvideoinitparams.h b/mix_video/src/mixvideoinitparams.h
new file mode 100644
index 0000000..eb7c118
--- /dev/null
+++ b/mix_video/src/mixvideoinitparams.h
@@ -0,0 +1,138 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+*/
+
+#ifndef __MIX_VIDEOINITPARAMS_H__
+#define __MIX_VIDEOINITPARAMS_H__
+
+#include <mixparams.h>
+#include "mixdisplay.h"
+#include "mixvideodef.h"
+
+/**
+ * MIX_TYPE_VIDEOINITPARAMS:
+ *
+ * Get type of class.
+ */
+#define MIX_TYPE_VIDEOINITPARAMS (mix_videoinitparams_get_type ())
+
+/**
+ * MIX_VIDEOINITPARAMS:
+ * @obj: object to be type-casted.
+ */
+#define MIX_VIDEOINITPARAMS(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_VIDEOINITPARAMS, MixVideoInitParams))
+
+/**
+ * MIX_IS_VIDEOINITPARAMS:
+ * @obj: an object.
+ *
+ * Checks if the given object is an instance of #MixParams
+ */
+#define MIX_IS_VIDEOINITPARAMS(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEOINITPARAMS))
+
+/**
+ * MIX_VIDEOINITPARAMS_CLASS:
+ * @klass: class to be type-casted.
+ */
+#define MIX_VIDEOINITPARAMS_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_VIDEOINITPARAMS, MixVideoInitParamsClass))
+
+/**
+ * MIX_IS_VIDEOINITPARAMS_CLASS:
+ * @klass: a class.
+ *
+ * Checks if the given class is #MixParamsClass
+ */
+#define MIX_IS_VIDEOINITPARAMS_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEOINITPARAMS))
+
+/**
+ * MIX_VIDEOINITPARAMS_GET_CLASS:
+ * @obj: a #MixParams object.
+ *
+ * Get the class instance of the object.
+ */
+#define MIX_VIDEOINITPARAMS_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEOINITPARAMS, MixVideoInitParamsClass))
+
+typedef struct _MixVideoInitParams MixVideoInitParams;
+typedef struct _MixVideoInitParamsClass MixVideoInitParamsClass;
+
+/**
+ * MixVideoInitParams:
+ *
+ * MI-X VideoInit Parameter object
+ */
+struct _MixVideoInitParams
+{
+ /*< public > */
+ MixParams parent;
+
+ /*< public > */
+
+ MixDisplay *display;
+ void *reserved1;
+ void *reserved2;
+ void *reserved3;
+ void *reserved4;
+};
+
+/**
+ * MixVideoInitParamsClass:
+ *
+ * MI-X VideoInit object class
+ */
+struct _MixVideoInitParamsClass
+{
+ /*< public > */
+ MixParamsClass parent_class;
+
+ /* class members */
+};
+
+/**
+ * mix_videoinitparams_get_type:
+ * @returns: type
+ *
+ * Get the type of object.
+ */
+GType mix_videoinitparams_get_type (void);
+
+/**
+ * mix_videoinitparams_new:
+ * @returns: A newly allocated instance of #MixVideoInitParams
+ *
+ * Use this method to create new instance of #MixVideoInitParams
+ */
+MixVideoInitParams *mix_videoinitparams_new (void);
+/**
+ * mix_videoinitparams_ref:
+ * @mix: object to add reference
+ * @returns: the MixVideoInitParams instance where reference count has been increased.
+ *
+ * Add reference count.
+ */
+MixVideoInitParams *mix_videoinitparams_ref (MixVideoInitParams * mix);
+
+/**
+ * mix_videoinitparams_unref:
+ * @obj: object to unref.
+ *
+ * Decrement reference count of the object.
+ */
+#define mix_videoinitparams_unref(obj) mix_params_unref(MIX_PARAMS(obj))
+
+/* Class Methods */
+
+/*
+ TO DO: Add documents
+*/
+
+MIX_RESULT mix_videoinitparams_set_display (MixVideoInitParams * obj,
+		MixDisplay * display);
+
+MIX_RESULT mix_videoinitparams_get_display (MixVideoInitParams * obj,
+		MixDisplay ** display);
+
+#endif /* __MIX_VIDEOINITPARAMS_H__ */
diff --git a/mix_video/src/mixvideolog.h b/mix_video/src/mixvideolog.h
new file mode 100644
index 0000000..89a8827
--- /dev/null
+++ b/mix_video/src/mixvideolog.h
@@ -0,0 +1,25 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+#ifndef __MIX_VIDEO_LOG_H__
+#define __MIX_VIDEO_LOG_H__
+#include <mixlog.h>
+
+#ifdef MIX_LOG_ENABLE
+#define LOG_V(format, ...) mix_log(MIX_VIDEO_COMP, MIX_LOG_LEVEL_VERBOSE, format, ##__VA_ARGS__)
+#define LOG_I(format, ...) mix_log(MIX_VIDEO_COMP, MIX_LOG_LEVEL_INFO, format, ##__VA_ARGS__)
+#define LOG_W(format, ...) mix_log(MIX_VIDEO_COMP, MIX_LOG_LEVEL_WARNING, format, ##__VA_ARGS__)
+#define LOG_E(format, ...) mix_log(MIX_VIDEO_COMP, MIX_LOG_LEVEL_ERROR, format, ##__VA_ARGS__)
+#else
+#define LOG_V(format, ...)
+#define LOG_I(format, ...)
+#define LOG_W(format, ...)
+#define LOG_E(format, ...)
+#endif
+
+#endif /* __MIX_VIDEO_LOG_H__ */
diff --git a/mix_video/src/mixvideorenderparams.c b/mix_video/src/mixvideorenderparams.c
new file mode 100644
index 0000000..0dc8be7
--- /dev/null
+++ b/mix_video/src/mixvideorenderparams.c
@@ -0,0 +1,420 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+/**
+ * SECTION:mixvideorenderparams
+ * @short_description: VideoRender parameters
+ *
+ * A data object which stores videorender specific parameters.
+ */
+#include <va/va.h> /* libVA */
+#include <glib-object.h>
+
+#include "mixvideorenderparams.h"
+#include "mixvideorenderparams_internal.h"
+
+#include <string.h>
+
+static GType _mix_videorenderparams_type = 0;
+static MixParamsClass *parent_class = NULL;
+
+#define _do_init { _mix_videorenderparams_type = g_define_type_id; }
+
+gboolean mix_videorenderparams_copy(MixParams * target, const MixParams * src);
+MixParams *mix_videorenderparams_dup(const MixParams * obj);
+gboolean mix_videorenderparams_equal(MixParams * first, MixParams * second);
+static void mix_videorenderparams_finalize(MixParams * obj);
+
+G_DEFINE_TYPE_WITH_CODE (MixVideoRenderParams, mix_videorenderparams,
+ MIX_TYPE_PARAMS, _do_init);
+
+static void mix_videorenderparams_init(MixVideoRenderParams * self) {
+
+ MixVideoRenderParamsPrivate *priv = MIX_VIDEORENDERPARAMS_GET_PRIVATE(self);
+ priv->va_cliprects = NULL;
+ self->reserved = priv;
+
+ /* initialize properties here */
+ self->display = NULL;
+ memset(&(self->src_rect), 0, sizeof(MixRect));
+ memset(&(self->dst_rect), 0, sizeof(MixRect));
+
+ self->clipping_rects = NULL;
+ self->number_of_clipping_rects = 0;
+
+ /* TODO: initialize other properties */
+ self->reserved1 = NULL;
+ self->reserved2 = NULL;
+ self->reserved3 = NULL;
+ self->reserved4 = NULL;
+}
+
+static void mix_videorenderparams_class_init(MixVideoRenderParamsClass * klass) {
+ MixParamsClass *mixparams_class = MIX_PARAMS_CLASS(klass);
+
+ /* setup static parent class */
+ parent_class = (MixParamsClass *) g_type_class_peek_parent(klass);
+
+ mixparams_class->finalize = mix_videorenderparams_finalize;
+ mixparams_class->copy = (MixParamsCopyFunction) mix_videorenderparams_copy;
+ mixparams_class->dup = (MixParamsDupFunction) mix_videorenderparams_dup;
+ mixparams_class->equal
+ = (MixParamsEqualFunction) mix_videorenderparams_equal;
+
+ /* Register and allocate the space the private structure for this object */
+ g_type_class_add_private(mixparams_class, sizeof(MixVideoRenderParamsPrivate));
+}
+
+MixVideoRenderParams *
+mix_videorenderparams_new(void) {
+ MixVideoRenderParams *ret =
+ (MixVideoRenderParams *) g_type_create_instance(
+ MIX_TYPE_VIDEORENDERPARAMS);
+
+ return ret;
+}
+
+void mix_videorenderparams_finalize(MixParams * obj) {
+	/* clean up here. */
+
+	MixVideoRenderParams *self = MIX_VIDEORENDERPARAMS(obj);
+	MixVideoRenderParamsPrivate *priv =
+			(MixVideoRenderParamsPrivate *) self->reserved;
+
+	if (self->clipping_rects) {
+		g_free(self->clipping_rects);
+		self->clipping_rects = NULL;
+	}
+
+	if (priv->va_cliprects) {
+		g_free(priv->va_cliprects); /* fix: was freeing clipping_rects again, leaking the VA array */
+		priv->va_cliprects = NULL;
+	}
+
+	self->number_of_clipping_rects = 0;
+
+	if (self->display) {
+		mix_display_unref(self->display);
+		self->display = NULL;
+	}
+
+	/* TODO: cleanup other resources allocated */
+
+	/* Chain up parent */
+	if (parent_class->finalize) {
+		parent_class->finalize(obj);
+	}
+}
+
+MixVideoRenderParams *
+mix_videorenderparams_ref(MixVideoRenderParams * mix) {
+ return (MixVideoRenderParams *) mix_params_ref(MIX_PARAMS(mix));
+}
+
+/**
+ * mix_videorenderparams_dup:
+ * @obj: a #MixVideoRenderParams object
+ * @returns: a newly allocated duplicate of the object.
+ *
+ * Copy duplicate of the object.
+ */
+MixParams *
+mix_videorenderparams_dup(const MixParams * obj) {
+ MixParams *ret = NULL;
+
+ if (MIX_IS_VIDEORENDERPARAMS(obj)) {
+ MixVideoRenderParams *duplicate = mix_videorenderparams_new();
+ if (mix_videorenderparams_copy(MIX_PARAMS(duplicate), MIX_PARAMS(obj))) {
+ ret = MIX_PARAMS(duplicate);
+ } else {
+ mix_videorenderparams_unref(duplicate);
+ }
+ }
+ return ret;
+}
+
+/**
+ * mix_videorenderparams_copy:
+ * @target: copy to target
+ * @src: copy from src
+ * @returns: boolean indicates if copy is successful.
+ *
+ * Copy instance data from @src to @target.
+ */
+gboolean mix_videorenderparams_copy(MixParams * target, const MixParams * src) {
+
+ MixVideoRenderParams *this_target, *this_src;
+ MIX_RESULT mix_result = MIX_RESULT_FAIL;
+
+ if (target == src) {
+ return TRUE;
+ }
+
+ if (MIX_IS_VIDEORENDERPARAMS(target) && MIX_IS_VIDEORENDERPARAMS(src)) {
+
+ // Cast the base object to this child object
+ this_target = MIX_VIDEORENDERPARAMS(target);
+ this_src = MIX_VIDEORENDERPARAMS(src);
+
+ mix_result = mix_videorenderparams_set_display(this_target,
+ this_src->display);
+ if (mix_result != MIX_RESULT_SUCCESS) {
+ return FALSE;
+ }
+
+ mix_result = mix_videorenderparams_set_clipping_rects(this_target,
+ this_src->clipping_rects, this_src->number_of_clipping_rects);
+
+ if (mix_result != MIX_RESULT_SUCCESS) {
+ return FALSE;
+ }
+
+ this_target->src_rect = this_src->src_rect;
+ this_target->dst_rect = this_src->dst_rect;
+
+ // Now chainup base class
+ if (parent_class->copy) {
+ return parent_class->copy(MIX_PARAMS_CAST(target), MIX_PARAMS_CAST(
+ src));
+ } else {
+ return TRUE;
+ }
+ }
+ return FALSE;
+}
+
+gboolean mix_rect_equal(MixRect rc1, MixRect rc2) {
+
+	/* Two rectangles compare equal exactly when both the origin (x, y) and
+	 * the dimensions (width, height) match. A single boolean expression
+	 * replaces the original if/return ladder; behavior is identical. */
+	return (rc1.x == rc2.x && rc1.y == rc2.y && rc1.width == rc2.width
+			&& rc1.height == rc2.height) ? TRUE : FALSE;
+
+}
+
+/**
+ * mix_videorenderparams_equal:
+ * @first: first object to compare
+ * @second: second object to compare
+ * @returns: boolean indicates if instance are equal.
+ *
+ * Compare instance data of @first and @second for equality.
+ */
+gboolean mix_videorenderparams_equal(MixParams * first, MixParams * second) {
+	gboolean ret = FALSE;
+	MixVideoRenderParams *this_first, *this_second;
+
+	if (MIX_IS_VIDEORENDERPARAMS(first) && MIX_IS_VIDEORENDERPARAMS(second)) {
+		this_first = MIX_VIDEORENDERPARAMS(first);
+		this_second = MIX_VIDEORENDERPARAMS(second);
+		/* Fix: compare the clip-rect arrays (not the count cast to a pointer) */
+		if (mix_display_equal(MIX_DISPLAY(this_first->display), MIX_DISPLAY(
+				this_second->display)) && mix_rect_equal(this_first->src_rect,
+				this_second->src_rect) && mix_rect_equal(this_first->dst_rect,
+				this_second->dst_rect)
+				&& this_first->number_of_clipping_rects
+						== this_second->number_of_clipping_rects
+				&& (this_first->number_of_clipping_rects == 0
+						|| memcmp(this_first->clipping_rects,
+								this_second->clipping_rects,
+								this_first->number_of_clipping_rects
+										* sizeof(MixRect)) == 0)) {
+			// members within this scope equal. chaining up.
+			MixParamsClass *klass = MIX_PARAMS_CLASS(parent_class);
+			if (klass->equal)
+				ret = parent_class->equal(first, second);
+			else
+				ret = TRUE;
+		}
+	}
+
+	return ret;
+}
+
+#define MIX_VIDEORENDERPARAMS_SETTER_CHECK_INPUT(obj) \
+ if(!obj) return MIX_RESULT_NULL_PTR; \
+ if(!MIX_IS_VIDEORENDERPARAMS(obj)) return MIX_RESULT_FAIL; \
+
+#define MIX_VIDEORENDERPARAMS_GETTER_CHECK_INPUT(obj, prop) \
+ if(!obj || !prop) return MIX_RESULT_NULL_PTR; \
+ if(!MIX_IS_VIDEORENDERPARAMS(obj)) return MIX_RESULT_FAIL; \
+
+
+/* TODO: Add getters and setters for other properties. The following is just an exmaple, not implemented yet. */
+
+MIX_RESULT mix_videorenderparams_set_display(MixVideoRenderParams * obj,
+ MixDisplay * display) {
+
+ MIX_VIDEORENDERPARAMS_SETTER_CHECK_INPUT (obj);
+
+ if (obj->display) {
+ mix_display_unref(obj->display);
+ obj->display = NULL;
+ }
+
+ /* dup */
+ if (display) {
+ obj->display = mix_display_dup(display);
+ }
+
+ return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videorenderparams_get_display(MixVideoRenderParams * obj,
+		MixDisplay ** display) {
+
+	MIX_VIDEORENDERPARAMS_GETTER_CHECK_INPUT (obj, display);
+
+	/* Fix: always set the out-param so callers never read an uninitialized
+	 * pointer when no display has been set. Returned display is a dup. */
+	*display = NULL;
+	if (obj->display) {
+		*display = mix_display_dup(obj->display);
+	}
+	return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videorenderparams_set_src_rect(MixVideoRenderParams * obj,
+ MixRect src_rect) {
+
+ MIX_VIDEORENDERPARAMS_SETTER_CHECK_INPUT (obj);
+
+ obj->src_rect = src_rect;
+
+ return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videorenderparams_get_src_rect(MixVideoRenderParams * obj,
+ MixRect * src_rect) {
+
+ MIX_VIDEORENDERPARAMS_GETTER_CHECK_INPUT (obj, src_rect);
+
+ *src_rect = obj->src_rect;
+
+ return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videorenderparams_set_dest_rect(MixVideoRenderParams * obj,
+ MixRect dst_rect) {
+
+ MIX_VIDEORENDERPARAMS_SETTER_CHECK_INPUT (obj);
+
+ obj->dst_rect = dst_rect;
+
+ return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videorenderparams_get_dest_rect(MixVideoRenderParams * obj,
+ MixRect * dst_rect) {
+
+ MIX_VIDEORENDERPARAMS_GETTER_CHECK_INPUT (obj, dst_rect);
+
+ *dst_rect = obj->dst_rect;
+
+ return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videorenderparams_set_clipping_rects(MixVideoRenderParams * obj,
+		MixRect* clipping_rects, guint number_of_clipping_rects) {
+
+	MixVideoRenderParamsPrivate *priv = NULL;
+	MIX_VIDEORENDERPARAMS_SETTER_CHECK_INPUT (obj);
+
+	priv = (MixVideoRenderParamsPrivate *) obj->reserved;
+
+
+	if (obj->clipping_rects) {
+		g_free(obj->clipping_rects);
+		obj->clipping_rects = NULL;
+		obj->number_of_clipping_rects = 0;
+	}
+
+	if(priv->va_cliprects) {
+		g_free(priv->va_cliprects);
+		priv->va_cliprects = NULL;
+	}
+
+
+	if (clipping_rects && number_of_clipping_rects) {
+
+		guint idx = 0; /* guint matches the count type; avoids signed/unsigned compare */
+
+		obj->clipping_rects = g_memdup(clipping_rects, number_of_clipping_rects
+				* sizeof(MixRect));
+		if (!obj->clipping_rects) {
+			return MIX_RESULT_NO_MEMORY;
+		}
+
+		obj->number_of_clipping_rects = number_of_clipping_rects;
+
+		/* create VARectangle list */
+		priv->va_cliprects = g_malloc(number_of_clipping_rects * sizeof(VARectangle));
+		if (!priv->va_cliprects) {
+			return MIX_RESULT_NO_MEMORY;
+		}
+
+		for (idx = 0; idx < number_of_clipping_rects; idx++) {
+			priv->va_cliprects[idx].x = clipping_rects[idx].x;
+			priv->va_cliprects[idx].y = clipping_rects[idx].y;
+			priv->va_cliprects[idx].width = clipping_rects[idx].width;
+			priv->va_cliprects[idx].height = clipping_rects[idx].height;
+		}
+	}
+
+	return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videorenderparams_get_clipping_rects(MixVideoRenderParams * obj,
+ MixRect ** clipping_rects, guint* number_of_clipping_rects) {
+
+ MIX_VIDEORENDERPARAMS_GETTER_CHECK_INPUT (obj, clipping_rects);
+ if (!number_of_clipping_rects) {
+ return MIX_RESULT_NULL_PTR;
+ }
+
+ *clipping_rects = NULL;
+ *number_of_clipping_rects = 0;
+
+ if (obj->clipping_rects && obj->number_of_clipping_rects) {
+ *clipping_rects = g_memdup(obj->clipping_rects,
+ obj->number_of_clipping_rects * sizeof(MixRect));
+ if (!*clipping_rects) {
+ return MIX_RESULT_NO_MEMORY;
+ }
+
+ *number_of_clipping_rects = obj->number_of_clipping_rects;
+ }
+
+ return MIX_RESULT_SUCCESS;
+}
+
+/* The mixvideo internal method */
+MIX_RESULT mix_videorenderparams_get_cliprects_internal(
+ MixVideoRenderParams * obj, VARectangle ** va_cliprects,
+ guint* number_of_cliprects) {
+
+ MIX_VIDEORENDERPARAMS_GETTER_CHECK_INPUT (obj, va_cliprects);
+ if (!number_of_cliprects) {
+ return MIX_RESULT_NULL_PTR;
+ }
+ MixVideoRenderParamsPrivate *priv =
+ (MixVideoRenderParamsPrivate *) obj->reserved;
+
+ *va_cliprects = NULL;
+ *number_of_cliprects = 0;
+
+ if (priv->va_cliprects && obj->number_of_clipping_rects) {
+ *va_cliprects = priv->va_cliprects;
+ *number_of_cliprects = obj->number_of_clipping_rects;
+ }
+
+ return MIX_RESULT_SUCCESS;
+
+}
+
+/* TODO: implement properties' setters and getters */
diff --git a/mix_video/src/mixvideorenderparams.h b/mix_video/src/mixvideorenderparams.h
new file mode 100644
index 0000000..f6148e7
--- /dev/null
+++ b/mix_video/src/mixvideorenderparams.h
@@ -0,0 +1,158 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+#ifndef __MIX_VIDEORENDERPARAMS_H__
+#define __MIX_VIDEORENDERPARAMS_H__
+
+#include <mixparams.h>
+#include "mixvideodef.h"
+#include "mixdisplay.h"
+#include "mixvideoframe.h"
+
+/**
+ * MIX_TYPE_VIDEORENDERPARAMS:
+ *
+ * Get type of class.
+ */
+#define MIX_TYPE_VIDEORENDERPARAMS (mix_videorenderparams_get_type ())
+
+/**
+ * MIX_VIDEORENDERPARAMS:
+ * @obj: object to be type-casted.
+ */
+#define MIX_VIDEORENDERPARAMS(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIX_TYPE_VIDEORENDERPARAMS, MixVideoRenderParams))
+
+/**
+ * MIX_IS_VIDEORENDERPARAMS:
+ * @obj: an object.
+ *
+ * Checks if the given object is an instance of #MixVideoRenderParams
+ */
+#define MIX_IS_VIDEORENDERPARAMS(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEORENDERPARAMS))
+
+/**
+ * MIX_VIDEORENDERPARAMS_CLASS:
+ * @klass: class to be type-casted.
+ */
+#define MIX_VIDEORENDERPARAMS_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIX_TYPE_VIDEORENDERPARAMS, MixVideoRenderParamsClass))
+
+/**
+ * MIX_IS_VIDEORENDERPARAMS_CLASS:
+ * @klass: a class.
+ *
+ * Checks if the given class is #MixVideoRenderParamsClass
+ */
+#define MIX_IS_VIDEORENDERPARAMS_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEORENDERPARAMS))
+
+/**
+ * MIX_VIDEORENDERPARAMS_GET_CLASS:
+ * @obj: a #MixParams object.
+ *
+ * Get the class instance of the object.
+ */
+#define MIX_VIDEORENDERPARAMS_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEORENDERPARAMS, MixVideoRenderParamsClass))
+
+typedef struct _MixVideoRenderParams MixVideoRenderParams;
+typedef struct _MixVideoRenderParamsClass MixVideoRenderParamsClass;
+
+/**
+ * MixVideoRenderParams:
+ *
+ * MI-X VideoRender Parameter object
+ */
+struct _MixVideoRenderParams {
+ /*< public > */
+ MixParams parent;
+
+ /*< public > */
+ MixDisplay *display;
+
+ MixRect src_rect;
+ MixRect dst_rect;
+
+ MixRect *clipping_rects;
+ guint number_of_clipping_rects;
+
+ guint post_proc;
+
+ gpointer reserved;
+ gpointer reserved1;
+ gpointer reserved2;
+ gpointer reserved3;
+ gpointer reserved4;
+};
+
+/**
+ * MixVideoRenderParamsClass:
+ *
+ * MI-X VideoRender object class
+ */
+struct _MixVideoRenderParamsClass {
+ /*< public > */
+ MixParamsClass parent_class;
+
+ /* class members */
+};
+
+/**
+ * mix_videorenderparams_get_type:
+ * @returns: type
+ *
+ * Get the type of object.
+ */
+GType mix_videorenderparams_get_type(void);
+
+/**
+ * mix_videorenderparams_new:
+ * @returns: A newly allocated instance of #MixVideoRenderParams
+ *
+ * Use this method to create new instance of #MixVideoRenderParams
+ */
+MixVideoRenderParams *mix_videorenderparams_new(void);
+/**
+ * mix_videorenderparams_ref:
+ * @mix: object to add reference
+ * @returns: the MixVideoRenderParams instance where reference count has been increased.
+ *
+ * Add reference count.
+ */
+MixVideoRenderParams *mix_videorenderparams_ref(MixVideoRenderParams * mix);
+
+/**
+ * mix_videorenderparams_unref:
+ * @obj: object to unref.
+ *
+ * Decrement reference count of the object.
+ */
+#define mix_videorenderparams_unref(obj) mix_params_unref(MIX_PARAMS(obj))
+
+/* Class Methods */
+
+MIX_RESULT mix_videorenderparams_set_display(MixVideoRenderParams * obj,
+ MixDisplay * display);
+MIX_RESULT mix_videorenderparams_get_display(MixVideoRenderParams * obj,
+ MixDisplay ** display);
+
+MIX_RESULT mix_videorenderparams_set_src_rect(MixVideoRenderParams * obj,
+ MixRect src_rect);
+MIX_RESULT mix_videorenderparams_get_src_rect(MixVideoRenderParams * obj,
+ MixRect * src_rect);
+
+MIX_RESULT mix_videorenderparams_set_dest_rect(MixVideoRenderParams * obj,
+ MixRect dst_rect);
+MIX_RESULT mix_videorenderparams_get_dest_rect(MixVideoRenderParams * obj,
+ MixRect * dst_rect);
+
+MIX_RESULT mix_videorenderparams_set_clipping_rects(MixVideoRenderParams * obj,
+ MixRect* clipping_rects, guint number_of_clipping_rects);
+MIX_RESULT mix_videorenderparams_get_clipping_rects(MixVideoRenderParams * obj,
+ MixRect ** clipping_rects, guint* number_of_clipping_rects);
+
+/* TODO: Add getters and setters for other properties */
+
+#endif /* __MIX_VIDEORENDERPARAMS_H__ */
diff --git a/mix_video/src/mixvideorenderparams_internal.h b/mix_video/src/mixvideorenderparams_internal.h
new file mode 100644
index 0000000..8619173
--- /dev/null
+++ b/mix_video/src/mixvideorenderparams_internal.h
@@ -0,0 +1,36 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+#ifndef __MIX_VIDEORENDERPARAMS_PRIVATE_H__
+#define __MIX_VIDEORENDERPARAMS_PRIVATE_H__
+
+typedef struct _MixVideoRenderParamsPrivate MixVideoRenderParamsPrivate;
+
+struct _MixVideoRenderParamsPrivate {
+ /*< private > */
+
+ VARectangle *va_cliprects;
+};
+
+/**
+ * MIX_VIDEORENDERPARAMS_GET_PRIVATE:
+ * @obj: class object for which to get private data.
+ *
+ * Get private structure of this class.
+ */
+#define MIX_VIDEORENDERPARAMS_GET_PRIVATE(obj) \
+ (G_TYPE_INSTANCE_GET_PRIVATE ((obj), MIX_TYPE_VIDEORENDERPARAMS, MixVideoRenderParamsPrivate))
+
+/* Internal function */
+
+MIX_RESULT mix_videorenderparams_get_cliprects_internal(
+ MixVideoRenderParams * obj,
+ VARectangle ** va_cliprects,
+ guint* number_of_cliprects);
+
+#endif /* __MIX_VIDEORENDERPARAMS_PRIVATE_H__ */
diff --git a/mix_video/src/test.c b/mix_video/src/test.c
new file mode 100644
index 0000000..8f9aee5
--- /dev/null
+++ b/mix_video/src/test.c
@@ -0,0 +1,87 @@
+#include <stdio.h>
+#include <glib.h>
+#include <glib-object.h>
+#include "mixvideo.h"
+#include "mixdisplayx11.h"
+
+/* Ad-hoc smoke test exercising MixDisplayX11, MixVideoInitParams and
+ * MixVideo creation/initialization.  Prints progress to stdout. */
+int
+main (int argc, char **argv)
+{
+  MIX_RESULT ret;
+
+  g_type_init ();
+
+/* test MixDisplay */
+  {
+
+    MixDisplayX11 *x11_clone = NULL;
+    MixDisplayX11 *x11 = mix_displayx11_new ();
+
+    MixDisplay *base = MIX_DISPLAY (x11);
+
+    gboolean flag = MIX_IS_DISPLAYX11 (base);
+
+    Drawable drawable = 1024;
+
+    mix_displayx11_set_drawable (x11, drawable);
+
+/* clone x11 */
+
+    x11_clone = (MixDisplayX11 *) mix_display_dup (MIX_DISPLAY (x11));
+
+    base = MIX_DISPLAY (x11_clone);
+
+    flag = MIX_IS_DISPLAYX11 (base);
+
+    mix_displayx11_get_drawable (x11_clone, &drawable);
+
+/* TODO: add more test cases */
+
+/* release */
+    mix_display_unref (MIX_DISPLAY (x11));
+    mix_display_unref (MIX_DISPLAY (x11_clone));
+    g_print ("MixDisplayX11 test is done!\n");
+  }
+
+/* test MixVideoInitParams */
+  {
+    MixVideoInitParams *init_params = mix_videoinitparams_new ();
+
+    MixDisplayX11 *x11 = mix_displayx11_new ();
+    mix_displayx11_set_drawable (x11, 1024);
+
+    mix_videoinitparams_set_display (init_params, MIX_DISPLAY (x11));
+
+/* release */
+    mix_params_unref (MIX_PARAMS (init_params));
+    mix_display_unref (MIX_DISPLAY (x11));
+
+    g_print ("MixVideoInitParams test is done!\n");
+  }
+
+/* test MixVideo */
+
+  {
+    MixVideo *video = mix_video_new ();
+    MixVideoInitParams *init_params = mix_videoinitparams_new ();
+    MixDisplayX11 *x11 = mix_displayx11_new ();
+    MixDrmParams *drm = mix_drmparams_new ();
+    MixCodecMode mode = MIX_CODEC_MODE_DECODE;
+
+    mix_displayx11_set_drawable (x11, 1024);
+    mix_videoinitparams_set_display (init_params, MIX_DISPLAY (x11));
+
+    /* Check the result instead of silently discarding it (ret was
+     * previously declared but never used). */
+    ret = mix_video_initialize (video, mode, init_params, drm);
+    if (ret != MIX_RESULT_SUCCESS) {
+      g_print ("mix_video_initialize failed: 0x%x\n", (guint) ret);
+    }
+
+/* TODO: add more test cases */
+
+/* unref the objects. */
+
+    mix_params_unref (MIX_PARAMS (init_params));
+    mix_params_unref (MIX_PARAMS (drm));
+    mix_display_unref (MIX_DISPLAY (x11));
+    g_object_unref (G_OBJECT (video));
+
+    g_print ("MixVideo test is done!\n");
+  }
+
+  return 0;
+}
diff --git a/mix_video/test/Makefile.am b/mix_video/test/Makefile.am
new file mode 100644
index 0000000..aa58280
--- /dev/null
+++ b/mix_video/test/Makefile.am
@@ -0,0 +1,2 @@
+SUBDIRS = src
+EXTRA_DIST = autogen.sh
diff --git a/mix_video/test/autogen.sh b/mix_video/test/autogen.sh
new file mode 100644
index 0000000..79033fb
--- /dev/null
+++ b/mix_video/test/autogen.sh
@@ -0,0 +1 @@
+autoreconf
diff --git a/mix_video/test/configure.ac b/mix_video/test/configure.ac
new file mode 100644
index 0000000..4e3a279
--- /dev/null
+++ b/mix_video/test/configure.ac
@@ -0,0 +1,53 @@
+
+AC_INIT([testmixvideo], [0.1], [tao.q.tao@intel.com])
+
+dnl AC_CONFIG_MACRO_DIR([m4])
+
+dnl AM_INIT_AUTOMAKE may only be invoked once per configure.ac; the
+dnl obsolete two-argument form also referenced $PACKAGE/$VERSION, which
+dnl are undefined at this point.  Keep only the modern invocation.
+AM_INIT_AUTOMAKE([-Wall -Werror foreign])
+
+AC_PROG_CC
+AC_PROG_LIBTOOL
+
+AC_CHECK_PROG(HAVE_PKGCONFIG, pkg-config, yes , no)
+
+dnl Give error and exit if we don't have pkgconfig
+if test "x$HAVE_PKGCONFIG" = "xno"; then
+  AC_MSG_ERROR(you need to have pkgconfig installed !)
+fi
+
+GLIB_REQ=2.18
+dnl Check for glib2 without extra fat, useful for the unversioned tool frontends
+dnl PKG_CHECK_MODULES(GLIB_ONLY, glib-2.0 >= $GLIB_REQ,HAVE_GLIB=yes,HAVE_GLIB=no)
+PKG_CHECK_MODULES(GLIB, glib-2.0 >= $GLIB_REQ,HAVE_GLIB=yes,HAVE_GLIB=no)
+if test "x$HAVE_GLIB" = "xno"; then
+  AC_MSG_ERROR(You need glib development packages installed !)
+fi
+
+PKG_CHECK_MODULES(GOBJECT, gobject-2.0 >= $GLIB_REQ,HAVE_GOBJECT=yes,HAVE_GOBJECT=no)
+if test "x$HAVE_GOBJECT" = "xno"; then
+  AC_MSG_ERROR(You need glib development packages installed !)
+fi
+
+MIXVIDEO_REQ=0.5
+PKG_CHECK_MODULES(MIXVIDEO, mixvideo >= $MIXVIDEO_REQ,HAVE_MIXVIDEO=yes,HAVE_MIXVIDEO=no)
+if test "x$HAVE_MIXVIDEO" = "xno"; then
+  AC_MSG_ERROR(You need mixvideo development packages installed !)
+fi
+
+dnl AS_HELP_STRING replaces the long-deprecated AC_HELP_STRING.
+AC_ARG_ENABLE(optimization, AS_HELP_STRING([ --disable-optimization], [Do not optimize the library for speed. Might be required for debugging.]))
+AC_ARG_ENABLE(debuginfo, AS_HELP_STRING([ --enable-debuginfo ], [add -g to the compiler flags (to create debug information)]))
+
+if test "$enable_optimization" = "no" ; then
+  DEBUG=true
+else
+  DEBUG=false
+fi
+
+dnl NOTE(review): DEBUG is computed but never consumed (no AM_CONDITIONAL
+dnl or CFLAGS adjustment) -- confirm intent before wiring it up.
+
+AC_CONFIG_HEADERS([config.h])
+AC_CONFIG_FILES([
+  Makefile
+  src/Makefile
+])
+AC_OUTPUT
diff --git a/mix_video/test/src/Makefile.am b/mix_video/test/src/Makefile.am
new file mode 100644
index 0000000..2c98fa4
--- /dev/null
+++ b/mix_video/test/src/Makefile.am
@@ -0,0 +1,22 @@
+#INTEL CONFIDENTIAL
+#Copyright 2009 Intel Corporation All Rights Reserved.
+#The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+#No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+#
+
+noinst_PROGRAMS = test_framemanager
+
+##############################################################################
+# sources used to compile
+test_framemanager_SOURCES = test_framemanager.c
+
+test_framemanager_CFLAGS = $(GLIB_CFLAGS) $(GOBJECT_CFLAGS) $(MIXVIDEO_CFLAGS)
+test_framemanager_LDADD = $(GLIB_LIBS) $(GOBJECT_LIBS) $(MIXVIDEO_LIBS)
+test_framemanager_LIBTOOLFLAGS = --tag=disable-static
+
+# headers we need but don't want installed
+noinst_HEADERS =
+
+
+
diff --git a/mix_video/test/src/test_framemanager.c b/mix_video/test/src/test_framemanager.c
new file mode 100644
index 0000000..f4b8be9
--- /dev/null
+++ b/mix_video/test/src/test_framemanager.c
@@ -0,0 +1,200 @@
+#include "../../src/mixframemanager.h"
+
+gboolean stop_thread = FALSE;
+GCond* data_cond = NULL;
+GMutex* data_mutex = NULL;
+
+
+/* Consumer thread: repeatedly dequeues frames from the frame manager and
+ * prints their timestamps until stop_thread is set.  Waits on data_cond
+ * (holding data_mutex) whenever no frame is available. */
+void *deque_function(void *data) {
+
+	MixFrameManager *fm = (MixFrameManager *) data;
+	MIX_RESULT mixresult;
+	MixVideoFrame *mvf = NULL;
+	guint64 pts;
+	while(!stop_thread) {
+
+		g_mutex_lock (data_mutex);
+
+		mixresult = mix_framemanager_dequeue(fm, &mvf);
+		if(mixresult == MIX_RESULT_SUCCESS) {
+			mixresult = mix_videoframe_get_timestamp(mvf, &pts);
+			g_print("dequeued timestamp = %"G_GINT64_FORMAT"\n", pts);
+			/* mix_videoframe_unref(mvf); */
+		} else if(mixresult == MIX_RESULT_FRAME_NOTAVAIL) {
+			g_print("mixresult == MIX_RESULT_FRAME_NOTAVAIL\n");
+			g_cond_wait (data_cond, data_mutex);
+		}
+
+		g_mutex_unlock (data_mutex);
+
+	}
+
+	/* A joinable GThreadFunc must return a value: the original fell off
+	 * the end of a non-void function, which is undefined behavior when
+	 * g_thread_join() reads the result. */
+	return NULL;
+}
+
+/* In-place shuffle of a GPtrArray: each slot (except the last) is swapped
+ * with a uniformly random index.  (Not a uniform Fisher-Yates permutation;
+ * kept as-is since the test only needs "out of order" frames.) */
+void shuffle(GPtrArray *list) {
+	guint idx, jdx;
+	guint len = list->len;
+
+	/* len is unsigned: with 0 elements "len - 1" would wrap around and
+	 * "rand() % len" would divide by zero, so bail out early. */
+	if (len < 2) {
+		return;
+	}
+
+	for (idx = 0; idx < len - 1; idx++) {
+		jdx = rand() % len;
+		if (idx != jdx) {
+			gpointer tmp = g_ptr_array_index(list, jdx);
+			g_ptr_array_index(list, jdx) = g_ptr_array_index(list, idx);
+			g_ptr_array_index(list, idx) = tmp;
+		}
+	}
+}
+
+/* Standalone test for MixFrameManager: enqueues 16 frames whose display
+ * timestamps were shuffled, while a second thread dequeues them, to
+ * exercise reordering into presentation order.  Threads coordinate via
+ * data_mutex/data_cond; cleanup is handled through the goto labels. */
+int main() {
+	MIX_RESULT mixresult;
+
+	gint fps_n = 24000;
+	gint fps_d = 1001;
+
+/*
+	gint fps_n = 2500000;
+	gint fps_d = 104297;
+*/
+	GPtrArray *fa = NULL;
+	MixFrameManager *fm = NULL;
+	MixVideoFrame *mvf = NULL;
+	MixVideoFrame *mvf_1st = NULL;
+
+	gint idx = 0;
+	guint64 pts = 0;
+
+	GThread *deque_thread = NULL;
+	GError *deque_thread_error = NULL;
+
+	/* first things first */
+	g_type_init();
+
+	/* create frame manager */
+	fm = mix_framemanager_new();
+	if (!fm) {
+		goto cleanup;
+	}
+
+	/* initialize frame manager */
+	mixresult = mix_framemanager_initialize(fm,
+			MIX_FRAMEORDER_MODE_DISPLAYORDER, fps_n, fps_d);
+	if (mixresult != MIX_RESULT_SUCCESS) {
+		goto cleanup;
+	}
+
+	/* create frame_array */
+	fa = g_ptr_array_sized_new(64);
+	if (!fa) {
+		goto cleanup;
+	}
+
+	/* build 16 frames with monotonically increasing timestamps; frame 0
+	 * is held back in mvf_1st, the rest go into the array for shuffling */
+	for (idx = 0; idx < 16; idx++) {
+		/* generate MixVideoFrame */
+		mvf = mix_videoframe_new();
+		if (!mvf) {
+			goto cleanup;
+		}
+
+		pts = idx * G_USEC_PER_SEC * G_GINT64_CONSTANT(1000) * fps_d / fps_n;
+		mixresult = mix_videoframe_set_timestamp(mvf, pts);
+		if (mixresult != MIX_RESULT_SUCCESS) {
+			goto cleanup;
+		}
+
+		g_print("original timestamp = %"G_GINT64_FORMAT"\n", pts);
+
+		if (idx == 0) {
+			mvf_1st = mvf;
+		} else {
+			g_ptr_array_add(fa, (gpointer) mvf);
+		}
+	}
+
+	/* shuffle the array */
+	shuffle( fa);
+
+	data_mutex = g_mutex_new ();
+	if(!data_mutex) {
+		goto cleanup;
+	}
+
+	data_cond = g_cond_new();
+	if(!data_cond) {
+		goto cleanup;
+	}
+
+
+	/* create another thread to dequeue */
+	deque_thread = g_thread_create((GThreadFunc) deque_function, (void *) fm,
+			TRUE, &deque_thread_error);
+	if (!deque_thread) {
+		goto cleanup;
+	}
+
+	/* enqueue */
+	mixresult = mix_framemanager_enqueue(fm, mvf_1st);
+	if (mixresult != MIX_RESULT_SUCCESS) {
+		goto cleanup;
+	}
+
+	mixresult = mix_videoframe_get_timestamp(mvf_1st, &pts);
+	if (mixresult != MIX_RESULT_SUCCESS) {
+		goto cleanup;
+	}
+	g_print("shuffled timestamp = %"G_GINT64_FORMAT"\n", pts);
+
+	/* NOTE(review): idx is gint while fa->len is guint -- a
+	 * signed/unsigned compare; harmless here with only 15 entries. */
+	for (idx = 0; idx < fa->len; idx++) {
+
+		g_mutex_lock (data_mutex);
+
+		/* wait for 100ms to enqueue another frame */
+		g_usleep(G_USEC_PER_SEC / 10 );
+
+		mvf = (MixVideoFrame *) g_ptr_array_index(fa, idx);
+		mixresult = mix_framemanager_enqueue(fm, mvf);
+
+		/* wake up deque thread */
+		g_cond_signal (data_cond);
+
+
+		g_mutex_unlock (data_mutex);
+
+		if (mixresult != MIX_RESULT_SUCCESS) {
+			goto cleanup;
+		}
+
+		mixresult = mix_videoframe_get_timestamp(mvf, &pts);
+		if (mixresult != MIX_RESULT_SUCCESS) {
+			goto cleanup;
+		}
+
+		g_print("shuffled timestamp = %"G_GINT64_FORMAT"\n", pts);
+	}
+
+	/* block until the user presses a key, then shut the thread down */
+	getchar();
+
+	stop_thread = TRUE;
+
+	/* wake up deque thread */
+	g_cond_signal (data_cond);
+
+	g_thread_join(deque_thread);
+
+cleanup:
+
+	if(data_mutex) {
+		g_mutex_free(data_mutex);
+	}
+
+	if(data_cond) {
+		g_cond_free(data_cond);
+	}
+
+	if (fm) {
+		mix_framemanager_unref(fm);
+	}
+
+	/* TODO confirm: the MixVideoFrame objects stored in fa are never
+	 * unreffed here; presumably the frame manager takes ownership on
+	 * enqueue -- verify against mix_framemanager_enqueue's contract. */
+	if (fa) {
+		g_ptr_array_free(fa, TRUE);
+	}
+
+	return 0;
+}