fix[mpp]: Fix compile warning with IPC SDK toolchain

Signed-off-by: Yandong Lin <yandong.lin@rock-chips.com>
Change-Id: I206b473b904e69d10644897e47ba1933ada8204d
commit ea8b5af151 (parent 6cd2495d46)
Author: Yandong Lin <yandong.lin@rock-chips.com>
Date:   2025-04-03 14:43:14 +08:00

17 changed files with 76 additions and 519 deletions
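
Every hunk in this commit follows the same pattern: a run of per-byte bit-field register words (aq_tthd0..aq_tthd3, aq_stp0..aq_stp3, aq_qp_dlt0..aq_qp_dlt3) is collapsed into a flat RK_U8 array, and the setup code indexes that array directly instead of casting the address of the first word to RK_U8* and writing 16 bytes through it. The commit message does not quote the warning text, but writing 16 bytes through a pointer derived from a single 4-byte member is exactly the kind of access that newer GCC releases flag with bounds diagnostics such as -Warray-bounds or -Wstringop-overflow, which is presumably what the IPC SDK toolchain reports. The sketch below is illustrative only, not MPP source: RegWord, OldRegs, NewRegs, fill_old and fill_new are made-up names, and the register map is reduced to a single 16-byte block.

/* Minimal sketch of the before/after pattern, assuming the warning is a
 * GCC object-bounds diagnostic; all identifiers here are hypothetical. */
#include <stdint.h>
#include <stdio.h>

typedef uint32_t RK_U32;
typedef uint8_t  RK_U8;

/* One 32-bit register word split into four 8-bit threshold fields. */
typedef struct {
    RK_U32 thd0 : 8;
    RK_U32 thd1 : 8;
    RK_U32 thd2 : 8;
    RK_U32 thd3 : 8;
} RegWord;

/* Old layout: four named words, e.g. registers 0x1044..0x1050. */
typedef struct {
    RegWord aq_tthd0;
    RegWord aq_tthd1;
    RegWord aq_tthd2;
    RegWord aq_tthd3;
} OldRegs;

/* New layout: the same 16 bytes as one flat array. */
typedef struct {
    RK_U8 aq_tthd[16];
} NewRegs;

static void fill_old(OldRegs *s, const int32_t *aq_thd)
{
    /* The cast this commit removes: for i >= 4 the store lands past the
     * 4-byte aq_tthd0 member, which newer compilers may warn about. */
    RK_U8 *thd = (RK_U8 *)&s->aq_tthd0;
    int i;

    for (i = 0; i < 16; i++)
        thd[i] = aq_thd[i] & 0xff;
}

static void fill_new(NewRegs *s, const int32_t *aq_thd)
{
    int i;

    /* Replacement pattern: plain, in-bounds array indexing. */
    for (i = 0; i < 16; i++)
        s->aq_tthd[i] = aq_thd[i] & 0xff;
}

int main(void)
{
    int32_t thd[16];
    OldRegs o = { { 0 } };
    NewRegs n = { { 0 } };
    int i;

    for (i = 0; i < 16; i++)
        thd[i] = i;

    fill_old(&o, thd);   /* old pattern: may draw a bounds warning */
    fill_new(&n, thd);   /* new pattern: warning-free */

    printf("new aq_tthd[15] = %d\n", n.aq_tthd[15]);
    return 0;
}

On the little-endian Rockchip targets MPP runs on, the flat RK_U8 array should occupy the same bytes as the four packed register words it replaces, so the values programmed into the hardware are unchanged; only the C-level access pattern (and with it the warning) goes away.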


@@ -789,37 +789,8 @@ typedef struct Vepu510RcRoi_t {
/* 0x00001040 reg1040 */
RK_U32 reserved_1040;
/* 0x00001044 reg1041 */
struct {
RK_U32 aq_tthd0 : 8;
RK_U32 aq_tthd1 : 8;
RK_U32 aq_tthd2 : 8;
RK_U32 aq_tthd3 : 8;
} aq_tthd0;
/* 0x00001048 reg1042 */
struct {
RK_U32 aq_tthd4 : 8;
RK_U32 aq_tthd5 : 8;
RK_U32 aq_tthd6 : 8;
RK_U32 aq_tthd7 : 8;
} aq_tthd1;
/* 0x0000104c reg1043 */
struct {
RK_U32 aq_tthd8 : 8;
RK_U32 aq_tthd9 : 8;
RK_U32 aq_tthd10 : 8;
RK_U32 aq_tthd11 : 8;
} aq_tthd2;
/* 0x00001050 reg1044 */
struct {
RK_U32 aq_tthd12 : 8;
RK_U32 aq_tthd13 : 8;
RK_U32 aq_tthd14 : 8;
RK_U32 aq_tthd15 : 8;
} aq_tthd3;
/* 0x00001044 reg1041 - 0x00001050 reg1044 */
RK_U8 aq_tthd[16];
/* 0x00001054 reg1045 */
struct {


@@ -1199,37 +1199,8 @@ typedef struct Vepu511RcRoi_t {
/* 0x00001040 reg1040 */
RK_U32 reserved_1040;
/* 0x00001044 reg1041 */
struct {
RK_U32 aq_tthd0 : 8;
RK_U32 aq_tthd1 : 8;
RK_U32 aq_tthd2 : 8;
RK_U32 aq_tthd3 : 8;
} aq_tthd0;
/* 0x00001048 reg1042 */
struct {
RK_U32 aq_tthd4 : 8;
RK_U32 aq_tthd5 : 8;
RK_U32 aq_tthd6 : 8;
RK_U32 aq_tthd7 : 8;
} aq_tthd1;
/* 0x0000104c reg1043 */
struct {
RK_U32 aq_tthd8 : 8;
RK_U32 aq_tthd9 : 8;
RK_U32 aq_tthd10 : 8;
RK_U32 aq_tthd11 : 8;
} aq_tthd2;
/* 0x00001050 reg1044 */
struct {
RK_U32 aq_tthd12 : 8;
RK_U32 aq_tthd13 : 8;
RK_U32 aq_tthd14 : 8;
RK_U32 aq_tthd15 : 8;
} aq_tthd3;
/* 0x00001044 reg1041 - 0x00001050 reg1044 */
RK_U8 aq_tthd[16];
/* 0x00001054 reg1045 */
struct {


@@ -1810,7 +1810,6 @@ static void setup_vepu510_aq(HalH264eVepu510Ctx *ctx)
MppEncCfgSet *cfg = ctx->cfg;
MppEncHwCfg *hw = &cfg->hw;
Vepu510RcRoi *s = &ctx->regs_set->reg_rc_roi;
RK_U8* thd = (RK_U8*)&s->aq_tthd0;
RK_S32 *aq_step, *aq_thd;
RK_U8 i;
@@ -1823,7 +1822,7 @@ static void setup_vepu510_aq(HalH264eVepu510Ctx *ctx)
}
for (i = 0; i < 16; i++)
thd[i] = aq_thd[i] & 0xff;
s->aq_tthd[i] = aq_thd[i] & 0xff;
s->aq_stp0.aq_stp_s0 = aq_step[0] & 0x1f;
s->aq_stp0.aq_stp_0t1 = aq_step[1] & 0x1f;


@@ -1774,7 +1774,6 @@ static void setup_vepu511_aq(HalH264eVepu511Ctx *ctx)
MppEncCfgSet *cfg = ctx->cfg;
MppEncHwCfg *hw = &cfg->hw;
Vepu511RcRoi *s = &ctx->regs_set->reg_rc_roi;
RK_U8* thd = (RK_U8*)&s->aq_tthd0;
RK_S32 *aq_step, *aq_thd;
RK_U8 i;
@@ -1787,7 +1786,7 @@ static void setup_vepu511_aq(HalH264eVepu511Ctx *ctx)
}
for (i = 0; i < 16; i++)
thd[i] = aq_thd[i] & 0xff;
s->aq_tthd[i] = aq_thd[i] & 0xff;
s->aq_stp0.aq_stp_s0 = aq_step[0] & 0x1f;
s->aq_stp0.aq_stp_0t1 = aq_step[1] & 0x1f;


@@ -1429,20 +1429,15 @@ static void setup_vepu540c_l2(HalVepu540cRegSet *regs, H264eSlice *slice, MppEnc
regs->reg_rc_roi.madp_st_thd1.madp_th2 = 15 << 4;
}
{
RK_U8* thd = (RK_U8*)&regs->reg_rc_roi.aq_tthd0;
RK_U8* step = (RK_U8*)&regs->reg_rc_roi.aq_stp0;
if (slice->slice_type == H264_I_SLICE) {
for (i = 0; i < MPP_ARRAY_ELEMS(h264_aq_tthd_default); i++) {
thd[i] = hw->aq_thrd_i[i];
step[i] = hw->aq_step_i[i] & 0x3f;
}
} else {
for (i = 0; i < MPP_ARRAY_ELEMS(h264_P_aq_step_default); i++) {
thd[i] = hw->aq_thrd_p[i];
step[i] = hw->aq_step_p[i] & 0x3f;
}
if (slice->slice_type == H264_I_SLICE) {
for (i = 0; i < MPP_ARRAY_ELEMS(h264_aq_tthd_default); i++) {
regs->reg_rc_roi.aq_tthd[i] = hw->aq_thrd_i[i];
regs->reg_rc_roi.aq_step[i] = hw->aq_step_i[i] & 0x3f;
}
} else {
for (i = 0; i < MPP_ARRAY_ELEMS(h264_P_aq_step_default); i++) {
regs->reg_rc_roi.aq_tthd[i] = hw->aq_thrd_p[i];
regs->reg_rc_roi.aq_step[i] = hw->aq_step_p[i] & 0x3f;
}
}


@@ -768,85 +768,14 @@ typedef struct Vepu540cRcROiCfg_t {
/* 0x1040 */
RK_U32 reserved_1040;
/* 0x00001044 reg1041 */
struct {
RK_U32 aq_tthd0 : 8;
RK_U32 aq_tthd1 : 8;
RK_U32 aq_tthd2 : 8;
RK_U32 aq_tthd3 : 8;
} aq_tthd0;
/* 0x00001044 reg1041 - 0x00001050 reg1044 */
RK_U8 aq_tthd[16];
/* 0x00001048 reg1042 */
struct {
RK_U32 aq_tthd4 : 8;
RK_U32 aq_tthd5 : 8;
RK_U32 aq_tthd6 : 8;
RK_U32 aq_tthd7 : 8;
} aq_tthd1;
/* 0x0000104c reg1043 */
struct {
RK_U32 aq_tthd8 : 8;
RK_U32 aq_tthd9 : 8;
RK_U32 aq_tthd10 : 8;
RK_U32 aq_tthd11 : 8;
} aq_tthd2;
/* 0x00001050 reg1044 */
struct {
RK_U32 aq_tthd12 : 8;
RK_U32 aq_tthd13 : 8;
RK_U32 aq_tthd14 : 8;
RK_U32 aq_tthd15 : 8;
} aq_tthd3;
/* 0x00001054 reg1045 */
struct {
RK_U32 aq_stp_s0 : 6;
RK_U32 reserved : 2;
RK_U32 aq_stp_0t1 : 6;
RK_U32 reserved1 : 2;
RK_U32 aq_stp_1t2 : 6;
RK_U32 reserved2 : 2;
RK_U32 aq_stp_2t3 : 6;
RK_U32 reserved3 : 2;
} aq_stp0;
/* 0x00001058 reg1046 */
struct {
RK_U32 aq_stp_3t4 : 6;
RK_U32 reserved : 2;
RK_U32 aq_stp_4t5 : 6;
RK_U32 reserved1 : 2;
RK_U32 aq_stp_5t6 : 6;
RK_U32 reserved2 : 2;
RK_U32 aq_stp_6t7 : 6;
RK_U32 reserved3 : 2;
} aq_stp1;
/* 0x0000105c reg1047 */
struct {
RK_U32 aq_stp_8t9 : 6;
RK_U32 reserved : 2;
RK_U32 aq_stp_9t10 : 6;
RK_U32 reserved1 : 2;
RK_U32 aq_stp_10t11 : 6;
RK_U32 reserved2 : 2;
RK_U32 aq_stp_11t12 : 6;
RK_U32 reserved3 : 2;
} aq_stp2;
/* 0x00001060 reg1048 */
struct {
RK_U32 aq_stp_12t13 : 6;
RK_U32 reserved : 2;
RK_U32 aq_stp_13t14 : 6;
RK_U32 reserved1 : 2;
RK_U32 aq_stp_14t15 : 6;
RK_U32 reserved2 : 2;
RK_U32 aq_stp_b15 : 6;
RK_U32 reserved3 : 2;
} aq_stp3;
/*
* 0x00001054 reg1045 - 0x00001060 reg1048
* only low 6 bits is valid for per step.
*/
RK_U8 aq_step[16];
/* 0x00001064 reg1049 */
struct {


@@ -2036,20 +2036,15 @@ static void setup_vepu580_l2(HalVepu580RegSet *regs, H264eSlice *slice, MppEncHw
regs->reg_s3.fme_sqi_thd1.move_lambda = 1;
}
{
RK_U8* thd = (RK_U8*)&regs->reg_rc_klut.aq_tthd0;
RK_U8* step = (RK_U8*)&regs->reg_rc_klut.aq_stp0;
if (slice->slice_type == H264_I_SLICE) {
for (i = 0; i < MPP_ARRAY_ELEMS(h264_aq_tthd_default); i++) {
thd[i] = hw->aq_thrd_i[i];
step[i] = hw->aq_step_i[i] & 0x3f;
}
} else {
for (i = 0; i < MPP_ARRAY_ELEMS(h264_P_aq_step_default); i++) {
thd[i] = hw->aq_thrd_p[i];
step[i] = hw->aq_step_p[i] & 0x3f;
}
if (slice->slice_type == H264_I_SLICE) {
for (i = 0; i < MPP_ARRAY_ELEMS(h264_aq_tthd_default); i++) {
regs->reg_rc_klut.aq_tthd[i] = hw->aq_thrd_i[i];
regs->reg_rc_klut.aq_step[i] = hw->aq_step_i[i] & 0x3f;
}
} else {
for (i = 0; i < MPP_ARRAY_ELEMS(h264_P_aq_step_default); i++) {
regs->reg_rc_klut.aq_tthd[i] = hw->aq_thrd_p[i];
regs->reg_rc_klut.aq_step[i] = hw->aq_step_p[i] & 0x3f;
}
}


@@ -779,85 +779,15 @@ typedef struct Vepu580RcKlutCfg_t {
RK_U32 reserved1 : 8;
} madi_cfg;
/* 0x00001044 reg1041 */
struct {
RK_U32 aq_tthd0 : 8;
RK_U32 aq_tthd1 : 8;
RK_U32 aq_tthd2 : 8;
RK_U32 aq_tthd3 : 8;
} aq_tthd0;
/* 0x00001048 reg1042 */
struct {
RK_U32 aq_tthd4 : 8;
RK_U32 aq_tthd5 : 8;
RK_U32 aq_tthd6 : 8;
RK_U32 aq_tthd7 : 8;
} aq_tthd1;
/* 0x0000104c reg1043 */
struct {
RK_U32 aq_tthd8 : 8;
RK_U32 aq_tthd9 : 8;
RK_U32 aq_tthd10 : 8;
RK_U32 aq_tthd11 : 8;
} aq_tthd2;
/* 0x00001050 reg1044 */
struct {
RK_U32 aq_tthd12 : 8;
RK_U32 aq_tthd13 : 8;
RK_U32 aq_tthd14 : 8;
RK_U32 aq_tthd15 : 8;
} aq_tthd3;
/* 0x00001044 reg1041 - 0x00001050 reg1044 */
RK_U8 aq_tthd[16];
/* 0x00001054 reg1045 */
struct {
RK_U32 aq_stp_s0 : 6;
RK_U32 reserved : 2;
RK_U32 aq_stp_0t1 : 6;
RK_U32 reserved1 : 2;
RK_U32 aq_stp_1t2 : 6;
RK_U32 reserved2 : 2;
RK_U32 aq_stp_2t3 : 6;
RK_U32 reserved3 : 2;
} aq_stp0;
/* 0x00001058 reg1046 */
struct {
RK_U32 aq_stp_3t4 : 6;
RK_U32 reserved : 2;
RK_U32 aq_stp_4t5 : 6;
RK_U32 reserved1 : 2;
RK_U32 aq_stp_5t6 : 6;
RK_U32 reserved2 : 2;
RK_U32 aq_stp_6t7 : 6;
RK_U32 reserved3 : 2;
} aq_stp1;
/* 0x0000105c reg1047 */
struct {
RK_U32 aq_stp_8t9 : 6;
RK_U32 reserved : 2;
RK_U32 aq_stp_9t10 : 6;
RK_U32 reserved1 : 2;
RK_U32 aq_stp_10t11 : 6;
RK_U32 reserved2 : 2;
RK_U32 aq_stp_11t12 : 6;
RK_U32 reserved3 : 2;
} aq_stp2;
/* 0x00001060 reg1048 */
struct {
RK_U32 aq_stp_12t13 : 6;
RK_U32 reserved : 2;
RK_U32 aq_stp_13t14 : 6;
RK_U32 reserved1 : 2;
RK_U32 aq_stp_14t15 : 6;
RK_U32 reserved2 : 2;
RK_U32 aq_stp_b15 : 6;
RK_U32 reserved3 : 2;
} aq_stp3;
/*
* 0x00001054 reg1045 - 0x00001060 reg1048
* only low 6 bits is valid for per step.
*/
RK_U8 aq_step[16];
/* 0x1064 - 0x106c */
RK_U32 reserved1049_1051[3];


@@ -111,13 +111,12 @@ static void vepu510_h265e_tune_aq(HalH265eVepu510Tune *tune)
RK_U32 i = 0;
RK_S32 aq_step[16];
RK_U8 *thd = (RK_U8 *)&r->aq_tthd0;
for (i = 0; i < MPP_ARRAY_ELEMS(aq_thd_default); i++) {
if (ctx->frame_type == INTRA_FRAME) {
thd[i] = hw->aq_thrd_i[i];
r->aq_tthd[i] = hw->aq_thrd_i[i];
aq_step[i] = hw->aq_step_i[i] & 0x1F;
} else {
thd[i] = hw->aq_thrd_p[i];
r->aq_tthd[i] = hw->aq_thrd_p[i];
aq_step[i] = hw->aq_step_p[i] & 0x1F;
}
}


@@ -2082,7 +2082,6 @@ static void vepu511_h265_set_aq(H265eV511HalContext *ctx, H265eV511RegSet *regs)
{
MppEncHwCfg *hw = &ctx->cfg->hw;
Vepu511RcRoi *rc_regs = &regs->reg_rc_roi;
RK_U8* thd = (RK_U8*)&rc_regs->aq_tthd0;
RK_S32 *aq_step, *aq_rnge;
RK_U32 *aq_thd;
RK_U32 i;
@@ -2116,7 +2115,7 @@ static void vepu511_h265_set_aq(H265eV511HalContext *ctx, H265eV511RegSet *regs)
rc_regs->aq_stp2.aq_stp_b15 = aq_step[15];
for (i = 0; i < 16; i++)
thd[i] = aq_thd[i];
rc_regs->aq_tthd[i] = aq_thd[i];
rc_regs->aq_clip.aq16_rnge = aq_rnge[0];
rc_regs->aq_clip.aq32_rnge = aq_rnge[1];


@@ -431,21 +431,16 @@ static void vepu540c_h265_global_cfg_set(H265eV540cHalContext *ctx, H265eV540cRe
vepu540c_h265_rdo_cfg(reg_rdo);
if (ctx->frame_type == INTRA_FRAME) {
RK_U8 *thd = (RK_U8 *)&rc_regs->aq_tthd0;
RK_S8 *step = (RK_S8 *)&rc_regs->aq_stp0;
for (i = 0; i < MPP_ARRAY_ELEMS(aq_thd_default); i++) {
thd[i] = hw->aq_thrd_i[i];
step[i] = hw->aq_step_i[i] & 0x3f;
rc_regs->aq_tthd[i] = hw->aq_thrd_i[i];
rc_regs->aq_step[i] = hw->aq_step_i[i] & 0x3f;
}
reg_wgt->iprd_lamb_satd_ofst.lambda_satd_offset = 11;
memcpy(&reg_wgt->rdo_wgta_qp_grpa_0_51[0], lamd_moda_qp, sizeof(lamd_moda_qp));
} else {
RK_U8 *thd = (RK_U8 *)&rc_regs->aq_tthd0;
RK_S8 *step = (RK_S8 *)&rc_regs->aq_stp0;
for (i = 0; i < MPP_ARRAY_ELEMS(aq_thd_default); i++) {
thd[i] = hw->aq_thrd_p[i];
step[i] = hw->aq_step_p[i] & 0x3f;
rc_regs->aq_tthd[i] = hw->aq_thrd_p[i];
rc_regs->aq_step[i] = hw->aq_step_p[i] & 0x3f;
}
reg_wgt->iprd_lamb_satd_ofst.lambda_satd_offset = 11;
memcpy(&reg_wgt->rdo_wgta_qp_grpa_0_51[0], lamd_modb_qp, sizeof(lamd_modb_qp));


@@ -916,85 +916,14 @@ typedef struct HevcVepu540cRcRoi_t {
/* 0x00001040 reg1040 */
RK_U32 reserved_1040;
/* 0x00001044 reg1041 */
struct {
RK_U32 aq_tthd0 : 8;
RK_U32 aq_tthd1 : 8;
RK_U32 aq_tthd2 : 8;
RK_U32 aq_tthd3 : 8;
} aq_tthd0;
/* 0x00001044 reg1041 - 0x00001050 reg1044 */
RK_U8 aq_tthd[16];
/* 0x00001048 reg1042 */
struct {
RK_U32 aq_tthd4 : 8;
RK_U32 aq_tthd5 : 8;
RK_U32 aq_tthd6 : 8;
RK_U32 aq_tthd7 : 8;
} aq_tthd1;
/* 0x0000104c reg1043 */
struct {
RK_U32 aq_tthd8 : 8;
RK_U32 aq_tthd9 : 8;
RK_U32 aq_tthd10 : 8;
RK_U32 aq_tthd11 : 8;
} aq_tthd2;
/* 0x00001050 reg1044 */
struct {
RK_U32 aq_tthd12 : 8;
RK_U32 aq_tthd13 : 8;
RK_U32 aq_tthd14 : 8;
RK_U32 aq_tthd15 : 8;
} aq_tthd3;
/* 0x00001054 reg1045 */
struct {
RK_U32 aq_stp_s0 : 6;
RK_U32 reserved : 2;
RK_U32 aq_stp_0t1 : 6;
RK_U32 reserved1 : 2;
RK_U32 aq_stp_1t2 : 6;
RK_U32 reserved2 : 2;
RK_U32 aq_stp_2t3 : 6;
RK_U32 reserved3 : 2;
} aq_stp0;
/* 0x00001058 reg1046 */
struct {
RK_U32 aq_stp_3t4 : 6;
RK_U32 reserved : 2;
RK_U32 aq_stp_4t5 : 6;
RK_U32 reserved1 : 2;
RK_U32 aq_stp_5t6 : 6;
RK_U32 reserved2 : 2;
RK_U32 aq_stp_6t7 : 6;
RK_U32 reserved3 : 2;
} aq_stp1;
/* 0x0000105c reg1047 */
struct {
RK_U32 aq_stp_8t9 : 6;
RK_U32 reserved : 2;
RK_U32 aq_stp_9t10 : 6;
RK_U32 reserved1 : 2;
RK_U32 aq_stp_10t11 : 6;
RK_U32 reserved2 : 2;
RK_U32 aq_stp_11t12 : 6;
RK_U32 reserved3 : 2;
} aq_stp2;
/* 0x00001060 reg1048 */
struct {
RK_U32 aq_stp_12t13 : 6;
RK_U32 reserved : 2;
RK_U32 aq_stp_13t14 : 6;
RK_U32 reserved1 : 2;
RK_U32 aq_stp_14t15 : 6;
RK_U32 reserved2 : 2;
RK_U32 aq_stp_b15 : 6;
RK_U32 reserved3 : 2;
} aq_stp3;
/*
* 0x00001054 reg1045 - 0x00001060 reg1048
* only low 6 bits is valid for per step.
*/
RK_U8 aq_step[16];
/* 0x00001064 reg1049 */
struct {


@@ -567,20 +567,14 @@ static void vepu541_h265_set_l2_regs(H265eV541HalContext *ctx, H265eV54xL2RegSet
memcpy(&regs->lamd_modb_qp[0], lamd_modb_qp, sizeof(lamd_modb_qp));
if (ctx->frame_type == INTRA_FRAME) {
RK_U8 *thd = (RK_U8 *)&regs->aq_thd0;
RK_S8 *step = (RK_S8 *)&regs->aq_qp_dlt0;
for (i = 0; i < MPP_ARRAY_ELEMS(aq_thd_default); i++) {
thd[i] = hw->aq_thrd_i[i];
step[i] = hw->aq_step_i[i] & 0x3f;
regs->aq_tthd[i] = hw->aq_thrd_i[i];
regs->aq_step[i] = hw->aq_step_i[i] & 0x3f;
}
} else {
RK_U8 *thd = (RK_U8 *)&regs->aq_thd0;
RK_S8 *step = (RK_S8 *)&regs->aq_qp_dlt0;
for (i = 0; i < MPP_ARRAY_ELEMS(aq_thd_default); i++) {
thd[i] = hw->aq_thrd_p[i];
step[i] = hw->aq_step_p[i] & 0x3f;
regs->aq_tthd[i] = hw->aq_thrd_p[i];
regs->aq_step[i] = hw->aq_step_p[i] & 0x3f;
}
}


@@ -272,85 +272,15 @@ typedef struct H265eV54xL2RegSet_t {
RK_U32 reserved1 : 8;
} madi_cfg;
/* 0x2e4 - AQ_THD0 */
struct {
RK_U32 aq_thld0 : 8;
RK_U32 aq_thld1 : 8;
RK_U32 aq_thld2 : 8;
RK_U32 aq_thld3 : 8;
} aq_thd0;
/* 0x2e4 - AQ_THD0 - 0x2f0 - AQ_THD3 */
RK_U8 aq_tthd[16];
/* 0x2e8 - AQ_THD1 */
struct {
RK_U32 aq_thld4 : 8;
RK_U32 aq_thld5 : 8;
RK_U32 aq_thld6 : 8;
RK_U32 aq_thld7 : 8;
} aq_thd1;
/*
* 0x2f4 - AQ_QP_DLT0 - 0x300 - AQ_QP_DLT3
* only low 6 bits is valid for per step.
*/
RK_U8 aq_step[16];
/* 0x2ec - AQ_THD2 */
struct {
RK_U32 aq_thld8 : 8;
RK_U32 aq_thld9 : 8;
RK_U32 aq_thld10 : 8;
RK_U32 aq_thld11 : 8;
} aq_thd2;
/* 0x2f0 - AQ_THD3 */
struct {
RK_U32 aq_thld12 : 8;
RK_U32 aq_thld13 : 8;
RK_U32 aq_thld14 : 8;
RK_U32 aq_thld15 : 8;
} aq_thd3;
/* 0x2f4 - AQ_QP_DLT0 */
struct {
RK_S32 qp_delta0 : 6;
RK_S32 reserved0 : 2;
RK_S32 qp_delta1 : 6;
RK_S32 reserved1 : 2;
RK_S32 qp_delta2 : 6;
RK_S32 reserved2 : 2;
RK_S32 qp_delta3 : 6;
RK_S32 reserved3 : 2;
} aq_qp_dlt0;
/* 0x2f8 - AQ_QP_DLT1 */
struct {
RK_S32 qp_delta4 : 6;
RK_S32 reserved0 : 2;
RK_S32 qp_delta5 : 6;
RK_S32 reserved1 : 2;
RK_S32 qp_delta6 : 6;
RK_S32 reserved2 : 2;
RK_S32 qp_delta7 : 6;
RK_S32 reserved3 : 2;
} aq_qp_dlt1;
/* 0x2fc - AQ_QP_DLT2 */
struct {
RK_S32 qp_delta8 : 6;
RK_S32 reserved0 : 2;
RK_S32 qp_delta9 : 6;
RK_S32 reserved1 : 2;
RK_S32 qp_delta10 : 6;
RK_S32 reserved2 : 2;
RK_S32 qp_delta11 : 6;
RK_S32 reserved3 : 2;
} aq_qp_dlt2;
/* 0x300 - AQ_QP_DLT3 */
struct {
RK_S32 qp_delta12 : 6;
RK_S32 reserved0 : 2;
RK_S32 qp_delta13 : 6;
RK_S32 reserved1 : 2;
RK_S32 qp_delta14 : 6;
RK_S32 reserved2 : 2;
RK_S32 qp_delta15 : 6;
RK_S32 reserved3 : 2;
} aq_qp_dlt3;
/*0x304-0x30c*/
RK_U32 reserve[3];
/*pre_intra class mode */


@@ -1336,6 +1336,7 @@ static void vepu580_h265_global_cfg_set(H265eV580HalContext *ctx, H265eV580RegSe
hevc_vepu580_rc_klut *rc_regs = &regs->reg_rc_klut;
hevc_vepu580_wgt *reg_wgt = &regs->reg_wgt;
vepu580_rdo_cfg *reg_rdo = &regs->reg_rdo;
vepu580_h265_sobel_cfg(reg_wgt);
vepu580_h265_rdo_cfg(reg_rdo);
vepu580_h265_rdo_bias_cfg(reg_rdo, hw);
@@ -1344,22 +1345,15 @@ static void vepu580_h265_global_cfg_set(H265eV580HalContext *ctx, H265eV580RegSe
memcpy(&reg_wgt->iprd_wgt_qp_hevc_0_51[0], lamd_satd_qp, sizeof(lamd_satd_qp));
if (ctx->frame_type == INTRA_FRAME) {
RK_U8 *thd = (RK_U8 *)&rc_regs->aq_tthd0;
RK_S8 *step = (RK_S8 *)&rc_regs->aq_stp0;
for (i = 0; i < MPP_ARRAY_ELEMS(aq_thd_default); i++) {
thd[i] = hw->aq_thrd_i[i];
step[i] = hw->aq_step_i[i] & 0x3f;
rc_regs->aq_tthd[i] = hw->aq_thrd_i[i];
rc_regs->aq_step[i] = hw->aq_step_i[i] & 0x3f;
}
memcpy(&reg_wgt->rdo_wgta_qp_grpa_0_51[0], lamd_moda_qp, sizeof(lamd_moda_qp));
} else {
RK_U8 *thd = (RK_U8 *)&rc_regs->aq_tthd0;
RK_S8 *step = (RK_S8 *)&rc_regs->aq_stp0;
for (i = 0; i < MPP_ARRAY_ELEMS(aq_thd_default); i++) {
thd[i] = hw->aq_thrd_p[i];
step[i] = hw->aq_step_p[i] & 0x3f;
rc_regs->aq_tthd[i] = hw->aq_thrd_p[i];
rc_regs->aq_step[i] = hw->aq_step_p[i] & 0x3f;
}
memcpy(&reg_wgt->rdo_wgta_qp_grpa_0_51[0], lamd_modb_qp, sizeof(lamd_modb_qp));
}


@@ -798,85 +798,13 @@ typedef struct HevcVepu580RcKlut_t {
RK_U32 reserved1 : 8;
} madi_cfg;
/* 0x00001044 reg1041 */
struct {
RK_U32 aq_tthd0 : 8;
RK_U32 aq_tthd1 : 8;
RK_U32 aq_tthd2 : 8;
RK_U32 aq_tthd3 : 8;
} aq_tthd0;
/* 0x00001048 reg1042 */
struct {
RK_U32 aq_tthd4 : 8;
RK_U32 aq_tthd5 : 8;
RK_U32 aq_tthd6 : 8;
RK_U32 aq_tthd7 : 8;
} aq_tthd1;
/* 0x0000104c reg1043 */
struct {
RK_U32 aq_tthd8 : 8;
RK_U32 aq_tthd9 : 8;
RK_U32 aq_tthd10 : 8;
RK_U32 aq_tthd11 : 8;
} aq_tthd2;
/* 0x00001050 reg1044 */
struct {
RK_U32 aq_tthd12 : 8;
RK_U32 aq_tthd13 : 8;
RK_U32 aq_tthd14 : 8;
RK_U32 aq_tthd15 : 8;
} aq_tthd3;
/* 0x00001054 reg1045 */
struct {
RK_U32 aq_stp_s0 : 6;
RK_U32 reserved : 2;
RK_U32 aq_stp_0t1 : 6;
RK_U32 reserved1 : 2;
RK_U32 aq_stp_1t2 : 6;
RK_U32 reserved2 : 2;
RK_U32 aq_stp_2t3 : 6;
RK_U32 reserved3 : 2;
} aq_stp0;
/* 0x00001058 reg1046 */
struct {
RK_U32 aq_stp_3t4 : 6;
RK_U32 reserved : 2;
RK_U32 aq_stp_4t5 : 6;
RK_U32 reserved1 : 2;
RK_U32 aq_stp_5t6 : 6;
RK_U32 reserved2 : 2;
RK_U32 aq_stp_6t7 : 6;
RK_U32 reserved3 : 2;
} aq_stp1;
/* 0x0000105c reg1047 */
struct {
RK_U32 aq_stp_8t9 : 6;
RK_U32 reserved : 2;
RK_U32 aq_stp_9t10 : 6;
RK_U32 reserved1 : 2;
RK_U32 aq_stp_10t11 : 6;
RK_U32 reserved2 : 2;
RK_U32 aq_stp_11t12 : 6;
RK_U32 reserved3 : 2;
} aq_stp2;
/* 0x00001060 reg1048 */
struct {
RK_U32 aq_stp_12t13 : 6;
RK_U32 reserved : 2;
RK_U32 aq_stp_13t14 : 6;
RK_U32 reserved1 : 2;
RK_U32 aq_stp_14t15 : 6;
RK_U32 reserved2 : 2;
RK_U32 aq_stp_b15 : 6;
RK_U32 reserved3 : 2;
} aq_stp3;
/* 0x00001044 reg1041 - 0x00001050 reg1044 */
RK_U8 aq_tthd[16];
/*
* 0x00001054 reg1045 - 0x00001060 reg1048
* only low 6bits is valid for per step.
*/
RK_U8 aq_step[16];
/* 0x1064 - 0x106c */
RK_U32 reserved1049_1051[3];