Merge "msm: vidc: delayed start of dec vpp processing in work mode 2"

This commit is contained in:
qctecmdr 2020-05-01 14:11:49 -07:00 committed by Gerrit - the friendly Code Review server
commit 303075dcf4
15 changed files with 254 additions and 58 deletions

View File

@ -798,13 +798,13 @@ static int hfi_process_session_ftb_done(
hfi_msg_session_fill_buffer_done_compressed_packet) + 4;
is_encoder = (msg_hdr->size == struct_size) ||
(msg_hdr->size == (struct_size +
sizeof(struct hfi_ubwc_cr_stat) - 4));
sizeof(struct hfi_ubwc_cr_stats) - 4));
struct_size = sizeof(struct
hfi_msg_session_fbd_uncompressed_plane0_packet) + 4;
is_decoder = (msg_hdr->size == struct_size) ||
(msg_hdr->size == (struct_size +
sizeof(struct hfi_ubwc_cr_stat) - 4));
sizeof(struct hfi_ubwc_cr_stats) - 4));
if (!(is_encoder ^ is_decoder)) {
d_vpr_e("Ambiguous packet (%#x) received (size %d)\n",
@ -846,6 +846,17 @@ static int hfi_process_session_ftb_done(
data_done.output_done.extra_data_buffer =
pkt->extra_data_buffer;
data_done.output_done.buffer_type = HAL_BUFFER_OUTPUT;
/* FBD packet is extended only when stats=1. */
if (pkt->stats == 1) {
struct hfi_ubwc_cr_stats *ubwc_stat =
(struct hfi_ubwc_cr_stats *)pkt->rgData;
data_done.output_done.ubwc_cr_stat.is_valid =
ubwc_stat->is_valid;
data_done.output_done.ubwc_cr_stat.worst_cr =
ubwc_stat->worst_compression_ratio;
data_done.output_done.ubwc_cr_stat.worst_cf =
ubwc_stat->worst_complexity_number;
}
} else /* if (is_decoder) */ {
struct hfi_msg_session_fbd_uncompressed_plane0_packet *pkt =
(struct hfi_msg_session_fbd_uncompressed_plane0_packet *)
@ -883,6 +894,18 @@ static int hfi_process_session_ftb_done(
data_done.output_done.extra_data_buffer =
pkt->extra_data_buffer;
/* FBD packet is extended only when view_id=1. */
if (pkt->view_id == 1) {
struct hfi_ubwc_cr_stats *ubwc_stat =
(struct hfi_ubwc_cr_stats *)pkt->rgData;
data_done.output_done.ubwc_cr_stat.is_valid =
ubwc_stat->is_valid;
data_done.output_done.ubwc_cr_stat.worst_cr =
ubwc_stat->worst_compression_ratio;
data_done.output_done.ubwc_cr_stat.worst_cf =
ubwc_stat->worst_complexity_number;
}
if (!pkt->stream_id)
data_done.output_done.buffer_type = HAL_BUFFER_OUTPUT;
else if (pkt->stream_id == 1)

View File

@ -855,6 +855,9 @@ static inline int start_streaming(struct msm_vidc_inst *inst)
goto fail_start;
}
/* Decide bse vpp delay after work mode */
msm_vidc_set_bse_vpp_delay(inst);
/* Assign Core and LP mode for current session */
rc = call_core_op(inst->core, decide_core_and_power_mode, inst);
if (rc) {
@ -1489,6 +1492,8 @@ void *msm_vidc_open(int core_id, int session_type)
inst->max_filled_len = 0;
inst->entropy_mode = HFI_H264_ENTROPY_CABAC;
inst->full_range = COLOR_RANGE_UNSPECIFIED;
inst->bse_vpp_delay = DEFAULT_BSE_VPP_DELAY;
inst->first_reconfig = 0;
for (i = SESSION_MSG_INDEX(SESSION_MSG_START);
i <= SESSION_MSG_INDEX(SESSION_MSG_END); i++) {

View File

@ -266,13 +266,13 @@ static int msm_vidc_get_extra_input_buff_count(struct msm_vidc_inst *inst);
static int msm_vidc_get_extra_output_buff_count(struct msm_vidc_inst *inst);
static inline u32 calculate_h264d_scratch_size(struct msm_vidc_inst *inst,
u32 width, u32 height, bool is_interlaced);
u32 width, u32 height, bool is_interlaced, u32 delay);
static inline u32 calculate_h265d_scratch_size(struct msm_vidc_inst *inst,
u32 width, u32 height, bool is_interlaced);
u32 width, u32 height, bool is_interlaced, u32 delay);
static inline u32 calculate_vpxd_scratch_size(struct msm_vidc_inst *inst,
u32 width, u32 height, bool is_interlaced);
u32 width, u32 height, bool is_interlaced, u32 delay);
static inline u32 calculate_mpeg2d_scratch_size(struct msm_vidc_inst *inst,
u32 width, u32 height, bool is_interlaced);
u32 width, u32 height, bool is_interlaced, u32 delay);
static inline u32 calculate_enc_scratch_size(struct msm_vidc_inst *inst,
u32 width, u32 height, u32 work_mode, u32 lcu_size, u32 num_vpp_pipes);
@ -373,7 +373,7 @@ int msm_vidc_get_decoder_internal_buffer_sizes(struct msm_vidc_inst *inst)
struct msm_vidc_dec_buff_size_calculators *dec_calculators;
u32 width, height, i, out_min_count, num_vpp_pipes;
struct v4l2_format *f;
u32 vpp_delay = 2 + 1;
u32 vpp_delay = inst->bse_vpp_delay;
if (!inst || !inst->core || !inst->core->platform_data) {
d_vpr_e("%s: Instance is null!", __func__);
@ -419,7 +419,8 @@ int msm_vidc_get_decoder_internal_buffer_sizes(struct msm_vidc_inst *inst)
MSM_VIDC_PIC_STRUCT_MAYBE_INTERLACED);
curr_req->buffer_size =
dec_calculators->calculate_scratch_size(
inst, width, height, is_interlaced);
inst, width, height, is_interlaced,
vpp_delay);
valid_buffer_type = true;
} else if (curr_req->buffer_type ==
HAL_BUFFER_INTERNAL_SCRATCH_1) {
@ -428,7 +429,7 @@ int msm_vidc_get_decoder_internal_buffer_sizes(struct msm_vidc_inst *inst)
fmt = &inst->fmts[OUTPUT_PORT];
out_min_count = fmt->count_min;
out_min_count =
max(vpp_delay, out_min_count);
max(vpp_delay + 1, out_min_count);
curr_req->buffer_size =
dec_calculators->calculate_scratch1_size(
inst, width, height, out_min_count,
@ -690,6 +691,17 @@ int msm_vidc_calculate_output_buffer_count(struct msm_vidc_inst *inst)
} else {
output_min_count = MIN_ENC_OUTPUT_BUFFERS;
}
if (inst->core->resources.has_vpp_delay &&
is_decode_session(inst) &&
(codec == V4L2_PIX_FMT_H264
|| codec == V4L2_PIX_FMT_HEVC)) {
output_min_count =
max(output_min_count, (u32)MAX_BSE_VPP_DELAY);
output_min_count =
max(output_min_count, (u32)(msm_vidc_vpp_delay & 0x1F));
}
extra_buff_count = msm_vidc_get_extra_buff_count(inst,
HAL_BUFFER_OUTPUT);
fmt->count_min = output_min_count;
@ -1115,12 +1127,11 @@ static inline u32 hfi_iris2_h264d_non_comv_size(u32 width, u32 height,
return size;
}
static inline u32 size_h264d_hw_bin_buffer(u32 width, u32 height)
static inline u32 size_h264d_hw_bin_buffer(u32 width, u32 height, u32 delay)
{
u32 size_yuv, size_bin_hdr, size_bin_res;
u32 size = 0;
u32 product;
u32 delay = 2;
product = width * height;
size_yuv = (product <= BIN_BUFFER_THRESHOLD) ?
@ -1138,14 +1149,15 @@ static inline u32 size_h264d_hw_bin_buffer(u32 width, u32 height)
}
static inline u32 calculate_h264d_scratch_size(struct msm_vidc_inst *inst,
u32 width, u32 height, bool is_interlaced)
u32 width, u32 height, bool is_interlaced, u32 delay)
{
u32 aligned_width = ALIGN(width, BUFFER_ALIGNMENT_SIZE(16));
u32 aligned_height = ALIGN(height, BUFFER_ALIGNMENT_SIZE(16));
u32 size = 0;
if (!is_interlaced)
size = size_h264d_hw_bin_buffer(aligned_width, aligned_height);
size = size_h264d_hw_bin_buffer(aligned_width, aligned_height,
delay);
else
size = 0;
@ -1234,12 +1246,11 @@ static inline u32 hfi_iris2_h265d_non_comv_size(u32 width, u32 height,
return size;
}
static inline u32 size_h265d_hw_bin_buffer(u32 width, u32 height)
static inline u32 size_h265d_hw_bin_buffer(u32 width, u32 height, u32 delay)
{
u32 size = 0;
u32 size_yuv, size_bin_hdr, size_bin_res;
u32 product;
u32 delay = 2;
product = width * height;
size_yuv = (product <= BIN_BUFFER_THRESHOLD) ?
@ -1257,14 +1268,15 @@ static inline u32 size_h265d_hw_bin_buffer(u32 width, u32 height)
}
static inline u32 calculate_h265d_scratch_size(struct msm_vidc_inst *inst,
u32 width, u32 height, bool is_interlaced)
u32 width, u32 height, bool is_interlaced, u32 delay)
{
u32 aligned_width = ALIGN(width, BUFFER_ALIGNMENT_SIZE(16));
u32 aligned_height = ALIGN(height, BUFFER_ALIGNMENT_SIZE(16));
u32 size = 0;
if (!is_interlaced)
size = size_h265d_hw_bin_buffer(aligned_width, aligned_height);
size = size_h265d_hw_bin_buffer(aligned_width, aligned_height,
delay);
else
size = 0;
@ -1272,7 +1284,7 @@ static inline u32 calculate_h265d_scratch_size(struct msm_vidc_inst *inst,
}
static inline u32 calculate_vpxd_scratch_size(struct msm_vidc_inst *inst,
u32 width, u32 height, bool is_interlaced)
u32 width, u32 height, bool is_interlaced, u32 delay)
{
u32 aligned_width = ALIGN(width, BUFFER_ALIGNMENT_SIZE(16));
u32 aligned_height = ALIGN(height, BUFFER_ALIGNMENT_SIZE(16));
@ -1303,7 +1315,7 @@ static inline u32 calculate_vpxd_scratch_size(struct msm_vidc_inst *inst,
}
static inline u32 calculate_mpeg2d_scratch_size(struct msm_vidc_inst *inst,
	u32 width, u32 height, bool is_interlaced, u32 delay)
{
	/*
	 * MPEG2 decode needs no bitstream-bin (scratch) buffer, so the
	 * requested size is always zero regardless of resolution,
	 * interlacing, or the BSE-VPP delay.
	 */
	u32 scratch_size = 0;

	return scratch_size;
}

View File

@ -12,7 +12,7 @@
struct msm_vidc_dec_buff_size_calculators {
u32 (*calculate_scratch_size)(struct msm_vidc_inst *inst, u32 width,
u32 height, bool is_interlaced);
u32 height, bool is_interlaced, u32 delay);
u32 (*calculate_scratch1_size)(struct msm_vidc_inst *inst, u32 width,
u32 height, u32 min_buf_count, bool split_mode_enabled,
u32 num_vpp_pipes);

View File

@ -9,6 +9,7 @@
#include "msm_vidc_clocks.h"
#include "msm_vidc_buffer_calculations.h"
#include "msm_vidc_bus.h"
#include "vidc_hfi.h"
#define MSM_VIDC_MIN_UBWC_COMPLEXITY_FACTOR (1 << 16)
#define MSM_VIDC_MAX_UBWC_COMPLEXITY_FACTOR (4 << 16)
@ -162,6 +163,9 @@ void update_recon_stats(struct msm_vidc_inst *inst,
u32 CR = 0, CF = 0;
u32 frame_size;
if (inst->core->resources.ubwc_stats_in_fbd == 1)
return;
/* do not consider recon stats in case of superframe */
ctrl = get_ctrl(inst, V4L2_CID_MPEG_VIDC_SUPERFRAME);
if (ctrl->val)
@ -199,25 +203,37 @@ static int fill_dynamic_stats(struct msm_vidc_inst *inst,
u32 min_input_cr = MSM_VIDC_MAX_UBWC_COMPRESSION_RATIO;
u32 min_cr = MSM_VIDC_MAX_UBWC_COMPRESSION_RATIO;
mutex_lock(&inst->refbufs.lock);
list_for_each_entry_safe(binfo, nextb, &inst->refbufs.list, list) {
if (binfo->CR) {
min_cr = min(min_cr, binfo->CR);
max_cr = max(max_cr, binfo->CR);
if (inst->core->resources.ubwc_stats_in_fbd == 1) {
mutex_lock(&inst->ubwc_stats_lock);
if (inst->ubwc_stats.is_valid == 1) {
min_cr = inst->ubwc_stats.worst_cr;
max_cf = inst->ubwc_stats.worst_cf;
min_input_cr = inst->ubwc_stats.worst_cr;
}
if (binfo->CF) {
min_cf = min(min_cf, binfo->CF);
max_cf = max(max_cf, binfo->CF);
mutex_unlock(&inst->ubwc_stats_lock);
} else {
mutex_lock(&inst->refbufs.lock);
list_for_each_entry_safe(binfo, nextb,
&inst->refbufs.list, list) {
if (binfo->CR) {
min_cr = min(min_cr, binfo->CR);
max_cr = max(max_cr, binfo->CR);
}
if (binfo->CF) {
min_cf = min(min_cf, binfo->CF);
max_cf = max(max_cf, binfo->CF);
}
}
}
mutex_unlock(&inst->refbufs.lock);
mutex_unlock(&inst->refbufs.lock);
mutex_lock(&inst->input_crs.lock);
list_for_each_entry_safe(temp, next, &inst->input_crs.list, list) {
min_input_cr = min(min_input_cr, temp->input_cr);
max_input_cr = max(max_input_cr, temp->input_cr);
mutex_lock(&inst->input_crs.lock);
list_for_each_entry_safe(temp, next,
&inst->input_crs.list, list) {
min_input_cr = min(min_input_cr, temp->input_cr);
max_input_cr = max(max_input_cr, temp->input_cr);
}
mutex_unlock(&inst->input_crs.lock);
}
mutex_unlock(&inst->input_crs.lock);
/* Sanitize CF values from HW . */
max_cf = min_t(u32, max_cf, MSM_VIDC_MAX_UBWC_COMPLEXITY_FACTOR);
@ -1032,6 +1048,7 @@ void msm_clock_data_reset(struct msm_vidc_inst *inst)
i = 0;
inst->clk_data.buffer_counter = 0;
inst->ubwc_stats.is_valid = 0;
rc = msm_comm_scale_clocks_and_bus(inst, 1);
@ -1160,6 +1177,62 @@ decision_done:
return rc;
}
/*
 * Decide and program the BSE->VPP pipeline delay for a decode session.
 *
 * Only applies when the platform advertises vpp-delay support, the session
 * is a decoder running in work mode 2, and this is not a repeat reconfig.
 * Picks MAX_BSE_VPP_DELAY for >=8K frames, DEFAULT_BSE_VPP_DELAY otherwise,
 * with an optional debugfs override in [1-31], then sends the value to
 * firmware via HFI_PROPERTY_PARAM_VDEC_VSP_VPP_DELAY and caches it in
 * inst->bse_vpp_delay on success.
 *
 * Returns 0 on success or when skipped; negative errno otherwise.
 */
int msm_vidc_set_bse_vpp_delay(struct msm_vidc_inst *inst)
{
	int rc = 0;
	struct hfi_device *hdev;
	u32 vpp_delay, override;
	u32 mbpf, codec;

	if (!inst || !inst->core) {
		d_vpr_e("%s: invalid params %pK\n", __func__, inst);
		return -EINVAL;
	}

	/* Guard: feature gated by platform data, decoder-only, work mode 2,
	 * and never re-applied after the first reconfig has been seen.
	 */
	if (!inst->core->resources.has_vpp_delay ||
		inst->session_type != MSM_VIDC_DECODER ||
		inst->clk_data.work_mode != HFI_WORKMODE_2 ||
		inst->first_reconfig) {
		s_vpr_hp(inst->sid, "%s: Skip bse-vpp\n", __func__);
		return 0;
	}

	hdev = inst->core->device;

	/* Decide VPP delay only on first reconfig */
	if (in_port_reconfig(inst))
		inst->first_reconfig = 1;

	codec = get_v4l2_codec(inst);
	if (codec != V4L2_PIX_FMT_HEVC && codec != V4L2_PIX_FMT_H264) {
		s_vpr_hp(inst->sid, "%s: Skip bse-vpp, codec %u\n",
			__func__, codec);
		return rc;
	}

	mbpf = msm_vidc_get_mbs_per_frame(inst);
	vpp_delay = (mbpf >= NUM_MBS_PER_FRAME(7680, 3840)) ?
		MAX_BSE_VPP_DELAY : DEFAULT_BSE_VPP_DELAY;

	/* DebugFS override [1-31] */
	override = msm_vidc_vpp_delay & 0x1F;
	if (override)
		vpp_delay = override;

	s_vpr_hp(inst->sid, "%s: bse-vpp delay %u\n", __func__, vpp_delay);

	rc = call_hfi_op(hdev, session_set_property, inst->session,
		HFI_PROPERTY_PARAM_VDEC_VSP_VPP_DELAY, &vpp_delay,
		sizeof(u32));
	if (rc)
		s_vpr_e(inst->sid, "%s: set property failed\n", __func__);
	else
		inst->bse_vpp_delay = vpp_delay;

	return rc;
}
int msm_vidc_decide_work_mode_iris2(struct msm_vidc_inst *inst)
{
int rc = 0;

View File

@ -1,6 +1,6 @@
/* SPDX-License-Identifier: GPL-2.0-only */
/*
* Copyright (c) 2018-2019, The Linux Foundation. All rights reserved.
* Copyright (c) 2018-2020, The Linux Foundation. All rights reserved.
*/
#ifndef _MSM_VIDC_CLOCKS_H_
@ -39,4 +39,5 @@ bool res_is_greater_than(u32 width, u32 height,
u32 ref_width, u32 ref_height);
bool res_is_less_than(u32 width, u32 height,
u32 ref_width, u32 ref_height);
int msm_vidc_set_bse_vpp_delay(struct msm_vidc_inst *inst);
#endif

View File

@ -1582,6 +1582,43 @@ static void msm_vidc_queue_rbr_event(struct msm_vidc_inst *inst,
v4l2_event_queue_fh(&inst->event_handler, &buf_event);
}
/*
 * Handle V4L2_EVENT_SEQ_CHANGED_INSUFFICIENT: re-evaluate batching and
 * DCVS for the new configuration, then recompute the minimum output
 * buffer counts from the firmware-reported minimum. For H264/HEVC decode
 * sessions on platforms with vpp-delay support, the minimum is raised to
 * cover MAX_BSE_VPP_DELAY and any debugfs vpp-delay override.
 */
static void handle_event_change_insufficient(struct msm_vidc_inst *inst,
	struct msm_vidc_format *fmt,
	struct msm_vidc_cb_event *event_notify,
	u32 codec)
{
	int extra_count;
	u32 min_count;
	bool needs_vpp_delay_floor;

	s_vpr_h(inst->sid,
		"seq: V4L2_EVENT_SEQ_CHANGED_INSUFFICIENT\n");

	/* decide batching as configuration changed */
	inst->batch.enable = is_batching_allowed(inst);
	s_vpr_hp(inst->sid, "seq : batching %s\n",
		inst->batch.enable ? "enabled" : "disabled");
	msm_dcvs_try_enable(inst);

	extra_count = msm_vidc_get_extra_buff_count(inst,
		HAL_BUFFER_OUTPUT);
	min_count = event_notify->fw_min_cnt;

	needs_vpp_delay_floor = inst->core->resources.has_vpp_delay &&
		is_decode_session(inst) &&
		(codec == V4L2_PIX_FMT_H264 ||
		 codec == V4L2_PIX_FMT_HEVC);
	if (needs_vpp_delay_floor) {
		/* Floor the FW minimum by the worst-case BSE-VPP delay
		 * and the debugfs override (low 5 bits).
		 */
		min_count = max(min_count, (u32)MAX_BSE_VPP_DELAY);
		min_count = max(min_count,
			(u32)(msm_vidc_vpp_delay & 0x1F));
	}

	fmt->count_min = min_count;
	fmt->count_min_host = fmt->count_min + extra_count;

	s_vpr_h(inst->sid,
		"seq: hal buffer[%d] count: min %d min_host %d\n",
		HAL_BUFFER_OUTPUT, fmt->count_min,
		fmt->count_min_host);
}
static void handle_event_change(enum hal_command_response cmd, void *data)
{
struct msm_vidc_inst *inst = NULL;
@ -1593,7 +1630,6 @@ static void handle_event_change(enum hal_command_response cmd, void *data)
u32 *ptr = NULL;
struct msm_vidc_format *fmt;
struct v4l2_format *f;
int extra_buff_count = 0;
u32 codec;
if (!event_notify) {
@ -1754,24 +1790,9 @@ static void handle_event_change(enum hal_command_response cmd, void *data)
fmt->v4l2_fmt.fmt.pix_mp.width = event_notify->width;
mutex_unlock(&inst->lock);
if (event == V4L2_EVENT_SEQ_CHANGED_INSUFFICIENT) {
s_vpr_h(inst->sid,
"seq: V4L2_EVENT_SEQ_CHANGED_INSUFFICIENT\n");
/* decide batching as configuration changed */
inst->batch.enable = is_batching_allowed(inst);
s_vpr_hp(inst->sid, "seq : batching %s\n",
inst->batch.enable ? "enabled" : "disabled");
msm_dcvs_try_enable(inst);
extra_buff_count = msm_vidc_get_extra_buff_count(inst,
HAL_BUFFER_OUTPUT);
fmt->count_min = event_notify->fw_min_cnt;
fmt->count_min_host = fmt->count_min + extra_buff_count;
s_vpr_h(inst->sid,
"seq: hal buffer[%d] count: min %d min_host %d\n",
HAL_BUFFER_OUTPUT, fmt->count_min,
fmt->count_min_host);
}
if (event == V4L2_EVENT_SEQ_CHANGED_INSUFFICIENT)
handle_event_change_insufficient(inst, fmt,
event_notify, codec);
rc = msm_vidc_check_session_supported(inst);
if (!rc) {
@ -2638,6 +2659,17 @@ static void handle_fbd(enum hal_command_response cmd, void *data)
break;
}
if (inst->core->resources.ubwc_stats_in_fbd == 1) {
mutex_lock(&inst->ubwc_stats_lock);
inst->ubwc_stats.is_valid =
fill_buf_done->ubwc_cr_stat.is_valid;
inst->ubwc_stats.worst_cr =
fill_buf_done->ubwc_cr_stat.worst_cr;
inst->ubwc_stats.worst_cf =
fill_buf_done->ubwc_cr_stat.worst_cf;
mutex_unlock(&inst->ubwc_stats_lock);
}
/*
* dma cache operations need to be performed before dma_unmap
* which is done inside msm_comm_put_vidc_buffer()

View File

@ -23,6 +23,7 @@ int msm_vidc_clock_voting = !1;
bool msm_vidc_syscache_disable = !true;
bool msm_vidc_cvp_usage = true;
int msm_vidc_err_recovery_disable = !1;
int msm_vidc_vpp_delay;
#define MAX_DBG_BUF_SIZE 4096
@ -211,6 +212,8 @@ struct dentry *msm_vidc_debugfs_init_drv(void)
bool ok = false;
struct dentry *dir = NULL;
msm_vidc_vpp_delay = 0;
dir = debugfs_create_dir("msm_vidc", NULL);
if (IS_ERR_OR_NULL(dir)) {
dir = NULL;
@ -241,7 +244,8 @@ struct dentry *msm_vidc_debugfs_init_drv(void)
__debugfs_create(bool, "lossless_encoding",
&msm_vidc_lossless_encode) &&
__debugfs_create(u32, "disable_err_recovery",
&msm_vidc_err_recovery_disable);
&msm_vidc_err_recovery_disable) &&
__debugfs_create(u32, "vpp_delay", &msm_vidc_vpp_delay);
#undef __debugfs_create

View File

@ -1,6 +1,6 @@
/* SPDX-License-Identifier: GPL-2.0-only */
/*
* Copyright (c) 2012-2019, The Linux Foundation. All rights reserved.
* Copyright (c) 2012-2020, The Linux Foundation. All rights reserved.
*/
#ifndef __MSM_VIDC_DEBUG__
@ -132,6 +132,7 @@ extern bool msm_vidc_syscache_disable;
extern bool msm_vidc_lossless_encode;
extern bool msm_vidc_cvp_usage;
extern int msm_vidc_err_recovery_disable;
extern int msm_vidc_vpp_delay;
struct log_cookie {
u32 used;

View File

@ -39,6 +39,8 @@
#define MAX_NUM_OUTPUT_BUFFERS VIDEO_MAX_FRAME // same as VB2_MAX_FRAME
#define MAX_SUPPORTED_INSTANCES 16
#define MAX_BSE_VPP_DELAY 6
#define DEFAULT_BSE_VPP_DELAY 2
/* Maintains the number of FTB's between each FBD over a window */
#define DCVS_FTB_WINDOW 16
@ -561,6 +563,10 @@ struct msm_vidc_inst {
bool is_perf_eligible_session;
u32 max_filled_len;
int full_range;
struct mutex ubwc_stats_lock;
struct msm_vidc_ubwc_stats ubwc_stats;
u32 bse_vpp_delay;
u32 first_reconfig;
};
extern struct msm_vidc_drv *vidc_driver;

View File

@ -458,6 +458,14 @@ static struct msm_vidc_common_data lahaina_common_data[] = {
*/
.value = 13434880,
},
{
.key = "qcom,ubwc_stats_in_fbd",
.value = 1,
},
{
.key = "qcom,vpp_delay_supported",
.value = 1,
},
};
static struct msm_vidc_common_data bengal_common_data_v0[] = {
@ -509,6 +517,14 @@ static struct msm_vidc_common_data bengal_common_data_v0[] = {
.key = "qcom,fw-vpp-cycles",
.value = 225975,
},
{
.key = "qcom,ubwc_stats_in_fbd",
.value = 0,
},
{
.key = "qcom,vpp_delay_supported",
.value = 0,
},
};
static struct msm_vidc_common_data bengal_common_data_v1[] = {
@ -560,6 +576,14 @@ static struct msm_vidc_common_data bengal_common_data_v1[] = {
.key = "qcom,fw-vpp-cycles",
.value = 225975,
},
{
.key = "qcom,ubwc_stats_in_fbd",
.value = 0,
},
{
.key = "qcom,vpp_delay_supported",
.value = 0,
},
};
/* Default UBWC config for LPDDR5 */

View File

@ -807,6 +807,10 @@ int read_platform_resources_from_drv_data(
"qcom,fw-vpp-cycles");
res->avsync_window_size = find_key_value(platform_data,
"qcom,avsync-window-size");
res->ubwc_stats_in_fbd = find_key_value(platform_data,
"qcom,ubwc_stats_in_fbd");
res->has_vpp_delay = find_key_value(platform_data,
"qcom,vpp_delay_supported");
res->csc_coeff_data = &platform_data->csc_data;

View File

@ -194,6 +194,8 @@ struct msm_vidc_platform_resources {
struct msm_vidc_ubwc_config_data *ubwc_config;
uint32_t clk_freq_threshold;
struct cx_ipeak_client *cx_ipeak_context;
uint32_t ubwc_stats_in_fbd;
uint32_t has_vpp_delay;
};
static inline bool is_iommu_present(struct msm_vidc_platform_resources *res)

View File

@ -191,6 +191,8 @@ struct hfi_extradata_header {
(HFI_PROPERTY_PARAM_VDEC_OX_START + 0x0022)
#define HFI_PROPERTY_PARAM_VDEC_HDR10_HIST_EXTRADATA \
(HFI_PROPERTY_PARAM_VDEC_OX_START + 0x0023)
#define HFI_PROPERTY_PARAM_VDEC_VSP_VPP_DELAY \
(HFI_PROPERTY_PARAM_VDEC_OX_START + 0x0024)
#define HFI_PROPERTY_CONFIG_VDEC_OX_START \
(HFI_DOMAIN_BASE_VDEC + HFI_ARCH_OX_OFFSET + 0x4000)
@ -548,7 +550,7 @@ struct hfi_msg_session_flush_done_packet {
u32 flush_type;
};
struct hfi_ubwc_cr_stat {
struct hfi_ubwc_cr_stats {
u32 is_valid;
u32 worst_compression_ratio;
u32 worst_complexity_number;

View File

@ -472,6 +472,12 @@ enum hal_command_response {
HAL_RESPONSE_UNUSED = 0x10000000,
};
/*
 * UBWC compression statistics reported by firmware in the FBD packet
 * (copied from struct hfi_ubwc_cr_stats when ubwc_stats_in_fbd == 1).
 */
struct msm_vidc_ubwc_stats {
	u32 is_valid; /* non-zero when the stats below were reported by FW */
	u32 worst_cr; /* worst-case compression ratio from FW */
	u32 worst_cf; /* worst-case complexity factor (complexity number) from FW */
};
struct ubwc_cr_stats_info_type {
u32 cr_stats_info0;
u32 cr_stats_info1;
@ -534,6 +540,7 @@ struct vidc_hal_fbd {
u32 offset3;
u32 packet_buffer3;
enum hal_buffer buffer_type;
struct msm_vidc_ubwc_stats ubwc_cr_stat;
};
struct msm_vidc_capability {