msm-vidc: calculate encode output buffer size considering 10-bit
In case of an opaque color format, the bit depth is only known at the first ETB. The existing logic calculates the output buffer size based on 8-bit, which won't be sufficient for 10-bit content. So assume 10-bit by default while calculating the encoder output buffer size. Change-Id: I51eee2fcb0dc137c596babd04659cd1fc087ebd6
This commit is contained in:
parent
c8babda925
commit
185d24cddd
@ -869,11 +869,15 @@ u32 msm_vidc_calculate_enc_output_frame_size(struct msm_vidc_inst *inst)
|
||||
if (inst->rc_type == RATE_CONTROL_LOSSLESS)
|
||||
frame_size = (width * height * 6);
|
||||
|
||||
/* For 10-bit cases size = size * 1.25 */
|
||||
if (inst->bit_depth == MSM_VIDC_BIT_DEPTH_10) {
|
||||
/*
|
||||
* In case of opaque color format bitdepth will be known
|
||||
* with first ETB, buffers allocated already with 8 bit
|
||||
* won't be sufficient for 10 bit
|
||||
* calculate size considering 10-bit by default
|
||||
* For 10-bit cases size = size * 1.25
|
||||
*/
|
||||
frame_size *= 5;
|
||||
frame_size /= 4;
|
||||
}
|
||||
|
||||
return ALIGN(frame_size, SZ_4K);
|
||||
}
|
||||
|
Loading…
Reference in New Issue
Block a user