RDMA/irdma: Harden depth calculation functions

An issue was exposed where the OS can pass in U32_MAX for the SQ/RQ/SRQ
size. This can cause integer overflow and truncation of the SQ/RQ/SRQ
depth, returning success when failure should have been returned.

Harden the functions to do all depth calculations and boundary
checking in u64 sizes.

Fixes: 563e1feb5f ("RDMA/irdma: Add SRQ support")
Signed-off-by: Shiraz Saleem <shiraz.saleem@intel.com>
Signed-off-by: Tatyana Nikolova <tatyana.e.nikolova@intel.com>
Signed-off-by: Leon Romanovsky <leon@kernel.org>
This commit is contained in:
Shiraz Saleem 2026-03-16 13:39:47 -05:00 committed by Leon Romanovsky
parent 7221f581ee
commit e37afcb56a

View File

@@ -1438,7 +1438,7 @@ int irdma_uk_cq_poll_cmpl(struct irdma_cq_uk *cq,
* irdma_round_up_wq - return round up qp wq depth
* @wqdepth: wq depth in quanta to round up
*/
static int irdma_round_up_wq(u32 wqdepth)
static u64 irdma_round_up_wq(u64 wqdepth)
{
int scount = 1;
@@ -1491,15 +1491,16 @@ void irdma_get_wqe_shift(struct irdma_uk_attrs *uk_attrs, u32 sge,
/**
 * irdma_get_sqdepth - get SQ depth (quanta)
 * @uk_attrs: device HW attributes
 * @sq_size: SQ size in WQEs
 * @shift: shift which determines the size of a WQE
 * @sqdepth: resulting depth of the SQ in quanta
 *
 * Perform the depth calculation and bounds checks entirely in u64 so
 * that a caller-supplied size of U32_MAX cannot overflow/truncate the
 * intermediate (sq_size << shift) + IRDMA_SQ_RSVD expression.
 *
 * Return: 0 on success, -EINVAL if the computed depth exceeds the
 * device maximum.
 */
int irdma_get_sqdepth(struct irdma_uk_attrs *uk_attrs, u32 sq_size, u8 shift,
		      u32 *sqdepth)
{
	u32 min_hw_quanta = (u32)uk_attrs->min_hw_wq_size << shift;
	/* Widen to u64 before shifting to avoid 32-bit overflow. */
	u64 hw_quanta =
		irdma_round_up_wq(((u64)sq_size << shift) + IRDMA_SQ_RSVD);

	if (hw_quanta < min_hw_quanta)
		hw_quanta = min_hw_quanta;
	else if (hw_quanta > uk_attrs->max_hw_wq_quanta)
		return -EINVAL;

	/* Range-checked above, so the narrowing store is safe. */
	*sqdepth = hw_quanta;

	return 0;
}
@@ -1513,15 +1514,16 @@ int irdma_get_sqdepth(struct irdma_uk_attrs *uk_attrs, u32 sq_size, u8 shift,
/**
 * irdma_get_rqdepth - get RQ depth (quanta)
 * @uk_attrs: device HW attributes
 * @rq_size: RQ size in WQEs
 * @shift: shift which determines the size of a WQE
 * @rqdepth: resulting depth of the RQ in quanta
 *
 * Perform the depth calculation and bounds checks entirely in u64 so
 * that a caller-supplied size of U32_MAX cannot overflow/truncate the
 * intermediate (rq_size << shift) + IRDMA_RQ_RSVD expression.
 *
 * Return: 0 on success, -EINVAL if the computed depth exceeds the
 * device maximum.
 */
int irdma_get_rqdepth(struct irdma_uk_attrs *uk_attrs, u32 rq_size, u8 shift,
		      u32 *rqdepth)
{
	u32 min_hw_quanta = (u32)uk_attrs->min_hw_wq_size << shift;
	/* Widen to u64 before shifting to avoid 32-bit overflow. */
	u64 hw_quanta =
		irdma_round_up_wq(((u64)rq_size << shift) + IRDMA_RQ_RSVD);

	if (hw_quanta < min_hw_quanta)
		hw_quanta = min_hw_quanta;
	else if (hw_quanta > uk_attrs->max_hw_rq_quanta)
		return -EINVAL;

	/* Range-checked above, so the narrowing store is safe. */
	*rqdepth = hw_quanta;

	return 0;
}
@@ -1535,13 +1537,16 @@ int irdma_get_rqdepth(struct irdma_uk_attrs *uk_attrs, u32 rq_size, u8 shift,
/**
 * irdma_get_srqdepth - get SRQ depth (quanta)
 * @uk_attrs: device HW attributes
 * @srq_size: SRQ size in WQEs
 * @shift: shift which determines the size of a WQE
 * @srqdepth: resulting depth of the SRQ in quanta
 *
 * Perform the depth calculation and bounds checks entirely in u64 so
 * that a caller-supplied size of U32_MAX cannot overflow/truncate the
 * intermediate (srq_size << shift) + IRDMA_RQ_RSVD expression.
 *
 * Return: 0 on success, -EINVAL if the computed depth exceeds the
 * device maximum.
 */
int irdma_get_srqdepth(struct irdma_uk_attrs *uk_attrs, u32 srq_size, u8 shift,
		       u32 *srqdepth)
{
	u32 min_hw_quanta = (u32)uk_attrs->min_hw_wq_size << shift;
	/* Widen to u64 before shifting to avoid 32-bit overflow. */
	u64 hw_quanta =
		irdma_round_up_wq(((u64)srq_size << shift) + IRDMA_RQ_RSVD);

	if (hw_quanta < min_hw_quanta)
		hw_quanta = min_hw_quanta;
	else if (hw_quanta > uk_attrs->max_hw_srq_quanta)
		return -EINVAL;

	/* Range-checked above, so the narrowing store is safe. */
	*srqdepth = hw_quanta;

	return 0;
}