Lines matching refs: q_depth (NVMe PCIe driver, drivers/nvme/host/pci.c)
33 #define SQ_SIZE(q) ((q)->q_depth << (q)->sqes)
34 #define CQ_SIZE(q) ((q)->q_depth * sizeof(struct nvme_completion))
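
Lines 33-34 size the queue memory: q_depth is the number of entries, sqes is log2 of the submission-entry size (6 for the spec's 64-byte commands), and completion entries are a fixed 16 bytes. A user-space sketch of the same math, with simplified stand-ins for the driver's struct nvme_queue and struct nvme_completion:

#include <stdio.h>
#include <stdint.h>

/* 16-byte completion entry, laid out as in the NVMe spec */
struct nvme_completion_sketch {
        uint64_t result;
        uint16_t sq_head, sq_id, command_id, status;
};

struct nvme_queue_sketch {
        uint32_t q_depth;       /* entries per queue */
        uint8_t  sqes;          /* log2(SQ entry size); 6 -> 64 bytes */
};

#define SQ_SIZE(q) ((q)->q_depth << (q)->sqes)
#define CQ_SIZE(q) ((q)->q_depth * sizeof(struct nvme_completion_sketch))

int main(void)
{
        struct nvme_queue_sketch q = { .q_depth = 1024, .sqes = 6 };

        /* prints "SQ: 65536 bytes, CQ: 16384 bytes" */
        printf("SQ: %u bytes, CQ: %zu bytes\n", SQ_SIZE(&q), CQ_SIZE(&q));
        return 0;
}
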
124 u32 q_depth;
199 u32 q_depth;
477 if (next_tail == nvmeq->q_depth)
501 if (++nvmeq->sq_tail == nvmeq->q_depth)
1030 if (tmp == nvmeq->q_depth) {
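
Lines 477, 501, and 1030 are the ring-buffer wraparound checks: the SQ tail resets to 0 when it reaches q_depth, and when the CQ head wraps, the phase tag is flipped so completions posted on the next lap stay distinguishable from stale entries. A sketch of both updates, with the struct simplified from the driver's:

#include <stdint.h>

struct nvmeq_sketch {
        uint32_t q_depth;
        uint32_t sq_tail;
        uint32_t cq_head;
        uint8_t  cq_phase;
};

void advance_sq_tail(struct nvmeq_sketch *nvmeq)
{
        if (++nvmeq->sq_tail == nvmeq->q_depth)
                nvmeq->sq_tail = 0;
}

void advance_cq_head(struct nvmeq_sketch *nvmeq)
{
        uint32_t tmp = nvmeq->cq_head + 1;

        if (tmp == nvmeq->q_depth) {
                nvmeq->cq_head = 0;
                nvmeq->cq_phase ^= 1;   /* expect opposite phase next lap */
        } else {
                nvmeq->cq_head = tmp;
        }
}
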
1154 c.create_cq.qsize = cpu_to_le16(nvmeq->q_depth - 1);
1184 c.create_sq.qsize = cpu_to_le16(nvmeq->q_depth - 1);
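
Lines 1154 and 1184 build the Create I/O CQ/SQ admin commands, whose QSIZE field is 0's based: a wire value of N means N + 1 entries, hence the q_depth - 1. A minimal sketch of the conversion, with a hypothetical little-endian-host stand-in for cpu_to_le16():

#include <stdint.h>

uint16_t cpu_to_le16_sketch(uint16_t v)
{
        return v;       /* assume a little-endian host for this sketch */
}

uint16_t qsize_field(uint32_t q_depth)
{
        /* q_depth entries -> 0's based QSIZE on the wire */
        return cpu_to_le16_sketch((uint16_t)(q_depth - 1));
}
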
1454 int q_depth = dev->q_depth;
1455 unsigned q_size_aligned = roundup(q_depth * entry_size,
1462 q_depth = div_u64(mem_per_q, entry_size);
1465 * Ensure the reduced q_depth is above some threshold where it
1469 if (q_depth < 64)
1473 return q_depth;
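
Lines 1454-1473 are the CMB sizing helper: when queues are placed in the controller memory buffer, the per-queue allocation (rounded up to the controller page size) must fit nr_io_queues times into the CMB; if it does not, q_depth shrinks to what an even split allows, and below a depth of 64 the driver returns -ENOMEM and keeps the queues in host memory at the original depth. Line 2190 then stores the reduced depth. A user-space sketch with simplified rounding helpers:

#include <stdint.h>

#define ROUNDUP(x, a)   ((((x) + (a) - 1) / (a)) * (a))
#define ROUNDDOWN(x, a) (((x) / (a)) * (a))

int cmb_qdepth_sketch(uint32_t q_depth, int nr_io_queues, int entry_size,
                      uint64_t cmb_size, uint32_t page_size)
{
        uint64_t q_size_aligned = ROUNDUP((uint64_t)q_depth * entry_size,
                                          page_size);

        if (q_size_aligned * nr_io_queues > cmb_size) {
                uint64_t mem_per_q = cmb_size / nr_io_queues;

                mem_per_q = ROUNDDOWN(mem_per_q, page_size);
                q_depth = (uint32_t)(mem_per_q / entry_size);

                /* below this, host memory at full depth wins */
                if (q_depth < 64)
                        return -1;      /* the driver returns -ENOMEM */
        }

        return (int)q_depth;
}
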
1510 nvmeq->q_depth = depth;
1734 aqa = nvmeq->q_depth - 1;
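
Line 1734 seeds the AQA (Admin Queue Attributes) register; both the admin SQ size (bits 11:0) and the admin CQ size (bits 27:16) are 0's based, and the driver uses one depth for both, shifting the same value into the upper half on the following source line. A sketch of the resulting register value:

#include <stdint.h>

uint32_t aqa_value(uint32_t q_depth)
{
        uint32_t aqa = q_depth - 1;     /* 0's based ASQS, bits 11:0 */

        aqa |= aqa << 16;               /* same 0's based ACQS, bits 27:16 */
        return aqa;                     /* depth 32 -> 0x001f001f */
}
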
1763 if (nvme_alloc_queue(dev, i, dev->q_depth)) {
2190 dev->q_depth = result;
2332 dev->tagset.queue_depth = min_t(unsigned int, dev->q_depth,
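
Line 2332 hands blk-mq one tag less than the ring size: per the NVMe spec a submission queue is full when the head equals (tail + 1) modulo q_depth, so at most q_depth - 1 commands can ever be in flight, and BLK_MQ_MAX_DEPTH caps the total. A sketch, assuming blk-mq's limit of 10240:

#include <stdint.h>

#define BLK_MQ_MAX_DEPTH_SKETCH 10240

unsigned int tagset_depth(uint32_t q_depth)
{
        uint32_t d = q_depth < BLK_MQ_MAX_DEPTH_SKETCH ?
                     q_depth : BLK_MQ_MAX_DEPTH_SKETCH;

        return d - 1;   /* reserve the slot that marks the queue full */
}
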
2392 dev->q_depth = min_t(u32, NVME_CAP_MQES(dev->ctrl.cap) + 1,
2394 dev->ctrl.sqsize = dev->q_depth - 1; /* 0's based queue depth */
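
Lines 2392-2394 derive the working depth from the controller's capabilities: CAP.MQES ("maximum queue entries supported", bits 15:0) is itself 0's based, so the usable depth is MQES + 1, clamped by the io_queue_depth module parameter; ctrl.sqsize then stores the same depth back in 0's based form, as the source comment notes. A sketch:

#include <stdint.h>

#define CAP_MQES(cap)   ((uint32_t)((cap) & 0xffff))    /* 0's based */

uint32_t pick_q_depth(uint64_t cap, uint32_t io_queue_depth)
{
        uint32_t q_depth = CAP_MQES(cap) + 1;

        if (q_depth > io_queue_depth)
                q_depth = io_queue_depth;
        return q_depth;         /* ctrl.sqsize = q_depth - 1 */
}
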
2413 dev->q_depth = 2;
2416 dev->q_depth);
2420 dev->q_depth = 64;
2422 "set queue depth=%u\n", dev->q_depth);
2430 (dev->q_depth < (NVME_AQ_DEPTH + 2))) {
2431 dev->q_depth = NVME_AQ_DEPTH + 2;
2433 dev->q_depth);
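
Lines 2413-2433 apply per-device workarounds: one quirk pins q_depth to 2 for a controller that resets under deeper queues, another restores a sane 64 when a controller misreports its capabilities, and controllers whose admin and I/O queues share one tag space are clamped upward so the admin ring (NVME_AQ_DEPTH, 32 in the kernel) plus some slack still fits. A hypothetical sketch of the clamping pattern only; the real PCI IDs and quirk flags live in the driver:

#include <stdint.h>

#define AQ_DEPTH_SKETCH 32      /* kernel's NVME_AQ_DEPTH */

uint32_t apply_depth_quirks(uint32_t q_depth, int broken_large_queues,
                            int shared_tags)
{
        if (broken_large_queues)
                q_depth = 2;    /* tiny queues avoid controller resets */

        if (shared_tags && q_depth < AQ_DEPTH_SKETCH + 2)
                q_depth = AQ_DEPTH_SKETCH + 2;  /* keep admin tags usable */

        return q_depth;
}
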