
Searched refs:bulk (Results 1 - 25 of 145) sorted by relevance


/kernel/linux/linux-6.6/drivers/net/ethernet/mellanox/mlx5/core/lib/
crypto.c
13 * (for example, TLS) after last revalidation in a pool or a bulk.
20 #define MLX5_CRYPTO_DEK_BULK_CALC_FREED(bulk) MLX5_CRYPTO_DEK_CALC_FREED(bulk)
22 #define MLX5_CRYPTO_DEK_BULK_IDLE(bulk) \
23 ({ typeof(bulk) _bulk = (bulk); \
59 int num_deks; /* the total number of keys in a bulk */
80 struct mlx5_crypto_dek_bulk *bulk; member
182 mlx5_core_dbg(mdev, "DEK objects created, bulk=%d, obj_id=%d\n", in mlx5_crypto_create_dek_bulk()
292 struct mlx5_crypto_dek_bulk *bulk; in mlx5_crypto_dek_bulk_create() local
336 struct mlx5_crypto_dek_bulk *bulk; mlx5_crypto_dek_pool_add_bulk() local
349 mlx5_crypto_dek_bulk_free(struct mlx5_crypto_dek_bulk *bulk) mlx5_crypto_dek_bulk_free() argument
357 mlx5_crypto_dek_pool_remove_bulk(struct mlx5_crypto_dek_pool *pool, struct mlx5_crypto_dek_bulk *bulk, bool delay) mlx5_crypto_dek_pool_remove_bulk() argument
372 struct mlx5_crypto_dek_bulk *bulk; mlx5_crypto_dek_pool_pop() local
429 struct mlx5_crypto_dek_bulk *bulk = dek->bulk; mlx5_crypto_dek_free_locked() local
479 mlx5_crypto_dek_bulk_reset_synced(struct mlx5_crypto_dek_pool *pool, struct mlx5_crypto_dek_bulk *bulk) mlx5_crypto_dek_bulk_reset_synced() argument
510 mlx5_crypto_dek_bulk_handle_avail(struct mlx5_crypto_dek_pool *pool, struct mlx5_crypto_dek_bulk *bulk, struct list_head *destroy_list) mlx5_crypto_dek_bulk_handle_avail() argument
550 struct mlx5_crypto_dek_bulk *bulk, *tmp; mlx5_crypto_dek_pool_reset_synced() local
609 struct mlx5_crypto_dek_bulk *bulk; mlx5_crypto_dek_create() local
664 struct mlx5_crypto_dek_bulk *bulk, *tmp; mlx5_crypto_dek_free_destroy_list() local
709 struct mlx5_crypto_dek_bulk *bulk, *tmp; mlx5_crypto_dek_pool_destroy() local
[all...]
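The crypto.c hits above are the mlx5 DEK pool, which hands keys out of pre-created bulks and tracks which slots of each bulk are in use. As a rough illustration of that pool/bulk pattern only (hypothetical names, not the mlx5 code):

#include <linux/bitops.h>
#include <linux/errno.h>
#include <linux/list.h>

struct key_bulk {				/* hypothetical, illustrative only */
	struct list_head entry;			/* chained on a per-device pool */
	unsigned long *in_use;			/* one bit per key object */
	int num_keys;				/* total number of keys in the bulk */
};

/* Grab a free key slot from one bulk; -ENOSPC means try the next bulk. */
static int key_bulk_pop(struct key_bulk *bulk)
{
	int pos = find_first_zero_bit(bulk->in_use, bulk->num_keys);

	if (pos >= bulk->num_keys)
		return -ENOSPC;
	set_bit(pos, bulk->in_use);
	return pos;
}

A pool built this way only needs to talk to firmware when it creates or retires a whole bulk, which is the point of allocating key objects in bulk in the first place.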
/kernel/linux/linux-5.10/drivers/net/ethernet/mellanox/mlx5/core/
fs_counters.c
41 /* Max number of counters to query in bulk read is 32K */
63 struct mlx5_fc_bulk *bulk; member
183 /* first id must be aligned to 4 when using bulk query */ in mlx5_fc_stats_query_counter_range()
193 mlx5_core_err(dev, "Error doing bulk query: %d\n", err); in mlx5_fc_stats_query_counter_range()
222 if (counter->bulk) in mlx5_fc_release()
476 static void mlx5_fc_init(struct mlx5_fc *counter, struct mlx5_fc_bulk *bulk, in mlx5_fc_init() argument
479 counter->bulk = bulk; in mlx5_fc_init()
483 static int mlx5_fc_bulk_get_free_fcs_amount(struct mlx5_fc_bulk *bulk) in mlx5_fc_bulk_get_free_fcs_amount() argument
485 return bitmap_weight(bulk in mlx5_fc_bulk_get_free_fcs_amount()
491 struct mlx5_fc_bulk *bulk; mlx5_fc_bulk_create() local
532 mlx5_fc_bulk_destroy(struct mlx5_core_dev *dev, struct mlx5_fc_bulk *bulk) mlx5_fc_bulk_destroy() argument
546 mlx5_fc_bulk_acquire_fc(struct mlx5_fc_bulk *bulk) mlx5_fc_bulk_acquire_fc() argument
557 mlx5_fc_bulk_release_fc(struct mlx5_fc_bulk *bulk, struct mlx5_fc *fc) mlx5_fc_bulk_release_fc() argument
585 struct mlx5_fc_bulk *bulk; mlx5_fc_pool_cleanup() local
616 mlx5_fc_pool_free_bulk(struct mlx5_fc_pool *fc_pool, struct mlx5_fc_bulk *bulk) mlx5_fc_pool_free_bulk() argument
630 struct mlx5_fc_bulk *bulk; mlx5_fc_pool_acquire_from_list() local
678 struct mlx5_fc_bulk *bulk = fc->bulk; mlx5_fc_pool_release_counter() local
[all...]
/kernel/linux/linux-6.6/drivers/net/ethernet/mellanox/mlx5/core/
fs_counters.c
42 /* Max number of counters to query in bulk read is 32K */
65 struct mlx5_fc_bulk *bulk; member
191 /* first id must be aligned to 4 when using bulk query */ in mlx5_fc_stats_query_counter_range()
201 mlx5_core_err(dev, "Error doing bulk query: %d\n", err); in mlx5_fc_stats_query_counter_range()
230 if (counter->bulk) in mlx5_fc_release()
252 "Can't increase flow counters bulk query buffer size, insufficient memory, bulk_size(%d)\n", in mlx5_fc_stats_bulk_query_size_increase()
265 "Flow counters bulk query buffer size increased, bulk_size(%d)\n", in mlx5_fc_stats_bulk_query_size_increase()
545 static void mlx5_fc_init(struct mlx5_fc *counter, struct mlx5_fc_bulk *bulk, in mlx5_fc_init() argument
548 counter->bulk = bulk; in mlx5_fc_init()
552 mlx5_fc_bulk_get_free_fcs_amount(struct mlx5_fc_bulk *bulk) mlx5_fc_bulk_get_free_fcs_amount() argument
560 struct mlx5_fc_bulk *bulk; mlx5_fc_bulk_create() local
600 mlx5_fc_bulk_destroy(struct mlx5_core_dev *dev, struct mlx5_fc_bulk *bulk) mlx5_fc_bulk_destroy() argument
614 mlx5_fc_bulk_acquire_fc(struct mlx5_fc_bulk *bulk) mlx5_fc_bulk_acquire_fc() argument
625 mlx5_fc_bulk_release_fc(struct mlx5_fc_bulk *bulk, struct mlx5_fc *fc) mlx5_fc_bulk_release_fc() argument
653 struct mlx5_fc_bulk *bulk; mlx5_fc_pool_cleanup() local
684 mlx5_fc_pool_free_bulk(struct mlx5_fc_pool *fc_pool, struct mlx5_fc_bulk *bulk) mlx5_fc_pool_free_bulk() argument
698 struct mlx5_fc_bulk *bulk; mlx5_fc_pool_acquire_from_list() local
746 struct mlx5_fc_bulk *bulk = fc->bulk; mlx5_fc_pool_release_counter() local
[all...]
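Both fs_counters.c versions carry the comment that the first counter id must be aligned to 4 when issuing a bulk query. A minimal sketch of how a caller could round the query window down to satisfy that (hypothetical helper, not the mlx5 function):

#include <linux/kernel.h>
#include <linux/minmax.h>
#include <linux/types.h>

/*
 * The device wants the starting id of a bulk counter query aligned to 4,
 * so round down and widen the request; the extra leading entries in the
 * reply are simply skipped by the caller.
 */
static u32 bulk_query_len(u32 first_id, u32 last_id, u32 max_bulk_len)
{
	u32 base = ALIGN_DOWN(first_id, 4);

	return min(last_id - base + 1, max_bulk_len);
}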
/kernel/linux/linux-5.10/drivers/staging/vc04_services/vchiq-mmal/
mmal-vchiq.c
137 /* actual buffer used to store bulk reply */
149 } bulk; /* bulk data */ member
171 /* vmalloc page to receive scratch bulk xfers into */
180 /* ordered workqueue to process all bulk operations */
256 container_of(work, struct mmal_msg_context, u.bulk.work); in buffer_work_cb()
257 struct mmal_buffer *buffer = msg_context->u.bulk.buffer; in buffer_work_cb()
265 buffer->length = msg_context->u.bulk.buffer_used; in buffer_work_cb()
266 buffer->mmal_flags = msg_context->u.bulk.mmal_flags; in buffer_work_cb()
267 buffer->dts = msg_context->u.bulk in buffer_work_cb()
[all...]
/kernel/linux/linux-6.6/drivers/staging/vc04_services/vchiq-mmal/
mmal-vchiq.c
137 /* actual buffer used to store bulk reply */
149 } bulk; /* bulk data */ member
177 /* ordered workqueue to process all bulk operations */
253 container_of(work, struct mmal_msg_context, u.bulk.work); in buffer_work_cb()
254 struct mmal_buffer *buffer = msg_context->u.bulk.buffer; in buffer_work_cb()
262 buffer->length = msg_context->u.bulk.buffer_used; in buffer_work_cb()
263 buffer->mmal_flags = msg_context->u.bulk.mmal_flags; in buffer_work_cb()
264 buffer->dts = msg_context->u.bulk.dts; in buffer_work_cb()
265 buffer->pts = msg_context->u.bulk in buffer_work_cb()
[all...]
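mmal-vchiq queues every bulk completion onto an ordered workqueue so the field copies in buffer_work_cb() run strictly in submission order. A minimal sketch of that ordered-workqueue pattern with generic names (not the vchiq-mmal structures):

#include <linux/errno.h>
#include <linux/kernel.h>
#include <linux/printk.h>
#include <linux/workqueue.h>

struct bulk_ctx {				/* hypothetical completion context */
	struct work_struct work;
	size_t buffer_used;
};

static void bulk_work_cb(struct work_struct *work)
{
	struct bulk_ctx *ctx = container_of(work, struct bulk_ctx, work);

	/* copy length/flags/timestamps into the user-visible buffer here */
	pr_debug("bulk done, %zu bytes\n", ctx->buffer_used);
}

/* One ordered workqueue keeps bulk completions strictly serialised. */
static struct workqueue_struct *bulk_wq;

static int bulk_queue_completion(struct bulk_ctx *ctx)
{
	if (!bulk_wq)
		bulk_wq = alloc_ordered_workqueue("bulk-wq", WQ_MEM_RECLAIM);
	if (!bulk_wq)
		return -ENOMEM;

	INIT_WORK(&ctx->work, bulk_work_cb);
	queue_work(bulk_wq, &ctx->work);
	return 0;
}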
/kernel/linux/linux-6.6/drivers/gpu/drm/msm/
msm_io_utils.c
17 struct clk *msm_clk_bulk_get_clock(struct clk_bulk_data *bulk, int count, in msm_clk_bulk_get_clock() argument
25 for (i = 0; bulk && i < count; i++) { in msm_clk_bulk_get_clock()
26 if (!strcmp(bulk[i].id, name) || !strcmp(bulk[i].id, n)) in msm_clk_bulk_get_clock()
27 return bulk[i].clk; in msm_clk_bulk_get_clock()
msm_mdss.c
349 struct clk_bulk_data *bulk; in mdp5_mdss_parse_clock() local
356 bulk = devm_kcalloc(&pdev->dev, MDP5_MDSS_NUM_CLOCKS, sizeof(struct clk_bulk_data), GFP_KERNEL); in mdp5_mdss_parse_clock()
357 if (!bulk) in mdp5_mdss_parse_clock()
360 bulk[num_clocks++].id = "iface"; in mdp5_mdss_parse_clock()
361 bulk[num_clocks++].id = "bus"; in mdp5_mdss_parse_clock()
362 bulk[num_clocks++].id = "vsync"; in mdp5_mdss_parse_clock()
364 ret = devm_clk_bulk_get_optional(&pdev->dev, num_clocks, bulk); in mdp5_mdss_parse_clock()
368 *clocks = bulk; in mdp5_mdss_parse_clock()
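The mdp5_mdss_parse_clock() lines show the usual clk bulk flow: allocate a clk_bulk_data array, fill in the consumer ids, then fetch everything in one call. A condensed sketch of the same flow (clock names copied from the snippet, the rest is a generic example):

#include <linux/clk.h>
#include <linux/platform_device.h>

static int parse_clocks_example(struct platform_device *pdev,
				struct clk_bulk_data **clocks)
{
	struct clk_bulk_data *bulk;
	int num_clocks = 0, ret;

	bulk = devm_kcalloc(&pdev->dev, 3, sizeof(*bulk), GFP_KERNEL);
	if (!bulk)
		return -ENOMEM;

	bulk[num_clocks++].id = "iface";
	bulk[num_clocks++].id = "bus";
	bulk[num_clocks++].id = "vsync";

	/* the _optional variant leaves missing clocks as NULL instead of failing */
	ret = devm_clk_bulk_get_optional(&pdev->dev, num_clocks, bulk);
	if (ret)
		return ret;

	*clocks = bulk;
	return num_clocks;
}

After this, clk_bulk_prepare_enable(num_clocks, bulk) turns the whole set on and clk_bulk_disable_unprepare() undoes it, which is why drivers bother with the array form at all.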
/kernel/linux/linux-5.10/drivers/staging/vc04_services/interface/vchiq_arm/
vchiq_2835_arm.c
219 vchiq_prepare_bulk_data(struct vchiq_bulk *bulk, void *offset, in vchiq_prepare_bulk_data() argument
232 bulk->data = pagelistinfo->dma_addr; in vchiq_prepare_bulk_data()
238 bulk->remote_data = pagelistinfo; in vchiq_prepare_bulk_data()
244 vchiq_complete_bulk(struct vchiq_bulk *bulk) in vchiq_complete_bulk() argument
246 if (bulk && bulk->remote_data && bulk->actual) in vchiq_complete_bulk()
247 free_pagelist((struct vchiq_pagelist_info *)bulk->remote_data, in vchiq_complete_bulk()
248 bulk->actual); in vchiq_complete_bulk()
vchiq_core.c
1204 /* Called by the slot handler - don't hold the bulk mutex */
1221 struct vchiq_bulk *bulk = in notify_bulks() local
1224 /* Only generate callbacks for non-dummy bulk in notify_bulks()
1226 if (bulk->data && service->instance) { in notify_bulks()
1227 if (bulk->actual != VCHIQ_BULK_ACTUAL_ABORTED) { in notify_bulks()
1228 if (bulk->dir == VCHIQ_BULK_TRANSMIT) { in notify_bulks()
1233 bulk->actual); in notify_bulks()
1239 bulk->actual); in notify_bulks()
1245 if (bulk->mode == VCHIQ_BULK_MODE_BLOCKING) { in notify_bulks()
1249 waiter = bulk in notify_bulks()
1372 struct vchiq_bulk *bulk = abort_outstanding_bulks() local
1730 struct vchiq_bulk *bulk; parse_rx_slots() local
3031 struct vchiq_bulk *bulk; vchiq_bulk_transfer() local
[all...]
/kernel/linux/linux-6.6/drivers/gpu/drm/ttm/
ttm_resource.c
34 * ttm_lru_bulk_move_init - initialize a bulk move structure
35 * @bulk: the structure to init
39 void ttm_lru_bulk_move_init(struct ttm_lru_bulk_move *bulk) in ttm_lru_bulk_move_init() argument
41 memset(bulk, 0, sizeof(*bulk)); in ttm_lru_bulk_move_init()
46 * ttm_lru_bulk_move_tail - bulk move range of resources to the LRU tail.
48 * @bulk: bulk move structure
53 void ttm_lru_bulk_move_tail(struct ttm_lru_bulk_move *bulk) in ttm_lru_bulk_move_tail() argument
59 struct ttm_lru_bulk_move_pos *pos = &bulk in ttm_lru_bulk_move_tail()
79 ttm_lru_bulk_move_pos(struct ttm_lru_bulk_move *bulk, struct ttm_resource *res) ttm_lru_bulk_move_pos() argument
97 ttm_lru_bulk_move_add(struct ttm_lru_bulk_move *bulk, struct ttm_resource *res) ttm_lru_bulk_move_add() argument
111 ttm_lru_bulk_move_del(struct ttm_lru_bulk_move *bulk, struct ttm_resource *res) ttm_lru_bulk_move_del() argument
[all...]
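ttm_resource.c implements the LRU bulk-move helpers; a driver initialises one bulk-move structure (typically per VM), attaches buffers to it, and later bumps the whole tracked range to the LRU tail in one operation. A rough usage sketch, assuming the 6.x API with ttm_bo_set_bulk_move (locking reduced to the essentials):

#include <linux/spinlock.h>
#include <drm/ttm/ttm_bo.h>
#include <drm/ttm/ttm_device.h>
#include <drm/ttm/ttm_resource.h>

static void lru_bulk_example(struct ttm_device *bdev,
			     struct ttm_buffer_object *bo,
			     struct ttm_lru_bulk_move *bulk)
{
	ttm_lru_bulk_move_init(bulk);		/* zero the per-priority ranges */

	/* track this BO's resources as part of the bulk range (bo reserved) */
	ttm_bo_set_bulk_move(bo, bulk);

	/* later: move the whole tracked range to the LRU tail at once */
	spin_lock(&bdev->lru_lock);
	ttm_lru_bulk_move_tail(bulk);
	spin_unlock(&bdev->lru_lock);
}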
/kernel/linux/linux-5.10/drivers/media/usb/uvc/
uvc_video.c
1003 * uvc_video_decode_start is called with URB data at the start of a bulk or
1021 * uvc_video_decode_end is called with header data at the end of a bulk or
1206 * uvc_video_encode_data(). Only bulk transfers are currently supported.
1235 nbytes = min(stream->bulk.max_payload_size - stream->bulk.payload_size, in uvc_video_encode_data()
1423 if (urb->actual_length == 0 && stream->bulk.header_size == 0) in uvc_video_decode_bulk()
1428 stream->bulk.payload_size += len; in uvc_video_decode_bulk()
1433 if (stream->bulk.header_size == 0 && !stream->bulk.skip_payload) { in uvc_video_decode_bulk()
1442 stream->bulk in uvc_video_decode_bulk()
[all...]
/kernel/linux/linux-6.6/drivers/media/usb/uvc/
uvc_video.c
1021 * uvc_video_decode_start is called with URB data at the start of a bulk or
1039 * uvc_video_decode_end is called with header data at the end of a bulk or
1254 * uvc_video_encode_data(). Only bulk transfers are currently supported.
1283 nbytes = min(stream->bulk.max_payload_size - stream->bulk.payload_size, in uvc_video_encode_data()
1472 if (urb->actual_length == 0 && stream->bulk.header_size == 0) in uvc_video_decode_bulk()
1477 stream->bulk.payload_size += len; in uvc_video_decode_bulk()
1483 if (stream->bulk.header_size == 0 && !stream->bulk.skip_payload) { in uvc_video_decode_bulk()
1492 stream->bulk in uvc_video_decode_bulk()
[all...]
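The uvc_video_encode_data() hit is the bulk output path capping each copy to whatever is left of the current payload. The same min() calculation in isolation (field names borrowed from the snippet, the wrapper itself is hypothetical):

#include <linux/minmax.h>

/* How many of 'len' bytes still fit into the current bulk payload. */
static unsigned int bulk_chunk_bytes(unsigned int max_payload_size,
				     unsigned int payload_size,
				     unsigned int len)
{
	return min(max_payload_size - payload_size, len);
}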
/kernel/linux/linux-6.6/drivers/staging/vc04_services/interface/vchiq_arm/
vchiq_core.c
1296 get_bulk_reason(struct vchiq_bulk *bulk) in get_bulk_reason() argument
1298 if (bulk->dir == VCHIQ_BULK_TRANSMIT) { in get_bulk_reason()
1299 if (bulk->actual == VCHIQ_BULK_ACTUAL_ABORTED) in get_bulk_reason()
1305 if (bulk->actual == VCHIQ_BULK_ACTUAL_ABORTED) in get_bulk_reason()
1311 /* Called by the slot handler - don't hold the bulk mutex */
1325 struct vchiq_bulk *bulk = in notify_bulks() local
1329 * Only generate callbacks for non-dummy bulk in notify_bulks()
1332 if (bulk->data && service->instance) { in notify_bulks()
1333 if (bulk->actual != VCHIQ_BULK_ACTUAL_ABORTED) { in notify_bulks()
1334 if (bulk in notify_bulks()
1453 struct vchiq_bulk *bulk = &queue->bulks[BULK_INDEX(queue->process)]; abort_outstanding_bulks() local
1747 struct vchiq_bulk *bulk; parse_message() local
3021 struct vchiq_bulk *bulk; vchiq_bulk_transfer() local
[all...]
vchiq_arm.c
626 vchiq_prepare_bulk_data(struct vchiq_instance *instance, struct vchiq_bulk *bulk, void *offset, in vchiq_prepare_bulk_data() argument
639 bulk->data = pagelistinfo->dma_addr; in vchiq_prepare_bulk_data()
645 bulk->remote_data = pagelistinfo; in vchiq_prepare_bulk_data()
651 vchiq_complete_bulk(struct vchiq_instance *instance, struct vchiq_bulk *bulk) in vchiq_complete_bulk() argument
653 if (bulk && bulk->remote_data && bulk->actual) in vchiq_complete_bulk()
654 free_pagelist(instance, (struct vchiq_pagelist_info *)bulk->remote_data, in vchiq_complete_bulk()
655 bulk->actual); in vchiq_complete_bulk()
944 struct vchiq_bulk *bulk in vchiq_blocking_bulk_transfer() local
971 struct vchiq_bulk *bulk = waiter->bulk_waiter.bulk; vchiq_blocking_bulk_transfer() local
[all...]
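The vchiq_blocking_bulk_transfer() hits are the blocking mode, where the submitter parks on a waiter until notify_bulks() reports the transfer (or its abort). A generic sketch of that wait/notify shape using a plain completion (not the actual vchiq bulk_waiter):

#include <linux/completion.h>

struct bulk_waiter_example {			/* hypothetical */
	struct completion event;
	int actual;				/* bytes transferred, or an abort marker */
};

/* Notification side: called when the bulk transfer finishes or aborts. */
static void bulk_notify(struct bulk_waiter_example *w, int actual)
{
	w->actual = actual;
	complete(&w->event);
}

/* Submitting side: block until the transfer has been notified. */
static int bulk_wait(struct bulk_waiter_example *w)
{
	int ret;

	init_completion(&w->event);
	/* ... queue the bulk transfer that will call bulk_notify() ... */
	ret = wait_for_completion_interruptible(&w->event);
	if (ret)
		return ret;
	return w->actual;
}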
/kernel/linux/linux-5.10/drivers/remoteproc/
qcom_wcnss.c
378 struct regulator_bulk_data *bulk; in wcnss_init_regulators() local
382 bulk = devm_kcalloc(wcnss->dev, in wcnss_init_regulators()
385 if (!bulk) in wcnss_init_regulators()
389 bulk[i].supply = info[i].name; in wcnss_init_regulators()
391 ret = devm_regulator_bulk_get(wcnss->dev, num_vregs, bulk); in wcnss_init_regulators()
397 regulator_set_voltage(bulk[i].consumer, in wcnss_init_regulators()
402 regulator_set_load(bulk[i].consumer, info[i].load_uA); in wcnss_init_regulators()
405 wcnss->vregs = bulk; in wcnss_init_regulators()
/kernel/linux/linux-6.6/drivers/remoteproc/
qcom_wcnss.c
431 struct regulator_bulk_data *bulk; in wcnss_init_regulators() local
445 bulk = devm_kcalloc(wcnss->dev, in wcnss_init_regulators()
448 if (!bulk) in wcnss_init_regulators()
452 bulk[i].supply = info[i].name; in wcnss_init_regulators()
454 ret = devm_regulator_bulk_get(wcnss->dev, num_vregs, bulk); in wcnss_init_regulators()
460 regulator_set_voltage(bulk[i].consumer, in wcnss_init_regulators()
465 regulator_set_load(bulk[i].consumer, info[i].load_uA); in wcnss_init_regulators()
468 wcnss->vregs = bulk; in wcnss_init_regulators()
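wcnss_init_regulators() is the standard regulator bulk sequence: allocate regulator_bulk_data, name the supplies, fetch them in one call, then tune each consumer. A trimmed-down sketch of the same flow (supply names and the load value are placeholders):

#include <linux/device.h>
#include <linux/kernel.h>
#include <linux/regulator/consumer.h>

static int init_regulators_example(struct device *dev)
{
	static const char * const names[] = { "vddmx", "vddcx" };
	struct regulator_bulk_data *bulk;
	int i, ret;

	bulk = devm_kcalloc(dev, ARRAY_SIZE(names), sizeof(*bulk), GFP_KERNEL);
	if (!bulk)
		return -ENOMEM;

	for (i = 0; i < ARRAY_SIZE(names); i++)
		bulk[i].supply = names[i];

	ret = devm_regulator_bulk_get(dev, ARRAY_SIZE(names), bulk);
	if (ret)
		return ret;

	/* per-supply tuning, mirroring the wcnss pattern */
	regulator_set_load(bulk[0].consumer, 100000 /* uA, placeholder */);

	return regulator_bulk_enable(ARRAY_SIZE(names), bulk);
}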
/kernel/linux/linux-5.10/drivers/media/usb/dvb-usb/
dibusb-mb.c
203 .bulk = {
293 .bulk = {
362 .bulk = {
424 .bulk = {
dtt200u.c
199 .bulk = {
251 .bulk = {
303 .bulk = {
355 .bulk = {
/kernel/linux/linux-6.6/drivers/media/usb/dvb-usb/
dtt200u.c
213 .bulk = {
265 .bulk = {
317 .bulk = {
369 .bulk = {
/kernel/linux/linux-5.10/drivers/media/usb/dvb-usb-v2/
usb_urb.c
28 __func__, ptype == PIPE_ISOCHRONOUS ? "isoc" : "bulk", in usb_urb_complete()
155 stream->props.u.bulk.buffersize, in usb_urb_alloc_bulk_urbs()
260 buf_size = stream->props.u.bulk.buffersize; in usb_urb_reconfig()
281 props->u.bulk.buffersize == in usb_urb_reconfig()
282 stream->props.u.bulk.buffersize) in usb_urb_reconfig()
326 stream->props.u.bulk.buffersize); in usb_urb_initv2()
/kernel/linux/linux-6.6/drivers/media/usb/dvb-usb-v2/
usb_urb.c
28 __func__, ptype == PIPE_ISOCHRONOUS ? "isoc" : "bulk", in usb_urb_complete()
155 stream->props.u.bulk.buffersize, in usb_urb_alloc_bulk_urbs()
260 buf_size = stream->props.u.bulk.buffersize; in usb_urb_reconfig()
281 props->u.bulk.buffersize == in usb_urb_reconfig()
282 stream->props.u.bulk.buffersize) in usb_urb_reconfig()
326 stream->props.u.bulk.buffersize); in usb_urb_initv2()
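usb_urb_alloc_bulk_urbs() sizes its transfer buffers from props.u.bulk.buffersize and then submits ordinary bulk URBs. A minimal kernel-side sketch of filling and submitting one bulk IN URB (endpoint address and buffer size are placeholders):

#include <linux/usb.h>

static void bulk_complete_example(struct urb *urb)
{
	/* hand urb->actual_length bytes to the consumer, then resubmit */
}

static int submit_bulk_urb_example(struct usb_device *udev, struct urb *urb,
				   void *buf, int buffersize)
{
	usb_fill_bulk_urb(urb, udev,
			  usb_rcvbulkpipe(udev, 0x81),	/* IN endpoint 1 */
			  buf, buffersize,
			  bulk_complete_example, NULL);

	return usb_submit_urb(urb, GFP_KERNEL);
}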
/kernel/linux/linux-5.10/drivers/usb/core/
devio.c
480 static const char *types[] = {"isoc", "int", "ctrl", "bulk"}; in snoop_urb()
1266 struct usbdevfs_bulktransfer *bulk) in do_proc_bulk()
1275 ret = findintfep(ps->dev, bulk->ep); in do_proc_bulk()
1282 len1 = bulk->len; in do_proc_bulk()
1286 if (bulk->ep & USB_DIR_IN) in do_proc_bulk()
1287 pipe = usb_rcvbulkpipe(dev, bulk->ep & 0x7f); in do_proc_bulk()
1289 pipe = usb_sndbulkpipe(dev, bulk->ep & 0x7f); in do_proc_bulk()
1318 tmo = bulk->timeout; in do_proc_bulk()
1319 if (bulk->ep & 0x80) { in do_proc_bulk()
1328 if (copy_to_user(bulk in do_proc_bulk()
1265 do_proc_bulk(struct usb_dev_state *ps, struct usbdevfs_bulktransfer *bulk) do_proc_bulk() argument
1357 struct usbdevfs_bulktransfer bulk; proc_bulk() local
2136 struct usbdevfs_bulktransfer bulk; proc_bulk_compat() local
[all...]
/kernel/linux/linux-6.6/drivers/usb/core/
devio.c
491 static const char *types[] = {"isoc", "int", "ctrl", "bulk"}; in snoop_urb()
1281 struct usbdevfs_bulktransfer *bulk) in do_proc_bulk()
1290 ret = findintfep(ps->dev, bulk->ep); in do_proc_bulk()
1297 len1 = bulk->len; in do_proc_bulk()
1301 if (bulk->ep & USB_DIR_IN) in do_proc_bulk()
1302 pipe = usb_rcvbulkpipe(dev, bulk->ep & 0x7f); in do_proc_bulk()
1304 pipe = usb_sndbulkpipe(dev, bulk->ep & 0x7f); in do_proc_bulk()
1333 tmo = bulk->timeout; in do_proc_bulk()
1334 if (bulk->ep & 0x80) { in do_proc_bulk()
1343 if (copy_to_user(bulk in do_proc_bulk()
1280 do_proc_bulk(struct usb_dev_state *ps, struct usbdevfs_bulktransfer *bulk) do_proc_bulk() argument
1372 struct usbdevfs_bulktransfer bulk; proc_bulk() local
2151 struct usbdevfs_bulktransfer bulk; proc_bulk_compat() local
[all...]
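do_proc_bulk() is the kernel half of the USBDEVFS_BULK ioctl; user space fills the same usbdevfs_bulktransfer structure and hands it to ioctl() on the usbfs device node. A small sketch of that caller (device path and endpoint are placeholders):

#include <fcntl.h>
#include <string.h>
#include <sys/ioctl.h>
#include <unistd.h>
#include <linux/usbdevice_fs.h>

int usbfs_bulk_read(const char *devpath, unsigned int ep_in,
		    void *buf, unsigned int len)
{
	struct usbdevfs_bulktransfer bulk;
	int fd, ret;

	fd = open(devpath, O_RDWR);	/* e.g. a node under /dev/bus/usb/ */
	if (fd < 0)
		return -1;

	memset(&bulk, 0, sizeof(bulk));
	bulk.ep = ep_in;		/* the 0x80 bit selects IN, as checked above */
	bulk.len = len;
	bulk.timeout = 1000;		/* milliseconds */
	bulk.data = buf;

	/* returns the number of bytes transferred, or -1 with errno set */
	ret = ioctl(fd, USBDEVFS_BULK, &bulk);
	close(fd);
	return ret;
}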
/kernel/linux/linux-5.10/drivers/interconnect/
Makefile
4 icc-core-objs := core.o bulk.o
/kernel/linux/linux-6.6/drivers/interconnect/
Makefile
4 icc-core-objs := core.o bulk.o debugfs-client.o
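bulk.o here provides the interconnect bulk helpers, so a consumer can get, enable and release several bandwidth paths as a set. A sketch of typical usage, assuming the icc_bulk_* helpers from include/linux/interconnect.h (path names are placeholders; verify the exact helper set against the header):

#include <linux/device.h>
#include <linux/interconnect.h>
#include <linux/kernel.h>

static int icc_bulk_example(struct device *dev)
{
	struct icc_bulk_data paths[] = {
		{ .name = "cpu-mem" },
		{ .name = "gpu-mem" },
	};
	int ret;

	ret = of_icc_bulk_get(dev, ARRAY_SIZE(paths), paths);
	if (ret)
		return ret;

	ret = icc_bulk_enable(ARRAY_SIZE(paths), paths);
	if (ret)
		icc_bulk_put(ARRAY_SIZE(paths), paths);

	return ret;
}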
