
Searched refs:tmp_dev (Results 1 - 23 of 23) sorted by relevance

/kernel/linux/linux-5.10/drivers/crypto/virtio/
virtio_crypto_mgr.c, in virtcrypto_get_dev_node():
  187  struct virtio_crypto *vcrypto_dev = NULL, *tmp_dev;   (local)
  192  list_for_each_entry(tmp_dev, virtcrypto_devmgr_get_head(), list) {
  194  if ((node == dev_to_node(&tmp_dev->vdev->dev) ||
  195  dev_to_node(&tmp_dev->vdev->dev) < 0) &&
  196  virtcrypto_dev_started(tmp_dev) &&
  197  virtcrypto_algo_is_supported(tmp_dev, service, algo)) {
  198  ctr = atomic_read(&tmp_dev->ref_count);
  200  vcrypto_dev = tmp_dev;
  210  list_for_each_entry(tmp_dev,
  212  if (virtcrypto_dev_started(tmp_dev)
  ... (more matches in this file not shown)
/kernel/linux/linux-6.6/drivers/crypto/virtio/
virtio_crypto_mgr.c, in virtcrypto_get_dev_node():
  187  struct virtio_crypto *vcrypto_dev = NULL, *tmp_dev;   (local)
  192  list_for_each_entry(tmp_dev, virtcrypto_devmgr_get_head(), list) {
  194  if ((node == dev_to_node(&tmp_dev->vdev->dev) ||
  195  dev_to_node(&tmp_dev->vdev->dev) < 0) &&
  196  virtcrypto_dev_started(tmp_dev) &&
  197  virtcrypto_algo_is_supported(tmp_dev, service, algo)) {
  198  ctr = atomic_read(&tmp_dev->ref_count);
  200  vcrypto_dev = tmp_dev;
  210  list_for_each_entry(tmp_dev,
  212  if (virtcrypto_dev_started(tmp_dev)
  ... (more matches in this file not shown)
/kernel/linux/linux-6.6/drivers/crypto/intel/qat/qat_common/
qat_crypto.c, in qat_crypto_get_instance_node():
  53  struct adf_accel_dev *accel_dev = NULL, *tmp_dev;   (local)
  57  list_for_each_entry(tmp_dev, adf_devmgr_get_head(), list) {
  60  if ((node == dev_to_node(&GET_DEV(tmp_dev)) ||
  61  dev_to_node(&GET_DEV(tmp_dev)) < 0) &&
  62  adf_dev_started(tmp_dev) &&
  63  !list_empty(&tmp_dev->crypto_list)) {
  64  ctr = atomic_read(&tmp_dev->ref_count);
  66  accel_dev = tmp_dev;
  75  list_for_each_entry(tmp_dev, adf_devmgr_get_head(), list) {
  76  if (adf_dev_started(tmp_dev)
  ... (more matches in this file not shown)
qat_compression.c, in qat_compression_get_instance_node():
  57  struct adf_accel_dev *tmp_dev;   (local)
  61  tmp_dev = list_entry(itr, struct adf_accel_dev, list);
  62  tmp_dev_node = dev_to_node(&GET_DEV(tmp_dev));
  65  adf_dev_started(tmp_dev) && !list_empty(&tmp_dev->compression_list)) {
  66  ctr = atomic_read(&tmp_dev->ref_count);
  68  accel_dev = tmp_dev;
  78  struct adf_accel_dev *tmp_dev;   (local)
  80  tmp_dev = list_entry(itr, struct adf_accel_dev, list);
  81  if (adf_dev_started(tmp_dev)
  ... (more matches in this file not shown)
/kernel/linux/linux-5.10/drivers/crypto/qat/qat_common/
qat_crypto.c, in qat_crypto_get_instance_node():
  53  struct adf_accel_dev *accel_dev = NULL, *tmp_dev;   (local)
  57  list_for_each_entry(tmp_dev, adf_devmgr_get_head(), list) {
  60  if ((node == dev_to_node(&GET_DEV(tmp_dev)) ||
  61  dev_to_node(&GET_DEV(tmp_dev)) < 0) &&
  62  adf_dev_started(tmp_dev) &&
  63  !list_empty(&tmp_dev->crypto_list)) {
  64  ctr = atomic_read(&tmp_dev->ref_count);
  66  accel_dev = tmp_dev;
  75  list_for_each_entry(tmp_dev, adf_devmgr_get_head(), list) {
  76  if (adf_dev_started(tmp_dev)
  ... (more matches in this file not shown)
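
The virtio-crypto and QAT hits above share one device-selection pattern: walk the global device list, prefer devices on the caller's NUMA node (or with no node affinity, dev_to_node() < 0), and of those pick the started device with the lowest ref_count, falling back to any started device when the node has none. Below is a minimal userspace sketch of that two-pass loop; the names crypto_dev and pick_device are hypothetical, an array stands in for the kernel's locked intrusive list, and capability checks such as virtcrypto_algo_is_supported() or !list_empty(&tmp_dev->crypto_list) are folded into the started flag.

    #include <limits.h>
    #include <stdbool.h>
    #include <stdio.h>

    /* Simplified stand-in for struct virtio_crypto / struct adf_accel_dev. */
    struct crypto_dev {
        const char *name;
        int numa_node;          /* -1: no node affinity */
        bool started;           /* device up and offering the service */
        int ref_count;          /* instances already handed out */
    };

    /*
     * Two-pass selection as in virtcrypto_get_dev_node() and
     * qat_crypto_get_instance_node(): node-local first, least loaded wins,
     * then fall back to any started device.
     */
    static struct crypto_dev *pick_device(struct crypto_dev *devs, int n, int node)
    {
        struct crypto_dev *best = NULL;
        int best_ctr = INT_MAX;
        int i;

        for (i = 0; i < n; i++) {                       /* pass 1: this node */
            struct crypto_dev *d = &devs[i];

            if ((d->numa_node == node || d->numa_node < 0) &&
                d->started && d->ref_count < best_ctr) {
                best = d;
                best_ctr = d->ref_count;
            }
        }
        if (best)
            return best;

        for (i = 0; i < n; i++) {                       /* pass 2: any node */
            struct crypto_dev *d = &devs[i];

            if (d->started && d->ref_count < best_ctr) {
                best = d;
                best_ctr = d->ref_count;
            }
        }
        return best;
    }

    int main(void)
    {
        struct crypto_dev devs[] = {
            { "dev0", 0, true, 3 },
            { "dev1", 1, true, 1 },
            { "dev2", -1, true, 2 },
        };
        struct crypto_dev *d = pick_device(devs, 3, 1);

        printf("picked %s\n", d ? d->name : "none");    /* picks dev1 */
        return 0;
    }
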
/kernel/linux/linux-6.6/drivers/md/
md-linear.c, in linear_make_request():
  209  struct dev_info *tmp_dev;   (local)
  217  tmp_dev = which_dev(mddev, bio_sector);
  218  start_sector = tmp_dev->end_sector - tmp_dev->rdev->sectors;
  219  end_sector = tmp_dev->end_sector;
  220  data_offset = tmp_dev->rdev->data_offset;
  226  if (unlikely(is_rdev_broken(tmp_dev->rdev))) {
  227  md_error(mddev, tmp_dev->rdev);
  242  bio_set_dev(bio, tmp_dev->rdev->bdev);
  263  tmp_dev ...
  ... (more matches in this file not shown)
raid0.c, in raid0_map_submit_bio():
  552  struct md_rdev *tmp_dev;   (local)
  561  tmp_dev = map_sector(mddev, zone, bio_sector, &sector);
  564  tmp_dev = map_sector(mddev, zone, sector, &sector);
  572  if (unlikely(is_rdev_broken(tmp_dev))) {
  574  md_error(mddev, tmp_dev);
  578  bio_set_dev(bio, tmp_dev->bdev);
  580  tmp_dev->data_offset;
/kernel/linux/linux-5.10/drivers/md/
md-linear.c, in linear_make_request():
  220  struct dev_info *tmp_dev;   (local)
  228  tmp_dev = which_dev(mddev, bio_sector);
  229  start_sector = tmp_dev->end_sector - tmp_dev->rdev->sectors;
  230  end_sector = tmp_dev->end_sector;
  231  data_offset = tmp_dev->rdev->data_offset;
  237  if (unlikely(is_mddev_broken(tmp_dev->rdev, "linear"))) {
  251  bio_set_dev(bio, tmp_dev->rdev->bdev);
  274  bdevname(tmp_dev->rdev->bdev, b),
  275  (unsigned long long)tmp_dev ...
  ... (more matches in this file not shown)
raid0.c, in raid0_make_request():
  570  struct md_rdev *tmp_dev;   (local)
  610  tmp_dev = map_sector(mddev, zone, orig_sector, &sector);
  613  tmp_dev = map_sector(mddev, zone, sector, &sector);
  621  if (unlikely(is_mddev_broken(tmp_dev, "raid0"))) {
  626  bio_set_dev(bio, tmp_dev->bdev);
  628  tmp_dev->data_offset;
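
The md-linear and raid0 hits show a different use of tmp_dev: translate the bio's logical sector into a member device plus an on-device sector, check the member is not broken, then redirect the bio with bio_set_dev() and the adjusted offset. A simplified userspace sketch of the linear arithmetic follows; which_dev() here is a plain scan (the kernel's version searches the configured members by end_sector), and dev_info carries only the fields the calculation needs.

    #include <stdint.h>
    #include <stdio.h>

    typedef uint64_t sector_t;

    /* Only the fields linear_make_request() needs from struct dev_info. */
    struct dev_info {
        const char *name;
        sector_t end_sector;    /* cumulative end of this member in the array */
        sector_t sectors;       /* usable size of this member */
        sector_t data_offset;   /* where array data starts on the member */
    };

    /* First member whose cumulative range still covers the sector. */
    static struct dev_info *which_dev(struct dev_info *devs, int n, sector_t sector)
    {
        int i;

        for (i = 0; i < n; i++)
            if (sector < devs[i].end_sector)
                return &devs[i];
        return NULL;
    }

    int main(void)
    {
        struct dev_info devs[] = {
            { "sda", 1000, 1000, 2048 },
            { "sdb", 2500, 1500, 2048 },
        };
        sector_t bio_sector = 1200;
        struct dev_info *tmp_dev = which_dev(devs, 2, bio_sector);

        if (tmp_dev) {
            /* Same start_sector arithmetic as lines 218/229 above, then the
             * offset inside the member plus its data_offset (the remap the
             * kernel applies before resubmitting the bio). */
            sector_t start_sector = tmp_dev->end_sector - tmp_dev->sectors;
            sector_t mapped = bio_sector - start_sector + tmp_dev->data_offset;

            printf("array sector %llu -> %s sector %llu\n",
                   (unsigned long long)bio_sector, tmp_dev->name,
                   (unsigned long long)mapped);
        }
        return 0;
    }
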
/kernel/linux/linux-5.10/drivers/net/ethernet/mellanox/mlx5/core/
dev.c, in mlx5_get_next_phys_dev():
  313  struct mlx5_core_dev *tmp_dev;   (local)
  322  tmp_dev = container_of(priv, struct mlx5_core_dev, priv);
  323  if (!mlx5_core_is_pf(tmp_dev))
  326  if ((dev != tmp_dev) && (mlx5_gen_pci_id(tmp_dev) == pci_id)) {
  327  res = tmp_dev;
lag.c, in mlx5_lag_add():
  561  struct mlx5_core_dev *tmp_dev;   (local)
  569  tmp_dev = mlx5_get_next_phys_dev(dev);
  570  if (tmp_dev)
  571  ldev = tmp_dev->priv.lag;
/kernel/linux/linux-5.10/drivers/net/ethernet/mellanox/mlx5/core/lib/
devcom.c, in mlx5_devcom_register_device():
  84  struct mlx5_core_dev *tmp_dev = NULL;   (local)
  89  tmp_dev = iter->devs[i];
  97  sguid1 = mlx5_query_nic_system_image_guid(tmp_dev);
/kernel/linux/linux-6.6/drivers/firmware/arm_ffa/
bus.c, in ffa_device_is_valid():
  157  struct ffa_device *tmp_dev;   (local)
  161  tmp_dev = to_ffa_dev(dev);
  162  if (tmp_dev == ffa_dev) {
/kernel/linux/linux-5.10/drivers/pcmcia/
ds.c, in pcmcia_get_dev():
  217  struct device *tmp_dev;   (local)
  218  tmp_dev = get_device(&p_dev->dev);
  219  if (!tmp_dev)
  221  return to_pcmcia_dev(tmp_dev);
ds.c, in pcmcia_device_add():
  487  struct pcmcia_device *p_dev, *tmp_dev;   (local)
  533  list_for_each_entry(tmp_dev, &s->devices_list, socket_device_list)
  534  if (p_dev->func == tmp_dev->func) {
  535  p_dev->function_config = tmp_dev->function_config;
  536  p_dev->irq = tmp_dev->irq;
/kernel/linux/linux-6.6/drivers/pcmcia/
ds.c, in pcmcia_get_dev():
  217  struct device *tmp_dev;   (local)
  218  tmp_dev = get_device(&p_dev->dev);
  219  if (!tmp_dev)
  221  return to_pcmcia_dev(tmp_dev);
ds.c, in pcmcia_device_add():
  482  struct pcmcia_device *p_dev, *tmp_dev;   (local)
  528  list_for_each_entry(tmp_dev, &s->devices_list, socket_device_list)
  529  if (p_dev->func == tmp_dev->func) {
  530  p_dev->function_config = tmp_dev->function_config;
  531  p_dev->irq = tmp_dev->irq;
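
The ds.c hits cover two spots: pcmcia_get_dev() takes a reference on the embedded struct device via get_device() and converts the result back to the containing pcmcia_device, and pcmcia_device_add() copies the function_config of an already-known sibling with the same func number. The sketch below illustrates only the first, the take-a-reference-then-container_of idiom, in plain userspace C; struct device, get_device() and the refcount field are simplified stand-ins for the kernel's kobject machinery, not its real API.

    #include <stddef.h>
    #include <stdio.h>

    /* container_of(), as used by to_pcmcia_dev(): recover the outer
     * structure from a pointer to one of its members. */
    #define container_of(ptr, type, member) \
        ((type *)((char *)(ptr) - offsetof(type, member)))

    struct device {                 /* toy stand-in, not the kernel struct */
        int refcount;
    };

    struct pcmcia_device {
        int func;
        struct device dev;          /* embedded generic device */
    };

    /* Toy get_device(): take a reference, NULL-safe.  (The kernel version
     * increments the embedded kobject instead of a bare int.) */
    static struct device *get_device(struct device *dev)
    {
        if (!dev)
            return NULL;
        dev->refcount++;
        return dev;
    }

    #define to_pcmcia_dev(d) container_of(d, struct pcmcia_device, dev)

    /* Mirrors pcmcia_get_dev(): NULL if no reference could be taken,
     * otherwise the containing pcmcia_device with one extra reference. */
    static struct pcmcia_device *pcmcia_get_dev(struct pcmcia_device *p_dev)
    {
        struct device *tmp_dev = get_device(p_dev ? &p_dev->dev : NULL);

        if (!tmp_dev)
            return NULL;
        return to_pcmcia_dev(tmp_dev);
    }

    int main(void)
    {
        struct pcmcia_device card = { .func = 0, .dev = { .refcount = 1 } };
        struct pcmcia_device *ref = pcmcia_get_dev(&card);

        printf("got %s, refcount now %d\n",
               ref == &card ? "the card back" : "nothing", card.dev.refcount);
        return 0;
    }
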
/kernel/linux/linux-6.6/drivers/s390/block/
dasd_devmap.c, in dasd_devmap_check_copy_relation():
  665  struct dasd_device *tmp_dev;   (local)
  716  tmp_dev = device;
  718  tmp_dev = copy->entry[j].device;
  720  if (!tmp_dev)
  723  if (dasd_devmap_get_pprc_status(tmp_dev, &tmp_dat))
  727  dev_warn(&tmp_dev->cdev->dev,
/kernel/linux/linux-6.6/drivers/net/ethernet/mellanox/mlx5/core/lag/
lag.c, in __mlx5_lag_dev_add_mdev():
  1219  struct mlx5_core_dev *tmp_dev;   (local)
  1221  tmp_dev = mlx5_get_next_phys_dev_lag(dev);
  1222  if (tmp_dev)
  1223  ldev = mlx5_lag_dev(tmp_dev);
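
In the mlx5 hits (dev.c and lag.c in 5.10, lag.c here), tmp_dev is a sibling function of the same physical NIC: before allocating a LAG context, the driver looks for another function of the card and reuses its existing context. A rough userspace sketch of that reuse-or-allocate step follows; core_dev, lag_ctx, phys_id and get_or_create_lag() are hypothetical names, and the real driver identifies siblings through mlx5_get_next_phys_dev_lag()/mlx5_gen_pci_id() and keeps the context in dev->priv.lag.

    #include <stdio.h>
    #include <stdlib.h>

    struct lag_ctx {                /* hypothetical shared per-card context */
        int refs;
    };

    struct core_dev {               /* hypothetical stand-in for mlx5_core_dev */
        const char *name;
        int phys_id;                /* equal for functions of the same card */
        struct lag_ctx *lag;        /* shared context, NULL until created */
    };

    /* Reuse a sibling function's lag context if one exists (the step the
     * hits above perform via mlx5_get_next_phys_dev*()), else allocate. */
    static struct lag_ctx *get_or_create_lag(struct core_dev *devs, int n,
                                             struct core_dev *dev)
    {
        int i;

        for (i = 0; i < n; i++) {
            struct core_dev *tmp_dev = &devs[i];

            if (tmp_dev != dev && tmp_dev->phys_id == dev->phys_id &&
                tmp_dev->lag) {
                tmp_dev->lag->refs++;       /* share the sibling's context */
                return tmp_dev->lag;
            }
        }

        dev->lag = calloc(1, sizeof(*dev->lag));    /* first function: create */
        if (dev->lag)
            dev->lag->refs = 1;
        return dev->lag;
    }

    int main(void)
    {
        struct lag_ctx shared = { .refs = 1 };
        struct core_dev devs[] = {
            { "pf0", 7, &shared },
            { "pf1", 7, NULL },
        };

        devs[1].lag = get_or_create_lag(devs, 2, &devs[1]);
        printf("pf1 %s the lag context (refs=%d)\n",
               devs[1].lag == &shared ? "reuses" : "allocates",
               devs[1].lag->refs);
        return 0;
    }
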
/kernel/linux/linux-5.10/drivers/net/usb/
hso.c, in hso_probe():
  2880  struct hso_device *tmp_dev = NULL;   (local)
  2920  tmp_dev = hso_dev;
  2941  if (tmp_dev)
  2942  hso_dev = tmp_dev;
/kernel/linux/linux-6.6/drivers/net/usb/
hso.c, in hso_probe():
  2873  struct hso_device *tmp_dev = NULL;   (local)
  2913  tmp_dev = hso_dev;
  2934  if (tmp_dev)
  2935  hso_dev = tmp_dev;
/kernel/linux/linux-5.10/drivers/iommu/intel/
dmar.c, in dmar_free_dev_scope():
  107  struct device *tmp_dev;   (local)
  110  for_each_active_dev_scope(*devices, *cnt, i, tmp_dev)
  111  put_device(tmp_dev);
/kernel/linux/linux-6.6/drivers/iommu/intel/
dmar.c, in dmar_free_dev_scope():
  107  struct device *tmp_dev;   (local)
  110  for_each_active_dev_scope(*devices, *cnt, i, tmp_dev)
  111  put_device(tmp_dev);
/kernel/linux/linux-5.10/drivers/target/
target_core_user.c, in check_timedout_devices():
  2933  struct tcmu_dev *udev, *tmp_dev;   (local)
  2940  list_for_each_entry_safe(udev, tmp_dev, &devs, timedout_entry) {
/kernel/linux/linux-6.6/drivers/target/
target_core_user.c, in check_timedout_devices():
  3265  struct tcmu_dev *udev, *tmp_dev;   (local)
  3272  list_for_each_entry_safe(udev, tmp_dev, &devs, timedout_entry) {
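
In the target_core_user hits, tmp_dev is not a candidate device at all but the spare cursor of list_for_each_entry_safe(), which caches the next element so check_timedout_devices() can drop entries from the timed-out list while walking it. A small userspace sketch of why the _safe form is needed; the list helpers are simplified re-implementations in the kernel's style, not the kernel's own.

    #include <stddef.h>
    #include <stdio.h>

    /* Minimal circular doubly linked list in the kernel's style. */
    struct list_head { struct list_head *next, *prev; };

    #define LIST_HEAD_INIT(name) { &(name), &(name) }
    #define list_entry(ptr, type, member) \
        ((type *)((char *)(ptr) - offsetof(type, member)))

    static void list_add_tail(struct list_head *new, struct list_head *head)
    {
        new->prev = head->prev;
        new->next = head;
        head->prev->next = new;
        head->prev = new;
    }

    static void list_del(struct list_head *entry)
    {
        entry->prev->next = entry->next;
        entry->next->prev = entry->prev;
    }

    struct tcmu_dev {
        int id;
        struct list_head timedout_entry;
    };

    int main(void)
    {
        struct list_head devs = LIST_HEAD_INIT(devs);
        struct tcmu_dev d[3] = { { .id = 0 }, { .id = 1 }, { .id = 2 } };
        struct list_head *pos, *tmp;
        int i;

        for (i = 0; i < 3; i++)
            list_add_tail(&d[i].timedout_entry, &devs);

        /*
         * Open-coded equivalent of list_for_each_entry_safe(): 'tmp' caches
         * the next node before the body runs, so deleting 'pos' from the
         * list cannot break the walk.
         */
        for (pos = devs.next, tmp = pos->next; pos != &devs;
             pos = tmp, tmp = pos->next) {
            struct tcmu_dev *udev = list_entry(pos, struct tcmu_dev, timedout_entry);

            list_del(pos);                      /* safe: cursor already saved */
            printf("handled timed-out dev %d\n", udev->id);
        }
        return 0;
    }
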

Completed in 37 milliseconds