
Searched refs:loaded (Results 1 - 25 of 97) sorted by relevance


/kernel/linux/linux-5.10/tools/perf/util/
counts.h
16 struct xyarray *loaded; member
29 return *((bool *) xyarray__entry(counts->loaded, cpu, thread)); in perf_counts__is_loaded()
33 perf_counts__set_loaded(struct perf_counts *counts, int cpu, int thread, bool loaded) in perf_counts__set_loaded() argument
35 *((bool *) xyarray__entry(counts->loaded, cpu, thread)) = loaded; in perf_counts__set_loaded()
counts.c
31 counts->loaded = values; in perf_counts__new()
40 xyarray__delete(counts->loaded); in perf_counts__delete()
48 xyarray__reset(counts->loaded); in perf_counts__reset()
dso.h
177 bool loaded; member
228 dso->loaded = true; in dso__set_loaded()
/kernel/linux/linux-6.6/tools/perf/util/
counts.h
15 struct xyarray *loaded; member
28 return *((bool *) xyarray__entry(counts->loaded, cpu_map_idx, thread)); in perf_counts__is_loaded()
32 perf_counts__set_loaded(struct perf_counts *counts, int cpu_map_idx, int thread, bool loaded) in perf_counts__set_loaded() argument
34 *((bool *) xyarray__entry(counts->loaded, cpu_map_idx, thread)) = loaded; in perf_counts__set_loaded()
counts.c
32 counts->loaded = values; in perf_counts__new()
41 xyarray__delete(counts->loaded); in perf_counts__delete()
49 xyarray__reset(counts->loaded); in perf_counts__reset()
dso.h
180 bool loaded; member
239 dso->loaded = true; in dso__set_loaded()
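
The perf hits above (identical in the 5.10 and 6.6 trees apart from the cpu vs. cpu_map_idx naming) all revolve around one idea: counts->loaded is a 2-D array of booleans, one per (CPU, thread) slot, with tiny helpers to test and set it. Below is a minimal user-space sketch of that pattern; the names (counts_demo, entry, counts_is_loaded, ...) are invented here and are not the real perf/xyarray API.

#include <stdbool.h>
#include <stdio.h>
#include <stdlib.h>

/* Simplified stand-in for perf's xyarray-backed counts->loaded. */
struct counts_demo {
	int ncpus, nthreads;
	bool *loaded;               /* ncpus * nthreads flags */
};

static bool *entry(struct counts_demo *c, int cpu, int thread)
{
	return &c->loaded[cpu * c->nthreads + thread];
}

static bool counts_is_loaded(struct counts_demo *c, int cpu, int thread)
{
	return *entry(c, cpu, thread);
}

static void counts_set_loaded(struct counts_demo *c, int cpu, int thread, bool loaded)
{
	*entry(c, cpu, thread) = loaded;
}

int main(void)
{
	struct counts_demo c = { .ncpus = 2, .nthreads = 3 };

	c.loaded = calloc(c.ncpus * c.nthreads, sizeof(bool));
	if (!c.loaded)
		return 1;

	counts_set_loaded(&c, 1, 2, true);
	printf("cpu1/thread2 loaded: %d\n", counts_is_loaded(&c, 1, 2));
	printf("cpu0/thread0 loaded: %d\n", counts_is_loaded(&c, 0, 0));

	free(c.loaded);
	return 0;
}
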
/kernel/linux/linux-5.10/arch/arm64/kvm/
aarch32.c
48 static void post_fault_synchronize(struct kvm_vcpu *vcpu, bool loaded) in post_fault_synchronize() argument
50 if (loaded) { in post_fault_synchronize()
178 bool loaded = pre_fault_synchronize(vcpu); in kvm_inject_undef32() local
181 post_fault_synchronize(vcpu, loaded); in kvm_inject_undef32()
194 bool loaded; in inject_abt32() local
196 loaded = pre_fault_synchronize(vcpu); in inject_abt32()
221 post_fault_synchronize(vcpu, loaded); in inject_abt32()
reset.c
256 * handling code. In the first case, the VCPU will not be loaded, and in the
257 * second case the VCPU will be loaded. Because this function operates purely
259 * we were loaded (handling a request) and load the values back at the end of
268 bool loaded; in kvm_reset_vcpu() local
280 loaded = (vcpu->cpu != -1); in kvm_reset_vcpu()
281 if (loaded) in kvm_reset_vcpu()
354 if (loaded) in kvm_reset_vcpu()
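
The reset.c comment matched above describes the pattern the remaining hits implement: kvm_reset_vcpu() may run with the vCPU currently loaded on a physical CPU or not, so it records loaded = (vcpu->cpu != -1), puts the vCPU if it was loaded, resets only the in-memory copy of the state, and loads it back at the end. The aarch32.c pre_fault_synchronize()/post_fault_synchronize() hits follow the same shape. A rough user-space sketch of that control flow, with invented helper names (vcpu_demo, vcpu_put, vcpu_load), not the real KVM API:

#include <stdbool.h>
#include <stdio.h>

/* Hypothetical stand-ins for the KVM helpers referenced in the hits. */
struct vcpu_demo { int cpu; int regs; };

static void vcpu_put(struct vcpu_demo *v)
{
	printf("cpu %d: flush hardware state to memory\n", v->cpu);
}

static void vcpu_load(struct vcpu_demo *v)
{
	printf("cpu %d: restore state to hardware\n", v->cpu);
}

/* Mirrors the shape of kvm_reset_vcpu(): operate purely on the
 * in-memory copy, bracketed by put/load only if the vCPU was loaded. */
static void reset_vcpu_demo(struct vcpu_demo *v)
{
	bool loaded = (v->cpu != -1);   /* loaded == currently on a physical CPU */

	if (loaded)
		vcpu_put(v);            /* flush hardware state to memory first */

	v->regs = 0;                    /* reset the in-memory register copy */

	if (loaded)
		vcpu_load(v);           /* put the (now reset) state back on the CPU */
}

int main(void)
{
	struct vcpu_demo running = { .cpu = 3, .regs = 42 };
	struct vcpu_demo idle    = { .cpu = -1, .regs = 7 };

	reset_vcpu_demo(&running);      /* takes the put/load path */
	reset_vcpu_demo(&idle);         /* resets the memory copy only */
	return 0;
}
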
arch_timer.c
232 WARN_ON(timer_ctx && timer_ctx->loaded); in kvm_timer_irq_can_fire()
250 WARN(ctx->loaded, "timer %d loaded\n", i); in kvm_timer_earliest_exp()
322 if (timer_ctx->loaded) { in kvm_timer_should_fire()
435 if (!ctx->loaded) in timer_save_state()
463 ctx->loaded = false; in timer_save_state()
514 if (ctx->loaded) in timer_restore_state()
534 ctx->loaded = true; in timer_restore_state()
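
In arch_timer.c the per-timer context carries a loaded flag that makes save and restore idempotent: timer_save_state() returns early unless the context is loaded and clears the flag after saving, timer_restore_state() returns early if it is already loaded and sets the flag after restoring, and the WARN/should-fire hits just consult it. A small sketch of that guard, using made-up names (timer_ctx_demo, hw_cnt), not the actual kvm/arm_arch_timer structures:

#include <stdbool.h>
#include <stdio.h>

struct timer_ctx_demo {
	bool loaded;        /* true while the context lives in hardware */
	long hw_cnt;        /* pretend hardware register */
	long saved_cnt;     /* in-memory copy */
};

static void timer_save_state_demo(struct timer_ctx_demo *ctx)
{
	if (!ctx->loaded)           /* nothing in hardware: nothing to save */
		return;
	ctx->saved_cnt = ctx->hw_cnt;
	ctx->loaded = false;
}

static void timer_restore_state_demo(struct timer_ctx_demo *ctx)
{
	if (ctx->loaded)            /* already in hardware: don't restore twice */
		return;
	ctx->hw_cnt = ctx->saved_cnt;
	ctx->loaded = true;
}

int main(void)
{
	struct timer_ctx_demo ctx = { .loaded = true, .hw_cnt = 100 };

	timer_save_state_demo(&ctx);        /* saves 100, marks unloaded */
	timer_save_state_demo(&ctx);        /* second call is a no-op */
	timer_restore_state_demo(&ctx);     /* restores 100, marks loaded */
	printf("loaded=%d hw_cnt=%ld\n", ctx.loaded, ctx.hw_cnt);
	return 0;
}
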
/kernel/linux/linux-6.6/drivers/iommu/
iova.c
637 struct iova_magazine *loaded; member
748 cpu_rcache->loaded = iova_magazine_alloc(GFP_KERNEL); in iova_domain_init_rcaches()
750 if (!cpu_rcache->loaded || !cpu_rcache->prev) { in iova_domain_init_rcaches()
787 if (!iova_magazine_full(cpu_rcache->loaded)) { in __iova_rcache_insert()
790 swap(cpu_rcache->prev, cpu_rcache->loaded); in __iova_rcache_insert()
799 cpu_rcache->loaded; in __iova_rcache_insert()
801 mag_to_free = cpu_rcache->loaded; in __iova_rcache_insert()
805 cpu_rcache->loaded = new_mag; in __iova_rcache_insert()
811 iova_magazine_push(cpu_rcache->loaded, iova_pfn); in __iova_rcache_insert()
850 if (!iova_magazine_empty(cpu_rcache->loaded)) { in __iova_rcache_get()
[all...]
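
The iova.c hits show the per-CPU rcache keeping two magazines, loaded and prev: an insert pushes onto loaded, and when loaded is full the code first swaps prev in; only if both are full does it allocate a fresh magazine and retire the old loaded one. A simplified, single-threaded sketch of that insert path (MAG_SIZE, mag_demo and the other names are invented, and the real code also returns the retired magazine's IOVAs to a depot and takes locks):

#include <stdbool.h>
#include <stddef.h>
#include <stdio.h>
#include <stdlib.h>

#define MAG_SIZE 4

struct mag_demo { int count; unsigned long pfns[MAG_SIZE]; };

struct cpu_rcache_demo { struct mag_demo *loaded, *prev; };

static bool mag_full(struct mag_demo *m) { return m->count == MAG_SIZE; }
static void mag_push(struct mag_demo *m, unsigned long pfn) { m->pfns[m->count++] = pfn; }

#define SWAP(a, b) do { struct mag_demo *t = (a); (a) = (b); (b) = t; } while (0)

/* Mirrors the shape of __iova_rcache_insert(): prefer the loaded
 * magazine, fall back to swapping in prev, allocate a fresh one last. */
static bool rcache_insert_demo(struct cpu_rcache_demo *c, unsigned long pfn)
{
	struct mag_demo *to_free = NULL;

	if (!mag_full(c->loaded)) {
		/* common case: room left in the loaded magazine */
	} else if (!mag_full(c->prev)) {
		SWAP(c->prev, c->loaded);       /* reuse the other magazine */
	} else {
		struct mag_demo *new_mag = calloc(1, sizeof(*new_mag));

		if (!new_mag)
			return false;
		to_free = c->loaded;            /* retired; the kernel frees its IOVAs too */
		c->loaded = new_mag;
	}

	mag_push(c->loaded, pfn);
	free(to_free);
	return true;
}

int main(void)
{
	struct cpu_rcache_demo c = {
		.loaded = calloc(1, sizeof(struct mag_demo)),
		.prev   = calloc(1, sizeof(struct mag_demo)),
	};
	unsigned long pfn;

	if (!c.loaded || !c.prev)
		return 1;
	for (pfn = 0; pfn < 10; pfn++)
		rcache_insert_demo(&c, pfn);
	printf("loaded holds %d entries\n", c.loaded->count);

	free(c.loaded);
	free(c.prev);
	return 0;
}
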
/kernel/linux/linux-5.10/drivers/iommu/
iova.c
786 struct iova_magazine *loaded; member
879 cpu_rcache->loaded = iova_magazine_alloc(GFP_KERNEL); in init_iova_rcaches()
903 if (!iova_magazine_full(cpu_rcache->loaded)) { in __iova_rcache_insert()
906 swap(cpu_rcache->prev, cpu_rcache->loaded); in __iova_rcache_insert()
915 cpu_rcache->loaded; in __iova_rcache_insert()
917 mag_to_free = cpu_rcache->loaded; in __iova_rcache_insert()
921 cpu_rcache->loaded = new_mag; in __iova_rcache_insert()
927 iova_magazine_push(cpu_rcache->loaded, iova_pfn); in __iova_rcache_insert()
966 if (!iova_magazine_empty(cpu_rcache->loaded)) { in __iova_rcache_get()
969 swap(cpu_rcache->prev, cpu_rcache->loaded); in __iova_rcache_get()
[all...]
/kernel/linux/linux-6.6/arch/arm64/kvm/
reset.c
199 * handling code. In the first case, the VCPU will not be loaded, and in the
200 * second case the VCPU will be loaded. Because this function operates purely
202 * we were loaded (handling a request) and load the values back at the end of
211 bool loaded; in kvm_reset_vcpu() local
223 loaded = (vcpu->cpu != -1); in kvm_reset_vcpu()
224 if (loaded) in kvm_reset_vcpu()
299 if (loaded) in kvm_reset_vcpu()
arch_timer.c
281 WARN_ON(timer_ctx && timer_ctx->loaded); in kvm_timer_irq_can_fire()
316 WARN(ctx->loaded, "timer %d loaded\n", i); in kvm_timer_earliest_exp()
391 if (timer_ctx->loaded) { in kvm_timer_should_fire()
509 if (!ctx->loaded) in timer_save_state()
562 ctx->loaded = false; in timer_save_state()
615 if (ctx->loaded) in timer_restore_state()
644 ctx->loaded = true; in timer_restore_state()
/kernel/linux/linux-6.6/drivers/platform/x86/intel/ifs/
sysfs.c
115 if (!ifsd->loaded) in current_batch_show()
124 * Display currently loaded IFS image version.
131 if (!ifsd->loaded) in image_version_show()
/kernel/linux/linux-5.10/arch/c6x/include/asm/
module.h
17 unsigned int loaded; member
/kernel/linux/linux-6.6/kernel/module/
sysfs.c
79 /* Count loaded sections and allocate structures */ in add_sect_attrs()
174 unsigned int notes, loaded, i; in add_notes_attrs() local
199 for (loaded = i = 0; i < info->hdr->e_shnum; ++i) { in add_notes_attrs()
204 nattr->attr.name = mod->sect_attrs->attrs[loaded].battr.attr.name; in add_notes_attrs()
211 ++loaded; in add_notes_attrs()
351 pr_err("%s: module is already loaded\n", mod->name); in mod_sysfs_init()
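
In add_notes_attrs() the loaded counter walks in step with the section-header index i but only advances for sections that were actually kept in memory, so it can index the per-loaded-section attribute array built earlier by add_sect_attrs(). A toy illustration of that dual-index loop; the sect_demo table and its is_alloc/is_note fields are invented stand-ins for the real ELF section headers:

#include <stdbool.h>
#include <stdio.h>

/* Hypothetical, simplified view of an ELF section header table. */
struct sect_demo {
	const char *name;
	bool is_note;       /* SHT_NOTE in the real code */
	bool is_alloc;      /* SHF_ALLOC: actually loaded into memory */
};

int main(void)
{
	struct sect_demo sects[] = {
		{ ".text",              false, true  },
		{ ".note.gnu.build-id", true,  true  },
		{ ".comment",           false, false },
		{ ".note.Linux",        true,  true  },
	};
	unsigned int loaded, i;

	/* Mirrors the shape of add_notes_attrs(): `i` walks the full section
	 * table, while `loaded` advances only for sections kept in memory and
	 * therefore indexes the loaded-sections attribute array. */
	for (loaded = i = 0; i < sizeof(sects) / sizeof(sects[0]); ++i) {
		if (!sects[i].is_alloc)
			continue;       /* not loaded: no sysfs attribute for it */
		if (sects[i].is_note)
			printf("note attr %u -> %s\n", loaded, sects[i].name);
		++loaded;
	}
	return 0;
}
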
/kernel/linux/linux-5.10/fs/
binfmt_flat.c
85 bool loaded; /* Has this library been loaded? */ member
344 } else if (!p->lib_list[id].loaded && in calc_reloc()
784 libinfo->lib_list[id].loaded = 1; in load_flat_file()
987 if (!libinfo.lib_list[i].loaded) in load_flat_binary()
990 unsigned long val = libinfo.lib_list[j].loaded ? in load_flat_binary()
1028 if (libinfo.lib_list[i].loaded) { in load_flat_binary()
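
binfmt_flat keeps a per-slot loaded flag in lib_list[]: load_flat_file() sets it once a shared library has been mapped, and calc_reloc() / load_flat_binary() skip or reject slots that are not marked loaded. A tiny sketch of that check with invented names (lib_demo, calc_reloc_demo), not the real binfmt_flat structures:

#include <stdbool.h>
#include <stdio.h>

#define MAX_SHARED_LIBS 4

/* Hypothetical per-slot record, mirroring the lib_list[] hits above. */
struct lib_demo {
	bool loaded;                    /* has this library been loaded? */
	unsigned long start_data;
};

static struct lib_demo lib_list[MAX_SHARED_LIBS];

/* Resolving a relocation against library `id` is only valid once that
 * slot has been marked loaded (roughly what calc_reloc() checks). */
static long calc_reloc_demo(int id, unsigned long offset)
{
	if (id >= MAX_SHARED_LIBS || !lib_list[id].loaded) {
		fprintf(stderr, "reloc against unloaded library %d\n", id);
		return -1;
	}
	return (long)(lib_list[id].start_data + offset);
}

int main(void)
{
	/* "load" library 1 only */
	lib_list[1].loaded = true;
	lib_list[1].start_data = 0x10000;

	printf("lib1+0x20 -> %ld\n", calc_reloc_demo(1, 0x20));   /* resolves */
	printf("lib2+0x20 -> %ld\n", calc_reloc_demo(2, 0x20));   /* rejected */
	return 0;
}
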
/kernel/linux/linux-5.10/arch/s390/boot/
head.S
38 .long 0x00080000,0x80000000+iplstart # The first 24 bytes are loaded
42 .long 0x020000f0,0x60000050 # The next 160 byte are loaded
51 .long 0x020003c0,0x60000050 # at location 0 is loaded.
255 # everything loaded, go for it
/kernel/linux/linux-6.6/arch/riscv/kvm/
vcpu.c
51 bool loaded; in kvm_riscv_reset_vcpu() local
59 loaded = (vcpu->cpu != -1); in kvm_riscv_reset_vcpu()
60 if (loaded) in kvm_riscv_reset_vcpu()
87 if (loaded) in kvm_riscv_reset_vcpu()
/kernel/linux/linux-6.6/drivers/usb/host/
xhci-mtk-sch.c
504 struct mu3h_sch_ep_info *sch_ep, bool loaded) in load_ep_bw()
507 update_sch_tt(sch_ep, loaded); in load_ep_bw()
510 update_bus_bw(sch_bw, sch_ep, loaded); in load_ep_bw()
511 sch_ep->allocated = loaded; in load_ep_bw()
503 load_ep_bw(struct mu3h_sch_bw_info *sch_bw, struct mu3h_sch_ep_info *sch_ep, bool loaded) load_ep_bw() argument
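
load_ep_bw() takes a single bool so that reserving and releasing an endpoint's bus bandwidth share one code path: the same helpers add or subtract depending on loaded, and the result is recorded in sch_ep->allocated. A minimal sketch of that pattern; bw_info_demo, ep_info_demo and the helper names are made up, not the xhci-mtk driver's types:

#include <stdbool.h>
#include <stdio.h>

/* Invented, minimal stand-ins for the scheduler bookkeeping. */
struct bw_info_demo { int used_bw; };
struct ep_info_demo { int bw_cost; bool allocated; };

static void update_bus_bw_demo(struct bw_info_demo *bw, struct ep_info_demo *ep, bool loaded)
{
	/* the same helper both charges and refunds, keyed off `loaded` */
	bw->used_bw += loaded ? ep->bw_cost : -ep->bw_cost;
}

/* Mirrors the shape of load_ep_bw(): one entry point handles both
 * "reserve bandwidth" (loaded == true) and "release it" (loaded == false). */
static void load_ep_bw_demo(struct bw_info_demo *bw, struct ep_info_demo *ep, bool loaded)
{
	update_bus_bw_demo(bw, ep, loaded);
	ep->allocated = loaded;
}

int main(void)
{
	struct bw_info_demo bus = { 0 };
	struct ep_info_demo ep = { .bw_cost = 120 };

	load_ep_bw_demo(&bus, &ep, true);
	printf("after add:    used=%d allocated=%d\n", bus.used_bw, ep.allocated);
	load_ep_bw_demo(&bus, &ep, false);
	printf("after remove: used=%d allocated=%d\n", bus.used_bw, ep.allocated);
	return 0;
}
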
/kernel/linux/linux-5.10/arch/h8300/boot/compressed/
head.S
46 .long 0 ; address of loaded ramdisk image
/kernel/linux/linux-5.10/include/kvm/
arm_arch_timer.h
40 bool loaded; member
/kernel/linux/linux-5.10/drivers/net/ethernet/mellanox/mlx5/core/diag/
fw_tracer.h
83 bool loaded; member
/kernel/linux/linux-5.10/drivers/staging/media/ipu3/
ipu3-css-fw.h
104 u32 group; /* Per pipeline data, loaded by dma */
105 u32 output; /* SP output data, loaded by dmem */
156 u32 loaded __aligned(8); /* Firmware has been loaded */
/kernel/linux/linux-6.6/include/kvm/
H A Darm_arch_timer.h67 bool loaded; member
