
Searched refs:arch (Results 76 - 100 of 1984) sorted by relevance


/kernel/linux/linux-6.6/arch/x86/kvm/
x86.h
97 return vcpu->arch.last_vmentry_cpu != -1; in kvm_vcpu_has_run()
102 return vcpu->arch.exception.pending || in kvm_is_exception_pending()
103 vcpu->arch.exception_vmexit.pending || in kvm_is_exception_pending()
109 vcpu->arch.exception.pending = false; in kvm_clear_exception_queue()
110 vcpu->arch.exception.injected = false; in kvm_clear_exception_queue()
111 vcpu->arch.exception_vmexit.pending = false; in kvm_clear_exception_queue()
117 vcpu->arch.interrupt.injected = true; in kvm_queue_interrupt()
118 vcpu->arch.interrupt.soft = soft; in kvm_queue_interrupt()
119 vcpu->arch.interrupt.nr = vector; in kvm_queue_interrupt()
124 vcpu->arch in kvm_clear_interrupt_queue()
[all...]
kvm_cache_regs.h
21 return vcpu->arch.regs[VCPU_REGS_##uname]; \
26 vcpu->arch.regs[VCPU_REGS_##uname] = val; \
50 * 1 0 register in vcpu->arch
51 * 1 1 register in vcpu->arch, needs to be stored back
56 return test_bit(reg, (unsigned long *)&vcpu->arch.regs_avail); in kvm_register_is_available()
62 return test_bit(reg, (unsigned long *)&vcpu->arch.regs_dirty); in kvm_register_is_dirty()
68 __set_bit(reg, (unsigned long *)&vcpu->arch.regs_avail); in kvm_register_mark_available()
74 __set_bit(reg, (unsigned long *)&vcpu->arch.regs_avail); in kvm_register_mark_dirty()
75 __set_bit(reg, (unsigned long *)&vcpu->arch.regs_dirty); in kvm_register_mark_dirty()
80 * arch bito
[all...]
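
The kvm_cache_regs.h hits above revolve around a two-bitmap register cache: a bit in regs_avail means the value in vcpu->arch.regs is current, and a bit in regs_dirty means it also has to be stored back to the hardware state (the "1 0" / "1 1" rows in the quoted comment). Below is a minimal standalone sketch of that pattern; demo_vcpu, reg_read and reg_write are illustrative stand-ins, not the real KVM definitions.

#include <stdbool.h>
#include <stdio.h>

/* Illustrative registers and vcpu layout, not KVM's. */
enum { DEMO_REG_RAX, DEMO_REG_RBX, DEMO_NR_REGS };

struct demo_vcpu {
	unsigned long regs[DEMO_NR_REGS]; /* cached register values */
	unsigned long regs_avail;         /* bit set: cache holds the value */
	unsigned long regs_dirty;         /* bit set: cache must be written back */
};

static bool reg_is_available(const struct demo_vcpu *v, int reg)
{
	return v->regs_avail & (1UL << reg);
}

static unsigned long reg_read(struct demo_vcpu *v, int reg)
{
	if (!reg_is_available(v, reg)) {
		/* In KVM this is where the value would be fetched from
		 * the VMCS/VMCB; here we just pretend it is zero. */
		v->regs[reg] = 0;
		v->regs_avail |= 1UL << reg;
	}
	return v->regs[reg];
}

static void reg_write(struct demo_vcpu *v, int reg, unsigned long val)
{
	v->regs[reg] = val;
	v->regs_avail |= 1UL << reg; /* value is now cached ... */
	v->regs_dirty |= 1UL << reg; /* ... and must be stored back later */
}

int main(void)
{
	struct demo_vcpu v = { 0 };

	reg_write(&v, DEMO_REG_RAX, 42);
	printf("RAX=%lu avail=%#lx dirty=%#lx\n",
	       reg_read(&v, DEMO_REG_RAX), v.regs_avail, v.regs_dirty);
	return 0;
}
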
/kernel/linux/linux-5.10/tools/perf/arch/s390/annotate/
instructions.c
4 static int s390_call__parse(struct arch *arch, struct ins_operands *ops, in s390_call__parse() argument
25 if (arch->objdump.skip_functions_char && in s390_call__parse()
26 strchr(name, arch->objdump.skip_functions_char)) in s390_call__parse()
56 static int s390_mov__parse(struct arch *arch __maybe_unused, in s390_mov__parse()
109 static struct ins_ops *s390__associate_ins_ops(struct arch *arch, const char *name) in s390__associate_ins_ops() argument
135 arch__associate_ins_ops(arch, name, ops); in s390__associate_ins_ops()
139 static int s390__cpuid_parse(struct arch *arc argument
160 s390__annotate_init(struct arch *arch, char *cpuid __maybe_unused) s390__annotate_init() argument
[all...]
/kernel/linux/linux-6.6/tools/perf/arch/s390/annotate/
instructions.c
4 static int s390_call__parse(struct arch *arch, struct ins_operands *ops, in s390_call__parse() argument
25 if (arch->objdump.skip_functions_char && in s390_call__parse()
26 strchr(name, arch->objdump.skip_functions_char)) in s390_call__parse()
53 static int s390_mov__parse(struct arch *arch __maybe_unused, in s390_mov__parse()
106 static struct ins_ops *s390__associate_ins_ops(struct arch *arch, const char *name) in s390__associate_ins_ops() argument
132 arch__associate_ins_ops(arch, name, ops); in s390__associate_ins_ops()
136 static int s390__cpuid_parse(struct arch *arc argument
157 s390__annotate_init(struct arch *arch, char *cpuid __maybe_unused) s390__annotate_init() argument
[all...]
/kernel/linux/linux-5.10/tools/perf/arch/arm/annotate/
instructions.c
13 static struct ins_ops *arm__associate_instruction_ops(struct arch *arch, const char *name) in arm__associate_instruction_ops() argument
15 struct arm_annotate *arm = arch->priv; in arm__associate_instruction_ops()
26 arch__associate_ins_ops(arch, name, ops); in arm__associate_instruction_ops()
30 static int arm__annotate_init(struct arch *arch, char *cpuid __maybe_unused) in arm__annotate_init() argument
35 if (arch->initialized) in arm__annotate_init()
51 arch->initialized = true; in arm__annotate_init()
52 arch->priv = arm; in arm__annotate_init()
53 arch in arm__annotate_init()
[all...]
/kernel/linux/linux-6.6/tools/perf/arch/arm/annotate/
instructions.c
13 static struct ins_ops *arm__associate_instruction_ops(struct arch *arch, const char *name) in arm__associate_instruction_ops() argument
15 struct arm_annotate *arm = arch->priv; in arm__associate_instruction_ops()
26 arch__associate_ins_ops(arch, name, ops); in arm__associate_instruction_ops()
30 static int arm__annotate_init(struct arch *arch, char *cpuid __maybe_unused) in arm__annotate_init() argument
35 if (arch->initialized) in arm__annotate_init()
51 arch->initialized = true; in arm__annotate_init()
52 arch->priv = arm; in arm__annotate_init()
53 arch in arm__annotate_init()
[all...]
/kernel/linux/linux-5.10/arch/loongarch/kvm/
hypcall.c
19 ipi_bitmap = vcpu->arch.gprs[REG_A1]; in kvm_virt_ipi()
20 min = vcpu->arch.gprs[REG_A2]; in kvm_virt_ipi()
21 action = vcpu->arch.gprs[REG_A3]; in kvm_virt_ipi()
37 struct gfn_to_pfn_cache *cache = &vcpu->arch.st.cache; in kvm_save_notify()
41 num = vcpu->arch.gprs[REG_A0]; in kvm_save_notify()
42 id = vcpu->arch.gprs[REG_A1]; in kvm_save_notify()
43 data = vcpu->arch.gprs[REG_A2]; in kvm_save_notify()
50 if (vcpu->arch.st.guest_addr && (data == 0)) in kvm_save_notify()
53 vcpu->arch.st.guest_addr = data; in kvm_save_notify()
55 vcpu->arch in kvm_save_notify()
[all...]
/kernel/linux/linux-6.6/tools/perf/arch/
common.c
148 const char *arch = perf_env__arch(env), *cross_env; in perf_env__lookup_binutils_path() local
156 if (!strcmp(perf_env__arch(NULL), arch)) in perf_env__lookup_binutils_path()
173 if (!strcmp(arch, "arc")) in perf_env__lookup_binutils_path()
175 else if (!strcmp(arch, "arm")) in perf_env__lookup_binutils_path()
177 else if (!strcmp(arch, "arm64")) in perf_env__lookup_binutils_path()
179 else if (!strcmp(arch, "powerpc")) in perf_env__lookup_binutils_path()
181 else if (!strcmp(arch, "riscv32")) in perf_env__lookup_binutils_path()
183 else if (!strcmp(arch, "riscv64")) in perf_env__lookup_binutils_path()
185 else if (!strcmp(arch, "sh")) in perf_env__lookup_binutils_path()
187 else if (!strcmp(arch, "s39 in perf_env__lookup_binutils_path()
[all...]
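
The common.c hits show perf_env__lookup_binutils_path() walking a strcmp chain from the normalized arch name to a cross-toolchain prefix so the matching objdump can be located. A rough standalone sketch of that idea follows; cross_prefix is a hypothetical helper and the prefixes are illustrative GNU triplets, not the candidate lists perf actually tries.

#include <stdio.h>
#include <string.h>

/* Illustrative arch-name -> cross-binutils prefix mapping. */
static const char *cross_prefix(const char *arch)
{
	if (!strcmp(arch, "arm"))
		return "arm-linux-gnueabi-";
	if (!strcmp(arch, "arm64"))
		return "aarch64-linux-gnu-";
	if (!strcmp(arch, "powerpc"))
		return "powerpc64-linux-gnu-";
	if (!strcmp(arch, "s390"))
		return "s390x-linux-gnu-";
	return ""; /* native build: no prefix needed */
}

int main(void)
{
	/* e.g. prints "aarch64-linux-gnu-objdump" */
	printf("%sobjdump\n", cross_prefix("arm64"));
	return 0;
}
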
/kernel/linux/linux-5.10/arch/arm64/kvm/
fpsimd.c
3 * arch/arm64/kvm/fpsimd.c: Guest/host FPSIMD context coordination helpers
45 vcpu->arch.host_thread_info = kern_hyp_va(ti); in kvm_arch_vcpu_run_map_fp()
46 vcpu->arch.host_fpsimd_state = kern_hyp_va(fpsimd); in kvm_arch_vcpu_run_map_fp()
65 vcpu->arch.flags &= ~(KVM_ARM64_FP_ENABLED | in kvm_arch_vcpu_load_fp()
68 vcpu->arch.flags |= KVM_ARM64_FP_HOST; in kvm_arch_vcpu_load_fp()
71 vcpu->arch.flags |= KVM_ARM64_HOST_SVE_IN_USE; in kvm_arch_vcpu_load_fp()
74 vcpu->arch.flags |= KVM_ARM64_HOST_SVE_ENABLED; in kvm_arch_vcpu_load_fp()
87 if (vcpu->arch.flags & KVM_ARM64_FP_ENABLED) { in kvm_arch_vcpu_ctxsync_fp()
88 fpsimd_bind_state_to_cpu(&vcpu->arch.ctxt.fp_regs, in kvm_arch_vcpu_ctxsync_fp()
89 vcpu->arch in kvm_arch_vcpu_ctxsync_fp()
[all...]
/kernel/linux/linux-5.10/arch/powerpc/include/asm/
kvm_booke.h
28 vcpu->arch.regs.gpr[num] = val; in kvmppc_set_gpr()
33 return vcpu->arch.regs.gpr[num]; in kvmppc_get_gpr()
38 vcpu->arch.regs.ccr = val; in kvmppc_set_cr()
43 return vcpu->arch.regs.ccr; in kvmppc_get_cr()
48 vcpu->arch.regs.xer = val; in kvmppc_set_xer()
53 return vcpu->arch.regs.xer; in kvmppc_get_xer()
64 vcpu->arch.regs.ctr = val; in kvmppc_set_ctr()
69 return vcpu->arch.regs.ctr; in kvmppc_get_ctr()
74 vcpu->arch.regs.link = val; in kvmppc_set_lr()
79 return vcpu->arch in kvmppc_get_lr()
[all...]
/kernel/linux/linux-6.6/arch/powerpc/include/asm/
kvm_booke.h
28 vcpu->arch.regs.gpr[num] = val; in kvmppc_set_gpr()
33 return vcpu->arch.regs.gpr[num]; in kvmppc_get_gpr()
38 vcpu->arch.regs.ccr = val; in kvmppc_set_cr()
43 return vcpu->arch.regs.ccr; in kvmppc_get_cr()
48 vcpu->arch.regs.xer = val; in kvmppc_set_xer()
53 return vcpu->arch.regs.xer; in kvmppc_get_xer()
64 vcpu->arch.regs.ctr = val; in kvmppc_set_ctr()
69 return vcpu->arch.regs.ctr; in kvmppc_get_ctr()
74 vcpu->arch.regs.link = val; in kvmppc_set_lr()
79 return vcpu->arch in kvmppc_get_lr()
[all...]
/third_party/gn/src/util/
sys_info.cc
65 std::string arch(info.machine); in OperatingSystemArchitecture()
67 if (arch == "i386" || arch == "i486" || arch == "i586" || arch == "i686") { in OperatingSystemArchitecture()
68 arch = "x86"; in OperatingSystemArchitecture()
69 } else if (arch == "i86pc") { in OperatingSystemArchitecture()
73 arch = "x86_64"; in OperatingSystemArchitecture()
74 } else if (arch == "amd64") { in OperatingSystemArchitecture()
75 arch in OperatingSystemArchitecture()
[all...]
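
The sys_info.cc hit normalizes the machine string reported by uname(): the i386/i486/i586/i686 family collapses to "x86" and "amd64" is reported as "x86_64". A small C sketch of the same normalization, assuming a POSIX uname(2); gn's actual implementation is C++ and handles more cases than shown here.

#include <stdio.h>
#include <string.h>
#include <sys/utsname.h>

int main(void)
{
	struct utsname info;
	char arch[sizeof(info.machine)];

	if (uname(&info) < 0)
		return 1;
	strcpy(arch, info.machine);

	/* Collapse the 32-bit x86 family and the BSD-style amd64 name. */
	if (!strcmp(arch, "i386") || !strcmp(arch, "i486") ||
	    !strcmp(arch, "i586") || !strcmp(arch, "i686"))
		strcpy(arch, "x86");
	else if (!strcmp(arch, "amd64"))
		strcpy(arch, "x86_64");

	printf("%s\n", arch);
	return 0;
}
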
/kernel/linux/linux-5.10/arch/mips/kvm/
vz.c
115 if (kvm_mips_guest_has_msa(&vcpu->arch)) in kvm_vz_config5_guest_wrmask()
122 if (kvm_mips_guest_has_fpu(&vcpu->arch)) { in kvm_vz_config5_guest_wrmask()
158 if (kvm_mips_guest_can_have_fpu(&vcpu->arch)) in kvm_vz_config1_user_wrmask()
175 if (kvm_mips_guest_can_have_msa(&vcpu->arch)) in kvm_vz_config3_user_wrmask()
205 set_bit(priority, &vcpu->arch.pending_exceptions); in kvm_vz_queue_irq()
206 clear_bit(priority, &vcpu->arch.pending_exceptions_clr); in kvm_vz_queue_irq()
211 clear_bit(priority, &vcpu->arch.pending_exceptions); in kvm_vz_dequeue_irq()
212 set_bit(priority, &vcpu->arch.pending_exceptions_clr); in kvm_vz_dequeue_irq()
282 clear_bit(priority, &vcpu->arch.pending_exceptions); in kvm_vz_irq_deliver_cb()
325 clear_bit(priority, &vcpu->arch in kvm_vz_irq_clear_cb()
1083 struct kvm_vcpu_arch *arch = &vcpu->arch; kvm_vz_gpsi_cache() local
1218 struct kvm_vcpu_arch *arch = &vcpu->arch; kvm_trap_vz_handle_gpsi() local
1300 struct kvm_vcpu_arch *arch = &vcpu->arch; kvm_trap_vz_handle_gsfc() local
[all...]
/kernel/linux/linux-6.6/arch/mips/kvm/
vz.c
115 if (kvm_mips_guest_has_msa(&vcpu->arch)) in kvm_vz_config5_guest_wrmask()
122 if (kvm_mips_guest_has_fpu(&vcpu->arch)) { in kvm_vz_config5_guest_wrmask()
158 if (kvm_mips_guest_can_have_fpu(&vcpu->arch)) in kvm_vz_config1_user_wrmask()
175 if (kvm_mips_guest_can_have_msa(&vcpu->arch)) in kvm_vz_config3_user_wrmask()
205 set_bit(priority, &vcpu->arch.pending_exceptions); in kvm_vz_queue_irq()
206 clear_bit(priority, &vcpu->arch.pending_exceptions_clr); in kvm_vz_queue_irq()
211 clear_bit(priority, &vcpu->arch.pending_exceptions); in kvm_vz_dequeue_irq()
212 set_bit(priority, &vcpu->arch.pending_exceptions_clr); in kvm_vz_dequeue_irq()
282 clear_bit(priority, &vcpu->arch.pending_exceptions); in kvm_vz_irq_deliver_cb()
324 clear_bit(priority, &vcpu->arch in kvm_vz_irq_clear_cb()
1081 struct kvm_vcpu_arch *arch = &vcpu->arch; kvm_vz_gpsi_cache() local
1216 struct kvm_vcpu_arch *arch = &vcpu->arch; kvm_trap_vz_handle_gpsi() local
1298 struct kvm_vcpu_arch *arch = &vcpu->arch; kvm_trap_vz_handle_gsfc() local
[all...]
/kernel/linux/linux-5.10/arch/x86/kvm/
kvm_cache_regs.h
15 return vcpu->arch.regs[VCPU_REGS_##uname]; \
20 vcpu->arch.regs[VCPU_REGS_##uname] = val; \
43 return test_bit(reg, (unsigned long *)&vcpu->arch.regs_avail); in kvm_register_is_available()
49 return test_bit(reg, (unsigned long *)&vcpu->arch.regs_dirty); in kvm_register_is_dirty()
55 __set_bit(reg, (unsigned long *)&vcpu->arch.regs_avail); in kvm_register_mark_available()
61 __set_bit(reg, (unsigned long *)&vcpu->arch.regs_avail); in kvm_register_mark_dirty()
62 __set_bit(reg, (unsigned long *)&vcpu->arch.regs_dirty); in kvm_register_mark_dirty()
73 return vcpu->arch.regs[reg]; in kvm_register_read()
82 vcpu->arch.regs[reg] = val; in kvm_register_write()
113 return vcpu->arch in kvm_pdptr_read()
[all...]
/kernel/linux/linux-6.6/arch/arm64/kvm/
reset.c
6 * Derived from arch/arm/kvm/reset.c
81 vcpu->arch.sve_max_vl = kvm_sve_max_vl; in kvm_vcpu_enable_sve()
95 * vcpu->arch.sve_state as necessary.
104 vl = vcpu->arch.sve_max_vl; in kvm_vcpu_finalize_sve()
126 vcpu->arch.sve_state = buf; in kvm_vcpu_finalize_sve()
157 void *sve_state = vcpu->arch.sve_state; in kvm_arm_vcpu_destroy()
164 kfree(vcpu->arch.ccsidr); in kvm_arm_vcpu_destroy()
170 memset(vcpu->arch.sve_state, 0, vcpu_sve_state_size(vcpu)); in kvm_vcpu_reset_sve()
180 if (!test_bit(KVM_ARM_VCPU_PTRAUTH_ADDRESS, vcpu->arch.features) || in kvm_vcpu_enable_ptrauth()
181 !test_bit(KVM_ARM_VCPU_PTRAUTH_GENERIC, vcpu->arch in kvm_vcpu_enable_ptrauth()
[all...]
/kernel/linux/linux-6.6/arch/riscv/kvm/
vcpu_timer.c
46 struct kvm_vcpu *vcpu = container_of(t, struct kvm_vcpu, arch.timer); in kvm_riscv_vcpu_hrtimer_expired()
47 struct kvm_guest_timer *gt = &vcpu->kvm->arch.timer; in kvm_riscv_vcpu_hrtimer_expired()
85 struct kvm_vcpu_timer *t = &vcpu->arch.timer; in kvm_riscv_vcpu_update_hrtimer()
86 struct kvm_guest_timer *gt = &vcpu->kvm->arch.timer; in kvm_riscv_vcpu_update_hrtimer()
104 struct kvm_vcpu_timer *t = &vcpu->arch.timer; in kvm_riscv_vcpu_timer_next_event()
113 struct kvm_vcpu *vcpu = container_of(t, struct kvm_vcpu, arch.timer); in kvm_riscv_vcpu_vstimer_expired()
114 struct kvm_guest_timer *gt = &vcpu->kvm->arch.timer; in kvm_riscv_vcpu_vstimer_expired()
130 struct kvm_vcpu_timer *t = &vcpu->arch.timer; in kvm_riscv_vcpu_timer_pending()
131 struct kvm_guest_timer *gt = &vcpu->kvm->arch.timer; in kvm_riscv_vcpu_timer_pending()
142 struct kvm_vcpu_timer *t = &vcpu->arch in kvm_riscv_vcpu_timer_blocking()
[all...]
/kernel/linux/linux-6.6/scripts/
recordmcount.pl
118 print "usage: $P arch endian bits objdump objcopy cc ld nm rm mv is_module inputfile\n";
123 my ($arch, $endian, $bits, $objdump, $objcopy, $cc,
159 #print STDERR "running: $P '$arch' '$objdump' '$objcopy' '$cc' '$ld' " .
177 if ($arch =~ /(x86(_64)?)|(i386)/) {
179 $arch = "x86_64";
181 $arch = "i386";
198 if ($arch eq "x86_64") {
204 # force flags for this arch
210 } elsif ($arch eq "i386") {
214 # force flags for this arch
[all...]
/third_party/node/deps/base64/base64/
Makefile
7 lib/arch/avx512/codec.o \
8 lib/arch/avx2/codec.o \
9 lib/arch/generic/codec.o \
10 lib/arch/neon32/codec.o \
11 lib/arch/neon64/codec.o \
12 lib/arch/ssse3/codec.o \
13 lib/arch/sse41/codec.o \
14 lib/arch/sse42/codec.o \
15 lib/arch/avx/codec.o \
93 lib/arch/avx51
[all...]
/kernel/linux/linux-6.6/arch/arm64/include/asm/
kvm_emulate.h
6 * Derived from arch/arm/include/kvm_emulate.h
60 return !(vcpu->arch.hcr_el2 & HCR_RW); in vcpu_el1_is_32bit()
65 return test_bit(KVM_ARM_VCPU_EL1_32BIT, vcpu->arch.features); in vcpu_el1_is_32bit()
71 vcpu->arch.hcr_el2 = HCR_GUEST_FLAGS; in vcpu_reset_hcr()
73 vcpu->arch.hcr_el2 |= HCR_E2H; in vcpu_reset_hcr()
76 vcpu->arch.hcr_el2 |= HCR_TEA; in vcpu_reset_hcr()
78 vcpu->arch.hcr_el2 |= HCR_TERR; in vcpu_reset_hcr()
82 vcpu->arch.hcr_el2 |= HCR_FWB; in vcpu_reset_hcr()
90 vcpu->arch.hcr_el2 |= HCR_TVM; in vcpu_reset_hcr()
95 vcpu->arch in vcpu_reset_hcr()
[all...]
/kernel/linux/linux-5.10/arch/arm64/kvm/hyp/include/hyp/
switch.h
47 vcpu->arch.host_thread_info->flags & _TIF_FOREIGN_FPSTATE) in update_fp_enabled()
48 vcpu->arch.flags &= ~(KVM_ARM64_FP_ENABLED | in update_fp_enabled()
51 return !!(vcpu->arch.flags & KVM_ARM64_FP_ENABLED); in update_fp_enabled()
93 write_sysreg(vcpu->arch.mdcr_el2, mdcr_el2); in __activate_traps_common()
104 u64 hcr = vcpu->arch.hcr_el2; in ___activate_traps()
112 write_sysreg_s(vcpu->arch.vsesr_el2, SYS_VSESR_EL2); in ___activate_traps()
123 if (vcpu->arch.hcr_el2 & HCR_VSE) { in ___deactivate_traps()
124 vcpu->arch.hcr_el2 &= ~HCR_VSE; in ___deactivate_traps()
125 vcpu->arch.hcr_el2 |= read_sysreg(hcr_el2) & HCR_VSE; in ___deactivate_traps()
164 esr = vcpu->arch in __populate_fault_info()
[all...]
/kernel/linux/linux-6.6/arch/s390/kvm/
kvm-s390.h
24 #define IS_TE_ENABLED(vcpu) ((vcpu->arch.sie_block->ecb & ECB_TE))
27 ((*(char *)phys_to_virt((vcpu)->arch.sie_block->itdba) == TDB_FORMAT1))
34 debug_sprintf_event((d_kvm)->arch.dbf, d_loglevel, d_string "\n", \
49 debug_sprintf_event(d_kvm->arch.dbf, d_loglevel, d_string "\n", \
55 debug_sprintf_event(d_vcpu->kvm->arch.dbf, d_loglevel, \
57 d_vcpu->arch.sie_block->gpsw.mask, d_vcpu->arch.sie_block->gpsw.addr,\
63 atomic_or(flags, &vcpu->arch.sie_block->cpuflags); in kvm_s390_set_cpuflags()
68 atomic_andnot(flags, &vcpu->arch.sie_block->cpuflags); in kvm_s390_clear_cpuflags()
73 return (atomic_read(&vcpu->arch in kvm_s390_test_cpuflags()
[all...]
interrupt.c
52 read_lock(&vcpu->kvm->arch.sca_lock); in sca_ext_call_pending()
53 if (vcpu->kvm->arch.use_esca) { in sca_ext_call_pending()
54 struct esca_block *sca = vcpu->kvm->arch.sca; in sca_ext_call_pending()
61 struct bsca_block *sca = vcpu->kvm->arch.sca; in sca_ext_call_pending()
68 read_unlock(&vcpu->kvm->arch.sca_lock); in sca_ext_call_pending()
81 read_lock(&vcpu->kvm->arch.sca_lock); in sca_inject_ext_call()
82 if (vcpu->kvm->arch.use_esca) { in sca_inject_ext_call()
83 struct esca_block *sca = vcpu->kvm->arch.sca; in sca_inject_ext_call()
96 struct bsca_block *sca = vcpu->kvm->arch.sca; in sca_inject_ext_call()
109 read_unlock(&vcpu->kvm->arch in sca_inject_ext_call()
[all...]
/kernel/linux/linux-6.6/arch/powerpc/kvm/
book3s.c
10 * This file is derived from arch/powerpc/kvm/44x.c,
137 vcpu->kvm->arch.kvm_ops->inject_interrupt(vcpu, vec, flags); in kvmppc_inject_interrupt()
170 unsigned long old_pending = vcpu->arch.pending_exceptions; in kvmppc_book3s_dequeue_irqprio()
173 &vcpu->arch.pending_exceptions); in kvmppc_book3s_dequeue_irqprio()
175 kvmppc_update_int_pending(vcpu, vcpu->arch.pending_exceptions, in kvmppc_book3s_dequeue_irqprio()
184 &vcpu->arch.pending_exceptions); in kvmppc_book3s_queue_irqprio()
237 return test_bit(BOOK3S_IRQPRIO_DECREMENTER, &vcpu->arch.pending_exceptions); in kvmppc_core_pending_dec()
271 vcpu->arch.external_oneshot = 1; in kvmppc_core_queue_external()
388 if (vcpu->arch.external_oneshot) { in clear_irqprio()
389 vcpu->arch in clear_irqprio()
[all...]
/kernel/linux/linux-6.6/arch/s390/kernel/
module.c
79 module_memfree(mod->arch.trampolines_start); in module_arch_cleanup()
89 vfree(mod->arch.syminfo); in module_arch_freeing_init()
90 mod->arch.syminfo = NULL; in module_arch_freeing_init()
97 info = me->arch.syminfo + ELF_R_SYM (rela->r_info); in check_rela()
112 info->got_offset = me->arch.got_size; in check_rela()
113 me->arch.got_size += sizeof(void*); in check_rela()
124 info->plt_offset = me->arch.plt_size; in check_rela()
125 me->arch.plt_size += PLT_ENTRY_SIZE; in check_rela()
166 me->arch.nsyms = symtab->sh_size / sizeof(Elf_Sym); in module_frob_arch_sections()
167 me->arch in module_frob_arch_sections()
[all...]

