
Searched refs:blade (Results 1 - 20 of 20) sorted by relevance

/kernel/linux/linux-5.10/drivers/misc/sgi-gru/
grufile.c
266 nid = uv_blade_to_memory_nid(bid); /* -1 if no memory on blade */ in gru_init_tables()
317 * We target the cores of a blade and not the hyperthreads themselves. in gru_chiplet_cpu_to_mmr()
318 * There is a max of 8 cores per socket and 2 sockets per blade, in gru_chiplet_cpu_to_mmr()
357 irq_handler_t irq_handler, int cpu, int blade) in gru_chiplet_setup_tlb_irq()
388 static void gru_chiplet_teardown_tlb_irq(int chiplet, int cpu, int blade) in gru_chiplet_teardown_tlb_irq() argument
407 irq_handler_t irq_handler, int cpu, int blade) in gru_chiplet_setup_tlb_irq()
417 irq = uv_setup_irq(irq_name, cpu, blade, mmr, UV_AFFINITY_CPU); in gru_chiplet_setup_tlb_irq()
431 gru_base[blade]->bs_grus[chiplet].gs_irq[core] = irq; in gru_chiplet_setup_tlb_irq()
435 static void gru_chiplet_teardown_tlb_irq(int chiplet, int cpu, int blade) in gru_chiplet_teardown_tlb_irq() argument
442 irq = gru_base[blade] in gru_chiplet_teardown_tlb_irq()
356 gru_chiplet_setup_tlb_irq(int chiplet, char *irq_name, irq_handler_t irq_handler, int cpu, int blade) gru_chiplet_setup_tlb_irq() argument
406 gru_chiplet_setup_tlb_irq(int chiplet, char *irq_name, irq_handler_t irq_handler, int cpu, int blade) gru_chiplet_setup_tlb_irq() argument
454 int blade; gru_teardown_tlb_irqs() local
472 int blade; gru_setup_tlb_irqs() local
[all...]
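
Taken together, the grufile.c hits sketch the topology the driver assumes: up to 8 cores per socket and 2 sockets (hence 2 GRU chiplets) per blade, with a TLB interrupt wired up per chiplet via uv_setup_irq(). Below is a minimal userspace sketch of that core-to-chiplet split, using only the constants from the comment at grufile.c lines 317-318; the helper name and the one-chiplet-per-socket assumption are illustrative, not kernel API.

    #include <stdio.h>

    /* Assumed layout, per the grufile.c comment: 2 sockets per blade,
     * up to 8 cores per socket, one GRU chiplet per socket. */
    #define CORES_PER_SOCKET 8

    /* Hypothetical helper: split a blade-local core number into the
     * chiplet that services it and the core index within that chiplet. */
    static void blade_core_to_chiplet(int blade_core, int *chiplet, int *core)
    {
        *chiplet = blade_core / CORES_PER_SOCKET;
        *core    = blade_core % CORES_PER_SOCKET;
    }

    int main(void)
    {
        int chiplet, core;

        blade_core_to_chiplet(11, &chiplet, &core);
        printf("blade-local core 11 -> chiplet %d, core %d\n", chiplet, core); /* 1, 3 */
        return 0;
    }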
grumain.c
642 * - retarget interrupts on local blade
679 * - task has migrated to a different cpu on the same blade where
695 * a context is assigned to any blade-local chiplet. However, users can
715 * Unload the gru context if it is not assigned to the correct blade or
717 * blade or if the user changes the selected blade/chiplet.
751 * Insufficient GRU resources available on the local blade. Steal a context from
781 struct gru_blade_state *blade; in gru_steal_context() local
793 blade = gru_base[blade_id]; in gru_steal_context()
794 spin_lock(&blade in gru_steal_context()
[all...]
grufault.c
519 static irqreturn_t gru_intr(int chiplet, int blade) in gru_intr() argument
530 gru = &gru_base[blade]->bs_grus[chiplet]; in gru_intr()
601 int blade; in gru_intr_mblade() local
603 for_each_possible_blade(blade) { in gru_intr_mblade()
604 if (uv_blade_nr_possible_cpus(blade)) in gru_intr_mblade()
606 gru_intr(0, blade); in gru_intr_mblade()
607 gru_intr(1, blade); in gru_intr_mblade()
874 /* Select blade/chiplet for GRU context */ in gru_set_context_option()
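
The grufault.c hits split interrupt handling in two: gru_intr() services a single (chiplet, blade) pair, while gru_intr_mblade() walks every possible blade and, for blades that have no CPUs of their own, services both chiplets from a remote CPU. A rough userspace mock of that dispatch shape follows; the stub topology and the continue statement (the line elided between grufault.c lines 604 and 606) are assumptions based only on the visible loop.

    #include <stdio.h>

    #define MAX_BLADES 4

    /* Stub topology: blades 0 and 1 have CPUs and take their own
     * per-chiplet interrupts; blades 2 and 3 are CPU-less and must be
     * serviced from another blade (the gru_intr_mblade() case). */
    static const int cpus_on_blade[MAX_BLADES] = { 8, 8, 0, 0 };

    static void service_gru(int chiplet, int blade)
    {
        printf("servicing GRU chiplet %d on blade %d\n", chiplet, blade);
    }

    /* Mirrors the visible loop shape: skip blades that have CPUs,
     * poll both chiplets on the rest. */
    static void service_cpuless_blades(void)
    {
        for (int blade = 0; blade < MAX_BLADES; blade++) {
            if (cpus_on_blade[blade])
                continue;
            service_gru(0, blade);
            service_gru(1, blade);
        }
    }

    int main(void)
    {
        service_cpuless_blades();
        return 0;
    }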
gru.h
47 int blade; member
/kernel/linux/linux-6.6/drivers/misc/sgi-gru/
grufile.c
266 nid = uv_blade_to_memory_nid(bid); /* -1 if no memory on blade */ in gru_init_tables()
317 * We target the cores of a blade and not the hyperthreads themselves. in gru_chiplet_cpu_to_mmr()
318 * There is a max of 8 cores per socket and 2 sockets per blade, in gru_chiplet_cpu_to_mmr()
357 irq_handler_t irq_handler, int cpu, int blade) in gru_chiplet_setup_tlb_irq()
388 static void gru_chiplet_teardown_tlb_irq(int chiplet, int cpu, int blade) in gru_chiplet_teardown_tlb_irq() argument
407 irq_handler_t irq_handler, int cpu, int blade) in gru_chiplet_setup_tlb_irq()
417 irq = uv_setup_irq(irq_name, cpu, blade, mmr, UV_AFFINITY_CPU); in gru_chiplet_setup_tlb_irq()
431 gru_base[blade]->bs_grus[chiplet].gs_irq[core] = irq; in gru_chiplet_setup_tlb_irq()
435 static void gru_chiplet_teardown_tlb_irq(int chiplet, int cpu, int blade) in gru_chiplet_teardown_tlb_irq() argument
442 irq = gru_base[blade] in gru_chiplet_teardown_tlb_irq()
356 gru_chiplet_setup_tlb_irq(int chiplet, char *irq_name, irq_handler_t irq_handler, int cpu, int blade) gru_chiplet_setup_tlb_irq() argument
406 gru_chiplet_setup_tlb_irq(int chiplet, char *irq_name, irq_handler_t irq_handler, int cpu, int blade) gru_chiplet_setup_tlb_irq() argument
454 int blade; gru_teardown_tlb_irqs() local
472 int blade; gru_setup_tlb_irqs() local
[all...]
grumain.c
642 * - retarget interrupts on local blade
679 * - task has migrated to a different cpu on the same blade where
695 * a context is assigned to any blade-local chiplet. However, users can
715 * Unload the gru context if it is not assigned to the correct blade or
717 * blade or if the user changes the selected blade/chiplet.
751 * Insufficient GRU resources available on the local blade. Steal a context from
781 struct gru_blade_state *blade; in gru_steal_context() local
793 blade = gru_base[blade_id]; in gru_steal_context()
794 spin_lock(&blade in gru_steal_context()
[all...]
grufault.c
519 static irqreturn_t gru_intr(int chiplet, int blade) in gru_intr() argument
530 gru = &gru_base[blade]->bs_grus[chiplet]; in gru_intr()
601 int blade; in gru_intr_mblade() local
603 for_each_possible_blade(blade) { in gru_intr_mblade()
604 if (uv_blade_nr_possible_cpus(blade)) in gru_intr_mblade()
606 gru_intr(0, blade); in gru_intr_mblade()
607 gru_intr(1, blade); in gru_intr_mblade()
874 /* Select blade/chiplet for GRU context */ in gru_set_context_option()
gru.h
47 int blade; member
/kernel/linux/linux-5.10/arch/ia64/include/asm/sn/
sn_sal.h
85 u64 blade : 16, member
92 sn_mq_watchlist_alloc(int blade, void *mq, unsigned int mq_size, in sn_mq_watchlist_alloc() argument
102 size_blade.blade = blade; in sn_mq_watchlist_alloc()
117 sn_mq_watchlist_free(int blade, int watchlist_num) in sn_mq_watchlist_free() argument
120 ia64_sal_oemcall_nolock(&rv, SN_SAL_WATCHLIST_FREE, blade, in sn_mq_watchlist_free()
/kernel/linux/linux-6.6/arch/ia64/include/asm/sn/
sn_sal.h
85 u64 blade : 16, member
92 sn_mq_watchlist_alloc(int blade, void *mq, unsigned int mq_size, in sn_mq_watchlist_alloc() argument
102 size_blade.blade = blade; in sn_mq_watchlist_alloc()
117 sn_mq_watchlist_free(int blade, int watchlist_num) in sn_mq_watchlist_free() argument
120 ia64_sal_oemcall_nolock(&rv, SN_SAL_WATCHLIST_FREE, blade, in sn_mq_watchlist_free()
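
Both sn_sal.h copies show the SAL watchlist call packing the blade number into a 16-bit bitfield of a single u64 argument (u64 blade : 16, assigned via size_blade.blade = blade). A small sketch of that packing idea follows; only the 16-bit blade field comes from the hits, while the companion field name, its width, and the field ordering are assumptions.

    #include <stdio.h>
    #include <stdint.h>

    /* Illustrative packing of a blade id into one 64-bit SAL argument.
     * Only "blade : 16" is taken from sn_sal.h; the rest is assumed. */
    struct watchlist_arg {
        uint64_t blade : 16;
        uint64_t size  : 48;   /* placeholder for the mq size bits */
    };

    int main(void)
    {
        struct watchlist_arg arg = { .blade = 3, .size = 64 * 1024 };

        printf("blade %u, mq size %llu\n",
               (unsigned)arg.blade, (unsigned long long)arg.size);
        printf("sizeof(arg) = %zu bytes\n", sizeof arg); /* one u64 on common ABIs */
        return 0;
    }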
/kernel/linux/linux-6.6/arch/x86/include/asm/uv/
uv_geo.h
21 unsigned char blade; member
100 -1 : g.common.blade * 2 + g.common.slot; in geo_blade()
uv_hub.h
34 * into the blade local memory. RAM memory on a blade is physically
638 /* Blade-local cpu number of current cpu. Numbered 0 .. <# cpus on the blade> */
644 /* Blade-local cpu number of cpu N. Numbered 0 .. <# cpus on the blade> */
651 static inline int uv_blade_to_node(int blade) in uv_blade_to_node() argument
653 return uv_socket_to_node(blade); in uv_blade_to_node()
663 * Convert linux node number to the UV blade number.
664 * .. Currently for UV2 thru UV4 the node and the blade are identical.
674 /* Convert a CPU number to the UV blade number */
680 /* Convert a blade i
[all...]
/kernel/linux/linux-5.10/arch/x86/platform/uv/
bios_uv.c
127 uv_bios_mq_watchlist_free(int blade, int watchlist_num) in uv_bios_mq_watchlist_free() argument
130 blade, watchlist_num, 0, 0, 0); in uv_bios_mq_watchlist_free()
uv_irq.c
98 chip_data->pnode = uv_blade_to_pnode(info->uv.blade); in uv_domain_alloc()
123 * on the specified blade to allow the sending of MSIs to the specified CPU.
133 * Disable the specified MMR located on the specified blade so that MSIs are
197 info.uv.blade = mmr_blade; in uv_setup_irq()
/kernel/linux/linux-6.6/arch/x86/platform/uv/
uv_irq.c
98 chip_data->pnode = uv_blade_to_pnode(info->uv.blade); in uv_domain_alloc()
123 * on the specified blade to allow the sending of MSIs to the specified CPU.
133 * Disable the specified MMR located on the specified blade so that MSIs are
196 info.uv.blade = mmr_blade; in uv_setup_irq()
bios_uv.c
128 uv_bios_mq_watchlist_free(int blade, int watchlist_num) in uv_bios_mq_watchlist_free() argument
131 blade, watchlist_num, 0, 0, 0); in uv_bios_mq_watchlist_free()
/kernel/linux/linux-6.6/drivers/platform/x86/
uv_sysfs.c
69 static int location_to_bpos(char *location, int *rack, int *slot, int *blade) in location_to_bpos() argument
77 *blade = idb * 2 + idh; in location_to_bpos()
87 int rack, slot, blade; in cache_obj_to_cnode() local
99 blade = geo_blade(geoid); in cache_obj_to_cnode()
100 if (obj_rack == rack && obj_slot == slot && obj_blade == blade) in cache_obj_to_cnode()
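
The uv_geo.h and uv_sysfs.c hits agree on how two small indices collapse into one blade position: geo_blade() returns g.common.blade * 2 + g.common.slot (uv_geo.h line 100) and location_to_bpos() computes idb * 2 + idh from a parsed location string (uv_sysfs.c line 77), which cache_obj_to_cnode() then matches against each object's rack/slot/blade. A tiny sketch of just that arithmetic; the argument names and the pairing of the two indices are assumptions based only on the matching formula.

    #include <stdio.h>

    /* Two blade positions per "major" index, selected by a 0/1 "minor"
     * index: the shape shared by geo_blade() and location_to_bpos(). */
    static int blade_position(int major, int minor)
    {
        return major * 2 + minor;
    }

    int main(void)
    {
        printf("major 3, minor 0 -> position %d\n", blade_position(3, 0)); /* 6 */
        printf("major 3, minor 1 -> position %d\n", blade_position(3, 1)); /* 7 */
        return 0;
    }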
/kernel/linux/linux-5.10/arch/x86/include/asm/
hw_irq.h
58 int blade; member
/kernel/linux/linux-6.6/arch/x86/include/asm/
hw_irq.h
56 int blade; member
/kernel/linux/linux-5.10/arch/x86/include/asm/uv/
uv_hub.h
34 * into the blade local memory. RAM memory on a blade is physically
632 /* Blade-local cpu number of current cpu. Numbered 0 .. <# cpus on the blade> */
638 /* Blade-local cpu number of cpu N. Numbered 0 .. <# cpus on the blade> */
645 static inline int uv_blade_to_node(int blade) in uv_blade_to_node() argument
647 return blade; in uv_blade_to_node()
657 * Convert linux node number to the UV blade number.
658 * .. Currently for UV2 thru UV4 the node and the blade are identical.
666 /* Convert a CPU number to the UV blade number */
672 /* Convert a blade i
[all...]
