
Searched refs:mask_val (Results 1 - 25 of 52) sorted by relevance


/kernel/linux/linux-5.10/drivers/bus/
omap_l3_noc.c
  172  u32 err_reg, mask_val;                     in l3_interrupt_handler() local
  213  mask_val = readl_relaxed(mask_reg);        in l3_interrupt_handler()
  214  mask_val &= ~(1 << err_src);               in l3_interrupt_handler()
  215  writel_relaxed(mask_val, mask_reg);        in l3_interrupt_handler()
  320  u32 mask_val;                              in l3_resume_noirq() local
  330  mask_val = readl_relaxed(mask_regx);       in l3_resume_noirq()
  331  mask_val &= ~(flag_mux->mask_app_bits);    in l3_resume_noirq()
  333  writel_relaxed(mask_val, mask_regx);       in l3_resume_noirq()
  336  mask_val = readl_relaxed(mask_regx);       in l3_resume_noirq()
  337  mask_val                                   in l3_resume_noirq()
  [all...]
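
The omap_l3_noc hits above are the classic read-modify-write of an interrupt mask register: read the mask, clear the bit for the offending source, write it back. A minimal user-space sketch of that pattern, assuming a plain variable in place of the MMIO register; read_reg()/write_reg() are hypothetical stand-ins for the driver's readl_relaxed()/writel_relaxed() calls.

#include <stdint.h>
#include <stdio.h>

static uint32_t fake_mask_reg = 0xffffffff;       /* pretend MMIO mask register */

static uint32_t read_reg(void)      { return fake_mask_reg; }   /* ~readl_relaxed() */
static void write_reg(uint32_t v)   { fake_mask_reg = v; }      /* ~writel_relaxed() */

/* mirror of the l3_interrupt_handler() excerpt: mask one error source */
static void mask_error_source(unsigned int err_src)
{
	uint32_t mask_val;

	mask_val = read_reg();            /* read current mask */
	mask_val &= ~(1u << err_src);     /* clear the bit for this source */
	write_reg(mask_val);              /* write the new mask back */
}

int main(void)
{
	mask_error_source(5);
	printf("mask reg now 0x%08x\n", (unsigned)fake_mask_reg);   /* 0xffffffdf */
	return 0;
}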
/kernel/linux/linux-6.6/drivers/bus/
omap_l3_noc.c
  164  u32 err_reg, mask_val;                     in l3_interrupt_handler() local
  205  mask_val = readl_relaxed(mask_reg);        in l3_interrupt_handler()
  206  mask_val &= ~(1 << err_src);               in l3_interrupt_handler()
  207  writel_relaxed(mask_val, mask_reg);        in l3_interrupt_handler()
  312  u32 mask_val;                              in l3_resume_noirq() local
  322  mask_val = readl_relaxed(mask_regx);       in l3_resume_noirq()
  323  mask_val &= ~(flag_mux->mask_app_bits);    in l3_resume_noirq()
  325  writel_relaxed(mask_val, mask_regx);       in l3_resume_noirq()
  328  mask_val = readl_relaxed(mask_regx);       in l3_resume_noirq()
  329  mask_val                                   in l3_resume_noirq()
  [all...]
/kernel/linux/linux-5.10/net/netlabel/
netlabel_addrlist.c
  314  u32 mask_val = ntohl(mask);                 in netlbl_af4list_audit_addr() local
  320  if (mask_val != 0xffffffff) {               in netlbl_af4list_audit_addr()
  322  while (mask_val > 0) {                      in netlbl_af4list_audit_addr()
  323  mask_val <<= 1;                             in netlbl_af4list_audit_addr()
  356  u32 mask_val;                               in netlbl_af6list_audit_addr() local
  360  mask_val = ntohl(mask->s6_addr32[iter]);    in netlbl_af6list_audit_addr()
  361  while (mask_val > 0) {                      in netlbl_af6list_audit_addr()
  362  mask_val <<= 1;                             in netlbl_af6list_audit_addr()
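
The netlabel hits compute a CIDR prefix length from a contiguous netmask by left-shifting until the value reaches zero and counting the shifts. A small sketch of the same trick, assuming the mask is already in host byte order (the kernel code applies ntohl() first):

#include <stdint.h>
#include <stdio.h>

/* count the leading 1-bits of a contiguous netmask, i.e. its prefix length */
static int prefix_len(uint32_t mask_val)
{
	int len = 0;

	if (mask_val == 0xffffffff)
		return 32;                /* /32: the driver special-cases this too */

	while (mask_val > 0) {
		mask_val <<= 1;           /* shift one leading 1 bit out the top */
		len++;
	}
	return len;
}

int main(void)
{
	printf("/%d\n", prefix_len(0xffffff00));   /* prints /24 */
	printf("/%d\n", prefix_len(0xffff0000));   /* prints /16 */
	return 0;
}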
/kernel/linux/linux-6.6/net/netlabel/
netlabel_addrlist.c
  314  u32 mask_val = ntohl(mask);                 in netlbl_af4list_audit_addr() local
  320  if (mask_val != 0xffffffff) {               in netlbl_af4list_audit_addr()
  322  while (mask_val > 0) {                      in netlbl_af4list_audit_addr()
  323  mask_val <<= 1;                             in netlbl_af4list_audit_addr()
  356  u32 mask_val;                               in netlbl_af6list_audit_addr() local
  360  mask_val = ntohl(mask->s6_addr32[iter]);    in netlbl_af6list_audit_addr()
  361  while (mask_val > 0) {                      in netlbl_af6list_audit_addr()
  362  mask_val <<= 1;                             in netlbl_af6list_audit_addr()
/kernel/linux/linux-6.6/drivers/watchdog/
s3c2410_wdt.c
  312  const u32 mask_val = BIT(wdt->drv_data->mask_bit);      in s3c2410wdt_disable_wdt_reset() local
  313  const u32 val = mask ? mask_val : 0;                     in s3c2410wdt_disable_wdt_reset()
  317  mask_val, val);                                          in s3c2410wdt_disable_wdt_reset()
  326  const u32 mask_val = BIT(wdt->drv_data->mask_bit);       in s3c2410wdt_mask_wdt_reset() local
  328  const u32 val = (mask ^ val_inv) ? mask_val : 0;         in s3c2410wdt_mask_wdt_reset()
  332  mask_val, val);                                          in s3c2410wdt_mask_wdt_reset()
  341  const u32 mask_val = BIT(wdt->drv_data->cnt_en_bit);     in s3c2410wdt_enable_counter() local
  342  const u32 val = en ? mask_val : 0;                       in s3c2410wdt_enable_counter()
  346  mask_val, val);                                          in s3c2410wdt_enable_counter()
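
These linux-6.6 s3c2410_wdt hits build a (mask_val, val) pair from a single bit position and hand it to a register-update call. A sketch of that idea, with a hypothetical update_bits() helper that mimics (reg & ~mask) | (val & mask) semantics and an in-memory variable standing in for the real PMU register:

#include <stdint.h>
#include <stdio.h>
#include <stdbool.h>

#define BIT(n) (1u << (n))

static uint32_t pmu_reg;      /* pretend PMU mask/disable register */

/* update only the bits covered by mask, leaving the rest of the register alone */
static void update_bits(uint32_t *reg, uint32_t mask, uint32_t val)
{
	*reg = (*reg & ~mask) | (val & mask);
}

/* modeled on the s3c2410wdt_disable_wdt_reset() excerpt above */
static void wdt_mask_reset(unsigned int mask_bit, bool mask)
{
	const uint32_t mask_val = BIT(mask_bit);
	const uint32_t val = mask ? mask_val : 0;

	update_bits(&pmu_reg, mask_val, val);
}

int main(void)
{
	wdt_mask_reset(0, true);     /* set bit 0 */
	wdt_mask_reset(1, false);    /* explicitly clear bit 1, touch nothing else */
	printf("pmu reg = 0x%08x\n", (unsigned)pmu_reg);   /* 0x00000001 */
	return 0;
}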
/kernel/linux/linux-5.10/drivers/irqchip/
irq-csky-mpintc.c
  128  const struct cpumask *mask_val,                        in csky_irq_set_affinity()
  135  cpu = cpumask_any_and(mask_val, cpu_online_mask);      in csky_irq_set_affinity()
  137  cpu = cpumask_first(mask_val);                         in csky_irq_set_affinity()
  147  * SO we only use auto deliver mode when affinity mask_val is    in csky_irq_set_affinity()
  151  if (cpumask_equal(mask_val, cpu_present_mask))         in csky_irq_set_affinity()
  127  csky_irq_set_affinity(struct irq_data *d, const struct cpumask *mask_val, bool force)    csky_irq_set_affinity() argument
irq-hip04.c
  147  const struct cpumask *mask_val,                        in hip04_irq_set_affinity()
  155  cpu = cpumask_any_and(mask_val, cpu_online_mask);      in hip04_irq_set_affinity()
  157  cpu = cpumask_first(mask_val);                         in hip04_irq_set_affinity()
  146  hip04_irq_set_affinity(struct irq_data *d, const struct cpumask *mask_val, bool force)    hip04_irq_set_affinity() argument
exynos-combiner.c
   96  const struct cpumask *mask_val, bool force)            in combiner_set_affinity()
  103  return chip->irq_set_affinity(data, mask_val, force);  in combiner_set_affinity()
   95  combiner_set_affinity(struct irq_data *d, const struct cpumask *mask_val, bool force)    combiner_set_affinity() argument
irq-sifive-plic.c
  137  const struct cpumask *mask_val, bool force)            in plic_set_affinity()
  143  cpumask_and(&amask, &priv->lmask, mask_val);           in plic_set_affinity()
  136  plic_set_affinity(struct irq_data *d, const struct cpumask *mask_val, bool force)    plic_set_affinity() argument
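
The irqchip hits all pick a target CPU from the requested affinity mask: unless forced, intersect it with the online mask and take any set bit, otherwise fall back to the first bit of the requested mask. A sketch with a uint64_t bitmap standing in for struct cpumask and the GCC/Clang __builtin_ctzll() builtin in place of cpumask_any_and()/cpumask_first():

#include <stdint.h>
#include <stdio.h>

/* index of the lowest set bit, or -1 if the mask is empty */
static int first_cpu(uint64_t mask)
{
	return mask ? __builtin_ctzll(mask) : -1;
}

/* mirrors the csky/hip04 excerpts: restrict to online CPUs unless force is set */
static int pick_target_cpu(uint64_t requested, uint64_t online, int force)
{
	if (!force)
		return first_cpu(requested & online);   /* ~cpumask_any_and() */
	return first_cpu(requested);                    /* ~cpumask_first() */
}

int main(void)
{
	uint64_t online = 0x0f;         /* CPUs 0-3 online */
	uint64_t requested = 0x0c;      /* affinity requested for CPUs 2-3 */

	printf("target cpu = %d\n", pick_target_cpu(requested, online, 0));   /* 2 */
	return 0;
}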
/kernel/linux/linux-6.6/drivers/irqchip/
irq-csky-mpintc.c
  128  const struct cpumask *mask_val,                        in csky_irq_set_affinity()
  135  cpu = cpumask_any_and(mask_val, cpu_online_mask);      in csky_irq_set_affinity()
  137  cpu = cpumask_first(mask_val);                         in csky_irq_set_affinity()
  147  * SO we only use auto deliver mode when affinity mask_val is    in csky_irq_set_affinity()
  151  if (cpumask_equal(mask_val, cpu_present_mask))         in csky_irq_set_affinity()
  127  csky_irq_set_affinity(struct irq_data *d, const struct cpumask *mask_val, bool force)    csky_irq_set_affinity() argument
irq-hip04.c
  147  const struct cpumask *mask_val,                        in hip04_irq_set_affinity()
  155  cpu = cpumask_any_and(mask_val, cpu_online_mask);      in hip04_irq_set_affinity()
  157  cpu = cpumask_first(mask_val);                         in hip04_irq_set_affinity()
  146  hip04_irq_set_affinity(struct irq_data *d, const struct cpumask *mask_val, bool force)    hip04_irq_set_affinity() argument
exynos-combiner.c
   94  const struct cpumask *mask_val, bool force)            in combiner_set_affinity()
  101  return chip->irq_set_affinity(data, mask_val, force);  in combiner_set_affinity()
   93  combiner_set_affinity(struct irq_data *d, const struct cpumask *mask_val, bool force)    combiner_set_affinity() argument
/kernel/linux/linux-5.10/drivers/net/ethernet/chelsio/cxgb4/
cxgb4_tc_u32_parse.h
   57  u32 mask_val;                                                      in cxgb4_fill_ipv4_frag() local
   61  mask_val = ntohl(mask) & 0x0000FFFF;                               in cxgb4_fill_ipv4_frag()
   63  if (frag_val == 0x1 && mask_val != 0x3FFF) { /* MF set */          in cxgb4_fill_ipv4_frag()
   66  } else if (frag_val == 0x2 && mask_val != 0x3FFF) { /* DF set */   in cxgb4_fill_ipv4_frag()
/kernel/linux/linux-5.10/sound/soc/intel/skylake/
skl-sst-utils.c
  119  u32 mask_val;                              in skl_getid_32() local
  122  mask_val = (u32)(*val >> word1_mask);      in skl_getid_32()
  124  if (mask_val != 0xffffffff) {              in skl_getid_32()
  125  index = ffz(mask_val);                     in skl_getid_32()
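
skl_getid_32() treats mask_val as an allocation bitmap and uses ffz() to find the first zero bit, with an all-ones word meaning no free slot. A sketch of that lookup, implementing ffz as the position of the lowest set bit of the complement via the GCC/Clang __builtin_ctz() builtin:

#include <stdint.h>
#include <stdio.h>

/* find-first-zero: position of the lowest clear bit (caller must not pass ~0u) */
static int ffz32(uint32_t mask_val)
{
	return __builtin_ctz(~mask_val);
}

static int next_free_index(uint32_t mask_val)
{
	if (mask_val == 0xffffffff)
		return -1;                 /* bitmap full, no free index */
	return ffz32(mask_val);
}

int main(void)
{
	printf("%d\n", next_free_index(0x00000007));   /* prints 3 */
	printf("%d\n", next_free_index(0xffffffff));   /* prints -1 */
	return 0;
}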
/kernel/linux/linux-6.6/drivers/net/ethernet/chelsio/cxgb4/
cxgb4_tc_u32_parse.h
   57  u32 mask_val;                                                      in cxgb4_fill_ipv4_frag() local
   61  mask_val = ntohl(mask) & 0x0000FFFF;                               in cxgb4_fill_ipv4_frag()
   63  if (frag_val == 0x1 && mask_val != 0x3FFF) { /* MF set */          in cxgb4_fill_ipv4_frag()
   66  } else if (frag_val == 0x2 && mask_val != 0x3FFF) { /* DF set */   in cxgb4_fill_ipv4_frag()
/kernel/linux/linux-6.6/sound/soc/intel/skylake/
skl-sst-utils.c
  119  u32 mask_val;                              in skl_getid_32() local
  122  mask_val = (u32)(*val >> word1_mask);      in skl_getid_32()
  124  if (mask_val != 0xffffffff) {              in skl_getid_32()
  125  index = ffz(mask_val);                     in skl_getid_32()
/kernel/linux/linux-6.6/sound/soc/tegra/
tegra210_adx.c
  205  unsigned int mask_val = adx->byte_mask[mc->reg / 32];   in tegra210_adx_put_byte_map() local
  208  mask_val |= (1 << (mc->reg % 32));                      in tegra210_adx_put_byte_map()
  210  mask_val &= ~(1 << (mc->reg % 32));                     in tegra210_adx_put_byte_map()
  212  if (mask_val == adx->byte_mask[mc->reg / 32])           in tegra210_adx_put_byte_map()
  217  adx->byte_mask[mc->reg / 32] = mask_val;                in tegra210_adx_put_byte_map()
tegra210_amx.c
  234  unsigned int mask_val = amx->byte_mask[reg / 32];       in tegra210_amx_put_byte_map() local
  237  mask_val |= (1 << (reg % 32));                          in tegra210_amx_put_byte_map()
  239  mask_val &= ~(1 << (reg % 32));                         in tegra210_amx_put_byte_map()
  241  if (mask_val == amx->byte_mask[reg / 32])               in tegra210_amx_put_byte_map()
  246  amx->byte_mask[reg / 32] = mask_val;                    in tegra210_amx_put_byte_map()
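
The Tegra ADX/AMX hits keep roughly one flag per byte-map index across an array of 32-bit words, so index N lives at bit (N % 32) of word (N / 32), and the put callback returns early when nothing changes. A sketch of that bookkeeping; the array size and names below are illustrative only:

#include <stdint.h>
#include <stdio.h>
#include <stdbool.h>

#define MAP_WORDS 2                       /* enough for 64 indices */

static uint32_t byte_mask[MAP_WORDS];

/* set or clear the flag for one index; return true only if the word changed */
static bool set_byte_enabled(unsigned int idx, bool enable)
{
	uint32_t mask_val = byte_mask[idx / 32];

	if (enable)
		mask_val |= (1u << (idx % 32));
	else
		mask_val &= ~(1u << (idx % 32));

	if (mask_val == byte_mask[idx / 32])
		return false;                 /* no change, like the driver's early return */

	byte_mask[idx / 32] = mask_val;
	return true;
}

int main(void)
{
	set_byte_enabled(37, true);
	printf("word1 = 0x%08x\n", (unsigned)byte_mask[1]);   /* bit 5 set -> 0x00000020 */
	return 0;
}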
/kernel/linux/linux-5.10/drivers/net/wireless/ath/ath9k/
ar9003_wow.c
  126  u32 pattern_val, mask_val;                                         in ath9k_hw_wow_apply_pattern() local
  145  memcpy(&mask_val, user_mask, 4);                                   in ath9k_hw_wow_apply_pattern()
  146  REG_WRITE(ah, (AR_WOW_TB_MASK(pattern_count) + i), mask_val);      in ath9k_hw_wow_apply_pattern()
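
The ath9k hit consumes the user-supplied byte mask four bytes at a time with memcpy() into a u32 before programming it into the pattern-mask register. A sketch of that loop, with the register write replaced by a printf(); AR_WOW_TB_MASK() and REG_WRITE() are the driver's own macros and are not reproduced here:

#include <stdint.h>
#include <stdio.h>
#include <string.h>

static void apply_mask(const uint8_t *user_mask, size_t mask_len)
{
	uint32_t mask_val;
	size_t i;

	for (i = 0; i + 4 <= mask_len; i += 4) {
		memcpy(&mask_val, user_mask + i, 4);   /* avoids unaligned loads from the byte array */
		printf("write mask word at offset %zu: 0x%08x\n", i, (unsigned)mask_val);
	}
}

int main(void)
{
	const uint8_t mask[8] = { 0xff, 0x00, 0xff, 0x00, 0xaa, 0x55, 0xaa, 0x55 };

	apply_mask(mask, sizeof(mask));
	return 0;
}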
/kernel/linux/linux-6.6/drivers/net/wireless/ath/ath9k/
ar9003_wow.c
  126  u32 pattern_val, mask_val;                                         in ath9k_hw_wow_apply_pattern() local
  145  memcpy(&mask_val, user_mask, 4);                                   in ath9k_hw_wow_apply_pattern()
  146  REG_WRITE(ah, (AR_WOW_TB_MASK(pattern_count) + i), mask_val);      in ath9k_hw_wow_apply_pattern()
/kernel/linux/linux-5.10/fs/orangefs/
orangefs-debug.h
   49  __u64 mask_val;                                                    member
   58  * keyword mask_val index
orangefs-debugfs.c
  729  if (*mask & s_kmod_keyword_mask_map[index].mask_val) {             in do_k_string()
  816  if (*k_mask >= s_kmod_keyword_mask_map[k_all_index].mask_val) {    in check_amalgam_keyword()
  885  s_kmod_keyword_mask_map[i].mask_val;                               in do_k_mask()
/kernel/linux/linux-6.6/fs/orangefs/
orangefs-debug.h
   49  __u64 mask_val;                                                    member
   58  * keyword mask_val index
orangefs-debugfs.c
  729  if (*mask & s_kmod_keyword_mask_map[index].mask_val) {             in do_k_string()
  816  if (*k_mask >= s_kmod_keyword_mask_map[k_all_index].mask_val) {    in check_amalgam_keyword()
  885  s_kmod_keyword_mask_map[i].mask_val;                               in do_k_mask()
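
The orangefs hits revolve around a keyword-to-mask table whose entries carry a __u64 mask_val that gets tested against or OR-ed into the active debug mask. A sketch of that lookup/accumulate pattern; the table contents below are made up for illustration, only the shape mirrors the driver:

#include <stdint.h>
#include <stdio.h>
#include <string.h>

struct keyword_mask_map {
	const char *keyword;
	uint64_t mask_val;
};

/* illustrative entries; the real s_kmod_keyword_mask_map lives in the driver */
static const struct keyword_mask_map kmod_map[] = {
	{ "super", 1ULL << 0 },
	{ "inode", 1ULL << 1 },
	{ "file",  1ULL << 2 },
};

static uint64_t mask_for_keyword(const char *kw)
{
	size_t i;

	for (i = 0; i < sizeof(kmod_map) / sizeof(kmod_map[0]); i++)
		if (strcmp(kmod_map[i].keyword, kw) == 0)
			return kmod_map[i].mask_val;
	return 0;
}

int main(void)
{
	uint64_t mask = mask_for_keyword("super") | mask_for_keyword("file");

	printf("mask = 0x%llx\n", (unsigned long long)mask);   /* 0x5 */
	return 0;
}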
/kernel/linux/linux-5.10/drivers/watchdog/
s3c2410_wdt.c
  206  u32 mask_val = 1 << wdt->drv_data->mask_bit;   in s3c2410wdt_mask_and_disable_reset() local
  214  val = mask_val;                                in s3c2410wdt_mask_and_disable_reset()
  218  mask_val, val);                                in s3c2410wdt_mask_and_disable_reset()
  224  mask_val, val);                                in s3c2410wdt_mask_and_disable_reset()

Completed in 17 milliseconds
