Lines Matching defs:mask

128 uint64_t copy_mask = src->mask;
1007 unsigned mask = locs->descriptor_sets_enabled;
1009 mask &= descriptors_state->dirty & descriptors_state->valid;
1011 while (mask) {
1014 u_bit_scan_consecutive_range(&mask, &start, &count);
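The loop at 1011-1014 drains the dirty-and-valid descriptor-set mask one consecutive run of bits at a time. Below is a minimal, self-contained sketch of that pattern; scan_consecutive_range() re-implements Mesa's u_bit_scan_consecutive_range() in plain C, and the printf stands in for the real descriptor flush.

#include <stdint.h>
#include <stdio.h>
#include <strings.h>   /* ffs() */

/* Pop the lowest run of consecutive set bits out of *mask and report its
 * start index and length (mirrors u_bit_scan_consecutive_range()). */
static void scan_consecutive_range(uint32_t *mask, int *start, int *count)
{
   if (*mask == 0xffffffffu) {      /* all bits set: avoid ffs(0) below */
      *start = 0;
      *count = 32;
      *mask = 0;
      return;
   }
   *start = ffs(*mask) - 1;                    /* index of the lowest set bit */
   *count = ffs(~(*mask >> *start)) - 1;       /* length of that run of 1s */
   *mask &= ~(((1u << *count) - 1) << *start);
}

int main(void)
{
   uint32_t mask = 0xf0e4;          /* example: dirty & valid descriptor sets */
   while (mask) {
      int start, count;
      scan_consecutive_range(&mask, &start, &count);
      printf("flush descriptor sets [%d, %d)\n", start, start + count);
   }
   return 0;
}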
1060 * Compute the PA_SC_AA_SAMPLE_LOCS_PIXEL_* mask based on hardware sample
1082 * Compute the PA_SC_CENTROID_PRIORITY_* mask based on the top left hardware
1143 /* Compute the PA_SC_AA_SAMPLE_LOCS_PIXEL_* mask. */
1148 /* Compute the PA_SC_CENTROID_PRIORITY_* mask. */
1288 uint32_t mask = state->prefetch_L2_mask;
1292 mask &= RADV_PREFETCH_VS | RADV_PREFETCH_VBO_DESCRIPTORS | RADV_PREFETCH_MS;
1294 if (mask & RADV_PREFETCH_VS)
1297 if (mask & RADV_PREFETCH_MS)
1300 if (mask & RADV_PREFETCH_VBO_DESCRIPTORS)
1303 if (mask & RADV_PREFETCH_TCS)
1306 if (mask & RADV_PREFETCH_TES)
1309 if (mask & RADV_PREFETCH_GS) {
1315 if (mask & RADV_PREFETCH_PS)
1318 state->prefetch_L2_mask &= ~mask;
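The block at 1288-1318 snapshots the pending prefetch mask, optionally narrows it to the stages needed right now, prefetches each selected stage, and then clears only the handled bits so the rest stay pending. A sketch of that partial-consumption pattern, assuming stand-in names: the PREFETCH_* values, cmd_state and prefetch_shader() below are placeholders, not the driver's types.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

enum {
   PREFETCH_VS  = 1u << 0,
   PREFETCH_VBO = 1u << 1,
   PREFETCH_TCS = 1u << 2,
   PREFETCH_TES = 1u << 3,
   PREFETCH_GS  = 1u << 4,
   PREFETCH_PS  = 1u << 5,
};

struct cmd_state {
   uint32_t prefetch_L2_mask;   /* stages still waiting to be prefetched */
};

static void prefetch_shader(const char *name)
{
   printf("prefetch %s\n", name);
}

static void prefetch_shaders(struct cmd_state *state, bool vertex_stage_only)
{
   uint32_t mask = state->prefetch_L2_mask;

   /* Fast path: only prefetch what the next draw needs immediately. */
   if (vertex_stage_only)
      mask &= PREFETCH_VS | PREFETCH_VBO;

   if (mask & PREFETCH_VS)  prefetch_shader("VS");
   if (mask & PREFETCH_VBO) prefetch_shader("VBO descriptors");
   if (mask & PREFETCH_TCS) prefetch_shader("TCS");
   if (mask & PREFETCH_TES) prefetch_shader("TES");
   if (mask & PREFETCH_GS)  prefetch_shader("GS");
   if (mask & PREFETCH_PS)  prefetch_shader("PS");

   /* Clear only the bits we actually handled; the rest stay pending. */
   state->prefetch_L2_mask &= ~mask;
}

int main(void)
{
   struct cmd_state state = { .prefetch_L2_mask = PREFETCH_VS | PREFETCH_VBO |
                                                  PREFETCH_GS | PREFETCH_PS };
   prefetch_shaders(&state, true);    /* prefetches VS and VBO only */
   prefetch_shaders(&state, false);   /* prefetches the remaining GS and PS */
   return 0;
}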
3557 const uint64_t mask = shader->info.inline_push_constant_mask;
3558 if (!mask)
3561 const uint8_t base = ffs(mask) - 1;
3562 if (mask == u_bit_consecutive64(base, util_last_bit64(mask) - base)) {
3570 u_foreach_bit64 (idx, mask)
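Lines 3557-3570 choose between one contiguous copy of inline push constants and a per-dword copy by testing whether the 64-bit mask is a single run of set bits. A sketch of that test under assumed names: last_bit64() and bit_consecutive64() re-implement Mesa's util_last_bit64() and u_bit_consecutive64() in plain C, and the printf calls stand in for the actual copies.

#include <stdint.h>
#include <stdio.h>

static unsigned last_bit64(uint64_t v)               /* like util_last_bit64() */
{
   return v ? 64 - __builtin_clzll(v) : 0;
}

static uint64_t bit_consecutive64(unsigned start, unsigned n)  /* like u_bit_consecutive64() */
{
   return n == 64 ? ~0ull : ((1ull << n) - 1) << start;
}

static void upload_inline_push_constants(uint64_t mask)
{
   if (!mask)
      return;

   const unsigned base = __builtin_ctzll(mask);       /* first used dword */
   const unsigned len = last_bit64(mask) - base;

   if (mask == bit_consecutive64(base, len)) {
      /* Every used dword is contiguous: one copy of 'len' dwords at 'base'. */
      printf("copy dwords [%u, %u)\n", base, base + len);
   } else {
      /* Sparse mask: visit each set bit, as u_foreach_bit64 does. */
      for (uint64_t m = mask; m; m &= m - 1)
         printf("copy dword %d\n", __builtin_ctzll(m));
   }
}

int main(void)
{
   upload_inline_push_constants(0x00f0);   /* contiguous run: bits 4..7 */
   upload_inline_push_constants(0x0091);   /* sparse: bits 0, 4, 7 */
   return 0;
}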
3706 uint32_t mask = pipeline->vb_desc_usage_mask;
3712 while (mask) {
3713 unsigned i = u_bit_scan(&mask);
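The loop at 3712-3713 walks vb_desc_usage_mask one set bit at a time, each bit naming a vertex-buffer binding that needs a descriptor. A short sketch of that iteration; bit_scan() mirrors Mesa's u_bit_scan() (return the lowest set bit's index and clear it), and the body is a placeholder.

#include <stdint.h>
#include <stdio.h>
#include <strings.h>   /* ffs() */

/* Return the index of the lowest set bit and clear it (like u_bit_scan()). */
static unsigned bit_scan(uint32_t *mask)
{
   const unsigned i = ffs(*mask) - 1;
   *mask ^= 1u << i;
   return i;
}

int main(void)
{
   uint32_t mask = 0x25;   /* example: bindings 0, 2 and 5 are used */
   while (mask) {
      unsigned i = bit_scan(&mask);
      printf("emit vertex buffer descriptor for binding %u\n", i);
   }
   return 0;
}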
4621 * account for the "real" number of layers. If the view mask is
9639 * VK_PIPELINE_STAGE_2_BOTTOM_OF_PIPE_BIT in the destination stage mask
9644 * with only VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT in the source stage mask
10060 radeon_emit(cs, S_0084FC_OFFSET_UPDATE_DONE(1)); /* mask */