/third_party/mesa3d/src/intel/common/intel_aux_map.c
  152  struct aux_map_buffer *tail =    in align_and_verify_space() [local]
  154  uint64_t gpu = tail->buffer->gpu + ctx->tail_offset;    in align_and_verify_space()
  170  struct aux_map_buffer *tail =    in get_current_pos() [local]
  173  *gpu = tail->buffer->gpu + ctx->tail_offset;    in get_current_pos()
  175  *map = (uint64_t*)((uint8_t*)tail->buffer->map + ctx->tail_offset);    in get_current_pos()
/third_party/python/Tools/iobench/iobench.py
  405  tail = chunk[:size % len(chunk)]
  406  # Adjust tail to end on a character boundary
  409  tail.decode(TEXT_ENCODING)
  412  tail = tail[:-1]
  415  f.write(tail)
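The iobench hits trim the final partial chunk back to a character boundary by retrying the decode and dropping one byte at a time until it succeeds. For a fixed encoding the same effect can be had without a decoder; the following is a minimal C sketch that assumes UTF-8 (iobench's TEXT_ENCODING is configurable, so this is a narrower, purely illustrative variant, and the function name is invented):

#include <stddef.h>

/* Trim an incomplete UTF-8 sequence from the end of buf[0..len) and return the
 * new length.  A complete final character is left untouched; only a truncated
 * multi-byte sequence is dropped.  Assumes the rest of the buffer is valid
 * UTF-8 (iobench instead trims by retrying the decode, which works for any
 * encoding). */
static size_t trim_to_char_boundary(const unsigned char *buf, size_t len)
{
    size_t cont = 0, expect;
    unsigned char lead;

    /* Step back over trailing continuation bytes (10xxxxxx). */
    while (cont < len && cont < 3 && (buf[len - 1 - cont] & 0xC0) == 0x80)
        cont++;

    if (cont == len)
        return 0;                    /* nothing but continuation bytes */

    lead = buf[len - 1 - cont];
    if ((lead & 0x80) == 0x00)
        expect = 1;                  /* ASCII */
    else if ((lead & 0xE0) == 0xC0)
        expect = 2;
    else if ((lead & 0xF0) == 0xE0)
        expect = 3;
    else if ((lead & 0xF8) == 0xF0)
        expect = 4;
    else
        expect = 0;                  /* invalid lead byte: drop it */

    if (cont + 1 == expect)
        return len;                  /* final character is complete */
    return len - 1 - cont;           /* drop the incomplete sequence */
}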
/third_party/python/Lib/tkinter/filedialog.py
  197  head, tail = os.path.split(self.get_selection())
  198  if tail == os.curdir: tail = ''
  199  self.set_selection(tail)
  268  head, tail = os.path.split(file)
/third_party/vulkan-loader/scripts/helper_file_generator.py
  254  if ((elem.tag != 'type') and (elem.tail is not None)) and '*' in elem.tail:
  262  if (paramname.tail is not None) and ('[' in paramname.tail):
  263  isstaticarray = paramname.tail.count('[')
/kernel/linux/linux-5.10/drivers/net/ethernet/marvell/prestera/prestera_rxtx.c
  377  struct prestera_sdma_buf *head, *tail, *next, *prev;    in prestera_sdma_rx_init() [local]
  386  tail = &ring->bufs[bnum - 1];    in prestera_sdma_rx_init()
  408  } while (prev != tail);    in prestera_sdma_rx_init()
  410  /* join tail with head to make a circular list */    in prestera_sdma_rx_init()
  411  prestera_sdma_rx_desc_set_next(sdma, tail->desc, head->desc_dma);    in prestera_sdma_rx_init()
  522  struct prestera_sdma_buf *head, *tail, *next, *prev;    in prestera_sdma_tx_init() [local]
  534  tail = &tx_ring->bufs[bnum - 1];    in prestera_sdma_tx_init()
  557  } while (prev != tail);    in prestera_sdma_tx_init()
  559  /* join tail with head to make a circular list */    in prestera_sdma_tx_init()
  560  prestera_sdma_tx_desc_set_next(sdma, tail    in prestera_sdma_tx_init()
  [all...]
/kernel/linux/linux-5.10/drivers/input/misc/uinput.c
   62  unsigned char tail;    [member]
  642  have_event = udev->head != udev->tail;    in uinput_fetch_next_event()
  644  *event = udev->buff[udev->tail];    in uinput_fetch_next_event()
  645  udev->tail = (udev->tail + 1) % UINPUT_BUFFER_SIZE;    in uinput_fetch_next_event()
  687  else if (udev->head == udev->tail &&    in uinput_read()
  700  udev->head != udev->tail ||    in uinput_read()
  714  if (udev->head != udev->tail)    in uinput_poll()
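The uinput hits above are the textbook single-producer, single-consumer ring: the queue is empty when head == tail, an event is read at tail, and tail advances modulo the buffer size. A minimal userspace sketch of the same pattern (type, size, and function names are illustrative and not taken from uinput):

#include <stdbool.h>
#include <stddef.h>

#define RING_SIZE 16                 /* illustrative; uinput uses UINPUT_BUFFER_SIZE */

struct ring {
    int buf[RING_SIZE];
    size_t head;                     /* next free slot, advanced by the producer */
    size_t tail;                     /* oldest unread slot, advanced by the consumer */
};

static bool ring_push(struct ring *r, int v)
{
    size_t next = (r->head + 1) % RING_SIZE;

    if (next == r->tail)             /* full: one slot stays free to tell full from empty */
        return false;
    r->buf[r->head] = v;
    r->head = next;
    return true;
}

static bool ring_pop(struct ring *r, int *v)
{
    if (r->head == r->tail)          /* empty, the same head != tail test uinput uses */
        return false;
    *v = r->buf[r->tail];
    r->tail = (r->tail + 1) % RING_SIZE;
    return true;
}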
/kernel/linux/linux-6.6/drivers/net/ethernet/marvell/prestera/prestera_rxtx.c
  379  struct prestera_sdma_buf *head, *tail, *next, *prev;    in prestera_sdma_rx_init() [local]
  388  tail = &ring->bufs[bnum - 1];    in prestera_sdma_rx_init()
  410  } while (prev != tail);    in prestera_sdma_rx_init()
  412  /* join tail with head to make a circular list */    in prestera_sdma_rx_init()
  413  prestera_sdma_rx_desc_set_next(sdma, tail->desc, head->desc_dma);    in prestera_sdma_rx_init()
  524  struct prestera_sdma_buf *head, *tail, *next, *prev;    in prestera_sdma_tx_init() [local]
  536  tail = &tx_ring->bufs[bnum - 1];    in prestera_sdma_tx_init()
  559  } while (prev != tail);    in prestera_sdma_tx_init()
  561  /* join tail with head to make a circular list */    in prestera_sdma_tx_init()
  562  prestera_sdma_tx_desc_set_next(sdma, tail    in prestera_sdma_tx_init()
  [all...]
/kernel/linux/linux-6.6/drivers/input/misc/uinput.c
   63  unsigned char tail;    [member]
  662  have_event = udev->head != udev->tail;    in uinput_fetch_next_event()
  664  *event = udev->buff[udev->tail];    in uinput_fetch_next_event()
  665  udev->tail = (udev->tail + 1) % UINPUT_BUFFER_SIZE;    in uinput_fetch_next_event()
  707  else if (udev->head == udev->tail &&    in uinput_read()
  720  udev->head != udev->tail ||    in uinput_read()
  734  if (udev->head != udev->tail)    in uinput_poll()
/third_party/skia/third_party/externals/abseil-cpp/absl/types/internal/conformance_profile.h
  228  void addTestFailureImpl(const H& head, const T&... tail) {    in addTestFailureImpl() [argument]
  230  addTestFailureImpl(tail...);    in addTestFailureImpl()
  748  constexpr H MinEnum(H head, N next, T... tail) {    in MinEnum() [argument]
  750  ? (MinEnum)(head, tail...)    in MinEnum()
  751  : (MinEnum)(next, tail...);    in MinEnum()
  822  constexpr H MaxEnum(H head, N next, T... tail) {    in MaxEnum() [argument]
  824  ? (MaxEnum)(head, tail...)    in MaxEnum()
  825  : (MaxEnum)(next, tail...);    in MaxEnum()
/kernel/linux/linux-5.10/drivers/dma/qcom/bam_dma.c
  344  #define IS_BUSY(chan) (CIRC_SPACE(bchan->tail, bchan->head,\
  364  unsigned short tail; /* end of active descriptor entries */    [member]
  489  bchan->tail = 0;    in bam_chan_init_hw()
 1012  avail = CIRC_SPACE(bchan->tail, bchan->head,    in bam_start_dma()
 1043  if (bchan->tail + async_desc->xfer_len > MAX_DESCRIPTORS) {    in bam_start_dma()
 1044  u32 partial = MAX_DESCRIPTORS - bchan->tail;    in bam_start_dma()
 1046  memcpy(&fifo[bchan->tail], desc,    in bam_start_dma()
 1052  memcpy(&fifo[bchan->tail], desc,    in bam_start_dma()
 1057  bchan->tail += async_desc->xfer_len;    in bam_start_dma()
 1058  bchan->tail    in bam_start_dma()
  [all...]
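The bam_dma hits combine two common circular-FIFO idioms: free-space accounting with the CIRC_SPACE() macro from <linux/circ_buf.h>, and splitting a write into two memcpy() calls when it would run past the end of the ring. Below is a self-contained userspace sketch of that wrap-around copy; the FIFO size, descriptor layout, and function name are invented for the illustration, while the CIRC_* macros follow the kernel's definitions:

#include <stdint.h>
#include <string.h>

/* Same formulas as the kernel's <linux/circ_buf.h>; the size must be a power of two. */
#define CIRC_CNT(head, tail, size)   (((head) - (tail)) & ((size) - 1))
#define CIRC_SPACE(head, tail, size) CIRC_CNT((tail), ((head) + 1), (size))

#define FIFO_SIZE 256u               /* illustrative, not the BAM descriptor count */

struct desc { uint32_t addr, len; }; /* hypothetical descriptor layout */

/* Append `count` descriptors to a circular FIFO written by software at *tail and
 * drained by a consumer at `head`, splitting the copy at the wrap point the same
 * way bam_start_dma() does when tail + xfer_len exceeds MAX_DESCRIPTORS. */
static int fifo_write(struct desc *fifo, unsigned int *tail, unsigned int head,
                      const struct desc *src, unsigned int count)
{
    if (CIRC_SPACE(*tail, head, FIFO_SIZE) < count)
        return -1;                   /* consumer has not freed enough slots yet */

    if (*tail + count > FIFO_SIZE) {
        unsigned int partial = FIFO_SIZE - *tail;

        memcpy(&fifo[*tail], src, partial * sizeof(*src));
        memcpy(&fifo[0], src + partial, (count - partial) * sizeof(*src));
    } else {
        memcpy(&fifo[*tail], src, count * sizeof(*src));
    }
    *tail = (*tail + count) % FIFO_SIZE;
    return 0;
}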
/kernel/linux/linux-5.10/drivers/net/ethernet/intel/i40e/i40e_adminq.c
   20  /* set head and tail registers in our local struct */    in i40e_adminq_init_regs()
   22  hw->aq.asq.tail = I40E_VF_ATQT1;    in i40e_adminq_init_regs()
   27  hw->aq.arq.tail = I40E_VF_ARQT1;    in i40e_adminq_init_regs()
   33  hw->aq.asq.tail = I40E_PF_ATQT;    in i40e_adminq_init_regs()
   38  hw->aq.arq.tail = I40E_PF_ARQT;    in i40e_adminq_init_regs()
  276  wr32(hw, hw->aq.asq.tail, 0);    in i40e_config_asq_regs()
  305  wr32(hw, hw->aq.arq.tail, 0);    in i40e_config_arq_regs()
  313  /* Update tail in the HW to post pre-allocated buffers */    in i40e_config_arq_regs()
  314  wr32(hw, hw->aq.arq.tail, hw->aq.num_arq_entries - 1);    in i40e_config_arq_regs()
  461  wr32(hw, hw->aq.asq.tail,    in i40e_shutdown_asq()
  [all...]
/kernel/linux/linux-5.10/kernel/irq/irqdomain.c
 1256  struct irq_data *tail, *irqd, *irq_data;    in irq_domain_trim_hierarchy() [local]
 1259  tail = NULL;    in irq_domain_trim_hierarchy()
 1271  if (irqd->chip && tail)    in irq_domain_trim_hierarchy()
 1275  if (!irqd->chip && !tail)    in irq_domain_trim_hierarchy()
 1283  tail = irq_data;    in irq_domain_trim_hierarchy()
 1288  if (!tail)    in irq_domain_trim_hierarchy()
 1292  virq, tail->parent_data->domain->name);    in irq_domain_trim_hierarchy()
 1295  irqd = tail;    in irq_domain_trim_hierarchy()
 1296  tail = tail    in irq_domain_trim_hierarchy()
  [all...]
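Here tail marks the point after which the irq_data parent chain can be cut away. A generic sketch of that idea follows (walk a parent-linked chain, remember the deepest level still worth keeping, free everything behind it); the node layout and function name are made up, and the kernel's real function also validates the chain ordering and returns errors, which is omitted here:

#include <stdlib.h>

/* Hypothetical per-level data in a parent-linked chain, loosely modelled on
 * irq_data->parent_data. */
struct node {
    void *chip;                      /* non-NULL when this level programs hardware */
    struct node *parent;
};

/* Keep the deepest node that still has a chip, then free everything hanging
 * below it, in the spirit of irq_domain_trim_hierarchy(). */
static void trim_chain(struct node *head)
{
    struct node *tail = NULL, *n;

    for (n = head; n; n = n->parent)
        if (n->chip)
            tail = n;                /* last level that must be kept */

    if (!tail)
        return;                      /* no level has a chip: leave the chain alone */

    n = tail->parent;
    tail->parent = NULL;             /* cut the chain after the kept tail */
    while (n) {
        struct node *next = n->parent;
        free(n);
        n = next;
    }
}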
/kernel/linux/linux-6.6/drivers/net/ethernet/actions/owl-emac.c
  142  return CIRC_SPACE(ring->head, ring->tail, ring->size);    in owl_emac_ring_num_unused()
  158  ring->tail = owl_emac_ring_get_next(ring, ring->tail);    in owl_emac_ring_pop_tail()
  213  ring->tail = 0;    in owl_emac_ring_prepare_rx()
  238  ring->tail = 0;    in owl_emac_ring_prepare_tx()
  640  tx_tail = ring->tail;    in owl_emac_tx_complete_tail()
  704  while (ring->tail != ring->head) {    in owl_emac_tx_complete()
  712  * At this point, when TX queue is full, the tail descriptor has the    in owl_emac_tx_complete()
  715  * queue having the OWN bit cleared, we can safely assume the tail    in owl_emac_tx_complete()
  722  tx_next = ring->tail;    in owl_emac_tx_complete()
  [all...]
/kernel/linux/linux-6.6/drivers/net/ethernet/intel/i40e/i40e_adminq.c
   19  /* set head and tail registers in our local struct */    in i40e_adminq_init_regs()
   21  hw->aq.asq.tail = I40E_VF_ATQT1;    in i40e_adminq_init_regs()
   26  hw->aq.arq.tail = I40E_VF_ARQT1;    in i40e_adminq_init_regs()
   32  hw->aq.asq.tail = I40E_PF_ATQT;    in i40e_adminq_init_regs()
   37  hw->aq.arq.tail = I40E_PF_ARQT;    in i40e_adminq_init_regs()
  275  wr32(hw, hw->aq.asq.tail, 0);    in i40e_config_asq_regs()
  304  wr32(hw, hw->aq.arq.tail, 0);    in i40e_config_arq_regs()
  312  /* Update tail in the HW to post pre-allocated buffers */    in i40e_config_arq_regs()
  313  wr32(hw, hw->aq.arq.tail, hw->aq.num_arq_entries - 1);    in i40e_config_arq_regs()
  460  wr32(hw, hw->aq.asq.tail,    in i40e_shutdown_asq()
  [all...]
/kernel/linux/linux-6.6/kernel/irq/irqdomain.c
 1264  struct irq_data *tail, *irqd, *irq_data;    in irq_domain_trim_hierarchy() [local]
 1267  tail = NULL;    in irq_domain_trim_hierarchy()
 1279  if (irqd->chip && tail)    in irq_domain_trim_hierarchy()
 1283  if (!irqd->chip && !tail)    in irq_domain_trim_hierarchy()
 1291  tail = irq_data;    in irq_domain_trim_hierarchy()
 1296  if (!tail)    in irq_domain_trim_hierarchy()
 1300  virq, tail->parent_data->domain->name);    in irq_domain_trim_hierarchy()
 1303  irqd = tail;    in irq_domain_trim_hierarchy()
 1304  tail = tail    in irq_domain_trim_hierarchy()
  [all...]
/kernel/linux/linux-6.6/drivers/dma/qcom/bam_dma.c
  344  #define IS_BUSY(chan) (CIRC_SPACE(bchan->tail, bchan->head,\
  364  unsigned short tail; /* end of active descriptor entries */    [member]
  529  bchan->tail = 0;    in bam_chan_init_hw()
 1050  avail = CIRC_SPACE(bchan->tail, bchan->head,    in bam_start_dma()
 1081  if (bchan->tail + async_desc->xfer_len > MAX_DESCRIPTORS) {    in bam_start_dma()
 1082  u32 partial = MAX_DESCRIPTORS - bchan->tail;    in bam_start_dma()
 1084  memcpy(&fifo[bchan->tail], desc,    in bam_start_dma()
 1090  memcpy(&fifo[bchan->tail], desc,    in bam_start_dma()
 1095  bchan->tail += async_desc->xfer_len;    in bam_start_dma()
 1096  bchan->tail    in bam_start_dma()
  [all...]
/third_party/mesa3d/src/compiler/spirv/vtn_variables.c
  315  nir_deref_instr *tail;    in vtn_pointer_dereference() [local]
  317  tail = base->deref;    in vtn_pointer_dereference()
  413  tail = nir_build_deref_cast(&b->nb, desc, nir_mode,    in vtn_pointer_dereference()
  421  tail = nir_build_deref_cast(&b->nb, nir_load_shader_record_ptr(&b->nb),    in vtn_pointer_dereference()
  428  tail = nir_build_deref_var(&b->nb, base->var->var);    in vtn_pointer_dereference()
  430  tail->dest.ssa.num_components =    in vtn_pointer_dereference()
  432  tail->dest.ssa.bit_size = glsl_get_bit_size(base->ptr_type->type);    in vtn_pointer_dereference()
  440  tail = nir_build_deref_cast(&b->nb, &tail->dest.ssa, tail    in vtn_pointer_dereference()
  [all...]
/kernel/linux/linux-5.10/arch/arm64/crypto/aes-neonbs-glue.c
  324  int tail = req->cryptlen % (8 * AES_BLOCK_SIZE);    in __xts_crypt() [local]
  336  /* ensure that the cts tail is covered by a single step */    in __xts_crypt()
  337  if (unlikely(tail > 0 && tail < AES_BLOCK_SIZE)) {    in __xts_crypt()
  350  tail = 0;    in __xts_crypt()
  391  if (err || likely(!tail))    in __xts_crypt()
  399  skcipher_request_set_crypt(req, src, dst, AES_BLOCK_SIZE + tail,    in __xts_crypt()
/kernel/linux/linux-5.10/arch/sparc/kernel/signal_64.c
  355  void __user *tail;    in setup_rt_frame() [local]
  380  tail = (sf + 1);    in setup_rt_frame()
  386  __siginfo_fpu_t __user *fpu_save = tail;    in setup_rt_frame()
  387  tail += sizeof(__siginfo_fpu_t);    in setup_rt_frame()
  394  __siginfo_rwin_t __user *rwin_save = tail;    in setup_rt_frame()
  395  tail += sizeof(__siginfo_rwin_t);    in setup_rt_frame()
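In setup_rt_frame(), tail is a bump pointer: optional save areas are carved out of the space immediately after the signal frame by advancing tail past each block that is actually present. A standalone sketch of the same layout technique (all struct and function names are placeholders rather than the SPARC definitions):

#include <stddef.h>
#include <stdint.h>

/* Hypothetical fixed header followed by optional records. */
struct frame_header { uint64_t regs[16]; };
struct fpu_block    { uint64_t fpregs[32]; };
struct rwin_block   { uint64_t window[16]; };

struct frame_layout {
    struct fpu_block  *fpu;          /* NULL when the block is not present */
    struct rwin_block *rwin;
};

/* Lay out optional blocks by bumping a tail pointer past each one in turn;
 * returns the first byte after the frame, useful for its total size. */
static void *layout_frame(struct frame_header *hdr, int want_fpu, int want_rwin,
                          struct frame_layout *out)
{
    void *tail = hdr + 1;            /* first byte after the fixed header */

    out->fpu = NULL;
    out->rwin = NULL;
    if (want_fpu) {
        out->fpu = tail;
        tail = (char *)tail + sizeof(struct fpu_block);
    }
    if (want_rwin) {
        out->rwin = tail;
        tail = (char *)tail + sizeof(struct rwin_block);
    }
    return tail;
}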
/kernel/linux/linux-6.6/arch/arm64/crypto/aes-neonbs-glue.c
  282  int tail = req->cryptlen % (8 * AES_BLOCK_SIZE);    in __xts_crypt() [local]
  294  /* ensure that the cts tail is covered by a single step */    in __xts_crypt()
  295  if (unlikely(tail > 0 && tail < AES_BLOCK_SIZE)) {    in __xts_crypt()
  308  tail = 0;    in __xts_crypt()
  351  if (err || likely(!tail))    in __xts_crypt()
  359  skcipher_request_set_crypt(req, src, dst, AES_BLOCK_SIZE + tail,    in __xts_crypt()
/kernel/linux/linux-6.6/arch/sparc/kernel/signal_64.c
  355  void __user *tail;    in setup_rt_frame() [local]
  380  tail = (sf + 1);    in setup_rt_frame()
  386  __siginfo_fpu_t __user *fpu_save = tail;    in setup_rt_frame()
  387  tail += sizeof(__siginfo_fpu_t);    in setup_rt_frame()
  394  __siginfo_rwin_t __user *rwin_save = tail;    in setup_rt_frame()
  395  tail += sizeof(__siginfo_rwin_t);    in setup_rt_frame()
/kernel/linux/linux-5.10/kernel/events/ring_buffer.c
  102  * from the tail WRITE.    in perf_output_put_handle()
  138  ring_buffer_has_space(unsigned long head, unsigned long tail,    in ring_buffer_has_space() [argument]
  143  return CIRC_SPACE(head, tail, data_size) >= size;    in ring_buffer_has_space()
  145  return CIRC_SPACE(tail, head, data_size) >= size;    in ring_buffer_has_space()
  155  unsigned long tail, offset, head;    in __perf_output_begin() [local]
  195  tail = READ_ONCE(rb->user_page->data_tail);    in __perf_output_begin()
  198  if (unlikely(!ring_buffer_has_space(head, tail,    in __perf_output_begin()
  206  * @tail load above from the data stores below. Since the @tail    in __perf_output_begin()
  210  * after reading the data and before storing the new tail    in __perf_output_begin()
  [all...]
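The perf ring buffer loads the consumer's data_tail before writing new records and relies on barrier pairing so that the tail load is not reordered against the data stores. A compact userspace analogue of the producer side using C11 atomics is sketched below; the size, names, and byte-wise copy are illustrative, and the kernel uses explicit barriers rather than _Atomic types:

#include <stdatomic.h>
#include <stdint.h>

/* Illustrative single-producer, single-consumer byte ring with the same
 * ordering contract: the consumer publishes how far it has read with a
 * release store to `tail`, and the producer must observe that store
 * (acquire) before it reuses the space. */
#define DATA_SIZE 4096u              /* must be a power of two */

struct ring {
    _Atomic uint64_t head;           /* written by the producer */
    _Atomic uint64_t tail;           /* written by the consumer */
    unsigned char data[DATA_SIZE];
};

static int produce(struct ring *r, const void *buf, unsigned int len)
{
    uint64_t head = atomic_load_explicit(&r->head, memory_order_relaxed);
    uint64_t tail = atomic_load_explicit(&r->tail, memory_order_acquire);
    uint64_t used = head - tail;     /* free-running counters, so this never wraps oddly */

    if (len > DATA_SIZE - used)
        return -1;                   /* would overwrite data the consumer has not read */

    for (unsigned int i = 0; i < len; i++)
        r->data[(head + i) & (DATA_SIZE - 1)] = ((const unsigned char *)buf)[i];

    /* Publish the data before moving head, mirroring perf's "write data,
     * then update head" ordering. */
    atomic_store_explicit(&r->head, head + len, memory_order_release);
    return 0;
}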
/kernel/linux/linux-6.6/drivers/tty/serial/men_z135_uart.c
  303  int tail;    in men_z135_handle_tx() [local]
  349  tail = xmit->tail & (UART_XMIT_SIZE - 1);    in men_z135_handle_tx()
  351  s = ((head >= tail) ? head : UART_XMIT_SIZE) - tail;    in men_z135_handle_tx()
  354  memcpy_toio(port->membase + MEN_Z135_TX_RAM, &xmit->buf[xmit->tail], n);    in men_z135_handle_tx()
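Line 351 above is the standard way to drain a circular buffer in contiguous pieces: the run starting at tail ends either at head (no wrap yet) or at the end of the buffer (the data wraps past index 0). A small sketch of that computation, with an invented buffer size and function name; as in the serial core, head == tail is treated as empty:

#include <stddef.h>
#include <string.h>

#define XMIT_SIZE 4096u              /* a power of two, like UART_XMIT_SIZE */

/* Copy the largest contiguous run out of a circular transmit buffer, stopping
 * either at head (no wrap) or at the end of the buffer (data wraps past index
 * 0).  Returns the number of bytes copied; *tailp is advanced past them. */
static size_t copy_contig(const unsigned char *buf, unsigned int head,
                          unsigned int *tailp, unsigned char *dst, size_t max)
{
    unsigned int tail = *tailp & (XMIT_SIZE - 1);
    unsigned int hd = head & (XMIT_SIZE - 1);
    size_t n = ((hd >= tail) ? hd : XMIT_SIZE) - tail;

    if (n > max)
        n = max;
    memcpy(dst, &buf[tail], n);
    *tailp = (tail + (unsigned int)n) & (XMIT_SIZE - 1);
    return n;
}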
/kernel/linux/linux-6.6/kernel/events/ring_buffer.c
  102  * from the tail WRITE.    in perf_output_put_handle()
  138  ring_buffer_has_space(unsigned long head, unsigned long tail,    in ring_buffer_has_space() [argument]
  143  return CIRC_SPACE(head, tail, data_size) >= size;    in ring_buffer_has_space()
  145  return CIRC_SPACE(tail, head, data_size) >= size;    in ring_buffer_has_space()
  155  unsigned long tail, offset, head;    in __perf_output_begin() [local]
  197  tail = READ_ONCE(rb->user_page->data_tail);    in __perf_output_begin()
  199  if (unlikely(!ring_buffer_has_space(head, tail,    in __perf_output_begin()
  207  * @tail load above from the data stores below. Since the @tail    in __perf_output_begin()
  211  * after reading the data and before storing the new tail    in __perf_output_begin()
  [all...]
/kernel/linux/linux-6.6/crypto/chacha20poly1305.c
   36  /* tail data with AD/ciphertext lengths */
   40  } tail;    [member]
  177  preq->tail.assoclen = cpu_to_le64(rctx->assoclen);    in poly_tail()
  178  preq->tail.cryptlen = cpu_to_le64(rctx->cryptlen);    in poly_tail()
  179  sg_init_one(preq->src, &preq->tail, sizeof(preq->tail));    in poly_tail()
  185  rctx->tag, sizeof(preq->tail));    in poly_tail()