Lines Matching refs:adjust
13146 // 6. adjust pointers
13415 // similar magic number, but without the continuation-byte adjustment and with the halfwords swapped
13719 // 3. adjust pointers
13741 // 3. adjust pointers
13834 // 3. adjust pointers
13844 // 3. adjust pointers
13893 // 6. adjust pointers
14088 // 3. adjust pointers
14098 // 3. adjust pointers
14147 // 6. adjust pointers
14486 // 3. adjust pointers
14512 // 3. adjust pointers
14554 // 3. adjust pointers
14602 // 6. adjust pointers
14787 // 3. adjust pointers
14835 // 6. adjust pointers
17941 __mmask64 mprocessed = (tail == SIMDUTF_FULL) ? _pdep_u64(0xFFFFFFFF, mend) : _pdep_u64(0xFFFFFFFF, _kand_mask64(mend, b)); // we adjust mend at the end of the output.
18014 __mmask64 mprocessed = (tail == SIMDUTF_FULL) ? _pdep_u64(Mout, mend) : _pdep_u64(Mout, _kand_mask64(mend, b)); // we adjust mend at the end of the output.
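
The two matches above use _pdep_u64 to scatter a run of low bits into the positions selected by a mask, which is how the kernel records which input bytes were actually consumed before mend is adjusted at the end. A minimal standalone sketch of the intrinsic's behavior (requires BMI2; the values are illustrative, not taken from the listed code):

    #include <immintrin.h>
    #include <stdint.h>
    #include <stdio.h>

    int main(void) {
      // _pdep_u64 deposits the low bits of its first argument into the bit
      // positions selected by the mask, starting from the lowest mask bit.
      uint64_t mask = 0xF0F0;                 // allowed landing positions
      uint64_t bits = 0x3;                    // two bits to deposit
      uint64_t out  = _pdep_u64(bits, mask);  // -> 0x30
      printf("%#llx\n", (unsigned long long)out);
      return 0;
    }

Compile for a BMI2-capable target (e.g. -mbmi2 with GCC/Clang).
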
19457 size_t adjust = 0;
19555 adjust = (int)inlen - 31;
19614 outbuf -= adjust;
19622 adjust = inlen - 31;
19626 *outlen = (outbuf - outbuf_orig) + adjust;
19627 return ((inbuf - inbuf_orig) + adjust);
19848 // 3. adjust pointers
19900 // 6. adjust pointers
20093 // 3. adjust pointers
20145 // 6. adjust pointers
20512 We adjust for the bytes that have their two most significant bits set. This takes care of the first 32 bytes, assuming we interleaved the bytes. */
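
The adjustment above concerns UTF-8 continuation bytes, whose two most significant bits are 10: subtracting their count from the byte count leaves the number of code points. A scalar sketch of the same adjustment (my own illustration, not the SIMD code from the listing):

    #include <stddef.h>
    #include <stdint.h>

    // Count UTF-8 code points by subtracting continuation bytes, i.e. bytes
    // whose top two bits are 10 ((byte & 0xC0) == 0x80).
    static size_t utf8_codepoint_count(const uint8_t *data, size_t len) {
      size_t continuation = 0;
      for (size_t i = 0; i < len; i++) {
        if ((data[i] & 0xC0) == 0x80) {
          continuation++;
        }
      }
      return len - continuation;
    }
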
22507 // 2. adjust pointers
22565 // 6. adjust pointers
23105 // 3. adjust pointers
23158 // 6. adjust pointers
23348 // 3. adjust pointers
23401 // 6. adjust pointers
23867 // 3. adjust pointers
23919 // 6. adjust pointers
24112 // 3. adjust pointers
24164 // 6. adjust pointers
28472 // 6. adjust pointers
29003 // slow path writes the useful 8-15 bytes twice (it eagerly writes 16 bytes and then adjusts the pointer)
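
The match above describes a common SIMD tail technique: the slow path stores a full 16-byte register even when only 8-15 bytes are useful, then advances the output pointer by the useful count so the next store overwrites the excess. A minimal sketch of the idea (hypothetical helper, assumes the output buffer has slack for the full-width store):

    #include <emmintrin.h>
    #include <stdint.h>

    // Store all 16 bytes of 'data', but keep only 'useful_bytes' of them by
    // advancing the pointer that far; the next store overwrites the rest.
    static uint8_t *store_partial_16(uint8_t *out, __m128i data, int useful_bytes) {
      _mm_storeu_si128((__m128i *)out, data);
      return out + useful_bytes;
    }
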
29622 // 3. adjust pointers
29632 // 3. adjust pointers
29826 // 3. adjust pointers
29836 // 3. adjust pointers
30306 // 3. adjust pointers
30319 // 3. adjust pointers
30373 // 6. adjust pointers
30562 // 3. adjust pointers
30579 // 3. adjust pointers
30629 // 6. adjust pointers