Lines Matching refs:dst

40 #define dst a0
47 * memcpy copies len bytes from src to dst and sets v0 to dst.
49 * - src and dst don't overlap
51 * - dst is writable
54 * __copy_user copies up to len bytes from src to dst and sets a2 (len) to
56 * __copy_user assumes that src and dst don't overlap, and that the call is
61 * - dst is writable (no exceptions when writing dst)
79 * 3- (dst - src) == (dst_entry - src_entry),
84 * (3) is met by not doing loads between a pair of increments of dst and src
281 * Note: dst & src may be unaligned, len may be 0
295 PREFD( 1, 0(dst) )
297 and t1, dst, ADDRMASK
299 PREFD( 1, 1*32(dst) )
303 PREFD( 1, 2*32(dst) )
314 * src and dst are aligned; need to compute rem
321 PREFD( 1, 3*32(dst) )
332 STORE(t0, UNIT(0)(dst), .Ls_exc_p8u\@)
333 STORE(t1, UNIT(1)(dst), .Ls_exc_p7u\@)
337 ADD dst, dst, 8*NBYTES
338 STORE(t2, UNIT(-6)(dst), .Ls_exc_p6u\@)
339 STORE(t3, UNIT(-5)(dst), .Ls_exc_p5u\@)
340 STORE(t4, UNIT(-4)(dst), .Ls_exc_p4u\@)
341 STORE(t7, UNIT(-3)(dst), .Ls_exc_p3u\@)
342 STORE(t0, UNIT(-2)(dst), .Ls_exc_p2u\@)
343 STORE(t1, UNIT(-1)(dst), .Ls_exc_p1u\@)
345 PREFD( 1, 8*32(dst) )
367 STORE(t0, UNIT(0)(dst), .Ls_exc_p4u\@)
368 STORE(t1, UNIT(1)(dst), .Ls_exc_p3u\@)
369 STORE(t2, UNIT(2)(dst), .Ls_exc_p2u\@)
370 STORE(t3, UNIT(3)(dst), .Ls_exc_p1u\@)
372 ADD dst, dst, 4*NBYTES
386 STORE(t0, 0(dst), .Ls_exc_p1u\@)
388 ADD dst, dst, NBYTES
394 * src and dst are aligned, need to copy rem bytes (rem < NBYTES)
396 * mispredicts. Can't do an explicit LOAD dst,mask,or,STORE
397 * because can't assume read-access to dst. Instead, use
398 * STREST dst, which doesn't require read access to dst.
406 ADD t1, dst, len # t1 is just past last byte of dst
417 * dst is unaligned
419 * t1 = dst & ADDRMASK; t1 > 0
422 * Copy enough bytes to align dst
423 * Set match = (src and dst have same alignment)
432 STFIRST(t3, FIRST(0)(dst), .Ls_exc\@)
435 ADD dst, dst, t2
444 PREFD( 1, 3*32(dst) )
467 STORE(t0, UNIT(0)(dst), .Ls_exc_p4u\@)
468 STORE(t1, UNIT(1)(dst), .Ls_exc_p3u\@)
469 STORE(t2, UNIT(2)(dst), .Ls_exc_p2u\@)
470 STORE(t3, UNIT(3)(dst), .Ls_exc_p1u\@)
471 PREFD( 1, 9*32(dst) ) # 1 is PREF_STORE (not streamed)
473 ADD dst, dst, 4*NBYTES
488 STORE(t0, 0(dst), .Ls_exc_p1u\@)
490 ADD dst, dst, NBYTES
505 STOREB(t0, N(dst), .Ls_exc_p1\@)
518 STOREB(t0, NBYTES-2(dst), .Ls_exc_p1\@)
536 ADD dst, dst, 8
561 sb t1, 0(dst) # can't fault -- we're copy_from_user
563 ADD dst, dst, 1
607 sltu t0, a1, t0 # dst + len <= src -> memcpy
608 sltu t1, a0, t1 # dst >= src + len -> memcpy
616 LEAF(__rmemcpy) /* a0=dst a1=src a2=len */
618 beqz t0, .Lr_end_bytes_up # src >= dst
620 ADD a0, a2 # dst = dst + len
657 * memcpy sets v0 to dst.
660 LEAF(memcpy) /* a0=dst a1=src a2=len */
662 move v0, dst /* return value */