Lines Matching refs:rsp
74 * and does not change rsp.
100 movq %rsp, PER_CPU_VAR(cpu_tss_rw + TSS_sp2)
101 SWITCH_TO_KERNEL_CR3 scratch_reg=%rsp
102 movq PER_CPU_VAR(cpu_current_top_of_stack), %rsp
119 movq %rsp, %rsi
132 movq RCX(%rsp), %rcx
133 movq RIP(%rsp), %r11
161 cmpq $__USER_CS, CS(%rsp) /* CS must match SYSRET */
164 movq R11(%rsp), %r11
165 cmpq %r11, EFLAGS(%rsp) /* R11 == RFLAGS */
191 cmpq $__USER_DS, SS(%rsp) /* SS must match SYSRET */
206 movq %rsp, %rdi
207 movq PER_CPU_VAR(cpu_tss_rw + TSS_sp0), %rsp
222 popq %rsp
244 movq %rsp, TASK_threadsp(%rdi)
245 movq TASK_threadsp(%rsi), %rsp
291 movq %rsp, %rdi
305 movq $0, RAX(%rsp)
332 movq %rsp, %rdi /* pt_regs pointer into 1st argument*/
335 movq ORIG_RAX(%rsp), %rsi /* get error code into 2nd argument*/
336 movq $-1, ORIG_RAX(%rsp) /* no syscall to restart */
368 testb $3, CS-ORIG_RAX(%rsp)
371 pushq 5*8(%rsp)
432 testb $3, CS-ORIG_RAX(%rsp)
440 movq %rsp, %rdi /* pt_regs pointer */
485 testb $3, CS-ORIG_RAX(%rsp)
502 movq %rsp, %rdi /* pt_regs pointer */
504 movq %rax, %rsp /* Switch to new stack */
510 movq ORIG_RAX(%rsp), %rsi /* get error code into 2nd argument*/
511 movq $-1, ORIG_RAX(%rsp) /* no syscall to restart */
513 movq %rsp, %rdi /* pt_regs pointer */
547 movq %rsp, %rdi /* pt_regs pointer into first argument */
548 movq ORIG_RAX(%rsp), %rsi /* get error code into 2nd argument*/
549 movq $-1, ORIG_RAX(%rsp) /* no syscall to restart */
578 testb $3, CS(%rsp)
593 movq %rsp, %rdi
594 movq PER_CPU_VAR(cpu_tss_rw + TSS_sp0), %rsp
624 testb $3, CS(%rsp)
630 addq $8, %rsp /* skip regs->orig_ax */
644 testb $4, (SS-RIP)(%rsp)
687 movq (1*8)(%rsp), %rax /* user RIP */
689 movq (2*8)(%rsp), %rax /* user CS */
691 movq (3*8)(%rsp), %rax /* user RFLAGS */
693 movq (5*8)(%rsp), %rax /* user SS */
695 movq (4*8)(%rsp), %rax /* user RSP */
715 movq %rax, %rsp
782 mov %rsp, %rbp
788 mov %rsp, (%rdi)
789 mov %rdi, %rsp
833 movq %rdi, %rsp /* we don't return, adjust the stack frame */
857 cmpw %cx, 0x10(%rsp)
860 cmpw %cx, 0x18(%rsp)
863 cmpw %cx, 0x20(%rsp)
866 cmpw %cx, 0x28(%rsp)
869 movq (%rsp), %rcx
870 movq 8(%rsp), %r11
871 addq $0x30, %rsp
876 movq (%rsp), %rcx
877 movq 8(%rsp), %r11
878 addq $0x30, %rsp
1041 testb $3, CS+8(%rsp)
1059 movq %rsp, %rdi /* arg0 = pt_regs pointer */
1061 movq %rax, %rsp /* switch stack */
1074 cmpq %rcx, RIP+8(%rsp)
1077 cmpq %rax, RIP+8(%rsp)
1079 cmpq $.Lgs_change, RIP+8(%rsp)
1100 movq %rcx, RIP+8(%rsp)
1118 mov %rsp, %rdi
1120 mov %rax, %rsp
1127 testb $3, CS(%rsp)
1186 testb $3, CS-RIP+8(%rsp)
1204 movq %rsp, %rdx
1205 movq PER_CPU_VAR(cpu_current_top_of_stack), %rsp
1208 pushq 4*8(%rdx) /* pt_regs->rsp */
1226 movq %rsp, %rdi
1290 cmpq 8(%rsp), %rdx
1293 cmpq 8(%rsp), %rdx
1302 cmpl $1, -8(%rsp)
1317 lea 6*8(%rsp), %rdx
1318 /* Compare the NMI stack (rdx) with the stack we came from (4*8(%rsp)) */
1319 cmpq %rdx, 4*8(%rsp)
1324 cmpq %rdx, 4*8(%rsp)
1330 testb $(X86_EFLAGS_DF >> 8), (3*8 + 1)(%rsp)
1340 subq $8, %rsp
1341 leaq -10*8(%rsp), %rdx
1349 addq $(6*8), %rsp
1359 movq (%rsp), %rdx
1365 subq $(5*8), %rsp
1369 pushq 11*8(%rsp)
1381 pushq %rsp /* RSP (minus 8 because of the previous push) */
1382 addq $8, (%rsp) /* Fix up RSP */
1407 movq $1, 10*8(%rsp) /* Set "NMI executing". */
1414 addq $(10*8), %rsp
1416 pushq -6*8(%rsp)
1418 subq $(5*8), %rsp
1438 movq %rsp, %rdi
1476 addq $6*8, %rsp
1488 movq $0, 5*8(%rsp) /* clear "NMI executing" */
1518 leaq -PTREGS_SIZE(%rax), %rsp
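The pair of instructions at lines 244-245 above (movq %rsp, TASK_threadsp(%rdi) / movq TASK_threadsp(%rsi), %rsp) is the core stack-swap of the kernel's context switch: the outgoing task's stack pointer is saved into its task struct and the incoming task's saved stack pointer is loaded, after which execution continues on the new task's stack. Below is a minimal, hypothetical GNU-as sketch of that same technique; the stack_ptr offset and the switch_stacks routine are illustrative assumptions, not the kernel's TASK_threadsp offset or its __switch_to_asm.

	/* Hypothetical sketch of the rsp-swap pattern, not kernel code.      */
	/* %rdi = outgoing task struct, %rsi = incoming task struct           */
	/* (System V AMD64 argument registers); stack_ptr is an assumed       */
	/* offset of the saved-stack-pointer field inside each task struct.   */
	/* Assumes the incoming task was itself suspended by a call to        */
	/* switch_stacks, so the top of its stack holds its resume address.   */

		.set	stack_ptr, 0			/* assumed field offset */

		.text
		.globl	switch_stacks
	switch_stacks:
		movq	%rsp, stack_ptr(%rdi)		/* remember where the old task stopped */
		movq	stack_ptr(%rsi), %rsp		/* resume on the new task's stack */
		ret					/* returns to the address on the NEW
							   stack, not to this call's caller */

Because each task keeps its return address (and, in the real __switch_to_asm, its callee-saved registers, pushed before the save and popped after the load) on its own stack, swapping %rsp alone is enough to transfer control between tasks.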