Lines Matching defs:offset
75 intptr_t offset) {
76 if (is_int20(offset)) {
77 __ lay(r14, MemOperand(entry_address, offset));
79 __ AddS64(r14, entry_address, Operand(offset));
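The snippet above (file lines 76-79) picks between the z/Architecture lay instruction, whose displacement is a signed 20-bit immediate, and a full 64-bit AddS64 when the offset is out of that range. A minimal sketch of the range check in plain C++, not the V8 helper itself:

#include <cstdint>

// Sketch of an is_int20()-style predicate: true when `value` fits in a signed
// 20-bit field, the displacement range of z/Architecture long-displacement
// instructions such as LAY (-2^19 .. 2^19 - 1).
constexpr bool FitsInSignedBits(std::intptr_t value, int bits) {
  return value >= -(std::intptr_t{1} << (bits - 1)) &&
         value < (std::intptr_t{1} << (bits - 1));
}

static_assert(FitsInSignedBits(524287, 20), "largest 20-bit displacement");
static_assert(!FitsInSignedBits(524288, 20), "out of range -> needs the AddS64 path");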
169 // Compute baseline pc for bytecode offset.
183 // not a valid bytecode offset.
231 // If the bytecode offset is kFunctionEntryOffset, get the start address of
285 // Load the OSR entrypoint offset from the deoptimization data.
546 // Store offset of return address for deoptimizer.
899 // Store the current pc as the handler offset. It's used later to create the
1239 // Advance the current bytecode offset. This simulates what all bytecode
1242 // the bytecode offset if the current bytecode is a JumpLoop, instead just
1252 // The bytecode offset value will be increased by one in wide and extra wide
1312 // Otherwise, load the size of the current bytecode and advance the offset.
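File lines 1239-1312 come from the helper that advances the interpreter's bytecode offset. The comments name three cases: wide and extra-wide prefixes bump the offset by one first, a JumpLoop is not advanced so it re-executes and jumps to the correct target, and everything else adds the size of the current bytecode. A hedged C++ sketch of that control flow, with made-up bytecode names and sizes standing in for V8's real tables:

// Illustrative bytecode set and sizes; V8's real enum and size tables are only
// approximated here, so treat this as a sketch of the control flow.
enum class Bytecode { kWide, kExtraWide, kJumpLoop, kAdd, kReturn };

constexpr int SizeOf(Bytecode b) {  // stand-in for the real per-bytecode size table
  return b == Bytecode::kJumpLoop ? 4 : b == Bytecode::kAdd ? 3 : 1;
}

// Next offset to dispatch at: step over a wide/extra-wide prefix ("increased by
// one"), keep the original offset for JumpLoop so it is re-executed, and
// otherwise add the size of the current bytecode.
int AdvanceBytecodeOffset(const Bytecode* array, int original_offset) {
  int offset = original_offset;
  Bytecode current = array[offset];
  if (current == Bytecode::kWide || current == Bytecode::kExtraWide) {
    ++offset;
    current = array[offset];
  }
  if (current == Bytecode::kJumpLoop) return original_offset;
  return offset + SizeOf(current);
}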
1433 // store the bytecode offset.
1582 // Load the initial bytecode offset.
1586 // Push bytecode array and Smi tagged bytecode array offset.
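File lines 1582-1586 load the initial bytecode offset and push it Smi-tagged alongside the bytecode array; file line 1988 later writes an updated offset back in the same form. A sketch of that tagging, assuming the plain 64-bit Smi encoding in which the 32-bit payload sits in the upper half of the word; the shift differs under pointer compression:

#include <cstdint>

// Assumed encoding for illustration only: payload in the upper 32 bits, low
// bits (including the tag bit) zero.
constexpr int kSmiShift = 32;
constexpr std::int64_t SmiTag(std::int64_t value) { return value << kSmiShift; }
constexpr std::int64_t SmiUntag(std::int64_t smi) { return smi >> kSmiShift; }

static_assert(SmiUntag(SmiTag(42)) == 42, "a bytecode offset round-trips");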
1641 // handler at the current bytecode offset.
1660 // Get bytecode array and bytecode offset from the stack frame.
1682 // Modify the bytecode offset in the stack to be kFunctionEntryBytecodeOffset
1691 // After the call, restore the bytecode array, bytecode offset and accumulator
1692 // registers again. Also, restore the bytecode offset in the stack to its
1938 // Get the target bytecode offset from the frame.
1964 // Get bytecode array and bytecode offset from the stack frame.
1988 // Convert new bytecode offset to a Smi and save in the stackframe.
1998 // not a valid bytecode offset. Detect this case and advance to the first
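File lines 1682-1692 and 1988-1998 both revolve around a function-entry sentinel that is deliberately not a valid bytecode offset and must be detected and replaced with the offset of the first real bytecode before dispatch. A minimal sketch of that check; the constant values below are illustrative, not taken from this file:

constexpr int kFunctionEntryBytecodeOffset = -1;  // sentinel, not a valid offset
constexpr int kFirstBytecodeOffset = 0;           // placeholder for the real header-relative offset

// Detect the function-entry sentinel and advance to the first real bytecode
// before dispatching, as the comments above describe.
constexpr int NormalizeBytecodeOffset(int offset) {
  return offset == kFunctionEntryBytecodeOffset ? kFirstBytecodeOffset : offset;
}

static_assert(NormalizeBytecodeOffset(-1) == kFirstBytecodeOffset, "entry sentinel handled");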
2922 // The runtime function returns the jump table slot offset as a Smi. Use
3645 int offset = code * kDoubleSize;
3646 __ StoreF64(dreg, MemOperand(sp, offset));
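File lines 3645-3646 store each saved double register to a stack slot addressed by its register code times kDoubleSize. The offset arithmetic as a tiny self-contained sketch, the 8-byte double size being the only assumption:

constexpr int kDoubleSize = 8;  // bytes per saved double register

// Stack-pointer-relative slot for the double register with the given code:
// the save area lays the registers out back to back in code order.
constexpr int DoubleRegisterSlotOffset(int code) { return code * kDoubleSize; }

static_assert(DoubleRegisterSlotOffset(0) == 0, "d0 sits at the base of the area");
static_assert(DoubleRegisterSlotOffset(3) == 24, "d3 is three slots up");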
3705 int offset =
3708 __ StoreU64(r4, MemOperand(r3, offset));
3820 int offset =
3823 __ LoadU64(ToRegister(i), MemOperand(r1, offset));
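File lines 3705-3708 and 3820-3823 compute per-register offsets into a saved-register area; the right-hand side of each `int offset =` continues on a following, non-matching source line, so the exact expression is not shown here. The usual shape is a fixed base for the register block plus the register code times the pointer size. A hedged sketch with a purely illustrative base:

constexpr int kSystemPointerSize = 8;   // bytes per general-purpose register
constexpr int kRegistersAreaOffset = 0; // hypothetical base of the register array

// Byte offset of general-purpose register `code` inside a block that stores
// the registers contiguously.
constexpr int RegisterSlotOffset(int code) {
  return kRegistersAreaOffset + code * kSystemPointerSize;
}

static_assert(RegisterSlotOffset(4) == 4 * kSystemPointerSize, "r4's slot");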