Lines Matching defs:code

2 // Use of this source code is governed by a BSD-style license that can be
11 #include "src/codegen/code-factory.h"
550 // Invoke the code.
557 // context and the function left on the stack by the code
593 static void AssertCodeIsBaseline(MacroAssembler* masm, Register code,
595 DCHECK(!AreAliased(code, scratch));
596 // Verify that the code kind is baseline code via the CodeKind.
597 __ mov(scratch, FieldOperand(code, Code::kFlagsOffset));
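The DCHECK at lines 595-597 reads the Code object's flags word and verifies that its kind bits say "baseline". A minimal standalone sketch of that kind check, assuming an invented bitfield layout (the shift, mask, and enum values below are not V8's real Code::kFlagsOffset encoding):

    #include <cassert>
    #include <cstdint>

    // Illustrative stand-ins for V8's CodeKind and flags layout.
    enum class CodeKind : uint32_t { kInterpreted = 0, kBaseline = 1, kOptimized = 2 };
    constexpr uint32_t kKindShiftAssumed = 0;   // assumed bit position of the kind field
    constexpr uint32_t kKindMaskAssumed = 0xF;  // assumed width of the kind field

    CodeKind DecodeKind(uint32_t flags) {
      return static_cast<CodeKind>((flags >> kKindShiftAssumed) & kKindMaskAssumed);
    }

    // Analogue of AssertCodeIsBaseline: decode the kind from the flags word
    // and assert that it is baseline code.
    void AssertCodeIsBaseline(uint32_t flags) {
      assert(DecodeKind(flags) == CodeKind::kBaseline);
    }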
784 // Store the optimized code in the closure.
853 // If the optimized code is cleared, go to runtime to update the optimization
857 // Check if the optimized code is marked for deopt. If it is, bailout to a
865 // Optimized code is good, get it into the closure and link the closure
866 // into the optimized functions list, then tail call the optimized code.
877 // Optimized code slot contains deoptimized code or code is cleared and
878 // optimized code marker isn't updated. Evict the code, update the marker
879 // and re-enter the closure's code.
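The comments at lines 853-879 describe a three-way decision on the optimized code slot: cleared code goes back to the runtime, code marked for deoptimization is evicted, and healthy code is installed in the closure and tail-called. A minimal plain-C++ sketch of that control flow, using invented types rather than the builtin's MacroAssembler code:

    // Hypothetical stand-ins; the real builtin operates on tagged heap objects.
    struct CodeObject {
      bool marked_for_deoptimization = false;
    };

    struct Closure {
      const CodeObject* code = nullptr;
    };

    enum class Action { kCallRuntimeToUpdateState, kEvictAndReenter, kTailCallOptimized };

    Action HandleOptimizedCodeSlot(const CodeObject* slot, Closure& closure) {
      if (slot == nullptr) {
        // Optimized code was cleared: let the runtime update the optimization state.
        return Action::kCallRuntimeToUpdateState;
      }
      if (slot->marked_for_deoptimization) {
        // Deoptimized code in the slot: evict it, update the marker,
        // and re-enter the closure's existing code.
        return Action::kEvictAndReenter;
      }
      // Optimized code is good: store it in the closure, link the closure into
      // the optimized-functions list, then tail-call it.
      closure.code = slot;
      return Action::kTailCallOptimized;
    }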
922 // will restore the original bytecode. In order to simplify the code, we have
943 // The code to load the next bytecode is common to both wide and extra wide.
993 // is optimized code or a tiering state that needs to be processed.
1001 // Store feedback_vector. We may need it if we need to load the optimized code
1007 // Check if there is optimized code or a tiering state that needs to be
1020 // Check if optimized code is available
1053 // Generate code for entering a JS function with the interpreter.
1087 // optimized code and update invocation count. Otherwise, set up the stack
1113 // MANUAL indicates that the scope shouldn't actually generate code to set
1297 // Load the baseline code into the closure.
1667 // If the code deoptimizes during the implicit function entry stack interrupt
1749 // We'll use the bytecode for both code age/OSR resetting, and pushing onto
1756 // Baseline code frames store the feedback vector where interpreter would
1804 // Push the baseline code return address now, as if it had been pushed by
1852 int code = config->GetAllocatableGeneralCode(i);
1853 __ pop(Register::from_code(code));
1854 if (java_script_builtin && code == kJavaScriptCallArgCountRegister.code()) {
1855 __ SmiUntag(Register::from_code(code));
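Lines 1852-1855 pop each allocatable general register and, for JavaScript builtins, untag the argument-count register, which was saved as a Smi. A hedged sketch of the Smi tag arithmetic on a 32-bit target (the one-bit tag size is an assumption about ia32, not something stated in this listing):

    #include <cstdint>

    constexpr int kSmiTagSizeAssumed = 1;  // assumed: one tag bit on 32-bit targets

    // A Smi stores the integer shifted left by the tag size, so untagging is an
    // arithmetic shift right; conceptually this is what SmiUntag does to the
    // popped argument-count register.
    int32_t SmiTag(int32_t value) { return value << kSmiTagSizeAssumed; }
    int32_t SmiUntag(int32_t tagged) { return tagged >> kSmiTagSizeAssumed; }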
1904 DCHECK_EQ(kInterpreterAccumulatorRegister.code(), eax.code());
2185 Handle<Code> code) {
2270 __ Jump(code, RelocInfo::CODE_TARGET);
2280 Handle<Code> code) {
2372 // Tail-call to the {code} handler.
2373 __ Jump(code, RelocInfo::CODE_TARGET);
2812 // If the code object is null, just return to the caller.
2825 // Load deoptimization data from the code object.
3657 // Generated code is put into a fixed, unmovable buffer, and not into
3989 int code = config->GetAllocatableDoubleCode(i);
3990 XMMRegister xmm_reg = XMMRegister::from_code(code);
3991 int offset = code * kDoubleSize;
4004 // Get the address of the location in the code object
4046 int code = config->GetAllocatableDoubleCode(i);
4047 int dst_offset = code * kDoubleSize + double_regs_offset;
4048 int src_offset = code * kDoubleSize;
4055 // and check that the generated code never deoptimizes with an unbalanced stack.
4125 int code = config->GetAllocatableDoubleCode(i);
4126 XMMRegister xmm_reg = XMMRegister::from_code(code);
4127 int src_offset = code * kDoubleSize + double_regs_offset;
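The save loop at lines 3989-3991, the copy at 4046-4048, and the restore at 4125-4127 all address each double register's slot as code * kDoubleSize (plus double_regs_offset inside the frame description). A plain-C++ sketch of that offset arithmetic, with the register count assumed and the real work done in generated assembly:

    #include <cstdint>
    #include <cstring>

    constexpr int kDoubleSize = 8;             // bytes per double
    constexpr int kNumAllocatableDoubles = 8;  // assumed, e.g. xmm0..xmm7 on ia32

    // Copy each saved double register into its slot in the output buffer:
    // dst_offset = double_regs_offset + code * kDoubleSize.
    void CopyDoubles(const double* saved_regs, uint8_t* frame, int double_regs_offset) {
      for (int code = 0; code < kNumAllocatableDoubles; ++code) {
        int dst_offset = double_regs_offset + code * kDoubleSize;
        std::memcpy(frame + dst_offset, &saved_regs[code], kDoubleSize);
      }
    }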
4172 // bytecode. If there is baseline code on the shared function info, converts an
4174 // code. Otherwise execution continues with bytecode.
4196 // Check if we have baseline code. For OSR entry it is safe to assume we
4197 // always have baseline code.
4203 // Start with bytecode as there is no baseline code.
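Lines 4172-4203 describe choosing between baseline code and bytecode when (re)starting execution: use baseline code from the shared function info if it exists (always, for OSR entry), otherwise continue with bytecode. A minimal sketch of that branch, with invented stand-in types:

    #include <cassert>

    // Hypothetical stand-in for the relevant SharedFunctionInfo fields.
    struct SharedFunctionInfo {
      const void* baseline_code = nullptr;  // null means no baseline code yet
      const void* bytecode = nullptr;
    };

    const void* SelectEntryCode(const SharedFunctionInfo& sfi, bool is_osr_entry) {
      if (sfi.baseline_code != nullptr) {
        // Convert the interpreter frame to a baseline frame and continue there.
        return sfi.baseline_code;
      }
      // For OSR entry it is safe to assume baseline code always exists.
      assert(!is_osr_entry);
      return sfi.bytecode;  // start with bytecode
    }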
4255 // If the code deoptimizes during the implicit function entry stack interrupt
4330 // Retry from the start after installing baseline code.