// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/ic/binary-op-assembler.h"

#include "src/common/globals.h"

namespace v8 {
namespace internal {

TNode<Object> BinaryOpAssembler::Generate_AddWithFeedback(
    const LazyNode<Context>& context, TNode<Object> lhs, TNode<Object> rhs,
    TNode<UintPtrT> slot_id, const LazyNode<HeapObject>& maybe_feedback_vector,
    UpdateFeedbackMode update_feedback_mode, bool rhs_known_smi) {
  // Shared entry for floating point addition.
  Label do_fadd(this), if_lhsisnotnumber(this, Label::kDeferred),
      check_rhsisoddball(this, Label::kDeferred),
      call_with_oddball_feedback(this), call_with_any_feedback(this),
      call_add_stub(this), end(this), bigint(this, Label::kDeferred);
  TVARIABLE(Float64T, var_fadd_lhs);
  TVARIABLE(Float64T, var_fadd_rhs);
  TVARIABLE(Smi, var_type_feedback);
  TVARIABLE(Object, var_result);

  // Check if the {lhs} is a Smi or a HeapObject.
  Label if_lhsissmi(this);
  // If rhs is known to be an Smi we want to fast path Smi operation. This is
  // for AddSmi operation. For the normal Add operation, we want to fast path
  // both Smi and Number operations, so this path should not be marked as
  // Deferred.
  Label if_lhsisnotsmi(this,
                       rhs_known_smi ? Label::kDeferred : Label::kNonDeferred);
  Branch(TaggedIsNotSmi(lhs), &if_lhsisnotsmi, &if_lhsissmi);

  BIND(&if_lhsissmi);
  {
    Comment("lhs is Smi");
    TNode<Smi> lhs_smi = CAST(lhs);
    if (!rhs_known_smi) {
      // Check if the {rhs} is also a Smi.
      Label if_rhsissmi(this), if_rhsisnotsmi(this);
      Branch(TaggedIsSmi(rhs), &if_rhsissmi, &if_rhsisnotsmi);

      BIND(&if_rhsisnotsmi);
      {
        // Check if the {rhs} is a HeapNumber.
        TNode<HeapObject> rhs_heap_object = CAST(rhs);
        GotoIfNot(IsHeapNumber(rhs_heap_object), &check_rhsisoddball);

        var_fadd_lhs = SmiToFloat64(lhs_smi);
        var_fadd_rhs = LoadHeapNumberValue(rhs_heap_object);
        Goto(&do_fadd);
      }

      BIND(&if_rhsissmi);
    }

    {
      Comment("perform smi operation");
      // If rhs is known to be an Smi we want to fast path Smi operation. This
      // is for AddSmi operation. For the normal Add operation, we want to fast
      // path both Smi and Number operations, so this path should not be marked
      // as Deferred.
      TNode<Smi> rhs_smi = CAST(rhs);
      Label if_overflow(
          this, rhs_known_smi ? Label::kDeferred : Label::kNonDeferred);
      TNode<Smi> smi_result = TrySmiAdd(lhs_smi, rhs_smi, &if_overflow);
      // Not overflowed.
      {
        var_type_feedback = SmiConstant(BinaryOperationFeedback::kSignedSmall);
        UpdateFeedback(var_type_feedback.value(), maybe_feedback_vector(),
                       slot_id, update_feedback_mode);
        var_result = smi_result;
        Goto(&end);
      }

      BIND(&if_overflow);
      {
        var_fadd_lhs = SmiToFloat64(lhs_smi);
        var_fadd_rhs = SmiToFloat64(rhs_smi);
        Goto(&do_fadd);
      }
    }
  }

  BIND(&if_lhsisnotsmi);
  {
    // Check if {lhs} is a HeapNumber.
    TNode<HeapObject> lhs_heap_object = CAST(lhs);
    GotoIfNot(IsHeapNumber(lhs_heap_object), &if_lhsisnotnumber);

    if (!rhs_known_smi) {
      // Check if the {rhs} is Smi.
      Label if_rhsissmi(this), if_rhsisnotsmi(this);
      Branch(TaggedIsSmi(rhs), &if_rhsissmi, &if_rhsisnotsmi);

      BIND(&if_rhsisnotsmi);
      {
        // Check if the {rhs} is a HeapNumber.
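        // If it is, both operands are heap numbers and we fall through to the
        // shared float64 addition below; otherwise {check_rhsisoddball}
        // decides between the oddball and generic slow paths.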
        TNode<HeapObject> rhs_heap_object = CAST(rhs);
        GotoIfNot(IsHeapNumber(rhs_heap_object), &check_rhsisoddball);

        var_fadd_lhs = LoadHeapNumberValue(lhs_heap_object);
        var_fadd_rhs = LoadHeapNumberValue(rhs_heap_object);
        Goto(&do_fadd);
      }

      BIND(&if_rhsissmi);
    }
    {
      var_fadd_lhs = LoadHeapNumberValue(lhs_heap_object);
      var_fadd_rhs = SmiToFloat64(CAST(rhs));
      Goto(&do_fadd);
    }
  }

  BIND(&do_fadd);
  {
    var_type_feedback = SmiConstant(BinaryOperationFeedback::kNumber);
    UpdateFeedback(var_type_feedback.value(), maybe_feedback_vector(), slot_id,
                   update_feedback_mode);
    TNode<Float64T> value =
        Float64Add(var_fadd_lhs.value(), var_fadd_rhs.value());
    TNode<HeapNumber> result = AllocateHeapNumberWithValue(value);
    var_result = result;
    Goto(&end);
  }

  BIND(&if_lhsisnotnumber);
  {
    // No checks on rhs are done yet. We just know lhs is not a number or Smi.
    Label if_lhsisoddball(this), if_lhsisnotoddball(this);
    TNode<Uint16T> lhs_instance_type = LoadInstanceType(CAST(lhs));
    TNode<BoolT> lhs_is_oddball =
        InstanceTypeEqual(lhs_instance_type, ODDBALL_TYPE);
    Branch(lhs_is_oddball, &if_lhsisoddball, &if_lhsisnotoddball);

    BIND(&if_lhsisoddball);
    {
      GotoIf(TaggedIsSmi(rhs), &call_with_oddball_feedback);

      // Check if {rhs} is a HeapNumber.
      Branch(IsHeapNumber(CAST(rhs)), &call_with_oddball_feedback,
             &check_rhsisoddball);
    }

    BIND(&if_lhsisnotoddball);
    {
      // Check if the {rhs} is a smi, and exit the string and bigint check
      // early if it is.
      GotoIf(TaggedIsSmi(rhs), &call_with_any_feedback);
      TNode<HeapObject> rhs_heap_object = CAST(rhs);

      Label lhs_is_string(this), lhs_is_bigint(this);
      GotoIf(IsStringInstanceType(lhs_instance_type), &lhs_is_string);
      GotoIf(IsBigIntInstanceType(lhs_instance_type), &lhs_is_bigint);
      Goto(&call_with_any_feedback);

      BIND(&lhs_is_bigint);
      Branch(IsBigInt(rhs_heap_object), &bigint, &call_with_any_feedback);

      BIND(&lhs_is_string);
      {
        TNode<Uint16T> rhs_instance_type = LoadInstanceType(rhs_heap_object);

        // Exit unless {rhs} is a string. Since {lhs} is a string we no longer
        // need an Oddball check.
        GotoIfNot(IsStringInstanceType(rhs_instance_type),
                  &call_with_any_feedback);

        var_type_feedback = SmiConstant(BinaryOperationFeedback::kString);
        UpdateFeedback(var_type_feedback.value(), maybe_feedback_vector(),
                       slot_id, update_feedback_mode);
        var_result =
            CallBuiltin(Builtin::kStringAdd_CheckNone, context(), lhs, rhs);

        Goto(&end);
      }
    }
  }

  BIND(&check_rhsisoddball);
  {
    // Check if rhs is an oddball. At this point we know lhs is either a
    // Smi or number or oddball and rhs is not a number or Smi.
    TNode<Uint16T> rhs_instance_type = LoadInstanceType(CAST(rhs));
    TNode<BoolT> rhs_is_oddball =
        InstanceTypeEqual(rhs_instance_type, ODDBALL_TYPE);
    GotoIf(rhs_is_oddball, &call_with_oddball_feedback);
    Goto(&call_with_any_feedback);
  }

  BIND(&bigint);
  {
    // Both {lhs} and {rhs} are of BigInt type.
    Label bigint_too_big(this);
    var_result = CallBuiltin(Builtin::kBigIntAddNoThrow, context(), lhs, rhs);
    // Check for sentinel that signals BigIntTooBig exception.
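    // BigIntAddNoThrow does not throw; it returns a Smi sentinel when the
    // result would exceed the maximum BigInt size, and the RangeError is
    // thrown below after the feedback has been updated.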
    GotoIf(TaggedIsSmi(var_result.value()), &bigint_too_big);

    var_type_feedback = SmiConstant(BinaryOperationFeedback::kBigInt);
    UpdateFeedback(var_type_feedback.value(), maybe_feedback_vector(), slot_id,
                   update_feedback_mode);
    Goto(&end);

    BIND(&bigint_too_big);
    {
      // Update feedback to prevent deopt loop.
      UpdateFeedback(SmiConstant(BinaryOperationFeedback::kAny),
                     maybe_feedback_vector(), slot_id, update_feedback_mode);
      ThrowRangeError(context(), MessageTemplate::kBigIntTooBig);
    }
  }

  BIND(&call_with_oddball_feedback);
  {
    var_type_feedback = SmiConstant(BinaryOperationFeedback::kNumberOrOddball);
    Goto(&call_add_stub);
  }

  BIND(&call_with_any_feedback);
  {
    var_type_feedback = SmiConstant(BinaryOperationFeedback::kAny);
    Goto(&call_add_stub);
  }

  BIND(&call_add_stub);
  {
    UpdateFeedback(var_type_feedback.value(), maybe_feedback_vector(), slot_id,
                   update_feedback_mode);
    var_result = CallBuiltin(Builtin::kAdd, context(), lhs, rhs);
    Goto(&end);
  }

  BIND(&end);
  return var_result.value();
}

TNode<Object> BinaryOpAssembler::Generate_BinaryOperationWithFeedback(
    const LazyNode<Context>& context, TNode<Object> lhs, TNode<Object> rhs,
    TNode<UintPtrT> slot_id, const LazyNode<HeapObject>& maybe_feedback_vector,
    const SmiOperation& smiOperation, const FloatOperation& floatOperation,
    Operation op, UpdateFeedbackMode update_feedback_mode,
    bool rhs_known_smi) {
  Label do_float_operation(this), end(this), call_stub(this),
      check_rhsisoddball(this, Label::kDeferred), call_with_any_feedback(this),
      if_lhsisnotnumber(this, Label::kDeferred),
      if_both_bigint(this, Label::kDeferred);
  TVARIABLE(Float64T, var_float_lhs);
  TVARIABLE(Float64T, var_float_rhs);
  TVARIABLE(Smi, var_type_feedback);
  TVARIABLE(Object, var_result);

  Label if_lhsissmi(this);
  // If rhs is known to be an Smi (in the SubSmi, MulSmi, DivSmi, ModSmi
  // bytecode handlers) we want to fast path Smi operation. For the normal
  // operation, we want to fast path both Smi and Number operations, so this
  // path should not be marked as Deferred.
  Label if_lhsisnotsmi(this,
                       rhs_known_smi ? Label::kDeferred : Label::kNonDeferred);
  Branch(TaggedIsNotSmi(lhs), &if_lhsisnotsmi, &if_lhsissmi);

  // Check if the {lhs} is a Smi or a HeapObject.
  BIND(&if_lhsissmi);
  {
    Comment("lhs is Smi");
    TNode<Smi> lhs_smi = CAST(lhs);
    if (!rhs_known_smi) {
      // Check if the {rhs} is also a Smi.
      Label if_rhsissmi(this), if_rhsisnotsmi(this);
      Branch(TaggedIsSmi(rhs), &if_rhsissmi, &if_rhsisnotsmi);

      BIND(&if_rhsisnotsmi);
      {
        // Check if {rhs} is a HeapNumber.
        TNode<HeapObject> rhs_heap_object = CAST(rhs);
        GotoIfNot(IsHeapNumber(rhs_heap_object), &check_rhsisoddball);

        // Perform a floating point operation.
        var_float_lhs = SmiToFloat64(lhs_smi);
        var_float_rhs = LoadHeapNumberValue(rhs_heap_object);
        Goto(&do_float_operation);
      }

      BIND(&if_rhsissmi);
    }

    {
      Comment("perform smi operation");
      var_result = smiOperation(lhs_smi, CAST(rhs), &var_type_feedback);
      UpdateFeedback(var_type_feedback.value(), maybe_feedback_vector(),
                     slot_id, update_feedback_mode);
      Goto(&end);
    }
  }

  BIND(&if_lhsisnotsmi);
  {
    Comment("lhs is not Smi");
    // Check if the {lhs} is a HeapNumber.
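    // If it is not, the deferred {if_lhsisnotnumber} path below dispatches on
    // BigInt and oddball inputs and otherwise falls back to generic (kAny)
    // feedback.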
    TNode<HeapObject> lhs_heap_object = CAST(lhs);
    GotoIfNot(IsHeapNumber(lhs_heap_object), &if_lhsisnotnumber);

    if (!rhs_known_smi) {
      // Check if the {rhs} is a Smi.
      Label if_rhsissmi(this), if_rhsisnotsmi(this);
      Branch(TaggedIsSmi(rhs), &if_rhsissmi, &if_rhsisnotsmi);

      BIND(&if_rhsisnotsmi);
      {
        // Check if the {rhs} is a HeapNumber.
        TNode<HeapObject> rhs_heap_object = CAST(rhs);
        GotoIfNot(IsHeapNumber(rhs_heap_object), &check_rhsisoddball);

        // Perform a floating point operation.
        var_float_lhs = LoadHeapNumberValue(lhs_heap_object);
        var_float_rhs = LoadHeapNumberValue(rhs_heap_object);
        Goto(&do_float_operation);
      }

      BIND(&if_rhsissmi);
    }

    {
      // Perform floating point operation.
      var_float_lhs = LoadHeapNumberValue(lhs_heap_object);
      var_float_rhs = SmiToFloat64(CAST(rhs));
      Goto(&do_float_operation);
    }
  }

  BIND(&do_float_operation);
  {
    var_type_feedback = SmiConstant(BinaryOperationFeedback::kNumber);
    UpdateFeedback(var_type_feedback.value(), maybe_feedback_vector(), slot_id,
                   update_feedback_mode);
    TNode<Float64T> lhs_value = var_float_lhs.value();
    TNode<Float64T> rhs_value = var_float_rhs.value();
    TNode<Float64T> value = floatOperation(lhs_value, rhs_value);
    var_result = AllocateHeapNumberWithValue(value);
    Goto(&end);
  }

  BIND(&if_lhsisnotnumber);
  {
    // No checks on rhs are done yet. We just know lhs is not a number or Smi.
    Label if_left_bigint(this), if_left_oddball(this);
    TNode<Uint16T> lhs_instance_type = LoadInstanceType(CAST(lhs));
    GotoIf(IsBigIntInstanceType(lhs_instance_type), &if_left_bigint);
    TNode<BoolT> lhs_is_oddball =
        InstanceTypeEqual(lhs_instance_type, ODDBALL_TYPE);
    Branch(lhs_is_oddball, &if_left_oddball, &call_with_any_feedback);

    BIND(&if_left_oddball);
    {
      Label if_rhsissmi(this), if_rhsisnotsmi(this);
      Branch(TaggedIsSmi(rhs), &if_rhsissmi, &if_rhsisnotsmi);

      BIND(&if_rhsissmi);
      {
        var_type_feedback =
            SmiConstant(BinaryOperationFeedback::kNumberOrOddball);
        Goto(&call_stub);
      }

      BIND(&if_rhsisnotsmi);
      {
        // Check if {rhs} is a HeapNumber.
        GotoIfNot(IsHeapNumber(CAST(rhs)), &check_rhsisoddball);

        var_type_feedback =
            SmiConstant(BinaryOperationFeedback::kNumberOrOddball);
        Goto(&call_stub);
      }
    }

    BIND(&if_left_bigint);
    {
      GotoIf(TaggedIsSmi(rhs), &call_with_any_feedback);
      Branch(IsBigInt(CAST(rhs)), &if_both_bigint, &call_with_any_feedback);
    }
  }

  BIND(&check_rhsisoddball);
  {
    // Check if rhs is an oddball. At this point we know lhs is either a
    // Smi or number or oddball and rhs is not a number or Smi.
    TNode<Uint16T> rhs_instance_type = LoadInstanceType(CAST(rhs));
    TNode<BoolT> rhs_is_oddball =
        InstanceTypeEqual(rhs_instance_type, ODDBALL_TYPE);
    GotoIfNot(rhs_is_oddball, &call_with_any_feedback);

    var_type_feedback = SmiConstant(BinaryOperationFeedback::kNumberOrOddball);
    Goto(&call_stub);
  }

  BIND(&if_both_bigint);
  {
    var_type_feedback = SmiConstant(BinaryOperationFeedback::kBigInt);
    UpdateFeedback(var_type_feedback.value(), maybe_feedback_vector(), slot_id,
                   update_feedback_mode);
    if (op == Operation::kSubtract) {
      Label bigint_too_big(this);
      var_result =
          CallBuiltin(Builtin::kBigIntSubtractNoThrow, context(), lhs, rhs);

      // Check for sentinel that signals BigIntTooBig exception.
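      // As with addition above, a Smi result from BigIntSubtractNoThrow means
      // the BigInt size limit was exceeded rather than being a real value.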
      GotoIf(TaggedIsSmi(var_result.value()), &bigint_too_big);
      Goto(&end);

      BIND(&bigint_too_big);
      {
        // Update feedback to prevent deopt loop.
        UpdateFeedback(SmiConstant(BinaryOperationFeedback::kAny),
                       maybe_feedback_vector(), slot_id, update_feedback_mode);
        ThrowRangeError(context(), MessageTemplate::kBigIntTooBig);
      }
    } else {
      var_result = CallRuntime(Runtime::kBigIntBinaryOp, context(), lhs, rhs,
                               SmiConstant(op));
      Goto(&end);
    }
  }

  BIND(&call_with_any_feedback);
  {
    var_type_feedback = SmiConstant(BinaryOperationFeedback::kAny);
    Goto(&call_stub);
  }

  BIND(&call_stub);
  {
    UpdateFeedback(var_type_feedback.value(), maybe_feedback_vector(), slot_id,
                   update_feedback_mode);
    TNode<Object> result;
    switch (op) {
      case Operation::kSubtract:
        result = CallBuiltin(Builtin::kSubtract, context(), lhs, rhs);
        break;
      case Operation::kMultiply:
        result = CallBuiltin(Builtin::kMultiply, context(), lhs, rhs);
        break;
      case Operation::kDivide:
        result = CallBuiltin(Builtin::kDivide, context(), lhs, rhs);
        break;
      case Operation::kModulus:
        result = CallBuiltin(Builtin::kModulus, context(), lhs, rhs);
        break;
      case Operation::kExponentiate:
        result = CallBuiltin(Builtin::kExponentiate, context(), lhs, rhs);
        break;
      default:
        UNREACHABLE();
    }
    var_result = result;
    Goto(&end);
  }

  BIND(&end);
  return var_result.value();
}

TNode<Object> BinaryOpAssembler::Generate_SubtractWithFeedback(
    const LazyNode<Context>& context, TNode<Object> lhs, TNode<Object> rhs,
    TNode<UintPtrT> slot_id, const LazyNode<HeapObject>& maybe_feedback_vector,
    UpdateFeedbackMode update_feedback_mode, bool rhs_known_smi) {
  auto smiFunction = [=](TNode<Smi> lhs, TNode<Smi> rhs,
                         TVariable<Smi>* var_type_feedback) {
    Label end(this);
    TVARIABLE(Number, var_result);
    // If rhs is known to be an Smi (for SubSmi) we want to fast path Smi
    // operation. For the normal Sub operation, we want to fast path both
    // Smi and Number operations, so this path should not be marked as
    // Deferred.
    Label if_overflow(
        this, rhs_known_smi ? Label::kDeferred : Label::kNonDeferred);
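    // TrySmiSub jumps to {if_overflow} when the result does not fit in a Smi;
    // the overflow path below redoes the subtraction in float64 and records
    // kNumber feedback instead of kSignedSmall.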
    var_result = TrySmiSub(lhs, rhs, &if_overflow);
    *var_type_feedback = SmiConstant(BinaryOperationFeedback::kSignedSmall);
    Goto(&end);

    BIND(&if_overflow);
    {
      *var_type_feedback = SmiConstant(BinaryOperationFeedback::kNumber);
      TNode<Float64T> value = Float64Sub(SmiToFloat64(lhs), SmiToFloat64(rhs));
      var_result = AllocateHeapNumberWithValue(value);
      Goto(&end);
    }

    BIND(&end);
    return var_result.value();
  };
  auto floatFunction = [=](TNode<Float64T> lhs, TNode<Float64T> rhs) {
    return Float64Sub(lhs, rhs);
  };
  return Generate_BinaryOperationWithFeedback(
      context, lhs, rhs, slot_id, maybe_feedback_vector, smiFunction,
      floatFunction, Operation::kSubtract, update_feedback_mode,
      rhs_known_smi);
}

TNode<Object> BinaryOpAssembler::Generate_MultiplyWithFeedback(
    const LazyNode<Context>& context, TNode<Object> lhs, TNode<Object> rhs,
    TNode<UintPtrT> slot_id, const LazyNode<HeapObject>& maybe_feedback_vector,
    UpdateFeedbackMode update_feedback_mode, bool rhs_known_smi) {
  auto smiFunction = [=](TNode<Smi> lhs, TNode<Smi> rhs,
                         TVariable<Smi>* var_type_feedback) {
    TNode<Number> result = SmiMul(lhs, rhs);
    *var_type_feedback = SelectSmiConstant(
        TaggedIsSmi(result), BinaryOperationFeedback::kSignedSmall,
        BinaryOperationFeedback::kNumber);
    return result;
  };
  auto floatFunction = [=](TNode<Float64T> lhs, TNode<Float64T> rhs) {
    return Float64Mul(lhs, rhs);
  };
  return Generate_BinaryOperationWithFeedback(
      context, lhs, rhs, slot_id, maybe_feedback_vector, smiFunction,
      floatFunction, Operation::kMultiply, update_feedback_mode,
      rhs_known_smi);
}

TNode<Object> BinaryOpAssembler::Generate_DivideWithFeedback(
    const LazyNode<Context>& context, TNode<Object> dividend,
    TNode<Object> divisor, TNode<UintPtrT> slot_id,
    const LazyNode<HeapObject>& maybe_feedback_vector,
    UpdateFeedbackMode update_feedback_mode, bool rhs_known_smi) {
  auto smiFunction = [=](TNode<Smi> lhs, TNode<Smi> rhs,
                         TVariable<Smi>* var_type_feedback) {
    TVARIABLE(Object, var_result);
    // If rhs is known to be an Smi (for DivSmi) we want to fast path Smi
    // operation. For the normal Div operation, we want to fast path both
    // Smi and Number operations, so this path should not be marked as
    // Deferred.
    Label bailout(this,
                  rhs_known_smi ? Label::kDeferred : Label::kNonDeferred),
        end(this);
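    // Divisions whose result cannot be represented as a Smi bail out here;
    // the bailout path redoes the division in float64 and records
    // kSignedSmallInputs feedback.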
    var_result = TrySmiDiv(lhs, rhs, &bailout);
    *var_type_feedback = SmiConstant(BinaryOperationFeedback::kSignedSmall);
    Goto(&end);

    BIND(&bailout);
    {
      *var_type_feedback =
          SmiConstant(BinaryOperationFeedback::kSignedSmallInputs);
      TNode<Float64T> value = Float64Div(SmiToFloat64(lhs), SmiToFloat64(rhs));
      var_result = AllocateHeapNumberWithValue(value);
      Goto(&end);
    }

    BIND(&end);
    return var_result.value();
  };
  auto floatFunction = [=](TNode<Float64T> lhs, TNode<Float64T> rhs) {
    return Float64Div(lhs, rhs);
  };
  return Generate_BinaryOperationWithFeedback(
      context, dividend, divisor, slot_id, maybe_feedback_vector, smiFunction,
      floatFunction, Operation::kDivide, update_feedback_mode, rhs_known_smi);
}

TNode<Object> BinaryOpAssembler::Generate_ModulusWithFeedback(
    const LazyNode<Context>& context, TNode<Object> dividend,
    TNode<Object> divisor, TNode<UintPtrT> slot_id,
    const LazyNode<HeapObject>& maybe_feedback_vector,
    UpdateFeedbackMode update_feedback_mode, bool rhs_known_smi) {
  auto smiFunction = [=](TNode<Smi> lhs, TNode<Smi> rhs,
                         TVariable<Smi>* var_type_feedback) {
    TNode<Number> result = SmiMod(lhs, rhs);
    *var_type_feedback = SelectSmiConstant(
        TaggedIsSmi(result), BinaryOperationFeedback::kSignedSmall,
        BinaryOperationFeedback::kNumber);
    return result;
  };
  auto floatFunction = [=](TNode<Float64T> lhs, TNode<Float64T> rhs) {
    return Float64Mod(lhs, rhs);
  };
  return Generate_BinaryOperationWithFeedback(
      context, dividend, divisor, slot_id, maybe_feedback_vector, smiFunction,
      floatFunction, Operation::kModulus, update_feedback_mode, rhs_known_smi);
}

TNode<Object> BinaryOpAssembler::Generate_ExponentiateWithFeedback(
    const LazyNode<Context>& context, TNode<Object> base,
    TNode<Object> exponent, TNode<UintPtrT> slot_id,
    const LazyNode<HeapObject>& maybe_feedback_vector,
    UpdateFeedbackMode update_feedback_mode, bool rhs_known_smi) {
  auto smiFunction = [=](TNode<Smi> base, TNode<Smi> exponent,
                         TVariable<Smi>* var_type_feedback) {
    *var_type_feedback = SmiConstant(BinaryOperationFeedback::kNumber);
    return AllocateHeapNumberWithValue(
        Float64Pow(SmiToFloat64(base), SmiToFloat64(exponent)));
  };
  auto floatFunction = [=](TNode<Float64T> base, TNode<Float64T> exponent) {
    return Float64Pow(base, exponent);
  };
  return Generate_BinaryOperationWithFeedback(
      context, base, exponent, slot_id, maybe_feedback_vector, smiFunction,
      floatFunction, Operation::kExponentiate, update_feedback_mode,
      rhs_known_smi);
}

TNode<Object> BinaryOpAssembler::Generate_BitwiseBinaryOpWithOptionalFeedback(
    Operation bitwise_op, TNode<Object> left, TNode<Object> right,
    const LazyNode<Context>& context, TNode<UintPtrT>* slot,
    const LazyNode<HeapObject>* maybe_feedback_vector,
    UpdateFeedbackMode update_feedback_mode) {
  TVARIABLE(Object, result);
  TVARIABLE(Smi, var_left_feedback);
  TVARIABLE(Smi, var_right_feedback);
  TVARIABLE(Word32T, var_left_word32);
  TVARIABLE(Word32T, var_right_word32);
  TVARIABLE(BigInt, var_left_bigint);
  TVARIABLE(BigInt, var_right_bigint);
  // These are the variables that are passed to BigIntBinaryOp. They are not
  // guaranteed to be BigInts because the Runtime call handles throwing
  // exceptions when only one side is a BigInt.
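  // {var_left_maybe_bigint} is seeded with the original {left} value so that,
  // when only the right side turns out to be a BigInt, the runtime call still
  // receives the non-BigInt left operand and can throw the appropriate
  // exception.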
  TVARIABLE(Object, var_left_maybe_bigint, left);
  TVARIABLE(Numeric, var_right_maybe_bigint);
  Label done(this);
  Label if_left_number(this), do_number_op(this);
  Label if_left_bigint(this), do_bigint_op(this);

  TaggedToWord32OrBigIntWithFeedback(
      context(), left, &if_left_number, &var_left_word32, &if_left_bigint,
      &var_left_bigint, slot ? &var_left_feedback : nullptr);

  Label right_is_bigint(this);
  BIND(&if_left_number);
  {
    TaggedToWord32OrBigIntWithFeedback(
        context(), right, &do_number_op, &var_right_word32, &right_is_bigint,
        &var_right_bigint, slot ? &var_right_feedback : nullptr);
  }

  BIND(&right_is_bigint);
  {
    // At this point it's guaranteed that the op will fail because the RHS is a
    // BigInt while the LHS is not, but that's ok because the Runtime call will
    // throw the exception.
    var_right_maybe_bigint = var_right_bigint.value();
    Goto(&do_bigint_op);
  }

  BIND(&do_number_op);
  {
    result = BitwiseOp(var_left_word32.value(), var_right_word32.value(),
                       bitwise_op);

    if (slot) {
      TNode<Smi> result_type = SelectSmiConstant(
          TaggedIsSmi(result.value()), BinaryOperationFeedback::kSignedSmall,
          BinaryOperationFeedback::kNumber);
      TNode<Smi> input_feedback =
          SmiOr(var_left_feedback.value(), var_right_feedback.value());
      TNode<Smi> feedback = SmiOr(result_type, input_feedback);
      UpdateFeedback(feedback, (*maybe_feedback_vector)(), *slot,
                     update_feedback_mode);
    }
    Goto(&done);
  }

  // BigInt cases.
  BIND(&if_left_bigint);
  {
    TaggedToNumericWithFeedback(context(), right, &var_right_maybe_bigint,
                                &var_right_feedback);
    var_left_maybe_bigint = var_left_bigint.value();
    Goto(&do_bigint_op);
  }

  BIND(&do_bigint_op);
  {
    if (slot) {
      // Ensure that the feedback is updated even if the runtime call below
      // would throw.
      TNode<Smi> feedback =
          SmiOr(var_left_feedback.value(), var_right_feedback.value());
      UpdateFeedback(feedback, (*maybe_feedback_vector)(), *slot,
                     update_feedback_mode);
    }

    result = CallRuntime(
        Runtime::kBigIntBinaryOp, context(), var_left_maybe_bigint.value(),
        var_right_maybe_bigint.value(), SmiConstant(bitwise_op));
    Goto(&done);
  }

  BIND(&done);
  return result.value();
}

TNode<Object>
BinaryOpAssembler::Generate_BitwiseBinaryOpWithSmiOperandAndOptionalFeedback(
    Operation bitwise_op, TNode<Object> left, TNode<Object> right,
    const LazyNode<Context>& context, TNode<UintPtrT>* slot,
    const LazyNode<HeapObject>* maybe_feedback_vector,
    UpdateFeedbackMode update_feedback_mode) {
  TNode<Smi> right_smi = CAST(right);
  TVARIABLE(Object, result);
  TVARIABLE(Smi, var_left_feedback);
  TVARIABLE(Word32T, var_left_word32);
  TVARIABLE(BigInt, var_left_bigint);
  TVARIABLE(Smi, feedback);
  // Check if the {lhs} is a Smi or a HeapObject.
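  // Only the left operand needs this dispatch; {right} was already cast to a
  // Smi above, so the non-Smi left case is the deferred slow path.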
  Label if_lhsissmi(this), if_lhsisnotsmi(this, Label::kDeferred);
  Label do_number_op(this), if_bigint_mix(this), done(this);

  Branch(TaggedIsSmi(left), &if_lhsissmi, &if_lhsisnotsmi);

  BIND(&if_lhsissmi);
  {
    TNode<Smi> left_smi = CAST(left);
    result = BitwiseSmiOp(left_smi, right_smi, bitwise_op);
    if (slot) {
      if (IsBitwiseOutputKnownSmi(bitwise_op)) {
        feedback = SmiConstant(BinaryOperationFeedback::kSignedSmall);
      } else {
        feedback = SelectSmiConstant(TaggedIsSmi(result.value()),
                                     BinaryOperationFeedback::kSignedSmall,
                                     BinaryOperationFeedback::kNumber);
      }
    }
    Goto(&done);
  }

  BIND(&if_lhsisnotsmi);
  {
    TNode<HeapObject> left_pointer = CAST(left);
    TaggedPointerToWord32OrBigIntWithFeedback(
        context(), left_pointer, &do_number_op, &var_left_word32,
        &if_bigint_mix, &var_left_bigint, &var_left_feedback);
    BIND(&do_number_op);
    {
      result = BitwiseOp(var_left_word32.value(), SmiToInt32(right_smi),
                         bitwise_op);
      if (slot) {
        TNode<Smi> result_type = SelectSmiConstant(
            TaggedIsSmi(result.value()), BinaryOperationFeedback::kSignedSmall,
            BinaryOperationFeedback::kNumber);
        feedback = SmiOr(result_type, var_left_feedback.value());
      }
      Goto(&done);
    }

    BIND(&if_bigint_mix);
    {
      if (slot) {
        // Ensure that the feedback is updated before we throw.
        feedback = var_left_feedback.value();
        UpdateFeedback(feedback.value(), (*maybe_feedback_vector)(), *slot,
                       update_feedback_mode);
      }
      ThrowTypeError(context(), MessageTemplate::kBigIntMixedTypes);
    }
  }

  BIND(&done);
  UpdateFeedback(feedback.value(), (*maybe_feedback_vector)(), *slot,
                 update_feedback_mode);
  return result.value();
}

}  // namespace internal
}  // namespace v8