| OLD | NEW |
| 1 // Copyright 2015 the V8 project authors. All rights reserved. | 1 // Copyright 2015 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/interpreter/interpreter-assembler.h" | 5 #include "src/interpreter/interpreter-assembler.h" |
| 6 | 6 |
| 7 #include <limits> | 7 #include <limits> |
| 8 #include <ostream> | 8 #include <ostream> |
| 9 | 9 |
| 10 #include "src/code-factory.h" | 10 #include "src/code-factory.h" |
| (...skipping 442 matching lines...) |
| 453 void InterpreterAssembler::CallEpilogue() { | 453 void InterpreterAssembler::CallEpilogue() { |
| 454 if (FLAG_debug_code && !disable_stack_check_across_call_) { | 454 if (FLAG_debug_code && !disable_stack_check_across_call_) { |
| 455 Node* stack_pointer_after_call = LoadStackPointer(); | 455 Node* stack_pointer_after_call = LoadStackPointer(); |
| 456 Node* stack_pointer_before_call = stack_pointer_before_call_; | 456 Node* stack_pointer_before_call = stack_pointer_before_call_; |
| 457 stack_pointer_before_call_ = nullptr; | 457 stack_pointer_before_call_ = nullptr; |
| 458 AbortIfWordNotEqual(stack_pointer_before_call, stack_pointer_after_call, | 458 AbortIfWordNotEqual(stack_pointer_before_call, stack_pointer_after_call, |
| 459 kUnexpectedStackPointer); | 459 kUnexpectedStackPointer); |
| 460 } | 460 } |
| 461 } | 461 } |
| 462 | 462 |
| 463 Node* InterpreterAssembler::IncrementCallCount(Node* type_feedback_vector, |
| 464 Node* slot_id) { |
| 465 Node* call_count_slot = IntPtrAdd(slot_id, IntPtrConstant(1)); |
| 466 Node* call_count = |
| 467 LoadFixedArrayElement(type_feedback_vector, call_count_slot); |
| 468 Node* new_count = SmiAdd(call_count, SmiTag(Int32Constant(1))); |
| 469 // Count is Smi, so we don't need a write barrier. |
| 470 return StoreFixedArrayElement(type_feedback_vector, call_count_slot, |
| 471 new_count, SKIP_WRITE_BARRIER); |
| 472 } |
| 473 |
| 463 Node* InterpreterAssembler::CallJSWithFeedback(Node* function, Node* context, | 474 Node* InterpreterAssembler::CallJSWithFeedback(Node* function, Node* context, |
| 464 Node* first_arg, Node* arg_count, | 475 Node* first_arg, Node* arg_count, |
| 465 Node* slot_id, | 476 Node* slot_id, |
| 466 Node* type_feedback_vector, | 477 Node* type_feedback_vector, |
| 467 TailCallMode tail_call_mode) { | 478 TailCallMode tail_call_mode) { |
| 468 // Static checks to assert it is safe to examine the type feedback element. | 479 // Static checks to assert it is safe to examine the type feedback element. |
| 469 // We don't know that we have a weak cell. We might have a private symbol | 480 // We don't know that we have a weak cell. We might have a private symbol |
| 470 // or an AllocationSite, but the memory is safe to examine. | 481 // or an AllocationSite, but the memory is safe to examine. |
| 471 // AllocationSite::kTransitionInfoOffset - contains a Smi or pointer to | 482 // AllocationSite::kTransitionInfoOffset - contains a Smi or pointer to |
| 472 // FixedArray. | 483 // FixedArray. |
| 473 // WeakCell::kValueOffset - contains a JSFunction or Smi(0) | 484 // WeakCell::kValueOffset - contains a JSFunction or Smi(0) |
| 474 // Symbol::kHashFieldSlot - if the low bit is 1, then the hash is not | 485 // Symbol::kHashFieldSlot - if the low bit is 1, then the hash is not |
| 475 // computed, meaning that it can't appear to be a pointer. If the low bit is | 486 // computed, meaning that it can't appear to be a pointer. If the low bit is |
| 476 // 0, then hash is computed, but the 0 bit prevents the field from appearing | 487 // 0, then hash is computed, but the 0 bit prevents the field from appearing |
| 477 // to be a pointer. | 488 // to be a pointer. |
| 478 STATIC_ASSERT(WeakCell::kSize >= kPointerSize); | 489 STATIC_ASSERT(WeakCell::kSize >= kPointerSize); |
| 479 STATIC_ASSERT(AllocationSite::kTransitionInfoOffset == | 490 STATIC_ASSERT(AllocationSite::kTransitionInfoOffset == |
| 480 WeakCell::kValueOffset && | 491 WeakCell::kValueOffset && |
| 481 WeakCell::kValueOffset == Symbol::kHashFieldSlot); | 492 WeakCell::kValueOffset == Symbol::kHashFieldSlot); |
| 482 | 493 |
| 483 Variable return_value(this, MachineRepresentation::kTagged); | 494 Variable return_value(this, MachineRepresentation::kTagged); |
| 484 Label handle_monomorphic(this), extra_checks(this), end(this), call(this), | 495 Label handle_monomorphic(this), extra_checks(this), end(this), call(this), |
| 485 call_function(this); | 496 call_function(this), call_without_feedback(this); |
| 486 | 497 |
| 487 // Slot id of 0 is used to indicate no typefeedback is available. Call using | 498 // Slot id of 0 is used to indicate no typefeedback is available. Call using |
| 488 // call builtin. | 499 // call builtin. |
| 489 STATIC_ASSERT(TypeFeedbackVector::kReservedIndexCount > 0); | 500 STATIC_ASSERT(TypeFeedbackVector::kReservedIndexCount > 0); |
| 490 Node* is_feedback_unavailable = Word32Equal(slot_id, Int32Constant(0)); | 501 Node* is_feedback_unavailable = Word32Equal(slot_id, Int32Constant(0)); |
| 491 GotoIf(is_feedback_unavailable, &call); | 502 GotoIf(is_feedback_unavailable, &call_without_feedback); |
| 492 | 503 |
| 493 // The checks. First, does function match the recorded monomorphic target? | 504 // The checks. First, does function match the recorded monomorphic target? |
| 494 Node* feedback_element = LoadFixedArrayElement(type_feedback_vector, slot_id); | 505 Node* feedback_element = LoadFixedArrayElement(type_feedback_vector, slot_id); |
| 495 Node* feedback_value = LoadWeakCellValue(feedback_element); | 506 Node* feedback_value = LoadWeakCellValue(feedback_element); |
| 496 Node* is_monomorphic = WordEqual(function, feedback_value); | 507 Node* is_monomorphic = WordEqual(function, feedback_value); |
| 497 BranchIf(is_monomorphic, &handle_monomorphic, &extra_checks); | 508 BranchIf(is_monomorphic, &handle_monomorphic, &extra_checks); |
| 498 | 509 |
| 499 Bind(&handle_monomorphic); | 510 Bind(&handle_monomorphic); |
| 500 { | 511 { |
| 501 // The compare above could have been a SMI/SMI comparison. Guard against | 512 // The compare above could have been a SMI/SMI comparison. Guard against |
| 502 // this convincing us that we have a monomorphic JSFunction. | 513 // this convincing us that we have a monomorphic JSFunction. |
| 503 Node* is_smi = WordIsSmi(function); | 514 Node* is_smi = WordIsSmi(function); |
| 504 GotoIf(is_smi, &extra_checks); | 515 GotoIf(is_smi, &extra_checks); |
| 505 | 516 |
| 506 // Increment the call count. | 517 // Increment the call count. |
| 507 Node* call_count_slot = IntPtrAdd(slot_id, IntPtrConstant(1)); | 518 IncrementCallCount(type_feedback_vector, slot_id); |
| 508 Node* call_count = | |
| 509 LoadFixedArrayElement(type_feedback_vector, call_count_slot); | |
| 510 Node* new_count = SmiAdd(call_count, SmiTag(Int32Constant(1))); | |
| 511 // Count is Smi, so we don't need a write barrier. | |
| 512 StoreFixedArrayElement(type_feedback_vector, call_count_slot, new_count, | |
| 513 SKIP_WRITE_BARRIER); | |
| 514 | 519 |
| 515 // Call using call function builtin. | 520 // Call using call function builtin. |
| 516 Callable callable = CodeFactory::InterpreterPushArgsAndCall( | 521 Callable callable = CodeFactory::InterpreterPushArgsAndCall( |
| 517 isolate(), tail_call_mode, CallableType::kJSFunction); | 522 isolate(), tail_call_mode, CallableType::kJSFunction); |
| 518 Node* code_target = HeapConstant(callable.code()); | 523 Node* code_target = HeapConstant(callable.code()); |
| 519 Node* ret_value = CallStub(callable.descriptor(), code_target, context, | 524 Node* ret_value = CallStub(callable.descriptor(), code_target, context, |
| 520 arg_count, first_arg, function); | 525 arg_count, first_arg, function); |
| 521 return_value.Bind(ret_value); | 526 return_value.Bind(ret_value); |
| 522 Goto(&end); | 527 Goto(&end); |
| 523 } | 528 } |
| (...skipping 17 matching lines...) |
| 541 GotoUnless(is_allocation_site, &check_initialized); | 546 GotoUnless(is_allocation_site, &check_initialized); |
| 542 | 547 |
| 543 // If it is not the Array() function, mark megamorphic. | 548 // If it is not the Array() function, mark megamorphic. |
| 544 Node* context_slot = | 549 Node* context_slot = |
| 545 LoadFixedArrayElement(LoadNativeContext(context), | 550 LoadFixedArrayElement(LoadNativeContext(context), |
| 546 Int32Constant(Context::ARRAY_FUNCTION_INDEX)); | 551 Int32Constant(Context::ARRAY_FUNCTION_INDEX)); |
| 547 Node* is_array_function = WordEqual(context_slot, function); | 552 Node* is_array_function = WordEqual(context_slot, function); |
| 548 GotoUnless(is_array_function, &mark_megamorphic); | 553 GotoUnless(is_array_function, &mark_megamorphic); |
| 549 | 554 |
| 550 // It is a monomorphic Array function. Increment the call count. | 555 // It is a monomorphic Array function. Increment the call count. |
| 551 Node* call_count_slot = IntPtrAdd(slot_id, IntPtrConstant(1)); | 556 IncrementCallCount(type_feedback_vector, slot_id); |
| 552 Node* call_count = | |
| 553 LoadFixedArrayElement(type_feedback_vector, call_count_slot); | |
| 554 Node* new_count = SmiAdd(call_count, SmiTag(Int32Constant(1))); | |
| 555 // Count is Smi, so we don't need a write barrier. | |
| 556 StoreFixedArrayElement(type_feedback_vector, call_count_slot, new_count, | |
| 557 SKIP_WRITE_BARRIER); | |
| 558 | 557 |
| 559 // Call ArrayConstructorStub. | 558 // Call ArrayConstructorStub. |
| 560 Callable callable_call = | 559 Callable callable_call = |
| 561 CodeFactory::InterpreterPushArgsAndConstructArray(isolate()); | 560 CodeFactory::InterpreterPushArgsAndConstructArray(isolate()); |
| 562 Node* code_target_call = HeapConstant(callable_call.code()); | 561 Node* code_target_call = HeapConstant(callable_call.code()); |
| 563 Node* ret_value = | 562 Node* ret_value = |
| 564 CallStub(callable_call.descriptor(), code_target_call, context, | 563 CallStub(callable_call.descriptor(), code_target_call, context, |
| 565 arg_count, function, feedback_element, first_arg); | 564 arg_count, function, feedback_element, first_arg); |
| 566 return_value.Bind(ret_value); | 565 return_value.Bind(ret_value); |
| 567 Goto(&end); | 566 Goto(&end); |
| (...skipping 24 matching lines...) |
| 592 Node* is_array_function = WordEqual(context_slot, function); | 591 Node* is_array_function = WordEqual(context_slot, function); |
| 593 GotoIf(is_array_function, &create_allocation_site); | 592 GotoIf(is_array_function, &create_allocation_site); |
| 594 | 593 |
| 595 // Check if the function belongs to the same native context | 594 // Check if the function belongs to the same native context |
| 596 Node* native_context = LoadNativeContext( | 595 Node* native_context = LoadNativeContext( |
| 597 LoadObjectField(function, JSFunction::kContextOffset)); | 596 LoadObjectField(function, JSFunction::kContextOffset)); |
| 598 Node* is_same_native_context = | 597 Node* is_same_native_context = |
| 599 WordEqual(native_context, LoadNativeContext(context)); | 598 WordEqual(native_context, LoadNativeContext(context)); |
| 600 GotoUnless(is_same_native_context, &mark_megamorphic); | 599 GotoUnless(is_same_native_context, &mark_megamorphic); |
| 601 | 600 |
| 602 // Initialize it to a monomorphic target. | |
| 603 Node* call_count_slot = IntPtrAdd(slot_id, IntPtrConstant(1)); | |
| 604 // Count is Smi, so we don't need a write barrier. | |
| 605 StoreFixedArrayElement(type_feedback_vector, call_count_slot, | |
| 606 SmiTag(Int32Constant(1)), SKIP_WRITE_BARRIER); | |
| 607 | |
| 608 CreateWeakCellInFeedbackVector(type_feedback_vector, SmiTag(slot_id), | 601 CreateWeakCellInFeedbackVector(type_feedback_vector, SmiTag(slot_id), |
| 609 function); | 602 function); |
| 610 | 603 |
| 611 // Call using call function builtin. | 604 // Call using call function builtin. |
| 612 Goto(&call_function); | 605 Goto(&call_function); |
| 613 } | 606 } |
| 614 | 607 |
| 615 Bind(&create_allocation_site); | 608 Bind(&create_allocation_site); |
| 616 { | 609 { |
| 617 // TODO(mythria): Inline the creation of the allocation site. | 610 // TODO(mythria): Inline the creation of the allocation site. |
| 618 CreateAllocationSiteStub create_stub(isolate()); | 611 CreateAllocationSiteStub create_stub(isolate()); |
| 619 CallStub(create_stub.GetCallInterfaceDescriptor(), | 612 CallStub(create_stub.GetCallInterfaceDescriptor(), |
| 620 HeapConstant(create_stub.GetCode()), context, | 613 HeapConstant(create_stub.GetCode()), context, |
| 621 type_feedback_vector, SmiTag(slot_id)); | 614 type_feedback_vector, SmiTag(slot_id)); |
| 622 | 615 |
| 623 // Initialize the count to 1. | |
| 624 Node* call_count_slot = IntPtrAdd(slot_id, IntPtrConstant(1)); | |
| 625 // Count is Smi, so we don't need a write barrier. | |
| 626 StoreFixedArrayElement(type_feedback_vector, call_count_slot, | |
| 627 SmiTag(Int32Constant(1)), SKIP_WRITE_BARRIER); | |
| 628 | |
| 629 // Call using CallFunction builtin. CallICs have a PREMONOMORPHIC state. | 616 // Call using CallFunction builtin. CallICs have a PREMONOMORPHIC state. |
| 630 // They start collecting feedback only when a call is executed the second | 617 // They start collecting feedback only when a call is executed the second |
| 631 // time. So, do not pass any feedback here. | 618 // time. So, do not pass any feedback here. |
| 632 Goto(&call_function); | 619 Goto(&call_function); |
| 633 } | 620 } |
| 634 | 621 |
| 635 Bind(&mark_megamorphic); | 622 Bind(&mark_megamorphic); |
| 636 { | 623 { |
| 637 // Mark it as a megamorphic. | 624 // Mark it as a megamorphic. |
| 638 // MegamorphicSentinel is created as a part of Heap::InitialObjects | 625 // MegamorphicSentinel is created as a part of Heap::InitialObjects |
| 639 // and will not move during a GC. So it is safe to skip write barrier. | 626 // and will not move during a GC. So it is safe to skip write barrier. |
| 640 DCHECK(Heap::RootIsImmortalImmovable(Heap::kmegamorphic_symbolRootIndex)); | 627 DCHECK(Heap::RootIsImmortalImmovable(Heap::kmegamorphic_symbolRootIndex)); |
| 641 StoreFixedArrayElement( | 628 StoreFixedArrayElement( |
| 642 type_feedback_vector, slot_id, | 629 type_feedback_vector, slot_id, |
| 643 HeapConstant(TypeFeedbackVector::MegamorphicSentinel(isolate())), | 630 HeapConstant(TypeFeedbackVector::MegamorphicSentinel(isolate())), |
| 644 SKIP_WRITE_BARRIER); | 631 SKIP_WRITE_BARRIER); |
| 645 Goto(&call); | 632 Goto(&call); |
| 646 } | 633 } |
| 647 } | 634 } |
| 648 | 635 |
| 649 Bind(&call_function); | 636 Bind(&call_function); |
| 650 { | 637 { |
| 638 // Increment the call count. |
| 639 IncrementCallCount(type_feedback_vector, slot_id); |
| 640 |
| 651 Callable callable_call = CodeFactory::InterpreterPushArgsAndCall( | 641 Callable callable_call = CodeFactory::InterpreterPushArgsAndCall( |
| 652 isolate(), tail_call_mode, CallableType::kJSFunction); | 642 isolate(), tail_call_mode, CallableType::kJSFunction); |
| 653 Node* code_target_call = HeapConstant(callable_call.code()); | 643 Node* code_target_call = HeapConstant(callable_call.code()); |
| 654 Node* ret_value = CallStub(callable_call.descriptor(), code_target_call, | 644 Node* ret_value = CallStub(callable_call.descriptor(), code_target_call, |
| 655 context, arg_count, first_arg, function); | 645 context, arg_count, first_arg, function); |
| 656 return_value.Bind(ret_value); | 646 return_value.Bind(ret_value); |
| 657 Goto(&end); | 647 Goto(&end); |
| 658 } | 648 } |
| 659 | 649 |
| 660 Bind(&call); | 650 Bind(&call); |
| 661 { | 651 { |
| 652 // Increment the call count. |
| 653 IncrementCallCount(type_feedback_vector, slot_id); |
| 654 |
| 655 // Call using call builtin. |
| 656 Callable callable_call = CodeFactory::InterpreterPushArgsAndCall( |
| 657 isolate(), tail_call_mode, CallableType::kAny); |
| 658 Node* code_target_call = HeapConstant(callable_call.code()); |
| 659 Node* ret_value = CallStub(callable_call.descriptor(), code_target_call, |
| 660 context, arg_count, first_arg, function); |
| 661 return_value.Bind(ret_value); |
| 662 Goto(&end); |
| 663 } |
| 664 |
| 665 Bind(&call_without_feedback); |
| 666 { |
| 662 // Call using call builtin. | 667 // Call using call builtin. |
| 663 Callable callable_call = CodeFactory::InterpreterPushArgsAndCall( | 668 Callable callable_call = CodeFactory::InterpreterPushArgsAndCall( |
| 664 isolate(), tail_call_mode, CallableType::kAny); | 669 isolate(), tail_call_mode, CallableType::kAny); |
| 665 Node* code_target_call = HeapConstant(callable_call.code()); | 670 Node* code_target_call = HeapConstant(callable_call.code()); |
| 666 Node* ret_value = CallStub(callable_call.descriptor(), code_target_call, | 671 Node* ret_value = CallStub(callable_call.descriptor(), code_target_call, |
| 667 context, arg_count, first_arg, function); | 672 context, arg_count, first_arg, function); |
| 668 return_value.Bind(ret_value); | 673 return_value.Bind(ret_value); |
| 669 Goto(&end); | 674 Goto(&end); |
| 670 } | 675 } |
| 671 | 676 |
| (...skipping 652 matching lines...) |
| 1324 Goto(&loop); | 1329 Goto(&loop); |
| 1325 } | 1330 } |
| 1326 Bind(&done_loop); | 1331 Bind(&done_loop); |
| 1327 | 1332 |
| 1328 return array; | 1333 return array; |
| 1329 } | 1334 } |
| 1330 | 1335 |
| 1331 } // namespace interpreter | 1336 } // namespace interpreter |
| 1332 } // namespace internal | 1337 } // namespace internal |
| 1333 } // namespace v8 | 1338 } // namespace v8 |