| OLD | NEW |
| 1 // Copyright 2015 the V8 project authors. All rights reserved. | 1 // Copyright 2015 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/interpreter/interpreter-assembler.h" | 5 #include "src/interpreter/interpreter-assembler.h" |
| 6 | 6 |
| 7 #include <limits> | 7 #include <limits> |
| 8 #include <ostream> | 8 #include <ostream> |
| 9 | 9 |
| 10 #include "src/code-factory.h" | 10 #include "src/code-factory.h" |
| (...skipping 433 matching lines...) | |
| 444 Bytecodes::GetOperandType(bytecode_, operand_index)); | 444 Bytecodes::GetOperandType(bytecode_, operand_index)); |
| 445 OperandSize operand_size = | 445 OperandSize operand_size = |
| 446 Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale()); | 446 Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale()); |
| 447 DCHECK_EQ(operand_size, OperandSize::kByte); | 447 DCHECK_EQ(operand_size, OperandSize::kByte); |
| 448 return BytecodeUnsignedOperand(operand_index, operand_size); | 448 return BytecodeUnsignedOperand(operand_index, operand_size); |
| 449 } | 449 } |
| 450 | 450 |
| 451 Node* InterpreterAssembler::LoadConstantPoolEntry(Node* index) { | 451 Node* InterpreterAssembler::LoadConstantPoolEntry(Node* index) { |
| 452 Node* constant_pool = LoadObjectField(BytecodeArrayTaggedPointer(), | 452 Node* constant_pool = LoadObjectField(BytecodeArrayTaggedPointer(), |
| 453 BytecodeArray::kConstantPoolOffset); | 453 BytecodeArray::kConstantPoolOffset); |
| 454 return LoadFixedArrayElement(constant_pool, index, 0, INTPTR_PARAMETERS); | 454 return LoadFixedArrayElement(constant_pool, index); |
| 455 } | 455 } |
| 456 | 456 |
| 457 Node* InterpreterAssembler::LoadAndUntagConstantPoolEntry(Node* index) { | 457 Node* InterpreterAssembler::LoadAndUntagConstantPoolEntry(Node* index) { |
| 458 return SmiUntag(LoadConstantPoolEntry(index)); | 458 return SmiUntag(LoadConstantPoolEntry(index)); |
| 459 } | 459 } |
| 460 | 460 |
| 461 Node* InterpreterAssembler::LoadTypeFeedbackVector() { | 461 Node* InterpreterAssembler::LoadTypeFeedbackVector() { |
| 462 Node* function = LoadRegister(Register::function_closure()); | 462 Node* function = LoadRegister(Register::function_closure()); |
| 463 Node* literals = LoadObjectField(function, JSFunction::kLiteralsOffset); | 463 Node* literals = LoadObjectField(function, JSFunction::kLiteralsOffset); |
| 464 Node* vector = | 464 Node* vector = |
| (...skipping 18 matching lines...) | |
| 483 stack_pointer_before_call_ = nullptr; | 483 stack_pointer_before_call_ = nullptr; |
| 484 AbortIfWordNotEqual(stack_pointer_before_call, stack_pointer_after_call, | 484 AbortIfWordNotEqual(stack_pointer_before_call, stack_pointer_after_call, |
| 485 kUnexpectedStackPointer); | 485 kUnexpectedStackPointer); |
| 486 } | 486 } |
| 487 } | 487 } |
| 488 | 488 |
| 489 Node* InterpreterAssembler::IncrementCallCount(Node* type_feedback_vector, | 489 Node* InterpreterAssembler::IncrementCallCount(Node* type_feedback_vector, |
| 490 Node* slot_id) { | 490 Node* slot_id) { |
| 491 Comment("increment call count"); | 491 Comment("increment call count"); |
| 492 Node* call_count_slot = IntPtrAdd(slot_id, IntPtrConstant(1)); | 492 Node* call_count_slot = IntPtrAdd(slot_id, IntPtrConstant(1)); |
| 493 Node* call_count = LoadFixedArrayElement( | 493 Node* call_count = |
| 494 type_feedback_vector, call_count_slot, 0, INTPTR_PARAMETERS); | 494 LoadFixedArrayElement(type_feedback_vector, call_count_slot); |
| 495 Node* new_count = SmiAdd(call_count, SmiConstant(1)); | 495 Node* new_count = SmiAdd(call_count, SmiConstant(1)); |
| 496 // Count is Smi, so we don't need a write barrier. | 496 // Count is Smi, so we don't need a write barrier. |
| 497 return StoreFixedArrayElement(type_feedback_vector, call_count_slot, | 497 return StoreFixedArrayElement(type_feedback_vector, call_count_slot, |
| 498 new_count, SKIP_WRITE_BARRIER, 0, | 498 new_count, SKIP_WRITE_BARRIER); |
| 499 INTPTR_PARAMETERS); | |
| 500 } | 499 } |
| 501 | 500 |
| 502 Node* InterpreterAssembler::CallJSWithFeedback(Node* function, Node* context, | 501 Node* InterpreterAssembler::CallJSWithFeedback(Node* function, Node* context, |
| 503 Node* first_arg, Node* arg_count, | 502 Node* first_arg, Node* arg_count, |
| 504 Node* slot_id, | 503 Node* slot_id, |
| 505 Node* type_feedback_vector, | 504 Node* type_feedback_vector, |
| 506 TailCallMode tail_call_mode) { | 505 TailCallMode tail_call_mode) { |
| 507 // Static checks to assert it is safe to examine the type feedback element. | 506 // Static checks to assert it is safe to examine the type feedback element. |
| 508 // We don't know that we have a weak cell. We might have a private symbol | 507 // We don't know that we have a weak cell. We might have a private symbol |
| 509 // or an AllocationSite, but the memory is safe to examine. | 508 // or an AllocationSite, but the memory is safe to examine. |
| 510 // AllocationSite::kTransitionInfoOffset - contains a Smi or pointer to | 509 // AllocationSite::kTransitionInfoOffset - contains a Smi or pointer to |
| 511 // FixedArray. | 510 // FixedArray. |
| 512 // WeakCell::kValueOffset - contains a JSFunction or Smi(0) | 511 // WeakCell::kValueOffset - contains a JSFunction or Smi(0) |
| 513 // Symbol::kHashFieldSlot - if the low bit is 1, then the hash is not | 512 // Symbol::kHashFieldSlot - if the low bit is 1, then the hash is not |
| 514 // computed, meaning that it can't appear to be a pointer. If the low bit is | 513 // computed, meaning that it can't appear to be a pointer. If the low bit is |
| 515 // 0, then hash is computed, but the 0 bit prevents the field from appearing | 514 // 0, then hash is computed, but the 0 bit prevents the field from appearing |
| 516 // to be a pointer. | 515 // to be a pointer. |
| 517 STATIC_ASSERT(WeakCell::kSize >= kPointerSize); | 516 STATIC_ASSERT(WeakCell::kSize >= kPointerSize); |
| 518 STATIC_ASSERT(AllocationSite::kTransitionInfoOffset == | 517 STATIC_ASSERT(AllocationSite::kTransitionInfoOffset == |
| 519 WeakCell::kValueOffset && | 518 WeakCell::kValueOffset && |
| 520 WeakCell::kValueOffset == Symbol::kHashFieldSlot); | 519 WeakCell::kValueOffset == Symbol::kHashFieldSlot); |
| 521 | 520 |
| 522 Variable return_value(this, MachineRepresentation::kTagged); | 521 Variable return_value(this, MachineRepresentation::kTagged); |
| 523 Label call_function(this), extra_checks(this, Label::kDeferred), call(this), | 522 Label call_function(this), extra_checks(this, Label::kDeferred), call(this), |
| 524 end(this); | 523 end(this); |
| 525 | 524 |
| 526 // The checks. First, does function match the recorded monomorphic target? | 525 // The checks. First, does function match the recorded monomorphic target? |
| 527 Node* feedback_element = LoadFixedArrayElement(type_feedback_vector, slot_id, | 526 Node* feedback_element = LoadFixedArrayElement(type_feedback_vector, slot_id); |
| 528 0, INTPTR_PARAMETERS); | |
| 529 Node* feedback_value = LoadWeakCellValueUnchecked(feedback_element); | 527 Node* feedback_value = LoadWeakCellValueUnchecked(feedback_element); |
| 530 Node* is_monomorphic = WordEqual(function, feedback_value); | 528 Node* is_monomorphic = WordEqual(function, feedback_value); |
| 531 GotoUnless(is_monomorphic, &extra_checks); | 529 GotoUnless(is_monomorphic, &extra_checks); |
| 532 | 530 |
| 533 // The compare above could have been a SMI/SMI comparison. Guard against | 531 // The compare above could have been a SMI/SMI comparison. Guard against |
| 534 // this convincing us that we have a monomorphic JSFunction. | 532 // this convincing us that we have a monomorphic JSFunction. |
| 535 Node* is_smi = TaggedIsSmi(function); | 533 Node* is_smi = TaggedIsSmi(function); |
| 536 Branch(is_smi, &extra_checks, &call_function); | 534 Branch(is_smi, &extra_checks, &call_function); |
| 537 | 535 |
| 538 Bind(&call_function); | 536 Bind(&call_function); |
| (...skipping 99 matching lines...) | |
| 638 | 636 |
| 639 Bind(&mark_megamorphic); | 637 Bind(&mark_megamorphic); |
| 640 { | 638 { |
| 641 // Mark it as megamorphic. | 639 // Mark it as megamorphic. |
| 642 // MegamorphicSentinel is created as a part of Heap::InitialObjects | 640 // MegamorphicSentinel is created as a part of Heap::InitialObjects |
| 643 // and will not move during a GC. So it is safe to skip write barrier. | 641 // and will not move during a GC. So it is safe to skip write barrier. |
| 644 DCHECK(Heap::RootIsImmortalImmovable(Heap::kmegamorphic_symbolRootIndex)); | 642 DCHECK(Heap::RootIsImmortalImmovable(Heap::kmegamorphic_symbolRootIndex)); |
| 645 StoreFixedArrayElement( | 643 StoreFixedArrayElement( |
| 646 type_feedback_vector, slot_id, | 644 type_feedback_vector, slot_id, |
| 647 HeapConstant(TypeFeedbackVector::MegamorphicSentinel(isolate())), | 645 HeapConstant(TypeFeedbackVector::MegamorphicSentinel(isolate())), |
| 648 SKIP_WRITE_BARRIER, 0, INTPTR_PARAMETERS); | 646 SKIP_WRITE_BARRIER); |
| 649 Goto(&call); | 647 Goto(&call); |
| 650 } | 648 } |
| 651 } | 649 } |
| 652 | 650 |
| 653 Bind(&call); | 651 Bind(&call); |
| 654 { | 652 { |
| 655 Comment("Increment call count and call using Call builtin"); | 653 Comment("Increment call count and call using Call builtin"); |
| 656 // Increment the call count. | 654 // Increment the call count. |
| 657 IncrementCallCount(type_feedback_vector, slot_id); | 655 IncrementCallCount(type_feedback_vector, slot_id); |
| 658 | 656 |
| (...skipping 40 matching lines...) | |
| 699 Node* is_smi = TaggedIsSmi(constructor); | 697 Node* is_smi = TaggedIsSmi(constructor); |
| 700 GotoIf(is_smi, &call_construct); | 698 GotoIf(is_smi, &call_construct); |
| 701 | 699 |
| 702 // Check that constructor is a JSFunction. | 700 // Check that constructor is a JSFunction. |
| 703 Node* instance_type = LoadInstanceType(constructor); | 701 Node* instance_type = LoadInstanceType(constructor); |
| 704 Node* is_js_function = | 702 Node* is_js_function = |
| 705 Word32Equal(instance_type, Int32Constant(JS_FUNCTION_TYPE)); | 703 Word32Equal(instance_type, Int32Constant(JS_FUNCTION_TYPE)); |
| 706 GotoUnless(is_js_function, &call_construct); | 704 GotoUnless(is_js_function, &call_construct); |
| 707 | 705 |
| 708 // Check if it is a monomorphic constructor. | 706 // Check if it is a monomorphic constructor. |
| 709 Node* feedback_element = LoadFixedArrayElement(type_feedback_vector, slot_id, | 707 Node* feedback_element = LoadFixedArrayElement(type_feedback_vector, slot_id); |
| 710 0, INTPTR_PARAMETERS); | |
| 711 Node* feedback_value = LoadWeakCellValueUnchecked(feedback_element); | 708 Node* feedback_value = LoadWeakCellValueUnchecked(feedback_element); |
| 712 Node* is_monomorphic = WordEqual(constructor, feedback_value); | 709 Node* is_monomorphic = WordEqual(constructor, feedback_value); |
| 713 allocation_feedback.Bind(UndefinedConstant()); | 710 allocation_feedback.Bind(UndefinedConstant()); |
| 714 Branch(is_monomorphic, &call_construct_function, &extra_checks); | 711 Branch(is_monomorphic, &call_construct_function, &extra_checks); |
| 715 | 712 |
| 716 Bind(&call_construct_function); | 713 Bind(&call_construct_function); |
| 717 { | 714 { |
| 718 Comment("call using callConstructFunction"); | 715 Comment("call using callConstructFunction"); |
| 719 IncrementCallCount(type_feedback_vector, slot_id); | 716 IncrementCallCount(type_feedback_vector, slot_id); |
| 720 Callable callable_function = CodeFactory::InterpreterPushArgsAndConstruct( | 717 Callable callable_function = CodeFactory::InterpreterPushArgsAndConstruct( |
| (...skipping 84 matching lines...) | |
| 805 | 802 |
| 806 Bind(&mark_megamorphic); | 803 Bind(&mark_megamorphic); |
| 807 { | 804 { |
| 808 // MegamorphicSentinel is an immortal immovable object so | 805 // MegamorphicSentinel is an immortal immovable object so |
| 809 // write-barrier is not needed. | 806 // write-barrier is not needed. |
| 810 Comment("transition to megamorphic"); | 807 Comment("transition to megamorphic"); |
| 811 DCHECK(Heap::RootIsImmortalImmovable(Heap::kmegamorphic_symbolRootIndex)); | 808 DCHECK(Heap::RootIsImmortalImmovable(Heap::kmegamorphic_symbolRootIndex)); |
| 812 StoreFixedArrayElement( | 809 StoreFixedArrayElement( |
| 813 type_feedback_vector, slot_id, | 810 type_feedback_vector, slot_id, |
| 814 HeapConstant(TypeFeedbackVector::MegamorphicSentinel(isolate())), | 811 HeapConstant(TypeFeedbackVector::MegamorphicSentinel(isolate())), |
| 815 SKIP_WRITE_BARRIER, 0, INTPTR_PARAMETERS); | 812 SKIP_WRITE_BARRIER); |
| 816 Goto(&call_construct_function); | 813 Goto(&call_construct_function); |
| 817 } | 814 } |
| 818 } | 815 } |
| 819 | 816 |
| 820 Bind(&call_construct); | 817 Bind(&call_construct); |
| 821 { | 818 { |
| 822 Comment("call using callConstruct builtin"); | 819 Comment("call using callConstruct builtin"); |
| 823 Callable callable = CodeFactory::InterpreterPushArgsAndConstruct( | 820 Callable callable = CodeFactory::InterpreterPushArgsAndConstruct( |
| 824 isolate(), CallableType::kAny); | 821 isolate(), CallableType::kAny); |
| 825 Node* code_target = HeapConstant(callable.code()); | 822 Node* code_target = HeapConstant(callable.code()); |
| (...skipping 430 matching lines...) | |
| 1256 Label loop(this, &var_index), done_loop(this); | 1253 Label loop(this, &var_index), done_loop(this); |
| 1257 Goto(&loop); | 1254 Goto(&loop); |
| 1258 Bind(&loop); | 1255 Bind(&loop); |
| 1259 { | 1256 { |
| 1260 Node* index = var_index.value(); | 1257 Node* index = var_index.value(); |
| 1261 GotoUnless(UintPtrLessThan(index, register_count), &done_loop); | 1258 GotoUnless(UintPtrLessThan(index, register_count), &done_loop); |
| 1262 | 1259 |
| 1263 Node* reg_index = IntPtrSub(IntPtrConstant(Register(0).ToOperand()), index); | 1260 Node* reg_index = IntPtrSub(IntPtrConstant(Register(0).ToOperand()), index); |
| 1264 Node* value = LoadRegister(reg_index); | 1261 Node* value = LoadRegister(reg_index); |
| 1265 | 1262 |
| 1266 StoreFixedArrayElement(array, index, value, UPDATE_WRITE_BARRIER, 0, | 1263 StoreFixedArrayElement(array, index, value); |
| 1267 INTPTR_PARAMETERS); | |
| 1268 | 1264 |
| 1269 var_index.Bind(IntPtrAdd(index, IntPtrConstant(1))); | 1265 var_index.Bind(IntPtrAdd(index, IntPtrConstant(1))); |
| 1270 Goto(&loop); | 1266 Goto(&loop); |
| 1271 } | 1267 } |
| 1272 Bind(&done_loop); | 1268 Bind(&done_loop); |
| 1273 | 1269 |
| 1274 return array; | 1270 return array; |
| 1275 } | 1271 } |
| 1276 | 1272 |
| 1277 Node* InterpreterAssembler::ImportRegisterFile(Node* array) { | 1273 Node* InterpreterAssembler::ImportRegisterFile(Node* array) { |
| 1278 Node* register_count = RegisterCount(); | 1274 Node* register_count = RegisterCount(); |
| 1279 if (FLAG_debug_code) { | 1275 if (FLAG_debug_code) { |
| 1280 Node* array_size = LoadAndUntagFixedArrayBaseLength(array); | 1276 Node* array_size = LoadAndUntagFixedArrayBaseLength(array); |
| 1281 AbortIfWordNotEqual(array_size, register_count, | 1277 AbortIfWordNotEqual(array_size, register_count, |
| 1282 kInvalidRegisterFileInGenerator); | 1278 kInvalidRegisterFileInGenerator); |
| 1283 } | 1279 } |
| 1284 | 1280 |
| 1285 Variable var_index(this, MachineType::PointerRepresentation()); | 1281 Variable var_index(this, MachineType::PointerRepresentation()); |
| 1286 var_index.Bind(IntPtrConstant(0)); | 1282 var_index.Bind(IntPtrConstant(0)); |
| 1287 | 1283 |
| 1288 // Iterate over array and write values into register file. Also erase the | 1284 // Iterate over array and write values into register file. Also erase the |
| 1289 // array contents to not keep them alive artificially. | 1285 // array contents to not keep them alive artificially. |
| 1290 Label loop(this, &var_index), done_loop(this); | 1286 Label loop(this, &var_index), done_loop(this); |
| 1291 Goto(&loop); | 1287 Goto(&loop); |
| 1292 Bind(&loop); | 1288 Bind(&loop); |
| 1293 { | 1289 { |
| 1294 Node* index = var_index.value(); | 1290 Node* index = var_index.value(); |
| 1295 GotoUnless(UintPtrLessThan(index, register_count), &done_loop); | 1291 GotoUnless(UintPtrLessThan(index, register_count), &done_loop); |
| 1296 | 1292 |
| 1297 Node* value = LoadFixedArrayElement(array, index, 0, INTPTR_PARAMETERS); | 1293 Node* value = LoadFixedArrayElement(array, index); |
| 1298 | 1294 |
| 1299 Node* reg_index = IntPtrSub(IntPtrConstant(Register(0).ToOperand()), index); | 1295 Node* reg_index = IntPtrSub(IntPtrConstant(Register(0).ToOperand()), index); |
| 1300 StoreRegister(value, reg_index); | 1296 StoreRegister(value, reg_index); |
| 1301 | 1297 |
| 1302 StoreFixedArrayElement(array, index, StaleRegisterConstant(), | 1298 StoreFixedArrayElement(array, index, StaleRegisterConstant()); |
| 1303 UPDATE_WRITE_BARRIER, 0, INTPTR_PARAMETERS); | |
| 1304 | 1299 |
| 1305 var_index.Bind(IntPtrAdd(index, IntPtrConstant(1))); | 1300 var_index.Bind(IntPtrAdd(index, IntPtrConstant(1))); |
| 1306 Goto(&loop); | 1301 Goto(&loop); |
| 1307 } | 1302 } |
| 1308 Bind(&done_loop); | 1303 Bind(&done_loop); |
| 1309 | 1304 |
| 1310 return array; | 1305 return array; |
| 1311 } | 1306 } |
| 1312 | 1307 |
| 1313 } // namespace interpreter | 1308 } // namespace interpreter |
| 1314 } // namespace internal | 1309 } // namespace internal |
| 1315 } // namespace v8 | 1310 } // namespace v8 |
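
Note on the recurring change in this CL: call sites of LoadFixedArrayElement and StoreFixedArrayElement (new lines 454, 494, 526, 646, 707, 812, 1263, 1293, 1298) drop their trailing 0, INTPTR_PARAMETERS arguments, and the stores at new lines 1263 and 1298 also drop UPDATE_WRITE_BARRIER. The shortened calls only stay behavior-preserving if those values became the helpers' defaults; the actual CodeStubAssembler declarations are not part of this diff, so the snippet below is a minimal, self-contained illustration of that default-argument refactoring with stand-in types, not V8's real API.

// Stand-in types and helpers, not V8 code: the values that used to be passed
// explicitly are assumed to have become the default arguments.
#include <cassert>
#include <cstdint>
#include <vector>

enum ParameterMode { SMI_PARAMETERS, INTPTR_PARAMETERS };
enum WriteBarrierMode { SKIP_WRITE_BARRIER, UPDATE_WRITE_BARRIER };

// Stand-in for LoadFixedArrayElement: defaults mirror the dropped arguments.
int64_t LoadFixedArrayElement(const std::vector<int64_t>& array, size_t index,
                              int additional_offset = 0,
                              ParameterMode mode = INTPTR_PARAMETERS) {
  assert(mode == INTPTR_PARAMETERS);
  return array[index + additional_offset];
}

// Stand-in for StoreFixedArrayElement: UPDATE_WRITE_BARRIER is assumed to be a
// default too, which is why new line 1263 can drop it while new line 497 still
// spells out SKIP_WRITE_BARRIER.
void StoreFixedArrayElement(std::vector<int64_t>& array, size_t index,
                            int64_t value,
                            WriteBarrierMode barrier_mode = UPDATE_WRITE_BARRIER,
                            int additional_offset = 0,
                            ParameterMode mode = INTPTR_PARAMETERS) {
  (void)barrier_mode;  // irrelevant for a plain std::vector
  assert(mode == INTPTR_PARAMETERS);
  array[index + additional_offset] = value;
}

int main() {
  std::vector<int64_t> pool = {10, 20, 30};
  // Old and new spellings of the load are equivalent.
  assert(LoadFixedArrayElement(pool, 1, 0, INTPTR_PARAMETERS) ==
         LoadFixedArrayElement(pool, 1));
  // Old and new spellings of the store are equivalent.
  StoreFixedArrayElement(pool, 2, 42, UPDATE_WRITE_BARRIER, 0, INTPTR_PARAMETERS);
  StoreFixedArrayElement(pool, 0, 42);
  assert(pool[0] == 42 && pool[2] == 42);
  return 0;
}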
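
On IncrementCallCount (new lines 489-498): the per-slot counter is kept as a Smi, which is why the store passes SKIP_WRITE_BARRIER — a Smi is a small integer tagged into the word, not a heap pointer, so the GC needs no remembered-set entry for it. The sketch below models Smi arithmetic with the classic "value << 1" layout purely for illustration; it is not necessarily V8's tagging configuration on every platform.

// Self-contained sketch of Smi tagging and why SKIP_WRITE_BARRIER is safe here.
#include <cassert>
#include <cstdint>

using Tagged = intptr_t;

Tagged SmiConstant(intptr_t value) { return value << 1; }  // low bit 0 => Smi
intptr_t SmiUntag(Tagged smi) { return smi >> 1; }
Tagged SmiAdd(Tagged a, Tagged b) { return SmiConstant(SmiUntag(a) + SmiUntag(b)); }

int main() {
  Tagged call_count = SmiConstant(7);
  Tagged new_count = SmiAdd(call_count, SmiConstant(1));
  assert(SmiUntag(new_count) == 8);
  // The stored word is still a Smi, not a pointer into the heap, which is
  // what makes skipping the write barrier safe for this store.
  assert((new_count & 1) == 0);
  return 0;
}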
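
On the monomorphic fast-path guard in CallJSWithFeedback (new lines 526-534): after the raw WordEqual(function, feedback_value) comparison, the code still checks TaggedIsSmi(function), because two Smis with identical bits also compare equal without either being a cached JSFunction. A small self-contained sketch of that false positive, again with an illustrative tagging scheme:

// Why the extra TaggedIsSmi check is needed after WordEqual.
#include <cassert>
#include <cstdint>

using Tagged = intptr_t;

Tagged SmiTag(intptr_t value) { return value << 1; }   // low bit 0 = Smi
bool TaggedIsSmi(Tagged t) { return (t & 1) == 0; }

int main() {
  Tagged function = SmiTag(0);        // the callee happens to be a Smi
  Tagged feedback_value = SmiTag(0);  // e.g. a cleared feedback value
  bool is_monomorphic = (function == feedback_value);  // WordEqual analogue
  assert(is_monomorphic);         // the raw comparison alone would say "yes"...
  assert(TaggedIsSmi(function));  // ...so the Smi guard routes to extra_checks.
  return 0;
}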
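
On ExportRegisterFile/ImportRegisterFile (around new lines 1253-1303): array slot `index` maps to register operand Register(0).ToOperand() - index, and the import pass overwrites each array slot with StaleRegisterConstant() so the exported values are not kept alive artificially. The round trip below models that mapping; the base operand and the stale sentinel are made-up values for the illustration, not what a real interpreter frame uses.

// Round-trip sketch of the export/import index mapping and stale-out step.
#include <cassert>
#include <cstdint>
#include <map>
#include <vector>

constexpr intptr_t kRegister0Operand = -1;       // hypothetical
constexpr intptr_t kStaleRegister = INTPTR_MIN;  // hypothetical sentinel

int main() {
  std::map<intptr_t, intptr_t> registers = {{-1, 11}, {-2, 22}, {-3, 33}};
  const intptr_t register_count = 3;

  // Export: array[index] = register at operand Register(0).ToOperand() - index.
  std::vector<intptr_t> array(register_count);
  for (intptr_t index = 0; index < register_count; ++index) {
    array[index] = registers[kRegister0Operand - index];
  }

  // Import: write the values back and stale-out each array slot.
  std::map<intptr_t, intptr_t> restored;
  for (intptr_t index = 0; index < register_count; ++index) {
    restored[kRegister0Operand - index] = array[index];
    array[index] = kStaleRegister;
  }

  assert(restored == registers);
  for (intptr_t slot : array) assert(slot == kStaleRegister);
  return 0;
}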