Index: src/a64/code-stubs-a64.cc
diff --git a/src/a64/code-stubs-a64.cc b/src/a64/code-stubs-a64.cc
index 036e59a1833b531429beaaec4a79461acaf02c93..bf6ab321b5939b3a25b2c3e39d8cce6094c0f650 100644
--- a/src/a64/code-stubs-a64.cc
+++ b/src/a64/code-stubs-a64.cc
@@ -3195,8 +3195,6 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
             masm->isolate()->heap()->undefined_value());
   ASSERT_EQ(*TypeFeedbackInfo::UninitializedSentinel(masm->isolate()),
             masm->isolate()->heap()->the_hole_value());
-  ASSERT_EQ(*TypeFeedbackInfo::PremonomorphicSentinel(masm->isolate()),
-            masm->isolate()->heap()->null_value());
 
   // Load the cache state.
   __ Add(x4, x2, Operand::UntagSmiAndScale(x3, kPointerSizeLog2));
@@ -3224,22 +3222,7 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
 
   // A monomorphic miss (i.e, here the cache is not uninitialized) goes
   // megamorphic.
-  Label not_uninitialized;
-  __ JumpIfNotRoot(x4, Heap::kTheHoleValueRootIndex, &not_uninitialized);
-
-  // PremonomorphicSentinel is an immortal immovable object (null) so no
-  // write-barrier is needed.
-  __ Add(x4, x2, Operand::UntagSmiAndScale(x3, kPointerSizeLog2));
-  __ LoadRoot(x10, Heap::kNullValueRootIndex);
-  __ Str(x10, FieldMemOperand(x4, FixedArray::kHeaderSize));
-  __ B(&done);
-
-  // If the cache isn't uninitialized, it is either premonomorphic or
-  // monomorphic. If it is premonomorphic, we initialize it thus making
-  // it monomorphic. Otherwise, we go megamorphic.
-  __ Bind(&not_uninitialized);
-  __ JumpIfRoot(x4, Heap::kNullValueRootIndex, &initialize);
-
+  __ JumpIfRoot(x4, Heap::kTheHoleValueRootIndex, &initialize);
   // MegamorphicSentinel is an immortal immovable object (undefined) so no
   // write-barrier is needed.
   __ Bind(&megamorphic);
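
For orientation only, and not part of the patch: a minimal C++ sketch of the feedback-slot state machine that the patched GenerateRecordCallTarget stub implements. The names CacheState, Slot and RecordCallTarget are hypothetical and do not exist in V8; the sketch only models the transitions, assuming the-hole plays the role of the uninitialized sentinel and undefined the megamorphic sentinel, with the premonomorphic (null) state removed as in this diff.

// Conceptual sketch of the post-patch cache-state transitions (hypothetical
// names, not V8 API): uninitialized -> monomorphic on first call, and a
// monomorphic miss goes straight to megamorphic.
#include <cstdio>

enum class CacheState {
  kUninitialized,  // the-hole sentinel
  kMonomorphic,    // a specific cached callee
  kMegamorphic     // undefined sentinel
};

struct Slot {
  CacheState state = CacheState::kUninitialized;
  const void* target = nullptr;  // cached callee when monomorphic
};

void RecordCallTarget(Slot& slot, const void* callee) {
  switch (slot.state) {
    case CacheState::kUninitialized:
      // Initialize directly; no intermediate premonomorphic step anymore.
      slot.state = CacheState::kMonomorphic;
      slot.target = callee;
      break;
    case CacheState::kMonomorphic:
      if (slot.target != callee) {
        // Monomorphic miss: go megamorphic.
        slot.state = CacheState::kMegamorphic;
        slot.target = nullptr;
      }
      break;
    case CacheState::kMegamorphic:
      break;  // already megamorphic; nothing to record
  }
}

int main() {
  Slot slot;
  int f = 0, g = 0;
  RecordCallTarget(slot, &f);  // uninitialized -> monomorphic on f
  RecordCallTarget(slot, &f);  // still monomorphic
  RecordCallTarget(slot, &g);  // monomorphic miss -> megamorphic
  std::printf("final state: %d\n", static_cast<int>(slot.state));
  return 0;
}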