| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #if V8_TARGET_ARCH_ARM64 | 5 #if V8_TARGET_ARCH_ARM64 |
| 6 | 6 |
| 7 #include "src/code-stubs.h" | 7 #include "src/code-stubs.h" |
| 8 #include "src/api-arguments.h" | 8 #include "src/api-arguments.h" |
| 9 #include "src/bootstrapper.h" | 9 #include "src/bootstrapper.h" |
| 10 #include "src/codegen.h" | 10 #include "src/codegen.h" |
| (...skipping 4658 matching lines...) |
| 4669 } | 4669 } |
| 4670 __ B(&done_allocate); | 4670 __ B(&done_allocate); |
| 4671 | 4671 |
| 4672 // Fall back to %NewStrictArguments. | 4672 // Fall back to %NewStrictArguments. |
| 4673 __ Bind(&too_big_for_new_space); | 4673 __ Bind(&too_big_for_new_space); |
| 4674 __ Push(x1); | 4674 __ Push(x1); |
| 4675 __ TailCallRuntime(Runtime::kNewStrictArguments); | 4675 __ TailCallRuntime(Runtime::kNewStrictArguments); |
| 4676 } | 4676 } |
| 4677 | 4677 |
| 4678 | 4678 |
| 4679 void StoreGlobalViaContextStub::Generate(MacroAssembler* masm) { | |
| 4680 Register context = cp; | |
| 4681 Register value = x0; | |
| 4682 Register slot = x2; | |
| 4683 Register context_temp = x10; | |
| 4684 Register cell = x10; | |
| 4685 Register cell_details = x11; | |
| 4686 Register cell_value = x12; | |
| 4687 Register cell_value_map = x13; | |
| 4688 Register value_map = x14; | |
| 4689 Label fast_heapobject_case, fast_smi_case, slow_case; | |
| 4690 | |
| 4691 if (FLAG_debug_code) { | |
| 4692 __ CompareRoot(value, Heap::kTheHoleValueRootIndex); | |
| 4693 __ Check(ne, kUnexpectedValue); | |
| 4694 } | |
| 4695 | |
| 4696 // Go up the context chain to the script context. | |
| 4697 for (int i = 0; i < depth(); i++) { | |
| 4698 __ Ldr(context_temp, ContextMemOperand(context, Context::PREVIOUS_INDEX)); | |
| 4699 context = context_temp; | |
| 4700 } | |
| 4701 | |
| 4702 // Load the PropertyCell at the specified slot. | |
| 4703 __ Add(cell, context, Operand(slot, LSL, kPointerSizeLog2)); | |
| 4704 __ Ldr(cell, ContextMemOperand(cell)); | |
| 4705 | |
| 4706 // Load PropertyDetails for the cell (actually only the cell_type and kind). | |
| 4707 __ Ldr(cell_details, | |
| 4708 UntagSmiFieldMemOperand(cell, PropertyCell::kDetailsOffset)); | |
| 4709 __ And(cell_details, cell_details, | |
| 4710 PropertyDetails::PropertyCellTypeField::kMask | | |
| 4711 PropertyDetails::KindField::kMask | | |
| 4712 PropertyDetails::kAttributesReadOnlyMask); | |
| 4713 | |
| 4714 // Check if PropertyCell holds mutable data. | |
| 4715 Label not_mutable_data; | |
| 4716 __ Cmp(cell_details, PropertyDetails::PropertyCellTypeField::encode( | |
| 4717 PropertyCellType::kMutable) | | |
| 4718 PropertyDetails::KindField::encode(kData)); | |
| 4719 __ B(ne, &not_mutable_data); | |
| 4720 __ JumpIfSmi(value, &fast_smi_case); | |
| 4721 __ Bind(&fast_heapobject_case); | |
| 4722 __ Str(value, FieldMemOperand(cell, PropertyCell::kValueOffset)); | |
| 4723 // RecordWriteField clobbers the value register, so we copy it before the | |
| 4724 // call. | |
| 4725 __ Mov(x11, value); | |
| 4726 __ RecordWriteField(cell, PropertyCell::kValueOffset, x11, x12, | |
| 4727 kLRHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET, | |
| 4728 OMIT_SMI_CHECK); | |
| 4729 __ Ret(); | |
| 4730 | |
| 4731 __ Bind(&not_mutable_data); | |
| 4732 // Check if PropertyCell value matches the new value (relevant for Constant, | |
| 4733 // ConstantType and Undefined cells). | |
| 4734 Label not_same_value; | |
| 4735 __ Ldr(cell_value, FieldMemOperand(cell, PropertyCell::kValueOffset)); | |
| 4736 __ Cmp(cell_value, value); | |
| 4737 __ B(ne, &not_same_value); | |
| 4738 | |
| 4739 // Make sure the PropertyCell is not marked READ_ONLY. | |
| 4740 __ Tst(cell_details, PropertyDetails::kAttributesReadOnlyMask); | |
| 4741 __ B(ne, &slow_case); | |
| 4742 | |
| 4743 if (FLAG_debug_code) { | |
| 4744 Label done; | |
| 4745 // This can only be true for Constant, ConstantType and Undefined cells, | |
| 4746 // because we never store the_hole via this stub. | |
| 4747 __ Cmp(cell_details, PropertyDetails::PropertyCellTypeField::encode( | |
| 4748 PropertyCellType::kConstant) | | |
| 4749 PropertyDetails::KindField::encode(kData)); | |
| 4750 __ B(eq, &done); | |
| 4751 __ Cmp(cell_details, PropertyDetails::PropertyCellTypeField::encode( | |
| 4752 PropertyCellType::kConstantType) | | |
| 4753 PropertyDetails::KindField::encode(kData)); | |
| 4754 __ B(eq, &done); | |
| 4755 __ Cmp(cell_details, PropertyDetails::PropertyCellTypeField::encode( | |
| 4756 PropertyCellType::kUndefined) | | |
| 4757 PropertyDetails::KindField::encode(kData)); | |
| 4758 __ Check(eq, kUnexpectedValue); | |
| 4759 __ Bind(&done); | |
| 4760 } | |
| 4761 __ Ret(); | |
| 4762 __ Bind(&not_same_value); | |
| 4763 | |
| 4764 // Check if PropertyCell contains data with constant type (and is not | |
| 4765 // READ_ONLY). | |
| 4766 __ Cmp(cell_details, PropertyDetails::PropertyCellTypeField::encode( | |
| 4767 PropertyCellType::kConstantType) | | |
| 4768 PropertyDetails::KindField::encode(kData)); | |
| 4769 __ B(ne, &slow_case); | |
| 4770 | |
| 4771 // Now either both old and new values must be smis or both must be heap | |
| 4772 // objects with same map. | |
| 4773 Label value_is_heap_object; | |
| 4774 __ JumpIfNotSmi(value, &value_is_heap_object); | |
| 4775 __ JumpIfNotSmi(cell_value, &slow_case); | |
| 4776 // Old and new values are smis, no need for a write barrier here. | |
| 4777 __ Bind(&fast_smi_case); | |
| 4778 __ Str(value, FieldMemOperand(cell, PropertyCell::kValueOffset)); | |
| 4779 __ Ret(); | |
| 4780 | |
| 4781 __ Bind(&value_is_heap_object); | |
| 4782 __ JumpIfSmi(cell_value, &slow_case); | |
| 4783 | |
| 4784 __ Ldr(cell_value_map, FieldMemOperand(cell_value, HeapObject::kMapOffset)); | |
| 4785 __ Ldr(value_map, FieldMemOperand(value, HeapObject::kMapOffset)); | |
| 4786 __ Cmp(cell_value_map, value_map); | |
| 4787 __ B(eq, &fast_heapobject_case); | |
| 4788 | |
| 4789 // Fall back to the runtime. | |
| 4790 __ Bind(&slow_case); | |
| 4791 __ SmiTag(slot); | |
| 4792 __ Push(slot, value); | |
| 4793 __ TailCallRuntime(is_strict(language_mode()) | |
| 4794 ? Runtime::kStoreGlobalViaContext_Strict | |
| 4795 : Runtime::kStoreGlobalViaContext_Sloppy); | |
| 4796 } | |
| 4797 | |
| 4798 | |
| 4799 // The number of registers that CallApiFunctionAndReturn will need to save on | 4679 // The number of registers that CallApiFunctionAndReturn will need to save on |
| 4800 // the stack. The space for these registers needs to be allocated in the | 4680 // the stack. The space for these registers needs to be allocated in the |
| 4801 // ExitFrame before calling CallApiFunctionAndReturn. | 4681 // ExitFrame before calling CallApiFunctionAndReturn. |
| 4802 static const int kCallApiFunctionSpillSpace = 4; | 4682 static const int kCallApiFunctionSpillSpace = 4; |
| 4803 | 4683 |
| 4804 | 4684 |
| 4805 static int AddressOffset(ExternalReference ref0, ExternalReference ref1) { | 4685 static int AddressOffset(ExternalReference ref0, ExternalReference ref1) { |
| 4806 return static_cast<int>(ref0.address() - ref1.address()); | 4686 return static_cast<int>(ref0.address() - ref1.address()); |
| 4807 } | 4687 } |
| 4808 | 4688 |
| (...skipping 328 matching lines...) |
| 5137 kStackUnwindSpace, NULL, spill_offset, | 5017 kStackUnwindSpace, NULL, spill_offset, |
| 5138 return_value_operand, NULL); | 5018 return_value_operand, NULL); |
| 5139 } | 5019 } |
| 5140 | 5020 |
| 5141 #undef __ | 5021 #undef __ |
| 5142 | 5022 |
| 5143 } // namespace internal | 5023 } // namespace internal |
| 5144 } // namespace v8 | 5024 } // namespace v8 |
| 5145 | 5025 |
| 5146 #endif // V8_TARGET_ARCH_ARM64 | 5026 #endif // V8_TARGET_ARCH_ARM64 |
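
A hedged, simplified C++ sketch of the decision logic implemented by the StoreGlobalViaContextStub::Generate code deleted above (the ARM64 assembly it emitted walks the same fast/slow-path branches). The types and helpers below (PropertyCellModel, ValueModel, TryFastStore) are illustrative stand-ins and not part of V8's API.

// Simplified model -- not V8 code. Illustrates the fast/slow path choice the
// removed stub implemented in generated ARM64 assembly.
#include <cstdint>

enum class CellType { kMutable, kConstant, kConstantType, kUndefined };

struct ValueModel {
  bool is_smi;               // small integer vs. heap object
  intptr_t smi;              // valid when is_smi
  const void* heap_object;   // valid when !is_smi
  const void* map;           // object's "map" (hidden class), valid when !is_smi
};

struct PropertyCellModel {
  CellType type;
  bool read_only;      // PropertyDetails::kAttributesReadOnlyMask
  ValueModel value;    // what lives at PropertyCell::kValueOffset
};

static bool SameValue(const ValueModel& a, const ValueModel& b) {
  // The stub compared the raw tagged words: identical smi or identical object.
  if (a.is_smi != b.is_smi) return false;
  return a.is_smi ? a.smi == b.smi : a.heap_object == b.heap_object;
}

// Returns true when the store can complete on the stub's fast path; false
// corresponds to tail-calling Runtime::kStoreGlobalViaContext_{Strict,Sloppy}.
bool TryFastStore(PropertyCellModel* cell, const ValueModel& value) {
  // Mutable data cells: store directly (the stub also emitted a write
  // barrier when the new value is a heap object).
  if (cell->type == CellType::kMutable && !cell->read_only) {
    cell->value = value;
    return true;
  }
  // Re-storing the value the cell already holds is a no-op, unless the cell
  // is marked READ_ONLY.
  if (SameValue(cell->value, value)) return !cell->read_only;
  // Otherwise only ConstantType cells accept a new value, and only when the
  // old and new values have the same shape: both smis, or both heap objects
  // with the same map.
  if (cell->type == CellType::kConstantType && !cell->read_only) {
    if (cell->value.is_smi && value.is_smi) {
      cell->value = value;
      return true;
    }
    if (!cell->value.is_smi && !value.is_smi && cell->value.map == value.map) {
      cell->value = value;
      return true;
    }
  }
  return false;  // slow path: defer to the runtime
}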