| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/v8.h" | 5 #include "src/v8.h" |
| 6 | 6 |
| 7 #if V8_TARGET_ARCH_MIPS64 | 7 #if V8_TARGET_ARCH_MIPS64 |
| 8 | 8 |
| 9 #include "src/bootstrapper.h" | 9 #include "src/bootstrapper.h" |
| 10 #include "src/code-stubs.h" | 10 #include "src/code-stubs.h" |
| (...skipping 2868 matching lines...) | |
| 2879 __ dsrl(at, a3, 32 - kPointerSizeLog2); | 2879 __ dsrl(at, a3, 32 - kPointerSizeLog2); |
| 2880 __ Daddu(at, a2, Operand(at)); | 2880 __ Daddu(at, a2, Operand(at)); |
| 2881 __ ld(a4, FieldMemOperand(at, FixedArray::kHeaderSize)); | 2881 __ ld(a4, FieldMemOperand(at, FixedArray::kHeaderSize)); |
| 2882 | 2882 |
| 2883 // Verify that a4 contains an AllocationSite | 2883 // Verify that a4 contains an AllocationSite |
| 2884 __ ld(a5, FieldMemOperand(a4, HeapObject::kMapOffset)); | 2884 __ ld(a5, FieldMemOperand(a4, HeapObject::kMapOffset)); |
| 2885 __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex); | 2885 __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex); |
| 2886 __ Branch(&miss, ne, a5, Operand(at)); | 2886 __ Branch(&miss, ne, a5, Operand(at)); |
| 2887 | 2887 |
| 2888 __ mov(a2, a4); | 2888 __ mov(a2, a4); |
| 2889 __ mov(a3, a1); | |
| 2890 ArrayConstructorStub stub(masm->isolate(), arg_count()); | 2889 ArrayConstructorStub stub(masm->isolate(), arg_count()); |
| 2891 __ TailCallStub(&stub); | 2890 __ TailCallStub(&stub); |
| 2892 | 2891 |
| 2893 __ bind(&miss); | 2892 __ bind(&miss); |
| 2894 GenerateMiss(masm); | 2893 GenerateMiss(masm); |
| 2895 | 2894 |
| 2896 // The slow case, we need this no matter what to complete a call after a miss. | 2895 // The slow case, we need this no matter what to complete a call after a miss. |
| 2897 CallFunctionNoFeedback(masm, | 2896 CallFunctionNoFeedback(masm, |
| 2898 arg_count(), | 2897 arg_count(), |
| 2899 true, | 2898 true, |
| (...skipping 1936 matching lines...) | |
| 4836 UNREACHABLE(); | 4835 UNREACHABLE(); |
| 4837 } | 4836 } |
| 4838 } | 4837 } |
| 4839 | 4838 |
| 4840 | 4839 |
| 4841 void ArrayConstructorStub::Generate(MacroAssembler* masm) { | 4840 void ArrayConstructorStub::Generate(MacroAssembler* masm) { |
| 4842 // ----------- S t a t e ------------- | 4841 // ----------- S t a t e ------------- |
| 4843 // -- a0 : argc (only if argument_count() == ANY) | 4842 // -- a0 : argc (only if argument_count() == ANY) |
| 4844 // -- a1 : constructor | 4843 // -- a1 : constructor |
| 4845 // -- a2 : AllocationSite or undefined | 4844 // -- a2 : AllocationSite or undefined |
| 4846 // -- a3 : original constructor | |
| 4847 // -- sp[0] : return address | 4845 // -- sp[0] : return address |
| 4848 // -- sp[4] : last argument | 4846 // -- sp[4] : last argument |
| 4849 // ----------------------------------- | 4847 // ----------------------------------- |
| 4850 | 4848 |
| 4851 if (FLAG_debug_code) { | 4849 if (FLAG_debug_code) { |
| 4852 // The array construct code is only set for the global and natives | 4850 // The array construct code is only set for the global and natives |
| 4853 // builtin Array functions which always have maps. | 4851 // builtin Array functions which always have maps. |
| 4854 | 4852 |
| 4855 // Initial map for the builtin Array function should be a map. | 4853 // Initial map for the builtin Array function should be a map. |
| 4856 __ ld(a4, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset)); | 4854 __ ld(a4, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset)); |
| 4857 // Will both indicate a NULL and a Smi. | 4855 // Will both indicate a NULL and a Smi. |
| 4858 __ SmiTst(a4, at); | 4856 __ SmiTst(a4, at); |
| 4859 __ Assert(ne, kUnexpectedInitialMapForArrayFunction, | 4857 __ Assert(ne, kUnexpectedInitialMapForArrayFunction, |
| 4860 at, Operand(zero_reg)); | 4858 at, Operand(zero_reg)); |
| 4861 __ GetObjectType(a4, a4, a5); | 4859 __ GetObjectType(a4, a4, a5); |
| 4862 __ Assert(eq, kUnexpectedInitialMapForArrayFunction, | 4860 __ Assert(eq, kUnexpectedInitialMapForArrayFunction, |
| 4863 a5, Operand(MAP_TYPE)); | 4861 a5, Operand(MAP_TYPE)); |
| 4864 | 4862 |
| 4865 // We should either have undefined in a2 or a valid AllocationSite | 4863 // We should either have undefined in a2 or a valid AllocationSite |
| 4866 __ AssertUndefinedOrAllocationSite(a2, a4); | 4864 __ AssertUndefinedOrAllocationSite(a2, a4); |
| 4867 } | 4865 } |
| 4868 | 4866 |
| 4869 Label subclassing; | |
| 4870 __ Branch(&subclassing, ne, a1, Operand(a3)); | |
| 4871 | |
| 4872 Label no_info; | 4867 Label no_info; |
| 4873 // Get the elements kind and case on that. | 4868 // Get the elements kind and case on that. |
| 4874 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); | 4869 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); |
| 4875 __ Branch(&no_info, eq, a2, Operand(at)); | 4870 __ Branch(&no_info, eq, a2, Operand(at)); |
| 4876 | 4871 |
| 4877 __ ld(a3, FieldMemOperand(a2, AllocationSite::kTransitionInfoOffset)); | 4872 __ ld(a3, FieldMemOperand(a2, AllocationSite::kTransitionInfoOffset)); |
| 4878 __ SmiUntag(a3); | 4873 __ SmiUntag(a3); |
| 4879 STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0); | 4874 STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0); |
| 4880 __ And(a3, a3, Operand(AllocationSite::ElementsKindBits::kMask)); | 4875 __ And(a3, a3, Operand(AllocationSite::ElementsKindBits::kMask)); |
| 4881 GenerateDispatchToArrayStub(masm, DONT_OVERRIDE); | 4876 GenerateDispatchToArrayStub(masm, DONT_OVERRIDE); |
| 4882 | 4877 |
| 4883 __ bind(&no_info); | 4878 __ bind(&no_info); |
| 4884 GenerateDispatchToArrayStub(masm, DISABLE_ALLOCATION_SITES); | 4879 GenerateDispatchToArrayStub(masm, DISABLE_ALLOCATION_SITES); |
| 4885 | |
| 4886 __ bind(&subclassing); | |
| 4887 __ TailCallRuntime(Runtime::kThrowArrayNotSubclassableError, 0, 1); | |
| 4888 } | 4880 } |
| 4889 | 4881 |
| 4890 | 4882 |
| 4891 void InternalArrayConstructorStub::GenerateCase( | 4883 void InternalArrayConstructorStub::GenerateCase( |
| 4892 MacroAssembler* masm, ElementsKind kind) { | 4884 MacroAssembler* masm, ElementsKind kind) { |
| 4893 | 4885 |
| 4894 InternalArrayNoArgumentConstructorStub stub0(isolate(), kind); | 4886 InternalArrayNoArgumentConstructorStub stub0(isolate(), kind); |
| 4895 __ TailCallStub(&stub0, lo, a0, Operand(1)); | 4887 __ TailCallStub(&stub0, lo, a0, Operand(1)); |
| 4896 | 4888 |
| 4897 InternalArrayNArgumentsConstructorStub stubN(isolate(), kind); | 4889 InternalArrayNArgumentsConstructorStub stubN(isolate(), kind); |
| (...skipping 367 matching lines...) | |
| 5265 kStackUnwindSpace, kInvalidStackOffset, | 5257 kStackUnwindSpace, kInvalidStackOffset, |
| 5266 MemOperand(fp, 6 * kPointerSize), NULL); | 5258 MemOperand(fp, 6 * kPointerSize), NULL); |
| 5267 } | 5259 } |
| 5268 | 5260 |
| 5269 | 5261 |
| 5270 #undef __ | 5262 #undef __ |
| 5271 | 5263 |
| 5272 } } // namespace v8::internal | 5264 } } // namespace v8::internal |
| 5273 | 5265 |
| 5274 #endif // V8_TARGET_ARCH_MIPS64 | 5266 #endif // V8_TARGET_ARCH_MIPS64 |
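For readers skimming the diff, here is a minimal standalone sketch (not V8 code) of the dispatch that `ArrayConstructorStub::Generate` performs in the NEW column: when the feedback slot in a2 holds undefined, the stub takes the `no_info` path with allocation sites disabled; otherwise it decodes the elements kind from `AllocationSite::kTransitionInfoOffset` and dispatches normally. The mask width, type names, and helper functions below are illustrative assumptions, not the real V8 definitions.

```cpp
// Standalone sketch modeling the control flow of ArrayConstructorStub::Generate
// as shown above. Names and the field width are simplified stand-ins.
#include <cstdint>
#include <iostream>

enum class DispatchMode { DONT_OVERRIDE, DISABLE_ALLOCATION_SITES };

// AllocationSite::ElementsKindBits starts at bit 0 in the stub
// (STATIC_ASSERT(kShift == 0)), so decoding is a plain mask.
constexpr uint32_t kElementsKindMask = 0x1F;  // assumption: illustrative width

uint32_t DecodeElementsKind(uint32_t transition_info) {
  return transition_info & kElementsKindMask;
}

// Undefined feedback -> the no_info path with allocation sites disabled;
// otherwise decode the elements kind and dispatch without overriding it.
DispatchMode ChooseDispatch(bool feedback_is_undefined,
                            uint32_t transition_info,
                            uint32_t* elements_kind_out) {
  if (feedback_is_undefined) {
    return DispatchMode::DISABLE_ALLOCATION_SITES;  // __ bind(&no_info)
  }
  *elements_kind_out = DecodeElementsKind(transition_info);
  return DispatchMode::DONT_OVERRIDE;
}

int main() {
  uint32_t kind = 0;
  DispatchMode mode = ChooseDispatch(false, /*transition_info=*/0x07, &kind);
  std::cout << "mode=" << static_cast<int>(mode) << " kind=" << kind << "\n";
  return 0;
}
```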