OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 3321 matching lines...)
3332 result_size_ == 1; | 3332 result_size_ == 1; |
3333 } | 3333 } |
3334 | 3334 |
3335 | 3335 |
3336 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) { | 3336 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) { |
3337 CEntryStub::GenerateAheadOfTime(isolate); | 3337 CEntryStub::GenerateAheadOfTime(isolate); |
3338 WriteInt32ToHeapNumberStub::GenerateFixedRegStubsAheadOfTime(isolate); | 3338 WriteInt32ToHeapNumberStub::GenerateFixedRegStubsAheadOfTime(isolate); |
3339 StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate); | 3339 StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate); |
3340 StubFailureTrampolineStub::GenerateAheadOfTime(isolate); | 3340 StubFailureTrampolineStub::GenerateAheadOfTime(isolate); |
3341 RecordWriteStub::GenerateFixedRegStubsAheadOfTime(isolate); | 3341 RecordWriteStub::GenerateFixedRegStubsAheadOfTime(isolate); |
3342 if (FLAG_optimize_constructed_arrays) { | 3342 ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate); |
3343 ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate); | |
3344 } | |
3345 } | 3343 } |
3346 | 3344 |
3347 | 3345 |
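The ahead-of-time generation above is essentially memoization: a stub that is already baked into the snapshot is reused rather than regenerated (see the comment in GenerateFPStubs below). A minimal standalone sketch of that idea, with hypothetical types standing in for the real V8 classes:

    #include <map>
    #include <string>

    // Hypothetical stand-in for V8's stub cache; not the real API.
    struct Code { std::string body; };

    // Generate each stub at most once: a cache hit means the code was
    // already in the "snapshot", so regenerating it (and resetting its
    // initialization state) is avoided.
    const Code& GetOrGenerateStub(std::map<std::string, Code>& cache,
                                  const std::string& key) {
      auto it = cache.find(key);
      if (it == cache.end()) {
        it = cache.emplace(key, Code{"generated: " + key}).first;
      }
      return it->second;
    }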
3348 void CodeStub::GenerateFPStubs(Isolate* isolate) { | 3346 void CodeStub::GenerateFPStubs(Isolate* isolate) { |
3349 SaveFPRegsMode mode = kSaveFPRegs; | 3347 SaveFPRegsMode mode = kSaveFPRegs; |
3350 CEntryStub save_doubles(1, mode); | 3348 CEntryStub save_doubles(1, mode); |
3351 StoreBufferOverflowStub stub(mode); | 3349 StoreBufferOverflowStub stub(mode); |
3352 // These stubs might already be in the snapshot; detect that and don't | 3350 // These stubs might already be in the snapshot; detect that and don't |
3353 // regenerate them, which would leave the code stub initialization | 3351 // regenerate them, which would leave the code stub initialization |
3354 // state inconsistent. | 3352 // state inconsistent. |
(...skipping 1656 matching lines...)
5011 __ addiu(a3, a3, kPointerSize); // In branch delay slot. | 5009 __ addiu(a3, a3, kPointerSize); // In branch delay slot. |
5012 | 5010 |
5013 __ bind(&done); | 5011 __ bind(&done); |
5014 __ DropAndRet(3); | 5012 __ DropAndRet(3); |
5015 | 5013 |
5016 __ bind(&slowcase); | 5014 __ bind(&slowcase); |
5017 __ TailCallRuntime(Runtime::kRegExpConstructResult, 3, 1); | 5015 __ TailCallRuntime(Runtime::kRegExpConstructResult, 3, 1); |
5018 } | 5016 } |
5019 | 5017 |
5020 | 5018 |
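The &slowcase exit above shows the convention used by most stubs in this file: run a hand-written fast path, and bail out to the C++ runtime with a tail call (here Runtime::kRegExpConstructResult, with 3 arguments and 1 result) when an assumption fails. Roughly, in plain C++ with illustrative names that are not V8's:

    struct Result { int length; };

    // Stand-in for the C++ runtime function reached via TailCallRuntime.
    Result RuntimeConstructResult(int length) { return Result{length}; }

    Result ConstructResult(int length, bool fits_in_new_space) {
      if (fits_in_new_space) {
        return Result{length};                // fast path, built inline
      }
      return RuntimeConstructResult(length);  // slowcase: defer to the runtime
    }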
5021 static void GenerateRecordCallTargetNoArray(MacroAssembler* masm) { | |
5022 // Cache the called function in a global property cell. Cache states | |
5023 // are uninitialized, monomorphic (indicated by a JSFunction), and | |
5024 // megamorphic. | |
5025 // a1 : the function to call | |
5026 // a2 : cache cell for call target | |
5027 Label done; | |
5028 | |
5029 ASSERT_EQ(*TypeFeedbackCells::MegamorphicSentinel(masm->isolate()), | |
5030 masm->isolate()->heap()->undefined_value()); | |
5031 ASSERT_EQ(*TypeFeedbackCells::UninitializedSentinel(masm->isolate()), | |
5032 masm->isolate()->heap()->the_hole_value()); | |
5033 | |
5034 // Load the cache state into a3. | |
5035 __ lw(a3, FieldMemOperand(a2, Cell::kValueOffset)); | |
5036 | |
5037 // A monomorphic cache hit or an already megamorphic state: invoke the | |
5038 // function without changing the state. | |
5039 __ Branch(&done, eq, a3, Operand(a1)); | |
5040 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); | |
5041 __ Branch(&done, eq, a3, Operand(at)); | |
5042 | |
5043 // A monomorphic miss (i.e., here the cache is not uninitialized) goes |
5044 // megamorphic. | |
5045 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); | |
5046 | |
5047 __ Branch(USE_DELAY_SLOT, &done, eq, a3, Operand(at)); | |
5048 // An uninitialized cache is patched with the function. | |
5049 // Store a1 in the delay slot. This may or may not get overwritten depending | |
5050 // on the result of the comparison. | |
5051 __ sw(a1, FieldMemOperand(a2, Cell::kValueOffset)); | |
5052 // No need for a write barrier here - cells are rescanned. | |
5053 | |
5054 // MegamorphicSentinel is an immortal immovable object (undefined), so no |
5055 // write barrier is needed. |
5056 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); | |
5057 __ sw(at, FieldMemOperand(a2, Cell::kValueOffset)); | |
5058 | |
5059 __ bind(&done); | |
5060 } | |
5061 | |
5062 | |
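The comments above describe a three-state feedback machine, which the following GenerateRecordCallTarget implements as well: the cell starts uninitialized (the hole), is patched to the callee on the first call (monomorphic), and collapses to the megamorphic sentinel (undefined) as soon as a different callee is seen. A standalone sketch of those transitions, with hypothetical types standing in for cells and sentinels:

    // Sketch of the cache-state transitions; the real code stores heap
    // sentinels (the hole / undefined) in a property cell instead of an enum.
    enum class CacheState { kUninitialized, kMonomorphic, kMegamorphic };

    struct FeedbackCell {
      CacheState state = CacheState::kUninitialized;
      const void* target = nullptr;  // the cached JSFunction when monomorphic
    };

    void RecordCallTarget(FeedbackCell& cell, const void* function) {
      switch (cell.state) {
        case CacheState::kUninitialized:        // first call: go monomorphic
          cell.state = CacheState::kMonomorphic;
          cell.target = function;
          break;
        case CacheState::kMonomorphic:
          if (cell.target != function) {        // miss: degrade for good
            cell.state = CacheState::kMegamorphic;
            cell.target = nullptr;
          }
          break;
        case CacheState::kMegamorphic:          // terminal state, no change
          break;
      }
    }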
5063 static void GenerateRecordCallTarget(MacroAssembler* masm) { | 5019 static void GenerateRecordCallTarget(MacroAssembler* masm) { |
5064 // Cache the called function in a global property cell. Cache states | 5020 // Cache the called function in a global property cell. Cache states |
5065 // are uninitialized, monomorphic (indicated by a JSFunction), and | 5021 // are uninitialized, monomorphic (indicated by a JSFunction), and |
5066 // megamorphic. | 5022 // megamorphic. |
5067 // a1 : the function to call | 5023 // a1 : the function to call |
5068 // a2 : cache cell for call target | 5024 // a2 : cache cell for call target |
5069 ASSERT(FLAG_optimize_constructed_arrays); | |
5070 Label initialize, done, miss, megamorphic, not_array_function; | 5025 Label initialize, done, miss, megamorphic, not_array_function; |
5071 | 5026 |
5072 ASSERT_EQ(*TypeFeedbackCells::MegamorphicSentinel(masm->isolate()), | 5027 ASSERT_EQ(*TypeFeedbackCells::MegamorphicSentinel(masm->isolate()), |
5073 masm->isolate()->heap()->undefined_value()); | 5028 masm->isolate()->heap()->undefined_value()); |
5074 ASSERT_EQ(*TypeFeedbackCells::UninitializedSentinel(masm->isolate()), | 5029 ASSERT_EQ(*TypeFeedbackCells::UninitializedSentinel(masm->isolate()), |
5075 masm->isolate()->heap()->the_hole_value()); | 5030 masm->isolate()->heap()->the_hole_value()); |
5076 | 5031 |
5077 // Load the cache state into a3. | 5032 // Load the cache state into a3. |
5078 __ lw(a3, FieldMemOperand(a2, Cell::kValueOffset)); | 5033 __ lw(a3, FieldMemOperand(a2, Cell::kValueOffset)); |
5079 | 5034 |
(...skipping 79 matching lines...)
5159 } | 5114 } |
5160 | 5115 |
5161 // Check that the function is really a JavaScript function. | 5116 // Check that the function is really a JavaScript function. |
5162 // a1: pushed function (to be verified) | 5117 // a1: pushed function (to be verified) |
5163 __ JumpIfSmi(a1, &non_function); | 5118 __ JumpIfSmi(a1, &non_function); |
5164 // Get the map of the function object. | 5119 // Get the map of the function object. |
5165 __ GetObjectType(a1, a3, a3); | 5120 __ GetObjectType(a1, a3, a3); |
5166 __ Branch(&slow, ne, a3, Operand(JS_FUNCTION_TYPE)); | 5121 __ Branch(&slow, ne, a3, Operand(JS_FUNCTION_TYPE)); |
5167 | 5122 |
5168 if (RecordCallTarget()) { | 5123 if (RecordCallTarget()) { |
5169 if (FLAG_optimize_constructed_arrays) { | 5124 GenerateRecordCallTarget(masm); |
5170 GenerateRecordCallTarget(masm); | |
5171 } else { | |
5172 GenerateRecordCallTargetNoArray(masm); | |
5173 } | |
5174 } | 5125 } |
5175 | 5126 |
5176 // Fast-case: Invoke the function now. | 5127 // Fast-case: Invoke the function now. |
5177 // a1: pushed function | 5128 // a1: pushed function |
5178 ParameterCount actual(argc_); | 5129 ParameterCount actual(argc_); |
5179 | 5130 |
5180 if (ReceiverMightBeImplicit()) { | 5131 if (ReceiverMightBeImplicit()) { |
5181 Label call_as_function; | 5132 Label call_as_function; |
5182 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); | 5133 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); |
5183 __ Branch(&call_as_function, eq, t0, Operand(at)); | 5134 __ Branch(&call_as_function, eq, t0, Operand(at)); |
(...skipping 53 matching lines...)
5237 // a2 : cache cell for call target | 5188 // a2 : cache cell for call target |
5238 Label slow, non_function_call; | 5189 Label slow, non_function_call; |
5239 | 5190 |
5240 // Check that the function is not a smi. | 5191 // Check that the function is not a smi. |
5241 __ JumpIfSmi(a1, &non_function_call); | 5192 __ JumpIfSmi(a1, &non_function_call); |
5242 // Check that the function is a JSFunction. | 5193 // Check that the function is a JSFunction. |
5243 __ GetObjectType(a1, a3, a3); | 5194 __ GetObjectType(a1, a3, a3); |
5244 __ Branch(&slow, ne, a3, Operand(JS_FUNCTION_TYPE)); | 5195 __ Branch(&slow, ne, a3, Operand(JS_FUNCTION_TYPE)); |
5245 | 5196 |
5246 if (RecordCallTarget()) { | 5197 if (RecordCallTarget()) { |
5247 if (FLAG_optimize_constructed_arrays) { | 5198 GenerateRecordCallTarget(masm); |
5248 GenerateRecordCallTarget(masm); | |
5249 } else { | |
5250 GenerateRecordCallTargetNoArray(masm); | |
5251 } | |
5252 } | 5199 } |
5253 | 5200 |
5254 // Jump to the function-specific construct stub. | 5201 // Jump to the function-specific construct stub. |
5255 Register jmp_reg = FLAG_optimize_constructed_arrays ? a3 : a2; | 5202 Register jmp_reg = a3; |
5256 __ lw(jmp_reg, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); | 5203 __ lw(jmp_reg, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); |
5257 __ lw(jmp_reg, FieldMemOperand(jmp_reg, | 5204 __ lw(jmp_reg, FieldMemOperand(jmp_reg, |
5258 SharedFunctionInfo::kConstructStubOffset)); | 5205 SharedFunctionInfo::kConstructStubOffset)); |
5259 __ Addu(at, jmp_reg, Operand(Code::kHeaderSize - kHeapObjectTag)); | 5206 __ Addu(at, jmp_reg, Operand(Code::kHeaderSize - kHeapObjectTag)); |
5260 __ Jump(at); | 5207 __ Jump(at); |
5261 | 5208 |
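The jump sequence above is a pointer chase plus an address fix-up: function, then its SharedFunctionInfo, then the construct-stub Code object, and finally skip the Code header and strip the heap-object tag to land on the first instruction. A sketch with illustrative struct layouts (not V8's real object layout):

    // Hypothetical layouts; only the shape of the loads matters here.
    struct CodeObj { char header[64]; };          // Code::kHeaderSize stand-in
    struct SharedInfo { const CodeObj* construct_stub; };
    struct JSFunc { const SharedInfo* shared; };

    const unsigned char* ConstructStubEntry(const JSFunc* f) {
      const CodeObj* code = f->shared->construct_stub;  // two dependent loads
      // Entry point = code object address + header size; the real sequence
      // also folds in -kHeapObjectTag to strip the pointer tag.
      return reinterpret_cast<const unsigned char*>(code) + sizeof(CodeObj);
    }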
5262 // a0: number of arguments | 5209 // a0: number of arguments |
5263 // a1: called object | 5210 // a1: called object |
5264 // a3: object type | 5211 // a3: object type |
5265 Label do_call; | 5212 Label do_call; |
(...skipping 2500 matching lines...)
7766 // We should either have undefined in a2 or a valid cell | 7713 // We should either have undefined in a2 or a valid cell |
7767 Label okay_here; | 7714 Label okay_here; |
7768 Handle<Map> cell_map = masm->isolate()->factory()->cell_map(); | 7715 Handle<Map> cell_map = masm->isolate()->factory()->cell_map(); |
7769 __ Branch(&okay_here, eq, a2, Operand(undefined_sentinel)); | 7716 __ Branch(&okay_here, eq, a2, Operand(undefined_sentinel)); |
7770 __ lw(a3, FieldMemOperand(a2, 0)); | 7717 __ lw(a3, FieldMemOperand(a2, 0)); |
7771 __ Assert(eq, "Expected property cell in register a2", | 7718 __ Assert(eq, "Expected property cell in register a2", |
7772 a3, Operand(cell_map)); | 7719 a3, Operand(cell_map)); |
7773 __ bind(&okay_here); | 7720 __ bind(&okay_here); |
7774 } | 7721 } |
7775 | 7722 |
7776 if (FLAG_optimize_constructed_arrays) { | 7723 Label no_info, switch_ready; |
7777 Label no_info, switch_ready; | 7724 // Get the elements kind and case on that. |
7778 // Get the elements kind and case on that. | 7725 __ Branch(&no_info, eq, a2, Operand(undefined_sentinel)); |
7779 __ Branch(&no_info, eq, a2, Operand(undefined_sentinel)); | 7726 __ lw(a3, FieldMemOperand(a2, Cell::kValueOffset)); |
7780 __ lw(a3, FieldMemOperand(a2, PropertyCell::kValueOffset)); | 7727 __ JumpIfNotSmi(a3, &no_info); |
7781 __ JumpIfNotSmi(a3, &no_info); | 7728 __ SmiUntag(a3); |
7782 __ SmiUntag(a3); | 7729 __ jmp(&switch_ready); |
7783 __ jmp(&switch_ready); | 7730 __ bind(&no_info); |
7784 __ bind(&no_info); | 7731 __ li(a3, Operand(GetInitialFastElementsKind())); |
7785 __ li(a3, Operand(GetInitialFastElementsKind())); | 7732 __ bind(&switch_ready); |
7786 __ bind(&switch_ready); | |
7787 | 7733 |
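The block above decodes the type-feedback cell: if no cell was passed (the undefined sentinel) or its value is not a Smi, the constructor falls back to GetInitialFastElementsKind(); otherwise the Smi is untagged into the elements kind. A sketch with hypothetical types:

    // Sketch of the feedback decode. On 32-bit targets a Smi is an integer
    // tagged with a low 0 bit, so "untag" is an arithmetic shift right by 1.
    constexpr int kInitialFastElementsKind = 0;  // illustrative value

    int ElementsKindFromCell(const int* cell_value /* null == undefined */) {
      if (cell_value == nullptr) return kInitialFastElementsKind;  // no_info
      int v = *cell_value;
      if ((v & 1) != 0) return kInitialFastElementsKind;  // not a Smi
      return v >> 1;                                      // SmiUntag
    }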
7788 if (argument_count_ == ANY) { | 7734 if (argument_count_ == ANY) { |
7789 Label not_zero_case, not_one_case; | 7735 Label not_zero_case, not_one_case; |
7790 __ And(at, a0, a0); | 7736 __ And(at, a0, a0); |
7791 __ Branch(¬_zero_case, ne, at, Operand(zero_reg)); | 7737 __ Branch(¬_zero_case, ne, at, Operand(zero_reg)); |
7792 CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm); | 7738 CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm); |
7793 | 7739 |
7794 __ bind(¬_zero_case); | 7740 __ bind(¬_zero_case); |
7795 __ Branch(¬_one_case, gt, a0, Operand(1)); | 7741 __ Branch(¬_one_case, gt, a0, Operand(1)); |
7796 CreateArrayDispatchOneArgument(masm); | 7742 CreateArrayDispatchOneArgument(masm); |
7797 | 7743 |
7798 __ bind(¬_one_case); | 7744 __ bind(¬_one_case); |
7799 CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm); | 7745 CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm); |
7800 } else if (argument_count_ == NONE) { | 7746 } else if (argument_count_ == NONE) { |
7801 CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm); | 7747 CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm); |
7802 } else if (argument_count_ == ONE) { | 7748 } else if (argument_count_ == ONE) { |
7803 CreateArrayDispatchOneArgument(masm); | 7749 CreateArrayDispatchOneArgument(masm); |
7804 } else if (argument_count_ == MORE_THAN_ONE) { | 7750 } else if (argument_count_ == MORE_THAN_ONE) { |
7805 CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm); | 7751 CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm); |
7806 } else { | |
7807 UNREACHABLE(); | |
7808 } | |
7809 } else { | 7752 } else { |
7810 Label generic_constructor; | 7753 UNREACHABLE(); |
7811 // Run the native code for the Array function called as a constructor. | |
7812 ArrayNativeCode(masm, &generic_constructor); | |
7813 | |
7814 // Jump to the generic construct code in case the specialized code cannot | |
7815 // handle the construction. | |
7816 __ bind(&generic_constructor); | |
7817 Handle<Code> generic_construct_stub = | |
7818 masm->isolate()->builtins()->JSConstructStubGeneric(); | |
7819 __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET); | |
7820 } | 7754 } |
7821 } | 7755 } |
7822 | 7756 |
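The stub above is compiled in one of four flavors and picks a specialized sub-stub by argument count: a dedicated zero-argument stub, a one-argument stub, and an N-argument stub, with the ANY flavor testing a0 at runtime. The control flow, as a hedged sketch with hypothetical helpers for the sub-stubs:

    enum ArgumentCount { NONE, ONE, MORE_THAN_ONE, ANY };
    void DispatchNoArgument() {}   // stand-ins for the generated sub-stubs
    void DispatchOneArgument() {}
    void DispatchNArguments() {}

    void DispatchArrayConstructor(ArgumentCount flavor, int argc) {
      if (flavor == ANY) {                   // runtime test on argc (a0)
        if (argc == 0)      DispatchNoArgument();
        else if (argc == 1) DispatchOneArgument();
        else                DispatchNArguments();
      } else if (flavor == NONE) {
        DispatchNoArgument();
      } else if (flavor == ONE) {
        DispatchOneArgument();
      } else {                               // MORE_THAN_ONE
        DispatchNArguments();
      }
    }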
7823 | 7757 |
7824 void InternalArrayConstructorStub::GenerateCase( | 7758 void InternalArrayConstructorStub::GenerateCase( |
7825 MacroAssembler* masm, ElementsKind kind) { | 7759 MacroAssembler* masm, ElementsKind kind) { |
7826 Label not_zero_case, not_one_case; | 7760 Label not_zero_case, not_one_case; |
7827 Label normal_sequence; | 7761 Label normal_sequence; |
7828 | 7762 |
7829 __ Branch(¬_zero_case, ne, a0, Operand(zero_reg)); | 7763 __ Branch(¬_zero_case, ne, a0, Operand(zero_reg)); |
(...skipping 40 matching lines...)
7870 __ lw(a3, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset)); | 7804 __ lw(a3, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset)); |
7871 // The check below catches both a NULL and a Smi. | 7805 // The check below catches both a NULL and a Smi. |
7872 __ And(at, a3, Operand(kSmiTagMask)); | 7806 __ And(at, a3, Operand(kSmiTagMask)); |
7873 __ Assert(ne, "Unexpected initial map for Array function", | 7807 __ Assert(ne, "Unexpected initial map for Array function", |
7874 at, Operand(zero_reg)); | 7808 at, Operand(zero_reg)); |
7875 __ GetObjectType(a3, a3, t0); | 7809 __ GetObjectType(a3, a3, t0); |
7876 __ Assert(eq, "Unexpected initial map for Array function", | 7810 __ Assert(eq, "Unexpected initial map for Array function", |
7877 t0, Operand(MAP_TYPE)); | 7811 t0, Operand(MAP_TYPE)); |
7878 } | 7812 } |
7879 | 7813 |
7880 if (FLAG_optimize_constructed_arrays) { | 7814 // Figure out the right elements kind. |
7881 // Figure out the right elements kind. | 7815 __ lw(a3, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset)); |
7882 __ lw(a3, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset)); | |
7883 | 7816 |
7884 // Load the map's "bit field 2" into a3. We only need the first byte, | 7817 // Load the map's "bit field 2" into a3. We only need the first byte, |
7885 // but the following bit field extraction takes care of that anyway. | 7818 // but the following bit field extraction takes care of that anyway. |
7886 __ lbu(a3, FieldMemOperand(a3, Map::kBitField2Offset)); | 7819 __ lbu(a3, FieldMemOperand(a3, Map::kBitField2Offset)); |
7887 // Retrieve elements_kind from bit field 2. | 7820 // Retrieve elements_kind from bit field 2. |
7888 __ Ext(a3, a3, Map::kElementsKindShift, Map::kElementsKindBitCount); | 7821 __ Ext(a3, a3, Map::kElementsKindShift, Map::kElementsKindBitCount); |
7889 | 7822 |
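The Ext instruction above is MIPS's single-instruction bit-field extract: it pulls the elements-kind field out of the map's bit_field2 byte. The equivalent shift-and-mask, with illustrative field positions (the real constants live on the Map class):

    // Shift-and-mask equivalent of `Ext a3, a3, shift, count`.
    // Field position and width are assumptions for illustration only.
    constexpr unsigned kElementsKindShift = 3;
    constexpr unsigned kElementsKindBitCount = 5;

    inline unsigned ExtractElementsKind(unsigned bit_field2) {
      return (bit_field2 >> kElementsKindShift) &
             ((1u << kElementsKindBitCount) - 1u);
    }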
7890 if (FLAG_debug_code) { | 7823 if (FLAG_debug_code) { |
7891 Label done; | 7824 Label done; |
7892 __ Branch(&done, eq, a3, Operand(FAST_ELEMENTS)); | 7825 __ Branch(&done, eq, a3, Operand(FAST_ELEMENTS)); |
7893 __ Assert( | 7826 __ Assert( |
7894 eq, "Invalid ElementsKind for InternalArray or InternalPackedArray", | 7827 eq, "Invalid ElementsKind for InternalArray or InternalPackedArray", |
7895 a3, Operand(FAST_HOLEY_ELEMENTS)); | 7828 a3, Operand(FAST_HOLEY_ELEMENTS)); |
7896 __ bind(&done); | 7829 __ bind(&done); |
7897 } | 7830 } |
7898 | 7831 |
7899 Label fast_elements_case; | 7832 Label fast_elements_case; |
7900 __ Branch(&fast_elements_case, eq, a3, Operand(FAST_ELEMENTS)); | 7833 __ Branch(&fast_elements_case, eq, a3, Operand(FAST_ELEMENTS)); |
7901 GenerateCase(masm, FAST_HOLEY_ELEMENTS); | 7834 GenerateCase(masm, FAST_HOLEY_ELEMENTS); |
7902 | 7835 |
7903 __ bind(&fast_elements_case); | 7836 __ bind(&fast_elements_case); |
7904 GenerateCase(masm, FAST_ELEMENTS); | 7837 GenerateCase(masm, FAST_ELEMENTS); |
7905 } else { | |
7906 Label generic_constructor; | |
7907 // Run the native code for the Array function called as constructor. | |
7908 ArrayNativeCode(masm, &generic_constructor); | |
7909 | |
7910 // Jump to the generic construct code in case the specialized code cannot | |
7911 // handle the construction. | |
7912 __ bind(&generic_constructor); | |
7913 Handle<Code> generic_construct_stub = | |
7914 masm->isolate()->builtins()->JSConstructStubGeneric(); | |
7915 __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET); | |
7916 } | |
7917 } | 7838 } |
7918 | 7839 |
7919 | 7840 |
7920 #undef __ | 7841 #undef __ |
7921 | 7842 |
7922 } } // namespace v8::internal | 7843 } } // namespace v8::internal |
7923 | 7844 |
7924 #endif // V8_TARGET_ARCH_MIPS | 7845 #endif // V8_TARGET_ARCH_MIPS |