| OLD | NEW |
| --- | --- |
| 1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #if V8_TARGET_ARCH_PPC | 5 #if V8_TARGET_ARCH_S390 |
| 6 | 6 |
| 7 #include "src/codegen.h" | 7 #include "src/codegen.h" |
| 8 #include "src/debug/debug.h" | 8 #include "src/debug/debug.h" |
| 9 #include "src/deoptimizer.h" | 9 #include "src/deoptimizer.h" |
| 10 #include "src/full-codegen/full-codegen.h" | 10 #include "src/full-codegen/full-codegen.h" |
| 11 #include "src/runtime/runtime.h" | 11 #include "src/runtime/runtime.h" |
| 12 | 12 |
| 13 namespace v8 { | 13 namespace v8 { |
| 14 namespace internal { | 14 namespace internal { |
| 15 | 15 |
| 16 | |
| 17 #define __ ACCESS_MASM(masm) | 16 #define __ ACCESS_MASM(masm) |
| 18 | 17 |
| 19 | |
| 20 void Builtins::Generate_Adaptor(MacroAssembler* masm, CFunctionId id, | 18 void Builtins::Generate_Adaptor(MacroAssembler* masm, CFunctionId id, |
| 21 BuiltinExtraArguments extra_args) { | 19 BuiltinExtraArguments extra_args) { |
| 22 // ----------- S t a t e ------------- | 20 // ----------- S t a t e ------------- |
| 23 // -- r3 : number of arguments excluding receiver | 21 // -- r2 : number of arguments excluding receiver |
| 24 // -- r4 : target | 22 // -- r3 : target |
| 25 // -- r6 : new.target | 23 // -- r5 : new.target |
| 26 // -- sp[0] : last argument | 24 // -- sp[0] : last argument |
| 27 // -- ... | 25 // -- ... |
| 28 // -- sp[4 * (argc - 1)] : first argument | 26 // -- sp[4 * (argc - 1)] : first argument |
| 29 // -- sp[4 * argc] : receiver | 27 // -- sp[4 * argc] : receiver |
| 30 // ----------------------------------- | 28 // ----------------------------------- |
| 31 __ AssertFunction(r4); | 29 __ AssertFunction(r3); |
| 32 | 30 |
| 33 // Make sure we operate in the context of the called function (for example | 31 // Make sure we operate in the context of the called function (for example |
| 34 // ConstructStubs implemented in C++ will be run in the context of the caller | 32 // ConstructStubs implemented in C++ will be run in the context of the caller |
| 35 // instead of the callee, due to the way that [[Construct]] is defined for | 33 // instead of the callee, due to the way that [[Construct]] is defined for |
| 36 // ordinary functions). | 34 // ordinary functions). |
| 37 __ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset)); | 35 __ LoadP(cp, FieldMemOperand(r3, JSFunction::kContextOffset)); |
| 38 | 36 |
| 39 // Insert extra arguments. | 37 // Insert extra arguments. |
| 40 int num_extra_args = 0; | 38 int num_extra_args = 0; |
| 41 switch (extra_args) { | 39 switch (extra_args) { |
| 42 case BuiltinExtraArguments::kTarget: | 40 case BuiltinExtraArguments::kTarget: |
| 43 __ Push(r4); | 41 __ Push(r3); |
| 44 ++num_extra_args; | 42 ++num_extra_args; |
| 45 break; | 43 break; |
| 46 case BuiltinExtraArguments::kNewTarget: | 44 case BuiltinExtraArguments::kNewTarget: |
| 47 __ Push(r6); | 45 __ Push(r5); |
| 48 ++num_extra_args; | 46 ++num_extra_args; |
| 49 break; | 47 break; |
| 50 case BuiltinExtraArguments::kTargetAndNewTarget: | 48 case BuiltinExtraArguments::kTargetAndNewTarget: |
| 51 __ Push(r4, r6); | 49 __ Push(r3, r5); |
| 52 num_extra_args += 2; | 50 num_extra_args += 2; |
| 53 break; | 51 break; |
| 54 case BuiltinExtraArguments::kNone: | 52 case BuiltinExtraArguments::kNone: |
| 55 break; | 53 break; |
| 56 } | 54 } |
| 57 | 55 |
| 58 // JumpToExternalReference expects r3 to contain the number of arguments | 56 // JumpToExternalReference expects r2 to contain the number of arguments |
| 59 // including the receiver and the extra arguments. | 57 // including the receiver and the extra arguments. |
| 60 __ addi(r3, r3, Operand(num_extra_args + 1)); | 58 __ AddP(r2, r2, Operand(num_extra_args + 1)); |
| 61 | 59 |
| 62 __ JumpToExternalReference(ExternalReference(id, masm->isolate())); | 60 __ JumpToExternalReference(ExternalReference(id, masm->isolate())); |
| 63 } | 61 } |
| 64 | 62 |
| 65 | |
| 66 // Load the built-in InternalArray function from the current context. | 63 // Load the built-in InternalArray function from the current context. |
| 67 static void GenerateLoadInternalArrayFunction(MacroAssembler* masm, | 64 static void GenerateLoadInternalArrayFunction(MacroAssembler* masm, |
| 68 Register result) { | 65 Register result) { |
| 69 // Load the InternalArray function from the current native context. | 66 // Load the InternalArray function from the current native context. |
| 70 __ LoadNativeContextSlot(Context::INTERNAL_ARRAY_FUNCTION_INDEX, result); | 67 __ LoadNativeContextSlot(Context::INTERNAL_ARRAY_FUNCTION_INDEX, result); |
| 71 } | 68 } |
| 72 | 69 |
| 73 | |
| 74 // Load the built-in Array function from the current context. | 70 // Load the built-in Array function from the current context. |
| 75 static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) { | 71 static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) { |
| 76 // Load the Array function from the current native context. | 72 // Load the Array function from the current native context. |
| 77 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, result); | 73 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, result); |
| 78 } | 74 } |
| 79 | 75 |
| 80 | |
| 81 void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) { | 76 void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) { |
| 82 // ----------- S t a t e ------------- | 77 // ----------- S t a t e ------------- |
| 83 // -- r3 : number of arguments | 78 // -- r2 : number of arguments |
| 84 // -- lr : return address | 79 // -- lr : return address |
| 85 // -- sp[...]: constructor arguments | 80 // -- sp[...]: constructor arguments |
| 86 // ----------------------------------- | 81 // ----------------------------------- |
| 87 Label generic_array_code, one_or_more_arguments, two_or_more_arguments; | 82 Label generic_array_code, one_or_more_arguments, two_or_more_arguments; |
| 88 | 83 |
| 89 // Get the InternalArray function. | 84 // Get the InternalArray function. |
| 90 GenerateLoadInternalArrayFunction(masm, r4); | 85 GenerateLoadInternalArrayFunction(masm, r3); |
| 91 | 86 |
| 92 if (FLAG_debug_code) { | 87 if (FLAG_debug_code) { |
| 93 // Initial map for the builtin InternalArray functions should be maps. | 88 // Initial map for the builtin InternalArray functions should be maps. |
| 94 __ LoadP(r5, FieldMemOperand(r4, JSFunction::kPrototypeOrInitialMapOffset)); | 89 __ LoadP(r4, FieldMemOperand(r3, JSFunction::kPrototypeOrInitialMapOffset)); |
| 95 __ TestIfSmi(r5, r0); | 90 __ TestIfSmi(r4); |
| 96 __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction, cr0); | 91 __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction, cr0); |
| 97 __ CompareObjectType(r5, r6, r7, MAP_TYPE); | 92 __ CompareObjectType(r4, r5, r6, MAP_TYPE); |
| 98 __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction); | 93 __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction); |
| 99 } | 94 } |
| 100 | 95 |
| 101 // Run the native code for the InternalArray function called as a normal | 96 // Run the native code for the InternalArray function called as a normal |
| 102 // function. | 97 // function. |
| 103 // tail call a stub | 98 // tail call a stub |
| 104 InternalArrayConstructorStub stub(masm->isolate()); | 99 InternalArrayConstructorStub stub(masm->isolate()); |
| 105 __ TailCallStub(&stub); | 100 __ TailCallStub(&stub); |
| 106 } | 101 } |
| 107 | 102 |
| 108 | |
| 109 void Builtins::Generate_ArrayCode(MacroAssembler* masm) { | 103 void Builtins::Generate_ArrayCode(MacroAssembler* masm) { |
| 110 // ----------- S t a t e ------------- | 104 // ----------- S t a t e ------------- |
| 111 // -- r3 : number of arguments | 105 // -- r2 : number of arguments |
| 112 // -- lr : return address | 106 // -- lr : return address |
| 113 // -- sp[...]: constructor arguments | 107 // -- sp[...]: constructor arguments |
| 114 // ----------------------------------- | 108 // ----------------------------------- |
| 115 Label generic_array_code, one_or_more_arguments, two_or_more_arguments; | 109 Label generic_array_code, one_or_more_arguments, two_or_more_arguments; |
| 116 | 110 |
| 117 // Get the Array function. | 111 // Get the Array function. |
| 118 GenerateLoadArrayFunction(masm, r4); | 112 GenerateLoadArrayFunction(masm, r3); |
| 119 | 113 |
| 120 if (FLAG_debug_code) { | 114 if (FLAG_debug_code) { |
| 121 // Initial map for the builtin Array functions should be maps. | 115 // Initial map for the builtin Array functions should be maps. |
| 122 __ LoadP(r5, FieldMemOperand(r4, JSFunction::kPrototypeOrInitialMapOffset)); | 116 __ LoadP(r4, FieldMemOperand(r3, JSFunction::kPrototypeOrInitialMapOffset)); |
| 123 __ TestIfSmi(r5, r0); | 117 __ TestIfSmi(r4); |
| 124 __ Assert(ne, kUnexpectedInitialMapForArrayFunction, cr0); | 118 __ Assert(ne, kUnexpectedInitialMapForArrayFunction, cr0); |
| 125 __ CompareObjectType(r5, r6, r7, MAP_TYPE); | 119 __ CompareObjectType(r4, r5, r6, MAP_TYPE); |
| 126 __ Assert(eq, kUnexpectedInitialMapForArrayFunction); | 120 __ Assert(eq, kUnexpectedInitialMapForArrayFunction); |
| 127 } | 121 } |
| 128 | 122 |
| 129 __ mr(r6, r4); | 123 __ LoadRR(r5, r3); |
| 130 // Run the native code for the Array function called as a normal function. | 124 // Run the native code for the Array function called as a normal function. |
| 131 // tail call a stub | 125 // tail call a stub |
| 132 __ LoadRoot(r5, Heap::kUndefinedValueRootIndex); | 126 __ LoadRoot(r4, Heap::kUndefinedValueRootIndex); |
| 133 ArrayConstructorStub stub(masm->isolate()); | 127 ArrayConstructorStub stub(masm->isolate()); |
| 134 __ TailCallStub(&stub); | 128 __ TailCallStub(&stub); |
| 135 } | 129 } |
| 136 | 130 |
| 137 | |
| 138 // static | 131 // static |
| 139 void Builtins::Generate_MathMaxMin(MacroAssembler* masm, MathMaxMinKind kind) { | 132 void Builtins::Generate_MathMaxMin(MacroAssembler* masm, MathMaxMinKind kind) { |
| 140 // ----------- S t a t e ------------- | 133 // ----------- S t a t e ------------- |
| 141 // -- r3 : number of arguments | 134 // -- r2 : number of arguments |
| 142 // -- lr : return address | 135 // -- lr : return address |
| 143 // -- sp[(argc - n) * 8] : arg[n] (zero-based) | 136 // -- sp[(argc - n) * 8] : arg[n] (zero-based) |
| 144 // -- sp[(argc + 1) * 8] : receiver | 137 // -- sp[(argc + 1) * 8] : receiver |
| 145 // ----------------------------------- | 138 // ----------------------------------- |
| 146 Condition const cond_done = (kind == MathMaxMinKind::kMin) ? lt : gt; | 139 Condition const cond_done = (kind == MathMaxMinKind::kMin) ? lt : gt; |
| 147 Heap::RootListIndex const root_index = | 140 Heap::RootListIndex const root_index = |
| 148 (kind == MathMaxMinKind::kMin) ? Heap::kInfinityValueRootIndex | 141 (kind == MathMaxMinKind::kMin) ? Heap::kInfinityValueRootIndex |
| 149 : Heap::kMinusInfinityValueRootIndex; | 142 : Heap::kMinusInfinityValueRootIndex; |
| 150 DoubleRegister const reg = (kind == MathMaxMinKind::kMin) ? d2 : d1; | 143 DoubleRegister const reg = (kind == MathMaxMinKind::kMin) ? d2 : d1; |
| 151 | 144 |
| 152 // Load the accumulator with the default return value (either -Infinity or | 145 // Load the accumulator with the default return value (either -Infinity or |
| 153 // +Infinity), with the tagged value in r4 and the double value in d1. | 146 // +Infinity), with the tagged value in r3 and the double value in d1. |
| 154 __ LoadRoot(r4, root_index); | 147 __ LoadRoot(r3, root_index); |
| 155 __ lfd(d1, FieldMemOperand(r4, HeapNumber::kValueOffset)); | 148 __ LoadDouble(d1, FieldMemOperand(r3, HeapNumber::kValueOffset)); |
| 156 | 149 |
| 157 // Setup state for loop | 150 // Setup state for loop |
| 158 // r5: address of arg[0] + kPointerSize | 151 // r4: address of arg[0] + kPointerSize |
| 159 // r6: number of slots to drop at exit (arguments + receiver) | 152 // r5: number of slots to drop at exit (arguments + receiver) |
| 160 __ ShiftLeftImm(r5, r3, Operand(kPointerSizeLog2)); | 153 __ ShiftLeftP(r4, r2, Operand(kPointerSizeLog2)); |
| 161 __ add(r5, sp, r5); | 154 __ AddP(r4, sp, r4); |
| 162 __ addi(r6, r3, Operand(1)); | 155 __ AddP(r5, r2, Operand(1)); |
| 163 | 156 |
| 164 Label done_loop, loop; | 157 Label done_loop, loop; |
| 165 __ bind(&loop); | 158 __ bind(&loop); |
| 166 { | 159 { |
| 167 // Check if all parameters done. | 160 // Check if all parameters done. |
| 168 __ cmpl(r5, sp); | 161 __ CmpLogicalP(r4, sp); |
| 169 __ ble(&done_loop); | 162 __ ble(&done_loop); |
| 170 | 163 |
| 171 // Load the next parameter tagged value into r3. | 164 // Load the next parameter tagged value into r2. |
| 172 __ LoadPU(r3, MemOperand(r5, -kPointerSize)); | 165 __ lay(r4, MemOperand(r4, -kPointerSize)); |
| 166 __ LoadP(r2, MemOperand(r4)); |
| 173 | 167 |
| 174 // Load the double value of the parameter into d2, maybe converting the | 168 // Load the double value of the parameter into d2, maybe converting the |
| 175 // parameter to a number first using the ToNumberStub if necessary. | 169 // parameter to a number first using the ToNumberStub if necessary. |
| 176 Label convert, convert_smi, convert_number, done_convert; | 170 Label convert, convert_smi, convert_number, done_convert; |
| 177 __ bind(&convert); | 171 __ bind(&convert); |
| 178 __ JumpIfSmi(r3, &convert_smi); | 172 __ JumpIfSmi(r2, &convert_smi); |
| 179 __ LoadP(r7, FieldMemOperand(r3, HeapObject::kMapOffset)); | 173 __ LoadP(r6, FieldMemOperand(r2, HeapObject::kMapOffset)); |
| 180 __ JumpIfRoot(r7, Heap::kHeapNumberMapRootIndex, &convert_number); | 174 __ JumpIfRoot(r6, Heap::kHeapNumberMapRootIndex, &convert_number); |
| 181 { | 175 { |
| 182 // Parameter is not a Number, use the ToNumberStub to convert it. | 176 // Parameter is not a Number, use the ToNumberStub to convert it. |
| 183 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); | 177 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); |
| 184 __ SmiTag(r6); | 178 __ SmiTag(r5); |
| 185 __ Push(r4, r5, r6); | 179 __ Push(r3, r4, r5); |
| 186 ToNumberStub stub(masm->isolate()); | 180 ToNumberStub stub(masm->isolate()); |
| 187 __ CallStub(&stub); | 181 __ CallStub(&stub); |
| 188 __ Pop(r4, r5, r6); | 182 __ Pop(r3, r4, r5); |
| 189 __ SmiUntag(r6); | 183 __ SmiUntag(r5); |
| 190 { | 184 { |
| 191 // Restore the double accumulator value (d1). | 185 // Restore the double accumulator value (d1). |
| 192 Label done_restore; | 186 Label done_restore; |
| 193 __ SmiToDouble(d1, r4); | 187 __ SmiToDouble(d1, r3); |
| 194 __ JumpIfSmi(r4, &done_restore); | 188 __ JumpIfSmi(r3, &done_restore); |
| 195 __ lfd(d1, FieldMemOperand(r4, HeapNumber::kValueOffset)); | 189 __ LoadDouble(d1, FieldMemOperand(r3, HeapNumber::kValueOffset)); |
| 196 __ bind(&done_restore); | 190 __ bind(&done_restore); |
| 197 } | 191 } |
| 198 } | 192 } |
| 199 __ b(&convert); | 193 __ b(&convert); |
| 200 __ bind(&convert_number); | 194 __ bind(&convert_number); |
| 201 __ lfd(d2, FieldMemOperand(r3, HeapNumber::kValueOffset)); | 195 __ LoadDouble(d2, FieldMemOperand(r2, HeapNumber::kValueOffset)); |
| 202 __ b(&done_convert); | 196 __ b(&done_convert); |
| 203 __ bind(&convert_smi); | 197 __ bind(&convert_smi); |
| 204 __ SmiToDouble(d2, r3); | 198 __ SmiToDouble(d2, r2); |
| 205 __ bind(&done_convert); | 199 __ bind(&done_convert); |
| 206 | 200 |
| 207 // Perform the actual comparison with the accumulator value on the left hand | 201 // Perform the actual comparison with the accumulator value on the left hand |
| 208 // side (d1) and the next parameter value on the right hand side (d2). | 202 // side (d1) and the next parameter value on the right hand side (d2). |
| 209 Label compare_nan, compare_swap; | 203 Label compare_nan, compare_swap; |
| 210 __ fcmpu(d1, d2); | 204 __ cdbr(d1, d2); |
| 211 __ bunordered(&compare_nan); | 205 __ bunordered(&compare_nan); |
| 212 __ b(cond_done, &loop); | 206 __ b(cond_done, &loop); |
| 213 __ b(CommuteCondition(cond_done), &compare_swap); | 207 __ b(CommuteCondition(cond_done), &compare_swap); |
| 214 | 208 |
| 215 // Left and right hand side are equal, check for -0 vs. +0. | 209 // Left and right hand side are equal, check for -0 vs. +0. |
| 216 __ TestDoubleIsMinusZero(reg, r7, r8); | 210 __ TestDoubleIsMinusZero(reg, r6, r7); |
| 217 __ bne(&loop); | 211 __ bne(&loop); |
| 218 | 212 |
| 219 // Update accumulator. Result is on the right hand side. | 213 // Update accumulator. Result is on the right hand side. |
| 220 __ bind(&compare_swap); | 214 __ bind(&compare_swap); |
| 221 __ fmr(d1, d2); | 215 __ ldr(d1, d2); |
| 222 __ mr(r4, r3); | 216 __ LoadRR(r3, r2); |
| 223 __ b(&loop); | 217 __ b(&loop); |
| 224 | 218 |
| 225 // At least one side is NaN, which means that the result will be NaN too. | 219 // At least one side is NaN, which means that the result will be NaN too. |
| 226 // We still need to visit the rest of the arguments. | 220 // We still need to visit the rest of the arguments. |
| 227 __ bind(&compare_nan); | 221 __ bind(&compare_nan); |
| 228 __ LoadRoot(r4, Heap::kNanValueRootIndex); | 222 __ LoadRoot(r3, Heap::kNanValueRootIndex); |
| 229 __ lfd(d1, FieldMemOperand(r4, HeapNumber::kValueOffset)); | 223 __ LoadDouble(d1, FieldMemOperand(r3, HeapNumber::kValueOffset)); |
| 230 __ b(&loop); | 224 __ b(&loop); |
| 231 } | 225 } |
| 232 | 226 |
| 233 __ bind(&done_loop); | 227 __ bind(&done_loop); |
| 234 __ mr(r3, r4); | 228 __ LoadRR(r2, r3); |
| 235 __ Drop(r6); | 229 __ Drop(r5); |
| 236 __ Ret(); | 230 __ Ret(); |
| 237 } | 231 } |
| 238 | 232 |
| 239 // static | 233 // static |
| 240 void Builtins::Generate_NumberConstructor(MacroAssembler* masm) { | 234 void Builtins::Generate_NumberConstructor(MacroAssembler* masm) { |
| 241 // ----------- S t a t e ------------- | 235 // ----------- S t a t e ------------- |
| 242 // -- r3 : number of arguments | 236 // -- r2 : number of arguments |
| 243 // -- r4 : constructor function | 237 // -- r3 : constructor function |
| 244 // -- lr : return address | 238 // -- lr : return address |
| 245 // -- sp[(argc - n - 1) * 4] : arg[n] (zero based) | 239 // -- sp[(argc - n - 1) * 4] : arg[n] (zero based) |
| 246 // -- sp[argc * 4] : receiver | 240 // -- sp[argc * 4] : receiver |
| 247 // ----------------------------------- | 241 // ----------------------------------- |
| 248 | 242 |
| 249 // 1. Load the first argument into r3 and get rid of the rest (including the | 243 // 1. Load the first argument into r2 and get rid of the rest (including the |
| 250 // receiver). | 244 // receiver). |
| 251 Label no_arguments; | 245 Label no_arguments; |
| 252 { | 246 { |
| 253 __ cmpi(r3, Operand::Zero()); | 247 __ CmpP(r2, Operand::Zero()); |
| 254 __ beq(&no_arguments); | 248 __ beq(&no_arguments); |
| 255 __ subi(r3, r3, Operand(1)); | 249 __ SubP(r2, r2, Operand(1)); |
| 256 __ ShiftLeftImm(r3, r3, Operand(kPointerSizeLog2)); | 250 __ ShiftLeftP(r2, r2, Operand(kPointerSizeLog2)); |
| 257 __ LoadPUX(r3, MemOperand(sp, r3)); | 251 __ la(sp, MemOperand(sp, r2)); |
| 252 __ LoadP(r2, MemOperand(sp)); |
| 258 __ Drop(2); | 253 __ Drop(2); |
| 259 } | 254 } |
| 260 | 255 |
| 261 // 2a. Convert the first argument to a number. | 256 // 2a. Convert the first argument to a number. |
| 262 ToNumberStub stub(masm->isolate()); | 257 ToNumberStub stub(masm->isolate()); |
| 263 __ TailCallStub(&stub); | 258 __ TailCallStub(&stub); |
| 264 | 259 |
| 265 // 2b. No arguments, return +0. | 260 // 2b. No arguments, return +0. |
| 266 __ bind(&no_arguments); | 261 __ bind(&no_arguments); |
| 267 __ LoadSmiLiteral(r3, Smi::FromInt(0)); | 262 __ LoadSmiLiteral(r2, Smi::FromInt(0)); |
| 268 __ Ret(1); | 263 __ Ret(1); |
| 269 } | 264 } |
| 270 | 265 |
| 271 | |
| 272 // static | 266 // static |
| 273 void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) { | 267 void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) { |
| 274 // ----------- S t a t e ------------- | 268 // ----------- S t a t e ------------- |
| 275 // -- r3 : number of arguments | 269 // -- r2 : number of arguments |
| 276 // -- r4 : constructor function | 270 // -- r3 : constructor function |
| 277 // -- r6 : new target | 271 // -- r5 : new target |
| 278 // -- lr : return address | 272 // -- lr : return address |
| 279 // -- sp[(argc - n - 1) * 4] : arg[n] (zero based) | 273 // -- sp[(argc - n - 1) * 4] : arg[n] (zero based) |
| 280 // -- sp[argc * 4] : receiver | 274 // -- sp[argc * 4] : receiver |
| 281 // ----------------------------------- | 275 // ----------------------------------- |
| 282 | 276 |
| 283 // 1. Make sure we operate in the context of the called function. | 277 // 1. Make sure we operate in the context of the called function. |
| 284 __ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset)); | 278 __ LoadP(cp, FieldMemOperand(r3, JSFunction::kContextOffset)); |
| 285 | 279 |
| 286 // 2. Load the first argument into r5 and get rid of the rest (including the | 280 // 2. Load the first argument into r4 and get rid of the rest (including the |
| 287 // receiver). | 281 // receiver). |
| 288 { | 282 { |
| 289 Label no_arguments, done; | 283 Label no_arguments, done; |
| 290 __ cmpi(r3, Operand::Zero()); | 284 __ CmpP(r2, Operand::Zero()); |
| 291 __ beq(&no_arguments); | 285 __ beq(&no_arguments); |
| 292 __ subi(r3, r3, Operand(1)); | 286 __ SubP(r2, r2, Operand(1)); |
| 293 __ ShiftLeftImm(r5, r3, Operand(kPointerSizeLog2)); | 287 __ ShiftLeftP(r4, r2, Operand(kPointerSizeLog2)); |
| 294 __ LoadPUX(r5, MemOperand(sp, r5)); | 288 __ la(sp, MemOperand(sp, r4)); |
| 289 __ LoadP(r4, MemOperand(sp)); |
| 295 __ Drop(2); | 290 __ Drop(2); |
| 296 __ b(&done); | 291 __ b(&done); |
| 297 __ bind(&no_arguments); | 292 __ bind(&no_arguments); |
| 298 __ LoadSmiLiteral(r5, Smi::FromInt(0)); | 293 __ LoadSmiLiteral(r4, Smi::FromInt(0)); |
| 299 __ Drop(1); | 294 __ Drop(1); |
| 300 __ bind(&done); | 295 __ bind(&done); |
| 301 } | 296 } |
| 302 | 297 |
| 303 // 3. Make sure r5 is a number. | 298 // 3. Make sure r4 is a number. |
| 304 { | 299 { |
| 305 Label done_convert; | 300 Label done_convert; |
| 306 __ JumpIfSmi(r5, &done_convert); | 301 __ JumpIfSmi(r4, &done_convert); |
| 307 __ CompareObjectType(r5, r7, r7, HEAP_NUMBER_TYPE); | 302 __ CompareObjectType(r4, r6, r6, HEAP_NUMBER_TYPE); |
| 308 __ beq(&done_convert); | 303 __ beq(&done_convert); |
| 309 { | 304 { |
| 310 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); | 305 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); |
| 311 __ Push(r4, r6); | 306 __ Push(r3, r5); |
| 312 __ mr(r3, r5); | 307 __ LoadRR(r2, r4); |
| 313 ToNumberStub stub(masm->isolate()); | 308 ToNumberStub stub(masm->isolate()); |
| 314 __ CallStub(&stub); | 309 __ CallStub(&stub); |
| 315 __ mr(r5, r3); | 310 __ LoadRR(r4, r2); |
| 316 __ Pop(r4, r6); | 311 __ Pop(r3, r5); |
| 317 } | 312 } |
| 318 __ bind(&done_convert); | 313 __ bind(&done_convert); |
| 319 } | 314 } |
| 320 | 315 |
| 321 // 4. Check if new target and constructor differ. | 316 // 4. Check if new target and constructor differ. |
| 322 Label new_object; | 317 Label new_object; |
| 323 __ cmp(r4, r6); | 318 __ CmpP(r3, r5); |
| 324 __ bne(&new_object); | 319 __ bne(&new_object); |
| 325 | 320 |
| 326 // 5. Allocate a JSValue wrapper for the number. | 321 // 5. Allocate a JSValue wrapper for the number. |
| 327 __ AllocateJSValue(r3, r4, r5, r7, r8, &new_object); | 322 __ AllocateJSValue(r2, r3, r4, r6, r7, &new_object); |
| 328 __ Ret(); | 323 __ Ret(); |
| 329 | 324 |
| 330 // 6. Fallback to the runtime to create new object. | 325 // 6. Fallback to the runtime to create new object. |
| 331 __ bind(&new_object); | 326 __ bind(&new_object); |
| 332 { | 327 { |
| 333 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); | 328 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); |
| 334 __ Push(r5); // first argument | 329 __ Push(r4); // first argument |
| 335 FastNewObjectStub stub(masm->isolate()); | 330 FastNewObjectStub stub(masm->isolate()); |
| 336 __ CallStub(&stub); | 331 __ CallStub(&stub); |
| 337 __ Pop(r5); | 332 __ Pop(r4); |
| 338 } | 333 } |
| 339 __ StoreP(r5, FieldMemOperand(r3, JSValue::kValueOffset), r0); | 334 __ StoreP(r4, FieldMemOperand(r2, JSValue::kValueOffset), r0); |
| 340 __ Ret(); | 335 __ Ret(); |
| 341 } | 336 } |
| 342 | 337 |
| 343 | |
| 344 // static | 338 // static |
| 345 void Builtins::Generate_StringConstructor(MacroAssembler* masm) { | 339 void Builtins::Generate_StringConstructor(MacroAssembler* masm) { |
| 346 // ----------- S t a t e ------------- | 340 // ----------- S t a t e ------------- |
| 347 // -- r3 : number of arguments | 341 // -- r2 : number of arguments |
| 348 // -- r4 : constructor function | 342 // -- r3 : constructor function |
| 349 // -- lr : return address | 343 // -- lr : return address |
| 350 // -- sp[(argc - n - 1) * 4] : arg[n] (zero based) | 344 // -- sp[(argc - n - 1) * 4] : arg[n] (zero based) |
| 351 // -- sp[argc * 4] : receiver | 345 // -- sp[argc * 4] : receiver |
| 352 // ----------------------------------- | 346 // ----------------------------------- |
| 353 | 347 // 1. Load the first argument into r2 and get rid of the rest (including the |
| 354 // 1. Load the first argument into r3 and get rid of the rest (including the | |
| 355 // receiver). | 348 // receiver). |
| 356 Label no_arguments; | 349 Label no_arguments; |
| 357 { | 350 { |
| 358 __ cmpi(r3, Operand::Zero()); | 351 __ CmpP(r2, Operand::Zero()); |
| 359 __ beq(&no_arguments); | 352 __ beq(&no_arguments); |
| 360 __ subi(r3, r3, Operand(1)); | 353 __ SubP(r2, r2, Operand(1)); |
| 361 __ ShiftLeftImm(r3, r3, Operand(kPointerSizeLog2)); | 354 __ ShiftLeftP(r2, r2, Operand(kPointerSizeLog2)); |
| 362 __ LoadPUX(r3, MemOperand(sp, r3)); | 355 __ lay(sp, MemOperand(sp, r2)); |
| 356 __ LoadP(r2, MemOperand(sp)); |
| 363 __ Drop(2); | 357 __ Drop(2); |
| 364 } | 358 } |
| 365 | 359 |
| 366 // 2a. At least one argument, return r3 if it's a string, otherwise | 360 // 2a. At least one argument, return r2 if it's a string, otherwise |
| 367 // dispatch to appropriate conversion. | 361 // dispatch to appropriate conversion. |
| 368 Label to_string, symbol_descriptive_string; | 362 Label to_string, symbol_descriptive_string; |
| 369 { | 363 { |
| 370 __ JumpIfSmi(r3, &to_string); | 364 __ JumpIfSmi(r2, &to_string); |
| 371 STATIC_ASSERT(FIRST_NONSTRING_TYPE == SYMBOL_TYPE); | 365 STATIC_ASSERT(FIRST_NONSTRING_TYPE == SYMBOL_TYPE); |
| 372 __ CompareObjectType(r3, r4, r4, FIRST_NONSTRING_TYPE); | 366 __ CompareObjectType(r2, r3, r3, FIRST_NONSTRING_TYPE); |
| 373 __ bgt(&to_string); | 367 __ bgt(&to_string); |
| 374 __ beq(&symbol_descriptive_string); | 368 __ beq(&symbol_descriptive_string); |
| 375 __ Ret(); | 369 __ Ret(); |
| 376 } | 370 } |
| 377 | 371 |
| 378 // 2b. No arguments, return the empty string (and pop the receiver). | 372 // 2b. No arguments, return the empty string (and pop the receiver). |
| 379 __ bind(&no_arguments); | 373 __ bind(&no_arguments); |
| 380 { | 374 { |
| 381 __ LoadRoot(r3, Heap::kempty_stringRootIndex); | 375 __ LoadRoot(r2, Heap::kempty_stringRootIndex); |
| 382 __ Ret(1); | 376 __ Ret(1); |
| 383 } | 377 } |
| 384 | 378 |
| 385 // 3a. Convert r3 to a string. | 379 // 3a. Convert r2 to a string. |
| 386 __ bind(&to_string); | 380 __ bind(&to_string); |
| 387 { | 381 { |
| 388 ToStringStub stub(masm->isolate()); | 382 ToStringStub stub(masm->isolate()); |
| 389 __ TailCallStub(&stub); | 383 __ TailCallStub(&stub); |
| 390 } | 384 } |
| 391 | 385 // 3b. Convert symbol in r2 to a string. |
| 392 // 3b. Convert symbol in r3 to a string. | |
| 393 __ bind(&symbol_descriptive_string); | 386 __ bind(&symbol_descriptive_string); |
| 394 { | 387 { |
| 395 __ Push(r3); | 388 __ Push(r2); |
| 396 __ TailCallRuntime(Runtime::kSymbolDescriptiveString); | 389 __ TailCallRuntime(Runtime::kSymbolDescriptiveString); |
| 397 } | 390 } |
| 398 } | 391 } |
| 399 | 392 |
| 400 | |
| 401 // static | 393 // static |
| 402 void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) { | 394 void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) { |
| 403 // ----------- S t a t e ------------- | 395 // ----------- S t a t e ------------- |
| 404 // -- r3 : number of arguments | 396 // -- r2 : number of arguments |
| 405 // -- r4 : constructor function | 397 // -- r3 : constructor function |
| 406 // -- r6 : new target | 398 // -- r5 : new target |
| 407 // -- lr : return address | 399 // -- lr : return address |
| 408 // -- sp[(argc - n - 1) * 4] : arg[n] (zero based) | 400 // -- sp[(argc - n - 1) * 4] : arg[n] (zero based) |
| 409 // -- sp[argc * 4] : receiver | 401 // -- sp[argc * 4] : receiver |
| 410 // ----------------------------------- | 402 // ----------------------------------- |
| 411 | 403 |
| 412 // 1. Make sure we operate in the context of the called function. | 404 // 1. Make sure we operate in the context of the called function. |
| 413 __ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset)); | 405 __ LoadP(cp, FieldMemOperand(r3, JSFunction::kContextOffset)); |
| 414 | 406 |
| 415 // 2. Load the first argument into r5 and get rid of the rest (including the | 407 // 2. Load the first argument into r4 and get rid of the rest (including the |
| 416 // receiver). | 408 // receiver). |
| 417 { | 409 { |
| 418 Label no_arguments, done; | 410 Label no_arguments, done; |
| 419 __ cmpi(r3, Operand::Zero()); | 411 __ CmpP(r2, Operand::Zero()); |
| 420 __ beq(&no_arguments); | 412 __ beq(&no_arguments); |
| 421 __ subi(r3, r3, Operand(1)); | 413 __ SubP(r2, r2, Operand(1)); |
| 422 __ ShiftLeftImm(r5, r3, Operand(kPointerSizeLog2)); | 414 __ ShiftLeftP(r4, r2, Operand(kPointerSizeLog2)); |
| 423 __ LoadPUX(r5, MemOperand(sp, r5)); | 415 __ lay(sp, MemOperand(sp, r4)); |
| 416 __ LoadP(r4, MemOperand(sp)); |
| 424 __ Drop(2); | 417 __ Drop(2); |
| 425 __ b(&done); | 418 __ b(&done); |
| 426 __ bind(&no_arguments); | 419 __ bind(&no_arguments); |
| 427 __ LoadRoot(r5, Heap::kempty_stringRootIndex); | 420 __ LoadRoot(r4, Heap::kempty_stringRootIndex); |
| 428 __ Drop(1); | 421 __ Drop(1); |
| 429 __ bind(&done); | 422 __ bind(&done); |
| 430 } | 423 } |
| 431 | 424 |
| 432 // 3. Make sure r5 is a string. | 425 // 3. Make sure r4 is a string. |
| 433 { | 426 { |
| 434 Label convert, done_convert; | 427 Label convert, done_convert; |
| 435 __ JumpIfSmi(r5, &convert); | 428 __ JumpIfSmi(r4, &convert); |
| 436 __ CompareObjectType(r5, r7, r7, FIRST_NONSTRING_TYPE); | 429 __ CompareObjectType(r4, r6, r6, FIRST_NONSTRING_TYPE); |
| 437 __ blt(&done_convert); | 430 __ blt(&done_convert); |
| 438 __ bind(&convert); | 431 __ bind(&convert); |
| 439 { | 432 { |
| 440 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); | 433 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); |
| 441 ToStringStub stub(masm->isolate()); | 434 ToStringStub stub(masm->isolate()); |
| 442 __ Push(r4, r6); | 435 __ Push(r3, r5); |
| 443 __ mr(r3, r5); | 436 __ LoadRR(r2, r4); |
| 444 __ CallStub(&stub); | 437 __ CallStub(&stub); |
| 445 __ mr(r5, r3); | 438 __ LoadRR(r4, r2); |
| 446 __ Pop(r4, r6); | 439 __ Pop(r3, r5); |
| 447 } | 440 } |
| 448 __ bind(&done_convert); | 441 __ bind(&done_convert); |
| 449 } | 442 } |
| 450 | 443 |
| 451 // 4. Check if new target and constructor differ. | 444 // 4. Check if new target and constructor differ. |
| 452 Label new_object; | 445 Label new_object; |
| 453 __ cmp(r4, r6); | 446 __ CmpP(r3, r5); |
| 454 __ bne(&new_object); | 447 __ bne(&new_object); |
| 455 | 448 |
| 456 // 5. Allocate a JSValue wrapper for the string. | 449 // 5. Allocate a JSValue wrapper for the string. |
| 457 __ AllocateJSValue(r3, r4, r5, r7, r8, &new_object); | 450 __ AllocateJSValue(r2, r3, r4, r6, r7, &new_object); |
| 458 __ Ret(); | 451 __ Ret(); |
| 459 | 452 |
| 460 // 6. Fallback to the runtime to create new object. | 453 // 6. Fallback to the runtime to create new object. |
| 461 __ bind(&new_object); | 454 __ bind(&new_object); |
| 462 { | 455 { |
| 463 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); | 456 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); |
| 464 __ Push(r5); // first argument | 457 __ Push(r4); // first argument |
| 465 FastNewObjectStub stub(masm->isolate()); | 458 FastNewObjectStub stub(masm->isolate()); |
| 466 __ CallStub(&stub); | 459 __ CallStub(&stub); |
| 467 __ Pop(r5); | 460 __ Pop(r4); |
| 468 } | 461 } |
| 469 __ StoreP(r5, FieldMemOperand(r3, JSValue::kValueOffset), r0); | 462 __ StoreP(r4, FieldMemOperand(r2, JSValue::kValueOffset), r0); |
| 470 __ Ret(); | 463 __ Ret(); |
| 471 } | 464 } |
| 472 | 465 |
| 473 | |
// Tail-calls the code object attached to the target function's
// SharedFunctionInfo. r3 holds the JS function (s390 JS calling convention
// used throughout this file); ip is clobbered as the jump target.
static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
  __ LoadP(ip, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset));
  __ LoadP(ip, FieldMemOperand(ip, SharedFunctionInfo::kCodeOffset));
  // Skip the Code object header to reach the first instruction.
  __ AddP(ip, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ JumpToJSEntry(ip);
}
| 480 | 472 |
// Calls the given runtime function (which is expected to return a Code
// object) and tail-calls into the returned code. The JS argument count,
// target function, and new target are preserved across the runtime call by
// pushing them around it.
static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
                                           Runtime::FunctionId function_id) {
  // ----------- S t a t e -------------
  //  -- r2 : argument count (preserved for callee)
  //  -- r3 : target function (preserved for callee)
  //  -- r5 : new target (preserved for callee)
  // -----------------------------------
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    // Push the number of arguments to the callee.
    // Push a copy of the target function and the new target.
    // Push function as parameter to the runtime call.
    __ SmiTag(r2);
    __ Push(r2, r3, r5, r3);

    __ CallRuntime(function_id, 1);
    // Stash the returned Code object before the pops clobber r2.
    __ LoadRR(r4, r2);

    // Restore target function and new target.
    __ Pop(r2, r3, r5);
    __ SmiUntag(r2);
  }
  // Jump past the Code header to the first instruction of the returned code.
  __ AddP(ip, r4, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ JumpToJSEntry(ip);
}
| 506 | 498 |
| 507 | |
// Entry stub for a function whose optimized code is still being produced by
// the concurrent compiler queue.
void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
  // Checking whether the queued function is ready for install is optional,
  // since we come across interrupts and stack checks elsewhere. However,
  // not checking may delay installing ready functions, and always checking
  // would be quite expensive. A good compromise is to first check against
  // stack limit as a cue for an interrupt signal.
  Label ok;
  __ CmpLogicalP(sp, RootMemOperand(Heap::kStackLimitRootIndex));
  __ bge(&ok, Label::kNear);

  // Stack limit hit: take the slow path, which installs optimized code if it
  // is ready and tail-calls it.
  GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);

  __ bind(&ok);
  // Otherwise just run the unoptimized shared code.
  GenerateTailCallToSharedCode(masm);
}
| 524 | 514 |
| 525 | |
// Shared body of all JS construct stubs. The three flags select the
// variant: API-call dispatch, implicit receiver creation, and the ES6
// derived-constructor return-value check.
static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                           bool is_api_function,
                                           bool create_implicit_receiver,
                                           bool check_derived_construct) {
  // ----------- S t a t e -------------
  //  -- r2     : number of arguments
  //  -- r3     : constructor function
  //  -- r4     : allocation site or undefined
  //  -- r5     : new target
  //  -- r14    : return address (the "lr" of this port)
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  Isolate* isolate = masm->isolate();

  // Enter a construct frame.
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::CONSTRUCT);

    // Preserve the incoming parameters on the stack.
    __ AssertUndefinedOrAllocationSite(r4, r6);

    if (!create_implicit_receiver) {
      // Save smi-tagged argc; LoadAndTestP sets the condition code on argc
      // for the zero-arguments branch below.
      __ SmiTag(r6, r2);
      __ LoadAndTestP(r6, r6);
      __ Push(r4, r6);
      // The hole stands in for the receiver that is never created.
      __ PushRoot(Heap::kTheHoleValueRootIndex);
    } else {
      __ SmiTag(r2);
      __ Push(r4, r2);

      // Allocate the new receiver object.
      __ Push(r3, r5);
      FastNewObjectStub stub(masm->isolate());
      __ CallStub(&stub);
      __ LoadRR(r6, r2);
      __ Pop(r3, r5);

      // ----------- S t a t e -------------
      //  -- r3: constructor function
      //  -- r5: new target
      //  -- r6: newly allocated object
      // -----------------------------------

      // Retrieve smi-tagged arguments count from the stack; LoadAndTestP
      // sets the condition code for the zero-arguments branch below.
      __ LoadP(r2, MemOperand(sp));
      __ SmiUntag(r2);
      __ LoadAndTestP(r2, r2);

      // Push the allocated receiver to the stack. We need two copies
      // because we may have to return the original one and the calling
      // conventions dictate that the called function pops the receiver.
      __ Push(r6, r6);
    }

    // Set up pointer to last argument.
    __ la(r4, MemOperand(fp, StandardFrameConstants::kCallerSPOffset));

    // Copy arguments and receiver to the expression stack.
    // r2: number of arguments
    // r3: constructor function
    // r4: address of last argument (caller sp)
    // r5: new target
    // condition code: indicates whether r2 is zero (set by LoadAndTestP
    // above; Push/la do not disturb it)
    // sp[0]: receiver
    // sp[1]: receiver
    // sp[2]: number of arguments (smi-tagged)
    Label loop, no_args;
    __ beq(&no_args);
    __ ShiftLeftP(ip, r2, Operand(kPointerSizeLog2));
    __ SubP(sp, sp, ip);
    // r1 is the loop counter consumed by BranchOnCount.
    __ LoadRR(r1, r2);
    __ bind(&loop);
    __ lay(ip, MemOperand(ip, -kPointerSize));
    __ LoadP(r0, MemOperand(ip, r4));
    __ StoreP(r0, MemOperand(ip, sp));
    __ BranchOnCount(r1, &loop);
    __ bind(&no_args);

    // Call the function.
    // r2: number of arguments
    // r3: constructor function
    // r5: new target
    if (is_api_function) {
      __ LoadP(cp, FieldMemOperand(r3, JSFunction::kContextOffset));
      Handle<Code> code = masm->isolate()->builtins()->HandleApiCallConstruct();
      __ Call(code, RelocInfo::CODE_TARGET);
    } else {
      ParameterCount actual(r2);
      __ InvokeFunction(r3, r5, actual, CALL_FUNCTION,
                        CheckDebugStepCallWrapper());
    }

    // Store offset of return address for deoptimizer.
    if (create_implicit_receiver && !is_api_function) {
      masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context from the frame.
    // r2: result
    // sp[0]: receiver
    // sp[1]: number of arguments (smi-tagged)
    __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));

    if (create_implicit_receiver) {
      // If the result is an object (in the ECMA sense), we should get rid
      // of the receiver and use the result; see ECMA-262 section 13.2.2-7
      // on page 74.
      Label use_receiver, exit;

      // If the result is a smi, it is *not* an object in the ECMA sense.
      // r2: result
      // sp[0]: receiver
      // sp[1]: new.target
      // sp[2]: number of arguments (smi-tagged)
      __ JumpIfSmi(r2, &use_receiver);

      // If the type of the result (stored in its map) is less than
      // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
      __ CompareObjectType(r2, r3, r5, FIRST_JS_RECEIVER_TYPE);
      __ bge(&exit);

      // Throw away the result of the constructor invocation and use the
      // on-stack receiver as the result.
      __ bind(&use_receiver);
      __ LoadP(r2, MemOperand(sp));

      // Remove receiver from the stack, remove caller arguments, and
      // return.
      __ bind(&exit);
      // r2: result
      // sp[0]: receiver (newly allocated object)
      // sp[1]: number of arguments (smi-tagged)
      __ LoadP(r3, MemOperand(sp, 1 * kPointerSize));
    } else {
      __ LoadP(r3, MemOperand(sp));
    }

    // Leave construct frame.
  }

  // ES6 9.2.2. Step 13+
  // Check that the result is not a Smi, indicating that the constructor result
  // from a derived class is neither undefined nor an Object.
  if (check_derived_construct) {
    Label dont_throw;
    __ JumpIfNotSmi(r2, &dont_throw);
    {
      FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
      __ CallRuntime(Runtime::kThrowDerivedConstructorReturnedNonObject);
    }
    __ bind(&dont_throw);
  }

  // Drop the receiver plus the smi-tagged caller argument count saved above.
  __ SmiToPtrArrayOffset(r3, r3);
  __ AddP(sp, sp, r3);
  __ AddP(sp, sp, Operand(kPointerSize));
  if (create_implicit_receiver) {
    __ IncrementCounter(isolate->counters()->constructed_objects(), 1, r3, r4);
  }
  __ Ret();
}
| 685 | 677 |
| 686 | |
// Generic [[Construct]] stub: creates the implicit receiver; no API-call
// dispatch and no derived-constructor return check.
void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, true, false);
}
| 690 | 681 |
| 691 | |
// Construct stub for API functions: dispatches through
// HandleApiCallConstruct and does not create an implicit receiver here.
void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, false, false);
}
| 695 | 685 |
| 696 | |
// Construct stub for builtins: no implicit receiver, no API dispatch, no
// derived-constructor return check.
void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false, false);
}
| 700 | 689 |
| 701 | |
// Construct stub for derived-class constructors: no implicit receiver, and
// the ES6 9.2.2 check that the constructor did not return a primitive.
void Builtins::Generate_JSBuiltinsConstructStubForDerived(
    MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false, true);
}
| 706 | 694 |
| 707 | |
// Throws a TypeError when something that is not a constructor is used with
// `new`. r3 (the target) is passed to the runtime for the error message.
// The runtime call does not return.
void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
  FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
  __ push(r3);
  __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
}
| 713 | 700 |
| 714 | |
// Tells Generate_CheckStackOverflow whether the argc register holds a
// smi-tagged value or a raw integer.
enum IsTagged { kArgcIsSmiTagged, kArgcIsUntaggedInt };
| 716 | 702 |
// Calls the runtime (which throws) if pushing `argc` arguments would
// overflow the stack; otherwise falls through.
// Clobbers r4; preserves all other registers.
static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc,
                                        IsTagged argc_is_tagged) {
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  Label okay;
  __ LoadRoot(r4, Heap::kRealStackLimitRootIndex);
  // Make r4 the space we have left. The stack might already be overflowed
  // here which will cause r4 to become negative.
  __ SubP(r4, sp, r4);
  // Check if the arguments will overflow the stack.
  if (argc_is_tagged == kArgcIsSmiTagged) {
    __ SmiToPtrArrayOffset(r0, argc);
  } else {
    DCHECK(argc_is_tagged == kArgcIsUntaggedInt);
    __ ShiftLeftP(r0, argc, Operand(kPointerSizeLog2));
  }
  __ CmpP(r4, r0);
  __ bgt(&okay);  // Signed comparison.

  // Out of stack space.
  __ CallRuntime(Runtime::kThrowStackOverflow);

  __ bind(&okay);
}
| 744 | 729 |
| 745 | |
// Common body of the JS entry trampolines: sets up the context and root
// register, copies the C-side argv (an array of handles) onto the JS stack,
// and invokes either Call or Construct.
static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Called from Generate_JS_Entry
  // r2: new.target
  // r3: function
  // r4: receiver
  // r5: argc
  // r6: argv
  // r0,r7-r9, cp may be clobbered
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Clear the context before we push it when entering the internal frame.
  __ LoadImmP(cp, Operand::Zero());

  // Enter an internal frame.
  {
    // FrameScope ends up calling MacroAssembler::EnterFrame here
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Setup the context (we need to use the caller context from the isolate).
    ExternalReference context_address(Isolate::kContextAddress,
                                      masm->isolate());
    __ mov(cp, Operand(context_address));
    __ LoadP(cp, MemOperand(cp));

    __ InitializeRootRegister();

    // Push the function and the receiver onto the stack.
    __ Push(r3, r4);

    // Check if we have enough stack space to push all arguments.
    // Clobbers r4.
    Generate_CheckStackOverflow(masm, r5, kArgcIsUntaggedInt);

    // Copy arguments to the stack in a loop from argv to sp.
    // The arguments are actually placed in reverse order on sp
    // compared to argv (i.e. arg1 is highest memory in sp).
    // r3: function
    // r5: argc
    // r6: argv, i.e. points to first arg
    // r7: scratch reg to hold scaled argc
    // r8: scratch reg to hold arg handle
    // r9: scratch reg to hold index into argv
    Label argLoop, argExit;
    intptr_t zero = 0;
    __ ShiftLeftP(r7, r5, Operand(kPointerSizeLog2));
    __ SubRR(sp, r7);                // Buy the stack frame to fit args
    __ LoadImmP(r9, Operand(zero));  // Initialize argv index
    __ bind(&argLoop);
    // r7 counts remaining bytes; loop exits when it reaches zero.
    __ CmpPH(r7, Operand(zero));
    __ beq(&argExit, Label::kNear);
    __ lay(r7, MemOperand(r7, -kPointerSize));
    __ LoadP(r8, MemOperand(r9, r6));         // read next parameter
    __ la(r9, MemOperand(r9, kPointerSize));  // r9++;
    __ LoadP(r0, MemOperand(r8));             // dereference handle
    __ StoreP(r0, MemOperand(r7, sp));        // push parameter
    __ b(&argLoop);
    __ bind(&argExit);

    // Setup new.target and argc: swap r2 (new.target) and r5 (argc) into the
    // JS calling convention via r6 as a temporary.
    __ LoadRR(r6, r2);
    __ LoadRR(r2, r5);
    __ LoadRR(r5, r6);

    // Initialize all JavaScript callee-saved registers, since they will be seen
    // by the garbage collector as part of handlers.
    __ LoadRoot(r6, Heap::kUndefinedValueRootIndex);
    __ LoadRR(r7, r6);
    __ LoadRR(r8, r6);
    __ LoadRR(r9, r6);

    // Invoke the code.
    Handle<Code> builtin = is_construct
                               ? masm->isolate()->builtins()->Construct()
                               : masm->isolate()->builtins()->Call();
    __ Call(builtin, RelocInfo::CODE_TARGET);

    // Exit the JS frame and remove the parameters (except function), and
    // return.
  }
  // Return to the caller through the link register (r14 on s390).
  __ b(r14);

  // r2: result
}
| 823 | 814 |
| 824 | |
// Entry trampoline for ordinary calls (dispatches to the Call builtin).
void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}
| 828 | 818 |
| 829 | |
// Entry trampoline for `new` calls (dispatches to the Construct builtin).
void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}
| 833 | 822 |
| 834 | |
| 835 // Generate code for entering a JS function with the interpreter. | 823 // Generate code for entering a JS function with the interpreter. |
| 836 // On entry to the function the receiver and arguments have been pushed on the | 824 // On entry to the function the receiver and arguments have been pushed on the |
| 837 // stack left to right. The actual argument count matches the formal parameter | 825 // stack left to right. The actual argument count matches the formal parameter |
| 838 // count expected by the function. | 826 // count expected by the function. |
| 839 // | 827 // |
| 840 // The live registers are: | 828 // The live registers are: |
| 841 // o r4: the JS function object being called. | 829 // o r3: the JS function object being called. |
| 842 // o r6: the new target | 830 // o r5: the new target |
| 843 // o cp: our context | 831 // o cp: our context |
| 844 // o pp: the caller's constant pool pointer (if enabled) | 832 // o pp: the caller's constant pool pointer (if enabled) |
| 845 // o fp: the caller's frame pointer | 833 // o fp: the caller's frame pointer |
| 846 // o sp: stack pointer | 834 // o sp: stack pointer |
| 847 // o lr: return address | 835 // o lr: return address |
| 848 // | 836 // |
| 849 // The function builds an interpreter frame. See InterpreterFrameConstants in | 837 // The function builds an interpreter frame. See InterpreterFrameConstants in |
| 850 // frames.h for its layout. | 838 // frames.h for its layout. |
| 851 void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) { | 839 void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) { |
| 852 // Open a frame scope to indicate that there is a frame on the stack. The | 840 // Open a frame scope to indicate that there is a frame on the stack. The |
| 853 // MANUAL indicates that the scope shouldn't actually generate code to set up | 841 // MANUAL indicates that the scope shouldn't actually generate code to set up |
| 854 // the frame (that is done below). | 842 // the frame (that is done below). |
| 855 FrameScope frame_scope(masm, StackFrame::MANUAL); | 843 FrameScope frame_scope(masm, StackFrame::MANUAL); |
| 856 __ PushFixedFrame(r4); | 844 __ PushFixedFrame(r3); |
| 857 __ addi(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp)); | 845 __ AddP(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp)); |
| 858 | 846 |
| 859 // Get the bytecode array from the function object and load the pointer to the | 847 // Get the bytecode array from the function object and load the pointer to the |
| 860 // first entry into kInterpreterBytecodeRegister. | 848 // first entry into kInterpreterBytecodeRegister. |
| 861 __ LoadP(r3, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset)); | 849 __ LoadP(r2, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset)); |
| 862 Label array_done; | |
| 863 Register debug_info = r5; | |
| 864 DCHECK(!debug_info.is(r3)); | |
| 865 __ LoadP(debug_info, | |
| 866 FieldMemOperand(r3, SharedFunctionInfo::kDebugInfoOffset)); | |
| 867 // Load original bytecode array or the debug copy. | |
| 868 __ LoadP(kInterpreterBytecodeArrayRegister, | 850 __ LoadP(kInterpreterBytecodeArrayRegister, |
| 869 FieldMemOperand(r3, SharedFunctionInfo::kFunctionDataOffset)); | 851 FieldMemOperand(r2, SharedFunctionInfo::kFunctionDataOffset)); |
| 870 __ CmpSmiLiteral(debug_info, DebugInfo::uninitialized(), r0); | |
| 871 __ beq(&array_done); | |
| 872 __ LoadP(kInterpreterBytecodeArrayRegister, | |
| 873 FieldMemOperand(debug_info, DebugInfo::kAbstractCodeIndex)); | |
| 874 __ bind(&array_done); | |
| 875 | 852 |
| 876 if (FLAG_debug_code) { | 853 if (FLAG_debug_code) { |
| 877 // Check function data field is actually a BytecodeArray object. | 854 // Check function data field is actually a BytecodeArray object. |
| 878 __ TestIfSmi(kInterpreterBytecodeArrayRegister, r0); | 855 __ TestIfSmi(kInterpreterBytecodeArrayRegister); |
| 879 __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry); | 856 __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry); |
| 880 __ CompareObjectType(kInterpreterBytecodeArrayRegister, r3, no_reg, | 857 __ CompareObjectType(kInterpreterBytecodeArrayRegister, r2, no_reg, |
| 881 BYTECODE_ARRAY_TYPE); | 858 BYTECODE_ARRAY_TYPE); |
| 882 __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry); | 859 __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry); |
| 883 } | 860 } |
| 884 | 861 |
| 885 // Push new.target, bytecode array and zero for bytecode array offset. | 862 // Push new.target, bytecode array and zero for bytecode array offset. |
| 886 __ li(r3, Operand::Zero()); | 863 __ LoadImmP(r2, Operand::Zero()); |
| 887 __ Push(r6, kInterpreterBytecodeArrayRegister, r3); | 864 __ Push(r5, kInterpreterBytecodeArrayRegister, r2); |
| 888 | 865 |
| 889 // Allocate the local and temporary register file on the stack. | 866 // Allocate the local and temporary register file on the stack. |
| 890 { | 867 { |
| 891 // Load frame size (word) from the BytecodeArray object. | 868 // Load frame size (word) from the BytecodeArray object. |
| 892 __ lwz(r5, FieldMemOperand(kInterpreterBytecodeArrayRegister, | 869 __ LoadlW(r4, FieldMemOperand(kInterpreterBytecodeArrayRegister, |
| 893 BytecodeArray::kFrameSizeOffset)); | 870 BytecodeArray::kFrameSizeOffset)); |
| 894 | 871 |
| 895 // Do a stack check to ensure we don't go over the limit. | 872 // Do a stack check to ensure we don't go over the limit. |
| 896 Label ok; | 873 Label ok; |
| 897 __ sub(r6, sp, r5); | 874 __ SubP(r5, sp, r4); |
| 898 __ LoadRoot(r0, Heap::kRealStackLimitRootIndex); | 875 __ LoadRoot(r0, Heap::kRealStackLimitRootIndex); |
| 899 __ cmpl(r6, r0); | 876 __ CmpLogicalP(r5, r0); |
| 900 __ bge(&ok); | 877 __ bge(&ok); |
| 901 __ CallRuntime(Runtime::kThrowStackOverflow); | 878 __ CallRuntime(Runtime::kThrowStackOverflow); |
| 902 __ bind(&ok); | 879 __ bind(&ok); |
| 903 | 880 |
| 904 // If ok, push undefined as the initial value for all register file entries. | 881 // If ok, push undefined as the initial value for all register file entries. |
| 905 // TODO(rmcilroy): Consider doing more than one push per loop iteration. | 882 // TODO(rmcilroy): Consider doing more than one push per loop iteration. |
| 906 Label loop, no_args; | 883 Label loop, no_args; |
| 907 __ LoadRoot(r6, Heap::kUndefinedValueRootIndex); | 884 __ LoadRoot(r5, Heap::kUndefinedValueRootIndex); |
| 908 __ ShiftRightImm(r5, r5, Operand(kPointerSizeLog2), SetRC); | 885 __ ShiftRightP(r4, r4, Operand(kPointerSizeLog2)); |
| 909 __ beq(&no_args, cr0); | 886 __ LoadAndTestP(r4, r4); |
| 910 __ mtctr(r5); | 887 __ beq(&no_args); |
| 888 __ LoadRR(r1, r4); |
| 911 __ bind(&loop); | 889 __ bind(&loop); |
| 912 __ push(r6); | 890 __ push(r5); |
| 913 __ bdnz(&loop); | 891 __ SubP(r1, Operand(1)); |
| 892 __ bne(&loop); |
| 914 __ bind(&no_args); | 893 __ bind(&no_args); |
| 915 } | 894 } |
| 916 | 895 |
| 917 // TODO(rmcilroy): List of things not currently dealt with here but done in | 896 // TODO(rmcilroy): List of things not currently dealt with here but done in |
| 918 // fullcodegen's prologue: | 897 // fullcodegen's prologue: |
| 919 // - Call ProfileEntryHookStub when isolate has a function_entry_hook. | 898 // - Call ProfileEntryHookStub when isolate has a function_entry_hook. |
| 920 // - Code aging of the BytecodeArray object. | 899 // - Code aging of the BytecodeArray object. |
| 921 | 900 |
| 922 // Load accumulator, register file, bytecode offset, dispatch table into | 901 // Load accumulator, register file, bytecode offset, dispatch table into |
| 923 // registers. | 902 // registers. |
| 924 __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex); | 903 __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex); |
| 925 __ addi(kInterpreterRegisterFileRegister, fp, | 904 __ AddP(kInterpreterRegisterFileRegister, fp, |
| 926 Operand(InterpreterFrameConstants::kRegisterFilePointerFromFp)); | 905 Operand(InterpreterFrameConstants::kRegisterFilePointerFromFp)); |
| 927 __ mov(kInterpreterBytecodeOffsetRegister, | 906 __ mov(kInterpreterBytecodeOffsetRegister, |
| 928 Operand(BytecodeArray::kHeaderSize - kHeapObjectTag)); | 907 Operand(BytecodeArray::kHeaderSize - kHeapObjectTag)); |
| 929 __ mov(kInterpreterDispatchTableRegister, | 908 __ mov(kInterpreterDispatchTableRegister, |
| 930 Operand(ExternalReference::interpreter_dispatch_table_address( | 909 Operand(ExternalReference::interpreter_dispatch_table_address( |
| 931 masm->isolate()))); | 910 masm->isolate()))); |
| 932 | 911 |
| 933 // Dispatch to the first bytecode handler for the function. | 912 // Dispatch to the first bytecode handler for the function. |
| 934 __ lbzx(r4, MemOperand(kInterpreterBytecodeArrayRegister, | 913 __ LoadlB(r3, MemOperand(kInterpreterBytecodeArrayRegister, |
| 935 kInterpreterBytecodeOffsetRegister)); | 914 kInterpreterBytecodeOffsetRegister)); |
| 936 __ ShiftLeftImm(ip, r4, Operand(kPointerSizeLog2)); | 915 __ ShiftLeftP(ip, r3, Operand(kPointerSizeLog2)); |
| 937 __ LoadPX(ip, MemOperand(kInterpreterDispatchTableRegister, ip)); | 916 __ LoadP(ip, MemOperand(kInterpreterDispatchTableRegister, ip)); |
| 938 // TODO(rmcilroy): Make dispatch table point to code entrys to avoid untagging | 917 // TODO(rmcilroy): Make dispatch table point to code entrys to avoid untagging |
| 939 // and header removal. | 918 // and header removal. |
| 940 __ addi(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag)); | 919 __ AddP(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag)); |
| 941 __ Call(ip); | 920 __ Call(ip); |
| 942 | 921 |
| 943 // Even though the first bytecode handler was called, we will never return. | 922 // Even though the first bytecode handler was called, we will never return. |
| 944 __ Abort(kUnexpectedReturnFromBytecodeHandler); | 923 __ Abort(kUnexpectedReturnFromBytecodeHandler); |
| 945 } | 924 } |
| 946 | 925 |
| 947 | |
// Tears down the interpreter's JavaScript frame and returns to the caller,
// leaving the return value (already in the accumulator, r2) untouched.
void Builtins::Generate_InterpreterExitTrampoline(MacroAssembler* masm) {
  // TODO(rmcilroy): List of things not currently dealt with here but done in
  // fullcodegen's EmitReturnSequence.
  //  - Supporting FLAG_trace for Runtime::TraceExit.
  //  - Support profiler (specifically decrementing profiling_counter
  //    appropriately and calling out to HandleInterrupts if necessary).

  // The return value is in the accumulator, which is already in r2.

  // Leave the frame (also dropping the register file).
  __ LeaveFrame(StackFrame::JAVA_SCRIPT);

  // Drop receiver + arguments and return. The parameter size (in bytes) is
  // read from the BytecodeArray so the trampoline works for any arity.
  __ LoadlW(r0, FieldMemOperand(kInterpreterBytecodeArrayRegister,
                                BytecodeArray::kParameterSizeOffset));
  __ AddP(sp, sp, r0);
  __ Ret();
}
| 966 | 944 |
| 967 | |
// Pushes |count| arguments onto the stack, walking downwards from |index|
// (the address of the first argument) so the arguments end up on the stack
// in their original order. Clobbers r0 as the loop counter and |scratch|.
// NOTE(review): no zero check here — callers are expected to guarantee
// count > 0 (Generate_InterpreterPushArgsAndConstruct guards with a skip).
static void Generate_InterpreterPushArgs(MacroAssembler* masm, Register index,
                                         Register count, Register scratch) {
  Label loop;
  // Bias index up one slot so the pre-decrement load below starts exactly at
  // the first argument.
  __ AddP(index, index, Operand(kPointerSize));
  __ LoadRR(r0, count);
  __ bind(&loop);
  // Load the next argument and step index down one slot (emulates a
  // load-with-update in two instructions).
  __ LoadP(scratch, MemOperand(index, -kPointerSize));
  __ lay(index, MemOperand(index, -kPointerSize));
  __ push(scratch);
  __ SubP(r0, Operand(1));
  __ bne(&loop);
}
| 978 | 957 |
| 979 | |
// static
// Pushes the receiver plus arguments from the interpreter's register file
// onto the machine stack and tail-calls the generic Call builtin.
void Builtins::Generate_InterpreterPushArgsAndCallImpl(
    MacroAssembler* masm, TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- r2 : the number of arguments (not including the receiver)
  //  -- r4 : the address of the first argument to be pushed. Subsequent
  //          arguments should be consecutive above this, in the same order as
  //          they are to be pushed onto the stack.
  //  -- r3 : the target to call (can be any Object).
  // -----------------------------------

  // Calculate number of arguments (add one for the receiver).
  __ AddP(r5, r2, Operand(1));

  // Push the arguments.
  Generate_InterpreterPushArgs(masm, r4, r5, r6);

  // Tail-call the target; r2 (argc) and r3 (target) are as Call expects.
  __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny,
                                            tail_call_mode),
          RelocInfo::CODE_TARGET);
}
| 1002 | 980 |
| 1003 | |
// static
// Pushes a hole for the receiver plus the constructor arguments and
// tail-calls the generic Construct builtin.
void Builtins::Generate_InterpreterPushArgsAndConstruct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2 : argument count (not including receiver)
  //  -- r5 : new target
  //  -- r3 : constructor to call
  //  -- r4 : address of the first argument
  // -----------------------------------

  // Push a slot for the receiver to be constructed.
  __ LoadImmP(r0, Operand::Zero());
  __ push(r0);

  // Push the arguments (skip if none — the push helper assumes count > 0).
  Label skip;
  __ CmpP(r2, Operand::Zero());
  __ beq(&skip);
  Generate_InterpreterPushArgs(masm, r4, r2, r6);
  __ bind(&skip);

  // Call the constructor with r2 (argc), r3 (constructor), and r5
  // (new.target) unmodified.
  __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
}
| 1027 | 1004 |
| 1028 | |
// Reconstructs the interpreter's dispatch state (register file, dispatch
// table, context, bytecode array, and bytecode offset) from the current
// frame, then jumps to the handler for the bytecode at that offset. Used to
// (re-)enter bytecode execution, e.g. after a deopt notification.
static void Generate_EnterBytecodeDispatch(MacroAssembler* masm) {
  // Initialize the register file register and dispatch table register.
  __ AddP(kInterpreterRegisterFileRegister, fp,
          Operand(InterpreterFrameConstants::kRegisterFilePointerFromFp));
  __ mov(kInterpreterDispatchTableRegister,
         Operand(ExternalReference::interpreter_dispatch_table_address(
             masm->isolate())));

  // Get the context from the frame.
  __ LoadP(kContextRegister,
           MemOperand(kInterpreterRegisterFileRegister,
                      InterpreterFrameConstants::kContextFromRegisterPointer));

  // Get the bytecode array pointer from the frame.
  __ LoadP(
      kInterpreterBytecodeArrayRegister,
      MemOperand(kInterpreterRegisterFileRegister,
                 InterpreterFrameConstants::kBytecodeArrayFromRegisterPointer));

  if (FLAG_debug_code) {
    // Check that the function data field is actually a BytecodeArray object.
    __ TestIfSmi(kInterpreterBytecodeArrayRegister);
    __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
    __ CompareObjectType(kInterpreterBytecodeArrayRegister, r3, no_reg,
                         BYTECODE_ARRAY_TYPE);
    __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Get the target bytecode offset from the frame (stored as a Smi).
  __ LoadP(kInterpreterBytecodeOffsetRegister,
           MemOperand(
               kInterpreterRegisterFileRegister,
               InterpreterFrameConstants::kBytecodeOffsetFromRegisterPointer));
  __ SmiUntag(kInterpreterBytecodeOffsetRegister);

  // Dispatch to the target bytecode: load the byte, index into the dispatch
  // table, skip the Code header, and jump to the handler.
  __ LoadlB(r3, MemOperand(kInterpreterBytecodeArrayRegister,
                           kInterpreterBytecodeOffsetRegister));
  __ ShiftLeftP(ip, r3, Operand(kPointerSizeLog2));
  __ LoadP(ip, MemOperand(kInterpreterDispatchTableRegister, ip));
  __ AddP(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(ip);
}
| 1072 | 1048 |
| 1073 | |
// Notifies the runtime of a deoptimization of the given bailout |type|,
// restores the accumulator from the stack, and re-enters bytecode dispatch.
static void Generate_InterpreterNotifyDeoptimizedHelper(
    MacroAssembler* masm, Deoptimizer::BailoutType type) {
  // Enter an internal frame.
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);

    // Pass the deoptimization type to the runtime system.
    __ LoadSmiLiteral(r3, Smi::FromInt(static_cast<int>(type)));
    __ Push(r3);
    __ CallRuntime(Runtime::kNotifyDeoptimized);
    // Tear down internal frame.
  }

  // Drop state (we don't use these for interpreter deopts) and pop the
  // accumulator value into the accumulator register.
  __ Drop(1);
  __ Pop(kInterpreterAccumulatorRegister);

  // Enter the bytecode dispatch.
  Generate_EnterBytecodeDispatch(masm);
}
| 1095 | 1070 |
| 1096 | |
// Eager-deopt entry point for interpreted frames.
void Builtins::Generate_InterpreterNotifyDeoptimized(MacroAssembler* masm) {
  Generate_InterpreterNotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}
| 1100 | 1074 |
| 1101 | |
// Soft-deopt entry point for interpreted frames.
void Builtins::Generate_InterpreterNotifySoftDeoptimized(MacroAssembler* masm) {
  Generate_InterpreterNotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}
| 1105 | 1078 |
| 1106 | |
// Lazy-deopt entry point for interpreted frames.
void Builtins::Generate_InterpreterNotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_InterpreterNotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}
| 1110 | 1082 |
void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
  // Set the address of the interpreter entry trampoline as a return address
  // (r14 is the link register on s390). This simulates the initial call to
  // bytecode handlers in the interpreter entry trampoline. The return will
  // never actually be taken, but our stack walker uses this address to
  // determine whether a frame is interpreted.
  __ mov(r14,
         Operand(masm->isolate()->builtins()->InterpreterEntryTrampoline()));

  Generate_EnterBytecodeDispatch(masm);
}
| 1122 | 1093 |
| 1123 | |
// Lazily compiles the function, then tail-calls the returned code object.
void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
}
| 1127 | 1097 |
| 1128 | |
// Compiles optimized code synchronously, then tail-calls the result.
void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm,
                                 Runtime::kCompileOptimized_NotConcurrent);
}
| 1133 | 1102 |
| 1134 | |
// Kicks off concurrent optimized compilation, then tail-calls the result.
void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileOptimized_Concurrent);
}
| 1138 | 1106 |
| 1139 | |
// Common tail for the code-aging builtins: calls out to the C function that
// rewrites the code-age patch sequence back to "young", then re-executes the
// (now young) sequence from its start.
static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
  // For now, we are relying on the fact that make_code_young doesn't do any
  // garbage collection which allows us to save/restore the registers without
  // worrying about which of them contain pointers. We also don't build an
  // internal frame to make the code faster, since we shouldn't have to do
  // stack crawls in MakeCodeYoung. This seems a bit fragile.

  // Point r2 at the start of the PlatformCodeAge sequence: cleanse the return
  // address in r14 and back it up by the length of the aging sequence.
  __ CleanseP(r14);
  __ SubP(r14, Operand(kCodeAgingSequenceLength));
  __ LoadRR(r2, r14);

  // Restore the original return address that the aging sequence pushed.
  __ pop(r14);

  // The following registers must be saved and restored when calling through
  // to the runtime:
  //   r2  - contains return address (beginning of patch sequence)
  //   r3  - isolate
  //   r5  - new target
  //   r14 - return address
  FrameScope scope(masm, StackFrame::MANUAL);
  __ MultiPush(r14.bit() | r2.bit() | r3.bit() | r5.bit() | fp.bit());
  __ PrepareCallCFunction(2, 0, r4);
  __ mov(r3, Operand(ExternalReference::isolate_address(masm->isolate())));
  __ CallCFunction(
      ExternalReference::get_make_code_young_function(masm->isolate()), 2);
  __ MultiPop(r14.bit() | r2.bit() | r3.bit() | r5.bit() | fp.bit());
  // Re-execute the freshly patched (young) sequence.
  __ LoadRR(ip, r2);
  __ Jump(ip);
}
| 1168 | 1137 |
// Stamps out the even/odd-marking code-age builtins for every age in
// CODE_AGE_LIST; each simply defers to GenerateMakeCodeYoungAgainCommon.
#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                  \
  void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking( \
      MacroAssembler* masm) {                                 \
    GenerateMakeCodeYoungAgainCommon(masm);                   \
  }                                                           \
  void Builtins::Generate_Make##C##CodeYoungAgainOddMarking(  \
      MacroAssembler* masm) {                                 \
    GenerateMakeCodeYoungAgainCommon(masm);                   \
  }
CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR
| 1180 | 1149 |
| 1181 | |
// Marks the code object as executed once via the runtime, then performs the
// young-code prologue itself and resumes execution after the aging stub.
void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
  // For now, we are relying on the fact that make_code_young doesn't do any
  // garbage collection which allows us to save/restore the registers without
  // worrying about which of them contain pointers. We also don't build an
  // internal frame to make the code faster, since we shouldn't have to do
  // stack crawls in MakeCodeYoung. This seems a bit fragile.

  // Point r2 at the start of the PlatformCodeAge sequence: cleanse the return
  // address in r14 and back it up by the length of the aging sequence.
  __ CleanseP(r14);
  __ SubP(r14, Operand(kCodeAgingSequenceLength));
  __ LoadRR(r2, r14);

  // Restore the original return address that the aging sequence pushed.
  __ pop(r14);

  // The following registers must be saved and restored when calling through
  // to the runtime:
  //   r2  - contains return address (beginning of patch sequence)
  //   r3  - isolate
  //   r5  - new target
  //   r14 - return address
  FrameScope scope(masm, StackFrame::MANUAL);
  __ MultiPush(r14.bit() | r2.bit() | r3.bit() | r5.bit() | fp.bit());
  __ PrepareCallCFunction(2, 0, r4);
  __ mov(r3, Operand(ExternalReference::isolate_address(masm->isolate())));
  __ CallCFunction(
      ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
      2);
  __ MultiPop(r14.bit() | r2.bit() | r3.bit() | r5.bit() | fp.bit());
  __ LoadRR(ip, r2);

  // Perform prologue operations usually performed by the young code stub.
  __ PushFixedFrame(r3);
  __ la(fp, MemOperand(sp, StandardFrameConstants::kFixedFrameSizeFromFp));

  // Jump to the point after the code-age stub.
  __ AddP(r2, ip, Operand(kNoCodeAgeSequenceLength));
  __ Jump(r2);
}
| 1218 | 1188 |
| 1219 | |
// "Executed twice" marking reuses the make-young common path.
void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
  GenerateMakeCodeYoungAgainCommon(masm);
}
| 1223 | 1192 |
| 1224 | |
// "To be executed once" marking reuses the executed-once path.
void Builtins::Generate_MarkCodeAsToBeExecutedOnce(MacroAssembler* masm) {
  Generate_MarkCodeAsExecutedOnce(masm);
}
| 1228 | 1196 |
| 1229 | |
// Notifies the runtime of a stub failure while preserving all JS caller- and
// callee-saved registers, then drops the state slot and returns to the miss
// handler.
static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
                                             SaveFPRegsMode save_doubles) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Preserve registers across notification; this is important for compiled
    // stubs that tail-call the runtime on deopts, passing their parameters
    // in registers.
    __ MultiPush(kJSCallerSaved | kCalleeSaved);
    // Pass the function and deoptimization type to the runtime system.
    __ CallRuntime(Runtime::kNotifyStubFailure, save_doubles);
    __ MultiPop(kJSCallerSaved | kCalleeSaved);
  }

  __ la(sp, MemOperand(sp, kPointerSize));  // Ignore state
  __ Ret();                                 // Jump to miss handler
}
| 1247 | 1214 |
| 1248 | |
// Stub-failure notification without saving FP registers.
void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
}
| 1252 | 1218 |
| 1253 | |
// Stub-failure notification that also saves FP registers.
void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
}
| 1257 | 1222 |
| 1258 | |
// Notifies the runtime of a deoptimization for full-codegen frames, then
// dispatches on the saved full-codegen state: either no registers are live
// (drop one state slot) or the top-of-stack register r2 must be restored
// (reload r2, drop two slots). Any other state is a fatal error.
static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                             Deoptimizer::BailoutType type) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass the function and deoptimization type to the runtime system.
    __ LoadSmiLiteral(r2, Smi::FromInt(static_cast<int>(type)));
    __ push(r2);
    __ CallRuntime(Runtime::kNotifyDeoptimized);
  }

  // Get the full-codegen state from the stack and untag it -> r8.
  __ LoadP(r8, MemOperand(sp, 0 * kPointerSize));
  __ SmiUntag(r8);
  // Switch on the state.
  Label with_tos_register, unknown_state;
  __ CmpP(r8, Operand(FullCodeGenerator::NO_REGISTERS));
  __ bne(&with_tos_register);
  __ la(sp, MemOperand(sp, 1 * kPointerSize));  // Remove state.
  __ Ret();

  __ bind(&with_tos_register);
  // Restore the top-of-stack value into r2 before removing both slots.
  __ LoadP(r2, MemOperand(sp, 1 * kPointerSize));
  __ CmpP(r8, Operand(FullCodeGenerator::TOS_REG));
  __ bne(&unknown_state);
  __ la(sp, MemOperand(sp, 2 * kPointerSize));  // Remove state.
  __ Ret();

  __ bind(&unknown_state);
  __ stop("no cases left");
}
| 1289 | 1253 |
| 1290 | |
| 1291 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) { | 1254 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) { |
| 1292 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER); | 1255 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER); |
| 1293 } | 1256 } |
| 1294 | 1257 |
| 1295 | |
| 1296 void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) { | 1258 void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) { |
| 1297 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT); | 1259 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT); |
| 1298 } | 1260 } |
| 1299 | 1261 |
| 1300 | |
| 1301 void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) { | 1262 void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) { |
| 1302 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY); | 1263 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY); |
| 1303 } | 1264 } |
| 1304 | 1265 |
| 1305 | 1266 // Clobbers registers {r6, r7, r8, r9}. |
| 1306 // Clobbers registers {r7, r8, r9, r10}. | |
| 1307 void CompatibleReceiverCheck(MacroAssembler* masm, Register receiver, | 1267 void CompatibleReceiverCheck(MacroAssembler* masm, Register receiver, |
| 1308 Register function_template_info, | 1268 Register function_template_info, |
| 1309 Label* receiver_check_failed) { | 1269 Label* receiver_check_failed) { |
| 1310 Register signature = r7; | 1270 Register signature = r6; |
| 1311 Register map = r8; | 1271 Register map = r7; |
| 1312 Register constructor = r9; | 1272 Register constructor = r8; |
| 1313 Register scratch = r10; | 1273 Register scratch = r9; |
| 1314 | 1274 |
| 1315 // If there is no signature, return the holder. | 1275 // If there is no signature, return the holder. |
| 1316 __ LoadP(signature, FieldMemOperand(function_template_info, | 1276 __ LoadP(signature, FieldMemOperand(function_template_info, |
| 1317 FunctionTemplateInfo::kSignatureOffset)); | 1277 FunctionTemplateInfo::kSignatureOffset)); |
| 1318 Label receiver_check_passed; | 1278 Label receiver_check_passed; |
| 1319 __ JumpIfRoot(signature, Heap::kUndefinedValueRootIndex, | 1279 __ JumpIfRoot(signature, Heap::kUndefinedValueRootIndex, |
| 1320 &receiver_check_passed); | 1280 &receiver_check_passed); |
| 1321 | 1281 |
| 1322 // Walk the prototype chain. | 1282 // Walk the prototype chain. |
| 1323 __ LoadP(map, FieldMemOperand(receiver, HeapObject::kMapOffset)); | 1283 __ LoadP(map, FieldMemOperand(receiver, HeapObject::kMapOffset)); |
| 1324 Label prototype_loop_start; | 1284 Label prototype_loop_start; |
| 1325 __ bind(&prototype_loop_start); | 1285 __ bind(&prototype_loop_start); |
| 1326 | 1286 |
| 1327 // Get the constructor, if any. | 1287 // Get the constructor, if any. |
| 1328 __ GetMapConstructor(constructor, map, scratch, scratch); | 1288 __ GetMapConstructor(constructor, map, scratch, scratch); |
| 1329 __ cmpi(scratch, Operand(JS_FUNCTION_TYPE)); | 1289 __ CmpP(scratch, Operand(JS_FUNCTION_TYPE)); |
| 1330 Label next_prototype; | 1290 Label next_prototype; |
| 1331 __ bne(&next_prototype); | 1291 __ bne(&next_prototype); |
| 1332 Register type = constructor; | 1292 Register type = constructor; |
| 1333 __ LoadP(type, | 1293 __ LoadP(type, |
| 1334 FieldMemOperand(constructor, JSFunction::kSharedFunctionInfoOffset)); | 1294 FieldMemOperand(constructor, JSFunction::kSharedFunctionInfoOffset)); |
| 1335 __ LoadP(type, | 1295 __ LoadP(type, |
| 1336 FieldMemOperand(type, SharedFunctionInfo::kFunctionDataOffset)); | 1296 FieldMemOperand(type, SharedFunctionInfo::kFunctionDataOffset)); |
| 1337 | 1297 |
| 1338 // Loop through the chain of inheriting function templates. | 1298 // Loop through the chain of inheriting function templates. |
| 1339 Label function_template_loop; | 1299 Label function_template_loop; |
| 1340 __ bind(&function_template_loop); | 1300 __ bind(&function_template_loop); |
| 1341 | 1301 |
| 1342 // If the signatures match, we have a compatible receiver. | 1302 // If the signatures match, we have a compatible receiver. |
| 1343 __ cmp(signature, type); | 1303 __ CmpP(signature, type); |
| 1344 __ beq(&receiver_check_passed); | 1304 __ beq(&receiver_check_passed); |
| 1345 | 1305 |
| 1346 // If the current type is not a FunctionTemplateInfo, load the next prototype | 1306 // If the current type is not a FunctionTemplateInfo, load the next prototype |
| 1347 // in the chain. | 1307 // in the chain. |
| 1348 __ JumpIfSmi(type, &next_prototype); | 1308 __ JumpIfSmi(type, &next_prototype); |
| 1349 __ CompareObjectType(type, scratch, scratch, FUNCTION_TEMPLATE_INFO_TYPE); | 1309 __ CompareObjectType(type, scratch, scratch, FUNCTION_TEMPLATE_INFO_TYPE); |
| 1350 __ bne(&next_prototype); | 1310 __ bne(&next_prototype); |
| 1351 | 1311 |
| 1352 // Otherwise load the parent function template and iterate. | 1312 // Otherwise load the parent function template and iterate. |
| 1353 __ LoadP(type, | 1313 __ LoadP(type, |
| 1354 FieldMemOperand(type, FunctionTemplateInfo::kParentTemplateOffset)); | 1314 FieldMemOperand(type, FunctionTemplateInfo::kParentTemplateOffset)); |
| 1355 __ b(&function_template_loop); | 1315 __ b(&function_template_loop); |
| 1356 | 1316 |
| 1357 // Load the next prototype. | 1317 // Load the next prototype. |
| 1358 __ bind(&next_prototype); | 1318 __ bind(&next_prototype); |
| 1359 __ lwz(scratch, FieldMemOperand(map, Map::kBitField3Offset)); | 1319 __ LoadlW(scratch, FieldMemOperand(map, Map::kBitField3Offset)); |
| 1360 __ DecodeField<Map::HasHiddenPrototype>(scratch, SetRC); | 1320 __ DecodeField<Map::HasHiddenPrototype>(scratch); |
| 1361 __ beq(receiver_check_failed, cr0); | 1321 __ beq(receiver_check_failed); |
| 1362 | 1322 |
| 1363 __ LoadP(receiver, FieldMemOperand(map, Map::kPrototypeOffset)); | 1323 __ LoadP(receiver, FieldMemOperand(map, Map::kPrototypeOffset)); |
| 1364 __ LoadP(map, FieldMemOperand(receiver, HeapObject::kMapOffset)); | 1324 __ LoadP(map, FieldMemOperand(receiver, HeapObject::kMapOffset)); |
| 1365 // Iterate. | 1325 // Iterate. |
| 1366 __ b(&prototype_loop_start); | 1326 __ b(&prototype_loop_start); |
| 1367 | 1327 |
| 1368 __ bind(&receiver_check_passed); | 1328 __ bind(&receiver_check_passed); |
| 1369 } | 1329 } |
| 1370 | 1330 |
| 1371 | |
| 1372 void Builtins::Generate_HandleFastApiCall(MacroAssembler* masm) { | 1331 void Builtins::Generate_HandleFastApiCall(MacroAssembler* masm) { |
| 1373 // ----------- S t a t e ------------- | 1332 // ----------- S t a t e ------------- |
| 1374 // -- r3 : number of arguments excluding receiver | 1333 // -- r2 : number of arguments excluding receiver |
| 1375 // -- r4 : callee | 1334 // -- r3 : callee |
| 1377 // -- lr : return address | 1335 // -- r14 : return address |
| 1377 // -- sp[0] : last argument | 1336 // -- sp[0] : last argument |
| 1378 // -- ... | 1337 // -- ... |
| 1379 // -- sp[4 * (argc - 1)] : first argument | 1338 // -- sp[4 * (argc - 1)] : first argument |
| 1380 // -- sp[4 * argc] : receiver | 1339 // -- sp[4 * argc] : receiver |
| 1381 // ----------------------------------- | 1340 // ----------------------------------- |
| 1382 | 1341 |
| 1383 | |
| 1384 // Load the FunctionTemplateInfo. | 1342 // Load the FunctionTemplateInfo. |
| 1385 __ LoadP(r6, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset)); | 1343 __ LoadP(r5, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset)); |
| 1386 __ LoadP(r6, FieldMemOperand(r6, SharedFunctionInfo::kFunctionDataOffset)); | 1344 __ LoadP(r5, FieldMemOperand(r5, SharedFunctionInfo::kFunctionDataOffset)); |
| 1387 | 1345 |
| 1388 // Do the compatible receiver check. | 1346 // Do the compatible receiver check. |
| 1389 Label receiver_check_failed; | 1347 Label receiver_check_failed; |
| 1390 __ ShiftLeftImm(r11, r3, Operand(kPointerSizeLog2)); | 1348 __ ShiftLeftP(r1, r2, Operand(kPointerSizeLog2)); |
| 1391 __ LoadPX(r5, MemOperand(sp, r11)); | 1349 __ LoadP(r4, MemOperand(sp, r1)); |
| 1392 CompatibleReceiverCheck(masm, r5, r6, &receiver_check_failed); | 1350 CompatibleReceiverCheck(masm, r4, r5, &receiver_check_failed); |
| 1393 | 1351 |
| 1394 // Get the callback offset from the FunctionTemplateInfo, and jump to the | 1352 // Get the callback offset from the FunctionTemplateInfo, and jump to the |
| 1395 // beginning of the code. | 1353 // beginning of the code. |
| 1396 __ LoadP(r7, FieldMemOperand(r6, FunctionTemplateInfo::kCallCodeOffset)); | 1354 __ LoadP(r6, FieldMemOperand(r5, FunctionTemplateInfo::kCallCodeOffset)); |
| 1397 __ LoadP(r7, FieldMemOperand(r7, CallHandlerInfo::kFastHandlerOffset)); | 1355 __ LoadP(r6, FieldMemOperand(r6, CallHandlerInfo::kFastHandlerOffset)); |
| 1398 __ addi(ip, r7, Operand(Code::kHeaderSize - kHeapObjectTag)); | 1356 __ AddP(ip, r6, Operand(Code::kHeaderSize - kHeapObjectTag)); |
| 1399 __ JumpToJSEntry(ip); | 1357 __ JumpToJSEntry(ip); |
| 1400 | 1358 |
| 1401 // Compatible receiver check failed: throw an Illegal Invocation exception. | 1359 // Compatible receiver check failed: throw an Illegal Invocation exception. |
| 1402 __ bind(&receiver_check_failed); | 1360 __ bind(&receiver_check_failed); |
| 1403 // Drop the arguments (including the receiver); | 1361 // Drop the arguments (including the receiver); |
| 1404 __ addi(r11, r11, Operand(kPointerSize)); | 1362 __ AddP(r1, r1, Operand(kPointerSize)); |
| 1405 __ add(sp, sp, r11); | 1363 __ AddP(sp, sp, r1); |
| 1406 __ TailCallRuntime(Runtime::kThrowIllegalInvocation); | 1364 __ TailCallRuntime(Runtime::kThrowIllegalInvocation); |
| 1407 } | 1365 } |
| 1408 | 1366 |
| 1409 | |
| 1410 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) { | 1367 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) { |
| 1411 // Lookup the function in the JavaScript frame. | 1368 // Lookup the function in the JavaScript frame. |
| 1412 __ LoadP(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); | 1369 __ LoadP(r2, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); |
| 1413 { | 1370 { |
| 1414 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); | 1371 FrameScope scope(masm, StackFrame::INTERNAL); |
| 1415 // Pass function as argument. | 1372 // Pass function as argument. |
| 1416 __ push(r3); | 1373 __ push(r2); |
| 1417 __ CallRuntime(Runtime::kCompileForOnStackReplacement); | 1374 __ CallRuntime(Runtime::kCompileForOnStackReplacement); |
| 1418 } | 1375 } |
| 1419 | 1376 |
| 1420 // If the code object is null, just return to the unoptimized code. | 1377 // If the code object is null, just return to the unoptimized code. |
| 1421 Label skip; | 1378 Label skip; |
| 1422 __ CmpSmiLiteral(r3, Smi::FromInt(0), r0); | 1379 __ CmpSmiLiteral(r2, Smi::FromInt(0), r0); |
| 1423 __ bne(&skip); | 1380 __ bne(&skip); |
| 1424 __ Ret(); | 1381 __ Ret(); |
| 1425 | 1382 |
| 1426 __ bind(&skip); | 1383 __ bind(&skip); |
| 1427 | 1384 |
| 1428 // Load deoptimization data from the code object. | 1385 // Load deoptimization data from the code object. |
| 1429 // <deopt_data> = <code>[#deoptimization_data_offset] | 1386 // <deopt_data> = <code>[#deoptimization_data_offset] |
| 1430 __ LoadP(r4, FieldMemOperand(r3, Code::kDeoptimizationDataOffset)); | 1387 __ LoadP(r3, FieldMemOperand(r2, Code::kDeoptimizationDataOffset)); |
| 1431 | 1388 |
| 1432 { | 1389 // Load the OSR entrypoint offset from the deoptimization data. |
| 1433 ConstantPoolUnavailableScope constant_pool_unavailable(masm); | 1390 // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset] |
| 1434 __ addi(r3, r3, Operand(Code::kHeaderSize - kHeapObjectTag)); // Code start | 1391 __ LoadP( |
| 1392 r3, FieldMemOperand(r3, FixedArray::OffsetOfElementAt( |
| 1393 DeoptimizationInputData::kOsrPcOffsetIndex))); |
| 1394 __ SmiUntag(r3); |
| 1435 | 1395 |
| 1436 if (FLAG_enable_embedded_constant_pool) { | 1396 // Compute the target address = code_obj + header_size + osr_offset |
| 1437 __ LoadConstantPoolPointerRegisterFromCodeTargetAddress(r3); | 1397 // <entry_addr> = <code_obj> + #header_size + <osr_offset> |
| 1438 } | 1398 __ AddP(r2, r3); |
| 1399 __ AddP(r0, r2, Operand(Code::kHeaderSize - kHeapObjectTag)); |
| 1400 __ LoadRR(r14, r0); |
| 1439 | 1401 |
| 1440 // Load the OSR entrypoint offset from the deoptimization data. | 1402 // And "return" to the OSR entry point of the function. |
| 1441 // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset] | 1403 __ Ret(); |
| 1442 __ LoadP(r4, FieldMemOperand( | |
| 1443 r4, FixedArray::OffsetOfElementAt( | |
| 1444 DeoptimizationInputData::kOsrPcOffsetIndex))); | |
| 1445 __ SmiUntag(r4); | |
| 1446 | |
| 1447 // Compute the target address = code start + osr_offset | |
| 1448 __ add(r0, r3, r4); | |
| 1449 | |
| 1450 // And "return" to the OSR entry point of the function. | |
| 1451 __ mtlr(r0); | |
| 1452 __ blr(); | |
| 1453 } | |
| 1454 } | 1404 } |
| 1455 | 1405 |
| 1456 | |
| 1457 void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) { | 1406 void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) { |
| 1458 // We check the stack limit as indicator that recompilation might be done. | 1407 // We check the stack limit as indicator that recompilation might be done. |
| 1459 Label ok; | 1408 Label ok; |
| 1460 __ LoadRoot(ip, Heap::kStackLimitRootIndex); | 1409 __ CmpLogicalP(sp, RootMemOperand(Heap::kStackLimitRootIndex)); |
| 1461 __ cmpl(sp, ip); | 1410 __ bge(&ok, Label::kNear); |
| 1462 __ bge(&ok); | |
| 1463 { | 1411 { |
| 1464 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); | 1412 FrameScope scope(masm, StackFrame::INTERNAL); |
| 1465 __ CallRuntime(Runtime::kStackGuard); | 1413 __ CallRuntime(Runtime::kStackGuard); |
| 1466 } | 1414 } |
| 1467 __ Jump(masm->isolate()->builtins()->OnStackReplacement(), | 1415 __ Jump(masm->isolate()->builtins()->OnStackReplacement(), |
| 1468 RelocInfo::CODE_TARGET); | 1416 RelocInfo::CODE_TARGET); |
| 1469 | 1417 |
| 1470 __ bind(&ok); | 1418 __ bind(&ok); |
| 1471 __ Ret(); | 1419 __ Ret(); |
| 1472 } | 1420 } |
| 1473 | 1421 |
| 1474 | |
| 1475 // static | 1422 // static |
| 1476 void Builtins::Generate_DatePrototype_GetField(MacroAssembler* masm, | 1423 void Builtins::Generate_DatePrototype_GetField(MacroAssembler* masm, |
| 1477 int field_index) { | 1424 int field_index) { |
| 1478 // ----------- S t a t e ------------- | 1425 // ----------- S t a t e ------------- |
| 1479 // -- lr : return address | 1426 // -- r14 : return address |
| 1480 // -- sp[0] : receiver | 1427 // -- sp[0] : receiver |
| 1481 // ----------------------------------- | 1428 // ----------------------------------- |
| 1482 | 1429 |
| 1483 // 1. Pop receiver into r3 and check that it's actually a JSDate object. | 1430 // 1. Pop receiver into r2 and check that it's actually a JSDate object. |
| 1484 Label receiver_not_date; | 1431 Label receiver_not_date; |
| 1485 { | 1432 { |
| 1486 __ Pop(r3); | 1433 __ Pop(r2); |
| 1487 __ JumpIfSmi(r3, &receiver_not_date); | 1434 __ JumpIfSmi(r2, &receiver_not_date); |
| 1488 __ CompareObjectType(r3, r4, r5, JS_DATE_TYPE); | 1435 __ CompareObjectType(r2, r3, r4, JS_DATE_TYPE); |
| 1489 __ bne(&receiver_not_date); | 1436 __ bne(&receiver_not_date); |
| 1490 } | 1437 } |
| 1491 | 1438 |
| 1492 // 2. Load the specified date field, falling back to the runtime as necessary. | 1439 // 2. Load the specified date field, falling back to the runtime as necessary. |
| 1493 if (field_index == JSDate::kDateValue) { | 1440 if (field_index == JSDate::kDateValue) { |
| 1494 __ LoadP(r3, FieldMemOperand(r3, JSDate::kValueOffset)); | 1441 __ LoadP(r2, FieldMemOperand(r2, JSDate::kValueOffset)); |
| 1495 } else { | 1442 } else { |
| 1496 if (field_index < JSDate::kFirstUncachedField) { | 1443 if (field_index < JSDate::kFirstUncachedField) { |
| 1497 Label stamp_mismatch; | 1444 Label stamp_mismatch; |
| 1498 __ mov(r4, Operand(ExternalReference::date_cache_stamp(masm->isolate()))); | 1445 __ mov(r3, Operand(ExternalReference::date_cache_stamp(masm->isolate()))); |
| 1499 __ LoadP(r4, MemOperand(r4)); | 1446 __ LoadP(r3, MemOperand(r3)); |
| 1500 __ LoadP(ip, FieldMemOperand(r3, JSDate::kCacheStampOffset)); | 1447 __ LoadP(ip, FieldMemOperand(r2, JSDate::kCacheStampOffset)); |
| 1501 __ cmp(r4, ip); | 1448 __ CmpP(r3, ip); |
| 1502 __ bne(&stamp_mismatch); | 1449 __ bne(&stamp_mismatch); |
| 1503 __ LoadP(r3, FieldMemOperand( | 1450 __ LoadP(r2, FieldMemOperand( |
| 1504 r3, JSDate::kValueOffset + field_index * kPointerSize)); | 1451 r2, JSDate::kValueOffset + field_index * kPointerSize)); |
| 1505 __ Ret(); | 1452 __ Ret(); |
| 1506 __ bind(&stamp_mismatch); | 1453 __ bind(&stamp_mismatch); |
| 1507 } | 1454 } |
| 1508 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); | 1455 FrameScope scope(masm, StackFrame::INTERNAL); |
| 1509 __ PrepareCallCFunction(2, r4); | 1456 __ PrepareCallCFunction(2, r3); |
| 1510 __ LoadSmiLiteral(r4, Smi::FromInt(field_index)); | 1457 __ LoadSmiLiteral(r3, Smi::FromInt(field_index)); |
| 1511 __ CallCFunction( | 1458 __ CallCFunction( |
| 1512 ExternalReference::get_date_field_function(masm->isolate()), 2); | 1459 ExternalReference::get_date_field_function(masm->isolate()), 2); |
| 1513 } | 1460 } |
| 1514 __ Ret(); | 1461 __ Ret(); |
| 1515 | 1462 |
| 1516 // 3. Raise a TypeError if the receiver is not a date. | 1463 // 3. Raise a TypeError if the receiver is not a date. |
| 1517 __ bind(&receiver_not_date); | 1464 __ bind(&receiver_not_date); |
| 1518 __ TailCallRuntime(Runtime::kThrowNotDateError); | 1465 __ TailCallRuntime(Runtime::kThrowNotDateError); |
| 1519 } | 1466 } |
| 1520 | 1467 |
| 1521 | |
| 1522 // static | 1468 // static |
| 1523 void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) { | 1469 void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) { |
| 1524 // ----------- S t a t e ------------- | 1470 // ----------- S t a t e ------------- |
| 1525 // -- r3 : argc | 1471 // -- r2 : argc |
| 1526 // -- sp[0] : argArray | 1472 // -- sp[0] : argArray |
| 1527 // -- sp[4] : thisArg | 1473 // -- sp[4] : thisArg |
| 1528 // -- sp[8] : receiver | 1474 // -- sp[8] : receiver |
| 1529 // ----------------------------------- | 1475 // ----------------------------------- |
| 1530 | 1476 |
| 1531 // 1. Load receiver into r4, argArray into r3 (if present), remove all | 1477 // 1. Load receiver into r3, argArray into r2 (if present), remove all |
| 1532 // arguments from the stack (including the receiver), and push thisArg (if | 1478 // arguments from the stack (including the receiver), and push thisArg (if |
| 1533 // present) instead. | 1479 // present) instead. |
| 1534 { | 1480 { |
| 1535 Label skip; | 1481 Label skip; |
| 1536 Register arg_size = r5; | 1482 Register arg_size = r4; |
| 1537 Register new_sp = r6; | 1483 Register new_sp = r5; |
| 1538 Register scratch = r7; | 1484 Register scratch = r6; |
| 1539 __ ShiftLeftImm(arg_size, r3, Operand(kPointerSizeLog2)); | 1485 __ ShiftLeftP(arg_size, r2, Operand(kPointerSizeLog2)); |
| 1540 __ add(new_sp, sp, arg_size); | 1486 __ AddP(new_sp, sp, arg_size); |
| 1541 __ LoadRoot(r3, Heap::kUndefinedValueRootIndex); | 1487 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex); |
| 1542 __ mr(scratch, r3); | 1488 __ LoadRR(scratch, r2); |
| 1543 __ LoadP(r4, MemOperand(new_sp, 0)); // receiver | 1489 __ LoadP(r3, MemOperand(new_sp, 0)); // receiver |
| 1544 __ cmpi(arg_size, Operand(kPointerSize)); | 1490 __ CmpP(arg_size, Operand(kPointerSize)); |
| 1545 __ blt(&skip); | 1491 __ blt(&skip); |
| 1546 __ LoadP(scratch, MemOperand(new_sp, 1 * -kPointerSize)); // thisArg | 1492 __ LoadP(scratch, MemOperand(new_sp, 1 * -kPointerSize)); // thisArg |
| 1547 __ beq(&skip); | 1493 __ beq(&skip); |
| 1548 __ LoadP(r3, MemOperand(new_sp, 2 * -kPointerSize)); // argArray | 1494 __ LoadP(r2, MemOperand(new_sp, 2 * -kPointerSize)); // argArray |
| 1549 __ bind(&skip); | 1495 __ bind(&skip); |
| 1550 __ mr(sp, new_sp); | 1496 __ LoadRR(sp, new_sp); |
| 1551 __ StoreP(scratch, MemOperand(sp, 0)); | 1497 __ StoreP(scratch, MemOperand(sp, 0)); |
| 1552 } | 1498 } |
| 1553 | 1499 |
| 1554 // ----------- S t a t e ------------- | 1500 // ----------- S t a t e ------------- |
| 1555 // -- r3 : argArray | 1501 // -- r2 : argArray |
| 1556 // -- r4 : receiver | 1502 // -- r3 : receiver |
| 1557 // -- sp[0] : thisArg | 1503 // -- sp[0] : thisArg |
| 1558 // ----------------------------------- | 1504 // ----------------------------------- |
| 1559 | 1505 |
| 1560 // 2. Make sure the receiver is actually callable. | 1506 // 2. Make sure the receiver is actually callable. |
| 1561 Label receiver_not_callable; | 1507 Label receiver_not_callable; |
| 1562 __ JumpIfSmi(r4, &receiver_not_callable); | 1508 __ JumpIfSmi(r3, &receiver_not_callable); |
| 1563 __ LoadP(r7, FieldMemOperand(r4, HeapObject::kMapOffset)); | 1509 __ LoadP(r6, FieldMemOperand(r3, HeapObject::kMapOffset)); |
| 1564 __ lbz(r7, FieldMemOperand(r7, Map::kBitFieldOffset)); | 1510 __ LoadlB(r6, FieldMemOperand(r6, Map::kBitFieldOffset)); |
| 1565 __ TestBit(r7, Map::kIsCallable, r0); | 1511 __ TestBit(r6, Map::kIsCallable); |
| 1566 __ beq(&receiver_not_callable, cr0); | 1512 __ beq(&receiver_not_callable); |
| 1567 | 1513 |
| 1568 // 3. Tail call with no arguments if argArray is null or undefined. | 1514 // 3. Tail call with no arguments if argArray is null or undefined. |
| 1569 Label no_arguments; | 1515 Label no_arguments; |
| 1570 __ JumpIfRoot(r3, Heap::kNullValueRootIndex, &no_arguments); | 1516 __ JumpIfRoot(r2, Heap::kNullValueRootIndex, &no_arguments); |
| 1571 __ JumpIfRoot(r3, Heap::kUndefinedValueRootIndex, &no_arguments); | 1517 __ JumpIfRoot(r2, Heap::kUndefinedValueRootIndex, &no_arguments); |
| 1572 | 1518 |
| 1573 // 4a. Apply the receiver to the given argArray (passing undefined for | 1519 // 4a. Apply the receiver to the given argArray (passing undefined for |
| 1574 // new.target). | 1520 // new.target). |
| 1575 __ LoadRoot(r6, Heap::kUndefinedValueRootIndex); | 1521 __ LoadRoot(r5, Heap::kUndefinedValueRootIndex); |
| 1576 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET); | 1522 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET); |
| 1577 | 1523 |
| 1578 // 4b. The argArray is either null or undefined, so we tail call without any | 1524 // 4b. The argArray is either null or undefined, so we tail call without any |
| 1579 // arguments to the receiver. | 1525 // arguments to the receiver. |
| 1580 __ bind(&no_arguments); | 1526 __ bind(&no_arguments); |
| 1581 { | 1527 { |
| 1582 __ li(r3, Operand::Zero()); | 1528 __ LoadImmP(r2, Operand::Zero()); |
| 1583 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); | 1529 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); |
| 1584 } | 1530 } |
| 1585 | 1531 |
| 1586 // 4c. The receiver is not callable, throw an appropriate TypeError. | 1532 // 4c. The receiver is not callable, throw an appropriate TypeError. |
| 1587 __ bind(&receiver_not_callable); | 1533 __ bind(&receiver_not_callable); |
| 1588 { | 1534 { |
| 1589 __ StoreP(r4, MemOperand(sp, 0)); | 1535 __ StoreP(r3, MemOperand(sp, 0)); |
| 1590 __ TailCallRuntime(Runtime::kThrowApplyNonFunction); | 1536 __ TailCallRuntime(Runtime::kThrowApplyNonFunction); |
| 1591 } | 1537 } |
| 1592 } | 1538 } |
| 1593 | 1539 |
| 1594 | |
| 1595 // static | 1540 // static |
| 1596 void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) { | 1541 void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) { |
| 1597 // 1. Make sure we have at least one argument. | 1542 // 1. Make sure we have at least one argument. |
| 1598 // r3: actual number of arguments | 1543 // r2: actual number of arguments |
| 1599 { | 1544 { |
| 1600 Label done; | 1545 Label done; |
| 1601 __ cmpi(r3, Operand::Zero()); | 1546 __ CmpP(r2, Operand::Zero()); |
| 1602 __ bne(&done); | 1547 __ bne(&done, Label::kNear); |
| 1603 __ PushRoot(Heap::kUndefinedValueRootIndex); | 1548 __ PushRoot(Heap::kUndefinedValueRootIndex); |
| 1604 __ addi(r3, r3, Operand(1)); | 1549 __ AddP(r2, Operand(1)); |
| 1605 __ bind(&done); | 1550 __ bind(&done); |
| 1606 } | 1551 } |
| 1607 | 1552 |
| 1553 // r2: actual number of arguments |
| 1608 // 2. Get the callable to call (passed as receiver) from the stack. | 1554 // 2. Get the callable to call (passed as receiver) from the stack. |
| 1609 // r3: actual number of arguments | 1555 __ ShiftLeftP(r4, r2, Operand(kPointerSizeLog2)); |
| 1610 __ ShiftLeftImm(r5, r3, Operand(kPointerSizeLog2)); | 1556 __ LoadP(r3, MemOperand(sp, r4)); |
| 1611 __ LoadPX(r4, MemOperand(sp, r5)); | |
| 1612 | 1557 |
| 1613 // 3. Shift arguments and return address one slot down on the stack | 1558 // 3. Shift arguments and return address one slot down on the stack |
| 1614 // (overwriting the original receiver). Adjust argument count to make | 1559 // (overwriting the original receiver). Adjust argument count to make |
| 1615 // the original first argument the new receiver. | 1560 // the original first argument the new receiver. |
| 1616 // r3: actual number of arguments | 1561 // r2: actual number of arguments |
| 1617 // r4: callable | 1562 // r3: callable |
| 1618 { | 1563 { |
| 1619 Label loop; | 1564 Label loop; |
| 1620 // Calculate the copy start address (destination). Copy end address is sp. | 1565 // Calculate the copy start address (destination). Copy end address is sp. |
| 1621 __ add(r5, sp, r5); | 1566 __ AddP(r4, sp, r4); |
| 1622 | 1567 |
| 1623 | |
| 1624 __ mtctr(r3); | |
| 1625 __ bind(&loop); | 1568 __ bind(&loop); |
| 1626 __ LoadP(ip, MemOperand(r5, -kPointerSize)); | 1569 __ LoadP(ip, MemOperand(r4, -kPointerSize)); |
| 1627 __ StoreP(ip, MemOperand(r5)); | 1570 __ StoreP(ip, MemOperand(r4)); |
| 1628 __ subi(r5, r5, Operand(kPointerSize)); | 1571 __ SubP(r4, Operand(kPointerSize)); |
| 1629 __ bdnz(&loop); | 1572 __ CmpP(r4, sp); |
| 1573 __ bne(&loop); |
| 1630 // Adjust the actual number of arguments and remove the top element | 1574 // Adjust the actual number of arguments and remove the top element |
| 1631 // (which is a copy of the last argument). | 1575 // (which is a copy of the last argument). |
| 1632 __ subi(r3, r3, Operand(1)); | 1576 __ SubP(r2, Operand(1)); |
| 1633 __ pop(); | 1577 __ pop(); |
| 1634 } | 1578 } |
| 1635 | 1579 |
| 1636 // 4. Call the callable. | 1580 // 4. Call the callable. |
| 1637 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); | 1581 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); |
| 1638 } | 1582 } |
| 1639 | 1583 |
| 1640 | |
| 1641 void Builtins::Generate_ReflectApply(MacroAssembler* masm) { | 1584 void Builtins::Generate_ReflectApply(MacroAssembler* masm) { |
| 1642 // ----------- S t a t e ------------- | 1585 // ----------- S t a t e ------------- |
| 1643 // -- r3 : argc | 1586 // -- r2 : argc |
| 1644 // -- sp[0] : argumentsList | 1587 // -- sp[0] : argumentsList |
| 1645 // -- sp[4] : thisArgument | 1588 // -- sp[4] : thisArgument |
| 1646 // -- sp[8] : target | 1589 // -- sp[8] : target |
| 1647 // -- sp[12] : receiver | 1590 // -- sp[12] : receiver |
| 1648 // ----------------------------------- | 1591 // ----------------------------------- |
| 1649 | 1592 |
| 1650 // 1. Load target into r4 (if present), argumentsList into r3 (if present), | 1593 // 1. Load target into r3 (if present), argumentsList into r2 (if present), |
| 1651 // remove all arguments from the stack (including the receiver), and push | 1594 // remove all arguments from the stack (including the receiver), and push |
| 1652 // thisArgument (if present) instead. | 1595 // thisArgument (if present) instead. |
| 1653 { | 1596 { |
| 1654 Label skip; | 1597 Label skip; |
| 1655 Register arg_size = r5; | 1598 Register arg_size = r4; |
| 1656 Register new_sp = r6; | 1599 Register new_sp = r5; |
| 1657 Register scratch = r7; | 1600 Register scratch = r6; |
| 1658 __ ShiftLeftImm(arg_size, r3, Operand(kPointerSizeLog2)); | 1601 __ ShiftLeftP(arg_size, r2, Operand(kPointerSizeLog2)); |
| 1659 __ add(new_sp, sp, arg_size); | 1602 __ AddP(new_sp, sp, arg_size); |
| 1660 __ LoadRoot(r4, Heap::kUndefinedValueRootIndex); | 1603 __ LoadRoot(r3, Heap::kUndefinedValueRootIndex); |
| 1661 __ mr(scratch, r4); | 1604 __ LoadRR(scratch, r3); |
| 1662 __ mr(r3, r4); | 1605 __ LoadRR(r2, r3); |
| 1663 __ cmpi(arg_size, Operand(kPointerSize)); | 1606 __ CmpP(arg_size, Operand(kPointerSize)); |
| 1664 __ blt(&skip); | 1607 __ blt(&skip); |
| 1665 __ LoadP(r4, MemOperand(new_sp, 1 * -kPointerSize)); // target | 1608 __ LoadP(r3, MemOperand(new_sp, 1 * -kPointerSize)); // target |
| 1666 __ beq(&skip); | 1609 __ beq(&skip); |
| 1667 __ LoadP(scratch, MemOperand(new_sp, 2 * -kPointerSize)); // thisArgument | 1610 __ LoadP(scratch, MemOperand(new_sp, 2 * -kPointerSize)); // thisArgument |
| 1668 __ cmpi(arg_size, Operand(2 * kPointerSize)); | 1611 __ CmpP(arg_size, Operand(2 * kPointerSize)); |
| 1669 __ beq(&skip); | 1612 __ beq(&skip); |
| 1670 __ LoadP(r3, MemOperand(new_sp, 3 * -kPointerSize)); // argumentsList | 1613 __ LoadP(r2, MemOperand(new_sp, 3 * -kPointerSize)); // argumentsList |
| 1671 __ bind(&skip); | 1614 __ bind(&skip); |
| 1672 __ mr(sp, new_sp); | 1615 __ LoadRR(sp, new_sp); |
| 1673 __ StoreP(scratch, MemOperand(sp, 0)); | 1616 __ StoreP(scratch, MemOperand(sp, 0)); |
| 1674 } | 1617 } |
| 1675 | 1618 |
| 1676 // ----------- S t a t e ------------- | 1619 // ----------- S t a t e ------------- |
| 1677 // -- r3 : argumentsList | 1620 // -- r2 : argumentsList |
| 1678 // -- r4 : target | 1621 // -- r3 : target |
| 1679 // -- sp[0] : thisArgument | 1622 // -- sp[0] : thisArgument |
| 1680 // ----------------------------------- | 1623 // ----------------------------------- |
| 1681 | 1624 |
| 1682 // 2. Make sure the target is actually callable. | 1625 // 2. Make sure the target is actually callable. |
| 1683 Label target_not_callable; | 1626 Label target_not_callable; |
| 1684 __ JumpIfSmi(r4, &target_not_callable); | 1627 __ JumpIfSmi(r3, &target_not_callable); |
| 1685 __ LoadP(r7, FieldMemOperand(r4, HeapObject::kMapOffset)); | 1628 __ LoadP(r6, FieldMemOperand(r3, HeapObject::kMapOffset)); |
| 1686 __ lbz(r7, FieldMemOperand(r7, Map::kBitFieldOffset)); | 1629 __ LoadlB(r6, FieldMemOperand(r6, Map::kBitFieldOffset)); |
| 1687 __ TestBit(r7, Map::kIsCallable, r0); | 1630 __ TestBit(r6, Map::kIsCallable); |
| 1688 __ beq(&target_not_callable, cr0); | 1631 __ beq(&target_not_callable); |
| 1689 | 1632 |
| 1690 // 3a. Apply the target to the given argumentsList (passing undefined for | 1633 // 3a. Apply the target to the given argumentsList (passing undefined for |
| 1691 // new.target). | 1634 // new.target). |
| 1692 __ LoadRoot(r6, Heap::kUndefinedValueRootIndex); | 1635 __ LoadRoot(r5, Heap::kUndefinedValueRootIndex); |
| 1693 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET); | 1636 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET); |
| 1694 | 1637 |
| 1695 // 3b. The target is not callable, throw an appropriate TypeError. | 1638 // 3b. The target is not callable, throw an appropriate TypeError. |
| 1696 __ bind(&target_not_callable); | 1639 __ bind(&target_not_callable); |
| 1697 { | 1640 { |
| 1698 __ StoreP(r4, MemOperand(sp, 0)); | 1641 __ StoreP(r3, MemOperand(sp, 0)); |
| 1699 __ TailCallRuntime(Runtime::kThrowApplyNonFunction); | 1642 __ TailCallRuntime(Runtime::kThrowApplyNonFunction); |
| 1700 } | 1643 } |
| 1701 } | 1644 } |
| 1702 | 1645 |
| 1703 | |
| 1704 void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) { | 1646 void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) { |
| 1705 // ----------- S t a t e ------------- | 1647 // ----------- S t a t e ------------- |
| 1706 // -- r3 : argc | 1648 // -- r2 : argc |
| 1707 // -- sp[0] : new.target (optional) | 1649 // -- sp[0] : new.target (optional) |
| 1708 // -- sp[4] : argumentsList | 1650 // -- sp[4] : argumentsList |
| 1709 // -- sp[8] : target | 1651 // -- sp[8] : target |
| 1710 // -- sp[12] : receiver | 1652 // -- sp[12] : receiver |
| 1711 // ----------------------------------- | 1653 // ----------------------------------- |
| 1712 | 1654 |
| 1713 // 1. Load target into r4 (if present), argumentsList into r3 (if present), | 1655 // 1. Load target into r3 (if present), argumentsList into r2 (if present), |
| 1714 // new.target into r6 (if present, otherwise use target), remove all | 1656 // new.target into r5 (if present, otherwise use target), remove all |
| 1715 // arguments from the stack (including the receiver), and push thisArgument | 1657 // arguments from the stack (including the receiver), and push thisArgument |
| 1716 // (if present) instead. | 1658 // (if present) instead. |
| 1717 { | 1659 { |
| 1718 Label skip; | 1660 Label skip; |
| 1719 Register arg_size = r5; | 1661 Register arg_size = r4; |
| 1720 Register new_sp = r7; | 1662 Register new_sp = r6; |
| 1721 __ ShiftLeftImm(arg_size, r3, Operand(kPointerSizeLog2)); | 1663 __ ShiftLeftP(arg_size, r2, Operand(kPointerSizeLog2)); |
| 1722 __ add(new_sp, sp, arg_size); | 1664 __ AddP(new_sp, sp, arg_size); |
| 1723 __ LoadRoot(r4, Heap::kUndefinedValueRootIndex); | 1665 __ LoadRoot(r3, Heap::kUndefinedValueRootIndex); |
| 1724 __ mr(r3, r4); | 1666 __ LoadRR(r2, r3); |
| 1725 __ mr(r6, r4); | 1667 __ LoadRR(r5, r3); |
| 1726 __ StoreP(r4, MemOperand(new_sp, 0)); // receiver (undefined) | 1668 __ StoreP(r3, MemOperand(new_sp, 0)); // receiver (undefined) |
| 1727 __ cmpi(arg_size, Operand(kPointerSize)); | 1669 __ CmpP(arg_size, Operand(kPointerSize)); |
| 1728 __ blt(&skip); | 1670 __ blt(&skip); |
| 1729 __ LoadP(r4, MemOperand(new_sp, 1 * -kPointerSize)); // target | 1671 __ LoadP(r3, MemOperand(new_sp, 1 * -kPointerSize)); // target |
| 1730 __ mr(r6, r4); // new.target defaults to target | 1672 __ LoadRR(r5, r3); // new.target defaults to target |
| 1731 __ beq(&skip); | 1673 __ beq(&skip); |
| 1732 __ LoadP(r3, MemOperand(new_sp, 2 * -kPointerSize)); // argumentsList | 1674 __ LoadP(r2, MemOperand(new_sp, 2 * -kPointerSize)); // argumentsList |
| 1733 __ cmpi(arg_size, Operand(2 * kPointerSize)); | 1675 __ CmpP(arg_size, Operand(2 * kPointerSize)); |
| 1734 __ beq(&skip); | 1676 __ beq(&skip); |
| 1735 __ LoadP(r6, MemOperand(new_sp, 3 * -kPointerSize)); // new.target | 1677 __ LoadP(r5, MemOperand(new_sp, 3 * -kPointerSize)); // new.target |
| 1736 __ bind(&skip); | 1678 __ bind(&skip); |
| 1737 __ mr(sp, new_sp); | 1679 __ LoadRR(sp, new_sp); |
| 1738 } | 1680 } |
| 1739 | 1681 |
| 1740 // ----------- S t a t e ------------- | 1682 // ----------- S t a t e ------------- |
| 1741 // -- r3 : argumentsList | 1683 // -- r2 : argumentsList |
| 1742 // -- r6 : new.target | 1684 // -- r5 : new.target |
| 1743 // -- r4 : target | 1685 // -- r3 : target |
| 1744 // -- sp[0] : receiver (undefined) | 1686 // -- sp[0] : receiver (undefined) |
| 1745 // ----------------------------------- | 1687 // ----------------------------------- |
| 1746 | 1688 |
| 1747 // 2. Make sure the target is actually a constructor. | 1689 // 2. Make sure the target is actually a constructor. |
| 1748 Label target_not_constructor; | 1690 Label target_not_constructor; |
| 1749 __ JumpIfSmi(r4, &target_not_constructor); | 1691 __ JumpIfSmi(r3, &target_not_constructor); |
| 1750 __ LoadP(r7, FieldMemOperand(r4, HeapObject::kMapOffset)); | 1692 __ LoadP(r6, FieldMemOperand(r3, HeapObject::kMapOffset)); |
| 1751 __ lbz(r7, FieldMemOperand(r7, Map::kBitFieldOffset)); | 1693 __ LoadlB(r6, FieldMemOperand(r6, Map::kBitFieldOffset)); |
| 1752 __ TestBit(r7, Map::kIsConstructor, r0); | 1694 __ TestBit(r6, Map::kIsConstructor); |
| 1753 __ beq(&target_not_constructor, cr0); | 1695 __ beq(&target_not_constructor); |
| 1754 | 1696 |
| 1755 // 3. Make sure the target is actually a constructor. | 1697 // 3. Make sure the target is actually a constructor. |
| 1756 Label new_target_not_constructor; | 1698 Label new_target_not_constructor; |
| 1757 __ JumpIfSmi(r6, &new_target_not_constructor); | 1699 __ JumpIfSmi(r5, &new_target_not_constructor); |
| 1758 __ LoadP(r7, FieldMemOperand(r6, HeapObject::kMapOffset)); | 1700 __ LoadP(r6, FieldMemOperand(r5, HeapObject::kMapOffset)); |
| 1759 __ lbz(r7, FieldMemOperand(r7, Map::kBitFieldOffset)); | 1701 __ LoadlB(r6, FieldMemOperand(r6, Map::kBitFieldOffset)); |
| 1760 __ TestBit(r7, Map::kIsConstructor, r0); | 1702 __ TestBit(r6, Map::kIsConstructor); |
| 1761 __ beq(&new_target_not_constructor, cr0); | 1703 __ beq(&new_target_not_constructor); |
| 1762 | 1704 |
| 1763 // 4a. Construct the target with the given new.target and argumentsList. | 1705 // 4a. Construct the target with the given new.target and argumentsList. |
| 1764 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET); | 1706 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET); |
| 1765 | 1707 |
| 1766 // 4b. The target is not a constructor, throw an appropriate TypeError. | 1708 // 4b. The target is not a constructor, throw an appropriate TypeError. |
| 1767 __ bind(&target_not_constructor); | 1709 __ bind(&target_not_constructor); |
| 1768 { | 1710 { |
| 1769 __ StoreP(r4, MemOperand(sp, 0)); | 1711 __ StoreP(r3, MemOperand(sp, 0)); |
| 1770 __ TailCallRuntime(Runtime::kThrowCalledNonCallable); | 1712 __ TailCallRuntime(Runtime::kThrowCalledNonCallable); |
| 1771 } | 1713 } |
| 1772 | 1714 |
| 1773 // 4c. The new.target is not a constructor, throw an appropriate TypeError. | 1715 // 4c. The new.target is not a constructor, throw an appropriate TypeError. |
| 1774 __ bind(&new_target_not_constructor); | 1716 __ bind(&new_target_not_constructor); |
| 1775 { | 1717 { |
| 1776 __ StoreP(r6, MemOperand(sp, 0)); | 1718 __ StoreP(r5, MemOperand(sp, 0)); |
| 1777 __ TailCallRuntime(Runtime::kThrowCalledNonCallable); | 1719 __ TailCallRuntime(Runtime::kThrowCalledNonCallable); |
| 1778 } | 1720 } |
| 1779 } | 1721 } |
| 1780 | 1722 |
| 1781 | |
| 1782 static void ArgumentAdaptorStackCheck(MacroAssembler* masm, | 1723 static void ArgumentAdaptorStackCheck(MacroAssembler* masm, |
| 1783 Label* stack_overflow) { | 1724 Label* stack_overflow) { |
| 1784 // ----------- S t a t e ------------- | 1725 // ----------- S t a t e ------------- |
| 1785 // -- r3 : actual number of arguments | 1726 // -- r2 : actual number of arguments |
| 1786 // -- r4 : function (passed through to callee) | 1727 // -- r3 : function (passed through to callee) |
| 1787 // -- r5 : expected number of arguments | 1728 // -- r4 : expected number of arguments |
| 1788 // -- r6 : new target (passed through to callee) | 1729 // -- r5 : new target (passed through to callee) |
| 1789 // ----------------------------------- | 1730 // ----------------------------------- |
| 1790 // Check the stack for overflow. We are not trying to catch | 1731 // Check the stack for overflow. We are not trying to catch |
| 1791 // interruptions (e.g. debug break and preemption) here, so the "real stack | 1732 // interruptions (e.g. debug break and preemption) here, so the "real stack |
| 1792 // limit" is checked. | 1733 // limit" is checked. |
| 1793 __ LoadRoot(r8, Heap::kRealStackLimitRootIndex); | 1734 __ LoadRoot(r7, Heap::kRealStackLimitRootIndex); |
| 1794 // Make r8 the space we have left. The stack might already be overflowed | 1735 // Make r7 the space we have left. The stack might already be overflowed |
| 1795 // here which will cause r8 to become negative. | 1736 // here which will cause r7 to become negative. |
| 1796 __ sub(r8, sp, r8); | 1737 __ SubP(r7, sp, r7); |
| 1797 // Check if the arguments will overflow the stack. | 1738 // Check if the arguments will overflow the stack. |
| 1798 __ ShiftLeftImm(r0, r5, Operand(kPointerSizeLog2)); | 1739 __ ShiftLeftP(r0, r4, Operand(kPointerSizeLog2)); |
| 1799 __ cmp(r8, r0); | 1740 __ CmpP(r7, r0); |
| 1800 __ ble(stack_overflow); // Signed comparison. | 1741 __ ble(stack_overflow); // Signed comparison. |
| 1801 } | 1742 } |
| 1802 | 1743 |
| 1744 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) { |
| 1745 __ SmiTag(r2); |
| 1746 __ LoadSmiLiteral(r6, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); |
| 1747 // Stack updated as such: |
| 1748 // old SP ---> |
| 1749 // R14 Return Addr |
| 1750 // Old FP <--- New FP |
| 1751 // Argument Adapter SMI |
| 1752 // Function |
| 1753 // ArgC as SMI <--- New SP |
| 1754 __ lay(sp, MemOperand(sp, -5 * kPointerSize)); |
| 1803 | 1755 |
| 1804 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) { | 1756 // Cleanse the top nibble of 31-bit pointers. |
| 1805 __ SmiTag(r3); | 1757 __ CleanseP(r14); |
| 1806 __ LoadSmiLiteral(r7, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); | 1758 __ StoreP(r14, MemOperand(sp, 4 * kPointerSize)); |
| 1807 __ mflr(r0); | 1759 __ StoreP(fp, MemOperand(sp, 3 * kPointerSize)); |
| 1808 __ push(r0); | 1760 __ StoreP(r6, MemOperand(sp, 2 * kPointerSize)); |
| 1809 if (FLAG_enable_embedded_constant_pool) { | 1761 __ StoreP(r3, MemOperand(sp, 1 * kPointerSize)); |
| 1810 __ Push(fp, kConstantPoolRegister, r7, r4, r3); | 1762 __ StoreP(r2, MemOperand(sp, 0 * kPointerSize)); |
| 1811 } else { | 1763 __ la(fp, MemOperand(sp, StandardFrameConstants::kFixedFrameSizeFromFp + |
| 1812 __ Push(fp, r7, r4, r3); | 1764 kPointerSize)); |
| 1813 } | |
| 1814 __ addi(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp + | |
| 1815 kPointerSize)); | |
| 1816 } | 1765 } |
| 1817 | 1766 |
| 1818 | |
| 1819 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) { | 1767 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) { |
| 1820 // ----------- S t a t e ------------- | 1768 // ----------- S t a t e ------------- |
| 1821 // -- r3 : result being passed through | 1769 // -- r2 : result being passed through |
| 1822 // ----------------------------------- | 1770 // ----------------------------------- |
| 1823 // Get the number of arguments passed (as a smi), tear down the frame and | 1771 // Get the number of arguments passed (as a smi), tear down the frame and |
| 1824 // then tear down the parameters. | 1772 // then tear down the parameters. |
| 1825 __ LoadP(r4, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp + | 1773 __ LoadP(r3, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp + |
| 1826 kPointerSize))); | 1774 kPointerSize))); |
| 1827 int stack_adjustment = kPointerSize; // adjust for receiver | 1775 int stack_adjustment = kPointerSize; // adjust for receiver |
| 1828 __ LeaveFrame(StackFrame::ARGUMENTS_ADAPTOR, stack_adjustment); | 1776 __ LeaveFrame(StackFrame::ARGUMENTS_ADAPTOR, stack_adjustment); |
| 1829 __ SmiToPtrArrayOffset(r0, r4); | 1777 __ SmiToPtrArrayOffset(r3, r3); |
| 1830 __ add(sp, sp, r0); | 1778 __ lay(sp, MemOperand(sp, r3)); |
| 1831 } | 1779 } |
| 1832 | 1780 |
| 1833 | |
| 1834 // static | 1781 // static |
| 1835 void Builtins::Generate_Apply(MacroAssembler* masm) { | 1782 void Builtins::Generate_Apply(MacroAssembler* masm) { |
| 1836 // ----------- S t a t e ------------- | 1783 // ----------- S t a t e ------------- |
| 1837 // -- r3 : argumentsList | 1784 // -- r2 : argumentsList |
| 1838 // -- r4 : target | 1785 // -- r3 : target |
| 1839 // -- r6 : new.target (checked to be constructor or undefined) | 1786 // -- r5 : new.target (checked to be constructor or undefined) |
| 1840 // -- sp[0] : thisArgument | 1787 // -- sp[0] : thisArgument |
| 1841 // ----------------------------------- | 1788 // ----------------------------------- |
| 1842 | 1789 |
| 1843 // Create the list of arguments from the array-like argumentsList. | 1790 // Create the list of arguments from the array-like argumentsList. |
| 1844 { | 1791 { |
| 1845 Label create_arguments, create_array, create_runtime, done_create; | 1792 Label create_arguments, create_array, create_runtime, done_create; |
| 1846 __ JumpIfSmi(r3, &create_runtime); | 1793 __ JumpIfSmi(r2, &create_runtime); |
| 1847 | 1794 |
| 1848 // Load the map of argumentsList into r5. | 1795 // Load the map of argumentsList into r4. |
| 1849 __ LoadP(r5, FieldMemOperand(r3, HeapObject::kMapOffset)); | 1796 __ LoadP(r4, FieldMemOperand(r2, HeapObject::kMapOffset)); |
| 1850 | 1797 |
| 1851 // Load native context into r7. | 1798 // Load native context into r6. |
| 1852 __ LoadP(r7, NativeContextMemOperand()); | 1799 __ LoadP(r6, NativeContextMemOperand()); |
| 1853 | 1800 |
| 1854 // Check if argumentsList is an (unmodified) arguments object. | 1801 // Check if argumentsList is an (unmodified) arguments object. |
| 1855 __ LoadP(ip, ContextMemOperand(r7, Context::SLOPPY_ARGUMENTS_MAP_INDEX)); | 1802 __ LoadP(ip, ContextMemOperand(r6, Context::SLOPPY_ARGUMENTS_MAP_INDEX)); |
| 1856 __ cmp(ip, r5); | 1803 __ CmpP(ip, r4); |
| 1857 __ beq(&create_arguments); | 1804 __ beq(&create_arguments); |
| 1858 __ LoadP(ip, ContextMemOperand(r7, Context::STRICT_ARGUMENTS_MAP_INDEX)); | 1805 __ LoadP(ip, ContextMemOperand(r6, Context::STRICT_ARGUMENTS_MAP_INDEX)); |
| 1859 __ cmp(ip, r5); | 1806 __ CmpP(ip, r4); |
| 1860 __ beq(&create_arguments); | 1807 __ beq(&create_arguments); |
| 1861 | 1808 |
| 1862 // Check if argumentsList is a fast JSArray. | 1809 // Check if argumentsList is a fast JSArray. |
| 1863 __ CompareInstanceType(r5, ip, JS_ARRAY_TYPE); | 1810 __ CompareInstanceType(r4, ip, JS_ARRAY_TYPE); |
| 1864 __ beq(&create_array); | 1811 __ beq(&create_array); |
| 1865 | 1812 |
| 1866 // Ask the runtime to create the list (actually a FixedArray). | 1813 // Ask the runtime to create the list (actually a FixedArray). |
| 1867 __ bind(&create_runtime); | 1814 __ bind(&create_runtime); |
| 1868 { | 1815 { |
| 1869 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); | 1816 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); |
| 1870 __ Push(r4, r6, r3); | 1817 __ Push(r3, r5, r2); |
| 1871 __ CallRuntime(Runtime::kCreateListFromArrayLike); | 1818 __ CallRuntime(Runtime::kCreateListFromArrayLike); |
| 1872 __ Pop(r4, r6); | 1819 __ Pop(r3, r5); |
| 1873 __ LoadP(r5, FieldMemOperand(r3, FixedArray::kLengthOffset)); | 1820 __ LoadP(r4, FieldMemOperand(r2, FixedArray::kLengthOffset)); |
| 1874 __ SmiUntag(r5); | 1821 __ SmiUntag(r4); |
| 1875 } | 1822 } |
| 1876 __ b(&done_create); | 1823 __ b(&done_create); |
| 1877 | 1824 |
| 1878 // Try to create the list from an arguments object. | 1825 // Try to create the list from an arguments object. |
| 1879 __ bind(&create_arguments); | 1826 __ bind(&create_arguments); |
| 1880 __ LoadP(r5, FieldMemOperand(r3, JSArgumentsObject::kLengthOffset)); | 1827 __ LoadP(r4, FieldMemOperand(r2, JSArgumentsObject::kLengthOffset)); |
| 1881 __ LoadP(r7, FieldMemOperand(r3, JSObject::kElementsOffset)); | 1828 __ LoadP(r6, FieldMemOperand(r2, JSObject::kElementsOffset)); |
| 1882 __ LoadP(ip, FieldMemOperand(r7, FixedArray::kLengthOffset)); | 1829 __ LoadP(ip, FieldMemOperand(r6, FixedArray::kLengthOffset)); |
| 1883 __ cmp(r5, ip); | 1830 __ CmpP(r4, ip); |
| 1884 __ bne(&create_runtime); | 1831 __ bne(&create_runtime); |
| 1885 __ SmiUntag(r5); | 1832 __ SmiUntag(r4); |
| 1886 __ mr(r3, r7); | 1833 __ LoadRR(r2, r6); |
| 1887 __ b(&done_create); | 1834 __ b(&done_create); |
| 1888 | 1835 |
| 1889 // Try to create the list from a JSArray object. | 1836 // Try to create the list from a JSArray object. |
| 1890 __ bind(&create_array); | 1837 __ bind(&create_array); |
| 1891 __ lbz(r5, FieldMemOperand(r5, Map::kBitField2Offset)); | 1838 __ LoadlB(r4, FieldMemOperand(r4, Map::kBitField2Offset)); |
| 1892 __ DecodeField<Map::ElementsKindBits>(r5); | 1839 __ DecodeField<Map::ElementsKindBits>(r4); |
| 1893 STATIC_ASSERT(FAST_SMI_ELEMENTS == 0); | 1840 STATIC_ASSERT(FAST_SMI_ELEMENTS == 0); |
| 1894 STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1); | 1841 STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1); |
| 1895 STATIC_ASSERT(FAST_ELEMENTS == 2); | 1842 STATIC_ASSERT(FAST_ELEMENTS == 2); |
| 1896 __ cmpi(r5, Operand(FAST_ELEMENTS)); | 1843 __ CmpP(r4, Operand(FAST_ELEMENTS)); |
| 1897 __ bgt(&create_runtime); | 1844 __ bgt(&create_runtime); |
| 1898 __ cmpi(r5, Operand(FAST_HOLEY_SMI_ELEMENTS)); | 1845 __ CmpP(r4, Operand(FAST_HOLEY_SMI_ELEMENTS)); |
| 1899 __ beq(&create_runtime); | 1846 __ beq(&create_runtime); |
| 1900 __ LoadP(r5, FieldMemOperand(r3, JSArray::kLengthOffset)); | 1847 __ LoadP(r4, FieldMemOperand(r2, JSArray::kLengthOffset)); |
| 1901 __ LoadP(r3, FieldMemOperand(r3, JSArray::kElementsOffset)); | 1848 __ LoadP(r2, FieldMemOperand(r2, JSArray::kElementsOffset)); |
| 1902 __ SmiUntag(r5); | 1849 __ SmiUntag(r4); |
| 1903 | 1850 |
| 1904 __ bind(&done_create); | 1851 __ bind(&done_create); |
| 1905 } | 1852 } |
| 1906 | 1853 |
| 1907 // Check for stack overflow. | 1854 // Check for stack overflow. |
| 1908 { | 1855 { |
| 1909 // Check the stack for overflow. We are not trying to catch interruptions | 1856 // Check the stack for overflow. We are not trying to catch interruptions |
| 1910 // (i.e. debug break and preemption) here, so check the "real stack limit". | 1857 // (i.e. debug break and preemption) here, so check the "real stack limit". |
| 1911 Label done; | 1858 Label done; |
| 1912 __ LoadRoot(ip, Heap::kRealStackLimitRootIndex); | 1859 __ LoadRoot(ip, Heap::kRealStackLimitRootIndex); |
| 1913 // Make ip the space we have left. The stack might already be overflowed | 1860 // Make ip the space we have left. The stack might already be overflowed |
| 1914 // here which will cause ip to become negative. | 1861 // here which will cause ip to become negative. |
| 1915 __ sub(ip, sp, ip); | 1862 __ SubP(ip, sp, ip); |
| 1916 // Check if the arguments will overflow the stack. | 1863 // Check if the arguments will overflow the stack. |
| 1917 __ ShiftLeftImm(r0, r5, Operand(kPointerSizeLog2)); | 1864 __ ShiftLeftP(r0, r4, Operand(kPointerSizeLog2)); |
| 1918 __ cmp(ip, r0); // Signed comparison. | 1865 __ CmpP(ip, r0); // Signed comparison. |
| 1919 __ bgt(&done); | 1866 __ bgt(&done); |
| 1920 __ TailCallRuntime(Runtime::kThrowStackOverflow); | 1867 __ TailCallRuntime(Runtime::kThrowStackOverflow); |
| 1921 __ bind(&done); | 1868 __ bind(&done); |
| 1922 } | 1869 } |
| 1923 | 1870 |
| 1924 // ----------- S t a t e ------------- | 1871 // ----------- S t a t e ------------- |
| 1925 // -- r4 : target | 1872 // -- r3 : target |
| 1926 // -- r3 : args (a FixedArray built from argumentsList) | 1873 // -- r2 : args (a FixedArray built from argumentsList) |
| 1927 // -- r5 : len (number of elements to push from args) | 1874 // -- r4 : len (number of elements to push from args) |
| 1928 // -- r6 : new.target (checked to be constructor or undefined) | 1875 // -- r5 : new.target (checked to be constructor or undefined) |
| 1929 // -- sp[0] : thisArgument | 1876 // -- sp[0] : thisArgument |
| 1930 // ----------------------------------- | 1877 // ----------------------------------- |
| 1931 | 1878 |
| 1932 // Push arguments onto the stack (thisArgument is already on the stack). | 1879 // Push arguments onto the stack (thisArgument is already on the stack). |
| 1933 { | 1880 { |
| 1934 Label loop, no_args; | 1881 Label loop, no_args; |
| 1935 __ cmpi(r5, Operand::Zero()); | 1882 __ CmpP(r4, Operand::Zero()); |
| 1936 __ beq(&no_args); | 1883 __ beq(&no_args); |
| 1937 __ addi(r3, r3, | 1884 __ AddP(r2, r2, |
| 1938 Operand(FixedArray::kHeaderSize - kHeapObjectTag - kPointerSize)); | 1885 Operand(FixedArray::kHeaderSize - kHeapObjectTag - kPointerSize)); |
| 1939 __ mtctr(r5); | 1886 __ LoadRR(r1, r4); |
| 1940 __ bind(&loop); | 1887 __ bind(&loop); |
| 1941 __ LoadPU(r0, MemOperand(r3, kPointerSize)); | 1888 __ LoadP(r0, MemOperand(r2, kPointerSize)); |
| 1889 __ la(r2, MemOperand(r2, kPointerSize)); |
| 1942 __ push(r0); | 1890 __ push(r0); |
| 1943 __ bdnz(&loop); | 1891 __ BranchOnCount(r1, &loop); |
| 1944 __ bind(&no_args); | 1892 __ bind(&no_args); |
| 1945 __ mr(r3, r5); | 1893 __ LoadRR(r2, r4); |
| 1946 } | 1894 } |
| 1947 | 1895 |
| 1948 // Dispatch to Call or Construct depending on whether new.target is undefined. | 1896 // Dispatch to Call or Construct depending on whether new.target is undefined. |
| 1949 { | 1897 { |
| 1950 __ CompareRoot(r6, Heap::kUndefinedValueRootIndex); | 1898 __ CompareRoot(r5, Heap::kUndefinedValueRootIndex); |
| 1951 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET, eq); | 1899 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET, eq); |
| 1952 __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); | 1900 __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); |
| 1953 } | 1901 } |
| 1954 } | 1902 } |
| 1955 | 1903 |
| 1956 namespace { | 1904 namespace { |
| 1957 | 1905 |
| 1958 // Drops top JavaScript frame and an arguments adaptor frame below it (if | 1906 // Drops top JavaScript frame and an arguments adaptor frame below it (if |
| 1959 // present) preserving all the arguments prepared for current call. | 1907 // present) preserving all the arguments prepared for current call. |
| 1960 // Does nothing if debugger is currently active. | 1908 // Does nothing if debugger is currently active. |
| (...skipping 25 matching lines...) Expand all Loading... |
| 1986 Register scratch1, Register scratch2, | 1934 Register scratch1, Register scratch2, |
| 1987 Register scratch3) { | 1935 Register scratch3) { |
| 1988 DCHECK(!AreAliased(args_reg, scratch1, scratch2, scratch3)); | 1936 DCHECK(!AreAliased(args_reg, scratch1, scratch2, scratch3)); |
| 1989 Comment cmnt(masm, "[ PrepareForTailCall"); | 1937 Comment cmnt(masm, "[ PrepareForTailCall"); |
| 1990 | 1938 |
| 1991 // Prepare for tail call only if the debugger is not active. | 1939 // Prepare for tail call only if the debugger is not active. |
| 1992 Label done; | 1940 Label done; |
| 1993 ExternalReference debug_is_active = | 1941 ExternalReference debug_is_active = |
| 1994 ExternalReference::debug_is_active_address(masm->isolate()); | 1942 ExternalReference::debug_is_active_address(masm->isolate()); |
| 1995 __ mov(scratch1, Operand(debug_is_active)); | 1943 __ mov(scratch1, Operand(debug_is_active)); |
| 1996 __ lbz(scratch1, MemOperand(scratch1)); | 1944 __ LoadlB(scratch1, MemOperand(scratch1)); |
| 1997 __ cmpi(scratch1, Operand::Zero()); | 1945 __ CmpP(scratch1, Operand::Zero()); |
| 1998 __ bne(&done); | 1946 __ bne(&done); |
| 1999 | 1947 |
| 2000 // Drop possible interpreter handler/stub frame. | 1948 // Drop possible interpreter handler/stub frame. |
| 2001 { | 1949 { |
| 2002 Label no_interpreter_frame; | 1950 Label no_interpreter_frame; |
| 2003 __ LoadP(scratch3, MemOperand(fp, StandardFrameConstants::kMarkerOffset)); | 1951 __ LoadP(scratch3, MemOperand(fp, StandardFrameConstants::kMarkerOffset)); |
| 2004 __ CmpSmiLiteral(scratch3, Smi::FromInt(StackFrame::STUB), r0); | 1952 __ CmpSmiLiteral(scratch3, Smi::FromInt(StackFrame::STUB), r0); |
| 2005 __ bne(&no_interpreter_frame); | 1953 __ bne(&no_interpreter_frame); |
| 2006 __ LoadP(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); | 1954 __ LoadP(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); |
| 2007 __ bind(&no_interpreter_frame); | 1955 __ bind(&no_interpreter_frame); |
| 2008 } | 1956 } |
| 2009 | 1957 |
| 2010 // Check if next frame is an arguments adaptor frame. | 1958 // Check if next frame is an arguments adaptor frame. |
| 2011 Label no_arguments_adaptor, formal_parameter_count_loaded; | 1959 Label no_arguments_adaptor, formal_parameter_count_loaded; |
| 2012 __ LoadP(scratch2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); | 1960 __ LoadP(scratch2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); |
| 2013 __ LoadP(scratch3, | 1961 __ LoadP(scratch3, |
| 2014 MemOperand(scratch2, StandardFrameConstants::kContextOffset)); | 1962 MemOperand(scratch2, StandardFrameConstants::kContextOffset)); |
| 2015 __ CmpSmiLiteral(scratch3, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR), r0); | 1963 __ CmpSmiLiteral(scratch3, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR), r0); |
| 2016 __ bne(&no_arguments_adaptor); | 1964 __ bne(&no_arguments_adaptor); |
| 2017 | 1965 |
| 2018 // Drop arguments adaptor frame and load arguments count. | 1966 // Drop arguments adaptor frame and load arguments count. |
| 2019 __ mr(fp, scratch2); | 1967 __ LoadRR(fp, scratch2); |
| 2020 __ LoadP(scratch1, | 1968 __ LoadP(scratch1, |
| 2021 MemOperand(fp, ArgumentsAdaptorFrameConstants::kLengthOffset)); | 1969 MemOperand(fp, ArgumentsAdaptorFrameConstants::kLengthOffset)); |
| 2022 __ SmiUntag(scratch1); | 1970 __ SmiUntag(scratch1); |
| 2023 __ b(&formal_parameter_count_loaded); | 1971 __ b(&formal_parameter_count_loaded); |
| 2024 | 1972 |
| 2025 __ bind(&no_arguments_adaptor); | 1973 __ bind(&no_arguments_adaptor); |
| 2026 // Load caller's formal parameter count | 1974 // Load caller's formal parameter count |
| 2027 __ LoadP(scratch1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); | 1975 __ LoadP(scratch1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); |
| 2028 __ LoadP(scratch1, | 1976 __ LoadP(scratch1, |
| 2029 FieldMemOperand(scratch1, JSFunction::kSharedFunctionInfoOffset)); | 1977 FieldMemOperand(scratch1, JSFunction::kSharedFunctionInfoOffset)); |
| 2030 __ LoadWordArith( | 1978 __ LoadW(scratch1, |
| 2031 scratch1, FieldMemOperand( | 1979 FieldMemOperand(scratch1, |
| 2032 scratch1, SharedFunctionInfo::kFormalParameterCountOffset)); | 1980 SharedFunctionInfo::kFormalParameterCountOffset)); |
| 2033 #if !V8_TARGET_ARCH_PPC64 | 1981 #if !V8_TARGET_ARCH_S390X |
| 2034 __ SmiUntag(scratch1); | 1982 __ SmiUntag(scratch1); |
| 2035 #endif | 1983 #endif |
| 2036 | 1984 |
| 2037 __ bind(&formal_parameter_count_loaded); | 1985 __ bind(&formal_parameter_count_loaded); |
| 2038 | 1986 |
| 2039 // Calculate the end of destination area where we will put the arguments | 1987 // Calculate the end of destination area where we will put the arguments |
| 2040 // after we drop current frame. We add kPointerSize to count the receiver | 1988 // after we drop current frame. We AddP kPointerSize to count the receiver |
| 2041 // argument which is not included into formal parameters count. | 1989 // argument which is not included into formal parameters count. |
| 2042 Register dst_reg = scratch2; | 1990 Register dst_reg = scratch2; |
| 2043 __ ShiftLeftImm(dst_reg, scratch1, Operand(kPointerSizeLog2)); | 1991 __ ShiftLeftP(dst_reg, scratch1, Operand(kPointerSizeLog2)); |
| 2044 __ add(dst_reg, fp, dst_reg); | 1992 __ AddP(dst_reg, fp, dst_reg); |
| 2045 __ addi(dst_reg, dst_reg, | 1993 __ AddP(dst_reg, dst_reg, |
| 2046 Operand(StandardFrameConstants::kCallerSPOffset + kPointerSize)); | 1994 Operand(StandardFrameConstants::kCallerSPOffset + kPointerSize)); |
| 2047 | 1995 |
| 2048 Register src_reg = scratch1; | 1996 Register src_reg = scratch1; |
| 2049 __ ShiftLeftImm(src_reg, args_reg, Operand(kPointerSizeLog2)); | 1997 __ ShiftLeftP(src_reg, args_reg, Operand(kPointerSizeLog2)); |
| 2050 __ add(src_reg, sp, src_reg); | 1998 __ AddP(src_reg, sp, src_reg); |
| 2051 // Count receiver argument as well (not included in args_reg). | 1999 // Count receiver argument as well (not included in args_reg). |
| 2052 __ addi(src_reg, src_reg, Operand(kPointerSize)); | 2000 __ AddP(src_reg, src_reg, Operand(kPointerSize)); |
| 2053 | 2001 |
| 2054 if (FLAG_debug_code) { | 2002 if (FLAG_debug_code) { |
| 2055 __ cmpl(src_reg, dst_reg); | 2003 __ CmpLogicalP(src_reg, dst_reg); |
| 2056 __ Check(lt, kStackAccessBelowStackPointer); | 2004 __ Check(lt, kStackAccessBelowStackPointer); |
| 2057 } | 2005 } |
| 2058 | 2006 |
| 2059 // Restore caller's frame pointer and return address now as they will be | 2007 // Restore caller's frame pointer and return address now as they will be |
| 2060 // overwritten by the copying loop. | 2008 // overwritten by the copying loop. |
| 2061 __ RestoreFrameStateForTailCall(); | 2009 __ RestoreFrameStateForTailCall(); |
| 2062 | 2010 |
| 2063 // Now copy callee arguments to the caller frame going backwards to avoid | 2011 // Now copy callee arguments to the caller frame going backwards to avoid |
| 2064 // callee arguments corruption (source and destination areas could overlap). | 2012 // callee arguments corruption (source and destination areas could overlap). |
| 2065 | 2013 |
| 2066 // Both src_reg and dst_reg are pointing to the word after the one to copy, | 2014 // Both src_reg and dst_reg are pointing to the word after the one to copy, |
| 2067 // so they must be pre-decremented in the loop. | 2015 // so they must be pre-decremented in the loop. |
| 2068 Register tmp_reg = scratch3; | 2016 Register tmp_reg = scratch3; |
| 2069 Label loop; | 2017 Label loop; |
| 2070 __ addi(tmp_reg, args_reg, Operand(1)); // +1 for receiver | 2018 DCHECK(!src_reg.is(r1)); |
| 2071 __ mtctr(tmp_reg); | 2019 DCHECK(!dst_reg.is(r1)); |
| 2020 DCHECK(!tmp_reg.is(r1)); |
| 2021 |
| 2022 __ AddP(r1, args_reg, Operand(1)); // +1 for receiver |
| 2072 __ bind(&loop); | 2023 __ bind(&loop); |
| 2073 __ LoadPU(tmp_reg, MemOperand(src_reg, -kPointerSize)); | 2024 __ lay(src_reg, MemOperand(src_reg, -kPointerSize)); |
| 2074 __ StorePU(tmp_reg, MemOperand(dst_reg, -kPointerSize)); | 2025 __ LoadP(tmp_reg, MemOperand(src_reg)); |
| 2075 __ bdnz(&loop); | 2026 __ lay(dst_reg, MemOperand(dst_reg, -kPointerSize)); |
| 2027 __ StoreP(tmp_reg, MemOperand(dst_reg)); |
| 2028 __ BranchOnCount(r1, &loop); |
| 2076 | 2029 |
| 2077 // Leave current frame. | 2030 // Leave current frame. |
| 2078 __ mr(sp, dst_reg); | 2031 __ LoadRR(sp, dst_reg); |
| 2079 | 2032 |
| 2080 __ bind(&done); | 2033 __ bind(&done); |
| 2081 } | 2034 } |
| 2082 } // namespace | 2035 } // namespace |
| 2083 | 2036 |
| 2084 // static | 2037 // static |
| 2085 void Builtins::Generate_CallFunction(MacroAssembler* masm, | 2038 void Builtins::Generate_CallFunction(MacroAssembler* masm, |
| 2086 ConvertReceiverMode mode, | 2039 ConvertReceiverMode mode, |
| 2087 TailCallMode tail_call_mode) { | 2040 TailCallMode tail_call_mode) { |
| 2088 // ----------- S t a t e ------------- | 2041 // ----------- S t a t e ------------- |
| 2089 // -- r3 : the number of arguments (not including the receiver) | 2042 // -- r2 : the number of arguments (not including the receiver) |
| 2090 // -- r4 : the function to call (checked to be a JSFunction) | 2043 // -- r3 : the function to call (checked to be a JSFunction) |
| 2091 // ----------------------------------- | 2044 // ----------------------------------- |
| 2092 __ AssertFunction(r4); | 2045 __ AssertFunction(r3); |
| 2093 | 2046 |
| 2094 // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList) | 2047 // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList) |
| 2095 // Check that the function is not a "classConstructor". | 2048 // Check that the function is not a "classConstructor". |
| 2096 Label class_constructor; | 2049 Label class_constructor; |
| 2097 __ LoadP(r5, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset)); | 2050 __ LoadP(r4, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset)); |
| 2098 __ lwz(r6, FieldMemOperand(r5, SharedFunctionInfo::kCompilerHintsOffset)); | 2051 __ LoadlW(r5, FieldMemOperand(r4, SharedFunctionInfo::kCompilerHintsOffset)); |
| 2099 __ TestBitMask(r6, SharedFunctionInfo::kClassConstructorBits, r0); | 2052 __ TestBitMask(r5, SharedFunctionInfo::kClassConstructorBits, r0); |
| 2100 __ bne(&class_constructor, cr0); | 2053 __ bne(&class_constructor); |
| 2101 | 2054 |
| 2102 // Enter the context of the function; ToObject has to run in the function | 2055 // Enter the context of the function; ToObject has to run in the function |
| 2103 // context, and we also need to take the global proxy from the function | 2056 // context, and we also need to take the global proxy from the function |
| 2104 // context in case of conversion. | 2057 // context in case of conversion. |
| 2105 __ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset)); | 2058 __ LoadP(cp, FieldMemOperand(r3, JSFunction::kContextOffset)); |
| 2106 // We need to convert the receiver for non-native sloppy mode functions. | 2059 // We need to convert the receiver for non-native sloppy mode functions. |
| 2107 Label done_convert; | 2060 Label done_convert; |
| 2108 __ andi(r0, r6, Operand((1 << SharedFunctionInfo::kStrictModeBit) | | 2061 __ AndP(r0, r5, Operand((1 << SharedFunctionInfo::kStrictModeBit) | |
| 2109 (1 << SharedFunctionInfo::kNativeBit))); | 2062 (1 << SharedFunctionInfo::kNativeBit))); |
| 2110 __ bne(&done_convert, cr0); | 2063 __ bne(&done_convert); |
| 2111 { | 2064 { |
| 2112 // ----------- S t a t e ------------- | 2065 // ----------- S t a t e ------------- |
| 2113 // -- r3 : the number of arguments (not including the receiver) | 2066 // -- r2 : the number of arguments (not including the receiver) |
| 2114 // -- r4 : the function to call (checked to be a JSFunction) | 2067 // -- r3 : the function to call (checked to be a JSFunction) |
| 2115 // -- r5 : the shared function info. | 2068 // -- r4 : the shared function info. |
| 2116 // -- cp : the function context. | 2069 // -- cp : the function context. |
| 2117 // ----------------------------------- | 2070 // ----------------------------------- |
| 2118 | 2071 |
| 2119 if (mode == ConvertReceiverMode::kNullOrUndefined) { | 2072 if (mode == ConvertReceiverMode::kNullOrUndefined) { |
| 2120 // Patch receiver to global proxy. | 2073 // Patch receiver to global proxy. |
| 2121 __ LoadGlobalProxy(r6); | 2074 __ LoadGlobalProxy(r5); |
| 2122 } else { | 2075 } else { |
| 2123 Label convert_to_object, convert_receiver; | 2076 Label convert_to_object, convert_receiver; |
| 2124 __ ShiftLeftImm(r6, r3, Operand(kPointerSizeLog2)); | 2077 __ ShiftLeftP(r5, r2, Operand(kPointerSizeLog2)); |
| 2125 __ LoadPX(r6, MemOperand(sp, r6)); | 2078 __ LoadP(r5, MemOperand(sp, r5)); |
| 2126 __ JumpIfSmi(r6, &convert_to_object); | 2079 __ JumpIfSmi(r5, &convert_to_object); |
| 2127 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE); | 2080 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE); |
| 2128 __ CompareObjectType(r6, r7, r7, FIRST_JS_RECEIVER_TYPE); | 2081 __ CompareObjectType(r5, r6, r6, FIRST_JS_RECEIVER_TYPE); |
| 2129 __ bge(&done_convert); | 2082 __ bge(&done_convert); |
| 2130 if (mode != ConvertReceiverMode::kNotNullOrUndefined) { | 2083 if (mode != ConvertReceiverMode::kNotNullOrUndefined) { |
| 2131 Label convert_global_proxy; | 2084 Label convert_global_proxy; |
| 2132 __ JumpIfRoot(r6, Heap::kUndefinedValueRootIndex, | 2085 __ JumpIfRoot(r5, Heap::kUndefinedValueRootIndex, |
| 2133 &convert_global_proxy); | 2086 &convert_global_proxy); |
| 2134 __ JumpIfNotRoot(r6, Heap::kNullValueRootIndex, &convert_to_object); | 2087 __ JumpIfNotRoot(r5, Heap::kNullValueRootIndex, &convert_to_object); |
| 2135 __ bind(&convert_global_proxy); | 2088 __ bind(&convert_global_proxy); |
| 2136 { | 2089 { |
| 2137 // Patch receiver to global proxy. | 2090 // Patch receiver to global proxy. |
| 2138 __ LoadGlobalProxy(r6); | 2091 __ LoadGlobalProxy(r5); |
| 2139 } | 2092 } |
| 2140 __ b(&convert_receiver); | 2093 __ b(&convert_receiver); |
| 2141 } | 2094 } |
| 2142 __ bind(&convert_to_object); | 2095 __ bind(&convert_to_object); |
| 2143 { | 2096 { |
| 2144 // Convert receiver using ToObject. | 2097 // Convert receiver using ToObject. |
| 2145 // TODO(bmeurer): Inline the allocation here to avoid building the frame | 2098 // TODO(bmeurer): Inline the allocation here to avoid building the frame |
| 2146 // in the fast case? (fall back to AllocateInNewSpace?) | 2099 // in the fast case? (fall back to AllocateInNewSpace?) |
| 2147 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); | 2100 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); |
| 2148 __ SmiTag(r3); | 2101 __ SmiTag(r2); |
| 2149 __ Push(r3, r4); | 2102 __ Push(r2, r3); |
| 2150 __ mr(r3, r6); | 2103 __ LoadRR(r2, r5); |
| 2151 ToObjectStub stub(masm->isolate()); | 2104 ToObjectStub stub(masm->isolate()); |
| 2152 __ CallStub(&stub); | 2105 __ CallStub(&stub); |
| 2153 __ mr(r6, r3); | 2106 __ LoadRR(r5, r2); |
| 2154 __ Pop(r3, r4); | 2107 __ Pop(r2, r3); |
| 2155 __ SmiUntag(r3); | 2108 __ SmiUntag(r2); |
| 2156 } | 2109 } |
| 2157 __ LoadP(r5, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset)); | 2110 __ LoadP(r4, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset)); |
| 2158 __ bind(&convert_receiver); | 2111 __ bind(&convert_receiver); |
| 2159 } | 2112 } |
| 2160 __ ShiftLeftImm(r7, r3, Operand(kPointerSizeLog2)); | 2113 __ ShiftLeftP(r6, r2, Operand(kPointerSizeLog2)); |
| 2161 __ StorePX(r6, MemOperand(sp, r7)); | 2114 __ StoreP(r5, MemOperand(sp, r6)); |
| 2162 } | 2115 } |
| 2163 __ bind(&done_convert); | 2116 __ bind(&done_convert); |
| 2164 | 2117 |
| 2165 // ----------- S t a t e ------------- | 2118 // ----------- S t a t e ------------- |
| 2166 // -- r3 : the number of arguments (not including the receiver) | 2119 // -- r2 : the number of arguments (not including the receiver) |
| 2167 // -- r4 : the function to call (checked to be a JSFunction) | 2120 // -- r3 : the function to call (checked to be a JSFunction) |
| 2168 // -- r5 : the shared function info. | 2121 // -- r4 : the shared function info. |
| 2169 // -- cp : the function context. | 2122 // -- cp : the function context. |
| 2170 // ----------------------------------- | 2123 // ----------------------------------- |
| 2171 | 2124 |
| 2172 if (tail_call_mode == TailCallMode::kAllow) { | 2125 if (tail_call_mode == TailCallMode::kAllow) { |
| 2173 PrepareForTailCall(masm, r3, r6, r7, r8); | 2126 PrepareForTailCall(masm, r2, r5, r6, r7); |
| 2174 } | 2127 } |
| 2175 | 2128 |
| 2176 __ LoadWordArith( | 2129 __ LoadW( |
| 2177 r5, FieldMemOperand(r5, SharedFunctionInfo::kFormalParameterCountOffset)); | 2130 r4, FieldMemOperand(r4, SharedFunctionInfo::kFormalParameterCountOffset)); |
| 2178 #if !V8_TARGET_ARCH_PPC64 | 2131 #if !V8_TARGET_ARCH_S390X |
| 2179 __ SmiUntag(r5); | 2132 __ SmiUntag(r4); |
| 2180 #endif | 2133 #endif |
| 2181 ParameterCount actual(r3); | 2134 ParameterCount actual(r2); |
| 2182 ParameterCount expected(r5); | 2135 ParameterCount expected(r4); |
| 2183 __ InvokeFunctionCode(r4, no_reg, expected, actual, JUMP_FUNCTION, | 2136 __ InvokeFunctionCode(r3, no_reg, expected, actual, JUMP_FUNCTION, |
| 2184 CheckDebugStepCallWrapper()); | 2137 CheckDebugStepCallWrapper()); |
| 2185 | 2138 |
| 2186 // The function is a "classConstructor", need to raise an exception. | 2139 // The function is a "classConstructor", need to raise an exception. |
| 2187 __ bind(&class_constructor); | 2140 __ bind(&class_constructor); |
| 2188 { | 2141 { |
| 2189 FrameAndConstantPoolScope frame(masm, StackFrame::INTERNAL); | 2142 FrameAndConstantPoolScope frame(masm, StackFrame::INTERNAL); |
| 2190 __ push(r4); | 2143 __ push(r3); |
| 2191 __ CallRuntime(Runtime::kThrowConstructorNonCallableError); | 2144 __ CallRuntime(Runtime::kThrowConstructorNonCallableError); |
| 2192 } | 2145 } |
| 2193 } | 2146 } |
| 2194 | 2147 |
| 2195 | |
| 2196 namespace { | 2148 namespace { |
| 2197 | 2149 |
| 2198 void Generate_PushBoundArguments(MacroAssembler* masm) { | 2150 void Generate_PushBoundArguments(MacroAssembler* masm) { |
| 2199 // ----------- S t a t e ------------- | 2151 // ----------- S t a t e ------------- |
| 2200 // -- r3 : the number of arguments (not including the receiver) | 2152 // -- r2 : the number of arguments (not including the receiver) |
| 2201 // -- r4 : target (checked to be a JSBoundFunction) | 2153 // -- r3 : target (checked to be a JSBoundFunction) |
| 2202 // -- r6 : new.target (only in case of [[Construct]]) | 2154 // -- r5 : new.target (only in case of [[Construct]]) |
| 2203 // ----------------------------------- | 2155 // ----------------------------------- |
| 2204 | 2156 |
| 2205 // Load [[BoundArguments]] into r5 and length of that into r7. | 2157 // Load [[BoundArguments]] into r4 and length of that into r6. |
| 2206 Label no_bound_arguments; | 2158 Label no_bound_arguments; |
| 2207 __ LoadP(r5, FieldMemOperand(r4, JSBoundFunction::kBoundArgumentsOffset)); | 2159 __ LoadP(r4, FieldMemOperand(r3, JSBoundFunction::kBoundArgumentsOffset)); |
| 2208 __ LoadP(r7, FieldMemOperand(r5, FixedArray::kLengthOffset)); | 2160 __ LoadP(r6, FieldMemOperand(r4, FixedArray::kLengthOffset)); |
| 2209 __ SmiUntag(r7, SetRC); | 2161 __ SmiUntag(r6); |
| 2210 __ beq(&no_bound_arguments, cr0); | 2162 __ LoadAndTestP(r6, r6); |
| 2163 __ beq(&no_bound_arguments); |
| 2211 { | 2164 { |
| 2212 // ----------- S t a t e ------------- | 2165 // ----------- S t a t e ------------- |
| 2213 // -- r3 : the number of arguments (not including the receiver) | 2166 // -- r2 : the number of arguments (not including the receiver) |
| 2214 // -- r4 : target (checked to be a JSBoundFunction) | 2167 // -- r3 : target (checked to be a JSBoundFunction) |
| 2215 // -- r5 : the [[BoundArguments]] (implemented as FixedArray) | 2168 // -- r4 : the [[BoundArguments]] (implemented as FixedArray) |
| 2216 // -- r6 : new.target (only in case of [[Construct]]) | 2169 // -- r5 : new.target (only in case of [[Construct]]) |
| 2217 // -- r7 : the number of [[BoundArguments]] | 2170 // -- r6 : the number of [[BoundArguments]] |
| 2218 // ----------------------------------- | 2171 // ----------------------------------- |
| 2219 | 2172 |
| 2220 // Reserve stack space for the [[BoundArguments]]. | 2173 // Reserve stack space for the [[BoundArguments]]. |
| 2221 { | 2174 { |
| 2222 Label done; | 2175 Label done; |
| 2223 __ mr(r9, sp); // preserve previous stack pointer | 2176 __ LoadRR(r8, sp); // preserve previous stack pointer |
| 2224 __ ShiftLeftImm(r10, r7, Operand(kPointerSizeLog2)); | 2177 __ ShiftLeftP(r9, r6, Operand(kPointerSizeLog2)); |
| 2225 __ sub(sp, sp, r10); | 2178 __ SubP(sp, sp, r9); |
| 2226 // Check the stack for overflow. We are not trying to catch interruptions | 2179 // Check the stack for overflow. We are not trying to catch interruptions |
| 2227 // (i.e. debug break and preemption) here, so check the "real stack | 2180 // (i.e. debug break and preemption) here, so check the "real stack |
| 2228 // limit". | 2181 // limit". |
| 2229 __ CompareRoot(sp, Heap::kRealStackLimitRootIndex); | 2182 __ CompareRoot(sp, Heap::kRealStackLimitRootIndex); |
| 2230 __ bgt(&done); // Signed comparison. | 2183 __ bgt(&done); // Signed comparison. |
| 2231 // Restore the stack pointer. | 2184 // Restore the stack pointer. |
| 2232 __ mr(sp, r9); | 2185 __ LoadRR(sp, r8); |
| 2233 { | 2186 { |
| 2234 FrameScope scope(masm, StackFrame::MANUAL); | 2187 FrameScope scope(masm, StackFrame::MANUAL); |
| 2235 __ EnterFrame(StackFrame::INTERNAL); | 2188 __ EnterFrame(StackFrame::INTERNAL); |
| 2236 __ CallRuntime(Runtime::kThrowStackOverflow); | 2189 __ CallRuntime(Runtime::kThrowStackOverflow); |
| 2237 } | 2190 } |
| 2238 __ bind(&done); | 2191 __ bind(&done); |
| 2239 } | 2192 } |
| 2240 | 2193 |
| 2241 // Relocate arguments down the stack. | 2194 // Relocate arguments down the stack. |
| 2242 // -- r3 : the number of arguments (not including the receiver) | 2195 // -- r2 : the number of arguments (not including the receiver) |
| 2243 // -- r9 : the previous stack pointer | 2196 // -- r8 : the previous stack pointer |
| 2244 // -- r10 : the size of the [[BoundArguments]] | 2197 // -- r9 : the size of the [[BoundArguments]] |
| 2245 { | 2198 { |
| 2246 Label skip, loop; | 2199 Label skip, loop; |
| 2247 __ li(r8, Operand::Zero()); | 2200 __ LoadImmP(r7, Operand::Zero()); |
| 2248 __ cmpi(r3, Operand::Zero()); | 2201 __ CmpP(r2, Operand::Zero()); |
| 2249 __ beq(&skip); | 2202 __ beq(&skip); |
| 2250 __ mtctr(r3); | 2203 __ LoadRR(r1, r2); |
| 2251 __ bind(&loop); | 2204 __ bind(&loop); |
| 2252 __ LoadPX(r0, MemOperand(r9, r8)); | 2205 __ LoadP(r0, MemOperand(r8, r7)); |
| 2253 __ StorePX(r0, MemOperand(sp, r8)); | 2206 __ StoreP(r0, MemOperand(sp, r7)); |
| 2254 __ addi(r8, r8, Operand(kPointerSize)); | 2207 __ AddP(r7, r7, Operand(kPointerSize)); |
| 2255 __ bdnz(&loop); | 2208 __ BranchOnCount(r1, &loop); |
| 2256 __ bind(&skip); | 2209 __ bind(&skip); |
| 2257 } | 2210 } |
| 2258 | 2211 |
| 2259 // Copy [[BoundArguments]] to the stack (below the arguments). | 2212 // Copy [[BoundArguments]] to the stack (below the arguments). |
| 2260 { | 2213 { |
| 2261 Label loop; | 2214 Label loop; |
| 2262 __ addi(r5, r5, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); | 2215 __ AddP(r4, r4, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); |
| 2263 __ add(r5, r5, r10); | 2216 __ AddP(r4, r4, r9); |
| 2264 __ mtctr(r7); | 2217 __ LoadRR(r1, r6); |
| 2265 __ bind(&loop); | 2218 __ bind(&loop); |
| 2266 __ LoadPU(r0, MemOperand(r5, -kPointerSize)); | 2219 __ LoadP(r0, MemOperand(r4, -kPointerSize)); |
| 2267 __ StorePX(r0, MemOperand(sp, r8)); | 2220 __ lay(r4, MemOperand(r4, -kPointerSize)); |
| 2268 __ addi(r8, r8, Operand(kPointerSize)); | 2221 __ StoreP(r0, MemOperand(sp, r7)); |
| 2269 __ bdnz(&loop); | 2222 __ AddP(r7, r7, Operand(kPointerSize)); |
| 2270 __ add(r3, r3, r7); | 2223 __ BranchOnCount(r1, &loop); |
| 2224 __ AddP(r2, r2, r6); |
| 2271 } | 2225 } |
| 2272 } | 2226 } |
| 2273 __ bind(&no_bound_arguments); | 2227 __ bind(&no_bound_arguments); |
| 2274 } | 2228 } |
| 2275 | 2229 |
| 2276 } // namespace | 2230 } // namespace |
| 2277 | 2231 |
| 2278 | |
| 2279 // static | 2232 // static |
| 2280 void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm, | 2233 void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm, |
| 2281 TailCallMode tail_call_mode) { | 2234 TailCallMode tail_call_mode) { |
| 2282 // ----------- S t a t e ------------- | 2235 // ----------- S t a t e ------------- |
| 2283 // -- r3 : the number of arguments (not including the receiver) | 2236 // -- r2 : the number of arguments (not including the receiver) |
| 2284 // -- r4 : the function to call (checked to be a JSBoundFunction) | 2237 // -- r3 : the function to call (checked to be a JSBoundFunction) |
| 2285 // ----------------------------------- | 2238 // ----------------------------------- |
| 2286 __ AssertBoundFunction(r4); | 2239 __ AssertBoundFunction(r3); |
| 2287 | 2240 |
| 2288 if (tail_call_mode == TailCallMode::kAllow) { | 2241 if (tail_call_mode == TailCallMode::kAllow) { |
| 2289 PrepareForTailCall(masm, r3, r6, r7, r8); | 2242 PrepareForTailCall(masm, r2, r5, r6, r7); |
| 2290 } | 2243 } |
| 2291 | 2244 |
| 2292 // Patch the receiver to [[BoundThis]]. | 2245 // Patch the receiver to [[BoundThis]]. |
| 2293 __ LoadP(ip, FieldMemOperand(r4, JSBoundFunction::kBoundThisOffset)); | 2246 __ LoadP(ip, FieldMemOperand(r3, JSBoundFunction::kBoundThisOffset)); |
| 2294 __ ShiftLeftImm(r0, r3, Operand(kPointerSizeLog2)); | 2247 __ ShiftLeftP(r1, r2, Operand(kPointerSizeLog2)); |
| 2295 __ StorePX(ip, MemOperand(sp, r0)); | 2248 __ StoreP(ip, MemOperand(sp, r1)); |
| 2296 | 2249 |
| 2297 // Push the [[BoundArguments]] onto the stack. | 2250 // Push the [[BoundArguments]] onto the stack. |
| 2298 Generate_PushBoundArguments(masm); | 2251 Generate_PushBoundArguments(masm); |
| 2299 | 2252 |
| 2300 // Call the [[BoundTargetFunction]] via the Call builtin. | 2253 // Call the [[BoundTargetFunction]] via the Call builtin. |
| 2301 __ LoadP(r4, | 2254 __ LoadP(r3, |
| 2302 FieldMemOperand(r4, JSBoundFunction::kBoundTargetFunctionOffset)); | 2255 FieldMemOperand(r3, JSBoundFunction::kBoundTargetFunctionOffset)); |
| 2303 __ mov(ip, Operand(ExternalReference(Builtins::kCall_ReceiverIsAny, | 2256 __ mov(ip, Operand(ExternalReference(Builtins::kCall_ReceiverIsAny, |
| 2304 masm->isolate()))); | 2257 masm->isolate()))); |
| 2305 __ LoadP(ip, MemOperand(ip)); | 2258 __ LoadP(ip, MemOperand(ip)); |
| 2306 __ addi(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag)); | 2259 __ AddP(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag)); |
| 2307 __ JumpToJSEntry(ip); | 2260 __ JumpToJSEntry(ip); |
| 2308 } | 2261 } |
| 2309 | 2262 |
| 2310 | |
| 2311 // static | 2263 // static |
| 2312 void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode, | 2264 void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode, |
| 2313 TailCallMode tail_call_mode) { | 2265 TailCallMode tail_call_mode) { |
| 2314 // ----------- S t a t e ------------- | 2266 // ----------- S t a t e ------------- |
| 2315 // -- r3 : the number of arguments (not including the receiver) | 2267 // -- r2 : the number of arguments (not including the receiver) |
| 2316 // -- r4 : the target to call (can be any Object). | 2268 // -- r3 : the target to call (can be any Object). |
| 2317 // ----------------------------------- | 2269 // ----------------------------------- |
| 2318 | 2270 |
| 2319 Label non_callable, non_function, non_smi; | 2271 Label non_callable, non_function, non_smi; |
| 2320 __ JumpIfSmi(r4, &non_callable); | 2272 __ JumpIfSmi(r3, &non_callable); |
| 2321 __ bind(&non_smi); | 2273 __ bind(&non_smi); |
| 2322 __ CompareObjectType(r4, r7, r8, JS_FUNCTION_TYPE); | 2274 __ CompareObjectType(r3, r6, r7, JS_FUNCTION_TYPE); |
| 2323 __ Jump(masm->isolate()->builtins()->CallFunction(mode, tail_call_mode), | 2275 __ Jump(masm->isolate()->builtins()->CallFunction(mode, tail_call_mode), |
| 2324 RelocInfo::CODE_TARGET, eq); | 2276 RelocInfo::CODE_TARGET, eq); |
| 2325 __ cmpi(r8, Operand(JS_BOUND_FUNCTION_TYPE)); | 2277 __ CmpP(r7, Operand(JS_BOUND_FUNCTION_TYPE)); |
| 2326 __ Jump(masm->isolate()->builtins()->CallBoundFunction(tail_call_mode), | 2278 __ Jump(masm->isolate()->builtins()->CallBoundFunction(tail_call_mode), |
| 2327 RelocInfo::CODE_TARGET, eq); | 2279 RelocInfo::CODE_TARGET, eq); |
| 2328 __ cmpi(r8, Operand(JS_PROXY_TYPE)); | 2280 __ CmpP(r7, Operand(JS_PROXY_TYPE)); |
| 2329 __ bne(&non_function); | 2281 __ bne(&non_function); |
| 2330 | 2282 |
| 2331 // 0. Prepare for tail call if necessary. | 2283 // 0. Prepare for tail call if necessary. |
| 2332 if (tail_call_mode == TailCallMode::kAllow) { | 2284 if (tail_call_mode == TailCallMode::kAllow) { |
| 2333 PrepareForTailCall(masm, r3, r6, r7, r8); | 2285 PrepareForTailCall(masm, r2, r5, r6, r7); |
| 2334 } | 2286 } |
| 2335 | 2287 |
| 2336 // 1. Runtime fallback for Proxy [[Call]]. | 2288 // 1. Runtime fallback for Proxy [[Call]]. |
| 2337 __ Push(r4); | 2289 __ Push(r3); |
| 2338 // Increase the arguments size to include the pushed function and the | 2290 // Increase the arguments size to include the pushed function and the |
| 2339 // existing receiver on the stack. | 2291 // existing receiver on the stack. |
| 2340 __ addi(r3, r3, Operand(2)); | 2292 __ AddP(r2, r2, Operand(2)); |
| 2341 // Tail-call to the runtime. | 2293 // Tail-call to the runtime. |
| 2342 __ JumpToExternalReference( | 2294 __ JumpToExternalReference( |
| 2343 ExternalReference(Runtime::kJSProxyCall, masm->isolate())); | 2295 ExternalReference(Runtime::kJSProxyCall, masm->isolate())); |
| 2344 | 2296 |
| 2345 // 2. Call to something else, which might have a [[Call]] internal method (if | 2297 // 2. Call to something else, which might have a [[Call]] internal method (if |
| 2346 // not we raise an exception). | 2298 // not we raise an exception). |
| 2347 __ bind(&non_function); | 2299 __ bind(&non_function); |
| 2348 // Check if target has a [[Call]] internal method. | 2300 // Check if target has a [[Call]] internal method. |
| 2349 __ lbz(r7, FieldMemOperand(r7, Map::kBitFieldOffset)); | 2301 __ LoadlB(r6, FieldMemOperand(r6, Map::kBitFieldOffset)); |
| 2350 __ TestBit(r7, Map::kIsCallable, r0); | 2302 __ TestBit(r6, Map::kIsCallable, r0); |
| 2351 __ beq(&non_callable, cr0); | 2303 __ beq(&non_callable); |
| 2352 // Overwrite the original receiver with the (original) target. | 2304 // Overwrite the original receiver with the (original) target. |
| 2353 __ ShiftLeftImm(r8, r3, Operand(kPointerSizeLog2)); | 2305 __ ShiftLeftP(r7, r2, Operand(kPointerSizeLog2)); |
| 2354 __ StorePX(r4, MemOperand(sp, r8)); | 2306 __ StoreP(r3, MemOperand(sp, r7)); |
| 2355 // Let the "call_as_function_delegate" take care of the rest. | 2307 // Let the "call_as_function_delegate" take care of the rest. |
| 2356 __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, r4); | 2308 __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, r3); |
| 2357 __ Jump(masm->isolate()->builtins()->CallFunction( | 2309 __ Jump(masm->isolate()->builtins()->CallFunction( |
| 2358 ConvertReceiverMode::kNotNullOrUndefined, tail_call_mode), | 2310 ConvertReceiverMode::kNotNullOrUndefined, tail_call_mode), |
| 2359 RelocInfo::CODE_TARGET); | 2311 RelocInfo::CODE_TARGET); |
| 2360 | 2312 |
| 2361 // 3. Call to something that is not callable. | 2313 // 3. Call to something that is not callable. |
| 2362 __ bind(&non_callable); | 2314 __ bind(&non_callable); |
| 2363 { | 2315 { |
| 2364 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); | 2316 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); |
| 2365 __ Push(r4); | 2317 __ Push(r3); |
| 2366 __ CallRuntime(Runtime::kThrowCalledNonCallable); | 2318 __ CallRuntime(Runtime::kThrowCalledNonCallable); |
| 2367 } | 2319 } |
| 2368 } | 2320 } |
| 2369 | 2321 |
| 2370 | |
| 2371 // static | 2322 // static |
| 2372 void Builtins::Generate_ConstructFunction(MacroAssembler* masm) { | 2323 void Builtins::Generate_ConstructFunction(MacroAssembler* masm) { |
| 2373 // ----------- S t a t e ------------- | 2324 // ----------- S t a t e ------------- |
| 2374 // -- r3 : the number of arguments (not including the receiver) | 2325 // -- r2 : the number of arguments (not including the receiver) |
| 2375 // -- r4 : the constructor to call (checked to be a JSFunction) | 2326 // -- r3 : the constructor to call (checked to be a JSFunction) |
| 2376 // -- r6 : the new target (checked to be a constructor) | 2327 // -- r5 : the new target (checked to be a constructor) |
| 2377 // ----------------------------------- | 2328 // ----------------------------------- |
| 2378 __ AssertFunction(r4); | 2329 __ AssertFunction(r3); |
| 2379 | 2330 |
| 2380 // Calling convention for function specific ConstructStubs require | 2331 // Calling convention for function specific ConstructStubs require |
| 2381 // r5 to contain either an AllocationSite or undefined. | 2332 // r4 to contain either an AllocationSite or undefined. |
| 2382 __ LoadRoot(r5, Heap::kUndefinedValueRootIndex); | 2333 __ LoadRoot(r4, Heap::kUndefinedValueRootIndex); |
| 2383 | 2334 |
| 2384 // Tail call to the function-specific construct stub (still in the caller | 2335 // Tail call to the function-specific construct stub (still in the caller |
| 2385 // context at this point). | 2336 // context at this point). |
| 2386 __ LoadP(r7, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset)); | 2337 __ LoadP(r6, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset)); |
| 2387 __ LoadP(r7, FieldMemOperand(r7, SharedFunctionInfo::kConstructStubOffset)); | 2338 __ LoadP(r6, FieldMemOperand(r6, SharedFunctionInfo::kConstructStubOffset)); |
| 2388 __ addi(ip, r7, Operand(Code::kHeaderSize - kHeapObjectTag)); | 2339 __ AddP(ip, r6, Operand(Code::kHeaderSize - kHeapObjectTag)); |
| 2389 __ JumpToJSEntry(ip); | 2340 __ JumpToJSEntry(ip); |
| 2390 } | 2341 } |
| 2391 | 2342 |
| 2392 | |
| 2393 // static | 2343 // static |
| 2394 void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) { | 2344 void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) { |
| 2395 // ----------- S t a t e ------------- | 2345 // ----------- S t a t e ------------- |
| 2396 // -- r3 : the number of arguments (not including the receiver) | 2346 // -- r2 : the number of arguments (not including the receiver) |
| 2397 // -- r4 : the function to call (checked to be a JSBoundFunction) | 2347 // -- r3 : the function to call (checked to be a JSBoundFunction) |
| 2398 // -- r6 : the new target (checked to be a constructor) | 2348 // -- r5 : the new target (checked to be a constructor) |
| 2399 // ----------------------------------- | 2349 // ----------------------------------- |
| 2400 __ AssertBoundFunction(r4); | 2350 __ AssertBoundFunction(r3); |
| 2401 | 2351 |
| 2402 // Push the [[BoundArguments]] onto the stack. | 2352 // Push the [[BoundArguments]] onto the stack. |
| 2403 Generate_PushBoundArguments(masm); | 2353 Generate_PushBoundArguments(masm); |
| 2404 | 2354 |
| 2405 // Patch new.target to [[BoundTargetFunction]] if new.target equals target. | 2355 // Patch new.target to [[BoundTargetFunction]] if new.target equals target. |
| 2406 Label skip; | 2356 Label skip; |
| 2407 __ cmp(r4, r6); | 2357 __ CmpP(r3, r5); |
| 2408 __ bne(&skip); | 2358 __ bne(&skip); |
| 2409 __ LoadP(r6, | 2359 __ LoadP(r5, |
| 2410 FieldMemOperand(r4, JSBoundFunction::kBoundTargetFunctionOffset)); | 2360 FieldMemOperand(r3, JSBoundFunction::kBoundTargetFunctionOffset)); |
| 2411 __ bind(&skip); | 2361 __ bind(&skip); |
| 2412 | 2362 |
| 2413 // Construct the [[BoundTargetFunction]] via the Construct builtin. | 2363 // Construct the [[BoundTargetFunction]] via the Construct builtin. |
| 2414 __ LoadP(r4, | 2364 __ LoadP(r3, |
| 2415 FieldMemOperand(r4, JSBoundFunction::kBoundTargetFunctionOffset)); | 2365 FieldMemOperand(r3, JSBoundFunction::kBoundTargetFunctionOffset)); |
| 2416 __ mov(ip, Operand(ExternalReference(Builtins::kConstruct, masm->isolate()))); | 2366 __ mov(ip, Operand(ExternalReference(Builtins::kConstruct, masm->isolate()))); |
| 2417 __ LoadP(ip, MemOperand(ip)); | 2367 __ LoadP(ip, MemOperand(ip)); |
| 2418 __ addi(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag)); | 2368 __ AddP(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag)); |
| 2419 __ JumpToJSEntry(ip); | 2369 __ JumpToJSEntry(ip); |
| 2420 } | 2370 } |
| 2421 | 2371 |
| 2422 | |
| 2423 // static | 2372 // static |
| 2424 void Builtins::Generate_ConstructProxy(MacroAssembler* masm) { | 2373 void Builtins::Generate_ConstructProxy(MacroAssembler* masm) { |
| 2425 // ----------- S t a t e ------------- | 2374 // ----------- S t a t e ------------- |
| 2426 // -- r3 : the number of arguments (not including the receiver) | 2375 // -- r2 : the number of arguments (not including the receiver) |
| 2427 // -- r4 : the constructor to call (checked to be a JSProxy) | 2376 // -- r3 : the constructor to call (checked to be a JSProxy) |
| 2428 // -- r6 : the new target (either the same as the constructor or | 2377 // -- r5 : the new target (either the same as the constructor or |
| 2429 // the JSFunction on which new was invoked initially) | 2378 // the JSFunction on which new was invoked initially) |
| 2430 // ----------------------------------- | 2379 // ----------------------------------- |
| 2431 | 2380 |
| 2432 // Call into the Runtime for Proxy [[Construct]]. | 2381 // Call into the Runtime for Proxy [[Construct]]. |
| 2433 __ Push(r4, r6); | 2382 __ Push(r3, r5); |
| 2434 // Include the pushed new_target, constructor and the receiver. | 2383 // Include the pushed new_target, constructor and the receiver. |
| 2435 __ addi(r3, r3, Operand(3)); | 2384 __ AddP(r2, r2, Operand(3)); |
| 2436 // Tail-call to the runtime. | 2385 // Tail-call to the runtime. |
| 2437 __ JumpToExternalReference( | 2386 __ JumpToExternalReference( |
| 2438 ExternalReference(Runtime::kJSProxyConstruct, masm->isolate())); | 2387 ExternalReference(Runtime::kJSProxyConstruct, masm->isolate())); |
| 2439 } | 2388 } |
| 2440 | 2389 |
| 2441 | |
| 2442 // static | 2390 // static |
| 2443 void Builtins::Generate_Construct(MacroAssembler* masm) { | 2391 void Builtins::Generate_Construct(MacroAssembler* masm) { |
| 2444 // ----------- S t a t e ------------- | 2392 // ----------- S t a t e ------------- |
| 2445 // -- r3 : the number of arguments (not including the receiver) | 2393 // -- r2 : the number of arguments (not including the receiver) |
| 2446 // -- r4 : the constructor to call (can be any Object) | 2394 // -- r3 : the constructor to call (can be any Object) |
| 2447 // -- r6 : the new target (either the same as the constructor or | 2395 // -- r5 : the new target (either the same as the constructor or |
| 2448 // the JSFunction on which new was invoked initially) | 2396 // the JSFunction on which new was invoked initially) |
| 2449 // ----------------------------------- | 2397 // ----------------------------------- |
| 2450 | 2398 |
| 2451 // Check if target is a Smi. | 2399 // Check if target is a Smi. |
| 2452 Label non_constructor; | 2400 Label non_constructor; |
| 2453 __ JumpIfSmi(r4, &non_constructor); | 2401 __ JumpIfSmi(r3, &non_constructor); |
| 2454 | 2402 |
| 2455 // Dispatch based on instance type. | 2403 // Dispatch based on instance type. |
| 2456 __ CompareObjectType(r4, r7, r8, JS_FUNCTION_TYPE); | 2404 __ CompareObjectType(r3, r6, r7, JS_FUNCTION_TYPE); |
| 2457 __ Jump(masm->isolate()->builtins()->ConstructFunction(), | 2405 __ Jump(masm->isolate()->builtins()->ConstructFunction(), |
| 2458 RelocInfo::CODE_TARGET, eq); | 2406 RelocInfo::CODE_TARGET, eq); |
| 2459 | 2407 |
| 2460 // Check if target has a [[Construct]] internal method. | 2408 // Check if target has a [[Construct]] internal method. |
| 2461 __ lbz(r5, FieldMemOperand(r7, Map::kBitFieldOffset)); | 2409 __ LoadlB(r4, FieldMemOperand(r6, Map::kBitFieldOffset)); |
| 2462 __ TestBit(r5, Map::kIsConstructor, r0); | 2410 __ TestBit(r4, Map::kIsConstructor); |
| 2463 __ beq(&non_constructor, cr0); | 2411 __ beq(&non_constructor); |
| 2464 | 2412 |
| 2465 // Only dispatch to bound functions after checking whether they are | 2413 // Only dispatch to bound functions after checking whether they are |
| 2466 // constructors. | 2414 // constructors. |
| 2467 __ cmpi(r8, Operand(JS_BOUND_FUNCTION_TYPE)); | 2415 __ CmpP(r7, Operand(JS_BOUND_FUNCTION_TYPE)); |
| 2468 __ Jump(masm->isolate()->builtins()->ConstructBoundFunction(), | 2416 __ Jump(masm->isolate()->builtins()->ConstructBoundFunction(), |
| 2469 RelocInfo::CODE_TARGET, eq); | 2417 RelocInfo::CODE_TARGET, eq); |
| 2470 | 2418 |
| 2471 // Only dispatch to proxies after checking whether they are constructors. | 2419 // Only dispatch to proxies after checking whether they are constructors. |
| 2472 __ cmpi(r8, Operand(JS_PROXY_TYPE)); | 2420 __ CmpP(r7, Operand(JS_PROXY_TYPE)); |
| 2473 __ Jump(masm->isolate()->builtins()->ConstructProxy(), RelocInfo::CODE_TARGET, | 2421 __ Jump(masm->isolate()->builtins()->ConstructProxy(), RelocInfo::CODE_TARGET, |
| 2474 eq); | 2422 eq); |
| 2475 | 2423 |
| 2476 // Called Construct on an exotic Object with a [[Construct]] internal method. | 2424 // Called Construct on an exotic Object with a [[Construct]] internal method. |
| 2477 { | 2425 { |
| 2478 // Overwrite the original receiver with the (original) target. | 2426 // Overwrite the original receiver with the (original) target. |
| 2479 __ ShiftLeftImm(r8, r3, Operand(kPointerSizeLog2)); | 2427 __ ShiftLeftP(r7, r2, Operand(kPointerSizeLog2)); |
| 2480 __ StorePX(r4, MemOperand(sp, r8)); | 2428 __ StoreP(r3, MemOperand(sp, r7)); |
| 2481 // Let the "call_as_constructor_delegate" take care of the rest. | 2429 // Let the "call_as_constructor_delegate" take care of the rest. |
| 2482 __ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, r4); | 2430 __ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, r3); |
| 2483 __ Jump(masm->isolate()->builtins()->CallFunction(), | 2431 __ Jump(masm->isolate()->builtins()->CallFunction(), |
| 2484 RelocInfo::CODE_TARGET); | 2432 RelocInfo::CODE_TARGET); |
| 2485 } | 2433 } |
| 2486 | 2434 |
| 2487 // Called Construct on an Object that doesn't have a [[Construct]] internal | 2435 // Called Construct on an Object that doesn't have a [[Construct]] internal |
| 2488 // method. | 2436 // method. |
| 2489 __ bind(&non_constructor); | 2437 __ bind(&non_constructor); |
| 2490 __ Jump(masm->isolate()->builtins()->ConstructedNonConstructable(), | 2438 __ Jump(masm->isolate()->builtins()->ConstructedNonConstructable(), |
| 2491 RelocInfo::CODE_TARGET); | 2439 RelocInfo::CODE_TARGET); |
| 2492 } | 2440 } |
| 2493 | 2441 |
| 2494 | |
| 2495 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) { | 2442 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) { |
| 2496 // ----------- S t a t e ------------- | 2443 // ----------- S t a t e ------------- |
| 2497 // -- r3 : actual number of arguments | 2444 // -- r2 : actual number of arguments |
| 2498 // -- r4 : function (passed through to callee) | 2445 // -- r3 : function (passed through to callee) |
| 2499 // -- r5 : expected number of arguments | 2446 // -- r4 : expected number of arguments |
| 2500 // -- r6 : new target (passed through to callee) | 2447 // -- r5 : new target (passed through to callee) |
| 2501 // ----------------------------------- | 2448 // ----------------------------------- |
| 2502 | 2449 |
| 2503 Label invoke, dont_adapt_arguments, stack_overflow; | 2450 Label invoke, dont_adapt_arguments, stack_overflow; |
| 2504 | 2451 |
| 2505 Label enough, too_few; | 2452 Label enough, too_few; |
| 2506 __ LoadP(ip, FieldMemOperand(r4, JSFunction::kCodeEntryOffset)); | 2453 __ LoadP(ip, FieldMemOperand(r3, JSFunction::kCodeEntryOffset)); |
| 2507 __ cmp(r3, r5); | 2454 __ CmpP(r2, r4); |
| 2508 __ blt(&too_few); | 2455 __ blt(&too_few); |
| 2509 __ cmpi(r5, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel)); | 2456 __ CmpP(r4, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel)); |
| 2510 __ beq(&dont_adapt_arguments); | 2457 __ beq(&dont_adapt_arguments); |
| 2511 | 2458 |
| 2512 { // Enough parameters: actual >= expected | 2459 { // Enough parameters: actual >= expected |
| 2513 __ bind(&enough); | 2460 __ bind(&enough); |
| 2514 EnterArgumentsAdaptorFrame(masm); | 2461 EnterArgumentsAdaptorFrame(masm); |
| 2515 ArgumentAdaptorStackCheck(masm, &stack_overflow); | 2462 ArgumentAdaptorStackCheck(masm, &stack_overflow); |
| 2516 | 2463 |
| 2517 // Calculate copy start address into r3 and copy end address into r7. | 2464 // Calculate copy start address into r2 and copy end address into r6. |
| 2518 // r3: actual number of arguments as a smi | 2465 // r2: actual number of arguments as a smi |
| 2519 // r4: function | 2466 // r3: function |
| 2520 // r5: expected number of arguments | 2467 // r4: expected number of arguments |
| 2521 // r6: new target (passed through to callee) | 2468 // r5: new target (passed through to callee) |
| 2522 // ip: code entry to call | 2469 // ip: code entry to call |
| 2523 __ SmiToPtrArrayOffset(r3, r3); | 2470 __ SmiToPtrArrayOffset(r2, r2); |
| 2524 __ add(r3, r3, fp); | 2471 __ AddP(r2, fp); |
| 2525 // adjust for return address and receiver | 2472 // adjust for return address and receiver |
| 2526 __ addi(r3, r3, Operand(2 * kPointerSize)); | 2473 __ AddP(r2, r2, Operand(2 * kPointerSize)); |
| 2527 __ ShiftLeftImm(r7, r5, Operand(kPointerSizeLog2)); | 2474 __ ShiftLeftP(r6, r4, Operand(kPointerSizeLog2)); |
| 2528 __ sub(r7, r3, r7); | 2475 __ SubP(r6, r2, r6); |
| 2529 | 2476 |
| 2530 // Copy the arguments (including the receiver) to the new stack frame. | 2477 // Copy the arguments (including the receiver) to the new stack frame. |
| 2531 // r3: copy start address | 2478 // r2: copy start address |
| 2532 // r4: function | 2479 // r3: function |
| 2533 // r5: expected number of arguments | 2480 // r4: expected number of arguments |
| 2534 // r6: new target (passed through to callee) | 2481 // r5: new target (passed through to callee) |
| 2535 // r7: copy end address | 2482 // r6: copy end address |
| 2536 // ip: code entry to call | 2483 // ip: code entry to call |
| 2537 | 2484 |
| 2538 Label copy; | 2485 Label copy; |
| 2539 __ bind(©); | 2486 __ bind(©); |
| 2540 __ LoadP(r0, MemOperand(r3, 0)); | 2487 __ LoadP(r0, MemOperand(r2, 0)); |
| 2541 __ push(r0); | 2488 __ push(r0); |
| 2542 __ cmp(r3, r7); // Compare before moving to next argument. | 2489 __ CmpP(r2, r6); // Compare before moving to next argument. |
| 2543 __ subi(r3, r3, Operand(kPointerSize)); | 2490 __ lay(r2, MemOperand(r2, -kPointerSize)); |
| 2544 __ bne(©); | 2491 __ bne(©); |
| 2545 | 2492 |
| 2546 __ b(&invoke); | 2493 __ b(&invoke); |
| 2547 } | 2494 } |
| 2548 | 2495 |
| 2549 { // Too few parameters: Actual < expected | 2496 { // Too few parameters: Actual < expected |
| 2550 __ bind(&too_few); | 2497 __ bind(&too_few); |
| 2551 | 2498 |
| 2552 // If the function is strong we need to throw an error. | 2499 // If the function is strong we need to throw an error. |
| 2553 Label no_strong_error; | 2500 Label no_strong_error; |
| 2554 __ LoadP(r7, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset)); | 2501 __ LoadP(r6, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset)); |
| 2555 __ lwz(r8, FieldMemOperand(r7, SharedFunctionInfo::kCompilerHintsOffset)); | 2502 __ LoadlW(r7, |
| 2556 __ TestBit(r8, SharedFunctionInfo::kStrongModeBit, r0); | 2503 FieldMemOperand(r6, SharedFunctionInfo::kCompilerHintsOffset)); |
| 2557 __ beq(&no_strong_error, cr0); | 2504 __ TestBit(r7, SharedFunctionInfo::kStrongModeBit, r0); |
| 2505 __ beq(&no_strong_error); |
| 2558 | 2506 |
| 2559 // What we really care about is the required number of arguments. | 2507 // What we really care about is the required number of arguments. |
| 2560 __ lwz(r7, FieldMemOperand(r7, SharedFunctionInfo::kLengthOffset)); | 2508 __ LoadlW(r6, FieldMemOperand(r6, SharedFunctionInfo::kLengthOffset)); |
| 2561 #if V8_TARGET_ARCH_PPC64 | 2509 #if V8_TARGET_ARCH_S390X |
| 2562 // See comment near kLengthOffset in src/objects.h | 2510 // See comment near kLengthOffset in src/objects.h |
| 2563 __ srawi(r7, r7, kSmiTagSize); | 2511 __ ShiftRightArith(r6, r6, Operand(kSmiTagSize)); |
| 2564 #else | 2512 #else |
| 2565 __ SmiUntag(r7); | 2513 __ SmiUntag(r6); |
| 2566 #endif | 2514 #endif |
| 2567 __ cmp(r3, r7); | 2515 __ CmpP(r2, r6); |
| 2568 __ bge(&no_strong_error); | 2516 __ bge(&no_strong_error); |
| 2569 | 2517 |
| 2570 { | 2518 { |
| 2571 FrameScope frame(masm, StackFrame::MANUAL); | 2519 FrameScope frame(masm, StackFrame::MANUAL); |
| 2572 EnterArgumentsAdaptorFrame(masm); | 2520 EnterArgumentsAdaptorFrame(masm); |
| 2573 __ CallRuntime(Runtime::kThrowStrongModeTooFewArguments); | 2521 __ CallRuntime(Runtime::kThrowStrongModeTooFewArguments); |
| 2574 } | 2522 } |
| 2575 | 2523 |
| 2576 __ bind(&no_strong_error); | 2524 __ bind(&no_strong_error); |
| 2577 EnterArgumentsAdaptorFrame(masm); | 2525 EnterArgumentsAdaptorFrame(masm); |
| 2578 ArgumentAdaptorStackCheck(masm, &stack_overflow); | 2526 ArgumentAdaptorStackCheck(masm, &stack_overflow); |
| 2579 | 2527 |
| 2580 // Calculate copy start address into r0 and copy end address is fp. | 2528 // Calculate copy start address into r0 and copy end address is fp. |
| 2581 // r3: actual number of arguments as a smi | 2529 // r2: actual number of arguments as a smi |
| 2582 // r4: function | 2530 // r3: function |
| 2583 // r5: expected number of arguments | 2531 // r4: expected number of arguments |
| 2584 // r6: new target (passed through to callee) | 2532 // r5: new target (passed through to callee) |
| 2585 // ip: code entry to call | 2533 // ip: code entry to call |
| 2586 __ SmiToPtrArrayOffset(r3, r3); | 2534 __ SmiToPtrArrayOffset(r2, r2); |
| 2587 __ add(r3, r3, fp); | 2535 __ lay(r2, MemOperand(r2, fp)); |
| 2588 | 2536 |
| 2589 // Copy the arguments (including the receiver) to the new stack frame. | 2537 // Copy the arguments (including the receiver) to the new stack frame. |
| 2590 // r3: copy start address | 2538 // r2: copy start address |
| 2591 // r4: function | 2539 // r3: function |
| 2592 // r5: expected number of arguments | 2540 // r4: expected number of arguments |
| 2593 // r6: new target (passed through to callee) | 2541 // r5: new target (passed through to callee) |
| 2594 // ip: code entry to call | 2542 // ip: code entry to call |
| 2595 Label copy; | 2543 Label copy; |
| 2596 __ bind(©); | 2544 __ bind(©); |
| 2597 // Adjust load for return address and receiver. | 2545 // Adjust load for return address and receiver. |
| 2598 __ LoadP(r0, MemOperand(r3, 2 * kPointerSize)); | 2546 __ LoadP(r0, MemOperand(r2, 2 * kPointerSize)); |
| 2599 __ push(r0); | 2547 __ push(r0); |
| 2600 __ cmp(r3, fp); // Compare before moving to next argument. | 2548 __ CmpP(r2, fp); // Compare before moving to next argument. |
| 2601 __ subi(r3, r3, Operand(kPointerSize)); | 2549 __ lay(r2, MemOperand(r2, -kPointerSize)); |
| 2602 __ bne(©); | 2550 __ bne(©); |
| 2603 | 2551 |
| 2604 // Fill the remaining expected arguments with undefined. | 2552 // Fill the remaining expected arguments with undefined. |
| 2605 // r4: function | 2553 // r3: function |
| 2606 // r5: expected number of arguments | 2554 // r4: expected number of arguments |
| 2607 // r6: new target (passed through to callee) | |
| 2608 // ip: code entry to call | 2555 // ip: code entry to call |
| 2609 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex); | 2556 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex); |
| 2610 __ ShiftLeftImm(r7, r5, Operand(kPointerSizeLog2)); | 2557 __ ShiftLeftP(r6, r4, Operand(kPointerSizeLog2)); |
| 2611 __ sub(r7, fp, r7); | 2558 __ SubP(r6, fp, r6); |
| 2612 // Adjust for frame. | 2559 // Adjust for frame. |
| 2613 __ subi(r7, r7, Operand(StandardFrameConstants::kFixedFrameSizeFromFp + | 2560 __ SubP(r6, r6, Operand(StandardFrameConstants::kFixedFrameSizeFromFp + |
| 2614 2 * kPointerSize)); | 2561 2 * kPointerSize)); |
| 2615 | 2562 |
| 2616 Label fill; | 2563 Label fill; |
| 2617 __ bind(&fill); | 2564 __ bind(&fill); |
| 2618 __ push(r0); | 2565 __ push(r0); |
| 2619 __ cmp(sp, r7); | 2566 __ CmpP(sp, r6); |
| 2620 __ bne(&fill); | 2567 __ bne(&fill); |
| 2621 } | 2568 } |
| 2622 | 2569 |
| 2623 // Call the entry point. | 2570 // Call the entry point. |
| 2624 __ bind(&invoke); | 2571 __ bind(&invoke); |
| 2625 __ mr(r3, r5); | 2572 __ LoadRR(r2, r4); |
| 2626 // r3 : expected number of arguments | 2573 // r2 : expected number of arguments |
| 2627 // r4 : function (passed through to callee) | 2574 // r3 : function (passed through to callee) |
| 2628 // r6 : new target (passed through to callee) | 2575 // r5 : new target (passed through to callee) |
| 2629 __ CallJSEntry(ip); | 2576 __ CallJSEntry(ip); |
| 2630 | 2577 |
| 2631 // Store offset of return address for deoptimizer. | 2578 // Store offset of return address for deoptimizer. |
| 2632 masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset()); | 2579 masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset()); |
| 2633 | 2580 |
| 2634 // Exit frame and return. | 2581 // Exit frame and return. |
| 2635 LeaveArgumentsAdaptorFrame(masm); | 2582 LeaveArgumentsAdaptorFrame(masm); |
| 2636 __ blr(); | 2583 __ Ret(); |
| 2637 | |
| 2638 | 2584 |
| 2639 // ------------------------------------------- | 2585 // ------------------------------------------- |
| 2640 // Don't adapt arguments. | 2586 // Don't adapt arguments. |
| 2641 // ------------------------------------------- | 2587 // ------------------------------------------- |
| 2642 __ bind(&dont_adapt_arguments); | 2588 __ bind(&dont_adapt_arguments); |
| 2643 __ JumpToJSEntry(ip); | 2589 __ JumpToJSEntry(ip); |
| 2644 | 2590 |
| 2645 __ bind(&stack_overflow); | 2591 __ bind(&stack_overflow); |
| 2646 { | 2592 { |
| 2647 FrameScope frame(masm, StackFrame::MANUAL); | 2593 FrameScope frame(masm, StackFrame::MANUAL); |
| 2648 __ CallRuntime(Runtime::kThrowStackOverflow); | 2594 __ CallRuntime(Runtime::kThrowStackOverflow); |
| 2649 __ bkpt(0); | 2595 __ bkpt(0); |
| 2650 } | 2596 } |
| 2651 } | 2597 } |
| 2652 | 2598 |
| 2599 #undef __ |
| 2653 | 2600 |
| 2654 #undef __ | |
| 2655 } // namespace internal | 2601 } // namespace internal |
| 2656 } // namespace v8 | 2602 } // namespace v8 |
| 2657 | 2603 |
| 2658 #endif // V8_TARGET_ARCH_PPC | 2604 #endif // V8_TARGET_ARCH_S390 |
| OLD | NEW |