| OLD | NEW |
| 1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/v8.h" | 5 #include "src/v8.h" |
| 6 | 6 |
| 7 #if V8_TARGET_ARCH_PPC | 7 #if V8_TARGET_ARCH_PPC |
| 8 | 8 |
| 9 #include "src/codegen.h" | 9 #include "src/codegen.h" |
| 10 #include "src/debug.h" | 10 #include "src/debug.h" |
| (...skipping 215 matching lines...) |
| 226 __ bne(&convert_argument, cr0); | 226 __ bne(&convert_argument, cr0); |
| 227 __ mr(argument, r3); | 227 __ mr(argument, r3); |
| 228 __ IncrementCounter(counters->string_ctor_conversions(), 1, r6, r7); | 228 __ IncrementCounter(counters->string_ctor_conversions(), 1, r6, r7); |
| 229 __ b(&argument_is_string); | 229 __ b(&argument_is_string); |
| 230 | 230 |
| 231 // Invoke the conversion builtin and put the result into r5. | 231 // Invoke the conversion builtin and put the result into r5. |
| 232 __ bind(&convert_argument); | 232 __ bind(&convert_argument); |
| 233 __ push(function); // Preserve the function. | 233 __ push(function); // Preserve the function. |
| 234 __ IncrementCounter(counters->string_ctor_conversions(), 1, r6, r7); | 234 __ IncrementCounter(counters->string_ctor_conversions(), 1, r6, r7); |
| 235 { | 235 { |
| 236 FrameScope scope(masm, StackFrame::INTERNAL); | 236 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); |
| 237 __ push(r3); | 237 __ push(r3); |
| 238 __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION); | 238 __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION); |
| 239 } | 239 } |
| 240 __ pop(function); | 240 __ pop(function); |
| 241 __ mr(argument, r3); | 241 __ mr(argument, r3); |
| 242 __ b(&argument_is_string); | 242 __ b(&argument_is_string); |
| 243 | 243 |
| 244 // Load the empty string into r5, remove the receiver from the | 244 // Load the empty string into r5, remove the receiver from the |
| 245 // stack, and jump back to the case where the argument is a string. | 245 // stack, and jump back to the case where the argument is a string. |
| 246 __ bind(&no_arguments); | 246 __ bind(&no_arguments); |
| 247 __ LoadRoot(argument, Heap::kempty_stringRootIndex); | 247 __ LoadRoot(argument, Heap::kempty_stringRootIndex); |
| 248 __ Drop(1); | 248 __ Drop(1); |
| 249 __ b(&argument_is_string); | 249 __ b(&argument_is_string); |
| 250 | 250 |
| 251 // At this point the argument is already a string. Call runtime to | 251 // At this point the argument is already a string. Call runtime to |
| 252 // create a string wrapper. | 252 // create a string wrapper. |
| 253 __ bind(&gc_required); | 253 __ bind(&gc_required); |
| 254 __ IncrementCounter(counters->string_ctor_gc_required(), 1, r6, r7); | 254 __ IncrementCounter(counters->string_ctor_gc_required(), 1, r6, r7); |
| 255 { | 255 { |
| 256 FrameScope scope(masm, StackFrame::INTERNAL); | 256 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); |
| 257 __ push(argument); | 257 __ push(argument); |
| 258 __ CallRuntime(Runtime::kNewStringWrapper, 1); | 258 __ CallRuntime(Runtime::kNewStringWrapper, 1); |
| 259 } | 259 } |
| 260 __ Ret(); | 260 __ Ret(); |
| 261 } | 261 } |
| 262 | 262 |
| 263 | 263 |
| 264 static void CallRuntimePassFunction(MacroAssembler* masm, | 264 static void CallRuntimePassFunction(MacroAssembler* masm, |
| 265 Runtime::FunctionId function_id) { | 265 Runtime::FunctionId function_id) { |
| 266 FrameScope scope(masm, StackFrame::INTERNAL); | 266 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); |
| 267 // Push a copy of the function onto the stack. | 267 // Push a copy of the function onto the stack. |
| 268 // Push function as parameter to the runtime call. | 268 // Push function as parameter to the runtime call. |
| 269 __ Push(r4, r4); | 269 __ Push(r4, r4); |
| 270 | 270 |
| 271 __ CallRuntime(function_id, 1); | 271 __ CallRuntime(function_id, 1); |
| 272 // Restore receiver. | 272 // Restore receiver. |
| 273 __ Pop(r4); | 273 __ Pop(r4); |
| 274 } | 274 } |
| 275 | 275 |
| 276 | 276 |
| (...skipping 70 matching lines...) |
| 347 // -- sp[...]: constructor arguments | 347 // -- sp[...]: constructor arguments |
| 348 // ----------------------------------- | 348 // ----------------------------------- |
| 349 | 349 |
| 350 // Should never create mementos for api functions. | 350 // Should never create mementos for api functions. |
| 351 DCHECK(!is_api_function || !create_memento); | 351 DCHECK(!is_api_function || !create_memento); |
| 352 | 352 |
| 353 Isolate* isolate = masm->isolate(); | 353 Isolate* isolate = masm->isolate(); |
| 354 | 354 |
| 355 // Enter a construct frame. | 355 // Enter a construct frame. |
| 356 { | 356 { |
| 357 FrameScope scope(masm, StackFrame::CONSTRUCT); | 357 FrameAndConstantPoolScope scope(masm, StackFrame::CONSTRUCT); |
| 358 | 358 |
| 359 if (create_memento) { | 359 if (create_memento) { |
| 360 __ AssertUndefinedOrAllocationSite(r5, r7); | 360 __ AssertUndefinedOrAllocationSite(r5, r7); |
| 361 __ push(r5); | 361 __ push(r5); |
| 362 } | 362 } |
| 363 | 363 |
| 364 // Preserve the two incoming parameters on the stack. | 364 // Preserve the two incoming parameters on the stack. |
| 365 __ SmiTag(r3); | 365 __ SmiTag(r3); |
| 366 __ Push(r3, r4); | 366 __ Push(r3, r4); |
| 367 | 367 |
| (...skipping 378 matching lines...) |
| 746 // -- r5 : allocation site or undefined | 746 // -- r5 : allocation site or undefined |
| 747 // -- r6 : original constructor | 747 // -- r6 : original constructor |
| 748 // -- lr : return address | 748 // -- lr : return address |
| 749 // -- sp[...]: constructor arguments | 749 // -- sp[...]: constructor arguments |
| 750 // ----------------------------------- | 750 // ----------------------------------- |
| 751 | 751 |
| 752 // TODO(dslomov): support pretenuring | 752 // TODO(dslomov): support pretenuring |
| 753 CHECK(!FLAG_pretenuring_call_new); | 753 CHECK(!FLAG_pretenuring_call_new); |
| 754 | 754 |
| 755 { | 755 { |
| 756 FrameScope scope(masm, StackFrame::CONSTRUCT); | 756 FrameAndConstantPoolScope scope(masm, StackFrame::CONSTRUCT); |
| 757 | 757 |
| 758 // Smi-tagged arguments count. | 758 // Smi-tagged arguments count. |
| 759 __ mr(r7, r3); | 759 __ mr(r7, r3); |
| 760 __ SmiTag(r7, SetRC); | 760 __ SmiTag(r7, SetRC); |
| 761 | 761 |
| 762 // receiver is the hole. | 762 // receiver is the hole. |
| 763 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); | 763 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); |
| 764 | 764 |
| 765 // smi arguments count, new.target, receiver | 765 // smi arguments count, new.target, receiver |
| 766 __ Push(r7, r6, ip); | 766 __ Push(r7, r6, ip); |
| (...skipping 188 matching lines...) |
| 955 } | 955 } |
| 956 | 956 |
| 957 | 957 |
| 958 void Builtins::Generate_CompileLazy(MacroAssembler* masm) { | 958 void Builtins::Generate_CompileLazy(MacroAssembler* masm) { |
| 959 CallRuntimePassFunction(masm, Runtime::kCompileLazy); | 959 CallRuntimePassFunction(masm, Runtime::kCompileLazy); |
| 960 GenerateTailCallToReturnedCode(masm); | 960 GenerateTailCallToReturnedCode(masm); |
| 961 } | 961 } |
| 962 | 962 |
| 963 | 963 |
| 964 static void CallCompileOptimized(MacroAssembler* masm, bool concurrent) { | 964 static void CallCompileOptimized(MacroAssembler* masm, bool concurrent) { |
| 965 FrameScope scope(masm, StackFrame::INTERNAL); | 965 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); |
| 966 // Push a copy of the function onto the stack. | 966 // Push a copy of the function onto the stack. |
| 967 // Push function as parameter to the runtime call. | 967 // Push function as parameter to the runtime call. |
| 968 __ Push(r4, r4); | 968 __ Push(r4, r4); |
| 969 // Whether to compile in a background thread. | 969 // Whether to compile in a background thread. |
| 970 __ LoadRoot( | 970 __ LoadRoot( |
| 971 r0, concurrent ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex); | 971 r0, concurrent ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex); |
| 972 __ push(r0); | 972 __ push(r0); |
| 973 | 973 |
| 974 __ CallRuntime(Runtime::kCompileOptimized, 2); | 974 __ CallRuntime(Runtime::kCompileOptimized, 2); |
| 975 // Restore receiver. | 975 // Restore receiver. |
| (...skipping 97 matching lines...) |
| 1073 | 1073 |
| 1074 | 1074 |
| 1075 void Builtins::Generate_MarkCodeAsToBeExecutedOnce(MacroAssembler* masm) { | 1075 void Builtins::Generate_MarkCodeAsToBeExecutedOnce(MacroAssembler* masm) { |
| 1076 Generate_MarkCodeAsExecutedOnce(masm); | 1076 Generate_MarkCodeAsExecutedOnce(masm); |
| 1077 } | 1077 } |
| 1078 | 1078 |
| 1079 | 1079 |
| 1080 static void Generate_NotifyStubFailureHelper(MacroAssembler* masm, | 1080 static void Generate_NotifyStubFailureHelper(MacroAssembler* masm, |
| 1081 SaveFPRegsMode save_doubles) { | 1081 SaveFPRegsMode save_doubles) { |
| 1082 { | 1082 { |
| 1083 FrameScope scope(masm, StackFrame::INTERNAL); | 1083 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); |
| 1084 | 1084 |
| 1085 // Preserve registers across notification, this is important for compiled | 1085 // Preserve registers across notification, this is important for compiled |
| 1086 // stubs that tail call the runtime on deopts passing their parameters in | 1086 // stubs that tail call the runtime on deopts passing their parameters in |
| 1087 // registers. | 1087 // registers. |
| 1088 __ MultiPush(kJSCallerSaved | kCalleeSaved); | 1088 __ MultiPush(kJSCallerSaved | kCalleeSaved); |
| 1089 // Pass the function and deoptimization type to the runtime system. | 1089 // Pass the function and deoptimization type to the runtime system. |
| 1090 __ CallRuntime(Runtime::kNotifyStubFailure, 0, save_doubles); | 1090 __ CallRuntime(Runtime::kNotifyStubFailure, 0, save_doubles); |
| 1091 __ MultiPop(kJSCallerSaved | kCalleeSaved); | 1091 __ MultiPop(kJSCallerSaved | kCalleeSaved); |
| 1092 } | 1092 } |
| 1093 | 1093 |
| 1094 __ addi(sp, sp, Operand(kPointerSize)); // Ignore state | 1094 __ addi(sp, sp, Operand(kPointerSize)); // Ignore state |
| 1095 __ blr(); // Jump to miss handler | 1095 __ blr(); // Jump to miss handler |
| 1096 } | 1096 } |
| 1097 | 1097 |
| 1098 | 1098 |
| 1099 void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) { | 1099 void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) { |
| 1100 Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs); | 1100 Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs); |
| 1101 } | 1101 } |
| 1102 | 1102 |
| 1103 | 1103 |
| 1104 void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) { | 1104 void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) { |
| 1105 Generate_NotifyStubFailureHelper(masm, kSaveFPRegs); | 1105 Generate_NotifyStubFailureHelper(masm, kSaveFPRegs); |
| 1106 } | 1106 } |
| 1107 | 1107 |
| 1108 | 1108 |
| 1109 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm, | 1109 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm, |
| 1110 Deoptimizer::BailoutType type) { | 1110 Deoptimizer::BailoutType type) { |
| 1111 { | 1111 { |
| 1112 FrameScope scope(masm, StackFrame::INTERNAL); | 1112 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); |
| 1113 // Pass the function and deoptimization type to the runtime system. | 1113 // Pass the function and deoptimization type to the runtime system. |
| 1114 __ LoadSmiLiteral(r3, Smi::FromInt(static_cast<int>(type))); | 1114 __ LoadSmiLiteral(r3, Smi::FromInt(static_cast<int>(type))); |
| 1115 __ push(r3); | 1115 __ push(r3); |
| 1116 __ CallRuntime(Runtime::kNotifyDeoptimized, 1); | 1116 __ CallRuntime(Runtime::kNotifyDeoptimized, 1); |
| 1117 } | 1117 } |
| 1118 | 1118 |
| 1119 // Get the full codegen state from the stack and untag it -> r9. | 1119 // Get the full codegen state from the stack and untag it -> r9. |
| 1120 __ LoadP(r9, MemOperand(sp, 0 * kPointerSize)); | 1120 __ LoadP(r9, MemOperand(sp, 0 * kPointerSize)); |
| 1121 __ SmiUntag(r9); | 1121 __ SmiUntag(r9); |
| 1122 // Switch on the state. | 1122 // Switch on the state. |
| (...skipping 27 matching lines...) |
| 1150 | 1150 |
| 1151 void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) { | 1151 void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) { |
| 1152 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY); | 1152 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY); |
| 1153 } | 1153 } |
| 1154 | 1154 |
| 1155 | 1155 |
| 1156 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) { | 1156 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) { |
| 1157 // Lookup the function in the JavaScript frame. | 1157 // Lookup the function in the JavaScript frame. |
| 1158 __ LoadP(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); | 1158 __ LoadP(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); |
| 1159 { | 1159 { |
| 1160 FrameScope scope(masm, StackFrame::INTERNAL); | 1160 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); |
| 1161 // Pass function as argument. | 1161 // Pass function as argument. |
| 1162 __ push(r3); | 1162 __ push(r3); |
| 1163 __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1); | 1163 __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1); |
| 1164 } | 1164 } |
| 1165 | 1165 |
| 1166 // If the code object is null, just return to the unoptimized code. | 1166 // If the code object is null, just return to the unoptimized code. |
| 1167 Label skip; | 1167 Label skip; |
| 1168 __ CmpSmiLiteral(r3, Smi::FromInt(0), r0); | 1168 __ CmpSmiLiteral(r3, Smi::FromInt(0), r0); |
| 1169 __ bne(&skip); | 1169 __ bne(&skip); |
| 1170 __ Ret(); | 1170 __ Ret(); |
| 1171 | 1171 |
| 1172 __ bind(&skip); | 1172 __ bind(&skip); |
| 1173 | 1173 |
| 1174 // Load deoptimization data from the code object. | 1174 // Load deoptimization data from the code object. |
| 1175 // <deopt_data> = <code>[#deoptimization_data_offset] | 1175 // <deopt_data> = <code>[#deoptimization_data_offset] |
| 1176 __ LoadP(r4, FieldMemOperand(r3, Code::kDeoptimizationDataOffset)); | 1176 __ LoadP(r4, FieldMemOperand(r3, Code::kDeoptimizationDataOffset)); |
| 1177 | 1177 |
| 1178 { | 1178 { |
| 1179 ConstantPoolUnavailableScope constant_pool_unavailable(masm); |
| 1179 __ addi(r3, r3, Operand(Code::kHeaderSize - kHeapObjectTag)); // Code start | 1180 __ addi(r3, r3, Operand(Code::kHeaderSize - kHeapObjectTag)); // Code start |
| 1180 | 1181 |
| 1182 if (FLAG_enable_embedded_constant_pool) { |
| 1183 __ LoadConstantPoolPointerRegisterFromCodeTargetAddress(r3); |
| 1184 } |
| 1185 |
| 1181 // Load the OSR entrypoint offset from the deoptimization data. | 1186 // Load the OSR entrypoint offset from the deoptimization data. |
| 1182 // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset] | 1187 // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset] |
| 1183 __ LoadP(r4, FieldMemOperand( | 1188 __ LoadP(r4, FieldMemOperand( |
| 1184 r4, FixedArray::OffsetOfElementAt( | 1189 r4, FixedArray::OffsetOfElementAt( |
| 1185 DeoptimizationInputData::kOsrPcOffsetIndex))); | 1190 DeoptimizationInputData::kOsrPcOffsetIndex))); |
| 1186 __ SmiUntag(r4); | 1191 __ SmiUntag(r4); |
| 1187 | 1192 |
| 1188 // Compute the target address = code start + osr_offset | 1193 // Compute the target address = code start + osr_offset |
| 1189 __ add(r0, r3, r4); | 1194 __ add(r0, r3, r4); |
| 1190 | 1195 |
| 1191 // And "return" to the OSR entry point of the function. | 1196 // And "return" to the OSR entry point of the function. |
| 1192 __ mtlr(r0); | 1197 __ mtlr(r0); |
| 1193 __ blr(); | 1198 __ blr(); |
| 1194 } | 1199 } |
| 1195 } | 1200 } |
| 1196 | 1201 |
| 1197 | 1202 |
| 1198 void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) { | 1203 void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) { |
| 1199 // We check the stack limit as indicator that recompilation might be done. | 1204 // We check the stack limit as indicator that recompilation might be done. |
| 1200 Label ok; | 1205 Label ok; |
| 1201 __ LoadRoot(ip, Heap::kStackLimitRootIndex); | 1206 __ LoadRoot(ip, Heap::kStackLimitRootIndex); |
| 1202 __ cmpl(sp, ip); | 1207 __ cmpl(sp, ip); |
| 1203 __ bge(&ok); | 1208 __ bge(&ok); |
| 1204 { | 1209 { |
| 1205 FrameScope scope(masm, StackFrame::INTERNAL); | 1210 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); |
| 1206 __ CallRuntime(Runtime::kStackGuard, 0); | 1211 __ CallRuntime(Runtime::kStackGuard, 0); |
| 1207 } | 1212 } |
| 1208 __ Jump(masm->isolate()->builtins()->OnStackReplacement(), | 1213 __ Jump(masm->isolate()->builtins()->OnStackReplacement(), |
| 1209 RelocInfo::CODE_TARGET); | 1214 RelocInfo::CODE_TARGET); |
| 1210 | 1215 |
| 1211 __ bind(&ok); | 1216 __ bind(&ok); |
| 1212 __ Ret(); | 1217 __ Ret(); |
| 1213 } | 1218 } |
| 1214 | 1219 |
| 1215 | 1220 |
| (...skipping 70 matching lines...) |
| 1286 __ beq(&use_global_proxy); | 1291 __ beq(&use_global_proxy); |
| 1287 | 1292 |
| 1288 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE); | 1293 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE); |
| 1289 __ CompareObjectType(r5, r6, r6, FIRST_SPEC_OBJECT_TYPE); | 1294 __ CompareObjectType(r5, r6, r6, FIRST_SPEC_OBJECT_TYPE); |
| 1290 __ bge(&shift_arguments); | 1295 __ bge(&shift_arguments); |
| 1291 | 1296 |
| 1292 __ bind(&convert_to_object); | 1297 __ bind(&convert_to_object); |
| 1293 | 1298 |
| 1294 { | 1299 { |
| 1295 // Enter an internal frame in order to preserve argument count. | 1300 // Enter an internal frame in order to preserve argument count. |
| 1296 FrameScope scope(masm, StackFrame::INTERNAL); | 1301 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); |
| 1297 __ SmiTag(r3); | 1302 __ SmiTag(r3); |
| 1298 __ Push(r3, r5); | 1303 __ Push(r3, r5); |
| 1299 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); | 1304 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); |
| 1300 __ mr(r5, r3); | 1305 __ mr(r5, r3); |
| 1301 | 1306 |
| 1302 __ pop(r3); | 1307 __ pop(r3); |
| 1303 __ SmiUntag(r3); | 1308 __ SmiUntag(r3); |
| 1304 | 1309 |
| 1305 // Exit the internal frame. | 1310 // Exit the internal frame. |
| 1306 } | 1311 } |
| (...skipping 146 matching lines...) |
| 1453 __ SmiUntag(r3, key); | 1458 __ SmiUntag(r3, key); |
| 1454 } | 1459 } |
| 1455 | 1460 |
| 1456 | 1461 |
| 1457 // Used by FunctionApply and ReflectApply | 1462 // Used by FunctionApply and ReflectApply |
| 1458 static void Generate_ApplyHelper(MacroAssembler* masm, bool targetIsArgument) { | 1463 static void Generate_ApplyHelper(MacroAssembler* masm, bool targetIsArgument) { |
| 1459 const int kFormalParameters = targetIsArgument ? 3 : 2; | 1464 const int kFormalParameters = targetIsArgument ? 3 : 2; |
| 1460 const int kStackSize = kFormalParameters + 1; | 1465 const int kStackSize = kFormalParameters + 1; |
| 1461 | 1466 |
| 1462 { | 1467 { |
| 1463 FrameScope frame_scope(masm, StackFrame::INTERNAL); | 1468 FrameAndConstantPoolScope frame_scope(masm, StackFrame::INTERNAL); |
| 1464 const int kArgumentsOffset = kFPOnStackSize + kPCOnStackSize; | 1469 const int kArgumentsOffset = kFPOnStackSize + kPCOnStackSize; |
| 1465 const int kReceiverOffset = kArgumentsOffset + kPointerSize; | 1470 const int kReceiverOffset = kArgumentsOffset + kPointerSize; |
| 1466 const int kFunctionOffset = kReceiverOffset + kPointerSize; | 1471 const int kFunctionOffset = kReceiverOffset + kPointerSize; |
| 1467 | 1472 |
| 1468 __ LoadP(r3, MemOperand(fp, kFunctionOffset)); // get the function | 1473 __ LoadP(r3, MemOperand(fp, kFunctionOffset)); // get the function |
| 1469 __ push(r3); | 1474 __ push(r3); |
| 1470 __ LoadP(r3, MemOperand(fp, kArgumentsOffset)); // get the args array | 1475 __ LoadP(r3, MemOperand(fp, kArgumentsOffset)); // get the args array |
| 1471 __ push(r3); | 1476 __ push(r3); |
| 1472 if (targetIsArgument) { | 1477 if (targetIsArgument) { |
| 1473 __ InvokeBuiltin(Builtins::REFLECT_APPLY_PREPARE, CALL_FUNCTION); | 1478 __ InvokeBuiltin(Builtins::REFLECT_APPLY_PREPARE, CALL_FUNCTION); |
| (...skipping 108 matching lines...) |
| 1582 __ addi(sp, sp, Operand(kStackSize * kPointerSize)); | 1587 __ addi(sp, sp, Operand(kStackSize * kPointerSize)); |
| 1583 __ blr(); | 1588 __ blr(); |
| 1584 } | 1589 } |
| 1585 | 1590 |
| 1586 | 1591 |
| 1587 static void Generate_ConstructHelper(MacroAssembler* masm) { | 1592 static void Generate_ConstructHelper(MacroAssembler* masm) { |
| 1588 const int kFormalParameters = 3; | 1593 const int kFormalParameters = 3; |
| 1589 const int kStackSize = kFormalParameters + 1; | 1594 const int kStackSize = kFormalParameters + 1; |
| 1590 | 1595 |
| 1591 { | 1596 { |
| 1592 FrameScope frame_scope(masm, StackFrame::INTERNAL); | 1597 FrameAndConstantPoolScope frame_scope(masm, StackFrame::INTERNAL); |
| 1593 const int kNewTargetOffset = kFPOnStackSize + kPCOnStackSize; | 1598 const int kNewTargetOffset = kFPOnStackSize + kPCOnStackSize; |
| 1594 const int kArgumentsOffset = kNewTargetOffset + kPointerSize; | 1599 const int kArgumentsOffset = kNewTargetOffset + kPointerSize; |
| 1595 const int kFunctionOffset = kArgumentsOffset + kPointerSize; | 1600 const int kFunctionOffset = kArgumentsOffset + kPointerSize; |
| 1596 | 1601 |
| 1597 // If newTarget is not supplied, set it to constructor | 1602 // If newTarget is not supplied, set it to constructor |
| 1598 Label validate_arguments; | 1603 Label validate_arguments; |
| 1599 __ LoadP(r3, MemOperand(fp, kNewTargetOffset)); | 1604 __ LoadP(r3, MemOperand(fp, kNewTargetOffset)); |
| 1600 __ CompareRoot(r3, Heap::kUndefinedValueRootIndex); | 1605 __ CompareRoot(r3, Heap::kUndefinedValueRootIndex); |
| 1601 __ bne(&validate_arguments); | 1606 __ bne(&validate_arguments); |
| 1602 __ LoadP(r3, MemOperand(fp, kFunctionOffset)); | 1607 __ LoadP(r3, MemOperand(fp, kFunctionOffset)); |
| (...skipping 79 matching lines...) |
| 1682 __ cmp(r8, r0); | 1687 __ cmp(r8, r0); |
| 1683 __ ble(stack_overflow); // Signed comparison. | 1688 __ ble(stack_overflow); // Signed comparison. |
| 1684 } | 1689 } |
| 1685 | 1690 |
| 1686 | 1691 |
| 1687 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) { | 1692 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) { |
| 1688 __ SmiTag(r3); | 1693 __ SmiTag(r3); |
| 1689 __ LoadSmiLiteral(r7, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); | 1694 __ LoadSmiLiteral(r7, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); |
| 1690 __ mflr(r0); | 1695 __ mflr(r0); |
| 1691 __ push(r0); | 1696 __ push(r0); |
| 1692 __ Push(fp, r7, r4, r3); | 1697 if (FLAG_enable_embedded_constant_pool) { |
| 1698 __ Push(fp, kConstantPoolRegister, r7, r4, r3); |
| 1699 } else { |
| 1700 __ Push(fp, r7, r4, r3); |
| 1701 } |
| 1693 __ addi(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp + | 1702 __ addi(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp + |
| 1694 kPointerSize)); | 1703 kPointerSize)); |
| 1695 } | 1704 } |
| 1696 | 1705 |
| 1697 | 1706 |
| 1698 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) { | 1707 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) { |
| 1699 // ----------- S t a t e ------------- | 1708 // ----------- S t a t e ------------- |
| 1700 // -- r3 : result being passed through | 1709 // -- r3 : result being passed through |
| 1701 // ----------------------------------- | 1710 // ----------------------------------- |
| 1702 // Get the number of arguments passed (as a smi), tear down the frame and | 1711 // Get the number of arguments passed (as a smi), tear down the frame and |
| (...skipping 160 matching lines...) |
| 1863 __ bkpt(0); | 1872 __ bkpt(0); |
| 1864 } | 1873 } |
| 1865 } | 1874 } |
| 1866 | 1875 |
| 1867 | 1876 |
| 1868 #undef __ | 1877 #undef __ |
| 1869 } // namespace internal | 1878 } // namespace internal |
| 1870 } // namespace v8 | 1879 } // namespace v8 |
| 1871 | 1880 |
| 1872 #endif // V8_TARGET_ARCH_PPC | 1881 #endif // V8_TARGET_ARCH_PPC |