OLD | NEW |
---|---|
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/v8.h" | 5 #include "src/v8.h" |
6 | 6 |
7 #if V8_TARGET_ARCH_X64 | 7 #if V8_TARGET_ARCH_X64 |
8 | 8 |
9 #include "src/code-factory.h" | 9 #include "src/code-factory.h" |
10 #include "src/codegen.h" | 10 #include "src/codegen.h" |
(...skipping 1033 matching lines...) | |
1044 __ cmpp(rax, rbx); | 1044 __ cmpp(rax, rbx); |
1045 __ j(not_equal, | 1045 __ j(not_equal, |
1046 masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), | 1046 masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), |
1047 RelocInfo::CODE_TARGET); | 1047 RelocInfo::CODE_TARGET); |
1048 | 1048 |
1049 ParameterCount expected(0); | 1049 ParameterCount expected(0); |
1050 __ InvokeCode(rdx, expected, expected, JUMP_FUNCTION, NullCallWrapper()); | 1050 __ InvokeCode(rdx, expected, expected, JUMP_FUNCTION, NullCallWrapper()); |
1051 } | 1051 } |
1052 | 1052 |
1053 | 1053 |
1054 void Builtins::Generate_FunctionApply(MacroAssembler* masm) { | 1054 // Used by FunctionApply and ReflectApply |
1055 static void Generate_ApplyHelper(MacroAssembler* masm, bool targetIsArgument) { | |
1056 const int kFormalParameters = targetIsArgument ? 3 : 2; | |
1057 const int kStackSize = kFormalParameters + 1; | |
1058 | |
1055 // Stack at entry: | 1059 // Stack at entry: |
1056 // rsp : return address | 1060 // rsp : return address |
1057 // rsp[8] : arguments | 1061 // rsp[8] : arguments |
1058 // rsp[16] : receiver ("this") | 1062 // rsp[16] : receiver ("this") |
1059 // rsp[24] : function | 1063 // rsp[24] : function |
1060 { | 1064 { |
1061 FrameScope frame_scope(masm, StackFrame::INTERNAL); | 1065 FrameScope frame_scope(masm, StackFrame::INTERNAL); |
1062 // Stack frame: | 1066 // Stack frame: |
1063 // rbp : Old base pointer | 1067 // rbp : Old base pointer |
1064 // rbp[8] : return address | 1068 // rbp[8] : return address |
1065 // rbp[16] : function arguments | 1069 // rbp[16] : function arguments |
1066 // rbp[24] : receiver | 1070 // rbp[24] : receiver |
1067 // rbp[32] : function | 1071 // rbp[32] : function |
1068 static const int kArgumentsOffset = kFPOnStackSize + kPCOnStackSize; | 1072 static const int kArgumentsOffset = kFPOnStackSize + kPCOnStackSize; |
1069 static const int kReceiverOffset = kArgumentsOffset + kPointerSize; | 1073 static const int kReceiverOffset = kArgumentsOffset + kPointerSize; |
1070 static const int kFunctionOffset = kReceiverOffset + kPointerSize; | 1074 static const int kFunctionOffset = kReceiverOffset + kPointerSize; |
1071 | 1075 |
1072 __ Push(Operand(rbp, kFunctionOffset)); | 1076 __ Push(Operand(rbp, kFunctionOffset)); |
1073 __ Push(Operand(rbp, kArgumentsOffset)); | 1077 __ Push(Operand(rbp, kArgumentsOffset)); |
1074 __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION); | 1078 if (targetIsArgument) { |
1079 __ InvokeBuiltin(Builtins::REFLECT_APPLY_PREPARE, CALL_FUNCTION); | |
1080 } else { | |
1081 __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION); | |
1082 } | |
1075 | 1083 |
1076 // Check the stack for overflow. We are not trying to catch | 1084 // Check the stack for overflow. We are not trying to catch |
1077 // interruptions (e.g. debug break and preemption) here, so the "real stack | 1085 // interruptions (e.g. debug break and preemption) here, so the "real stack |
1078 // limit" is checked. | 1086 // limit" is checked. |
1079 Label okay; | 1087 Label okay; |
1080 __ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex); | 1088 __ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex); |
1081 __ movp(rcx, rsp); | 1089 __ movp(rcx, rsp); |
1082 // Make rcx the space we have left. The stack might already be overflowed | 1090 // Make rcx the space we have left. The stack might already be overflowed |
1083 // here which will cause rcx to become negative. | 1091 // here which will cause rcx to become negative. |
1084 __ subp(rcx, kScratchRegister); | 1092 __ subp(rcx, kScratchRegister); |
(...skipping 111 matching lines...) | |
1196 // Call the function. | 1204 // Call the function. |
1197 Label call_proxy; | 1205 Label call_proxy; |
1198 ParameterCount actual(rax); | 1206 ParameterCount actual(rax); |
1199 __ SmiToInteger32(rax, key); | 1207 __ SmiToInteger32(rax, key); |
1200 __ movp(rdi, Operand(rbp, kFunctionOffset)); | 1208 __ movp(rdi, Operand(rbp, kFunctionOffset)); |
1201 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx); | 1209 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx); |
1202 __ j(not_equal, &call_proxy); | 1210 __ j(not_equal, &call_proxy); |
1203 __ InvokeFunction(rdi, actual, CALL_FUNCTION, NullCallWrapper()); | 1211 __ InvokeFunction(rdi, actual, CALL_FUNCTION, NullCallWrapper()); |
1204 | 1212 |
1205 frame_scope.GenerateLeaveFrame(); | 1213 frame_scope.GenerateLeaveFrame(); |
1206 __ ret(3 * kPointerSize); // remove this, receiver, and arguments | 1214 __ ret(kStackSize * kPointerSize); // remove this, receiver, and arguments |
1207 | 1215 |
1208 // Call the function proxy. | 1216 // Call the function proxy. |
1209 __ bind(&call_proxy); | 1217 __ bind(&call_proxy); |
1210 __ Push(rdi); // add function proxy as last argument | 1218 __ Push(rdi); // add function proxy as last argument |
1211 __ incp(rax); | 1219 __ incp(rax); |
1212 __ Set(rbx, 0); | 1220 __ Set(rbx, 0); |
1213 __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY); | 1221 __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY); |
1214 __ call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), | 1222 __ call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), |
1215 RelocInfo::CODE_TARGET); | 1223 RelocInfo::CODE_TARGET); |
1216 | 1224 |
1217 // Leave internal frame. | 1225 // Leave internal frame. |
1218 } | 1226 } |
1219 __ ret(3 * kPointerSize); // remove this, receiver, and arguments | 1227 __ ret(kStackSize * kPointerSize); // remove this, receiver, and arguments |
1220 } | 1228 } |
1221 | 1229 |
1222 | 1230 |
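Aside (not part of the CL): the targetIsArgument flag changes two things in Generate_ApplyHelper, namely which PREPARE builtin is invoked and how many slots the final `ret(kStackSize * kPointerSize)` pops. A minimal sketch of the latter, assuming x64's kPointerSize of 8; the StackBytesToPop name is hypothetical:

```cpp
// Illustrative only: bytes popped by ret(kStackSize * kPointerSize)
// for each user of Generate_ApplyHelper.
constexpr int StackBytesToPop(bool target_is_argument) {
  // Formal parameters (2 for Function.prototype.apply: thisArg, argArray;
  // 3 for Reflect.apply: target, thisArgument, argumentsList),
  // plus one receiver slot, times the 8-byte pointer size.
  return ((target_is_argument ? 3 : 2) + 1) * 8;
}
static_assert(StackBytesToPop(false) == 24, "Function.prototype.apply");
static_assert(StackBytesToPop(true) == 32, "Reflect.apply");
```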
1231 // Used by ReflectConstruct | |
1232 static void Generate_ConstructHelper(MacroAssembler* masm) { | |
arv (2015/03/04 09:45:01): Can the code in here be better shared with Generat…
caitp (2015/03/04 13:24:14): It was my first approach, but it made things reall…
| |
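Regarding the thread above: a minimal sketch of how the real-stack-limit check that Generate_ApplyHelper and Generate_ConstructHelper both emit could be factored out, using only MacroAssembler calls that already appear in this file. The helper name and the calleeOffset parameter are illustrative, not part of this CL:

```cpp
// Sketch only: shared real-stack-limit check. Assumes rax holds the Smi
// argument count and calleeOffset locates the callee in the caller's frame.
static void Generate_CheckStackOverflow(MacroAssembler* masm,
                                        const int calleeOffset) {
  Label okay;
  __ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex);
  __ movp(rcx, rsp);
  // rcx = space left on the stack; may be negative if already overflowed.
  __ subp(rcx, kScratchRegister);
  // rdx = space needed once the arguments array is unrolled onto the stack.
  __ PositiveSmiTimesPowerOfTwoToInteger64(rdx, rax, kPointerSizeLog2);
  __ cmpp(rcx, rdx);
  __ j(greater, &okay);  // Signed comparison.
  // Out of stack space: push callee and argument count, report the overflow.
  __ Push(Operand(rbp, calleeOffset));
  __ Push(rax);
  __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
  __ bind(&okay);
}
```

Each caller would then replace its inline check with a single Generate_CheckStackOverflow(masm, kFunctionOffset) call.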
1233 const int kFormalParameters = 3; | |
1234 const int kStackSize = kFormalParameters + 1; | |
1235 | |
1236 // Stack at entry: | |
1237 // rsp : return address | |
1238 // rsp[8] : original constructor (new.target) | |
1239 // rsp[16] : arguments | |
1240 // rsp[24] : constructor | |
1241 { | |
1242 FrameScope frame_scope(masm, StackFrame::INTERNAL); | |
1243 // Stack frame: | |
1244 // rbp : Old base pointer | |
1245 // rbp[8] : return address | |
1246 // rbp[16] : original constructor (new.target) | |
1247 // rbp[24] : arguments | |
1248 // rbp[32] : constructor | |
1249 static const int kNewTargetOffset = kFPOnStackSize + kPCOnStackSize; | |
1250 static const int kArgumentsOffset = kNewTargetOffset + kPointerSize; | |
1251 static const int kFunctionOffset = kArgumentsOffset + kPointerSize; | |
1252 | |
1253 // If newTarget is not supplied, set it to constructor | |
1254 Label validate_arguments; | |
1255 __ movp(rax, Operand(rbp, kNewTargetOffset)); | |
1256 __ CompareRoot(rax, Heap::kUndefinedValueRootIndex); | |
1257 __ j(not_equal, &validate_arguments, Label::kNear); | |
1258 __ movp(rax, Operand(rbp, kFunctionOffset)); | |
1259 __ movp(Operand(rbp, kNewTargetOffset), rax); | |
1260 | |
1261 // Validate arguments | |
1262 __ bind(&validate_arguments); | |
1263 __ Push(Operand(rbp, kFunctionOffset)); | |
1264 __ Push(Operand(rbp, kArgumentsOffset)); | |
1265 __ Push(Operand(rbp, kNewTargetOffset)); | |
1266 __ InvokeBuiltin(Builtins::REFLECT_CONSTRUCT_PREPARE, CALL_FUNCTION); | |
1267 | |
1268 // Check the stack for overflow. We are not trying to catch | |
1269 // interruptions (e.g. debug break and preemption) here, so the "real stack | |
1270 // limit" is checked. | |
1271 Label okay; | |
1272 __ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex); | |
1273 __ movp(rcx, rsp); | |
1274 // Make rcx the space we have left. The stack might already be overflowed | |
1275 // here which will cause rcx to become negative. | |
1276 __ subp(rcx, kScratchRegister); | |
1277 // Make rdx the space we need for the array when it is unrolled onto the | |
1278 // stack. | |
1279 __ PositiveSmiTimesPowerOfTwoToInteger64(rdx, rax, kPointerSizeLog2); | |
1280 // Check if the arguments will overflow the stack. | |
1281 __ cmpp(rcx, rdx); | |
1282 __ j(greater, &okay); // Signed comparison. | |
1283 | |
1284 // Out of stack space. | |
1285 __ Push(Operand(rbp, kFunctionOffset)); | |
1286 __ Push(rax); | |
1287 __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION); | |
1288 __ bind(&okay); | |
1289 // End of stack check. | |
1290 | |
1291 // Push current index and limit. | |
1292 const int kLimitOffset = | |
1293 StandardFrameConstants::kExpressionsOffset - 1 * kPointerSize; | |
1294 const int kIndexOffset = kLimitOffset - 1 * kPointerSize; | |
1295 __ Push(rax); // limit | |
1296 __ Push(Immediate(0)); // index | |
1297 // Push newTarget and callee functions | |
1298 __ Push(Operand(rbp, kNewTargetOffset)); | |
1299 __ Push(Operand(rbp, kFunctionOffset)); | |
1300 | |
1301 // Copy all arguments from the array to the stack. | |
1302 Label entry, loop; | |
1303 Register receiver = LoadDescriptor::ReceiverRegister(); | |
1304 Register key = LoadDescriptor::NameRegister(); | |
1305 __ movp(key, Operand(rbp, kIndexOffset)); | |
1306 __ jmp(&entry); | |
1307 __ bind(&loop); | |
1308 __ movp(receiver, Operand(rbp, kArgumentsOffset)); // load arguments | |
1309 | |
1310 // Use inline caching to speed up access to arguments. | |
1311 if (FLAG_vector_ics) { | |
1312 // TODO(mvstanton): Vector-based ics need additional infrastructure to | |
1313 // be embedded here. For now, just call the runtime. | |
1314 __ Push(receiver); | |
1315 __ Push(key); | |
1316 __ CallRuntime(Runtime::kGetProperty, 2); | |
1317 } else { | |
1318 Handle<Code> ic = CodeFactory::KeyedLoadIC(masm->isolate()).code(); | |
1319 __ Call(ic, RelocInfo::CODE_TARGET); | |
1320 // It is important that we do not have a test instruction after the | |
1321 // call. A test instruction after the call is used to indicate that | |
1322 // we have generated an inline version of the keyed load. In this | |
1323 // case, we know that we are not generating a test instruction next. | |
1324 } | |
1325 | |
1326 // Push the nth argument. | |
1327 __ Push(rax); | |
1328 | |
1329 // Update the index on the stack and in register key. | |
1330 __ movp(key, Operand(rbp, kIndexOffset)); | |
1331 __ SmiAddConstant(key, key, Smi::FromInt(1)); | |
1332 __ movp(Operand(rbp, kIndexOffset), key); | |
1333 | |
1334 __ bind(&entry); | |
1335 __ cmpp(key, Operand(rbp, kLimitOffset)); | |
1336 __ j(not_equal, &loop); | |
1337 | |
1338 // Use undefined feedback vector | |
1339 __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex); | |
1340 | |
1341 __ movp(rax, Operand(rbp, kLimitOffset)); | |
1342 __ SmiToInteger64(rax, rax); | |
1343 __ movp(rdi, Operand(rbp, kFunctionOffset)); | |
1344 | |
1345 // Call the function. | |
1346 CallConstructStub stub(masm->isolate(), SUPER_CONSTRUCTOR_CALL); | |
1347 __ call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL); | |
1348 | |
1349 __ Drop(1); | |
1350 | |
1351 // Leave internal frame. | |
1352 } | |
1353 __ ret(kStackSize * kPointerSize); // remove this, receiver, and arguments | |
1354 } | |
1355 | |
1356 | |
1357 void Builtins::Generate_FunctionApply(MacroAssembler* masm) { | |
1358 Generate_ApplyHelper(masm, false); | |
1359 } | |
1360 | |
1361 | |
1362 void Builtins::Generate_ReflectApply(MacroAssembler* masm) { | |
1363 Generate_ApplyHelper(masm, true); | |
1364 } | |
1365 | |
1366 | |
1367 void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) { | |
1368 Generate_ConstructHelper(masm); | |
1369 } | |
1370 | |
1371 | |
1223 void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) { | 1372 void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) { |
1224 // ----------- S t a t e ------------- | 1373 // ----------- S t a t e ------------- |
1225 // -- rax : argc | 1374 // -- rax : argc |
1226 // -- rsp[0] : return address | 1375 // -- rsp[0] : return address |
1227 // -- rsp[8] : last argument | 1376 // -- rsp[8] : last argument |
1228 // ----------------------------------- | 1377 // ----------------------------------- |
1229 Label generic_array_code; | 1378 Label generic_array_code; |
1230 | 1379 |
1231 // Get the InternalArray function. | 1380 // Get the InternalArray function. |
1232 __ LoadGlobalFunction(Context::INTERNAL_ARRAY_FUNCTION_INDEX, rdi); | 1381 __ LoadGlobalFunction(Context::INTERNAL_ARRAY_FUNCTION_INDEX, rdi); |
(...skipping 382 matching lines...) | |
1615 __ bind(&ok); | 1764 __ bind(&ok); |
1616 __ ret(0); | 1765 __ ret(0); |
1617 } | 1766 } |
1618 | 1767 |
1619 | 1768 |
1620 #undef __ | 1769 #undef __ |
1621 | 1770 |
1622 } } // namespace v8::internal | 1771 } } // namespace v8::internal |
1623 | 1772 |
1624 #endif // V8_TARGET_ARCH_X64 | 1773 #endif // V8_TARGET_ARCH_X64 |