| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #if V8_TARGET_ARCH_X64 | 5 #if V8_TARGET_ARCH_X64 |
| 6 | 6 |
| 7 #include "src/code-stubs.h" | 7 #include "src/code-stubs.h" |
| 8 #include "src/api-arguments.h" | 8 #include "src/api-arguments.h" |
| 9 #include "src/bootstrapper.h" | 9 #include "src/bootstrapper.h" |
| 10 #include "src/codegen.h" | 10 #include "src/codegen.h" |
| (...skipping 1303 matching lines...) |
| 1314 __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); | 1314 __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); |
| 1315 __ movp(rcx, FieldOperand(rcx, SharedFunctionInfo::kConstructStubOffset)); | 1315 __ movp(rcx, FieldOperand(rcx, SharedFunctionInfo::kConstructStubOffset)); |
| 1316 __ leap(rcx, FieldOperand(rcx, Code::kHeaderSize)); | 1316 __ leap(rcx, FieldOperand(rcx, Code::kHeaderSize)); |
| 1317 __ jmp(rcx); | 1317 __ jmp(rcx); |
| 1318 | 1318 |
| 1319 __ bind(&non_function); | 1319 __ bind(&non_function); |
| 1320 __ movp(rdx, rdi); | 1320 __ movp(rdx, rdi); |
| 1321 __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); | 1321 __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); |
| 1322 } | 1322 } |
| 1323 | 1323 |
| | 1324 static void IncrementCallCount(MacroAssembler* masm, Register feedback_vector, |
| | 1325 Register slot) { |
| | 1326 __ SmiAddConstant(FieldOperand(feedback_vector, slot, times_pointer_size, |
| | 1327 FixedArray::kHeaderSize + kPointerSize), |
| | 1328 Smi::FromInt(1)); |
| | 1329 } |
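The new IncrementCallCount helper factors out the call-count bump that the old code repeated inline (see the SmiAddConstant sequences removed below). The count is kept as a Smi one pointer-size word past the feedback entry, i.e. at vector[slot + 1]. A minimal self-contained model of that layout (plain C++ with invented names, not the V8 API):

  #include <cassert>
  #include <cstdint>
  #include <vector>

  // Model a Smi as a value shifted left one bit (the 32-bit encoding;
  // x64 V8 actually shifts by 32, but the arithmetic works either way).
  using Smi = intptr_t;
  constexpr Smi SmiFromInt(int v) { return static_cast<Smi>(v) << 1; }
  constexpr int SmiToInt(Smi s) { return static_cast<int>(s >> 1); }

  // vector[slot]     -> feedback (weak cell, sentinel, allocation site)
  // vector[slot + 1] -> call count, stored as a Smi
  void IncrementCallCount(std::vector<intptr_t>& feedback_vector, int slot) {
    // Adding Smi(1) to a Smi-encoded count is a plain in-memory add,
    // which is why the stub needs only a single SmiAddConstant.
    feedback_vector[slot + 1] += SmiFromInt(1);
  }

  int main() {
    std::vector<intptr_t> vector(4, 0);
    IncrementCallCount(vector, 2);
    IncrementCallCount(vector, 2);
    assert(SmiToInt(vector[3]) == 2);
  }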
| 1324 | 1330 |
| 1325 void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) { | 1331 void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) { |
| 1326 // rdi - function | 1332 // rdi - function |
| 1327 // rdx - slot id | 1333 // rdx - slot id |
| 1328 // rbx - vector | 1334 // rbx - vector |
| 1329 // rcx - allocation site (loaded from vector[slot]). | 1335 // rcx - allocation site (loaded from vector[slot]). |
| 1330 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r8); | 1336 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r8); |
| 1331 __ cmpp(rdi, r8); | 1337 __ cmpp(rdi, r8); |
| 1332 __ j(not_equal, miss); | 1338 __ j(not_equal, miss); |
| 1333 | 1339 |
| 1334 __ movp(rax, Immediate(arg_count())); | 1340 __ movp(rax, Immediate(arg_count())); |
| 1335 | 1341 |
| 1336 // Increment the call count for monomorphic function calls. | 1342 // Increment the call count for monomorphic function calls. |
| 1337 __ SmiAddConstant(FieldOperand(rbx, rdx, times_pointer_size, | 1343 IncrementCallCount(masm, rbx, rdx); |
| 1338 FixedArray::kHeaderSize + kPointerSize), | |
| 1339 Smi::FromInt(1)); | |
| 1340 | 1344 |
| 1341 __ movp(rbx, rcx); | 1345 __ movp(rbx, rcx); |
| 1342 __ movp(rdx, rdi); | 1346 __ movp(rdx, rdi); |
| 1343 ArrayConstructorStub stub(masm->isolate(), arg_count()); | 1347 ArrayConstructorStub stub(masm->isolate(), arg_count()); |
| 1344 __ TailCallStub(&stub); | 1348 __ TailCallStub(&stub); |
| 1345 } | 1349 } |
| 1346 | 1350 |
| 1347 | 1351 |
| 1348 void CallICStub::Generate(MacroAssembler* masm) { | 1352 void CallICStub::Generate(MacroAssembler* masm) { |
| 1349 // ----------- S t a t e ------------- | 1353 // ----------- S t a t e ------------- |
| 1350 // -- rdi - function | 1354 // -- rdi - function |
| 1351 // -- rdx - slot id | 1355 // -- rdx - slot id |
| 1352 // -- rbx - vector | 1356 // -- rbx - vector |
| 1353 // ----------------------------------- | 1357 // ----------------------------------- |
| 1354 Isolate* isolate = masm->isolate(); | 1358 Isolate* isolate = masm->isolate(); |
| 1355 Label extra_checks_or_miss, call, call_function; | 1359 Label extra_checks_or_miss, call, call_function, call_count_incremented; |
| 1356 int argc = arg_count(); | 1360 int argc = arg_count(); |
| 1357 StackArgumentsAccessor args(rsp, argc); | 1361 StackArgumentsAccessor args(rsp, argc); |
| 1358 ParameterCount actual(argc); | 1362 ParameterCount actual(argc); |
| 1359 | 1363 |
| 1360 // The checks. First, does rdi match the recorded monomorphic target? | 1364 // The checks. First, does rdi match the recorded monomorphic target? |
| 1361 __ SmiToInteger32(rdx, rdx); | 1365 __ SmiToInteger32(rdx, rdx); |
| 1362 __ movp(rcx, | 1366 __ movp(rcx, |
| 1363 FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize)); | 1367 FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize)); |
| 1364 | 1368 |
| 1365 // We don't know that we have a weak cell. We might have a private symbol | 1369 // We don't know that we have a weak cell. We might have a private symbol |
| (...skipping 10 matching lines...) |
| 1376 WeakCell::kValueOffset && | 1380 WeakCell::kValueOffset && |
| 1377 WeakCell::kValueOffset == Symbol::kHashFieldSlot); | 1381 WeakCell::kValueOffset == Symbol::kHashFieldSlot); |
| 1378 | 1382 |
| 1379 __ cmpp(rdi, FieldOperand(rcx, WeakCell::kValueOffset)); | 1383 __ cmpp(rdi, FieldOperand(rcx, WeakCell::kValueOffset)); |
| 1380 __ j(not_equal, &extra_checks_or_miss); | 1384 __ j(not_equal, &extra_checks_or_miss); |
| 1381 | 1385 |
| 1382 // The compare above could have been a SMI/SMI comparison. Guard against this | 1386 // The compare above could have been a SMI/SMI comparison. Guard against this |
| 1383 // convincing us that we have a monomorphic JSFunction. | 1387 // convincing us that we have a monomorphic JSFunction. |
| 1384 __ JumpIfSmi(rdi, &extra_checks_or_miss); | 1388 __ JumpIfSmi(rdi, &extra_checks_or_miss); |
| 1385 | 1389 |
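The monomorphic check above relies on an offset coincidence: the value field of a weak cell and the hash field of a symbol sit at the same offset (the tail of the StaticAssert is visible above), so the stub can load that field without first checking which type vector[slot] holds. A hash value can never equal a JSFunction pointer, and the JumpIfSmi guard rules out the Smi/Smi coincidence mentioned in the comment. A hypothetical sketch of the idea (invented layouts, not V8's):

  #include <cstdint>

  struct WeakCell { const void* value; };    // value at offset 0 here
  struct Symbol   { std::uintptr_t hash; };  // hash at the same offset

  // One load at the shared offset is valid whichever type we hold; the
  // equality can only succeed for a weak cell holding `function`.
  inline bool MonomorphicHit(const void* feedback, const void* function) {
    const void* v = *static_cast<const void* const*>(feedback);
    return v == function;
  }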
| | 1390 __ bind(&call_function); |
| 1386 // Increment the call count for monomorphic function calls. | 1391 // Increment the call count for monomorphic function calls. |
| 1387 __ SmiAddConstant(FieldOperand(rbx, rdx, times_pointer_size, | 1392 IncrementCallCount(masm, rbx, rdx); |
| 1388 FixedArray::kHeaderSize + kPointerSize), | |
| 1389 Smi::FromInt(1)); | |
| 1390 | 1393 |
| 1391 __ bind(&call_function); | |
| 1392 __ Set(rax, argc); | 1394 __ Set(rax, argc); |
| 1393 __ Jump(masm->isolate()->builtins()->CallFunction(convert_mode(), | 1395 __ Jump(masm->isolate()->builtins()->CallFunction(convert_mode(), |
| 1394 tail_call_mode()), | 1396 tail_call_mode()), |
| 1395 RelocInfo::CODE_TARGET); | 1397 RelocInfo::CODE_TARGET); |
| 1396 | 1398 |
| 1397 __ bind(&extra_checks_or_miss); | 1399 __ bind(&extra_checks_or_miss); |
| 1398 Label uninitialized, miss, not_allocation_site; | 1400 Label uninitialized, miss, not_allocation_site; |
| 1399 | 1401 |
| 1400 __ Cmp(rcx, TypeFeedbackVector::MegamorphicSentinel(isolate)); | 1402 __ Cmp(rcx, TypeFeedbackVector::MegamorphicSentinel(isolate)); |
| 1401 __ j(equal, &call); | 1403 __ j(equal, &call); |
| (...skipping 19 matching lines...) |
| 1421 | 1423 |
| 1422 // We are going megamorphic. If the feedback is a JSFunction, it is fine | 1424 // We are going megamorphic. If the feedback is a JSFunction, it is fine |
| 1423 // to handle it here. More complex cases are dealt with in the runtime. | 1425 // to handle it here. More complex cases are dealt with in the runtime. |
| 1424 __ AssertNotSmi(rcx); | 1426 __ AssertNotSmi(rcx); |
| 1425 __ CmpObjectType(rcx, JS_FUNCTION_TYPE, rcx); | 1427 __ CmpObjectType(rcx, JS_FUNCTION_TYPE, rcx); |
| 1426 __ j(not_equal, &miss); | 1428 __ j(not_equal, &miss); |
| 1427 __ Move(FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize), | 1429 __ Move(FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize), |
| 1428 TypeFeedbackVector::MegamorphicSentinel(isolate)); | 1430 TypeFeedbackVector::MegamorphicSentinel(isolate)); |
| 1429 | 1431 |
| 1430 __ bind(&call); | 1432 __ bind(&call); |
| | 1433 |
| | 1434 // Increment the call count for megamorphic function calls. |
| | 1435 IncrementCallCount(masm, rbx, rdx); |
| | 1436 |
| | 1437 __ bind(&call_count_incremented); |
| 1431 __ Set(rax, argc); | 1438 __ Set(rax, argc); |
| 1432 __ Jump(masm->isolate()->builtins()->Call(convert_mode(), tail_call_mode()), | 1439 __ Jump(masm->isolate()->builtins()->Call(convert_mode(), tail_call_mode()), |
| 1433 RelocInfo::CODE_TARGET); | 1440 RelocInfo::CODE_TARGET); |
| 1434 | 1441 |
| 1435 __ bind(&uninitialized); | 1442 __ bind(&uninitialized); |
| 1436 | 1443 |
| 1437 // We are going monomorphic, provided we actually have a JSFunction. | 1444 // We are going monomorphic, provided we actually have a JSFunction. |
| 1438 __ JumpIfSmi(rdi, &miss); | 1445 __ JumpIfSmi(rdi, &miss); |
| 1439 | 1446 |
| 1440 // Goto miss case if we do not have a function. | 1447 // Goto miss case if we do not have a function. |
| 1441 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx); | 1448 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx); |
| 1442 __ j(not_equal, &miss); | 1449 __ j(not_equal, &miss); |
| 1443 | 1450 |
| 1444 // Make sure the function is not the Array() function, which requires special | 1451 // Make sure the function is not the Array() function, which requires special |
| 1445 // behavior on MISS. | 1452 // behavior on MISS. |
| 1446 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, rcx); | 1453 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, rcx); |
| 1447 __ cmpp(rdi, rcx); | 1454 __ cmpp(rdi, rcx); |
| 1448 __ j(equal, &miss); | 1455 __ j(equal, &miss); |
| 1449 | 1456 |
| 1450 // Make sure the function belongs to the same native context. | 1457 // Make sure the function belongs to the same native context. |
| 1451 __ movp(rcx, FieldOperand(rdi, JSFunction::kContextOffset)); | 1458 __ movp(rcx, FieldOperand(rdi, JSFunction::kContextOffset)); |
| 1452 __ movp(rcx, ContextOperand(rcx, Context::NATIVE_CONTEXT_INDEX)); | 1459 __ movp(rcx, ContextOperand(rcx, Context::NATIVE_CONTEXT_INDEX)); |
| 1453 __ cmpp(rcx, NativeContextOperand()); | 1460 __ cmpp(rcx, NativeContextOperand()); |
| 1454 __ j(not_equal, &miss); | 1461 __ j(not_equal, &miss); |
| 1455 | 1462 |
| 1456 // Initialize the call counter. | |
| 1457 __ Move(FieldOperand(rbx, rdx, times_pointer_size, | |
| 1458 FixedArray::kHeaderSize + kPointerSize), | |
| 1459 Smi::FromInt(1)); | |
| 1460 | |
| 1461 // Store the function. Use a stub since we need a frame for allocation. | 1463 // Store the function. Use a stub since we need a frame for allocation. |
| 1462 // rbx - vector | 1464 // rbx - vector |
| 1463 // rdx - slot (needs to be in smi form) | 1465 // rdx - slot (needs to be in smi form) |
| 1464 // rdi - function | 1466 // rdi - function |
| 1465 { | 1467 { |
| 1466 FrameScope scope(masm, StackFrame::INTERNAL); | 1468 FrameScope scope(masm, StackFrame::INTERNAL); |
| 1467 CreateWeakCellStub create_stub(isolate); | 1469 CreateWeakCellStub create_stub(isolate); |
| 1468 | 1470 |
| 1469 __ Integer32ToSmi(rdx, rdx); | 1471 __ Integer32ToSmi(rdx, rdx); |
| | 1472 __ Push(rbx); |
| | 1473 __ Push(rdx); |
| 1470 __ Push(rdi); | 1474 __ Push(rdi); |
| 1471 __ Push(rsi); | 1475 __ Push(rsi); |
| 1472 __ CallStub(&create_stub); | 1476 __ CallStub(&create_stub); |
| 1473 __ Pop(rsi); | 1477 __ Pop(rsi); |
| 1474 __ Pop(rdi); | 1478 __ Pop(rdi); |
| | 1479 __ Pop(rdx); |
| | 1480 __ Pop(rbx); |
| | 1481 __ SmiToInteger32(rdx, rdx); |
| 1475 } | 1482 } |
| 1476 | 1483 |
| 1477 __ jmp(&call_function); | 1484 __ jmp(&call_function); |
| 1478 | 1485 |
| 1479 // We are here because tracing is on or we encountered a MISS case we can't | 1486 // We are here because tracing is on or we encountered a MISS case we can't |
| 1480 // handle here. | 1487 // handle here. |
| 1481 __ bind(&miss); | 1488 __ bind(&miss); |
| 1482 GenerateMiss(masm); | 1489 GenerateMiss(masm); |
| 1483 | 1490 |
| 1484 __ jmp(&call); | 1491 __ jmp(&call_count_incremented); |
| 1485 | 1492 |
| 1486 // Unreachable | 1493 // Unreachable |
| 1487 __ int3(); | 1494 __ int3(); |
| 1488 } | 1495 } |
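Net effect of the restructuring: every path through the IC now counts the call exactly once. The monomorphic hit and the freshly initialized weak-cell path both funnel through call_function, the megamorphic path bumps the count at call, and the miss path jumps to call_count_incremented, skipping the bump on the assumption that the runtime handler has already accounted for the call. A simplified, self-contained model of that flow (invented names, several feedback states omitted, not V8 code):

  struct Slot {
    const void* feedback;  // weak-cell target or a sentinel
    int count;             // call count (a Smi in the real vector)
  };

  // Distinct addresses standing in for V8's sentinel objects.
  static const int tags[2] = {0, 0};
  static const void* const kMegamorphic = &tags[0];
  static const void* const kUninitialized = &tags[1];

  enum class Path { CallFunction, Call };

  // Stand-in for Runtime::kCallIC_Miss: in this model it records the
  // feedback and bumps the count itself, which is why the stub re-enters
  // at call_count_incremented and must not increment a second time.
  static void CallICMiss(Slot& slot, const void* function) {
    slot.feedback = function;
    slot.count += 1;
  }

  Path CallIC(Slot& slot, const void* function, bool is_plain_function) {
    if (slot.feedback == function) {  // monomorphic hit
      slot.count += 1;                // bound at call_function
      return Path::CallFunction;
    }
    if (slot.feedback == kMegamorphic) {
      slot.count += 1;                // bound at call: new in this patch
      return Path::Call;
    }
    if (slot.feedback == kUninitialized && is_plain_function) {
      slot.feedback = function;       // CreateWeakCellStub in the stub
      slot.count += 1;                // jmp call_function does the bump
      return Path::CallFunction;
    }
    CallICMiss(slot, function);       // runtime already counted this call
    return Path::Call;                // jmp call_count_incremented: no bump
  }

This also explains the new Push/Pop pairs around CreateWeakCellStub above: with the explicit counter initialization removed, the vector (rbx) and slot (rdx) must survive the stub call so the fall-through to call_function can find and bump the count.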
| 1489 | 1496 |
| 1490 | |
| 1491 void CallICStub::GenerateMiss(MacroAssembler* masm) { | 1497 void CallICStub::GenerateMiss(MacroAssembler* masm) { |
| 1492 FrameScope scope(masm, StackFrame::INTERNAL); | 1498 FrameScope scope(masm, StackFrame::INTERNAL); |
| 1493 | 1499 |
| 1494 // Push the function and feedback info. | 1500 // Push the function and feedback info. |
| | 1501 __ Integer32ToSmi(rdx, rdx); |
| 1495 __ Push(rdi); | 1502 __ Push(rdi); |
| 1496 __ Push(rbx); | 1503 __ Push(rbx); |
| 1497 __ Integer32ToSmi(rdx, rdx); | |
| 1498 __ Push(rdx); | 1504 __ Push(rdx); |
| 1499 | 1505 |
| 1500 // Call the entry. | 1506 // Call the entry. |
| 1501 __ CallRuntime(Runtime::kCallIC_Miss); | 1507 __ CallRuntime(Runtime::kCallIC_Miss); |
| 1502 | 1508 |
| 1504 // Move result to rdi and exit the internal frame. | 1510 // Move result to rdi and exit the internal frame. |
| 1504 __ movp(rdi, rax); | 1510 __ movp(rdi, rax); |
| 1505 } | 1511 } |
| 1506 | 1512 |
| 1507 | |
| 1508 bool CEntryStub::NeedsImmovableCode() { | 1513 bool CEntryStub::NeedsImmovableCode() { |
| 1509 return false; | 1514 return false; |
| 1510 } | 1515 } |
| 1511 | 1516 |
| 1512 | 1517 |
| 1513 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) { | 1518 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) { |
| 1514 CEntryStub::GenerateAheadOfTime(isolate); | 1519 CEntryStub::GenerateAheadOfTime(isolate); |
| 1515 StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate); | 1520 StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate); |
| 1516 StubFailureTrampolineStub::GenerateAheadOfTime(isolate); | 1521 StubFailureTrampolineStub::GenerateAheadOfTime(isolate); |
| 1517 // It is important that the store buffer overflow stubs are generated first. | 1522 // It is important that the store buffer overflow stubs are generated first. |
| (...skipping 3651 matching lines...) |
| 5169 kStackUnwindSpace, nullptr, return_value_operand, | 5174 kStackUnwindSpace, nullptr, return_value_operand, |
| 5170 NULL); | 5175 NULL); |
| 5171 } | 5176 } |
| 5172 | 5177 |
| 5173 #undef __ | 5178 #undef __ |
| 5174 | 5179 |
| 5175 } // namespace internal | 5180 } // namespace internal |
| 5176 } // namespace v8 | 5181 } // namespace v8 |
| 5177 | 5182 |
| 5178 #endif // V8_TARGET_ARCH_X64 | 5183 #endif // V8_TARGET_ARCH_X64 |