| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #if V8_TARGET_ARCH_X64 | 5 #if V8_TARGET_ARCH_X64 |
| 6 | 6 |
| 7 #include "src/code-stubs.h" | 7 #include "src/code-stubs.h" |
| 8 #include "src/api-arguments.h" | 8 #include "src/api-arguments.h" |
| 9 #include "src/bootstrapper.h" | 9 #include "src/bootstrapper.h" |
| 10 #include "src/codegen.h" | 10 #include "src/codegen.h" |
| (...skipping 1278 matching lines...) | |
| 1289 __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); | 1289 __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); |
| 1290 __ movp(rcx, FieldOperand(rcx, SharedFunctionInfo::kConstructStubOffset)); | 1290 __ movp(rcx, FieldOperand(rcx, SharedFunctionInfo::kConstructStubOffset)); |
| 1291 __ leap(rcx, FieldOperand(rcx, Code::kHeaderSize)); | 1291 __ leap(rcx, FieldOperand(rcx, Code::kHeaderSize)); |
| 1292 __ jmp(rcx); | 1292 __ jmp(rcx); |
| 1293 | 1293 |
| 1294 __ bind(&non_function); | 1294 __ bind(&non_function); |
| 1295 __ movp(rdx, rdi); | 1295 __ movp(rdx, rdi); |
| 1296 __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); | 1296 __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); |
| 1297 } | 1297 } |
| 1298 | 1298 |
| 1299 static void IncrementCallCount(MacroAssembler* masm, Register feedback_vector, | |
| 1300 Register slot) { | |
| 1301 __ SmiAddConstant(FieldOperand(feedback_vector, slot, times_pointer_size, | |
| 1302 FixedArray::kHeaderSize + kPointerSize), | |
| 1303 Smi::FromInt(1)); | |
| 1304 } | |
| 1305 | |
| 1306 void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) { | |
| 1307 // rdi - function | |
| 1308 // rdx - slot id | |
| 1309 // rbx - vector | |
| 1310 // rcx - allocation site (loaded from vector[slot]). | |
| 1311 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r8); | |
| 1312 __ cmpp(rdi, r8); | |
| 1313 __ j(not_equal, miss); | |
| 1314 | |
| 1315 // Increment the call count for monomorphic function calls. | |
| 1316 IncrementCallCount(masm, rbx, rdx); | |
| 1317 | |
| 1318 __ movp(rbx, rcx); | |
| 1319 __ movp(rdx, rdi); | |
| 1320 ArrayConstructorStub stub(masm->isolate()); | |
| 1321 __ TailCallStub(&stub); | |
| 1322 } | |
| 1323 | |
| 1324 | |
| 1325 void CallICStub::Generate(MacroAssembler* masm) { | |
| 1326 // ----------- S t a t e ------------- | |
| 1327 // -- rax - number of arguments | |
| 1328 // -- rdi - function | |
| 1329 // -- rdx - slot id | |
| 1330 // -- rbx - vector | |
| 1331 // ----------------------------------- | |
| 1332 Isolate* isolate = masm->isolate(); | |
| 1333 Label extra_checks_or_miss, call, call_function, call_count_incremented; | |
| 1334 | |
| 1335 // The checks. First, does rdi match the recorded monomorphic target? | |
| 1336 __ SmiToInteger32(rdx, rdx); | |
| 1337 __ movp(rcx, | |
| 1338 FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize)); | |
| 1339 | |
| 1340 // We don't know that we have a weak cell. We might have a private symbol | |
| 1341 // or an AllocationSite, but the memory is safe to examine. | |
| 1342 // AllocationSite::kTransitionInfoOffset - contains a Smi or pointer to | |
| 1343 // FixedArray. | |
| 1344 // WeakCell::kValueOffset - contains a JSFunction or Smi(0) | |
| 1345 // Symbol::kHashFieldSlot - if the low bit is 1, then the hash is not | |
| 1346 // computed, meaning that it can't appear to be a pointer. If the low bit is | |
| 1347 // 0, then hash is computed, but the 0 bit prevents the field from appearing | |
| 1348 // to be a pointer. | |
| 1349 STATIC_ASSERT(WeakCell::kSize >= kPointerSize); | |
| 1350 STATIC_ASSERT(AllocationSite::kTransitionInfoOffset == | |
| 1351 WeakCell::kValueOffset && | |
| 1352 WeakCell::kValueOffset == Symbol::kHashFieldSlot); | |
| 1353 | |
| 1354 __ cmpp(rdi, FieldOperand(rcx, WeakCell::kValueOffset)); | |
| 1355 __ j(not_equal, &extra_checks_or_miss); | |
| 1356 | |
| 1357 // The compare above could have been a SMI/SMI comparison. Guard against this | |
| 1358 // convincing us that we have a monomorphic JSFunction. | |
| 1359 __ JumpIfSmi(rdi, &extra_checks_or_miss); | |
| 1360 | |
| 1361 __ bind(&call_function); | |
| 1362 // Increment the call count for monomorphic function calls. | |
| 1363 IncrementCallCount(masm, rbx, rdx); | |
| 1364 | |
| 1365 __ Jump(masm->isolate()->builtins()->CallFunction(convert_mode(), | |
| 1366 tail_call_mode()), | |
| 1367 RelocInfo::CODE_TARGET); | |
| 1368 | |
| 1369 __ bind(&extra_checks_or_miss); | |
| 1370 Label uninitialized, miss, not_allocation_site; | |
| 1371 | |
| 1372 __ Cmp(rcx, TypeFeedbackVector::MegamorphicSentinel(isolate)); | |
| 1373 __ j(equal, &call); | |
| 1374 | |
| 1375 // Check if we have an allocation site. | |
| 1376 __ CompareRoot(FieldOperand(rcx, HeapObject::kMapOffset), | |
| 1377 Heap::kAllocationSiteMapRootIndex); | |
| 1378 __ j(not_equal, ¬_allocation_site); | |
| 1379 | |
| 1380 // We have an allocation site. | |
| 1381 HandleArrayCase(masm, &miss); | |
| 1382 | |
| 1383 __ bind(¬_allocation_site); | |
| 1384 | |
| 1385 // The following cases attempt to handle MISS cases without going to the | |
| 1386 // runtime. | |
| 1387 if (FLAG_trace_ic) { | |
| 1388 __ jmp(&miss); | |
| 1389 } | |
| 1390 | |
| 1391 __ Cmp(rcx, TypeFeedbackVector::UninitializedSentinel(isolate)); | |
| 1392 __ j(equal, &uninitialized); | |
| 1393 | |
| 1394 // We are going megamorphic. If the feedback is a JSFunction, it is fine | |
| 1395 // to handle it here. More complex cases are dealt with in the runtime. | |
| 1396 __ AssertNotSmi(rcx); | |
| 1397 __ CmpObjectType(rcx, JS_FUNCTION_TYPE, rcx); | |
| 1398 __ j(not_equal, &miss); | |
| 1399 __ Move(FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize), | |
| 1400 TypeFeedbackVector::MegamorphicSentinel(isolate)); | |
| 1401 | |
| 1402 __ bind(&call); | |
| 1403 | |
| 1404 // Increment the call count for megamorphic function calls. | |
| 1405 IncrementCallCount(masm, rbx, rdx); | |
| 1406 | |
| 1407 __ bind(&call_count_incremented); | |
| 1408 __ Jump(masm->isolate()->builtins()->Call(convert_mode(), tail_call_mode()), | |
| 1409 RelocInfo::CODE_TARGET); | |
| 1410 | |
| 1411 __ bind(&uninitialized); | |
| 1412 | |
| 1413 // We are going monomorphic, provided we actually have a JSFunction. | |
| 1414 __ JumpIfSmi(rdi, &miss); | |
| 1415 | |
| 1416 // Go to the miss case if we do not have a function. | |
| 1417 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx); | |
| 1418 __ j(not_equal, &miss); | |
| 1419 | |
| 1420 // Make sure the function is not the Array() function, which requires special | |
| 1421 // behavior on MISS. | |
| 1422 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, rcx); | |
| 1423 __ cmpp(rdi, rcx); | |
| 1424 __ j(equal, &miss); | |
| 1425 | |
| 1426 // Make sure the function belongs to the same native context. | |
| 1427 __ movp(rcx, FieldOperand(rdi, JSFunction::kContextOffset)); | |
| 1428 __ movp(rcx, ContextOperand(rcx, Context::NATIVE_CONTEXT_INDEX)); | |
| 1429 __ cmpp(rcx, NativeContextOperand()); | |
| 1430 __ j(not_equal, &miss); | |
| 1431 | |
| 1432 // Store the function. Use a stub since we need a frame for allocation. | |
| 1433 // rbx - vector | |
| 1434 // rdx - slot (needs to be in smi form) | |
| 1435 // rdi - function | |
| 1436 { | |
| 1437 FrameScope scope(masm, StackFrame::INTERNAL); | |
| 1438 CreateWeakCellStub create_stub(isolate); | |
| 1439 | |
| 1440 __ Integer32ToSmi(rax, rax); | |
| 1441 __ Integer32ToSmi(rdx, rdx); | |
| 1442 __ Push(rax); | |
| 1443 __ Push(rbx); | |
| 1444 __ Push(rdx); | |
| 1445 __ Push(rdi); | |
| 1446 __ Push(rsi); | |
| 1447 __ CallStub(&create_stub); | |
| 1448 __ Pop(rsi); | |
| 1449 __ Pop(rdi); | |
| 1450 __ Pop(rdx); | |
| 1451 __ Pop(rbx); | |
| 1452 __ Pop(rax); | |
| 1453 __ SmiToInteger32(rdx, rdx); | |
| 1454 __ SmiToInteger32(rax, rax); | |
| 1455 } | |
| 1456 | |
| 1457 __ jmp(&call_function); | |
| 1458 | |
| 1459 // We are here because tracing is on or we encountered a MISS case we can't | |
| 1460 // handle here. | |
| 1461 __ bind(&miss); | |
| 1462 GenerateMiss(masm); | |
| 1463 | |
| 1464 __ jmp(&call_count_incremented); | |
| 1465 | |
| 1466 // Unreachable | |
| 1467 __ int3(); | |
| 1468 } | |
| 1469 | |
| 1470 void CallICStub::GenerateMiss(MacroAssembler* masm) { | |
| 1471 FrameScope scope(masm, StackFrame::INTERNAL); | |
| 1472 | |
| 1473 // Preserve the number of arguments. | |
| 1474 __ Integer32ToSmi(rax, rax); | |
| 1475 __ Push(rax); | |
| 1476 | |
| 1477 // Push the function and feedback info. | |
| 1478 __ Integer32ToSmi(rdx, rdx); | |
| 1479 __ Push(rdi); | |
| 1480 __ Push(rbx); | |
| 1481 __ Push(rdx); | |
| 1482 | |
| 1483 // Call the entry. | |
| 1484 __ CallRuntime(Runtime::kCallIC_Miss); | |
| 1485 | |
| 1486 // Move result to rdi and exit the internal frame. | |
| 1487 __ movp(rdi, rax); | |
| 1488 | |
| 1489 // Restore number of arguments. | |
| 1490 __ Pop(rax); | |
| 1491 __ SmiToInteger32(rax, rax); | |
| 1492 } | |
| 1493 | |
| 1494 bool CEntryStub::NeedsImmovableCode() { | 1299 bool CEntryStub::NeedsImmovableCode() { |
| 1495 return false; | 1300 return false; |
| 1496 } | 1301 } |
| 1497 | 1302 |
| 1498 | 1303 |
| 1499 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) { | 1304 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) { |
| 1500 CEntryStub::GenerateAheadOfTime(isolate); | 1305 CEntryStub::GenerateAheadOfTime(isolate); |
| 1501 StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate); | 1306 StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate); |
| 1502 StubFailureTrampolineStub::GenerateAheadOfTime(isolate); | 1307 StubFailureTrampolineStub::GenerateAheadOfTime(isolate); |
| 1503 // It is important that the store buffer overflow stubs are generated first. | 1308 // It is important that the store buffer overflow stubs are generated first. |
| (...skipping 2622 matching lines...) | |
| 4126 kStackUnwindSpace, nullptr, return_value_operand, | 3931 kStackUnwindSpace, nullptr, return_value_operand, |
| 4127 NULL); | 3932 NULL); |
| 4128 } | 3933 } |
| 4129 | 3934 |
| 4130 #undef __ | 3935 #undef __ |
| 4131 | 3936 |
| 4132 } // namespace internal | 3937 } // namespace internal |
| 4133 } // namespace v8 | 3938 } // namespace v8 |
| 4134 | 3939 |
| 4135 #endif // V8_TARGET_ARCH_X64 | 3940 #endif // V8_TARGET_ARCH_X64 |
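
For readers less familiar with the feedback-vector layout that the removed `IncrementCallCount` helper touches, here is a small, stand-alone C++ sketch of the same idea: the call count lives one slot after the feedback entry and is stored as a Smi, so the stub bumps it by adding `Smi::FromInt(1)` to that word. The tagging scheme, container, and function names below are simplified assumptions for illustration only, not V8's actual object layout or API (real x64 V8 of this era keeps 32-bit Smis in the upper half of the word).

```cpp
#include <cstdint>
#include <iostream>
#include <vector>

// Simplified Smi tagging for illustration: value << 1, low bit 0 marks a Smi.
static intptr_t SmiFromInt(intptr_t value) { return value << 1; }
static intptr_t SmiToInt(intptr_t smi) { return smi >> 1; }

// Model of the feedback vector: vector[slot] holds the feedback entry
// (a WeakCell, an AllocationSite, or a sentinel), and vector[slot + 1]
// holds the call count as a Smi. The deleted IncrementCallCount adds
// Smi(1) to that second word, which is exactly what this helper models.
void IncrementCallCount(std::vector<intptr_t>& feedback_vector, size_t slot) {
  feedback_vector[slot + 1] += SmiFromInt(1);
}

int main() {
  // One two-word slot: the feedback entry (left as 0 here) and its count.
  std::vector<intptr_t> feedback_vector = {0, SmiFromInt(0)};
  for (int i = 0; i < 3; ++i) IncrementCallCount(feedback_vector, 0);
  std::cout << "call count = " << SmiToInt(feedback_vector[1]) << "\n";  // 3
}
```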