| OLD | NEW | 
|---|---|
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. | 
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be | 
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. | 
| 4 | 4 | 
| 5 #if V8_TARGET_ARCH_IA32 | 5 #if V8_TARGET_ARCH_IA32 | 
| 6 | 6 | 
| 7 #include "src/code-stubs.h" | 7 #include "src/code-stubs.h" | 
| 8 #include "src/api-arguments.h" | 8 #include "src/api-arguments.h" | 
| 9 #include "src/base/bits.h" | 9 #include "src/base/bits.h" | 
| 10 #include "src/bootstrapper.h" | 10 #include "src/bootstrapper.h" | 
| (...skipping 1390 matching lines...) |  | 
| 1401   __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset)); | 1401   __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset)); | 
| 1402   __ mov(ecx, FieldOperand(ecx, SharedFunctionInfo::kConstructStubOffset)); | 1402   __ mov(ecx, FieldOperand(ecx, SharedFunctionInfo::kConstructStubOffset)); | 
| 1403   __ lea(ecx, FieldOperand(ecx, Code::kHeaderSize)); | 1403   __ lea(ecx, FieldOperand(ecx, Code::kHeaderSize)); | 
| 1404   __ jmp(ecx); | 1404   __ jmp(ecx); | 
| 1405 | 1405 | 
| 1406   __ bind(&non_function); | 1406   __ bind(&non_function); | 
| 1407   __ mov(edx, edi); | 1407   __ mov(edx, edi); | 
| 1408   __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); | 1408   __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); | 
| 1409 } | 1409 } | 
| 1410 | 1410 | 
| 1411 static void IncrementCallCount(MacroAssembler* masm, Register feedback_vector, |  | 
| 1412                                Register slot) { |  | 
| 1413   __ add(FieldOperand(feedback_vector, slot, times_half_pointer_size, |  | 
| 1414                       FixedArray::kHeaderSize + kPointerSize), |  | 
| 1415          Immediate(Smi::FromInt(1))); |  | 
| 1416 } |  | 
| 1417 |  | 
| 1418 void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) { |  | 
| 1419   // eax - number of arguments |  | 
| 1420   // edi - function |  | 
| 1421   // edx - slot id |  | 
| 1422   // ebx - vector |  | 
| 1423   __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx); |  | 
| 1424   __ cmp(edi, ecx); |  | 
| 1425   __ j(not_equal, miss); |  | 
| 1426 |  | 
| 1427   // Reload ecx. |  | 
| 1428   __ mov(ecx, FieldOperand(ebx, edx, times_half_pointer_size, |  | 
| 1429                            FixedArray::kHeaderSize)); |  | 
| 1430 |  | 
| 1431   // Increment the call count for monomorphic function calls. |  | 
| 1432   IncrementCallCount(masm, ebx, edx); |  | 
| 1433 |  | 
| 1434   __ mov(ebx, ecx); |  | 
| 1435   __ mov(edx, edi); |  | 
| 1436   ArrayConstructorStub stub(masm->isolate()); |  | 
| 1437   __ TailCallStub(&stub); |  | 
| 1438 |  | 
| 1439   // Unreachable. |  | 
| 1440 } |  | 
| 1441 |  | 
| 1442 |  | 
| 1443 void CallICStub::Generate(MacroAssembler* masm) { |  | 
| 1444   // eax - number of arguments |  | 
| 1445   // edi - function |  | 
| 1446   // edx - slot id |  | 
| 1447   // ebx - vector |  | 
| 1448   Isolate* isolate = masm->isolate(); |  | 
| 1449   Label extra_checks_or_miss, call, call_function, call_count_incremented; |  | 
| 1450 |  | 
| 1451   // The checks. First, does edi match the recorded monomorphic target? |  | 
| 1452   __ mov(ecx, FieldOperand(ebx, edx, times_half_pointer_size, |  | 
| 1453                            FixedArray::kHeaderSize)); |  | 
| 1454 |  | 
| 1455   // We don't know that we have a weak cell. We might have a private symbol |  | 
| 1456   // or an AllocationSite, but the memory is safe to examine. |  | 
| 1457   // AllocationSite::kTransitionInfoOffset - contains a Smi or pointer to |  | 
| 1458   // FixedArray. |  | 
| 1459   // WeakCell::kValueOffset - contains a JSFunction or Smi(0) |  | 
| 1460   // Symbol::kHashFieldSlot - if the low bit is 1, then the hash is not |  | 
| 1461   // computed, meaning that it can't appear to be a pointer. If the low bit is |  | 
| 1462   // 0, then hash is computed, but the 0 bit prevents the field from appearing |  | 
| 1463   // to be a pointer. |  | 
| 1464   STATIC_ASSERT(WeakCell::kSize >= kPointerSize); |  | 
| 1465   STATIC_ASSERT(AllocationSite::kTransitionInfoOffset == |  | 
| 1466                     WeakCell::kValueOffset && |  | 
| 1467                 WeakCell::kValueOffset == Symbol::kHashFieldSlot); |  | 
| 1468 |  | 
| 1469   __ cmp(edi, FieldOperand(ecx, WeakCell::kValueOffset)); |  | 
| 1470   __ j(not_equal, &extra_checks_or_miss); |  | 
| 1471 |  | 
| 1472   // The compare above could have been a SMI/SMI comparison. Guard against this |  | 
| 1473   // convincing us that we have a monomorphic JSFunction. |  | 
| 1474   __ JumpIfSmi(edi, &extra_checks_or_miss); |  | 
| 1475 |  | 
| 1476   __ bind(&call_function); |  | 
| 1477 |  | 
| 1478   // Increment the call count for monomorphic function calls. |  | 
| 1479   IncrementCallCount(masm, ebx, edx); |  | 
| 1480 |  | 
| 1481   __ Jump(masm->isolate()->builtins()->CallFunction(convert_mode(), |  | 
| 1482                                                     tail_call_mode()), |  | 
| 1483           RelocInfo::CODE_TARGET); |  | 
| 1484 |  | 
| 1485   __ bind(&extra_checks_or_miss); |  | 
| 1486   Label uninitialized, miss, not_allocation_site; |  | 
| 1487 |  | 
| 1488   __ cmp(ecx, Immediate(TypeFeedbackVector::MegamorphicSentinel(isolate))); |  | 
| 1489   __ j(equal, &call); |  | 
| 1490 |  | 
| 1491   // Check if we have an allocation site. |  | 
| 1492   __ CompareRoot(FieldOperand(ecx, HeapObject::kMapOffset), |  | 
| 1493                  Heap::kAllocationSiteMapRootIndex); |  | 
| 1494   __ j(not_equal, &not_allocation_site); |  | 
| 1495 |  | 
| 1496   // We have an allocation site. |  | 
| 1497   HandleArrayCase(masm, &miss); |  | 
| 1498 |  | 
| 1499   __ bind(&not_allocation_site); |  | 
| 1500 |  | 
| 1501   // The following cases attempt to handle MISS cases without going to the |  | 
| 1502   // runtime. |  | 
| 1503   if (FLAG_trace_ic) { |  | 
| 1504     __ jmp(&miss); |  | 
| 1505   } |  | 
| 1506 |  | 
| 1507   __ cmp(ecx, Immediate(TypeFeedbackVector::UninitializedSentinel(isolate))); |  | 
| 1508   __ j(equal, &uninitialized); |  | 
| 1509 |  | 
| 1510   // We are going megamorphic. If the feedback is a JSFunction, it is fine |  | 
| 1511   // to handle it here. More complex cases are dealt with in the runtime. |  | 
| 1512   __ AssertNotSmi(ecx); |  | 
| 1513   __ CmpObjectType(ecx, JS_FUNCTION_TYPE, ecx); |  | 
| 1514   __ j(not_equal, &miss); |  | 
| 1515   __ mov( |  | 
| 1516       FieldOperand(ebx, edx, times_half_pointer_size, FixedArray::kHeaderSize), |  | 
| 1517       Immediate(TypeFeedbackVector::MegamorphicSentinel(isolate))); |  | 
| 1518 |  | 
| 1519   __ bind(&call); |  | 
| 1520 |  | 
| 1521   // Increment the call count for megamorphic function calls. |  | 
| 1522   IncrementCallCount(masm, ebx, edx); |  | 
| 1523 |  | 
| 1524   __ bind(&call_count_incremented); |  | 
| 1525 |  | 
| 1526   __ Jump(masm->isolate()->builtins()->Call(convert_mode(), tail_call_mode()), |  | 
| 1527           RelocInfo::CODE_TARGET); |  | 
| 1528 |  | 
| 1529   __ bind(&uninitialized); |  | 
| 1530 |  | 
| 1531   // We are going monomorphic, provided we actually have a JSFunction. |  | 
| 1532   __ JumpIfSmi(edi, &miss); |  | 
| 1533 |  | 
| 1534   // Goto miss case if we do not have a function. |  | 
| 1535   __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx); |  | 
| 1536   __ j(not_equal, &miss); |  | 
| 1537 |  | 
| 1538   // Make sure the function is not the Array() function, which requires special |  | 
| 1539   // behavior on MISS. |  | 
| 1540   __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx); |  | 
| 1541   __ cmp(edi, ecx); |  | 
| 1542   __ j(equal, &miss); |  | 
| 1543 |  | 
| 1544   // Make sure the function belongs to the same native context. |  | 
| 1545   __ mov(ecx, FieldOperand(edi, JSFunction::kContextOffset)); |  | 
| 1546   __ mov(ecx, ContextOperand(ecx, Context::NATIVE_CONTEXT_INDEX)); |  | 
| 1547   __ cmp(ecx, NativeContextOperand()); |  | 
| 1548   __ j(not_equal, &miss); |  | 
| 1549 |  | 
| 1550   // Store the function. Use a stub since we need a frame for allocation. |  | 
| 1551   // eax - number of arguments |  | 
| 1552   // ebx - vector |  | 
| 1553   // edx - slot |  | 
| 1554   // edi - function |  | 
| 1555   { |  | 
| 1556     FrameScope scope(masm, StackFrame::INTERNAL); |  | 
| 1557     CreateWeakCellStub create_stub(isolate); |  | 
| 1558     __ SmiTag(eax); |  | 
| 1559     __ push(eax); |  | 
| 1560     __ push(ebx); |  | 
| 1561     __ push(edx); |  | 
| 1562     __ push(edi); |  | 
| 1563     __ push(esi); |  | 
| 1564     __ CallStub(&create_stub); |  | 
| 1565     __ pop(esi); |  | 
| 1566     __ pop(edi); |  | 
| 1567     __ pop(edx); |  | 
| 1568     __ pop(ebx); |  | 
| 1569     __ pop(eax); |  | 
| 1570     __ SmiUntag(eax); |  | 
| 1571   } |  | 
| 1572 |  | 
| 1573   __ jmp(&call_function); |  | 
| 1574 |  | 
| 1575   // We are here because tracing is on or we encountered a MISS case we can't |  | 
| 1576   // handle here. |  | 
| 1577   __ bind(&miss); |  | 
| 1578   GenerateMiss(masm); |  | 
| 1579 |  | 
| 1580   __ jmp(&call_count_incremented); |  | 
| 1581 |  | 
| 1582   // Unreachable |  | 
| 1583   __ int3(); |  | 
| 1584 } |  | 
| 1585 |  | 
| 1586 |  | 
| 1587 void CallICStub::GenerateMiss(MacroAssembler* masm) { |  | 
| 1588   FrameScope scope(masm, StackFrame::INTERNAL); |  | 
| 1589 |  | 
| 1590   // Preserve the number of arguments. |  | 
| 1591   __ SmiTag(eax); |  | 
| 1592   __ push(eax); |  | 
| 1593 |  | 
| 1594   // Push the function and feedback info. |  | 
| 1595   __ push(edi); |  | 
| 1596   __ push(ebx); |  | 
| 1597   __ push(edx); |  | 
| 1598 |  | 
| 1599   // Call the entry. |  | 
| 1600   __ CallRuntime(Runtime::kCallIC_Miss); |  | 
| 1601 |  | 
| 1602   // Move result to edi and exit the internal frame. |  | 
| 1603   __ mov(edi, eax); |  | 
| 1604 |  | 
| 1605   // Restore number of arguments. |  | 
| 1606   __ pop(eax); |  | 
| 1607   __ SmiUntag(eax); |  | 
| 1608 } |  | 
| 1609 |  | 
| 1610 |  | 
| 1611 bool CEntryStub::NeedsImmovableCode() { | 1411 bool CEntryStub::NeedsImmovableCode() { | 
| 1612   return false; | 1412   return false; | 
| 1613 } | 1413 } | 
| 1614 | 1414 | 
| 1615 | 1415 | 
| 1616 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) { | 1416 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) { | 
| 1617   CEntryStub::GenerateAheadOfTime(isolate); | 1417   CEntryStub::GenerateAheadOfTime(isolate); | 
| 1618   StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate); | 1418   StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate); | 
| 1619   StubFailureTrampolineStub::GenerateAheadOfTime(isolate); | 1419   StubFailureTrampolineStub::GenerateAheadOfTime(isolate); | 
| 1620   // It is important that the store buffer overflow stubs are generated first. | 1420   // It is important that the store buffer overflow stubs are generated first. | 
| (...skipping 2573 matching lines...) |  | 
| 4194                            kStackUnwindSpace, nullptr, return_value_operand, | 3994                            kStackUnwindSpace, nullptr, return_value_operand, | 
| 4195                            NULL); | 3995                            NULL); | 
| 4196 } | 3996 } | 
| 4197 | 3997 | 
| 4198 #undef __ | 3998 #undef __ | 
| 4199 | 3999 | 
| 4200 }  // namespace internal | 4000 }  // namespace internal | 
| 4201 }  // namespace v8 | 4001 }  // namespace v8 | 
| 4202 | 4002 | 
| 4203 #endif  // V8_TARGET_ARCH_IA32 | 4003 #endif  // V8_TARGET_ARCH_IA32 | 
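
The deleted `IncrementCallCount` helper bumps the call count stored one slot past the feedback entry by adding `Smi::FromInt(1)` directly to the tagged word. Below is a minimal sketch of why a single `add` suffices, assuming the ia32 Smi encoding (value shifted left by one, tag bit zero); the `SmiFromInt`/`SmiToInt` helpers are illustrative stand-ins, not V8's API.

```cpp
#include <cassert>
#include <cstdint>

// Assumed ia32-style Smi encoding: 31-bit payload, low tag bit 0.
using TaggedWord = int32_t;

constexpr TaggedWord SmiFromInt(int32_t value) { return value << 1; }
constexpr int32_t SmiToInt(TaggedWord word) { return word >> 1; }

int main() {
  // A feedback-vector slot holding a call count of 41, stored as a Smi.
  TaggedWord call_count_slot = SmiFromInt(41);

  // Adding the raw tagged word for Smi(1) increments the count without
  // untagging, which is all the removed
  // `add ..., Immediate(Smi::FromInt(1))` had to do.
  call_count_slot += SmiFromInt(1);

  assert(SmiToInt(call_count_slot) == 42);
  return 0;
}
```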
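The comment block around the STATIC_ASSERTs in the removed `CallICStub::Generate` relies on three feedback-object layouts sharing one field offset, so a single `cmp edi, FieldOperand(ecx, WeakCell::kValueOffset)` is safe before the feedback object's type is known. The sketch below expresses only that offset-equality invariant, using made-up stand-in structs (the real V8 object layouts differ).

```cpp
#include <cstddef>

// Hypothetical stand-ins for the three feedback shapes the stub could see.
struct WeakCell       { void* map; void* value;           };  // JSFunction or Smi(0)
struct AllocationSite { void* map; void* transition_info; };  // Smi or FixedArray
struct Symbol         { void* map; void* hash_field;      };  // low bit keeps it non-pointer-like

// The single load/compare in the stub is only valid because the interesting
// word sits at the same offset in all three layouts, mirroring the original
// STATIC_ASSERTs on kValueOffset, kTransitionInfoOffset and kHashFieldSlot.
static_assert(offsetof(WeakCell, value) == offsetof(AllocationSite, transition_info),
              "one FieldOperand load must cover every feedback kind");
static_assert(offsetof(WeakCell, value) == offsetof(Symbol, hash_field),
              "one FieldOperand load must cover every feedback kind");

int main() { return 0; }
```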