Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 170 matching lines...) | |
| 181 | 181 |
| 182 // Deferred code is the last part of the instruction sequence. Mark | 182 // Deferred code is the last part of the instruction sequence. Mark |
| 183 // the generated code as done unless we bailed out. | 183 // the generated code as done unless we bailed out. |
| 184 if (!is_aborted()) status_ = DONE; | 184 if (!is_aborted()) status_ = DONE; |
| 185 return !is_aborted(); | 185 return !is_aborted(); |
| 186 } | 186 } |
| 187 | 187 |
| 188 | 188 |
| 189 bool LCodeGen::GenerateSafepointTable() { | 189 bool LCodeGen::GenerateSafepointTable() { |
| 190 ASSERT(is_done()); | 190 ASSERT(is_done()); |
| 191 // Ensure that patching a deoptimization point won't overwrite the table. | 191 // Ensure that there is space at the end of the code to write a number |
| 192 for (int i = 0; i < Assembler::kCallInstructionLength; i++) { | 192 // of jump instructions, as well as to afford writing a call near the end |
| 193 masm()->int3(); | 193 // of the code. |
| 194 // The jumps are used when there isn't room in the code stream to write | |
| 195 // a long call instruction. Instead it writes a shorter call to a | |
| 196 // jump instruction in the same code object. | |
| 197 // The calls are used when lazy deoptimizing a function and calls to a | |
| 198 // deoptimization function. | |
| 199 int short_deopts = safepoints_.CountShortDeoptimizationIntervals( | |
| 200 static_cast<unsigned>(MacroAssembler::kJumpInstructionLength)); | |
| 201 int byte_count = MacroAssembler::kCallInstructionLength + | |

> Kevin Millikin (Chromium), 2011/02/04 11:34:54:
> There is already padding in SafepointTable::Emit i
>
> Lasse Reichstein, 2011/02/04 12:32:13:
> I'll remove the extra padding here for now.

| 202 (short_deopts) * MacroAssembler::kJumpInstructionLength; | |
| 203 while (byte_count > 0) { | |
| 204 __ int3(); | |
| 205 byte_count--; | |
| 194 } | 206 } |
| 195 safepoints_.Emit(masm(), StackSlotCount()); | 207 safepoints_.Emit(masm(), StackSlotCount()); |
| 196 return !is_aborted(); | 208 return !is_aborted(); |
| 197 } | 209 } |
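
A note on the hunk above: the byte-count arithmetic can be checked in isolation. The sketch below is a minimal model with made-up instruction lengths; the real `MacroAssembler` constants are platform-specific encodings, so only the shape of the computation carries over. (Per the review thread above, this extra padding was later dropped in favor of the padding already done in `SafepointTable::Emit`.)

```cpp
#include <cstdio>
#include <vector>

// Stand-in values; V8's real x64 instruction lengths may differ.
const int kCallInstructionLength = 13;
const int kJumpInstructionLength = 13;

// Pad the code buffer with int3 (0xCC) so that later patching of the lazy
// deopt call and the short-interval jump trampolines cannot run past the
// end of the code object.
void PadForDeoptPatching(std::vector<unsigned char>* code, int short_deopts) {
  int byte_count = kCallInstructionLength +
                   short_deopts * kJumpInstructionLength;
  while (byte_count-- > 0) code->push_back(0xCC);  // int3 filler
}

int main() {
  std::vector<unsigned char> code;
  PadForDeoptPatching(&code, 3);
  std::printf("padding bytes: %zu\n", code.size());  // 13 + 3*13 = 52
  return 0;
}
```
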
| 198 | 210 |
| 199 | 211 |
| 200 Register LCodeGen::ToRegister(int index) const { | 212 Register LCodeGen::ToRegister(int index) const { |
| 201 return Register::FromAllocationIndex(index); | 213 return Register::FromAllocationIndex(index); |
| 202 } | 214 } |
| 203 | 215 |
| (...skipping 288 matching lines...) | |
| 492 inlined_function_count_ = deoptimization_literals_.length(); | 504 inlined_function_count_ = deoptimization_literals_.length(); |
| 493 } | 505 } |
| 494 | 506 |
| 495 | 507 |
| 496 void LCodeGen::RecordSafepoint( | 508 void LCodeGen::RecordSafepoint( |
| 497 LPointerMap* pointers, | 509 LPointerMap* pointers, |
| 498 Safepoint::Kind kind, | 510 Safepoint::Kind kind, |
| 499 int arguments, | 511 int arguments, |
| 500 int deoptimization_index) { | 512 int deoptimization_index) { |
| 501 const ZoneList<LOperand*>* operands = pointers->operands(); | 513 const ZoneList<LOperand*>* operands = pointers->operands(); |
| 514 | |
| 502 Safepoint safepoint = safepoints_.DefineSafepoint(masm(), | 515 Safepoint safepoint = safepoints_.DefineSafepoint(masm(), |
| 503 kind, arguments, deoptimization_index); | 516 kind, arguments, deoptimization_index); |
| 504 for (int i = 0; i < operands->length(); i++) { | 517 for (int i = 0; i < operands->length(); i++) { |
| 505 LOperand* pointer = operands->at(i); | 518 LOperand* pointer = operands->at(i); |
| 506 if (pointer->IsStackSlot()) { | 519 if (pointer->IsStackSlot()) { |
| 507 safepoint.DefinePointerSlot(pointer->index()); | 520 safepoint.DefinePointerSlot(pointer->index()); |
| 508 } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) { | 521 } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) { |
| 509 safepoint.DefinePointerRegister(ToRegister(pointer)); | 522 safepoint.DefinePointerRegister(ToRegister(pointer)); |
| 510 } | 523 } |
| 511 } | 524 } |
| (...skipping 144 matching lines...) | |
| 656 } | 669 } |
| 657 | 670 |
| 658 | 671 |
| 659 void LCodeGen::DoConstantT(LConstantT* instr) { | 672 void LCodeGen::DoConstantT(LConstantT* instr) { |
| 660 ASSERT(instr->result()->IsRegister()); | 673 ASSERT(instr->result()->IsRegister()); |
| 661 __ Move(ToRegister(instr->result()), instr->value()); | 674 __ Move(ToRegister(instr->result()), instr->value()); |
| 662 } | 675 } |
| 663 | 676 |
| 664 | 677 |
| 665 void LCodeGen::DoJSArrayLength(LJSArrayLength* instr) { | 678 void LCodeGen::DoJSArrayLength(LJSArrayLength* instr) { |
| 666 Abort("Unimplemented: %s", "DoJSArrayLength"); | 679 Register result = ToRegister(instr->result()); |
| 680 Register array = ToRegister(instr->InputAt(0)); | |
| 681 __ movq(result, FieldOperand(array, JSArray::kLengthOffset)); | |
| 667 } | 682 } |
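
The `FieldOperand(array, JSArray::kLengthOffset)` above addresses a field through a tagged heap-object pointer: the operand is the tagged pointer plus the field offset minus the tag. A toy model, assuming the tag is 1 (as on V8's x64 port); the two-word object layout and the offset below are illustrative, not the real `JSArray` layout.

```cpp
#include <cstdint>
#include <cstdio>
#include <cstring>

const intptr_t kHeapObjectTag = 1;  // low pointer bit marks a heap object

// Read a machine word at a field offset from a tagged object pointer,
// compensating for the tag bit carried in the pointer itself.
intptr_t LoadField(const char* tagged_object, int offset) {
  intptr_t value;
  std::memcpy(&value, tagged_object + offset - kHeapObjectTag, sizeof(value));
  return value;
}

int main() {
  // Fake object: one word "map", one word "length".
  intptr_t object[2] = {0, 42};
  const char* tagged = reinterpret_cast<const char*>(object) + kHeapObjectTag;
  int kLengthOffset = static_cast<int>(sizeof(intptr_t));  // second word
  std::printf("length: %ld\n", static_cast<long>(LoadField(tagged, kLengthOffset)));
  return 0;
}
```
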
| 668 | 683 |
| 669 | 684 |
| 670 void LCodeGen::DoFixedArrayLength(LFixedArrayLength* instr) { | 685 void LCodeGen::DoFixedArrayLength(LFixedArrayLength* instr) { |
| 671 Abort("Unimplemented: %s", "DoFixedArrayLength"); | 686 Abort("Unimplemented: %s", "DoFixedArrayLength"); |
| 672 } | 687 } |
| 673 | 688 |
| 674 | 689 |
| 675 void LCodeGen::DoValueOf(LValueOf* instr) { | 690 void LCodeGen::DoValueOf(LValueOf* instr) { |
| 676 Abort("Unimplemented: %s", "DoValueOf"); | 691 Abort("Unimplemented: %s", "DoValueOf"); |
| (...skipping 705 matching lines...) | |
| 1382 __ push(rax); | 1397 __ push(rax); |
| 1383 __ CallRuntime(Runtime::kTraceExit, 1); | 1398 __ CallRuntime(Runtime::kTraceExit, 1); |
| 1384 } | 1399 } |
| 1385 __ movq(rsp, rbp); | 1400 __ movq(rsp, rbp); |
| 1386 __ pop(rbp); | 1401 __ pop(rbp); |
| 1387 __ ret((ParameterCount() + 1) * kPointerSize); | 1402 __ ret((ParameterCount() + 1) * kPointerSize); |
| 1388 } | 1403 } |
| 1389 | 1404 |
| 1390 | 1405 |
| 1391 void LCodeGen::DoLoadGlobal(LLoadGlobal* instr) { | 1406 void LCodeGen::DoLoadGlobal(LLoadGlobal* instr) { |
| 1392 Register result = ToRegister(instr->result()); | 1407 Abort("Unimplemented: %s", "DoLoadGlobal"); |
| 1393 if (result.is(rax)) { | |
| 1394 __ load_rax(instr->hydrogen()->cell().location(), | |
| 1395 RelocInfo::GLOBAL_PROPERTY_CELL); | |
| 1396 } else { | |
| 1397 __ movq(result, instr->hydrogen()->cell(), RelocInfo::GLOBAL_PROPERTY_CELL); | |
| 1398 __ movq(result, Operand(result, 0)); | |
| 1399 } | |
| 1400 if (instr->hydrogen()->check_hole_value()) { | |
| 1401 __ CompareRoot(result, Heap::kTheHoleValueRootIndex); | |
| 1402 DeoptimizeIf(equal, instr->environment()); | |
| 1403 } | |
| 1404 } | 1408 } |
| 1405 | 1409 |
| 1406 | 1410 |
| 1407 void LCodeGen::DoStoreGlobal(LStoreGlobal* instr) { | 1411 void LCodeGen::DoStoreGlobal(LStoreGlobal* instr) { |
| 1408 Register value = ToRegister(instr->InputAt(0)); | 1412 Register value = ToRegister(instr->InputAt(0)); |
| 1409 if (value.is(rax)) { | 1413 Register temp = ToRegister(instr->TempAt(0)); |
| 1414 ASSERT(!value.is(temp)); | |
| 1415 bool check_hole = instr->hydrogen()->check_hole_value(); | |
| 1416 if (!check_hole && value.is(rax)) { | |
| 1410 __ store_rax(instr->hydrogen()->cell().location(), | 1417 __ store_rax(instr->hydrogen()->cell().location(), |
| 1411 RelocInfo::GLOBAL_PROPERTY_CELL); | 1418 RelocInfo::GLOBAL_PROPERTY_CELL); |
| 1412 } else { | 1419 return; |
| 1413 __ movq(kScratchRegister, | |
| 1414 Handle<Object>::cast(instr->hydrogen()->cell()), | |
| 1415 RelocInfo::GLOBAL_PROPERTY_CELL); | |
| 1416 __ movq(Operand(kScratchRegister, 0), value); | |
| 1417 } | 1420 } |
| 1421 // If the cell we are storing to contains the hole it could have | |
| 1422 // been deleted from the property dictionary. In that case, we need | |
| 1423 // to update the property details in the property dictionary to mark | |
| 1424 // it as no longer deleted. We deoptimize in that case. | |
| 1425 __ movq(temp, | |
| 1426 Handle<Object>::cast(instr->hydrogen()->cell()), | |
| 1427 RelocInfo::GLOBAL_PROPERTY_CELL); | |
| 1428 if (check_hole) { | |
| 1429 __ CompareRoot(Operand(temp, 0), Heap::kTheHoleValueRootIndex); | |
| 1430 DeoptimizeIf(equal, instr->environment()); | |
| 1431 } | |
| 1432 __ movq(Operand(temp, 0), value); | |
| 1418 } | 1433 } |
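
The hole check introduced above enforces that a store never silently resurrects a deleted global: if the cell holds the hole, the property's dictionary details would need updating, so the code deoptimizes instead. A toy model of that contract; `Cell` and `StoreGlobal` are illustrative names standing in for V8's property cells and generated code.

```cpp
#include <cassert>
#include <cstdio>

struct Cell {
  int value;
  bool is_hole;  // set when the global property was deleted
};

// Mirrors the generated code: bail out (deoptimize) if the cell holds the
// hole, otherwise store straight into the cell.
bool StoreGlobal(Cell* cell, int value, bool check_hole) {
  if (check_hole && cell->is_hole) return false;  // DeoptimizeIf(equal, ...)
  cell->value = value;
  return true;
}

int main() {
  Cell live = {0, false};
  Cell deleted = {0, true};
  assert(StoreGlobal(&live, 42, true));
  assert(!StoreGlobal(&deleted, 42, true));  // would deopt in real code
  std::printf("ok\n");
  return 0;
}
```
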
| 1419 | 1434 |
| 1420 | 1435 |
| 1421 void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) { | 1436 void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) { |
| 1422 Abort("Unimplemented: %s", "DoLoadContextSlot"); | 1437 Abort("Unimplemented: %s", "DoLoadContextSlot"); |
| 1423 } | 1438 } |
| 1424 | 1439 |
| 1425 | 1440 |
| 1426 void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) { | 1441 void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) { |
| 1427 Register object = ToRegister(instr->InputAt(0)); | 1442 Register object = ToRegister(instr->InputAt(0)); |
| (...skipping 85 matching lines...) | |
| 1513 void LCodeGen::DoGlobalReceiver(LGlobalReceiver* instr) { | 1528 void LCodeGen::DoGlobalReceiver(LGlobalReceiver* instr) { |
| 1514 Register result = ToRegister(instr->result()); | 1529 Register result = ToRegister(instr->result()); |
| 1515 __ movq(result, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX))); | 1530 __ movq(result, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX))); |
| 1516 __ movq(result, FieldOperand(result, GlobalObject::kGlobalReceiverOffset)); | 1531 __ movq(result, FieldOperand(result, GlobalObject::kGlobalReceiverOffset)); |
| 1517 } | 1532 } |
| 1518 | 1533 |
| 1519 | 1534 |
| 1520 void LCodeGen::CallKnownFunction(Handle<JSFunction> function, | 1535 void LCodeGen::CallKnownFunction(Handle<JSFunction> function, |
| 1521 int arity, | 1536 int arity, |
| 1522 LInstruction* instr) { | 1537 LInstruction* instr) { |
| 1523 Abort("Unimplemented: %s", "CallKnownFunction"); | 1538 // Change context if needed. |
| 1539 bool change_context = | |
| 1540 (graph()->info()->closure()->context() != function->context()) || | |
| 1541 scope()->contains_with() || | |
| 1542 (scope()->num_heap_slots() > 0); | |
| 1543 if (change_context) { | |
| 1544 __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset)); | |
| 1545 } | |
| 1546 | |
| 1547 // Set rax to arguments count if adaption is not needed. Assumes that rax | |
| 1548 // is available to write to at this point. | |
| 1549 if (!function->NeedsArgumentsAdaption()) { | |
| 1550 __ Set(rax, arity); | |
| 1551 } | |
| 1552 | |
| 1553 LPointerMap* pointers = instr->pointer_map(); | |
| 1554 RecordPosition(pointers->position()); | |
| 1555 | |
| 1556 // Invoke function. | |
| 1557 if (*function == *graph()->info()->closure()) { | |
| 1558 __ CallSelf(); | |
| 1559 } else { | |
| 1560 __ call(FieldOperand(rdi, JSFunction::kCodeEntryOffset)); | |
| 1561 } | |
| 1562 | |
| 1563 // Setup deoptimization. | |
| 1564 RegisterLazyDeoptimization(instr); | |
| 1565 | |
| 1566 // Restore context. | |
| 1567 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); | |
| 1524 } | 1568 } |
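
The two decisions at the top of `CallKnownFunction` can be stated separately from the assembly: when a fresh context must be installed, and when `rax` can be preloaded with the argument count. A sketch with stand-in types; the `Scope` and `Function` fields here are illustrative, not V8's real classes.

```cpp
#include <cstdio>

struct Scope {
  bool contains_with;
  int num_heap_slots;
};

struct Function {
  const void* context;
  bool needs_arguments_adaption;
};

// A new context must be installed unless caller and callee provably share
// one: same context object, no 'with' scopes, no heap-allocated slots.
bool NeedsContextChange(const Function& caller, const Function& callee,
                        const Scope& scope) {
  return caller.context != callee.context ||
         scope.contains_with ||
         scope.num_heap_slots > 0;
}

// rax carries the argument count only when the callee needs no adaptor
// frame; otherwise the existing value is left for the adaptor to consume.
int ArgumentCountRegister(const Function& callee, int arity, int rax) {
  return callee.needs_arguments_adaption ? rax : arity;
}

int main() {
  int ctx = 0;
  Function caller = {&ctx, false};
  Function callee = {&ctx, false};
  Scope scope = {false, 0};
  std::printf("change context: %d, rax: %d\n",
              NeedsContextChange(caller, callee, scope),   // 0: shared
              ArgumentCountRegister(callee, 2, -1));       // 2: no adaption
  return 0;
}
```
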
| 1525 | 1569 |
| 1526 | 1570 |
| 1527 void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) { | 1571 void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) { |
| 1528 Abort("Unimplemented: %s", "DoCallConstantFunction"); | 1572 Abort("Unimplemented: %s", "DoCallConstantFunction"); |
| 1529 } | 1573 } |
| 1530 | 1574 |
| 1531 | 1575 |
| 1532 void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr) { | 1576 void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr) { |
| 1533 Abort("Unimplemented: %s", "DoDeferredMathAbsTaggedHeapNumber"); | 1577 Abort("Unimplemented: %s", "DoDeferredMathAbsTaggedHeapNumber"); |
| (...skipping 64 matching lines...) | |
| 1598 Abort("Unimplemented: %s", "DoCallFunction"); | 1642 Abort("Unimplemented: %s", "DoCallFunction"); |
| 1599 } | 1643 } |
| 1600 | 1644 |
| 1601 | 1645 |
| 1602 void LCodeGen::DoCallGlobal(LCallGlobal* instr) { | 1646 void LCodeGen::DoCallGlobal(LCallGlobal* instr) { |
| 1603 Abort("Unimplemented: %s", "DoCallGlobal"); | 1647 Abort("Unimplemented: %s", "DoCallGlobal"); |
| 1604 } | 1648 } |
| 1605 | 1649 |
| 1606 | 1650 |
| 1607 void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) { | 1651 void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) { |
| 1608 Abort("Unimplemented: %s", "DoCallKnownGlobal"); | 1652 ASSERT(ToRegister(instr->result()).is(rax)); |
| 1653 __ Move(rdi, instr->target()); | |
| 1654 CallKnownFunction(instr->target(), instr->arity(), instr); | |
| 1609 } | 1655 } |
| 1610 | 1656 |
| 1611 | 1657 |
| 1612 void LCodeGen::DoCallNew(LCallNew* instr) { | 1658 void LCodeGen::DoCallNew(LCallNew* instr) { |
| 1613 ASSERT(ToRegister(instr->InputAt(0)).is(rdi)); | 1659 ASSERT(ToRegister(instr->InputAt(0)).is(rdi)); |
| 1614 ASSERT(ToRegister(instr->result()).is(rax)); | 1660 ASSERT(ToRegister(instr->result()).is(rax)); |
| 1615 | 1661 |
| 1616 Handle<Code> builtin(Builtins::builtin(Builtins::JSConstructCall)); | 1662 Handle<Code> builtin(Builtins::builtin(Builtins::JSConstructCall)); |
| 1617 __ Set(rax, instr->arity()); | 1663 __ Set(rax, instr->arity()); |
| 1618 CallCode(builtin, RelocInfo::CONSTRUCT_CALL, instr); | 1664 CallCode(builtin, RelocInfo::CONSTRUCT_CALL, instr); |
| (...skipping 185 matching lines...) | |
| 1804 ASSERT(input->IsRegister()); | 1850 ASSERT(input->IsRegister()); |
| 1805 Condition cc = masm()->CheckSmi(ToRegister(input)); | 1851 Condition cc = masm()->CheckSmi(ToRegister(input)); |
| 1806 if (instr->condition() != equal) { | 1852 if (instr->condition() != equal) { |
| 1807 cc = NegateCondition(cc); | 1853 cc = NegateCondition(cc); |
| 1808 } | 1854 } |
| 1809 DeoptimizeIf(cc, instr->environment()); | 1855 DeoptimizeIf(cc, instr->environment()); |
| 1810 } | 1856 } |
| 1811 | 1857 |
| 1812 | 1858 |
| 1813 void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) { | 1859 void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) { |
| 1814 Abort("Unimplemented: %s", "DoCheckInstanceType"); | 1860 Register input = ToRegister(instr->InputAt(0)); |
| 1861 InstanceType first = instr->hydrogen()->first(); | |
| 1862 InstanceType last = instr->hydrogen()->last(); | |
| 1863 | |
| 1864 __ movq(kScratchRegister, FieldOperand(input, HeapObject::kMapOffset)); | |
| 1865 | |
| 1866 // If there is only one type in the interval check for equality. | |
| 1867 if (first == last) { | |
| 1868 __ cmpb(FieldOperand(kScratchRegister, Map::kInstanceTypeOffset), | |
| 1869 Immediate(static_cast<int8_t>(first))); | |
| 1870 DeoptimizeIf(not_equal, instr->environment()); | |
| 1871 } else if (first == FIRST_STRING_TYPE && last == LAST_STRING_TYPE) { | |
| 1872 // String has a dedicated bit in instance type. | |
| 1873 __ testb(FieldOperand(kScratchRegister, Map::kInstanceTypeOffset), | |
| 1874 Immediate(kIsNotStringMask)); | |
| 1875 DeoptimizeIf(not_zero, instr->environment()); | |
| 1876 } else { | |
| 1877 __ cmpb(FieldOperand(kScratchRegister, Map::kInstanceTypeOffset), | |
| 1878 Immediate(static_cast<int8_t>(first))); | |
| 1879 DeoptimizeIf(below, instr->environment()); | |
| 1880 // Omit check for the last type. | |
| 1881 if (last != LAST_TYPE) { | |
| 1882 __ cmpb(FieldOperand(kScratchRegister, Map::kInstanceTypeOffset), | |
| 1883 Immediate(static_cast<int8_t>(last))); | |
| 1884 DeoptimizeIf(above, instr->environment()); | |
| 1885 } | |
| 1886 } | |
| 1815 } | 1887 } |
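
The three strategies in `DoCheckInstanceType` above (exact compare when the interval is a single type, one bit test for the whole string range, otherwise a below/above range check with the upper bound omitted for `LAST_TYPE`) can be written as one standalone predicate. The type constants below are assumed values chosen so the string bit test works, not V8's real instance-type numbering.

```cpp
#include <cstdint>
#include <cstdio>

const uint8_t FIRST_STRING_TYPE = 0x00;
const uint8_t LAST_STRING_TYPE = 0x3F;   // assumed contiguous string range
const uint8_t kIsNotStringMask = 0x40;   // assumed: bit clear means string
const uint8_t LAST_TYPE = 0xFF;

// Returns true when the instance type passes; false is where the generated
// code would deoptimize.
bool CheckInstanceType(uint8_t type, uint8_t first, uint8_t last) {
  if (first == last) return type == first;               // single equality
  if (first == FIRST_STRING_TYPE && last == LAST_STRING_TYPE)
    return (type & kIsNotStringMask) == 0;               // one bit test
  if (type < first) return false;                        // DeoptimizeIf(below)
  if (last != LAST_TYPE && type > last) return false;    // DeoptimizeIf(above)
  return true;
}

int main() {
  std::printf("%d %d %d\n",
              CheckInstanceType(0x10, 0x10, 0x10),       // exact match: 1
              CheckInstanceType(0x20, FIRST_STRING_TYPE,
                                LAST_STRING_TYPE),       // string bit: 1
              CheckInstanceType(0x80, 0x70, LAST_TYPE)); // open-ended: 1
  return 0;
}
```
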
| 1816 | 1888 |
| 1817 | 1889 |
| 1818 void LCodeGen::DoCheckFunction(LCheckFunction* instr) { | 1890 void LCodeGen::DoCheckFunction(LCheckFunction* instr) { |
| 1819 ASSERT(instr->InputAt(0)->IsRegister()); | 1891 ASSERT(instr->InputAt(0)->IsRegister()); |
| 1820 Register reg = ToRegister(instr->InputAt(0)); | 1892 Register reg = ToRegister(instr->InputAt(0)); |
| 1821 __ Cmp(reg, instr->hydrogen()->target()); | 1893 __ Cmp(reg, instr->hydrogen()->target()); |
| 1822 DeoptimizeIf(not_equal, instr->environment()); | 1894 DeoptimizeIf(not_equal, instr->environment()); |
| 1823 } | 1895 } |
| 1824 | 1896 |
| (...skipping 181 matching lines...) | |
| 2006 | 2078 |
| 2007 void LCodeGen::DoOsrEntry(LOsrEntry* instr) { | 2079 void LCodeGen::DoOsrEntry(LOsrEntry* instr) { |
| 2008 Abort("Unimplemented: %s", "DoOsrEntry"); | 2080 Abort("Unimplemented: %s", "DoOsrEntry"); |
| 2009 } | 2081 } |
| 2010 | 2082 |
| 2011 #undef __ | 2083 #undef __ |
| 2012 | 2084 |
| 2013 } } // namespace v8::internal | 2085 } } // namespace v8::internal |
| 2014 | 2086 |
| 2015 #endif // V8_TARGET_ARCH_X64 | 2087 #endif // V8_TARGET_ARCH_X64 |