| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 158 matching lines...) |
| 169 } | 169 } |
| 170 | 170 |
| 171 | 171 |
| 172 void ArrayNArgumentsConstructorStub::InitializeInterfaceDescriptor( | 172 void ArrayNArgumentsConstructorStub::InitializeInterfaceDescriptor( |
| 173 Isolate* isolate, | 173 Isolate* isolate, |
| 174 CodeStubInterfaceDescriptor* descriptor) { | 174 CodeStubInterfaceDescriptor* descriptor) { |
| 175 InitializeArrayConstructorDescriptor(isolate, descriptor, -1); | 175 InitializeArrayConstructorDescriptor(isolate, descriptor, -1); |
| 176 } | 176 } |
| 177 | 177 |
| 178 | 178 |
| | 179 void ToBooleanStub::InitializeInterfaceDescriptor( |
| | 180 Isolate* isolate, |
| | 181 CodeStubInterfaceDescriptor* descriptor) { |
| | 182 static Register registers[] = { a0 }; |
| | 183 descriptor->register_param_count_ = 1; |
| | 184 descriptor->register_params_ = registers; |
| | 185 descriptor->deoptimization_handler_ = |
| | 186 FUNCTION_ADDR(ToBooleanIC_Miss); |
| | 187 descriptor->SetMissHandler( |
| | 188 ExternalReference(IC_Utility(IC::kToBooleanIC_Miss), isolate)); |
| | 189 } |
| | 190 |
| | 191 |
| 179 #define __ ACCESS_MASM(masm) | 192 #define __ ACCESS_MASM(masm) |
| 180 | 193 |
| 181 static void EmitIdenticalObjectComparison(MacroAssembler* masm, | 194 static void EmitIdenticalObjectComparison(MacroAssembler* masm, |
| 182 Label* slow, | 195 Label* slow, |
| 183 Condition cc); | 196 Condition cc); |
| 184 static void EmitSmiNonsmiComparison(MacroAssembler* masm, | 197 static void EmitSmiNonsmiComparison(MacroAssembler* masm, |
| 185 Register lhs, | 198 Register lhs, |
| 186 Register rhs, | 199 Register rhs, |
| 187 Label* rhs_not_nan, | 200 Label* rhs_not_nan, |
| 188 Label* slow, | 201 Label* slow, |
| (...skipping 1304 matching lines...) |
| 1493 | 1506 |
| 1494 // Call the native; it returns -1 (less), 0 (equal), or 1 (greater) | 1507 // Call the native; it returns -1 (less), 0 (equal), or 1 (greater) |
| 1495 // tagged as a small integer. | 1508 // tagged as a small integer. |
| 1496 __ InvokeBuiltin(native, JUMP_FUNCTION); | 1509 __ InvokeBuiltin(native, JUMP_FUNCTION); |
| 1497 | 1510 |
| 1498 __ bind(&miss); | 1511 __ bind(&miss); |
| 1499 GenerateMiss(masm); | 1512 GenerateMiss(masm); |
| 1500 } | 1513 } |
| 1501 | 1514 |
| 1502 | 1515 |
| 1503 // The stub expects its argument in the tos_ register and returns its result in | |
| 1504 // it, too: zero for false, and a non-zero value for true. | |
| 1505 void ToBooleanStub::Generate(MacroAssembler* masm) { | |
| 1506 Label patch; | |
| 1507 const Register map = t5.is(tos_) ? t3 : t5; | |
| 1508 | |
| 1509 // undefined -> false. | |
| 1510 CheckOddball(masm, UNDEFINED, Heap::kUndefinedValueRootIndex, false); | |
| 1511 | |
| 1512 // Boolean -> its value. | |
| 1513 CheckOddball(masm, BOOLEAN, Heap::kFalseValueRootIndex, false); | |
| 1514 CheckOddball(masm, BOOLEAN, Heap::kTrueValueRootIndex, true); | |
| 1515 | |
| 1516 // 'null' -> false. | |
| 1517 CheckOddball(masm, NULL_TYPE, Heap::kNullValueRootIndex, false); | |
| 1518 | |
| 1519 if (types_.Contains(SMI)) { | |
| 1520 // Smis: 0 -> false, all other -> true | |
| 1521 __ And(at, tos_, kSmiTagMask); | |
| 1522 // tos_ contains the correct return value already | |
| 1523 __ Ret(eq, at, Operand(zero_reg)); | |
| 1524 } else if (types_.NeedsMap()) { | |
| 1525 // If we need a map later and have a Smi -> patch. | |
| 1526 __ JumpIfSmi(tos_, &patch); | |
| 1527 } | |
| 1528 | |
| 1529 if (types_.NeedsMap()) { | |
| 1530 __ lw(map, FieldMemOperand(tos_, HeapObject::kMapOffset)); | |
| 1531 | |
| 1532 if (types_.CanBeUndetectable()) { | |
| 1533 __ lbu(at, FieldMemOperand(map, Map::kBitFieldOffset)); | |
| 1534 __ And(at, at, Operand(1 << Map::kIsUndetectable)); | |
| 1535 // Undetectable -> false. | |
| 1536 __ Movn(tos_, zero_reg, at); | |
| 1537 __ Ret(ne, at, Operand(zero_reg)); | |
| 1538 } | |
| 1539 } | |
| 1540 | |
| 1541 if (types_.Contains(SPEC_OBJECT)) { | |
| 1542 // Spec object -> true. | |
| 1543 __ lbu(at, FieldMemOperand(map, Map::kInstanceTypeOffset)); | |
| 1544 // tos_ contains the correct non-zero return value already. | |
| 1545 __ Ret(ge, at, Operand(FIRST_SPEC_OBJECT_TYPE)); | |
| 1546 } | |
| 1547 | |
| 1548 if (types_.Contains(STRING)) { | |
| 1549 // String value -> false iff empty. | |
| 1550 __ lbu(at, FieldMemOperand(map, Map::kInstanceTypeOffset)); | |
| 1551 Label skip; | |
| 1552 __ Branch(&skip, ge, at, Operand(FIRST_NONSTRING_TYPE)); | |
| 1553 __ Ret(USE_DELAY_SLOT); // the string length is OK as the return value | |
| 1554 __ lw(tos_, FieldMemOperand(tos_, String::kLengthOffset)); | |
| 1555 __ bind(&skip); | |
| 1556 } | |
| 1557 | |
| 1558 if (types_.Contains(HEAP_NUMBER)) { | |
| 1559 // Heap number -> false iff +0, -0, or NaN. | |
| 1560 Label not_heap_number; | |
| 1561 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex); | |
| 1562 __ Branch(&not_heap_number, ne, map, Operand(at)); | |
| 1563 Label zero_or_nan, number; | |
| 1564 __ ldc1(f2, FieldMemOperand(tos_, HeapNumber::kValueOffset)); | |
| 1565 __ BranchF(&number, &zero_or_nan, ne, f2, kDoubleRegZero); | |
| 1566 // "tos_" is a register, and contains a non zero value by default. | |
| 1567 // Hence we only need to overwrite "tos_" with zero to return false for | |
| 1568 // FP_ZERO or FP_NAN cases. Otherwise, by default it returns true. | |
| 1569 __ bind(&zero_or_nan); | |
| 1570 __ mov(tos_, zero_reg); | |
| 1571 __ bind(&number); | |
| 1572 __ Ret(); | |
| 1573 __ bind(&not_heap_number); | |
| 1574 } | |
| 1575 | |
| 1576 __ bind(&patch); | |
| 1577 GenerateTypeTransition(masm); | |
| 1578 } | |
| 1579 | |
| 1580 | |
| 1581 void ToBooleanStub::CheckOddball(MacroAssembler* masm, | |
| 1582 Type type, | |
| 1583 Heap::RootListIndex value, | |
| 1584 bool result) { | |
| 1585 if (types_.Contains(type)) { | |
| 1586 // If we see an expected oddball, return its ToBoolean value in tos_. | |
| 1587 __ LoadRoot(at, value); | |
| 1588 __ Subu(at, at, tos_); // This is a check for equality for the movz below. | |
| 1589 // The value of a root is never NULL, so we can avoid loading a non-null | |
| 1590 // value into tos_ when we want to return 'true'. | |
| 1591 if (!result) { | |
| 1592 __ Movz(tos_, zero_reg, at); | |
| 1593 } | |
| 1594 __ Ret(eq, at, Operand(zero_reg)); | |
| 1595 } | |
| 1596 } | |
| 1597 | |
| 1598 | |
| 1599 void ToBooleanStub::GenerateTypeTransition(MacroAssembler* masm) { | |
| 1600 __ Move(a3, tos_); | |
| 1601 __ li(a2, Operand(Smi::FromInt(tos_.code()))); | |
| 1602 __ li(a1, Operand(Smi::FromInt(types_.ToByte()))); | |
| 1603 __ Push(a3, a2, a1); | |
| 1604 // Patch the caller to an appropriate specialized stub and return the | |
| 1605 // operation result to the caller of the stub. | |
| 1606 __ TailCallExternalReference( | |
| 1607 ExternalReference(IC_Utility(IC::kToBoolean_Patch), masm->isolate()), | |
| 1608 3, | |
| 1609 1); | |
| 1610 } | |
| 1611 | |
| 1612 | |
| 1613 void StoreBufferOverflowStub::Generate(MacroAssembler* masm) { | 1516 void StoreBufferOverflowStub::Generate(MacroAssembler* masm) { |
| 1614 // We don't allow a GC during a store buffer overflow so there is no need to | 1517 // We don't allow a GC during a store buffer overflow so there is no need to |
| 1615 // store the registers in any particular way, but we do have to store and | 1518 // store the registers in any particular way, but we do have to store and |
| 1616 // restore them. | 1519 // restore them. |
| 1617 __ MultiPush(kJSCallerSaved | ra.bit()); | 1520 __ MultiPush(kJSCallerSaved | ra.bit()); |
| 1618 if (save_doubles_ == kSaveFPRegs) { | 1521 if (save_doubles_ == kSaveFPRegs) { |
| 1619 __ MultiPushFPU(kCallerSavedFPU); | 1522 __ MultiPushFPU(kCallerSavedFPU); |
| 1620 } | 1523 } |
| 1621 const int argument_count = 1; | 1524 const int argument_count = 1; |
| 1622 const int fp_argument_count = 0; | 1525 const int fp_argument_count = 0; |
| (...skipping 6224 matching lines...) |
| 7847 __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET); | 7750 __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET); |
| 7848 } | 7751 } |
| 7849 } | 7752 } |
| 7850 | 7753 |
| 7851 | 7754 |
| 7852 #undef __ | 7755 #undef __ |
| 7853 | 7756 |
| 7854 } } // namespace v8::internal | 7757 } } // namespace v8::internal |
| 7855 | 7758 |
| 7856 #endif // V8_TARGET_ARCH_MIPS | 7759 #endif // V8_TARGET_ARCH_MIPS |