| OLD | NEW |
| 1 // Copyright 2009 the V8 project authors. All rights reserved. | 1 // Copyright 2009 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 6246 matching lines...) |
| 6257 Label slow; // Fallthrough label. | 6257 Label slow; // Fallthrough label. |
| 6258 // Equality is almost reflexive (everything but NaN), so start by testing | 6258 // Equality is almost reflexive (everything but NaN), so start by testing |
| 6259 // for "identity and not NaN". | 6259 // for "identity and not NaN". |
| 6260 { | 6260 { |
| 6261 Label not_identical; | 6261 Label not_identical; |
| 6262 __ cmpq(rax, rdx); | 6262 __ cmpq(rax, rdx); |
| 6263 __ j(not_equal, &not_identical); | 6263 __ j(not_equal, &not_identical); |
| 6264 // Test for NaN. Sadly, we can't just compare to Factory::nan_value(), | 6264 // Test for NaN. Sadly, we can't just compare to Factory::nan_value(), |
| 6265 // so we do the second best thing - test it ourselves. | 6265 // so we do the second best thing - test it ourselves. |
| 6266 | 6266 |
| 6267 Label return_equal; | 6267 if (never_nan_nan_) { |
| 6268 Label heap_number; | 6268 __ xor_(rax, rax); |
| 6269 // If it's not a heap number, then return equal. | 6269 __ ret(0); |
| 6270 __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset), | 6270 } else { |
| 6271 Factory::heap_number_map()); | 6271 Label return_equal; |
| 6272 __ j(equal, &heap_number); | 6272 Label heap_number; |
| 6273 __ bind(&return_equal); | 6273 // If it's not a heap number, then return equal. |
| 6274 __ xor_(rax, rax); | 6274 __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset), |
| 6275 __ ret(0); | 6275 Factory::heap_number_map()); |
| 6276 __ j(equal, &heap_number); |
| 6277 __ bind(&return_equal); |
| 6278 __ xor_(rax, rax); |
| 6279 __ ret(0); |
| 6276 | 6280 |
| 6277 __ bind(&heap_number); | 6281 __ bind(&heap_number); |
| 6278 // It is a heap number, so return non-equal if it's NaN and equal if it's | 6282 // It is a heap number, so return non-equal if it's NaN and equal if |
| 6279 // not NaN. | 6283 // it's not NaN. |
| 6280 // The representation of NaN values has all exponent bits (52..62) set, | 6284 // The representation of NaN values has all exponent bits (52..62) set, |
| 6281 // and not all mantissa bits (0..51) clear. | 6285 // and not all mantissa bits (0..51) clear. |
| 6282 // We only allow QNaNs, which have bit 51 set (which also rules out | 6286 // We only allow QNaNs, which have bit 51 set (which also rules out |
| 6283 // the value being Infinity). | 6287 // the value being Infinity). |
| 6284 | 6288 |
| 6285 // Value is a QNaN if value & kQuietNaNMask == kQuietNaNMask, i.e., | 6289 // Value is a QNaN if value & kQuietNaNMask == kQuietNaNMask, i.e., |
| 6286 // all bits in the mask are set. We only need to check the word | 6290 // all bits in the mask are set. We only need to check the word |
| 6287 // that contains the exponent and high bit of the mantissa. | 6291 // that contains the exponent and high bit of the mantissa. |
| 6288 ASSERT_NE(0, (kQuietNaNHighBitsMask << 1) & 0x80000000u); | 6292 ASSERT_NE(0, (kQuietNaNHighBitsMask << 1) & 0x80000000u); |
| 6289 __ movl(rdx, FieldOperand(rdx, HeapNumber::kExponentOffset)); | 6293 __ movl(rdx, FieldOperand(rdx, HeapNumber::kExponentOffset)); |
| 6290 __ xorl(rax, rax); | 6294 __ xorl(rax, rax); |
| 6291 __ addl(rdx, rdx); // Shift value and mask so mask applies to top bits. | 6295 __ addl(rdx, rdx); // Shift value and mask so mask applies to top bits. |
| 6292 __ cmpl(rdx, Immediate(kQuietNaNHighBitsMask << 1)); | 6296 __ cmpl(rdx, Immediate(kQuietNaNHighBitsMask << 1)); |
| 6293 __ setcc(above_equal, rax); | 6297 __ setcc(above_equal, rax); |
| 6294 __ ret(0); | 6298 __ ret(0); |
| 6299 } |
| 6295 | 6300 |
| 6296 __ bind(&not_identical); | 6301 __ bind(&not_identical); |
| 6297 } | 6302 } |
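A side note on the QNaN test above, for readers of the diff: the stub only inspects the upper 32-bit word of the IEEE-754 double, which holds the sign bit, the eleven exponent bits and the top mantissa bit. The following is a minimal C++ sketch of the same check, not V8's code; the 0x7FF80000 value for kQuietNaNHighBitsMask is an assumption standing in for the real constant from V8's headers.

    #include <cstdint>
    #include <cstring>

    // Assumed value: exponent bits 62..52 plus mantissa bit 51, as they appear
    // in the high 32-bit word of a double. The real constant is defined by V8.
    const uint32_t kQuietNaNHighBitsMask = 0x7FF80000;

    bool IsQuietNaN(double value) {
      uint64_t bits;
      std::memcpy(&bits, &value, sizeof bits);            // raw bit pattern
      uint32_t high = static_cast<uint32_t>(bits >> 32);  // exponent + mantissa MSB
      // Shift value and mask left by one so the sign bit drops out, mirroring
      // the stub's addl(rdx, rdx); since the mask then occupies the contiguous
      // top bits, an unsigned >= compare means "all mask bits are set".
      return (high << 1) >= (kQuietNaNHighBitsMask << 1);
    }

For example, IsQuietNaN(std::numeric_limits<double>::quiet_NaN()) is true, while IsQuietNaN of an Infinity is false: Infinity has all exponent bits set but bit 51 clear, which is exactly why the comment notes that requiring the QNaN bit also rules out Infinity.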
| 6298 | 6303 |
| 6299 // If we're doing a strict equality comparison, we don't have to do | 6304 // If we're doing a strict equality comparison, we don't have to do |
| 6300 // type conversion, so we generate code to do fast comparison for objects | 6305 // type conversion, so we generate code to do fast comparison for objects |
| 6301 // and oddballs. Non-smi numbers and strings still go through the usual | 6306 // and oddballs. Non-smi numbers and strings still go through the usual |
| 6302 // slow-case code. | 6307 // slow-case code. |
| 6303 if (strict_) { | 6308 if (strict_) { |
| 6304 // If either is a Smi (we know that not both are), then they can only | 6309 // If either is a Smi (we know that not both are), then they can only |
| (...skipping 126 matching lines...) |
| 6431 | 6436 |
| 6432 | 6437 |
| 6433 void CompareStub::BranchIfNonSymbol(MacroAssembler* masm, | 6438 void CompareStub::BranchIfNonSymbol(MacroAssembler* masm, |
| 6434 Label* label, | 6439 Label* label, |
| 6435 Register object, | 6440 Register object, |
| 6436 Register scratch) { | 6441 Register scratch) { |
| 6437 __ JumpIfSmi(object, label); | 6442 __ JumpIfSmi(object, label); |
| 6438 __ movq(scratch, FieldOperand(object, HeapObject::kMapOffset)); | 6443 __ movq(scratch, FieldOperand(object, HeapObject::kMapOffset)); |
| 6439 __ movzxbq(scratch, | 6444 __ movzxbq(scratch, |
| 6440 FieldOperand(scratch, Map::kInstanceTypeOffset)); | 6445 FieldOperand(scratch, Map::kInstanceTypeOffset)); |
| 6441 __ and_(scratch, Immediate(kIsSymbolMask | kIsNotStringMask)); | 6446 // Ensure that no non-strings have the symbol bit set. |
| 6442 __ cmpb(scratch, Immediate(kSymbolTag | kStringTag)); | 6447 ASSERT(kNotStringTag + kIsSymbolMask > LAST_TYPE); |
| 6443 __ j(not_equal, label); | 6448 ASSERT(kSymbolTag != 0); |
| 6449 __ testb(scratch, Immediate(kIsSymbolMask)); |
| 6450 __ j(zero, label); |
| 6444 } | 6451 } |
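For context on the BranchIfNonSymbol change: the old sequence masked out both the string and symbol bits and compared against the combined tag, while the new one relies on the instance-type layout invariant that the added ASSERTs document, namely that no non-string type can carry the symbol bit. A single bit test then suffices. A rough C++ sketch of the resulting predicate, using a placeholder constant value rather than V8's real kIsSymbolMask:

    #include <cstdint>

    // Placeholder value; the real mask comes from V8's instance-type encoding.
    const uint32_t kIsSymbolMask = 1u << 5;

    // True when the instance type denotes a symbol. The patch's ASSERTs
    // guarantee that only string types can have this bit set, so no separate
    // "is it a string at all" check is needed here.
    bool IsSymbolType(uint32_t instance_type) {
      return (instance_type & kIsSymbolMask) != 0;
    }

BranchIfNonSymbol then simply jumps to the label when that bit is zero, or earlier when the object was a Smi to begin with.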
| 6445 | 6452 |
| 6446 | 6453 |
| 6447 // Call the function just below TOS on the stack with the given | 6454 // Call the function just below TOS on the stack with the given |
| 6448 // arguments. The receiver is the TOS. | 6455 // arguments. The receiver is the TOS. |
| 6449 void CodeGenerator::CallWithArguments(ZoneList<Expression*>* args, | 6456 void CodeGenerator::CallWithArguments(ZoneList<Expression*>* args, |
| 6450 int position) { | 6457 int position) { |
| 6451 // Push the arguments ("left-to-right") on the stack. | 6458 // Push the arguments ("left-to-right") on the stack. |
| 6452 int arg_count = args->length(); | 6459 int arg_count = args->length(); |
| 6453 for (int i = 0; i < arg_count; i++) { | 6460 for (int i = 0; i < arg_count; i++) { |
| (...skipping 1279 matching lines...) |
| 7733 // returning. | 7740 // returning. |
| 7734 if (!HasArgumentsInRegisters()) { | 7741 if (!HasArgumentsInRegisters()) { |
| 7735 __ ret(2 * kPointerSize); // Remove both operands | 7742 __ ret(2 * kPointerSize); // Remove both operands |
| 7736 } else { | 7743 } else { |
| 7737 __ ret(0); | 7744 __ ret(0); |
| 7738 } | 7745 } |
| 7739 } | 7746 } |
| 7740 | 7747 |
| 7741 | 7748 |
| 7742 int CompareStub::MinorKey() { | 7749 int CompareStub::MinorKey() { |
| 7743 // Encode the two parameters in a unique 16 bit value. | 7750 // Encode the three parameters in a unique 16 bit value. |
| 7744 ASSERT(static_cast<unsigned>(cc_) < (1 << 15)); | 7751 ASSERT(static_cast<unsigned>(cc_) < (1 << 14)); |
| 7745 return (static_cast<unsigned>(cc_) << 1) | (strict_ ? 1 : 0); | 7752 int nnn_value = (never_nan_nan_ ? 2 : 0); |
| 7753 if (cc_ != equal) nnn_value = 0; // Avoid duplicate stubs. |
| 7754 return (static_cast<unsigned>(cc_) << 2) | nnn_value | (strict_ ? 1 : 0); |
| 7755 } |
| 7756 |
| 7757 |
| 7758 const char* CompareStub::GetName() { |
| 7759 switch(cc_) { |
| 7760 case less: return "CompareStub_LT"; |
| 7761 case greater: return "CompareStub_GT"; |
| 7762 case less_equal: return "CompareStub_LE"; |
| 7763 case greater_equal: return "CompareStub_GE"; |
| 7764 case not_equal: { |
| 7765 if (strict_) { |
| 7766 if (never_nan_nan_) { |
| 7767 return "CompareStub_NE_STRICT_NO_NAN"; |
| 7768 } else { |
| 7769 return "CompareStub_NE_STRICT"; |
| 7770 } |
| 7771 } else { |
| 7772 if (never_nan_nan_) { |
| 7773 return "CompareStub_NE_NO_NAN"; |
| 7774 } else { |
| 7775 return "CompareStub_NE"; |
| 7776 } |
| 7777 } |
| 7778 } |
| 7779 case equal: { |
| 7780 if (strict_) { |
| 7781 if (never_nan_nan_) { |
| 7782 return "CompareStub_EQ_STRICT_NO_NAN"; |
| 7783 } else { |
| 7784 return "CompareStub_EQ_STRICT"; |
| 7785 } |
| 7786 } else { |
| 7787 if (never_nan_nan_) { |
| 7788 return "CompareStub_EQ_NO_NAN"; |
| 7789 } else { |
| 7790 return "CompareStub_EQ"; |
| 7791 } |
| 7792 } |
| 7793 } |
| 7794 default: return "CompareStub"; |
| 7795 } |
| 7746 } | 7796 } |
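On the MinorKey change: the key now packs three parameters instead of two. Bit 0 carries strict_, bit 1 carries never_nan_nan_ (zeroed for non-equality conditions so otherwise-identical stubs share one key), and the condition code sits above them. A small sketch of that layout, treating the condition as a plain unsigned value and taking an is_equality flag as a stand-in for comparing against V8's equal enumerator:

    #include <cassert>

    // Sketch of the packing only; the name and the is_equality stand-in are
    // assumptions, not V8's actual signature.
    int CompareStubMinorKeySketch(unsigned cc, bool strict,
                                  bool never_nan_nan, bool is_equality) {
      assert(cc < (1u << 14));          // two low bits are reserved for the flags
      int nnn_value = never_nan_nan ? 2 : 0;
      if (!is_equality) nnn_value = 0;  // the NaN flag only matters for equality,
                                        // so drop it to avoid duplicate stubs
      return static_cast<int>(cc << 2) | nnn_value | (strict ? 1 : 0);
    }

GetName decodes the same flags into a readable stub name, which keeps generated-code listings distinguishable now that one condition can map to several stub variants.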
| 7747 | 7797 |
| 7748 | 7798 |
| 7749 void StringAddStub::Generate(MacroAssembler* masm) { | 7799 void StringAddStub::Generate(MacroAssembler* masm) { |
| 7750 Label string_add_runtime; | 7800 Label string_add_runtime; |
| 7751 | 7801 |
| 7752 // Load the two arguments. | 7802 // Load the two arguments. |
| 7753 __ movq(rax, Operand(rsp, 2 * kPointerSize)); // First argument. | 7803 __ movq(rax, Operand(rsp, 2 * kPointerSize)); // First argument. |
| 7754 __ movq(rdx, Operand(rsp, 1 * kPointerSize)); // Second argument. | 7804 __ movq(rdx, Operand(rsp, 1 * kPointerSize)); // Second argument. |
| 7755 | 7805 |
| (...skipping 306 matching lines...) |
| 8062 masm.GetCode(&desc); | 8112 masm.GetCode(&desc); |
| 8063 // Call the function from C++. | 8113 // Call the function from C++. |
| 8064 return FUNCTION_CAST<ModuloFunction>(buffer); | 8114 return FUNCTION_CAST<ModuloFunction>(buffer); |
| 8065 } | 8115 } |
| 8066 | 8116 |
| 8067 #endif | 8117 #endif |
| 8068 | 8118 |
| 8069 #undef __ | 8119 #undef __ |
| 8070 | 8120 |
| 8071 } } // namespace v8::internal | 8121 } } // namespace v8::internal |