Chromium Code Reviews

Side by Side Diff: src/x64/codegen-x64.cc

Issue 146082: X64 implementation: comparison operations. (Closed) Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge/
Patch Set: '' Created 11 years, 6 months ago
1 // Copyright 2009 the V8 project authors. All rights reserved. 1 // Copyright 2009 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 1586 matching lines...)
1597 } 1597 }
1598 1598
1599 Load(node->left()); 1599 Load(node->left());
1600 Load(node->right()); 1600 Load(node->right());
1601 GenericBinaryOperation(node->op(), node->type(), overwrite_mode); 1601 GenericBinaryOperation(node->op(), node->type(), overwrite_mode);
1602 } 1602 }
1603 } 1603 }
1604 1604
1605 1605
1606 1606
1607 void CodeGenerator::VisitCompareOperation(CompareOperation* a) { 1607 void CodeGenerator::VisitCompareOperation(CompareOperation* node) {
1608 UNIMPLEMENTED(); 1608 Comment cmnt(masm_, "[ CompareOperation");
1609
1610 // Get the expressions from the node.
1611 Expression* left = node->left();
1612 Expression* right = node->right();
1613 Token::Value op = node->op();
1614 // To make typeof testing for natives implemented in JavaScript really
1615 // efficient, we generate special code for expressions of the form:
1616 // 'typeof <expression> == <string>'.
1617 UnaryOperation* operation = left->AsUnaryOperation();
1618 if ((op == Token::EQ || op == Token::EQ_STRICT) &&
1619 (operation != NULL && operation->op() == Token::TYPEOF) &&
1620 (right->AsLiteral() != NULL &&
1621 right->AsLiteral()->handle()->IsString())) {
1622 Handle<String> check(String::cast(*right->AsLiteral()->handle()));
1623
1624 // Load the operand and move it to a register.
1625 LoadTypeofExpression(operation->expression());
1626 Result answer = frame_->Pop();
1627 answer.ToRegister();
1628
1629 if (check->Equals(Heap::number_symbol())) {
1630 __ testl(answer.reg(), Immediate(kSmiTagMask));
1631 destination()->true_target()->Branch(zero);
1632 frame_->Spill(answer.reg());
1633 __ movq(answer.reg(), FieldOperand(answer.reg(), HeapObject::kMapOffset));
1634 __ Cmp(answer.reg(), Factory::heap_number_map());
1635 answer.Unuse();
1636 destination()->Split(equal);
1637
1638 } else if (check->Equals(Heap::string_symbol())) {
1639 __ testl(answer.reg(), Immediate(kSmiTagMask));
1640 destination()->false_target()->Branch(zero);
1641
1642 // It can be an undetectable string object.
1643 __ movq(kScratchRegister,
1644 FieldOperand(answer.reg(), HeapObject::kMapOffset));
1645 __ testb(FieldOperand(kScratchRegister, Map::kBitFieldOffset),
1646 Immediate(1 << Map::kIsUndetectable));
1647 destination()->false_target()->Branch(not_zero);
1648 __ CmpInstanceType(kScratchRegister, FIRST_NONSTRING_TYPE);
1649 answer.Unuse();
1650 destination()->Split(below); // Unsigned byte comparison needed.
1651
1652 } else if (check->Equals(Heap::boolean_symbol())) {
1653 __ Cmp(answer.reg(), Factory::true_value());
1654 destination()->true_target()->Branch(equal);
1655 __ Cmp(answer.reg(), Factory::false_value());
1656 answer.Unuse();
1657 destination()->Split(equal);
1658
1659 } else if (check->Equals(Heap::undefined_symbol())) {
1660 __ Cmp(answer.reg(), Factory::undefined_value());
1661 destination()->true_target()->Branch(equal);
1662
1663 __ testl(answer.reg(), Immediate(kSmiTagMask));
1664 destination()->false_target()->Branch(zero);
1665
1666 // It can be an undetectable object.
1667 __ movq(kScratchRegister,
1668 FieldOperand(answer.reg(), HeapObject::kMapOffset));
1669 __ testb(FieldOperand(kScratchRegister, Map::kBitFieldOffset),
1670 Immediate(1 << Map::kIsUndetectable));
1671 answer.Unuse();
1672 destination()->Split(not_zero);
1673
1674 } else if (check->Equals(Heap::function_symbol())) {
1675 __ testl(answer.reg(), Immediate(kSmiTagMask));
1676 destination()->false_target()->Branch(zero);
1677 frame_->Spill(answer.reg());
1678 __ CmpObjectType(answer.reg(), JS_FUNCTION_TYPE, answer.reg());
1679 answer.Unuse();
1680 destination()->Split(equal);
1681
1682 } else if (check->Equals(Heap::object_symbol())) {
1683 __ testl(answer.reg(), Immediate(kSmiTagMask));
1684 destination()->false_target()->Branch(zero);
1685 __ Cmp(answer.reg(), Factory::null_value());
1686 destination()->true_target()->Branch(equal);
1687
1688 // It can be an undetectable object.
1689 __ movq(kScratchRegister,
1690 FieldOperand(answer.reg(), HeapObject::kMapOffset));
1691 __ testb(FieldOperand(kScratchRegister, Map::kBitFieldOffset),
1692 Immediate(1 << Map::kIsUndetectable));
1693 destination()->false_target()->Branch(not_zero);
1694 // Compare the instance type; the map is still in kScratchRegister.
1695 __ CmpInstanceType(kScratchRegister, FIRST_JS_OBJECT_TYPE);
1696 destination()->false_target()->Branch(below);
1697 __ CmpInstanceType(kScratchRegister, LAST_JS_OBJECT_TYPE);
1698 answer.Unuse();
1699 destination()->Split(below_equal);
1700 } else {
1701 // Uncommon case: typeof testing against a string literal that is
1702 // never returned from the typeof operator.
1703 answer.Unuse();
1704 destination()->Goto(false);
1705 }
1706 return;
1707 }
1708
1709 Condition cc = no_condition;
1710 bool strict = false;
1711 switch (op) {
1712 case Token::EQ_STRICT:
1713 strict = true;
1714 // Fall through
1715 case Token::EQ:
1716 cc = equal;
1717 break;
1718 case Token::LT:
1719 cc = less;
1720 break;
1721 case Token::GT:
1722 cc = greater;
1723 break;
1724 case Token::LTE:
1725 cc = less_equal;
1726 break;
1727 case Token::GTE:
1728 cc = greater_equal;
1729 break;
1730 case Token::IN: {
1731 Load(left);
1732 Load(right);
1733 Result answer = frame_->InvokeBuiltin(Builtins::IN, CALL_FUNCTION, 2);
1734 frame_->Push(&answer); // push the result
1735 return;
1736 }
1737 case Token::INSTANCEOF: {
1738 Load(left);
1739 Load(right);
1740 InstanceofStub stub;
1741 Result answer = frame_->CallStub(&stub, 2);
1742 answer.ToRegister();
1743 __ testq(answer.reg(), answer.reg());
1744 answer.Unuse();
1745 destination()->Split(zero);
1746 return;
1747 }
1748 default:
1749 UNREACHABLE();
1750 }
1751 Load(left);
1752 Load(right);
1753 Comparison(cc, strict, destination());
1609 } 1754 }
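The smi checks that recur throughout this function (`__ testl(reg, Immediate(kSmiTagMask))` followed by a branch on `zero`) rest on V8's pointer tagging. A minimal C++ sketch of that tagging, assuming the classic scheme in force here (kSmiTag == 0, kSmiTagMask == 1, smi values stored shifted left by one, heap pointers carrying a set low bit):

#include <cstdint>
#include <cassert>

const intptr_t kSmiTag = 0;
const intptr_t kSmiTagMask = 1;

intptr_t SmiFromInt(int value) { return static_cast<intptr_t>(value) << 1; }

// Mirrors '__ testl(reg, Immediate(kSmiTagMask))' plus branch-on-zero:
// a word is a smi exactly when its low tag bit is clear.
bool IsSmi(intptr_t word) { return (word & kSmiTagMask) == kSmiTag; }

int main() {
  assert(IsSmi(SmiFromInt(42)));        // tagged smi: low bit 0
  intptr_t heap_object = 0x100000 | 1;  // heap pointers have the tag bit set
  assert(!IsSmi(heap_object));
  return 0;
}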
1610 1755
1611 1756
1612 void CodeGenerator::VisitThisFunction(ThisFunction* node) { 1757 void CodeGenerator::VisitThisFunction(ThisFunction* node) {
1613 frame_->PushFunction(); 1758 frame_->PushFunction();
1614 } 1759 }
1615 1760
1616 1761
1617 void CodeGenerator::GenerateArgumentsAccess(ZoneList<Expression*>* args) { 1762 void CodeGenerator::GenerateArgumentsAccess(ZoneList<Expression*>* args) {
1618 UNIMPLEMENTED(); 1763 UNIMPLEMENTED();
(...skipping 572 matching lines...)
2191 2336
2192 void CodeGenerator::LoadGlobalReceiver() { 2337 void CodeGenerator::LoadGlobalReceiver() {
2193 Result temp = allocator_->Allocate(); 2338 Result temp = allocator_->Allocate();
2194 Register reg = temp.reg(); 2339 Register reg = temp.reg();
2195 __ movq(reg, GlobalObject()); 2340 __ movq(reg, GlobalObject());
2196 __ movq(reg, FieldOperand(reg, GlobalObject::kGlobalReceiverOffset)); 2341 __ movq(reg, FieldOperand(reg, GlobalObject::kGlobalReceiverOffset));
2197 frame_->Push(&temp); 2342 frame_->Push(&temp);
2198 } 2343 }
2199 2344
2200 2345
2346 // TODO(1241834): Get rid of this function in favor of just using Load, now
2347 // that we have the INSIDE_TYPEOF typeof state. => Need to handle global
2348 // variables w/o reference errors elsewhere.
2349 void CodeGenerator::LoadTypeofExpression(Expression* x) {
2350 Variable* variable = x->AsVariableProxy()->AsVariable();
2351 if (variable != NULL && !variable->is_this() && variable->is_global()) {
2352 // NOTE: This is somewhat nasty. We force the compiler to load
2353 // the variable as if through '<global>.<variable>' to make sure we
2354 // do not get reference errors.
2355 Slot global(variable, Slot::CONTEXT, Context::GLOBAL_INDEX);
2356 Literal key(variable->name());
2357 // TODO(1241834): Fetch the position from the variable instead of using
2358 // no position.
2359 Property property(&global, &key, RelocInfo::kNoPosition);
2360 Load(&property);
2361 } else {
2362 Load(x, INSIDE_TYPEOF);
2363 }
2364 }
2365
2366
2367 class CompareStub: public CodeStub {
2368 public:
2369 CompareStub(Condition cc, bool strict) : cc_(cc), strict_(strict) { }
2370
2371 void Generate(MacroAssembler* masm);
2372
2373 private:
2374 Condition cc_;
2375 bool strict_;
2376
2377 Major MajorKey() { return Compare; }
2378
2379 int MinorKey() {
2380 // Encode the two parameters in a unique 16-bit value.
2381 ASSERT(static_cast<int>(cc_) < (1 << 15));
2382 return (static_cast<int>(cc_) << 1) | (strict_ ? 1 : 0);
2383 }
2384
2385 // Branch to the label if the given object isn't a symbol.
2386 void BranchIfNonSymbol(MacroAssembler* masm,
2387 Label* label,
2388 Register object);
2389
2390 #ifdef DEBUG
2391 void Print() {
2392 PrintF("CompareStub (cc %d), (strict %s)\n",
2393 static_cast<int>(cc_),
2394 strict_ ? "true" : "false");
2395 }
2396 #endif
2397 };
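A sketch of the stub-key encoding in CompareStub::MinorKey() above: the condition code sits in the upper bits and the strict flag in bit 0, so each (cc, strict) pair yields a distinct 16-bit stub key. DecodeMinorKey is not in the original; it is added here only to illustrate the layout.

#include <cassert>

int MinorKey(int cc, bool strict) {
  assert(cc < (1 << 15));  // the condition must fit above the strict bit
  return (cc << 1) | (strict ? 1 : 0);
}

void DecodeMinorKey(int key, int* cc, bool* strict) {
  *strict = (key & 1) != 0;
  *cc = key >> 1;
}

int main() {
  int cc; bool strict;
  DecodeMinorKey(MinorKey(4, true), &cc, &strict);
  assert(cc == 4 && strict);
  return 0;
}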
2398
2399
2400 void CodeGenerator::Comparison(Condition cc,
2401 bool strict,
2402 ControlDestination* dest) {
2403 // Strict only makes sense for equality comparisons.
2404 ASSERT(!strict || cc == equal);
2405
2406 Result left_side;
2407 Result right_side;
2408 // Implement '>' and '<=' by reversal to obtain ECMA-262 conversion order.
2409 if (cc == greater || cc == less_equal) {
2410 cc = ReverseCondition(cc);
2411 left_side = frame_->Pop();
2412 right_side = frame_->Pop();
2413 } else {
2414 right_side = frame_->Pop();
2415 left_side = frame_->Pop();
2416 }
2417 ASSERT(cc == less || cc == equal || cc == greater_equal);
2418
2419 // If either side is a constant smi, optimize the comparison.
2420 bool left_side_constant_smi =
2421 left_side.is_constant() && left_side.handle()->IsSmi();
2422 bool right_side_constant_smi =
2423 right_side.is_constant() && right_side.handle()->IsSmi();
2424 bool left_side_constant_null =
2425 left_side.is_constant() && left_side.handle()->IsNull();
2426 bool right_side_constant_null =
2427 right_side.is_constant() && right_side.handle()->IsNull();
2428
2429 if (left_side_constant_smi || right_side_constant_smi) {
2430 if (left_side_constant_smi && right_side_constant_smi) {
2431 // Trivial case, comparing two constants.
2432 int left_value = Smi::cast(*left_side.handle())->value();
2433 int right_value = Smi::cast(*right_side.handle())->value();
2434 switch (cc) {
2435 case less:
2436 dest->Goto(left_value < right_value);
2437 break;
2438 case equal:
2439 dest->Goto(left_value == right_value);
2440 break;
2441 case greater_equal:
2442 dest->Goto(left_value >= right_value);
2443 break;
2444 default:
2445 UNREACHABLE();
2446 }
2447 } else { // Only one side is a constant Smi.
2448 // If left side is a constant Smi, reverse the operands.
2449 // Since one side is a constant Smi, conversion order does not matter.
2450 if (left_side_constant_smi) {
2451 Result temp = left_side;
2452 left_side = right_side;
2453 right_side = temp;
2454 cc = ReverseCondition(cc);
2455 // This may reintroduce greater or less_equal as the value of cc.
2456 // CompareStub and the inline code both support all values of cc.
2457 }
2458 // Implement comparison against a constant Smi, inlining the case
2459 // where both sides are Smis.
2460 left_side.ToRegister();
2461
2462 // Here we split control flow to the stub call and inlined cases
2463 // before finally splitting it to the control destination. We use
2464 // a jump target and branching to duplicate the virtual frame at
2465 // the first split. We manually handle the off-frame references
2466 // by reconstituting them on the non-fall-through path.
2467 JumpTarget is_smi;
2468 Register left_reg = left_side.reg();
2469 Handle<Object> right_val = right_side.handle();
2470 __ testl(left_side.reg(), Immediate(kSmiTagMask));
2471 is_smi.Branch(zero, taken);
2472
2473 // Set up and call the compare stub.
2474 CompareStub stub(cc, strict);
2475 Result result = frame_->CallStub(&stub, &left_side, &right_side);
2476 result.ToRegister();
2477 __ cmpq(result.reg(), Immediate(0));
2478 result.Unuse();
2479 dest->true_target()->Branch(cc);
2480 dest->false_target()->Jump();
2481
2482 is_smi.Bind();
2483 left_side = Result(left_reg);
2484 right_side = Result(right_val);
2485 // Test smi equality and order by signed integer comparison.
2486 if (IsUnsafeSmi(right_side.handle())) {
2487 right_side.ToRegister();
2488 __ cmpq(left_side.reg(), right_side.reg());
2489 } else {
2490 __ Cmp(left_side.reg(), right_side.handle());
2491 }
2492 left_side.Unuse();
2493 right_side.Unuse();
2494 dest->Split(cc);
2495 }
2496 } else if (cc == equal &&
2497 (left_side_constant_null || right_side_constant_null)) {
2498 // To make null checks efficient, we check if either the left side or
2499 // the right side is the constant 'null'.
2500 // If so, we optimize the code by inlining a null check instead of
2501 // calling the (very) general runtime routine for checking equality.
2502 Result operand = left_side_constant_null ? right_side : left_side;
2503 right_side.Unuse();
2504 left_side.Unuse();
2505 operand.ToRegister();
2506 __ Cmp(operand.reg(), Factory::null_value());
2507 if (strict) {
2508 operand.Unuse();
2509 dest->Split(equal);
2510 } else {
2511 // The 'null' value is only equal to 'undefined' if using non-strict
2512 // comparisons.
2513 dest->true_target()->Branch(equal);
2514 __ Cmp(operand.reg(), Factory::undefined_value());
2515 dest->true_target()->Branch(equal);
2516 __ testl(operand.reg(), Immediate(kSmiTagMask));
2517 dest->false_target()->Branch(equal);
2518
2519 // It can be an undetectable object.
2520 // Use a scratch register in preference to spilling operand.reg().
2521 Result temp = allocator()->Allocate();
2522 ASSERT(temp.is_valid());
2523 __ movq(temp.reg(),
2524 FieldOperand(operand.reg(), HeapObject::kMapOffset));
2525 __ testb(FieldOperand(temp.reg(), Map::kBitFieldOffset),
2526 Immediate(1 << Map::kIsUndetectable));
2527 temp.Unuse();
2528 operand.Unuse();
2529 dest->Split(not_zero);
2530 }
2531 } else { // Neither side is a constant Smi or null.
2532 // If either side is a non-smi constant, skip the smi check.
2533 bool known_non_smi =
2534 (left_side.is_constant() && !left_side.handle()->IsSmi()) ||
2535 (right_side.is_constant() && !right_side.handle()->IsSmi());
2536 left_side.ToRegister();
2537 right_side.ToRegister();
2538
2539 if (known_non_smi) {
2540 // When non-smi, call out to the compare stub.
2541 CompareStub stub(cc, strict);
2542 Result answer = frame_->CallStub(&stub, &left_side, &right_side);
2543 if (cc == equal) {
2544 __ testq(answer.reg(), answer.reg());
2545 } else {
2546 __ cmpq(answer.reg(), Immediate(0));
2547 }
2548 answer.Unuse();
2549 dest->Split(cc);
2550 } else {
2551 // Here we split control flow to the stub call and inlined cases
2552 // before finally splitting it to the control destination. We use
2553 // a jump target and branching to duplicate the virtual frame at
2554 // the first split. We manually handle the off-frame references
2555 // by reconstituting them on the non-fall-through path.
2556 JumpTarget is_smi;
2557 Register left_reg = left_side.reg();
2558 Register right_reg = right_side.reg();
2559
2560 __ movq(kScratchRegister, left_side.reg());
2561 __ or_(kScratchRegister, right_side.reg());
2562 __ testl(kScratchRegister, Immediate(kSmiTagMask));
2563 is_smi.Branch(zero, taken);
2564 // When non-smi, call out to the compare stub.
2565 CompareStub stub(cc, strict);
2566 Result answer = frame_->CallStub(&stub, &left_side, &right_side);
2567 if (cc == equal) {
2568 __ testq(answer.reg(), answer.reg());
2569 } else {
2570 __ cmpq(answer.reg(), Immediate(0));
2571 }
2572 answer.Unuse();
2573 dest->true_target()->Branch(cc);
2574 dest->false_target()->Jump();
2575
2576 is_smi.Bind();
2577 left_side = Result(left_reg);
2578 right_side = Result(right_reg);
2579 __ cmpq(left_side.reg(), right_side.reg());
2580 right_side.Unuse();
2581 left_side.Unuse();
2582 dest->Split(cc);
2583 }
2584 }
2585 }
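The reversal at the top of Comparison() is what lets the rest of the function assert cc is only less, equal, or greater_equal: 'a > b' is compiled as 'b < a' and 'a <= b' as 'b >= a' by popping the operands in swapped order, so operand evaluation (and hence ECMA-262 ToPrimitive conversion) stays left-to-right. A sketch under assumed, simplified names (V8's real Condition enum and ReverseCondition live in the assembler):

#include <cassert>

enum Condition { less, equal, greater_equal, greater, less_equal };

Condition ReverseCondition(Condition cc) {
  switch (cc) {
    case greater:       return less;           // a > b   <=>  b < a
    case less_equal:    return greater_equal;  // a <= b  <=>  b >= a
    case less:          return greater;
    case greater_equal: return less_equal;
    default:            return cc;             // equality is symmetric
  }
}

int main() {
  assert(ReverseCondition(greater) == less);
  assert(ReverseCondition(less_equal) == greater_equal);
  return 0;
}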
2586
2587
2201 // Flag that indicates whether or not the code that handles smi arguments 2588 // Flag that indicates whether or not the code that handles smi arguments
2202 // should be placed in the stub, inlined, or omitted entirely. 2589 // should be placed in the stub, inlined, or omitted entirely.
2203 enum GenericBinaryFlags { 2590 enum GenericBinaryFlags {
2204 SMI_CODE_IN_STUB, 2591 SMI_CODE_IN_STUB,
2205 SMI_CODE_INLINED 2592 SMI_CODE_INLINED
2206 }; 2593 };
2207 2594
2208 2595
2209 class FloatingPointHelper : public AllStatic { 2596 class FloatingPointHelper : public AllStatic {
2210 public: 2597 public:
(...skipping 1096 matching lines...)
3307 3694
3308 3695
3309 3696
3310 3697
3311 // End of CodeGenerator implementation. 3698 // End of CodeGenerator implementation.
3312 3699
3313 void UnarySubStub::Generate(MacroAssembler* masm) { 3700 void UnarySubStub::Generate(MacroAssembler* masm) {
3314 UNIMPLEMENTED(); 3701 UNIMPLEMENTED();
3315 } 3702 }
3316 3703
3317 class CompareStub: public CodeStub { 3704
3318 public: 3705 void CompareStub::Generate(MacroAssembler* masm) {
3319 CompareStub(Condition cc, bool strict) : cc_(cc), strict_(strict) { } 3706 Label call_builtin, done;
3320 3707
3321 void Generate(MacroAssembler* masm); 3708 // NOTICE! This code is only reached after a smi-fast-case check, so
3322 3709 // it is certain that at least one operand isn't a smi.
3323 private: 3710
3324 Condition cc_; 3711 if (cc_ == equal) { // Both strict and non-strict.
3325 bool strict_; 3712 Label slow; // Fallthrough label.
3326 3713 // Equality is almost reflexive (everything but NaN), so start by testing
3327 Major MajorKey() { return Compare; } 3714 // for "identity and not NaN".
3328 3715 {
3329 int MinorKey() { 3716 Label not_identical;
3330 // Encode the three parameters in a unique 16 bit value. 3717 __ cmpq(rax, rdx);
3331 ASSERT(static_cast<int>(cc_) < (1 << 15)); 3718 __ j(not_equal, &not_identical);
3332 return (static_cast<int>(cc_) << 1) | (strict_ ? 1 : 0); 3719 // Test for NaN. Sadly, we can't just compare to Factory::nan_value(),
3720 // so we do the second best thing - test it ourselves.
3721
3722 Label return_equal;
3723 Label heap_number;
3724 // If it's not a heap number, then return equal.
3725 __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
3726 Factory::heap_number_map());
3727 __ j(equal, &heap_number);
3728 __ bind(&return_equal);
3729 __ xor_(rax, rax);
3730 __ ret(0);
3731
3732 __ bind(&heap_number);
3733 // It is a heap number, so return non-equal if it's NaN and equal if it's
3734 // not NaN.
3735 // The representation of NaN values has all exponent bits (52..62) set,
3736 // and not all mantissa bits (0..51) clear.
3737 // Read double representation into rax.
3738 __ movq(rbx, 0x7ff0000000000000, RelocInfo::NONE);
3739 __ movq(rax, FieldOperand(rdx, HeapNumber::kValueOffset));
3740 // Test that exponent bits are all set.
3741 __ or_(rbx, rax);
3742 __ cmpq(rbx, rax);
3743 __ j(not_equal, &return_equal);
3744 // Shift out sign and all exponent bits, retaining only the mantissa.
3745 __ shl(rax, Immediate(12));
3746 // If all bits in the mantissa are zero the number is Infinity, and
3747 // we return zero. Otherwise it is a NaN, and we return non-zero.
3748 // So just return rax.
3749 __ ret(0);
3750
3751 __ bind(&not_identical);
3752 }
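The identity path above returns "equal" unless the operand is a heap number holding NaN, which it detects by inspecting the IEEE 754 bit pattern directly. A minimal C++ sketch of the same test (the shift by 12 mirrors '__ shl(rax, Immediate(12))'):

#include <cstdint>
#include <cstring>
#include <cassert>
#include <cmath>

// A double is NaN iff all eleven exponent bits (62..52) are set AND the
// 52-bit mantissa is not all zero (a zero mantissa with a set exponent
// is +/-Infinity).
bool IsNaNBits(double d) {
  uint64_t bits;
  std::memcpy(&bits, &d, sizeof(bits));
  const uint64_t kExponentMask = 0x7ff0000000000000ull;
  // Mirrors 'or_(rbx, rax); cmpq(rbx, rax)': ORing in the exponent mask
  // changes nothing iff the exponent bits were already all set.
  if ((bits | kExponentMask) != bits) return false;
  // Drop sign and exponent, keeping only the mantissa; nonzero means NaN.
  return (bits << 12) != 0;
}

int main() {
  assert(IsNaNBits(std::nan("")));
  assert(!IsNaNBits(INFINITY));
  assert(!IsNaNBits(1.5));
  return 0;
}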
3753
3754 // If we're doing a strict equality comparison, we don't have to do
3755 // type conversion, so we generate code to do fast comparison for objects
3756 // and oddballs. Non-smi numbers and strings still go through the usual
3757 // slow-case code.
3758 if (strict_) {
3759 // If either is a Smi (we know that not both are), then they can only
3760 // be equal if the other is a HeapNumber. If so, use the slow case.
3761 {
3762 Label not_smis;
3763 ASSERT_EQ(0, kSmiTag);
3764 ASSERT_EQ(0, Smi::FromInt(0));
3765 __ movq(rcx, Immediate(kSmiTagMask));
3766 __ and_(rcx, rax);
3767 __ testq(rcx, rdx);
3768 __ j(not_zero, &not_smis);
3769 // One operand is a smi.
3770
3771 // Check whether the non-smi is a heap number.
3772 ASSERT_EQ(1, kSmiTagMask);
3773 // rcx still holds rax & kSmiTagMask, which is either zero or one.
3774 __ decq(rcx); // If rax is a smi, all 1s, else all 0s.
3775 __ movq(rbx, rdx);
3776 __ xor_(rbx, rax);
3777 __ and_(rbx, rcx); // rbx holds either 0 or rax ^ rdx.
3778 __ xor_(rbx, rax);
3779 // If rax was a smi, rbx is now rdx, else rax.
3780
3781 // Check if the non-smi operand is a heap number.
3782 __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset),
3783 Factory::heap_number_map());
3784 // If heap number, handle it in the slow case.
3785 __ j(equal, &slow);
3786 // Return non-equal (rbx is not zero).
3787 __ movq(rax, rbx);
3788 __ ret(0);
3789
3790 __ bind(&not_smis);
3791 }
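The decq/xor/and/xor sequence just above is a branchless select: knowing that exactly one operand is a smi, it moves whichever of rax/rdx is the heap object into rbx without a jump. A C++ sketch of the same trick, assuming the 1-bit smi tag:

#include <cstdint>
#include <cassert>

// mask = (a & 1) - 1 is all ones when a is a smi (low bit 0) and zero
// otherwise, so ((b ^ a) & mask) ^ a yields b when a is the smi, else a.
intptr_t SelectNonSmi(intptr_t a, intptr_t b) {
  intptr_t mask = (a & 1) - 1;  // a smi -> ~0; a heap object -> 0
  return ((b ^ a) & mask) ^ a;
}

int main() {
  intptr_t smi = 42 << 1;        // tagged smi
  intptr_t obj = 0x1000 | 1;     // pretend heap pointer with tag bit set
  assert(SelectNonSmi(smi, obj) == obj);  // a is the smi -> take b
  assert(SelectNonSmi(obj, smi) == obj);  // a is the object -> keep a
  return 0;
}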
3792
3793 // If either operand is a JSObject or an oddball value, then they are not
3794 // equal, since their pointers are different.
3795 // There is no test for undetectability in strict equality.
3796
3797 // If the first object is a JS object, we have done pointer comparison.
3798 ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
3799 Label first_non_object;
3800 __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rcx);
3801 __ j(below, &first_non_object);
3802 // Return non-zero (rax is not zero).
3803 Label return_not_equal;
3804 ASSERT(kHeapObjectTag != 0);
3805 __ bind(&return_not_equal);
3806 __ ret(0);
3807
3808 __ bind(&first_non_object);
3809 // Check for oddballs: true, false, null, undefined.
3810 __ CmpInstanceType(rcx, ODDBALL_TYPE);
3811 __ j(equal, &return_not_equal);
3812
3813 __ CmpObjectType(rdx, FIRST_JS_OBJECT_TYPE, rcx);
3814 __ j(above_equal, &return_not_equal);
3815
3816 // Check for oddballs: true, false, null, undefined.
3817 __ CmpInstanceType(rcx, ODDBALL_TYPE);
3818 __ j(equal, &return_not_equal);
3819
3820 // Fall through to the general case.
3821 }
3822 __ bind(&slow);
3333 } 3823 }
3334 3824
3335 #ifdef DEBUG 3825 // Push arguments below the return address.
3336 void Print() { 3826 __ pop(rcx);
3337 PrintF("CompareStub (cc %d), (strict %s)\n", 3827 __ push(rax);
3338 static_cast<int>(cc_), 3828 __ push(rdx);
3339 strict_ ? "true" : "false"); 3829 __ push(rcx);
3830
3831 // Inlined floating point compare.
3832 // Call builtin if operands are not floating point or smi.
3833 Label check_for_symbols;
3834 // TODO(X64): Implement floating point comparisons.
3835 __ int3();
3836
3837 // TODO(1243847): Use cmov below once CpuFeatures are properly hooked up.
3838 Label below_lbl, above_lbl;
3839 // Use rdx, rax to convert unsigned to signed comparison.
3840 __ j(below, &below_lbl);
3841 __ j(above, &above_lbl);
3842
3843 __ xor_(rax, rax); // equal
3844 __ ret(2 * kPointerSize);
3845
3846 __ bind(&below_lbl);
3847 __ movq(rax, Immediate(-1));
3848 __ ret(2 * kPointerSize);
3849
3850 __ bind(&above_lbl);
3851 __ movq(rax, Immediate(1));
3852 __ ret(2 * kPointerSize); // rax, rdx were pushed
3853
3854 // Fast negative check for symbol-to-symbol equality.
3855 __ bind(&check_for_symbols);
3856 if (cc_ == equal) {
3857 BranchIfNonSymbol(masm, &call_builtin, rax);
3858 BranchIfNonSymbol(masm, &call_builtin, rdx);
3859
3860 // We've already checked for object identity, so if both operands
3861 // are symbols they aren't equal. Register rax already holds a
3862 // non-zero value, which indicates not equal, so just return.
3863 __ ret(2 * kPointerSize);
3340 } 3864 }
3341 #endif 3865
3342 }; 3866 __ bind(&call_builtin);
3867 // Must swap argument order.
3344 3868 __ pop(rcx);
3345 void CompareStub::Generate(MacroAssembler* masm) { 3869 __ pop(rdx);
3870 __ pop(rax);
3871 __ push(rdx);
3872 __ push(rax);
3873
3874 // Figure out which native to call and set up the arguments.
3875 Builtins::JavaScript builtin;
3876 if (cc_ == equal) {
3877 builtin = strict_ ? Builtins::STRICT_EQUALS : Builtins::EQUALS;
3878 } else {
3879 builtin = Builtins::COMPARE;
3880 int ncr; // NaN compare result
3881 if (cc_ == less || cc_ == less_equal) {
3882 ncr = GREATER;
3883 } else {
3884 ASSERT(cc_ == greater || cc_ == greater_equal); // remaining cases
3885 ncr = LESS;
3886 }
3887 __ push(Immediate(Smi::FromInt(ncr)));
3888 }
3889
3890 // Restore return address on the stack.
3891 __ push(rcx);
3892
3893 // Call the native; it returns -1 (less), 0 (equal), or 1 (greater)
3894 // tagged as a small integer.
3895 __ InvokeBuiltin(builtin, JUMP_FUNCTION);
3896 }
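The 'ncr' smi pushed for the relational cases above is a NaN-compare-result hint: the COMPARE builtin returns a value whose sign encodes the ordering, and when an operand is NaN it returns the hint instead, chosen so every relational test involving NaN comes out false, as ECMA-262 requires. A sketch, assuming LESS/GREATER are -1/+1:

#include <cassert>

enum { LESS = -1, EQUAL = 0, GREATER = 1 };

bool EvaluateRelation(int compare_result, const char* op) {
  if (op[0] == '<') return op[1] == '=' ? compare_result <= 0
                                        : compare_result < 0;
  return op[1] == '=' ? compare_result >= 0 : compare_result > 0;
}

int main() {
  // For '<' and '<=' the hint is GREATER, so a NaN comparison is false;
  // for '>' and '>=' the hint is LESS, with the same effect.
  assert(!EvaluateRelation(GREATER, "<"));
  assert(!EvaluateRelation(GREATER, "<="));
  assert(!EvaluateRelation(LESS, ">"));
  assert(!EvaluateRelation(LESS, ">="));
  return 0;
}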
3897
3898
3899 void CompareStub::BranchIfNonSymbol(MacroAssembler* masm,
3900 Label* label,
3901 Register object) {
3902 __ testl(object, Immediate(kSmiTagMask));
3903 __ j(zero, label);
3904 __ movq(kScratchRegister, FieldOperand(object, HeapObject::kMapOffset));
3905 __ movzxbq(kScratchRegister,
3906 FieldOperand(kScratchRegister, Map::kInstanceTypeOffset));
3907 __ and_(kScratchRegister, Immediate(kIsSymbolMask | kIsNotStringMask));
3908 __ cmpb(kScratchRegister, Immediate(kSymbolTag | kStringTag));
3909 __ j(not_equal, label);
3346 } 3910 }
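BranchIfNonSymbol folds two checks into a single masked compare: the instance-type byte carries both a string/non-string bit and a symbol bit, so one AND plus one compare verifies "is a string" and "is a symbol" together. A sketch with illustrative constant values (the bit positions here are assumptions, not V8's actual encoding):

#include <cassert>
#include <cstdint>

const uint8_t kIsNotStringMask = 0x80;  // assumed: set for non-strings
const uint8_t kStringTag       = 0x00;  // strings have the bit clear
const uint8_t kIsSymbolMask    = 0x40;  // assumed symbol bit
const uint8_t kSymbolTag       = 0x40;

bool IsSymbolType(uint8_t instance_type) {
  // Mirrors: and_(reg, Immediate(kIsSymbolMask | kIsNotStringMask));
  //          cmpb(reg, Immediate(kSymbolTag | kStringTag));
  return (instance_type & (kIsSymbolMask | kIsNotStringMask)) ==
         (kSymbolTag | kStringTag);
}

int main() {
  assert(IsSymbolType(0x40));   // a symbol string type
  assert(!IsSymbolType(0x00));  // a non-symbol string
  assert(!IsSymbolType(0x80));  // not a string at all
  return 0;
}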
3347 3911
3348 3912
3349 void StackCheckStub::Generate(MacroAssembler* masm) { 3913 void StackCheckStub::Generate(MacroAssembler* masm) {
3350 } 3914 }
3351 3915
3352 3916
3353 class CallFunctionStub: public CodeStub { 3917 class CallFunctionStub: public CodeStub {
3354 public: 3918 public:
3355 CallFunctionStub(int argc, InLoopFlag in_loop) 3919 CallFunctionStub(int argc, InLoopFlag in_loop)
(...skipping 1060 matching lines...)
4416 break; 4980 break;
4417 default: 4981 default:
4418 UNREACHABLE(); 4982 UNREACHABLE();
4419 } 4983 }
4420 } 4984 }
4421 4985
4422 4986
4423 #undef __ 4987 #undef __
4424 4988
4425 } } // namespace v8::internal 4989 } } // namespace v8::internal
