OLD | NEW |
1 // Copyright 2009 the V8 project authors. All rights reserved. | 1 // Copyright 2009 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 2441 matching lines...)
2452 } | 2452 } |
2453 | 2453 |
2454 // Resolve the call. | 2454 // Resolve the call. |
2455 Result result = | 2455 Result result = |
2456 frame_->CallRuntime(Runtime::kResolvePossiblyDirectEval, 2); | 2456 frame_->CallRuntime(Runtime::kResolvePossiblyDirectEval, 2); |
2457 | 2457 |
2458 // Touch up the stack with the right values for the function and the | 2458 // Touch up the stack with the right values for the function and the |
2459 // receiver. Use a scratch register to avoid destroying the result. | 2459 // receiver. Use a scratch register to avoid destroying the result. |
2460 Result scratch = allocator_->Allocate(); | 2460 Result scratch = allocator_->Allocate(); |
2461 ASSERT(scratch.is_valid()); | 2461 ASSERT(scratch.is_valid()); |
2462 __ movl(scratch.reg(), | 2462 __ movq(scratch.reg(), |
2463 FieldOperand(result.reg(), FixedArray::OffsetOfElementAt(0))); | 2463 FieldOperand(result.reg(), FixedArray::OffsetOfElementAt(0))); |
2464 frame_->SetElementAt(arg_count + 1, &scratch); | 2464 frame_->SetElementAt(arg_count + 1, &scratch); |
2465 | 2465 |
2466 // We can reuse the result register now. | 2466 // We can reuse the result register now. |
2467 frame_->Spill(result.reg()); | 2467 frame_->Spill(result.reg()); |
2468 __ movl(result.reg(), | 2468 __ movq(result.reg(), |
2469 FieldOperand(result.reg(), FixedArray::OffsetOfElementAt(1))); | 2469 FieldOperand(result.reg(), FixedArray::OffsetOfElementAt(1))); |
2470 frame_->SetElementAt(arg_count, &result); | 2470 frame_->SetElementAt(arg_count, &result); |
2471 | 2471 |
2472 // Call the function. | 2472 // Call the function. |
2473 CodeForSourcePosition(node->position()); | 2473 CodeForSourcePosition(node->position()); |
2474 InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP; | 2474 InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP; |
2475 CallFunctionStub call_function(arg_count, in_loop); | 2475 CallFunctionStub call_function(arg_count, in_loop); |
2476 result = frame_->CallStub(&call_function, arg_count + 1); | 2476 result = frame_->CallStub(&call_function, arg_count + 1); |
2477 | 2477 |
2478 // Restore the context and overwrite the function on the stack with | 2478 // Restore the context and overwrite the function on the stack with |
(...skipping 1830 matching lines...)
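The hunk above switches the FixedArray element loads from movl to movq: on x64 the array returned by Runtime::kResolvePossiblyDirectEval holds full 64-bit tagged pointers, so a 32-bit load would keep only the low half of each value. A minimal C++ sketch of that truncation (illustrative only, not V8 code):

    #include <cstdint>

    // Illustrative only: what a 32-bit (movl-style) load of a 64-bit tagged
    // pointer would produce -- the upper half of the address is lost.
    void* LoadLow32(void* full_pointer) {
      uint64_t bits = reinterpret_cast<uint64_t>(full_pointer);
      uint32_t low_half = static_cast<uint32_t>(bits);  // movl keeps this much
      return reinterpret_cast<void*>(static_cast<uint64_t>(low_half));
    }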
4309 result.ToRegister(); | 4309 result.ToRegister(); |
4310 __ testq(result.reg(), result.reg()); | 4310 __ testq(result.reg(), result.reg()); |
4311 result.Unuse(); | 4311 result.Unuse(); |
4312 dest->true_target()->Branch(cc); | 4312 dest->true_target()->Branch(cc); |
4313 dest->false_target()->Jump(); | 4313 dest->false_target()->Jump(); |
4314 | 4314 |
4315 is_smi.Bind(); | 4315 is_smi.Bind(); |
4316 left_side = Result(left_reg); | 4316 left_side = Result(left_reg); |
4317 right_side = Result(right_val); | 4317 right_side = Result(right_val); |
4318 // Test smi equality and comparison by signed int comparison. | 4318 // Test smi equality and comparison by signed int comparison. |
4319 if (IsUnsafeSmi(right_side.handle())) { | 4319 // Both sides are smis, so we can use an Immediate. |
4320 right_side.ToRegister(); | 4320 __ cmpl(left_side.reg(), Immediate(Smi::cast(*right_side.handle()))); |
4321 __ cmpq(left_side.reg(), right_side.reg()); | |
4322 } else { | |
4323 __ Cmp(left_side.reg(), right_side.handle()); | |
4324 } | |
4325 left_side.Unuse(); | 4321 left_side.Unuse(); |
4326 right_side.Unuse(); | 4322 right_side.Unuse(); |
4327 dest->Split(cc); | 4323 dest->Split(cc); |
4328 } | 4324 } |
4329 } else if (cc == equal && | 4325 } else if (cc == equal && |
4330 (left_side_constant_null || right_side_constant_null)) { | 4326 (left_side_constant_null || right_side_constant_null)) { |
4331 // To make null checks efficient, we check if either the left side or | 4327 // To make null checks efficient, we check if either the left side or |
4332 // the right side is the constant 'null'. | 4328 // the right side is the constant 'null'. |
4333 // If so, we optimize the code by inlining a null check instead of | 4329 // If so, we optimize the code by inlining a null check instead of |
4334 // calling the (very) general runtime routine for checking equality. | 4330 // calling the (very) general runtime routine for checking equality. |
(...skipping 31 matching lines...)
4366 bool known_non_smi = | 4362 bool known_non_smi = |
4367 (left_side.is_constant() && !left_side.handle()->IsSmi()) || | 4363 (left_side.is_constant() && !left_side.handle()->IsSmi()) || |
4368 (right_side.is_constant() && !right_side.handle()->IsSmi()); | 4364 (right_side.is_constant() && !right_side.handle()->IsSmi()); |
4369 left_side.ToRegister(); | 4365 left_side.ToRegister(); |
4370 right_side.ToRegister(); | 4366 right_side.ToRegister(); |
4371 | 4367 |
4372 if (known_non_smi) { | 4368 if (known_non_smi) { |
4373 // When non-smi, call out to the compare stub. | 4369 // When non-smi, call out to the compare stub. |
4374 CompareStub stub(cc, strict); | 4370 CompareStub stub(cc, strict); |
4375 Result answer = frame_->CallStub(&stub, &left_side, &right_side); | 4371 Result answer = frame_->CallStub(&stub, &left_side, &right_side); |
4376 __ testq(answer.reg(), answer.reg()); // Both zero and sign flag right. | 4372 // The result is a Smi, which is negative, zero, or positive. |
| 4373 __ testl(answer.reg(), answer.reg()); // Both zero and sign flag right. |
4377 answer.Unuse(); | 4374 answer.Unuse(); |
4378 dest->Split(cc); | 4375 dest->Split(cc); |
4379 } else { | 4376 } else { |
4380 // Here we split control flow to the stub call and inlined cases | 4377 // Here we split control flow to the stub call and inlined cases |
4381 // before finally splitting it to the control destination. We use | 4378 // before finally splitting it to the control destination. We use |
4382 // a jump target and branching to duplicate the virtual frame at | 4379 // a jump target and branching to duplicate the virtual frame at |
4383 // the first split. We manually handle the off-frame references | 4380 // the first split. We manually handle the off-frame references |
4384 // by reconstituting them on the non-fall-through path. | 4381 // by reconstituting them on the non-fall-through path. |
4385 JumpTarget is_smi; | 4382 JumpTarget is_smi; |
4386 Register left_reg = left_side.reg(); | 4383 Register left_reg = left_side.reg(); |
4387 Register right_reg = right_side.reg(); | 4384 Register right_reg = right_side.reg(); |
4388 | 4385 |
4389 __ movq(kScratchRegister, left_reg); | 4386 __ movq(kScratchRegister, left_reg); |
4390 __ or_(kScratchRegister, right_reg); | 4387 __ or_(kScratchRegister, right_reg); |
4391 __ testl(kScratchRegister, Immediate(kSmiTagMask)); | 4388 __ testl(kScratchRegister, Immediate(kSmiTagMask)); |
4392 is_smi.Branch(zero, taken); | 4389 is_smi.Branch(zero, taken); |
4393 // When non-smi, call out to the compare stub. | 4390 // When non-smi, call out to the compare stub. |
4394 CompareStub stub(cc, strict); | 4391 CompareStub stub(cc, strict); |
4395 Result answer = frame_->CallStub(&stub, &left_side, &right_side); | 4392 Result answer = frame_->CallStub(&stub, &left_side, &right_side); |
4396 if (cc == equal) { | 4393 __ testl(answer.reg(), answer.reg()); // Sets both zero and sign flags. |
4397 __ testq(answer.reg(), answer.reg()); | |
4398 } else { | |
4399 __ cmpq(answer.reg(), Immediate(0)); | |
4400 } | |
4401 answer.Unuse(); | 4394 answer.Unuse(); |
4402 dest->true_target()->Branch(cc); | 4395 dest->true_target()->Branch(cc); |
4403 dest->false_target()->Jump(); | 4396 dest->false_target()->Jump(); |
4404 | 4397 |
4405 is_smi.Bind(); | 4398 is_smi.Bind(); |
4406 left_side = Result(left_reg); | 4399 left_side = Result(left_reg); |
4407 right_side = Result(right_reg); | 4400 right_side = Result(right_reg); |
4408 __ cmpq(left_side.reg(), right_side.reg()); | 4401 __ cmpl(left_side.reg(), right_side.reg()); |
4409 right_side.Unuse(); | 4402 right_side.Unuse(); |
4410 left_side.Unuse(); | 4403 left_side.Unuse(); |
4411 dest->Split(cc); | 4404 dest->Split(cc); |
4412 } | 4405 } |
4413 } | 4406 } |
4414 } | 4407 } |
4415 | 4408 |
4416 | 4409 |
4417 // Flag that indicates whether or not the code that handles smi arguments | 4410 // Flag that indicates whether or not the code that handles smi arguments |
4418 // should be placed in the stub, inlined, or omitted entirely. | 4411 // should be placed in the stub, inlined, or omitted entirely. |
(...skipping 1020 matching lines...)
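The smi fast path in the hunk above checks both operands with a single test: it ORs them into kScratchRegister and tests kSmiTagMask, which works because smis carry tag 0 in the low bit, so the OR is still tagged as a smi only if both inputs are. A minimal C++ sketch of the same bit trick, using the usual V8 tag constants (the BothSmi helper itself is hypothetical):

    #include <cstdint>

    const intptr_t kSmiTag = 0;       // smis end in a 0 bit
    const intptr_t kSmiTagMask = 1;   // mask for the tag bit

    // Hypothetical helper mirroring the or_/testl(kSmiTagMask) sequence:
    // both values are smis exactly when the OR of their tag bits is zero.
    bool BothSmi(intptr_t a, intptr_t b) {
      return ((a | b) & kSmiTagMask) == kSmiTag;
    }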
5439 __ and_(rcx, Immediate(kStringSizeMask)); | 5432 __ and_(rcx, Immediate(kStringSizeMask)); |
5440 __ cmpq(rcx, Immediate(kShortStringTag)); | 5433 __ cmpq(rcx, Immediate(kShortStringTag)); |
5441 __ j(not_equal, &true_result); // Empty string is always short. | 5434 __ j(not_equal, &true_result); // Empty string is always short. |
5442 __ movq(rdx, FieldOperand(rax, String::kLengthOffset)); | 5435 __ movq(rdx, FieldOperand(rax, String::kLengthOffset)); |
5443 __ shr(rdx, Immediate(String::kShortLengthShift)); | 5436 __ shr(rdx, Immediate(String::kShortLengthShift)); |
5444 __ j(zero, &false_result); | 5437 __ j(zero, &false_result); |
5445 __ jmp(&true_result); | 5438 __ jmp(&true_result); |
5446 | 5439 |
5447 __ bind(&not_string); | 5440 __ bind(&not_string); |
5448 // HeapNumber => false iff +0, -0, or NaN. | 5441 // HeapNumber => false iff +0, -0, or NaN. |
| 5442 // These three cases set C3 when compared to zero in the FPU. |
5449 __ Cmp(rdx, Factory::heap_number_map()); | 5443 __ Cmp(rdx, Factory::heap_number_map()); |
5450 __ j(not_equal, &true_result); | 5444 __ j(not_equal, &true_result); |
5451 // TODO(x64): Don't use fp stack, use MMX registers? | 5445 // TODO(x64): Don't use fp stack, use MMX registers? |
5452 __ fldz(); // Load zero onto fp stack | 5446 __ fldz(); // Load zero onto fp stack |
5453 // Load heap-number double value onto fp stack | 5447 // Load heap-number double value onto fp stack |
5454 __ fld_d(FieldOperand(rax, HeapNumber::kValueOffset)); | 5448 __ fld_d(FieldOperand(rax, HeapNumber::kValueOffset)); |
5455 __ fucompp(); // Compare and pop both values. | 5449 __ fucompp(); // Compare and pop both values. |
5456 __ movq(kScratchRegister, rax); | 5450 __ movq(kScratchRegister, rax); |
5457 __ fnstsw_ax(); // Store fp status word in ax, no checking for exceptions. | 5451 __ fnstsw_ax(); // Store fp status word in ax, no checking for exceptions. |
5458 __ testb(rax, Immediate(0x08)); // Test FP condition flag C3. | 5452 __ testl(rax, Immediate(0x4000)); // Test FP condition flag C3 (bit 14). |
5459 __ movq(rax, kScratchRegister); | 5453 __ movq(rax, kScratchRegister); |
5460 __ j(zero, &false_result); | 5454 __ j(not_zero, &false_result); |
5461 // Fall through to |true_result|. | 5455 // Fall through to |true_result|. |
5462 | 5456 |
5463 // Return 1/0 for true/false in rax. | 5457 // Return 1/0 for true/false in rax. |
5464 __ bind(&true_result); | 5458 __ bind(&true_result); |
5465 __ movq(rax, Immediate(1)); | 5459 __ movq(rax, Immediate(1)); |
5466 __ ret(1 * kPointerSize); | 5460 __ ret(1 * kPointerSize); |
5467 __ bind(&false_result); | 5461 __ bind(&false_result); |
5468 __ xor_(rax, rax); | 5462 __ xor_(rax, rax); |
5469 __ ret(1 * kPointerSize); | 5463 __ ret(1 * kPointerSize); |
5470 } | 5464 } |
(...skipping 149 matching lines...)
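The ToBoolean stub above relies on FUCOMPP setting C3 (bit 14 of the x87 status word, hence the 0x4000 mask) both when the compared values are equal and when they are unordered, so +0, -0 and NaN all take the false path. A hedged C++ sketch of the predicate the generated code implements:

    #include <cmath>

    // Sketch of the ToBoolean rule for heap numbers: the value converts to
    // false exactly when it is +0, -0 or NaN.  The stub reads the same
    // answer from the x87 C3 flag after comparing against 0.0.
    bool HeapNumberToBoolean(double value) {
      return !(value == 0.0 || std::isnan(value));
    }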
5620 __ movq(rbx, 0x7ff0000000000000, RelocInfo::NONE); | 5614 __ movq(rbx, 0x7ff0000000000000, RelocInfo::NONE); |
5621 __ movq(rax, FieldOperand(rdx, HeapNumber::kValueOffset)); | 5615 __ movq(rax, FieldOperand(rdx, HeapNumber::kValueOffset)); |
5622 // Test that exponent bits are all set. | 5616 // Test that exponent bits are all set. |
5623 __ or_(rbx, rax); | 5617 __ or_(rbx, rax); |
5624 __ cmpq(rbx, rax); | 5618 __ cmpq(rbx, rax); |
5625 __ j(not_equal, &return_equal); | 5619 __ j(not_equal, &return_equal); |
5626 // Shift out flag and all exponent bits, retaining only mantissa. | 5620 // Shift out flag and all exponent bits, retaining only mantissa. |
5627 __ shl(rax, Immediate(12)); | 5621 __ shl(rax, Immediate(12)); |
5628 // If all bits in the mantissa are zero the number is Infinity, and | 5622 // If all bits in the mantissa are zero the number is Infinity, and |
5629 // we return zero. Otherwise it is a NaN, and we return non-zero. | 5623 // we return zero. Otherwise it is a NaN, and we return non-zero. |
5630 // So just return rax. | 5624 // We cannot just return rax because only eax is tested on return. |
| 5625 // TODO(X64): Solve this using movcc, when implemented. |
| 5626 __ movq(kScratchRegister, rax); |
| 5627 __ shr(kScratchRegister, Immediate(32)); |
| 5628 __ or_(rax, kScratchRegister); |
5631 __ ret(0); | 5629 __ ret(0); |
5632 | 5630 |
5633 __ bind(&not_identical); | 5631 __ bind(&not_identical); |
5634 } | 5632 } |
5635 | 5633 |
5636 // If we're doing a strict equality comparison, we don't have to do | 5634 // If we're doing a strict equality comparison, we don't have to do |
5637 // type conversion, so we generate code to do fast comparison for objects | 5635 // type conversion, so we generate code to do fast comparison for objects |
5638 // and oddballs. Non-smi numbers and strings still go through the usual | 5636 // and oddballs. Non-smi numbers and strings still go through the usual |
5639 // slow-case code. | 5637 // slow-case code. |
5640 if (strict_) { | 5638 if (strict_) { |
(...skipping 17 matching lines...)
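The identical-operands hunk above tells NaN apart from Infinity by checking that all exponent bits are set and then shifting the sign and exponent (12 bits) out of rax, leaving only the mantissa; the added shr/or folds the upper 32 bits into eax because callers only test the 32-bit result. A C++ sketch of the bit test on the raw IEEE-754 encoding (the mask name is mine):

    #include <cstdint>
    #include <cstring>

    // NaN: exponent all ones and a non-zero mantissa.  Exponent all ones
    // with a zero mantissa is +/-Infinity, which compares equal to itself.
    bool IsNaNBits(double value) {
      uint64_t bits;
      std::memcpy(&bits, &value, sizeof(bits));
      const uint64_t kExponentMask = 0x7ff0000000000000ULL;
      if ((bits & kExponentMask) != kExponentMask) return false;  // finite
      return (bits << 12) != 0;  // drop sign + exponent, test the mantissa
    }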
5658 __ xor_(rbx, rax); | 5656 __ xor_(rbx, rax); |
5659 __ and_(rbx, rcx); // rbx holds either 0 or rax ^ rdx. | 5657 __ and_(rbx, rcx); // rbx holds either 0 or rax ^ rdx. |
5660 __ xor_(rbx, rax); | 5658 __ xor_(rbx, rax); |
5661 // if rax was smi, rbx is now rdx, else rax. | 5659 // if rax was smi, rbx is now rdx, else rax. |
5662 | 5660 |
5663 // Check if the non-smi operand is a heap number. | 5661 // Check if the non-smi operand is a heap number. |
5664 __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset), | 5662 __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset), |
5665 Factory::heap_number_map()); | 5663 Factory::heap_number_map()); |
5666 // If heap number, handle it in the slow case. | 5664 // If heap number, handle it in the slow case. |
5667 __ j(equal, &slow); | 5665 __ j(equal, &slow); |
5668 // Return non-equal (ebx is not zero) | 5666 // Return non-equal. ebx (the lower half of rbx) is not zero. |
5669 __ movq(rax, rbx); | 5667 __ movq(rax, rbx); |
5670 __ ret(0); | 5668 __ ret(0); |
5671 | 5669 |
5672 __ bind(&not_smis); | 5670 __ bind(&not_smis); |
5673 } | 5671 } |
5674 | 5672 |
5675 // If either operand is a JSObject or an oddball value, then they are not | 5673 // If either operand is a JSObject or an oddball value, then they are not |
5676 // equal since their pointers are different | 5674 // equal since their pointers are different |
5677 // There is no test for undetectability in strict equality. | 5675 // There is no test for undetectability in strict equality. |
5678 | 5676 |
5679 // If the first object is a JS object, we have done pointer comparison. | 5677 // If the first object is a JS object, we have done pointer comparison. |
5680 ASSERT(LAST_TYPE == JS_FUNCTION_TYPE); | 5678 ASSERT(LAST_TYPE == JS_FUNCTION_TYPE); |
5681 Label first_non_object; | 5679 Label first_non_object; |
5682 __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rcx); | 5680 __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rcx); |
5683 __ j(below, &first_non_object); | 5681 __ j(below, &first_non_object); |
5684 // Return non-zero (rax is not zero) | 5682 // Return non-zero (eax (not rax) is not zero) |
5685 Label return_not_equal; | 5683 Label return_not_equal; |
5686 ASSERT(kHeapObjectTag != 0); | 5684 ASSERT(kHeapObjectTag != 0); |
5687 __ bind(&return_not_equal); | 5685 __ bind(&return_not_equal); |
5688 __ ret(0); | 5686 __ ret(0); |
5689 | 5687 |
5690 __ bind(&first_non_object); | 5688 __ bind(&first_non_object); |
5691 // Check for oddballs: true, false, null, undefined. | 5689 // Check for oddballs: true, false, null, undefined. |
5692 __ CmpInstanceType(rcx, ODDBALL_TYPE); | 5690 __ CmpInstanceType(rcx, ODDBALL_TYPE); |
5693 __ j(equal, &return_not_equal); | 5691 __ j(equal, &return_not_equal); |
5694 | 5692 |
(...skipping 43 matching lines...)
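The xor_/and_/xor_ sequence above is the classic branchless select r = a ^ ((a ^ b) & mask): going by the surrounding comments, rcx holds either zero or an all-ones mask (set up in the skipped lines), so rbx ends up holding whichever operand is not a smi without a branch. A minimal C++ sketch of the idiom:

    #include <cstdint>

    // Branchless select: yields b when mask is all ones, a when mask is zero.
    // This mirrors how the stub leaves the non-smi operand in rbx.
    uint64_t SelectByMask(uint64_t a, uint64_t b, uint64_t mask) {
      return a ^ ((a ^ b) & mask);
    }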
5738 __ movq(rax, Immediate(1)); | 5736 __ movq(rax, Immediate(1)); |
5739 __ ret(2 * kPointerSize); // rax, rdx were pushed | 5737 __ ret(2 * kPointerSize); // rax, rdx were pushed |
5740 | 5738 |
5741 // Fast negative check for symbol-to-symbol equality. | 5739 // Fast negative check for symbol-to-symbol equality. |
5742 __ bind(&check_for_symbols); | 5740 __ bind(&check_for_symbols); |
5743 if (cc_ == equal) { | 5741 if (cc_ == equal) { |
5744 BranchIfNonSymbol(masm, &call_builtin, rax, kScratchRegister); | 5742 BranchIfNonSymbol(masm, &call_builtin, rax, kScratchRegister); |
5745 BranchIfNonSymbol(masm, &call_builtin, rdx, kScratchRegister); | 5743 BranchIfNonSymbol(masm, &call_builtin, rdx, kScratchRegister); |
5746 | 5744 |
5747 // We've already checked for object identity, so if both operands | 5745 // We've already checked for object identity, so if both operands |
5748 // are symbols they aren't equal. Register rax already holds a | 5746 // are symbols they aren't equal. Register eax (not rax) already holds a |
5749 // non-zero value, which indicates not equal, so just return. | 5747 // non-zero value, which indicates not equal, so just return. |
5750 __ ret(2 * kPointerSize); | 5748 __ ret(2 * kPointerSize); |
5751 } | 5749 } |
5752 | 5750 |
5753 __ bind(&call_builtin); | 5751 __ bind(&call_builtin); |
5754 // must swap argument order | 5752 // must swap argument order |
5755 __ pop(rcx); | 5753 __ pop(rcx); |
5756 __ pop(rdx); | 5754 __ pop(rdx); |
5757 __ pop(rax); | 5755 __ pop(rax); |
5758 __ push(rdx); | 5756 __ push(rdx); |
(...skipping 1176 matching lines...)
6935 int CompareStub::MinorKey() { | 6933 int CompareStub::MinorKey() { |
6936 // Encode the two parameters in a unique 16 bit value. | 6934 // Encode the two parameters in a unique 16 bit value. |
6937 ASSERT(static_cast<unsigned>(cc_) < (1 << 15)); | 6935 ASSERT(static_cast<unsigned>(cc_) < (1 << 15)); |
6938 return (static_cast<unsigned>(cc_) << 1) | (strict_ ? 1 : 0); | 6936 return (static_cast<unsigned>(cc_) << 1) | (strict_ ? 1 : 0); |
6939 } | 6937 } |
6940 | 6938 |
6941 | 6939 |
6942 #undef __ | 6940 #undef __ |
6943 | 6941 |
6944 } } // namespace v8::internal | 6942 } } // namespace v8::internal |
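CompareStub::MinorKey above packs the condition code into the upper bits and the strict flag into bit 0 of one small integer. A hedged sketch of the round trip; the decode helper is illustrative and not part of the V8 source:

    // Pack and unpack the (condition, strict) pair the way MinorKey does.
    inline int EncodeMinorKey(unsigned cc, bool strict) {
      return static_cast<int>((cc << 1) | (strict ? 1u : 0u));
    }

    inline void DecodeMinorKey(int key, unsigned* cc, bool* strict) {
      *strict = (key & 1) != 0;
      *cc = static_cast<unsigned>(key) >> 1;
    }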