OLD | NEW |
1 // Copyright 2010 the V8 project authors. All rights reserved. | 1 // Copyright 2010 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 2268 matching lines...)
2279 // where both sides are Smis. | 2279 // where both sides are Smis. |
2280 left_side.ToRegister(); | 2280 left_side.ToRegister(); |
2281 Register left_reg = left_side.reg(); | 2281 Register left_reg = left_side.reg(); |
2282 Handle<Object> right_val = right_side.handle(); | 2282 Handle<Object> right_val = right_side.handle(); |
2283 | 2283 |
2284 // Here we split control flow to the stub call and inlined cases | 2284 // Here we split control flow to the stub call and inlined cases |
2285 // before finally splitting it to the control destination. We use | 2285 // before finally splitting it to the control destination. We use |
2286 // a jump target and branching to duplicate the virtual frame at | 2286 // a jump target and branching to duplicate the virtual frame at |
2287 // the first split. We manually handle the off-frame references | 2287 // the first split. We manually handle the off-frame references |
2288 // by reconstituting them on the non-fall-through path. | 2288 // by reconstituting them on the non-fall-through path. |
| 2289 JumpTarget is_smi; |
| 2290 __ test(left_side.reg(), Immediate(kSmiTagMask)); |
| 2291 is_smi.Branch(zero, taken); |
2289 | 2292 |
2290 if (left_side.is_smi()) { | 2293 bool is_for_loop_compare = (node->AsCompareOperation() != NULL) |
2291 if (FLAG_debug_code) { | 2294 && node->AsCompareOperation()->is_for_loop_condition(); |
2292 __ AbortIfNotSmi(left_side.reg(), "Argument not a smi"); | 2295 if (!is_for_loop_compare |
| 2296 && CpuFeatures::IsSupported(SSE2) |
| 2297 && right_val->IsSmi()) { |
| 2298 // Right side is a constant smi and left side has been checked |
| 2299 // not to be a smi. |
| 2300 CpuFeatures::Scope use_sse2(SSE2); |
| 2301 JumpTarget not_number; |
| 2302 __ cmp(FieldOperand(left_reg, HeapObject::kMapOffset), |
| 2303 Immediate(Factory::heap_number_map())); |
| 2304 not_number.Branch(not_equal, &left_side); |
| 2305 __ movdbl(xmm1, |
| 2306 FieldOperand(left_reg, HeapNumber::kValueOffset)); |
| 2307 int value = Smi::cast(*right_val)->value(); |
| 2308 if (value == 0) { |
| 2309 __ xorpd(xmm0, xmm0); |
| 2310 } else { |
| 2311 Result temp = allocator()->Allocate(); |
| 2312 __ mov(temp.reg(), Immediate(value)); |
| 2313 __ cvtsi2sd(xmm0, Operand(temp.reg())); |
| 2314 temp.Unuse(); |
2293 } | 2315 } |
2294 } else { | 2316 __ comisd(xmm1, xmm0); |
2295 JumpTarget is_smi; | 2317 // Jump to builtin for NaN. |
2296 __ test(left_side.reg(), Immediate(kSmiTagMask)); | 2318 not_number.Branch(parity_even, &left_side); |
2297 is_smi.Branch(zero, taken); | 2319 left_side.Unuse(); |
2298 | 2320 Condition double_cc = cc; |
2299 bool is_for_loop_compare = (node->AsCompareOperation() != NULL) | 2321 switch (cc) { |
2300 && node->AsCompareOperation()->is_for_loop_condition(); | 2322 case less: double_cc = below; break; |
2301 if (!is_for_loop_compare | 2323 case equal: double_cc = equal; break; |
2302 && CpuFeatures::IsSupported(SSE2) | 2324 case less_equal: double_cc = below_equal; break; |
2303 && right_val->IsSmi()) { | 2325 case greater: double_cc = above; break; |
2304 // Right side is a constant smi and left side has been checked | 2326 case greater_equal: double_cc = above_equal; break; |
2305 // not to be a smi. | 2327 default: UNREACHABLE(); |
2306 CpuFeatures::Scope use_sse2(SSE2); | |
2307 JumpTarget not_number; | |
2308 __ cmp(FieldOperand(left_reg, HeapObject::kMapOffset), | |
2309 Immediate(Factory::heap_number_map())); | |
2310 not_number.Branch(not_equal, &left_side); | |
2311 __ movdbl(xmm1, | |
2312 FieldOperand(left_reg, HeapNumber::kValueOffset)); | |
2313 int value = Smi::cast(*right_val)->value(); | |
2314 if (value == 0) { | |
2315 __ xorpd(xmm0, xmm0); | |
2316 } else { | |
2317 Result temp = allocator()->Allocate(); | |
2318 __ mov(temp.reg(), Immediate(value)); | |
2319 __ cvtsi2sd(xmm0, Operand(temp.reg())); | |
2320 temp.Unuse(); | |
2321 } | |
2322 __ comisd(xmm1, xmm0); | |
2323 // Jump to builtin for NaN. | |
2324 not_number.Branch(parity_even, &left_side); | |
2325 left_side.Unuse(); | |
2326 Condition double_cc = cc; | |
2327 switch (cc) { | |
2328 case less: double_cc = below; break; | |
2329 case equal: double_cc = equal; break; | |
2330 case less_equal: double_cc = below_equal; break; | |
2331 case greater: double_cc = above; break; | |
2332 case greater_equal: double_cc = above_equal; break; | |
2333 default: UNREACHABLE(); | |
2334 } | |
2335 dest->true_target()->Branch(double_cc); | |
2336 dest->false_target()->Jump(); | |
2337 not_number.Bind(&left_side); | |
2338 } | 2328 } |
2339 | 2329 dest->true_target()->Branch(double_cc); |
2340 // Setup and call the compare stub. | |
2341 CompareStub stub(cc, strict, kCantBothBeNaN); | |
2342 Result result = frame_->CallStub(&stub, &left_side, &right_side); | |
2343 result.ToRegister(); | |
2344 __ cmp(result.reg(), 0); | |
2345 result.Unuse(); | |
2346 dest->true_target()->Branch(cc); | |
2347 dest->false_target()->Jump(); | 2330 dest->false_target()->Jump(); |
2348 | 2331 not_number.Bind(&left_side); |
2349 is_smi.Bind(); | |
2350 } | 2332 } |
2351 | 2333 |
| 2334 // Setup and call the compare stub. |
| 2335 CompareStub stub(cc, strict, kCantBothBeNaN); |
| 2336 Result result = frame_->CallStub(&stub, &left_side, &right_side); |
| 2337 result.ToRegister(); |
| 2338 __ cmp(result.reg(), 0); |
| 2339 result.Unuse(); |
| 2340 dest->true_target()->Branch(cc); |
| 2341 dest->false_target()->Jump(); |
| 2342 |
| 2343 is_smi.Bind(); |
2352 left_side = Result(left_reg); | 2344 left_side = Result(left_reg); |
2353 right_side = Result(right_val); | 2345 right_side = Result(right_val); |
2354 // Test smi equality and comparison by signed int comparison. | 2346 // Test smi equality and comparison by signed int comparison. |
2355 if (IsUnsafeSmi(right_side.handle())) { | 2347 if (IsUnsafeSmi(right_side.handle())) { |
2356 right_side.ToRegister(); | 2348 right_side.ToRegister(); |
2357 __ cmp(left_side.reg(), Operand(right_side.reg())); | 2349 __ cmp(left_side.reg(), Operand(right_side.reg())); |
2358 } else { | 2350 } else { |
2359 __ cmp(Operand(left_side.reg()), Immediate(right_side.handle())); | 2351 __ cmp(Operand(left_side.reg()), Immediate(right_side.handle())); |
2360 } | 2352 } |
2361 left_side.Unuse(); | 2353 left_side.Unuse(); |
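A standalone sketch (not part of this patch) of the flag mapping the inlined SSE2 path above relies on: comisd sets ZF/CF the way an unsigned integer compare would and raises the parity flag for an unordered (NaN) operand, which is why the switch translates signed conditions into their unsigned counterparts and branches back to the stub on parity_even. The helper names below (DoubleConditionFor, CompareDoubleAgainstSmi) are invented for illustration and ignore any operand swapping done by the surrounding code.

#include <cmath>

enum Condition { less, equal, less_equal, greater, greater_equal,
                 below, below_equal, above, above_equal };

// Mirrors the switch in the generated code: signed cc -> unsigned cc,
// because comisd's flag results follow unsigned-compare conventions.
Condition DoubleConditionFor(Condition cc) {
  switch (cc) {
    case less:          return below;
    case equal:         return equal;
    case less_equal:    return below_equal;
    case greater:       return above;
    case greater_equal: return above_equal;
    default:            return cc;  // not reached for the conditions inlined here
  }
}

// Rough model of the fast path for "heap number <cc> constant smi":
// an unordered (NaN) left operand never takes the true branch; the real
// code jumps to the generic compare stub in that case.
bool CompareDoubleAgainstSmi(double left, int right, Condition cc) {
  if (std::isnan(left)) return false;
  switch (DoubleConditionFor(cc)) {
    case below:       return left <  right;
    case equal:       return left == right;
    case below_equal: return left <= right;
    case above:       return left >  right;
    case above_equal: return left >= right;
    default:          return false;
  }
}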
(...skipping 1218 matching lines...)
3580 body.Bind(); | 3572 body.Bind(); |
3581 } | 3573 } |
3582 break; | 3574 break; |
3583 } | 3575 } |
3584 case ALWAYS_FALSE: | 3576 case ALWAYS_FALSE: |
3585 UNREACHABLE(); | 3577 UNREACHABLE(); |
3586 break; | 3578 break; |
3587 } | 3579 } |
3588 | 3580 |
3589 CheckStack(); // TODO(1222600): ignore if body contains calls. | 3581 CheckStack(); // TODO(1222600): ignore if body contains calls. |
3590 | |
3591 // If we have (a) a loop with a compile-time constant trip count | |
3592 // and (b) the loop induction variable is not assigned inside the | |
3593 // loop we update the number type of the induction variable to be smi. | |
3594 | |
3595 if (node->is_fast_smi_loop()) { | |
3596 // Set number type of the loop variable to smi. | |
3597 Slot* slot = node->loop_variable()->slot(); | |
3598 ASSERT(slot->type() == Slot::LOCAL); | |
3599 frame_->SetTypeForLocalAt(slot->index(), NumberInfo::Smi()); | |
3600 if (FLAG_debug_code) { | |
3601 frame_->PushLocalAt(slot->index()); | |
3602 Result var = frame_->Pop(); | |
3603 var.ToRegister(); | |
3604 __ AbortIfNotSmi(var.reg(), "Loop variable not a smi."); | |
3605 } | |
3606 } | |
3607 | |
3608 Visit(node->body()); | 3582 Visit(node->body()); |
3609 | 3583 |
3610 // If there is an update expression, compile it if necessary. | 3584 // If there is an update expression, compile it if necessary. |
3611 if (node->next() != NULL) { | 3585 if (node->next() != NULL) { |
3612 if (node->continue_target()->is_linked()) { | 3586 if (node->continue_target()->is_linked()) { |
3613 node->continue_target()->Bind(); | 3587 node->continue_target()->Bind(); |
3614 } | 3588 } |
3615 | 3589 |
3616 // Control can reach the update by falling out of the body or by a | 3590 // Control can reach the update by falling out of the body or by a |
3617 // continue. | 3591 // continue. |
(...skipping 3025 matching lines...)
6643 // and smi tag checks. | 6617 // and smi tag checks. |
6644 // | 6618 // |
6645 // We allocate and clear the temporary byte register before | 6619 // We allocate and clear the temporary byte register before |
6646 // performing the count operation since clearing the register using | 6620 // performing the count operation since clearing the register using |
6647 // xor will clear the overflow flag. | 6621 // xor will clear the overflow flag. |
6648 Result tmp = allocator_->AllocateByteRegisterWithoutSpilling(); | 6622 Result tmp = allocator_->AllocateByteRegisterWithoutSpilling(); |
6649 if (tmp.is_valid()) { | 6623 if (tmp.is_valid()) { |
6650 __ Set(tmp.reg(), Immediate(0)); | 6624 __ Set(tmp.reg(), Immediate(0)); |
6651 } | 6625 } |
6652 | 6626 |
| 6627 DeferredCode* deferred = NULL; |
| 6628 if (is_postfix) { |
| 6629 deferred = new DeferredPostfixCountOperation(new_value.reg(), |
| 6630 old_value.reg(), |
| 6631 is_increment); |
| 6632 } else { |
| 6633 deferred = new DeferredPrefixCountOperation(new_value.reg(), |
| 6634 is_increment); |
| 6635 } |
6653 | 6636 |
6654 if (is_increment) { | 6637 if (is_increment) { |
6655 __ add(Operand(new_value.reg()), Immediate(Smi::FromInt(1))); | 6638 __ add(Operand(new_value.reg()), Immediate(Smi::FromInt(1))); |
6656 } else { | 6639 } else { |
6657 __ sub(Operand(new_value.reg()), Immediate(Smi::FromInt(1))); | 6640 __ sub(Operand(new_value.reg()), Immediate(Smi::FromInt(1))); |
6658 } | 6641 } |
6659 | 6642 |
6660 if (new_value.is_smi()) { | 6643 // If the count operation didn't overflow and the result is a valid |
6661 if (FLAG_debug_code) { | 6644 // smi, we're done. Otherwise, we jump to the deferred slow-case |
6662 __ AbortIfNotSmi(new_value.reg(), "Argument not a smi"); | 6645 // code. |
6663 } | 6646 if (tmp.is_valid()) { |
6664 if (tmp.is_valid()) tmp.Unuse(); | 6647 // We combine the overflow and the smi tag check if we could |
| 6648 // successfully allocate a temporary byte register. |
| 6649 __ setcc(overflow, tmp.reg()); |
| 6650 __ or_(Operand(tmp.reg()), new_value.reg()); |
| 6651 __ test(tmp.reg(), Immediate(kSmiTagMask)); |
| 6652 tmp.Unuse(); |
| 6653 deferred->Branch(not_zero); |
6665 } else { | 6654 } else { |
6666 DeferredCode* deferred = NULL; | 6655 // Otherwise we test separately for overflow and smi tag. |
6667 if (is_postfix) { | 6656 deferred->Branch(overflow); |
6668 deferred = new DeferredPostfixCountOperation(new_value.reg(), | 6657 __ test(new_value.reg(), Immediate(kSmiTagMask)); |
6669 old_value.reg(), | 6658 deferred->Branch(not_zero); |
6670 is_increment); | |
6671 } else { | |
6672 deferred = new DeferredPrefixCountOperation(new_value.reg(), | |
6673 is_increment); | |
6674 } | |
6675 | |
6676 // If the count operation didn't overflow and the result is a valid | |
6677 // smi, we're done. Otherwise, we jump to the deferred slow-case | |
6678 // code. | |
6679 if (tmp.is_valid()) { | |
6680 // We combine the overflow and the smi tag check if we could | |
6681 // successfully allocate a temporary byte register. | |
6682 __ setcc(overflow, tmp.reg()); | |
6683 __ or_(Operand(tmp.reg()), new_value.reg()); | |
6684 __ test(tmp.reg(), Immediate(kSmiTagMask)); | |
6685 tmp.Unuse(); | |
6686 deferred->Branch(not_zero); | |
6687 } else { | |
6688 // Otherwise we test separately for overflow and smi tag. | |
6689 deferred->Branch(overflow); | |
6690 __ test(new_value.reg(), Immediate(kSmiTagMask)); | |
6691 deferred->Branch(not_zero); | |
6692 } | |
6693 deferred->BindExit(); | |
6694 } | 6659 } |
| 6660 deferred->BindExit(); |
6695 | 6661 |
6696 // Postfix: store the old value in the allocated slot under the | 6662 // Postfix: store the old value in the allocated slot under the |
6697 // reference. | 6663 // reference. |
6698 if (is_postfix) frame_->SetElementAt(target.size(), &old_value); | 6664 if (is_postfix) frame_->SetElementAt(target.size(), &old_value); |
6699 | 6665 |
6700 frame_->Push(&new_value); | 6666 frame_->Push(&new_value); |
6701 // Non-constant: update the reference. | 6667 // Non-constant: update the reference. |
6702 if (!is_const) target.SetValue(NOT_CONST_INIT); | 6668 if (!is_const) target.SetValue(NOT_CONST_INIT); |
6703 } | 6669 } |
6704 | 6670 |
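A standalone sketch (not part of this patch) of the combined overflow/smi-tag check that the count operation above now always emits: on ia32 the smi tag is the low bit (kSmiTagMask == 1, tag value 0), setcc leaves 0 or 1 in the freshly cleared byte register, and OR-ing that into the incremented value sets bit 0 exactly when the add/sub overflowed, so a single test against kSmiTagMask covers both failure modes with one branch. NeedsDeferredCase is a hypothetical helper that only models the flag logic.

#include <cstdint>

const int32_t kSmiTagMask = 1;  // low bit is the smi tag on ia32

// Returns true when the deferred slow-case code must run, i.e. when the
// count operation overflowed or the result is no longer a valid smi.
bool NeedsDeferredCase(int32_t new_value_after_count, bool overflowed) {
  int32_t tmp = overflowed ? 1 : 0;   // __ setcc(overflow, tmp.reg())
  tmp |= new_value_after_count;       // __ or_(Operand(tmp.reg()), new_value.reg())
  return (tmp & kSmiTagMask) != 0;    // __ test(tmp.reg(), Immediate(kSmiTagMask))
}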
(...skipping 145 matching lines...)
6850 // never return a constant/immutable object. | 6816 // never return a constant/immutable object. |
6851 OverwriteMode overwrite_mode = NO_OVERWRITE; | 6817 OverwriteMode overwrite_mode = NO_OVERWRITE; |
6852 if (node->left()->AsBinaryOperation() != NULL && | 6818 if (node->left()->AsBinaryOperation() != NULL && |
6853 node->left()->AsBinaryOperation()->ResultOverwriteAllowed()) { | 6819 node->left()->AsBinaryOperation()->ResultOverwriteAllowed()) { |
6854 overwrite_mode = OVERWRITE_LEFT; | 6820 overwrite_mode = OVERWRITE_LEFT; |
6855 } else if (node->right()->AsBinaryOperation() != NULL && | 6821 } else if (node->right()->AsBinaryOperation() != NULL && |
6856 node->right()->AsBinaryOperation()->ResultOverwriteAllowed()) { | 6822 node->right()->AsBinaryOperation()->ResultOverwriteAllowed()) { |
6857 overwrite_mode = OVERWRITE_RIGHT; | 6823 overwrite_mode = OVERWRITE_RIGHT; |
6858 } | 6824 } |
6859 | 6825 |
6860 if (node->left()->IsTrivial()) { | 6826 Load(node->left()); |
6861 Load(node->right()); | 6827 Load(node->right()); |
6862 Result right = frame_->Pop(); | |
6863 frame_->Push(node->left()); | |
6864 frame_->Push(&right); | |
6865 } else { | |
6866 Load(node->left()); | |
6867 Load(node->right()); | |
6868 } | |
6869 GenericBinaryOperation(node->op(), node->type(), overwrite_mode); | 6828 GenericBinaryOperation(node->op(), node->type(), overwrite_mode); |
6870 } | 6829 } |
6871 } | 6830 } |
6872 | 6831 |
6873 | 6832 |
6874 void CodeGenerator::VisitThisFunction(ThisFunction* node) { | 6833 void CodeGenerator::VisitThisFunction(ThisFunction* node) { |
6875 frame_->PushFunction(); | 6834 frame_->PushFunction(); |
6876 } | 6835 } |
6877 | 6836 |
6878 | 6837 |
(...skipping 179 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
7058 Result answer = frame_->CallStub(&stub, 2); | 7017 Result answer = frame_->CallStub(&stub, 2); |
7059 answer.ToRegister(); | 7018 answer.ToRegister(); |
7060 __ test(answer.reg(), Operand(answer.reg())); | 7019 __ test(answer.reg(), Operand(answer.reg())); |
7061 answer.Unuse(); | 7020 answer.Unuse(); |
7062 destination()->Split(zero); | 7021 destination()->Split(zero); |
7063 return; | 7022 return; |
7064 } | 7023 } |
7065 default: | 7024 default: |
7066 UNREACHABLE(); | 7025 UNREACHABLE(); |
7067 } | 7026 } |
7068 | 7027 if (!left_already_loaded) Load(left); |
7069 if (left->IsTrivial()) { | 7028 Load(right); |
7070 if (!left_already_loaded) { | |
7071 Load(right); | |
7072 Result right_result = frame_->Pop(); | |
7073 frame_->Push(left); | |
7074 frame_->Push(&right_result); | |
7075 } else { | |
7076 Load(right); | |
7077 } | |
7078 } else { | |
7079 if (!left_already_loaded) Load(left); | |
7080 Load(right); | |
7081 } | |
7082 Comparison(node, cc, strict, destination()); | 7029 Comparison(node, cc, strict, destination()); |
7083 } | 7030 } |
7084 | 7031 |
7085 | 7032 |
7086 #ifdef DEBUG | 7033 #ifdef DEBUG |
7087 bool CodeGenerator::HasValidEntryRegisters() { | 7034 bool CodeGenerator::HasValidEntryRegisters() { |
7088 return (allocator()->count(eax) == (frame()->is_used(eax) ? 1 : 0)) | 7035 return (allocator()->count(eax) == (frame()->is_used(eax) ? 1 : 0)) |
7089 && (allocator()->count(ebx) == (frame()->is_used(ebx) ? 1 : 0)) | 7036 && (allocator()->count(ebx) == (frame()->is_used(ebx) ? 1 : 0)) |
7090 && (allocator()->count(ecx) == (frame()->is_used(ecx) ? 1 : 0)) | 7037 && (allocator()->count(ecx) == (frame()->is_used(ecx) ? 1 : 0)) |
7091 && (allocator()->count(edx) == (frame()->is_used(edx) ? 1 : 0)) | 7038 && (allocator()->count(edx) == (frame()->is_used(edx) ? 1 : 0)) |
(...skipping 4714 matching lines...)
11806 | 11753 |
11807 // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater) | 11754 // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater) |
11808 // tagged as a small integer. | 11755 // tagged as a small integer. |
11809 __ bind(&runtime); | 11756 __ bind(&runtime); |
11810 __ TailCallRuntime(Runtime::kStringCompare, 2, 1); | 11757 __ TailCallRuntime(Runtime::kStringCompare, 2, 1); |
11811 } | 11758 } |
11812 | 11759 |
11813 #undef __ | 11760 #undef __ |
11814 | 11761 |
11815 } } // namespace v8::internal | 11762 } } // namespace v8::internal |