OLD | NEW |
1 // Copyright 2010 the V8 project authors. All rights reserved. | 1 // Copyright 2010 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 2268 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2279 // where both sides are Smis. | 2279 // where both sides are Smis. |
2280 left_side.ToRegister(); | 2280 left_side.ToRegister(); |
2281 Register left_reg = left_side.reg(); | 2281 Register left_reg = left_side.reg(); |
2282 Handle<Object> right_val = right_side.handle(); | 2282 Handle<Object> right_val = right_side.handle(); |
2283 | 2283 |
2284 // Here we split control flow to the stub call and inlined cases | 2284 // Here we split control flow to the stub call and inlined cases |
2285 // before finally splitting it to the control destination. We use | 2285 // before finally splitting it to the control destination. We use |
2286 // a jump target and branching to duplicate the virtual frame at | 2286 // a jump target and branching to duplicate the virtual frame at |
2287 // the first split. We manually handle the off-frame references | 2287 // the first split. We manually handle the off-frame references |
2288 // by reconstituting them on the non-fall-through path. | 2288 // by reconstituting them on the non-fall-through path. |
2289 JumpTarget is_smi; | |
2290 __ test(left_side.reg(), Immediate(kSmiTagMask)); | |
2291 is_smi.Branch(zero, taken); | |
2292 | 2289 |
2293 bool is_for_loop_compare = (node->AsCompareOperation() != NULL) | 2290 if (left_side.is_smi()) { |
2294 && node->AsCompareOperation()->is_for_loop_condition(); | 2291 if (FLAG_debug_code) { |
2295 if (!is_for_loop_compare | 2292 __ AbortIfNotSmi(left_side.reg(), "Argument not a smi"); |
2296 && CpuFeatures::IsSupported(SSE2) | |
2297 && right_val->IsSmi()) { | |
2298 // Right side is a constant smi and left side has been checked | |
2299 // not to be a smi. | |
2300 CpuFeatures::Scope use_sse2(SSE2); | |
2301 JumpTarget not_number; | |
2302 __ cmp(FieldOperand(left_reg, HeapObject::kMapOffset), | |
2303 Immediate(Factory::heap_number_map())); | |
2304 not_number.Branch(not_equal, &left_side); | |
2305 __ movdbl(xmm1, | |
2306 FieldOperand(left_reg, HeapNumber::kValueOffset)); | |
2307 int value = Smi::cast(*right_val)->value(); | |
2308 if (value == 0) { | |
2309 __ xorpd(xmm0, xmm0); | |
2310 } else { | |
2311 Result temp = allocator()->Allocate(); | |
2312 __ mov(temp.reg(), Immediate(value)); | |
2313 __ cvtsi2sd(xmm0, Operand(temp.reg())); | |
2314 temp.Unuse(); | |
2315 } | 2293 } |
2316 __ comisd(xmm1, xmm0); | 2294 } else { |
2317 // Jump to builtin for NaN. | 2295 JumpTarget is_smi; |
2318 not_number.Branch(parity_even, &left_side); | 2296 __ test(left_side.reg(), Immediate(kSmiTagMask)); |
2319 left_side.Unuse(); | 2297 is_smi.Branch(zero, taken); |
2320 Condition double_cc = cc; | 2298 |
2321 switch (cc) { | 2299 bool is_for_loop_compare = (node->AsCompareOperation() != NULL) |
2322 case less: double_cc = below; break; | 2300 && node->AsCompareOperation()->is_for_loop_condition(); |
2323 case equal: double_cc = equal; break; | 2301 if (!is_for_loop_compare |
2324 case less_equal: double_cc = below_equal; break; | 2302 && CpuFeatures::IsSupported(SSE2) |
2325 case greater: double_cc = above; break; | 2303 && right_val->IsSmi()) { |
2326 case greater_equal: double_cc = above_equal; break; | 2304 // Right side is a constant smi and left side has been checked |
2327 default: UNREACHABLE(); | 2305 // not to be a smi. |
| 2306 CpuFeatures::Scope use_sse2(SSE2); |
| 2307 JumpTarget not_number; |
| 2308 __ cmp(FieldOperand(left_reg, HeapObject::kMapOffset), |
| 2309 Immediate(Factory::heap_number_map())); |
| 2310 not_number.Branch(not_equal, &left_side); |
| 2311 __ movdbl(xmm1, |
| 2312 FieldOperand(left_reg, HeapNumber::kValueOffset)); |
| 2313 int value = Smi::cast(*right_val)->value(); |
| 2314 if (value == 0) { |
| 2315 __ xorpd(xmm0, xmm0); |
| 2316 } else { |
| 2317 Result temp = allocator()->Allocate(); |
| 2318 __ mov(temp.reg(), Immediate(value)); |
| 2319 __ cvtsi2sd(xmm0, Operand(temp.reg())); |
| 2320 temp.Unuse(); |
| 2321 } |
| 2322 __ comisd(xmm1, xmm0); |
| 2323 // Jump to builtin for NaN. |
| 2324 not_number.Branch(parity_even, &left_side); |
| 2325 left_side.Unuse(); |
| 2326 Condition double_cc = cc; |
| 2327 switch (cc) { |
| 2328 case less: double_cc = below; break; |
| 2329 case equal: double_cc = equal; break; |
| 2330 case less_equal: double_cc = below_equal; break; |
| 2331 case greater: double_cc = above; break; |
| 2332 case greater_equal: double_cc = above_equal; break; |
| 2333 default: UNREACHABLE(); |
| 2334 } |
| 2335 dest->true_target()->Branch(double_cc); |
| 2336 dest->false_target()->Jump(); |
| 2337 not_number.Bind(&left_side); |
2328 } | 2338 } |
2329 dest->true_target()->Branch(double_cc); | 2339 |
| 2340 // Setup and call the compare stub. |
| 2341 CompareStub stub(cc, strict, kCantBothBeNaN); |
| 2342 Result result = frame_->CallStub(&stub, &left_side, &right_side); |
| 2343 result.ToRegister(); |
| 2344 __ cmp(result.reg(), 0); |
| 2345 result.Unuse(); |
| 2346 dest->true_target()->Branch(cc); |
2330 dest->false_target()->Jump(); | 2347 dest->false_target()->Jump(); |
2331 not_number.Bind(&left_side); | 2348 |
| 2349 is_smi.Bind(); |
2332 } | 2350 } |
2333 | 2351 |
2334 // Setup and call the compare stub. | |
2335 CompareStub stub(cc, strict, kCantBothBeNaN); | |
2336 Result result = frame_->CallStub(&stub, &left_side, &right_side); | |
2337 result.ToRegister(); | |
2338 __ cmp(result.reg(), 0); | |
2339 result.Unuse(); | |
2340 dest->true_target()->Branch(cc); | |
2341 dest->false_target()->Jump(); | |
2342 | |
2343 is_smi.Bind(); | |
2344 left_side = Result(left_reg); | 2352 left_side = Result(left_reg); |
2345 right_side = Result(right_val); | 2353 right_side = Result(right_val); |
2346 // Test smi equality and comparison by signed int comparison. | 2354 // Test smi equality and comparison by signed int comparison. |
2347 if (IsUnsafeSmi(right_side.handle())) { | 2355 if (IsUnsafeSmi(right_side.handle())) { |
2348 right_side.ToRegister(); | 2356 right_side.ToRegister(); |
2349 __ cmp(left_side.reg(), Operand(right_side.reg())); | 2357 __ cmp(left_side.reg(), Operand(right_side.reg())); |
2350 } else { | 2358 } else { |
2351 __ cmp(Operand(left_side.reg()), Immediate(right_side.handle())); | 2359 __ cmp(Operand(left_side.reg()), Immediate(right_side.handle())); |
2352 } | 2360 } |
2353 left_side.Unuse(); | 2361 left_side.Unuse(); |
(...skipping 1218 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3572 body.Bind(); | 3580 body.Bind(); |
3573 } | 3581 } |
3574 break; | 3582 break; |
3575 } | 3583 } |
3576 case ALWAYS_FALSE: | 3584 case ALWAYS_FALSE: |
3577 UNREACHABLE(); | 3585 UNREACHABLE(); |
3578 break; | 3586 break; |
3579 } | 3587 } |
3580 | 3588 |
3581 CheckStack(); // TODO(1222600): ignore if body contains calls. | 3589 CheckStack(); // TODO(1222600): ignore if body contains calls. |
| 3590 |
| 3591 // If we have (a) a loop with a compile-time constant trip count |
 | 3592 // and (b) the loop induction variable is not assigned inside the |
| 3593 // loop we update the number type of the induction variable to be smi. |
| 3594 |
| 3595 if (node->is_fast_smi_loop()) { |
| 3596 // Set number type of the loop variable to smi. |
| 3597 Slot* slot = node->loop_variable()->slot(); |
| 3598 ASSERT(slot->type() == Slot::LOCAL); |
| 3599 frame_->SetTypeForLocalAt(slot->index(), NumberInfo::Smi()); |
| 3600 if (FLAG_debug_code) { |
| 3601 frame_->PushLocalAt(slot->index()); |
| 3602 Result var = frame_->Pop(); |
| 3603 var.ToRegister(); |
| 3604 __ AbortIfNotSmi(var.reg(), "Loop variable not a smi."); |
| 3605 } |
| 3606 } |
| 3607 |
3582 Visit(node->body()); | 3608 Visit(node->body()); |
3583 | 3609 |
3584 // If there is an update expression, compile it if necessary. | 3610 // If there is an update expression, compile it if necessary. |
3585 if (node->next() != NULL) { | 3611 if (node->next() != NULL) { |
3586 if (node->continue_target()->is_linked()) { | 3612 if (node->continue_target()->is_linked()) { |
3587 node->continue_target()->Bind(); | 3613 node->continue_target()->Bind(); |
3588 } | 3614 } |
3589 | 3615 |
3590 // Control can reach the update by falling out of the body or by a | 3616 // Control can reach the update by falling out of the body or by a |
3591 // continue. | 3617 // continue. |
(...skipping 3025 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
6617 // and smi tag checks. | 6643 // and smi tag checks. |
6618 // | 6644 // |
6619 // We allocate and clear the temporary byte register before | 6645 // We allocate and clear the temporary byte register before |
6620 // performing the count operation since clearing the register using | 6646 // performing the count operation since clearing the register using |
6621 // xor will clear the overflow flag. | 6647 // xor will clear the overflow flag. |
6622 Result tmp = allocator_->AllocateByteRegisterWithoutSpilling(); | 6648 Result tmp = allocator_->AllocateByteRegisterWithoutSpilling(); |
6623 if (tmp.is_valid()) { | 6649 if (tmp.is_valid()) { |
6624 __ Set(tmp.reg(), Immediate(0)); | 6650 __ Set(tmp.reg(), Immediate(0)); |
6625 } | 6651 } |
6626 | 6652 |
6627 DeferredCode* deferred = NULL; | |
6628 if (is_postfix) { | |
6629 deferred = new DeferredPostfixCountOperation(new_value.reg(), | |
6630 old_value.reg(), | |
6631 is_increment); | |
6632 } else { | |
6633 deferred = new DeferredPrefixCountOperation(new_value.reg(), | |
6634 is_increment); | |
6635 } | |
6636 | 6653 |
6637 if (is_increment) { | 6654 if (is_increment) { |
6638 __ add(Operand(new_value.reg()), Immediate(Smi::FromInt(1))); | 6655 __ add(Operand(new_value.reg()), Immediate(Smi::FromInt(1))); |
6639 } else { | 6656 } else { |
6640 __ sub(Operand(new_value.reg()), Immediate(Smi::FromInt(1))); | 6657 __ sub(Operand(new_value.reg()), Immediate(Smi::FromInt(1))); |
6641 } | 6658 } |
6642 | 6659 |
6643 // If the count operation didn't overflow and the result is a valid | 6660 if (new_value.is_smi()) { |
6644 // smi, we're done. Otherwise, we jump to the deferred slow-case | 6661 if (FLAG_debug_code) { |
6645 // code. | 6662 __ AbortIfNotSmi(new_value.reg(), "Argument not a smi"); |
6646 if (tmp.is_valid()) { | 6663 } |
6647 // We combine the overflow and the smi tag check if we could | 6664 if (tmp.is_valid()) tmp.Unuse(); |
6648 // successfully allocate a temporary byte register. | |
6649 __ setcc(overflow, tmp.reg()); | |
6650 __ or_(Operand(tmp.reg()), new_value.reg()); | |
6651 __ test(tmp.reg(), Immediate(kSmiTagMask)); | |
6652 tmp.Unuse(); | |
6653 deferred->Branch(not_zero); | |
6654 } else { | 6665 } else { |
6655 // Otherwise we test separately for overflow and smi tag. | 6666 DeferredCode* deferred = NULL; |
6656 deferred->Branch(overflow); | 6667 if (is_postfix) { |
6657 __ test(new_value.reg(), Immediate(kSmiTagMask)); | 6668 deferred = new DeferredPostfixCountOperation(new_value.reg(), |
6658 deferred->Branch(not_zero); | 6669 old_value.reg(), |
| 6670 is_increment); |
| 6671 } else { |
| 6672 deferred = new DeferredPrefixCountOperation(new_value.reg(), |
| 6673 is_increment); |
| 6674 } |
| 6675 |
| 6676 // If the count operation didn't overflow and the result is a valid |
| 6677 // smi, we're done. Otherwise, we jump to the deferred slow-case |
| 6678 // code. |
| 6679 if (tmp.is_valid()) { |
| 6680 // We combine the overflow and the smi tag check if we could |
| 6681 // successfully allocate a temporary byte register. |
| 6682 __ setcc(overflow, tmp.reg()); |
| 6683 __ or_(Operand(tmp.reg()), new_value.reg()); |
| 6684 __ test(tmp.reg(), Immediate(kSmiTagMask)); |
| 6685 tmp.Unuse(); |
| 6686 deferred->Branch(not_zero); |
| 6687 } else { |
| 6688 // Otherwise we test separately for overflow and smi tag. |
| 6689 deferred->Branch(overflow); |
| 6690 __ test(new_value.reg(), Immediate(kSmiTagMask)); |
| 6691 deferred->Branch(not_zero); |
| 6692 } |
| 6693 deferred->BindExit(); |
6659 } | 6694 } |
6660 deferred->BindExit(); | |
6661 | 6695 |
6662 // Postfix: store the old value in the allocated slot under the | 6696 // Postfix: store the old value in the allocated slot under the |
6663 // reference. | 6697 // reference. |
6664 if (is_postfix) frame_->SetElementAt(target.size(), &old_value); | 6698 if (is_postfix) frame_->SetElementAt(target.size(), &old_value); |
6665 | 6699 |
6666 frame_->Push(&new_value); | 6700 frame_->Push(&new_value); |
6667 // Non-constant: update the reference. | 6701 // Non-constant: update the reference. |
6668 if (!is_const) target.SetValue(NOT_CONST_INIT); | 6702 if (!is_const) target.SetValue(NOT_CONST_INIT); |
6669 } | 6703 } |
6670 | 6704 |
(...skipping 145 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
6816 // never return a constant/immutable object. | 6850 // never return a constant/immutable object. |
6817 OverwriteMode overwrite_mode = NO_OVERWRITE; | 6851 OverwriteMode overwrite_mode = NO_OVERWRITE; |
6818 if (node->left()->AsBinaryOperation() != NULL && | 6852 if (node->left()->AsBinaryOperation() != NULL && |
6819 node->left()->AsBinaryOperation()->ResultOverwriteAllowed()) { | 6853 node->left()->AsBinaryOperation()->ResultOverwriteAllowed()) { |
6820 overwrite_mode = OVERWRITE_LEFT; | 6854 overwrite_mode = OVERWRITE_LEFT; |
6821 } else if (node->right()->AsBinaryOperation() != NULL && | 6855 } else if (node->right()->AsBinaryOperation() != NULL && |
6822 node->right()->AsBinaryOperation()->ResultOverwriteAllowed()) { | 6856 node->right()->AsBinaryOperation()->ResultOverwriteAllowed()) { |
6823 overwrite_mode = OVERWRITE_RIGHT; | 6857 overwrite_mode = OVERWRITE_RIGHT; |
6824 } | 6858 } |
6825 | 6859 |
6826 Load(node->left()); | 6860 if (node->left()->IsTrivial()) { |
6827 Load(node->right()); | 6861 Load(node->right()); |
| 6862 Result right = frame_->Pop(); |
| 6863 frame_->Push(node->left()); |
| 6864 frame_->Push(&right); |
| 6865 } else { |
| 6866 Load(node->left()); |
| 6867 Load(node->right()); |
| 6868 } |
6828 GenericBinaryOperation(node->op(), node->type(), overwrite_mode); | 6869 GenericBinaryOperation(node->op(), node->type(), overwrite_mode); |
6829 } | 6870 } |
6830 } | 6871 } |
6831 | 6872 |
6832 | 6873 |
6833 void CodeGenerator::VisitThisFunction(ThisFunction* node) { | 6874 void CodeGenerator::VisitThisFunction(ThisFunction* node) { |
6834 frame_->PushFunction(); | 6875 frame_->PushFunction(); |
6835 } | 6876 } |
6836 | 6877 |
6837 | 6878 |
(...skipping 179 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
7017 Result answer = frame_->CallStub(&stub, 2); | 7058 Result answer = frame_->CallStub(&stub, 2); |
7018 answer.ToRegister(); | 7059 answer.ToRegister(); |
7019 __ test(answer.reg(), Operand(answer.reg())); | 7060 __ test(answer.reg(), Operand(answer.reg())); |
7020 answer.Unuse(); | 7061 answer.Unuse(); |
7021 destination()->Split(zero); | 7062 destination()->Split(zero); |
7022 return; | 7063 return; |
7023 } | 7064 } |
7024 default: | 7065 default: |
7025 UNREACHABLE(); | 7066 UNREACHABLE(); |
7026 } | 7067 } |
7027 if (!left_already_loaded) Load(left); | 7068 |
7028 Load(right); | 7069 if (left->IsTrivial()) { |
| 7070 if (!left_already_loaded) { |
| 7071 Load(right); |
| 7072 Result right_result = frame_->Pop(); |
| 7073 frame_->Push(left); |
| 7074 frame_->Push(&right_result); |
| 7075 } else { |
| 7076 Load(right); |
| 7077 } |
| 7078 } else { |
| 7079 if (!left_already_loaded) Load(left); |
| 7080 Load(right); |
| 7081 } |
7029 Comparison(node, cc, strict, destination()); | 7082 Comparison(node, cc, strict, destination()); |
7030 } | 7083 } |
7031 | 7084 |
7032 | 7085 |
7033 #ifdef DEBUG | 7086 #ifdef DEBUG |
7034 bool CodeGenerator::HasValidEntryRegisters() { | 7087 bool CodeGenerator::HasValidEntryRegisters() { |
7035 return (allocator()->count(eax) == (frame()->is_used(eax) ? 1 : 0)) | 7088 return (allocator()->count(eax) == (frame()->is_used(eax) ? 1 : 0)) |
7036 && (allocator()->count(ebx) == (frame()->is_used(ebx) ? 1 : 0)) | 7089 && (allocator()->count(ebx) == (frame()->is_used(ebx) ? 1 : 0)) |
7037 && (allocator()->count(ecx) == (frame()->is_used(ecx) ? 1 : 0)) | 7090 && (allocator()->count(ecx) == (frame()->is_used(ecx) ? 1 : 0)) |
7038 && (allocator()->count(edx) == (frame()->is_used(edx) ? 1 : 0)) | 7091 && (allocator()->count(edx) == (frame()->is_used(edx) ? 1 : 0)) |
(...skipping 4714 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
11753 | 11806 |
11754 // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater) | 11807 // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater) |
11755 // tagged as a small integer. | 11808 // tagged as a small integer. |
11756 __ bind(&runtime); | 11809 __ bind(&runtime); |
11757 __ TailCallRuntime(Runtime::kStringCompare, 2, 1); | 11810 __ TailCallRuntime(Runtime::kStringCompare, 2, 1); |
11758 } | 11811 } |
11759 | 11812 |
11760 #undef __ | 11813 #undef __ |
11761 | 11814 |
11762 } } // namespace v8::internal | 11815 } } // namespace v8::internal |
OLD | NEW |