OLD | NEW |
1 // Copyright 2010 the V8 project authors. All rights reserved. | 1 // Copyright 2010 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 11 matching lines...) |
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | 22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, |
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY |
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE |
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
27 | 27 |
28 #include "v8.h" | 28 #include "v8.h" |
29 | 29 |
30 #if defined(V8_TARGET_ARCH_IA32) | 30 #if defined(V8_TARGET_ARCH_IA32) |
31 | 31 |
| 32 #include "codegen-inl.h" |
32 #include "bootstrapper.h" | 33 #include "bootstrapper.h" |
33 #include "code-stubs-ia32.h" | 34 #include "code-stubs.h" |
34 #include "codegen-inl.h" | |
35 #include "compiler.h" | 35 #include "compiler.h" |
36 #include "debug.h" | 36 #include "debug.h" |
37 #include "ic-inl.h" | 37 #include "ic-inl.h" |
38 #include "parser.h" | 38 #include "parser.h" |
39 #include "regexp-macro-assembler.h" | 39 #include "regexp-macro-assembler.h" |
40 #include "register-allocator-inl.h" | 40 #include "register-allocator-inl.h" |
41 #include "scopes.h" | 41 #include "scopes.h" |
42 #include "virtual-frame-inl.h" | 42 #include "virtual-frame-inl.h" |
43 | 43 |
44 namespace v8 { | 44 namespace v8 { |
(...skipping 1399 matching lines...) |
1444 | 1444 |
1445 TypeInfo result_type = CalculateTypeInfo(operands_type, op, right, left); | 1445 TypeInfo result_type = CalculateTypeInfo(operands_type, op, right, left); |
1446 | 1446 |
1447 Result answer; | 1447 Result answer; |
1448 if (left_is_non_smi_constant || right_is_non_smi_constant) { | 1448 if (left_is_non_smi_constant || right_is_non_smi_constant) { |
1449 // Go straight to the slow case, with no smi code. | 1449 // Go straight to the slow case, with no smi code. |
1450 GenericBinaryOpStub stub(op, | 1450 GenericBinaryOpStub stub(op, |
1451 overwrite_mode, | 1451 overwrite_mode, |
1452 NO_SMI_CODE_IN_STUB, | 1452 NO_SMI_CODE_IN_STUB, |
1453 operands_type); | 1453 operands_type); |
1454 answer = stub.GenerateCall(masm_, frame_, &left, &right); | 1454 answer = GenerateGenericBinaryOpStubCall(&stub, &left, &right); |
1455 } else if (right_is_smi_constant) { | 1455 } else if (right_is_smi_constant) { |
1456 answer = ConstantSmiBinaryOperation(expr, &left, right.handle(), | 1456 answer = ConstantSmiBinaryOperation(expr, &left, right.handle(), |
1457 false, overwrite_mode); | 1457 false, overwrite_mode); |
1458 } else if (left_is_smi_constant) { | 1458 } else if (left_is_smi_constant) { |
1459 answer = ConstantSmiBinaryOperation(expr, &right, left.handle(), | 1459 answer = ConstantSmiBinaryOperation(expr, &right, left.handle(), |
1460 true, overwrite_mode); | 1460 true, overwrite_mode); |
1461 } else { | 1461 } else { |
1462 // Set the flags based on the operation, type and loop nesting level. | 1462 // Set the flags based on the operation, type and loop nesting level. |
1463 // Bit operations always assume they likely operate on Smis. Still only | 1463 // Bit operations always assume they likely operate on Smis. Still only |
1464 // generate the inline Smi check code if this operation is part of a loop. | 1464 // generate the inline Smi check code if this operation is part of a loop. |
1465 // For all other operations only inline the Smi check code for likely smis | 1465 // For all other operations only inline the Smi check code for likely smis |
1466 // if the operation is part of a loop. | 1466 // if the operation is part of a loop. |
1467 if (loop_nesting() > 0 && | 1467 if (loop_nesting() > 0 && |
1468 (Token::IsBitOp(op) || | 1468 (Token::IsBitOp(op) || |
1469 operands_type.IsInteger32() || | 1469 operands_type.IsInteger32() || |
1470 expr->type()->IsLikelySmi())) { | 1470 expr->type()->IsLikelySmi())) { |
1471 answer = LikelySmiBinaryOperation(expr, &left, &right, overwrite_mode); | 1471 answer = LikelySmiBinaryOperation(expr, &left, &right, overwrite_mode); |
1472 } else { | 1472 } else { |
1473 GenericBinaryOpStub stub(op, | 1473 GenericBinaryOpStub stub(op, |
1474 overwrite_mode, | 1474 overwrite_mode, |
1475 NO_GENERIC_BINARY_FLAGS, | 1475 NO_GENERIC_BINARY_FLAGS, |
1476 operands_type); | 1476 operands_type); |
1477 answer = stub.GenerateCall(masm_, frame_, &left, &right); | 1477 answer = GenerateGenericBinaryOpStubCall(&stub, &left, &right); |
1478 } | 1478 } |
1479 } | 1479 } |
1480 | 1480 |
1481 answer.set_type_info(result_type); | 1481 answer.set_type_info(result_type); |
1482 frame_->Push(&answer); | 1482 frame_->Push(&answer); |
1483 } | 1483 } |
1484 | 1484 |
1485 | 1485 |
| 1486 Result CodeGenerator::GenerateGenericBinaryOpStubCall(GenericBinaryOpStub* stub, |
| 1487 Result* left, |
| 1488 Result* right) { |
| 1489 if (stub->ArgsInRegistersSupported()) { |
| 1490 stub->SetArgsInRegisters(); |
| 1491 return frame_->CallStub(stub, left, right); |
| 1492 } else { |
| 1493 frame_->Push(left); |
| 1494 frame_->Push(right); |
| 1495 return frame_->CallStub(stub, 2); |
| 1496 } |
| 1497 } |
| 1498 |
| 1499 |
1486 bool CodeGenerator::FoldConstantSmis(Token::Value op, int left, int right) { | 1500 bool CodeGenerator::FoldConstantSmis(Token::Value op, int left, int right) { |
1487 Object* answer_object = Heap::undefined_value(); | 1501 Object* answer_object = Heap::undefined_value(); |
1488 switch (op) { | 1502 switch (op) { |
1489 case Token::ADD: | 1503 case Token::ADD: |
1490 if (Smi::IsValid(left + right)) { | 1504 if (Smi::IsValid(left + right)) { |
1491 answer_object = Smi::FromInt(left + right); | 1505 answer_object = Smi::FromInt(left + right); |
1492 } | 1506 } |
1493 break; | 1507 break; |
1494 case Token::SUB: | 1508 case Token::SUB: |
1495 if (Smi::IsValid(left - right)) { | 1509 if (Smi::IsValid(left - right)) { |
(...skipping 8313 matching lines...) |
9809 break; | 9823 break; |
9810 } | 9824 } |
9811 | 9825 |
9812 case UNLOADED: | 9826 case UNLOADED: |
9813 case ILLEGAL: | 9827 case ILLEGAL: |
9814 UNREACHABLE(); | 9828 UNREACHABLE(); |
9815 } | 9829 } |
9816 } | 9830 } |
9817 | 9831 |
9818 | 9832 |
9819 Result GenericBinaryOpStub::GenerateCall(MacroAssembler* masm, | |
9820 VirtualFrame* frame, | |
9821 Result* left, | |
9822 Result* right) { | |
9823 if (ArgsInRegistersSupported()) { | |
9824 SetArgsInRegisters(); | |
9825 return frame->CallStub(this, left, right); | |
9826 } else { | |
9827 frame->Push(left); | |
9828 frame->Push(right); | |
9829 return frame->CallStub(this, 2); | |
9830 } | |
9831 } | |
9832 | |
9833 | |
9834 #undef __ | 9833 #undef __ |
9835 | 9834 |
9836 #define __ masm. | 9835 #define __ masm. |
9837 | 9836 |
9838 MemCopyFunction CreateMemCopyFunction() { | 9837 MemCopyFunction CreateMemCopyFunction() { |
9839 size_t actual_size; | 9838 size_t actual_size; |
9840 byte* buffer = static_cast<byte*>(OS::Allocate(Assembler::kMinimalBufferSize, | 9839 byte* buffer = static_cast<byte*>(OS::Allocate(Assembler::kMinimalBufferSize, |
9841 &actual_size, | 9840 &actual_size, |
9842 true)); | 9841 true)); |
9843 CHECK(buffer); | 9842 CHECK(buffer); |
(...skipping 190 matching lines...) |
10034 masm.GetCode(&desc); | 10033 masm.GetCode(&desc); |
10035 // Call the function from C++. | 10034 // Call the function from C++. |
10036 return FUNCTION_CAST<MemCopyFunction>(buffer); | 10035 return FUNCTION_CAST<MemCopyFunction>(buffer); |
10037 } | 10036 } |
10038 | 10037 |
10039 #undef __ | 10038 #undef __ |
10040 | 10039 |
10041 } } // namespace v8::internal | 10040 } } // namespace v8::internal |
10042 | 10041 |
10043 #endif // V8_TARGET_ARCH_IA32 | 10042 #endif // V8_TARGET_ARCH_IA32 |