OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 2021 matching lines...)
2032 ASSERT(r.IsTagged()); | 2032 ASSERT(r.IsTagged()); |
2033 Register reg = ToRegister(instr->value()); | 2033 Register reg = ToRegister(instr->value()); |
2034 HType type = instr->hydrogen()->value()->type(); | 2034 HType type = instr->hydrogen()->value()->type(); |
2035 if (type.IsBoolean()) { | 2035 if (type.IsBoolean()) { |
2036 ASSERT(!info()->IsStub()); | 2036 ASSERT(!info()->IsStub()); |
2037 __ LoadRoot(at, Heap::kTrueValueRootIndex); | 2037 __ LoadRoot(at, Heap::kTrueValueRootIndex); |
2038 EmitBranch(instr, eq, reg, Operand(at)); | 2038 EmitBranch(instr, eq, reg, Operand(at)); |
2039 } else if (type.IsSmi()) { | 2039 } else if (type.IsSmi()) { |
2040 ASSERT(!info()->IsStub()); | 2040 ASSERT(!info()->IsStub()); |
2041 EmitBranch(instr, ne, reg, Operand(zero_reg)); | 2041 EmitBranch(instr, ne, reg, Operand(zero_reg)); |
| 2042 } else if (type.IsJSArray()) { |
| 2043 ASSERT(!info()->IsStub()); |
| 2044 EmitBranch(instr, al, zero_reg, Operand(zero_reg)); |
| 2045 } else if (type.IsHeapNumber()) { |
| 2046 ASSERT(!info()->IsStub()); |
| 2047 DoubleRegister dbl_scratch = double_scratch0(); |
| 2048 __ ldc1(dbl_scratch, FieldMemOperand(reg, HeapNumber::kValueOffset)); |
| 2049 // Test the double value. Zero and NaN are false. |
| 2050 EmitBranchF(instr, nue, dbl_scratch, kDoubleRegZero); |
| 2051 } else if (type.IsString()) { |
| 2052 ASSERT(!info()->IsStub()); |
| 2053 __ lw(at, FieldMemOperand(reg, String::kLengthOffset)); |
| 2054 EmitBranch(instr, ne, at, Operand(zero_reg)); |
2042 } else { | 2055 } else { |
2043 ToBooleanStub::Types expected = instr->hydrogen()->expected_input_types(); | 2056 ToBooleanStub::Types expected = instr->hydrogen()->expected_input_types(); |
2044 // Avoid deopts in the case where we've never executed this path before. | 2057 // Avoid deopts in the case where we've never executed this path before. |
2045 if (expected.IsEmpty()) expected = ToBooleanStub::all_types(); | 2058 if (expected.IsEmpty()) expected = ToBooleanStub::Types::Generic(); |
2046 | 2059 |
2047 if (expected.Contains(ToBooleanStub::UNDEFINED)) { | 2060 if (expected.Contains(ToBooleanStub::UNDEFINED)) { |
2048 // undefined -> false. | 2061 // undefined -> false. |
2049 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); | 2062 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); |
2050 __ Branch(instr->FalseLabel(chunk_), eq, reg, Operand(at)); | 2063 __ Branch(instr->FalseLabel(chunk_), eq, reg, Operand(at)); |
2051 } | 2064 } |
2052 if (expected.Contains(ToBooleanStub::BOOLEAN)) { | 2065 if (expected.Contains(ToBooleanStub::BOOLEAN)) { |
2053 // Boolean -> its value. | 2066 // Boolean -> its value. |
2054 __ LoadRoot(at, Heap::kTrueValueRootIndex); | 2067 __ LoadRoot(at, Heap::kTrueValueRootIndex); |
2055 __ Branch(instr->TrueLabel(chunk_), eq, reg, Operand(at)); | 2068 __ Branch(instr->TrueLabel(chunk_), eq, reg, Operand(at)); |
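Note on the new fast paths above (a reviewer sketch, not part of the change): when type feedback pins the operand to a single type, the branch is emitted directly instead of going through ToBooleanStub. Booleans compare against the true root, Smis and strings test for zero / zero length, JSArrays are unconditionally true (the al condition), and heap numbers are false only for zero and NaN, which is why the compare against kDoubleRegZero uses nue (not unordered-or-equal). Roughly, the scalar rule being specialized looks like the sketch below; ToyValue and ToBooleanSketch are illustrative names against a toy value type, not V8's object model.

    // Sketch only: the ToBoolean rules the fast paths above specialize.
    #include <cmath>
    #include <string>

    struct ToyValue {
      enum Kind { kUndefined, kBoolean, kSmi, kJSArray, kHeapNumber, kString };
      Kind kind = kUndefined;
      bool boolean = false;       // used when kind == kBoolean
      int smi = 0;                // used when kind == kSmi
      double number = 0.0;        // used when kind == kHeapNumber
      std::string string;         // used when kind == kString
    };

    bool ToBooleanSketch(const ToyValue& v) {
      switch (v.kind) {
        case ToyValue::kBoolean:    return v.boolean;          // its value
        case ToyValue::kSmi:        return v.smi != 0;         // 0 -> false
        case ToyValue::kJSArray:    return true;               // always true
        case ToyValue::kHeapNumber:                            // 0, -0, NaN -> false
          return v.number != 0.0 && !std::isnan(v.number);
        case ToyValue::kString:     return !v.string.empty();  // "" -> false
        case ToyValue::kUndefined:  return false;
      }
      return false;
    }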
(...skipping 59 matching lines...)
2115 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex); | 2128 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex); |
2116 __ Branch(&not_heap_number, ne, map, Operand(at)); | 2129 __ Branch(&not_heap_number, ne, map, Operand(at)); |
2117 __ ldc1(dbl_scratch, FieldMemOperand(reg, HeapNumber::kValueOffset)); | 2130 __ ldc1(dbl_scratch, FieldMemOperand(reg, HeapNumber::kValueOffset)); |
2118 __ BranchF(instr->TrueLabel(chunk_), instr->FalseLabel(chunk_), | 2131 __ BranchF(instr->TrueLabel(chunk_), instr->FalseLabel(chunk_), |
2119 ne, dbl_scratch, kDoubleRegZero); | 2132 ne, dbl_scratch, kDoubleRegZero); |
2120 // Falls through if dbl_scratch == 0. | 2133 // Falls through if dbl_scratch == 0. |
2121 __ Branch(instr->FalseLabel(chunk_)); | 2134 __ Branch(instr->FalseLabel(chunk_)); |
2122 __ bind(&not_heap_number); | 2135 __ bind(&not_heap_number); |
2123 } | 2136 } |
2124 | 2137 |
2125 // We've seen something for the first time -> deopt. | 2138 if (!expected.IsGeneric()) { |
2126 DeoptimizeIf(al, instr->environment(), zero_reg, Operand(zero_reg)); | 2139 // We've seen something for the first time -> deopt. |
| 2140 // This can only happen if we are not generic already. |
| 2141 DeoptimizeIf(al, instr->environment(), zero_reg, Operand(zero_reg)); |
| 2142 } |
2127 } | 2143 } |
2128 } | 2144 } |
2129 } | 2145 } |
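The guard around DeoptimizeIf is the substantive part of this hunk: falling off the end of the inline checks now deoptimizes only while the expected-type set is not yet generic. With the generic set every input is already handled inline, so an unconditional deopt there could never be retired by new feedback and would recur on every execution. A minimal sketch of the idea follows, modelling the expected types as a plain bitmask; ExpectedTypes and ShouldDeoptOnFallthrough are illustrative names, not the real ToBooleanStub::Types (an EnumSet) API.

    // Sketch only: expected-input-types bookkeeping as a plain bitmask.
    #include <cstdint>

    enum TypeBit : uint8_t {
      kUndefinedBit  = 1 << 0,
      kBooleanBit    = 1 << 1,
      kSmiBit        = 1 << 2,
      kStringBit     = 1 << 3,
      kHeapNumberBit = 1 << 4,
      kOtherBit      = 1 << 5,
    };
    constexpr uint8_t kGenericBits = 0x3F;  // all bits: every input handled inline

    struct ExpectedTypes {
      uint8_t bits = 0;
      bool IsEmpty() const { return bits == 0; }
      bool IsGeneric() const { return bits == kGenericBits; }
      bool Contains(TypeBit t) const { return (bits & t) != 0; }
    };

    // Deopt only makes sense while feedback can still grow; once the set is
    // generic, the inline code covers everything and a deopt would just loop.
    bool ShouldDeoptOnFallthrough(const ExpectedTypes& expected) {
      return !expected.IsGeneric();
    }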
2130 | 2146 |
2131 | 2147 |
2132 void LCodeGen::EmitGoto(int block) { | 2148 void LCodeGen::EmitGoto(int block) { |
2133 if (!IsNextEmittedBlock(block)) { | 2149 if (!IsNextEmittedBlock(block)) { |
2134 __ jmp(chunk_->GetAssemblyLabel(LookupDestination(block))); | 2150 __ jmp(chunk_->GetAssemblyLabel(LookupDestination(block))); |
2135 } | 2151 } |
2136 } | 2152 } |
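For context, EmitGoto above only emits a jump when the destination is not the block emitted immediately afterwards, so consecutive blocks fall through. A small sketch of that pattern, with the assembly modelled as a list of strings and EmitGotoSketch as an illustrative name:

    // Sketch only: the fall-through optimization in EmitGoto.
    #include <string>
    #include <vector>

    void EmitGotoSketch(int target_block, int next_emitted_block,
                        std::vector<std::string>* code) {
      if (target_block != next_emitted_block) {
        code->push_back("jmp L" + std::to_string(target_block));
      }
      // Otherwise the target is emitted right after this block, so control
      // simply falls through and no jump instruction is needed.
    }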
(...skipping 3737 matching lines...)
5874 __ Subu(scratch, result, scratch); | 5890 __ Subu(scratch, result, scratch); |
5875 __ lw(result, FieldMemOperand(scratch, | 5891 __ lw(result, FieldMemOperand(scratch, |
5876 FixedArray::kHeaderSize - kPointerSize)); | 5892 FixedArray::kHeaderSize - kPointerSize)); |
5877 __ bind(&done); | 5893 __ bind(&done); |
5878 } | 5894 } |
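One note on the load above for readers outside the MIPS port: if I read the macro assembler right, FieldMemOperand simply folds the heap-object tag into the load displacement, so a tagged pointer can be dereferenced without an explicit untag step. Roughly, with FieldAddressSketch and kHeapObjectTagSketch as illustrative names:

    // Sketch only: a field access through a tagged pointer folds the tag
    // into the displacement instead of untagging the pointer first.
    #include <cstdint>

    constexpr int kHeapObjectTagSketch = 1;  // low bit marks heap objects

    // A tagged pointer is the object's address plus the tag, so the field at
    // field_offset lives at tagged_object + field_offset - tag.
    inline const uint8_t* FieldAddressSketch(const uint8_t* tagged_object,
                                             int field_offset) {
      return tagged_object + field_offset - kHeapObjectTagSketch;
    }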
5879 | 5895 |
5880 | 5896 |
5881 #undef __ | 5897 #undef __ |
5882 | 5898 |
5883 } } // namespace v8::internal | 5899 } } // namespace v8::internal |