OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 2180 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2191 if (r.IsInteger32() || r.IsSmi()) { | 2191 if (r.IsInteger32() || r.IsSmi()) { |
2192 ASSERT(!info()->IsStub()); | 2192 ASSERT(!info()->IsStub()); |
2193 Register reg = ToRegister(instr->value()); | 2193 Register reg = ToRegister(instr->value()); |
2194 __ cmp(reg, Operand::Zero()); | 2194 __ cmp(reg, Operand::Zero()); |
2195 EmitBranch(true_block, false_block, ne); | 2195 EmitBranch(true_block, false_block, ne); |
2196 } else if (r.IsDouble()) { | 2196 } else if (r.IsDouble()) { |
2197 ASSERT(!info()->IsStub()); | 2197 ASSERT(!info()->IsStub()); |
2198 DwVfpRegister reg = ToDoubleRegister(instr->value()); | 2198 DwVfpRegister reg = ToDoubleRegister(instr->value()); |
2199 // Test the double value. Zero and NaN are false. | 2199 // Test the double value. Zero and NaN are false. |
2200 __ VFPCompareAndSetFlags(reg, 0.0); | 2200 __ VFPCompareAndSetFlags(reg, 0.0); |
2201 __ cmp(r0, r0, vs); // If NaN, set the Z flag. | 2201 __ cmp(r0, r0, vs); // If NaN, set the Z flag. (NaN -> false) |
2202 EmitBranch(true_block, false_block, ne); | 2202 EmitBranch(true_block, false_block, ne); |
2203 } else { | 2203 } else { |
2204 ASSERT(r.IsTagged()); | 2204 ASSERT(r.IsTagged()); |
2205 Register reg = ToRegister(instr->value()); | 2205 Register reg = ToRegister(instr->value()); |
2206 HType type = instr->hydrogen()->value()->type(); | 2206 HType type = instr->hydrogen()->value()->type(); |
2207 if (type.IsBoolean()) { | 2207 if (type.IsBoolean()) { |
2208 ASSERT(!info()->IsStub()); | 2208 ASSERT(!info()->IsStub()); |
2209 __ CompareRoot(reg, Heap::kTrueValueRootIndex); | 2209 __ CompareRoot(reg, Heap::kTrueValueRootIndex); |
2210 EmitBranch(true_block, false_block, eq); | 2210 EmitBranch(true_block, false_block, eq); |
2211 } else if (type.IsSmi()) { | 2211 } else if (type.IsSmi()) { |
2212 ASSERT(!info()->IsStub()); | 2212 ASSERT(!info()->IsStub()); |
2213 __ cmp(reg, Operand::Zero()); | 2213 __ cmp(reg, Operand::Zero()); |
2214 EmitBranch(true_block, false_block, ne); | 2214 EmitBranch(true_block, false_block, ne); |
| 2215 } else if (type.IsJSArray()) { |
| 2216 ASSERT(!info()->IsStub()); |
| 2217 EmitBranch(true_block, false_block, al); |
| 2218 } else if (type.IsHeapNumber()) { |
| 2219 ASSERT(!info()->IsStub()); |
| 2220 DwVfpRegister dbl_scratch = double_scratch0(); |
| 2221 __ vldr(dbl_scratch, FieldMemOperand(reg, HeapNumber::kValueOffset)); |
| 2222 // Test the double value. Zero and NaN are false. |
| 2223 __ VFPCompareAndSetFlags(dbl_scratch, 0.0); |
| 2224 __ cmp(r0, r0, vs); // If NaN, set the Z flag. (NaN -> false) |
| 2225 EmitBranch(true_block, false_block, ne); |
| 2226 } else if (type.IsString()) { |
| 2227 ASSERT(!info()->IsStub()); |
| 2228 __ ldr(ip, FieldMemOperand(reg, String::kLengthOffset)); |
| 2229 __ cmp(ip, Operand::Zero()); |
| 2230 EmitBranch(true_block, false_block, ne); |
2215 } else { | 2231 } else { |
2216 Label* true_label = chunk_->GetAssemblyLabel(true_block); | 2232 Label* true_label = chunk_->GetAssemblyLabel(true_block); |
2217 Label* false_label = chunk_->GetAssemblyLabel(false_block); | 2233 Label* false_label = chunk_->GetAssemblyLabel(false_block); |
2218 | 2234 |
2219 ToBooleanStub::Types expected = instr->hydrogen()->expected_input_types(); | 2235 ToBooleanStub::Types expected = instr->hydrogen()->expected_input_types(); |
2220 // Avoid deopts in the case where we've never executed this path before. | 2236 // Avoid deopts in the case where we've never executed this path before. |
2221 if (expected.IsEmpty()) expected = ToBooleanStub::all_types(); | 2237 if (expected.IsEmpty()) expected = ToBooleanStub::Types::Generic(); |
2222 | 2238 |
2223 if (expected.Contains(ToBooleanStub::UNDEFINED)) { | 2239 if (expected.Contains(ToBooleanStub::UNDEFINED)) { |
2224 // undefined -> false. | 2240 // undefined -> false. |
2225 __ CompareRoot(reg, Heap::kUndefinedValueRootIndex); | 2241 __ CompareRoot(reg, Heap::kUndefinedValueRootIndex); |
2226 __ b(eq, false_label); | 2242 __ b(eq, false_label); |
2227 } | 2243 } |
2228 if (expected.Contains(ToBooleanStub::BOOLEAN)) { | 2244 if (expected.Contains(ToBooleanStub::BOOLEAN)) { |
2229 // Boolean -> its value. | 2245 // Boolean -> its value. |
2230 __ CompareRoot(reg, Heap::kTrueValueRootIndex); | 2246 __ CompareRoot(reg, Heap::kTrueValueRootIndex); |
2231 __ b(eq, true_label); | 2247 __ b(eq, true_label); |
(...skipping 60 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2292 __ CompareRoot(map, Heap::kHeapNumberMapRootIndex); | 2308 __ CompareRoot(map, Heap::kHeapNumberMapRootIndex); |
2293 __ b(ne, ¬_heap_number); | 2309 __ b(ne, ¬_heap_number); |
2294 __ vldr(dbl_scratch, FieldMemOperand(reg, HeapNumber::kValueOffset)); | 2310 __ vldr(dbl_scratch, FieldMemOperand(reg, HeapNumber::kValueOffset)); |
2295 __ VFPCompareAndSetFlags(dbl_scratch, 0.0); | 2311 __ VFPCompareAndSetFlags(dbl_scratch, 0.0); |
2296 __ cmp(r0, r0, vs); // NaN -> false. | 2312 __ cmp(r0, r0, vs); // NaN -> false. |
2297 __ b(eq, false_label); // +0, -0 -> false. | 2313 __ b(eq, false_label); // +0, -0 -> false. |
2298 __ b(true_label); | 2314 __ b(true_label); |
2299 __ bind(¬_heap_number); | 2315 __ bind(¬_heap_number); |
2300 } | 2316 } |
2301 | 2317 |
2302 // We've seen something for the first time -> deopt. | 2318 if (!expected.IsGeneric()) { |
2303 DeoptimizeIf(al, instr->environment()); | 2319 // We've seen something for the first time -> deopt. |
| 2320 // This can only happen if we are not generic already. |
| 2321 DeoptimizeIf(al, instr->environment()); |
| 2322 } |
2304 } | 2323 } |
2305 } | 2324 } |
2306 } | 2325 } |
2307 | 2326 |
2308 | 2327 |
2309 void LCodeGen::EmitGoto(int block) { | 2328 void LCodeGen::EmitGoto(int block) { |
2310 if (!IsNextEmittedBlock(block)) { | 2329 if (!IsNextEmittedBlock(block)) { |
2311 __ jmp(chunk_->GetAssemblyLabel(chunk_->LookupDestination(block))); | 2330 __ jmp(chunk_->GetAssemblyLabel(chunk_->LookupDestination(block))); |
2312 } | 2331 } |
2313 } | 2332 } |
(...skipping 3566 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
5880 __ sub(scratch, result, Operand::PointerOffsetFromSmiKey(index)); | 5899 __ sub(scratch, result, Operand::PointerOffsetFromSmiKey(index)); |
5881 __ ldr(result, FieldMemOperand(scratch, | 5900 __ ldr(result, FieldMemOperand(scratch, |
5882 FixedArray::kHeaderSize - kPointerSize)); | 5901 FixedArray::kHeaderSize - kPointerSize)); |
5883 __ bind(&done); | 5902 __ bind(&done); |
5884 } | 5903 } |
5885 | 5904 |
5886 | 5905 |
5887 #undef __ | 5906 #undef __ |
5888 | 5907 |
5889 } } // namespace v8::internal | 5908 } } // namespace v8::internal |
OLD | NEW |