OLD | NEW |
---|---|
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 2106 matching lines...) | |
2117 } else { | 2117 } else { |
2118 __ j(cc, chunk_->GetAssemblyLabel(left_block)); | 2118 __ j(cc, chunk_->GetAssemblyLabel(left_block)); |
2119 __ jmp(chunk_->GetAssemblyLabel(right_block)); | 2119 __ jmp(chunk_->GetAssemblyLabel(right_block)); |
2120 } | 2120 } |
2121 } | 2121 } |
2122 | 2122 |
2123 | 2123 |
2124 void LCodeGen::DoBranch(LBranch* instr) { | 2124 void LCodeGen::DoBranch(LBranch* instr) { |
2125 int true_block = chunk_->LookupDestination(instr->true_block_id()); | 2125 int true_block = chunk_->LookupDestination(instr->true_block_id()); |
2126 int false_block = chunk_->LookupDestination(instr->false_block_id()); | 2126 int false_block = chunk_->LookupDestination(instr->false_block_id()); |
2127 CpuFeatureScope scope(masm(), SSE2); | |
2128 | 2127 |
| 2128 FOR_ASSERT(bool considered_typefeedback = false); |
2129 Representation r = instr->hydrogen()->value()->representation(); | 2129 Representation r = instr->hydrogen()->value()->representation(); |
2130 if (r.IsSmiOrInteger32()) { | 2130 if (r.IsSmiOrInteger32()) { |
2131 Register reg = ToRegister(instr->value()); | 2131 Register reg = ToRegister(instr->value()); |
2132 __ test(reg, Operand(reg)); | 2132 __ test(reg, Operand(reg)); |
2133 EmitBranch(true_block, false_block, not_zero); | 2133 EmitBranch(true_block, false_block, not_zero); |
2134 } else if (r.IsDouble()) { | 2134 } else if (r.IsDouble()) { |
| 2135 CpuFeatureScope scope(masm(), SSE2); |
2135 XMMRegister reg = ToDoubleRegister(instr->value()); | 2136 XMMRegister reg = ToDoubleRegister(instr->value()); |
2136 __ xorps(xmm0, xmm0); | 2137 __ xorps(xmm0, xmm0); |
2137 __ ucomisd(reg, xmm0); | 2138 __ ucomisd(reg, xmm0); |
2138 EmitBranch(true_block, false_block, not_equal); | 2139 EmitBranch(true_block, false_block, not_equal); |
2139 } else { | 2140 } else { |
2140 ASSERT(r.IsTagged()); | 2141 ASSERT(r.IsTagged()); |
2141 Register reg = ToRegister(instr->value()); | 2142 Register reg = ToRegister(instr->value()); |
2142 HType type = instr->hydrogen()->value()->type(); | 2143 HType type = instr->hydrogen()->value()->type(); |
2143 if (type.IsBoolean()) { | 2144 if (type.IsBoolean()) { |
2144 __ cmp(reg, factory()->true_value()); | 2145 __ cmp(reg, factory()->true_value()); |
2145 EmitBranch(true_block, false_block, equal); | 2146 EmitBranch(true_block, false_block, equal); |
2146 } else if (type.IsSmi()) { | 2147 } else if (type.IsSmi()) { |
2147 __ test(reg, Operand(reg)); | 2148 __ test(reg, Operand(reg)); |
2148 EmitBranch(true_block, false_block, not_equal); | 2149 EmitBranch(true_block, false_block, not_equal); |
2149 } else { | 2150 } else { |
2150 Label* true_label = chunk_->GetAssemblyLabel(true_block); | 2151 Label* true_label = chunk_->GetAssemblyLabel(true_block); |
2151 Label* false_label = chunk_->GetAssemblyLabel(false_block); | 2152 Label* false_label = chunk_->GetAssemblyLabel(false_block); |
2152 | 2153 |
| 2154 FOR_ASSERT(considered_typefeedback = true); |
2153 ToBooleanStub::Types expected = instr->hydrogen()->expected_input_types(); | 2155 ToBooleanStub::Types expected = instr->hydrogen()->expected_input_types(); |
2154 // Avoid deopts in the case where we've never executed this path before. | 2156 // Avoid deopts in the case where we've never executed this path before. |
2155 if (expected.IsEmpty()) expected = ToBooleanStub::all_types(); | 2157 if (expected.IsEmpty()) expected = ToBooleanStub::all_types(); |
2156 | 2158 |
2157 if (expected.Contains(ToBooleanStub::UNDEFINED)) { | 2159 if (expected.Contains(ToBooleanStub::UNDEFINED)) { |
2158 // undefined -> false. | 2160 // undefined -> false. |
2159 __ cmp(reg, factory()->undefined_value()); | 2161 __ cmp(reg, factory()->undefined_value()); |
2160 __ j(equal, false_label); | 2162 __ j(equal, false_label); |
2161 } | 2163 } |
2162 if (expected.Contains(ToBooleanStub::BOOLEAN)) { | 2164 if (expected.Contains(ToBooleanStub::BOOLEAN)) { |
(...skipping 57 matching lines...) | |
2220 __ CmpInstanceType(map, SYMBOL_TYPE); | 2222 __ CmpInstanceType(map, SYMBOL_TYPE); |
2221 __ j(equal, true_label); | 2223 __ j(equal, true_label); |
2222 } | 2224 } |
2223 | 2225 |
2224 if (expected.Contains(ToBooleanStub::HEAP_NUMBER)) { | 2226 if (expected.Contains(ToBooleanStub::HEAP_NUMBER)) { |
2225 // heap number -> false iff +0, -0, or NaN. | 2227 // heap number -> false iff +0, -0, or NaN. |
2226 Label not_heap_number; | 2228 Label not_heap_number; |
2227 __ cmp(FieldOperand(reg, HeapObject::kMapOffset), | 2229 __ cmp(FieldOperand(reg, HeapObject::kMapOffset), |
2228 factory()->heap_number_map()); | 2230 factory()->heap_number_map()); |
2229 __ j(not_equal, &not_heap_number, Label::kNear); | 2231 __ j(not_equal, &not_heap_number, Label::kNear); |
2230 __ xorps(xmm0, xmm0); | 2232 if (CpuFeatures::IsSafeForSnapshot(SSE2)) { |
2231 __ ucomisd(xmm0, FieldOperand(reg, HeapNumber::kValueOffset)); | 2233 CpuFeatureScope scope(masm(), SSE2); |
| 2234 __ xorps(xmm0, xmm0); |
| 2235 __ ucomisd(xmm0, FieldOperand(reg, HeapNumber::kValueOffset)); |
| 2236 } else { |
| 2237 __ fldz(); |
| 2238 __ fld_d(FieldOperand(reg, HeapNumber::kValueOffset)); |
| 2239 __ FCmp(); |
| 2240 } |
2232 __ j(zero, false_label); | 2241 __ j(zero, false_label); |
2233 __ jmp(true_label); | 2242 __ jmp(true_label); |
2234 __ bind(&not_heap_number); | 2243 __ bind(&not_heap_number); |
2235 } | 2244 } |
2236 | 2245 |
2237 // We've seen something for the first time -> deopt. | 2246 // We've seen something for the first time -> deopt. |
2238 DeoptimizeIf(no_condition, instr->environment()); | 2247 DeoptimizeIf(no_condition, instr->environment()); |
2239 } | 2248 } |
2240 } | 2249 } |
| 2250 // Make sure not to poison the stub cache. |
| 2251 ASSERT(info()->IsStub() ? considered_typefeedback : true); |
Toon Verwaest 2013/05/28 09:33:32
Just move the ASSERT(!info()->IsStub()) to all oth
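The review note above is cut off in this capture, so the rest of the suggestion is not visible. One plausible reading, given the `FOR_ASSERT(... considered_typefeedback ...)` bookkeeping the patch adds: drop the end-of-function flag check and instead assert `!info()->IsStub()` directly in each branch that never consults type feedback. The toy sketch below (not V8 code; `branch_with_flag`, `branch_with_local_asserts`, `is_stub`, and `is_integer_input` are invented names) contrasts the two shapes:

```cpp
// Toy sketch only -- not V8 code; every name below is invented for
// illustration. V8 gates debug-only code with its own macros (e.g. the
// FOR_ASSERT used in the patch); plain assert/NDEBUG is used here instead.
#include <cassert>

// Shape used by the patch: record a flag, then assert once at the end that a
// stub compilation actually went through the type-feedback path.
void branch_with_flag(bool is_stub, bool is_integer_input) {
#ifndef NDEBUG
  bool considered_typefeedback = false;
#endif
  if (is_integer_input) {
    // Fast path that never looks at type feedback.
  } else {
#ifndef NDEBUG
    considered_typefeedback = true;
#endif
    // Generic tagged path that consults type feedback.
  }
#ifndef NDEBUG
  assert(!is_stub || considered_typefeedback);
#endif
}

// Shape the (truncated) comment seems to ask for: no flag, just an assert in
// every branch that a stub must never take.
void branch_with_local_asserts(bool is_stub, bool is_integer_input) {
  if (is_integer_input) {
    assert(!is_stub);  // Stubs are expected to take the generic path.
    // Fast path.
  } else {
    // Generic tagged path; nothing to check here.
  }
}

int main() {
  branch_with_flag(false, true);
  branch_with_local_asserts(false, true);
  return 0;
}
```

Either shape keeps the check out of release builds; the second simply avoids threading a flag through the function.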
2241 } | 2252 } |
2242 | 2253 |
2243 | 2254 |
2244 void LCodeGen::EmitGoto(int block) { | 2255 void LCodeGen::EmitGoto(int block) { |
2245 if (!IsNextEmittedBlock(block)) { | 2256 if (!IsNextEmittedBlock(block)) { |
2246 __ jmp(chunk_->GetAssemblyLabel(chunk_->LookupDestination(block))); | 2257 __ jmp(chunk_->GetAssemblyLabel(chunk_->LookupDestination(block))); |
2247 } | 2258 } |
2248 } | 2259 } |
2249 | 2260 |
2250 | 2261 |
(...skipping 4325 matching lines...) | |
6576 FixedArray::kHeaderSize - kPointerSize)); | 6587 FixedArray::kHeaderSize - kPointerSize)); |
6577 __ bind(&done); | 6588 __ bind(&done); |
6578 } | 6589 } |
6579 | 6590 |
6580 | 6591 |
6581 #undef __ | 6592 #undef __ |
6582 | 6593 |
6583 } } // namespace v8::internal | 6594 } } // namespace v8::internal |
6584 | 6595 |
6585 #endif // V8_TARGET_ARCH_IA32 | 6596 #endif // V8_TARGET_ARCH_IA32 |