OLD | NEW |
1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/compiler/change-lowering.h" | 5 #include "src/compiler/change-lowering.h" |
6 | 6 |
7 #include "src/address-map.h" | 7 #include "src/address-map.h" |
8 #include "src/code-factory.h" | 8 #include "src/code-factory.h" |
9 #include "src/compiler/js-graph.h" | 9 #include "src/compiler/js-graph.h" |
10 #include "src/compiler/linkage.h" | 10 #include "src/compiler/linkage.h" |
11 #include "src/compiler/machine-operator.h" | 11 #include "src/compiler/machine-operator.h" |
12 #include "src/compiler/node-properties.h" | 12 #include "src/compiler/node-properties.h" |
13 #include "src/compiler/operator-properties.h" | 13 #include "src/compiler/operator-properties.h" |
14 #include "src/compiler/simplified-operator.h" | 14 #include "src/compiler/simplified-operator.h" |
15 | 15 |
16 namespace v8 { | 16 namespace v8 { |
17 namespace internal { | 17 namespace internal { |
18 namespace compiler { | 18 namespace compiler { |
19 | 19 |
20 ChangeLowering::~ChangeLowering() {} | 20 ChangeLowering::~ChangeLowering() {} |
21 | 21 |
22 | 22 |
23 Reduction ChangeLowering::Reduce(Node* node) { | 23 Reduction ChangeLowering::Reduce(Node* node) { |
24 Node* control = graph()->start(); | 24 Node* control = graph()->start(); |
25 switch (node->opcode()) { | 25 switch (node->opcode()) { |
26 case IrOpcode::kChangeBitToBool: | 26 case IrOpcode::kChangeBitToBool: |
27 return ReduceChangeBitToBool(node->InputAt(0), control); | 27 return ChangeBitToBool(node->InputAt(0), control); |
28 case IrOpcode::kChangeBoolToBit: | 28 case IrOpcode::kChangeBoolToBit: |
29 return ReduceChangeBoolToBit(node->InputAt(0)); | 29 return ChangeBoolToBit(node->InputAt(0)); |
30 case IrOpcode::kChangeInt31ToTagged: | 30 case IrOpcode::kChangeInt31ToTagged: |
31 return ReduceChangeInt31ToTagged(node->InputAt(0), control); | 31 return ChangeInt31ToTagged(node->InputAt(0), control); |
32 case IrOpcode::kChangeTaggedSignedToInt32: | 32 case IrOpcode::kChangeTaggedSignedToInt32: |
33 return ReduceChangeTaggedSignedToInt32(node->InputAt(0)); | 33 return ChangeTaggedSignedToInt32(node->InputAt(0)); |
34 case IrOpcode::kLoadField: | 34 case IrOpcode::kLoadField: |
35 return ReduceLoadField(node); | 35 return LoadField(node); |
36 case IrOpcode::kStoreField: | 36 case IrOpcode::kStoreField: |
37 return ReduceStoreField(node); | 37 return StoreField(node); |
38 case IrOpcode::kLoadElement: | 38 case IrOpcode::kLoadElement: |
39 return ReduceLoadElement(node); | 39 return LoadElement(node); |
40 case IrOpcode::kStoreElement: | 40 case IrOpcode::kStoreElement: |
41 return ReduceStoreElement(node); | 41 return StoreElement(node); |
42 case IrOpcode::kAllocate: | 42 case IrOpcode::kAllocate: |
43 return ReduceAllocate(node); | 43 return Allocate(node); |
44 case IrOpcode::kObjectIsSmi: | 44 case IrOpcode::kObjectIsSmi: |
45 return ReduceObjectIsSmi(node); | 45 return ObjectIsSmi(node); |
| 46 case IrOpcode::kChangeInt32ToTagged: |
| 47 case IrOpcode::kChangeUint32ToTagged: |
| 48 case IrOpcode::kChangeFloat64ToTagged: |
| 49 FATAL("Changes should be already lowered during effect linearization."); |
| 50 break; |
46 default: | 51 default: |
47 return NoChange(); | 52 return NoChange(); |
48 } | 53 } |
49 UNREACHABLE(); | 54 UNREACHABLE(); |
50 return NoChange(); | 55 return NoChange(); |
51 } | 56 } |
52 | 57 |
| 58 |
| 59 Node* ChangeLowering::HeapNumberValueIndexConstant() { |
| 60 return jsgraph()->IntPtrConstant(HeapNumber::kValueOffset - kHeapObjectTag); |
| 61 } |
| 62 |
53 Node* ChangeLowering::SmiShiftBitsConstant() { | 63 Node* ChangeLowering::SmiShiftBitsConstant() { |
54 return jsgraph()->IntPtrConstant(kSmiShiftSize + kSmiTagSize); | 64 return jsgraph()->IntPtrConstant(kSmiShiftSize + kSmiTagSize); |
55 } | 65 } |
56 | 66 |
| 67 Node* ChangeLowering::ChangeInt32ToFloat64(Node* value) { |
| 68 return graph()->NewNode(machine()->ChangeInt32ToFloat64(), value); |
| 69 } |
| 70 |
57 Node* ChangeLowering::ChangeInt32ToSmi(Node* value) { | 71 Node* ChangeLowering::ChangeInt32ToSmi(Node* value) { |
58 if (machine()->Is64()) { | 72 if (machine()->Is64()) { |
59 value = graph()->NewNode(machine()->ChangeInt32ToInt64(), value); | 73 value = graph()->NewNode(machine()->ChangeInt32ToInt64(), value); |
60 } | 74 } |
61 return graph()->NewNode(machine()->WordShl(), value, SmiShiftBitsConstant()); | 75 return graph()->NewNode(machine()->WordShl(), value, SmiShiftBitsConstant()); |
62 } | 76 } |
63 | 77 |
| 78 |
| 79 Node* ChangeLowering::ChangeSmiToFloat64(Node* value) { |
| 80 return ChangeInt32ToFloat64(ChangeSmiToWord32(value)); |
| 81 } |
| 82 |
64 Node* ChangeLowering::ChangeSmiToWord32(Node* value) { | 83 Node* ChangeLowering::ChangeSmiToWord32(Node* value) { |
65 value = graph()->NewNode(machine()->WordSar(), value, SmiShiftBitsConstant()); | 84 value = graph()->NewNode(machine()->WordSar(), value, SmiShiftBitsConstant()); |
66 if (machine()->Is64()) { | 85 if (machine()->Is64()) { |
67 value = graph()->NewNode(machine()->TruncateInt64ToInt32(), value); | 86 value = graph()->NewNode(machine()->TruncateInt64ToInt32(), value); |
68 } | 87 } |
69 return value; | 88 return value; |
70 } | 89 } |
71 | 90 |
72 | 91 |
73 Node* ChangeLowering::ChangeUint32ToFloat64(Node* value) { | 92 Node* ChangeLowering::ChangeUint32ToFloat64(Node* value) { |
74 return graph()->NewNode(machine()->ChangeUint32ToFloat64(), value); | 93 return graph()->NewNode(machine()->ChangeUint32ToFloat64(), value); |
75 } | 94 } |
76 | 95 |
77 Reduction ChangeLowering::ReduceChangeBitToBool(Node* value, Node* control) { | 96 |
| 97 Node* ChangeLowering::ChangeUint32ToSmi(Node* value) { |
| 98 if (machine()->Is64()) { |
| 99 value = graph()->NewNode(machine()->ChangeUint32ToUint64(), value); |
| 100 } |
| 101 return graph()->NewNode(machine()->WordShl(), value, SmiShiftBitsConstant()); |
| 102 } |
| 103 |
| 104 |
| 105 Node* ChangeLowering::LoadHeapNumberValue(Node* value, Node* control) { |
| 106 return graph()->NewNode(machine()->Load(MachineType::Float64()), value, |
| 107 HeapNumberValueIndexConstant(), graph()->start(), |
| 108 control); |
| 109 } |
| 110 |
| 111 |
| 112 Node* ChangeLowering::TestNotSmi(Node* value) { |
| 113 STATIC_ASSERT(kSmiTag == 0); |
| 114 STATIC_ASSERT(kSmiTagMask == 1); |
| 115 return graph()->NewNode(machine()->WordAnd(), value, |
| 116 jsgraph()->IntPtrConstant(kSmiTagMask)); |
| 117 } |
| 118 |
| 119 |
| 120 Reduction ChangeLowering::ChangeBitToBool(Node* value, Node* control) { |
78 return Replace( | 121 return Replace( |
79 graph()->NewNode(common()->Select(MachineRepresentation::kTagged), value, | 122 graph()->NewNode(common()->Select(MachineRepresentation::kTagged), value, |
80 jsgraph()->TrueConstant(), jsgraph()->FalseConstant())); | 123 jsgraph()->TrueConstant(), jsgraph()->FalseConstant())); |
81 } | 124 } |
82 | 125 |
83 Reduction ChangeLowering::ReduceChangeBoolToBit(Node* value) { | 126 |
| 127 Reduction ChangeLowering::ChangeBoolToBit(Node* value) { |
84 return Replace(graph()->NewNode(machine()->WordEqual(), value, | 128 return Replace(graph()->NewNode(machine()->WordEqual(), value, |
85 jsgraph()->TrueConstant())); | 129 jsgraph()->TrueConstant())); |
86 } | 130 } |
87 | 131 |
88 Reduction ChangeLowering::ReduceChangeInt31ToTagged(Node* value, | 132 Reduction ChangeLowering::ChangeInt31ToTagged(Node* value, Node* control) { |
89 Node* control) { | |
90 return Replace(ChangeInt32ToSmi(value)); | 133 return Replace(ChangeInt32ToSmi(value)); |
91 } | 134 } |
92 | 135 |
93 Reduction ChangeLowering::ReduceChangeTaggedSignedToInt32(Node* value) { | 136 Reduction ChangeLowering::ChangeTaggedSignedToInt32(Node* value) { |
94 return Replace(ChangeSmiToWord32(value)); | 137 return Replace(ChangeSmiToWord32(value)); |
95 } | 138 } |
96 | 139 |
97 namespace { | 140 namespace { |
98 | 141 |
99 WriteBarrierKind ComputeWriteBarrierKind(BaseTaggedness base_is_tagged, | 142 WriteBarrierKind ComputeWriteBarrierKind(BaseTaggedness base_is_tagged, |
100 MachineRepresentation representation) { | 143 MachineRepresentation representation, |
101 // TODO(bmeurer): Optimize write barriers based on input. | 144 Type* field_type, Type* input_type) { |
| 145 if (field_type->Is(Type::TaggedSigned()) || |
| 146 input_type->Is(Type::TaggedSigned())) { |
| 147 // Write barriers are only for writes of heap objects. |
| 148 return kNoWriteBarrier; |
| 149 } |
| 150 if (input_type->Is(Type::BooleanOrNullOrUndefined())) { |
| 151 // Write barriers are not necessary when storing true, false, null or |
| 152 // undefined, because these special oddballs are always in the root set. |
| 153 return kNoWriteBarrier; |
| 154 } |
102 if (base_is_tagged == kTaggedBase && | 155 if (base_is_tagged == kTaggedBase && |
103 representation == MachineRepresentation::kTagged) { | 156 representation == MachineRepresentation::kTagged) { |
| 157 if (input_type->IsConstant() && |
| 158 input_type->AsConstant()->Value()->IsHeapObject()) { |
| 159 Handle<HeapObject> input = |
| 160 Handle<HeapObject>::cast(input_type->AsConstant()->Value()); |
| 161 if (input->IsMap()) { |
| 162 // Write barriers for storing maps are cheaper. |
| 163 return kMapWriteBarrier; |
| 164 } |
| 165 Isolate* const isolate = input->GetIsolate(); |
| 166 RootIndexMap root_index_map(isolate); |
| 167 int root_index = root_index_map.Lookup(*input); |
| 168 if (root_index != RootIndexMap::kInvalidRootIndex && |
| 169 isolate->heap()->RootIsImmortalImmovable(root_index)) { |
| 170 // Write barriers are unnecessary for immortal immovable roots. |
| 171 return kNoWriteBarrier; |
| 172 } |
| 173 } |
| 174 if (field_type->Is(Type::TaggedPointer()) || |
| 175 input_type->Is(Type::TaggedPointer())) { |
| 176 // Write barriers for heap objects don't need a Smi check. |
| 177 return kPointerWriteBarrier; |
| 178 } |
| 179 // Write barriers are only for writes into heap objects (i.e. tagged base). |
104 return kFullWriteBarrier; | 180 return kFullWriteBarrier; |
105 } | 181 } |
106 return kNoWriteBarrier; | 182 return kNoWriteBarrier; |
107 } | 183 } |
108 | 184 |
| 185 |
109 WriteBarrierKind ComputeWriteBarrierKind(BaseTaggedness base_is_tagged, | 186 WriteBarrierKind ComputeWriteBarrierKind(BaseTaggedness base_is_tagged, |
110 MachineRepresentation representation, | 187 MachineRepresentation representation, |
111 int field_offset) { | 188 int field_offset, Type* field_type, |
| 189 Type* input_type) { |
112 if (base_is_tagged == kTaggedBase && field_offset == HeapObject::kMapOffset) { | 190 if (base_is_tagged == kTaggedBase && field_offset == HeapObject::kMapOffset) { |
113 // Write barriers for storing maps are cheaper. | 191 // Write barriers for storing maps are cheaper. |
114 return kMapWriteBarrier; | 192 return kMapWriteBarrier; |
115 } | 193 } |
116 return ComputeWriteBarrierKind(base_is_tagged, representation); | 194 return ComputeWriteBarrierKind(base_is_tagged, representation, field_type, |
| 195 input_type); |
117 } | 196 } |
118 | 197 |
119 } // namespace | 198 } // namespace |
120 | 199 |
121 Reduction ChangeLowering::ReduceLoadField(Node* node) { | 200 |
| 201 Reduction ChangeLowering::LoadField(Node* node) { |
122 const FieldAccess& access = FieldAccessOf(node->op()); | 202 const FieldAccess& access = FieldAccessOf(node->op()); |
123 Node* offset = jsgraph()->IntPtrConstant(access.offset - access.tag()); | 203 Node* offset = jsgraph()->IntPtrConstant(access.offset - access.tag()); |
124 node->InsertInput(graph()->zone(), 1, offset); | 204 node->InsertInput(graph()->zone(), 1, offset); |
125 NodeProperties::ChangeOp(node, machine()->Load(access.machine_type)); | 205 NodeProperties::ChangeOp(node, machine()->Load(access.machine_type)); |
126 return Changed(node); | 206 return Changed(node); |
127 } | 207 } |
128 | 208 |
129 Reduction ChangeLowering::ReduceStoreField(Node* node) { | 209 |
| 210 Reduction ChangeLowering::StoreField(Node* node) { |
130 const FieldAccess& access = FieldAccessOf(node->op()); | 211 const FieldAccess& access = FieldAccessOf(node->op()); |
| 212 Type* type = NodeProperties::GetType(node->InputAt(1)); |
131 WriteBarrierKind kind = ComputeWriteBarrierKind( | 213 WriteBarrierKind kind = ComputeWriteBarrierKind( |
132 access.base_is_tagged, access.machine_type.representation(), | 214 access.base_is_tagged, access.machine_type.representation(), |
133 access.offset); | 215 access.offset, access.type, type); |
134 Node* offset = jsgraph()->IntPtrConstant(access.offset - access.tag()); | 216 Node* offset = jsgraph()->IntPtrConstant(access.offset - access.tag()); |
135 node->InsertInput(graph()->zone(), 1, offset); | 217 node->InsertInput(graph()->zone(), 1, offset); |
136 NodeProperties::ChangeOp(node, | 218 NodeProperties::ChangeOp(node, |
137 machine()->Store(StoreRepresentation( | 219 machine()->Store(StoreRepresentation( |
138 access.machine_type.representation(), kind))); | 220 access.machine_type.representation(), kind))); |
139 return Changed(node); | 221 return Changed(node); |
140 } | 222 } |
141 | 223 |
142 | 224 |
143 Node* ChangeLowering::ComputeIndex(const ElementAccess& access, | 225 Node* ChangeLowering::ComputeIndex(const ElementAccess& access, |
(...skipping 12 matching lines...) |
156 } | 238 } |
157 if (machine()->Is64()) { | 239 if (machine()->Is64()) { |
158 // TODO(turbofan): This is probably only correct for typed arrays, and only | 240 // TODO(turbofan): This is probably only correct for typed arrays, and only |
159 // if the typed arrays are at most 2GiB in size, which happens to match | 241 // if the typed arrays are at most 2GiB in size, which happens to match |
160 // exactly our current situation. | 242 // exactly our current situation. |
161 index = graph()->NewNode(machine()->ChangeUint32ToUint64(), index); | 243 index = graph()->NewNode(machine()->ChangeUint32ToUint64(), index); |
162 } | 244 } |
163 return index; | 245 return index; |
164 } | 246 } |
165 | 247 |
166 Reduction ChangeLowering::ReduceLoadElement(Node* node) { | 248 |
| 249 Reduction ChangeLowering::LoadElement(Node* node) { |
167 const ElementAccess& access = ElementAccessOf(node->op()); | 250 const ElementAccess& access = ElementAccessOf(node->op()); |
168 node->ReplaceInput(1, ComputeIndex(access, node->InputAt(1))); | 251 node->ReplaceInput(1, ComputeIndex(access, node->InputAt(1))); |
169 NodeProperties::ChangeOp(node, machine()->Load(access.machine_type)); | 252 NodeProperties::ChangeOp(node, machine()->Load(access.machine_type)); |
170 return Changed(node); | 253 return Changed(node); |
171 } | 254 } |
172 | 255 |
173 Reduction ChangeLowering::ReduceStoreElement(Node* node) { | 256 |
| 257 Reduction ChangeLowering::StoreElement(Node* node) { |
174 const ElementAccess& access = ElementAccessOf(node->op()); | 258 const ElementAccess& access = ElementAccessOf(node->op()); |
| 259 Type* type = NodeProperties::GetType(node->InputAt(2)); |
175 node->ReplaceInput(1, ComputeIndex(access, node->InputAt(1))); | 260 node->ReplaceInput(1, ComputeIndex(access, node->InputAt(1))); |
176 NodeProperties::ChangeOp( | 261 NodeProperties::ChangeOp( |
177 node, | 262 node, machine()->Store(StoreRepresentation( |
178 machine()->Store(StoreRepresentation( | 263 access.machine_type.representation(), |
179 access.machine_type.representation(), | 264 ComputeWriteBarrierKind(access.base_is_tagged, |
180 ComputeWriteBarrierKind(access.base_is_tagged, | 265 access.machine_type.representation(), |
181 access.machine_type.representation())))); | 266 access.type, type)))); |
182 return Changed(node); | 267 return Changed(node); |
183 } | 268 } |
184 | 269 |
185 Reduction ChangeLowering::ReduceAllocate(Node* node) { | 270 |
| 271 Reduction ChangeLowering::Allocate(Node* node) { |
186 PretenureFlag pretenure = OpParameter<PretenureFlag>(node->op()); | 272 PretenureFlag pretenure = OpParameter<PretenureFlag>(node->op()); |
187 Callable callable = CodeFactory::Allocate(isolate(), pretenure); | 273 Callable callable = CodeFactory::Allocate(isolate(), pretenure); |
188 Node* target = jsgraph()->HeapConstant(callable.code()); | 274 Node* target = jsgraph()->HeapConstant(callable.code()); |
189 CallDescriptor* descriptor = Linkage::GetStubCallDescriptor( | 275 CallDescriptor* descriptor = Linkage::GetStubCallDescriptor( |
190 isolate(), jsgraph()->zone(), callable.descriptor(), 0, | 276 isolate(), jsgraph()->zone(), callable.descriptor(), 0, |
191 CallDescriptor::kNoFlags, Operator::kNoThrow); | 277 CallDescriptor::kNoFlags, Operator::kNoThrow); |
192 const Operator* op = common()->Call(descriptor); | 278 const Operator* op = common()->Call(descriptor); |
193 node->InsertInput(graph()->zone(), 0, target); | 279 node->InsertInput(graph()->zone(), 0, target); |
194 node->InsertInput(graph()->zone(), 2, jsgraph()->NoContextConstant()); | 280 node->InsertInput(graph()->zone(), 2, jsgraph()->NoContextConstant()); |
195 NodeProperties::ChangeOp(node, op); | 281 NodeProperties::ChangeOp(node, op); |
196 return Changed(node); | 282 return Changed(node); |
197 } | 283 } |
198 | 284 |
199 Reduction ChangeLowering::ReduceObjectIsSmi(Node* node) { | 285 Node* ChangeLowering::IsSmi(Node* value) { |
| 286 return graph()->NewNode( |
| 287 machine()->WordEqual(), |
| 288 graph()->NewNode(machine()->WordAnd(), value, |
| 289 jsgraph()->IntPtrConstant(kSmiTagMask)), |
| 290 jsgraph()->IntPtrConstant(kSmiTag)); |
| 291 } |
| 292 |
| 293 Node* ChangeLowering::LoadHeapObjectMap(Node* object, Node* control) { |
| 294 return graph()->NewNode( |
| 295 machine()->Load(MachineType::AnyTagged()), object, |
| 296 jsgraph()->IntPtrConstant(HeapObject::kMapOffset - kHeapObjectTag), |
| 297 graph()->start(), control); |
| 298 } |
| 299 |
| 300 Node* ChangeLowering::LoadMapBitField(Node* map) { |
| 301 return graph()->NewNode( |
| 302 machine()->Load(MachineType::Uint8()), map, |
| 303 jsgraph()->IntPtrConstant(Map::kBitFieldOffset - kHeapObjectTag), |
| 304 graph()->start(), graph()->start()); |
| 305 } |
| 306 |
| 307 Node* ChangeLowering::LoadMapInstanceType(Node* map) { |
| 308 return graph()->NewNode( |
| 309 machine()->Load(MachineType::Uint8()), map, |
| 310 jsgraph()->IntPtrConstant(Map::kInstanceTypeOffset - kHeapObjectTag), |
| 311 graph()->start(), graph()->start()); |
| 312 } |
| 313 |
| 314 Reduction ChangeLowering::ObjectIsSmi(Node* node) { |
200 node->ReplaceInput(0, | 315 node->ReplaceInput(0, |
201 graph()->NewNode(machine()->WordAnd(), node->InputAt(0), | 316 graph()->NewNode(machine()->WordAnd(), node->InputAt(0), |
202 jsgraph()->IntPtrConstant(kSmiTagMask))); | 317 jsgraph()->IntPtrConstant(kSmiTagMask))); |
203 node->AppendInput(graph()->zone(), jsgraph()->IntPtrConstant(kSmiTag)); | 318 node->AppendInput(graph()->zone(), jsgraph()->IntPtrConstant(kSmiTag)); |
204 NodeProperties::ChangeOp(node, machine()->WordEqual()); | 319 NodeProperties::ChangeOp(node, machine()->WordEqual()); |
205 return Changed(node); | 320 return Changed(node); |
206 } | 321 } |
207 | 322 |
208 Isolate* ChangeLowering::isolate() const { return jsgraph()->isolate(); } | 323 Isolate* ChangeLowering::isolate() const { return jsgraph()->isolate(); } |
209 | 324 |
210 | 325 |
211 Graph* ChangeLowering::graph() const { return jsgraph()->graph(); } | 326 Graph* ChangeLowering::graph() const { return jsgraph()->graph(); } |
212 | 327 |
213 | 328 |
214 CommonOperatorBuilder* ChangeLowering::common() const { | 329 CommonOperatorBuilder* ChangeLowering::common() const { |
215 return jsgraph()->common(); | 330 return jsgraph()->common(); |
216 } | 331 } |
217 | 332 |
218 | 333 |
219 MachineOperatorBuilder* ChangeLowering::machine() const { | 334 MachineOperatorBuilder* ChangeLowering::machine() const { |
220 return jsgraph()->machine(); | 335 return jsgraph()->machine(); |
221 } | 336 } |
222 | 337 |
223 } // namespace compiler | 338 } // namespace compiler |
224 } // namespace internal | 339 } // namespace internal |
225 } // namespace v8 | 340 } // namespace v8 |