Chromium Code Reviews
Side by Side Diff: src/compiler/change-lowering.cc

Issue 1926023002: [turbofan] Run everything after representation selection concurrently. (Closed)
Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Remove some dead code. Created 4 years, 7 months ago
OLD | NEW
1 // Copyright 2014 the V8 project authors. All rights reserved. 1 // Copyright 2014 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "src/compiler/change-lowering.h" 5 #include "src/compiler/change-lowering.h"
6 6
7 #include "src/address-map.h" 7 #include "src/address-map.h"
8 #include "src/code-factory.h" 8 #include "src/code-factory.h"
9 #include "src/compiler/js-graph.h" 9 #include "src/compiler/js-graph.h"
10 #include "src/compiler/linkage.h" 10 #include "src/compiler/linkage.h"
11 #include "src/compiler/machine-operator.h" 11 #include "src/compiler/machine-operator.h"
12 #include "src/compiler/node-properties.h" 12 #include "src/compiler/node-properties.h"
13 #include "src/compiler/operator-properties.h" 13 #include "src/compiler/operator-properties.h"
14 #include "src/compiler/simplified-operator.h" 14 #include "src/compiler/simplified-operator.h"
15 15
16 namespace v8 { 16 namespace v8 {
17 namespace internal { 17 namespace internal {
18 namespace compiler { 18 namespace compiler {
19 19
20 ChangeLowering::~ChangeLowering() {} 20 ChangeLowering::~ChangeLowering() {}
21 21
22 22
23 Reduction ChangeLowering::Reduce(Node* node) { 23 Reduction ChangeLowering::Reduce(Node* node) {
24 Node* control = graph()->start(); 24 Node* control = graph()->start();
25 switch (node->opcode()) { 25 switch (node->opcode()) {
26 case IrOpcode::kChangeBitToBool: 26 case IrOpcode::kChangeBitToBool:
27 return ChangeBitToBool(node->InputAt(0), control); 27 return ReduceChangeBitToBool(node->InputAt(0), control);
28 case IrOpcode::kChangeBoolToBit: 28 case IrOpcode::kChangeBoolToBit:
29 return ChangeBoolToBit(node->InputAt(0)); 29 return ReduceChangeBoolToBit(node->InputAt(0));
30 case IrOpcode::kChangeInt31ToTagged: 30 case IrOpcode::kChangeInt31ToTagged:
31 return ChangeInt31ToTagged(node->InputAt(0), control); 31 return ReduceChangeInt31ToTagged(node->InputAt(0), control);
32 case IrOpcode::kChangeTaggedSignedToInt32: 32 case IrOpcode::kChangeTaggedSignedToInt32:
33 return ChangeTaggedSignedToInt32(node->InputAt(0)); 33 return ReduceChangeTaggedSignedToInt32(node->InputAt(0));
34 case IrOpcode::kLoadField: 34 case IrOpcode::kLoadField:
35 return LoadField(node); 35 return ReduceLoadField(node);
36 case IrOpcode::kStoreField: 36 case IrOpcode::kStoreField:
37 return StoreField(node); 37 return ReduceStoreField(node);
38 case IrOpcode::kLoadElement: 38 case IrOpcode::kLoadElement:
39 return LoadElement(node); 39 return ReduceLoadElement(node);
40 case IrOpcode::kStoreElement: 40 case IrOpcode::kStoreElement:
41 return StoreElement(node); 41 return ReduceStoreElement(node);
42 case IrOpcode::kAllocate: 42 case IrOpcode::kAllocate:
43 return Allocate(node); 43 return ReduceAllocate(node);
44 case IrOpcode::kObjectIsSmi: 44 case IrOpcode::kObjectIsSmi:
45 return ObjectIsSmi(node); 45 return ReduceObjectIsSmi(node);
46 case IrOpcode::kChangeInt32ToTagged:
47 case IrOpcode::kChangeUint32ToTagged:
48 case IrOpcode::kChangeFloat64ToTagged:
49 FATAL("Changes should be already lowered during effect linearization.");
50 break;
51 default: 46 default:
52 return NoChange(); 47 return NoChange();
53 } 48 }
54 UNREACHABLE(); 49 UNREACHABLE();
55 return NoChange(); 50 return NoChange();
56 } 51 }
57 52
58
59 Node* ChangeLowering::HeapNumberValueIndexConstant() {
60 return jsgraph()->IntPtrConstant(HeapNumber::kValueOffset - kHeapObjectTag);
61 }
62
63 Node* ChangeLowering::SmiShiftBitsConstant() { 53 Node* ChangeLowering::SmiShiftBitsConstant() {
64 return jsgraph()->IntPtrConstant(kSmiShiftSize + kSmiTagSize); 54 return jsgraph()->IntPtrConstant(kSmiShiftSize + kSmiTagSize);
65 } 55 }
66 56
67 Node* ChangeLowering::ChangeInt32ToFloat64(Node* value) {
68 return graph()->NewNode(machine()->ChangeInt32ToFloat64(), value);
69 }
70
71 Node* ChangeLowering::ChangeInt32ToSmi(Node* value) { 57 Node* ChangeLowering::ChangeInt32ToSmi(Node* value) {
72 if (machine()->Is64()) { 58 if (machine()->Is64()) {
73 value = graph()->NewNode(machine()->ChangeInt32ToInt64(), value); 59 value = graph()->NewNode(machine()->ChangeInt32ToInt64(), value);
74 } 60 }
75 return graph()->NewNode(machine()->WordShl(), value, SmiShiftBitsConstant()); 61 return graph()->NewNode(machine()->WordShl(), value, SmiShiftBitsConstant());
76 } 62 }
77 63
78
79 Node* ChangeLowering::ChangeSmiToFloat64(Node* value) {
80 return ChangeInt32ToFloat64(ChangeSmiToWord32(value));
81 }
82
83 Node* ChangeLowering::ChangeSmiToWord32(Node* value) { 64 Node* ChangeLowering::ChangeSmiToWord32(Node* value) {
84 value = graph()->NewNode(machine()->WordSar(), value, SmiShiftBitsConstant()); 65 value = graph()->NewNode(machine()->WordSar(), value, SmiShiftBitsConstant());
85 if (machine()->Is64()) { 66 if (machine()->Is64()) {
86 value = graph()->NewNode(machine()->TruncateInt64ToInt32(), value); 67 value = graph()->NewNode(machine()->TruncateInt64ToInt32(), value);
87 } 68 }
88 return value; 69 return value;
89 } 70 }
90 71
91 72
92 Node* ChangeLowering::ChangeUint32ToFloat64(Node* value) { 73 Node* ChangeLowering::ChangeUint32ToFloat64(Node* value) {
93 return graph()->NewNode(machine()->ChangeUint32ToFloat64(), value); 74 return graph()->NewNode(machine()->ChangeUint32ToFloat64(), value);
94 } 75 }
95 76
96 77 Reduction ChangeLowering::ReduceChangeBitToBool(Node* value, Node* control) {
97 Node* ChangeLowering::ChangeUint32ToSmi(Node* value) {
98 if (machine()->Is64()) {
99 value = graph()->NewNode(machine()->ChangeUint32ToUint64(), value);
100 }
101 return graph()->NewNode(machine()->WordShl(), value, SmiShiftBitsConstant());
102 }
103
104
105 Node* ChangeLowering::LoadHeapNumberValue(Node* value, Node* control) {
106 return graph()->NewNode(machine()->Load(MachineType::Float64()), value,
107 HeapNumberValueIndexConstant(), graph()->start(),
108 control);
109 }
110
111
112 Node* ChangeLowering::TestNotSmi(Node* value) {
113 STATIC_ASSERT(kSmiTag == 0);
114 STATIC_ASSERT(kSmiTagMask == 1);
115 return graph()->NewNode(machine()->WordAnd(), value,
116 jsgraph()->IntPtrConstant(kSmiTagMask));
117 }
118
119
120 Reduction ChangeLowering::ChangeBitToBool(Node* value, Node* control) {
121 return Replace( 78 return Replace(
122 graph()->NewNode(common()->Select(MachineRepresentation::kTagged), value, 79 graph()->NewNode(common()->Select(MachineRepresentation::kTagged), value,
123 jsgraph()->TrueConstant(), jsgraph()->FalseConstant())); 80 jsgraph()->TrueConstant(), jsgraph()->FalseConstant()));
124 } 81 }
125 82
126 83 Reduction ChangeLowering::ReduceChangeBoolToBit(Node* value) {
127 Reduction ChangeLowering::ChangeBoolToBit(Node* value) {
128 return Replace(graph()->NewNode(machine()->WordEqual(), value, 84 return Replace(graph()->NewNode(machine()->WordEqual(), value,
129 jsgraph()->TrueConstant())); 85 jsgraph()->TrueConstant()));
130 } 86 }
131 87
132 Reduction ChangeLowering::ChangeInt31ToTagged(Node* value, Node* control) { 88 Reduction ChangeLowering::ReduceChangeInt31ToTagged(Node* value,
89 Node* control) {
133 return Replace(ChangeInt32ToSmi(value)); 90 return Replace(ChangeInt32ToSmi(value));
134 } 91 }
135 92
136 Reduction ChangeLowering::ChangeTaggedSignedToInt32(Node* value) { 93 Reduction ChangeLowering::ReduceChangeTaggedSignedToInt32(Node* value) {
137 return Replace(ChangeSmiToWord32(value)); 94 return Replace(ChangeSmiToWord32(value));
138 } 95 }
139 96
140 namespace { 97 namespace {
141 98
142 WriteBarrierKind ComputeWriteBarrierKind(BaseTaggedness base_is_tagged, 99 WriteBarrierKind ComputeWriteBarrierKind(BaseTaggedness base_is_tagged,
143 MachineRepresentation representation, 100 MachineRepresentation representation,
144 Type* field_type, Type* input_type) { 101 Node* value) {
145 if (field_type->Is(Type::TaggedSigned()) || 102 // TODO(bmeurer): Optimize write barriers based on input.
146 input_type->Is(Type::TaggedSigned())) {
147 // Write barriers are only for writes of heap objects.
148 return kNoWriteBarrier;
149 }
150 if (input_type->Is(Type::BooleanOrNullOrUndefined())) {
151 // Write barriers are not necessary when storing true, false, null or
152 // undefined, because these special oddballs are always in the root set.
153 return kNoWriteBarrier;
154 }
155 if (base_is_tagged == kTaggedBase && 103 if (base_is_tagged == kTaggedBase &&
156 representation == MachineRepresentation::kTagged) { 104 representation == MachineRepresentation::kTagged) {
157 if (input_type->IsConstant() && 105 if (value->opcode() == IrOpcode::kHeapConstant) {
158 input_type->AsConstant()->Value()->IsHeapObject()) { 106 return kPointerWriteBarrier;
159 Handle<HeapObject> input = 107 } else if (value->opcode() == IrOpcode::kNumberConstant) {
160 Handle<HeapObject>::cast(input_type->AsConstant()->Value()); 108 double const number_value = OpParameter<double>(value);
161 if (input->IsMap()) { 109 if (IsSmiDouble(number_value)) return kNoWriteBarrier;
162 // Write barriers for storing maps are cheaper.
163 return kMapWriteBarrier;
164 }
165 Isolate* const isolate = input->GetIsolate();
166 RootIndexMap root_index_map(isolate);
167 int root_index = root_index_map.Lookup(*input);
168 if (root_index != RootIndexMap::kInvalidRootIndex &&
169 isolate->heap()->RootIsImmortalImmovable(root_index)) {
170 // Write barriers are unnecessary for immortal immovable roots.
171 return kNoWriteBarrier;
172 }
173 }
174 if (field_type->Is(Type::TaggedPointer()) ||
175 input_type->Is(Type::TaggedPointer())) {
176 // Write barriers for heap objects don't need a Smi check.
177 return kPointerWriteBarrier; 110 return kPointerWriteBarrier;
178 } 111 }
179 // Write barriers are only for writes into heap objects (i.e. tagged base).
180 return kFullWriteBarrier; 112 return kFullWriteBarrier;
181 } 113 }
182 return kNoWriteBarrier; 114 return kNoWriteBarrier;
183 } 115 }
184 116
185
186 WriteBarrierKind ComputeWriteBarrierKind(BaseTaggedness base_is_tagged, 117 WriteBarrierKind ComputeWriteBarrierKind(BaseTaggedness base_is_tagged,
187 MachineRepresentation representation, 118 MachineRepresentation representation,
188 int field_offset, Type* field_type, 119 int field_offset, Node* value) {
189 Type* input_type) {
190 if (base_is_tagged == kTaggedBase && field_offset == HeapObject::kMapOffset) { 120 if (base_is_tagged == kTaggedBase && field_offset == HeapObject::kMapOffset) {
191 // Write barriers for storing maps are cheaper. 121 // Write barriers for storing maps are cheaper.
192 return kMapWriteBarrier; 122 return kMapWriteBarrier;
193 } 123 }
194 return ComputeWriteBarrierKind(base_is_tagged, representation, field_type, 124 return ComputeWriteBarrierKind(base_is_tagged, representation, value);
195 input_type);
196 } 125 }
197 126
198 } // namespace 127 } // namespace
199 128
200 129 Reduction ChangeLowering::ReduceLoadField(Node* node) {
201 Reduction ChangeLowering::LoadField(Node* node) {
202 const FieldAccess& access = FieldAccessOf(node->op()); 130 const FieldAccess& access = FieldAccessOf(node->op());
203 Node* offset = jsgraph()->IntPtrConstant(access.offset - access.tag()); 131 Node* offset = jsgraph()->IntPtrConstant(access.offset - access.tag());
204 node->InsertInput(graph()->zone(), 1, offset); 132 node->InsertInput(graph()->zone(), 1, offset);
205 NodeProperties::ChangeOp(node, machine()->Load(access.machine_type)); 133 NodeProperties::ChangeOp(node, machine()->Load(access.machine_type));
206 return Changed(node); 134 return Changed(node);
207 } 135 }
208 136
209 137 Reduction ChangeLowering::ReduceStoreField(Node* node) {
210 Reduction ChangeLowering::StoreField(Node* node) {
211 const FieldAccess& access = FieldAccessOf(node->op()); 138 const FieldAccess& access = FieldAccessOf(node->op());
212 Type* type = NodeProperties::GetType(node->InputAt(1));
213 WriteBarrierKind kind = ComputeWriteBarrierKind( 139 WriteBarrierKind kind = ComputeWriteBarrierKind(
214 access.base_is_tagged, access.machine_type.representation(), 140 access.base_is_tagged, access.machine_type.representation(),
215 access.offset, access.type, type); 141 access.offset, node->InputAt(1));
216 Node* offset = jsgraph()->IntPtrConstant(access.offset - access.tag()); 142 Node* offset = jsgraph()->IntPtrConstant(access.offset - access.tag());
217 node->InsertInput(graph()->zone(), 1, offset); 143 node->InsertInput(graph()->zone(), 1, offset);
218 NodeProperties::ChangeOp(node, 144 NodeProperties::ChangeOp(node,
219 machine()->Store(StoreRepresentation( 145 machine()->Store(StoreRepresentation(
220 access.machine_type.representation(), kind))); 146 access.machine_type.representation(), kind)));
221 return Changed(node); 147 return Changed(node);
222 } 148 }
223 149
224 150
225 Node* ChangeLowering::ComputeIndex(const ElementAccess& access, 151 Node* ChangeLowering::ComputeIndex(const ElementAccess& access,
(...skipping 12 matching lines...)
238 } 164 }
239 if (machine()->Is64()) { 165 if (machine()->Is64()) {
240 // TODO(turbofan): This is probably only correct for typed arrays, and only 166 // TODO(turbofan): This is probably only correct for typed arrays, and only
241 // if the typed arrays are at most 2GiB in size, which happens to match 167 // if the typed arrays are at most 2GiB in size, which happens to match
242 // exactly our current situation. 168 // exactly our current situation.
243 index = graph()->NewNode(machine()->ChangeUint32ToUint64(), index); 169 index = graph()->NewNode(machine()->ChangeUint32ToUint64(), index);
244 } 170 }
245 return index; 171 return index;
246 } 172 }
247 173
248 174 Reduction ChangeLowering::ReduceLoadElement(Node* node) {
249 Reduction ChangeLowering::LoadElement(Node* node) {
250 const ElementAccess& access = ElementAccessOf(node->op()); 175 const ElementAccess& access = ElementAccessOf(node->op());
251 node->ReplaceInput(1, ComputeIndex(access, node->InputAt(1))); 176 node->ReplaceInput(1, ComputeIndex(access, node->InputAt(1)));
252 NodeProperties::ChangeOp(node, machine()->Load(access.machine_type)); 177 NodeProperties::ChangeOp(node, machine()->Load(access.machine_type));
253 return Changed(node); 178 return Changed(node);
254 } 179 }
255 180
256 181 Reduction ChangeLowering::ReduceStoreElement(Node* node) {
257 Reduction ChangeLowering::StoreElement(Node* node) {
258 const ElementAccess& access = ElementAccessOf(node->op()); 182 const ElementAccess& access = ElementAccessOf(node->op());
259 Type* type = NodeProperties::GetType(node->InputAt(2));
260 node->ReplaceInput(1, ComputeIndex(access, node->InputAt(1))); 183 node->ReplaceInput(1, ComputeIndex(access, node->InputAt(1)));
261 NodeProperties::ChangeOp( 184 NodeProperties::ChangeOp(
262 node, machine()->Store(StoreRepresentation( 185 node, machine()->Store(StoreRepresentation(
263 access.machine_type.representation(), 186 access.machine_type.representation(),
264 ComputeWriteBarrierKind(access.base_is_tagged, 187 ComputeWriteBarrierKind(access.base_is_tagged,
265 access.machine_type.representation(), 188 access.machine_type.representation(),
266 access.type, type)))); 189 node->InputAt(2)))));
267 return Changed(node); 190 return Changed(node);
268 } 191 }
269 192
270 193 Reduction ChangeLowering::ReduceAllocate(Node* node) {
271 Reduction ChangeLowering::Allocate(Node* node) {
272 PretenureFlag pretenure = OpParameter<PretenureFlag>(node->op()); 194 PretenureFlag pretenure = OpParameter<PretenureFlag>(node->op());
273 Callable callable = CodeFactory::Allocate(isolate(), pretenure); 195 Node* target = pretenure == NOT_TENURED
274 Node* target = jsgraph()->HeapConstant(callable.code()); 196 ? jsgraph()->AllocateInNewSpaceStubConstant()
275 CallDescriptor* descriptor = Linkage::GetStubCallDescriptor( 197 : jsgraph()->AllocateInOldSpaceStubConstant();
276 isolate(), jsgraph()->zone(), callable.descriptor(), 0,
277 CallDescriptor::kNoFlags, Operator::kNoThrow);
278 const Operator* op = common()->Call(descriptor);
279 node->InsertInput(graph()->zone(), 0, target); 198 node->InsertInput(graph()->zone(), 0, target);
280 node->InsertInput(graph()->zone(), 2, jsgraph()->NoContextConstant()); 199 if (!allocate_operator_.is_set()) {
281 NodeProperties::ChangeOp(node, op); 200 CallDescriptor* descriptor =
201 Linkage::GetAllocateCallDescriptor(graph()->zone());
202 allocate_operator_.set(common()->Call(descriptor));
203 }
204 NodeProperties::ChangeOp(node, allocate_operator_.get());
282 return Changed(node); 205 return Changed(node);
283 } 206 }
284 207
285 Node* ChangeLowering::IsSmi(Node* value) { 208 Reduction ChangeLowering::ReduceObjectIsSmi(Node* node) {
286 return graph()->NewNode(
287 machine()->WordEqual(),
288 graph()->NewNode(machine()->WordAnd(), value,
289 jsgraph()->IntPtrConstant(kSmiTagMask)),
290 jsgraph()->IntPtrConstant(kSmiTag));
291 }
292
293 Node* ChangeLowering::LoadHeapObjectMap(Node* object, Node* control) {
294 return graph()->NewNode(
295 machine()->Load(MachineType::AnyTagged()), object,
296 jsgraph()->IntPtrConstant(HeapObject::kMapOffset - kHeapObjectTag),
297 graph()->start(), control);
298 }
299
300 Node* ChangeLowering::LoadMapBitField(Node* map) {
301 return graph()->NewNode(
302 machine()->Load(MachineType::Uint8()), map,
303 jsgraph()->IntPtrConstant(Map::kBitFieldOffset - kHeapObjectTag),
304 graph()->start(), graph()->start());
305 }
306
307 Node* ChangeLowering::LoadMapInstanceType(Node* map) {
308 return graph()->NewNode(
309 machine()->Load(MachineType::Uint8()), map,
310 jsgraph()->IntPtrConstant(Map::kInstanceTypeOffset - kHeapObjectTag),
311 graph()->start(), graph()->start());
312 }
313
314 Reduction ChangeLowering::ObjectIsSmi(Node* node) {
315 node->ReplaceInput(0, 209 node->ReplaceInput(0,
316 graph()->NewNode(machine()->WordAnd(), node->InputAt(0), 210 graph()->NewNode(machine()->WordAnd(), node->InputAt(0),
317 jsgraph()->IntPtrConstant(kSmiTagMask))); 211 jsgraph()->IntPtrConstant(kSmiTagMask)));
318 node->AppendInput(graph()->zone(), jsgraph()->IntPtrConstant(kSmiTag)); 212 node->AppendInput(graph()->zone(), jsgraph()->IntPtrConstant(kSmiTag));
319 NodeProperties::ChangeOp(node, machine()->WordEqual()); 213 NodeProperties::ChangeOp(node, machine()->WordEqual());
320 return Changed(node); 214 return Changed(node);
321 } 215 }
322 216
323 Isolate* ChangeLowering::isolate() const { return jsgraph()->isolate(); } 217 Isolate* ChangeLowering::isolate() const { return jsgraph()->isolate(); }
324 218
325 219
326 Graph* ChangeLowering::graph() const { return jsgraph()->graph(); } 220 Graph* ChangeLowering::graph() const { return jsgraph()->graph(); }
327 221
328 222
329 CommonOperatorBuilder* ChangeLowering::common() const { 223 CommonOperatorBuilder* ChangeLowering::common() const {
330 return jsgraph()->common(); 224 return jsgraph()->common();
331 } 225 }
332 226
333 227
334 MachineOperatorBuilder* ChangeLowering::machine() const { 228 MachineOperatorBuilder* ChangeLowering::machine() const {
335 return jsgraph()->machine(); 229 return jsgraph()->machine();
336 } 230 }
337 231
338 } // namespace compiler 232 } // namespace compiler
339 } // namespace internal 233 } // namespace internal
340 } // namespace v8 234 } // namespace v8
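
As context for the Smi helpers this patch leaves untouched (ChangeInt32ToSmi and ChangeSmiToWord32), the following standalone sketch illustrates the tagging arithmetic that the emitted WordShl/WordSar with SmiShiftBitsConstant() perform. It assumes a 64-bit configuration with kSmiTagSize == 1, kSmiShiftSize == 31, and kSmiTag == 0; it is an illustration only, not code from the patch.

// Standalone sketch (not part of this patch): the Smi tagging scheme that
// ChangeInt32ToSmi / ChangeSmiToWord32 emit WordShl / WordSar for.
// Assumes a 64-bit build with kSmiTagSize == 1, kSmiShiftSize == 31 and
// kSmiTag == 0, i.e. the 32-bit payload lives in the upper half of the word.
#include <cassert>
#include <cstdint>

constexpr int kSmiTagSize = 1;
constexpr int kSmiShiftSize = 31;  // 0 on 32-bit targets.
constexpr int kSmiShiftBits = kSmiShiftSize + kSmiTagSize;

// Mirrors ChangeInt32ToInt64 followed by WordShl(value, SmiShiftBitsConstant()).
int64_t TagSmi(int32_t value) {
  return static_cast<int64_t>(value) << kSmiShiftBits;
}

// Mirrors WordSar(value, SmiShiftBitsConstant()) followed by TruncateInt64ToInt32.
int32_t UntagSmi(int64_t tagged) {
  return static_cast<int32_t>(tagged >> kSmiShiftBits);
}

int main() {
  for (int32_t v : {0, 1, -1, 42, -0x40000000, 0x3fffffff}) {
    assert(UntagSmi(TagSmi(v)) == v);
  }
  return 0;
}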