Chromium Code Reviews

Side by Side Diff: src/compiler/change-lowering.cc

Issue 1963583004: [turbofan] Initial version of allocation folding and write barrier elimination. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Jaro's comments; Created 4 years, 7 months ago
// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/compiler/change-lowering.h"

#include "src/compiler/js-graph.h"
#include "src/compiler/linkage.h"
#include "src/compiler/machine-operator.h"
#include "src/compiler/node-properties.h"
#include "src/compiler/simplified-operator.h"

namespace v8 {
namespace internal {
namespace compiler {

ChangeLowering::~ChangeLowering() {}


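// Dispatches each node to its lowering: simplified field/element accesses
// become machine-level loads and stores, and Allocate nodes are expanded
// into inline allocation sequences.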
Reduction ChangeLowering::Reduce(Node* node) {
  switch (node->opcode()) {
    case IrOpcode::kLoadField:
      return ReduceLoadField(node);
    case IrOpcode::kStoreField:
      return ReduceStoreField(node);
    case IrOpcode::kLoadElement:
      return ReduceLoadElement(node);
    case IrOpcode::kStoreElement:
      return ReduceStoreElement(node);
    case IrOpcode::kAllocate:
      return ReduceAllocate(node);
    default:
      return NoChange();
  }
  UNREACHABLE();
  return NoChange();
}

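// Lowers LoadField to a machine Load at the field's untagged byte offset.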
Reduction ChangeLowering::ReduceLoadField(Node* node) {
  const FieldAccess& access = FieldAccessOf(node->op());
  Node* offset = jsgraph()->IntPtrConstant(access.offset - access.tag());
  node->InsertInput(graph()->zone(), 1, offset);
  NodeProperties::ChangeOp(node, machine()->Load(access.machine_type));
  return Changed(node);
}

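// Lowers StoreField to a machine Store at the field's untagged byte offset,
// carrying the access's write barrier kind through to the machine store.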
Reduction ChangeLowering::ReduceStoreField(Node* node) {
  const FieldAccess& access = FieldAccessOf(node->op());
  Node* offset = jsgraph()->IntPtrConstant(access.offset - access.tag());
  node->InsertInput(graph()->zone(), 1, offset);
  NodeProperties::ChangeOp(node, machine()->Store(StoreRepresentation(
                                     access.machine_type.representation(),
                                     access.write_barrier_kind)));
  return Changed(node);
}


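// Turns an element key into a byte offset: scales the key by the element
// size, adds the untagged header size, and widens the result to a word on
// 64-bit targets.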
Node* ChangeLowering::ComputeIndex(const ElementAccess& access,
                                   Node* const key) {
  Node* index = key;
  const int element_size_shift =
      ElementSizeLog2Of(access.machine_type.representation());
  if (element_size_shift) {
    index = graph()->NewNode(machine()->Word32Shl(), index,
                             jsgraph()->Int32Constant(element_size_shift));
  }
  const int fixed_offset = access.header_size - access.tag();
  if (fixed_offset) {
    index = graph()->NewNode(machine()->Int32Add(), index,
                             jsgraph()->Int32Constant(fixed_offset));
  }
  if (machine()->Is64()) {
    // TODO(turbofan): This is probably only correct for typed arrays, and only
    // if the typed arrays are at most 2GiB in size, which happens to match
    // exactly our current situation.
    index = graph()->NewNode(machine()->ChangeUint32ToUint64(), index);
  }
  return index;
}

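// Lowers LoadElement to a machine Load with a computed byte offset.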
Reduction ChangeLowering::ReduceLoadElement(Node* node) {
  const ElementAccess& access = ElementAccessOf(node->op());
  node->ReplaceInput(1, ComputeIndex(access, node->InputAt(1)));
  NodeProperties::ChangeOp(node, machine()->Load(access.machine_type));
  return Changed(node);
}

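// Lowers StoreElement to a machine Store with a computed byte offset,
// preserving the access's write barrier kind.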
Reduction ChangeLowering::ReduceStoreElement(Node* node) {
  const ElementAccess& access = ElementAccessOf(node->op());
  node->ReplaceInput(1, ComputeIndex(access, node->InputAt(1)));
  NodeProperties::ChangeOp(node, machine()->Store(StoreRepresentation(
                                     access.machine_type.representation(),
                                     access.write_barrier_kind)));
  return Changed(node);
}

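// Expands Allocate into an inline bump-pointer allocation in the chosen
// space, falling back to a call to the Allocate stub when the space is
// exhausted.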
Reduction ChangeLowering::ReduceAllocate(Node* node) {
  PretenureFlag const pretenure = OpParameter<PretenureFlag>(node->op());

  Node* size = node->InputAt(0);
  Node* effect = node->InputAt(1);
  Node* control = node->InputAt(2);

  if (machine()->Is64()) {
    size = graph()->NewNode(machine()->ChangeInt32ToInt64(), size);
  }

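  // Load the allocation top and limit of the target space, selected by the
  // pretenuring decision.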
  Node* top_address = jsgraph()->ExternalConstant(
      pretenure == NOT_TENURED
          ? ExternalReference::new_space_allocation_top_address(isolate())
          : ExternalReference::old_space_allocation_top_address(isolate()));
  Node* limit_address = jsgraph()->ExternalConstant(
      pretenure == NOT_TENURED
          ? ExternalReference::new_space_allocation_limit_address(isolate())
          : ExternalReference::old_space_allocation_limit_address(isolate()));

  Node* top = effect =
      graph()->NewNode(machine()->Load(MachineType::Pointer()), top_address,
                       jsgraph()->IntPtrConstant(0), effect, control);
  Node* limit = effect =
      graph()->NewNode(machine()->Load(MachineType::Pointer()), limit_address,
                       jsgraph()->IntPtrConstant(0), effect, control);

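  // Compute the new top and check that it remains below the allocation
  // limit.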
  Node* new_top = graph()->NewNode(machine()->IntAdd(), top, size);

  Node* check = graph()->NewNode(machine()->UintLessThan(), new_top, limit);
  Node* branch =
      graph()->NewNode(common()->Branch(BranchHint::kTrue), check, control);

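  // Fast path: commit the new top (no write barrier needed for the off-heap
  // top pointer) and tag the old top as the freshly allocated object.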
  Node* if_true = graph()->NewNode(common()->IfTrue(), branch);
  Node* etrue = effect;
  Node* vtrue;
  {
    etrue = graph()->NewNode(
        machine()->Store(StoreRepresentation(
            MachineType::PointerRepresentation(), kNoWriteBarrier)),
        top_address, jsgraph()->IntPtrConstant(0), new_top, etrue, if_true);
    vtrue = graph()->NewNode(
        machine()->BitcastWordToTagged(),
        graph()->NewNode(machine()->IntAdd(), top,
                         jsgraph()->IntPtrConstant(kHeapObjectTag)));
  }

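  // Slow path: call the AllocateInNewSpace or AllocateInOldSpace stub with
  // the requested size.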
  Node* if_false = graph()->NewNode(common()->IfFalse(), branch);
  Node* efalse = effect;
  Node* vfalse;
  {
    Node* target = pretenure == NOT_TENURED
                       ? jsgraph()->AllocateInNewSpaceStubConstant()
                       : jsgraph()->AllocateInOldSpaceStubConstant();
    if (!allocate_operator_.is_set()) {
      CallDescriptor* descriptor =
          Linkage::GetAllocateCallDescriptor(graph()->zone());
      allocate_operator_.set(common()->Call(descriptor));
    }
    vfalse = efalse = graph()->NewNode(allocate_operator_.get(), target, size,
                                       efalse, if_false);
  }

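  // Merge the two paths and rewire users of the original Allocate node to
  // the result.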
  control = graph()->NewNode(common()->Merge(2), if_true, if_false);
  effect = graph()->NewNode(common()->EffectPhi(2), etrue, efalse, control);
  Node* value = graph()->NewNode(
      common()->Phi(MachineRepresentation::kTagged, 2), vtrue, vfalse, control);

  ReplaceWithValue(node, value, effect);
  return Replace(value);
}

Isolate* ChangeLowering::isolate() const { return jsgraph()->isolate(); }


Graph* ChangeLowering::graph() const { return jsgraph()->graph(); }


CommonOperatorBuilder* ChangeLowering::common() const {
  return jsgraph()->common();
}


MachineOperatorBuilder* ChangeLowering::machine() const {
  return jsgraph()->machine();
}

}  // namespace compiler
}  // namespace internal
}  // namespace v8