Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(134)

Side by Side Diff: src/code-stub-assembler.cc

Issue 1875583003: Separate CodeAssembler and CodeStubAssembler (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Fix gn build. Again. Created 4 years, 8 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « src/code-stub-assembler.h ('k') | src/code-stubs.h » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2015 the V8 project authors. All rights reserved. 1 // Copyright 2016 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "src/compiler/code-stub-assembler.h" 5 #include "src/code-stub-assembler.h"
6
7 #include <ostream>
8
9 #include "src/code-factory.h" 6 #include "src/code-factory.h"
10 #include "src/compiler/graph.h"
11 #include "src/compiler/instruction-selector.h"
12 #include "src/compiler/linkage.h"
13 #include "src/compiler/pipeline.h"
14 #include "src/compiler/raw-machine-assembler.h"
15 #include "src/compiler/schedule.h"
16 #include "src/frames.h"
17 #include "src/interface-descriptors.h"
18 #include "src/interpreter/bytecodes.h"
19 #include "src/machine-type.h"
20 #include "src/macro-assembler.h"
21 #include "src/zone.h"
22 7
23 namespace v8 { 8 namespace v8 {
24 namespace internal { 9 namespace internal {
25 namespace compiler { 10
11 using compiler::Node;
26 12
27 CodeStubAssembler::CodeStubAssembler(Isolate* isolate, Zone* zone, 13 CodeStubAssembler::CodeStubAssembler(Isolate* isolate, Zone* zone,
28 const CallInterfaceDescriptor& descriptor, 14 const CallInterfaceDescriptor& descriptor,
29 Code::Flags flags, const char* name, 15 Code::Flags flags, const char* name,
30 size_t result_size) 16 size_t result_size)
31 : CodeStubAssembler( 17 : compiler::CodeAssembler(isolate, zone, descriptor, flags, name,
32 isolate, zone, 18 result_size) {}
33 Linkage::GetStubCallDescriptor(
34 isolate, zone, descriptor, descriptor.GetStackParameterCount(),
35 CallDescriptor::kNoFlags, Operator::kNoProperties,
36 MachineType::AnyTagged(), result_size),
37 flags, name) {}
38 19
39 CodeStubAssembler::CodeStubAssembler(Isolate* isolate, Zone* zone, 20 CodeStubAssembler::CodeStubAssembler(Isolate* isolate, Zone* zone,
40 int parameter_count, Code::Flags flags, 21 int parameter_count, Code::Flags flags,
41 const char* name) 22 const char* name)
42 : CodeStubAssembler(isolate, zone, Linkage::GetJSCallDescriptor( 23 : compiler::CodeAssembler(isolate, zone, parameter_count, flags, name) {}
43 zone, false, parameter_count,
44 CallDescriptor::kNoFlags),
45 flags, name) {}
46
47 CodeStubAssembler::CodeStubAssembler(Isolate* isolate, Zone* zone,
48 CallDescriptor* call_descriptor,
49 Code::Flags flags, const char* name)
50 : raw_assembler_(new RawMachineAssembler(
51 isolate, new (zone) Graph(zone), call_descriptor,
52 MachineType::PointerRepresentation(),
53 InstructionSelector::SupportedMachineOperatorFlags())),
54 flags_(flags),
55 name_(name),
56 code_generated_(false),
57 variables_(zone) {}
58
59 CodeStubAssembler::~CodeStubAssembler() {}
60
61 void CodeStubAssembler::CallPrologue() {}
62
63 void CodeStubAssembler::CallEpilogue() {}
64
65 Handle<Code> CodeStubAssembler::GenerateCode() {
66 DCHECK(!code_generated_);
67
68 Schedule* schedule = raw_assembler_->Export();
69 Handle<Code> code = Pipeline::GenerateCodeForCodeStub(
70 isolate(), raw_assembler_->call_descriptor(), graph(), schedule, flags_,
71 name_);
72
73 code_generated_ = true;
74 return code;
75 }
76
77
78 Node* CodeStubAssembler::Int32Constant(int value) {
79 return raw_assembler_->Int32Constant(value);
80 }
81
82
83 Node* CodeStubAssembler::IntPtrConstant(intptr_t value) {
84 return raw_assembler_->IntPtrConstant(value);
85 }
86
87
88 Node* CodeStubAssembler::NumberConstant(double value) {
89 return raw_assembler_->NumberConstant(value);
90 }
91
92 Node* CodeStubAssembler::SmiConstant(Smi* value) {
93 return IntPtrConstant(bit_cast<intptr_t>(value));
94 }
95
96 Node* CodeStubAssembler::HeapConstant(Handle<HeapObject> object) {
97 return raw_assembler_->HeapConstant(object);
98 }
99
100
101 Node* CodeStubAssembler::BooleanConstant(bool value) {
102 return raw_assembler_->BooleanConstant(value);
103 }
104
105 Node* CodeStubAssembler::ExternalConstant(ExternalReference address) {
106 return raw_assembler_->ExternalConstant(address);
107 }
108
109 Node* CodeStubAssembler::Float64Constant(double value) {
110 return raw_assembler_->Float64Constant(value);
111 }
112
113 Node* CodeStubAssembler::BooleanMapConstant() {
114 return HeapConstant(isolate()->factory()->boolean_map());
115 }
116
117 Node* CodeStubAssembler::EmptyStringConstant() {
118 return LoadRoot(Heap::kempty_stringRootIndex);
119 }
120
121 Node* CodeStubAssembler::HeapNumberMapConstant() {
122 return HeapConstant(isolate()->factory()->heap_number_map());
123 }
124
125 Node* CodeStubAssembler::NaNConstant() {
126 return LoadRoot(Heap::kNanValueRootIndex);
127 }
128
129 Node* CodeStubAssembler::NoContextConstant() {
130 return SmiConstant(Smi::FromInt(0));
131 }
132
133 Node* CodeStubAssembler::NullConstant() {
134 return LoadRoot(Heap::kNullValueRootIndex);
135 }
136
137 Node* CodeStubAssembler::UndefinedConstant() {
138 return LoadRoot(Heap::kUndefinedValueRootIndex);
139 }
140
141 Node* CodeStubAssembler::Parameter(int value) {
142 return raw_assembler_->Parameter(value);
143 }
144
145 void CodeStubAssembler::Return(Node* value) {
146 return raw_assembler_->Return(value);
147 }
148
149 void CodeStubAssembler::Bind(CodeStubAssembler::Label* label) {
150 return label->Bind();
151 }
152
153 Node* CodeStubAssembler::LoadFramePointer() {
154 return raw_assembler_->LoadFramePointer();
155 }
156
157 Node* CodeStubAssembler::LoadParentFramePointer() {
158 return raw_assembler_->LoadParentFramePointer();
159 }
160
161 Node* CodeStubAssembler::LoadStackPointer() {
162 return raw_assembler_->LoadStackPointer();
163 }
164
165 Node* CodeStubAssembler::SmiShiftBitsConstant() {
166 return IntPtrConstant(kSmiShiftSize + kSmiTagSize);
167 }
168 24
169 Node* CodeStubAssembler::Float64Round(Node* x) { 25 Node* CodeStubAssembler::Float64Round(Node* x) {
170 Node* one = Float64Constant(1.0); 26 Node* one = Float64Constant(1.0);
171 Node* one_half = Float64Constant(0.5); 27 Node* one_half = Float64Constant(0.5);
172 28
173 Variable var_x(this, MachineRepresentation::kFloat64); 29 Variable var_x(this, MachineRepresentation::kFloat64);
174 Label return_x(this); 30 Label return_x(this);
175 31
176 // Round up {x} towards Infinity. 32 // Round up {x} towards Infinity.
177 var_x.Bind(Float64Ceil(x)); 33 var_x.Bind(Float64Ceil(x));
178 34
179 GotoIf(Float64LessThanOrEqual(Float64Sub(var_x.value(), one_half), x), 35 GotoIf(Float64LessThanOrEqual(Float64Sub(var_x.value(), one_half), x),
180 &return_x); 36 &return_x);
181 var_x.Bind(Float64Sub(var_x.value(), one)); 37 var_x.Bind(Float64Sub(var_x.value(), one));
182 Goto(&return_x); 38 Goto(&return_x);
183 39
184 Bind(&return_x); 40 Bind(&return_x);
185 return var_x.value(); 41 return var_x.value();
186 } 42 }
187 43
188 Node* CodeStubAssembler::Float64Ceil(Node* x) { 44 Node* CodeStubAssembler::Float64Ceil(Node* x) {
189 if (raw_assembler_->machine()->Float64RoundUp().IsSupported()) { 45 if (IsFloat64RoundUpSupported()) {
190 return raw_assembler_->Float64RoundUp(x); 46 return Float64RoundUp(x);
191 } 47 }
192 48
193 Node* one = Float64Constant(1.0); 49 Node* one = Float64Constant(1.0);
194 Node* zero = Float64Constant(0.0); 50 Node* zero = Float64Constant(0.0);
195 Node* two_52 = Float64Constant(4503599627370496.0E0); 51 Node* two_52 = Float64Constant(4503599627370496.0E0);
196 Node* minus_two_52 = Float64Constant(-4503599627370496.0E0); 52 Node* minus_two_52 = Float64Constant(-4503599627370496.0E0);
197 53
198 Variable var_x(this, MachineRepresentation::kFloat64); 54 Variable var_x(this, MachineRepresentation::kFloat64);
199 Label return_x(this), return_minus_x(this); 55 Label return_x(this), return_minus_x(this);
200 var_x.Bind(x); 56 var_x.Bind(x);
(...skipping 31 matching lines...) Expand 10 before | Expand all | Expand 10 after
232 88
233 Bind(&return_minus_x); 89 Bind(&return_minus_x);
234 var_x.Bind(Float64Neg(var_x.value())); 90 var_x.Bind(Float64Neg(var_x.value()));
235 Goto(&return_x); 91 Goto(&return_x);
236 92
237 Bind(&return_x); 93 Bind(&return_x);
238 return var_x.value(); 94 return var_x.value();
239 } 95 }
240 96
241 Node* CodeStubAssembler::Float64Floor(Node* x) { 97 Node* CodeStubAssembler::Float64Floor(Node* x) {
242 if (raw_assembler_->machine()->Float64RoundDown().IsSupported()) { 98 if (IsFloat64RoundDownSupported()) {
243 return raw_assembler_->Float64RoundDown(x); 99 return Float64RoundDown(x);
244 } 100 }
245 101
246 Node* one = Float64Constant(1.0); 102 Node* one = Float64Constant(1.0);
247 Node* zero = Float64Constant(0.0); 103 Node* zero = Float64Constant(0.0);
248 Node* two_52 = Float64Constant(4503599627370496.0E0); 104 Node* two_52 = Float64Constant(4503599627370496.0E0);
249 Node* minus_two_52 = Float64Constant(-4503599627370496.0E0); 105 Node* minus_two_52 = Float64Constant(-4503599627370496.0E0);
250 106
251 Variable var_x(this, MachineRepresentation::kFloat64); 107 Variable var_x(this, MachineRepresentation::kFloat64);
252 Label return_x(this), return_minus_x(this); 108 Label return_x(this), return_minus_x(this);
253 var_x.Bind(x); 109 var_x.Bind(x);
(...skipping 31 matching lines...) Expand 10 before | Expand all | Expand 10 after
285 141
286 Bind(&return_minus_x); 142 Bind(&return_minus_x);
287 var_x.Bind(Float64Neg(var_x.value())); 143 var_x.Bind(Float64Neg(var_x.value()));
288 Goto(&return_x); 144 Goto(&return_x);
289 145
290 Bind(&return_x); 146 Bind(&return_x);
291 return var_x.value(); 147 return var_x.value();
292 } 148 }
293 149
294 Node* CodeStubAssembler::Float64Trunc(Node* x) { 150 Node* CodeStubAssembler::Float64Trunc(Node* x) {
295 if (raw_assembler_->machine()->Float64RoundTruncate().IsSupported()) { 151 if (IsFloat64RoundTruncateSupported()) {
296 return raw_assembler_->Float64RoundTruncate(x); 152 return Float64RoundTruncate(x);
297 } 153 }
298 154
299 Node* one = Float64Constant(1.0); 155 Node* one = Float64Constant(1.0);
300 Node* zero = Float64Constant(0.0); 156 Node* zero = Float64Constant(0.0);
301 Node* two_52 = Float64Constant(4503599627370496.0E0); 157 Node* two_52 = Float64Constant(4503599627370496.0E0);
302 Node* minus_two_52 = Float64Constant(-4503599627370496.0E0); 158 Node* minus_two_52 = Float64Constant(-4503599627370496.0E0);
303 159
304 Variable var_x(this, MachineRepresentation::kFloat64); 160 Variable var_x(this, MachineRepresentation::kFloat64);
305 Label return_x(this), return_minus_x(this); 161 Label return_x(this), return_minus_x(this);
306 var_x.Bind(x); 162 var_x.Bind(x);
307 163
308 // Check if {x} is greater than 0. 164 // Check if {x} is greater than 0.
309 Label if_xgreaterthanzero(this), if_xnotgreaterthanzero(this); 165 Label if_xgreaterthanzero(this), if_xnotgreaterthanzero(this);
310 Branch(Float64GreaterThan(x, zero), &if_xgreaterthanzero, 166 Branch(Float64GreaterThan(x, zero), &if_xgreaterthanzero,
311 &if_xnotgreaterthanzero); 167 &if_xnotgreaterthanzero);
312 168
313 Bind(&if_xgreaterthanzero); 169 Bind(&if_xgreaterthanzero);
314 { 170 {
315 if (raw_assembler_->machine()->Float64RoundDown().IsSupported()) { 171 if (IsFloat64RoundDownSupported()) {
316 var_x.Bind(raw_assembler_->Float64RoundDown(x)); 172 var_x.Bind(Float64RoundDown(x));
317 } else { 173 } else {
318 // Just return {x} unless it's in the range ]0,2^52[. 174 // Just return {x} unless it's in the range ]0,2^52[.
319 GotoIf(Float64GreaterThanOrEqual(x, two_52), &return_x); 175 GotoIf(Float64GreaterThanOrEqual(x, two_52), &return_x);
320 176
321 // Round positive {x} towards -Infinity. 177 // Round positive {x} towards -Infinity.
322 var_x.Bind(Float64Sub(Float64Add(two_52, x), two_52)); 178 var_x.Bind(Float64Sub(Float64Add(two_52, x), two_52));
323 GotoUnless(Float64GreaterThan(var_x.value(), x), &return_x); 179 GotoUnless(Float64GreaterThan(var_x.value(), x), &return_x);
324 var_x.Bind(Float64Sub(var_x.value(), one)); 180 var_x.Bind(Float64Sub(var_x.value(), one));
325 } 181 }
326 Goto(&return_x); 182 Goto(&return_x);
327 } 183 }
328 184
329 Bind(&if_xnotgreaterthanzero); 185 Bind(&if_xnotgreaterthanzero);
330 { 186 {
331 if (raw_assembler_->machine()->Float64RoundUp().IsSupported()) { 187 if (IsFloat64RoundUpSupported()) {
332 var_x.Bind(raw_assembler_->Float64RoundUp(x)); 188 var_x.Bind(Float64RoundUp(x));
333 Goto(&return_x); 189 Goto(&return_x);
334 } else { 190 } else {
335 // Just return {x} unless it's in the range ]-2^52,0[. 191 // Just return {x} unless it's in the range ]-2^52,0[.
336 GotoIf(Float64LessThanOrEqual(x, minus_two_52), &return_x); 192 GotoIf(Float64LessThanOrEqual(x, minus_two_52), &return_x);
337 GotoUnless(Float64LessThan(x, zero), &return_x); 193 GotoUnless(Float64LessThan(x, zero), &return_x);
338 194
339 // Round negated {x} towards -Infinity and return result negated. 195 // Round negated {x} towards -Infinity and return result negated.
340 Node* minus_x = Float64Neg(x); 196 Node* minus_x = Float64Neg(x);
341 var_x.Bind(Float64Sub(Float64Add(two_52, minus_x), two_52)); 197 var_x.Bind(Float64Sub(Float64Add(two_52, minus_x), two_52));
342 GotoUnless(Float64GreaterThan(var_x.value(), minus_x), &return_minus_x); 198 GotoUnless(Float64GreaterThan(var_x.value(), minus_x), &return_minus_x);
343 var_x.Bind(Float64Sub(var_x.value(), one)); 199 var_x.Bind(Float64Sub(var_x.value(), one));
344 Goto(&return_minus_x); 200 Goto(&return_minus_x);
345 } 201 }
346 } 202 }
347 203
348 Bind(&return_minus_x); 204 Bind(&return_minus_x);
349 var_x.Bind(Float64Neg(var_x.value())); 205 var_x.Bind(Float64Neg(var_x.value()));
350 Goto(&return_x); 206 Goto(&return_x);
351 207
352 Bind(&return_x); 208 Bind(&return_x);
353 return var_x.value(); 209 return var_x.value();
354 } 210 }
355 211
356 Node* CodeStubAssembler::SmiTag(Node* value) {
357 return raw_assembler_->WordShl(value, SmiShiftBitsConstant());
358 }
359
360 Node* CodeStubAssembler::SmiUntag(Node* value) {
361 return raw_assembler_->WordSar(value, SmiShiftBitsConstant());
362 }
363
364 Node* CodeStubAssembler::SmiFromWord32(Node* value) { 212 Node* CodeStubAssembler::SmiFromWord32(Node* value) {
365 if (raw_assembler_->machine()->Is64()) { 213 if (Is64()) {
366 value = raw_assembler_->ChangeInt32ToInt64(value); 214 value = ChangeInt32ToInt64(value);
367 } 215 }
368 return raw_assembler_->WordShl(value, SmiShiftBitsConstant()); 216 return WordShl(value, SmiShiftBitsConstant());
369 } 217 }
370 218
371 Node* CodeStubAssembler::SmiToWord32(Node* value) { 219 Node* CodeStubAssembler::SmiToWord32(Node* value) {
372 Node* result = raw_assembler_->WordSar(value, SmiShiftBitsConstant()); 220 Node* result = WordSar(value, SmiShiftBitsConstant());
373 if (raw_assembler_->machine()->Is64()) { 221 if (Is64()) {
374 result = raw_assembler_->TruncateInt64ToInt32(result); 222 result = TruncateInt64ToInt32(result);
375 } 223 }
376 return result; 224 return result;
377 } 225 }
378 226
379 Node* CodeStubAssembler::SmiToFloat64(Node* value) { 227 Node* CodeStubAssembler::SmiToFloat64(Node* value) {
380 return ChangeInt32ToFloat64(SmiUntag(value)); 228 return ChangeInt32ToFloat64(SmiUntag(value));
381 } 229 }
382 230
383 Node* CodeStubAssembler::SmiAdd(Node* a, Node* b) { return IntPtrAdd(a, b); } 231 Node* CodeStubAssembler::SmiAdd(Node* a, Node* b) { return IntPtrAdd(a, b); }
384 232
(...skipping 29 matching lines...) Expand all
414 Bind(&if_a); 262 Bind(&if_a);
415 min.Bind(a); 263 min.Bind(a);
416 Goto(&join); 264 Goto(&join);
417 Bind(&if_b); 265 Bind(&if_b);
418 min.Bind(b); 266 min.Bind(b);
419 Goto(&join); 267 Goto(&join);
420 Bind(&join); 268 Bind(&join);
421 return min.value(); 269 return min.value();
422 } 270 }
423 271
424 #define DEFINE_CODE_STUB_ASSEMBER_BINARY_OP(name) \
425 Node* CodeStubAssembler::name(Node* a, Node* b) { \
426 return raw_assembler_->name(a, b); \
427 }
428 CODE_STUB_ASSEMBLER_BINARY_OP_LIST(DEFINE_CODE_STUB_ASSEMBER_BINARY_OP)
429 #undef DEFINE_CODE_STUB_ASSEMBER_BINARY_OP
430
431 Node* CodeStubAssembler::WordShl(Node* value, int shift) {
432 return raw_assembler_->WordShl(value, IntPtrConstant(shift));
433 }
434
435 #define DEFINE_CODE_STUB_ASSEMBER_UNARY_OP(name) \
436 Node* CodeStubAssembler::name(Node* a) { return raw_assembler_->name(a); }
437 CODE_STUB_ASSEMBLER_UNARY_OP_LIST(DEFINE_CODE_STUB_ASSEMBER_UNARY_OP)
438 #undef DEFINE_CODE_STUB_ASSEMBER_UNARY_OP
439
440 Node* CodeStubAssembler::WordIsSmi(Node* a) { 272 Node* CodeStubAssembler::WordIsSmi(Node* a) {
441 return WordEqual(raw_assembler_->WordAnd(a, IntPtrConstant(kSmiTagMask)), 273 return WordEqual(WordAnd(a, IntPtrConstant(kSmiTagMask)), IntPtrConstant(0));
442 IntPtrConstant(0));
443 } 274 }
444 275
445 Node* CodeStubAssembler::WordIsPositiveSmi(Node* a) { 276 Node* CodeStubAssembler::WordIsPositiveSmi(Node* a) {
446 return WordEqual( 277 return WordEqual(WordAnd(a, IntPtrConstant(kSmiTagMask | kSmiSignMask)),
447 raw_assembler_->WordAnd(a, IntPtrConstant(kSmiTagMask | kSmiSignMask)), 278 IntPtrConstant(0));
448 IntPtrConstant(0));
449 } 279 }
450 280
451 Node* CodeStubAssembler::LoadBufferObject(Node* buffer, int offset, 281 Node* CodeStubAssembler::LoadBufferObject(Node* buffer, int offset,
452 MachineType rep) { 282 MachineType rep) {
453 return raw_assembler_->Load(rep, buffer, IntPtrConstant(offset)); 283 return Load(rep, buffer, IntPtrConstant(offset));
454 } 284 }
455 285
456 Node* CodeStubAssembler::LoadObjectField(Node* object, int offset, 286 Node* CodeStubAssembler::LoadObjectField(Node* object, int offset,
457 MachineType rep) { 287 MachineType rep) {
458 return raw_assembler_->Load(rep, object, 288 return Load(rep, object, IntPtrConstant(offset - kHeapObjectTag));
459 IntPtrConstant(offset - kHeapObjectTag));
460 }
461
462 Node* CodeStubAssembler::StoreObjectFieldNoWriteBarrier(
463 Node* object, int offset, Node* value, MachineRepresentation rep) {
464 return StoreNoWriteBarrier(rep, object,
465 IntPtrConstant(offset - kHeapObjectTag), value);
466 } 289 }
467 290
468 Node* CodeStubAssembler::LoadHeapNumberValue(Node* object) { 291 Node* CodeStubAssembler::LoadHeapNumberValue(Node* object) {
469 return Load(MachineType::Float64(), object, 292 return Load(MachineType::Float64(), object,
470 IntPtrConstant(HeapNumber::kValueOffset - kHeapObjectTag)); 293 IntPtrConstant(HeapNumber::kValueOffset - kHeapObjectTag));
471 } 294 }
472 295
473 Node* CodeStubAssembler::StoreHeapNumberValue(Node* object, Node* value) { 296 Node* CodeStubAssembler::LoadMap(Node* object) {
474 return StoreNoWriteBarrier( 297 return LoadObjectField(object, HeapObject::kMapOffset);
475 MachineRepresentation::kFloat64, object,
476 IntPtrConstant(HeapNumber::kValueOffset - kHeapObjectTag), value);
477 } 298 }
478 299
479 Node* CodeStubAssembler::TruncateHeapNumberValueToWord32(Node* object) { 300 Node* CodeStubAssembler::LoadInstanceType(Node* object) {
480 Node* value = LoadHeapNumberValue(object); 301 return LoadMapInstanceType(LoadMap(object));
481 return raw_assembler_->TruncateFloat64ToInt32(TruncationMode::kJavaScript, 302 }
482 value); 303
304 Node* CodeStubAssembler::LoadElements(Node* object) {
305 return LoadObjectField(object, JSObject::kElementsOffset);
306 }
307
308 Node* CodeStubAssembler::LoadFixedArrayBaseLength(Node* array) {
309 return LoadObjectField(array, FixedArrayBase::kLengthOffset);
483 } 310 }
484 311
485 Node* CodeStubAssembler::LoadMapBitField(Node* map) { 312 Node* CodeStubAssembler::LoadMapBitField(Node* map) {
486 return Load(MachineType::Uint8(), map, 313 return Load(MachineType::Uint8(), map,
487 IntPtrConstant(Map::kBitFieldOffset - kHeapObjectTag)); 314 IntPtrConstant(Map::kBitFieldOffset - kHeapObjectTag));
488 } 315 }
489 316
490 Node* CodeStubAssembler::LoadMapBitField2(Node* map) { 317 Node* CodeStubAssembler::LoadMapBitField2(Node* map) {
491 return Load(MachineType::Uint8(), map, 318 return Load(MachineType::Uint8(), map,
492 IntPtrConstant(Map::kBitField2Offset - kHeapObjectTag)); 319 IntPtrConstant(Map::kBitField2Offset - kHeapObjectTag));
(...skipping 15 matching lines...) Expand all
508 335
509 Node* CodeStubAssembler::LoadNameHash(Node* name) { 336 Node* CodeStubAssembler::LoadNameHash(Node* name) {
510 return Load(MachineType::Uint32(), name, 337 return Load(MachineType::Uint32(), name,
511 IntPtrConstant(Name::kHashFieldOffset - kHeapObjectTag)); 338 IntPtrConstant(Name::kHashFieldOffset - kHeapObjectTag));
512 } 339 }
513 340
514 Node* CodeStubAssembler::LoadFixedArrayElementInt32Index( 341 Node* CodeStubAssembler::LoadFixedArrayElementInt32Index(
515 Node* object, Node* index, int additional_offset) { 342 Node* object, Node* index, int additional_offset) {
516 Node* header_size = IntPtrConstant(additional_offset + 343 Node* header_size = IntPtrConstant(additional_offset +
517 FixedArray::kHeaderSize - kHeapObjectTag); 344 FixedArray::kHeaderSize - kHeapObjectTag);
518 if (raw_assembler_->machine()->Is64()) { 345 if (Is64()) {
519 index = ChangeInt32ToInt64(index); 346 index = ChangeInt32ToInt64(index);
520 } 347 }
521 Node* scaled_index = WordShl(index, IntPtrConstant(kPointerSizeLog2)); 348 Node* scaled_index = WordShl(index, IntPtrConstant(kPointerSizeLog2));
522 Node* offset = IntPtrAdd(scaled_index, header_size); 349 Node* offset = IntPtrAdd(scaled_index, header_size);
523 return Load(MachineType::AnyTagged(), object, offset); 350 return Load(MachineType::AnyTagged(), object, offset);
524 } 351 }
525 352
526 Node* CodeStubAssembler::LoadMapInstanceSize(Node* map) { 353 Node* CodeStubAssembler::LoadMapInstanceSize(Node* map) {
527 return Load(MachineType::Uint8(), map, 354 return Load(MachineType::Uint8(), map,
528 IntPtrConstant(Map::kInstanceSizeOffset - kHeapObjectTag)); 355 IntPtrConstant(Map::kInstanceSizeOffset - kHeapObjectTag));
(...skipping 11 matching lines...) Expand all
540 : WordShl(smi_index, 367 : WordShl(smi_index,
541 IntPtrConstant(kPointerSizeLog2 - kSmiShiftBits)); 368 IntPtrConstant(kPointerSizeLog2 - kSmiShiftBits));
542 Node* offset = IntPtrAdd(scaled_index, header_size); 369 Node* offset = IntPtrAdd(scaled_index, header_size);
543 return Load(MachineType::AnyTagged(), object, offset); 370 return Load(MachineType::AnyTagged(), object, offset);
544 } 371 }
545 372
546 Node* CodeStubAssembler::LoadFixedArrayElementConstantIndex(Node* object, 373 Node* CodeStubAssembler::LoadFixedArrayElementConstantIndex(Node* object,
547 int index) { 374 int index) {
548 Node* offset = IntPtrConstant(FixedArray::kHeaderSize - kHeapObjectTag + 375 Node* offset = IntPtrConstant(FixedArray::kHeaderSize - kHeapObjectTag +
549 index * kPointerSize); 376 index * kPointerSize);
550 return raw_assembler_->Load(MachineType::AnyTagged(), object, offset); 377 return Load(MachineType::AnyTagged(), object, offset);
378 }
379
380 Node* CodeStubAssembler::StoreHeapNumberValue(Node* object, Node* value) {
381 return StoreNoWriteBarrier(
382 MachineRepresentation::kFloat64, object,
383 IntPtrConstant(HeapNumber::kValueOffset - kHeapObjectTag), value);
384 }
385
386 Node* CodeStubAssembler::StoreObjectFieldNoWriteBarrier(
387 Node* object, int offset, Node* value, MachineRepresentation rep) {
388 return StoreNoWriteBarrier(rep, object,
389 IntPtrConstant(offset - kHeapObjectTag), value);
390 }
391
392 Node* CodeStubAssembler::StoreMapNoWriteBarrier(Node* object, Node* map) {
393 return StoreNoWriteBarrier(
394 MachineRepresentation::kTagged, object,
395 IntPtrConstant(HeapNumber::kMapOffset - kHeapObjectTag), map);
551 } 396 }
552 397
553 Node* CodeStubAssembler::StoreFixedArrayElementNoWriteBarrier(Node* object, 398 Node* CodeStubAssembler::StoreFixedArrayElementNoWriteBarrier(Node* object,
554 Node* index, 399 Node* index,
555 Node* value) { 400 Node* value) {
556 Node* offset = 401 Node* offset =
557 IntPtrAdd(WordShl(index, IntPtrConstant(kPointerSizeLog2)), 402 IntPtrAdd(WordShl(index, IntPtrConstant(kPointerSizeLog2)),
558 IntPtrConstant(FixedArray::kHeaderSize - kHeapObjectTag)); 403 IntPtrConstant(FixedArray::kHeaderSize - kHeapObjectTag));
559 return StoreNoWriteBarrier(MachineRepresentation::kTagged, object, offset, 404 return StoreNoWriteBarrier(MachineRepresentation::kTagged, object, offset,
560 value); 405 value);
561 } 406 }
562 407
563 Node* CodeStubAssembler::StoreFixedArrayElementInt32Index(Node* object, 408 Node* CodeStubAssembler::StoreFixedArrayElementInt32Index(Node* object,
564 Node* index, 409 Node* index,
565 Node* value) { 410 Node* value) {
566 if (raw_assembler_->machine()->Is64()) { 411 if (Is64()) {
567 index = ChangeInt32ToInt64(index); 412 index = ChangeInt32ToInt64(index);
568 } 413 }
569 Node* offset = 414 Node* offset =
570 IntPtrAdd(WordShl(index, IntPtrConstant(kPointerSizeLog2)), 415 IntPtrAdd(WordShl(index, IntPtrConstant(kPointerSizeLog2)),
571 IntPtrConstant(FixedArray::kHeaderSize - kHeapObjectTag)); 416 IntPtrConstant(FixedArray::kHeaderSize - kHeapObjectTag));
572 return Store(MachineRepresentation::kTagged, object, offset, value); 417 return Store(MachineRepresentation::kTagged, object, offset, value);
573 } 418 }
574 419
575 Node* CodeStubAssembler::LoadRoot(Heap::RootListIndex root_index) {
576 if (isolate()->heap()->RootCanBeTreatedAsConstant(root_index)) {
577 Handle<Object> root = isolate()->heap()->root_handle(root_index);
578 if (root->IsSmi()) {
579 return SmiConstant(Smi::cast(*root));
580 } else {
581 return HeapConstant(Handle<HeapObject>::cast(root));
582 }
583 }
584
585 compiler::Node* roots_array_start =
586 ExternalConstant(ExternalReference::roots_array_start(isolate()));
587 USE(roots_array_start);
588
589 // TODO(danno): Implement the root-access case where the root is not constant
590 // and must be loaded from the root array.
591 UNIMPLEMENTED();
592 return nullptr;
593 }
594
595 Node* CodeStubAssembler::AllocateRawUnaligned(Node* size_in_bytes,
596 AllocationFlags flags,
597 Node* top_address,
598 Node* limit_address) {
599 Node* top = Load(MachineType::Pointer(), top_address);
600 Node* limit = Load(MachineType::Pointer(), limit_address);
601
602 // If there's not enough space, call the runtime.
603 RawMachineLabel runtime_call(RawMachineLabel::kDeferred), no_runtime_call,
604 merge_runtime;
605 raw_assembler_->Branch(
606 raw_assembler_->IntPtrLessThan(IntPtrSub(limit, top), size_in_bytes),
607 &runtime_call, &no_runtime_call);
608
609 raw_assembler_->Bind(&runtime_call);
610 // AllocateInTargetSpace does not use the context.
611 Node* context = IntPtrConstant(0);
612 Node* runtime_flags = SmiTag(Int32Constant(
613 AllocateDoubleAlignFlag::encode(false) |
614 AllocateTargetSpace::encode(flags & kPretenured
615 ? AllocationSpace::OLD_SPACE
616 : AllocationSpace::NEW_SPACE)));
617 Node* runtime_result = CallRuntime(Runtime::kAllocateInTargetSpace, context,
618 SmiTag(size_in_bytes), runtime_flags);
619 raw_assembler_->Goto(&merge_runtime);
620
621 // When there is enough space, return `top' and bump it up.
622 raw_assembler_->Bind(&no_runtime_call);
623 Node* no_runtime_result = top;
624 StoreNoWriteBarrier(MachineType::PointerRepresentation(), top_address,
625 IntPtrAdd(top, size_in_bytes));
626 no_runtime_result =
627 IntPtrAdd(no_runtime_result, IntPtrConstant(kHeapObjectTag));
628 raw_assembler_->Goto(&merge_runtime);
629
630 raw_assembler_->Bind(&merge_runtime);
631 return raw_assembler_->Phi(MachineType::PointerRepresentation(),
632 runtime_result, no_runtime_result);
633 }
634
635 Node* CodeStubAssembler::AllocateRawAligned(Node* size_in_bytes,
636 AllocationFlags flags,
637 Node* top_address,
638 Node* limit_address) {
639 Node* top = Load(MachineType::Pointer(), top_address);
640 Node* limit = Load(MachineType::Pointer(), limit_address);
641 Node* adjusted_size = size_in_bytes;
642 if (flags & kDoubleAlignment) {
643 // TODO(epertoso): Simd128 alignment.
644 RawMachineLabel aligned, not_aligned, merge;
645 raw_assembler_->Branch(WordAnd(top, IntPtrConstant(kDoubleAlignmentMask)),
646 &not_aligned, &aligned);
647
648 raw_assembler_->Bind(&not_aligned);
649 Node* not_aligned_size =
650 IntPtrAdd(size_in_bytes, IntPtrConstant(kPointerSize));
651 raw_assembler_->Goto(&merge);
652
653 raw_assembler_->Bind(&aligned);
654 raw_assembler_->Goto(&merge);
655
656 raw_assembler_->Bind(&merge);
657 adjusted_size = raw_assembler_->Phi(MachineType::PointerRepresentation(),
658 not_aligned_size, adjusted_size);
659 }
660
661 Node* address = AllocateRawUnaligned(adjusted_size, kNone, top, limit);
662
663 RawMachineLabel needs_filler, doesnt_need_filler, merge_address;
664 raw_assembler_->Branch(
665 raw_assembler_->IntPtrEqual(adjusted_size, size_in_bytes),
666 &doesnt_need_filler, &needs_filler);
667
668 raw_assembler_->Bind(&needs_filler);
669 // Store a filler and increase the address by kPointerSize.
670 // TODO(epertoso): this code assumes that we only align to kDoubleSize. Change
671 // it when Simd128 alignment is supported.
672 StoreNoWriteBarrier(MachineType::PointerRepresentation(), top,
673 LoadRoot(Heap::kOnePointerFillerMapRootIndex));
674 Node* address_with_filler = IntPtrAdd(address, IntPtrConstant(kPointerSize));
675 raw_assembler_->Goto(&merge_address);
676
677 raw_assembler_->Bind(&doesnt_need_filler);
678 Node* address_without_filler = address;
679 raw_assembler_->Goto(&merge_address);
680
681 raw_assembler_->Bind(&merge_address);
682 address = raw_assembler_->Phi(MachineType::PointerRepresentation(),
683 address_with_filler, address_without_filler);
684 // Update the top.
685 StoreNoWriteBarrier(MachineType::PointerRepresentation(), top_address,
686 IntPtrAdd(top, adjusted_size));
687 return address;
688 }
689
690 Node* CodeStubAssembler::Allocate(int size_in_bytes, AllocationFlags flags) {
691 bool const new_space = !(flags & kPretenured);
692 Node* top_address = ExternalConstant(
693 new_space
694 ? ExternalReference::new_space_allocation_top_address(isolate())
695 : ExternalReference::old_space_allocation_top_address(isolate()));
696 Node* limit_address = ExternalConstant(
697 new_space
698 ? ExternalReference::new_space_allocation_limit_address(isolate())
699 : ExternalReference::old_space_allocation_limit_address(isolate()));
700
701 #ifdef V8_HOST_ARCH_32_BIT
702 if (flags & kDoubleAlignment) {
703 return AllocateRawAligned(IntPtrConstant(size_in_bytes), flags, top_address,
704 limit_address);
705 }
706 #endif
707
708 return AllocateRawUnaligned(IntPtrConstant(size_in_bytes), flags, top_address,
709 limit_address);
710 }
711
712 Node* CodeStubAssembler::InnerAllocate(Node* previous, int offset) {
713 return IntPtrAdd(previous, IntPtrConstant(offset));
714 }
715
716 Node* CodeStubAssembler::AllocateHeapNumber() { 420 Node* CodeStubAssembler::AllocateHeapNumber() {
717 Node* result = Allocate(HeapNumber::kSize, kNone); 421 Node* result = Allocate(HeapNumber::kSize, kNone);
718 StoreMapNoWriteBarrier(result, HeapNumberMapConstant()); 422 StoreMapNoWriteBarrier(result, HeapNumberMapConstant());
719 return result; 423 return result;
720 } 424 }
721 425
722 Node* CodeStubAssembler::AllocateHeapNumberWithValue(Node* value) { 426 Node* CodeStubAssembler::AllocateHeapNumberWithValue(Node* value) {
723 Node* result = AllocateHeapNumber(); 427 Node* result = AllocateHeapNumber();
724 StoreHeapNumberValue(result, value); 428 StoreHeapNumberValue(result, value);
725 return result; 429 return result;
(...skipping 12 matching lines...) Expand all
738 Node* CodeStubAssembler::AllocateSeqTwoByteString(int length) { 442 Node* CodeStubAssembler::AllocateSeqTwoByteString(int length) {
739 Node* result = Allocate(SeqTwoByteString::SizeFor(length)); 443 Node* result = Allocate(SeqTwoByteString::SizeFor(length));
740 StoreMapNoWriteBarrier(result, LoadRoot(Heap::kStringMapRootIndex)); 444 StoreMapNoWriteBarrier(result, LoadRoot(Heap::kStringMapRootIndex));
741 StoreObjectFieldNoWriteBarrier(result, SeqTwoByteString::kLengthOffset, 445 StoreObjectFieldNoWriteBarrier(result, SeqTwoByteString::kLengthOffset,
742 SmiConstant(Smi::FromInt(length))); 446 SmiConstant(Smi::FromInt(length)));
743 StoreObjectFieldNoWriteBarrier(result, SeqTwoByteString::kHashFieldOffset, 447 StoreObjectFieldNoWriteBarrier(result, SeqTwoByteString::kHashFieldOffset,
744 IntPtrConstant(String::kEmptyHashField)); 448 IntPtrConstant(String::kEmptyHashField));
745 return result; 449 return result;
746 } 450 }
747 451
748 Node* CodeStubAssembler::Load(MachineType rep, Node* base) { 452 Node* CodeStubAssembler::TruncateTaggedToFloat64(Node* context, Node* value) {
749 return raw_assembler_->Load(rep, base); 453 // We might need to loop once due to ToNumber conversion.
454 Variable var_value(this, MachineRepresentation::kTagged),
455 var_result(this, MachineRepresentation::kFloat64);
456 Label loop(this, &var_value), done_loop(this, &var_result);
457 var_value.Bind(value);
458 Goto(&loop);
459 Bind(&loop);
460 {
461 // Load the current {value}.
462 value = var_value.value();
463
464 // Check if the {value} is a Smi or a HeapObject.
465 Label if_valueissmi(this), if_valueisnotsmi(this);
466 Branch(WordIsSmi(value), &if_valueissmi, &if_valueisnotsmi);
467
468 Bind(&if_valueissmi);
469 {
470 // Convert the Smi {value}.
471 var_result.Bind(SmiToFloat64(value));
472 Goto(&done_loop);
473 }
474
475 Bind(&if_valueisnotsmi);
476 {
477 // Check if {value} is a HeapNumber.
478 Label if_valueisheapnumber(this),
479 if_valueisnotheapnumber(this, Label::kDeferred);
480 Branch(WordEqual(LoadMap(value), HeapNumberMapConstant()),
481 &if_valueisheapnumber, &if_valueisnotheapnumber);
482
483 Bind(&if_valueisheapnumber);
484 {
485 // Load the floating point value.
486 var_result.Bind(LoadHeapNumberValue(value));
487 Goto(&done_loop);
488 }
489
490 Bind(&if_valueisnotheapnumber);
491 {
492 // Convert the {value} to a Number first.
493 Callable callable = CodeFactory::NonNumberToNumber(isolate());
494 var_value.Bind(CallStub(callable, context, value));
495 Goto(&loop);
496 }
497 }
498 }
499 Bind(&done_loop);
500 return var_result.value();
750 } 501 }
751 502
752 Node* CodeStubAssembler::Load(MachineType rep, Node* base, Node* index) { 503 Node* CodeStubAssembler::TruncateTaggedToWord32(Node* context, Node* value) {
753 return raw_assembler_->Load(rep, base, index); 504 // We might need to loop once due to ToNumber conversion.
505 Variable var_value(this, MachineRepresentation::kTagged),
506 var_result(this, MachineRepresentation::kWord32);
507 Label loop(this, &var_value), done_loop(this, &var_result);
508 var_value.Bind(value);
509 Goto(&loop);
510 Bind(&loop);
511 {
512 // Load the current {value}.
513 value = var_value.value();
514
515 // Check if the {value} is a Smi or a HeapObject.
516 Label if_valueissmi(this), if_valueisnotsmi(this);
517 Branch(WordIsSmi(value), &if_valueissmi, &if_valueisnotsmi);
518
519 Bind(&if_valueissmi);
520 {
521 // Convert the Smi {value}.
522 var_result.Bind(SmiToWord32(value));
523 Goto(&done_loop);
524 }
525
526 Bind(&if_valueisnotsmi);
527 {
528 // Check if {value} is a HeapNumber.
529 Label if_valueisheapnumber(this),
530 if_valueisnotheapnumber(this, Label::kDeferred);
531 Branch(WordEqual(LoadMap(value), HeapNumberMapConstant()),
532 &if_valueisheapnumber, &if_valueisnotheapnumber);
533
534 Bind(&if_valueisheapnumber);
535 {
536 // Truncate the floating point value.
537 var_result.Bind(TruncateHeapNumberValueToWord32(value));
538 Goto(&done_loop);
539 }
540
541 Bind(&if_valueisnotheapnumber);
542 {
543 // Convert the {value} to a Number first.
544 Callable callable = CodeFactory::NonNumberToNumber(isolate());
545 var_value.Bind(CallStub(callable, context, value));
546 Goto(&loop);
547 }
548 }
549 }
550 Bind(&done_loop);
551 return var_result.value();
754 } 552 }
755 553
756 Node* CodeStubAssembler::Store(MachineRepresentation rep, Node* base, 554 Node* CodeStubAssembler::TruncateFloat64ToInt32(Node* value) {
757 Node* value) { 555 return TruncateFloat64ToInt32JavaScript(value);
758 return raw_assembler_->Store(rep, base, value, kFullWriteBarrier);
759 } 556 }
760 557
761 Node* CodeStubAssembler::Store(MachineRepresentation rep, Node* base, 558 Node* CodeStubAssembler::TruncateHeapNumberValueToWord32(Node* object) {
762 Node* index, Node* value) { 559 Node* value = LoadHeapNumberValue(object);
763 return raw_assembler_->Store(rep, base, index, value, kFullWriteBarrier); 560 return TruncateFloat64ToInt32(value);
764 }
765
766 Node* CodeStubAssembler::StoreNoWriteBarrier(MachineRepresentation rep,
767 Node* base, Node* value) {
768 return raw_assembler_->Store(rep, base, value, kNoWriteBarrier);
769 }
770
771 Node* CodeStubAssembler::StoreNoWriteBarrier(MachineRepresentation rep,
772 Node* base, Node* index,
773 Node* value) {
774 return raw_assembler_->Store(rep, base, index, value, kNoWriteBarrier);
775 }
776
777 Node* CodeStubAssembler::Projection(int index, Node* value) {
778 return raw_assembler_->Projection(index, value);
779 }
780
781 Node* CodeStubAssembler::LoadMap(Node* object) {
782 return LoadObjectField(object, HeapObject::kMapOffset);
783 }
784
785 Node* CodeStubAssembler::StoreMapNoWriteBarrier(Node* object, Node* map) {
786 return StoreNoWriteBarrier(
787 MachineRepresentation::kTagged, object,
788 IntPtrConstant(HeapNumber::kMapOffset - kHeapObjectTag), map);
789 }
790
791 Node* CodeStubAssembler::LoadInstanceType(Node* object) {
792 return LoadMapInstanceType(LoadMap(object));
793 }
794
795 Node* CodeStubAssembler::LoadElements(Node* object) {
796 return LoadObjectField(object, JSObject::kElementsOffset);
797 }
798
799 Node* CodeStubAssembler::LoadFixedArrayBaseLength(Node* array) {
800 return LoadObjectField(array, FixedArrayBase::kLengthOffset);
801 }
802
803 Node* CodeStubAssembler::BitFieldDecode(Node* word32, uint32_t shift,
804 uint32_t mask) {
805 return raw_assembler_->Word32Shr(
806 raw_assembler_->Word32And(word32, raw_assembler_->Int32Constant(mask)),
807 raw_assembler_->Int32Constant(shift));
808 } 561 }
809 562
810 Node* CodeStubAssembler::ChangeFloat64ToTagged(Node* value) { 563 Node* CodeStubAssembler::ChangeFloat64ToTagged(Node* value) {
811 Node* value32 = raw_assembler_->TruncateFloat64ToInt32( 564 Node* value32 = TruncateFloat64ToInt32RoundToZero(value);
812 TruncationMode::kRoundToZero, value);
813 Node* value64 = ChangeInt32ToFloat64(value32); 565 Node* value64 = ChangeInt32ToFloat64(value32);
814 566
815 Label if_valueisint32(this), if_valueisheapnumber(this), if_join(this); 567 Label if_valueisint32(this), if_valueisheapnumber(this), if_join(this);
816 568
817 Label if_valueisequal(this), if_valueisnotequal(this); 569 Label if_valueisequal(this), if_valueisnotequal(this);
818 Branch(Float64Equal(value, value64), &if_valueisequal, &if_valueisnotequal); 570 Branch(Float64Equal(value, value64), &if_valueisequal, &if_valueisnotequal);
819 Bind(&if_valueisequal); 571 Bind(&if_valueisequal);
820 { 572 {
821 Label if_valueiszero(this), if_valueisnotzero(this); 573 Label if_valueiszero(this), if_valueisnotzero(this);
822 Branch(Float64Equal(value, Float64Constant(0.0)), &if_valueiszero, 574 Branch(Float64Equal(value, Float64Constant(0.0)), &if_valueiszero,
823 &if_valueisnotzero); 575 &if_valueisnotzero);
824 576
825 Bind(&if_valueiszero); 577 Bind(&if_valueiszero);
826 BranchIfInt32LessThan(raw_assembler_->Float64ExtractHighWord32(value), 578 BranchIfInt32LessThan(Float64ExtractHighWord32(value), Int32Constant(0),
827 Int32Constant(0), &if_valueisheapnumber, 579 &if_valueisheapnumber, &if_valueisint32);
828 &if_valueisint32);
829 580
830 Bind(&if_valueisnotzero); 581 Bind(&if_valueisnotzero);
831 Goto(&if_valueisint32); 582 Goto(&if_valueisint32);
832 } 583 }
833 Bind(&if_valueisnotequal); 584 Bind(&if_valueisnotequal);
834 Goto(&if_valueisheapnumber); 585 Goto(&if_valueisheapnumber);
835 586
836 Variable var_result(this, MachineRepresentation::kTagged); 587 Variable var_result(this, MachineRepresentation::kTagged);
837 Bind(&if_valueisint32); 588 Bind(&if_valueisint32);
838 { 589 {
839 if (raw_assembler_->machine()->Is64()) { 590 if (Is64()) {
840 Node* result = SmiTag(ChangeInt32ToInt64(value32)); 591 Node* result = SmiTag(ChangeInt32ToInt64(value32));
841 var_result.Bind(result); 592 var_result.Bind(result);
842 Goto(&if_join); 593 Goto(&if_join);
843 } else { 594 } else {
844 Node* pair = Int32AddWithOverflow(value32, value32); 595 Node* pair = Int32AddWithOverflow(value32, value32);
845 Node* overflow = Projection(1, pair); 596 Node* overflow = Projection(1, pair);
846 Label if_overflow(this, Label::kDeferred), if_notoverflow(this); 597 Label if_overflow(this, Label::kDeferred), if_notoverflow(this);
847 Branch(overflow, &if_overflow, &if_notoverflow); 598 Branch(overflow, &if_overflow, &if_notoverflow);
848 Bind(&if_overflow); 599 Bind(&if_overflow);
849 Goto(&if_valueisheapnumber); 600 Goto(&if_valueisheapnumber);
850 Bind(&if_notoverflow); 601 Bind(&if_notoverflow);
851 { 602 {
852 Node* result = Projection(0, pair); 603 Node* result = Projection(0, pair);
853 var_result.Bind(result); 604 var_result.Bind(result);
854 Goto(&if_join); 605 Goto(&if_join);
855 } 606 }
856 } 607 }
857 } 608 }
858 Bind(&if_valueisheapnumber); 609 Bind(&if_valueisheapnumber);
859 { 610 {
860 Node* result = AllocateHeapNumberWithValue(value); 611 Node* result = AllocateHeapNumberWithValue(value);
861 var_result.Bind(result); 612 var_result.Bind(result);
862 Goto(&if_join); 613 Goto(&if_join);
863 } 614 }
864 Bind(&if_join); 615 Bind(&if_join);
865 return var_result.value(); 616 return var_result.value();
866 } 617 }
867 618
868 Node* CodeStubAssembler::ChangeInt32ToTagged(Node* value) { 619 Node* CodeStubAssembler::ChangeInt32ToTagged(Node* value) {
869 if (raw_assembler_->machine()->Is64()) { 620 if (Is64()) {
870 return SmiTag(ChangeInt32ToInt64(value)); 621 return SmiTag(ChangeInt32ToInt64(value));
871 } 622 }
872 Variable var_result(this, MachineRepresentation::kTagged); 623 Variable var_result(this, MachineRepresentation::kTagged);
873 Node* pair = Int32AddWithOverflow(value, value); 624 Node* pair = Int32AddWithOverflow(value, value);
874 Node* overflow = Projection(1, pair); 625 Node* overflow = Projection(1, pair);
875 Label if_overflow(this, Label::kDeferred), if_notoverflow(this), 626 Label if_overflow(this, Label::kDeferred), if_notoverflow(this),
876 if_join(this); 627 if_join(this);
877 Branch(overflow, &if_overflow, &if_notoverflow); 628 Branch(overflow, &if_overflow, &if_notoverflow);
878 Bind(&if_overflow); 629 Bind(&if_overflow);
879 { 630 {
(...skipping 14 matching lines...) Expand all
894 645
895 Node* CodeStubAssembler::ChangeUint32ToTagged(Node* value) { 646 Node* CodeStubAssembler::ChangeUint32ToTagged(Node* value) {
896 Label if_overflow(this, Label::kDeferred), if_not_overflow(this), 647 Label if_overflow(this, Label::kDeferred), if_not_overflow(this),
897 if_join(this); 648 if_join(this);
898 Variable var_result(this, MachineRepresentation::kTagged); 649 Variable var_result(this, MachineRepresentation::kTagged);
899 // If {value} > 2^31 - 1, we need to store it in a HeapNumber. 650 // If {value} > 2^31 - 1, we need to store it in a HeapNumber.
900 Branch(Int32LessThan(value, Int32Constant(0)), &if_overflow, 651 Branch(Int32LessThan(value, Int32Constant(0)), &if_overflow,
901 &if_not_overflow); 652 &if_not_overflow);
902 Bind(&if_not_overflow); 653 Bind(&if_not_overflow);
903 { 654 {
904 if (raw_assembler_->machine()->Is64()) { 655 if (Is64()) {
905 var_result.Bind(SmiTag(ChangeUint32ToUint64(value))); 656 var_result.Bind(SmiTag(ChangeUint32ToUint64(value)));
906 } else { 657 } else {
907 // If tagging {value} results in an overflow, we need to use a HeapNumber 658 // If tagging {value} results in an overflow, we need to use a HeapNumber
908 // to represent it. 659 // to represent it.
909 Node* pair = Int32AddWithOverflow(value, value); 660 Node* pair = Int32AddWithOverflow(value, value);
910 Node* overflow = Projection(1, pair); 661 Node* overflow = Projection(1, pair);
911 GotoIf(overflow, &if_overflow); 662 GotoIf(overflow, &if_overflow);
912 663
913 Node* result = Projection(0, pair); 664 Node* result = Projection(0, pair);
914 var_result.Bind(result); 665 var_result.Bind(result);
915 } 666 }
916 } 667 }
917 Goto(&if_join); 668 Goto(&if_join);
918 669
919 Bind(&if_overflow); 670 Bind(&if_overflow);
920 { 671 {
921 Node* float64_value = ChangeUint32ToFloat64(value); 672 Node* float64_value = ChangeUint32ToFloat64(value);
922 var_result.Bind(AllocateHeapNumberWithValue(float64_value)); 673 var_result.Bind(AllocateHeapNumberWithValue(float64_value));
923 } 674 }
924 Goto(&if_join); 675 Goto(&if_join);
925 676
926 Bind(&if_join); 677 Bind(&if_join);
927 return var_result.value(); 678 return var_result.value();
928 } 679 }
929 680
930 Node* CodeStubAssembler::TruncateTaggedToFloat64(Node* context, Node* value) {
931 // We might need to loop once due to ToNumber conversion.
932 Variable var_value(this, MachineRepresentation::kTagged),
933 var_result(this, MachineRepresentation::kFloat64);
934 Label loop(this, &var_value), done_loop(this, &var_result);
935 var_value.Bind(value);
936 Goto(&loop);
937 Bind(&loop);
938 {
939 // Load the current {value}.
940 value = var_value.value();
941
942 // Check if the {value} is a Smi or a HeapObject.
943 Label if_valueissmi(this), if_valueisnotsmi(this);
944 Branch(WordIsSmi(value), &if_valueissmi, &if_valueisnotsmi);
945
946 Bind(&if_valueissmi);
947 {
948 // Convert the Smi {value}.
949 var_result.Bind(SmiToFloat64(value));
950 Goto(&done_loop);
951 }
952
953 Bind(&if_valueisnotsmi);
954 {
955 // Check if {value} is a HeapNumber.
956 Label if_valueisheapnumber(this),
957 if_valueisnotheapnumber(this, Label::kDeferred);
958 Branch(WordEqual(LoadMap(value), HeapNumberMapConstant()),
959 &if_valueisheapnumber, &if_valueisnotheapnumber);
960
961 Bind(&if_valueisheapnumber);
962 {
963 // Load the floating point value.
964 var_result.Bind(LoadHeapNumberValue(value));
965 Goto(&done_loop);
966 }
967
968 Bind(&if_valueisnotheapnumber);
969 {
970 // Convert the {value} to a Number first.
971 Callable callable = CodeFactory::NonNumberToNumber(isolate());
972 var_value.Bind(CallStub(callable, context, value));
973 Goto(&loop);
974 }
975 }
976 }
977 Bind(&done_loop);
978 return var_result.value();
979 }
980
981 Node* CodeStubAssembler::TruncateTaggedToWord32(Node* context, Node* value) {
982 // We might need to loop once due to ToNumber conversion.
983 Variable var_value(this, MachineRepresentation::kTagged),
984 var_result(this, MachineRepresentation::kWord32);
985 Label loop(this, &var_value), done_loop(this, &var_result);
986 var_value.Bind(value);
987 Goto(&loop);
988 Bind(&loop);
989 {
990 // Load the current {value}.
991 value = var_value.value();
992
993 // Check if the {value} is a Smi or a HeapObject.
994 Label if_valueissmi(this), if_valueisnotsmi(this);
995 Branch(WordIsSmi(value), &if_valueissmi, &if_valueisnotsmi);
996
997 Bind(&if_valueissmi);
998 {
999 // Convert the Smi {value}.
1000 var_result.Bind(SmiToWord32(value));
1001 Goto(&done_loop);
1002 }
1003
1004 Bind(&if_valueisnotsmi);
1005 {
1006 // Check if {value} is a HeapNumber.
1007 Label if_valueisheapnumber(this),
1008 if_valueisnotheapnumber(this, Label::kDeferred);
1009 Branch(WordEqual(LoadMap(value), HeapNumberMapConstant()),
1010 &if_valueisheapnumber, &if_valueisnotheapnumber);
1011
1012 Bind(&if_valueisheapnumber);
1013 {
1014 // Truncate the floating point value.
1015 var_result.Bind(TruncateHeapNumberValueToWord32(value));
1016 Goto(&done_loop);
1017 }
1018
1019 Bind(&if_valueisnotheapnumber);
1020 {
1021 // Convert the {value} to a Number first.
1022 Callable callable = CodeFactory::NonNumberToNumber(isolate());
1023 var_value.Bind(CallStub(callable, context, value));
1024 Goto(&loop);
1025 }
1026 }
1027 }
1028 Bind(&done_loop);
1029 return var_result.value();
1030 }
1031
1032 Node* CodeStubAssembler::ToThisString(Node* context, Node* value, 681 Node* CodeStubAssembler::ToThisString(Node* context, Node* value,
1033 char const* method_name) { 682 char const* method_name) {
1034 Variable var_value(this, MachineRepresentation::kTagged); 683 Variable var_value(this, MachineRepresentation::kTagged);
1035 var_value.Bind(value); 684 var_value.Bind(value);
1036 685
1037 // Check if the {value} is a Smi or a HeapObject. 686 // Check if the {value} is a Smi or a HeapObject.
1038 Label if_valueissmi(this, Label::kDeferred), if_valueisnotsmi(this), 687 Label if_valueissmi(this, Label::kDeferred), if_valueisnotsmi(this),
1039 if_valueisstring(this); 688 if_valueisstring(this);
1040 Branch(WordIsSmi(value), &if_valueissmi, &if_valueisnotsmi); 689 Branch(WordIsSmi(value), &if_valueissmi, &if_valueisnotsmi);
1041 Bind(&if_valueisnotsmi); 690 Bind(&if_valueisnotsmi);
(...skipping 271 matching lines...) Expand 10 before | Expand all | Expand 10 after
1313 MachineRepresentation::kWord16, result, 962 MachineRepresentation::kWord16, result,
1314 IntPtrConstant(SeqTwoByteString::kHeaderSize - kHeapObjectTag), code); 963 IntPtrConstant(SeqTwoByteString::kHeaderSize - kHeapObjectTag), code);
1315 var_result.Bind(result); 964 var_result.Bind(result);
1316 Goto(&if_done); 965 Goto(&if_done);
1317 } 966 }
1318 967
1319 Bind(&if_done); 968 Bind(&if_done);
1320 return var_result.value(); 969 return var_result.value();
1321 } 970 }
1322 971
1323 Node* CodeStubAssembler::TruncateFloat64ToInt32(Node* value) { 972 Node* CodeStubAssembler::BitFieldDecode(Node* word32, uint32_t shift,
1324 return raw_assembler_->TruncateFloat64ToInt32(TruncationMode::kJavaScript, 973 uint32_t mask) {
1325 value); 974 return Word32Shr(Word32And(word32, Int32Constant(mask)),
975 Int32Constant(shift));
1326 } 976 }
1327 977
1328 void CodeStubAssembler::BranchIf(Node* condition, Label* if_true,
1329 Label* if_false) {
1330 Label if_condition_is_true(this), if_condition_is_false(this);
1331 Branch(condition, &if_condition_is_true, &if_condition_is_false);
1332 Bind(&if_condition_is_true);
1333 Goto(if_true);
1334 Bind(&if_condition_is_false);
1335 Goto(if_false);
1336 }
1337
1338 Node* CodeStubAssembler::CallN(CallDescriptor* descriptor, Node* code_target,
1339 Node** args) {
1340 CallPrologue();
1341 Node* return_value = raw_assembler_->CallN(descriptor, code_target, args);
1342 CallEpilogue();
1343 return return_value;
1344 }
1345
1346
1347 Node* CodeStubAssembler::TailCallN(CallDescriptor* descriptor,
1348 Node* code_target, Node** args) {
1349 return raw_assembler_->TailCallN(descriptor, code_target, args);
1350 }
1351
1352 Node* CodeStubAssembler::CallRuntime(Runtime::FunctionId function_id,
1353 Node* context) {
1354 CallPrologue();
1355 Node* return_value = raw_assembler_->CallRuntime0(function_id, context);
1356 CallEpilogue();
1357 return return_value;
1358 }
1359
1360 Node* CodeStubAssembler::CallRuntime(Runtime::FunctionId function_id,
1361 Node* context, Node* arg1) {
1362 CallPrologue();
1363 Node* return_value = raw_assembler_->CallRuntime1(function_id, arg1, context);
1364 CallEpilogue();
1365 return return_value;
1366 }
1367
1368 Node* CodeStubAssembler::CallRuntime(Runtime::FunctionId function_id,
1369 Node* context, Node* arg1, Node* arg2) {
1370 CallPrologue();
1371 Node* return_value =
1372 raw_assembler_->CallRuntime2(function_id, arg1, arg2, context);
1373 CallEpilogue();
1374 return return_value;
1375 }
1376
1377 Node* CodeStubAssembler::CallRuntime(Runtime::FunctionId function_id,
1378 Node* context, Node* arg1, Node* arg2,
1379 Node* arg3) {
1380 CallPrologue();
1381 Node* return_value =
1382 raw_assembler_->CallRuntime3(function_id, arg1, arg2, arg3, context);
1383 CallEpilogue();
1384 return return_value;
1385 }
1386
1387 Node* CodeStubAssembler::CallRuntime(Runtime::FunctionId function_id,
1388 Node* context, Node* arg1, Node* arg2,
1389 Node* arg3, Node* arg4) {
1390 CallPrologue();
1391 Node* return_value = raw_assembler_->CallRuntime4(function_id, arg1, arg2,
1392 arg3, arg4, context);
1393 CallEpilogue();
1394 return return_value;
1395 }
1396
1397 Node* CodeStubAssembler::TailCallRuntime(Runtime::FunctionId function_id,
1398 Node* context) {
1399 return raw_assembler_->TailCallRuntime0(function_id, context);
1400 }
1401
1402 Node* CodeStubAssembler::TailCallRuntime(Runtime::FunctionId function_id,
1403 Node* context, Node* arg1) {
1404 return raw_assembler_->TailCallRuntime1(function_id, arg1, context);
1405 }
1406
1407 Node* CodeStubAssembler::TailCallRuntime(Runtime::FunctionId function_id,
1408 Node* context, Node* arg1,
1409 Node* arg2) {
1410 return raw_assembler_->TailCallRuntime2(function_id, arg1, arg2, context);
1411 }
1412
1413 Node* CodeStubAssembler::TailCallRuntime(Runtime::FunctionId function_id,
1414 Node* context, Node* arg1, Node* arg2,
1415 Node* arg3) {
1416 return raw_assembler_->TailCallRuntime3(function_id, arg1, arg2, arg3,
1417 context);
1418 }
1419
1420 Node* CodeStubAssembler::TailCallRuntime(Runtime::FunctionId function_id,
1421 Node* context, Node* arg1, Node* arg2,
1422 Node* arg3, Node* arg4) {
1423 return raw_assembler_->TailCallRuntime4(function_id, arg1, arg2, arg3, arg4,
1424 context);
1425 }
1426
1427 Node* CodeStubAssembler::CallStub(Callable const& callable, Node* context,
1428 Node* arg1, size_t result_size) {
1429 Node* target = HeapConstant(callable.code());
1430 return CallStub(callable.descriptor(), target, context, arg1, result_size);
1431 }
1432
1433 Node* CodeStubAssembler::CallStub(Callable const& callable, Node* context,
1434 Node* arg1, Node* arg2, size_t result_size) {
1435 Node* target = HeapConstant(callable.code());
1436 return CallStub(callable.descriptor(), target, context, arg1, arg2,
1437 result_size);
1438 }
1439
1440 Node* CodeStubAssembler::CallStub(Callable const& callable, Node* context,
1441 Node* arg1, Node* arg2, Node* arg3,
1442 size_t result_size) {
1443 Node* target = HeapConstant(callable.code());
1444 return CallStub(callable.descriptor(), target, context, arg1, arg2, arg3,
1445 result_size);
1446 }
1447
1448 Node* CodeStubAssembler::CallStub(const CallInterfaceDescriptor& descriptor,
1449 Node* target, Node* context, Node* arg1,
1450 size_t result_size) {
1451 CallDescriptor* call_descriptor = Linkage::GetStubCallDescriptor(
1452 isolate(), zone(), descriptor, descriptor.GetStackParameterCount(),
1453 CallDescriptor::kNoFlags, Operator::kNoProperties,
1454 MachineType::AnyTagged(), result_size);
1455
1456 Node** args = zone()->NewArray<Node*>(2);
1457 args[0] = arg1;
1458 args[1] = context;
1459
1460 return CallN(call_descriptor, target, args);
1461 }
1462
1463 Node* CodeStubAssembler::CallStub(const CallInterfaceDescriptor& descriptor,
1464 Node* target, Node* context, Node* arg1,
1465 Node* arg2, size_t result_size) {
1466 CallDescriptor* call_descriptor = Linkage::GetStubCallDescriptor(
1467 isolate(), zone(), descriptor, descriptor.GetStackParameterCount(),
1468 CallDescriptor::kNoFlags, Operator::kNoProperties,
1469 MachineType::AnyTagged(), result_size);
1470
1471 Node** args = zone()->NewArray<Node*>(3);
1472 args[0] = arg1;
1473 args[1] = arg2;
1474 args[2] = context;
1475
1476 return CallN(call_descriptor, target, args);
1477 }
1478
1479 Node* CodeStubAssembler::CallStub(const CallInterfaceDescriptor& descriptor,
1480 Node* target, Node* context, Node* arg1,
1481 Node* arg2, Node* arg3, size_t result_size) {
1482 CallDescriptor* call_descriptor = Linkage::GetStubCallDescriptor(
1483 isolate(), zone(), descriptor, descriptor.GetStackParameterCount(),
1484 CallDescriptor::kNoFlags, Operator::kNoProperties,
1485 MachineType::AnyTagged(), result_size);
1486
1487 Node** args = zone()->NewArray<Node*>(4);
1488 args[0] = arg1;
1489 args[1] = arg2;
1490 args[2] = arg3;
1491 args[3] = context;
1492
1493 return CallN(call_descriptor, target, args);
1494 }
1495
1496 Node* CodeStubAssembler::CallStub(const CallInterfaceDescriptor& descriptor,
1497 Node* target, Node* context, Node* arg1,
1498 Node* arg2, Node* arg3, Node* arg4,
1499 size_t result_size) {
1500 CallDescriptor* call_descriptor = Linkage::GetStubCallDescriptor(
1501 isolate(), zone(), descriptor, descriptor.GetStackParameterCount(),
1502 CallDescriptor::kNoFlags, Operator::kNoProperties,
1503 MachineType::AnyTagged(), result_size);
1504
1505 Node** args = zone()->NewArray<Node*>(5);
1506 args[0] = arg1;
1507 args[1] = arg2;
1508 args[2] = arg3;
1509 args[3] = arg4;
1510 args[4] = context;
1511
1512 return CallN(call_descriptor, target, args);
1513 }
1514
1515 Node* CodeStubAssembler::CallStub(const CallInterfaceDescriptor& descriptor,
1516 Node* target, Node* context, Node* arg1,
1517 Node* arg2, Node* arg3, Node* arg4,
1518 Node* arg5, size_t result_size) {
1519 CallDescriptor* call_descriptor = Linkage::GetStubCallDescriptor(
1520 isolate(), zone(), descriptor, descriptor.GetStackParameterCount(),
1521 CallDescriptor::kNoFlags, Operator::kNoProperties,
1522 MachineType::AnyTagged(), result_size);
1523
1524 Node** args = zone()->NewArray<Node*>(6);
1525 args[0] = arg1;
1526 args[1] = arg2;
1527 args[2] = arg3;
1528 args[3] = arg4;
1529 args[4] = arg5;
1530 args[5] = context;
1531
1532 return CallN(call_descriptor, target, args);
1533 }
1534
1535 Node* CodeStubAssembler::TailCallStub(Callable const& callable, Node* context,
1536 Node* arg1, Node* arg2,
1537 size_t result_size) {
1538 Node* target = HeapConstant(callable.code());
1539 return TailCallStub(callable.descriptor(), target, context, arg1, arg2,
1540 result_size);
1541 }
1542
1543 Node* CodeStubAssembler::TailCallStub(const CallInterfaceDescriptor& descriptor,
1544 Node* target, Node* context, Node* arg1,
1545 Node* arg2, size_t result_size) {
1546 CallDescriptor* call_descriptor = Linkage::GetStubCallDescriptor(
1547 isolate(), zone(), descriptor, descriptor.GetStackParameterCount(),
1548 CallDescriptor::kSupportsTailCalls, Operator::kNoProperties,
1549 MachineType::AnyTagged(), result_size);
1550
1551 Node** args = zone()->NewArray<Node*>(3);
1552 args[0] = arg1;
1553 args[1] = arg2;
1554 args[2] = context;
1555
1556 return raw_assembler_->TailCallN(call_descriptor, target, args);
1557 }
1558
1559 Node* CodeStubAssembler::TailCallBytecodeDispatch(
1560 const CallInterfaceDescriptor& interface_descriptor,
1561 Node* code_target_address, Node** args) {
1562 CallDescriptor* descriptor = Linkage::GetBytecodeDispatchCallDescriptor(
1563 isolate(), zone(), interface_descriptor,
1564 interface_descriptor.GetStackParameterCount());
1565 return raw_assembler_->TailCallN(descriptor, code_target_address, args);
1566 }
1567
1568 void CodeStubAssembler::Goto(CodeStubAssembler::Label* label) {
1569 label->MergeVariables();
1570 raw_assembler_->Goto(label->label_);
1571 }
1572
1573 void CodeStubAssembler::GotoIf(Node* condition, Label* true_label) {
1574 Label false_label(this);
1575 Branch(condition, true_label, &false_label);
1576 Bind(&false_label);
1577 }
1578
1579 void CodeStubAssembler::GotoUnless(Node* condition, Label* false_label) {
1580 Label true_label(this);
1581 Branch(condition, &true_label, false_label);
1582 Bind(&true_label);
1583 }
1584
1585 void CodeStubAssembler::Branch(Node* condition,
1586 CodeStubAssembler::Label* true_label,
1587 CodeStubAssembler::Label* false_label) {
1588 true_label->MergeVariables();
1589 false_label->MergeVariables();
1590 return raw_assembler_->Branch(condition, true_label->label_,
1591 false_label->label_);
1592 }
1593
1594 void CodeStubAssembler::Switch(Node* index, Label* default_label,
1595 int32_t* case_values, Label** case_labels,
1596 size_t case_count) {
1597 RawMachineLabel** labels =
1598 new (zone()->New(sizeof(RawMachineLabel*) * case_count))
1599 RawMachineLabel*[case_count];
1600 for (size_t i = 0; i < case_count; ++i) {
1601 labels[i] = case_labels[i]->label_;
1602 case_labels[i]->MergeVariables();
1603 default_label->MergeVariables();
1604 }
1605 return raw_assembler_->Switch(index, default_label->label_, case_values,
1606 labels, case_count);
1607 }
1608
1609 // RawMachineAssembler delegate helpers:
1610 Isolate* CodeStubAssembler::isolate() const {
1611 return raw_assembler_->isolate();
1612 }
1613
1614 Factory* CodeStubAssembler::factory() const { return isolate()->factory(); }
1615
1616 Graph* CodeStubAssembler::graph() const { return raw_assembler_->graph(); }
1617
1618 Zone* CodeStubAssembler::zone() const { return raw_assembler_->zone(); }
1619
1620 // The core implementation of Variable is stored through an indirection so
1621 // that it can outlive the often block-scoped Variable declarations. This is
1622 // needed to ensure that variable binding and merging through phis can
1623 // properly be verified.
1624 class CodeStubAssembler::Variable::Impl : public ZoneObject {
1625 public:
1626 explicit Impl(MachineRepresentation rep) : value_(nullptr), rep_(rep) {}
1627 Node* value_;
1628 MachineRepresentation rep_;
1629 };
1630
1631 CodeStubAssembler::Variable::Variable(CodeStubAssembler* assembler,
1632 MachineRepresentation rep)
1633 : impl_(new (assembler->zone()) Impl(rep)) {
1634 assembler->variables_.push_back(impl_);
1635 }
1636
1637 void CodeStubAssembler::Variable::Bind(Node* value) { impl_->value_ = value; }
1638
1639 Node* CodeStubAssembler::Variable::value() const {
1640 DCHECK_NOT_NULL(impl_->value_);
1641 return impl_->value_;
1642 }
1643
1644 MachineRepresentation CodeStubAssembler::Variable::rep() const {
1645 return impl_->rep_;
1646 }
1647
1648 bool CodeStubAssembler::Variable::IsBound() const {
1649 return impl_->value_ != nullptr;
1650 }
1651
1652 CodeStubAssembler::Label::Label(CodeStubAssembler* assembler,
1653 int merged_value_count,
1654 CodeStubAssembler::Variable** merged_variables,
1655 CodeStubAssembler::Label::Type type)
1656 : bound_(false), merge_count_(0), assembler_(assembler), label_(nullptr) {
1657 void* buffer = assembler->zone()->New(sizeof(RawMachineLabel));
1658 label_ = new (buffer)
1659 RawMachineLabel(type == kDeferred ? RawMachineLabel::kDeferred
1660 : RawMachineLabel::kNonDeferred);
1661 for (int i = 0; i < merged_value_count; ++i) {
1662 variable_phis_[merged_variables[i]->impl_] = nullptr;
1663 }
1664 }
1665
1666 void CodeStubAssembler::Label::MergeVariables() {
1667 ++merge_count_;
1668 for (auto var : assembler_->variables_) {
1669 size_t count = 0;
1670 Node* node = var->value_;
1671 if (node != nullptr) {
1672 auto i = variable_merges_.find(var);
1673 if (i != variable_merges_.end()) {
1674 i->second.push_back(node);
1675 count = i->second.size();
1676 } else {
1677 count = 1;
1678 variable_merges_[var] = std::vector<Node*>(1, node);
1679 }
1680 }
1681 // If the following asserts, then you've jumped to a label without a bound
1682 // variable along that path that expects to merge its value into a phi.
1683 DCHECK(variable_phis_.find(var) == variable_phis_.end() ||
1684 count == merge_count_);
1685 USE(count);
1686
1687 // If the label is already bound, we already know the set of variables to
1688 // merge and phi nodes have already been created.
1689 if (bound_) {
1690 auto phi = variable_phis_.find(var);
1691 if (phi != variable_phis_.end()) {
1692 DCHECK_NOT_NULL(phi->second);
1693 assembler_->raw_assembler_->AppendPhiInput(phi->second, node);
1694 } else {
1695 auto i = variable_merges_.find(var);
1696 if (i != variable_merges_.end()) {
1697 // If the following assert fires, then you've declared a variable that
1698 // has the same bound value along all paths up until the point you
1699 // bound this label, but then later merged a path with a new value for
1700 // the variable after the label bind (it's not possible to add phis to
1701 // the bound label after the fact, just make sure to list the variable
1702 // in the label's constructor's list of merged variables).
1703 DCHECK(find_if(i->second.begin(), i->second.end(),
1704 [node](Node* e) -> bool { return node != e; }) ==
1705 i->second.end());
1706 }
1707 }
1708 }
1709 }
1710 }
1711
// Binds the label at the current position in the instruction stream,
// materializes phi nodes for every variable whose value diverged across the
// merged control-flow paths, and updates each variable's current value to
// either its phi, the single shared value, or null.
void CodeStubAssembler::Label::Bind() {
  DCHECK(!bound_);
  assembler_->raw_assembler_->Bind(label_);

  // Make sure that all variables that have changed along any path up to this
  // point are marked as merge variables.
  for (auto var : assembler_->variables_) {
    Node* shared_value = nullptr;
    auto i = variable_merges_.find(var);
    if (i != variable_merges_.end()) {
      for (auto value : i->second) {
        DCHECK(value != nullptr);
        if (value != shared_value) {
          if (shared_value == nullptr) {
            shared_value = value;
          } else {
            // Two merged paths disagree on the value: register the variable
            // with a null placeholder; the phi node is built in the loop
            // below.
            variable_phis_[var] = nullptr;
          }
        }
      }
    }
  }

  // Build one phi node per registered merge variable, feeding it the value
  // recorded for each of the merged paths.
  for (auto var : variable_phis_) {
    CodeStubAssembler::Variable::Impl* var_impl = var.first;
    auto i = variable_merges_.find(var_impl);
    // If the following assert fires, then a variable that has been marked as
    // being merged at the label--either by explicitly marking it so in the
    // label constructor or by having seen different bound values at branches
    // into the label--doesn't have a bound value along all of the paths that
    // have been merged into the label up to this point.
    DCHECK(i != variable_merges_.end() && i->second.size() == merge_count_);
    Node* phi = assembler_->raw_assembler_->Phi(
        var.first->rep_, static_cast<int>(merge_count_), &(i->second[0]));
    variable_phis_[var_impl] = phi;
  }

  // Bind all variables to a merge phi, the common value along all paths or
  // null.
  for (auto var : assembler_->variables_) {
    auto i = variable_phis_.find(var);
    if (i != variable_phis_.end()) {
      var->value_ = i->second;
    } else {
      auto j = variable_merges_.find(var);
      if (j != variable_merges_.end() && j->second.size() == merge_count_) {
        // All merged paths agreed on one value; adopt it without a phi.
        var->value_ = j->second.back();
      } else {
        // The variable was not bound along every merged path, so it has no
        // usable value at this point.
        var->value_ = nullptr;
      }
    }
  }

  bound_ = true;
}
1767
1768 } // namespace compiler
1769 } // namespace internal 978 } // namespace internal
1770 } // namespace v8 979 } // namespace v8
OLDNEW
« no previous file with comments | « src/code-stub-assembler.h ('k') | src/code-stubs.h » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698