// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/compiler/code-stub-assembler.h"

#include <ostream>

#include "src/code-factory.h"
#include "src/compiler/graph.h"
#include "src/compiler/instruction-selector.h"
#include "src/compiler/linkage.h"
#include "src/compiler/pipeline.h"
#include "src/compiler/raw-machine-assembler.h"
#include "src/compiler/schedule.h"
#include "src/frames.h"
#include "src/interface-descriptors.h"
#include "src/interpreter/bytecodes.h"
#include "src/machine-type.h"
#include "src/macro-assembler.h"
#include "src/zone.h"

namespace v8 {
namespace internal {
namespace compiler {
CodeStubAssembler::CodeStubAssembler(Isolate* isolate, Zone* zone,
                                     const CallInterfaceDescriptor& descriptor,
                                     Code::Flags flags, const char* name,
                                     size_t result_size)
    : CodeStubAssembler(
          isolate, zone,
          Linkage::GetStubCallDescriptor(
              isolate, zone, descriptor, descriptor.GetStackParameterCount(),
              CallDescriptor::kNoFlags, Operator::kNoProperties,
              MachineType::AnyTagged(), result_size),
          flags, name) {}

CodeStubAssembler::CodeStubAssembler(Isolate* isolate, Zone* zone,
                                     int parameter_count, Code::Flags flags,
                                     const char* name)
    : CodeStubAssembler(isolate, zone, Linkage::GetJSCallDescriptor(
                                           zone, false, parameter_count,
                                           CallDescriptor::kNoFlags),
                        flags, name) {}

CodeStubAssembler::CodeStubAssembler(Isolate* isolate, Zone* zone,
                                     CallDescriptor* call_descriptor,
                                     Code::Flags flags, const char* name)
    : raw_assembler_(new RawMachineAssembler(
          isolate, new (zone) Graph(zone), call_descriptor,
          MachineType::PointerRepresentation(),
          InstructionSelector::SupportedMachineOperatorFlags())),
      flags_(flags),
      name_(name),
      code_generated_(false),
      variables_(zone) {}

CodeStubAssembler::~CodeStubAssembler() {}

void CodeStubAssembler::CallPrologue() {}

void CodeStubAssembler::CallEpilogue() {}

Handle<Code> CodeStubAssembler::GenerateCode() {
  DCHECK(!code_generated_);

  Schedule* schedule = raw_assembler_->Export();
  Handle<Code> code = Pipeline::GenerateCodeForCodeStub(
      isolate(), raw_assembler_->call_descriptor(), graph(), schedule, flags_,
      name_);

  code_generated_ = true;
  return code;
}

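// A minimal usage sketch, not part of the file: a hypothetical "AddOne" stub
// that assumes {descriptor} passes a single tagged Smi argument. Build the
// graph with the helpers below, then materialize it via GenerateCode():
//
//   CodeStubAssembler m(isolate, zone, descriptor, flags, "AddOne");
//   Node* argument = m.Parameter(0);
//   m.Return(m.SmiAdd(argument, m.SmiConstant(Smi::FromInt(1))));
//   Handle<Code> code = m.GenerateCode();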

Node* CodeStubAssembler::Int32Constant(int value) {
  return raw_assembler_->Int32Constant(value);
}


Node* CodeStubAssembler::IntPtrConstant(intptr_t value) {
  return raw_assembler_->IntPtrConstant(value);
}


Node* CodeStubAssembler::NumberConstant(double value) {
  return raw_assembler_->NumberConstant(value);
}

Node* CodeStubAssembler::SmiConstant(Smi* value) {
  return IntPtrConstant(bit_cast<intptr_t>(value));
}

Node* CodeStubAssembler::HeapConstant(Handle<HeapObject> object) {
  return raw_assembler_->HeapConstant(object);
}


Node* CodeStubAssembler::BooleanConstant(bool value) {
  return raw_assembler_->BooleanConstant(value);
}

Node* CodeStubAssembler::ExternalConstant(ExternalReference address) {
  return raw_assembler_->ExternalConstant(address);
}

Node* CodeStubAssembler::Float64Constant(double value) {
  return raw_assembler_->Float64Constant(value);
}

Node* CodeStubAssembler::BooleanMapConstant() {
  return HeapConstant(isolate()->factory()->boolean_map());
}

Node* CodeStubAssembler::EmptyStringConstant() {
  return LoadRoot(Heap::kempty_stringRootIndex);
}

Node* CodeStubAssembler::HeapNumberMapConstant() {
  return HeapConstant(isolate()->factory()->heap_number_map());
}

Node* CodeStubAssembler::NaNConstant() {
  return LoadRoot(Heap::kNanValueRootIndex);
}

Node* CodeStubAssembler::NoContextConstant() {
  return SmiConstant(Smi::FromInt(0));
}

Node* CodeStubAssembler::NullConstant() {
  return LoadRoot(Heap::kNullValueRootIndex);
}

Node* CodeStubAssembler::UndefinedConstant() {
  return LoadRoot(Heap::kUndefinedValueRootIndex);
}

Node* CodeStubAssembler::Parameter(int value) {
  return raw_assembler_->Parameter(value);
}

void CodeStubAssembler::Return(Node* value) {
  return raw_assembler_->Return(value);
}

void CodeStubAssembler::Bind(CodeStubAssembler::Label* label) {
  return label->Bind();
}

Node* CodeStubAssembler::LoadFramePointer() {
  return raw_assembler_->LoadFramePointer();
}

Node* CodeStubAssembler::LoadParentFramePointer() {
  return raw_assembler_->LoadParentFramePointer();
}

Node* CodeStubAssembler::LoadStackPointer() {
  return raw_assembler_->LoadStackPointer();
}

Node* CodeStubAssembler::SmiShiftBitsConstant() {
  return IntPtrConstant(kSmiShiftSize + kSmiTagSize);
}

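// Round to the nearest integer with ties rounding towards +Infinity: compute
// the ceiling, then step back by one when the ceiling overshoots {x} by more
// than one half.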
Node* CodeStubAssembler::Float64Round(Node* x) {
  Node* one = Float64Constant(1.0);
  Node* one_half = Float64Constant(0.5);

  Variable var_x(this, MachineRepresentation::kFloat64);
  Label return_x(this);

  // Round up {x} towards Infinity.
  var_x.Bind(Float64Ceil(x));

  GotoIf(Float64LessThanOrEqual(Float64Sub(var_x.value(), one_half), x),
         &return_x);
  var_x.Bind(Float64Sub(var_x.value(), one));
  Goto(&return_x);

  Bind(&return_x);
  return var_x.value();
}

Node* CodeStubAssembler::Float64Ceil(Node* x) {
  if (raw_assembler_->machine()->Float64RoundUp().IsSupported()) {
    return raw_assembler_->Float64RoundUp(x);
  }

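  // No machine rounding instruction is available here, so compute the ceiling
  // with the 2^52 trick: for 0 < {x} < 2^52 the sum (2^52 + {x}) - 2^52 rounds
  // {x} to an integer, because doubles of magnitude >= 2^52 have no fractional
  // bits. The result is then adjusted by one wherever that rounding went the
  // wrong way for a ceiling.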
  Node* one = Float64Constant(1.0);
  Node* zero = Float64Constant(0.0);
  Node* two_52 = Float64Constant(4503599627370496.0E0);
  Node* minus_two_52 = Float64Constant(-4503599627370496.0E0);

  Variable var_x(this, MachineRepresentation::kFloat64);
  Label return_x(this), return_minus_x(this);
  var_x.Bind(x);

  // Check if {x} is greater than zero.
  Label if_xgreaterthanzero(this), if_xnotgreaterthanzero(this);
  Branch(Float64GreaterThan(x, zero), &if_xgreaterthanzero,
         &if_xnotgreaterthanzero);

  Bind(&if_xgreaterthanzero);
  {
    // Just return {x} unless it's in the range ]0,2^52[.
    GotoIf(Float64GreaterThanOrEqual(x, two_52), &return_x);

    // Round positive {x} towards Infinity.
    var_x.Bind(Float64Sub(Float64Add(two_52, x), two_52));
    GotoUnless(Float64LessThan(var_x.value(), x), &return_x);
    var_x.Bind(Float64Add(var_x.value(), one));
    Goto(&return_x);
  }

  Bind(&if_xnotgreaterthanzero);
  {
    // Just return {x} unless it's in the range ]-2^52,0[.
    GotoIf(Float64LessThanOrEqual(x, minus_two_52), &return_x);
    GotoUnless(Float64LessThan(x, zero), &return_x);

    // Round negated {x} towards Infinity and return the result negated.
    Node* minus_x = Float64Neg(x);
    var_x.Bind(Float64Sub(Float64Add(two_52, minus_x), two_52));
    GotoUnless(Float64GreaterThan(var_x.value(), minus_x), &return_minus_x);
    var_x.Bind(Float64Sub(var_x.value(), one));
    Goto(&return_minus_x);
  }

  Bind(&return_minus_x);
  var_x.Bind(Float64Neg(var_x.value()));
  Goto(&return_x);

  Bind(&return_x);
  return var_x.value();
}

Node* CodeStubAssembler::Float64Floor(Node* x) {
  if (raw_assembler_->machine()->Float64RoundDown().IsSupported()) {
    return raw_assembler_->Float64RoundDown(x);
  }

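  // Same 2^52 trick as in Float64Ceil above, with the adjustment steps
  // mirrored to round towards -Infinity instead.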
  Node* one = Float64Constant(1.0);
  Node* zero = Float64Constant(0.0);
  Node* two_52 = Float64Constant(4503599627370496.0E0);
  Node* minus_two_52 = Float64Constant(-4503599627370496.0E0);

  Variable var_x(this, MachineRepresentation::kFloat64);
  Label return_x(this), return_minus_x(this);
  var_x.Bind(x);

  // Check if {x} is greater than zero.
  Label if_xgreaterthanzero(this), if_xnotgreaterthanzero(this);
  Branch(Float64GreaterThan(x, zero), &if_xgreaterthanzero,
         &if_xnotgreaterthanzero);

  Bind(&if_xgreaterthanzero);
  {
    // Just return {x} unless it's in the range ]0,2^52[.
    GotoIf(Float64GreaterThanOrEqual(x, two_52), &return_x);

    // Round positive {x} towards -Infinity.
    var_x.Bind(Float64Sub(Float64Add(two_52, x), two_52));
    GotoUnless(Float64GreaterThan(var_x.value(), x), &return_x);
    var_x.Bind(Float64Sub(var_x.value(), one));
    Goto(&return_x);
  }

  Bind(&if_xnotgreaterthanzero);
  {
    // Just return {x} unless it's in the range ]-2^52,0[.
    GotoIf(Float64LessThanOrEqual(x, minus_two_52), &return_x);
    GotoUnless(Float64LessThan(x, zero), &return_x);

    // Round negated {x} towards -Infinity and return the result negated.
    Node* minus_x = Float64Neg(x);
    var_x.Bind(Float64Sub(Float64Add(two_52, minus_x), two_52));
    GotoUnless(Float64LessThan(var_x.value(), minus_x), &return_minus_x);
    var_x.Bind(Float64Add(var_x.value(), one));
    Goto(&return_minus_x);
  }

  Bind(&return_minus_x);
  var_x.Bind(Float64Neg(var_x.value()));
  Goto(&return_x);

  Bind(&return_x);
  return var_x.value();
}

Node* CodeStubAssembler::Float64Trunc(Node* x) {
  if (raw_assembler_->machine()->Float64RoundTruncate().IsSupported()) {
    return raw_assembler_->Float64RoundTruncate(x);
  }

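  // Truncation rounds towards zero: round down for positive {x} and up for
  // negative {x}, via the single-direction machine instructions when they are
  // available and the 2^52 trick otherwise.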
  Node* one = Float64Constant(1.0);
  Node* zero = Float64Constant(0.0);
  Node* two_52 = Float64Constant(4503599627370496.0E0);
  Node* minus_two_52 = Float64Constant(-4503599627370496.0E0);

  Variable var_x(this, MachineRepresentation::kFloat64);
  Label return_x(this), return_minus_x(this);
  var_x.Bind(x);

  // Check if {x} is greater than 0.
  Label if_xgreaterthanzero(this), if_xnotgreaterthanzero(this);
  Branch(Float64GreaterThan(x, zero), &if_xgreaterthanzero,
         &if_xnotgreaterthanzero);

  Bind(&if_xgreaterthanzero);
  {
    if (raw_assembler_->machine()->Float64RoundDown().IsSupported()) {
      var_x.Bind(raw_assembler_->Float64RoundDown(x));
    } else {
      // Just return {x} unless it's in the range ]0,2^52[.
      GotoIf(Float64GreaterThanOrEqual(x, two_52), &return_x);

      // Round positive {x} towards -Infinity.
      var_x.Bind(Float64Sub(Float64Add(two_52, x), two_52));
      GotoUnless(Float64GreaterThan(var_x.value(), x), &return_x);
      var_x.Bind(Float64Sub(var_x.value(), one));
    }
    Goto(&return_x);
  }

  Bind(&if_xnotgreaterthanzero);
  {
    if (raw_assembler_->machine()->Float64RoundUp().IsSupported()) {
      var_x.Bind(raw_assembler_->Float64RoundUp(x));
      Goto(&return_x);
    } else {
      // Just return {x} unless it's in the range ]-2^52,0[.
      GotoIf(Float64LessThanOrEqual(x, minus_two_52), &return_x);
      GotoUnless(Float64LessThan(x, zero), &return_x);

      // Round negated {x} towards -Infinity and return the result negated.
      Node* minus_x = Float64Neg(x);
      var_x.Bind(Float64Sub(Float64Add(two_52, minus_x), two_52));
      GotoUnless(Float64GreaterThan(var_x.value(), minus_x), &return_minus_x);
      var_x.Bind(Float64Sub(var_x.value(), one));
      Goto(&return_minus_x);
    }
  }

  Bind(&return_minus_x);
  var_x.Bind(Float64Neg(var_x.value()));
  Goto(&return_x);

  Bind(&return_x);
  return var_x.value();
}

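// Smis are stored as the integer value shifted left by kSmiShiftSize +
// kSmiTagSize, which leaves the Smi tag of zero in the low bits. Tagging is
// therefore a plain left shift and untagging an arithmetic right shift.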
Node* CodeStubAssembler::SmiTag(Node* value) {
  return raw_assembler_->WordShl(value, SmiShiftBitsConstant());
}

Node* CodeStubAssembler::SmiUntag(Node* value) {
  return raw_assembler_->WordSar(value, SmiShiftBitsConstant());
}

Node* CodeStubAssembler::SmiFromWord32(Node* value) {
  if (raw_assembler_->machine()->Is64()) {
    value = raw_assembler_->ChangeInt32ToInt64(value);
  }
  return raw_assembler_->WordShl(value, SmiShiftBitsConstant());
}

Node* CodeStubAssembler::SmiToWord32(Node* value) {
  Node* result = raw_assembler_->WordSar(value, SmiShiftBitsConstant());
  if (raw_assembler_->machine()->Is64()) {
    result = raw_assembler_->TruncateInt64ToInt32(result);
  }
  return result;
}

Node* CodeStubAssembler::SmiToFloat64(Node* value) {
  return ChangeInt32ToFloat64(SmiUntag(value));
}

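// Because the Smi tag bits are zero, tagged Smis can be added, subtracted and
// compared directly as word-sized integers, without untagging first.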
Node* CodeStubAssembler::SmiAdd(Node* a, Node* b) { return IntPtrAdd(a, b); }

Node* CodeStubAssembler::SmiAddWithOverflow(Node* a, Node* b) {
  return IntPtrAddWithOverflow(a, b);
}

Node* CodeStubAssembler::SmiSub(Node* a, Node* b) { return IntPtrSub(a, b); }

Node* CodeStubAssembler::SmiSubWithOverflow(Node* a, Node* b) {
  return IntPtrSubWithOverflow(a, b);
}

Node* CodeStubAssembler::SmiEqual(Node* a, Node* b) { return WordEqual(a, b); }

Node* CodeStubAssembler::SmiAboveOrEqual(Node* a, Node* b) {
  return UintPtrGreaterThanOrEqual(a, b);
}

Node* CodeStubAssembler::SmiLessThan(Node* a, Node* b) {
  return IntPtrLessThan(a, b);
}

Node* CodeStubAssembler::SmiLessThanOrEqual(Node* a, Node* b) {
  return IntPtrLessThanOrEqual(a, b);
}

Node* CodeStubAssembler::SmiMin(Node* a, Node* b) {
  // TODO(bmeurer): Consider using Select once available.
  Variable min(this, MachineRepresentation::kTagged);
  Label if_a(this), if_b(this), join(this);
  BranchIfSmiLessThan(a, b, &if_a, &if_b);
  Bind(&if_a);
  min.Bind(a);
  Goto(&join);
  Bind(&if_b);
  min.Bind(b);
  Goto(&join);
  Bind(&join);
  return min.value();
}

#define DEFINE_CODE_STUB_ASSEMBER_BINARY_OP(name)   \
  Node* CodeStubAssembler::name(Node* a, Node* b) { \
    return raw_assembler_->name(a, b);              \
  }
CODE_STUB_ASSEMBLER_BINARY_OP_LIST(DEFINE_CODE_STUB_ASSEMBER_BINARY_OP)
#undef DEFINE_CODE_STUB_ASSEMBER_BINARY_OP

Node* CodeStubAssembler::WordShl(Node* value, int shift) {
  return raw_assembler_->WordShl(value, IntPtrConstant(shift));
}

#define DEFINE_CODE_STUB_ASSEMBER_UNARY_OP(name) \
  Node* CodeStubAssembler::name(Node* a) { return raw_assembler_->name(a); }
CODE_STUB_ASSEMBLER_UNARY_OP_LIST(DEFINE_CODE_STUB_ASSEMBER_UNARY_OP)
#undef DEFINE_CODE_STUB_ASSEMBER_UNARY_OP

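// A word is a Smi exactly when its tag bits are zero; heap object pointers
// carry a non-zero tag.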
Node* CodeStubAssembler::WordIsSmi(Node* a) {
  return WordEqual(raw_assembler_->WordAnd(a, IntPtrConstant(kSmiTagMask)),
                   IntPtrConstant(0));
}

Node* CodeStubAssembler::WordIsPositiveSmi(Node* a) {
  return WordEqual(
      raw_assembler_->WordAnd(a, IntPtrConstant(kSmiTagMask | kSmiSignMask)),
      IntPtrConstant(0));
}

Node* CodeStubAssembler::LoadBufferObject(Node* buffer, int offset,
                                          MachineType rep) {
  return raw_assembler_->Load(rep, buffer, IntPtrConstant(offset));
}

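// Heap object pointers are tagged (kHeapObjectTag), so field accesses subtract
// the tag from the field offset instead of untagging the pointer itself.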
Node* CodeStubAssembler::LoadObjectField(Node* object, int offset,
                                         MachineType rep) {
  return raw_assembler_->Load(rep, object,
                              IntPtrConstant(offset - kHeapObjectTag));
}

Node* CodeStubAssembler::StoreObjectFieldNoWriteBarrier(
    Node* object, int offset, Node* value, MachineRepresentation rep) {
  return StoreNoWriteBarrier(rep, object,
                             IntPtrConstant(offset - kHeapObjectTag), value);
}

Node* CodeStubAssembler::LoadHeapNumberValue(Node* object) {
  return Load(MachineType::Float64(), object,
              IntPtrConstant(HeapNumber::kValueOffset - kHeapObjectTag));
}

Node* CodeStubAssembler::StoreHeapNumberValue(Node* object, Node* value) {
  return StoreNoWriteBarrier(
      MachineRepresentation::kFloat64, object,
      IntPtrConstant(HeapNumber::kValueOffset - kHeapObjectTag), value);
}

Node* CodeStubAssembler::TruncateHeapNumberValueToWord32(Node* object) {
  Node* value = LoadHeapNumberValue(object);
  return raw_assembler_->TruncateFloat64ToInt32(TruncationMode::kJavaScript,
                                                value);
}

Node* CodeStubAssembler::LoadMapBitField(Node* map) {
  return Load(MachineType::Uint8(), map,
              IntPtrConstant(Map::kBitFieldOffset - kHeapObjectTag));
}

Node* CodeStubAssembler::LoadMapBitField2(Node* map) {
  return Load(MachineType::Uint8(), map,
              IntPtrConstant(Map::kBitField2Offset - kHeapObjectTag));
}

Node* CodeStubAssembler::LoadMapBitField3(Node* map) {
  return Load(MachineType::Uint32(), map,
              IntPtrConstant(Map::kBitField3Offset - kHeapObjectTag));
}

Node* CodeStubAssembler::LoadMapInstanceType(Node* map) {
  return Load(MachineType::Uint8(), map,
              IntPtrConstant(Map::kInstanceTypeOffset - kHeapObjectTag));
}

Node* CodeStubAssembler::LoadMapDescriptors(Node* map) {
  return LoadObjectField(map, Map::kDescriptorsOffset);
}

Node* CodeStubAssembler::LoadNameHash(Node* name) {
  return Load(MachineType::Uint32(), name,
              IntPtrConstant(Name::kHashFieldOffset - kHeapObjectTag));
}

Node* CodeStubAssembler::LoadFixedArrayElementInt32Index(
    Node* object, Node* index, int additional_offset) {
  Node* header_size = IntPtrConstant(additional_offset +
                                     FixedArray::kHeaderSize - kHeapObjectTag);
  if (raw_assembler_->machine()->Is64()) {
    index = ChangeInt32ToInt64(index);
  }
  Node* scaled_index = WordShl(index, IntPtrConstant(kPointerSizeLog2));
  Node* offset = IntPtrAdd(scaled_index, header_size);
  return Load(MachineType::AnyTagged(), object, offset);
}

Node* CodeStubAssembler::LoadMapInstanceSize(Node* map) {
  return Load(MachineType::Uint8(), map,
              IntPtrConstant(Map::kInstanceSizeOffset - kHeapObjectTag));
}

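// A Smi index already carries an implicit left shift of kSmiShiftBits, so
// converting it into a byte offset only needs to correct the difference
// between that shift and kPointerSizeLog2.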
Node* CodeStubAssembler::LoadFixedArrayElementSmiIndex(Node* object,
                                                       Node* smi_index,
                                                       int additional_offset) {
  int const kSmiShiftBits = kSmiShiftSize + kSmiTagSize;
  Node* header_size = IntPtrConstant(additional_offset +
                                     FixedArray::kHeaderSize - kHeapObjectTag);
  Node* scaled_index =
      (kSmiShiftBits > kPointerSizeLog2)
          ? WordSar(smi_index, IntPtrConstant(kSmiShiftBits - kPointerSizeLog2))
          : WordShl(smi_index,
                    IntPtrConstant(kPointerSizeLog2 - kSmiShiftBits));
  Node* offset = IntPtrAdd(scaled_index, header_size);
  return Load(MachineType::AnyTagged(), object, offset);
}

Node* CodeStubAssembler::LoadFixedArrayElementConstantIndex(Node* object,
                                                            int index) {
  Node* offset = IntPtrConstant(FixedArray::kHeaderSize - kHeapObjectTag +
                                index * kPointerSize);
  return raw_assembler_->Load(MachineType::AnyTagged(), object, offset);
}

Node* CodeStubAssembler::StoreFixedArrayElementNoWriteBarrier(Node* object,
                                                              Node* index,
                                                              Node* value) {
  Node* offset =
      IntPtrAdd(WordShl(index, IntPtrConstant(kPointerSizeLog2)),
                IntPtrConstant(FixedArray::kHeaderSize - kHeapObjectTag));
  return StoreNoWriteBarrier(MachineRepresentation::kTagged, object, offset,
                             value);
}

Node* CodeStubAssembler::StoreFixedArrayElementInt32Index(Node* object,
                                                          Node* index,
                                                          Node* value) {
  if (raw_assembler_->machine()->Is64()) {
    index = ChangeInt32ToInt64(index);
  }
  Node* offset =
      IntPtrAdd(WordShl(index, IntPtrConstant(kPointerSizeLog2)),
                IntPtrConstant(FixedArray::kHeaderSize - kHeapObjectTag));
  return Store(MachineRepresentation::kTagged, object, offset, value);
}

Node* CodeStubAssembler::LoadRoot(Heap::RootListIndex root_index) {
  if (isolate()->heap()->RootCanBeTreatedAsConstant(root_index)) {
    Handle<Object> root = isolate()->heap()->root_handle(root_index);
    if (root->IsSmi()) {
      return SmiConstant(Smi::cast(*root));
    } else {
      return HeapConstant(Handle<HeapObject>::cast(root));
    }
  }

  compiler::Node* roots_array_start =
      ExternalConstant(ExternalReference::roots_array_start(isolate()));
  USE(roots_array_start);

  // TODO(danno): Implement the root-access case where the root is not
  // constant and must be loaded from the root array.
  UNIMPLEMENTED();
  return nullptr;
}

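// Bump-pointer allocation: when the requested size fits between the current
// top and limit of the space, the object is carved off at top and top is
// advanced; otherwise the allocation falls back to the runtime.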
Node* CodeStubAssembler::AllocateRawUnaligned(Node* size_in_bytes,
                                              AllocationFlags flags,
                                              Node* top_address,
                                              Node* limit_address) {
  Node* top = Load(MachineType::Pointer(), top_address);
  Node* limit = Load(MachineType::Pointer(), limit_address);

  // If there's not enough space, call the runtime.
  RawMachineLabel runtime_call(RawMachineLabel::kDeferred), no_runtime_call,
      merge_runtime;
  raw_assembler_->Branch(
      raw_assembler_->IntPtrLessThan(IntPtrSub(limit, top), size_in_bytes),
      &runtime_call, &no_runtime_call);

  raw_assembler_->Bind(&runtime_call);
  // AllocateInTargetSpace does not use the context.
  Node* context = IntPtrConstant(0);
  Node* runtime_flags = SmiTag(Int32Constant(
      AllocateDoubleAlignFlag::encode(false) |
      AllocateTargetSpace::encode(flags & kPretenured
                                      ? AllocationSpace::OLD_SPACE
                                      : AllocationSpace::NEW_SPACE)));
  Node* runtime_result = CallRuntime(Runtime::kAllocateInTargetSpace, context,
                                     SmiTag(size_in_bytes), runtime_flags);
  raw_assembler_->Goto(&merge_runtime);

  // When there is enough space, return {top} and bump it up.
  raw_assembler_->Bind(&no_runtime_call);
  Node* no_runtime_result = top;
  StoreNoWriteBarrier(MachineType::PointerRepresentation(), top_address,
                      IntPtrAdd(top, size_in_bytes));
  no_runtime_result =
      IntPtrAdd(no_runtime_result, IntPtrConstant(kHeapObjectTag));
  raw_assembler_->Goto(&merge_runtime);

  raw_assembler_->Bind(&merge_runtime);
  return raw_assembler_->Phi(MachineType::PointerRepresentation(),
                             runtime_result, no_runtime_result);
}

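// For double-aligned allocations the size is padded by one pointer-sized word
// when top is misaligned, and the unused word is plugged with a one-pointer
// filler map so that the heap stays iterable.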
Node* CodeStubAssembler::AllocateRawAligned(Node* size_in_bytes,
                                            AllocationFlags flags,
                                            Node* top_address,
                                            Node* limit_address) {
  Node* top = Load(MachineType::Pointer(), top_address);
  Node* limit = Load(MachineType::Pointer(), limit_address);
  Node* adjusted_size = size_in_bytes;
  if (flags & kDoubleAlignment) {
    // TODO(epertoso): Simd128 alignment.
    RawMachineLabel aligned, not_aligned, merge;
    raw_assembler_->Branch(WordAnd(top, IntPtrConstant(kDoubleAlignmentMask)),
                           &not_aligned, &aligned);

    raw_assembler_->Bind(&not_aligned);
    Node* not_aligned_size =
        IntPtrAdd(size_in_bytes, IntPtrConstant(kPointerSize));
    raw_assembler_->Goto(&merge);

    raw_assembler_->Bind(&aligned);
    raw_assembler_->Goto(&merge);

    raw_assembler_->Bind(&merge);
    adjusted_size = raw_assembler_->Phi(MachineType::PointerRepresentation(),
                                        not_aligned_size, adjusted_size);
  }

  Node* address = AllocateRawUnaligned(adjusted_size, kNone, top, limit);

  RawMachineLabel needs_filler, doesnt_need_filler, merge_address;
  raw_assembler_->Branch(
      raw_assembler_->IntPtrEqual(adjusted_size, size_in_bytes),
      &doesnt_need_filler, &needs_filler);

  raw_assembler_->Bind(&needs_filler);
  // Store a filler and increase the address by kPointerSize.
  // TODO(epertoso): this code assumes that we only align to kDoubleSize.
  // Change it when Simd128 alignment is supported.
  StoreNoWriteBarrier(MachineType::PointerRepresentation(), top,
                      LoadRoot(Heap::kOnePointerFillerMapRootIndex));
  Node* address_with_filler = IntPtrAdd(address, IntPtrConstant(kPointerSize));
  raw_assembler_->Goto(&merge_address);

  raw_assembler_->Bind(&doesnt_need_filler);
  Node* address_without_filler = address;
  raw_assembler_->Goto(&merge_address);

  raw_assembler_->Bind(&merge_address);
  address = raw_assembler_->Phi(MachineType::PointerRepresentation(),
                                address_with_filler, address_without_filler);
  // Update the top.
  StoreNoWriteBarrier(MachineType::PointerRepresentation(), top_address,
                      IntPtrAdd(top, adjusted_size));
  return address;
}

Node* CodeStubAssembler::Allocate(int size_in_bytes, AllocationFlags flags) {
  bool const new_space = !(flags & kPretenured);
  Node* top_address = ExternalConstant(
      new_space
          ? ExternalReference::new_space_allocation_top_address(isolate())
          : ExternalReference::old_space_allocation_top_address(isolate()));
  Node* limit_address = ExternalConstant(
      new_space
          ? ExternalReference::new_space_allocation_limit_address(isolate())
          : ExternalReference::old_space_allocation_limit_address(isolate()));

#ifdef V8_HOST_ARCH_32_BIT
  if (flags & kDoubleAlignment) {
    return AllocateRawAligned(IntPtrConstant(size_in_bytes), flags, top_address,
                              limit_address);
  }
#endif

  return AllocateRawUnaligned(IntPtrConstant(size_in_bytes), flags, top_address,
                              limit_address);
}

Node* CodeStubAssembler::InnerAllocate(Node* previous, int offset) {
  return IntPtrAdd(previous, IntPtrConstant(offset));
}

Node* CodeStubAssembler::AllocateHeapNumber() {
  Node* result = Allocate(HeapNumber::kSize, kNone);
  StoreMapNoWriteBarrier(result, HeapNumberMapConstant());
  return result;
}

Node* CodeStubAssembler::AllocateHeapNumberWithValue(Node* value) {
  Node* result = AllocateHeapNumber();
  StoreHeapNumberValue(result, value);
  return result;
}

Node* CodeStubAssembler::AllocateSeqOneByteString(int length) {
  Node* result = Allocate(SeqOneByteString::SizeFor(length));
  StoreMapNoWriteBarrier(result, LoadRoot(Heap::kOneByteStringMapRootIndex));
  StoreObjectFieldNoWriteBarrier(result, SeqOneByteString::kLengthOffset,
                                 SmiConstant(Smi::FromInt(length)));
  StoreObjectFieldNoWriteBarrier(result, SeqOneByteString::kHashFieldOffset,
                                 IntPtrConstant(String::kEmptyHashField));
  return result;
}

Node* CodeStubAssembler::AllocateSeqTwoByteString(int length) {
  Node* result = Allocate(SeqTwoByteString::SizeFor(length));
  StoreMapNoWriteBarrier(result, LoadRoot(Heap::kStringMapRootIndex));
  StoreObjectFieldNoWriteBarrier(result, SeqTwoByteString::kLengthOffset,
                                 SmiConstant(Smi::FromInt(length)));
  StoreObjectFieldNoWriteBarrier(result, SeqTwoByteString::kHashFieldOffset,
                                 IntPtrConstant(String::kEmptyHashField));
  return result;
}

Node* CodeStubAssembler::Load(MachineType rep, Node* base) {
  return raw_assembler_->Load(rep, base);
}

Node* CodeStubAssembler::Load(MachineType rep, Node* base, Node* index) {
  return raw_assembler_->Load(rep, base, index);
}

Node* CodeStubAssembler::Store(MachineRepresentation rep, Node* base,
                               Node* value) {
  return raw_assembler_->Store(rep, base, value, kFullWriteBarrier);
}

Node* CodeStubAssembler::Store(MachineRepresentation rep, Node* base,
                               Node* index, Node* value) {
  return raw_assembler_->Store(rep, base, index, value, kFullWriteBarrier);
}

Node* CodeStubAssembler::StoreNoWriteBarrier(MachineRepresentation rep,
                                             Node* base, Node* value) {
  return raw_assembler_->Store(rep, base, value, kNoWriteBarrier);
}

Node* CodeStubAssembler::StoreNoWriteBarrier(MachineRepresentation rep,
                                             Node* base, Node* index,
                                             Node* value) {
  return raw_assembler_->Store(rep, base, index, value, kNoWriteBarrier);
}

Node* CodeStubAssembler::Projection(int index, Node* value) {
  return raw_assembler_->Projection(index, value);
}

Node* CodeStubAssembler::LoadMap(Node* object) {
  return LoadObjectField(object, HeapObject::kMapOffset);
}

Node* CodeStubAssembler::StoreMapNoWriteBarrier(Node* object, Node* map) {
  return StoreNoWriteBarrier(
      MachineRepresentation::kTagged, object,
      IntPtrConstant(HeapNumber::kMapOffset - kHeapObjectTag), map);
}

Node* CodeStubAssembler::LoadInstanceType(Node* object) {
  return LoadMapInstanceType(LoadMap(object));
}

Node* CodeStubAssembler::LoadElements(Node* object) {
  return LoadObjectField(object, JSObject::kElementsOffset);
}

Node* CodeStubAssembler::LoadFixedArrayBaseLength(Node* array) {
  return LoadObjectField(array, FixedArrayBase::kLengthOffset);
}

Node* CodeStubAssembler::BitFieldDecode(Node* word32, uint32_t shift,
                                        uint32_t mask) {
  return raw_assembler_->Word32Shr(
      raw_assembler_->Word32And(word32, raw_assembler_->Int32Constant(mask)),
      raw_assembler_->Int32Constant(shift));
}

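// Tagging a float64: the value becomes a Smi when it round-trips losslessly
// through int32 (and is not minus zero, which is detected via the sign bit in
// the upper word); any other value is boxed in a fresh HeapNumber.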
Node* CodeStubAssembler::ChangeFloat64ToTagged(Node* value) {
  Node* value32 = raw_assembler_->TruncateFloat64ToInt32(
      TruncationMode::kRoundToZero, value);
  Node* value64 = ChangeInt32ToFloat64(value32);

  Label if_valueisint32(this), if_valueisheapnumber(this), if_join(this);

  Label if_valueisequal(this), if_valueisnotequal(this);
  Branch(Float64Equal(value, value64), &if_valueisequal, &if_valueisnotequal);
  Bind(&if_valueisequal);
  {
    Label if_valueiszero(this), if_valueisnotzero(this);
    Branch(Float64Equal(value, Float64Constant(0.0)), &if_valueiszero,
           &if_valueisnotzero);

    Bind(&if_valueiszero);
    BranchIfInt32LessThan(raw_assembler_->Float64ExtractHighWord32(value),
                          Int32Constant(0), &if_valueisheapnumber,
                          &if_valueisint32);

    Bind(&if_valueisnotzero);
    Goto(&if_valueisint32);
  }
  Bind(&if_valueisnotequal);
  Goto(&if_valueisheapnumber);

  Variable var_result(this, MachineRepresentation::kTagged);
  Bind(&if_valueisint32);
  {
    if (raw_assembler_->machine()->Is64()) {
      Node* result = SmiTag(ChangeInt32ToInt64(value32));
      var_result.Bind(result);
      Goto(&if_join);
    } else {
      Node* pair = Int32AddWithOverflow(value32, value32);
      Node* overflow = Projection(1, pair);
      Label if_overflow(this, Label::kDeferred), if_notoverflow(this);
      Branch(overflow, &if_overflow, &if_notoverflow);
      Bind(&if_overflow);
      Goto(&if_valueisheapnumber);
      Bind(&if_notoverflow);
      {
        Node* result = Projection(0, pair);
        var_result.Bind(result);
        Goto(&if_join);
      }
    }
  }
  Bind(&if_valueisheapnumber);
  {
    Node* result = AllocateHeapNumberWithValue(value);
    var_result.Bind(result);
    Goto(&if_join);
  }
  Bind(&if_join);
  return var_result.value();
}

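// On 32-bit platforms, adding {value} to itself performs the Smi tag shift by
// one bit and checks for overflow in a single operation.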
Node* CodeStubAssembler::ChangeInt32ToTagged(Node* value) {
  if (raw_assembler_->machine()->Is64()) {
    return SmiTag(ChangeInt32ToInt64(value));
  }
  Variable var_result(this, MachineRepresentation::kTagged);
  Node* pair = Int32AddWithOverflow(value, value);
  Node* overflow = Projection(1, pair);
  Label if_overflow(this, Label::kDeferred), if_notoverflow(this),
      if_join(this);
  Branch(overflow, &if_overflow, &if_notoverflow);
  Bind(&if_overflow);
  {
    Node* value64 = ChangeInt32ToFloat64(value);
    Node* result = AllocateHeapNumberWithValue(value64);
    var_result.Bind(result);
  }
  Goto(&if_join);
  Bind(&if_notoverflow);
  {
    Node* result = Projection(0, pair);
    var_result.Bind(result);
  }
  Goto(&if_join);
  Bind(&if_join);
  return var_result.value();
}

Node* CodeStubAssembler::ChangeUint32ToTagged(Node* value) {
  Label if_overflow(this, Label::kDeferred), if_not_overflow(this),
      if_join(this);
  Variable var_result(this, MachineRepresentation::kTagged);
  // If {value} > 2^31 - 1, we need to store it in a HeapNumber.
  Branch(Int32LessThan(value, Int32Constant(0)), &if_overflow,
         &if_not_overflow);
  Bind(&if_not_overflow);
  {
    if (raw_assembler_->machine()->Is64()) {
      var_result.Bind(SmiTag(ChangeUint32ToUint64(value)));
    } else {
      // If tagging {value} results in an overflow, we need to use a HeapNumber
      // to represent it.
      Node* pair = Int32AddWithOverflow(value, value);
      Node* overflow = Projection(1, pair);
      GotoIf(overflow, &if_overflow);

      Node* result = Projection(0, pair);
      var_result.Bind(result);
    }
  }
  Goto(&if_join);

  Bind(&if_overflow);
  {
    Node* float64_value = ChangeUint32ToFloat64(value);
    var_result.Bind(AllocateHeapNumberWithValue(float64_value));
  }
  Goto(&if_join);

  Bind(&if_join);
  return var_result.value();
}

Node* CodeStubAssembler::TruncateTaggedToFloat64(Node* context, Node* value) {
  // We might need to loop once due to ToNumber conversion.
  Variable var_value(this, MachineRepresentation::kTagged),
      var_result(this, MachineRepresentation::kFloat64);
  Label loop(this, &var_value), done_loop(this, &var_result);
  var_value.Bind(value);
  Goto(&loop);
  Bind(&loop);
  {
    // Load the current {value}.
    value = var_value.value();

    // Check if the {value} is a Smi or a HeapObject.
    Label if_valueissmi(this), if_valueisnotsmi(this);
    Branch(WordIsSmi(value), &if_valueissmi, &if_valueisnotsmi);

    Bind(&if_valueissmi);
    {
      // Convert the Smi {value}.
      var_result.Bind(SmiToFloat64(value));
      Goto(&done_loop);
    }

    Bind(&if_valueisnotsmi);
    {
      // Check if {value} is a HeapNumber.
      Label if_valueisheapnumber(this),
          if_valueisnotheapnumber(this, Label::kDeferred);
      Branch(WordEqual(LoadMap(value), HeapNumberMapConstant()),
             &if_valueisheapnumber, &if_valueisnotheapnumber);

      Bind(&if_valueisheapnumber);
      {
        // Load the floating point value.
        var_result.Bind(LoadHeapNumberValue(value));
        Goto(&done_loop);
      }

      Bind(&if_valueisnotheapnumber);
      {
        // Convert the {value} to a Number first.
        Callable callable = CodeFactory::NonNumberToNumber(isolate());
        var_value.Bind(CallStub(callable, context, value));
        Goto(&loop);
      }
    }
  }
  Bind(&done_loop);
  return var_result.value();
}

Node* CodeStubAssembler::TruncateTaggedToWord32(Node* context, Node* value) {
  // We might need to loop once due to ToNumber conversion.
  Variable var_value(this, MachineRepresentation::kTagged),
      var_result(this, MachineRepresentation::kWord32);
  Label loop(this, &var_value), done_loop(this, &var_result);
  var_value.Bind(value);
  Goto(&loop);
  Bind(&loop);
  {
    // Load the current {value}.
    value = var_value.value();

    // Check if the {value} is a Smi or a HeapObject.
    Label if_valueissmi(this), if_valueisnotsmi(this);
    Branch(WordIsSmi(value), &if_valueissmi, &if_valueisnotsmi);

    Bind(&if_valueissmi);
    {
      // Convert the Smi {value}.
      var_result.Bind(SmiToWord32(value));
      Goto(&done_loop);
    }

    Bind(&if_valueisnotsmi);
    {
      // Check if {value} is a HeapNumber.
      Label if_valueisheapnumber(this),
          if_valueisnotheapnumber(this, Label::kDeferred);
      Branch(WordEqual(LoadMap(value), HeapNumberMapConstant()),
             &if_valueisheapnumber, &if_valueisnotheapnumber);

      Bind(&if_valueisheapnumber);
      {
        // Truncate the floating point value.
        var_result.Bind(TruncateHeapNumberValueToWord32(value));
        Goto(&done_loop);
      }

      Bind(&if_valueisnotheapnumber);
      {
        // Convert the {value} to a Number first.
        Callable callable = CodeFactory::NonNumberToNumber(isolate());
        var_value.Bind(CallStub(callable, context, value));
        Goto(&loop);
      }
    }
  }
  Bind(&done_loop);
  return var_result.value();
}

Node* CodeStubAssembler::ToThisString(Node* context, Node* value,
                                      char const* method_name) {
  Variable var_value(this, MachineRepresentation::kTagged);
  var_value.Bind(value);

  // Check if the {value} is a Smi or a HeapObject.
  Label if_valueissmi(this, Label::kDeferred), if_valueisnotsmi(this),
      if_valueisstring(this);
  Branch(WordIsSmi(value), &if_valueissmi, &if_valueisnotsmi);
  Bind(&if_valueisnotsmi);
  {
    // Load the instance type of the {value}.
    Node* value_instance_type = LoadInstanceType(value);

    // Check if the {value} is already a String.
    Label if_valueisnotstring(this, Label::kDeferred);
    Branch(
        Int32LessThan(value_instance_type, Int32Constant(FIRST_NONSTRING_TYPE)),
        &if_valueisstring, &if_valueisnotstring);
    Bind(&if_valueisnotstring);
    {
      // Check if the {value} is null.
      Label if_valueisnullorundefined(this, Label::kDeferred),
          if_valueisnotnullorundefined(this, Label::kDeferred),
          if_valueisnotnull(this, Label::kDeferred);
      Branch(WordEqual(value, NullConstant()), &if_valueisnullorundefined,
             &if_valueisnotnull);
      Bind(&if_valueisnotnull);
      {
        // Check if the {value} is undefined.
        Branch(WordEqual(value, UndefinedConstant()),
               &if_valueisnullorundefined, &if_valueisnotnullorundefined);
        Bind(&if_valueisnotnullorundefined);
        {
          // Convert the {value} to a String.
          Callable callable = CodeFactory::ToString(isolate());
          var_value.Bind(CallStub(callable, context, value));
          Goto(&if_valueisstring);
        }
      }

      Bind(&if_valueisnullorundefined);
      {
        // The {value} is either null or undefined.
        CallRuntime(Runtime::kThrowCalledOnNullOrUndefined, context,
                    HeapConstant(factory()->NewStringFromAsciiChecked(
                        method_name, TENURED)));
        Goto(&if_valueisstring);  // Never reached.
      }
    }
  }
  Bind(&if_valueissmi);
  {
    // The {value} is a Smi, convert it to a String.
    Callable callable = CodeFactory::NumberToString(isolate());
    var_value.Bind(CallStub(callable, context, value));
    Goto(&if_valueisstring);
  }
  Bind(&if_valueisstring);
  return var_value.value();
}

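// Strings come in several representations (sequential, cons, sliced and
// external, each in one- and two-byte flavours). The loop below peels off one
// indirection at a time until the character can be read directly, or calls
// into the runtime where it cannot.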
Node* CodeStubAssembler::StringCharCodeAt(Node* string, Node* index) {
  // Translate the {index} into a Word.
  index = SmiToWord(index);

  // We may need to loop in case of cons or sliced strings.
  Variable var_index(this, MachineType::PointerRepresentation());
  Variable var_result(this, MachineRepresentation::kWord32);
  Variable var_string(this, MachineRepresentation::kTagged);
  Variable* loop_vars[] = {&var_index, &var_string};
  Label done_loop(this, &var_result), loop(this, 2, loop_vars);
  var_string.Bind(string);
  var_index.Bind(index);
  Goto(&loop);
  Bind(&loop);
  {
    // Load the current {index}.
    index = var_index.value();

    // Load the current {string}.
    string = var_string.value();

    // Load the instance type of the {string}.
    Node* string_instance_type = LoadInstanceType(string);

    // Check if the {string} is a SeqString.
    Label if_stringissequential(this), if_stringisnotsequential(this);
    Branch(Word32Equal(Word32And(string_instance_type,
                                 Int32Constant(kStringRepresentationMask)),
                       Int32Constant(kSeqStringTag)),
           &if_stringissequential, &if_stringisnotsequential);

    Bind(&if_stringissequential);
    {
      // Check if the {string} is a TwoByteSeqString or a OneByteSeqString.
      Label if_stringistwobyte(this), if_stringisonebyte(this);
      Branch(Word32Equal(Word32And(string_instance_type,
                                   Int32Constant(kStringEncodingMask)),
                         Int32Constant(kTwoByteStringTag)),
             &if_stringistwobyte, &if_stringisonebyte);

      Bind(&if_stringisonebyte);
      {
        var_result.Bind(
            Load(MachineType::Uint8(), string,
                 IntPtrAdd(index, IntPtrConstant(SeqOneByteString::kHeaderSize -
                                                 kHeapObjectTag))));
        Goto(&done_loop);
      }

      Bind(&if_stringistwobyte);
      {
        var_result.Bind(
            Load(MachineType::Uint16(), string,
                 IntPtrAdd(WordShl(index, IntPtrConstant(1)),
                           IntPtrConstant(SeqTwoByteString::kHeaderSize -
                                          kHeapObjectTag))));
        Goto(&done_loop);
      }
    }

    Bind(&if_stringisnotsequential);
    {
      // Check if the {string} is a ConsString.
      Label if_stringiscons(this), if_stringisnotcons(this);
      Branch(Word32Equal(Word32And(string_instance_type,
                                   Int32Constant(kStringRepresentationMask)),
                         Int32Constant(kConsStringTag)),
             &if_stringiscons, &if_stringisnotcons);

      Bind(&if_stringiscons);
      {
        // Check whether the right hand side is the empty string (i.e. if
        // this is really a flat string in a cons string). If that is not
        // the case we flatten the string first.
        Label if_rhsisempty(this), if_rhsisnotempty(this, Label::kDeferred);
        Node* rhs = LoadObjectField(string, ConsString::kSecondOffset);
        Branch(WordEqual(rhs, EmptyStringConstant()), &if_rhsisempty,
               &if_rhsisnotempty);

        Bind(&if_rhsisempty);
        {
          // Just operate on the left hand side of the {string}.
          var_string.Bind(LoadObjectField(string, ConsString::kFirstOffset));
          Goto(&loop);
        }

        Bind(&if_rhsisnotempty);
        {
          // Flatten the {string} and lookup in the resulting string.
          var_string.Bind(CallRuntime(Runtime::kFlattenString,
                                      NoContextConstant(), string));
          Goto(&loop);
        }
      }

      Bind(&if_stringisnotcons);
      {
        // Check if the {string} is an ExternalString.
        Label if_stringisexternal(this), if_stringisnotexternal(this);
        Branch(Word32Equal(Word32And(string_instance_type,
                                     Int32Constant(kStringRepresentationMask)),
                           Int32Constant(kExternalStringTag)),
               &if_stringisexternal, &if_stringisnotexternal);

        Bind(&if_stringisexternal);
        {
          // Check if the {string} is a short external string.
          Label if_stringisshort(this),
              if_stringisnotshort(this, Label::kDeferred);
          Branch(Word32Equal(Word32And(string_instance_type,
                                       Int32Constant(kShortExternalStringMask)),
                             Int32Constant(0)),
                 &if_stringisshort, &if_stringisnotshort);

          Bind(&if_stringisshort);
          {
            // Load the actual resource data from the {string}.
            Node* string_resource_data =
                LoadObjectField(string, ExternalString::kResourceDataOffset,
                                MachineType::Pointer());

            // Check if the {string} is a TwoByteExternalString or a
            // OneByteExternalString.
            Label if_stringistwobyte(this), if_stringisonebyte(this);
            Branch(Word32Equal(Word32And(string_instance_type,
                                         Int32Constant(kStringEncodingMask)),
                               Int32Constant(kTwoByteStringTag)),
                   &if_stringistwobyte, &if_stringisonebyte);

            Bind(&if_stringisonebyte);
            {
              var_result.Bind(
                  Load(MachineType::Uint8(), string_resource_data, index));
              Goto(&done_loop);
            }

            Bind(&if_stringistwobyte);
            {
              var_result.Bind(Load(MachineType::Uint16(), string_resource_data,
                                   WordShl(index, IntPtrConstant(1))));
              Goto(&done_loop);
            }
          }

          Bind(&if_stringisnotshort);
          {
            // The {string} might be compressed, call the runtime.
            var_result.Bind(SmiToWord32(
                CallRuntime(Runtime::kExternalStringGetChar,
                            NoContextConstant(), string, SmiTag(index))));
            Goto(&done_loop);
          }
        }

        Bind(&if_stringisnotexternal);
        {
          // The {string} is a SlicedString, continue with its parent.
          Node* string_offset =
              SmiToWord(LoadObjectField(string, SlicedString::kOffsetOffset));
          Node* string_parent =
              LoadObjectField(string, SlicedString::kParentOffset);
          var_index.Bind(IntPtrAdd(index, string_offset));
          var_string.Bind(string_parent);
          Goto(&loop);
        }
      }
    }
  }

  Bind(&done_loop);
  return var_result.value();
}

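// Single-character strings for one-byte char codes are shared through the
// isolate's single character string cache; two-byte char codes always
// allocate a fresh string.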
Node* CodeStubAssembler::StringFromCharCode(Node* code) {
  Variable var_result(this, MachineRepresentation::kTagged);

  // Check if the {code} is a one-byte char code.
  Label if_codeisonebyte(this), if_codeistwobyte(this, Label::kDeferred),
      if_done(this);
  Branch(Int32LessThanOrEqual(code, Int32Constant(String::kMaxOneByteCharCode)),
         &if_codeisonebyte, &if_codeistwobyte);
  Bind(&if_codeisonebyte);
  {
    // Load the isolate wide single character string cache.
    Node* cache = LoadRoot(Heap::kSingleCharacterStringCacheRootIndex);

    // Check if we have an entry for the {code} in the single character string
    // cache already.
    Label if_entryisundefined(this, Label::kDeferred),
        if_entryisnotundefined(this);
    Node* entry = LoadFixedArrayElementInt32Index(cache, code);
    Branch(WordEqual(entry, UndefinedConstant()), &if_entryisundefined,
           &if_entryisnotundefined);

    Bind(&if_entryisundefined);
    {
      // Allocate a new SeqOneByteString for {code} and store it in the
      // {cache}.
      Node* result = AllocateSeqOneByteString(1);
      StoreNoWriteBarrier(
          MachineRepresentation::kWord8, result,
          IntPtrConstant(SeqOneByteString::kHeaderSize - kHeapObjectTag), code);
      StoreFixedArrayElementInt32Index(cache, code, result);
      var_result.Bind(result);
      Goto(&if_done);
    }

    Bind(&if_entryisnotundefined);
    {
      // Return the entry from the {cache}.
      var_result.Bind(entry);
      Goto(&if_done);
    }
  }

  Bind(&if_codeistwobyte);
  {
    // Allocate a new SeqTwoByteString for {code}.
    Node* result = AllocateSeqTwoByteString(1);
    StoreNoWriteBarrier(
        MachineRepresentation::kWord16, result,
        IntPtrConstant(SeqTwoByteString::kHeaderSize - kHeapObjectTag), code);
    var_result.Bind(result);
    Goto(&if_done);
  }

  Bind(&if_done);
  return var_result.value();
}

Node* CodeStubAssembler::TruncateFloat64ToInt32(Node* value) {
  return raw_assembler_->TruncateFloat64ToInt32(TruncationMode::kJavaScript,
                                                value);
}

void CodeStubAssembler::BranchIf(Node* condition, Label* if_true,
                                 Label* if_false) {
  Label if_condition_is_true(this), if_condition_is_false(this);
  Branch(condition, &if_condition_is_true, &if_condition_is_false);
  Bind(&if_condition_is_true);
  Goto(if_true);
  Bind(&if_condition_is_false);
  Goto(if_false);
}

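// Calls are bracketed by the CallPrologue/CallEpilogue hooks (no-ops here) so
// that subclasses can override them to save and restore additional state
// around calls; tail calls bypass the hooks since they do not return here.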
Node* CodeStubAssembler::CallN(CallDescriptor* descriptor, Node* code_target,
                               Node** args) {
  CallPrologue();
  Node* return_value = raw_assembler_->CallN(descriptor, code_target, args);
  CallEpilogue();
  return return_value;
}


Node* CodeStubAssembler::TailCallN(CallDescriptor* descriptor,
                                   Node* code_target, Node** args) {
  return raw_assembler_->TailCallN(descriptor, code_target, args);
}

Node* CodeStubAssembler::CallRuntime(Runtime::FunctionId function_id,
                                     Node* context) {
  CallPrologue();
  Node* return_value = raw_assembler_->CallRuntime0(function_id, context);
  CallEpilogue();
  return return_value;
}

Node* CodeStubAssembler::CallRuntime(Runtime::FunctionId function_id,
                                     Node* context, Node* arg1) {
  CallPrologue();
  Node* return_value = raw_assembler_->CallRuntime1(function_id, arg1, context);
  CallEpilogue();
  return return_value;
}

Node* CodeStubAssembler::CallRuntime(Runtime::FunctionId function_id,
                                     Node* context, Node* arg1, Node* arg2) {
  CallPrologue();
  Node* return_value =
      raw_assembler_->CallRuntime2(function_id, arg1, arg2, context);
  CallEpilogue();
  return return_value;
}

Node* CodeStubAssembler::CallRuntime(Runtime::FunctionId function_id,
                                     Node* context, Node* arg1, Node* arg2,
                                     Node* arg3) {
  CallPrologue();
  Node* return_value =
      raw_assembler_->CallRuntime3(function_id, arg1, arg2, arg3, context);
  CallEpilogue();
  return return_value;
}

Node* CodeStubAssembler::CallRuntime(Runtime::FunctionId function_id,
                                     Node* context, Node* arg1, Node* arg2,
                                     Node* arg3, Node* arg4) {
  CallPrologue();
  Node* return_value = raw_assembler_->CallRuntime4(function_id, arg1, arg2,
                                                    arg3, arg4, context);
  CallEpilogue();
  return return_value;
}

Node* CodeStubAssembler::TailCallRuntime(Runtime::FunctionId function_id,
                                         Node* context) {
  return raw_assembler_->TailCallRuntime0(function_id, context);
}

Node* CodeStubAssembler::TailCallRuntime(Runtime::FunctionId function_id,
                                         Node* context, Node* arg1) {
  return raw_assembler_->TailCallRuntime1(function_id, arg1, context);
}

Node* CodeStubAssembler::TailCallRuntime(Runtime::FunctionId function_id,
                                         Node* context, Node* arg1,
                                         Node* arg2) {
  return raw_assembler_->TailCallRuntime2(function_id, arg1, arg2, context);
}

Node* CodeStubAssembler::TailCallRuntime(Runtime::FunctionId function_id,
                                         Node* context, Node* arg1, Node* arg2,
                                         Node* arg3) {
  return raw_assembler_->TailCallRuntime3(function_id, arg1, arg2, arg3,
                                          context);
}

Node* CodeStubAssembler::TailCallRuntime(Runtime::FunctionId function_id,
                                         Node* context, Node* arg1, Node* arg2,
                                         Node* arg3, Node* arg4) {
  return raw_assembler_->TailCallRuntime4(function_id, arg1, arg2, arg3, arg4,
                                          context);
}

Node* CodeStubAssembler::CallStub(Callable const& callable, Node* context,
                                  Node* arg1, size_t result_size) {
  Node* target = HeapConstant(callable.code());
  return CallStub(callable.descriptor(), target, context, arg1, result_size);
}

Node* CodeStubAssembler::CallStub(Callable const& callable, Node* context,
                                  Node* arg1, Node* arg2, size_t result_size) {
  Node* target = HeapConstant(callable.code());
  return CallStub(callable.descriptor(), target, context, arg1, arg2,
                  result_size);
}

Node* CodeStubAssembler::CallStub(Callable const& callable, Node* context,
                                  Node* arg1, Node* arg2, Node* arg3,
                                  size_t result_size) {
  Node* target = HeapConstant(callable.code());
  return CallStub(callable.descriptor(), target, context, arg1, arg2, arg3,
                  result_size);
}

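// For the descriptor-based CallStub/TailCallStub variants below, the
// arguments are packed into an array with the context as the trailing entry,
// matching the layout expected by the stub call descriptor.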
1448 Node* CodeStubAssembler::CallStub(const CallInterfaceDescriptor& descriptor, | |
1449 Node* target, Node* context, Node* arg1, | |
1450 size_t result_size) { | |
1451 CallDescriptor* call_descriptor = Linkage::GetStubCallDescriptor( | |
1452 isolate(), zone(), descriptor, descriptor.GetStackParameterCount(), | |
1453 CallDescriptor::kNoFlags, Operator::kNoProperties, | |
1454 MachineType::AnyTagged(), result_size); | |
1455 | |
1456 Node** args = zone()->NewArray<Node*>(2); | |
1457 args[0] = arg1; | |
1458 args[1] = context; | |
1459 | |
1460 return CallN(call_descriptor, target, args); | |
1461 } | |
1462 | |
1463 Node* CodeStubAssembler::CallStub(const CallInterfaceDescriptor& descriptor, | |
1464 Node* target, Node* context, Node* arg1, | |
1465 Node* arg2, size_t result_size) { | |
1466 CallDescriptor* call_descriptor = Linkage::GetStubCallDescriptor( | |
1467 isolate(), zone(), descriptor, descriptor.GetStackParameterCount(), | |
1468 CallDescriptor::kNoFlags, Operator::kNoProperties, | |
1469 MachineType::AnyTagged(), result_size); | |
1470 | |
1471 Node** args = zone()->NewArray<Node*>(3); | |
1472 args[0] = arg1; | |
1473 args[1] = arg2; | |
1474 args[2] = context; | |
1475 | |
1476 return CallN(call_descriptor, target, args); | |
1477 } | |
1478 | |
1479 Node* CodeStubAssembler::CallStub(const CallInterfaceDescriptor& descriptor, | |
1480 Node* target, Node* context, Node* arg1, | |
1481 Node* arg2, Node* arg3, size_t result_size) { | |
1482 CallDescriptor* call_descriptor = Linkage::GetStubCallDescriptor( | |
1483 isolate(), zone(), descriptor, descriptor.GetStackParameterCount(), | |
1484 CallDescriptor::kNoFlags, Operator::kNoProperties, | |
1485 MachineType::AnyTagged(), result_size); | |
1486 | |
1487 Node** args = zone()->NewArray<Node*>(4); | |
1488 args[0] = arg1; | |
1489 args[1] = arg2; | |
1490 args[2] = arg3; | |
1491 args[3] = context; | |
1492 | |
1493 return CallN(call_descriptor, target, args); | |
1494 } | |
1495 | |
1496 Node* CodeStubAssembler::CallStub(const CallInterfaceDescriptor& descriptor, | |
1497 Node* target, Node* context, Node* arg1, | |
1498 Node* arg2, Node* arg3, Node* arg4, | |
1499 size_t result_size) { | |
1500 CallDescriptor* call_descriptor = Linkage::GetStubCallDescriptor( | |
1501 isolate(), zone(), descriptor, descriptor.GetStackParameterCount(), | |
1502 CallDescriptor::kNoFlags, Operator::kNoProperties, | |
1503 MachineType::AnyTagged(), result_size); | |
1504 | |
1505 Node** args = zone()->NewArray<Node*>(5); | |
1506 args[0] = arg1; | |
1507 args[1] = arg2; | |
1508 args[2] = arg3; | |
1509 args[3] = arg4; | |
1510 args[4] = context; | |
1511 | |
1512 return CallN(call_descriptor, target, args); | |
1513 } | |
1514 | |
1515 Node* CodeStubAssembler::CallStub(const CallInterfaceDescriptor& descriptor, | |
1516 Node* target, Node* context, Node* arg1, | |
1517 Node* arg2, Node* arg3, Node* arg4, | |
1518 Node* arg5, size_t result_size) { | |
1519 CallDescriptor* call_descriptor = Linkage::GetStubCallDescriptor( | |
1520 isolate(), zone(), descriptor, descriptor.GetStackParameterCount(), | |
1521 CallDescriptor::kNoFlags, Operator::kNoProperties, | |
1522 MachineType::AnyTagged(), result_size); | |
1523 | |
1524 Node** args = zone()->NewArray<Node*>(6); | |
1525 args[0] = arg1; | |
1526 args[1] = arg2; | |
1527 args[2] = arg3; | |
1528 args[3] = arg4; | |
1529 args[4] = arg5; | |
1530 args[5] = context; | |
1531 | |
1532 return CallN(call_descriptor, target, args); | |
1533 } | |
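
// In all of the CallStub overloads above, the zone-allocated argument buffer
// follows the stub calling convention: the explicit arguments come first, in
// order, and the context is always appended as the final entry.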

Node* CodeStubAssembler::TailCallStub(Callable const& callable, Node* context,
                                      Node* arg1, Node* arg2,
                                      size_t result_size) {
  Node* target = HeapConstant(callable.code());
  return TailCallStub(callable.descriptor(), target, context, arg1, arg2,
                      result_size);
}

Node* CodeStubAssembler::TailCallStub(const CallInterfaceDescriptor& descriptor,
                                      Node* target, Node* context, Node* arg1,
                                      Node* arg2, size_t result_size) {
  CallDescriptor* call_descriptor = Linkage::GetStubCallDescriptor(
      isolate(), zone(), descriptor, descriptor.GetStackParameterCount(),
      CallDescriptor::kSupportsTailCalls, Operator::kNoProperties,
      MachineType::AnyTagged(), result_size);

  Node** args = zone()->NewArray<Node*>(3);
  args[0] = arg1;
  args[1] = arg2;
  args[2] = context;

  return raw_assembler_->TailCallN(call_descriptor, target, args);
}
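
// Unlike CallStub, TailCallStub builds its call descriptor with
// CallDescriptor::kSupportsTailCalls, so the generated code can replace the
// current frame with a jump to the target stub instead of pushing a new one.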

Node* CodeStubAssembler::TailCallBytecodeDispatch(
    const CallInterfaceDescriptor& interface_descriptor,
    Node* code_target_address, Node** args) {
  CallDescriptor* descriptor = Linkage::GetBytecodeDispatchCallDescriptor(
      isolate(), zone(), interface_descriptor,
      interface_descriptor.GetStackParameterCount());
  return raw_assembler_->TailCallN(descriptor, code_target_address, args);
}
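
// TailCallBytecodeDispatch is used by the interpreter to chain from one
// bytecode handler to the next; tail-calling keeps the dispatch loop from
// growing the machine stack as bytecodes execute.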

void CodeStubAssembler::Goto(CodeStubAssembler::Label* label) {
  label->MergeVariables();
  raw_assembler_->Goto(label->label_);
}

void CodeStubAssembler::GotoIf(Node* condition, Label* true_label) {
  Label false_label(this);
  Branch(condition, true_label, &false_label);
  Bind(&false_label);
}

void CodeStubAssembler::GotoUnless(Node* condition, Label* false_label) {
  Label true_label(this);
  Branch(condition, &true_label, false_label);
  Bind(&true_label);
}
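
// GotoIf and GotoUnless synthesize a conditional forward jump from Branch by
// creating an anonymous label for the fall-through side and binding it
// immediately, so straight-line code continues after the check. A sketch of
// typical use (the smi check is illustrative only):
//
//   Label if_smi(this);
//   GotoIf(WordIsSmi(value), &if_smi);
//   // ... the non-smi case continues here ...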

void CodeStubAssembler::Branch(Node* condition,
                               CodeStubAssembler::Label* true_label,
                               CodeStubAssembler::Label* false_label) {
  true_label->MergeVariables();
  false_label->MergeVariables();
  return raw_assembler_->Branch(condition, true_label->label_,
                                false_label->label_);
}

void CodeStubAssembler::Switch(Node* index, Label* default_label,
                               int32_t* case_values, Label** case_labels,
                               size_t case_count) {
  RawMachineLabel** labels = zone()->NewArray<RawMachineLabel*>(case_count);
  for (size_t i = 0; i < case_count; ++i) {
    labels[i] = case_labels[i]->label_;
    case_labels[i]->MergeVariables();
  }
  // The switch branches to the default label exactly once, so its variables
  // must be merged once, not once per case.
  default_label->MergeVariables();
  return raw_assembler_->Switch(index, default_label->label_, case_values,
                                labels, case_count);
}
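
// A sketch of typical Switch usage (the labels and case values are
// illustrative only):
//
//   Label case_a(this), case_b(this), fallthrough(this);
//   int32_t values[] = {0, 1};
//   Label* labels[] = {&case_a, &case_b};
//   Switch(index, &fallthrough, values, labels, arraysize(labels));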

// RawMachineAssembler delegate helpers:
Isolate* CodeStubAssembler::isolate() const {
  return raw_assembler_->isolate();
}

Factory* CodeStubAssembler::factory() const { return isolate()->factory(); }

Graph* CodeStubAssembler::graph() const { return raw_assembler_->graph(); }

Zone* CodeStubAssembler::zone() const { return raw_assembler_->zone(); }

// The core implementation of Variable is stored through an indirection so
// that it can outlive the often block-scoped Variable declarations. This is
// needed to ensure that variable binding and merging through phis can
// properly be verified.
class CodeStubAssembler::Variable::Impl : public ZoneObject {
 public:
  explicit Impl(MachineRepresentation rep) : value_(nullptr), rep_(rep) {}
  Node* value_;
  MachineRepresentation rep_;
};
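
// A sketch of how Variable and Label cooperate to build phis (illustrative
// only; assumes the Label convenience constructor that takes a single merged
// Variable):
//
//   Variable var(this, MachineRepresentation::kTagged);
//   Label merge(this, &var);
//   var.Bind(BooleanConstant(true));
//   GotoIf(condition, &merge);
//   var.Bind(BooleanConstant(false));
//   Goto(&merge);
//   Bind(&merge);  // var.value() is now a phi of the two bindings.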

CodeStubAssembler::Variable::Variable(CodeStubAssembler* assembler,
                                      MachineRepresentation rep)
    : impl_(new (assembler->zone()) Impl(rep)) {
  assembler->variables_.push_back(impl_);
}

void CodeStubAssembler::Variable::Bind(Node* value) { impl_->value_ = value; }

Node* CodeStubAssembler::Variable::value() const {
  DCHECK_NOT_NULL(impl_->value_);
  return impl_->value_;
}

MachineRepresentation CodeStubAssembler::Variable::rep() const {
  return impl_->rep_;
}

bool CodeStubAssembler::Variable::IsBound() const {
  return impl_->value_ != nullptr;
}

CodeStubAssembler::Label::Label(CodeStubAssembler* assembler,
                                int merged_value_count,
                                CodeStubAssembler::Variable** merged_variables,
                                CodeStubAssembler::Label::Type type)
    : bound_(false), merge_count_(0), assembler_(assembler), label_(nullptr) {
  void* buffer = assembler->zone()->New(sizeof(RawMachineLabel));
  label_ = new (buffer)
      RawMachineLabel(type == kDeferred ? RawMachineLabel::kDeferred
                                        : RawMachineLabel::kNonDeferred);
  for (int i = 0; i < merged_value_count; ++i) {
    variable_phis_[merged_variables[i]->impl_] = nullptr;
  }
}

void CodeStubAssembler::Label::MergeVariables() {
  ++merge_count_;
  for (auto var : assembler_->variables_) {
    size_t count = 0;
    Node* node = var->value_;
    if (node != nullptr) {
      auto i = variable_merges_.find(var);
      if (i != variable_merges_.end()) {
        i->second.push_back(node);
        count = i->second.size();
      } else {
        count = 1;
        variable_merges_[var] = std::vector<Node*>(1, node);
      }
    }
    // If the following assert fires, then you've jumped to this label from a
    // path along which a variable expected to merge into a phi was never
    // bound.
    DCHECK(variable_phis_.find(var) == variable_phis_.end() ||
           count == merge_count_);
    USE(count);

    // If the label is already bound, we already know the set of variables to
    // merge and phi nodes have already been created.
    if (bound_) {
      auto phi = variable_phis_.find(var);
      if (phi != variable_phis_.end()) {
        DCHECK_NOT_NULL(phi->second);
        assembler_->raw_assembler_->AppendPhiInput(phi->second, node);
      } else {
        auto i = variable_merges_.find(var);
        if (i != variable_merges_.end()) {
          // If the following assert fires, then you've declared a variable
          // that had the same bound value along every path up to the point
          // where this label was bound, but a path merged afterwards brought
          // a different value for the variable. Phis cannot be added to a
          // label after it has been bound; instead, list the variable in the
          // label constructor's set of merged variables.
          DCHECK(std::find_if(i->second.begin(), i->second.end(),
                              [node](Node* e) -> bool { return node != e; }) ==
                 i->second.end());
        }
      }
    }
  }
}
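
// merge_count_ counts the control-flow edges that have been merged into the
// label so far; every variable participating in a phi must contribute
// exactly one value per edge, which is what the DCHECKs above verify.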

void CodeStubAssembler::Label::Bind() {
  DCHECK(!bound_);
  assembler_->raw_assembler_->Bind(label_);

  // Make sure that all variables that have changed along any path up to this
  // point are marked as merge variables.
  for (auto var : assembler_->variables_) {
    Node* shared_value = nullptr;
    auto i = variable_merges_.find(var);
    if (i != variable_merges_.end()) {
      for (auto value : i->second) {
        DCHECK(value != nullptr);
        if (value != shared_value) {
          if (shared_value == nullptr) {
            shared_value = value;
          } else {
            variable_phis_[var] = nullptr;
          }
        }
      }
    }
  }

  for (auto var : variable_phis_) {
    CodeStubAssembler::Variable::Impl* var_impl = var.first;
    auto i = variable_merges_.find(var_impl);
    // If the following assert fires, then a variable that has been marked as
    // being merged at the label (either explicitly, in the label constructor,
    // or implicitly, by different bound values having been seen at branches
    // into the label) doesn't have a bound value along all of the paths that
    // have been merged into the label up to this point.
    DCHECK(i != variable_merges_.end() && i->second.size() == merge_count_);
    Node* phi = assembler_->raw_assembler_->Phi(
        var.first->rep_, static_cast<int>(merge_count_), &(i->second[0]));
    variable_phis_[var_impl] = phi;
  }

  // Bind each variable either to its merge phi, to the value shared along
  // all paths, or to nullptr if neither applies.
  for (auto var : assembler_->variables_) {
    auto i = variable_phis_.find(var);
    if (i != variable_phis_.end()) {
      var->value_ = i->second;
    } else {
      auto j = variable_merges_.find(var);
      if (j != variable_merges_.end() && j->second.size() == merge_count_) {
        var->value_ = j->second.back();
      } else {
        var->value_ = nullptr;
      }
    }
  }

  bound_ = true;
}

}  // namespace compiler
}  // namespace internal
}  // namespace v8