Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(33)

Side by Side Diff: src/compiler/code-assembler.cc

Issue 1875583003: Separate CodeAssembler and CodeStubAssembler (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Fix gn build. Again. Created 4 years, 8 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « src/compiler/code-assembler.h ('k') | src/compiler/code-stub-assembler.h » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
(Empty)
1 // Copyright 2015 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "src/compiler/code-assembler.h"
6
7 #include <ostream>
8
9 #include "src/code-factory.h"
10 #include "src/compiler/graph.h"
11 #include "src/compiler/instruction-selector.h"
12 #include "src/compiler/linkage.h"
13 #include "src/compiler/pipeline.h"
14 #include "src/compiler/raw-machine-assembler.h"
15 #include "src/compiler/schedule.h"
16 #include "src/frames.h"
17 #include "src/interface-descriptors.h"
18 #include "src/interpreter/bytecodes.h"
19 #include "src/machine-type.h"
20 #include "src/macro-assembler.h"
21 #include "src/zone.h"
22
23 namespace v8 {
24 namespace internal {
25 namespace compiler {
26
// Constructs a CodeAssembler for a stub described by |descriptor|;
// derives a stub CallDescriptor and delegates to the designated
// constructor below.
CodeAssembler::CodeAssembler(Isolate* isolate, Zone* zone,
                             const CallInterfaceDescriptor& descriptor,
                             Code::Flags flags, const char* name,
                             size_t result_size)
    : CodeAssembler(
          isolate, zone,
          Linkage::GetStubCallDescriptor(
              isolate, zone, descriptor, descriptor.GetStackParameterCount(),
              CallDescriptor::kNoFlags, Operator::kNoProperties,
              MachineType::AnyTagged(), result_size),
          flags, name) {}
38
// Constructs a CodeAssembler for code with JS linkage taking
// |parameter_count| parameters; delegates to the designated constructor.
CodeAssembler::CodeAssembler(Isolate* isolate, Zone* zone, int parameter_count,
                             Code::Flags flags, const char* name)
    : CodeAssembler(isolate, zone,
                    Linkage::GetJSCallDescriptor(zone, false, parameter_count,
                                                 CallDescriptor::kNoFlags),
                    flags, name) {}
45
// The designated constructor: wraps a RawMachineAssembler over a fresh
// Graph allocated in |zone|, using |call_descriptor| for the generated
// code's own calling convention.
CodeAssembler::CodeAssembler(Isolate* isolate, Zone* zone,
                             CallDescriptor* call_descriptor, Code::Flags flags,
                             const char* name)
    : raw_assembler_(new RawMachineAssembler(
          isolate, new (zone) Graph(zone), call_descriptor,
          MachineType::PointerRepresentation(),
          InstructionSelector::SupportedMachineOperatorFlags())),
      flags_(flags),
      name_(name),
      code_generated_(false),
      variables_(zone) {}
57
CodeAssembler::~CodeAssembler() {}

// Hooks invoked around every non-tail call emitted via CallN/CallRuntime
// (see CallN below). Default no-ops; presumably intended for subclasses to
// save/restore state across calls — confirm against the header.
void CodeAssembler::CallPrologue() {}

void CodeAssembler::CallEpilogue() {}
63
// Exports the accumulated graph as a schedule and compiles it into a
// Code object. May only be called once per assembler instance.
Handle<Code> CodeAssembler::GenerateCode() {
  DCHECK(!code_generated_);

  Schedule* schedule = raw_assembler_->Export();
  Handle<Code> code = Pipeline::GenerateCodeForCodeStub(
      isolate(), raw_assembler_->call_descriptor(), graph(), schedule, flags_,
      name_);

  code_generated_ = true;
  return code;
}
75
// True when targeting a 64-bit architecture.
bool CodeAssembler::Is64() const { return raw_assembler_->machine()->Is64(); }

// The IsFloat64Round*Supported predicates report whether the target
// machine provides the corresponding rounding operator.
bool CodeAssembler::IsFloat64RoundUpSupported() const {
  return raw_assembler_->machine()->Float64RoundUp().IsSupported();
}

bool CodeAssembler::IsFloat64RoundDownSupported() const {
  return raw_assembler_->machine()->Float64RoundDown().IsSupported();
}

bool CodeAssembler::IsFloat64RoundTruncateSupported() const {
  return raw_assembler_->machine()->Float64RoundTruncate().IsSupported();
}
89
// Constant factories. Each returns a graph node representing the given
// immediate value; all simply delegate to the RawMachineAssembler.
Node* CodeAssembler::Int32Constant(int value) {
  return raw_assembler_->Int32Constant(value);
}

Node* CodeAssembler::IntPtrConstant(intptr_t value) {
  return raw_assembler_->IntPtrConstant(value);
}

Node* CodeAssembler::NumberConstant(double value) {
  return raw_assembler_->NumberConstant(value);
}

// A Smi encodes its payload in the tagged pointer bits, so the Smi* can
// be reinterpreted directly as an intptr-sized immediate.
Node* CodeAssembler::SmiConstant(Smi* value) {
  return IntPtrConstant(bit_cast<intptr_t>(value));
}

Node* CodeAssembler::HeapConstant(Handle<HeapObject> object) {
  return raw_assembler_->HeapConstant(object);
}

Node* CodeAssembler::BooleanConstant(bool value) {
  return raw_assembler_->BooleanConstant(value);
}

Node* CodeAssembler::ExternalConstant(ExternalReference address) {
  return raw_assembler_->ExternalConstant(address);
}

Node* CodeAssembler::Float64Constant(double value) {
  return raw_assembler_->Float64Constant(value);
}
121
// Convenience constants for frequently used heap values and maps, built
// either from factory handles or via LoadRoot.
Node* CodeAssembler::BooleanMapConstant() {
  return HeapConstant(isolate()->factory()->boolean_map());
}

Node* CodeAssembler::EmptyStringConstant() {
  return LoadRoot(Heap::kempty_stringRootIndex);
}

Node* CodeAssembler::HeapNumberMapConstant() {
  return HeapConstant(isolate()->factory()->heap_number_map());
}

Node* CodeAssembler::NaNConstant() {
  return LoadRoot(Heap::kNanValueRootIndex);
}

// Smi zero is used as the placeholder context for context-free calls.
Node* CodeAssembler::NoContextConstant() {
  return SmiConstant(Smi::FromInt(0));
}

Node* CodeAssembler::NullConstant() {
  return LoadRoot(Heap::kNullValueRootIndex);
}

Node* CodeAssembler::UndefinedConstant() {
  return LoadRoot(Heap::kUndefinedValueRootIndex);
}
149
// Returns the node for the parameter at index |value| of the current
// call descriptor.
Node* CodeAssembler::Parameter(int value) {
  return raw_assembler_->Parameter(value);
}

// Emits a return of |value| from the generated code.
void CodeAssembler::Return(Node* value) {
  return raw_assembler_->Return(value);
}

// Binds |label| at the current position; subsequent code is emitted into it.
void CodeAssembler::Bind(CodeAssembler::Label* label) { return label->Bind(); }
159
// Accessors for the current frame, parent frame and stack pointers.
Node* CodeAssembler::LoadFramePointer() {
  return raw_assembler_->LoadFramePointer();
}

Node* CodeAssembler::LoadParentFramePointer() {
  return raw_assembler_->LoadParentFramePointer();
}

Node* CodeAssembler::LoadStackPointer() {
  return raw_assembler_->LoadStackPointer();
}

// Total shift between a Smi's payload and its tagged representation.
Node* CodeAssembler::SmiShiftBitsConstant() {
  return IntPtrConstant(kSmiShiftSize + kSmiTagSize);
}
175
// Converts a word-sized integer into its Smi representation (left shift
// by the Smi shift bits).
Node* CodeAssembler::SmiTag(Node* value) {
  return raw_assembler_->WordShl(value, SmiShiftBitsConstant());
}

// Converts a Smi back into a word-sized integer (arithmetic right shift,
// preserving the sign).
Node* CodeAssembler::SmiUntag(Node* value) {
  return raw_assembler_->WordSar(value, SmiShiftBitsConstant());
}
183
// Generates one two-operand forwarding method per entry in
// CODE_ASSEMBLER_BINARY_OP_LIST (declared in the header), each
// delegating to the RawMachineAssembler method of the same name.
#define DEFINE_CODE_ASSEMBLER_BINARY_OP(name)   \
  Node* CodeAssembler::name(Node* a, Node* b) { \
    return raw_assembler_->name(a, b);          \
  }
CODE_ASSEMBLER_BINARY_OP_LIST(DEFINE_CODE_ASSEMBLER_BINARY_OP)
#undef DEFINE_CODE_ASSEMBLER_BINARY_OP
190
// Left-shifts |value| by the immediate |shift| bit count.
Node* CodeAssembler::WordShl(Node* value, int shift) {
  return raw_assembler_->WordShl(value, IntPtrConstant(shift));
}

// Float64 -> int32 truncations with C-style round-to-zero semantics,
// and with JavaScript ToInt32 semantics respectively.
Node* CodeAssembler::TruncateFloat64ToInt32RoundToZero(Node* a) {
  return raw_assembler_->TruncateFloat64ToInt32(TruncationMode::kRoundToZero,
                                                a);
}

Node* CodeAssembler::TruncateFloat64ToInt32JavaScript(Node* a) {
  return raw_assembler_->TruncateFloat64ToInt32(TruncationMode::kJavaScript, a);
}
203
// Generates one single-operand forwarding method per entry in
// CODE_ASSEMBLER_UNARY_OP_LIST (declared in the header).
#define DEFINE_CODE_ASSEMBLER_UNARY_OP(name) \
  Node* CodeAssembler::name(Node* a) { return raw_assembler_->name(a); }
CODE_ASSEMBLER_UNARY_OP_LIST(DEFINE_CODE_ASSEMBLER_UNARY_OP)
#undef DEFINE_CODE_ASSEMBLER_UNARY_OP
208
// Materializes the heap root at |root_index| as an embedded constant
// when the root is immovable. The general path (loading from the roots
// array at runtime) is not implemented yet and aborts.
Node* CodeAssembler::LoadRoot(Heap::RootListIndex root_index) {
  if (isolate()->heap()->RootCanBeTreatedAsConstant(root_index)) {
    Handle<Object> root = isolate()->heap()->root_handle(root_index);
    if (root->IsSmi()) {
      return SmiConstant(Smi::cast(*root));
    } else {
      return HeapConstant(Handle<HeapObject>::cast(root));
    }
  }

  compiler::Node* roots_array_start =
      ExternalConstant(ExternalReference::roots_array_start(isolate()));
  USE(roots_array_start);

  // TODO(danno): Implement the root-access case where the root is not constant
  // and must be loaded from the root array.
  UNIMPLEMENTED();
  return nullptr;
}
228
// Bump-pointer allocates |size_in_bytes| in the space whose allocation
// top/limit slots live at |top_address|/|limit_address|, falling back to
// Runtime::kAllocateInTargetSpace when the space is exhausted. Returns
// the tagged address of the new object.
Node* CodeAssembler::AllocateRawUnaligned(Node* size_in_bytes,
                                          AllocationFlags flags,
                                          Node* top_address,
                                          Node* limit_address) {
  Node* top = Load(MachineType::Pointer(), top_address);
  Node* limit = Load(MachineType::Pointer(), limit_address);

  // If there's not enough space, call the runtime.
  RawMachineLabel runtime_call(RawMachineLabel::kDeferred), no_runtime_call,
      merge_runtime;
  raw_assembler_->Branch(
      raw_assembler_->IntPtrLessThan(IntPtrSub(limit, top), size_in_bytes),
      &runtime_call, &no_runtime_call);

  raw_assembler_->Bind(&runtime_call);
  // AllocateInTargetSpace does not use the context.
  Node* context = IntPtrConstant(0);
  // Encode the target space (old when pretenured, otherwise new) into the
  // runtime call's Smi flags argument.
  Node* runtime_flags = SmiTag(Int32Constant(
      AllocateDoubleAlignFlag::encode(false) |
      AllocateTargetSpace::encode(flags & kPretenured
                                      ? AllocationSpace::OLD_SPACE
                                      : AllocationSpace::NEW_SPACE)));
  Node* runtime_result = CallRuntime(Runtime::kAllocateInTargetSpace, context,
                                     SmiTag(size_in_bytes), runtime_flags);
  raw_assembler_->Goto(&merge_runtime);

  // When there is enough space, return `top' and bump it up.
  raw_assembler_->Bind(&no_runtime_call);
  Node* no_runtime_result = top;
  StoreNoWriteBarrier(MachineType::PointerRepresentation(), top_address,
                      IntPtrAdd(top, size_in_bytes));
  // Tag the inline result; the runtime path already returns a tagged object.
  no_runtime_result =
      IntPtrAdd(no_runtime_result, IntPtrConstant(kHeapObjectTag));
  raw_assembler_->Goto(&merge_runtime);

  raw_assembler_->Bind(&merge_runtime);
  return raw_assembler_->Phi(MachineType::PointerRepresentation(),
                             runtime_result, no_runtime_result);
}
268
269 Node* CodeAssembler::AllocateRawAligned(Node* size_in_bytes,
270 AllocationFlags flags,
271 Node* top_address,
272 Node* limit_address) {
273 Node* top = Load(MachineType::Pointer(), top_address);
274 Node* limit = Load(MachineType::Pointer(), limit_address);
275 Node* adjusted_size = size_in_bytes;
276 if (flags & kDoubleAlignment) {
277 // TODO(epertoso): Simd128 alignment.
278 RawMachineLabel aligned, not_aligned, merge;
279 raw_assembler_->Branch(WordAnd(top, IntPtrConstant(kDoubleAlignmentMask)),
280 &not_aligned, &aligned);
281
282 raw_assembler_->Bind(&not_aligned);
283 Node* not_aligned_size =
284 IntPtrAdd(size_in_bytes, IntPtrConstant(kPointerSize));
285 raw_assembler_->Goto(&merge);
286
287 raw_assembler_->Bind(&aligned);
288 raw_assembler_->Goto(&merge);
289
290 raw_assembler_->Bind(&merge);
291 adjusted_size = raw_assembler_->Phi(MachineType::PointerRepresentation(),
292 not_aligned_size, adjusted_size);
293 }
294
295 Node* address = AllocateRawUnaligned(adjusted_size, kNone, top, limit);
296
297 RawMachineLabel needs_filler, doesnt_need_filler, merge_address;
298 raw_assembler_->Branch(
299 raw_assembler_->IntPtrEqual(adjusted_size, size_in_bytes),
300 &doesnt_need_filler, &needs_filler);
301
302 raw_assembler_->Bind(&needs_filler);
303 // Store a filler and increase the address by kPointerSize.
304 // TODO(epertoso): this code assumes that we only align to kDoubleSize. Change
305 // it when Simd128 alignment is supported.
306 StoreNoWriteBarrier(MachineType::PointerRepresentation(), top,
307 LoadRoot(Heap::kOnePointerFillerMapRootIndex));
308 Node* address_with_filler = IntPtrAdd(address, IntPtrConstant(kPointerSize));
309 raw_assembler_->Goto(&merge_address);
310
311 raw_assembler_->Bind(&doesnt_need_filler);
312 Node* address_without_filler = address;
313 raw_assembler_->Goto(&merge_address);
314
315 raw_assembler_->Bind(&merge_address);
316 address = raw_assembler_->Phi(MachineType::PointerRepresentation(),
317 address_with_filler, address_without_filler);
318 // Update the top.
319 StoreNoWriteBarrier(MachineType::PointerRepresentation(), top_address,
320 IntPtrAdd(top, adjusted_size));
321 return address;
322 }
323
// Allocates |size_in_bytes| bytes in new space (or old space when
// kPretenured is set). Double alignment is only honored on 32-bit hosts,
// where kDoubleAlignment requires the slower aligned path.
Node* CodeAssembler::Allocate(int size_in_bytes, AllocationFlags flags) {
  bool const new_space = !(flags & kPretenured);
  Node* top_address = ExternalConstant(
      new_space
          ? ExternalReference::new_space_allocation_top_address(isolate())
          : ExternalReference::old_space_allocation_top_address(isolate()));
  Node* limit_address = ExternalConstant(
      new_space
          ? ExternalReference::new_space_allocation_limit_address(isolate())
          : ExternalReference::old_space_allocation_limit_address(isolate()));

#ifdef V8_HOST_ARCH_32_BIT
  if (flags & kDoubleAlignment) {
    return AllocateRawAligned(IntPtrConstant(size_in_bytes), flags, top_address,
                              limit_address);
  }
#endif

  return AllocateRawUnaligned(IntPtrConstant(size_in_bytes), flags, top_address,
                              limit_address);
}
345
// Computes the (tagged) address of an object nested |offset| bytes inside
// a previously allocated object |previous|; no new memory is reserved.
Node* CodeAssembler::InnerAllocate(Node* previous, int offset) {
  return IntPtrAdd(previous, IntPtrConstant(offset));
}
349
// Memory access helpers. The two-argument forms address memory at |base|;
// the three-argument forms at |base| + |index|. Store emits a full write
// barrier, StoreNoWriteBarrier omits it (callers must guarantee no
// old-to-new pointer is recorded).
Node* CodeAssembler::Load(MachineType rep, Node* base) {
  return raw_assembler_->Load(rep, base);
}

Node* CodeAssembler::Load(MachineType rep, Node* base, Node* index) {
  return raw_assembler_->Load(rep, base, index);
}

Node* CodeAssembler::Store(MachineRepresentation rep, Node* base, Node* value) {
  return raw_assembler_->Store(rep, base, value, kFullWriteBarrier);
}

Node* CodeAssembler::Store(MachineRepresentation rep, Node* base, Node* index,
                           Node* value) {
  return raw_assembler_->Store(rep, base, index, value, kFullWriteBarrier);
}

Node* CodeAssembler::StoreNoWriteBarrier(MachineRepresentation rep, Node* base,
                                         Node* value) {
  return raw_assembler_->Store(rep, base, value, kNoWriteBarrier);
}

Node* CodeAssembler::StoreNoWriteBarrier(MachineRepresentation rep, Node* base,
                                         Node* index, Node* value) {
  return raw_assembler_->Store(rep, base, index, value, kNoWriteBarrier);
}

// Extracts output |index| from a multi-output node |value|.
Node* CodeAssembler::Projection(int index, Node* value) {
  return raw_assembler_->Projection(index, value);
}
380
381 void CodeAssembler::BranchIf(Node* condition, Label* if_true, Label* if_false) {
382 Label if_condition_is_true(this), if_condition_is_false(this);
383 Branch(condition, &if_condition_is_true, &if_condition_is_false);
384 Bind(&if_condition_is_true);
385 Goto(if_true);
386 Bind(&if_condition_is_false);
387 Goto(if_false);
388 }
389
// Emits a call to |code_target| with the given descriptor and argument
// array, wrapped in the CallPrologue/CallEpilogue hooks.
Node* CodeAssembler::CallN(CallDescriptor* descriptor, Node* code_target,
                           Node** args) {
  CallPrologue();
  Node* return_value = raw_assembler_->CallN(descriptor, code_target, args);
  CallEpilogue();
  return return_value;
}

// Tail-call variant; no prologue/epilogue since control never returns here.
Node* CodeAssembler::TailCallN(CallDescriptor* descriptor, Node* code_target,
                               Node** args) {
  return raw_assembler_->TailCallN(descriptor, code_target, args);
}
402
// CallRuntime overloads for zero to four arguments. Each forwards to the
// matching RawMachineAssembler::CallRuntimeN (which takes the context
// last) and is bracketed by the CallPrologue/CallEpilogue hooks.
Node* CodeAssembler::CallRuntime(Runtime::FunctionId function_id,
                                 Node* context) {
  CallPrologue();
  Node* return_value = raw_assembler_->CallRuntime0(function_id, context);
  CallEpilogue();
  return return_value;
}

Node* CodeAssembler::CallRuntime(Runtime::FunctionId function_id, Node* context,
                                 Node* arg1) {
  CallPrologue();
  Node* return_value = raw_assembler_->CallRuntime1(function_id, arg1, context);
  CallEpilogue();
  return return_value;
}

Node* CodeAssembler::CallRuntime(Runtime::FunctionId function_id, Node* context,
                                 Node* arg1, Node* arg2) {
  CallPrologue();
  Node* return_value =
      raw_assembler_->CallRuntime2(function_id, arg1, arg2, context);
  CallEpilogue();
  return return_value;
}

Node* CodeAssembler::CallRuntime(Runtime::FunctionId function_id, Node* context,
                                 Node* arg1, Node* arg2, Node* arg3) {
  CallPrologue();
  Node* return_value =
      raw_assembler_->CallRuntime3(function_id, arg1, arg2, arg3, context);
  CallEpilogue();
  return return_value;
}

Node* CodeAssembler::CallRuntime(Runtime::FunctionId function_id, Node* context,
                                 Node* arg1, Node* arg2, Node* arg3,
                                 Node* arg4) {
  CallPrologue();
  Node* return_value = raw_assembler_->CallRuntime4(function_id, arg1, arg2,
                                                    arg3, arg4, context);
  CallEpilogue();
  return return_value;
}
446
// TailCallRuntime overloads for zero to four arguments; tail calls need
// no prologue/epilogue hooks since control never returns here.
Node* CodeAssembler::TailCallRuntime(Runtime::FunctionId function_id,
                                     Node* context) {
  return raw_assembler_->TailCallRuntime0(function_id, context);
}

Node* CodeAssembler::TailCallRuntime(Runtime::FunctionId function_id,
                                     Node* context, Node* arg1) {
  return raw_assembler_->TailCallRuntime1(function_id, arg1, context);
}

Node* CodeAssembler::TailCallRuntime(Runtime::FunctionId function_id,
                                     Node* context, Node* arg1, Node* arg2) {
  return raw_assembler_->TailCallRuntime2(function_id, arg1, arg2, context);
}

Node* CodeAssembler::TailCallRuntime(Runtime::FunctionId function_id,
                                     Node* context, Node* arg1, Node* arg2,
                                     Node* arg3) {
  return raw_assembler_->TailCallRuntime3(function_id, arg1, arg2, arg3,
                                          context);
}

Node* CodeAssembler::TailCallRuntime(Runtime::FunctionId function_id,
                                     Node* context, Node* arg1, Node* arg2,
                                     Node* arg3, Node* arg4) {
  return raw_assembler_->TailCallRuntime4(function_id, arg1, arg2, arg3, arg4,
                                          context);
}
475
// Callable-based CallStub overloads: materialize the stub's code object
// as a constant target and forward to the descriptor-based overloads.
Node* CodeAssembler::CallStub(Callable const& callable, Node* context,
                              Node* arg1, size_t result_size) {
  Node* target = HeapConstant(callable.code());
  return CallStub(callable.descriptor(), target, context, arg1, result_size);
}

Node* CodeAssembler::CallStub(Callable const& callable, Node* context,
                              Node* arg1, Node* arg2, size_t result_size) {
  Node* target = HeapConstant(callable.code());
  return CallStub(callable.descriptor(), target, context, arg1, arg2,
                  result_size);
}

Node* CodeAssembler::CallStub(Callable const& callable, Node* context,
                              Node* arg1, Node* arg2, Node* arg3,
                              size_t result_size) {
  Node* target = HeapConstant(callable.code());
  return CallStub(callable.descriptor(), target, context, arg1, arg2, arg3,
                  result_size);
}
496
// Descriptor-based CallStub overloads for one to five arguments. Each
// builds a stub call descriptor, packs the arguments (context last, per
// stub linkage) into a zone-allocated array, and emits the call via
// CallN.
Node* CodeAssembler::CallStub(const CallInterfaceDescriptor& descriptor,
                              Node* target, Node* context, Node* arg1,
                              size_t result_size) {
  CallDescriptor* call_descriptor = Linkage::GetStubCallDescriptor(
      isolate(), zone(), descriptor, descriptor.GetStackParameterCount(),
      CallDescriptor::kNoFlags, Operator::kNoProperties,
      MachineType::AnyTagged(), result_size);

  Node** args = zone()->NewArray<Node*>(2);
  args[0] = arg1;
  args[1] = context;

  return CallN(call_descriptor, target, args);
}

Node* CodeAssembler::CallStub(const CallInterfaceDescriptor& descriptor,
                              Node* target, Node* context, Node* arg1,
                              Node* arg2, size_t result_size) {
  CallDescriptor* call_descriptor = Linkage::GetStubCallDescriptor(
      isolate(), zone(), descriptor, descriptor.GetStackParameterCount(),
      CallDescriptor::kNoFlags, Operator::kNoProperties,
      MachineType::AnyTagged(), result_size);

  Node** args = zone()->NewArray<Node*>(3);
  args[0] = arg1;
  args[1] = arg2;
  args[2] = context;

  return CallN(call_descriptor, target, args);
}

Node* CodeAssembler::CallStub(const CallInterfaceDescriptor& descriptor,
                              Node* target, Node* context, Node* arg1,
                              Node* arg2, Node* arg3, size_t result_size) {
  CallDescriptor* call_descriptor = Linkage::GetStubCallDescriptor(
      isolate(), zone(), descriptor, descriptor.GetStackParameterCount(),
      CallDescriptor::kNoFlags, Operator::kNoProperties,
      MachineType::AnyTagged(), result_size);

  Node** args = zone()->NewArray<Node*>(4);
  args[0] = arg1;
  args[1] = arg2;
  args[2] = arg3;
  args[3] = context;

  return CallN(call_descriptor, target, args);
}

Node* CodeAssembler::CallStub(const CallInterfaceDescriptor& descriptor,
                              Node* target, Node* context, Node* arg1,
                              Node* arg2, Node* arg3, Node* arg4,
                              size_t result_size) {
  CallDescriptor* call_descriptor = Linkage::GetStubCallDescriptor(
      isolate(), zone(), descriptor, descriptor.GetStackParameterCount(),
      CallDescriptor::kNoFlags, Operator::kNoProperties,
      MachineType::AnyTagged(), result_size);

  Node** args = zone()->NewArray<Node*>(5);
  args[0] = arg1;
  args[1] = arg2;
  args[2] = arg3;
  args[3] = arg4;
  args[4] = context;

  return CallN(call_descriptor, target, args);
}

Node* CodeAssembler::CallStub(const CallInterfaceDescriptor& descriptor,
                              Node* target, Node* context, Node* arg1,
                              Node* arg2, Node* arg3, Node* arg4, Node* arg5,
                              size_t result_size) {
  CallDescriptor* call_descriptor = Linkage::GetStubCallDescriptor(
      isolate(), zone(), descriptor, descriptor.GetStackParameterCount(),
      CallDescriptor::kNoFlags, Operator::kNoProperties,
      MachineType::AnyTagged(), result_size);

  Node** args = zone()->NewArray<Node*>(6);
  args[0] = arg1;
  args[1] = arg2;
  args[2] = arg3;
  args[3] = arg4;
  args[4] = arg5;
  args[5] = context;

  return CallN(call_descriptor, target, args);
}
583
// Callable-based TailCallStub: materialize the stub's code object as a
// constant target and forward to the descriptor-based overload.
Node* CodeAssembler::TailCallStub(Callable const& callable, Node* context,
                                  Node* arg1, Node* arg2, size_t result_size) {
  Node* target = HeapConstant(callable.code());
  return TailCallStub(callable.descriptor(), target, context, arg1, arg2,
                      result_size);
}
590
// Descriptor-based TailCallStub: like CallStub but the descriptor is
// built with kSupportsTailCalls and the call is emitted as a tail call.
Node* CodeAssembler::TailCallStub(const CallInterfaceDescriptor& descriptor,
                                  Node* target, Node* context, Node* arg1,
                                  Node* arg2, size_t result_size) {
  CallDescriptor* call_descriptor = Linkage::GetStubCallDescriptor(
      isolate(), zone(), descriptor, descriptor.GetStackParameterCount(),
      CallDescriptor::kSupportsTailCalls, Operator::kNoProperties,
      MachineType::AnyTagged(), result_size);

  Node** args = zone()->NewArray<Node*>(3);
  args[0] = arg1;
  args[1] = arg2;
  args[2] = context;

  return raw_assembler_->TailCallN(call_descriptor, target, args);
}
606
// Tail call used by the interpreter's bytecode dispatch: jumps to
// |code_target_address| using the bytecode-dispatch linkage.
Node* CodeAssembler::TailCallBytecodeDispatch(
    const CallInterfaceDescriptor& interface_descriptor,
    Node* code_target_address, Node** args) {
  CallDescriptor* descriptor = Linkage::GetBytecodeDispatchCallDescriptor(
      isolate(), zone(), interface_descriptor,
      interface_descriptor.GetStackParameterCount());
  return raw_assembler_->TailCallN(descriptor, code_target_address, args);
}
615
// Unconditional jump to |label|; records current variable values along
// this edge so the label can build phis when it is bound.
void CodeAssembler::Goto(CodeAssembler::Label* label) {
  label->MergeVariables();
  raw_assembler_->Goto(label->label_);
}
620
621 void CodeAssembler::GotoIf(Node* condition, Label* true_label) {
622 Label false_label(this);
623 Branch(condition, true_label, &false_label);
624 Bind(&false_label);
625 }
626
627 void CodeAssembler::GotoUnless(Node* condition, Label* false_label) {
628 Label true_label(this);
629 Branch(condition, &true_label, false_label);
630 Bind(&true_label);
631 }
632
// Two-way branch on |condition|; records current variable values along
// both outgoing edges before emitting the machine-level branch.
void CodeAssembler::Branch(Node* condition, CodeAssembler::Label* true_label,
                           CodeAssembler::Label* false_label) {
  true_label->MergeVariables();
  false_label->MergeVariables();
  return raw_assembler_->Branch(condition, true_label->label_,
                                false_label->label_);
}
640
641 void CodeAssembler::Switch(Node* index, Label* default_label,
642 int32_t* case_values, Label** case_labels,
643 size_t case_count) {
644 RawMachineLabel** labels =
645 new (zone()->New(sizeof(RawMachineLabel*) * case_count))
646 RawMachineLabel*[case_count];
647 for (size_t i = 0; i < case_count; ++i) {
648 labels[i] = case_labels[i]->label_;
649 case_labels[i]->MergeVariables();
650 default_label->MergeVariables();
651 }
652 return raw_assembler_->Switch(index, default_label->label_, case_values,
653 labels, case_count);
654 }
655
// RawMachineAssembler delegate helpers:
Isolate* CodeAssembler::isolate() const { return raw_assembler_->isolate(); }

Factory* CodeAssembler::factory() const { return isolate()->factory(); }

Graph* CodeAssembler::graph() const { return raw_assembler_->graph(); }

Zone* CodeAssembler::zone() const { return raw_assembler_->zone(); }
664
// The core implementation of Variable is stored through an indirection so
// that it can outlive the often block-scoped Variable declarations. This is
// needed to ensure that variable binding and merging through phis can
// properly be verified.
class CodeAssembler::Variable::Impl : public ZoneObject {
 public:
  explicit Impl(MachineRepresentation rep) : value_(nullptr), rep_(rep) {}
  Node* value_;                // Currently bound value; nullptr while unbound.
  MachineRepresentation rep_;  // Machine representation of the variable.
};
675
// Registers the variable's zone-allocated Impl with the assembler so that
// label merge/bind bookkeeping can see every live variable.
CodeAssembler::Variable::Variable(CodeAssembler* assembler,
                                  MachineRepresentation rep)
    : impl_(new (assembler->zone()) Impl(rep)) {
  assembler->variables_.push_back(impl_);
}
681
// Binds the variable to |value| for the current position in the code.
void CodeAssembler::Variable::Bind(Node* value) { impl_->value_ = value; }

// Returns the currently bound value; it is an error to read an unbound
// variable.
Node* CodeAssembler::Variable::value() const {
  DCHECK_NOT_NULL(impl_->value_);
  return impl_->value_;
}

MachineRepresentation CodeAssembler::Variable::rep() const {
  return impl_->rep_;
}

bool CodeAssembler::Variable::IsBound() const {
  return impl_->value_ != nullptr;
}
696
// Creates an unbound label. Variables listed in |merged_variables| are
// pre-registered as requiring a phi at this label; their variable_phis_
// entries stay null until Bind() creates the phi nodes.
CodeAssembler::Label::Label(CodeAssembler* assembler, int merged_value_count,
                            CodeAssembler::Variable** merged_variables,
                            CodeAssembler::Label::Type type)
    : bound_(false), merge_count_(0), assembler_(assembler), label_(nullptr) {
  void* buffer = assembler->zone()->New(sizeof(RawMachineLabel));
  label_ = new (buffer)
      RawMachineLabel(type == kDeferred ? RawMachineLabel::kDeferred
                                        : RawMachineLabel::kNonDeferred);
  for (int i = 0; i < merged_value_count; ++i) {
    variable_phis_[merged_variables[i]->impl_] = nullptr;
  }
}
709
// Records the current values of all assembler variables along the control
// edge that is about to jump to this label, so that Bind() can create phis
// for variables whose values differ between incoming edges. Called once per
// incoming edge (merge_count_ counts them).
void CodeAssembler::Label::MergeVariables() {
  ++merge_count_;
  for (auto var : assembler_->variables_) {
    size_t count = 0;
    Node* node = var->value_;
    if (node != nullptr) {
      // Append this edge's value to the variable's per-label merge list.
      auto i = variable_merges_.find(var);
      if (i != variable_merges_.end()) {
        i->second.push_back(node);
        count = i->second.size();
      } else {
        count = 1;
        variable_merges_[var] = std::vector<Node*>(1, node);
      }
    }
    // If the following asserts, then you've jumped to a label without a bound
    // variable along that path that expects to merge its value into a phi.
    DCHECK(variable_phis_.find(var) == variable_phis_.end() ||
           count == merge_count_);
    USE(count);

    // If the label is already bound, we already know the set of variables to
    // merge and phi nodes have already been created.
    if (bound_) {
      auto phi = variable_phis_.find(var);
      if (phi != variable_phis_.end()) {
        DCHECK_NOT_NULL(phi->second);
        assembler_->raw_assembler_->AppendPhiInput(phi->second, node);
      } else {
        auto i = variable_merges_.find(var);
        if (i != variable_merges_.end()) {
          // If the following assert fires, then you've declared a variable that
          // has the same bound value along all paths up until the point you
          // bound this label, but then later merged a path with a new value for
          // the variable after the label bind (it's not possible to add phis to
          // the bound label after the fact, just make sure to list the variable
          // in the label's constructor's list of merged variables).
          // NOTE(review): unqualified find_if resolves via ADL on the std
          // iterators; consider spelling it std::find_if for clarity.
          DCHECK(find_if(i->second.begin(), i->second.end(),
                         [node](Node* e) -> bool { return node != e; }) ==
                 i->second.end());
        }
      }
    }
  }
}
755
// Binds the label at the current position: creates phi nodes for every
// variable whose incoming values differ (or that was explicitly listed in
// the constructor), and rebinds each variable to its merged value. A label
// may be bound only once.
void CodeAssembler::Label::Bind() {
  DCHECK(!bound_);
  assembler_->raw_assembler_->Bind(label_);

  // Make sure that all variables that have changed along any path up to this
  // point are marked as merge variables.
  for (auto var : assembler_->variables_) {
    Node* shared_value = nullptr;
    auto i = variable_merges_.find(var);
    if (i != variable_merges_.end()) {
      for (auto value : i->second) {
        DCHECK(value != nullptr);
        if (value != shared_value) {
          if (shared_value == nullptr) {
            shared_value = value;
          } else {
            // Two distinct incoming values: this variable needs a phi.
            variable_phis_[var] = nullptr;
          }
        }
      }
    }
  }

  // Create a phi over all incoming values for every variable marked above.
  for (auto var : variable_phis_) {
    CodeAssembler::Variable::Impl* var_impl = var.first;
    auto i = variable_merges_.find(var_impl);
    // If the following assert fires, then a variable that has been marked as
    // being merged at the label--either by explicitly marking it so in the
    // label constructor or by having seen different bound values at branches
    // into the label--doesn't have a bound value along all of the paths that
    // have been merged into the label up to this point.
    DCHECK(i != variable_merges_.end() && i->second.size() == merge_count_);
    Node* phi = assembler_->raw_assembler_->Phi(
        var.first->rep_, static_cast<int>(merge_count_), &(i->second[0]));
    variable_phis_[var_impl] = phi;
  }

  // Bind all variables to a merge phi, the common value along all paths or
  // null.
  for (auto var : assembler_->variables_) {
    auto i = variable_phis_.find(var);
    if (i != variable_phis_.end()) {
      var->value_ = i->second;
    } else {
      auto j = variable_merges_.find(var);
      if (j != variable_merges_.end() && j->second.size() == merge_count_) {
        var->value_ = j->second.back();
      } else {
        // Not bound along every incoming path: treat as unbound from here on.
        var->value_ = nullptr;
      }
    }
  }

  bound_ = true;
}
811
812 } // namespace compiler
813 } // namespace internal
814 } // namespace v8
OLDNEW
« no previous file with comments | « src/compiler/code-assembler.h ('k') | src/compiler/code-stub-assembler.h » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698