Chromium Code Reviews

Side by Side Diff: src/a64/lithium-a64.cc

Issue 207823003: Rename A64 port to ARM64 port (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: retry Created 6 years, 9 months ago
1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28 #include "v8.h"
29
30 #include "lithium-allocator-inl.h"
31 #include "a64/lithium-a64.h"
32 #include "a64/lithium-codegen-a64.h"
33 #include "hydrogen-osr.h"
34
35 namespace v8 {
36 namespace internal {
37
38
39 #define DEFINE_COMPILE(type) \
40 void L##type::CompileToNative(LCodeGen* generator) { \
41 generator->Do##type(this); \
42 }
43 LITHIUM_CONCRETE_INSTRUCTION_LIST(DEFINE_COMPILE)
44 #undef DEFINE_COMPILE
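// Illustrative sketch of the macro above: for a concrete type such as LAddI
// (one of the types in LITHIUM_CONCRETE_INSTRUCTION_LIST), DEFINE_COMPILE
// expands to roughly
//   void LAddI::CompileToNative(LCodeGen* generator) { generator->DoAddI(this); }
// so each lithium instruction simply dispatches to its LCodeGen handler.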
45
46 #ifdef DEBUG
47 void LInstruction::VerifyCall() {
48 // Call instructions can use only fixed registers as temporaries and
49 // outputs because all registers are blocked by the calling convention.
50 // Input operands must use a fixed register, a use-at-start policy, or
51 // a non-register policy.
52 ASSERT(Output() == NULL ||
53 LUnallocated::cast(Output())->HasFixedPolicy() ||
54 !LUnallocated::cast(Output())->HasRegisterPolicy());
55 for (UseIterator it(this); !it.Done(); it.Advance()) {
56 LUnallocated* operand = LUnallocated::cast(it.Current());
57 ASSERT(operand->HasFixedPolicy() ||
58 operand->IsUsedAtStart());
59 }
60 for (TempIterator it(this); !it.Done(); it.Advance()) {
61 LUnallocated* operand = LUnallocated::cast(it.Current());
62 ASSERT(operand->HasFixedPolicy() || !operand->HasRegisterPolicy());
63 }
64 }
65 #endif
66
67
68 void LLabel::PrintDataTo(StringStream* stream) {
69 LGap::PrintDataTo(stream);
70 LLabel* rep = replacement();
71 if (rep != NULL) {
72 stream->Add(" Dead block replaced with B%d", rep->block_id());
73 }
74 }
75
76
77 void LAccessArgumentsAt::PrintDataTo(StringStream* stream) {
78 arguments()->PrintTo(stream);
79 stream->Add(" length ");
80 length()->PrintTo(stream);
81 stream->Add(" index ");
82 index()->PrintTo(stream);
83 }
84
85
86 void LBranch::PrintDataTo(StringStream* stream) {
87 stream->Add("B%d | B%d on ", true_block_id(), false_block_id());
88 value()->PrintTo(stream);
89 }
90
91
92 void LCallJSFunction::PrintDataTo(StringStream* stream) {
93 stream->Add("= ");
94 function()->PrintTo(stream);
95 stream->Add("#%d / ", arity());
96 }
97
98
99 void LCallWithDescriptor::PrintDataTo(StringStream* stream) {
100 for (int i = 0; i < InputCount(); i++) {
101 InputAt(i)->PrintTo(stream);
102 stream->Add(" ");
103 }
104 stream->Add("#%d / ", arity());
105 }
106
107
108 void LCallNew::PrintDataTo(StringStream* stream) {
109 stream->Add("= ");
110 constructor()->PrintTo(stream);
111 stream->Add(" #%d / ", arity());
112 }
113
114
115 void LCallNewArray::PrintDataTo(StringStream* stream) {
116 stream->Add("= ");
117 constructor()->PrintTo(stream);
118 stream->Add(" #%d / ", arity());
119 ElementsKind kind = hydrogen()->elements_kind();
120 stream->Add(" (%s) ", ElementsKindToString(kind));
121 }
122
123
124 void LClassOfTestAndBranch::PrintDataTo(StringStream* stream) {
125 stream->Add("if class_of_test(");
126 value()->PrintTo(stream);
127 stream->Add(", \"%o\") then B%d else B%d",
128 *hydrogen()->class_name(),
129 true_block_id(),
130 false_block_id());
131 }
132
133
134 void LCompareNumericAndBranch::PrintDataTo(StringStream* stream) {
135 stream->Add("if ");
136 left()->PrintTo(stream);
137 stream->Add(" %s ", Token::String(op()));
138 right()->PrintTo(stream);
139 stream->Add(" then B%d else B%d", true_block_id(), false_block_id());
140 }
141
142
143 void LHasCachedArrayIndexAndBranch::PrintDataTo(StringStream* stream) {
144 stream->Add("if has_cached_array_index(");
145 value()->PrintTo(stream);
146 stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
147 }
148
149
150 bool LGoto::HasInterestingComment(LCodeGen* gen) const {
151 return !gen->IsNextEmittedBlock(block_id());
152 }
153
154
155 void LGoto::PrintDataTo(StringStream* stream) {
156 stream->Add("B%d", block_id());
157 }
158
159
160 void LInnerAllocatedObject::PrintDataTo(StringStream* stream) {
161 stream->Add(" = ");
162 base_object()->PrintTo(stream);
163 stream->Add(" + ");
164 offset()->PrintTo(stream);
165 }
166
167
168 void LInvokeFunction::PrintDataTo(StringStream* stream) {
169 stream->Add("= ");
170 function()->PrintTo(stream);
171 stream->Add(" #%d / ", arity());
172 }
173
174
175 void LInstruction::PrintTo(StringStream* stream) {
176 stream->Add("%s ", this->Mnemonic());
177
178 PrintOutputOperandTo(stream);
179
180 PrintDataTo(stream);
181
182 if (HasEnvironment()) {
183 stream->Add(" ");
184 environment()->PrintTo(stream);
185 }
186
187 if (HasPointerMap()) {
188 stream->Add(" ");
189 pointer_map()->PrintTo(stream);
190 }
191 }
192
193
194 void LInstruction::PrintDataTo(StringStream* stream) {
195 stream->Add("= ");
196 for (int i = 0; i < InputCount(); i++) {
197 if (i > 0) stream->Add(" ");
198 if (InputAt(i) == NULL) {
199 stream->Add("NULL");
200 } else {
201 InputAt(i)->PrintTo(stream);
202 }
203 }
204 }
205
206
207 void LInstruction::PrintOutputOperandTo(StringStream* stream) {
208 if (HasResult()) result()->PrintTo(stream);
209 }
210
211
212 void LHasInstanceTypeAndBranch::PrintDataTo(StringStream* stream) {
213 stream->Add("if has_instance_type(");
214 value()->PrintTo(stream);
215 stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
216 }
217
218
219 void LIsObjectAndBranch::PrintDataTo(StringStream* stream) {
220 stream->Add("if is_object(");
221 value()->PrintTo(stream);
222 stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
223 }
224
225
226 void LIsStringAndBranch::PrintDataTo(StringStream* stream) {
227 stream->Add("if is_string(");
228 value()->PrintTo(stream);
229 stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
230 }
231
232
233 void LIsSmiAndBranch::PrintDataTo(StringStream* stream) {
234 stream->Add("if is_smi(");
235 value()->PrintTo(stream);
236 stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
237 }
238
239
240 void LTypeofIsAndBranch::PrintDataTo(StringStream* stream) {
241 stream->Add("if typeof ");
242 value()->PrintTo(stream);
243 stream->Add(" == \"%s\" then B%d else B%d",
244 hydrogen()->type_literal()->ToCString().get(),
245 true_block_id(), false_block_id());
246 }
247
248
249 void LIsUndetectableAndBranch::PrintDataTo(StringStream* stream) {
250 stream->Add("if is_undetectable(");
251 value()->PrintTo(stream);
252 stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
253 }
254
255
256 bool LGap::IsRedundant() const {
257 for (int i = 0; i < 4; i++) {
258 if ((parallel_moves_[i] != NULL) && !parallel_moves_[i]->IsRedundant()) {
259 return false;
260 }
261 }
262
263 return true;
264 }
265
266
267 void LGap::PrintDataTo(StringStream* stream) {
268 for (int i = 0; i < 4; i++) {
269 stream->Add("(");
270 if (parallel_moves_[i] != NULL) {
271 parallel_moves_[i]->PrintDataTo(stream);
272 }
273 stream->Add(") ");
274 }
275 }
276
277
278 void LLoadContextSlot::PrintDataTo(StringStream* stream) {
279 context()->PrintTo(stream);
280 stream->Add("[%d]", slot_index());
281 }
282
283
284 void LStoreCodeEntry::PrintDataTo(StringStream* stream) {
285 stream->Add(" = ");
286 function()->PrintTo(stream);
287 stream->Add(".code_entry = ");
288 code_object()->PrintTo(stream);
289 }
290
291
292 void LStoreContextSlot::PrintDataTo(StringStream* stream) {
293 context()->PrintTo(stream);
294 stream->Add("[%d] <- ", slot_index());
295 value()->PrintTo(stream);
296 }
297
298
299 void LStoreKeyedGeneric::PrintDataTo(StringStream* stream) {
300 object()->PrintTo(stream);
301 stream->Add("[");
302 key()->PrintTo(stream);
303 stream->Add("] <- ");
304 value()->PrintTo(stream);
305 }
306
307
308 void LStoreNamedField::PrintDataTo(StringStream* stream) {
309 object()->PrintTo(stream);
310 hydrogen()->access().PrintTo(stream);
311 stream->Add(" <- ");
312 value()->PrintTo(stream);
313 }
314
315
316 void LStoreNamedGeneric::PrintDataTo(StringStream* stream) {
317 object()->PrintTo(stream);
318 stream->Add(".");
319 stream->Add(String::cast(*name())->ToCString().get());
320 stream->Add(" <- ");
321 value()->PrintTo(stream);
322 }
323
324
325 void LStringCompareAndBranch::PrintDataTo(StringStream* stream) {
326 stream->Add("if string_compare(");
327 left()->PrintTo(stream);
328 right()->PrintTo(stream);
329 stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
330 }
331
332
333 void LTransitionElementsKind::PrintDataTo(StringStream* stream) {
334 object()->PrintTo(stream);
335 stream->Add("%p -> %p", *original_map(), *transitioned_map());
336 }
337
338
339 template<int T>
340 void LUnaryMathOperation<T>::PrintDataTo(StringStream* stream) {
341 value()->PrintTo(stream);
342 }
343
344
345 const char* LArithmeticD::Mnemonic() const {
346 switch (op()) {
347 case Token::ADD: return "add-d";
348 case Token::SUB: return "sub-d";
349 case Token::MUL: return "mul-d";
350 case Token::DIV: return "div-d";
351 case Token::MOD: return "mod-d";
352 default:
353 UNREACHABLE();
354 return NULL;
355 }
356 }
357
358
359 const char* LArithmeticT::Mnemonic() const {
360 switch (op()) {
361 case Token::ADD: return "add-t";
362 case Token::SUB: return "sub-t";
363 case Token::MUL: return "mul-t";
364 case Token::MOD: return "mod-t";
365 case Token::DIV: return "div-t";
366 case Token::BIT_AND: return "bit-and-t";
367 case Token::BIT_OR: return "bit-or-t";
368 case Token::BIT_XOR: return "bit-xor-t";
369 case Token::ROR: return "ror-t";
370 case Token::SHL: return "shl-t";
371 case Token::SAR: return "sar-t";
372 case Token::SHR: return "shr-t";
373 default:
374 UNREACHABLE();
375 return NULL;
376 }
377 }
378
379
380 void LChunkBuilder::Abort(BailoutReason reason) {
381 info()->set_bailout_reason(reason);
382 status_ = ABORTED;
383 }
384
385
386 LUnallocated* LChunkBuilder::ToUnallocated(Register reg) {
387 return new(zone()) LUnallocated(LUnallocated::FIXED_REGISTER,
388 Register::ToAllocationIndex(reg));
389 }
390
391
392 LUnallocated* LChunkBuilder::ToUnallocated(DoubleRegister reg) {
393 return new(zone()) LUnallocated(LUnallocated::FIXED_DOUBLE_REGISTER,
394 DoubleRegister::ToAllocationIndex(reg));
395 }
396
397
398 LOperand* LChunkBuilder::Use(HValue* value, LUnallocated* operand) {
399 if (value->EmitAtUses()) {
400 HInstruction* instr = HInstruction::cast(value);
401 VisitInstruction(instr);
402 }
403 operand->set_virtual_register(value->id());
404 return operand;
405 }
406
407
408 LOperand* LChunkBuilder::UseFixed(HValue* value, Register fixed_register) {
409 return Use(value, ToUnallocated(fixed_register));
410 }
411
412
413 LOperand* LChunkBuilder::UseFixedDouble(HValue* value,
414 DoubleRegister fixed_register) {
415 return Use(value, ToUnallocated(fixed_register));
416 }
417
418
419 LOperand* LChunkBuilder::UseRegister(HValue* value) {
420 return Use(value, new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER));
421 }
422
423
424 LOperand* LChunkBuilder::UseRegisterAndClobber(HValue* value) {
425 return Use(value, new(zone()) LUnallocated(LUnallocated::WRITABLE_REGISTER));
426 }
427
428
429 LOperand* LChunkBuilder::UseRegisterAtStart(HValue* value) {
430 return Use(value,
431 new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER,
432 LUnallocated::USED_AT_START));
433 }
434
435
436 LOperand* LChunkBuilder::UseRegisterOrConstant(HValue* value) {
437 return value->IsConstant() ? UseConstant(value) : UseRegister(value);
438 }
439
440
441 LOperand* LChunkBuilder::UseRegisterOrConstantAtStart(HValue* value) {
442 return value->IsConstant() ? UseConstant(value) : UseRegisterAtStart(value);
443 }
444
445
446 LConstantOperand* LChunkBuilder::UseConstant(HValue* value) {
447 return chunk_->DefineConstantOperand(HConstant::cast(value));
448 }
449
450
451 LOperand* LChunkBuilder::UseAny(HValue* value) {
452 return value->IsConstant()
453 ? UseConstant(value)
454 : Use(value, new(zone()) LUnallocated(LUnallocated::ANY));
455 }
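// Rough summary of the Use* helpers above: UseFixed pins a value to a specific
// register, UseRegister requires some register, UseRegisterAndClobber requires
// a register the instruction may overwrite, UseRegisterAtStart lets the input's
// register be reused for the output, the *OrConstant variants additionally
// accept constants, and UseAny accepts a register, a stack slot or a constant.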
456
457
458 LInstruction* LChunkBuilder::Define(LTemplateResultInstruction<1>* instr,
459 LUnallocated* result) {
460 result->set_virtual_register(current_instruction_->id());
461 instr->set_result(result);
462 return instr;
463 }
464
465
466 LInstruction* LChunkBuilder::DefineAsRegister(
467 LTemplateResultInstruction<1>* instr) {
468 return Define(instr,
469 new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER));
470 }
471
472
473 LInstruction* LChunkBuilder::DefineAsSpilled(
474 LTemplateResultInstruction<1>* instr, int index) {
475 return Define(instr,
476 new(zone()) LUnallocated(LUnallocated::FIXED_SLOT, index));
477 }
478
479
480 LInstruction* LChunkBuilder::DefineSameAsFirst(
481 LTemplateResultInstruction<1>* instr) {
482 return Define(instr,
483 new(zone()) LUnallocated(LUnallocated::SAME_AS_FIRST_INPUT));
484 }
485
486
487 LInstruction* LChunkBuilder::DefineFixed(
488 LTemplateResultInstruction<1>* instr, Register reg) {
489 return Define(instr, ToUnallocated(reg));
490 }
491
492
493 LInstruction* LChunkBuilder::DefineFixedDouble(
494 LTemplateResultInstruction<1>* instr, DoubleRegister reg) {
495 return Define(instr, ToUnallocated(reg));
496 }
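// Likewise, the Define* helpers above attach a result policy to an instruction:
// DefineAsRegister requires some register, DefineAsSpilled a given stack slot,
// DefineSameAsFirst reuses the first input's register, and DefineFixed /
// DefineFixedDouble pin the result to a particular (double) register.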
497
498
499 LInstruction* LChunkBuilder::MarkAsCall(LInstruction* instr,
500 HInstruction* hinstr,
501 CanDeoptimize can_deoptimize) {
502 info()->MarkAsNonDeferredCalling();
503 #ifdef DEBUG
504 instr->VerifyCall();
505 #endif
506 instr->MarkAsCall();
507 instr = AssignPointerMap(instr);
508
509 // If the instruction does not have side effects, lazy deoptimization
510 // after the call will try to deoptimize to the point before the call.
511 // Thus we still need to attach an environment to this call even if
512 // the call sequence cannot deoptimize eagerly.
513 bool needs_environment =
514 (can_deoptimize == CAN_DEOPTIMIZE_EAGERLY) ||
515 !hinstr->HasObservableSideEffects();
516 if (needs_environment && !instr->HasEnvironment()) {
517 instr = AssignEnvironment(instr);
518 }
519
520 return instr;
521 }
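// Sketch of the intent: a call that can deoptimize eagerly always gets an
// environment here; otherwise, a call with observable side effects takes its
// lazy deoptimization environment from the HSimulate that follows it (replayed
// in VisitInstruction below), while a call without observable side effects
// deoptimizes to the point before the call and needs its own environment.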
522
523
524 LInstruction* LChunkBuilder::AssignPointerMap(LInstruction* instr) {
525 ASSERT(!instr->HasPointerMap());
526 instr->set_pointer_map(new(zone()) LPointerMap(zone()));
527 return instr;
528 }
529
530
531 LUnallocated* LChunkBuilder::TempRegister() {
532 LUnallocated* operand =
533 new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER);
534 int vreg = allocator_->GetVirtualRegister();
535 if (!allocator_->AllocationOk()) {
536 Abort(kOutOfVirtualRegistersWhileTryingToAllocateTempRegister);
537 vreg = 0;
538 }
539 operand->set_virtual_register(vreg);
540 return operand;
541 }
542
543
544 int LPlatformChunk::GetNextSpillIndex() {
545 return spill_slot_count_++;
546 }
547
548
549 LOperand* LPlatformChunk::GetNextSpillSlot(RegisterKind kind) {
550 int index = GetNextSpillIndex();
551 if (kind == DOUBLE_REGISTERS) {
552 return LDoubleStackSlot::Create(index, zone());
553 } else {
554 ASSERT(kind == GENERAL_REGISTERS);
555 return LStackSlot::Create(index, zone());
556 }
557 }
558
559
560 LOperand* LChunkBuilder::FixedTemp(DoubleRegister reg) {
561 LUnallocated* operand = ToUnallocated(reg);
562 ASSERT(operand->HasFixedPolicy());
563 return operand;
564 }
565
566
567 LPlatformChunk* LChunkBuilder::Build() {
568 ASSERT(is_unused());
569 chunk_ = new(zone()) LPlatformChunk(info_, graph_);
570 LPhase phase("L_Building chunk", chunk_);
571 status_ = BUILDING;
572
573 // If compiling for OSR, reserve space for the unoptimized frame,
574 // which will be subsumed into this frame.
575 if (graph()->has_osr()) {
576 // TODO(all): GetNextSpillIndex just increments a field. It has no other
577 // side effects, so we should get rid of this loop.
578 for (int i = graph()->osr()->UnoptimizedFrameSlots(); i > 0; i--) {
579 chunk_->GetNextSpillIndex();
580 }
581 }
582
583 const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
584 for (int i = 0; i < blocks->length(); i++) {
585 DoBasicBlock(blocks->at(i));
586 if (is_aborted()) return NULL;
587 }
588 status_ = DONE;
589 return chunk_;
590 }
591
592
593 void LChunkBuilder::DoBasicBlock(HBasicBlock* block) {
594 ASSERT(is_building());
595 current_block_ = block;
596
597 if (block->IsStartBlock()) {
598 block->UpdateEnvironment(graph_->start_environment());
599 argument_count_ = 0;
600 } else if (block->predecessors()->length() == 1) {
601 // We have a single predecessor => copy environment and outgoing
602 // argument count from the predecessor.
603 ASSERT(block->phis()->length() == 0);
604 HBasicBlock* pred = block->predecessors()->at(0);
605 HEnvironment* last_environment = pred->last_environment();
606 ASSERT(last_environment != NULL);
607
608 // Only copy the environment if it is later used again.
609 if (pred->end()->SecondSuccessor() == NULL) {
610 ASSERT(pred->end()->FirstSuccessor() == block);
611 } else {
612 if ((pred->end()->FirstSuccessor()->block_id() > block->block_id()) ||
613 (pred->end()->SecondSuccessor()->block_id() > block->block_id())) {
614 last_environment = last_environment->Copy();
615 }
616 }
617 block->UpdateEnvironment(last_environment);
618 ASSERT(pred->argument_count() >= 0);
619 argument_count_ = pred->argument_count();
620 } else {
621 // We are at a state join => process phis.
622 HBasicBlock* pred = block->predecessors()->at(0);
623 // No need to copy the environment; it cannot be used later.
624 HEnvironment* last_environment = pred->last_environment();
625 for (int i = 0; i < block->phis()->length(); ++i) {
626 HPhi* phi = block->phis()->at(i);
627 if (phi->HasMergedIndex()) {
628 last_environment->SetValueAt(phi->merged_index(), phi);
629 }
630 }
631 for (int i = 0; i < block->deleted_phis()->length(); ++i) {
632 if (block->deleted_phis()->at(i) < last_environment->length()) {
633 last_environment->SetValueAt(block->deleted_phis()->at(i),
634 graph_->GetConstantUndefined());
635 }
636 }
637 block->UpdateEnvironment(last_environment);
638 // Pick up the outgoing argument count of one of the predecessors.
639 argument_count_ = pred->argument_count();
640 }
641
642 // Translate hydrogen instructions to lithium ones for the current block.
643 HInstruction* current = block->first();
644 int start = chunk_->instructions()->length();
645 while ((current != NULL) && !is_aborted()) {
646 // Code for constants in registers is generated lazily.
647 if (!current->EmitAtUses()) {
648 VisitInstruction(current);
649 }
650 current = current->next();
651 }
652 int end = chunk_->instructions()->length() - 1;
653 if (end >= start) {
654 block->set_first_instruction_index(start);
655 block->set_last_instruction_index(end);
656 }
657 block->set_argument_count(argument_count_);
658 current_block_ = NULL;
659 }
660
661
662 void LChunkBuilder::VisitInstruction(HInstruction* current) {
663 HInstruction* old_current = current_instruction_;
664 current_instruction_ = current;
665
666 LInstruction* instr = NULL;
667 if (current->CanReplaceWithDummyUses()) {
668 if (current->OperandCount() == 0) {
669 instr = DefineAsRegister(new(zone()) LDummy());
670 } else {
671 ASSERT(!current->OperandAt(0)->IsControlInstruction());
672 instr = DefineAsRegister(new(zone())
673 LDummyUse(UseAny(current->OperandAt(0))));
674 }
675 for (int i = 1; i < current->OperandCount(); ++i) {
676 if (current->OperandAt(i)->IsControlInstruction()) continue;
677 LInstruction* dummy =
678 new(zone()) LDummyUse(UseAny(current->OperandAt(i)));
679 dummy->set_hydrogen_value(current);
680 chunk_->AddInstruction(dummy, current_block_);
681 }
682 } else {
683 instr = current->CompileToLithium(this);
684 }
685
686 argument_count_ += current->argument_delta();
687 ASSERT(argument_count_ >= 0);
688
689 if (instr != NULL) {
690 // Associate the hydrogen instruction first, since we may need it for
691 // the ClobbersRegisters() or ClobbersDoubleRegisters() calls below.
692 instr->set_hydrogen_value(current);
693
694 #ifdef DEBUG
695 // Make sure that the lithium instruction has either no fixed register
696 // constraints in temps or the result OR no uses that are only used at
697 // start. If this invariant doesn't hold, the register allocator can decide
698 // to insert a split of a range immediately before the instruction due to an
699 // already allocated register needing to be used for the instruction's fixed
700 // register constraint. In this case, the register allocator won't see an
701 // interference between the split child and the use-at-start (it would if
702 // it was just a plain use), so it is free to move the split child into
703 // the same register that is used for the use-at-start.
704 // See https://code.google.com/p/chromium/issues/detail?id=201590
705 if (!(instr->ClobbersRegisters() && instr->ClobbersDoubleRegisters())) {
706 int fixed = 0;
707 int used_at_start = 0;
708 for (UseIterator it(instr); !it.Done(); it.Advance()) {
709 LUnallocated* operand = LUnallocated::cast(it.Current());
710 if (operand->IsUsedAtStart()) ++used_at_start;
711 }
712 if (instr->Output() != NULL) {
713 if (LUnallocated::cast(instr->Output())->HasFixedPolicy()) ++fixed;
714 }
715 for (TempIterator it(instr); !it.Done(); it.Advance()) {
716 LUnallocated* operand = LUnallocated::cast(it.Current());
717 if (operand->HasFixedPolicy()) ++fixed;
718 }
719 ASSERT(fixed == 0 || used_at_start == 0);
720 }
721 #endif
722
723 if (FLAG_stress_pointer_maps && !instr->HasPointerMap()) {
724 instr = AssignPointerMap(instr);
725 }
726 if (FLAG_stress_environments && !instr->HasEnvironment()) {
727 instr = AssignEnvironment(instr);
728 }
729 chunk_->AddInstruction(instr, current_block_);
730
731 if (instr->IsCall()) {
732 HValue* hydrogen_value_for_lazy_bailout = current;
733 LInstruction* instruction_needing_environment = NULL;
734 if (current->HasObservableSideEffects()) {
735 HSimulate* sim = HSimulate::cast(current->next());
736 instruction_needing_environment = instr;
737 sim->ReplayEnvironment(current_block_->last_environment());
738 hydrogen_value_for_lazy_bailout = sim;
739 }
740 LInstruction* bailout = AssignEnvironment(new(zone()) LLazyBailout());
741 bailout->set_hydrogen_value(hydrogen_value_for_lazy_bailout);
742 chunk_->AddInstruction(bailout, current_block_);
743 if (instruction_needing_environment != NULL) {
744 // Store the lazy deopt environment with the instruction if needed.
745 // Right now it is only used for LInstanceOfKnownGlobal.
746 instruction_needing_environment->
747 SetDeferredLazyDeoptimizationEnvironment(bailout->environment());
748 }
749 }
750 }
751 current_instruction_ = old_current;
752 }
753
754
755 LInstruction* LChunkBuilder::AssignEnvironment(LInstruction* instr) {
756 HEnvironment* hydrogen_env = current_block_->last_environment();
757 int argument_index_accumulator = 0;
758 ZoneList<HValue*> objects_to_materialize(0, zone());
759 instr->set_environment(CreateEnvironment(hydrogen_env,
760 &argument_index_accumulator,
761 &objects_to_materialize));
762 return instr;
763 }
764
765
766 LInstruction* LChunkBuilder::DoAbnormalExit(HAbnormalExit* instr) {
767 // The control instruction marking the end of a block that completed
768 // abruptly (e.g., threw an exception). There is nothing specific to do.
769 return NULL;
770 }
771
772
773 LInstruction* LChunkBuilder::DoArithmeticD(Token::Value op,
774 HArithmeticBinaryOperation* instr) {
775 ASSERT(instr->representation().IsDouble());
776 ASSERT(instr->left()->representation().IsDouble());
777 ASSERT(instr->right()->representation().IsDouble());
778
779 if (op == Token::MOD) {
780 LOperand* left = UseFixedDouble(instr->left(), d0);
781 LOperand* right = UseFixedDouble(instr->right(), d1);
782 LArithmeticD* result = new(zone()) LArithmeticD(Token::MOD, left, right);
783 return MarkAsCall(DefineFixedDouble(result, d0), instr);
784 } else {
785 LOperand* left = UseRegisterAtStart(instr->left());
786 LOperand* right = UseRegisterAtStart(instr->right());
787 LArithmeticD* result = new(zone()) LArithmeticD(op, left, right);
788 return DefineAsRegister(result);
789 }
790 }
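// Note: MOD is lowered as a call above, presumably because A64 has no single
// instruction for a floating-point remainder; its operands are therefore fixed
// to d0/d1 and its result to d0, while the other double operations stay fully
// register-allocated.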
791
792
793 LInstruction* LChunkBuilder::DoArithmeticT(Token::Value op,
794 HBinaryOperation* instr) {
795 ASSERT((op == Token::ADD) || (op == Token::SUB) || (op == Token::MUL) ||
796 (op == Token::DIV) || (op == Token::MOD) || (op == Token::SHR) ||
797 (op == Token::SHL) || (op == Token::SAR) || (op == Token::ROR) ||
798 (op == Token::BIT_OR) || (op == Token::BIT_AND) ||
799 (op == Token::BIT_XOR));
800 HValue* left = instr->left();
801 HValue* right = instr->right();
802
803 // TODO(jbramley): Once we've implemented smi support for all arithmetic
804 // operations, these assertions should check IsTagged().
805 ASSERT(instr->representation().IsSmiOrTagged());
806 ASSERT(left->representation().IsSmiOrTagged());
807 ASSERT(right->representation().IsSmiOrTagged());
808
809 LOperand* context = UseFixed(instr->context(), cp);
810 LOperand* left_operand = UseFixed(left, x1);
811 LOperand* right_operand = UseFixed(right, x0);
812 LArithmeticT* result =
813 new(zone()) LArithmeticT(op, context, left_operand, right_operand);
814 return MarkAsCall(DefineFixed(result, x0), instr);
815 }
816
817
818 LInstruction* LChunkBuilder::DoBoundsCheckBaseIndexInformation(
819 HBoundsCheckBaseIndexInformation* instr) {
820 UNREACHABLE();
821 return NULL;
822 }
823
824
825 LInstruction* LChunkBuilder::DoAccessArgumentsAt(HAccessArgumentsAt* instr) {
826 info()->MarkAsRequiresFrame();
827 LOperand* args = NULL;
828 LOperand* length = NULL;
829 LOperand* index = NULL;
830
831 if (instr->length()->IsConstant() && instr->index()->IsConstant()) {
832 args = UseRegisterAtStart(instr->arguments());
833 length = UseConstant(instr->length());
834 index = UseConstant(instr->index());
835 } else {
836 args = UseRegister(instr->arguments());
837 length = UseRegisterAtStart(instr->length());
838 index = UseRegisterOrConstantAtStart(instr->index());
839 }
840
841 return DefineAsRegister(new(zone()) LAccessArgumentsAt(args, length, index));
842 }
843
844
845 LInstruction* LChunkBuilder::DoAdd(HAdd* instr) {
846 if (instr->representation().IsSmiOrInteger32()) {
847 ASSERT(instr->left()->representation().Equals(instr->representation()));
848 ASSERT(instr->right()->representation().Equals(instr->representation()));
849 LOperand* left = UseRegisterAtStart(instr->BetterLeftOperand());
850 LOperand* right =
851 UseRegisterOrConstantAtStart(instr->BetterRightOperand());
852 LInstruction* result = instr->representation().IsSmi() ?
853 DefineAsRegister(new(zone()) LAddS(left, right)) :
854 DefineAsRegister(new(zone()) LAddI(left, right));
855 if (instr->CheckFlag(HValue::kCanOverflow)) {
856 result = AssignEnvironment(result);
857 }
858 return result;
859 } else if (instr->representation().IsExternal()) {
860 ASSERT(instr->left()->representation().IsExternal());
861 ASSERT(instr->right()->representation().IsInteger32());
862 ASSERT(!instr->CheckFlag(HValue::kCanOverflow));
863 LOperand* left = UseRegisterAtStart(instr->left());
864 LOperand* right = UseRegisterOrConstantAtStart(instr->right());
865 return DefineAsRegister(new(zone()) LAddE(left, right));
866 } else if (instr->representation().IsDouble()) {
867 return DoArithmeticD(Token::ADD, instr);
868 } else {
869 ASSERT(instr->representation().IsTagged());
870 return DoArithmeticT(Token::ADD, instr);
871 }
872 }
873
874
875 LInstruction* LChunkBuilder::DoAllocate(HAllocate* instr) {
876 info()->MarkAsDeferredCalling();
877 LOperand* context = UseAny(instr->context());
878 LOperand* size = UseRegisterOrConstant(instr->size());
879 LOperand* temp1 = TempRegister();
880 LOperand* temp2 = TempRegister();
881 LOperand* temp3 = instr->MustPrefillWithFiller() ? TempRegister() : NULL;
882 LAllocate* result = new(zone()) LAllocate(context, size, temp1, temp2, temp3);
883 return AssignPointerMap(DefineAsRegister(result));
884 }
885
886
887 LInstruction* LChunkBuilder::DoApplyArguments(HApplyArguments* instr) {
888 LOperand* function = UseFixed(instr->function(), x1);
889 LOperand* receiver = UseFixed(instr->receiver(), x0);
890 LOperand* length = UseFixed(instr->length(), x2);
891 LOperand* elements = UseFixed(instr->elements(), x3);
892 LApplyArguments* result = new(zone()) LApplyArguments(function,
893 receiver,
894 length,
895 elements);
896 return MarkAsCall(DefineFixed(result, x0), instr, CAN_DEOPTIMIZE_EAGERLY);
897 }
898
899
900 LInstruction* LChunkBuilder::DoArgumentsElements(HArgumentsElements* instr) {
901 info()->MarkAsRequiresFrame();
902 LOperand* temp = instr->from_inlined() ? NULL : TempRegister();
903 return DefineAsRegister(new(zone()) LArgumentsElements(temp));
904 }
905
906
907 LInstruction* LChunkBuilder::DoArgumentsLength(HArgumentsLength* instr) {
908 info()->MarkAsRequiresFrame();
909 LOperand* value = UseRegisterAtStart(instr->value());
910 return DefineAsRegister(new(zone()) LArgumentsLength(value));
911 }
912
913
914 LInstruction* LChunkBuilder::DoArgumentsObject(HArgumentsObject* instr) {
915 // There are no real uses of the arguments object.
916 // arguments.length and element access are supported directly on
917 // stack arguments, and any real arguments object use causes a bailout.
918 // So this value is never used.
919 return NULL;
920 }
921
922
923 LInstruction* LChunkBuilder::DoBitwise(HBitwise* instr) {
924 if (instr->representation().IsSmiOrInteger32()) {
925 ASSERT(instr->left()->representation().Equals(instr->representation()));
926 ASSERT(instr->right()->representation().Equals(instr->representation()));
927 ASSERT(instr->CheckFlag(HValue::kTruncatingToInt32));
928
929 LOperand* left = UseRegisterAtStart(instr->BetterLeftOperand());
930 LOperand* right =
931 UseRegisterOrConstantAtStart(instr->BetterRightOperand());
932 return instr->representation().IsSmi() ?
933 DefineAsRegister(new(zone()) LBitS(left, right)) :
934 DefineAsRegister(new(zone()) LBitI(left, right));
935 } else {
936 return DoArithmeticT(instr->op(), instr);
937 }
938 }
939
940
941 LInstruction* LChunkBuilder::DoBlockEntry(HBlockEntry* instr) {
942 // V8 expects a label to be generated for each basic block.
943 // This is used in some places like LAllocator::IsBlockBoundary
944 // in lithium-allocator.cc.
945 return new(zone()) LLabel(instr->block());
946 }
947
948
949 LInstruction* LChunkBuilder::DoBoundsCheck(HBoundsCheck* instr) {
950 LOperand* value = UseRegisterOrConstantAtStart(instr->index());
951 LOperand* length = UseRegister(instr->length());
952 return AssignEnvironment(new(zone()) LBoundsCheck(value, length));
953 }
954
955
956 LInstruction* LChunkBuilder::DoBranch(HBranch* instr) {
957 LInstruction* goto_instr = CheckElideControlInstruction(instr);
958 if (goto_instr != NULL) return goto_instr;
959
960 HValue* value = instr->value();
961 Representation r = value->representation();
962 HType type = value->type();
963
964 if (r.IsInteger32() || r.IsSmi() || r.IsDouble()) {
965 // These representations have simple checks that cannot deoptimize.
966 return new(zone()) LBranch(UseRegister(value), NULL, NULL);
967 } else {
968 ASSERT(r.IsTagged());
969 if (type.IsBoolean() || type.IsSmi() || type.IsJSArray() ||
970 type.IsHeapNumber()) {
971 // These types have simple checks that cannot deoptimize.
972 return new(zone()) LBranch(UseRegister(value), NULL, NULL);
973 }
974
975 if (type.IsString()) {
976 // This type cannot deoptimize, but needs a scratch register.
977 return new(zone()) LBranch(UseRegister(value), TempRegister(), NULL);
978 }
979
980 ToBooleanStub::Types expected = instr->expected_input_types();
981 bool needs_temps = expected.NeedsMap() || expected.IsEmpty();
982 LOperand* temp1 = needs_temps ? TempRegister() : NULL;
983 LOperand* temp2 = needs_temps ? TempRegister() : NULL;
984
985 if (expected.IsGeneric() || expected.IsEmpty()) {
986 // The generic case cannot deoptimize because it already supports every
987 // possible input type.
988 ASSERT(needs_temps);
989 return new(zone()) LBranch(UseRegister(value), temp1, temp2);
990 } else {
991 return AssignEnvironment(
992 new(zone()) LBranch(UseRegister(value), temp1, temp2));
993 }
994 }
995 }
996
997
998 LInstruction* LChunkBuilder::DoCallJSFunction(
999 HCallJSFunction* instr) {
1000 LOperand* function = UseFixed(instr->function(), x1);
1001
1002 LCallJSFunction* result = new(zone()) LCallJSFunction(function);
1003
1004 return MarkAsCall(DefineFixed(result, x0), instr);
1005 }
1006
1007
1008 LInstruction* LChunkBuilder::DoCallWithDescriptor(
1009 HCallWithDescriptor* instr) {
1010 const CallInterfaceDescriptor* descriptor = instr->descriptor();
1011
1012 LOperand* target = UseRegisterOrConstantAtStart(instr->target());
1013 ZoneList<LOperand*> ops(instr->OperandCount(), zone());
1014 ops.Add(target, zone());
1015 for (int i = 1; i < instr->OperandCount(); i++) {
1016 LOperand* op = UseFixed(instr->OperandAt(i),
1017 descriptor->GetParameterRegister(i - 1));
1018 ops.Add(op, zone());
1019 }
1020
1021 LCallWithDescriptor* result = new(zone()) LCallWithDescriptor(descriptor,
1022 ops,
1023 zone());
1024 return MarkAsCall(DefineFixed(result, x0), instr);
1025 }
1026
1027
1028 LInstruction* LChunkBuilder::DoCallFunction(HCallFunction* instr) {
1029 LOperand* context = UseFixed(instr->context(), cp);
1030 LOperand* function = UseFixed(instr->function(), x1);
1031 LCallFunction* call = new(zone()) LCallFunction(context, function);
1032 return MarkAsCall(DefineFixed(call, x0), instr);
1033 }
1034
1035
1036 LInstruction* LChunkBuilder::DoCallNew(HCallNew* instr) {
1037 LOperand* context = UseFixed(instr->context(), cp);
1038 // The call to CallConstructStub will expect the constructor to be in x1.
1039 LOperand* constructor = UseFixed(instr->constructor(), x1);
1040 LCallNew* result = new(zone()) LCallNew(context, constructor);
1041 return MarkAsCall(DefineFixed(result, x0), instr);
1042 }
1043
1044
1045 LInstruction* LChunkBuilder::DoCallNewArray(HCallNewArray* instr) {
1046 LOperand* context = UseFixed(instr->context(), cp);
1047 // The call to ArrayConstructCode will expect the constructor to be in x1.
1048 LOperand* constructor = UseFixed(instr->constructor(), x1);
1049 LCallNewArray* result = new(zone()) LCallNewArray(context, constructor);
1050 return MarkAsCall(DefineFixed(result, x0), instr);
1051 }
1052
1053
1054 LInstruction* LChunkBuilder::DoCallRuntime(HCallRuntime* instr) {
1055 LOperand* context = UseFixed(instr->context(), cp);
1056 return MarkAsCall(DefineFixed(new(zone()) LCallRuntime(context), x0), instr);
1057 }
1058
1059
1060 LInstruction* LChunkBuilder::DoCallStub(HCallStub* instr) {
1061 LOperand* context = UseFixed(instr->context(), cp);
1062 return MarkAsCall(DefineFixed(new(zone()) LCallStub(context), x0), instr);
1063 }
1064
1065
1066 LInstruction* LChunkBuilder::DoCapturedObject(HCapturedObject* instr) {
1067 instr->ReplayEnvironment(current_block_->last_environment());
1068
1069 // There are no real uses of a captured object.
1070 return NULL;
1071 }
1072
1073
1074 LInstruction* LChunkBuilder::DoChange(HChange* instr) {
1075 Representation from = instr->from();
1076 Representation to = instr->to();
1077
1078 if (from.IsSmi()) {
1079 if (to.IsTagged()) {
1080 LOperand* value = UseRegister(instr->value());
1081 return DefineSameAsFirst(new(zone()) LDummyUse(value));
1082 }
1083 from = Representation::Tagged();
1084 }
1085
1086 if (from.IsTagged()) {
1087 if (to.IsDouble()) {
1088 LOperand* value = UseRegister(instr->value());
1089 LOperand* temp = TempRegister();
1090 LNumberUntagD* res = new(zone()) LNumberUntagD(value, temp);
1091 return AssignEnvironment(DefineAsRegister(res));
1092 } else if (to.IsSmi()) {
1093 LOperand* value = UseRegister(instr->value());
1094 if (instr->value()->type().IsSmi()) {
1095 return DefineSameAsFirst(new(zone()) LDummyUse(value));
1096 }
1097 return AssignEnvironment(DefineSameAsFirst(new(zone()) LCheckSmi(value)));
1098 } else {
1099 ASSERT(to.IsInteger32());
1100 LInstruction* res = NULL;
1101
1102 if (instr->value()->type().IsSmi() ||
1103 instr->value()->representation().IsSmi()) {
1104 LOperand* value = UseRegisterAtStart(instr->value());
1105 res = DefineAsRegister(new(zone()) LSmiUntag(value, false));
1106 } else {
1107 LOperand* value = UseRegister(instr->value());
1108 LOperand* temp1 = TempRegister();
1109 LOperand* temp2 =
1110 instr->CanTruncateToInt32() ? TempRegister() : FixedTemp(d24);
1111 res = DefineAsRegister(new(zone()) LTaggedToI(value, temp1, temp2));
1112 res = AssignEnvironment(res);
1113 }
1114
1115 return res;
1116 }
1117 } else if (from.IsDouble()) {
1118 if (to.IsTagged()) {
1119 info()->MarkAsDeferredCalling();
1120 LOperand* value = UseRegister(instr->value());
1121 LOperand* temp1 = TempRegister();
1122 LOperand* temp2 = TempRegister();
1123
1124 LNumberTagD* result = new(zone()) LNumberTagD(value, temp1, temp2);
1125 return AssignPointerMap(DefineAsRegister(result));
1126 } else {
1127 ASSERT(to.IsSmi() || to.IsInteger32());
1128 LOperand* value = UseRegister(instr->value());
1129
1130 if (instr->CanTruncateToInt32()) {
1131 LTruncateDoubleToIntOrSmi* result =
1132 new(zone()) LTruncateDoubleToIntOrSmi(value);
1133 return DefineAsRegister(result);
1134 } else {
1135 LDoubleToIntOrSmi* result = new(zone()) LDoubleToIntOrSmi(value);
1136 return AssignEnvironment(DefineAsRegister(result));
1137 }
1138 }
1139 } else if (from.IsInteger32()) {
1140 info()->MarkAsDeferredCalling();
1141 if (to.IsTagged()) {
1142 if (instr->value()->CheckFlag(HInstruction::kUint32)) {
1143 LOperand* value = UseRegister(instr->value());
1144 LNumberTagU* result = new(zone()) LNumberTagU(value,
1145 TempRegister(),
1146 TempRegister());
1147 return AssignEnvironment(AssignPointerMap(DefineAsRegister(result)));
1148 } else {
1149 STATIC_ASSERT((kMinInt == Smi::kMinValue) &&
1150 (kMaxInt == Smi::kMaxValue));
1151 LOperand* value = UseRegisterAtStart(instr->value());
1152 return DefineAsRegister(new(zone()) LSmiTag(value));
1153 }
1154 } else if (to.IsSmi()) {
1155 LOperand* value = UseRegisterAtStart(instr->value());
1156 LInstruction* result = DefineAsRegister(new(zone()) LSmiTag(value));
1157 if (instr->value()->CheckFlag(HInstruction::kUint32)) {
1158 result = AssignEnvironment(result);
1159 }
1160 return result;
1161 } else {
1162 ASSERT(to.IsDouble());
1163 if (instr->value()->CheckFlag(HInstruction::kUint32)) {
1164 return DefineAsRegister(
1165 new(zone()) LUint32ToDouble(UseRegisterAtStart(instr->value())));
1166 } else {
1167 return DefineAsRegister(
1168 new(zone()) LInteger32ToDouble(UseRegisterAtStart(instr->value())));
1169 }
1170 }
1171 }
1172
1173 UNREACHABLE();
1174 return NULL;
1175 }
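// Rough summary of the conversions handled above (not exhaustive):
//   Smi    -> Tagged            : no-op (LDummyUse).
//   Tagged -> Double/Smi/Int32  : may deoptimize, so an environment is attached.
//   Double -> Tagged            : allocates a heap number (deferred call).
//   Int32  -> Tagged/Smi/Double : tagging of uint32 values may need a deopt.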
1176
1177
1178 LInstruction* LChunkBuilder::DoCheckValue(HCheckValue* instr) {
1179 LOperand* value = UseRegisterAtStart(instr->value());
1180 return AssignEnvironment(new(zone()) LCheckValue(value));
1181 }
1182
1183
1184 LInstruction* LChunkBuilder::DoCheckInstanceType(HCheckInstanceType* instr) {
1185 LOperand* value = UseRegisterAtStart(instr->value());
1186 LOperand* temp = TempRegister();
1187 LInstruction* result = new(zone()) LCheckInstanceType(value, temp);
1188 return AssignEnvironment(result);
1189 }
1190
1191
1192 LInstruction* LChunkBuilder::DoCheckMaps(HCheckMaps* instr) {
1193 if (instr->CanOmitMapChecks()) {
1194 // LCheckMaps does nothing in this case.
1195 return new(zone()) LCheckMaps(NULL);
1196 } else {
1197 LOperand* value = UseRegisterAtStart(instr->value());
1198 LOperand* temp = TempRegister();
1199
1200 if (instr->has_migration_target()) {
1201 info()->MarkAsDeferredCalling();
1202 LInstruction* result = new(zone()) LCheckMaps(value, temp);
1203 return AssignPointerMap(AssignEnvironment(result));
1204 } else {
1205 return AssignEnvironment(new(zone()) LCheckMaps(value, temp));
1206 }
1207 }
1208 }
1209
1210
1211 LInstruction* LChunkBuilder::DoCheckHeapObject(HCheckHeapObject* instr) {
1212 LOperand* value = UseRegisterAtStart(instr->value());
1213 return AssignEnvironment(new(zone()) LCheckNonSmi(value));
1214 }
1215
1216
1217 LInstruction* LChunkBuilder::DoCheckSmi(HCheckSmi* instr) {
1218 LOperand* value = UseRegisterAtStart(instr->value());
1219 return AssignEnvironment(new(zone()) LCheckSmi(value));
1220 }
1221
1222
1223 LInstruction* LChunkBuilder::DoClampToUint8(HClampToUint8* instr) {
1224 HValue* value = instr->value();
1225 Representation input_rep = value->representation();
1226 LOperand* reg = UseRegister(value);
1227 if (input_rep.IsDouble()) {
1228 return DefineAsRegister(new(zone()) LClampDToUint8(reg));
1229 } else if (input_rep.IsInteger32()) {
1230 return DefineAsRegister(new(zone()) LClampIToUint8(reg));
1231 } else {
1232 ASSERT(input_rep.IsSmiOrTagged());
1233 return AssignEnvironment(
1234 DefineAsRegister(new(zone()) LClampTToUint8(reg,
1235 TempRegister(),
1236 FixedTemp(d24))));
1237 }
1238 }
1239
1240
1241 LInstruction* LChunkBuilder::DoClassOfTestAndBranch(
1242 HClassOfTestAndBranch* instr) {
1243 ASSERT(instr->value()->representation().IsTagged());
1244 LOperand* value = UseRegisterAtStart(instr->value());
1245 return new(zone()) LClassOfTestAndBranch(value,
1246 TempRegister(),
1247 TempRegister());
1248 }
1249
1250
1251 LInstruction* LChunkBuilder::DoCompareNumericAndBranch(
1252 HCompareNumericAndBranch* instr) {
1253 Representation r = instr->representation();
1254
1255 if (r.IsSmiOrInteger32()) {
1256 ASSERT(instr->left()->representation().Equals(r));
1257 ASSERT(instr->right()->representation().Equals(r));
1258 LOperand* left = UseRegisterOrConstantAtStart(instr->left());
1259 LOperand* right = UseRegisterOrConstantAtStart(instr->right());
1260 return new(zone()) LCompareNumericAndBranch(left, right);
1261 } else {
1262 ASSERT(r.IsDouble());
1263 ASSERT(instr->left()->representation().IsDouble());
1264 ASSERT(instr->right()->representation().IsDouble());
1265 // TODO(all): In fact the only case that we can handle more efficiently is
1266 // when one of the operands is the constant 0. Currently the MacroAssembler
1267 // will be able to cope with any constant by loading it into an internal
1268 // scratch register. This means that if the constant is used more than once,
1269 // it will be loaded multiple times. Unfortunately crankshaft already
1270 // duplicates constant loads, but we should modify the code below once this
1271 // issue has been addressed in crankshaft.
1272 LOperand* left = UseRegisterOrConstantAtStart(instr->left());
1273 LOperand* right = UseRegisterOrConstantAtStart(instr->right());
1274 return new(zone()) LCompareNumericAndBranch(left, right);
1275 }
1276 }
1277
1278
1279 LInstruction* LChunkBuilder::DoCompareGeneric(HCompareGeneric* instr) {
1280 ASSERT(instr->left()->representation().IsTagged());
1281 ASSERT(instr->right()->representation().IsTagged());
1282 LOperand* context = UseFixed(instr->context(), cp);
1283 LOperand* left = UseFixed(instr->left(), x1);
1284 LOperand* right = UseFixed(instr->right(), x0);
1285 LCmpT* result = new(zone()) LCmpT(context, left, right);
1286 return MarkAsCall(DefineFixed(result, x0), instr);
1287 }
1288
1289
1290 LInstruction* LChunkBuilder::DoCompareHoleAndBranch(
1291 HCompareHoleAndBranch* instr) {
1292 LOperand* value = UseRegister(instr->value());
1293 if (instr->representation().IsTagged()) {
1294 return new(zone()) LCmpHoleAndBranchT(value);
1295 } else {
1296 LOperand* temp = TempRegister();
1297 return new(zone()) LCmpHoleAndBranchD(value, temp);
1298 }
1299 }
1300
1301
1302 LInstruction* LChunkBuilder::DoCompareObjectEqAndBranch(
1303 HCompareObjectEqAndBranch* instr) {
1304 LInstruction* goto_instr = CheckElideControlInstruction(instr);
1305 if (goto_instr != NULL) return goto_instr;
1306
1307 LOperand* left = UseRegisterAtStart(instr->left());
1308 LOperand* right = UseRegisterAtStart(instr->right());
1309 return new(zone()) LCmpObjectEqAndBranch(left, right);
1310 }
1311
1312
1313 LInstruction* LChunkBuilder::DoCompareMap(HCompareMap* instr) {
1314 LInstruction* goto_instr = CheckElideControlInstruction(instr);
1315 if (goto_instr != NULL) return goto_instr;
1316
1317 ASSERT(instr->value()->representation().IsTagged());
1318 LOperand* value = UseRegisterAtStart(instr->value());
1319 LOperand* temp = TempRegister();
1320 return new(zone()) LCmpMapAndBranch(value, temp);
1321 }
1322
1323
1324 LInstruction* LChunkBuilder::DoConstant(HConstant* instr) {
1325 Representation r = instr->representation();
1326 if (r.IsSmi()) {
1327 return DefineAsRegister(new(zone()) LConstantS);
1328 } else if (r.IsInteger32()) {
1329 return DefineAsRegister(new(zone()) LConstantI);
1330 } else if (r.IsDouble()) {
1331 return DefineAsRegister(new(zone()) LConstantD);
1332 } else if (r.IsExternal()) {
1333 return DefineAsRegister(new(zone()) LConstantE);
1334 } else if (r.IsTagged()) {
1335 return DefineAsRegister(new(zone()) LConstantT);
1336 } else {
1337 UNREACHABLE();
1338 return NULL;
1339 }
1340 }
1341
1342
1343 LInstruction* LChunkBuilder::DoContext(HContext* instr) {
1344 if (instr->HasNoUses()) return NULL;
1345
1346 if (info()->IsStub()) {
1347 return DefineFixed(new(zone()) LContext, cp);
1348 }
1349
1350 return DefineAsRegister(new(zone()) LContext);
1351 }
1352
1353
1354 LInstruction* LChunkBuilder::DoDateField(HDateField* instr) {
1355 LOperand* object = UseFixed(instr->value(), x0);
1356 LDateField* result = new(zone()) LDateField(object, instr->index());
1357 return MarkAsCall(DefineFixed(result, x0), instr, CAN_DEOPTIMIZE_EAGERLY);
1358 }
1359
1360
1361 LInstruction* LChunkBuilder::DoDebugBreak(HDebugBreak* instr) {
1362 return new(zone()) LDebugBreak();
1363 }
1364
1365
1366 LInstruction* LChunkBuilder::DoDeclareGlobals(HDeclareGlobals* instr) {
1367 LOperand* context = UseFixed(instr->context(), cp);
1368 return MarkAsCall(new(zone()) LDeclareGlobals(context), instr);
1369 }
1370
1371
1372 LInstruction* LChunkBuilder::DoDeoptimize(HDeoptimize* instr) {
1373 return AssignEnvironment(new(zone()) LDeoptimize);
1374 }
1375
1376
1377 LInstruction* LChunkBuilder::DoDivByPowerOf2I(HDiv* instr) {
1378 ASSERT(instr->representation().IsInteger32());
1379 ASSERT(instr->left()->representation().Equals(instr->representation()));
1380 ASSERT(instr->right()->representation().Equals(instr->representation()));
1381 LOperand* dividend = UseRegister(instr->left());
1382 int32_t divisor = instr->right()->GetInteger32Constant();
1383 LInstruction* result = DefineAsRegister(new(zone()) LDivByPowerOf2I(
1384 dividend, divisor));
1385 if ((instr->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) ||
1386 (instr->CheckFlag(HValue::kCanOverflow) && divisor == -1) ||
1387 (!instr->CheckFlag(HInstruction::kAllUsesTruncatingToInt32) &&
1388 divisor != 1 && divisor != -1)) {
1389 result = AssignEnvironment(result);
1390 }
1391 return result;
1392 }
1393
1394
1395 LInstruction* LChunkBuilder::DoDivByConstI(HDiv* instr) {
1396 ASSERT(instr->representation().IsInteger32());
1397 ASSERT(instr->left()->representation().Equals(instr->representation()));
1398 ASSERT(instr->right()->representation().Equals(instr->representation()));
1399 LOperand* dividend = UseRegister(instr->left());
1400 int32_t divisor = instr->right()->GetInteger32Constant();
1401 LOperand* temp = instr->CheckFlag(HInstruction::kAllUsesTruncatingToInt32)
1402 ? NULL : TempRegister();
1403 LInstruction* result = DefineAsRegister(new(zone()) LDivByConstI(
1404 dividend, divisor, temp));
1405 if (divisor == 0 ||
1406 (instr->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) ||
1407 !instr->CheckFlag(HInstruction::kAllUsesTruncatingToInt32)) {
1408 result = AssignEnvironment(result);
1409 }
1410 return result;
1411 }
1412
1413
1414 LInstruction* LChunkBuilder::DoDivI(HBinaryOperation* instr) {
1415 ASSERT(instr->representation().IsSmiOrInteger32());
1416 ASSERT(instr->left()->representation().Equals(instr->representation()));
1417 ASSERT(instr->right()->representation().Equals(instr->representation()));
1418 LOperand* dividend = UseRegister(instr->left());
1419 LOperand* divisor = UseRegister(instr->right());
1420 LOperand* temp = instr->CheckFlag(HInstruction::kAllUsesTruncatingToInt32)
1421 ? NULL : TempRegister();
1422 LDivI* div = new(zone()) LDivI(dividend, divisor, temp);
1423 return AssignEnvironment(DefineAsRegister(div));
1424 }
1425
1426
1427 LInstruction* LChunkBuilder::DoDiv(HDiv* instr) {
1428 if (instr->representation().IsSmiOrInteger32()) {
1429 if (instr->RightIsPowerOf2()) {
1430 return DoDivByPowerOf2I(instr);
1431 } else if (instr->right()->IsConstant()) {
1432 return DoDivByConstI(instr);
1433 } else {
1434 return DoDivI(instr);
1435 }
1436 } else if (instr->representation().IsDouble()) {
1437 return DoArithmeticD(Token::DIV, instr);
1438 } else {
1439 return DoArithmeticT(Token::DIV, instr);
1440 }
1441 }
1442
1443
1444 LInstruction* LChunkBuilder::DoDummyUse(HDummyUse* instr) {
1445 return DefineAsRegister(new(zone()) LDummyUse(UseAny(instr->value())));
1446 }
1447
1448
1449 LInstruction* LChunkBuilder::DoEnterInlined(HEnterInlined* instr) {
1450 HEnvironment* outer = current_block_->last_environment();
1451 HConstant* undefined = graph()->GetConstantUndefined();
1452 HEnvironment* inner = outer->CopyForInlining(instr->closure(),
1453 instr->arguments_count(),
1454 instr->function(),
1455 undefined,
1456 instr->inlining_kind());
1457 // Only replay binding of arguments object if it wasn't removed from graph.
1458 if ((instr->arguments_var() != NULL) &&
1459 instr->arguments_object()->IsLinked()) {
1460 inner->Bind(instr->arguments_var(), instr->arguments_object());
1461 }
1462 inner->set_entry(instr);
1463 current_block_->UpdateEnvironment(inner);
1464 chunk_->AddInlinedClosure(instr->closure());
1465 return NULL;
1466 }
1467
1468
1469 LInstruction* LChunkBuilder::DoEnvironmentMarker(HEnvironmentMarker* instr) {
1470 UNREACHABLE();
1471 return NULL;
1472 }
1473
1474
1475 LInstruction* LChunkBuilder::DoForceRepresentation(
1476 HForceRepresentation* instr) {
1477 // All HForceRepresentation instructions should be eliminated in the
1478 // representation change phase of Hydrogen.
1479 UNREACHABLE();
1480 return NULL;
1481 }
1482
1483
1484 LInstruction* LChunkBuilder::DoFunctionLiteral(HFunctionLiteral* instr) {
1485 LOperand* context = UseFixed(instr->context(), cp);
1486 return MarkAsCall(
1487 DefineFixed(new(zone()) LFunctionLiteral(context), x0), instr);
1488 }
1489
1490
1491 LInstruction* LChunkBuilder::DoGetCachedArrayIndex(
1492 HGetCachedArrayIndex* instr) {
1493 ASSERT(instr->value()->representation().IsTagged());
1494 LOperand* value = UseRegisterAtStart(instr->value());
1495 return DefineAsRegister(new(zone()) LGetCachedArrayIndex(value));
1496 }
1497
1498
1499 LInstruction* LChunkBuilder::DoGoto(HGoto* instr) {
1500 return new(zone()) LGoto(instr->FirstSuccessor());
1501 }
1502
1503
1504 LInstruction* LChunkBuilder::DoHasCachedArrayIndexAndBranch(
1505 HHasCachedArrayIndexAndBranch* instr) {
1506 ASSERT(instr->value()->representation().IsTagged());
1507 return new(zone()) LHasCachedArrayIndexAndBranch(
1508 UseRegisterAtStart(instr->value()), TempRegister());
1509 }
1510
1511
1512 LInstruction* LChunkBuilder::DoHasInstanceTypeAndBranch(
1513 HHasInstanceTypeAndBranch* instr) {
1514 ASSERT(instr->value()->representation().IsTagged());
1515 LOperand* value = UseRegisterAtStart(instr->value());
1516 return new(zone()) LHasInstanceTypeAndBranch(value, TempRegister());
1517 }
1518
1519
1520 LInstruction* LChunkBuilder::DoInnerAllocatedObject(
1521 HInnerAllocatedObject* instr) {
1522 LOperand* base_object = UseRegisterAtStart(instr->base_object());
1523 LOperand* offset = UseRegisterOrConstantAtStart(instr->offset());
1524 return DefineAsRegister(
1525 new(zone()) LInnerAllocatedObject(base_object, offset));
1526 }
1527
1528
1529 LInstruction* LChunkBuilder::DoInstanceOf(HInstanceOf* instr) {
1530 LOperand* context = UseFixed(instr->context(), cp);
1531 LInstanceOf* result = new(zone()) LInstanceOf(
1532 context,
1533 UseFixed(instr->left(), InstanceofStub::left()),
1534 UseFixed(instr->right(), InstanceofStub::right()));
1535 return MarkAsCall(DefineFixed(result, x0), instr);
1536 }
1537
1538
1539 LInstruction* LChunkBuilder::DoInstanceOfKnownGlobal(
1540 HInstanceOfKnownGlobal* instr) {
1541 LInstanceOfKnownGlobal* result = new(zone()) LInstanceOfKnownGlobal(
1542 UseFixed(instr->context(), cp),
1543 UseFixed(instr->left(), InstanceofStub::left()));
1544 return MarkAsCall(DefineFixed(result, x0), instr);
1545 }
1546
1547
1548 LInstruction* LChunkBuilder::DoInvokeFunction(HInvokeFunction* instr) {
1549 LOperand* context = UseFixed(instr->context(), cp);
1550 // The function is required (by MacroAssembler::InvokeFunction) to be in x1.
1551 LOperand* function = UseFixed(instr->function(), x1);
1552 LInvokeFunction* result = new(zone()) LInvokeFunction(context, function);
1553 return MarkAsCall(DefineFixed(result, x0), instr, CANNOT_DEOPTIMIZE_EAGERLY);
1554 }
1555
1556
1557 LInstruction* LChunkBuilder::DoIsConstructCallAndBranch(
1558 HIsConstructCallAndBranch* instr) {
1559 return new(zone()) LIsConstructCallAndBranch(TempRegister(), TempRegister());
1560 }
1561
1562
1563 LInstruction* LChunkBuilder::DoCompareMinusZeroAndBranch(
1564 HCompareMinusZeroAndBranch* instr) {
1565 LInstruction* goto_instr = CheckElideControlInstruction(instr);
1566 if (goto_instr != NULL) return goto_instr;
1567 LOperand* value = UseRegister(instr->value());
1568 LOperand* scratch = TempRegister();
1569 return new(zone()) LCompareMinusZeroAndBranch(value, scratch);
1570 }
1571
1572
1573 LInstruction* LChunkBuilder::DoIsObjectAndBranch(HIsObjectAndBranch* instr) {
1574 ASSERT(instr->value()->representation().IsTagged());
1575 LOperand* value = UseRegisterAtStart(instr->value());
1576 LOperand* temp1 = TempRegister();
1577 LOperand* temp2 = TempRegister();
1578 return new(zone()) LIsObjectAndBranch(value, temp1, temp2);
1579 }
1580
1581
1582 LInstruction* LChunkBuilder::DoIsStringAndBranch(HIsStringAndBranch* instr) {
1583 ASSERT(instr->value()->representation().IsTagged());
1584 LOperand* value = UseRegisterAtStart(instr->value());
1585 LOperand* temp = TempRegister();
1586 return new(zone()) LIsStringAndBranch(value, temp);
1587 }
1588
1589
1590 LInstruction* LChunkBuilder::DoIsSmiAndBranch(HIsSmiAndBranch* instr) {
1591 ASSERT(instr->value()->representation().IsTagged());
1592 return new(zone()) LIsSmiAndBranch(UseRegisterAtStart(instr->value()));
1593 }
1594
1595
1596 LInstruction* LChunkBuilder::DoIsUndetectableAndBranch(
1597 HIsUndetectableAndBranch* instr) {
1598 ASSERT(instr->value()->representation().IsTagged());
1599 LOperand* value = UseRegisterAtStart(instr->value());
1600 return new(zone()) LIsUndetectableAndBranch(value, TempRegister());
1601 }
1602
1603
1604 LInstruction* LChunkBuilder::DoLeaveInlined(HLeaveInlined* instr) {
1605 LInstruction* pop = NULL;
1606 HEnvironment* env = current_block_->last_environment();
1607
1608 if (env->entry()->arguments_pushed()) {
1609 int argument_count = env->arguments_environment()->parameter_count();
1610 pop = new(zone()) LDrop(argument_count);
1611 ASSERT(instr->argument_delta() == -argument_count);
1612 }
1613
1614 HEnvironment* outer =
1615 current_block_->last_environment()->DiscardInlined(false);
1616 current_block_->UpdateEnvironment(outer);
1617
1618 return pop;
1619 }
1620
1621
1622 LInstruction* LChunkBuilder::DoLoadContextSlot(HLoadContextSlot* instr) {
1623 LOperand* context = UseRegisterAtStart(instr->value());
1624 LInstruction* result =
1625 DefineAsRegister(new(zone()) LLoadContextSlot(context));
1626 return instr->RequiresHoleCheck() ? AssignEnvironment(result) : result;
1627 }
1628
1629
1630 LInstruction* LChunkBuilder::DoLoadFunctionPrototype(
1631 HLoadFunctionPrototype* instr) {
1632 LOperand* function = UseRegister(instr->function());
1633 LOperand* temp = TempRegister();
1634 return AssignEnvironment(DefineAsRegister(
1635 new(zone()) LLoadFunctionPrototype(function, temp)));
1636 }
1637
1638
1639 LInstruction* LChunkBuilder::DoLoadGlobalCell(HLoadGlobalCell* instr) {
1640 LLoadGlobalCell* result = new(zone()) LLoadGlobalCell();
1641 return instr->RequiresHoleCheck()
1642 ? AssignEnvironment(DefineAsRegister(result))
1643 : DefineAsRegister(result);
1644 }
1645
1646
1647 LInstruction* LChunkBuilder::DoLoadGlobalGeneric(HLoadGlobalGeneric* instr) {
1648 LOperand* context = UseFixed(instr->context(), cp);
1649 LOperand* global_object = UseFixed(instr->global_object(), x0);
1650 LLoadGlobalGeneric* result =
1651 new(zone()) LLoadGlobalGeneric(context, global_object);
1652 return MarkAsCall(DefineFixed(result, x0), instr);
1653 }
1654
1655
1656 LInstruction* LChunkBuilder::DoLoadKeyed(HLoadKeyed* instr) {
1657 ASSERT(instr->key()->representation().IsSmiOrInteger32());
1658 ElementsKind elements_kind = instr->elements_kind();
1659 LOperand* elements = UseRegister(instr->elements());
1660 LOperand* key = UseRegisterOrConstantAtStart(instr->key());
1661
1662 if (!instr->is_typed_elements()) {
1663 if (instr->representation().IsDouble()) {
1664 LOperand* temp = (!instr->key()->IsConstant() ||
1665 instr->RequiresHoleCheck())
1666 ? TempRegister()
1667 : NULL;
1668
1669 LLoadKeyedFixedDouble* result =
1670 new(zone()) LLoadKeyedFixedDouble(elements, key, temp);
1671 return instr->RequiresHoleCheck()
1672 ? AssignEnvironment(DefineAsRegister(result))
1673 : DefineAsRegister(result);
1674 } else {
1675 ASSERT(instr->representation().IsSmiOrTagged() ||
1676 instr->representation().IsInteger32());
1677 LOperand* temp = instr->key()->IsConstant() ? NULL : TempRegister();
1678 LLoadKeyedFixed* result =
1679 new(zone()) LLoadKeyedFixed(elements, key, temp);
1680 return instr->RequiresHoleCheck()
1681 ? AssignEnvironment(DefineAsRegister(result))
1682 : DefineAsRegister(result);
1683 }
1684 } else {
1685 ASSERT((instr->representation().IsInteger32() &&
1686 !IsDoubleOrFloatElementsKind(instr->elements_kind())) ||
1687 (instr->representation().IsDouble() &&
1688 IsDoubleOrFloatElementsKind(instr->elements_kind())));
1689
1690 LOperand* temp = instr->key()->IsConstant() ? NULL : TempRegister();
1691 LLoadKeyedExternal* result =
1692 new(zone()) LLoadKeyedExternal(elements, key, temp);
1693 // An unsigned int array load might overflow and cause a deopt. Make sure it
1694 // has an environment.
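     // For example, a loaded value such as 0x80000000 cannot be represented
     // as a signed int32, so the load may have to deoptimize.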
1695 if (instr->RequiresHoleCheck() ||
1696 elements_kind == EXTERNAL_UINT32_ELEMENTS ||
1697 elements_kind == UINT32_ELEMENTS) {
1698 return AssignEnvironment(DefineAsRegister(result));
1699 } else {
1700 return DefineAsRegister(result);
1701 }
1702 }
1703 }
1704
1705
1706 LInstruction* LChunkBuilder::DoLoadKeyedGeneric(HLoadKeyedGeneric* instr) {
1707 LOperand* context = UseFixed(instr->context(), cp);
1708 LOperand* object = UseFixed(instr->object(), x1);
1709 LOperand* key = UseFixed(instr->key(), x0);
1710
1711 LInstruction* result =
1712 DefineFixed(new(zone()) LLoadKeyedGeneric(context, object, key), x0);
1713 return MarkAsCall(result, instr);
1714 }
1715
1716
1717 LInstruction* LChunkBuilder::DoLoadNamedField(HLoadNamedField* instr) {
1718 LOperand* object = UseRegisterAtStart(instr->object());
1719 return DefineAsRegister(new(zone()) LLoadNamedField(object));
1720 }
1721
1722
1723 LInstruction* LChunkBuilder::DoLoadNamedGeneric(HLoadNamedGeneric* instr) {
1724 LOperand* context = UseFixed(instr->context(), cp);
1725 LOperand* object = UseFixed(instr->object(), x0);
1726 LInstruction* result =
1727 DefineFixed(new(zone()) LLoadNamedGeneric(context, object), x0);
1728 return MarkAsCall(result, instr);
1729 }
1730
1731
1732 LInstruction* LChunkBuilder::DoLoadRoot(HLoadRoot* instr) {
1733 return DefineAsRegister(new(zone()) LLoadRoot);
1734 }
1735
1736
1737 LInstruction* LChunkBuilder::DoMapEnumLength(HMapEnumLength* instr) {
1738 LOperand* map = UseRegisterAtStart(instr->value());
1739 return DefineAsRegister(new(zone()) LMapEnumLength(map));
1740 }
1741
1742
1743 LInstruction* LChunkBuilder::DoFlooringDivByPowerOf2I(HMathFloorOfDiv* instr) {
1744 ASSERT(instr->representation().IsInteger32());
1745 ASSERT(instr->left()->representation().Equals(instr->representation()));
1746 ASSERT(instr->right()->representation().Equals(instr->representation()));
1747 LOperand* dividend = UseRegisterAtStart(instr->left());
1748 int32_t divisor = instr->right()->GetInteger32Constant();
1749 LInstruction* result = DefineAsRegister(new(zone()) LFlooringDivByPowerOf2I(
1750 dividend, divisor));
1751 if ((instr->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) ||
1752 (instr->CheckFlag(HValue::kLeftCanBeMinInt) && divisor == -1)) {
1753 result = AssignEnvironment(result);
1754 }
1755 return result;
1756 }
1757
1758
1759 LInstruction* LChunkBuilder::DoFlooringDivByConstI(HMathFloorOfDiv* instr) {
1760 ASSERT(instr->representation().IsInteger32());
1761 ASSERT(instr->left()->representation().Equals(instr->representation()));
1762 ASSERT(instr->right()->representation().Equals(instr->representation()));
1763 LOperand* dividend = UseRegister(instr->left());
1764 int32_t divisor = instr->right()->GetInteger32Constant();
1765 LOperand* temp =
1766 ((divisor > 0 && !instr->CheckFlag(HValue::kLeftCanBeNegative)) ||
1767 (divisor < 0 && !instr->CheckFlag(HValue::kLeftCanBePositive))) ?
1768 NULL : TempRegister();
1769 LInstruction* result = DefineAsRegister(
1770 new(zone()) LFlooringDivByConstI(dividend, divisor, temp));
1771 if (divisor == 0 ||
1772 (instr->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0)) {
1773 result = AssignEnvironment(result);
1774 }
1775 return result;
1776 }
1777
1778
1779 LInstruction* LChunkBuilder::DoFlooringDivI(HMathFloorOfDiv* instr) {
1780 LOperand* dividend = UseRegister(instr->left());
1781 LOperand* divisor = UseRegister(instr->right());
1782 LOperand* remainder = TempRegister();
1783 LInstruction* result =
1784 DefineAsRegister(new(zone()) LFlooringDivI(dividend, divisor, remainder));
1785 return AssignEnvironment(result);
1786 }
1787
1788
1789 LInstruction* LChunkBuilder::DoMathFloorOfDiv(HMathFloorOfDiv* instr) {
1790 if (instr->RightIsPowerOf2()) {
1791 return DoFlooringDivByPowerOf2I(instr);
1792 } else if (instr->right()->IsConstant()) {
1793 return DoFlooringDivByConstI(instr);
1794 } else {
1795 return DoFlooringDivI(instr);
1796 }
1797 }
1798
1799
1800 LInstruction* LChunkBuilder::DoMathMinMax(HMathMinMax* instr) {
1801 LOperand* left = NULL;
1802 LOperand* right = NULL;
1803 if (instr->representation().IsSmiOrInteger32()) {
1804 ASSERT(instr->left()->representation().Equals(instr->representation()));
1805 ASSERT(instr->right()->representation().Equals(instr->representation()));
1806 left = UseRegisterAtStart(instr->BetterLeftOperand());
1807 right = UseRegisterOrConstantAtStart(instr->BetterRightOperand());
1808 } else {
1809 ASSERT(instr->representation().IsDouble());
1810 ASSERT(instr->left()->representation().IsDouble());
1811 ASSERT(instr->right()->representation().IsDouble());
1812 left = UseRegisterAtStart(instr->left());
1813 right = UseRegisterAtStart(instr->right());
1814 }
1815 return DefineAsRegister(new(zone()) LMathMinMax(left, right));
1816 }
1817
1818
1819 LInstruction* LChunkBuilder::DoModByPowerOf2I(HMod* instr) {
1820 ASSERT(instr->representation().IsInteger32());
1821 ASSERT(instr->left()->representation().Equals(instr->representation()));
1822 ASSERT(instr->right()->representation().Equals(instr->representation()));
1823 LOperand* dividend = UseRegisterAtStart(instr->left());
1824 int32_t divisor = instr->right()->GetInteger32Constant();
1825 LInstruction* result = DefineSameAsFirst(new(zone()) LModByPowerOf2I(
1826 dividend, divisor));
1827 if (instr->CheckFlag(HValue::kBailoutOnMinusZero)) {
1828 result = AssignEnvironment(result);
1829 }
1830 return result;
1831 }
1832
1833
1834 LInstruction* LChunkBuilder::DoModByConstI(HMod* instr) {
1835 ASSERT(instr->representation().IsInteger32());
1836 ASSERT(instr->left()->representation().Equals(instr->representation()));
1837 ASSERT(instr->right()->representation().Equals(instr->representation()));
1838 LOperand* dividend = UseRegister(instr->left());
1839 int32_t divisor = instr->right()->GetInteger32Constant();
1840 LOperand* temp = TempRegister();
1841 LInstruction* result = DefineAsRegister(new(zone()) LModByConstI(
1842 dividend, divisor, temp));
1843 if (divisor == 0 || instr->CheckFlag(HValue::kBailoutOnMinusZero)) {
1844 result = AssignEnvironment(result);
1845 }
1846 return result;
1847 }
1848
1849
1850 LInstruction* LChunkBuilder::DoModI(HMod* instr) {
1851 ASSERT(instr->representation().IsSmiOrInteger32());
1852 ASSERT(instr->left()->representation().Equals(instr->representation()));
1853 ASSERT(instr->right()->representation().Equals(instr->representation()));
1854 LOperand* dividend = UseRegister(instr->left());
1855 LOperand* divisor = UseRegister(instr->right());
1856 LInstruction* result = DefineAsRegister(new(zone()) LModI(dividend, divisor));
1857 if (instr->CheckFlag(HValue::kCanBeDivByZero) ||
1858 instr->CheckFlag(HValue::kBailoutOnMinusZero)) {
1859 result = AssignEnvironment(result);
1860 }
1861 return result;
1862 }
1863
1864
1865 LInstruction* LChunkBuilder::DoMod(HMod* instr) {
1866 if (instr->representation().IsSmiOrInteger32()) {
1867 if (instr->RightIsPowerOf2()) {
1868 return DoModByPowerOf2I(instr);
1869 } else if (instr->right()->IsConstant()) {
1870 return DoModByConstI(instr);
1871 } else {
1872 return DoModI(instr);
1873 }
1874 } else if (instr->representation().IsDouble()) {
1875 return DoArithmeticD(Token::MOD, instr);
1876 } else {
1877 return DoArithmeticT(Token::MOD, instr);
1878 }
1879 }
1880
1881
1882 LInstruction* LChunkBuilder::DoMul(HMul* instr) {
1883 if (instr->representation().IsSmiOrInteger32()) {
1884 ASSERT(instr->left()->representation().Equals(instr->representation()));
1885 ASSERT(instr->right()->representation().Equals(instr->representation()));
1886
1887 bool can_overflow = instr->CheckFlag(HValue::kCanOverflow);
1888 bool bailout_on_minus_zero = instr->CheckFlag(HValue::kBailoutOnMinusZero);
1889 bool needs_environment = can_overflow || bailout_on_minus_zero;
1890
1891 HValue* least_const = instr->BetterLeftOperand();
1892 HValue* most_const = instr->BetterRightOperand();
1893
1894 LOperand* left = UseRegisterAtStart(least_const);
1895
1896 // LMulConstI can handle a subset of constants:
1897 // With support for overflow detection:
1898 // -1, 0, 1, 2
1899 // Without support for overflow detection:
1900 // 2^n, -(2^n)
1901 // 2^n + 1, -(2^n - 1)
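     // For example, -1, 0, 1 and 2 are always accepted, while 7 (2^3 - 1),
     // 8 (2^3) and 9 (2^3 + 1) are only accepted when overflow detection is
     // not needed; a constant such as 6 falls through to the generic
     // LMulI/LMulS path below.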
1902 if (most_const->IsConstant()) {
1903 int32_t constant = HConstant::cast(most_const)->Integer32Value();
1904 int32_t constant_abs = (constant >= 0) ? constant : -constant;
1905
1906 if (((constant >= -1) && (constant <= 2)) ||
1907 (!can_overflow && (IsPowerOf2(constant_abs) ||
1908 IsPowerOf2(constant_abs + 1) ||
1909 IsPowerOf2(constant_abs - 1)))) {
1910 LConstantOperand* right = UseConstant(most_const);
1911 LMulConstIS* mul = new(zone()) LMulConstIS(left, right);
1912 if (needs_environment) AssignEnvironment(mul);
1913 return DefineAsRegister(mul);
1914 }
1915 }
1916
1917 // LMulI/S can handle all cases, but it requires that a register is
1918 // allocated for the second operand.
1919 LInstruction* result;
1920 if (instr->representation().IsSmi()) {
1921 LOperand* right = UseRegisterAtStart(most_const);
1922 result = DefineAsRegister(new(zone()) LMulS(left, right));
1923 } else {
1924 LOperand* right = UseRegisterAtStart(most_const);
1925 result = DefineAsRegister(new(zone()) LMulI(left, right));
1926 }
1927 if (needs_environment) AssignEnvironment(result);
1928 return result;
1929 } else if (instr->representation().IsDouble()) {
1930 return DoArithmeticD(Token::MUL, instr);
1931 } else {
1932 return DoArithmeticT(Token::MUL, instr);
1933 }
1934 }
1935
1936
1937 LInstruction* LChunkBuilder::DoOsrEntry(HOsrEntry* instr) {
1938 ASSERT(argument_count_ == 0);
1939 allocator_->MarkAsOsrEntry();
1940 current_block_->last_environment()->set_ast_id(instr->ast_id());
1941 return AssignEnvironment(new(zone()) LOsrEntry);
1942 }
1943
1944
1945 LInstruction* LChunkBuilder::DoParameter(HParameter* instr) {
1946 LParameter* result = new(zone()) LParameter;
1947 if (instr->kind() == HParameter::STACK_PARAMETER) {
1948 int spill_index = chunk_->GetParameterStackSlot(instr->index());
1949 return DefineAsSpilled(result, spill_index);
1950 } else {
1951 ASSERT(info()->IsStub());
1952 CodeStubInterfaceDescriptor* descriptor =
1953 info()->code_stub()->GetInterfaceDescriptor(info()->isolate());
1954 int index = static_cast<int>(instr->index());
1955 Register reg = descriptor->GetParameterRegister(index);
1956 return DefineFixed(result, reg);
1957 }
1958 }
1959
1960
1961 LInstruction* LChunkBuilder::DoPower(HPower* instr) {
1962 ASSERT(instr->representation().IsDouble());
1963 // We call a C function for double power. It can't trigger a GC.
1964 // We need to use a fixed result register for the call.
1965 Representation exponent_type = instr->right()->representation();
1966 ASSERT(instr->left()->representation().IsDouble());
1967 LOperand* left = UseFixedDouble(instr->left(), d0);
1968 LOperand* right = exponent_type.IsInteger32()
1969 ? UseFixed(instr->right(), x12)
1970 : exponent_type.IsDouble()
1971 ? UseFixedDouble(instr->right(), d1)
1972 : UseFixed(instr->right(), x11);
1973 LPower* result = new(zone()) LPower(left, right);
1974 return MarkAsCall(DefineFixedDouble(result, d0),
1975 instr,
1976 CAN_DEOPTIMIZE_EAGERLY);
1977 }
1978
1979
1980 LInstruction* LChunkBuilder::DoPushArgument(HPushArgument* instr) {
1981 LOperand* argument = UseRegister(instr->argument());
1982 return new(zone()) LPushArgument(argument);
1983 }
1984
1985
1986 LInstruction* LChunkBuilder::DoRegExpLiteral(HRegExpLiteral* instr) {
1987 LOperand* context = UseFixed(instr->context(), cp);
1988 return MarkAsCall(
1989 DefineFixed(new(zone()) LRegExpLiteral(context), x0), instr);
1990 }
1991
1992
1993 LInstruction* LChunkBuilder::DoDoubleBits(HDoubleBits* instr) {
1994 HValue* value = instr->value();
1995 ASSERT(value->representation().IsDouble());
1996 return DefineAsRegister(new(zone()) LDoubleBits(UseRegister(value)));
1997 }
1998
1999
2000 LInstruction* LChunkBuilder::DoConstructDouble(HConstructDouble* instr) {
2001 LOperand* lo = UseRegister(instr->lo());
2002 LOperand* hi = UseRegister(instr->hi());
2003 LOperand* temp = TempRegister();
2004 return DefineAsRegister(new(zone()) LConstructDouble(hi, lo, temp));
2005 }
2006
2007
2008 LInstruction* LChunkBuilder::DoReturn(HReturn* instr) {
2009 LOperand* context = info()->IsStub()
2010 ? UseFixed(instr->context(), cp)
2011 : NULL;
2012 LOperand* parameter_count = UseRegisterOrConstant(instr->parameter_count());
2013 return new(zone()) LReturn(UseFixed(instr->value(), x0), context,
2014 parameter_count);
2015 }
2016
2017
2018 LInstruction* LChunkBuilder::DoSeqStringGetChar(HSeqStringGetChar* instr) {
2019 LOperand* string = UseRegisterAtStart(instr->string());
2020 LOperand* index = UseRegisterOrConstantAtStart(instr->index());
2021 LOperand* temp = TempRegister();
2022 LSeqStringGetChar* result =
2023 new(zone()) LSeqStringGetChar(string, index, temp);
2024 return DefineAsRegister(result);
2025 }
2026
2027
2028 LInstruction* LChunkBuilder::DoSeqStringSetChar(HSeqStringSetChar* instr) {
2029 LOperand* string = UseRegister(instr->string());
2030 LOperand* index = FLAG_debug_code
2031 ? UseRegister(instr->index())
2032 : UseRegisterOrConstant(instr->index());
2033 LOperand* value = UseRegister(instr->value());
2034 LOperand* context = FLAG_debug_code ? UseFixed(instr->context(), cp) : NULL;
2035 LOperand* temp = TempRegister();
2036 LSeqStringSetChar* result =
2037 new(zone()) LSeqStringSetChar(context, string, index, value, temp);
2038 return DefineAsRegister(result);
2039 }
2040
2041
2042 LInstruction* LChunkBuilder::DoShift(Token::Value op,
2043 HBitwiseBinaryOperation* instr) {
2044 if (instr->representation().IsTagged()) {
2045 return DoArithmeticT(op, instr);
2046 }
2047
2048 ASSERT(instr->representation().IsInteger32() ||
2049 instr->representation().IsSmi());
2050 ASSERT(instr->left()->representation().Equals(instr->representation()));
2051 ASSERT(instr->right()->representation().Equals(instr->representation()));
2052
2053 LOperand* left = instr->representation().IsSmi()
2054 ? UseRegister(instr->left())
2055 : UseRegisterAtStart(instr->left());
2056
2057 HValue* right_value = instr->right();
2058 LOperand* right = NULL;
2059 LOperand* temp = NULL;
2060 int constant_value = 0;
2061 if (right_value->IsConstant()) {
2062 right = UseConstant(right_value);
2063 HConstant* constant = HConstant::cast(right_value);
2064 constant_value = constant->Integer32Value() & 0x1f;
2065 } else {
2066 right = UseRegisterAtStart(right_value);
2067 if (op == Token::ROR) {
2068 temp = TempRegister();
2069 }
2070 }
2071
2072 // Shift operations can only deoptimize if we do a logical shift by 0 and the
2073 // result cannot be truncated to int32.
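     // For example, in JavaScript -1 >>> 0 evaluates to 0xFFFFFFFF, which does
     // not fit in a signed int32, so the shift can only avoid a deopt if every
     // use truncates the result back to int32.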
2074 bool does_deopt = false;
2075 if ((op == Token::SHR) && (constant_value == 0)) {
2076 if (FLAG_opt_safe_uint32_operations) {
2077 does_deopt = !instr->CheckFlag(HInstruction::kUint32);
2078 } else {
2079 does_deopt = !instr->CheckUsesForFlag(HValue::kTruncatingToInt32);
2080 }
2081 }
2082
2083 LInstruction* result;
2084 if (instr->representation().IsInteger32()) {
2085 result = DefineAsRegister(new(zone()) LShiftI(op, left, right, does_deopt));
2086 } else {
2087 ASSERT(instr->representation().IsSmi());
2088 result = DefineAsRegister(
2089 new(zone()) LShiftS(op, left, right, temp, does_deopt));
2090 }
2091
2092 return does_deopt ? AssignEnvironment(result) : result;
2093 }
2094
2095
2096 LInstruction* LChunkBuilder::DoRor(HRor* instr) {
2097 return DoShift(Token::ROR, instr);
2098 }
2099
2100
2101 LInstruction* LChunkBuilder::DoSar(HSar* instr) {
2102 return DoShift(Token::SAR, instr);
2103 }
2104
2105
2106 LInstruction* LChunkBuilder::DoShl(HShl* instr) {
2107 return DoShift(Token::SHL, instr);
2108 }
2109
2110
2111 LInstruction* LChunkBuilder::DoShr(HShr* instr) {
2112 return DoShift(Token::SHR, instr);
2113 }
2114
2115
2116 LInstruction* LChunkBuilder::DoSimulate(HSimulate* instr) {
2117 instr->ReplayEnvironment(current_block_->last_environment());
2118 return NULL;
2119 }
2120
2121
2122 LInstruction* LChunkBuilder::DoStackCheck(HStackCheck* instr) {
2123 if (instr->is_function_entry()) {
2124 LOperand* context = UseFixed(instr->context(), cp);
2125 return MarkAsCall(new(zone()) LStackCheck(context), instr);
2126 } else {
2127 ASSERT(instr->is_backwards_branch());
2128 LOperand* context = UseAny(instr->context());
2129 return AssignEnvironment(
2130 AssignPointerMap(new(zone()) LStackCheck(context)));
2131 }
2132 }
2133
2134
2135 LInstruction* LChunkBuilder::DoStoreCodeEntry(HStoreCodeEntry* instr) {
2136 LOperand* function = UseRegister(instr->function());
2137 LOperand* code_object = UseRegisterAtStart(instr->code_object());
2138 LOperand* temp = TempRegister();
2139 return new(zone()) LStoreCodeEntry(function, code_object, temp);
2140 }
2141
2142
2143 LInstruction* LChunkBuilder::DoStoreContextSlot(HStoreContextSlot* instr) {
2144 LOperand* temp = TempRegister();
2145 LOperand* context;
2146 LOperand* value;
2147 if (instr->NeedsWriteBarrier()) {
2148 // TODO(all): Replace these constraints when RecordWriteStub has been
2149 // rewritten.
2150 context = UseRegisterAndClobber(instr->context());
2151 value = UseRegisterAndClobber(instr->value());
2152 } else {
2153 context = UseRegister(instr->context());
2154 value = UseRegister(instr->value());
2155 }
2156 LInstruction* result = new(zone()) LStoreContextSlot(context, value, temp);
2157 return instr->RequiresHoleCheck() ? AssignEnvironment(result) : result;
2158 }
2159
2160
2161 LInstruction* LChunkBuilder::DoStoreGlobalCell(HStoreGlobalCell* instr) {
2162 LOperand* value = UseRegister(instr->value());
2163 if (instr->RequiresHoleCheck()) {
2164 return AssignEnvironment(new(zone()) LStoreGlobalCell(value,
2165 TempRegister(),
2166 TempRegister()));
2167 } else {
2168 return new(zone()) LStoreGlobalCell(value, TempRegister(), NULL);
2169 }
2170 }
2171
2172
2173 LInstruction* LChunkBuilder::DoStoreKeyed(HStoreKeyed* instr) {
2174 LOperand* temp = NULL;
2175 LOperand* elements = NULL;
2176 LOperand* val = NULL;
2177 LOperand* key = UseRegisterOrConstantAtStart(instr->key());
2178
2179 if (!instr->is_typed_elements() &&
2180 instr->value()->representation().IsTagged() &&
2181 instr->NeedsWriteBarrier()) {
2182 // RecordWrite() will clobber all registers.
2183 elements = UseRegisterAndClobber(instr->elements());
2184 val = UseRegisterAndClobber(instr->value());
2185 temp = TempRegister();
2186 } else {
2187 elements = UseRegister(instr->elements());
2188 val = UseRegister(instr->value());
2189 temp = instr->key()->IsConstant() ? NULL : TempRegister();
2190 }
2191
2192 if (instr->is_typed_elements()) {
2193 ASSERT((instr->value()->representation().IsInteger32() &&
2194 !IsDoubleOrFloatElementsKind(instr->elements_kind())) ||
2195 (instr->value()->representation().IsDouble() &&
2196 IsDoubleOrFloatElementsKind(instr->elements_kind())));
2197 ASSERT((instr->is_fixed_typed_array() &&
2198 instr->elements()->representation().IsTagged()) ||
2199 (instr->is_external() &&
2200 instr->elements()->representation().IsExternal()));
2201 return new(zone()) LStoreKeyedExternal(elements, key, val, temp);
2202
2203 } else if (instr->value()->representation().IsDouble()) {
2204 ASSERT(instr->elements()->representation().IsTagged());
2205 return new(zone()) LStoreKeyedFixedDouble(elements, key, val, temp);
2206
2207 } else {
2208 ASSERT(instr->elements()->representation().IsTagged());
2209 ASSERT(instr->value()->representation().IsSmiOrTagged() ||
2210 instr->value()->representation().IsInteger32());
2211 return new(zone()) LStoreKeyedFixed(elements, key, val, temp);
2212 }
2213 }
2214
2215
2216 LInstruction* LChunkBuilder::DoStoreKeyedGeneric(HStoreKeyedGeneric* instr) {
2217 LOperand* context = UseFixed(instr->context(), cp);
2218 LOperand* object = UseFixed(instr->object(), x2);
2219 LOperand* key = UseFixed(instr->key(), x1);
2220 LOperand* value = UseFixed(instr->value(), x0);
2221
2222 ASSERT(instr->object()->representation().IsTagged());
2223 ASSERT(instr->key()->representation().IsTagged());
2224 ASSERT(instr->value()->representation().IsTagged());
2225
2226 return MarkAsCall(
2227 new(zone()) LStoreKeyedGeneric(context, object, key, value), instr);
2228 }
2229
2230
2231 LInstruction* LChunkBuilder::DoStoreNamedField(HStoreNamedField* instr) {
2232 // TODO(jbramley): It might be beneficial to allow value to be a constant in
2233 // some cases. x64 makes use of this with FLAG_track_fields, for example.
2234
2235 LOperand* object = UseRegister(instr->object());
2236 LOperand* value;
2237 LOperand* temp0 = NULL;
2238 LOperand* temp1 = NULL;
2239
2240 if (instr->access().IsExternalMemory() ||
2241 instr->field_representation().IsDouble()) {
2242 value = UseRegister(instr->value());
2243 } else if (instr->NeedsWriteBarrier()) {
2244 value = UseRegisterAndClobber(instr->value());
2245 temp0 = TempRegister();
2246 temp1 = TempRegister();
2247 } else if (instr->NeedsWriteBarrierForMap()) {
2248 value = UseRegister(instr->value());
2249 temp0 = TempRegister();
2250 temp1 = TempRegister();
2251 } else {
2252 value = UseRegister(instr->value());
2253 temp0 = TempRegister();
2254 }
2255
2256 LStoreNamedField* result =
2257 new(zone()) LStoreNamedField(object, value, temp0, temp1);
2258 if (instr->field_representation().IsHeapObject() &&
2259 !instr->value()->type().IsHeapObject()) {
2260 return AssignEnvironment(result);
2261 }
2262 return result;
2263 }
2264
2265
2266 LInstruction* LChunkBuilder::DoStoreNamedGeneric(HStoreNamedGeneric* instr) {
2267 LOperand* context = UseFixed(instr->context(), cp);
2268 LOperand* object = UseFixed(instr->object(), x1);
2269 LOperand* value = UseFixed(instr->value(), x0);
2270 LInstruction* result = new(zone()) LStoreNamedGeneric(context, object, value);
2271 return MarkAsCall(result, instr);
2272 }
2273
2274
2275 LInstruction* LChunkBuilder::DoStringAdd(HStringAdd* instr) {
2276 LOperand* context = UseFixed(instr->context(), cp);
2277 LOperand* left = UseFixed(instr->left(), x1);
2278 LOperand* right = UseFixed(instr->right(), x0);
2279
2280 LStringAdd* result = new(zone()) LStringAdd(context, left, right);
2281 return MarkAsCall(DefineFixed(result, x0), instr);
2282 }
2283
2284
2285 LInstruction* LChunkBuilder::DoStringCharCodeAt(HStringCharCodeAt* instr) {
2286 LOperand* string = UseRegisterAndClobber(instr->string());
2287 LOperand* index = UseRegisterAndClobber(instr->index());
2288 LOperand* context = UseAny(instr->context());
2289 LStringCharCodeAt* result =
2290 new(zone()) LStringCharCodeAt(context, string, index);
2291 return AssignEnvironment(AssignPointerMap(DefineAsRegister(result)));
2292 }
2293
2294
2295 LInstruction* LChunkBuilder::DoStringCharFromCode(HStringCharFromCode* instr) {
2296 LOperand* char_code = UseRegister(instr->value());
2297 LOperand* context = UseAny(instr->context());
2298 LStringCharFromCode* result =
2299 new(zone()) LStringCharFromCode(context, char_code);
2300 return AssignPointerMap(DefineAsRegister(result));
2301 }
2302
2303
2304 LInstruction* LChunkBuilder::DoStringCompareAndBranch(
2305 HStringCompareAndBranch* instr) {
2306 ASSERT(instr->left()->representation().IsTagged());
2307 ASSERT(instr->right()->representation().IsTagged());
2308 LOperand* context = UseFixed(instr->context(), cp);
2309 LOperand* left = UseFixed(instr->left(), x1);
2310 LOperand* right = UseFixed(instr->right(), x0);
2311 LStringCompareAndBranch* result =
2312 new(zone()) LStringCompareAndBranch(context, left, right);
2313 return MarkAsCall(result, instr);
2314 }
2315
2316
2317 LInstruction* LChunkBuilder::DoSub(HSub* instr) {
2318 if (instr->representation().IsSmiOrInteger32()) {
2319 ASSERT(instr->left()->representation().Equals(instr->representation()));
2320 ASSERT(instr->right()->representation().Equals(instr->representation()));
2321 LOperand *left;
2322 if (instr->left()->IsConstant() &&
2323 (HConstant::cast(instr->left())->Integer32Value() == 0)) {
2324 left = UseConstant(instr->left());
2325 } else {
2326 left = UseRegisterAtStart(instr->left());
2327 }
2328 LOperand* right = UseRegisterOrConstantAtStart(instr->right());
2329 LInstruction* result = instr->representation().IsSmi() ?
2330 DefineAsRegister(new(zone()) LSubS(left, right)) :
2331 DefineAsRegister(new(zone()) LSubI(left, right));
2332 if (instr->CheckFlag(HValue::kCanOverflow)) {
2333 result = AssignEnvironment(result);
2334 }
2335 return result;
2336 } else if (instr->representation().IsDouble()) {
2337 return DoArithmeticD(Token::SUB, instr);
2338 } else {
2339 return DoArithmeticT(Token::SUB, instr);
2340 }
2341 }
2342
2343
2344 LInstruction* LChunkBuilder::DoThisFunction(HThisFunction* instr) {
2345 if (instr->HasNoUses()) {
2346 return NULL;
2347 } else {
2348 return DefineAsRegister(new(zone()) LThisFunction);
2349 }
2350 }
2351
2352
2353 LInstruction* LChunkBuilder::DoToFastProperties(HToFastProperties* instr) {
2354 LOperand* object = UseFixed(instr->value(), x0);
2355 LToFastProperties* result = new(zone()) LToFastProperties(object);
2356 return MarkAsCall(DefineFixed(result, x0), instr);
2357 }
2358
2359
2360 LInstruction* LChunkBuilder::DoTransitionElementsKind(
2361 HTransitionElementsKind* instr) {
2362 LOperand* object = UseRegister(instr->object());
2363 if (IsSimpleMapChangeTransition(instr->from_kind(), instr->to_kind())) {
2364 LTransitionElementsKind* result =
2365 new(zone()) LTransitionElementsKind(object, NULL,
2366 TempRegister(), TempRegister());
2367 return result;
2368 } else {
2369 LOperand* context = UseFixed(instr->context(), cp);
2370 LTransitionElementsKind* result =
2371 new(zone()) LTransitionElementsKind(object, context, TempRegister());
2372 return AssignPointerMap(result);
2373 }
2374 }
2375
2376
2377 LInstruction* LChunkBuilder::DoTrapAllocationMemento(
2378 HTrapAllocationMemento* instr) {
2379 LOperand* object = UseRegister(instr->object());
2380 LOperand* temp1 = TempRegister();
2381 LOperand* temp2 = TempRegister();
2382 LTrapAllocationMemento* result =
2383 new(zone()) LTrapAllocationMemento(object, temp1, temp2);
2384 return AssignEnvironment(result);
2385 }
2386
2387
2388 LInstruction* LChunkBuilder::DoTypeof(HTypeof* instr) {
2389 LOperand* context = UseFixed(instr->context(), cp);
2390 // TODO(jbramley): The ARM back-end uses UseFixed to force the input into r0.
2391 // However, LCodeGen::DoTypeof just pushes it to the stack (for CallRuntime)
2392 // anyway, so the input doesn't need a fixed register. We might be able to
2393 // improve the ARM back-end a little by relaxing this restriction.
2394 LTypeof* result =
2395 new(zone()) LTypeof(context, UseRegisterAtStart(instr->value()));
2396 return MarkAsCall(DefineFixed(result, x0), instr);
2397 }
2398
2399
2400 LInstruction* LChunkBuilder::DoTypeofIsAndBranch(HTypeofIsAndBranch* instr) {
2401 LInstruction* goto_instr = CheckElideControlInstruction(instr);
2402 if (goto_instr != NULL) return goto_instr;
2403
2404 // We only need temp registers in some cases, but we can't dereference the
2405 // instr->type_literal() handle to test that here.
2406 LOperand* temp1 = TempRegister();
2407 LOperand* temp2 = TempRegister();
2408
2409 return new(zone()) LTypeofIsAndBranch(
2410 UseRegister(instr->value()), temp1, temp2);
2411 }
2412
2413
2414 LInstruction* LChunkBuilder::DoUnaryMathOperation(HUnaryMathOperation* instr) {
2415 switch (instr->op()) {
2416 case kMathAbs: {
2417 Representation r = instr->representation();
2418 if (r.IsTagged()) {
2419 // The tagged case might need to allocate a HeapNumber for the result,
2420 // so it is handled by a separate LInstruction.
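     // For example, the absolute value of a negative HeapNumber needs a
     // freshly allocated HeapNumber, which is why a pointer map is assigned
     // below.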
2421 LOperand* context = UseFixed(instr->context(), cp);
2422 LOperand* input = UseRegister(instr->value());
2423 LOperand* temp1 = TempRegister();
2424 LOperand* temp2 = TempRegister();
2425 LOperand* temp3 = TempRegister();
2426 LMathAbsTagged* result =
2427 new(zone()) LMathAbsTagged(context, input, temp1, temp2, temp3);
2428 return AssignEnvironment(AssignPointerMap(DefineAsRegister(result)));
2429 } else {
2430 LOperand* input = UseRegisterAtStart(instr->value());
2431 LMathAbs* result = new(zone()) LMathAbs(input);
2432 if (r.IsDouble()) {
2433 // The Double case can never fail so it doesn't need an environment.
2434 return DefineAsRegister(result);
2435 } else {
2436 ASSERT(r.IsInteger32() || r.IsSmi());
2437 // The Integer32 and Smi cases need an environment because they can
2438 // deoptimize on the minimum representable number (kMinInt).
2439 return AssignEnvironment(DefineAsRegister(result));
2440 }
2441 }
2442 }
2443 case kMathExp: {
2444 ASSERT(instr->representation().IsDouble());
2445 ASSERT(instr->value()->representation().IsDouble());
2446 LOperand* input = UseRegister(instr->value());
2447 // TODO(all): Implement TempFPRegister.
2448 LOperand* double_temp1 = FixedTemp(d24); // This was chosen arbitrarily.
2449 LOperand* temp1 = TempRegister();
2450 LOperand* temp2 = TempRegister();
2451 LOperand* temp3 = TempRegister();
2452 LMathExp* result = new(zone()) LMathExp(input, double_temp1,
2453 temp1, temp2, temp3);
2454 return DefineAsRegister(result);
2455 }
2456 case kMathFloor: {
2457 ASSERT(instr->representation().IsInteger32());
2458 ASSERT(instr->value()->representation().IsDouble());
2459 // TODO(jbramley): A64 can easily handle a double argument with frintm,
2460 // but we're never asked for it here. At the moment, we fall back to the
2461 // runtime if the result doesn't fit, like the other architectures.
2462 LOperand* input = UseRegisterAtStart(instr->value());
2463 LMathFloor* result = new(zone()) LMathFloor(input);
2464 return AssignEnvironment(AssignPointerMap(DefineAsRegister(result)));
2465 }
2466 case kMathLog: {
2467 ASSERT(instr->representation().IsDouble());
2468 ASSERT(instr->value()->representation().IsDouble());
2469 LOperand* input = UseFixedDouble(instr->value(), d0);
2470 LMathLog* result = new(zone()) LMathLog(input);
2471 return MarkAsCall(DefineFixedDouble(result, d0), instr);
2472 }
2473 case kMathPowHalf: {
2474 ASSERT(instr->representation().IsDouble());
2475 ASSERT(instr->value()->representation().IsDouble());
2476 LOperand* input = UseRegister(instr->value());
2477 return DefineAsRegister(new(zone()) LMathPowHalf(input));
2478 }
2479 case kMathRound: {
2480 ASSERT(instr->representation().IsInteger32());
2481 ASSERT(instr->value()->representation().IsDouble());
2482 // TODO(jbramley): As with kMathFloor, we can probably handle double
2483 // results fairly easily, but we are never asked for them.
2484 LOperand* input = UseRegister(instr->value());
2485 LOperand* temp = FixedTemp(d24); // Chosen arbitrarily.
2486 LMathRound* result = new(zone()) LMathRound(input, temp);
2487 return AssignEnvironment(DefineAsRegister(result));
2488 }
2489 case kMathSqrt: {
2490 ASSERT(instr->representation().IsDouble());
2491 ASSERT(instr->value()->representation().IsDouble());
2492 LOperand* input = UseRegisterAtStart(instr->value());
2493 return DefineAsRegister(new(zone()) LMathSqrt(input));
2494 }
2495 case kMathClz32: {
2496 ASSERT(instr->representation().IsInteger32());
2497 ASSERT(instr->value()->representation().IsInteger32());
2498 LOperand* input = UseRegisterAtStart(instr->value());
2499 return DefineAsRegister(new(zone()) LMathClz32(input));
2500 }
2501 default:
2502 UNREACHABLE();
2503 return NULL;
2504 }
2505 }
2506
2507
2508 LInstruction* LChunkBuilder::DoUnknownOSRValue(HUnknownOSRValue* instr) {
2509 // Use an index that corresponds to the location in the unoptimized frame,
2510 // which the optimized frame will subsume.
2511 int env_index = instr->index();
2512 int spill_index = 0;
2513 if (instr->environment()->is_parameter_index(env_index)) {
2514 spill_index = chunk_->GetParameterStackSlot(env_index);
2515 } else {
2516 spill_index = env_index - instr->environment()->first_local_index();
2517 if (spill_index > LUnallocated::kMaxFixedSlotIndex) {
2518 Abort(kTooManySpillSlotsNeededForOSR);
2519 spill_index = 0;
2520 }
2521 }
2522 return DefineAsSpilled(new(zone()) LUnknownOSRValue, spill_index);
2523 }
2524
2525
2526 LInstruction* LChunkBuilder::DoUseConst(HUseConst* instr) {
2527 return NULL;
2528 }
2529
2530
2531 LInstruction* LChunkBuilder::DoForInPrepareMap(HForInPrepareMap* instr) {
2532 LOperand* context = UseFixed(instr->context(), cp);
2533 // Assign object to a fixed register different from those already used in
2534 // LForInPrepareMap.
2535 LOperand* object = UseFixed(instr->enumerable(), x0);
2536 LForInPrepareMap* result = new(zone()) LForInPrepareMap(context, object);
2537 return MarkAsCall(DefineFixed(result, x0), instr, CAN_DEOPTIMIZE_EAGERLY);
2538 }
2539
2540
2541 LInstruction* LChunkBuilder::DoForInCacheArray(HForInCacheArray* instr) {
2542 LOperand* map = UseRegister(instr->map());
2543 return AssignEnvironment(DefineAsRegister(new(zone()) LForInCacheArray(map)));
2544 }
2545
2546
2547 LInstruction* LChunkBuilder::DoCheckMapValue(HCheckMapValue* instr) {
2548 LOperand* value = UseRegisterAtStart(instr->value());
2549 LOperand* map = UseRegister(instr->map());
2550 LOperand* temp = TempRegister();
2551 return AssignEnvironment(new(zone()) LCheckMapValue(value, map, temp));
2552 }
2553
2554
2555 LInstruction* LChunkBuilder::DoLoadFieldByIndex(HLoadFieldByIndex* instr) {
2556 LOperand* object = UseRegisterAtStart(instr->object());
2557 LOperand* index = UseRegister(instr->index());
2558 return DefineAsRegister(new(zone()) LLoadFieldByIndex(object, index));
2559 }
2560
2561
2562 LInstruction* LChunkBuilder::DoWrapReceiver(HWrapReceiver* instr) {
2563 LOperand* receiver = UseRegister(instr->receiver());
2564 LOperand* function = UseRegister(instr->function());
2565 LWrapReceiver* result = new(zone()) LWrapReceiver(receiver, function);
2566 return AssignEnvironment(DefineAsRegister(result));
2567 }
2568
2569
2570 } } // namespace v8::internal